prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>res_users.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from openerp.osv import osv, fields import openerp.addons.product.product class res_users(osv.osv): _inherit = 'res.users' _columns = {<|fim▁hole|><|fim▁end|>
'target_sales_invoiced': fields.integer('Invoiced in Sale Orders Target'), }
<|file_name|>server.go<|end_file_name|><|fim▁begin|>package server import (<|fim▁hole|> "autoscaler/routes" "autoscaler/scalingengine" "autoscaler/scalingengine/config" "code.cloudfoundry.org/cfhttp" "code.cloudfoundry.org/lager" "github.com/gorilla/mux" "github.com/tedsuo/ifrit" "github.com/tedsuo/ifrit/http_server" "fmt" "net/http" ) type VarsFunc func(w http.ResponseWriter, r *http.Request, vars map[string]string) func (vh VarsFunc) ServeHTTP(w http.ResponseWriter, r *http.Request) { vars := mux.Vars(r) vh(w, r, vars) } func NewServer(logger lager.Logger, conf *config.Config, scalingEngineDB db.ScalingEngineDB, scalingEngine scalingengine.ScalingEngine) (ifrit.Runner, error) { handler := NewScalingHandler(logger, scalingEngineDB, scalingEngine) r := routes.ScalingEngineRoutes() r.Get(routes.ScaleRouteName).Handler(VarsFunc(handler.Scale)) r.Get(routes.GetScalingHistoriesRouteName).Handler(VarsFunc(handler.GetScalingHistories)) r.Get(routes.SetActiveScheduleRouteName).Handler(VarsFunc(handler.StartActiveSchedule)) r.Get(routes.DeleteActiveScheduleRouteName).Handler(VarsFunc(handler.RemoveActiveSchedule)) r.Get(routes.GetActiveSchedulesRouteName).Handler(VarsFunc(handler.GetActiveSchedule)) addr := fmt.Sprintf("0.0.0.0:%d", conf.Server.Port) logger.Info("new-http-server", lager.Data{"serverConfig": conf.Server}) if (conf.Server.TLS.KeyFile != "") && (conf.Server.TLS.CertFile != "") { tlsConfig, err := cfhttp.NewTLSConfig(conf.Server.TLS.CertFile, conf.Server.TLS.KeyFile, conf.Server.TLS.CACertFile) if err != nil { logger.Error("failed-new-server-new-tls-config", err, lager.Data{"tls": conf.Server.TLS}) return nil, err } return http_server.NewTLSServer(addr, r, tlsConfig), nil } return http_server.New(addr, r), nil }<|fim▁end|>
"autoscaler/db"
<|file_name|>router.ts<|end_file_name|><|fim▁begin|>import Vue from 'vue' import Router from 'vue-router' import index from '../components/index' import project from '../components/project/index' import proAdd from '../components/project/proAdd' import proList from '../components/project/proList' import apiList from '../components/project/apiList' import apiView from '../components/project/apiView'<|fim▁hole|>import message from '../components/message' import member from '../components/member' import doc from '../components/doc' import set from '../components/set' import userSet from '../components/user/set' import login from '../components/user/login' Vue.use(Router) const router:any = new Router({ mode: 'history', routes: [ { path: '/', name: 'index', component: index }, { path: '/project', name: 'project', component: project, children: [ { path: 'list', name: 'proList', component: proList, meta: { requireLogin: true } }, { path: 'add', name: 'proAdd', component: proAdd, meta: { requireLogin: true } }, { path: ':proId/edit', name: 'proEdit', component: proAdd, meta: { requireLogin: true } }, { path: ':proId/api', name: 'proApiList', component: apiList, children: [ { path: 'add', name: 'apiAdd', component: apiEdit, meta: { requireLogin: true } }, { path: ':apiId/detail', name: 'apiView', component: apiView, meta: { requireLogin: true }, }, { path: ':apiId/edit', name: 'apiEdit', component: apiEdit, meta: { requireLogin: true } }, { path: ':apiId/history', name: 'apiHistory', component: apiHistory, meta: { requireLogin: true } } ] } ] }, { path: '/test', name: 'test', component: test, meta: { requireLogin: true } }, { path: '/message', name: 'message', component: message, meta: { requireLogin: true } }, { path: '/member', name: 'member', component: member, meta: { requireLogin: true } }, { path: '/doc', name: 'doc', component: doc }, { path: '/set', name: 'set', component: set, meta: { requireLogin: true } }, { path: '/user/set', name: 'userSet', component: 
userSet, meta: { requireLogin: true } }, { path: '/user/login', name: 'login', component: login } ] }) router.beforeEach((to:any, from:any, next:any) => { if (to.matched.some((res:any) => res.meta.requireLogin)) { if (localStorage.getItem('token')) { next() } else { next('/user/login') } } else { next() } }) export default router<|fim▁end|>
import apiEdit from '../components/project/apiEdit' import apiHistory from '../components/project/apiHistory' import test from '../components/test'
<|file_name|>TeamPdfReader.java<|end_file_name|><|fim▁begin|>package me.anthonybruno.soccerSim.reader; import org.apache.pdfbox.io.RandomAccessFile; import org.apache.pdfbox.pdfparser.PDFParser; import org.apache.pdfbox.text.PDFTextStripperByArea; import java.awt.geom.Rectangle2D; import java.io.File; import java.io.IOException; /** * A class that parsers team PDF files into XML files. */ public class TeamPdfReader { private static final Rectangle2D.Double firstTeamFirstPageRegion = new Rectangle2D.Double(0, 0, 330, 550); private static final Rectangle2D.Double secondTeamFirstPageRegion = new Rectangle2D.Double(350, 0, 350, 550); private final File file; public TeamPdfReader(String fileName) { this.file = new File(fileName); } public TeamPdfReader(File file) { this.file = file; } // public String read() { //Using IText :( // try { // PdfReader pdfReader = new PdfReader(file.getAbsolutePath()); // PdfDocument pdfDocument = new PdfDocument(pdfReader); // // LocationTextExtractionStrategy strategy = new LocationTextExtractionStrategy(); // // PdfCanvasProcessor parser = new PdfCanvasProcessor(strategy); // parser.processPageContent(pdfDocument.getFirstPage()); // return strategy.getResultantText(); // // } catch (IOException e) { // e.printStackTrace(); // } // } public void readAllTeamsToFiles() { //Using PDFBox try { PDFParser parser = new PDFParser(new RandomAccessFile(file, "r")); parser.parse(); PDFTextStripperByArea pdfTextStripperByArea = new PDFTextStripperByArea(); pdfTextStripperByArea.addRegion("First", firstTeamFirstPageRegion); pdfTextStripperByArea.addRegion("Second", secondTeamFirstPageRegion); for (int i = 0; i < parser.getPDDocument().getNumberOfPages(); i++) { pdfTextStripperByArea.extractRegions(parser.getPDDocument().getPage(i)); writeTeamToFile(pdfTextStripperByArea.getTextForRegion("First"), "teams"); writeTeamToFile(pdfTextStripperByArea.getTextForRegion("Second"), "teams"); } } catch (IOException e) { e.printStackTrace(); } } public void 
writeTeamToFile(String teamExtractedFromPDf, String saveDirectory) { //FIXME: Reduce size of method if (teamExtractedFromPDf.isEmpty() || !teamExtractedFromPDf.contains(" ")) { return; //reached a blank page } XmlWriter xmlWriter = new XmlWriter("UTF-8"); String text = teamExtractedFromPDf; if (text.indexOf('\n') < text.indexOf(' ')) { text = text.substring(text.indexOf('\n') + 1); } xmlWriter.createOpenTag("team"); if (text.startsWith(" ")) { text = text.substring(text.indexOf('\n') + 1); //need this for El Salvador } String name = text.substring(0, text.indexOf(" ")); text = text.substring(text.indexOf(" ") + 1); if (!Character.isDigit(text.charAt(0))) { //handles countries with two words in name name += " " + text.substring(0, text.indexOf(" ")); } xmlWriter.createTagWithValue("name", name); for (int i = 0; i < 3; i++) { //skipping stuff we don't care about text = moveToNextLine(text); } text = text.substring(text.indexOf(':') + 2); String firstHalfAttempts = text.substring(0, text.indexOf(' ')); text = text.substring(text.indexOf(' ') + 1); String secondHalfAttempts = text.substring(0, text.indexOf('\n')); text = moveToNextLine(text); xmlWriter.createTagWithValue("goalRating", text.substring(text.indexOf('-') + 1, text.indexOf('\n'))); text = moveToNextLine(text); String[] defensiveAttempts = parseHalfValues(text); text = defensiveAttempts[0]; String firstHalfDefensiveAttempts = defensiveAttempts[1]; String secondHalfDefensiveAttempts = defensiveAttempts[2]; String[] defensiveSOG = parseHalfValues(text); text = defensiveSOG[0]; String firstHalfSOG = defensiveSOG[1]; String secondHalfSOG = defensiveSOG[2]; xmlWriter.createTagWithValue("formation", text.substring(text.indexOf(':') + 2, text.indexOf('\n'))); text = moveToNextLine(text); text = text.substring(text.indexOf(':') + 2); if (text.indexOf(' ') < text.indexOf('\n')) { xmlWriter.createTagWithValue("strategy", text.substring(0, text.indexOf(' '))); //team has fair play score } else { 
xmlWriter.createTagWithValue("strategy", text.substring(0, text.indexOf("\n"))); } text = moveToNextLine(text); text = moveToNextLine(text); parseHalfStats(xmlWriter, "halfStats", firstHalfAttempts, firstHalfDefensiveAttempts, firstHalfSOG); parseHalfStats(xmlWriter, "halfStats", secondHalfAttempts, secondHalfDefensiveAttempts, secondHalfSOG); xmlWriter.createOpenTag("players"); while (!text.startsWith("Goalies")) { text = parsePlayer(xmlWriter, text);<|fim▁hole|> parseGoalies(xmlWriter, text); xmlWriter.createCloseTag("players"); xmlWriter.createCloseTag("team"); File saveDir = new File(saveDirectory); try { saveDir.createNewFile(); } catch (IOException e) { e.printStackTrace(); } if (!saveDir.exists()) { file.mkdir(); } xmlWriter.writeToFile(new File("src/main/resources/teams/" + name + ".xml")); } private void parseGoalies(XmlWriter xmlWriter, String text) { while (!text.isEmpty()) { xmlWriter.createOpenTag("goalie"); String playerName = ""; do { playerName += text.substring(0, text.indexOf(' ')); text = text.substring(text.indexOf(' ') + 1); } while (!isNumeric(text.substring(0, text.indexOf(' ')))); xmlWriter.createTagWithValue("name", playerName); xmlWriter.createTagWithValue("rating", text.substring(0, text.indexOf(' '))); text = text.substring(text.indexOf(' ') + 1); text = text.substring(text.indexOf(' ') + 1); text = parsePlayerAttribute(xmlWriter, "injury", text); createMultiplierTag(xmlWriter, text); text = moveToNextLine(text); xmlWriter.createCloseTag("goalie"); } } private boolean isNumeric(char c) { return isNumeric(c + ""); } private boolean isNumeric(String string) { return string.matches("^[-+]?\\d+$"); } private void createMultiplierTag(XmlWriter xmlWriter, String text) { if (text.charAt(0) != '-') { xmlWriter.createTagWithValue("multiplier", text.charAt(1) + ""); } else { xmlWriter.createTagWithValue("multiplier", "0"); } } private String parsePlayer(XmlWriter xmlWriter, String text) { xmlWriter.createOpenTag("player"); text = 
parsePlayerName(xmlWriter, text); text = parsePlayerAttribute(xmlWriter, "shotRange", text); text = parsePlayerAttribute(xmlWriter, "goal", text); text = parsePlayerAttribute(xmlWriter, "injury", text); createMultiplierTag(xmlWriter, text); xmlWriter.createCloseTag("player"); text = moveToNextLine(text); return text; } private String parsePlayerName(XmlWriter xmlWriter, String text) { if (isNumeric(text.charAt(text.indexOf(' ') + 1))) { return parsePlayerAttribute(xmlWriter, "name", text); //Player has single name } else { String playerName = text.substring(0, text.indexOf(' ')); text = text.substring(text.indexOf(' ') + 1); while (!isNumeric(text.charAt(0))) { playerName += ' ' + text.substring(0, text.indexOf(' ')); text = text.substring(text.indexOf(' ') + 1); } xmlWriter.createTagWithValue("name", playerName); return text; } } private String parsePlayerAttribute(XmlWriter xmlWriter, String tagName, String text) { xmlWriter.createTagWithValue(tagName, text.substring(0, text.indexOf(' '))); text = text.substring(text.indexOf(' ') + 1); return text; } private String[] parseHalfValues(String text) { text = text.substring(text.indexOf(':') + 2); String firstHalf = text.substring(0, text.indexOf(' ')); text = text.substring(text.indexOf(' ') + 1); String secondHalf = text.substring(0, text.indexOf('\n')); text = moveToNextLine(text); return new String[]{text, firstHalf, secondHalf}; } private void parseHalfStats(XmlWriter xmlWriter, String halfName, String attempts, String defensiveAttempts, String defensiveShotsOnGoal) { xmlWriter.createOpenTag(halfName); xmlWriter.createTagWithValue("attempts", attempts); xmlWriter.createTagWithValue("defensiveAttempts", defensiveAttempts); xmlWriter.createTagWithValue("defensiveShotsOnGoal", defensiveShotsOnGoal); xmlWriter.createCloseTag(halfName); } private String moveToNextLine(String text) { return text.substring(text.indexOf("\n") + 1); } public static void main(String[] args) { TeamPdfReader teamPdfReader = new 
TeamPdfReader("src/main/resources/ruleFiles/Cards1.pdf"); teamPdfReader.readAllTeamsToFiles(); teamPdfReader = new TeamPdfReader("src/main/resources/ruleFiles/Cards2.pdf"); teamPdfReader.readAllTeamsToFiles(); } }<|fim▁end|>
} text = moveToNextLine(text);
<|file_name|>statuses.js<|end_file_name|><|fim▁begin|>exports.BattleStatuses = { brn: { effectType: 'Status', onStart: function (target, source, sourceEffect) { if (sourceEffect && sourceEffect.id === 'flameorb') { this.add('-status', target, 'brn', '[from] item: Flame Orb'); return; } this.add('-status', target, 'brn'); }, onBasePower: function (basePower, attacker, defender, move) { if (move && move.category === 'Physical' && attacker && attacker.ability !== 'guts' && move.id !== 'facade') { return this.chainModify(0.5); // This should really take place directly in the damage function but it's here for now } }, onResidualOrder: 9, onResidual: function (pokemon) { this.damage(pokemon.maxhp / 8); } }, par: { effectType: 'Status', onStart: function (target) { this.add('-status', target, 'par'); }, onModifySpe: function (speMod, pokemon) { if (pokemon.ability !== 'quickfeet') { return this.chain(speMod, 0.25); } }, onBeforeMovePriority: 2, onBeforeMove: function (pokemon) { if (this.random(4) === 0) { this.add('cant', pokemon, 'par'); return false; } } }, slp: { effectType: 'Status', onStart: function (target) { this.add('-status', target, 'slp'); // 1-3 turns this.effectData.startTime = this.random(2, 5); this.effectData.time = this.effectData.startTime; }, onBeforeMovePriority: 2, onBeforeMove: function (pokemon, target, move) { if (pokemon.getAbility().isHalfSleep) { pokemon.statusData.time--; } pokemon.statusData.time--; if (pokemon.statusData.time <= 0) { pokemon.cureStatus(); return; } this.add('cant', pokemon, 'slp'); if (move.sleepUsable) { return; } return false; } }, frz: { effectType: 'Status', onStart: function (target) { this.add('-status', target, 'frz'); if (target.species === 'Shaymin-Sky' && target.baseTemplate.species === target.species) { var template = this.getTemplate('Shaymin'); target.formeChange(template); target.baseTemplate = template; target.setAbility(template.abilities['0']); target.baseAbility = target.ability; target.details = 
template.species + (target.level === 100 ? '' : ', L' + target.level) + (target.gender === '' ? '' : ', ' + target.gender) + (target.set.shiny ? ', shiny' : ''); this.add('detailschange', target, target.details); this.add('message', target.species + " has reverted to Land Forme! (placeholder)"); } }, onBeforeMovePriority: 2, onBeforeMove: function (pokemon, target, move) { if (move.thawsUser || this.random(5) === 0) { pokemon.cureStatus(); return; } this.add('cant', pokemon, 'frz'); return false; }, onHit: function (target, source, move) { if (move.type === 'Fire' && move.category !== 'Status') { target.cureStatus(); } } }, psn: { effectType: 'Status', onStart: function (target) { this.add('-status', target, 'psn'); }, onResidualOrder: 9, onResidual: function (pokemon) { this.damage(pokemon.maxhp / 8); } }, tox: { effectType: 'Status', onStart: function (target, source, sourceEffect) { this.effectData.stage = 0; if (sourceEffect && sourceEffect.id === 'toxicorb') { this.add('-status', target, 'tox', '[from] item: Toxic Orb'); return; } this.add('-status', target, 'tox'); }, onSwitchIn: function () { this.effectData.stage = 0; }, onResidualOrder: 9, onResidual: function (pokemon) { if (this.effectData.stage < 15) { this.effectData.stage++; } this.damage(this.clampIntRange(pokemon.maxhp / 16, 1) * this.effectData.stage); } }, confusion: { // this is a volatile status onStart: function (target, source, sourceEffect) { var result = this.runEvent('TryConfusion', target, source, sourceEffect); if (!result) return result; this.add('-start', target, 'confusion'); this.effectData.time = this.random(2, 6); }, onEnd: function (target) { this.add('-end', target, 'confusion'); }, onBeforeMove: function (pokemon) { pokemon.volatiles.confusion.time--; if (!pokemon.volatiles.confusion.time) { pokemon.removeVolatile('confusion'); return; } this.add('-activate', pokemon, 'confusion'); if (this.random(2) === 0) { return; } this.directDamage(this.getDamage(pokemon, pokemon, 40)); 
return false; } }, flinch: { duration: 1, onBeforeMovePriority: 1, onBeforeMove: function (pokemon) { if (!this.runEvent('Flinch', pokemon)) { return; } this.add('cant', pokemon, 'flinch'); return false; } }, trapped: { noCopy: true, onModifyPokemon: function (pokemon) { if (!this.effectData.source || !this.effectData.source.isActive) { delete pokemon.volatiles['trapped']; return; } pokemon.tryTrap(); }, onStart: function (target) { this.add('-activate', target, 'trapped'); } }, partiallytrapped: { duration: 5, durationCallback: function (target, source) { if (source.item === 'gripclaw') return 8; return this.random(5, 7); }, onStart: function (pokemon, source) { this.add('-activate', pokemon, 'move: ' +this.effectData.sourceEffect, '[of] ' + source); }, onResidualOrder: 11, onResidual: function (pokemon) { if (this.effectData.source && (!this.effectData.source.isActive || this.effectData.source.hp <= 0)) { pokemon.removeVolatile('partiallytrapped'); return; } if (this.effectData.source.item === 'bindingband') { this.damage(pokemon.maxhp / 6); } else { this.damage(pokemon.maxhp / 8); } }, onEnd: function (pokemon) { this.add('-end', pokemon, this.effectData.sourceEffect, '[partiallytrapped]'); }, onModifyPokemon: function (pokemon) { pokemon.tryTrap(); } }, lockedmove: { // Outrage, Thrash, Petal Dance... 
duration: 2, onResidual: function (target) { if (target.status === 'slp') { // don't lock, and bypass confusion for calming delete target.volatiles['lockedmove']; } this.effectData.trueDuration--; }, onStart: function (target, source, effect) { this.effectData.trueDuration = this.random(2, 4); this.effectData.move = effect.id; }, onRestart: function () { if (this.effectData.trueDuration >= 2) { this.effectData.duration = 2; } }, onEnd: function (target) { if (this.effectData.trueDuration > 1) return; this.add('-end', target, 'rampage'); target.addVolatile('confusion'); }, onLockMove: function (pokemon) { return this.effectData.move; } }, twoturnmove: { // Skull Bash, SolarBeam, Sky Drop... duration: 2, onStart: function (target, source, effect) { this.effectData.move = effect.id; // source and target are reversed since the event target is the // pokemon using the two-turn move this.effectData.targetLoc = this.getTargetLoc(source, target); target.addVolatile(effect.id, source); }, onEnd: function (target) { target.removeVolatile(this.effectData.move); }, onLockMove: function () { return this.effectData.move; }, onLockMoveTarget: function () { return this.effectData.targetLoc; } }, choicelock: { onStart: function (pokemon) { this.effectData.move = this.activeMove.id; if (!this.effectData.move) return false; }, onModifyPokemon: function (pokemon) { if (!pokemon.getItem().isChoice || !pokemon.hasMove(this.effectData.move)) { pokemon.removeVolatile('choicelock'); return; } if (pokemon.ignore['Item']) { return; } var moves = pokemon.moveset; for (var i = 0; i < moves.length; i++) { if (moves[i].id !== this.effectData.move) { moves[i].disabled = true; } } } }, mustrecharge: { duration: 2, onBeforeMove: function (pokemon) { this.add('cant', pokemon, 'recharge'); pokemon.removeVolatile('mustrecharge'); return false; }, onLockMove: 'recharge' }, futuremove: { // this is a side condition onStart: function (side) { this.effectData.positions = []; for (var i = 0; i < 
side.active.length; i++) { this.effectData.positions[i] = null; } }, onResidualOrder: 3, onResidual: function (side) { var finished = true; for (var i = 0; i < side.active.length; i++) { var posData = this.effectData.positions[i]; if (!posData) continue; posData.duration--; if (posData.duration > 0) { finished = false; continue; } // time's up; time to hit! :D var target = side.foe.active[posData.targetPosition]; var move = this.getMove(posData.move); if (target.fainted) { this.add('-hint', '' + move.name + ' did not hit because the target is fainted.'); this.effectData.positions[i] = null; continue; } this.add('-message', '' + move.name + ' hit! (placeholder)'); target.removeVolatile('Protect'); target.removeVolatile('Endure'); if (typeof posData.moveData.affectedByImmunities === 'undefined') { posData.moveData.affectedByImmunities = true; } this.moveHit(target, posData.source, move, posData.moveData); this.effectData.positions[i] = null; } if (finished) { side.removeSideCondition('futuremove'); } } }, stall: { // Protect, Detect, Endure counter duration: 2, counterMax: 256, onStart: function () { this.effectData.counter = 3; }, onStallMove: function () { // this.effectData.counter should never be undefined here. // However, just in case, use 1 if it is undefined. 
var counter = this.effectData.counter || 1; this.debug("Success chance: " + Math.round(100 / counter) + "%"); return (this.random(counter) === 0); }, onRestart: function () { if (this.effectData.counter < this.effect.counterMax) { this.effectData.counter *= 3; } this.effectData.duration = 2; } }, gem: { duration: 1, affectsFainted: true, onBasePower: function (basePower, user, target, move) { this.debug('Gem Boost'); return this.chainModify([0x14CD, 0x1000]); } }, // weather // weather is implemented here since it's so important to the game raindance: { effectType: 'Weather', duration: 5, durationCallback: function (source, effect) { if (source && source.item === 'damprock') { return 8; } return 5; }, onBasePower: function (basePower, attacker, defender, move) { if (move.type === 'Water') { this.debug('rain water boost'); return this.chainModify(1.5); } if (move.type === 'Fire') { this.debug('rain fire suppress'); return this.chainModify(0.5); } }, onStart: function (battle, source, effect) { if (effect && effect.effectType === 'Ability' && this.gen <= 5) { this.effectData.duration = 0; this.add('-weather', 'RainDance', '[from] ability: ' + effect, '[of] ' + source); } else { this.add('-weather', 'RainDance'); } }, onResidualOrder: 1, onResidual: function () { this.add('-weather', 'RainDance', '[upkeep]'); this.eachEvent('Weather'); }, onEnd: function () { this.add('-weather', 'none'); } }, sunnyday: { effectType: 'Weather', duration: 5, durationCallback: function (source, effect) { if (source && source.item === 'heatrock') { return 8; } return 5; }, onBasePower: function (basePower, attacker, defender, move) { if (move.type === 'Fire') { this.debug('Sunny Day fire boost'); return this.chainModify(1.5); } if (move.type === 'Water') { this.debug('Sunny Day water suppress'); return this.chainModify(0.5); } }, onStart: function (battle, source, effect) { if (effect && effect.effectType === 'Ability' && this.gen <= 5) { this.effectData.duration = 0; 
this.add('-weather', 'SunnyDay', '[from] ability: ' + effect, '[of] ' + source); } else { this.add('-weather', 'SunnyDay'); } }, onImmunity: function (type) { if (type === 'frz') return false; }, onResidualOrder: 1, onResidual: function () { this.add('-weather', 'SunnyDay', '[upkeep]'); this.eachEvent('Weather'); }, onEnd: function () { this.add('-weather', 'none'); } }, sandstorm: { effectType: 'Weather', duration: 5, durationCallback: function (source, effect) { if (source && source.item === 'smoothrock') { return 8; } return 5; }, // This should be applied directly to the stat before any of the other modifiers are chained // So we give it increased priority. onModifySpDPriority: 10, onModifySpD: function (spd, pokemon) { if (pokemon.hasType('Rock') && this.isWeather('sandstorm')) { return this.modify(spd, 1.5); } }, onStart: function (battle, source, effect) { if (effect && effect.effectType === 'Ability' && this.gen <= 5) { this.effectData.duration = 0; this.add('-weather', 'Sandstorm', '[from] ability: ' + effect, '[of] ' + source); } else { this.add('-weather', 'Sandstorm'); } }, onResidualOrder: 1, onResidual: function () { this.add('-weather', 'Sandstorm', '[upkeep]'); if (this.isWeather('sandstorm')) this.eachEvent('Weather'); }, onWeather: function (target) { this.damage(target.maxhp / 16); }, onEnd: function () { this.add('-weather', 'none'); } }, hail: { effectType: 'Weather', duration: 5, durationCallback: function (source, effect) { if (source && source.item === 'icyrock') { return 8; } return 5; }, onStart: function (battle, source, effect) { if (effect && effect.effectType === 'Ability' && this.gen <= 5) { this.effectData.duration = 0; this.add('-weather', 'Hail', '[from] ability: ' + effect, '[of] ' + source); } else { this.add('-weather', 'Hail'); } }, onResidualOrder: 1, onResidual: function () { this.add('-weather', 'Hail', '[upkeep]'); if (this.isWeather('hail')) this.eachEvent('Weather'); }, onWeather: function (target) { 
this.damage(target.maxhp / 16); }, onEnd: function () { this.add('-weather', 'none'); } }, <|fim▁hole|> arceus: { // Arceus's actual typing is implemented here // Arceus's true typing for all its formes is Normal, and it's only // Multitype that changes its type, but its formes are specified to // be their corresponding type in the Pokedex, so that needs to be // overridden. This is mainly relevant for Hackmons and Balanced // Hackmons. onSwitchInPriority: 101, onSwitchIn: function (pokemon) { var type = 'Normal'; if (pokemon.ability === 'multitype') { type = this.runEvent('Plate', pokemon); if (!type || type === true) { type = 'Normal'; } } pokemon.setType(type, true); } } };<|fim▁end|>
<|file_name|>history.js<|end_file_name|><|fim▁begin|>/** * History.js Core * @author Benjamin Arthur Lupton <contact@balupton.com> * @copyright 2010-2011 Benjamin Arthur Lupton <contact@balupton.com> * @license New BSD License <http://creativecommons.org/licenses/BSD/> */ (function(window,undefined){ "use strict"; // ======================================================================== // Initialise // Localise Globals var console = window.console||undefined, // Prevent a JSLint complain document = window.document, // Make sure we are using the correct document navigator = window.navigator, // Make sure we are using the correct navigator sessionStorage = false, // sessionStorage setTimeout = window.setTimeout, clearTimeout = window.clearTimeout, setInterval = window.setInterval, clearInterval = window.clearInterval, JSON = window.JSON, alert = window.alert, History = window.History = window.History||{}, // Public History Object history = window.history; // Old History Object // MooTools Compatibility JSON.stringify = JSON.stringify||JSON.encode; JSON.parse = JSON.parse||JSON.decode; try { sessionStorage = window.sessionStorage; // This will throw an exception in some browsers when cookies/localStorage are explicitly disabled (i.e. 
Chrome) sessionStorage.setItem('TEST', '1'); sessionStorage.removeItem('TEST'); } catch(e) { sessionStorage = false; } // Check Existence if ( typeof History.init !== 'undefined' ) { throw new Error('History.js Core has already been loaded...'); } // Initialise History History.init = function(){ // Check Load Status of Adapter if ( typeof History.Adapter === 'undefined' ) { return false; } // Check Load Status of Core if ( typeof History.initCore !== 'undefined' ) { History.initCore(); } // Check Load Status of HTML4 Support if ( typeof History.initHtml4 !== 'undefined' ) { History.initHtml4(); } // Return true return true; }; // ======================================================================== // Initialise Core // Initialise Core History.initCore = function(){ // Initialise if ( typeof History.initCore.initialized !== 'undefined' ) { // Already Loaded return false; } else { History.initCore.initialized = true; } // ==================================================================== // Options /** * History.options * Configurable options */ History.options = History.options||{}; /** * History.options.hashChangeInterval * How long should the interval be before hashchange checks */ History.options.hashChangeInterval = History.options.hashChangeInterval || 100; /** * History.options.safariPollInterval * How long should the interval be before safari poll checks */ History.options.safariPollInterval = History.options.safariPollInterval || 500; /** * History.options.doubleCheckInterval * How long should the interval be before we perform a double check */ History.options.doubleCheckInterval = History.options.doubleCheckInterval || 500; /** * History.options.storeInterval * How long should we wait between store calls */ History.options.storeInterval = History.options.storeInterval || 5000; /** * History.options.busyDelay * How long should we wait between busy events */ History.options.busyDelay = History.options.busyDelay || 250; /** * History.options.debug * If 
true will enable debug messages to be logged */ History.options.debug = History.options.debug || false; /** * History.options.initialTitle * What is the title of the initial state */ History.options.initialTitle = History.options.initialTitle || document.title; // ==================================================================== // Interval record /** * History.intervalList * List of intervals set, to be cleared when document is unloaded. */ History.intervalList = []; /** * History.clearAllIntervals * Clears all setInterval instances. */ History.clearAllIntervals = function(){ var i, il = History.intervalList; if (typeof il !== "undefined" && il !== null) { for (i = 0; i < il.length; i++) { clearInterval(il[i]); } History.intervalList = null; } }; // ==================================================================== // Debug /** * History.debug(message,...) * Logs the passed arguments if debug enabled */ History.debug = function(){ if ( (History.options.debug||false) ) { History.log.apply(History,arguments); } }; /** * History.log(message,...) 
* Logs the passed arguments */ History.log = function(){ // Prepare var consoleExists = !(typeof console === 'undefined' || typeof console.log === 'undefined' || typeof console.log.apply === 'undefined'), textarea = document.getElementById('log'), message, i,n, args,arg ; // Write to Console if ( consoleExists ) { args = Array.prototype.slice.call(arguments); message = args.shift(); if ( typeof console.debug !== 'undefined' ) { console.debug.apply(console,[message,args]); } else { console.log.apply(console,[message,args]); } } else { message = ("\n"+arguments[0]+"\n"); } // Write to log for ( i=1,n=arguments.length; i<n; ++i ) { arg = arguments[i]; if ( typeof arg === 'object' && typeof JSON !== 'undefined' ) { try { arg = JSON.stringify(arg); } catch ( Exception ) { // Recursive Object } } message += "\n"+arg+"\n"; } // Textarea if ( textarea ) { textarea.value += message+"\n-----\n"; textarea.scrollTop = textarea.scrollHeight - textarea.clientHeight; } // No Textarea, No Console else if ( !consoleExists ) { alert(message); } // Return true return true; }; // ==================================================================== // Emulated Status /** * History.getInternetExplorerMajorVersion() * Get's the major version of Internet Explorer * @return {integer} * @license Public Domain * @author Benjamin Arthur Lupton <contact@balupton.com> * @author James Padolsey <https://gist.github.com/527683> */ History.getInternetExplorerMajorVersion = function(){ var result = History.getInternetExplorerMajorVersion.cached = (typeof History.getInternetExplorerMajorVersion.cached !== 'undefined') ? History.getInternetExplorerMajorVersion.cached : (function(){ var v = 3, div = document.createElement('div'), all = div.getElementsByTagName('i'); while ( (div.innerHTML = '<!--[if gt IE ' + (++v) + ']><i></i><![endif]-->') && all[0] ) {} return (v > 4) ? v : false; })() ; return result; }; /** * History.isInternetExplorer() * Are we using Internet Explorer? 
* @return {boolean} * @license Public Domain * @author Benjamin Arthur Lupton <contact@balupton.com> */ History.isInternetExplorer = function(){ var result = History.isInternetExplorer.cached = (typeof History.isInternetExplorer.cached !== 'undefined') ? History.isInternetExplorer.cached : Boolean(History.getInternetExplorerMajorVersion()) ; return result; }; /** * History.emulated * Which features require emulating? */ History.emulated = { pushState: !Boolean( window.history && window.history.pushState && window.history.replaceState && !( (/ Mobile\/([1-7][a-z]|(8([abcde]|f(1[0-8]))))/i).test(navigator.userAgent) /* disable for versions of iOS before version 4.3 (8F190) */ || (/AppleWebKit\/5([0-2]|3[0-2])/i).test(navigator.userAgent) /* disable for the mercury iOS browser, or at least older versions of the webkit engine */ ) ), hashChange: Boolean( !(('onhashchange' in window) || ('onhashchange' in document)) || (History.isInternetExplorer() && History.getInternetExplorerMajorVersion() < 8) ) }; /** * History.enabled * Is History enabled? */ History.enabled = !History.emulated.pushState; /** * History.bugs * Which bugs are present */ History.bugs = { /** * Safari 5 and Safari iOS 4 fail to return to the correct state once a hash is replaced by a `replaceState` call * https://bugs.webkit.org/show_bug.cgi?id=56249 */ setHash: Boolean(!History.emulated.pushState && navigator.vendor === 'Apple Computer, Inc.' && /AppleWebKit\/5([0-2]|3[0-3])/.test(navigator.userAgent)), /** * Safari 5 and Safari iOS 4 sometimes fail to apply the state change under busy conditions * https://bugs.webkit.org/show_bug.cgi?id=42940 */ safariPoll: Boolean(!History.emulated.pushState && navigator.vendor === 'Apple Computer, Inc.' 
&& /AppleWebKit\/5([0-2]|3[0-3])/.test(navigator.userAgent)), /** * MSIE 6 and 7 sometimes do not apply a hash even it was told to (requiring a second call to the apply function) */ ieDoubleCheck: Boolean(History.isInternetExplorer() && History.getInternetExplorerMajorVersion() < 8), /** * MSIE 6 requires the entire hash to be encoded for the hashes to trigger the onHashChange event */ hashEscape: Boolean(History.isInternetExplorer() && History.getInternetExplorerMajorVersion() < 7) }; /** * History.isEmptyObject(obj) * Checks to see if the Object is Empty * @param {Object} obj * @return {boolean} */ History.isEmptyObject = function(obj) { for ( var name in obj ) { return false; } return true; }; /** * History.cloneObject(obj) * Clones a object and eliminate all references to the original contexts * @param {Object} obj * @return {Object} */ History.cloneObject = function(obj) { var hash,newObj; if ( obj ) { hash = JSON.stringify(obj); newObj = JSON.parse(hash); } else { newObj = {}; } return newObj; }; History.extendObject = function(obj, extension) { for (var key in extension) { if (extension.hasOwnProperty(key)) { obj[key] = extension[key]; } } }; History.setSessionStorageItem = function(key, value) { try { sessionStorage.setItem(key, value); } catch(e) { try { // hack: Workaround for a bug seen on iPads. Sometimes the quota exceeded error comes up and simply // removing/resetting the storage can work. 
sessionStorage.removeItem(key); sessionStorage.setItem(key, value); } catch(e) { try { // no permissions or quota exceed if (e.name === 'QuotaExceededError' || e.name === 'NS_ERROR_DOM_QUOTA_REACHED') { History.Adapter.trigger(window, 'storageQuotaExceed'); sessionStorage.setItem(key, value); } } catch(e) { } } } } // ==================================================================== // URL Helpers /** * History.getRootUrl() * Turns "http://mysite.com/dir/page.html?asd" into "http://mysite.com" * @return {String} rootUrl */ History.getRootUrl = function(){ // Create var rootUrl = document.location.protocol+'//'+(document.location.hostname||document.location.host); if ( document.location.port||false ) { rootUrl += ':'+document.location.port; } rootUrl += '/'; // Return return rootUrl; }; /** * History.getBaseHref() * Fetches the `href` attribute of the `<base href="...">` element if it exists * @return {String} baseHref */ History.getBaseHref = function(){ // Create var baseElements = document.getElementsByTagName('base'), baseElement = null, baseHref = ''; // Test for Base Element if ( baseElements.length === 1 ) { // Prepare for Base Element baseElement = baseElements[0]; baseHref = baseElement.href.replace(/[^\/]+$/,''); } // Adjust trailing slash baseHref = baseHref.replace(/\/+$/,''); if ( baseHref ) baseHref += '/'; // Return return baseHref; }; /** * History.getBaseUrl() * Fetches the baseHref or basePageUrl or rootUrl (whichever one exists first) * @return {String} baseUrl */ History.getBaseUrl = function(){ // Create var baseUrl = History.getBaseHref()||History.getBasePageUrl()||History.getRootUrl(); // Return return baseUrl; }; /** * History.getPageUrl() * Fetches the URL of the current page * @return {String} pageUrl */ History.getPageUrl = function(){ // Fetch var State = History.getState(false,false), stateUrl = (State||{}).url||document.location.href, pageUrl; // Create pageUrl = 
stateUrl.replace(/\/+$/,'').replace(/[^\/]+$/,function(part,index,string){ return (/\./).test(part) ? part : part+'/'; }); // Return return pageUrl; }; /** * History.getBasePageUrl() * Fetches the Url of the directory of the current page * @return {String} basePageUrl */ History.getBasePageUrl = function(){ // Create var basePageUrl = document.location.href.replace(/[#\?].*/,'').replace(/[^\/]+$/,function(part,index,string){ return (/[^\/]$/).test(part) ? '' : part; }).replace(/\/+$/,'')+'/'; // Return return basePageUrl; }; /** * History.getFullUrl(url) * Ensures that we have an absolute URL and not a relative URL * @param {string} url * @param {Boolean} allowBaseHref * @return {string} fullUrl */ History.getFullUrl = function(url,allowBaseHref){ // Prepare var fullUrl = url, firstChar = url.substring(0,1); allowBaseHref = (typeof allowBaseHref === 'undefined') ? true : allowBaseHref; // Check if ( /[a-z]+\:\/\//.test(url) ) { // Full URL } else if ( firstChar === '/' ) { // Root URL fullUrl = History.getRootUrl()+url.replace(/^\/+/,''); } else if ( firstChar === '#' ) { // Anchor URL fullUrl = History.getPageUrl().replace(/#.*/,'')+url; } else if ( firstChar === '?' 
) { // Query URL fullUrl = History.getPageUrl().replace(/[\?#].*/,'')+url; } else { // Relative URL if ( allowBaseHref ) { fullUrl = History.getBaseUrl()+url.replace(/^(\.\/)+/,''); } else { fullUrl = History.getBasePageUrl()+url.replace(/^(\.\/)+/,''); } // We have an if condition above as we do not want hashes // which are relative to the baseHref in our URLs // as if the baseHref changes, then all our bookmarks // would now point to different locations // whereas the basePageUrl will always stay the same } // Return return fullUrl.replace(/\#$/,''); }; /** * History.getShortUrl(url) * Ensures that we have a relative URL and not a absolute URL * @param {string} url * @return {string} url */ History.getShortUrl = function(url){ // Prepare var shortUrl = url, baseUrl = History.getBaseUrl(), rootUrl = History.getRootUrl(); // Trim baseUrl if ( History.emulated.pushState ) { // We are in a if statement as when pushState is not emulated // The actual url these short urls are relative to can change // So within the same session, we the url may end up somewhere different shortUrl = shortUrl.replace(baseUrl,''); } // Trim rootUrl shortUrl = shortUrl.replace(rootUrl,'/'); // Ensure we can still detect it as a state if ( History.isTraditionalAnchor(shortUrl) ) { shortUrl = './'+shortUrl; } // Clean It shortUrl = shortUrl.replace(/^(\.\/)+/g,'./').replace(/\#$/,''); // Return return shortUrl; }; // ==================================================================== // State Storage /** * History.store * The store for all session specific data */ History.store = {}; /** * History.idToState * 1-1: State ID to State Object */ History.idToState = History.idToState||{}; /** * History.stateToId * 1-1: State String to State ID */ History.stateToId = History.stateToId||{}; /** * History.urlToId * 1-1: State URL to State ID */ History.urlToId = History.urlToId||{}; /** * History.storedStates * Store the states in an array */ History.storedStates = History.storedStates||[]; /** * 
History.savedStates * Saved the states in an array */ History.savedStates = History.savedStates||[]; /** * History.noramlizeStore() * Noramlize the store by adding necessary values */ History.normalizeStore = function(){ History.store.idToState = History.store.idToState||{}; History.store.urlToId = History.store.urlToId||{}; History.store.stateToId = History.store.stateToId||{}; }; /** * History.getState() * Get an object containing the data, title and url of the current state * @param {Boolean} friendly * @param {Boolean} create * @return {Object} State */ History.getState = function(friendly,create){ // Prepare if ( typeof friendly === 'undefined' ) { friendly = true; } if ( typeof create === 'undefined' ) { create = true; } // Fetch var State = History.getLastSavedState(); // Create if ( !State && create ) { State = History.createStateObject(); } // Adjust if ( friendly ) { State = History.cloneObject(State); State.url = State.cleanUrl||State.url; } // Return return State; }; /** * History.getIdByState(State) * Gets a ID for a State * @param {State} newState * @return {String} id */ History.getIdByState = function(newState){ // Fetch ID var id = History.extractId(newState.url), lastSavedState, str; if ( !id ) { // Find ID via State String str = History.getStateString(newState); if ( typeof History.stateToId[str] !== 'undefined' ) { id = History.stateToId[str]; } else if ( typeof History.store.stateToId[str] !== 'undefined' ) { id = History.store.stateToId[str]; } else { id = sessionStorage ? 
sessionStorage.getItem('uniqId') : new Date().getTime(); if (id == undefined){ id = 0; } lastSavedState = History.getLastSavedState(); if (lastSavedState) { id = lastSavedState.id + 1; if (sessionStorage) { History.setSessionStorageItem('uniqId', id); } } else { // Generate a new ID while (true) { ++id; if (typeof History.idToState[id] === 'undefined' && typeof History.store.idToState[id] === 'undefined') { if (sessionStorage) { History.setSessionStorageItem('uniqId', id); } break; } } } // Apply the new State to the ID History.stateToId[str] = id; History.idToState[id] = newState; } } // Return ID return id; }; /** * History.normalizeState(State) * Expands a State Object * @param {object} State * @return {object} */ History.normalizeState = function(oldState){ // Variables var newState, dataNotEmpty; // Prepare if ( !oldState || (typeof oldState !== 'object') ) { oldState = {}; } // Check if ( typeof oldState.normalized !== 'undefined' ) { return oldState; } // Adjust if ( !oldState.data || (typeof oldState.data !== 'object') ) { oldState.data = {}; } // ---------------------------------------------------------------- // Create newState = {}; newState.normalized = true; newState.title = oldState.title||''; newState.url = History.getFullUrl(History.unescapeString(oldState.url||document.location.href)); newState.hash = History.getShortUrl(newState.url); newState.data = History.cloneObject(oldState.data); // Fetch ID newState.id = History.getIdByState(newState); // ---------------------------------------------------------------- // Clean the URL newState.cleanUrl = newState.url.replace(/\??\&_suid.*/,''); newState.url = newState.cleanUrl; // Check to see if we have more than just a url dataNotEmpty = !History.isEmptyObject(newState.data); // Apply if ( newState.title || dataNotEmpty ) { // Add ID to Hash newState.hash = History.getShortUrl(newState.url).replace(/\??\&_suid.*/,''); if ( !/\?/.test(newState.hash) ) { newState.hash += '?'; } newState.hash += 
'&_suid='+newState.id; } // Create the Hashed URL newState.hashedUrl = History.getFullUrl(newState.hash); // ---------------------------------------------------------------- // Update the URL if we have a duplicate if ( (History.emulated.pushState || History.bugs.safariPoll) && History.hasUrlDuplicate(newState) ) { newState.url = newState.hashedUrl; } // ---------------------------------------------------------------- // Return return newState; }; /** * History.createStateObject(data,title,url) * Creates a object based on the data, title and url state params * @param {object} data * @param {string} title * @param {string} url * @return {object} */ History.createStateObject = function(data,title,url){ // Hashify var State = { 'data': data, 'title': title, 'url': url }; // Expand the State State = History.normalizeState(State); // Return object return State; }; /** * History.getStateById(id) * Get a state by it's UID * @param {String} id */ History.getStateById = function(id){ // Prepare id = String(id); // Retrieve var State = History.idToState[id] || History.store.idToState[id] || undefined; // Return State return State; }; /** * Get a State's String * @param {State} passedState */ History.getStateString = function(passedState){ // Prepare var State, cleanedState, str; // Fetch State = History.normalizeState(passedState); // Clean cleanedState = { data: State.data, title: passedState.title, url: passedState.url }; // Fetch str = JSON.stringify(cleanedState); // Return return str; }; /** * Get a State's ID * @param {State} passedState * @return {String} id */ History.getStateId = function(passedState){ // Prepare var State, id; // Fetch State = History.normalizeState(passedState); // Fetch id = State.id; // Return return id; }; /** * History.getHashByState(State) * Creates a Hash for the State Object * @param {State} passedState * @return {String} hash */ History.getHashByState = function(passedState){ // Prepare var State, hash; // Fetch State = 
History.normalizeState(passedState); // Hash hash = State.hash; // Return return hash; }; /** * History.extractId(url_or_hash) * Get a State ID by it's URL or Hash * @param {string} url_or_hash * @return {string} id */ History.extractId = function ( url_or_hash ) { // Prepare var id,parts,url; // Extract parts = /(.*)\&_suid=([0-9]+)$/.exec(url_or_hash); url = parts ? (parts[1]||url_or_hash) : url_or_hash; id = parts ? String(parts[2]||'') : ''; // Return return id||false; }; /** * History.isTraditionalAnchor * Checks to see if the url is a traditional anchor or not * @param {String} url_or_hash * @return {Boolean} */ History.isTraditionalAnchor = function(url_or_hash){ // Check var isTraditional = !(/[\/\?\.]/.test(url_or_hash)); // Return return isTraditional; }; /** * History.extractState * Get a State by it's URL or Hash * @param {String} url_or_hash * @return {State|null} */ History.extractState = function(url_or_hash,create){ // Prepare var State = null, id, url; create = create||false; // Fetch SUID id = History.extractId(url_or_hash); if ( id ) { State = History.getStateById(id); } // Fetch SUID returned no State if ( !State ) { // Fetch URL url = History.getFullUrl(url_or_hash); // Check URL id = History.getIdByUrl(url)||false; if ( id ) { State = History.getStateById(id); } // Create State if ( !State && create && !History.isTraditionalAnchor(url_or_hash) ) { State = History.createStateObject(null,null,url); } } // Return return State; }; /** * History.getIdByUrl() * Get a State ID by a State URL */ History.getIdByUrl = function(url){ // Fetch var id = History.urlToId[url] || History.store.urlToId[url] || undefined; // Return return id; }; /** * History.getLastSavedState() * Get an object containing the data, title and url of the current state * @return {Object} State */ History.getLastSavedState = function(){ return History.savedStates[History.savedStates.length-1]||undefined; }; /** * History.getLastStoredState() * Get an object containing the data, 
title and url of the current state * @return {Object} State */ History.getLastStoredState = function(){ return History.storedStates[History.storedStates.length-1]||undefined; }; /** * History.hasUrlDuplicate * Checks if a Url will have a url conflict * @param {Object} newState * @return {Boolean} hasDuplicate */ History.hasUrlDuplicate = function(newState) { // Prepare var hasDuplicate = false, oldState; // Fetch oldState = History.extractState(newState.url); // Check hasDuplicate = oldState && oldState.id !== newState.id; // Return return hasDuplicate; }; /** * History.storeState * Store a State * @param {Object} newState * @return {Object} newState */ History.storeState = function(newState){ // Store the State History.urlToId[newState.url] = newState.id; // Push the State History.storedStates.push(History.cloneObject(newState)); // Return newState return newState; }; /** * History.isLastSavedState(newState) * Tests to see if the state is the last state * @param {Object} newState * @return {boolean} isLast */ History.isLastSavedState = function(newState){ // Prepare var isLast = false, newId, oldState, oldId; // Check if ( History.savedStates.length ) { newId = newState.id; oldState = History.getLastSavedState(); oldId = oldState.id; // Check isLast = (newId === oldId); } // Return return isLast; }; /** * History.saveState * Push a State * @param {Object} newState * @return {boolean} changed */ History.saveState = function(newState){ // Check Hash if ( History.isLastSavedState(newState) ) { return false; } // Push the State History.savedStates.push(History.cloneObject(newState)); // Return true return true; }; /** * History.getStateByIndex() * Gets a state by the index * @param {integer} index * @return {Object} */ History.getStateByIndex = function(index){ // Prepare var State = null; // Handle if ( typeof index === 'undefined' ) { // Get the last inserted State = History.savedStates[History.savedStates.length-1]; } else if ( index < 0 ) { // Get from the end 
State = History.savedStates[History.savedStates.length+index]; } else { // Get from the beginning State = History.savedStates[index]; } // Return State return State; }; // ==================================================================== // Hash Helpers /** * History.getHash() * Gets the current document hash * @return {string} */ History.getHash = function(){ var hash = History.unescapeHash(document.location.hash); return hash; }; /** * History.unescapeString() * Unescape a string * @param {String} str * @return {string} */ History.unescapeString = function(str){ // Prepare var result = str, tmp; <|fim▁hole|> while ( true ) { tmp = window.unescape(result); if ( tmp === result ) { break; } result = tmp; } // Return result return result; }; /** * History.unescapeHash() * normalize and Unescape a Hash * @param {String} hash * @return {string} */ History.unescapeHash = function(hash){ // Prepare var result = History.normalizeHash(hash); // Unescape hash result = History.unescapeString(result); // Return result return result; }; /** * History.normalizeHash() * normalize a hash across browsers * @return {string} */ History.normalizeHash = function(hash){ // Prepare var result = hash.replace(/[^#]*#/,'').replace(/#.*/, ''); // Return result return result; }; /** * History.setHash(hash) * Sets the document hash * @param {string} hash * @return {History} */ History.setHash = function(hash,queue){ // Prepare var adjustedHash, State, pageUrl; // Handle Queueing if ( queue !== false && History.busy() ) { // Wait + Push to Queue //History.debug('History.setHash: we must wait', arguments); History.pushQueue({ scope: History, callback: History.setHash, args: arguments, queue: queue }); return false; } // Log //History.debug('History.setHash: called',hash); // Prepare adjustedHash = History.escapeHash(hash); // Make Busy + Continue History.busy(true); // Check if hash is a state State = History.extractState(hash,true); if ( State && !History.emulated.pushState ) { // Hash is a 
state so skip the setHash //History.debug('History.setHash: Hash is a state so skipping the hash set with a direct pushState call',arguments); // PushState History.pushState(State.data,State.title,State.url,false); } else if ( document.location.hash !== adjustedHash ) { // Hash is a proper hash, so apply it // Handle browser bugs if ( History.bugs.setHash ) { // Fix Safari Bug https://bugs.webkit.org/show_bug.cgi?id=56249 // Fetch the base page pageUrl = History.getPageUrl(); // Safari hash apply History.pushState(null,null,pageUrl+'#'+adjustedHash,false); } else { // Normal hash apply document.location.hash = adjustedHash; } } // Chain return History; }; /** * History.escape() * normalize and Escape a Hash * @return {string} */ History.escapeHash = function(hash){ // Prepare var result = History.normalizeHash(hash); // Escape hash result = window.escape(result); // IE6 Escape Bug if ( !History.bugs.hashEscape ) { // Restore common parts result = result .replace(/\%21/g,'!') .replace(/\%26/g,'&') .replace(/\%3D/g,'=') .replace(/\%3F/g,'?'); } // Return result return result; }; /** * History.getHashByUrl(url) * Extracts the Hash from a URL * @param {string} url * @return {string} url */ History.getHashByUrl = function(url){ // Extract the hash var hash = String(url) .replace(/([^#]*)#?([^#]*)#?(.*)/, '$2') ; // Unescape hash hash = History.unescapeHash(hash); // Return hash return hash; }; /** * History.setTitle(title) * Applies the title to the document * @param {State} newState * @return {Boolean} */ History.setTitle = function(newState){ // Prepare var title = newState.title, firstState; // Initial if ( !title ) { firstState = History.getStateByIndex(0); if ( firstState && firstState.url === newState.url ) { title = firstState.title||History.options.initialTitle; } } // Apply try { document.getElementsByTagName('title')[0].innerHTML = title.replace('<','&lt;').replace('>','&gt;').replace(' & ',' &amp; '); } catch ( Exception ) { } document.title = title; // Chain 
return History; }; // ==================================================================== // Queueing /** * History.queues * The list of queues to use * First In, First Out */ History.queues = []; /** * History.busy(value) * @param {boolean} value [optional] * @return {boolean} busy */ History.busy = function(value){ // Apply if ( typeof value !== 'undefined' ) { //History.debug('History.busy: changing ['+(History.busy.flag||false)+'] to ['+(value||false)+']', History.queues.length); History.busy.flag = value; } // Default else if ( typeof History.busy.flag === 'undefined' ) { History.busy.flag = false; } // Queue if ( !History.busy.flag ) { // Execute the next item in the queue clearTimeout(History.busy.timeout); var fireNext = function(){ var i, queue, item; if ( History.busy.flag ) return; for ( i=History.queues.length-1; i >= 0; --i ) { queue = History.queues[i]; if ( queue.length === 0 ) continue; item = queue.shift(); History.fireQueueItem(item); History.busy.timeout = setTimeout(fireNext,History.options.busyDelay); } }; History.busy.timeout = setTimeout(fireNext,History.options.busyDelay); } // Return return History.busy.flag; }; /** * History.busy.flag */ History.busy.flag = false; /** * History.fireQueueItem(item) * Fire a Queue Item * @param {Object} item * @return {Mixed} result */ History.fireQueueItem = function(item){ return item.callback.apply(item.scope||History,item.args||[]); }; /** * History.pushQueue(callback,args) * Add an item to the queue * @param {Object} item [scope,callback,args,queue] */ History.pushQueue = function(item){ // Prepare the queue History.queues[item.queue||0] = History.queues[item.queue||0]||[]; // Add to the queue History.queues[item.queue||0].push(item); // Chain return History; }; /** * History.queue (item,queue), (func,queue), (func), (item) * Either firs the item now if not busy, or adds it to the queue */ History.queue = function(item,queue){ // Prepare if ( typeof item === 'function' ) { item = { callback: item }; } 
if ( typeof queue !== 'undefined' ) { item.queue = queue; } // Handle if ( History.busy() ) { History.pushQueue(item); } else { History.fireQueueItem(item); } // Chain return History; }; /** * History.clearQueue() * Clears the Queue */ History.clearQueue = function(){ History.busy.flag = false; History.queues = []; return History; }; // ==================================================================== // IE Bug Fix /** * History.stateChanged * States whether or not the state has changed since the last double check was initialised */ History.stateChanged = false; /** * History.doubleChecker * Contains the timeout used for the double checks */ History.doubleChecker = false; /** * History.doubleCheckComplete() * Complete a double check * @return {History} */ History.doubleCheckComplete = function(){ // Update History.stateChanged = true; // Clear History.doubleCheckClear(); // Chain return History; }; /** * History.doubleCheckClear() * Clear a double check * @return {History} */ History.doubleCheckClear = function(){ // Clear if ( History.doubleChecker ) { clearTimeout(History.doubleChecker); History.doubleChecker = false; } // Chain return History; }; /** * History.doubleCheck() * Create a double check * @return {History} */ History.doubleCheck = function(tryAgain){ // Reset History.stateChanged = false; History.doubleCheckClear(); // Fix IE6,IE7 bug where calling history.back or history.forward does not actually change the hash (whereas doing it manually does) // Fix Safari 5 bug where sometimes the state does not change: https://bugs.webkit.org/show_bug.cgi?id=42940 if ( History.bugs.ieDoubleCheck ) { // Apply Check History.doubleChecker = setTimeout( function(){ History.doubleCheckClear(); if ( !History.stateChanged ) { //History.debug('History.doubleCheck: State has not yet changed, trying again', arguments); // Re-Attempt tryAgain(); } return true; }, History.options.doubleCheckInterval ); } // Chain return History; }; // 
==================================================================== // Safari Bug Fix /** * History.safariStatePoll() * Poll the current state * @return {History} */ History.safariStatePoll = function(){ // Poll the URL // Get the Last State which has the new URL var urlState = History.extractState(document.location.href), newState; // Check for a difference if ( !History.isLastSavedState(urlState) ) { newState = urlState; } else { return; } // Check if we have a state with that url // If not create it if ( !newState ) { //History.debug('History.safariStatePoll: new'); newState = History.createStateObject(); } // Apply the New State //History.debug('History.safariStatePoll: trigger'); History.Adapter.trigger(window,'popstate'); // Chain return History; }; // ==================================================================== // State Aliases /** * History.back(queue) * Send the browser history back one item * @param {Integer} queue [optional] */ History.back = function(queue){ //History.debug('History.back: called', arguments); // Handle Queueing if ( queue !== false && History.busy() ) { // Wait + Push to Queue //History.debug('History.back: we must wait', arguments); History.pushQueue({ scope: History, callback: History.back, args: arguments, queue: queue }); return false; } // Make Busy + Continue History.busy(true); // Fix certain browser bugs that prevent the state from changing History.doubleCheck(function(){ History.back(false); }); // Go back history.go(-1); // End back closure return true; }; /** * History.forward(queue) * Send the browser history forward one item * @param {Integer} queue [optional] */ History.forward = function(queue){ //History.debug('History.forward: called', arguments); // Handle Queueing if ( queue !== false && History.busy() ) { // Wait + Push to Queue //History.debug('History.forward: we must wait', arguments); History.pushQueue({ scope: History, callback: History.forward, args: arguments, queue: queue }); return false; } // Make 
Busy + Continue History.busy(true); // Fix certain browser bugs that prevent the state from changing History.doubleCheck(function(){ History.forward(false); }); // Go forward history.go(1); // End forward closure return true; }; /** * History.go(index,queue) * Send the browser history back or forward index times * @param {Integer} queue [optional] */ History.go = function(index,queue){ //History.debug('History.go: called', arguments); // Prepare var i; // Handle if ( index > 0 ) { // Forward for ( i=1; i<=index; ++i ) { History.forward(queue); } } else if ( index < 0 ) { // Backward for ( i=-1; i>=index; --i ) { History.back(queue); } } else { throw new Error('History.go: History.go requires a positive or negative integer passed.'); } // Chain return History; }; // ==================================================================== // HTML5 State Support // Non-Native pushState Implementation if ( History.emulated.pushState ) { /* * Provide Skeleton for HTML4 Browsers */ // Prepare var emptyFunction = function(){}; History.pushState = History.pushState||emptyFunction; History.replaceState = History.replaceState||emptyFunction; } // History.emulated.pushState // Native pushState Implementation else { /* * Use native HTML5 History API Implementation */ /** * History.onPopState(event,extra) * Refresh the Current State */ History.onPopState = function(event,extra){ // Prepare var stateId = false, newState = false, currentHash, currentState; // Reset the double check History.doubleCheckComplete(); // Check for a Hash, and handle apporiatly currentHash = History.getHash(); if ( currentHash ) { // Expand Hash currentState = History.extractState(currentHash||document.location.href,true); if ( currentState ) { // We were able to parse it, it must be a State! 
// Let's forward to replaceState //History.debug('History.onPopState: state anchor', currentHash, currentState); History.replaceState(currentState.data, currentState.title, currentState.url, false); } else { // Traditional Anchor //History.debug('History.onPopState: traditional anchor', currentHash); History.Adapter.trigger(window,'anchorchange'); History.busy(false); } // We don't care for hashes History.expectedStateId = false; return false; } // Ensure stateId = History.Adapter.extractEventData('state',event,extra) || false; // Fetch State if ( stateId ) { // Vanilla: Back/forward button was used newState = History.getStateById(stateId); } else if ( History.expectedStateId ) { // Vanilla: A new state was pushed, and popstate was called manually newState = History.getStateById(History.expectedStateId); } else { // Initial State newState = History.extractState(document.location.href); } // The State did not exist in our store if ( !newState ) { // Regenerate the State newState = History.createStateObject(null,null,document.location.href); } // Clean History.expectedStateId = false; // Check if we are the same state if ( History.isLastSavedState(newState) ) { // There has been no change (just the page's hash has finally propagated) //History.debug('History.onPopState: no change', newState, History.savedStates); History.busy(false); return false; } // Store the State History.storeState(newState); History.saveState(newState); // Force update of the title History.setTitle(newState); // Fire Our Event History.Adapter.trigger(window,'statechange'); History.busy(false); // Return true return true; }; History.Adapter.bind(window,'popstate',History.onPopState); /** * History.pushState(data,title,url) * Add a new State to the history object, become it, and trigger onpopstate * We have to trigger for HTML4 compatibility * @param {object} data * @param {string} title * @param {string} url * @return {true} */ History.pushState = function(data,title,url,queue){ 
//History.debug('History.pushState: called', arguments); // Check the State if ( History.getHashByUrl(url) && History.emulated.pushState ) { throw new Error('History.js does not support states with fragement-identifiers (hashes/anchors).'); } // Handle Queueing if ( queue !== false && History.busy() ) { // Wait + Push to Queue //History.debug('History.pushState: we must wait', arguments); History.pushQueue({ scope: History, callback: History.pushState, args: arguments, queue: queue }); return false; } // Make Busy + Continue History.busy(true); // Create the newState var newState = History.createStateObject(data,title,url); // Check it if ( History.isLastSavedState(newState) ) { // Won't be a change History.busy(false); } else { //remove previously stored state, because it can be and can be non empty History.Adapter.trigger(window, 'stateremove', { stateId: newState.id }); // Store the newState History.storeState(newState); History.expectedStateId = newState.id; // Push the newState history.pushState(newState.id,newState.title,newState.url); // Fire HTML5 Event History.Adapter.trigger(window,'popstate'); } // End pushState closure return true; }; /** * History.replaceState(data,title,url) * Replace the State and trigger onpopstate * We have to trigger for HTML4 compatibility * @param {object} data * @param {string} title * @param {string} url * @param {object} queue * @param {boolean} createNewState * @return {true} */ History.replaceState = function(data,title,url,queue,createNewState){ //History.debug('History.replaceState: called', arguments); // Check the State if ( History.getHashByUrl(url) && History.emulated.pushState ) { throw new Error('History.js does not support states with fragement-identifiers (hashes/anchors).'); } // Handle Queueing if ( queue !== false && History.busy() ) { // Wait + Push to Queue //History.debug('History.replaceState: we must wait', arguments); History.pushQueue({ scope: History, callback: History.replaceState, args: arguments, 
queue: queue }); return false; } // Make Busy + Continue History.busy(true); // Create the newState var newState; if (createNewState) { data.rnd = new Date().getTime(); newState = History.createStateObject(data, title, url); } else { newState = History.getState(); newState.data = data; History.idToState[newState.id] = newState; History.extendObject(History.getLastSavedState(), newState); } // Check it if ( History.isLastSavedState(newState) ) { // Won't be a change History.busy(false); } else { // Store the newState History.storeState(newState); History.expectedStateId = newState.id; // Push the newState history.replaceState(newState.id,newState.title,newState.url); // Fire HTML5 Event History.Adapter.trigger(window,'popstate'); } // End replaceState closure return true; }; } // !History.emulated.pushState // ==================================================================== // Initialise /** * Load the Store */ if ( sessionStorage ) { // Fetch try { History.store = JSON.parse(/*LZString.decompress*/(sessionStorage.getItem('History.store')))||{}; } catch ( err ) { History.store = {}; } // Normalize History.normalizeStore(); } else { // Default Load History.store = {}; History.normalizeStore(); } /** * Clear Intervals on exit to prevent memory leaks */ History.Adapter.bind(window,"beforeunload",History.clearAllIntervals); History.Adapter.bind(window,"unload",History.clearAllIntervals); /** * Create the initial State */ History.saveState(History.storeState(History.extractState(document.location.href,true))); /** * Bind for Saving Store */ if ( sessionStorage ) { // When the page is closed History.onUnload = function(){ // Prepare var currentStore, item; // Fetch try { currentStore = JSON.parse(/*LZString.decompress*/(sessionStorage.getItem('History.store')))||{}; } catch ( err ) { currentStore = {}; } // Ensure currentStore.idToState = currentStore.idToState || {}; currentStore.urlToId = currentStore.urlToId || {}; currentStore.stateToId = currentStore.stateToId || 
{}; // Sync for ( item in History.idToState ) { if ( !History.idToState.hasOwnProperty(item) ) { continue; } currentStore.idToState[item] = History.idToState[item]; } for ( item in History.urlToId ) { if ( !History.urlToId.hasOwnProperty(item) ) { continue; } currentStore.urlToId[item] = History.urlToId[item]; } for ( item in History.stateToId ) { if ( !History.stateToId.hasOwnProperty(item) ) { continue; } currentStore.stateToId[item] = History.stateToId[item]; } var historyEntries = []; var maxHistoryEntriesCount = 10; //slice overweight entries for ( item in currentStore.idToState ) { if ( !currentStore.idToState.hasOwnProperty(item) ) { continue; } currentStore.idToState[item].entryId = item; historyEntries.push(currentStore.idToState[item]); } if (historyEntries.length > maxHistoryEntriesCount) { historyEntries.sort(function(e1, e2) { return e1.entryId - e2.entryId; }); var excludedEntries = historyEntries.slice(0, historyEntries.length - maxHistoryEntriesCount); for (var entryIndex = 0; entryIndex < excludedEntries.length; entryIndex++) { var entry = excludedEntries[entryIndex]; delete currentStore.idToState[entry.entryId]; for (var url in currentStore.urlToId ) { if (currentStore.urlToId.hasOwnProperty(url) && currentStore.urlToId[url] == entry.entryId) { delete currentStore.urlToId[url]; } } for (var state in currentStore.stateToId ) { if (currentStore.stateToId.hasOwnProperty(state) && currentStore.stateToId[state] == entry.entryId) { delete currentStore.stateToId[state]; } } History.Adapter.trigger(window, 'stateremove', { stateId: entry.entryId }); } } // Update History.store = currentStore; History.normalizeStore(); // Store History.setSessionStorageItem('History.store', /*LZString.compress*/(JSON.stringify(currentStore))); }; // For Internet Explorer History.intervalList.push(setInterval(History.onUnload,History.options.storeInterval)); // For Other Browsers History.Adapter.bind(window,'beforeunload',History.onUnload); 
History.Adapter.bind(window,'unload',History.onUnload); // Both are enabled for consistency } // Non-Native pushState Implementation if ( !History.emulated.pushState ) { // Be aware, the following is only for native pushState implementations // If you are wanting to include something for all browsers // Then include it above this if block /** * Setup Safari Fix */ if ( History.bugs.safariPoll ) { History.intervalList.push(setInterval(History.safariStatePoll, History.options.safariPollInterval)); } /** * Ensure Cross Browser Compatibility */ if ( navigator.vendor === 'Apple Computer, Inc.' || (navigator.appCodeName||'') === 'Mozilla' ) { /** * Fix Safari HashChange Issue */ // Setup Alias History.Adapter.bind(window,'hashchange',function(){ History.Adapter.trigger(window,'popstate'); }); // Initialise Alias if ( History.getHash() ) { History.Adapter.onDomLoad(function(){ History.Adapter.trigger(window,'hashchange'); }); } } } // !History.emulated.pushState }; // History.initCore // Try and Initialise History History.init(); })(window);<|fim▁end|>
// Unescape hash
<|file_name|>route.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
# TODO: implement this module; it is currently an empty stub.
<|file_name|>popup.ts<|end_file_name|><|fim▁begin|>import * as firefox from './firefox'; import * as calcpass2017a from './calcpass2017a'; import {execute_parallel_bcrypt_webworkers} from './execute_parallel_bcrypt_webworkers'; import {stringToUTF8} from './utf8'; import {erase} from './util'; import * as hex from './hex' import * as calcpass_misc from './calcpass_misc' let gCurrentScreen = null; class ContentFrameInfo { tabId:number; frameId:number; origin:string; //hasFocusedInput:boolean; hasChildFrames:boolean; } //let gContentFrames = new Array<ContentFrameInfo>(); //The content frame which had the focused password field //when the user clicked the toolbar. //If there was no focused input this will point to the root frame. //let gTargetFrame:ContentFrameInfo = null; //holds the root frame let gRootFrame:ContentFrameInfo = null; let gAllFrames = new Array<ContentFrameInfo>(); function setContent(html) { document.getElementById('content').innerHTML = html; } function setScreen(screen:Object) { if (typeof(screen['html']) != 'string') { throw new Error('screen object missing `html` member.'); } setContent(screen['html']); //Connect all "event_*" methods to the corresponding document elements let k, eventType, elmId, m, elm, func; let re_event = /^event_([a-z]+)_(.+)$/ let re_elm = /^elm_(.+)$/ for (k in screen) { if (k.indexOf('event_') == 0) { m = re_event.exec(k); if (!m || !m[1] || !m[2]) { throw new Error('screen object has invalid event method name: ' + k); } eventType = m[1]; elmId = m[2]; elm = document.getElementById(elmId); if (!elm) { throw new Error(`screen object has method "${k}" but document has no element "${elmId}"`); } func = screen[k].bind(screen); elm.addEventListener(eventType, func); console.log(`Connected ${k}`); } else if (k.indexOf('elm_') == 0) { m = re_elm.exec(k); if (!m || !m[1]) { throw new Error('screen object has invalid elm field name: ' + k); } elmId = m[1]; elm = document.getElementById(elmId); if (!elm) { throw new Error(`screen 
object has field "${k}" but document has no element "${elmId}"`); } screen[k] = elm; //console.log(`set elm ${k}`); } } gCurrentScreen = screen; //Call onScreenReady if provided if (screen['onScreenReady']) { screen['onScreenReady'](); } } function setElmText(elementOrId, text:string) { let elm = elementOrId; if (typeof(elementOrId) == 'string') elm = document.getElementById(elementOrId); if (!elm || !elm.firstChild) { console.log('setElmText: ' + elementOrId + ' is missing or has no firstChild'); return; } elm.firstChild.nodeValue = text; } /* class WelcomeScreen { html:string; constructor() { this.html = ` <b>Welcome to CalcPass!</b> <p> <button id="btnCreate">Create a new wallet card.</button> <button id="btnAlready">I already have a card.</button> `; } event_click_btnCreate(e) { console.log('on create!'); } event_click_btnAlready(e) { console.log('on already!'); } } */ /**Called upon an uncaught exception or other fatal error. Shows a friendly message to the user. */ function showUnexpectedError(info:any) { console.log('showUnexpectedError was called:'); console.log(info); if (!info) info = "?"; let infoStr = '' + info; //fill the <textarea> let elm = document.getElementById('unexpected_error_text'); if (elm) elm.firstChild.nodeValue = infoStr; //set the support email elm = document.getElementById('unexpected_error_email'); if (elm) { let noSpamPlease = 'cruxic' + '@' + 'gmail' + '.' 
+ 'com'; elm.firstChild.nodeValue = noSpamPlease; } //show the <div> elm = document.getElementById('unexpected_error'); if (elm) elm.style.display = 'block'; //hide all other content elm = document.getElementById('content'); if (elm) elm.innerHTML = ' '; } function showErr(text:string) { let elm = document.getElementById('err'); if (!elm) { showUnexpectedError('Screen has no #err element.'); return; } setElmText(elm, text); elm.style.display = 'block'; } /* class MasterPassPrompt { html:string; elm_pass:any = null; ctx:Context; constructor(ctx:Context) { this.ctx = ctx; this.html = ` <input id="pass" type="password" placeholder="Master Password" size="20"/> <button id="btnOK">OK</button> <div id="err">?</div> `; } event_click_btnOK(e) { let pass = this.elm_pass.value.trim(); if (pass.length < 8) { console.log('here'); showErr('Password must be at least 8 characters.'); return; } let rawPlain = stringToUTF8(pass); setScreen(new StretchingMasterPass(this.ctx, rawPlain)); } async onScreenReady() { this.elm_pass.focus(); } } */<|fim▁hole|> //when the user started calculating the password. //Example: '["https","example.com",8080]' origin:string = null; //The host name or program name the user chose to //calculate a password for (not necessarily a domain // name) sitename:string = null; stretchTookMillis:number = 0; siteKey: calcpass2017a.SiteKey = null; revision:number = 0; /*toPlainObject():any { return { origHostname: this.origHostname, sitename: }; }*/ } class StretchingMasterPass { html:string; elm_progBar:any = null; elm_progPercent:any = null; ctx:Context; plaintext:Uint8Array; constructor(ctx:Context, plaintext:Uint8Array) { this.ctx = ctx; this.plaintext = plaintext; this.html = ` <div class="progressBar"> <div id="progBar">Stretching password. 
<span id="progPercent">0%</span></div> </div> `; } setProgressBar(percent:number) { if (percent < 0.0) percent = 0.0; if (percent > 1.0) percent = 1.0; var percentStr = '' + Math.floor(percent * 100.0); this.elm_progBar.style.width = percentStr + '%'; setElmText(this.elm_progPercent, percentStr + '%'); } async onScreenReady() { console.log("TODO: get salt from local storage"); let userEmail = "a@b.c"; let pass = this.plaintext; let pbc = new calcpass2017a.ParallelBcrypt(); pbc.execute = execute_parallel_bcrypt_webworkers; pbc.progressCallback = (percent:number) => { this.setProgressBar(percent); }; let t1 = performance.now(); let stretchedMaster = await calcpass2017a.StretchMasterPassword(pass, userEmail, pbc); let t2 = performance.now(); this.ctx.stretchTookMillis = Math.ceil(t2 - t1); erase(this.plaintext); this.plaintext = null; setScreen(new PromptCardCode(this.ctx, stretchedMaster)); } } class PromptCardCode { html:string; ctx:Context; elm_chars:any = null; codeNum:number; constructor(ctx:Context, stretchedMaster:calcpass2017a.StretchedMaster) { this.ctx = ctx; ctx.siteKey = calcpass2017a.MakeSiteKey(stretchedMaster, ctx.sitename, ctx.revision); erase(stretchedMaster.bytes); stretchedMaster.bytes = null; //Remember the siteKey in RAM of the background script so that we can skip the lengthy // stretching step if the user requests the same password again firefox.sendMessage({ SET_STATE:true, origin: ctx.origin, sitename: ctx.sitename, revision: ctx.revision, siteKeyHex: hex.encode(ctx.siteKey.bytes) }); this.codeNum = calcpass2017a.GetCardCodeNumber(ctx.siteKey); this.html = ` <table> <tr> <td> Enter code <span class="codeNum">${this.codeNum}</span> from your card. 
<p> <input id="chars" type="password" size="8" maxlength="8"/> <img src="/icons/reveal.png" alt="reveal" title="TODO" style="height:20px" /> <button id="next">OK</button> </td> <td> <img src="img/enter-card-code.png" style="width: 5em; margin-left: 2em;" alt="picture of card"/> </td> </tr> </table> <div id="err">?</div> `; } async onScreenReady() { this.elm_chars.focus(); } event_click_next(e) { let chars = this.elm_chars.value.trim(); if (chars.length != 8) { showErr("Please enter 8 characters."); return; } let codeFromCard = calcpass2017a.CheckCardCode(chars, this.codeNum); if (!codeFromCard) { showErr('Checksum failed. Typo? Wrong code?'); return; } let siteCardMix = calcpass2017a.MixSiteAndCard(this.ctx.siteKey, codeFromCard); erase(codeFromCard); setScreen(new StretchingFinal(this.ctx, siteCardMix)); } } class StretchingFinal { html:string; elm_progBar:any = null; elm_progPercent:any = null; ctx:Context; siteCardMix:calcpass2017a.SiteCardMix; constructor(ctx:Context, siteCardMix:calcpass2017a.SiteCardMix) { this.ctx = ctx; this.siteCardMix = siteCardMix; this.html = ` <div class="progressBar"> <div id="progBar">Stretching card code. 
<span id="progPercent">0%</span></div> </div> `; } setProgressBar(percent:number) { if (percent < 0.0) percent = 0.0; if (percent > 1.0) percent = 1.0; var percentStr = '' + Math.floor(percent * 100.0); this.elm_progBar.style.width = percentStr + '%'; setElmText(this.elm_progPercent, percentStr + '%'); } async onScreenReady() { let pbc = new calcpass2017a.ParallelBcrypt(); pbc.execute = execute_parallel_bcrypt_webworkers; pbc.progressCallback = (percent:number) => { this.setProgressBar(percent); }; let passwordSeed = await calcpass2017a.StretchSiteCardMix(this.siteCardMix, pbc); let password = calcpass2017a.MakeFriendlyPassword12a(passwordSeed); erase(this.siteCardMix); erase(passwordSeed); //Remember the password in RAM of the background script for a short period of time firefox.sendMessage({ SET_STATE:true, origin: ctx.origin, sitename: ctx.sitename, revision: ctx.revision, password: password }); setScreen(new PasswordReady(this.ctx, password)); } } class PasswordReady { html:string; ctx:Context; elm_warnCopy = null; didCopyWarning = false; elm_showPassDiv = null; password:string; constructor(ctx:Context, password:string) { this.ctx = ctx; //Send the password to all our content-scripts //TODO: use a random shared secret stored in local storage to sign each message and xor the password. //Should I use window.crypto for the sha256 to keep the content-script light? //Perhaps it would be best to prove out the attack(s) I am trying to prevent... firefox.sendMessageToAllContentScriptsInTab(gRootFrame.tabId, { PASSWORD_READY:true, password: password, }); this.password = password; this.html = ` <h2>Password Ready</h2> <p> To insert your password, select the desired password field and press the <b>Control</b> key. <p> <button id="btnShow">Show Password</button> <button id="btnCopy">Copy to Clipboard</button> <div id="warnCopy" style="display:none; width: 85%;"> Copying makes the password visible to every program on this computer, including Malware and Spyware. 
Click Copy again to confirm. </div> <div id="showPassDiv" style="display:none"> <table border="1"> <tr> <td>Abcd</td> <td>efgh</td> <td>ijk1</td> </tr> </table> </div> <div id="err">?</div> `; } event_click_btnShow(e) { this.elm_showPassDiv.style.display = 'block'; //TODO: fill in table } event_click_btnCopy(e) { if (this.didCopyWarning) { //this.elm_warnCopy.style.display = 'none'; this.elm_warnCopy.firstChild.nodeValue = 'Copied'; showErr('copy not yet ready'); return; } else { this.didCopyWarning = true; this.elm_warnCopy.style.display = 'block'; } } } function escapeHTML(text:string): string { //TODO return text; } class PromptParameters { html:string; elm_selSite = null; elm_jumble_symbols:any = null; elm_spanJumbleSymbols:any = null; elm_pass = null; elm_selFormat = null; shortDomain:string; fullDomain:string; ctx:Context; contentFrames:Array<ContentFrameInfo>; constructor() { //Parse '["https","example.com",8080]' let rootOrigin = JSON.parse(gRootFrame.origin); if (rootOrigin.length != 3 || !rootOrigin[0] || !rootOrigin[1]) throw new Error('Invalid origin'); let hostname = rootOrigin[1].toLowerCase(); this.ctx = new Context(); this.ctx.origin = gRootFrame.origin; //TODO: show warning if not HTTPS //let isHTTPS = scheme.toLowerCase() === 'https'; this.shortDomain = calcpass_misc.removeSubdomains(hostname); this.fullDomain = hostname; this.html = `<h2>Calculate Password</h2> <div class="inputRow"> <label class="pushRight" for="selSite">Password For:</label> <select id="selSite"> <option value="short" selected="selected">${escapeHTML(this.shortDomain)}</option> <option value="full">${escapeHTML(this.fullDomain)}</option> <option value="other">Other</option> </select> </div> <div class="inputRow"> <label class="pushRight" for="selRev">Password Revision:</label> <select id="selRev"> <option value="0" selected="selected">0</option> <option value="1">1</option> <option value="2">2</option> <option value="3">3</option> <option value="4">4</option> <option 
value="other">Other</option> </select> </div> <div class="inputRow"> <label class="pushRight" for="selFormat">Password Format:</label> <select id="selFormat" title="Example: Alqingezioe7"> <option value="default" selected="selected">Default</option> <option value="sixteen">Sixteen</option> <option value="jumble">Jumble</option> <!-- I do not wish to offer any choice which could yield a password weaker than the default format because that opens an attack vector. --> </select> <span id="spanJumbleSymbols" style="display:none"> with <input id="jumble_symbols" type="text" size="2" value="@,"/></span> </div> <div class="inputRow"> <label class="pushRight" for="pass">Master Password</label> <input id="pass" type="password" value="" size="20"/> <img src="/icons/reveal.png" alt="reveal" title="TODO" style="height:20px" /> </div> <div id="err">?</div> <div style="margin-top: 2em"> <button id="btnNext" style="width: 10em; float:right; margin-right: 1em;">Next</button> </div> `; } on_selFormat_change(e) { let jumble = this.elm_selFormat.value == 'jumble'; this.elm_spanJumbleSymbols.style.display = jumble ? 
'inline' : 'none'; } event_click_btnNext(e) { let pass = this.elm_pass.value.trim(); if (pass.length < 8) { showErr('Master Password must be 8 characters.'); return; } let hostname:string; switch (this.elm_selSite.value) { case 'short': hostname = this.shortDomain; break; case 'full': hostname = this.fullDomain; break; case 'other': showUnexpectedError('selSite other not yet implemented'); return; default: showUnexpectedError('invalid selSite'); return; } this.ctx.sitename = hostname; //TODO: assign format etc let rawPlain = stringToUTF8(pass); setScreen(new StretchingMasterPass(this.ctx, rawPlain)); } onScreenReady() { this.elm_selFormat.addEventListener('change', (e) => {this.on_selFormat_change(e);}); this.elm_pass.focus(); } } /*class WarnNoSelectedInput { html:string; constructor() { this.html =` <h2>No Field Selected</h2> <p> Please select a password or text field on the current web page. <p> If you wish to calculate a password but not insert it into this web page you can <a href="#">proceed anyway</a>. `; } }*/ class WarnUnableToLoadContentScript { html:string; constructor() { this.html =` Please browse to the website which you need a password for. <p> If you wish to calculate a password but not insert it into a web page you can <a href="#">proceed anyway</a>. `; } } async function showFirstScreen() { //Skip some prompts if the user has already entered info for this origin. //The state is remembered in RAM by the background-script. 
let state = await firefox.sendMessage({GET_STATE:true}); if (state && state.origin === gRootFrame.origin) { /* if (state.password) setScreen(new PasswordReady(ctx, state.password)); else if (state.siteKeyHex) setScreen(new PromptCardCode( */ } setScreen(new PromptParameters()); } function onMessage(msg, sender) { console.log('popup onMessage: ', JSON.stringify(msg)); //If sent from the content-script, get the tabId and frameId let cfi = null; if (sender.tab) { cfi = new ContentFrameInfo(); cfi.tabId = sender.tab.id; cfi.frameId = sender.frameId; } //sender is a runtime.MessageSender if (msg.CONTENT_SCRIPT_READY) { if (!cfi) throw new Error('Received CONTENT_SCRIPT_READY from unknown tab'); cfi.origin = msg.origin; cfi.hasChildFrames = msg.hasChildFrames; //Remember each frame which reports in. //We will refuse to reveal the password to any //other frame gAllFrames.push(cfi); //TODO: verify sane origin //if (!isSaneOrigin(cfi.origin)) // throw ... //root frame? if (cfi.frameId == 0) { if (gRootFrame) throw new Error('Multiple CONTENT_SCRIPT_READY from the root frame!'); gRootFrame = cfi; showFirstScreen(); } } } //Fires when we get tired of waiting for the child frames to report back /*function onWarnNoSelectedInputTimeout() { //If still no target then show a warning if (!gTargetFrame) { gTargetFrame = gRootFrame; setScreen(new WarnNoSelectedInput()); } }*/ async function load() { /* Our first task is to tell the user the domain name of the website they are logging in to. This is made tricky by the fact that some websites use an <iframe> for login (eg gog.com). The iframe often has a different domain name. Other sites have a login <form> which submits to a different domain. Some sites set the <form> action attribute dyamically, with JavaScript, so you can't tell by looking at the markup where it will submit to. I'll bet some sites even do login with AJAX. 
The fact is, when you type a password into a web page, you have no guarantee where that password will be sent to. You must simply TRUST that the parent website will take good care of your precious password. So it call comes down to trust. When I visit example.com and start typing my password I must trust example.com. example.com would not trick me into typing into an <iframe> of a malicious website. Nor would they trick me by submitting the <form> to a malicious website. If I don't trust example.com then I shouldn't type a password into it. If example.com was trustworthy but is later compromised then it's game over - the attacker will get your password and theres little we can do to stop it. In summary, the calcpass extension will use the domain name which appears in the address bar. It will not use the domain of the <iframe> or the domain where the <form> submits to. This solution is simple, easy to explain, and completely deterministic. */ console.log('popup load()'); try { firefox.addOnMessageListener(onMessage); try { await firefox.loadContentScriptIntoActiveTab(true); //onMessage() will be called repeatedly for each frame which // loaded the content script } catch (e) { //This happens when the active tab is not a normal web page (eg about:blank) console.log('Unable to load content script: ' + e); setScreen(new WarnUnableToLoadContentScript()); return; } } catch (e) { showUnexpectedError(e); } } load();<|fim▁end|>
class Context { //the scheme, host and port that was in the address bar
<|file_name|>basicTypesLab.ts<|end_file_name|><|fim▁begin|>// Basic-types lab: a button that recolors a square <div> on click.
let color: string = "green";
let squareSizeNum: number = 100;
let squareSize: string = `${ squareSizeNum }px`;

let button: Element = document.createElement('button');
let div: Element = document.createElement('div');

// `style` lives on HTMLElement, not Element, hence the casts.
(div as HTMLElement).style.width = squareSize;
(div as HTMLElement).style.height = squareSize;
button.textContent = "Change Color";

// Paints `elem`'s background with `color`; always reports success.
let colorChange: Function = (elem: Element, color: string): boolean => {
    (elem as HTMLElement).style.backgroundColor = color;
    return true;
};  // Semicolon is required here: without it, the parenthesized line below
    // is parsed as an immediate call of this arrow function (a syntax error;
    // ASI never inserts a semicolon before `(`).

(button as HTMLElement).onclick = (event) => {
    colorChange(div, color);
};

document.body.appendChild(button);<|fim▁hole|>
document.body.appendChild(div);<|fim▁end|>
<|file_name|>model.py<|end_file_name|><|fim▁begin|>import numpy as np

from menpo.model import PCAModel
from menpo.visualize import print_progress


def prune(weights, n_retained=50):
    """Rank samples by the energy of their leading PCA weights.

    Parameters:
        weights: per-sample PCA weight matrix; assumed shape
            (n_samples, n_components) -- TODO confirm against callers.
        n_retained: number of leading components used for the ranking.

    Returns:
        (w_norm, bad_to_good_index): per-sample sum of squared leading
        weights, and sample indices ordered from highest norm to lowest.
    """
    # Sum of squares over the first `n_retained` components, per sample.
    w_norm = (weights[:, :n_retained] ** 2).sum(axis=1)
    # High weights here suggest problematic samples
    bad_to_good_index = np.argsort(w_norm)[::-1]
    return w_norm, bad_to_good_index


def pca_and_weights(meshes, retain_eig_cum_val=0.997, verbose=False):
    """Build a trimmed PCA model of `meshes` and project each mesh into it.

    Parameters:
        meshes: iterable of meshes accepted by menpo's PCAModel.
        retain_eig_cum_val: cumulative eigenvalue ratio to retain when
            trimming the model (default keeps 99.7% of the variance).
        verbose: when True, print the retention summary and show a
            progress bar while computing weights.

    Returns:
        (model, weights): the trimmed PCAModel and the stacked per-mesh
        projection weights, each divided by sqrt of the eigenvalues.
    """
    model = PCAModel(meshes, verbose=verbose)
    # Count the components that survive the cumulative-variance cut-off.
    n_comps_retained = (model.eigenvalues_cumulative_ratio() <
                        retain_eig_cum_val).sum()
    if verbose:
        print('\nRetaining {:.2%} of eigenvalues keeps {} components'.format(
            retain_eig_cum_val, n_comps_retained))
    model.trim_components(retain_eig_cum_val)
    if verbose:
        meshes = print_progress(meshes, prefix='Calculating weights')
<|fim▁hole|>
            / np.sqrt(model.eigenvalues))
    return model, weights<|fim▁end|>
weights = (np.vstack([model.project(m) for m in meshes])
<|file_name|>kubernetes.spec.js<|end_file_name|><|fim▁begin|>/* global describe, beforeEach, it */ const path = require('path'); const assert = require('yeoman-assert'); const helpers = require('yeoman-test'); const fse = require('fs-extra'); const expectedFiles = { eurekaregistry: [ './k8s/registry/jhipster-registry.yml', './k8s/registry/application-configmap.yml' ], consulregistry: [ './k8s/registry/consul.yml', './k8s/registry/consul-config-loader.yml', './k8s/registry/application-configmap.yml' ], jhgate: [ './k8s/jhgate/jhgate-deployment.yml', './k8s/jhgate/jhgate-mysql.yml', './k8s/jhgate/jhgate-service.yml' ], jhgateingress: [ './k8s/jhgate/jhgate-deployment.yml', './k8s/jhgate/jhgate-mysql.yml', './k8s/jhgate/jhgate-service.yml', './k8s/jhgate/jhgate-ingress.yml' ], customnamespace: [ './k8s/namespace.yml' ], jhconsole: [ './k8s/console/jhipster-console.yml', './k8s/console/jhipster-elasticsearch.yml', './k8s/console/jhipster-logstash.yml', './k8s/console/jhipster-dashboard-console.yml', './k8s/console/jhipster-zipkin.yml' ], msmysql: [ './k8s/msmysql/msmysql-deployment.yml', './k8s/msmysql/msmysql-mysql.yml', './k8s/msmysql/msmysql-service.yml' ], mspsql: [ './k8s/mspsql/mspsql-deployment.yml', './k8s/mspsql/mspsql-postgresql.yml', './k8s/mspsql/mspsql-service.yml', './k8s/mspsql/mspsql-elasticsearch.yml' ], msmongodb: [ './k8s/msmongodb/msmongodb-deployment.yml', './k8s/msmongodb/msmongodb-mongodb.yml', './k8s/msmongodb/msmongodb-service.yml' ], msmariadb: [ './k8s/msmariadb/msmariadb-deployment.yml', './k8s/msmariadb/msmariadb-mariadb.yml', './k8s/msmariadb/msmariadb-service.yml' ], monolith: [ './k8s/samplemysql/samplemysql-deployment.yml', './k8s/samplemysql/samplemysql-mysql.yml', './k8s/samplemysql/samplemysql-service.yml', './k8s/samplemysql/samplemysql-elasticsearch.yml' ], kafka: [ './k8s/samplekafka/samplekafka-deployment.yml', './k8s/samplekafka/samplekafka-mysql.yml', './k8s/samplekafka/samplekafka-service.yml', './k8s/messagebroker/kafka.yml' 
], prometheusmonit: [ './k8s/monitoring/jhipster-prometheus-crd.yml', './k8s/monitoring/jhipster-prometheus-cr.yml', './k8s/monitoring/jhipster-grafana.yml', './k8s/monitoring/jhipster-grafana-dashboard.yml' ] }; describe('JHipster Kubernetes Sub Generator', () => { describe('only gateway', () => { beforeEach((done) => { helpers .run(require.resolve('../generators/kubernetes')) .inTmpDir((dir) => { fse.copySync(path.join(__dirname, './templates/compose/'), dir); }) .withOptions({ skipChecks: true }) .withPrompts({ composeApplicationType: 'microservice', directoryPath: './', chosenApps: [ '01-gateway' ], adminPassword: 'meetup', dockerRepositoryName: 'jhipsterrepository', dockerPushCommand: 'docker push', kubernetesNamespace: 'jhipsternamespace', jhipsterConsole: false, kubernetesServiceType: 'LoadBalancer', clusteredDbApps: [] }) .on('end', done); }); it('creates expected registry files and content', () => { assert.file(expectedFiles.eurekaregistry); assert.fileContent('./k8s/registry/jhipster-registry.yml', /# base64 encoded "meetup"/); }); it('creates expected gateway files and content', () => { assert.file(expectedFiles.jhgate); assert.fileContent('./k8s/jhgate/jhgate-deployment.yml', /image: jhipsterrepository\/jhgate/); assert.fileContent('./k8s/jhgate/jhgate-deployment.yml', /jhipsternamespace.svc.cluster/); }); }); describe('gateway and mysql microservice', () => { beforeEach((done) => { helpers .run(require.resolve('../generators/kubernetes')) .inTmpDir((dir) => { fse.copySync(path.join(__dirname, './templates/compose/'), dir); }) .withOptions({ skipChecks: true }) .withPrompts({ composeApplicationType: 'microservice', directoryPath: './', chosenApps: [ '01-gateway', '02-mysql' ], dockerRepositoryName: 'jhipster', dockerPushCommand: 'docker push', kubernetesNamespace: 'default', jhipsterConsole: false, kubernetesServiceType: 'LoadBalancer', clusteredDbApps: [] }) .on('end', done); }); it('creates expected registry files', () => { 
assert.file(expectedFiles.eurekaregistry); }); it('creates expected gateway files', () => { assert.file(expectedFiles.jhgate); }); it('creates expected mysql files', () => { assert.file(expectedFiles.msmysql); }); }); describe('mysql microservice with custom namespace and jhipster-console (with zipkin)', () => { beforeEach((done) => { helpers .run(require.resolve('../generators/kubernetes')) .inTmpDir((dir) => { fse.copySync(path.join(__dirname, './templates/compose/'), dir); }) .withOptions({ skipChecks: true }) .withPrompts({ composeApplicationType: 'microservice', directoryPath: './', chosenApps: [ '02-mysql' ], dockerRepositoryName: 'jhipster', dockerPushCommand: 'docker push', kubernetesNamespace: 'mynamespace', monitoring: 'elk', jhipsterConsole: true, kubernetesServiceType: 'LoadBalancer', clusteredDbApps: [] }) .on('end', done); }); it('creates expected registry files', () => { assert.file(expectedFiles.eurekaregistry); }); it('creates expected mysql files', () => { assert.file(expectedFiles.msmysql); }); it('creates expected jhipster-console files', () => { assert.file(expectedFiles.jhconsole); }); it('creates expected namespace file', () => { assert.file(expectedFiles.customnamespace); }); }); describe('gateway and ingress', () => { beforeEach((done) => { helpers .run(require.resolve('../generators/kubernetes')) .inTmpDir((dir) => { fse.copySync(path.join(__dirname, './templates/compose/'), dir); }) .withOptions({ skipChecks: true }) .withPrompts({ composeApplicationType: 'microservice', directoryPath: './', chosenApps: [ '01-gateway' ], dockerRepositoryName: 'jhipster', dockerPushCommand: 'docker push', kubernetesNamespace: 'default', kubernetesServiceType: 'Ingress',<|fim▁hole|> }) .on('end', done); }); it('creates expected registry files', () => { assert.file(expectedFiles.eurekaregistry); }); it('creates expected gateway files', () => { assert.file(expectedFiles.jhgate); }); it('creates expected ingress files', () => { 
assert.file(expectedFiles.jhgateingress); }); }); describe('MySQL and PostgreSQL microservices without gateway', () => { beforeEach((done) => { helpers .run(require.resolve('../generators/kubernetes')) .inTmpDir((dir) => { fse.copySync(path.join(__dirname, './templates/compose/'), dir); }) .withOptions({ skipChecks: true }) .withPrompts({ composeApplicationType: 'microservice', directoryPath: './', chosenApps: [ '02-mysql', '03-psql' ], dockerRepositoryName: 'jhipster', dockerPushCommand: 'docker push', kubernetesNamespace: 'default', jhipsterConsole: false, kubernetesServiceType: 'LoadBalancer', clusteredDbApps: [] }) .on('end', done); }); it('creates expected registry files', () => { assert.file(expectedFiles.eurekaregistry); }); it('doesn\'t creates gateway files', () => { assert.noFile(expectedFiles.jhgate); }); it('creates expected mysql files', () => { assert.file(expectedFiles.msmysql); }); it('creates expected psql files', () => { assert.file(expectedFiles.mspsql); }); }); describe('gateway, mysql, psql, mongodb, mariadb microservices', () => { beforeEach((done) => { helpers .run(require.resolve('../generators/kubernetes')) .inTmpDir((dir) => { fse.copySync(path.join(__dirname, './templates/compose/'), dir); }) .withOptions({ skipChecks: true }) .withPrompts({ composeApplicationType: 'microservice', directoryPath: './', chosenApps: [ '01-gateway', '02-mysql', '03-psql', '04-mongo', '07-mariadb' ], dockerRepositoryName: 'jhipster', dockerPushCommand: 'docker push', kubernetesNamespace: 'default', jhipsterConsole: false, kubernetesServiceType: 'LoadBalancer', clusteredDbApps: [] }) .on('end', done); }); it('creates expected registry files', () => { assert.file(expectedFiles.eurekaregistry); }); it('creates expected gateway files', () => { assert.file(expectedFiles.jhgate); }); it('creates expected mysql files', () => { assert.file(expectedFiles.msmysql); }); it('creates expected psql files', () => { assert.file(expectedFiles.mspsql); }); it('creates expected 
mongodb files', () => { assert.file(expectedFiles.msmongodb); }); it('creates expected mariadb files', () => { assert.file(expectedFiles.msmariadb); }); }); describe('monolith application', () => { beforeEach((done) => { helpers .run(require.resolve('../generators/kubernetes')) .inTmpDir((dir) => { fse.copySync(path.join(__dirname, './templates/compose/'), dir); }) .withOptions({ skipChecks: true }) .withPrompts({ composeApplicationType: 'monolith', directoryPath: './', chosenApps: [ '08-monolith' ], dockerRepositoryName: 'jhipster', dockerPushCommand: 'docker push', kubernetesNamespace: 'default', jhipsterConsole: false, kubernetesServiceType: 'LoadBalancer', clusteredDbApps: [] }) .on('end', done); }); it('doesn\'t creates registry files', () => { assert.noFile(expectedFiles.eurekaregistry); }); it('creates expected default files', () => { assert.file(expectedFiles.monolith); }); }); describe('Kafka application', () => { beforeEach((done) => { helpers .run(require.resolve('../generators/kubernetes')) .inTmpDir((dir) => { fse.copySync(path.join(__dirname, './templates/compose/'), dir); }) .withOptions({ skipChecks: true }) .withPrompts({ composeApplicationType: 'monolith', directoryPath: './', chosenApps: [ '09-kafka' ], dockerRepositoryName: 'jhipster', dockerPushCommand: 'docker push', kubernetesNamespace: 'default', jhipsterConsole: false, kubernetesServiceType: 'LoadBalancer', clusteredDbApps: [] }) .on('end', done); }); it('doesn\'t creates registry files', () => { assert.noFile(expectedFiles.eurekaregistry); }); it('creates expected default files', () => { assert.file(expectedFiles.kafka); }); }); describe('mysql microservice with custom namespace and jhipster prometheus monitoring', () => { beforeEach((done) => { helpers .run(require.resolve('../generators/kubernetes')) .inTmpDir((dir) => { fse.copySync(path.join(__dirname, './templates/compose/'), dir); }) .withOptions({ skipChecks: true }) .withPrompts({ composeApplicationType: 'microservice', 
directoryPath: './', chosenApps: [ '02-mysql' ], dockerRepositoryName: 'jhipster', dockerPushCommand: 'docker push', kubernetesNamespace: 'mynamespace', monitoring: 'prometheus', kubernetesServiceType: 'LoadBalancer' }) .on('end', done); }); it('creates expected registry files', () => { assert.file(expectedFiles.eurekaregistry); }); it('creates expected mysql files', () => { assert.file(expectedFiles.msmysql); }); it('creates expected prometheus files', () => { assert.file(expectedFiles.prometheusmonit); }); it('creates expected namespace file', () => { assert.file(expectedFiles.customnamespace); }); }); });<|fim▁end|>
ingressDomain: 'example.com', clusteredDbApps: []
<|file_name|>http.go<|end_file_name|><|fim▁begin|>// Copyright 2015 CoreOS, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package rafthttp import ( "errors" "fmt" "io/ioutil" "net/http" "path" "github.com/coreos/etcd/Godeps/_workspace/src/golang.org/x/net/context" pioutil "github.com/coreos/etcd/pkg/ioutil" "github.com/coreos/etcd/pkg/types" "github.com/coreos/etcd/raft/raftpb" "github.com/coreos/etcd/snap" "github.com/coreos/etcd/version" ) const ( // connReadLimitByte limits the number of bytes // a single read can read out. // // 64KB should be large enough for not causing // throughput bottleneck as well as small enough // for not causing a read timeout. connReadLimitByte = 64 * 1024 ) var ( RaftPrefix = "/raft" ProbingPrefix = path.Join(RaftPrefix, "probing") RaftStreamPrefix = path.Join(RaftPrefix, "stream") RaftSnapshotPrefix = path.Join(RaftPrefix, "snapshot") errIncompatibleVersion = errors.New("incompatible version") errClusterIDMismatch = errors.New("cluster ID mismatch") ) type peerGetter interface { Get(id types.ID) Peer } type writerToResponse interface { WriteTo(w http.ResponseWriter) } type pipelineHandler struct { r Raft cid types.ID } // newPipelineHandler returns a handler for handling raft messages // from pipeline for RaftPrefix. // // The handler reads out the raft message from request body, // and forwards it to the given raft state machine for processing. 
func newPipelineHandler(r Raft, cid types.ID) http.Handler { return &pipelineHandler{ r: r, cid: cid, } } func (h *pipelineHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { if r.Method != "POST" { w.Header().Set("Allow", "POST") http.Error(w, "Method Not Allowed", http.StatusMethodNotAllowed) return } w.Header().Set("X-Etcd-Cluster-ID", h.cid.String()) if err := checkClusterCompatibilityFromHeader(r.Header, h.cid); err != nil { http.Error(w, err.Error(), http.StatusPreconditionFailed) return } // Limit the data size that could be read from the request body, which ensures that read from // connection will not time out accidentally due to possible blocking in underlying implementation. limitedr := pioutil.NewLimitedBufferReader(r.Body, connReadLimitByte) b, err := ioutil.ReadAll(limitedr) if err != nil { plog.Errorf("failed to read raft message (%v)", err) http.Error(w, "error reading raft message", http.StatusBadRequest) return } var m raftpb.Message if err := m.Unmarshal(b); err != nil { plog.Errorf("failed to unmarshal raft message (%v)", err) http.Error(w, "error unmarshaling raft message", http.StatusBadRequest) return } if err := h.r.Process(context.TODO(), m); err != nil { switch v := err.(type) { case writerToResponse: v.WriteTo(w) default: plog.Warningf("failed to process raft message (%v)", err) http.Error(w, "error processing raft message", http.StatusInternalServerError) } return } // Write StatusNoContet header after the message has been processed by // raft, which facilitates the client to report MsgSnap status. w.WriteHeader(http.StatusNoContent) } type snapshotHandler struct { r Raft snapshotter *snap.Snapshotter cid types.ID } func newSnapshotHandler(r Raft, snapshotter *snap.Snapshotter, cid types.ID) http.Handler { return &snapshotHandler{ r: r, snapshotter: snapshotter, cid: cid, } } // ServeHTTP serves HTTP request to receive and process snapshot message. 
// // If request sender dies without closing underlying TCP connection, // the handler will keep waiting for the request body until TCP keepalive // finds out that the connection is broken after several minutes. // This is acceptable because // 1. snapshot messages sent through other TCP connections could still be // received and processed. // 2. this case should happen rarely, so no further optimization is done. func (h *snapshotHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { if r.Method != "POST" { w.Header().Set("Allow", "POST") http.Error(w, "Method Not Allowed", http.StatusMethodNotAllowed) return } w.Header().Set("X-Etcd-Cluster-ID", h.cid.String()) if err := checkClusterCompatibilityFromHeader(r.Header, h.cid); err != nil { http.Error(w, err.Error(), http.StatusPreconditionFailed) return } dec := &messageDecoder{r: r.Body} m, err := dec.decode() if err != nil { msg := fmt.Sprintf("failed to decode raft message (%v)", err) plog.Errorf(msg) http.Error(w, msg, http.StatusBadRequest) return } if m.Type != raftpb.MsgSnap { plog.Errorf("unexpected raft message type %s on snapshot path", m.Type) http.Error(w, "wrong raft message type", http.StatusBadRequest) return } // save incoming database snapshot. if err := h.snapshotter.SaveDBFrom(r.Body, m.Snapshot.Metadata.Index); err != nil { msg := fmt.Sprintf("failed to save KV snapshot (%v)", err) plog.Error(msg) http.Error(w, msg, http.StatusInternalServerError) return } plog.Infof("received and saved database snapshot [index: %d, from: %s] successfully", m.Snapshot.Metadata.Index, types.ID(m.From)) if err := h.r.Process(context.TODO(), m); err != nil { switch v := err.(type) { // Process may return writerToResponse error when doing some // additional checks before calling raft.Node.Step. 
case writerToResponse: v.WriteTo(w) default: msg := fmt.Sprintf("failed to process raft message (%v)", err) plog.Warningf(msg) http.Error(w, msg, http.StatusInternalServerError) } return } // Write StatusNoContet header after the message has been processed by // raft, which facilitates the client to report MsgSnap status. w.WriteHeader(http.StatusNoContent) } type streamHandler struct { peerGetter peerGetter r Raft id types.ID cid types.ID } func newStreamHandler(peerGetter peerGetter, r Raft, id, cid types.ID) http.Handler { return &streamHandler{ peerGetter: peerGetter, r: r, id: id, cid: cid, } } func (h *streamHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { if r.Method != "GET" { w.Header().Set("Allow", "GET") http.Error(w, "Method Not Allowed", http.StatusMethodNotAllowed) return } w.Header().Set("X-Server-Version", version.Version) w.Header().Set("X-Etcd-Cluster-ID", h.cid.String()) if err := checkClusterCompatibilityFromHeader(r.Header, h.cid); err != nil { http.Error(w, err.Error(), http.StatusPreconditionFailed) return } var t streamType switch path.Dir(r.URL.Path) { case streamTypeMsgAppV2.endpoint(): t = streamTypeMsgAppV2 case streamTypeMessage.endpoint(): t = streamTypeMessage default: plog.Debugf("ignored unexpected streaming request path %s", r.URL.Path) http.Error(w, "invalid path", http.StatusNotFound) return } fromStr := path.Base(r.URL.Path) from, err := types.IDFromString(fromStr) if err != nil { plog.Errorf("failed to parse from %s into ID (%v)", fromStr, err) http.Error(w, "invalid from", http.StatusNotFound) return } if h.r.IsIDRemoved(uint64(from)) { plog.Warningf("rejected the stream from peer %s since it was removed", from) http.Error(w, "removed member", http.StatusGone) return } p := h.peerGetter.Get(from) if p == nil { // This may happen in following cases: // 1. user starts a remote peer that belongs to a different cluster // with the same cluster ID. // 2. 
local etcd falls behind of the cluster, and cannot recognize // the members that joined after its current progress. plog.Errorf("failed to find member %s in cluster %s", from, h.cid) http.Error(w, "error sender not found", http.StatusNotFound) return } wto := h.id.String() if gto := r.Header.Get("X-Raft-To"); gto != wto { plog.Errorf("streaming request ignored (ID mismatch got %s want %s)", gto, wto) http.Error(w, "to field mismatch", http.StatusPreconditionFailed) return } w.WriteHeader(http.StatusOK) w.(http.Flusher).Flush() c := newCloseNotifier() conn := &outgoingConn{ t: t,<|fim▁hole|> } p.attachOutgoingConn(conn) <-c.closeNotify() } // checkClusterCompatibilityFromHeader checks the cluster compatibility of // the local member from the given header. // It checks whether the version of local member is compatible with // the versions in the header, and whether the cluster ID of local member // matches the one in the header. func checkClusterCompatibilityFromHeader(header http.Header, cid types.ID) error { if err := checkVersionCompability(header.Get("X-Server-From"), serverVersion(header), minClusterVersion(header)); err != nil { plog.Errorf("request version incompatibility (%v)", err) return errIncompatibleVersion } if gcid := header.Get("X-Etcd-Cluster-ID"); gcid != cid.String() { plog.Errorf("request cluster ID mismatch (got %s want %s)", gcid, cid) return errClusterIDMismatch } return nil } type closeNotifier struct { done chan struct{} } func newCloseNotifier() *closeNotifier { return &closeNotifier{ done: make(chan struct{}), } } func (n *closeNotifier) Close() error { close(n.done) return nil } func (n *closeNotifier) closeNotify() <-chan struct{} { return n.done }<|fim▁end|>
Writer: w, Flusher: w.(http.Flusher), Closer: c,
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ gspread ~~~~~~~ Google Spreadsheets client library. """ __version__ = '0.2.1' __author__ = 'Anton Burnashev' from .client import Client, login from .models import Spreadsheet, Worksheet, Cell<|fim▁hole|> IncorrectCellLabel, WorksheetNotFound, UpdateCellError, RequestError)<|fim▁end|>
from .exceptions import (GSpreadException, AuthenticationError, SpreadsheetNotFound, NoValidUrlKeyFound,
<|file_name|>watchType.ts<|end_file_name|><|fim▁begin|>/* @internal */ namespace ts { // Additional tsserver specific watch information export interface WatchTypeRegistry { ClosedScriptInfo: "Closed Script info", ConfigFileForInferredRoot: "Config file for the inferred project root", <|fim▁hole|> MissingSourceMapFile: "Missing source map file", NoopConfigFileForInferredRoot: "Noop Config file for the inferred project root", MissingGeneratedFile: "Missing generated file", PackageJsonFile: "package.json file for import suggestions" } WatchType.ClosedScriptInfo = "Closed Script info"; WatchType.ConfigFileForInferredRoot = "Config file for the inferred project root"; WatchType.NodeModulesForClosedScriptInfo = "node_modules for closed script infos in them"; WatchType.MissingSourceMapFile = "Missing source map file"; WatchType.NoopConfigFileForInferredRoot = "Noop Config file for the inferred project root"; WatchType.MissingGeneratedFile = "Missing generated file"; WatchType.PackageJsonFile = "package.json file for import suggestions"; }<|fim▁end|>
NodeModulesForClosedScriptInfo: "node_modules for closed script infos in them",
<|file_name|>potencia.rs<|end_file_name|><|fim▁begin|>//A potencia elétrica em qualquer circuito é dada por :P = i . v<|fim▁hole|><|fim▁end|>
pub fn f_potencia(u: f64, r: f64) -> f64 { u * r }
<|file_name|>vaccaleibundgut.py<|end_file_name|><|fim▁begin|>import sys import os import glob import inspect import pylab as pl from numpy import * from scipy import optimize import pickle import time import copy cmd_folder = os.path.realpath(os.path.abspath(os.path.split(inspect.getfile(inspect.currentframe()))[0]) + "/templates") if cmd_folder not in sys.path: sys.path.insert(0, cmd_folder) from templutils import * import pylabsetup pl.ion() #fits the vacca leibundgut model to data: # a linear decay, with a gaussian peak on top, an exponential rise, and possibly a second gaussian (typically the Ia second bump around phase=25 days def minfunc(p, y, x, e, secondg, plot=False): ''' p is the parameter list if secondg=1: secondgaussian added if secondg=0: secondgaussian not parameters are: p[0]=first gaussian normalization (negative if fitting mag) p[1]=first gaussian mean p[2]=first gaussian sigma p[3]=linear decay offset p[4]=linear decay slope p[5]=exponxential rise slope p[6]=exponential zero point p[7]=second gaussian normalization (negative if fitting mag) p[8]=second gaussian mean p[9]=second gaussian sigma ''' if plot: pl.figure(3) pl.errorbar(x, y, yerr=e, color='k') import time # time.sleep(1)<|fim▁hole|> # print sum(((y-mycavvaccaleib(x,p,secondg=True))**2)) if secondg > 0: return sum(((y - mycavvaccaleib(x, p, secondg=True)) ** 2) / e ** 2) else: return sum(((y - mycavvaccaleib(x, p, secondg=False)) ** 2) / e ** 2) import scipy.optimize if __name__ == '__main__': lcv = np.loadtxt(sys.argv[1], unpack=True) secondg = False try: if int(sys.argv[2]) > 0: secondg = True except: pass x = lcv[1] y = lcv[2] e = lcv[3] mjd = lcv[0] ax = pl.figure(0, figsize=(10,5)).add_subplot(111) #pl.errorbar(x, y, yerr=e, color="#47b56c", label="data") p0 = [0] * 10 p0[0] = -4 peakdate = x[np.where(y == min(y))[0]] if len(peakdate) > 1: peakdate = peakdate[0] p0[1] = peakdate + 5 p0[2] = 10 # sigma #pl.draw() lintail = np.where(x > peakdate + 50)[0] if len(lintail) < 1: print 
"no tail data" linfit = np.polyfit(x[-2:], y[-2:], 1) p0[3] = linfit[1] p0[4] = linfit[0] else: linfit = np.polyfit(x[lintail], y[lintail], 1) p0[3] = linfit[1] p0[4] = linfit[0] p0[5] = 0.1 p0[6] = peakdate - 20 p0[7] = -1 p0[8] = peakdate + 25 p0[9] = 10 pl.figure(3) pl.clf() # pf= scipy.optimize.minimize(minfunc,p0,args=(y,x,1), method='Powell')#,options={'maxiter':5}) if secondg: p0[0] += 1.5 p0[1] *= 2 pl.plot(x[10:], mycavvaccaleib(x[10:], p0, secondg=True), 'm') pf = scipy.optimize.minimize(minfunc, p0, args=(y[10:], x[10:], e[10:], 1), method='Powell') # ,options={'maxiter':5}) else: pl.plot(x[10:], mycavvaccaleib(x[10:], p0, secondg=False), 'k') pf = scipy.optimize.minimize(minfunc, p0, args=(y[10:], x[10:], e[10:], 0), method='Powell') # ,options={'maxiter':5}) #pl.figure(4) pl.figure(0) ax.errorbar(mjd+0.5-53000, y, yerr=e, fmt=None, ms=7, alpha = 0.5, color='k', markersize=10,) ax.plot(mjd+0.5-53000, y, '.', ms=7, alpha = 0.5, color='#47b56c', markersize=10, label = "SN 19"+sys.argv[1].split('/')[-1].\ replace('.dat', '').replace('.', ' ')) # mycavvaccaleib(x,pf.x, secondg=True) mycavvaccaleib(x, pf.x, secondg=secondg) ax.plot(mjd[10:]+0.5-53000, mycavvaccaleib(x[10:], pf.x, secondg=secondg), 'k', linewidth=2, label="vacca leibundgut fit") # , alpha=0.5) # pl.plot(x,mycavvaccaleib(x,pf.x, secondg=True), 'k',linewidth=2, label="fit") xlen = mjd.max() - mjd.min() ax.set_xlim(mjd.min()-xlen*0.02+0.5-53000, mjd.max()+xlen*0.02+0.5-53000) ax.set_ylim(max(y + 0.1), min(y - 0.1)) ax2 = ax.twiny() Vmax = 2449095.23-2453000 ax2.tick_params('both', length=10, width=1, which='major') ax2.tick_params('both', length=5, width=1, which='minor') ax2.set_xlabel("phase (days)") ax2.set_xlim((ax.get_xlim()[0] - Vmax, ax.get_xlim()[1] - Vmax)) # pl.ylim(10,21) pl.draw() pl.legend() ax.set_xlabel("JD - 24530000") ax.set_ylabel("magnitude") #pl.title(sys.argv[1].split('/')[-1].replace('.dat', '').replace('.', ' ')) #pl.show() pl.tight_layout() pl.savefig("../fits/" + 
sys.argv[1].split('/')[-1].replace('.dat', '.vdfit.pdf')) cmd = "pdfcrop " + "../fits/" + sys.argv[1].split('/')[-1].replace('.dat', '.vdfit.pdf') print cmd os.system(cmd)<|fim▁end|>
<|file_name|>scrollspy.js<|end_file_name|><|fim▁begin|>(function($, UI) { "use strict"; var $win = $(window), $doc = $(document), scrollspies = [], checkScrollSpy = function() { for(var i=0; i < scrollspies.length; i++) { UI.support.requestAnimationFrame.apply(window, [scrollspies[i].check]); } }; UI.component('scrollspy', { defaults: { "cls" : "uk-scrollspy-inview", "initcls" : "uk-scrollspy-init-inview", "topoffset" : 0, "leftoffset" : 0, "repeat" : false, "delay" : 0 }, init: function() { var $this = this, idle, inviewstate, initinview, fn = function(){ var inview = UI.Utils.isInView($this.element, $this.options); if(inview && !inviewstate) { if(idle) clearTimeout(idle); if(!initinview) { $this.element.addClass($this.options.initcls); $this.offset = $this.element.offset(); initinview = true; $this.trigger("uk.scrollspy.init"); } idle = setTimeout(function(){ if(inview) { $this.element.addClass("uk-scrollspy-inview").addClass($this.options.cls).width(); }<|fim▁hole|> inviewstate = true; $this.trigger("uk.scrollspy.inview"); } if (!inview && inviewstate && $this.options.repeat) { $this.element.removeClass("uk-scrollspy-inview").removeClass($this.options.cls); inviewstate = false; $this.trigger("uk.scrollspy.outview"); } }; fn(); this.check = fn; scrollspies.push(this); } }); var scrollspynavs = [], checkScrollSpyNavs = function() { for(var i=0; i < scrollspynavs.length; i++) { UI.support.requestAnimationFrame.apply(window, [scrollspynavs[i].check]); } }; UI.component('scrollspynav', { defaults: { "cls" : 'uk-active', "closest" : false, "topoffset" : 0, "leftoffset" : 0, "smoothscroll" : false }, init: function() { var ids = [], links = this.find("a[href^='#']").each(function(){ ids.push($(this).attr("href")); }), targets = $(ids.join(",")); var $this = this, inviews, fn = function(){ inviews = []; for(var i=0 ; i < targets.length ; i++) { if(UI.Utils.isInView(targets.eq(i), $this.options)) { inviews.push(targets.eq(i)); } } if(inviews.length) { var scrollTop = 
$win.scrollTop(), target = (function(){ for(var i=0; i< inviews.length;i++){ if(inviews[i].offset().top >= scrollTop){ return inviews[i]; } } })(); if(!target) return; if($this.options.closest) { links.closest($this.options.closest).removeClass($this.options.cls).end().filter("a[href='#"+target.attr("id")+"']").closest($this.options.closest).addClass($this.options.cls); } else { links.removeClass($this.options.cls).filter("a[href='#"+target.attr("id")+"']").addClass($this.options.cls); } } }; if(this.options.smoothscroll && UI["smoothScroll"]) { links.each(function(){ UI.smoothScroll(this, $this.options.smoothscroll); }); } fn(); this.element.data("scrollspynav", this); this.check = fn; scrollspynavs.push(this); } }); var fnCheck = function(){ checkScrollSpy(); checkScrollSpyNavs(); }; // listen to scroll and resize $doc.on("uk-scroll", fnCheck); $win.on("resize orientationchange", UI.Utils.debounce(fnCheck, 50)); // init code $doc.on("uk-domready", function(e) { $("[data-uk-scrollspy]").each(function() { var element = $(this); if (!element.data("scrollspy")) { var obj = UI.scrollspy(element, UI.Utils.options(element.attr("data-uk-scrollspy"))); } }); $("[data-uk-scrollspy-nav]").each(function() { var element = $(this); if (!element.data("scrollspynav")) { var obj = UI.scrollspynav(element, UI.Utils.options(element.attr("data-uk-scrollspy-nav"))); } }); }); })(jQuery, jQuery.UIkit);<|fim▁end|>
}, $this.options.delay);
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use std::env; use std::error::Error; use std::ffi::OsStr; use std::fmt; use std::fs; use std::io::prelude::*; use std::os; use std::path::{Path, PathBuf}; use std::process::Output; use std::str; use std::usize; use rustc_serialize::json::Json; use url::Url; use hamcrest as ham; use cargo::util::ProcessBuilder; use cargo::util::ProcessError; use cargo::util::process; use support::paths::CargoPathExt; pub mod paths; pub mod git; pub mod registry; /* * * ===== Builders ===== * */ #[derive(PartialEq,Clone)] struct FileBuilder { path: PathBuf, body: String } impl FileBuilder { pub fn new(path: PathBuf, body: &str) -> FileBuilder { FileBuilder { path: path, body: body.to_string() } } fn mk(&self) -> Result<(), String> { try!(mkdir_recursive(&self.dirname())); let mut file = try!( fs::File::create(&self.path) .with_err_msg(format!("Could not create file; path={}", self.path.display()))); file.write_all(self.body.as_bytes()) .with_err_msg(format!("Could not write to file; path={}", self.path.display())) } fn dirname(&self) -> &Path { self.path.parent().unwrap() } } #[derive(PartialEq,Clone)] struct SymlinkBuilder { dst: PathBuf, src: PathBuf, } impl SymlinkBuilder { pub fn new(dst: PathBuf, src: PathBuf) -> SymlinkBuilder { SymlinkBuilder { dst: dst, src: src } } #[cfg(unix)] fn mk(&self) -> Result<(), String> { try!(mkdir_recursive(&self.dirname())); os::unix::fs::symlink(&self.dst, &self.src) .with_err_msg(format!("Could not create symlink; dst={} src={}", self.dst.display(), self.src.display())) } #[cfg(windows)] fn mk(&self) -> Result<(), String> { try!(mkdir_recursive(&self.dirname())); os::windows::fs::symlink_file(&self.dst, &self.src) .with_err_msg(format!("Could not create symlink; dst={} src={}", self.dst.display(), self.src.display())) } fn dirname(&self) -> &Path { self.src.parent().unwrap() } } #[derive(PartialEq,Clone)] pub struct ProjectBuilder { name: String, root: PathBuf, files: Vec<FileBuilder>, symlinks: 
Vec<SymlinkBuilder> } impl ProjectBuilder { pub fn new(name: &str, root: PathBuf) -> ProjectBuilder { ProjectBuilder { name: name.to_string(), root: root, files: vec![], symlinks: vec![] } } pub fn root(&self) -> PathBuf { self.root.clone() } pub fn url(&self) -> Url { path2url(self.root()) } pub fn bin(&self, b: &str) -> PathBuf { self.build_dir().join("debug").join(&format!("{}{}", b, env::consts::EXE_SUFFIX)) } pub fn release_bin(&self, b: &str) -> PathBuf { self.build_dir().join("release").join(&format!("{}{}", b, env::consts::EXE_SUFFIX)) } pub fn target_bin(&self, target: &str, b: &str) -> PathBuf { self.build_dir().join(target).join("debug") .join(&format!("{}{}", b, env::consts::EXE_SUFFIX)) } pub fn build_dir(&self) -> PathBuf { self.root.join("target") } pub fn process<T: AsRef<OsStr>>(&self, program: T) -> ProcessBuilder { let mut p = ::process(program); p.cwd(self.root()); return p } pub fn cargo(&self, cmd: &str) -> ProcessBuilder { let mut p = self.process(&cargo_dir().join("cargo")); p.arg(cmd); return p; } pub fn cargo_process(&self, cmd: &str) -> ProcessBuilder { self.build(); self.cargo(cmd) } pub fn file<B: AsRef<Path>>(mut self, path: B, body: &str) -> ProjectBuilder { self.files.push(FileBuilder::new(self.root.join(path), body)); self } pub fn symlink<T: AsRef<Path>>(mut self, dst: T, src: T) -> ProjectBuilder { self.symlinks.push(SymlinkBuilder::new(self.root.join(dst), self.root.join(src))); self } // TODO: return something different than a ProjectBuilder pub fn build(&self) -> &ProjectBuilder { match self.build_with_result() { Err(e) => panic!(e), _ => return self } } pub fn build_with_result(&self) -> Result<(), String> { // First, clean the directory if it already exists try!(self.rm_root()); // Create the empty directory try!(mkdir_recursive(&self.root)); for file in self.files.iter() { try!(file.mk()); } for symlink in self.symlinks.iter() { try!(symlink.mk()); } Ok(()) } fn rm_root(&self) -> Result<(), String> { if self.root.c_exists() 
{ rmdir_recursive(&self.root) } else { Ok(()) } } } // Generates a project layout pub fn project(name: &str) -> ProjectBuilder { ProjectBuilder::new(name, paths::root().join(name)) } // === Helpers === pub fn mkdir_recursive(path: &Path) -> Result<(), String> { fs::create_dir_all(path) .with_err_msg(format!("could not create directory; path={}", path.display())) } pub fn rmdir_recursive(path: &Path) -> Result<(), String> { path.rm_rf() .with_err_msg(format!("could not rm directory; path={}", path.display())) } pub fn main_file(println: &str, deps: &[&str]) -> String { let mut buf = String::new(); for dep in deps.iter() { buf.push_str(&format!("extern crate {};\n", dep)); } buf.push_str("fn main() { println!("); buf.push_str(&println); buf.push_str("); }\n"); buf.to_string() } trait ErrMsg<T> { fn with_err_msg(self, val: String) -> Result<T, String>; } impl<T, E: fmt::Display> ErrMsg<T> for Result<T, E> { fn with_err_msg(self, val: String) -> Result<T, String> { match self { Ok(val) => Ok(val), Err(err) => Err(format!("{}; original={}", val, err)) }<|fim▁hole|> // Path to cargo executables pub fn cargo_dir() -> PathBuf { env::var_os("CARGO_BIN_PATH").map(PathBuf::from).or_else(|| { env::current_exe().ok().as_ref().and_then(|s| s.parent()) .map(|s| s.to_path_buf()) }).unwrap_or_else(|| { panic!("CARGO_BIN_PATH wasn't set. Cannot continue running test") }) } /// Returns an absolute path in the filesystem that `path` points to. The /// returned path does not contain any symlinks in its hierarchy. 
/* * * ===== Matchers ===== * */ #[derive(Clone)] pub struct Execs { expect_stdout: Option<String>, expect_stdin: Option<String>, expect_stderr: Option<String>, expect_exit_code: Option<i32>, expect_stdout_contains: Vec<String>, expect_stderr_contains: Vec<String>, expect_json: Option<Json>, } impl Execs { pub fn with_stdout<S: ToString>(mut self, expected: S) -> Execs { self.expect_stdout = Some(expected.to_string()); self } pub fn with_stderr<S: ToString>(mut self, expected: S) -> Execs { self.expect_stderr = Some(expected.to_string()); self } pub fn with_status(mut self, expected: i32) -> Execs { self.expect_exit_code = Some(expected); self } pub fn with_stdout_contains<S: ToString>(mut self, expected: S) -> Execs { self.expect_stdout_contains.push(expected.to_string()); self } pub fn with_stderr_contains<S: ToString>(mut self, expected: S) -> Execs { self.expect_stderr_contains.push(expected.to_string()); self } pub fn with_json(mut self, expected: &str) -> Execs { self.expect_json = Some(Json::from_str(expected).unwrap()); self } fn match_output(&self, actual: &Output) -> ham::MatchResult { self.match_status(actual) .and(self.match_stdout(actual)) .and(self.match_stderr(actual)) } fn match_status(&self, actual: &Output) -> ham::MatchResult { match self.expect_exit_code { None => ham::success(), Some(code) => { ham::expect( actual.status.code() == Some(code), format!("exited with {}\n--- stdout\n{}\n--- stderr\n{}", actual.status, String::from_utf8_lossy(&actual.stdout), String::from_utf8_lossy(&actual.stderr))) } } } fn match_stdout(&self, actual: &Output) -> ham::MatchResult { try!(self.match_std(self.expect_stdout.as_ref(), &actual.stdout, "stdout", &actual.stderr, false)); for expect in self.expect_stdout_contains.iter() { try!(self.match_std(Some(expect), &actual.stdout, "stdout", &actual.stderr, true)); } for expect in self.expect_stderr_contains.iter() { try!(self.match_std(Some(expect), &actual.stderr, "stderr", &actual.stdout, true)); } if let Some(ref 
expect_json) = self.expect_json { try!(self.match_json(expect_json, &actual.stdout)); } Ok(()) } fn match_stderr(&self, actual: &Output) -> ham::MatchResult { self.match_std(self.expect_stderr.as_ref(), &actual.stderr, "stderr", &actual.stdout, false) } #[allow(deprecated)] // connect => join in 1.3 fn match_std(&self, expected: Option<&String>, actual: &[u8], description: &str, extra: &[u8], partial: bool) -> ham::MatchResult { let out = match expected { Some(out) => out, None => return ham::success(), }; let actual = match str::from_utf8(actual) { Err(..) => return Err(format!("{} was not utf8 encoded", description)), Ok(actual) => actual, }; // Let's not deal with \r\n vs \n on windows... let actual = actual.replace("\r", ""); let actual = actual.replace("\t", "<tab>"); let mut a = actual.lines(); let e = out.lines(); let diffs = if partial { let mut min = self.diff_lines(a.clone(), e.clone(), partial); while let Some(..) = a.next() { let a = self.diff_lines(a.clone(), e.clone(), partial); if a.len() < min.len() { min = a; } } min } else { self.diff_lines(a, e, partial) }; ham::expect(diffs.is_empty(), format!("differences:\n\ {}\n\n\ other output:\n\ `{}`", diffs.connect("\n"), String::from_utf8_lossy(extra))) } fn match_json(&self, expected: &Json, stdout: &[u8]) -> ham::MatchResult { let stdout = match str::from_utf8(stdout) { Err(..) => return Err("stdout was not utf8 encoded".to_owned()), Ok(stdout) => stdout, }; let actual = match Json::from_str(stdout) { Err(..) 
=> return Err(format!("Invalid json {}", stdout)), Ok(actual) => actual, }; match find_mismatch(expected, &actual) { Some((expected_part, actual_part)) => Err(format!( "JSON mismatch\nExpected:\n{}\nWas:\n{}\nExpected part:\n{}\nActual part:\n{}\n", expected.pretty(), actual.pretty(), expected_part.pretty(), actual_part.pretty() )), None => Ok(()), } } fn diff_lines<'a>(&self, actual: str::Lines<'a>, expected: str::Lines<'a>, partial: bool) -> Vec<String> { let actual = actual.take(if partial { expected.clone().count() } else { usize::MAX }); zip_all(actual, expected).enumerate().filter_map(|(i, (a,e))| { match (a, e) { (Some(a), Some(e)) => { if lines_match(&e, &a) { None } else { Some(format!("{:3} - |{}|\n + |{}|\n", i, e, a)) } }, (Some(a), None) => { Some(format!("{:3} -\n + |{}|\n", i, a)) }, (None, Some(e)) => { Some(format!("{:3} - |{}|\n +\n", i, e)) }, (None, None) => panic!("Cannot get here") } }).collect() } } fn lines_match(expected: &str, mut actual: &str) -> bool { for part in expected.split("[..]") { match actual.find(part) { Some(i) => actual = &actual[i + part.len()..], None => { return false } } } actual.is_empty() || expected.ends_with("[..]") } // Compares JSON object for approximate equality. // You can use `[..]` wildcard in strings (useful for OS dependent things such as paths). // Arrays are sorted before comparison. 
fn find_mismatch<'a>(expected: &'a Json, actual: &'a Json) -> Option<(&'a Json, &'a Json)> { use rustc_serialize::json::Json::*; match (expected, actual) { (&I64(l), &I64(r)) if l == r => None, (&F64(l), &F64(r)) if l == r => None, (&U64(l), &U64(r)) if l == r => None, (&Boolean(l), &Boolean(r)) if l == r => None, (&String(ref l), &String(ref r)) if lines_match(l, r) => None, (&Array(ref l), &Array(ref r)) => { if l.len() != r.len() { return Some((expected, actual)); } fn sorted(xs: &Vec<Json>) -> Vec<&Json> { let mut result = xs.iter().collect::<Vec<_>>(); // `unwrap` should be safe because JSON spec does not allow NaNs result.sort_by(|x, y| x.partial_cmp(y).unwrap()); result } sorted(l).iter().zip(sorted(r)) .filter_map(|(l, r)| find_mismatch(l, r)) .nth(0) } (&Object(ref l), &Object(ref r)) => { let same_keys = l.len() == r.len() && l.keys().all(|k| r.contains_key(k)); if !same_keys { return Some((expected, actual)); } l.values().zip(r.values()) .filter_map(|(l, r)| find_mismatch(l, r)) .nth(0) } (&Null, &Null) => None, _ => Some((expected, actual)), } } struct ZipAll<I1: Iterator, I2: Iterator> { first: I1, second: I2, } impl<T, I1: Iterator<Item=T>, I2: Iterator<Item=T>> Iterator for ZipAll<I1, I2> { type Item = (Option<T>, Option<T>); fn next(&mut self) -> Option<(Option<T>, Option<T>)> { let first = self.first.next(); let second = self.second.next(); match (first, second) { (None, None) => None, (a, b) => Some((a, b)) } } } fn zip_all<T, I1: Iterator<Item=T>, I2: Iterator<Item=T>>(a: I1, b: I2) -> ZipAll<I1, I2> { ZipAll { first: a, second: b, } } impl fmt::Display for Execs { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "execs") } } impl ham::Matcher<ProcessBuilder> for Execs { fn matches(&self, mut process: ProcessBuilder) -> ham::MatchResult { self.matches(&mut process) } } impl<'a> ham::Matcher<&'a mut ProcessBuilder> for Execs { fn matches(&self, process: &'a mut ProcessBuilder) -> ham::MatchResult { let res = 
process.exec_with_output(); match res { Ok(out) => self.match_output(&out), Err(ProcessError { output: Some(ref out), .. }) => { self.match_output(out) } Err(e) => { let mut s = format!("could not exec process {}: {}", process, e); match e.cause() { Some(cause) => s.push_str(&format!("\ncaused by: {}", cause.description())), None => {} } Err(s) } } } } pub fn execs() -> Execs { Execs { expect_stdout: None, expect_stderr: None, expect_stdin: None, expect_exit_code: None, expect_stdout_contains: Vec::new(), expect_stderr_contains: Vec::new(), expect_json: None, } } #[derive(Clone)] pub struct ShellWrites { expected: String } impl fmt::Display for ShellWrites { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "`{}` written to the shell", self.expected) } } impl<'a> ham::Matcher<&'a [u8]> for ShellWrites { fn matches(&self, actual: &[u8]) -> ham::MatchResult { let actual = String::from_utf8_lossy(actual); let actual = actual.to_string(); ham::expect(actual == self.expected, actual) } } pub fn shell_writes<T: fmt::Display>(string: T) -> ShellWrites { ShellWrites { expected: string.to_string() } } pub trait Tap { fn tap<F: FnOnce(&mut Self)>(mut self, callback: F) -> Self; } impl<T> Tap for T { fn tap<F: FnOnce(&mut Self)>(mut self, callback: F) -> T { callback(&mut self); self } } pub fn basic_bin_manifest(name: &str) -> String { format!(r#" [package] name = "{}" version = "0.5.0" authors = ["wycats@example.com"] [[bin]] name = "{}" "#, name, name) } pub fn basic_lib_manifest(name: &str) -> String { format!(r#" [package] name = "{}" version = "0.5.0" authors = ["wycats@example.com"] [lib] name = "{}" "#, name, name) } pub fn path2url(p: PathBuf) -> Url { Url::from_file_path(&*p).ok().unwrap() } pub static RUNNING: &'static str = " Running"; pub static COMPILING: &'static str = " Compiling"; pub static DOCUMENTING: &'static str = " Documenting"; pub static FRESH: &'static str = " Fresh"; pub static UPDATING: &'static str = " Updating"; pub static ADDING: 
&'static str = " Adding"; pub static REMOVING: &'static str = " Removing"; pub static DOCTEST: &'static str = " Doc-tests"; pub static PACKAGING: &'static str = " Packaging"; pub static DOWNLOADING: &'static str = " Downloading"; pub static UPLOADING: &'static str = " Uploading"; pub static VERIFYING: &'static str = " Verifying"; pub static ARCHIVING: &'static str = " Archiving"; pub static INSTALLING: &'static str = " Installing";<|fim▁end|>
} }
<|file_name|>read_pool.rs<|end_file_name|><|fim▁begin|>// Copyright 2019 TiKV Project Authors. Licensed under Apache-2.0. //! Distinct thread pools to handle read commands having different priority levels. use crate::config::StorageReadPoolConfig; use crate::storage::kv::{destroy_tls_engine, set_tls_engine, Engine, FlowStatsReporter}; use crate::storage::metrics; use file_system::{set_io_type, IOType}; use std::sync::{Arc, Mutex}; use tikv_util::yatp_pool::{Config, DefaultTicker, FuturePool, PoolTicker, YatpPoolBuilder}; #[derive(Clone)] struct FuturePoolTicker<R: FlowStatsReporter> { pub reporter: R, } impl<R: FlowStatsReporter> PoolTicker for FuturePoolTicker<R> { fn on_tick(&mut self) { metrics::tls_flush(&self.reporter); } } /// Build respective thread pools to handle read commands of different priority levels. pub fn build_read_pool<E: Engine, R: FlowStatsReporter>( config: &StorageReadPoolConfig, reporter: R, engine: E, ) -> Vec<FuturePool> { let names = vec!["store-read-low", "store-read-normal", "store-read-high"]; let configs: Vec<Config> = config.to_yatp_pool_configs(); assert_eq!(configs.len(), 3); configs .into_iter() .zip(names) .map(|(config, name)| { let reporter = reporter.clone(); let engine = Arc::new(Mutex::new(engine.clone())); YatpPoolBuilder::new(FuturePoolTicker { reporter }) .name_prefix(name) .config(config) .after_start(move || { set_tls_engine(engine.lock().unwrap().clone()); set_io_type(IOType::ForegroundRead); }) .before_stop(move || unsafe { // Safety: we call `set_` and `destroy_` with the same engine type. destroy_tls_engine::<E>(); }) .build_future_pool() }) .collect() } /// Build a thread pool that has default tick behavior for testing. 
pub fn build_read_pool_for_test<E: Engine>( config: &StorageReadPoolConfig, engine: E, ) -> Vec<FuturePool> { let names = vec!["store-read-low", "store-read-normal", "store-read-high"]; let configs: Vec<Config> = config.to_yatp_pool_configs(); assert_eq!(configs.len(), 3); configs .into_iter() .zip(names) .map(|(config, name)| { let engine = Arc::new(Mutex::new(engine.clone())); YatpPoolBuilder::new(DefaultTicker::default()) .config(config) .name_prefix(name) .after_start(move || { set_tls_engine(engine.lock().unwrap().clone()); set_io_type(IOType::ForegroundRead); }) // Safety: we call `set_` and `destroy_` with the same engine type. .before_stop(|| unsafe { destroy_tls_engine::<E>() })<|fim▁hole|> }) .collect() }<|fim▁end|>
.build_future_pool()
<|file_name|>link-documents-return-blockly-component.ts<|end_file_name|><|fim▁begin|>/* * Lumeer: Modern Data Definition and Processing Platform * * Copyright (C) since 2017 Lumeer.io, s.r.o. and/or its affiliates. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. *<|fim▁hole|> * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ import {COLOR_PRIMARY} from '../../../../core/constants'; import {BlocklyUtils, MasterBlockType} from '../blockly-utils'; import {LinkType} from '../../../../core/store/link-types/link.type'; import {LinkDocumentsNoReturnBlocklyComponent} from './link-documents-no-return-blockly-component'; import {isNotNullOrUndefined} from '../../../utils/common.utils'; declare var Blockly: any; export class LinkDocumentsReturnBlocklyComponent extends LinkDocumentsNoReturnBlocklyComponent { public constructor(public blocklyUtils: BlocklyUtils, protected linkTypes: LinkType[]) { super(blocklyUtils, linkTypes); } public getVisibility(): MasterBlockType[] { return [MasterBlockType.Rule, MasterBlockType.Link]; } public registerBlock(workspace: any) { const this_ = this; Blockly.Blocks[BlocklyUtils.LINK_DOCUMENTS_RETURN] = { init: function () { this.jsonInit({ type: BlocklyUtils.LINK_DOCUMENTS_RETURN, message0: '%{BKY_BLOCK_LINK_DOCUMENTS_RETURN}', // link records via %1 %2 %3 args0: [ { type: 'field_dropdown', name: 'LINKTYPE', options: this_.linkTypeOptions, }, { type: 'input_value', name: 'DOCUMENT1', }, { type: 'input_value', name: 'DOCUMENT2', }, ], output: '', colour: COLOR_PRIMARY, tooltip: this_.tooltip, helpUrl: '', }); }, }; Blockly.JavaScript[BlocklyUtils.LINK_DOCUMENTS_RETURN] = function (block) { const 
dropdown_linktype = block.getFieldValue('LINKTYPE') || null; const value_document1 = Blockly.JavaScript.valueToCode(block, 'DOCUMENT1', Blockly.JavaScript.ORDER_ATOMIC) || null; const value_document2 = Blockly.JavaScript.valueToCode(block, 'DOCUMENT2', Blockly.JavaScript.ORDER_ATOMIC) || null; const code = this_.blocklyUtils.getLumeerVariable() + '.linkDocuments(' + value_document1 + ', ' + value_document2 + ", '" + dropdown_linktype + "')"; return [code, Blockly.JavaScript.ORDER_FUNCTION_CALL]; }; } public getLinkVariablesXml(workspace: any): string { return '<xml><block type="' + BlocklyUtils.LINK_DOCUMENTS_RETURN + '"></block></xml>'; } public onWorkspaceChange(workspace, changeEvent) { super.onWorkspaceChange(workspace, changeEvent); if ( changeEvent instanceof Blockly.Events.Create || changeEvent instanceof Blockly.Events.Change || changeEvent instanceof Blockly.Events.Move ) { const block = workspace.getBlockById(changeEvent.blockId); if (isNotNullOrUndefined(block) && block.type === BlocklyUtils.LINK_DOCUMENTS_RETURN) { const linkTypeId = block.getField('LINKTYPE').value_; block.setOutput(true, linkTypeId + BlocklyUtils.LINK_VAR_SUFFIX); this.blocklyUtils.checkVariablesType(changeEvent, workspace); } } } }<|fim▁end|>
* This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
<|file_name|>mididevice.py<|end_file_name|><|fim▁begin|>#################################################################################################### # Copyright 2013 John Crawford # # This file is part of PatchCorral. # # PatchCorral is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # PatchCorral is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with PatchCorral. If not, see <http://www.gnu.org/licenses/>. #################################################################################################### ## @file # Defines a base class for MIDI devices. # @date 03/08/2013 Created file. -jc # @author John Crawford import re #For user-defined iteration filters. import rtmidi import threading import time import yaml ## # Returns a list of the available MIDI Input Devices. # @return List of tuples "(portNum, portName)". def getMIDIInDevices(): midi = rtmidi.RtMidiIn() return list((port, midi.getPortName(port)) for port in range(midi.getPortCount())) ## # Returns a list of the available MIDI Output Devices. # @return List of tuples "(portNum, portName)". def getMIDIOutDevices(): midi = rtmidi.RtMidiOut() return list((port, midi.getPortName(port)) for port in range(midi.getPortCount())) ## # Class representing a specific MIDI voice. class MIDIVoice(): tags = [ 'name', 'msb', 'lsb', '_pc', 'device.portNum', 'device.portName', 'channel', 'category', 'voiceNum', ] ## # Class constructor. 
# @param name String # @param device MIDIOutDevice object # @param channel MIDI Channcel (1-16) # @param msb Most Significant Bit # @param lsb Least Significant Bit # @param pc Program Change value # @param category Category of the voice # @param voiceNum Number of the voice as displayed on the device def __init__(self, name, device, channel, msb, lsb, pc, category=None, voiceNum=None): self.name = name self.msb = msb self.lsb = lsb self._pc = pc self.device = device self.channel = channel self.category = category self.voiceNum = voiceNum def __getitem__(self, key): keys = key.split('.') v = self for k in keys: try: v = getattr(v, k) except AttributeError: raise KeyError('Unable to find key {}.'.format(key)) return v def __iter__(self): return (tag for tag in self.tags) def items(self): for key in iter(self): yield key, self[key] def keys(self): return iter(self) ## # Sends the MIDI messages that will select this voice on the given device. # @return None. def pc(self): self.device.sendMessage(rtmidi.MidiMessage.controllerEvent(self.channel, 0x00, self.msb)) self.device.sendMessage(rtmidi.MidiMessage.controllerEvent(self.channel, 0x20, self.lsb)) self.device.sendMessage(rtmidi.MidiMessage.programChange(self.channel, self._pc)) ## # For use by PyYAML. def __repr__(self): return '{}({})'.format( self.__class__.__name__, ', '.join('{}={}'.format(attr, val) for attr, val in self.items()), ) def __setitem__(self, key, val): keys = key.split('.') v = self for k in keys[:-1]: try: v = getattr(v, k) except AttributeError: raise KeyError('Unable to find key {}.'.format(key)) setattr(v, keys[-1], val) ## # Method for converting this object to string. Prints out essential information. def __str__(self): return '\n'.join('{}: {}'.format(key, val) for key, val in self.items()) def values(self): for key in iter(self): yield self[key] ## # Class representing a MIDI Device. This is an abstract base class that doesn't do anything on its # own. 
class MIDIDevice(): ## # Class initializer. # @param id Identifier for the device interface. Can be an integer (index) or a string (name). # @pre "self.midi" has been initialized.<|fim▁hole|> # self.midi = None #INITIALIZE THIS IN THE SUBCLASS! #Resolve missing details. if port is None: if name is None: raise ValueError('Must provide at least the "name" or "port" to identify a MIDI device.') portNames = list(self.midi.getPortName(port) for port in range(self.midi.getPortCount())) for port, portName in enumerate(portNames): if portName == name: self.portNum = port break else: raise ValueError('Unable to find device matching name "{}" in list "{}".'.format(name, portNames)) else: portCount = self.midi.getPortCount() if 0 > port > portCount: raise ValueError('Given port "{}" is outside the expected range (0-{}).'.format(port, portCount)) self.portNum = port if name is None: self.portName = self.midi.getPortName(port) else: self.portName = name #Open the MIDI port! self.midi.openPort(self.portNum) def getPortName(self): return self.portName ## # Class representing a MIDI Input Device. class MIDIInDevice(MIDIDevice): ## # Class initializer. # @param id Identifier for the device input and output interfaces. Can be an integer (index) or # a string (name). def __init__(self, id): self.midi = rtmidi.RtMidiIn() self.midi.setCallback(self.onMIDIMsg) MIDIDevice.__init__(self, id) self.midiOutDevice = None self.midiOutChannel = None self.forwardingLock = threading.Lock() ## # Enables/disables forwarding of incoming MIDI events to the given output device. # @param midiOutDevice MIDIOutDevice object. If "None", will disable forwarding. # @param channel If not "None", will change the channel of any incoming messages to this channel # before forwarding it to the output device. # @return None. 
def setMIDIOutDevice(self, midiOutDevice=None, channel=None): if not isinstance(midiOutDevice, MIDIOutDevice): raise ValueError('Given midiOutDevice "{0}" is of type "{1}"; expected type "MIDIOutDevice".'.format( midiOutDevice, type(midiOutDevice), )) if channel is not None and 0 > channel > 15: raise ValueError('Unexpected channel value "{0}". Expected integer 0-15 or "None".'.format(channel)) with self.forwardingLock: self.midiOutDevice = midiOutDevice self.midiOutChannel = channel def onMIDIMsg(self, data): # print 'id(self): onMIDIMsg: Recieved data "{0}".'.format(data) with self.forwardingLock: if self.midiOutChannel is not None: data.setChannel(self.midiOutChannel) if self.midiOutDevice is not None: self.midiOutDevice.sendMessage(data) ## # Class representing a MIDI Output Device. class MIDIOutDevice(MIDIDevice): ID = 'Generic USB-MIDI Device' ## Number of the note "A0", usually the lowest supported note on the MIDI device. noteNumA0 = 21 ## Offsets for the different note letters noteOffsets = { 'Ab': 11, 'A': 0, 'A#': 1, 'Bb': 1, 'B': 2, 'C': 3, 'C#': 4, 'Db': 4, 'D': 5, 'D#': 6, 'Eb': 6, 'E': 7, 'F': 8, 'F#': 9, 'Gb': 9, 'G': 10, 'G#': 11, } ## # Class initializer. # @param port Integer port number for the MIDI device. # @param name String name of the MIDI device. If "None", will use this class's ID string. # @param voices List of MIDIVoice objects available from this MIDI Device. # @param defaultChannel If given, will use this channel by default for all outgoing commands. def __init__(self, port, name=None, voices=None, defaultChannel=None): if name is None: name = MIDIOutDevice.ID if voices is None: voices = [] self.midi = rtmidi.RtMidiOut() super().__init__(port, name) self.voices = voices self._defaultChannel = defaultChannel self.sendLock = threading.Lock() ## # Sets/Gets the default MIDI channel. # @param defaultChannel Integer 0-15. # @return Current default channel ("None" if a default hasn't been defined). 
def defaultChannel(self, defaultChannel=None): if defaultChannel is not None: if self._defaultChannel is None: raise ValueError('No default channel defined and no channel given.') self._defaultChannel = defaultChannel return self._defaultChannel ## # Returns the full list of voices available from this device. def getVoiceList(self): return list(self.voices) ## # Returns an iterator that steps over the voices. Supports filtering. # @param filter Python statement that can be evaluated such that "v" stands for a MIDIVoice # object. # @return Iterator object that returns MIDIVoice objects. def iter(self, filter='True'): for v in self.voices: if eval(filter): yield v ## # Converts the given note name to a MIDI note number. # @param noteName String # @return Integer def noteName2Num(self, noteName): m = re.match(r'^([A-Ga-g][#b]?)(-?\d)$', noteName) if m is None: raise ValueError('Unable to parse note name "{0}".'.format(noteName)) return self.noteNumA0 + self.noteOffsets[m.group(1)] + 12 * int(m.group(2)) ## # Plays the given note for the given number of seconds. Returns immediately (doesn't block). # @param duration Seconds to play (float or int) # @param noteNum Note to play # @param vel Velocity to play at # @param channel Channel to play on. If "None", will use default channel for object. # @return None. def playNote(self, duration, note, vel, channel=None): if channel is None: if self._defaultChannel is None: raise ValueError('No default channel defined and no channel given.') channel = self._defaultChannel if isinstance(note, str): note = self.noteName2Num(note) onMsg = rtmidi.MidiMessage.noteOn(channel, note, vel) offMsg = rtmidi.MidiMessage.noteOff(channel, note) def play(): self.sendMessage(onMsg) time.sleep(duration) self.sendMessage(offMsg) t = threading.Thread(target=play) t.start() ## # Sends the given message to the MIDI device. # @param msg rtmidi.MidiMessage object # @return None. 
def sendMessage(self, msg): with self.sendLock: self.midi.sendMessage(msg) ## # Sends the given messages to the MIDI device. # @param msgs Any number of rtmidi.MidiMessage objects. # @return None. def sendMessages(self, *msgs): for msg in msgs: self.midi.sendMessage(msg)<|fim▁end|>
def __init__(self, port, name):
<|file_name|>uva_1260.cpp<|end_file_name|><|fim▁begin|>#include<bits/stdc++.h> using namespace std; int main() { freopen("in.txt","r",stdin); int tc,n; int A[1000],B[1000]; scanf("%d",&tc); while(tc--) { scanf("%d",&n); int sum=0; for(int i=0;i<n;i++) { scanf("%d",&A[i]); int j=i; B[i]=0; while(j--) { if(A[j]<=A[i]) B[i]++; <|fim▁hole|> printf("%d\n",sum); } return 0; }<|fim▁end|>
} sum+=B[i]; }
<|file_name|>AlignmentTest.java<|end_file_name|><|fim▁begin|>//----------------------------------------------------------------------------// // // // A l i g n m e n t T e s t // // // //----------------------------------------------------------------------------// // <editor-fold defaultstate="collapsed" desc="hdr"> // // Copyright (C) Hervé Bitteur 2000-2011. All rights reserved. // // This software is released under the GNU General Public License. // // Goto http://kenai.com/projects/audiveris to report bugs or suggestions. // //----------------------------------------------------------------------------// // </editor-fold> package omr.ui.symbol; import omr.ui.symbol.Alignment.Horizontal; import static omr.ui.symbol.Alignment.Horizontal.*; import omr.ui.symbol.Alignment.Vertical; import static omr.ui.symbol.Alignment.Vertical.*; import static org.junit.Assert.*; import org.junit.Test; import java.awt.Point; import java.awt.Rectangle; import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; /** * Class {@code AlignmentTest} * * @author Hervé Bitteur */ public class AlignmentTest { //~ Instance fields -------------------------------------------------------- /** Map Alignment -> Point */ Map<Alignment, Point> points = new HashMap<>(); //~ Constructors ----------------------------------------------------------- /** * Creates a new AlignmentTest object. */ public AlignmentTest () { } //~ Methods ---------------------------------------------------------------- /** * Test of toPoint method, of class Alignment. 
*/ @Test public void testToPoint () { System.out.println("toPoint"); Rectangle rect = new Rectangle(-6, -26, 50, 38); assignPoints(); for (Vertical vert : Vertical.values()) { for (Horizontal hori : Horizontal.values()) { Alignment instance = new Alignment(vert, hori); Point start = points.get(instance); for (Vertical v : Vertical.values()) { for (Horizontal h : Horizontal.values()) { Alignment expAlign = new Alignment(v, h); Point to = instance.toPoint(expAlign, rect); Point target = new Point(start);<|fim▁hole|> System.out.print( instance + " + " + to + " = " + target); Alignment align = getAlign(target); Point expTarget = points.get(expAlign); System.out.println(" " + expAlign + " =? " + align); assertEquals("Different points", expTarget, target); assertEquals("Different aligns", expAlign, align); } } System.out.println(); } } } // /** // * Test of toPoint method, of class Alignment. // */ // @Test // public void testToPoint2D () // { // System.out.println("toPoint2D"); // // Rectangle2D rect = new Rectangle2D.Float(-5.8f, -26.0f, 50.0f, 37.4f); // Point2D expTo = null; // // for (Vertical vert : Vertical.values()) { // for (Horizontal hori : Horizontal.values()) { // Alignment instance = new Alignment(vert, hori); // // for (Vertical v : Vertical.values()) { // for (Horizontal h : Horizontal.values()) { // Alignment that = new Alignment(v, h); // Point2D to = instance.toPoint(that, rect); // // System.out.println( // instance + " + " + to + " = " + that); // } // } // // System.out.println(); // } // } // } private Alignment getAlign (Point target) { for (Entry<Alignment, Point> entry : points.entrySet()) { if (entry.getValue() .equals(target)) { return entry.getKey(); } } return null; } private void assignPoints () { points.put(new Alignment(TOP, LEFT), new Point(-6, -26)); points.put(new Alignment(TOP, CENTER), new Point(19, -26)); points.put(new Alignment(TOP, RIGHT), new Point(44, -26)); points.put(new Alignment(TOP, XORIGIN), new Point(0, -26)); 
points.put(new Alignment(MIDDLE, LEFT), new Point(-6, -7)); points.put(new Alignment(MIDDLE, CENTER), new Point(19, -7)); points.put(new Alignment(MIDDLE, RIGHT), new Point(44, -7)); points.put(new Alignment(MIDDLE, XORIGIN), new Point(0, -7)); points.put(new Alignment(BOTTOM, LEFT), new Point(-6, 12)); points.put(new Alignment(BOTTOM, CENTER), new Point(19, 12)); points.put(new Alignment(BOTTOM, RIGHT), new Point(44, 12)); points.put(new Alignment(BOTTOM, XORIGIN), new Point(0, 12)); points.put(new Alignment(BASELINE, LEFT), new Point(-6, 0)); points.put(new Alignment(BASELINE, CENTER), new Point(19, 0)); points.put(new Alignment(BASELINE, RIGHT), new Point(44, 0)); points.put(new Alignment(BASELINE, XORIGIN), new Point(0, 0)); } }<|fim▁end|>
target.translate(to.x, to.y);
<|file_name|>ASparkUtil.java<|end_file_name|><|fim▁begin|>/* * ASpark * Copyright (C) 2015 Nikolay Platov * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package nikoladasm.aspark; import java.io.IOException; import java.io.InputStream; import java.util.Deque; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.regex.Matcher; import java.util.regex.Pattern; import io.netty.buffer.ByteBuf; import static nikoladasm.aspark.HttpMethod.*; public final class ASparkUtil { private static final String PARAMETERS_PATTERN = "(?i)(:[A-Z_][A-Z_0-9]*)"; private static final Pattern PATTERN = Pattern.compile(PARAMETERS_PATTERN); private static final String DEFAULT_ACCEPT_TYPE = "*/*"; private static final String REGEXP_METACHARS = "<([{\\^-=$!|]})?*+.>"; private static final int DEFAULT_BUFFER_SIZE = 1024 * 4; private static final String FOLDER_SEPARATOR = "/"; private static final String WINDOWS_FOLDER_SEPARATOR = "\\"; private static final String TOP_PATH = ".."; private static final String CURRENT_PATH = "."; private static final String QUERY_KEYS_PATTERN = "\\s*\\[?\\s*([^\\]\\[\\s]+)\\s*\\]?\\s*"; private static final Pattern QK_PATTERN = Pattern.compile(QUERY_KEYS_PATTERN); private ASparkUtil() {} private static String processRegexPath(String path, String asteriskReplacement, String slashAsteriskReplacement) 
{ String pathToUse = sanitizePath(path); int length = pathToUse.length(); StringBuilder sb = new StringBuilder(); boolean startWithWildcard = false; for (int i = 0; i < length; i++) { char c = pathToUse.charAt(i); if (i == 0 && c == '*') { sb.append(asteriskReplacement); startWithWildcard = true; continue; } if (i == length-2 && c == '/' && pathToUse.charAt(i+1) == '*') { if (startWithWildcard) throw new IllegalArgumentException("Path can't contain first and last star wildcard"); sb.append(slashAsteriskReplacement); break; } if (i == length-1 && c == '*') { if (startWithWildcard) throw new IllegalArgumentException("Path can't contain first and last star wildcard"); sb.append(asteriskReplacement); continue; } if (REGEXP_METACHARS.contains(String.valueOf(c))) { sb.append('\\').append(c); continue; } sb.append(c); } return sb.toString(); } public static Pattern buildParameterizedPathPattern(String path, Map<String, Integer> parameterNamesMap, Boolean startWithWildcard) { String pathToUse = processRegexPath(path, "(.*)", "(?:/?|/(.+))"); Matcher parameterMatcher = PATTERN.matcher(pathToUse); int i = 1; while (parameterMatcher.find()) { String parameterName = parameterMatcher.group(1); if (parameterNamesMap.containsKey(parameterName)) throw new ASparkException("Duplicate parameter name."); parameterNamesMap.put(parameterName, i); i++; }<|fim▁hole|> public static Pattern buildPathPattern(String path) { String pathToUse = processRegexPath(path, ".*", "(?:/?|/.+)"); return Pattern.compile("^"+pathToUse+"$"); } public static boolean isAcceptContentType(String requestAcceptTypes, String routeAcceptType) { if (requestAcceptTypes == null) return routeAcceptType.trim().equals(DEFAULT_ACCEPT_TYPE); String[] requestAcceptTypesArray = requestAcceptTypes.split(","); String[] rtat = routeAcceptType.trim().split("/"); for (int i=0; i<requestAcceptTypesArray.length; i++) { String requestAcceptType = requestAcceptTypesArray[i].split(";")[0]; String[] rqat = 
requestAcceptType.trim().split("/"); if (((rtat[0].equals("*")) ? true : rqat[0].trim().equals(rtat[0])) && ((rtat[1].equals("*")) ? true : rqat[1].equals(rtat[1]))) return true; } return false; } public static long copyStreamToByteBuf(InputStream input, ByteBuf buf) throws IOException { byte[] buffer = new byte[DEFAULT_BUFFER_SIZE]; long count = 0; int n = 0; while ((n = input.read(buffer)) != -1) { buf.writeBytes(buffer, 0, n); count += n; } return count; } public static String collapsePath(String path) { String pathToUse = path.trim(); if (pathToUse.isEmpty()) return pathToUse; String rpath = pathToUse.replace(WINDOWS_FOLDER_SEPARATOR, FOLDER_SEPARATOR); String[] directories = rpath.split(FOLDER_SEPARATOR); Deque<String> newDirectories = new LinkedList<>(); for (int i=0; i<directories.length; i++) { String directory = directories[i].trim(); if (directory.equals(TOP_PATH) && !newDirectories.isEmpty()) newDirectories.removeLast(); else if (!directory.equals(CURRENT_PATH) && !directory.isEmpty()) newDirectories.addLast(directory); } String result = FOLDER_SEPARATOR; for (String directory : newDirectories) result += directory + FOLDER_SEPARATOR; if (!path.startsWith(FOLDER_SEPARATOR)) result = result.substring(1); if (!path.endsWith(FOLDER_SEPARATOR) && result.equals(FOLDER_SEPARATOR)) result = result.substring(0, result.length()-1); return result; } public static boolean isEqualHttpMethod(HttpMethod requestHttpMethod, HttpMethod routeHttpMethod) { if (requestHttpMethod.equals(HEAD) && routeHttpMethod.equals(GET)) return true; return requestHttpMethod.equals(routeHttpMethod); } public static ParamsMap parseParams(Map<String, List<String>> params) { ParamsMap result = new ParamsMap(); params.forEach((keys, values) -> { ParamsMap root = result; Matcher keyMatcher = QK_PATTERN.matcher(keys); while (keyMatcher.find()) { String key = keyMatcher.group(1); root = root.createIfAbsentAndGet(key); } root.values(values.toArray(new String[values.size()])); }); return result; } 
public static ParamsMap parseUniqueParams(Map<String, String> params) { ParamsMap result = new ParamsMap(); params.forEach((key, value) -> { result.createIfAbsentAndGet(key).value(value); }); return result; } public static String sanitizePath(String path) { String pathToUse = collapsePath(path); if (pathToUse.isEmpty()) return pathToUse; if (pathToUse.endsWith("/")) pathToUse = pathToUse.substring(0, pathToUse.length()-1); return pathToUse; } public static String mimeType(String file, Properties mimeTypes) { int extIndex = file.lastIndexOf('.'); extIndex = (extIndex < 0 ) ? 0 : extIndex; String ext = file.substring(extIndex); return mimeTypes.getProperty(ext, "application/octet-stream"); } }<|fim▁end|>
return Pattern.compile("^"+parameterMatcher.replaceAll("([^/]+)")+"$"); }
<|file_name|>audio.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 import logging from . import SubprocessHook logger = logging.getLogger("barython") class PulseAudioHook(SubprocessHook): """<|fim▁hole|> super().__init__(*args, **kwargs, cmd=cmd)<|fim▁end|>
Listen on pulseaudio events with pactl """ def __init__(self, cmd=["pactl", "subscribe", "-n", "barython"], *args, **kwargs):
<|file_name|>sha1.js<|end_file_name|><|fim▁begin|>/** * * Secure Hash Algorithm (SHA1) * http://www.webtoolkit.info/ * **/ export function SHA1(msg) { function rotate_left(n, s) { var t4 = (n << s) | (n >>> (32 - s)); return t4; }; function lsb_hex(val) { var str = ""; var i; var vh; var vl; for (i = 0; i <= 6; i += 2) { vh = (val >>> (i * 4 + 4)) & 0x0f; vl = (val >>> (i * 4)) & 0x0f; str += vh.toString(16) + vl.toString(16); } return str; }; function cvt_hex(val) { var str = ""; var i; var v; for (i = 7; i >= 0; i--) { v = (val >>> (i * 4)) & 0x0f; str += v.toString(16); } return str; }; function Utf8Encode(string) { string = string.replace(/\r\n/g, "\n"); var utftext = ""; for (var n = 0; n < string.length; n++) { var c = string.charCodeAt(n); if (c < 128) { utftext += String.fromCharCode(c); } else if ((c > 127) && (c < 2048)) { utftext += String.fromCharCode((c >> 6) | 192); utftext += String.fromCharCode((c & 63) | 128); } else { utftext += String.fromCharCode((c >> 12) | 224); utftext += String.fromCharCode(((c >> 6) & 63) | 128); utftext += String.fromCharCode((c & 63) | 128); } } return utftext; }; var blockstart; var i, j; var W = new Array(80); var H0 = 0x67452301; var H1 = 0xEFCDAB89; var H2 = 0x98BADCFE; var H3 = 0x10325476; var H4 = 0xC3D2E1F0; var A, B, C, D, E; var temp; msg = Utf8Encode(msg); var msg_len = msg.length;<|fim▁hole|> msg.charCodeAt(i + 2) << 8 | msg.charCodeAt(i + 3); word_array.push(j); } switch (msg_len % 4) { case 0: i = 0x080000000; break; case 1: i = msg.charCodeAt(msg_len - 1) << 24 | 0x0800000; break; case 2: i = msg.charCodeAt(msg_len - 2) << 24 | msg.charCodeAt(msg_len - 1) << 16 | 0x08000; break; case 3: i = msg.charCodeAt(msg_len - 3) << 24 | msg.charCodeAt(msg_len - 2) << 16 | msg.charCodeAt(msg_len - 1) << 8 | 0x80; break; } word_array.push(i); while ((word_array.length % 16) != 14) word_array.push(0); word_array.push(msg_len >>> 29); word_array.push((msg_len << 3) & 0x0ffffffff); for (blockstart = 0; blockstart < 
word_array.length; blockstart += 16) { for (i = 0; i < 16; i++) W[i] = word_array[blockstart + i]; for (i = 16; i <= 79; i++) W[i] = rotate_left(W[i - 3] ^ W[i - 8] ^ W[i - 14] ^ W[i - 16], 1); A = H0; B = H1; C = H2; D = H3; E = H4; for (i = 0; i <= 19; i++) { temp = (rotate_left(A, 5) + ((B & C) | (~B & D)) + E + W[i] + 0x5A827999) & 0x0ffffffff; E = D; D = C; C = rotate_left(B, 30); B = A; A = temp; } for (i = 20; i <= 39; i++) { temp = (rotate_left(A, 5) + (B ^ C ^ D) + E + W[i] + 0x6ED9EBA1) & 0x0ffffffff; E = D; D = C; C = rotate_left(B, 30); B = A; A = temp; } for (i = 40; i <= 59; i++) { temp = (rotate_left(A, 5) + ((B & C) | (B & D) | (C & D)) + E + W[i] + 0x8F1BBCDC) & 0x0ffffffff; E = D; D = C; C = rotate_left(B, 30); B = A; A = temp; } for (i = 60; i <= 79; i++) { temp = (rotate_left(A, 5) + (B ^ C ^ D) + E + W[i] + 0xCA62C1D6) & 0x0ffffffff; E = D; D = C; C = rotate_left(B, 30); B = A; A = temp; } H0 = (H0 + A) & 0x0ffffffff; H1 = (H1 + B) & 0x0ffffffff; H2 = (H2 + C) & 0x0ffffffff; H3 = (H3 + D) & 0x0ffffffff; H4 = (H4 + E) & 0x0ffffffff; } var temp = cvt_hex(H0) + cvt_hex(H1) + cvt_hex(H2) + cvt_hex(H3) + cvt_hex(H4); return temp.toLowerCase(); };<|fim▁end|>
var word_array = new Array(); for (i = 0; i < msg_len - 3; i += 4) { j = msg.charCodeAt(i) << 24 | msg.charCodeAt(i + 1) << 16 |
<|file_name|>bitrig.rs<|end_file_name|><|fim▁begin|>pub type clock_t = i64; pub type suseconds_t = i64; pub type dev_t = i32; pub type sigset_t = ::c_uint; pub type blksize_t = ::uint32_t; pub type fsblkcnt_t = ::c_uint; pub type fsfilcnt_t = ::c_uint; pub type pthread_attr_t = *mut ::c_void; pub type pthread_mutex_t = *mut ::c_void; pub type pthread_mutexattr_t = *mut ::c_void; pub type pthread_cond_t = *mut ::c_void; pub type pthread_rwlock_t = *mut ::c_void; s! { pub struct dirent { pub d_fileno: ::ino_t, pub d_off: ::off_t, pub d_reclen: u16, pub d_type: u8, pub d_namlen: u8, __d_padding: [u8; 4], pub d_name: [::c_char; 256], } pub struct glob_t { pub gl_pathc: ::c_int, pub gl_matchc: ::c_int, pub gl_offs: ::c_int, pub gl_flags: ::c_int, pub gl_pathv: *mut *mut ::c_char, __unused1: *mut ::c_void, __unused2: *mut ::c_void, __unused3: *mut ::c_void, __unused4: *mut ::c_void, __unused5: *mut ::c_void, __unused6: *mut ::c_void, __unused7: *mut ::c_void, } pub struct stat { pub st_mode: ::mode_t, pub st_dev: ::dev_t, pub st_ino: ::ino_t, pub st_nlink: ::nlink_t, pub st_uid: ::uid_t, pub st_gid: ::gid_t, pub st_rdev: ::dev_t, pub st_atime: ::time_t, pub st_atime_nsec: ::c_long, pub st_mtime: ::time_t, pub st_mtime_nsec: ::c_long, pub st_ctime: ::time_t, pub st_ctime_nsec: ::c_long, pub st_size: ::off_t, pub st_blocks: ::blkcnt_t, pub st_blksize: ::blksize_t, pub st_flags: ::uint32_t, pub st_gen: ::uint32_t, pub st_birthtime: ::time_t, pub st_birthtime_nsec: ::c_long, } pub struct statvfs { pub f_bsize: ::c_ulong, pub f_frsize: ::c_ulong, pub f_blocks: ::fsblkcnt_t, pub f_bfree: ::fsblkcnt_t, pub f_bavail: ::fsblkcnt_t, pub f_files: ::fsfilcnt_t, pub f_ffree: ::fsfilcnt_t, pub f_favail: ::fsfilcnt_t, pub f_fsid: ::c_ulong, pub f_flag: ::c_ulong, pub f_namemax: ::c_ulong, } pub struct addrinfo { pub ai_flags: ::c_int, pub ai_family: ::c_int, pub ai_socktype: ::c_int, pub ai_protocol: ::c_int, pub ai_addrlen: ::socklen_t, pub ai_addr: *mut ::sockaddr, pub ai_canonname: 
*mut ::c_char, pub ai_next: *mut ::addrinfo, } pub struct sockaddr_storage { pub ss_len: u8, pub ss_family: ::sa_family_t, __ss_pad1: [u8; 6], __ss_pad2: i64, __ss_pad3: [u8; 240], } pub struct siginfo_t { pub si_signo: ::c_int, pub si_code: ::c_int, pub si_errno: ::c_int, pub si_addr: *mut ::c_void } pub struct Dl_info { pub dli_fname: *const ::c_char, pub dli_fbase: *mut ::c_void, pub dli_sname: *const ::c_char, pub dli_saddr: *mut ::c_void, } } <|fim▁hole|>pub const O_CLOEXEC: ::c_int = 0x10000; pub const MS_SYNC : ::c_int = 0x0002; pub const MS_INVALIDATE : ::c_int = 0x0004; pub const PTHREAD_STACK_MIN : ::size_t = 2048; pub const ENOATTR : ::c_int = 83; pub const EILSEQ : ::c_int = 84; pub const EOVERFLOW : ::c_int = 87; pub const ECANCELED : ::c_int = 88; pub const EIDRM : ::c_int = 89; pub const ENOMSG : ::c_int = 90; pub const ENOTSUP : ::c_int = 91; pub const ELAST : ::c_int = 91; pub const F_DUPFD_CLOEXEC : ::c_int = 10; pub const RLIM_NLIMITS: ::c_int = 9; pub const SO_SNDTIMEO: ::c_int = 0x1005; pub const SO_RCVTIMEO: ::c_int = 0x1006; pub const KERN_PROC : ::c_int = 66; pub const O_DSYNC : ::c_int = 128; pub const MAP_RENAME : ::c_int = 0x0000; pub const MAP_NORESERVE : ::c_int = 0x0000; pub const MAP_HASSEMAPHORE : ::c_int = 0x0000; pub const EIPSEC : ::c_int = 82; pub const ENOMEDIUM : ::c_int = 85; pub const EMEDIUMTYPE : ::c_int = 86; pub const RUSAGE_THREAD: ::c_int = 1; pub const IPV6_ADD_MEMBERSHIP: ::c_int = 12; pub const IPV6_DROP_MEMBERSHIP: ::c_int = 13; pub const MAP_COPY : ::c_int = 0x0002; pub const MAP_NOEXTEND : ::c_int = 0x0000; pub const _SC_IOV_MAX : ::c_int = 51; pub const _SC_GETGR_R_SIZE_MAX : ::c_int = 100; pub const _SC_GETPW_R_SIZE_MAX : ::c_int = 101; pub const _SC_LOGIN_NAME_MAX : ::c_int = 102; pub const _SC_MQ_PRIO_MAX : ::c_int = 59; pub const _SC_NPROCESSORS_ONLN : ::c_int = 503; pub const _SC_THREADS : ::c_int = 91; pub const _SC_THREAD_ATTR_STACKADDR : ::c_int = 77; pub const _SC_THREAD_ATTR_STACKSIZE : ::c_int = 78; 
pub const _SC_THREAD_DESTRUCTOR_ITERATIONS : ::c_int = 80; pub const _SC_THREAD_KEYS_MAX : ::c_int = 81; pub const _SC_THREAD_PRIO_INHERIT : ::c_int = 82; pub const _SC_THREAD_PRIO_PROTECT : ::c_int = 83; pub const _SC_THREAD_PRIORITY_SCHEDULING : ::c_int = 84; pub const _SC_THREAD_PROCESS_SHARED : ::c_int = 85; pub const _SC_THREAD_SAFE_FUNCTIONS : ::c_int = 103; pub const _SC_THREAD_STACK_MIN : ::c_int = 89; pub const _SC_THREAD_THREADS_MAX : ::c_int = 90; pub const _SC_TTY_NAME_MAX : ::c_int = 107; pub const _SC_ATEXIT_MAX : ::c_int = 46; pub const _SC_CLK_TCK : ::c_int = 3; pub const _SC_AIO_LISTIO_MAX : ::c_int = 42; pub const _SC_AIO_MAX : ::c_int = 43; pub const _SC_ASYNCHRONOUS_IO : ::c_int = 45; pub const _SC_MAPPED_FILES : ::c_int = 53; pub const _SC_MEMLOCK : ::c_int = 54; pub const _SC_MEMLOCK_RANGE : ::c_int = 55; pub const _SC_MEMORY_PROTECTION : ::c_int = 56; pub const _SC_MESSAGE_PASSING : ::c_int = 57; pub const _SC_MQ_OPEN_MAX : ::c_int = 58; pub const _SC_PRIORITY_SCHEDULING : ::c_int = 61; pub const _SC_SEMAPHORES : ::c_int = 67; pub const _SC_SHARED_MEMORY_OBJECTS : ::c_int = 68; pub const _SC_SYNCHRONIZED_IO : ::c_int = 75; pub const _SC_TIMERS : ::c_int = 94; pub const _SC_XOPEN_CRYPT : ::c_int = 117; pub const _SC_XOPEN_ENH_I18N : ::c_int = 118; pub const _SC_XOPEN_LEGACY : ::c_int = 119; pub const _SC_XOPEN_REALTIME : ::c_int = 120; pub const _SC_XOPEN_REALTIME_THREADS : ::c_int = 121; pub const _SC_XOPEN_UNIX : ::c_int = 123; pub const _SC_XOPEN_VERSION : ::c_int = 125; pub const _SC_SEM_NSEMS_MAX : ::c_int = 31; pub const _SC_SEM_VALUE_MAX : ::c_int = 32; pub const _SC_AIO_PRIO_DELTA_MAX : ::c_int = 44; pub const _SC_DELAYTIMER_MAX : ::c_int = 50; pub const _SC_PRIORITIZED_IO : ::c_int = 60; pub const _SC_REALTIME_SIGNALS : ::c_int = 64; pub const _SC_RTSIG_MAX : ::c_int = 66; pub const _SC_SIGQUEUE_MAX : ::c_int = 70; pub const _SC_TIMER_MAX : ::c_int = 93; pub const FD_SETSIZE: usize = 1024; pub const ST_NOSUID: ::c_ulong = 2; pub const 
PTHREAD_MUTEX_INITIALIZER: pthread_mutex_t = 0 as *mut _; pub const PTHREAD_COND_INITIALIZER: pthread_cond_t = 0 as *mut _; pub const PTHREAD_RWLOCK_INITIALIZER: pthread_rwlock_t = 0 as *mut _; pub const PTHREAD_MUTEX_RECURSIVE: ::c_int = 2; pub const KERN_PROC_ARGS: ::c_int = 55; pub const TMP_MAX : ::c_uint = 0x7fffffff; pub const NI_MAXHOST: ::size_t = 256; extern { pub fn getnameinfo(sa: *const ::sockaddr, salen: ::socklen_t, host: *mut ::c_char, hostlen: ::size_t, serv: *mut ::c_char, servlen: ::size_t, flags: ::c_int) -> ::c_int; pub fn mprotect(addr: *const ::c_void, len: ::size_t, prot: ::c_int) -> ::c_int; pub fn sysctl(name: *mut ::c_int, namelen: ::c_uint, oldp: *mut ::c_void, oldlenp: *mut ::size_t, newp: *mut ::c_void, newlen: ::size_t) -> ::c_int; pub fn sysctlbyname(name: *const ::c_char, oldp: *mut ::c_void, oldlenp: *mut ::size_t, newp: *mut ::c_void, newlen: ::size_t) -> ::c_int; }<|fim▁end|>
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>extern crate dyon; extern crate piston_meta; use dyon::*; pub fn test_src(source: &str) { let mut module = Module::new(); load(source, &mut module).unwrap_or_else(|err| { panic!("In `{}`:\n{}", source, err); }); } pub fn test_fail_src(source: &str) { let mut module = Module::new(); match load(source, &mut module) { Ok(_) => panic!("`{}` should fail", source), Err(err) => { if err.starts_with(&format!("Could not open `{}`", source)) { panic!("{}", err) } } }; } pub fn debug_src(source: &str) { let mut module = Module::new(); load(source, &mut module).unwrap_or_else(|err| { panic!("In `{}`:\n{}", source, err); }); } #[test] fn test_syntax() { test_src("source/syntax/main.dyon"); test_src("source/syntax/args.dyon"); test_src("source/syntax/id.dyon"); test_src("source/syntax/call.dyon"); test_src("source/syntax/array.dyon"); test_src("source/syntax/prop.dyon"); test_src("source/syntax/for.dyon"); test_src("source/syntax/compare_pass_1.dyon"); test_src("source/syntax/compare_pass_2.dyon"); test_fail_src("source/syntax/compare_fail_1.dyon"); test_src("source/syntax/add_pass_1.dyon"); test_src("source/syntax/add_pass_2.dyon"); test_src("source/syntax/add_pass_3.dyon"); test_fail_src("source/syntax/add_fail_1.dyon"); test_src("source/syntax/mul.dyon"); test_src("source/syntax/pow.dyon"); test_src("source/syntax/add_mul.dyon"); test_src("source/syntax/mul_add.dyon"); test_src("source/syntax/pos_len.dyon"); test_src("source/syntax/if.dyon"); test_src("source/syntax/else_if.dyon"); test_src("source/syntax/assign_if.dyon"); test_src("source/syntax/new_pos.dyon"); test_src("source/syntax/lifetime.dyon"); test_fail_src("source/syntax/lifetime_2.dyon"); test_fail_src("source/syntax/lifetime_3.dyon"); test_fail_src("source/syntax/lifetime_4.dyon"); test_fail_src("source/syntax/lifetime_5.dyon"); test_src("source/syntax/lifetime_6.dyon"); test_src("source/syntax/lifetime_7.dyon"); test_src("source/syntax/lifetime_8.dyon"); 
test_fail_src("source/syntax/lifetime_9.dyon"); test_fail_src("source/syntax/lifetime_10.dyon"); test_src("source/syntax/lifetime_11.dyon"); test_fail_src("source/syntax/lifetime_12.dyon"); test_fail_src("source/syntax/lifetime_13.dyon"); test_fail_src("source/syntax/lifetime_14.dyon"); test_src("source/syntax/lifetime_15.dyon"); test_fail_src("source/syntax/lifetime_16.dyon"); test_src("source/syntax/lifetime_17.dyon"); test_fail_src("source/syntax/lifetime_18.dyon"); test_fail_src("source/syntax/lifetime_19.dyon"); test_fail_src("source/syntax/lifetime_20.dyon"); test_src("source/syntax/insert.dyon"); test_src("source/syntax/named_call.dyon"); test_src("source/syntax/max_min.dyon"); test_src("source/syntax/return_void.dyon"); test_src("source/syntax/return_void_2.dyon"); test_fail_src("source/syntax/return_void_3.dyon"); test_src("source/syntax/typeof.dyon"); test_src("source/syntax/load_module.dyon"); test_src("source/syntax/println_colon.dyon"); test_src("source/syntax/neg.dyon"); test_src("source/syntax/some.dyon"); test_src("source/syntax/pop.dyon"); test_src("source/syntax/accessor.dyon"); test_src("source/syntax/sum.dyon"); test_src("source/syntax/link_for.dyon"); test_src("source/syntax/min_max.dyon"); test_src("source/syntax/vec4.dyon"); test_src("source/syntax/vec4_2.dyon"); test_src("source/syntax/vec4_un_loop.dyon"); test_src("source/syntax/vec4_un_loop_2.dyon"); test_src("source/syntax/swizzle.dyon"); test_src("source/syntax/color.dyon"); test_src("source/syntax/parens.dyon"); test_src("source/syntax/infer_pass.dyon"); test_src("source/syntax/infer_pass_2.dyon"); test_src("source/syntax/infer_pass_3.dyon"); test_src("source/syntax/infer_pass_4.dyon"); test_src("source/syntax/infer_pass_5.dyon"); test_src("source/syntax/infer_pass_6.dyon"); test_src("source/syntax/infer_pass_7.dyon"); test_src("source/syntax/infer_pass_8.dyon"); test_fail_src("source/syntax/infer_fail_1.dyon"); test_fail_src("source/syntax/infer_fail_2.dyon"); 
test_fail_src("source/syntax/infer_fail_3.dyon"); test_fail_src("source/syntax/infer_fail_4.dyon"); test_fail_src("source/syntax/infer_fail_5.dyon"); test_fail_src("source/syntax/infer_fail_6.dyon"); test_src("source/syntax/space_before_function.dyon"); test_src("source/syntax/current.dyon"); test_fail_src("source/syntax/mut.dyon"); test_src("source/syntax/closure.dyon"); test_src("source/syntax/closure_2.dyon"); test_src("source/syntax/closure_3.dyon"); test_fail_src("source/syntax/closure_4.dyon"); test_src("source/syntax/closure_5.dyon"); test_src("source/syntax/closure_6.dyon"); test_src("source/syntax/or.dyon"); test_src("source/syntax/try_expr.dyon"); test_src("source/syntax/start_true.dyon"); test_fail_src("source/syntax/push_ref.dyon"); test_src("source/syntax/for_in.dyon"); test_src("source/syntax/return_arr.dyon"); test_src("source/syntax/return_cmp.dyon"); test_src("source/syntax/try_pass_1.dyon"); test_src("source/syntax/try_pass_2.dyon"); test_fail_src("source/syntax/try_fail_1.dyon"); test_fail_src("source/syntax/try_fail_2.dyon"); test_src("source/syntax/div_pass_1.dyon"); test_fail_src("source/syntax/div_fail_1.dyon"); test_src("source/syntax/continue_call.dyon"); test_src("source/syntax/mat4_1.dyon"); test_fail_src("source/syntax/secret_fail.dyon"); test_src("source/syntax/lazy_pass_1.dyon"); test_src("source/syntax/lazy_pass_2.dyon"); test_src("source/syntax/lazy_pass_3.dyon"); test_src("source/syntax/lazy_pass_4.dyon"); test_src("source/syntax/lazy_pass_5.dyon"); test_src("source/syntax/lazy_pass_6.dyon"); test_src("source/syntax/lazy_pass_7.dyon"); test_src("source/syntax/lazy_pass_8.dyon"); } #[test] fn test_typechk() { test_fail_src("source/typechk/opt.dyon"); test_fail_src("source/typechk/return.dyon"); test_fail_src("source/typechk/return_2.dyon"); test_fail_src("source/typechk/return_3.dyon"); test_fail_src("source/typechk/return_4.dyon"); test_fail_src("source/typechk/return_5.dyon"); test_fail_src("source/typechk/return_6.dyon"); 
test_fail_src("source/typechk/return_7.dyon"); test_fail_src("source/typechk/return_8.dyon"); test_src("source/typechk/return_9.dyon"); test_fail_src("source/typechk/return_10.dyon"); test_fail_src("source/typechk/return_11.dyon"); test_fail_src("source/typechk/return_12.dyon"); test_src("source/typechk/return_13.dyon"); test_src("source/typechk/return_14.dyon"); test_fail_src("source/typechk/return_15.dyon"); test_fail_src("source/typechk/return_16.dyon"); test_fail_src("source/typechk/return_17.dyon"); test_src("source/typechk/add.dyon"); test_src("source/typechk/mat_expr.dyon"); test_src("source/typechk/or.dyon"); test_fail_src("source/typechk/or_2.dyon"); test_fail_src("source/typechk/mul.dyon"); test_fail_src("source/typechk/pow.dyon"); test_src("source/typechk/pow_2.dyon"); test_fail_src("source/typechk/pow_3.dyon"); test_fail_src("source/typechk/call.dyon"); test_fail_src("source/typechk/call_2.dyon"); test_src("source/typechk/call_4.dyon"); test_src("source/typechk/obj_pass_1.dyon"); test_src("source/typechk/arr_pass_1.dyon"); test_fail_src("source/typechk/arr_fail_1.dyon"); test_fail_src("source/typechk/arr_fail_2.dyon"); test_fail_src("source/typechk/go.dyon"); test_fail_src("source/typechk/unused_result.dyon"); test_fail_src("source/typechk/unused_result_2.dyon"); test_src("source/typechk/res.dyon"); test_fail_src("source/typechk/vec4.dyon"); test_src("source/typechk/if.dyon"); test_fail_src("source/typechk/if_2.dyon"); test_fail_src("source/typechk/if_3.dyon"); test_fail_src("source/typechk/if_4.dyon"); test_fail_src("source/typechk/if_5.dyon"); test_fail_src("source/typechk/if_6.dyon"); test_src("source/typechk/ad_hoc.dyon"); test_fail_src("source/typechk/add_ad_hoc.dyon"); test_src("source/typechk/add_ad_hoc_2.dyon"); test_fail_src("source/typechk/add_ad_hoc_3.dyon"); test_fail_src("source/typechk/add_ad_hoc_4.dyon"); test_fail_src("source/typechk/mul_ad_hoc.dyon"); test_src("source/typechk/unop.dyon"); test_fail_src("source/typechk/prod.dyon"); 
test_src("source/typechk/closure.dyon"); test_fail_src("source/typechk/closure_2.dyon"); test_fail_src("source/typechk/closure_3.dyon"); test_src("source/typechk/closure_4.dyon"); test_fail_src("source/typechk/closure_5.dyon"); test_src("source/typechk/closure_6.dyon"); test_fail_src("source/typechk/closure_7.dyon"); test_src("source/typechk/closure_8.dyon"); test_src("source/typechk/closure_9.dyon"); test_src("source/typechk/local.dyon"); test_fail_src("source/typechk/grab.dyon"); test_fail_src("source/typechk/grab_2.dyon"); test_src("source/typechk/grab_3.dyon"); test_src("source/typechk/secret.dyon"); test_fail_src("source/typechk/secret_2.dyon"); test_fail_src("source/typechk/secret_3.dyon"); test_src("source/typechk/secret_4.dyon"); test_src("source/typechk/secret_5.dyon"); test_src("source/typechk/secret_6.dyon"); test_src("source/typechk/secret_7.dyon"); test_src("source/typechk/secret_8.dyon"); test_src("source/typechk/secret_9.dyon"); test_fail_src("source/typechk/secret_10.dyon"); test_src("source/typechk/secret_11.dyon"); test_src("source/typechk/dot.dyon"); test_src("source/typechk/in.dyon"); test_fail_src("source/typechk/in_2.dyon"); test_fail_src("source/typechk/vec4_2.dyon"); test_fail_src("source/typechk/mat4_1.dyon"); test_src("source/typechk/mat4_2.dyon"); test_src("source/typechk/ind_arr.dyon"); test_fail_src("source/typechk/norm.dyon"); test_fail_src("source/typechk/refinement.dyon"); test_fail_src("source/typechk/refinement_2.dyon"); test_fail_src("source/typechk/refinement_3.dyon"); test_fail_src("source/typechk/refinement_4.dyon"); test_src("source/typechk/refinement_5.dyon"); test_src("source/typechk/refinement_6.dyon"); test_fail_src("source/typechk/refinement_7.dyon"); test_fail_src("source/typechk/refinement_8.dyon"); test_src("source/typechk/refinement_9.dyon"); test_fail_src("source/typechk/refinement_10.dyon"); test_src("source/typechk/refinement_11.dyon"); test_fail_src("source/typechk/refinement_12.dyon"); 
test_src("source/typechk/refinement_13.dyon"); test_fail_src("source/typechk/refinement_14.dyon"); test_src("source/typechk/refinement_15.dyon"); test_fail_src("source/typechk/refinement_16.dyon"); test_src("source/typechk/refinement_17.dyon"); test_fail_src("source/typechk/refinement_18.dyon"); test_src("source/typechk/refinement_19.dyon"); test_fail_src("source/typechk/refinement_20.dyon"); test_src("source/typechk/refinement_21.dyon");<|fim▁hole|> test_fail_src("source/typechk/refinement_24.dyon"); test_src("source/typechk/refinement_25.dyon"); test_fail_src("source/typechk/refinement_26.dyon"); test_src("source/typechk/refinement_27.dyon"); test_fail_src("source/typechk/void_refinement.dyon"); test_fail_src("source/typechk/args_refinement.dyon"); test_fail_src("source/typechk/refine_type_fail.dyon"); test_fail_src("source/typechk/refine_type_fail_2.dyon"); test_src("source/typechk/closure_ad_hoc.dyon"); test_fail_src("source/typechk/refine_closed_fail_1.dyon"); test_fail_src("source/typechk/refine_closed_fail_2.dyon"); test_src("source/typechk/refine_closed_pass_1.dyon"); test_src("source/typechk/refine_quantifier_pass_1.dyon"); test_src("source/typechk/refine_quantifier_pass_2.dyon"); test_src("source/typechk/refine_quantifier_pass_3.dyon"); test_src("source/typechk/refine_quantifier_pass_4.dyon"); test_src("source/typechk/refine_quantifier_pass_5.dyon"); test_fail_src("source/typechk/refine_quantifier_fail_1.dyon"); test_fail_src("source/typechk/refine_quantifier_fail_2.dyon"); } #[test] fn test_functions() { test_src("source/functions/functions.dyon"); } #[test] fn test_error() { test_src("source/error/propagate.dyon"); test_src("source/error/call.dyon"); test_src("source/error/named_call.dyon"); test_src("source/error/if.dyon"); test_src("source/error/trace.dyon"); test_src("source/error/unwrap_err.dyon"); test_src("source/error/option.dyon"); }<|fim▁end|>
test_fail_src("source/typechk/refinement_22.dyon"); test_src("source/typechk/refinement_23.dyon");
<|file_name|>utils.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 Jonathan Eyolfson use libc::{c_char, c_int, c_void, size_t, uint32_t, uint64_t}; #[repr(C)] pub type wl_argument = uint64_t; #[repr(C)] pub struct wl_array { pub size: size_t, pub alloc: size_t, pub data: *mut c_void, } #[repr(C)] pub type wl_dispatcher_func_t = extern fn( _: *const c_void, _: *mut c_void, _: uint32_t, _: *const wl_message, _: *mut wl_argument); #[repr(C)] pub type wl_fixed_t = uint32_t; #[repr(C)] pub struct wl_interface { pub name: *const c_char, pub version: c_int, pub method_count: c_int,<|fim▁hole|> #[repr(C)] pub struct wl_list { pub prev: *mut wl_list, pub next: *mut wl_list, } #[repr(C)] pub type wl_log_func_t = extern fn(_: *const c_char, ...); #[repr(C)] pub struct wl_message { pub name: *const c_char, pub signature: *const c_char, pub types: *mut *const wl_interface, } #[repr(C)] pub struct wl_object;<|fim▁end|>
pub methods: *const wl_message, pub event_count: c_int, pub events: *const wl_message, }
<|file_name|>datacatalog_v1_generated_policy_tag_manager_list_policy_tags_sync.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # Generated code. DO NOT EDIT! # # Snippet for ListPolicyTags # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: # python3 -m pip install google-cloud-datacatalog # [START datacatalog_v1_generated_PolicyTagManager_ListPolicyTags_sync] from google.cloud import datacatalog_v1 def sample_list_policy_tags(): # Create a client client = datacatalog_v1.PolicyTagManagerClient() # Initialize request argument(s) request = datacatalog_v1.ListPolicyTagsRequest( parent="parent_value", ) # Make the request<|fim▁hole|> for response in page_result: print(response) # [END datacatalog_v1_generated_PolicyTagManager_ListPolicyTags_sync]<|fim▁end|>
page_result = client.list_policy_tags(request=request) # Handle the response
<|file_name|>test_runner.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 # Copyright (c) 2014-2017 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Run regression test suite. This module calls down into individual test cases via subprocess. It will forward all unrecognized arguments onto the individual test scripts. Functional tests are disabled on Windows by default. Use --force to run them anyway. For a description of arguments recognized by test scripts, see `test/functional/test_framework/test_framework.py:BitcoinTestFramework.main`. """ import argparse from collections import deque import configparser import datetime import os import time import shutil import signal import sys import subprocess import tempfile import re import logging # Formatting. Default colors to empty strings. BOLD, BLUE, RED, GREY = ("", ""), ("", ""), ("", ""), ("", "") try: # Make sure python thinks it can write unicode to its stdout "\u2713".encode("utf_8").decode(sys.stdout.encoding) TICK = "✓ " CROSS = "✖ " CIRCLE = "○ " except UnicodeDecodeError: TICK = "P " CROSS = "x " CIRCLE = "o " if os.name == 'posix': # primitive formatting on supported # terminal via ANSI escape sequences: BOLD = ('\033[0m', '\033[1m') BLUE = ('\033[0m', '\033[0;34m') RED = ('\033[0m', '\033[0;31m') GREY = ('\033[0m', '\033[1;30m') TEST_EXIT_PASSED = 0 TEST_EXIT_SKIPPED = 77 BASE_SCRIPTS= [ # Scripts that are run by the travis build process. 
# Longest test should go first, to favor running tests in parallel 'wallet_hd.py', 'wallet_backup.py', # vv Tests less than 5m vv 'feature_block.py', 'rpc_fundrawtransaction.py', 'p2p_compactblocks.py', 'feature_segwit.py', # vv Tests less than 2m vv 'wallet_basic.py', 'wallet_accounts.py', 'p2p_segwit.py', 'wallet_dump.py', 'rpc_listtransactions.py', # vv Tests less than 60s vv 'p2p_sendheaders.py', 'wallet_zapwallettxes.py', 'wallet_importmulti.py', 'mempool_limit.py', 'rpc_txoutproof.py', 'wallet_listreceivedby.py', 'wallet_abandonconflict.py', 'feature_csv_activation.py', 'rpc_rawtransaction.py', 'wallet_address_types.py', 'feature_reindex.py', # vv Tests less than 30s vv 'wallet_keypool_topup.py', 'interface_zmq.py', 'interface_bitcoin_cli.py', 'mempool_resurrect.py', 'wallet_txn_doublespend.py --mineblock', 'wallet_txn_clone.py', 'wallet_txn_clone.py --segwit', 'rpc_getchaintips.py', 'interface_rest.py', 'mempool_spend_coinbase.py', 'mempool_reorg.py', 'mempool_persist.py',<|fim▁hole|> 'rpc_users.py', 'feature_proxy.py', 'rpc_signrawtransaction.py', 'p2p_disconnect_ban.py', 'rpc_decodescript.py', 'rpc_blockchain.py', 'rpc_deprecated.py', 'wallet_disable.py', 'rpc_net.py', 'wallet_keypool.py', 'p2p_mempool.py', 'mining_prioritisetransaction.py', 'p2p_invalid_block.py', 'p2p_invalid_tx.py', 'feature_versionbits_warning.py', 'rpc_preciousblock.py', 'wallet_importprunedfunds.py', 'rpc_signmessage.py', 'feature_nulldummy.py', 'wallet_import_rescan.py', 'mining_basic.py', 'wallet_bumpfee.py', 'rpc_named_arguments.py', 'wallet_listsinceblock.py', 'p2p_leak.py', 'wallet_encryption.py', 'feature_dersig.py', 'feature_cltv.py', 'rpc_uptime.py', 'wallet_resendwallettransactions.py', 'feature_minchainwork.py', 'p2p_fingerprint.py', 'feature_uacomment.py', 'p2p_unrequested_blocks.py', 'feature_logging.py', 'p2p_node_network_limited.py', 'feature_config_args.py', # Don't append tests at the end to avoid merge conflicts # Put them in a random line within the section that 
fits their approximate run-time ] EXTENDED_SCRIPTS = [ # These tests are not run by the travis build process. # Longest test should go first, to favor running tests in parallel 'feature_pruning.py', # vv Tests less than 20m vv 'feature_fee_estimation.py', # vv Tests less than 5m vv 'feature_maxuploadtarget.py', 'mempool_packages.py', 'feature_dbcrash.py', # vv Tests less than 2m vv 'feature_bip68_sequence.py', 'mining_getblocktemplate_longpoll.py', 'p2p_timeouts.py', # vv Tests less than 60s vv 'feature_bip9_softforks.py', 'p2p_feefilter.py', 'rpc_bind.py', # vv Tests less than 30s vv 'feature_assumevalid.py', 'example_test.py', 'wallet_txn_doublespend.py', 'wallet_txn_clone.py --mineblock', 'feature_notifications.py', 'rpc_invalidateblock.py', 'feature_rbf.py', ] # Place EXTENDED_SCRIPTS first since it has the 3 longest running tests ALL_SCRIPTS = EXTENDED_SCRIPTS + BASE_SCRIPTS NON_SCRIPTS = [ # These are python files that live in the functional tests directory, but are not test scripts. 
"combine_logs.py", "create_cache.py", "test_runner.py", ] def main(): # Parse arguments and pass through unrecognised args parser = argparse.ArgumentParser(add_help=False, usage='%(prog)s [test_runner.py options] [script options] [scripts]', description=__doc__, epilog=''' Help text and arguments for individual test script:''', formatter_class=argparse.RawTextHelpFormatter) parser.add_argument('--combinedlogslen', '-c', type=int, default=0, help='print a combined log (of length n lines) from all test nodes and test framework to the console on failure.') parser.add_argument('--coverage', action='store_true', help='generate a basic coverage report for the RPC interface') parser.add_argument('--exclude', '-x', help='specify a comma-separated-list of scripts to exclude.') parser.add_argument('--extended', action='store_true', help='run the extended test suite in addition to the basic tests') parser.add_argument('--force', '-f', action='store_true', help='run tests even on platforms where they are disabled by default (e.g. windows).') parser.add_argument('--help', '-h', '-?', action='store_true', help='print help text and exit') parser.add_argument('--jobs', '-j', type=int, default=4, help='how many test scripts to run in parallel. Default=4.') parser.add_argument('--keepcache', '-k', action='store_true', help='the default behavior is to flush the cache directory on startup. --keepcache retains the cache from the previous testrun.') parser.add_argument('--quiet', '-q', action='store_true', help='only print results summary and failure logs') parser.add_argument('--tmpdirprefix', '-t', default=tempfile.gettempdir(), help="Root directory for datadirs") args, unknown_args = parser.parse_known_args() # args to be passed on always start with two dashes; tests are the remaining unknown args tests = [arg for arg in unknown_args if arg[:2] != "--"] passon_args = [arg for arg in unknown_args if arg[:2] == "--"] # Read config generated by configure. 
config = configparser.ConfigParser() configfile = os.path.abspath(os.path.dirname(__file__)) + "/../config.ini" config.read_file(open(configfile)) passon_args.append("--configfile=%s" % configfile) # Set up logging logging_level = logging.INFO if args.quiet else logging.DEBUG logging.basicConfig(format='%(message)s', level=logging_level) # Create base test directory tmpdir = "%s/bitcoin_test_runner_%s" % (args.tmpdirprefix, datetime.datetime.now().strftime("%Y%m%d_%H%M%S")) os.makedirs(tmpdir) logging.debug("Temporary test directory at %s" % tmpdir) enable_wallet = config["components"].getboolean("ENABLE_WALLET") enable_utils = config["components"].getboolean("ENABLE_UTILS") enable_globaltokend = config["components"].getboolean("ENABLE_BITCOIND") if config["environment"]["EXEEXT"] == ".exe" and not args.force: # https://github.com/bitcoin/bitcoin/commit/d52802551752140cf41f0d9a225a43e84404d3e9 # https://github.com/bitcoin/bitcoin/pull/5677#issuecomment-136646964 print("Tests currently disabled on Windows by default. Use --force option to enable") sys.exit(0) if not (enable_wallet and enable_utils and enable_globaltokend): print("No functional tests to run. Wallet, utils, and globaltokend must all be enabled") print("Rerun `configure` with -enable-wallet, -with-utils and -with-daemon and rerun make") sys.exit(0) # Build list of tests if tests: # Individual tests have been specified. Run specified tests that exist # in the ALL_SCRIPTS list. Accept the name with or without .py extension. tests = [re.sub("\.py$", "", t) + ".py" for t in tests] test_list = [] for t in tests: if t in ALL_SCRIPTS: test_list.append(t) else: print("{}WARNING!{} Test '{}' not found in full test list.".format(BOLD[1], BOLD[0], t)) else: # No individual tests have been specified. # Run all base tests, and optionally run extended tests. 
test_list = BASE_SCRIPTS if args.extended: # place the EXTENDED_SCRIPTS first since the three longest ones # are there and the list is shorter test_list = EXTENDED_SCRIPTS + test_list # Remove the test cases that the user has explicitly asked to exclude. if args.exclude: tests_excl = [re.sub("\.py$", "", t) + ".py" for t in args.exclude.split(',')] for exclude_test in tests_excl: if exclude_test in test_list: test_list.remove(exclude_test) else: print("{}WARNING!{} Test '{}' not found in current test list.".format(BOLD[1], BOLD[0], exclude_test)) if not test_list: print("No valid test scripts specified. Check that your test is in one " "of the test lists in test_runner.py, or run test_runner.py with no arguments to run all tests") sys.exit(0) if args.help: # Print help for test_runner.py, then print help of the first script (with args removed) and exit. parser.print_help() subprocess.check_call([sys.executable, os.path.join(config["environment"]["SRCDIR"], 'test', 'functional', test_list[0].split()[0]), '-h']) sys.exit(0) check_script_list(config["environment"]["SRCDIR"]) check_script_prefixes() if not args.keepcache: shutil.rmtree("%s/test/cache" % config["environment"]["BUILDDIR"], ignore_errors=True) run_tests(test_list, config["environment"]["SRCDIR"], config["environment"]["BUILDDIR"], config["environment"]["EXEEXT"], tmpdir, args.jobs, args.coverage, passon_args, args.combinedlogslen) def run_tests(test_list, src_dir, build_dir, exeext, tmpdir, jobs=1, enable_coverage=False, args=[], combined_logs_len=0): # Warn if globaltokend is already running (unix only) try: if subprocess.check_output(["pidof", "globaltokend"]) is not None: print("%sWARNING!%s There is already a globaltokend process running on this system. Tests may fail unexpectedly due to resource contention!" 
% (BOLD[1], BOLD[0])) except (OSError, subprocess.SubprocessError): pass # Warn if there is a cache directory cache_dir = "%s/test/cache" % build_dir if os.path.isdir(cache_dir): print("%sWARNING!%s There is a cache directory here: %s. If tests fail unexpectedly, try deleting the cache directory." % (BOLD[1], BOLD[0], cache_dir)) #Set env vars if "BITCOIND" not in os.environ: os.environ["BITCOIND"] = build_dir + '/src/globaltokend' + exeext os.environ["BITCOINCLI"] = build_dir + '/src/globaltoken-cli' + exeext tests_dir = src_dir + '/test/functional/' flags = ["--srcdir={}/src".format(build_dir)] + args flags.append("--cachedir=%s" % cache_dir) if enable_coverage: coverage = RPCCoverage() flags.append(coverage.flag) logging.debug("Initializing coverage directory at %s" % coverage.dir) else: coverage = None if len(test_list) > 1 and jobs > 1: # Populate cache try: subprocess.check_output([sys.executable, tests_dir + 'create_cache.py'] + flags + ["--tmpdir=%s/cache" % tmpdir]) except subprocess.CalledProcessError as e: sys.stdout.buffer.write(e.output) raise #Run Tests job_queue = TestHandler(jobs, tests_dir, tmpdir, test_list, flags) time0 = time.time() test_results = [] max_len_name = len(max(test_list, key=len)) for _ in range(len(test_list)): test_result, testdir, stdout, stderr = job_queue.get_next() test_results.append(test_result) if test_result.status == "Passed": logging.debug("\n%s%s%s passed, Duration: %s s" % (BOLD[1], test_result.name, BOLD[0], test_result.time)) elif test_result.status == "Skipped": logging.debug("\n%s%s%s skipped" % (BOLD[1], test_result.name, BOLD[0])) else: print("\n%s%s%s failed, Duration: %s s\n" % (BOLD[1], test_result.name, BOLD[0], test_result.time)) print(BOLD[1] + 'stdout:\n' + BOLD[0] + stdout + '\n') print(BOLD[1] + 'stderr:\n' + BOLD[0] + stderr + '\n') if combined_logs_len and os.path.isdir(testdir): # Print the final `combinedlogslen` lines of the combined logs print('{}Combine the logs and print the last {} lines 
...{}'.format(BOLD[1], combined_logs_len, BOLD[0])) print('\n============') print('{}Combined log for {}:{}'.format(BOLD[1], testdir, BOLD[0])) print('============\n') combined_logs, _ = subprocess.Popen([sys.executable, os.path.join(tests_dir, 'combine_logs.py'), '-c', testdir], universal_newlines=True, stdout=subprocess.PIPE).communicate() print("\n".join(deque(combined_logs.splitlines(), combined_logs_len))) print_results(test_results, max_len_name, (int(time.time() - time0))) if coverage: coverage.report_rpc_coverage() logging.debug("Cleaning up coverage data") coverage.cleanup() # Clear up the temp directory if all subdirectories are gone if not os.listdir(tmpdir): os.rmdir(tmpdir) all_passed = all(map(lambda test_result: test_result.was_successful, test_results)) sys.exit(not all_passed) def print_results(test_results, max_len_name, runtime): results = "\n" + BOLD[1] + "%s | %s | %s\n\n" % ("TEST".ljust(max_len_name), "STATUS ", "DURATION") + BOLD[0] test_results.sort(key=lambda result: result.name.lower()) all_passed = True time_sum = 0 for test_result in test_results: all_passed = all_passed and test_result.was_successful time_sum += test_result.time test_result.padding = max_len_name results += str(test_result) status = TICK + "Passed" if all_passed else CROSS + "Failed" results += BOLD[1] + "\n%s | %s | %s s (accumulated) \n" % ("ALL".ljust(max_len_name), status.ljust(9), time_sum) + BOLD[0] results += "Runtime: %s s\n" % (runtime) print(results) class TestHandler: """ Trigger the test scripts passed in via the list. """ def __init__(self, num_tests_parallel, tests_dir, tmpdir, test_list=None, flags=None): assert(num_tests_parallel >= 1) self.num_jobs = num_tests_parallel self.tests_dir = tests_dir self.tmpdir = tmpdir self.test_list = test_list self.flags = flags self.num_running = 0 # In case there is a graveyard of zombie globaltokends, we can apply a # pseudorandom offset to hopefully jump over them. 
# (625 is PORT_RANGE/MAX_NODES) self.portseed_offset = int(time.time() * 1000) % 625 self.jobs = [] def get_next(self): while self.num_running < self.num_jobs and self.test_list: # Add tests self.num_running += 1 t = self.test_list.pop(0) portseed = len(self.test_list) + self.portseed_offset portseed_arg = ["--portseed={}".format(portseed)] log_stdout = tempfile.SpooledTemporaryFile(max_size=2**16) log_stderr = tempfile.SpooledTemporaryFile(max_size=2**16) test_argv = t.split() testdir = "{}/{}_{}".format(self.tmpdir, re.sub(".py$", "", test_argv[0]), portseed) tmpdir_arg = ["--tmpdir={}".format(testdir)] self.jobs.append((t, time.time(), subprocess.Popen([sys.executable, self.tests_dir + test_argv[0]] + test_argv[1:] + self.flags + portseed_arg + tmpdir_arg, universal_newlines=True, stdout=log_stdout, stderr=log_stderr), testdir, log_stdout, log_stderr)) if not self.jobs: raise IndexError('pop from empty list') while True: # Return first proc that finishes time.sleep(.5) for j in self.jobs: (name, time0, proc, testdir, log_out, log_err) = j if os.getenv('TRAVIS') == 'true' and int(time.time() - time0) > 20 * 60: # In travis, timeout individual tests after 20 minutes (to stop tests hanging and not # providing useful output. 
proc.send_signal(signal.SIGINT) if proc.poll() is not None: log_out.seek(0), log_err.seek(0) [stdout, stderr] = [l.read().decode('utf-8') for l in (log_out, log_err)] log_out.close(), log_err.close() if proc.returncode == TEST_EXIT_PASSED and stderr == "": status = "Passed" elif proc.returncode == TEST_EXIT_SKIPPED: status = "Skipped" else: status = "Failed" self.num_running -= 1 self.jobs.remove(j) return TestResult(name, status, int(time.time() - time0)), testdir, stdout, stderr print('.', end='', flush=True) class TestResult(): def __init__(self, name, status, time): self.name = name self.status = status self.time = time self.padding = 0 def __repr__(self): if self.status == "Passed": color = BLUE glyph = TICK elif self.status == "Failed": color = RED glyph = CROSS elif self.status == "Skipped": color = GREY glyph = CIRCLE return color[1] + "%s | %s%s | %s s\n" % (self.name.ljust(self.padding), glyph, self.status.ljust(7), self.time) + color[0] @property def was_successful(self): return self.status != "Failed" def check_script_prefixes(): """Check that test scripts start with one of the allowed name prefixes.""" good_prefixes_re = re.compile("(example|feature|interface|mempool|mining|p2p|rpc|wallet)_") bad_script_names = [script for script in ALL_SCRIPTS if good_prefixes_re.match(script) is None] if bad_script_names: print("%sERROR:%s %d tests not meeting naming conventions:" % (BOLD[1], BOLD[0], len(bad_script_names))) print(" %s" % ("\n ".join(sorted(bad_script_names)))) raise AssertionError("Some tests are not following naming convention!") def check_script_list(src_dir): """Check scripts directory. 
Check that there are no scripts in the functional tests directory which are not being run by pull-tester.py.""" script_dir = src_dir + '/test/functional/' python_files = set([t for t in os.listdir(script_dir) if t[-3:] == ".py"]) missed_tests = list(python_files - set(map(lambda x: x.split()[0], ALL_SCRIPTS + NON_SCRIPTS))) if len(missed_tests) != 0: print("%sWARNING!%s The following scripts are not being run: %s. Check the test lists in test_runner.py." % (BOLD[1], BOLD[0], str(missed_tests))) if os.getenv('TRAVIS') == 'true': # On travis this warning is an error to prevent merging incomplete commits into master sys.exit(1) class RPCCoverage(): """ Coverage reporting utilities for test_runner. Coverage calculation works by having each test script subprocess write coverage files into a particular directory. These files contain the RPC commands invoked during testing, as well as a complete listing of RPC commands per `bitcoin-cli help` (`rpc_interface.txt`). After all tests complete, the commands run are combined and diff'd against the complete list to calculate uncovered RPC commands. See also: test/functional/test_framework/coverage.py """ def __init__(self): self.dir = tempfile.mkdtemp(prefix="coverage") self.flag = '--coveragedir=%s' % self.dir def report_rpc_coverage(self): """ Print out RPC commands that were unexercised by tests. """ uncovered = self._get_uncovered_rpc_commands() if uncovered: print("Uncovered RPC commands:") print("".join((" - %s\n" % i) for i in sorted(uncovered))) else: print("All RPC commands covered.") def cleanup(self): return shutil.rmtree(self.dir) def _get_uncovered_rpc_commands(self): """ Return a set of currently untested RPC commands. """ # This is shared from `test/functional/test-framework/coverage.py` reference_filename = 'rpc_interface.txt' coverage_file_prefix = 'coverage.' 
coverage_ref_filename = os.path.join(self.dir, reference_filename) coverage_filenames = set() all_cmds = set() covered_cmds = set() if not os.path.isfile(coverage_ref_filename): raise RuntimeError("No coverage reference found") with open(coverage_ref_filename, 'r') as f: all_cmds.update([i.strip() for i in f.readlines()]) for root, dirs, files in os.walk(self.dir): for filename in files: if filename.startswith(coverage_file_prefix): coverage_filenames.add(os.path.join(root, filename)) for filename in coverage_filenames: with open(filename, 'r') as f: covered_cmds.update([i.strip() for i in f.readlines()]) return all_cmds - covered_cmds if __name__ == '__main__': main()<|fim▁end|>
'wallet_multiwallet.py', 'wallet_multiwallet.py --usecli', 'interface_http.py',
<|file_name|>regress-900055.js<|end_file_name|><|fim▁begin|>// Copyright 2008 Google Inc. All Rights Reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following // disclaimer in the documentation and/or other materials provided // with the distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived // from this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. var alias = eval; function e(s) { return alias(s); } <|fim▁hole|>assertEquals(Object, e("Object")); assertEquals(e, e("e")); var caught = false; try { e('s'); // should throw exception since aliased eval is global } catch (e) { caught = true; assertTrue(e instanceof ReferenceError); } assertTrue(caught);<|fim▁end|>
assertEquals(42, e("42"));
<|file_name|>OneFichierCom.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- import re import time import pycurl from module.network.HTTPRequest import BadHeader from ..internal.Account import Account class OneFichierCom(Account): __name__ = "OneFichierCom" __type__ = "account" __version__ = "0.23" __status__ = "testing" __description__ = """1fichier.com account plugin""" __license__ = "GPLv3" __authors__ = [("Elrick69", "elrick69[AT]rocketmail[DOT]com"), ("Walter Purcaro", "vuolter@gmail.com")] VALID_UNTIL_PATTERN = r'Your Premium offer subscription is valid until <span style="font-weight:bold">(\d+\-\d+\-\d+)' def grab_info(self, user, password, data): validuntil = None trafficleft = -1<|fim▁hole|> html = self.load("https://1fichier.com/console/abo.pl") m = re.search(self.VALID_UNTIL_PATTERN, html) if m is not None: expiredate = m.group(1) self.log_debug("Expire date: " + expiredate) try: validuntil = time.mktime(time.strptime(expiredate, "%Y-%m-%d")) except Exception, e: self.log_error(e, trace=True) else: premium = True return {'validuntil': validuntil, 'trafficleft': trafficleft, 'premium': premium or False} def signin(self, user, password, data): self.req.http.c.setopt( pycurl.REFERER, "https://1fichier.com/login.pl?lg=en") try: html = self.load("https://1fichier.com/login.pl?lg=en", post={'mail': user, 'pass': password, 'It': "on", 'purge': "off", 'valider': "Send"}) if any(_x in html for _x in ('>Invalid username or Password', '>Invalid email address', '>Invalid password')): self.fail_login() except BadHeader, e: if e.code == 403: self.fail_login() else: raise<|fim▁end|>
premium = None
<|file_name|>data.js<|end_file_name|><|fim▁begin|>var DATA = {"a" : "b", "c" : "d"}; var RESPONSE = {"vendor":"OpenPolicy","name":"alex","locations":[{"continent":"Europe","country":"Germany","price":5,"latitude":"48.105964","name":"Slot_ger_4","longitude":"11.612549"},{"continent":"Europe","country":"Germany","price":6,"latitude":"52.473412","name":"Slot_ger_1","longitude":"13.390961"},{"continent":"Europe","country":"Germany","price":5,"latitude":"48.105964","name":"Slot_ger_2","longitude":"11.612549"},{"continent":"Europe","country":"Germany","price":4,"latitude":"52.473412","name":"Slot_ger_3","longitude":"13.390961"}],"version":"0.1"} <|fim▁hole|> "vendor":"OpenPolicy", "version":"0.1", "preferences":["germany"], "attributes":[ { "#http://www.q-team.org/Ontology#Einwilligung_Zur_Datensammlung":true }, { "#http://www.q-team.org/Ontology#minAge":12 }, { "#http://www.q-team.org/Ontology#Recht_auf_Vergessen":false }, { "#http://www.q-team.org/Ontology#Meldepflicht_bei_Verletzung":true } ] }<|fim▁end|>
var REQUEST = { "name":"alex",
<|file_name|>renderers.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python3 from abc import ABCMeta, abstractmethod import csv import os import re import subprocess import sys import plaid2text.config_manager as cm from plaid2text.interact import separator_completer, prompt class Entry: """ This represents one entry (transaction) from Plaid. """ def __init__(self, transaction, options={}): """Parameters: transaction: a plaid transaction options: from CLI args and config file """ self.options = options self.transaction = transaction # TODO: document this if 'addons' in options: self.transaction['addons'] = dict( (k, fields[v - 1]) for k, v in options.addons.items() # NOQA ) else: self.transaction['addons'] = {} # The id for the transaction self.transaction['transaction_id'] = self.transaction['transaction_id'] # Get the date and convert it into a ledger/beancount formatted date. d8 = self.transaction['date'] d8_format = options.output_date_format if options and 'output_date_format' in options else '%Y-%m-%d' self.transaction['transaction_date'] = d8.date().strftime(d8_format) self.desc = self.transaction['name'] # amnt = self.transaction['amount'] self.transaction['currency'] = options.currency # self.transaction['debit_amount'] = amnt # self.transaction['debit_currency'] = currency # self.transaction['credit_amount'] = '' # self.transaction['credit_currency'] = '' <|fim▁hole|> with open(options.template_file, 'r', encoding='utf-8') as f: self.transaction['transaction_template'] = f.read() else: self.transaction['transaction_template'] = '' def query(self): """ We print a summary of the record on the screen, and allow you to choose the destination account. 
""" return '{0} {1:<40} {2}'.format( self.transaction['date'], self.desc, self.transaction['amount'] ) def journal_entry(self, payee, account, tags): """ Return a formatted journal entry recording this Entry against the specified posting account """ if self.options.output_format == 'ledger': def_template = cm.DEFAULT_LEDGER_TEMPLATE else: def_template = cm.DEFAULT_BEANCOUNT_TEMPLATE if self.transaction['transaction_template']: template = (self.transaction['transaction_template']) else: template = (def_template) if self.options.output_format == 'beancount': ret_tags = ' {}'.format(tags) if tags else '' else: ret_tags = ' ; {}'.format(tags) if tags else '' format_data = { 'associated_account': account, 'payee': payee, 'tags': ret_tags } format_data.update(self.transaction['addons']) format_data.update(self.transaction) return template.format(**format_data) class OutputRenderer(metaclass=ABCMeta): """ Base class for output rendering. """ def __init__(self, transactions, options): self.transactions = transactions self.possible_accounts = set([]) self.possible_payees = set([]) self.possible_tags = set([]) self.mappings = [] self.map_file = options.mapping_file self.read_mapping_file() self.journal_file = options.journal_file self.journal_lines = [] self.options = options self.get_possible_accounts_and_payees() # Add payees/accounts/tags from mappings for m in self.mappings: self.possible_payees.add(m[1]) self.possible_accounts.add(m[2]) if m[3]: if options.output_format == 'ledger': self.possible_tags.update(set(m[3][0].split(':'))) else: self.possible_tags.update([t.replace('#', '') for t in m[3][0].split(' ')]) def read_mapping_file(self): """ Mappings are simply a CSV file with three columns. The first is a string to be matched against an entry description. The second is the payee against which such entries should be posted. The third is the account against which such entries should be posted. 
If the match string begins and ends with '/' it is taken to be a regular expression. """ if not self.map_file: return with open(self.map_file, 'r', encoding='utf-8', newline='') as f: map_reader = csv.reader(f) for row in map_reader: if len(row) > 1: pattern = row[0].strip() payee = row[1].strip() account = row[2].strip() tags = row[3:] if pattern.startswith('/') and pattern.endswith('/'): try: pattern = re.compile(pattern[1:-1], re.I) except re.error as e: print( "Invalid regex '{0}' in '{1}': {2}" .format(pattern, self.map_file, e), file=sys.stderr) sys.exit(1) self.mappings.append((pattern, payee, account, tags)) def append_mapping_file(self, desc, payee, account, tags): if self.map_file: with open(self.map_file, 'a', encoding='utf-8', newline='') as f: writer = csv.writer(f) ret_tags = tags if len(tags) > 0 else '' writer.writerow([desc, payee, account, ret_tags]) def process_transactions(self, callback=None): """ Read transactions from Mongo (Plaid) and process them. Writes Ledger/Beancount formatted lines either to out_file or stdout. Parameters: callback: A function taking a single transaction update object to store in the DB immediately after collecting the information from the user. """ out = self._process_plaid_transactions(callback=callback) if self.options.headers_file: headers = ''.join(open(self.options.headers_file, mode='r').readlines()) print(headers, file=self.options.outfile) print(*self.journal_lines, sep='\n', file=self.options.outfile) return out def _process_plaid_transactions(self, callback=None): """Process plaid transaction and return beancount/ledger formatted lines. 
""" out = [] for t in self.transactions: entry = Entry(t, self.options) payee, account, tags = self.get_payee_and_account(entry) dic = {} dic['transaction_id'] = t['transaction_id'] dic['tags'] = tags dic['associated_account'] = account dic['payee'] = payee dic['posting_account'] = self.options.posting_account out.append(dic) # save the transactions into the database as they are processed if callback: callback(dic) self.journal_lines.append(entry.journal_entry(payee, account, tags)) return out def prompt_for_value(self, text_prompt, values, default): sep = ':' if text_prompt == 'Payee' else ' ' a = prompt( '{} [{}]: '.format(text_prompt, default), completer=separator_completer(values, sep=sep) ) # Handle tag returning none if accepting return a if (a or text_prompt == 'Tag') else default def get_payee_and_account(self, entry): payee = entry.desc account = self.options.default_expense tags = '' found = False # Try to match entry desc with mappings patterns for m in self.mappings: pattern = m[0] if isinstance(pattern, str): if entry.desc == pattern: payee, account, tags = m[1], m[2], m[3] found = True # do not break here, later mapping must win else: # If the pattern isn't a string it's a regex if m[0].match(entry.desc): payee, account, tags = m[1], m[2], m[3] found = True # Tags gets read in as a list, but just contains one string if tags: tags = tags[0] modified = False if self.options.quiet and found: pass else: if self.options.clear_screen: print('\033[2J\033[;H') print('\n' + entry.query()) value = self.prompt_for_value('Payee', self.possible_payees, payee) if value: modified = modified if modified else value != payee payee = value value = self.prompt_for_value('Account', self.possible_accounts, account) if value: modified = modified if modified else value != account account = value if self.options.tags: value = self.prompt_for_tags('Tag', self.possible_tags, tags) if value: modified = modified if modified else value != tags tags = value if not found or (found 
and modified): # Add new or changed mapping to mappings and append to file self.mappings.append((entry.desc, payee, account, tags)) self.append_mapping_file(entry.desc, payee, account, tags) # Add new possible_values to possible values lists self.possible_payees.add(payee) self.possible_accounts.add(account) return (payee, account, tags) @abstractmethod def tagify(self, value): pass @abstractmethod def get_possible_accounts_and_payees(self): pass @abstractmethod def prompt_for_tags(self, prompt, values, default): pass class LedgerRenderer(OutputRenderer): def tagify(self, value): if value.find(':') < 0 and value[0] != '[' and value[-1] != ']': value = ':{0}:'.format(value.replace(' ', '-').replace(',', '')) return value def get_possible_accounts_and_payees(self): if self.journal_file: self.possible_payees = self._payees_from_ledger() self.possible_accounts = self._accounts_from_ledger() self.read_accounts_file() def prompt_for_tags(self, prompt, values, default): # tags = list(default[0].split(':')) tags = [':{}:'.format(t) for t in default.split(':') if t] if default else [] value = self.prompt_for_value(prompt, values, ''.join(tags).replace('::', ':')) while value: if value[0] == '-': value = self.tagify(value[1:]) if value in tags: tags.remove(value) else: value = self.tagify(value) if value not in tags: tags.append(value) value = self.prompt_for_value(prompt, values, ''.join(tags).replace('::', ':')) return ''.join(tags).replace('::', ':') def _payees_from_ledger(self): return self._from_ledger('payees') def _accounts_from_ledger(self): return self._from_ledger('accounts') def _from_ledger(self, command): ledger = 'ledger' for f in ['/usr/bin/ledger', '/usr/local/bin/ledger']: if os.path.exists(f): ledger = f break cmd = [ledger, '-f', self.journal_file, command] p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) (stdout_data, stderr_data) = p.communicate() items = set() for item in stdout_data.decode('utf-8').splitlines(): 
items.add(item) return items def read_accounts_file(self): """ Process each line in the specified account file looking for account definitions. An account definition is a line containing the word 'account' followed by a valid account name, e.g: account Expenses account Expenses:Utilities All other lines are ignored. """ if not self.options.accounts_file: return accounts = [] pattern = re.compile('^\s*account\s+([:A-Za-z0-9-_ ]+)$') with open(self.options.accounts_file, 'r', encoding='utf-8') as f: for line in f.readlines(): mo = pattern.match(line) if mo: accounts.append(mo.group(1)) self.possible_accounts.update(accounts) class BeancountRenderer(OutputRenderer): import beancount def tagify(self, value): # No spaces or commas allowed return value.replace(' ', '-').replace(',', '') def get_possible_accounts_and_payees(self): if self.journal_file: self._payees_and_accounts_from_beancount() def _payees_and_accounts_from_beancount(self): try: payees = set() accounts = set() tags = set() from beancount import loader from beancount.core.data import Transaction, Open import sys entries, errors, options = loader.load_file(self.journal_file) except Exception as e: print(e.message, file=sys.stderr) sys.exit(1) else: for e in entries: if type(e) is Transaction: if e.payee: payees.add(e.payee) if e.tags: for t in e.tags: tags.add(t) if e.postings: for p in e.postings: accounts.add(p.account) elif type(e) is Open: accounts.add(e.account) self.possible_accounts.update(accounts) self.possible_tags.update(tags) self.possible_payees.update(payees) def prompt_for_tags(self, prompt, values, default): tags = ' '.join(['#{}'.format(t) for t in default.split() if t]) if default else [] value = self.prompt_for_value(prompt, values, ' '.join(['#{}'.format(t) for t in tags])) while value: if value[0] == '-': value = self.tagify(value[1:]) if value in tags: tags.remove(value) else: value = self.tagify(value) if value not in tags: tags.append(value) value = self.prompt_for_value( prompt, 
values, ' '.join(['#{}'.format(t) for t in tags]) ) return ' '.join(['#{}'.format(t) for t in tags])<|fim▁end|>
self.transaction['posting_account'] = options.posting_account self.transaction['cleared_character'] = options.cleared_character if options.template_file:
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>import os from setuptools import setup, find_packages def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read()<|fim▁hole|>setup( name = "django-pressroom", version = "0.4.2.1", url = 'https://github.com/petry/django-pressroom', license = 'BSD', description = "A pressroom application for django.", author = 'Justin Driscoll, Michael Thornhill, Marcos Daniel Petry', author_email = 'marcospetry@gmail.com', packages = find_packages('src'), package_dir = {'': 'src'}, install_requires = ['setuptools', 'django-photologue'], dependency_links = [ 'http://github.com/petry/django-photologue/tarball/master#egg=django-photologue', ], classifiers = [ 'Development Status :: 4.2 - Beta', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP', ] )<|fim▁end|>
<|file_name|>QualityService.java<|end_file_name|><|fim▁begin|>package com.fruit.service.management; import com.fruit.base.BaseService; import com.fruit.entity.management.Quality; import java.util.Map; /** * 产品质量检测 Service * @author CSH * */ public interface QualityService extends BaseService<Quality> { /**返回当前质检人员的质检记录 * @param page * @param pageSize * @param ownid 当id小于0时,表示不采用该条件 * @param params * @return */ public String showRecords(Integer page,Integer pageSize,int ownid,Map<String, String> params); /**返回整个公司的质检记录 * @param page<|fim▁hole|> */ public String showRecordsByAdmin(Integer page,Integer pageSize,int companyId,Map<String, String> params); /**更新质检记录 * @return */ public Integer updateRecords(Integer inspectorId,String name,String way,String checkResult,String picture,String date,String endNumber, String barcodes); /**更新质检记录 * @return */ public Integer updateRecordToAll(Integer inspectorId,String name,String way,String checkResult,String picture,String date,String endNumber, String likeBarcode); /**获取详细信息 * @param id * @return */ public String getDetail(int id); /**获取详细信息 * @param id * @return */ public String getQualityDetail(Integer id); /** * 删除产品质量检测记录 * @param id */ // public void deleteQuality(int id) { // Quality quality = qualityDao.getById(id); // List<ImageBean> pictures = ImageBean.getImageList(quality.getPictures()); // for (ImageBean picture : pictures){ // FileManager.deleteImageFile(picture.getName()); // } // qualityDao.delete(id); // } /** * 获取质检记录柱状图数据 * @param page * @param pageSize * @param id * @param params * @return */ public String getChartData(Integer page,Integer pageSize,Integer id,Map<String, String> params); /** * 获取饼图数据 * @param page * @param pageSize * @param id * @param params * @return */ public String getPieChartData(Integer page,Integer pageSize,Integer id,Map<String, String> params); }<|fim▁end|>
* @param pageSize * @param ownid 当id小于0时,表示不采用该条件 * @param params * @return
<|file_name|>referenciacatastral.py<|end_file_name|><|fim▁begin|># referenciacatastral.py - functions for handling Spanish real state ids # coding: utf-8 # # Copyright (C) 2016 David García Garzón # Copyright (C) 2016-2017 Arthur de Jong # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301 USA """Referencia Catastral (Spanish real estate property id) The cadastral reference code is an identifier for real estate in Spain. It is issued by Dirección General del Catastro (General Directorate of Land Registry) of the Ministerio de Hacienda (Tresury Ministry). It has 20 digits and contains numbers and letters including the Spanish Ñ. The number consists of 14 digits for the parcel, 4 for identifying properties within the parcel and 2 check digits. The parcel digits are structured differently for urban, non-urban or special (infrastructure) cases.<|fim▁hole|>* http://www.catastro.meh.es/esp/referencia_catastral_1.asp (Spanish) * http://www.catastro.meh.es/documentos/05042010_P.pdf (Spanish) * https://es.wikipedia.org/wiki/Catastro#Referencia_catastral >>> validate('7837301-VG8173B-0001 TT') # Lanteira town hall '7837301VG8173B0001TT' >>> validate('783301 VG8173B 0001 TT') # missing digit Traceback (most recent call last): ... InvalidLength: ... 
>>> validate('7837301/VG8173B 0001 TT') # not alphanumeric Traceback (most recent call last): ... InvalidFormat: ... >>> validate('7837301 VG8173B 0001 NN') # bad check digits Traceback (most recent call last): ... InvalidChecksum: ... >>> format('4A08169P03PRAT0001LR') # BCN Airport '4A08169 P03PRAT 0001 LR' """ from stdnum.exceptions import * from stdnum.util import clean alphabet = u'ABCDEFGHIJKLMNÑOPQRSTUVWXYZ0123456789' def compact(number): """Convert the number to the minimal representation. This strips the number of any valid separators and removes surrounding whitespace.""" return clean(number, ' -').strip().upper() def format(number): """Reformat the passed number to the standard format.""" number = compact(number) return ' '.join([ number[:7], number[7:14], number[14:18], number[18:] ]) # The check digit implementation is based on the Javascript # implementation by Vicente Sancho that can be found at # http://trellat.es/validar-la-referencia-catastral-en-javascript/ def _check_digit(number): """Calculate a single check digit on the provided part of the number.""" weights = (13, 15, 12, 5, 4, 17, 9, 21, 3, 7, 1) s = sum(w * (int(n) if n.isdigit() else alphabet.find(n) + 1) for w, n in zip(weights, number)) return 'MQWERTYUIOPASDFGHJKLBZX'[s % 23] def _force_unicode(number): """Convert the number to unicode.""" if not hasattr(number, 'isnumeric'): # pragma: no cover (Python 2 code) number = number.decode('utf-8') return number def calc_check_digits(number): """Calculate the check digits for the number.""" number = _force_unicode(compact(number)) return ( _check_digit(number[0:7] + number[14:18]) + _check_digit(number[7:14] + number[14:18])) def validate(number): """Checks to see if the number provided is a valid Cadastral Reference. 
This checks the length, formatting and check digits.""" number = compact(number) n = _force_unicode(number) if not all(c in alphabet for c in n): raise InvalidFormat() if len(n) != 20: raise InvalidLength() if calc_check_digits(n) != n[18:]: raise InvalidChecksum() return number def is_valid(number): """Checks to see if the number provided is a valid Cadastral Reference.""" try: return bool(validate(number)) except ValidationError: return False<|fim▁end|>
More information:
<|file_name|>TestScopedEnumType.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from __future__ import print_function import lldb from lldbsuite.test.decorators import * from lldbsuite.test.lldbtest import * from lldbsuite.test import lldbutil class ScopedEnumType(TestBase): mydir = TestBase.compute_mydir(__file__) @skipIf(dwarf_version=['<', '4']) def test(self): self.build() self.main_source = "main.cpp" self.main_source_spec = lldb.SBFileSpec(self.main_source) (target, process, thread, bkpt) = lldbutil.run_to_source_breakpoint(self, '// Set break point at this line.', self.main_source_spec) frame = thread.GetFrameAtIndex(0) self.expect("expr f == Foo::FooBar", substrs=['(bool) $0 = true']) value = frame.EvaluateExpression("f == Foo::FooBar") self.assertTrue(value.IsValid()) self.assertTrue(value.GetError().Success()) self.assertEqual(value.GetValueAsUnsigned(), 1) value = frame.EvaluateExpression("b == BarBar") self.assertTrue(value.IsValid()) self.assertTrue(value.GetError().Success()) self.assertEqual(value.GetValueAsUnsigned(), 1) ## b is not a Foo value = frame.EvaluateExpression("b == Foo::FooBar") self.assertTrue(value.IsValid()) self.assertFalse(value.GetError().Success()) ## integral is not implicitly convertible to a scoped enum value = frame.EvaluateExpression("1 == Foo::FooBar") self.assertTrue(value.IsValid()) self.assertFalse(value.GetError().Success())<|fim▁end|>
<|file_name|>test_volume_quotas_negative.py<|end_file_name|><|fim▁begin|># Copyright 2014 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT<|fim▁hole|> from tempest.api.volume import base from tempest import exceptions from tempest import test class VolumeQuotasNegativeTestJSON(base.BaseVolumeV1AdminTest): _interface = "json" force_tenant_isolation = True @classmethod @test.safe_setup def setUpClass(cls): super(VolumeQuotasNegativeTestJSON, cls).setUpClass() demo_user = cls.isolated_creds.get_primary_creds() cls.demo_tenant_id = demo_user.tenant_id cls.shared_quota_set = {'gigabytes': 3, 'volumes': 1, 'snapshots': 1} # NOTE(gfidente): no need to restore original quota set # after the tests as they only work with tenant isolation. resp, quota_set = cls.quotas_client.update_quota_set( cls.demo_tenant_id, **cls.shared_quota_set) # NOTE(gfidente): no need to delete in tearDown as # they are created using utility wrapper methods. cls.volume = cls.create_volume() cls.snapshot = cls.create_snapshot(cls.volume['id']) @test.attr(type='negative') def test_quota_volumes(self): self.assertRaises(exceptions.OverLimit, self.volumes_client.create_volume, size=1) @test.attr(type='negative') def test_quota_volume_snapshots(self): self.assertRaises(exceptions.OverLimit, self.snapshots_client.create_snapshot, self.volume['id']) @test.attr(type='negative') def test_quota_volume_gigabytes(self): # NOTE(gfidente): quota set needs to be changed for this test # or we may be limited by the volumes or snaps quota number, not by # actual gigs usage; next line ensures shared set is restored. 
self.addCleanup(self.quotas_client.update_quota_set, self.demo_tenant_id, **self.shared_quota_set) new_quota_set = {'gigabytes': 2, 'volumes': 2, 'snapshots': 1} resp, quota_set = self.quotas_client.update_quota_set( self.demo_tenant_id, **new_quota_set) self.assertRaises(exceptions.OverLimit, self.volumes_client.create_volume, size=1) new_quota_set = {'gigabytes': 2, 'volumes': 1, 'snapshots': 2} resp, quota_set = self.quotas_client.update_quota_set( self.demo_tenant_id, **self.shared_quota_set) self.assertRaises(exceptions.OverLimit, self.snapshots_client.create_snapshot, self.volume['id']) class VolumeQuotasNegativeTestXML(VolumeQuotasNegativeTestJSON): _interface = "xml"<|fim▁end|>
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License.
<|file_name|>UiPasswordPlugin.py<|end_file_name|><|fim▁begin|>import string import random import time import json import re from Config import config from Plugin import PluginManager if "sessions" not in locals().keys(): # To keep sessions between module reloads sessions = {} def showPasswordAdvice(password): error_msgs = [] if not password or not isinstance(password, (str, unicode)): error_msgs.append("You have enabled <b>UiPassword</b> plugin, but you forgot to set a password!") elif len(password) < 8: error_msgs.append("You are using a very short UI password!") return error_msgs @PluginManager.registerTo("UiRequest") class UiRequestPlugin(object): sessions = sessions last_cleanup = time.time() def route(self, path):<|fim▁hole|> return self.actionFile("src/Ui/media/img/favicon.ico") else: if config.ui_password: if time.time() - self.last_cleanup > 60 * 60: # Cleanup expired sessions every hour self.cleanup() # Validate session session_id = self.getCookies().get("session_id") if session_id not in self.sessions: # Invalid session id, display login return self.actionLogin() return super(UiRequestPlugin, self).route(path) # Action: Login def actionLogin(self): template = open("plugins/UiPassword/login.html").read() self.sendHeader() posted = self.getPosted() if posted: # Validate http posted data if self.checkPassword(posted.get("password")): # Valid password, create session session_id = self.randomString(26) self.sessions[session_id] = { "added": time.time(), "keep": posted.get("keep") } # Redirect to homepage or referer url = self.env.get("HTTP_REFERER", "") if not url or re.sub("\?.*", "", url).endswith("/Login"): url = "/" + config.homepage cookie_header = ('Set-Cookie', "session_id=%s;path=/;max-age=2592000;" % session_id) # Max age = 30 days self.start_response('301 Redirect', [('Location', url), cookie_header]) yield "Redirecting..." 
else: # Invalid password, show login form again template = template.replace("{result}", "bad_password") yield template def checkPassword(self, password): return password == config.ui_password def randomString(self, nchars): return ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for _ in range(nchars)) @classmethod def cleanup(cls): cls.last_cleanup = time.time() for session_id, session in cls.sessions.items(): if session["keep"] and time.time() - session["added"] > 60 * 60 * 24 * 60: # Max 60days for keep sessions del(cls.sessions[session_id]) elif not session["keep"] and time.time() - session["added"] > 60 * 60 * 24: # Max 24h for non-keep sessions del(cls.sessions[session_id]) # Action: Display sessions def actionSessions(self): self.sendHeader() yield "<pre>" yield json.dumps(self.sessions, indent=4) # Action: Logout def actionLogout(self): # Session id has to passed as get parameter or called without referer to avoid remote logout session_id = self.getCookies().get("session_id") if not self.env.get("HTTP_REFERER") or session_id == self.get.get("session_id"): if session_id in self.sessions: del self.sessions[session_id] self.start_response('301 Redirect', [ ('Location', "/"), ('Set-Cookie', "session_id=deleted; path=/; expires=Thu, 01 Jan 1970 00:00:00 GMT") ]) yield "Redirecting..." 
else: self.sendHeader() yield "Error: Invalid session id" @PluginManager.registerTo("ConfigPlugin") class ConfigPlugin(object): def createArguments(self): group = self.parser.add_argument_group("UiPassword plugin") group.add_argument('--ui_password', help='Password to access UiServer', default=None, metavar="password") return super(ConfigPlugin, self).createArguments() from Translate import translate as lang @PluginManager.registerTo("UiWebsocket") class UiWebsocketPlugin(object): def actionUiLogout(self, to): permissions = self.getPermissions(to) if "ADMIN" not in permissions: return self.response(to, "You don't have permission to run this command") session_id = self.request.getCookies().get("session_id", "") message = "<script>document.location.href = '/Logout?session_id=%s'</script>" % session_id self.cmd("notification", ["done", message]) def addHomepageNotifications(self): error_msgs = showPasswordAdvice(config.ui_password) for msg in error_msgs: self.site.notifications.append(["error", lang[msg]]) return super(UiWebsocketPlugin, self).addHomepageNotifications()<|fim▁end|>
if path.endswith("favicon.ico"):
<|file_name|>agency.js<|end_file_name|><|fim▁begin|><|fim▁hole|> let AgencySchema = new Schema({ }) module.exports = mongoose.model('Agency', AgencySchema)<|fim▁end|>
const mongoose = require('mongoose') const Schema = mongoose.Schema
<|file_name|>FvSTLAssisstant.cpp<|end_file_name|><|fim▁begin|><|fim▁hole|>#include "FvSTLAssisstant.h"<|fim▁end|>
<|file_name|>TravelTimeFMM.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ Created on Tue Jul 07 09:44:36 2015 @author: Marcus """ import pygimli as pg import numpy as np from pygimli.physics.traveltime.fastMarchingTest import fastMarch import matplotlib.pyplot as plt import time as time from pygimli.mplviewer import drawMesh # , drawField, drawStreamLines class TravelTimeFMM(pg.ModellingBase): """ Class that implements the Fast Marching Method (FMM). It can be used instead of Dijkstra modelling. Although it is currently quite slow! """ def __init__(self, mesh, data, verbose=False): """ Init function. Parameters: ----------- mesh : pygimli.Mesh 2D mesh to be used in the forward calculations. data : pygimli.DataContainer The datacontainer with sensor positions etc. verbose : boolean More printouts or not... """ pg.ModellingBase.__init__(self, mesh, data, verbose) self.timefields = dict() self._jac = dict() self.num_sensors = data.sensorCount() # num_shots = len(np.unique(data("s"))) def response(self, slowness): """ Response function. Returns the result of the forward calculation. Uses the shot- and sensor positions specified in the data container. """ mesh = self.mesh() param_markers = np.unique(mesh.cellMarker()) param_count = len(param_markers) if len(slowness) == mesh.cellCount(): self.mapModel(slowness) elif len(slowness) == param_count: # map the regions in the mesh to slowness slow_map = pg.stdMapF_F() min_reg_num = min(param_markers) for i, si in enumerate(slowness): slow_map.insert(float(i+min_reg_num), si) mesh.mapCellAttributes(slow_map) else: raise ValueError("Wrong no of parameters. 
Mesh size: {}, no " "of regions: {}, and number of slowness values:" "{}".format(self.mesh().cellCount(), param_count, len(slowness))) data = self.data() n_data = data.size() t_fmm = np.zeros(n_data) idx = 0 for source_idx in [0]: # np.unique(data("s")): # initialize source position and trvel time vector n_sensors = np.sum(data("s") == source_idx) # maybe not always same number of sensors source = data.sensorPosition(int(source_idx)) times = pg.RVector(mesh.nodeCount(), 0.) # initialize sets and tags # upwind, downwind = set(), set() downwind = set() upTags = np.zeros(mesh.nodeCount()) downTags = np.zeros(mesh.nodeCount()) # define initial condition cell = mesh.findCell(source) for i, n in enumerate(cell.nodes()): times[n.id()] = cell.attribute() * n.pos().distance(source) upTags[n.id()] = 1 for i, n in enumerate(cell.nodes()): tmpNodes = pg.commonNodes(n.cellSet()) for nn in tmpNodes: if not upTags[nn.id()] and not downTags[nn.id()]: downwind.add(nn) downTags[nn.id()] = 1 # start fast marching while len(downwind) > 0: fastMarch(mesh, downwind, times, upTags, downTags) self.timefields[source_idx] = np.array(times) sensor_idx = data("g")[data("s") == source_idx] t_fmm[idx:idx+n_sensors] = np.array( [times[mesh.findNearestNode(data.sensorPosition(int(i)))] for i in sensor_idx]) idx += n_sensors return t_fmm def createJacobian(self, model): """ Computes the jacobian matrix from the model. """ pass def _intersect_lines(self, l1, l2): """ Finds the parameters for which the two lines intersect. Assumes 2D lines! Parameters: ----------- l1, l2 : pygimli Line Line objects from pygimli. Useful because they nicely wrap a line and has some utility functions. Returns: -------- v : numpy array (length 2) The parameters (s and t) for l1 and l2, respectively. None if no intersection (i.e. parallell lines). 
""" # print("l1: {}".format(l1)) # print("l2: {}".format(l2)) # first just check if parallell epsilon = 1.0e-4 dir1 = l1.p1()-l1.p0() dir2 = l2.p1()-l2.p0() # print("dir1: {}, and length: {}".format(dir1, dir1.length())) # print("dir2: {}, and length: {}".format(dir2, dir2.length())) dir1 /= dir1.length() dir2 /= dir2.length() # print("dir1: {}, and length: {}".format(dir1, dir1.length())) # print("dir2: {}, and length: {}".format(dir2, dir2.length())) if abs(np.dot(dir1, dir2)) > 1.0 - epsilon: return np.array([1./epsilon, 1./epsilon]) # raise Warning("parallell lines!") # Solve system: Av = w, were v, w are vectors. v = (s,t) ndim = 2 A = np.ndarray((ndim, ndim)) A[0, 0] = l1.p1().x() - l1.p0().x() A[1, 0] = l1.p1().y() - l1.p0().y() A[0, 1] = -(l2.p1().x() - l2.p0().x()) A[1, 1] = -(l2.p1().y() - l2.p0().y()) w = np.array([l2.p0().x() - l1.p0().x(), l2.p0().y() - l1.p0().y()]) v = np.linalg.solve(A, w) if not np.allclose(np.dot(A, v), w): raise Warning("Problem with linear solver for intersection!") return v def _intersect_lines_by_points(self, p1, p2, p3, p4): """ Finds the parameters for which the two lines intersect. The lines are defined by four points. Assumes 2D lines! Parameters: ----------- p1, p2, p3, p4 : pygimli RVector3 Position objects from pygimli. The lines are defined as: l1 : P1 to P2 l2 : P3 to P4 Returns: -------- v : numpy array (length 2) The parameters (s and t) for l1 and l2, respectively. Will return "large" values if the lines are parallell. """ # first just check if parallell epsilon = 1.0e-4 dir1 = (p2 - p1).norm() dir2 = (p4 - p3).norm() if abs(np.dot(dir1, dir2)) > 1.0 - epsilon: return np.array([1./epsilon, 1./epsilon]) # raise Warning("parallell lines!") # Solve system: Av = w, were v, w are vectors. 
v = (s,t) ndim = 2 A = np.ndarray((ndim, ndim)) A[0, 0] = p2.x() - p1.x() A[1, 0] = p2.y() - p1.y() A[0, 1] = p3.x() - p4.x() A[1, 1] = p3.y() - p4.y() w = np.array([p3.x() - p1.x(), p3.y() - p1.y()]) v = np.linalg.solve(A, w) if not np.allclose(np.dot(A, v), w): raise Warning("Problem with linear solver for intersection!") return v[0], v[1] def _check_param(self, param, t_low=0, t_high=1.0): """ Returns the "proper" t-value from the list. It should be positive along with the corresponding index value. """ t_list = param[:, 0] par_pos = np.maximum(t_list, t_low) par_gt_eps = par_pos[par_pos > t_low+1e-5] print("t_list: {}\npar_pos: {}\npar_gt_eps: {}".format( t_list, par_pos, par_gt_eps)) stay_on_edge = False try: t = np.min(par_gt_eps) except ValueError: stay_on_edge = True t = max(t_list) idx = int(param[t_list == t, 1][0]) return t, idx, stay_on_edge def _check_param2(self, param, t_low=0, t_high=1.0): """ Returns the "proper" t-value from the list. It should be positive along with the corresponding index value. 
""" t_list = param[:, 0] par_pos = np.maximum(t_list, t_low) par_gt_eps = par_pos[par_pos > t_low+1e-5] print("t_list: {}\npar_pos: {}\npar_gt_eps: {}".format( t_list, par_pos, par_gt_eps)) stay_on_edge = False try: t = np.min(par_gt_eps) except ValueError: stay_on_edge = True t = max(t_list) idx = int(param[t_list == t, 1][0]) if np.all(t_list < 0): t = 1e-5 return t, idx, stay_on_edge def _get_new_cell(self, boundary, current): """ """ if boundary.leftCell().id() == current.id(): new_cell = boundary.rightCell() else: new_cell = boundary.leftCell() return new_cell def _get_new_cell2(self, boundary, current): """ """ if boundary.leftCell() is None or boundary.rightCell() is None: return current, False if boundary.leftCell().id() == current.id(): new_cell = boundary.rightCell() else: new_cell = boundary.leftCell() print(current.attribute(), new_cell.attribute()) fast_to_slow = new_cell.attribute() > current.attribute() return new_cell, fast_to_slow def _get_next_node(self, boundary, current_cell_id, ray_pos, ray_dir): """ Gets the next node in the case that the ray should follow an interface. Will decide which cell is the one that is travelled through by choosing the one with highest velocity. Parameters: ----------- boundary : pygimli Boundary The boundary we are coming from. current_cell_id : int The current cell index. ray_pos : pygimli RVector3 The origin of the ray. ray_dir : pygimli RVector3 Direction of the ray. Returns: -------- node_id : int The global node index. (Using the mesh numbering) cell_id : int The global cell index of the cell we will use. """ left = boundary.leftCell() right = boundary.rightCell() if left is not None: l_id = left.id() # boundary.leftCell().attribute() left_slowness = self.mesh().cell(l_id).attribute() else: l_id = None left_slowness = 10000. if right is not None: r_id = right.id() # boundary.rightCell().attribute() right_slowness = self.mesh().cell(r_id).attribute() else: r_id = None right_slowness = 10000. 
print("left slow: {}, right slow: {}".format( left_slowness, right_slowness)) # Pick the fastest cell if left_slowness < right_slowness: cell_id = l_id # boundary.leftCell().id() else: cell_id = r_id # boundary.rightCell().id() # pick the right direction to go line_segment = ray_pos - boundary.node(0).pos() if np.dot(line_segment, ray_dir) < 0.: node_id = boundary.node(0).id() else: <|fim▁hole|> def _trace_back(self, sensor_idx, source_idx, epsilon=1e-5): """ Traces a ray backwards through the mesh from a particular sensor towards the seismic source. """ msh = self.mesh() self.poslist = [] self._jac[source_idx] = np.zeros((msh.cellCount())) pos_offset = pg.RVector3(0., epsilon, 0.) sensor_pos = self.data().sensorPosition(sensor_idx) source_pos = self.data().sensorPosition(source_idx) source_node = msh.findNearestNode(source_pos) current_cell = msh.findCell(sensor_pos - pos_offset) new_cell = current_cell ray_origin = sensor_pos - pos_offset was_on_edge = False while ray_origin.dist(source_pos) > epsilon: self.poslist.append(ray_origin) if new_cell is None: print("Ended up outside mesh!") print("Last valid cell: {}".format(current_cell)) break # other_boundary = pg.findBoundary( # current_cell.node((node_idx+2)%nnodes), # current_cell.node((node_idx+1)%nnodes)) # new_cell = self._get_new_cell(other_boundary, current_cell) # gradient = current_cell.node((node_idx+1)%nnodes).pos() - # current_cell.node(node_idx).pos() else: old_cell_id = current_cell.id() # going to slower cell # if new_cell.attribute() > current_cell.attribute(): # gradient = current_cell.grad(current_cell.center(), # self.timefields[source_idx]) # else: # gradient = new_cell.grad(current_cell.center(), # self.timefields[source_idx]) current_cell = new_cell if not was_on_edge: gradient = current_cell.grad( current_cell.center(), self.timefields[source_idx]) else: was_on_edge = False print("Current cell: {}".format(current_cell.id())) # gradient = current_cell.grad(current_cell.center(), # 
self.timefields[source_idx]) # gradient_norm = -gradient / gradient.length() gradient_norm = -gradient.norm() nnodes = current_cell.nodeCount() params = np.zeros((nnodes, 2)) gradient_line = pg.Line(ray_origin, ray_origin + gradient_norm) for i in range(nnodes): if current_cell.node(i).id() == source_node: print("cell closest to source") params[i, :] = [ray_origin.dist(source_pos), i] break edge = pg.Line(current_cell.node(i).pos(), current_cell.node((i+1) % nnodes).pos()) # print("Grad: {}".format(gradient_line)) # print("Edge: {}".format(edge)) s_t = self._intersect_lines(gradient_line, edge) # print("s_t: {}".format(s_t)) params[i, :] = [s_t[0], i] t, node_idx, stay_on_edge = self._check_param(params) print("Stay on edge: {}".format(stay_on_edge)) boundary = pg.findBoundary( current_cell.node(node_idx), current_cell.node((node_idx+1) % nnodes)) if stay_on_edge: # break next_node_id, next_cell_id = self._get_next_node( boundary, current_cell.id(), ray_origin, gradient_norm) t = ray_origin.dist(msh.node(next_node_id).pos()) print("Current: {}, next: {}, t: {}".format( current_cell.id(), next_cell_id, t)) print("") self._jac[source_idx][next_cell_id] += t temp = msh.node(next_node_id).pos() - ray_origin ray_origin = msh.node(next_node_id).pos() + \ 1e-5 * temp.norm() - pg.RVector3(0.0, 1e-6, 0.0) # new_cell = mesh.cell(next_cell_id) new_cell = msh.findCell(ray_origin) was_on_edge = True # print("next_cell_id: {}, findCell: {}".format( # next_cell_id, new_cell.id())) else: # print("params: {}, t: {}, i: {}".format(params, t, node_idx)) # Save distance travelled in the cell (t) and update origin self._jac[source_idx][current_cell.id()] = t ray_origin = gradient_line.lineAt(t) # print("ray origin: {}".format(ray_origin)) new_cell = self._get_new_cell(boundary, current_cell) if new_cell.id() == old_cell_id: # If we keep jumping back and forth between two cells. 
print("Jumping back and forth...") break return self._jac if __name__ == '__main__': """ Currently, this script assumes that the data was generated with Dijkstra modelling and computes the differences between the FMM modelling. """ mesh = pg.Mesh('vagnh_fwd_mesh.bms') mesh.createNeighbourInfos() data = pg.DataContainer('vagnh_NONOISE.sgt', 's g') vel = [1400., 1700., 5000.] print(mesh) print(data) fwd = TravelTimeFMM(mesh, data, True) tic = time.time() t_fmm = fwd.response(1.0/np.array(vel)) print("Forward calculation time: {} seconds.".format(time.time()-tic)) delta_t = np.array(data("t")) - t_fmm # f, ax = plt.subplots() # x = pg.x(data.sensorPositions()) # ax.plot(abs(delta_t), 'r-.', label='abs. diff') # ax.plot(delta_t, 'b-', label='diff') # ax.legend(loc='best') # f.show() # raise SystemExit() l = fwd._trace_back(50, 0) fig, a = plt.subplots() drawMesh(a, mesh) pg.show(mesh, axes=a, data=l[0]) cells = fwd.mesh().cells() active_cells = [cells[i] for i in range(mesh.cellCount()) if l[0][i]] # active_cells.append(cells[2044]) for c in active_cells: pos = c.center() gradient = 2000*c.grad(pos, fwd.timefields[0]) dx, dy = gradient.x(), gradient.y() a.text(pos.x(), pos.y(), str(c.id())) a.arrow(pos.x(), pos.y(), dx, dy) ray = fwd.poslist a.plot(pg.x(ray), pg.y(ray), 'm-*', ) plt.show() # look at if next gradient contradicts the previous # if so, then follow the interface instead (line segment to next node) # this will stop when the gradients are more aligned. # drawMesh(a, mesh) # drawField(a, mesh, fwd.timefields[0], True, 'Spectral') # drawStreamLines(a, mesh, fwd.timefields[0], nx=50, ny=50) # some stats: diff_rms = np.sqrt(np.sum(delta_t**2)/len(delta_t)) print("RMS of difference: {}".format(diff_rms)) print("Mean of difference: {}".format(np.mean(delta_t))) print("Standard dev of difference: {}".format(np.std(delta_t))) print("Median of difference: {}".format(np.median(delta_t)))<|fim▁end|>
node_id = boundary.node(1).id() return node_id, cell_id
<|file_name|>tools.py<|end_file_name|><|fim▁begin|>import json import sys import logging import logging.handlers def load_config():<|fim▁hole|> '''Loads application configuration from a JSON file''' try: json_data = open('config.json') config = json.load(json_data) json_data.close() return config except Exception: print """There was an error loading config.json. Make sure that the file exists and it's a valid JSON file.""" sys.exit(1) def init_logger(file_name='clouddump.log'): ''' Initializes the logging file and module parameters ---------- file_name: A string with the name of the file to write the logs in ''' logger = logging.getLogger('clouddump') log_file_handler = logging.handlers.RotatingFileHandler( file_name, maxBytes = 10**9) log_format = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s') log_file_handler.setFormatter(log_format) logger.addHandler(log_file_handler) logger.setLevel(logging.DEBUG) if len(sys.argv) > 1: if sys.argv[1] == '-v' or sys.argv[1] == '--verbose': console = logging.StreamHandler() console.setLevel(logging.INFO) logger.addHandler(console)<|fim▁end|>
<|file_name|>upgrade.go<|end_file_name|><|fim▁begin|>package command import ( "fmt" "github.com/urfave/cli" ) func (f *CommandFactory) Upgrade() cli.Command { return cli.Command{ Name: "upgrade", Usage: "upgrade a Layer0 instance to a new version", ArgsUsage: "NAME VERSION", Flags: []cli.Flag{<|fim▁hole|> Name: "force", Usage: "skips confirmation prompt", }, }, Action: func(c *cli.Context) error { args, err := extractArgs(c.Args(), "NAME", "VERSION") if err != nil { return err } instance := f.NewInstance(args["NAME"]) if err := instance.Upgrade(args["VERSION"], c.Bool("force")); err != nil { return err } fmt.Printf("Everything looks good! You are now ready to run 'l0-setup apply %s'\n", args["NAME"]) return nil }, } }<|fim▁end|>
cli.BoolFlag{
<|file_name|>isogram.py<|end_file_name|><|fim▁begin|>def is_isogram(s): """ Determine if a word or phrase is an isogram. An isogram (also known as a "nonpattern word") is a word or phrase without a repeating letter. Examples of isograms: - lumberjacks - background<|fim▁hole|> """ from collections import Counter s = s.lower().strip() s = [c for c in s if c.isalpha()] counts = Counter(s).values() return max(counts or [1]) == 1<|fim▁end|>
- downstream
<|file_name|>role.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors # MIT License. See license.txt from __future__ import unicode_literals import frappe from frappe.model.document import Document desk_properties = ("search_bar", "notifications", "chat", "list_sidebar", "bulk_actions", "view_switcher", "form_sidebar", "timeline", "dashboard") class Role(Document): def before_rename(self, old, new, merge=False): if old in ("Guest", "Administrator", "System Manager", "All"): frappe.throw(frappe._("Standard roles cannot be renamed")) def after_insert(self): frappe.cache().hdel('roles', 'Administrator') def validate(self): if self.disabled: self.disable_role() else: self.set_desk_properties() def disable_role(self): if self.name in ("Guest", "Administrator", "System Manager", "All"): frappe.throw(frappe._("Standard roles cannot be disabled")) else: self.remove_roles() def set_desk_properties(self): # set if desk_access is not allowed, unset all desk properties if self.name == 'Guest': self.desk_access = 0 if not self.desk_access: for key in desk_properties: self.set(key, 0) def remove_roles(self): frappe.db.sql("delete from `tabHas Role` where role = %s", self.name) frappe.clear_cache() def on_update(self): '''update system user desk access if this has changed in this update''' if frappe.flags.in_install: return if self.has_value_changed('desk_access'): for user_name in get_users(self.name): user = frappe.get_doc('User', user_name) user_type = user.user_type user.set_system_user() if user_type != user.user_type: user.save() def get_info_based_on_role(role, field='email'): ''' Get information of all users that have been assigned this role ''' users = frappe.get_list("Has Role", filters={"role": role, "parenttype": "User"}, fields=["parent as user_name"]) return get_user_info(users, field) def get_user_info(users, field='email'): ''' Fetch details about users for the specified field '''<|fim▁hole|> 
info_list.append(user_info) return info_list def get_users(role): return [d.parent for d in frappe.get_all("Has Role", filters={"role": role, "parenttype": "User"}, fields=["parent"])] # searches for active employees @frappe.whitelist() @frappe.validate_and_sanitize_search_inputs def role_query(doctype, txt, searchfield, start, page_len, filters): report_filters = [['Role', 'name', 'like', '%{}%'.format(txt)], ['Role', 'is_custom', '=', 0]] if filters and isinstance(filters, list): report_filters.extend(filters) return frappe.get_all('Role', limit_start=start, limit_page_length=page_len, filters=report_filters, as_list=1)<|fim▁end|>
info_list = [] for user in users: user_info, enabled = frappe.db.get_value("User", user.get("user_name"), [field, "enabled"]) if enabled and user_info not in ["admin@example.com", "guest@example.com"]:
<|file_name|>funcd.py<|end_file_name|><|fim▁begin|># Based on local.py (c) 2012, Michael DeHaan <michael.dehaan@gmail.com> # Based on chroot.py (c) 2013, Maykel Moya <mmoya@speedyrails.com> # Copyright (c) 2013, Michael Scherer <misc@zarb.org> # Copyright (c) 2017 Ansible Project # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import (absolute_import, division, print_function) __metaclass__ = type DOCUMENTATION = """ author: Michael Scherer (@msherer) <misc@zarb.org> connection: funcd short_description: Use funcd to connect to target description: - This transport permits you to use Ansible over Func. - For people who have already setup func and that wish to play with ansible, this permit to move gradually to ansible without having to redo completely the setup of the network. version_added: "1.1" options: remote_addr: description: - The path of the chroot you want to access. default: inventory_hostname vars: - name: ansible_host - name: ansible_func_host """ HAVE_FUNC = False try: import func.overlord.client as fc HAVE_FUNC = True except ImportError: pass import os import tempfile import shutil from ansible.errors import AnsibleError from ansible.utils.display import Display display = Display() class Connection(object): ''' Func-based connections ''' has_pipelining = False def __init__(self, runner, host, port, *args, **kwargs): self.runner = runner self.host = host # port is unused, this go on func self.port = port<|fim▁hole|> raise AnsibleError("func is not installed") self.client = fc.Client(self.host) return self def exec_command(self, cmd, become_user=None, sudoable=False, executable='/bin/sh', in_data=None): ''' run a command on the remote minion ''' if in_data: raise AnsibleError("Internal Error: this module does not support optimized module pipelining") # totally ignores privlege escalation display.vvv("EXEC %s" % (cmd), host=self.host) p = self.client.command.run(cmd)[self.host] return (p[0], 
p[1], p[2]) def _normalize_path(self, path, prefix): if not path.startswith(os.path.sep): path = os.path.join(os.path.sep, path) normpath = os.path.normpath(path) return os.path.join(prefix, normpath[1:]) def put_file(self, in_path, out_path): ''' transfer a file from local to remote ''' out_path = self._normalize_path(out_path, '/') display.vvv("PUT %s TO %s" % (in_path, out_path), host=self.host) self.client.local.copyfile.send(in_path, out_path) def fetch_file(self, in_path, out_path): ''' fetch a file from remote to local ''' in_path = self._normalize_path(in_path, '/') display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self.host) # need to use a tmp dir due to difference of semantic for getfile # ( who take a # directory as destination) and fetch_file, who # take a file directly tmpdir = tempfile.mkdtemp(prefix="func_ansible") self.client.local.getfile.get(in_path, tmpdir) shutil.move(os.path.join(tmpdir, self.host, os.path.basename(in_path)), out_path) shutil.rmtree(tmpdir) def close(self): ''' terminate the connection; nothing to do here ''' pass<|fim▁end|>
def connect(self, port=None): if not HAVE_FUNC:
<|file_name|>math.hpp<|end_file_name|><|fim▁begin|>#pragma once #include <cmath> <|fim▁hole|>const double M_PI = 3.14159265358979323846264338327950288; #endif #endif const double M_2PI = M_PI * 2.0; inline float _fmod( float x, float y ) { return fmod( fmod( x, y ) + y, y ); }<|fim▁end|>
// windows MinGW fix #ifdef __MINGW32__ #ifndef M_PI
<|file_name|>optrecurse.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2002-2006 Zuza Software Foundation # # This file is part of translate. # # translate is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # translate is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, see <http://www.gnu.org/licenses/>. import fnmatch import logging import optparse import os.path import re import six import sys import traceback from io import BytesIO from translate import __version__ from translate.misc import progressbar class ManPageOption(optparse.Option, object): ACTIONS = optparse.Option.ACTIONS + ("manpage",) def take_action(self, action, dest, opt, value, values, parser): """take_action that can handle manpage as well as standard actions""" if action == "manpage": parser.print_manpage() sys.exit(0) return super(ManPageOption, self).take_action(action, dest, opt, value, values, parser) class ManHelpFormatter(optparse.HelpFormatter): def __init__(self, indent_increment=0, max_help_position=0, width=80, short_first=1): optparse.HelpFormatter.__init__( self, indent_increment, max_help_position, width, short_first) def format_option_strings(self, option): """Return a comma-separated list of option strings & metavariables.""" if option.takes_value(): metavar = option.metavar or option.dest.upper() metavar = '\\fI%s\\fP' % metavar short_opts = [sopt + metavar for sopt in option._short_opts] long_opts = [lopt + "\\fR=\\fP" + metavar for lopt in option._long_opts] else: short_opts = 
option._short_opts long_opts = option._long_opts if self.short_first: opts = short_opts + long_opts else: opts = long_opts + short_opts return '\\fB%s\\fP' % ("\\fR, \\fP".join(opts)) class RecursiveOptionParser(optparse.OptionParser, object): """A specialized Option Parser for recursing through directories.""" def __init__(self, formats, usetemplates=False, allowmissingtemplate=False, description=None): """Construct the specialized Option Parser. :type formats: Dictionary :param formats: See :meth:`~.RecursiveOptionParser.setformats` for an explanation of the formats parameter. """ optparse.OptionParser.__init__(self, version="%prog " + __version__.sver, description=description) self.setmanpageoption() self.setprogressoptions() self.seterrorleveloptions() self.setformats(formats, usetemplates) self.passthrough = [] self.allowmissingtemplate = allowmissingtemplate logging.basicConfig(format="%(name)s: %(levelname)s: %(message)s") def get_prog_name(self): return os.path.basename(sys.argv[0]) def setmanpageoption(self): """creates a manpage option that allows the optionparser to generate a manpage""" manpageoption = ManPageOption(None, "--manpage", dest="manpage", default=False, action="manpage", help="output a manpage based on the help") self.define_option(manpageoption) def format_manpage(self): """returns a formatted manpage""" result = [] prog = self.get_prog_name() formatprog = lambda x: x.replace("%prog", prog) formatToolkit = lambda x: x.replace("%prog", "Translate Toolkit") result.append('.\\" Autogenerated manpage\n') result.append('.TH %s 1 "%s" "" "%s"\n' % (prog, formatToolkit(self.version), formatToolkit(self.version))) result.append('.SH NAME\n') result.append('%s \\- %s\n' % (self.get_prog_name(), self.description.split('\n\n')[0])) result.append('.SH SYNOPSIS\n') result.append('.PP\n') usage = "\\fB%prog " usage += " ".join([self.getusageman(option) for option in self.option_list]) usage += "\\fP" result.append('%s\n' % formatprog(usage)) 
description_lines = self.description.split('\n\n')[1:] if description_lines: result.append('.SH DESCRIPTION\n') result.append('\n\n'.join([re.sub('\.\. note::', 'Note:', l) for l in description_lines])) result.append('.SH OPTIONS\n') ManHelpFormatter().store_option_strings(self) result.append('.PP\n') for option in self.option_list: result.append('.TP\n') result.append('%s\n' % str(option).replace('-', '\-')) result.append('%s\n' % option.help.replace('-', '\-')) return "".join(result) def print_manpage(self, file=None): """outputs a manpage for the program using the help information""" if file is None: file = sys.stdout file.write(self.format_manpage()) def set_usage(self, usage=None): """sets the usage string - if usage not given, uses getusagestring for each option""" if usage is None: self.usage = "%prog " + " ".join([self.getusagestring(option) for option in self.option_list]) else: super(RecursiveOptionParser, self).set_usage(usage) def warning(self, msg, options=None, exc_info=None): """Print a warning message incorporating 'msg' to stderr and exit.""" if options: if options.errorlevel == "traceback": errorinfo = "\n".join(traceback.format_exception(exc_info[0], exc_info[1], exc_info[2])) elif options.errorlevel == "exception": errorinfo = "\n".join(traceback.format_exception_only(exc_info[0], exc_info[1])) elif options.errorlevel == "message": errorinfo = str(exc_info[1]) else: errorinfo = "" if errorinfo: msg += ": " + errorinfo logging.getLogger(self.get_prog_name()).warning(msg) def getusagestring(self, option): """returns the usage string for the given option""" optionstring = "|".join(option._short_opts + option._long_opts) if getattr(option, "optionalswitch", False): optionstring = "[%s]" % optionstring if option.metavar: optionstring += " " + option.metavar if getattr(option, "required", False): return optionstring else: return "[%s]" % optionstring def getusageman(self, option): """returns the usage string for the given option""" optionstring = 
"\\fR|\\fP".join(option._short_opts + option._long_opts) if getattr(option, "optionalswitch", False): optionstring = "\\fR[\\fP%s\\fR]\\fP" % optionstring if option.metavar: optionstring += " \\fI%s\\fP" % option.metavar if getattr(option, "required", False): return optionstring else: return "\\fR[\\fP%s\\fR]\\fP" % optionstring def define_option(self, option): """Defines the given option, replacing an existing one of the same short name if neccessary...""" for short_opt in option._short_opts: if self.has_option(short_opt): self.remove_option(short_opt) for long_opt in option._long_opts: if self.has_option(long_opt): self.remove_option(long_opt) self.add_option(option) def setformats(self, formats, usetemplates): """Sets the format options using the given format dictionary. :type formats: Dictionary :param formats: The dictionary *keys* should be: - Single strings (or 1-tuples) containing an input format (if not *usetemplates*) - Tuples containing an input format and template format (if *usetemplates*) - Formats can be *None* to indicate what to do with standard input The dictionary *values* should be tuples of outputformat (string) and processor method. 
""" inputformats = [] outputformats = [] templateformats = [] self.outputoptions = {} self.usetemplates = usetemplates for formatgroup, outputoptions in six.iteritems(formats): if isinstance(formatgroup, (str, unicode)) or formatgroup is None: formatgroup = (formatgroup, ) if not isinstance(formatgroup, tuple): raise ValueError("formatgroups must be tuples or None/str/unicode") if len(formatgroup) < 1 or len(formatgroup) > 2: raise ValueError("formatgroups must be tuples of length 1 or 2") if len(formatgroup) == 1: formatgroup += (None, ) inputformat, templateformat = formatgroup if not isinstance(outputoptions, tuple) or len(outputoptions) != 2: raise ValueError("output options must be tuples of length 2") outputformat, processor = outputoptions if not inputformat in inputformats: inputformats.append(inputformat) if not outputformat in outputformats: outputformats.append(outputformat) if not templateformat in templateformats: templateformats.append(templateformat) self.outputoptions[(inputformat, templateformat)] = (outputformat, processor) self.inputformats = inputformats inputformathelp = self.getformathelp(inputformats) inputoption = optparse.Option("-i", "--input", dest="input", default=None, metavar="INPUT", help="read from INPUT in %s" % (inputformathelp)) inputoption.optionalswitch = True inputoption.required = True self.define_option(inputoption) excludeoption = optparse.Option("-x", "--exclude", dest="exclude", action="append", type="string", metavar="EXCLUDE", default=["CVS", ".svn", "_darcs", ".git", ".hg", ".bzr"], help="exclude names matching EXCLUDE from input paths") self.define_option(excludeoption) outputformathelp = self.getformathelp(outputformats) outputoption = optparse.Option("-o", "--output", dest="output", default=None, metavar="OUTPUT", help="write to OUTPUT in %s" % (outputformathelp)) outputoption.optionalswitch = True outputoption.required = True self.define_option(outputoption) if self.usetemplates: self.templateformats = 
templateformats templateformathelp = self.getformathelp(self.templateformats) templateoption = optparse.Option("-t", "--template", dest="template", default=None, metavar="TEMPLATE", help="read from TEMPLATE in %s" % (templateformathelp)) self.define_option(templateoption) def setprogressoptions(self): """Sets the progress options.""" self.progresstypes = { "none": progressbar.NoProgressBar, "bar": progressbar.HashProgressBar, "dots": progressbar.DotsProgressBar, "names": progressbar.MessageProgressBar, "verbose": progressbar.VerboseProgressBar, } progressoption = optparse.Option(None, "--progress", dest="progress", default="bar", choices=self.progresstypes.keys(), metavar="PROGRESS", help="show progress as: %s" % (", ".join(self.progresstypes))) self.define_option(progressoption) def seterrorleveloptions(self): """Sets the errorlevel options.""" self.errorleveltypes = ["none", "message", "exception", "traceback"] errorleveloption = optparse.Option(None, "--errorlevel", dest="errorlevel", default="message", choices=self.errorleveltypes, metavar="ERRORLEVEL", help="show errorlevel as: %s" % (", ".join(self.errorleveltypes))) self.define_option(errorleveloption) def getformathelp(self, formats): """Make a nice help string for describing formats...""" formats = sorted(formats) if None in formats: formats = filter(lambda format: format is not None, formats) if len(formats) == 0: return "" elif len(formats) == 1: return "%s format" % (", ".join(formats)) else: return "%s formats" % (", ".join(formats)) def isrecursive(self, fileoption, filepurpose='input'): """Checks if fileoption is a recursive file.""" if fileoption is None: return False elif isinstance(fileoption, list): return True else: return os.path.isdir(fileoption) def parse_args(self, args=None, values=None): """Parses the command line options, handling implicit input/output args.""" (options, args) = super(RecursiveOptionParser, self).parse_args(args, values) # some intelligent as to what reasonable people 
might give on the # command line if args and not options.input: if len(args) > 1: options.input = args[:-1] args = args[-1:] else: options.input = args[0] args = [] if args and not options.output: options.output = args[-1] args = args[:-1] if args: self.error("You have used an invalid combination of --input, --output and freestanding args") if isinstance(options.input, list) and len(options.input) == 1: options.input = options.input[0] if options.input is None: self.error("You need to give an inputfile or use - for stdin ; use --help for full usage instructions") elif options.input == '-': options.input = None return (options, args) def getpassthroughoptions(self, options): """Get the options required to pass to the filtermethod...""" passthroughoptions = {} for optionname in dir(options): if optionname in self.passthrough: passthroughoptions[optionname] = getattr(options, optionname) return passthroughoptions def getoutputoptions(self, options, inputpath, templatepath): """Works out which output format and processor method to use...""" if inputpath: inputbase, inputext = self.splitinputext(inputpath) else: inputext = None if templatepath: templatebase, templateext = self.splittemplateext(templatepath) else: templateext = None if (inputext, templateext) in options.outputoptions: return options.outputoptions[inputext, templateext] elif (inputext, "*") in options.outputoptions: outputformat, fileprocessor = options.outputoptions[inputext, "*"] elif ("*", templateext) in options.outputoptions: outputformat, fileprocessor = options.outputoptions["*", templateext] elif ("*", "*") in options.outputoptions: outputformat, fileprocessor = options.outputoptions["*", "*"] elif (inputext, None) in options.outputoptions: return options.outputoptions[inputext, None] elif (None, templateext) in options.outputoptions: return options.outputoptions[None, templateext] elif ("*", None) in options.outputoptions: outputformat, fileprocessor = options.outputoptions["*", None] elif (None, 
"*") in options.outputoptions: outputformat, fileprocessor = options.outputoptions[None, "*"] else: if self.usetemplates: if inputext is None: raise ValueError("don't know what to do with input format (no file extension), no template file") elif templateext is None: raise ValueError("don't know what to do with input format %s, no template file" % (os.extsep + inputext)) else: raise ValueError("don't know what to do with input format %s, template format %s" % (os.extsep + inputext, os.extsep + templateext)) else: raise ValueError("don't know what to do with input format %s" % (os.extsep + inputext)) if outputformat == "*": if inputext: outputformat = inputext elif templateext: outputformat = templateext elif ("*", "*") in options.outputoptions: outputformat = None else: if self.usetemplates: raise ValueError("don't know what to do with input format (no file extension), no template file") else: raise ValueError("don't know what to do with input format (no file extension)") return outputformat, fileprocessor def initprogressbar(self, allfiles, options): """Sets up a progress bar appropriate to the options and files.""" if options.progress in ('bar', 'verbose'): self.progressbar = \ self.progresstypes[options.progress](0, len(allfiles)) # should use .getChild("progress") but that is only in 2.7 logger = logging.getLogger(self.get_prog_name() + ".progress") logger.setLevel(logging.INFO) logger.propagate = False handler = logging.StreamHandler() handler.setLevel(logging.INFO) handler.setFormatter(logging.Formatter()) logger.addHandler(handler) logger.info("processing %d files...", len(allfiles)) else: self.progressbar = self.progresstypes[options.progress]() def getfullinputpath(self, options, inputpath): """Gets the absolute path to an input file.""" if options.input: return os.path.join(options.input, inputpath) else: return inputpath def getfulloutputpath(self, options, outputpath): """Gets the absolute path to an output file.""" if options.recursiveoutput and 
options.output: return os.path.join(options.output, outputpath) else: return outputpath def getfulltemplatepath(self, options, templatepath): """Gets the absolute path to a template file.""" if not options.recursivetemplate: return templatepath elif (templatepath is not None and self.usetemplates and options.template): return os.path.join(options.template, templatepath) else: return None def run(self): """Parses the arguments, and runs recursiveprocess with the resulting options...""" (options, args) = self.parse_args() # this is so derived classes can modify the inputformats etc based on # the options options.inputformats = self.inputformats options.outputoptions = self.outputoptions self.recursiveprocess(options) def recursiveprocess(self, options): """Recurse through directories and process files.""" if self.isrecursive(options.input, 'input') and getattr(options, "allowrecursiveinput", True): if not self.isrecursive(options.output, 'output'): if not options.output: self.error(optparse.OptionValueError("No output directory given")) try: self.warning("Output directory does not exist. 
Attempting to create") os.mkdir(options.output) except IOError as e: self.error(optparse.OptionValueError("Output directory does not exist, attempt to create failed")) if isinstance(options.input, list): inputfiles = self.recurseinputfilelist(options) else: inputfiles = self.recurseinputfiles(options) else: if options.input: inputfiles = [os.path.basename(options.input)] options.input = os.path.dirname(options.input) else: inputfiles = [options.input] options.recursiveoutput = (self.isrecursive(options.output, 'output') and getattr(options, "allowrecursiveoutput", True)) options.recursivetemplate = (self.usetemplates and self.isrecursive(options.template, 'template') and getattr(options, "allowrecursivetemplate", True)) self.initprogressbar(inputfiles, options) for inputpath in inputfiles: try: templatepath = self.gettemplatename(options, inputpath) # If we have a recursive template, but the template doesn't # have this input file, let's drop it. if (options.recursivetemplate and templatepath is None and not self.allowmissingtemplate): self.warning("No template at %s. Skipping %s." 
% (templatepath, inputpath)) continue outputformat, fileprocessor = self.getoutputoptions(options, inputpath, templatepath) fullinputpath = self.getfullinputpath(options, inputpath) fulltemplatepath = self.getfulltemplatepath(options, templatepath) outputpath = self.getoutputname(options, inputpath, outputformat) fulloutputpath = self.getfulloutputpath(options, outputpath) if options.recursiveoutput and outputpath: self.checkoutputsubdir(options, os.path.dirname(outputpath)) except Exception: self.warning("Couldn't handle input file %s" % inputpath, options, sys.exc_info()) continue try: success = self.processfile(fileprocessor, options, fullinputpath, fulloutputpath, fulltemplatepath) except Exception: self.warning("Error processing: input %s, output %s, template %s" % (fullinputpath, fulloutputpath, fulltemplatepath), options, sys.exc_info()) success = False self.reportprogress(inputpath, success) del self.progressbar def openinputfile(self, options, fullinputpath): """Opens the input file.""" if fullinputpath is None: return sys.stdin return open(fullinputpath, 'r') def openoutputfile(self, options, fulloutputpath): """Opens the output file.""" if fulloutputpath is None: return sys.stdout return open(fulloutputpath, 'w') def opentempoutputfile(self, options, fulloutputpath): """Opens a temporary output file.""" return BytesIO() def finalizetempoutputfile(self, options, outputfile, fulloutputpath): """Write the temp outputfile to its final destination.""" outputfile.reset() outputstring = outputfile.read() outputfile = self.openoutputfile(options, fulloutputpath) outputfile.write(outputstring) outputfile.close() def opentemplatefile(self, options, fulltemplatepath): """Opens the template file (if required).""" if fulltemplatepath is not None: if os.path.isfile(fulltemplatepath): return open(fulltemplatepath, 'r') else: self.warning("missing template file %s" % fulltemplatepath) return None def processfile(self, fileprocessor, options, fullinputpath, 
fulloutputpath, fulltemplatepath): """Process an individual file.""" inputfile = self.openinputfile(options, fullinputpath) if (fulloutputpath and fulloutputpath in (fullinputpath, fulltemplatepath)): outputfile = self.opentempoutputfile(options, fulloutputpath) tempoutput = True else: outputfile = self.openoutputfile(options, fulloutputpath) tempoutput = False templatefile = self.opentemplatefile(options, fulltemplatepath) passthroughoptions = self.getpassthroughoptions(options) if fileprocessor(inputfile, outputfile, templatefile, **passthroughoptions): if tempoutput: self.warning("writing to temporary output...") self.finalizetempoutputfile(options, outputfile, fulloutputpath) return True else: # remove the file if it is a file (could be stdout etc) if fulloutputpath and os.path.isfile(fulloutputpath): outputfile.close() os.unlink(fulloutputpath) return False def reportprogress(self, filename, success): """Shows that we are progressing...""" self.progressbar.amount += 1 self.progressbar.show(filename) def mkdir(self, parent, subdir): """Makes a subdirectory (recursively if neccessary).""" if not os.path.isdir(parent): raise ValueError("cannot make child directory %r if parent %r does not exist" % (subdir, parent)) currentpath = parent subparts = subdir.split(os.sep) for part in subparts: currentpath = os.path.join(currentpath, part) if not os.path.isdir(currentpath): os.mkdir(currentpath) def checkoutputsubdir(self, options, subdir): """Checks to see if subdir under options.output needs to be created, creates if neccessary.""" fullpath = os.path.join(options.output, subdir) if not os.path.isdir(fullpath): self.mkdir(options.output, subdir) def isexcluded(self, options, inputpath): """Checks if this path has been excluded.""" basename = os.path.basename(inputpath) for excludename in options.exclude: if fnmatch.fnmatch(basename, excludename): return True return False def recurseinputfilelist(self, options): """Use a list of files, and find a common base directory 
for them.""" # find a common base directory for the files to do everything # relative to commondir = os.path.dirname(os.path.commonprefix(options.input)) inputfiles = [] for inputfile in options.input: if self.isexcluded(options, inputfile): continue if inputfile.startswith(commondir + os.sep): inputfiles.append(inputfile.replace(commondir + os.sep, "", 1)) else: inputfiles.append(inputfile.replace(commondir, "", 1)) options.input = commondir return inputfiles def recurseinputfiles(self, options): """Recurse through directories and return files to be processed.""" dirstack = [''] join = os.path.join inputfiles = [] while dirstack: top = dirstack.pop(-1) names = os.listdir(join(options.input, top)) dirs = [] for name in names: inputpath = join(top, name) if self.isexcluded(options, inputpath): continue fullinputpath = self.getfullinputpath(options, inputpath) # handle directories... if os.path.isdir(fullinputpath): dirs.append(inputpath) elif os.path.isfile(fullinputpath): if not self.isvalidinputname(options, name): # only handle names that match recognized input # file extensions continue inputfiles.append(inputpath) # make sure the directories are processed next time round. dirs.reverse() dirstack.extend(dirs) return inputfiles def splitext(self, pathname): """Splits *pathname* into name and ext, and removes the extsep. 
:param pathname: A file path :type pathname: string :return: root, ext :rtype: tuple """ root, ext = os.path.splitext(pathname) ext = ext.replace(os.extsep, "", 1) return (root, ext) def splitinputext(self, inputpath): """Splits an *inputpath* into name and extension.""" return self.splitext(inputpath) def splittemplateext(self, templatepath): """Splits a *templatepath* into name and extension.""" return self.splitext(templatepath)<|fim▁hole|> def templateexists(self, options, templatepath): """Returns whether the given template exists...""" fulltemplatepath = self.getfulltemplatepath(options, templatepath) return os.path.isfile(fulltemplatepath) def gettemplatename(self, options, inputname): """Gets an output filename based on the input filename.""" if not self.usetemplates: return None if not inputname or not options.recursivetemplate: return options.template inputbase, inputext = self.splitinputext(inputname) if options.template: for inputext1, templateext1 in options.outputoptions: if inputext == inputext1: if templateext1: templatepath = inputbase + os.extsep + templateext1 if self.templateexists(options, templatepath): return templatepath if "*" in options.inputformats: for inputext1, templateext1 in options.outputoptions: if (inputext == inputext1) or (inputext1 == "*"): if templateext1 == "*": templatepath = inputname if self.templateexists(options, templatepath): return templatepath elif templateext1: templatepath = inputbase + os.extsep + templateext1 if self.templateexists(options, templatepath): return templatepath return None def getoutputname(self, options, inputname, outputformat): """Gets an output filename based on the input filename.""" if not inputname or not options.recursiveoutput: return options.output inputbase, inputext = self.splitinputext(inputname) outputname = inputbase if outputformat: outputname += os.extsep + outputformat return outputname def isvalidinputname(self, options, inputname): """Checks if this is a valid input filename.""" 
inputbase, inputext = self.splitinputext(inputname) return ((inputext in options.inputformats) or ("*" in options.inputformats))<|fim▁end|>
<|file_name|>VisitorVo.java<|end_file_name|><|fim▁begin|>package com.baidu.disconf.web.service.user.vo;<|fim▁hole|> private Long id; private String name; private String role; public Long getId() { return id; } public void setId(Long id) { this.id = id; } public String getName() { return name; } public void setName(String name) { this.name = name; } public String getRole() { return role; } public void setRole(String role) { this.role = role; } @Override public String toString() { return "VisitorVo [id=" + id + ", name=" + name + ", role=" + role + "]"; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((id == null) ? 0 : id.hashCode()); result = prime * result + ((name == null) ? 0 : name.hashCode()); result = prime * result + ((role == null) ? 0 : role.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } VisitorVo other = (VisitorVo) obj; if (id == null) { if (other.id != null) { return false; } } else if (!id.equals(other.id)) { return false; } if (name == null) { if (other.name != null) { return false; } } else if (!name.equals(other.name)) { return false; } if (role == null) { if (other.role != null) { return false; } } else if (!role.equals(other.role)) { return false; } return true; } }<|fim▁end|>
public class VisitorVo {
<|file_name|>tempstorage.py<|end_file_name|><|fim▁begin|>import os.path from uuid import uuid4 import shutil import logging logger = logging.getLogger(__name__) _MARKER = object() class FileUploadTempStore(object): session_storage_slug = 'websauna.tempstore' def __init__(self, request): self.tempdir = request.registry.settings['websauna.uploads_tempdir'] if os.path.os.makedirs(self.tempdir, mode=0o777, exist_ok=True): logger.warning("Creating dir: '%s'", self.tempdir) self.request = request self.session = request.session def preview_url(self, _uid): # pylint: disable=no-self-use return None def __contains__(self, name): return name in self.session.get(self.session_storage_slug, {}) def __setitem__(self, name, data): newdata = data.copy() stream = newdata.pop('fp', None) if stream is not None: newdata['randid'] = uuid4().hex file_name = os.path.join(self.tempdir, newdata['randid']) shutil.copyfileobj(stream, open(file_name, 'wb')) self._tempstore_set(name, newdata) def _tempstore_set(self, name, data): # cope with sessioning implementations that cant deal with # in-place mutation of mutable values (temporarily?) existing = self.session.get(self.session_storage_slug, {}) existing[name] = data<|fim▁hole|> for cookie in data.items(): randid = cookie.get('randid') file_name = os.path.join(self.tempdir, randid) try: os.remove(file_name) except OSError: pass def get(self, name, default=None): data = self.session.get(self.session_storage_slug, {}).get(name) if data is None: return default newdata = data.copy() randid = newdata.get('randid') if randid is not None: file_name = os.path.join(self.tempdir, randid) try: newdata['fp'] = open(file_name, 'rb') except IOError: pass return newdata def __getitem__(self, name): data = self.get(name, _MARKER) if data is _MARKER: raise KeyError(name) return data<|fim▁end|>
self.session[self.session_storage_slug] = existing def clear(self): data = self.session.pop('substanced.tempstore', {})
<|file_name|>AbstractEigenvectorModel.js<|end_file_name|><|fim▁begin|>var clover = new Object(); // JSON: {classes : [{name, id, sl, el, methods : [{sl, el}, ...]}, ...]} clover.pageData = {"classes":[{"el":46,"id":86762,"methods":[{"el":38,"sc":2,"sl":36}],"name":"AbstractEigenvectorModel","sl":32}]} // JSON: {test_ID : {"methods": [ID1, ID2, ID3...], "name" : "testXXX() void"}, ...};<|fim▁hole|>// JSON: { lines : [{tests : [testid1, testid2, testid3, ...]}, ...]}; clover.srcFileLines = [[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []]<|fim▁end|>
clover.testTargets = {}
<|file_name|>JavaBinary.java<|end_file_name|><|fim▁begin|>/* * Copyright 2012-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.jvm.java; import static com.facebook.buck.rules.BuildableProperties.Kind.PACKAGING; import com.facebook.buck.io.DirectoryTraverser; import com.facebook.buck.model.BuildTargets; import com.facebook.buck.rules.AbstractBuildRule; import com.facebook.buck.rules.AddToRuleKey; import com.facebook.buck.rules.BinaryBuildRule; import com.facebook.buck.rules.BuildContext; import com.facebook.buck.rules.BuildRuleParams; import com.facebook.buck.rules.BuildRules; import com.facebook.buck.rules.BuildTargetSourcePath; import com.facebook.buck.rules.BuildableContext; import com.facebook.buck.rules.BuildableProperties; import com.facebook.buck.rules.CommandTool; import com.facebook.buck.rules.RuleKeyAppendable; import com.facebook.buck.rules.RuleKeyBuilder; import com.facebook.buck.rules.SourcePath; import com.facebook.buck.rules.SourcePathResolver; import com.facebook.buck.rules.SourcePaths; import com.facebook.buck.rules.Tool; import com.facebook.buck.step.Step; import com.facebook.buck.step.fs.MakeCleanDirectoryStep; import com.facebook.buck.step.fs.MkdirAndSymlinkFileStep; import com.facebook.buck.step.fs.MkdirStep; import com.google.common.base.Preconditions; import com.google.common.collect.FluentIterable; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import 
com.google.common.collect.ImmutableSetMultimap; import com.google.common.collect.ImmutableSortedSet; import java.nio.file.Path; import java.nio.file.Paths; import javax.annotation.Nullable; @BuildsAnnotationProcessor public class JavaBinary extends AbstractBuildRule implements BinaryBuildRule, HasClasspathEntries, RuleKeyAppendable { private static final BuildableProperties OUTPUT_TYPE = new BuildableProperties(PACKAGING); @AddToRuleKey @Nullable private final String mainClass; @AddToRuleKey @Nullable private final SourcePath manifestFile; private final boolean mergeManifests; @Nullable private final Path metaInfDirectory; @AddToRuleKey private final ImmutableSet<String> blacklist; private final DirectoryTraverser directoryTraverser; private final ImmutableSetMultimap<JavaLibrary, Path> transitiveClasspathEntries; public JavaBinary( BuildRuleParams params, SourcePathResolver resolver, @Nullable String mainClass, @Nullable SourcePath manifestFile, boolean mergeManifests, @Nullable Path metaInfDirectory, ImmutableSet<String> blacklist, DirectoryTraverser directoryTraverser, ImmutableSetMultimap<JavaLibrary, Path> transitiveClasspathEntries) { super(params, resolver); this.mainClass = mainClass; this.manifestFile = manifestFile; this.mergeManifests = mergeManifests; this.metaInfDirectory = metaInfDirectory; this.blacklist = blacklist; this.directoryTraverser = directoryTraverser; this.transitiveClasspathEntries = transitiveClasspathEntries; } @Override public BuildableProperties getProperties() { return OUTPUT_TYPE; } @Override public RuleKeyBuilder appendToRuleKey(RuleKeyBuilder builder) { // Build a sorted set so that metaInfDirectory contents are listed in a canonical order. 
ImmutableSortedSet.Builder<Path> paths = ImmutableSortedSet.naturalOrder(); BuildRules.addInputsToSortedSet(metaInfDirectory, paths, directoryTraverser); return builder.setReflectively( "metaInfDirectory", FluentIterable.from(paths.build()) .transform(SourcePaths.toSourcePath(getProjectFilesystem()))); } @Override public ImmutableList<Step> getBuildSteps( BuildContext context, BuildableContext buildableContext) { ImmutableList.Builder<Step> commands = ImmutableList.builder(); Path outputDirectory = getOutputDirectory(); Step mkdir = new MkdirStep(getProjectFilesystem(), outputDirectory); commands.add(mkdir); ImmutableSortedSet<Path> includePaths; if (metaInfDirectory != null) { Path stagingRoot = outputDirectory.resolve("meta_inf_staging"); Path stagingTarget = stagingRoot.resolve("META-INF"); MakeCleanDirectoryStep createStagingRoot = new MakeCleanDirectoryStep( getProjectFilesystem(), stagingRoot); commands.add(createStagingRoot); MkdirAndSymlinkFileStep link = new MkdirAndSymlinkFileStep( getProjectFilesystem(), metaInfDirectory, stagingTarget); commands.add(link); includePaths = ImmutableSortedSet.<Path>naturalOrder() .add(stagingRoot) .addAll(getTransitiveClasspathEntries().values()) .build(); } else { includePaths = ImmutableSortedSet.copyOf(getTransitiveClasspathEntries().values()); } Path outputFile = getPathToOutput(); Path manifestPath = manifestFile == null ? 
null : getResolver().getAbsolutePath(manifestFile); Step jar = new JarDirectoryStep( getProjectFilesystem(), outputFile, includePaths, mainClass, manifestPath, mergeManifests, blacklist); commands.add(jar); buildableContext.recordArtifact(outputFile); return commands.build(); } @Override public ImmutableSetMultimap<JavaLibrary, Path> getTransitiveClasspathEntries() { return transitiveClasspathEntries; } @Override public ImmutableSet<JavaLibrary> getTransitiveClasspathDeps() { return transitiveClasspathEntries.keySet(); } private Path getOutputDirectory() { return BuildTargets.getGenPath(getBuildTarget(), "%s").getParent(); } @Override public Path getPathToOutput() { return Paths.get( String.format( "%s/%s.jar", getOutputDirectory(), getBuildTarget().getShortNameAndFlavorPostfix())); } @Override public Tool getExecutableCommand() { Preconditions.checkState( mainClass != null, "Must specify a main class for %s in order to to run it.",<|fim▁hole|> return new CommandTool.Builder() .addArg("java") .addArg("-jar") .addArg(new BuildTargetSourcePath(getBuildTarget())) .build(); } }<|fim▁end|>
getBuildTarget());
<|file_name|>__main__.py<|end_file_name|><|fim▁begin|><|fim▁hole|> path = os.path.abspath(__file__) modpath = os.path.dirname(path) base_dir = os.getcwd() install_mode = 'normal' plus = False mon = False venv = "y" if len(sys.argv) > 1: if '-django' in sys.argv: install_mode = 'django' elif '-dev' in sys.argv: install_mode = 'dev' elif '-modules' in sys.argv: install_mode = 'modules' elif "-q" is sys.argv: install_mode = "default" if '-plus' in sys.argv: plus = True if '-mon' in sys.argv: mon = True if '-noenv' in sys.argv: venv = "n" msg = 'What is the name of the project? > ' if sys.version_info[:2] <= (2, 7): get_input = raw_input else: get_input = input user_input = get_input(msg) if user_input == "": print("You must provide a project name") sys.exit() project_name = user_input bscript = modpath + '/install/init/install.sh' print("Starting install ...") if not install_mode == 'modules': subprocess.call([bscript, project_name, base_dir, install_mode, modpath, venv]) pages_installed = "n" if install_mode != 'django': bscript = modpath + '/install/pages/install.sh' subprocess.call([bscript, project_name, base_dir, install_mode, modpath]) # contact bscript = modpath + '/install/contact/install.sh' subprocess.call([bscript, project_name, base_dir, install_mode, modpath]) if install_mode != "default": # real time msg = 'Install the realtime modules? [y/N] > ' rt = "n" user_input = get_input(msg) if user_input == "y": rt = "y" bscript = modpath + '/install/real_time/install.sh' subprocess.call([bscript, project_name, base_dir, modpath]) if plus is True: # users bscript = modpath + '/install/users/install.sh' subprocess.call( [bscript, project_name, base_dir, install_mode, modpath]) if mon is True: bscript = modpath + '/install/mon/install.sh' subprocess.call( [bscript, project_name, base_dir, install_mode, modpath]) # end bscript = modpath + '/install/end/install.sh' subprocess.call([bscript, project_name, base_dir, install_mode, modpath, rt])<|fim▁end|>
from __future__ import print_function import sys import os import subprocess
<|file_name|>sentiment_anaylsis_twitter_data.py<|end_file_name|><|fim▁begin|>from tweepy import API from tweepy import Cursor from tweepy.streaming import StreamListener from tweepy import OAuthHandler from tweepy import Stream from textblob import TextBlob import twitter_credentials import matplotlib.pyplot as plt import numpy as np import pandas as pd import re # # # # TWITTER CLIENT # # # # class TwitterClient(): def __init__(self, twitter_user=None): self.auth = TwitterAuthenticator().authenticate_twitter_app() self.twitter_client = API(self.auth) self.twitter_user = twitter_user def get_twitter_client_api(self): return self.twitter_client def get_user_timeline_tweets(self, num_tweets): tweets = [] for tweet in Cursor(self.twitter_client.user_timeline, id=self.twitter_user).items(num_tweets): tweets.append(tweet) return tweets def get_friend_list(self, num_friends): friend_list = [] for friend in Cursor(self.twitter_client.friends, id=self.twitter_user).items(num_friends): friend_list.append(friend) return friend_list def get_home_timeline_tweets(self, num_tweets): home_timeline_tweets = [] for tweet in Cursor(self.twitter_client.home_timeline, id=self.twitter_user).items(num_tweets): home_timeline_tweets.append(tweet) return home_timeline_tweets # # # # TWITTER AUTHENTICATER # # # # class TwitterAuthenticator(): def authenticate_twitter_app(self): auth = OAuthHandler(twitter_credentials.CONSUMER_KEY, twitter_credentials.CONSUMER_SECRET) auth.set_access_token(twitter_credentials.ACCESS_TOKEN, twitter_credentials.ACCESS_TOKEN_SECRET) return auth # # # # TWITTER STREAMER # # # # class TwitterStreamer(): """ Class for streaming and processing live tweets. 
""" def __init__(self): self.twitter_autenticator = TwitterAuthenticator() def stream_tweets(self, fetched_tweets_filename, hash_tag_list): # This handles Twitter authetification and the connection to Twitter Streaming API listener = TwitterListener(fetched_tweets_filename) auth = self.twitter_autenticator.authenticate_twitter_app() stream = Stream(auth, listener) # This line filter Twitter Streams to capture data by the keywords: stream.filter(track=hash_tag_list) # # # # TWITTER STREAM LISTENER # # # # class TwitterListener(StreamListener): """ This is a basic listener that just prints received tweets to stdout. """ def __init__(self, fetched_tweets_filename): self.fetched_tweets_filename = fetched_tweets_filename def on_data(self, data): try: print(data) with open(self.fetched_tweets_filename, 'a') as tf: tf.write(data) return True except BaseException as e: print("Error on_data %s" % str(e)) return True def on_error(self, status): if status == 420: # Returning False on_data method in case rate limit occurs. return False print(status) class TweetAnalyzer(): """ Functionality for analyzing and categorizing content from tweets. 
""" def clean_tweet(self, tweet):<|fim▁hole|> analysis = TextBlob(self.clean_tweet(tweet)) if analysis.sentiment.polarity > 0: return 1 elif analysis.sentiment.polarity == 0: return 0 else: return -1 def tweets_to_data_frame(self, tweets): df = pd.DataFrame(data=[tweet.text for tweet in tweets], columns=['tweets']) df['id'] = np.array([tweet.id for tweet in tweets]) df['len'] = np.array([len(tweet.text) for tweet in tweets]) df['date'] = np.array([tweet.created_at for tweet in tweets]) df['source'] = np.array([tweet.source for tweet in tweets]) df['likes'] = np.array([tweet.favorite_count for tweet in tweets]) df['retweets'] = np.array([tweet.retweet_count for tweet in tweets]) return df if __name__ == '__main__': twitter_client = TwitterClient() tweet_analyzer = TweetAnalyzer() api = twitter_client.get_twitter_client_api() tweets = api.user_timeline(screen_name="realDonaldTrump", count=200) df = tweet_analyzer.tweets_to_data_frame(tweets) df['sentiment'] = np.array([tweet_analyzer.analyze_sentiment(tweet) for tweet in df['tweets']]) print(df.head(10))<|fim▁end|>
return ' '.join(re.sub("(@[A-Za-z0-9]+)|([^0-9A-Za-z \t])|(\w+:\/\/\S+)", " ", tweet).split()) def analyze_sentiment(self, tweet):
<|file_name|>viperpeers.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # vim:fileencoding=utf-8 # Find the best reactor reactorchoices = ["epollreactor", "kqreactor", "cfreactor", "pollreactor", "selectreactor", "posixbase", "default"] for choice in reactorchoices: try: exec("from twisted.internet import %s as bestreactor" % choice) break except: pass bestreactor.install() #from twisted.application import internet, service from twisted.internet import reactor from twisted.protocols import basic, policies import yaml import socket import select import re import logging import sys import signal import os import traceback import codecs import time import resource logging.basicConfig() logging.getLogger().setLevel(logging.DEBUG) #logging.getLogger().addHandler() resource.setrlimit(resource.RLIMIT_NOFILE, [32768,65536]) trace=None if 'format_exc' in dir(traceback): from traceback import format_exc as trace else: from traceback import print_exc as trace reload(sys) def lock2key (lock): key = {} for i in xrange(1, len(lock)): key[i] = ord(lock[i]) ^ ord(lock[i-1]) key[0] = ord(lock[0]) ^ ord(lock[len(lock)-1]) ^ ord(lock[len(lock)-2]) ^ 5 for i in xrange(0, len(lock)): key[i] = ((key[i]<<4) & 240) | ((key[i]>>4) & 15) out = '' for i in xrange(0, len(lock)): out += unichr(key[i]) out = out.replace(u'\0', u'/%DCN000%/').replace(u'\5', u'/%DCN005%/').replace(u'\44', u'/%DCN036%/') out = out.replace(u'\140', u'/%DCN096%/').replace(u'\174', u'/%DCN124%/').replace(u'\176', u'/%DCN126%/') return out def number_to_human_size(size, precision=1): """ Returns a formatted-for-humans file size. 
``precision`` The level of precision, defaults to 1 Examples:: >>> number_to_human_size(123) '123 Bytes' >>> number_to_human_size(1234) '1.2 KB' >>> number_to_human_size(12345) '12.1 KB' >>> number_to_human_size(1234567) '1.2 MB' >>> number_to_human_size(1234567890) '1.1 GB' >>> number_to_human_size(1234567890123) '1.1 TB' >>> number_to_human_size(1234567, 2) '1.18 MB' """ if size == 1: return "1 Byte" elif size < 1024: return "%d Bytes" % size elif size < (1024**2): return ("%%.%if KB" % precision) % (size / 1024.00) elif size < (1024**3): return ("%%.%if MB" % precision) % (size / 1024.00**2) elif size < (1024**4): return ("%%.%if GB" % precision) % (size / 1024.00**3) elif size < (1024**5): return ("%%.%if TB" % precision) % (size / 1024.00**4) return "" class DCUser: recp={} recp['tag']=re.compile('[<](.*)[>]$') recp['slots']=re.compile('S:(\d*)') recp['hubs']=re.compile('H:([0-9/]*)') def __init__(self,myinfo="",descr=None,addr=None): self.nick = '' self.connection = '' self.flag = '' self.mail = '' self.share = 0 self.descr = None self.MyINFO = None self.level = 0 self.tag = '' self.slots = 0 self.hubs = 0 self.sum_hubs = 0 if len( myinfo )>0: self.upInfo( myinfo ) self.descr = descr self.addr = addr def upInfo(self,myinfo): self.MyINFO = myinfo ar = myinfo.split("$") ar2 = ar[2].split(" ",2) self.nick = ar2[1] self.description = ar2[2] self.connection = ar[4][0:-1] self.flag = ar[4][-1] self.mail = ar[5] self.share = int( ar[6] ) # Parsing TAG tag = self.recp['tag'].search( self.description ) if self.tag != None: self.tag=tag.group( 1 ) slots = self.recp['slots'].search( self.tag ) if slots != None: self.slots = int( slots.group( 1 ) ) hubs = self.recp['hubs'].search( self.tag ) if hubs != None: self.hubs = hubs.group( 1 ) try: self.sum_hubs=self.get_sum_hubs() except: logging.warning( 'WRONG TAG: %s' % tag ) def get_ip( self ): return self.addr.split(':')[0] def get_sum_hubs( self ): s=0 for i in self.hubs.split('/'): s=s+int( i ) return s class DCHub( 
policies.ServerFactory ): # CONSTANTS LOCK='EXTENDEDPROTOCOL_VIPERHUB Pk=versionHidden' SUPPORTS='OpPlus NoGetINFO NoHello UserIP UserIP2' def _(self,string): # Translate function return self.lang.get(string,string) def tUCR( self, req ): '''translate and make usercmmand request %[line:req:] ''' return '%%[line:%s:]' % self._( req ) def UC( self, menu, params ): '''make UserCommands''' return '$UserCommand 1 2 %s %s %s%s&#124;|' % ( menu, '$<%[mynick]>', self.core_settings['cmdsymbol'], ' '.join( params ) ) def Gen_UC( self ): self.usercommands={} # -- CORE USERCOMMANDS -- self.usercommands['Quit'] = self.UC( self._('Core\\Quit'), ['Quit'] ) self.usercommands['Save'] = self.UC( self._('Settings\\Save settings'), ['Save'] ) self.usercommands['SetTopic'] = self.UC( self._('Settings\\Set hub topic'), ['SetTopic', self.tUCR('New Topic')] ) self.usercommands['Help'] = self.UC( self._('Help'), ['Help'] ) self.usercommands['RegenMenu'] = self.UC( self._( 'Core\\Regenerate menu' ), ['RegenMenu'] ) self.usercommands['ReloadSettings'] = self.UC( self._( 'Core\\Reload settings (DANGEROUS)' ), ['ReloadSettings'] ) # -- settings get/set self.usercommands['Get'] = self.UC( self._('Settings\\List settings files'), ['Get'] ) self.usercommands['Set'] = self.UC( self._('Settings\\Set variable'), ['Set', self.tUCR( 'File' ), self.tUCR( 'Variable' ), self.tUCR( 'New Value' )] ) # -- Limits control self.usercommands['Set'] += self.UC( self._('Settings\\Limits\\Set max users'), ['Set core max_users', self.tUCR( 'New max users' )] ) self.usercommands['Set'] += self.UC( self._('Settings\\Limits\\Set min share'), ['Set core min_share', self.tUCR( 'New min share (in bytes)' )] ) self.usercommands['Set'] += self.UC( self._('Settings\\Limits\\Set max hubs'), ['Set core max_hubs', self.tUCR( 'New max hubs' )] ) self.usercommands['Set'] += self.UC( self._('Settings\\Limits\\Set min slots'), ['Set core min_slots', self.tUCR( 'New min slots' )] ) # -- User control self.usercommands['AddReg'] = '' 
self.usercommands['SetLevel'] = '' for i in self.settings['privlist'].keys(): self.usercommands['AddReg'] += self.UC( self._( 'Users\\Selected\\Register selected nick as\\%s' ) % i, ['AddReg %[nick]', i, self.tUCR( 'Password' )] ) self.usercommands['AddReg'] += self.UC( self._( 'Users\\Register nick...' ), ['AddReg', self.tUCR( 'nick' ), self.tUCR( 'level' ), self.tUCR( 'Password' )] ) self.usercommands['ListReg'] = self.UC( self._( 'Users\\List registred nicks' ), ['ListReg'] ) self.usercommands['DelReg'] = self.UC( self._( 'Users\\Selected\\Unreg selected nick' ), ['DelReg %[nick]'] ) self.usercommands['DelReg'] += self.UC( self._( 'Users\\Unreg nick...' ), ['DelReg', self.tUCR('Nick')] ) for i in self.settings['privlist'].keys(): self.usercommands['SetLevel'] += self.UC( self._( 'Users\\Selected\\Set level for selected nick\\%s' ) % i, ['SetLevel %[nick]', i] ) self.usercommands['PasswdTo'] = self.UC( self._( 'Users\\Selected\\Set password for selected nick...' ), ['PasswdTo %[nick]', self.tUCR('new password')] ) self.usercommands['Kick'] = self.UC( self._( 'Kick selected nick...' 
), ['Kick %[nick]', self.tUCR( 'reason (may be empty)' )] ) self.usercommands['UI'] = self.UC( self._( 'Users\\Selected\\User Info' ), ['UI %[nick]'] ) # -- Plugin control #self.usercommands['ListPlugins'] = self.UC( self._( 'Plugins\\List aviable plugins' ), ['ListPlugins'] ) #self.usercommands['ActivePlugins'] = self.UC( self._( 'Plugins\\List active plugins' ), ['ListPlugins'] ) menu = self._( 'Plugins\\Load/Reload Plugin\\' ) menuU = self._( 'Plugins\\Unload Plugin\\' ) loaded = self._( '(loaded)' ) aplugs = self.get_aviable_plugins() self.usercommands['ReloadPlugin'] = '' self.usercommands['LoadPlugin'] = '' self.usercommands['UnloadPlugin'] = '' for i in aplugs: if i in self.plugs: self.usercommands['ReloadPlugin'] += self.UC( menu + i + ' ' + loaded, ['ReloadPlugin', i] ) else: self.usercommands['LoadPlugin'] += self.UC( menu + i, ['LoadPlugin', i] ) for i in self.plugs.keys(): self.usercommands['UnloadPlugin'] += self.UC( menuU + i, ['UnloadPlugin', i] ) #self.usercommands['ListPlugins']='$UserCommand 1 2 '+self._('Plugins\\List aviable plugins')+'$<%[mynick]> '+self.core_settings['cmdsymbol']+'ListPlugins&#124;|' #self.usercommands['ActivePlugins']='$UserCommand 1 2 '+self._('Plugins\\List active plugins')+'$<%[mynick]> '+self.core_settings['cmdsymbol']+'ActivePlugins&#124;|' #self.usercommands['LoadPlugin']='$UserCommand 1 2 '+self._('Plugins\\Load plugin..')+'$<%[mynick]> '+self.core_settings['cmdsymbol']+'LoadPlugin %[line:'+self._('plugin')+':]&#124;|' #self.usercommands['UnloadPlugin']='$UserCommand 1 2 '+self._('Plugins\\Unload plugin...')+'$<%[mynick]> '+self.core_settings['cmdsymbol']+'UnloadPlugin %[line:'+self._('plugin')+':]&#124;|' #self.usercommands['ReloadPlugin']='$UserCommand 1 2 '+self._('Plugins\\Reload plugin...')+'$<%[mynick]> '+self.core_settings['cmdsymbol']+'ReloadPlugin %[line:'+self._('plugin')+':]&#124;|' # -- Self control self.usercommands['Passwd'] = self.UC( self._('Me\\Set MY password...'), [ 'Passwd', self.tUCR( 'new 
password' ) ] ) for i in self.plugs.values(): i.update_menu() self.usercommands.update( i.usercommands ) #logging.debug ('UC: %s' % repr(self.usercommands) ) return def __init__( self ): # COMMANDS self.commands={} # SIGNAL-SLOT EVENT SUBSYSTEM self.slots={} # COMPILE REGEXPS self.recp={} #self.recp['Key']=re.compile('(?<=\$Key )[^|]*(?=[|])') #self.recp['ValidateNick']=re.compile('(?<=\$ValidateNick )[^|]*(?=[|])') #self.recp['Supports']=re.compile('(?<=\$Supports )[^|]*(?=[|])') #self.recp['MyPass']=re.compile('(?<=\$MyPass )[^|]*(?=[|])') #self.recp['MyINFO']=re.compile('\$MyINFO [^|]*(?=[|])') #self.recp['NoGetINFO']=re.compile('NoGetINFO') #self.recp['NoHello']=re.compile('NoHello') self.recp['.yaml']=re.compile('\.yaml$') self.recp['before.yaml']=re.compile('.*(?=\.yaml)') self.recp['.py']=re.compile('\.py$') self.recp['before.py']=re.compile('.*(?=\.py)') self.recp['tag']=re.compile('[<](.*)[>]$') # SET PATHS self.path_to_settings="./settings/" self.path_to_plugins="./plugins/" # ----- SETTINGS ----- self.settings={} # LOADING SETTINGS self.load_settings() # SHORTCUTS self.core_settings=self.settings.get('core',{}) self.reglist=self.settings.get('reglist',{}) self.privlist=self.settings.get('privlist',{}) # DEFAULTS defcore_settings={} defcore_settings['port']=[411] defcore_settings['hubname']='ViperPeers' defcore_settings['topic']='' defcore_settings['cmdsymbol']='!' 
defcore_settings['OpLevels']=['owner'] defcore_settings['Protected']=['owner', 'op'] defcore_settings['Lang']='ru.cp1251' defcore_settings['autoload']=[ 'ban', 'chatlist', 'chatroom', 'forbid', 'goodplug', 'iplog', 'massmsg', 'motd', 'mute', 'say', 'regme' ] defcore_settings['logfile']='' defcore_settings['loglevel']=10 defcore_settings['autosave']=120 defcore_settings['userip']=['owner', 'op'] # ---- LIMITS ---- defcore_settings['max_users'] = 10000 defcore_settings['min_share'] = 0 defcore_settings['max_hubs'] = 1000 defcore_settings['min_slots'] = 0 defcore_settings['pass_limits'] = ['owner', 'op', 'chatroom'] defcore_settings['hubinfo']={'address':'127.0.0.1','description':'ViperPeers powered hub (vipehive fork)','type':'ViperPeers Hub', 'hubowner':'owner'} defreglist={'admin':{'level':'owner', 'passwd':'megapass'}} defprivlist={'owner':['*']} # If loaded core_settings miss some stuff - load defaults if len(self.core_settings)==0: self.settings['core']=self.core_settings={} for i in defcore_settings.keys(): if not i in self.core_settings: self.core_settings[i]=defcore_settings[i] #------UPDATE SETTINGS FROM OLD VERSION:------- # UPDATE PORT SETTINGS FOR VERSIONS <= svn r168 if not isinstance( self.core_settings['port'], list ): self.core_settings['port'] = [ self.core_settings['port'] ] if len(self.reglist)==0: self.settings['reglist']=self.reglist=defreglist if len(self.privlist)==0: self.settings['privlist']=self.privlist=defprivlist # MORE SHORTCUTS self.oplevels=self.core_settings['OpLevels'] self.protected=self.core_settings['Protected'] self.KEY=lock2key(self.LOCK) # ---- TRANSPORTS ---- self.transports=[] # User hashes self.nicks={} self.addrs={} # Support for very, VERY old clients self.hello=[] self.getinfo=[] self.clthreads=[] # Reinitialize Logging self.reload_logging() # REGISTERING CORE COMMANDS self.commands['Quit']=self.Quit #Usercommands + self.commands['AddReg']=self.AddReg #Usercommands + self.commands['DelReg']=self.DelReg #Usercommands + 
self.commands['ListReg']=self.ListReg #Usercommands + self.commands['Get']=self.Get #Usercommands + self.commands['Set']=self.Set #Usercommands + self.commands['SetLevel']=self.SetLevel #Usercommands + self.commands['Help']=self.Help #Usercommands + self.commands['ListPlugins']=self.ListPlugins #Usercommands + self.commands['LoadPlugin']=self.LoadPlugin #Usercommands + self.commands['UnloadPlugin']=self.UnloadPlugin #Usercommands + self.commands['ActivePlugins']=self.ActivePlugins #Usercommands + self.commands['Save']=self.Save #Usercommands + self.commands['ReloadPlugin']=self.ReloadPlugin self.commands['RP']=self.ReloadPlugin #Usercommands + self.commands['Passwd']=self.Passwd self.commands['PasswdTo']=self.PasswdTo #Usercommands + self.commands['Kick']=self.Kick #Usercommands + self.commands['UI']=self.UI #Usercoommands + self.commands['SetTopic']=self.SetTopic #Usercommands + self.commands['RegenMenu'] = self.RegenMenu #Usercommands + self.commands['ReloadSettings'] = self.ReloadSettings #Usercommands + # TRANSLATION SYSTEM self.lang={} # Current language array self.help={} # Help for current language # -- LOADING LANGUAGE lang=self.core_settings['Lang'].split('.')[0]<|fim▁hole|> lpath='./languages/'+lang+'/' lfiles=os.listdir(lpath) for i in lfiles: # LOAD MESSAGES FOR CURRENT LANGUAGE if self.recp['.yaml'].search(i)!=None: try: arr=yaml.load(codecs.open(lpath+i,'r','utf-8').read()) #for key,value in arr.iteritems(): # arr[key]=value.encode(cpage) self.lang.update(arr) except: logging.error('file %s in wrong format: %s' % ((lpath+i), trace())) if 'help' in lfiles: # LOAD HELP FOR CURRENT LANGUAGE hpath=lpath+'help/' hfiles=os.listdir(hpath) for i in hfiles: if self.recp['.yaml'].search(i)!=None: try: arr=yaml.load(codecs.open(hpath+i,'r','utf-8').read()) #for key,value in arr.iteritems(): # arr[key]=value.encode(cpage) self.help.update(arr) except: logging.error('file %s in wrong format: %s' % ((lpath+i), trace())) except: logging.error('language directory not 
found %s' % (trace())) logging.info('Language loaded: %s strings' % str(len(self.lang))) logging.info('Help loaded: %s strings' % str(len(self.help))) # PLUGINS self.plugs={} self.Gen_UC() # Queue for queue_worker self.queue = [] self.queue_lock = False self.delay = 0.5 self.ping_time = 150. reactor.callLater(self.delay, self.queue_worker, self.ping_time) # AUTOLOAD PLUGINS for i in self.settings['core']['autoload']: reactor.callLater(self.delay, self.LoadPlugin, None, [i]) # SETTING AUTOSAVER reactor.callLater(self.settings['core']['autosave'], self.settings_autosaver) logging.info ('Hub ready to start on port %s...' % self.core_settings['port']) self.skipme=[] def reload_logging(self): logging.debug('Set logging to %s, level %s' % (self.settings['core']['logfile'], str(self.settings['core']['loglevel']))) reload(sys.modules['logging']) if self.settings['core']['logfile']: logging.basicConfig(filename=self.settings['core']['logfile'],) logging.getLogger().setLevel(self.settings['core']['loglevel']) def emit(self,signal,*args): #logging.debug('emitting %s' % signal) #logging.debug('emit map %s' % repr(self.slots)) for slot in self.slots.get(signal,[]): logging.debug( 'Emitting: %s, for %s slot' % ( signal, repr( slot )) ) try: if not slot(*args): logging.debug( 'Emit %s: FALSE' % signal ) return False except: logging.error('PLUGIN ERROR: %s' % trace()) logging.debug( 'Emit %s: True' % signal ) return True def settings_autosaver(self): logging.debug('settings autosave') self.save_settings() reactor.callLater(self.settings['core']['autosave'], self.settings_autosaver) def drop_user_by_addr(self,addr): if addr in self.addrs: transport=self.addrs[addr].descr nick=self.addrs[addr].nick self.drop_user(addr,nick,transport) def drop_user(self, addr, nick, transport): logging.debug('dropping %s %s' % (addr, nick)) try: if transport in self.transports: self.transports.remove(transport) self.addrs.pop(addr,'') self.nicks.pop(nick,'') if transport in self.hello: 
self.hello.remove(transport) transport.loseConnection() except: logging.debug('something wrong while dropping client %s' % trace()) self.send_to_all('$Quit %s|' % nick) self.emit('onUserLeft',addr,nick) def drop_user_by_nick(self,nick): if nick in self.nicks: transport=self.nicks[nick].descr addr=self.nicks[nick].addr self.drop_user(addr,nick,transport) def drop_user_by_transport(self, transport): A=None N=None for nick, user in self.nicks.items(): if user.descr == transport: N=nick break for addr, user in self.addrs.items(): if user.descr == transport: A=addr break self.drop_user(A, N, transport) def send_to_all(self, msg): if not self.queue_lock: self.queue_lock = True self.queue.append(msg) self.queue_lock = False else: reactor.callLater(self.delay, self.send_to_all, msg) def queue_worker(self, ping_timer): if ping_timer > 0: ping_timer -= self.delay result = '' if not self.queue_lock: self.queue_lock = True msgs = self.queue self.queue = [] self.queue_lock = False if len(msgs)>0: for msg in msgs: logging.debug('sending to all %s' % msg) if not (len(msg)>0 and msg[-1]=="|"): msg += "|" result += msg if not result and ping_timer <= 0: # We should probably "ping" all connections if no messages to send ping_timer += self.ping_time logging.debug('pinging') result = '|' if result: logging.debug('senging "%s" to all' % result) for transport in self.transports: try: transport.write(result.encode(self.charset)) except: logging.debug('transport layer error %s' % trace()) reactor.callLater(0, self.drop_user_by_transport, transport) reactor.callLater(self.delay, self.queue_worker, ping_timer) def send_pm_to_nick(self,fnick,nick,msg): self.send_to_nick(nick,'$To: %s From: %s $<%s> %s|' % (nick, fnick, fnick, msg)) def send_to_nick(self,nick,msg): if nick in self.nicks: if not (len(msg)>0 and msg[-1]=="|"): msg=msg+"|" try: logging.debug('sending "%s" to %s' % (msg, nick)) self.nicks[nick].descr.write(msg.encode(self.charset)) except: #logging.debug('Error while sending "%s" 
to %s. Dropping. %s' % (msg,nick,trace())) logging.debug('socket error %s. dropping lost user!' % trace() ) self.drop_user_by_nick(nick) else: logging.debug('send to unknown nick: %s' % nick) def send_to_addr(self,addr,msg): if addr in self.addrs: if not (len(msg)>0 and msg[-1]=="|"): msg=msg+"|" try: logging.debug('sending "%s" to %s' % (msg, addr)) self.addrs[addr].descr.write(msg.encode(self.charset)) except: logging.debug('socket error %s' % trace()) else: logging.warning('uknown addres: %s' % addr) def get_nick_list( self ): nicklist="$NickList " oplist="$OpList " for user in self.nicks.values(): nicklist+=user.nick+"$$" if user.level in self.oplevels: oplist+=user.nick+"$$" return "%s|%s|" % (nicklist[:-2], oplist[:-2]) def get_op_list(self): #repeat some code for faster access oplist="$OpList " for user in self.nicks.values(): if user.level in self.oplevels: oplist+=user.nick+"$$" return oplist+'|' def get_userip_list( self ): uip='$UserIP ' for user in self.nicks.values(): uip+='%s %s$$' % (user.nick, user.get_ip()) return uip+'|' def get_userip_acc_list(self): uip=[] for user in self.nicks.values(): if user.level in self.core_settings['userip']: uip.append(user.nick) return uip def save_settings(self): logging.debug('saving settigs') try: for mod, sett in self.settings.items(): try: logging.info('saving settings for %s' % mod) f=open(self.path_to_settings+'/'+mod+'.yaml','wb') f.write(yaml.safe_dump(sett,default_flow_style=False,allow_unicode=True)) except: logging.error('failed to load settings for module %s. cause:' % mod) logging.error('%s' % trace()) return False except: logging.error('!!! 
SETTINGS NOT SAVED !!!') return False return True def load_settings(self): logging.debug('reading settigs') try: for i in os.listdir(self.path_to_settings): if self.recp['.yaml'].search(i)!=None: mod=self.recp['before.yaml'].search(i).group(0) logging.debug('loading settings for %s' % mod) try: f=codecs.open(self.path_to_settings+'/'+ i,'r','utf-8') text=f.read() dct=yaml.load(text) if dct!=None: self.settings[mod]=dct except: logging.error('failed to load settings for module %s. cause:' % mod) logging.error('%s' % trace()) except: logging.error('error while loading settings: %s', trace()) def check_rights(self, user, command): rights=self.privlist.get(user.level,[]) if ('*' in rights) or (command in rights): return True else: return False def send_usercommands_to_nick(self, nick): for i in range(1,4): self.send_to_nick(nick, '$UserCommand 255 %s |' % i) for name, cmd in self.usercommands.items(): if self.check_rights(self.nicks[nick],name): self.send_to_nick(nick, cmd) def send_usercommands_to_all(self): for nick in self.nicks.keys(): self.send_usercommands_to_nick(nick) # COMMANDS # -- Hub Control def Quit(self,addr,params=[]): self.work=False exit return True def Set(self,addr,params=[]): # Setting param for core or plugin # Params should be: 'core/plugin name' 'parameter' 'value' # Cause 'value' can contain spaces - join params[2:] if len(params)<2: return self._('Params error') try: value=yaml.load(" ".join(params[2:])) self.settings[params[0]][params[1]]=value if params[1].startswith('log'): self.reload_logging() return self._('Settings for %s - %s setted for %s') % (params[0], params[1], value) except: return self._('Error: %s') % trace() def Get(self,addr, params=[]): #Getting params or list # Params can be 'core/plugin name' 'parameter' or 'core/plugin name' if len(params)==0: return self._(' -- Available settings --:\n%s' ) % (unicode(yaml.safe_dump(self.settings.keys(),allow_unicode=True),'utf-8')) elif len(params)==1: if params[0] in self.settings: 
return self._(' -- Settings for %s --\n%s' ) % (params[0], unicode(yaml.safe_dump(self.settings.get(params[0],''),allow_unicode=True),'utf-8')) elif len(params)==2: if params[0] in self.settings and params[1] in self.settings[params[0]]: return self._(' -- Settings for %s - %s --\n%s' ) % ( params[0], params[1], unicode(yaml.safe_dump(self.settings[params[0]][params[1]],allow_unicode=True),'utf-8')) else: return self._('Params error') else: return self._('Params error') def Save(self, params=[]): try: self.save_settings() return True except: return False def RegenMenu( self, params = [] ): try: self.Gen_UC() self.send_usercommands_to_all() return True except: return False def ReloadSettings( self, params = [] ): try: self.load_settings() except: return False return True # --- User Control def AddReg(self,addr,params=[]): # Params should be: 'nick' 'level' 'passwd' if len(params)==3: # Check if 'nick' already registred if params[0] not in self.reglist: self.reglist[params[0]]={'level': params[1],'passwd':params[2]} return self._('User Registred:\n nick: %s\n level: %s\n passwd:%s') % (params[0],params[1],params[2]) else: return self._('User already registred') else: return self._('Params error.') def DelReg(self,addr,params=[]): # Params should be 'nick' if len(params)==1: # Check if 'nick' registred if params[0] in self.reglist: if params[0] not in self.protected: del self.reglist[params[0]] return self._('User deleted') else: return self._('User protected!') else: return self._('User not registred') else: return self._('Params error') def ListReg(self,addr): s=self._('--- REGISTRED USERES --- \n') for nick, param in self.reglist.items(): s=s+('nick: %s level: %s' % (nick, param['level'],))+'\n' return s #return self._('--- REGISTRED USERES --- \n') + "\n".join('nick: %s level: %s' % (nick, param['level'],) for nick, param in self.reglist.iteritems()) def SetLevel(self,addr,params=[]): # Params should be: 'nick' 'level' if len(params)==2: if params[0] in 
self.reglist: self.reglist[params[0]]['level']=yaml.load(params[1]) return self._('Success') else: return self._('No such user') else: return self._('Params error.') def Kick (self, addr, params=[]): # Params should be: 'nick' if len(params)>=1: if params[0] in self.nicks: if self.nicks[params[0]].level in self.protected: return self._('User protected!') msg = '<%s> is kicking %s because: ' % (self.addrs[addr].nick, params[0]) if len(params)>1: fnick = self.core_settings['hubname'].replace(' ','_') reason = ' '.join(params[1:]) self.send_pm_to_nick(fnick, params[0], reason) msg += reason else: msg += '-' self.drop_user_by_nick(params[0]) self.send_to_all(msg) return self._('Success') else: return self._('No such user') else: return self._('Usage: !Kick <Username> [<reason>]') # -- Help System def Help(self,addr,params=""): # Params can be empty or 'command' if len(params)==1: if self.check_rights(self.addrs[addr], params[0]): return self.help[params[0]] else: return self._('Premission denied') elif len(params)==0: ans=self._(' -- Aviable commands for you--\n') for cmd in self.commands.keys(): if self.check_rights(self.addrs[addr],cmd): ans+='%s\n' % self.help.get(cmd,cmd) return ans else: return self._('Params error') # -- Plugin control def get_aviable_plugins( self ): ans = [] try: for i in os.listdir(self.path_to_plugins): if self.recp['.py'].search(i)!=None and i!="__init__.py" and i!="plugin.py": mod=self.recp['before.py'].search(i).group(0) ans.append( mod ) return ans except: logging.error('error while listing plugins: %s', trace()) return ans def ListPlugins(self,addr): logging.debug('listing plugins') ans = self._(' -- Aviable plugins --\n%s') % '\n'.join( self.get_aviable_plugins() ) return ans def LoadPlugin(self,addr,params=[]): # Params should be: 'plugin' if len(params)==1: logging.debug('loading plugin %s' % params[0]) if params[0] not in self.plugs: try: if not '.' 
in sys.path: sys.path.append('.') if 'plugins.'+params[0] not in sys.modules: plugins=__import__('plugins.'+params[0]) plugin=getattr(plugins,params[0]) else: plugin=reload(sys.modules['plugins.'+params[0]]) logging.getLogger().setLevel(self.settings['core']['loglevel']) logging.debug('loaded plugin file success') cls=getattr(plugin,params[0]+'_plugin') obj=cls(self) self.plugs[params[0]]=obj self.commands.update(obj.commands) #self.usercommands.update(obj.usercommands) logging.debug( 'Plugin %s slots: %s' % (params[0], repr( obj.slots ) ) ) for key,value in obj.slots.iteritems(): logging.debug( 'Activating Slot: %s, on plugin %s' % ( key, params[0] ) ) if key in self.slots: self.slots[key].append(value) else: self.slots[key]=[value] logging.debug( 'MessageMap: %s' % repr( self.slots )) self.Gen_UC() self.send_usercommands_to_all() return self._('Success') except: e=trace() logging.debug( 'Plugin load error: %s' % (e,) ) return self._( 'Plugin load error: %s' % (e,) ) else: return self._('Plugin already loaded') else: return self._('Params error') def UnloadPlugin(self,addr,params=[]): # Params should be: 'plugin' logging.debug('unloading plugin') if len(params)==1: try: if params[0] in self.plugs: plug=self.plugs.pop(params[0]) plug.unload() for key in plug.commands.keys(): self.commands.pop(key,None) for key in plug.usercommands.keys(): self.usercommands.pop(key,None) for key, value in plug.slots.iteritems(): if key in self.slots: if value in self.slots[key]: self.slots[key].remove(value) self.Gen_UC() self.send_usercommands_to_all() return self._('Success') else: return self._('Plugin not loaded') except: return self._('Plugin unload error: %s' % trace()) else: return self._('Params error') def ReloadPlugin(self, addr, params=[]): # Params 'plugin' return 'Unload: %s, Load %s' % (self.UnloadPlugin(addr, params), self.LoadPlugin(addr, params)) def ActivePlugins(self,addr,params=[]): return self._(' -- ACTIVE PLUGINS -- \n')+"\n".join(self.plugs.keys()) def 
Passwd(self,addr,params=[]): # Params 'nick' if len(params)>0: newpass=" ".join(params) nick=self.addrs[addr].nick if nick in self.reglist: self.reglist[nick]['passwd']=newpass return self._('Your password updated') else: return self._('You are not registred') else: return self._('Params error') def PasswdTo(self,addr,params=[]): # Params: 'nick' 'newpass' if len(params)>1: nick=params[0] newpass=" ".join(params[1:]) if nick in self.reglist: if self.nicks[nick].level in self.protected: return self._('User protected!') self.reglist[nick]['passwd']=newpass return self._('User password updated') else: return self._('User not registred') else: return self._('Params error') def UI(self,addr,params=[]): # params: 'nick' if len(params)==1: user=self.nicks.get(params[0],None) if user!=None: return self._(' -- USER %s INFO --\n addres: %s\n level: %s\n is op?: %s\n is protected?: %s') % (user.nick, user.addr, user.level, repr(user.level in self.oplevels), repr(user.level in self.protected)) else: return self._('No such user') else: return self._('Params error') def SetTopic(self,addr,params=[]): #params: ['topic'] if len(params)>=1: topic=' '.join(params) self.core_settings['topic']=topic self.send_to_all('$HubTopic %s|' % topic) return self._('Success') else: return self._('Params error') # -- EXTENDED FUNCTIONS USED FOR SIMPLIFY SOME WRK def masksyms(self, str): ''' return string with ASCII 0, 5, 36, 96, 124, 126 masked with: &# ;. e.g. chr(5) -> &#5; ''' cds=[0, 5, 36, 96, 124, 126] for i in cds: str=str.replace(chr(i),'&#%s;' % i) return str def unmasksyms(self, str): ''' return string with ASCII 0, 5, 36, 96, 124, 126 unmasked from: &# ; mask. e.g. 
&#5; -> chr(5) ''' cds=[0, 5, 36, 96, 124, 126] for i in cds: str=str.replace('&#%s;' % i, chr(i)) return str class DCProtocol(basic.LineOnlyReceiver, policies.TimeoutMixin): def _(self,string): # Translate function return self.factory._(string) def __init__(self): self.delimiter = '|' self.MAX_LENGTH = 2**16 # default is 16384 def write(self, msg): self.transport.write(msg) logging.debug('sending "%s" to %s' % (msg, self._addr)) def connectionMade(self): self._state = 'connect' self._supports = [] self._hubinfo = self.factory.core_settings['hubinfo'] self._host, self._port = self.transport.socket.getpeername() self._addr = '%s:%s' % (self._host, self._port) self._nick = '' self.setTimeout(None) if len( self.factory.nicks ) >= self.factory.core_settings['max_users']: self.transport.loseConnection() logging.warning( 'MAX USERS REACHED!!!' ) return logging.debug ('connecting: %s' % self._addr) if self.factory.emit('onConnecting', self._addr): self.write('$Lock %s|' % self.factory.LOCK ) else: logging.debug('Connection is not allowed by plugins') self.transport.loseConnection def lineReceived(self, line): line = unicode(line, self.factory.charset) logging.debug ('received: %s from %s' % (line, self._addr)) if self._state in [ 'connect', 'validate', 'negotiate' ] and line.startswith('$'): self.resetTimeout() f = getattr(self, 'parse_' + self._state + '_cmd') f(line) elif self._state == 'logedin': self.resetTimeout() if self.factory.emit('onReceivedSomething', self._addr) and len(line) > 0: if line.startswith('$'): self.parse_protocol_cmd(line) else: self.parse_chat_msg(line) else: logging.debug ( 'Unexpected command sequence received from %s' % self._addr ) self.transport.loseConnection() def lineLengthExceeded(self, line): logging.warning ( 'Too big or wrong message received from %s: %s' % (self._addr, s) ) def connectionLost(self, reason): if self._nick: self.factory.drop_user(self._addr, self._nick, self.transport) logging.debug('User Lost: %s' % reason) def 
parse_protocol_cmd(self, cmd): acmd=cmd.split(' ') if acmd[0]=='$GetINFO': if len(acmd)==3: if self.factory.addrs[self._addr].nick==acmd[2] and self.factory.nicks.has_key(acmd[1]): if self.factory.emit('onGetINFO',acmd[1],acmd[2]): logging.debug('send myinfo %s' % self.factory.nicks[acmd[1]].MyINFO) self.factory.send_to_nick(acmd[2],self.factory.nicks[acmd[1]].MyINFO) elif acmd[0]=='$MyINFO': if len(acmd)>=3: if self.factory.addrs[self._addr].nick==acmd[2]: try: self.factory.nicks[acmd[2]].upInfo(cmd) if self.factory.emit('onMyINFO',cmd): self.factory.send_to_all(cmd) except: logging.warning( 'Wrong MyINFO by: %s with addr %s: %s' % ( acmd[2], self._addr, trace() ) ) self.factory.drop_user_by_addr(self._addr) elif acmd[0]=='$To:': if len(acmd)>5: if acmd[3]==self.factory.addrs[self._addr].nick==acmd[4][2:-1]: if acmd[1] in self.factory.nicks: tocmd=cmd.split(' ',5) if self.factory.emit('onPrivMsg',acmd[3],acmd[1],tocmd[5]): self.factory.send_to_nick(acmd[1],cmd+"|") elif acmd[0]=='$ConnectToMe': if len(acmd)==3: if acmd[2].split(':')[0]==self._addr.split(':')[0]: if self.factory.emit('onConnectToMe',self._addr,acmd[1]): self.factory.send_to_nick(acmd[1],cmd+"|") elif acmd[0]=='$RevConnectToMe': if len(acmd)==3: if acmd[1] in self.factory.nicks: if self.factory.addrs[self._addr].nick==acmd[1]: if self.factory.emit('onRevConnectToMe',acmd[1],acmd[2]): self.factory.send_to_nick(acmd[2],cmd+"|") elif acmd[0]=='$Search': if len(acmd)>=3: srcport=acmd[1].split(':') if len(srcport)==2: if srcport[0]=='Hub': #Passive Search if srcport[1]==self.factory.addrs[self._addr].nick: bcmd=cmd.split(' ',2) if self.factory.emit('onSearchHub',bcmd[1],bcmd[2]): self.factory.send_to_all(cmd) else: #Active Search if srcport[0]==self.factory.addrs[self._addr].addr.split(':')[0]: bcmd=cmd.split(' ',2) if self.factory.emit('onSearch',bcmd[1],bcmd[2]): self.factory.send_to_all(cmd) elif acmd[0]=='$SR': fcmd=cmd.split(chr(5)) if len(fcmd)==4 and len(acmd)>=3: sender=acmd[1] receiver=fcmd[3] 
if self.factory.addrs[self._addr].nick==sender: if self.factory.emit('onSearchResult', sender, receiver, cmd): self.factory.send_to_nick(receiver, chr(5).join(fcmd[:3])+'|') elif acmd[0]=='$GetNickList': self.factory.send_to_addr( self._addr, self.factory.get_nick_list() ) elif acmd[0]=='$HubINFO' or acmd[0]=='$BotINFO': hubinfo='$HubINFO ' hubinfo+='%s$' % self.factory.core_settings['hubname'] hubinfo+='%s:%s$' % ( self._hubinfo.get('address',''), self.factory.core_settings['port'][0] ) hubinfo+='%s$' % self._hubinfo.get('description','') hubinfo+='%s$' % self.factory.core_settings.get('max_users','10000') hubinfo+='%s$' % self.factory.core_settings.get('min_share','0') hubinfo+='%s$' % self.factory.core_settings.get('min_slots','0') hubinfo+='%s$' % self.factory.core_settings.get('max_hubs','1000') hubinfo+='%s$' % self._hubinfo.get('type','') hubinfo+='%s$' % self._hubinfo.get('owner','') self.factory.send_to_addr( self._addr, hubinfo ) else: logging.debug('Unknown protocol command: %s from: %s' % (cmd, self._addr)) return def parse_cmd(self, cmd): logging.debug('command received %s' % cmd) acmd=cmd.split(' ') ncmd=acmd[0] for j in self.factory.commands: if acmd[0].lower() == j.lower(): ncmd=j if self.factory.check_rights(self.factory.addrs[self._addr],acmd[0]): if ncmd in self.factory.commands: try: if (len(acmd[1:]))>0: result=self.factory.commands[ncmd](self._addr,acmd[1:]) else: result=self.factory.commands[ncmd](self._addr) if result != '': self.factory.send_to_addr(self._addr, self._('<HUB> %s|') % result) except SystemExit: raise SystemExit except: self.factory.send_to_addr(self._addr, self._('<HUB> Error while proccessing command %s|') % trace()) else: self.factory.send_to_addr(self._addr, self._('<HUB> No such command')) else: self.factory.send_to_addr(self._addr, self._('<HUB> Premission denied')) return def parse_chat_msg(self, msg): acmd=msg.split(' ',1) if len(acmd)==2: if acmd[0][1:-1]==self.factory.addrs[self._addr].nick: if 
acmd[1][0]==self.factory.core_settings['cmdsymbol']: self.parse_cmd(acmd[1][1:]) else: if self.factory.emit('onMainChatMsg',acmd[0][1:-1],acmd[1]): self.factory.emit('ChatHistEvent',acmd[0][1:-1],acmd[1]) self.factory.send_to_all(msg) else: logging.warning('user tried to use wrong nick in MC. Real nick: %s. Message: %s' % (self.factory.addrs[self._addr].nick, msg)) self.drop_user_by_addr(self._addr) return def parse_connect_cmd(self, cmd): acmd = cmd.split(' ', 1) if acmd[0] == '$Supports': self._supports = acmd[1].split(' ') logging.debug('Supports: %s' % acmd[1]) elif acmd[0] == '$ValidateNick': self.write('<HUB> This hub is powered by ViperPeers specific software.|$HubName %s|' % ( self.factory.core_settings['hubname'].encode(self.factory.charset) ) ) self._nick = acmd[1] if self._nick: logging.debug('validating: %s' % self._nick) if self._nick in self.factory.reglist: self._state = 'validate' self.write('$GetPass|') return elif self._nick not in self.factory.nicks: self.send_negotiate_cmd() return else: logging.debug('this nick is already online.'); else: logging.debug('not validated nick. dropping.') self.write('$ValidateDenide|') self.transport.loseConnection() def parse_validate_cmd(self, cmd): """ if user registred, and passwd is correct we should connect it even if it's already connected (drop & connect) """ acmd = cmd.split(' ', 1) if acmd[0] == '$MyPass': logging.debug('MyPass %s' % acmd[1]) if acmd[1] != self.factory.reglist[self._nick]['passwd']: logging.info('wrong pass') self.write(('<HUB> %s|$BadPass|' % (self._('Password incorrect. Provided: %s') % str(acmd[1]),)).encode(self.factory.charset)) logging.debug('not validated nick. dropping.') self.transport.loseConnection() return else: if self._nick in self.factory.nicks: logging.debug('reconnecting identified user') try: self.factory.nicks[self._nick].descr.write('<HUB> You are connecting from different machine. 
Bye.|') except: pass self.factory.drop_user_by_nick(self._nick) self.send_negotiate_cmd() return #else: # logging.debug('received wrong cmd: %s' % cmd) def send_negotiate_cmd(self): self._state = 'negotiate' logging.debug ('validated %s' % self._nick) for transport in self.factory.hello: reactor.callLater(0, transport.write, '$Hello %s|' % self._nick.encode(self.factory.charset)) self.write('$Hello %s|$Supports %s |' % (self._nick.encode(self.factory.charset), self.factory.SUPPORTS)) #self.write('$Hello %s|' % self._nick.encode(self.factory.charset)) def parse_negotiate_cmd(self, cmd): acmd = cmd.split(' ', 1) if acmd[0] == '$MyINFO': try: user=DCUser(cmd, self.transport, self._addr) except: logging.warning( 'wrong myinfo from: %s addr: %s info: %s %s' % ( self._nick, self._addr, cmd, trace() ) ) else: if self._nick in self.factory.reglist: user.level=self.factory.reglist[self._nick]['level'] else: user.level='unreg' self.factory.nicks[self._nick] = user self.factory.addrs[self._addr] = user try: # --- APPLY LIMITS --- if user.share < self.factory.core_settings['min_share'] and user.level not in self.factory.core_settings['pass_limits']: self.write( (self._( '<HUB> Too low share. Min share is %s.|' ) % number_to_human_size( self.factory.core_settings['min_share'] ) ).encode( self.factory.charset ) ) logging.debug('not validated. dropping') self.factory.drop_user(self._addr, self._nick, self.transport) return if user.sum_hubs > self.factory.core_settings['max_hubs'] and user.level not in self.factory.core_settings['pass_limits']: self.write( (self._( '<HUB> Too many hubs open. Max hubs is %s.|' ) % self.factory.core_settings['max_hubs']).encode( self.factory.charset ) ) logging.debug('not validated. dropping') self.factory.drop_user(self._addr, self._nick, self.transport) return if user.slots < self.factory.core_settings['min_slots'] and user.level not in self.factory.core_settings['pass_limits']: self.write( (self._( '<HUB> Too few slots open. 
Min slots is %s.|' ) % self.factory.core_settings['min_slots']).encode( self.factory.charset ) ) logging.debug('not validated. dropping') self.factory.drop_user(self._addr, self._nick, self.transport) return logging.debug('slots: %s, hubs: %s' % (user.slots, user.hubs) ) if self.factory.emit('onConnected',user): logging.debug('Validated. Appending.') self.factory.transports.append(self.transport) if user.level in self.factory.oplevels: self.write('$LogedIn|') self.factory.send_to_all(self.factory.get_op_list()) if not 'NoHello' in self._supports: self.factory.hello.append(self.transport) if not 'NoGetINFO' in self._supports: self.write(self.factory.get_nick_list().encode( self.factory.charset )) else: for i in self.factory.nicks.values(): self.write(i.MyINFO.encode(self.factory.charset)) self.write(self.factory.get_op_list().encode(self.factory.charset)) self.factory.send_to_all(cmd) uips=self.factory.get_userip_acc_list() if ('UserIP' in self._supports) or ('UserIP2' in self._supports): self.factory.send_to_nick(self._nick, '$UserIP %s %s$$' %(self._nick, user.get_ip())) if user.level in self.factory.core_settings['userip']: self.factory.send_to_nick(self._nick, self.factory.get_userip_list()) for unick in uips: self.factory.send_to_nick(unick, '$UserIP %s %s$$' %(self._nick, user.get_ip())) self.factory.send_usercommands_to_nick(self._nick) self.factory.send_to_nick(self._nick, '$HubTopic %s' % self.factory.core_settings['topic']) else: logging.debug('not validated. dropping') self.factory.drop_user(self._addr, self._nick, self.transport) return except: logging.debug('error while connect: %s' % trace()) self.factory.drop_user(self._addr, self._nick, self.transport) return self._state = 'logedin' self.setTimeout(None) def timeoutConnection(self): """ Called when the connection times out. 
""" logging.debug('timeout: %s' % self._addr) self.write('<HUB> Login timeout!|') self.transport.loseConnection() def on_exit(self): self.work=False self.save_settings() sys.exit() #RUNNING HUB #application = service.Application('DirectConnect Hub') hub = DCHub() hub.protocol = DCProtocol for i in hub.core_settings['port']: try: #internet.TCPServer(i, hub).setServiceParent(application) reactor.listenTCP(i, hub) logging.debug('Started on port %d' % i) except: logging.error('---- A PROBLEM WHILE BINDING TO PORT: %s \n %s----' % (i, trace(),) ) reactor.run()<|fim▁end|>
self.charset=cpage=self.core_settings['Lang'].split('.')[1] try:
<|file_name|>power_button.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python import RPi.GPIO as GPIO import subprocess # Starting up GPIO.setmode(GPIO.BCM) GPIO.setup(3, GPIO.IN) <|fim▁hole|># Shutting down subprocess.call(['shutdown', '-h', 'now'], shell=False)<|fim▁end|>
# Wait until power button is off # Recommended to use GPIO.BOTH for cases with switch GPIO.wait_for_edge(3, GPIO.BOTH)
<|file_name|>unable_module.py<|end_file_name|><|fim▁begin|># coding:utf8 """ <|fim▁hole|>class UnableTest(object): pass<|fim▁end|>
无法从上层目录进行导入操作 """
<|file_name|>flotChartServ.js<|end_file_name|><|fim▁begin|>angular.module('n52.core.diagram') .service('flotChartServ', [ 'timeseriesService', 'timeService', 'settingsService', 'flotDataHelperServ', '$rootScope', 'monthNamesTranslaterServ', 'labelMapperSrvc', '$q', function( timeseriesService, timeService, settingsService, flotDataHelperServ, $rootScope, monthNamesTranslaterServ, labelMapperSrvc, $q ) { var createYAxis = () => { var axesList = {}; var requests = []; angular.forEach(timeseriesService.getAllTimeseries(), (elem) => { var requestUom, requestLabel, requestBundle = []; if (elem.uom) { requestUom = labelMapperSrvc.getMappedLabel(elem.uom); requestBundle.push(requestUom);<|fim▁hole|> requestBundle.push(requestLabel); requests.push(requestLabel); } $q.all(requestBundle).then((result) => { if (elem.styles.groupedAxis === undefined || elem.styles.groupedAxis) { var label; if (result.length === 2 && !result[1].indexOf('http')) { label = result[1] + ' [' + result[0] + ']'; } else { label = '[' + result[0] + ']'; } if (!axesList.hasOwnProperty(label)) { axesList[label] = { id: ++Object.keys(axesList).length, uom: label, tsColors: [elem.styles.color], zeroScaled: elem.styles.zeroScaled }; elem.styles.yaxis = axesList[label].id; } else { axesList[label].tsColors.push(elem.styles.color); elem.styles.yaxis = axesList[label].id; } } else { axesList[elem.internalId] = { id: ++Object.keys(axesList).length, uom: label, tsColors: [elem.styles.color], zeroScaled: elem.styles.zeroScaled }; elem.styles.yaxis = axesList[elem.internalId].id; } }); }); $q.all(requests).then(() => { var axes = []; angular.forEach(axesList, (elem) => { axes.splice(elem.id - 1, 0, { uom: elem.uom, tsColors: elem.tsColors, min: elem.zeroScaled ? 
0 : this.options.yaxis.min }); }); this.options.yaxes = axes; }); }; var createDataSet = () => { createYAxis(); var dataset = []; if (timeseriesService.getTimeseriesCount() > 0) { angular.forEach(timeseriesService.timeseries, (elem) => { flotDataHelperServ.updateTimeseriesInDataSet(dataset, renderOptions, elem.internalId, timeseriesService.getData(elem.internalId)); }); } return dataset; }; this.options = { series: { downsample: { threshold: 0 }, lines: { show: true, fill: false }, // points : { // show: true // }, shadowSize: 1 }, selection: { mode: null }, grid: { hoverable: true, autoHighlight: true }, crosshair: { mode: 'x' }, xaxis: { mode: 'time', timezone: 'browser', monthNames: monthNamesTranslaterServ.getMonthNames() // timeformat: '%Y/%m/%d', //use these the following two lines to have small ticks at the bottom ob the diagram // tickLength: 5, // tickColor: '#000' }, yaxis: { show: true, additionalWidth: 17, panRange: false, min: null, labelWidth: 50 // tickFormatter : function(val, axis) { // var factor = axis.tickDecimals ? 
Math.pow(10, axis.tickDecimals) : 1; // var formatted = '' + Math.round(val * factor) / factor; // return formatted + '<br>' + this.uom; // } }, legend: { show: false }, pan: { interactive: true, frameRate: 10 }, touch: { delayTouchEnded: 200, pan: 'x', scale: '' } }; angular.merge(this.options, settingsService.chartOptions); var renderOptions = { showRefValues: true, showSelection: true, showActive: true }; this.setTimeExtent = () => { this.options.xaxis.min = timeService.time.start.toDate().getTime(); this.options.xaxis.max = timeService.time.end.toDate().getTime(); }; this.timeseriesDataChanged = (timeseries) => { createYAxis(); flotDataHelperServ.updateAllTimeseriesToDataSet(this.dataset, renderOptions, timeseries); }; this.setTimeExtent(); $rootScope.$on('timeseriesChanged', (evt, id) => { createYAxis(); flotDataHelperServ.updateTimeseriesInDataSet(this.dataset, renderOptions, id, timeseriesService.getData(id)); }); $rootScope.$on('$translateChangeEnd', () => { this.options.xaxis.monthNames = monthNamesTranslaterServ.getMonthNames(); }); $rootScope.$on('allTimeseriesChanged', () => { createYAxis(); flotDataHelperServ.updateAllTimeseriesToDataSet(this.dataset, renderOptions, timeseriesService.getAllTimeseries()); }); this.dataset = createDataSet(); } ]);<|fim▁end|>
requests.push(requestUom); } if (elem.parameters && elem.parameters.phenomenon) { requestLabel = labelMapperSrvc.getMappedLabel(elem.parameters.phenomenon.label);
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- <|fim▁hole|><|fim▁end|>
from .__version__ import __version__
<|file_name|>FTypeAtomBuilder.java<|end_file_name|><|fim▁begin|>package de.csmath.QT; import java.util.Collection; import java.util.List; /** * This class builds a FTypeAtom from given parameters. * @author lpfeiler */ public class FTypeAtomBuilder extends QTAtomBuilder { /** * @see FTypeAtom#majBrand */ private int majBrand = 0; /** * @see FTypeAtom#minVersion */ private int minVersion = 0; /** * @see FTypeAtom#compBrands */ private Collection<Integer> compBrands; /** * Constructs a FTypeAtomBuilder. * @param size the size of the FTypeAtom in the file<|fim▁hole|> * @param type the type of the atom, should be set to 'ftyp' */ public FTypeAtomBuilder(int size, int type) { super(size, type); } /** * Returns a new FTypeAtom. * @return a new FTypeAtom */ public FTypeAtom build() { return new FTypeAtom(size,type,majBrand,minVersion,compBrands); } /** * Sets the major brand. * @see FTypeAtom#majBrand * @param majBrand the major brand * @return a reference to this object */ public FTypeAtomBuilder withMajBrand(int majBrand) { this.majBrand = majBrand; return this; } /** * Sets the minor version. * @see FTypeAtom#minVersion * @param minVersion the minor version * @return a reference to this object */ public FTypeAtomBuilder withMinVersion(int minVersion) { this.minVersion = minVersion; return this; } /** * Sets the compatible brands. * @param compBrands a collection of compatible brands * @return a reference to this object */ public FTypeAtomBuilder withCompBrands(Collection<Integer> compBrands) { this.compBrands = compBrands; return this; } }<|fim▁end|>
<|file_name|>dns.go<|end_file_name|><|fim▁begin|>package dns import ( "bufio" "bytes" "fmt" "io" "io/ioutil" "strings" . "github.com/onsi/ginkgo" "k8s.io/kubernetes/pkg/api" "k8s.io/kubernetes/pkg/api/errors" "k8s.io/kubernetes/pkg/api/unversioned" "k8s.io/kubernetes/pkg/apimachinery/registered" "k8s.io/kubernetes/pkg/util/sets" "k8s.io/kubernetes/pkg/util/uuid" "k8s.io/kubernetes/pkg/watch" e2e "k8s.io/kubernetes/test/e2e/framework" ) func createDNSPod(namespace, probeCmd string) *api.Pod { pod := &api.Pod{ TypeMeta: unversioned.TypeMeta{ Kind: "Pod", APIVersion: registered.GroupOrDie(api.GroupName).GroupVersion.String(), }, ObjectMeta: api.ObjectMeta{ Name: "dns-test-" + string(uuid.NewUUID()), Namespace: namespace, }, Spec: api.PodSpec{ RestartPolicy: api.RestartPolicyNever, Containers: []api.Container{ { Name: "querier", Image: "gcr.io/google_containers/dnsutils:e2e", Command: []string{"sh", "-c", probeCmd}, }, }, }, } return pod } func digForNames(namesToResolve []string, expect sets.String) string { fileNamePrefix := "test" var probeCmd string for _, name := range namesToResolve { // Resolve by TCP and UDP DNS. Use $$(...) because $(...) is // expanded by kubernetes (though this won't expand so should // remain a literal, safe > sorry). lookup := "A" if strings.HasPrefix(name, "_") { lookup = "SRV" } fileName := fmt.Sprintf("%s_udp@%s", fileNamePrefix, name) expect.Insert(fileName) probeCmd += fmt.Sprintf(`test -n "$$(dig +notcp +noall +answer +search %s %s)" && echo %q;`, name, lookup, fileName) fileName = fmt.Sprintf("%s_tcp@%s", fileNamePrefix, name) expect.Insert(fileName) probeCmd += fmt.Sprintf(`test -n "$$(dig +tcp +noall +answer +search %s %s)" && echo %q;`, name, lookup, fileName) } return probeCmd } func digForCNAMEs(namesToResolve []string, expect sets.String) string { fileNamePrefix := "test" var probeCmd string for _, name := range namesToResolve { // Resolve by TCP and UDP DNS. Use $$(...) because $(...) 
is // expanded by kubernetes (though this won't expand so should // remain a literal, safe > sorry). lookup := "CNAME" fileName := fmt.Sprintf("%s_udp@%s", fileNamePrefix, name) expect.Insert(fileName) probeCmd += fmt.Sprintf(`test -n "$$(dig +notcp +noall +answer +search %s %s)" && echo %q;`, name, lookup, fileName) fileName = fmt.Sprintf("%s_tcp@%s", fileNamePrefix, name) expect.Insert(fileName) probeCmd += fmt.Sprintf(`test -n "$$(dig +tcp +noall +answer +search %s %s)" && echo %q;`, name, lookup, fileName) } return probeCmd } func digForSRVs(namesToResolve []string, expect sets.String) string { fileNamePrefix := "test" var probeCmd string for _, name := range namesToResolve { // Resolve by TCP and UDP DNS. Use $$(...) because $(...) is // expanded by kubernetes (though this won't expand so should // remain a literal, safe > sorry). lookup := "SRV" fileName := fmt.Sprintf("%s_udp@%s", fileNamePrefix, name) expect.Insert(fileName) probeCmd += fmt.Sprintf(`test -n "$$(dig +notcp +noall +additional +search %s %s)" && echo %q;`, name, lookup, fileName) fileName = fmt.Sprintf("%s_tcp@%s", fileNamePrefix, name) expect.Insert(fileName) probeCmd += fmt.Sprintf(`test -n "$$(dig +tcp +noall +additional +search %s %s)" && echo %q;`, name, lookup, fileName) } return probeCmd } func digForARecords(records map[string][]string, expect sets.String) string { var probeCmd string fileNamePrefix := "test" for name, ips := range records { fileName := fmt.Sprintf("%s_endpoints@%s", fileNamePrefix, name) probeCmd += fmt.Sprintf(`[ "$$(dig +short +notcp +noall +answer +search %s A | sort | xargs echo)" = "%s" ] && echo %q;`, name, strings.Join(ips, " "), fileName) expect.Insert(fileName) } return probeCmd } func digForPod(namespace string, expect sets.String) string { var probeCmd string fileNamePrefix := "test" podARecByUDPFileName := fmt.Sprintf("%s_udp@PodARecord", fileNamePrefix) podARecByTCPFileName := fmt.Sprintf("%s_tcp@PodARecord", fileNamePrefix) probeCmd += 
fmt.Sprintf(`podARec=$$(hostname -i| awk -F. '{print $$1"-"$$2"-"$$3"-"$$4".%s.pod.cluster.local"}');`, namespace) probeCmd += fmt.Sprintf(`test -n "$$(dig +notcp +noall +answer +search $${podARec} A)" && echo %q;`, podARecByUDPFileName) probeCmd += fmt.Sprintf(`test -n "$$(dig +tcp +noall +answer +search $${podARec} A)" && echo %q;`, podARecByTCPFileName) expect.Insert(podARecByUDPFileName, podARecByTCPFileName) return probeCmd } func repeatCommand(times int, cmd ...string) string { probeCmd := fmt.Sprintf("for i in `seq 1 %d`; do ", times) probeCmd += strings.Join(cmd, " ") probeCmd += "sleep 1; done" return probeCmd } func assertLinesExist(lines sets.String, expect int, r io.Reader) error { count := make(map[string]int) unrecognized := sets.NewString() scan := bufio.NewScanner(r) for scan.Scan() { line := scan.Text() if lines.Has(line) { count[line]++ } else { unrecognized.Insert(line) } } for k := range lines { if count[k] != expect { return fmt.Errorf("unexpected count %d/%d for %q: %v", count[k], expect, k, unrecognized) } } if unrecognized.Len() > 0 { return fmt.Errorf("unexpected matches from output: %v", unrecognized) } return nil } // PodSucceeded returns true if the pod has succeeded, false if the pod has not yet // reached running state, or an error in any other case. 
func PodSucceeded(event watch.Event) (bool, error) { switch event.Type { case watch.Deleted: return false, errors.NewNotFound(unversioned.GroupResource{Resource: "pods"}, "") } switch t := event.Object.(type) { case *api.Pod: switch t.Status.Phase { case api.PodSucceeded: return true, nil case api.PodFailed: return false, fmt.Errorf("pod failed: %#v", t) } } return false, nil } func validateDNSResults(f *e2e.Framework, pod *api.Pod, fileNames sets.String, expect int) { By("submitting the pod to kubernetes") podClient := f.Client.Pods(f.Namespace.Name) defer func() { By("deleting the pod") defer GinkgoRecover() podClient.Delete(pod.Name, api.NewDeleteOptions(0)) }() updated, err := podClient.Create(pod) if err != nil { e2e.Failf("Failed to create %s pod: %v", pod.Name, err) } w, err := f.Client.Pods(f.Namespace.Name).Watch(api.SingleObject(api.ObjectMeta{Name: pod.Name, ResourceVersion: updated.ResourceVersion})) if err != nil { e2e.Failf("Failed: %v", err) } if _, err = watch.Until(e2e.PodStartTimeout, w, PodSucceeded); err != nil { e2e.Failf("Failed: %v", err) } By("retrieving the pod logs") r, err := podClient.GetLogs(pod.Name, &api.PodLogOptions{Container: "querier"}).Stream() if err != nil { e2e.Failf("Failed to get pod logs %s: %v", pod.Name, err) } out, err := ioutil.ReadAll(r) if err != nil { e2e.Failf("Failed to read pod logs %s: %v", pod.Name, err) } // Try to find results for each expected name. 
By("looking for the results for each expected name from probiers") if err := assertLinesExist(fileNames, expect, bytes.NewBuffer(out)); err != nil { e2e.Logf("Got results from pod:\n%s", out) e2e.Failf("Unexpected results: %v", err) } e2e.Logf("DNS probes using %s succeeded\n", pod.Name) } func createServiceSpec(serviceName string, isHeadless bool, externalName string, selector map[string]string) *api.Service { s := &api.Service{ ObjectMeta: api.ObjectMeta{ Name: serviceName, }, Spec: api.ServiceSpec{ Ports: []api.ServicePort{ {Port: 80, Name: "http", Protocol: "TCP"}, }, Selector: selector, }, } if isHeadless { s.Spec.ClusterIP = "None" } if len(externalName) > 0 { s.Spec.Type = api.ServiceTypeExternalName<|fim▁hole|> return s } func createEndpointSpec(name string) *api.Endpoints { return &api.Endpoints{ ObjectMeta: api.ObjectMeta{ Name: name, }, Subsets: []api.EndpointSubset{ { Addresses: []api.EndpointAddress{ {IP: "1.1.1.1", Hostname: "endpoint1"}, {IP: "1.1.1.2"}, }, NotReadyAddresses: []api.EndpointAddress{ {IP: "2.1.1.1"}, {IP: "2.1.1.2"}, }, Ports: []api.EndpointPort{ {Port: 80}, }, }, }, } } func ipsForEndpoints(ep *api.Endpoints) []string { ips := sets.NewString() for _, sub := range ep.Subsets { for _, addr := range sub.Addresses { ips.Insert(addr.IP) } } return ips.List() } var _ = Describe("DNS", func() { f := e2e.NewDefaultFramework("dns") It("should answer endpoint and wildcard queries for the cluster [Conformance]", func() { if _, err := f.Client.Services(f.Namespace.Name).Create(createServiceSpec("headless", true, "", nil)); err != nil { e2e.Failf("unable to create headless service: %v", err) } if _, err := f.Client.Endpoints(f.Namespace.Name).Create(createEndpointSpec("headless")); err != nil { e2e.Failf("unable to create clusterip endpoints: %v", err) } if _, err := f.Client.Services(f.Namespace.Name).Create(createServiceSpec("clusterip", false, "", nil)); err != nil { e2e.Failf("unable to create clusterip service: %v", err) } if _, err := 
f.Client.Endpoints(f.Namespace.Name).Create(createEndpointSpec("clusterip")); err != nil { e2e.Failf("unable to create clusterip endpoints: %v", err) } if _, err := f.Client.Services(f.Namespace.Name).Create(createServiceSpec("externalname", true, "www.google.com", nil)); err != nil { e2e.Failf("unable to create externalName service: %v", err) } ep, err := f.Client.Endpoints("default").Get("kubernetes") if err != nil { e2e.Failf("unable to find endpoints for kubernetes.default: %v", err) } kubeEndpoints := ipsForEndpoints(ep) readyEndpoints := ipsForEndpoints(createEndpointSpec("")) // All the names we need to be able to resolve. expect := sets.NewString() times := 10 cmd := repeatCommand( times, // the DNS pod should be able to resolve these names digForNames([]string{ // answer wildcards on default service "prefix.kubernetes.default", "prefix.kubernetes.default.svc", "prefix.kubernetes.default.svc.cluster.local", // answer wildcards on clusterIP services fmt.Sprintf("prefix.clusterip.%s", f.Namespace.Name), }, expect), // the DNS pod should be able to get additional A records for this service digForSRVs([]string{ fmt.Sprintf("_http._tcp.externalname.%s.svc", f.Namespace.Name), }, expect), // the DNS pod should be able to get a CNAME for this service digForCNAMEs([]string{ fmt.Sprintf("externalname.%s.svc", f.Namespace.Name), }, expect), // the DNS pod should be able to look up endpoints for names and wildcards digForARecords(map[string][]string{ "kubernetes.default.endpoints": kubeEndpoints, fmt.Sprintf("headless.%s.svc", f.Namespace.Name): readyEndpoints, fmt.Sprintf("headless.%s.endpoints", f.Namespace.Name): readyEndpoints, fmt.Sprintf("clusterip.%s.endpoints", f.Namespace.Name): readyEndpoints, fmt.Sprintf("endpoint1.headless.%s.endpoints", f.Namespace.Name): {"1.1.1.1"}, fmt.Sprintf("endpoint1.clusterip.%s.endpoints", f.Namespace.Name): {"1.1.1.1"}, }, expect), // the DNS pod should respond to its own request digForPod(f.Namespace.Name, expect), ) 
By("Running these commands:" + cmd + "\n") // Run a pod which probes DNS and exposes the results by HTTP. By("creating a pod to probe DNS") pod := createDNSPod(f.Namespace.Name, cmd) validateDNSResults(f, pod, expect, times) }) })<|fim▁end|>
s.Spec.ExternalName = externalName s.Spec.ClusterIP = "" }
<|file_name|>views.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from datetime import datetime from django.http import HttpResponse from smsbrana import SmsConnect from smsbrana import signals from smsbrana.const import DELIVERY_STATUS_DELIVERED, DATETIME_FORMAT from smsbrana.models import SentSms def smsconnect_notification(request): sc = SmsConnect() result = sc.inbox() # print result for delivered in result['delivery_report']: sms_id = delivered['idsms'] if delivered['status'] != DELIVERY_STATUS_DELIVERED: continue try: sms = SentSms.objects.get(sms_id=sms_id) if sms.delivered: continue<|fim▁hole|> except SentSms.DoesNotExist: # logger.error('sms delivered which wasn\'t sent' + str(delivered)) pass # delete the inbox if there are 100+ items if len(result['delivery_report']) > 100: sc.inbox(delete=True) signals.smsconnect_notification_received.send(sender=None, inbox=result, request=request) return HttpResponse('OK')<|fim▁end|>
sms.delivered = True sms.delivered_date = datetime.strptime(delivered['time'], DATETIME_FORMAT) sms.save()
<|file_name|>logFileParser.py<|end_file_name|><|fim▁begin|>""" 1. Parse log file of a webserver 2. Print the filename and number of bytes delivered for 200 responses """ import re import sys from os import path import operator<|fim▁hole|> pattern = re.compile(r'\[(?P<time>.+)\](\s+\")(?P<requestType>\w+)(\s+)(?P<fileName>.*?)(\sHTTP)\/(?P<httpVersion>.*?)\"\s+(?P<httpResponse>\d+)\s(?P<bytes>\d+)') fileDict = dict() with open(log_file_path, "r") as file: for line in file: pattern_match = pattern.match(line) log_data.append(pattern_match.groupdict()) dedup_log_data = [] for i in log_data: if i not in dedup_log_data: dedup_log_data.append(i) for item in dedup_log_data: key = item['fileName'] value = int(item['bytes']) respCode = item['httpResponse'] if (respCode == '200'): if key not in fileDict.keys(): fileDict[key] = value else: oldValue = int(fileDict.get(key)) value = oldValue+value fileDict[key] = value print(fileDict) print(dict(sorted(fileDict.items(), key=operator.itemgetter(1)))) sorted_fileDict = dict(sorted(fileDict.items(), key=operator.itemgetter(1))) out_Dict = dict(itertools.islice(sorted_fileDict.items(), 10)) for k, v in out_Dict.items(): print (str(k) + " " + str(v))<|fim▁end|>
import itertools log_file_path = "server.log" log_data = []
<|file_name|>algoliasearch.js<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2013 Algolia * http://www.algolia.com/ * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ var ALGOLIA_VERSION = '2.8.5'; /* * Copyright (c) 2013 Algolia * http://www.algolia.com/ * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. 
* * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ /* * Algolia Search library initialization * @param applicationID the application ID you have in your admin interface * @param apiKey a valid API key for the service * @param methodOrOptions the hash of parameters for initialization. It can contains: * - method (optional) specify if the protocol used is http or https (http by default to make the first search query faster). * You need to use https is you are doing something else than just search queries. * - hosts (optional) the list of hosts that you have received for the service * - dsn (optional) set to true if your account has the Distributed Search Option * - dsnHost (optional) override the automatic computation of dsn hostname */ var AlgoliaSearch = function(applicationID, apiKey, methodOrOptions, resolveDNS, hosts) { var self = this; this.applicationID = applicationID; this.apiKey = apiKey; this.dsn = true; this.dsnHost = null; this.hosts = []; this.currentHostIndex = 0; this.requestTimeoutInMs = 2000; this.extraHeaders = []; this.jsonp = null; var method; var tld = 'net'; if (typeof methodOrOptions === 'string') { // Old initialization method = methodOrOptions; } else { // Take all option from the hash var options = methodOrOptions || {}; if (!this._isUndefined(options.method)) { method = options.method; } if (!this._isUndefined(options.tld)) { tld = options.tld; } if (!this._isUndefined(options.dsn)) { this.dsn = options.dsn; } if (!this._isUndefined(options.hosts)) { hosts = options.hosts; } if (!this._isUndefined(options.dsnHost)) { 
this.dsnHost = options.dsnHost; } if (!this._isUndefined(options.requestTimeoutInMs)) { this.requestTimeoutInMs = +options.requestTimeoutInMs; } if (!this._isUndefined(options.jsonp)) { this.jsonp = options.jsonp; } } // If hosts is undefined, initialize it with applicationID if (this._isUndefined(hosts)) { hosts = [ this.applicationID + '-1.algolia.' + tld, this.applicationID + '-2.algolia.' + tld, this.applicationID + '-3.algolia.' + tld ]; } // detect is we use http or https this.host_protocol = 'http://'; if (this._isUndefined(method) || method === null) { this.host_protocol = ('https:' == document.location.protocol ? 'https' : 'http') + '://'; } else if (method === 'https' || method === 'HTTPS') { this.host_protocol = 'https://'; } // Add hosts in random order for (var i = 0; i < hosts.length; ++i) { if (Math.random() > 0.5) { this.hosts.reverse(); } this.hosts.push(this.host_protocol + hosts[i]); } if (Math.random() > 0.5) { this.hosts.reverse(); } // then add Distributed Search Network host if there is one if (this.dsn || this.dsnHost != null) { if (this.dsnHost) { this.hosts.unshift(this.host_protocol + this.dsnHost); } else { this.hosts.unshift(this.host_protocol + this.applicationID + '-dsn.algolia.' 
+ tld); } } }; function AlgoliaExplainResults(hit, titleAttribute, otherAttributes) { function _getHitExplanationForOneAttr_recurse(obj, foundWords) { var res = []; if (typeof obj === 'object' && 'matchedWords' in obj && 'value' in obj) { var match = false; for (var j = 0; j < obj.matchedWords.length; ++j) { var word = obj.matchedWords[j]; if (!(word in foundWords)) { foundWords[word] = 1; match = true; } } if (match) { res.push(obj.value); } } else if (Object.prototype.toString.call(obj) === '[object Array]') { for (var i = 0; i < obj.length; ++i) { var array = _getHitExplanationForOneAttr_recurse(obj[i], foundWords); res = res.concat(array); } } else if (typeof obj === 'object') { for (var prop in obj) { if (obj.hasOwnProperty(prop)){ res = res.concat(_getHitExplanationForOneAttr_recurse(obj[prop], foundWords)); } } } return res; } function _getHitExplanationForOneAttr(hit, foundWords, attr) { var base = hit._highlightResult || hit; if (attr.indexOf('.') === -1) { if (attr in base) { return _getHitExplanationForOneAttr_recurse(base[attr], foundWords); } return []; } var array = attr.split('.'); var obj = base; for (var i = 0; i < array.length; ++i) { if (Object.prototype.toString.call(obj) === '[object Array]') { var res = []; for (var j = 0; j < obj.length; ++j) { res = res.concat(_getHitExplanationForOneAttr(obj[j], foundWords, array.slice(i).join('.'))); } return res; } if (array[i] in obj) { obj = obj[array[i]]; } else { return []; } } return _getHitExplanationForOneAttr_recurse(obj, foundWords); } var res = {}; var foundWords = {}; var title = _getHitExplanationForOneAttr(hit, foundWords, titleAttribute); res.title = (title.length > 0) ? 
title[0] : ''; res.subtitles = []; if (typeof otherAttributes !== 'undefined') { for (var i = 0; i < otherAttributes.length; ++i) { var attr = _getHitExplanationForOneAttr(hit, foundWords, otherAttributes[i]); for (var j = 0; j < attr.length; ++j) { res.subtitles.push({ attr: otherAttributes[i], value: attr[j] }); } } } return res; } window.AlgoliaSearch = AlgoliaSearch; AlgoliaSearch.prototype = { /* * Delete an index * * @param indexName the name of index to delete * @param callback the result callback with two arguments * success: boolean set to true if the request was successfull * content: the server answer that contains the task ID */ deleteIndex: function(indexName, callback) { this._jsonRequest({ method: 'DELETE', url: '/1/indexes/' + encodeURIComponent(indexName), callback: callback }); }, /** * Move an existing index. * @param srcIndexName the name of index to copy. * @param dstIndexName the new index name that will contains a copy of srcIndexName (destination will be overriten if it already exist). * @param callback the result callback with two arguments * success: boolean set to true if the request was successfull * content: the server answer that contains the task ID */ moveIndex: function(srcIndexName, dstIndexName, callback) { var postObj = {operation: 'move', destination: dstIndexName}; this._jsonRequest({ method: 'POST', url: '/1/indexes/' + encodeURIComponent(srcIndexName) + '/operation', body: postObj, callback: callback }); }, /** * Copy an existing index. * @param srcIndexName the name of index to copy. * @param dstIndexName the new index name that will contains a copy of srcIndexName (destination will be overriten if it already exist). 
* @param callback the result callback with two arguments * success: boolean set to true if the request was successfull * content: the server answer that contains the task ID */ copyIndex: function(srcIndexName, dstIndexName, callback) { var postObj = {operation: 'copy', destination: dstIndexName}; this._jsonRequest({ method: 'POST', url: '/1/indexes/' + encodeURIComponent(srcIndexName) + '/operation', body: postObj, callback: callback }); }, /** * Return last log entries. * @param offset Specify the first entry to retrieve (0-based, 0 is the most recent log entry). * @param length Specify the maximum number of entries to retrieve starting at offset. Maximum allowed value: 1000. * @param callback the result callback with two arguments * success: boolean set to true if the request was successfull * content: the server answer that contains the task ID */ getLogs: function(callback, offset, length) { if (this._isUndefined(offset)) { offset = 0; } if (this._isUndefined(length)) { length = 10; } this._jsonRequest({ method: 'GET', url: '/1/logs?offset=' + offset + '&length=' + length, callback: callback }); }, /* * List all existing indexes (paginated) * * @param callback the result callback with two arguments * success: boolean set to true if the request was successfull * content: the server answer with index list or error description if success is false. * @param page The page to retrieve, starting at 0. */ listIndexes: function(callback, page) { var params = page ? 
'?page=' + page : ''; this._jsonRequest({ method: 'GET', url: '/1/indexes' + params, callback: callback }); }, /* * Get the index object initialized * * @param indexName the name of index * @param callback the result callback with one argument (the Index instance) */ initIndex: function(indexName) { return new this.Index(this, indexName); }, /* * List all existing user keys with their associated ACLs * * @param callback the result callback with two arguments * success: boolean set to true if the request was successfull * content: the server answer with user keys list or error description if success is false. */ listUserKeys: function(callback) { this._jsonRequest({ method: 'GET', url: '/1/keys', callback: callback }); }, /* * Get ACL of a user key * * @param callback the result callback with two arguments * success: boolean set to true if the request was successfull * content: the server answer with user keys list or error description if success is false. */ getUserKeyACL: function(key, callback) { this._jsonRequest({ method: 'GET', url: '/1/keys/' + key, callback: callback }); }, /* * Delete an existing user key * * @param callback the result callback with two arguments * success: boolean set to true if the request was successfull * content: the server answer with user keys list or error description if success is false. */ deleteUserKey: function(key, callback) { this._jsonRequest({ method: 'DELETE', url: '/1/keys/' + key, callback: callback }); }, /* * Add an existing user key * * @param acls the list of ACL for this key. 
Defined by an array of strings that * can contains the following values: * - search: allow to search (https and http) * - addObject: allows to add/update an object in the index (https only) * - deleteObject : allows to delete an existing object (https only) * - deleteIndex : allows to delete index content (https only) * - settings : allows to get index settings (https only) * - editSettings : allows to change index settings (https only) * @param callback the result callback with two arguments * success: boolean set to true if the request was successfull * content: the server answer with user keys list or error description if success is false. */ addUserKey: function(acls, callback) { var aclsObject = {}; aclsObject.acl = acls; this._jsonRequest({ method: 'POST', url: '/1/keys', body: aclsObject, callback: callback }); }, /* * Add an existing user key * * @param acls the list of ACL for this key. Defined by an array of strings that * can contains the following values: * - search: allow to search (https and http) * - addObject: allows to add/update an object in the index (https only) * - deleteObject : allows to delete an existing object (https only) * - deleteIndex : allows to delete index content (https only) * - settings : allows to get index settings (https only) * - editSettings : allows to change index settings (https only) * @param validity the number of seconds after which the key will be automatically removed (0 means no time limit for this key) * @param maxQueriesPerIPPerHour Specify the maximum number of API calls allowed from an IP address per hour. * @param maxHitsPerQuery Specify the maximum number of hits this API key can retrieve in one call. * @param callback the result callback with two arguments * success: boolean set to true if the request was successfull * content: the server answer with user keys list or error description if success is false. 
*/ addUserKeyWithValidity: function(acls, validity, maxQueriesPerIPPerHour, maxHitsPerQuery, callback) { var indexObj = this; var aclsObject = {}; aclsObject.acl = acls; aclsObject.validity = validity; aclsObject.maxQueriesPerIPPerHour = maxQueriesPerIPPerHour; aclsObject.maxHitsPerQuery = maxHitsPerQuery; this._jsonRequest({ method: 'POST', url: '/1/indexes/' + indexObj.indexName + '/keys', body: aclsObject, callback: callback }); }, /** * Set the extra security tagFilters header * @param {string|array} tags The list of tags defining the current security filters */ setSecurityTags: function(tags) { if (Object.prototype.toString.call(tags) === '[object Array]') { var strTags = []; for (var i = 0; i < tags.length; ++i) { if (Object.prototype.toString.call(tags[i]) === '[object Array]') { var oredTags = []; for (var j = 0; j < tags[i].length; ++j) { oredTags.push(tags[i][j]); } strTags.push('(' + oredTags.join(',') + ')'); } else { strTags.push(tags[i]); } } tags = strTags.join(','); } this.tagFilters = tags; }, /** * Set the extra user token header * @param {string} userToken The token identifying a uniq user (used to apply rate limits) */ setUserToken: function(userToken) { this.userToken = userToken; }, /* * Initialize a new batch of search queries */ startQueriesBatch: function() { this.batch = []; }, /* * Add a search query in the batch * * @param query the full text query * @param args (optional) if set, contains an object with query parameters: * - attributes: an array of object attribute names to retrieve * (if not set all attributes are retrieve) * - attributesToHighlight: an array of object attribute names to highlight * (if not set indexed attributes are highlighted) * - minWordSizefor1Typo: the minimum number of characters to accept one typo. * Defaults to 3. * - minWordSizefor2Typos: the minimum number of characters to accept two typos. * Defaults to 7. 
* - getRankingInfo: if set, the result hits will contain ranking information in * _rankingInfo attribute * - page: (pagination parameter) page to retrieve (zero base). Defaults to 0. * - hitsPerPage: (pagination parameter) number of hits per page. Defaults to 10. */ addQueryInBatch: function(indexName, query, args) { var params = 'query=' + encodeURIComponent(query); if (!this._isUndefined(args) && args !== null) { params = this._getSearchParams(args, params); } this.batch.push({ indexName: indexName, params: params }); }, /* * Clear all queries in cache */ clearCache: function() { this.cache = {}; }, /* * Launch the batch of queries using XMLHttpRequest. * (Optimized for browser using a POST query to minimize number of OPTIONS queries) * * @param callback the function that will receive results * @param delay (optional) if set, wait for this delay (in ms) and only send the batch if there was no other in the meantime. */ sendQueriesBatch: function(callback, delay) { var as = this; var params = {requests: []}; for (var i = 0; i < as.batch.length; ++i) { params.requests.push(as.batch[i]); } window.clearTimeout(as.onDelayTrigger); if (!this._isUndefined(delay) && delay !== null && delay > 0) { var onDelayTrigger = window.setTimeout( function() { as._sendQueriesBatch(params, callback); }, delay); as.onDelayTrigger = onDelayTrigger; } else { this._sendQueriesBatch(params, callback); } }, /** * Set the number of milliseconds a request can take before automatically being terminated. * * @param {Number} milliseconds */ setRequestTimeout: function(milliseconds) { if (milliseconds) { this.requestTimeoutInMs = parseInt(milliseconds, 10); } }, /* * Index class constructor. 
* You should not use this method directly but use initIndex() function */ Index: function(algoliasearch, indexName) { this.indexName = indexName; this.as = algoliasearch; this.typeAheadArgs = null; this.typeAheadValueOption = null; }, /** * Add an extra field to the HTTP request * * @param key the header field name * @param value the header field value */ setExtraHeader: function(key, value) { this.extraHeaders.push({ key: key, value: value}); }, _sendQueriesBatch: function(params, callback) { if (this.jsonp === null) { var self = this; this._jsonRequest({ cache: this.cache, method: 'POST', url: '/1/indexes/*/queries', body: params, callback: function(success, content) { if (!success) { // retry first with JSONP self.jsonp = true; self._sendQueriesBatch(params, callback); } else { self.jsonp = false; callback && callback(success, content); } } }); } else if (this.jsonp) { var jsonpParams = ''; for (var i = 0; i < params.requests.length; ++i) { var q = '/1/indexes/' + encodeURIComponent(params.requests[i].indexName) + '?' 
+ params.requests[i].params; jsonpParams += i + '=' + encodeURIComponent(q) + '&'; } var pObj = {params: jsonpParams}; this._jsonRequest({ cache: this.cache, method: 'GET', url: '/1/indexes/*', body: pObj, callback: callback }); } else { this._jsonRequest({ cache: this.cache, method: 'POST', url: '/1/indexes/*/queries', body: params, callback: callback}); } }, /* * Wrapper that try all hosts to maximize the quality of service */ _jsonRequest: function(opts) { var self = this; var callback = opts.callback; var cache = null; var cacheID = opts.url; if (!this._isUndefined(opts.body)) { cacheID = opts.url + '_body_' + JSON.stringify(opts.body); } if (!this._isUndefined(opts.cache)) { cache = opts.cache; if (!this._isUndefined(cache[cacheID])) { if (!this._isUndefined(callback)) { setTimeout(function () { callback(true, cache[cacheID]); }, 1); } return; } } opts.successiveRetryCount = 0; var impl = function() { if (opts.successiveRetryCount >= self.hosts.length) { if (!self._isUndefined(callback)) { opts.successiveRetryCount = 0; callback(false, { message: 'Cannot connect the Algolia\'s Search API. Please send an email to support@algolia.com to report the issue.' 
}); } return; } opts.callback = function(retry, success, res, body) { if (!success && !self._isUndefined(body)) { window.console && console.log('Error: ' + body.message); } if (success && !self._isUndefined(opts.cache)) { cache[cacheID] = body; } if (!success && retry) { self.currentHostIndex = ++self.currentHostIndex % self.hosts.length; opts.successiveRetryCount += 1; impl(); } else { opts.successiveRetryCount = 0; if (!self._isUndefined(callback)) { callback(success, body); } } }; opts.hostname = self.hosts[self.currentHostIndex]; self._jsonRequestByHost(opts); }; impl(); }, _jsonRequestByHost: function(opts) { var self = this; var url = opts.hostname + opts.url; if (this.jsonp) { this._makeJsonpRequestByHost(url, opts); } else { this._makeXmlHttpRequestByHost(url, opts); } }, /** * Make a JSONP request * * @param url request url (includes endpoint and path) * @param opts all request options */ _makeJsonpRequestByHost: function(url, opts) { ////////////////// ////////////////// ///// ///// DISABLED FOR SECURITY PURPOSE ///// ////////////////// ////////////////// opts.callback(true, false, null, { 'message': 'JSONP not allowed.' }); return; // if (opts.method !== 'GET') { // opts.callback(true, false, null, { 'message': 'Method ' + opts.method + ' ' + url + ' is not supported by JSONP.' 
}); // return; // } // this.jsonpCounter = this.jsonpCounter || 0; // this.jsonpCounter += 1; // var head = document.getElementsByTagName('head')[0]; // var script = document.createElement('script'); // var cb = 'algoliaJSONP_' + this.jsonpCounter; // var done = false; // var ontimeout = null; // window[cb] = function(data) { // opts.callback(false, true, null, data); // try { delete window[cb]; } catch (e) { window[cb] = undefined; } // }; // script.type = 'text/javascript'; // script.src = url + '?callback=' + cb + '&X-Algolia-Application-Id=' + this.applicationID + '&X-Algolia-API-Key=' + this.apiKey; // if (this.tagFilters) { // script.src += '&X-Algolia-TagFilters=' + encodeURIComponent(this.tagFilters); // } // if (this.userToken) { // script.src += '&X-Algolia-UserToken=' + encodeURIComponent(this.userToken); // } // for (var i = 0; i < this.extraHeaders.length; ++i) { // script.src += '&' + this.extraHeaders[i].key + '=' + this.extraHeaders[i].value; // } // if (opts.body && opts.body.params) { // script.src += '&' + opts.body.params; // } // ontimeout = setTimeout(function() { // script.onload = script.onreadystatechange = script.onerror = null; // window[cb] = function(data) { // try { delete window[cb]; } catch (e) { window[cb] = undefined; } // }; // opts.callback(true, false, null, { 'message': 'Timeout - Failed to load JSONP script.' }); // head.removeChild(script); // clearTimeout(ontimeout); // ontimeout = null; // }, this.requestTimeoutInMs); // script.onload = script.onreadystatechange = function() { // clearTimeout(ontimeout); // ontimeout = null; // if (!done && (!this.readyState || this.readyState == 'loaded' || this.readyState == 'complete')) { // done = true; // if (typeof window[cb + '_loaded'] === 'undefined') { // opts.callback(true, false, null, { 'message': 'Failed to load JSONP script.' 
}); // try { delete window[cb]; } catch (e) { window[cb] = undefined; } // } else { // try { delete window[cb + '_loaded']; } catch (e) { window[cb + '_loaded'] = undefined; } // } // script.onload = script.onreadystatechange = null; // Handle memory leak in IE // head.removeChild(script); // } // }; // script.onerror = function() { // clearTimeout(ontimeout); // ontimeout = null; // opts.callback(true, false, null, { 'message': 'Failed to load JSONP script.' }); // head.removeChild(script); // try { delete window[cb]; } catch (e) { window[cb] = undefined; } // }; // head.appendChild(script); }, /** * Make a XmlHttpRequest * * @param url request url (includes endpoint and path) * @param opts all request opts */ _makeXmlHttpRequestByHost: function(url, opts) { var self = this; var xmlHttp = window.XMLHttpRequest ? new XMLHttpRequest() : {}; var body = null; var ontimeout = null; if (!this._isUndefined(opts.body)) { body = JSON.stringify(opts.body); } url += ((url.indexOf('?') == -1) ? '?' : '&') + 'X-Algolia-API-Key=' + this.apiKey; url += '&X-Algolia-Application-Id=' + this.applicationID; if (this.userToken) { url += '&X-Algolia-UserToken=' + encodeURIComponent(this.userToken); } if (this.tagFilters) { url += '&X-Algolia-TagFilters=' + encodeURIComponent(this.tagFilters); } for (var i = 0; i < this.extraHeaders.length; ++i) { url += '&' + this.extraHeaders[i].key + '=' + this.extraHeaders[i].value; } if ('withCredentials' in xmlHttp) { xmlHttp.open(opts.method, url, true); xmlHttp.timeout = this.requestTimeoutInMs * (opts.successiveRetryCount + 1); if (body !== null) { /* This content type is specified to follow CORS 'simple header' directive */ xmlHttp.setRequestHeader('Content-type', 'application/x-www-form-urlencoded'); } } else if (typeof XDomainRequest !== 'undefined') { // Handle IE8/IE9 // XDomainRequest only exists in IE, and is IE's way of making CORS requests. 
xmlHttp = new XDomainRequest(); xmlHttp.open(opts.method, url); } else { // very old browser, not supported opts.callback(false, false, null, { 'message': 'CORS not supported' }); return; } ontimeout = setTimeout(function() { xmlHttp.abort(); // Prevent Internet Explorer 9, JScript Error c00c023f if (xmlHttp.aborted === true) { stopLoadAnimation(); return; } opts.callback(true, false, null, { 'message': 'Timeout - Could not connect to endpoint ' + url } ); clearTimeout(ontimeout); ontimeout = null; }, this.requestTimeoutInMs * (opts.successiveRetryCount + 1)); xmlHttp.onload = function(event) { clearTimeout(ontimeout); ontimeout = null; if (!self._isUndefined(event) && event.target !== null) { var retry = (event.target.status === 0 || event.target.status === 503); var success = false; var response = null; if (typeof XDomainRequest !== 'undefined') { // Handle CORS requests IE8/IE9 response = event.target.responseText; success = (response && response.length > 0); } else { response = event.target.response; success = (event.target.status === 200 || event.target.status === 201); } opts.callback(retry, success, event.target, response ? JSON.parse(response) : null); } else { opts.callback(false, true, event, JSON.parse(xmlHttp.responseText)); } }; xmlHttp.ontimeout = function(event) { // stop the network call but rely on ontimeout to call opt.callback }; xmlHttp.onerror = function(event) { clearTimeout(ontimeout); ontimeout = null; opts.callback(true, false, null, { 'message': 'Could not connect to host', 'error': event } ); }; xmlHttp.send(body); }, /* * Transform search param object in query string */ _getSearchParams: function(args, params) { if (this._isUndefined(args) || args === null) { return params; } for (var key in args) { if (key !== null && args.hasOwnProperty(key)) { params += (params.length === 0) ? '?' : '&'; params += key + '=' + encodeURIComponent(Object.prototype.toString.call(args[key]) === '[object Array]' ? 
JSON.stringify(args[key]) : args[key]); } } return params; }, _isUndefined: function(obj) { return obj === void 0; }, /// internal attributes applicationID: null, apiKey: null, tagFilters: null, userToken: null, hosts: [], cache: {}, extraHeaders: [] }; /* * Contains all the functions related to one index * You should use AlgoliaSearch.initIndex(indexName) to retrieve this object */ AlgoliaSearch.prototype.Index.prototype = { /* * Clear all queries in cache */ clearCache: function() { this.cache = {}; }, /* * Add an object in this index * * @param content contains the javascript object to add inside the index * @param callback (optional) the result callback with two arguments: * success: boolean set to true if the request was successfull * content: the server answer that contains 3 elements: createAt, taskId and objectID * @param objectID (optional) an objectID you want to attribute to this object * (if the attribute already exist the old object will be overwrite) */ addObject: function(content, callback, objectID) { var indexObj = this; if (this.as._isUndefined(objectID)) { this.as._jsonRequest({ method: 'POST', url: '/1/indexes/' + encodeURIComponent(indexObj.indexName), body: content, callback: callback }); } else { this.as._jsonRequest({ method: 'PUT', url: '/1/indexes/' + encodeURIComponent(indexObj.indexName) + '/' + encodeURIComponent(objectID), body: content, callback: callback }); } }, /* * Add several objects * * @param objects contains an array of objects to add * @param callback (optional) the result callback with two arguments: * success: boolean set to true if the request was successfull * content: the server answer that updateAt and taskID */ addObjects: function(objects, callback) { var indexObj = this; var postObj = {requests:[]}; for (var i = 0; i < objects.length; ++i) { var request = { action: 'addObject', body: objects[i] }; postObj.requests.push(request); } this.as._jsonRequest({ method: 'POST', url: '/1/indexes/' + 
encodeURIComponent(indexObj.indexName) + '/batch', body: postObj, callback: callback }); }, /* * Get an object from this index * * @param objectID the unique identifier of the object to retrieve * @param callback (optional) the result callback with two arguments * success: boolean set to true if the request was successfull * content: the object to retrieve or the error message if a failure occured * @param attributes (optional) if set, contains the array of attribute names to retrieve */ getObject: function(objectID, callback, attributes) { var indexObj = this; var params = ''; if (!this.as._isUndefined(attributes)) { params = '?attributes='; for (var i = 0; i < attributes.length; ++i) { if (i !== 0) { params += ','; } params += attributes[i]; } } if (this.as.jsonp === null) { this.as._jsonRequest({ method: 'GET', url: '/1/indexes/' + encodeURIComponent(indexObj.indexName) + '/' + encodeURIComponent(objectID) + params, callback: callback }); } else { var pObj = {params: params}; this.as._jsonRequest({ method: 'GET', url: '/1/indexes/' + encodeURIComponent(indexObj.indexName) + '/' + encodeURIComponent(objectID), callback: callback, body: pObj}); } }, /* * Update partially an object (only update attributes passed in argument) * * @param partialObject contains the javascript attributes to override, the * object must contains an objectID attribute * @param callback (optional) the result callback with two arguments: * success: boolean set to true if the request was successfull * content: the server answer that contains 3 elements: createAt, taskId and objectID */ partialUpdateObject: function(partialObject, callback) { var indexObj = this; this.as._jsonRequest({ method: 'POST', url: '/1/indexes/' + encodeURIComponent(indexObj.indexName) + '/' + encodeURIComponent(partialObject.objectID) + '/partial', body: partialObject, callback: callback }); }, /* * Partially Override the content of several objects * * @param objects contains an array of objects to update (each 
object must contains a objectID attribute) * @param callback (optional) the result callback with two arguments: * success: boolean set to true if the request was successfull * content: the server answer that updateAt and taskID */ partialUpdateObjects: function(objects, callback) { var indexObj = this; var postObj = {requests:[]}; for (var i = 0; i < objects.length; ++i) { var request = { action: 'partialUpdateObject', objectID: objects[i].objectID, body: objects[i] }; postObj.requests.push(request); } this.as._jsonRequest({ method: 'POST', url: '/1/indexes/' + encodeURIComponent(indexObj.indexName) + '/batch', body: postObj, callback: callback }); }, /* * Override the content of object * * @param object contains the javascript object to save, the object must contains an objectID attribute * @param callback (optional) the result callback with two arguments: * success: boolean set to true if the request was successfull * content: the server answer that updateAt and taskID */ saveObject: function(object, callback) { var indexObj = this; this.as._jsonRequest({ method: 'PUT', url: '/1/indexes/' + encodeURIComponent(indexObj.indexName) + '/' + encodeURIComponent(object.objectID), body: object, callback: callback }); }, /* * Override the content of several objects * * @param objects contains an array of objects to update (each object must contains a objectID attribute) * @param callback (optional) the result callback with two arguments: * success: boolean set to true if the request was successfull * content: the server answer that updateAt and taskID */ saveObjects: function(objects, callback) { var indexObj = this; var postObj = {requests:[]}; for (var i = 0; i < objects.length; ++i) { var request = { action: 'updateObject', objectID: objects[i].objectID, body: objects[i] }; postObj.requests.push(request); } this.as._jsonRequest({ method: 'POST', url: '/1/indexes/' + encodeURIComponent(indexObj.indexName) + '/batch', body: postObj, callback: callback });<|fim▁hole|> * * 
@param objectID the unique identifier of object to delete * @param callback (optional) the result callback with two arguments: * success: boolean set to true if the request was successfull * content: the server answer that contains 3 elements: createAt, taskId and objectID */ deleteObject: function(objectID, callback) { if (objectID === null || objectID.length === 0) { callback(false, { message: 'empty objectID'}); return; } var indexObj = this; this.as._jsonRequest({ method: 'DELETE', url: '/1/indexes/' + encodeURIComponent(indexObj.indexName) + '/' + encodeURIComponent(objectID), callback: callback }); }, /* * Search inside the index using XMLHttpRequest request (Using a POST query to * minimize number of OPTIONS queries: Cross-Origin Resource Sharing). * * @param query the full text query * @param callback the result callback with two arguments: * success: boolean set to true if the request was successfull. If false, the content contains the error. * content: the server answer that contains the list of results. * @param args (optional) if set, contains an object with query parameters: * - page: (integer) Pagination parameter used to select the page to retrieve. * Page is zero-based and defaults to 0. Thus, to retrieve the 10th page you need to set page=9 * - hitsPerPage: (integer) Pagination parameter used to select the number of hits per page. Defaults to 20. * - attributesToRetrieve: a string that contains the list of object attributes you want to retrieve (let you minimize the answer size). * Attributes are separated with a comma (for example "name,address"). * You can also use a string array encoding (for example ["name","address"]). * By default, all attributes are retrieved. You can also use '*' to retrieve all values when an attributesToRetrieve setting is specified for your index. * - attributesToHighlight: a string that contains the list of attributes you want to highlight according to the query. * Attributes are separated by a comma. 
You can also use a string array encoding (for example ["name","address"]). * If an attribute has no match for the query, the raw value is returned. By default all indexed text attributes are highlighted. * You can use `*` if you want to highlight all textual attributes. Numerical attributes are not highlighted. * A matchLevel is returned for each highlighted attribute and can contain: * - full: if all the query terms were found in the attribute, * - partial: if only some of the query terms were found, * - none: if none of the query terms were found. * - attributesToSnippet: a string that contains the list of attributes to snippet alongside the number of words to return (syntax is `attributeName:nbWords`). * Attributes are separated by a comma (Example: attributesToSnippet=name:10,content:10). * You can also use a string array encoding (Example: attributesToSnippet: ["name:10","content:10"]). By default no snippet is computed. * - minWordSizefor1Typo: the minimum number of characters in a query word to accept one typo in this word. Defaults to 3. * - minWordSizefor2Typos: the minimum number of characters in a query word to accept two typos in this word. Defaults to 7. * - getRankingInfo: if set to 1, the result hits will contain ranking information in _rankingInfo attribute. * - aroundLatLng: search for entries around a given latitude/longitude (specified as two floats separated by a comma). * For example aroundLatLng=47.316669,5.016670). * You can specify the maximum distance in meters with the aroundRadius parameter (in meters) and the precision for ranking with aroundPrecision * (for example if you set aroundPrecision=100, two objects that are distant of less than 100m will be considered as identical for "geo" ranking parameter). 
* At indexing, you should specify geoloc of an object with the _geoloc attribute (in the form {"_geoloc":{"lat":48.853409, "lng":2.348800}}) * - insideBoundingBox: search entries inside a given area defined by the two extreme points of a rectangle (defined by 4 floats: p1Lat,p1Lng,p2Lat,p2Lng). * For example insideBoundingBox=47.3165,4.9665,47.3424,5.0201). * At indexing, you should specify geoloc of an object with the _geoloc attribute (in the form {"_geoloc":{"lat":48.853409, "lng":2.348800}}) * - numericFilters: a string that contains the list of numeric filters you want to apply separated by a comma. * The syntax of one filter is `attributeName` followed by `operand` followed by `value`. Supported operands are `<`, `<=`, `=`, `>` and `>=`. * You can have multiple conditions on one attribute like for example numericFilters=price>100,price<1000. * You can also use a string array encoding (for example numericFilters: ["price>100","price<1000"]). * - tagFilters: filter the query by a set of tags. You can AND tags by separating them by commas. * To OR tags, you must add parentheses. For example, tags=tag1,(tag2,tag3) means tag1 AND (tag2 OR tag3). * You can also use a string array encoding, for example tagFilters: ["tag1",["tag2","tag3"]] means tag1 AND (tag2 OR tag3). * At indexing, tags should be added in the _tags** attribute of objects (for example {"_tags":["tag1","tag2"]}). * - facetFilters: filter the query by a list of facets. * Facets are separated by commas and each facet is encoded as `attributeName:value`. * For example: `facetFilters=category:Book,author:John%20Doe`. * You can also use a string array encoding (for example `["category:Book","author:John%20Doe"]`). * - facets: List of object attributes that you want to use for faceting. * Attributes are separated with a comma (for example `"category,author"` ). * You can also use a JSON string array encoding (for example ["category","author"]). 
* Only attributes that have been added in **attributesForFaceting** index setting can be used in this parameter. * You can also use `*` to perform faceting on all attributes specified in **attributesForFaceting**. * - queryType: select how the query words are interpreted, it can be one of the following value: * - prefixAll: all query words are interpreted as prefixes, * - prefixLast: only the last word is interpreted as a prefix (default behavior), * - prefixNone: no query word is interpreted as a prefix. This option is not recommended. * - optionalWords: a string that contains the list of words that should be considered as optional when found in the query. * The list of words is comma separated. * - distinct: If set to 1, enable the distinct feature (disabled by default) if the attributeForDistinct index setting is set. * This feature is similar to the SQL "distinct" keyword: when enabled in a query with the distinct=1 parameter, * all hits containing a duplicate value for the attributeForDistinct attribute are removed from results. * For example, if the chosen attribute is show_name and several hits have the same value for show_name, then only the best * one is kept and others are removed. * @param delay (optional) if set, wait for this delay (in ms) and only send the query if there was no other in the meantime. */ search: function(query, callback, args, delay) { var indexObj = this; var params = 'query=' + encodeURIComponent(query); if (!this.as._isUndefined(args) && args !== null) { params = this.as._getSearchParams(args, params); } window.clearTimeout(indexObj.onDelayTrigger); if (!this.as._isUndefined(delay) && delay !== null && delay > 0) { var onDelayTrigger = window.setTimeout( function() { indexObj._search(params, callback); }, delay); indexObj.onDelayTrigger = onDelayTrigger; } else { this._search(params, callback); } }, /* * Browse all index content * * @param page Pagination parameter used to select the page to retrieve. 
* Page is zero-based and defaults to 0. Thus, to retrieve the 10th page you need to set page=9 * @param hitsPerPage: Pagination parameter used to select the number of hits per page. Defaults to 1000. */ browse: function(page, callback, hitsPerPage) { var indexObj = this; var params = '?page=' + page; if (!this.as._isUndefined(hitsPerPage)) { params += '&hitsPerPage=' + hitsPerPage; } this.as._jsonRequest({ method: 'GET', url: '/1/indexes/' + encodeURIComponent(indexObj.indexName) + '/browse' + params, callback: callback }); }, /* * Get a Typeahead.js adapter * @param searchParams contains an object with query parameters (see search for details) */ ttAdapter: function(params) { var self = this; return function(query, cb) { self.search(query, function(success, content) { if (success) { cb(content.hits); } }, params); }; }, /* * Wait the publication of a task on the server. * All server task are asynchronous and you can check with this method that the task is published. * * @param taskID the id of the task returned by server * @param callback the result callback with with two arguments: * success: boolean set to true if the request was successfull * content: the server answer that contains the list of results */ waitTask: function(taskID, callback) { var indexObj = this; this.as._jsonRequest({ method: 'GET', url: '/1/indexes/' + encodeURIComponent(indexObj.indexName) + '/task/' + taskID, callback: function(success, body) { if (success) { if (body.status === 'published') { callback(true, body); } else { setTimeout(function() { indexObj.waitTask(taskID, callback); }, 100); } } else { callback(false, body); } }}); }, /* * This function deletes the index content. Settings and index specific API keys are kept untouched. 
* * @param callback (optional) the result callback with two arguments * success: boolean set to true if the request was successfull * content: the settings object or the error message if a failure occured */ clearIndex: function(callback) { var indexObj = this; this.as._jsonRequest({ method: 'POST', url: '/1/indexes/' + encodeURIComponent(indexObj.indexName) + '/clear', callback: callback }); }, /* * Get settings of this index * * @param callback (optional) the result callback with two arguments * success: boolean set to true if the request was successfull * content: the settings object or the error message if a failure occured */ getSettings: function(callback) { var indexObj = this; this.as._jsonRequest({ method: 'GET', url: '/1/indexes/' + encodeURIComponent(indexObj.indexName) + '/settings', callback: callback }); }, /* * Set settings for this index * * @param settigns the settings object that can contains : * - minWordSizefor1Typo: (integer) the minimum number of characters to accept one typo (default = 3). * - minWordSizefor2Typos: (integer) the minimum number of characters to accept two typos (default = 7). * - hitsPerPage: (integer) the number of hits per page (default = 10). * - attributesToRetrieve: (array of strings) default list of attributes to retrieve in objects. * If set to null, all attributes are retrieved. * - attributesToHighlight: (array of strings) default list of attributes to highlight. * If set to null, all indexed attributes are highlighted. * - attributesToSnippet**: (array of strings) default list of attributes to snippet alongside the number of words to return (syntax is attributeName:nbWords). * By default no snippet is computed. If set to null, no snippet is computed. * - attributesToIndex: (array of strings) the list of fields you want to index. * If set to null, all textual and numerical attributes of your objects are indexed, but you should update it to get optimal results. 
* This parameter has two important uses: * - Limit the attributes to index: For example if you store a binary image in base64, you want to store it and be able to * retrieve it but you don't want to search in the base64 string. * - Control part of the ranking*: (see the ranking parameter for full explanation) Matches in attributes at the beginning of * the list will be considered more important than matches in attributes further down the list. * In one attribute, matching text at the beginning of the attribute will be considered more important than text after, you can disable * this behavior if you add your attribute inside `unordered(AttributeName)`, for example attributesToIndex: ["title", "unordered(text)"]. * - attributesForFaceting: (array of strings) The list of fields you want to use for faceting. * All strings in the attribute selected for faceting are extracted and added as a facet. If set to null, no attribute is used for faceting. * - attributeForDistinct: (string) The attribute name used for the Distinct feature. This feature is similar to the SQL "distinct" keyword: when enabled * in query with the distinct=1 parameter, all hits containing a duplicate value for this attribute are removed from results. * For example, if the chosen attribute is show_name and several hits have the same value for show_name, then only the best one is kept and others are removed. * - ranking: (array of strings) controls the way results are sorted. * We have six available criteria: * - typo: sort according to number of typos, * - geo: sort according to decreassing distance when performing a geo-location based search, * - proximity: sort according to the proximity of query words in hits, * - attribute: sort according to the order of attributes defined by attributesToIndex, * - exact: * - if the user query contains one word: sort objects having an attribute that is exactly the query word before others. 
* For example if you search for the "V" TV show, you want to find it with the "V" query and avoid to have all popular TV * show starting by the v letter before it. * - if the user query contains multiple words: sort according to the number of words that matched exactly (and not as a prefix). * - custom: sort according to a user defined formula set in **customRanking** attribute. * The standard order is ["typo", "geo", "proximity", "attribute", "exact", "custom"] * - customRanking: (array of strings) lets you specify part of the ranking. * The syntax of this condition is an array of strings containing attributes prefixed by asc (ascending order) or desc (descending order) operator. * For example `"customRanking" => ["desc(population)", "asc(name)"]` * - queryType: Select how the query words are interpreted, it can be one of the following value: * - prefixAll: all query words are interpreted as prefixes, * - prefixLast: only the last word is interpreted as a prefix (default behavior), * - prefixNone: no query word is interpreted as a prefix. This option is not recommended. * - highlightPreTag: (string) Specify the string that is inserted before the highlighted parts in the query result (default to "<em>"). * - highlightPostTag: (string) Specify the string that is inserted after the highlighted parts in the query result (default to "</em>"). * - optionalWords: (array of strings) Specify a list of words that should be considered as optional when found in the query. 
* @param callback (optional) the result callback with two arguments * success: boolean set to true if the request was successfull * content: the server answer or the error message if a failure occured */ setSettings: function(settings, callback) { var indexObj = this; this.as._jsonRequest({ method: 'PUT', url: '/1/indexes/' + encodeURIComponent(indexObj.indexName) + '/settings', body: settings, callback: callback }); }, /* * List all existing user keys associated to this index * * @param callback the result callback with two arguments * success: boolean set to true if the request was successfull * content: the server answer with user keys list or error description if success is false. */ listUserKeys: function(callback) { var indexObj = this; this.as._jsonRequest({ method: 'GET', url: '/1/indexes/' + encodeURIComponent(indexObj.indexName) + '/keys', callback: callback }); }, /* * Get ACL of a user key associated to this index * * @param callback the result callback with two arguments * success: boolean set to true if the request was successfull * content: the server answer with user keys list or error description if success is false. */ getUserKeyACL: function(key, callback) { var indexObj = this; this.as._jsonRequest({ method: 'GET', url: '/1/indexes/' + encodeURIComponent(indexObj.indexName) + '/keys/' + key, callback: callback }); }, /* * Delete an existing user key associated to this index * * @param callback the result callback with two arguments * success: boolean set to true if the request was successfull * content: the server answer with user keys list or error description if success is false. */ deleteUserKey: function(key, callback) { var indexObj = this; this.as._jsonRequest({ method: 'DELETE', url: '/1/indexes/' + encodeURIComponent(indexObj.indexName) + '/keys/' + key, callback: callback }); }, /* * Add an existing user key associated to this index * * @param acls the list of ACL for this key. 
Defined by an array of strings that * can contains the following values: * - search: allow to search (https and http) * - addObject: allows to add/update an object in the index (https only) * - deleteObject : allows to delete an existing object (https only) * - deleteIndex : allows to delete index content (https only) * - settings : allows to get index settings (https only) * - editSettings : allows to change index settings (https only) * @param callback the result callback with two arguments * success: boolean set to true if the request was successfull * content: the server answer with user keys list or error description if success is false. */ addUserKey: function(acls, callback) { var indexObj = this; var aclsObject = {}; aclsObject.acl = acls; this.as._jsonRequest({ method: 'POST', url: '/1/indexes/' + encodeURIComponent(indexObj.indexName) + '/keys', body: aclsObject, callback: callback }); }, /* * Add an existing user key associated to this index * * @param acls the list of ACL for this key. Defined by an array of strings that * can contains the following values: * - search: allow to search (https and http) * - addObject: allows to add/update an object in the index (https only) * - deleteObject : allows to delete an existing object (https only) * - deleteIndex : allows to delete index content (https only) * - settings : allows to get index settings (https only) * - editSettings : allows to change index settings (https only) * @param validity the number of seconds after which the key will be automatically removed (0 means no time limit for this key) * @param maxQueriesPerIPPerHour Specify the maximum number of API calls allowed from an IP address per hour. * @param maxHitsPerQuery Specify the maximum number of hits this API key can retrieve in one call. * @param callback the result callback with two arguments * success: boolean set to true if the request was successfull * content: the server answer with user keys list or error description if success is false. 
*/ addUserKeyWithValidity: function(acls, validity, maxQueriesPerIPPerHour, maxHitsPerQuery, callback) { var indexObj = this; var aclsObject = {}; aclsObject.acl = acls; aclsObject.validity = validity; aclsObject.maxQueriesPerIPPerHour = maxQueriesPerIPPerHour; aclsObject.maxHitsPerQuery = maxHitsPerQuery; this.as._jsonRequest({ method: 'POST', url: '/1/indexes/' + encodeURIComponent(indexObj.indexName) + '/keys', body: aclsObject, callback: callback }); }, /// /// Internal methods only after this line /// _search: function(params, callback) { var pObj = {params: params}; if (this.as.jsonp === null) { var self = this; this.as._jsonRequest({ cache: this.cache, method: 'POST', url: '/1/indexes/' + encodeURIComponent(this.indexName) + '/query', body: pObj, callback: function(success, content) { if (!success) { // retry first with JSONP self.as.jsonp = true; self._search(params, callback); } else { self.as.jsonp = false; callback && callback(success, content); } } }); } else if (this.as.jsonp) { this.as._jsonRequest({ cache: this.cache, method: 'GET', url: '/1/indexes/' + encodeURIComponent(this.indexName), body: pObj, callback: callback }); } else { this.as._jsonRequest({ cache: this.cache, method: 'POST', url: '/1/indexes/' + encodeURIComponent(this.indexName) + '/query', body: pObj, callback: callback}); } }, // internal attributes as: null, indexName: null, cache: {}, typeAheadArgs: null, typeAheadValueOption: null, emptyConstructor: function() {} }; /* * Copyright (c) 2014 Algolia * http://www.algolia.com/ * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above 
copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ (function($) { var extend = function(out) { out = out || {}; for (var i = 1; i < arguments.length; i++) { if (!arguments[i]) { continue; } for (var key in arguments[i]) { if (arguments[i].hasOwnProperty(key)) { out[key] = arguments[i][key]; } } } return out; }; /** * Algolia Search Helper providing faceting and disjunctive faceting * @param {AlgoliaSearch} client an AlgoliaSearch client * @param {string} index the index name to query * @param {hash} options an associative array defining the hitsPerPage, list of facets and list of disjunctive facets */ window.AlgoliaSearchHelper = function(client, index, options) { /// Default options var defaults = { facets: [], // list of facets to compute disjunctiveFacets: [], // list of disjunctive facets to compute hitsPerPage: 20 // number of hits per page }; this.init(client, index, extend({}, defaults, options)); }; AlgoliaSearchHelper.prototype = { /** * Initialize a new AlgoliaSearchHelper * @param {AlgoliaSearch} client an AlgoliaSearch client * @param {string} index the index name to query * @param {hash} options an associative array defining the hitsPerPage, list of facets and list of disjunctive facets * @return {AlgoliaSearchHelper} */ init: function(client, index, options) { this.client = client; this.index = index; this.options = options; this.page = 0; this.refinements = {}; this.disjunctiveRefinements = {}; this.extraQueries 
= []; }, /** * Perform a query * @param {string} q the user query * @param {function} searchCallback the result callback called with two arguments: * success: boolean set to true if the request was successfull * content: the query answer with an extra 'disjunctiveFacets' attribute */ search: function(q, searchCallback, searchParams) { this.q = q; this.searchCallback = searchCallback; this.searchParams = searchParams || {}; this.page = this.page || 0; this.refinements = this.refinements || {}; this.disjunctiveRefinements = this.disjunctiveRefinements || {}; this._search(); }, /** * Remove all refinements (disjunctive + conjunctive) */ clearRefinements: function() { this.disjunctiveRefinements = {}; this.refinements = {}; }, /** * Ensure a facet refinement exists * @param {string} facet the facet to refine * @param {string} value the associated value */ addDisjunctiveRefine: function(facet, value) { this.disjunctiveRefinements = this.disjunctiveRefinements || {}; this.disjunctiveRefinements[facet] = this.disjunctiveRefinements[facet] || {}; this.disjunctiveRefinements[facet][value] = true; }, /** * Ensure a facet refinement does not exist * @param {string} facet the facet to refine * @param {string} value the associated value */ removeDisjunctiveRefine: function(facet, value) { this.disjunctiveRefinements = this.disjunctiveRefinements || {}; this.disjunctiveRefinements[facet] = this.disjunctiveRefinements[facet] || {}; try { delete this.disjunctiveRefinements[facet][value]; } catch (e) { this.disjunctiveRefinements[facet][value] = undefined; // IE compat } }, /** * Ensure a facet refinement exists * @param {string} facet the facet to refine * @param {string} value the associated value */ addRefine: function(facet, value) { var refinement = facet + ':' + value; this.refinements = this.refinements || {}; this.refinements[refinement] = true; }, /** * Ensure a facet refinement does not exist * @param {string} facet the facet to refine * @param {string} value the 
associated value */ removeRefine: function(facet, value) { var refinement = facet + ':' + value; this.refinements = this.refinements || {}; this.refinements[refinement] = false; }, /** * Toggle refinement state of a facet * @param {string} facet the facet to refine * @param {string} value the associated value * @return {boolean} true if the facet has been found */ toggleRefine: function(facet, value) { for (var i = 0; i < this.options.facets.length; ++i) { if (this.options.facets[i] == facet) { var refinement = facet + ':' + value; this.refinements[refinement] = !this.refinements[refinement]; this.page = 0; this._search(); return true; } } this.disjunctiveRefinements[facet] = this.disjunctiveRefinements[facet] || {}; for (var j = 0; j < this.options.disjunctiveFacets.length; ++j) { if (this.options.disjunctiveFacets[j] == facet) { this.disjunctiveRefinements[facet][value] = !this.disjunctiveRefinements[facet][value]; this.page = 0; this._search(); return true; } } return false; }, /** * Check the refinement state of a facet * @param {string} facet the facet * @param {string} value the associated value * @return {boolean} true if refined */ isRefined: function(facet, value) { var refinement = facet + ':' + value; if (this.refinements[refinement]) { return true; } if (this.disjunctiveRefinements[facet] && this.disjunctiveRefinements[facet][value]) { return true; } return false; }, /** * Go to next page */ nextPage: function() { this._gotoPage(this.page + 1); }, /** * Go to previous page */ previousPage: function() { if (this.page > 0) { this._gotoPage(this.page - 1); } }, /** * Goto a page * @param {integer} page The page number */ gotoPage: function(page) { this._gotoPage(page); }, /** * Configure the page but do not trigger a reload * @param {integer} page The page number */ setPage: function(page) { this.page = page; }, /** * Configure the underlying index name * @param {string} name the index name */ setIndex: function(name) { this.index = name; }, /** * Get the 
underlying configured index name */ getIndex: function() { return this.index; }, /** * Clear the extra queries added to the underlying batch of queries */ clearExtraQueries: function() { this.extraQueries = []; }, /** * Add an extra query to the underlying batch of queries. Once you add queries * to the batch, the 2nd parameter of the searchCallback will be an object with a `results` * attribute listing all search results. */ addExtraQuery: function(index, query, params) { this.extraQueries.push({ index: index, query: query, params: (params || {}) }); }, ///////////// PRIVATE /** * Goto a page * @param {integer} page The page number */ _gotoPage: function(page) { this.page = page; this._search(); }, /** * Perform the underlying queries */ _search: function() { this.client.startQueriesBatch(); this.client.addQueryInBatch(this.index, this.q, this._getHitsSearchParams()); var disjunctiveFacets = []; var unusedDisjunctiveFacets = {}; for (var i = 0; i < this.options.disjunctiveFacets.length; ++i) { var facet = this.options.disjunctiveFacets[i]; if (this._hasDisjunctiveRefinements(facet)) { disjunctiveFacets.push(facet); } else { unusedDisjunctiveFacets[facet] = true; } } for (var i = 0; i < disjunctiveFacets.length; ++i) { this.client.addQueryInBatch(this.index, this.q, this._getDisjunctiveFacetSearchParams(disjunctiveFacets[i])); } for (var i = 0; i < this.extraQueries.length; ++i) { this.client.addQueryInBatch(this.extraQueries[i].index, this.extraQueries[i].query, this.extraQueries[i].params); } var self = this; this.client.sendQueriesBatch(function(success, content) { if (!success) { self.searchCallback(false, content); return; } var aggregatedAnswer = content.results[0]; aggregatedAnswer.disjunctiveFacets = aggregatedAnswer.disjunctiveFacets || {}; aggregatedAnswer.facetStats = aggregatedAnswer.facetStats || {}; for (var facet in unusedDisjunctiveFacets) { if (aggregatedAnswer.facets[facet] && !aggregatedAnswer.disjunctiveFacets[facet]) { 
aggregatedAnswer.disjunctiveFacets[facet] = aggregatedAnswer.facets[facet]; try { delete aggregatedAnswer.facets[facet]; } catch (e) { aggregatedAnswer.facets[facet] = undefined; // IE compat } } } for (var i = 0; i < disjunctiveFacets.length; ++i) { for (var facet in content.results[i + 1].facets) { aggregatedAnswer.disjunctiveFacets[facet] = content.results[i + 1].facets[facet]; if (self.disjunctiveRefinements[facet]) { for (var value in self.disjunctiveRefinements[facet]) { if (!aggregatedAnswer.disjunctiveFacets[facet][value] && self.disjunctiveRefinements[facet][value]) { aggregatedAnswer.disjunctiveFacets[facet][value] = 0; } } } } for (var stats in content.results[i + 1].facets_stats) { aggregatedAnswer.facetStats[stats] = content.results[i + 1].facets_stats[stats]; } } if (self.extraQueries.length === 0) { self.searchCallback(true, aggregatedAnswer); } else { var c = { results: [ aggregatedAnswer ] }; for (var i = 0; i < self.extraQueries.length; ++i) { c.results.push(content.results[1 + disjunctiveFacets.length + i]); } self.searchCallback(true, c); } }); }, /** * Build search parameters used to fetch hits * @return {hash} */ _getHitsSearchParams: function() { var facets = []; for (var i = 0; i < this.options.facets.length; ++i) { facets.push(this.options.facets[i]); } for (var i = 0; i < this.options.disjunctiveFacets.length; ++i) { var facet = this.options.disjunctiveFacets[i]; if (!this._hasDisjunctiveRefinements(facet)) { facets.push(facet); } } return extend({}, { hitsPerPage: this.options.hitsPerPage, page: this.page, facets: facets, facetFilters: this._getFacetFilters() }, this.searchParams); }, /** * Build search parameters used to fetch a disjunctive facet * @param {string} facet the associated facet name * @return {hash} */ _getDisjunctiveFacetSearchParams: function(facet) { return extend({}, this.searchParams, { hitsPerPage: 1, page: 0, attributesToRetrieve: [], attributesToHighlight: [], attributesToSnippet: [], facets: facet, facetFilters: 
this._getFacetFilters(facet) }); }, /** * Test if there are some disjunctive refinements on the facet */ _hasDisjunctiveRefinements: function(facet) { for (var value in this.disjunctiveRefinements[facet]) { if (this.disjunctiveRefinements[facet][value]) { return true; } } return false; }, /** * Build facetFilters parameter based on current refinements * @param {string} facet if set, the current disjunctive facet * @return {hash} */ _getFacetFilters: function(facet) { var facetFilters = []; for (var refinement in this.refinements) { if (this.refinements[refinement]) { facetFilters.push(refinement); } } for (var disjunctiveRefinement in this.disjunctiveRefinements) { if (disjunctiveRefinement != facet) { var refinements = []; for (var value in this.disjunctiveRefinements[disjunctiveRefinement]) { if (this.disjunctiveRefinements[disjunctiveRefinement][value]) { refinements.push(disjunctiveRefinement + ':' + value); } } if (refinements.length > 0) { facetFilters.push(refinements); } } } return facetFilters; } }; })(); /* * Copyright (c) 2014 Algolia * http://www.algolia.com/ * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ (function($) { /** * Algolia Places API * @param {string} Your application ID * @param {string} Your API Key */ window.AlgoliaPlaces = function(applicationID, apiKey) { this.init(applicationID, apiKey); }; AlgoliaPlaces.prototype = { /** * @param {string} Your application ID * @param {string} Your API Key */ init: function(applicationID, apiKey) { this.client = new AlgoliaSearch(applicationID, apiKey, 'http', true, ['places-1.algolia.io', 'places-2.algolia.io', 'places-3.algolia.io']); this.cache = {}; }, /** * Perform a query * @param {string} q the user query * @param {function} searchCallback the result callback called with two arguments: * success: boolean set to true if the request was successfull * content: the query answer with an extra 'disjunctiveFacets' attribute * @param {hash} the list of search parameters */ search: function(q, searchCallback, searchParams) { var indexObj = this; var params = 'query=' + encodeURIComponent(q); if (!this.client._isUndefined(searchParams) && searchParams != null) { params = this.client._getSearchParams(searchParams, params); } var pObj = {params: params, apiKey: this.client.apiKey, appID: this.client.applicationID}; this.client._jsonRequest({ cache: this.cache, method: 'POST', url: '/1/places/query', body: pObj, callback: searchCallback, removeCustomHTTPHeaders: true }); } }; })();<|fim▁end|>
}, /* * Delete an object from the index
<|file_name|>auth.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 OpenStack, LLC. # # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_log import log from manila.api.middleware import auth from manila.i18n import _LW LOG = log.getLogger(__name__) class ManilaKeystoneContext(auth.ManilaKeystoneContext): def __init__(self, application): LOG.warn(_LW('manila.api.auth:ManilaKeystoneContext is deprecated. ' 'Please use ' 'manila.api.middleware.auth:ManilaKeystoneContext ' 'instead.')) super(ManilaKeystoneContext, self).__init__(application) def pipeline_factory(loader, global_conf, **local_conf):<|fim▁hole|> auth.pipeline_factory(loader, global_conf, **local_conf)<|fim▁end|>
LOG.warn(_LW('manila.api.auth:pipeline_factory is deprecated. Please use ' 'manila.api.middleware.auth:pipeline_factory instead.'))
<|file_name|>transformer.ts<|end_file_name|><|fim▁begin|>/* @internal */ namespace ts { function getModuleTransformer(moduleKind: ModuleKind): TransformerFactory<SourceFile | Bundle> { switch (moduleKind) { case ModuleKind.ESNext: case ModuleKind.ES2015: return transformES2015Module; case ModuleKind.System: return transformSystemModule; default: return transformModule; } } const enum TransformationState { Uninitialized, Initialized, Completed, Disposed } const enum SyntaxKindFeatureFlags { Substitution = 1 << 0, EmitNotifications = 1 << 1, } export function getTransformers(compilerOptions: CompilerOptions, customTransformers?: CustomTransformers) { const jsx = compilerOptions.jsx; const languageVersion = getEmitScriptTarget(compilerOptions); const moduleKind = getEmitModuleKind(compilerOptions); const transformers: TransformerFactory<SourceFile | Bundle>[] = []; addRange(transformers, customTransformers && customTransformers.before); transformers.push(transformTypeScript); if (jsx === JsxEmit.React) { transformers.push(transformJsx); } if (languageVersion < ScriptTarget.ESNext) { transformers.push(transformESNext); } if (languageVersion < ScriptTarget.ES2019) { transformers.push(transformES2019); } if (languageVersion < ScriptTarget.ES2018) { transformers.push(transformES2018); } if (languageVersion < ScriptTarget.ES2017) { transformers.push(transformES2017); } if (languageVersion < ScriptTarget.ES2016) { transformers.push(transformES2016); } if (languageVersion < ScriptTarget.ES2015) { transformers.push(transformES2015); transformers.push(transformGenerators); } transformers.push(getModuleTransformer(moduleKind)); // The ES5 transformer is last so that it can substitute expressions like `exports.default` // for ES3. 
if (languageVersion < ScriptTarget.ES5) { transformers.push(transformES5); } addRange(transformers, customTransformers && customTransformers.after); return transformers; } export function noEmitSubstitution(_hint: EmitHint, node: Node) { return node; } export function noEmitNotification(hint: EmitHint, node: Node, callback: (hint: EmitHint, node: Node) => void) { callback(hint, node); } /** * Transforms an array of SourceFiles by passing them through each transformer. * * @param resolver The emit resolver provided by the checker. * @param host The emit host object used to interact with the file system. * @param options Compiler options to surface in the `TransformationContext`. * @param nodes An array of nodes to transform. * @param transforms An array of `TransformerFactory` callbacks. * @param allowDtsFiles A value indicating whether to allow the transformation of .d.ts files. */ export function transformNodes<T extends Node>(resolver: EmitResolver | undefined, host: EmitHost | undefined, options: CompilerOptions, nodes: ReadonlyArray<T>, transformers: ReadonlyArray<TransformerFactory<T>>, allowDtsFiles: boolean): TransformationResult<T> { const enabledSyntaxKindFeatures = new Array<SyntaxKindFeatureFlags>(SyntaxKind.Count); let lexicalEnvironmentVariableDeclarations: VariableDeclaration[]; let lexicalEnvironmentFunctionDeclarations: FunctionDeclaration[]; let lexicalEnvironmentVariableDeclarationsStack: VariableDeclaration[][] = []; let lexicalEnvironmentFunctionDeclarationsStack: FunctionDeclaration[][] = []; let lexicalEnvironmentStackOffset = 0; let lexicalEnvironmentSuspended = false; let emitHelpers: EmitHelper[] | undefined; let onSubstituteNode: TransformationContext["onSubstituteNode"] = noEmitSubstitution; let onEmitNode: TransformationContext["onEmitNode"] = noEmitNotification; let state = TransformationState.Uninitialized; const diagnostics: DiagnosticWithLocation[] = []; // The transformation context is provided to each transformer as part of 
transformer // initialization. const context: TransformationContext = { getCompilerOptions: () => options, getEmitResolver: () => resolver!, // TODO: GH#18217 getEmitHost: () => host!, // TODO: GH#18217 startLexicalEnvironment, suspendLexicalEnvironment, resumeLexicalEnvironment, endLexicalEnvironment, hoistVariableDeclaration, hoistFunctionDeclaration, requestEmitHelper, readEmitHelpers, enableSubstitution, enableEmitNotification, isSubstitutionEnabled, isEmitNotificationEnabled, get onSubstituteNode() { return onSubstituteNode; }, set onSubstituteNode(value) { Debug.assert(state < TransformationState.Initialized, "Cannot modify transformation hooks after initialization has completed."); Debug.assert(value !== undefined, "Value must not be 'undefined'"); onSubstituteNode = value; }, get onEmitNode() { return onEmitNode; }, set onEmitNode(value) { Debug.assert(state < TransformationState.Initialized, "Cannot modify transformation hooks after initialization has completed."); Debug.assert(value !== undefined, "Value must not be 'undefined'"); onEmitNode = value; }, addDiagnostic(diag) { diagnostics.push(diag); } }; // Ensure the parse tree is clean before applying transformations for (const node of nodes) { disposeEmitNodes(getSourceFileOfNode(getParseTreeNode(node))); } performance.mark("beforeTransform"); // Chain together and initialize each transformer. const transformation = chain(...transformers)(context); // prevent modification of transformation hooks. state = TransformationState.Initialized; // Transform each node. const transformed = map(nodes, allowDtsFiles ? transformation : transformRoot); // prevent modification of the lexical environment. 
state = TransformationState.Completed; performance.mark("afterTransform"); performance.measure("transformTime", "beforeTransform", "afterTransform"); return { transformed, substituteNode, emitNodeWithNotification, dispose, diagnostics }; function transformRoot(node: T) { return node && (!isSourceFile(node) || !node.isDeclarationFile) ? transformation(node) : node; } /** * Enables expression substitutions in the pretty printer for the provided SyntaxKind. */ function enableSubstitution(kind: SyntaxKind) { Debug.assert(state < TransformationState.Completed, "Cannot modify the transformation context after transformation has completed."); enabledSyntaxKindFeatures[kind] |= SyntaxKindFeatureFlags.Substitution; } /** * Determines whether expression substitutions are enabled for the provided node. */ function isSubstitutionEnabled(node: Node) { return (enabledSyntaxKindFeatures[node.kind] & SyntaxKindFeatureFlags.Substitution) !== 0 && (getEmitFlags(node) & EmitFlags.NoSubstitution) === 0; } /** * Emits a node with possible substitution. * * @param hint A hint as to the intended usage of the node. * @param node The node to emit. * @param emitCallback The callback used to emit the node or its substitute. */ function substituteNode(hint: EmitHint, node: Node) { Debug.assert(state < TransformationState.Disposed, "Cannot substitute a node after the result is disposed."); return node && isSubstitutionEnabled(node) && onSubstituteNode(hint, node) || node; } /** * Enables before/after emit notifications in the pretty printer for the provided SyntaxKind. */ function enableEmitNotification(kind: SyntaxKind) { Debug.assert(state < TransformationState.Completed, "Cannot modify the transformation context after transformation has completed."); enabledSyntaxKindFeatures[kind] |= SyntaxKindFeatureFlags.EmitNotifications; } /** * Determines whether before/after emit notifications should be raised in the pretty * printer when it emits a node. 
*/ function isEmitNotificationEnabled(node: Node) { return (enabledSyntaxKindFeatures[node.kind] & SyntaxKindFeatureFlags.EmitNotifications) !== 0 || (getEmitFlags(node) & EmitFlags.AdviseOnEmitNode) !== 0; } /** * Emits a node with possible emit notification. * * @param hint A hint as to the intended usage of the node. * @param node The node to emit. * @param emitCallback The callback used to emit the node. */ function emitNodeWithNotification(hint: EmitHint, node: Node, emitCallback: (hint: EmitHint, node: Node) => void) { Debug.assert(state < TransformationState.Disposed, "Cannot invoke TransformationResult callbacks after the result is disposed."); if (node) { if (isEmitNotificationEnabled(node)) { onEmitNode(hint, node, emitCallback); } else { emitCallback(hint, node); } } } /** * Records a hoisted variable declaration for the provided name within a lexical environment. */ function hoistVariableDeclaration(name: Identifier): void { Debug.assert(state > TransformationState.Uninitialized, "Cannot modify the lexical environment during initialization."); Debug.assert(state < TransformationState.Completed, "Cannot modify the lexical environment after transformation has completed."); const decl = setEmitFlags(createVariableDeclaration(name), EmitFlags.NoNestedSourceMaps); if (!lexicalEnvironmentVariableDeclarations) { lexicalEnvironmentVariableDeclarations = [decl]; } else { lexicalEnvironmentVariableDeclarations.push(decl); } } /** * Records a hoisted function declaration within a lexical environment. 
*/ function hoistFunctionDeclaration(func: FunctionDeclaration): void { Debug.assert(state > TransformationState.Uninitialized, "Cannot modify the lexical environment during initialization."); Debug.assert(state < TransformationState.Completed, "Cannot modify the lexical environment after transformation has completed."); if (!lexicalEnvironmentFunctionDeclarations) { lexicalEnvironmentFunctionDeclarations = [func]; } else { lexicalEnvironmentFunctionDeclarations.push(func); } } /** * Starts a new lexical environment. Any existing hoisted variable or function declarations * are pushed onto a stack, and the related storage variables are reset. */ function startLexicalEnvironment(): void { Debug.assert(state > TransformationState.Uninitialized, "Cannot modify the lexical environment during initialization."); Debug.assert(state < TransformationState.Completed, "Cannot modify the lexical environment after transformation has completed."); Debug.assert(!lexicalEnvironmentSuspended, "Lexical environment is suspended."); // Save the current lexical environment. Rather than resizing the array we adjust the // stack size variable. This allows us to reuse existing array slots we've // already allocated between transformations to avoid allocation and GC overhead during // transformation. lexicalEnvironmentVariableDeclarationsStack[lexicalEnvironmentStackOffset] = lexicalEnvironmentVariableDeclarations; lexicalEnvironmentFunctionDeclarationsStack[lexicalEnvironmentStackOffset] = lexicalEnvironmentFunctionDeclarations; lexicalEnvironmentStackOffset++; lexicalEnvironmentVariableDeclarations = undefined!; lexicalEnvironmentFunctionDeclarations = undefined!; } /** Suspends the current lexical environment, usually after visiting a parameter list. 
*/ function suspendLexicalEnvironment(): void { Debug.assert(state > TransformationState.Uninitialized, "Cannot modify the lexical environment during initialization."); Debug.assert(state < TransformationState.Completed, "Cannot modify the lexical environment after transformation has completed."); Debug.assert(!lexicalEnvironmentSuspended, "Lexical environment is already suspended."); lexicalEnvironmentSuspended = true; } /** Resumes a suspended lexical environment, usually before visiting a function body. */ function resumeLexicalEnvironment(): void { Debug.assert(state > TransformationState.Uninitialized, "Cannot modify the lexical environment during initialization."); Debug.assert(state < TransformationState.Completed, "Cannot modify the lexical environment after transformation has completed."); Debug.assert(lexicalEnvironmentSuspended, "Lexical environment is not suspended."); lexicalEnvironmentSuspended = false; } /** * Ends a lexical environment. The previous set of hoisted declarations are restored and * any hoisted declarations added in this environment are returned. 
*/ function endLexicalEnvironment(): Statement[] | undefined { Debug.assert(state > TransformationState.Uninitialized, "Cannot modify the lexical environment during initialization."); Debug.assert(state < TransformationState.Completed, "Cannot modify the lexical environment after transformation has completed."); Debug.assert(!lexicalEnvironmentSuspended, "Lexical environment is suspended."); let statements: Statement[] | undefined; if (lexicalEnvironmentVariableDeclarations || lexicalEnvironmentFunctionDeclarations) { if (lexicalEnvironmentFunctionDeclarations) { statements = [...lexicalEnvironmentFunctionDeclarations]; } if (lexicalEnvironmentVariableDeclarations) { const statement = createVariableStatement( /*modifiers*/ undefined, createVariableDeclarationList(lexicalEnvironmentVariableDeclarations) ); setEmitFlags(statement, EmitFlags.CustomPrologue); if (!statements) { statements = [statement]; } else { statements.push(statement); } } } // Restore the previous lexical environment. lexicalEnvironmentStackOffset--; lexicalEnvironmentVariableDeclarations = lexicalEnvironmentVariableDeclarationsStack[lexicalEnvironmentStackOffset]; lexicalEnvironmentFunctionDeclarations = lexicalEnvironmentFunctionDeclarationsStack[lexicalEnvironmentStackOffset]; if (lexicalEnvironmentStackOffset === 0) { lexicalEnvironmentVariableDeclarationsStack = []; lexicalEnvironmentFunctionDeclarationsStack = []; } return statements; } function requestEmitHelper(helper: EmitHelper): void { Debug.assert(state > TransformationState.Uninitialized, "Cannot modify the transformation context during initialization."); Debug.assert(state < TransformationState.Completed, "Cannot modify the transformation context after transformation has completed."); Debug.assert(!helper.scoped, "Cannot request a scoped emit helper."); emitHelpers = append(emitHelpers, helper); } function readEmitHelpers(): EmitHelper[] | undefined { Debug.assert(state > TransformationState.Uninitialized, "Cannot modify the 
transformation context during initialization."); Debug.assert(state < TransformationState.Completed, "Cannot modify the transformation context after transformation has completed."); const helpers = emitHelpers; emitHelpers = undefined; return helpers; } function dispose() { if (state < TransformationState.Disposed) { // Clean up emit nodes on parse tree for (const node of nodes) { disposeEmitNodes(getSourceFileOfNode(getParseTreeNode(node))); } // Release references to external entries for GC purposes. lexicalEnvironmentVariableDeclarations = undefined!; lexicalEnvironmentVariableDeclarationsStack = undefined!; lexicalEnvironmentFunctionDeclarations = undefined!; lexicalEnvironmentFunctionDeclarationsStack = undefined!; onSubstituteNode = undefined!; onEmitNode = undefined!; emitHelpers = undefined; // Prevent further use of the transformation result. state = TransformationState.Disposed; } } } <|fim▁hole|><|fim▁end|>
}
<|file_name|>test_custom_dm_diam_dm_proj.py<|end_file_name|><|fim▁begin|>import shesha.config as conf simul_name = "bench_scao_sh_16x16_8pix" layout = "layoutDeFab_SH" # loop p_loop = conf.Param_loop() p_loop.set_niter(1000) p_loop.set_ittime(0.002) # =1/500 # geom p_geom = conf.Param_geom()<|fim▁hole|># tel p_tel = conf.Param_tel() p_tel.set_diam(4.0) p_tel.set_cobs(0.2) # atmos p_atmos = conf.Param_atmos() p_atmos.set_r0(0.16) p_atmos.set_nscreens(1) p_atmos.set_frac([1.0]) p_atmos.set_alt([0.0]) p_atmos.set_windspeed([10.]) p_atmos.set_winddir([45.]) p_atmos.set_L0([1.e5]) # target p_target = conf.Param_target() p_targets = [p_target] # p_target.set_ntargets(1) p_target.set_xpos(0.) p_target.set_ypos(0.) p_target.set_Lambda(1.65) p_target.set_mag(10.) # wfs p_wfs0 = conf.Param_wfs(roket=True) p_wfss = [p_wfs0] p_wfs0.set_type("sh") p_wfs0.set_nxsub(8) p_wfs0.set_npix(8) p_wfs0.set_pixsize(0.3) p_wfs0.set_fracsub(0.8) p_wfs0.set_xpos(0.) p_wfs0.set_ypos(0.) p_wfs0.set_Lambda(0.5) p_wfs0.set_gsmag(8.) p_wfs0.set_optthroughput(0.5) p_wfs0.set_zerop(1.e11) p_wfs0.set_noise(3.) p_wfs0.set_atmos_seen(1) # lgs parameters # p_wfs0.set_gsalt(90*1.e3) # p_wfs0.set_lltx(0) # p_wfs0.set_llty(0) # p_wfs0.set_laserpower(10) # p_wfs0.set_lgsreturnperwatt(1.e3) # p_wfs0.set_proftype("Exp") # p_wfs0.set_beamsize(0.8) # dm p_dm0 = conf.Param_dm() p_dm1 = conf.Param_dm() p_dms = [p_dm0, p_dm1] p_dm0.set_type("pzt") p_dm0.set_file_influ_fits("test_custom_dm.fits") p_dm0.set_alt(0.) p_dm0.set_thresh(0.3) p_dm0.set_unitpervolt(0.01) p_dm0.set_push4imat(100.) p_dm0.set_diam_dm_proj(4.1) p_dm1.set_type("tt") p_dm1.set_alt(0.) p_dm1.set_unitpervolt(0.0005) p_dm1.set_push4imat(10.) 
# centroiders p_centroider0 = conf.Param_centroider() p_centroiders = [p_centroider0] p_centroider0.set_nwfs(0) p_centroider0.set_type("cog") # p_centroider0.set_type("corr") # p_centroider0.set_type_fct("model") # controllers p_controller0 = conf.Param_controller() p_controllers = [p_controller0] p_controller0.set_type("ls") p_controller0.set_nwfs([0]) p_controller0.set_ndm([0, 1]) p_controller0.set_maxcond(1500.) p_controller0.set_delay(1.) p_controller0.set_gain(0.4) p_controller0.set_modopti(0) p_controller0.set_nrec(2048) p_controller0.set_nmodes(216) p_controller0.set_gmin(0.001) p_controller0.set_gmax(0.5) p_controller0.set_ngain(500)<|fim▁end|>
p_geom.set_zenithangle(0.)
<|file_name|>compressd.py<|end_file_name|><|fim▁begin|>#import binwalk.core.C import binwalk.core.plugin #from binwalk.core.common import * class CompressdPlugin(binwalk.core.plugin.Plugin): # ''' # Searches for and validates compress'd data. # ''' MODULES = ['Signature'] #READ_SIZE = 64 #COMPRESS42 = "compress42" #COMPRESS42_FUNCTIONS = [ # binwalk.core.C.Function(name="is_compressed", type=bool), #] #comp = None #def init(self):<|fim▁hole|> # installation / package maintenance. A Python implementation will likely need to # be custom developed in the future, but for now, since this compression format is # not very common, especially in firmware, simply disable it. #self.comp = None #def scan(self, result): # if self.comp and result.file and result.description.lower().startswith("compress'd data"): # fd = self.module.config.open_file(result.file.name, offset=result.offset, length=self.READ_SIZE) # compressed_data = fd.read(self.READ_SIZE) # fd.close() # if not self.comp.is_compressed(compressed_data, len(compressed_data)): # result.valid = False<|fim▁end|>
#self.comp = binwalk.core.C.Library(self.COMPRESS42, self.COMPRESS42_FUNCTIONS) # This plugin is currently disabled due to the need to move away from supporting C # libraries and into a pure Python project, for cross-platform support and ease of
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # __ # /__) _ _ _ _ _/ _ # / ( (- (/ (/ (- _) / _) # / """ Requests HTTP library ~~~~~~~~~~~~~~~~~~~~~ Requests is an HTTP library, written in Python, for human beings. Basic GET usage: >>> import requests >>> r = requests.get('https://www.python.org') >>> r.status_code 200 >>> 'Python is a programming language' in r.content True ... or POST: >>> payload = dict(key1='value1', key2='value2') >>> r = requests.post('http://httpbin.org/post', data=payload) >>> print(r.text) { ... "form": { "key2": "value2", "key1": "value1" }, ... } The other HTTP methods are supported - see `requests.api`. Full documentation is at <http://python-requests.org>. :copyright: (c) 2015 by Kenneth Reitz. :license: Apache 2.0, see LICENSE for more details. """ __title__ = 'requests' __version__ = '2.8.1' __build__ = 0x020801 __author__ = 'Kenneth Reitz' __license__ = 'Apache 2.0' __copyright__ = 'Copyright 2015 Kenneth Reitz' # Attempt to enable urllib3's SNI support, if possible try: from .packages.urllib3.contrib import pyopenssl pyopenssl.inject_into_urllib3() except ImportError: pass from . import utils from .models import Request, Response, PreparedRequest from .api import request, get, head, post, patch, put, delete, options from .sessions import session, Session from .status_codes import codes from .exceptions import ( RequestException, Timeout, URLRequired, TooManyRedirects, HTTPError, ConnectionError ) # Set default logging handler to avoid "No handler found" warnings. import logging try: # Python 2.7+<|fim▁hole|> class NullHandler(logging.Handler): def emit(self, record): pass logging.getLogger(__name__).addHandler(NullHandler())<|fim▁end|>
from logging import NullHandler except ImportError:
<|file_name|>pictorial_structures_revisited_main.cpp<|end_file_name|><|fim▁begin|>#if defined(_WIN64) || defined(WIN64) || defined(_WIN32) || defined(WIN32) #include <windows.h> #endif #include <iostream> #include <sstream> #include <string> #include <cstdlib> #if defined(__unix__) || defined(__unix) || defined(unix) || defined(__linux__) || defined(__linux) || defined(linux) #include <unistd.h> #endif namespace { namespace local { <|fim▁hole|> } // namespace local } // unnamed namespace namespace my_pictorial_structures_revisited { int pictorial_structures_revisited_partapp_main(int argc, char *argv[]); } // namespace my_pictorial_structures_revisited // [ref] ${PictorialStructureRevisited_HOME}/ReadMe.txt // // -. to compute part posteriors for a single image // pictorial_structures_revisited.exe --expopt ./expopt/<EXP_FILENAME> --part_detect --find_obj --first <IMGIDX> --numimgs 1 // examples of <EXP_FILENAME> // ./expopt/exp_buffy_hog_detections.txt // ./expopt/exp_ramanan_075.txt // ./expopt/exp_tud_upright_people.txt // // -. to precess the whole dataset // pictorial_structures_revisited.exe --expopt ./expopt/<EXP_FILENAME> --part_detect --find_obj // // -. to evaluate the number of correctly detected parts // pictorial_structures_revisited.exe --expopt ./expopt/<EXP_FILENAME> --eval_segments --first <IMGIDX> --numimgs 1 // this command will also produce visualization of the max-marginal part estimates in the "part_marginals/seg_eval_images" directory // // -. to extract object hypothesis // pictorial_structures_revisited.exe --expopt ./expopt/<EXP_FILENAME> --save_res // this will produce annotation files in the same format as training and test data. // // -. pretrained model (classifiers and joint parameters) // ./log_dir/<EXP_NAME>/class // // -. 
at runtime the following directories will be created: // ./log_dir/<EXP_NAME>/test_scoregrid - location where part detections will be stored // ./log_dir/<EXP_NAME>/part_marginals - location where part marginals will be stored // ./log_dir/<EXP_NAME>/part_marginals/seg_eval_images int pictorial_structures_revisited_main(int argc, char *argv[]) { #if 0 // testing const std::string curr_directory("./data/object_representation/pictorial_structures_revisited/code_test"); const std::string exp_filename("./expopt/exp_code_test.txt"); #else // experiment const std::string curr_directory("./data/object_representation/pictorial_structures_revisited/partapp-experiments-r2"); const std::string exp_filename("./expopt/exp_buffy_hog_detections.txt"); //const std::string exp_filename("./expopt/exp_ramanan_075.txt"); //const std::string exp_filename("./expopt/exp_tud_upright_people.txt"); #endif const int first_image_idx = 0; const int num_images = 1; std::ostringstream sstream1, sstream2; sstream1 << first_image_idx; sstream2 << num_images; // #if defined(_WIN64) || defined(WIN64) || defined(_WIN32) || defined(WIN32) const BOOL retval = SetCurrentDirectoryA(curr_directory.c_str()); #elif defined(__unix__) || defined(__unix) || defined(unix) || defined(__linux__) || defined(__linux) || defined(linux) const int retval = chdir(curr_directory.c_str()); #endif #if 1 const int my_argc = 5; const char *my_argv[my_argc] = { argv[0], "--expopt", exp_filename.c_str(), "--part_detect", "--find_obj" }; #elif 0 const int my_argc = 9; const char *my_argv[my_argc] = { argv[0], "--expopt", exp_filename.c_str(), "--part_detect", "--find_obj", "--first", sstream1.str().c_str(), "--numimgs", sstream2.str().c_str() }; #elif 0 const int my_argc = 4; const char *my_argv[] = { argv[0], "--expopt", exp_filename.c_str(), "--eval_segments" }; #elif 0 const int my_argc = 8; const char *my_argv[my_argc] = { argv[0], "--expopt", exp_filename.c_str(), "--eval_segments", "--first", sstream1.str().c_str(), 
"--numimgs", sstream2.str().c_str() }; #endif std::cout << "-----------------------------------------" << std::endl; for (int i = 0; i < my_argc; ++i) std::cout << "argv[" << i << "] : " << my_argv[i] << std::endl; const char *home = getenv("HOME"); if (home) std::cout << "environment variable, HOME = " << home << std::endl; else { std::cout << "environment variable, HOME, is not found" << std::endl; return -1; } std::cout << "-----------------------------------------" << std::endl; my_pictorial_structures_revisited::pictorial_structures_revisited_partapp_main(my_argc, (char **)my_argv); return 0; }<|fim▁end|>
<|file_name|>stock_quant_report.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- ############################################################################### # License, author and contributors information in: # # __manifest__.py file at the root folder of this module. # ############################################################################### from odoo import models, fields, api, _ from odoo.exceptions import UserError, ValidationError from itertools import groupby from operator import itemgetter from collections import defaultdict class WizardValuationStockInventory(models.TransientModel): _name = 'wizard.valuation.stock.inventory' _description = 'Wizard that opens the stock Inventory by Location' location_id = fields.Many2one('stock.location', string='Location', required=True) product_categ_id = fields.Many2one('product.category', string='Category') product_sub_categ_id = fields.Many2one('product.category', string='Sub Category') line_ids = fields.One2many('wizard.valuation.stock.inventory.line', 'wizard_id', required=True, ondelete='cascade') @api.multi def print_pdf_stock_inventory(self, data): line_ids_all_categ = [] line_ids_filterd_categ = [] line_ids = [] # Unlink All one2many Line Ids from same wizard for wizard_id in self.env['wizard.valuation.stock.inventory.line'].search([('wizard_id', '=', self.id)]): if wizard_id.wizard_id.id == self.id: self.write({'line_ids': [(3, wizard_id.id)]}) child_loc_ids = [] if self.location_id: child_loc_ids = self.env['stock.location'].sudo().search([('location_id', 'child_of', self.location_id.id)]).mapped('id') # Creating Temp dictionry for Product List if data["product_sub_categ_id"]: for resource in self.env['stock.quant'].search( ['|', ('location_id', '=', self.location_id.id), ('location_id', 'in', child_loc_ids)]): if resource.product_id.categ_id.id == data[ "product_sub_categ_id"] or resource.product_id.categ_id.parent_id.id == data[ "product_sub_categ_id"]: line_ids_filterd_categ.append({ 
'location_id': resource.location_id.id, 'product_id': resource.product_id.id, 'product_categ_id': resource.product_id.categ_id.parent_id.id, 'product_sub_categ_id': resource.product_id.categ_id.id, 'product_uom_id': resource.product_id.uom_id.id, 'qty': resource.qty, 'standard_price': resource.product_id.standard_price, }) else: for resource in self.env['stock.quant'].search( ['|', ('location_id', '=', self.location_id.id), ('location_id', 'in', child_loc_ids)]): line_ids_all_categ.append({ 'location_id': resource.location_id.id, 'product_id': resource.product_id.id, 'product_categ_id': resource.product_id.categ_id.parent_id.id, 'product_sub_categ_id': resource.product_id.categ_id.id, 'product_uom_id': resource.product_id.uom_id.id, 'qty': resource.qty, 'standard_price': resource.product_id.standard_price, }) if data["product_sub_categ_id"]: # Merging stock moves into single product item line grouper = itemgetter("product_id", "product_categ_id", "product_sub_categ_id", "location_id", "product_uom_id", "standard_price") for key, grp in groupby(sorted(line_ids_filterd_categ, key=grouper), grouper): temp_dict = dict(zip( ["product_id", "product_categ_id", "product_sub_categ_id", "location_id", "product_uom_id", "standard_price"], key)) temp_dict["qty"] = sum(item["qty"] for item in grp) temp_dict["amount"] = temp_dict["standard_price"] * temp_dict["qty"] line_ids.append((0, 0, temp_dict)) else: # Merging stock moves into single product item line grouper = itemgetter("product_id", "product_categ_id", "product_sub_categ_id", "location_id", "product_uom_id", "standard_price") for key, grp in groupby(sorted(line_ids_all_categ, key=grouper), grouper): temp_dict = dict(zip( ["product_id", "product_categ_id", "product_sub_categ_id", "location_id", "product_uom_id", "standard_price"], key)) temp_dict["qty"] = sum(item["qty"] for item in grp) temp_dict["amount"] = temp_dict["standard_price"] * temp_dict["qty"] line_ids.append((0, 0, temp_dict)) if len(line_ids) == 0: raise 
ValidationError(_('Material is not available on this location.')) # writing to One2many line_ids self.write({'line_ids': line_ids}) context = { 'lang': 'en_US', 'active_ids': [self.id], } return { 'context': context, 'data': None, 'type': 'ir.actions.report.xml', 'report_name': 'dvit_report_inventory_valuation_multi_uom.report_stock_inventory_location', 'report_type': 'qweb-pdf', 'report_file': 'dvit_report_inventory_valuation_multi_uom.report_stock_inventory_location', 'name': 'Stock Inventory', 'flags': {'action_buttons': True}, } class WizardValuationStockInventoryLine(models.TransientModel): _name = 'wizard.valuation.stock.inventory.line' <|fim▁hole|> product_sub_categ_id = fields.Many2one('product.category', string='Sub Category') product_uom_id = fields.Many2one('product.uom') qty = fields.Float('Quantity') standard_price = fields.Float('Rate') amount = fields.Float('Amount') @api.model def convert_qty_in_uom(self, from_uom, to_uom, qty): return (qty / from_uom.factor) * to_uom.factor<|fim▁end|>
wizard_id = fields.Many2one('wizard.valuation.stock.inventory', required=True, ondelete='cascade') location_id = fields.Many2one('stock.location', 'Location') product_id = fields.Many2one('product.product', 'Product') product_categ_id = fields.Many2one('product.category', string='Category')
<|file_name|>fonts.cpp<|end_file_name|><|fim▁begin|>/* Copyright 2013-2017 Matt Tytel * * helm is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * helm is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with helm. If not, see <http://www.gnu.org/licenses/>. */ #include "fonts.h"<|fim▁hole|> proportional_regular_ = Font(Typeface::createSystemTypefaceFor( BinaryData::RobotoRegular_ttf, BinaryData::RobotoRegular_ttfSize)); proportional_light_ = Font(Typeface::createSystemTypefaceFor( BinaryData::RobotoLight_ttf, BinaryData::RobotoLight_ttfSize)); monospace_ = Font(Typeface::createSystemTypefaceFor( BinaryData::DroidSansMono_ttf, BinaryData::DroidSansMono_ttfSize)); }<|fim▁end|>
Fonts::Fonts() {
<|file_name|>MessageTraffic.tsx<|end_file_name|><|fim▁begin|>import * as React from 'react'; const { LineChart, Line, XAxis, YAxis, Tooltip, } = require('recharts'); export default ({data, type, text}) => ( <div className='lineGraph'> <h2>Right now:&nbsp; <span className='rate'>{data[data.length - 1][type]}</span>&nbsp; {text} </h2> <LineChart width={300} height={300} data={data}> <XAxis dataKey='date'/> <YAxis/><|fim▁hole|> </LineChart> <Tooltip/> </div> );<|fim▁end|>
<Line type='monotone' dataKey={type} stroke='#228B22' strokeWidth={2}/>
<|file_name|>507. Perfect Number.cpp<|end_file_name|><|fim▁begin|>507. Perfect Number We define the Perfect Number is a positive integer that is equal to the sum of all its positive divisors except itself. Now, given an integer n, write a function that returns true when it is a perfect number and false when it is not. Example: Input: 28 Output: True Explanation: 28 = 1 + 2 + 4 + 7 + 14<|fim▁hole|>题目大意:完美数字是指它的所有可以整除的正数中除了它本身,其他数字之和等于这个数字的数。给一个正整数n,写一个函数,当它是一个完美数字的时候返回true否则false。 分析:从2~sqrt(num),累加所有i和num/i【因为如果从1~num一个个试是否可以整除的话会超时,而且也没必要,因为知道了除数a必然就知道了num/a这个数字也是它的除数】因为最后还有一个1没有加,所以sum一开始为1,然后返回num == sum,注意如果num本身为1,则要return false,因为1的唯一一个除数1是它本身不能累加,所以1不满足条件。 class Solution { public: bool checkPerfectNumber(int num) { if (num == 1) return false; int sum = 1; for (int i = 2; i <= sqrt(num); i++) if (num % i == 0) sum = sum + (num / i) + i; return num == sum; } };<|fim▁end|>
Note: The input number n will not exceed 100,000,000. (1e8)
<|file_name|>embed.js<|end_file_name|><|fim▁begin|>/* Sapling pagelink dialog */ CKEDITOR.dialog.add( 'embed', function( editor ) { var plugin = CKEDITOR.plugins.embed; var pagelink_plugin = CKEDITOR.plugins.pagelink; return { title : 'Embed media', minWidth : 300, minHeight : 150, contents : [ { id : 'info', label : 'Embed media', title : 'Embed media', elements : [ { type : 'textarea', id : 'code', label : 'Paste the embed code below:', required: true, validate : function() { var dialog = this.getDialog(); var func = CKEDITOR.dialog.validate.notEmpty( 'Please enter the embed code' ); return func.apply( this ); }, setup : function( data ) { if ( data.code ) this.setValue( data.code ); }, commit : function( data ) { data.code = this.getValue(); } } ] } ], onShow : function() { var editor = this.getParentEditor(), selection = editor.getSelection(), element = null, data = { code : '' }; if ( ( element = selection.getStartElement() ) && element.is( 'span' ) ) selection.selectElement( element ); else element = null; if( element ) { this._.selectedElement = element; data.code = $(element.$).text(); } this.setupContent( data ); }, onOk : function() { var attributes = {}, data = {}, me = this, editor = this.getParentEditor(); this.commitContent( data ); attributes['class'] = 'plugin embed'; var style = []; var node = $(data.code), width = node.attr('width'), height = node.attr('height'); if(width) style.push('width: ' + width + 'px;'); if(height) style.push('height: ' + height + 'px;'); if(style.length) attributes['style'] = style.join(' '); if ( !this._.selectedElement ) { if(jQuery.trim(data.code) == '') return; // Create element if current selection is collapsed. var selection = editor.getSelection(), ranges = selection.getRanges( true ); <|fim▁hole|> var text = new CKEDITOR.dom.text( data.code, editor.document ); ranges[0].insertNode( text ); ranges[0].selectNodeContents( text ); selection.selectRanges( ranges ); // Apply style. 
var style = new CKEDITOR.style( { element : 'span', attributes : attributes } ); style.apply( editor.document ); var selected = selection.getStartElement(); ranges[0].setStartAfter( selected ); ranges[0].setEndAfter( selected ); selection.selectRanges( ranges ); } else { // We're only editing an existing link, so just overwrite the attributes. var element = this._.selectedElement; element.setAttributes( attributes ); element.setText( data.code ); } }, onLoad : function() { }, // Inital focus on 'url' field if link is of type URL. onFocus : function() { var pageField = this.getContentElement( 'info', 'code' ); pageField.select(); } }; });<|fim▁end|>
<|file_name|>jdk.nashorn.api.tree.ModuleTree.d.ts<|end_file_name|><|fim▁begin|>declare namespace jdk { namespace nashorn { namespace api { namespace tree { interface ModuleTree extends jdk.nashorn.api.tree.Tree {<|fim▁hole|> getIndirectExportEntries(): java.util.List<jdk.nashorn.api.tree.ExportEntryTree> getStarExportEntries(): java.util.List<jdk.nashorn.api.tree.ExportEntryTree> } } } } }<|fim▁end|>
getImportEntries(): java.util.List<jdk.nashorn.api.tree.ImportEntryTree> getLocalExportEntries(): java.util.List<jdk.nashorn.api.tree.ExportEntryTree>
<|file_name|>api-data.js<|end_file_name|><|fim▁begin|>import supertest from 'supertest'; import { publicChannelName, privateChannelName } from './channel.js'; import { roleNameUsers, roleNameSubscriptions, roleScopeUsers, roleScopeSubscriptions, roleDescription } from './role.js'; import { username, email, adminUsername, adminPassword } from './user.js'; export const request = supertest('http://localhost:3000'); const prefix = '/api/v1/'; export function wait(cb, time) { return () => setTimeout(cb, time); } export const apiUsername = `api${ username }`; export const apiEmail = `api${ email }`; export const apiPublicChannelName = `api${ publicChannelName }`; export const apiPrivateChannelName = `api${ privateChannelName }`; export const apiRoleNameUsers = `api${ roleNameUsers }`; export const apiRoleNameSubscriptions = `api${ roleNameSubscriptions }`; export const apiRoleScopeUsers = `${ roleScopeUsers }`; export const apiRoleScopeSubscriptions = `${ roleScopeSubscriptions }`;<|fim▁hole|> 'administrator', 'system', 'user', ]; export const targetUser = {}; export const channel = {}; export const group = {}; export const message = {}; export const directMessage = {}; export const integration = {}; export const credentials = { 'X-Auth-Token': undefined, 'X-User-Id': undefined, }; export const login = { user: adminUsername, password: adminPassword, }; export function api(path) { return prefix + path; } export function methodCall(methodName) { return api(`method.call/${ methodName }`); } export function log(res) { console.log(res.req.path); console.log({ body: res.body, headers: res.headers, }); } export function getCredentials(done = function() {}) { request.post(api('login')) .send(login) .expect('Content-Type', 'application/json') .expect(200) .expect((res) => { credentials['X-Auth-Token'] = res.body.data.authToken; credentials['X-User-Id'] = res.body.data.userId; }) .end(done); }<|fim▁end|>
export const apiRoleDescription = `api${ roleDescription }`; export const reservedWords = [ 'admin',
<|file_name|>receiver.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- """ Created on Fri Jul 3 13:38:36 2015 @author: madengr """ from gnuradio import gr import osmosdr from gnuradio import filter as grfilter # Don't redefine Python's filter() from gnuradio import blocks from gnuradio import fft from gnuradio.fft import window from gnuradio import analog from gnuradio import audio import os import time import numpy as np from gnuradio.filter import pfb class BaseTuner(gr.hier_block2): """Some base methods that are the same between the known tuner types. See TunerDemodNBFM and TunerDemodAM for better documentation. """ def set_center_freq(self, center_freq, rf_center_freq): """Sets baseband center frequency and file name Sets baseband center frequency of frequency translating FIR filter Also sets file name of wave file sink If tuner is tuned to zero Hz then set to file name to /dev/null Otherwise set file name to tuned RF frequency in MHz Args: center_freq (float): Baseband center frequency in Hz rf_center_freq (float): RF center in Hz (for file name) """ # Since the frequency (hence file name) changed, then close it self.blocks_wavfile_sink.close() # If we never wrote any data to the wavfile sink, delete the file self._delete_wavfile_if_empty() # Set the frequency self.freq_xlating_fir_filter_ccc.set_center_freq(center_freq) self.center_freq = center_freq # Set the file name if self.center_freq == 0 or not self.record: # If tuner at zero Hz, or record false, then file name to /dev/null file_name = "/dev/null" else: # Otherwise use frequency and time stamp for file name tstamp = "_" + str(int(time.time())) file_freq = (rf_center_freq + self.center_freq)/1E6 file_freq = np.round(file_freq, 3) file_name = 'wav/' + '{:.3f}'.format(file_freq) + tstamp + ".wav" # Make sure the 'wav' directory exists try: os.mkdir('wav') except OSError: # will need to add something here for Win support pass # directory already exists self.file_name = 
file_name self.blocks_wavfile_sink.open(self.file_name) def _delete_wavfile_if_empty(self): """Delete the current wavfile if it's empty.""" if (not self.record or not self.file_name or self.file_name == '/dev/null'): return # If we never wrote any data to the wavfile sink, delete # the (empty) wavfile if os.stat(self.file_name).st_size in (44, 0): # ugly hack os.unlink(self.file_name) # delete the file def set_squelch(self, squelch_db): """Sets the threshold for both squelches Args: squelch_db (float): Squelch in dB """ self.analog_pwr_squelch_cc.set_threshold(squelch_db) def __del__(self): """Called when the object is destroyed.""" # Make a best effort attempt to clean up our wavfile if it's empty try: self._delete_wavfile_if_empty() except Exception: pass # oh well, we're dying anyway class TunerDemodNBFM(BaseTuner): """Tuner, demodulator, and recorder chain for narrow band FM demodulation Kept as it's own class so multiple can be instantiated in parallel Accepts complex baseband samples at 1 Msps minimum Frequency translating FIR filter tunes from -samp_rate/2 to +samp_rate/2 The following sample rates assume 1 Msps input First two stages of decimation are 5 each for a total of 25 Thus first two stages brings 1 Msps down to 40 ksps The third stage decimates by int(samp_rate/1E6) Thus output rate will vary from 40 ksps to 79.99 ksps The channel is filtered to 12.5 KHz bandwidth followed by squelch The squelch is non-blocking since samples will be added with other demods The quadrature demod is followed by a forth stage of decimation by 5 This brings the sample rate down to 8 ksps to 15.98 ksps The audio is low-pass filtered to 3.5 kHz bandwidth The polyphase resampler resamples by samp_rate/(decims[1] * decims[0]**3) This results in a constant 8 ksps, irrespective of RF sample rate This 8 ksps audio stream may be added to other demos streams The audio is run through an additional blocking squelch at -200 dB This stops the sample flow so squelced audio is not 
recorded to file The wav file sink stores 8-bit samples (grainy quality but compact) Default demodulator center freqwuency is 0 Hz This is desired since hardware DC removal reduces sensitivity at 0 Hz NBFM demod of LO leakage will just be 0 amplitude Args: samp_rate (float): Input baseband sample rate in sps (1E6 minimum) audio_rate (float): Output audio sample rate in sps (8 kHz minimum) record (bool): Record audio to file if True Attributes: center_freq (float): Baseband center frequency in Hz record (bool): Record audio to file if True """ # pylint: disable=too-many-instance-attributes def __init__(self, samp_rate=4E6, audio_rate=8000, record=True): gr.hier_block2.__init__(self, "TunerDemodNBFM", gr.io_signature(1, 1, gr.sizeof_gr_complex), gr.io_signature(1, 1, gr.sizeof_float)) # Default values self.center_freq = 0 squelch_db = -60 self.quad_demod_gain = 0.050 self.file_name = "/dev/null" self.record = record # Decimation values for four stages of decimation decims = (5, int(samp_rate/1E6)) # Low pass filter taps for decimation by 5 low_pass_filter_taps_0 = \ grfilter.firdes_low_pass(1, 1, 0.090, 0.010, grfilter.firdes.WIN_HAMMING) # Frequency translating FIR filter decimating by 5 self.freq_xlating_fir_filter_ccc = \ grfilter.freq_xlating_fir_filter_ccc(decims[0], low_pass_filter_taps_0, self.center_freq, samp_rate) # FIR filter decimating by 5 fir_filter_ccc_0 = grfilter.fir_filter_ccc(decims[0], low_pass_filter_taps_0) # Low pass filter taps for decimation from samp_rate/25 to 40-79.9 ksps # In other words, decimation by int(samp_rate/1E6) # 12.5 kHz cutoff for NBFM channel bandwidth low_pass_filter_taps_1 = grfilter.firdes_low_pass( 1, samp_rate/decims[0]**2, 12.5E3, 1E3, grfilter.firdes.WIN_HAMMING) # FIR filter decimation by int(samp_rate/1E6) fir_filter_ccc_1 = grfilter.fir_filter_ccc(decims[1], low_pass_filter_taps_1) # Non blocking power squelch self.analog_pwr_squelch_cc = analog.pwr_squelch_cc(squelch_db, 1e-1, 0, False) # Quadrature demod with gain 
set for decent audio # The gain will be later multiplied by the 0 dB normalized volume self.analog_quadrature_demod_cf = \ analog.quadrature_demod_cf(self.quad_demod_gain) # 3.5 kHz cutoff for audio bandwidth low_pass_filter_taps_2 = grfilter.firdes_low_pass(1,\ samp_rate/(decims[1] * decims[0]**2),\ 3.5E3, 500, grfilter.firdes.WIN_HAMMING) # FIR filter decimating by 5 from 40-79.9 ksps to 8-15.98 ksps fir_filter_fff_0 = grfilter.fir_filter_fff(decims[0], low_pass_filter_taps_2) # Polyphase resampler allows arbitary RF sample rates # Takes 8-15.98 ksps to a constant 8 ksps for audio pfb_resamp = audio_rate/float(samp_rate/(decims[1] * decims[0]**3)) pfb_arb_resampler_fff = pfb.arb_resampler_fff(pfb_resamp, taps=None, flt_size=32) # Connect the blocks for the demod self.connect(self, self.freq_xlating_fir_filter_ccc) self.connect(self.freq_xlating_fir_filter_ccc, fir_filter_ccc_0) self.connect(fir_filter_ccc_0, fir_filter_ccc_1) self.connect(fir_filter_ccc_1, self.analog_pwr_squelch_cc) self.connect(self.analog_pwr_squelch_cc, self.analog_quadrature_demod_cf) self.connect(self.analog_quadrature_demod_cf, fir_filter_fff_0) self.connect(fir_filter_fff_0, pfb_arb_resampler_fff) self.connect(pfb_arb_resampler_fff, self) # Need to set this to a very low value of -200 since it is after demod # Only want it to gate when the previuos squelch has gone to zero analog_pwr_squelch_ff = analog.pwr_squelch_ff(-200, 1e-1, 0, True) # File sink with single channel and 8 bits/sample self.blocks_wavfile_sink = blocks.wavfile_sink(self.file_name, 1, audio_rate, 8) # Connect the blocks for recording self.connect(pfb_arb_resampler_fff, analog_pwr_squelch_ff) self.connect(analog_pwr_squelch_ff, self.blocks_wavfile_sink) def set_volume(self, volume_db): """Sets the volume Args: volume_db (float): Volume in dB """<|fim▁hole|> self.analog_quadrature_demod_cf.set_gain(gain) class TunerDemodAM(BaseTuner): """Tuner, demodulator, and recorder chain for AM demodulation Kept as it's own class so 
multiple can be instantiated in parallel Accepts complex baseband samples at 1 Msps minimum Frequency translating FIR filter tunes from -samp_rate/2 to +samp_rate/2 The following sample rates assume 1 Msps input First two stages of decimation are 5 each for a total of 25 Thus first two stages brings 1 Msps down to 40 ksps The third stage decimates by int(samp_rate/1E6) Thus output rate will vary from 40 ksps to 79.99 ksps The channel is filtered to 12.5 KHz bandwidth followed by squelch The squelch is non-blocking since samples will be added with other demods The AGC sets level (volume) prior to AM demod The AM demod is followed by a forth stage of decimation by 5 This brings the sample rate down to 8 ksps to 15.98 ksps The audio is low-pass filtered to 3.5 kHz bandwidth The polyphase resampler resamples by samp_rate/(decims[1] * decims[0]**3) This results in a constant 8 ksps, irrespective of RF sample rate This 8 ksps audio stream may be added to other demos streams The audio is run through an additional blocking squelch at -200 dB This stops the sample flow so squelced audio is not recorded to file The wav file sink stores 8-bit samples (grainy quality but compact) Default demodulator center freqwuency is 0 Hz This is desired since hardware DC removal reduces sensitivity at 0 Hz AM demod of LO leakage will just be 0 amplitude Args: samp_rate (float): Input baseband sample rate in sps (1E6 minimum) audio_rate (float): Output audio sample rate in sps (8 kHz minimum) record (bool): Record audio to file if True Attributes: center_freq (float): Baseband center frequency in Hz record (bool): Record audio to file if True """ # pylint: disable=too-many-instance-attributes # pylint: disable=too-many-locals def __init__(self, samp_rate=4E6, audio_rate=8000, record=True): gr.hier_block2.__init__(self, "TunerDemodAM", gr.io_signature(1, 1, gr.sizeof_gr_complex), gr.io_signature(1, 1, gr.sizeof_float)) # Default values self.center_freq = 0 squelch_db = -60 self.agc_ref = 0.1 
self.file_name = "/dev/null" self.record = record # Decimation values for four stages of decimation decims = (5, int(samp_rate/1E6)) # Low pass filter taps for decimation by 5 low_pass_filter_taps_0 = \ grfilter.firdes_low_pass(1, 1, 0.090, 0.010, grfilter.firdes.WIN_HAMMING) # Frequency translating FIR filter decimating by 5 self.freq_xlating_fir_filter_ccc = \ grfilter.freq_xlating_fir_filter_ccc(decims[0], low_pass_filter_taps_0, self.center_freq, samp_rate) # FIR filter decimating by 5 fir_filter_ccc_0 = grfilter.fir_filter_ccc(decims[0], low_pass_filter_taps_0) # Low pass filter taps for decimation from samp_rate/25 to 40-79.9 ksps # In other words, decimation by int(samp_rate/1E6) # 12.5 kHz cutoff for NBFM channel bandwidth low_pass_filter_taps_1 = grfilter.firdes_low_pass( 1, samp_rate/decims[0]**2, 12.5E3, 1E3, grfilter.firdes.WIN_HAMMING) # FIR filter decimation by int(samp_rate/1E6) fir_filter_ccc_1 = grfilter.fir_filter_ccc(decims[1], low_pass_filter_taps_1) # Non blocking power squelch # Squelch level needs to be lower than NBFM or else choppy AM demod self.analog_pwr_squelch_cc = analog.pwr_squelch_cc(squelch_db, 1e-1, 0, False) # AGC with reference set for nomninal 0 dB volume # Paramaters tweaked to prevent impulse during squelching self.agc3_cc = analog.agc3_cc(1.0, 1E-4, self.agc_ref, 10, 1) self.agc3_cc.set_max_gain(65536) # AM demod with complex_to_mag() # Can't use analog.am_demod_cf() since it won't work with N>2 demods am_demod_cf = blocks.complex_to_mag(1) # 3.5 kHz cutoff for audio bandwidth low_pass_filter_taps_2 = grfilter.firdes_low_pass(1,\ samp_rate/(decims[1] * decims[0]**2),\ 3.5E3, 500, grfilter.firdes.WIN_HAMMING) # FIR filter decimating by 5 from 40-79.9 ksps to 8-15.98 ksps fir_filter_fff_0 = grfilter.fir_filter_fff(decims[0], low_pass_filter_taps_2) # Polyphase resampler allows arbitary RF sample rates # Takes 8-15.98 ksps to a constant 8 ksps for audio pfb_resamp = audio_rate/float(samp_rate/(decims[1] * decims[0]**3)) 
pfb_arb_resampler_fff = pfb.arb_resampler_fff(pfb_resamp, taps=None, flt_size=32) # Connect the blocks for the demod self.connect(self, self.freq_xlating_fir_filter_ccc) self.connect(self.freq_xlating_fir_filter_ccc, fir_filter_ccc_0) self.connect(fir_filter_ccc_0, fir_filter_ccc_1) self.connect(fir_filter_ccc_1, self.analog_pwr_squelch_cc) self.connect(self.analog_pwr_squelch_cc, self.agc3_cc) self.connect(self.agc3_cc, am_demod_cf) self.connect(am_demod_cf, fir_filter_fff_0) self.connect(fir_filter_fff_0, pfb_arb_resampler_fff) self.connect(pfb_arb_resampler_fff, self) # Need to set this to a very low value of -200 since it is after demod # Only want it to gate when the previuos squelch has gone to zero analog_pwr_squelch_ff = analog.pwr_squelch_ff(-200, 1e-1, 0, True) # File sink with single channel and 8 bits/sample self.blocks_wavfile_sink = blocks.wavfile_sink(self.file_name, 1, audio_rate, 8) # Connect the blocks for recording self.connect(pfb_arb_resampler_fff, analog_pwr_squelch_ff) self.connect(analog_pwr_squelch_ff, self.blocks_wavfile_sink) def set_volume(self, volume_db): """Sets the volume Args: volume_db (float): Volume in dB """ agc_ref = self.agc_ref * 10**(volume_db/20.0) self.agc3_cc.set_reference(agc_ref) class Receiver(gr.top_block): """Receiver for narrow band frequency modulation Controls hardware and instantiates multiple tuner/demodulators Generates FFT power spectrum for channel estimation Args: ask_samp_rate (float): Asking sample rate of hardware in sps (1E6 min) num_demod (int): Number of parallel demodulators type_demod (int): Type of demodulator (0=NBFM, 1=AM) hw_args (string): Argument string to pass to harwdare freq_correction (int): Frequency correction in ppm record (bool): Record audio to file if True Attributes: center_freq (float): Hardware RF center frequency in Hz samp_rate (float): Hardware sample rate in sps (1E6 min) gain_db (int): Hardware RF gain in dB squelch_db (int): Squelch in dB volume_dB (int): Volume in dB """ # 
pylint: disable=too-many-instance-attributes # pylint: disable=too-many-locals # pylint: disable=too-many-arguments def __init__(self, ask_samp_rate=4E6, num_demod=4, type_demod=0, hw_args="uhd", freq_correction=0, record=True, play=True): # Call the initialization method from the parent class gr.top_block.__init__(self, "Receiver") # Default values self.center_freq = 144E6 self.gain_db = 10 self.squelch_db = -70 self.volume_db = 0 audio_rate = 8000 # Setup the USRP source, or use the USRP sim self.src = osmosdr.source(args="numchan=" + str(1) + " " + hw_args) self.src.set_sample_rate(ask_samp_rate) self.src.set_gain(self.gain_db) self.src.set_center_freq(self.center_freq) self.src.set_freq_corr(freq_correction) # Get the sample rate and center frequency from the hardware self.samp_rate = self.src.get_sample_rate() self.center_freq = self.src.get_center_freq() # Set the I/Q bandwidth to 80 % of sample rate self.src.set_bandwidth(0.8 * self.samp_rate) # NBFM channel is about 10 KHz wide # Want about 3 FFT bins to span a channel # Use length FFT so 4 Msps / 1024 = 3906.25 Hz/bin # This also means 3906.25 vectors/second # Using below formula keeps FFT size a power of two # Also keeps bin size constant for power of two sampling rates # Use of 256 sets 3906.25 Hz/bin; increase to reduce bin size samp_ratio = self.samp_rate / 1E6 fft_length = 256 * int(pow(2, np.ceil(np.log(samp_ratio)/np.log(2)))) # -----------Flow for FFT-------------- # Convert USRP steam to vector stream_to_vector = blocks.stream_to_vector(gr.sizeof_gr_complex*1, fft_length) # Want about 1000 vector/sec amount = int(round(self.samp_rate/fft_length/1000)) keep_one_in_n = blocks.keep_one_in_n(gr.sizeof_gr_complex* fft_length, amount) # Take FFT fft_vcc = fft.fft_vcc(fft_length, True, window.blackmanharris(fft_length), True, 1) # Compute the power complex_to_mag_squared = blocks.complex_to_mag_squared(fft_length) # Video average and decimate from 1000 vector/sec to 10 vector/sec integrate_ff = 
blocks.integrate_ff(100, fft_length) # Probe vector self.probe_signal_vf = blocks.probe_signal_vf(fft_length) # Connect the blocks self.connect(self.src, stream_to_vector, keep_one_in_n, fft_vcc, complex_to_mag_squared, integrate_ff, self.probe_signal_vf) # -----------Flow for Demod-------------- # Create N parallel demodulators as a list of objects # Default to NBFM demod self.demodulators = [] for idx in range(num_demod): if type_demod == 1: self.demodulators.append(TunerDemodAM(self.samp_rate, audio_rate, record)) else: self.demodulators.append(TunerDemodNBFM(self.samp_rate, audio_rate, record)) if play: # Create an adder add_ff = blocks.add_ff(1) # Connect the demodulators between the source and adder for idx, demodulator in enumerate(self.demodulators): self.connect(self.src, demodulator, (add_ff, idx)) # Audio sink audio_sink = audio.sink(audio_rate) # Connect the summed outputs to the audio sink self.connect(add_ff, audio_sink) else: # Just connect each demodulator to the receiver source for demodulator in self.demodulators: self.connect(self.src, demodulator) def set_center_freq(self, center_freq): """Sets RF center frequency of hardware Args: center_freq (float): Hardware RF center frequency in Hz """ # Tune the hardware self.src.set_center_freq(center_freq) # Update center frequency with hardware center frequency # Do this to account for slight hardware offsets self.center_freq = self.src.get_center_freq() def set_gain(self, gain_db): """Sets gain of RF hardware Args: gain_db (float): Hardware RF gain in dB """ self.src.set_gain(gain_db) self.gain_db = self.src.get_gain() def set_squelch(self, squelch_db): """Sets squelch of all demodulators and clamps range Args: squelch_db (float): Squelch in dB """ self.squelch_db = max(min(0, squelch_db), -100) for demodulator in self.demodulators: demodulator.set_squelch(self.squelch_db) def set_volume(self, volume_db): """Sets volume of all demodulators and clamps range Args: volume_db (float): Volume in dB """ 
self.volume_db = max(min(20, volume_db), -20) for demodulator in self.demodulators: demodulator.set_volume(self.volume_db) def get_demod_freqs(self): """Gets baseband frequencies of all demodulators Returns: List[float]: List of baseband center frequencies in Hz """ center_freqs = [] for demodulator in self.demodulators: center_freqs.append(demodulator.center_freq) return center_freqs def main(): """Test the receiver Sets up the hadrware Tunes a couple of demodulators Prints the max power spectrum """ # Create receiver object ask_samp_rate = 4E6 num_demod = 4 type_demod = 0 hw_args = "uhd" freq_correction = 0 record = False play = True receiver = Receiver(ask_samp_rate, num_demod, type_demod, hw_args, freq_correction, record, play) # Start the receiver and wait for samples to accumulate receiver.start() time.sleep(1) # Set frequency, gain, squelch, and volume center_freq = 144.5E6 receiver.set_center_freq(center_freq) receiver.set_gain(10) print "\n" print "Started %s at %.3f Msps" % (hw_args, receiver.samp_rate/1E6) print "RX at %.3f MHz with %d dB gain" % (receiver.center_freq/1E6, receiver.gain_db) receiver.set_squelch(-60) receiver.set_volume(0) print "%d demods of type %d at %d dB squelch and %d dB volume" % \ (num_demod, type_demod, receiver.squelch_db, receiver.volume_db) # Create some baseband channels to tune based on 144 MHz center channels = np.zeros(num_demod) channels[0] = 144.39E6 - receiver.center_freq # APRS channels[1] = 144.6E6 - receiver.center_freq # Tune demodulators to baseband channels # If recording on, this creates empty wav file since manually tuning. 
for idx, demodulator in enumerate(receiver.demodulators): demodulator.set_center_freq(channels[idx], center_freq) # Print demodulator info for idx, channel in enumerate(channels): print "Tuned demod %d to %.3f MHz" % (idx, (channel+receiver.center_freq) /1E6) while 1: # No need to go faster than 10 Hz rate of GNU Radio probe # Just do 1 Hz here time.sleep(1) # Grab the FFT data and print max value spectrum = receiver.probe_signal_vf.level() print "Max spectrum of %.3f" % (np.max(spectrum)) # Stop the receiver receiver.stop() receiver.wait() if __name__ == '__main__': try: main() except KeyboardInterrupt: pass<|fim▁end|>
gain = self.quad_demod_gain * 10**(volume_db/20.0)
<|file_name|>SearchViewSubject.java<|end_file_name|><|fim▁begin|>/* * Copyright 2013 Square, Inc. * Copyright 2016 PKWARE, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at<|fim▁hole|> * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.pkware.truth.androidx.appcompat.widget; import androidx.annotation.StringRes; import androidx.appcompat.widget.SearchView; import androidx.cursoradapter.widget.CursorAdapter; import com.google.common.truth.FailureMetadata; import javax.annotation.Nonnull; import javax.annotation.Nullable; /** * Propositions for {@link SearchView} subjects. 
*/ public class SearchViewSubject extends AbstractLinearLayoutCompatSubject<SearchView> { @Nullable private final SearchView actual; public SearchViewSubject(@Nonnull FailureMetadata failureMetadata, @Nullable SearchView actual) { super(failureMetadata, actual); this.actual = actual; } public void hasImeOptions(int options) { check("getImeOptions()").that(actual.getImeOptions()).isEqualTo(options); } public void hasInputType(int type) { check("getInputType()").that(actual.getInputType()).isEqualTo(type); } public void hasMaximumWidth(int width) { check("getMaxWidth()").that(actual.getMaxWidth()).isEqualTo(width); } public void hasQuery(@Nullable String query) { check("getQuery()").that(actual.getQuery().toString()).isEqualTo(query); } public void hasQueryHint(@Nullable String hint) { CharSequence actualHint = actual.getQueryHint(); String actualHintString; if (actualHint == null) { actualHintString = null; } else { actualHintString = actualHint.toString(); } check("getQueryHint()").that(actualHintString).isEqualTo(hint); } public void hasQueryHint(@StringRes int resId) { hasQueryHint(actual.getContext().getString(resId)); } public void hasSuggestionsAdapter(@Nullable CursorAdapter adapter) { check("getSuggestionsAdapter()").that(actual.getSuggestionsAdapter()).isSameInstanceAs(adapter); } public void isIconifiedByDefault() { check("isIconfiedByDefault()").that(actual.isIconfiedByDefault()).isTrue(); } public void isNotIconifiedByDefault() { check("isIconfiedByDefault()").that(actual.isIconfiedByDefault()).isFalse(); } public void isIconified() { check("isIconified()").that(actual.isIconified()).isTrue(); } public void isNotIconified() { check("isIconified()").that(actual.isIconified()).isFalse(); } public void isQueryRefinementEnabled() { check("isQueryRefinementEnabled()").that(actual.isQueryRefinementEnabled()).isTrue(); } public void isQueryRefinementDisabled() { check("isQueryRefinementEnabled()").that(actual.isQueryRefinementEnabled()).isFalse(); } public void 
isSubmitButtonEnabled() { check("isSubmitButtonEnabled()").that(actual.isSubmitButtonEnabled()).isTrue(); } public void isSubmitButtonDisabled() { check("isSubmitButtonEnabled()").that(actual.isSubmitButtonEnabled()).isFalse(); } }<|fim▁end|>
*
<|file_name|>tool-list_networks.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # -*- coding: utf-8 -*- """ This bot regenerates the page VEIDs <|fim▁hole|> """ __version__ = '$Id: basic.py 4946 2008-01-29 14:58:25Z wikipedian $' import wikipedia import pagegenerators, catlib, re, socket, sys from iplib import CIDR, IPv4Address class IpNetworkBot: def __init__(self, nets_generator, debug): """ Constructor. Parameters: * generator - The page generator that determines on which pages to work on. * debug - If True, doesn't do any real changes, but only shows what would have been changed. """ self.nets_generator = nets_generator self.nets = dict() self.debug = debug def registerIpNet(self, page): if ":" in page.title(): return text = page.get() in_ipnettpl = False private = False for line in text.split("\n"): if line.startswith("{{IPNetwork"): in_ipnettpl = True continue if line.startswith("}}"): in_ipnettpl = False continue if in_ipnettpl: if line.startswith("|PRIVATE=1"): private = True if not private: print page.title() def run(self): print "# generated by netlist.py" for page in self.nets_generator: self.registerIpNet(page) def main(): # The generator gives the pages that should be worked upon. gen = None # If debug is True, doesn't do any real changes, but only show # what would have been changed. debug = False wantHelp = False # Parse command line arguments for arg in wikipedia.handleArgs(): if arg.startswith("-debug"): debug = True else: wantHelp = True if not wantHelp: # The preloading generator is responsible for downloading multiple # pages from the wiki simultaneously. cat = catlib.Category(wikipedia.getSite(), 'Category:%s' % 'IP-Network') nets_gen = pagegenerators.CategorizedPageGenerator(cat, start = None, recurse = False) nets_gen = pagegenerators.PreloadingGenerator(nets_gen) bot = IpNetworkBot(nets_gen, debug) bot.run() else: wikipedia.showHelp() if __name__ == "__main__": try: main() finally: wikipedia.stopme()<|fim▁end|>
The following parameters are supported: -debug If given, doesn't do any real changes, but only shows what would have been changed.
<|file_name|>deliver_test.go<|end_file_name|><|fim▁begin|>/* Copyright IBM Corp. 2016 All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package deliver import ( "fmt" "io" "testing" "time" mockpolicies "github.com/hyperledger/fabric/common/mocks/policies" "github.com/hyperledger/fabric/common/policies" "github.com/hyperledger/fabric/common/tools/configtxgen/provisional" "github.com/hyperledger/fabric/orderer/common/ledger" ramledger "github.com/hyperledger/fabric/orderer/common/ledger/ram" cb "github.com/hyperledger/fabric/protos/common" ab "github.com/hyperledger/fabric/protos/orderer" "github.com/hyperledger/fabric/protos/utils" logging "github.com/op/go-logging" "github.com/stretchr/testify/assert" "google.golang.org/grpc" ) var genesisBlock = cb.NewBlock(0, nil) var systemChainID = "systemChain" const ledgerSize = 10 func init() { logging.SetLevel(logging.DEBUG, "") } type mockD struct { grpc.ServerStream recvChan chan *cb.Envelope sendChan chan *ab.DeliverResponse } func newMockD() *mockD { return &mockD{ recvChan: make(chan *cb.Envelope), sendChan: make(chan *ab.DeliverResponse), } } func (m *mockD) Send(br *ab.DeliverResponse) error { m.sendChan <- br return nil } func (m *mockD) Recv() (*cb.Envelope, error) { msg, ok := <-m.recvChan if !ok { return msg, io.EOF } return msg, nil } type erroneousRecvMockD struct { grpc.ServerStream } func (m *erroneousRecvMockD) Send(br *ab.DeliverResponse) error { return nil } func (m *erroneousRecvMockD) Recv() (*cb.Envelope, error) { // The 
point here is to simulate an error other than EOF. // We don't bother to create a new custom error type. return nil, io.ErrUnexpectedEOF } type erroneousSendMockD struct { grpc.ServerStream recvVal *cb.Envelope } func (m *erroneousSendMockD) Send(br *ab.DeliverResponse) error { // The point here is to simulate an error other than EOF. // We don't bother to create a new custom error type. return io.ErrUnexpectedEOF } func (m *erroneousSendMockD) Recv() (*cb.Envelope, error) { return m.recvVal, nil } type mockSupportManager struct { chains map[string]*mockSupport } func (mm *mockSupportManager) GetChain(chainID string) (Support, bool) { cs, ok := mm.chains[chainID] return cs, ok } type mockSupport struct { ledger ledger.ReadWriter policyManager *mockpolicies.Manager erroredChan chan struct{} configSeq uint64 } func (mcs *mockSupport) Errored() <-chan struct{} { return mcs.erroredChan } func (mcs *mockSupport) Sequence() uint64 { return mcs.configSeq } func (mcs *mockSupport) PolicyManager() policies.Manager { return mcs.policyManager } func (mcs *mockSupport) Reader() ledger.Reader { return mcs.ledger } func NewRAMLedger() ledger.ReadWriter { rlf := ramledger.New(ledgerSize + 1) rl, _ := rlf.GetOrCreate(provisional.TestChainID) rl.Append(genesisBlock) return rl } func initializeDeliverHandler() Handler { mm := newMockMultichainManager() for i := 1; i < ledgerSize; i++ { l := mm.chains[systemChainID].ledger l.Append(ledger.CreateNextBlock(l, []*cb.Envelope{&cb.Envelope{Payload: []byte(fmt.Sprintf("%d", i))}})) } return NewHandlerImpl(mm) } func newMockMultichainManager() *mockSupportManager { rl := NewRAMLedger() mm := &mockSupportManager{ chains: make(map[string]*mockSupport), } mm.chains[systemChainID] = &mockSupport{ ledger: rl, policyManager: &mockpolicies.Manager{Policy: &mockpolicies.Policy{}}, erroredChan: make(chan struct{}), } return mm } var seekOldest = &ab.SeekPosition{Type: &ab.SeekPosition_Oldest{Oldest: &ab.SeekOldest{}}} var seekNewest = 
&ab.SeekPosition{Type: &ab.SeekPosition_Newest{Newest: &ab.SeekNewest{}}} func seekSpecified(number uint64) *ab.SeekPosition { return &ab.SeekPosition{Type: &ab.SeekPosition_Specified{Specified: &ab.SeekSpecified{Number: number}}} } func makeSeek(chainID string, seekInfo *ab.SeekInfo) *cb.Envelope { return &cb.Envelope{ Payload: utils.MarshalOrPanic(&cb.Payload{ Header: &cb.Header{ ChannelHeader: utils.MarshalOrPanic(&cb.ChannelHeader{ ChannelId: chainID, }), SignatureHeader: utils.MarshalOrPanic(&cb.SignatureHeader{}), }, Data: utils.MarshalOrPanic(seekInfo), }), } } func TestWholeChainSeek(t *testing.T) {<|fim▁hole|> m := newMockD() defer close(m.recvChan) ds := initializeDeliverHandler() go ds.Handle(m) m.recvChan <- makeSeek(systemChainID, &ab.SeekInfo{Start: seekOldest, Stop: seekNewest, Behavior: ab.SeekInfo_BLOCK_UNTIL_READY}) count := uint64(0) for { select { case deliverReply := <-m.sendChan: if deliverReply.GetBlock() == nil { if deliverReply.GetStatus() != cb.Status_SUCCESS { t.Fatalf("Received an error on the reply channel") } if count != ledgerSize { t.Fatalf("Expected %d blocks but got %d", ledgerSize, count) } return } if deliverReply.GetBlock().Header.Number != count { t.Fatalf("Expected block %d but got block %d", count, deliverReply.GetBlock().Header.Number) } case <-time.After(time.Second): t.Fatalf("Timed out waiting to get all blocks") } count++ } } func TestNewestSeek(t *testing.T) { m := newMockD() defer close(m.recvChan) ds := initializeDeliverHandler() go ds.Handle(m) m.recvChan <- makeSeek(systemChainID, &ab.SeekInfo{Start: seekNewest, Stop: seekNewest, Behavior: ab.SeekInfo_BLOCK_UNTIL_READY}) select { case deliverReply := <-m.sendChan: if deliverReply.GetBlock() == nil { t.Fatalf("Received an error on the reply channel") } if deliverReply.GetBlock().Header.Number != uint64(ledgerSize-1) { t.Fatalf("Expected only the most recent block") } case <-time.After(time.Second): t.Fatalf("Timed out waiting to get all blocks") } } func 
TestSpecificSeek(t *testing.T) { m := newMockD() defer close(m.recvChan) ds := initializeDeliverHandler() go ds.Handle(m) specifiedStart := uint64(3) specifiedStop := uint64(7) m.recvChan <- makeSeek(systemChainID, &ab.SeekInfo{Start: seekSpecified(specifiedStart), Stop: seekSpecified(specifiedStop), Behavior: ab.SeekInfo_BLOCK_UNTIL_READY}) count := uint64(0) for { select { case deliverReply := <-m.sendChan: if deliverReply.GetBlock() == nil { if deliverReply.GetStatus() != cb.Status_SUCCESS { t.Fatalf("Received an error on the reply channel") } return } if expected := specifiedStart + count; deliverReply.GetBlock().Header.Number != expected { t.Fatalf("Expected block %d but got block %d", expected, deliverReply.GetBlock().Header.Number) } case <-time.After(time.Second): t.Fatalf("Timed out waiting to get all blocks") } count++ } } func TestUnauthorizedSeek(t *testing.T) { mm := newMockMultichainManager() for i := 1; i < ledgerSize; i++ { l := mm.chains[systemChainID].ledger l.Append(ledger.CreateNextBlock(l, []*cb.Envelope{&cb.Envelope{Payload: []byte(fmt.Sprintf("%d", i))}})) } mm.chains[systemChainID].policyManager.Policy.Err = fmt.Errorf("Fail to evaluate policy") m := newMockD() defer close(m.recvChan) ds := NewHandlerImpl(mm) go ds.Handle(m) m.recvChan <- makeSeek(systemChainID, &ab.SeekInfo{Start: seekSpecified(uint64(0)), Stop: seekSpecified(uint64(0)), Behavior: ab.SeekInfo_BLOCK_UNTIL_READY}) select { case deliverReply := <-m.sendChan: if deliverReply.GetStatus() != cb.Status_FORBIDDEN { t.Fatalf("Received wrong error on the reply channel") } case <-time.After(time.Second): t.Fatalf("Timed out waiting to get all blocks") } } func TestRevokedAuthorizationSeek(t *testing.T) { mm := newMockMultichainManager() for i := 1; i < ledgerSize; i++ { l := mm.chains[systemChainID].ledger l.Append(ledger.CreateNextBlock(l, []*cb.Envelope{&cb.Envelope{Payload: []byte(fmt.Sprintf("%d", i))}})) } m := newMockD() defer close(m.recvChan) ds := NewHandlerImpl(mm) go 
ds.Handle(m) m.recvChan <- makeSeek(systemChainID, &ab.SeekInfo{Start: seekSpecified(uint64(ledgerSize - 1)), Stop: seekSpecified(ledgerSize), Behavior: ab.SeekInfo_BLOCK_UNTIL_READY}) select { case deliverReply := <-m.sendChan: assert.NotNil(t, deliverReply.GetBlock(), "First should succeed") case <-time.After(time.Second): t.Fatalf("Timed out waiting to get all blocks") } mm.chains[systemChainID].policyManager.Policy.Err = fmt.Errorf("Fail to evaluate policy") mm.chains[systemChainID].configSeq++ l := mm.chains[systemChainID].ledger l.Append(ledger.CreateNextBlock(l, []*cb.Envelope{&cb.Envelope{Payload: []byte(fmt.Sprintf("%d", ledgerSize+1))}})) select { case deliverReply := <-m.sendChan: assert.Equal(t, cb.Status_FORBIDDEN, deliverReply.GetStatus(), "Second should been forbidden ") case <-time.After(time.Second): t.Fatalf("Timed out waiting to get all blocks") } } func TestOutOfBoundSeek(t *testing.T) { m := newMockD() defer close(m.recvChan) ds := initializeDeliverHandler() go ds.Handle(m) m.recvChan <- makeSeek(systemChainID, &ab.SeekInfo{Start: seekSpecified(uint64(3 * ledgerSize)), Stop: seekSpecified(uint64(3 * ledgerSize)), Behavior: ab.SeekInfo_BLOCK_UNTIL_READY}) select { case deliverReply := <-m.sendChan: if deliverReply.GetStatus() != cb.Status_NOT_FOUND { t.Fatalf("Received wrong error on the reply channel") } case <-time.After(time.Second): t.Fatalf("Timed out waiting to get all blocks") } } func TestFailFastSeek(t *testing.T) { m := newMockD() defer close(m.recvChan) ds := initializeDeliverHandler() go ds.Handle(m) m.recvChan <- makeSeek(systemChainID, &ab.SeekInfo{Start: seekSpecified(uint64(ledgerSize - 1)), Stop: seekSpecified(ledgerSize), Behavior: ab.SeekInfo_FAIL_IF_NOT_READY}) select { case deliverReply := <-m.sendChan: if deliverReply.GetBlock() == nil { t.Fatalf("Expected to receive first block") } case <-time.After(time.Second): t.Fatalf("Timed out waiting to get all blocks") } select { case deliverReply := <-m.sendChan: if 
deliverReply.GetStatus() != cb.Status_NOT_FOUND { t.Fatalf("Expected to receive failure for second block") } case <-time.After(time.Second): t.Fatalf("Timed out waiting to get all blocks") } } func TestBlockingSeek(t *testing.T) { mm := newMockMultichainManager() for i := 1; i < ledgerSize; i++ { l := mm.chains[systemChainID].ledger l.Append(ledger.CreateNextBlock(l, []*cb.Envelope{&cb.Envelope{Payload: []byte(fmt.Sprintf("%d", i))}})) } m := newMockD() defer close(m.recvChan) ds := NewHandlerImpl(mm) go ds.Handle(m) m.recvChan <- makeSeek(systemChainID, &ab.SeekInfo{Start: seekSpecified(uint64(ledgerSize - 1)), Stop: seekSpecified(ledgerSize), Behavior: ab.SeekInfo_BLOCK_UNTIL_READY}) select { case deliverReply := <-m.sendChan: if deliverReply.GetBlock() == nil { t.Fatalf("Expected to receive first block") } case <-time.After(time.Second): t.Fatalf("Timed out waiting to get first block") } select { case <-m.sendChan: t.Fatalf("Should not have delivered an error or second block") case <-time.After(50 * time.Millisecond): } l := mm.chains[systemChainID].ledger l.Append(ledger.CreateNextBlock(l, []*cb.Envelope{&cb.Envelope{Payload: []byte(fmt.Sprintf("%d", ledgerSize+1))}})) select { case deliverReply := <-m.sendChan: if deliverReply.GetBlock() == nil { t.Fatalf("Expected to receive new block") } case <-time.After(time.Second): t.Fatalf("Timed out waiting to get new block") } select { case deliverReply := <-m.sendChan: if deliverReply.GetStatus() != cb.Status_SUCCESS { t.Fatalf("Expected delivery to complete") } case <-time.After(time.Second): t.Fatalf("Timed out waiting to get all blocks") } } func TestErroredSeek(t *testing.T) { mm := newMockMultichainManager() ms := mm.chains[systemChainID] l := ms.ledger close(ms.erroredChan) for i := 1; i < ledgerSize; i++ { l.Append(ledger.CreateNextBlock(l, []*cb.Envelope{&cb.Envelope{Payload: []byte(fmt.Sprintf("%d", i))}})) } m := newMockD() defer close(m.recvChan) ds := NewHandlerImpl(mm) go ds.Handle(m) m.recvChan <- 
makeSeek(systemChainID, &ab.SeekInfo{Start: seekSpecified(uint64(ledgerSize - 1)), Stop: seekSpecified(ledgerSize), Behavior: ab.SeekInfo_BLOCK_UNTIL_READY}) select { case deliverReply := <-m.sendChan: assert.Equal(t, cb.Status_SERVICE_UNAVAILABLE, deliverReply.GetStatus(), "Mock support errored") case <-time.After(time.Second): t.Fatalf("Timed out waiting for error response") } } func TestErroredBlockingSeek(t *testing.T) { mm := newMockMultichainManager() ms := mm.chains[systemChainID] l := ms.ledger for i := 1; i < ledgerSize; i++ { l.Append(ledger.CreateNextBlock(l, []*cb.Envelope{&cb.Envelope{Payload: []byte(fmt.Sprintf("%d", i))}})) } m := newMockD() defer close(m.recvChan) ds := NewHandlerImpl(mm) go ds.Handle(m) m.recvChan <- makeSeek(systemChainID, &ab.SeekInfo{Start: seekSpecified(uint64(ledgerSize - 1)), Stop: seekSpecified(ledgerSize), Behavior: ab.SeekInfo_BLOCK_UNTIL_READY}) select { case deliverReply := <-m.sendChan: assert.NotNil(t, deliverReply.GetBlock(), "Expected first block") case <-time.After(time.Second): t.Fatalf("Timed out waiting to get first block") } close(ms.erroredChan) select { case deliverReply := <-m.sendChan: assert.Equal(t, cb.Status_SERVICE_UNAVAILABLE, deliverReply.GetStatus(), "Mock support errored") case <-time.After(time.Second): t.Fatalf("Timed out waiting for error response") } } func TestSGracefulShutdown(t *testing.T) { m := newMockD() ds := NewHandlerImpl(nil) close(m.recvChan) assert.NoError(t, ds.Handle(m), "Expected no error for hangup") } func TestReversedSeqSeek(t *testing.T) { m := newMockD() defer close(m.recvChan) ds := initializeDeliverHandler() go ds.Handle(m) specifiedStart := uint64(7) specifiedStop := uint64(3) m.recvChan <- makeSeek(systemChainID, &ab.SeekInfo{Start: seekSpecified(specifiedStart), Stop: seekSpecified(specifiedStop), Behavior: ab.SeekInfo_BLOCK_UNTIL_READY}) select { case deliverReply := <-m.sendChan: if deliverReply.GetStatus() != cb.Status_BAD_REQUEST { t.Fatalf("Received wrong error on 
the reply channel") } case <-time.After(time.Second): t.Fatalf("Timed out waiting to get all blocks") } } func TestBadStreamRecv(t *testing.T) { bh := NewHandlerImpl(nil) assert.Error(t, bh.Handle(&erroneousRecvMockD{}), "Should catch unexpected stream error") } func TestBadStreamSend(t *testing.T) { m := &erroneousSendMockD{recvVal: makeSeek(systemChainID, &ab.SeekInfo{Start: seekNewest, Stop: seekNewest, Behavior: ab.SeekInfo_BLOCK_UNTIL_READY})} ds := initializeDeliverHandler() assert.Error(t, ds.Handle(m), "Should catch unexpected stream error") } func TestOldestSeek(t *testing.T) { m := newMockD() defer close(m.recvChan) ds := initializeDeliverHandler() go ds.Handle(m) m.recvChan <- makeSeek(systemChainID, &ab.SeekInfo{Start: seekOldest, Stop: seekOldest, Behavior: ab.SeekInfo_BLOCK_UNTIL_READY}) select { case deliverReply := <-m.sendChan: assert.NotEqual(t, nil, deliverReply.GetBlock(), "Received an error on the reply channel") assert.Equal(t, uint64(0), deliverReply.GetBlock().Header.Number, "Expected only the most recent block") case <-time.After(time.Second): t.Fatalf("Timed out waiting to get all blocks") } } func TestNoPayloadSeek(t *testing.T) { m := newMockD() defer close(m.recvChan) ds := initializeDeliverHandler() go ds.Handle(m) m.recvChan <- &cb.Envelope{Payload: []byte("Foo")} select { case deliverReply := <-m.sendChan: assert.Equal(t, cb.Status_BAD_REQUEST, deliverReply.GetStatus(), "Received wrong error on the reply channel") case <-time.After(time.Second): t.Fatalf("Timed out waiting to get all blocks") } } func TestNilPayloadHeaderSeek(t *testing.T) { m := newMockD() defer close(m.recvChan) ds := initializeDeliverHandler() go ds.Handle(m) m.recvChan <- &cb.Envelope{Payload: utils.MarshalOrPanic(&cb.Payload{})} select { case deliverReply := <-m.sendChan: assert.Equal(t, cb.Status_BAD_REQUEST, deliverReply.GetStatus(), "Received wrong error on the reply channel") case <-time.After(time.Second): t.Fatalf("Timed out waiting to get all blocks") } } 
func TestBadChannelHeader(t *testing.T) { m := newMockD() defer close(m.recvChan) ds := initializeDeliverHandler() go ds.Handle(m) m.recvChan <- &cb.Envelope{Payload: utils.MarshalOrPanic(&cb.Payload{ Header: &cb.Header{ChannelHeader: []byte("Foo")}, })} select { case deliverReply := <-m.sendChan: assert.Equal(t, cb.Status_BAD_REQUEST, deliverReply.GetStatus(), "Received wrong error on the reply channel") case <-time.After(time.Second): t.Fatalf("Timed out waiting to get all blocks") } } func TestChainNotFound(t *testing.T) { mm := &mockSupportManager{ chains: make(map[string]*mockSupport), } m := newMockD() defer close(m.recvChan) ds := NewHandlerImpl(mm) go ds.Handle(m) m.recvChan <- makeSeek(systemChainID, &ab.SeekInfo{Start: seekNewest, Stop: seekNewest, Behavior: ab.SeekInfo_BLOCK_UNTIL_READY}) select { case deliverReply := <-m.sendChan: assert.Equal(t, cb.Status_NOT_FOUND, deliverReply.GetStatus(), "Received wrong error on the reply channel") case <-time.After(time.Second): t.Fatalf("Timed out waiting to get all blocks") } } func TestBadSeekInfoPayload(t *testing.T) { m := newMockD() defer close(m.recvChan) ds := initializeDeliverHandler() go ds.Handle(m) m.recvChan <- &cb.Envelope{ Payload: utils.MarshalOrPanic(&cb.Payload{ Header: &cb.Header{ ChannelHeader: utils.MarshalOrPanic(&cb.ChannelHeader{ ChannelId: systemChainID, }), SignatureHeader: utils.MarshalOrPanic(&cb.SignatureHeader{}), }, Data: []byte("Foo"), }), } select { case deliverReply := <-m.sendChan: assert.Equal(t, cb.Status_BAD_REQUEST, deliverReply.GetStatus(), "Received wrong error on the reply channel") case <-time.After(time.Second): t.Fatalf("Timed out waiting to get all blocks") } } func TestMissingSeekPosition(t *testing.T) { m := newMockD() defer close(m.recvChan) ds := initializeDeliverHandler() go ds.Handle(m) m.recvChan <- &cb.Envelope{ Payload: utils.MarshalOrPanic(&cb.Payload{ Header: &cb.Header{ ChannelHeader: utils.MarshalOrPanic(&cb.ChannelHeader{ ChannelId: systemChainID, }), 
SignatureHeader: utils.MarshalOrPanic(&cb.SignatureHeader{}), }, Data: nil, }), } select { case deliverReply := <-m.sendChan: assert.Equal(t, cb.Status_BAD_REQUEST, deliverReply.GetStatus(), "Received wrong error on the reply channel") case <-time.After(time.Second): t.Fatalf("Timed out waiting to get all blocks") } }<|fim▁end|>
<|file_name|>app.js<|end_file_name|><|fim▁begin|>define(['exports'], function (exports) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } var App = exports.App = function () { function App() { _classCallCheck(this, App); this.firstName = 'John'; this.lastName = 'Doe'; } App.prototype.mouseMove = function mouseMove(e) { this.mouseX = e.clientX; this.mouseY = e.clientY; }; App.prototype.mouseMove200 = function mouseMove200(e) { this.mouse200X = e.clientX; this.mouse200Y = e.clientY; }; App.prototype.mouseMove800 = function mouseMove800(e) { this.mouse800X = e.clientX; this.mouse800Y = e.clientY; };<|fim▁hole|><|fim▁end|>
return App; }(); });
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import patterns, include, url from django.conf import settings from django.conf.urls.static import static from django.contrib import admin # not sure about line 7 admin.autodiscover() urlpatterns = patterns('', url(r'^admin/', include(admin.site.urls)), url(r'^dropzone-drag-drop/$', include('dragdrop.urls', namespace="dragdrop", app_name="dragdrop")), url(r'^index/$', 'dragdrop.views.GetUserImages'), url(r'^$', 'signups.views.home', name='home'), url(r'^register/$', 'drinker.views.DrinkerRegistration'), url(r'^login/$', 'drinker.views.LoginRequest'),<|fim▁hole|> # Uncomment the admin/doc line below to enable admin documentation: # url(r'^admin/doc/', include('django.contrib.admindocs.urls')), # not sure if I need an actual url wrapper in this code. # url(r'^admin/varnish/', include('varnishapp.urls')), ) if settings.DEBUG: # urlpatterns add STATIC_URL and serves the STATIC_ROOT file urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)<|fim▁end|>
url(r'^logout/$', 'drinker.views.LogOutRequest'), url(r'^index/filter/$', 'filter.views.changeBright'),
<|file_name|>Ge.CorCorporateRow.ts<|end_file_name|><|fim▁begin|> namespace GestionEquestre.Ge { export interface CorCorporateRow { Id?: number; IsActive?: boolean; NotArchive?: boolean; InsertDate?: string; InsertUserId?: number; UpdateDate?: string; UpdateUserId?: number; Name?: string; Phone?: string; Gsm?: string; OtherPhone1?: string; Email1?: string; Email2?: string; BankAccount?: number; IdAdress?: number; FrSiren?: string; Caption?: string; ArchiveDate?: string; BankAccountCountryCodeIban?: string; BankAccountCheckDigitsIban?: string; BankAccountBban?: string; BankAccountBic?: string; BankAccountAccountOnwer?: string; BankAccountBankAdress?: string; BankAccountCis?: string; BankAccountUrm?: string; BankAccountMandateDateSign?: string; BankAccountTypePayment?: number; BankAccountDateLastPrelevement?: string; BankAccountTypeOfLastPayment?: number; IdAdressIsActive?: boolean; IdAdressNotArchive?: boolean; IdAdressInsertDate?: string; IdAdressInsertUserId?: number; IdAdressUpdateDate?: string; IdAdressUpdateUserId?: number; IdAdressAdress1?: string; IdAdressAdress2?: string; IdAdressAdress3?: string; IdAdressCity?: number; IdAdressCountry?: number; IdAdressCedex?: string; IdAdressBuilding?: string; IdAdressCaption?: string; IdAdressArchiveDate?: string; } export namespace CorCorporateRow { export const idProperty = 'Id'; export const nameProperty = 'Name'; export const localTextPrefix = 'Ge.CorCorporate'; export namespace Fields { export declare const Id; export declare const IsActive; export declare const NotArchive; export declare const InsertDate; export declare const InsertUserId; export declare const UpdateDate; export declare const UpdateUserId; export declare const Name; export declare const Phone; export declare const Gsm; export declare const OtherPhone1; export declare const Email1; export declare const Email2; export declare const BankAccount; export declare const IdAdress; export declare const FrSiren; export declare const Caption; export declare 
const ArchiveDate; export declare const BankAccountCountryCodeIban; export declare const BankAccountCheckDigitsIban; export declare const BankAccountBban; export declare const BankAccountBic; export declare const BankAccountAccountOnwer; export declare const BankAccountBankAdress; export declare const BankAccountCis; export declare const BankAccountUrm; export declare const BankAccountMandateDateSign; export declare const BankAccountTypePayment; export declare const BankAccountDateLastPrelevement; export declare const BankAccountTypeOfLastPayment; export declare const IdAdressIsActive; export declare const IdAdressNotArchive; export declare const IdAdressInsertDate; export declare const IdAdressInsertUserId; export declare const IdAdressUpdateDate; export declare const IdAdressUpdateUserId; export declare const IdAdressAdress1; export declare const IdAdressAdress2; export declare const IdAdressAdress3; export declare const IdAdressCity; export declare const IdAdressCountry; export declare const IdAdressCedex; export declare const IdAdressBuilding; export declare const IdAdressCaption; export declare const IdAdressArchiveDate; } [ 'Id',<|fim▁hole|> 'InsertDate', 'InsertUserId', 'UpdateDate', 'UpdateUserId', 'Name', 'Phone', 'Gsm', 'OtherPhone1', 'Email1', 'Email2', 'BankAccount', 'IdAdress', 'FrSiren', 'Caption', 'ArchiveDate', 'BankAccountCountryCodeIban', 'BankAccountCheckDigitsIban', 'BankAccountBban', 'BankAccountBic', 'BankAccountAccountOnwer', 'BankAccountBankAdress', 'BankAccountCis', 'BankAccountUrm', 'BankAccountMandateDateSign', 'BankAccountTypePayment', 'BankAccountDateLastPrelevement', 'BankAccountTypeOfLastPayment', 'IdAdressIsActive', 'IdAdressNotArchive', 'IdAdressInsertDate', 'IdAdressInsertUserId', 'IdAdressUpdateDate', 'IdAdressUpdateUserId', 'IdAdressAdress1', 'IdAdressAdress2', 'IdAdressAdress3', 'IdAdressCity', 'IdAdressCountry', 'IdAdressCedex', 'IdAdressBuilding', 'IdAdressCaption', 'IdAdressArchiveDate' ].forEach(x => (<any>Fields)[x] = x); } 
}<|fim▁end|>
'IsActive', 'NotArchive',
<|file_name|>rnn_cell_wrapper_v2_test.py<|end_file_name|><|fim▁begin|># Copyright 2019 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for RNN cell wrapper v2 implementation.""" from absl.testing import parameterized import numpy as np from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.keras import combinations from tensorflow.python.keras import layers from tensorflow.python.keras.layers import rnn_cell_wrapper_v2 from tensorflow.python.keras.layers.legacy_rnn import rnn_cell_impl from tensorflow.python.keras.legacy_tf_layers import base as legacy_base_layer from tensorflow.python.keras.utils import generic_utils from tensorflow.python.ops import array_ops from tensorflow.python.ops import init_ops from tensorflow.python.ops import variables as variables_lib from tensorflow.python.platform import test @combinations.generate(combinations.combine(mode=["graph", "eager"])) class RNNCellWrapperTest(test.TestCase, parameterized.TestCase): def testResidualWrapper(self): wrapper_type = rnn_cell_wrapper_v2.ResidualWrapper x = ops.convert_to_tensor_v2_with_dispatch( np.array([[1., 1., 1.]]), dtype="float32") m = ops.convert_to_tensor_v2_with_dispatch( np.array([[0.1, 0.1, 0.1]]), dtype="float32") base_cell = rnn_cell_impl.GRUCell( 3, kernel_initializer=init_ops.constant_initializer(0.5), 
bias_initializer=init_ops.constant_initializer(0.5)) g, m_new = base_cell(x, m) wrapper_object = wrapper_type(base_cell) children = wrapper_object._trackable_children() wrapper_object.get_config() # Should not throw an error self.assertIn("cell", children) self.assertIs(children["cell"], base_cell) g_res, m_new_res = wrapper_object(x, m) self.evaluate([variables_lib.global_variables_initializer()]) res = self.evaluate([g, g_res, m_new, m_new_res]) # Residual connections self.assertAllClose(res[1], res[0] + [1., 1., 1.]) # States are left untouched self.assertAllClose(res[2], res[3]) def testResidualWrapperWithSlice(self): wrapper_type = rnn_cell_wrapper_v2.ResidualWrapper x = ops.convert_to_tensor_v2_with_dispatch( np.array([[1., 1., 1., 1., 1.]]), dtype="float32") m = ops.convert_to_tensor_v2_with_dispatch( np.array([[0.1, 0.1, 0.1]]), dtype="float32") base_cell = rnn_cell_impl.GRUCell( 3, kernel_initializer=init_ops.constant_initializer(0.5), bias_initializer=init_ops.constant_initializer(0.5)) g, m_new = base_cell(x, m) def residual_with_slice_fn(inp, out): inp_sliced = array_ops.slice(inp, [0, 0], [-1, 3]) return inp_sliced + out g_res, m_new_res = wrapper_type( base_cell, residual_with_slice_fn)(x, m) self.evaluate([variables_lib.global_variables_initializer()]) res_g, res_g_res, res_m_new, res_m_new_res = self.evaluate( [g, g_res, m_new, m_new_res]) # Residual connections self.assertAllClose(res_g_res, res_g + [1., 1., 1.]) # States are left untouched self.assertAllClose(res_m_new, res_m_new_res) def testDeviceWrapper(self): wrapper_type = rnn_cell_wrapper_v2.DeviceWrapper x = array_ops.zeros([1, 3]) m = array_ops.zeros([1, 3]) cell = rnn_cell_impl.GRUCell(3) wrapped_cell = wrapper_type(cell, "/cpu:0") children = wrapped_cell._trackable_children() wrapped_cell.get_config() # Should not throw an error self.assertIn("cell", children) self.assertIs(children["cell"], cell) outputs, _ = wrapped_cell(x, m) self.assertIn("cpu:0", outputs.device.lower()) 
@parameterized.parameters( [[rnn_cell_impl.DropoutWrapper, rnn_cell_wrapper_v2.DropoutWrapper], [rnn_cell_impl.ResidualWrapper, rnn_cell_wrapper_v2.ResidualWrapper]]) def testWrapperKerasStyle(self, wrapper, wrapper_v2): """Tests if wrapper cell is instantiated in keras style scope.""" wrapped_cell_v2 = wrapper_v2(rnn_cell_impl.BasicRNNCell(1)) self.assertIsNone(getattr(wrapped_cell_v2, "_keras_style", None)) wrapped_cell = wrapper(rnn_cell_impl.BasicRNNCell(1)) self.assertFalse(wrapped_cell._keras_style) @parameterized.parameters( [rnn_cell_wrapper_v2.DropoutWrapper, rnn_cell_wrapper_v2.ResidualWrapper]) def testWrapperWeights(self, wrapper): """Tests that wrapper weights contain wrapped cells weights.""" base_cell = layers.SimpleRNNCell(1, name="basic_rnn_cell") rnn_cell = wrapper(base_cell) rnn_layer = layers.RNN(rnn_cell) inputs = ops.convert_to_tensor_v2_with_dispatch([[[1]]], dtype=dtypes.float32) rnn_layer(inputs) wrapper_name = generic_utils.to_snake_case(wrapper.__name__) expected_weights = ["rnn/" + wrapper_name + "/" + var for var in ("kernel:0", "recurrent_kernel:0", "bias:0")] self.assertLen(rnn_cell.weights, 3) self.assertCountEqual([v.name for v in rnn_cell.weights], expected_weights) self.assertCountEqual([v.name for v in rnn_cell.trainable_variables], expected_weights) self.assertCountEqual([v.name for v in rnn_cell.non_trainable_variables], []) self.assertCountEqual([v.name for v in rnn_cell.cell.weights], expected_weights) @parameterized.parameters( [rnn_cell_wrapper_v2.DropoutWrapper, rnn_cell_wrapper_v2.ResidualWrapper]) def testWrapperV2Caller(self, wrapper): """Tests that wrapper V2 is using the LayerRNNCell's caller.""" with legacy_base_layer.keras_style_scope(): base_cell = rnn_cell_impl.MultiRNNCell( [rnn_cell_impl.BasicRNNCell(1) for _ in range(2)]) rnn_cell = wrapper(base_cell) inputs = ops.convert_to_tensor_v2_with_dispatch([[1]], dtype=dtypes.float32) state = ops.convert_to_tensor_v2_with_dispatch([[1]], dtype=dtypes.float32) _ = 
rnn_cell(inputs, [state, state]) weights = base_cell._cells[0].weights self.assertLen(weights, expected_len=2) self.assertTrue(all("_wrapper" in v.name for v in weights)) @parameterized.parameters( [rnn_cell_wrapper_v2.DropoutWrapper, rnn_cell_wrapper_v2.ResidualWrapper]) def testWrapperV2Build(self, wrapper): cell = rnn_cell_impl.LSTMCell(10) wrapper = wrapper(cell) wrapper.build((1,)) self.assertTrue(cell.built) def testDeviceWrapperSerialization(self): wrapper_cls = rnn_cell_wrapper_v2.DeviceWrapper cell = layers.LSTMCell(10) wrapper = wrapper_cls(cell, "/cpu:0") config = wrapper.get_config() reconstructed_wrapper = wrapper_cls.from_config(config) self.assertDictEqual(config, reconstructed_wrapper.get_config()) self.assertIsInstance(reconstructed_wrapper, wrapper_cls) def testResidualWrapperSerialization(self): wrapper_cls = rnn_cell_wrapper_v2.ResidualWrapper cell = layers.LSTMCell(10) wrapper = wrapper_cls(cell) config = wrapper.get_config() reconstructed_wrapper = wrapper_cls.from_config(config) self.assertDictEqual(config, reconstructed_wrapper.get_config()) self.assertIsInstance(reconstructed_wrapper, wrapper_cls) wrapper = wrapper_cls(cell, residual_fn=lambda i, o: i + i + o) config = wrapper.get_config() reconstructed_wrapper = wrapper_cls.from_config(config) # Assert the reconstructed function will perform the math correctly. self.assertEqual(reconstructed_wrapper._residual_fn(1, 2), 4) def residual_fn(inputs, outputs): return inputs * 3 + outputs wrapper = wrapper_cls(cell, residual_fn=residual_fn) config = wrapper.get_config() reconstructed_wrapper = wrapper_cls.from_config(config) # Assert the reconstructed function will perform the math correctly. 
self.assertEqual(reconstructed_wrapper._residual_fn(1, 2), 5) def testDropoutWrapperSerialization(self): wrapper_cls = rnn_cell_wrapper_v2.DropoutWrapper cell = layers.GRUCell(10) wrapper = wrapper_cls(cell) config = wrapper.get_config() reconstructed_wrapper = wrapper_cls.from_config(config) self.assertDictEqual(config, reconstructed_wrapper.get_config()) self.assertIsInstance(reconstructed_wrapper, wrapper_cls) wrapper = wrapper_cls(cell, dropout_state_filter_visitor=lambda s: True)<|fim▁hole|> config = wrapper.get_config() reconstructed_wrapper = wrapper_cls.from_config(config) self.assertTrue(reconstructed_wrapper._dropout_state_filter(None)) def dropout_state_filter_visitor(unused_state): return False wrapper = wrapper_cls( cell, dropout_state_filter_visitor=dropout_state_filter_visitor) config = wrapper.get_config() reconstructed_wrapper = wrapper_cls.from_config(config) self.assertFalse(reconstructed_wrapper._dropout_state_filter(None)) def testDropoutWrapperWithKerasLSTMCell(self): wrapper_cls = rnn_cell_wrapper_v2.DropoutWrapper cell = layers.LSTMCell(10) with self.assertRaisesRegex(ValueError, "does not work with "): wrapper_cls(cell) cell = layers.LSTMCellV2(10) with self.assertRaisesRegex(ValueError, "does not work with "): wrapper_cls(cell) if __name__ == "__main__": test.main()<|fim▁end|>
<|file_name|>tests.rs<|end_file_name|><|fim▁begin|>#[cfg(test)] use ::{card, deck, hand}; #[test] fn create_hand() { let mut deck = deck::Deck::new(); let mut cards: Vec<card::Card> = vec![]; for _i in 0..5 { cards.push(deck.draw().unwrap()); } let hand = hand::Hand::new(cards); assert_eq!(hand.cards.len(), 5); } #[test] fn has_flush() { let a = card::Card::new(card::Rank::Three, card::Suit::Hearts); let b = card::Card::new(card::Rank::Six, card::Suit::Hearts); let c = card::Card::new(card::Rank::Ten, card::Suit::Hearts); let d = card::Card::new(card::Rank::Queen, card::Suit::Hearts); let e = card::Card::new(card::Rank::Two, card::Suit::Hearts); let cards: Vec<card::Card> = vec![a, b, c, d, e]; let hand: hand::Hand = hand::Hand::new(cards); assert_eq!(hand.has_flush(), true); } #[test] fn no_flush() { let a = card::Card::new(card::Rank::Three, card::Suit::Hearts); let b = card::Card::new(card::Rank::Six, card::Suit::Spades); let c = card::Card::new(card::Rank::Ten, card::Suit::Hearts); let d = card::Card::new(card::Rank::Queen, card::Suit::Hearts); let e = card::Card::new(card::Rank::Two, card::Suit::Hearts); let cards: Vec<card::Card> = vec![a, b, c, d, e]; let hand: hand::Hand = hand::Hand::new(cards); assert_eq!(hand.has_flush(), false); } #[test] fn has_straight() { let a = card::Card::new(card::Rank::Three, card::Suit::Hearts); let b = card::Card::new(card::Rank::Six, card::Suit::Diamonds); let c = card::Card::new(card::Rank::Four, card::Suit::Hearts); let d = card::Card::new(card::Rank::Seven, card::Suit::Clubs); let e = card::Card::new(card::Rank::Five, card::Suit::Spades); let cards: Vec<card::Card> = vec![a, b, c, d, e]; let hand: hand::Hand = hand::Hand::new(cards); assert_eq!(hand.has_straight(), true); } #[test] fn has_ace_low_straight() { let a = card::Card::new(card::Rank::Three, card::Suit::Hearts); let b = card::Card::new(card::Rank::Two, card::Suit::Diamonds); let c = card::Card::new(card::Rank::Four, card::Suit::Hearts); let d = 
card::Card::new(card::Rank::Ace, card::Suit::Clubs); let e = card::Card::new(card::Rank::Five, card::Suit::Spades); let cards: Vec<card::Card> = vec![a, b, c, d, e]; let hand: hand::Hand = hand::Hand::new(cards); assert_eq!(hand.has_straight(), true); } #[test] fn no_straight() { let a = card::Card::new(card::Rank::Three, card::Suit::Hearts); let b = card::Card::new(card::Rank::Six, card::Suit::Diamonds); let c = card::Card::new(card::Rank::King, card::Suit::Hearts); let d = card::Card::new(card::Rank::Seven, card::Suit::Clubs); let e = card::Card::new(card::Rank::Five, card::Suit::Spades); let cards: Vec<card::Card> = vec![a, b, c, d, e]; let hand: hand::Hand = hand::Hand::new(cards); assert_eq!(hand.has_straight(), false); } #[test] fn has_pair() { let a = card::Card::new(card::Rank::Seven, card::Suit::Hearts); let b = card::Card::new(card::Rank::Six, card::Suit::Diamonds); let c = card::Card::new(card::Rank::King, card::Suit::Hearts); let d = card::Card::new(card::Rank::Seven, card::Suit::Clubs); let e = card::Card::new(card::Rank::Five, card::Suit::Spades); let cards: Vec<card::Card> = vec![a, b, c, d, e]; let hand: hand::Hand = hand::Hand::new(cards); assert_eq!(hand.has_pair(), true); } #[test] fn has_pair_in_set() { let a = card::Card::new(card::Rank::Seven, card::Suit::Hearts); let b = card::Card::new(card::Rank::Six, card::Suit::Diamonds); let c = card::Card::new(card::Rank::King, card::Suit::Hearts); let d = card::Card::new(card::Rank::Seven, card::Suit::Clubs); let e = card::Card::new(card::Rank::Seven, card::Suit::Spades); let cards: Vec<card::Card> = vec![a, b, c, d, e]; let hand: hand::Hand = hand::Hand::new(cards); assert_eq!(hand.has_pair(), true); } #[test] fn no_pair() { let a = card::Card::new(card::Rank::Three, card::Suit::Hearts); let b = card::Card::new(card::Rank::Six, card::Suit::Diamonds); let c = card::Card::new(card::Rank::King, card::Suit::Hearts); let d = card::Card::new(card::Rank::Seven, card::Suit::Clubs); let e = 
card::Card::new(card::Rank::Five, card::Suit::Spades); let cards: Vec<card::Card> = vec![a, b, c, d, e]; let hand: hand::Hand = hand::Hand::new(cards); <|fim▁hole|>fn high_card_raw_value() { assert_eq!(hand::PokerRanking::HighCard as u32, 0); } #[test] fn one_pair_raw_value() { assert_eq!(hand::PokerRanking::OnePair as u32, 1); } #[test] fn two_pair_raw_value() { assert_eq!(hand::PokerRanking::TwoPair as u32, 2); } #[test] fn trips_raw_value() { assert_eq!(hand::PokerRanking::Trips as u32, 3); } #[test] fn straight_raw_value() { assert_eq!(hand::PokerRanking::Straight as u32, 4); } #[test] fn flush_raw_value() { assert_eq!(hand::PokerRanking::Flush as u32, 5); } #[test] fn full_house_raw_value() { assert_eq!(hand::PokerRanking::FullHouse as u32, 6); } #[test] fn quads_raw_value() { assert_eq!(hand::PokerRanking::Quads as u32, 7); } #[test] fn straight_flush_raw_value() { assert_eq!(hand::PokerRanking::StraightFlush as u32, 8); }<|fim▁end|>
assert_eq!(hand.has_pair(), false); } #[test]
<|file_name|>wp_deploy.js<|end_file_name|><|fim▁begin|>module.exports = { dist: { options: {<|fim▁hole|> } } };<|fim▁end|>
plugin_slug: 'simple-user-adding', svn_user: 'wearerequired', build_dir: 'release/svn/', assets_dir: 'assets/'
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![crate_name="rustspec"] #![crate_type="dylib"] #![feature(plugin_registrar, rustc_private, collections, core, convert)] extern crate syntax; extern crate core; extern crate rustc; extern crate rustspec_assertions; pub use rustspec_assertions::{expect, eq, be_gt, be_ge, be_lt, be_le, contain, be_false, be_true, be_some, be_none}; use macro_result::MacroResult; use test_context_node::TestContextNode; use test_case_node::TestCaseNode; use test_node::TestNode; use self::core::ops::Deref; use rustc::plugin::Registry; use syntax::ext::base::{ExtCtxt, MacResult}; use syntax::ext::quote::rt::ToTokens; use syntax::codemap::Span; use syntax::ast; use syntax::ptr::P; use syntax::parse::{token, tts_to_parser}; use syntax::parse::parser::Parser; mod macro_result; mod test_context_node; mod test_case_node; mod test_node; #[plugin_registrar] pub fn plugin_registrar(registry: &mut Registry) { registry.register_macro("scenario", macro_scenario); } fn is_skippable(token: syntax::parse::token::Token) -> bool { token == token::OpenDelim(token::Brace) || token == token::CloseDelim(token::Brace) || token == token::OpenDelim(token::Paren) || token == token::CloseDelim(token::Paren) || token == token::Comma || token == token::Semi } #[allow(unused_must_use)] fn extract_test_node_data(parser: &mut Parser) -> (String, P<ast::Block>) { parser.bump(); // skip ( let (name, _) = parser.parse_str().ok().unwrap(); parser.bump(); // skip , let block = parser.parse_block().ok().unwrap(); (name.deref().to_string(), block) } #[allow(unused_must_use)] fn parse_test_node(parser: &mut Parser) -> Box<TestCaseNode> { let mut should_fail = false; let mut should_be_ignored = false; if parser.token == token::Dot { parser.bump(); let ident = parser.parse_ident().ok().unwrap(); let token_str = ident.as_str(); should_fail = token_str == "fails"; should_be_ignored = token_str == "ignores"; } let (name, block) = extract_test_node_data(parser); 
TestCaseNode::new(name, block, should_fail, should_be_ignored) } #[allow(unused_must_use)] fn parse_node(cx: &mut ExtCtxt, parser: &mut Parser) -> (Option<P<ast::Block>>, Vec<Box<TestNode + 'static>>) { let mut nodes: Vec<Box<TestNode>> = Vec::new(); let mut before_block = None; while parser.token != token::Eof { if is_skippable(parser.token.clone()) { parser.bump(); continue; } let ident = parser.parse_ident().ok().unwrap(); let token_str = ident.as_str(); match token_str { "before" => { if before_block.is_some() {<|fim▁hole|> panic!("More than one before blocks found in the same context."); } parser.bump(); // skip ( before_block = Some(parser.parse_block().ok().unwrap()); }, "when" | "context" | "describe" => { parser.bump(); // skip ( let (name, _) = parser.parse_str().ok().unwrap(); parser.bump(); // skip , let block_tokens = parser.parse_block().ok().unwrap().to_tokens(cx); let mut block_parser = tts_to_parser(cx.parse_sess(), block_tokens, cx.cfg()); let (b, children) = parse_node(cx, &mut block_parser); let before = if b.is_some() { Some(P(b.unwrap().deref().clone())) } else { None }; nodes.push(TestContextNode::new( name.deref().to_string(), before, children )); }, "it" => { nodes.push(parse_test_node(parser)); }, other => { let span = parser.span; parser.span_fatal(span, format!("Unexpected {}", other).as_ref()); } } } (before_block, nodes) } #[allow(unused_must_use)] pub fn macro_scenario(cx: &mut ExtCtxt, _: Span, tts: &[ast::TokenTree]) -> Box<MacResult + 'static> { let mut parser = cx.new_parser_from_tts(tts); let (name, _) = parser.parse_str().ok().unwrap(); parser.bump(); let block_tokens = parser.parse_block().ok().unwrap().to_tokens(cx); let mut block_parser = tts_to_parser(cx.parse_sess(), block_tokens, cx.cfg()); let (before, children) = parse_node(cx, &mut block_parser); let node = TestContextNode::new(name.deref().to_string(), before, children); MacroResult::new(vec![node.to_item(cx, &mut vec![])]) }<|fim▁end|>
<|file_name|>options.py<|end_file_name|><|fim▁begin|>''' This module corresponds to ARDroneLib/Soft/Common/navdata_common.h ''' import ctypes import functools from pyardrone.utils.structure import Structure uint8_t = ctypes.c_uint8 uint16_t = ctypes.c_uint16 uint32_t = ctypes.c_uint32 int16_t = ctypes.c_int16 int32_t = ctypes.c_int32 bool_t = ctypes.c_uint32 # ARDroneTool's bool is 4 bytes char = ctypes.c_char float32_t = ctypes.c_float NB_GYROS = 3 NB_ACCS = 3 NB_NAVDATA_DETECTION_RESULTS = 4 NB_CORNER_TRACKERS_WIDTH = 5 NB_CORNER_TRACKERS_HEIGHT = 4 DEFAULT_NB_TRACKERS_WIDTH = NB_CORNER_TRACKERS_WIDTH + 1 DEFAULT_NB_TRACKERS_HEIGHT = NB_CORNER_TRACKERS_HEIGHT + 1 NAVDATA_MAX_CUSTOM_TIME_SAVE = 20 _vector31_t = float32_t * 3 _velocities_t = _vector31_t _vector21_t = float32_t * 2 _screen_point_t = int32_t * 2 _matrix33_t = float32_t * 3 * 3 class OptionHeader(dict): def register(self, tag): return functools.partial(self._register, tag) def _register(self, tag, function): if tag in self: raise KeyError('Key {!r} conflict with existing item {}'.format( tag, self[tag])) self[tag] = function return function index = OptionHeader() class Metadata(Structure): ''' Header of :py:class:`~pyardrone.navdata.NavData`. Available via :py:class:`~pyardrone.navdata.NavData`.metadata Corresponds to C struct ``navdata_t``. ''' _pack_ = 1 _attrname = 'metadata' header = uint32_t #: Should be 0x55667788 #: raw drone state, #: see also: :py:class:`~pyardrone.navdata.states.DroneState` state = uint32_t sequence_number = uint32_t #: vision_flag = uint32_t #: class OptionHeader(Structure): _pack_ = 1 tag = uint16_t size = uint16_t @index.register(0) class Demo(OptionHeader): ''' Minimal navigation data for all flights. Corresponds to C struct ``navdata_demo_t``. ''' _attrname = 'demo' #: Flying state (landed, flying, hovering, etc.) #: defined in CTRL_STATES enum. 
ctrl_state = uint32_t vbat_flying_percentage = uint32_t #: battery voltage filtered (mV) theta = float32_t #: UAV's pitch in milli-degrees phi = float32_t #: UAV's roll in milli-degrees psi = float32_t #: UAV's yaw in milli-degrees altitude = int32_t #: UAV's altitude in centimeters vx = float32_t #: UAV's estimated linear velocity vy = float32_t #: UAV's estimated linear velocity vz = float32_t #: UAV's estimated linear velocity #: streamed frame index Not used -> To integrate in video stage. num_frames = uint32_t # Camera parameters compute by detection detection_camera_rot = _matrix33_t #: Deprecated ! Don't use ! detection_camera_trans = _vector31_t #: Deprecated ! Don't use ! detection_tag_index = uint32_t #: Deprecated ! Don't use ! detection_camera_type = uint32_t #: Type of tag searched in detection # Camera parameters compute by drone drone_camera_rot = _matrix33_t #: Deprecated ! Don't use ! drone_camera_trans = _vector31_t #: Deprecated ! Don't use ! @index.register(1) class Time(OptionHeader): ''' Timestamp Corresponds to C struct ``navdata_time_t``. ''' _attrname = 'time' #: 32 bit value where the 11 most significant bits represents the seconds, #: and the 21 least significant bits are the microseconds. time = uint32_t @index.register(2) class RawMeasures(OptionHeader): ''' Raw sensors measurements Corresponds to C struct ``navdata_raw_measures_t``. 
''' _attrname = 'raw_measures' # +12 bytes raw_accs = uint16_t * NB_ACCS #: filtered accelerometers raw_gyros = int16_t * NB_GYROS #: filtered gyrometers raw_gyros_110 = int16_t * 2 #: gyrometers x/y 110 deg/s vbat_raw = uint32_t #: battery voltage raw (mV) us_debut_echo = uint16_t us_fin_echo = uint16_t us_association_echo = uint16_t us_distance_echo = uint16_t us_courbe_temps = uint16_t us_courbe_valeur = uint16_t us_courbe_ref = uint16_t flag_echo_ini = uint16_t # TODO: uint16_t frame_number from ARDrone_Magneto nb_echo = uint16_t sum_echo = uint32_t alt_temp_raw = int32_t gradient = int16_t @index.register(21) class PressureRaw(OptionHeader): 'Corresponds to C struct ``navdata_pressure_raw_t``.' _attrname = 'pressure_raw' up = int32_t ut = int16_t Temperature_meas = int32_t Pression_meas = int32_t @index.register(22) class Magneto(OptionHeader): 'Corresponds to C struct ``navdata_magneto_t``.' _attrname = 'magneto' mx = int16_t my = int16_t mz = int16_t magneto_raw = _vector31_t #: magneto in the body frame, in mG magneto_rectified = _vector31_t magneto_offset = _vector31_t heading_unwrapped = float32_t heading_gyro_unwrapped = float32_t heading_fusion_unwrapped = float32_t magneto_calibration_ok = char magneto_state = uint32_t magneto_radius = float32_t error_mean = float32_t error_var = float32_t @index.register(23) class WindSpeed(OptionHeader): 'Corresponds to C struct ``navdata_wind_speed_t``.' _attrname = 'wind_speed' wind_speed = float32_t #: estimated wind speed [m/s] #: estimated wind direction in North-East frame [rad] e.g. 
#: if wind_angle is pi/4, wind is from South-West to North-East wind_angle = float32_t wind_compensation_theta = float32_t wind_compensation_phi = float32_t state_x1 = float32_t state_x2 = float32_t state_x3 = float32_t state_x4 = float32_t state_x5 = float32_t state_x6 = float32_t magneto_debug1 = float32_t magneto_debug2 = float32_t magneto_debug3 = float32_t @index.register(24) class KalmanPressure(OptionHeader): 'Corresponds to C struct ``navdata_kalman_pressure_t``.' _attrname = 'kalman_pressure' offset_pressure = float32_t est_z = float32_t est_zdot = float32_t est_bias_PWM = float32_t est_biais_pression = float32_t offset_US = float32_t prediction_US = float32_t cov_alt = float32_t cov_PWM = float32_t cov_vitesse = float32_t bool_effet_sol = bool_t somme_inno = float32_t flag_rejet_US = bool_t u_multisinus = float32_t gaz_altitude = float32_t Flag_multisinus = bool_t Flag_multisinus_debut = bool_t @index.register(27) class Zimmu3000(OptionHeader): 'Corresponds to C struct ``navdata_zimmu_3000_t``.' _attrname = 'zimmu_3000' vzimmuLSB = int32_t vzfind = float32_t @index.register(3) class PhysMeasures(OptionHeader): 'Corresponds to C struct ``navdata_phys_measures_t``.' _attrname = 'phys_measures' accs_temp = float32_t gyro_temp = uint16_t phys_accs = float32_t * NB_ACCS phys_gyros = float32_t * NB_GYROS alim3V3 = uint32_t #: 3.3volt alim [LSB] vrefEpson = uint32_t #: ref volt Epson gyro [LSB] vrefIDG = uint32_t #: ref volt IDG gyro [LSB] @index.register(4) class GyrosOffsets(OptionHeader): 'Corresponds to C struct ``navdata_gyros_offsets_t``.' _attrname = 'gyros_offsets' offset_g = float32_t * NB_GYROS @index.register(5) class EulerAngles(OptionHeader): 'Corresponds to C struct ``navdata_euler_angles_t``.' _attrname = 'eular_angles' theta_a = float32_t phi_a = float32_t @index.register(6) class References(OptionHeader): 'Corresponds to C struct ``navdata_references_t``.' 
_attrname = 'references' ref_theta = int32_t ref_phi = int32_t ref_theta_I = int32_t ref_phi_I = int32_t ref_pitch = int32_t ref_roll = int32_t ref_yaw = int32_t ref_psi = int32_t vx_ref = float32_t vy_ref = float32_t theta_mod = float32_t phi_mod = float32_t k_v_x = float32_t k_v_y = float32_t k_mode = uint32_t ui_time = float32_t ui_theta = float32_t ui_phi = float32_t ui_psi = float32_t ui_psi_accuracy = float32_t ui_seq = int32_t @index.register(7) class Trims(OptionHeader): 'Corresponds to C struct ``navdata_trims_t``.' _attrname = 'trims' angular_rates_trim_r = float32_t euler_angles_trim_theta = float32_t euler_angles_trim_phi = float32_t @index.register(8) class RcReferences(OptionHeader): 'Corresponds to C struct ``navdata_rc_references_t``.' _attrname = 'rc_references' rc_ref_pitch = int32_t rc_ref_roll = int32_t rc_ref_yaw = int32_t rc_ref_gaz = int32_t rc_ref_ag = int32_t @index.register(9) class Pwm(OptionHeader): 'Corresponds to C struct ``navdata_pwm_t``.' _attrname = 'pwm' motor1 = uint8_t motor2 = uint8_t motor3 = uint8_t motor4 = uint8_t sat_motor1 = uint8_t sat_motor2 = uint8_t sat_motor3 = uint8_t sat_motor4 = uint8_t gaz_feed_forward = float32_t gaz_altitude = float32_t altitude_integral = float32_t vz_ref = float32_t u_pitch = int32_t u_roll = int32_t u_yaw = int32_t yaw_u_I = float32_t u_pitch_planif = int32_t u_roll_planif = int32_t u_yaw_planif = int32_t u_gaz_planif = float32_t current_motor1 = uint16_t current_motor2 = uint16_t current_motor3 = uint16_t current_motor4 = uint16_t # WARNING: new navdata (FC 26/07/2011) altitude_prop = float32_t altitude_der = float32_t @index.register(10) class Altitude(OptionHeader): 'Corresponds to C struct ``navdata_altitude_t``.' 
_attrname = 'altitude' altitude_vision = int32_t altitude_vz = float32_t altitude_ref = int32_t altitude_raw = int32_t obs_accZ = float32_t obs_alt = float32_t obs_x = _vector31_t obs_state = uint32_t est_vb = _vector21_t est_state = uint32_t @index.register(11) class VisionRaw(OptionHeader): 'Corresponds to C struct ``navdata_vision_raw_t``.' _attrname = 'vision_raw' vision_tx_raw = float32_t vision_ty_raw = float32_t vision_tz_raw = float32_t @index.register(13) class Vision(OptionHeader): 'Corresponds to C struct ``navdata_vision_t``.' _attrname = 'vision' vision_state = uint32_t vision_misc = int32_t vision_phi_trim = float32_t vision_phi_ref_prop = float32_t vision_theta_trim = float32_t vision_theta_ref_prop = float32_t<|fim▁hole|> theta_capture = float32_t phi_capture = float32_t psi_capture = float32_t altitude_capture = int32_t time_capture = uint32_t #: time in TSECDEC format (see config.h) body_v = _velocities_t delta_phi = float32_t delta_theta = float32_t delta_psi = float32_t gold_defined = uint32_t gold_reset = uint32_t gold_x = float32_t gold_y = float32_t @index.register(14) class VisionPerf(OptionHeader): 'Corresponds to C struct ``navdata_vision_perf_t``.' _attrname = 'vision_perf' time_szo = float32_t time_corners = float32_t time_compute = float32_t time_tracking = float32_t time_trans = float32_t time_update = float32_t time_custom = float32_t * NAVDATA_MAX_CUSTOM_TIME_SAVE @index.register(15) class TrackersSend(OptionHeader): 'Corresponds to C struct ``navdata_trackers_send_t``.' _attrname = 'trackers_send' locked = int32_t * (DEFAULT_NB_TRACKERS_WIDTH * DEFAULT_NB_TRACKERS_HEIGHT) point = _screen_point_t * ( DEFAULT_NB_TRACKERS_WIDTH * DEFAULT_NB_TRACKERS_HEIGHT ) @index.register(16) class VisionDetect(OptionHeader): 'Corresponds to C struct ``navdata_vision_detect_t``.' 
# Change the function 'navdata_server_reset_vision_detect()' # if this structure is modified _attrname = 'vision_detect' nb_detected = uint32_t type = uint32_t * NB_NAVDATA_DETECTION_RESULTS xc = uint32_t * NB_NAVDATA_DETECTION_RESULTS yc = uint32_t * NB_NAVDATA_DETECTION_RESULTS width = uint32_t * NB_NAVDATA_DETECTION_RESULTS height = uint32_t * NB_NAVDATA_DETECTION_RESULTS dist = uint32_t * NB_NAVDATA_DETECTION_RESULTS orientation_angle = float32_t * NB_NAVDATA_DETECTION_RESULTS rotation = _matrix33_t * NB_NAVDATA_DETECTION_RESULTS translation = _vector31_t * NB_NAVDATA_DETECTION_RESULTS camera_source = uint32_t * NB_NAVDATA_DETECTION_RESULTS @index.register(12) class VisionOf(OptionHeader): 'Corresponds to C struct ``navdata_vision_of_t``.' _attrname = 'vision_of' of_dx = float32_t * 5 of_dy = float32_t * 5 @index.register(17) class Watchdog(OptionHeader): 'Corresponds to C struct ``navdata_watchdog_t``.' _attrname = 'watchdog' # +4 bytes watchdog = int32_t @index.register(18) class AdcDataFrame(OptionHeader): 'Corresponds to C struct ``navdata_adc_data_frame_t``.' _attrname = 'adc_data_frame' version = uint32_t data_frame = uint8_t * 32 @index.register(19) class VideoStream(OptionHeader): 'Corresponds to C struct ``navdata_video_stream_t``.' 
_attrname = 'video_stream' quant = uint8_t #: quantizer reference used to encode frame [1:31] frame_size = uint32_t #: frame size (bytes) frame_number = uint32_t #: frame index atcmd_ref_seq = uint32_t #: atmcd ref sequence number #: mean time between two consecutive atcmd_ref (ms) atcmd_mean_ref_gap = uint32_t atcmd_var_ref_gap = float32_t atcmd_ref_quality = uint32_t #: estimator of atcmd link quality # drone2 #: measured out throughput from the video tcp socket out_bitrate = uint32_t #: last frame size generated by the video encoder desired_bitrate = uint32_t # misc temporary data data1 = int32_t data2 = int32_t data3 = int32_t data4 = int32_t data5 = int32_t # queue usage tcp_queue_level = uint32_t fifo_queue_level = uint32_t @index.register(25) class HdvideoStream(OptionHeader): 'Corresponds to C struct ``navdata_hdvideo_stream_t``.' _attrname = 'hdvideo_stream' hdvideo_state = uint32_t storage_fifo_nb_packets = uint32_t storage_fifo_size = uint32_t usbkey_size = uint32_t #: USB key in kbytes - 0 if no key present #: USB key free space in kbytes - 0 if no key present usbkey_freespace = uint32_t #: 'frame_number' PaVE field of the frame starting to be encoded for the #: HD stream frame_number = uint32_t usbkey_remaining_time = uint32_t #: time in seconds @index.register(20) class Games(OptionHeader): 'Corresponds to C struct ``navdata_games_t``.' _attrname = 'games' double_tap_counter = uint32_t finish_line_counter = uint32_t @index.register(26) class Wifi(OptionHeader): 'Corresponds to C struct ``navdata_wifi_t``.' _attrname = 'wifi' link_quality = uint32_t @index.register(0xFFFF) class Cks(OptionHeader): 'Corresponds to C struct ``navdata_cks_t``.' _attrname = 'cks' value = uint32_t #: Value of the checksum<|fim▁end|>
new_raw_picture = int32_t
<|file_name|>test_random_graphs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python from nose.tools import * from networkx import * from networkx.generators.random_graphs import * class TestGeneratorsRandom(): def smoke_test_random_graph(self): seed = 42 G=gnp_random_graph(100,0.25,seed) G=binomial_graph(100,0.25,seed) G=erdos_renyi_graph(100,0.25,seed) G=fast_gnp_random_graph(100,0.25,seed) G=gnm_random_graph(100,20,seed) G=dense_gnm_random_graph(100,20,seed) G=watts_strogatz_graph(10,2,0.25,seed) assert_equal(len(G), 10) assert_equal(G.number_of_edges(), 10) G=connected_watts_strogatz_graph(10,2,0.1,seed) assert_equal(len(G), 10) assert_equal(G.number_of_edges(), 10) G=watts_strogatz_graph(10,4,0.25,seed) assert_equal(len(G), 10) assert_equal(G.number_of_edges(), 20) G=newman_watts_strogatz_graph(10,2,0.0,seed) assert_equal(len(G), 10) assert_equal(G.number_of_edges(), 10) G=newman_watts_strogatz_graph(10,4,0.25,seed) assert_equal(len(G), 10) assert_true(G.number_of_edges() >= 20)<|fim▁hole|> G=barabasi_albert_graph(100,3,seed) assert_equal(G.number_of_edges(),(97*3)) G=powerlaw_cluster_graph(100,1,1.0,seed) G=powerlaw_cluster_graph(100,3,0.0,seed) assert_equal(G.number_of_edges(),(97*3)) G=duplication_divergence_graph(100,1.0,seed) assert_equal(len(G), 100) assert_raises(networkx.exception.NetworkXError, duplication_divergence_graph, 100, 2) assert_raises(networkx.exception.NetworkXError, duplication_divergence_graph, 100, -1) G=random_regular_graph(10,20,seed) assert_raises(networkx.exception.NetworkXError, random_regular_graph, 3, 21) constructor=[(10,20,0.8),(20,40,0.8)] G=random_shell_graph(constructor,seed) G=nx.random_lobster(10,0.1,0.5,seed) def test_random_zero_regular_graph(self): """Tests that a 0-regular graph has the correct number of nodes and edges. 
""" G = random_regular_graph(0, 10) assert_equal(len(G), 10) assert_equal(sum(1 for _ in G.edges()), 0) def test_gnp(self): for generator in [gnp_random_graph, binomial_graph, erdos_renyi_graph, fast_gnp_random_graph]: G = generator(10, -1.1) assert_equal(len(G), 10) assert_equal(sum(1 for _ in G.edges()), 0) G = generator(10, 0.1) assert_equal(len(G), 10) G = generator(10, 0.1, seed=42) assert_equal(len(G), 10) G = generator(10, 1.1) assert_equal(len(G), 10) assert_equal(sum(1 for _ in G.edges()), 45) G = generator(10, -1.1, directed=True) assert_true(G.is_directed()) assert_equal(len(G), 10) assert_equal(sum(1 for _ in G.edges()), 0) G = generator(10, 0.1, directed=True) assert_true(G.is_directed()) assert_equal(len(G), 10) G = generator(10, 1.1, directed=True) assert_true(G.is_directed()) assert_equal(len(G), 10) assert_equal(sum(1 for _ in G.edges()), 90) # assert that random graphs generate all edges for p close to 1 edges = 0 runs = 100 for i in range(runs): edges += sum(1 for _ in generator(10, 0.99999, directed=True).edges()) assert_almost_equal(edges/float(runs), 90, delta=runs*2.0/100) def test_gnm(self): G=gnm_random_graph(10,3) assert_equal(len(G),10) assert_equal(sum(1 for _ in G.edges()), 3) G=gnm_random_graph(10,3,seed=42) assert_equal(len(G),10) assert_equal(sum(1 for _ in G.edges()), 3) G=gnm_random_graph(10,100) assert_equal(len(G),10) assert_equal(sum(1 for _ in G.edges()), 45) G=gnm_random_graph(10,100,directed=True) assert_equal(len(G),10) assert_equal(sum(1 for _ in G.edges()),90) G=gnm_random_graph(10,-1.1) assert_equal(len(G),10) assert_equal(sum(1 for _ in G.edges()),0) def test_watts_strogatz_big_k(self): assert_raises(networkx.exception.NetworkXError, watts_strogatz_graph, 10, 10, 0.25) assert_raises(networkx.exception.NetworkXError, newman_watts_strogatz_graph, 10, 10, 0.25) # could create an infinite loop, now doesn't # infinite loop used to occur when a node has degree n-1 and needs to rewire watts_strogatz_graph(10, 9, 0.25, seed=0) 
newman_watts_strogatz_graph(10, 9, 0.5, seed=0)<|fim▁end|>
G=barabasi_albert_graph(100,1,seed)
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// // imag - the personal information management suite for the commandline // Copyright (C) 2015-2020 Matthias Beyer <mail@beyermatthias.de> and contributors // // This library is free software; you can redistribute it and/or // modify it under the terms of the GNU Lesser General Public // License as published by the Free Software Foundation; version // 2.1 of the License. // // This library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU // Lesser General Public License for more details. // // You should have received a copy of the GNU Lesser General Public // License along with this library; if not, write to the Free Software // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA // #![forbid(unsafe_code)] #![recursion_limit="256"] #![deny( dead_code, non_camel_case_types, non_snake_case, path_statements, trivial_numeric_casts, unstable_features, unused_allocation, unused_import_braces, unused_imports, unused_must_use, unused_mut, unused_qualifications, while_true, )] extern crate filters; extern crate chrono; extern crate toml; extern crate toml_query; #[macro_use] extern crate lazy_static; #[macro_use] extern crate is_match; #[macro_use] extern crate anyhow; #[macro_use] extern crate libimagstore; #[macro_use] extern crate libimagentryutil; extern crate libimagentrydatetime; extern crate libimagentrytag; extern crate libimagerror; mod constants;<|fim▁hole|>pub mod iter; pub mod tag; pub mod store; module_entry_path_mod!("timetrack");<|fim▁end|>
pub mod timetracking;
<|file_name|>alignment.rs<|end_file_name|><|fim▁begin|>// This file was generated by gir (https://github.com/gtk-rs/gir) // from gir-files (https://github.com/gtk-rs/gir-files) // DO NOT EDIT use Bin; use Buildable; use Container; use Widget; use ffi; use glib; use glib::StaticType; use glib::Value; use glib::object::Downcast; use glib::object::IsA; use glib::signal::SignalHandlerId; use glib::signal::connect; use glib::translate::*; use glib_ffi; use gobject_ffi; use std::boxed::Box as Box_; use std::mem; use std::mem::transmute; use std::ptr; glib_wrapper! { pub struct Alignment(Object<ffi::GtkAlignment, ffi::GtkAlignmentClass>): Bin, Container, Widget, Buildable; match fn { get_type => || ffi::gtk_alignment_get_type(), } } impl Alignment { #[cfg_attr(feature = "v3_14", deprecated)] pub fn new(xalign: f32, yalign: f32, xscale: f32, yscale: f32) -> Alignment { assert_initialized_main_thread!(); unsafe { Widget::from_glib_none(ffi::gtk_alignment_new(xalign, yalign, xscale, yscale)).downcast_unchecked() } } } pub trait AlignmentExt { #[cfg_attr(feature = "v3_14", deprecated)] fn get_padding(&self) -> (u32, u32, u32, u32); #[cfg_attr(feature = "v3_14", deprecated)] fn set(&self, xalign: f32, yalign: f32, xscale: f32, yscale: f32); #[cfg_attr(feature = "v3_14", deprecated)] fn set_padding(&self, padding_top: u32, padding_bottom: u32, padding_left: u32, padding_right: u32); #[cfg_attr(feature = "v3_14", deprecated)] fn get_property_bottom_padding(&self) -> u32; #[cfg_attr(feature = "v3_14", deprecated)] fn set_property_bottom_padding(&self, bottom_padding: u32); #[cfg_attr(feature = "v3_14", deprecated)] fn get_property_left_padding(&self) -> u32; #[cfg_attr(feature = "v3_14", deprecated)] fn set_property_left_padding(&self, left_padding: u32); #[cfg_attr(feature = "v3_14", deprecated)] fn get_property_right_padding(&self) -> u32; #[cfg_attr(feature = "v3_14", deprecated)] fn set_property_right_padding(&self, right_padding: u32); #[cfg_attr(feature = "v3_14", 
deprecated)] fn get_property_top_padding(&self) -> u32; #[cfg_attr(feature = "v3_14", deprecated)] fn set_property_top_padding(&self, top_padding: u32); #[cfg_attr(feature = "v3_14", deprecated)] fn get_property_xalign(&self) -> f32; #[cfg_attr(feature = "v3_14", deprecated)] fn set_property_xalign(&self, xalign: f32); #[cfg_attr(feature = "v3_14", deprecated)] fn get_property_xscale(&self) -> f32; #[cfg_attr(feature = "v3_14", deprecated)] fn set_property_xscale(&self, xscale: f32); #[cfg_attr(feature = "v3_14", deprecated)] fn get_property_yalign(&self) -> f32; #[cfg_attr(feature = "v3_14", deprecated)] fn set_property_yalign(&self, yalign: f32); #[cfg_attr(feature = "v3_14", deprecated)] fn get_property_yscale(&self) -> f32; #[cfg_attr(feature = "v3_14", deprecated)] fn set_property_yscale(&self, yscale: f32); #[cfg_attr(feature = "v3_14", deprecated)] fn connect_property_bottom_padding_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[cfg_attr(feature = "v3_14", deprecated)] fn connect_property_left_padding_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[cfg_attr(feature = "v3_14", deprecated)] fn connect_property_right_padding_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[cfg_attr(feature = "v3_14", deprecated)] fn connect_property_top_padding_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; <|fim▁hole|> fn connect_property_xscale_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[cfg_attr(feature = "v3_14", deprecated)] fn connect_property_yalign_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[cfg_attr(feature = "v3_14", deprecated)] fn connect_property_yscale_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; } impl<O: IsA<Alignment> + IsA<glib::object::Object>> AlignmentExt for O { fn get_padding(&self) -> (u32, u32, u32, u32) { unsafe { let mut padding_top = mem::uninitialized(); let mut padding_bottom = mem::uninitialized(); let mut 
padding_left = mem::uninitialized(); let mut padding_right = mem::uninitialized(); ffi::gtk_alignment_get_padding(self.to_glib_none().0, &mut padding_top, &mut padding_bottom, &mut padding_left, &mut padding_right); (padding_top, padding_bottom, padding_left, padding_right) } } fn set(&self, xalign: f32, yalign: f32, xscale: f32, yscale: f32) { unsafe { ffi::gtk_alignment_set(self.to_glib_none().0, xalign, yalign, xscale, yscale); } } fn set_padding(&self, padding_top: u32, padding_bottom: u32, padding_left: u32, padding_right: u32) { unsafe { ffi::gtk_alignment_set_padding(self.to_glib_none().0, padding_top, padding_bottom, padding_left, padding_right); } } fn get_property_bottom_padding(&self) -> u32 { unsafe { let mut value = Value::from_type(<u32 as StaticType>::static_type()); gobject_ffi::g_object_get_property(self.to_glib_none().0, "bottom-padding".to_glib_none().0, value.to_glib_none_mut().0); value.get().unwrap() } } fn set_property_bottom_padding(&self, bottom_padding: u32) { unsafe { gobject_ffi::g_object_set_property(self.to_glib_none().0, "bottom-padding".to_glib_none().0, Value::from(&bottom_padding).to_glib_none().0); } } fn get_property_left_padding(&self) -> u32 { unsafe { let mut value = Value::from_type(<u32 as StaticType>::static_type()); gobject_ffi::g_object_get_property(self.to_glib_none().0, "left-padding".to_glib_none().0, value.to_glib_none_mut().0); value.get().unwrap() } } fn set_property_left_padding(&self, left_padding: u32) { unsafe { gobject_ffi::g_object_set_property(self.to_glib_none().0, "left-padding".to_glib_none().0, Value::from(&left_padding).to_glib_none().0); } } fn get_property_right_padding(&self) -> u32 { unsafe { let mut value = Value::from_type(<u32 as StaticType>::static_type()); gobject_ffi::g_object_get_property(self.to_glib_none().0, "right-padding".to_glib_none().0, value.to_glib_none_mut().0); value.get().unwrap() } } fn set_property_right_padding(&self, right_padding: u32) { unsafe { 
gobject_ffi::g_object_set_property(self.to_glib_none().0, "right-padding".to_glib_none().0, Value::from(&right_padding).to_glib_none().0); } } fn get_property_top_padding(&self) -> u32 { unsafe { let mut value = Value::from_type(<u32 as StaticType>::static_type()); gobject_ffi::g_object_get_property(self.to_glib_none().0, "top-padding".to_glib_none().0, value.to_glib_none_mut().0); value.get().unwrap() } } fn set_property_top_padding(&self, top_padding: u32) { unsafe { gobject_ffi::g_object_set_property(self.to_glib_none().0, "top-padding".to_glib_none().0, Value::from(&top_padding).to_glib_none().0); } } fn get_property_xalign(&self) -> f32 { unsafe { let mut value = Value::from_type(<f32 as StaticType>::static_type()); gobject_ffi::g_object_get_property(self.to_glib_none().0, "xalign".to_glib_none().0, value.to_glib_none_mut().0); value.get().unwrap() } } fn set_property_xalign(&self, xalign: f32) { unsafe { gobject_ffi::g_object_set_property(self.to_glib_none().0, "xalign".to_glib_none().0, Value::from(&xalign).to_glib_none().0); } } fn get_property_xscale(&self) -> f32 { unsafe { let mut value = Value::from_type(<f32 as StaticType>::static_type()); gobject_ffi::g_object_get_property(self.to_glib_none().0, "xscale".to_glib_none().0, value.to_glib_none_mut().0); value.get().unwrap() } } fn set_property_xscale(&self, xscale: f32) { unsafe { gobject_ffi::g_object_set_property(self.to_glib_none().0, "xscale".to_glib_none().0, Value::from(&xscale).to_glib_none().0); } } fn get_property_yalign(&self) -> f32 { unsafe { let mut value = Value::from_type(<f32 as StaticType>::static_type()); gobject_ffi::g_object_get_property(self.to_glib_none().0, "yalign".to_glib_none().0, value.to_glib_none_mut().0); value.get().unwrap() } } fn set_property_yalign(&self, yalign: f32) { unsafe { gobject_ffi::g_object_set_property(self.to_glib_none().0, "yalign".to_glib_none().0, Value::from(&yalign).to_glib_none().0); } } fn get_property_yscale(&self) -> f32 { unsafe { let mut value = 
Value::from_type(<f32 as StaticType>::static_type()); gobject_ffi::g_object_get_property(self.to_glib_none().0, "yscale".to_glib_none().0, value.to_glib_none_mut().0); value.get().unwrap() } } fn set_property_yscale(&self, yscale: f32) { unsafe { gobject_ffi::g_object_set_property(self.to_glib_none().0, "yscale".to_glib_none().0, Value::from(&yscale).to_glib_none().0); } } fn connect_property_bottom_padding_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe { let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f)); connect(self.to_glib_none().0, "notify::bottom-padding", transmute(notify_bottom_padding_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _) } } fn connect_property_left_padding_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe { let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f)); connect(self.to_glib_none().0, "notify::left-padding", transmute(notify_left_padding_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _) } } fn connect_property_right_padding_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe { let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f)); connect(self.to_glib_none().0, "notify::right-padding", transmute(notify_right_padding_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _) } } fn connect_property_top_padding_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe { let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f)); connect(self.to_glib_none().0, "notify::top-padding", transmute(notify_top_padding_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _) } } fn connect_property_xalign_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe { let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f)); connect(self.to_glib_none().0, "notify::xalign", transmute(notify_xalign_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _) } } fn 
connect_property_xscale_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe { let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f)); connect(self.to_glib_none().0, "notify::xscale", transmute(notify_xscale_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _) } } fn connect_property_yalign_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe { let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f)); connect(self.to_glib_none().0, "notify::yalign", transmute(notify_yalign_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _) } } fn connect_property_yscale_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe { let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f)); connect(self.to_glib_none().0, "notify::yscale", transmute(notify_yscale_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _) } } } unsafe extern "C" fn notify_bottom_padding_trampoline<P>(this: *mut ffi::GtkAlignment, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer) where P: IsA<Alignment> { let f: &&(Fn(&P) + 'static) = transmute(f); f(&Alignment::from_glib_borrow(this).downcast_unchecked()) } unsafe extern "C" fn notify_left_padding_trampoline<P>(this: *mut ffi::GtkAlignment, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer) where P: IsA<Alignment> { let f: &&(Fn(&P) + 'static) = transmute(f); f(&Alignment::from_glib_borrow(this).downcast_unchecked()) } unsafe extern "C" fn notify_right_padding_trampoline<P>(this: *mut ffi::GtkAlignment, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer) where P: IsA<Alignment> { let f: &&(Fn(&P) + 'static) = transmute(f); f(&Alignment::from_glib_borrow(this).downcast_unchecked()) } unsafe extern "C" fn notify_top_padding_trampoline<P>(this: *mut ffi::GtkAlignment, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer) where P: IsA<Alignment> { let f: &&(Fn(&P) + 'static) = transmute(f); f(&Alignment::from_glib_borrow(this).downcast_unchecked()) } 
unsafe extern "C" fn notify_xalign_trampoline<P>(this: *mut ffi::GtkAlignment, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer) where P: IsA<Alignment> { let f: &&(Fn(&P) + 'static) = transmute(f); f(&Alignment::from_glib_borrow(this).downcast_unchecked()) } unsafe extern "C" fn notify_xscale_trampoline<P>(this: *mut ffi::GtkAlignment, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer) where P: IsA<Alignment> { let f: &&(Fn(&P) + 'static) = transmute(f); f(&Alignment::from_glib_borrow(this).downcast_unchecked()) } unsafe extern "C" fn notify_yalign_trampoline<P>(this: *mut ffi::GtkAlignment, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer) where P: IsA<Alignment> { let f: &&(Fn(&P) + 'static) = transmute(f); f(&Alignment::from_glib_borrow(this).downcast_unchecked()) } unsafe extern "C" fn notify_yscale_trampoline<P>(this: *mut ffi::GtkAlignment, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer) where P: IsA<Alignment> { let f: &&(Fn(&P) + 'static) = transmute(f); f(&Alignment::from_glib_borrow(this).downcast_unchecked()) }<|fim▁end|>
#[cfg_attr(feature = "v3_14", deprecated)] fn connect_property_xalign_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[cfg_attr(feature = "v3_14", deprecated)]
<|file_name|>readers.py<|end_file_name|><|fim▁begin|>import tensorflow as tf import tensorflow.contrib.slim as slim class BaseReader(object): def read(self): raise NotImplementedError() class ImageReader(BaseReader): def __init__(self): self.width = None self.height = None def get_image_size(self): return self.width, self.height<|fim▁hole|> self.height = height def read(self, filename, num_classes, batch_size=256, feature_map=None): assert(self.width is not None and self.height is not None) assert(self.width > 0 and self.height > 0) reader = tf.TFRecordReader() tf.add_to_collection(filename, batch_size) # is this really needed? key, value = reader.read_up_to(filename, batch_size) if feature_map is None: feature_map = { 'label': tf.FixedLenFeature([], tf.int64), 'image_raw': tf.FixedLenFeature([self.width * self.height], tf.int64), } features = tf.parse_example(value, features=feature_map) images = tf.cast(features["image_raw"], tf.float32) * (1. / 255) if feature_map.get('label') is not None: labels = tf.cast(features['label'], tf.int32) one_hot = tf.map_fn(lambda x: tf.cast(slim.one_hot_encoding(x, num_classes), tf.int32), labels) one_hot = tf.reshape(one_hot, [-1, num_classes]) return one_hot, images empty_labels = tf.reduce_sum(tf.zeros_like(images), axis=1) return empty_labels, images<|fim▁end|>
def set_image_size(self, width, height): self.width = width
<|file_name|>extensions.py<|end_file_name|><|fim▁begin|>""" This module hosts all the extension functions and classes created via SDK. The function :py:func:`ext_import` is used to import a toolkit module (shared library) into the workspace. The shared library can be directly imported from a remote source, e.g. http, s3, or hdfs. The imported module will be under namespace `graphlab.extensions`. Alternatively, if the shared library is local, it can be directly imported using the python import statement. Note that graphlab must be imported first. """ ''' Copyright (C) 2015 Dato, Inc. All rights reserved. This software may be modified and distributed under the terms of the BSD license. See the DATO-PYTHON-LICENSE file for details. ''' # This is a fake meta namespace which contains toolkit functions and toolkit # models implemented as extensions in C++ import graphlab as _gl import types as _types from graphlab.util import _make_internal_url from graphlab.cython.cy_sframe import UnitySFrameProxy as _UnitySFrameProxy from graphlab.cython.cy_sarray import UnitySArrayProxy as _UnitySArrayProxy from graphlab.cython.cy_graph import UnityGraphProxy as _UnityGraphProxy from graphlab.cython.cy_model import UnityModel as _UnityModel from graphlab.toolkits._main import ToolkitError as _ToolkitError from graphlab.cython.context import debug_trace as cython_context # Now. a bit of magic hackery is going to happen to this module. # This module is going to be first imported as graphlab.extensions # After which, inside graphlab/__init__.py, sys.modules['graphlab.extensions'] # will be modified to become a class called _extension_wrapper which redirects # getattr calls into this module. # # The reason for this wrapping is so that uses of functions in gl.extensions # (for instance) # # import graphlab as gl # gl.extensions._demo_addone(5) # # This will normally not work because gl.extensions._publish() was not called # hence _demo_addone will not be found. 
# # By wrapping the extensions module in another class, we can redefine # __getattr__ on that class and have it force gl.extensions._publish() when # an attribute name is not found. # # However, there are some odd sideeffects due to the use of the metapath # system as well. the metapath importer (this module) is going to look in # gl.extensions, but gl.extensions is going poke this module hence resulting # in an interesting recursive relationship. # # Also, we need gl.extensions.__dict__ to have all the published information # so that tab completion in ipython works. # # The result is that we need gl.extensions._publish() to publish into both # places. # - the current module # - the gl.extensions wrapper # # Then the metapath importer (this module) will just need to look in this # module, breaking the recursive relation. And the gl.extensions wrapper will # have all the stuff in it for tab completion by IPython. import sys as _sys _thismodule = _sys.modules[__name__] class_uid_to_class = {} def _wrap_function_return(val): """ Recursively walks each thing in val, opening lists and dictionaries, converting all occurances of UnityGraphProxy to an SGraph, UnitySFrameProxy to SFrame, and UnitySArrayProxy to SArray. """ if type(val) == _UnityGraphProxy: return _gl.SGraph(_proxy = val) elif type(val) == _UnitySFrameProxy: return _gl.SFrame(_proxy = val) elif type(val) == _UnitySArrayProxy: return _gl.SArray(_proxy = val) elif type(val) == _UnityModel: # we need to cast it up to the appropriate type try: if '__uid__' in val.list_fields(): uid = val.get('__uid__') if uid in class_uid_to_class: return class_uid_to_class[uid](_proxy=val) except: pass return val elif type(val) == list: return [_wrap_function_return(i) for i in val] elif type(val) == dict: return {i:_wrap_function_return(val[i]) for i in val} else: return val def _setattr_wrapper(mod, key, value): """ A setattr wrapper call used only by _publish(). 
This ensures that anything published into this module is also published into gl.extensions """ setattr(mod, key, value) if mod == _thismodule: setattr(_sys.modules[__name__], key, value) def _translate_function_arguments(argument): import inspect if inspect.isfunction(argument): try: return _build_native_function_call(argument) except: raise TypeError("Only native functions, or simple lambdas of native functions (with constant capture values) can be passed to an extension function.") elif type(argument) is list: return [_translate_function_arguments(i) for i in argument] elif type(argument) is tuple: return [_translate_function_arguments(i) for i in argument] elif type(argument) is dict: return {i:_translate_function_arguments(v) for (i, v) in argument.iteritems()} elif hasattr(argument, '_tkclass') and hasattr(argument, '__glmeta__'): return argument._tkclass else: return argument def _run_toolkit_function(fnname, arguments, args, kwargs): """ Dispatches arguments to a toolkit function. Parameters ---------- fnname : string The toolkit function to run arguments : list[string] The list of all the arguments the function takes. args : list The arguments that were passed kwargs : dictionary The keyword arguments that were passed """ # scan for all the arguments in args num_args_got = len(args) + len(kwargs) num_args_required = len(arguments) if num_args_got != num_args_required: raise TypeError("Expecting " + str(num_args_required) + " arguments, got " + str(num_args_got)) ## fill the dict first with the regular args argument_dict = {} for i in range(len(args)): argument_dict[arguments[i]] = args[i] # now fill with the kwargs. 
for k in kwargs.keys(): if k in argument_dict: raise TypeError("Got multiple values for keyword argument '" + k + "'") argument_dict[k] = kwargs[k] argument_dict = _translate_function_arguments(argument_dict) # unwrap it with cython_context(): ret = _gl.connect.main.get_unity().run_toolkit(fnname, argument_dict) # handle errors if ret[0] != True: if len(ret[1]) > 0: raise _ToolkitError(ret[1]) else: raise _ToolkitError("Toolkit failed with unknown error") ret = _wrap_function_return(ret[2]) if type(ret) == dict and 'return_value' in ret: return ret['return_value'] else: return ret def _make_injected_function(fn, arguments): return lambda *args, **kwargs: _run_toolkit_function(fn, arguments, args, kwargs) def _class_instance_from_name(class_name, *arg, **kwarg): """ class_name is of the form modA.modB.modC.class module_path splits on "." and the import_path is then ['modA','modB','modC'] the __import__ call is really annoying but essentially it reads like: import class from modA.modB.modC - Then the module variable points to modC - Then you get the class from the module. """ # we first look in gl.extensions for the class name module_path = class_name.split('.') import_path = module_path[0:-1] module = __import__('.'.join(import_path), fromlist=[module_path[-1]]) class_ = getattr(module, module_path[-1]) instance = class_(*arg, **kwarg) return instance def _create_class_instance(class_name, _proxy): """ Look for the class in graphlab.extensions in case it has already been imported (perhaps as a builtin extensions hard compiled into unity_server). """ try: return _class_instance_from_name("graphlab.extensions." + class_name, _proxy=_proxy) except: pass return _class_instance_from_name(class_name, _proxy=_proxy) class _ToolkitClass: """ The actual class class that is rewritten to become each user defined toolkit class. Certain care with attributes (__getattr__ / __setattr__) has to be done to inject functions, and attributes into their appropriate places. 
""" _functions = {} # The functions in the class _get_properties = [] # The getable properties in the class _set_properties = [] # The setable properties in the class _tkclass = None def __init__(self, *args, **kwargs): tkclass_name = getattr(self.__init__, "tkclass_name") _proxy = None if "_proxy" in kwargs: _proxy = kwargs['_proxy'] del kwargs['_proxy'] if _proxy: self.__dict__['_tkclass'] = _proxy elif tkclass_name: self.__dict__['_tkclass'] = _gl.connect.main.get_unity().create_toolkit_class(tkclass_name) try: # fill the functions and properties self.__dict__['_functions'] = self._tkclass.get('list_functions') self.__dict__['_get_properties'] = self._tkclass.get('list_get_properties') self.__dict__['_set_properties'] = self._tkclass.get('list_set_properties') # rewrite the doc string for this class try: self.__dict__['__doc__'] = self._tkclass.get('get_docstring', {'__symbol__':'__doc__'}) self.__class__.__dict__['__doc__'] = self.__dict__['__doc__'] except: pass except: raise _ToolkitError("Cannot create Toolkit Class for this class. 
" "This class was not created with the new toolkit class system.") # for compatibility with older classes / models self.__dict__['__proxy__'] = self.__dict__['_tkclass'] if '__init__' in self.__dict__['_functions']: self.__run_class_function("__init__", args, kwargs) elif len(args) != 0 or len(kwargs) != 0: raise TypeError("This constructor takes no arguments") def _get_wrapper(self): gl_meta_value = self.__glmeta__['extension_name'] return lambda _proxy: _create_class_instance(gl_meta_value, _proxy) def __dir__(self): return self._functions.keys() + self._get_properties + self._set_properties def __run_class_function(self, fnname, args, kwargs): # scan for all the arguments in args arguments = self._functions[fnname] num_args_got = len(args) + len(kwargs) num_args_required = len(arguments) if num_args_got != num_args_required: raise TypeError("Expecting " + str(num_args_required) + " arguments, got " + str(num_args_got)) ## fill the dict first with the regular args argument_dict = {} for i in range(len(args)): argument_dict[arguments[i]] = args[i] # now fill with the kwargs. for k in kwargs.keys(): if k in argument_dict: raise TypeError("Got multiple values for keyword argument '" + k + "'") argument_dict[k] = kwargs[k] # unwrap it argument_dict['__function_name__'] = fnname ret = self._tkclass.get('call_function', argument_dict) ret = _wrap_function_return(ret) return ret def __getattr__(self, name): if name == '__proxy__': return self.__dict__['__proxy__'] elif name in self._get_properties: # is it an attribute? arguments = {'__property_name__':name} return _wrap_function_return(self._tkclass.get('get_property', arguments)) elif name in self._functions: # is it a function? 
ret = lambda *args, **kwargs: self.__run_class_function(name, args, kwargs) ret.__doc__ = "Name: " + name + "\nParameters: " + str(self._functions[name]) + "\n" try: ret.__doc__ += self._tkclass.get('get_docstring', {'__symbol__':name}) ret.__doc__ += '\n' except: pass return ret else: raise AttributeError("no attribute " + name) def __setattr__(self, name, value): if name == '__proxy__': self.__dict__['__proxy__'] = value elif name in self._set_properties: # is it a setable property? arguments = {'__property_name__':name, 'value':value} return _wrap_function_return(self._tkclass.get('set_property', arguments)) else: raise AttributeError("no attribute " + name) def _list_functions(): """ Lists all the functions registered in unity_server. """ unity = _gl.connect.main.get_unity() return unity.list_toolkit_functions() def _publish(): import sys import copy """ Publishes all functions and classes registered in unity_server. The functions and classes will appear in the module graphlab.extensions """ unity = _gl.connect.main.get_unity() fnlist = unity.list_toolkit_functions() # Loop through all the functions and inject it into # graphlab.extensions.[blah] # Note that [blah] may be somemodule.somefunction # and so the injection has to be # graphlab.extensions.somemodule.somefunction for fn in fnlist: props = unity.describe_toolkit_function(fn) # quit if there is nothing we can process if 'arguments' not in props: continue arguments = props['arguments'] newfunc = _make_injected_function(fn, arguments) newfunc.__doc__ = "Name: " + fn + "\nParameters: " + str(arguments) + "\n" if 'documentation' in props: newfunc.__doc__ += props['documentation'] + "\n" newfunc.__dict__['__glmeta__'] = {'extension_name':fn} modpath = fn.split('.') # walk the module tree mod = _thismodule for path in modpath[:-1]: try: getattr(mod, path) except: _setattr_wrapper(mod, path, _types.ModuleType(name=path)) mod = getattr(mod, path) _setattr_wrapper(mod, modpath[-1], newfunc) # Repeat for classes 
tkclasslist = unity.list_toolkit_classes() for tkclass in tkclasslist: pathpos = tkclass.split('.') m = unity.describe_toolkit_class(tkclass) # of v2 type if not ('functions' in m and 'get_properties' in m and 'set_properties' in m and 'uid' in m): continue # create a new class new_class = copy.deepcopy(_ToolkitClass.__dict__) # rewrite the init method to add the toolkit class name so it will # default construct correctly new_class['__init__'] = _types.FunctionType(new_class['__init__'].func_code, new_class['__init__'].func_globals, name='__init__', argdefs=(), closure=()) new_class['__init__'].tkclass_name = tkclass newclass = _types.ClassType(tkclass, (object,), new_class) setattr(newclass, '__glmeta__', {'extension_name':tkclass}) class_uid_to_class[m['uid']] = newclass modpath = tkclass.split('.') # walk the module tree mod = _thismodule for path in modpath[:-1]: try: getattr(mod, path) except: _setattr_wrapper(mod, path, _types.ModuleType(name=path)) mod = getattr(mod, path) _setattr_wrapper(mod, modpath[-1], newclass) class _ExtMetaPath(object): """ This is a magic metapath searcher. To understand how this works, See the PEP 302 document. Essentially this class is inserted into the sys.meta_path list. This class must implement find_module() and load_module(). After which, this class is called first when any particular module import was requested, allowing this to essentially 'override' the default import behaviors. """ def find_module(self, fullname, submodule_path=None): """ We have to see if fullname refers to a module we can import. Some care is needed here because: import xxx # tries to load xxx.so from any of the python import paths import aaa.bbb.xxx # tries to load aaa/bbb/xxx.so from any of the python import paths """ # first see if we have this particular so has been loaded by # graphlab's extension library before ret = self.try_find_module(fullname, submodule_path) if ret is not None: return ret # nope. 
has not been loaded before # lets try to find a ".so" or a ".dylib" if any of the python # locations import sys import os # This drops the last "." So if I am importing aaa.bbb.xxx # module_subpath is aaa.bbb module_subpath = ".".join(fullname.split('.')[:-1]) for path in sys.path: # joins the path to aaa/bbb/xxx pathname = os.path.join(path, os.sep.join(fullname.split('.'))) # try to laod the ".so" extension try: if os.path.exists(pathname + '.so'): ext_import(pathname + '.so', module_subpath) break except: pass # try to laod the ".dylib" extension try: if os.path.exists(pathname + '.dylib'): ext_import(pathname + '.dylib', module_subpath) break except: pass ret = self.try_find_module(fullname, submodule_path) if ret is not None: return ret def try_find_module(self, fullname, submodule_path=None): # check if the so has been loaded before import sys # try to find the module inside of gl.extensions # Essentially: if fullname == aaa.bbb.xxx # Then we try to see if we have loaded gl.extensions.aaa.bbb.xxx mod = _thismodule modpath = fullname.split('.') # walk the module tree mod = _thismodule for path in modpath: try: mod = getattr(mod, path) except: return None return self def load_module(self, fullname): import sys # we may have already been loaded if fullname in sys.modules: return sys.modules[fullname] # try to find the module inside of gl.extensions # Essentially: if fullname == aaa.bbb.xxx # Then we try to look for gl.extensions.aaa.bbb.xxx mod = _thismodule modpath = fullname.split('.') for path in modpath: mod = getattr(mod, path) # Inject the module into aaa.bbb.xxx mod.__loader__ = self mod.__package__ = fullname mod.__name__ = fullname sys.modules[fullname] = mod return mod _ext_meta_path_singleton = None def _add_meta_path(): """ called on unity_server import to insert the meta path loader. 
""" import sys global _ext_meta_path_singleton if _ext_meta_path_singleton == None: _ext_meta_path_singleton = _ExtMetaPath() sys.meta_path += [_ext_meta_path_singleton] def ext_import(soname, module_subpath=""): """ Loads a graphlab toolkit module (a shared library) into the gl.extensions namespace. Toolkit module created via SDK can either be directly imported, e.g. ``import example`` or via this function, e.g. ``graphlab.ext_import("example.so")``. Use ``ext_import`` when you need more namespace control, or when the shared library is not local, e.g. in http, s3 or hdfs. Parameters ---------- soname : string The filename of the shared library to load. This can be a URL, or a HDFS location. For instance if soname is somewhere/outthere/toolkit.so The functions in toolkit.so will appear in gl.extensions.toolkit.* module_subpath : string, optional Any additional module paths to prepend to the toolkit module after it is imported. For instance if soname is somewhere/outthere/toolkit.so, by default the functions in toolkit.so will appear in gl.extensions.toolkit.*. However, if I module_subpath="somewhere.outthere", the functions in toolkit.so will appear in gl.extensions.somewhere.outthere.toolkit.* Returns ------- out : a list of functions and classes loaded. Examples -------- For instance, given a module which implements the function "square_root", .. code-block:: c++ #include <cmath> #include <graphlab/sdk/toolkit_function_macros.hpp> double square_root(double a) { return sqrt(a); } BEGIN_FUNCTION_REGISTRATION REGISTER_FUNCTION(square_root, "a"); END_FUNCTION_REGISTRATION compiled into example.so >>> graphlab.ext_import('example1.so') ['example1.square_root'] >>> graphlab.extensions.example1.square_root(9) 3.0 We can customize the import location with module_subpath which can be used to avoid namespace conflicts when you have multiple toolkits with the same filename. 
>>> graphlab.ext_import('example1.so', 'math') ['math.example1.square_root'] >>> graphlab.extensions.math.example1.square_root(9) 3.0 The module can also be imported directly, but graphlab *must* be imported first. graphlab will intercept the module loading process to load the toolkit. >>> import graphlab >>> import example1 #searches for example1.so in all the python paths >>> example1.square_root(9) 3.0 """ unity = _gl.connect.main.get_unity() import os if os.path.exists(soname): soname = os.path.abspath(soname) else: soname = _make_internal_url(soname) ret = unity.load_toolkit(soname, module_subpath) if len(ret) > 0: raise RuntimeError(ret) _publish() # push the functions into the corresponding module namespace filename = os.path.basename(soname) modulename = filename.split('.')[0] return unity.list_toolkit_functions_in_dynamic_module(soname) + unity.list_toolkit_classes_in_dynamic_module(soname) def _get_toolkit_function_name_from_function(fn): """ If fn is a toolkit function either imported by graphlab.extensions.ext_import or the magic import system, we return the name of toolkit function. Otherwise we return an empty string. """ try: if '__glmeta__' in fn.__dict__: return fn.__dict__['__glmeta__']['extension_name'] else: return "" except: return "" def _get_argument_list_from_toolkit_function_name(fn): """ Given a toolkit function name, return the argument list """ unity = _gl.connect.main.get_unity() fnprops = unity.describe_toolkit_function(fn) argnames = fnprops['arguments'] return argnames class _Closure: """ Defines a closure class describing a lambda closure. Contains 2 fields: native_fn_name: The toolkit native function name arguments: An array of the same length as the toolkit native function. Each array element is an array of 2 elements [is_capture, value] If is_capture == 1: value contains the captured value If is_capture == 0: value contains a number denoting the lambda argument position. 
Example: lambda x, y: fn(10, x, x, y) Then arguments will be [1, 10], --> is captured value. has value 10 [0, 0], --> is not captured value. is argument 0 of the lambda. [0, 0], --> is not captured value. is argument 0 of the lambda. [0, 1] --> is not captured value. is argument 1 of the lambda. """ def __init__(self, native_fn_name, arguments): self.native_fn_name = native_fn_name self.arguments = arguments def _descend_namespace(caller_globals, name): """ Given a globals dictionary, and a name of the form "a.b.c.d", recursively walk the globals expanding caller_globals['a']['b']['c']['d'] returning the result. Raises an exception (IndexError) on failure. """ names = name.split('.') cur = caller_globals for i in names: if type(cur) is dict: cur = cur[i] else: cur = getattr(cur, i) return cur def _build_native_function_call(fn): """ If fn can be interpreted and handled as a native function: i.e. fn is one of the extensions, or fn is a simple lambda closure using one of the extensions. fn = gl.extensions.add fn = lambda x: gl.extensions.add(5) Then, this returns a closure object, which describes the function call which can then be passed to C++. Returns a _Closure object on success, raises an exception on failure. """ # See if fn is the native function itself native_function_name = _get_toolkit_function_name_from_function(fn) if native_function_name != "": # yup! # generate an "identity" argument list argnames = _get_argument_list_from_toolkit_function_name(native_function_name)<|fim▁hole|> return _Closure(native_function_name, arglist) # ok. its not a native function from graphlab_util.lambda_closure_capture import translate from graphlab_util.lambda_closure_capture import Parameter # Lets see if it is a simple lambda capture = translate(fn) # ok. 
build up the closure arguments # Try to pick up the lambda function = _descend_namespace(capture.caller_globals, capture.closure_fn_name) native_function_name = _get_toolkit_function_name_from_function(function) if native_function_name == "": raise RuntimeError("Lambda does not contain a native function") argnames = _get_argument_list_from_toolkit_function_name(native_function_name) # ok. build up the argument list. this is mildly annoying due to the mix of # positional and named arguments # make an argument list with a placeholder for everything first arglist = [[-1, i] for i in argnames] # loop through the positional arguments for i in range(len(capture.positional_args)): arg = capture.positional_args[i] if type(arg) is Parameter: # This is a lambda argument # arg.name is the actual string of the argument # here we need the index arglist[i] = [0, capture.input_arg_names.index(arg.name)] else: # this is a captured value arglist[i] = [1, arg] # now. the named arguments are somewhat annoying for i in capture.named_args: arg = capture.named_args[i] if type(arg) is Parameter: # This is a lambda argument # arg.name is the actual string of the argument # here we need the index arglist[argnames.index(i)] = [0, capture.input_arg_names.index(arg.name)] else: # this is a captured value arglist[argnames.index(i)] = [1, arg] # done. Make sure all arguments are filled for i in arglist: if i[0] == -1: raise RuntimeError("Incomplete function specification") # attempt to recursively break down any other functions import inspect for i in range(len(arglist)): if arglist[i][0] == 1 and inspect.isfunction(arglist[i][1]): try: arglist[i][1] = _build_native_function_call(arglist[i][1]) except: pass return _Closure(native_function_name, arglist)<|fim▁end|>
arglist = [[0, i] for i in range(len(argnames))]
<|file_name|>menu.ts<|end_file_name|><|fim▁begin|>import * as h from 'mithril/hyperscript' import { select } from 'd3-selection' import { scaleLinear } from 'd3-scale' import { area as d3Area } from 'd3-shape' import { axisLeft } from 'd3-axis' import i18n from '../../i18n' import router from '../../router' import { UserData as PuzzleUserData } from '../../lichess/interfaces/training' import loginModal from '../loginModal' import popupWidget from '../shared/popup' import * as helper from '../helper' import TrainingCtrl from './TrainingCtrl' export interface IMenuCtrl { open: () => void close: () => void isOpen: () => boolean root: TrainingCtrl } export default { controller(root: TrainingCtrl): IMenuCtrl { let isOpen = false function open() { router.backbutton.stack.push(close) isOpen = true } function close(fromBB?: string) { if (fromBB !== 'backbutton' && isOpen) router.backbutton.stack.pop()<|fim▁hole|> isOpen = false } return { open, close, isOpen: () => isOpen, root } }, view(ctrl: IMenuCtrl) { return popupWidget( 'trainingMenu', undefined, () => renderTrainingMenu(ctrl.root), ctrl.isOpen(), ctrl.close ) } } function renderTrainingMenu(ctrl: TrainingCtrl) { if (ctrl.data && ctrl.data.user) { return renderUserInfos(ctrl.data.user) } else { return renderSigninBox() } } function renderSigninBox() { return h('div.trainingMenuContent', [ h('p', i18n('toTrackYourProgress')), h('p', h('button', { oncreate: helper.ontap(loginModal.open) }, [h('span.fa.fa-user'), i18n('signIn')]) ), h('p', i18n('trainingSignupExplanation')) ]) } function renderUserInfos(user: PuzzleUserData) { const { vw } = helper.viewportDim() let width: number // see overlay-popup.styl for popup width if (vw >= 900) width = vw * 0.4 else if (vw >= 800) width = vw * 0.45 else if (vw >= 700) width = vw * 0.5 else if (vw >= 600) width = vw * 0.55 else if (vw >= 500) width = vw * 0.6 else width = vw * 0.85 const height = 200 return [ h('p.trainingRatingHeader', h.trust(i18n('yourPuzzleRatingX', 
`<strong>${user.rating}</strong>`))), user.recent ? h('svg#training-graph', { width, height, oncreate() { drawChart(user) } }) : null, renderRecent(user) ] } function onRecentTap(e: TouchEvent) { const button = helper.getButton(e) const id = button && (button.dataset as DOMStringMap).id if (id) router.set(`/training/${id}`, true) } function renderRecent(user: PuzzleUserData) { return h('div.puzzle-recents', { oncreate: helper.ontapY(onRecentTap, undefined, helper.getButton) }, user.recent.map(([id, diff]) => h('button', { 'data-id': id, className: diff > 0 ? 'up' : 'down' }, (diff > 0 ? '+' : '') + diff)) ) } function drawChart(user: PuzzleUserData) { const history = Array.from(user.recent.map(x => x[2])) history.push(user.rating) const data = history.map((x, i) => [i + 1, x]) const graph = select('#training-graph') const margin = {top: 5, right: 20, bottom: 5, left: 35} const width = +graph.attr('width') - margin.left - margin.right const height = +graph.attr('height') - margin.top - margin.bottom const g = graph.append('g').attr('transform', 'translate(' + margin.left + ',' + margin.top + ')') const xvalues = data.map(d => d[0]) const scaleX = scaleLinear() .domain([Math.min.apply(null, xvalues), Math.max.apply(null, xvalues)]) .rangeRound([0, width]) const yvalues = data.map(d => d[1]) const scaleY = scaleLinear() .domain([Math.min.apply(null, yvalues) - 10, Math.max.apply(null, yvalues) + 10]) .rangeRound([height, 0]) const area = d3Area() .x(d => scaleX(d[0])) .y0(height) .y1(d => scaleY(d[1])) const line = d3Area() .x(d => scaleX(d[0])) .y(d => scaleY(d[1])) const yAxis = axisLeft(scaleY) .tickFormat(d => String(d)) g.datum(data) g.append('g') .call(yAxis) .append('text') .attr('class', 'legend') .attr('transform', 'rotate(-90)') .attr('y', 6) .attr('dy', '0.71em') .attr('text-anchor', 'end') .text(i18n('rating')) g.append('path') .attr('class', 'path') .attr('fill', 'steelblue') .attr('stroke', 'steelblue') .attr('stroke-linejoin', 'round') 
.attr('stroke-linecap', 'round') .attr('stroke-width', 0) .attr('d', area) g.append('path') .attr('class', 'line') .attr('d', line) }<|fim▁end|>
<|file_name|>application.js<|end_file_name|><|fim▁begin|>/** * This file is part of taolin project (http://taolin.fbk.eu) * Copyright (C) 2008, 2009 FBK Foundation, (http://www.fbk.eu) * Authors: SoNet Group (see AUTHORS.txt) * * Taolin is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as published by * the Free Software Foundation version 3 of the License. * * Taolin is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with Taolin. If not, see <http://www.gnu.org/licenses/>. * */ /* Retrieves portal configuration as: * - img path * - contact email * - jabber server and domain */ Ext.onReady(function(){ document.getElementById('loading-msg').innerHTML = 'Loading Interface...'; setPortalConfiguration(application_init); }); Ext.BLANK_IMAGE_URL = 'extjs/resources/images/default/s.gif'; // nsb stands for NOT SUPPORTED BROWSER nsb = (Ext.isIE6 || Ext.isIE7); /* * Themes for Taolin gui */ themes = [ ['tp', 'Tp (default)'], //['access', 'Access'], //['aero', 'Aero'], ['blue', 'Blue'], ['blueen', 'Blueen'], ['gray', 'Gray'], //['galdaka', 'Galdaka'], ['indigo', 'Indigo'], ['midnight', 'Midnight'], ['purple', 'Purple'], ['silverCherry', 'Silver Cherry'], ['slate', 'Slate'], ['slickness', 'Slickness'], ['human', 'Ubuntu'], ['vista', 'Vista'] ]; //Ext.onReady( function application_init(){ if(typeof user.theme == 'string') changeExtTheme(user.theme); Ext.QuickTips.init(); /* * qtip intercepts tooltip */ var qtip = Ext.QuickTips.getQuickTip(); qtip.interceptTitles = true; eventManager = new Ext.ux.fbk.sonet.EventManager({ name: "taolin-event-manager" ,events: { addcomment: true ,removecomment: true ,newtimelineevent: true ,userphotochange: true 
,userprofilechange: true } ,listeners:{ addcomment: function(){ this.fireEvent('newtimelineevent'); } ,removecomment: function(){ this.fireEvent('newtimelineevent'); } ,userphotochange: function(){ this.fireEvent('newtimelineevent'); } ,userprofilechange: function(){ Ext.getCmp('user_edit_profile').form.load(); } } }); config.num_columns = user.number_of_columns; var columns = new Array(), width = 1/config.num_columns; for(var i=0; i< config.num_columns; i++){ columns.push({ columnWidth:width ,style:'padding:20px 10px 20px 10px' }); } // preparing text for Did you know messages var aDyk = ['Did you know that you can <span class="right-element" style="float:none;position:relative;padding:0;"><span class="a add_widgets" onclick="openAddWidgetsModalWindow()"><b>Add widgets</b></span></span>? <span class="right-element" style="float:none;position:relative;padding:0;"><span class="a add_widgets" onclick="openAddWidgetsModalWindow()"><b>Add widgets</b></a></span>.', 'Did you know that you can <span class="a" onclick="expandUserEditProfilePanel()">edit your profile</span>? <span class="a" onclick="expandUserEditProfilePanel()">Edit your profile</span>.', 'Did you know that you can <span class="a" onclick="expandSettingsPanel()">customize your widgets\' theme</span>? <span class="a" onclick="expandSettingsPanel()">Edit your settings</span>.', 'Did you know that you can <span class="a" onclick="expandSettingsPanel()">change the number of columns containing your widgets</span>? <span class="a" onclick="expandSettingsPanel()">Edit your settings</span>.', 'Did you know that you can <span class="a" onclick="expandSettingsPanel()">personalize '+config.appname+' background?</span>? <span class="a" onclick="expandSettingsPanel()">Edit your settings</span>.', 'Did you know that you can expand fullscreen widgets clicking on <img width=20px height=1px src="'+Ext.BLANK_IMAGE_URL+'" class="x-tool x-tool-maximize" style="vertical-align:bottom;float:none;cursor:default;"/>? 
', 'Did you know that you can configure a widget clicking on <img width=20px height=1px src="'+Ext.BLANK_IMAGE_URL+'" class="x-tool x-tool-gear" style="vertical-align:bottom;float:none;cursor:default;"/>? ', 'Did you know that you can minimize your widget clicking on <img width=20px height=1px src="'+Ext.BLANK_IMAGE_URL+'" class="x-tool x-tool-toggle" style="vertical-align:bottom;float:none;cursor:default;"/>? ', 'Did you know that you can remove a widget clicking on <img width=20px height=1px src="'+Ext.BLANK_IMAGE_URL+'" class="x-tool x-tool-close" style="vertical-align:bottom;float:none;cursor:default;"/>? ', 'Did you know that you can move widgets dragging the title bar?', 'Did you know that you can edit your photos? <span class="a" onclick="openImageChooser()">Edit your photo</span>.', 'Did you know that you can add a new photo? <span class="a" onclick="new PhotoUploader()">Edit your photo</span>.', 'Did you know that you can set taolin as your homepage? Read <a href="./pages/make_homepage_help" target="_blank">the instructions</a>!', 'Did you know that you can view other people photos gallery by clicking on one of their photos?', 'Did you know that there is a <a href="./pages/privacy_policy" target="_blank">privacy policy</a> about how your data are used? <a href="./pages/privacy_policy" target="_blank">Read the privacy policy</a>!', 'Did you know that you can edit your workplace and view other\'s on a map? <span class="a" onclick="(new Ext.ux.fbk.sonet.MapWindow({logparams: {source: \'did you know\', user_id:\'\'}})).show()">Edit!</span>', 'Did you know that you can suggest a colleague of yours as new champion on her/his profile?' ]; var dyk = (nsb ? '<a href="http://getfirefox.com" target="_blank">DOWNLOAD AND USE FIREFOX</a> FOR A BETTER, FASTER USER EXPERIENCE!' 
: aDyk[Math.floor(Math.random()*aDyk.length)] /* pick a random string out of aDyk */); /* * Main menu: * use .header class only for top-level menu voices */ var main_menu = '' ,admin_menu_item = '<li class="header"><span class="a menu-item">Admin portal</span>' + '<ul>' + '<li><span class="menu-item"><a class="sprited help-icon" href="./admin" target="_blank">Admin main</a></span></li>' + '<li><span class="menu-item"><a class="sprited picture" href="./admin/backgrounds" target="_blank">Background</a></span></li>' + '<li><span class="menu-item"><a class="sprited map" href="./admin/buildings" target="_blank">Building</a></span></li>' + '<li><span class="menu-item"><a class="sprited gears" href="./admin/portals/config" target="_blank">Configuration</a></span></li>' + '<li><span class="menu-item"><a class="sprited image-edit" href="./admin/templates" target="_blank">Templates</a></span></li>' + '<li><span class="menu-item"><a class="sprited groups" href="./admin/users" target="_blank">Users</a></span></li>' + '<li><span class="menu-item"><a class="sprited chart-icon" href="./admin/widgets" target="_blank">Widgets</a></span></li>' + '</ul>' + '</li>' ,simple_admin_menu_item = '<li class="header"><a class="menu-item" href="./admin" target="_blank">Admin portal</a></li>'; if(!nsb) main_menu = '<ul class="dd-menu">' + '<li class="header"><span class="a menu-item">Personal profile</span>' + '<ul>' + '<li><span class="a menu-item" onclick="showUserInfo(null, null, {source: \'logout_div\'})"><span class="sprited user-icon">View your profile</span></span></li>' + '<li><span class="a menu-item" onclick="expandUserEditProfilePanel()"><span class="sprited user-edit">Edit your profile</span></span></li>' + '<li><span class="a menu-item" onclick="expandSettingsPanel()"><span class="sprited settings">Edit your settings</span></span></li>' + '<li><span class="a menu-item" onclick="openImageChooser()"><span class="sprited image-edit">Edit your photos</span></span></li>' + '<li><span 
class="a menu-item" onclick="new Ext.ux.fbk.sonet.MapWindow().show()"><span class="sprited map-edit">Edit your workplace position</span></span></li>' + '</ul>' + '</li>' + '<li class="header"><span class="a menu-item">Tools</span>' + '<ul>' + '<li><span class="menu-item a add_widgets" onclick="openAddWidgetsModalWindow()"><span class="sprited add-icon">Add widgets</span></span></li>' + '<li><span class="a menu-item" onclick="addOrBounceWidget(\'Ext.ux.fbk.sonet.MetaSearch\',\'string_identifier\',{source: \'logout_div\'})"><span class="sprited search">Search</span></span></li>' + '<li><span class="a menu-item" onclick="new Ext.ux.fbk.sonet.MapWindow().show()"><span class="sprited map">Map of colleagues workplaces</span></span></li>' + '<li><span class="a menu-item" onclick="new PhotoUploader()"><span class="sprited upload-picture">Upload a photo</span></a></li>' + '<li><span class="a menu-item" onclick="new SendToWindow()"><span class="sprited email">Send an email</span></span></li>' + '</ul>' + '</li>' + '<li class="header"><a class="menu-item" href="./wiki" target="_blank">FBK Wiki</a></li>' + '<li class="header"><span class="a menu-item" onclick="showMainTimeline()">Timeline</span></li>' + '<li class="header"><span class="a menu-item">Info</span>' + '<ul>' + '<li><a class="menu-item" href="./pages/help" target="_blank">FAQ - Help</a></li>' + '<li><a class="menu-item" href="./pages/privacy_policy" target="_blank">Privacy policy</a></li>' + '</ul>' + '</li>' + '<li class="header"><span class="a menu-item">' + config.appname + '</span>' + '<ul>' + /* This software is open source released under aGPL. See http://www.fsf.org/licensing/licenses/agpl-3.0.html for more details. According to the license, you must place in every Web page served by Taolin a link where your user can download the source code. So, please, don't remove this link, you can move it in another part of the web page, though. 
*/ '<li><a class="menu-item" href="http://github.com/vad/taolin" target="_blank">Download the code</a></li>' + //'<li><a class="menu-item" href="http://github.com/vad/taolin/issues" target="_blank">Report an issue</a></li>' + '</ul>' + '</li>' + (user.admin ? admin_menu_item : '' ) + '<li class="header last"><a class="menu-item" href="./accounts/logout" onclick="jabber.quit()">Logout</a></li>' + '</ul>'; else // Simplified version for old, stupidunsupported browsers main_menu = '<ul class="dd-menu">' + '<li class="header"><span class="a menu-item" onclick="showUserInfo(null, null, {source: \'logout_div\'})">Personal profile</span></li>' + '<li class="header"><span class="menu-item a add_widgets" onclick="openAddWidgetsModalWindow()">Add widgets</span></li>' + '<li class="header"><a class="menu-item" href="./wiki" target="_blank">FBK Wiki</a></li>' + '<li class="header"><span class="a menu-item" onclick="showMainTimeline()">Timeline</span></li>' + '<li class="header"><a class="menu-item" href="./pages/help" target="_blank">FAQ - Help</a></li>' + '<li class="header"><a class="menu-item" href="./pages/privacy_policy" target="_blank">Privacy policy</a></li>' + /* This software is open source released under aGPL. See http://www.fsf.org/licensing/licenses/agpl-3.0.html for more details. According to the license, you must place in every Web page served by Taolin a link where your user can download the source code. So, please, don't remove this link, you can move it in another part of the web page, though. */ '<li class="header"><a class="menu-item" href="http://github.com/vad/taolin" target="_blank">Download the code</a></li>' + (user.admin ? simple_admin_menu_item : '' ) + '<li class="header last"><a class="menu-item" href="./accounts/logout" onclick="jabber.quit()">Logout</a></li>' + '</ul>'; /** * HTML shown in the northern part of the viewport. * It contains: * - FBK logo * - logout menu * - "Did you know?" questions */ var dyk_style = (nsb ? 
'color:darkRed;font-weight:bold;font-size:110%;' : (Math.random() > 0.3 ? 'display:none;':'')); var clear_html = '<div id="logout_div" class="right-element">' + main_menu + '</div>' + '<div class="left-element">' + '<img src="'+config.logo+'" qtip="taolin logo" style="padding-left:10px"/>' + '</div>' + '<div id="didyouknow_div" style="'+dyk_style+'"><span id="didyouknow_span"><table class="border_radius_5px"><tr><td style="padding:0 10px;">'+dyk+' <span class="a" onclick="$(\'#didyouknow_div\').hide();" style="margin-left:10px;font-size:x-small;">[close this message]</span></td></tr></table></span></div>'; viewport = new Ext.Viewport({ layout:'border', items:[{ region:'north', id: 'north-panel', border: false, height: 50, style: 'z-index:1', items:[{ html: clear_html ,border: false }] },{ xtype:'portal', region:'center', id:'portal_central', margins:'5 5 5 0', cls:'desktop', bodyStyle: 'padding:0 10px', style: 'z-index:0;', /* Here we define three different column for our portal. If you change the number of * the column please check the database for any inconsistency */ items: columns, // Setting desktop background listeners:{ afterlayout: function(){ var bg = get(user, 'bg', config.background); changeBg(bg); } } }, westPanel] }); /* These functions are invoked when the page is loaded. 
* getWidgetsPosition retrieves user's widgets and their position * showUserInfo(null, true) fill western-panel */ getWidgetsPosition(); /* Check if there's a valid session */ var task = { run: function(){ Ext.Ajax.request({ url : 'accounts/issessionup', method: 'GET', success: function ( result, request ) { var valid = Ext.util.JSON.decode(result.responseText); if (!valid){ window.location.reload(false); } } }); }, interval: 300000 //5 minutes }; Ext.TaskMgr.start(task); if(!user.privacy_policy_acceptance) // check if first login wizard should be opened or not openFirstLoginWizard(); /** * Menu */ // Styling: add an image (an arrow) at the end of each menu voice that has a sub-menu //$('.dd-menu .header:has(ul)') $('.dd-menu .header') .each(function(){ $(this) .has('ul') .find('.a:first')<|fim▁hole|> .addClass('sprited arrow-down')) .end() .find('ul') .css('display', 'none') .hide(); } ) .hover( function(){ $(this) .find('.a:first .sprited') .removeClass('arrow-down') .addClass('arrow-up') .end() .find('ul') .css({visibility: 'visible', display: 'none'}) .show(); },function(){ $(this) .find('.a:first .sprited') .removeClass('arrow-up') .addClass('arrow-down') .end() .find('ul') .css('visibility', 'hidden'); } ); } $(document).ready(function(){ $('#jplayer').jPlayer({ oggSupport:true ,swfPath: 'js/jquery/jplayer' }); });<|fim▁end|>
.append($('<span>')
<|file_name|>anon_db.py<|end_file_name|><|fim▁begin|>"""Remove any personally identifying information from the database""" from django.core.management.base import BaseCommand from django.conf import settings from django.contrib.admin.models import LogEntry from django_openid_auth.models import UserOpenID from rest_framework.authtoken.models import Token from reversion.models import Revision from games.models import ( Installer, InstallerIssue, InstallerIssueReply, InstallerHistory, Screenshot, GameSubmission, ) from accounts.models import User from common.models import Upload, News class Command(BaseCommand): """Django command to anonymize the database""" @staticmethod def get_main_user(): """Return the only user remaining in the DB""" return User.objects.first() @staticmethod def delete_tokens(): """Remove all auth tokens (OpenID, DRF, ...)""" res = UserOpenID.objects.all().delete() print("Deleted %s openids" % res[0]) res = Token.objects.all().delete() print("Deleted %s tokens" % res[0]) res = LogEntry.objects.all().delete() print("Deleted %s log entries" % res[0]) def handle(self, *args, **kwargs): if not settings.DEBUG: raise RuntimeError("Never run this in production") self.delete_tokens() user = self.get_main_user() res = InstallerIssue.objects.all().update(submitted_by=user) print("Updated %s issues" % res) <|fim▁hole|> res = InstallerHistory.objects.all().update(user=user) print("Updated %s installer history" % res) res = Installer.objects.all().update(user=user) print("Updated %s installers" % res) res = InstallerHistory.objects.all().update(user=user) print("Updated %s installer history" % res) res = GameSubmission.objects.all().update(user=user) print("Updated %s game submissions" % res) res = Screenshot.objects.all().update(uploaded_by=user) print("Updated %s screenshots" % res) res = Upload.objects.all().update(uploaded_by=user) print("Updated %s uploads" % res) res = News.objects.all().update(user=user) print("Updated %s news" % res) res = 
Revision.objects.all().update(user=user) print("Updated %s revisions" % res) res = User.objects.exclude(pk=user.id).delete() print("Deleted %s users" % res[0]) default_password = "lutris" user.set_password(default_password) user.username = "lutris" user.email = "root@localhost" user.website = "" user.steamid = "" user.save() print("Password for user %s is now %s" % (user, default_password))<|fim▁end|>
res = InstallerIssueReply.objects.all().update(submitted_by=user) print("Updated %s issue replies" % res)