input
stringlengths 0
1.96k
| context
stringlengths 1.23k
257k
| answers
listlengths 1
5
| length
int32 399
40.5k
| dataset
stringclasses 10
values | language
stringclasses 5
values | all_classes
listlengths | _id
stringlengths 48
48
|
|---|---|---|---|---|---|---|---|
// Copyright 2014 Esk0r
// SpellDatabase.cs is part of Evade.
//
// Evade is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Evade is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Evade. If not, see <http://www.gnu.org/licenses/>.
#region
using System;
using System.Collections.Generic;
using System.Linq;
using LeagueSharp;
#endregion
namespace Evade
{
public static class SpellDatabase
{
// Master list of skill-shot definitions. Populated once, entry by entry,
// in the static constructor below via Spells.Add(new SpellData { ... }).
public static List<SpellData> Spells = new List<SpellData>();
static SpellDatabase()
{
//Add spells to the database
#region Test
if (Config.TestOnAllies)
{
Spells.Add(
new SpellData
{
ChampionName = ObjectManager.Player.ChampionName,
SpellName = "TestSkillShot",
Slot = SpellSlot.R,
Type = SkillShotType.SkillshotCircle,
Delay = 600,
Range = 650,
Radius = 350,
MissileSpeed = int.MaxValue,
FixedRange = false,
AddHitbox = true,
DangerValue = 5,
IsDangerous = true,
MissileSpellName = "TestSkillShot",
});
}
#endregion Test
#region Aatrox
Spells.Add(
new SpellData
{
ChampionName = "Aatrox",
SpellName = "AatroxQ",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotCircle,
Delay = 600,
Range = 650,
Radius = 250,
MissileSpeed = 2000,
FixedRange = false,
AddHitbox = true,
DangerValue = 3,
IsDangerous = true,
MissileSpellName = "",
});
Spells.Add(
new SpellData
{
ChampionName = "Aatrox",
SpellName = "AatroxE",
Slot = SpellSlot.E,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 1075,
Radius = 35,
MissileSpeed = 1250,
FixedRange = true,
AddHitbox = true,
DangerValue = 3,
IsDangerous = false,
MissileSpellName = "AatroxEConeMissile",
});
#endregion Aatrox
#region Ahri
Spells.Add(
new SpellData
{
ChampionName = "Ahri",
SpellName = "AhriOrbofDeception",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 1000,
Radius = 100,
MissileSpeed = 2500,
MissileAccel = -3200,
MissileMaxSpeed = 2500,
MissileMinSpeed = 400,
FixedRange = true,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "AhriOrbMissile",
CanBeRemoved = true,
ForceRemove = true,
CollisionObjects = new[] { CollisionObjectTypes.YasuoWall }
});
Spells.Add(
new SpellData
{
ChampionName = "Ahri",
SpellName = "AhriOrbReturn",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 1000,
Radius = 100,
MissileSpeed = 60,
MissileAccel = 1900,
MissileMinSpeed = 60,
MissileMaxSpeed = 2600,
FixedRange = true,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileFollowsUnit = true,
CanBeRemoved = true,
ForceRemove = true,
MissileSpellName = "AhriOrbReturn",
CollisionObjects = new[] { CollisionObjectTypes.YasuoWall }
});
Spells.Add(
new SpellData
{
ChampionName = "Ahri",
SpellName = "AhriSeduce",
Slot = SpellSlot.E,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 1000,
Radius = 60,
MissileSpeed = 1550,
FixedRange = true,
AddHitbox = true,
DangerValue = 3,
IsDangerous = true,
MissileSpellName = "AhriSeduceMissile",
CanBeRemoved = true,
CollisionObjects =
new[]
{ CollisionObjectTypes.Champions, CollisionObjectTypes.Minion, CollisionObjectTypes.YasuoWall }
});
#endregion Ahri
#region Amumu
Spells.Add(
new SpellData
{
ChampionName = "Amumu",
SpellName = "BandageToss",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 1100,
Radius = 90,
MissileSpeed = 2000,
FixedRange = true,
AddHitbox = true,
DangerValue = 3,
IsDangerous = true,
MissileSpellName = "SadMummyBandageToss",
CanBeRemoved = true,
CollisionObjects =
new[]
{ CollisionObjectTypes.Champions, CollisionObjectTypes.Minion, CollisionObjectTypes.YasuoWall }
});
Spells.Add(
new SpellData
{
ChampionName = "Amumu",
SpellName = "CurseoftheSadMummy",
Slot = SpellSlot.R,
Type = SkillShotType.SkillshotCircle,
Delay = 250,
Range = 0,
Radius = 550,
MissileSpeed = int.MaxValue,
FixedRange = true,
AddHitbox = false,
DangerValue = 5,
IsDangerous = true,
MissileSpellName = "",
});
#endregion Amumu
#region Anivia
Spells.Add(
new SpellData
{
ChampionName = "Anivia",
SpellName = "FlashFrost",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 1100,
Radius = 110,
MissileSpeed = 850,
FixedRange = true,
AddHitbox = true,
DangerValue = 3,
IsDangerous = true,
MissileSpellName = "FlashFrostSpell",
CanBeRemoved = true,
CollisionObjects = new[] { CollisionObjectTypes.YasuoWall }
});
#endregion Anivia
#region Annie
Spells.Add(
new SpellData
{
ChampionName = "Annie",
SpellName = "Incinerate",
Slot = SpellSlot.W,
Type = SkillShotType.SkillshotCone,
Delay = 250,
Range = 825,
Radius = 80,
MissileSpeed = int.MaxValue,
FixedRange = false,
AddHitbox = false,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "",
});
Spells.Add(
new SpellData
{
ChampionName = "Annie",
SpellName = "InfernalGuardian",
Slot = SpellSlot.R,
Type = SkillShotType.SkillshotCircle,
Delay = 250,
Range = 600,
Radius = 251,
MissileSpeed = int.MaxValue,
FixedRange = false,
AddHitbox = true,
DangerValue = 5,
IsDangerous = true,
MissileSpellName = "",
});
#endregion Annie
#region Ashe
Spells.Add(
new SpellData
{
ChampionName = "Ashe",
SpellName = "Volley",
Slot = SpellSlot.W,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 1250,
Radius = 60,
MissileSpeed = 1500,
FixedRange = true,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "VolleyAttack",
MultipleNumber = 9,
MultipleAngle = 4.62f * (float)Math.PI / 180,
CanBeRemoved = true,
CollisionObjects = new[] { CollisionObjectTypes.Champions, CollisionObjectTypes.YasuoWall, CollisionObjectTypes.Minion }
});
Spells.Add(
new SpellData
{
ChampionName = "Ashe",
SpellName = "EnchantedCrystalArrow",
Slot = SpellSlot.R,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 20000,
Radius = 130,
MissileSpeed = 1600,
FixedRange = true,
AddHitbox = true,
DangerValue = 5,
IsDangerous = true,
MissileSpellName = "EnchantedCrystalArrow",
CanBeRemoved = true,
CollisionObjects = new[] { CollisionObjectTypes.Champions, CollisionObjectTypes.YasuoWall }
});
#endregion Ashe
#region Bard
Spells.Add(
new SpellData
{
ChampionName = "Bard",
SpellName = "BardQ",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 950,
Radius = 60,
MissileSpeed = 1600,
FixedRange = true,
AddHitbox = true,
DangerValue = 3,
IsDangerous = true,
MissileSpellName = "BardQMissile",
CanBeRemoved = true,
CollisionObjects = new[] { CollisionObjectTypes.Champions, CollisionObjectTypes.YasuoWall }
});
Spells.Add(
new SpellData
{
ChampionName = "Bard",
SpellName = "BardR",
Slot = SpellSlot.R,
Type = SkillShotType.SkillshotCircle,
Delay = 500,
Range = 3400,
Radius = 350,
MissileSpeed = 2100,
FixedRange = false,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "BardR",
});
#endregion
#region Blitzcrank
Spells.Add(
new SpellData
{
ChampionName = "Blitzcrank",
SpellName = "RocketGrab",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 1050,
Radius = 70,
MissileSpeed = 1800,
FixedRange = true,
AddHitbox = true,
DangerValue = 4,
IsDangerous = true,
MissileSpellName = "RocketGrabMissile",
CanBeRemoved = true,
CollisionObjects =
new[]
{ CollisionObjectTypes.Champions, CollisionObjectTypes.Minion, CollisionObjectTypes.YasuoWall }
});
Spells.Add(
new SpellData
{
ChampionName = "Blitzcrank",
SpellName = "StaticField",
Slot = SpellSlot.R,
Type = SkillShotType.SkillshotCircle,
Delay = 250,
Range = 0,
Radius = 600,
MissileSpeed = int.MaxValue,
FixedRange = true,
AddHitbox = false,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "",
});
#endregion Blitzcrank
#region Brand
Spells.Add(
new SpellData
{
ChampionName = "Brand",
SpellName = "BrandBlaze",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 1100,
Radius = 60,
MissileSpeed = 1600,
FixedRange = true,
AddHitbox = true,
DangerValue = 3,
IsDangerous = true,
MissileSpellName = "BrandBlazeMissile",
CanBeRemoved = true,
CollisionObjects =
new[]
{ CollisionObjectTypes.Champions, CollisionObjectTypes.Minion, CollisionObjectTypes.YasuoWall }
});
Spells.Add(
new SpellData
{
ChampionName = "Brand",
SpellName = "BrandFissure",
Slot = SpellSlot.W,
Type = SkillShotType.SkillshotCircle,
Delay = 850,
Range = 900,
Radius = 240,
MissileSpeed = int.MaxValue,
FixedRange = false,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "",
});
#endregion Brand
#region Braum
Spells.Add(
new SpellData
{
ChampionName = "Braum",
SpellName = "BraumQ",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 1050,
Radius = 60,
MissileSpeed = 1700,
FixedRange = true,
AddHitbox = true,
DangerValue = 3,
IsDangerous = true,
MissileSpellName = "BraumQMissile",
CanBeRemoved = true,
CollisionObjects =
new[]
{ CollisionObjectTypes.Champions, CollisionObjectTypes.Minion, CollisionObjectTypes.YasuoWall }
});
Spells.Add(
new SpellData
{
ChampionName = "Braum",
SpellName = "BraumRWrapper",
Slot = SpellSlot.R,
Type = SkillShotType.SkillshotMissileLine,
Delay = 500,
Range = 1200,
Radius = 115,
MissileSpeed = 1400,
FixedRange = true,
AddHitbox = true,
DangerValue = 4,
IsDangerous = true,
MissileSpellName = "braumrmissile",
CollisionObjects = new[] { CollisionObjectTypes.YasuoWall }
});
#endregion Braum
#region Caitlyn
Spells.Add(
new SpellData
{
ChampionName = "Caitlyn",
SpellName = "CaitlynPiltoverPeacemaker",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotMissileLine,
Delay = 625,
Range = 1300,
Radius = 90,
MissileSpeed = 2200,
FixedRange = true,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "CaitlynPiltoverPeacemaker",
CollisionObjects = new[] { CollisionObjectTypes.YasuoWall }
});
Spells.Add(
new SpellData
{
ChampionName = "Caitlyn",
SpellName = "CaitlynEntrapment",
Slot = SpellSlot.E,
Type = SkillShotType.SkillshotMissileLine,
Delay = 125,
Range = 1000,
Radius = 80,
MissileSpeed = 2000,
FixedRange = true,
AddHitbox = true,
DangerValue = 1,
IsDangerous = false,
MissileSpellName = "CaitlynEntrapmentMissile",
CanBeRemoved = true,
CollisionObjects =
new[]
{ CollisionObjectTypes.Champions, CollisionObjectTypes.Minion, CollisionObjectTypes.YasuoWall }
});
#endregion Caitlyn
#region Cassiopeia
Spells.Add(
new SpellData
{
ChampionName = "Cassiopeia",
SpellName = "CassiopeiaNoxiousBlast",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotCircle,
Delay = 750,
Range = 850,
Radius = 150,
MissileSpeed = int.MaxValue,
FixedRange = false,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "CassiopeiaNoxiousBlast",
});
Spells.Add(
new SpellData
{
ChampionName = "Cassiopeia",
SpellName = "CassiopeiaPetrifyingGaze",
Slot = SpellSlot.R,
Type = SkillShotType.SkillshotCone,
Delay = 600,
Range = 825,
Radius = 80,
MissileSpeed = int.MaxValue,
FixedRange = false,
AddHitbox = false,
DangerValue = 5,
IsDangerous = true,
MissileSpellName = "CassiopeiaPetrifyingGaze",
});
#endregion Cassiopeia
#region Chogath
Spells.Add(
new SpellData
{
ChampionName = "Chogath",
SpellName = "Rupture",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotCircle,
Delay = 1200,
Range = 950,
Radius = 250,
MissileSpeed = int.MaxValue,
FixedRange = false,
AddHitbox = true,
DangerValue = 3,
IsDangerous = false,
MissileSpellName = "Rupture",
});
#endregion Chogath
#region Corki
Spells.Add(
new SpellData
{
ChampionName = "Corki",
SpellName = "PhosphorusBomb",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotCircle,
Delay = 300,
Range = 825,
Radius = 250,
MissileSpeed = 1000,
FixedRange = false,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "PhosphorusBombMissile",
CollisionObjects = new[] { CollisionObjectTypes.YasuoWall }
});
Spells.Add(
new SpellData
{
ChampionName = "Corki",
SpellName = "MissileBarrage",
Slot = SpellSlot.R,
Type = SkillShotType.SkillshotMissileLine,
Delay = 200,
Range = 1300,
Radius = 40,
MissileSpeed = 2000,
FixedRange = true,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "MissileBarrageMissile",
CanBeRemoved = true,
CollisionObjects =
new[]
{ CollisionObjectTypes.Champions, CollisionObjectTypes.Minion, CollisionObjectTypes.YasuoWall }
});
Spells.Add(
new SpellData
{
ChampionName = "Corki",
SpellName = "MissileBarrage2",
Slot = SpellSlot.R,
Type = SkillShotType.SkillshotMissileLine,
Delay = 200,
Range = 1500,
Radius = 40,
MissileSpeed = 2000,
FixedRange = true,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "MissileBarrageMissile2",
CanBeRemoved = true,
CollisionObjects =
new[]
{ CollisionObjectTypes.Champions, CollisionObjectTypes.Minion, CollisionObjectTypes.YasuoWall }
});
#endregion Corki
#region Darius
Spells.Add(
new SpellData
{
ChampionName = "Darius",
SpellName = "DariusCleave",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotCircle,
Delay = 750,
Range = 0,
Radius = 425,
MissileSpeed = int.MaxValue,
FixedRange = true,
AddHitbox = true,
DangerValue = 3,
IsDangerous = false,
MissileSpellName = "DariusCleave",
FollowCaster = true,
DisabledByDefault = true,
});
Spells.Add(
new SpellData
{
ChampionName = "Darius",
SpellName = "DariusAxeGrabCone",
Slot = SpellSlot.E,
Type = SkillShotType.SkillshotCone,
Delay = 250,
Range = 550,
Radius = 80,
MissileSpeed = int.MaxValue,
FixedRange = true,
AddHitbox = false,
DangerValue = 3,
IsDangerous = true,
MissileSpellName = "DariusAxeGrabCone",
});
#endregion Darius
#region Diana
Spells.Add(
new SpellData
{
ChampionName = "Diana",
SpellName = "DianaArc",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotCircle,
Delay = 250,
Range = 895,
Radius = 195,
MissileSpeed = 1400,
FixedRange = false,
AddHitbox = true,
DangerValue = 3,
IsDangerous = true,
MissileSpellName = "DianaArcArc",
CollisionObjects = new[] { CollisionObjectTypes.YasuoWall }
});
Spells.Add(
new SpellData
{
ChampionName = "Diana",
SpellName = "DianaArcArc",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotArc,
Delay = 250,
Range = 895,
Radius = 195,
DontCross = true,
MissileSpeed = 1400,
FixedRange = false,
AddHitbox = true,
DangerValue = 3,
IsDangerous = true,
MissileSpellName = "DianaArcArc",
TakeClosestPath = true,
CollisionObjects = new[] { CollisionObjectTypes.YasuoWall }
});
#endregion Diana
#region DrMundo
Spells.Add(
new SpellData
{
ChampionName = "DrMundo",
SpellName = "InfectedCleaverMissileCast",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 1050,
Radius = 60,
MissileSpeed = 2000,
FixedRange = true,
AddHitbox = true,
DangerValue = 3,
IsDangerous = false,
MissileSpellName = "InfectedCleaverMissile",
CanBeRemoved = true,
CollisionObjects =
new[]
{ CollisionObjectTypes.Champions, CollisionObjectTypes.Minion, CollisionObjectTypes.YasuoWall }
});
#endregion DrMundo
#region Draven
Spells.Add(
new SpellData
{
ChampionName = "Draven",
SpellName = "DravenDoubleShot",
Slot = SpellSlot.E,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 1100,
Radius = 130,
MissileSpeed = 1400,
FixedRange = true,
AddHitbox = true,
DangerValue = 3,
IsDangerous = true,
MissileSpellName = "DravenDoubleShotMissile",
CanBeRemoved = true,
CollisionObjects = new[] { CollisionObjectTypes.YasuoWall }
});
Spells.Add(
new SpellData
{
ChampionName = "Draven",
SpellName = "DravenRCast",
Slot = SpellSlot.R,
Type = SkillShotType.SkillshotMissileLine,
Delay = 400,
Range = 20000,
Radius = 160,
MissileSpeed = 2000,
FixedRange = true,
AddHitbox = true,
DangerValue = 5,
IsDangerous = true,
MissileSpellName = "DravenR",
CollisionObjects = new[] { CollisionObjectTypes.YasuoWall }
});
#endregion Draven
#region Ekko
Spells.Add(
new SpellData
{
ChampionName = "Ekko",
SpellName = "EkkoQ",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 950,
Radius = 60,
MissileSpeed = 1650,
FixedRange = true,
AddHitbox = true,
DangerValue = 4,
IsDangerous = true,
MissileSpellName = "ekkoqmis",
CanBeRemoved = true,
CollisionObjects =
new[] { CollisionObjectTypes.Champions, CollisionObjectTypes.YasuoWall }
});
Spells.Add(
new SpellData
{
ChampionName = "Ekko",
SpellName = "EkkoW",
Slot = SpellSlot.W,
Type = SkillShotType.SkillshotCircle,
Delay = 3750,
Range = 1600,
Radius = 375,
MissileSpeed = 1650,
FixedRange = false,
DisabledByDefault = true,
AddHitbox = false,
DangerValue = 3,
IsDangerous = false,
MissileSpellName = "EkkoW",
CanBeRemoved = true
});
Spells.Add(
new SpellData
{
ChampionName = "Ekko",
SpellName = "EkkoR",
Slot = SpellSlot.R,
Type = SkillShotType.SkillshotCircle,
Delay = 250,
Range = 1600,
Radius = 375,
MissileSpeed = 1650,
FixedRange = true,
AddHitbox = true,
DangerValue = 3,
IsDangerous = false,
MissileSpellName = "EkkoR",
CanBeRemoved = true,
FromObjects = new[] { "Ekko_Base_R_TrailEnd.troy" }
});
#endregion Ekko
#region Elise
Spells.Add(
new SpellData
{
ChampionName = "Elise",
SpellName = "EliseHumanE",
Slot = SpellSlot.E,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 1100,
Radius = 55,
MissileSpeed = 1600,
FixedRange = true,
AddHitbox = true,
DangerValue = 4,
IsDangerous = true,
MissileSpellName = "EliseHumanE",
CanBeRemoved = true,
CollisionObjects =
new[]
{ CollisionObjectTypes.Champions, CollisionObjectTypes.Minion, CollisionObjectTypes.YasuoWall }
});
#endregion Elise
#region Evelynn
Spells.Add(
new SpellData
{
ChampionName = "Evelynn",
SpellName = "EvelynnR",
Slot = SpellSlot.R,
Type = SkillShotType.SkillshotCircle,
Delay = 250,
Range = 650,
Radius = 350,
MissileSpeed = int.MaxValue,
FixedRange = false,
AddHitbox = true,
DangerValue = 5,
IsDangerous = true,
MissileSpellName = "EvelynnR",
});
#endregion Evelynn
#region Ezreal
Spells.Add(
new SpellData
{
ChampionName = "Ezreal",
SpellName = "EzrealMysticShot",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 1200,
Radius = 60,
MissileSpeed = 2000,
FixedRange = true,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "EzrealMysticShotMissile",
ExtraMissileNames = new[] { "EzrealMysticShotPulseMissile" },
CanBeRemoved = true,
CollisionObjects =
new[]
{ CollisionObjectTypes.Champions, CollisionObjectTypes.Minion, CollisionObjectTypes.YasuoWall },
Id = 229,
});
Spells.Add(
new SpellData
{
ChampionName = "Ezreal",
SpellName = "EzrealEssenceFlux",
Slot = SpellSlot.W,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 1050,
Radius = 80,
MissileSpeed = 1600,
FixedRange = true,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "EzrealEssenceFluxMissile",
CollisionObjects = new[] { CollisionObjectTypes.YasuoWall },
});
Spells.Add(
new SpellData
{
ChampionName = "Ezreal",
SpellName = "EzrealTrueshotBarrage",
Slot = SpellSlot.R,
Type = SkillShotType.SkillshotMissileLine,
Delay = 1000,
Range = 20000,
Radius = 160,
MissileSpeed = 2000,
FixedRange = true,
AddHitbox = true,
DangerValue = 3,
IsDangerous = true,
MissileSpellName = "EzrealTrueshotBarrage",
CollisionObjects = new[] { CollisionObjectTypes.YasuoWall },
Id = 245,
});
#endregion Ezreal
#region Fiora
Spells.Add(
new SpellData
{
ChampionName = "Fiora",
SpellName = "FioraW",
Slot = SpellSlot.W,
Type = SkillShotType.SkillshotMissileLine,
Delay = 700,
Range = 800,
Radius = 70,
MissileSpeed = 3200,
FixedRange = true,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "FioraWMissile",
CollisionObjects = new[] { CollisionObjectTypes.YasuoWall },
});
#endregion Fiora
#region Fizz
Spells.Add(
new SpellData
{
ChampionName = "Fizz",
SpellName = "FizzMarinerDoom",
Slot = SpellSlot.R,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 1300,
Radius = 120,
MissileSpeed = 1350,
FixedRange = false,
AddHitbox = true,
DangerValue = 5,
IsDangerous = true,
MissileSpellName = "FizzMarinerDoomMissile",
CollisionObjects = new[] { CollisionObjectTypes.Champions, CollisionObjectTypes.YasuoWall },
CanBeRemoved = true,
});
#endregion Fizz
#region Galio
Spells.Add(
new SpellData
{
ChampionName = "Galio",
SpellName = "GalioResoluteSmite",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotCircle,
Delay = 250,
Range = 900,
Radius = 200,
MissileSpeed = 1300,
FixedRange = false,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "GalioResoluteSmite",
});
Spells.Add(
new SpellData
{
ChampionName = "Galio",
SpellName = "GalioRighteousGust",
Slot = SpellSlot.E,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 1200,
Radius = 120,
MissileSpeed = 1200,
FixedRange = true,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "GalioRighteousGust",
CollisionObjects = new[] { CollisionObjectTypes.YasuoWall },
});
Spells.Add(
new SpellData
{
ChampionName = "Galio",
SpellName = "GalioIdolOfDurand",
Slot = SpellSlot.R,
Type = SkillShotType.SkillshotCircle,
Delay = 250,
Range = 0,
Radius = 550,
MissileSpeed = int.MaxValue,
FixedRange = true,
AddHitbox = false,
DangerValue = 5,
IsDangerous = true,
MissileSpellName = "",
});
#endregion Galio
#region Gnar
Spells.Add(
new SpellData
{
ChampionName = "Gnar",
SpellName = "GnarQ",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 1125,
Radius = 60,
MissileSpeed = 2500,
MissileAccel = -3000,
MissileMaxSpeed = 2500,
MissileMinSpeed = 1400,
FixedRange = true,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
CanBeRemoved = true,
ForceRemove = true,
MissileSpellName = "gnarqmissile",
CollisionObjects = new[] { CollisionObjectTypes.YasuoWall },
});
Spells.Add(
new SpellData
{
ChampionName = "Gnar",
SpellName = "GnarQReturn",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotMissileLine,
Delay = 0,
Range = 2500,
Radius = 75,
MissileSpeed = 60,
MissileAccel = 800,
MissileMaxSpeed = 2600,
MissileMinSpeed = 60,
FixedRange = true,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
CanBeRemoved = true,
ForceRemove = true,
MissileSpellName = "GnarQMissileReturn",
DisableFowDetection = false,
DisabledByDefault = true,
CollisionObjects = new[] { CollisionObjectTypes.YasuoWall },
});
Spells.Add(
new SpellData
{
ChampionName = "Gnar",
SpellName = "GnarBigQ",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotMissileLine,
Delay = 500,
Range = 1150,
Radius = 90,
MissileSpeed = 2100,
FixedRange = true,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "GnarBigQMissile",
CollisionObjects = new[] { CollisionObjectTypes.YasuoWall },
});
Spells.Add(
new SpellData
{
ChampionName = "Gnar",
SpellName = "GnarBigW",
Slot = SpellSlot.W,
Type = SkillShotType.SkillshotLine,
Delay = 600,
Range = 600,
Radius = 80,
MissileSpeed = int.MaxValue,
FixedRange = true,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "GnarBigW",
});
Spells.Add(
new SpellData
{
ChampionName = "Gnar",
SpellName = "GnarE",
Slot = SpellSlot.E,
Type = SkillShotType.SkillshotCircle,
Delay = 0,
Range = 473,
Radius = 150,
MissileSpeed = 903,
FixedRange = false,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "GnarE",
});
Spells.Add(
new SpellData
{
ChampionName = "Gnar",
SpellName = "GnarBigE",
Slot = SpellSlot.E,
Type = SkillShotType.SkillshotCircle,
Delay = 250,
Range = 475,
Radius = 200,
MissileSpeed = 1000,
FixedRange = false,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "GnarBigE",
});
Spells.Add(
new SpellData
{
ChampionName = "Gnar",
SpellName = "GnarR",
Slot = SpellSlot.R,
Type = SkillShotType.SkillshotCircle,
Delay = 250,
Range = 0,
Radius = 500,
MissileSpeed = int.MaxValue,
FixedRange = true,
AddHitbox = false,
DangerValue = 5,
IsDangerous = true,
MissileSpellName = "",
});
#endregion
#region Gragas
Spells.Add(
new SpellData
{
ChampionName = "Gragas",
SpellName = "GragasQ",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotCircle,
Delay = 250,
Range = 1100,
Radius = 275,
MissileSpeed = 1300,
FixedRange = false,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "GragasQMissile",
ExtraDuration = 4500,
ToggleParticleName = "Gragas_.+_Q_(Enemy|Ally)",
DontCross = true,
});
Spells.Add(
new SpellData
{
ChampionName = "Gragas",
SpellName = "GragasE",
Slot = SpellSlot.E,
Type = SkillShotType.SkillshotMissileLine,
Delay = 0,
Range = 950,
Radius = 200,
MissileSpeed = 1200,
FixedRange = true,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "GragasE",
CanBeRemoved = true,
ExtraRange = 300,
CollisionObjects = new[] { CollisionObjectTypes.Champions, CollisionObjectTypes.Minion },
});
Spells.Add(
new SpellData
{
ChampionName = "Gragas",
SpellName = "GragasR",
Slot = SpellSlot.R,
Type = SkillShotType.SkillshotCircle,
Delay = 250,
Range = 1050,
Radius = 375,
MissileSpeed = 1800,
FixedRange = false,
AddHitbox = true,
DangerValue = 5,
IsDangerous = true,
MissileSpellName = "GragasRBoom",
CollisionObjects = new[] { CollisionObjectTypes.YasuoWall },
});
#endregion Gragas
#region Graves
Spells.Add(
new SpellData
{
ChampionName = "Graves",
SpellName = "GravesClusterShot",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 1000,
Radius = 50,
MissileSpeed = 2000,
FixedRange = true,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "GravesClusterShotAttack",
CollisionObjects = new[] { CollisionObjectTypes.YasuoWall },
MultipleNumber = 3,
MultipleAngle = 15 * (float) Math.PI / 180,
});
Spells.Add(
new SpellData
{
ChampionName = "Graves",
SpellName = "GravesChargeShot",
Slot = SpellSlot.R,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 1100,
Radius = 100,
MissileSpeed = 2100,
FixedRange = true,
AddHitbox = true,
DangerValue = 5,
IsDangerous = true,
MissileSpellName = "GravesChargeShotShot",
CollisionObjects =
new[]
{ CollisionObjectTypes.Champions, CollisionObjectTypes.Minion, CollisionObjectTypes.YasuoWall },
});
#endregion Graves
#region Heimerdinger
Spells.Add(
new SpellData
{
ChampionName = "Heimerdinger",
SpellName = "Heimerdingerwm",
Slot = SpellSlot.W,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 1500,
Radius = 70,
MissileSpeed = 1800,
FixedRange = true,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "HeimerdingerWAttack2",
CollisionObjects = new[] { CollisionObjectTypes.YasuoWall },
});
Spells.Add(
new SpellData
{
ChampionName = "Heimerdinger",
SpellName = "HeimerdingerE",
Slot = SpellSlot.E,
Type = SkillShotType.SkillshotCircle,
Delay = 250,
Range = 925,
Radius = 100,
MissileSpeed = 1200,
FixedRange = false,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "heimerdingerespell",
CollisionObjects = new[] { CollisionObjectTypes.YasuoWall },
});
#endregion Heimerdinger
#region Irelia
Spells.Add(
new SpellData
{
ChampionName = "Irelia",
SpellName = "IreliaTranscendentBlades",
Slot = SpellSlot.R,
Type = SkillShotType.SkillshotMissileLine,
Delay = 0,
Range = 1200,
Radius = 65,
MissileSpeed = 1600,
FixedRange = true,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "IreliaTranscendentBlades",
CollisionObjects = new[] { CollisionObjectTypes.YasuoWall },
});
#endregion Irelia
#region Janna
Spells.Add(
new SpellData
{
ChampionName = "Janna",
SpellName = "JannaQ",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 1700,
Radius = 120,
MissileSpeed = 900,
FixedRange = false,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "HowlingGaleSpell",
CollisionObjects = new[] { CollisionObjectTypes.YasuoWall },
});
#endregion Janna
#region JarvanIV
Spells.Add(
new SpellData
{
ChampionName = "JarvanIV",
SpellName = "JarvanIVDragonStrike",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotLine,
Delay = 600,
Range = 770,
Radius = 70,
MissileSpeed = int.MaxValue,
FixedRange = true,
AddHitbox = true,
DangerValue = 3,
IsDangerous = false,
});
Spells.Add(
new SpellData
{
ChampionName = "JarvanIV",
SpellName = "JarvanIVEQ",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 880,
Radius = 70,
MissileSpeed = 1450,
FixedRange = true,
AddHitbox = true,
DangerValue = 3,
IsDangerous = true,
});
Spells.Add(
new SpellData
{
ChampionName = "JarvanIV",
SpellName = "JarvanIVDemacianStandard",
Slot = SpellSlot.E,
Type = SkillShotType.SkillshotCircle,
Delay = 500,
Range = 860,
Radius = 175,
MissileSpeed = int.MaxValue,
FixedRange = false,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "JarvanIVDemacianStandard",
});
#endregion JarvanIV
#region Jayce
Spells.Add(
new SpellData
{
ChampionName = "Jayce",
SpellName = "jayceshockblast",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 1300,
Radius = 70,
MissileSpeed = 1450,
FixedRange = true,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "JayceShockBlastMis",
CanBeRemoved = true,
CollisionObjects =
new[]
{ CollisionObjectTypes.Champions, CollisionObjectTypes.Minion, CollisionObjectTypes.YasuoWall },
});
Spells.Add(
new SpellData
{
ChampionName = "Jayce",
SpellName = "JayceQAccel",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 1300,
Radius = 70,
MissileSpeed = 2350,
FixedRange = true,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "JayceShockBlastWallMis",
CanBeRemoved = true,
CollisionObjects =
new[]
{ CollisionObjectTypes.Champions, CollisionObjectTypes.Minion, CollisionObjectTypes.YasuoWall },
});
#endregion Jayce
#region Jinx
//TODO: Detect the animation from fow instead of the missile.
Spells.Add(
new SpellData
{
ChampionName = "Jinx",
SpellName = "JinxW",
Slot = SpellSlot.W,
Type = SkillShotType.SkillshotMissileLine,
Delay = 600,
Range = 1500,
Radius = 60,
MissileSpeed = 3300,
FixedRange = true,
AddHitbox = true,
DangerValue = 3,
IsDangerous = true,
MissileSpellName = "JinxWMissile",
CanBeRemoved = true,
CollisionObjects =
new[]
{ CollisionObjectTypes.Champions, CollisionObjectTypes.Minion, CollisionObjectTypes.YasuoWall },
});
Spells.Add(
new SpellData
{
ChampionName = "Jinx",
SpellName = "JinxR",
Slot = SpellSlot.R,
Type = SkillShotType.SkillshotMissileLine,
Delay = 600,
Range = 20000,
Radius = 140,
MissileSpeed = 1700,
FixedRange = true,
AddHitbox = true,
DangerValue = 5,
IsDangerous = true,
MissileSpellName = "JinxR",
CanBeRemoved = true,
CollisionObjects = new[] { CollisionObjectTypes.Champions, CollisionObjectTypes.YasuoWall },
});
#endregion Jinx
#region Kalista
Spells.Add(
new SpellData
{
ChampionName = "Kalista",
SpellName = "KalistaMysticShot",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 1200,
Radius = 40,
MissileSpeed = 1700,
FixedRange = true,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "kalistamysticshotmis",
ExtraMissileNames = new[] { "kalistamysticshotmistrue" },
CanBeRemoved = true,
CollisionObjects =
new[] { CollisionObjectTypes.Champions, CollisionObjectTypes.Minion, CollisionObjectTypes.YasuoWall },
});
#endregion Kalista
#region Karma
Spells.Add(
new SpellData
{
ChampionName = "Karma",
SpellName = "KarmaQ",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 1050,
Radius = 60,
MissileSpeed = 1700,
FixedRange = true,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "KarmaQMissile",
CanBeRemoved = true,
CollisionObjects =
new[]
{ CollisionObjectTypes.Champions, CollisionObjectTypes.Minion, CollisionObjectTypes.YasuoWall },
});
//TODO: add the circle at the end.
Spells.Add(
new SpellData
{
ChampionName = "Karma",
SpellName = "KarmaQMantra",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 950,
Radius = 80,
MissileSpeed = 1700,
FixedRange = true,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "KarmaQMissileMantra",
CanBeRemoved = true,
CollisionObjects =
new[]
{ CollisionObjectTypes.Champions, CollisionObjectTypes.Minion, CollisionObjectTypes.YasuoWall },
});
#endregion Karma
#region Karthus
Spells.Add(
new SpellData
{
ChampionName = "Karthus",
SpellName = "KarthusLayWasteA2",
ExtraSpellNames =
new[]
{
"karthuslaywastea3", "karthuslaywastea1", "karthuslaywastedeada1", "karthuslaywastedeada2",
"karthuslaywastedeada3"
},
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotCircle,
Delay = 625,
Range = 875,
Radius = 160,
MissileSpeed = int.MaxValue,
FixedRange = false,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "",
});
#endregion Karthus
#region Kassadin
Spells.Add(
new SpellData
{
ChampionName = "Kassadin",
SpellName = "RiftWalk",
Slot = SpellSlot.R,
Type = SkillShotType.SkillshotCircle,
Delay = 250,
Range = 450,
Radius = 270,
MissileSpeed = int.MaxValue,
FixedRange = false,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "RiftWalk",
});
#endregion Kassadin
#region Kennen
Spells.Add(
new SpellData
{
ChampionName = "Kennen",
SpellName = "KennenShurikenHurlMissile1",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotMissileLine,
Delay = 125,
Range = 1050,
Radius = 50,
MissileSpeed = 1700,
FixedRange = true,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "KennenShurikenHurlMissile1",
CanBeRemoved = true,
CollisionObjects =
new[]
{ CollisionObjectTypes.Champions, CollisionObjectTypes.Minion, CollisionObjectTypes.YasuoWall },
});
#endregion Kennen
#region Khazix
Spells.Add(
new SpellData
{
ChampionName = "Khazix",
SpellName = "KhazixW",
ExtraSpellNames = new[] { "khazixwlong" },
Slot = SpellSlot.W,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 1025,
Radius = 73,
MissileSpeed = 1700,
FixedRange = true,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "KhazixWMissile",
CanBeRemoved = true,
MultipleNumber = 3,
MultipleAngle = 22f * (float) Math.PI / 180,
CollisionObjects =
new[]
{ CollisionObjectTypes.Champions, CollisionObjectTypes.Minion, CollisionObjectTypes.YasuoWall },
});
Spells.Add(
new SpellData
{
ChampionName = "Khazix",
SpellName = "KhazixE",
Slot = SpellSlot.E,
Type = SkillShotType.SkillshotCircle,
Delay = 250,
Range = 600,
Radius = 300,
MissileSpeed = 1500,
FixedRange = false,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "KhazixE",
});
#endregion Khazix
#region Kogmaw
Spells.Add(
new SpellData
{
ChampionName = "Kogmaw",
SpellName = "KogMawQ",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 1200,
Radius = 70,
MissileSpeed = 1650,
FixedRange = true,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "KogMawQMis",
CanBeRemoved = true,
CollisionObjects =
new[]
{ CollisionObjectTypes.Champions, CollisionObjectTypes.Minion, CollisionObjectTypes.YasuoWall },
});
Spells.Add(
new SpellData
{
ChampionName = "Kogmaw",
SpellName = "KogMawVoidOoze",
Slot = SpellSlot.E,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 1360,
Radius = 120,
MissileSpeed = 1400,
FixedRange = true,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "KogMawVoidOozeMissile",
CollisionObjects = new[] { CollisionObjectTypes.YasuoWall },
});
Spells.Add(
new SpellData
{
ChampionName = "Kogmaw",
SpellName = "KogMawLivingArtillery",
Slot = SpellSlot.R,
Type = SkillShotType.SkillshotCircle,
Delay = 1200,
Range = 1800,
Radius = 150,
MissileSpeed = int.MaxValue,
FixedRange = false,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "KogMawLivingArtillery",
});
#endregion Kogmaw
#region Leblanc
Spells.Add(
new SpellData
{
ChampionName = "Leblanc",
SpellName = "LeblancSlide",
Slot = SpellSlot.W,
Type = SkillShotType.SkillshotCircle,
Delay = 0,
Range = 600,
Radius = 220,
MissileSpeed = 1450,
FixedRange = false,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "LeblancSlide",
});
Spells.Add(
new SpellData
{
ChampionName = "Leblanc",
SpellName = "LeblancSlideM",
Slot = SpellSlot.R,
Type = SkillShotType.SkillshotCircle,
Delay = 0,
Range = 600,
Radius = 220,
MissileSpeed = 1450,
FixedRange = false,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "LeblancSlideM",
});
Spells.Add(
new SpellData
{
ChampionName = "Leblanc",
SpellName = "LeblancSoulShackle",
Slot = SpellSlot.E,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 950,
Radius = 70,
MissileSpeed = 1600,
FixedRange = true,
AddHitbox = true,
DangerValue = 3,
IsDangerous = true,
MissileSpellName = "LeblancSoulShackle",
CanBeRemoved = true,
CollisionObjects =
new[]
{ CollisionObjectTypes.Champions, CollisionObjectTypes.Minion, CollisionObjectTypes.YasuoWall },
});
Spells.Add(
new SpellData
{
ChampionName = "Leblanc",
SpellName = "LeblancSoulShackleM",
Slot = SpellSlot.R,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 950,
Radius = 70,
MissileSpeed = 1600,
FixedRange = true,
AddHitbox = true,
DangerValue = 3,
IsDangerous = true,
MissileSpellName = "LeblancSoulShackleM",
CanBeRemoved = true,
CollisionObjects =
new[]
{ CollisionObjectTypes.Champions, CollisionObjectTypes.Minion, CollisionObjectTypes.YasuoWall },
});
#endregion Leblanc
#region LeeSin
Spells.Add(
new SpellData
{
ChampionName = "LeeSin",
SpellName = "BlindMonkQOne",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 1100,
Radius = 65,
MissileSpeed = 1800,
FixedRange = true,
AddHitbox = true,
DangerValue = 3,
IsDangerous = true,
MissileSpellName = "BlindMonkQOne",
CanBeRemoved = true,
CollisionObjects =
new[]
{ CollisionObjectTypes.Champions, CollisionObjectTypes.Minion, CollisionObjectTypes.YasuoWall },
});
#endregion LeeSin
#region Leona
Spells.Add(
new SpellData
{
ChampionName = "Leona",
SpellName = "LeonaZenithBlade",
Slot = SpellSlot.E,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 905,
Radius = 70,
MissileSpeed = 2000,
FixedRange = true,
AddHitbox = true,
DangerValue = 3,
IsDangerous = true,
TakeClosestPath = true,
MissileSpellName = "LeonaZenithBladeMissile",
CollisionObjects = new[] { CollisionObjectTypes.YasuoWall },
});
Spells.Add(
new SpellData
{
ChampionName = "Leona",
SpellName = "LeonaSolarFlare",
Slot = SpellSlot.R,
Type = SkillShotType.SkillshotCircle,
Delay = 1000,
Range = 1200,
Radius = 300,
MissileSpeed = int.MaxValue,
FixedRange = false,
AddHitbox = true,
DangerValue = 5,
IsDangerous = true,
MissileSpellName = "LeonaSolarFlare",
});
#endregion Leona
#region Lissandra
Spells.Add(
new SpellData
{
ChampionName = "Lissandra",
SpellName = "LissandraQ",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 700,
Radius = 75,
MissileSpeed = 2200,
FixedRange = true,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "LissandraQMissile",
CollisionObjects = new[] { CollisionObjectTypes.YasuoWall },
});
Spells.Add(
new SpellData
{
ChampionName = "Lissandra",
SpellName = "LissandraQShards",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 700,
Radius = 90,
MissileSpeed = 2200,
FixedRange = true,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "lissandraqshards",
CollisionObjects = new[] { CollisionObjectTypes.YasuoWall },
});
Spells.Add(
new SpellData
{
ChampionName = "Lissandra",
SpellName = "LissandraE",
Slot = SpellSlot.E,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 1025,
Radius = 125,
MissileSpeed = 850,
FixedRange = true,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "LissandraEMissile",
CollisionObjects = new[] { CollisionObjectTypes.YasuoWall },
});
#endregion Lulu
#region Lucian
Spells.Add(
new SpellData
{
ChampionName = "Lucian",
SpellName = "LucianQ",
Slot = SpellSlot.Q,
Type = SkillShotType.SkillshotLine,
Delay = 500,
Range = 1300,
Radius = 65,
MissileSpeed = int.MaxValue,
FixedRange = true,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "LucianQ",
});
Spells.Add(
new SpellData
{
ChampionName = "Lucian",
SpellName = "LucianW",
Slot = SpellSlot.W,
Type = SkillShotType.SkillshotMissileLine,
Delay = 250,
Range = 1000,
Radius = 55,
MissileSpeed = 1600,
FixedRange = true,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "lucianwmissile",
});
Spells.Add(
new SpellData
{
ChampionName = "Lucian",
SpellName = "LucianRMis",
Slot = SpellSlot.R,
Type = SkillShotType.SkillshotMissileLine,
Delay = 500,
Range = 1400,
Radius = 110,
MissileSpeed = 2800,
FixedRange = true,
AddHitbox = true,
DangerValue = 2,
IsDangerous = false,
MissileSpellName = "lucianrmissileoffhand",
|
[
" ExtraMissileNames = new[] { \"lucianrmissile\" },"
] | 5,265
|
lcc
|
csharp
| null |
57cd1d16d832804a0b41e45c65ffbdf55dee0d169df79a8e
|
|
package com.sirma.sep.model.management;
import static com.sirma.sep.model.management.ModelsFakeCreator.createStringMap;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.when;
import java.util.Arrays;
import java.util.Collections;
import java.util.UUID;
import javax.enterprise.inject.Produces;
import javax.inject.Inject;
import javax.jms.Message;
import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.ValueFactory;
import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.jglue.cdiunit.ActivatedAlternatives;
import org.jglue.cdiunit.AdditionalClasses;
import org.jglue.cdiunit.AdditionalClasspaths;
import org.jglue.cdiunit.AdditionalPackages;
import org.jglue.cdiunit.CdiRunner;
import org.junit.After;
import org.junit.Before;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import com.sirma.itt.emf.cls.persister.CodeListPersister;
import com.sirma.itt.emf.cls.validator.CodeValidator;
import com.sirma.itt.seip.configuration.annotation.Configuration;
import com.sirma.itt.seip.db.DbDao;
import com.sirma.itt.seip.definition.SemanticDefinitionService;
import com.sirma.itt.seip.definition.dozer.DefinitionsDozerProvider;
import com.sirma.itt.seip.definition.label.LabelDefinition;
import com.sirma.itt.seip.definition.label.LabelService;
import com.sirma.itt.seip.domain.codelist.CodelistService;
import com.sirma.itt.seip.domain.definition.label.LabelProvider;
import com.sirma.itt.seip.event.EventService;
import com.sirma.itt.seip.instance.DomainInstanceService;
import com.sirma.itt.seip.instance.validation.InstanceValidationResult;
import com.sirma.itt.seip.instance.validation.InstanceValidationService;
import com.sirma.itt.seip.instance.validation.ValidationContext;
import com.sirma.itt.seip.mapping.ObjectMapper;
import com.sirma.itt.seip.plugin.Extension;
import com.sirma.itt.seip.resources.EmfUser;
import com.sirma.itt.seip.security.context.SecurityContext;
import com.sirma.itt.seip.testutil.fakes.TransactionSupportFake;
import com.sirma.itt.seip.testutil.mocks.ConfigurationPropertyMock;
import com.sirma.itt.seip.tx.TransactionSupport;
import com.sirma.itt.semantic.NamespaceRegistryService;
import com.sirma.itt.semantic.model.vocabulary.EMF;
import com.sirma.sep.cls.CodeListService;
import com.sirma.sep.definition.DefinitionImportService;
import com.sirma.sep.model.ModelImportService;
import com.sirma.sep.model.management.codelists.CodeListsProvider;
import com.sirma.sep.model.management.definition.DefinitionModelConverter;
import com.sirma.sep.model.management.definition.export.GenericDefinitionConverter;
import com.sirma.sep.model.management.deploy.definition.steps.DefinitionChangeSetStep;
import com.sirma.sep.model.management.operation.ModifyAttributeChangeSetOperation;
import com.sirma.sep.model.management.stubs.LabelServiceStub;
import com.sirmaenterprise.sep.jms.api.SenderService;
/**
* Base component test for the model management functionality.
* <p>
* Includes the mandatory stubbed services and mocks to be able to run tests.
*
* @author Mihail Radkov
*/
@RunWith(CdiRunner.class)
@AdditionalClasses({ ModelManagementServiceImpl.class, DefinitionsDozerProvider.class, ModelUpdater.class, ModelPersistence.class,
ModelUpdateHandler.class, ModelsResetObserver.class, ContextualFakeProducer.class })
@AdditionalPackages(
{ DefinitionModelConverter.class, CodeListsProvider.class, ModifyAttributeChangeSetOperation.class, SenderService.class,
DefinitionChangeSetStep.class, GenericDefinitionConverter.class })
@AdditionalClasspaths({ ObjectMapper.class, Extension.class, EventService.class, Message.class })
@ActivatedAlternatives({ ModelManagementDeploymentConfigurationsFake.class })
public abstract class BaseModelManagementComponentTest {
@Produces
@Mock
protected SemanticDefinitionService semanticDefinitionService;
protected SemanticDefinitionServiceStub semanticDefinitionServiceStub;
@Produces
@Mock
protected DefinitionImportService definitionImportService;
protected DefinitionImportServiceStub definitionImportServiceStub;
@Produces
@Mock
protected ModelImportService modelImportService;
protected ModelImportServiceStub modelImportServiceStub;
@Produces
@Mock
protected CodelistService codelistService;
@Produces
@Mock
protected CodeListPersister codeListPersister;
@Produces
@Mock
protected CodeListService codeListService;
protected CodelistServiceStub codelistServiceStub;
@Produces
@Mock
protected CodeValidator codeValidator;
@Produces
@Mock
protected LabelService labelService;
protected LabelServiceStub labelServiceStub;
@Produces
protected NamespaceRegistryService namespaceRegistryService = new NamespaceRegistryFake();
@Produces
@Mock
protected SecurityContext securityContext;
@Produces
protected DbDao dbDao = new DbDaoFake();
@Produces
@Mock
protected SenderService senderService;
protected SenderServiceStub senderServiceStub;
@Inject
protected EventService eventService;
@Produces
protected RepositoryConnection semanticDatabase = new RepositoryConnectionFake();
@Produces
protected ValueFactory valueFactory = SimpleValueFactory.getInstance();
@Produces
private TransactionSupport transactionSupport = new TransactionSupportFake();
@Produces
@Mock
protected DomainInstanceService domainInstanceService;
protected DomainInstanceServiceStub domainInstanceServiceStub;
@Produces
@Mock
protected InstanceValidationService instanceValidationService;
@Produces
@Configuration
protected ConfigurationPropertyMock<IRI> deploymentContext = new ConfigurationPropertyMock<>();
@Produces
@Mock
protected LabelProvider labelProvider;
@Before
public void baseBefore() {
semanticDefinitionServiceStub = new SemanticDefinitionServiceStub(semanticDefinitionService);
labelServiceStub = new LabelServiceStub(labelService);
definitionImportServiceStub = new DefinitionImportServiceStub(definitionImportService, labelServiceStub);
codelistServiceStub = new CodelistServiceStub(codeListService);
senderServiceStub = new SenderServiceStub(senderService);
mockSecurityContext();
}
@Before
public void stubModelImportService() {
modelImportServiceStub = new ModelImportServiceStub(modelImportService);
// All validation will be valid unless re-stubbed
modelImportServiceStub.validModels();
}
@Before
public void stubInstanceValidation() {
// All validation will be valid unless re-stubbed
InstanceValidationResult valid = new InstanceValidationResult(Collections.emptyList());
when(instanceValidationService.validate(any(ValidationContext.class))).thenReturn(valid);
}
@Before
public void stubDomainInstanceService() {
domainInstanceServiceStub = new DomainInstanceServiceStub(domainInstanceService);
}
@Before
public void stubConfigurations() {
deploymentContext.setValue(EMF.DATA_CONTEXT);
}
@Before
public void stubLabelProvider() {
when(labelProvider.getLabel(anyString())).then(invocation -> invocation.getArgumentAt(0, String.class) + "_translated");
when(labelProvider.getLabel(anyString(), anyString())).then(
invocation -> invocation.getArgumentAt(0, String.class) + "_translated_in_" + invocation.getArgumentAt(1, String.class));
}
@After
public void cleanUp() {
// Remove temporary files/dirs
definitionImportServiceStub.clear();
}
/**
* Stubs the import/export service with the provided definition file names.
*
* @param definitions file names of definition XMLs
*/
protected void withDefinitions(String... definitions) {
Arrays.stream(definitions).forEach(definitionImportServiceStub::withDefinition);
}
protected void withLabelDefinitionFor(String labelId, String... labels) {
LabelDefinition labelDefinition = LabelServiceStub.build(labelId, createStringMap(labels));
labelServiceStub.withLabelDefinition(labelDefinition);
}
protected void withLabelDefinitionDefinedIn(String labelId, String definedIn, String... labels) {
LabelDefinition labelDefinition = LabelServiceStub.build(labelId, definedIn, createStringMap(labels));
labelServiceStub.withLabelDefinition(labelDefinition);
}
protected void mockSecurityContext() {
when(securityContext.getCurrentTenantId()).thenReturn("test.tenant");
|
[
"\t\tEmfUser user = new EmfUser(\"admin@test.tenant\");"
] | 505
|
lcc
|
java
| null |
46847858f2d1579242f46d971b4229d6cb8d79b8a5dea86e
|
|
import unittest
import os
import mock
import errno
import testlib
class TestTestContext(unittest.TestCase):
def test_generate_inventory_file(self):
context = testlib.TestContext()
context.inventory = dict(key='value')
self.assertEquals("key='value'", context.generate_inventory_contents())
@testlib.with_context
def test_adapter_adds_scsi_host_entry(self, context):
context.add_adapter(testlib.SCSIAdapter())
self.assertEquals(['host0'], os.listdir('/sys/class/scsi_host'))
@testlib.with_context
def test_add_disk_adds_scsi_disk_entry(self, context):
import glob
adapter = context.add_adapter(testlib.SCSIAdapter())
adapter.add_disk()
self.assertEquals(
['/sys/class/scsi_disk/0:0:0:0'],
glob.glob('/sys/class/scsi_disk/0*'))
@testlib.with_context
def test_add_disk_adds_scsibus_entry(self, context):
import glob
adapter = context.add_adapter(testlib.SCSIAdapter())
adapter.long_id = 'HELLO'
adapter.add_disk()
self.assertEquals(
['/dev/disk/by-scsibus/HELLO-0:0:0:0'],
glob.glob('/dev/disk/by-scsibus/*'))
@testlib.with_context
def test_add_disk_adds_device(self, context):
adapter = context.add_adapter(testlib.SCSIAdapter())
adapter.add_disk()
self.assertEquals(
['sda'],
os.listdir('/sys/class/scsi_disk/0:0:0:0/device/block'))
@testlib.with_context
def test_add_disk_adds_disk_by_id_entry(self, context):
adapter = context.add_adapter(testlib.SCSIAdapter())
disk = adapter.add_disk()
disk.long_id = 'SOMEID'
self.assertEquals(['SOMEID'], os.listdir('/dev/disk/by-id'))
@testlib.with_context
def test_add_disk_adds_glob(self, context):
import glob
adapter = context.add_adapter(testlib.SCSIAdapter())
disk = adapter.add_disk()
self.assertEquals(['/dev/disk/by-id'], glob.glob('/dev/disk/by-id'))
@testlib.with_context
def test_add_disk_path_exists(self, context):
adapter = context.add_adapter(testlib.SCSIAdapter())
disk = adapter.add_disk()
self.assertTrue(os.path.exists('/dev/disk/by-id'))
@testlib.with_context
def test_add_parameter_parameter_file_exists(self, context):
adapter = context.add_adapter(testlib.SCSIAdapter())
disk = adapter.add_disk()
adapter.add_parameter('fc_host', {'node_name': 'ignored'})
self.assertTrue(os.path.exists('/sys/class/fc_host/host0/node_name'))
@testlib.with_context
def test_add_parameter_parameter_file_contents(self, context):
adapter = context.add_adapter(testlib.SCSIAdapter())
disk = adapter.add_disk()
adapter.add_parameter('fc_host', {'node_name': 'value'})
param_file = open('/sys/class/fc_host/host0/node_name')
param_value = param_file.read()
param_file.close()
self.assertEquals('value', param_value)
@testlib.with_context
def test_uname_explicitly_defined(self, context):
context.kernel_version = 'HELLO'
import os
result = os.uname()
self.assertEquals('HELLO', result[2])
@testlib.with_context
def test_uname_default_kernel_version(self, context):
import os
result = os.uname()
self.assertEquals('3.1', result[2])
@testlib.with_context
def test_inventory(self, context):
context.inventory = {}
inventory_file = open('/etc/xensource-inventory', 'rb')
inventory = inventory_file.read()
inventory_file.close()
self.assertEquals('', inventory)
@testlib.with_context
def test_default_inventory(self, context):
inventory_file = open('/etc/xensource-inventory', 'rb')
inventory = inventory_file.read()
inventory_file.close()
self.assertEquals("PRIMARY_DISK='/dev/disk/by-id/primary'", inventory)
@testlib.with_context
def test_exists_returns_false_for_non_existing(self, context):
self.assertFalse(os.path.exists('somefile'))
@testlib.with_context
def test_exists_returns_true_for_root(self, context):
self.assertTrue(os.path.exists('/'))
@testlib.with_context
def test_stat_nonexistent_file_throws_oserror(self, context):
self.assertRaises(
OSError,
lambda: os.stat('/nonexistingstuff'))
@testlib.with_context
def test_stat_does_not_fail_with_existing_file(self, context):
os.makedirs('/existingstuff')
os.stat('/existingstuff')
@testlib.with_context
def test_error_codes_read(self, context):
context.setup_error_codes()
errorcodes_file = open('/opt/xensource/sm/XE_SR_ERRORCODES.xml', 'rb')
errorcodes = errorcodes_file.read()
errorcodes_file.close()
self.assertTrue("<SM-errorcodes>" in errorcodes)
@testlib.with_context
def test_executable_shows_up_on_filesystem(self, context):
context.add_executable('/something', None)
self.assertTrue(os.path.exists('/something'))
@testlib.with_context
def test_subprocess_execution(self, context):
context.add_executable(
'something',
lambda args, inp: (1, inp + ' out', ','.join(args)))
import subprocess
proc = subprocess.Popen(
['something', 'a', 'b'],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
close_fds=True)
out, err = proc.communicate('in')
rc = proc.returncode
self.assertEquals(1, rc)
self.assertEquals('in out', out)
self.assertEquals('something,a,b', err)
@testlib.with_context
def test_modinfo(self, context):
import subprocess
proc = subprocess.Popen(
['/sbin/modinfo', '-d', 'somemodule'],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
close_fds=True)
out, err = proc.communicate('in')
rc = proc.returncode
self.assertEquals(0, rc)
self.assertEquals('somemodule-description', out)
self.assertEquals('', err)
@testlib.with_context
def test_makedirs_mocked_out(self, context):
import os
os.makedirs('/blah/subdir')
self.assertTrue(os.path.exists('/blah/subdir'))
@testlib.with_context
def test_makedirs_raises_if_exists(self, context):
import os
os.makedirs('/blah/subdir')
self.assertRaises(OSError, os.makedirs, '/blah/subdir')
@testlib.with_context
def test_setup_error_codes(self, context):
context.setup_error_codes()
self.assertTrue(
os.path.exists('/opt/xensource/sm/XE_SR_ERRORCODES.xml'))
@testlib.with_context
def test_write_a_file(self, context):
import os
os.makedirs('/blah/subdir')
f = open('/blah/subdir/somefile', 'w+')
f.write('hello')
f.close()
self.assertTrue(
('/blah/subdir/somefile', 'hello')
in list(context.generate_path_content()))
@testlib.with_context
def test_write_a_file_in_non_existing_dir(self, context):
with self.assertRaises(IOError) as cm:
open('/blah/subdir/somefile', 'w')
self.assertEquals(errno.ENOENT, cm.exception.errno)
@testlib.with_context
def test_file_returns_an_object_with_fileno_callable(self, context):
f = file('/file', 'w+')
self.assertTrue(hasattr(f, 'fileno'))
self.assertTrue(callable(f.fileno))
@testlib.with_context
def test_filenos_are_unique(self, context):
import os
os.makedirs('/blah/subdir')
file_1 = file('/blah/subdir/somefile', 'w+')
fileno_1 = file_1.fileno()
file_2 = file('/blah/subdir/somefile2', 'w+')
fileno_2 = file_2.fileno()
self.assertTrue(fileno_1 != fileno_2)
def test_get_created_directories(self):
context = testlib.TestContext()
context.fake_makedirs('/some/path')
self.assertEquals([
'/',
'/some',
'/some/path'],
context.get_created_directories())
def test_popen_raises_error(self):
import subprocess
|
[
" context = testlib.TestContext()"
] | 426
|
lcc
|
python
| null |
36ab3dcfde1587fd327a15d56d7d38fb65b8104458bbcd00
|
|
// <file>
// <copyright see="prj:///doc/copyright.txt"/>
// <license see="prj:///doc/license.txt"/>
// <owner name="Daniel Grunwald" email="daniel@danielgrunwald.de"/>
// <version>$Revision$</version>
// </file>
using System;
using System.Collections.Generic;
using System.IO;
namespace ICSharpCode.Core
{
/// <summary>
/// Class that helps starting up ICSharpCode.Core.
/// </summary>
/// <remarks>
/// Initializing ICSharpCode.Core requires initializing several static classes
/// and the <see cref="AddInTree"/>. <see cref="CoreStartup"/> does this work
/// for you, provided you use it like this:
/// 1. Create a new CoreStartup instance
/// 2. (Optional) Set the values of the properties.
/// 3. Call <see cref="StartCoreServices()"/>.
/// 4. Add "preinstalled" AddIns using <see cref="AddAddInsFromDirectory"/>
/// and <see cref="AddAddInFile"/>.
/// 5. (Optional) Call <see cref="ConfigureExternalAddIns"/> to support
/// disabling AddIns and installing external AddIns
/// 6. (Optional) Call <see cref="ConfigureUserAddIns"/> to support installing
/// user AddIns.
/// 7. Call <see cref="RunInitialization"/>.
/// </remarks>
public sealed class CoreStartup
{
List<string> addInFiles = new List<string>();
List<string> disabledAddIns = new List<string>();
bool externalAddInsConfigured;
string propertiesName;
string configDirectory;
string dataDirectory;
string applicationName;
/// <summary>
/// Sets the name used for the properties (only name, without path or extension).
/// Must be set before StartCoreServices() is called.
/// </summary>
public string PropertiesName {
get {
return propertiesName;
}
set {
if (value == null || value.Length == 0)
throw new ArgumentNullException("value");
propertiesName = value;
}
}
/// <summary>
/// Sets the directory name used for the property service.
/// Must be set before StartCoreServices() is called.
/// Use null to use the default path "%ApplicationData%\%ApplicationName%",
/// where %ApplicationData% is the system setting for
/// "c:\documents and settings\username\application data"
/// and %ApplicationName% is the application name you used in the
/// CoreStartup constructor call.
/// </summary>
public string ConfigDirectory {
get {
return configDirectory;
}
set {
configDirectory = value;
}
}
/// <summary>
/// Sets the data directory used to load resources.
/// Must be set before StartCoreServices() is called.
/// Use null to use the default path "ApplicationRootPath\data".
/// </summary>
public string DataDirectory {
get {
return dataDirectory;
}
set {
dataDirectory = value;
}
}
/// <summary>
/// Creates a new CoreStartup instance.
/// </summary>
/// <param name="applicationName">
/// The name of your application.
/// This is used as default title for message boxes,
/// default name for the configuration directory etc.
/// </param>
public CoreStartup(string applicationName)
{
if (applicationName == null)
throw new ArgumentNullException("applicationName");
this.applicationName = applicationName;
propertiesName = applicationName + "Properties";
MessageService.DefaultMessageBoxTitle = applicationName;
MessageService.ProductName = applicationName;
}
/// <summary>
/// Find AddIns by searching all .addin files recursively in <paramref name="addInDir"/>.
/// The found AddIns are added to the list of AddIn files to load.
/// </summary>
public void AddAddInsFromDirectory(string addInDir)
{
if (addInDir == null)
throw new ArgumentNullException("addInDir");
addInFiles.AddRange(FileUtility.SearchDirectory(addInDir, "*.addin"));
}
/// <summary>
/// Add the specified .addin file to the list of AddIn files to load.
/// </summary>
public void AddAddInFile(string addInFile)
{
if (addInFile == null)
throw new ArgumentNullException("addInFile");
addInFiles.Add(addInFile);
}
/// <summary>
/// Use the specified configuration file to store information about
/// disabled AddIns and external AddIns.
/// You have to call this method to support the <see cref="AddInManager"/>.
/// </summary>
/// <param name="addInConfigurationFile">
/// The name of the file used to store the list of disabled AddIns
/// and the list of installed external AddIns.
/// A good value for this parameter would be
/// <c>Path.Combine(<see cref="PropertyService.ConfigDirectory"/>, "AddIns.xml")</c>.
/// </param>
public void ConfigureExternalAddIns(string addInConfigurationFile)
{
externalAddInsConfigured = true;
AddInManager.ConfigurationFileName = addInConfigurationFile;
AddInManager.LoadAddInConfiguration(addInFiles, disabledAddIns);
}
/// <summary>
/// Configures user AddIn support.
/// </summary>
/// <param name="addInInstallTemp">
/// The AddIn installation temporary directory.
/// ConfigureUserAddIns will install the AddIns from this directory and
/// store the parameter value in <see cref="AddInManager.AddInInstallTemp"/>.
/// </param>
/// <param name="userAddInPath">
/// The path where user AddIns are installed to.
/// AddIns from this directory will be loaded.
/// </param>
public void ConfigureUserAddIns(string addInInstallTemp, string userAddInPath)
{
if (!externalAddInsConfigured) {
throw new InvalidOperationException("ConfigureExternalAddIns must be called before ConfigureUserAddIns");
}
AddInManager.AddInInstallTemp = addInInstallTemp;
AddInManager.UserAddInPath = userAddInPath;
if (Directory.Exists(addInInstallTemp)) {
AddInManager.InstallAddIns(disabledAddIns);
}
if (Directory.Exists(userAddInPath)) {
AddAddInsFromDirectory(userAddInPath);
}
}
/// <summary>
/// Initializes the AddIn system.
/// This loads the AddIns that were added to the list,
/// then it executes the <see cref="ICommand">commands</see>
/// in <c>/Workspace/Autostart</c>.
/// </summary>
public void RunInitialization()
{
AddInTree.Load(addInFiles, disabledAddIns);
// run workspace autostart commands
LoggingService.Info("Running autostart commands...");
foreach (ICommand command in AddInTree.BuildItems<ICommand>("/Workspace/Autostart", null, false)) {
try {
command.Run();
} catch (Exception ex) {
// allow startup to continue if some commands fail
MessageService.ShowError(ex);
}
}
}
/// <summary>
/// Starts the core services.
/// This initializes the PropertyService and ResourceService.
/// </summary>
public void StartCoreServices()
{
|
[
"\t\t\tif (configDirectory == null)"
] | 753
|
lcc
|
csharp
| null |
dc7f3a684d9468c9631828995c836b9722e18b4999cdd713
|
|
//
// System.Data.Constraint.cs
//
// Author:
// Franklin Wise <gracenote@earthlink.net>
// Daniel Morgan
// Tim Coleman (tim@timcoleman.com)
//
//
// (C) Ximian, Inc. 2002
// Copyright (C) Tim Coleman, 2002
//
//
// Copyright (C) 2004 Novell, Inc (http://www.novell.com)
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using System.Collections;
using System.ComponentModel;
using System.Runtime.InteropServices;
using System.Runtime.Serialization;
using System.Data.Common;
namespace System.Data {
[Serializable]
internal delegate void DelegateConstraintNameChange (object sender, string newName);
[DefaultProperty ("ConstraintName")]
#if !NET_2_0
[Serializable]
#endif
[TypeConverterAttribute (typeof (ConstraintConverter))]
public abstract class Constraint {
// Key used to store/look up the name-change handler in the EventHandlerList.
static readonly object beforeConstraintNameChange = new object ();
EventHandlerList events = new EventHandlerList ();
// Raised just before ConstraintName changes (see _onConstraintNameChange);
// gives the owning ConstraintCollection a chance to veto invalid names.
internal event DelegateConstraintNameChange BeforeConstraintNameChange {
add { events.AddHandler (beforeConstraintNameChange, value); }
remove { events.RemoveHandler (beforeConstraintNameChange, value); }
}
//if constraintName is not set then a name is
//created when it is added to
//the ConstraintCollection
//it can not be set to null, empty or duplicate
//once it has been added to the collection
private string _constraintName;
// User-defined key/value pairs, exposed through ExtendedProperties.
private PropertyCollection _properties;
// Backing index for this constraint; managed via the Index property.
private Index _index;
//Used for membership checking
private ConstraintCollection _constraintCollection;
// Owning DataSet; null until SetDataSet is called.
DataSet dataSet;
// Creates a constraint with an empty extended-property bag and no DataSet.
protected Constraint ()
{
dataSet = null;
_properties = new PropertyCollection ();
}
// The DataSet this constraint belongs to (null before SetDataSet is called).
[CLSCompliant (false)]
protected internal virtual DataSet _DataSet {
get { return dataSet; }
}
[DataCategory ("Data")]
#if !NET_2_0
[DataSysDescription ("Indicates the name of this constraint.")]
#endif
[DefaultValue ("")]
// Name of the constraint; the getter never returns null (unset reads as "").
public virtual string ConstraintName {
get { return _constraintName == null ? "" : _constraintName; }
set {
//This should only throw an exception when it
//is a member of a ConstraintCollection which
//means we should let the ConstraintCollection
//handle exceptions when this value changes
_onConstraintNameChange (value);
_constraintName = value;
}
}
[Browsable (false)]
[DataCategory ("Data")]
#if !NET_2_0
[DataSysDescription ("The collection that holds custom user information.")]
#endif
// Bag of custom user key/value pairs attached to this constraint.
public PropertyCollection ExtendedProperties {
get { return _properties; }
}
#if !NET_2_0
[DataSysDescription ("Indicates the table of this constraint.")]
#endif
// The table this constraint applies to; supplied by the concrete subclass.
public abstract DataTable Table {
get;
}
// Back-reference to the collection that owns this constraint, used for
// membership checking; assigned by the collection itself.
internal ConstraintCollection ConstraintCollection {
get { return _constraintCollection; }
set { _constraintCollection = value; }
}
// Fires BeforeConstraintNameChange so the owning ConstraintCollection can
// reject a null, empty or duplicate name before it is stored.
private void _onConstraintNameChange (string newName)
{
DelegateConstraintNameChange eh = events [beforeConstraintNameChange] as DelegateConstraintNameChange;
if (eh != null)
eh (this, newName);
}
//call once before adding a constraint to a collection
//will throw an exception to prevent the add if a rule is broken
internal abstract void AddToConstraintCollectionSetup (ConstraintCollection collection);
// Returns true when existing data violates this constraint; per the note in
// AssertConstraint, implementations also fill the RowErrors on violation.
internal abstract bool IsConstraintViolated ();
// Shared helper: raises the standard "failed to enable constraints" error.
internal static void ThrowConstraintException ()
{
throw new ConstraintException("Failed to enable constraints. One or more rows contain values violating non-null, unique, or foreign-key constraints.");
}
// Simple flag consulted through InitInProgress while deferred initialization
// of this constraint has not yet completed.
bool initInProgress = false;
internal virtual bool InitInProgress {
get { return initInProgress; }
set { initInProgress = value; }
}
// Hook for subclasses to finish deferred initialization; no-op by default.
internal virtual void FinishInit (DataTable table)
{
}
// Validates the whole table against this constraint and throws, unless the
// table is currently loading data or the DataSet has EnforceConstraints off.
internal void AssertConstraint ()
{
// The order is important.. IsConstraintViolated fills the RowErrors if it detects
// a violation
if (!IsConstraintViolated ())
return;
if (Table._duringDataLoad || (Table.DataSet != null && !Table.DataSet.EnforceConstraints))
return;
ThrowConstraintException ();
}
// Validates a single row against this constraint; implemented by subclasses.
internal abstract void AssertConstraint (DataRow row);
// No-op by default; presumably subclasses override to undo the effects of a
// failed AssertConstraint(row) - confirm against concrete constraints.
internal virtual void RollbackAssert (DataRow row)
{
}
//call once before removing a constraint to a collection
//can throw an exception to prevent the removal
internal abstract void RemoveFromConstraintCollectionCleanup (ConstraintCollection collection);
// Deliberate stub: not implemented in Mono (see [MonoTODO]).
[MonoTODO]
protected void CheckStateForProperty ()
{
throw new NotImplementedException ();
}
// Associates this constraint with its owning DataSet.
protected internal void SetDataSet (DataSet dataSet)
{
this.dataSet = dataSet;
}
// Replaces the extended-property bag wholesale.
internal void SetExtendedProperties (PropertyCollection properties)
{
_properties = properties;
}
// The index backing this constraint. Assigning a replacement first releases
// the previous index (ref count drop + table drop), then takes a reference
// on the new one; assigning null just releases the old index.
internal Index Index {
get { return _index; }
set {
if (_index != null) {
// Release the old index before accepting the new one.
_index.RemoveRef();
Table.DropIndex(_index);
}
_index = value;
if (_index != null)
_index.AddRef();
}
}
|
[
"\t\tinternal abstract bool IsColumnContained (DataColumn column);"
] | 743
|
lcc
|
csharp
| null |
1fbe52a6ae333eb46f1ea6000375cec54d0c1377ea171db3
|
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'pyslvs_ui/io/preference.ui'
#
# Created by: PyQt5 UI code generator 5.15.2
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from qtpy import QtCore, QtGui, QtWidgets
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName("Dialog")
Dialog.resize(865, 427)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap("icons:settings.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
Dialog.setWindowIcon(icon)
Dialog.setSizeGripEnabled(True)
Dialog.setModal(True)
self.verticalLayout_2 = QtWidgets.QVBoxLayout(Dialog)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
self.settings_ui_groupbox = QtWidgets.QGroupBox(Dialog)
self.settings_ui_groupbox.setObjectName("settings_ui_groupbox")
self.verticalLayout_3 = QtWidgets.QVBoxLayout(self.settings_ui_groupbox)
self.verticalLayout_3.setObjectName("verticalLayout_3")
self.gridLayout = QtWidgets.QGridLayout()
self.gridLayout.setObjectName("gridLayout")
self.zoomby_label = QtWidgets.QLabel(self.settings_ui_groupbox)
self.zoomby_label.setObjectName("zoomby_label")
self.gridLayout.addWidget(self.zoomby_label, 3, 2, 1, 1)
self.font_size_option = QtWidgets.QSpinBox(self.settings_ui_groupbox)
self.font_size_option.setMinimum(1)
self.font_size_option.setMaximum(30)
self.font_size_option.setSingleStep(2)
self.font_size_option.setObjectName("font_size_option")
self.gridLayout.addWidget(self.font_size_option, 0, 3, 1, 1)
self.tick_mark_label = QtWidgets.QLabel(self.settings_ui_groupbox)
self.tick_mark_label.setObjectName("tick_mark_label")
self.gridLayout.addWidget(self.tick_mark_label, 4, 2, 1, 1)
self.zoom_by_option = QtWidgets.QComboBox(self.settings_ui_groupbox)
self.zoom_by_option.setObjectName("zoom_by_option")
self.zoom_by_option.addItem("")
self.zoom_by_option.addItem("")
self.gridLayout.addWidget(self.zoom_by_option, 3, 3, 1, 1)
self.line_width_option = QtWidgets.QSpinBox(self.settings_ui_groupbox)
self.line_width_option.setMinimum(1)
self.line_width_option.setMaximum(10)
self.line_width_option.setDisplayIntegerBase(10)
self.line_width_option.setObjectName("line_width_option")
self.gridLayout.addWidget(self.line_width_option, 0, 1, 1, 1)
self.scale_factor_option = QtWidgets.QSpinBox(self.settings_ui_groupbox)
self.scale_factor_option.setMinimum(5)
self.scale_factor_option.setMaximum(100)
self.scale_factor_option.setSingleStep(5)
self.scale_factor_option.setObjectName("scale_factor_option")
self.gridLayout.addWidget(self.scale_factor_option, 1, 3, 1, 1)
self.linewidth_label = QtWidgets.QLabel(self.settings_ui_groupbox)
self.linewidth_label.setObjectName("linewidth_label")
self.gridLayout.addWidget(self.linewidth_label, 0, 0, 1, 1)
self.fontsize_label = QtWidgets.QLabel(self.settings_ui_groupbox)
self.fontsize_label.setObjectName("fontsize_label")
self.gridLayout.addWidget(self.fontsize_label, 0, 2, 1, 1)
self.snap_label = QtWidgets.QLabel(self.settings_ui_groupbox)
self.snap_label.setObjectName("snap_label")
self.gridLayout.addWidget(self.snap_label, 5, 0, 1, 1)
self.jointsize_label = QtWidgets.QLabel(self.settings_ui_groupbox)
self.jointsize_label.setObjectName("jointsize_label")
self.gridLayout.addWidget(self.jointsize_label, 4, 0, 1, 1)
self.pathwidth_label = QtWidgets.QLabel(self.settings_ui_groupbox)
self.pathwidth_label.setObjectName("pathwidth_label")
self.gridLayout.addWidget(self.pathwidth_label, 1, 0, 1, 1)
self.linktransparency_label = QtWidgets.QLabel(self.settings_ui_groupbox)
self.linktransparency_label.setObjectName("linktransparency_label")
self.gridLayout.addWidget(self.linktransparency_label, 2, 2, 1, 1)
self.margin_factor_option = QtWidgets.QSpinBox(self.settings_ui_groupbox)
self.margin_factor_option.setMaximum(30)
self.margin_factor_option.setSingleStep(5)
self.margin_factor_option.setObjectName("margin_factor_option")
self.gridLayout.addWidget(self.margin_factor_option, 3, 1, 1, 1)
self.toolbar_pos_label = QtWidgets.QLabel(self.settings_ui_groupbox)
self.toolbar_pos_label.setObjectName("toolbar_pos_label")
self.gridLayout.addWidget(self.toolbar_pos_label, 5, 2, 1, 1)
self.selectionradius_label = QtWidgets.QLabel(self.settings_ui_groupbox)
self.selectionradius_label.setObjectName("selectionradius_label")
self.gridLayout.addWidget(self.selectionradius_label, 2, 0, 1, 1)
self.scalefactor_label = QtWidgets.QLabel(self.settings_ui_groupbox)
self.scalefactor_label.setObjectName("scalefactor_label")
self.gridLayout.addWidget(self.scalefactor_label, 1, 2, 1, 1)
self.nav_toolbar_pos_option = QtWidgets.QComboBox(self.settings_ui_groupbox)
self.nav_toolbar_pos_option.setObjectName("nav_toolbar_pos_option")
self.nav_toolbar_pos_option.addItem("")
self.nav_toolbar_pos_option.addItem("")
self.gridLayout.addWidget(self.nav_toolbar_pos_option, 5, 3, 1, 1)
self.marginfactor_label = QtWidgets.QLabel(self.settings_ui_groupbox)
self.marginfactor_label.setObjectName("marginfactor_label")
self.gridLayout.addWidget(self.marginfactor_label, 3, 0, 1, 1)
self.joint_size_option = QtWidgets.QSpinBox(self.settings_ui_groupbox)
self.joint_size_option.setMinimum(1)
self.joint_size_option.setMaximum(100)
self.joint_size_option.setObjectName("joint_size_option")
self.gridLayout.addWidget(self.joint_size_option, 4, 1, 1, 1)
self.path_width_option = QtWidgets.QSpinBox(self.settings_ui_groupbox)
self.path_width_option.setMinimum(1)
self.path_width_option.setMaximum(5)
self.path_width_option.setObjectName("path_width_option")
self.gridLayout.addWidget(self.path_width_option, 1, 1, 1, 1)
self.link_trans_option = QtWidgets.QSpinBox(self.settings_ui_groupbox)
self.link_trans_option.setMaximum(80)
self.link_trans_option.setSingleStep(10)
self.link_trans_option.setObjectName("link_trans_option")
self.gridLayout.addWidget(self.link_trans_option, 2, 3, 1, 1)
self.snap_option = QtWidgets.QDoubleSpinBox(self.settings_ui_groupbox)
self.snap_option.setMaximum(50.0)
self.snap_option.setObjectName("snap_option")
self.gridLayout.addWidget(self.snap_option, 5, 1, 1, 1)
self.tick_mark_option = QtWidgets.QComboBox(self.settings_ui_groupbox)
self.tick_mark_option.setObjectName("tick_mark_option")
self.tick_mark_option.addItem("")
self.tick_mark_option.addItem("")
self.tick_mark_option.addItem("")
self.gridLayout.addWidget(self.tick_mark_option, 4, 3, 1, 1)
self.selection_radius_option = QtWidgets.QSpinBox(self.settings_ui_groupbox)
self.selection_radius_option.setMinimum(3)
self.selection_radius_option.setMaximum(10)
self.selection_radius_option.setObjectName("selection_radius_option")
self.gridLayout.addWidget(self.selection_radius_option, 2, 1, 1, 1)
self.default_zoom_label = QtWidgets.QLabel(self.settings_ui_groupbox)
self.default_zoom_label.setObjectName("default_zoom_label")
self.gridLayout.addWidget(self.default_zoom_label, 6, 0, 1, 1)
self.default_zoom_option = QtWidgets.QSpinBox(self.settings_ui_groupbox)
self.default_zoom_option.setObjectName("default_zoom_option")
self.gridLayout.addWidget(self.default_zoom_option, 6, 1, 1, 1)
self.verticalLayout_3.addLayout(self.gridLayout)
self.grab_no_background_option = QtWidgets.QCheckBox(self.settings_ui_groupbox)
self.grab_no_background_option.setObjectName("grab_no_background_option")
self.verticalLayout_3.addWidget(self.grab_no_background_option)
self.monochrome_option = QtWidgets.QCheckBox(self.settings_ui_groupbox)
self.monochrome_option.setObjectName("monochrome_option")
self.verticalLayout_3.addWidget(self.monochrome_option)
spacerItem = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.verticalLayout_3.addItem(spacerItem)
self.horizontalLayout.addWidget(self.settings_ui_groupbox)
self.verticalLayout = QtWidgets.QVBoxLayout()
self.verticalLayout.setObjectName("verticalLayout")
self.settings_kernels_groupBox = QtWidgets.QGroupBox(Dialog)
self.settings_kernels_groupBox.setObjectName("settings_kernels_groupBox")
self.verticalLayout_4 = QtWidgets.QVBoxLayout(self.settings_kernels_groupBox)
self.verticalLayout_4.setObjectName("verticalLayout_4")
self.formLayout_3 = QtWidgets.QFormLayout()
self.formLayout_3.setObjectName("formLayout_3")
self.planarsolver_label = QtWidgets.QLabel(self.settings_kernels_groupBox)
self.planarsolver_label.setObjectName("planarsolver_label")
self.formLayout_3.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.planarsolver_label)
self.planar_solver_option = QtWidgets.QComboBox(self.settings_kernels_groupBox)
self.planar_solver_option.setObjectName("planar_solver_option")
self.formLayout_3.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.planar_solver_option)
self.pathpreview_label = QtWidgets.QLabel(self.settings_kernels_groupBox)
self.pathpreview_label.setObjectName("pathpreview_label")
self.formLayout_3.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.pathpreview_label)
self.path_preview_option = QtWidgets.QComboBox(self.settings_kernels_groupBox)
self.path_preview_option.setObjectName("path_preview_option")
self.formLayout_3.setWidget(1, QtWidgets.QFormLayout.FieldRole, self.path_preview_option)
self.verticalLayout_4.addLayout(self.formLayout_3)
self.console_error_option = QtWidgets.QCheckBox(self.settings_kernels_groupBox)
self.console_error_option.setObjectName("console_error_option")
self.verticalLayout_4.addWidget(self.console_error_option)
self.verticalLayout.addWidget(self.settings_kernels_groupBox)
self.settings_project_groupbox = QtWidgets.QGroupBox(Dialog)
self.settings_project_groupbox.setObjectName("settings_project_groupbox")
self.formLayout_2 = QtWidgets.QFormLayout(self.settings_project_groupbox)
self.formLayout_2.setObjectName("formLayout_2")
self.undo_limit_label = QtWidgets.QLabel(self.settings_project_groupbox)
self.undo_limit_label.setObjectName("undo_limit_label")
self.formLayout_2.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.undo_limit_label)
self.undo_limit_option = QtWidgets.QSpinBox(self.settings_project_groupbox)
self.undo_limit_option.setMinimum(5)
self.undo_limit_option.setObjectName("undo_limit_option")
self.formLayout_2.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.undo_limit_option)
self.open_project_actions_label = QtWidgets.QLabel(self.settings_project_groupbox)
self.open_project_actions_label.setObjectName("open_project_actions_label")
self.formLayout_2.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.open_project_actions_label)
self.open_project_actions_option = QtWidgets.QComboBox(self.settings_project_groupbox)
self.open_project_actions_option.setObjectName("open_project_actions_option")
self.open_project_actions_option.addItem("")
self.open_project_actions_option.addItem("")
self.open_project_actions_option.addItem("")
self.formLayout_2.setWidget(1, QtWidgets.QFormLayout.FieldRole, self.open_project_actions_option)
self.file_type_label = QtWidgets.QLabel(self.settings_project_groupbox)
self.file_type_label.setObjectName("file_type_label")
self.formLayout_2.setWidget(2, QtWidgets.QFormLayout.LabelRole, self.file_type_label)
self.file_type_option = QtWidgets.QComboBox(self.settings_project_groupbox)
self.file_type_option.setObjectName("file_type_option")
self.formLayout_2.setWidget(2, QtWidgets.QFormLayout.FieldRole, self.file_type_option)
self.verticalLayout.addWidget(self.settings_project_groupbox)
self.settings_misc_groupBox = QtWidgets.QGroupBox(Dialog)
self.settings_misc_groupBox.setObjectName("settings_misc_groupBox")
self.verticalLayout_7 = QtWidgets.QVBoxLayout(self.settings_misc_groupBox)
self.verticalLayout_7.setObjectName("verticalLayout_7")
self.auto_remove_link_option = QtWidgets.QCheckBox(self.settings_misc_groupBox)
self.auto_remove_link_option.setObjectName("auto_remove_link_option")
self.verticalLayout_7.addWidget(self.auto_remove_link_option)
self.title_full_path_option = QtWidgets.QCheckBox(self.settings_misc_groupBox)
self.title_full_path_option.setObjectName("title_full_path_option")
self.verticalLayout_7.addWidget(self.title_full_path_option)
self.not_save_option = QtWidgets.QCheckBox(self.settings_misc_groupBox)
self.not_save_option.setObjectName("not_save_option")
self.verticalLayout_7.addWidget(self.not_save_option)
spacerItem1 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.verticalLayout_7.addItem(spacerItem1)
self.verticalLayout.addWidget(self.settings_misc_groupBox)
self.horizontalLayout.addLayout(self.verticalLayout)
self.verticalLayout_2.addLayout(self.horizontalLayout)
|
[
" self.button_box = QtWidgets.QDialogButtonBox(Dialog)"
] | 519
|
lcc
|
python
| null |
ffafcf6803367046bcd6535efabfd55b6913ed04cf9c0e14
|
|
/*
* FlightIntel for Pilots
*
* Copyright 2012 Nadeem Hasan <nhasan@nadmm.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.nadmm.airports.wx;
import java.util.Locale;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteQueryBuilder;
import android.location.Location;
import android.os.Bundle;
import android.support.v4.content.LocalBroadcastManager;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.view.ViewGroup.LayoutParams;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.nadmm.airports.DatabaseManager;
import com.nadmm.airports.DatabaseManager.Airports;
import com.nadmm.airports.DatabaseManager.Awos1;
import com.nadmm.airports.DatabaseManager.Wxs;
import com.nadmm.airports.DrawerActivityBase;
import com.nadmm.airports.FragmentBase;
import com.nadmm.airports.R;
import com.nadmm.airports.utils.CursorAsyncTask;
import com.nadmm.airports.utils.FormatUtils;
import com.nadmm.airports.utils.GeoUtils;
import com.nadmm.airports.utils.TimeUtils;
import com.nadmm.airports.wx.Taf.Forecast;
import com.nadmm.airports.wx.Taf.IcingCondition;
import com.nadmm.airports.wx.Taf.TurbulenceCondition;
public class TafFragment extends FragmentBase {
// Broadcast action this fragment listens for (TAF fetch results).
private final String mAction = NoaaService.ACTION_GET_TAF;
// Search radius, in nautical miles, for the nearest TAF-capable station
// (distances are converted via METERS_PER_NAUTICAL_MILE in doInBackground).
private final int TAF_RADIUS = 25;
// Not referenced in the visible code - presumably limits forecast age; confirm.
private final int TAF_HOURS_BEFORE = 3;
private Location mLocation;
// Filter/receiver pair registered in onResume, unregistered in onPause.
private IntentFilter mFilter;
private BroadcastReceiver mReceiver;
// Station whose TAF is currently displayed; used by the refresh action.
private String mStationId;
private Forecast mLastForecast;
@Override
public void onCreate( Bundle savedInstanceState ) {
super.onCreate( savedInstanceState );
setHasOptionsMenu( true );
// Listen only for TAF result broadcasts; only text payloads are rendered
// here, any other payload type is ignored.
mFilter = new IntentFilter();
mFilter.addAction( mAction );
mReceiver = new BroadcastReceiver() {
@Override
public void onReceive( Context context, Intent intent ) {
String action = intent.getAction();
if ( action.equals( mAction ) ) {
String type = intent.getStringExtra( NoaaService.TYPE );
if ( type.equals( NoaaService.TYPE_TEXT ) ) {
showTaf( intent );
}
}
}
};
}
@Override
public void onResume() {
// Re-register for TAF broadcasts, then kick off a background fetch for
// the station id passed in the fragment arguments.
LocalBroadcastManager bm = LocalBroadcastManager.getInstance( getActivity() );
bm.registerReceiver( mReceiver, mFilter );
Bundle args = getArguments();
String stationId = args.getString( NoaaService.STATION_ID );
setBackgroundTask( new TafTask() ).execute( stationId );
super.onResume();
}
@Override
public void onPause() {
    // Stop listening for TAF results while the fragment is not visible;
    // the receiver is re-registered in onResume().
    LocalBroadcastManager.getInstance( getActivity() ).unregisterReceiver( mReceiver );
    super.onPause();
}
@Override
public View onCreateView( LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState ) {
View view = inflater.inflate( R.layout.taf_detail_view, container, false );
// The "view graphic" button switches to the graphical TAF map activity.
Button btnGraphic = (Button) view.findViewById( R.id.btnViewGraphic );
btnGraphic.setOnClickListener( new OnClickListener() {
@Override
public void onClick( View v ) {
Intent intent = new Intent( getActivity(), TafMapActivity.class );
startActivity( intent );
}
} );
return createContentView( view );
}
@Override
public void onPrepareOptionsMenu( Menu menu ) {
// Hide the refresh action while the navigation drawer is open.
DrawerActivityBase activity = (DrawerActivityBase) getActivity();
setRefreshItemVisible( !activity.isNavDrawerOpen() );
}
@Override
public boolean onOptionsItemSelected( MenuItem item ) {
    // Refresh is the only menu action handled here; everything else is
    // delegated to the base class.
    if ( item.getItemId() == R.id.menu_refresh ) {
        startRefreshAnimation();
        requestTaf( mStationId, true );
        return true;
    }
    return super.onOptionsItemSelected( item );
}
private final class TafTask extends CursorAsyncTask {
@Override
protected Cursor[] doInBackground( String... params ) {
String stationId = params[ 0 ];
Cursor[] cursors = new Cursor[ 2 ];
SQLiteDatabase db = getDbManager().getDatabase( DatabaseManager.DB_FADDS );
// Look up the requested station record first.
SQLiteQueryBuilder builder = new SQLiteQueryBuilder();
builder.setTables( Wxs.TABLE_NAME );
String selection = Wxs.STATION_ID+"=?";
Cursor c = builder.query( db, new String[] { "*" }, selection,
new String[] { stationId }, null, null, null, null );
// NOTE(review): moveToFirst() result is unchecked - an unknown station id
// would make the getString below fail; verify callers pass valid ids.
c.moveToFirst();
String siteTypes = c.getString( c.getColumnIndex( Wxs.STATION_SITE_TYPES ) );
if ( !siteTypes.contains( "TAF" ) ) {
// There is no TAF available at this station, search for the nearest
double lat = c.getDouble( c.getColumnIndex( Wxs.STATION_LATITUDE_DEGREES ) );
double lon = c.getDouble( c.getColumnIndex( Wxs.STATION_LONGITUDE_DEGREES ) );
Location location = new Location( "" );
location.setLatitude( lat );
location.setLongitude( lon );
c.close();
// Get the bounding box first to do a quick query as a first cut
double[] box = GeoUtils.getBoundingBoxRadians( location, TAF_RADIUS );
double radLatMin = box[ 0 ];
double radLatMax = box[ 1 ];
double radLonMin = box[ 2 ];
double radLonMax = box[ 3 ];
// Check if 180th Meridian lies within the bounding Box
boolean isCrossingMeridian180 = ( radLonMin > radLonMax );
selection = "("
+Wxs.STATION_LATITUDE_DEGREES+">=? AND "+Wxs.STATION_LATITUDE_DEGREES+"<=?"
+") AND ("+Wxs.STATION_LONGITUDE_DEGREES+">=? "
+(isCrossingMeridian180? "OR " : "AND ")+Wxs.STATION_LONGITUDE_DEGREES+"<=?)";
String[] selectionArgs = {
String.valueOf( Math.toDegrees( radLatMin ) ),
String.valueOf( Math.toDegrees( radLatMax ) ),
String.valueOf( Math.toDegrees( radLonMin ) ),
String.valueOf( Math.toDegrees( radLonMax ) )
};
c = builder.query( db, new String[] { "*" }, selection, selectionArgs,
null, null, null, null );
stationId = "";
if ( c.moveToFirst() ) {
// Scan the candidates and keep the closest TAF-capable station
// within TAF_RADIUS nautical miles.
float distance = Float.MAX_VALUE;
do {
siteTypes = c.getString( c.getColumnIndex( Wxs.STATION_SITE_TYPES ) );
if ( !siteTypes.contains( "TAF" ) ) {
continue;
}
// Get the location of this station
float[] results = new float[ 2 ];
Location.distanceBetween(
location.getLatitude(),
location.getLongitude(),
c.getDouble( c.getColumnIndex( Wxs.STATION_LATITUDE_DEGREES ) ),
c.getDouble( c.getColumnIndex( Wxs.STATION_LONGITUDE_DEGREES ) ),
results );
results[ 0 ] /= GeoUtils.METERS_PER_NAUTICAL_MILE;
if ( results[ 0 ] <= TAF_RADIUS && results[ 0 ] < distance ) {
stationId = c.getString( c.getColumnIndex( Wxs.STATION_ID ) );
distance = results[ 0 ];
}
} while ( c.moveToNext() );
}
}
c.close();
if ( stationId.length() > 0 ) {
// We have the station with TAF
builder = new SQLiteQueryBuilder();
builder.setTables( Wxs.TABLE_NAME );
selection = Wxs.STATION_ID+"=?";
c = builder.query( db, new String[] { "*" }, selection,
new String[] { stationId }, null, null, null, null );
cursors[ 0 ] = c;
// Second cursor: airport + AWOS sensor details (frequencies, phone,
// city/state) for the chosen station, joined on the FAA code.
String[] wxColumns = new String[] {
Awos1.WX_SENSOR_IDENT,
Awos1.WX_SENSOR_TYPE,
Awos1.STATION_FREQUENCY,
Awos1.SECOND_STATION_FREQUENCY,
Awos1.STATION_PHONE_NUMBER,
Airports.ASSOC_CITY,
Airports.ASSOC_STATE
};
builder = new SQLiteQueryBuilder();
builder.setTables( Airports.TABLE_NAME+" a"
+" LEFT JOIN "+Awos1.TABLE_NAME+" w"
+" ON a."+Airports.FAA_CODE+" = w."+Awos1.WX_SENSOR_IDENT );
selection = "a."+Airports.ICAO_CODE+"=?";
c = builder.query( db, wxColumns, selection, new String[] { stationId },
null, null, null, null );
cursors[ 1 ] = c;
}
return cursors;
}
@Override
protected boolean onResult( Cursor[] result ) {
Cursor wxs = result[ 0 ];
if ( wxs == null || !wxs.moveToFirst() ) {
// No station with TAF was found nearby
Bundle args = getArguments();
String stationId = args.getString( NoaaService.STATION_ID );
View detail = findViewById( R.id.wx_detail_layout );
detail.setVisibility( View.GONE );
LinearLayout layout = (LinearLayout) findViewById( R.id.wx_status_layout );
layout.removeAllViews();
layout.setVisibility( View.GONE );
|
[
" TextView tv =(TextView) findViewById( R.id.status_msg );"
] | 986
|
lcc
|
java
| null |
cff5cce659d8bde6af869ff91a57f4b6445b88159e169708
|
|
using System.Collections;
using System.Collections.Generic;
using NHibernate.Criterion;
using NHibernate.Multi;
using NHibernate.Stat;
using NUnit.Framework;
namespace NHibernate.Test.Stats
{
[TestFixture]
public class StatsFixture : TestCase
{
// Mappings are loaded from the NHibernate.Test assembly.
protected override string MappingsAssembly
{
get { return "NHibernate.Test"; }
}
protected override string[] Mappings
{
get { return new string[] { "Stats.Continent.hbm.xml" }; }
}
// Statistics gathering is off by default; every test in this fixture
// depends on it being enabled.
protected override void Configure(Cfg.Configuration configuration)
{
configuration.SetProperty(Cfg.Environment.GenerateStatistics, "true");
}
// Persists a minimal object graph (continent "Europe" containing the
// country "France") and returns the continent so callers can use its id.
private static Continent FillDb(ISession s)
{
    var france = new Country { Name = "France" };
    var europe = new Continent
    {
        Name = "Europe",
        Countries = new HashSet<Country> { france }
    };
    s.Save(france);
    s.Save(europe);
    return europe;
}
// Deletes all entities created by the fixture, children before parents so
// that relationships are removed in a safe order.
private static void CleanDb(ISession s)
{
    foreach (var hql in new[] { "from Locality", "from Country", "from Continent" })
    {
        s.Delete(hql);
    }
}
[Test]
// Verifies how CollectionLoadCount/CollectionFetchCount behave under four
// fetching strategies: lazy default, HQL join fetch, eager+join, eager+select.
public void CollectionFetchVsLoad()
{
IStatistics stats = Sfi.Statistics;
stats.Clear();
// Phase 1: default (lazy) mapping - Get() must not touch the collection;
// enumerating it afterwards counts as one load and one explicit fetch.
ISession s = OpenSession();
ITransaction tx = s.BeginTransaction();
Continent europe = FillDb(s);
tx.Commit();
s.Clear();
tx = s.BeginTransaction();
Assert.AreEqual(0, stats.CollectionLoadCount);
Assert.AreEqual(0, stats.CollectionFetchCount);
Continent europe2 = s.Get<Continent>(europe.Id);
Assert.AreEqual(0, stats.CollectionLoadCount, "Lazy true: no collection should be loaded");
Assert.AreEqual(0, stats.CollectionFetchCount);
int cc = europe2.Countries.Count;
Assert.AreEqual(1, stats.CollectionLoadCount);
Assert.AreEqual(1, stats.CollectionFetchCount, "Explicit fetch of the collection state");
tx.Commit();
s.Close();
// Phase 2: HQL "join fetch" loads the collection in the same query as its
// parent, so a load is recorded but no separate fetch.
s = OpenSession();
tx = s.BeginTransaction();
stats.Clear();
europe = FillDb(s);
tx.Commit();
s.Clear();
tx = s.BeginTransaction();
Assert.AreEqual(0, stats.CollectionLoadCount);
Assert.AreEqual(0, stats.CollectionFetchCount);
europe2 = s.CreateQuery("from Continent a join fetch a.Countries where a.id = " + europe.Id).UniqueResult<Continent>();
Assert.AreEqual(1, stats.CollectionLoadCount);
Assert.AreEqual(0, stats.CollectionFetchCount, "collection should be loaded in the same query as its parent");
tx.Commit();
s.Close();
// Phase 3: rebuild the factory with lazy=false + FetchMode.Join; Get()
// then loads the collection together with the entity (no second fetch).
Mapping.Collection coll = cfg.GetCollectionMapping("NHibernate.Test.Stats.Continent.Countries");
coll.FetchMode = FetchMode.Join;
coll.IsLazy = false;
ISessionFactory sf = cfg.BuildSessionFactory();
stats = sf.Statistics;
stats.Clear();
stats.IsStatisticsEnabled = true;
s = sf.OpenSession();
tx = s.BeginTransaction();
europe = FillDb(s);
tx.Commit();
s.Clear();
tx = s.BeginTransaction();
Assert.AreEqual(0, stats.CollectionLoadCount);
Assert.AreEqual(0, stats.CollectionFetchCount);
europe2 = s.Get<Continent>(europe.Id);
Assert.AreEqual(1, stats.CollectionLoadCount);
Assert.AreEqual(0, stats.CollectionFetchCount,
"Should do direct load, not indirect second load when lazy false and JOIN");
tx.Commit();
s.Close();
sf.Close();
// Phase 4: lazy=false + FetchMode.Select forces an eager but separate
// collection query, which counts as a fetch.
coll = cfg.GetCollectionMapping("NHibernate.Test.Stats.Continent.Countries");
coll.FetchMode = FetchMode.Select;
coll.IsLazy = false;
sf = cfg.BuildSessionFactory();
stats = sf.Statistics;
stats.Clear();
stats.IsStatisticsEnabled = true;
s = sf.OpenSession();
tx = s.BeginTransaction();
europe = FillDb(s);
tx.Commit();
s.Clear();
tx = s.BeginTransaction();
Assert.AreEqual(0, stats.CollectionLoadCount);
Assert.AreEqual(0, stats.CollectionFetchCount);
europe2 = s.Get<Continent>(europe.Id);
Assert.AreEqual(1, stats.CollectionLoadCount);
Assert.AreEqual(1, stats.CollectionFetchCount, "Should do explicit collection load, not part of the first one");
// Clean up: delete the children explicitly, then the remaining entities.
foreach (Country country in europe2.Countries)
{
s.Delete(country);
}
CleanDb(s);
tx.Commit();
s.Close();
}
[Test]
// Verifies per-query statistics (execution count, row count, max time) for
// plain HQL, enumerated HQL, polymorphic "split" queries and native SQL.
public void QueryStatGathering()
{
IStatistics stats = Sfi.Statistics;
stats.Clear();
ISession s = OpenSession();
ITransaction tx = s.BeginTransaction();
FillDb(s);
tx.Commit();
s.Close();
// Plain HQL list query: one execution, row count matches results.
s = OpenSession();
tx = s.BeginTransaction();
string continents = "from Continent";
int results = s.CreateQuery(continents).List().Count;
QueryStatistics continentStats = stats.GetQueryStatistics(continents);
Assert.IsNotNull(continentStats, "stats were null");
Assert.AreEqual(1, continentStats.ExecutionCount, "unexpected execution count");
Assert.AreEqual(results, continentStats.ExecutionRowCount, "unexpected row count");
var maxTime = continentStats.ExecutionMaxTime;
Assert.AreEqual(maxTime, stats.QueryExecutionMaxTime);
Assert.AreEqual( continents, stats.QueryExecutionMaxTimeQueryString );
IEnumerable itr = s.CreateQuery(continents).Enumerable();
// Enumerable() should increment the execution count
Assert.AreEqual(2, continentStats.ExecutionCount, "unexpected execution count");
// but should not effect the cumulative row count
Assert.AreEqual(results, continentStats.ExecutionRowCount, "unexpected row count");
NHibernateUtil.Close(itr);
tx.Commit();
s.Close();
// explicitly check that statistics for "split queries" get collected
// under the original query
stats.Clear();
s = OpenSession();
tx = s.BeginTransaction();
string localities = "from Locality";
results = s.CreateQuery(localities).List().Count;
QueryStatistics localityStats = stats.GetQueryStatistics(localities);
Assert.IsNotNull(localityStats, "stats were null");
// ...one for each split query
Assert.AreEqual(2, localityStats.ExecutionCount, "unexpected execution count");
Assert.AreEqual(results, localityStats.ExecutionRowCount, "unexpected row count");
maxTime = localityStats.ExecutionMaxTime;
Assert.AreEqual(maxTime, stats.QueryExecutionMaxTime);
Assert.AreEqual( localities, stats.QueryExecutionMaxTimeQueryString );
tx.Commit();
s.Close();
Assert.IsFalse(s.IsOpen);
// native sql queries
stats.Clear();
s = OpenSession();
tx = s.BeginTransaction();
string sql = "select Id, Name from Country";
results = s.CreateSQLQuery(sql).AddEntity(typeof (Country)).List().Count;
QueryStatistics sqlStats = stats.GetQueryStatistics(sql);
Assert.IsNotNull(sqlStats, "sql stats were null");
Assert.AreEqual(1, sqlStats.ExecutionCount, "unexpected execution count");
Assert.AreEqual(results, sqlStats.ExecutionRowCount, "unexpected row count");
maxTime = sqlStats.ExecutionMaxTime;
Assert.AreEqual(maxTime, stats.QueryExecutionMaxTime);
Assert.AreEqual( sql, stats.QueryExecutionMaxTimeQueryString);
tx.Commit();
s.Close();
// Final cleanup in a fresh session.
s = OpenSession();
tx = s.BeginTransaction();
CleanDb(s);
tx.Commit();
s.Close();
}
[Test]
public void IncrementQueryExecutionCount_WhenExplicitQueryIsExecuted()
{
using (ISession s = OpenSession())
using (ITransaction tx = s.BeginTransaction())
{
FillDb(s);
tx.Commit();
}
IStatistics stats = Sfi.Statistics;
stats.Clear();
using (ISession s = OpenSession())
{
|
[
"\t\t\t\tvar r = s.CreateCriteria<Country>().List();"
] | 596
|
lcc
|
csharp
| null |
ae9e378a2b48e8a532bf970e581bc7ccc318cd5047ec916c
|
|
using System;
namespace WebArbor.LiveUpdate.Engine
{
/// <summary>
/// Tool to calculate and add CRC codes to a string
///
/// ***************************************************************************
/// Copyright (c) 2003 Thoraxcentrum, Erasmus MC, The Netherlands.
///
/// Written by Marcel de Wijs with help from a lot of others,
/// especially Stefan Nelwan
///
/// This code is for free. I ported it from several different sources to C#.
///
/// For comments: Marcel_de_Wijs@hotmail.com
/// ***************************************************************************
/// </summary>
public class CRCTool
{
// 'order' [1..32] is the CRC polynom order, counted without the leading '1' bit
// 'polynom' is the CRC polynom without leading '1' bit
// 'direct' [0,1] specifies the kind of algorithm: 1=direct, no augmented zero bits
// 'crcinit' is the initial CRC value belonging to that algorithm
// 'crcxor' is the final XOR value
// 'refin' [0,1] specifies if a data byte is reflected before processing (UART) or not
// 'refout' [0,1] specifies if the CRC will be reflected before XOR
// Data character string
// For CRC-CCITT : order = 16, direct=1, poly=0x1021, CRCinit = 0xFFFF, crcxor=0; refin =0, refout=0
// For CRC16: order = 16, direct=1, poly=0x8005, CRCinit = 0x0, crcxor=0x0; refin =1, refout=1
// For CRC32: order = 32, direct=1, poly=0x4c11db7, CRCinit = 0xFFFFFFFF, crcxor=0xFFFFFFFF; refin =1, refout=1
// Default : CRC-CCITT
private int order = 16;
private ulong polynom = 0x1021;
private int direct = 1;
private ulong crcinit = 0xFFFF;
private ulong crcxor = 0x0;
private int refin = 0;
private int refout = 0;
private ulong crcmask;
private ulong crchighbit;
private ulong crcinit_direct;
private ulong crcinit_nondirect;
private ulong[] crctab = new ulong[256];
// Enumeration used in the init function to specify which CRC algorithm to use
public enum CRCCode { CRC_CCITT, CRC16, CRC32 };
public CRCTool()
{
    // Intentionally empty: the field initializers above already configure the
    // default CRC-CCITT parameter set. Call Init(...) before computing a CRC so
    // the lookup table and derived initial values are built.
}
/// <summary>
/// Selects one of the predefined CRC algorithms (see <see cref="CRCCode"/>) and
/// precomputes the register masks, the lookup table and both the direct and
/// non-direct initial CRC values used by the calculation routines.
/// </summary>
/// <param name="CodingType">Which predefined CRC parameter set to load.</param>
public void Init(CRCCode CodingType)
{
    // Load the parameter set: order, polynom, initial/xor values, reflection flags.
    switch (CodingType)
    {
        case CRCCode.CRC_CCITT:
            order = 16; direct = 1; polynom = 0x1021; crcinit = 0xFFFF; crcxor = 0; refin = 0; refout = 0;
            break;
        case CRCCode.CRC16:
            order = 16; direct = 1; polynom = 0x8005; crcinit = 0x0; crcxor = 0x0; refin = 1; refout = 1;
            break;
        case CRCCode.CRC32:
            order = 32; direct = 1; polynom = 0x4c11db7; crcinit = 0xFFFFFFFF; crcxor = 0xFFFFFFFF; refin = 1; refout = 1;
            break;
    }
    // Constant bit masks: crcmask covers the whole CRC register, crchighbit its top bit.
    crcmask = ((((ulong)1 << (order - 1)) - 1) << 1) | 1;
    crchighbit = (ulong)1 << (order - 1);
    // generate lookup table
    generate_crc_table();
    ulong bit, crc;
    int i;
    if (direct == 0)
    {
        // crcinit was specified for the non-direct algorithm: shift 'order' bits
        // through the register to derive the equivalent direct initial value.
        crcinit_nondirect = crcinit;
        crc = crcinit;
        for (i = 0; i < order; i++)
        {
            bit = crc & crchighbit;
            crc <<= 1;
            if (bit != 0)
            {
                crc ^= polynom;
            }
        }
        crc &= crcmask;
        crcinit_direct = crc;
    }
    else
    {
        // crcinit was specified for the direct algorithm: run the register
        // backwards to derive the equivalent non-direct initial value.
        crcinit_direct = crcinit;
        crc = crcinit;
        for (i = 0; i < order; i++)
        {
            bit = crc & 1;
            if (bit != 0)
            {
                crc ^= polynom;
            }
            crc >>= 1;
            if (bit != 0)
            {
                crc |= crchighbit;
            }
        }
        crcinit_nondirect = crc;
    }
}
/// <summary>
/// 4 ways to calculate the crc checksum. If you have to do a lot of encoding
/// you should use the table functions. Since they use precalculated values, which
/// saves some calculating.
/// Requires Init() to have been called first so the lookup table and the direct
/// initial value are populated.
/// </summary>
public ulong crctablefast(byte[] p)
{
    // fast lookup table algorithm without augmented zero bytes, e.g. used in pkzip.
    // only usable with polynom orders of 8, 16, 24 or 32.
    ulong crc = crcinit_direct;
    if (refin != 0)
    {
        // Reflected-input variant keeps the register reflected throughout.
        crc = reflect(crc, order);
    }
    if (refin == 0)
    {
        // Normal (MSB-first) table lookup: index by the top byte of the register.
        for (int i = 0; i < p.Length; i++)
        {
            crc = (crc << 8) ^ crctab[((crc >> (order - 8)) & 0xff) ^ p[i]];
        }
    }
    else
    {
        // Reflected (LSB-first) table lookup: index by the low byte of the register.
        for (int i = 0; i < p.Length; i++)
        {
            crc = (crc >> 8) ^ crctab[(crc & 0xff) ^ p[i]];
        }
    }
    // Undo the reflection only when input and output reflection settings differ.
    if ((refout ^ refin) != 0)
    {
        crc = reflect(crc, order);
    }
    crc ^= crcxor;
    crc &= crcmask;
    return (crc);
}
/// <summary>
/// Table-driven CRC over <paramref name="p"/> using augmented zero bytes.
/// Requires Init() to have been called first. Only usable with polynom
/// orders of 8, 16, 24 or 32.
/// </summary>
public ulong crctable(byte[] p)
{
    // normal lookup table algorithm with augmented zero bytes.
    // only usable with polynom orders of 8, 16, 24 or 32.
    ulong crc = crcinit_nondirect;
    if (refin != 0)
    {
        crc = reflect(crc, order);
    }
    if (refin == 0)
    {
        // MSB-first: shift the data byte in from the right while the register
        // is indexed by its top byte.
        for (int i = 0; i < p.Length; i++)
        {
            crc = ((crc << 8) | p[i]) ^ crctab[(crc >> (order - 8)) & 0xff];
        }
    }
    else
    {
        // LSB-first (reflected) variant.
        // NOTE(review): the int casts here can sign-extend when the result is
        // cast back to ulong (possible for order == 32 with high data bytes);
        // the final '& crcmask' masks only the last value, not intermediate
        // register states — verify against a known CRC32 reference vector.
        for (int i = 0; i < p.Length; i++)
        {
            crc = (ulong)(((int)(crc >> 8) | (p[i] << (order - 8))) ^ (int)crctab[crc & 0xff]);
        }
    }
    // Push order/8 augmented zero bytes through the register.
    if (refin == 0)
    {
        for (int i = 0; i < order / 8; i++)
        {
            crc = (crc << 8) ^ crctab[(crc >> (order - 8)) & 0xff];
        }
    }
    else
    {
        for (int i = 0; i < order / 8; i++)
        {
            crc = (crc >> 8) ^ crctab[crc & 0xff];
        }
    }
    if ((refout ^ refin) != 0)
    {
        crc = reflect(crc, order);
    }
    crc ^= crcxor;
    crc &= crcmask;
    return (crc);
}
/// <summary>
/// Bit-by-bit CRC with augmented zero bits. Uses no lookup table, so it is
/// suited for polynom orders between 1 and 32. Requires Init() first.
/// </summary>
public ulong crcbitbybit(byte[] p)
{
    ulong crc = crcinit_nondirect;
    // Clock every data bit (MSB first) through the shift register.
    foreach (byte dataByte in p)
    {
        ulong c = dataByte;
        if (refin != 0)
        {
            c = reflect(c, 8);
        }
        for (ulong mask = 0x80; mask != 0; mask >>= 1)
        {
            ulong topBit = crc & crchighbit;
            crc <<= 1;
            if ((c & mask) != 0)
            {
                crc |= 1;
            }
            if (topBit != 0)
            {
                crc ^= polynom;
            }
        }
    }
    // Push 'order' augmented zero bits through the register.
    for (int k = 0; k < order; k++)
    {
        ulong topBit = crc & crchighbit;
        crc <<= 1;
        if (topBit != 0)
        {
            crc ^= polynom;
        }
    }
    if (refout != 0)
    {
        crc = reflect(crc, order);
    }
    crc ^= crcxor;
    crc &= crcmask;
    return crc;
}
/// <summary>
/// Fast bit-by-bit CRC without augmented zero bits. Uses no lookup table, so
/// it is suited for polynom orders between 1 and 32. Requires Init() first.
/// </summary>
public ulong crcbitbybitfast(byte[] p)
{
    ulong crc = crcinit_direct;
    foreach (byte dataByte in p)
    {
        ulong c = dataByte;
        if (refin != 0)
        {
            c = reflect(c, 8);
        }
        // XOR each data bit into the register's top bit before reducing.
        for (ulong mask = 0x80; mask > 0; mask >>= 1)
        {
            ulong topBit = crc & crchighbit;
            crc <<= 1;
            if ((c & mask) > 0)
            {
                topBit ^= crchighbit;
            }
            if (topBit > 0)
            {
                crc ^= polynom;
            }
        }
    }
    if (refout > 0)
    {
        crc = reflect(crc, order);
    }
    crc ^= crcxor;
    crc &= crcmask;
    return crc;
}
/// <summary>
/// CalcCRCITT is an algorithm found on the web for calculating the CRCITT checksum
/// It is included to demonstrate that although it looks different it is the same
/// routine as the crcbitbybit* functions. But it is optimized and preconfigured for CRCITT.
/// </summary>
public ushort CalcCRCITT(byte[] p)
{
uint uiCRCITTSum = 0xFFFF;
uint uiByteValue;
for (int iBufferIndex = 0; iBufferIndex < p.Length; iBufferIndex++)
{
uiByteValue = ((uint)p[iBufferIndex] << 8);
|
[
" for (int iBitIndex = 0; iBitIndex < 8; iBitIndex++)"
] | 1,212
|
lcc
|
csharp
| null |
cb8b4b2b743ae5c5feb0a78bada007e175140164e40f5a42
|
|
package net.geforcemods.securitycraft.entity;
import java.util.List;
import java.util.Random;
import net.geforcemods.securitycraft.SCContent;
import net.geforcemods.securitycraft.SecurityCraft;
import net.geforcemods.securitycraft.api.Owner;
import net.geforcemods.securitycraft.blockentities.KeypadChestBlockEntity;
import net.geforcemods.securitycraft.entity.ai.AttackRangedIfEnabledGoal;
import net.geforcemods.securitycraft.entity.ai.TargetNearestPlayerOrMobGoal;
import net.geforcemods.securitycraft.items.ModuleItem;
import net.geforcemods.securitycraft.network.client.InitSentryAnimation;
import net.geforcemods.securitycraft.util.ModuleUtils;
import net.geforcemods.securitycraft.util.PlayerUtils;
import net.geforcemods.securitycraft.util.Utils;
import net.minecraft.ChatFormatting;
import net.minecraft.core.BlockPos;
import net.minecraft.core.BlockSourceImpl;
import net.minecraft.core.Direction;
import net.minecraft.core.dispenser.AbstractProjectileDispenseBehavior;
import net.minecraft.core.dispenser.DispenseItemBehavior;
import net.minecraft.nbt.CompoundTag;
import net.minecraft.network.protocol.Packet;
import net.minecraft.network.protocol.game.ClientboundAddEntityPacket;
import net.minecraft.network.syncher.EntityDataAccessor;
import net.minecraft.network.syncher.EntityDataSerializers;
import net.minecraft.network.syncher.SynchedEntityData;
import net.minecraft.server.level.ServerLevel;
import net.minecraft.sounds.SoundEvent;
import net.minecraft.sounds.SoundEvents;
import net.minecraft.util.Mth;
import net.minecraft.world.InteractionHand;
import net.minecraft.world.InteractionResult;
import net.minecraft.world.damagesource.DamageSource;
import net.minecraft.world.entity.Entity;
import net.minecraft.world.entity.EntityType;
import net.minecraft.world.entity.EquipmentSlot;
import net.minecraft.world.entity.LivingEntity;
import net.minecraft.world.entity.MobSpawnType;
import net.minecraft.world.entity.MoverType;
import net.minecraft.world.entity.PathfinderMob;
import net.minecraft.world.entity.Pose;
import net.minecraft.world.entity.monster.RangedAttackMob;
import net.minecraft.world.entity.player.Player;
import net.minecraft.world.entity.projectile.Projectile;
import net.minecraft.world.item.Item;
import net.minecraft.world.item.ItemStack;
import net.minecraft.world.item.Items;
import net.minecraft.world.item.context.UseOnContext;
import net.minecraft.world.level.Level;
import net.minecraft.world.level.LevelAccessor;
import net.minecraft.world.level.block.Block;
import net.minecraft.world.level.block.Blocks;
import net.minecraft.world.level.block.DispenserBlock;
import net.minecraft.world.level.block.entity.BlockEntity;
import net.minecraft.world.level.block.state.BlockState;
import net.minecraft.world.level.material.PushReaction;
import net.minecraft.world.phys.AABB;
import net.minecraft.world.phys.BlockHitResult;
import net.minecraft.world.phys.HitResult;
import net.minecraft.world.phys.Vec3;
import net.minecraft.world.phys.shapes.Shapes;
import net.minecraftforge.common.util.LazyOptional;
import net.minecraftforge.fmllegacy.network.PacketDistributor;
import net.minecraftforge.items.CapabilityItemHandler;
import net.minecraftforge.items.IItemHandler;
public class Sentry extends PathfinderMob implements RangedAttackMob //needs to be a creature so it can target a player, ai is also only given to living entities
{
private static final EntityDataAccessor<Owner> OWNER = SynchedEntityData.<Owner> defineId(Sentry.class, Owner.getSerializer());
private static final EntityDataAccessor<CompoundTag> DISGUISE_MODULE = SynchedEntityData.<CompoundTag> defineId(Sentry.class, EntityDataSerializers.COMPOUND_TAG);
private static final EntityDataAccessor<CompoundTag> ALLOWLIST = SynchedEntityData.<CompoundTag> defineId(Sentry.class, EntityDataSerializers.COMPOUND_TAG);
private static final EntityDataAccessor<Boolean> HAS_SPEED_MODULE = SynchedEntityData.<Boolean> defineId(Sentry.class, EntityDataSerializers.BOOLEAN);
private static final EntityDataAccessor<Integer> MODE = SynchedEntityData.<Integer> defineId(Sentry.class, EntityDataSerializers.INT);
public static final EntityDataAccessor<Float> HEAD_ROTATION = SynchedEntityData.<Float> defineId(Sentry.class, EntityDataSerializers.FLOAT);
public static final float MAX_TARGET_DISTANCE = 20.0F;
private static final float ANIMATION_STEP_SIZE = 0.025F;
private static final float UPWARDS_ANIMATION_LIMIT = 0.025F;
private static final float DOWNWARDS_ANIMATION_LIMIT = 0.9F;
private float headYTranslation = 0.9F;
public boolean animateUpwards = false;
public boolean animate = false;
private long previousTargetId = Long.MIN_VALUE;
public Sentry(EntityType<Sentry> type, Level level) {
    // The 'type' parameter is ignored; the sentry always registers with its own
    // entity type from SCContent.
    super(SCContent.eTypeSentry, level);
}
/**
 * Initializes the synched data of a freshly placed sentry: records the placing
 * player as owner and resets modules, speed flag, mode and head rotation.
 *
 * @param owner The player who placed this sentry
 */
public void setupSentry(Player owner) {
    entityData.set(OWNER, new Owner(owner.getName().getString(), Player.createPlayerUUID(owner.getGameProfile()).toString()));
    entityData.set(DISGUISE_MODULE, new CompoundTag());
    entityData.set(ALLOWLIST, new CompoundTag());
    entityData.set(HAS_SPEED_MODULE, false);
    entityData.set(MODE, SentryMode.CAMOUFLAGE_HP.ordinal());
    entityData.set(HEAD_ROTATION, 0.0F);
}
@Override
protected void defineSynchedData() {
    // Register all synched entries with their defaults; actual values are
    // assigned later in setupSentry() or readAdditionalSaveData().
    super.defineSynchedData();
    entityData.define(OWNER, new Owner());
    entityData.define(DISGUISE_MODULE, new CompoundTag());
    entityData.define(ALLOWLIST, new CompoundTag());
    entityData.define(HAS_SPEED_MODULE, false);
    entityData.define(MODE, SentryMode.CAMOUFLAGE_HP.ordinal());
    entityData.define(HEAD_ROTATION, 0.0F);
}
@Override
protected void registerGoals() {
    // Shoot at the current target (speed depends on the speed module) and pick
    // the nearest player or mob as that target.
    goalSelector.addGoal(1, new AttackRangedIfEnabledGoal(this, this::getShootingSpeed, 10.0F));
    targetSelector.addGoal(1, new TargetNearestPlayerOrMobGoal(this));
}
@Override
public void tick() {
    super.tick();
    if (!level.isClientSide) {
        // Server: a sentry cannot float; remove it when its supporting block is gone.
        BlockPos downPos = getBlockPosBelowThatAffectsMyMovement();
        if (level.getBlockState(downPos).isAir() || level.noCollision(new AABB(downPos)))
            discard();
    }
    else {
        // Client: drive the head pop-up/retract animation. Smaller
        // headYTranslation means the head is further up.
        if (!animate && headYTranslation > 0.0F && getMode().isAggressive()) {
            animateUpwards = true;
            animate = true;
        }
        if (animate) //no else if because animate can be changed in the above if statement
        {
            // Animate upwards (translation shrinks) until the upward limit is hit...
            if (animateUpwards && headYTranslation > UPWARDS_ANIMATION_LIMIT) {
                headYTranslation -= ANIMATION_STEP_SIZE;
                if (headYTranslation <= UPWARDS_ANIMATION_LIMIT) {
                    animateUpwards = false;
                    animate = false;
                }
            }
            // ...or downwards (translation grows) until the downward limit is hit.
            else if (!animateUpwards && headYTranslation < DOWNWARDS_ANIMATION_LIMIT) {
                headYTranslation += ANIMATION_STEP_SIZE;
                if (headYTranslation >= DOWNWARDS_ANIMATION_LIMIT) {
                    animateUpwards = true;
                    animate = false;
                }
            }
        }
    }
}
@Override
public ItemStack getPickedResult(HitResult target) {
    // Middle-clicking a sentry yields its placement item.
    return new ItemStack(SCContent.SENTRY.get());
}
/**
 * Handles right-click interaction. Owners can remove the sentry, install or
 * extract modules, rename it, transfer ownership or toggle its mode depending
 * on the held item; creative players may remove sentries they do not own.
 */
@Override
public InteractionResult mobInteract(Player player, InteractionHand hand) {
    BlockPos pos = blockPosition();
    if (getOwner().isOwner(player) && hand == InteractionHand.MAIN_HAND) {
        Item item = player.getMainHandItem().getItem();
        player.closeContainer();
        if (player.isCrouching())
            discard();
        else if (item == SCContent.UNIVERSAL_BLOCK_REMOVER.get()) {
            kill();
            if (!player.isCreative())
                player.getMainHandItem().hurtAndBreak(1, player, p -> p.broadcastBreakEvent(hand));
        }
        else if (item == SCContent.DISGUISE_MODULE.get()) {
            ItemStack module = getDisguiseModule();
            //drop the old module as to not override it with the new one
            if (!module.isEmpty()) {
                Block.popResource(level, pos, module);
                // Also remove the disguise block that belonged to the old module.
                Block block = ((ModuleItem) module.getItem()).getBlockAddon(module.getTag());
                if (block == level.getBlockState(pos).getBlock())
                    level.removeBlock(pos, false);
            }
            setDisguiseModule(player.getMainHandItem());
            if (!player.isCreative())
                player.setItemSlot(EquipmentSlot.MAINHAND, ItemStack.EMPTY);
        }
        else if (item == SCContent.ALLOWLIST_MODULE.get()) {
            ItemStack module = getAllowlistModule();
            // Drop the previously installed allowlist module before replacing it.
            if (!module.isEmpty())
                Block.popResource(level, pos, module);
            setAllowlistModule(player.getMainHandItem());
            if (!player.isCreative())
                player.setItemSlot(EquipmentSlot.MAINHAND, ItemStack.EMPTY);
        }
        else if (item == SCContent.SPEED_MODULE.get()) {
            // Only one speed module can be installed at a time.
            if (!hasSpeedModule()) {
                setHasSpeedModule(true);
                if (!player.isCreative())
                    player.setItemSlot(EquipmentSlot.MAINHAND, ItemStack.EMPTY);
            }
        }
        else if (item == SCContent.UNIVERSAL_BLOCK_MODIFIER.get()) {
            // Extract every installed module and reset the synched module data.
            if (!getDisguiseModule().isEmpty()) {
                Block block = ((ModuleItem) getDisguiseModule().getItem()).getBlockAddon(getDisguiseModule().getTag());
                if (block == level.getBlockState(pos).getBlock())
                    level.setBlockAndUpdate(pos, Blocks.AIR.defaultBlockState());
            }
            Block.popResource(level, pos, getDisguiseModule());
            Block.popResource(level, pos, getAllowlistModule());
            if (hasSpeedModule())
                Block.popResource(level, pos, new ItemStack(SCContent.SPEED_MODULE.get()));
            entityData.set(DISGUISE_MODULE, new CompoundTag());
            entityData.set(ALLOWLIST, new CompoundTag());
            entityData.set(HAS_SPEED_MODULE, false);
        }
        else if (item == SCContent.REMOTE_ACCESS_SENTRY.get())
            // Delegate to the remote's own use handler at the sentry's position.
            item.useOn(new UseOnContext(player, hand, new BlockHitResult(new Vec3(0.0D, 0.0D, 0.0D), Direction.NORTH, pos, false)));
        else if (item == Items.NAME_TAG) {
            setCustomName(player.getMainHandItem().getHoverName());
            player.getMainHandItem().shrink(1);
        }
        else if (item == SCContent.UNIVERSAL_OWNER_CHANGER.get()) {
            // The owner changer's display name holds the new owner's name.
            String newOwner = player.getMainHandItem().getHoverName().getString();
            entityData.set(OWNER, new Owner(newOwner, PlayerUtils.isPlayerOnline(newOwner) ? PlayerUtils.getPlayerFromName(newOwner).getUUID().toString() : "ownerUUID"));
            PlayerUtils.sendMessageToPlayer(player, Utils.localize(SCContent.UNIVERSAL_OWNER_CHANGER.get().getDescriptionId()), Utils.localize("messages.securitycraft:universalOwnerChanger.changed", newOwner), ChatFormatting.GREEN);
        }
        else
            toggleMode(player);
        player.swing(InteractionHand.MAIN_HAND);
        return InteractionResult.SUCCESS;
    }
    else if (!getOwner().isOwner(player) && hand == InteractionHand.MAIN_HAND && player.isCreative()) {
        // Creative players may remove sentries they do not own.
        if (player.isCrouching() || player.getMainHandItem().getItem() == SCContent.UNIVERSAL_BLOCK_REMOVER.get())
            kill();
    }
    return super.mobInteract(player, hand);
}
/**
 * Cleanly removes this sentry from the world, dropping the module and removing the block the sentry is disguised with
 */
@Override
public void remove(RemovalReason reason) {
    BlockPos pos = blockPosition();
    // Remove the disguise block only if it still matches the installed module.
    if (!getDisguiseModule().isEmpty()) {
        Block block = ((ModuleItem) getDisguiseModule().getItem()).getBlockAddon(getDisguiseModule().getTag());
        if (block == level.getBlockState(pos).getBlock())
            level.removeBlock(pos, false);
    }
    super.remove(reason);
    // Drop the sentry item itself plus all installed modules.
    Block.popResource(level, pos, new ItemStack(SCContent.SENTRY.get()));
    Block.popResource(level, pos, getDisguiseModule()); //if there is none, nothing will drop
    Block.popResource(level, pos, getAllowlistModule()); //if there is none, nothing will drop
    if (hasSpeedModule())
        Block.popResource(level, pos, new ItemStack(SCContent.SPEED_MODULE.get()));
}
@Override
public void kill() {
    // Route through remove() so the module/item drops in remove() always happen.
    remove(RemovalReason.KILLED);
}
/**
 * Sets this sentry's mode to the next one and sends the player a message about the switch
 *
 * @param player The player to send the message to
 */
public void toggleMode(Player player) {
    toggleMode(player, entityData.get(MODE) + 1, true);
}
/**
 * Sets this sentry's mode to the given mode (or 0 if the mode is not one of 0, 1, 2) and sends the player a message
 * about the switch if wanted
 *
 * @param player The player to send the message to
 * @param mode The mode (int) to switch to (instead of sequentially toggling)
 * @param sendMessage Whether to display the new mode and its description to the player
 */
public void toggleMode(Player player, int mode, boolean sendMessage) {
    // Out-of-range values wrap back to the first mode.
    if (mode < 0 || mode >= SentryMode.values().length)
        mode = 0;
    entityData.set(MODE, mode);
    if (sendMessage)
        player.displayClientMessage(Utils.localize(SentryMode.values()[mode].getModeKey()).append(Utils.localize(SentryMode.values()[mode].getDescriptionKey())), true);
    // Tell all clients to play the head animation matching the new mode.
    if (!player.level.isClientSide)
        SecurityCraft.channel.send(PacketDistributor.ALL.noArg(), new InitSentryAnimation(blockPosition(), true, SentryMode.values()[mode].isAggressive()));
}
@Override
public void setTarget(LivingEntity target) {
    // In non-aggressive modes, animate the head whenever the target actually
    // changes (gained, lost, or switched to a different entity id). In
    // camouflage mode the head only pops up while a target exists.
    if (!getMode().isAggressive() && (target == null && previousTargetId != Long.MIN_VALUE || (target != null && previousTargetId != target.getId()))) {
        animateUpwards = getMode().isCamouflage() && target != null;
        animate = true;
        SecurityCraft.channel.send(PacketDistributor.ALL.noArg(), new InitSentryAnimation(blockPosition(), animate, animateUpwards));
    }
    // Long.MIN_VALUE marks "no previous target".
    previousTargetId = target == null ? Long.MIN_VALUE : target.getId();
    super.setTarget(target);
}
@Override
public float getEyeHeight(Pose pose) //the sentry's eyes are higher so that it can see players even if it's inside a block when disguised - this also makes bullets spawn higher
{
    return 1.5F;
}
/**
 * Fires a projectile at the target. If the block below the sentry is a
 * container holding a dispensable projectile item, one of those is consumed
 * and shot instead of the default bullet.
 */
@Override
public void performRangedAttack(LivingEntity target, float distanceFactor) {
    //don't shoot if somehow a non player is a target, or if the player is in spectator or creative mode
    if (target instanceof Player player && (player.isSpectator() || player.isCreative()))
        return;
    //also don't shoot if the target is too far away
    if (distanceToSqr(target) > MAX_TARGET_DISTANCE * MAX_TARGET_DISTANCE)
        return;
    BlockEntity blockEntity = level.getBlockEntity(blockPosition().below());
    Projectile throwableEntity = null;
    SoundEvent shootSound = SoundEvents.ARROW_SHOOT;
    AbstractProjectileDispenseBehavior pdb = null;
    LazyOptional<IItemHandler> optional = LazyOptional.empty();
    // Keypad chests expose a sentry-specific handler; any other block entity is
    // queried through the regular item handler capability from above.
    if (blockEntity instanceof KeypadChestBlockEntity be)
        optional = be.getHandlerForSentry(this);
    else if (blockEntity != null)
        optional = blockEntity.getCapability(CapabilityItemHandler.ITEM_HANDLER_CAPABILITY, Direction.UP);
    if (optional.isPresent()) {
        IItemHandler handler = optional.orElse(null); //this is safe, because the presence was checked beforehand
        // Take the first stack whose dispense behavior produces a projectile.
        for (int i = 0; i < handler.getSlots(); i++) {
            ItemStack stack = handler.getStackInSlot(i);
            if (!stack.isEmpty()) {
                DispenseItemBehavior dispenseBehavior = ((DispenserBlock) Blocks.DISPENSER).getDispenseMethod(stack);
                if (dispenseBehavior instanceof AbstractProjectileDispenseBehavior projectileDispenseBehavior) {
                    ItemStack extracted = handler.extractItem(i, 1, false);
                    pdb = projectileDispenseBehavior;
                    throwableEntity = pdb.getProjectile(level, position().add(0.0D, 1.6D, 0.0D), extracted);
                    throwableEntity.setOwner(this);
                    // null signals "let the dispense behavior play its own sound".
                    shootSound = null;
                    break;
                }
            }
        }
    }
    // Fall back to the sentry's built-in bullet.
    if (throwableEntity == null)
        throwableEntity = new Bullet(level, this);
    double baseY = target.getY() + target.getEyeHeight() - 1.100000023841858D;
    double x = target.getX() - getX();
    double y = baseY - throwableEntity.getY();
    double z = target.getZ() - getZ();
    // Lob the shot slightly upwards depending on horizontal distance.
    float yOffset = Mth.sqrt((float) (x * x + z * z)) * 0.2F;
    // Turn the head towards the target for clients.
    entityData.set(HEAD_ROTATION, (float) (Mth.atan2(x, -z) * (180D / Math.PI)));
    throwableEntity.shoot(x, y + yOffset, z, 1.6F, 0.0F); //no inaccuracy for sentries!
    if (shootSound == null) {
        if (!level.isClientSide)
            pdb.playSound(new BlockSourceImpl((ServerLevel) level, blockPosition()));
    }
    else
        playSound(shootSound, 1.0F, 1.0F / (getRandom().nextFloat() * 0.4F + 0.8F));
    level.addFreshEntity(throwableEntity);
}
@Override
public void addAdditionalSaveData(CompoundTag tag) {
    // Persist owner, modules, speed flag, mode and head rotation. Tag names
    // ("TileEntityData", "InstalledWhitelist") are kept for save compatibility.
    tag.put("TileEntityData", getOwnerTag());
    tag.put("InstalledModule", getDisguiseModule().save(new CompoundTag()));
    tag.put("InstalledWhitelist", getAllowlistModule().save(new CompoundTag()));
    tag.putBoolean("HasSpeedModule", hasSpeedModule());
    tag.putInt("SentryMode", entityData.get(MODE));
    tag.putFloat("HeadRotation", entityData.get(HEAD_ROTATION));
    super.addAdditionalSaveData(tag);
}
// Serializes the synched owner into a fresh tag for addAdditionalSaveData().
private CompoundTag getOwnerTag() {
    CompoundTag tag = new CompoundTag();
    Owner owner = entityData.get(OWNER);
    owner.save(tag, false);
    return tag;
}
@Override
public void readAdditionalSaveData(CompoundTag tag) {
    // Mirror of addAdditionalSaveData(): restore owner, modules, speed flag,
    // mode and head rotation from the saved tag.
    CompoundTag teTag = tag.getCompound("TileEntityData");
    Owner owner = Owner.fromCompound(teTag);
    entityData.set(OWNER, owner);
    entityData.set(DISGUISE_MODULE, tag.getCompound("InstalledModule"));
    entityData.set(ALLOWLIST, tag.getCompound("InstalledWhitelist"));
    entityData.set(HAS_SPEED_MODULE, tag.getBoolean("HasSpeedModule"));
    entityData.set(MODE, tag.getInt("SentryMode"));
    entityData.set(HEAD_ROTATION, tag.getFloat("HeadRotation"));
    super.readAdditionalSaveData(tag);
}
/**
 * @return The owner of this sentry
 */
public Owner getOwner() {
    return entityData.get(OWNER);
}
/**
 * Sets the sentry's disguise module and places a block if possible
 *
 * @param module The module to set
 */
public void setDisguiseModule(ItemStack module) {
    Block block = ((ModuleItem) module.getItem()).getBlockAddon(module.getTag());
    if (block != null) {
        BlockState state = block.defaultBlockState();
        // Only place the disguise block into air, and only if it is a full cube;
        // non-full shapes fall back to air.
        if (level.getBlockState(blockPosition()).isAir())
            level.setBlockAndUpdate(blockPosition(), state.getShape(level, blockPosition()) == Shapes.block() ? state : Blocks.AIR.defaultBlockState());
    }
    entityData.set(DISGUISE_MODULE, module.save(new CompoundTag()));
}
/**
 * Sets the sentry's allowlist module
 *
 * @param module The module to set
 */
public void setAllowlistModule(ItemStack module) {
    entityData.set(ALLOWLIST, module.save(new CompoundTag()));
}
/**
 * Sets whether this sentry has a speed module installed
 *
 * @param hasSpeedModule true to set that this sentry has a speed module, false otherwise
 */
public void setHasSpeedModule(boolean hasSpeedModule) {
    entityData.set(HAS_SPEED_MODULE, hasSpeedModule);
}
/**
 * @return The disguise module that is added to this sentry. ItemStack.EMPTY if none available
 */
public ItemStack getDisguiseModule() {
    CompoundTag stackTag = entityData.get(DISGUISE_MODULE);
    return stackTag == null || stackTag.isEmpty() ? ItemStack.EMPTY : ItemStack.of(stackTag);
}
/**
 * @return The allowlist module that is added to this sentry. ItemStack.EMPTY if none available
 */
public ItemStack getAllowlistModule() {
    CompoundTag stackTag = entityData.get(ALLOWLIST);
    return stackTag == null || stackTag.isEmpty() ? ItemStack.EMPTY : ItemStack.of(stackTag);
}
// Whether a speed module is installed (halves the shooting interval per getShootingSpeed).
public boolean hasSpeedModule() {
    return entityData.get(HAS_SPEED_MODULE);
}
/**
 * @return The mode in which the sentry is currently in, CAMOUFLAGE_HP as a fallback if the saved mode is not a valid
 *         mode
 */
public SentryMode getMode() {
    SentryMode[] modes = SentryMode.values();
    int mode = entityData.get(MODE);
    return mode >= 0 && mode < modes.length ? modes[mode] : SentryMode.CAMOUFLAGE_HP;
}
/**
 * @return The amount of y translation from the head's default position, used for animation
 */
public float getHeadYTranslation() {
    return headYTranslation;
}
/**
 * Checks whether the given entity's name is listed on the installed allowlist
 * module (case-insensitive). Returns false for a null entity or no matches.
 */
public boolean isTargetingAllowedPlayer(LivingEntity potentialTarget) {
    if (potentialTarget == null)
        return false;
    for (String allowedName : ModuleUtils.getPlayersFromModule(getAllowlistModule())) {
        if (allowedName.equalsIgnoreCase(potentialTarget.getName().getContents()))
            return true;
    }
    return false;
}
public int getShootingSpeed() {
|
[
"\t\treturn hasSpeedModule() ? 5 : 10;"
] | 1,639
|
lcc
|
java
| null |
97d6976069558160fcb06c6802085ee52a6ef637d5832a1a
|
|
/********************************************************************************
* Copyright (c) 2011-2017 Red Hat Inc. and/or its affiliates and others
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 1.0 which is available at
* http://www.eclipse.org/legal/epl-v10.html.
*
* SPDX-License-Identifier: EPL-1.0
********************************************************************************/
package org.eclipse.ceylon.ide.eclipse.code.editor;
import static org.eclipse.ceylon.ide.eclipse.code.preferences.CeylonPreferenceInitializer.AUTO_ACTIVATION;
import static org.eclipse.ceylon.ide.eclipse.code.preferences.CeylonPreferenceInitializer.AUTO_ACTIVATION_DELAY;
import static org.eclipse.ceylon.ide.eclipse.code.preferences.CeylonPreferenceInitializer.AUTO_INSERT;
import static org.eclipse.ceylon.ide.eclipse.code.preferences.CeylonPreferenceInitializer.AUTO_INSERT_PREFIX;
import static org.eclipse.ceylon.ide.eclipse.java2ceylon.Java2CeylonProxies.completionJ2C;
import static org.eclipse.ceylon.ide.eclipse.util.EditorUtil.createColor;
import static org.eclipse.ceylon.ide.eclipse.util.EditorUtil.getPopupStyle;
import static org.eclipse.ceylon.ide.eclipse.util.Highlights.DOC_BACKGROUND;
import static org.eclipse.ceylon.ide.eclipse.util.Highlights.getCurrentThemeColor;
import static org.eclipse.jdt.ui.PreferenceConstants.APPEARANCE_JAVADOC_FONT;
import static org.eclipse.jface.dialogs.DialogSettings.getOrCreateSection;
import static org.eclipse.jface.text.AbstractInformationControlManager.ANCHOR_GLOBAL;
import static org.eclipse.jface.text.IDocument.DEFAULT_CONTENT_TYPE;
import static org.eclipse.ui.texteditor.AbstractTextEditor.PREFERENCE_COLOR_BACKGROUND_SYSTEM_DEFAULT;
import static org.eclipse.ui.texteditor.AbstractTextEditor.PREFERENCE_COLOR_FOREGROUND;
import static org.eclipse.ui.texteditor.AbstractTextEditor.PREFERENCE_COLOR_FOREGROUND_SYSTEM_DEFAULT;
import org.eclipse.jface.bindings.keys.KeySequence;
import org.eclipse.jface.bindings.keys.KeyStroke;
import org.eclipse.jface.dialogs.IDialogSettings;
import org.eclipse.jface.preference.IPreferenceStore;
import org.eclipse.jface.text.IAutoEditStrategy;
import org.eclipse.jface.text.IInformationControl;
import org.eclipse.jface.text.IInformationControlCreator;
import org.eclipse.jface.text.IRegion;
import org.eclipse.jface.text.ITextDoubleClickStrategy;
import org.eclipse.jface.text.ITextHover;
import org.eclipse.jface.text.ITextViewer;
import org.eclipse.jface.text.ITextViewerExtension2;
import org.eclipse.jface.text.Region;
import org.eclipse.jface.text.contentassist.ContentAssistEvent;
import org.eclipse.jface.text.contentassist.ContentAssistant;
import org.eclipse.jface.text.contentassist.ICompletionListener;
import org.eclipse.jface.text.contentassist.ICompletionProposal;
import org.eclipse.jface.text.contentassist.IContentAssistant;
import org.eclipse.jface.text.contentassist.IContentAssistantExtension2;
import org.eclipse.jface.text.hyperlink.IHyperlinkDetector;
import org.eclipse.jface.text.information.IInformationPresenter;
import org.eclipse.jface.text.information.IInformationProvider;
import org.eclipse.jface.text.information.IInformationProviderExtension;
import org.eclipse.jface.text.information.InformationPresenter;
import org.eclipse.jface.text.presentation.PresentationReconciler;
import org.eclipse.jface.text.quickassist.IQuickAssistAssistant;
import org.eclipse.jface.text.reconciler.IReconciler;
import org.eclipse.jface.text.source.ISourceViewer;
import org.eclipse.swt.SWT;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.ui.editors.text.EditorsUI;
import org.eclipse.ui.editors.text.TextSourceViewerConfiguration;
import org.eclipse.ui.internal.editors.text.EditorsPlugin;
import org.eclipse.ceylon.ide.eclipse.code.browser.BrowserInformationControl;
import org.eclipse.ceylon.ide.eclipse.code.complete.EclipseCompletionProcessor;
import org.eclipse.ceylon.ide.eclipse.code.correct.CeylonCorrectionProcessor;
import org.eclipse.ceylon.ide.eclipse.code.hover.AnnotationHover;
import org.eclipse.ceylon.ide.eclipse.code.hover.BestMatchHover;
import org.eclipse.ceylon.ide.eclipse.code.hover.CeylonInformationControlCreator;
import org.eclipse.ceylon.ide.eclipse.code.hover.CeylonInformationProvider;
import org.eclipse.ceylon.ide.eclipse.code.hover.CeylonSourceHover;
import org.eclipse.ceylon.ide.eclipse.code.outline.HierarchyPopup;
import org.eclipse.ceylon.ide.eclipse.code.outline.OutlinePopup;
import org.eclipse.ceylon.ide.eclipse.code.parse.CeylonParseController;
import org.eclipse.ceylon.ide.eclipse.code.resolve.CeylonHyperlinkDetector;
import org.eclipse.ceylon.ide.eclipse.code.resolve.CeylonJavaBackendHyperlinkDetector;
import org.eclipse.ceylon.ide.eclipse.code.resolve.CeylonJavascriptBackendHyperlinkDetector;
import org.eclipse.ceylon.ide.eclipse.code.resolve.CeylonNativeHeaderHyperlinkDetector;
import org.eclipse.ceylon.ide.eclipse.code.resolve.JavaHyperlinkDetector;
import org.eclipse.ceylon.ide.eclipse.code.resolve.ReferencesHyperlinkDetector;
import org.eclipse.ceylon.ide.eclipse.code.search.ReferencesPopup;
import org.eclipse.ceylon.ide.eclipse.ui.CeylonPlugin;
public class CeylonSourceViewerConfiguration
extends TextSourceViewerConfiguration {
protected final CeylonEditor editor;
// Configures a source viewer for the given editor, backed by the shared
// EditorsUI preference store. 'editor' may be null for standalone viewers
// (getContentAssistant checks for that case).
public CeylonSourceViewerConfiguration(CeylonEditor editor) {
    super(EditorsUI.getPreferenceStore());
    this.editor = editor;
}
/**
 * Builds the presentation reconciler that drives syntax highlighting, using a
 * single object as both damager and repairer for the default content type.
 */
public PresentationReconciler getPresentationReconciler(
        ISourceViewer sourceViewer) {
    PresentationReconciler reconciler =
            new PresentationReconciler();
    //make sure we pass the sourceViewer we get as an argument here
    //otherwise it breaks syntax highlighting in Code popup
    PresentationDamageRepairer damageRepairer =
            new PresentationDamageRepairer(sourceViewer,
                    editor);
    reconciler.setRepairer(damageRepairer,
            DEFAULT_CONTENT_TYPE);
    reconciler.setDamager(damageRepairer,
            DEFAULT_CONTENT_TYPE);
    return reconciler;
}
/**
 * Pauses the editor's background parser while a completion session is open
 * (resuming it when the session ends) and forwards session-start events to the
 * completion processor.
 */
private static final class CompletionListener
        implements ICompletionListener {
    private CeylonEditor editor;
    private EclipseCompletionProcessor processor;
    //        private CeylonCompletionProcessor processor;
    private CompletionListener(CeylonEditor editor,
            //                CeylonCompletionProcessor processor) {
            EclipseCompletionProcessor processor) {
        this.editor = editor;
        this.processor = processor;
    }
    @Override
    public void selectionChanged(
            ICompletionProposal proposal,
            boolean smartToggle) {}
    @Override
    public void assistSessionStarted(
            ContentAssistEvent event) {
        // Stop background parsing so it doesn't fight the completion engine.
        if (editor!=null) {
            editor.pauseBackgroundParsing();
        }
        if (event.assistant instanceof IContentAssistantExtension2) {
            ((IContentAssistantExtension2)event.assistant).setStatusMessage(CeylonContentAssistant.secondLevelStatusMessage);
        }
        processor.sessionStarted(event.isAutoActivated);
        /*try {
            editor.getSite().getWorkbenchWindow().run(true, true, new Warmup());
        }
        catch (Exception e) {}*/
    }
    @Override
    public void assistSessionEnded(
            ContentAssistEvent event) {
        // Resume parsing and immediately schedule a reparse of pending edits.
        if (editor!=null) {
            editor.unpauseBackgroundParsing();
            editor.scheduleParsing(false);
        }
    }
}
public ContentAssistant getContentAssistant(
ISourceViewer sourceViewer) {
if (editor==null) return null;
ContentAssistant contentAssistant =
new CeylonContentAssistant();
contentAssistant.setRestoreCompletionProposalSize(
getOrCreateSection(getSettings(),
"completion_proposal_popup"));
EclipseCompletionProcessor completionProcessor =
completionJ2C().newCompletionProcessor(editor);
// CeylonCompletionProcessor completionProcessor =
// new CeylonCompletionProcessor(editor);
CompletionListener listener =
new CompletionListener(editor,
completionProcessor);
contentAssistant.addCompletionListener(listener);
contentAssistant.setContentAssistProcessor(
completionProcessor,
DEFAULT_CONTENT_TYPE);
configCompletionPopup(contentAssistant);
contentAssistant.enableColoredLabels(true);
contentAssistant.setRepeatedInvocationMode(true);
KeyStroke key =
KeyStroke.getInstance(SWT.CTRL, SWT.SPACE);
contentAssistant.setRepeatedInvocationTrigger(
KeySequence.getInstance(key));
CeylonContentAssistant.secondLevelStatusMessage = key.format() +
" to toggle second-level completions";
contentAssistant.setStatusMessage(CeylonContentAssistant.secondLevelStatusMessage);
CeylonContentAssistant.retrieveCompleteResultsStatusMessage = key.format() +
" to retrieve all results";
contentAssistant.setStatusLineVisible(true);
contentAssistant.setInformationControlCreator(
new CeylonInformationControlCreator(editor,
"Tab or click for focus"));
contentAssistant.setContextInformationPopupOrientation(
IContentAssistant.CONTEXT_INFO_ABOVE);
contentAssistant.setShowEmptyList(true);
return contentAssistant;
}
static void configCompletionPopup(
ContentAssistant contentAssistant) {
IPreferenceStore preferenceStore =
CeylonPlugin.getPreferences();
if (preferenceStore!=null) {
contentAssistant.enableAutoInsert(
preferenceStore.getBoolean(AUTO_INSERT));
contentAssistant.enableAutoActivation(
preferenceStore.getBoolean(AUTO_ACTIVATION));
contentAssistant.setAutoActivationDelay(
preferenceStore.getInt(AUTO_ACTIVATION_DELAY));
contentAssistant.enablePrefixCompletion(
preferenceStore.getBoolean(AUTO_INSERT_PREFIX));
}
}
@Override
public IQuickAssistAssistant getQuickAssistAssistant(
ISourceViewer sourceViewer) {
if (editor==null) return null;
CeylonCorrectionProcessor quickAssist =
new CeylonCorrectionProcessor(editor);
quickAssist.setRestoreCompletionProposalSize(
getOrCreateSection(getSettings(),
"quickassist_proposal_popup"));
quickAssist.enableColoredLabels(true);
return quickAssist;
}
public AnnotationHover getAnnotationHover(
ISourceViewer sourceViewer) {
return new AnnotationHover(editor, true);
}
public AnnotationHover getOverviewRulerAnnotationHover(
ISourceViewer sourceViewer) {
return new AnnotationHover(editor, true);
}
public IAutoEditStrategy[] getAutoEditStrategies(
ISourceViewer sourceViewer, String contentType) {
return new IAutoEditStrategy[] {
new CeylonAutoEditStrategy() };
}
public ITextDoubleClickStrategy getDoubleClickStrategy(
ISourceViewer sourceViewer, String contentType) {
return new DoubleClickStrategy();
}
public IHyperlinkDetector[] getHyperlinkDetectors(
ISourceViewer sourceViewer) {
CeylonParseController controller =
getParseController();
if (controller==null) {
|
[
" return new IHyperlinkDetector[0];"
] | 548
|
lcc
|
java
| null |
bf36c52efb42e2c0506f3cb765385becf6bf8e07977c4ec2
|
|
import Object3DQt as qt
import PyQt4.Qwt5 as Qwt5
from VerticalSpacer import VerticalSpacer
DEBUG = 0
DRAW_MODES = ['NONE',
'POINT',
'WIRE',
'SURFACE']
class Object3DDrawingModeWidget(qt.QGroupBox):
def __init__(self, parent = None):
qt.QGroupBox.__init__(self, parent)
self.setTitle('Drawing Mode')
self.build()
self.setDrawingMode(1)
def build(self):
self.l = qt.QVBoxLayout(self)
self.l.setMargin(0)
self.l.setSpacing(4)
self.buttonGroup = qt.QButtonGroup(self)
j = 0
for mode in DRAW_MODES:
rButton = qt.QRadioButton(self)
rButton.setText(mode)
self.l.addWidget(rButton)
self.l.setAlignment(rButton, qt.Qt.AlignLeft)
self.buttonGroup.addButton(rButton)
self.buttonGroup.setId(rButton, j)
j += 1
self.connect(self.buttonGroup,
qt.SIGNAL('buttonPressed(QAbstractButton *)'),
self._slot)
def _slot(self, button):
button.setChecked(True)
self._signal()
def _signal(self, event = None):
if DEBUG:
print("emit Object3DDrawingModeSignal")
if event is None:
event = 'DrawModeUpdated'
ddict = self.getParameters()
ddict['event'] = event
self.emit(qt.SIGNAL('Object3DDrawingModeSignal'), ddict)
def getParameters(self):
mode = self.getDrawingMode()
ddict = {}
ddict['mode'] = mode
ddict['label'] = str(self.buttonGroup.button(mode).text())
return ddict
def setParameters(self, ddict = None):
if DEBUG:
print("setParameters")
if ddict is None:
ddict = {}
mode = ddict.get('mode', 1)
self.setDrawingMode(mode)
def setDrawingMode(self, mode):
if type(mode) == type(" "):
if mode.upper() in DRAW_MODES:
i = DRAW_MODES.index(mode)
else:
raise ValueError("Unknown drawing mode: %s " % mode)
else:
i = mode
self.buttonGroup.button(i).setChecked(True)
def getDrawingMode(self):
mode = 0
n = self.buttonGroup.checkedId()
if n >= 0:
mode = n
else:
print("WARNING: getAnchor -> Unselected button")
return mode
def setSupportedModes(self, modes):
current = self.getDrawingMode()
for i in modes:
if i < len(DRAW_MODES):
self.buttonGroup.button(i).setEnabled(True)
# always possible to draw nothing
self.buttonGroup.button(i).setEnabled(True)
if not self.buttonGroup.button(current).isEnabled():
self.buttonGroup.button(0).setChecked(True)
self._signal()
class Object3DAspect(qt.QGroupBox):
def __init__(self, parent = None):
qt.QGroupBox.__init__(self, parent)
self.setTitle('Aspect')
self.build()
def build(self):
self.l = qt.QGridLayout(self)
i = 0
# point size
label = qt.QLabel('Point size')
self.pointSize = Qwt5.QwtSlider(self, qt.Qt.Horizontal)
self.pointSize.setRange(1.0, 1.0, 1.0)
self.pointSize.setValue(1.0)
self.l.addWidget(label, i, 0)
self.l.addWidget(self.pointSize, i, 1)
self.connect(self.pointSize,
qt.SIGNAL("valueChanged(double)"),
self._slot)
# line width
i += 1
label = qt.QLabel('Line width')
self.lineWidth = Qwt5.QwtSlider(self, qt.Qt.Horizontal)
self.lineWidth.setRange(1.0, 1.0, 1.0)
self.lineWidth.setValue(1.0)
self.l.addWidget(label, i, 0)
self.l.addWidget(self.lineWidth, i, 1)
self.connect(self.lineWidth,
qt.SIGNAL("valueChanged(double)"),
self._slot)
# transparency
i += 1
label = qt.QLabel('Transparency')
self.transparency = Qwt5.QwtSlider(self, qt.Qt.Horizontal)
self.transparency.setRange(0.0, 1.0, 0.01)
self.transparency.setValue(0.0)
self.l.addWidget(label, i, 0)
self.l.addWidget(self.transparency, i, 1)
self.connect(self.transparency,
qt.SIGNAL("valueChanged(double)"),
self._slot)
# bounding box
self.boundingBoxCheckBox = qt.QCheckBox(self)
self.boundingBoxCheckBox.setText("Show bounding box")
self.connect(self.boundingBoxCheckBox,
qt.SIGNAL("stateChanged(int)"),
self._signal)
i = 0
j = 2
self.l.addWidget(self.boundingBoxCheckBox, i, j)
self.showLimitsCheckBoxes = []
for t in ['X', 'Y', 'Z']:
i += 1
checkBox = qt.QCheckBox(self)
checkBox.setText('Show bbox %s limit' % t)
self.l.addWidget(checkBox, i, j)
self.connect(checkBox, qt.SIGNAL("stateChanged(int)"), self._slot)
self.showLimitsCheckBoxes.append(checkBox)
def _slot(self, *var):
self._signal()
def getParameters(self):
pointSize = self.pointSize.value()
lineWidth = self.lineWidth.value()
transparency = self.transparency.value()
if self.boundingBoxCheckBox.isChecked():
showBBox = 1
else:
showBBox = 0
showLimits = [0, 0, 0]
for i in range(3):
if self.showLimitsCheckBoxes[i].isChecked():
showLimits[i] = 1
ddict = {}
ddict['pointsize'] = pointSize
ddict['pointsizecapabilities'] = [self.pointSize.minValue(),
self.pointSize.maxValue(),
self.pointSize.step()]
ddict['linewidth'] = lineWidth
ddict['linewidthcapabilities'] = [self.lineWidth.minValue(),
self.lineWidth.maxValue(),
self.lineWidth.step()]
ddict['transparency'] = transparency
ddict['bboxflag' ] = showBBox
ddict['showlimits'] = showLimits
return ddict
def setParameters(self, ddict = None):
if DEBUG:
print("setParameters")
if ddict is None:
ddict = {}
pointSize = ddict.get('pointsize', 1.0)
pointSizeCapabilities = ddict.get('pointsizecapabilities',
[1.0, 1.0, 1.0])
lineWidth = ddict.get('linewidth', 1.0)
lineWidthCapabilities = ddict.get('linewidthcapabilities',
[1.0, 1.0, 1.0])
transparency = ddict.get('transparency', 0.0)
showBBox = ddict.get('bboxflag', 1)
showLimits = ddict.get('showlimits', [1, 1, 1])
self.pointSize.setRange(pointSizeCapabilities[0],
pointSizeCapabilities[1],
pointSizeCapabilities[2])
self.pointSize.setValue(pointSize)
self.lineWidth.setRange(lineWidthCapabilities[0],
lineWidthCapabilities[1],
lineWidthCapabilities[2])
self.lineWidth.setValue(lineWidth)
if lineWidth > lineWidthCapabilities[1]:
lineWidth = lineWidthCapabilities[1]
self.transparency.setValue(transparency)
self.boundingBoxCheckBox.setChecked(showBBox)
for i in [0, 1, 2]:
self.showLimitsCheckBoxes[i].setChecked(showLimits[i])
def _signal(self, event = None):
if DEBUG:
print("emitting Object3DAspectSignal")
if event is None:
event = "AspectUpdated"
ddict = self.getParameters()
ddict['event'] = event
self.emit(qt.SIGNAL('Object3DAspectSignal'), ddict)
class Object3DScale(qt.QGroupBox):
def __init__(self, parent = None):
qt.QGroupBox.__init__(self, parent)
self.setTitle('Object Scaling')
self.l = qt.QGridLayout(self)
self.__disconnect = False
self.__oldScale = [1.0, 1.0, 1.0]
self.lineEditList = []
self.validatorList = []
i = 0
self._lineSlotList =[self._xLineSlot,
self._yLineSlot,
self._zLineSlot]
for axis in ['x', 'y', 'z']:
label = qt.QLabel("%s Scale" % axis)
lineEdit = qt.QLineEdit(self)
v = qt.QDoubleValidator(lineEdit)
lineEdit.setValidator(v)
self.validatorList.append(v)
self.l.addWidget(label, i, 0)
self.l.addWidget(lineEdit, i, 1)
self.lineEditList.append(lineEdit)
lineEdit.setText('1.0')
lineEdit.setFixedWidth(lineEdit.fontMetrics().width('######.#####'))
self.connect(lineEdit,
qt.SIGNAL('editingFinished()'),
self._lineSlotList[i])
i+= 1
# xScaling
i = 0
self.xScaleSlider = Qwt5.QwtSlider(self, qt.Qt.Horizontal)
self.xScaleSlider.setScale(-10.0, 10.0, 0.001)
self.xScaleSlider.setValue(1.0)
self.l.addWidget(self.xScaleSlider, i, 2)
self.connect(self.xScaleSlider,
qt.SIGNAL("valueChanged(double)"),
self._xSliderSlot)
# yScaling
i += 1
self.yScaleSlider = Qwt5.QwtSlider(self, qt.Qt.Horizontal)
self.yScaleSlider.setRange(-100.0, 100.0, 0.01)
self.yScaleSlider.setValue(1.0)
self.l.addWidget(self.yScaleSlider, i, 2)
self.connect(self.yScaleSlider,
qt.SIGNAL("valueChanged(double)"),
self._ySliderSlot)
# zScaling
i += 1
self.zScaleSlider = Qwt5.QwtSlider(self, qt.Qt.Horizontal)
self.zScaleSlider.setRange(-100.0, 100.0, 0.01)
self.zScaleSlider.setValue(1.0)
self.l.addWidget(self.zScaleSlider, i, 2)
self.connect(self.zScaleSlider,
qt.SIGNAL("valueChanged(double)"),
self._zSliderSlot)
def _xSliderSlot(self, *var):
if not self.__disconnect:
scale = [self.xScaleSlider.value(),
self.yScaleSlider.value(),
self.zScaleSlider.value()]
self.__disconnect = True
for i in [0, 1, 2]:
if scale[i] != float(str(self.lineEditList[i].text())):
self.lineEditList[i].setText("%.7g" % scale[i])
self.__disconnect = False
if (self.__oldScale[0] != scale[0]) or \
(self.__oldScale[1] != scale[1]) or \
(self.__oldScale[2] != scale[2]) :
self.__oldScale = scale
self._signal("xScaleUpdated")
def _ySliderSlot(self, *var):
if not self.__disconnect:
scale = [self.xScaleSlider.value(),
self.yScaleSlider.value(),
self.zScaleSlider.value()]
self.__disconnect = True
for i in [0, 1, 2]:
if scale[i] != float(str(self.lineEditList[i].text())):
self.lineEditList[i].setText("%.7g" % scale[i])
self.__disconnect = False
if (self.__oldScale[0] != scale[0]) or \
(self.__oldScale[1] != scale[1]) or \
(self.__oldScale[2] != scale[2]) :
self.__oldScale = scale
self._signal("yScaleUpdated")
def _zSliderSlot(self, *var):
if not self.__disconnect:
scale = [self.xScaleSlider.value(),
self.yScaleSlider.value(),
self.zScaleSlider.value()]
self.__disconnect = True
for i in [0, 1, 2]:
if scale[i] != float(str(self.lineEditList[i].text())):
self.lineEditList[i].setText("%.7g" % scale[i])
self.__disconnect = False
if (self.__oldScale[0] != scale[0]) or \
(self.__oldScale[1] != scale[1]) or \
(self.__oldScale[2] != scale[2]) :
self.__oldScale = scale
self._signal("zScaleUpdated")
def _xLineSlot(self):
if not self.__disconnect:
self.__disconnect = True
scale = [1, 1, 1]
for i in [0, 1 , 2]:
scale[i] = float(str(self.lineEditList[i].text()))
self.xScaleSlider.setValue(scale[0])
self.yScaleSlider.setValue(scale[1])
self.zScaleSlider.setValue(scale[2])
self.__disconnect = False
self._signal("xScaleUpdated")
def _yLineSlot(self):
if not self.__disconnect:
self.__disconnect = True
scale = [1, 1, 1]
for i in [0, 1 , 2]:
scale[i] = float(str(self.lineEditList[i].text()))
self.xScaleSlider.setValue(scale[0])
self.yScaleSlider.setValue(scale[1])
self.zScaleSlider.setValue(scale[2])
self.__disconnect = False
self._signal("yScaleUpdated")
def _zLineSlot(self):
if not self.__disconnect:
self.__disconnect = True
scale = [1, 1, 1]
for i in [0, 1 , 2]:
scale[i] = float(str(self.lineEditList[i].text()))
self.xScaleSlider.setValue(scale[0])
self.yScaleSlider.setValue(scale[1])
self.zScaleSlider.setValue(scale[2])
self.__disconnect = False
self._signal("zScaleUpdated")
def _signal(self, event = None):
if DEBUG:
print("emitting Object3DScaleSignal")
if self.__disconnect: return
if event is None:
event = "ScaleUpdated"
oldScale = self._lastParameters * 1
ddict = self.getParameters()
scale = ddict['scale']
emit = False
for i in range(3):
if abs((scale[i]-oldScale[i])) > 1.0e-10:
emit = True
ddict['magnification'] = scale[i]/oldScale[i]
break
if not emit:
return
ddict['event'] = event
self.emit(qt.SIGNAL('Object3DScaleSignal'), ddict)
def getParameters(self):
scale = [1.0, 1.0, 1.0]
for i in [0, 1 , 2]:
scale[i] = float(str(self.lineEditList[i].text()))
ddict = {}
ddict['scale'] = scale
self._lastParameters = scale
return ddict
def setParameters(self, ddict = None):
if DEBUG:
print("setParameters", ddict)
if ddict is None:ddict = {}
scale = ddict.get('scale', [1.0, 1.0, 1.0])
self.xScaleSlider.setValue(scale[0])
self.yScaleSlider.setValue(scale[1])
self.zScaleSlider.setValue(scale[2])
for i in [0, 1, 2]:
self.lineEditList[i].setText("%.7g" % scale[i])
self._lastParameters = scale
class Object3DPrivateInterface(qt.QGroupBox):
def __init__(self, parent = None):
qt.QGroupBox.__init__(self, parent)
self.setTitle('Private Configuration')
self.mainLayout = qt.QVBoxLayout(self)
self.button = qt.QPushButton(self)
self.button.setText("More")
self.mainLayout.addWidget(self.button)
self.mainLayout.addWidget(VerticalSpacer(self))
class Object3DProperties(qt.QWidget):
def __init__(self, parent = None):
qt.QWidget.__init__(self, parent)
self.l = qt.QHBoxLayout(self)
self.drawingModeWidget = Object3DDrawingModeWidget(self)
|
[
" self.aspectWidget = Object3DAspect(self)"
] | 1,044
|
lcc
|
python
| null |
f2938d3f351d2aebd34b5c529253c96aa77b793e3442e486
|
|
using System;
using System.Reflection;
using System.Collections;
using Server;
using Server.Targeting;
using Server.Network;
using Server.Misc;
namespace Server.Gumps
{
public class SetPoint2DGump : Gump
{
private PropertyInfo m_Property;
private Mobile m_Mobile;
private object m_Object;
private Stack m_Stack;
private int m_Page;
private ArrayList m_List;
public static readonly bool OldStyle = PropsConfig.OldStyle;
public static readonly int GumpOffsetX = PropsConfig.GumpOffsetX;
public static readonly int GumpOffsetY = PropsConfig.GumpOffsetY;
public static readonly int TextHue = PropsConfig.TextHue;
public static readonly int TextOffsetX = PropsConfig.TextOffsetX;
public static readonly int OffsetGumpID = PropsConfig.OffsetGumpID;
public static readonly int HeaderGumpID = PropsConfig.HeaderGumpID;
public static readonly int EntryGumpID = PropsConfig.EntryGumpID;
public static readonly int BackGumpID = PropsConfig.BackGumpID;
public static readonly int SetGumpID = PropsConfig.SetGumpID;
public static readonly int SetWidth = PropsConfig.SetWidth;
public static readonly int SetOffsetX = PropsConfig.SetOffsetX, SetOffsetY = PropsConfig.SetOffsetY;
public static readonly int SetButtonID1 = PropsConfig.SetButtonID1;
public static readonly int SetButtonID2 = PropsConfig.SetButtonID2;
public static readonly int PrevWidth = PropsConfig.PrevWidth;
public static readonly int PrevOffsetX = PropsConfig.PrevOffsetX, PrevOffsetY = PropsConfig.PrevOffsetY;
public static readonly int PrevButtonID1 = PropsConfig.PrevButtonID1;
public static readonly int PrevButtonID2 = PropsConfig.PrevButtonID2;
public static readonly int NextWidth = PropsConfig.NextWidth;
public static readonly int NextOffsetX = PropsConfig.NextOffsetX, NextOffsetY = PropsConfig.NextOffsetY;
public static readonly int NextButtonID1 = PropsConfig.NextButtonID1;
public static readonly int NextButtonID2 = PropsConfig.NextButtonID2;
public static readonly int OffsetSize = PropsConfig.OffsetSize;
public static readonly int EntryHeight = PropsConfig.EntryHeight;
public static readonly int BorderSize = PropsConfig.BorderSize;
private static readonly int CoordWidth = 105;
private static readonly int EntryWidth = CoordWidth + OffsetSize + CoordWidth;
private static readonly int TotalWidth = OffsetSize + EntryWidth + OffsetSize + SetWidth + OffsetSize;
private static readonly int TotalHeight = OffsetSize + ( 4 * ( EntryHeight + OffsetSize ) );
private static readonly int BackWidth = BorderSize + TotalWidth + BorderSize;
private static readonly int BackHeight = BorderSize + TotalHeight + BorderSize;
public SetPoint2DGump( PropertyInfo prop, Mobile mobile, object o, Stack stack, int page, ArrayList list )
: base( GumpOffsetX, GumpOffsetY )
{
m_Property = prop;
m_Mobile = mobile;
m_Object = o;
m_Stack = stack;
m_Page = page;
m_List = list;
Point2D p = (Point2D) prop.GetValue( o, null );
AddPage( 0 );
AddBackground( 0, 0, BackWidth, BackHeight, BackGumpID );
AddImageTiled( BorderSize, BorderSize, TotalWidth - ( OldStyle ? SetWidth + OffsetSize : 0 ), TotalHeight, OffsetGumpID );
int x = BorderSize + OffsetSize;
int y = BorderSize + OffsetSize;
AddImageTiled( x, y, EntryWidth, EntryHeight, EntryGumpID );
AddLabelCropped( x + TextOffsetX, y, EntryWidth - TextOffsetX, EntryHeight, TextHue, prop.Name );
x += EntryWidth + OffsetSize;
if ( SetGumpID != 0 )
{
AddImageTiled( x, y, SetWidth, EntryHeight, SetGumpID );
}
x = BorderSize + OffsetSize;
y += EntryHeight + OffsetSize;
AddImageTiled( x, y, EntryWidth, EntryHeight, EntryGumpID );
AddLabelCropped( x + TextOffsetX, y, EntryWidth - TextOffsetX, EntryHeight, TextHue, "Use your location" );
x += EntryWidth + OffsetSize;
if ( SetGumpID != 0 )
{
AddImageTiled( x, y, SetWidth, EntryHeight, SetGumpID );
}
AddButton( x + SetOffsetX, y + SetOffsetY, SetButtonID1, SetButtonID2, 1, GumpButtonType.Reply, 0 );
x = BorderSize + OffsetSize;
y += EntryHeight + OffsetSize;
AddImageTiled( x, y, EntryWidth, EntryHeight, EntryGumpID );
AddLabelCropped( x + TextOffsetX, y, EntryWidth - TextOffsetX, EntryHeight, TextHue, "Target a location" );
x += EntryWidth + OffsetSize;
if ( SetGumpID != 0 )
{
AddImageTiled( x, y, SetWidth, EntryHeight, SetGumpID );
}
AddButton( x + SetOffsetX, y + SetOffsetY, SetButtonID1, SetButtonID2, 2, GumpButtonType.Reply, 0 );
x = BorderSize + OffsetSize;
y += EntryHeight + OffsetSize;
AddImageTiled( x, y, CoordWidth, EntryHeight, EntryGumpID );
AddLabelCropped( x + TextOffsetX, y, CoordWidth - TextOffsetX, EntryHeight, TextHue, "X:" );
AddTextEntry( x + 16, y, CoordWidth - 16, EntryHeight, TextHue, 0, p.X.ToString() );
x += CoordWidth + OffsetSize;
AddImageTiled( x, y, CoordWidth, EntryHeight, EntryGumpID );
AddLabelCropped( x + TextOffsetX, y, CoordWidth - TextOffsetX, EntryHeight, TextHue, "Y:" );
AddTextEntry( x + 16, y, CoordWidth - 16, EntryHeight, TextHue, 1, p.Y.ToString() );
x += CoordWidth + OffsetSize;
if ( SetGumpID != 0 )
{
AddImageTiled( x, y, SetWidth, EntryHeight, SetGumpID );
}
AddButton( x + SetOffsetX, y + SetOffsetY, SetButtonID1, SetButtonID2, 3, GumpButtonType.Reply, 0 );
}
private class InternalTarget : Target
{
private PropertyInfo m_Property;
private Mobile m_Mobile;
private object m_Object;
private Stack m_Stack;
private int m_Page;
private ArrayList m_List;
public InternalTarget( PropertyInfo prop, Mobile mobile, object o, Stack stack, int page, ArrayList list )
: base( -1, true, TargetFlags.None )
{
m_Property = prop;
m_Mobile = mobile;
m_Object = o;
m_Stack = stack;
m_Page = page;
m_List = list;
}
protected override void OnTarget( Mobile from, object targeted )
{
IPoint3D p = targeted as IPoint3D;
if ( p != null )
{
try
{
Server.Scripts.Commands.CommandLogging.LogChangeProperty( m_Mobile, m_Object, m_Property.Name, new Point2D( p ).ToString() );
m_Property.SetValue( m_Object, new Point2D( p ), null );
PropertiesGump.OnValueChanged( m_Object, m_Property, m_Stack );
}
catch
{
m_Mobile.SendMessage( "An exception was caught. The property may not have changed." );
}
}
}
protected override void OnTargetFinish( Mobile from )
{
m_Mobile.SendGump( new PropertiesGump( m_Mobile, m_Object, m_Stack, m_List, m_Page ) );
}
}
public override void OnResponse( NetState sender, RelayInfo info )
{
Point2D toSet;
bool shouldSet, shouldSend;
switch ( info.ButtonID )
{
case 1: // Current location
{
toSet = new Point2D( m_Mobile.Location );
shouldSet = true;
shouldSend = true;
break;
}
case 2: // Pick location
{
m_Mobile.Target = new InternalTarget( m_Property, m_Mobile, m_Object, m_Stack, m_Page, m_List );
toSet = Point2D.Zero;
shouldSet = false;
shouldSend = false;
break;
}
|
[
"\t\t\t\tcase 3: // Use values"
] | 886
|
lcc
|
csharp
| null |
782603f333c5ed114de335a7728c0c7c288c0ea45f48a9ee
|
|
/*
* Copyright 2013 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Eclipse Public License version 1.0, available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.jboss.forge.addon.ui.util;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import org.jboss.forge.addon.convert.CompositeConverter;
import org.jboss.forge.addon.convert.Converter;
import org.jboss.forge.addon.convert.ConverterFactory;
import org.jboss.forge.addon.facets.Facet;
import org.jboss.forge.addon.ui.facets.HintsFacet;
import org.jboss.forge.addon.ui.hints.InputType;
import org.jboss.forge.addon.ui.input.HasCompleter;
import org.jboss.forge.addon.ui.input.InputComponent;
import org.jboss.forge.addon.ui.input.ManyValued;
import org.jboss.forge.addon.ui.input.SelectComponent;
import org.jboss.forge.addon.ui.input.SingleValued;
import org.jboss.forge.addon.ui.input.UICompleter;
import org.jboss.forge.furnace.util.Sets;
import org.jboss.forge.furnace.util.Strings;
/**
* Utilities for {@link InputComponent} objects
*
* @author <a href="mailto:ggastald@redhat.com">George Gastaldi</a>
*
*/
@SuppressWarnings({ "rawtypes", "unchecked" })
public final class InputComponents
{
public static final char DEFAULT_SHORT_NAME = ' ';
private static final String COLON = ":";
/**
* @return the {@link InputType} object associated to this {@link InputComponent}
*/
public static String getInputType(InputComponent<?, ?> input)
{
String result = InputType.DEFAULT;
for (Facet f : input.getFacets())
{
if (HintsFacet.class.isInstance(f))
{
result = ((HintsFacet) f).getInputType();
break;
}
}
// FIXME: The following code does NOT work when called from Eclipse. Could it be a bug in CLAC ?
// if (input.hasFacet(HintsFacet.class))
// {
// HintsFacet facet = input.getFacet(HintsFacet.class);
// result = facet.getInputType();
// }
return result;
}
/**
* Returns the value stored in this {@link InputComponent}. <code>null</code> if the component is null
*/
public static Object getValueFor(InputComponent<?, ?> component)
{
return (component == null) ? null : component.getValue();
}
/**
* Sets the value in the provided {@link InputComponent}, making any necessary conversions
*
* @param component
* @param value
*/
public static void setValueFor(final ConverterFactory converterFactory, final InputComponent<?, ?> component,
final Object value)
{
if (component instanceof SingleValued)
{
setSingleInputValue(converterFactory, component, value, false);
}
else if (component instanceof ManyValued)
{
setManyInputValue(converterFactory, component, value, false);
}
}
/**
* Sets the default value in the provided {@link InputComponent}, making any necessary conversions
*
* @param component
* @param value
*/
public static void setDefaultValueFor(final ConverterFactory converterFactory,
final InputComponent<?, Object> component,
final Object value)
{
if (component instanceof SingleValued)
{
setSingleInputValue(converterFactory, component, value, true);
}
else if (component instanceof ManyValued)
{
setManyInputValue(converterFactory, component, value, true);
}
}
private static void setSingleInputValue(final ConverterFactory converterFactory,
final InputComponent<?, ?> input, final Object value, boolean defaultValue)
{
final Object convertedType;
if (value != null)
{
convertedType = convertToUIInputValue(converterFactory, input, value);
}
else
{
convertedType = null;
}
if (defaultValue)
{
((SingleValued) input).setDefaultValue(convertedType);
}
else
{
((SingleValued) input).setValue(convertedType);
}
}
private static void setManyInputValue(final ConverterFactory converterFactory,
final InputComponent<?, ?> input, Object value, boolean defaultValue)
{
final Iterable<Object> convertedValues;
if (value != null)
{
List<Object> convertedValuesList = new ArrayList<>();
if (value instanceof Iterable && !input.getValueType().isInstance(value))
{
for (Object itValue : (Iterable) value)
{
Object singleValue = convertToUIInputValue(converterFactory, input, itValue);
if (singleValue != null)
{
convertedValuesList.add(singleValue);
}
}
}
else
{
Object singleValue = convertToUIInputValue(converterFactory, input, value);
if (singleValue != null)
{
convertedValuesList.add(singleValue);
}
}
convertedValues = convertedValuesList;
}
else
{
convertedValues = null;
}
if (defaultValue)
{
((ManyValued) input).setDefaultValue(convertedValues);
}
else
{
((ManyValued) input).setValue(convertedValues);
}
}
/**
* Returns the converted value that matches the input.
*/
public static Object convertToUIInputValue(final ConverterFactory converterFactory,
final InputComponent<?, ?> input, final Object value)
{
final Object result;
Class<Object> sourceType = (Class<Object>) value.getClass();
Class<Object> targetType = (Class<Object>) input.getValueType();
if (!targetType.isAssignableFrom(sourceType))
{
if (input instanceof SelectComponent)
{
SelectComponent<?, Object> selectComponent = (SelectComponent<?, Object>) input;
Iterable<Object> valueChoices = selectComponent.getValueChoices();
final Converter<Object, ?> selectConverter;
if (String.class.isAssignableFrom(sourceType))
{
selectConverter = getItemLabelConverter(converterFactory, selectComponent);
}
else
{
selectConverter = converterFactory.getConverter(targetType, sourceType);
}
Object chosenObj = null;
if (valueChoices != null)
{
for (Object valueChoice : valueChoices)
{
Object convertedObj = selectConverter.convert(valueChoice);
if (convertedObj.equals(value))
{
chosenObj = valueChoice;
break;
}
}
}
result = chosenObj;
}
else
{
Converter<String, Object> valueConverter = (Converter<String, Object>) input.getValueConverter();
if (valueConverter != null)
{
if (value instanceof String)
{
result = valueConverter.convert((String) value);
}
else
{
Converter<Object, String> stringConverter = converterFactory.getConverter(sourceType, String.class);
CompositeConverter compositeConverter = new CompositeConverter(stringConverter, valueConverter);
result = compositeConverter.convert(value);
}
}
else
{
Converter<Object, Object> converter = converterFactory.getConverter(sourceType, targetType);
result = converter.convert(value);
}
}
}
else
{
Converter<String, Object> valueConverter = (Converter<String, Object>) input.getValueConverter();
if (valueConverter != null && value instanceof String)
{
result = valueConverter.convert((String) value);
}
else
{
// FORGE-2493: By setting the system property 'org.jboss.forge.ui.select_one_lenient_value' to true will
// allow UISelectOne to set values outside of its value choices. (pre-2.20.0.Final behavior)
if (input instanceof SelectComponent && !Boolean.getBoolean("org.jboss.forge.ui.select_one_lenient_value"))
{
SelectComponent<?, Object> selectComponent = (SelectComponent<?, Object>) input;
Set<Object> valueChoices = Sets.toSet(selectComponent.getValueChoices());
// Check if the value is contained in the valueChoices set
if (valueChoices != null && valueChoices.contains(value))
{
result = value;
}
else
{
// equals()/hashCode may not have been implemented. Trying to compare from the String representation
Object chosenObj = null;
if (valueChoices != null)
{
Converter<Object, String> selectConverter = getItemLabelConverter(converterFactory,
selectComponent);
|
[
" String valueLabel = selectConverter.convert(value);"
] | 764
|
lcc
|
java
| null |
cffa07171638f2df3007fc0e85a42300f465b867bac1df90
|
|
/* Mesquite source code. Copyright 1997-2009 W. Maddison and D. Maddison.
Version 2.71, September 2009.
Disclaimer: The Mesquite source code is lengthy and we are few. There are no doubt inefficiencies and goofs in this code.
The commenting leaves much to be desired. Please approach this source code with the spirit of helping out.
Perhaps with your help we can be more than a few, and make Mesquite better.
Mesquite is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY.
Mesquite's web site is http://mesquiteproject.org
This source code and its compiled class files are free and modifiable under the terms of
GNU Lesser General Public License. (http://www.gnu.org/copyleft/lesser.html)
*/
package mesquite.categ.lib;
import mesquite.lib.*;
import mesquite.lib.duties.CharHistorySource;
public class CategStateChanges {
int maxChangesRecorded = 10;
int[][] min;
int[][] max;
double[][] avg;
double[][] total;
double[][][] fractionWithAmount;
double[][][] totalWithAmount;
double[][] totalChanges;
boolean [][] acceptableChange;
int numStates = 0;
long numMappings = 0;
long numHistories = 0;
public CategStateChanges(int numStates, int maxChanges) {
maxChangesRecorded = maxChanges;
this.numStates = numStates;
min = new int[numStates][numStates];
max = new int[numStates][numStates];
avg = new double[numStates][numStates];
total = new double[numStates][numStates];
totalChanges = new double[numStates][numStates];
fractionWithAmount = new double[numStates][numStates][maxChangesRecorded];
totalWithAmount = new double[numStates][numStates][maxChangesRecorded];
acceptableChange = new boolean[numStates][numStates];
initializeArrays();
}
/*.................................................................................................................*/
public int getNumStates(){
return numStates;
}
/*.................................................................................................................*/
public void adjustNumStates(int numStatesNew){
min =Integer2DArray.cloneIncreaseSize(min,numStatesNew, numStatesNew);
max =Integer2DArray.cloneIncreaseSize(max,numStatesNew, numStatesNew);
avg =Double2DArray.cloneIncreaseSize(avg,numStatesNew, numStatesNew);
total =Double2DArray.cloneIncreaseSize(total,numStatesNew, numStatesNew);
totalChanges =Double2DArray.cloneIncreaseSize(totalChanges,numStatesNew, numStatesNew);
fractionWithAmount =Double2DArray.cloneIncreaseSize(fractionWithAmount,numStatesNew, numStatesNew, maxChangesRecorded);
totalWithAmount =Double2DArray.cloneIncreaseSize(totalWithAmount,numStatesNew, numStatesNew, maxChangesRecorded);
numStates = numStatesNew;
}
/*.................................................................................................................*/
public void initializeArrays() {
for (int i=0; i<numStates; i++)
for (int j=0; j<numStates; j++) {
min[i][j]= Integer.MAX_VALUE;
max[i][j]= 0;
avg[i][j]=0.0;
total[i][j]=0.0;
totalChanges[i][j]=0.0;
acceptableChange[i][j]=true;
for (int k=0; k<maxChangesRecorded; k++) {
fractionWithAmount[i][j][k] = 0.0;
totalWithAmount[i][j][k] = 0.0;
}
}
}
/*.................................................................................................................*/
public void setAcceptableChange(int i, int j, boolean b) {
acceptableChange[i][j]=b;
}
/*.................................................................................................................*/
public void zeroTotals() {
for (int i=0; i<numStates; i++)
for (int j=0; j<numStates; j++) {
total[i][j]=0.0;
totalChanges[i][j]=0.0;
for (int k=0; k<maxChangesRecorded; k++) {
totalWithAmount[i][j][k] = 0.0;
}
}
}
/*.................................................................................................................*/
public boolean addOneMapping(Tree tree, CategoricalHistory history, int node, int whichMapping) {
if (!tree.nodeExists(node))
node = tree.getRoot();
int[][] array = history.harvestStateChanges(tree, node,null);
if (array==null || array.length != numStates)
return false;
return addOneMapping(array,false);
}
/*.................................................................................................................*/
public boolean acceptableMapping(int[][] array) {
for (int i=0; i<numStates && i<array.length; i++)
for (int j=0; j<numStates &&j<array[i].length; j++)
if (!acceptableChange[i][j] && array[i][j]>0)
return false;
return true;
}
/*.................................................................................................................*/
public boolean addOneMapping(int[][] array, boolean useTotal) {
if (array==null)
return false;
if (!acceptableMapping(array))
return false;
numMappings++;
for (int i=0; i<numStates && i<array.length; i++)
for (int j=0; j<numStates &&j<array[i].length; j++)
{
min[i][j] = MesquiteInteger.minimum(min[i][j],array[i][j]);
max[i][j] = MesquiteInteger.maximum(max[i][j],array[i][j]);
if (useTotal)
total[i][j] = total[i][j]+array[i][j];
else
avg[i][j] = ((avg[i][j]*numMappings-1)+array[i][j])/numMappings;
if (array[i][j]>=maxChangesRecorded)
totalWithAmount[i][j][maxChangesRecorded-1]++;
else
totalWithAmount[i][j][array[i][j]]++;
totalChanges[i][j]++;
}
return true;
}
/*.................................................................................................................*/
public void oneMappingToString (int[][] array, StringBuffer sb, String lineStart) {
if (array==null || sb==null)
return;
if (!acceptableMapping(array))
return;
if (StringUtil.notEmpty(lineStart))
sb.append(lineStart+"\t");
for (int i=0; i<numStates && i<array.length; i++)
for (int j=0; j<numStates &&j<array[i].length; j++)
if (i!=j)
sb.append(""+array[i][j]+"\t");
sb.append("\n");
}
/*.................................................................................................................*/
public boolean mappingsAvailable(){
return true;
}
/*.................................................................................................................*/
public void addOneHistory(Tree tree, CharHistorySource historySource,int ic, int node, MesquiteInteger numMappingsSampled, int samplingLimit, MesquiteInteger newSamplingLimit, boolean queryChangeSamplingLimit, StringBuffer fullDetails, String lineStart) {
CategoricalHistory resultStates=null;
CategoricalHistory history = null;
zeroTotals();
int[][] array;
int mappingsAdded=0;
if (!mappingsAvailable()) {
history = (CategoricalHistory)historySource.getMapping(0, history, null);
if (history.getMaxState()+1>getNumStates())
adjustNumStates(history.getMaxState()+1);
history.clone(resultStates);
if (resultStates instanceof mesquite.categ.lib.CategoricalHistory){
array= ((mesquite.categ.lib.CategoricalHistory)resultStates).harvestStateChanges(tree, node, null);
if (addOneMapping(array, true)) mappingsAdded++;
oneMappingToString(array, fullDetails, lineStart);
}
}
else {
long numMappings = historySource.getNumberOfMappings(tree, ic);
if (queryChangeSamplingLimit && !MesquiteThread.isScripting() && newSamplingLimit!=null) {
int newLimit = MesquiteInteger.queryInteger(historySource.getModuleWindow(), "Maximum number of mappings to sample", "Maximum number of mappings to sample for the character on each tree",samplingLimit, 1, Integer.MAX_VALUE);
if (MesquiteInteger.isCombinable(newLimit))
newSamplingLimit.setValue(newLimit);
}
if (numMappings == MesquiteLong.infinite || !MesquiteLong.isCombinable(numMappings)) {
for (int i=0; i<samplingLimit; i++) {
resultStates = (CategoricalHistory)historySource.getMapping(i, resultStates, null);
if (resultStates instanceof mesquite.categ.lib.CategoricalHistory) {
array= ((mesquite.categ.lib.CategoricalHistory)resultStates).harvestStateChanges(tree, node,null);
if (addOneMapping(array, true)) mappingsAdded++;
oneMappingToString(array, fullDetails,lineStart);
}
}
}
else
if (numMappings<=samplingLimit) {
for (int i=0; i<numMappings; i++) {
resultStates = (CategoricalHistory)historySource.getMapping(i, resultStates, null);
if (resultStates instanceof mesquite.categ.lib.CategoricalHistory) {
array= ((mesquite.categ.lib.CategoricalHistory)resultStates).harvestStateChanges(tree, node,null);
if (addOneMapping(array, true)) mappingsAdded++;
oneMappingToString(array, fullDetails,lineStart);
}
}
}
else {
for (int i=0; i<samplingLimit; i++) {
resultStates = (CategoricalHistory)historySource.getMapping(RandomBetween.getLongStatic(0,numMappings-1),resultStates,null);
if (resultStates instanceof mesquite.categ.lib.CategoricalHistory) {
array= ((mesquite.categ.lib.CategoricalHistory)resultStates).harvestStateChanges(tree, node, null);
if (addOneMapping(array, true)) mappingsAdded++;
oneMappingToString(array, fullDetails,lineStart);
}
}
}
}
if (mappingsAdded>0)
numHistories++;
if (numMappingsSampled!=null) numMappingsSampled.setValue(mappingsAdded);
if (mappingsAdded>0)
for (int i=0; i<numStates; i++)
for (int j=0; j<numStates; j++)
{
avg[i][j] = avg[i][j]+total[i][j]/mappingsAdded;
if (totalChanges[i][j]>0 && i!=j)
for (int k=0; k<maxChangesRecorded; k++) {
fractionWithAmount[i][j][k] = fractionWithAmount[i][j][k]+totalWithAmount[i][j][k]/totalChanges[i][j];
}
}
}
/*.................................................................................................................*/
public void addOneHistory(Tree tree, CategoricalHistory history,int node, int samplingLimit) {
CategoricalHistory resultStates=null;
zeroTotals();
int[][] array;
int mappingsAdded=0;
if (!mappingsAvailable()) {
history.clone(resultStates);
if (resultStates instanceof mesquite.categ.lib.CategoricalHistory){
array= ((mesquite.categ.lib.CategoricalHistory)resultStates).harvestStateChanges(tree, node, null);
if (addOneMapping(array, true)) mappingsAdded++;
}
}
else {
long numMappings = history.getNumResolutions(tree);
if (numMappings == MesquiteLong.infinite) {
for (int i=0; i<samplingLimit; i++) {
resultStates = (CategoricalHistory)history.getResolution(tree, resultStates, i);
if (resultStates instanceof mesquite.categ.lib.CategoricalHistory) {
array= ((mesquite.categ.lib.CategoricalHistory)resultStates).harvestStateChanges(tree, node,null);
if (addOneMapping(array, true)) mappingsAdded++;
}
}
}
else if (MesquiteLong.isCombinable(numMappings))
if (numMappings<=samplingLimit) {
for (int i=0; i<numMappings; i++) {
resultStates = (CategoricalHistory)history.getResolution(tree, resultStates, i);
if (resultStates instanceof mesquite.categ.lib.CategoricalHistory) {
array= ((mesquite.categ.lib.CategoricalHistory)resultStates).harvestStateChanges(tree, node,null);
if (addOneMapping(array, true)) mappingsAdded++;
}
}
}
else {
for (int i=0; i<samplingLimit; i++) {
resultStates = (CategoricalHistory)history.getResolution(tree, resultStates, RandomBetween.getLongStatic(0,numMappings-1));
if (resultStates instanceof mesquite.categ.lib.CategoricalHistory) {
array= ((mesquite.categ.lib.CategoricalHistory)resultStates).harvestStateChanges(tree, node, null);
if (addOneMapping(array, true)) mappingsAdded++;
}
}
}
}
if (mappingsAdded>0)
numHistories++;
if (mappingsAdded>0)
for (int i=0; i<numStates; i++)
for (int j=0; j<numStates; j++)
{
avg[i][j] = avg[i][j]+total[i][j]/mappingsAdded;
if (totalChanges[i][j]>0 && i!=j)
for (int k=0; k<maxChangesRecorded; k++) {
fractionWithAmount[i][j][k] = fractionWithAmount[i][j][k]+totalWithAmount[i][j][k]/totalChanges[i][j];
}
}
}
/*.................................................................................................................*/
public void cleanUp() {
for (int i=0; i<numStates; i++)
for (int j=0; j<numStates; j++) {
if (min[i][j]== Integer.MAX_VALUE)
min[i][j] = 0;
if (numHistories>0) {
avg[i][j] = avg[i][j]/numHistories;
if (i!=j) for (int k=0; k<maxChangesRecorded; k++) {
fractionWithAmount[i][j][k] = fractionWithAmount[i][j][k]/numHistories;
}
}
}
}
/*.................................................................................................................*/
public int[][] getMin() {
return min;
}
/*.................................................................................................................*/
public int[][] getMax() {
return max;
}
/*.................................................................................................................*/
public double[][] getAvg() {
return avg;
}
/*.................................................................................................................*/
public String toVerboseString(){
StringBuffer sb = new StringBuffer();
sb.append("Minimum, maximum, and average number of each kind across all trees\n");
sb.append("------------------------------------\n");
sb.append("change\tmin\tmax\tavg\n");
for (int i=0; i<numStates; i++)
for (int j=0; j<numStates; j++){
if (i!=j)
sb.append(""+i+"->"+j+" \t"+min[i][j] +"\t"+max[i][j] +"\t"+avg[i][j]+"\n");
}
sb.append("\n\n\nFraction of trees with specific number of changes of each kind\n");
sb.append("------------------------------------\n");
sb.append("change\t#changes\tfraction\n");
for (int i=0; i<numStates; i++)
for (int j=0; j<numStates; j++)
|
[
"\t\t\t\tif (i!=j) {"
] | 1,027
|
lcc
|
java
| null |
c72913a89874d6308bd8a87c745a41178548b9df3cd7e0c0
|
|
/**
* Copyright (C) Squizz PTY LTD
* This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
* You should have received a copy of the GNU General Public License along with this program. If not, see http://www.gnu.org/licenses/.
*/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Squizz.Platform.API.v1;
using Squizz.Platform.API.v1.endpoint;
using EcommerceStandardsDocuments;
namespace Squizz.Platform.API.Examples.APIv1
{
/**
* Shows an example of creating a organisation session with the SQUIZZ.com platform's API, then sends a organisation's purchase order data to supplier
*/
public class APIv1ExampleRunnerProcurePurchaseOrderFromSupplier
{
public static void runAPIv1ExampleRunnerProcurePurchaseOrderFromSupplier()
{
Console.WriteLine("Example - Procure Purchase Order From Supplier API Session");
Console.WriteLine("");
//obtain or load in an organisation's API credentials, in this example from the user in the console
Console.WriteLine("Enter Organisation ID:");
string orgID = Console.ReadLine();
Console.WriteLine("Enter Organisation API Key:");
string orgAPIKey = Console.ReadLine();
Console.WriteLine("Enter Organisation API Password:");
string orgAPIPass = Console.ReadLine();
Console.WriteLine("Enter Supplier Organisation ID:");
string supplierOrgID = Console.ReadLine();
Console.WriteLine("(optional) Enter Supplier's Customer Account Code:");
string customerAccountCode = Console.ReadLine();
//create an API session instance
int sessionTimeoutMilliseconds = 20000;
APIv1OrgSession apiOrgSession = new APIv1OrgSession(orgID, orgAPIKey, orgAPIPass, sessionTimeoutMilliseconds, APIv1Constants.SUPPORTED_LOCALES_EN_AU);
//call the platform's API to request that a session is created
APIv1EndpointResponse endpointResponse = apiOrgSession.createOrgSession();
//check if the organisation's credentials were correct and that a session was created in the platform's API
if (endpointResponse.result.ToUpper() == APIv1EndpointResponse.ENDPOINT_RESULT_SUCCESS)
{
//session has been created so now can call other API endpoints
Console.WriteLine("SUCCESS - API session has successfully been created.");
}
else
{
//session failed to be created
Console.WriteLine("FAIL - API session failed to be created. Reason: " + endpointResponse.result_message + " Error Code: " + endpointResponse.result_code);
}
//sand and procure purchsae order if the API was successfully created
if (apiOrgSession.doesSessionExist())
{
//create purchase order record to import
ESDRecordOrderPurchase purchaseOrderRecord = new ESDRecordOrderPurchase();
//set data within the purchase order
purchaseOrderRecord.keyPurchaseOrderID = "111";
purchaseOrderRecord.purchaseOrderCode = "POEXAMPLE-345";
purchaseOrderRecord.purchaseOrderNumber = "345";
purchaseOrderRecord.instructions = "Leave goods at the back entrance";
purchaseOrderRecord.keySupplierAccountID = "2";
purchaseOrderRecord.supplierAccountCode = "ACM-002";
//set delivery address that ordered goods will be delivered to
purchaseOrderRecord.deliveryAddress1 = "32";
purchaseOrderRecord.deliveryAddress2 = "Main Street";
purchaseOrderRecord.deliveryAddress3 = "Melbourne";
purchaseOrderRecord.deliveryRegionName = "Victoria";
purchaseOrderRecord.deliveryCountryName = "Australia";
purchaseOrderRecord.deliveryPostcode = "3000";
purchaseOrderRecord.deliveryOrgName = "Acme Industries";
purchaseOrderRecord.deliveryContact = "Jane Doe";
//set billing address that the order will be billed to for payment
purchaseOrderRecord.billingAddress1 = "43";
purchaseOrderRecord.billingAddress2 = " High Street";
purchaseOrderRecord.billingAddress3 = "Melbourne";
purchaseOrderRecord.billingRegionName = "Victoria";
purchaseOrderRecord.billingCountryName = "Australia";
purchaseOrderRecord.billingPostcode = "3000";
purchaseOrderRecord.billingOrgName = "Acme Industries International";
purchaseOrderRecord.billingContact = "John Citizen";
//create an array of purchase order lines
List<ESDRecordOrderPurchaseLine> orderLines = new List<ESDRecordOrderPurchaseLine>();
//create purchase order line record 1
ESDRecordOrderPurchaseLine orderProduct = new ESDRecordOrderPurchaseLine();
orderProduct.lineType = ESDocumentConstants.ORDER_LINE_TYPE_PRODUCT;
orderProduct.productCode = "TEA-TOWEL-GREEN";
orderProduct.productName = "Green tea towel - 30 x 6 centimetres";
orderProduct.keySellUnitID = "2";
orderProduct.unitName = "EACH";
orderProduct.quantity = 4;
orderProduct.sellUnitBaseQuantity = 4;
orderProduct.priceExTax = (decimal)5.00;
orderProduct.priceIncTax = (decimal)5.50;
orderProduct.priceTax = (decimal)0.50;
orderProduct.priceTotalIncTax = (decimal)22.00;
orderProduct.priceTotalExTax = (decimal)20.00;
orderProduct.priceTotalTax = (decimal)2.00;
//specify supplier's product code in salesOrderProductCode if it is different to the line's productCode field
orderProduct.salesOrderProductCode = "ACME-SUPPLIER-TTGREEN";
//add 1st order line to lines list
orderLines.Add(orderProduct);
//add a 2nd purchase order line record that is a text line
orderProduct = new ESDRecordOrderPurchaseLine();
orderProduct.lineType = ESDocumentConstants.ORDER_LINE_TYPE_TEXT;
orderProduct.textDescription = "Please bundle tea towels into a box";
orderLines.Add(orderProduct);
//add a 3rd purchase order line product record to the order
orderProduct = new ESDRecordOrderPurchaseLine();
orderProduct.lineType = ESDocumentConstants.ORDER_LINE_TYPE_PRODUCT;
orderProduct.productCode = "TEA-TOWEL-BLUE";
orderProduct.quantity = 10;
orderProduct.salesOrderProductCode = "ACME-TTBLUE";
orderLines.Add(orderProduct);
//add order lines to the order
purchaseOrderRecord.lines = orderLines;
//create purchase order records list and add purchase order to it
List<ESDRecordOrderPurchase> purchaseOrderRecords = new List<ESDRecordOrderPurchase>();
purchaseOrderRecords.Add(purchaseOrderRecord);
//after 120 seconds give up on waiting for a response from the API when procuring the order
int timeoutMilliseconds = 120000;
//create purchase order Ecommerce Standards document and add purchse order records to the document
ESDocumentOrderPurchase orderPurchaseESD = new ESDocumentOrderPurchase(ESDocumentConstants.RESULT_SUCCESS, "successfully obtained data", purchaseOrderRecords.ToArray(), new Dictionary<string, string>());
//send purchase order document to the API for procurement by the supplier organisation
APIv1EndpointResponseESD<ESDocumentOrderSale> endpointResponseESD = APIv1EndpointOrgProcurePurchaseOrderFromSupplier.call(apiOrgSession, timeoutMilliseconds, supplierOrgID, customerAccountCode, orderPurchaseESD);
ESDocumentOrderSale esDocumentOrderSale = endpointResponseESD.esDocument;
//check the result of procuring the purchase orders
if (endpointResponseESD.result.ToUpper() == APIv1EndpointResponse.ENDPOINT_RESULT_SUCCESS) {
Console.WriteLine("SUCCESS - organisation purchase orders have successfully been sent to supplier organisation.");
//iterate through each of the returned sales orders and output the details of the sales orders
if (esDocumentOrderSale.dataRecords != null) {
foreach(ESDRecordOrderSale salesOrderRecord in esDocumentOrderSale.dataRecords) {
Console.WriteLine("\nSales Order Returned, Order Details: ");
Console.WriteLine("Sales Order Code: " + salesOrderRecord.salesOrderCode);
Console.WriteLine("Sales Order Total Cost: " + salesOrderRecord.totalPriceIncTax + " (" + salesOrderRecord.currencyISOCode + ")");
Console.WriteLine("Sales Order Total Taxes: " + salesOrderRecord.totalTax + " (" + salesOrderRecord.currencyISOCode + ")");
Console.WriteLine("Sales Order Customer Account: " + salesOrderRecord.customerAccountCode);
Console.WriteLine("Sales Order Total Lines: " + salesOrderRecord.totalLines);
}
}
} else {
Console.WriteLine("FAIL - organisation purchase orders failed to be processed. Reason: " + endpointResponseESD.result_message + " Error Code: " + endpointResponseESD.result_code);
//check that a Ecommerce standards document was returned
if (esDocumentOrderSale != null && esDocumentOrderSale.configs != null)
{
//if one or more products in the purchase order could not match a product for the supplier organisation then find out the order lines caused the problem
if (esDocumentOrderSale.configs.ContainsKey(APIv1EndpointResponseESD<ESDocumentOrderSale>.ESD_CONFIG_ORDERS_WITH_UNMAPPED_LINES))
{
//get a list of order lines that could not be mapped
List<KeyValuePair<int, int>> unmappedLines = APIv1EndpointOrgProcurePurchaseOrderFromSupplier.getUnmappedOrderLines(esDocumentOrderSale);
//iterate through each unmapped order line
foreach (KeyValuePair<int, int> unmappedLine in unmappedLines)
{
//get the index of the purchase order and line that contained the unmapped product
int orderIndex = unmappedLine.Key;
int lineIndex = unmappedLine.Value;
//check that the order can be found that contains the problematic line
if (orderIndex < orderPurchaseESD.dataRecords.Length && lineIndex < orderPurchaseESD.dataRecords[orderIndex].lines.Count)
{
Console.WriteLine("For purchase order: " + orderPurchaseESD.dataRecords[orderIndex].purchaseOrderCode + " a matching supplier product for line number: " + (lineIndex + 1) + " could not be found.");
}
}
}
//if one or more supplier organisation's products in the purchase order are not stock then find the order lines that caused the problem
if (esDocumentOrderSale.configs.ContainsKey(APIv1EndpointResponseESD<ESDocumentOrderSale>.ESD_CONFIG_ORDERS_WITH_UNSTOCKED_LINES))
{
//get a list of order lines that are not stocked by the supplier
List<KeyValuePair<int, int>> unstockedLines = APIv1EndpointOrgProcurePurchaseOrderFromSupplier.getOutOfStockOrderLines(esDocumentOrderSale);
//iterate through each unstocked order line
foreach (KeyValuePair<int, int> unstockedLine in unstockedLines)
{
//get the index of the purchase order and line that contained the unstocked product
int orderIndex = unstockedLine.Key;
int lineIndex = unstockedLine.Value;
//check that the order can be found that contains the problematic line
if (orderIndex < orderPurchaseESD.dataRecords.Length && lineIndex < orderPurchaseESD.dataRecords[orderIndex].lines.Count)
{
Console.WriteLine("For purchase order: " + orderPurchaseESD.dataRecords[orderIndex].purchaseOrderCode + " the supplier has no products in stock for line number: " + (lineIndex + 1));
}
}
}
//if one or more products in the purchase order could not be priced by the supplier organisation then find the order line that caused the problem
if (esDocumentOrderSale.configs.ContainsKey(APIv1EndpointResponseESD<ESDocumentOrderSale>.ESD_CONFIG_ORDERS_WITH_UNPRICED_LINES))
{
//get a list of order lines that could not be priced
List<KeyValuePair<int, int>> unpricedLines = APIv1EndpointOrgProcurePurchaseOrderFromSupplier.getUnpricedOrderLines(esDocumentOrderSale);
//iterate through each unpriced order line
|
[
" foreach (KeyValuePair<int, int> unpricedLine in unpricedLines) {"
] | 1,177
|
lcc
|
csharp
| null |
7850286a4534f85c6c1e090310eb4027cb5b38c4f626ff33
|
|
//
// ClientOperation.cs
//
// Author:
// Atsushi Enomoto <atsushi@ximian.com>
//
// Copyright (C) 2005 Novell, Inc. http://www.novell.com
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Reflection;
using System.ServiceModel;
using System.ServiceModel.Channels;
using System.ServiceModel.Description;
using System.Text;
namespace System.ServiceModel.Dispatcher
{
public sealed class ClientOperation
{
internal class ClientOperationCollection :
#if NET_2_1
KeyedCollection<string, ClientOperation>
#else
SynchronizedKeyedCollection<string, ClientOperation>
#endif
{
protected override string GetKeyForItem (ClientOperation o)
{
return o.Name;
}
}
ClientRuntime parent;
string name, action, reply_action;
MethodInfo sync_method, begin_method, end_method;
bool deserialize_reply = true, serialize_request = true;
bool is_initiating, is_terminating, is_oneway;
IClientMessageFormatter formatter;
SynchronizedCollection<IParameterInspector> inspectors
= new SynchronizedCollection<IParameterInspector> ();
SynchronizedCollection<FaultContractInfo> fault_contract_infos = new SynchronizedCollection<FaultContractInfo> ();
public ClientOperation (ClientRuntime parent,
string name, string action)
{
this.parent = parent;
this.name = name;
this.action = action;
}
public ClientOperation (ClientRuntime parent,
string name, string action, string replyAction)
{
this.parent = parent;
this.name = name;
this.action = action;
this.reply_action = replyAction;
}
public string Action {
get { return action; }
}
public string ReplyAction {
get { return reply_action; }
}
public MethodInfo BeginMethod {
get { return begin_method; }
set {
ThrowIfOpened ();
begin_method = value;
}
}
public bool DeserializeReply {
get { return deserialize_reply; }
set {
ThrowIfOpened ();
deserialize_reply = value;
}
}
public MethodInfo EndMethod {
get { return end_method; }
set {
ThrowIfOpened ();
end_method = value;
}
}
public SynchronizedCollection<FaultContractInfo> FaultContractInfos {
get { return fault_contract_infos; }
}
public IClientMessageFormatter Formatter {
get { return formatter; }
set {
ThrowIfOpened ();
formatter = value;
}
}
public bool IsInitiating {
get { return is_initiating; }
set {
ThrowIfOpened ();
is_initiating = value;
}
}
public bool IsOneWay {
get { return is_oneway; }
set {
ThrowIfOpened ();
is_oneway = value;
}
}
public bool IsTerminating {
get { return is_terminating; }
set {
ThrowIfOpened ();
is_terminating = value;
}
}
public string Name {
get { return name; }
}
public SynchronizedCollection<IParameterInspector> ParameterInspectors {
get { return inspectors; }
}
public ClientRuntime Parent {
get { return parent; }
}
public bool SerializeRequest {
get { return serialize_request; }
set {
ThrowIfOpened ();
serialize_request = value;
}
}
public MethodInfo SyncMethod {
get { return sync_method; }
set {
ThrowIfOpened ();
sync_method = value;
}
}
void ThrowIfOpened ()
{
// FIXME: get correct state
var state = CommunicationState.Created;
switch (state) {
case CommunicationState.Created:
case CommunicationState.Opening:
return;
}
throw new InvalidOperationException ("Cannot change this property after the service host is opened");
}
[MonoTODO]
public ICollection<IParameterInspector> ClientParameterInspectors {
get { throw new NotImplementedException (); }
}
[MonoTODO]
public MethodInfo TaskMethod {
get { throw new NotImplementedException (); }
set { throw new NotImplementedException (); }
}
[MonoTODO]
public Type TaskTResult {
|
[
"\t\t\tget { throw new NotImplementedException (); }"
] | 629
|
lcc
|
csharp
| null |
a93d814dbddd20576003b7d27ccabef85f5258c0f1c189bf
|
|
package org.exist.security;
import org.exist.storage.io.VariableByteInputStream;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import org.exist.storage.io.VariableByteOutputStream;
import org.exist.Database;
import org.exist.security.ACLPermission.ACE_TARGET;
import org.exist.security.ACLPermission.ACE_ACCESS_TYPE;
import org.exist.security.internal.SecurityManagerImpl;
import java.util.Random;
import org.easymock.EasyMock;
import org.exist.util.ByteArray;
import org.junit.Test;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertEquals;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
import static org.easymock.EasyMock.expect;
/**
*
* @author Adam Retter <adam@exist-db.org>
*/
public class SimpleACLPermissionTest {
private final static int ALL = Permission.READ | Permission.WRITE | Permission.EXECUTE;
@Test
public void add() throws PermissionDeniedException {
final SecurityManager mockSecurityManager = EasyMock.createMock(SecurityManager.class);
final Database mockDatabase = EasyMock.createMock(Database.class);
final Subject mockCurrentSubject = EasyMock.createMock(Subject.class);
//expect(mockSecurityManager.getDatabase()).andReturn(mockDatabase);
//expect(mockDatabase.getCurrentSubject()).andReturn(mockCurrentSubject);
//expect(mockCurrentSubject.hasDbaRole()).andReturn(true);
replay(mockSecurityManager, mockDatabase, mockCurrentSubject);
SimpleACLPermission permission = new SimpleACLPermission(mockSecurityManager);
assertEquals(0, permission.getACECount());
final int userId = 1;
final int mode = ALL;
permission.addUserACE(ACE_ACCESS_TYPE.ALLOWED, userId, mode);
verify(mockSecurityManager, mockDatabase, mockCurrentSubject);
assertEquals(1, permission.getACECount());
assertEquals(ACE_ACCESS_TYPE.ALLOWED, permission.getACEAccessType(0));
assertEquals(ACE_TARGET.USER, permission.getACETarget(0));
assertEquals(userId, permission.getACEId(0));
assertEquals(mode, permission.getACEMode(0));
}
@Test
public void addACE_ForUserWithModeString() throws PermissionDeniedException {
final SecurityManager mockSecurityManager = EasyMock.createMock(SecurityManager.class);
final Database mockDatabase = EasyMock.createMock(Database.class);
final Subject mockCurrentSubject = EasyMock.createMock(Subject.class);
final Account mockAccount = EasyMock.createMock(Account.class);
SimpleACLPermission permission = new SimpleACLPermission(mockSecurityManager);
assertEquals(0, permission.getACECount());
final int userId = 1112;
final String userName = "aretter";
final String mode = "rwx";
//expect(mockSecurityManager.getDatabase()).andReturn(mockDatabase);
//expect(mockDatabase.getCurrentSubject()).andReturn(mockCurrentSubject);
//expect(mockCurrentSubject.hasDbaRole()).andReturn(true);
expect(mockSecurityManager.getAccount(userName)).andReturn(mockAccount);
expect(mockAccount.getId()).andReturn(userId);
replay(mockSecurityManager, mockDatabase, mockCurrentSubject, mockAccount);
permission.addACE(ACE_ACCESS_TYPE.ALLOWED, ACE_TARGET.USER, userName, mode);
verify(mockSecurityManager, mockDatabase, mockCurrentSubject, mockAccount);
assertEquals(1, permission.getACECount());
assertEquals(userId, permission.getACEId(0));
assertEquals(ACE_ACCESS_TYPE.ALLOWED, permission.getACEAccessType(0));
assertEquals(ACE_TARGET.USER, permission.getACETarget(0));
assertEquals(ALL, permission.getACEMode(0));
}
@Test
public void addACE_ForGroupWithModeString() throws PermissionDeniedException {
final SecurityManager mockSecurityManager = EasyMock.createMock(SecurityManager.class);
final Database mockDatabase = EasyMock.createMock(Database.class);
final Subject mockCurrentSubject = EasyMock.createMock(Subject.class);
final Group mockGroup = EasyMock.createMock(Group.class);
SimpleACLPermission permission = new SimpleACLPermission(mockSecurityManager);
assertEquals(0, permission.getACECount());
final int groupId = 1112;
final String groupName = "aretter";
final String mode = "rwx";
//expect(mockSecurityManager.getDatabase()).andReturn(mockDatabase);
//expect(mockDatabase.getCurrentSubject()).andReturn(mockCurrentSubject);
//expect(mockCurrentSubject.hasDbaRole()).andReturn(true);
expect(mockSecurityManager.getGroup(groupName)).andReturn(mockGroup);
expect(mockGroup.getId()).andReturn(groupId);
replay(mockSecurityManager, mockDatabase, mockCurrentSubject, mockGroup);
permission.addACE(ACE_ACCESS_TYPE.ALLOWED, ACE_TARGET.GROUP, groupName, mode);
verify(mockSecurityManager, mockDatabase, mockCurrentSubject, mockGroup);
assertEquals(1, permission.getACECount());
assertEquals(groupId, permission.getACEId(0));
assertEquals(ACE_ACCESS_TYPE.ALLOWED, permission.getACEAccessType(0));
assertEquals(ACE_TARGET.GROUP, permission.getACETarget(0));
assertEquals(ALL, permission.getACEMode(0));
assertEquals(mode, permission.getACEModeString(0));
}
// Verifies insertUserACE(0, ...) on an empty ACL behaves like a plain add:
// the new user ACE lands at index 0 with the given mode.
@Test
public void insert_atFront_whenEmpty() throws PermissionDeniedException {
final SecurityManager mockSecurityManager = EasyMock.createMock(SecurityManager.class);
final Database mockDatabase = EasyMock.createMock(Database.class);
final Subject mockCurrentSubject = EasyMock.createMock(Subject.class);
//expect(mockSecurityManager.getDatabase()).andReturn(mockDatabase);
//expect(mockDatabase.getCurrentSubject()).andReturn(mockCurrentSubject);
//expect(mockCurrentSubject.hasDbaRole()).andReturn(true);
// No interactions with the mocks are expected during the insert.
replay(mockSecurityManager, mockDatabase, mockCurrentSubject);
SimpleACLPermission permission = new SimpleACLPermission(mockSecurityManager);
assertEquals(0, permission.getACECount());
final int userId = 1112;
final int mode = ALL;
permission.insertUserACE(0, ACE_ACCESS_TYPE.ALLOWED, userId, mode);
verify(mockSecurityManager, mockDatabase, mockCurrentSubject);
assertEquals(1, permission.getACECount());
assertEquals(userId, permission.getACEId(0));
assertEquals(ACE_ACCESS_TYPE.ALLOWED, permission.getACEAccessType(0));
assertEquals(ACE_TARGET.USER, permission.getACETarget(0));
assertEquals(ALL, permission.getACEMode(0));
}
// Verifies insertUserACE(0, ...) on a non-empty ACL: the new entry takes
// index 0 and the pre-existing entry is shifted (checked implicitly via the
// index-0 assertions on the second user).
@Test
public void insert_atFront() throws PermissionDeniedException {
final SecurityManager mockSecurityManager = EasyMock.createMock(SecurityManager.class);
final Database mockDatabase = EasyMock.createMock(Database.class);
final Subject mockCurrentSubject = EasyMock.createMock(Subject.class);
//expect(mockSecurityManager.getDatabase()).andReturn(mockDatabase).times(2);
//expect(mockDatabase.getCurrentSubject()).andReturn(mockCurrentSubject).times(2);
//expect(mockCurrentSubject.hasDbaRole()).andReturn(true).times(2);
replay(mockSecurityManager, mockDatabase, mockCurrentSubject);
SimpleACLPermission permission = new SimpleACLPermission(mockSecurityManager);
assertEquals(0, permission.getACECount());
// Seed the ACL with one entry.
final int userId = 1112;
final int mode = ALL;
permission.addUserACE(ACE_ACCESS_TYPE.ALLOWED, userId, mode);
assertEquals(1, permission.getACECount());
assertEquals(userId, permission.getACEId(0));
assertEquals(ACE_ACCESS_TYPE.ALLOWED, permission.getACEAccessType(0));
assertEquals(ACE_TARGET.USER, permission.getACETarget(0));
assertEquals(ALL, permission.getACEMode(0));
// Insert a second entry at the front; 04 is octal (read-only bits).
final int secondUserId = 1113;
final int secondMode = 04;
permission.insertUserACE(0, ACE_ACCESS_TYPE.ALLOWED, secondUserId, secondMode);
assertEquals(2, permission.getACECount());
assertEquals(secondUserId, permission.getACEId(0));
assertEquals(ACE_ACCESS_TYPE.ALLOWED, permission.getACEAccessType(0));
assertEquals(ACE_TARGET.USER, permission.getACETarget(0));
assertEquals(secondMode, permission.getACEMode(0));
verify(mockSecurityManager, mockDatabase, mockCurrentSubject);
}
// Verifies insertUserACE(1, ...) between two existing entries: order becomes
// [first, inserted, second] and every entry keeps its own mode bits.
@Test
public void insert_inMiddle() throws PermissionDeniedException {
final SecurityManager mockSecurityManager = EasyMock.createMock(SecurityManager.class);
final Database mockDatabase = EasyMock.createMock(Database.class);
final Subject mockCurrentSubject = EasyMock.createMock(Subject.class);
//expect(mockSecurityManager.getDatabase()).andReturn(mockDatabase).times(3);
//expect(mockDatabase.getCurrentSubject()).andReturn(mockCurrentSubject).times(3);
//expect(mockCurrentSubject.hasDbaRole()).andReturn(true).times(3);
replay(mockSecurityManager, mockDatabase, mockCurrentSubject);
SimpleACLPermission permission = new SimpleACLPermission(mockSecurityManager);
assertEquals(0, permission.getACECount());
// Seed with two entries (04 / 02 are octal read / write bits).
final int userId = 1112;
final int mode = ALL;
permission.addUserACE(ACE_ACCESS_TYPE.ALLOWED, userId, mode);
assertEquals(1, permission.getACECount());
assertEquals(userId, permission.getACEId(0));
assertEquals(ACE_ACCESS_TYPE.ALLOWED, permission.getACEAccessType(0));
assertEquals(ACE_TARGET.USER, permission.getACETarget(0));
assertEquals(ALL, permission.getACEMode(0));
final int secondUserId = 1113;
final int secondMode = 04;
permission.addUserACE(ACE_ACCESS_TYPE.ALLOWED, secondUserId, secondMode);
assertEquals(2, permission.getACECount());
assertEquals(secondUserId, permission.getACEId(1));
assertEquals(ACE_ACCESS_TYPE.ALLOWED, permission.getACEAccessType(1));
assertEquals(ACE_TARGET.USER, permission.getACETarget(1));
assertEquals(secondMode, permission.getACEMode(1));
// Insert the third entry between them.
final int thirdUserId = 1114;
final int thirdMode = 02;
permission.insertUserACE(1, ACE_ACCESS_TYPE.ALLOWED, thirdUserId, thirdMode);
assertEquals(3, permission.getACECount());
assertEquals(userId, permission.getACEId(0));
assertEquals(ACE_ACCESS_TYPE.ALLOWED, permission.getACEAccessType(0));
assertEquals(ACE_TARGET.USER, permission.getACETarget(0));
assertEquals(ALL, permission.getACEMode(0));
assertEquals(thirdUserId, permission.getACEId(1));
assertEquals(ACE_ACCESS_TYPE.ALLOWED, permission.getACEAccessType(1));
assertEquals(ACE_TARGET.USER, permission.getACETarget(1));
assertEquals(thirdMode, permission.getACEMode(1));
assertEquals(secondUserId, permission.getACEId(2));
assertEquals(ACE_ACCESS_TYPE.ALLOWED, permission.getACEAccessType(2));
assertEquals(ACE_TARGET.USER, permission.getACETarget(2));
assertEquals(secondMode, permission.getACEMode(2));
verify(mockSecurityManager, mockDatabase, mockCurrentSubject);
}
// Verifies that inserting at index == size (i.e. past the last entry) is
// rejected by SimpleACLPermission with a PermissionDeniedException.
// NOTE(review): because the insert is expected to throw, the trailing
// verify() call is unreachable -- confirm that is intentional.
@Test(expected=PermissionDeniedException.class)
public void insert_atEnd() throws PermissionDeniedException {
final SecurityManager mockSecurityManager = EasyMock.createMock(SecurityManager.class);
final Database mockDatabase = EasyMock.createMock(Database.class);
final Subject mockCurrentSubject = EasyMock.createMock(Subject.class);
//expect(mockSecurityManager.getDatabase()).andReturn(mockDatabase).times(2);
//expect(mockDatabase.getCurrentSubject()).andReturn(mockCurrentSubject).times(2);
//expect(mockCurrentSubject.hasDbaRole()).andReturn(true).times(2);
replay(mockSecurityManager, mockDatabase, mockCurrentSubject);
SimpleACLPermission permission = new SimpleACLPermission(mockSecurityManager);
assertEquals(0, permission.getACECount());
final int userId = 1112;
final int mode = ALL;
permission.addUserACE(ACE_ACCESS_TYPE.ALLOWED, userId, mode);
assertEquals(1, permission.getACECount());
assertEquals(userId, permission.getACEId(0));
assertEquals(ACE_ACCESS_TYPE.ALLOWED, permission.getACEAccessType(0));
assertEquals(ACE_TARGET.USER, permission.getACETarget(0));
assertEquals(ALL, permission.getACEMode(0));
final int secondUserId = 1113;
final int secondMode = 04;
// Index 1 == current size: expected to throw.
permission.insertUserACE(1, ACE_ACCESS_TYPE.ALLOWED, secondUserId, secondMode);
verify(mockSecurityManager, mockDatabase, mockCurrentSubject);
}
// Removing the ACE at index 0 must shift the remaining entry down to index 0.
@Test
public void remove_firstACE() throws PermissionDeniedException {
    final SecurityManager sm = EasyMock.createMock(SecurityManager.class);
    final Database db = EasyMock.createMock(Database.class);
    final Subject subject = EasyMock.createMock(Subject.class);
    // No mock interactions are expected during add/remove.
    replay(sm, db, subject);

    final SimpleACLPermission acl = new SimpleACLPermission(sm);
    assertEquals(0, acl.getACECount());

    final int remainingUserId = 2;
    acl.addUserACE(ACE_ACCESS_TYPE.ALLOWED, 1, ALL);
    acl.addUserACE(ACE_ACCESS_TYPE.ALLOWED, remainingUserId, ALL);
    assertEquals(2, acl.getACECount());

    acl.removeACE(0);

    // The second entry must now occupy index 0, unchanged.
    assertEquals(1, acl.getACECount());
    assertEquals(ACE_ACCESS_TYPE.ALLOWED, acl.getACEAccessType(0));
    assertEquals(ACE_TARGET.USER, acl.getACETarget(0));
    assertEquals(remainingUserId, acl.getACEId(0));
    verify(sm, db, subject);
}
// Verifies removeACE(1) on a three-entry ACL: the first and third entries
// remain, now adjacent at indices 0 and 1.
@Test
public void remove_middleACE() throws PermissionDeniedException {
final SecurityManager mockSecurityManager = EasyMock.createMock(SecurityManager.class);
final Database mockDatabase = EasyMock.createMock(Database.class);
final Subject mockCurrentSubject = EasyMock.createMock(Subject.class);
//expect(mockSecurityManager.getDatabase()).andReturn(mockDatabase).times(4);
//expect(mockDatabase.getCurrentSubject()).andReturn(mockCurrentSubject).times(4);
//expect(mockCurrentSubject.hasDbaRole()).andReturn(true).times(4);
replay(mockSecurityManager, mockDatabase, mockCurrentSubject);
SimpleACLPermission permission = new SimpleACLPermission(mockSecurityManager);
assertEquals(0, permission.getACECount());
// Seed three user ACEs.
final int firstUserId = 1;
permission.addUserACE(ACE_ACCESS_TYPE.ALLOWED, firstUserId, ALL);
permission.addUserACE(ACE_ACCESS_TYPE.ALLOWED, 2, ALL);
final int thirdUserId = 3;
permission.addUserACE(ACE_ACCESS_TYPE.ALLOWED, thirdUserId, ALL);
assertEquals(3, permission.getACECount());
// Remove the middle entry.
permission.removeACE(1);
assertEquals(2, permission.getACECount());
assertEquals(firstUserId, permission.getACEId(0));
assertEquals(thirdUserId, permission.getACEId(1));
verify(mockSecurityManager, mockDatabase, mockCurrentSubject);
}
// Removing the final ACE must leave the earlier entry untouched at index 0.
@Test
public void remove_lastACE() throws PermissionDeniedException {
    final SecurityManager sm = EasyMock.createMock(SecurityManager.class);
    final Database db = EasyMock.createMock(Database.class);
    final Subject subject = EasyMock.createMock(Subject.class);
    // No mock interactions are expected during add/remove.
    replay(sm, db, subject);

    final SimpleACLPermission acl = new SimpleACLPermission(sm);
    assertEquals(0, acl.getACECount());

    final int keptUserId = 1;
    acl.addUserACE(ACE_ACCESS_TYPE.ALLOWED, keptUserId, ALL);
    acl.addUserACE(ACE_ACCESS_TYPE.ALLOWED, 2, ALL);
    assertEquals(2, acl.getACECount());

    acl.removeACE(1);

    assertEquals(1, acl.getACECount());
    assertEquals(keptUserId, acl.getACEId(0));
    verify(sm, db, subject);
}
// Verifies modifyACE(): first changing only the mode bits of an existing
// entry, then changing both the access type and the mode in one call. The
// user id and target must be preserved across both modifications.
@Test
public void modify() throws PermissionDeniedException {
final SecurityManager mockSecurityManager = EasyMock.createMock(SecurityManager.class);
final Database mockDatabase = EasyMock.createMock(Database.class);
final Subject mockCurrentSubject = EasyMock.createMock(Subject.class);
//expect(mockSecurityManager.getDatabase()).andReturn(mockDatabase).times(3);
//expect(mockDatabase.getCurrentSubject()).andReturn(mockCurrentSubject).times(3);
//expect(mockCurrentSubject.hasDbaRole()).andReturn(true).times(3);
replay(mockSecurityManager, mockDatabase, mockCurrentSubject);
SimpleACLPermission permission = new SimpleACLPermission(mockSecurityManager);
assertEquals(0, permission.getACECount());
final int userId = 1;
final int mode = Permission.READ;
final ACE_ACCESS_TYPE access_type = ACE_ACCESS_TYPE.ALLOWED;
permission.addUserACE(access_type, userId, mode);
assertEquals(1, permission.getACECount());
assertEquals(userId, permission.getACEId(0));
assertEquals(access_type, permission.getACEAccessType(0));
assertEquals(ACE_TARGET.USER, permission.getACETarget(0));
assertEquals(mode, permission.getACEMode(0));
// Change only the mode (READ -> WRITE).
permission.modifyACE(0, access_type, Permission.WRITE);
assertEquals(1, permission.getACECount());
assertEquals(userId, permission.getACEId(0));
assertEquals(access_type, permission.getACEAccessType(0));
assertEquals(ACE_TARGET.USER, permission.getACETarget(0));
assertEquals(Permission.WRITE, permission.getACEMode(0));
// Change both the access type and the mode in one call.
permission.modifyACE(0, ACE_ACCESS_TYPE.DENIED, Permission.READ | Permission.WRITE);
assertEquals(1, permission.getACECount());
assertEquals(userId, permission.getACEId(0));
assertEquals(ACE_TARGET.USER, permission.getACETarget(0));
assertEquals(ACE_ACCESS_TYPE.DENIED, permission.getACEAccessType(0));
assertEquals(Permission.READ | Permission.WRITE, permission.getACEMode(0));
verify(mockSecurityManager, mockDatabase, mockCurrentSubject);
}
// clear() must drop every ACE from the list.
@Test
public void clear() throws PermissionDeniedException {
    final SecurityManager sm = EasyMock.createMock(SecurityManager.class);
    final Database db = EasyMock.createMock(Database.class);
    final Subject subject = EasyMock.createMock(Subject.class);
    // No mock interactions are expected during add/clear.
    replay(sm, db, subject);

    final SimpleACLPermission acl = new SimpleACLPermission(sm);
    assertEquals(0, acl.getACECount());
    acl.addUserACE(ACE_ACCESS_TYPE.ALLOWED, 1, ALL);
    acl.addUserACE(ACE_ACCESS_TYPE.ALLOWED, 2, ALL);
    assertEquals(2, acl.getACECount());

    acl.clear();

    assertEquals(0, acl.getACECount());
    verify(sm, db, subject);
}
@Test
public void validate_cant_read_when_readNotInACL() throws PermissionDeniedException {
final SecurityManager mockSecurityManager = EasyMock.createMock(SecurityManager.class);
final int ownerId = new Random().nextInt(SecurityManagerImpl.MAX_USER_ID);
final int mode = 0700;
final int ownerGroupId = new Random().nextInt(SecurityManagerImpl.MAX_GROUP_ID);
|
[
" final Subject mockUser = EasyMock.createMock(Subject.class);"
] | 1,006
|
lcc
|
java
| null |
b8d674585c2854a71ff59d091611a1181f7327620a70524e
|
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Audio;
using Microsoft.Xna.Framework.Content;
using Microsoft.Xna.Framework.GamerServices;
using Microsoft.Xna.Framework.Graphics;
using Microsoft.Xna.Framework.Input;
using Microsoft.Xna.Framework.Media;
using Microsoft.Xna.Framework.Net;
using Microsoft.Xna.Framework.Storage;
using Knot3.KnotData;
namespace Knot3.Utilities
{
public static class VectorHelper
{
/// <summary>
/// Rotates a vector as an arc-ball camera move: mouse X spins it around the
/// world Y axis, mouse Y tilts it around the camera's (negated) side axis.
/// NOTE(review): the divisors 300/200 look like hand-tuned mouse sensitivity
/// factors -- confirm before changing.
/// </summary>
public static Vector3 ArcBallMove (this Vector3 vectorToMove, Vector2 mouse, Vector3 up, Vector3 forward)
{
Vector3 side = Vector3.Cross (up, forward);
Vector3 movedVector = vectorToMove.RotateY (
MathHelper.Pi / 300f * mouse.X
);
movedVector = movedVector.RotateAroundVector (
-side,
MathHelper.Pi / 200f * mouse.Y
);
return movedVector;
}
/// <summary>
/// Moves a vector linearly in camera space: mouse X along the side axis,
/// mouse Y along the up axis and mouse Z along the forward axis (all
/// subtracted from the starting position).
/// </summary>
public static Vector3 MoveLinear (this Vector3 vectorToMove, Vector3 mouse, Vector3 up, Vector3 forward)
{
    Vector3 side = Vector3.Cross (up, forward);
    Vector3 offset = side * mouse.X + up * mouse.Y + forward * mouse.Z;
    return vectorToMove - offset;
}
/// <summary>
/// 2D overload of MoveLinear: treats the mouse movement as having no
/// forward (Z) component.
/// </summary>
public static Vector3 MoveLinear (this Vector3 vectorToMove, Vector2 mouse, Vector3 up, Vector3 forward)
{
return vectorToMove.MoveLinear (new Vector3 (mouse.X, mouse.Y, 0), up, forward);
}
/// <summary>Rotates the vector around the X axis by the given angle in radians.</summary>
public static Vector3 RotateX (this Vector3 vectorToRotate, float angleRadians)
{
return Vector3.Transform (vectorToRotate, Matrix.CreateRotationX (angleRadians));
}
/// <summary>Rotates the vector around the Y axis by the given angle in radians.</summary>
public static Vector3 RotateY (this Vector3 vectorToRotate, float angleRadians)
{
return Vector3.Transform (vectorToRotate, Matrix.CreateRotationY (angleRadians));
}
/// <summary>Rotates the vector around the Z axis by the given angle in radians.</summary>
public static Vector3 RotateZ (this Vector3 vectorToRotate, float angleRadians)
{
return Vector3.Transform (vectorToRotate, Matrix.CreateRotationZ (angleRadians));
}
/// <summary>Rotates the vector around an arbitrary axis by the given angle in radians.</summary>
public static Vector3 RotateAroundVector (this Vector3 vectorToRotate, Vector3 axis, float angleRadians)
{
return Vector3.Transform (vectorToRotate, Matrix.CreateFromAxisAngle (axis, angleRadians));
}
/// <summary>
/// Clamps each component of the vector between the corresponding components
/// of the lower and higher bound vectors.
/// </summary>
public static Vector3 Clamp (this Vector3 v, Vector3 lower, Vector3 higher)
{
    float x = MathHelper.Clamp (v.X, lower.X, higher.X);
    float y = MathHelper.Clamp (v.Y, lower.Y, higher.Y);
    float z = MathHelper.Clamp (v.Z, lower.Z, higher.Z);
    return new Vector3 (x, y, z);
}
/// <summary>
/// Clamps the length of the vector into [minLength, maxLength] while keeping
/// its direction. A zero vector is returned unchanged, because it has no
/// direction to rescale (the original code divided by zero here, producing
/// NaN/Infinity components). The length is also computed only once.
/// </summary>
public static Vector3 Clamp (this Vector3 v, int minLength, int maxLength)
{
    float length = v.Length ();
    if (length == 0f) {
        // No direction to preserve; rescaling is undefined.
        return v;
    }
    if (length < minLength) {
        return v * minLength / length;
    }
    if (length > maxLength) {
        return v * maxLength / length;
    }
    return v;
}
/// <summary>
/// Keeps only the dominant component of the vector, zeroing the other.
/// Ties (equal absolute values) resolve in favour of the X component.
/// </summary>
public static Vector2 PrimaryVector (this Vector2 v)
{
    float ax = v.X.Abs ();
    float ay = v.Y.Abs ();
    // Y wins only when strictly larger; otherwise X (including ties).
    return ay > ax ? new Vector2 (0, v.Y) : new Vector2 (v.X, 0);
}
/// <summary>
/// Keeps only the dominant component of the vector, zeroing the other two.
/// When no component is strictly largest (a tie), the X component is kept.
/// </summary>
public static Vector3 PrimaryVector (this Vector3 v)
{
    float ax = v.X.Abs ();
    float ay = v.Y.Abs ();
    float az = v.Z.Abs ();
    if (ay > ax && ay > az) {
        return new Vector3 (0, v.Y, 0);
    }
    if (az > ax && az > ay) {
        return new Vector3 (0, 0, v.Z);
    }
    // X dominates, or there is no strictly-largest component.
    return new Vector3 (v.X, 0, 0);
}
/// <summary>
/// Returns a unit axis direction (-1, 0 or +1 per component) along the
/// vector's dominant component.
/// </summary>
public static Vector2 PrimaryDirection (this Vector2 v)
{
Vector2 vector = v.PrimaryVector ();
return new Vector2 (Math.Sign (vector.X), Math.Sign (vector.Y));
}
/// <summary>
/// Returns a unit axis direction (-1, 0 or +1 per component) along the
/// vector's dominant component.
/// </summary>
public static Vector3 PrimaryDirection (this Vector3 v)
{
Vector3 vector = v.PrimaryVector ();
return new Vector3 (Math.Sign (vector.X), Math.Sign (vector.Y), Math.Sign (vector.Z));
}
/// <summary>
/// Returns the primary direction of the vector after suppressing the axis
/// indicated by wrongDirection, so the result cannot point along that axis.
/// Only the first non-zero component of wrongDirection is suppressed.
/// </summary>
public static Vector3 PrimaryDirectionExcept (this Vector3 v, Vector3 wrongDirection)
{
    Vector3 filtered = v;
    // Zero exactly one axis: the first non-zero component of wrongDirection.
    if (wrongDirection.X != 0) {
        filtered.X = 0;
    }
    else if (wrongDirection.Y != 0) {
        filtered.Y = 0;
    }
    else if (wrongDirection.Z != 0) {
        filtered.Z = 0;
    }
    return filtered.PrimaryDirection ();
}
/// <summary>Extension shorthand for Math.Abs on a float.</summary>
public static float Abs (this float v)
{
return Math.Abs (v);
}
/// <summary>Extension shorthand for MathHelper.Clamp with integer bounds.</summary>
public static float Clamp (this float v, int min, int max)
{
return MathHelper.Clamp (v, min, max);
}
/// <summary>
/// Collects the bounding sphere of every mesh in the model into an array,
/// in mesh order.
/// </summary>
public static BoundingSphere[] Bounds (this Model model)
{
    return model.Meshes.Select (mesh => mesh.BoundingSphere).ToArray ();
}
/// <summary>Builds an axis-aligned box from a corner point and a size offset.</summary>
public static BoundingBox Bounds (this Vector3 a, Vector3 diff)
{
return new BoundingBox (a, a + diff);
}
/// <summary>Returns a copy of the sphere with its radius multiplied by a scalar (centre unchanged).</summary>
public static BoundingSphere Scale (this BoundingSphere sphere, float scale)
{
return new BoundingSphere (sphere.Center, sphere.Radius * scale);
}
/// <summary>
/// Scales the sphere radius by the length of the scale vector's dominant
/// component. NOTE(review): only the primary axis of the scale is honoured;
/// non-uniform scales are approximated -- confirm this is intended.
/// </summary>
public static BoundingSphere Scale (this BoundingSphere sphere, Vector3 scale)
{
return new BoundingSphere (sphere.Center, sphere.Radius * scale.PrimaryVector ().Length ());
}
/// <summary>Returns a copy of the sphere moved by the given offset.</summary>
public static BoundingSphere Translate (this BoundingSphere sphere, Vector3 position)
{
    // Transforming the centre by a translation matrix is equivalent to
    // adding the offset directly.
    return new BoundingSphere (sphere.Center + position, sphere.Radius);
}
/// <summary>
/// Rotates the sphere's centre around the origin by yaw/pitch/roll Euler
/// angles (radius unchanged).
/// </summary>
public static BoundingSphere Rotate (this BoundingSphere sphere, Angles3 rotation)
{
return new BoundingSphere (Vector3.Transform (sphere.Center, Matrix.CreateFromYawPitchRoll (rotation.Y, rotation.X, rotation.Z)), sphere.Radius);
}
/// <summary>
/// Scales both corners of the box about the origin. NOTE(review): a negative
/// scale would swap Min/Max and produce an inverted box -- callers
/// presumably only pass positive scales.
/// </summary>
public static BoundingBox Scale (this BoundingBox box, float scale)
{
return new BoundingBox (box.Min * scale, box.Max * scale);
}
/// <summary>Returns a copy of the box moved by the given offset.</summary>
public static BoundingBox Translate (this BoundingBox box, Vector3 position)
{
    // A translation matrix simply offsets both corners.
    return new BoundingBox (box.Min + position, box.Max + position);
}
/// <summary>Converts a mouse position to a Vector2.</summary>
public static Vector2 ToVector2 (this MouseState screen)
{
return new Vector2 (screen.X, screen.Y);
}
/// <summary>Converts a mouse position to a Point.</summary>
public static Point ToPoint (this MouseState screen)
{
return new Point (screen.X, screen.Y);
}
/// <summary>Returns the viewport size (width, height) as a Vector2.</summary>
public static Vector2 ToVector2 (this Viewport viewport)
{
return new Vector2 (viewport.Width, viewport.Height);
}
/// <summary>Returns the centre point of the viewport.</summary>
public static Vector2 Center (this Viewport viewport)
{
    return viewport.ToVector2 () / 2;
}
/// <summary>Converts an integer Point to a Vector2.</summary>
public static Vector2 ToVector2 (this Point v)
{
return new Vector2 (v.X, v.Y);
}
/// <summary>
/// Converts a Vector2 to a Point; components are truncated toward zero by
/// the (int) cast, not rounded.
/// </summary>
public static Point ToPoint (this Vector2 v)
{
return new Point ((int)v.X, (int)v.Y);
}
/// <summary>Component-wise addition of two Points (Point has no + operator in XNA).</summary>
public static Point Plus (this Point a, Point b)
{
return new Point (a.X + b.X, a.Y + b.Y);
}
/// <summary>
/// Concatenates the ints of the list, appending the delimiter after each
/// element. NOTE(review): this leaves a trailing delimiter after the last
/// element (unlike string.Join) -- confirm callers rely on that before
/// changing it.
/// </summary>
public static string Join (this string delimiter, List<int> list)
{
StringBuilder builder = new StringBuilder ();
foreach (int elem in list) {
// Append each int to the StringBuilder overload.
builder.Append (elem).Append (delimiter);
}
return builder.ToString ();
}
/// <summary>
/// Returns the viewport size relative to a 1000x1000 reference resolution.
/// </summary>
public static Vector2 ScaleFactor (this Viewport viewport)
{
    return viewport.ToVector2 () / 1000f;
}
public static Vector2 RelativeTo (this Vector2 v, Viewport viewport)
{
|
[
"\t\t\tVector2 max = viewport.ToVector2 ();"
] | 882
|
lcc
|
csharp
| null |
145cc36abdaa2c8860959982f54fca44c7cae309bc44bcf9
|
|
import xdrlib
class XDREnum(object):
    # Base class for generated XDR enum types; instances behave like named
    # integer constants. NOTE(review): Python 2 only -- relies on the cmp()
    # builtin and the __cmp__ protocol, both removed in Python 3.
    __slots__ = ['name', 'value']
    def __init__(self, name, value):
        # name: symbolic enum name; value: integer wire value.
        self.name = name
        self.value = value
    def __int__(self):
        return self.value
    def __str__(self):
        return self.name
    def __repr__(self):
        return self.name
    def __cmp__(x, y):
        # Ordering and equality are by integer value.
        return cmp(int(x), int(y))
    def __hash__(self):
        return hash(int(self))
    @classmethod
    def unpack_from(cls, reader):
        # 'members' is presumably an {int: member} mapping supplied by the
        # generated subclass -- TODO confirm against the code generator.
        value = reader.unpack_int()
        return cls.members[value]
    @classmethod
    def pack_into(cls, packer, value):
        packer.pack_int(value)
class XDRStruct(object):
    # Base class for generated XDR structs. Subclasses supply __slots__,
    # pack_into(), unpack_from() and __eq__.
    __slots__ = []
    def pack(self):
        # Serialise this struct to its XDR byte representation.
        packer = xdrlib.Packer()
        self.pack_into(packer, self)
        return packer.get_buffer()
    @classmethod
    def unpack(cls, data):
        # Deserialise an instance from raw XDR bytes.
        return cls.unpack_from(xdrlib.Unpacker(data))
    def __str__(self):
        return repr(self)
    def __ne__(self, other):
        # Derived from the subclass-provided __eq__ (Python 2 does not
        # derive __ne__ automatically).
        return not self == other
class XDRUnion(object):
    # Base class for generated XDR unions. unpack_from is presumably supplied
    # by the generated subclass -- TODO confirm.
    @classmethod
    def unpack(cls, data):
        return cls.unpack_from(xdrlib.Unpacker(data))
    @classmethod
    def pack_into(cls, packer, obj):
        # Dispatch to the member's own class so the correct arm is written.
        type(obj).pack_into(packer, obj)
class XDRUnionMember(object):
    # Base class for one arm of an XDR union; wraps a single payload value.
    __slots__ = ["value"]

    def __init__(self, value=None):
        self.value = value

    def pack(self):
        # Serialise via the subclass-provided pack_into().
        packer = xdrlib.Packer()
        self.pack_into(packer, self)
        return packer.get_buffer()

    def __repr__(self):
        return '%s(%r)' % (type(self).__name__, self.value)

    def __str__(self):
        return repr(self)

    def __eq__(self, other):
        # Equal only to instances of the exact same member class.
        if type(self) != type(other):
            return False
        return self.value == other.value

    def __ne__(self, other):
        return not self == other
class XDRTypedef(object):
    # Base class for generated XDR typedefs; unpack_from is presumably
    # supplied by the generated subclass -- TODO confirm.
    __slots__ = []
    @classmethod
    def unpack(cls, data):
        return cls.unpack_from(xdrlib.Unpacker(data))
class endpoint_key(XDRStruct):
    # XDR struct identifying an endpoint by (vlan, mac_hi, mac_lo).
    __slots__ = ['vlan', 'mac_hi', 'mac_lo']

    def __init__(self, vlan=None, mac_hi=None, mac_lo=None):
        self.vlan = vlan
        self.mac_hi = mac_hi
        self.mac_lo = mac_lo

    @classmethod
    def pack_into(cls, packer, obj):
        # Fields are serialised in declaration order as XDR unsigned ints.
        for field in (obj.vlan, obj.mac_hi, obj.mac_lo):
            packer.pack_uint(field)

    @classmethod
    def unpack_from(cls, unpacker):
        obj = endpoint_key()
        obj.vlan = unpacker.unpack_uint()
        obj.mac_hi = unpacker.unpack_uint()
        obj.mac_lo = unpacker.unpack_uint()
        return obj

    def __eq__(self, other):
        # Equal only to another endpoint_key with identical fields.
        if type(self) != type(other):
            return False
        return (self.vlan, self.mac_hi, self.mac_lo) == \
               (other.vlan, other.mac_hi, other.mac_lo)

    def __repr__(self):
        return 'endpoint_key(vlan=%r, mac_hi=%r, mac_lo=%r)' % (
            self.vlan, self.mac_hi, self.mac_lo)
class endpoint_value(XDRStruct):
    # XDR struct holding an endpoint's port number.
    __slots__ = ['port']

    def __init__(self, port=None):
        self.port = port

    @classmethod
    def pack_into(cls, packer, obj):
        packer.pack_uint(obj.port)

    @classmethod
    def unpack_from(cls, unpacker):
        obj = endpoint_value()
        obj.port = unpacker.unpack_uint()
        return obj

    def __eq__(self, other):
        # Equal only to another endpoint_value with the same port.
        if type(self) != type(other):
            return False
        return self.port == other.port

    def __repr__(self):
        return 'endpoint_value(port=%r)' % (self.port,)
class endpoint_stats(XDRStruct):
    # XDR struct holding per-endpoint packet and byte counters.
    __slots__ = ['packets', 'bytes']

    def __init__(self, packets=None, bytes=None):
        self.packets = packets
        self.bytes = bytes

    @classmethod
    def pack_into(cls, packer, obj):
        # Counters are serialised in declaration order as XDR unsigned ints.
        for field in (obj.packets, obj.bytes):
            packer.pack_uint(field)

    @classmethod
    def unpack_from(cls, unpacker):
        obj = endpoint_stats()
        obj.packets = unpacker.unpack_uint()
        obj.bytes = unpacker.unpack_uint()
        return obj

    def __eq__(self, other):
        # Equal only to another endpoint_stats with identical counters.
        if type(self) != type(other):
            return False
        return (self.packets, self.bytes) == (other.packets, other.bytes)

    def __repr__(self):
        return 'endpoint_stats(packets=%r, bytes=%r)' % (
            self.packets, self.bytes)
|
[
"__all__ = ['endpoint_key', 'endpoint_value', 'endpoint_stats']"
] | 399
|
lcc
|
python
| null |
c69d4ae0425c1d742d8cad38e1238ffbf39d8f97b105df79
|
|
/*
SLAM server
Copyright (C) 2009 Bob Mottram
fuzzgun@gmail.com
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
using System;
using System.Xml;
using System.Net;
using System.Net.Sockets;
using System.Collections;
using System.Collections.Generic;
using System.Threading;
using dpslam.core.tests;
namespace dpslam.core
{
public class dpslamServer
{
public bool kill;
public bool Running;
// list of client numbers from which data is currently being received
protected List<int> receiving_data = new List<int>();
protected const int DATA_BUFFER_SIZE = 4096 * 2;
// the type of xml encoding used
public const string XML_ENCODING = "ISO-8859-1";
// recognised xml node types
public const string STATUS_REQUEST = "HardwareDeviceStatusRequest";
public const string STATUS_REPLY = "HardwareDeviceStatus";
public const string STATUS_UPDATE = "HardwareDeviceUpdate";
public const string STATUS_BROADCAST = "HardwareDeviceBroadcast";
public const string STATUS_DISCONNECT = "HardwareDeviceDisconnect";
public int PortNumber;
public ProtocolType protocol = ProtocolType.Tcp;
protected bool NoDelay = false; // used to disable Nagle's algorithm
// timeouts
public int ReceiveTimeoutMilliseconds = 5000;
public int SendTimeoutMilliseconds = 5000;
// list of clients pending disconnection
private List<int> disconnect_client;
robot rob;
#region "constructors"
/// <summary>
/// Creates the SLAM server: builds the robot model with the given number of
/// stereo cameras and initialises the test simulation.
/// </summary>
/// <param name="no_of_stereo_cameras">number of stereo cameras on the robot</param>
public dpslamServer(int no_of_stereo_cameras)
{
rob = new robot(no_of_stereo_cameras);
dpslam_tests.CreateSim();
}
#endregion
#region "buffer storing data recently received"
// a buffer used to store the data recently received for
// debugging purposes
const int MAX_RECENT_DATA = 10;
private List<int> data_recently_received_client_number;
private List<string> data_recently_received;
/// <summary>
/// updates the buffer storing recently received data
/// This is typically used for debugging purposes
/// </summary>
/// <param name="client_number">client number which teh data was received from</param>
/// <param name="data_received">data content</param>
/// <summary>
/// Records a receipt in the recently-received buffer (debugging aid).
/// The two parallel lists are created lazily on first use and are trimmed
/// so the buffer holds at most MAX_RECENT_DATA entries.
/// </summary>
/// <param name="client_number">client number which the data was received from</param>
/// <param name="data_received">data content</param>
/// <param name="data_recently_received">parallel list of received payloads</param>
/// <param name="data_recently_received_client_number">parallel list of client numbers</param>
private static void UpdateDataRecentlyReceived(
    int client_number,
    string data_received,
    ref List<string> data_recently_received,
    ref List<int> data_recently_received_client_number)
{
    // Lazily create the parallel lists on the first receipt.
    if (data_recently_received_client_number == null)
    {
        data_recently_received_client_number = new List<int>();
        data_recently_received = new List<string>();
    }

    // Store the receipt.
    data_recently_received_client_number.Add(client_number);
    data_recently_received.Add(data_received);

    // Only store a limited number of recent receipts. The original test used
    // ">= MAX_RECENT_DATA" *after* the Add, which silently capped the buffer
    // at MAX_RECENT_DATA - 1 entries; trimming while the count exceeds the
    // limit keeps exactly MAX_RECENT_DATA.
    while (data_recently_received.Count > MAX_RECENT_DATA)
    {
        data_recently_received.RemoveAt(0);
        data_recently_received_client_number.RemoveAt(0);
    }
}
/// <summary>
/// clears the recently received data buffer
/// </summary>
/// <summary>
/// Clears the recently received data buffer. Both parallel lists are
/// emptied together; the null check covers the lazily-created case where
/// nothing has been received yet.
/// </summary>
public void ClearDataRecentlyReceived()
{
if (data_recently_received != null)
{
data_recently_received.Clear();
data_recently_received_client_number.Clear();
}
}
/// <summary>
/// Returns data recently received from teh given client number
/// This is typically used for debugging purposes
/// </summary>
/// <param name="client_number">client number from which the data was received</param>
/// <returns>data received, or empty string</returns>
/// <summary>
/// Returns the most recent payload received from the given client number,
/// or an empty string when nothing from that client is buffered.
/// This is typically used for debugging purposes.
/// </summary>
/// <param name="client_number">client number from which the data was received</param>
/// <returns>most recently received data, or empty string</returns>
public string GetDataRecentlyReceived(int client_number)
{
    string data = "";
    if (data_recently_received != null)
    {
        // Scan backwards so the newest matching receipt wins.
        for (int i = data_recently_received.Count - 1; i >= 0; i--)
        {
            if (data_recently_received_client_number[i] == client_number)
            {
                data = data_recently_received[i];
                break;
            }
        }
    }
    return data;
}
#endregion
#region "sockets stuff"
public delegate void UpdateRichEditCallback(string text);
public delegate void UpdateClientListCallback();
public AsyncCallback pfnWorkerCallBack;
private Socket m_mainSocket;
// An ArrayList is used to keep track of worker sockets that are designed
// to communicate with each connected client. Make it a synchronized ArrayList
// For thread safety
private ArrayList m_workerSocketList =
ArrayList.Synchronized(new System.Collections.ArrayList());
// The following variable will keep track of the cumulative
// total number of clients connected at any time. Since multiple threads
// can access this variable, modifying this variable should be done
// in a thread safe manner
private int m_clientCount = 0;
/// <summary>
/// start the server listening on the given port number
/// </summary>
/// <param name="PortNumber">port number</param>
/// <summary>
/// Starts the server listening on the given port number. Sets Running to
/// true on success; on a socket error it logs and leaves Running false.
/// </summary>
/// <param name="PortNumber">port number</param>
public void Start(int PortNumber)
{
Running = false;
this.PortNumber = PortNumber;
try
{
// Create the listening socket...
m_mainSocket = new Socket(AddressFamily.InterNetwork,
SocketType.Stream,
protocol);
// NoDelay disables Nagle's algorithm when configured.
m_mainSocket.NoDelay = NoDelay;
// GetIP() is defined elsewhere in this class -- presumably returns the
// local address to bind to; TODO confirm.
IPEndPoint ipLocal = new IPEndPoint(IPAddress.Parse(GetIP()), PortNumber);
Console.WriteLine("Server running on " + ipLocal.ToString());
// Bind to local IP Address...
m_mainSocket.Bind( ipLocal );
// Start listening (backlog of 4 pending connections)...
m_mainSocket.Listen(4);
// Create the call back for any client connections...
m_mainSocket.BeginAccept(new AsyncCallback (OnClientConnect), null);
//m_mainSocket.BeginDisconnect(new AsyncCallback (OnClientDisconnect), null);
Running = true;
}
catch(SocketException se)
{
Console.WriteLine("dpslamServer/Start(" + PortNumber.ToString() + ")/" + se.Message);
}
}
/// <summary>
/// This is the call back function, which will be invoked when a client is disconnected
/// </summary>
/// <param name="asyn"></param>
/// <summary>
/// This is the call back function, which will be invoked when a client is disconnected.
/// NOTE(review): only logs; the BeginDisconnect registration in Start() is
/// commented out, so this appears to be currently unused -- confirm.
/// </summary>
/// <param name="asyn"></param>
private static void OnClientDisconnect(IAsyncResult asyn)
{
Console.WriteLine("Client disconnected");
}
/// <summary>
/// This is the call back function, which will be invoked when a client is connected
/// </summary>
/// <param name="asyn"></param>
/// <summary>
/// Callback invoked when a client connects: completes the accept, assigns
/// the client a number, configures and registers the worker socket, starts
/// the receive loop for that client, and re-arms BeginAccept for the next
/// connection.
/// </summary>
/// <param name="asyn"></param>
private void OnClientConnect(IAsyncResult asyn)
{
try
{
// Here we complete/end the BeginAccept() asynchronous call
// by calling EndAccept() - which returns the reference to
// a new Socket object
Socket workerSocket = m_mainSocket.EndAccept (asyn);
workerSocket.NoDelay = NoDelay;
// Now increment the client count for this client
// in a thread safe manner
Interlocked.Increment(ref m_clientCount);
// set timeouts
workerSocket.ReceiveTimeout = ReceiveTimeoutMilliseconds;
workerSocket.SendTimeout = SendTimeoutMilliseconds;
// Add the workerSocket reference to our ArrayList
// (the list is a synchronized ArrayList for thread safety)
m_workerSocketList.Add(workerSocket);
// Send a welcome message to client
Console.WriteLine("Welcome client " + m_clientCount);
//msg += getDeviceStatusAll();
//SendToClient(msg, m_clientCount);
// Let the worker Socket do the further processing for the
// just connected client
WaitForData(workerSocket, m_clientCount);
// Since the main Socket is now free, it can go back and wait for
// other clients who are attempting to connect
m_mainSocket.BeginAccept(new AsyncCallback ( OnClientConnect ),null);
}
catch(ObjectDisposedException)
{
// Raised when the listening socket was closed while an accept was pending.
Console.WriteLine("dpslamServer/OnClientConnect/Socket has been closed");
System.Diagnostics.Debugger.Log(0,"1","\n OnClientConnection: Socket (" + PortNumber.ToString() + ") has been closed\n");
}
catch(SocketException se)
{
Console.WriteLine("dpslamServer/OnClientConnect(" + PortNumber.ToString() + ")/" + se.Message);
}
}
// State object threaded through BeginReceive/OnDataReceived: pairs a worker
// socket with its client number and a per-client receive buffer.
internal class SocketPacket
{
// Constructor which takes a Socket and a client number
public SocketPacket(System.Net.Sockets.Socket socket, int clientNumber)
{
m_currentSocket = socket;
m_clientNumber = clientNumber;
}
// Worker socket this packet belongs to.
public System.Net.Sockets.Socket m_currentSocket;
// Number assigned to the client on connect.
public int m_clientNumber;
// Buffer to store the data sent by the client
public byte[] dataBuffer = new byte[DATA_BUFFER_SIZE];
}
// Start waiting for data from the client.
// Arms an asynchronous BeginReceive on the given worker socket; incoming
// data is delivered to OnDataReceived along with a SocketPacket that
// identifies which client it came from.
private void WaitForData(System.Net.Sockets.Socket soc, int clientNumber)
{
    try
    {
        if ( pfnWorkerCallBack == null )
        {
            // Specify the call back function which is to be
            // invoked when there is any write activity by the
            // connected client (created lazily, shared by all clients)
            pfnWorkerCallBack = new AsyncCallback (OnDataReceived);
        }
        SocketPacket theSocPkt = new SocketPacket (soc, clientNumber);
        soc.BeginReceive (theSocPkt.dataBuffer, 0,
            theSocPkt.dataBuffer.Length,
            SocketFlags.None,
            pfnWorkerCallBack,
            theSocPkt);
    }
    catch(SocketException se)
    {
        Console.WriteLine("dpslamServer/WaitForData(" + PortNumber.ToString() + ")/" + se.Message);
    }
}
private ArrayList receive_buffer;
/// <summary>
/// Determines whether a received chunk of text terminates a complete
/// message, i.e. contains the closing tag of one of the known request,
/// update or disconnect XML documents.
/// </summary>
/// <param name="received_text">text received so far</param>
/// <returns>true if a closing tag was found</returns>
public static bool EndOfReceive(string received_text)
{
    string trimmed = received_text.Trim();
    string[] closing_tags =
    {
        "</" + STATUS_REQUEST + ">",
        "</" + STATUS_UPDATE + ">",
        "</" + STATUS_DISCONNECT + ">"
    };
    foreach (string tag in closing_tags)
    {
        if (trimmed.Contains(tag)) return (true);
    }
    return (false);
}
// Client numbers whose sockets should be dropped after the current receive.
List<int> disconnect_now = new List<int>();
// True while ProcessReceiveBuffer is running (simple re-entrancy flag).
public bool processing_receive_buffer;
/// <summary>
/// Instance-level wrapper: drains this server's receive buffer for one
/// client by delegating to the static ProcessReceiveBuffer overload,
/// passing the server's shared state by reference.
/// </summary>
public void ProcessReceiveBuffer(
    int client_number,
    ArrayList receive_buffer)
{
    processing_receive_buffer = true;
    dpslamServer.ProcessReceiveBuffer(
        client_number,
        receive_buffer,
        ref data_recently_received,
        ref data_recently_received_client_number,
        ref m_workerSocketList,
        ref kill,
        ref disconnect_client,
        ref disconnect_now);
    processing_receive_buffer = false;
}
/// <summary>
/// if the received text contains multiple xml documents
/// this splits it up ready for subsequent parsing
/// </summary>
/// <param name="received_text">text received</param>
/// <returns>list containing xml documents</returns>
public static List<string> SplitReceive(string received_text)
{
    List<string> documents = new List<string>();
    int search_from = 0;
    while (true)
    {
        // locate the start of the next embedded xml declaration
        int marker = received_text.IndexOf("<?xml", search_from);
        if (marker < 0) break;
        // step back one character so the chunk starts on the '<'
        int chunk_start = (search_from > 0) ? search_from - 1 : 0;
        string chunk = received_text.Substring(chunk_start, marker - chunk_start);
        if (chunk.Trim() != "") documents.Add(chunk);
        search_from = marker + 1;
    }
    // whatever remains after the last declaration is the final document
    int tail_start = (search_from > 0) ? search_from - 1 : 0;
    documents.Add(received_text.Substring(tail_start, received_text.Length - tail_start));
    return (documents);
}
/// <summary>
/// Drains all queued receipts for one client from the shared receive buffer.
/// The buffer stores alternating (string data, int client_number) pairs;
/// all pairs for the given client are concatenated, split into individual
/// xml documents and dispatched to ReceiveXmlMessageFromClient, then
/// removed from the buffer.
/// </summary>
public static void ProcessReceiveBuffer(
    int client_number,
    ArrayList receive_buffer,
    ref List<string> data_recently_received,
    ref List<int> data_recently_received_client_number,
    ref ArrayList m_workerSocketList,
    ref bool kill,
    ref List<int> disconnect_client,
    ref List<int> disconnect_now)
{
    if (receive_buffer != null)
    {
        string data = "";
        List<int> removals = new List<int>();
        // buffer layout: [data0, client0, data1, client1, ...]
        for (int i = 0; i < receive_buffer.Count; i += 2)
        {
            int client_no = (int)receive_buffer[i + 1];
            if (client_no == client_number)
            {
                data += (string)receive_buffer[i];
                removals.Add(i);
            }
        }
        if (data != "")
        {
            //Console.WriteLine("data = " + data);
            List<string> data_str = dpslamServer.SplitReceive(data);
            for (int i = 0; i < data_str.Count; i++)
            {
                ReceiveXmlMessageFromClient(
                    data_str[i],
                    client_number,
                    ref data_recently_received,
                    ref data_recently_received_client_number,
                    ref m_workerSocketList,
                    ref kill,
                    ref disconnect_client,
                    ref disconnect_now);
            }
            // remove consumed pairs back-to-front so earlier indices stay valid
            for (int i = removals.Count-1; i >= 0; i--)
            {
                receive_buffer.RemoveAt(removals[i] + 1);
                receive_buffer.RemoveAt(removals[i]);
            }
        }
        else
        {
            Console.WriteLine("ProcessReceiveBuffer/No data received");
        }
    }
    else
    {
        Console.WriteLine("Receive buffer is null");
    }
}
/// <summary>
/// a thread has been created to process incoming requests
/// </summary>
/// <param name="state"></param>
// NOTE(review): intentionally empty -- this only satisfies the WaitCallback
// signature handed to ThreadServerReceive in OnDataReceived; the worker
// thread does the actual processing.  Confirm before removing.
private void OnDataReceivedCallback(object state)
{
}
/// <summary>
/// This the call back function which will be invoked when the socket
/// detects any client writing of data on the stream.
/// Decodes the received bytes as UTF8, queues them (with the client number)
/// into the shared receive buffer -- lazily spawning the processing thread
/// on first use -- and re-arms the receive unless a disconnect is pending.
/// </summary>
/// <param name="asyn">Async result carrying the SocketPacket state.</param>
public void OnDataReceived(IAsyncResult asyn)
{
    SocketPacket socketData = (SocketPacket)asyn.AsyncState ;
    // guard against overlapping receives from the same client
    if (!receiving_data.Contains(socketData.m_clientNumber))
    {
        receiving_data.Add(socketData.m_clientNumber);
        try
        {
            // Complete the BeginReceive() asynchronous call by EndReceive() method
            // which will return the number of characters written to the stream
            // by the client
            int iRx = socketData.m_currentSocket.EndReceive (asyn);
            // +1 leaves a trailing NUL slot; zero chars are skipped below
            char[] chars = new char[iRx + 1];
            // Extract the characters as a buffer
            System.Text.Decoder d = System.Text.Encoding.UTF8.GetDecoder();
            d.GetChars(socketData.dataBuffer, 0, iRx, chars, 0);
            if (chars.Length > 1)
            {
                string szData = "";
                for (int ch = 0; ch < chars.Length; ch++)
                {
                    if (chars[ch] != 0) szData += chars[ch];
                }
                // add the data to the receive buffer
                if (receive_buffer == null)
                {
                    receive_buffer = new ArrayList();
                    // create a thread which will process incoming receipts
                    // in an organised fashion
                    ThreadServerReceive receive = new ThreadServerReceive(new WaitCallback(OnDataReceivedCallback), this, receive_buffer);
                    Thread receive_thread = new Thread(new ThreadStart(receive.Execute));
                    receive_thread.Priority = ThreadPriority.Normal;
                    receive_thread.Start();
                }
                // push data into the receive buffer as a (data, client) pair
                receive_buffer.Add(szData);
                receive_buffer.Add(socketData.m_clientNumber);
            }
            // Continue the waiting for data on the Socket
            if (!disconnect_now.Contains(socketData.m_clientNumber))
            {
                WaitForData(socketData.m_currentSocket, socketData.m_clientNumber );
            }
            else
            {
                // disconnect requested: stop listening and clear the flag
                disconnect_now.Remove(socketData.m_clientNumber);
            }
        }
        catch (ObjectDisposedException )
        {
            System.Diagnostics.Debugger.Log(0,"1","\nOnDataReceived: Socket has been closed\n");
        }
        catch(SocketException se)
        {
            if(se.ErrorCode == 10054) // Error code for Connection reset by peer
            {
                string msg = "Goodbye client " + socketData.m_clientNumber.ToString();
                Console.WriteLine(msg);
                // Remove the reference to the worker socket of the closed client
                // so that this object will get garbage collected
                int index = socketData.m_clientNumber - 1;
                if ((index > -1) && (index < m_workerSocketList.Count)) m_workerSocketList[index] = null;
            }
            else
            {
                Console.WriteLine("dpslamServer/OnDataReceived(" + PortNumber.ToString() + ")/" + se.Message);
            }
        }
        receiving_data.Remove(socketData.m_clientNumber);
    }
    else
    {
        // a receive is already in flight for this client; treat as a fault
        // and schedule the client for disconnection
        Console.WriteLine("Receive conflict: Data already being received from client " + socketData.m_clientNumber.ToString());
        disconnect_client.Add(socketData.m_clientNumber);
    }
}
/// <summary>
/// broadcast a set of devices and their properties to all connected clients
/// </summary>
/// <param name="broadcast_devices">list containing device Ids and property names</param>
/// <param name="quiet">report the broadcast xml to the console or not</param>
public void Broadcast(
    ArrayList broadcast_devices,
    bool quiet)
{
    if (broadcast_devices.Count > 0)
    {
        // get the changed state information as xml
        XmlDocument doc = GetDeviceStatus(broadcast_devices, STATUS_BROADCAST);
        string statusStr = doc.InnerXml;
        // send the xml to connected clients
        Send(statusStr);
        if (!quiet)
        {
            Console.WriteLine("Broadcasting:");
            Console.WriteLine(statusStr);
        }
    }
}
/// <summary>
/// safely remove a connected client
/// </summary>
/// <param name="clientnumber">index number of the client to be removed (1-based)</param>
/// <param name="m_workerSocketList">list of open sockets</param>
/// <param name="disconnect_client">list of client numbers to be disconnected</param>
protected static void RemoveClient(
    int clientnumber,
    ArrayList m_workerSocketList,
    List<int> disconnect_client)
{
    // client numbers are 1-based indices into the socket list
    int index = clientnumber - 1;
    if ((index >= 0) && (index < m_workerSocketList.Count))
    {
        Socket workerSocket = (Socket)m_workerSocketList[index];
        if (workerSocket != null)
        {
            // close asynchronously, then drop our reference
            workerSocket.BeginDisconnect(true, new AsyncCallback(OnClientDisconnect), null);
            m_workerSocketList.RemoveAt(index);
        }
    }
    // clear any pending disconnect request for this client
    if ((disconnect_client != null) && disconnect_client.Contains(clientnumber))
    {
        disconnect_client.Remove(clientnumber);
    }
}
/// <summary>
/// returns the number of connected clients
/// </summary>
/// <returns>number of connected clients, or zero when no list exists</returns>
public int GetNoOfConnectedClients()
{
    if (m_workerSocketList == null)
    {
        return (0);
    }
    return (m_workerSocketList.Count);
}
// list of client numbers currently sending data
protected List<int> sending_data = new List<int>();
/// <summary>
/// sends a message to all connected clients
/// </summary>
/// <param name="msg"></param>
public void Send(string msg)
{
Socket workerSocket = null;
//msg = "dpslamServer: " + msg + "\n";
byte[] byData = System.Text.Encoding.ASCII.GetBytes(msg);
for(int i = m_workerSocketList.Count - 1; i >= 0; i--)
{
workerSocket = (Socket)m_workerSocketList[i];
bool disconnect = false;
if (disconnect_client != null) disconnect = disconnect_client.Contains(i);
if (!disconnect)
{
if(workerSocket!= null)
{
if(workerSocket.Connected)
{
// if not already sending data to this client
if (!sending_data.Contains(i))
{
sending_data.Add(i);
try
{
workerSocket.Send(byData);
}
catch(SocketException se)
{
Console.WriteLine("dpslamServer/Send(" + PortNumber.ToString() + ")/" + se.Message);
RemoveClient(i, m_workerSocketList, disconnect_client);
}
sending_data.Remove(i);
}
}
}
}
else
{
|
[
"\t\t\t\t RemoveClient(i, m_workerSocketList, disconnect_client);"
] | 2,031
|
lcc
|
csharp
| null |
cf8e0d5f2adf8762eb153e24cdc6edacca8bb1e84f6b7c14
|
|
package eu.applabs.crowdsensingfitnesslibrary.portal.google;
import android.app.Activity;
import android.content.Intent;
import android.content.IntentSender;
import android.os.Bundle;
import android.util.Log;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.GooglePlayServicesUtil;
import com.google.android.gms.common.Scopes;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.common.api.Scope;
import com.google.android.gms.fitness.Fitness;
import com.google.android.gms.fitness.data.Bucket;
import com.google.android.gms.fitness.data.DataPoint;
import com.google.android.gms.fitness.data.DataSet;
import com.google.android.gms.fitness.data.DataType;
import com.google.android.gms.fitness.data.Field;
import com.google.android.gms.fitness.request.DataReadRequest;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import eu.applabs.crowdsensingfitnesslibrary.FitnessLibrary;
import eu.applabs.crowdsensingfitnesslibrary.data.ActivityBucket;
import eu.applabs.crowdsensingfitnesslibrary.data.Person;
import eu.applabs.crowdsensingfitnesslibrary.data.StepBucket;
import eu.applabs.crowdsensingfitnesslibrary.portal.Portal;
import eu.applabs.crowdsensingfitnesslibrary.settings.SettingsManager;
public class GooglePortal extends Portal implements GoogleApiClient.ConnectionCallbacks,
GoogleApiClient.OnConnectionFailedListener, ReadFitnessThread.IReadFitnessThreadListener {
private static final String sClassName = GooglePortal.class.getSimpleName();
private static final int sRequestOAuth = 1;
//private int mRequestId = 0;
private Activity mActivity = null;
private boolean mAuthInProgress = false;
private GoogleApiClient mGoogleApiClient = null;
private SettingsManager mSettingsManager = null;
private boolean mConnected = false;
private Map<Integer, RequestType> mRequestMap = null;
    /**
     * Logs every data point of the supplied data sets (type, start/end date
     * and each field's value) to logcat at INFO level. Debug aid only.
     *
     * @param list data sets to dump; may be null (then nothing is logged)
     */
    public void logDataSet(List<DataSet> list) {
        // NOTE(review): no explicit Locale -- acceptable here since output is
        // log-only, but consider SimpleDateFormat(pattern, Locale.US).
        SimpleDateFormat dateFormat = new SimpleDateFormat("dd-MM-yyyy");
        if(list != null) {
            for(DataSet dataSet : list) {
                Log.i(sClassName, "Data returned for Data type: " + dataSet.getDataType().getName());
                for (DataPoint dp : dataSet.getDataPoints()) {
                    Log.i(sClassName, "Data point:");
                    Log.i(sClassName, "\tType: " + dp.getDataType().getName());
                    Log.i(sClassName, "\tStart: " + dateFormat.format(dp.getStartTime(TimeUnit.MILLISECONDS)));
                    Log.i(sClassName, "\tEnd: " + dateFormat.format(dp.getEndTime(TimeUnit.MILLISECONDS)));
                    for(Field field : dp.getDataType().getFields()) {
                        Log.i(sClassName, "\tField: " + field.getName() + " Value: " + dp.getValue(field));
                    }
                }
            }
        }
    }
    /**
     * Converts Google Fit buckets into the library's StepBucket model,
     * reading the "steps" field plus the data point's start/end times.
     *
     * @param list raw buckets from a step-count read request; may be null
     * @return converted buckets (possibly partial if an exception occurred)
     */
    public List<StepBucket> convertToStepBucketList(List<Bucket> list) {
        List<StepBucket> returnList = new ArrayList<>();
        try {
            if(list != null) {
                for(Bucket bucket : list) {
                    List<DataSet> dataSets = bucket.getDataSets();
                    if(dataSets != null) {
                        for(DataSet dataSet : dataSets) {
                            for(DataPoint dp : dataSet.getDataPoints()) {
                                StepBucket stepBucket = new StepBucket();
                                Field field = getField(dp.getDataType().getFields(), "steps");
                                if(field != null) {
                                    stepBucket.setStepCount(dp.getValue(field).asInt());
                                }
                                Calendar c = Calendar.getInstance();
                                c.setTimeInMillis(dp.getStartTime(TimeUnit.MILLISECONDS));
                                stepBucket.setStepStartDate(c.getTime());
                                c.setTimeInMillis(dp.getEndTime(TimeUnit.MILLISECONDS));
                                stepBucket.setStepEndDate(c.getTime());
                                returnList.add(stepBucket);
                            }
                        }
                    }
                }
            }
        } catch (Exception e) {
            // Something went wrong -- deliberately best-effort: whatever was
            // converted before the failure is still returned.
        }
        return returnList;
    }
    /**
     * Converts Google Fit buckets into the library's ActivityBucket model,
     * reading "num_segments", "activity" (mapped onto the library's activity
     * enum by ordinal) and "duration", plus start/end times.
     *
     * @param list raw buckets from an activity read request; may be null
     * @return converted buckets (possibly partial if an exception occurred)
     */
    public List<ActivityBucket> convertToActivityBucketList(List<Bucket> list) {
        List<ActivityBucket> returnList = new ArrayList<>();
        try {
            if (list != null) {
                for (Bucket bucket : list) {
                    List<DataSet> dataSets = bucket.getDataSets();
                    if (dataSets != null) {
                        for (DataSet dataSet : dataSets) {
                            for (DataPoint dp : dataSet.getDataPoints()) {
                                ActivityBucket activityBucket = new ActivityBucket();
                                Field field = getField(dp.getDataType().getFields(), "num_segments");
                                if (field != null) {
                                    activityBucket.setActivityCount(dp.getValue(field).asInt());
                                }
                                field = getField(dp.getDataType().getFields(), "activity");
                                if (field != null) {
                                    // NOTE(review): assumes the Fit activity code is a valid
                                    // index into the library enum -- an out-of-range value
                                    // would be swallowed by the catch below.
                                    activityBucket.setActivityType(
                                            eu.applabs.crowdsensingfitnesslibrary.data.Activity.Type.values()[dp.getValue(field).asInt()]);
                                }
                                field = getField(dp.getDataType().getFields(), "duration");
                                if (field != null) {
                                    activityBucket.setActivityDuration(dp.getValue(field).asInt());
                                }
                                Calendar c = Calendar.getInstance();
                                c.setTimeInMillis(dp.getStartTime(TimeUnit.MILLISECONDS));
                                activityBucket.setActivityStartDate(c.getTime());
                                c.setTimeInMillis(dp.getEndTime(TimeUnit.MILLISECONDS));
                                activityBucket.setActivityEndDate(c.getTime());
                                returnList.add(activityBucket);
                            }
                        }
                    }
                }
            }
        } catch (Exception e) {
            // Something went wrong -- best-effort, see convertToStepBucketList.
        }
        return returnList;
    }
public Field getField(List<Field> list, String name) {
for(Field field : list) {
if(field.getName().compareTo(name) == 0) {
return field;
}
}
return null;
}
    /** Identifies this portal implementation as the Google Fit backend. */
    @Override
    public PortalType getPortalType() {
        return PortalType.Google;
    }
    /**
     * Builds a GoogleApiClient for the Fitness History API with read scopes
     * (location, nutrition, activity, body) and starts the connection.
     * Connection outcome is reported via onConnected / onConnectionFailed.
     *
     * @param activity host activity used for the client builder and OAuth UI
     */
    @Override
    public void login(Activity activity) {
        mActivity = activity;
        mRequestMap = new HashMap<>();
        mSettingsManager = new SettingsManager(activity);
        mGoogleApiClient = new GoogleApiClient.Builder(mActivity)
                .addApi(Fitness.HISTORY_API)
                .addScope(new Scope(Scopes.FITNESS_LOCATION_READ))
                .addScope(new Scope(Scopes.FITNESS_NUTRITION_READ))
                .addScope(new Scope(Scopes.FITNESS_ACTIVITY_READ))
                .addScope(new Scope(Scopes.FITNESS_BODY_READ))
                .addConnectionCallbacks(this)
                .addOnConnectionFailedListener(this)
                .build();
        if(mGoogleApiClient != null) {
            mGoogleApiClient.connect();
        }
    }
    /**
     * Disconnects from Google Play services, removes Google from the list of
     * connected services in settings, and notifies listeners of the change.
     */
    @Override
    public void logout() {
        if(mGoogleApiClient != null) {
            mGoogleApiClient.disconnect();
            mGoogleApiClient = null;
            mConnected = false;
        }
        if(mSettingsManager != null) {
            List<PortalType> list = mSettingsManager.getConnectedServices();
            if(list.contains(PortalType.Google)) {
                list.remove(PortalType.Google);
                mSettingsManager.setConnectedServices(list);
            }
        }
        notifyPortalConnectionStateChanged();
    }
    /** @return true once onConnected has fired and logout has not been called */
    @Override
    public boolean isConnected() {
        return mConnected;
    }
@Override
public boolean checkActivityResult(int requestCode, int resultCode, Intent data) {
if(requestCode == sRequestOAuth) {
mAuthInProgress = false;
if(resultCode == Activity.RESULT_OK) {
if (mGoogleApiClient != null
&& !mGoogleApiClient.isConnecting()
&& !mGoogleApiClient.isConnected()) {
mGoogleApiClient.connect();
}
}
return true;
}
return false;
}
    /**
     * Person lookup is not implemented for the Google portal: regardless of
     * connection state an Error status with an empty Person is reported.
     *
     * @param requestId caller-supplied id echoed back in the notification
     */
    @Override
    public void getPerson(int requestId) {
        if(mGoogleApiClient != null && mGoogleApiClient.isConnected()) {
            // NOTE(review): intentionally empty? No person data is fetched even
            // when connected -- looks like an unimplemented stub; confirm.
        }
        notifyPersonReceived(FitnessLibrary.IFitnessLibraryListener.ExecutionStatus.Error, requestId, new Person());
    }
    /**
     * Starts an asynchronous step-count read, aggregated into buckets of the
     * given duration. Results arrive via onSuccess/onError; when the client
     * is not connected an Error status with an empty list is reported.
     *
     * @param startTime    range start, in rangeUnit
     * @param endTime      range end, in rangeUnit
     * @param rangeUnit    unit of startTime/endTime
     * @param duration     bucket size
     * @param durationUnit unit of duration
     * @param requestId    id used to correlate the async reply
     */
    @Override
    public void getSteps(long startTime,
                         long endTime,
                         TimeUnit rangeUnit,
                         int duration,
                         TimeUnit durationUnit,
                         int requestId) {
        if(mGoogleApiClient != null && mGoogleApiClient.isConnected()) {
            DataReadRequest request = new DataReadRequest.Builder()
                    .aggregate(DataType.TYPE_STEP_COUNT_DELTA, DataType.AGGREGATE_STEP_COUNT_DELTA)
                    .bucketByTime(duration, durationUnit)
                    .setTimeRange(startTime, endTime, rangeUnit)
                    .build();
            //int requestId = mRequestId++;
            // remember the request type so onSuccess knows how to convert it
            mRequestMap.put(requestId, RequestType.Step);
            new ReadFitnessThread(mGoogleApiClient, requestId, request, this).start();
            return;
        }
        notifyStepsReceived(FitnessLibrary.IFitnessLibraryListener.ExecutionStatus.Error, requestId, new ArrayList<StepBucket>());
    }
    /**
     * Starts an asynchronous activity-summary read, aggregated into buckets
     * of the given duration. Mirrors getSteps but for activity segments.
     *
     * @param requestId id used to correlate the async reply
     */
    @Override
    public void getActivities(long startTime,
                              long endTime,
                              TimeUnit rangeUnit,
                              int duration,
                              TimeUnit durationUnit,
                              int requestId) {
        if(mGoogleApiClient != null && mGoogleApiClient.isConnected()) {
            DataReadRequest request = new DataReadRequest.Builder()
                    .aggregate(DataType.TYPE_ACTIVITY_SEGMENT, DataType.AGGREGATE_ACTIVITY_SUMMARY)
                    .bucketByTime(duration, durationUnit)
                    .setTimeRange(startTime, endTime, rangeUnit)
                    .build();
            //int requestId = mRequestId++;
            // remember the request type so onSuccess knows how to convert it
            mRequestMap.put(requestId, RequestType.Activity);
            new ReadFitnessThread(mGoogleApiClient, requestId, request, this).start();
            return;
        }
        notifyActivitiesReceived(FitnessLibrary.IFitnessLibraryListener.ExecutionStatus.Error, requestId, new ArrayList<ActivityBucket>());
    }
    // GoogleApiClient.ConnectionCallbacks
    /**
     * Marks the portal connected, records Google as a connected service in
     * settings, and notifies listeners.
     */
    @Override
    public void onConnected(Bundle bundle) {
        mConnected = true;
        if(mSettingsManager != null) {
            List<PortalType> list = mSettingsManager.getConnectedServices();
            if(!list.contains(PortalType.Google)) {
                list.add(PortalType.Google);
                mSettingsManager.setConnectedServices(list);
            }
        }
        notifyPortalConnectionStateChanged();
    }
@Override
public void onConnectionSuspended(int i) {
int x = 0;
x++;
}
    // GoogleApiClient.OnConnectionFailedListener
    /**
     * Handles connection failure: shows the Play services error dialog when
     * the failure has no resolution, otherwise launches the resolution
     * (OAuth consent) flow -- guarded so only one attempt runs at a time.
     */
    @Override
    public void onConnectionFailed(ConnectionResult result) {
        if (!result.hasResolution()) {
            GooglePlayServicesUtil.getErrorDialog(result.getErrorCode(), mActivity, 0).show();
            return;
        }
        if (!mAuthInProgress) {
            try {
                mAuthInProgress = true;
                // result is delivered back via checkActivityResult(sRequestOAuth, ...)
                result.startResolutionForResult(mActivity, sRequestOAuth);
            } catch (IntentSender.SendIntentException e) {
                Log.e(sClassName, "Exception while starting resolution activity", e);
            }
        }
    }
// ReadFitnessThread.IReadFitnessThreadListener
@Override
public void onSuccess(int requestId, List<Bucket> list) {
if(mRequestMap != null && mRequestMap.containsKey(requestId)) {
Portal.RequestType type = mRequestMap.get(requestId);
|
[
" for(Bucket bucket : list) {"
] | 768
|
lcc
|
java
| null |
a0064864baa8bd09ff6fc63712208d825b29b2d8f56ab567
|
|
#!/usr/bin/python
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# gen_callbacks.py
# Copyright (C) 2010 Simon Newton
import textwrap
def PrintLongLine(line):
  """Print a generated source line, tagging over-long lines for cpplint.

  Lines longer than 80 characters get a trailing NOLINT(whitespace/line_length)
  marker so the autogenerated header passes the style checker.
  """
  suffix = ''
  if len(line) > 80:
    suffix = ' // NOLINT(whitespace/line_length)'
  print('%s%s' % (line, suffix))
def Header():
  """Print the fixed license/doc preamble and opening lines of Callback.h.

  Fix: `print` is used as a function call (single argument), which is valid
  and output-identical under both Python 2 and Python 3; the old py2-only
  print statement broke under py3.
  """
  print(textwrap.dedent("""\
    /*
     * This library is free software; you can redistribute it and/or
     * modify it under the terms of the GNU Lesser General Public
     * License as published by the Free Software Foundation; either
     * version 2.1 of the License, or (at your option) any later version.
     *
     * This library is distributed in the hope that it will be useful,
     * but WITHOUT ANY WARRANTY; without even the implied warranty of
     * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
     * Lesser General Public License for more details.
     *
     * You should have received a copy of the GNU Lesser General Public
     * License along with this library; if not, write to the Free Software
     * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
     *
     * Callback.h
     * @brief Function objects.
     * Copyright (C) 2005-2010 Simon Newton
     *
     * THIS FILE IS AUTOGENERATED!
     * Please run edit & run gen_callbacks.py if you need to add more types.
     */
    /**
     * @defgroup callbacks Callbacks
     * @brief Function objects.
     *
     * Callbacks are powerful objects that behave like function pointers. They
     * can be constructed with a pointer to a either plain function or member
     * function. Argments can be provided at either creation time or execution
     * time.
     *
     * The SingleUse varient of a Callback automatically delete itself after it
     * has been executed.
     *
     * Callbacks are used throughout OLA to reduce the coupling between classes
     * and make for more modular code.
     *
     * Avoid creating Callbacks by directly calling the constructor. Instead use
     * the NewSingleCallback() and NewCallback() helper methods.
     *
     * @examplepara Simple function pointer replacement.
     * @code
     * // wrap a function that takes no args and returns a bool
     * SingleUseCallback<bool> *callback1 = NewSingleCallback(&Function0);
     *
     * // some time later
     * bool result = callback1->Run();
     * // callback1 has deleted itself at this point
     * @endcode
     *
     * @examplepara Method pointer with a single bound argument
     * @code
     * // Create a Callback for Method1 of the Object class and bind TEST_VALUE
     * // as the first argument.
     * Callback<void> *callback2 = NewCallback(object, &Object::Method1,
     *                                         TEST_VALUE);
     *
     * // This will call object->Method1(TEST_VALUE)
     * callback2->Run();
     * // this wasn't a SingleUse Callback, so callback is still around and
     * // needs to be deleted manually.
     * delete callback2;
     * @endcode
     *
     * @examplepara Method pointer that takes a single argument at execution time.
     * @code
     * // Create a Callback for a method that takes 1 argument and returns void.
     * BaseCallback1<void, unsigned int> *callback3 = NewCallback(
     *     object, &Object::Method1);
     *
     * // Call object->Method1(TEST_VALUE)
     * callback3->Run(TEST_VALUE);
     * // callback3 is still around at this stage
     * delete callback3;
     * @endcode
     *
     * @examplepara Method pointer with one bound argument and one execution time
     * argument.
     * @code
     * // Create a callback for a method that takes 2 args and returns void
     * BaseCallback2<void, int, int> *callback4 = NewSingleCallback(
     *     object,
     *     &Object::Method2,
     *     TEST_VALUE);
     *
     * // This calls object->Method2(TEST_VALUE, TEST_VALUE2);
     * callback4->Run(TEST_VALUE2);
     * // callback4 is still around
     * delete callback4;
     * @endcode
     *
     * @note The code in Callback.h is autogenerated by gen_callbacks.py. Please
     * run edit & run gen_callbacks.py if you need to add more types.
     *
     */
    /**
     * @addtogroup callbacks
     * @{
     * @file Callback.h
     * @}
     */
    #ifndef INCLUDE_OLA_CALLBACK_H_
    #define INCLUDE_OLA_CALLBACK_H_
    namespace ola {
    /**
     * @addtogroup callbacks
     * @{
     */
    """))
def Footer():
  """Print the closing namespace and include-guard lines of Callback.h.

  Fix: print() call form (single argument) is output-identical under
  Python 2 and valid under Python 3; the old py2-only statement was not.
  """
  print(textwrap.dedent("""\
    /**
     * @}
     */
    } // namespace ola
    #endif // INCLUDE_OLA_CALLBACK_H_"""))
def GenerateBase(number_of_args):
  """Generate the base Callback classes.

  Emits BaseCallbackN (abstract), CallbackN (multi-use), SingleUseCallbackN
  (self-deleting) and the void specialization of SingleUseCallbackN for the
  given arity.

  Fixes: print statements converted to single-argument print() calls and
  xrange replaced by range -- both output-identical under Python 2 and
  required for Python 3.

  Args:
    number_of_args: number of execution-time arguments the callbacks take.
  """
  optional_comma = ''
  if number_of_args > 0:
    optional_comma = ', '
  typenames = ', '.join('typename Arg%d' % i for i in range(number_of_args))
  arg_list = ', '.join('Arg%d arg%d' % (i, i) for i in range(number_of_args))
  args = ', '.join('arg%d' % i for i in range(number_of_args))
  arg_types = ', '.join('Arg%d' % i for i in range(number_of_args))
  # generate the base callback class
  print(textwrap.dedent("""\
    /**
     * @brief The base class for all %d argument callbacks.
     */""" % number_of_args))
  PrintLongLine('template <typename ReturnType%s%s>' %
                (optional_comma, typenames))
  print('class BaseCallback%d {' % number_of_args)
  print(' public:')
  print(' virtual ~BaseCallback%d() {}' % number_of_args)
  PrintLongLine(' virtual ReturnType Run(%s) = 0;' % arg_list)
  print('};')
  print('')
  # generate the multi-use version of the callback
  print(textwrap.dedent("""\
    /**
     * @brief A %d argument callback which can be called multiple times.
     */""" % number_of_args))
  PrintLongLine('template <typename ReturnType%s%s>' %
                (optional_comma, typenames))
  print('class Callback%d: public BaseCallback%d<ReturnType%s%s> {' %
        (number_of_args, number_of_args, optional_comma, arg_types))
  print(' public:')
  print(' virtual ~Callback%d() {}' % number_of_args)
  PrintLongLine(' ReturnType Run(%s) { return this->DoRun(%s); }' %
                (arg_list, args))
  print(' private:')
  print(' virtual ReturnType DoRun(%s) = 0;' % arg_list)
  print('};')
  print('')
  # generate the single-use version of the callback
  print(textwrap.dedent("""\
    /**
     * @brief A %d argument callback which deletes itself after it's run.
     */""" % number_of_args))
  PrintLongLine('template <typename ReturnType%s%s>' %
                (optional_comma, typenames))
  PrintLongLine('class SingleUseCallback%d: public BaseCallback%d<ReturnType%s%s> {' %
                (number_of_args, number_of_args, optional_comma, arg_types))
  print(' public:')
  print(' virtual ~SingleUseCallback%d() {}' % number_of_args)
  print(' ReturnType Run(%s) {' % arg_list)
  print(' ReturnType ret = this->DoRun(%s);' % args)
  print(' delete this;')
  print(' return ret;')
  print(' }')
  print(' private:')
  print(' virtual ReturnType DoRun(%s) = 0;' % arg_list)
  print('};')
  print('')
  # the void specialization
  print(textwrap.dedent("""\
    /**
     * @brief A %d arg, single use callback that returns void.
     */""" % number_of_args))
  print('template <%s>' % typenames)
  PrintLongLine('class SingleUseCallback%d<void%s%s>: public BaseCallback%d<void%s%s> {' %
                (number_of_args, optional_comma, arg_types, number_of_args,
                 optional_comma, arg_types))
  print(' public:')
  print(' virtual ~SingleUseCallback%d() {}' % number_of_args)
  print(' void Run(%s) {' % arg_list)
  print(' this->DoRun(%s);' % args)
  print(' delete this;')
  print(' }')
  print(' private:')
  print(' virtual void DoRun(%s) = 0;' % arg_list)
  print('};')
  print('')
def GenerateHelperFunction(bind_count,
exec_count,
function_name,
parent_class,
is_method=True):
"""Generate the helper functions which create callbacks.
Args:
bind_count the number of args supplied at create time.
exec_count the number of args supplied at exec time.
function_name what to call the helper function
parent_class the parent class to use
is_method True if this is a method callback, False if this is a function
callback.
"""
optional_comma = ''
if bind_count > 0 or exec_count > 0:
optional_comma = ', '
typenames = (['typename A%d' % i for i in xrange(bind_count)] +
['typename Arg%d' % i for i in xrange(exec_count)])
bind_types = ['A%d' % i for i in xrange(bind_count)]
exec_types = ['Arg%d' % i for i in xrange(exec_count)]
method_types = ', '.join(bind_types + exec_types)
if exec_count > 0:
exec_types = [''] + exec_types
exec_type_str = ', '.join(exec_types)
optional_class, ptr_name, signature = '', 'callback', '*callback'
if is_method:
optional_class, ptr_name, signature = (
'typename Class, ', 'method', 'Class::*method')
# The single use helper function
print textwrap.dedent("""\
/**
* @brief A helper function to create a new %s with %d
* create-time arguments and %d execution time arguments.""" %
(parent_class, bind_count, exec_count))
if is_method:
print " * @tparam Class the class with the member function."
print " * @tparam ReturnType the return type of the callback."
for i in xrange(bind_count):
print " * @tparam A%d a create-time argument type." % i
for i in xrange(exec_count):
print " * @tparam Arg%d an exec-time argument type." % i
if is_method:
print " * @param object the object to call the member function on."
print (" * @param method the member function pointer to use when executing "
"the callback.");
else:
print (" * @param callback the function pointer to use when executing the "
"callback.")
for i in xrange(bind_count):
print " * @param a%d a create-time argument." % i
if is_method:
print " * @returns The same return value as the member function."
else:
print " * @returns The same return value as the function."
print " */"
PrintLongLine('template <%stypename ReturnType%s%s>' %
(optional_class, optional_comma, ', '.join(typenames)))
PrintLongLine('inline %s%d<ReturnType%s>* %s(' %
(parent_class, exec_count, exec_type_str, function_name))
if is_method:
print ' Class* object,'
if bind_count:
print ' ReturnType (%s)(%s),' % (signature, method_types)
for i in xrange(bind_count):
suffix = ','
if i == bind_count - 1:
suffix = ') {'
print ' A%d a%d%s' % (i, i, suffix)
else:
print ' ReturnType (%s)(%s)) {' % (signature, method_types)
if is_method:
print ' return new MethodCallback%d_%d<Class,' % (bind_count, exec_count)
else:
print ' return new FunctionCallback%d_%d<' % (bind_count, exec_count)
PrintLongLine(' %s%d<ReturnType%s>,' %
(parent_class, exec_count, exec_type_str))
|
[
" if bind_count > 0 or exec_count > 0:"
] | 1,549
|
lcc
|
python
| null |
b8ddcb433aa1bfe07017e01e1c838c3c2d78b8b973eabc54
|
|
from django.db.models.loading import get_model
from metadata.utils.date_range import in_range
from django.shortcuts import render
from django.utils import simplejson
from django.http import Http404, HttpResponse
from django.conf import settings
from schedule.utils import range as s_range
import csv
import json
# This is used to limit range_XYZ requests to prevent them from
# DoSing URY accidentally.
MAX_RANGE_LENGTH = 10 * 24 * 60 * 60 # Ten days
def laconia_error(request, message, status=403):
    """
    Throws an error from the laconia interface.

    Renders the plain-text error template with the given message.
    The default status code emitted is 403 Forbidden.
    """
    return render(
        request,
        'laconia/error.txt',
        {'message': message},
        content_type='text/plain',
        status=status
    )
def current_show_location_and_time(request):
    """Sends the current show location, time and show ID as text.

    The template reads the current show from context processors, so no
    extra context is passed here.
    """
    # This just expects the current show to be given by context processors now.
    return render(
        request,
        'laconia/current-show-location-and-time.txt',
        content_type="text/plain"
    )
def current_show_and_next(request):
    """Sends info about the current show (and the next one, if any) as JSON.

    Keys: onAir/onAirDesc/onAirPres/onAirTime/onAirImg for the current show;
    upNext/upNextDesc/upNextPres/upNextTime for the following one. Either
    group is omitted when the schedule returns fewer items.
    """
    # In case the worst happens and the schedule doesn't come back with
    # two items, we're very cautious about the size of day.
    day = list(s_range.day(limit=2))
    json_data = {}
    if len(day) >= 1:
        on_air = day[0]
        # fall back to the default player image when the show has none
        if on_air.player_image:
            image = on_air.player_image.url
        else:
            image = settings.STATIC_URL + "img/default_show_player.png"
        json_data.update(
            {
                "onAir": on_air.title,
                "onAirDesc": on_air.description,
                "onAirPres": on_air.by_line(),
                "onAirTime": '{:%H:%M} - {:%H:%M}'.format(
                    on_air.start_time, on_air.end_time
                ),
                "onAirImg": image,
            }
        )
    if len(day) >= 2:
        up_next = day[1]
        json_data.update(
            {
                "upNext": up_next.title,
                "upNextDesc": up_next.description,
                "upNextPres": up_next.by_line(),
                "upNextTime": '{:%H:%M} - {:%H:%M}'.format(
                    up_next.start_time, up_next.end_time
                )
            }
        )
    return HttpResponse(
        simplejson.dumps(json_data), content_type="application/json"
    )
def range_querystring(request, appname, modelname, format='json'):
    """
    Wrapper to `range` that expects its date range in the query
    string (GET parameters ``start`` and ``end``; 404 if missing).

    Since this view mainly exists to accommodate FullCalendar, which
    expects its output in JSON, the default format is JSON as opposed
    to CSV.
    """
    if 'start' not in request.GET or 'end' not in request.GET:
        raise Http404
    # delegates to the module-level `range` view below (which shadows the
    # builtin `range` in this module)
    return range(
        request,
        appname,
        modelname,
        request.GET['start'],
        request.GET['end'],
        format
    )
def range(request, appname, modelname, start, end, format='csv'):
    """
    Retrieves a summary about any items in the given model that fall
    within the given range.

    Items are returned if any time within their own time range falls
    within the given range.

    If format is 'csv', the result is delivered as a CSV if the given
    model exists and supports range queries, or a HTTP 404 if not.
    The CSV may be empty.

    If format is 'json', the result is instead a JSON list suitable
    for FullCalendar (schema at http://arshaw.com/fullcalendar) -
    again if the given model cannot be queried for range a HTTP 404
    will be emitted.

    If the model supports metadata queries, the 'title' and
    'description' metadata will be pulled if it exists.

    If the model supports credit queries, the by-line will also be
    added.
    """
    # NOTE: this view deliberately shadows the builtin ``range`` at
    # module level.
    model = get_model(appname, modelname)
    if model is None:
        raise Http404
    start = int(start)
    end = int(end)
    # Request sanity checking
    if (end - start) < 0:
        response = laconia_error(
            request,
            'Requested range is negative.'
        )
    elif (end - start) > MAX_RANGE_LENGTH:
        # Cap range length so one request cannot sweep the whole DB.
        response = laconia_error(
            request,
            'Requested range is too long (max: {0} seconds)'.format(
                MAX_RANGE_LENGTH
            )
        )
    else:
        try:
            items = in_range(model, start, end)
        except AttributeError:
            # Assuming this means the model can't do range-based ops
            raise Http404
        filename = u'{0}-{1}-{2}-{3}'.format(
            appname,
            modelname,
            start,
            end
        )
        if format == 'csv':
            f = range_csv
        elif format == 'json':
            f = range_json
        else:
            raise ValueError('Invalid format specifier.')
        response = f(filename, items)
    return response
def range_csv(filename, items):
    """
    Returns a range query result in CSV format, as an attachment named
    ``<filename>.csv``.

    The order of items in the CSV rows are:
    1) Primary key
    2) Start time as UNIX timestamp
    3) End time as UNIX timestamp
    4) 'title' from default metadata strand, if metadata exists;
        else blank
    5) 'description' from default metadata strand, if metadata exists;
        else blank
    6) By-line, if credits exist; else blank
    """
    # content_type (not the long-deprecated ``mimetype`` argument) for
    # consistency with the other responses in this module.
    response = HttpResponse(content_type='text/csv')
    response['Content-Disposition'] = (
        u'attachment; filename="{0}.csv"'.format(filename)
    )
    writer = csv.writer(response)
    for item in items:
        writer.writerow([
            item.pk,
            item.range_start_unix(),
            item.range_end_unix(),
            getattr(item, 'title', ''),
            getattr(item, 'description', ''),
            # BUG FIX: the fallback used to be ``lambda x: ''`` which,
            # being called with no arguments below, raised a TypeError
            # for any item without a by_line. A zero-argument lambda
            # matches the call site.
            getattr(item, 'by_line', lambda: '')()
        ])
    return response
def range_item_title(item):
    """
    Returns the most sensible human-readable title for the item.

    This is the item's 'title' attribute where one exists; otherwise
    the empty string (primarily for logging compatibility purposes).
    """
    try:
        return item.title
    except AttributeError:
        return ''
def range_item_dict(item):
    """
    Builds the dictionary of range-query-pertinent information for one
    range item.
    """
    title = range_item_title(item)
    return dict(
        id=item.pk,
        title=title,
        start=item.range_start_unix(),
        end=item.range_end_unix(),
    )
def range_json(filename, items):
"""
|
[
" Returns a range query in JSON (full-calendar) format."
] | 722
|
lcc
|
python
| null |
7f21bbeb3cc4c97701a78291f4d98bf6819b5e05cca6641e
|
|
/**
Copyright (C) SYSTAP, LLC 2006-2015. All rights reserved.
Contact:
SYSTAP, LLC
2501 Calvert ST NW #106
Washington, DC 20008
licenses@systap.com
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; version 2 of the License.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
/*
* Created on Aug 29, 2011
*/
package com.bigdata.rdf.sparql.ast.optimizers;
import org.openrdf.query.algebra.StatementPattern.Scope;
import com.bigdata.bop.IBindingSet;
import com.bigdata.bop.bindingSet.ListBindingSet;
import com.bigdata.rdf.internal.IV;
import com.bigdata.rdf.sparql.ast.ASTContainer;
import com.bigdata.rdf.sparql.ast.AbstractASTEvaluationTestCase;
import com.bigdata.rdf.sparql.ast.ConstantNode;
import com.bigdata.rdf.sparql.ast.IQueryNode;
import com.bigdata.rdf.sparql.ast.JoinGroupNode;
import com.bigdata.rdf.sparql.ast.ProjectionNode;
import com.bigdata.rdf.sparql.ast.QueryRoot;
import com.bigdata.rdf.sparql.ast.QueryType;
import com.bigdata.rdf.sparql.ast.StatementPatternNode;
import com.bigdata.rdf.sparql.ast.VarNode;
import com.bigdata.rdf.sparql.ast.eval.AST2BOpContext;
import com.bigdata.rdf.sparql.ast.eval.ASTSearchOptimizer;
import com.bigdata.rdf.sparql.ast.service.ServiceNode;
import com.bigdata.rdf.store.BD;
import com.bigdata.rdf.store.BDS;
/**
* Test suite for {@link ASTSearchOptimizer}.
*
* @author <a href="mailto:thompsonbry@users.sourceforge.net">Bryan Thompson</a>
* @version $Id$
*/
public class TestASTSearchOptimizer extends AbstractASTEvaluationTestCase {
/**
 * Zero-argument constructor, required by the JUnit test runner.
 */
public TestASTSearchOptimizer() {
}
/**
 * Constructs a named test case.
 *
 * @param name the name of the test method to run
 */
public TestASTSearchOptimizer(String name) {
    super(name);
}
/**
* Given
*
* <pre>
* PREFIX bd: <http://www.bigdata.com/rdf/search#>
* SELECT ?subj ?score
* {
* SELECT ?subj ?score
* WHERE {
* ?lit bd:search "mike" .
* ?lit bd:relevance ?score .
* ?subj ?p ?lit .
* }
* }
* </pre>
*
* The AST is rewritten as:
*
* <pre>
* PREFIX bd: <http://www.bigdata.com/rdf/search#>
* QueryType: SELECT
* SELECT ( VarNode(subj) AS VarNode(subj) ) ( VarNode(score) AS VarNode(score) )
* JoinGroupNode {
* StatementPatternNode(VarNode(subj), VarNode(p), VarNode(lit), DEFAULT_CONTEXTS)
* com.bigdata.rdf.sparql.ast.eval.AST2BOpBase.estimatedCardinality=5
* com.bigdata.rdf.sparql.ast.eval.AST2BOpBase.originalIndex=SPOC
* SERVICE <ConstantNode(TermId(0U)[http://www.bigdata.com/rdf/search#search])> {
* JoinGroupNode {
* StatementPatternNode(VarNode(lit), ConstantNode(TermId(0U)[http://www.bigdata.com/rdf/search#search]), ConstantNode(TermId(0L)[mike]), DEFAULT_CONTEXTS)
* StatementPatternNode(VarNode(lit), ConstantNode(TermId(0U)[http://www.bigdata.com/rdf/search#relevance]), VarNode(score), DEFAULT_CONTEXTS)
* }
* }
* }
* }
* </pre>
*/
public void test_searchServiceOptimizer_01() {
/*
* Note: DO NOT share structures in this test!!!!
*/
// final VarNode s = new VarNode("s");
// final VarNode p = new VarNode("p");
// final VarNode o = new VarNode("o");
//
// final IConstant const1 = new Constant<IV>(TermId.mockIV(VTE.URI));
@SuppressWarnings("rawtypes")
final IV searchIV = makeIV(BDS.SEARCH);
@SuppressWarnings("rawtypes")
final IV relevanceIV = makeIV(BDS.RELEVANCE);
@SuppressWarnings("rawtypes")
final IV mikeIV = makeIV(store.getValueFactory().createLiteral("mike"));
final IBindingSet[] bsets = new IBindingSet[] { //
new ListBindingSet()
};
/**
* The source AST.
*
* <pre>
* PREFIX bd: <http://www.bigdata.com/rdf/search#>
* SELECT ?subj ?score
* {
* SELECT ?subj ?score
* WHERE {
* ?lit bd:search "mike" .
* ?lit bd:relevance ?score .
* ?subj ?p ?lit .
* }
* }
* </pre>
*/
final QueryRoot given = new QueryRoot(QueryType.SELECT);
{
final ProjectionNode projection = new ProjectionNode();
given.setProjection(projection);
projection.addProjectionVar(new VarNode("subj"));
projection.addProjectionVar(new VarNode("score"));
final JoinGroupNode whereClause = new JoinGroupNode();
given.setWhereClause(whereClause);
whereClause.addChild(new StatementPatternNode(new VarNode("lit"),
new ConstantNode(searchIV), new ConstantNode(mikeIV),
null/* c */, Scope.DEFAULT_CONTEXTS));
whereClause.addChild(new StatementPatternNode(new VarNode("lit"),
new ConstantNode(relevanceIV), new VarNode("score"),
null/* c */, Scope.DEFAULT_CONTEXTS));
whereClause.addChild(new StatementPatternNode(new VarNode("subj"),
new VarNode("p"), new VarNode("lit"), null/* c */,
Scope.DEFAULT_CONTEXTS));
}
/**
* The expected AST after the rewrite
*
* <pre>
* PREFIX bd: <http://www.bigdata.com/rdf/search#>
* QueryType: SELECT
* SELECT ( VarNode(subj) AS VarNode(subj) ) ( VarNode(score) AS VarNode(score) )
* JoinGroupNode {
* StatementPatternNode(VarNode(subj), VarNode(p), VarNode(lit), DEFAULT_CONTEXTS)
* com.bigdata.rdf.sparql.ast.eval.AST2BOpBase.estimatedCardinality=5
* com.bigdata.rdf.sparql.ast.eval.AST2BOpBase.originalIndex=SPOC
* SERVICE <ConstantNode(TermId(0U)[http://www.bigdata.com/rdf/search#search])> {
* JoinGroupNode {
* StatementPatternNode(VarNode(lit), ConstantNode(TermId(0U)[http://www.bigdata.com/rdf/search#search]), ConstantNode(TermId(0L)[mike]), DEFAULT_CONTEXTS)
* StatementPatternNode(VarNode(lit), ConstantNode(TermId(0U)[http://www.bigdata.com/rdf/search#relevance]), VarNode(score), DEFAULT_CONTEXTS)
* }
* }
* }
* }
* </pre>
*/
final QueryRoot expected = new QueryRoot(QueryType.SELECT);
{
final ProjectionNode projection = new ProjectionNode();
expected.setProjection(projection);
projection.addProjectionVar(new VarNode("subj"));
projection.addProjectionVar(new VarNode("score"));
final JoinGroupNode whereClause = new JoinGroupNode();
expected.setWhereClause(whereClause);
whereClause.addChild(new StatementPatternNode(new VarNode("subj"),
new VarNode("p"), new VarNode("lit"), null/* c */,
Scope.DEFAULT_CONTEXTS));
{
final JoinGroupNode serviceGraphPattern = new JoinGroupNode();
serviceGraphPattern.addChild(new StatementPatternNode(
new VarNode("lit"), new ConstantNode(searchIV),
new ConstantNode(mikeIV), null/* c */,
Scope.DEFAULT_CONTEXTS));
serviceGraphPattern.addChild(new StatementPatternNode(
new VarNode("lit"), new ConstantNode(relevanceIV),
|
[
" new VarNode(\"score\"), null/* c */,"
] | 644
|
lcc
|
java
| null |
6a5c6418a11e7d5ca010f8f986194ae5efec73ff41f7c167
|
|
# -*- coding: utf-8 -*-
# This file is part of Shoop.
#
# Copyright (c) 2012-2015, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
import random
from django import forms
from django.contrib import messages
from django.contrib.auth import get_user_model
from django.core.urlresolvers import reverse
from django.db.transaction import atomic
from django.forms.models import modelform_factory
from django.http.response import HttpResponseRedirect
from django.utils.encoding import force_text
from django.utils.translation import ugettext_lazy as _
from shoop.admin.toolbar import (
DropdownActionButton, DropdownDivider, DropdownItem, PostActionButton, Toolbar, get_default_edit_toolbar
)
from shoop.admin.utils.urls import get_model_url
from shoop.admin.utils.views import CreateOrUpdateView
from shoop.core.models import Contact, PersonContact
from shoop.utils.excs import Problem
from shoop.utils.text import flatten
class BaseUserForm(forms.ModelForm):
    """
    Base model form for creating and editing users.

    On creation a mandatory password field is shown; when editing an
    existing user the password field is removed (password changes go
    through a separate view) and a read-only summary of the user's
    special permissions is shown instead.
    """
    password = forms.CharField(label=_("Password"), widget=forms.PasswordInput)
    permission_info = forms.CharField(
        label=_("Permissions"),
        widget=forms.TextInput(attrs={"readonly": True, "disabled": True}),
        required=False,
        help_text=_("See the permissions view to change these.")
    )
    def __init__(self, *args, **kwargs):
        super(BaseUserForm, self).__init__(*args, **kwargs)
        if self.instance.pk:
            # Changing the password for an existing user requires more confirmation
            self.fields.pop("password")
            # Read-only summary of the user's staff/superuser flags.
            self.initial["permission_info"] = ", ".join(force_text(perm) for perm in [
                _("staff") if self.instance.is_staff else "",
                _("superuser") if self.instance.is_superuser else "",
            ] if perm) or _("No special permissions")
        else:
            self.fields.pop("permission_info")
    def save(self, commit=True):
        """Save the user, hashing the password when one was entered."""
        user = super(BaseUserForm, self).save(commit=False)
        if "password" in self.fields:
            user.set_password(self.cleaned_data["password"])
        if commit:
            user.save()
        return user
class UserDetailToolbar(Toolbar):
    """
    Toolbar for the user detail/edit view.

    Shows the standard save controls plus, for already-saved users, an
    "Actions" dropdown (password, permissions and contact shortcuts)
    and an activate/deactivate button.
    """
    def __init__(self, view):
        self.view = view
        self.request = view.request
        self.user = view.object
        super(UserDetailToolbar, self).__init__()
        self.extend(get_default_edit_toolbar(self.view, "user_form", with_split_save=False))
        if self.user.pk:
            # Extra controls only make sense once the user exists.
            self._build_existing_user()
    def _build_existing_user(self):
        """Appends the action dropdown and (de)activation button."""
        user = self.user
        change_password_button = DropdownItem(
            url=reverse("shoop_admin:user.change-password", kwargs={"pk": user.pk}),
            text=_(u"Change Password"), icon="fa fa-exchange"
        )
        reset_password_button = DropdownItem(
            url=reverse("shoop_admin:user.reset-password", kwargs={"pk": user.pk}),
            disable_reason=(_("User has no email address") if not user.email else None),
            text=_(u"Send Password Reset Email"), icon="fa fa-envelope"
        )
        permissions_button = DropdownItem(
            url=reverse("shoop_admin:user.change-permissions", kwargs={"pk": user.pk}),
            text=_(u"Edit Permissions"), icon="fa fa-lock"
        )
        menu_items = [
            change_password_button,
            reset_password_button,
            permissions_button,
            DropdownDivider()
        ]
        # Link to the user's person contact if one exists, otherwise
        # offer to create (and pre-bind) one.
        person_contact = PersonContact.objects.filter(user=user).first()
        if person_contact:
            contact_url = reverse("shoop_admin:contact.detail", kwargs={"pk": person_contact.pk})
            menu_items.append(DropdownItem(
                url=contact_url,
                icon="fa fa-search",
                text=_(u"Contact Details"),
            ))
        else:
            contact_url = reverse("shoop_admin:contact.new") + "?user_id=%s" % user.pk
            menu_items.append(DropdownItem(
                url=contact_url,
                icon="fa fa-plus",
                text=_(u"New Contact"),
                tooltip=_("Create a new contact and associate it with this user")
            ))
        self.append(DropdownActionButton(
            menu_items,
            icon="fa fa-star",
            text=_(u"Actions"),
            extra_css_class="btn-info",
        ))
        # Offer the opposite of the current activation state; the POST
        # is handled by UserDetailView via the "set_is_active" name.
        if not user.is_active:
            self.append(PostActionButton(
                post_url=self.request.path,
                name="set_is_active",
                value="1",
                icon="fa fa-check-circle",
                text=_(u"Activate User"),
                extra_css_class="btn-gray",
            ))
        else:
            self.append(PostActionButton(
                post_url=self.request.path,
                name="set_is_active",
                value="0",
                icon="fa fa-times-circle",
                text=_(u"Deactivate User"),
                extra_css_class="btn-gray",
            ))
    # TODO: Add extensibility
class UserDetailView(CreateOrUpdateView):
    """Create/edit view for users of the (swappable) user model."""
    # Model set during dispatch because it's swappable
    template_name = "shoop/admin/users/detail.jinja"
    context_object_name = "user"
    fields = ("username", "email", "first_name", "last_name")
def get_form_class(self):
return modelform_factory(self.model, form=BaseUserForm, fields=self.fields)
    def _get_bind_contact(self):
        """
        Returns the Contact identified by the ``contact_id`` request
        parameter, or None when no contact id was given.
        """
        # NOTE(review): request.REQUEST (merged GET/POST) was deprecated
        # in Django 1.7 and removed in 1.9 -- confirm the targeted
        # Django version before upgrading.
        contact_id = self.request.REQUEST.get("contact_id")
        if contact_id:
            return Contact.objects.get(pk=contact_id)
        return None
def get_initial(self):
initial = super(UserDetailView, self).get_initial()
contact = self._get_bind_contact()
if contact:
# Guess some sort of usable username
username = flatten(contact, ".")
if len(username) < 3:
username = getattr(contact, "email", "").split("@")[0]
if len(username) < 3:
username = "user%08d" % random.randint(0, 99999999)
initial.update(
username=username,
email=getattr(contact, "email", ""),
first_name=getattr(contact, "first_name", ""),
last_name=getattr(contact, "last_name", ""),
)
return initial
    def get_toolbar(self):
        """Returns the user-specific detail-view toolbar."""
        return UserDetailToolbar(view=self)
    @atomic
    def save_form(self, form):
        """
        Saves the user and, inside the same transaction, binds the
        contact given in the request to the saved user when that
        contact is not yet bound to any user.
        """
        self.object = form.save()
        contact = self._get_bind_contact()
        if contact and not contact.user:
            contact.user = self.object
            contact.save()
            messages.info(self.request, _(u"User bound to contact %(contact)s.") % {"contact": contact})
    def get_success_url(self):
        """Redirects back to the saved user's admin detail page."""
        return get_model_url(self.object)
def _handle_set_is_active(self):
state = bool(int(self.request.POST["set_is_active"]))
if not state:
if (self.object.is_superuser and not self.request.user.is_superuser):
raise Problem(_("You can not deactivate a superuser."))
if self.object == self.request.user:
raise Problem(_("You can not deactivate yourself."))
self.object.is_active = state
self.object.save(update_fields=("is_active",))
messages.success(self.request, _("%(user)s is now %(state)s.") % {
"user": self.object,
"state": _("active") if state else _("inactive")
})
return HttpResponseRedirect(self.request.path)
def post(self, request, *args, **kwargs):
|
[
" self.object = self.get_object()"
] | 558
|
lcc
|
python
| null |
2f9e87b359085e3741fab9367978273edee2f0b2e60fcfe8
|
|
//-----------------------------------------------------------------------------
//
// Copyright (c) Microsoft Corporation. All Rights Reserved.
// This code is licensed under the Microsoft Public License.
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//-----------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Diagnostics;
//^ using Microsoft.Contracts;
namespace Microsoft.Cci.Ast {
/// <summary>
/// Represents a .NET assembly.
/// </summary>
public abstract class Assembly : Module, IAssembly {

  /// <summary>
  /// Allocates an object that represents a .NET assembly.
  /// </summary>
  /// <param name="name">The name of the unit.</param>
  /// <param name="location">An indication of the location where the unit is or will be stored. This need not be a file system path and may be empty.
  /// The interpretation depends on the IMetadataHost instance used to resolve references to this unit.</param>
  /// <param name="moduleName">The name of the module containing the assembly manifest. This can be different from the name of the assembly itself.</param>
  /// <param name="assemblyReferences">A list of the assemblies that are referenced by this module.</param>
  /// <param name="moduleReferences">A list of the modules that are referenced by this module.</param>
  /// <param name="resources">A list of named byte sequences persisted with the assembly and used during execution, typically via .NET Framework helper classes.</param>
  /// <param name="files">
  /// A list of the files that constitute the assembly. These are not the source language files that may have been
  /// used to compile the assembly, but the files that contain constituent modules of a multi-module assembly as well
  /// as any external resources. It corresponds to the File table of the .NET assembly file format.
  /// </param>
  protected Assembly(IName name, string location, IName moduleName, IEnumerable<IAssemblyReference> assemblyReferences, IEnumerable<IModuleReference> moduleReferences,
    IEnumerable<IResourceReference> resources, IEnumerable<IFileReference> files)
    : base(name, location, Dummy.Assembly, assemblyReferences, moduleReferences) {
    this.moduleName = moduleName;
    this.resources = resources;
    this.files = files;
  }

  /// <summary>
  /// A list of aliases for the root namespace of the referenced assembly.
  /// </summary>
  public IEnumerable<IName> Aliases {
    get { return Enumerable<IName>.Empty; }
  }

  /// <summary>
  /// A list of objects representing persisted instances of types that extend System.Attribute. Provides an extensible way to associate metadata
  /// with this assembly.
  /// </summary>
  /// <remarks>Computed on first access via <see cref="GetAssemblyAttributes"/> and cached thereafter.</remarks>
  public IEnumerable<ICustomAttribute> AssemblyAttributes {
    get {
      if (this.assemblyAttributes == null) {
        var assemblyAttributes = this.GetAssemblyAttributes();
        assemblyAttributes.TrimExcess();
        this.assemblyAttributes = assemblyAttributes.AsReadOnly();
      }
      return this.assemblyAttributes;
    }
  }
  // Lazily initialized cache for AssemblyAttributes.
  IEnumerable<ICustomAttribute> assemblyAttributes;

  /// <summary>
  /// The identity of the assembly. Computed lazily and cached.
  /// </summary>
  public AssemblyIdentity AssemblyIdentity {
    get {
      if (this.assemblyIdentity == null)
        this.assemblyIdentity = UnitHelper.GetAssemblyIdentity(this);
      return this.assemblyIdentity;
    }
  }
  AssemblyIdentity/*?*/ assemblyIdentity;

  /// <summary>
  /// The assembly that contains this module. An assembly is its own containing assembly.
  /// </summary>
  public override IAssembly/*?*/ ContainingAssembly {
    get { return this; }
  }

  /// <summary>
  /// Identifies the culture associated with the assembly. Typically specified for satellite assemblies with localized resources.
  /// Empty if not specified.
  /// </summary>
  public virtual string Culture {
    get { return string.Empty; }
  }

  /// <summary>
  /// Calls visitor.Visit(IAssembly).
  /// </summary>
  public override void Dispatch(IMetadataVisitor visitor) {
    visitor.Visit(this);
  }

  /// <summary>
  /// Calls visitor.Visit(IAssemblyReference).
  /// </summary>
  public override void DispatchAsReference(IMetadataVisitor visitor) {
    visitor.Visit((IAssemblyReference)this);
  }

  /// <summary>
  /// Public types defined in other modules making up this assembly and to which other assemblies may refer to via this assembly.
  /// </summary>
  public virtual IEnumerable<IAliasForType> ExportedTypes {
    get { return Enumerable<IAliasForType>.Empty; }
  }

  /// <summary>
  /// A list of the files that constitute the assembly. These are not the source language files that may have been
  /// used to compile the assembly, but the files that contain constituent modules of a multi-module assembly as well
  /// as any external resources. It corresponds to the File table of the .NET assembly file format.
  /// </summary>
  public IEnumerable<IFileReference> Files {
    get { return this.files; }
  }
  readonly IEnumerable<IFileReference> files;

  /// <summary>
  /// A set of bits and bit ranges representing properties of the assembly. The value of <see cref="Flags"/> can be set
  /// from source code via the AssemblyFlags assembly custom attribute. The interpretation of the property depends on the target platform.
  /// </summary>
  public virtual uint Flags {
    get { return 0; } //TODO: get from options or an attribute
  }

  /// <summary>
  /// Returns a list of custom attributes that describes this type declaration member.
  /// Typically, these will be derived from this.SourceAttributes. However, some source attributes
  /// might instead be persisted as metadata bits and other custom attributes may be synthesized
  /// from information not provided in the form of source custom attributes.
  /// The list is not trimmed to size, since an override of this method may call the base method
  /// and then add more attributes.
  /// </summary>
  protected virtual List<ICustomAttribute> GetAssemblyAttributes() {
    List<ICustomAttribute> result = new List<ICustomAttribute>();
    bool sawTypeWithExtensions = false;
    this.UnitNamespaceRoot.FillInWithAssemblyAttributes(result, ref sawTypeWithExtensions);
    // If any type in the assembly defines extension methods, synthesize
    // the assembly-level ExtensionAttribute.
    if (sawTypeWithExtensions) {
      var eattr = new Microsoft.Cci.MutableCodeModel.CustomAttribute();
      eattr.Constructor = this.Compilation.ExtensionAttributeCtor;
      result.Add(eattr);
    }
    return result;
  }

  /// <summary>
  /// The encrypted SHA1 hash of the persisted form of the referenced assembly.
  /// </summary>
  public IEnumerable<byte> HashValue {
    get { return Enumerable<byte>.Empty; }
  }

  /// <summary>
  /// True if the implementation of the referenced assembly used at runtime is not expected to match the version seen at compile time.
  /// </summary>
  public virtual bool IsRetargetable {
    get { return false; } //TODO: get from options or an attribute
  }

  /// <summary>
  /// The kind of metadata stored in the module. For example whether the module is an executable or a manifest resource file.
  /// A module without a resolvable entry point is treated as a DLL; otherwise as a console application.
  /// </summary>
  public override ModuleKind Kind {
    get { return this.EntryPoint.ResolvedMethod is Dummy ? ModuleKind.DynamicallyLinkedLibrary : ModuleKind.ConsoleApplication; } //TODO: obtain it from the compiler options
  }

  /// <summary>
  /// A list of the modules that constitute the assembly.
  /// </summary>
  public IEnumerable<IModule> MemberModules {
    get { return Enumerable<IModule>.Empty; }
  }

  /// <summary>
  /// The identity of the module. For an assembly this is the same as the assembly identity.
  /// </summary>
  public override ModuleIdentity ModuleIdentity {
    get { return this.AssemblyIdentity; }
  }

  /// <summary>
  /// The name of the module containing the assembly manifest. This can be different from the name of the assembly itself.
  /// </summary>
  public override IName ModuleName {
    get { return this.moduleName; }
  }
  readonly IName moduleName;

  /// <summary>
  /// The public part of the key used to encrypt the SHA1 hash over the persisted form of this assembly. Empty if not specified.
  /// This value is used by the loader to decrypt HashValue which it then compares with a freshly computed hash value to verify the
  /// integrity of the assembly.
  /// </summary>
  public virtual IEnumerable<byte> PublicKey {
    get { return Enumerable<byte>.Empty; } //TODO: get this from an option or attribute
  }

  /// <summary>
  /// The hashed 8 bytes of the public key, called the public key token, of the referenced assembly. This is non-empty if the referenced assembly is strongly signed.
  /// </summary>
  public IEnumerable<byte> PublicKeyToken {
    get { return UnitHelper.ComputePublicKeyToken(this.PublicKey); }
  }

  /// <summary>
  /// A list of named byte sequences persisted with the assembly and used during execution, typically via .NET Framework helper classes.
  /// </summary>
  public IEnumerable<IResourceReference> Resources {
    get { return this.resources; }
  }
  readonly IEnumerable<IResourceReference> resources;

  /// <summary>
  /// A list of objects representing persisted instances of pairs of security actions and sets of security permissions.
  /// These apply by default to every method reachable from the module.
  /// </summary>
  public virtual IEnumerable<ISecurityAttribute> SecurityAttributes {
    get { return Enumerable<ISecurityAttribute>.Empty; } //TODO: compute this
  }

  /// <summary>
  /// The version of the assembly.
  /// </summary>
  public virtual Version Version {
    get { return new System.Version(0, 0, 0, 0); } //TODO: obtain from compiler options or custom attributes
  }

  #region IAssemblyReference Members

  IAssembly IAssemblyReference.ResolvedAssembly {
    get { return this; }
  }

  AssemblyIdentity IAssemblyReference.UnifiedAssemblyIdentity {
    get { return this.AssemblyIdentity; }
  }

  bool IAssemblyReference.ContainsForeignTypes {
    get { return false; }
  }

  #endregion

  #region IModuleReference Members

  IAssemblyReference/*?*/ IModuleReference.ContainingAssembly {
    get { return this; }
  }

  #endregion
}
/// <summary>
/// A reference to a .NET assembly.
/// </summary>
public class ResolvedAssemblyReference : ResolvedModuleReference, IAssemblyReference {
/// <summary>
/// Allocates a reference to a .NET assembly.
/// </summary>
/// <param name="referencedAssembly">The assembly to reference.</param>
public ResolvedAssemblyReference(IAssembly referencedAssembly)
  : base(referencedAssembly) {
  // No aliases by default; use the empty list rather than null.
  this.aliases = Enumerable<IName>.Empty;
}
/// <summary>
/// A list of aliases for the root namespace of the referenced assembly.
/// </summary>
public IEnumerable<IName> Aliases {
  get { return this.aliases; }
}
// Backing store for Aliases; initialized to the empty list by the constructor.
IEnumerable<IName> aliases;
/// <summary>
/// The identity of the assembly reference. Delegates to the resolved assembly.
/// </summary>
public AssemblyIdentity AssemblyIdentity {
  get { return this.ResolvedAssembly.AssemblyIdentity; }
}
/// <summary>
/// Identifies the culture associated with the assembly reference. Typically specified for satellite assemblies with localized resources.
/// Empty if not specified. Delegates to the resolved assembly.
/// </summary>
public string Culture {
  get { return this.ResolvedAssembly.Culture; }
}
/// <summary>
/// Calls the visitor.Visit(IAssemblyReference) method.
/// </summary>
public override void Dispatch(IMetadataVisitor visitor) {
  visitor.Visit(this);
}
/// <summary>
/// Calls the visitor.Visit(IAssemblyReference) method.
/// For this type, dispatching as a reference is the same as Dispatch.
/// </summary>
public override void DispatchAsReference(IMetadataVisitor visitor) {
  visitor.Visit(this);
}
/// <summary>
/// The encrypted SHA1 hash of the persisted form of the referenced assembly.
/// Delegates to the resolved assembly.
/// </summary>
public IEnumerable<byte> HashValue {
  get { return this.ResolvedAssembly.HashValue; }
}
/// <summary>
/// True if the implementation of the referenced assembly used at runtime is not expected to match the version seen at compile time.
/// Delegates to the resolved assembly.
/// </summary>
public virtual bool IsRetargetable {
  get { return this.ResolvedAssembly.IsRetargetable; }
}
/// <summary>
/// The public part of the key used to encrypt the SHA1 hash over the persisted form of the referenced assembly. Empty if not specified.
/// This value is used by the loader to decrypt an encrypted hash value stored in the assembly, which it then compares with a freshly computed hash value
/// in order to verify the integrity of the assembly. Delegates to the resolved assembly.
/// </summary>
public IEnumerable<byte> PublicKey {
  get { return this.ResolvedAssembly.PublicKey; }
}
/// <summary>
/// The hashed 8 bytes of the public key called public key token of the referenced assembly. This is non empty of the referenced assembly is strongly signed.
/// </summary>
public IEnumerable<byte> PublicKeyToken {
|
[
" get { return this.ResolvedAssembly.PublicKeyToken; }"
] | 1,633
|
lcc
|
csharp
| null |
d5131ee794bcf700f0f43d91cbdd4928e4078dee775d3bd7
|
|
// <copyright file="TFQMR.cs" company="Math.NET">
// Math.NET Numerics, part of the Math.NET Project
// http://numerics.mathdotnet.com
// http://github.com/mathnet/mathnet-numerics
// http://mathnetnumerics.codeplex.com
//
// Copyright (c) 2009-2010 Math.NET
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
// </copyright>
namespace Nequeo.Science.Math.LinearAlgebra.Complex32.Solvers.Iterative
{
using System;
using Generic.Solvers.Status;
using Nequeo.Science.Math;
using Preconditioners;
using Properties;
/// <summary>
/// A Transpose Free Quasi-Minimal Residual (TFQMR) iterative matrix solver.
/// </summary>
/// <remarks>
/// <para>
/// The TFQMR algorithm was taken from: <br/>
/// Iterative methods for sparse linear systems.
/// <br/>
/// Yousef Saad
/// <br/>
/// Algorithm is described in Chapter 7, section 7.4.3, page 219
/// </para>
/// <para>
/// The example code below provides an indication of the possible use of the
/// solver.
/// </para>
/// </remarks>
public sealed class TFQMR : IIterativeSolver
{
/// <summary>
/// The status used if there is no status, i.e. the solver hasn't run yet and there is no
/// iterator.
/// </summary>
private static readonly ICalculationStatus DefaultStatus = new CalculationIndetermined();

/// <summary>
/// The preconditioner that will be used. Can be set to <see langword="null" />, in which case the default
/// pre-conditioner will be used.
/// </summary>
private IPreConditioner _preconditioner;

/// <summary>
/// The iterative process controller.
/// </summary>
private IIterator _iterator;

/// <summary>
/// Indicates if the user has stopped the solver.
/// NOTE(review): presumably set by a stop/cancel method outside this chunk -- confirm.
/// </summary>
private bool _hasBeenStopped;
/// <summary>
/// Initializes a new instance of the <see cref="TFQMR"/> class.
/// </summary>
/// <remarks>
/// When using this constructor the solver will use the <see cref="IIterator"/> with
/// the standard settings and a default preconditioner.
/// </remarks>
public TFQMR() : this(null, null)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="TFQMR"/> class with a custom iterator.
/// </summary>
/// <remarks>
/// <para>
/// When using this constructor the solver will use a default preconditioner.
/// </para>
/// <para>
/// The main advantages of using a user defined <see cref="IIterator"/> are:
/// <list type="number">
/// <item>It is possible to set the desired convergence limits.</item>
/// <item>
/// It is possible to check the reason for which the solver finished
/// the iterative procedure by calling the <see cref="IIterator.Status"/> property.
/// </item>
/// </list>
/// </para>
/// </remarks>
/// <param name="iterator">The <see cref="IIterator"/> that will be used to monitor the iterative process.</param>
public TFQMR(IIterator iterator) : this(null, iterator)
{
}
        /// <summary>
        /// Initializes a new instance of the <see cref="TFQMR"/> class.
        /// </summary>
        /// <remarks>
        /// When using this constructor the solver will use the <see cref="IIterator"/> with
        /// the standard settings.
        /// </remarks>
        /// <param name="preconditioner">The <see cref="IPreConditioner"/> that will be used to precondition the matrix equation.
        /// May be <see langword="null"/>, in which case a unit preconditioner is created on the first solve.</param>
        public TFQMR(IPreConditioner preconditioner) : this(preconditioner, null)
        {
        }
        /// <summary>
        /// Initializes a new instance of the <see cref="TFQMR"/> class.
        /// </summary>
        /// <remarks>
        /// <para>
        /// The main advantages of using a user defined <see cref="IIterator"/> are:
        /// <list type="number">
        /// <item>It is possible to set the desired convergence limits.</item>
        /// <item>
        /// It is possible to check the reason for which the solver finished
        /// the iterative procedure by calling the <see cref="IIterator.Status"/> property.
        /// </item>
        /// </list>
        /// </para>
        /// <para>
        /// Either argument may be <see langword="null"/>; defaults are supplied on the first solve.
        /// </para>
        /// </remarks>
        /// <param name="preconditioner">The <see cref="IPreConditioner"/> that will be used to precondition the matrix equation.</param>
        /// <param name="iterator">The <see cref="IIterator"/> that will be used to monitor the iterative process.</param>
        public TFQMR(IPreConditioner preconditioner, IIterator iterator)
        {
            _iterator = iterator;
            _preconditioner = preconditioner;
        }
        /// <summary>
        /// Sets the <see cref="IPreConditioner"/> that will be used to precondition the iterative process.
        /// </summary>
        /// <param name="preconditioner">The preconditioner. Passing <see langword="null"/> causes a
        /// default unit preconditioner to be created on the next solve.</param>
        public void SetPreconditioner(IPreConditioner preconditioner)
        {
            _preconditioner = preconditioner;
        }
        /// <summary>
        /// Sets the <see cref="IIterator"/> that will be used to track the iterative process.
        /// </summary>
        /// <param name="iterator">The iterator. Passing <see langword="null"/> causes a
        /// default iterator to be created on the next solve.</param>
        public void SetIterator(IIterator iterator)
        {
            _iterator = iterator;
        }
/// <summary>
/// Gets the status of the iteration once the calculation is finished.
/// </summary>
public ICalculationStatus IterationResult
{
get
{
return (_iterator != null) ? _iterator.Status : DefaultStatus;
}
}
        /// <summary>
        /// Stops the solve process.
        /// </summary>
        /// <remarks>
        /// Note that it may take an indetermined amount of time for the solver to actually stop the process.
        /// The flag set here is cleared again at the start of each <c>Solve</c> call.
        /// </remarks>
        public void StopSolve()
        {
            _hasBeenStopped = true;
        }
/// <summary>
/// Solves the matrix equation Ax = b, where A is the coefficient matrix, b is the
/// solution vector and x is the unknown vector.
/// </summary>
/// <param name="matrix">The coefficient matrix, <c>A</c>.</param>
/// <param name="vector">The solution vector, <c>b</c>.</param>
/// <returns>The result vector, <c>x</c>.</returns>
public Vector Solve(Matrix matrix, Vector vector)
{
if (vector == null)
{
throw new ArgumentNullException();
}
Vector result = new DenseVector(matrix.RowCount);
Solve(matrix, vector, result);
return result;
}
/// <summary>
/// Solves the matrix equation Ax = b, where A is the coefficient matrix, b is the
/// solution vector and x is the unknown vector.
/// </summary>
/// <param name="matrix">The coefficient matrix, <c>A</c>.</param>
/// <param name="input">The solution vector, <c>b</c></param>
/// <param name="result">The result vector, <c>x</c></param>
public void Solve(Matrix matrix, Vector input, Vector result)
{
// If we were stopped before, we are no longer
// We're doing this at the start of the method to ensure
// that we can use these fields immediately.
_hasBeenStopped = false;
// Error checks
if (matrix == null)
{
throw new ArgumentNullException("matrix");
}
if (matrix.RowCount != matrix.ColumnCount)
{
throw new ArgumentException(Resources.ArgumentMatrixSquare, "matrix");
}
if (input == null)
{
throw new ArgumentNullException("input");
}
if (result == null)
{
throw new ArgumentNullException("result");
}
if (result.Count != input.Count)
{
throw new ArgumentException(Resources.ArgumentVectorsSameLength);
}
if (input.Count != matrix.RowCount)
{
throw new ArgumentException(Resources.ArgumentMatrixDimensions);
}
// Initialize the solver fields
// Set the convergence monitor
if (_iterator == null)
{
_iterator = Iterator.CreateDefault();
}
if (_preconditioner == null)
{
_preconditioner = new UnitPreconditioner();
}
_preconditioner.Initialize(matrix);
var d = new DenseVector(input.Count);
var r = new DenseVector(input);
var uodd = new DenseVector(input.Count);
var ueven = new DenseVector(input.Count);
var v = new DenseVector(input.Count);
|
[
" var pseudoResiduals = new DenseVector(input);"
] | 1,146
|
lcc
|
csharp
| null |
85a1c12ae7813d7b73cbbf458fd9262e032bd9cc88005aa1
|
|
package net.arccotangent.amathng.math;
import net.arccotangent.amathng.Main;
import net.arccotangent.amathng.utils.MathUtils;
import net.arccotangent.amathng.utils.NumberHelper;
import org.apfloat.*;
import java.util.ArrayList;
public class Statistics {
	/**
	 * Gaussian error function, evaluated via its Maclaurin series
	 * erf(z) = (2 / sqrt(pi)) * sum_{n >= 0} (-1)^n z^(2n+1) / (n! (2n+1)),
	 * summing terms until two successive partial sums are equal at the
	 * working precision.
	 * @param z Value
	 * @return erf(z)
	 */
	public static Apfloat erf(Apfloat z) {
		Apfloat sqrtPi = ApfloatMath.sqrt(NumberHelper.create("pi", Main.RADIX, Main.NUMBER_PRECISION).real());
		Apfloat term1 = MathUtils.TWO.real().divide(sqrtPi);
		boolean negate = false;
		Apint n = MathUtils.ZERO_INT;
		Apfloat prev;
		Apfloat current = MathUtils.ZERO_INT;
		do {
			prev = current;
			long twoNplusOne = (MathUtils.TWO_INT.multiply(n)).add(MathUtils.ONE_INT).longValue();
			Apfloat numerTerm2 = ApfloatMath.pow(z, twoNplusOne);
			Apfloat numer;
			// The series alternates in sign; 'negate' toggles every iteration.
			if (negate) {
				numer = numerTerm2.negate();
				negate = false;
			} else {
				numer = numerTerm2;
				negate = true;
			}
			Apfloat nFactorial = MathUtils.factorial(n);
			Apfloat denom = nFactorial.multiply(new Apfloat((double)twoNplusOne));
			current = current.add(numer.divide(denom));
			n = n.add(MathUtils.ONE_INT);
		} while (prev.compareTo(current) != 0); // stop once the partial sum no longer changes
		return term1.multiply(current);
	}
/**
* Cumulative distribution function
* @param x Value x
* @return cdf(x)
*/
public static Apfloat cdf(Apfloat x) {
Apfloat ONE_HALF = NumberHelper.create("0.5", Main.RADIX, Main.NUMBER_PRECISION).real();
Apfloat sqrtTwo = ApfloatMath.sqrt(MathUtils.TWO.real());
Apfloat error = erf(x.divide(sqrtTwo));
error = MathUtils.ONE.real().add(error);
return ONE_HALF.multiply(error);
}
	/**
	 * Linear regression line calculation function (ordinary least squares).
	 * Uses the closed-form formulas
	 * slope = (N * sum(xy) - sum(x) * sum(y)) / (N * sum(x^2) - sum(x)^2)
	 * and intercept = (sum(y) - slope * sum(x)) / N.
	 * @param values A 2xN array holding the x and y values to be inserted into the linear regression equation in the following format:<br>
	 * values[0][N] = x values<br>
	 * values[1][N] = y values
	 * @return A 2 element array holding the coefficient for x and the y-intercept. Will be null if an error occurs (eg. more x values than y values).
	 */
	public static Apcomplex[] linreg(Apcomplex[][] values) {
		if (values[0].length != values[1].length)
			return null;
		int valueAmount = values[0].length;
		Apcomplex xSum = MathUtils.ZERO.real();
		Apcomplex ySum = MathUtils.ZERO.real();
		Apcomplex xySum = MathUtils.ZERO.real();
		Apcomplex x2Sum = MathUtils.ZERO.real();
		Apcomplex y2Sum = MathUtils.ZERO.real();
		for (int i = 0; i < valueAmount; i++) {
			Apcomplex x = values[0][i];
			Apcomplex y = values[1][i];
			Apcomplex xy = x.multiply(y);
			xSum = xSum.add(x);
			x2Sum = x2Sum.add(ApcomplexMath.pow(x, MathUtils.TWO));
			ySum = ySum.add(y);
			// NOTE(review): y2Sum is accumulated but never used below.
			y2Sum = y2Sum.add(ApcomplexMath.pow(y, MathUtils.TWO));
			xySum = xySum.add(xy);
		}
		Apcomplex slopeNumer = new Apfloat(valueAmount, Main.REGRESSION_PRECISION).multiply(xySum).subtract(xSum.multiply(ySum));
		Apcomplex slopeDenom = new Apfloat(valueAmount, Main.REGRESSION_PRECISION).multiply(x2Sum).subtract(ApcomplexMath.pow(xSum, MathUtils.TWO));
		Apcomplex slope = slopeNumer.divide(slopeDenom);
		Apcomplex interceptNumer = ySum.subtract(slope.multiply(xSum));
		Apcomplex interceptDenom = new Apfloat(valueAmount);
		Apcomplex intercept = interceptNumer.divide(interceptDenom);
		return new Apcomplex[] {slope, intercept};
	}
	/**
	 * Calculate the correlation coefficient (Pearson) for a set of data:
	 * r = sum((x - xAvg)(y - yAvg)) / (sqrt(sum((x - xAvg)^2)) * sqrt(sum((y - yAvg)^2))).
	 * @param values A 2xN array holding the x and y values to be inserted into the linear regression equation in the following format:<br>
	 * values[0][N] = x values<br>
	 * values[1][N] = y values
	 * @return r, the correlation coefficient. Will be null if an error occurs (eg. more x values than y values).
	 */
	public static Apcomplex pearsonCorrelation(Apcomplex[][] values) {
		if (values[0].length != values[1].length)
			return null;
		int valueAmount = values[0].length;
		// First pass: sample means of x and y.
		Apcomplex xAvg = MathUtils.ZERO;
		Apcomplex yAvg = MathUtils.ZERO;
		for (int i = 0; i < valueAmount; i++) {
			xAvg = xAvg.add(values[0][i]);
			yAvg = yAvg.add(values[1][i]);
		}
		xAvg = xAvg.divide(new Apfloat(valueAmount));
		yAvg = yAvg.divide(new Apfloat(valueAmount));
		// Second pass: covariance numerator and the two variance terms.
		Apcomplex xAvgDiffTimesYAvgDiffSum = MathUtils.ZERO.real();
		Apcomplex xAvgDiff2Sum = MathUtils.ZERO.real();
		Apcomplex yAvgDiff2Sum = MathUtils.ZERO.real();
		for (int i = 0; i < valueAmount; i++) {
			Apcomplex x = values[0][i];
			Apcomplex y = values[1][i];
			xAvgDiffTimesYAvgDiffSum = xAvgDiffTimesYAvgDiffSum.add(x.subtract(xAvg).multiply(y.subtract(yAvg)));
			xAvgDiff2Sum = xAvgDiff2Sum.add(ApcomplexMath.pow(x.subtract(xAvg), MathUtils.TWO));
			yAvgDiff2Sum = yAvgDiff2Sum.add(ApcomplexMath.pow(y.subtract(yAvg), MathUtils.TWO));
		}
		Apcomplex numer = xAvgDiffTimesYAvgDiffSum;
		Apcomplex denom = ApcomplexMath.sqrt(xAvgDiff2Sum).multiply(ApcomplexMath.sqrt(yAvgDiff2Sum)).real();
		return numer.divide(denom);
	}
/**
* Get the mode of an array of sorted numbers
* @param sortedNumbers Array of sorted numbers
* @return An array of modes, blank if there are no modes
*/
public static Apfloat[] getModes(Apfloat[] sortedNumbers) {
int modeOccurrences = 1;
int maxModeOccurrences = 1;
Apfloat temp;
ArrayList<Apfloat> modes = new ArrayList<>();
for (int i = 1; i < sortedNumbers.length; i++) {
if (sortedNumbers[i].compareTo(sortedNumbers[i - 1]) == 0) {
temp = sortedNumbers[i];
modeOccurrences++;
if (modeOccurrences > maxModeOccurrences) {
modes.clear();
modes.add(temp);
maxModeOccurrences = modeOccurrences;
i = 0;
} else if (modeOccurrences == maxModeOccurrences) {
modes.add(temp);
}
} else {
modeOccurrences = 0;
}
}
Apfloat[] modesArray = new Apfloat[modes.size()];
modes.toArray(modesArray);
return modesArray;
}
/**
* Get the medians of an array of sorted numbers
* @param sortedNumbers Array of sorted numbers
* @return An array of medians, either size 1 or 2
*/
public static Apfloat[] getMedians(Apfloat[] sortedNumbers) {
int left = 0;
int right = sortedNumbers.length - 1;
while (right - left >= 2) {
left++;
right--;
}
if (left == right) {
|
[
"\t\t\treturn new Apfloat[] {sortedNumbers[left]};"
] | 685
|
lcc
|
java
| null |
463db0e034d463507a505f61f5addf4f1153492f465bc47f
|
|
"""
A collection of utilities to edit and construct tree sequences.
"""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
import json
import random
import numpy as np
import tskit.provenance as provenance
import tskit
def add_provenance(provenance_table, method_name):
    """Record a provenance row naming the tsutil method that produced a table."""
    record = {"command": "tsutil.{}".format(method_name)}
    provenance_table.add_row(json.dumps(provenance.get_provenance_dict(record)))
def subsample_sites(ts, num_sites):
    """
    Returns a copy of the specified tree sequence with a random subsample of the
    specified number of sites. Uses the global ``random`` state.
    """
    t = ts.dump_tables()
    t.sites.reset()
    t.mutations.reset()
    # Choose which of the original site IDs survive the subsample.
    sites_to_keep = set(random.sample(list(range(ts.num_sites)), num_sites))
    for site in ts.sites():
        if site.id in sites_to_keep:
            # Sites are renumbered consecutively as they are re-added.
            site_id = len(t.sites)
            t.sites.add_row(
                position=site.position, ancestral_state=site.ancestral_state)
            for mutation in site.mutations:
                # NOTE(review): mutation.parent is copied verbatim; assumes
                # parent mutations are at the same (kept) site so their IDs
                # remain valid after renumbering — confirm against callers.
                t.mutations.add_row(
                    site=site_id, derived_state=mutation.derived_state,
                    node=mutation.node, parent=mutation.parent)
    add_provenance(t.provenances, "subsample_sites")
    return t.tree_sequence()
def decapitate(ts, num_edges):
    """
    Returns a copy of the specified tree sequence in which only the first
    ``num_edges`` edges have been retained.
    """
    t = ts.dump_tables()
    # Truncate the edge table columns in place; all other tables are untouched.
    t.edges.set_columns(
        left=t.edges.left[:num_edges], right=t.edges.right[:num_edges],
        parent=t.edges.parent[:num_edges], child=t.edges.child[:num_edges])
    add_provenance(t.provenances, "decapitate")
    return t.tree_sequence()
def insert_branch_mutations(ts, mutations_per_branch=1):
    """
    Returns a copy of the specified tree sequence with a mutation on every branch
    in every tree.

    States are binary ("0"/"1"): each mutation flips the state inherited from
    the node's parent, and successive mutations on the same branch chain their
    ``parent`` references together.
    """
    tables = ts.dump_tables()
    tables.sites.clear()
    tables.mutations.clear()
    for tree in ts.trees():
        # One site per tree, at the left end of the tree's interval.
        site = tables.sites.add_row(position=tree.interval[0], ancestral_state='0')
        for root in tree.roots:
            # state[u]: binary allele carried at node u.
            # mutation[u]: ID of the last mutation on the branch above u (-1 at a root).
            state = {root: 0}
            mutation = {root: -1}
            stack = [root]
            while len(stack) > 0:
                u = stack.pop()
                stack.extend(tree.children(u))
                v = tree.parent(u)
                if v != tskit.NULL_NODE:
                    state[u] = state[v]
                    parent = mutation[v]
                    for j in range(mutations_per_branch):
                        state[u] = (state[u] + 1) % 2
                        mutation[u] = tables.mutations.add_row(
                            site=site, node=u, derived_state=str(state[u]),
                            parent=parent)
                        parent = mutation[u]
    add_provenance(tables.provenances, "insert_branch_mutations")
    return tables.tree_sequence()
def insert_branch_sites(ts):
    """
    Returns a copy of the specified tree sequence with a site on every branch
    of every tree. Each new site carries a single '0' -> '1' mutation.
    """
    tables = ts.dump_tables()
    tables.sites.clear()
    tables.mutations.clear()
    for tree in ts.trees():
        left, right = tree.interval
        # Spread the new site positions evenly across the tree's interval.
        delta = (right - left) / len(list(tree.nodes()))
        x = left
        for u in tree.nodes():
            # Roots have no branch above them, so they get no site.
            if tree.parent(u) != tskit.NULL_NODE:
                site = tables.sites.add_row(position=x, ancestral_state='0')
                tables.mutations.add_row(site=site, node=u, derived_state='1')
                x += delta
    add_provenance(tables.provenances, "insert_branch_sites")
    return tables.tree_sequence()
def insert_multichar_mutations(ts, seed=1, max_len=10):
    """
    Returns a copy of the specified tree sequence with multiple chararacter
    mutations on a randomly chosen branch in every tree. Ancestral and derived
    states are homopolymer runs (one letter repeated 0..max_len times), so
    states of length zero are possible.
    """
    rng = random.Random(seed)
    letters = ["A", "C", "T", "G"]
    tables = ts.dump_tables()
    tables.sites.clear()
    tables.mutations.clear()
    for tree in ts.trees():
        ancestral_state = rng.choice(letters) * rng.randint(0, max_len)
        site = tables.sites.add_row(
            position=tree.interval[0], ancestral_state=ancestral_state)
        # Pick any non-root node so the mutation sits on a real branch.
        nodes = list(tree.nodes())
        nodes.remove(tree.root)
        u = rng.choice(nodes)
        # Re-draw until the derived state actually differs from the ancestral one.
        derived_state = ancestral_state
        while ancestral_state == derived_state:
            derived_state = rng.choice(letters) * rng.randint(0, max_len)
        tables.mutations.add_row(site=site, node=u, derived_state=derived_state)
    add_provenance(tables.provenances, "insert_multichar_mutations")
    return tables.tree_sequence()
def insert_random_ploidy_individuals(ts, max_ploidy=5, max_dimension=3, seed=1):
    """
    Takes random contiguous subsets of the samples and assigns them to individuals.
    Also creates random locations in variable dimensions in the unit interval.

    :param TreeSequence ts: the tree sequence to copy.
    :param int max_ploidy: maximum number of sample nodes per individual; a
        draw of 0 produces an individual with no nodes attached.
    :param int max_dimension: maximum number of location coordinates.
    :param int seed: seed for the local random number generator.
    """
    rng = random.Random(seed)
    samples = np.array(ts.samples(), dtype=int)
    j = 0
    tables = ts.dump_tables()
    tables.individuals.clear()
    # Detach every node from any existing individual before reassigning.
    individual = tables.nodes.individual[:]
    individual[:] = tskit.NULL_INDIVIDUAL
    while j < len(samples):
        ploidy = rng.randint(0, max_ploidy)
        nodes = samples[j: min(j + ploidy, len(samples))]
        dimension = rng.randint(0, max_dimension)
        location = [rng.random() for _ in range(dimension)]
        ind_id = tables.individuals.add_row(location=location)
        individual[nodes] = ind_id
        j += ploidy
    tables.nodes.individual = individual
    # Consistency fix: every other editor in this module records provenance.
    add_provenance(tables.provenances, "insert_random_ploidy_individuals")
    return tables.tree_sequence()
def permute_nodes(ts, node_map):
    """
    Returns a copy of the specified tree sequence such that the nodes are
    permuted according to the specified map: original node j becomes node
    ``node_map[j]`` in the result.
    """
    tables = ts.dump_tables()
    tables.nodes.clear()
    tables.edges.clear()
    tables.mutations.clear()
    # Mapping from nodes in the new tree sequence back to nodes in the original
    reverse_map = [0 for _ in node_map]
    for j in range(ts.num_nodes):
        reverse_map[node_map[j]] = j
    old_nodes = list(ts.nodes())
    # Rebuild the node table in the new order, preserving all per-node data.
    for j in range(ts.num_nodes):
        old_node = old_nodes[reverse_map[j]]
        tables.nodes.add_row(
            flags=old_node.flags, metadata=old_node.metadata,
            population=old_node.population, time=old_node.time)
    # Remap edge endpoints and mutation nodes through the permutation.
    for edge in ts.edges():
        tables.edges.add_row(
            left=edge.left, right=edge.right, parent=node_map[edge.parent],
            child=node_map[edge.child])
    for site in ts.sites():
        for mutation in site.mutations:
            tables.mutations.add_row(
                site=site.id, derived_state=mutation.derived_state,
                node=node_map[mutation.node], metadata=mutation.metadata)
    # Permuting node IDs breaks table sortedness requirements; restore them.
    tables.sort()
    add_provenance(tables.provenances, "permute_nodes")
    return tables.tree_sequence()
def insert_redundant_breakpoints(ts):
    """
    Builds a new tree sequence containing redundant breakpoints: every edge is
    split at its midpoint into two abutting edges, leaving the trees unchanged.
    """
    tables = ts.dump_tables()
    tables.edges.reset()
    for r in ts.edges():
        # Split [left, right) into [left, x) and [x, right) at the midpoint.
        x = r.left + (r.right - r.left) / 2
        tables.edges.add_row(left=r.left, right=x, child=r.child, parent=r.parent)
        tables.edges.add_row(left=x, right=r.right, child=r.child, parent=r.parent)
    add_provenance(tables.provenances, "insert_redundant_breakpoints")
    new_ts = tables.tree_sequence()
    assert new_ts.num_edges == 2 * ts.num_edges
    return new_ts
def single_childify(ts):
    """
    Builds a new equivalent tree sequence which contains an extra node in the
    middle of all existing branches, so every original branch becomes a chain
    of two branches through a single-child node.
    """
    tables = ts.dump_tables()
    time = tables.nodes.time[:]
    tables.edges.reset()
    for edge in ts.edges():
        # Insert a new node in between the parent and child, halfway in time.
        t = time[edge.child] + (time[edge.parent] - time[edge.child]) / 2
        u = tables.nodes.add_row(time=t)
        tables.edges.add_row(
            left=edge.left, right=edge.right, parent=u, child=edge.child)
        tables.edges.add_row(
            left=edge.left, right=edge.right, parent=edge.parent, child=u)
    tables.sort()
    # Bug fix: the provenance record previously claimed
    # "insert_redundant_breakpoints" (copy-paste error from the function above).
    add_provenance(tables.provenances, "single_childify")
    return tables.tree_sequence()
def add_random_metadata(ts, seed=1, max_length=10):
    """
    Returns a copy of the specified tree sequence with random metadata assigned
    to the nodes, sites, mutations, individuals and populations. Each row gets
    a random byte string of length 0..max_length-1, encoded via the standard
    ragged-column (data + offset) layout.
    """
    tables = ts.dump_tables()
    np.random.seed(seed)
    # Nodes: per-row lengths -> cumulative offsets -> flat metadata buffer.
    length = np.random.randint(0, max_length, ts.num_nodes)
    offset = np.cumsum(np.hstack(([0], length)), dtype=np.uint32)
    # Older versions of numpy didn't have a dtype argument for randint, so
    # must use astype instead.
    metadata = np.random.randint(-127, 127, offset[-1]).astype(np.int8)
    nodes = tables.nodes
    nodes.set_columns(
        flags=nodes.flags, population=nodes.population, time=nodes.time,
        metadata_offset=offset, metadata=metadata,
        individual=nodes.individual)
    # Sites: same pattern.
    length = np.random.randint(0, max_length, ts.num_sites)
    offset = np.cumsum(np.hstack(([0], length)), dtype=np.uint32)
    metadata = np.random.randint(-127, 127, offset[-1]).astype(np.int8)
    sites = tables.sites
    sites.set_columns(
        position=sites.position,
        ancestral_state=sites.ancestral_state,
        ancestral_state_offset=sites.ancestral_state_offset,
        metadata_offset=offset, metadata=metadata)
    # Mutations: same pattern.
    length = np.random.randint(0, max_length, ts.num_mutations)
    offset = np.cumsum(np.hstack(([0], length)), dtype=np.uint32)
    metadata = np.random.randint(-127, 127, offset[-1]).astype(np.int8)
    mutations = tables.mutations
    mutations.set_columns(
        site=mutations.site,
        node=mutations.node,
        parent=mutations.parent,
        derived_state=mutations.derived_state,
        derived_state_offset=mutations.derived_state_offset,
        metadata_offset=offset, metadata=metadata)
    # Individuals: same pattern.
    length = np.random.randint(0, max_length, ts.num_individuals)
    offset = np.cumsum(np.hstack(([0], length)), dtype=np.uint32)
    metadata = np.random.randint(-127, 127, offset[-1]).astype(np.int8)
    individuals = tables.individuals
    individuals.set_columns(
        flags=individuals.flags,
        location=individuals.location,
        location_offset=individuals.location_offset,
        metadata_offset=offset, metadata=metadata)
    # Populations: same pattern.
    length = np.random.randint(0, max_length, ts.num_populations)
    offset = np.cumsum(np.hstack(([0], length)), dtype=np.uint32)
    metadata = np.random.randint(-127, 127, offset[-1]).astype(np.int8)
    populations = tables.populations
    populations.set_columns(metadata_offset=offset, metadata=metadata)
    add_provenance(tables.provenances, "add_random_metadata")
    ts = tables.tree_sequence()
    return ts
def jiggle_samples(ts):
    """
    Returns a copy of the specified tree sequence with the sample nodes switched
    around. The first n / 2 existing samples become non samples, and the last
    n / 2 node become samples.
    """
    tables = ts.dump_tables()
    nodes = tables.nodes
    flags = nodes.flags
    # NOTE(review): takes the parent of the last edge as the "oldest" node;
    # assumes edges are sorted so the final edge has the oldest parent — confirm.
    oldest_parent = tables.edges.parent[-1]
    n = ts.sample_size
    # Clear the sample flag on the first half, set it on nodes just below the
    # oldest parent.
    flags[:n // 2] = 0
    flags[oldest_parent - n // 2: oldest_parent] = 1
    nodes.set_columns(flags, nodes.time)
    add_provenance(tables.provenances, "jiggle_samples")
    return tables.tree_sequence()
def generate_site_mutations(tree, position, mu, site_table, mutation_table,
                            multiple_per_node=True):
    """
    Generates mutations for the site at the specified position on the specified
    tree. Mutations happen at rate mu along each branch. The site and mutation
    information are recorded in the specified tables. Note that this records
    more than one mutation per edge.

    :param tree: the tree to drop mutations on.
    :param float position: site position; must lie within the tree's interval.
    :param float mu: per-branch mutation rate (exponential waiting times).
    :param site_table: table to append the new site to.
    :param mutation_table: table to append the new mutations to.
    :param bool multiple_per_node: if False, only the final state change on a
        branch is recorded as a single mutation.
    """
    assert tree.interval[0] <= position < tree.interval[1]
    states = {"A", "C", "G", "T"}
    state = random.choice(list(states))
    site_table.add_row(position, state)
    site = site_table.num_rows - 1
    # Depth-first traversal carrying (node, inherited state, parent mutation ID).
    stack = [(tree.root, state, tskit.NULL_MUTATION)]
    while len(stack) != 0:
        u, state, parent = stack.pop()
        if u != tree.root:
            branch_length = tree.branch_length(u)
            # x accumulates exponential waiting times down the branch.
            x = random.expovariate(mu)
            new_state = state
            while x < branch_length:
                new_state = random.choice(list(states - set(state)))
                if multiple_per_node and (state != new_state):
                    mutation_table.add_row(site, u, new_state, parent)
                    parent = mutation_table.num_rows - 1
                    state = new_state
                x += random.expovariate(mu)
            else:
                if (not multiple_per_node) and (state != new_state):
                    mutation_table.add_row(site, u, new_state, parent)
                    parent = mutation_table.num_rows - 1
                    state = new_state
        stack.extend(reversed([(v, state, parent) for v in tree.children(u)]))
def jukes_cantor(ts, num_sites, mu, multiple_per_node=True, seed=None):
    """
    Returns a copy of the specified tree sequence with Jukes-Cantor mutations
    applied at the specfied rate at the specifed number of sites. Site positions
    are chosen uniformly.
    """
    random.seed(seed)
    positions = [ts.sequence_length * random.random() for _ in range(num_sites)]
    positions.sort()
    tables = ts.dump_tables()
    tables.sites.clear()
    tables.mutations.clear()
    # Walk the (sorted) positions and the trees in lockstep, advancing to the
    # tree that covers each position before dropping mutations on it.
    trees = ts.trees()
    t = next(trees)
    for position in positions:
        while position >= t.interval[1]:
            t = next(trees)
        generate_site_mutations(t, position, mu, tables.sites, tables.mutations,
                                multiple_per_node=multiple_per_node)
    add_provenance(tables.provenances, "jukes_cantor")
    new_ts = tables.tree_sequence()
    return new_ts
def compute_mutation_parent(ts):
    """
    Compute the `parent` column of a MutationTable. Correct computation uses
    topological information in the nodes and edges, as well as the fact that
    each mutation must be listed after the mutation on whose background it
    occurred (i.e., its parent).

    :param TreeSequence ts: The tree sequence to compute for. Need not
        have a valid mutation parent column.
    :return: numpy int32 array of parent mutation IDs (-1 where none).
    """
    mutation_parent = np.zeros(ts.num_mutations, dtype=np.int32) - 1
    # Maps nodes to the bottom mutation on each branch
    bottom_mutation = np.zeros(ts.num_nodes, dtype=np.int32) - 1
    for tree in ts.trees():
        for site in tree.sites():
            # Go forward through the mutations creating a mapping from the
            # mutations to the nodes. If we see more than one mutation
            # at a node, then these must be parents since we're assuming
            # they are in order.
            for mutation in site.mutations:
                if bottom_mutation[mutation.node] != tskit.NULL_MUTATION:
                    mutation_parent[mutation.id] = bottom_mutation[mutation.node]
                bottom_mutation[mutation.node] = mutation.id
            # There's no point in checking the first mutation, since this cannot
            # have a parent.
            for mutation in site.mutations[1:]:
                if mutation_parent[mutation.id] == tskit.NULL_MUTATION:
                    v = tree.parent(mutation.node)
                    # Traverse upwards until we find a another mutation or root.
                    while v != tskit.NULL_NODE \
                            and bottom_mutation[v] == tskit.NULL_MUTATION:
                        v = tree.parent(v)
                    if v != tskit.NULL_NODE:
                        mutation_parent[mutation.id] = bottom_mutation[v]
            # Reset the maps for the next site.
            for mutation in site.mutations:
                bottom_mutation[mutation.node] = tskit.NULL_MUTATION
    # Every per-site reset above must leave the scratch array clean.
    assert np.all(bottom_mutation == -1)
    return mutation_parent
def algorithm_T(ts):
    """
    Simple implementation of algorithm T from the PLOS paper, taking into
    account tree sequences with gaps and other complexities.

    Yields ``((left, right), parent)`` for each tree, where ``parent`` is the
    parent-array representation of the tree over that interval. Note the same
    list object is yielded each time and mutated in place.
    """
    sequence_length = ts.sequence_length
    edges = list(ts.edges())
    M = len(edges)
    time = [ts.node(edge.parent).time for edge in edges]
    # Edge insertion order: by left endpoint, then time (younger parents first).
    in_order = sorted(range(M), key=lambda j: (
        edges[j].left, time[j], edges[j].parent, edges[j].child))
    # Edge removal order: by right endpoint, then reverse time.
    out_order = sorted(range(M), key=lambda j: (
        edges[j].right, -time[j], -edges[j].parent, -edges[j].child))
    j = 0
    k = 0
    left = 0
    parent = [-1 for _ in range(ts.num_nodes)]
    while j < M or left < sequence_length:
        # Remove edges that end at the current breakpoint, then insert those
        # that start here.
        while k < M and edges[out_order[k]].right == left:
            edge = edges[out_order[k]]
            parent[edge.child] = -1
            k += 1
        while j < M and edges[in_order[j]].left == left:
            edge = edges[in_order[j]]
            parent[edge.child] = edge.parent
            j += 1
        # The next breakpoint is the nearest of: end of sequence, the next
        # edge insertion, or the next edge removal.
        right = sequence_length
        if j < M:
            right = min(right, edges[in_order[j]].left)
        if k < M:
            right = min(right, edges[out_order[k]].right)
        yield (left, right), parent
        left = right
class LinkedTree(object):
    """
    Straightforward implementation of the quintuply linked tree for developing
    and testing the sample lists feature.

    NOTE: The interface is pretty awkward; it's not intended for anything other
    than testing.
    """
    def __init__(self, tree_sequence, tracked_samples=None):
        """
        :param tree_sequence: the tree sequence whose trees will be iterated.
        :param tracked_samples: optional subset of nodes to maintain sample
            lists for; defaults to all samples of the tree sequence.
        """
        self.tree_sequence = tree_sequence
        num_nodes = tree_sequence.num_nodes
        # Quintuply linked tree.
        self.parent = [-1 for _ in range(num_nodes)]
        self.left_sib = [-1 for _ in range(num_nodes)]
        self.right_sib = [-1 for _ in range(num_nodes)]
        self.left_child = [-1 for _ in range(num_nodes)]
        self.right_child = [-1 for _ in range(num_nodes)]
        # Per-node sample list boundaries and the linked "next" pointers.
        self.left_sample = [-1 for _ in range(num_nodes)]
        self.right_sample = [-1 for _ in range(num_nodes)]
        # This is too long, but it's convenient for printing.
        self.next_sample = [-1 for _ in range(num_nodes)]
        self.sample_index_map = [-1 for _ in range(num_nodes)]
        samples = tracked_samples
        if tracked_samples is None:
            samples = list(tree_sequence.samples())
        # Each tracked sample starts as a singleton sample list on itself.
        for j in range(len(samples)):
            u = samples[j]
            self.sample_index_map[u] = j
            self.left_sample[u] = j
            self.right_sample[u] = j
    def __str__(self):
        """Tabular dump of all per-node link arrays, one row per node."""
        fmt = "{:<5}{:>8}{:>8}{:>8}{:>8}{:>8}{:>8}{:>8}{:>8}\n"
        s = fmt.format(
            "node", "parent", "lsib", "rsib", "lchild", "rchild",
            "nsamp", "lsamp", "rsamp")
        for u in range(self.tree_sequence.num_nodes):
            s += fmt.format(
                u, self.parent[u],
                self.left_sib[u], self.right_sib[u],
                self.left_child[u], self.right_child[u],
                self.next_sample[u], self.left_sample[u], self.right_sample[u])
        # Strip off trailing newline
        return s[:-1]
    def remove_edge(self, edge):
        """Unlink edge.child from edge.parent's sibling chain."""
        p = edge.parent
        c = edge.child
        lsib = self.left_sib[c]
        rsib = self.right_sib[c]
        # Splice c out: repair the left neighbour's (or parent's) link...
        if lsib == -1:
            self.left_child[p] = rsib
        else:
            self.right_sib[lsib] = rsib
        # ...and the right neighbour's (or parent's) link.
        if rsib == -1:
            self.right_child[p] = lsib
        else:
            self.left_sib[rsib] = lsib
        self.parent[c] = -1
        self.left_sib[c] = -1
        self.right_sib[c] = -1
    def insert_edge(self, edge):
        """Link edge.child as the rightmost child of edge.parent."""
        p = edge.parent
        c = edge.child
        assert self.parent[c] == -1, "contradictory edges"
        self.parent[c] = p
        u = self.right_child[p]
        if u == -1:
            # p had no children; c becomes its only child.
            self.left_child[p] = c
            self.left_sib[c] = -1
            self.right_sib[c] = -1
        else:
            # Append c after the current rightmost child u.
            self.right_sib[u] = c
            self.left_sib[c] = u
            self.right_sib[c] = -1
        self.right_child[p] = c
    def update_sample_list(self, parent):
        # This can surely be done more efficiently and elegantly. We are iterating
        # up the tree and iterating over all the siblings of the nodes we visit,
        # rebuilding the links as we go. This results in visiting the same nodes
        # over again, which if we have nodes with many siblings will surely be
        # expensive. Another consequence of the current approach is that the
        # next pointer contains an arbitrary value for the rightmost sample of
        # every root. This should point to NULL ideally, but it's quite tricky
        # to do in practise. It's easier to have a slightly uglier iteration
        # over samples.
        #
        # In the future it would be good have a more efficient version of this
        # algorithm using next and prev pointers that we keep up to date at all
        # times, and which we use to patch the lists together more efficiently.
        u = parent
        while u != -1:
            sample_index = self.sample_index_map[u]
            if sample_index != -1:
                self.right_sample[u] = self.left_sample[u]
            else:
                self.right_sample[u] = -1
                self.left_sample[u] = -1
            v = self.left_child[u]
            while v != -1:
                if self.left_sample[v] != -1:
                    assert self.right_sample[v] != -1
                    if self.left_sample[u] == -1:
                        self.left_sample[u] = self.left_sample[v]
                        self.right_sample[u] = self.right_sample[v]
                    else:
                        self.next_sample[self.right_sample[u]] = self.left_sample[v]
                        self.right_sample[u] = self.right_sample[v]
                v = self.right_sib[v]
            u = self.parent[u]
    def sample_lists(self):
        """
        Iterate over the the trees in this tree sequence, yielding the (left, right)
        interval tuples. The tree state is maintained internally.

        See note above about the cruddiness of this interface.
        """
        ts = self.tree_sequence
        sequence_length = ts.sequence_length
        edges = list(ts.edges())
        M = len(edges)
        time = [ts.node(edge.parent).time for edge in edges]
        # Same insertion/removal ordering scheme as algorithm_T above.
        in_order = sorted(range(M), key=lambda j: (
            edges[j].left, time[j], edges[j].parent, edges[j].child))
        out_order = sorted(range(M), key=lambda j: (
            edges[j].right, -time[j], -edges[j].parent, -edges[j].child))
        j = 0
        k = 0
        left = 0
        while j < M or left < sequence_length:
            while k < M and edges[out_order[k]].right == left:
                edge = edges[out_order[k]]
                self.remove_edge(edge)
                self.update_sample_list(edge.parent)
                k += 1
            while j < M and edges[in_order[j]].left == left:
                edge = edges[in_order[j]]
                self.insert_edge(edge)
                self.update_sample_list(edge.parent)
                j += 1
            right = sequence_length
            if j < M:
                right = min(right, edges[in_order[j]].left)
            if k < M:
                right = min(right, edges[out_order[k]].right)
            yield left, right
            left = right
def mean_descendants(ts, reference_sets):
    """
    Returns the mean number of nodes from the specified reference sets
    where the node is ancestral to at least one of the reference nodes. Returns a
    ``(ts.num_nodes, len(reference_sets))`` dimensional numpy array.
    """
    # Check the inputs (could be done more efficiently here)
    all_reference_nodes = set()
    for reference_set in reference_sets:
        U = set(reference_set)
        if len(U) != len(reference_set):
            raise ValueError("Cannot have duplicate values within set")
        if len(all_reference_nodes & U) != 0:
            raise ValueError("Sample sets must be disjoint")
        all_reference_nodes |= U
    K = len(reference_sets)
    C = np.zeros((ts.num_nodes, K))
    parent = np.zeros(ts.num_nodes, dtype=int) - 1
    # The -1th element of ref_count is for all nodes in the reference set.
    ref_count = np.zeros((ts.num_nodes, K + 1), dtype=int)
    # last_update[v]: sequence position at which v's counts last changed;
    # total_length[v]: total span over which v was ancestral to > 0 samples.
    last_update = np.zeros(ts.num_nodes)
    total_length = np.zeros(ts.num_nodes)

    def update_counts(edge, sign):
        # Update the counts and statistics for a given node. Before we change the
        # node counts in the given direction, check to see if we need to update
        # statistics for that node. When a node count changes, we add the
        # accumulated statistic value for the span since that node was last updated.
        v = edge.parent
        while v != -1:
            if last_update[v] != left:
                if ref_count[v, K] > 0:
                    length = left - last_update[v]
                    C[v] += length * ref_count[v, :K]
                    total_length[v] += length
                last_update[v] = left
            ref_count[v] += sign * ref_count[edge.child]
            v = parent[v]

    # Set the intitial conditions.
    for j in range(K):
        ref_count[reference_sets[j], j] = 1
    ref_count[ts.samples(), K] = 1
    for (left, right), edges_out, edges_in in ts.edge_diffs():
        for edge in edges_out:
            parent[edge.child] = -1
            update_counts(edge, -1)
        for edge in edges_in:
            parent[edge.child] = edge.parent
            update_counts(edge, +1)
    # Finally, add the stats for the last tree and divide by the total
    # length that each node was an ancestor to > 0 samples.
    for v in range(ts.num_nodes):
        if ref_count[v, K] > 0:
            length = ts.sequence_length - last_update[v]
            total_length[v] += length
            C[v] += length * ref_count[v, :K]
        if total_length[v] != 0:
            C[v] /= total_length[v]
    return C
def genealogical_nearest_neighbours(ts, focal, reference_sets):
reference_set_map = np.zeros(ts.num_nodes, dtype=int) - 1
for k, reference_set in enumerate(reference_sets):
for u in reference_set:
if reference_set_map[u] != -1:
raise ValueError("Duplicate value in reference sets")
reference_set_map[u] = k
K = len(reference_sets)
A = np.zeros((len(focal), K))
L = np.zeros(len(focal))
parent = np.zeros(ts.num_nodes, dtype=int) - 1
sample_count = np.zeros((ts.num_nodes, K), dtype=int)
# Set the intitial conditions.
for j in range(K):
sample_count[reference_sets[j], j] = 1
for (left, right), edges_out, edges_in in ts.edge_diffs():
for edge in edges_out:
parent[edge.child] = -1
v = edge.parent
while v != -1:
|
[
" sample_count[v] -= sample_count[edge.child]"
] | 2,596
|
lcc
|
python
| null |
25409067cd0b9bd6ffc59d2ab30579edc2ec8b0a47154def
|
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2019 Bitergia
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Authors:
# Valerio Cosentino <valcos@bitergia.com>
#
import configparser
import json
import os
import requests
import sys
import unittest
from datetime import datetime
from elasticsearch import Elasticsearch
if '..' not in sys.path:
sys.path.insert(0, '..')
from grimoire_elk.elk import load_identities
from grimoire_elk.utils import get_connectors, get_elastic
from tests.model import ESMapping
CONFIG_FILE = 'tests.conf'
DB_SORTINGHAT = "test_sh"
DB_PROJECTS = "test_projects"
FILE_PROJECTS = "data/projects-release.json"
SCHEMA_DIR = '../schema/'
def load_mapping(enrich_index, csv_name):
cvs_path = os.path.join(SCHEMA_DIR, csv_name + '.csv')
cvs_mapping = ESMapping.from_csv(enrich_index, cvs_path)
return cvs_mapping
def data2es(items, ocean):
def ocean_item(item):
# Hack until we decide the final id to use
if 'uuid' in item:
item['ocean-unique-id'] = item['uuid']
else:
# twitter comes from logstash and uses id
item['uuid'] = item['id']
item['ocean-unique-id'] = item['id']
# Hack until we decide when to drop this field
if 'updated_on' in item:
updated = datetime.fromtimestamp(item['updated_on'])
item['metadata__updated_on'] = updated.isoformat()
if 'timestamp' in item:
ts = datetime.fromtimestamp(item['timestamp'])
item['metadata__timestamp'] = ts.isoformat()
# the _fix_item does not apply to the test data for Twitter
try:
ocean._fix_item(item)
except KeyError:
pass
return item
items_pack = [] # to feed item in packs
for item in items:
item = ocean_item(item)
if len(items_pack) >= ocean.elastic.max_items_bulk:
ocean._items_to_es(items_pack)
items_pack = []
items_pack.append(item)
inserted = ocean._items_to_es(items_pack)
return inserted
def refresh_identities(enrich_backend):
total = 0
for eitem in enrich_backend.fetch():
roles = None
try:
roles = enrich_backend.roles
except AttributeError:
pass
new_identities = enrich_backend.get_item_sh_from_id(eitem, roles)
eitem.update(new_identities)
total += 1
return total
def refresh_projects(enrich_backend):
total = 0
for eitem in enrich_backend.fetch():
new_project = enrich_backend.get_item_project(eitem)
eitem.update(new_project)
total += 1
return total
class TestBaseBackend(unittest.TestCase):
"""Functional tests for GrimoireELK Backends"""
@classmethod
def setUpClass(cls):
cls.config = configparser.ConfigParser()
cls.config.read(CONFIG_FILE)
cls.es_con = dict(cls.config.items('ElasticSearch'))['url']
cls.connectors = get_connectors()
cls.maxDiff = None
# Sorting hat settings
cls.db_user = ''
cls.db_password = ''
if 'Database' in cls.config:
if 'user' in cls.config['Database']:
cls.db_user = cls.config['Database']['user']
if 'password' in cls.config['Database']:
cls.db_password = cls.config['Database']['password']
def setUp(self):
with open(os.path.join("data", self.connector + ".json")) as f:
self.items = json.load(f)
self.ocean_backend = None
self.enrich_backend = None
self.ocean_aliases = []
self.enrich_aliases = []
def tearDown(self):
delete_raw = self.es_con + "/" + self.ocean_index
requests.delete(delete_raw, verify=False)
delete_enrich = self.es_con + "/" + self.enrich_index
requests.delete(delete_enrich, verify=False)
def _test_items_to_raw(self):
"""Test whether fetched items are properly loaded to ES"""
clean = True
perceval_backend = None
self.ocean_backend = self.connectors[self.connector][1](perceval_backend)
elastic_ocean = get_elastic(self.es_con, self.ocean_index, clean, self.ocean_backend, self.ocean_aliases)
self.ocean_backend.set_elastic(elastic_ocean)
raw_items = data2es(self.items, self.ocean_backend)
return {'items': len(self.items), 'raw': raw_items}
def _test_raw_to_enrich(self, sortinghat=False, projects=False):
"""Test whether raw indexes are properly enriched"""
# populate raw index
perceval_backend = None
clean = True
self.ocean_backend = self.connectors[self.connector][1](perceval_backend)
elastic_ocean = get_elastic(self.es_con, self.ocean_index, clean, self.ocean_backend)
self.ocean_backend.set_elastic(elastic_ocean)
data2es(self.items, self.ocean_backend)
# populate enriched index
if not sortinghat and not projects:
self.enrich_backend = self.connectors[self.connector][2]()
elif sortinghat and not projects:
self.enrich_backend = self.connectors[self.connector][2](db_sortinghat=DB_SORTINGHAT,
db_user=self.db_user,
db_password=self.db_password)
elif not sortinghat and projects:
self.enrich_backend = self.connectors[self.connector][2](json_projects_map=FILE_PROJECTS,
db_user=self.db_user,
db_password=self.db_password)
elastic_enrich = get_elastic(self.es_con, self.enrich_index, clean, self.enrich_backend, self.enrich_aliases)
self.enrich_backend.set_elastic(elastic_enrich)
# Load SH identities
if sortinghat:
load_identities(self.ocean_backend, self.enrich_backend)
raw_count = len([item for item in self.ocean_backend.fetch()])
enrich_count = self.enrich_backend.enrich_items(self.ocean_backend)
# self._test_csv_mappings(sortinghat)
return {'raw': raw_count, 'enrich': enrich_count}
def _test_csv_mappings(self, sortinghat):
"""Test whether the mappings in the CSV are successfully met"""
result = {}
if not sortinghat:
return result
csv_mapping = load_mapping(self.enrich_index, self.connector)
client = Elasticsearch(self.es_con, timeout=30)
mapping_json = client.indices.get_mapping(index=self.enrich_index)
|
[
" es_mapping = ESMapping.from_json(index_name=self.enrich_index,"
] | 630
|
lcc
|
python
| null |
40994fe53243d5f8fce4983fc820f09dbd61a083c8a53c8f
|
|
#region Copyright & License Information
/*
* Copyright 2007-2017 The OpenRA Developers (see AUTHORS)
* This file is part of OpenRA, which is free software. It is made
* available to you under the terms of the GNU General Public License
* as published by the Free Software Foundation, either version 3 of
* the License, or (at your option) any later version. For more
* information, see COPYING.
*/
#endregion
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Linq;
using System.Threading.Tasks;
using OpenRA.Chat;
using OpenRA.Graphics;
using OpenRA.Mods.Common.Traits;
using OpenRA.Network;
using OpenRA.Traits;
using OpenRA.Widgets;
namespace OpenRA.Mods.Common.Widgets.Logic
{
public class LobbyLogic : ChromeLogic
{
static readonly Action DoNothing = () => { };
public MapPreview Map { get; private set; }
readonly ModData modData;
readonly Action onStart;
readonly Action onExit;
readonly OrderManager orderManager;
readonly bool skirmishMode;
readonly Ruleset modRules;
readonly World shellmapWorld;
readonly WebServices services;
enum PanelType { Players, Options, Music, Kick, ForceStart }
PanelType panel = PanelType.Players;
enum ChatPanelType { Lobby, Global }
ChatPanelType chatPanel = ChatPanelType.Lobby;
readonly Widget lobby;
readonly Widget editablePlayerTemplate;
readonly Widget nonEditablePlayerTemplate;
readonly Widget emptySlotTemplate;
readonly Widget editableSpectatorTemplate;
readonly Widget nonEditableSpectatorTemplate;
readonly Widget newSpectatorTemplate;
readonly ScrollPanelWidget lobbyChatPanel;
readonly Widget chatTemplate;
readonly ScrollPanelWidget players;
readonly Dictionary<string, LobbyFaction> factions = new Dictionary<string, LobbyFaction>();
readonly ColorPreviewManagerWidget colorPreview;
readonly TabCompletionLogic tabCompletion = new TabCompletionLogic();
readonly LabelWidget chatLabel;
bool teamChat;
bool addBotOnMapLoad;
int lobbyChatUnreadMessages;
int globalChatLastReadMessages;
int globalChatUnreadMessages;
// Listen for connection failures
void ConnectionStateChanged(OrderManager om)
{
if (om.Connection.ConnectionState == ConnectionState.NotConnected)
{
// Show connection failed dialog
Ui.CloseWindow();
Action onConnect = () =>
{
Game.OpenWindow("SERVER_LOBBY", new WidgetArgs()
{
{ "onExit", onExit },
{ "onStart", onStart },
{ "skirmishMode", false }
});
};
Action<string> onRetry = password => ConnectionLogic.Connect(om.Host, om.Port, password, onConnect, onExit);
var switchPanel = om.ServerExternalMod != null ? "CONNECTION_SWITCHMOD_PANEL" : "CONNECTIONFAILED_PANEL";
Ui.OpenWindow(switchPanel, new WidgetArgs()
{
{ "orderManager", om },
{ "onAbort", onExit },
{ "onRetry", onRetry }
});
}
}
[ObjectCreator.UseCtor]
internal LobbyLogic(Widget widget, ModData modData, WorldRenderer worldRenderer, OrderManager orderManager,
Action onExit, Action onStart, bool skirmishMode)
{
Map = MapCache.UnknownMap;
lobby = widget;
this.modData = modData;
this.orderManager = orderManager;
this.onStart = onStart;
this.onExit = onExit;
this.skirmishMode = skirmishMode;
// TODO: This needs to be reworked to support per-map tech levels, bots, etc.
this.modRules = modData.DefaultRules;
shellmapWorld = worldRenderer.World;
services = modData.Manifest.Get<WebServices>();
orderManager.AddChatLine += AddChatLine;
Game.LobbyInfoChanged += UpdateCurrentMap;
Game.LobbyInfoChanged += UpdatePlayerList;
Game.BeforeGameStart += OnGameStart;
Game.ConnectionStateChanged += ConnectionStateChanged;
var name = lobby.GetOrNull<LabelWidget>("SERVER_NAME");
if (name != null)
name.GetText = () => orderManager.LobbyInfo.GlobalSettings.ServerName;
Ui.LoadWidget("LOBBY_MAP_PREVIEW", lobby.Get("MAP_PREVIEW_ROOT"), new WidgetArgs
{
{ "orderManager", orderManager },
{ "lobby", this }
});
UpdateCurrentMap();
var playerBin = Ui.LoadWidget("LOBBY_PLAYER_BIN", lobby.Get("TOP_PANELS_ROOT"), new WidgetArgs());
playerBin.IsVisible = () => panel == PanelType.Players;
players = playerBin.Get<ScrollPanelWidget>("LOBBY_PLAYERS");
editablePlayerTemplate = players.Get("TEMPLATE_EDITABLE_PLAYER");
nonEditablePlayerTemplate = players.Get("TEMPLATE_NONEDITABLE_PLAYER");
emptySlotTemplate = players.Get("TEMPLATE_EMPTY");
editableSpectatorTemplate = players.Get("TEMPLATE_EDITABLE_SPECTATOR");
nonEditableSpectatorTemplate = players.Get("TEMPLATE_NONEDITABLE_SPECTATOR");
newSpectatorTemplate = players.Get("TEMPLATE_NEW_SPECTATOR");
colorPreview = lobby.Get<ColorPreviewManagerWidget>("COLOR_MANAGER");
colorPreview.Color = Game.Settings.Player.Color;
foreach (var f in modRules.Actors["world"].TraitInfos<FactionInfo>())
factions.Add(f.InternalName, new LobbyFaction { Selectable = f.Selectable, Name = f.Name, Side = f.Side, Description = f.Description });
var gameStarting = false;
Func<bool> configurationDisabled = () => !Game.IsHost || gameStarting ||
panel == PanelType.Kick || panel == PanelType.ForceStart ||
!Map.RulesLoaded || Map.InvalidCustomRules ||
orderManager.LocalClient == null || orderManager.LocalClient.IsReady;
var mapButton = lobby.GetOrNull<ButtonWidget>("CHANGEMAP_BUTTON");
if (mapButton != null)
{
mapButton.IsDisabled = () => gameStarting || panel == PanelType.Kick || panel == PanelType.ForceStart ||
orderManager.LocalClient == null || orderManager.LocalClient.IsReady;
mapButton.OnClick = () =>
{
var onSelect = new Action<string>(uid =>
{
// Don't select the same map again
if (uid == Map.Uid)
return;
orderManager.IssueOrder(Order.Command("map " + uid));
Game.Settings.Server.Map = uid;
Game.Settings.Save();
});
Ui.OpenWindow("MAPCHOOSER_PANEL", new WidgetArgs()
{
{ "initialMap", Map.Uid },
{ "initialTab", MapClassification.System },
{ "onExit", DoNothing },
{ "onSelect", Game.IsHost ? onSelect : null },
{ "filter", MapVisibility.Lobby },
});
};
}
var slotsButton = lobby.GetOrNull<DropDownButtonWidget>("SLOTS_DROPDOWNBUTTON");
if (slotsButton != null)
{
slotsButton.IsDisabled = () => configurationDisabled() || panel != PanelType.Players ||
(orderManager.LobbyInfo.Slots.Values.All(s => !s.AllowBots) &&
orderManager.LobbyInfo.Slots.Count(s => !s.Value.LockTeam && orderManager.LobbyInfo.ClientInSlot(s.Key) != null) == 0);
slotsButton.OnMouseDown = _ =>
{
var botNames = Map.Rules.Actors["player"].TraitInfos<IBotInfo>().Select(t => t.Name);
var options = new Dictionary<string, IEnumerable<DropDownOption>>();
var botController = orderManager.LobbyInfo.Clients.FirstOrDefault(c => c.IsAdmin);
if (orderManager.LobbyInfo.Slots.Values.Any(s => s.AllowBots))
{
var botOptions = new List<DropDownOption>()
{
new DropDownOption()
{
Title = "Add",
IsSelected = () => false,
OnClick = () =>
{
foreach (var slot in orderManager.LobbyInfo.Slots)
{
|
[
"\t\t\t\t\t\t\t\t\t\tvar bot = botNames.Random(Game.CosmeticRandom);"
] | 690
|
lcc
|
csharp
| null |
944fd1f9c7ec90cdc70498975f2faf06111d7dfe2ae05e81
|
|
// This code is derived from jcifs smb client library <jcifs at samba dot org>
// Ported by J. Arturo <webmaster at komodosoft dot net>
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
using System;
using System.IO;
using WinrtCifs.Util;
using WinrtCifs.Util.Sharpen;
namespace WinrtCifs.Smb
{
/// <summary>
/// There are hundreds of error codes that may be returned by a CIFS
/// server.
/// </summary>
/// <remarks>
/// There are hundreds of error codes that may be returned by a CIFS
/// server. Rather than represent each with it's own <code>Exception</code>
/// class, this class represents all of them. For many of the popular
/// error codes, constants and text messages like "The device is not ready"
/// are provided.
/// <p>
/// The jCIFS client maps DOS error codes to NTSTATUS codes. This means that
/// the user may recieve a different error from a legacy server than that of
/// a newer varient such as Windows NT and above. If you should encounter
/// such a case, please report it to jcifs at samba dot org and we will
/// change the mapping.
/// </remarks>
public class SmbException : IOException
{
internal static string GetMessageByCode(int errcode)
{
if (errcode == 0)
{
return "NT_STATUS_SUCCESS";
}
if ((errcode & unchecked((int)(0xC0000000))) == unchecked((int)(0xC0000000)))
{
int min = 1;
int max = NtStatus.NtStatusCodes.Length - 1;
while (max >= min)
{
int mid = (min + max) / 2;
if (errcode > NtStatus.NtStatusCodes[mid])
{
min = mid + 1;
}
else
{
if (errcode < NtStatus.NtStatusCodes[mid])
{
max = mid - 1;
}
else
{
return NtStatus.NtStatusMessages[mid];
}
}
}
}
else
{
int min = 0;
int max = DosError.DosErrorCodes.Length - 1;
while (max >= min)
{
int mid = (min + max) / 2;
if (errcode > DosError.DosErrorCodes[mid][0])
{
min = mid + 1;
}
else
{
if (errcode < DosError.DosErrorCodes[mid][0])
{
max = mid - 1;
}
else
{
return DosError.DosErrorMessages[mid];
}
}
}
}
return "0x" + Hexdump.ToHexString(errcode, 8);
}
internal static int GetStatusByCode(int errcode)
{
if ((errcode & unchecked((int)(0xC0000000))) != 0)
{
return errcode;
}
int min = 0;
int max = DosError.DosErrorCodes.Length - 1;
while (max >= min)
{
int mid = (min + max) / 2;
if (errcode > DosError.DosErrorCodes[mid][0])
{
min = mid + 1;
}
else
{
if (errcode < DosError.DosErrorCodes[mid][0])
{
max = mid - 1;
}
else
{
return DosError.DosErrorCodes[mid][1];
}
}
}
return NtStatus.NtStatusUnsuccessful;
}
internal static string GetMessageByWinerrCode(int errcode)
{
int min = 0;
int max = WinError.WinerrCodes.Length - 1;
while (max >= min)
{
int mid = (min + max) / 2;
if (errcode > WinError.WinerrCodes[mid])
{
min = mid + 1;
}
else
{
if (errcode < WinError.WinerrCodes[mid])
{
max = mid - 1;
}
else
{
return WinError.WinerrMessages[mid];
}
}
}
return errcode + string.Empty;
}
private int _status;
private Exception _rootCause;
public SmbException()
{
}
internal SmbException(int errcode, Exception rootCause) : base(GetMessageByCode(errcode
))
{
_status = GetStatusByCode(errcode);
this._rootCause = rootCause;
}
public SmbException(string msg) : base(msg)
{
_status = NtStatus.NtStatusUnsuccessful;
}
public SmbException(string msg, Exception rootCause) : base(msg)
{
this._rootCause = rootCause;
_status = NtStatus.NtStatusUnsuccessful;
}
public SmbException(int errcode, bool winerr) : base(winerr ? GetMessageByWinerrCode
(errcode) : GetMessageByCode(errcode))
{
_status = winerr ? errcode : GetStatusByCode(errcode);
}
public virtual int GetNtStatus()
{
return _status;
}
public virtual Exception GetRootCause()
{
return _rootCause;
}
public override string ToString()
{
if (_rootCause != null)
{
Runtime.PrintStackTrace(_rootCause, LogStream.GetInstance());
|
[
"\t\t\t\treturn base.ToString() + \"\\n\" + _rootCause;"
] | 697
|
lcc
|
csharp
| null |
363a7940d934ec4a64df64a51b70cb62268340fa89d647f7
|
|
"""
"""
from enum import IntEnum, Enum
from .exceptions import ProtocolError, FrameSizeError, FlowControlError
import struct
MAX_FRAME_SIZE = (2 ** 14) - 1
MAX_WINDOW_UPDATE = (2 ** 31) - 1
DEFAULT_PRIORITY = (2 ** 30)
class ConnectionSetting(Enum):
HEADER_TABLE_SIZE = 0x01
ENABLE_PUSH = 0x02
MAX_CONCURRENT_STREAMS = 0x03
INITIAL_WINDOW_SIZE = 0x04
class FrameType(Enum):
DATA = 0x00
HEADERS = 0x1
PRIORITY = 0x2
RST_STREAM = 0x3
SETTINGS = 0x4
PUSH_PROMISE = 0x5
PING = 0x6
GO_AWAY = 0x7
WINDOW_UPDATE = 0x8
CONTINUATION = 0x9
class ErrorCode(Enum):
NO_ERROR = 0x0
PROTOCOL_ERROR = 0x01
INTERNAL_ERROR = 0x02
FLOW_CONTROL_ERROR = 0x04
SETTINGS_TIMEOUT = 0x08
STREAM_CLOSED = 0x10
FRAME_SIZE_ERROR = 0x20
REFUSED_STREAM = 0x40
CANCEL = 0x80
COMPRESSION_ERROR = 0x100
CONNECT_ERROR = 0x200
ENHANCE_YOUR_CALM = 0x400
INADEQUATE_SECURITY = 0x800
# TODO(roasbeef): Think of better name? And/or better way to handle the
# redundancy.
class SpecialFrameFlag(Enum):
ACK = 0x1
END_PUSH_PROMISE = 0x4
class FrameFlag(Enum):
END_STREAM = 0x1
END_SEGMENT = 0x2
END_HEADERS = 0x4
PRIORITY = 0x8
PAD_LOW = 0x10
PAD_HIGH = 0x20
@staticmethod
def create_flag_set(*flag_names):
return {FrameFlag[flag_name] for flag_name in flag_names}
class FrameHeader(object):
"""
0 1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| R | Length (14) | Type (8) | Flags (8) |
+-+-+-----------+---------------+-------------------------------+
|R| Stream Identifier (31) |
+-+-------------------------------------------------------------+
| Frame Payload (0...) ...
+---------------------------------------------------------------+
"""
def __init__(self, length, frame_type, flags, stream_id):
self.length = length
self.frame_type = frame_type
self.raw_flag_bits = flags
self.stream_id = stream_id
def __len__(self):
""" Return the length of the header's payload, in bytes. """
return self.length
def __repr__(self):
return '<FrameHeader length:{}, frame_type:{}, flags:{}, stream_id:{}>'.format(
self.length,
FRAME_TYPE_TO_FRAME[self.frame_type].__name__,
'<{}>'.format(','.join(str(flag_type.name) for flag_type in FrameFlag if self.flags & flag_type.value)),
self.stream_id
)
@classmethod
def from_raw_bytes(cls, frame_bytes):
header_fields = struct.unpack('!HBBL', frame_bytes)
# Knock off the first 2 bits, they are reserved, and currently unused.
payload_length = header_fields[0] & 0x3FFF
frame_type = header_fields[1]
raw_flags = header_fields[2]
stream_id = header_fields[3]
return cls(payload_length, FrameType(frame_type), raw_flags, stream_id)
@classmethod
def from_frame(cls, frame):
raw_flags = 0
for flag_type in frame.flags:
raw_flags |= flag_type.value
return cls(len(frame), frame.frame_type, raw_flags, frame.stream_id)
def serialize(self):
return struct.pack(
'!HBBL',
self.length & 0x3FFF, # Knock off first two bits.
self.frame_type.value,
self.raw_flag_bits,
self.stream_id & 0x7FFFFFFF # Make sure it's 31 bits.
)
class Frame(object):
frame_type = None
defined_flags = set()
def __init__(self, stream_id, flags=None, length=0):
self.stream_id = stream_id
self.flags = flags if flags is not None else set()
self.length = length
def __len__(self):
# TODO(roasbeef): Delete this method?
return self.length
def __repr__(self):
return '<{}| length: {}, flags: {}, stream_id: {}, data: {}>'.format(
FRAME_TYPE_TO_FRAME[self.frame_type].__name__,
len(self),
'<{}>'.format(','.join(str(flag_type.name) for flag_type in self.defined_flags if flag_type in self.flags)),
self.stream_id,
(self.data if isinstance(self, DataFrame) else b''),
)
@staticmethod
def from_frame_header(frame_header):
frame_klass = FRAME_TYPE_TO_FRAME[frame_header.frame_type]
parsed_frame = frame_klass(frame_header.stream_id)
parsed_frame.parse_flags(frame_header.raw_flag_bits)
return parsed_frame
def parse_flags(self, flag_byte):
for flag_type in self.defined_flags:
if flag_byte & flag_type.value:
self.flags.add(flag_type)
def deserialize(self, frame_payload):
raise NotImplementedError
def serialize(self):
raise NotImplementedError
class DataFrame(Frame):
"""
0 1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| [Pad High(8)] | [Pad Low (8)] | Data (*) .
+---------------+---------------+-------------------------------+
. Data (*) ...
+---------------------------------------------------------------+
| Padding (*) ...
+---------------------------------------------------------------+
"""
frame_type = FrameType.DATA
defined_flags = FrameFlag.create_flag_set('END_STREAM', 'END_SEGMENT',
'PAD_LOW', 'PAD_HIGH')
def __init__(self, stream_id, **kwargs):
if stream_id == 0:
raise ProtocolError()
super().__init__(stream_id, **kwargs)
self.data = b''
self.pad_high = None
self.pad_low = None
self.total_padding = 0
def __len__(self):
return 2 + len(self.data) + self.total_padding
def deserialize(self, frame_payload):
self.pad_high = frame_payload[0] if FrameFlag.PAD_HIGH in self.flags else 0
self.pad_low = frame_payload[1] if FrameFlag.PAD_LOW in self.flags else 0
self.total_padding = (self.pad_high * 256) + self.pad_low
if self.total_padding > len(frame_payload[2:]):
raise ProtocolError()
# TODO(roasbeef): Enforce max frame size, tests and such.
self.data = frame_payload[2:len(frame_payload) - self.total_padding]
def serialize(self, pad_low=0, pad_high=0):
frame_header = FrameHeader.from_frame(self).serialize()
padding_bytes = ((pad_high * 256) + pad_low) * struct.pack('!x')
|
[
" pad_low_and_high = struct.pack('!BB', pad_high, pad_low)"
] | 652
|
lcc
|
python
| null |
0a774cef85a83037bcd6fbf761f83707f105e2ee3118ef09
|
|
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using Microsoft.Msagl.Core.DataStructures;
using Microsoft.Msagl.Core.Geometry;
using Microsoft.Msagl.Core.Geometry.Curves;
using Microsoft.Msagl.Layout.LargeGraphLayout;
#if TEST_MSAGL
using Microsoft.Msagl.DebugHelpers;
using System.Diagnostics;
#endif
namespace Microsoft.Msagl.Core.Layout {
/// <summary>
/// This class keeps the graph nodes, edges, and clusters, together with their geometries
/// </summary>
#if TEST_MSAGL
[Serializable]
#endif
public class GeometryGraph : GeometryObject {
IList<Node> nodes;
EdgeCollection edges;
#if TEST_MSAGL
[NonSerialized]
#endif
Cluster rootCluster;
/// <summary>
/// Creates a new GeometryGraph.
/// </summary>
public GeometryGraph()
{
this.nodes = new NodeCollection(this);
this.edges = new EdgeCollection(this);
this.rootCluster = new Cluster();
}
/// <summary>
/// The root cluster for this graph. Will never be null.
/// </summary>
public Cluster RootCluster
{
get
{
return this.rootCluster;
}
set
{
ValidateArg.IsNotNull(value, "value");
this.rootCluster = value;
}
}
internal Rectangle boundingBox;
/// <summary>
/// Bounding box of the graph
/// </summary>
public override Rectangle BoundingBox {
get { return boundingBox; }
set { boundingBox = value; }
}
double margins;
#if DEBUG && TEST_MSAGL
/// <summary>
/// curves to show debug stuff
/// </summary>
public DebugCurve[] DebugCurves;
#endif
/// <summary>
/// margins width are equal from the left and from the right; they are given in percents
/// </summary>
public double Margins
{
get { return margins; }
set { margins = value; }
}
/// <summary>
/// Width of the graph
/// </summary>
public double Width {
get { return BoundingBox.RightBottom.X - BoundingBox.LeftTop.X; }
}
/// <summary>
/// Height of the graph
/// </summary>
public double Height {
get { return BoundingBox.Height; }
}
/// <summary>
/// Left bound of the graph
/// </summary>
public double Left {
get { return BoundingBox.Left; }
}
/// <summary>
/// Right bound of the graph
/// </summary>
public double Right {
get { return BoundingBox.Right; }
}
/// <summary>
/// Left bottom corner of the graph
/// </summary>
internal Point LeftBottom {
get { return new Point(BoundingBox.Left, BoundingBox.Bottom); }
}
/// <summary>
/// Right top corner of the graph
/// </summary>
internal Point RightTop {
get { return new Point(Right, Top); }
}
/// <summary>
/// Bottom bound of the graph
/// </summary>
public double Bottom {
get { return BoundingBox.Bottom; }
}
/// <summary>
/// Top bound of the graph
/// </summary>
public double Top {
get { return BoundingBox.Bottom + BoundingBox.Height; }
}
/// <summary>
/// The nodes in the graph.
/// </summary>
public IList<Node> Nodes {
get { return nodes; }
set { nodes = value; }
}
/// <summary>
/// Edges of the graph
/// </summary>
public EdgeCollection Edges {
get { return edges; }
set { edges =value; }
}
/// <summary>
/// Returns a collection of all the labels in the graph.
/// </summary>
/// <returns></returns>
public ICollection<Label> CollectAllLabels()
{
return Edges.SelectMany(e => e.Labels).ToList();
}
/// <summary>
/// transforms the graph by the given matrix
/// </summary>
/// <param name="matrix">the matrix</param>
public void Transform(PlaneTransformation matrix) {
foreach (var node in Nodes)
node.Transform(matrix);
foreach (var edge in Edges)
edge.Transform(matrix);
#if DEBUG && TEST_MSAGL
if (DebugCurves != null)
foreach (var dc in DebugCurves)
dc.Curve = dc.Curve.Transform(matrix);
#endif
UpdateBoundingBox();
}
/// <summary>
///
/// </summary>
/// <returns></returns>
public Rectangle PumpTheBoxToTheGraphWithMargins() {
var b = Rectangle.CreateAnEmptyBox();
PumpTheBoxToTheGraph(ref b);
var del=new Point(Margins, -Margins);
b.RightBottom += del;
b.LeftTop -= del;
b.Width = Math.Max(b.Width, MinimalWidth);
b.Height = Math.Max(b.Height, MinimalHeight);
return b;
}
///<summary>
///the minimal width of the graph
///</summary>
public double MinimalWidth { get; set; }
///<summary>
///the minimal height of the graph
///</summary>
public double MinimalHeight { get; set; }
/// <summary>
/// enlarge the rectangle to contain the graph
/// </summary>
/// <param name="b"></param>
void PumpTheBoxToTheGraph(ref Rectangle b) {
foreach (Edge e in Edges) {
if (e.UnderCollapsedCluster()) continue;
if (e.Curve != null) {
#if SHARPKIT //https://code.google.com/p/sharpkit/issues/detail?id=369 there are no structs in js
var cb = e.Curve.BoundingBox.Clone();
#else
var cb = e.Curve.BoundingBox;
#endif
cb.Pad(e.LineWidth);
b.Add(cb);
}
foreach (var l in e.Labels.Where(lbl => lbl != null))
b.Add(l.BoundingBox);
}
foreach (Node n in Nodes) {
if (n.UnderCollapsedCluster()) continue;
b.Add(n.BoundingBox);
}
foreach (var c in RootCluster.Clusters) {
if (c.BoundaryCurve == null) {
if (c.RectangularBoundary != null)
c.BoundaryCurve = c.RectangularBoundary.RectangularHull();
}
if (c.BoundaryCurve != null)
b.Add(c.BoundaryCurve.BoundingBox);
}
#if DEBUG && TEST_MSAGL
if(DebugCurves!=null)
foreach (var debugCurve in DebugCurves.Where(d => d.Curve != null))
b.Add(debugCurve.Curve.BoundingBox);
#endif
}
/// <summary>
/// Translates the graph by delta.
/// Assumes bounding box is already up to date.
/// </summary>
public void Translate(Point delta)
{
var nodeSet = new Set<Node>(Nodes);
foreach (var v in Nodes)
v.Center += delta;
foreach (var cluster in RootCluster.AllClustersDepthFirstExcludingSelf()) {
foreach (var node in cluster.Nodes.Where(n => !nodeSet.Contains(n)))
node.Center += delta;
cluster.Center += delta;
cluster.RectangularBoundary.TranslateRectangle(delta);
}
foreach (var e in edges)
e.Translate(delta);
BoundingBox = new Rectangle(BoundingBox.Left + delta.X, BoundingBox.Bottom + delta.Y, new Point(BoundingBox.Width, BoundingBox.Height));
}
/// <summary>
/// Updates the bounding box to fit the contents.
/// </summary>
public void UpdateBoundingBox() {
this.BoundingBox = PumpTheBoxToTheGraphWithMargins();
}
/// <summary>
/// Flatten the list of nodes and clusters
/// </summary>
/// <returns></returns>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Design", "CA1024:UsePropertiesWhereAppropriate")]
public IEnumerable<Node> GetFlattenedNodesAndClusters()
{
foreach (Node v in Nodes)
{
yield return v;
}
foreach(Cluster cluster in this.RootCluster.AllClustersDepthFirst())
{
if (cluster != this.RootCluster)
{
yield return cluster;
}
}
}
/// <summary>
/// Finds the first node with the corresponding user data.
/// </summary>
/// <returns>The first node with the given user data. Null if no such node exists.</returns>
public Node FindNodeByUserData(object userData)
{
return this.Nodes.FirstOrDefault(n => n.UserData.Equals(userData));
}
#if TEST_MSAGL
///<summary>
///</summary>
public void SetDebugIds()
{
int id = 0;
foreach (var node in RootCluster.AllClustersDepthFirst())
node.DebugId = id++;
foreach (var node in Nodes)
if (node.DebugId == null)
node.DebugId = id++;
}
internal void CheckClusterConsistency() {
foreach (var cluster in RootCluster.AllClustersDepthFirst())
CheckClusterConsistency(cluster);
}
static void CheckClusterConsistency(Cluster cluster) {
if (cluster.BoundaryCurve == null)
return;
|
[
" foreach (var child in cluster.Clusters.Concat(cluster.Nodes)) {"
] | 936
|
lcc
|
csharp
| null |
adeed84f45c576f4e84e46a705dd8d968d64d0646a6ef56a
|
|
/*
Bullet Continuous Collision Detection and Physics Library
Copyright (c) 2003-2008 Erwin Coumans http://bulletphysics.com
This software is provided 'as-is', without any express or implied warranty.
In no event will the authors be held liable for any damages arising from the use of this software.
Permission is granted to anyone to use this software for any purpose,
including commercial applications, and to alter it and redistribute it freely,
subject to the following restrictions:
1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required.
2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software.
3. This notice may not be removed or altered from any source distribution.
*/
#include <stdio.h>
#include "LinearMath/btIDebugDraw.h"
#include "BulletCollision/CollisionDispatch/btGhostObject.h"
#include "BulletCollision/CollisionShapes/btMultiSphereShape.h"
#include "BulletCollision/BroadphaseCollision/btOverlappingPairCache.h"
#include "BulletCollision/BroadphaseCollision/btCollisionAlgorithm.h"
#include "BulletCollision/CollisionDispatch/btCollisionWorld.h"
#include "LinearMath/btDefaultMotionState.h"
#include "btKinematicCharacterController.h"
// static helper method
static btVector3
getNormalizedVector(ref btVector3 v)
{
btVector3 n(0, 0, 0);
if (v.length() > SIMD_EPSILON) {
n = v.normalized();
}
return n;
}
///@todo Interact with dynamic objects,
///Ride kinematicly animated platforms properly
///More realistic (or maybe just a config option) falling
/// . Should integrate falling velocity manually and use that in stepDown()
///Support jumping
///Support ducking
class btKinematicClosestNotMeRayResultCallback : btCollisionWorld::ClosestRayResultCallback
{
public:
btKinematicClosestNotMeRayResultCallback (btCollisionObject me) : btCollisionWorld::ClosestRayResultCallback(btVector3(0.0, 0.0, 0.0), btVector3(0.0, 0.0, 0.0))
{
m_me = me;
}
virtual double addSingleResult(btCollisionWorld::LocalRayResult& rayResult,bool normalInWorldSpace)
{
if (rayResult.m_collisionObject == m_me)
return 1.0;
return ClosestRayResultCallback::addSingleResult (rayResult, normalInWorldSpace);
}
protected:
btCollisionObject m_me;
};
class btKinematicClosestNotMeConvexResultCallback : btCollisionWorld::ClosestConvexResultCallback
{
public:
btKinematicClosestNotMeConvexResultCallback (btCollisionObject me, ref btVector3 up, double minSlopeDot)
: btCollisionWorld::ClosestConvexResultCallback(btVector3(0.0, 0.0, 0.0), btVector3(0.0, 0.0, 0.0))
, m_me(me)
, m_up(up)
, m_minSlopeDot(minSlopeDot)
{
}
virtual double addSingleResult(btCollisionWorld::LocalConvexResult& convexResult,bool normalInWorldSpace)
{
if (convexResult.m_hitCollisionObject == m_me)
return (double)(1.0);
if (!convexResult.m_hitCollisionObject.hasContactResponse())
return (double)(1.0);
btVector3 hitNormalWorld;
if (normalInWorldSpace)
{
hitNormalWorld = convexResult.m_hitNormalLocal;
} else
{
///need to transform normal into worldspace
hitNormalWorld = convexResult.m_hitCollisionObject.getWorldTransform().getBasis()*convexResult.m_hitNormalLocal;
}
double dotUp = m_up.dot(hitNormalWorld);
if (dotUp < m_minSlopeDot) {
return (double)(1.0);
}
return ClosestConvexResultCallback::addSingleResult (convexResult, normalInWorldSpace);
}
protected:
btCollisionObject m_me;
btVector3 m_up;
double m_minSlopeDot;
};
/*
* Returns the reflection direction of a ray going 'direction' hitting a surface with normal 'normal'
*
* from: http://www-cs-students.stanford.edu/~adityagp/final/node3.html
*/
btVector3 btKinematicCharacterController::computeReflectionDirection (ref btVector3 direction, ref btVector3 normal)
{
return direction - ((double)(2.0) * direction.dot(normal)) * normal;
}
/*
* Returns the portion of 'direction' that is parallel to 'normal'
*/
btVector3 btKinematicCharacterController::parallelComponent (ref btVector3 direction, ref btVector3 normal)
{
double magnitude = direction.dot(normal);
return normal * magnitude;
}
/*
* Returns the portion of 'direction' that is perpindicular to 'normal'
*/
btVector3 btKinematicCharacterController::perpindicularComponent (ref btVector3 direction, ref btVector3 normal)
{
return direction - parallelComponent(direction, normal);
}
btKinematicCharacterController::btKinematicCharacterController (btPairCachingGhostObject* ghostObject,btConvexShape* convexShape,double stepHeight, int upAxis)
{
m_upAxis = upAxis;
m_addedMargin = 0.02;
m_walkDirection.setValue(0,0,0);
m_useGhostObjectSweepTest = true;
m_ghostObject = ghostObject;
m_stepHeight = stepHeight;
m_turnAngle = (double)(0.0);
m_convexShape=convexShape;
m_useWalkDirection = true; // use walk direction by default, legacy behavior
m_velocityTimeInterval = 0.0;
m_verticalVelocity = 0.0;
m_verticalOffset = 0.0;
m_gravity = 9.8 * 3 ; // 3G acceleration.
m_fallSpeed = 55.0; // Terminal velocity of a sky diver in m/s.
m_jumpSpeed = 10.0; // ?
m_wasOnGround = false;
m_wasJumping = false;
m_interpolateUp = true;
setMaxSlope(btRadians(45.0));
m_currentStepOffset = 0;
full_drop = false;
bounce_fix = false;
}
btKinematicCharacterController::~btKinematicCharacterController ()
{
}
btPairCachingGhostObject* btKinematicCharacterController::getGhostObject()
{
return m_ghostObject;
}
bool btKinematicCharacterController::recoverFromPenetration ( btCollisionWorld* collisionWorld)
{
// Here we must refresh the overlapping paircache as the penetrating movement itself or the
// previous recovery iteration might have used setWorldTransform and pushed us into an object
// that is not in the previous cache contents from the last timestep, as will happen if we
// are pushed into a new AABB overlap. Unhandled this means the next convex sweep gets stuck.
//
// Do this by calling the broadphase's setAabb with the moved AABB, this will update the broadphase
// paircache and the ghostobject's internal paircache at the same time. /BW
btVector3 minAabb, maxAabb;
m_convexShape.getAabb(m_ghostObject.getWorldTransform(), minAabb,maxAabb);
collisionWorld.getBroadphase().setAabb(m_ghostObject.getBroadphaseHandle(),
minAabb,
maxAabb,
collisionWorld.getDispatcher());
bool penetration = false;
collisionWorld.getDispatcher().dispatchAllCollisionPairs(m_ghostObject.getOverlappingPairCache(), collisionWorld.getDispatchInfo(), collisionWorld.getDispatcher());
m_currentPosition = m_ghostObject.getWorldTransform().getOrigin();
double maxPen = (double)(0.0);
for (int i = 0; i < m_ghostObject.getOverlappingPairCache().getNumOverlappingPairs(); i++)
{
m_manifoldArray.resize(0);
btBroadphasePair* collisionPair = &m_ghostObject.getOverlappingPairCache().getOverlappingPairArray()[i];
btCollisionObject obj0 = static_cast<btCollisionObject>(collisionPair.m_pProxy0.m_clientObject);
btCollisionObject obj1 = static_cast<btCollisionObject>(collisionPair.m_pProxy1.m_clientObject);
if ((obj0 && !obj0.hasContactResponse()) || (obj1 && !obj1.hasContactResponse()))
continue;
if (collisionPair.m_algorithm)
collisionPair.m_algorithm.getAllContactManifolds(m_manifoldArray);
for (int j=0;j<m_manifoldArray.Count;j++)
{
btPersistentManifold* manifold = m_manifoldArray[j];
double directionSign = manifold.getBody0() == m_ghostObject ? (double)(-1.0) : (double)(1.0);
for (int p=0;p<manifold.getNumContacts();p++)
{
btManifoldPointpt = manifold.getContactPoint(p);
double dist = pt.getDistance();
if (dist < 0.0)
{
if (dist < maxPen)
{
maxPen = dist;
m_touchingNormal = pt.m_normalWorldOnB * directionSign;//??
}
m_currentPosition += pt.m_normalWorldOnB * directionSign * dist * (double)(0.2);
penetration = true;
} else {
//Console.WriteLine("touching %f\n", dist);
}
}
//manifold.clearManifold();
}
}
btTransform newTrans = m_ghostObject.getWorldTransform();
newTrans.setOrigin(m_currentPosition);
m_ghostObject.setWorldTransform(newTrans);
// Console.WriteLine("m_touchingNormal = %f,%f,%f\n",m_touchingNormal,m_touchingNormal[1],m_touchingNormal[2]);
return penetration;
}
void btKinematicCharacterController::stepUp ( btCollisionWorld* world)
{
// phase 1: up
btTransform start, end;
m_targetPosition = m_currentPosition + getUpAxisDirections()[m_upAxis] * (m_stepHeight + (m_verticalOffset > 0?m_verticalOffset:0));
start.setIdentity ();
end.setIdentity ();
/* FIXME: Handle penetration properly */
start.setOrigin (m_currentPosition + getUpAxisDirections()[m_upAxis] * (m_convexShape.getMargin() + m_addedMargin));
end.setOrigin (m_targetPosition);
btKinematicClosestNotMeConvexResultCallback callback (m_ghostObject, -getUpAxisDirections()[m_upAxis], (double)(0.7071));
callback.m_collisionFilterGroup = getGhostObject().getBroadphaseHandle().m_collisionFilterGroup;
callback.m_collisionFilterMask = getGhostObject().getBroadphaseHandle().m_collisionFilterMask;
if (m_useGhostObjectSweepTest)
{
m_ghostObject.convexSweepTest (m_convexShape, start, end, callback, world.getDispatchInfo().m_allowedCcdPenetration);
}
else
{
world.convexSweepTest (m_convexShape, start, end, callback);
}
if (callback.hasHit())
{
// Only modify the position if the hit was a slope and not a wall or ceiling.
if(callback.m_hitNormalWorld.dot(getUpAxisDirections()[m_upAxis]) > 0.0)
{
// we moved up only a fraction of the step height
m_currentStepOffset = m_stepHeight * callback.m_closestHitFraction;
if (m_interpolateUp == true)
m_currentPosition.setInterpolate3 (m_currentPosition, m_targetPosition, callback.m_closestHitFraction);
else
m_currentPosition = m_targetPosition;
}
m_verticalVelocity = 0.0;
m_verticalOffset = 0.0;
} else {
m_currentStepOffset = m_stepHeight;
m_currentPosition = m_targetPosition;
}
}
void btKinematicCharacterController::updateTargetPositionBasedOnCollision (ref btVector3 hitNormal, double tangentMag, double normalMag)
{
btVector3 movementDirection = m_targetPosition - m_currentPosition;
double movementLength = movementDirection.length();
if (movementLength>SIMD_EPSILON)
{
movementDirection.normalize();
btVector3 reflectDir = computeReflectionDirection (movementDirection, hitNormal);
reflectDir.normalize();
btVector3 parallelDir, perpindicularDir;
parallelDir = parallelComponent (reflectDir, hitNormal);
perpindicularDir = perpindicularComponent (reflectDir, hitNormal);
m_targetPosition = m_currentPosition;
if (0)//tangentMag != 0.0)
{
btVector3 parComponent = parallelDir * double (tangentMag*movementLength);
// Console.WriteLine("parComponent=%f,%f,%f\n",parComponent[0],parComponent[1],parComponent[2]);
m_targetPosition += parComponent;
}
if (normalMag != 0.0)
{
btVector3 perpComponent = perpindicularDir * double (normalMag*movementLength);
// Console.WriteLine("perpComponent=%f,%f,%f\n",perpComponent[0],perpComponent[1],perpComponent[2]);
m_targetPosition += perpComponent;
}
} else
{
// Console.WriteLine("movementLength don't normalize a zero vector\n");
}
}
void btKinematicCharacterController::stepForwardAndStrafe ( btCollisionWorld* collisionWorld, ref btVector3 walkMove)
{
// Console.WriteLine("m_normalizedDirection=%f,%f,%f\n",
// m_normalizedDirection[0],m_normalizedDirection[1],m_normalizedDirection[2]);
// phase 2: forward and strafe
btTransform start, end;
m_targetPosition = m_currentPosition + walkMove;
start.setIdentity ();
end.setIdentity ();
double fraction = 1.0;
double distance2 = (m_currentPosition-m_targetPosition).length2();
// Console.WriteLine("distance2=%f\n",distance2);
if (m_touchingContact)
{
if (m_normalizedDirection.dot(m_touchingNormal) > (double)(0.0))
{
//interferes with step movement
//updateTargetPositionBasedOnCollision (m_touchingNormal);
}
}
int maxIter = 10;
while (fraction > (double)(0.01) && maxIter-- > 0)
{
start.setOrigin (m_currentPosition);
end.setOrigin (m_targetPosition);
btVector3 sweepDirNegative(m_currentPosition - m_targetPosition);
btKinematicClosestNotMeConvexResultCallback callback (m_ghostObject, sweepDirNegative, (double)(0.0));
callback.m_collisionFilterGroup = getGhostObject().getBroadphaseHandle().m_collisionFilterGroup;
callback.m_collisionFilterMask = getGhostObject().getBroadphaseHandle().m_collisionFilterMask;
double margin = m_convexShape.getMargin();
m_convexShape.setMargin(margin + m_addedMargin);
if (m_useGhostObjectSweepTest)
{
m_ghostObject.convexSweepTest (m_convexShape, start, end, callback, collisionWorld.getDispatchInfo().m_allowedCcdPenetration);
} else
{
collisionWorld.convexSweepTest (m_convexShape, start, end, callback, collisionWorld.getDispatchInfo().m_allowedCcdPenetration);
}
m_convexShape.setMargin(margin);
fraction -= callback.m_closestHitFraction;
if (callback.hasHit())
{
// we moved only a fraction
//double hitDistance;
//hitDistance = (callback.m_hitPointWorld - m_currentPosition).length();
// m_currentPosition.setInterpolate3 (m_currentPosition, m_targetPosition, callback.m_closestHitFraction);
updateTargetPositionBasedOnCollision (callback.m_hitNormalWorld);
btVector3 currentDir = m_targetPosition - m_currentPosition;
distance2 = currentDir.length2();
if (distance2 > SIMD_EPSILON)
{
currentDir.normalize();
/* See Quake2: "If velocity is against original velocity, stop ead to avoid tiny oscilations in sloping corners." */
if (currentDir.dot(m_normalizedDirection) <= (double)(0.0))
{
break;
}
} else
{
// Console.WriteLine("currentDir: don't normalize a zero vector\n");
break;
}
} else {
// we moved whole way
m_currentPosition = m_targetPosition;
}
// if (callback.m_closestHitFraction == 0)
// break;
}
}
void btKinematicCharacterController::stepDown ( btCollisionWorld* collisionWorld, double dt)
{
btTransform start, end, end_double;
bool runonce = false;
// phase 3: down
/*double additionalDownStep = (m_wasOnGround && !onGround()) ? m_stepHeight : 0.0;
btVector3 step_drop = getUpAxisDirections()[m_upAxis] * (m_currentStepOffset + additionalDownStep);
double downVelocity = (additionalDownStep == 0.0 && m_verticalVelocity<0.0?-m_verticalVelocity:0.0) * dt;
btVector3 gravity_drop = getUpAxisDirections()[m_upAxis] * downVelocity;
m_targetPosition -= (step_drop + gravity_drop);*/
btVector3 orig_position = m_targetPosition;
double downVelocity = (m_verticalVelocity<0?-m_verticalVelocity:0) * dt;
if(downVelocity > 0.0 && downVelocity > m_fallSpeed
&& (m_wasOnGround || !m_wasJumping))
downVelocity = m_fallSpeed;
btVector3 step_drop = getUpAxisDirections()[m_upAxis] * (m_currentStepOffset + downVelocity);
m_targetPosition -= step_drop;
btKinematicClosestNotMeConvexResultCallback callback (m_ghostObject, getUpAxisDirections()[m_upAxis], m_maxSlopeCosine);
callback.m_collisionFilterGroup = getGhostObject().getBroadphaseHandle().m_collisionFilterGroup;
callback.m_collisionFilterMask = getGhostObject().getBroadphaseHandle().m_collisionFilterMask;
btKinematicClosestNotMeConvexResultCallback callback2 (m_ghostObject, getUpAxisDirections()[m_upAxis], m_maxSlopeCosine);
callback2.m_collisionFilterGroup = getGhostObject().getBroadphaseHandle().m_collisionFilterGroup;
callback2.m_collisionFilterMask = getGhostObject().getBroadphaseHandle().m_collisionFilterMask;
while (1)
{
start.setIdentity ();
end.setIdentity ();
end_double.setIdentity ();
start.setOrigin (m_currentPosition);
end.setOrigin (m_targetPosition);
//set double test for 2x the step drop, to check for a large drop vs small drop
end_double.setOrigin (m_targetPosition - step_drop);
if (m_useGhostObjectSweepTest)
{
m_ghostObject.convexSweepTest (m_convexShape, start, end, callback, collisionWorld.getDispatchInfo().m_allowedCcdPenetration);
if (!callback.hasHit())
{
//test a double fall height, to see if the character should interpolate it's fall (full) or not (partial)
m_ghostObject.convexSweepTest (m_convexShape, start, end_double, callback2, collisionWorld.getDispatchInfo().m_allowedCcdPenetration);
}
} else
{
collisionWorld.convexSweepTest (m_convexShape, start, end, callback, collisionWorld.getDispatchInfo().m_allowedCcdPenetration);
if (!callback.hasHit())
{
//test a double fall height, to see if the character should interpolate it's fall (large) or not (small)
collisionWorld.convexSweepTest (m_convexShape, start, end_double, callback2, collisionWorld.getDispatchInfo().m_allowedCcdPenetration);
}
}
double downVelocity2 = (m_verticalVelocity<0?-m_verticalVelocity:0) * dt;
bool has_hit = false;
if (bounce_fix == true)
has_hit = callback.hasHit() || callback2.hasHit();
else
has_hit = callback2.hasHit();
if(downVelocity2 > 0.0 && downVelocity2 < m_stepHeight && has_hit == true && runonce == false
&& (m_wasOnGround || !m_wasJumping))
{
//redo the velocity calculation when falling a small amount, for fast stairs motion
//for larger falls, use the smoother/slower interpolated movement by not touching the target position
m_targetPosition = orig_position;
downVelocity = m_stepHeight;
btVector3 step_drop = getUpAxisDirections()[m_upAxis] * (m_currentStepOffset + downVelocity);
m_targetPosition -= step_drop;
runonce = true;
continue; //re-run previous tests
}
break;
}
if (callback.hasHit() || runonce == true)
{
// we dropped a fraction of the height . hit floor
double fraction = (m_currentPosition.y - callback.m_hitPointWorld.y) / 2;
//Console.WriteLine("hitpoint: %g - pos %g\n", callback.m_hitPointWorld.y, m_currentPosition.y);
if (bounce_fix == true)
{
if (full_drop == true)
m_currentPosition.setInterpolate3 (m_currentPosition, m_targetPosition, callback.m_closestHitFraction);
else
//due to errors in the closestHitFraction variable when used with large polygons, calculate the hit fraction manually
m_currentPosition.setInterpolate3 (m_currentPosition, m_targetPosition, fraction);
}
else
m_currentPosition.setInterpolate3 (m_currentPosition, m_targetPosition, callback.m_closestHitFraction);
full_drop = false;
m_verticalVelocity = 0.0;
m_verticalOffset = 0.0;
m_wasJumping = false;
} else {
// we dropped the full height
full_drop = true;
if (bounce_fix == true)
{
downVelocity = (m_verticalVelocity<0?-m_verticalVelocity:0) * dt;
if (downVelocity > m_fallSpeed && (m_wasOnGround || !m_wasJumping))
{
m_targetPosition += step_drop; //undo previous target change
downVelocity = m_fallSpeed;
step_drop = getUpAxisDirections()[m_upAxis] * (m_currentStepOffset + downVelocity);
m_targetPosition -= step_drop;
}
}
//Console.WriteLine("full drop - %g, %g\n", m_currentPosition.y, m_targetPosition.y);
m_currentPosition = m_targetPosition;
}
}
void btKinematicCharacterController::setWalkDirection
(
ref btVector3 walkDirection
)
{
m_useWalkDirection = true;
m_walkDirection = walkDirection;
m_normalizedDirection = getNormalizedVector(m_walkDirection);
}
void btKinematicCharacterController::setVelocityForTimeInterval
(
ref btVector3 velocity,
double timeInterval
)
{
// Console.WriteLine("setVelocity!\n");
// Console.WriteLine(" interval: %f\n", timeInterval);
// Console.WriteLine(" velocity: (%f, %f, %f)\n",
// velocity.x, velocity.y, velocity.z);
m_useWalkDirection = false;
m_walkDirection = velocity;
m_normalizedDirection = getNormalizedVector(m_walkDirection);
m_velocityTimeInterval += timeInterval;
}
void btKinematicCharacterController::reset ( btCollisionWorld* collisionWorld )
{
m_verticalVelocity = 0.0;
m_verticalOffset = 0.0;
m_wasOnGround = false;
m_wasJumping = false;
m_walkDirection.setValue(0,0,0);
m_velocityTimeInterval = 0.0;
//clear pair cache
btHashedOverlappingPairCache *cache = m_ghostObject.getOverlappingPairCache();
while (cache.getOverlappingPairArray().Count > 0)
{
cache.removeOverlappingPair(cache.getOverlappingPairArray()[0].m_pProxy0, cache.getOverlappingPairArray()[0].m_pProxy1, collisionWorld.getDispatcher());
}
}
void btKinematicCharacterController::warp (ref btVector3 origin)
{
btTransform xform;
xform.setIdentity();
xform.setOrigin (origin);
m_ghostObject.setWorldTransform (xform);
}
void btKinematicCharacterController::preStep ( btCollisionWorld* collisionWorld)
{
int numPenetrationLoops = 0;
m_touchingContact = false;
while (recoverFromPenetration (collisionWorld))
{
numPenetrationLoops++;
m_touchingContact = true;
if (numPenetrationLoops > 4)
{
//Console.WriteLine("character could not recover from penetration = %d\n", numPenetrationLoops);
break;
}
}
m_currentPosition = m_ghostObject.getWorldTransform().getOrigin();
m_targetPosition = m_currentPosition;
// Console.WriteLine("m_targetPosition=%f,%f,%f\n",m_targetPosition[0],m_targetPosition[1],m_targetPosition[2]);
}
#include <stdio.h>
void btKinematicCharacterController::playerStep ( btCollisionWorld* collisionWorld, double dt)
{
// Console.WriteLine("playerStep(): ");
// Console.WriteLine(" dt = %f", dt);
// quick check...
if (!m_useWalkDirection & (m_velocityTimeInterval <= 0.0 || m_walkDirection.fuzzyZero())) {
// Console.WriteLine("\n");
return; // no motion
}
m_wasOnGround = onGround();
// Update fall velocity.
m_verticalVelocity -= m_gravity * dt;
if(m_verticalVelocity > 0.0 && m_verticalVelocity > m_jumpSpeed)
{
m_verticalVelocity = m_jumpSpeed;
}
if(m_verticalVelocity < 0.0 && btFabs(m_verticalVelocity) > btFabs(m_fallSpeed))
{
m_verticalVelocity = -btFabs(m_fallSpeed);
}
m_verticalOffset = m_verticalVelocity * dt;
btTransform xform;
xform = m_ghostObject.getWorldTransform ();
// Console.WriteLine("walkDirection(%f,%f,%f)\n",walkDirection,walkDirection[1],walkDirection[2]);
// Console.WriteLine("walkSpeed=%f\n",walkSpeed);
stepUp (collisionWorld);
if (m_useWalkDirection) {
stepForwardAndStrafe (collisionWorld, m_walkDirection);
} else {
//Console.WriteLine(" time: %f", m_velocityTimeInterval);
// still have some time left for moving!
double dtMoving =
(dt < m_velocityTimeInterval) ? dt : m_velocityTimeInterval;
m_velocityTimeInterval -= dt;
// how far will we move while we are moving?
btVector3 move = m_walkDirection * dtMoving;
//Console.WriteLine(" dtMoving: %f", dtMoving);
// okay, step
stepForwardAndStrafe(collisionWorld, move);
}
stepDown (collisionWorld, dt);
// Console.WriteLine("\n");
xform.setOrigin (m_currentPosition);
m_ghostObject.setWorldTransform (xform);
}
void btKinematicCharacterController::setFallSpeed (double fallSpeed)
{
m_fallSpeed = fallSpeed;
}
void btKinematicCharacterController::setJumpSpeed (double jumpSpeed)
{
m_jumpSpeed = jumpSpeed;
}
void btKinematicCharacterController::setMaxJumpHeight (double maxJumpHeight)
{
m_maxJumpHeight = maxJumpHeight;
}
bool btKinematicCharacterController::canJump ()
{
return onGround();
}
void btKinematicCharacterController::jump ()
{
if (!canJump())
return;
m_verticalVelocity = m_jumpSpeed;
m_wasJumping = true;
#if 0
currently no jumping.
btTransform xform;
m_rigidBody.getMotionState().getWorldTransform (xform);
btVector3 up = xform.getBasis()[1];
up.normalize ();
double magnitude = ((double)(1.0)/m_rigidBody.getInvMass()) * (double)(8.0);
m_rigidBody.applyCentralImpulse (up * magnitude);
#endif
}
void btKinematicCharacterController::setGravity(double gravity)
{
m_gravity = gravity;
}
double btKinematicCharacterController::getGravity()
{
return m_gravity;
}
void btKinematicCharacterController::setMaxSlope(double slopeRadians)
{
m_maxSlopeRadians = slopeRadians;
|
[
"\tm_maxSlopeCosine = btCos(slopeRadians);"
] | 2,149
|
lcc
|
csharp
| null |
f498e75d306788f63949bcfcf05c9e4bb4e190034ff80488
|
|
package org.checkerframework.common.aliasing;
/*>>>
import org.checkerframework.checker.compilermsgs.qual.CompilerMessageKey;
*/
import org.checkerframework.common.aliasing.qual.LeakedToResult;
import org.checkerframework.common.aliasing.qual.NonLeaked;
import org.checkerframework.common.aliasing.qual.Unique;
import org.checkerframework.common.basetype.BaseTypeChecker;
import org.checkerframework.common.basetype.BaseTypeVisitor;
import org.checkerframework.dataflow.cfg.node.MethodInvocationNode;
import org.checkerframework.framework.source.Result;
import org.checkerframework.framework.type.AnnotatedTypeMirror;
import org.checkerframework.framework.type.AnnotatedTypeMirror.AnnotatedArrayType;
import org.checkerframework.framework.type.AnnotatedTypeMirror.AnnotatedDeclaredType;
import org.checkerframework.framework.type.AnnotatedTypeMirror.AnnotatedExecutableType;
import org.checkerframework.javacutil.TreeUtils;
import java.util.List;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.VariableElement;
import com.sun.source.tree.ExpressionTree;
import com.sun.source.tree.MethodInvocationTree;
import com.sun.source.tree.MethodTree;
import com.sun.source.tree.NewArrayTree;
import com.sun.source.tree.ThrowTree;
import com.sun.source.tree.Tree;
import com.sun.source.tree.Tree.Kind;
import com.sun.source.tree.VariableTree;
/**
* This visitor ensures that every constructor whose result is annotated as
* {@literal @}Unique does not leak aliases.
* <p>
*
* TODO: Implement {@literal @}NonLeaked and {@literal @}LeakedToResult verifications:
* <p>
* {@literal @}NonLeaked: When a method declaration has a parameter annotated as
* {@literal @}NonLeaked, the method body must not leak a reference to that parameter.
* <p>
*
* {@literal @}LeakedToResult: When a method declaration has a parameter annotated as
* {@literal @}LeakedToResult, the method body must not leak a reference to that parameter,
* except at the method return statements.
* <p>
*
* Both of the checks above are similar to the @Unique check that is
* implemented in this visitor.
*/
public class AliasingVisitor extends
BaseTypeVisitor<AliasingAnnotatedTypeFactory> {
public AliasingVisitor(BaseTypeChecker checker) {
super(checker);
}
/**
* Checks that if a method call is being invoked inside a constructor with
* result type {@literal @}Unique, it must not leak the "this" reference.
* There are 3 ways to make sure that this is not happening:
* <p>
* 1. "this" is not an argument of the method call.
* <p>
* 2. "this" is an argument of the method call, but the respective parameter
* is annotated as {@literal @}NonLeaked.
* <p>
* 3. "this" is an argument of the method call, but the respective parameter
* is annotated as {@literal @}LeakedToResult AND the result of the method
* call is not being stored (the method call is a statement).
* <p>
* The private method <code>isUniqueCheck</code> handles cases 2 and 3.
*/
@Override
public Void visitMethodInvocation(MethodInvocationTree node, Void p) {
// The check only needs to be done for constructors with result type
// @Unique. We also want to avoid visiting the <init> method.
if (isInUniqueConstructor(node)) {
if (TreeUtils.isSuperCall(node)) {
// Check if a call to super() might create an alias: that
// happens when the parent's respective constructor is not @Unique.
AnnotatedTypeMirror superResult = atypeFactory.
getAnnotatedType(node);
if (!superResult.hasAnnotation(Unique.class)) {
checker.report(Result.failure("unique.leaked"), node);
}
} else {
// TODO: Currently the type of "this" doesn't always return
// the type of the constructor result, therefore we need
// this "else" block. Once constructors are implemented
// correctly we could remove that code below, since the type
// of "this" in a @Unique constructor will be @Unique.
MethodInvocationNode n = (MethodInvocationNode) atypeFactory.
getNodeForTree(node);
Tree parent = n.getTreePath().getParentPath().getLeaf();
boolean parentIsStatement = parent.getKind() == Kind.
EXPRESSION_STATEMENT;
ExecutableElement methodElement = TreeUtils.elementFromUse(node);
List<? extends VariableElement> params = methodElement.
getParameters();
List<? extends ExpressionTree> args = node.getArguments();
assert (args.size() == params.size()) : "Number of arguments in"
+ " the method call " + n.toString() + " is different from the "
+ "number of parameters for the method declaration: "
+ methodElement.getSimpleName().toString();
for (int i = 0; i < args.size(); i++) {
// Here we are traversing the arguments of the method call.
// For every argument we check if it is a reference to "this".
if (TreeUtils.isExplicitThisDereference(args.get(i))) {
// If it is a reference to "this", there is still hope that
// it is not being leaked (2. and 3. from the javadoc).
VariableElement param = params.get(i);
boolean hasNonLeaked = atypeFactory.getAnnotatedType(
param).
hasAnnotation(NonLeaked.class);
boolean hasLeakedToResult = atypeFactory.
getAnnotatedType(param).
hasAnnotation(LeakedToResult.class);
isUniqueCheck(node, parentIsStatement, hasNonLeaked,
hasLeakedToResult);
} else {
//Not possible to leak reference here (case 1. from the javadoc).
}
}
// Now, doing the same as above for the receiver parameter
AnnotatedExecutableType annotatedType = atypeFactory.
getAnnotatedType(methodElement);
AnnotatedDeclaredType receiverType = annotatedType.
getReceiverType();
if (receiverType != null) {
boolean hasNonLeaked = receiverType.hasAnnotation(
NonLeaked.class);
boolean hasLeakedToResult = receiverType.hasAnnotation(
LeakedToResult.class);
isUniqueCheck(node, parentIsStatement, hasNonLeaked,
hasLeakedToResult);
}
}
}
return super.visitMethodInvocation(node, p);
}
private void isUniqueCheck(MethodInvocationTree node, boolean parentIsStatement,
boolean hasNonLeaked, boolean hasLeakedToResult) {
if (hasNonLeaked || (hasLeakedToResult && parentIsStatement)) {
// Not leaked according to cases 2. and 3. from the javadoc of
// visitMethodInvocation.
} else {
// May be leaked, raise warning.
checker.report(Result.failure("unique.leaked"), node);
}
}
// TODO: Merge that code in
// commonAssignmentCheck(AnnotatedTypeMirror varType, ExpressionTree
// valueExp, String errorKey, boolean isLocalVariableAssignement), because
// the method below isn't called for pseudo-assignments, but the mentioned
// one is. The issue of copy-pasting the code from this method to the other
// one is that a declaration such as: List<@Unique Object> will raise a
// unique.leaked warning, as there is a pseudo-assignment from @Unique to a
// @MaybeAliased object, if the @Unique annotation is not in the stubfile.
// TODO: Change the documentation in BaseTypeVisitor to point out that
// this isn't called for pseudo-assignments.
@Override
protected void commonAssignmentCheck(Tree varTree, ExpressionTree valueExp,
/*@CompilerMessageKey*/ String errorKey) {
super.commonAssignmentCheck(varTree, valueExp, errorKey);
if (isInUniqueConstructor(valueExp) && TreeUtils.
isExplicitThisDereference(valueExp)) {
// If an assignment occurs inside a constructor with
// result type @Unique, it will invalidate the @Unique property
// by using the "this" reference.
checker.report(Result.failure("unique.leaked"), valueExp);
} else if (canBeLeaked(valueExp)) {
checker.report(Result.failure("unique.leaked"), valueExp);
}
}
@Override
protected void commonAssignmentCheck(AnnotatedTypeMirror varType,
AnnotatedTypeMirror valueType, Tree valueTree, /*@CompilerMessageKey*/ String errorKey) {
super.commonAssignmentCheck(varType, valueType, valueTree, errorKey);
// If we are visiting a pseudo-assignment, visitorLeafKind is either
// Kind.NEW_CLASS or Kind.METHOD_INVOCATION.
Kind visitorLeafKind = visitorState.getPath().getLeaf().getKind();
Kind parentKind = visitorState.getPath().getParentPath().getLeaf().
getKind();
if (visitorLeafKind == Kind.NEW_CLASS ||
visitorLeafKind == Kind.METHOD_INVOCATION) {
// Handling pseudo-assignments
if (canBeLeaked(valueTree)) {
if (!varType.hasAnnotation(NonLeaked.class) &&
!(varType.hasAnnotation(LeakedToResult.class) &&
parentKind == Kind.EXPRESSION_STATEMENT)) {
checker.report(Result.failure("unique.leaked"), valueTree);
}
}
}
}
@Override
public Void visitThrow(ThrowTree node, Void p) {
// throw is also an escape mechanism. If an expression of type
// @Unique is thrown, it is not @Unique anymore.
ExpressionTree exp = node.getExpression();
if (canBeLeaked(exp)) {
checker.report(Result.failure("unique.leaked"), exp);
}
return super.visitThrow(node, p);
}
@Override
public Void visitVariable(VariableTree node, Void p) {
// Component types are not allowed to have the @Unique annotation.
AnnotatedTypeMirror varType = atypeFactory.getAnnotatedType(node);
VariableElement elt = TreeUtils.elementFromDeclaration(node);
if (elt.getKind().isField() && varType.hasExplicitAnnotation(Unique.class)) {
checker.report(Result.failure("unique.location.forbidden"), node);
} else if (node.getType().getKind() == Kind.ARRAY_TYPE) {
AnnotatedArrayType arrayType = (AnnotatedArrayType) varType;
if (arrayType.getComponentType().hasAnnotation(Unique.class)) {
checker.report(Result.failure("unique.location.forbidden"),
node);
}
} else if (node.getType().getKind() == Kind.PARAMETERIZED_TYPE) {
AnnotatedDeclaredType declaredType = (AnnotatedDeclaredType) varType;
for (AnnotatedTypeMirror atm : declaredType.getTypeArguments()) {
if (atm.hasAnnotation(Unique.class)) {
checker.report(Result.failure("unique.location.forbidden"),
node);
}
}
}
return super.visitVariable(node, p);
}
@Override
public Void visitNewArray(NewArrayTree node, Void p) {
List<? extends ExpressionTree> initializers = node.getInitializers();
|
[
" if (initializers != null && !initializers.isEmpty()) {"
] | 1,063
|
lcc
|
java
| null |
57e19c332b765589c06c6317e00c2f7c5f50bc38d1fac11c
|
|
package flaxbeard.cyberware.common.item;
import java.util.List;
import net.minecraft.client.model.ModelBiped;
import net.minecraft.client.model.ModelRenderer;
import net.minecraft.client.renderer.GlStateManager;
import net.minecraft.creativetab.CreativeTabs;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.item.EntityArmorStand;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.init.Blocks;
import net.minecraft.init.Items;
import net.minecraft.inventory.EntityEquipmentSlot;
import net.minecraft.item.Item;
import net.minecraft.item.ItemArmor;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.util.NonNullList;
import net.minecraftforge.fml.common.registry.GameRegistry;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
import flaxbeard.cyberware.Cyberware;
import flaxbeard.cyberware.api.item.IDeconstructable;
import flaxbeard.cyberware.client.ClientUtils;
import flaxbeard.cyberware.common.CyberwareContent;
public class ItemArmorCyberware extends ItemArmor implements IDeconstructable
{
	// Biped model variant that adds a coat-tail box below the torso so the
	// trenchcoat item renders with a skirt that follows the legs.
	public static class ModelTrenchcoat extends ModelBiped
	{
		// The extra coat-tail cuboid (texture offset 16,0 on the armor sheet).
		public ModelRenderer bottomThing;
		public ModelTrenchcoat(float modelSize)
		{
			super(modelSize);
			// 8x12x4 box centered on the hips, hanging from waist height (y=12).
			this.bottomThing = new ModelRenderer(this, 16, 0);
			this.bottomThing.addBox(-4.0F, 0F, -1.7F, 8, 12, 4, modelSize);
			this.bottomThing.setRotationPoint(0, 12.0F, 0.0F);
		}
		@Override
		public void setRotationAngles(float limbSwing, float limbSwingAmount, float ageInTicks, float netHeadYaw, float headPitch, float scaleFactor, Entity entityIn)
		{
			super.setRotationAngles(limbSwing, limbSwingAmount, ageInTicks, netHeadYaw, headPitch, scaleFactor, entityIn);
			// Anchor the tail to the legs' pivot and swing it with whichever
			// leg is further forward, slightly exaggerated (+5% * 1.1).
			this.bottomThing.setRotationPoint(0, this.bipedLeftLeg.rotationPointY, this.bipedLeftLeg.rotationPointZ);
			this.bottomThing.rotateAngleX = Math.max(this.bipedLeftLeg.rotateAngleX, this.bipedRightLeg.rotateAngleX) + .05F * 1.1F;
		}
		@Override
		public void render(Entity entityIn, float limbSwing, float limbSwingAmount, float ageInTicks, float netHeadYaw, float headPitch, float scale)
		{
			super.render(entityIn, limbSwing, limbSwingAmount, ageInTicks, netHeadYaw, headPitch, scale);
			// Matrix push/pop isolates our translate/scale from later rendering.
			GlStateManager.pushMatrix();
			if (this.isChild)
			{
				// Child entities render at half size, shifted down to match vanilla.
				float f = 2.0F;
				GlStateManager.scale(1.0F / f, 1.0F / f, 1.0F / f);
				GlStateManager.translate(0.0F, 24.0F * scale, 0.0F);
				this.bottomThing.render(scale);
			}
			else
			{
				if (entityIn.isSneaking())
				{
					// Match the body's sneak offset so the tail stays attached.
					GlStateManager.translate(0.0F, 0.2F, 0.0F);
				}
				this.bottomThing.render(scale);
			}
			GlStateManager.popMatrix();
		}
	}
	// Creates and registers a Cyberware armor piece. Registry name must be set
	// before GameRegistry.register is called; the item is also recorded in
	// CyberwareContent.items so the mod can iterate its own items later.
	public ItemArmorCyberware(String name, ArmorMaterial materialIn, int renderIndexIn, EntityEquipmentSlot equipmentSlotIn)
	{
		super(materialIn, renderIndexIn, equipmentSlotIn);
		this.setRegistryName(name);
		GameRegistry.register(this);
		this.setUnlocalizedName(Cyberware.MODID + "." + name);
		this.setCreativeTab(Cyberware.creativeTab);
		CyberwareContent.items.add(this);
	}
	// IDeconstructable: every Cyberware armor piece can be broken down into
	// its crafting components (see getComponents below).
	@Override
	public boolean canDestroy(ItemStack stack)
	{
		return true;
	}
@Override
public NonNullList<ItemStack> getComponents(ItemStack stack)
{
Item i = stack.getItem();
if (i == CyberwareContent.trenchcoat)
{
NonNullList<ItemStack> l = NonNullList.create();
l.add(new ItemStack(CyberwareContent.component, 2, 2));
l.add(new ItemStack(Items.LEATHER, 12, 0));
l.add(new ItemStack(Items.DYE, 1, 0));
return l;
}
else if (i == CyberwareContent.jacket)
{
NonNullList<ItemStack> l = NonNullList.create();
l.add(new ItemStack(CyberwareContent.component, 1, 2));
l.add(new ItemStack(Items.LEATHER, 8, 0));
l.add(new ItemStack(Items.DYE, 1, 0));
return l;
}
NonNullList<ItemStack> l = NonNullList.create();
l.add(new ItemStack(Blocks.STAINED_GLASS, 4, 15));
l.add(new ItemStack(CyberwareContent.component, 1, 4));
return l;
}
	// Selects the custom armor model: the trenchcoat model for the trenchcoat
	// item, the plain armor model otherwise. NOTE(review): this mutates the
	// shared ClientUtils model instances on every call (attributes + arm
	// visibility), which appears intentional since both models are prepared
	// before one is returned — confirm ClientUtils.trench/armor are
	// render-thread-only singletons.
	@Override
	@SideOnly(Side.CLIENT)
	public ModelBiped getArmorModel(EntityLivingBase entityLiving, ItemStack itemStack, EntityEquipmentSlot armorSlot, net.minecraft.client.model.ModelBiped _default)
	{
		ClientUtils.trench.setModelAttributes(_default);
		ClientUtils.armor.setModelAttributes(_default);
		// Hide arms unless worn by a player or armor stand (e.g. on mobs).
		ClientUtils.trench.bipedRightArm.isHidden = !(entityLiving instanceof EntityPlayer) && !(entityLiving instanceof EntityArmorStand);
		ClientUtils.trench.bipedLeftArm.isHidden = !(entityLiving instanceof EntityPlayer) && !(entityLiving instanceof EntityArmorStand);
		ClientUtils.armor.bipedRightArm.isHidden = ClientUtils.trench.bipedRightArm.isHidden;
		ClientUtils.armor.bipedLeftArm.isHidden = ClientUtils.trench.bipedLeftArm.isHidden;
		if (!itemStack.isEmpty() && itemStack.getItem() == CyberwareContent.trenchcoat) return ClientUtils.trench;
		return ClientUtils.armor;
	}
public boolean hasColor(ItemStack stack)
{
if (this.getArmorMaterial() != CyberwareContent.trenchMat)
{
return false;
}
else
{
NBTTagCompound nbttagcompound = stack.getTagCompound();
return nbttagcompound != null && nbttagcompound.hasKey("display", 10) ? nbttagcompound.getCompoundTag("display").hasKey("color", 3) : false;
}
}
public int getColor(ItemStack stack)
{
if (this.getArmorMaterial() != CyberwareContent.trenchMat)
{
return 16777215;
}
else
{
NBTTagCompound nbttagcompound = stack.getTagCompound();
if (nbttagcompound != null)
{
NBTTagCompound nbttagcompound1 = nbttagcompound.getCompoundTag("display");
if (nbttagcompound1 != null && nbttagcompound1.hasKey("color", 3))
{
return nbttagcompound1.getInteger("color");
}
}
return 0x333333; // 0x664028
}
}
public void removeColor(ItemStack stack)
{
if (this.getArmorMaterial() == CyberwareContent.trenchMat)
{
NBTTagCompound nbttagcompound = stack.getTagCompound();
if (nbttagcompound != null)
{
NBTTagCompound nbttagcompound1 = nbttagcompound.getCompoundTag("display");
if (nbttagcompound1.hasKey("color"))
{
nbttagcompound1.removeTag("color");
}
}
}
}
public void setColor(ItemStack stack, int color)
{
if (this.getArmorMaterial() != CyberwareContent.trenchMat)
{
throw new UnsupportedOperationException("Can\'t dye non-leather!");
}
else
{
NBTTagCompound nbttagcompound = stack.getTagCompound();
|
[
"\t\t\tif (nbttagcompound == null)"
] | 484
|
lcc
|
java
| null |
3acf18da43544d1e056b344459d85b5599253c6ef78403fb
|
|
/*
* JSTools.Parser.DocGenerator.dll / JSTools.net - A framework for JavaScript/ASP.NET applications.
* Copyright (C) 2005 Silvan Gehrig
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*
* Author:
* Silvan Gehrig
*/
using System;
using System.Collections;
using System.Xml;
using JSTools.Parser;
namespace JSTools.Parser.DocGenerator.CommentItems
{
/// <summary>
/// Represents the function comment item.
/// </summary>
internal class FunctionItem : ACommentItem
{
//--------------------------------------------------------------------
// Declarations
//--------------------------------------------------------------------
private const string RETURNS_NODE = "returns";
private const string TYPE_ATTRIB = "type";
private const string ACCESSOR_ATTRIB = "accessor";
private const string MODIFIER_ATTRIB = "modifier";
private const string CLASS_ATTRIB = "class";
private const char RETURN_TYPE_SEPARATOR = '#';
private const string DOC_PREFIX = "M:{0}";
private const string HEADER_PATTERN = "M:{0}.{1}#{2}";
private Accessor _accessor = Accessor.Public;
private MemberModifier _modifier = MemberModifier.None;
private string _class = string.Empty;
private string _type = string.Empty;
private ClassItem _returnType = null;
private XmlNode _returnNode = null;
private string _methodHeader = null;
private string _codeExpression = string.Empty;
private ParamHandler _params = null;
private string _docName = null;
//--------------------------------------------------------------------
// Properties
//--------------------------------------------------------------------
		/// <summary>
		/// Gets the documentation name of the current comment item.
		/// (e.g. M:JSTools.Util.SimpleObjectSerializer(Global.Window.Object)
		/// </summary>
		public override string DocName
		{
			get
			{
				// Lazily computed once from the parameter handler and cached.
				if (_docName == null)
					_docName = _params.GetMethodHeader(MethodHeaderBegin);
				return _docName;
			}
		}
		/// <summary>
		/// Initializes expression information of this comment item.
		/// (e.g. JSTools.Util.SimpleObjectSerializer)
		/// </summary>
		protected override Expression InternalName
		{
			get
			{
				// An explicit "class" attribute overrides the classes inferred
				// from the parent scope.
				return new Expression(
					Context.DefaultType,
					(_class.Length > 0) ? new string[] { _class } : ParentScopeClasses,
					_codeExpression );
			}
		}
		/// <summary>
		/// Gets the leading part of the method's documentation header,
		/// lazily built and cached. With a declared return type the
		/// HEADER_PATTERN ("M:{0}.{1}#{2}") form is used; otherwise the
		/// plain DOC_PREFIX ("M:{0}") form.
		/// </summary>
		private string MethodHeaderBegin
		{
			get
			{
				if (_methodHeader == null)
				{
					// if a return node was specified
					if (_returnType != null)
					{
						_methodHeader = string.Format(
							HEADER_PATTERN,
							ItemName.ToString(true),
							_returnType.ItemName.ToString(false, RETURN_TYPE_SEPARATOR),
							ItemName.Name );
					}
					else
					{
						_methodHeader = string.Format(
							DOC_PREFIX,
							ItemName.FullName );
					}
				}
				return _methodHeader;
			}
		}
//--------------------------------------------------------------------
// Constructors / Destructor
//--------------------------------------------------------------------
		/// <summary>
		/// Creates a new FunctionItem instance and reads the optional
		/// accessor/modifier/class attributes from the comment xml node.
		/// </summary>
		/// <param name="context">Context which contains the type creater and include manager.</param>
		/// <param name="parentScope">Parent item in the code hierarchy (not equal to namespace hierarchy!!).</param>
		/// <param name="commentXmlNode">Specifies the comment xml node which has identified this instance..</param>
		/// <param name="parsedNode">Node which contains the parsed javascript instructions.</param>
		internal FunctionItem(
			CommentItemContext context,
			ACommentItem parentScope,
			XmlNode commentXmlNode,
			INode parsedNode) : base(context, parentScope, commentXmlNode, parsedNode)
		{
			XmlAttribute accessorNode = CommentXmlNode.Attributes[ACCESSOR_ATTRIB];
			XmlAttribute modifierNode = CommentXmlNode.Attributes[MODIFIER_ATTRIB];
			XmlAttribute classNode = CommentXmlNode.Attributes[CLASS_ATTRIB];
			// Invalid accessor/modifier values are deliberately ignored and the
			// defaults (Public / None) kept.
			if (accessorNode != null)
			{
				try { _accessor = (Accessor)Enum.Parse(typeof(Accessor), accessorNode.Value, true); }
				catch { /* ignore exceptions */ }
			}
			if (modifierNode != null)
			{
				try { _modifier = (MemberModifier)Enum.Parse(typeof(MemberModifier), modifierNode.Value, true); }
				catch { /* ignore exceptions */ }
			}
			if (classNode != null)
				_class = classNode.Value;
			_codeExpression = InitNodeExpression();
		}
//--------------------------------------------------------------------
// Events
//--------------------------------------------------------------------
//--------------------------------------------------------------------
// Methods
//--------------------------------------------------------------------
/// <summary>
/// Serializes the current instance into the given xml document.
/// </summary>
/// <param name="serializationContext">Context which is used to serialize the item.</param>
public override void Serialize(CommentItemSerializationContext serializationContext)
{
// create constructors
string[] methodHeaders = _params.GetMethodHeaders(MethodHeaderBegin);
for (int i = 0; i < _params.OverloadingCount; ++i)
{
serializationContext.CreateMember(methodHeaders[i], CommentXmlNode.InnerXml);
for (int j = 0; j < _params.Overloadings[i].Length; ++j)
{
serializationContext.CreateParam(
_params.Overloadings[i][j].Name,
_params.Overloadings[i][j].Comment.InnerXml );
}
serializationContext.EndMember();
}
}
		/// <summary>
		/// Initializes the type associated to this comment item. Runs only
		/// once; subsequent calls are no-ops once IsInitialized is set by
		/// the base implementation.
		/// </summary>
		public override void InitType()
		{
			if (!IsInitialized)
			{
				base.InitType();
				CreateMethods();
				CreateReturnType();
			}
		}
/// <summary>
/// Loads the comment nodes. This method is called after initialize
/// the xml include nodes.
/// </summary>
protected override void LoadCommentNodes()
{
// init remarks, example, exception nodes
base.LoadCommentNodes();
// init param list
_params = new ParamHandler(CommentXmlDocument, Context);
_returnNode = CommentXmlDocument.SelectSingleNode(RETURNS_NODE);
// init return type value
if (_returnNode != null)
{
XmlAttribute typeAttribute = _returnNode.Attributes[TYPE_ATTRIB];
|
[
"\t\t\t\tif (typeAttribute != null)"
] | 715
|
lcc
|
csharp
| null |
65e83568eadd384a035b2d009ca4b6e8962b6a6b11a58326
|
|
//
// TrackBarTest.cs: Test cases for TrackBar.
//
// Author:
// Ritvik Mayank (mritvik@novell.com)
//
// (C) 2005 Novell, Inc. (http://www.novell.com)
//
using System;
using System.Windows.Forms;
using System.Drawing;
using System.Reflection;
using NUnit.Framework;
namespace MonoTests.System.Windows.Forms
{
[TestFixture]
public class TrackBarBaseTest : TestHelper
{
		// Verifies the documented default values of a freshly constructed
		// TrackBar, plus the Text property round-trip.
		[Test]
		public void TrackBarPropertyTest ()
		{
			TrackBar myTrackBar = new TrackBar ();
			// A
			Assert.AreEqual (true, myTrackBar.AutoSize, "#A1");
			// L
			Assert.AreEqual (5, myTrackBar.LargeChange, "#L1");
			// M
			Assert.AreEqual (10, myTrackBar.Maximum, "#M1");
			Assert.AreEqual (0, myTrackBar.Minimum, "#M2");
			// O
			Assert.AreEqual (Orientation.Horizontal, myTrackBar.Orientation, "#O1");
			// S
			Assert.AreEqual (1, myTrackBar.SmallChange, "#S1");
			// T
			Assert.AreEqual (1, myTrackBar.TickFrequency, "#T1");
			Assert.AreEqual (TickStyle.BottomRight, myTrackBar.TickStyle, "#T2");
			Assert.AreEqual ("", myTrackBar.Text, "#T3");
			myTrackBar.Text = "New TrackBar";
			Assert.AreEqual ("New TrackBar", myTrackBar.Text, "#T4");
			// V
			Assert.AreEqual (0, myTrackBar.Value, "#V1");
		}
[Test]
[ExpectedException (typeof (ArgumentOutOfRangeException))]
public void LargeChangeTest ()
{
TrackBar myTrackBar = new TrackBar ();
myTrackBar.LargeChange = -1;
}
[Test]
[ExpectedException (typeof (ArgumentOutOfRangeException))]
public void SmallChangeTest ()
{
TrackBar myTrackBar = new TrackBar ();
myTrackBar.SmallChange = -1;
}
[Test]
public void SetRangeTest ()
{
TrackBar myTrackBar = new TrackBar ();
myTrackBar.SetRange (2,9);
Assert.AreEqual (9, myTrackBar.Maximum, "#setM1");
Assert.AreEqual (2, myTrackBar.Minimum, "#setM2");
}
[Test]
public void ToStringMethodTest ()
{
TrackBar myTrackBar = new TrackBar ();
myTrackBar.Text = "New TrackBar";
Assert.AreEqual ("System.Windows.Forms.TrackBar, Minimum: 0, Maximum: 10, Value: 0", myTrackBar.ToString (), "#T3");
}
		// Changing Orientation must not resize the control before its handle
		// exists; once the handle is created the width/height are swapped or
		// reset to the theme default. NOTE(review): exact expected sizes vary
		// by Windows theme, hence the two-value AreEqual helper below.
		[Test]
		public void OrientationSizeTest ()
		{
			IntPtr handle;
			int width;
			int height ;
			int default_height = 45;
			int default_height2 = 42;
			// No handle yet: orientation change leaves the size untouched.
			using (TrackBar myTrackBar = new TrackBar()) {
				width = myTrackBar.Width;
				height = myTrackBar.Height;
				myTrackBar.Orientation = Orientation.Vertical;
				Assert.AreEqual(width, myTrackBar.Width, "#OS1");
				Assert.AreEqual(height, myTrackBar.Height, "#OS2");
			}
			// With a created handle: width becomes the default thickness and
			// the old width becomes the height.
			using (Form myForm = new Form()) {
				using ( TrackBar myTrackBar = new TrackBar()) {
					width = myTrackBar.Width;
					height = myTrackBar.Height;
					myForm.Controls.Add(myTrackBar);
					handle = myTrackBar.Handle; // causes the handle to be created.
					myTrackBar.Orientation = Orientation.Vertical;
					AreEqual(default_height, default_height2, myTrackBar.Width, "#OS3");
					Assert.AreEqual(width, myTrackBar.Height, "#OS4");
				}
			}
			// An explicitly set width carries over to the height when flipped.
			using (Form myForm = new Form()) {
				using ( TrackBar myTrackBar = new TrackBar()) {
					myForm.Controls.Add(myTrackBar);
					handle = myTrackBar.Handle; // causes the handle to be created.
					myTrackBar.Width = 200;
					myTrackBar.Orientation = Orientation.Vertical;
					Assert.AreEqual(200, myTrackBar.Height, "#OS5");
				}
			}
			Assert.AreEqual(handle, handle, "Removes warning");
		}
private void AreEqual(int expected1, int expected2, int real, string name)
{
// This is needed since the default size vary between XP theme and W2K theme.
if (real != expected1 && real != expected2) {
Assert.Fail("{3}: Expected <{0}> or <{1}>, but was <{2}>", expected1, expected2, real, name);
}
}
[Test]
[Category ("NotWorking")]
public void SizeTestSettingOrientation ()
{
IntPtr handle;
int default_height = 45;
int default_height2 = 42;
using (TrackBar myTrackBar = new TrackBar()) {
myTrackBar.Width = 200;
myTrackBar.Height = 250;
myTrackBar.Orientation = Orientation.Vertical;
Assert.AreEqual(200, myTrackBar.Width, "#SIZE03");
AreEqual(default_height, default_height2, myTrackBar.Height, "#SIZE04");
}
using (TrackBar myTrackBar = new TrackBar()) {
myTrackBar.AutoSize = false;
myTrackBar.Width = 200;
myTrackBar.Height = 250;
myTrackBar.Orientation = Orientation.Vertical;
Assert.AreEqual(200, myTrackBar.Width, "#SIZE07");
Assert.AreEqual(250, myTrackBar.Height, "#SIZE08");
}
using (TrackBar myTrackBar = new TrackBar()) {
myTrackBar.Width = 200;
myTrackBar.Height = 250;
myTrackBar.AutoSize = false;
myTrackBar.Orientation = Orientation.Vertical;
Assert.AreEqual(200, myTrackBar.Width, "#SIZE11");
AreEqual(default_height, default_height2, myTrackBar.Height, "#SIZE12");
}
using (TrackBar myTrackBar = new TrackBar()) {
using (Form myForm = new Form()) {
myForm.Controls.Add(myTrackBar);
myTrackBar.Width = 200;
myTrackBar.Height = 250;
myTrackBar.Orientation = Orientation.Vertical;
handle = myTrackBar.Handle;
AreEqual(default_height, default_height2, myTrackBar.Width, "#SIZE17");
AreEqual(default_height, default_height2, myTrackBar.Height, "#SIZE18");
}
}
using (TrackBar myTrackBar = new TrackBar()) {
using (Form myForm = new Form()) {
myForm.Controls.Add(myTrackBar);
myTrackBar.Width = 200;
myTrackBar.Height = 250;
myTrackBar.Orientation = Orientation.Vertical;
handle = myTrackBar.Handle;
AreEqual(default_height, default_height2, myTrackBar.Width, "#SIZE19");
AreEqual(default_height, default_height2, myTrackBar.Height, "#SIZE20");
}
}
using (TrackBar myTrackBar = new TrackBar()) {
using (Form myForm = new Form()) {
myForm.Controls.Add(myTrackBar);
myTrackBar.Width = 200;
myTrackBar.Height = 250;
myTrackBar.Orientation = Orientation.Vertical;
handle = myTrackBar.Handle;
myTrackBar.Orientation = Orientation.Horizontal;
AreEqual(default_height, default_height2, myTrackBar.Width, "#SIZE23");
AreEqual(default_height, default_height2, myTrackBar.Height, "#SIZE24");
}
}
using (TrackBar myTrackBar = new TrackBar()) {
myTrackBar.AutoSize = false;
myTrackBar.Height = 50;
myTrackBar.Width = 80;
myTrackBar.Orientation = Orientation.Vertical;
myTrackBar.Width = 100;
Assert.AreEqual(50, myTrackBar.Height, "#SIZE2_1");
Assert.AreEqual(100, myTrackBar.Width, "#SIZE2_2");
using (Form myForm = new Form()){
myForm.Controls.Add(myTrackBar);
myForm.Show();
Assert.AreEqual(50, myTrackBar.Height, "#SIZE2_3");
Assert.AreEqual(100, myTrackBar.Width, "#SIZE2_4");
}
}
|
[
"\t\t\tAssert.AreEqual(handle, handle, \"Removes warning\");"
] | 643
|
lcc
|
csharp
| null |
5eb2fab7c77b669d9d20bb243cdb60fddafbb0a6170b9d88
|
|
package rocks.inspectit.server.instrumentation.classcache;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.verifyZeroInteractions;
import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.Callable;
import org.mockito.InjectMocks;
import org.mockito.Matchers;
import org.mockito.Mock;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.slf4j.Logger;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import rocks.inspectit.server.instrumentation.config.ClassCacheSearchNarrower;
import rocks.inspectit.server.instrumentation.config.applier.IInstrumentationApplier;
import rocks.inspectit.shared.all.instrumentation.classcache.AnnotationType;
import rocks.inspectit.shared.all.instrumentation.classcache.ClassType;
import rocks.inspectit.shared.all.instrumentation.classcache.ImmutableClassType;
import rocks.inspectit.shared.all.instrumentation.classcache.InterfaceType;
import rocks.inspectit.shared.all.instrumentation.classcache.MethodType;
import rocks.inspectit.shared.all.instrumentation.classcache.Type;
import rocks.inspectit.shared.all.instrumentation.config.impl.AgentConfig;
import rocks.inspectit.shared.all.instrumentation.config.impl.InstrumentationDefinition;
import rocks.inspectit.shared.all.instrumentation.config.impl.MethodInstrumentationConfig;
import rocks.inspectit.shared.all.testbase.TestBase;
import rocks.inspectit.shared.cs.ci.assignment.AbstractClassSensorAssignment;
@SuppressWarnings({ "all", "unchecked" })
public class ClassCacheInstrumentationTest extends TestBase {
private static final String FQN = "FQN";
@InjectMocks
ClassCacheInstrumentation instrumentation;
@Mock
Logger log;
@Mock
ClassCache classCache;
@Mock
ClassCacheLookup lookup;
@Mock
AgentConfig agentConfiguration;
@Mock
ClassType classType;
@Mock
IInstrumentationApplier instrumentationApplier;
@Mock
ClassCacheSearchNarrower searchNarrower;
@Mock
AbstractClassSensorAssignment<?> assignment;
	@BeforeMethod
	public void setup() throws Exception {
		// The instrumentation resolves types through the cache's lookup service.
		when(classCache.getLookupService()).thenReturn(lookup);
		// Stub both lock methods to invoke the supplied Callable directly, so
		// the guarded code runs in tests without any real locking.
		Answer<Object> callableAnswer = new Answer<Object>() {
			@Override
			public Object answer(InvocationOnMock invocation) throws Throwable {
				Callable<?> callable = (Callable<?>) invocation.getArguments()[0];
				return callable.call();
			}
		};
		doAnswer(callableAnswer).when(classCache).executeWithReadLock(Matchers.<Callable<?>> anyObject());
		doAnswer(callableAnswer).when(classCache).executeWithWriteLock(Matchers.<Callable<?>> anyObject());
		instrumentation.init(classCache);
	}
	// Tests for ClassCacheInstrumentation#addAndGetInstrumentationResult.
	public static class AddAndGetInstrumentationResult extends ClassCacheInstrumentationTest {
		// Uninitialized class types must yield no instrumentation result.
		@Test
		public void notInitialized() {
			when(classType.isInitialized()).thenReturn(false);
			InstrumentationDefinition result = instrumentation.addAndGetInstrumentationResult(classType, agentConfiguration, Collections.singleton(instrumentationApplier));
			assertThat(result, is(nullValue()));
		}
		// If the applier adds no instrumentation points, the result is null.
		@Test
		public void notInstrumented() {
			when(classType.isInitialized()).thenReturn(true);
			when(instrumentationApplier.addInstrumentationPoints(agentConfiguration, classType)).thenReturn(false);
			InstrumentationDefinition result = instrumentation.addAndGetInstrumentationResult(classType, agentConfiguration, Collections.singleton(instrumentationApplier));
			assertThat(result, is(nullValue()));
		}
		// When instrumentation points are added, the returned definition must
		// carry the class FQN and the method instrumentation configs.
		@Test
		public void instrumented() {
			Collection<MethodInstrumentationConfig> configs = mock(Collection.class);
			when(classType.isInitialized()).thenReturn(true);
			when(classType.getFQN()).thenReturn(FQN);
			when(classType.hasInstrumentationPoints()).thenReturn(true);
			when(classType.getInstrumentationPoints()).thenReturn(configs);
			when(instrumentationApplier.addInstrumentationPoints(agentConfiguration, classType)).thenReturn(true);
			InstrumentationDefinition result = instrumentation.addAndGetInstrumentationResult(classType, agentConfiguration, Collections.singleton(instrumentationApplier));
			assertThat(result, is(notNullValue()));
			assertThat(result.getClassName(), is(FQN));
			assertThat(result.getMethodInstrumentationConfigs(), is(configs));
		}
	}
	// Tests for ClassCacheInstrumentation#removeInstrumentationPoints.
	public static class RemoveInstrumentationPoints extends ClassCacheInstrumentationTest {
		// Removing over all cached classes clears each method's config and
		// must run under the write lock.
		@Test
		public void removeAll() throws Exception {
			MethodType methodType = mock(MethodType.class);
			when(classType.isClass()).thenReturn(true);
			when(classType.castToClass()).thenReturn(classType);
			when(classType.isInitialized()).thenReturn(true);
			when(classType.hasInstrumentationPoints()).thenReturn(true);
			when(classType.getMethods()).thenReturn(Collections.singleton(methodType));
			doReturn(Collections.singleton(classType)).when(lookup).findAll();
			instrumentation.removeInstrumentationPoints();
			// must be write lock
			verify(classCache, times(1)).executeWithWriteLock(Matchers.<Callable<?>> any());
			verify(methodType, times(1)).setMethodInstrumentationConfig(null);
		}
		// An empty cache must not even acquire the write lock.
		@Test
		public void removeNothingWhenEmpty() throws Exception {
			doReturn(Collections.emptyList()).when(lookup).findAll();
			instrumentation.removeInstrumentationPoints();
			// not touching the write lock
			verify(classCache, times(0)).executeWithWriteLock(Matchers.<Callable<?>> any());
		}
		// Annotation types carry no instrumentation, so the applier is untouched.
		@Test
		public void removeNothingForAnnotationTypes() throws Exception {
			AnnotationType annotationType = new AnnotationType("");
			instrumentation.removeInstrumentationPoints(Collections.singleton(annotationType), Collections.singleton(instrumentationApplier));
			// must be write lock
			verify(classCache, times(1)).executeWithWriteLock(Matchers.<Callable<?>> any());
			verifyZeroInteractions(instrumentationApplier);
		}
		// Interface types carry no instrumentation, so the applier is untouched.
		@Test
		public void removeNothingForInterfaceTypes() throws Exception {
			InterfaceType interfaceType = new InterfaceType("");
			instrumentation.removeInstrumentationPoints(Collections.singleton(interfaceType), Collections.singleton(instrumentationApplier));
			// must be write lock
			verify(classCache, times(1)).executeWithWriteLock(Matchers.<Callable<?>> any());
			verifyZeroInteractions(instrumentationApplier);
		}
	}
public static class AddInstrumentationPoints extends ClassCacheInstrumentationTest {
@Test
public void add() throws Exception {
when(classType.isClass()).thenReturn(true);
when(classType.castToClass()).thenReturn(classType);
when(classType.isInitialized()).thenReturn(true);
when(instrumentationApplier.addInstrumentationPoints(agentConfiguration, classType)).thenReturn(true);
doReturn(Collections.singleton(classType)).when(lookup).findAll();
Collection<? extends ImmutableClassType> result = instrumentation.addInstrumentationPoints(agentConfiguration, Collections.singleton(instrumentationApplier));
// assert result
assertThat((Collection<ClassType>) result, hasItem(classType));
// must be write lock
verify(classCache, times(1)).executeWithWriteLock(Matchers.<Callable<?>> any());
verify(instrumentationApplier, times(1)).addInstrumentationPoints(agentConfiguration, classType);
verify(instrumentationApplier, times(1)).getSensorAssignment();
verifyNoMoreInteractions(instrumentationApplier);
}
@Test
public void searchNarrowAdd() throws Exception {
when(classType.isClass()).thenReturn(true);
when(classType.castToClass()).thenReturn(classType);
when(classType.isInitialized()).thenReturn(true);
when(instrumentationApplier.addInstrumentationPoints(agentConfiguration, classType)).thenReturn(true);
doReturn(assignment).when(instrumentationApplier).getSensorAssignment();
doReturn(Collections.singleton(classType)).when(searchNarrower).narrowByClassSensorAssignment(classCache, assignment);
Collection<? extends ImmutableClassType> result = instrumentation.addInstrumentationPoints(agentConfiguration, Collections.singleton(instrumentationApplier));
// assert result
assertThat((Collection<ClassType>) result, hasItem(classType));
// must be write lock
verify(classCache, times(1)).executeWithWriteLock(Matchers.<Callable<?>> any());
verify(instrumentationApplier, times(1)).addInstrumentationPoints(agentConfiguration, classType);
verify(instrumentationApplier, times(1)).getSensorAssignment();
verifyNoMoreInteractions(instrumentationApplier);
}
@Test
public void addNothingWhenInstrumenterDoesNotAdd() throws Exception {
when(classType.isClass()).thenReturn(true);
when(classType.castToClass()).thenReturn(classType);
when(classType.isInitialized()).thenReturn(true);
when(instrumentationApplier.addInstrumentationPoints(agentConfiguration, classType)).thenReturn(false);
doReturn(Collections.singleton(classType)).when(lookup).findAll();
Collection<? extends ImmutableClassType> result = instrumentation.addInstrumentationPoints(agentConfiguration, Collections.singleton(instrumentationApplier));
// assert result
assertThat((Collection<ClassType>) result, is(empty()));
// must be write lock
verify(classCache, times(1)).executeWithWriteLock(Matchers.<Callable<?>> any());
verify(instrumentationApplier, times(1)).addInstrumentationPoints(agentConfiguration, classType);
verify(instrumentationApplier, times(1)).getSensorAssignment();
verifyNoMoreInteractions(instrumentationApplier);
}
@Test
public void searchNarrowAddNothingWhenInstrumenterDoesNotAdd() throws Exception {
when(classType.isClass()).thenReturn(true);
when(classType.castToClass()).thenReturn(classType);
when(classType.isInitialized()).thenReturn(true);
when(instrumentationApplier.addInstrumentationPoints(agentConfiguration, classType)).thenReturn(false);
doReturn(assignment).when(instrumentationApplier).getSensorAssignment();
doReturn(Collections.singleton(classType)).when(searchNarrower).narrowByClassSensorAssignment(classCache, assignment);
Collection<? extends ImmutableClassType> result = instrumentation.addInstrumentationPoints(agentConfiguration, Collections.singleton(instrumentationApplier));
// assert result
assertThat((Collection<ClassType>) result, is(empty()));
// must be write lock
verify(classCache, times(1)).executeWithWriteLock(Matchers.<Callable<?>> any());
verify(instrumentationApplier, times(1)).addInstrumentationPoints(agentConfiguration, classType);
verify(instrumentationApplier, times(1)).getSensorAssignment();
verifyNoMoreInteractions(instrumentationApplier);
}
@Test
public void addNothingForNonInitializedType() throws Exception {
when(classType.isInitialized()).thenReturn(false);
doReturn(Collections.singleton(classType)).when(lookup).findAll();
Collection<? extends ImmutableClassType> result = instrumentation.addInstrumentationPoints(agentConfiguration, Collections.singleton(instrumentationApplier));
// assert result
assertThat(result, is(empty()));
// must be write lock
verify(classCache, times(1)).executeWithWriteLock(Matchers.<Callable<?>> any());
verify(instrumentationApplier, times(1)).getSensorAssignment();
verifyNoMoreInteractions(instrumentationApplier);
}
@Test
public void searchNarrowAddNothingForNonInitializedType() throws Exception {
when(classType.isInitialized()).thenReturn(false);
doReturn(assignment).when(instrumentationApplier).getSensorAssignment();
doReturn(Collections.singleton(classType)).when(searchNarrower).narrowByClassSensorAssignment(classCache, assignment);
Collection<? extends ImmutableClassType> result = instrumentation.addInstrumentationPoints(agentConfiguration, Collections.singleton(instrumentationApplier));
// assert result
assertThat(result, is(empty()));
// must be write lock
verify(classCache, times(1)).executeWithWriteLock(Matchers.<Callable<?>> any());
verify(instrumentationApplier, times(1)).getSensorAssignment();
verifyNoMoreInteractions(instrumentationApplier);
}
@Test
public void addNothingWhenEmpty() throws Exception {
doReturn(Collections.emptyList()).when(lookup).findAll();
Collection<? extends ImmutableClassType> result = instrumentation.addInstrumentationPoints(agentConfiguration, Collections.singleton(instrumentationApplier));
// assert result
assertThat(result, is(empty()));
// not touching the write lock
verify(classCache, times(0)).executeWithWriteLock(Matchers.<Callable<?>> any());
verify(instrumentationApplier, times(1)).getSensorAssignment();
verifyNoMoreInteractions(instrumentationApplier);
}
@Test
public void searchNarrowAddNothingWhenEmpty() throws Exception {
doReturn(assignment).when(instrumentationApplier).getSensorAssignment();
doReturn(Collections.emptyList()).when(searchNarrower).narrowByClassSensorAssignment(classCache, assignment);
Collection<? extends ImmutableClassType> result = instrumentation.addInstrumentationPoints(agentConfiguration, Collections.singleton(instrumentationApplier));
// assert result
assertThat(result, is(empty()));
// not touching the write lock
verify(classCache, times(0)).executeWithWriteLock(Matchers.<Callable<?>> any());
verify(instrumentationApplier, times(1)).getSensorAssignment();
verifyNoMoreInteractions(instrumentationApplier);
}
@Test
public void addNothingForNonClassTypes() throws Exception {
AnnotationType annotationType = new AnnotationType("");
|
[
"\t\t\tInterfaceType interfaceType = new InterfaceType(\"\");"
] | 686
|
lcc
|
java
| null |
91bafdb949e83b9df31caa1198f61d205a6b734664bdeee5
|
|
/*******************************************************************************
* Copyright (c) 2012 Secure Software Engineering Group at EC SPRIDE.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the GNU Lesser Public License v2.1
* which accompanies this distribution, and is available at
* http://www.gnu.org/licenses/old-licenses/gpl-2.0.html
*
* Contributors: Christian Fritz, Steven Arzt, Siegfried Rasthofer, Eric
* Bodden, and others.
******************************************************************************/
package soot.jimple.infoflow.data;
import heros.solver.LinkedNode;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import soot.NullType;
import soot.SootMethod;
import soot.Type;
import soot.Unit;
import soot.Value;
import soot.jimple.Stmt;
import soot.jimple.infoflow.InfoflowConfiguration;
import soot.jimple.infoflow.collect.AtomicBitSet;
import soot.jimple.infoflow.collect.ConcurrentHashSet;
import soot.jimple.infoflow.data.AccessPath.ArrayTaintType;
import soot.jimple.infoflow.solver.cfg.IInfoflowCFG.UnitContainer;
import soot.jimple.infoflow.solver.fastSolver.FastSolverLinkedNode;
import soot.jimple.internal.JimpleLocal;
import com.google.common.collect.Sets;
/**
* The abstraction class contains all information that is necessary to track the taint.
*
* @author Steven Arzt
* @author Christian Fritz
*/
public class Abstraction implements Cloneable, FastSolverLinkedNode<Abstraction, Unit>,
LinkedNode<Abstraction> {
private static boolean flowSensitiveAliasing = true;
/**
* the access path contains the currently tainted variable or field
*/
private AccessPath accessPath;
private Abstraction predecessor = null;
private Set<Abstraction> neighbors = null;
private Stmt currentStmt = null;
private Stmt correspondingCallSite = null;
private SourceContext sourceContext = null;
// only used in path generation
private Set<SourceContextAndPath> pathCache = null;
/**
* Unit/Stmt which activates the taint when the abstraction passes it
*/
private Unit activationUnit = null;
/**
* taint is thrown by an exception (is set to false when it reaches the catch-Stmt)
*/
private boolean exceptionThrown = false;
private int hashCode = 0;
/**
* The postdominators we need to pass in order to leave the current conditional
* branch. Do not use the synchronized Stack class here to avoid deadlocks.
*/
private List<UnitContainer> postdominators = null;
private boolean isImplicit = false;
/**
* Only valid for inactive abstractions. Specifies whether an access paths
* has been cut during alias analysis.
*/
private boolean dependsOnCutAP = false;
private AtomicBitSet pathFlags = null;
public Abstraction(AccessPath sourceVal,
Stmt sourceStmt,
Object userData,
boolean exceptionThrown,
boolean isImplicit){
this(sourceVal,
new SourceContext(sourceVal, sourceStmt, userData),
exceptionThrown, isImplicit);
}
protected Abstraction(AccessPath apToTaint,
SourceContext sourceContext,
boolean exceptionThrown,
boolean isImplicit){
this.sourceContext = sourceContext;
this.accessPath = apToTaint;
this.activationUnit = null;
this.exceptionThrown = exceptionThrown;
this.neighbors = null;
this.isImplicit = isImplicit;
this.currentStmt = sourceContext == null ? null : sourceContext.getStmt();
}
/**
* Creates an abstraction as a copy of an existing abstraction,
* only exchanging the access path. -> only used by AbstractionWithPath
* @param p The access path for the new abstraction
* @param original The original abstraction to copy
*/
protected Abstraction(AccessPath p, Abstraction original){
if (original == null) {
sourceContext = null;
exceptionThrown = false;
activationUnit = null;
isImplicit = false;
}
else {
sourceContext = original.sourceContext;
exceptionThrown = original.exceptionThrown;
activationUnit = original.activationUnit;
assert activationUnit == null || flowSensitiveAliasing;
postdominators = original.postdominators == null ? null
: new ArrayList<UnitContainer>(original.postdominators);
dependsOnCutAP = original.dependsOnCutAP;
isImplicit = original.isImplicit;
}
accessPath = p;
neighbors = null;
currentStmt = null;
}
public final Abstraction deriveInactiveAbstraction(Unit activationUnit){
if (!flowSensitiveAliasing) {
assert this.isAbstractionActive();
return this;
}
// If this abstraction is already inactive, we keep it
if (!this.isAbstractionActive())
return this;
Abstraction a = deriveNewAbstractionMutable(accessPath, null);
if (a == null)
return null;
a.postdominators = null;
a.activationUnit = activationUnit;
a.dependsOnCutAP |= a.getAccessPath().isCutOffApproximation();
return a;
}
public Abstraction deriveNewAbstraction(AccessPath p, Stmt currentStmt){
return deriveNewAbstraction(p, currentStmt, isImplicit);
}
public Abstraction deriveNewAbstraction(AccessPath p, Stmt currentStmt,
boolean isImplicit){
// If the new abstraction looks exactly like the current one, there is
// no need to create a new object
if (this.accessPath.equals(p) && this.currentStmt == currentStmt
&& this.isImplicit == isImplicit)
return this;
Abstraction abs = deriveNewAbstractionMutable(p, currentStmt);
if (abs == null)
return null;
abs.isImplicit = isImplicit;
return abs;
}
private Abstraction deriveNewAbstractionMutable(AccessPath p, Stmt currentStmt) {
// An abstraction needs an access path
if (p == null)
return null;
if (this.accessPath.equals(p) && this.currentStmt == currentStmt) {
Abstraction abs = clone();
abs.currentStmt = currentStmt;
return abs;
}
Abstraction abs = new Abstraction(p, this);
abs.predecessor = this;
abs.currentStmt = currentStmt;
if (!abs.getAccessPath().isEmpty())
abs.postdominators = null;
if (!abs.isAbstractionActive())
abs.dependsOnCutAP = abs.dependsOnCutAP || p.isCutOffApproximation();
abs.sourceContext = null;
return abs;
}
public final Abstraction deriveNewAbstraction(Value taint, boolean cutFirstField, Stmt currentStmt,
Type baseType) {
return deriveNewAbstraction(taint, cutFirstField, currentStmt, baseType,
getAccessPath().getArrayTaintType());
}
public final Abstraction deriveNewAbstraction(Value taint, boolean cutFirstField, Stmt currentStmt,
Type baseType, ArrayTaintType arrayTaintType) {
assert !this.getAccessPath().isEmpty();
AccessPath newAP = accessPath.copyWithNewValue(taint, baseType, cutFirstField, true,
arrayTaintType);
if (this.getAccessPath().equals(newAP) && this.currentStmt == currentStmt)
return this;
return deriveNewAbstractionMutable(newAP, currentStmt);
}
/**
* Derives a new abstraction that models the current local being thrown as
* an exception
* @param throwStmt The statement at which the exception was thrown
* @return The newly derived abstraction
*/
public final Abstraction deriveNewAbstractionOnThrow(Stmt throwStmt){
assert !this.exceptionThrown;
Abstraction abs = clone();
abs.currentStmt = throwStmt;
abs.sourceContext = null;
abs.exceptionThrown = true;
return abs;
}
/**
* Derives a new abstraction that models the current local being caught as
* an exception
* @param taint The value in which the tainted exception is stored
* @return The newly derived abstraction
*/
public final Abstraction deriveNewAbstractionOnCatch(Value taint){
assert this.exceptionThrown;
Abstraction abs = deriveNewAbstractionMutable(
AccessPathFactory.v().createAccessPath(taint, true), null);
if (abs == null)
return null;
abs.exceptionThrown = false;
return abs;
}
/**
* Gets the path of statements from the source to the current statement
* with which this abstraction is associated. If this path is ambiguous,
* a single path is selected randomly.
* @return The path from the source to the current statement
*/
public Set<SourceContextAndPath> getPaths() {
return pathCache == null ? null : Collections.unmodifiableSet(pathCache);
}
public Set<SourceContextAndPath> getOrMakePathCache() {
// We're optimistic about having a path cache. If we definitely have one,
// we return it. Otherwise, we need to lock and create one.
if (this.pathCache == null)
synchronized (this) {
if (this.pathCache == null)
this.pathCache = new ConcurrentHashSet<SourceContextAndPath>();
}
return Collections.unmodifiableSet(pathCache);
}
public boolean addPathElement(SourceContextAndPath scap) {
if (this.pathCache == null) {
synchronized (this) {
if (this.pathCache == null) {
this.pathCache = new ConcurrentHashSet<SourceContextAndPath>();
}
}
}
return this.pathCache.add(scap);
}
public void clearPathCache() {
this.pathCache = null;
}
public boolean isAbstractionActive() {
return activationUnit == null;
}
public boolean isImplicit() {
return isImplicit;
}
@Override
public String toString(){
return (isAbstractionActive()?"":"_")+accessPath.toString() + " | "+(activationUnit==null?"":activationUnit.toString()) + ">>";
}
public AccessPath getAccessPath(){
return accessPath;
}
public Unit getActivationUnit(){
return this.activationUnit;
}
public Abstraction getActiveCopy(){
assert !this.isAbstractionActive();
Abstraction a = clone();
a.sourceContext = null;
a.activationUnit = null;
return a;
}
/**
* Gets whether this value has been thrown as an exception
* @return True if this value has been thrown as an exception, otherwise
* false
*/
public boolean getExceptionThrown() {
return this.exceptionThrown;
}
public final Abstraction deriveConditionalAbstractionEnter(UnitContainer postdom,
Stmt conditionalUnit) {
assert this.isAbstractionActive();
if (postdominators != null && postdominators.contains(postdom))
return this;
Abstraction abs = deriveNewAbstractionMutable
(AccessPath.getEmptyAccessPath(), conditionalUnit);
if (abs == null)
return null;
if (abs.postdominators == null)
abs.postdominators = Collections.singletonList(postdom);
else
abs.postdominators.add(0, postdom);
return abs;
}
public final Abstraction deriveConditionalAbstractionCall(Unit conditionalCallSite) {
assert this.isAbstractionActive();
assert conditionalCallSite != null;
Abstraction abs = deriveNewAbstractionMutable
(AccessPath.getEmptyAccessPath(), (Stmt) conditionalCallSite);
if (abs == null)
return null;
// Postdominators are only kept intraprocedurally in order to not
// mess up the summary functions with caller-side information
abs.postdominators = null;
return abs;
}
public final Abstraction dropTopPostdominator() {
if (postdominators == null || postdominators.isEmpty())
return this;
Abstraction abs = clone();
abs.sourceContext = null;
abs.postdominators.remove(0);
return abs;
}
public UnitContainer getTopPostdominator() {
if (postdominators == null || postdominators.isEmpty())
return null;
return this.postdominators.get(0);
}
public boolean isTopPostdominator(Unit u) {
UnitContainer uc = getTopPostdominator();
if (uc == null)
return false;
return uc.getUnit() == u;
}
public boolean isTopPostdominator(SootMethod sm) {
UnitContainer uc = getTopPostdominator();
if (uc == null)
return false;
return uc.getMethod() == sm;
}
@Override
public Abstraction clone() {
Abstraction abs = new Abstraction(accessPath, this);
abs.predecessor = this;
abs.neighbors = null;
abs.currentStmt = null;
abs.correspondingCallSite = null;
assert abs.equals(this);
return abs;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null || getClass() != obj.getClass())
return false;
Abstraction other = (Abstraction) obj;
// If we have already computed hash codes, we can use them for
// comparison
if (this.hashCode != 0
&& other.hashCode != 0
&& this.hashCode != other.hashCode)
return false;
if (accessPath == null) {
if (other.accessPath != null)
return false;
} else if (!accessPath.equals(other.accessPath))
return false;
return localEquals(other);
}
/**
* Checks whether this object locally equals the given object, i.e. the both
* are equal modulo the access path
* @param other The object to compare this object with
* @return True if this object is locally equal to the given one, otherwise
* false
*/
private boolean localEquals(Abstraction other) {
// deliberately ignore prevAbs
if (sourceContext == null) {
if (other.sourceContext != null)
return false;
} else if (!sourceContext.equals(other.sourceContext))
return false;
if (activationUnit == null) {
if (other.activationUnit != null)
return false;
} else if (!activationUnit.equals(other.activationUnit))
return false;
if (this.exceptionThrown != other.exceptionThrown)
return false;
|
[
"\t\tif (postdominators == null) {"
] | 1,449
|
lcc
|
java
| null |
2ea1e8b1be13cbbb6a96058b7c5db07ae3e7e9bfca12207f
|
|
package edu.stanford.nlp.parser.lexparser;
import java.util.regex.Matcher;
/** Does iterative deepening search inside the CKY algorithm for faster
* parsing. This is still guaranteed to find the optimal parse. This
* iterative deepening is only implemented in insideScores().
* Implements the algorithm described in Tsuruoka and Tsujii (2004)
* IJCNLP.
*
* @author Christopher Manning
*/
public class IterativeCKYPCFGParser extends ExhaustivePCFGParser {
private static final float STEP_SIZE = -11.0F; // value suggested in their paper
public IterativeCKYPCFGParser(BinaryGrammar bg, UnaryGrammar ug, Lexicon lex, Options op) {
super(bg, ug, lex, op);
}
/** Fills in the iScore array of each category over each span
* of length 2 or more.
*/
@Override
void doInsideScores() {
float threshold = STEP_SIZE;
while ( ! doInsideScoresHelper(threshold)) {
threshold += STEP_SIZE;
}
}
/** Fills in the iScore array of each category over each spanof length 2
* or more, providing
* a state's probability is greater than a threshold.
*
* @param threshold The threshold up to which to parse as a log
* probability (i.e., a non-positive number)
* @return true iff a parse was found with this threshold or else
* it has been determined that no parse exists.
*/
private boolean doInsideScoresHelper(float threshold) {
boolean prunedSomething = false;
for (int diff = 2; diff <= length; diff++) {
// usually stop one short because boundary symbol only combines
// with whole sentence span
for (int start = 0; start < ((diff == length) ? 1: length - diff); start++) {
if (spillGuts) {
tick("Binaries for span " + diff + "...");
}
int end = start + diff;
if (Test.constraints != null) {
boolean skip = false;
for (Test.Constraint c : Test.constraints) {
if ((start > c.start && start < c.end && end > c.end) || (end > c.start && end < c.end && start < c.start)) {
skip = true;
break;
}
}
if (skip) {
continue;
}
}
for (int leftState = 0; leftState < numStates; leftState++) {
int narrowR = narrowRExtent[start][leftState];
boolean iPossibleL = (narrowR < end); // can this left constituent leave space for a right constituent?
if (!iPossibleL) {
continue;
}
BinaryRule[] leftRules = bg.splitRulesWithLC(leftState);
// if (spillGuts) System.out.println("Found " + leftRules.length + " left rules for state " + stateNumberer.object(leftState));
for (int i = 0; i < leftRules.length; i++) {
// if (spillGuts) System.out.println("Considering rule for " + start + " to " + end + ": " + leftRules[i]);
BinaryRule r = leftRules[i];
int narrowL = narrowLExtent[end][r.rightChild];
boolean iPossibleR = (narrowL >= narrowR); // can this right constituent fit next to the left constituent?
if (!iPossibleR) {
continue;
}
int min1 = narrowR;
int min2 = wideLExtent[end][r.rightChild];
int min = (min1 > min2 ? min1 : min2);
if (min > narrowL) { // can this right constituent stretch far enough to reach the left constituent?
continue;
}
int max1 = wideRExtent[start][leftState];
int max2 = narrowL;
int max = (max1 < max2 ? max1 : max2);
if (min > max) { // can this left constituent stretch far enough to reach the right constituent?
continue;
}
float pS = r.score;
int parentState = r.parent;
float oldIScore = iScore[start][end][parentState];
float bestIScore = oldIScore;
boolean foundBetter; // always set below for this rule
//System.out.println("Min "+min+" max "+max+" start "+start+" end "+end);
if (!Test.lengthNormalization) {
// find the split that can use this rule to make the max score
for (int split = min; split <= max; split++) {
if (Test.constraints != null) {
boolean skip = false;
for (Test.Constraint c : Test.constraints) {
if (((start < c.start && end >= c.end) || (start <= c.start && end > c.end)) && split > c.start && split < c.end) {
skip = true;
break;
}
if ((start == c.start && split == c.end)) {
String tag = (String) stateNumberer.object(leftState);
Matcher m = c.state.matcher(tag);
if (!m.matches()) {
skip = true;
break;
}
}
if ((split == c.start && end == c.end)) {
String tag = (String) stateNumberer.object(r.rightChild);
Matcher m = c.state.matcher(tag);
if (!m.matches()) {
skip = true;
break;
}
}
}
if (skip) {
continue;
}
}
float lS = iScore[start][split][leftState];
if (lS == Float.NEGATIVE_INFINITY) {
continue;
}
float rS = iScore[split][end][r.rightChild];
if (rS == Float.NEGATIVE_INFINITY) {
continue;
}
float tot = pS + lS + rS;
if (tot > bestIScore) {
bestIScore = tot;
}
} // for split point
foundBetter = bestIScore > oldIScore;
} else {
// find split that uses this rule to make the max *length normalized* score
int bestWordsInSpan = wordsInSpan[start][end][parentState];
float oldNormIScore = oldIScore / bestWordsInSpan;
float bestNormIScore = oldNormIScore;
for (int split = min; split <= max; split++) {
float lS = iScore[start][split][leftState];
if (lS == Float.NEGATIVE_INFINITY) {
continue;
}
float rS = iScore[split][end][r.rightChild];
if (rS == Float.NEGATIVE_INFINITY) {
continue;
}
float tot = pS + lS + rS;
int newWordsInSpan = wordsInSpan[start][split][leftState] + wordsInSpan[split][end][r.rightChild];
float normTot = tot / newWordsInSpan;
if (normTot > bestNormIScore) {
bestIScore = tot;
bestNormIScore = normTot;
bestWordsInSpan = newWordsInSpan;
}
} // for split point
foundBetter = bestNormIScore > oldNormIScore;
if (foundBetter && bestIScore > threshold) {
wordsInSpan[start][end][parentState] = bestWordsInSpan;
}
} // fi Test.lengthNormalization
if (foundBetter) {
if (bestIScore > threshold) {
// this way of making "parentState" is better than previous
// and sufficiently good to be stored on this iteration
iScore[start][end][parentState] = bestIScore;
// if (spillGuts) System.out.println("Could build " + stateNumberer.object(parentState) + " from " + start + " to " + end);
if (oldIScore == Float.NEGATIVE_INFINITY) {
if (start > narrowLExtent[end][parentState]) {
narrowLExtent[end][parentState] = start;
wideLExtent[end][parentState] = start;
} else {
if (start < wideLExtent[end][parentState]) {
wideLExtent[end][parentState] = start;
}
}
if (end < narrowRExtent[start][parentState]) {
narrowRExtent[start][parentState] = end;
wideRExtent[start][parentState] = end;
} else {
if (end > wideRExtent[start][parentState]) {
wideRExtent[start][parentState] = end;
}
}
}
} else {
prunedSomething = true;
}
} // end if foundBetter
} // end for leftRules
} // end for leftState
// do right restricted rules
for (int rightState = 0; rightState < numStates; rightState++) {
int narrowL = narrowLExtent[end][rightState];
boolean iPossibleR = (narrowL > start);
if (!iPossibleR) {
continue;
}
BinaryRule[] rightRules = bg.splitRulesWithRC(rightState);
// if (spillGuts) System.out.println("Found " + rightRules.length + " right rules for state " + stateNumberer.object(rightState));
for (int i = 0; i < rightRules.length; i++) {
// if (spillGuts) System.out.println("Considering rule for " + start + " to " + end + ": " + rightRules[i]);
BinaryRule r = rightRules[i];
int narrowR = narrowRExtent[start][r.leftChild];
boolean iPossibleL = (narrowR <= narrowL);
if (!iPossibleL) {
continue;
}
int min1 = narrowR;
|
[
" int min2 = wideLExtent[end][rightState];"
] | 1,079
|
lcc
|
java
| null |
5a8b582269e533bdd34babfe47a607fb7faa5ba65cee7bc8
|
|
# Copyright (C) 2013-2016 2ndQuadrant Italia Srl
#
# This file is part of Barman.
#
# Barman is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Barman is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Barman. If not, see <http://www.gnu.org/licenses/>.
import errno
import os
import select
import sys
from datetime import datetime
from logging import DEBUG, INFO, WARNING
from subprocess import PIPE
import dateutil.tz
import mock
import pytest
from barman import command_wrappers
from barman.command_wrappers import CommandFailedException, StreamLineProcessor
try:
from StringIO import StringIO
except ImportError: # pragma: no cover
from io import StringIO
def _mock_pipe(popen, pipe_processor_loop, ret=0, out='', err=''):
pipe = popen.return_value
pipe.communicate.return_value = (out.encode('utf-8'), err.encode('utf-8'))
pipe.returncode = ret
# noinspection PyProtectedMember
def ppl(processors):
for processor in processors:
if processor.fileno() == pipe.stdout.fileno.return_value:
for line in out.split('\n'):
processor._handler(line)
if processor.fileno() == pipe.stderr.fileno.return_value:
for line in err.split('\n'):
processor._handler(line)
pipe_processor_loop.side_effect = ppl
return pipe
# noinspection PyMethodMayBeStatic
@mock.patch('barman.command_wrappers.Command.pipe_processor_loop')
@mock.patch('barman.command_wrappers.subprocess.Popen')
class TestCommand(object):
def test_simple_invocation(self, popen, pipe_processor_loop):
command = 'command'
ret = 0
out = 'out'
err = 'err'
pipe = _mock_pipe(popen, pipe_processor_loop, ret, out, err)
cmd = command_wrappers.Command(command)
result = cmd()
popen.assert_called_with(
[command], shell=False, env=None,
stdout=PIPE, stderr=PIPE, stdin=PIPE,
preexec_fn=mock.ANY, close_fds=True
)
assert not pipe.stdin.write.called
pipe.stdin.close.assert_called_once_with()
assert result == ret
assert cmd.ret == ret
assert cmd.out == out
assert cmd.err == err
def test_multiline_output(self, popen, pipe_processor_loop):
command = 'command'
ret = 0
out = 'line1\nline2\n'
err = 'err1\nerr2\n'
pipe = _mock_pipe(popen, pipe_processor_loop, ret, out, err)
cmd = command_wrappers.Command(command)
result = cmd()
popen.assert_called_with(
[command], shell=False, env=None,
stdout=PIPE, stderr=PIPE, stdin=PIPE,
preexec_fn=mock.ANY, close_fds=True
)
assert not pipe.stdin.write.called
pipe.stdin.close.assert_called_once_with()
assert result == ret
assert cmd.ret == ret
assert cmd.out == out
assert cmd.err == err
def test_failed_invocation(self, popen, pipe_processor_loop):
command = 'command'
ret = 1
out = 'out'
err = 'err'
pipe = _mock_pipe(popen, pipe_processor_loop, ret, out, err)
cmd = command_wrappers.Command(command)
result = cmd()
popen.assert_called_with(
[command], shell=False, env=None,
stdout=PIPE, stderr=PIPE, stdin=PIPE,
preexec_fn=mock.ANY, close_fds=True
)
assert not pipe.stdin.write.called
pipe.stdin.close.assert_called_once_with()
assert result == ret
assert cmd.ret == ret
assert cmd.out == out
assert cmd.err == err
def test_check_failed_invocation(self, popen, pipe_processor_loop):
command = 'command'
ret = 1
out = 'out'
err = 'err'
pipe = _mock_pipe(popen, pipe_processor_loop, ret, out, err)
cmd = command_wrappers.Command(command, check=True)
with pytest.raises(command_wrappers.CommandFailedException) as excinfo:
cmd()
assert excinfo.value.args[0]['ret'] == ret
assert excinfo.value.args[0]['out'] == out
assert excinfo.value.args[0]['err'] == err
popen.assert_called_with(
[command], shell=False, env=None,
stdout=PIPE, stderr=PIPE, stdin=PIPE,
preexec_fn=mock.ANY, close_fds=True
)
assert not pipe.stdin.write.called
pipe.stdin.close.assert_called_once_with()
assert cmd.ret == ret
assert cmd.out == out
assert cmd.err == err
def test_shell_invocation(self, popen, pipe_processor_loop):
command = 'test -n'
ret = 0
out = 'out'
err = 'err'
pipe = _mock_pipe(popen, pipe_processor_loop, ret, out, err)
cmd = command_wrappers.Command(command, shell=True)
result = cmd('shell test')
popen.assert_called_with(
"test -n 'shell test'", shell=True, env=None,
stdout=PIPE, stderr=PIPE, stdin=PIPE,
preexec_fn=mock.ANY, close_fds=True
)
assert not pipe.stdin.write.called
pipe.stdin.close.assert_called_once_with()
assert result == ret
assert cmd.ret == ret
assert cmd.out == out
assert cmd.err == err
def test_declaration_args_invocation(self, popen, pipe_processor_loop):
command = 'command'
ret = 0
out = 'out'
err = 'err'
pipe = _mock_pipe(popen, pipe_processor_loop, ret, out, err)
cmd = command_wrappers.Command(command, args=['one', 'two'])
result = cmd()
popen.assert_called_with(
[command, 'one', 'two'], shell=False, env=None,
stdout=PIPE, stderr=PIPE, stdin=PIPE,
preexec_fn=mock.ANY, close_fds=True
)
assert not pipe.stdin.write.called
pipe.stdin.close.assert_called_once_with()
assert result == ret
assert cmd.ret == ret
assert cmd.out == out
assert cmd.err == err
def test_call_args_invocation(self, popen, pipe_processor_loop):
command = 'command'
ret = 0
out = 'out'
err = 'err'
pipe = _mock_pipe(popen, pipe_processor_loop, ret, out, err)
cmd = command_wrappers.Command(command)
result = cmd('one', 'two')
popen.assert_called_with(
[command, 'one', 'two'], shell=False, env=None,
stdout=PIPE, stderr=PIPE, stdin=PIPE,
preexec_fn=mock.ANY, close_fds=True
)
assert not pipe.stdin.write.called
pipe.stdin.close.assert_called_once_with()
assert result == ret
assert cmd.ret == ret
assert cmd.out == out
assert cmd.err == err
def test_both_args_invocation(self, popen, pipe_processor_loop):
command = 'command'
ret = 0
out = 'out'
err = 'err'
pipe = _mock_pipe(popen, pipe_processor_loop, ret, out, err)
cmd = command_wrappers.Command(command, args=['a', 'b'])
result = cmd('one', 'two')
popen.assert_called_with(
[command, 'a', 'b', 'one', 'two'], shell=False, env=None,
stdout=PIPE, stderr=PIPE, stdin=PIPE,
preexec_fn=mock.ANY, close_fds=True
)
assert not pipe.stdin.write.called
pipe.stdin.close.assert_called_once_with()
assert result == ret
assert cmd.ret == ret
assert cmd.out == out
assert cmd.err == err
def test_env_invocation(self, popen, pipe_processor_loop):
command = 'command'
ret = 0
out = 'out'
err = 'err'
pipe = _mock_pipe(popen, pipe_processor_loop, ret, out, err)
with mock.patch('os.environ', new={'TEST0': 'VAL0'}):
cmd = command_wrappers.Command(command,
env_append={'TEST1': 'VAL1',
'TEST2': 'VAL2'})
result = cmd()
popen.assert_called_with(
[command], shell=False,
env={'TEST0': 'VAL0', 'TEST1': 'VAL1', 'TEST2': 'VAL2'},
stdout=PIPE, stderr=PIPE, stdin=PIPE,
preexec_fn=mock.ANY, close_fds=True
)
assert not pipe.stdin.write.called
pipe.stdin.close.assert_called_once_with()
assert result == ret
assert cmd.ret == ret
assert cmd.out == out
assert cmd.err == err
def test_path_invocation(self, popen, pipe_processor_loop):
command = 'command'
ret = 0
out = 'out'
err = 'err'
pipe = _mock_pipe(popen, pipe_processor_loop, ret, out, err)
with mock.patch('os.environ', new={'TEST0': 'VAL0'}):
cmd = command_wrappers.Command(command,
path='/path/one:/path/two')
result = cmd()
popen.assert_called_with(
[command], shell=False,
env={'TEST0': 'VAL0', 'PATH': '/path/one:/path/two'},
stdout=PIPE, stderr=PIPE, stdin=PIPE,
preexec_fn=mock.ANY, close_fds=True
)
assert not pipe.stdin.write.called
pipe.stdin.close.assert_called_once_with()
assert result == ret
assert cmd.ret == ret
assert cmd.out == out
assert cmd.err == err
def test_env_path_invocation(self, popen, pipe_processor_loop):
command = 'command'
ret = 0
out = 'out'
err = 'err'
pipe = _mock_pipe(popen, pipe_processor_loop, ret, out, err)
with mock.patch('os.environ', new={'TEST0': 'VAL0'}):
cmd = command_wrappers.Command(command,
path='/path/one:/path/two',
env_append={'TEST1': 'VAL1',
'TEST2': 'VAL2'})
result = cmd()
popen.assert_called_with(
[command], shell=False,
env={'TEST0': 'VAL0', 'TEST1': 'VAL1', 'TEST2': 'VAL2',
'PATH': '/path/one:/path/two'},
stdout=PIPE, stderr=PIPE, stdin=PIPE,
preexec_fn=mock.ANY, close_fds=True
)
assert not pipe.stdin.write.called
pipe.stdin.close.assert_called_once_with()
assert result == ret
assert cmd.ret == ret
assert cmd.out == out
assert cmd.err == err
def test_debug_invocation(self, popen, pipe_processor_loop):
command = 'command'
ret = 1
out = 'out'
err = 'err'
pipe = _mock_pipe(popen, pipe_processor_loop, ret, out, err)
stdout = StringIO()
stderr = StringIO()
with mock.patch.multiple('sys', stdout=stdout, stderr=stderr):
cmd = command_wrappers.Command(command, debug=True)
result = cmd()
popen.assert_called_with(
[command], shell=False, env=None,
stdout=PIPE, stderr=PIPE, stdin=PIPE,
preexec_fn=mock.ANY, close_fds=True
)
assert not pipe.stdin.write.called
pipe.stdin.close.assert_called_once_with()
assert result == ret
assert cmd.ret == ret
assert cmd.out == out
assert cmd.err == err
assert stdout.getvalue() == ""
assert stderr.getvalue() == "Command: ['command']\n" \
"Command return code: 1\n"
def test_getoutput_invocation(self, popen, pipe_processor_loop):
command = 'command'
ret = 0
out = 'out'
err = 'err'
stdin = 'in'
pipe = _mock_pipe(popen, pipe_processor_loop, ret, out, err)
with mock.patch('os.environ', new={'TEST0': 'VAL0'}):
cmd = command_wrappers.Command(command,
env_append={'TEST1': 'VAL1',
'TEST2': 'VAL2'})
result = cmd.getoutput(stdin=stdin)
popen.assert_called_with(
[command], shell=False,
env={'TEST0': 'VAL0', 'TEST1': 'VAL1', 'TEST2': 'VAL2'},
stdout=PIPE, stderr=PIPE, stdin=PIPE,
preexec_fn=mock.ANY, close_fds=True
)
pipe.stdin.write.assert_called_with(stdin)
pipe.stdin.close.assert_called_once_with()
assert result == (out, err)
assert cmd.ret == ret
assert cmd.out == out
assert cmd.err == err
def test_execute_invocation(self, popen, pipe_processor_loop,
caplog):
command = 'command'
ret = 0
out = 'out'
err = 'err'
stdin = 'in'
pipe = _mock_pipe(popen, pipe_processor_loop, ret, out, err)
with mock.patch('os.environ', new={'TEST0': 'VAL0'}):
cmd = command_wrappers.Command(command,
env_append={'TEST1': 'VAL1',
'TEST2': 'VAL2'})
result = cmd.execute(stdin=stdin)
popen.assert_called_with(
[command], shell=False,
env={'TEST0': 'VAL0', 'TEST1': 'VAL1', 'TEST2': 'VAL2'},
stdout=PIPE, stderr=PIPE, stdin=PIPE,
preexec_fn=mock.ANY, close_fds=True
)
pipe.stdin.write.assert_called_with(stdin)
pipe.stdin.close.assert_called_once_with()
assert result == ret
assert cmd.ret == ret
assert cmd.out is None
assert cmd.err is None
assert ('Command', INFO, out) in caplog.record_tuples
assert ('Command', WARNING, err) in caplog.record_tuples
def test_execute_invocation_multiline(self, popen, pipe_processor_loop,
caplog):
command = 'command'
ret = 0
out = 'line1\nline2\n'
err = 'err1\nerr2' # no final newline here
stdin = 'in'
pipe = _mock_pipe(popen, pipe_processor_loop, ret, out, err)
with mock.patch('os.environ', new={'TEST0': 'VAL0'}):
cmd = command_wrappers.Command(command,
env_append={'TEST1': 'VAL1',
'TEST2': 'VAL2'})
result = cmd.execute(stdin=stdin)
popen.assert_called_with(
[command], shell=False,
env={'TEST0': 'VAL0', 'TEST1': 'VAL1', 'TEST2': 'VAL2'},
stdout=PIPE, stderr=PIPE, stdin=PIPE,
preexec_fn=mock.ANY, close_fds=True
)
pipe.stdin.write.assert_called_with(stdin)
pipe.stdin.close.assert_called_once_with()
assert result == ret
assert cmd.ret == ret
assert cmd.out is None
assert cmd.err is None
for line in out.splitlines():
assert ('Command', INFO, line) in caplog.record_tuples
assert ('Command', INFO, '') not in caplog.record_tuples
assert ('Command', INFO, None) not in caplog.record_tuples
for line in err.splitlines():
assert ('Command', WARNING, line) in caplog.record_tuples
assert ('Command', WARNING, '') not in caplog.record_tuples
assert ('Command', WARNING, None) not in caplog.record_tuples
def test_execute_check_failed_invocation(self, popen,
pipe_processor_loop,
caplog):
command = 'command'
ret = 1
out = 'out'
err = 'err'
pipe = _mock_pipe(popen, pipe_processor_loop, ret, out, err)
cmd = command_wrappers.Command(command, check=True)
with pytest.raises(command_wrappers.CommandFailedException) as excinfo:
cmd.execute()
assert excinfo.value.args[0]['ret'] == ret
assert excinfo.value.args[0]['out'] is None
assert excinfo.value.args[0]['err'] is None
popen.assert_called_with(
[command], shell=False, env=None,
stdout=PIPE, stderr=PIPE, stdin=PIPE,
preexec_fn=mock.ANY, close_fds=True
)
assert not pipe.stdin.write.called
pipe.stdin.close.assert_called_once_with()
assert cmd.ret == ret
assert cmd.out is None
assert cmd.err is None
assert ('Command', INFO, out) in caplog.record_tuples
assert ('Command', WARNING, err) in caplog.record_tuples
def test_handlers_multiline(self, popen, pipe_processor_loop, caplog):
command = 'command'
ret = 0
out = 'line1\nline2\n'
err = 'err1\nerr2' # no final newline here
stdin = 'in'
pipe = _mock_pipe(popen, pipe_processor_loop, ret, out, err)
out_list = []
err_list = []
with mock.patch('os.environ', new={'TEST0': 'VAL0'}):
cmd = command_wrappers.Command(command,
env_append={'TEST1': 'VAL1',
'TEST2': 'VAL2'},
out_handler=out_list.append,
err_handler=err_list.append)
result = cmd.execute(stdin=stdin)
popen.assert_called_with(
[command], shell=False,
env={'TEST0': 'VAL0', 'TEST1': 'VAL1', 'TEST2': 'VAL2'},
stdout=PIPE, stderr=PIPE, stdin=PIPE,
preexec_fn=mock.ANY, close_fds=True
)
pipe.stdin.write.assert_called_with(stdin)
pipe.stdin.close.assert_called_once_with()
assert result == ret
assert cmd.ret == ret
assert cmd.out is None
assert cmd.err is None
assert '\n'.join(out_list) == out
assert '\n'.join(err_list) == err
def test_execute_handlers(self, popen, pipe_processor_loop, caplog):
command = 'command'
ret = 0
out = 'out'
err = 'err'
stdin = 'in'
pipe = _mock_pipe(popen, pipe_processor_loop, ret, out, err)
with mock.patch('os.environ', new={'TEST0': 'VAL0'}):
cmd = command_wrappers.Command(command,
env_append={'TEST1': 'VAL1',
'TEST2': 'VAL2'})
result = cmd.execute(
stdin=stdin,
out_handler=cmd.make_logging_handler(INFO, 'out: '),
err_handler=cmd.make_logging_handler(WARNING, 'err: '),
)
popen.assert_called_with(
[command], shell=False,
env={'TEST0': 'VAL0', 'TEST1': 'VAL1', 'TEST2': 'VAL2'},
stdout=PIPE, stderr=PIPE, stdin=PIPE,
preexec_fn=mock.ANY, close_fds=True
)
pipe.stdin.write.assert_called_with(stdin)
pipe.stdin.close.assert_called_once_with()
assert result == ret
assert cmd.ret == ret
assert cmd.out is None
assert cmd.err is None
assert ('Command', INFO, 'out: ' + out) in caplog.record_tuples
assert ('Command', WARNING, 'err: ' + err) in caplog.record_tuples
# noinspection PyMethodMayBeStatic
class TestCommandPipeProcessorLoop(object):
@mock.patch('barman.command_wrappers.select.select')
@mock.patch('barman.command_wrappers.os.read')
def test_ppl(self, read_mock, select_mock):
# Simulate the two files
stdout = mock.Mock(name='pipe.stdout')
stdout.fileno.return_value = 65
stderr = mock.Mock(name='pipe.stderr')
stderr.fileno.return_value = 66
# Recipients for results
out_list = []
err_list = []
# StreamLineProcessors
out_proc = StreamLineProcessor(stdout, out_list.append)
err_proc = StreamLineProcessor(stderr, err_list.append)
# The select call always returns all the streams
select_mock.side_effect = [
[[out_proc, err_proc], [], []],
select.error(errno.EINTR), # Test interrupted system call
[[out_proc, err_proc], [], []],
[[out_proc, err_proc], [], []],
]
# The read calls return out and err interleaved
# Lines are split in various ways, to test all the code paths
read_mock.side_effect = ['line1\nl'.encode('utf-8'),
'err'.encode('utf-8'),
'ine2'.encode('utf-8'),
'1\nerr2\n'.encode('utf-8'),
'', '',
Exception] # Make sure it terminates
command_wrappers.Command.pipe_processor_loop([out_proc, err_proc])
# Check the calls order and the output
assert read_mock.mock_calls == [
mock.call(65, 4096),
mock.call(66, 4096),
mock.call(65, 4096),
mock.call(66, 4096),
mock.call(65, 4096),
mock.call(66, 4096),
]
assert out_list == ['line1', 'line2']
assert err_list == ['err1', 'err2', '']
@mock.patch('barman.command_wrappers.select.select')
def test_ppl_select_failure(self, select_mock):
# Test if select errors are passed through
select_mock.side_effect = select.error('not good')
with pytest.raises(select.error):
command_wrappers.Command.pipe_processor_loop([None])
# noinspection PyMethodMayBeStatic
@mock.patch('barman.command_wrappers.Command.pipe_processor_loop')
@mock.patch('barman.command_wrappers.subprocess.Popen')
class TestRsync(object):
def test_simple_invocation(self, popen, pipe_processor_loop):
ret = 0
out = 'out'
err = 'err'
pipe = _mock_pipe(popen, pipe_processor_loop, ret, out, err)
cmd = command_wrappers.Rsync()
result = cmd('src', 'dst')
popen.assert_called_with(
['rsync', 'src', 'dst'], shell=False, env=None,
stdout=PIPE, stderr=PIPE, stdin=PIPE,
preexec_fn=mock.ANY, close_fds=True
)
assert not pipe.stdin.write.called
pipe.stdin.close.assert_called_once_with()
assert result == ret
assert cmd.ret == ret
assert cmd.out == out
assert cmd.err == err
def test_args_invocation(self, popen, pipe_processor_loop):
ret = 0
out = 'out'
err = 'err'
pipe = _mock_pipe(popen, pipe_processor_loop, ret, out, err)
cmd = command_wrappers.Rsync(args=['a', 'b'])
result = cmd('src', 'dst')
popen.assert_called_with(
['rsync', 'a', 'b', 'src', 'dst'], shell=False, env=None,
stdout=PIPE, stderr=PIPE, stdin=PIPE,
preexec_fn=mock.ANY, close_fds=True
)
assert not pipe.stdin.write.called
pipe.stdin.close.assert_called_once_with()
assert result == ret
assert cmd.ret == ret
assert cmd.out == out
assert cmd.err == err
@mock.patch("barman.utils.which")
def test_custom_ssh_invocation(self, mock_which,
popen, pipe_processor_loop):
ret = 0
out = 'out'
err = 'err'
pipe = _mock_pipe(popen, pipe_processor_loop, ret, out, err)
mock_which.return_value = True
cmd = command_wrappers.Rsync('/custom/rsync', ssh='/custom/ssh',
ssh_options=['-c', 'arcfour'])
result = cmd('src', 'dst')
mock_which.assert_called_with('/custom/rsync', None)
popen.assert_called_with(
['/custom/rsync', '-e', "/custom/ssh '-c' 'arcfour'",
'src', 'dst'],
shell=False, env=None,
stdout=PIPE, stderr=PIPE, stdin=PIPE,
preexec_fn=mock.ANY, close_fds=True
)
assert not pipe.stdin.write.called
pipe.stdin.close.assert_called_once_with()
assert result == ret
assert cmd.ret == ret
assert cmd.out == out
assert cmd.err == err
def test_rsync_build_failure(self, popen, pipe_processor_loop):
"""
Simple test that checks if a CommandFailedException is raised
when Rsync object is build with an invalid path or rsync
is not in system path
"""
# Pass an invalid path to Rsync class constructor.
# Expect a CommandFailedException
with pytest.raises(command_wrappers.CommandFailedException):
command_wrappers.Rsync('/invalid/path/rsync')
# Force the which method to return false, simulating rsync command not
# present in system PATH. Expect a CommandFailedExceptiomn
with mock.patch("barman.utils.which") as mock_which:
mock_which.return_value = False
with pytest.raises(command_wrappers.CommandFailedException):
command_wrappers.Rsync(ssh_options=['-c', 'arcfour'])
def test_protect_ssh_invocation(self, popen, pipe_processor_loop):
ret = 0
out = 'out'
err = 'err'
pipe = _mock_pipe(popen, pipe_processor_loop, ret, out, err)
with mock.patch('os.environ.copy') as which_mock:
which_mock.return_value = {}
cmd = command_wrappers.Rsync(exclude_and_protect=['foo', 'bar'])
result = cmd('src', 'dst')
popen.assert_called_with(
['rsync',
'--exclude=foo', '--filter=P_foo',
'--exclude=bar', '--filter=P_bar',
'src', 'dst'],
shell=False, env=mock.ANY,
stdout=PIPE, stderr=PIPE, stdin=PIPE,
preexec_fn=mock.ANY, close_fds=True
)
assert not pipe.stdin.write.called
pipe.stdin.close.assert_called_once_with()
assert result == ret
assert cmd.ret == ret
assert cmd.out == out
assert cmd.err == err
def test_bwlimit_ssh_invocation(self, popen, pipe_processor_loop):
ret = 0
out = 'out'
err = 'err'
pipe = _mock_pipe(popen, pipe_processor_loop, ret, out, err)
cmd = command_wrappers.Rsync(bwlimit=101)
result = cmd('src', 'dst')
popen.assert_called_with(
['rsync', '--bwlimit=101', 'src', 'dst'],
shell=False, env=None,
stdout=PIPE, stderr=PIPE, stdin=PIPE,
preexec_fn=mock.ANY, close_fds=True
)
assert not pipe.stdin.write.called
pipe.stdin.close.assert_called_once_with()
assert result == ret
assert cmd.ret == ret
assert cmd.out == out
assert cmd.err == err
def test_from_file_list_ssh_invocation(self, popen, pipe_processor_loop):
ret = 0
out = 'out'
err = 'err'
pipe = _mock_pipe(popen, pipe_processor_loop, ret, out, err)
cmd = command_wrappers.Rsync()
result = cmd.from_file_list(['a', 'b', 'c'], 'src', 'dst')
popen.assert_called_with(
['rsync', '--files-from=-', 'src', 'dst'],
shell=False, env=None,
stdout=PIPE, stderr=PIPE, stdin=PIPE,
preexec_fn=mock.ANY, close_fds=True
)
pipe.stdin.write.assert_called_with('a\nb\nc'.encode('UTF-8'))
pipe.stdin.close.assert_called_once_with()
assert result == ret
assert cmd.ret == ret
assert cmd.out == out
assert cmd.err == err
def test_invocation_list_file(self, popen, pipe_processor_loop):
"""
Unit test for dateutil package in list_file
This test cover all list_file's code with correct parameters
:param tmpdir: temporary folder
:param popen: mock popen
"""
# variables to be tested
ret = 0
out = 'drwxrwxrwt 69632 2015/02/09 15:01:00 tmp\n' \
'drwxrwxrwt 69612 2015/02/19 15:01:22 tmp2'
err = 'err'
# created mock pipe
pipe = _mock_pipe(popen, pipe_processor_loop, ret, out, err)
# created rsync and launched list_files
cmd = command_wrappers.Rsync()
return_values = list(cmd.list_files('some/path'))
# returned list must contain two elements
assert len(return_values) == 2
# assert call
popen.assert_called_with(
['rsync', '--no-human-readable', '--list-only', '-r', 'some/path'],
shell=False, env=None,
stdout=PIPE, stderr=PIPE, stdin=PIPE,
preexec_fn=mock.ANY, close_fds=True
)
# Rsync pipe must be called with no input
assert not pipe.stdin.write.called
pipe.stdin.close.assert_called_once_with()
# assert tmp and tmp2 in test_list
assert return_values[0] == cmd.FileItem(
'drwxrwxrwt',
69632,
datetime(year=2015, month=2, day=9,
hour=15, minute=1, second=0,
tzinfo=dateutil.tz.tzlocal()),
'tmp')
assert return_values[1] == cmd.FileItem(
'drwxrwxrwt',
69612,
datetime(year=2015, month=2, day=19,
hour=15, minute=1, second=22,
tzinfo=dateutil.tz.tzlocal()),
'tmp2')
# noinspection PyMethodMayBeStatic
@mock.patch('barman.command_wrappers.Command.pipe_processor_loop')
@mock.patch('barman.command_wrappers.subprocess.Popen')
class TestRsyncPgdata(object):
def test_simple_invocation(self, popen, pipe_processor_loop):
ret = 0
out = 'out'
err = 'err'
pipe = _mock_pipe(popen, pipe_processor_loop, ret, out, err)
cmd = command_wrappers.RsyncPgData()
result = cmd('src', 'dst')
popen.assert_called_with(
[
|
[
" 'rsync', '-rLKpts', '--delete-excluded', '--inplace',"
] | 2,433
|
lcc
|
python
| null |
8e994637773ecb1aa1a46681e23f58b9b6e946d95aa0e329
|
|
/*
Copyright (C) 2014-2019 de4dot@gmail.com
This file is part of dnSpy
dnSpy is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
dnSpy is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with dnSpy. If not, see <http://www.gnu.org/licenses/>.
*/
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.ComponentModel.Composition;
using System.Diagnostics;
using System.Linq;
using dnSpy.Contracts.Controls.ToolWindows;
using dnSpy.Contracts.Debugger;
using dnSpy.Contracts.Debugger.Evaluation;
using dnSpy.Contracts.Debugger.Text;
using dnSpy.Contracts.MVVM;
using dnSpy.Contracts.Settings.AppearanceCategory;
using dnSpy.Contracts.Text;
using dnSpy.Contracts.Text.Classification;
using dnSpy.Contracts.ToolWindows.Search;
using dnSpy.Debugger.Properties;
using dnSpy.Debugger.UI;
using dnSpy.Debugger.UI.Wpf;
using Microsoft.VisualStudio.Text.Classification;
namespace dnSpy.Debugger.ToolWindows.Threads {
interface IThreadsVM : IGridViewColumnDescsProvider {
bool IsOpen { get; set; }
bool IsVisible { get; set; }
BulkObservableCollection<ThreadVM> AllItems { get; }
ObservableCollection<ThreadVM> SelectedItems { get; }
void ResetSearchSettings();
string GetSearchHelpText();
IEnumerable<ThreadVM> Sort(IEnumerable<ThreadVM> threads);
}
[Export(typeof(IThreadsVM))]
sealed class ThreadsVM : ViewModelBase, IThreadsVM, ILazyToolWindowVM, IComparer<ThreadVM> {
public BulkObservableCollection<ThreadVM> AllItems { get; }
public ObservableCollection<ThreadVM> SelectedItems { get; }
public GridViewColumnDescs Descs { get; }
public bool IsOpen {
get => lazyToolWindowVMHelper.IsOpen;
set => lazyToolWindowVMHelper.IsOpen = value;
}
public bool IsVisible {
get => lazyToolWindowVMHelper.IsVisible;
set => lazyToolWindowVMHelper.IsVisible = value;
}
IEditValueProvider NameEditValueProvider {
get {
threadContext.UIDispatcher.VerifyAccess();
if (nameEditValueProvider is null)
nameEditValueProvider = editValueProviderService.Create(ContentTypes.ThreadsWindowName, Array.Empty<string>());
return nameEditValueProvider;
}
}
IEditValueProvider? nameEditValueProvider;
public object ProcessCollection => processes;
readonly ObservableCollection<SimpleProcessVM> processes;
public object? SelectedProcess {
get => selectedProcess;
set {
if (selectedProcess != value) {
selectedProcess = (SimpleProcessVM?)value;
OnPropertyChanged(nameof(SelectedProcess));
FilterList_UI(filterText, selectedProcess);
}
}
}
SimpleProcessVM? selectedProcess;
public string FilterText {
get => filterText;
set {
if (filterText == value)
return;
filterText = value;
OnPropertyChanged(nameof(FilterText));
FilterList_UI(filterText, selectedProcess);
}
}
string filterText = string.Empty;
public bool SomethingMatched => !nothingMatched;
public bool NothingMatched {
get => nothingMatched;
set {
if (nothingMatched == value)
return;
nothingMatched = value;
OnPropertyChanged(nameof(NothingMatched));
OnPropertyChanged(nameof(SomethingMatched));
}
}
bool nothingMatched;
sealed class ProcessState {
/// <summary>
/// Set to true when <see cref="DbgProcess.DelayedIsRunningChanged"/> gets raised
/// and cleared when the process is paused.
/// </summary>
public bool IgnoreThreadsChangedEvent { get; set; }
}
readonly Lazy<DbgManager> dbgManager;
readonly Lazy<DbgLanguageService> dbgLanguageService;
readonly ThreadContext threadContext;
readonly ThreadFormatterProvider threadFormatterProvider;
readonly DebuggerSettings debuggerSettings;
readonly ThreadCategoryService threadCategoryService;
readonly EditValueProviderService editValueProviderService;
readonly LazyToolWindowVMHelper lazyToolWindowVMHelper;
readonly List<ThreadVM> realAllItems;
int threadOrder;
[ImportingConstructor]
ThreadsVM(Lazy<DbgManager> dbgManager, Lazy<DbgLanguageService> dbgLanguageService, DebuggerSettings debuggerSettings, UIDispatcher uiDispatcher, ThreadFormatterProvider threadFormatterProvider, IClassificationFormatMapService classificationFormatMapService, ITextBlockContentInfoFactory textBlockContentInfoFactory, ThreadCategoryService threadCategoryService, EditValueProviderService editValueProviderService) {
uiDispatcher.VerifyAccess();
realAllItems = new List<ThreadVM>();
AllItems = new BulkObservableCollection<ThreadVM>();
SelectedItems = new ObservableCollection<ThreadVM>();
processes = new ObservableCollection<SimpleProcessVM>();
this.dbgManager = dbgManager;
this.dbgLanguageService = dbgLanguageService;
this.threadFormatterProvider = threadFormatterProvider;
this.debuggerSettings = debuggerSettings;
lazyToolWindowVMHelper = new DebuggerLazyToolWindowVMHelper(this, uiDispatcher, dbgManager);
this.threadCategoryService = threadCategoryService;
this.editValueProviderService = editValueProviderService;
var classificationFormatMap = classificationFormatMapService.GetClassificationFormatMap(AppearanceCategoryConstants.UIMisc);
threadContext = new ThreadContext(uiDispatcher, classificationFormatMap, textBlockContentInfoFactory, new SearchMatcher(searchColumnDefinitions), threadFormatterProvider.Create()) {
SyntaxHighlight = debuggerSettings.SyntaxHighlight,
UseHexadecimal = debuggerSettings.UseHexadecimal,
DigitSeparators = debuggerSettings.UseDigitSeparators,
FullString = debuggerSettings.FullString,
};
Descs = new GridViewColumnDescs {
Columns = new GridViewColumnDesc[] {
new GridViewColumnDesc(ThreadsWindowColumnIds.Icon, string.Empty),
new GridViewColumnDesc(ThreadsWindowColumnIds.ThreadID, dnSpy_Debugger_Resources.Column_ThreadID),
new GridViewColumnDesc(ThreadsWindowColumnIds.ThreadManagedId, dnSpy_Debugger_Resources.Column_ThreadManagedId),
new GridViewColumnDesc(ThreadsWindowColumnIds.ThreadCategory, dnSpy_Debugger_Resources.Column_ThreadCategory),
new GridViewColumnDesc(ThreadsWindowColumnIds.Name, dnSpy_Debugger_Resources.Column_Name),
new GridViewColumnDesc(ThreadsWindowColumnIds.ThreadLocation, dnSpy_Debugger_Resources.Column_ThreadLocation),
new GridViewColumnDesc(ThreadsWindowColumnIds.ThreadPriority, dnSpy_Debugger_Resources.Column_ThreadPriority),
new GridViewColumnDesc(ThreadsWindowColumnIds.ThreadAffinityMask, dnSpy_Debugger_Resources.Column_ThreadAffinityMask),
new GridViewColumnDesc(ThreadsWindowColumnIds.ThreadSuspendedCount, dnSpy_Debugger_Resources.Column_ThreadSuspendedCount),
new GridViewColumnDesc(ThreadsWindowColumnIds.ProcessName, dnSpy_Debugger_Resources.Column_ProcessName),
new GridViewColumnDesc(ThreadsWindowColumnIds.AppDomain, dnSpy_Debugger_Resources.Column_AppDomain),
new GridViewColumnDesc(ThreadsWindowColumnIds.ThreadState, dnSpy_Debugger_Resources.Column_ThreadState),
},
};
Descs.SortedColumnChanged += (a, b) => SortList();
}
// Don't change the order of these instances without also updating input passed to SearchMatcher.IsMatchAll()
static readonly SearchColumnDefinition[] searchColumnDefinitions = new SearchColumnDefinition[] {
new SearchColumnDefinition(PredefinedTextClassifierTags.ThreadsWindowId, "i", dnSpy_Debugger_Resources.Column_ThreadID),
new SearchColumnDefinition(PredefinedTextClassifierTags.ThreadsWindowManagedId, "m", dnSpy_Debugger_Resources.Column_ThreadManagedId),
new SearchColumnDefinition(PredefinedTextClassifierTags.ThreadsWindowCategoryText, "cat", dnSpy_Debugger_Resources.Column_ThreadCategory),
new SearchColumnDefinition(PredefinedTextClassifierTags.ThreadsWindowName, "n", dnSpy_Debugger_Resources.Column_Name),
new SearchColumnDefinition(PredefinedTextClassifierTags.ThreadsWindowLocation, "o", dnSpy_Debugger_Resources.Column_ThreadLocation),
new SearchColumnDefinition(PredefinedTextClassifierTags.ThreadsWindowPriority, "pri", dnSpy_Debugger_Resources.Column_ThreadPriority),
new SearchColumnDefinition(PredefinedTextClassifierTags.ThreadsWindowAffinityMask, "a", dnSpy_Debugger_Resources.Column_ThreadAffinityMask),
new SearchColumnDefinition(PredefinedTextClassifierTags.ThreadsWindowSuspended, "sc", dnSpy_Debugger_Resources.Column_ThreadSuspendedCount),
new SearchColumnDefinition(PredefinedTextClassifierTags.ThreadsWindowProcess, "p", dnSpy_Debugger_Resources.Column_ProcessName),
new SearchColumnDefinition(PredefinedTextClassifierTags.ThreadsWindowAppDomain, "ad", dnSpy_Debugger_Resources.Column_AppDomain),
new SearchColumnDefinition(PredefinedTextClassifierTags.ThreadsWindowUserState, "s", dnSpy_Debugger_Resources.Column_ThreadState),
};
// UI thread
public string GetSearchHelpText() {
threadContext.UIDispatcher.VerifyAccess();
return threadContext.SearchMatcher.GetHelpText();
}
// random thread
void DbgThread(Action callback) =>
dbgManager.Value.Dispatcher.BeginInvoke(callback);
// UI thread
void ILazyToolWindowVM.Show() {
threadContext.UIDispatcher.VerifyAccess();
InitializeDebugger_UI(enable: true);
}
// UI thread
void ILazyToolWindowVM.Hide() {
threadContext.UIDispatcher.VerifyAccess();
InitializeDebugger_UI(enable: false);
}
// UI thread
void InitializeDebugger_UI(bool enable) {
threadContext.UIDispatcher.VerifyAccess();
if (processes.Count == 0)
InitializeProcesses_UI();
ResetSearchSettings();
if (enable) {
threadContext.ClassificationFormatMap.ClassificationFormatMappingChanged += ClassificationFormatMap_ClassificationFormatMappingChanged;
debuggerSettings.PropertyChanged += DebuggerSettings_PropertyChanged;
threadContext.UIVersion++;
RecreateFormatter_UI();
threadContext.SyntaxHighlight = debuggerSettings.SyntaxHighlight;
threadContext.UseHexadecimal = debuggerSettings.UseHexadecimal;
threadContext.DigitSeparators = debuggerSettings.UseDigitSeparators;
threadContext.FullString = debuggerSettings.FullString;
}
else {
processes.Clear();
threadContext.ClassificationFormatMap.ClassificationFormatMappingChanged -= ClassificationFormatMap_ClassificationFormatMappingChanged;
debuggerSettings.PropertyChanged -= DebuggerSettings_PropertyChanged;
}
DbgThread(() => InitializeDebugger_DbgThread(enable));
}
// UI thread
void InitializeProcesses_UI() {
threadContext.UIDispatcher.VerifyAccess();
if (processes.Count != 0)
return;
processes.Add(new SimpleProcessVM(dnSpy_Debugger_Resources.Threads_AllProcesses));
SelectedProcess = processes[0];
}
// DbgManager thread
void InitializeDebugger_DbgThread(bool enable) {
dbgManager.Value.Dispatcher.VerifyAccess();
if (enable) {
dbgManager.Value.ProcessesChanged += DbgManager_ProcessesChanged;
dbgManager.Value.CurrentThreadChanged += DbgManager_CurrentThreadChanged;
dbgManager.Value.DelayedIsRunningChanged += DbgManager_DelayedIsRunningChanged;
dbgLanguageService.Value.LanguageChanged += DbgLanguageService_LanguageChanged;
var threads = new List<DbgThread>();
var processes = dbgManager.Value.Processes;
foreach (var p in processes) {
InitializeProcess_DbgThread(p);
if (!p.IsRunning)
threads.AddRange(p.Threads);
foreach (var r in p.Runtimes) {
InitializeRuntime_DbgThread(r);
foreach (var a in r.AppDomains)
InitializeAppDomain_DbgThread(a);
}
}
if (threads.Count > 0 || processes.Length > 0) {
UI(() => {
AddItems_UI(threads);
AddItems_UI(processes);
});
}
}
else {
dbgManager.Value.ProcessesChanged -= DbgManager_ProcessesChanged;
dbgManager.Value.CurrentThreadChanged -= DbgManager_CurrentThreadChanged;
dbgManager.Value.DelayedIsRunningChanged -= DbgManager_DelayedIsRunningChanged;
dbgLanguageService.Value.LanguageChanged -= DbgLanguageService_LanguageChanged;
foreach (var p in dbgManager.Value.Processes) {
DeinitializeProcess_DbgThread(p);
foreach (var r in p.Runtimes) {
DeinitializeRuntime_DbgThread(r);
foreach (var a in r.AppDomains)
DeinitializeAppDomain_DbgThread(a);
}
}
UI(() => RemoveAllThreads_UI());
}
}
// DbgManager thread
void DbgLanguageService_LanguageChanged(object? sender, DbgLanguageChangedEventArgs e) => UI(() => RefreshLanguageFields_UI());
// DbgManager thread
void DbgManager_DelayedIsRunningChanged(object? sender, EventArgs e) {
// If all processes are running and the window is hidden, hide it now
if (!IsVisible)
UI(() => lazyToolWindowVMHelper.TryHideWindow());
}
// DbgManager thread
void InitializeProcess_DbgThread(DbgProcess process) {
process.DbgManager.Dispatcher.VerifyAccess();
var state = process.GetOrCreateData<ProcessState>();
state.IgnoreThreadsChangedEvent = process.IsRunning;
process.IsRunningChanged += DbgProcess_IsRunningChanged;
process.DelayedIsRunningChanged += DbgProcess_DelayedIsRunningChanged;
process.ThreadsChanged += DbgProcess_ThreadsChanged;
process.RuntimesChanged += DbgProcess_RuntimesChanged;
}
// DbgManager thread
void DeinitializeProcess_DbgThread(DbgProcess process) {
process.DbgManager.Dispatcher.VerifyAccess();
|
[
"\t\t\tprocess.IsRunningChanged -= DbgProcess_IsRunningChanged;"
] | 924
|
lcc
|
csharp
| null |
55b5cf622c6554fcf71e7f540ab814974112cbd9bdc49443
|
|
/*
* AsoBrain 3D Toolkit
* Copyright (C) 1999-2016 Peter S. Heijnen
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package ab.j3d.yafaray;
import java.io.*;
import java.util.*;
import ab.j3d.*;
import ab.j3d.appearance.*;
import ab.j3d.awt.view.*;
import ab.j3d.geom.*;
import ab.j3d.model.*;
import ab.xml.*;
import org.jetbrains.annotations.*;
/**
* Writes a YafaRay scene.
*
* <p>
* Documentation about the YafaRay XML-format is limited, but some useful
* references are:
* <ul>
* <li><a href="http://www.yafaray.org/development/documentation/XMLspecs">YafaRay XML scene specifications</a></li>
* <li><a href="http://www.yafaray.org/development/documentation/XMLparameters">YafaRay XML scene parameters</a></li>
* </ul>
* </p>
*
* @author G. Meinders
*/
public class YafaRayWriter
{
/**
* XML writer to be used.
*/
private final XMLWriter _writer;
/**
* Maps appearances to YafaRay material identifiers.
*/
private final Map<Appearance, String> _appearanceMap = new HashMap<Appearance, String>();
/**
* Texture library.
*/
private TextureLibrary _textureLibrary;
/**
* Index used to generate unique material names.
*/
private int _materialIndex = 0;
/**
* Index used to generate unique light names.
*/
public int _lightIndex = 0;
/**
* Width of the image.
*/
private int _width = 1024;
/**
* Height of the image.
*/
private int _height = 768;
/**
* Camera location.
*/
private Vector3D _cameraFrom;
/**
* Camera target.
*/
private Vector3D _cameraTo;
/**
* Constructs a new instance.
*
* @param out Output stream to write to.
*
* @throws XMLException if no {@link XMLWriter} can be created.
*/
public YafaRayWriter( final OutputStream out, final TextureLibrary textureLibrary )
throws XMLException
{
final XMLWriterFactory writerFactory = XMLWriterFactory.newInstance();
writerFactory.setIndenting( true );
_writer = writerFactory.createXMLWriter( out, "UTF-8" );
_textureLibrary = textureLibrary;
}
/**
* Sets the size of the image to be rendered.
*
* @param width Width of the image.
* @param height Height of the image.
*/
public void setOutputSize( final int width, final int height )
{
_width = width;
_height = height;
}
/**
* Sets the location and target of the camera.
*
* @param from Location of the camera.
* @param to Target that the camera is pointed at.
*/
public void setCamera( final Vector3D from, final Vector3D to )
{
_cameraFrom = from;
_cameraTo = to;
}
/**
* Writes an YafaRay scene specification for the given scene.
*
* @param scene Scene to be written.
*
* @throws XMLException if an XML-related exception occurs.
*/
public void write( final Scene scene )
throws XMLException
{
final XMLWriter writer = _writer;
writer.startDocument();
writer.startTag( null, "scene" );
writer.attribute( null, "type", "triangle" );
scene.walk( new Node3DVisitor()
{
public boolean visitNode( @NotNull final Node3DPath path )
{
final Node3D node = path.getNode();
try
{
if ( node instanceof Object3D )
{
final Object3D object = (Object3D)node;
for ( final FaceGroup faceGroup : object.getFaceGroups() )
{
final Appearance appearance = faceGroup.getAppearance();
String identifier = _appearanceMap.get( appearance );
if ( identifier== null )
{
identifier = writeMaterial( appearance );
_appearanceMap.put( appearance, identifier );
}
}
writeMesh( object, path.getTransform() );
}
else if ( node instanceof Light3D )
{
final Light3D light = (Light3D)node;
final Matrix3D transform = path.getTransform();
writer.startTag( null, "light" );
writer.attribute( null, "name", "light" + _lightIndex++ );
writeValue( "type", "spherelight" );
// writeValue( "type", "pointlight" );
writeColor( "color", (double)light.getDiffuseRed(), (double)light.getDiffuseGreen(), (double)light.getDiffuseBlue() );
writePoint( "from", transform.getTranslation() );
final double radius = 0.1;
final double power = 1.0;
writeFloat( "power", power / ( radius * radius ) );
writeFloat( "radius", radius );
writeInteger( "samples", 16 );
/*
<light name="Lamp.001">
<color r="1" g="1" b="1" a="1"/>
<corner x="-0.25" y="-0.25" z="1.99646"/>
<from x="0" y="0" z="1.99646"/>
<point1 x="-0.25" y="0.25" z="1.99646"/>
<point2 x="0.25" y="-0.25" z="1.99646"/>
<power fval="5"/>
<samples ival="16"/>
<type sval="arealight"/>
*/
writer.endTag( null, "light" );
}
}
catch ( XMLException e )
{
throw new RuntimeException( e );
}
return true;
}
} );
writer.startTag( null, "camera" );
final String cameraName = "camera0";
writer.attribute( null, "name", cameraName );
writeValue( "type", "perspective" );
writePoint( "from", _cameraFrom );
writePoint( "to", _cameraTo );
final Vector3D cameraDirection = _cameraFrom.directionTo( _cameraTo );
final Vector3D left = Vector3D.cross( cameraDirection, Vector3D.POSITIVE_Z_AXIS.multiply( 1.0 / 0.001 ) );
final Vector3D up = Vector3D.cross( left, cameraDirection );
writePoint( "up", _cameraFrom.plus( up ) );
writeInteger( "resx", _width );
writeInteger( "resy", _height );
writer.endTag( null, "camera" );
/*
<camera name="cam">
...
<aperture fval="0"/>
<bokeh_rotation fval="0"/>
<bokeh_type sval="disk1"/>
<dof_distance fval="0"/>
<focal fval="1.37374"/>
</camera>
*/
final Vector3D sunDirection = Vector3D.normalize( -1.0, -0.5, 3.5 );
// Vector3D sunDirection = Vector3D.normalize( 1.0, 0.0, 2.0 );
// Vector3D sunDirection = Vector3D.normalize( -0.5, 1.0, -2.0 );
writer.startTag( null, "light" );
writer.attribute( null, "name", "light" + _lightIndex++ );
writeValue( "type", "sunlight" );
writeFloat( "angle", 0.5 );
writeColor( "color", 1.0, 1.0, 1.0 );
writeVector( "direction", sunDirection );
writeFloat( "power", 1.0 );
// writeInteger( "samples", 16 );
writer.endTag( null, "light" );
/*
final String backgroundName = "background0";
writer.startTag( null, "background" );
writer.attribute( null, "name", backgroundName );
writeValue( "type", "constant" );
writeColor( "color", 1.0, 1.0, 1.0 );
writer.endTag( null, "background" );
*/
final String backgroundName = "background0";
writer.startTag( null, "background" );
writer.attribute( null, "name", backgroundName );
writeValue( "type", "sunsky" );
writeVector( "from", sunDirection );
writer.endTag( null, "background" );
/*
<integrator name="default">
<bounces ival="3"/>
<caustic_mix ival="5"/>
<diffuseRadius fval="1"/>
<fg_bounces ival="3"/>
<fg_samples ival="32"/>
<raydepth ival="4"/>
<search ival="150"/>
<shadowDepth ival="2"/>
<show_map bval="false"/>
<transpShad bval="false"/>
<use_background bval="false"/>
</integrator>
*/
final String integratorName = "integrator0";
writer.startTag( null, "integrator" );
writer.attribute( null, "name", integratorName );
switch ( 2 )
{
case 0:
{
writeValue( "type", "directlighting" );
break;
}
case 1:
{
writeValue( "type", "pathtracing" );
break;
}
case 2:
{
writeValue( "type", "photonmapping" );
// writeInteger( "search", 160 );
writeInteger( "photons", 200000 );
writeBoolean( "finalGather", true );
writeInteger( "fg_samples", 64 );
writeBoolean( "use_background", false );
// writeBoolean( "show_map", true );
break;
}
}
writer.endTag( null, "integrator" );
final String volumeIntegratorName = "integrator1";
writer.startTag( null, "integrator" );
writer.attribute( null, "name", volumeIntegratorName );
writeValue( "type", "none" );
writer.endTag( null, "integrator" );
writer.startTag( null, "render" );
writeValue( "camera_name", cameraName );
writeValue( "background_name", backgroundName );
writeValue( "integrator_name", integratorName );
writeValue( "volintegrator_name", volumeIntegratorName );
writeInteger( "threads", Math.max( 1, Runtime.getRuntime().availableProcessors() - 1 ) );
writeFloat( "gamma", 2.2 );
writeInteger( "width", _width );
writeInteger( "height", _height );
writeInteger( "xstart", 0 );
writeInteger( "ystart", 0 );
// ``writeValue( "filter_type", "mitchell" );
writeInteger( "AA_inc_samples", 2 );
writeInteger( "AA_minsamples", 2 );
writeInteger( "AA_passes", 2 );
writeFloat( "AA_pixelwidth", 1.5 );
writeFloat( "AA_threshold", 0.05 );
/*
<background_name sval="world_background"/>
<clamp_rgb bval="true"/>
<filter_type sval="mitchell"/>
<integrator_name sval="default"/>
<volintegrator_name sval="volintegr"/>
<z_channel bval="true"/>
*/
writer.endTag( null, "render" );
writer.endTag( null, "scene" );
writer.endDocument();
writer.flush();
}
/**
 * Writes a YafaRay 'shinydiffusemat' material representing the given
 * appearance. When the appearance has a color map backed by an actual
 * file, an image texture and a texture-mapper shader node are written
 * as well and wired up as the material's diffuse shader.
 *
 * @param appearance Appearance to be written.
 *
 * @return Name of the YafaRay material.
 * @throws XMLException if an XML-related exception occurs.
 */
private String writeMaterial( final Appearance appearance )
    throws XMLException
{
    // Running index keeps material/texture/mapper names unique per scene.
    final int materialIndex = _materialIndex++;
    final String name = "material" + materialIndex;
    String textureMapperName = null;
    String textureName = null;
    final XMLWriter writer = _writer;
    final TextureMap colorMap = appearance.getColorMap();
    if ( colorMap != null )
    {
        // Only write a texture element when the color map resolves to a
        // real file in the texture library.
        final File textureFile = _textureLibrary.getFile( colorMap );
        if ( textureFile != null )
        {
            textureName = "texture" + materialIndex;
            /*
             * Example of a full YafaRay texture element; only 'type' and
             * 'filename' are written here, other parameters use defaults:
             *   <texture name="t1">
             *     <clipping sval="repeat"/> <gamma fval="2"/>
             *     <type sval="image"/> <filename sval="..."/> ...
             *   </texture>
             */
            writer.startTag( null, "texture" );
            writer.attribute( null, "name", textureName );
            writeValue( "type", "image" );
            writeValue( "filename", textureFile.toString() );
            writer.endTag( null, "texture" );
        }
    }
    writer.startTag( null, "material" );
    writer.attribute( null, "name", name );
    writeValue( "type", "shinydiffusemat" );
    writeColor( "color", appearance.getDiffuseColor() );
    // YafaRay transparency is the inverse of the diffuse color's alpha.
    writeFloat( "transparency", 1.0 - (double)appearance.getDiffuseColor().getAlphaFloat() );
    if ( appearance.getDiffuseColor().getAlphaFloat() < 0.5f )
    {
        // NOTE(review): 1520.0 looks like a typo for 1.52 (typical glass
        // index of refraction) -- confirm against the renderer's output.
        writeFloat( "IOR", 1520.0 );
    }
    if ( colorMap != null )
    {
        // NOTE(review): if the color map had no backing file, textureName
        // is still null here and the mapper references a "null" texture --
        // verify whether this case can occur in practice.
        textureMapperName = "textureMapper" + materialIndex;
        writer.startTag( null, "list_element" );
        writeValue( "element", "shader_node" );
        writeValue( "type", "texture_mapper" );
        writeValue( "name", textureMapperName );
        writeValue( "texture", textureName );
        writeValue( "texco", "uv" );
        writer.endTag( null, "list_element" );
    }
    if ( textureMapperName != null )
    {
        writeValue( "diffuse_shader", textureMapperName );
    }
    if ( appearance.getReflectionMap() != null )
    {
        writeColor( "mirror_color", appearance.getReflectionColor() );
        // Average of min/max reflectivity as a single specular amount.
        writeFloat( "specular_reflect", (double)( appearance.getReflectionMin() + appearance.getReflectionMax() ) / 2.0 );
    }
    writer.endTag( null, "material" );
    return name;
}
/**
 * Writes a YafaRay mesh for the given object. All vertices of all faces
 * are written first (each with a 'uv' element), then the triangles of
 * each face's tessellation, indexed into that flat vertex list.
 *
 * @param object Object to be written.
 * @param objectToScene Transforms the object into scene coordinates.
 *
 * @throws XMLException if an XML-related exception occurs.
 */
private void writeMesh( final Object3D object, final Matrix3D objectToScene )
    throws XMLException
{
    // First pass: count vertices and triangles, since the 'mesh' element
    // declares both totals as attributes up front.
    int vertexCount = 0;
    int triangleCount = 0;
    final List<FaceGroup> faceGroups = object.getFaceGroups();
    for ( final FaceGroup faceGroup : faceGroups )
    {
        for ( final Face3D face : faceGroup.getFaces() )
        {
            vertexCount += face.getVertexCount();
            final Tessellation tessellation = face.getTessellation();
            for ( final TessellationPrimitive primitive : tessellation.getPrimitives() )
            {
                triangleCount += primitive.getTriangles().length / 3;
            }
        }
    }
    final XMLWriter writer = _writer;
    writer.startTag( null, "mesh" );
    writer.attribute( null, "vertices", String.valueOf( vertexCount ) );
    writer.attribute( null, "faces", String.valueOf( triangleCount ) );
    writer.attribute( null, "has_orco", String.valueOf( false ) );
    writer.attribute( null, "has_uv", String.valueOf( true ) );
    writer.attribute( null, "type", String.valueOf( 0 ) );
    // Second pass: emit geometry. 'vertexIndex' is the offset of the
    // current face's first vertex in the flat vertex list, so the
    // per-primitive triangle indices (which are face-local) can be
    // rebased to mesh-global indices.
    int vertexIndex = 0;
    for ( final FaceGroup faceGroup : faceGroups )
    {
        final String materialName = _appearanceMap.get( faceGroup.getAppearance() );
        writeValue( "set_material", materialName );
        for ( final Face3D face : faceGroup.getFaces() )
        {
            final List<Vertex3D> vertices = face.getVertices();
            for ( final Vertex3D vertex : vertices )
            {
                writePoint( "p", objectToScene.transform( vertex.point ) );
                // One 'uv' per vertex; NaN coordinates (no mapping) fall
                // back to 0 so the uv list stays aligned with vertices.
                writer.emptyTag( null, "uv" );
                writer.attribute( null, "u", String.valueOf( Float.isNaN( vertex.colorMapU ) ? 0.0f : vertex.colorMapU ) );
                writer.attribute( null, "v", String.valueOf( Float.isNaN( vertex.colorMapV ) ? 0.0f : vertex.colorMapV ) );
                writer.endTag( null, "uv" );
            }
            final Tessellation tessellation = face.getTessellation();
            for ( final TessellationPrimitive primitive : tessellation.getPrimitives() )
            {
                final int[] triangles = primitive.getTriangles();
                for ( int i = 0; i < triangles.length; i += 3 )
                {
                    final int a = triangles[ i ];
                    final int b = triangles[ i + 1 ];
                    final int c = triangles[ i + 2 ];
                    writer.emptyTag( null, "f" );
                    writer.attribute( null, "a", String.valueOf( vertexIndex + a ) );
                    writer.attribute( null, "b", String.valueOf( vertexIndex + b ) );
                    writer.attribute( null, "c", String.valueOf( vertexIndex + c ) );
                    // uv indices mirror the vertex indices because exactly
                    // one uv was written per vertex above.
                    writer.attribute( null, "uv_a", String.valueOf( vertexIndex + a ) );
                    writer.attribute( null, "uv_b", String.valueOf( vertexIndex + b ) );
                    writer.attribute( null, "uv_c", String.valueOf( vertexIndex + c ) );
                    writer.endTag( null, "f" );
                }
            }
            vertexIndex += vertices.size();
        }
    }
    writer.endTag( null, "mesh" );
}
/**
 * Writes a parameter element with an integer value, i.e.
 * {@code <name ival="..."/>}.
 *
 * @param name Name of the parameter.
 * @param value Value of the parameter.
 *
 * @throws XMLException if an XML-related exception occurs.
 */
private void writeInteger( final String name, final int value )
    throws XMLException
{
    _writer.emptyTag( null, name );
    _writer.attribute( null, "ival", Integer.toString( value ) );
    _writer.endTag( null, name );
}
/**
 * Writes a parameter element with a boolean value, i.e.
 * {@code <name bval="..."/>}.
 *
 * @param name Name of the parameter.
 * @param value Value of the parameter.
 *
 * @throws XMLException if an XML-related exception occurs.
 */
private void writeBoolean( final String name, final boolean value )
    throws XMLException
{
    _writer.emptyTag( null, name );
    _writer.attribute( null, "bval", Boolean.toString( value ) );
    _writer.endTag( null, name );
}
/**
 * Writes a parameter element with a string value, i.e.
 * {@code <name sval="..."/>}.
 *
 * @param name Name of the parameter.
 * @param value Value of the parameter.
 *
 * @throws XMLException if an XML-related exception occurs.
 */
private void writeValue( final String name, final String value )
    throws XMLException
{
    _writer.emptyTag( null, name );
    _writer.attribute( null, "sval", value );
    _writer.endTag( null, name );
}
/**
 * Writes a parameter element with a float value, i.e.
 * {@code <name fval="..."/>}.
 *
 * @param name Name of the parameter.
 * @param value Value of the parameter.
 *
 * @throws XMLException if an XML-related exception occurs.
 */
private void writeFloat( final String name, final double value )
    throws XMLException
{
    _writer.emptyTag( null, name );
    _writer.attribute( null, "fval", Double.toString( value ) );
    _writer.endTag( null, name );
}
/**
 * Writes a parameter with a point value. The value typically represents a
 * point in space, and is scaled by 0.001 to account for scene scale
 * (presumably converting millimeter scene units to the meters expected by
 * the renderer -- TODO confirm).
 *
 * @param name Name of the parameter.
 * @param x X-coordinate of the point.
 * @param y Y-coordinate of the point.
 * @param z Z-coordinate of the point.
 *
 * @throws XMLException if an XML-related exception occurs.
 */
private void writePoint( final String name, final double x, final double y, final double z )
    throws XMLException
{
    final XMLWriter writer = _writer;
    writer.emptyTag( null, name );
    writer.attribute( null, "x", String.valueOf( 0.001 * x ) );
    writer.attribute( null, "y", String.valueOf( 0.001 * y ) );
    writer.attribute( null, "z", String.valueOf( 0.001 * z ) );
    writer.endTag( null, name );
}
/**
 * Writes a parameter with a point value. The value typically represents a
 * point in space, and may be transformed to account for scene scale (see
 * the three-argument overload, which this delegates to).
 *
 * @param name Name of the parameter.
 * @param value Value of the parameter.
 *
 * @throws XMLException if an XML-related exception occurs.
 */
private void writePoint( final String name, final Vector3D value )
    throws XMLException
{
    writePoint( name, value.x, value.y, value.z );
}
/**
 * Writes a parameter with a vector value. This is typically a unit vector,
 * and as such no scene-scale transformation is applied (unlike
 * {@code writePoint}).
 *
 * @param name Name of the parameter.
 * @param value Value of the parameter.
 *
 * @throws XMLException if an XML-related exception occurs.
 */
private void writeVector( final String name, final Vector3D value )
    throws XMLException
{
    _writer.emptyTag( null, name );
    _writer.attribute( null, "x", Double.toString( value.x ) );
    _writer.attribute( null, "y", Double.toString( value.y ) );
    _writer.attribute( null, "z", Double.toString( value.z ) );
    _writer.endTag( null, name );
}
/**
* Writes a parameter with a color value.
*
* @param name Name of the parameter.
* @param r Red-component of the color.
* @param g Green-component of the color.
* @param b Blue-component of the color.
*
* @throws XMLException if an XML-related exception occurs.
*/
private void writeColor( final String name, final double r, final double g, final double b )
throws XMLException
{
final XMLWriter writer = _writer;
writer.emptyTag( null, name );
writer.attribute( null, "r", String.valueOf( r ) );
writer.attribute( null, "g", String.valueOf( g ) );
|
[
"\t\twriter.attribute( null, \"b\", String.valueOf( b ) );"
] | 2,472
|
lcc
|
java
| null |
98f9620b662d7e8b7c7e7545d6c341f8a167b3d6925ee74a
|
|
package com.servinglynk.hmis.warehouse.model.v2016;
import java.io.Serializable;
import java.time.LocalDateTime;
import java.util.Collections;
import java.util.Map;
import java.util.WeakHashMap;
import javax.persistence.Basic;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.Table;
import javax.persistence.Transient;
import org.hibernate.annotations.Type;
import org.hibernate.proxy.HibernateProxy;
import com.servinglynk.hmis.warehouse.enums.ContactLocationEnum;
import com.servinglynk.hmis.warehouse.model.EnrollmentSharingModel;
/**
* Object mapping for hibernate-handled table: contact.
*
*
* @author autogenerated
*/
@Entity(name = "contact_v2016")
@Table(name = "contact", catalog = "hmis", schema = "v2016")
public class Contact extends HmisBaseModel implements Cloneable, Serializable,EnrollmentSharingModel {
/** Serial Version UID. */
private static final long serialVersionUID = -4922450713586410718L;
/** Maps persisted IDs to hash codes handed out before the entity was saved
    (see setId). Use a WeakHashMap so entries will be garbage collected once
    all entities referring to a saved hash are garbage collected themselves. */
private static final Map<Serializable, java.util.UUID> SAVED_HASHES =
    Collections.synchronizedMap(new WeakHashMap<Serializable, java.util.UUID>());
/** hashCode temporary storage; consulted by setId() to keep hash codes
    stable across the persistence boundary. */
private volatile java.util.UUID hashCode;
/** Field mapping (column: contact_date). */
private LocalDateTime contactDate;
/** Field mapping (column: contact_location). */
private ContactLocationEnum contactLocation;
/** Field mapping (column: enrollmentid, many-to-one). */
private Enrollment enrollmentid;
/** Field mapping (primary key column: id). */
private java.util.UUID id;
/**
 * Default constructor, mainly for hibernate use.
 */
public Contact() {
    // Default constructor
}
/** Constructor taking a given ID.
 * @param id primary key value to set
 */
public Contact(java.util.UUID id) {
    this.id = id;
}
/** Return the concrete type of this class. Useful when dealing with
 * Hibernate proxies, whose runtime class is a generated subclass.
 * @return Defining class.
 */
@Transient
public Class<?> getClassType() {
    return Contact.class;
}
/**
 * Return the value associated with the column: contactDate.
 * @return A LocalDateTime object (this.contactDate), may be null.
 */
@Type(type="org.jadira.usertype.dateandtime.threeten.PersistentLocalDateTime")
@Basic( optional = true )
@Column( name = "contact_date" )
public LocalDateTime getContactDate() {
    return this.contactDate;
}
/**
 * Set the value related to the column: contactDate.
 * @param contactDate the contactDate value you wish to set
 */
public void setContactDate(final LocalDateTime contactDate) {
    this.contactDate = contactDate;
}
/**
 * Return the value associated with the column: contactLocation.
 * @return A ContactLocationEnum object (this.contactLocation), may be null.
 */
@Type(type = "com.servinglynk.hmis.warehouse.enums.ContactLocationEnumType")
@Basic( optional = true )
@Column( name = "contact_location" )
public ContactLocationEnum getContactLocation() {
    return this.contactLocation;
}
/**
 * Set the value related to the column: contactLocation.
 * @param contactLocation the contactLocation value you wish to set
 */
public void setContactLocation(final ContactLocationEnum contactLocation) {
    this.contactLocation = contactLocation;
}
/**
 * Return the value associated with the column: enrollmentid.
 * Lazily fetched; cascades persist/merge/save-update to the Enrollment.
 * @return A Enrollment object (this.enrollmentid), may be null.
 */
@ManyToOne( cascade = { CascadeType.PERSIST, CascadeType.MERGE }, fetch = FetchType.LAZY )
@org.hibernate.annotations.Cascade({org.hibernate.annotations.CascadeType.SAVE_UPDATE})
@Basic( optional = true )
@JoinColumn(name = "enrollmentid", nullable = true )
public Enrollment getEnrollmentid() {
    return this.enrollmentid;
}
/**
 * Set the value related to the column: enrollmentid.
 * @param enrollmentid the enrollmentid value you wish to set
 */
public void setEnrollmentid(final Enrollment enrollmentid) {
    this.enrollmentid = enrollmentid;
}
/**
 * Return the value associated with the column: id.
 * @return A java.util.UUID object (this.id), the primary key.
 */
@Id
@Basic( optional = false )
@Column( name = "id", nullable = false ) @org.hibernate.annotations.Type(type="org.hibernate.type.PostgresUUIDType")
public java.util.UUID getId() {
    return this.id;
}
/**
 * Set the value related to the column: id.
 * @param id the id value you wish to set
 */
public void setId(final java.util.UUID id) {
    // If we've just been persisted and hashCode has been returned, record
    // the hash under the new ID so other entity instances loaded with this
    // ID later report the same (already published) hash code.
    if ( (this.id == null ) &&
        (id != null) &&
        (this.hashCode != null) ) {
        SAVED_HASHES.put( id, this.hashCode );
    }
    this.id = id;
}
/** Field mapping (column: export_id, many-to-one). */
protected Export export;
/**
 * Return the value associated with the column: export.
 * Lazily fetched; cascades persist/merge/save-update to the Export.
 * @return A Export object (this.export), may be null.
 */
@ManyToOne( cascade = { CascadeType.PERSIST, CascadeType.MERGE }, fetch = FetchType.LAZY )
@org.hibernate.annotations.Cascade({org.hibernate.annotations.CascadeType.SAVE_UPDATE})
@Basic( optional = true )
@JoinColumn(name = "export_id", nullable = true )
public Export getExport() {
    return this.export;
}
/**
 * Set the value related to the column: export.
 * @param export the export value you wish to set
 */
public void setExport(final Export export) {
    this.export = export;
}
/**
 * Field-wise copy of this entity. Note that associated entities
 * (enrollmentid, export) are copied by reference, not cloned, so the
 * copy is shallow with respect to associations.
 * @return cloned object
 * @throws CloneNotSupportedException on error
 */
@Override
public Contact clone() throws CloneNotSupportedException {
    final Contact copy = (Contact)super.clone();
    copy.setContactDate(this.getContactDate());
    copy.setContactLocation(this.getContactLocation());
    copy.setDateCreated(this.getDateCreated());
    copy.setDateCreatedFromSource(this.getDateCreatedFromSource());
    copy.setDateUpdated(this.getDateUpdated());
    copy.setDateUpdatedFromSource(this.getDateUpdatedFromSource());
    copy.setDeleted(this.isDeleted());
    copy.setEnrollmentid(this.getEnrollmentid());
    copy.setExport(this.getExport());
    copy.setId(this.getId());
    copy.setParentId(this.getParentId());
    copy.setProjectGroupCode(this.getProjectGroupCode());
    copy.setSync(this.isSync());
    copy.setUserId(this.getUserId());
    copy.setVersion(this.getVersion());
    return copy;
}
/** Provides toString implementation.
 * Uses StringBuilder instead of the legacy synchronized StringBuffer:
 * the buffer is method-local, so no synchronization is needed, and the
 * values are appended directly instead of being concatenated first
 * (avoids building intermediate strings). Output is unchanged.
 * @see java.lang.Object#toString()
 * @return String representation of this class.
 */
@Override
public String toString() {
    final StringBuilder sb = new StringBuilder();
    sb.append("contactDate: ").append(this.getContactDate()).append(", ");
    sb.append("contactLocation: ").append(this.getContactLocation()).append(", ");
    sb.append("dateCreated: ").append(this.getDateCreated()).append(", ");
    sb.append("dateCreatedFromSource: ").append(this.getDateCreatedFromSource()).append(", ");
    sb.append("dateUpdated: ").append(this.getDateUpdated()).append(", ");
    sb.append("dateUpdatedFromSource: ").append(this.getDateUpdatedFromSource()).append(", ");
    sb.append("deleted: ").append(this.isDeleted()).append(", ");
    sb.append("id: ").append(this.getId()).append(", ");
    sb.append("parentId: ").append(this.getParentId()).append(", ");
    sb.append("projectGroupCode: ").append(this.getProjectGroupCode()).append(", ");
    sb.append("sync: ").append(this.isSync()).append(", ");
    sb.append("userId: ").append(this.getUserId()).append(", ");
    sb.append("version: ").append(this.getVersion());
    return sb.toString();
}
/** Equals implementation.
* @see java.lang.Object#equals(java.lang.Object)
* @param aThat Object to compare with
* @return true/false
*/
@Override
public boolean equals(final Object aThat) {
Object proxyThat = aThat;
if ( this == aThat ) {
return true;
}
|
[
"\t\tif (aThat instanceof HibernateProxy) {"
] | 839
|
lcc
|
java
| null |
68ec6af65d2a91e6853c589c87fadc19a538ca14d6f249ae
|
|
import argparse
import numpy as np
import scipy.linalg
import scipy.spatial as spatial
import scipy.sparse.linalg as spla
import subprocess
# Prefer the built-in pickle; fall back to legacy cPickle spellings for
# old environments. Catch ImportError specifically: the original bare
# 'except:' would also swallow SystemExit/KeyboardInterrupt.
try:
    import pickle as cpickle
except ImportError:
    try:
        import cpickle
    except ImportError:
        import _pickle as cpickle
from functools import partial
import sys
import time
import copy
import scipy.sparse as sp
from sksparse.cholmod import cholesky
#import matplotlib.pyplot as plt
# Find project functions: the helper modules may live several directory
# levels up from the working directory, so walk upward, appending one
# parent directory at a time, until the imports succeed.
found_functions = False
path_to_append = ''
sys.path.append('../../')
while found_functions is False:
    try:
        from Lub_Solver import Lub_Solver as LS
        from stochastic_forcing import stochastic_forcing as stochastic
        from mobility import mobility as mb
        from body import body
        from read_input import read_input
        from read_input import read_vertex_file
        from read_input import read_clones_file
        from read_input import read_slip_file
        import general_application_utils
        import multi_bodies_functions
        found_functions = True
    except ImportError:
        # Not found at this depth -- add another parent directory and retry.
        path_to_append += '../'
        print('searching functions in path ', path_to_append)
        sys.path.append(path_to_append)
        # 21 characters of '../' == seven levels; assume the layout is wrong.
        if len(path_to_append) > 21:
            print('\nProjected functions not found. Edit path in multi_bodies.py')
            sys.exit()
if __name__ == '__main__':
# Get command line arguments
parser = argparse.ArgumentParser(description='Run a multi-body simulation and save trajectory.')
parser.add_argument('--input-file', dest='input_file', type=str, default='data.main', help='name of the input file')
parser.add_argument('--print-residual', action='store_true', help='print gmres and lanczos residuals')
args = parser.parse_args()
input_file = args.input_file
# Read input file
read = read_input.ReadInput(input_file)
# Set some variables for the simulation
eta = read.eta
a = read.blob_radius
output_name = read.output_name
structures = read.structures
print(structures)
structures_ID = read.structures_ID
# Copy input file to output
subprocess.call(["cp", input_file, output_name + '.inputfile'])
# Set random generator state
if read.random_state is not None:
with open(read.random_state, 'rb') as f:
np.random.set_state(cpickle.load(f))
elif read.seed is not None:
np.random.seed(int(read.seed))
# Save random generator state
with open(output_name + '.random_state', 'wb') as f:
cpickle.dump(np.random.get_state(), f)
# Create rigid bodies
bodies = []
body_types = []
body_names = []
for ID, structure in enumerate(structures):
print('Creating structures = ', structure[1])
# Read vertex and clones files
struct_ref_config = read_vertex_file.read_vertex_file(structure[0])
num_bodies_struct, struct_locations, struct_orientations = read_clones_file.read_clones_file(structure[1])
# Read slip file if it exists
slip = None
if(len(structure) > 2):
slip = read_slip_file.read_slip_file(structure[2])
body_types.append(num_bodies_struct)
body_names.append(structures_ID[ID])
# Create each body of type structure
for i in range(num_bodies_struct):
b = body.Body(struct_locations[i], struct_orientations[i], struct_ref_config, a)
b.ID = structures_ID[ID]
# Calculate body length for the RFD
if i == 0:
b.calc_body_length()
else:
b.body_length = bodies[-1].body_length
# Append bodies to total bodies list
bodies.append(b)
bodies = np.array(bodies)
# Set some more variables
num_of_body_types = len(body_types)
num_bodies = bodies.size
num_particles = len(bodies)
Nblobs = sum([x.Nblobs for x in bodies])
cutoff = read.Lub_Cut
#L = read.periodic_length
phi=0.4
Lphi = np.sqrt(np.pi*(a**2)*num_particles/phi)
L = np.array([Lphi, Lphi, 0])
n_steps = read.n_steps
n_save = read.n_save
dt = read.dt
print(L)
for b in bodies:
for i in range(3):
if L[i] > 0:
while b.location[i] < 0:
b.location[i] += L[i]
while b.location[i] > L[i]:
b.location[i] -= L[i]
firm_delta = read.firm_delta
debye_length_delta = 2.0*a*firm_delta/np.log(1.0e1)
repulsion_strength_delta = read.repulsion_strength_firm
LSolv = LS(bodies,a,eta,cutoff,L,debye_length=firm_delta)
LSolv.dt = dt
LSolv.kT = read.kT
LSolv.tolerance = read.solver_tolerance
multi_bodies_functions.calc_blob_blob_forces = multi_bodies_functions.set_blob_blob_forces(read.blob_blob_force_implementation)
multi_bodies_functions.calc_body_body_forces_torques = multi_bodies_functions.set_body_body_forces_torques(read.body_body_force_torque_implementation)
import time
t0 = time.time()
LSolv.Set_R_Mats()
dt1 = time.time() - t0
print(("Make R mats time : %s" %dt1))
Omega = 9.0*2.0*np.pi
total_rej = 0
for n in range(n_steps):
print(n)
FT_calc = partial(multi_bodies_functions.force_torque_calculator_sort_by_bodies,
g = read.g,
repulsion_strength_firm = repulsion_strength_delta,
debye_length_firm = debye_length_delta,
firm_delta = firm_delta,
repulsion_strength_wall = read.repulsion_strength_wall,
debye_length_wall = read.debye_length_wall,
repulsion_strength = read.repulsion_strength,
debye_length = read.debye_length,
periodic_length = L,
omega = 0, #Omega ############## CHANGE ME TO ZERO FOR CONST OMEGA AND TO 'Omega' FOR CONST TORQUE
eta = eta,
a = a)
Torque_Lim = 1.9904
Output_Vel = True
t0 = time.time()
reject_wall, reject_jump, Trap_vel_t = LSolv.Update_Bodies_Trap(FT_calc,Omega=Omega,Out_Torque=Output_Vel, Cut_Torque=Torque_Lim)
dt1 = time.time() - t0
### Update rollers with const. omega and no torque limitaion
#Output_Vel = False
#t0 = time.time()
#reject_wall, reject_jump = LSolv.Update_Bodies_Trap(FT_calc,Omega=Omega)
#dt1 = time.time() - t0
### Update rollers with const. torque (ALSO MAKE CHANGE ON LINE 169 in FT_calc)
#Output_Vel = False
#t0 = time.time()
#reject_wall, reject_jump = LSolv.Update_Bodies_Trap(FT_calc)
#dt1 = time.time() - t0
print(("walltime for time step : %s" %dt1))
print(("Number of rejected timesteps wall: %s" %LSolv.num_rejections_wall))
print(("Number of rejected timesteps jump: %s" %LSolv.num_rejections_jump))
if n % n_save == 0:
print(("SAVING CONFIGURATION : %s" %n))
if (reject_wall+reject_jump) == 0:
body_offset = 0
for i, ID in enumerate(structures_ID):
name = output_name + '.' + ID + '.config'
if n == 0:
status = 'w'
else:
status = 'a'
with open(name, status) as f_ID:
f_ID.write(str(body_types[i]) + '\n')
for j in range(body_types[i]):
orientation = bodies[body_offset + j].orientation.entries
f_ID.write('%s %s %s %s %s %s %s\n' % (bodies[body_offset + j].location[0],
bodies[body_offset + j].location[1],
bodies[body_offset + j].location[2],
orientation[0],
orientation[1],
orientation[2],
orientation[3]))
body_offset += body_types[i]
##########################
if Output_Vel:
body_offset = 0
for i, ID in enumerate(structures_ID):
name = output_name + '.' + ID + '.Torque'
if n == 0:
status = 'w'
else:
status = 'a'
with open(name, status) as f_ID:
f_ID.write(str(body_types[i]) + '\n')
for j in range(body_types[i]):
t = Trap_vel_t[3*(body_offset+j):3*(body_offset+j)+3]
f_ID.write('%s %s %s\n' % (t[0],
t[1],
t[2]))
body_offset += body_types[i]
else:
total_rej += 1
body_offset = 0
for i, ID in enumerate(structures_ID):
name = output_name + '.' + ID + '.rejected_config'
|
[
"\t if total_rej == 1:"
] | 801
|
lcc
|
python
| null |
ca3ae6a5d8c6865655de823a63f15080b5c10cc5843037df
|
|
from __future__ import absolute_import, print_function, division
"""
Tensor optimizations addressing the ops in basic.py.
"""
# TODO: intelligent merge for mul/add
# TODO: 0*x -> 0
from collections import defaultdict
import logging
import itertools
import operator
import sys
import time
import traceback
import warnings
import numpy
from six import integer_types, iteritems
from six.moves import reduce, xrange
import theano
from theano import gof
from theano.compat import izip
from theano.gof import opt, InconsistencyError, TopoOptimizer, graph
from theano.gof import Variable, Constant
from theano.gof.opt import copy_stack_trace, in2out
from theano.gof.utils import MethodNotDefined
from theano.gradient import DisconnectedType
from theano.configparser import config
from theano.tensor.elemwise import Elemwise, DimShuffle
from theano.tensor.subtensor import (get_idx_list, get_canonical_form_slice,
Subtensor, IncSubtensor, make_constant,
AdvancedIncSubtensor1,
AdvancedIncSubtensor,
AdvancedSubtensor1,
advanced_subtensor,
advanced_subtensor1,
advanced_inc_subtensor1)
from theano import scalar
from theano.scalar import basic
from theano.tensor import basic as T
from theano import compile # to register the optimizer built by this file
from theano.compile.ops import Shape, Shape_i
from theano.tensor.type import (values_eq_approx_remove_inf,
values_eq_approx_remove_nan,
values_eq_approx_remove_inf_nan)
from theano.gof.opt import (Optimizer, pre_constant_merge,
pre_greedy_local_optimizer)
from theano.gof import toolbox
from theano.tensor.basic import (Alloc, get_scalar_constant_value, ShapeError,
extract_constant, NotScalarConstantError,
Reshape)
from six import StringIO
_logger = logging.getLogger('theano.tensor.opt')
# Utilities
def _fill_chain(new_out, orig_inputs):
    """Wrap ``new_out`` in one ``T.fill`` per original input and return the
    result as a single-element list (the shape/broadcast info of every
    original input is thereby preserved)."""
    return [reduce(lambda acc, inp: T.fill(inp, acc), orig_inputs, new_out)]
def encompasses_broadcastable(b1, b2):
    """
    Parameters
    ----------
    b1
        The broadcastable attribute of a tensor type.
    b2
        The broadcastable attribute of a tensor type.

    Returns
    -------
    bool
        True if the broadcastable patterns b1 and b2 are such that b2 is
        broadcasted to b1's shape and not the opposite.
    """
    if len(b2) > len(b1):
        return False
    # Compare b2 against the trailing dimensions of b1: every dimension
    # that is broadcastable in b1 must also be broadcastable in b2.
    trailing = b1[len(b1) - len(b2):]
    return all(v2 or not v1 for v1, v2 in zip(trailing, b2))
def merge_broadcastables(broadcastables):
    """Merge several broadcastable patterns dimension-wise: a dimension of
    the result is broadcastable only if it is broadcastable in every input
    pattern."""
    merged = []
    for dims in zip(*broadcastables):
        merged.append(all(dims))
    return merged
def scalarconsts_rest(inputs, elemwise=True, only_process_constants=False):
    """Partition ``inputs`` into scalar constants and the rest.

    Returns a triple ``(consts, origconsts, nonconsts)`` where ``consts``
    holds the extracted scalar values, ``origconsts`` the corresponding
    original variables, and ``nonconsts`` everything else.
    """
    consts, origconsts, nonconsts = [], [], []
    for inp in inputs:
        try:
            value = get_scalar_constant_value(
                inp, elemwise=elemwise,
                only_process_constants=only_process_constants)
        except NotScalarConstantError:
            nonconsts.append(inp)
        else:
            consts.append(value)
            origconsts.append(inp)
    return consts, origconsts, nonconsts
def broadcast_like(value, template, fgraph, dtype=None):
    """
    Return a Variable with the same shape and dtype as the template,
    filled by broadcasting value through it. `value` will be cast as
    necessary.

    Raises NotImplementedError when `template` is not already a variable
    of `fgraph` (its shape feature is needed below).
    """
    value = T.as_tensor_variable(value)
    if value.type == template.type:
        return value
    if template not in fgraph.variables:
        raise NotImplementedError('broadcast_like currently requires the '
                                  'template Variable to be in the fgraph already')
    if dtype is None:
        dtype = template.dtype
    value = T.cast(value, dtype)
    # Casting alone may already have produced the desired type.
    if value.type == template.type:
        return value
    # Prefer the shape feature's cached symbolic shape when available,
    # instead of introducing a fresh `template.shape` computation.
    if hasattr(fgraph, 'shape_feature'):
        new_shape = fgraph.shape_feature.shape_of[template]
    else:
        new_shape = template.shape
    rval = T.alloc(value, *new_shape)
    # the template may have 1s in its shape without being broadcastable
    if rval.broadcastable != template.broadcastable:
        rval = T.unbroadcast(rval, *[i for i in xrange(rval.ndim)
                                     if rval.broadcastable[i] and
                                     not template.broadcastable[i]])
    assert rval.type.dtype == dtype
    if rval.type.broadcastable != template.broadcastable:
        raise AssertionError("rval.type.broadcastable is " +
                             str(rval.type.broadcastable) +
                             " but template.broadcastable is" +
                             str(template.broadcastable))
    return rval
class InplaceElemwiseOptimizer(Optimizer):
"""
We parametrise it to make it work for Elemwise and GpuElemwise op.
"""
def __init__(self, OP):
    # OP is the elemwise Op class (e.g. Elemwise or GpuElemwise) whose
    # nodes this optimizer will try to convert to inplace variants.
    self.op = OP
def add_requirements(self, fgraph):
    # Inplace rewrites require destroy-handling so the fgraph can validate
    # that in-place operations do not corrupt still-needed values.
    fgraph.attach_feature(theano.gof.destroyhandler.DestroyHandler())
@staticmethod
def print_profile(stream, prof, level=0):
    # Pretty-print the profiling dict built by apply(); `level` controls
    # the indentation so nested optimizer reports line up.
    blanc = (' ' * level)
    print(blanc, "InplaceElemwiseOptimizer ", prof['opt'].op, file=stream)
    for k in ['node_before',
              'nb_call_replace',
              'nb_call_validate',
              'nb_inconsistent']:
        print(blanc, k, prof[k], file=stream)
    # Histogram of ndim counts collected during apply() (may be empty).
    ndim = prof['ndim']
    if ndim:
        print(blanc, "ndim", "nb", file=stream)
        for n in sorted(ndim.keys()):
            print(blanc, n, ndim[n], file=stream)
def apply(self, fgraph):
"""
Usage: InplaceElemwiseOptimizer(op).optimize(fgraph)
Attempts to replace all Broadcast ops by versions of them
that operate inplace. It operates greedily: for each Broadcast
Op that is encountered, for each output, tries each input to
see if it can operate inplace on that input. If so, makes the
change and go to the next output or Broadcast Op.
Examples
--------
`x + y + z -> x += y += z`
`(x + y) * (x * y) -> (x += y) *= (x * y) or (x + y) *= (x *= y)`
"""
# We should not validate too often as this takes too much time to
# execute!
# It is the _dfs_toposort() fct in theano/gof/destroyhandler.py
# that takes so much time.
# Should we try to use another lib that does toposort?
# igraph: http://igraph.sourceforge.net/
# networkx: https://networkx.lanl.gov/
# Should we try to use cython?
# Compiling only that fct is not enough, should we try to add the
# deque class too?
# And init the deque and other list to an upper bound number of
# elements?
# Maybe Theano should do online toposort as in
# http://code.google.com/p/acyclic
#
# The next longest optimizer is the canonizer phase.
# Then I think it is the [io_?]toposort (need to validate) so check if
# the solution is also applicable there.
# We execute `validate` after this number of change.
prof = {'opt': self,
'node_before': len(fgraph.apply_nodes),
'nb_call_replace': 0,
'nb_call_validate': 0,
'nb_inconsistent': 0,
'ndim': defaultdict(lambda: 0)}
check_each_change = config.tensor.insert_inplace_optimizer_validate_nb
if check_each_change == -1:
if len(fgraph.apply_nodes) > 500:
check_each_change = 10
else:
check_each_change = 1
nb_change_no_validate = 0
chk = fgraph.checkpoint()
if fgraph.update_mapping:
update_outs = [fgraph.outputs[i] for i in fgraph.update_mapping]
else:
update_outs = []
protected_inputs = [
f.protected for f in fgraph._features if
isinstance(f, theano.compile.function_module.Supervisor)]
protected_inputs = sum(protected_inputs, []) # flatten the list
protected_inputs.extend(fgraph.outputs)
for node in list(graph.io_toposort(fgraph.inputs, fgraph.outputs)):
op = node.op
# gpuarray GpuElemwise inherit from Elemwise
if not type(op) == self.op:
continue
# If big graph and the outputs are scalar, do not make it
# inplace.
if (check_each_change != 1 and
# If multiple outputs, they must all have the same size,
# so only check the first.
getattr(node.outputs[0].type, 'ndim', -1) == 0):
continue
if op.inplace_pattern:
# Maybe this isn't needed anymore, but I don't want to
# rish regression now. This case only happen if the
# original node add already some inplace patter and we
# still try to add more pattern.
baseline = op.inplace_pattern
candidate_outputs = [i for i in xrange(len(node.outputs))
if i not in baseline]
# node inputs that are Constant, already destroyed,
# or fgraph protected inputs and fgraph outputs can't be used as
# inplace target.
# Remove here as faster.
candidate_inputs = [i for i in xrange(len(node.inputs))
if i not in baseline.values() and
not isinstance(node.inputs[i], Constant) and
# Is next line costly?
not fgraph.destroyers(node.inputs[i]) and
node.inputs[i] not in protected_inputs]
else:
baseline = []
candidate_outputs = list(range(len(node.outputs)))
# node inputs that are Constant, already destroyed,
# fgraph protected inputs and fgraph outputs can't be used as inplace
# target.
# Remove here as faster.
candidate_inputs = [i for i in xrange(len(node.inputs))
if not isinstance(node.inputs[i], Constant) and
not fgraph.destroyers(node.inputs[i]) and
node.inputs[i] not in protected_inputs]
verbose = False
raised_warning = not verbose
for candidate_output in candidate_outputs:
# If the output of the node can be established as an update
# output of the fgraph, visit the candidate_inputs in an order
# that will improve the chances of making the node operate
# inplace on the input it's meant to update
candidate_out_var = node.outputs[candidate_output]
sorted_candidate_inputs = candidate_inputs
if candidate_out_var in update_outs:
# The candidate output is an update. Sort the
# variables in candidate_inputs in the following order:
# - Vars corresponding to the actual updated input
# (best case scenario is for the node that procudes
# an update to operate inplace on the variable to
# update)
# - Vars computed inplace on the updates input (second
# best scenario if for the node to work inplace on
# a variable obtained by a chain of inplace on the
# variable to update. In some cases, this will be
# equivalent to operating inplace on the variable to
# update)
# - Remaining variables
updated_inputs = []
for i, f_out in enumerate(fgraph.outputs):
if (f_out is candidate_out_var and i in fgraph.update_mapping):
updated_inp_idx = fgraph.update_mapping[i]
updated_inputs.append(fgraph.inputs[updated_inp_idx])
updated_vars = []
vars_from_inplace = []
other_vars = []
for inp_idx in candidate_inputs:
inp = node.inputs[inp_idx]
if inp in updated_inputs:
# the candidate input is the actual updated input
updated_vars.append(inp_idx)
elif (hasattr(fgraph, 'destroy_handler') and
inp.owner and
any([fgraph.destroy_handler.root_destroyer.get(up_inp, None) is inp.owner
for up_inp in updated_inputs])):
# the candidate input is a variable computed
# inplace on the updated input via a sequence of
# one or more inplace operations
vars_from_inplace.append(inp_idx)
else:
other_vars.append(inp_idx)
sorted_candidate_inputs = (updated_vars +
vars_from_inplace + other_vars)
for candidate_input in sorted_candidate_inputs:
# remove inputs that don't have the same dtype as the output
if node.inputs[candidate_input].type != node.outputs[
candidate_output].type:
continue
inplace_pattern = dict(baseline)
inplace_pattern[candidate_output] = candidate_input
try:
if hasattr(op.scalar_op, "make_new_inplace"):
new_scal = op.scalar_op.make_new_inplace(
scalar.transfer_type(
*[inplace_pattern.get(i, o.dtype)
for i, o in enumerate(node.outputs)]))
else:
new_scal = op.scalar_op.__class__(
scalar.transfer_type(
*[inplace_pattern.get(i, None)
for i in xrange(len(node.outputs))]))
new_outputs = self.op(new_scal, inplace_pattern)(
*node.inputs, **dict(return_list=True))
new_node = new_outputs[0].owner
for r, new_r in zip(node.outputs, new_outputs):
prof['nb_call_replace'] += 1
fgraph.replace(r, new_r,
reason="inplace_elemwise_optimizer")
nb_change_no_validate += 1
prof['ndim'][candidate_out_var.ndim] += 1
if nb_change_no_validate >= check_each_change:
prof['nb_call_validate'] += 1
fgraph.validate()
chk = fgraph.checkpoint()
nb_change_no_validate = 0
except (ValueError, InconsistencyError) as e:
prof['nb_inconsistent'] += 1
if check_each_change != 1 and not raised_warning:
print(("Some inplace optimization was not "
"performed due to unexpected error:"),
file=sys.stderr)
print(e, file=sys.stderr)
raised_warning = True
fgraph.revert(chk)
continue
candidate_inputs.remove(candidate_input)
node = new_node
baseline = inplace_pattern
break
if nb_change_no_validate > 0:
try:
fgraph.validate()
except Exception:
if not raised_warning:
print(("Some inplace optimization was not "
"performed due to unexpected error"),
file=sys.stderr)
fgraph.revert(chk)
return prof
def print_summary(self, stream=sys.stdout, level=0, depth=-1):
print("%s%s (%s)" % (
(' ' * level), self.__class__.__name__, self.op), file=stream)
return inplace_elemwise_optimizer
# Module-level instance used for CPU Elemwise ops, registered late (75) so
# it runs after the non-destructive optimizations.
inplace_elemwise_optimizer = InplaceElemwiseOptimizer(T.Elemwise)
compile.optdb.register('inplace_elemwise_opt', inplace_elemwise_optimizer, 75,
                       'inplace_opt',  # for historic reason
                       'inplace_elemwise_optimizer',
                       'fast_run', 'inplace')
def register_useless(lopt, *tags, **kwargs):
    """Register `lopt` in the local_useless DB.

    Also usable as a parametrized decorator: ``@register_useless('tag')``.
    An optional ``name=`` keyword overrides the optimizer's registered name.
    """
    # A string first argument means we are being used as a decorator
    # factory; defer registration until the real optimizer arrives.
    if type(lopt) == str:
        def register(inner_lopt):
            return register_useless(inner_lopt, lopt, *tags, **kwargs)
        return register

    opt_name = kwargs.pop('name', None) or lopt.__name__
    compile.mode.local_useless.register(opt_name, lopt, 'last', 'fast_run',
                                        *tags, **kwargs)
    return lopt
def register_canonicalize(lopt, *tags, **kwargs):
    """Register `lopt` in optdb['canonicalize'].

    Also usable as a parametrized decorator: ``@register_canonicalize('tag')``.
    An optional ``name=`` keyword overrides the optimizer's registered name.
    """
    # A string first argument means decorator-factory usage.
    if type(lopt) == str:
        def register(inner_lopt):
            return register_canonicalize(inner_lopt, lopt, *tags, **kwargs)
        return register

    opt_name = kwargs.pop('name', None) or lopt.__name__
    compile.optdb['canonicalize'].register(opt_name, lopt, 'fast_run',
                                           *tags, **kwargs)
    return lopt
def register_stabilize(lopt, *tags, **kwargs):
    """Register `lopt` in optdb['stabilize'].

    Also usable as a parametrized decorator: ``@register_stabilize('tag')``.
    An optional ``name=`` keyword overrides the optimizer's registered name.
    """
    # A string first argument means decorator-factory usage.
    if type(lopt) == str:
        def register(inner_lopt):
            return register_stabilize(inner_lopt, lopt, *tags, **kwargs)
        return register

    opt_name = kwargs.pop('name', None) or lopt.__name__
    compile.optdb['stabilize'].register(opt_name, lopt, 'fast_run',
                                        *tags, **kwargs)
    return lopt
def register_specialize(lopt, *tags, **kwargs):
    """Register `lopt` in optdb['specialize'].

    Also usable as a parametrized decorator: ``@register_specialize('tag')``.
    An optional ``name=`` keyword overrides the optimizer's registered name.
    """
    # A string first argument means decorator-factory usage.
    if type(lopt) == str:
        def register(inner_lopt):
            return register_specialize(inner_lopt, lopt, *tags, **kwargs)
        return register

    opt_name = kwargs.pop('name', None) or lopt.__name__
    compile.optdb['specialize'].register(opt_name, lopt, 'fast_run',
                                         *tags, **kwargs)
    return lopt
def register_uncanonicalize(lopt, *tags, **kwargs):
    """Register `lopt` in optdb['uncanonicalize'].

    Also usable as a parametrized decorator:
    ``@register_uncanonicalize('tag')``. An optional ``name=`` keyword
    overrides the optimizer's registered name.
    """
    if type(lopt) == str:
        def register(inner_lopt):
            return register_uncanonicalize(inner_lopt, lopt, *tags, **kwargs)
        return register
    else:
        # Consistency with the sibling register_* helpers: pop('name', None)
        # on an empty dict already returns None, so the previous
        # `(kwargs and kwargs.pop(...))` guard was redundant.
        name = kwargs.pop('name', None) or lopt.__name__
        compile.optdb['uncanonicalize'].register(name, lopt, 'fast_run', *tags,
                                                 **kwargs)
        return lopt
def register_specialize_device(lopt, *tags, **kwargs):
    """Register `lopt` in optdb['specialize_device'].

    Also usable as a parametrized decorator:
    ``@register_specialize_device('tag')``. An optional ``name=`` keyword
    overrides the optimizer's registered name.
    """
    if type(lopt) == str:
        def register(inner_lopt):
            return register_specialize_device(inner_lopt, lopt, *tags, **kwargs)
        return register
    else:
        # Consistency with the sibling register_* helpers: pop('name', None)
        # on an empty dict already returns None, so the previous
        # `(kwargs and kwargs.pop(...))` guard was redundant.
        name = kwargs.pop('name', None) or lopt.__name__
        compile.optdb['specialize_device'].register(name, lopt, 'fast_run', *tags,
                                                    **kwargs)
        return lopt
#####################
# Dot optimizations #
#####################
@register_canonicalize
@register_stabilize
@gof.local_optimizer([T.Dot])
def local_0_dot_x(node):
    """Replace dot(x, y) by an all-zero result when either operand is a
    constant scalar zero, preserving the dot's shape-compatibility check
    through `assert_`.
    """
    if not isinstance(node.op, T.Dot):
        return False

    x, y = node.inputs

    def _is_const_zero(v):
        # True only when v folds to the scalar constant 0.
        try:
            return get_scalar_constant_value(
                v, only_process_constants=True) == 0
        except NotScalarConstantError:
            return False

    if not (_is_const_zero(x) or _is_const_zero(y)):
        return

    constant_zero = T.constant(0, dtype=node.outputs[0].type.dtype)
    ndims = (x.ndim, y.ndim)
    if ndims == (2, 2):
        constant_zero = assert_(constant_zero,
                                T.eq(x.shape[1], y.shape[0]))
        return [T.alloc(constant_zero, x.shape[0], y.shape[1])]
    elif ndims == (1, 2):
        constant_zero = assert_(constant_zero,
                                T.eq(x.shape[0], y.shape[0]))
        return [T.alloc(constant_zero, y.shape[1])]
    elif ndims == (2, 1):
        constant_zero = assert_(constant_zero,
                                T.eq(x.shape[1], y.shape[0]))
        return [T.alloc(constant_zero, x.shape[0])]
    elif ndims == (1, 1):
        constant_zero = assert_(constant_zero,
                                T.eq(x.shape[0], y.shape[0]))
        return [constant_zero]
    else:
        _logger.warning("Optimization Warning: "
                        "Optimization theano/opt.py:local_0_dot_x Found "
                        "that it could apply, but was not implemented "
                        "for dot product with these input types:\n"
                        "(%s, %s)",
                        x.type, y.type)
######################
# DimShuffle lifters #
######################
def apply_local_dimshuffle_lift(var):
    """Recursively apply local_dimshuffle_lift starting at `var`.

    Returns the lifted replacement when the transform fires, otherwise
    `var` unchanged.
    """
    owner = var.owner
    if owner is None:
        return var
    replacement = local_dimshuffle_lift.transform(owner)
    return replacement[0] if replacement else var
# Checks for two types of useless dimshuffles:
# 1 - dimshuffle all dimensions in order.
# 2 - dimshuffle a broadcastable dimension.
def is_dimshuffle_useless(new_order, input):
    """Return True when applying `new_order` to `input` is a no-op.

    A dimshuffle is useless when it keeps the number of dimensions and
    every output dimension either comes from the same position, or both
    the position and its source are broadcastable (swapping/inserting
    broadcastable dims does not change the data).
    """
    if len(new_order) != input.type.ndim:
        return False
    # Positions that hold a broadcastable input dim; 'x' (a new
    # broadcastable dim) is always an acceptable source for them.
    bcast_dims = [d for d, is_bcast in enumerate(input.type.broadcastable)
                  if is_bcast] + ['x']
    for pos, src in enumerate(new_order):
        if src == pos:
            continue
        if pos in bcast_dims and src in bcast_dims:
            continue
        return False
    return True
@gof.local_optimizer([DimShuffle])
def local_dimshuffle_lift(node):
    """
    "Lifts" DimShuffle through Elemwise operations and merges
    consecutive DimShuffles. Basically, applies the following
    transformations on the whole graph:

    DimShuffle(Elemwise(x, y)) => Elemwise(DimShuffle(x), DimShuffle(y))
    DimShuffle(DimShuffle(x)) => DimShuffle(x)
    DimShuffle{0,1,...}(x) => x (when the dimshuffle do nothing)

    After this transform, clusters of Elemwise operations are
    void of DimShuffle operations.
    """
    op = node.op
    if not isinstance(op, DimShuffle):
        return False

    input = node.inputs[0]
    inode = input.owner
    new_order = op.new_order
    # Case 1: push the dimshuffle below a single-client Elemwise and
    # recursively lift each per-input dimshuffle further down.
    if inode and isinstance(inode.op, Elemwise) and (len(input.clients) == 1):
        # Don't use make_node to have tag.test_value set.
        new_inputs = []
        for inp in inode.inputs:
            new_inp = op.__class__(inp.type.broadcastable,
                                   op.new_order)(inp)
            new_inputs.append(apply_local_dimshuffle_lift(new_inp))
        copy_stack_trace(node.outputs[0], new_inputs)
        ret = inode.op(*new_inputs, **dict(return_list=True))
        return ret
    # Case 2: two consecutive dimshuffles — compose their orders and
    # retarget `input` to the inner dimshuffle's input.
    if inode and isinstance(inode.op, DimShuffle):
        new_order = [x == 'x' and 'x' or inode.op.new_order[x] for x in
                     new_order]
        input = inode.inputs[0]

    # Drop the (possibly composed) dimshuffle entirely when it is a no-op;
    # otherwise, if we did compose two, emit the single merged dimshuffle.
    if is_dimshuffle_useless(new_order, input):
        return [input]
    elif inode and isinstance(inode.op, DimShuffle):
        ret = op.__class__(input.type.broadcastable, new_order)(input)
        ret = apply_local_dimshuffle_lift(ret)
        copy_stack_trace(node.outputs[0], ret)
        return [ret]
@register_canonicalize
@gof.local_optimizer([Reshape])
def local_useless_dimshuffle_in_reshape(node):
    """
    Removes useless DimShuffle operation inside Reshape:

      reshape(vector.dimshuffle('x', 0), shp) => reshape(vector, shp)
      reshape(matrix.dimshuffle('x', 0, 'x', 1), shp) => reshape(matrix, shp)
      reshape(row.dimshuffle(1, 'x'), shp) => reshape(row, shp)
      reshape(col.dimshuffle(0), shp) => reshape(col, shp)
    """
    op = node.op
    if not isinstance(op, Reshape):
        return False
    shuffled = node.inputs[0]
    if shuffled.owner is None or not isinstance(shuffled.owner.op, DimShuffle):
        return False

    new_order = shuffled.owner.op.new_order
    inner = shuffled.owner.inputs[0]
    broadcastables = shuffled.broadcastable
    # Source positions of the non-broadcastable output dims.  Broadcastable
    # dims carry no data, so the dimshuffle is removable exactly when the
    # remaining (data-carrying) dims keep their relative order.
    kept_sources = [src for src, is_bcast in zip(new_order, broadcastables)
                    if not is_bcast]
    in_order = all(a <= b for a, b in zip(kept_sources, kept_sources[1:]))
    if in_order:
        shape = node.inputs[1]
        ret = op.__class__(node.outputs[0].ndim)(inner, shape)
        copy_stack_trace(node.outputs[0], ret)
        return [ret]
@register_canonicalize
@gof.local_optimizer([DimShuffle])
def local_lift_transpose_through_dot(node):
    """
    dot(x,y).T -> dot(y.T, x.T)

    These optimizations "lift" (propagate towards the inputs) DimShuffle
    through dot product. It allows to put the graph in a more standard shape,
    and to later merge consecutive DimShuffles.

    The transformation should be apply whether or not the transpose is
    inplace. The newly-introduced transpositions are not inplace, this will
    be taken care of in a later optimization phase.
    """
    op = node.op
    if not isinstance(op, T.DimShuffle) or op.new_order != (1, 0):
        return False
    dotted = node.inputs[0]
    if dotted.owner is None or not isinstance(dotted.owner.op, T.Dot):
        return False

    x, y = dotted.owner.inputs
    if x.ndim == y.ndim == 2:
        # Output is dot product of transposed inputs in reverse order
        ret = [T.dot(y.T, x.T)]
        # Copy over stack trace to output from result of dot-product
        copy_stack_trace(node.inputs[0], ret)
        return ret
# DimShuffle lifting is useful both during canonicalization and again at
# specialization (later passes can introduce new DimShuffles).
register_canonicalize(local_dimshuffle_lift)
register_specialize(local_dimshuffle_lift)
######################
# Casting operations #
######################
@register_canonicalize
@register_specialize
@gof.local_optimizer([T.TensorFromScalar])
def local_tensor_scalar_tensor(node):
    '''tensor_from_scalar(scalar_from_tensor(x)) -> x'''
    if not isinstance(node.op, T.TensorFromScalar):
        return
    inner = node.inputs[0]
    if inner.owner and isinstance(inner.owner.op, T.ScalarFromTensor):
        # The round trip through scalar space is an identity on the
        # original tensor; no stack trace needs to be copied.
        return [inner.owner.inputs[0]]
@register_canonicalize
@register_specialize
@gof.local_optimizer([T.ScalarFromTensor])
def local_scalar_tensor_scalar(node):
    '''scalar_from_tensor(tensor_from_scalar(x)) -> x'''
    if not isinstance(node.op, T.ScalarFromTensor):
        return
    inner = node.inputs[0]
    if inner.owner and isinstance(inner.owner.op, T.TensorFromScalar):
        # The round trip through tensor space is an identity on the
        # original scalar; no stack trace needs to be copied.
        return [inner.owner.inputs[0]]
#####################################
# ShapeFeature, Shape optimizations
#####################################
class MakeVector(T.Op):
    """Concatenate a number of scalars together into a vector.

    This is a simple version of stack() that introduces far less cruft
    into the graph. Should work with 0 inputs. The constant_folding
    optimization will remove it.

    """

    __props__ = ("dtype",)

    def __init__(self, dtype='int64'):
        # dtype of the produced vector; 'int64' is the natural choice for
        # shape vectors, the main use of this Op.
        self.dtype = dtype

    def make_node(self, *inputs):
        inputs = list(map(T.as_tensor_variable, inputs))
        if (not all(a.type == inputs[0].type for a in inputs) or
                (len(inputs) > 0 and inputs[0].dtype != self.dtype)):
            # Mixed input dtypes: upcast, but refuse any downcast.
            dtype = theano.scalar.upcast(self.dtype, *[i.dtype for i in inputs])
            # upcast the input to the determined dtype,
            # but don't downcast anything
            assert dtype == self.dtype, (
                "The upcast of the inputs to MakeVector should match the "
                "dtype given in __init__.")
            if not all(self.dtype == T.cast(i, dtype=dtype).dtype
                       for i in inputs):
                raise TypeError("MakeVector.make_node expected inputs"
                                " upcastable to %s. got %s" %
                                (self.dtype, str([i.dtype for i in inputs])))
            inputs = [T.cast(i, dtype=dtype) for i in inputs]
        assert all(self.dtype == a.dtype for a in inputs)
        assert all(a.ndim == 0 for a in inputs)

        if inputs:
            dtype = inputs[0].type.dtype
        else:
            dtype = self.dtype
        # bcastable = (len(inputs) == 1)
        bcastable = False
        otype = T.TensorType(broadcastable=(bcastable,), dtype=dtype)
        return T.Apply(self, inputs, [otype()])

    def perform(self, node, inputs, out_):
        out, = out_
        # not calling theano._asarray as optimization
        if (out[0] is None) or (out[0].size != len(inputs)):
            out[0] = theano._asarray(inputs, dtype=node.outputs[0].dtype)
        else:
            # assume that out has correct dtype. there is no cheap way to check
            out[0][...] = inputs

    def c_code_cache_version(self):
        return (2,)

    def c_code(self, node, name, inp, out_, sub):
        out, = out_
        # Shouldn't use PyArray_TYPE(inp[0]) for the dtype
        # when len(inp) == 0 (we need to support this case.
        # So there will be (1 * nb_dtype) + ((nb len(inp) - 1 ))
        # different c code with the following algo
        out_shape = len(inp)
        out_num = numpy.dtype(node.outputs[0].dtype).num
        # don't use dtype_%(out)s as when check_input=False, it isn't defined.
        out_dtype = node.outputs[0].type.dtype_specs()[1]
        if len(inp) > 0:
            assert self.dtype == node.inputs[0].dtype
            out_num = 'PyArray_TYPE(%s)' % inp[0]

        # Allocate (or reuse) the 1-d output, then copy each scalar input
        # into its slot.
        ret = """
        npy_intp dims[1];
        dims[0] = %(out_shape)s;
        if(!%(out)s || PyArray_DIMS(%(out)s)[0] != %(out_shape)s){
            Py_XDECREF(%(out)s);
            %(out)s = (PyArrayObject*)PyArray_EMPTY(1, dims, %(out_num)s, 0);
        }
        """ % locals()
        for idx, i in enumerate(inp):
            ret += """
            *((%(out_dtype)s *)PyArray_GETPTR1(%(out)s, %(idx)s)) = *((%(out_dtype)s *) PyArray_DATA(%(i)s));
            """ % locals()
        return ret

    def infer_shape(self, node, ishapes):
        # Output is a vector with one element per input scalar.
        return [(len(ishapes),)]

    def grad(self, inputs, output_gradients):
        # If the output is of an integer dtype, no gradient shall pass
        if self.dtype in theano.tensor.discrete_dtypes:
            return [ipt.zeros_like().astype(theano.config.floatX)
                    for ipt in inputs]

        # Each input's gradient is the matching element of the output
        # gradient vector.
        grads = []
        for i, inp in enumerate(inputs):
            grads.append(output_gradients[0][i])
        return grads

    def R_op(self, inputs, eval_points):
        if None in eval_points:
            return [None]
        return self.make_node(*eval_points).outputs
# Shared default instance (int64), used to build symbolic shape vectors.
make_vector = MakeVector()
class MakeVectorPrinter:
    """Pretty-printer rendering a MakeVector result as "[a, b, ...]"."""

    def process(self, r, pstate):
        if r.owner is None:
            raise TypeError("Can only print make_vector.")
        if not isinstance(r.owner.op, MakeVector):
            raise TypeError("Can only print make_vector.")
        saved_precedence = getattr(pstate, 'precedence', None)
        try:
            # Maximal precedence: elements never need extra parentheses.
            pstate.precedence = 1000
            parts = [pstate.pprinter.process(input)
                     for input in r.owner.inputs]
        finally:
            pstate.precedence = saved_precedence
        return "[%s]" % ", ".join(parts)
# Hook the custom printer into the global pretty-printer registry.
T.pprint.assign(MakeVector, MakeVectorPrinter())
class ShapeFeature(object):
"""Graph optimizer for removing all calls to shape().
This optimizer replaces all Shapes and Subtensors of Shapes with
Shape_i and MakeVector Ops.
This optimizer has several goals:
1. to 'lift' Shapes to as close to the inputs as possible.
2. to infer the shape of every node in the graph in terms of the
input shapes.
3. remove all fills (T.second, T.fill) from the graph
Lifting shapes as close to the inputs as possible is important for
canonicalization because it is very bad form to have to compute
something just to know how big it will be. Firstly, it is a waste
of time to compute such outputs. But it is important to get rid
of these outputs as early as possible in the compilation process
because the extra computations make it appear as if many internal
graph nodes have multiple clients. Many optimizations refuse to
work on nodes with multiple clients.
Lifting is done by using an `<Op>.infer_shape` function if one is
present, or else using a conservative default. An Op that
supports shape-lifting should define a infer_shape(self, node,
input_shapes) function. The argument input_shapes is a tuple of
tuples... there is an interior tuple for each input to the node.
The tuple has as many elements as dimensions. The element in
position i of tuple j represents the i'th shape component of the
j'th input. The function should return a tuple of tuples. One
output tuple for each node.output. Again, the i'th element of the
j'th output tuple represents the output[j].shape[i] of the
function. If an output is not a TensorType, then None should be
returned instead of a tuple for that output.
For example the infer_shape for a matrix-matrix product would accept
input_shapes=((x0,x1), (y0,y1)) and return ((x0, y1),).
Inferring the shape of internal nodes in the graph is important
for doing size-driven optimizations. If we know how big various
intermediate results will be, we can estimate the cost of many Ops
accurately, and generate c-code that is specific [e.g. unrolled]
to particular sizes.
In cases where you cannot figure out the shape, raise a ShapeError.
Notes
-----
Right now there is only the ConvOp that could really take
advantage of this shape inference, but it is worth it even
just for the ConvOp. All that's necessary to do shape
inference is 1) to mark shared inputs as having a particular
shape, either via a .tag or some similar hacking; and 2) to
add an optional In() argument to promise that inputs will
have a certain shape (or even to have certain shapes in
certain dimensions). We can't automatically infer the shape of
shared variables as they can change of shape during the
execution by default. (NOT IMPLEMENTED YET, BUT IS IN TRAC)
**Using Shape information in Optimizations**
To use this shape information in OPTIMIZATIONS, use the
``shape_of`` dictionary.
For example:
.. code-block:: python
try:
shape_of = node.fgraph.shape_feature.shape_of
except AttributeError:
# This can happen when the mode doesn't include the ShapeFeature.
return
shape_of_output_zero = shape_of[node.output[0]]
The ``shape_of_output_zero`` symbol will contain a tuple, whose
elements are either integers or symbolic integers.
TODO: check to see if the symbols are necessarily
non-constant... or are integer literals sometimes Theano
constants?? That would be confusing.
"""
    def get_node_infer_shape(self, node):
        """Compute the output shapes of `node` using its Op's infer_shape.

        Falls back to `default_infer_shape` when the Op has no infer_shape
        or when shape inference raises ShapeError; unexpected exceptions
        are raised or only logged depending on config.on_shape_error.
        """
        try:
            shape_infer = node.op.infer_shape
        except AttributeError:
            shape_infer = self.default_infer_shape

        try:
            o_shapes = shape_infer(node,
                                   [self.shape_of[r] for r in node.inputs])
        except ShapeError:
            o_shapes = self.default_infer_shape(node, [self.shape_of[r] for
                                                       r in node.inputs])
        except NotImplementedError as e:
            raise NotImplementedError(
                'Code called by infer_shape failed raising a '
                'NotImplementedError. Raising NotImplementedError to '
                'indicate that a shape cannot be computed is no longer '
                'supported, and one should now use tensor.ShapeError '
                'instead. The original exception message is: %s' % e)
        except Exception as e:
            msg = ('Failed to infer_shape from Op %s.\nInput shapes: '
                   '%s\nException encountered during infer_shape: '
                   '%s\nException message: %s\nTraceback: %s') % (
                node.op, [self.shape_of[r] for r in node.inputs],
                type(e), str(e), traceback.format_exc())
            if config.on_shape_error == "raise":
                raise Exception(msg)
            else:
                _logger.warning(msg)
            o_shapes = self.default_infer_shape(
                node, [self.shape_of[r] for r in node.inputs])

        return o_shapes
    def get_shape(self, var, idx):
        """ Optimization can call this to get the current shape_i

        It is better to call this then use directly shape_of[var][idx]
        as this method should update shape_of if needed.

        TODO: Up to now, we don't update it in all cases. Update in all cases.
        """
        r = self.shape_of[var][idx]
        # A Shape_i on a variable no longer in the fgraph is stale:
        # re-run shape inference on var's owner to refresh it.
        if (r.owner and
                isinstance(r.owner.op, Shape_i) and
                r.owner.inputs[0] not in var.fgraph.variables):
            assert var.owner
            node = var.owner
            # recur on inputs
            for i in node.inputs:
                if getattr(i, 'ndim', None) > 0:
                    self.get_shape(i, 0)
            o_shapes = self.get_node_infer_shape(node)
            assert len(o_shapes) == len(node.outputs)

            # Only change the variables and dimensions that would introduce
            # extra computation
            for new_shps, out in zip(o_shapes, node.outputs):
                if not hasattr(out, 'ndim'):
                    continue

                merged_shps = list(self.shape_of[out])
                changed = False
                for i in range(out.ndim):
                    n_r = merged_shps[i]
                    if (n_r.owner and
                            isinstance(n_r.owner.op, Shape_i) and
                            n_r.owner.inputs[0] not in var.fgraph.variables):
                        changed = True
                        merged_shps[i] = new_shps[i]
                if changed:
                    self.set_shape(out, merged_shps, override=True)
            r = self.shape_of[var][idx]
        return r
    def shape_ir(self, i, r):
        """Return symbolic r.shape[i] for tensor variable r, int i."""
        if hasattr(r.type, "broadcastable") and r.type.broadcastable[i]:
            # A broadcastable dimension is known to be 1.
            return self.lscalar_one
        else:
            # Do not call make_node for test_value
            s = Shape_i(i)(r)
            try:
                # Fold to a constant when possible.
                s = get_scalar_constant_value(s)
            except NotScalarConstantError:
                pass
            return s
    def shape_tuple(self, r):
        """Return a tuple of symbolic shape vars for tensor variable r."""
        if not hasattr(r, 'ndim'):
            # This happen for NoneConst.
            return None
        return tuple([self.shape_ir(i, r) for i in xrange(r.ndim)])
    def default_infer_shape(self, node, i_shapes):
        """Return a list of shape tuple or None for the outputs of node.

        This function is used for Ops that don't implement infer_shape.
        Ops that do implement infer_shape should use the i_shapes parameter,
        but this default implementation ignores it.

        """
        rval = []
        for r in node.outputs:
            try:
                rval.append(self.shape_tuple(r))
            except AttributeError:
                # Non-tensor output (no usable shape): report None.
                rval.append(None)
        return rval
    def unpack(self, s_i, var):
        """Return a symbolic integer scalar for the shape element s_i.

        The s_i argument was produced by the infer_shape() of an Op subclass.

        var: the variable that correspond to s_i. This is just for
        error reporting.

        """
        # unpack the s_i that the Op returned
        assert s_i is not None
        if s_i == 1:
            # don't make the optimizer merge a zillion ones together
            # by always returning the same object to represent 1
            return self.lscalar_one
        if type(s_i) is float and int(s_i) == s_i:
            # Integral float: treat as int.
            s_i = int(s_i)
        if (type(s_i) in integer_types or
                isinstance(s_i, numpy.integer) or
                (isinstance(s_i, numpy.ndarray) and s_i.ndim == 0)):
            # this shape is a constant
            if s_i < 0:
                msg = "There is a negative shape in the graph!"
                msg += gof.utils.get_variable_trace_string(var)
                raise ValueError(msg)
            return T.constant(s_i, dtype='int64')
        if type(s_i) in (tuple, list):
            # this dimension is the same as many of the inputs
            # which tells us that if one of the inputs is known,
            # the others all become known.
            # TODO: should be implemented in Elemwise, and Dot
            #
            # worst case, we loop over shape_of and replace things
            raise NotImplementedError(s_i)

        # s_i is x.shape[i] for some x, we change it to shape_of[x][i]
        if (s_i.owner and
                isinstance(s_i.owner.op, Subtensor) and
                s_i.owner.inputs[0].owner and
                isinstance(s_i.owner.inputs[0].owner.op, T.Shape)):
            assert s_i.ndim == 0
            assert len(s_i.owner.op.idx_list) == 1

            # The current Subtensor always put constant index in the graph.
            # This was not True in the past. So call the Subtensor function
            # that will return the right index.
            idx = get_idx_list(s_i.owner.inputs, s_i.owner.op.idx_list)
            assert len(idx) == 1
            idx = idx[0]
            try:
                i = get_scalar_constant_value(idx)
            except NotScalarConstantError:
                pass
            else:
                # Executed only if no exception was raised
                x = s_i.owner.inputs[0].owner.inputs[0]
                # x should already have been imported, and should be in shape_of.
                s_i = self.shape_of[x][i]

        if s_i.type.dtype in theano.tensor.integer_dtypes:
            if getattr(s_i.type, 'ndim', 0):
                raise TypeError('Shape element must be scalar', s_i)
            return s_i
        else:
            raise TypeError('Unsupported shape element',
                            s_i, type(s_i), getattr(s_i, 'type', None))
    def set_shape(self, r, s, override=False):
        """Assign the shape `s` to previously un-shaped variable `r`.

        Parameters
        ----------
        r : a variable
        s : None or a tuple of symbolic integers
        override : If False, it mean r is a new object in the fgraph.
            If True, it mean r is already in the fgraph and we want to
            override its shape.

        """
        if not override:
            assert r not in self.shape_of, 'r already in shape_of'
        if s is None:
            self.shape_of[r] = s
        else:
            if not isinstance(s, (tuple, list)):
                raise TypeError('shapes must be tuple/list', (r, s))

            if r.ndim != len(s):
                sio = StringIO()
                theano.printing.debugprint(r, file=sio, print_type=True)
                raise AssertionError(
                    "Something inferred a shape with %d dimensions "
                    "for a variable with %d dimensions"
                    " for the variable:\n%s" % (
                        len(s), r.ndim, sio.getvalue()))

            shape_vars = []
            for i in xrange(r.ndim):
                # Broadcastable dims are statically 1; others go through
                # unpack() to become int64 scalars.
                if (hasattr(r.type, 'broadcastable') and
                        r.type.broadcastable[i]):
                    shape_vars.append(self.lscalar_one)
                else:
                    shape_vars.append(self.unpack(s[i], r))
            assert all([not hasattr(r.type, "broadcastable") or
                        not r.type.broadcastable[i] or
                        # The two following comparison are a speed optimization
                        # But we never timed this speed optimization!
                        self.lscalar_one.equals(shape_vars[i]) or
                        self.lscalar_one.equals(
                            T.extract_constant(shape_vars[i]))
                        for i in xrange(r.ndim)])
            self.shape_of[r] = tuple(shape_vars)
            # Keep the reverse index (shape var -> variables using it)
            # up to date.
            for sv in shape_vars:
                self.shape_of_reverse_index.setdefault(sv, set()).add(r)
    def update_shape(self, r, other_r):
        """Replace shape of r by shape of other_r.

        If, on some dimensions, the shape of other_r is not informative,
        keep the shape of r on those dimensions.

        """
        # other_r should already have a shape
        assert other_r in self.shape_of, ('other_r not in shape_of', other_r)
        other_shape = self.shape_of[other_r]

        # If other_shape has no information, call is pointless.
        if other_shape is None:
            return

        if r in self.shape_of:
            r_shape = self.shape_of[r]
        else:
            # If no info is known on r's shape, use other_shape
            self.set_shape(r, other_shape)
            return
        if (other_r.owner and r.owner and
                other_r.owner.inputs == r.owner.inputs and
                other_r.owner.op == r.owner.op):
            # We are doing a merge. So the 2 shapes graph will be the
            # same. This is only a speed optimization to call
            # ancestors() less frequently.
            return

        # Merge other_shape with r_shape, giving the priority to other_shape
        merged_shape = []
        for i, ps in enumerate(other_shape):
            if r_shape is None and other_shape:
                merged_shape.append(other_shape[i])
            elif (ps.owner and
                    isinstance(getattr(ps.owner, 'op', None), Shape_i) and
                    ps.owner.op.i == i and
                    ps.owner.inputs[0] in (r, other_r)):
                # If other_shape[i] is uninformative, use r_shape[i].
                # For now, we consider 2 cases of uninformative other_shape[i]:
                #  - Shape_i(i)(other_r);
                #  - Shape_i(i)(r).
                merged_shape.append(r_shape[i])
            elif isinstance(r_shape[i], (Constant, integer_types)):
                # We do this to call less often ancestors and make
                # sure we have the simplest shape possible.
                merged_shape.append(r_shape[i])
            elif isinstance(other_shape[i], (Constant, integer_types)):
                # We do this to call less often ancestors and make
                # sure we have the simplest shape possible.
                merged_shape.append(other_shape[i])
            elif other_shape[i] == r_shape[i]:
                # This mean the shape is equivalent
                # We do not want to do the ancestor check in those cases
                merged_shape.append(r_shape[i])
            elif r_shape[i] in theano.gof.graph.ancestors([other_shape[i]]):
                # Another case where we want to use r_shape[i] is when
                # other_shape[i] actually depends on r_shape[i]. In that case,
                # we do not want to substitute an expression with another that
                # is strictly more complex. Such a substitution could also lead
                # to cycles: if (in the future) r_shape[i] gets replaced by an
                # expression of other_shape[i], other_shape[i] may end up
                # depending on itself.
                merged_shape.append(r_shape[i])
            else:
                merged_shape.append(other_shape[i])
        assert all([(not hasattr(r.type, "broadcastable") or
                     not r.type.broadcastable[i] and
                     not other_r.type.broadcastable[i]) or
                    # The two following comparison are a speed optimization
                    # But we never timed this speed optimization!
                    self.lscalar_one.equals(merged_shape[i]) or
                    self.lscalar_one.equals(
                        T.extract_constant(merged_shape[i], only_process_constants=True))
                    for i in xrange(r.ndim)])
        self.shape_of[r] = tuple(merged_shape)
        # Keep the reverse index (shape var -> variables using it) current.
        for sv in self.shape_of[r]:
            self.shape_of_reverse_index.setdefault(sv, set()).add(r)
def set_shape_i(self, r, i, s_i):
    '''Replace element i of shape_of[r] by s_i'''
    assert r in self.shape_of
    prev_shape = self.shape_of[r]
    # prev_shape is an immutable tuple, so rebuild the whole shape with
    # entry i swapped for the unpacked new value.
    new_shape = [self.unpack(s_i, r) if j == i else s_j
                 for j, s_j in enumerate(prev_shape)]
    # Sanity check: broadcastable dimensions must still be the constant 1.
    assert all([not hasattr(r.type, "broadcastable") or
                not r.type.broadcastable[idx] or
                # The two following comparison are a speed optimization
                # But we never timed this speed optimization!
                self.lscalar_one.equals(new_shape[idx]) or
                self.lscalar_one.equals(T.extract_constant(new_shape[idx]))
                for idx in xrange(r.ndim)])
    self.shape_of[r] = tuple(new_shape)
    # Keep the reverse index (shape var -> vars using it) up to date.
    for sv in self.shape_of[r]:
        self.shape_of_reverse_index.setdefault(sv, set()).add(r)
def init_r(self, r):
    '''Register r's shape in the shape_of dictionary.'''
    if r in self.shape_of:
        # Already tracked; nothing to do.
        return
    try:
        shape = self.shape_tuple(r)
    except AttributeError:  # XXX: where would this come from?
        shape = None
    self.set_shape(r, shape)
def make_vector_shape(self, r):
    """Pack the tracked per-dimension shapes of r into one shape vector."""
    dims = self.shape_of[r]
    return make_vector(*dims)
#
# Feature interface
#
#
def on_attach(self, fgraph):
    """Initialize per-fgraph state and import every node already present."""
    assert not hasattr(fgraph, 'shape_feature')
    fgraph.shape_feature = self
    # Must be local to the object as otherwise we reuse the same
    # variable for multiple fgraph!
    self.lscalar_one = T.constant(1, dtype='int64')
    assert self.lscalar_one.type == T.lscalar
    # Variable -> tuple(scalars) or None (all tensor vars map to a tuple).
    self.shape_of = {}
    # Shape_i apply node -> variable scheduled to replace its input.
    self.scheduled = {}
    # shape var -> set of graph vars whose tracked shape contains it.
    self.shape_of_reverse_index = {}
    for node in fgraph.toposort():
        self.on_import(fgraph, node, reason='on_attach')
def on_detach(self, fgraph):
    """Drop all tracked shape state and unregister from the fgraph."""
    self.shape_of = {}
    self.scheduled = {}
    self.shape_of_reverse_index = {}
    del fgraph.shape_feature
def on_import(self, fgraph, node, reason):
    """Infer and record the shapes of `node`'s outputs when it enters the fgraph.

    If the first output is already tracked, this call is a revert of a
    failed transaction and nothing needs recomputing.
    """
    if node.outputs[0] in self.shape_of:
        # this is a revert, not really an import
        for r in node.outputs + node.inputs:
            assert r in self.shape_of
        return

    for i, r in enumerate(node.inputs):
        # make sure we have shapes for the inputs
        self.init_r(r)

    o_shapes = self.get_node_infer_shape(node)

    # this is packed information
    # an element of o_shapes is either None or a tuple
    # elements of the tuple can be either strings, or ints
    if len(o_shapes) != len(node.outputs):
        raise Exception(
            ('The infer_shape method for the Op "%s" returned a list ' +
             'with the wrong number of element: len(o_shapes) = %d ' +
             ' != len(node.outputs) = %d') % (str(node.op),
                                              len(o_shapes),
                                              len(node.outputs)))

    # Ensure shapes are in 'int64'. This is to make sure the assert
    # found in the `local_useless_subtensor` optimization does not fail.
    for sh_idx, sh in enumerate(o_shapes):
        if sh is None:
            continue
        if not isinstance(sh, (list, tuple)):
            raise ValueError("infer_shape of %s didn't return a list of"
                             " list. It returned '%s'" % (str(node), str(o_shapes)))
        # new_shape accumulates a copy of sh with offending elements cast;
        # it stays empty when every element is already int64.
        new_shape = []
        for i, d in enumerate(sh):
            # Note: we ignore any shape element that is not typed (i.e.,
            # does not have a 'dtype' attribute). This means there may
            # still remain int elements that are int32 on 32-bit platforms,
            # but this works with `local_useless_subtensor`, so for now we
            # keep it this way. See #266 for a better long-term fix.
            if getattr(d, 'dtype', 'int64') != 'int64':
                assert d.dtype in theano.tensor.discrete_dtypes, (node, d.dtype)
                assert str(d.dtype) != 'uint64', node
                # Splice in the untouched prefix, then replace element i.
                new_shape += sh[len(new_shape):i + 1]
                if isinstance(d, T.Constant):
                    casted_d = T.constant(d.data, dtype='int64')
                else:
                    casted_d = theano.tensor.cast(d, 'int64')
                new_shape[i] = casted_d
        if new_shape:
            # We replace the shape with wrong dtype by the one with
            # 'int64'.
            new_shape += sh[len(new_shape):]
            o_shapes[sh_idx] = tuple(new_shape)

    for r, s in izip(node.outputs, o_shapes):
        self.set_shape(r, s)
def on_change_input(self, fgraph, node, i, r, new_r, reason):
    """Keep shape bookkeeping consistent when input i of `node` changes r -> new_r."""
    if new_r not in self.shape_of:
        # It happen that the fgraph didn't called on_import for some
        # new_r. This happen when new_r don't have an
        # owner(i.e. it is a constant or an input of the graph)
        # update_shape suppose that r and new_r are in shape_of.
        self.init_r(new_r)

    # This tells us that r and new_r must have the same shape if
    # we didn't know that the shapes are related, now we do.
    self.update_shape(new_r, r)

    # change_input happens in two cases:
    # 1) we are trying to get rid of r, or
    # 2) we are putting things back after a failed transaction.

    # In case 1, if r has a shape_i client, we will want to
    # replace the shape_i of r with the shape of new_r. Say that
    # r is *scheduled*.
    # At that point, node is no longer a client of r, but of new_r
    for (shpnode, idx) in (r.clients + [(node, i)]):
        if isinstance(getattr(shpnode, 'op', None), Shape_i):
            idx = shpnode.op.i
            repl = self.shape_of[new_r][idx]
            if repl.owner is shpnode:
                # This mean the replacement shape object is
                # exactly the same as the current shape object. So
                # no need for replacement. This happen for example
                # with the InputToGpuOptimizer optimizer.
                continue
            if (repl.owner and
                    repl.owner.inputs[0] is shpnode.inputs[0] and
                    isinstance(repl.owner.op, Shape_i) and
                    repl.owner.op.i == shpnode.op.i):
                # The replacement is a shape_i of the same
                # input. So no need to do this equivalent
                # replacement.
                continue

            if shpnode.outputs[0] in theano.gof.graph.ancestors([repl]):
                raise InconsistencyError(
                    "This substitution would insert a cycle in the graph:"
                    "node: %s, i: %i, r: %s, new_r: %s"
                    % (node, i, r, new_r))

            self.scheduled[shpnode] = new_r
    # In case 2, if r is a variable that we've scheduled for shape update,
    # then we should cancel it.
    unscheduled = [k for k, v in self.scheduled.items() if v == r]
    for k in unscheduled:
        del self.scheduled[k]

    # In either case, r could be in shape_of.values(), that is, r itself
    # is the shape of something. In that case, we want to update
    # the value in shape_of, to keep it up-to-date.
    for v in self.shape_of_reverse_index.get(r, []):
        # The reverse index is only approximate. It is not updated on
        # deletion of variables, or on change_input so it might be the
        # case that there are a few extra `v`'s in it that no longer have
        # a shape of r or possibly have been deleted from shape_of
        # entirely. The important thing is that it permits to recall
        # all variables with r in their shape.
        for ii, svi in enumerate(self.shape_of.get(v, [])):
            if svi == r:
                self.set_shape_i(v, ii, new_r)
    self.shape_of_reverse_index[r] = set()
def same_shape(self, x, y, dim_x=None, dim_y=None):
    """Return True if we are able to assert that x and y have the
    same shape.

    dim_x and dim_y are optional. If used, they should be an index
    to compare only 1 dimension of x and y.

    A False return only means the equality could not be proven here;
    the shapes may still be equal at run time.
    """
    sx = self.shape_of[x]
    sy = self.shape_of[y]
    if sx is None or sy is None:
        return False
    if dim_x is not None:
        sx = [sx[dim_x]]
    if dim_y is not None:
        sy = [sy[dim_y]]
    assert len(sx) == len(sy)

    # We look on each dimensions we want to compare.
    # If any of them can't be asserted to be equal, return False.
    # Otherwise, we return True at the end.
    for dx, dy in zip(sx, sy):
        if dx is dy:
            continue
        # Need to try to find that they are the same shape. We
        # need to compare the full graph. It could be slow. So I
        # just implement for now the case of Shape_i.
        if not dx.owner or not dy.owner:
            return False
        if (not isinstance(dx.owner.op, Shape_i) or
                not isinstance(dy.owner.op, Shape_i)):
            return False
        opx = dx.owner.op
        opy = dy.owner.op
        if not (opx.i == opy.i):
            return False
        # FB I'm not sure if this handle correctly constants.
        if dx.owner.inputs[0] == dy.owner.inputs[0]:
            continue
        # To be sure to cover all case, call equal_computation.
        # Can't use theano.gof.graph.is_same_graph(dx, dy)
        # As it currently expect that dx and dy aren't in a FunctionGraph
        from theano.scan_module.scan_utils import equal_computations
        if not equal_computations([dx], [dy]):
            return False
    return True
class ShapeOptimizer(Optimizer):
    """Optimizer that serves to add ShapeFeature as an fgraph feature."""

    def add_requirements(self, fgraph):
        # Attaching the feature is the whole purpose of this optimizer.
        fgraph.attach_feature(ShapeFeature())

    def apply(self, fgraph):
        # No graph rewriting here; the attached feature does the tracking.
        pass
class UnShapeOptimizer(Optimizer):
    """Optimizer remove ShapeFeature as an fgraph feature."""

    def apply(self, fgraph):
        # Snapshot the features to remove first: remove_feature mutates
        # fgraph._features, and mutating a list while iterating over it
        # can silently skip elements.
        to_remove = [feature for feature in fgraph._features
                     if isinstance(feature, ShapeFeature)]
        for feature in to_remove:
            fgraph.remove_feature(feature)
# Register it after merge1 optimization at 0. We don't want to track
# the shape of merged node.
theano.compile.mode.optdb.register('ShapeOpt', ShapeOptimizer(),
                                   0.1, 'fast_run', 'fast_compile')
# Not enabled by default for now. Some crossentropy opt use the
# shape_feature. They are at step 2.01. uncanonicalize is at step
# 3. After it goes to 48.5 that move to the gpu. So 10 seems reasonable.
theano.compile.mode.optdb.register('UnShapeOpt', UnShapeOptimizer(),
                                   10)
def local_elemwise_alloc_op(ElemwiseOP, AllocOP, DimShuffleOP):
    """Factory returning a local optimizer that strips Alloc (and
    DimShuffle-of-Alloc) inputs from an elemwise node, since the elemwise
    already carries the shape information. Parameterized on the Op classes
    so CPU and GPU variants can share the implementation."""
    def local_elemwise_alloc(node):
        """
        elemwise(alloc(x, shp), ..., y.TensorType(BROADCAST CONDITION))
          -> elemwise(x, y.TensorType(BROADCAST CONDITION))

        elemwise(dimshuffle(alloc(x, shp)),... ,y.TensorType(BROADCAST CONDITION))
          -> elemwise(x.dimshuffle(...), y.TensorType(BROADCAST CONDITION))

        BROADCAST CONDITION: the condition is that the one input that are
        not to be optimized to have the same broadcast pattern as the
        output.

        We can change the alloc by a dimshuffle as the elemwise
        already have the shape info.  The dimshuffle will be faster
        to exec.
        """
        if not isinstance(node.op, ElemwiseOP):
            return False

        if len(node.outputs) > 1:
            # Ensure all outputs have the same broadcast pattern
            # This is a supposition that I'm not sure is always true.
            assert all([o.type.broadcastable ==
                        node.outputs[0].type.broadcastable for o in
                        node.outputs[1:]])

        # The broadcast pattern of the ouptut must match the broadcast
        # pattern of at least one of the inputs.
        if not any([i.type.broadcastable ==
                    node.outputs[0].type.broadcastable for i in node.inputs]):
            return False

        def dimshuffled_alloc(i):
            # True when i is DimShuffle(Alloc(...)).
            return (isinstance(i.owner.op, DimShuffleOP) and
                    i.owner.inputs[0].owner and
                    isinstance(i.owner.inputs[0].owner.op, AllocOP))

        # At least one input must have an owner that is either a AllocOP or a
        # DimShuffleOP with an owner that is a AllocOP -- otherwise there is
        # nothing to optimize.
        if not any([i.owner and (isinstance(i.owner.op, AllocOP) or
                                 dimshuffled_alloc(i)) for i in node.inputs]):
            return False

        # Search for input that we can use as a baseline for the dimensions.
        assert_op_idx = -1
        for idx, i in enumerate(node.inputs):
            if i.type.broadcastable == node.outputs[0].type.broadcastable:
                # Prefer an input that is not a AllocOP nor a DimShuffleOP of a
                # AllocOP so that all allocs can be optimized.
                if not (i.owner and (isinstance(i.owner.op, AllocOP) or
                        dimshuffled_alloc(i))):
                    assert_op_idx = idx
                    break

        # It may be the case that only AllocOP and DimShuffleOP of AllocOP exist.
        if assert_op_idx < 0:
            # We want to optimize as many allocs as possible. When
            # there is more than one then do all but one.  number of
            # inputs with alloc or dimshuffle alloc
            l2 = [i for i in node.inputs
                  if (i.owner and (isinstance(i.owner.op, AllocOP) or
                      dimshuffled_alloc(i)))]
            # If only 1 alloc or dimshuffle alloc, it is the one we
            # will use for the shape. So no alloc would be removed.
            if len(l2) > 1:
                # l containt inputs with alloc or dimshuffle alloc
                # only.  Its length will always be at least one, as we
                # checked that before
                l = [idx for idx, i in enumerate(node.inputs)
                     if i.broadcastable == node.outputs[0].broadcastable]
                assert_op_idx = l[0]  # The first one is as good as any to use.
            else:
                # Nothing would be optimized!
                return False

        assert_op = node.inputs[assert_op_idx]
        cmp_op = assert_op
        new_i = []
        same_shape = node.fgraph.shape_feature.same_shape
        for i in node.inputs:
            # Remove alloc
            if (i.owner and isinstance(i.owner.op, AllocOP) and
                    i.owner.inputs[0].type != i.owner.outputs[0].type):
                # when i.owner.inputs[0].type == i.owner.outputs[0].type we
                # will remove that alloc later
                assert i.type.ndim == cmp_op.ndim
                if theano.config.experimental.local_alloc_elemwise_assert:
                    get_shape = node.fgraph.shape_feature.get_shape
                    cond = []
                    # Guard with a runtime assert on any dim we cannot
                    # prove equal statically.
                    for idx in xrange(i.type.ndim):
                        if (not i.type.broadcastable[idx] and
                                not same_shape(i, cmp_op, idx, idx)):
                            i_shp = get_shape(i, idx)
                            cmp_shp = get_shape(cmp_op, idx)
                            cond.append(T.eq(i_shp, cmp_shp))
                    if cond:
                        assert_op = assert_(assert_op, *cond)
                new_i.append(i.owner.inputs[0])

            # Remove Alloc in DimShuffle
            elif i.owner and dimshuffled_alloc(i):
                assert i.type.ndim == cmp_op.type.ndim
                if theano.config.experimental.local_alloc_elemwise_assert:
                    assert_cond = [T.eq(i.shape[idx], cmp_op.shape[idx])
                                   for idx in xrange(i.type.ndim)
                                   if not i.type.broadcastable[idx] and
                                   not same_shape(i, cmp_op, idx, idx)]
                    if assert_cond:
                        assert_op = assert_(assert_op, *assert_cond)
                alloc_input = i.owner.inputs[0].owner.inputs[0]
                if alloc_input.ndim != i.owner.inputs[0].ndim:
                    # The alloc can add dimension to the value
                    # We add a dimshuffle to add them.
                    # We let later optimization merge the multiple dimshuffle
                    nb_dim_to_add = i.owner.inputs[0].ndim - alloc_input.ndim
                    alloc_input = alloc_input.dimshuffle(
                        ['x'] * nb_dim_to_add +
                        list(range(alloc_input.ndim)))

                # We need to keep the dimshuffle. It could swap axes or
                # add dimensions anywhere.
                r_i = i.owner.op(alloc_input)

                # Copy stack trace from i to new_i
                copy_stack_trace(i, r_i)
                new_i.append(r_i)
            else:
                new_i.append(i)
        new_i[assert_op_idx] = assert_op

        ret = node.op(*new_i, return_list=True)

        # Copy over stack trace from previous outputs to new outputs.
        copy_stack_trace(node.outputs, ret)
        return ret

    return local_elemwise_alloc
# TODO, global optimizer that lift the assert to the beginning of the graph.
# TODO, optimize all inputs when possible -- currently when all inputs have
# an alloc all but one is optimized.
# Instantiate the factory above for the CPU Elemwise/Alloc/DimShuffle ops.
local_elemwise_alloc = register_specialize(
    gof.local_optimizer([T.Elemwise])(
        local_elemwise_alloc_op(T.Elemwise, T.Alloc, T.DimShuffle)),
    'local_alloc_elemwise')
@gof.local_optimizer([T.Elemwise])
def local_fill_sink(node):
    """
    f(fill(a, b), fill(c, d), e) -> fill(c, fill(a, f(b, d, e)))

    f need to be an elemwise that isn't a fill.
    """
    if (not hasattr(node, 'op') or
            not isinstance(node.op, T.Elemwise) or
            node.op == T.fill):
        return False
    models = []
    inputs = []
    for input in node.inputs:
        if input.owner and input.owner.op == T.fill:
            # Keep the fill's shape model, use its value as the input.
            models.append(input.owner.inputs[0])
            inputs.append(input.owner.inputs[1])
        else:
            inputs.append(input)
    if not models:
        return False
    c = node.op(*inputs)
    # Re-wrap the result in fills so the output type is unchanged.
    for model in models:
        if model.type != c.type:
            c = T.fill(model, c)

    # The newly created node c doesn't has 'clients',
    # so this iteration is took place with node.outputs[0]
    replacements = {node.outputs[0]: c}
    for client, cl_idx in node.outputs[0].clients:
        if (hasattr(client, 'op') and
                isinstance(client.op, T.Elemwise) and
                not client.op == T.fill):
            client_inputs = client.inputs[:]
            client_inputs[cl_idx] = c
            new_client = client.op(*client_inputs)

            # Add clients to new_client
            new_client.owner.outputs[0].clients = client.outputs[0].clients
            # Recurse so the fill keeps sinking through elemwise clients.
            r = local_fill_sink.transform(new_client.owner)
            if not r:
                continue
            replacements.update(r)
    return replacements
register_canonicalize(local_fill_sink)
@register_specialize
@register_stabilize
# @register_canonicalize  # We make full pass after the canonizer phase.
@gof.local_optimizer([T.fill])
def local_fill_to_alloc(node):
    """fill(s,v) -> alloc(v, shape(s))

    This is an important optimization because with the shape_to_shape_i
    optimization, the dependency on 's' is often removed.
    """
    if node.op == T.fill:
        r, v = node.inputs
        if v.type == node.outputs[0].type:
            # this is a useless fill, erase it.
            rval = [v]
        elif v.type.broadcastable == node.outputs[0].type.broadcastable:
            # this is a cast
            rval = [T.cast(v, node.outputs[0].type.dtype)]
        elif r.type.broadcastable == node.outputs[0].type.broadcastable:
            # we are broadcasting v somehow, but not r
            o = broadcast_like(v, r, node.fgraph, dtype=v.dtype)
            copy_stack_trace(node.outputs[0], o)
            rval = [o]
        else:
            # we are broadcasting both v and r,
            # the output shape must be computed
            #
            # TODO: implement this case (including a test!)
            #
            #  I think the strategy should be to extend the shorter
            #  shape vector with 1s (how?) and then take the
            #  elementwise max of the two.  - how to flag an error of
            #  shape mismatch where broadcasting should be illegal?
            return
            # TODO: cut out un-necessary dimshuffles of v

        assert rval[0].type == node.outputs[0].type, (
            'rval', rval[0].type, 'orig', node.outputs[0].type, 'node',
            node,)  # theano.printing.debugprint(node.outputs[0], file='str'))
        return rval
# Register this after stabilize at 1.5 to make sure stabilize don't
# get affected by less canonicalized graph due to alloc.
compile.optdb.register('local_fill_to_alloc',
                       in2out(local_fill_to_alloc),
                       1.51, 'fast_run')
# Needed to clean some extra alloc added by local_fill_to_alloc
compile.optdb.register('local_elemwise_alloc',
                       in2out(local_elemwise_alloc),
                       1.52, 'fast_run')
@register_canonicalize("fast_compile")
@register_useless
@gof.local_optimizer([T.fill])
def local_useless_fill(node):
    """fill(s,v) -> v

    This optimization is only needed in FAST_COMPILE to make the code
    more readable. Normally, it is done by the local_fill_to_alloc
    opt.
    """
    if node.op != T.fill:
        return
    _, value = node.inputs
    if value.type == node.outputs[0].type:
        # The fill changes nothing about value: drop it.
        # No stack trace needs copying for an identity replacement.
        return [value]
@register_specialize
@register_stabilize
@register_canonicalize
@register_useless
@gof.local_optimizer([T.alloc])
def local_useless_alloc(node):
    """
    If the input type is the same as the output type (dtype and broadcast)
    there is no change in the shape of the input. So this is just a simple copy
    of the input. This is not needed.
    """
    if not isinstance(node.op, Alloc):
        return False
    inp = node.inputs[0]
    out = node.outputs[0]
    # Same dtype and broadcast pattern: the alloc is a no-op copy.
    if inp.type == out.type:
        # No stack trace needs copying for an identity replacement.
        return [inp]
@register_specialize
@register_stabilize
@register_canonicalize
@gof.local_optimizer([T.alloc])
def local_canonicalize_alloc(node):
    """If the input type is the same as the output type (dtype and broadcast)
    there is no change in the shape of the input. So this is just a simple copy
    of the input. This is not needed. (as local_useless_alloc)

    Also, it will canonicalize alloc by creating Dimshuffle after the
    alloc to introduce the dimensions of constant size 1.

    See https://github.com/Theano/Theano/issues/4072 to know why this
    is needed.
    """
    op = node.op
    if not isinstance(op, Alloc):
        return False

    input = node.inputs[0]
    output = node.outputs[0]

    # Check if dtype and broadcast remain the same.
    if input.type == output.type:
        # We don't need to copy over any stack traces here
        return [input]

    # Allow local_merge_alloc to do its work first
    clients = getattr(output, 'clients', [])
    for client, i in clients:
        if client != "output" and isinstance(client.op, Alloc):
            return

    # Check if alloc adds a broadcastable dimension with shape 1.
    output_shape = node.inputs[1:]
    num_dims_with_size_1_added_to_left = 0
    # Count the run of constant-1 dims the alloc prepends on the left.
    for i in range(len(output_shape) - input.ndim):
        if extract_constant(output_shape[i], only_process_constants=True) == 1:
            num_dims_with_size_1_added_to_left += 1
        else:
            break
    new_output_shape = output_shape[num_dims_with_size_1_added_to_left:]
    if num_dims_with_size_1_added_to_left > 0 and len(new_output_shape) >= input.ndim:
        if output.broadcastable[num_dims_with_size_1_added_to_left:] == input.broadcastable:
            # The remaining alloc would be a no-op: use input directly.
            inner = input
        else:
            inner = op(*([input] + new_output_shape))
        # Re-introduce the stripped size-1 dims via a DimShuffle.
        dimshuffle_new_order = (['x'] * num_dims_with_size_1_added_to_left +
                                list(xrange(len(new_output_shape))))
        return [DimShuffle(inner.type.broadcastable, dimshuffle_new_order)(inner)]
# Don't register by default.
@gof.local_optimizer([T.AllocEmpty])
def local_alloc_empty_to_zeros(node):
    """This convert AllocEmpty to Alloc of 0.

    This help investigate NaN with NanGuardMode. Not registered by
    default. To activate it, use the Theano flag
    optimizer_including=alloc_empty_to_zeros. This also enable
    the GPU version of this optimizations.
    """
    if not isinstance(node.op, T.AllocEmpty):
        return
    # node.inputs are the shape elements of the AllocEmpty.
    return [T.zeros(node.inputs, dtype=node.outputs[0].dtype)]
compile.optdb.register('local_alloc_empty_to_zeros',
                       in2out(local_alloc_empty_to_zeros),
                       # After move to gpu and merge2, before inplace.
                       49.3,
                       'alloc_empty_to_zeros',)
@register_specialize
@register_canonicalize
@gof.local_optimizer([T.Shape])
def local_shape_to_shape_i(node):
    """Replace shape(x) by the ShapeFeature's tracked per-dim shape vector."""
    if node.op != T.shape:
        return
    # This optimization needs ShapeOpt and fgraph.shape_feature
    fgraph = node.fgraph
    if not hasattr(fgraph, 'shape_feature'):
        return
    ret = fgraph.shape_feature.make_vector_shape(node.inputs[0])

    # We need to copy over stack trace from input to output
    copy_stack_trace(node.outputs[0], ret)
    return [ret]
# TODO: Not sure what type of node we are expecting here
@register_specialize
@register_canonicalize
@gof.local_optimizer(None)
def local_track_shape_i(node):
    """Apply a scheduled Shape_i replacement recorded by the ShapeFeature."""
    shape_feature = getattr(node.fgraph, 'shape_feature', None)
    if shape_feature is None:
        return
    if node not in shape_feature.scheduled:
        return
    # Don't unschedule node as it could be reinserted in the
    # fgraph as we don't change it in the shapefeature internal
    # structure.
    assert isinstance(node.op, Shape_i)
    replacement = shape_feature.scheduled[node]
    return [shape_feature.shape_of[replacement][node.op.i]]
@register_specialize
@register_canonicalize
@gof.local_optimizer([Subtensor])
def local_subtensor_inc_subtensor(node):
    """
    Subtensor(SetSubtensor(x, y, idx), idx) -> y

    Only applies when the IncSubtensor is a *set* (not an increment) and
    the two index lists match exactly.
    """
    if isinstance(node.op, Subtensor):
        x = node.inputs[0]
        if not x.owner or not isinstance(x.owner.op, IncSubtensor):
            return
        if not x.owner.op.set_instead_of_inc:
            return

        if (x.owner.inputs[2:] == node.inputs[1:] and
                tuple(x.owner.op.idx_list) == tuple(node.op.idx_list)):
            out = node.outputs[0]
            y = x.owner.inputs[1]
            # If the dtypes differ, cast y into x.dtype
            if x.dtype != y.dtype:
                y = y.astype(x.dtype)
            if out.type == y.type:
                # if x[idx] and y have the same type, directly return y
                return [y]
            else:
                # The difference is related to broadcasting pattern
                assert out.broadcastable != y.broadcastable
                # We have to alloc y to the shape of x[idx]
                x_subtensor = node.op(x.owner.inputs[0], *x.owner.inputs[2:])
                return [T.alloc(y, *x_subtensor.shape)]
        else:
            return
@register_specialize
@register_canonicalize
@gof.local_optimizer([Subtensor])
def local_subtensor_remove_broadcastable_index(node):
    """
    Remove broadcastable dimension with index 0 or -1
    a[:,:,:,0] -> a.dimshuffle(0,1,2), when
        a.broadcastable = (False, False, False, True)
    a[0,:,-1,:] -> a.dimshuffle(1,3), when
        a.broadcastable = (True, False, True, False)

    Safe because a broadcastable dimension has length 1, so index 0 and
    -1 both denote its only element.
    """
    if isinstance(node.op, Subtensor):
        idx = node.op.idx_list
    else:
        return

    remove_dim = []
    node_inputs_idx = 1
    for dim, elem in enumerate(idx):
        if isinstance(elem, (scalar.Scalar)):
            # The idx is a Scalar, ie a Type. This means the actual index
            # is contained in node.inputs[1]
            dim_index = node.inputs[node_inputs_idx]
            if type(dim_index) == theano.scalar.basic.ScalarConstant:
                dim_index = dim_index.value
            if dim_index in [0, -1] and node.inputs[0].broadcastable[dim]:
                remove_dim.append(dim)
                node_inputs_idx += 1
            else:
                return
        elif isinstance(elem, slice):
            # Only full slices are supported; bail on anything partial.
            if elem != slice(None):
                return
        elif isinstance(elem, (integer_types, numpy.integer)):
            if elem in [0, -1] and node.inputs[0].broadcastable[dim]:
                remove_dim.append(dim)
        else:
            raise TypeError('case not expected')

    if len(remove_dim) == 0:
        return
    else:
        all_dim = range(node.inputs[0].ndim)
        remain_dim = [x for x in all_dim if x not in remove_dim]
        return [node.inputs[0].dimshuffle(tuple(remain_dim))]
@register_specialize
@register_canonicalize('fast_compile_gpu')
@register_useless
@gof.local_optimizer([Subtensor, AdvancedSubtensor1])
def local_subtensor_make_vector(node):
    """
    Replace all subtensor(make_vector) like:
    [a,b,c][0] -> a
    [a,b,c][0:2] -> [a,b]

    Replace all AdvancedSubtensor1(make_vector) like:
    [a,b,c][[0,2]] -> [a,c]

    We can do this for constant indexes.
    """
    x = node.inputs[0]
    if not x.owner or x.owner.op != make_vector:
        return

    if isinstance(node.op, Subtensor):
        # This optimization needs ShapeOpt and fgraph.shape_feature
        try:
            idx, = node.op.idx_list
        except Exception:
            # 'how can you have multiple indexes into a shape?'
            raise

        if isinstance(idx, (scalar.Scalar, T.TensorType)):
            # The idx is a Scalar, ie a Type. This means the actual index
            # is contained in node.inputs[1]
            old_idx, idx = idx, node.inputs[1]
            assert idx.type == old_idx
    elif isinstance(node.op, AdvancedSubtensor1):
        idx = node.inputs[1]
    else:
        return

    if isinstance(idx, (integer_types, numpy.integer)):
        # We don't need to copy over any stack traces here
        return [x.owner.inputs[idx]]
    elif isinstance(idx, Variable):
        if idx.ndim == 0:
            # if it is a constant we can do something with it
            try:
                v = get_scalar_constant_value(idx, only_process_constants=True)
                if isinstance(v, numpy.integer):
                    # Python 2.4 wants to index only with Python integers
                    v = int(v)
                # We don't need to copy over any stack traces here
                try:
                    ret = [x.owner.inputs[v]]
                except IndexError:
                    raise NotScalarConstantError("Bad user graph!")
                return ret
            except NotScalarConstantError:
                pass
        elif idx.ndim == 1 and isinstance(idx, T.Constant):
            values = list(map(int, list(idx.value)))
            ret = make_vector(*[x.owner.inputs[v] for v in values])

            # Copy over stack trace from previous output to new output
            copy_stack_trace(node.outputs[0], ret)
            ret = T.patternbroadcast(ret, node.outputs[0].broadcastable)
            return [ret]
        else:
            raise TypeError('case not expected')
    elif isinstance(idx, slice):
        # it is a slice of ints and/or Variables
        # check subtensor to see if it can contain constant variables, and if
        # it can, then try to unpack them.
        try:
            const_slice = node.op.get_constant_idx(node.inputs,
                                                   allow_partial=False)[0]
            ret = make_vector(*x.owner.inputs[const_slice])

            # Copy over stack trace from previous outputs to new output
            copy_stack_trace(node.outputs, ret)
            ret = T.patternbroadcast(ret, node.outputs[0].broadcastable)
            return [ret]
        except NotScalarConstantError:
            pass
    else:
        raise TypeError('case not expected')
# TODO: the other optimization for and, or, xor, le and ge see ticket #496.
@register_useless
@register_canonicalize('fast_compile')
@register_specialize
@gof.local_optimizer([T.Elemwise])
def local_useless_elemwise(node):
    """
    eq(x,x) -> 1
    neq(x,x) -> 0
    mul(x) -> x
    add(x) -> x
    identity(x) -> x
    and(x,1) -> x
    and(x,0) -> zeros_like(x)
    or(x,0) -> x
    or(x,1) -> ones_like(x)
    xor(x,x) -> zeros_like(x)
    """
    if isinstance(node.op, T.Elemwise):
        # We call zeros_like and one_like with opt=True to generate a
        # cleaner graph.
        dtype = node.outputs[0].dtype

        if node.op.scalar_op == theano.scalar.eq and len(node.inputs) == 2:
            if node.inputs[0] == node.inputs[1]:
                # it is the same var in the graph. That will always be true
                ret = T.ones_like(node.inputs[0], dtype=dtype, opt=True)

                # Copy stack trace from input to constant output
                copy_stack_trace(node.outputs[0], ret)
                return [ret]
        elif node.op.scalar_op == theano.scalar.neq and len(node.inputs) == 2:
            if node.inputs[0] == node.inputs[1]:
                # it is the same var in the graph. That will always be false
                ret = T.zeros_like(node.inputs[0], dtype=dtype, opt=True)

                # Copy stack trace from input to constant output
                copy_stack_trace(node.outputs[0], ret)
                return [ret]

        elif node.op.scalar_op == theano.scalar.mul and len(node.inputs) == 1:
            # No need to copy over any stack trace
            return [node.inputs[0]]

        elif node.op.scalar_op == theano.scalar.add and len(node.inputs) == 1:
            # No need to copy over any stack trace
            return [node.inputs[0]]
        elif (node.op.scalar_op == theano.scalar.identity and
                len(node.inputs) == 1):
            return [node.inputs[0]]

        elif (isinstance(node.op.scalar_op, scalar.AND) and
                len(node.inputs) == 2):

            if isinstance(node.inputs[0], T.TensorConstant):
                const_val = T.extract_constant(node.inputs[0], only_process_constants=True)
                if not isinstance(const_val, Variable):
                    if const_val == 0:
                        return [T.zeros_like(node.inputs[1], dtype=dtype,
                                             opt=True)]
                    else:
                        # and(x, nonzero-constant) -> x
                        return [node.inputs[1].astype(node.outputs[0].dtype)]

            if isinstance(node.inputs[1], T.TensorConstant):
                const_val = T.extract_constant(node.inputs[1], only_process_constants=True)
                if not isinstance(const_val, Variable):
                    if const_val == 0:
                        return [T.zeros_like(node.inputs[0], dtype=dtype,
                                             opt=True)]
                    else:
                        return [node.inputs[0].astype(node.outputs[0].dtype)]

        elif (isinstance(node.op.scalar_op, scalar.OR) and
                len(node.inputs) == 2):

            if isinstance(node.inputs[0], T.TensorConstant):
                const_val = T.extract_constant(node.inputs[0], only_process_constants=True)
                if not isinstance(const_val, Variable):
                    if const_val == 0:
                        return [node.inputs[1].astype(node.outputs[0].dtype)]
                    else:
                        # or(x, nonzero-constant) -> ones
                        return [T.ones_like(node.inputs[1], dtype=dtype,
                                            opt=True)]

            if isinstance(node.inputs[1], T.TensorConstant):
                const_val = T.extract_constant(node.inputs[1], only_process_constants=True)
                if not isinstance(const_val, Variable):
                    if const_val == 0:
                        return [node.inputs[0].astype(node.outputs[0].dtype)]
                    else:
                        return [T.ones_like(node.inputs[0], dtype=dtype,
                                            opt=True)]

        elif (isinstance(node.op.scalar_op, scalar.XOR) and
                len(node.inputs) == 2):
            if node.inputs[0] is node.inputs[1]:
                return [T.zeros_like(node.inputs[0], dtype=dtype, opt=True)]
@register_specialize
@gof.local_optimizer([T.Elemwise])
def local_alloc_unary(node):
    """unary(alloc(x, shp)) -> alloc(unary(x), shp)"""
    if not (isinstance(node.op, T.Elemwise) and len(node.inputs) == 1):
        return
    a = node.inputs[0]
    if not (a.owner and isinstance(a.owner.op, T.Alloc)):
        return
    x = a.owner.inputs[0]
    shp = a.owner.inputs[1:]
    # Apply the unary op to the scalar-ish value before the alloc.
    v = node.op(x)
    copy_stack_trace(node.outputs[0], v)
    # Cast back to the original output dtype, then re-alloc to shape.
    ret = T.alloc(T.cast(v, node.outputs[0].dtype), *shp)
    copy_stack_trace([node.outputs[0], a], ret)
    return [ret]
@register_canonicalize
@register_specialize
@gof.local_optimizer([T.Elemwise])
def local_cast_cast(node):
    """cast(cast(x, dtype1), dtype2)

    when those contrain:
    dtype1 == dtype2

    TODO: the base dtype is the same (int, uint, float, complex)
          and the first cast cause an upcast.
    """
    op = node.op
    if not (isinstance(op, T.Elemwise) and
            isinstance(op.scalar_op, scalar.Cast)):
        return
    inner = node.inputs[0]
    owner = inner.owner
    if not (owner and isinstance(owner.op, T.Elemwise) and
            isinstance(owner.op.scalar_op, scalar.Cast)):
        return
    # Both casts target the same dtype: the outer one is redundant.
    if op.scalar_op.o_type == owner.op.scalar_op.o_type:
        # No stack trace needs copying for an identity replacement.
        return [inner]
@register_canonicalize
@register_specialize
@gof.local_optimizer([T.Elemwise])
def local_func_inv(node):
    """
    Check for two consecutive operations that are functional inverses
    and remove them from the function graph.
    """
    inv_pairs = (
        (basic.Deg2Rad, basic.Rad2Deg),
        (basic.Cosh, basic.ArcCosh),
        (basic.Tanh, basic.ArcTanh),
        (basic.Sinh, basic.ArcSinh),
        (basic.Conj, basic.Conj),
        (basic.Neg, basic.Neg),
        (basic.Inv, basic.Inv),
    )
    if not isinstance(node.op, T.Elemwise):
        return
    x = node.inputs[0]
    if not (x.owner and isinstance(x.owner.op, T.Elemwise)):
        return

    outer_op = node.op.scalar_op
    inner_op = x.owner.op.scalar_op
    # The rewrite is trivial, so the earlier stack trace stays valid.
    if any(is_inverse_pair(outer_op, inner_op, pair) for pair in inv_pairs):
        return x.owner.inputs
    return
def is_inverse_pair(node_op, prev_op, inv_pair):
    """
    Given two consecutive operations, check if they are the
    provided pair of inverse functions (in either order).
    """
    op0, op1 = inv_pair
    forward = isinstance(node_op, op0) and isinstance(prev_op, op1)
    backward = isinstance(node_op, op1) and isinstance(prev_op, op0)
    return forward or backward
class Assert(T.Op):
    """
    Implements assertion in a computational graph.

    Returns the first parameter if the condition is true, otherwise, triggers
    AssertionError.

    Notes
    -----
    This Op is a debugging feature. It can be removed from the graph
    because of optimizations, and can hide some possible optimizations to
    the optimizer. Specifically, removing happens if it can be determined
    that condition will always be true. Also, the output of the Op must be
    used in the function computing the graph, but it doesn't have to be
    returned.

    Examples
    --------
    >>> import theano
    >>> T = theano.tensor
    >>> x = T.vector('x')
    >>> assert_op = T.opt.Assert()
    >>> func = theano.function([x], assert_op(x, x.size<2))

    """
    _f16_ok = True
    __props__ = ('msg',)
    # Output 0 is a view of input 0 (the value passes through unchanged).
    view_map = {0: [0]}
    check_input = False

    def __init__(self, msg="Theano Assert failed!"):
        self.msg = msg

    def __setstate__(self, attrs):
        # Backward compatibility for pickles made before `msg` existed.
        self.__dict__.update(attrs)
        if not hasattr(self, 'msg'):
            self.msg = "Theano Assert failed!"

    def make_node(self, value, *conds):
        """Build the Apply node: value plus scalar boolean conditions."""
        if not isinstance(value, Variable):
            value = T.as_tensor_variable(value)
        cond = [T.as_tensor_variable(c) for c in conds]
        assert numpy.all([c.type.ndim == 0 for c in cond])
        return gof.Apply(self, [value] + cond, [value.type()])

    def perform(self, node, inputs, out_):
        out, = out_
        v = inputs[0]
        out[0] = v
        assert numpy.all(inputs[1:]), self.msg

    def grad(self, input, output_gradients):
        # Gradient flows only through the value; conditions are disconnected.
        return output_gradients + [DisconnectedType()()] * (len(input) - 1)

    def connection_pattern(self, node):
        return [[1]] + [[0]] * (len(node.inputs) - 1)

    def c_code(self, node, name, inames, onames, sub):
        """Generate C code that checks each condition, then views input 0."""
        value = inames[0]
        out = onames[0]
        check = []
        fail = sub['fail']
        msg = self.msg.replace('"', '\\"').replace('\n', '\\n')
        for idx in xrange(len(inames) - 1):
            i = inames[idx + 1]
            dtype = node.inputs[idx + 1].dtype
            check.append('if(!((npy_%(dtype)s*)PyArray_DATA(%(i)s))[0])'
                         '{PyErr_SetString(PyExc_AssertionError,"%(msg)s");'
                         '%(fail)s}' % locals())
        check = "\n".join(check)
        return """
        %(check)s
        Py_XDECREF(%(out)s);
        %(out)s = %(value)s;
        Py_INCREF(%(value)s);
        """ % locals()

    def c_code_cache_version(self):
        return (3, 0)

    def infer_shape(self, node, input_shapes):
        # Output shape matches the asserted value's shape.
        return [input_shapes[0]]
# Module-level singleton instance, reused by the optimizations below.
assert_ = Assert()
# Unittest.assert_ is a deprecated name for assertTrue.
# 2to3 convert theano.tensor.opt.assert_ to theano.tensor.opt.assertTrue
# So I define a new name as a work around.
assert_op = assert_
@register_specialize
@gof.local_optimizer([Assert])
def local_remove_useless_assert(node):
    """
    Drop assert conditions that are statically known to be a true scalar;
    remove the Assert node entirely when no condition remains.
    """
    if isinstance(node.op, Assert):
        # Conditions that must be kept (unknown at compile time, non-scalar,
        # or statically false).
        cond = []
        for c in node.inputs[1:]:
            try:
                const = get_scalar_constant_value(c)

                if 0 != const.ndim or const == 0:
                    # Should we raise an error here? How to be sure it
                    # is not caught?
                    cond.append(c)
            except NotScalarConstantError:
                cond.append(c)

        if len(cond) == 0:
            # All conditions were statically true: bypass the Assert.
            # We don't need to copy over any stack traces here
            return [node.inputs[0]]
        if len(cond) != len(node.inputs) - 1:
            # Rebuild the Assert with only the remaining conditions.
            ret = assert_(node.inputs[0], *cond)

            # We copy over stack trace from the output of the original assert
            copy_stack_trace(node.outputs[0], ret)
            return [ret]
@gof.local_optimizer([Assert])
def local_remove_all_assert(node):
    """An optimization disabled by default that removes all asserts from
    the graph.

    Notes
    -----
    See the :ref:`unsafe` section to know how to enable it.
    """
    if isinstance(node.op, Assert):
        # Replace the Assert by the value it guards; stack traces need
        # not be copied for this trivial substitution.
        return [node.inputs[0]]
    return
# Disabled by default. The optimization is registered under the 'unsafe'
# tag in every optimization phase, so enabling that tag strips asserts
# regardless of which phases run. use_db_name_as_tag=False presumably
# keeps the phase name from acting as an enabling tag -- TODO confirm
# against the optdb documentation.
compile.optdb['canonicalize'].register('local_remove_all_assert',
                                       local_remove_all_assert,
                                       'unsafe',
                                       use_db_name_as_tag=False)
compile.optdb['stabilize'].register('local_remove_all_assert',
                                    local_remove_all_assert,
                                    'unsafe',
                                    use_db_name_as_tag=False)
compile.optdb['specialize'].register('local_remove_all_assert',
                                     local_remove_all_assert,
                                     'unsafe',
                                     use_db_name_as_tag=False)
compile.optdb['useless'].register('local_remove_all_assert',
                                  local_remove_all_assert,
                                  'unsafe',
                                  use_db_name_as_tag=False)
############################
# Constant Canonicalization #
############################
@register_canonicalize
@gof.local_optimizer([T.Elemwise])
def local_upcast_elemwise_constant_inputs(node):
    """This explicitly upcasts constant inputs to elemwise Ops, when
    those Ops do implicit upcasting anyway.

    Rationale: it helps merge things like (1-x) and (1.0 - x).
    """
    if len(node.outputs) > 1:
        return
    try:
        shape_i = node.fgraph.shape_feature.shape_i
    except AttributeError:
        # No shape feature attached to this fgraph; we can still handle
        # scalar (all-broadcastable) constants below.
        shape_i = None
    if isinstance(node.op, T.Elemwise):
        scalar_op = node.op.scalar_op
        # print "aa", scalar_op.output_types_preference
        if (getattr(scalar_op, 'output_types_preference', None)
                in (T.scal.upgrade_to_float, T.scal.upcast_out)):
            # this is the kind of op that we can screw with the input
            # dtypes by upcasting explicitly
            output_dtype = node.outputs[0].type.dtype
            new_inputs = []
            for i in node.inputs:
                if i.type.dtype == output_dtype:
                    new_inputs.append(i)
                else:
                    try:
                        # works only for scalars
                        cval_i = get_scalar_constant_value(i,
                                                           only_process_constants=True)
                        if all(i.broadcastable):
                            # Scalar constant: cast and restore the rank
                            # with broadcastable leading dimensions.
                            new_inputs.append(T.shape_padleft(
                                T.cast(cval_i, output_dtype),
                                i.ndim))
                        else:
                            if shape_i is None:
                                return
                            # Non-broadcastable: rebuild with alloc at the
                            # symbolic runtime shape of the original input.
                            new_inputs.append(
                                T.alloc(T.cast(cval_i, output_dtype),
                                        *[shape_i(d)(i)
                                          for d in xrange(i.ndim)]))
                            # print >> sys.stderr, "AAA",
                            # *[Shape_i(d)(i) for d in xrange(i.ndim)]
                    except NotScalarConstantError:
                        # for the case of a non-scalar
                        if isinstance(i, T.TensorConstant):
                            new_inputs.append(T.cast(i, output_dtype))
                        else:
                            new_inputs.append(i)

            if new_inputs != node.inputs:
                rval = [node.op(*new_inputs)]
                if rval[0].type != node.outputs[0].type:
                    # This can happen for example when floatX=float32
                    # and we do the true division between an int64
                    # and a constant that will get typed as int8.
                    # As this is just to allow merging more case, if
                    # the upcast don't work, we can just skip it.
                    return

                # Copy over output stacktrace from before upcasting
                copy_stack_trace(node.outputs[0], rval)
                return rval
##################
# Subtensor opts #
##################
@register_useless
@register_canonicalize
@register_specialize
@gof.local_optimizer([IncSubtensor])
def local_useless_inc_subtensor(node):
    """
    Remove IncSubtensor, when we overwrite the full inputs with the
    new value.
    """
    if not isinstance(node.op, IncSubtensor):
        return
    if node.op.set_instead_of_inc is False:
        # This is an IncSubtensor, so the init value must be zeros
        try:
            c = get_scalar_constant_value(node.inputs[0],
                                          only_process_constants=True)
            if c != 0:
                return
        except NotScalarConstantError:
            return
    if (node.inputs[0].ndim != node.inputs[1].ndim or
            node.inputs[0].broadcastable != node.inputs[1].broadcastable):
        # FB: I didn't check if this case can happen, but this opt
        # don't support it.
        return
    # We have a SetSubtensor or an IncSubtensor on zeros
    # Is this IncSubtensor useful?
    # Check that we keep all the original data.
    # Put the constant inputs in the slice.
    idx_cst = get_idx_list(node.inputs[1:], node.op.idx_list)
    # Accept only full slices (start/stop None) with step None or -1.
    if all(isinstance(e, slice) and e.start is None and
           e.stop is None and (e.step is None or T.extract_constant(
               e.step, only_process_constants=True) == -1)
           for e in idx_cst):
        # IncSubtensor broadcast node.inputs[1] on node.inputs[0]
        # based on run time shapes, so we must check they are the same.
        if not hasattr(node.fgraph, 'shape_feature'):
            return
        if not node.fgraph.shape_feature.same_shape(node.inputs[0],
                                                    node.inputs[1]):
            return
        # There is no reverse, so we don't need a replacement.
        if all(e.step is None
               for e in node.op.idx_list):
            # They are the same shape, so we can remove this IncSubtensor
            return [node.inputs[1]]
        # Some axes had step == -1: re-apply the (reversing) subtensor
        # to the new value instead of the overwritten input.
        ret = Subtensor(node.op.idx_list)(*node.inputs[1:])
        # Copy over previous output stacktrace
        copy_stack_trace(node.outputs, ret)
        return [ret]
@register_canonicalize
@gof.local_optimizer([AdvancedIncSubtensor1])
def local_set_to_inc_subtensor(node):
    """
    AdvancedIncSubtensor1(x, x[ilist]+other, ilist, set_instead_of_inc=True) ->
    AdvancedIncSubtensor1(x, other, ilist, set_instead_of_inc=False)
    """
    if (isinstance(node.op, AdvancedIncSubtensor1) and
            node.op.set_instead_of_inc and
            node.inputs[1].owner and
            isinstance(node.inputs[1].owner.op, Elemwise) and
            isinstance(node.inputs[1].owner.op.scalar_op, scalar.Add)):
        addn = node.inputs[1].owner
        subn = None   # the x[ilist] AdvancedSubtensor1 apply, if found
        other = None  # the remaining addend
        if (addn.inputs[0].owner and
                isinstance(addn.inputs[0].owner.op, AdvancedSubtensor1)):
            subn = addn.inputs[0].owner
            other = addn.inputs[1]
        elif (addn.inputs[1].owner and
                isinstance(addn.inputs[1].owner.op, AdvancedSubtensor1)):
            subn = addn.inputs[1].owner
            other = addn.inputs[0]
        else:
            return
        # The subtensor must read the same indices from the same tensor
        # that the set operation writes into.
        if (subn.inputs[1] != node.inputs[2] or
                subn.inputs[0] != node.inputs[0]):
            return
        ret = advanced_inc_subtensor1(node.inputs[0], other, node.inputs[2])
        # Copy over previous output stacktrace
        # Julian: I'm not sure about this at all...
        copy_stack_trace(node.outputs, ret)
        return [ret]
@register_useless
@register_canonicalize
@register_specialize
@gof.local_optimizer([Subtensor])
def local_useless_slice(node):
    """
    Remove Subtensor of the form X[0, :] -> X[0]
    """
    if isinstance(node.op, Subtensor):
        slices = get_idx_list(node.inputs, node.op.idx_list)
        # Index (from the end) of the first trailing slice that is not a
        # full "[:]" slice; everything after it can be dropped.
        last_slice = len(slices)
        for s in slices[::-1]:
            # check if slice and then check slice indices
            if (isinstance(s, slice) and s.start is None and s.stop is None and
                    (s.step is None or T.extract_constant(
                        s.step, only_process_constants=True) == 1)):
                last_slice -= 1
            else:
                break
        # check if we removed something
        if last_slice < len(slices):
            subtens = Subtensor(slices[:last_slice])
            # Collect the symbolic inputs needed by the shortened index list.
            sl_ins = Subtensor.collapse(slices[:last_slice],
                                        lambda x: isinstance(x, T.Variable))
            out = subtens(node.inputs[0], *sl_ins)
            # Copy over previous output stacktrace
            copy_stack_trace(node.outputs, out)
            return [out]
@register_canonicalize
@register_specialize
@gof.local_optimizer([Subtensor, AdvancedSubtensor1])
def local_useless_subtensor(node):
"""
Remove Subtensor/AdvancedSubtensor1 if it takes the full input. In the
AdvancedSubtensor1 case, the full input is taken when the indices are
equivalent to `arange(0, input.shape[0], 1)` using either an explicit
list/vector or the ARange op.
"""
# If the optimization is tried over a node that is not a part of graph before
if not hasattr(node, 'fgraph'):
return
# This optimization needs ShapeOpt and fgraph.shape_feature
if not hasattr(node.fgraph, 'shape_feature'):
return
shape_of = node.fgraph.shape_feature.shape_of
if isinstance(node.op, Subtensor):
cdata = node.op.get_constant_idx(node.inputs, allow_partial=True,
only_process_constants=True)
|
[
" for pos, idx in enumerate(cdata):"
] | 10,029
|
lcc
|
python
| null |
0a0acfe5a6238a45947c2e198dabe87dee8487c858569c30
|
|
from django import forms
from django.forms import ValidationError
from django.contrib.auth.models import Group
from common.forms import ModelFormWithHelper
from common.helpers import SubmitCancelFormHelper
from community.constants import COMMUNITY_ADMIN, COMMUNITY_PRESENCE_CHOICES
from community.models import Community, CommunityPage, RequestCommunity
from community.utils import get_groups
from users.models import SystersUser
class AddCommunityForm(ModelFormWithHelper):
    """Form used by an admin to create a new Community.

    The creating admin must be supplied via the ``admin`` keyword
    argument and is attached to the instance on save.
    """
    class Meta:
        model = Community
        fields = ('name', 'slug', 'order', 'location', 'email', 'mailing_list',
                  'parent_community', 'website', 'facebook', 'googleplus',
                  'twitter')
        helper_class = SubmitCancelFormHelper
        helper_cancel_href = "{% url 'index' %}"

    def __init__(self, *args, **kwargs):
        # Remove 'admin' before the base ModelForm sees the kwargs.
        self.admin = kwargs.pop('admin')
        super(AddCommunityForm, self).__init__(*args, **kwargs)

    def save(self, commit=True):
        """Attach the admin to the Community instance before saving."""
        community = super(AddCommunityForm, self).save(commit=False)
        community.admin = self.admin
        if commit:
            community.save()
        return community
class RequestCommunityForm(ModelFormWithHelper):
    """Form to request a new Community."""

    def __init__(self, *args, **kwargs):
        """Make several fields mandatory and render the social-presence
        choices as checkboxes."""
        self.user = kwargs.pop('user')
        super(RequestCommunityForm, self).__init__(*args, **kwargs)
        self.fields['social_presence'] = forms.MultipleChoiceField(
            choices=COMMUNITY_PRESENCE_CHOICES, label="Check off all \
            the social media accounts you can manage for your proposed community:",
            required=False, widget=forms.CheckboxSelectMultiple)
        for field_name in ('email', 'demographic_target_count', 'purpose',
                           'content_developer', 'selection_criteria',
                           'is_real_time'):
            self.fields[field_name].required = True

    class Meta:
        model = RequestCommunity
        fields = ('is_member', 'email_id', 'email', 'name', 'slug', 'order', 'location',
                  'type_community', 'other_community_type', 'parent_community',
                  'community_channel', 'mailing_list', 'website', 'facebook',
                  'googleplus', 'twitter', 'social_presence', 'other_account',
                  'demographic_target_count',
                  'purpose', 'is_avail_volunteer', 'count_avail_volunteer', 'content_developer',
                  'selection_criteria', 'is_real_time')
        helper_class = SubmitCancelFormHelper
        helper_cancel_href = "{% url 'index' %}"

    def clean_social_presence(self):
        """Converts the checkbox input into char to save it to the instance's field."""
        return ', '.join(str(choice)
                         for choice in self.cleaned_data['social_presence'])

    def save(self, commit=True):
        """Attach the requesting SystersUser to the instance before saving."""
        request = super(RequestCommunityForm, self).save(commit=False)
        request.user = SystersUser.objects.get(user=self.user)
        if commit:
            request.save()
        return request
class EditCommunityRequestForm(ModelFormWithHelper):
    """Form to edit a community request.

    Makes several fields required and renders the social-presence
    choices as checkboxes. Validates that the requested slug and order
    do not collide with an existing Community.
    """
    def __init__(self, *args, **kwargs):
        """Makes some fields required and modifies a field to use widget"""
        super(EditCommunityRequestForm, self).__init__(*args, **kwargs)
        self.fields['social_presence'] = forms.MultipleChoiceField(
            choices=COMMUNITY_PRESENCE_CHOICES, label="Check off all \
            the social media accounts you can manage for your proposed community:",
            required=False, widget=forms.CheckboxSelectMultiple)
        self.fields['email'].required = True
        self.fields['demographic_target_count'].required = True
        self.fields['purpose'].required = True
        self.fields['content_developer'].required = True
        self.fields['selection_criteria'].required = True
        self.fields['is_real_time'].required = True

    class Meta:
        model = RequestCommunity
        fields = ('is_member', 'email_id', 'email', 'name', 'slug', 'order', 'location',
                  'type_community', 'other_community_type', 'parent_community',
                  'community_channel', 'mailing_list', 'website', 'facebook',
                  'googleplus', 'twitter', 'social_presence', 'other_account',
                  'demographic_target_count',
                  'purpose', 'is_avail_volunteer', 'count_avail_volunteer', 'content_developer',
                  'selection_criteria', 'is_real_time')
        widgets = {'social_presence': forms.CheckboxSelectMultiple}
        helper_class = SubmitCancelFormHelper
        helper_cancel_href = "{% url 'view_community_request' community_request.slug %}"

    def clean_social_presence(self):
        """Converts the checkbox input into char to save it to the instance's field."""
        social_presence = ', '.join(
            map(str, self.cleaned_data['social_presence']))
        return social_presence

    def clean_slug(self):
        """Checks if the slug exists in the Community objects' slug.

        Bug fix: this previously compared the submitted slug against the
        Communities' *order* values (values_list('order')), so duplicate
        slugs were accepted and the error message listed orders. It now
        compares against the existing slugs.
        """
        slug = self.cleaned_data['slug']
        slug_community_values = Community.objects.all().values_list(
            'slug', flat=True)
        if slug in slug_community_values:
            msg = ("Slug by this value already exists. Please choose a "
                   "different slug other than {0}!")
            string_slug_values = ', '.join(map(str, slug_community_values))
            raise ValidationError(msg.format(string_slug_values))
        else:
            return slug

    def clean_order(self):
        """Checks if the order exists in the Community objects' order."""
        order = self.cleaned_data['order']
        order_community_values = list(
            Community.objects.all().values_list('order', flat=True))
        # Sorted so the error message lists the taken values in order.
        order_community_values.sort()
        if order is None:
            raise ValidationError("Order must not be None.")
        elif order in order_community_values:
            msg = "Choose order value other than {0}"
            string_order_values = ', '.join(map(str, order_community_values))
            raise ValidationError(msg.format(string_order_values))
        else:
            return order
class EditCommunityForm(ModelFormWithHelper):
    """Form to edit Community profile"""
    class Meta:
        model = Community
        fields = ('name', 'slug', 'order', 'location', 'email', 'mailing_list',
                  'parent_community', 'website', 'facebook', 'googleplus',
                  'twitter')
        helper_class = SubmitCancelFormHelper
        # Cancel button returns to the community's profile page.
        helper_cancel_href = "{% url 'view_community_profile' " \
                             "community.slug %}"
class AddCommunityPageForm(ModelFormWithHelper):
    """Form to create new CommunityPage. The author and the community of the
    page are expected to be provided when initializing the form:

    * author - currently logged in user, aka the author of the page
    * community - to which Community the CommunityPage belongs
    """
    class Meta:
        model = CommunityPage
        fields = ('title', 'slug', 'order', 'content')
        helper_class = SubmitCancelFormHelper
        helper_cancel_href = "{% url 'view_community_landing' " \
                             "community.slug %}"

    def __init__(self, *args, **kwargs):
        # Both kwargs are mandatory; pop them before the base __init__.
        self.author = kwargs.pop('author')
        self.community = kwargs.pop('community')
        super(AddCommunityPageForm, self).__init__(*args, **kwargs)

    def save(self, commit=True):
        """Attach author (as SystersUser) and community before saving."""
        page = super(AddCommunityPageForm, self).save(commit=False)
        page.author = SystersUser.objects.get(user=self.author)
        page.community = self.community
        if commit:
            page.save()
        return page
class EditCommunityPageForm(ModelFormWithHelper):
    """Form to edit a CommunityPage."""
    class Meta:
        model = CommunityPage
        fields = ('slug', 'title', 'order', 'content')
        helper_class = SubmitCancelFormHelper
        # Cancel button returns to the page being edited.
        helper_cancel_href = "{% url 'view_community_page' community.slug " \
                             "object.slug %}"
class PermissionGroupsForm(forms.Form):
"""Form to manage (select/deselect) user permission groups"""
def __init__(self, *args, **kwargs):
self.user = kwargs.pop('user')
community = kwargs.pop('community')
super(PermissionGroupsForm, self).__init__(*args, **kwargs)
# get all community groups and remove community admin group
# from the list of choices
self.groups = list(get_groups(community.name))
admin_group = Group.objects.get(
name=COMMUNITY_ADMIN.format(community.name))
self.groups.remove(admin_group)
choices = [(group.pk, group.name) for group in self.groups]
self.fields['groups'] = forms. \
MultipleChoiceField(choices=choices, label="", required=False,
widget=forms.CheckboxSelectMultiple)
|
[
" self.member_groups = self.user.get_member_groups(self.groups)"
] | 746
|
lcc
|
python
| null |
bc922d0a7fcc0c037de90c7f575f1de8f8ac956e5c9e4e82
|
|
# orm/events.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""ORM event interfaces.
"""
from .. import event, exc, util
from .base import _mapper_or_none
import inspect
import weakref
from . import interfaces
from . import mapperlib, instrumentation
from .session import Session, sessionmaker
from .scoping import scoped_session
from .attributes import QueryableAttribute
from .query import Query
from sqlalchemy.util.compat import inspect_getargspec
class InstrumentationEvents(event.Events):
    """Events related to class instrumentation events.

    The listeners here support being established against
    any new style class, that is any object that is a subclass
    of 'type'.  Events will then be fired off for events
    against that class.  If the "propagate=True" flag is passed
    to event.listen(), the event will fire off for subclasses
    of that class as well.

    The Python ``type`` builtin is also accepted as a target,
    which when used has the effect of events being emitted
    for all classes.

    Note the "propagate" flag here is defaulted to ``True``,
    unlike the other class level events where it defaults
    to ``False``.  This means that new subclasses will also
    be the subject of these events, when a listener
    is established on a superclass.

    .. versionchanged:: 0.8 - events here will emit based
       on comparing the incoming class to the type of class
       passed to :func:`.event.listen`.  Previously, the
       event would fire for any class unconditionally regardless
       of what class was sent for listening, despite
       documentation which stated the contrary.

    """

    _target_class_doc = "SomeBaseClass"
    _dispatch_target = instrumentation.InstrumentationFactory

    @classmethod
    def _accept_with(cls, target):
        # Any new-style class is accepted; it is wrapped in a hold object
        # from which _listen() later retrieves it.
        if isinstance(target, type):
            return _InstrumentationEventsHold(target)
        else:
            return None

    @classmethod
    def _listen(cls, event_key, propagate=True, **kw):
        target, identifier, fn = \
            event_key.dispatch_target, event_key.identifier, \
            event_key._listen_fn

        def listen(target_cls, *arg):
            # Dispatch only for the listened-for class; with propagate,
            # also for its subclasses.
            listen_cls = target()
            if propagate and issubclass(target_cls, listen_cls):
                return fn(target_cls, *arg)
            elif not propagate and target_cls is listen_cls:
                return fn(target_cls, *arg)

        def remove(ref):
            # Weakref callback: deregister the listener when the target
            # class is garbage collected.
            key = event.registry._EventKey(
                None, identifier, listen,
                instrumentation._instrumentation_factory)
            getattr(instrumentation._instrumentation_factory.dispatch,
                    identifier).remove(key)

        # Hold the class weakly so that listening does not keep it alive.
        target = weakref.ref(target.class_, remove)

        event_key.\
            with_dispatch_target(instrumentation._instrumentation_factory).\
            with_wrapper(listen).base_listen(**kw)

    @classmethod
    def _clear(cls):
        super(InstrumentationEvents, cls)._clear()
        instrumentation._instrumentation_factory.dispatch._clear()

    def class_instrument(self, cls):
        """Called after the given class is instrumented.

        To get at the :class:`.ClassManager`, use
        :func:`.manager_of_class`.

        """

    def class_uninstrument(self, cls):
        """Called before the given class is uninstrumented.

        To get at the :class:`.ClassManager`, use
        :func:`.manager_of_class`.

        """

    def attribute_instrument(self, cls, key, inst):
        """Called when an attribute is instrumented."""
class _InstrumentationEventsHold(object):
    """temporary marker object used to transfer from _accept_with() to
    _listen() on the InstrumentationEvents class.

    """

    def __init__(self, class_):
        # class_: the plain class the listener was established against.
        self.class_ = class_

    dispatch = event.dispatcher(InstrumentationEvents)
class InstanceEvents(event.Events):
    """Define events specific to object lifecycle.

    e.g.::

        from sqlalchemy import event

        def my_load_listener(target, context):
            print "on load!"

        event.listen(SomeClass, 'load', my_load_listener)

    Available targets include:

    * mapped classes
    * unmapped superclasses of mapped or to-be-mapped classes
      (using the ``propagate=True`` flag)
    * :class:`.Mapper` objects
    * the :class:`.Mapper` class itself and the :func:`.mapper`
      function indicate listening for all mappers.

    .. versionchanged:: 0.8.0 instance events can be associated with
       unmapped superclasses of mapped classes.

    Instance events are closely related to mapper events, but
    are more specific to the instance and its instrumentation,
    rather than its system of persistence.

    When using :class:`.InstanceEvents`, several modifiers are
    available to the :func:`.event.listen` function.

    :param propagate=False: When True, the event listener should
       be applied to all inheriting classes as well as the
       class which is the target of this listener.
    :param raw=False: When True, the "target" argument passed
       to applicable event listener functions will be the
       instance's :class:`.InstanceState` management
       object, rather than the mapped instance itself.

    """

    _target_class_doc = "SomeClass"

    _dispatch_target = instrumentation.ClassManager

    @classmethod
    def _new_classmanager_instance(cls, class_, classmanager):
        # Transfer any listeners held against the (previously unmapped)
        # class onto its newly-created ClassManager.
        _InstanceEventsHold.populate(class_, classmanager)

    @classmethod
    @util.dependencies("sqlalchemy.orm")
    def _accept_with(cls, orm, target):
        # Resolve the many accepted target forms down to a ClassManager
        # (or a hold object for not-yet-instrumented classes).
        if isinstance(target, instrumentation.ClassManager):
            return target
        elif isinstance(target, mapperlib.Mapper):
            return target.class_manager
        elif target is orm.mapper:
            return instrumentation.ClassManager
        elif isinstance(target, type):
            if issubclass(target, mapperlib.Mapper):
                return instrumentation.ClassManager
            else:
                manager = instrumentation.manager_of_class(target)
                if manager:
                    return manager
                else:
                    return _InstanceEventsHold(target)
        return None

    @classmethod
    def _listen(cls, event_key, raw=False, propagate=False, **kw):
        target, identifier, fn = \
            event_key.dispatch_target, event_key.identifier, \
            event_key._listen_fn

        if not raw:
            # Unwrap the InstanceState to the mapped instance before
            # invoking the user's listener.
            def wrap(state, *arg, **kw):
                return fn(state.obj(), *arg, **kw)
            event_key = event_key.with_wrapper(wrap)

        event_key.base_listen(propagate=propagate, **kw)

        if propagate:
            # Also attach to every existing subclass manager.
            for mgr in target.subclass_managers(True):
                event_key.with_dispatch_target(mgr).base_listen(
                    propagate=True)

    @classmethod
    def _clear(cls):
        super(InstanceEvents, cls)._clear()
        _InstanceEventsHold._clear()

    def first_init(self, manager, cls):
        """Called when the first instance of a particular mapping is called.

        This event is called when the ``__init__`` method of a class
        is called the first time for that particular class.    The event
        invokes before ``__init__`` actually proceeds as well as before
        the :meth:`.InstanceEvents.init` event is invoked.

        """

    def init(self, target, args, kwargs):
        """Receive an instance when its constructor is called.

        This method is only called during a userland construction of
        an object, in conjunction with the object's constructor, e.g.
        its ``__init__`` method.  It is not called when an object is
        loaded from the database; see the :meth:`.InstanceEvents.load`
        event in order to intercept a database load.

        The event is called before the actual ``__init__`` constructor
        of the object is called.  The ``kwargs`` dictionary may be
        modified in-place in order to affect what is passed to
        ``__init__``.

        :param target: the mapped instance.  If
         the event is configured with ``raw=True``, this will
         instead be the :class:`.InstanceState` state-management
         object associated with the instance.
        :param args: positional arguments passed to the ``__init__`` method.
         This is passed as a tuple and is currently immutable.
        :param kwargs: keyword arguments passed to the ``__init__`` method.
         This structure *can* be altered in place.

        .. seealso::

            :meth:`.InstanceEvents.init_failure`

            :meth:`.InstanceEvents.load`

        """

    def init_failure(self, target, args, kwargs):
        """Receive an instance when its constructor has been called,
        and raised an exception.

        This method is only called during a userland construction of
        an object, in conjunction with the object's constructor, e.g.
        its ``__init__`` method. It is not called when an object is loaded
        from the database.

        The event is invoked after an exception raised by the ``__init__``
        method is caught.  After the event
        is invoked, the original exception is re-raised outwards, so that
        the construction of the object still raises an exception.  The
        actual exception and stack trace raised should be present in
        ``sys.exc_info()``.

        :param target: the mapped instance.  If
         the event is configured with ``raw=True``, this will
         instead be the :class:`.InstanceState` state-management
         object associated with the instance.
        :param args: positional arguments that were passed to the ``__init__``
         method.
        :param kwargs: keyword arguments that were passed to the ``__init__``
         method.

        .. seealso::

            :meth:`.InstanceEvents.init`

            :meth:`.InstanceEvents.load`

        """

    def load(self, target, context):
        """Receive an object instance after it has been created via
        ``__new__``, and after initial attribute population has
        occurred.

        This typically occurs when the instance is created based on
        incoming result rows, and is only called once for that
        instance's lifetime.

        Note that during a result-row load, this method is called upon
        the first row received for this instance.  Note that some
        attributes and collections may or may not be loaded or even
        initialized, depending on what's present in the result rows.

        The :meth:`.InstanceEvents.load` event is also available in a
        class-method decorator format called :func:`.orm.reconstructor`.

        :param target: the mapped instance.  If
         the event is configured with ``raw=True``, this will
         instead be the :class:`.InstanceState` state-management
         object associated with the instance.
        :param context: the :class:`.QueryContext` corresponding to the
         current :class:`.Query` in progress.  This argument may be
         ``None`` if the load does not correspond to a :class:`.Query`,
         such as during :meth:`.Session.merge`.

        .. seealso::

            :meth:`.InstanceEvents.init`

            :meth:`.InstanceEvents.refresh`

            :meth:`.SessionEvents.loaded_as_persistent`

            :ref:`mapping_constructors`

        """

    def refresh(self, target, context, attrs):
        """Receive an object instance after one or more attributes have
        been refreshed from a query.

        Contrast this to the :meth:`.InstanceEvents.load` method, which
        is invoked when the object is first loaded from a query.

        :param target: the mapped instance.  If
         the event is configured with ``raw=True``, this will
         instead be the :class:`.InstanceState` state-management
         object associated with the instance.
        :param context: the :class:`.QueryContext` corresponding to the
         current :class:`.Query` in progress.
        :param attrs: sequence of attribute names which
         were populated, or None if all column-mapped, non-deferred
         attributes were populated.

        .. seealso::

            :meth:`.InstanceEvents.load`

        """

    def refresh_flush(self, target, flush_context, attrs):
        """Receive an object instance after one or more attributes have
        been refreshed within the persistence of the object.

        This event is the same as :meth:`.InstanceEvents.refresh` except
        it is invoked within the unit of work flush process, and the values
        here typically come from the process of handling an INSERT or
        UPDATE, such as via the RETURNING clause or from Python-side default
        values.

        .. versionadded:: 1.0.5

        :param target: the mapped instance.  If
         the event is configured with ``raw=True``, this will
         instead be the :class:`.InstanceState` state-management
         object associated with the instance.
        :param flush_context: Internal :class:`.UOWTransaction` object
         which handles the details of the flush.
        :param attrs: sequence of attribute names which
         were populated.

        """

    def expire(self, target, attrs):
        """Receive an object instance after its attributes or some subset
        have been expired.

        'keys' is a list of attribute names.  If None, the entire
        state was expired.

        :param target: the mapped instance.  If
         the event is configured with ``raw=True``, this will
         instead be the :class:`.InstanceState` state-management
         object associated with the instance.
        :param attrs: sequence of attribute
         names which were expired, or None if all attributes were
         expired.

        """

    def pickle(self, target, state_dict):
        """Receive an object instance when its associated state is
        being pickled.

        :param target: the mapped instance.  If
         the event is configured with ``raw=True``, this will
         instead be the :class:`.InstanceState` state-management
         object associated with the instance.
        :param state_dict: the dictionary returned by
         :class:`.InstanceState.__getstate__`, containing the state
         to be pickled.

        """

    def unpickle(self, target, state_dict):
        """Receive an object instance after its associated state has
        been unpickled.

        :param target: the mapped instance.  If
         the event is configured with ``raw=True``, this will
         instead be the :class:`.InstanceState` state-management
         object associated with the instance.
        :param state_dict: the dictionary sent to
         :class:`.InstanceState.__setstate__`, containing the state
         dictionary which was pickled.

        """
class _EventsHold(event.RefCollection):
    """Hold onto listeners against unmapped, uninstrumented classes.

    Establish _listen() for that class' mapper/instrumentation when
    those objects are created for that class.

    """

    def __init__(self, class_):
        self.class_ = class_

    @classmethod
    def _clear(cls):
        cls.all_holds.clear()

    class HoldEvents(object):
        _dispatch_target = None

        @classmethod
        def _listen(cls, event_key, raw=False, propagate=False, **kw):
            target, identifier, fn = \
                event_key.dispatch_target, event_key.identifier, event_key.fn

            # Record the listener under the held class so populate() can
            # replay it once the class is instrumented/mapped.
            if target.class_ in target.all_holds:
                collection = target.all_holds[target.class_]
            else:
                collection = target.all_holds[target.class_] = {}

            event.registry._stored_in_collection(event_key, target)
            collection[event_key._key] = (event_key, raw, propagate)

            if propagate:
                # Breadth-first walk of all current subclasses, attaching
                # the listener to any that are already resolvable.
                stack = list(target.class_.__subclasses__())
                while stack:
                    subclass = stack.pop(0)
                    stack.extend(subclass.__subclasses__())
                    subject = target.resolve(subclass)
                    if subject is not None:
                        # we are already going through __subclasses__()
                        # so leave generic propagate flag False
                        event_key.with_dispatch_target(subject).\
                            listen(raw=raw, propagate=False, **kw)

    def remove(self, event_key):
        target, identifier, fn = \
            event_key.dispatch_target, event_key.identifier, event_key.fn

        if isinstance(target, _EventsHold):
            collection = target.all_holds[target.class_]
            del collection[event_key._key]

    @classmethod
    def populate(cls, class_, subject):
        # Replay held listeners from class_ and its bases onto the newly
        # created subject (e.g. a ClassManager).
        for subclass in class_.__mro__:
            if subclass in cls.all_holds:
                collection = cls.all_holds[subclass]
                for event_key, raw, propagate in collection.values():
                    if propagate or subclass is class_:
                        # since we can't be sure in what order different
                        # classes in a hierarchy are triggered with
                        # populate(), we rely upon _EventsHold for all event
                        # assignment, instead of using the generic propagate
                        # flag.
                        event_key.with_dispatch_target(subject).\
                            listen(raw=raw, propagate=False)
class _InstanceEventsHold(_EventsHold):
    """Holds :class:`.InstanceEvents` listeners for classes that are not
    yet instrumented."""

    all_holds = weakref.WeakKeyDictionary()

    def resolve(self, class_):
        # The instrumentation manager is the dispatch subject for
        # instance-level events; it is None until class_ is instrumented.
        manager = instrumentation.manager_of_class(class_)
        return manager

    class HoldInstanceEvents(_EventsHold.HoldEvents, InstanceEvents):
        pass

    dispatch = event.dispatcher(HoldInstanceEvents)
class MapperEvents(event.Events):
    """Define events specific to mappings.

    e.g.::

        from sqlalchemy import event

        def my_before_insert_listener(mapper, connection, target):
            # execute a stored procedure upon INSERT,
            # apply the value to the row to be inserted
            target.calculated_value = connection.scalar(
                "select my_special_function(%d)"
                % target.special_number)

        # associate the listener function with SomeClass,
        # to execute during the "before_insert" hook
        event.listen(
            SomeClass, 'before_insert', my_before_insert_listener)

    Available targets include:

    * mapped classes
    * unmapped superclasses of mapped or to-be-mapped classes
      (using the ``propagate=True`` flag)
    * :class:`.Mapper` objects
    * the :class:`.Mapper` class itself and the :func:`.mapper`
      function indicate listening for all mappers.

    .. versionchanged:: 0.8.0 mapper events can be associated with
       unmapped superclasses of mapped classes.

    Mapper events provide hooks into critical sections of the
    mapper, including those related to object instrumentation,
    object loading, and object persistence. In particular, the
    persistence methods :meth:`~.MapperEvents.before_insert`,
    and :meth:`~.MapperEvents.before_update` are popular
    places to augment the state being persisted - however, these
    methods operate with several significant restrictions. The
    user is encouraged to evaluate the
    :meth:`.SessionEvents.before_flush` and
    :meth:`.SessionEvents.after_flush` methods as more
    flexible and user-friendly hooks in which to apply
    additional database state during a flush.

    When using :class:`.MapperEvents`, several modifiers are
    available to the :func:`.event.listen` function.

    :param propagate=False: When True, the event listener should
       be applied to all inheriting mappers and/or the mappers of
       inheriting classes, as well as any
       mapper which is the target of this listener.
    :param raw=False: When True, the "target" argument passed
       to applicable event listener functions will be the
       instance's :class:`.InstanceState` management
       object, rather than the mapped instance itself.
    :param retval=False: when True, the user-defined event function
       must have a return value, the purpose of which is either to
       control subsequent event propagation, or to otherwise alter
       the operation in progress by the mapper. Possible return
       values are:

       * ``sqlalchemy.orm.interfaces.EXT_CONTINUE`` - continue event
         processing normally.
       * ``sqlalchemy.orm.interfaces.EXT_STOP`` - cancel all subsequent
         event handlers in the chain.
       * other values - the return value specified by specific listeners.

    """

    # placeholder class name used when rendering event documentation
    _target_class_doc = "SomeClass"
    # events here dispatch against Mapper instances by default
    _dispatch_target = mapperlib.Mapper

    @classmethod
    def _new_mapper_instance(cls, class_, mapper):
        # replay any listeners that were held against the (previously
        # unmapped) class onto the newly created mapper
        _MapperEventsHold.populate(class_, mapper)

    @classmethod
    @util.dependencies("sqlalchemy.orm")
    def _accept_with(cls, orm, target):
        # normalize the listen() target into a dispatchable subject
        if target is orm.mapper:
            # the mapper() function means "all mappers"
            return mapperlib.Mapper
        elif isinstance(target, type):
            if issubclass(target, mapperlib.Mapper):
                return target
            else:
                mapper = _mapper_or_none(target)
                if mapper is not None:
                    return mapper
                else:
                    # not yet mapped - hold the listener until a mapper
                    # for this class is created
                    return _MapperEventsHold(target)
        else:
            return target

    @classmethod
    def _listen(
            cls, event_key, raw=False, retval=False, propagate=False, **kw):
        target, identifier, fn = \
            event_key.dispatch_target, event_key.identifier, \
            event_key._listen_fn

        if identifier in ("before_configured", "after_configured") and \
                target is not mapperlib.Mapper:
            util.warn(
                "'before_configured' and 'after_configured' ORM events "
                "only invoke with the mapper() function or Mapper class "
                "as the target.")

        if not raw or not retval:
            if not raw:
                # locate the positional index of the 'target' argument in
                # the event's declared signature (minus 'self'), so the raw
                # InstanceState can be unwrapped to the mapped instance
                meth = getattr(cls, identifier)
                try:
                    target_index = \
                        inspect_getargspec(meth)[0].index('target') - 1
                except ValueError:
                    target_index = None

            def wrap(*arg, **kw):
                # adapter applied when raw=False and/or retval=False:
                # unwraps InstanceState -> instance and/or supplies the
                # default EXT_CONTINUE return value
                if not raw and target_index is not None:
                    arg = list(arg)
                    arg[target_index] = arg[target_index].obj()
                if not retval:
                    fn(*arg, **kw)
                    return interfaces.EXT_CONTINUE
                else:
                    return fn(*arg, **kw)
            event_key = event_key.with_wrapper(wrap)

        if propagate:
            # apply to the target mapper and every descendant mapper
            for mapper in target.self_and_descendants:
                event_key.with_dispatch_target(mapper).base_listen(
                    propagate=True, **kw)
        else:
            event_key.base_listen(**kw)

    @classmethod
    def _clear(cls):
        super(MapperEvents, cls)._clear()
        # also drop listeners held against not-yet-mapped classes
        _MapperEventsHold._clear()

    def instrument_class(self, mapper, class_):
        r"""Receive a class when the mapper is first constructed,
        before instrumentation is applied to the mapped class.

        This event is the earliest phase of mapper construction.
        Most attributes of the mapper are not yet initialized.

        This listener can either be applied to the :class:`.Mapper`
        class overall, or to any un-mapped class which serves as a base
        for classes that will be mapped (using the ``propagate=True`` flag)::

            Base = declarative_base()

            @event.listens_for(Base, "instrument_class", propagate=True)
            def on_new_class(mapper, cls_):
                " ... "

        :param mapper: the :class:`.Mapper` which is the target
         of this event.
        :param class\_: the mapped class.

        """

    def mapper_configured(self, mapper, class_):
        r"""Called when a specific mapper has completed its own configuration
        within the scope of the :func:`.configure_mappers` call.

        The :meth:`.MapperEvents.mapper_configured` event is invoked
        for each mapper that is encountered when the
        :func:`.orm.configure_mappers` function proceeds through the current
        list of not-yet-configured mappers.
        :func:`.orm.configure_mappers` is typically invoked
        automatically as mappings are first used, as well as each time
        new mappers have been made available and new mapper use is
        detected.

        When the event is called, the mapper should be in its final
        state, but **not including backrefs** that may be invoked from
        other mappers; they might still be pending within the
        configuration operation. Bidirectional relationships that
        are instead configured via the
        :paramref:`.orm.relationship.back_populates` argument
        *will* be fully available, since this style of relationship does not
        rely upon other possibly-not-configured mappers to know that they
        exist.

        For an event that is guaranteed to have **all** mappers ready
        to go including backrefs that are defined only on other
        mappings, use the :meth:`.MapperEvents.after_configured`
        event; this event invokes only after all known mappings have been
        fully configured.

        The :meth:`.MapperEvents.mapper_configured` event, unlike
        :meth:`.MapperEvents.before_configured` or
        :meth:`.MapperEvents.after_configured`,
        is called for each mapper/class individually, and the mapper is
        passed to the event itself. It also is called exactly once for
        a particular mapper. The event is therefore useful for
        configurational steps that benefit from being invoked just once
        on a specific mapper basis, which don't require that "backref"
        configurations are necessarily ready yet.

        :param mapper: the :class:`.Mapper` which is the target
         of this event.
        :param class\_: the mapped class.

        .. seealso::

            :meth:`.MapperEvents.before_configured`

            :meth:`.MapperEvents.after_configured`

        """
        # TODO: need coverage for this event

    def before_configured(self):
        """Called before a series of mappers have been configured.

        The :meth:`.MapperEvents.before_configured` event is invoked
        each time the :func:`.orm.configure_mappers` function is
        invoked, before the function has done any of its work.
        :func:`.orm.configure_mappers` is typically invoked
        automatically as mappings are first used, as well as each time
        new mappers have been made available and new mapper use is
        detected.

        This event can **only** be applied to the :class:`.Mapper` class
        or :func:`.mapper` function, and not to individual mappings or
        mapped classes. It is only invoked for all mappings as a whole::

            from sqlalchemy.orm import mapper

            @event.listens_for(mapper, "before_configured")
            def go():
                # ...

        Contrast this event to :meth:`.MapperEvents.after_configured`,
        which is invoked after the series of mappers has been configured,
        as well as :meth:`.MapperEvents.mapper_configured`, which is invoked
        on a per-mapper basis as each one is configured to the extent possible.

        Theoretically this event is called once per
        application, but is actually called any time new mappers
        are to be affected by a :func:`.orm.configure_mappers`
        call. If new mappings are constructed after existing ones have
        already been used, this event will likely be called again. To ensure
        that a particular event is only called once and no further, the
        ``once=True`` argument (new in 0.9.4) can be applied::

            from sqlalchemy.orm import mapper

            @event.listens_for(mapper, "before_configured", once=True)
            def go():
                # ...

        .. versionadded:: 0.9.3

        .. seealso::

            :meth:`.MapperEvents.mapper_configured`

            :meth:`.MapperEvents.after_configured`

        """

    def after_configured(self):
        """Called after a series of mappers have been configured.

        The :meth:`.MapperEvents.after_configured` event is invoked
        each time the :func:`.orm.configure_mappers` function is
        invoked, after the function has completed its work.
        :func:`.orm.configure_mappers` is typically invoked
        automatically as mappings are first used, as well as each time
        new mappers have been made available and new mapper use is
        detected.

        Contrast this event to the :meth:`.MapperEvents.mapper_configured`
        event, which is called on a per-mapper basis while the configuration
        operation proceeds; unlike that event, when this event is invoked,
        all cross-configurations (e.g. backrefs) will also have been made
        available for any mappers that were pending.
        Also contrast to :meth:`.MapperEvents.before_configured`,
        which is invoked before the series of mappers has been configured.

        This event can **only** be applied to the :class:`.Mapper` class
        or :func:`.mapper` function, and not to individual mappings or
        mapped classes. It is only invoked for all mappings as a whole::

            from sqlalchemy.orm import mapper

            @event.listens_for(mapper, "after_configured")
            def go():
                # ...

        Theoretically this event is called once per
        application, but is actually called any time new mappers
        have been affected by a :func:`.orm.configure_mappers`
        call. If new mappings are constructed after existing ones have
        already been used, this event will likely be called again. To ensure
        that a particular event is only called once and no further, the
        ``once=True`` argument (new in 0.9.4) can be applied::

            from sqlalchemy.orm import mapper

            @event.listens_for(mapper, "after_configured", once=True)
            def go():
                # ...

        .. seealso::

            :meth:`.MapperEvents.mapper_configured`

            :meth:`.MapperEvents.before_configured`

        """

    def before_insert(self, mapper, connection, target):
        """Receive an object instance before an INSERT statement
        is emitted corresponding to that instance.

        This event is used to modify local, non-object related
        attributes on the instance before an INSERT occurs, as well
        as to emit additional SQL statements on the given
        connection.

        The event is often called for a batch of objects of the
        same class before their INSERT statements are emitted at
        once in a later step. In the extremely rare case that
        this is not desirable, the :func:`.mapper` can be
        configured with ``batch=False``, which will cause
        batches of instances to be broken up into individual
        (and more poorly performing) event->persist->event
        steps.

        .. warning::

            Mapper-level flush events only allow **very limited operations**,
            on attributes local to the row being operated upon only,
            as well as allowing any SQL to be emitted on the given
            :class:`.Connection`. **Please read fully** the notes
            at :ref:`session_persistence_mapper` for guidelines on using
            these methods; generally, the :meth:`.SessionEvents.before_flush`
            method should be preferred for general on-flush changes.

        :param mapper: the :class:`.Mapper` which is the target
         of this event.
        :param connection: the :class:`.Connection` being used to
         emit INSERT statements for this instance. This
         provides a handle into the current transaction on the
         target database specific to this instance.
        :param target: the mapped instance being persisted. If
         the event is configured with ``raw=True``, this will
         instead be the :class:`.InstanceState` state-management
         object associated with the instance.
        :return: No return value is supported by this event.

        .. seealso::

            :ref:`session_persistence_events`

        """

    def after_insert(self, mapper, connection, target):
        """Receive an object instance after an INSERT statement
        is emitted corresponding to that instance.

        This event is used to modify in-Python-only
        state on the instance after an INSERT occurs, as well
        as to emit additional SQL statements on the given
        connection.

        The event is often called for a batch of objects of the
        same class after their INSERT statements have been
        emitted at once in a previous step. In the extremely
        rare case that this is not desirable, the
        :func:`.mapper` can be configured with ``batch=False``,
        which will cause batches of instances to be broken up
        into individual (and more poorly performing)
        event->persist->event steps.

        .. warning::

            Mapper-level flush events only allow **very limited operations**,
            on attributes local to the row being operated upon only,
            as well as allowing any SQL to be emitted on the given
            :class:`.Connection`. **Please read fully** the notes
            at :ref:`session_persistence_mapper` for guidelines on using
            these methods; generally, the :meth:`.SessionEvents.before_flush`
            method should be preferred for general on-flush changes.

        :param mapper: the :class:`.Mapper` which is the target
         of this event.
        :param connection: the :class:`.Connection` being used to
         emit INSERT statements for this instance. This
         provides a handle into the current transaction on the
         target database specific to this instance.
        :param target: the mapped instance being persisted. If
         the event is configured with ``raw=True``, this will
         instead be the :class:`.InstanceState` state-management
         object associated with the instance.
        :return: No return value is supported by this event.

        .. seealso::

            :ref:`session_persistence_events`

        """

    def before_update(self, mapper, connection, target):
        """Receive an object instance before an UPDATE statement
        is emitted corresponding to that instance.

        This event is used to modify local, non-object related
        attributes on the instance before an UPDATE occurs, as well
        as to emit additional SQL statements on the given
        connection.

        This method is called for all instances that are
        marked as "dirty", *even those which have no net changes
        to their column-based attributes*. An object is marked
        as dirty when any of its column-based attributes have a
        "set attribute" operation called or when any of its
        collections are modified. If, at update time, no
        column-based attributes have any net changes, no UPDATE
        statement will be issued. This means that an instance
        being sent to :meth:`~.MapperEvents.before_update` is
        *not* a guarantee that an UPDATE statement will be
        issued, although you can affect the outcome here by
        modifying attributes so that a net change in value does
        exist.

        To detect if the column-based attributes on the object have net
        changes, and will therefore generate an UPDATE statement, use
        ``object_session(instance).is_modified(instance,
        include_collections=False)``.

        The event is often called for a batch of objects of the
        same class before their UPDATE statements are emitted at
        once in a later step. In the extremely rare case that
        this is not desirable, the :func:`.mapper` can be
        configured with ``batch=False``, which will cause
        batches of instances to be broken up into individual
        (and more poorly performing) event->persist->event
        steps.

        .. warning::

            Mapper-level flush events only allow **very limited operations**,
            on attributes local to the row being operated upon only,
            as well as allowing any SQL to be emitted on the given
            :class:`.Connection`. **Please read fully** the notes
            at :ref:`session_persistence_mapper` for guidelines on using
            these methods; generally, the :meth:`.SessionEvents.before_flush`
            method should be preferred for general on-flush changes.

        :param mapper: the :class:`.Mapper` which is the target
         of this event.
        :param connection: the :class:`.Connection` being used to
         emit UPDATE statements for this instance. This
         provides a handle into the current transaction on the
         target database specific to this instance.
        :param target: the mapped instance being persisted. If
         the event is configured with ``raw=True``, this will
         instead be the :class:`.InstanceState` state-management
         object associated with the instance.
        :return: No return value is supported by this event.

        .. seealso::

            :ref:`session_persistence_events`

        """

    def after_update(self, mapper, connection, target):
        """Receive an object instance after an UPDATE statement
        is emitted corresponding to that instance.

        This event is used to modify in-Python-only
        state on the instance after an UPDATE occurs, as well
        as to emit additional SQL statements on the given
        connection.

        This method is called for all instances that are
        marked as "dirty", *even those which have no net changes
        to their column-based attributes*, and for which
        no UPDATE statement has proceeded. An object is marked
        as dirty when any of its column-based attributes have a
        "set attribute" operation called or when any of its
        collections are modified. If, at update time, no
        column-based attributes have any net changes, no UPDATE
        statement will be issued. This means that an instance
        being sent to :meth:`~.MapperEvents.after_update` is
        *not* a guarantee that an UPDATE statement has been
        issued.

        To detect if the column-based attributes on the object have net
        changes, and therefore resulted in an UPDATE statement, use
        ``object_session(instance).is_modified(instance,
        include_collections=False)``.

        The event is often called for a batch of objects of the
        same class after their UPDATE statements have been emitted at
        once in a previous step. In the extremely rare case that
        this is not desirable, the :func:`.mapper` can be
        configured with ``batch=False``, which will cause
        batches of instances to be broken up into individual
        (and more poorly performing) event->persist->event
        steps.

        .. warning::

            Mapper-level flush events only allow **very limited operations**,
            on attributes local to the row being operated upon only,
            as well as allowing any SQL to be emitted on the given
            :class:`.Connection`. **Please read fully** the notes
            at :ref:`session_persistence_mapper` for guidelines on using
            these methods; generally, the :meth:`.SessionEvents.before_flush`
            method should be preferred for general on-flush changes.

        :param mapper: the :class:`.Mapper` which is the target
         of this event.
        :param connection: the :class:`.Connection` being used to
         emit UPDATE statements for this instance. This
         provides a handle into the current transaction on the
         target database specific to this instance.
        :param target: the mapped instance being persisted. If
         the event is configured with ``raw=True``, this will
         instead be the :class:`.InstanceState` state-management
         object associated with the instance.
        :return: No return value is supported by this event.

        .. seealso::

            :ref:`session_persistence_events`

        """

    def before_delete(self, mapper, connection, target):
        """Receive an object instance before a DELETE statement
        is emitted corresponding to that instance.

        This event is used to emit additional SQL statements on
        the given connection as well as to perform application
        specific bookkeeping related to a deletion event.

        The event is often called for a batch of objects of the
        same class before their DELETE statements are emitted at
        once in a later step.

        .. warning::

            Mapper-level flush events only allow **very limited operations**,
            on attributes local to the row being operated upon only,
            as well as allowing any SQL to be emitted on the given
            :class:`.Connection`. **Please read fully** the notes
            at :ref:`session_persistence_mapper` for guidelines on using
            these methods; generally, the :meth:`.SessionEvents.before_flush`
            method should be preferred for general on-flush changes.

        :param mapper: the :class:`.Mapper` which is the target
         of this event.
        :param connection: the :class:`.Connection` being used to
         emit DELETE statements for this instance. This
         provides a handle into the current transaction on the
         target database specific to this instance.
        :param target: the mapped instance being deleted. If
         the event is configured with ``raw=True``, this will
         instead be the :class:`.InstanceState` state-management
         object associated with the instance.
        :return: No return value is supported by this event.

        .. seealso::

            :ref:`session_persistence_events`

        """

    def after_delete(self, mapper, connection, target):
        """Receive an object instance after a DELETE statement
        has been emitted corresponding to that instance.

        This event is used to emit additional SQL statements on
        the given connection as well as to perform application
        specific bookkeeping related to a deletion event.

        The event is often called for a batch of objects of the
        same class after their DELETE statements have been emitted at
        once in a previous step.

        .. warning::

            Mapper-level flush events only allow **very limited operations**,
            on attributes local to the row being operated upon only,
            as well as allowing any SQL to be emitted on the given
            :class:`.Connection`. **Please read fully** the notes
            at :ref:`session_persistence_mapper` for guidelines on using
            these methods; generally, the :meth:`.SessionEvents.before_flush`
            method should be preferred for general on-flush changes.

        :param mapper: the :class:`.Mapper` which is the target
         of this event.
        :param connection: the :class:`.Connection` being used to
         emit DELETE statements for this instance. This
         provides a handle into the current transaction on the
         target database specific to this instance.
        :param target: the mapped instance being deleted. If
         the event is configured with ``raw=True``, this will
         instead be the :class:`.InstanceState` state-management
         object associated with the instance.
        :return: No return value is supported by this event.

        .. seealso::

            :ref:`session_persistence_events`

        """
class _MapperEventsHold(_EventsHold):
    """Holds :class:`.MapperEvents` listeners for classes that are not
    yet mapped."""

    all_holds = weakref.WeakKeyDictionary()

    def resolve(self, class_):
        # A Mapper is the dispatch subject for mapper-level events;
        # None until class_ is actually mapped.
        mapper = _mapper_or_none(class_)
        return mapper

    class HoldMapperEvents(_EventsHold.HoldEvents, MapperEvents):
        pass

    dispatch = event.dispatcher(HoldMapperEvents)
class SessionEvents(event.Events):
"""Define events specific to :class:`.Session` lifecycle.
e.g.::
from sqlalchemy import event
from sqlalchemy.orm import sessionmaker
def my_before_commit(session):
print "before commit!"
Session = sessionmaker()
event.listen(Session, "before_commit", my_before_commit)
The :func:`~.event.listen` function will accept
:class:`.Session` objects as well as the return result
of :class:`~.sessionmaker()` and :class:`~.scoped_session()`.
Additionally, it accepts the :class:`.Session` class which
will apply listeners to all :class:`.Session` instances
globally.
"""
_target_class_doc = "SomeSessionOrFactory"
_dispatch_target = Session
@classmethod
def _accept_with(cls, target):
if isinstance(target, scoped_session):
target = target.session_factory
if not isinstance(target, sessionmaker) and \
(
not isinstance(target, type) or
not issubclass(target, Session)
):
raise exc.ArgumentError(
"Session event listen on a scoped_session "
"requires that its creation callable "
"is associated with the Session class.")
if isinstance(target, sessionmaker):
return target.class_
elif isinstance(target, type):
if issubclass(target, scoped_session):
return Session
elif issubclass(target, Session):
return target
elif isinstance(target, Session):
return target
else:
return None
def after_transaction_create(self, session, transaction):
"""Execute when a new :class:`.SessionTransaction` is created.
This event differs from :meth:`~.SessionEvents.after_begin`
in that it occurs for each :class:`.SessionTransaction`
overall, as opposed to when transactions are begun
on individual database connections. It is also invoked
for nested transactions and subtransactions, and is always
matched by a corresponding
:meth:`~.SessionEvents.after_transaction_end` event
(assuming normal operation of the :class:`.Session`).
:param session: the target :class:`.Session`.
:param transaction: the target :class:`.SessionTransaction`.
To detect if this is the outermost
:class:`.SessionTransaction`, as opposed to a "subtransaction" or a
SAVEPOINT, test that the :attr:`.SessionTransaction.parent` attribute
is ``None``::
@event.listens_for(session, "after_transaction_create")
def after_transaction_create(session, transaction):
if transaction.parent is None:
# work with top-level transaction
To detect if the :class:`.SessionTransaction` is a SAVEPOINT, use the
:attr:`.SessionTransaction.nested` attribute::
@event.listens_for(session, "after_transaction_create")
def after_transaction_create(session, transaction):
if transaction.nested:
# work with SAVEPOINT transaction
.. seealso::
:class:`.SessionTransaction`
:meth:`~.SessionEvents.after_transaction_end`
"""
def after_transaction_end(self, session, transaction):
"""Execute when the span of a :class:`.SessionTransaction` ends.
This event differs from :meth:`~.SessionEvents.after_commit`
in that it corresponds to all :class:`.SessionTransaction`
objects in use, including those for nested transactions
and subtransactions, and is always matched by a corresponding
:meth:`~.SessionEvents.after_transaction_create` event.
:param session: the target :class:`.Session`.
:param transaction: the target :class:`.SessionTransaction`.
To detect if this is the outermost
:class:`.SessionTransaction`, as opposed to a "subtransaction" or a
SAVEPOINT, test that the :attr:`.SessionTransaction.parent` attribute
is ``None``::
@event.listens_for(session, "after_transaction_create")
def after_transaction_end(session, transaction):
if transaction.parent is None:
# work with top-level transaction
To detect if the :class:`.SessionTransaction` is a SAVEPOINT, use the
:attr:`.SessionTransaction.nested` attribute::
@event.listens_for(session, "after_transaction_create")
def after_transaction_end(session, transaction):
if transaction.nested:
# work with SAVEPOINT transaction
.. seealso::
:class:`.SessionTransaction`
:meth:`~.SessionEvents.after_transaction_create`
"""
def before_commit(self, session):
"""Execute before commit is called.
.. note::
The :meth:`~.SessionEvents.before_commit` hook is *not* per-flush,
that is, the :class:`.Session` can emit SQL to the database
many times within the scope of a transaction.
For interception of these events, use the
:meth:`~.SessionEvents.before_flush`,
:meth:`~.SessionEvents.after_flush`, or
:meth:`~.SessionEvents.after_flush_postexec`
events.
:param session: The target :class:`.Session`.
.. seealso::
:meth:`~.SessionEvents.after_commit`
:meth:`~.SessionEvents.after_begin`
:meth:`~.SessionEvents.after_transaction_create`
:meth:`~.SessionEvents.after_transaction_end`
"""
def after_commit(self, session):
"""Execute after a commit has occurred.
.. note::
The :meth:`~.SessionEvents.after_commit` hook is *not* per-flush,
that is, the :class:`.Session` can emit SQL to the database
many times within the scope of a transaction.
For interception of these events, use the
:meth:`~.SessionEvents.before_flush`,
:meth:`~.SessionEvents.after_flush`, or
:meth:`~.SessionEvents.after_flush_postexec`
events.
.. note::
The :class:`.Session` is not in an active transaction
when the :meth:`~.SessionEvents.after_commit` event is invoked,
and therefore can not emit SQL. To emit SQL corresponding to
every transaction, use the :meth:`~.SessionEvents.before_commit`
event.
:param session: The target :class:`.Session`.
.. seealso::
:meth:`~.SessionEvents.before_commit`
:meth:`~.SessionEvents.after_begin`
:meth:`~.SessionEvents.after_transaction_create`
:meth:`~.SessionEvents.after_transaction_end`
"""
def after_rollback(self, session):
"""Execute after a real DBAPI rollback has occurred.
Note that this event only fires when the *actual* rollback against
the database occurs - it does *not* fire each time the
:meth:`.Session.rollback` method is called, if the underlying
DBAPI transaction has already been rolled back. In many
cases, the :class:`.Session` will not be in
an "active" state during this event, as the current
transaction is not valid. To acquire a :class:`.Session`
which is active after the outermost rollback has proceeded,
use the :meth:`.SessionEvents.after_soft_rollback` event, checking the
:attr:`.Session.is_active` flag.
:param session: The target :class:`.Session`.
"""
def after_soft_rollback(self, session, previous_transaction):
"""Execute after any rollback has occurred, including "soft"
rollbacks that don't actually emit at the DBAPI level.
This corresponds to both nested and outer rollbacks, i.e.
the innermost rollback that calls the DBAPI's
rollback() method, as well as the enclosing rollback
calls that only pop themselves from the transaction stack.
The given :class:`.Session` can be used to invoke SQL and
:meth:`.Session.query` operations after an outermost rollback
by first checking the :attr:`.Session.is_active` flag::
@event.listens_for(Session, "after_soft_rollback")
def do_something(session, previous_transaction):
if session.is_active:
session.execute("select * from some_table")
:param session: The target :class:`.Session`.
:param previous_transaction: The :class:`.SessionTransaction`
transactional marker object which was just closed. The current
:class:`.SessionTransaction` for the given :class:`.Session` is
available via the :attr:`.Session.transaction` attribute.
.. versionadded:: 0.7.3
"""
def before_flush(self, session, flush_context, instances):
"""Execute before flush process has started.
:param session: The target :class:`.Session`.
:param flush_context: Internal :class:`.UOWTransaction` object
which handles the details of the flush.
:param instances: Usually ``None``, this is the collection of
objects which can be passed to the :meth:`.Session.flush` method
(note this usage is deprecated).
.. seealso::
:meth:`~.SessionEvents.after_flush`
:meth:`~.SessionEvents.after_flush_postexec`
:ref:`session_persistence_events`
"""
def after_flush(self, session, flush_context):
"""Execute after flush has completed, but before commit has been
called.
Note that the session's state is still in pre-flush, i.e. 'new',
'dirty', and 'deleted' lists still show pre-flush state as well
as the history settings on instance attributes.
:param session: The target :class:`.Session`.
:param flush_context: Internal :class:`.UOWTransaction` object
which handles the details of the flush.
.. seealso::
:meth:`~.SessionEvents.before_flush`
:meth:`~.SessionEvents.after_flush_postexec`
:ref:`session_persistence_events`
"""
def after_flush_postexec(self, session, flush_context):
"""Execute after flush has completed, and after the post-exec
state occurs.
This will be when the 'new', 'dirty', and 'deleted' lists are in
their final state. An actual commit() may or may not have
occurred, depending on whether or not the flush started its own
transaction or participated in a larger transaction.
:param session: The target :class:`.Session`.
:param flush_context: Internal :class:`.UOWTransaction` object
which handles the details of the flush.
.. seealso::
:meth:`~.SessionEvents.before_flush`
:meth:`~.SessionEvents.after_flush`
:ref:`session_persistence_events`
"""
def after_begin(self, session, transaction, connection):
"""Execute after a transaction is begun on a connection
:param session: The target :class:`.Session`.
:param transaction: The :class:`.SessionTransaction`.
:param connection: The :class:`~.engine.Connection` object
which will be used for SQL statements.
.. seealso::
:meth:`~.SessionEvents.before_commit`
:meth:`~.SessionEvents.after_commit`
:meth:`~.SessionEvents.after_transaction_create`
:meth:`~.SessionEvents.after_transaction_end`
"""
def before_attach(self, session, instance):
    """Execute before an instance is attached to a session.

    This is called before an add, delete or merge causes
    the object to be part of the session.

    .. versionadded:: 0.8.  Note that :meth:`~.SessionEvents.after_attach`
       now fires off after the item is part of the session.
       :meth:`.before_attach` is provided for those cases where
       the item should not yet be part of the session state.

    .. seealso::

        :meth:`~.SessionEvents.after_attach`

        :ref:`session_lifecycle_events`

    """

def after_attach(self, session, instance):
    """Execute after an instance is attached to a session.

    This is called after an add, delete or merge.

    .. note::

       As of 0.8, this event fires off *after* the item
       has been fully associated with the session, which is
       different than previous releases.  For event
       handlers that require the object not yet
       be part of session state (such as handlers which
       may autoflush while the target object is not
       yet complete) consider the
       new :meth:`.before_attach` event.

    .. seealso::

        :meth:`~.SessionEvents.before_attach`

        :ref:`session_lifecycle_events`

    """
@event._legacy_signature("0.9",
                         ["session", "query", "query_context", "result"],
                         lambda update_context: (
                             update_context.session,
                             update_context.query,
                             update_context.context,
                             update_context.result))
def after_bulk_update(self, update_context):
    """Execute after a bulk update operation to the session.

    This is called as a result of the :meth:`.Query.update` method.

    :param update_context: an "update context" object which contains
     details about the update, including these attributes:

        * ``session`` - the :class:`.Session` involved
        * ``query`` - the :class:`.Query` object that this update operation
          was called upon.
        * ``context`` The :class:`.QueryContext` object, corresponding
          to the invocation of an ORM query.
        * ``result`` the :class:`.ResultProxy` returned as a result of the
          bulk UPDATE operation.

    """

@event._legacy_signature("0.9",
                         ["session", "query", "query_context", "result"],
                         lambda delete_context: (
                             delete_context.session,
                             delete_context.query,
                             delete_context.context,
                             delete_context.result))
def after_bulk_delete(self, delete_context):
    """Execute after a bulk delete operation to the session.

    This is called as a result of the :meth:`.Query.delete` method.

    :param delete_context: a "delete context" object which contains
     details about the delete, including these attributes:

        * ``session`` - the :class:`.Session` involved
        * ``query`` - the :class:`.Query` object that this delete operation
          was called upon.
        * ``context`` The :class:`.QueryContext` object, corresponding
          to the invocation of an ORM query.
        * ``result`` the :class:`.ResultProxy` returned as a result of the
          bulk DELETE operation.

    """
def transient_to_pending(self, session, instance):
    """Intercept the "transient to pending" transition for a specific object.

    This event is a specialization of the
    :meth:`.SessionEvents.after_attach` event which is only invoked
    for this specific transition.  It is invoked typically during the
    :meth:`.Session.add` call.

    :param session: target :class:`.Session`
    :param instance: the ORM-mapped instance being operated upon.

    .. versionadded:: 1.1

    .. seealso::

        :ref:`session_lifecycle_events`

    """

def pending_to_transient(self, session, instance):
    """Intercept the "pending to transient" transition for a specific object.

    This less common transition occurs when a pending object that has
    not been flushed is evicted from the session; this can occur
    when the :meth:`.Session.rollback` method rolls back the transaction,
    or when the :meth:`.Session.expunge` method is used.

    :param session: target :class:`.Session`
    :param instance: the ORM-mapped instance being operated upon.

    .. versionadded:: 1.1

    .. seealso::

        :ref:`session_lifecycle_events`

    """

def persistent_to_transient(self, session, instance):
    """Intercept the "persistent to transient" transition for a specific object.

    This less common transition occurs when a pending object that has
    been flushed is evicted from the session; this can occur
    when the :meth:`.Session.rollback` method rolls back the transaction.

    :param session: target :class:`.Session`
    :param instance: the ORM-mapped instance being operated upon.

    .. versionadded:: 1.1

    .. seealso::

        :ref:`session_lifecycle_events`

    """

def pending_to_persistent(self, session, instance):
    """Intercept the "pending to persistent" transition for a specific object.

    This event is invoked within the flush process, and is
    similar to scanning the :attr:`.Session.new` collection within
    the :meth:`.SessionEvents.after_flush` event.  However, in this
    case the object has already been moved to the persistent state
    when the event is called.

    :param session: target :class:`.Session`
    :param instance: the ORM-mapped instance being operated upon.

    .. versionadded:: 1.1

    .. seealso::

        :ref:`session_lifecycle_events`

    """
def detached_to_persistent(self, session, instance):
    """Intercept the "detached to persistent" transition for a specific object.

    This event is a specialization of the
    :meth:`.SessionEvents.after_attach` event which is only invoked
    for this specific transition.  It is invoked typically during the
    :meth:`.Session.add` call, as well as during the
    :meth:`.Session.delete` call if the object was not previously
    associated with the
    :class:`.Session` (note that an object marked as "deleted" remains
    in the "persistent" state until the flush proceeds).

    .. note::

       If the object becomes persistent as part of a call to
       :meth:`.Session.delete`, the object is **not** yet marked as
       deleted when this event is called.  To detect deleted objects,
       check the ``deleted`` flag sent to the
       :meth:`.SessionEvents.persistent_to_detached` event after the
       flush proceeds, or check the :attr:`.Session.deleted` collection
       within the :meth:`.SessionEvents.before_flush` event if deleted
       objects need to be intercepted before the flush.

    :param session: target :class:`.Session`
    :param instance: the ORM-mapped instance being operated upon.

    .. versionadded:: 1.1

    .. seealso::

        :ref:`session_lifecycle_events`

    """

def loaded_as_persistent(self, session, instance):
    """Intercept the "loaded as persistent" transition for a specific object.

    This event is invoked within the ORM loading process, and is invoked
    very similarly to the :meth:`.InstanceEvents.load` event.  However,
    the event here is linkable to a :class:`.Session` class or instance,
    rather than to a mapper or class hierarchy, and integrates
    with the other session lifecycle events smoothly.  The object
    is guaranteed to be present in the session's identity map when
    this event is called.

    :param session: target :class:`.Session`
    :param instance: the ORM-mapped instance being operated upon.

    .. versionadded:: 1.1

    .. seealso::

        :ref:`session_lifecycle_events`

    """
def persistent_to_deleted(self, session, instance):
    """Intercept the "persistent to deleted" transition for a specific object.

    This event is invoked when a persistent object's identity
    is deleted from the database within a flush, however the object
    still remains associated with the :class:`.Session` until the
    transaction completes.

    If the transaction is rolled back, the object moves again
    to the persistent state, and the
    :meth:`.SessionEvents.deleted_to_persistent` event is called.
    If the transaction is committed, the object becomes detached,
    which will emit the :meth:`.SessionEvents.deleted_to_detached`
    event.

    Note that while the :meth:`.Session.delete` method is the primary
    public interface to mark an object as deleted, many objects
    get deleted due to cascade rules, which are not always determined
    until flush time.  Therefore, there's no way to catch
    every object that will be deleted until the flush has proceeded.
    The :meth:`.SessionEvents.persistent_to_deleted` event is therefore
    invoked at the end of a flush.

    :param session: target :class:`.Session`
    :param instance: the ORM-mapped instance being operated upon.

    .. versionadded:: 1.1

    .. seealso::

        :ref:`session_lifecycle_events`

    """

def deleted_to_persistent(self, session, instance):
    """Intercept the "deleted to persistent" transition for a specific object.

    This transition occurs only when an object that's been deleted
    successfully in a flush is restored due to a call to
    :meth:`.Session.rollback`.  The event is not called under
    any other circumstances.

    :param session: target :class:`.Session`
    :param instance: the ORM-mapped instance being operated upon.

    .. versionadded:: 1.1

    .. seealso::

        :ref:`session_lifecycle_events`

    """

def deleted_to_detached(self, session, instance):
    """Intercept the "deleted to detached" transition for a specific object.

    This event is invoked when a deleted object is evicted
    from the session.  The typical case when this occurs is when
    the transaction for a :class:`.Session` in which the object
    was deleted is committed; the object moves from the deleted
    state to the detached state.

    It is also invoked for objects that were deleted in a flush
    when the :meth:`.Session.expunge_all` or :meth:`.Session.close`
    events are called, as well as if the object is individually
    expunged from its deleted state via :meth:`.Session.expunge`.

    :param session: target :class:`.Session`
    :param instance: the ORM-mapped instance being operated upon.

    .. versionadded:: 1.1

    .. seealso::

        :ref:`session_lifecycle_events`

    """

def persistent_to_detached(self, session, instance):
    """Intercept the "persistent to detached" transition for a specific object.

    This event is invoked when a persistent object is evicted
    from the session.  There are many conditions that cause this
    to happen, including:

    * using a method such as :meth:`.Session.expunge`
      or :meth:`.Session.close`

    * Calling the :meth:`.Session.rollback` method, when the object
      was part of an INSERT statement for that session's transaction

    :param session: target :class:`.Session`
    :param instance: the ORM-mapped instance being operated upon.
    :param deleted: boolean.  If True, indicates this object moved
     to the detached state because it was marked as deleted and flushed.

    .. note:: NOTE(review): a ``deleted`` flag is documented above but is
       not part of this method's signature — confirm against the event
       dispatch for this library version.

    .. versionadded:: 1.1

    .. seealso::

        :ref:`session_lifecycle_events`

    """
class AttributeEvents(event.Events):
    """Define events for object attributes.

    These are typically defined on the class-bound descriptor for the
    target class.

    e.g.::

        from sqlalchemy import event

        def my_append_listener(target, value, initiator):
            print "received append event for target: %s" % target

        event.listen(MyClass.collection, 'append', my_append_listener)

    Listeners have the option to return a possibly modified version
    of the value, when the ``retval=True`` flag is passed
    to :func:`~.event.listen`::

        def validate_phone(target, value, oldvalue, initiator):
            "Strip non-numeric characters from a phone number"
            return re.sub(r'\D', '', value)

        # setup listener on UserContact.phone attribute, instructing
        # it to use the return value
        listen(UserContact.phone, 'set', validate_phone, retval=True)

    A validation function like the above can also raise an exception
    such as :exc:`ValueError` to halt the operation.

    Several modifiers are available to the :func:`~.event.listen` function.

    :param active_history=False: When True, indicates that the
      "set" event would like to receive the "old" value being
      replaced unconditionally, even if this requires firing off
      database loads.  Note that ``active_history`` can also be
      set directly via :func:`.column_property` and
      :func:`.relationship`.

    :param propagate=False: When True, the listener function will
      be established not just for the class attribute given, but
      for attributes of the same name on all current subclasses
      of that class, as well as all future subclasses of that
      class, using an additional listener that listens for
      instrumentation events.

    :param raw=False: When True, the "target" argument to the
      event will be the :class:`.InstanceState` management
      object, rather than the mapped instance itself.

    :param retval=False: when True, the user-defined event
      listening must return the "value" argument from the
      function.  This gives the listening function the opportunity
      to change the value that is ultimately used for a "set"
      or "append" event.

    """

    # Attribute-path string used in generated documentation.
    _target_class_doc = "SomeClass.some_attribute"
    # Events here dispatch against attribute descriptors.
    _dispatch_target = QueryableAttribute
@staticmethod
def _set_dispatch(cls, dispatch_cls):
    """Install the dispatcher for this event class, defaulting the
    per-dispatch ``_active_history`` flag to ``False``."""
    result = event.Events._set_dispatch(cls, dispatch_cls)
    dispatch_cls._active_history = False
    return result
@classmethod
def _accept_with(cls, target):
    """Normalize the listen target: a :class:`.MapperProperty` is
    resolved to its class-bound attribute; anything else passes
    through unchanged."""
    # TODO: coverage
    if isinstance(target, interfaces.MapperProperty):
        return getattr(target.parent.class_, target.key)
    return target
@classmethod
def _listen(cls, event_key, active_history=False,
            raw=False, retval=False,
            propagate=False):
    # Establish an attribute-event listener.  When raw=False the
    # InstanceState target is unwrapped to the mapped instance, and
    # when retval=False the incoming value is passed through unchanged
    # regardless of what the listener returns.
    target, identifier, fn = \
        event_key.dispatch_target, event_key.identifier, \
        event_key._listen_fn

    if active_history:
        # Force "set" events to load the old value unconditionally.
        target.dispatch._active_history = True

    if not raw or not retval:
        def wrap(target, *arg):
            if not raw:
                # Unwrap InstanceState -> mapped instance.
                target = target.obj()
            if not retval:
                if arg:
                    value = arg[0]
                else:
                    value = None
                fn(target, *arg)
                # Listener is a passive observer; forward the
                # original value onward.
                return value
            else:
                return fn(target, *arg)
        event_key = event_key.with_wrapper(wrap)

    event_key.base_listen(propagate=propagate)

    if propagate:
        # Mirror the listener onto the same-named attribute of all
        # current subclasses of the target class.
        manager = instrumentation.manager_of_class(target.class_)

        for mgr in manager.subclass_managers(True):
            event_key.with_dispatch_target(
                mgr[target.key]).base_listen(propagate=True)
def append(self, target, value, initiator):
    """Receive a collection append event.

    The append event is invoked for each element as it is appended
    to the collection.  This occurs for single-item appends as well
    as for a "bulk replace" operation.

    :param target: the object instance receiving the event.
      If the listener is registered with ``raw=True``, this will
      be the :class:`.InstanceState` object.
    :param value: the value being appended.  If this listener
      is registered with ``retval=True``, the listener
      function must return this value, or a new value which
      replaces it.
    :param initiator: An instance of :class:`.attributes.Event`
      representing the initiation of the event.  May be modified
      from its original value by backref handlers in order to control
      chained event propagation, as well as be inspected for information
      about the source of the event.
    :return: if the event was registered with ``retval=True``,
     the given value, or a new effective value, should be returned.

    .. seealso::

        :meth:`.AttributeEvents.bulk_replace`

    """
def bulk_replace(self, target, values, initiator):
    """Receive a collection 'bulk replace' event.

    This event is invoked for a sequence of values as they are incoming
    to a bulk collection set operation, which can be
    modified in place before the values are treated as ORM objects.
    This is an "early hook" that runs before the bulk replace routine
    attempts to reconcile which objects are already present in the
    collection and which are being removed by the net replace operation.

    It is typical that this method be combined with use of the
    :meth:`.AttributeEvents.append` event.  When using both of these
    events, note that a bulk replace operation will invoke
    the :meth:`.AttributeEvents.append` event for all new items,
    even after :meth:`.AttributeEvents.bulk_replace` has been invoked
    for the collection as a whole.  In order to determine if an
    :meth:`.AttributeEvents.append` event is part of a bulk replace,
    use the symbol :attr:`~.attributes.OP_BULK_REPLACE` to test the
    incoming initiator::

        from sqlalchemy.orm.attributes import OP_BULK_REPLACE

        @event.listens_for(SomeObject.collection, "bulk_replace")
        def process_collection(target, values, initiator):
            values[:] = [_make_value(value) for value in values]

        @event.listens_for(SomeObject.collection, "append", retval=True)
        def process_collection(target, value, initiator):
            # make sure bulk_replace didn't already do it
            if initiator is None or initiator.op is not OP_BULK_REPLACE:
                return _make_value(value)
            else:
                return value

    .. versionadded:: 1.2

    :param target: the object instance receiving the event.
      If the listener is registered with ``raw=True``, this will
      be the :class:`.InstanceState` object.
    :param values: a sequence (e.g. a list) of the values being set.  The
      handler can modify this list in place.
    :param initiator: An instance of :class:`.attributes.Event`
      representing the initiation of the event.

    """
def remove(self, target, value, initiator):
    """Receive a collection remove event.

    :param target: the object instance receiving the event.
      If the listener is registered with ``raw=True``, this will
      be the :class:`.InstanceState` object.
    :param value: the value being removed.
    :param initiator: An instance of :class:`.attributes.Event`
      representing the initiation of the event.  May be modified
      from its original value by backref handlers in order to control
      chained event propagation.

      .. versionchanged:: 0.9.0 the ``initiator`` argument is now
         passed as a :class:`.attributes.Event` object, and may be
         modified by backref handlers within a chain of backref-linked
         events.

    :return: No return value is defined for this event.

    """
def set(self, target, value, oldvalue, initiator):
    """Receive a scalar set event.

    :param target: the object instance receiving the event.
      If the listener is registered with ``raw=True``, this will
      be the :class:`.InstanceState` object.
    :param value: the value being set.  If this listener
      is registered with ``retval=True``, the listener
      function must return this value, or a new value which
      replaces it.
    :param oldvalue: the previous value being replaced.  This
      may also be the symbol ``NEVER_SET`` or ``NO_VALUE``.
      If the listener is registered with ``active_history=True``,
      the previous value of the attribute will be loaded from
      the database if the existing value is currently unloaded
      or expired.
    :param initiator: An instance of :class:`.attributes.Event`
      representing the initiation of the event.  May be modified
      from its original value by backref handlers in order to control
      chained event propagation.

      .. versionchanged:: 0.9.0 the ``initiator`` argument is now
         passed as a :class:`.attributes.Event` object, and may be
         modified by backref handlers within a chain of backref-linked
         events.

    :return: if the event was registered with ``retval=True``,
     the given value, or a new effective value, should be returned.

    """
def init_scalar(self, target, value, dict_):
    """Receive a scalar "init" event.

    This event is invoked when an uninitialized, unpersisted scalar
    attribute is accessed.  A value of ``None`` is typically returned
    in this case; no changes are made to the object's state.

    The event handler can alter this behavior in two ways.
    One is that a value other than ``None`` may be returned.  The other
    is that the value may be established as part of the object's state,
    which will also have the effect that it is persisted.

    Typical use is to establish a specific default value of an attribute
    upon access::

        SOME_CONSTANT = 3.1415926

        @event.listens_for(
            MyClass.some_attribute, "init_scalar",
            retval=True, propagate=True)
        def _init_some_attribute(target, value, dict_):
            dict_['some_attribute'] = SOME_CONSTANT
            return SOME_CONSTANT

    Above, we initialize the attribute ``MyClass.some_attribute`` to the
    value of ``SOME_CONSTANT``.  The above code includes the following
    features:

    * By setting the value ``SOME_CONSTANT`` in the given ``dict_``,
      we indicate that the value is to be persisted to the database.
      **The given value is only persisted to the database if we
      explicitly associate it with the object**.  The ``dict_`` given
      is the ``__dict__`` element of the mapped object, assuming the
      default attribute instrumentation system is in place.

    * By establishing the ``retval=True`` flag, the value we return
      from the function will be returned by the attribute getter.
      Without this flag, the event is assumed to be a passive observer
      and the return value of our function is ignored.

    * The ``propagate=True`` flag is significant if the mapped class
      includes inheriting subclasses, which would also make use of this
      event listener.  Without this flag, an inheriting subclass will
      not use our event handler.

    When we establish the value in the given dictionary, the value will
    be used in the INSERT statement established by the unit of work.
    Normally, the default returned value of ``None`` is not established as
    part of the object, to avoid the issue of mutations occurring to the
    object in response to a normally passive "get" operation, and also
    sidesteps the issue of whether or not the :meth:`.AttributeEvents.set`
    event should be awkwardly fired off during an attribute access
    operation.  This does not impact the INSERT operation since the
    ``None`` value matches the value of ``NULL`` that goes into the
    database in any case; note that ``None`` is skipped during the INSERT
    to ensure that column and SQL-level default functions can fire off.

    The attribute set event :meth:`.AttributeEvents.set` as well as the
    related validation feature provided by :obj:`.orm.validates` is
    **not** invoked when we apply our value to the given ``dict_``.  To
    have these events to invoke in response to our newly generated
    value, apply the value to the given object as a normal attribute
    set operation::

        SOME_CONSTANT = 3.1415926

        @event.listens_for(
            MyClass.some_attribute, "init_scalar",
            retval=True, propagate=True)
        def _init_some_attribute(target, value, dict_):
            # will also fire off attribute set events
            target.some_attribute = SOME_CONSTANT
            return SOME_CONSTANT

    When multiple listeners are set up, the generation of the value
    is "chained" from one listener to the next by passing the value
    returned by the previous listener that specifies ``retval=True``
    as the ``value`` argument of the next listener.

    The :meth:`.AttributeEvents.init_scalar` event may be used to
    extract values from the default values and/or callables established on
    mapped :class:`.Column` objects.  See the "active column defaults"
    example in :ref:`examples_instrumentation` for an example of this.

    .. versionadded:: 1.1

    :param target: the object instance receiving the event.
      If the listener is registered with ``raw=True``, this will
      be the :class:`.InstanceState` object.
    :param value: the value that is to be returned before this event
      listener were invoked.  This value begins as the value ``None``,
      however will be the return value of the previous event handler
      function if multiple listeners are present.
    :param dict_: the attribute dictionary of this mapped object.
      This is normally the ``__dict__`` of the object, but in all cases
      represents the destination that the attribute system uses to get
      at the actual value of this attribute.  Placing the value in this
      dictionary has the effect that the value will be used in the
      INSERT statement generated by the unit of work.

    .. seealso::

        :ref:`examples_instrumentation` - see the
        ``active_column_defaults.py`` example.

    """
def init_collection(self, target, collection, collection_adapter):
    """Receive a 'collection init' event.

    This event is triggered for a collection-based attribute, when
    the initial "empty collection" is first generated for a blank
    attribute, as well as for when the collection is replaced with
    a new one, such as via a set event.

    E.g., given that ``User.addresses`` is a relationship-based
    collection, the event is triggered here::

        u1 = User()
        u1.addresses.append(a1)  # <- new collection

    and also during replace operations::

        u1.addresses = [a2, a3]  # <- new collection

    :param target: the object instance receiving the event.
      If the listener is registered with ``raw=True``, this will
      be the :class:`.InstanceState` object.
    :param collection: the new collection.  This will always be generated
      from what was specified as
      :paramref:`.RelationshipProperty.collection_class`, and will always
      be empty.
    :param collection_adapter: the :class:`.CollectionAdapter` that will
      mediate internal access to the collection.

    .. versionadded:: 1.0.0 the :meth:`.AttributeEvents.init_collection`
       and :meth:`.AttributeEvents.dispose_collection` events supersede
       the :class:`.collection.linker` hook.

    """
def dispose_collection(self, target, collection, collection_adpater):
    """Receive a 'collection dispose' event.

    This event is triggered for a collection-based attribute when
    a collection is replaced, that is::

        u1.addresses.append(a1)

        u1.addresses = [a2, a3]  # <- old collection is disposed

    The old collection received will contain its previous contents.

    :param target: the object instance receiving the event.
    :param collection: the collection being replaced.
    :param collection_adpater: the adapter that mediated access to the
      collection (parameter name spelled as in the original signature,
      kept unchanged to preserve keyword compatibility).

    .. versionchanged:: 1.2 The collection passed to
       :meth:`.AttributeEvents.dispose_collection` will now have its
       contents before the dispose intact; previously, the collection
       would be empty.

    .. versionadded:: 1.0.0 the :meth:`.AttributeEvents.init_collection`
       and :meth:`.AttributeEvents.dispose_collection` events supersede
       the :class:`.collection.linker` hook.

    """
def modified(self, target, initiator):
    """Receive a 'modified' event.

    This event is triggered when the :func:`.attributes.flag_modified`
    function is used to trigger a modify event on an attribute without
    any specific value being set.

    .. versionadded:: 1.2

    :param target: the object instance receiving the event.
      If the listener is registered with ``raw=True``, this will
      be the :class:`.InstanceState` object.
    :param initiator: An instance of :class:`.attributes.Event`
      representing the initiation of the event.

    """
class QueryEvents(event.Events):
    """Represent events within the construction of a :class:`.Query` object.

    The events here are intended to be used with an as-yet-unreleased
    inspection system for :class:`.Query`.  Some very basic operations
    are possible now, however the inspection system is intended to allow
    complex query manipulations to be automated.

    .. versionadded:: 1.0.0

    """

    # Target string used in generated documentation.
    _target_class_doc = "SomeQuery"
    # Events here dispatch against Query objects.
    _dispatch_target = Query

    def before_compile(self, query):
        """Receive the :class:`.Query` object before it is composed into a
        core :class:`.Select` object.

        This event is intended to allow changes to the query given::

            @event.listens_for(Query, "before_compile", retval=True)
            def no_deleted(query):
                for desc in query.column_descriptions:
                    if desc['type'] is User:
                        entity = desc['entity']
                        query = query.filter(entity.deleted == False)
                return query

        The event should normally be listened with the ``retval=True``
        parameter set, so that the modified query may be returned.

        """
@classmethod
def _listen(
cls, event_key, retval=False, **kw):
fn = event_key._listen_fn
if not retval:
def wrap(*arg, **kw):
if not retval:
query = arg[0]
fn(*arg, **kw)
return query
else:
|
[
" return fn(*arg, **kw)"
] | 9,203
|
lcc
|
python
| null |
9517e442464f24ead2a124310c2f1752d78474b38bd420f2
|
|
"""Conditional module is the xmodule, which you can use for disabling
some xmodules by conditions.
"""
import json
import logging
from lazy import lazy
from lxml import etree
from pkg_resources import resource_string
from xmodule.x_module import XModule, STUDENT_VIEW
from xmodule.seq_module import SequenceDescriptor
from xblock.fields import Scope, ReferenceList
from xmodule.modulestore.exceptions import ItemNotFoundError
log = logging.getLogger('edx.' + __name__)
class ConditionalFields(object):
    """Field definitions shared by the conditional module and its descriptor."""
    has_children = True
    # Children that are merely <show> references to external modules.
    show_tag_list = ReferenceList(help="List of urls of children that are references to external modules", scope=Scope.content)
    # Modules whose state gates the visibility of this module's children.
    sources_list = ReferenceList(help="List of sources upon which this module is conditional", scope=Scope.content)
class ConditionalModule(ConditionalFields, XModule):
    """
    Blocks child module from showing unless certain conditions are met.

    Example:

        <conditional sources="i4x://.../problem_1; i4x://.../problem_2" completed="True">
            <show sources="i4x://.../test_6; i4x://.../Avi_resources"/>
            <video url_name="secret_video" />
        </conditional>

        <conditional> tag attributes:
            sources - location id of required modules, separated by ';'

            submitted - map to `is_submitted` module method.
            (pressing RESET button makes this function return False.)

            attempted - map to `is_attempted` module method
            correct - map to `is_correct` module method
            poll_answer - map to `poll_answer` module attribute
            voted - map to `voted` module attribute

        <show> tag attributes:
            sources - location id of required modules, separated by ';'

        You can add your own rules for <conditional> tag, like
        "completed", "attempted" etc.  To do that you must extend
        `ConditionalModule.conditions_map` variable and add pair:
            my_attr: my_property/my_method

        After that you can use it:
            <conditional my_attr="some value" ...>
                ...
            </conditional>

        And my_property/my_method will be called for required modules.
    """

    js = {
        'coffee': [
            resource_string(__name__, 'js/src/javascript_loader.coffee'),
            resource_string(__name__, 'js/src/conditional/display.coffee'),
        ],
        'js': [
            resource_string(__name__, 'js/src/collapsible.js'),
        ]
    }
    js_module_name = "Conditional"
    css = {'scss': [resource_string(__name__, 'css/capa/display.scss')]}

    # Map
    #   key: <tag attribute in xml>
    #   value: <name of module attribute>
    conditions_map = {
        'poll_answer': 'poll_answer',  # poll_question attr

        # problem was submitted (it can be wrong)
        # if student will press reset button after that,
        # state will be reverted
        'submitted': 'is_submitted',  # capa_problem attr

        # if student attempted problem
        'attempted': 'is_attempted',  # capa_problem attr

        # if problem is full points
        'correct': 'is_correct',

        'voted': 'voted'  # poll_question attr
    }
def _get_condition(self):
    """Return the first ``(xml_value, attr_name)`` pair for which the
    descriptor's xml_attributes carry a truthy value.

    Raises a generic ``Exception`` when none of the known condition
    attributes are present.  Iteration order over ``conditions_map`` is
    arbitrary, so authors should only set one condition attribute.
    """
    # Get first valid condition.
    for xml_attr, attr_name in self.conditions_map.iteritems():
        xml_value = self.descriptor.xml_attributes.get(xml_attr)
        if xml_value:
            return xml_value, attr_name
    raise Exception(
        'Error in conditional module: no known conditional found in {!r}'.format(
            self.descriptor.xml_attributes.keys()
        )
    )
@lazy
def required_modules(self):
    # Modules this conditional depends on, resolved once on first
    # access (``@lazy``) from the descriptor's required descriptors.
    return [self.system.get_module(descriptor) for
            descriptor in self.descriptor.get_required_module_descriptors()]
def is_condition_satisfied(self):
    """Return True only if every required module's condition attribute
    matches the value configured in the XML; False otherwise (including
    when a required module lacks the attribute entirely).
    """
    xml_value, attr_name = self._get_condition()

    if xml_value and self.required_modules:
        for module in self.required_modules:
            if not hasattr(module, attr_name):
                # We don't throw an exception here because it is possible for
                # the descriptor of a required module to have a property but
                # for the resulting module to be a (flavor of) ErrorModule.
                # So just log and return false.
                log.warn('Error in conditional module: \
required module {module} has no {module_attr}'.format(module=module, module_attr=attr_name))
                return False

            attr = getattr(module, attr_name)
            if callable(attr):
                attr = attr()

            # Values are compared as strings since xml_value comes
            # straight from the XML attribute.
            if xml_value != str(attr):
                break
        else:
            # for/else: only reached when no module broke out of the
            # loop, i.e. every required module matched.
            return True

    return False
def get_html(self):
    """Render the client-side shell; actual children are fetched via AJAX
    (see ``handle_ajax``) once the dependency modules report their state.
    """
    # Calculate html ids of dependencies
    self.required_html_ids = [descriptor.location.html_id() for
                              descriptor in self.descriptor.get_required_module_descriptors()]

    return self.system.render_template('conditional_ajax.html', {
        'element_id': self.location.html_id(),
        'ajax_url': self.system.ajax_url,
        'depends': ';'.join(self.required_html_ids)
    })
def handle_ajax(self, _dispatch, _data):
    """This is called by courseware.module_render, to handle
    an AJAX call.

    Returns a JSON string: either the rendered "not yet visible"
    message, or the rendered HTML of all child modules once the
    condition is satisfied.
    """
    if not self.is_condition_satisfied():
        defmsg = "{link} must be attempted before this will become visible."
        message = self.descriptor.xml_attributes.get('message', defmsg)
        context = {'module': self,
                   'message': message}
        html = self.system.render_template('conditional_module.html',
                                           context)
        return json.dumps({'html': [html], 'message': bool(message)})

    html = [child.render(STUDENT_VIEW).content for child in self.get_display_items()]

    return json.dumps({'html': html})
def get_icon_class(self):
    """Pick an icon class based on the icon classes of this module's
    children, preferring the later entries of ``class_priority`` just
    as the original overwrite-in-a-loop implementation did; falls back
    to ``'other'`` when no child matches."""
    # HACK: This shouldn't be hard-coded to two types
    # OBSOLETE: This obsoletes 'type'
    class_priority = ['problem', 'video']

    child_classes = [
        self.system.get_module(child_descriptor).get_icon_class()
        for child_descriptor in self.descriptor.get_children()
    ]
    # The original loop let later priority entries override earlier
    # ones, so scan from the end and take the first hit.
    return next(
        (icon for icon in reversed(class_priority) if icon in child_classes),
        'other'
    )
class ConditionalDescriptor(ConditionalFields, SequenceDescriptor):
    """Descriptor for conditional xmodule."""
    _tag_name = 'conditional'

    module_class = ConditionalModule

    filename_extension = "xml"

    has_score = False

    show_in_read_only_mode = True

    def __init__(self, *args, **kwargs):
        """
        Create an instance of the conditional module.

        If ``sources_list`` was not populated by the field layer, it is
        back-filled from the legacy ``sources`` xml attribute.
        """
        super(ConditionalDescriptor, self).__init__(*args, **kwargs)
        # Convert sources xml_attribute to a ReferenceList field type so Location/Locator
        # substitution can be done.
        if not self.sources_list:
            if 'sources' in self.xml_attributes and isinstance(self.xml_attributes['sources'], basestring):
                self.sources_list = [
                    # Legacy i4x:// strings are upgraded to usage keys.
                    self.location.course_key.make_usage_key_from_deprecated_string(item)
                    for item in ConditionalDescriptor.parse_sources(self.xml_attributes)
                ]
@staticmethod
def parse_sources(xml_element):
    """Parse xml_element 'sources' attr and return a list of location strings.

    Returns an empty list when the 'sources' attribute is missing or
    empty, so callers can iterate the result unconditionally.  (The
    previous implicit ``None`` return made ``definition_from_xml``
    raise ``TypeError`` for a <show/> tag with no sources; ``[]`` is
    equally falsy, so truthiness-based callers are unaffected.)
    """
    sources = xml_element.get('sources')
    if sources:
        return [location.strip() for location in sources.split(';')]
    return []
def get_required_module_descriptors(self):
    """Return the XModuleDescriptor instances this module depends on.

    Sources that cannot be loaded are logged, reported to the error
    tracker, and skipped rather than aborting the whole lookup.
    """
    found = []
    for source_location in self.sources_list:
        try:
            found.append(self.system.load_item(source_location))
        except ItemNotFoundError:
            msg = "Invalid module by location."
            log.exception(msg)
            self.system.error_tracker(msg)
    return found
@classmethod
def definition_from_xml(cls, xml_object, system):
    """Parse a <conditional> node.

    <show> children contribute their 'sources' locations both to the
    children list and to show_tag_list; any other child element is
    parsed as a nested descriptor.

    Returns
    -------
    ({'show_tag_list': [...]}, children)
    """
    children = []
    show_tag_list = []
    for child in xml_object:
        if child.tag == 'show':
            # Guard with `or []`: parse_sources may yield nothing when
            # the <show> tag has no 'sources' attribute.
            locations = ConditionalDescriptor.parse_sources(child)
            for location in (locations or []):
                children.append(location)
                show_tag_list.append(location)
        else:
            try:
                descriptor = system.process_xml(etree.tostring(child))
                children.append(descriptor.scope_ids.usage_id)
            # Was a bare `except:`, which also swallows SystemExit and
            # KeyboardInterrupt; catch only real errors.
            except Exception:
                msg = "Unable to load child when parsing Conditional."
                log.exception(msg)
                system.error_tracker(msg)
    return {'show_tag_list': show_tag_list}, children
def definition_to_xml(self, resource_fs):
xml_object = etree.Element(self._tag_name)
for child in self.get_children():
if child.location not in self.show_tag_list:
self.runtime.add_block_as_child_node(child, xml_object)
if self.show_tag_list:
|
[
" show_str = u'<{tag_name} sources=\"{sources}\" />'.format("
] | 801
|
lcc
|
python
| null |
b7096bf4313f6484e8f39bf9f0977db406f68e5dc24dcd54
|
|
import numpy as np
import larray as la
from larray_editor.utils import Product, _LazyDimLabels, Axis, get_sample
from larray_editor.commands import ArrayValueChange
# Global registry mapping a data type to its adapter class.
REGISTERED_ADAPTERS = {}


def register_adapter(type):
    """Class decorator to register a new adapter.

    Parameters
    ----------
    type : type
        Type associated with adapter class.

    The first registration for a given type wins; later registrations
    for the same type are ignored.
    """
    def decorate_class(cls):
        REGISTERED_ADAPTERS.setdefault(type, cls)
        return cls
    return decorate_class
def get_adapter(data, bg_value):
    """Instantiate the adapter registered for ``type(data)``.

    Returns None when ``data`` is None; raises TypeError when no adapter
    is registered for the data's exact type.
    """
    if data is None:
        return None
    data_type = type(data)
    adapter_cls = REGISTERED_ADAPTERS.get(data_type)
    if adapter_cls is None:
        raise TypeError("No Adapter implemented for data with type {}".format(data_type))
    return adapter_cls(data, bg_value)
class AbstractAdapter(object):
def __init__(self, data, bg_value):
    # `data` and `bg_value` go through the property setters below, which
    # run prepare_data()/prepare_bg_value() on the raw inputs first.
    self.data = data
    self.bg_value = bg_value
    # Start unfiltered: filter_data() is invoked with an empty dict.
    self.current_filter = {}
    self.update_filtered_data()
    # NOTE(review): these are reset to None *after* update_filtered_data();
    # presumably subclasses populate them later -- confirm intended.
    self.ndim = None
    self.size = None
    self.dtype = None
# ===================== #
# PROPERTIES #
# ===================== #
@property
def data(self):
return self._original_data
@data.setter
def data(self, original_data):
assert original_data is not None, "{} does not accept None as input data".format(self.__class__)
self._original_data = self.prepare_data(original_data)
@property
def bg_value(self):
return self._bg_value
@bg_value.setter
def bg_value(self, bg_value):
self._bg_value = self.prepare_bg_value(bg_value)
# ===================== #
# METHODS TO OVERRIDE #
# ===================== #
def prepare_data(self, data):
"""Must be overridden if data passed to set_data need some checks and/or transformations"""
return data
def prepare_bg_value(self, bg_value):
"""Must be overridden if bg_value passed to set_data need some checks and/or transformations"""
return bg_value
def filter_data(self, data, filter):
"""Return filtered data"""
raise NotImplementedError()
def get_axes(self, data):
"""Return list of :py:class:`Axis` or an empty list in case of a scalar or an empty array.
"""
raise NotImplementedError()
def _get_raw_data(self, data):
"""Return internal data as a ND Numpy array"""
raise NotImplementedError()
def _get_bg_value(self, bg_value):
"""Return bg_value as ND Numpy array or None.
It must have the same shape as data if not None.
"""
raise NotImplementedError()
def _from_selection(self, raw_data, axes_names, vlabels, hlabels):
"""Create and return an object of type managed by the adapter subclass.
Parameters
----------
raw_data : Numpy.ndarray
Array of selected data.
axes_names : list of string
List of axis names
vlabels : nested list
Selected vertical labels
hlabels: list
Selected horizontal labels
Returns
-------
Object of the type managed by the adapter subclass.
"""
raise NotImplementedError()
def move_axis(self, data, bg_value, old_index, new_index):
"""Move an axis of the data array and associated bg value.
Parameters
----------
data : array
Array to transpose
bg_value : array or None
Associated bg_value array.
old_index: int
Current index of axis to move.
new_index: int
New index of axis after transpose.
Returns
-------
data : array
Transposed input array
bg_value: array
Transposed associated bg_value
"""
raise NotImplementedError()
def _map_global_to_filtered(self, data, filtered_data, filter, key):
"""
map global (unfiltered) ND key to local (filtered) 2D key
Parameters
----------
data : array
Input array.
filtered_data : array
Filtered data.
filter : dict
Current filter.
key: tuple
Labels associated with the modified element of the non-filtered array.
Returns
-------
tuple
Positional index (row, column) of the modified data cell.
"""
raise NotImplementedError()
def _map_filtered_to_global(self, filtered_data, data, filter, key):
"""
map local (filtered data) 2D key to global (unfiltered) ND key.
Parameters
----------
filtered_data : array
Filtered data.
data : array
Input array.
filter : dict
Current filter.
key: tuple
Positional index (row, column) of the modified data cell.
Returns
-------
tuple
Labels associated with the modified element of the non-filtered array.
"""
raise NotImplementedError()
def _to_excel(self, data):
"""Export data to an Excel Sheet
Parameters
----------
data : array
data to export.
"""
raise NotImplementedError()
def _plot(self, data):
"""Return a matplotlib.Figure object using input data.
Parameters
----------
data : array
Data to plot.
Returns
-------
A matplotlib.Figure object.
"""
raise NotImplementedError
# =========================== #
# OTHER METHODS #
# =========================== #
def get_axes_filtered_data(self):
    # Convenience: axes of the *filtered* (currently displayed) data.
    return self.get_axes(self.filtered_data)
def get_sample(self):
    """Return a sample of the internal data"""
    data = self._get_raw_data(self.filtered_data)
    # this will yield a data sample of max 200
    # (calls the *module-level* get_sample helper imported from
    # larray_editor.utils, not this method -- no recursion)
    sample = get_sample(data, 200)
    # drop NaN/+-inf entries before returning
    return sample[np.isfinite(sample)]
def get_axes_names(self, fold_last_axis=False):
    """Return the names of the filtered data's axes.

    With ``fold_last_axis=True`` the last two names are folded into a
    single 'row\\column' style name.
    """
    names = [axis.name for axis in self.get_axes_filtered_data()]
    if fold_last_axis and len(names) >= 2:
        folded = names[-2] + '\\' + names[-1]
        names = names[:-2] + [folded]
    return names
def get_vlabels(self):
    """Return the vertical (row) labels of the filtered data.

    All axes but the last contribute; their cartesian product is
    exposed lazily via Product/_LazyDimLabels.  Degenerate cases:
    no axes -> [[]], one axis -> [['']].
    """
    axes = self.get_axes(self.filtered_data)
    if not axes:
        return [[]]
    if len(axes) == 1:
        return [['']]
    dim_labels = [axis.labels for axis in axes[:-1]]
    prod = Product(dim_labels)
    return [_LazyDimLabels(prod, i) for i in range(len(dim_labels))]
def get_hlabels(self):
    """Return the horizontal (column) labels of the filtered data,
    wrapped in a Product so the widget can index them uniformly.
    """
    axes = self.get_axes(self.filtered_data)
    last_labels = axes[-1].labels if axes else [[]]
    return Product([last_labels])
def _get_shape_2D(self, np_data):
shape, ndim = np_data.shape, np_data.ndim
if ndim == 0:
shape_2D = (1, 1)
elif ndim == 1:
shape_2D = (1,) + shape
elif ndim == 2:
shape_2D = shape
else:
shape_2D = (np.prod(shape[:-1]), shape[-1])
return shape_2D
def get_raw_data(self):
    """Return the filtered data as a 2D Numpy array.

    The ND array produced by _get_raw_data is reshaped to the
    equivalent 2D shape computed by _get_shape_2D.
    """
    np_data = self._get_raw_data(self.filtered_data)
    assert isinstance(np_data, np.ndarray)
    rows, columns = self._get_shape_2D(np_data)
    # sanity check: the 2D shape must cover every element exactly once
    assert rows * columns == np_data.size
    return np_data.reshape((rows, columns))
def get_bg_value(self):
    """Return the filtered bg value reshaped as a 2D array, or None
    when no bg value is set.
    """
    if self.bg_value is None:
        return None
    filtered_bg = self.filter_data(self.bg_value, self.current_filter)
    np_bg_value = self._get_bg_value(filtered_bg)
    shape_2D = self._get_shape_2D(np_bg_value)
    # sanity check: the 2D shape must cover every element exactly once
    assert shape_2D[0] * shape_2D[1] == np_bg_value.size
    return np_bg_value.reshape(shape_2D)
def update_filtered_data(self):
    # Recompute the cached filtered view from the full data and the
    # current filter dict.
    self.filtered_data = self.filter_data(self.data, self.current_filter)
def change_filter(self, data, filter, axis, indices):
    """Update ``filter`` in place for ``axis`` after a selection change.

    Selecting nothing or every label removes the axis from the filter;
    a single selected label is stored as a scalar, several as an array
    of labels.  ``data`` is not used by this base implementation.

    Parameters
    ----------
    data : array
        Input array.
    filter : dict
        Dictionary {axis_id: labels} representing the current selection.
    axis : Axis
        Axis for which selection has changed.
    indices : list of int
        Indices of selected labels.
    """
    axis_id = axis.id
    select_all = not indices or len(indices) == len(axis)
    if select_all:
        filter.pop(axis_id, None)
    elif len(indices) == 1:
        filter[axis_id] = axis.labels[indices[0]]
    else:
        filter[axis_id] = axis.labels[indices]
def update_filter(self, axis, indices):
    # Apply a selection change for `axis`, then refresh the cached
    # filtered data so subsequent reads see the new filter.
    self.change_filter(self.data, self.current_filter, axis, indices)
    self.update_filtered_data()
def translate_changes(self, data_model_changes):
def to_global(key):
return self._map_filtered_to_global(self.filtered_data, self.data, self.current_filter, key)
global_changes = [ArrayValueChange(to_global(key), old_value, new_value)
|
[
" for key, (old_value, new_value) in data_model_changes.items()]"
] | 947
|
lcc
|
python
| null |
8662bf8b41808cda788f6de30f92e265f34051125e765056
|
|
/*
Copyright (C) 2002-2010 Jeroen Frijters
This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any damages
arising from the use of this software.
Permission is granted to anyone to use this software for any purpose,
including commercial applications, and to alter it and redistribute it
freely, subject to the following restrictions:
1. The origin of this software must not be misrepresented; you must not
claim that you wrote the original software. If you use this software
in a product, an acknowledgment in the product documentation would be
appreciated but is not required.
2. Altered source versions must be plainly marked as such, and must not be
misrepresented as being the original software.
3. This notice may not be removed or altered from any source distribution.
Jeroen Frijters
jeroen@frijters.net
*/
using System;
using System.Collections.Generic;
using System.Xml.Serialization;
using IKVM.Reflection;
using IKVM.Reflection.Emit;
using Type = IKVM.Reflection.Type;
using System.Diagnostics;
using IKVM.Attributes;
using IKVM.Internal;
namespace IKVM.Internal.MapXml
{
sealed class CodeGenContext
{
    // Class loader used to resolve classes and signatures referenced by
    // map.xml instructions.  Made readonly for consistency with `h`:
    // it is only ever assigned in the constructor.
    private readonly ClassLoaderWrapper classLoader;
    // Scratch storage shared between instructions (labels, locals),
    // keyed by the name attribute of the emitting element.
    private readonly Dictionary<string, object> h = new Dictionary<string, object>();

    internal CodeGenContext(ClassLoaderWrapper classLoader)
    {
        this.classLoader = classLoader;
    }

    // Returns null when the key has not been set yet.
    internal object this[string key]
    {
        get
        {
            object val;
            h.TryGetValue(key, out val);
            return val;
        }
        set { h[key] = value; }
    }

    internal ClassLoaderWrapper ClassLoader { get { return classLoader; } }
}
public abstract class Instruction
{
    // Captured when the instance is constructed; presumably the map.xml
    // parser's current line via Root.LineNumber -- confirm against Root.
    private int lineNumber = Root.LineNumber;

    internal int LineNumber
    {
        get
        {
            return lineNumber;
        }
    }

    // Emit the IL for this instruction into the given emitter.
    internal abstract void Generate(CodeGenContext context, CodeEmitter ilgen);

    // Render the instruction back into an XML-like form (e.g.
    // <ldstr value="x" />) for diagnostics, using the XmlType/XmlAttribute
    // serialization metadata when present and falling back to type/field names.
    public override string ToString()
    {
        System.Text.StringBuilder sb = new System.Text.StringBuilder();
        sb.Append('<');
        object[] attr = GetType().GetCustomAttributes(typeof(XmlTypeAttribute), false);
        if (attr.Length == 1)
        {
            sb.Append(((XmlTypeAttribute)attr[0]).TypeName);
        }
        else
        {
            sb.Append(GetType().Name);
        }
        // Public instance fields double as the instruction's XML attributes.
        foreach (System.Reflection.FieldInfo field in GetType().GetFields())
        {
            if (!field.IsStatic)
            {
                object value = field.GetValue(this);
                if (value != null)
                {
                    attr = field.GetCustomAttributes(typeof(XmlAttributeAttribute), false);
                    if (attr.Length == 1)
                    {
                        sb.AppendFormat(" {0}=\"{1}\"", ((XmlAttributeAttribute)attr[0]).AttributeName, value);
                    }
                }
            }
        }
        sb.Append(" />");
        return sb.ToString();
    }
}
[XmlType("ldstr")]
public sealed class Ldstr : Instruction
{
[XmlAttribute("value")]
public string Value;
internal override void Generate(CodeGenContext context, CodeEmitter ilgen)
{
ilgen.Emit(OpCodes.Ldstr, Value);
}
}
[XmlType("ldnull")]
public sealed class Ldnull : Simple
{
public Ldnull() : base(OpCodes.Ldnull)
{
}
}
[XmlType("call")]
public class Call : Instruction
{
    public Call() : this(OpCodes.Call)
    {
    }

    // Subclasses (Callvirt, NewObj, Ldftn, Ldvirtftn) reuse this body
    // with a different opcode.
    internal Call(OpCode opcode)
    {
        this.opcode = opcode;
    }

    // Exactly one of Class (resolved via the class loader) or type
    // (resolved via StaticCompiler.GetTypeForMapXml) must be set;
    // enforced by the Debug.Asserts in Generate.
    [XmlAttribute("class")]
    public string Class;
    [XmlAttribute("type")]
    public string type;
    [XmlAttribute("name")]
    public string Name;
    [XmlAttribute("sig")]
    public string Sig;

    private OpCode opcode;

    internal sealed override void Generate(CodeGenContext context, CodeEmitter ilgen)
    {
        Debug.Assert(Name != null);
        if(Name == ".ctor")
        {
            // Constructor calls are only supported through 'type'.
            Debug.Assert(Class == null && type != null);
            Type[] argTypes = context.ClassLoader.ArgTypeListFromSig(Sig);
            ConstructorInfo ci = StaticCompiler.GetTypeForMapXml(context.ClassLoader, type).GetConstructor(BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance, null, CallingConventions.Standard, argTypes, null);
            if(ci == null)
            {
                throw new InvalidOperationException("Missing .ctor: " + type + "..ctor" + Sig);
            }
            ilgen.Emit(opcode, ci);
        }
        else
        {
            Debug.Assert(Class == null ^ type == null);
            if(Class != null)
            {
                // Method resolved through the class loader by name + signature.
                Debug.Assert(Sig != null);
                MethodWrapper method = context.ClassLoader.LoadClassByDottedName(Class).GetMethodWrapper(Name, Sig, false);
                if(method == null)
                {
                    throw new InvalidOperationException("method not found: " + Class + "." + Name + Sig);
                }
                method.Link();
                // TODO this code is part of what Compiler.CastInterfaceArgs (in compiler.cs) does,
                // it would be nice if we could avoid this duplication...
                TypeWrapper[] argTypeWrappers = method.GetParameters();
                for(int i = 0; i < argTypeWrappers.Length; i++)
                {
                    if(argTypeWrappers[i].IsGhost)
                    {
                        // At least one ghost parameter: spill all arguments
                        // (plus the receiver for instance methods) into locals
                        // from right to left, converting ghost values to their
                        // signature type on the way, then reload them in order
                        // so the stack matches the method's real signature.
                        CodeEmitterLocal[] temps = new CodeEmitterLocal[argTypeWrappers.Length + (method.IsStatic ? 0 : 1)];
                        for(int j = temps.Length - 1; j >= 0; j--)
                        {
                            TypeWrapper tw;
                            if(method.IsStatic)
                            {
                                tw = argTypeWrappers[j];
                            }
                            else
                            {
                                if(j == 0)
                                {
                                    // slot 0 holds the receiver for instance methods
                                    tw = method.DeclaringType;
                                }
                                else
                                {
                                    tw = argTypeWrappers[j - 1];
                                }
                            }
                            if(tw.IsGhost)
                            {
                                tw.EmitConvStackTypeToSignatureType(ilgen, null);
                            }
                            temps[j] = ilgen.DeclareLocal(tw.TypeAsSignatureType);
                            ilgen.Emit(OpCodes.Stloc, temps[j]);
                        }
                        for(int j = 0; j < temps.Length; j++)
                        {
                            ilgen.Emit(OpCodes.Ldloc, temps[j]);
                        }
                        break;
                    }
                }
                // Dispatch on the opcode this element represents.
                if(opcode.Value == OpCodes.Call.Value)
                {
                    method.EmitCall(ilgen);
                }
                else if(opcode.Value == OpCodes.Callvirt.Value)
                {
                    method.EmitCallvirt(ilgen);
                }
                else if(opcode.Value == OpCodes.Newobj.Value)
                {
                    method.EmitNewobj(ilgen);
                }
                else
                {
                    // ldftn or ldvirtftn
                    ilgen.Emit(opcode, (MethodInfo)method.GetMethod());
                }
            }
            else
            {
                // CLR method: Sig is either a "(...)"-style signature, empty
                // (no parameters), or a ';'-separated list of CLR type names.
                Type[] argTypes;
                if(Sig.StartsWith("("))
                {
                    argTypes = context.ClassLoader.ArgTypeListFromSig(Sig);
                }
                else if(Sig == "")
                {
                    argTypes = Type.EmptyTypes;
                }
                else
                {
                    string[] types = Sig.Split(';');
                    argTypes = new Type[types.Length];
                    for(int i = 0; i < types.Length; i++)
                    {
                        argTypes[i] = StaticCompiler.GetTypeForMapXml(context.ClassLoader, types[i]);
                    }
                }
                MethodInfo mi = StaticCompiler.GetTypeForMapXml(context.ClassLoader, type).GetMethod(Name, BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance | BindingFlags.Static, null, argTypes, null);
                if(mi == null)
                {
                    throw new InvalidOperationException("Missing method: " + type + "." + Name + Sig);
                }
                ilgen.Emit(opcode, mi);
            }
        }
    }
}
[XmlType("callvirt")]
public sealed class Callvirt : Call
{
public Callvirt() : base(OpCodes.Callvirt)
{
}
}
[XmlType("newobj")]
public sealed class NewObj : Call
{
public NewObj() : base(OpCodes.Newobj)
{
}
}
[XmlType("ldftn")]
public sealed class Ldftn : Call
{
public Ldftn() : base(OpCodes.Ldftn)
{
}
}
[XmlType("ldvirtftn")]
public sealed class Ldvirtftn : Call
{
public Ldvirtftn() : base(OpCodes.Ldvirtftn)
{
}
}
public abstract class Simple : Instruction
{
private OpCode opcode;
public Simple(OpCode opcode)
{
this.opcode = opcode;
}
internal sealed override void Generate(CodeGenContext context, CodeEmitter ilgen)
{
ilgen.Emit(opcode);
}
}
[XmlType("dup")]
public sealed class Dup : Simple
{
public Dup() : base(OpCodes.Dup)
{
}
}
[XmlType("pop")]
public sealed class Pop : Instruction
{
internal override void Generate(CodeGenContext context, CodeEmitter ilgen)
{
ilgen.Emit(OpCodes.Pop);
}
}
public abstract class TypeOrTypeWrapperInstruction : Instruction
{
[XmlAttribute("class")]
public string Class;
[XmlAttribute("type")]
public string type;
internal TypeWrapper typeWrapper;
internal Type typeType;
internal override void Generate(CodeGenContext context, CodeEmitter ilgen)
{
if(typeWrapper == null && typeType == null)
{
Debug.Assert(Class == null ^ type == null);
if(Class != null)
{
typeWrapper = context.ClassLoader.LoadClassByDottedName(Class);
}
else
{
typeType = StaticCompiler.GetTypeForMapXml(context.ClassLoader, type);
}
}
}
}
[XmlType("isinst")]
public sealed class IsInst : TypeOrTypeWrapperInstruction
{
    public IsInst()
    {
    }

    internal override void Generate(CodeGenContext context, CodeEmitter ilgen)
    {
        // base.Generate resolves Class/type into typeWrapper/typeType.
        base.Generate(context, ilgen);
        if(typeType != null)
        {
            ilgen.Emit(OpCodes.Isinst, typeType);
        }
        else
        {
            if(typeWrapper.IsGhost || typeWrapper.IsGhostArray)
            {
                // Ghost types can't be tested with a plain isinst, so use
                // the wrapper's EmitInstanceOf: keep the duplicated object
                // when the test succeeds, otherwise pop it and push null --
                // reproducing isinst's keep-or-null semantics.
                ilgen.Emit(OpCodes.Dup);
                typeWrapper.EmitInstanceOf(ilgen);
                CodeEmitterLabel endLabel = ilgen.DefineLabel();
                ilgen.EmitBrtrue(endLabel);
                ilgen.Emit(OpCodes.Pop);
                ilgen.Emit(OpCodes.Ldnull);
                ilgen.MarkLabel(endLabel);
            }
            else
            {
                ilgen.Emit(OpCodes.Isinst, typeWrapper.TypeAsTBD);
            }
        }
    }
}
[XmlType("castclass")]
public sealed class Castclass : TypeOrTypeWrapperInstruction
{
public Castclass()
{
}
internal override void Generate(CodeGenContext context, CodeEmitter ilgen)
{
base.Generate(context, ilgen);
if(typeType != null)
{
ilgen.Emit(OpCodes.Castclass, typeType);
}
else
{
typeWrapper.EmitCheckcast(ilgen);
}
}
}
[XmlType("castclass_impl")]
public sealed class Castclass_impl : Instruction
{
[XmlAttribute("class")]
public string Class;
public Castclass_impl()
{
}
internal override void Generate(CodeGenContext context, CodeEmitter ilgen)
{
ilgen.Emit(OpCodes.Castclass, context.ClassLoader.LoadClassByDottedName(Class).TypeAsBaseType);
}
}
public abstract class TypeInstruction : Instruction
{
[XmlAttribute("type")]
public string type;
private OpCode opcode;
private Type typeType;
internal TypeInstruction(OpCode opcode)
{
this.opcode = opcode;
}
internal override void Generate(CodeGenContext context, CodeEmitter ilgen)
{
if(typeType == null)
{
Debug.Assert(type != null);
typeType = StaticCompiler.GetTypeForMapXml(context.ClassLoader, type);
}
ilgen.Emit(opcode, typeType);
}
}
[XmlType("ldobj")]
public sealed class Ldobj : TypeInstruction
{
public Ldobj() : base(OpCodes.Ldobj)
{
}
}
[XmlType("unbox")]
public sealed class Unbox : TypeInstruction
{
public Unbox() : base(OpCodes.Unbox)
{
}
}
[XmlType("box")]
public sealed class Box : TypeInstruction
{
public Box() : base(OpCodes.Box)
{
}
}
public abstract class Branch : Instruction
{
    [XmlAttribute("name")]
    public string Name;

    // Looks up (or lazily creates and registers) the label stored under
    // Name in the codegen context, then lets the subclass emit the
    // actual branch opcode to it.
    internal sealed override void Generate(CodeGenContext context, CodeEmitter ilgen)
    {
        CodeEmitterLabel target = (CodeEmitterLabel)context[Name];
        if (target == null)
        {
            target = ilgen.DefineLabel();
            context[Name] = target;
        }
        Emit(ilgen, target);
    }

    // Emits the concrete branch instruction targeting the given label.
    internal abstract void Emit(CodeEmitter ilgen, CodeEmitterLabel label);
}
[XmlType("brfalse")]
public sealed class BrFalse : Branch
{
internal override void Emit(CodeEmitter ilgen, CodeEmitterLabel label)
{
ilgen.EmitBrfalse(label);
}
}
[XmlType("brtrue")]
public sealed class BrTrue : Branch
{
internal override void Emit(CodeEmitter ilgen, CodeEmitterLabel label)
{
ilgen.EmitBrtrue(label);
}
}
[XmlType("br")]
public sealed class Br : Branch
{
internal override void Emit(CodeEmitter ilgen, CodeEmitterLabel label)
{
ilgen.EmitBr(label);
}
}
[XmlType("beq")]
public sealed class Beq : Branch
{
internal override void Emit(CodeEmitter ilgen, CodeEmitterLabel label)
{
ilgen.EmitBeq(label);
}
}
[XmlType("bne_un")]
public sealed class Bne_Un : Branch
{
internal override void Emit(CodeEmitter ilgen, CodeEmitterLabel label)
{
ilgen.EmitBne_Un(label);
}
}
[XmlType("bge_un")]
public sealed class Bge_Un : Branch
{
internal override void Emit(CodeEmitter ilgen, CodeEmitterLabel label)
{
ilgen.EmitBge_Un(label);
}
}
[XmlType("ble_un")]
public sealed class Ble_Un : Branch
{
internal override void Emit(CodeEmitter ilgen, CodeEmitterLabel label)
{
ilgen.EmitBle_Un(label);
}
}
[XmlType("blt")]
public sealed class Blt : Branch
{
internal override void Emit(CodeEmitter ilgen, CodeEmitterLabel label)
{
ilgen.EmitBlt(label);
}
}
[XmlType("blt_un")]
public sealed class Blt_Un : Branch
{
internal override void Emit(CodeEmitter ilgen, CodeEmitterLabel label)
{
ilgen.EmitBlt_Un(label);
}
}
[XmlType("label")]
public sealed class BrLabel : Instruction
{
    [XmlAttribute("name")]
    public string Name;

    // Marks the current IL position with the label registered under
    // Name, creating (and registering) the label first if no branch
    // has referenced it yet.
    internal override void Generate(CodeGenContext context, CodeEmitter ilgen)
    {
        CodeEmitterLabel target = (CodeEmitterLabel)context[Name];
        if (target == null)
        {
            target = ilgen.DefineLabel();
            context[Name] = target;
        }
        ilgen.MarkLabel(target);
    }
}
[XmlType("stloc")]
public sealed class StLoc : Instruction
{
    [XmlAttribute("name")]
    public string Name;
    [XmlAttribute("class")]
    public string Class;
    [XmlAttribute("type")]
    public string type;
    // Cached resolution of Class/type (exactly one of the two is set).
    private TypeWrapper typeWrapper;
    private Type typeType;

    internal override void Generate(CodeGenContext context, CodeEmitter ilgen)
    {
        // Locals are shared by name through the context so a later
        // <ldloc> can find them; declare the local on first use.
        CodeEmitterLocal lb = (CodeEmitterLocal)context[Name];
        if(lb == null)
        {
            if(typeWrapper == null && typeType == null)
            {
                Debug.Assert(Class == null ^ type == null);
                if(type != null)
                {
                    typeType = StaticCompiler.GetTypeForMapXml(context.ClassLoader, type);
                }
                else
                {
                    typeWrapper = context.ClassLoader.LoadClassByDottedName(Class);
                }
            }
            lb = ilgen.DeclareLocal(typeType != null ? typeType : typeWrapper.TypeAsTBD);
            context[Name] = lb;
        }
        ilgen.Emit(OpCodes.Stloc, lb);
    }
}
[XmlType("ldloc")]
public sealed class LdLoc : Instruction
{
[XmlAttribute("name")]
public string Name;
internal override void Generate(CodeGenContext context, CodeEmitter ilgen)
{
ilgen.Emit(OpCodes.Ldloc, (CodeEmitterLocal)context[Name]);
}
}
[XmlType("ldarga")]
public sealed class LdArga : Instruction
{
[XmlAttribute("argNum")]
public ushort ArgNum;
internal override void Generate(CodeGenContext context, CodeEmitter ilgen)
{
ilgen.EmitLdarga(ArgNum);
}
}
[XmlType("ldarg_s")]
public sealed class LdArg_S : Instruction
{
[XmlAttribute("argNum")]
public byte ArgNum;
internal override void Generate(CodeGenContext context, CodeEmitter ilgen)
{
ilgen.EmitLdarg(ArgNum);
}
}
[XmlType("ldarg_0")]
public sealed class LdArg_0 : Simple
{
public LdArg_0() : base(OpCodes.Ldarg_0)
{
}
}
[XmlType("ldarg_1")]
public sealed class LdArg_1 : Simple
{
public LdArg_1() : base(OpCodes.Ldarg_1)
{
}
}
[XmlType("ldarg_2")]
public sealed class LdArg_2 : Simple
{
public LdArg_2() : base(OpCodes.Ldarg_2)
{
}
}
[XmlType("ldarg_3")]
public sealed class LdArg_3 : Simple
{
public LdArg_3() : base(OpCodes.Ldarg_3)
{
}
}
[XmlType("ldind_i1")]
public sealed class Ldind_i1 : Simple
{
public Ldind_i1() : base(OpCodes.Ldind_I1)
{
}
}
[XmlType("ldind_i2")]
public sealed class Ldind_i2 : Simple
{
public Ldind_i2() : base(OpCodes.Ldind_I2)
{
}
}
[XmlType("ldind_i4")]
public sealed class Ldind_i4 : Simple
{
public Ldind_i4() : base(OpCodes.Ldind_I4)
{
}
}
[XmlType("ldind_i8")]
public sealed class Ldind_i8 : Simple
{
public Ldind_i8() : base(OpCodes.Ldind_I8)
{
}
}
[XmlType("ldind_r4")]
public sealed class Ldind_r4 : Simple
{
public Ldind_r4() : base(OpCodes.Ldind_R4)
{
}
}
[XmlType("ldind_r8")]
public sealed class Ldind_r8 : Simple
{
public Ldind_r8() : base(OpCodes.Ldind_R8)
{
}
}
[XmlType("ldind_ref")]
public sealed class Ldind_ref : Simple
{
public Ldind_ref() : base(OpCodes.Ldind_Ref)
{
}
}
[XmlType("stind_i1")]
public sealed class Stind_i1 : Simple
{
public Stind_i1() : base(OpCodes.Stind_I1)
{
}
}
[XmlType("stind_i2")]
public sealed class Stind_i2 : Simple
{
public Stind_i2() : base(OpCodes.Stind_I2)
{
}
}
[XmlType("stind_i4")]
public sealed class Stind_i4 : Simple
{
public Stind_i4() : base(OpCodes.Stind_I4)
{
}
}
[XmlType("stind_i8")]
public sealed class Stind_i8 : Simple
{
public Stind_i8()
: base(OpCodes.Stind_I8)
{
}
}
[XmlType("stind_ref")]
public sealed class Stind_ref : Simple
{
public Stind_ref() : base(OpCodes.Stind_Ref)
{
}
}
[XmlType("ret")]
public sealed class Ret : Simple
{
public Ret() : base(OpCodes.Ret)
{
}
}
[XmlType("throw")]
public sealed class Throw : Simple
{
public Throw() : base(OpCodes.Throw)
{
}
}
[XmlType("ldflda")]
public sealed class Ldflda : Instruction
{
[XmlAttribute("class")]
public string Class;
[XmlAttribute("name")]
public string Name;
[XmlAttribute("sig")]
public string Sig;
internal override void Generate(CodeGenContext context, CodeEmitter ilgen)
{
ilgen.Emit(OpCodes.Ldflda, StaticCompiler.GetFieldForMapXml(context.ClassLoader, Class, Name, Sig).GetField());
}
}
[XmlType("ldfld")]
public sealed class Ldfld : Instruction
{
[XmlAttribute("class")]
public string Class;
[XmlAttribute("name")]
public string Name;
[XmlAttribute("sig")]
public string Sig;
internal override void Generate(CodeGenContext context, CodeEmitter ilgen)
{
// we don't use fw.EmitGet because we don't want automatic unboxing and whatever
ilgen.Emit(OpCodes.Ldfld, StaticCompiler.GetFieldForMapXml(context.ClassLoader, Class, Name, Sig).GetField());
}
}
[XmlType("ldsfld")]
public sealed class Ldsfld : Instruction
{
[XmlAttribute("class")]
public string Class;
[XmlAttribute("type")]
public string Type;
[XmlAttribute("name")]
public string Name;
[XmlAttribute("sig")]
public string Sig;
internal override void Generate(CodeGenContext context, CodeEmitter ilgen)
{
if(Type != null)
{
ilgen.Emit(OpCodes.Ldsfld, StaticCompiler.GetTypeForMapXml(context.ClassLoader, Type).GetField(Name, BindingFlags.Static | BindingFlags.Public | BindingFlags.NonPublic));
}
else
{
// we don't use fw.EmitGet because we don't want automatic unboxing and whatever
ilgen.Emit(OpCodes.Ldsfld, StaticCompiler.GetFieldForMapXml(context.ClassLoader, Class, Name, Sig).GetField());
}
}
}
[XmlType("stfld")]
public sealed class Stfld : Instruction
{
[XmlAttribute("class")]
public string Class;
[XmlAttribute("name")]
public string Name;
[XmlAttribute("sig")]
public string Sig;
internal override void Generate(CodeGenContext context, CodeEmitter ilgen)
{
// we don't use fw.EmitSet because we don't want automatic unboxing and whatever
ilgen.Emit(OpCodes.Stfld, StaticCompiler.GetFieldForMapXml(context.ClassLoader, Class, Name, Sig).GetField());
}
}
[XmlType("stsfld")]
public sealed class Stsfld : Instruction
{
[XmlAttribute("class")]
public string Class;
[XmlAttribute("name")]
public string Name;
[XmlAttribute("sig")]
public string Sig;
internal override void Generate(CodeGenContext context, CodeEmitter ilgen)
{
// we don't use fw.EmitSet because we don't want automatic unboxing and whatever
ilgen.Emit(OpCodes.Stsfld, StaticCompiler.GetFieldForMapXml(context.ClassLoader, Class, Name, Sig).GetField());
}
}
[XmlType("ldc_i4")]
public sealed class Ldc_I4 : Instruction
{
[XmlAttribute("value")]
public int val;
internal override void Generate(CodeGenContext context, CodeEmitter ilgen)
{
ilgen.EmitLdc_I4(val);
}
}
[XmlType("ldc_i4_0")]
public sealed class Ldc_I4_0 : Simple
{
public Ldc_I4_0() : base(OpCodes.Ldc_I4_0)
{
}
}
[XmlType("ldc_i4_1")]
public sealed class Ldc_I4_1 : Simple
{
public Ldc_I4_1() : base(OpCodes.Ldc_I4_1)
{
}
}
[XmlType("ldc_i4_m1")]
public sealed class Ldc_I4_M1 : Simple
{
public Ldc_I4_M1() : base(OpCodes.Ldc_I4_M1)
{
}
}
[XmlType("conv_i")]
public sealed class Conv_I : Simple
{
public Conv_I() : base(OpCodes.Conv_I)
{
}
}
[XmlType("conv_i1")]
public sealed class Conv_I1 : Simple
{
public Conv_I1() : base(OpCodes.Conv_I1)
{
}
}
[XmlType("conv_u1")]
public sealed class Conv_U1 : Simple
{
public Conv_U1() : base(OpCodes.Conv_U1)
{
}
}
[XmlType("conv_i2")]
public sealed class Conv_I2 : Simple
{
public Conv_I2() : base(OpCodes.Conv_I2)
{
}
}
[XmlType("conv_u2")]
public sealed class Conv_U2 : Simple
{
public Conv_U2() : base(OpCodes.Conv_U2)
{
}
}
[XmlType("conv_i4")]
public sealed class Conv_I4 : Simple
{
public Conv_I4() : base(OpCodes.Conv_I4)
{
}
}
[XmlType("conv_u4")]
public sealed class Conv_U4 : Simple
{
public Conv_U4() : base(OpCodes.Conv_U4)
{
}
}
[XmlType("conv_i8")]
public sealed class Conv_I8 : Simple
{
public Conv_I8() : base(OpCodes.Conv_I8)
{
}
}
[XmlType("conv_u8")]
public sealed class Conv_U8 : Simple
{
public Conv_U8() : base(OpCodes.Conv_U8)
{
}
}
[XmlType("ldlen")]
public sealed class Ldlen : Simple
{
public Ldlen() : base(OpCodes.Ldlen)
{
}
}
[XmlType("add")]
public sealed class Add : Simple
{
public Add() : base(OpCodes.Add)
{
}
}
[XmlType("sub")]
public sealed class Sub : Simple
{
public Sub()
: base(OpCodes.Sub)
{
}
}
[XmlType("mul")]
public sealed class Mul : Simple
{
public Mul() : base(OpCodes.Mul)
{
}
}
[XmlType("div_un")]
public sealed class Div_Un : Simple
{
public Div_Un()
: base(OpCodes.Div_Un)
{
}
}
[XmlType("rem_un")]
public sealed class Rem_Un : Simple
{
public Rem_Un()
: base(OpCodes.Rem_Un)
{
}
}
[XmlType("and")]
public sealed class And : Simple
{
public And()
: base(OpCodes.And)
{
}
}
[XmlType("or")]
public sealed class Or : Simple
{
public Or()
: base(OpCodes.Or)
{
}
}
[XmlType("xor")]
public sealed class Xor : Simple
{
public Xor()
: base(OpCodes.Xor)
{
}
}
[XmlType("not")]
public sealed class Not : Simple
{
public Not()
: base(OpCodes.Not)
{
}
}
[XmlType("unaligned")]
public sealed class Unaligned : Instruction
{
[XmlAttribute("alignment")]
public int Alignment;
internal override void Generate(CodeGenContext context, CodeEmitter ilgen)
{
ilgen.EmitUnaligned((byte)Alignment);
}
}
[XmlType("cpblk")]
public sealed class Cpblk : Simple
{
public Cpblk() : base(OpCodes.Cpblk)
{
}
}
[XmlType("ceq")]
public sealed class Ceq : Simple
{
public Ceq() : base(OpCodes.Ceq)
{
}
}
[XmlType("leave")]
public sealed class Leave : Branch
{
internal override void Emit(CodeEmitter ilgen, CodeEmitterLabel label)
{
ilgen.EmitLeave(label);
}
}
[XmlType("endfinally")]
public sealed class Endfinally : Simple
{
public Endfinally() : base(OpCodes.Endfinally)
{
}
}
[XmlType("exceptionBlock")]
public sealed class ExceptionBlock : Instruction
{
    // Child instruction lists for the try/catch/finally sections;
    // @catch and @finally may be null when the section is absent.
    public InstructionList @try;
    public CatchBlock @catch;
    public InstructionList @finally;

    internal override void Generate(CodeGenContext context, CodeEmitter ilgen)
    {
        ilgen.BeginExceptionBlock();
        @try.Generate(context, ilgen);
        if(@catch != null)
        {
            // The catch type comes either from a raw CLR type name ('type')
            // or from a class name resolved through the class loader.
            Type type;
            if(@catch.type != null)
            {
                type = StaticCompiler.GetTypeForMapXml(context.ClassLoader, @catch.type);
            }
            else
            {
                type = context.ClassLoader.LoadClassByDottedName(@catch.Class).TypeAsExceptionType;
            }
            ilgen.BeginCatchBlock(type);
            @catch.Generate(context, ilgen);
        }
        if(@finally != null)
        {
            ilgen.BeginFinallyBlock();
            @finally.Generate(context, ilgen);
        }
        ilgen.EndExceptionBlock();
    }
}
public sealed class CatchBlock : InstructionList
{
[XmlAttribute("type")]
public string type;
[XmlAttribute("class")]
public string Class;
}
[XmlType("conditional")]
public sealed class ConditionalInstruction : Instruction
{
    // Runtime-version prefix (e.g. "2.0" or "4.0"); the nested code is
    // only emitted when the current Environment.Version starts with it.
    [XmlAttribute("framework")]
    public string framework;
    public InstructionList code;

    internal override void Generate(CodeGenContext context, CodeEmitter ilgen)
    {
        // Use an ordinal comparison (CA1310): the single-argument
        // StartsWith is culture-sensitive and could mis-compare version
        // strings under some locales; version text is not linguistic data.
        if (Environment.Version.ToString().StartsWith(framework, StringComparison.Ordinal))
        {
            code.Generate(context, ilgen);
        }
    }
}
[XmlType("volatile")]
public sealed class Volatile : Simple
{
public Volatile() : base(OpCodes.Volatile)
{
}
}
[XmlType("ldelema")]
public sealed class Ldelema : Instruction
{
[XmlAttribute("sig")]
public string Sig;
internal override void Generate(CodeGenContext context, CodeEmitter ilgen)
{
ilgen.Emit(OpCodes.Ldelema, context.ClassLoader.FieldTypeWrapperFromSig(Sig, LoadMode.LoadOrThrow).TypeAsArrayType);
}
}
[XmlType("newarr")]
public sealed class Newarr : Instruction
{
[XmlAttribute("sig")]
public string Sig;
internal override void Generate(CodeGenContext context, CodeEmitter ilgen)
{
ilgen.Emit(OpCodes.Newarr, context.ClassLoader.FieldTypeWrapperFromSig(Sig, LoadMode.LoadOrThrow).TypeAsArrayType);
}
}
[XmlType("ldtoken")]
public sealed class Ldtoken : Instruction
{
[XmlAttribute("type")]
public string type;
[XmlAttribute("class")]
public string Class;
[XmlAttribute("method")]
public string Method;
[XmlAttribute("field")]
public string Field;
[XmlAttribute("sig")]
public string Sig;
internal override void Generate(CodeGenContext context, CodeEmitter ilgen)
{
if (!Validate())
{
return;
}
MemberInfo member = Resolve(context);
Type type = member as Type;
MethodInfo method = member as MethodInfo;
ConstructorInfo constructor = member as ConstructorInfo;
FieldInfo field = member as FieldInfo;
if (type != null)
{
ilgen.Emit(OpCodes.Ldtoken, type);
}
else if (method != null)
{
ilgen.Emit(OpCodes.Ldtoken, method);
}
else if (constructor != null)
{
ilgen.Emit(OpCodes.Ldtoken, constructor);
}
else if (field != null)
{
ilgen.Emit(OpCodes.Ldtoken, field);
}
else
{
StaticCompiler.IssueMessage(Message.MapXmlUnableToResolveOpCode, ToString());
}
}
private bool Validate()
{
if (type != null && Class == null)
{
if (Method != null || Field != null || Sig != null)
{
StaticCompiler.IssueMessage(Message.MapXmlError, "not implemented: cannot use 'type' attribute with 'method' or 'field' attribute for ldtoken");
return false;
}
return true;
}
|
[
"\t\t\telse if (Class != null && type == null)"
] | 2,841
|
lcc
|
csharp
| null |
eb53d2d7980f93f767d7f4ca69e7c363d7a2bdffbbb04b9c
|
|
/*
* Copyright (C) 2006-2010 - Frictional Games
*
* This file is part of HPL1 Engine.
*
* HPL1 Engine is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* HPL1 Engine is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with HPL1 Engine. If not, see <http://www.gnu.org/licenses/>.
*/
using System;
using System.Drawing;
using System.Collections;
using System.ComponentModel;
using System.Windows.Forms;
namespace Mapeditor
{
/// <summary>
/// Summary description for PropertiesLightForm.
/// </summary>
public class frmPropertiesArea : System.Windows.Forms.Form
{
public bool mbOkWasPressed=false;
cArea mArea;
private System.Windows.Forms.Label objNameLabel;
private System.Windows.Forms.Label label1;
public System.Windows.Forms.Button objOkButton;
public System.Windows.Forms.Button objCancelButtom;
public System.Windows.Forms.TextBox objNameText;
private System.Windows.Forms.Label label6;
public System.Windows.Forms.ComboBox objActiveBox;
public System.Windows.Forms.TextBox objXText;
private System.Windows.Forms.Label objXLabel;
private System.Windows.Forms.Label objYLabel;
private System.Windows.Forms.Label label3;
public System.Windows.Forms.TextBox objZText;
private System.Windows.Forms.Label label4;
public System.Windows.Forms.ComboBox objTypeBox;
private System.Windows.Forms.Label label5;
public System.Windows.Forms.TextBox objYText;
private System.Windows.Forms.Label objZLabel;
public System.Windows.Forms.TextBox objWidthText;
private System.Windows.Forms.Label label7;
public System.Windows.Forms.TextBox objHeightText;
private System.Windows.Forms.Label label8;
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.Container components = null;
public frmPropertiesArea(cArea aArea)
{
//
// Required for Windows Form Designer support
//
InitializeComponent();
//
// TODO: Add any constructor code after InitializeComponent call
//
mArea = aArea;
objNameText.Text = aArea.msName;
objActiveBox.SelectedIndex = aArea.mbActive?1:0;
objHeightText.Text = aArea.mfHeight.ToString();
objWidthText.Text = aArea.mfWidth.ToString();
objXLabel.Text = ((cAreaType)aArea.mAForm.mlstTypes[aArea.mlTypeNum]).msDesc[0];
objXText.Text = aArea.mfSizeX.ToString();
objYLabel.Text = ((cAreaType)aArea.mAForm.mlstTypes[aArea.mlTypeNum]).msDesc[1];
objYText.Text = aArea.mfSizeY.ToString();
objZLabel.Text = ((cAreaType)aArea.mAForm.mlstTypes[aArea.mlTypeNum]).msDesc[2];
objZText.Text = aArea.mfSizeZ.ToString();
foreach(string sN in aArea.mAForm.objTypeList.Items)
{
objTypeBox.Items.Add(sN);
}
objTypeBox.SelectedIndex = aArea.mlTypeNum;
}
/// <summary>
/// Clean up any resources being used.
/// </summary>
protected override void Dispose( bool disposing )
{
if( disposing )
{
if(components != null)
{
components.Dispose();
}
}
base.Dispose( disposing );
}
#region Windows Form Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
this.objNameLabel = new System.Windows.Forms.Label();
this.label1 = new System.Windows.Forms.Label();
this.objNameText = new System.Windows.Forms.TextBox();
this.objXText = new System.Windows.Forms.TextBox();
this.objOkButton = new System.Windows.Forms.Button();
this.objCancelButtom = new System.Windows.Forms.Button();
this.objXLabel = new System.Windows.Forms.Label();
this.label6 = new System.Windows.Forms.Label();
this.objActiveBox = new System.Windows.Forms.ComboBox();
this.objYLabel = new System.Windows.Forms.Label();
this.objYText = new System.Windows.Forms.TextBox();
this.label3 = new System.Windows.Forms.Label();
this.objZLabel = new System.Windows.Forms.Label();
this.objZText = new System.Windows.Forms.TextBox();
this.label4 = new System.Windows.Forms.Label();
this.objTypeBox = new System.Windows.Forms.ComboBox();
this.label5 = new System.Windows.Forms.Label();
this.objWidthText = new System.Windows.Forms.TextBox();
this.label7 = new System.Windows.Forms.Label();
this.objHeightText = new System.Windows.Forms.TextBox();
this.label8 = new System.Windows.Forms.Label();
this.SuspendLayout();
//
// objNameLabel
//
this.objNameLabel.Location = new System.Drawing.Point(16, 16);
this.objNameLabel.Name = "objNameLabel";
this.objNameLabel.Size = new System.Drawing.Size(64, 16);
this.objNameLabel.TabIndex = 0;
this.objNameLabel.Text = "Name:";
//
// label1
//
this.label1.Location = new System.Drawing.Point(16, 200);
this.label1.Name = "label1";
this.label1.Size = new System.Drawing.Size(48, 16);
this.label1.TabIndex = 1;
this.label1.Text = "Var X:";
//
// objNameText
//
this.objNameText.Location = new System.Drawing.Point(104, 16);
this.objNameText.MaxLength = 40;
this.objNameText.Name = "objNameText";
this.objNameText.Size = new System.Drawing.Size(104, 20);
this.objNameText.TabIndex = 3;
this.objNameText.Text = "";
//
// objXText
//
this.objXText.Location = new System.Drawing.Point(104, 192);
this.objXText.MaxLength = 40;
this.objXText.Name = "objXText";
this.objXText.Size = new System.Drawing.Size(104, 20);
this.objXText.TabIndex = 4;
this.objXText.Text = "";
//
// objOkButton
//
this.objOkButton.Location = new System.Drawing.Point(24, 432);
this.objOkButton.Name = "objOkButton";
this.objOkButton.Size = new System.Drawing.Size(72, 24);
this.objOkButton.TabIndex = 7;
this.objOkButton.Text = "OK";
this.objOkButton.Click += new System.EventHandler(this.objOkButton_Click);
//
// objCancelButtom
//
this.objCancelButtom.Location = new System.Drawing.Point(120, 432);
this.objCancelButtom.Name = "objCancelButtom";
this.objCancelButtom.Size = new System.Drawing.Size(72, 24);
this.objCancelButtom.TabIndex = 8;
this.objCancelButtom.Text = "Cancel";
this.objCancelButtom.Click += new System.EventHandler(this.objCancelButtom_Click);
//
// objXLabel
//
this.objXLabel.Font = new System.Drawing.Font("Microsoft Sans Serif", 8.25F, System.Drawing.FontStyle.Italic, System.Drawing.GraphicsUnit.Point, ((System.Byte)(0)));
this.objXLabel.Location = new System.Drawing.Point(16, 160);
this.objXLabel.Name = "objXLabel";
this.objXLabel.Size = new System.Drawing.Size(200, 32);
this.objXLabel.TabIndex = 12;
this.objXLabel.Text = "Description...";
//
// label6
//
this.label6.Location = new System.Drawing.Point(16, 48);
this.label6.Name = "label6";
this.label6.Size = new System.Drawing.Size(48, 16);
this.label6.TabIndex = 15;
this.label6.Text = "Active:";
//
// objActiveBox
//
this.objActiveBox.Items.AddRange(new object[] {
"False",
"True"});
this.objActiveBox.Location = new System.Drawing.Point(104, 48);
this.objActiveBox.Name = "objActiveBox";
this.objActiveBox.Size = new System.Drawing.Size(104, 21);
this.objActiveBox.TabIndex = 16;
//
// objYLabel
//
this.objYLabel.Font = new System.Drawing.Font("Microsoft Sans Serif", 8.25F, System.Drawing.FontStyle.Italic, System.Drawing.GraphicsUnit.Point, ((System.Byte)(0)));
this.objYLabel.Location = new System.Drawing.Point(16, 224);
this.objYLabel.Name = "objYLabel";
|
[
"\t\t\tthis.objYLabel.Size = new System.Drawing.Size(200, 32);"
] | 722
|
lcc
|
csharp
| null |
7176f682200a0adf4ae5cf5e13ef2ec7cb669d6f638cffd3
|
|
# -*- test-case-name: buildbot.test.test_mailparse -*-
from twisted.trial import unittest
from twisted.python import util
from buildbot.changes import mail
class TestFreshCVS(unittest.TestCase):
def get(self, msg):
msg = util.sibpath(__file__, msg)
s = mail.FCMaildirSource(None)
return s.parse_file(open(msg, "r"))
def testMsg1(self):
c = self.get("mail/freshcvs.1")
self.assertEqual(c.who, "moshez")
self.assertEqual(set(c.files), set(["Twisted/debian/python-twisted.menu.in"]))
self.assertEqual(c.comments, "Instance massenger, apparently\n")
self.assertEqual(c.isdir, 0)
def testMsg2(self):
c = self.get("mail/freshcvs.2")
self.assertEqual(c.who, "itamarst")
self.assertEqual(set(c.files), set(["Twisted/twisted/web/woven/form.py",
"Twisted/twisted/python/formmethod.py"]))
self.assertEqual(c.comments,
"submit formmethod now subclass of Choice\n")
self.assertEqual(c.isdir, 0)
def testMsg3(self):
# same as msg2 but missing the ViewCVS section
c = self.get("mail/freshcvs.3")
self.assertEqual(c.who, "itamarst")
self.assertEqual(set(c.files), set(["Twisted/twisted/web/woven/form.py",
"Twisted/twisted/python/formmethod.py"]))
self.assertEqual(c.comments,
"submit formmethod now subclass of Choice\n")
self.assertEqual(c.isdir, 0)
def testMsg4(self):
# same as msg3 but also missing CVS patch section
c = self.get("mail/freshcvs.4")
self.assertEqual(c.who, "itamarst")
self.assertEqual(set(c.files), set(["Twisted/twisted/web/woven/form.py",
"Twisted/twisted/python/formmethod.py"]))
self.assertEqual(c.comments,
"submit formmethod now subclass of Choice\n")
self.assertEqual(c.isdir, 0)
def testMsg5(self):
# creates a directory
c = self.get("mail/freshcvs.5")
self.assertEqual(c.who, "etrepum")
self.assertEqual(set(c.files), set(["Twisted/doc/examples/cocoaDemo"]))
self.assertEqual(c.comments,
"Directory /cvs/Twisted/doc/examples/cocoaDemo added to the repository\n")
self.assertEqual(c.isdir, 1)
def testMsg6(self):
# adds files
c = self.get("mail/freshcvs.6")
self.assertEqual(c.who, "etrepum")
self.assertEqual(set(c.files), set([
"Twisted/doc/examples/cocoaDemo/MyAppDelegate.py",
"Twisted/doc/examples/cocoaDemo/__main__.py",
"Twisted/doc/examples/cocoaDemo/bin-python-main.m",
"Twisted/doc/examples/cocoaDemo/English.lproj/InfoPlist.strings",
"Twisted/doc/examples/cocoaDemo/English.lproj/MainMenu.nib/classes.nib",
"Twisted/doc/examples/cocoaDemo/English.lproj/MainMenu.nib/info.nib",
"Twisted/doc/examples/cocoaDemo/English.lproj/MainMenu.nib/keyedobjects.nib",
"Twisted/doc/examples/cocoaDemo/cocoaDemo.pbproj/project.pbxproj"]))
self.assertEqual(c.comments,
"Cocoa (OS X) clone of the QT demo, using polling reactor\n\nRequires pyobjc ( http://pyobjc.sourceforge.net ), it's not much different than the template project. The reactor is iterated periodically by a repeating NSTimer.\n")
self.assertEqual(c.isdir, 0)
def testMsg7(self):
# deletes files
c = self.get("mail/freshcvs.7")
self.assertEqual(c.who, "etrepum")
self.assertEqual(set(c.files), set([
"Twisted/doc/examples/cocoaDemo/MyAppDelegate.py",
"Twisted/doc/examples/cocoaDemo/__main__.py",
"Twisted/doc/examples/cocoaDemo/bin-python-main.m",
"Twisted/doc/examples/cocoaDemo/English.lproj/InfoPlist.strings",
"Twisted/doc/examples/cocoaDemo/English.lproj/MainMenu.nib/classes.nib",
"Twisted/doc/examples/cocoaDemo/English.lproj/MainMenu.nib/info.nib",
"Twisted/doc/examples/cocoaDemo/English.lproj/MainMenu.nib/keyedobjects.nib",
"Twisted/doc/examples/cocoaDemo/cocoaDemo.pbproj/project.pbxproj"]))
self.assertEqual(c.comments,
"Directories break debian build script, waiting for reasonable fix\n")
self.assertEqual(c.isdir, 0)
def testMsg8(self):
# files outside Twisted/
c = self.get("mail/freshcvs.8")
self.assertEqual(c.who, "acapnotic")
self.assertEqual(set(c.files), set([ "CVSROOT/freshCfg" ]))
self.assertEqual(c.comments, "it doesn't work with invalid syntax\n")
self.assertEqual(c.isdir, 0)
def testMsg9(self):
# also creates a directory
c = self.get("mail/freshcvs.9")
self.assertEqual(c.who, "exarkun")
self.assertEqual(set(c.files), set(["Twisted/sandbox/exarkun/persist-plugin"]))
self.assertEqual(c.comments,
"Directory /cvs/Twisted/sandbox/exarkun/persist-plugin added to the repository\n")
self.assertEqual(c.isdir, 1)
class TestFreshCVS_Prefix(unittest.TestCase):
def get(self, msg):
msg = util.sibpath(__file__, msg)
s = mail.FCMaildirSource(None)
return s.parse_file(open(msg, "r"), prefix="Twisted/")
def testMsg1p(self):
c = self.get("mail/freshcvs.1")
self.assertEqual(c.who, "moshez")
self.assertEqual(set(c.files), set(["debian/python-twisted.menu.in"]))
self.assertEqual(c.comments, "Instance massenger, apparently\n")
def testMsg2p(self):
c = self.get("mail/freshcvs.2")
self.assertEqual(c.who, "itamarst")
self.assertEqual(set(c.files), set(["twisted/web/woven/form.py",
"twisted/python/formmethod.py"]))
self.assertEqual(c.comments,
"submit formmethod now subclass of Choice\n")
def testMsg3p(self):
# same as msg2 but missing the ViewCVS section
c = self.get("mail/freshcvs.3")
self.assertEqual(c.who, "itamarst")
self.assertEqual(set(c.files), set(["twisted/web/woven/form.py",
"twisted/python/formmethod.py"]))
self.assertEqual(c.comments,
"submit formmethod now subclass of Choice\n")
def testMsg4p(self):
# same as msg3 but also missing CVS patch section
c = self.get("mail/freshcvs.4")
self.assertEqual(c.who, "itamarst")
self.assertEqual(set(c.files), set(["twisted/web/woven/form.py",
"twisted/python/formmethod.py"]))
self.assertEqual(c.comments,
"submit formmethod now subclass of Choice\n")
def testMsg5p(self):
# creates a directory
c = self.get("mail/freshcvs.5")
self.assertEqual(c.who, "etrepum")
self.assertEqual(set(c.files), set(["doc/examples/cocoaDemo"]))
self.assertEqual(c.comments,
"Directory /cvs/Twisted/doc/examples/cocoaDemo added to the repository\n")
self.assertEqual(c.isdir, 1)
def testMsg6p(self):
# adds files
c = self.get("mail/freshcvs.6")
self.assertEqual(c.who, "etrepum")
self.assertEqual(set(c.files), set([
"doc/examples/cocoaDemo/MyAppDelegate.py",
"doc/examples/cocoaDemo/__main__.py",
"doc/examples/cocoaDemo/bin-python-main.m",
"doc/examples/cocoaDemo/English.lproj/InfoPlist.strings",
"doc/examples/cocoaDemo/English.lproj/MainMenu.nib/classes.nib",
"doc/examples/cocoaDemo/English.lproj/MainMenu.nib/info.nib",
"doc/examples/cocoaDemo/English.lproj/MainMenu.nib/keyedobjects.nib",
"doc/examples/cocoaDemo/cocoaDemo.pbproj/project.pbxproj"]))
self.assertEqual(c.comments,
"Cocoa (OS X) clone of the QT demo, using polling reactor\n\nRequires pyobjc ( http://pyobjc.sourceforge.net ), it's not much different than the template project. The reactor is iterated periodically by a repeating NSTimer.\n")
self.assertEqual(c.isdir, 0)
def testMsg7p(self):
# deletes files
c = self.get("mail/freshcvs.7")
self.assertEqual(c.who, "etrepum")
self.assertEqual(set(c.files), set([
"doc/examples/cocoaDemo/MyAppDelegate.py",
"doc/examples/cocoaDemo/__main__.py",
"doc/examples/cocoaDemo/bin-python-main.m",
"doc/examples/cocoaDemo/English.lproj/InfoPlist.strings",
"doc/examples/cocoaDemo/English.lproj/MainMenu.nib/classes.nib",
"doc/examples/cocoaDemo/English.lproj/MainMenu.nib/info.nib",
"doc/examples/cocoaDemo/English.lproj/MainMenu.nib/keyedobjects.nib",
"doc/examples/cocoaDemo/cocoaDemo.pbproj/project.pbxproj"]))
self.assertEqual(c.comments,
"Directories break debian build script, waiting for reasonable fix\n")
self.assertEqual(c.isdir, 0)
def testMsg8p(self):
# files outside Twisted/
c = self.get("mail/freshcvs.8")
self.assertEqual(c, None)
class TestSyncmail(unittest.TestCase):
def get(self, msg):
msg = util.sibpath(__file__, msg)
s = mail.SyncmailMaildirSource(None)
return s.parse_file(open(msg, "r"), prefix="buildbot/")
def getNoPrefix(self, msg):
msg = util.sibpath(__file__, msg)
s = mail.SyncmailMaildirSource(None)
return s.parse_file(open(msg, "r"))
def testMsgS1(self):
c = self.get("mail/syncmail.1")
self.failUnless(c is not None)
self.assertEqual(c.who, "warner")
self.assertEqual(set(c.files), set(["buildbot/changes/freshcvsmail.py"]))
self.assertEqual(c.comments,
"remove leftover code, leave a temporary compatibility import. Note! Start\nimporting FCMaildirSource from changes.mail instead of changes.freshcvsmail\n")
self.assertEqual(c.isdir, 0)
def testMsgS2(self):
c = self.get("mail/syncmail.2")
self.assertEqual(c.who, "warner")
self.assertEqual(set(c.files), set(["ChangeLog"]))
self.assertEqual(c.comments, "\t* NEWS: started adding new features\n")
self.assertEqual(c.isdir, 0)
def testMsgS3(self):
c = self.get("mail/syncmail.3")
self.failUnless(c == None)
def testMsgS4(self):
c = self.get("mail/syncmail.4")
self.assertEqual(c.who, "warner")
self.assertEqual(set(c.files),
set(["test/mail/syncmail.1",
"test/mail/syncmail.2",
"test/mail/syncmail.3"]))
self.assertEqual(c.comments, "test cases for syncmail parser\n")
self.assertEqual(c.isdir, 0)
self.assertEqual(c.branch, None)
# tests a tag
def testMsgS5(self):
|
[
" c = self.getNoPrefix(\"mail/syncmail.5\")"
] | 615
|
lcc
|
python
| null |
20b813f30f59822e04edffdae1cdbf1b533dbdd72c1cc321
|
|
/*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see http://www.gnu.org/licenses/
*/
package org.phenotips.vocabulary;
import org.xwiki.stability.Unstable;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import javax.annotation.Nonnull;
import javax.inject.Inject;
import org.apache.commons.collections4.MultiValuedMap;
import org.apache.commons.collections4.multimap.ArrayListValuedHashMap;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVRecord;
import org.apache.commons.lang3.StringUtils;
import org.apache.solr.client.solrj.SolrQuery;
import org.slf4j.Logger;
/**
* Implements {@link VocabularyExtension} to annotate {@link VocabularyInputTerm} from {@link #getTargetVocabularyIds
* supported vocabularies} with data from {@link #getAnnotationSource a tab- or comma-separated file}. The default
* behavior implemented in this base class is to gather data from the named columns in the file, and add this data to
* the respective terms when reindexing a supported vocabulary. Setting up the names of the columns is done by the
* concrete class, either by {@link #setupCSVParser telling} the CSV parser to treat the first row as the header
* definition, or by explicitly assigning names to columns.
* <p>
* To let the first row be parsed as the column names:
* </p>
*
* <pre>
* {@code
* protected CSVFormat setupCSVParser(Vocabulary vocabulary)
* {
* return CSVFormat.TDF.withHeader();
* }
* }
* </pre>
* <p>
* To explicitly name columns:
* </p>
*
* <pre>
* {@code
* protected CSVFormat setupCSVParser(Vocabulary vocabulary)
* {
* return CSVFormat.TDF.withHeader("id", null, "symptom");
* }
* }
* </pre>
* <p>
* With the default implementation of {@link #processCSVRecordRow the row processing function}, having a column named
* {@code id} is mandatory.
* </p>
* <p>
* Columns that are not named are ignored.
* </p>
* <p>
* Missing, empty, or whitespace-only cells will be ignored.
* </p>
* <p>
* If multiple rows for the same term identifier exists, then the values are accumulated in lists of values.
* </p>
* <p>
* If one or more of the fields parsed happen to already have values already in the term being extended, then the
* existing values will be discarded and replaced with the data read from the input file.
* </p>
* <p>
* If multiple rows for the same term identifier exists, then the values are accumulated in lists of values. If in the
* schema definition a field is set as non-multi-valued, then it's the responsibility of the user to make sure that only
* one value will be specified for such fields. If a value is specified multiple times in the input file, then it will
* be added multiple times in the field.
* </p>
* <p>
* Example: for the following parser set-up:
* </p>
*
* <pre>
* {@code
* CSVFormat.CSV.withHeader("id", null, "symptom", null, "frequency")
* }
* </pre>
*
* and the following input file:
*
* <pre>
* {@code
* MIM:162200,"NEUROFIBROMATOSIS, TYPE I",HP:0009737,"Lisch nodules",HP:0040284,HPO:curators
* MIM:162200,"NEUROFIBROMATOSIS, TYPE I",HP:0001256,"Intellectual disability, mild",HP:0040283,HPO:curators
* MIM:162200,"NEUROFIBROMATOSIS, TYPE I",HP:0000316,"Hypertelorism",,HPO:curators
* MIM:162200,"NEUROFIBROMATOSIS, TYPE I",HP:0000501,"Glaucoma",HP:0040284,HPO:curators
* }
* </pre>
*
* the following fields will be added:
* <dl>
* <dt>{@code "symptom"}</dt>
* <dd>{@code "HP:0009737"}, {@code HP:0001256}</dd>
* <dt>{@code "frequency"}</dt>
* <dd>{@code "HP:0040284"}, {@code HP:0040283}, {@code "HP:0040284"}</dd>
* </dl>
*
* @version $Id$
* @since 1.3
*/
@Unstable("New API introduced in 1.3")
public abstract class AbstractCSVAnnotationsExtension implements VocabularyExtension
{
protected static final String ID_KEY = "id";
/**
* Data read from the source file. The key of the outer map is the identifier of the term being extended, and the
* value of the outer map is the data to add to the term. The key of the inner map is the name of the field, while
* the value of the inner map is the values to add to that field.
*/
protected Map<String, MultiValuedMap<String, String>> data = new HashMap<>();
/** Logging helper object. */
@Inject
protected Logger logger;
@Inject
protected VocabularySourceRelocationService relocationService;
private AtomicInteger operationsInProgress = new AtomicInteger(0);
@Override
public boolean isVocabularySupported(@Nonnull final Vocabulary vocabulary)
{
return getTargetVocabularyIds().contains(vocabulary.getIdentifier());
}
@Override
public void indexingStarted(@Nonnull final Vocabulary vocabulary)
{
if (this.operationsInProgress.incrementAndGet() == 1) {
this.data = new HashMap<>();
try (BufferedReader in = new BufferedReader(
new InputStreamReader(
new URL(getAnnotationSource()).openConnection().getInputStream(), StandardCharsets.UTF_8))) {
CSVFormat parser = setupCSVParser(vocabulary);
for (final CSVRecord row : parser.parse(in)) {
processCSVRecordRow(row, vocabulary);
}
} catch (final IOException ex) {
this.logger.error("Failed to load annotation source: {}", ex.getMessage());
}
}
}
@Override
public void extendTerm(VocabularyInputTerm term, Vocabulary vocabulary)
{
MultiValuedMap<String, String> termData = this.data.get(term.getId());
if (termData == null || termData.isEmpty()) {
return;
}
for (Map.Entry<String, Collection<String>> datum : termData.asMap().entrySet()) {
if (!datum.getValue().isEmpty()) {
term.set(datum.getKey(), datum.getValue());
}
}
}
@Override
public void indexingEnded(Vocabulary vocabulary)
{
if (this.operationsInProgress.decrementAndGet() == 0) {
this.data = null;
}
}
@Override
public void extendQuery(SolrQuery query, Vocabulary vocabulary)
{
// The base extension doesn't change queries in any way, assuming that the extra fields are only to be stored or
// explicitly queried, not queried automatically. Override if new fields must automatically be included in
// queries.
}
/**
* Processes and caches the row data. By default, it simply copies every mapped value from the row. Override if
* further processing of the data is needed.
*
* @param row the {@link CSVRecord data row} to process
* @param vocabulary the vocabulary being indexed
*/
protected void processCSVRecordRow(final CSVRecord row, final Vocabulary vocabulary)
{
Map<String, String> csvData = row.toMap();
MultiValuedMap<String, String> termData = this.data.get(row.get(ID_KEY));
|
[
" if (termData == null) {"
] | 987
|
lcc
|
java
| null |
44bc927038ee4f263ad7e6209084767cae0e9109fe53fa72
|
|
/**
* This class was created by <Vazkii>. It's distributed as
* part of the Botania Mod. Get the Source Code in github:
* https://github.com/Vazkii/Botania
*
* Botania is Open Source and distributed under the
* Botania License: http://botaniamod.net/license.php
*
* File Created @ [Jan 24, 2014, 8:03:36 PM (GMT)]
*/
package vazkii.botania.api.subtile;
import java.awt.Color;
import java.util.List;
import net.minecraft.block.Block;
import net.minecraft.block.state.IBlockState;
import net.minecraft.client.Minecraft;
import net.minecraft.client.gui.ScaledResolution;
import net.minecraft.client.resources.I18n;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.init.Blocks;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.SoundCategory;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.World;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
import vazkii.botania.api.BotaniaAPI;
import vazkii.botania.api.internal.IManaNetwork;
import vazkii.botania.api.mana.IManaCollector;
import vazkii.botania.api.sound.BotaniaSoundEvents;
/**
* The basic class for a Generating Flower.
*/
public class SubTileGenerating extends SubTileEntity {
public static final int LINK_RANGE = 6;
private static final String TAG_MANA = "mana";
private static final String TAG_COLLECTOR_X = "collectorX";
private static final String TAG_COLLECTOR_Y = "collectorY";
private static final String TAG_COLLECTOR_Z = "collectorZ";
private static final String TAG_PASSIVE_DECAY_TICKS = "passiveDecayTicks";
protected int mana;
public int redstoneSignal = 0;
int sizeLastCheck = -1;
protected TileEntity linkedCollector = null;
public int knownMana = -1;
public int passiveDecayTicks;
BlockPos cachedCollectorCoordinates = null;
/**
* If set to true, redstoneSignal will be updated every tick.
*/
public boolean acceptsRedstone() {
return false;
}
@Override
public void onUpdate() {
super.onUpdate();
linkCollector();
if(canGeneratePassively()) {
int delay = getDelayBetweenPassiveGeneration();
if(delay > 0 && ticksExisted % delay == 0 && !supertile.getWorld().isRemote) {
if(shouldSyncPassiveGeneration())
sync();
addMana(getValueForPassiveGeneration());
}
}
emptyManaIntoCollector();
if(acceptsRedstone()) {
redstoneSignal = 0;
for(EnumFacing dir : EnumFacing.VALUES) {
int redstoneSide = supertile.getWorld().getRedstonePower(supertile.getPos().offset(dir), dir);
redstoneSignal = Math.max(redstoneSignal, redstoneSide);
}
}
if(supertile.getWorld().isRemote) {
double particleChance = 1F - (double) mana / (double) getMaxMana() / 3.5F;
Color color = new Color(getColor());
if(Math.random() > particleChance)
BotaniaAPI.internalHandler.sparkleFX(supertile.getWorld(), supertile.getPos().getX() + 0.3 + Math.random() * 0.5, supertile.getPos().getY() + 0.5 + Math.random() * 0.5, supertile.getPos().getZ() + 0.3 + Math.random() * 0.5, color.getRed() / 255F, color.getGreen() / 255F, color.getBlue() / 255F, (float) Math.random(), 5);
}
boolean passive = isPassiveFlower();
if(!supertile.getWorld().isRemote) {
int muhBalance = BotaniaAPI.internalHandler.getPassiveFlowerDecay();
if(passive && muhBalance > 0 && passiveDecayTicks > muhBalance) {
IBlockState state = supertile.getWorld().getBlockState(supertile.getPos());
supertile.getWorld().playEvent(2001, supertile.getPos(), Block.getStateId(state));
if(supertile.getWorld().getBlockState(supertile.getPos().down()).isSideSolid(supertile.getWorld(), supertile.getPos().down(), EnumFacing.UP))
supertile.getWorld().setBlockState(supertile.getPos(), Blocks.DEADBUSH.getDefaultState());
else supertile.getWorld().setBlockToAir(supertile.getPos());
}
}
if(passive)
passiveDecayTicks++;
}
public void linkCollector() {
boolean needsNew = false;
if(linkedCollector == null) {
needsNew = true;
if(cachedCollectorCoordinates != null) {
needsNew = false;
if(supertile.getWorld().isBlockLoaded(cachedCollectorCoordinates)) {
needsNew = true;
TileEntity tileAt = supertile.getWorld().getTileEntity(cachedCollectorCoordinates);
if(tileAt != null && tileAt instanceof IManaCollector && !tileAt.isInvalid()) {
linkedCollector = tileAt;
needsNew = false;
}
cachedCollectorCoordinates = null;
}
}
} else {
TileEntity tileAt = supertile.getWorld().getTileEntity(linkedCollector.getPos());
if(tileAt != null && tileAt instanceof IManaCollector)
linkedCollector = tileAt;
}
if(needsNew && ticksExisted == 1) { // New flowers only
IManaNetwork network = BotaniaAPI.internalHandler.getManaNetworkInstance();
int size = network.getAllCollectorsInWorld(supertile.getWorld()).size();
if(BotaniaAPI.internalHandler.shouldForceCheck() || size != sizeLastCheck) {
linkedCollector = network.getClosestCollector(supertile.getPos(), supertile.getWorld(), LINK_RANGE);
sizeLastCheck = size;
}
}
}
public void linkToForcefully(TileEntity collector) {
linkedCollector = collector;
}
public void addMana(int mana) {
this.mana = Math.min(getMaxMana(), this.mana + mana);
}
public void emptyManaIntoCollector() {
if(linkedCollector != null && isValidBinding()) {
IManaCollector collector = (IManaCollector) linkedCollector;
if(!collector.isFull() && mana > 0) {
int manaval = Math.min(mana, collector.getMaxMana() - collector.getCurrentMana());
mana -= manaval;
collector.recieveMana(manaval);
}
}
}
public boolean isPassiveFlower() {
return false;
}
public boolean shouldSyncPassiveGeneration() {
return false;
}
public boolean canGeneratePassively() {
return false;
}
public int getDelayBetweenPassiveGeneration() {
return 20;
}
public int getValueForPassiveGeneration() {
return 1;
}
@Override
public List<ItemStack> getDrops(List<ItemStack> list) {
List<ItemStack> drops = super.getDrops(list);
populateDropStackNBTs(drops);
return drops;
}
public void populateDropStackNBTs(List<ItemStack> drops) {
if(isPassiveFlower() && ticksExisted > 0 && BotaniaAPI.internalHandler.getPassiveFlowerDecay() > 0) {
ItemStack drop = drops.get(0);
if(!drop.isEmpty()) {
if(!drop.hasTagCompound())
drop.setTagCompound(new NBTTagCompound());
NBTTagCompound cmp = drop.getTagCompound();
cmp.setInteger(TAG_PASSIVE_DECAY_TICKS, passiveDecayTicks);
}
}
}
@Override
public void onBlockPlacedBy(World world, BlockPos pos, IBlockState state, EntityLivingBase entity, ItemStack stack) {
super.onBlockPlacedBy(world, pos, state, entity, stack);
if(isPassiveFlower()) {
NBTTagCompound cmp = stack.getTagCompound();
passiveDecayTicks = cmp.getInteger(TAG_PASSIVE_DECAY_TICKS);
}
}
@Override
public boolean onWanded(EntityPlayer player, ItemStack wand) {
|
[
"\t\tif(player == null)"
] | 639
|
lcc
|
java
| null |
b8f2bccffdfaa6714889fe5bc5f0f10b12a66609c8c585d1
|
|
package implementable;
import gnu.trove.map.hash.THashMap;
import gnu.trove.set.hash.THashSet;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import tools.DebugMode;
import communityDetectionPackage.ILCDMetaAlgorithm;
import TemporalNetworkManipulation.Community;
import TemporalNetworkManipulation.Node;
import TemporalNetworkManipulation.Operations.BirthCommunityOperation;
import TemporalNetworkManipulation.Operations.ContractCommunityOperation;
//RNHM stands for representative nodes and Hub minimization.
public class ImplementationRNHM extends Implementation {
private int initialComSize;
private float integrateParameter;
private float fusionParameter;
//for speed optimisation
THashMap<Community, Float> memorizedcohesion = new THashMap<Community, Float>();
/**
 * Configures the RNHM (Representative Nodes / Hub Minimization) strategy.
 *
 * @param initialComSize     size of seed communities (3 or 4; validated in BIRTH)
 * @param integrateParameter threshold factor used by GROWTH/CONTRACTION_DIVISION
 * @param fusionParameter    threshold factor used by FUSION
 */
public ImplementationRNHM(int initialComSize, float integrateParameter, float fusionParameter) {
    this.fusionParameter = fusionParameter;
    this.integrateParameter = integrateParameter;
    this.initialComSize = initialComSize;
}
/**
 * Decides whether a candidate node joins an existing community: it does
 * when its belonging strength reaches integrateParameter times the
 * community's intrinsic cohesion. On success the memoized cohesion for
 * that community is invalidated, since its membership is about to change.
 */
@Override
public boolean GROWTH(Node candidate, Community com) {
    //DebugMode.printDebug("------ ? integrate "+candidate.getName()+" in "+com.getID()+" "+this.getBelongingStrength(candidate, com)+" > "+this.getIntrinsicCohesion(com));
    if (this.getBelongingStrength(candidate, com) >= integrateParameter * this.getIntrinsicCohesion(com)) {
        //OPTIMISATION: drop the cached cohesion — the community will change.
        this.memorizedcohesion.remove(com);
        return true;
    }
    return false;
}
/**
 * Possibly creates new seed communities when a link appears between n1
 * and n2. Nothing is created when the two nodes already share a
 * community (the link is "expected" there and is no evidence for a new
 * one). Only seed sizes 3 and 4 are implemented; any other configured
 * size aborts the program.
 *
 * @return the (possibly empty) list of freshly created communities
 */
@Override
public ArrayList<Community> BIRTH(Node n1, Node n2) {
    ArrayList<Community> newCommunitiesToReturn = new ArrayList<Community>();
    if (this.initialComSize < 3 || this.initialComSize > 4) {
        System.err.println("sorry but, currently, intial communities must have a size of 3 or 4. Contact me if questions.");
        System.exit(-1);
    }
    //OPTIONAL (OPTIMIZATION): we do not check for the creation of a new community if the link is created inside
    //an existing community. The reason is that this link, as it is created inside a community, is very probable
    //and therefore is not an argument to create a new community.
    //getting communities in common
    THashSet<Community> commonComs = new THashSet<Community>(n1.getCommunities());
    commonComs.retainAll(n2.getCommunities());
    if (commonComs.size() == 0) {
        // Delegate to the size-specific seed construction.
        if (this.initialComSize == 3)
            this.birthCase3(newCommunitiesToReturn, commonComs, n1, n2);
        if (this.initialComSize == 4)
            this.birthCase4(newCommunitiesToReturn, commonComs, n1, n2);
    }
    return newCommunitiesToReturn;
}
/**
 * Removes testedNode from affectedCom when its belonging strength falls
 * below the integration threshold computed against the community's
 * cohesion *without* that node, then recursively re-tests the removed
 * node's neighbors in the same community (removal may weaken them too).
 * This implementation never splits a community ("no divisions").
 *
 * NOTE(review): the recursive calls contract nodes while this frame
 * iterates testedNode.getNeighborsInCommunity(affectedCom) — this looks
 * safe only if that method returns a fresh collection; confirm against
 * the Node implementation.
 *
 * @return a singleton list containing affectedCom (unchanged contract)
 */
@Override
public ArrayList<Community> CONTRACTION_DIVISION(Community affectedCom, Node testedNode, ILCDMetaAlgorithm ilcd) {
    //------------------------------
    //no divisions with this version
    //------------------------------
    ArrayList<Community> result = new ArrayList<Community>();
    result.add(affectedCom);
    //if the node has already been removed, no contraction
    if (!testedNode.getCommunities().contains(affectedCom)) {
        return result;
    }
    //compute the intrinsic cohesion of the community without the node
    float adaptedIntrinsicCohesion = this.getIntrinsicCohesion(affectedCom) - this.getRepresentativity(testedNode, affectedCom);
    if (this.getBelongingStrength(testedNode, affectedCom) >= integrateParameter * adaptedIntrinsicCohesion) {
        return result;
    } else {
        //OPTIMISATION: membership changes, so the cached cohesion is stale.
        this.memorizedcohesion.remove(affectedCom);
        //affectedCom.removeNodeFromCommunity(testedNode);
        ilcd.contract(testedNode, affectedCom);
        //Operation op = new ContractCommunityOperation(affectedCom, testedNode);
        //for all neighbors in the same com, check if they must be removed
        for (Node n : testedNode.getNeighborsInCommunity(affectedCom)) {
            this.CONTRACTION_DIVISION(affectedCom, n, ilcd);
        }
        return (result);
    }
}
/**
 * A community dies when it has shrunk below the configured seed size.
 * The cached cohesion entry is dropped unconditionally so a dying (or
 * about-to-change) community never serves stale values.
 */
@Override
public boolean DEATH(Community testedCom) {
    //OPTIMISATION
    this.memorizedcohesion.remove(testedCom);
    return testedCom.getComponents().size() < this.initialComSize;
}
/**
 * Merges toBeAbsorbed into toAbsorb when the summed representativity of
 * their shared nodes exceeds fusionParameter times toBeAbsorbed's
 * cohesion. On fusion, nodes exclusive to toBeAbsorbed are individually
 * re-tested via GROWTH before being added to toAbsorb.
 *
 * @return the surviving communities: both when no fusion happened,
 *         only toAbsorb when the fusion was performed
 */
@Override
public ArrayList<Community> FUSION(Community toBeAbsorbed, Community toAbsorb, ILCDMetaAlgorithm ilcd) {
    ArrayList<Community> result = new ArrayList<Community>();
    result.add(toBeAbsorbed);
    result.add(toAbsorb);
    ArrayList<Node> commonNodes = new ArrayList<Node>(toBeAbsorbed.getComponents());
    commonNodes.retainAll(toAbsorb.getComponents());
    // Overlap strength: how representative the shared nodes are of toBeAbsorbed.
    float representativityOfCommonNodes = 0;
    for (Node n : commonNodes) {
        representativityOfCommonNodes += this.getRepresentativity(n, toBeAbsorbed);
    }
    //if the fusion must be done
    if (representativityOfCommonNodes > this.getIntrinsicCohesion(toBeAbsorbed) * this.fusionParameter) {
        //OPTIMISATION: both communities change; invalidate cached cohesions.
        this.memorizedcohesion.remove(toBeAbsorbed);
        this.memorizedcohesion.remove(toAbsorb);
        //remove the younger community
        result.remove(toBeAbsorbed);
        //do the fusion
        ArrayList<Node> mightIntegrate = new ArrayList<Node>(toBeAbsorbed.getComponents());
        mightIntegrate.removeAll(commonNodes);
        for (Node n : mightIntegrate) {
            if (this.GROWTH(n, toAbsorb))
                ilcd.addNodeToCommunity(n, toAbsorb);
        }
    }
    return result;
}
/**
 * Representativity of node n for community c: the fraction of n's
 * neighbors that belong to c (0 when n has no neighbors at all).
 *
 * Fixes:
 *  - removed the unused local built via getIdRelation() (dead code);
 *  - guard the neighborless case, where the original computed
 *    0f / 0f == NaN and silently poisoned every cohesion/strength sum
 *    built on top of this value.
 */
private float getRepresentativity(Node n, Community c) {
    THashSet<Node> neighborsInC = new THashSet<Node>(n.getNeighbors());
    float nbNeighbors = neighborsInC.size();
    if (nbNeighbors == 0)
        return 0;
    neighborsInC.retainAll(c.getComponents());
    float nbNeighborsInC = neighborsInC.size();
    return nbNeighborsInC / nbNeighbors;
}
/**
 * Builds a key identifying the (node, community) pair by concatenating
 * the community id with the node name.
 *
 * NOTE(review): plain concatenation can collide (id "1" + name "23" ==
 * id "12" + name "3"); a separator would make the key unambiguous —
 * confirm whether callers depend on the current format.
 */
private String getIdRelation(Node n, Community c) {
    return c.getID() + n.getName();
}
/**
 * Belonging strength of node n toward community c: the sum of the
 * representativities of n's neighbors that are inside c. A node with
 * fewer than two neighbors in c has strength 0 by definition.
 */
private float getBelongingStrength(Node n, Community c) {
    THashSet<Node> neighborsInC = new THashSet<Node>(c.getComponents());
    neighborsInC.retainAll(n.getNeighbors());
    //will probably need an optimization for not computing again values already computed
    float belongingStrength = 0;
    if (neighborsInC.size() < 2) {
        // Fewer than 2 shared neighbors: no meaningful attachment.
        return 0;
    } else {
        for (Node neighb : neighborsInC) {
            belongingStrength += this.getRepresentativity(neighb, c);
        }
    }
    return belongingStrength;
}
private float getIntrinsicCohesion(Community c) {
if (this.memorizedcohesion.containsKey(c))
return this.memorizedcohesion.get(c);
//will probably need an optimization for not computing again values already computed
float intrinsicCohesion = 0;
for (Node component : c.getComponents()) {
|
[
" intrinsicCohesion += this.getRepresentativity(component, c);"
] | 569
|
lcc
|
java
| null |
db86b64c8c534e1f3bab91df60822f5e3fd18cea9415e467
|
|
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2012, 2013 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from invenio.legacy.dbquery import run_sql, OperationalError
from six.moves import cPickle
import logging
depends_on = ['invenio_release_1_1_0']
update_needed = True
def info():
    """Return the one-line description shown by the upgrader for this recipe."""
    return "Change of the underlying data model allowing extended BibDocs and MoreInfo"
def do_upgrade():
    """Migrate the document store to the new data model.

    Skipped entirely when pre_upgrade() determined the schema is already
    migrated (module-level ``update_needed`` is False). Otherwise: back up
    the affected tables, add the new columns/tables, migrate every record
    with attached documents, then drop the obsolete column.
    """
    logger = logging.getLogger('invenio_upgrader')
    if update_needed:
        _backup_tables(logger)
        _update_database_structure_pre(logger)
        recids = _retrieve_fulltext_recids()
        for recid in recids:
            # _fix_recid returns False on per-record failure; we log and continue.
            if not _fix_recid(recid, logger):
                logger.info("ERROR: Failed fixing the record %s" % (str(recid)))
        _update_database_structure_post(logger)
    else:
        logger.info("Update executed but not needed. skipping")
def estimate():
    """Estimate the upgrade's running time in seconds (~40 bibdocs/second)."""
    rows = run_sql("select count(*) from bibdoc")
    if not rows:
        return 0
    return int(float(rows[0][0]) / 40)
def pre_upgrade():
    """Decide whether this upgrade still needs to run.

    If ``bibdoc`` no longer has a ``more_info`` column, the new data model
    is already in place; flag the module-level ``update_needed`` so that
    do_upgrade() becomes a no-op.
    """
    res = run_sql("show create table bibdoc")[0][1]
    global update_needed
    if not "more_info" in res:
        update_needed = False
def post_upgrade():
    """Run post-upgrade checks (optional); nothing to verify for this recipe."""
    # Intentionally empty: per-record verification already happens inside
    # _fix_recid during do_upgrade().
    pass
# private methods
def _update_database_structure_pre(logger):
    """Alter the old schema in place: add the columns/tables the new model needs.

    Each ALTER is attempted independently and failures are logged as
    warnings rather than aborting, so a partially migrated database does
    not stop the upgrade.
    """
    logger.info("Adding missing columns to tables")
    # bibdoc.doctype: the document type moves here from bibrec_bibdoc.type.
    try:
        run_sql("ALTER TABLE bibdoc ADD COLUMN doctype varchar(255) AFTER more_info")
    except Exception as e:
        logger.info("WARNING: Problem when altering table. Is the database really in the state from before the upgrade ? " + str(e))
    # bibdoc.docname becomes nullable (the name moves to the bibrec link).
    try:
        run_sql("ALTER TABLE bibdoc CHANGE COLUMN docname docname varchar(250) COLLATE utf8_bin default NULL")
    except Exception as e:
        logger.info("WARNING: Problem when altering table. Is the database really in the state from before the upgrade ? " + str(e))
    # bibrec_bibdoc.docname: the per-record document name, indexed.
    try:
        run_sql("ALTER TABLE bibrec_bibdoc ADD COLUMN docname varchar(250) COLLATE utf8_bin NOT NULL default 'file' AFTER id_bibdoc, ADD KEY docname(docname)")
    except Exception as e:
        logger.info("WARNING: Problem when altering table. Is the database really in the state from before the upgrade ? " + str(e))
    # bibdoc_bibdoc: relations gain their own id plus versioned/formatted endpoints.
    try:
        run_sql("ALTER TABLE bibdoc_bibdoc CHANGE COLUMN id_bibdoc1 id_bibdoc1 mediumint(9) unsigned DEFAULT NULL")
        run_sql("ALTER TABLE bibdoc_bibdoc CHANGE COLUMN id_bibdoc2 id_bibdoc2 mediumint(9) unsigned DEFAULT NULL")
        run_sql("ALTER TABLE bibdoc_bibdoc ADD COLUMN id mediumint(9) unsigned NOT NULL auto_increment FIRST, ADD COLUMN version1 tinyint(4) unsigned AFTER id_bibdoc1, ADD COLUMN format1 varchar(50) AFTER version1, ADD COLUMN version2 tinyint(4) unsigned AFTER id_bibdoc2, ADD COLUMN format2 varchar(50) AFTER version2, CHANGE COLUMN type rel_type varchar(255) AFTER format2, ADD KEY (id)")
    except Exception as e:
        logger.info("WARNING: Problem when altering table. Is the database really in the state from before the upgrade ? " + str(e))
    # New key/value store replacing the pickled bibdoc.more_info blob.
    run_sql("""CREATE TABLE IF NOT EXISTS bibdocmoreinfo (
    id_bibdoc mediumint(9) unsigned DEFAULT NULL,
    version tinyint(4) unsigned DEFAULT NULL,
    format VARCHAR(50) DEFAULT NULL,
    id_rel mediumint(9) unsigned DEFAULT NULL,
    namespace VARCHAR(25) DEFAULT NULL,
    data_key VARCHAR(25),
    data_value MEDIUMBLOB,
    KEY (id_bibdoc, version, format, id_rel, namespace, data_key)
    ) ENGINE=MyISAM;""")
def _update_database_structure_post(logger):
    """Final schema step: drop the obsolete pickled ``more_info`` column.

    Must only run after every record has been migrated into bibdocmoreinfo.
    """
    logger.info("Removing unnecessary columns from tables")
    run_sql("ALTER TABLE bibdoc DROP COLUMN more_info")
def _backup_tables(logger):
    """Snapshot bibrec_bibdoc, bibdoc and bibdoc_bibdoc into *_backup_newdatamodel.

    Any previous backup tables are dropped first. Returns True on success;
    re-raises OperationalError (after logging) if a backup copy fails.
    """
    logger.info("droping old backup tables")
    run_sql('DROP TABLE IF EXISTS bibrec_bibdoc_backup_newdatamodel')
    run_sql('DROP TABLE IF EXISTS bibdoc_backup_newdatamodel')
    run_sql('DROP TABLE IF EXISTS bibdoc_bibdoc_backup_newdatamodel')
    try:
        run_sql("""CREATE TABLE bibrec_bibdoc_backup_newdatamodel SELECT * FROM bibrec_bibdoc""")
        run_sql("""CREATE TABLE bibdoc_backup_newdatamodel SELECT * FROM bibdoc""")
        run_sql("""CREATE TABLE bibdoc_bibdoc_backup_newdatamodel SELECT * FROM bibdoc_bibdoc""")
    except OperationalError as e:
        logger.info("Problem when backing up tables")
        raise
    return True
def _retrieve_fulltext_recids():
    """Return every record id that has at least one attached fulltext file."""
    rows = run_sql('SELECT DISTINCT id_bibrec FROM bibrec_bibdoc')
    return [int(row[0]) for row in rows]
def _fix_recid(recid, logger):
"""Fix a given recid."""
#logger.info("Upgrading record %s:" % recid)
# 1) moving docname and type to the relation with bibrec
bibrec_docs = run_sql("select id_bibdoc, type from bibrec_bibdoc where id_bibrec=%s", (recid, ))
are_equal = True
for docid_str in bibrec_docs:
docid = str(docid_str[0])
doctype = str(docid_str[1])
#logger.info("Upgrading document %s:" % (docid, ))
res2 = run_sql("select docname, more_info from bibdoc where id=%s", (docid,))
if not res2:
logger.error("Error when migrating document %s attached to the record %s: can not retrieve from the bibdoc table " % (docid, recid))
else:
docname = str(res2[0][0])
run_sql("update bibrec_bibdoc set docname=%%s where id_bibrec=%s and id_bibdoc=%s" % (str(recid), docid), (docname, ))
run_sql("update bibdoc set doctype=%%s where id=%s" % (docid,), (doctype, ))
# 2) moving moreinfo to the new moreinfo structures (default namespace)
if res2[0][1]:
minfo = cPickle.loads(res2[0][1])
# 2a migrating descriptions->version->format
new_value = cPickle.dumps(minfo['descriptions'])
run_sql("INSERT INTO bibdocmoreinfo (id_bibdoc, namespace, data_key, data_value) VALUES (%s, %s, %s, %s)", (str(docid), "", "descriptions", new_value))
# 2b migrating comments->version->format
new_value = cPickle.dumps(minfo['comments'])
run_sql("INSERT INTO bibdocmoreinfo (id_bibdoc, namespace, data_key, data_value) VALUES (%s, %s, %s, %s)", (str(docid), "", "comments", new_value))
# 2c migrating flags->flagname->version->format
new_value = cPickle.dumps(minfo['flags'])
run_sql("INSERT INTO bibdocmoreinfo (id_bibdoc, namespace, data_key, data_value) VALUES (%s, %s, %s, %s)", (str(docid), "", "flags", new_value))
# 3) Verify the correctness of moreinfo transformations
try:
descriptions = cPickle.loads(run_sql("SELECT data_value FROM bibdocmoreinfo WHERE id_bibdoc=%s AND namespace=%s AND data_key=%s", (str(docid), '', 'descriptions'))[0][0])
for version in minfo['descriptions']:
for docformat in minfo['descriptions'][version]:
v1 = descriptions[version][docformat]
v2 = minfo['descriptions'][version][docformat]
if v1 != v2:
are_equal = False
logger.info("ERROR: Document %s: Expected description %s and got %s" % (str(docid), str(v2), str(v1)))
except Exception as e:
logger.info("ERROR: Document %s: Problem with retrieving descriptions: %s MoreInfo: %s Descriptions: %s" % (str(docid), str(e), str(minfo), str(descriptions)))
try:
comments = cPickle.loads(run_sql("SELECT data_value FROM bibdocmoreinfo WHERE id_bibdoc=%s AND namespace=%s AND data_key=%s", (str(docid), '', 'comments'))[0][0])
for version in minfo['comments']:
for docformat in minfo['comments'][version]:
v1 = comments[version][docformat]
v2 = minfo['comments'][version][docformat]
if v1 != v2:
are_equal = False
logger.info("ERROR: Document %s: Expected comment %s and got %s" % (str(docid), str(v2), str(v1)))
except Exception as e:
logger.info("ERROR: Document %s: Problem with retrieving comments: %s MoreInfo: %s Comments: %s" % (str(docid), str(e), str(minfo), str(comments)))
try:
flags = cPickle.loads(run_sql("SELECT data_value FROM bibdocmoreinfo WHERE id_bibdoc=%s AND namespace=%s AND data_key=%s", (str(docid), '', 'flags'))[0][0])
for flagname in minfo['flags']:
for version in minfo['flags'][flagname]:
for docformat in minfo['flags'][flagname][version]:
if minfo['flags'][flagname][version][docformat]:
are_equal = are_equal and (docformat in flags[flagname][version])
|
[
" if not (docformat in flags[flagname][version]):"
] | 1,095
|
lcc
|
python
| null |
e346c1259b04562f788d0e2865564ee2ec42aaada9b4e031
|
|
/* Copyright (C) 2004 MySQL AB
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; version 2 of the License.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA */
using System;
using System.Drawing;
using System.Collections;
using System.ComponentModel;
using System.Windows.Forms;
using System.Data;
using System.Threading;
namespace NDB_CPC
{
/// <summary>
/// Summary description for Form1.
/// </summary>
public class CPC : System.Windows.Forms.Form
{
private System.Windows.Forms.TreeView tvComputerCluster;
private System.Windows.Forms.ContextMenu ctxTreeViewMenu;
private System.Windows.Forms.ColumnHeader chComputer;
private System.Windows.Forms.ColumnHeader chProcessName;
private System.Windows.Forms.ContextMenu ctxListViewMenu;
private System.Windows.Forms.MenuItem mainMenuItem;
private System.Windows.Forms.ColumnHeader chProcesses;
private System.Windows.Forms.MainMenu mainMenu;
private System.Windows.Forms.Panel panel1;
private System.Windows.Forms.MenuItem menuItem7;
private System.Windows.Forms.MenuItem menuItem10;
private System.Windows.Forms.MenuItem mainMenuFile;
private System.Windows.Forms.MenuItem mainMenuComputer;
private System.Windows.Forms.MenuItem subMenuComputerAdd;
private System.Windows.Forms.MenuItem subMenuComputerRemove;
private System.Windows.Forms.MenuItem subMenuComputerDisconnect;
private System.Windows.Forms.MenuItem subMenuComputerProperties;
private System.ComponentModel.IContainer components;
private System.Windows.Forms.MenuItem menuItem3;
private System.Windows.Forms.MenuItem computerMenuAdd;
private System.Windows.Forms.MenuItem computerMenuRemove;
private System.Windows.Forms.MenuItem menuItem5;
private System.Windows.Forms.MenuItem computerMenuDisconnect;
private System.Windows.Forms.MenuItem computerMenuConnect;
private System.Windows.Forms.MenuItem computerMenuProperties;
private System.Windows.Forms.MenuItem menuItem11;
private System.Windows.Forms.MenuItem tvCtxMenuComputerAdd;
private System.Windows.Forms.MenuItem tvCtxMenuComputerRemove;
private System.Windows.Forms.MenuItem tvCtxMenuComputerConnect;
private System.Windows.Forms.MenuItem tvCtxMenuComputerDisconnect;
private System.Windows.Forms.MenuItem tvCtxMenuComputerDefine;
private System.Windows.Forms.MenuItem tvCtxMenuDatabaseNew;
private System.Windows.Forms.MenuItem menuItem1;
private System.Windows.Forms.MenuItem menuItem2;
private System.Windows.Forms.MenuItem mainMenuDatabase;
private System.Windows.Forms.MenuItem subMenuDatabaseCreate;
private System.Windows.Forms.MenuItem menuItem8;
private System.Windows.Forms.MenuItem tvCtxMenuProperties;
private System.Windows.Forms.ImageList imageTV;
private ComputerMgmt computerMgmt;
private System.Windows.Forms.MenuItem computerMenuRefresh;
private System.Windows.Forms.ListView listView;
private System.Windows.Forms.ColumnHeader chComputerIP;
private System.Windows.Forms.ColumnHeader chDatabase;
private System.Windows.Forms.ColumnHeader chName;
private System.Windows.Forms.ColumnHeader chOwner;
private System.Windows.Forms.ColumnHeader chStatus;
private System.Windows.Forms.Splitter splitter2;
private System.Windows.Forms.Splitter splitterVertical;
private System.Windows.Forms.Splitter splitterHorizont;
private Thread guiThread;
private float resizeWidthRatio;
private System.Windows.Forms.MenuItem menuItem6;
private System.Windows.Forms.MenuItem menuGetStatus;
private System.Windows.Forms.MenuItem menuStartProcess;
private System.Windows.Forms.MenuItem menuRestartProcess;
private System.Windows.Forms.MenuItem menuStopProcess;
private System.Windows.Forms.MenuItem menuRemoveProcess;
private System.Windows.Forms.MenuItem menuRefresh;
private System.Windows.Forms.OpenFileDialog openHostFileDialog;
private System.Windows.Forms.SaveFileDialog saveHostFileDialog;
private float resizeHeightRatio;
private System.Windows.Forms.TextBox mgmConsole;
int i;
/// <summary>
/// Builds the main CPC form: runs designer-generated initialization,
/// creates the computer-management backend and prepares (but does not
/// start — see the commented-out Start call) the GUI update thread.
/// </summary>
public CPC()
{
    //
    // Required for Windows Form Designer support
    //
    InitializeComponent();
    // TODO: Add any constructor code after InitializeComponent call
    //
    computerMgmt = new ComputerMgmt();
    guiThread = new Thread(new ThreadStart(updateGuiThread));
    // guiThread.Start();
}
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">true when called from Dispose() (release
/// managed components); false when called from a finalizer.</param>
protected override void Dispose( bool disposing )
{
    if( disposing )
    {
        if (components != null)
        {
            components.Dispose();
        }
    }
    // NOTE(review): guiThread is never stopped here (Abort call is
    // commented out); confirm the thread is not running at dispose time.
    //guiThread.Abort();
    base.Dispose( disposing );
}
#region Windows Form Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
this.components = new System.ComponentModel.Container();
System.Resources.ResourceManager resources = new System.Resources.ResourceManager(typeof(CPC));
this.tvComputerCluster = new System.Windows.Forms.TreeView();
this.ctxTreeViewMenu = new System.Windows.Forms.ContextMenu();
this.tvCtxMenuComputerAdd = new System.Windows.Forms.MenuItem();
this.tvCtxMenuComputerRemove = new System.Windows.Forms.MenuItem();
this.menuGetStatus = new System.Windows.Forms.MenuItem();
this.menuItem6 = new System.Windows.Forms.MenuItem();
this.tvCtxMenuComputerConnect = new System.Windows.Forms.MenuItem();
this.tvCtxMenuComputerDisconnect = new System.Windows.Forms.MenuItem();
this.tvCtxMenuDatabaseNew = new System.Windows.Forms.MenuItem();
this.tvCtxMenuComputerDefine = new System.Windows.Forms.MenuItem();
this.menuItem8 = new System.Windows.Forms.MenuItem();
this.tvCtxMenuProperties = new System.Windows.Forms.MenuItem();
this.imageTV = new System.Windows.Forms.ImageList(this.components);
this.ctxListViewMenu = new System.Windows.Forms.ContextMenu();
this.menuStartProcess = new System.Windows.Forms.MenuItem();
this.menuRestartProcess = new System.Windows.Forms.MenuItem();
this.menuStopProcess = new System.Windows.Forms.MenuItem();
this.menuRemoveProcess = new System.Windows.Forms.MenuItem();
this.menuRefresh = new System.Windows.Forms.MenuItem();
this.computerMenuAdd = new System.Windows.Forms.MenuItem();
this.menuItem3 = new System.Windows.Forms.MenuItem();
this.computerMenuRemove = new System.Windows.Forms.MenuItem();
this.menuItem5 = new System.Windows.Forms.MenuItem();
this.computerMenuDisconnect = new System.Windows.Forms.MenuItem();
this.computerMenuConnect = new System.Windows.Forms.MenuItem();
this.menuItem11 = new System.Windows.Forms.MenuItem();
this.computerMenuProperties = new System.Windows.Forms.MenuItem();
this.computerMenuRefresh = new System.Windows.Forms.MenuItem();
this.chComputer = new System.Windows.Forms.ColumnHeader();
this.chProcessName = new System.Windows.Forms.ColumnHeader();
this.mainMenuItem = new System.Windows.Forms.MenuItem();
this.chProcesses = new System.Windows.Forms.ColumnHeader();
this.mainMenu = new System.Windows.Forms.MainMenu();
this.mainMenuFile = new System.Windows.Forms.MenuItem();
this.menuItem2 = new System.Windows.Forms.MenuItem();
this.menuItem1 = new System.Windows.Forms.MenuItem();
this.mainMenuComputer = new System.Windows.Forms.MenuItem();
this.subMenuComputerAdd = new System.Windows.Forms.MenuItem();
this.menuItem7 = new System.Windows.Forms.MenuItem();
this.subMenuComputerDisconnect = new System.Windows.Forms.MenuItem();
this.subMenuComputerRemove = new System.Windows.Forms.MenuItem();
this.menuItem10 = new System.Windows.Forms.MenuItem();
this.subMenuComputerProperties = new System.Windows.Forms.MenuItem();
this.mainMenuDatabase = new System.Windows.Forms.MenuItem();
this.subMenuDatabaseCreate = new System.Windows.Forms.MenuItem();
this.panel1 = new System.Windows.Forms.Panel();
this.mgmConsole = new System.Windows.Forms.TextBox();
this.splitterHorizont = new System.Windows.Forms.Splitter();
this.splitter2 = new System.Windows.Forms.Splitter();
this.listView = new System.Windows.Forms.ListView();
this.chComputerIP = new System.Windows.Forms.ColumnHeader();
this.chStatus = new System.Windows.Forms.ColumnHeader();
this.chDatabase = new System.Windows.Forms.ColumnHeader();
this.chName = new System.Windows.Forms.ColumnHeader();
this.chOwner = new System.Windows.Forms.ColumnHeader();
this.splitterVertical = new System.Windows.Forms.Splitter();
this.openHostFileDialog = new System.Windows.Forms.OpenFileDialog();
this.saveHostFileDialog = new System.Windows.Forms.SaveFileDialog();
this.panel1.SuspendLayout();
this.SuspendLayout();
//
// tvComputerCluster
//
this.tvComputerCluster.CausesValidation = false;
this.tvComputerCluster.ContextMenu = this.ctxTreeViewMenu;
this.tvComputerCluster.Dock = System.Windows.Forms.DockStyle.Left;
this.tvComputerCluster.ImageList = this.imageTV;
this.tvComputerCluster.Name = "tvComputerCluster";
this.tvComputerCluster.Nodes.AddRange(new System.Windows.Forms.TreeNode[] {
new System.Windows.Forms.TreeNode("Computer", 0, 0),
new System.Windows.Forms.TreeNode("Database", 5, 5)});
this.tvComputerCluster.Size = new System.Drawing.Size(104, 333);
this.tvComputerCluster.TabIndex = 5;
this.tvComputerCluster.MouseDown += new System.Windows.Forms.MouseEventHandler(this.tvComputerCluster_MouseDown);
this.tvComputerCluster.AfterSelect += new System.Windows.Forms.TreeViewEventHandler(this.tvComputerCluster_AfterSelect);
this.tvComputerCluster.BeforeCollapse += new System.Windows.Forms.TreeViewCancelEventHandler(this.tvComputerCluster_BeforeCollapse);
this.tvComputerCluster.BeforeExpand += new System.Windows.Forms.TreeViewCancelEventHandler(this.tvComputerCluster_BeforeExpand);
//
// ctxTreeViewMenu
//
this.ctxTreeViewMenu.MenuItems.AddRange(new System.Windows.Forms.MenuItem[] {
this.tvCtxMenuComputerAdd,
this.tvCtxMenuComputerRemove,
this.menuGetStatus,
this.menuItem6,
this.tvCtxMenuComputerConnect,
this.tvCtxMenuComputerDisconnect,
this.tvCtxMenuDatabaseNew,
this.tvCtxMenuComputerDefine,
this.menuItem8,
this.tvCtxMenuProperties});
this.ctxTreeViewMenu.Popup += new System.EventHandler(this.ctxTreeViewMenu_Popup);
//
// tvCtxMenuComputerAdd
//
this.tvCtxMenuComputerAdd.Index = 0;
this.tvCtxMenuComputerAdd.Text = "Add computer";
this.tvCtxMenuComputerAdd.Click += new System.EventHandler(this.computerMenuAdd_Click);
//
// tvCtxMenuComputerRemove
//
this.tvCtxMenuComputerRemove.Index = 1;
this.tvCtxMenuComputerRemove.Text = "Remove computer";
this.tvCtxMenuComputerRemove.Click += new System.EventHandler(this.computerMenuRemove_Click);
//
// menuGetStatus
//
this.menuGetStatus.Index = 2;
this.menuGetStatus.Text = "Get Status";
this.menuGetStatus.Click += new System.EventHandler(this.menuGetStatus_Click);
//
// menuItem6
//
this.menuItem6.Index = 3;
this.menuItem6.Text = "-";
//
// tvCtxMenuComputerConnect
//
this.tvCtxMenuComputerConnect.Index = 4;
this.tvCtxMenuComputerConnect.Text = "Connect";
//
// tvCtxMenuComputerDisconnect
//
this.tvCtxMenuComputerDisconnect.Index = 5;
this.tvCtxMenuComputerDisconnect.Text = "Disconnect";
//
// tvCtxMenuDatabaseNew
//
this.tvCtxMenuDatabaseNew.Index = 6;
this.tvCtxMenuDatabaseNew.Text = "Create database...";
this.tvCtxMenuDatabaseNew.Click += new System.EventHandler(this.subMenuDatabaseCreate_Click);
//
// tvCtxMenuComputerDefine
//
this.tvCtxMenuComputerDefine.Index = 7;
this.tvCtxMenuComputerDefine.Text = "Define process...";
this.tvCtxMenuComputerDefine.Click += new System.EventHandler(this.tvCtxMenuComputerDefine_Click);
//
// menuItem8
//
this.menuItem8.Index = 8;
this.menuItem8.Text = "-";
//
// tvCtxMenuProperties
//
this.tvCtxMenuProperties.Index = 9;
this.tvCtxMenuProperties.Text = "Properties";
//
// imageTV
//
this.imageTV.ColorDepth = System.Windows.Forms.ColorDepth.Depth8Bit;
this.imageTV.ImageSize = new System.Drawing.Size(16, 16);
this.imageTV.ImageStream = ((System.Windows.Forms.ImageListStreamer)(resources.GetObject("imageTV.ImageStream")));
this.imageTV.TransparentColor = System.Drawing.Color.Transparent;
//
// ctxListViewMenu
//
this.ctxListViewMenu.MenuItems.AddRange(new System.Windows.Forms.MenuItem[] {
this.menuStartProcess,
this.menuRestartProcess,
this.menuStopProcess,
this.menuRemoveProcess,
this.menuRefresh});
this.ctxListViewMenu.Popup += new System.EventHandler(this.ctxListViewMenu_Popup);
//
// menuStartProcess
//
this.menuStartProcess.Index = 0;
this.menuStartProcess.Text = "Start process";
this.menuStartProcess.Click += new System.EventHandler(this.startProcess);
//
// menuRestartProcess
//
this.menuRestartProcess.Index = 1;
this.menuRestartProcess.Text = "Restart process";
this.menuRestartProcess.Click += new System.EventHandler(this.restartProcess);
//
// menuStopProcess
//
this.menuStopProcess.Index = 2;
this.menuStopProcess.Text = "Stop process";
this.menuStopProcess.Click += new System.EventHandler(this.stopProcess);
//
// menuRemoveProcess
//
this.menuRemoveProcess.Index = 3;
this.menuRemoveProcess.Text = "Remove process";
this.menuRemoveProcess.Click += new System.EventHandler(this.removeProcess);
//
// menuRefresh
//
this.menuRefresh.Index = 4;
this.menuRefresh.Text = "Refresh";
this.menuRefresh.Click += new System.EventHandler(this.menuRefresh_Click);
//
// computerMenuAdd
//
this.computerMenuAdd.Index = -1;
this.computerMenuAdd.Text = "Add";
this.computerMenuAdd.Click += new System.EventHandler(this.computerMenuAdd_Click);
//
// menuItem3
//
this.menuItem3.Index = -1;
this.menuItem3.Text = "-";
//
// computerMenuRemove
//
this.computerMenuRemove.Index = -1;
this.computerMenuRemove.Text = "Remove";
this.computerMenuRemove.Click += new System.EventHandler(this.computerMenuRemove_Click);
//
// menuItem5
//
this.menuItem5.Index = -1;
this.menuItem5.Text = "-";
//
// computerMenuDisconnect
//
this.computerMenuDisconnect.Index = -1;
this.computerMenuDisconnect.Text = "Disconnect";
//
// computerMenuConnect
//
this.computerMenuConnect.Index = -1;
this.computerMenuConnect.Text = "Connect";
//
// menuItem11
//
this.menuItem11.Index = -1;
this.menuItem11.Text = "-";
//
// computerMenuProperties
//
this.computerMenuProperties.Index = -1;
this.computerMenuProperties.Text = "Properties";
//
// computerMenuRefresh
//
this.computerMenuRefresh.Index = -1;
this.computerMenuRefresh.Text = "Refresh";
this.computerMenuRefresh.Click += new System.EventHandler(this.computerMenuRefresh_Click);
//
// chComputer
//
this.chComputer.Text = "Computer";
//
// chProcessName
//
this.chProcessName.Text = "Name";
//
// mainMenuItem
//
this.mainMenuItem.Index = -1;
this.mainMenuItem.Text = "File";
//
// chProcesses
//
this.chProcesses.Text = "Id";
//
// mainMenu
//
this.mainMenu.MenuItems.AddRange(new System.Windows.Forms.MenuItem[] {
this.mainMenuFile,
this.mainMenuComputer,
this.mainMenuDatabase});
//
// mainMenuFile
//
this.mainMenuFile.Index = 0;
this.mainMenuFile.MenuItems.AddRange(new System.Windows.Forms.MenuItem[] {
this.menuItem2,
this.menuItem1});
this.mainMenuFile.Text = "&File";
//
// menuItem2
//
this.menuItem2.Index = 0;
this.menuItem2.Text = "&Import...";
this.menuItem2.Click += new System.EventHandler(this.importHostFile);
//
// menuItem1
//
this.menuItem1.Index = 1;
this.menuItem1.Text = "&Export...";
this.menuItem1.Click += new System.EventHandler(this.exportHostFile);
//
// mainMenuComputer
//
this.mainMenuComputer.Index = 1;
this.mainMenuComputer.MenuItems.AddRange(new System.Windows.Forms.MenuItem[] {
this.subMenuComputerAdd,
this.menuItem7,
this.subMenuComputerDisconnect,
this.subMenuComputerRemove,
this.menuItem10,
this.subMenuComputerProperties});
this.mainMenuComputer.Text = "&Computer";
//
// subMenuComputerAdd
//
this.subMenuComputerAdd.Index = 0;
this.subMenuComputerAdd.Text = "&Add Computer";
this.subMenuComputerAdd.Click += new System.EventHandler(this.computerMenuAdd_Click);
//
// menuItem7
//
this.menuItem7.Index = 1;
this.menuItem7.Text = "-";
//
// subMenuComputerDisconnect
//
this.subMenuComputerDisconnect.Index = 2;
this.subMenuComputerDisconnect.Text = "&Disconnect";
//
// subMenuComputerRemove
//
this.subMenuComputerRemove.Index = 3;
this.subMenuComputerRemove.Text = "&Remove Computer";
this.subMenuComputerRemove.Click += new System.EventHandler(this.computerMenuRemove_Click);
//
// menuItem10
//
this.menuItem10.Index = 4;
this.menuItem10.Text = "-";
//
// subMenuComputerProperties
//
this.subMenuComputerProperties.Index = 5;
this.subMenuComputerProperties.Text = "&Properties";
//
// mainMenuDatabase
//
this.mainMenuDatabase.Index = 2;
this.mainMenuDatabase.MenuItems.AddRange(new System.Windows.Forms.MenuItem[] {
this.subMenuDatabaseCreate});
this.mainMenuDatabase.Text = "&Database";
this.mainMenuDatabase.Click += new System.EventHandler(this.subMenuDatabaseCreate_Click);
//
// subMenuDatabaseCreate
//
this.subMenuDatabaseCreate.Index = 0;
this.subMenuDatabaseCreate.Text = "&Create database...";
this.subMenuDatabaseCreate.Click += new System.EventHandler(this.subMenuDatabaseCreate_Click);
//
// panel1
//
this.panel1.Controls.AddRange(new System.Windows.Forms.Control[] {
this.mgmConsole,
this.splitterHorizont,
this.splitter2,
this.listView});
this.panel1.Dock = System.Windows.Forms.DockStyle.Fill;
this.panel1.Location = new System.Drawing.Point(104, 0);
this.panel1.Name = "panel1";
this.panel1.Size = new System.Drawing.Size(384, 333);
this.panel1.TabIndex = 6;
//
// mgmConsole
//
this.mgmConsole.AccessibleRole = System.Windows.Forms.AccessibleRole.StaticText;
this.mgmConsole.Dock = System.Windows.Forms.DockStyle.Bottom;
this.mgmConsole.Location = new System.Drawing.Point(0, 231);
this.mgmConsole.Multiline = true;
this.mgmConsole.Name = "mgmConsole";
this.mgmConsole.Size = new System.Drawing.Size(384, 96);
this.mgmConsole.TabIndex = 5;
this.mgmConsole.Text = "textBox1";
this.mgmConsole.TextChanged += new System.EventHandler(this.mgmConsole_TextChanged);
this.mgmConsole.Enter += new System.EventHandler(this.mgmConsole_Enter);
//
// splitterHorizont
//
this.splitterHorizont.Dock = System.Windows.Forms.DockStyle.Bottom;
this.splitterHorizont.Location = new System.Drawing.Point(0, 327);
this.splitterHorizont.MinExtra = 100;
this.splitterHorizont.MinSize = 100;
this.splitterHorizont.Name = "splitterHorizont";
this.splitterHorizont.Size = new System.Drawing.Size(384, 3);
this.splitterHorizont.TabIndex = 4;
this.splitterHorizont.TabStop = false;
//
// splitter2
//
this.splitter2.Dock = System.Windows.Forms.DockStyle.Bottom;
this.splitter2.Location = new System.Drawing.Point(0, 330);
this.splitter2.Name = "splitter2";
this.splitter2.Size = new System.Drawing.Size(384, 3);
this.splitter2.TabIndex = 2;
this.splitter2.TabStop = false;
//
// listView
//
this.listView.Columns.AddRange(new System.Windows.Forms.ColumnHeader[] {
this.chComputerIP,
this.chStatus,
this.chDatabase,
this.chName,
this.chOwner});
this.listView.ContextMenu = this.ctxListViewMenu;
this.listView.Dock = System.Windows.Forms.DockStyle.Fill;
this.listView.FullRowSelect = true;
this.listView.Name = "listView";
this.listView.Size = new System.Drawing.Size(384, 333);
this.listView.TabIndex = 0;
this.listView.View = System.Windows.Forms.View.Details;
this.listView.ColumnClick += new System.Windows.Forms.ColumnClickEventHandler(this.listView_ColumnClick_1);
this.listView.SelectedIndexChanged += new System.EventHandler(this.listView_SelectedIndexChanged);
//
// chComputerIP
//
this.chComputerIP.Text = "IP Adress";
//
// chStatus
//
this.chStatus.Text = "Status";
//
// chDatabase
//
this.chDatabase.Text = "Database";
//
// chName
//
this.chName.Text = "Name";
//
// chOwner
//
this.chOwner.Text = "Owner";
//
// splitterVertical
//
this.splitterVertical.Location = new System.Drawing.Point(104, 0);
this.splitterVertical.MinSize = 100;
this.splitterVertical.Name = "splitterVertical";
this.splitterVertical.Size = new System.Drawing.Size(3, 333);
this.splitterVertical.TabIndex = 7;
this.splitterVertical.TabStop = false;
this.splitterVertical.SplitterMoved += new System.Windows.Forms.SplitterEventHandler(this.splitterVertical_SplitterMoved);
//
// openHostFileDialog
//
this.openHostFileDialog.DefaultExt = "cpc";
this.openHostFileDialog.Filter = "CPCd configuration files (*.cpc)|*.cpc| All Files (*.*)|*.*";
this.openHostFileDialog.Title = "Import a CPCd configuration file";
this.openHostFileDialog.FileOk += new System.ComponentModel.CancelEventHandler(this.openHostFileDialog_FileOk);
//
// saveHostFileDialog
//
this.saveHostFileDialog.Filter = "CPCd configuration files (*.cpc)|*.cpc| All Files (*.*)|*.*";
this.saveHostFileDialog.Title = "Export a CPCd configuration file";
this.saveHostFileDialog.FileOk += new System.ComponentModel.CancelEventHandler(this.saveHostFileDialog_FileOk);
//
// CPC
//
this.AutoScaleBaseSize = new System.Drawing.Size(5, 13);
this.ClientSize = new System.Drawing.Size(488, 333);
this.Controls.AddRange(new System.Windows.Forms.Control[] {
this.splitterVertical,
this.panel1,
this.tvComputerCluster});
this.Menu = this.mainMenu;
this.Name = "CPC";
this.Text = "CPC";
this.Resize += new System.EventHandler(this.CPC_Resize);
this.MouseDown += new System.Windows.Forms.MouseEventHandler(this.CPC_MouseDown);
this.Closing += new System.ComponentModel.CancelEventHandler(this.CPC_Closing);
this.Load += new System.EventHandler(this.CPC_Load);
this.Activated += new System.EventHandler(this.CPC_Activated);
this.Paint += new System.Windows.Forms.PaintEventHandler(this.CPC_Paint);
this.panel1.ResumeLayout(false);
this.ResumeLayout(false);
}
#endregion
/// <summary>
/// The main entry point for the application.
/// </summary>
[STAThread]
static void Main()
{
    // Create the main window and hand control to the WinForms message loop.
    CPC mainWindow = new CPC();
    Application.Run(mainWindow);
}
/// <summary>
/// Refreshes the list view to reflect the newly selected tree node:
/// the "Computer"/"Database" root nodes show the summary views, while
/// a leaf node shows the processes of that computer or database.
/// </summary>
private void tvComputerCluster_AfterSelect(object sender, System.Windows.Forms.TreeViewEventArgs e)
{
    string nodeText = e.Node.Text;
    if (nodeText.Equals("Database"))
    {
        updateListViews("Database");
        return;
    }
    if (nodeText.Equals("Computer"))
    {
        updateListViews("Computer");
        return;
    }
    // Leaf node: the parent node tells us whether this is a database
    // or a computer. Guard against a parentless node to avoid an NRE.
    if (e.Node.Parent == null)
    {
        return;
    }
    if (e.Node.Parent.Text.Equals("Database"))
    {
        listView.Columns.Clear();
        listView.Columns.Add(this.chName);
        listView.Columns.Add(this.chDatabase);
        listView.Columns.Add(this.chStatus);
        listView.Columns.Add(this.chOwner);
        updateDatabaseView(nodeText);
        // FIX: the original fell through to the computer branch here
        // (and used "==" instead of Equals for that check).
        return;
    }
    if (e.Node.Parent.Text.Equals("Computer"))
    {
        // Show the processes running on the selected computer.
        Computer c = computerMgmt.getComputer(nodeText);
        ArrayList processes = c.getProcesses();
        listView.Items.Clear();
        listView.Columns.Clear();
        listView.Columns.Add(this.chComputer);
        listView.Columns.Add(this.chDatabase);
        listView.Columns.Add(this.chName);
        listView.Columns.Add(this.chStatus);
        listView.Columns.Add(this.chOwner);
        if (processes != null)
        {
            listView.BeginUpdate();
            string[] processcols = new string[5];
            foreach (Process p in processes)
            {
                processcols[0] = p.getComputer().getName();
                processcols[1] = p.getDatabase();
                processcols[2] = p.getName();
                processcols[3] = p.getStatusString();
                processcols[4] = p.getOwner();
                listView.Items.Add(new ListViewItem(processcols));
            }
            listView.EndUpdate();
        }
        listView.Show();
    }
}
// Prepares the tree-view context menu just before it pops up: first every
// item is reset to enabled + visible, then the menu is narrowed down based
// on which kind of node is selected ("Computer" root, "Database" root,
// computer leaf, or database leaf). Order matters: the reset must run
// before the per-node overrides below.
private void ctxTreeViewMenu_Popup(object sender, System.EventArgs e)
{
// Reset: everything enabled and visible by default.
tvCtxMenuComputerAdd.Enabled=true;
tvCtxMenuComputerRemove.Enabled=true;
tvCtxMenuComputerConnect.Enabled=true;
tvCtxMenuComputerDisconnect.Enabled=true;
tvCtxMenuComputerDefine.Enabled=true;
menuGetStatus.Enabled=true;
tvCtxMenuDatabaseNew.Enabled=true;
tvCtxMenuComputerAdd.Visible=true;
tvCtxMenuComputerRemove.Visible=true;
tvCtxMenuComputerConnect.Visible=true;
tvCtxMenuComputerDisconnect.Visible=true;
tvCtxMenuComputerDefine.Visible=true;
tvCtxMenuDatabaseNew.Visible=true;
tvCtxMenuProperties.Visible=true;
menuGetStatus.Visible=true;
// "Computer" root node: only adding a computer makes sense here.
if(tvComputerCluster.SelectedNode.Text.Equals("Computer"))
{
tvCtxMenuComputerAdd.Enabled=true;
tvCtxMenuComputerRemove.Enabled=false;
tvCtxMenuComputerConnect.Enabled=false;
tvCtxMenuComputerDisconnect.Enabled=false;
tvCtxMenuComputerDefine.Enabled=false;
tvCtxMenuDatabaseNew.Visible=false;
menuGetStatus.Visible=false;
return;
}
// "Database" root node: only database creation applies.
if(tvComputerCluster.SelectedNode.Text.Equals("Database"))
{
// ctxTreeViewMenu.MenuItems.Add(menuDatabaseItem1);
tvCtxMenuComputerAdd.Visible=false;
tvCtxMenuComputerRemove.Visible=false;
tvCtxMenuComputerConnect.Visible=false;
tvCtxMenuComputerDisconnect.Visible=false;
tvCtxMenuComputerDefine.Visible=false;
tvCtxMenuDatabaseNew.Visible=true;
tvCtxMenuDatabaseNew.Enabled=true;
menuGetStatus.Visible=false;
menuItem6.Visible=false;
return;
}
// Computer leaf: offer Connect OR Disconnect depending on the current
// connection status of the selected computer.
if(tvComputerCluster.SelectedNode.Parent.Text.Equals("Computer"))
{
Computer c= computerMgmt.getComputer(tvComputerCluster.SelectedNode.Text.ToString());
if(c.getStatus().Equals(Computer.Status.Disconnected))
{
tvCtxMenuComputerConnect.Enabled=true;
tvCtxMenuComputerDisconnect.Enabled=false;
}
else
{
tvCtxMenuComputerDisconnect.Enabled=true;
tvCtxMenuComputerConnect.Enabled=false;
}
tvCtxMenuComputerAdd.Enabled=false;
tvCtxMenuComputerRemove.Enabled=true;
menuGetStatus.Visible=false;
tvCtxMenuComputerDefine.Enabled=true;
tvCtxMenuDatabaseNew.Visible=false;
return;
}
// Database leaf: allow adding and status queries, nothing computer-specific.
if(tvComputerCluster.SelectedNode.Parent.Text.Equals("Database"))
{
tvCtxMenuComputerAdd.Enabled=true;
tvCtxMenuComputerRemove.Enabled=false;
tvCtxMenuComputerConnect.Enabled=false;
tvCtxMenuComputerDisconnect.Enabled=false;
tvCtxMenuComputerDefine.Enabled=false;
tvCtxMenuDatabaseNew.Visible=true;
menuGetStatus.Visible=true;
return;
}
}
// Selection-change hook for the list view; currently a no-op (the debug
// MessageBox is commented out). Kept because the handler is wired up in
// InitializeComponent.
private void listView_SelectedIndexChanged(object sender, System.EventArgs e)
{
//MessageBox.Show(listView.SelectedItems[0].Text);
}
// Mouse-down hook for the tree view; the select-on-click behavior is fully
// commented out, so this is currently a no-op. Presumably superseded by
// the AfterSelect handler - TODO confirm before deleting.
private void tvComputerCluster_MouseDown(object sender, System.Windows.Forms.MouseEventArgs e)
{ /*
TreeNode node = tvComputerCluster.GetNodeAt(e.X,e.Y);
if(node==null)
{
return;
}
tvComputerCluster.SelectedNode=node;
// updateListViews();
tvComputerCluster.SelectedNode.Expand();
*/
}
// Menu handler stub for computer removal; the implementation is entirely
// commented out. Actual removal is handled by computerMenuRemove_Click
// (wired to subMenuComputerRemove.Click in InitializeComponent).
private void subMenuComputerRemove_Click(object sender, System.EventArgs e)
{
//ComputerRemoveDialog crd=new ComputerRemoveDialog(computerMgmt);
//crd.Show();
//updateListViews();
/* string computer = tvComputerCluster.SelectedNode.Text.ToString();
if(MessageBox.Show(this,"Are you sure you want to remove: " +computer+ "?","Remove computer",MessageBoxButtons.YesNo)==DialogResult.Yes)
{
computerMgmt.RemoveComputer(computer);
}
*/
}
// Shows the modal "add computer" dialog and releases it afterwards.
private void subMenuComputerAdd_Click(object sender, System.EventArgs e)
{
ComputerAddDialog cad=new ComputerAddDialog(computerMgmt);
cad.ShowDialog();
cad.Dispose();
// updateListViews(tvComputerCluster.SelectedNode.Text.ToString());
}
// Rebuilds the list view for one of the two root tree nodes:
//  - "Computer": one row per known computer (name, status)
//  - "Database": one row per known database (name, status, owner)
// Any other node name leaves the list view untouched.
private void updateListViews(string node)
{
if(node.Equals("Computer"))
{
listView.Columns.Clear();
listView.Items.Clear();
ArrayList list= computerMgmt.getComputerCollection();
string [] computercols= new string[2];
listView.BeginUpdate();
listView.Columns.Add(this.chComputer);
listView.Columns.Add(this.chStatus);
foreach (Computer computer in list)
{
computercols[0]=computer.getName();
computercols[1]=computer.getStatusString();
// ListViewItem copies the subitem strings, so reusing the array is safe
ListViewItem lvc= new ListViewItem(computercols);
listView.Items.Add(lvc);
}
listView.EndUpdate();
listView.Show();
}
if(node.Equals("Database"))
{
ArrayList databases= computerMgmt.getDatabaseCollection();
string [] dbcols= new string[3];
listView.BeginUpdate();
listView.Items.Clear();
listView.Columns.Clear();
listView.Columns.Add(this.chDatabase);
listView.Columns.Add(this.chStatus);
listView.Columns.Add(this.chOwner);
foreach (Database db in databases)
{
dbcols[0]=db.getName();
dbcols[1]=db.getStatusString();
dbcols[2]=db.getOwner();
ListViewItem lvc= new ListViewItem(dbcols);
listView.Items.Add(lvc);
}
listView.EndUpdate();
listView.Show();
}
}
// Rebuilds the list view with one row per process belonging to the given
// database (computer, database, name, status, owner). When the database
// reports no processes the list is simply cleared.
public void updateDatabaseView(string database)
{
    Database db = computerMgmt.getDatabase(database);
    ArrayList processList = db.getProcesses();
    listView.Items.Clear();
    if (processList != null)
    {
        listView.BeginUpdate();
        listView.Columns.Clear();
        listView.Columns.Add(this.chComputer);
        listView.Columns.Add(this.chDatabase);
        listView.Columns.Add(this.chName);
        listView.Columns.Add(this.chStatus);
        listView.Columns.Add(this.chOwner);
        foreach (Process proc in processList)
        {
            string[] row = new string[5];
            row[0] = proc.getComputer().getName();
            row[1] = proc.getDatabase();
            row[2] = proc.getName();
            row[3] = proc.getStatusString();
            row[4] = proc.getOwner();
            listView.Items.Add(new ListViewItem(row));
        }
        listView.EndUpdate();
    }
    listView.Show();
}
// Re-populates the "Computer" (root node 0) and "Database" (root node 1)
// subtrees from the current management state.
private void updateTreeViews()
{
    ArrayList computerList = computerMgmt.getComputerCollection();
    ArrayList databaseList = computerMgmt.getDatabaseCollection();
    tvComputerCluster.BeginUpdate();
    tvComputerCluster.Nodes[0].Nodes.Clear();
    tvComputerCluster.Nodes[1].Nodes.Clear();
    if (computerList != null)
    {
        foreach (Computer computer in computerList)
        {
            tvComputerCluster.Nodes[0].Nodes.Add(new TreeNode(computer.getName().ToString()));
        }
    }
    if (databaseList != null)
    {
        foreach (Database db in databaseList)
        {
            tvComputerCluster.Nodes[1].Nodes.Add(new TreeNode(db.getName().ToString()));
        }
    }
    tvComputerCluster.EndUpdate();
}
// Mouse-down hook on the main form; the refresh calls are commented out,
// so this is currently a no-op (refresh happens in Paint/Activated instead).
private void CPC_MouseDown(object sender, System.Windows.Forms.MouseEventArgs e)
{
//updateListViews();
//updateTreeViews();
}
// Keeps the computer list fresh on repaint while the "Computer" root node
// is the current selection; any other selection is left alone.
private void CPC_Paint(object sender, System.Windows.Forms.PaintEventArgs e)
{
    TreeNode selected = tvComputerCluster.SelectedNode;
    if (selected != null && selected.Text.Equals("Computer"))
    {
        updateListViews("Computer");
    }
}
// Refreshes both panes when the window regains focus.
// FIX: guard against a null tree selection (same guard CPC_Paint uses);
// the original dereferenced SelectedNode unconditionally and could throw
// a NullReferenceException when the form is activated before any node
// has been selected.
private void CPC_Activated(object sender, System.EventArgs e)
{
    if (tvComputerCluster.SelectedNode != null)
    {
        updateListViews(tvComputerCluster.SelectedNode.Text.ToString());
    }
    updateTreeViews();
}
// Shows the modal "add computer" dialog, then releases it.
private void computerMenuAdd_Click(object sender, System.EventArgs e)
{
    ComputerAddDialog dialog = new ComputerAddDialog(computerMgmt);
    dialog.ShowDialog();
    dialog.Dispose();
}
// Asks for confirmation, then removes the selected computer (and with it
// all of its processes) via removeComputer().
private void computerMenuRemove_Click(object sender, System.EventArgs e)
{
    string computer = tvComputerCluster.SelectedNode.Text.ToString();
    DialogResult answer = MessageBox.Show(
        "Are you sure you want to remove: " + computer +"?\n" + "This will remove all processes on the computer!" ,
        "Remove selected computer",
        MessageBoxButtons.YesNo,
        MessageBoxIcon.Question);
    if (answer == DialogResult.Yes)
    {
        removeComputer(computer);
    }
}
// Removes the named computer from the management model and refreshes the
// GUI on success. The selection is moved off the node that is about to
// disappear before the tree is rebuilt.
// NOTE(review): the original fetched c.getProcesses() into a local that
// was never used (the per-process removal loop was commented out); that
// dead code is removed here. getProcesses() is used elsewhere as a plain
// getter - confirm it had no needed side effect.
private void removeComputer(string computer)
{
    if (computerMgmt.RemoveComputer(computer))
    {
        tvComputerCluster.SelectedNode = tvComputerCluster.SelectedNode.PrevVisibleNode;
        this.updateTreeViews();
        this.updateListViews("Computer");
        if (tvComputerCluster.SelectedNode != null)
            this.updateListViews(tvComputerCluster.SelectedNode.Text.ToString());
    }
}
// Toggles the sort direction of the list view each time a column header
// is clicked (ascending <-> descending).
private void listView_ColumnClick(object sender, System.Windows.Forms.ColumnClickEventArgs e)
{
    listView.Sorting = listView.Sorting.Equals(SortOrder.Ascending)
        ? SortOrder.Descending
        : SortOrder.Ascending;
}
// Launches the modal database-creation wizard.
private void subMenuDatabaseCreate_Click(object sender, System.EventArgs e)
{
    PanelWizard wizard = new PanelWizard(this.computerMgmt);
    wizard.ShowDialog();
}
// Opens the (modeless) process-definition dialog for the computer that is
// currently selected in the tree view.
private void tvCtxMenuComputerDefine_Click(object sender, System.EventArgs e)
{
    string selectedComputer = tvComputerCluster.SelectedNode.Text.ToString();
    ProcessDefineDialog dialog = new ProcessDefineDialog(this.computerMgmt, selectedComputer);
    dialog.Show();
}
// Activating a database row: shows that database's processes and selects
// the matching node in the "Database" subtree (root node index 1).
// FIX: the database name is captured BEFORE calling updateDatabaseView(),
// because updateDatabaseView() clears listView.Items, which empties
// SelectedItems and made the original re-read of SelectedItems[0] inside
// the loop fail.
private void listView_ItemActivate(object sender, System.EventArgs e)
{
    string databaseName = listView.SelectedItems[0].Text;
    updateDatabaseView(databaseName);
    for (int i = 0; i < tvComputerCluster.Nodes[1].Nodes.Count; i++)
    {
        if (tvComputerCluster.Nodes[1].Nodes[i].Text.Equals(databaseName))
        {
            tvComputerCluster.SelectedNode = tvComputerCluster.Nodes[1].Nodes[i];
            break;
        }
    }
}
// Enforces a 200x200 minimum window size, then rescales the two panes so
// they keep the proportions recorded in resizeWidthRatio/resizeHeightRatio.
private void CPC_Resize(object sender, System.EventArgs e)
{
    if (this.Width < 200)
    {
        this.Width = 200;
    }
    if (this.Height < 200)
    {
        this.Height = 200;
    }
    this.tvComputerCluster.Width = (int) (this.Width * this.resizeWidthRatio);
    this.listView.Height = (int) (this.Height * this.resizeHeightRatio);
}
// Background refresh loop: once per second, refreshes the computer list
// while the "Computer" root node is selected. The loop has no exit
// condition, so the thread only dies with the process.
// NOTE(review): this touches WinForms controls directly; if it runs on a
// worker thread (as the name and Thread.Sleep suggest) that is cross-thread
// control access without Control.Invoke - confirm how it is started.
private void updateGuiThread()
{
while(true) {
if(tvComputerCluster.SelectedNode!=null)
{
if(tvComputerCluster.SelectedNode.Text.ToString().Equals("Computer"))
updateListViews("Computer");
}
Thread.Sleep(1000);
}
}
// Manual refresh: rebuilds the computer list on demand.
private void computerMenuRefresh_Click(object sender, System.EventArgs e)
{
    updateListViews("Computer");
}
private void CPC_Closing(object sender, System.ComponentModel.CancelEventArgs e)
{
/*clean up*/
|
[
"\t\t\tArrayList comp = this.computerMgmt.getComputerCollection();"
] | 2,301
|
lcc
|
csharp
| null |
d2d2eb23f584337e252086c2162142cf3c5e40a39100b329
|
|
/*
* Copyright (C) 2000 - 2011 Silverpeas
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* As a special exception to the terms and conditions of version 3.0 of
* the GPL, you may redistribute this Program in connection with Free/Libre
* Open Source Software ("FLOSS") applications as described in Silverpeas's
* FLOSS exception. You should have recieved a copy of the text describing
* the FLOSS exception, and it is also available here:
* "http://www.silverpeas.org/legal/licensing"
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.stratelia.webactiv.almanach.control.ejb;
import com.silverpeas.calendar.Datable;
import com.silverpeas.calendar.Date;
import static com.silverpeas.util.StringUtil.isDefined;
import com.stratelia.webactiv.almanach.control.ExceptionDatesGenerator;
import com.stratelia.webactiv.almanach.model.EventDetail;
import com.stratelia.webactiv.almanach.model.EventOccurrence;
import static com.stratelia.webactiv.almanach.model.EventOccurrence.*;
import com.stratelia.webactiv.almanach.model.Periodicity;
import com.stratelia.webactiv.almanach.model.PeriodicityException;
import com.stratelia.webactiv.persistence.IdPK;
import com.stratelia.webactiv.persistence.PersistenceException;
import com.stratelia.webactiv.persistence.SilverpeasBeanDAO;
import com.stratelia.webactiv.persistence.SilverpeasBeanDAOFactory;
import static com.stratelia.webactiv.util.DateUtil.extractHour;
import static com.stratelia.webactiv.util.DateUtil.extractMinutes;
import com.stratelia.webactiv.util.ResourceLocator;
import com.stratelia.webactiv.util.exception.SilverpeasRuntimeException;
import java.util.*;
import java.util.TimeZone;
import net.fortuna.ical4j.model.Calendar;
import net.fortuna.ical4j.model.*;
import net.fortuna.ical4j.model.component.VEvent;
import net.fortuna.ical4j.model.property.CalScale;
import net.fortuna.ical4j.model.property.Categories;
import net.fortuna.ical4j.model.property.ExDate;
/**
* A generator of event occurrences built on the iCal4J library.
*/
public class ICal4JEventOccurrencesGenerator implements EventOccurrenceGenerator {
@Override
public List<EventOccurrence> generateOccurrencesInYear(java.util.Calendar year,
    List<EventDetail> events) {
  // Occurrence window: [Jan 1 of the year, Jan 1 of the following year),
  // both at midnight in the default time zone.
  int targetYear = year.get(java.util.Calendar.YEAR);
  java.util.Calendar startOfYear = java.util.Calendar.getInstance();
  startOfYear.clear();
  startOfYear.set(java.util.Calendar.YEAR, targetYear);
  java.util.Calendar startOfNextYear = java.util.Calendar.getInstance();
  startOfNextYear.clear();
  startOfNextYear.set(java.util.Calendar.YEAR, targetYear + 1);
  Period theYear = new Period(new DateTime(startOfYear.getTime()),
      new DateTime(startOfNextYear.getTime()));
  return generateOccurrencesOf(events, occuringIn(theYear));
}
@Override
public List<EventOccurrence> generateOccurrencesInMonth(java.util.Calendar month,
    List<EventDetail> events) {
  // Occurrence window: [1st of the month, 1st of the next month), both at
  // midnight in the default time zone.
  int targetYear = month.get(java.util.Calendar.YEAR);
  int targetMonth = month.get(java.util.Calendar.MONTH);
  java.util.Calendar startOfMonth = java.util.Calendar.getInstance();
  startOfMonth.clear();
  startOfMonth.set(targetYear, targetMonth, 1);
  java.util.Calendar startOfNextMonth = java.util.Calendar.getInstance();
  startOfNextMonth.clear();
  startOfNextMonth.set(targetYear, targetMonth, 1);
  startOfNextMonth.add(java.util.Calendar.MONTH, 1);
  Period theMonth = new Period(new DateTime(startOfMonth.getTime()),
      new DateTime(startOfNextMonth.getTime()));
  return generateOccurrencesOf(events, occuringIn(theMonth));
}
@Override
public List<EventOccurrence> generateOccurrencesInWeek(java.util.Calendar week,
    List<EventDetail> events) {
  // Occurrence window: [first day of the given week at midnight, same
  // instant one week later), using the week's own first-day-of-week.
  java.util.Calendar startOfWeek = java.util.Calendar.getInstance();
  startOfWeek.setTime(week.getTime());
  startOfWeek.set(java.util.Calendar.DAY_OF_WEEK, week.getFirstDayOfWeek());
  startOfWeek.set(java.util.Calendar.HOUR_OF_DAY, 0);
  startOfWeek.set(java.util.Calendar.MINUTE, 0);
  startOfWeek.set(java.util.Calendar.SECOND, 0);
  startOfWeek.set(java.util.Calendar.MILLISECOND, 0);
  java.util.Calendar startOfNextWeek = (java.util.Calendar) startOfWeek.clone();
  startOfNextWeek.add(java.util.Calendar.WEEK_OF_YEAR, 1);
  Period theWeek = new Period(new DateTime(startOfWeek.getTime()),
      new DateTime(startOfNextWeek.getTime()));
  return generateOccurrencesOf(events, occuringIn(theWeek));
}
@Override
public List<EventOccurrence> generateOccurrencesInRange(Date startDate, Date endDate,
    List<EventDetail> events) {
  // Delegate to the common generator over the requested date range.
  DateTime from = new DateTime(startDate);
  DateTime to = new DateTime(endDate);
  return generateOccurrencesOf(events, occuringIn(new Period(from, to)));
}
@Override
public List<EventOccurrence> generateOccurrencesFrom(Date date, List<EventDetail> events) {
  // iCal4J Period objects require a concrete end instant, so "no end" is
  // approximated with a horizon 100 years from now.
  java.util.Calendar farFuture = java.util.Calendar.getInstance();
  farFuture.add(java.util.Calendar.YEAR, 100);
  return generateOccurrencesInRange(date, new Date(farFuture.getTime()), events);
}
/**
 * Generates the occurrences of the specified events that occur in the specified period.
 * The events are first converted into an iCal4J calendar; each VEvent's recurrence set
 * is then expanded against the period, and every resulting sub-period becomes one
 * EventOccurrence. The returned list is sorted (natural ordering of EventOccurrence).
 * @param events the events for which the occurrences has to be generated.
 * @param inPeriod the period.
 * @return a list of event occurrences that occur in the specified period.
 */
private List<EventOccurrence> generateOccurrencesOf(final List<EventDetail> events,
    final Period inPeriod) {
  List<EventOccurrence> occurrences = new ArrayList<EventOccurrence>();
  Calendar iCal4JCalendar = anICalCalendarWith(events);
  ComponentList componentList = iCal4JCalendar.getComponents(Component.VEVENT);
  for (Object eventObject : componentList) {
    VEvent iCalEvent = (VEvent) eventObject;
    // The CATEGORIES property was stuffed with the event's index in the
    // input list by anICalCalendarWith(), so the VEvent can be mapped back
    // to its EventDetail here.
    int index = Integer.parseInt(iCalEvent.getProperties().getProperty(Property.CATEGORIES).
        getValue());
    EventDetail event = events.get(index);
    // Expand the event's recurrence rule(s) into concrete periods within inPeriod.
    PeriodList periodList = iCalEvent.calculateRecurrenceSet(inPeriod);
    for (Object recurrencePeriodObject : periodList) {
      Period recurrencePeriod = (Period) recurrencePeriodObject;
      // Re-attach the event's own start/end hours to the occurrence dates.
      Datable<?> startDate = toDatable(recurrencePeriod.getStart(), event.getStartHour());
      Datable<?> endDate = toDatable(recurrencePeriod.getEnd(), event.getEndHour());
      EventOccurrence occurrence = anOccurrenceOf(event, startingAt(startDate), endingAt(endDate)).
          withPriority(event.isPriority());
      occurrences.add(occurrence);
    }
  }
  Collections.sort(occurrences);
  return occurrences;
}
/**
 * Gets an iCal calendar with the specified events.
 * It uses ical4J to build the ical calendar.
 * Each event's index in the input list is stored in the VEvent's CATEGORIES
 * property, so that generateOccurrencesOf() can map a VEvent back to its
 * EventDetail after recurrence expansion.
 * @param events the events to register in the iCal4J calendar to return.
 * @return an iCal4J calendar instance with the events specified in parameter.
 */
private Calendar anICalCalendarWith(final List<EventDetail> events) {
  Calendar calendarAlmanach = new Calendar();
  calendarAlmanach.getProperties().add(CalScale.GREGORIAN);
  for (int i = 0; i < events.size(); i++) {
    EventDetail event = events.get(i);
    // Only periodic events can have exception dates in their recurrence.
    ExDate exceptionDates = null;
    if (event.isPeriodic()) {
      exceptionDates = generateExceptionDates(event);
    }
    VEvent iCalEvent = event.icalConversion(exceptionDates);
    // Abuse of CATEGORIES: stores the list index for the reverse lookup.
    iCalEvent.getProperties().add(new Categories(String.valueOf(i)));
    calendarAlmanach.getComponents().add(iCalEvent);
  }
  return calendarAlmanach;
}
/**
* Generates the dates at which it exist some exceptions in the periodicity of the specified event.
* @param event the detail on the event for which it can exist some exceptions in his recurrence.
* @return an ExDate instance with all of the exception dates.
*/
private ExDate generateExceptionDates(final EventDetail event) {
ExceptionDatesGenerator generator = new ExceptionDatesGenerator();
Set<java.util.Date> exceptionDates = generator.generateExceptionDates(event);
DateList exDateList = new DateList();
|
[
" for (java.util.Date anExceptionDate : exceptionDates) {"
] | 783
|
lcc
|
java
| null |
0467947f0aca59814dfee0185e479155997e02d24818a1ab
|
|
/* Copyright 2013-2014 Daikon Forge */
using UnityEngine;
using System;
using System.Collections;
using System.Collections.Generic;
/// <summary>
/// Provides the ability to bind a property on one object to the value of
/// another property on another object. This class uses an event-driven
/// model, which waits for an event to be raised indicating that a property
/// has been changed, rather than polling the property each frame.
/// </summary>
[Serializable]
[AddComponentMenu( "Daikon Forge/Data Binding/Event-Driven Property Binding" )]
public class dfEventDrivenPropertyBinding : dfPropertyBinding
{
#region Public fields
/// <summary>
/// The name of an event on the DataSource that will be raised when
/// the desired property is changed, allowing for an event-driven
/// rather than polling-driven binding. This value can be left blank
/// (or null), but if specified it *must* match the name of an event
/// on the source that will indicate that the property specified by
/// DataSource has been changed.
/// </summary>
public string SourceEventName;
/// <summary>
/// The name of an event on the DataTarget that will be raised when
/// the desired property is changed, allowing for an event-driven
/// rather than polling-driven binding. This value can be left blank
/// (or null), but if specified it *must* match the name of an event
/// on the source that will indicate that the property specified by
/// DataTarget has been changed.
/// </summary>
public string TargetEventName;
#endregion
#region Private runtime variables
protected dfEventBinding sourceEventBinding;
protected dfEventBinding targetEventBinding;
#endregion
#region Unity events
/// <summary>
/// Intentionally empty: this subclass is event-driven, so the per-frame
/// polling behavior of the base class Update is disabled by this override.
/// </summary>
public override void Update()
{
// Do nothing, override the default polling behavior
}
#endregion
#region Static helper methods
/// <summary>
/// Creates a dfEventDrivenPropertyBinding component that binds the source and target
/// properties. The binding component is attached to the source component's GameObject.
/// </summary>
/// <param name="sourceComponent">The component instance that will act as the data source</param>
/// <param name="sourceProperty">The name of the property on the source component that will be bound</param>
/// <param name="sourceEvent">The name of the event on the source component that signals that the source value has changed. Required.</param>
/// <param name="targetComponent">The component instance that will act as the data target</param>
/// <param name="targetProperty">The name of the property on the target component that will be bound</param>
/// <param name="targetEvent">The name of the event on the target component that signals that the target value has changed; pass NULL for a one-way binding</param>
/// <returns>An active and bound dfEventDrivenPropertyBinding instance</returns>
public static dfEventDrivenPropertyBinding Bind( Component sourceComponent, string sourceProperty, string sourceEvent, Component targetComponent, string targetProperty, string targetEvent )
{
return Bind( sourceComponent.gameObject, sourceComponent, sourceProperty, sourceEvent, targetComponent, targetProperty, targetEvent );
}
/// <summary>
/// Creates a dfEventDrivenPropertyBinding component that binds the source and target properties
/// </summary>
/// <param name="hostObject">The GameObject instance to attach the dfEventDrivenPropertyBinding component to. Required.</param>
/// <param name="sourceComponent">The component instance that will act as the data source. Required.</param>
/// <param name="sourceProperty">The name of the property on the source component that will be bound. Required.</param>
/// <param name="sourceEvent">The name of the event on the source component that will indicate that the source value should be copied to the target property. Required.</param>
/// <param name="targetComponent">The component instance that will act as the data target. Required.</param>
/// <param name="targetProperty">The name of the property on the target component that will be bound. Required.</param>
/// <param name="targetEvent">The name of the property on the target component that will indicate that the target value should be copied to the source property.
/// This value is optional, and should be set to NULL if two-way binding is not needed.</param>
/// <returns>An active and bound dfEventDrivenPropertyBinding instance</returns>
/// <exception cref="ArgumentNullException">Thrown when any required argument is null or empty</exception>
public static dfEventDrivenPropertyBinding Bind( GameObject hostObject, Component sourceComponent, string sourceProperty, string sourceEvent, Component targetComponent, string targetProperty, string targetEvent )
{
if( hostObject == null )
throw new ArgumentNullException( "hostObject" );
if( sourceComponent == null )
throw new ArgumentNullException( "sourceComponent" );
if( targetComponent == null )
throw new ArgumentNullException( "targetComponent" );
if( string.IsNullOrEmpty( sourceProperty ) )
throw new ArgumentNullException( "sourceProperty" );
if( string.IsNullOrEmpty( targetProperty ) )
throw new ArgumentNullException( "targetProperty" );
// Make sure that an event name is specified for the source. Note that the same
// check is not performed for the target event, because having a one-way binding
// is a valid condition.
if( string.IsNullOrEmpty( sourceEvent ) )
throw new ArgumentNullException( "sourceEvent" );
// Attach and configure the binding component, then activate it.
// NOTE: the component remains attached to hostObject even if Bind()
// fails internally (Bind() logs rather than throws).
var binding = hostObject.AddComponent<dfEventDrivenPropertyBinding>();
binding.DataSource = new dfComponentMemberInfo() { Component = sourceComponent, MemberName = sourceProperty };
binding.DataTarget = new dfComponentMemberInfo() { Component = targetComponent, MemberName = targetProperty };
binding.SourceEventName = sourceEvent;
binding.TargetEventName = targetEvent;
binding.Bind();
return binding;
}
#endregion
#region Public methods
/// <summary>
/// Bind the source and target properties. The source value is mirrored to
/// the target when the source event fires; if TargetEventName is also set,
/// the binding becomes two-way. Does nothing when already bound; logs an
/// error (without throwing) when either endpoint is misconfigured.
/// </summary>
public override void Bind()
{
if( isBound )
return;
if( !DataSource.IsValid || !DataTarget.IsValid )
{
Debug.LogError( string.Format( "Invalid data binding configuration - Source:{0}, Target:{1}", DataSource, DataTarget ) );
return;
}
sourceProperty = DataSource.GetProperty();
targetProperty = DataTarget.GetProperty();
if( ( sourceProperty != null ) && ( targetProperty != null ) )
{
// Create an EventBinding component to mirror the source property
if( !string.IsNullOrEmpty( SourceEventName ) && SourceEventName.Trim() != "" )
{
bindSourceEvent();
}
// Create an EventBinding component to mirror the target property
// (only when a target event is configured, i.e. a two-way binding)
if( !string.IsNullOrEmpty( TargetEventName ) && TargetEventName.Trim() != "" )
{
bindTargetEvent();
}
else
{
// One-way binding only: determine whether to use the format string.
// NOTE(review): FormatString is deliberately not considered for
// two-way bindings here - presumably because a formatted string
// could not be mirrored back to the source; confirm intent.
if( targetProperty.PropertyType == typeof( string ) )
{
if( sourceProperty.PropertyType != typeof( string ) )
{
useFormatString = !string.IsNullOrEmpty( FormatString );
}
}
}
// Ensure that both properties are synced at start
MirrorSourceProperty();
// The binding is only considered established when the source event was
// actually hooked up; an empty SourceEventName leaves isBound false.
isBound = ( sourceEventBinding != null );
}
}
/// <summary>
/// Unbind the source and target properties, tearing down and destroying any
/// event-mirroring components that were created by Bind()
/// </summary>
public override void Unbind()
{
    if( !isBound )
        return;
    isBound = false;
    // Tear down the source-side mirror, if one was created
    var source = sourceEventBinding;
    if( source != null )
    {
        sourceEventBinding = null;
        source.Unbind();
        Destroy( source );
    }
    // Tear down the target-side mirror, if one was created (two-way bindings only)
    var target = targetEventBinding;
    if( target != null )
    {
        targetEventBinding = null;
        target.Unbind();
        Destroy( target );
    }
}
/// <summary>
/// Copies the value of the source property to the target property,
/// applying the configured format string when enabled
/// </summary>
public void MirrorSourceProperty()
{
    var sourceValue = sourceProperty.Value;
    targetProperty.Value = formatValue( sourceValue );
}
/// <summary>
/// Copies the value of the target property back to the source property.
/// No formatting is applied in this direction.
/// </summary>
public void MirrorTargetProperty()
{
    var targetValue = targetProperty.Value;
    sourceProperty.Value = targetValue;
}
#endregion
#region Private utility methods
/// <summary>
/// Applies the configured FormatString to <paramref name="value"/> when format
/// usage is enabled; returns the value unchanged otherwise. A malformed format
/// string logs the error and disables this component at runtime instead of
/// repeatedly throwing on every mirror operation.
/// </summary>
private object formatValue( object value )
{
    if( !useFormatString || string.IsNullOrEmpty( FormatString ) )
        return value;
    try
    {
        return string.Format( FormatString, value );
    }
    catch( FormatException err )
    {
        Debug.LogError( err, this );
        // Disable to stop the bad format from firing on every subsequent update
        if( Application.isPlaying )
            this.enabled = false;
    }
    return value;
}
private void bindSourceEvent()
{
sourceEventBinding = gameObject.AddComponent<dfEventBinding>();
sourceEventBinding.hideFlags = HideFlags.HideAndDontSave | HideFlags.HideInInspector;
|
[
"\t\tsourceEventBinding.DataSource = new dfComponentMemberInfo()"
] | 1,021
|
lcc
|
csharp
| null |
7a6b465ccf619db236b9ebaa5024b7f2602fd097e1f14af8
|
|
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
package org.opendaylight.controller.devices.web;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentMap;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.opendaylight.controller.connectionmanager.IConnectionManager;
import org.opendaylight.controller.forwarding.staticrouting.IForwardingStaticRouting;
import org.opendaylight.controller.forwarding.staticrouting.StaticRouteConfig;
import org.opendaylight.controller.sal.authorization.Privilege;
import org.opendaylight.controller.sal.authorization.UserLevel;
import org.opendaylight.controller.sal.connection.ConnectionConstants;
import org.opendaylight.controller.sal.core.Config;
import org.opendaylight.controller.sal.core.Description;
import org.opendaylight.controller.sal.core.ForwardingMode;
import org.opendaylight.controller.sal.core.Name;
import org.opendaylight.controller.sal.core.Node;
import org.opendaylight.controller.sal.core.NodeConnector;
import org.opendaylight.controller.sal.core.Property;
import org.opendaylight.controller.sal.core.State;
import org.opendaylight.controller.sal.core.Tier;
import org.opendaylight.controller.sal.utils.GlobalConstants;
import org.opendaylight.controller.sal.utils.HexEncode;
import org.opendaylight.controller.sal.utils.NetUtils;
import org.opendaylight.controller.sal.utils.ServiceHelper;
import org.opendaylight.controller.sal.utils.Status;
import org.opendaylight.controller.sal.utils.StatusCode;
import org.opendaylight.controller.sal.utils.TierHelper;
import org.opendaylight.controller.switchmanager.ISwitchManager;
import org.opendaylight.controller.switchmanager.SpanConfig;
import org.opendaylight.controller.switchmanager.SubnetConfig;
import org.opendaylight.controller.switchmanager.Switch;
import org.opendaylight.controller.switchmanager.SwitchConfig;
import org.opendaylight.controller.web.DaylightWebUtil;
import org.opendaylight.controller.web.IDaylightWeb;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
@Controller
@RequestMapping("/")
public class Devices implements IDaylightWeb {
// Minimum user level allowed to access this web component
private static final UserLevel AUTH_LEVEL = UserLevel.CONTAINERUSER;
// Display name, URL identifier and tab ordering of the "Devices" page
private static final String WEB_NAME = "Devices";
private static final String WEB_ID = "devices";
private static final short WEB_ORDER = 1;
public Devices() {
    // Publish this bundle as a pluggable web component so the main UI can discover it
    ServiceHelper.registerGlobalService(IDaylightWeb.class, this, null);
}
// IDaylightWeb metadata accessors used by the web framework to render the tab.
@Override
public String getWebName() {
    return WEB_NAME;
}
@Override
public String getWebId() {
    return WEB_ID;
}
@Override
public short getWebOrder() {
    return WEB_ORDER;
}
@Override
public boolean isAuthorized(UserLevel userLevel) {
    // Smaller ordinal = more privileged, so any level at or above CONTAINERUSER passes
    return userLevel.ordinal() <= AUTH_LEVEL.ordinal();
}
// Builds the "Nodes Learnt" grid: one row per discovered switch with its name,
// tier, forwarding mode, MAC and an HTML-formatted port list. Rows are only
// produced when the user has at least read privilege on the container.
@RequestMapping(value = "/nodesLearnt", method = RequestMethod.GET)
@ResponseBody
public DevicesJsonBean getNodesLearnt(HttpServletRequest request, @RequestParam(required = false) String container) {
    Gson gson = new Gson();
    String containerName = (container == null) ? GlobalConstants.DEFAULT.toString() : container;
    // Derive the privilege this user has on the current container
    String userName = request.getUserPrincipal().getName();
    Privilege privilege = DaylightWebUtil.getContainerPrivilege(userName, containerName, this);
    ISwitchManager switchManager = (ISwitchManager) ServiceHelper.getInstance(ISwitchManager.class, containerName,
            this);
    List<Map<String, String>> nodeData = new ArrayList<Map<String, String>>();
    if (switchManager != null && privilege != Privilege.NONE) {
        for (Switch device : switchManager.getNetworkDevices()) {
            HashMap<String, String> nodeDatum = new HashMap<String, String>();
            Node node = device.getNode();
            Tier tier = (Tier) switchManager.getNodeProp(node, Tier.TierPropName);
            nodeDatum.put("containerName", containerName);
            Description description = (Description) switchManager.getNodeProp(node, Description.propertyName);
            // A missing description degrades to an empty node name
            String desc = (description == null) ? "" : description.getValue();
            nodeDatum.put("nodeName", desc);
            nodeDatum.put("nodeId", node.toString());
            int tierNumber = (tier == null) ? TierHelper.unknownTierNumber : tier.getValue();
            nodeDatum.put("tierName", TierHelper.getTierName(tierNumber) + " (Tier-" + tierNumber + ")");
            nodeDatum.put("tier", tierNumber + "");
            String modeStr = "0";
            ForwardingMode mode = null;
            // The forwarding-mode property is read from the default container's
            // switch manager when viewing a non-default container
            if (!containerName.equals(GlobalConstants.DEFAULT.toString())) {
                ISwitchManager switchManagerDefault = (ISwitchManager) ServiceHelper.getInstance(
                        ISwitchManager.class, GlobalConstants.DEFAULT.toString(), this);
                mode = (ForwardingMode) switchManagerDefault.getNodeProp(node, ForwardingMode.name);
            } else {
                mode = (ForwardingMode) switchManager.getNodeProp(node, ForwardingMode.name);
            }
            if (mode != null) {
                modeStr = String.valueOf(mode.getValue());
            }
            nodeDatum.put("mode", modeStr);
            // Snapshot of the row so far, used by the UI for row-level editing
            nodeDatum.put("json", gson.toJson(nodeDatum));
            nodeDatum.put("mac", HexEncode.bytesToHexStringFormat(device.getDataLayerAddress()));
            StringBuffer sb1 = new StringBuffer();
            Set<NodeConnector> nodeConnectorSet = device.getNodeConnectors();
            if (nodeConnectorSet != null && nodeConnectorSet.size() > 0) {
                Map<Short, String> portList = new HashMap<Short, String>();
                List<String> intfList = new ArrayList<String>();
                for (NodeConnector nodeConnector : nodeConnectorSet) {
                    String nodeConnectorNumberToStr = nodeConnector.getID().toString();
                    Name ncName = ((Name) switchManager.getNodeConnectorProp(nodeConnector, Name.NamePropName));
                    Config portConfig = ((Config) switchManager.getNodeConnectorProp(nodeConnector,
                            Config.ConfigPropName));
                    State portState = ((State) switchManager.getNodeConnectorProp(nodeConnector,
                            State.StatePropName));
                    String nodeConnectorName = (ncName != null) ? ncName.getValue() : "";
                    nodeConnectorName += " (" + nodeConnector.getID() + ")";
                    // Wrap the port label in a CSS span reflecting its admin/operational state
                    if (portConfig != null) {
                        if (portConfig.getValue() == Config.ADMIN_UP) {
                            if (portState != null && portState.getValue() == State.EDGE_UP) {
                                nodeConnectorName = "<span class='admin-up'>" + nodeConnectorName + "</span>";
                            } else if (portState == null || portState.getValue() == State.EDGE_DOWN) {
                                nodeConnectorName = "<span class='edge-down'>" + nodeConnectorName + "</span>";
                            }
                        } else if (portConfig.getValue() == Config.ADMIN_DOWN) {
                            nodeConnectorName = "<span class='admin-down'>" + nodeConnectorName + "</span>";
                        }
                    }
                    // Short (numeric) connector ids are collected for sorted display;
                    // any other id type is listed in iteration order instead
                    Class<?> idClass = nodeConnector.getID().getClass();
                    if (idClass.equals(Short.class)) {
                        portList.put(Short.parseShort(nodeConnectorNumberToStr), nodeConnectorName);
                    } else {
                        intfList.add(nodeConnectorName);
                    }
                }
                if (portList.size() > 0) {
                    Map<Short, String> sortedPortList = new TreeMap<Short, String>(portList);
                    for (Entry<Short, String> e : sortedPortList.entrySet()) {
                        sb1.append(e.getValue());
                        sb1.append("<br>");
                    }
                } else if (intfList.size() > 0) {
                    for (String intf : intfList) {
                        sb1.append(intf);
                        sb1.append("<br>");
                    }
                }
            }
            nodeDatum.put("ports", sb1.toString());
            nodeData.add(nodeDatum);
        }
    }
    // Assemble the response bean together with the column headers the grid expects
    DevicesJsonBean result = new DevicesJsonBean();
    result.setNodeData(nodeData);
    result.setPrivilege(privilege);
    List<String> columnNames = new ArrayList<String>();
    columnNames.add("Node ID");
    columnNames.add("Node Name");
    columnNames.add("Tier");
    columnNames.add("Mac Address");
    columnNames.add("Ports");
    columnNames.add("Port Status");
    result.setColumnNames(columnNames);
    return result;
}
// Returns the list of selectable tier names for the node-edit dialog.
@RequestMapping(value = "/tiers", method = RequestMethod.GET)
@ResponseBody
public List<String> getTiers() {
    return TierHelper.getTiers();
}
// Persists user edits to a learnt node: display name, tier and (on the default
// container only) its forwarding mode. Requires WRITE privilege.
@RequestMapping(value = "/nodesLearnt/update", method = RequestMethod.GET)
@ResponseBody
public StatusJsonBean updateLearntNode(@RequestParam("nodeName") String nodeName,
        @RequestParam("nodeId") String nodeId, @RequestParam("tier") String tier,
        @RequestParam("operationMode") String operationMode, HttpServletRequest request,
        @RequestParam(required = false) String container) {
    String containerName = (container == null) ? GlobalConstants.DEFAULT.toString() : container;
    // Authorization check
    String userName = request.getUserPrincipal().getName();
    if (DaylightWebUtil.getContainerPrivilege(userName, containerName, this) != Privilege.WRITE) {
        return unauthorizedMessage();
    }
    StatusJsonBean resultBean = new StatusJsonBean();
    try {
        ISwitchManager switchManager = (ISwitchManager) ServiceHelper.getInstance(ISwitchManager.class,
                containerName, this);
        Map<String, Property> nodeProperties = new HashMap<String, Property>();
        Property desc = new Description(nodeName);
        nodeProperties.put(desc.getName(), desc);
        Property nodeTier = new Tier(Integer.parseInt(tier));
        nodeProperties.put(nodeTier.getName(), nodeTier);
        // Forwarding mode is only a property of nodes in the default container
        if (containerName.equals(GlobalConstants.DEFAULT.toString())) {
            Property mode = new ForwardingMode(Integer.parseInt(operationMode));
            nodeProperties.put(mode.getName(), mode);
        }
        SwitchConfig cfg = new SwitchConfig(nodeId, nodeProperties);
        Status result = switchManager.updateNodeConfig(cfg);
        if (!result.isSuccess()) {
            resultBean.setStatus(false);
            resultBean.setMessage(result.getDescription());
        } else {
            resultBean.setStatus(true);
            resultBean.setMessage("Updated node information successfully");
            // NOTE(review): unlike the other audit calls in this class, no containerName
            // argument is passed here - confirm the intended auditlog overload.
            DaylightWebUtil.auditlog("Property", userName, "updated",
                    "of Node " + DaylightWebUtil.getNodeDesc(Node.fromString(nodeId), switchManager));
        }
    } catch (Exception e) {
        // Also catches NumberFormatException from the tier/operationMode parses
        resultBean.setStatus(false);
        resultBean.setMessage("Error updating node information. " + e.getMessage());
    }
    return resultBean;
}
// Returns all configured static routes for the container, plus the caller's
// privilege and the column headers the grid expects. Returns null when the
// routing service or its config map is unavailable.
@RequestMapping(value = "/staticRoutes", method = RequestMethod.GET)
@ResponseBody
public DevicesJsonBean getStaticRoutes(HttpServletRequest request, @RequestParam(required = false) String container) {
    Gson gson = new Gson();
    String containerName = (container == null) ? GlobalConstants.DEFAULT.toString() : container;
    // Derive the privilege this user has on the current container
    String userName = request.getUserPrincipal().getName();
    Privilege privilege = DaylightWebUtil.getContainerPrivilege(userName, containerName, this);
    IForwardingStaticRouting staticRouting = (IForwardingStaticRouting) ServiceHelper.getInstance(
            IForwardingStaticRouting.class, containerName, this);
    if (staticRouting == null) {
        return null;
    }
    List<Map<String, String>> staticRoutes = new ArrayList<Map<String, String>>();
    ConcurrentMap<String, StaticRouteConfig> routeConfigs = staticRouting.getStaticRouteConfigs();
    if (routeConfigs == null) {
        return null;
    }
    if (privilege != Privilege.NONE) {
        for (StaticRouteConfig conf : routeConfigs.values()) {
            Map<String, String> staticRoute = new HashMap<String, String>();
            staticRoute.put("name", conf.getName());
            staticRoute.put("staticRoute", conf.getStaticRoute());
            staticRoute.put("nextHopType", conf.getNextHopType());
            staticRoute.put("nextHop", conf.getNextHop());
            // Full config as JSON, used by the UI edit dialog
            staticRoute.put("json", gson.toJson(conf));
            staticRoutes.add(staticRoute);
        }
    }
    DevicesJsonBean result = new DevicesJsonBean();
    result.setPrivilege(privilege);
    result.setColumnNames(StaticRouteConfig.getGuiFieldsNames());
    result.setNodeData(staticRoutes);
    return result;
}
// Creates a new static route from the request parameters. Requires WRITE
// privilege on the container; successful additions are audit-logged.
@RequestMapping(value = "/staticRoute/add", method = RequestMethod.GET)
@ResponseBody
public StatusJsonBean addStaticRoute(@RequestParam("routeName") String routeName,
        @RequestParam("staticRoute") String staticRoute, @RequestParam("nextHop") String nextHop,
        HttpServletRequest request, @RequestParam(required = false) String container) {
    String containerName = (container == null) ? GlobalConstants.DEFAULT.toString() : container;
    // Only users with write access on this container may add routes
    String userName = request.getUserPrincipal().getName();
    if (DaylightWebUtil.getContainerPrivilege(userName, containerName, this) != Privilege.WRITE) {
        return unauthorizedMessage();
    }
    StatusJsonBean response = new StatusJsonBean();
    try {
        IForwardingStaticRouting routingService = (IForwardingStaticRouting) ServiceHelper.getInstance(
                IForwardingStaticRouting.class, containerName, this);
        StaticRouteConfig routeConfig = new StaticRouteConfig();
        routeConfig.setName(routeName);
        routeConfig.setStaticRoute(staticRoute);
        routeConfig.setNextHop(nextHop);
        Status status = routingService.addStaticRoute(routeConfig);
        if (!status.isSuccess()) {
            response.setStatus(false);
            response.setMessage(status.getDescription());
        } else {
            response.setStatus(true);
            response.setMessage("Static Route saved successfully");
            DaylightWebUtil.auditlog("Static Route", userName, "added", routeName, containerName);
        }
    } catch (Exception e) {
        response.setStatus(false);
        response.setMessage("Error - " + e.getMessage());
    }
    return response;
}
// Removes a comma-separated list of static routes. Stops at the first failed
// removal and reports its error; each successful removal is audit-logged.
@RequestMapping(value = "/staticRoute/delete", method = RequestMethod.GET)
@ResponseBody
public StatusJsonBean deleteStaticRoute(@RequestParam("routesToDelete") String routesToDelete,
        HttpServletRequest request, @RequestParam(required = false) String container) {
    String containerName = (container == null) ? GlobalConstants.DEFAULT.toString() : container;
    // Authorization check
    String userName = request.getUserPrincipal().getName();
    if (DaylightWebUtil.getContainerPrivilege(userName, containerName, this) != Privilege.WRITE) {
        return unauthorizedMessage();
    }
    StatusJsonBean response = new StatusJsonBean();
    try {
        IForwardingStaticRouting routingService = (IForwardingStaticRouting) ServiceHelper.getInstance(
                IForwardingStaticRouting.class, containerName, this);
        // Assume success up front; the first failed removal overwrites this and stops the loop
        response.setStatus(true);
        response.setMessage("Successfully deleted selected static routes");
        for (String routeName : routesToDelete.split(",")) {
            Status removalStatus = routingService.removeStaticRoute(routeName);
            if (!removalStatus.isSuccess()) {
                response.setStatus(false);
                response.setMessage(removalStatus.getDescription());
                break;
            }
            DaylightWebUtil.auditlog("Static Route", userName, "removed", routeName, containerName);
        }
    } catch (Exception e) {
        response.setStatus(false);
        response.setMessage("Error occurred while deleting static routes. " + e.getMessage());
    }
    return response;
}
// Lists all subnet gateway configurations, including each subnet's gateway ports
// serialized as JSON for the expandable rows in the UI.
@RequestMapping(value = "/subnets", method = RequestMethod.GET)
@ResponseBody
public DevicesJsonBean getSubnetGateways(HttpServletRequest request,
        @RequestParam(required = false) String container) {
    Gson gson = new Gson();
    List<Map<String, String>> subnets = new ArrayList<Map<String, String>>();
    String containerName = (container == null) ? GlobalConstants.DEFAULT.toString() : container;
    // Derive the privilege this user has on the current container
    String userName = request.getUserPrincipal().getName();
    Privilege privilege = DaylightWebUtil.getContainerPrivilege(userName, containerName, this);
    if (privilege != Privilege.NONE) {
        ISwitchManager switchManager = (ISwitchManager) ServiceHelper.getInstance(ISwitchManager.class,
                containerName, this);
        if (switchManager != null) {
            for (SubnetConfig conf : switchManager.getSubnetsConfigList()) {
                Map<String, String> subnet = new HashMap<String, String>();
                subnet.put("name", conf.getName());
                subnet.put("subnet", conf.getSubnet());
                List<SubnetGatewayPortBean> portsList = new ArrayList<SubnetGatewayPortBean>();
                Iterator<NodeConnector> itor = conf.getNodeConnectors().iterator();
                while (itor.hasNext()) {
                    SubnetGatewayPortBean bean = new SubnetGatewayPortBean();
                    NodeConnector nodeConnector = itor.next();
                    String nodeName = getNodeDesc(nodeConnector.getNode().toString(), containerName);
                    Name ncName = ((Name) switchManager.getNodeConnectorProp(nodeConnector, Name.NamePropName));
                    // A missing Name property degrades to an empty port name
                    String nodeConnectorName = (ncName != null) ? ncName.getValue() : "";
                    bean.setNodeName(nodeName);
                    bean.setNodePortName(nodeConnectorName);
                    bean.setNodeId(nodeConnector.getNode().toString());
                    bean.setNodePortId(nodeConnector.toString());
                    portsList.add(bean);
                }
                subnet.put("nodePorts", gson.toJson(portsList));
                subnets.add(subnet);
            }
        }
    }
    DevicesJsonBean result = new DevicesJsonBean();
    result.setPrivilege(privilege);
    result.setColumnNames(SubnetConfig.getGuiFieldsNames());
    result.setNodeData(subnets);
    return result;
}
// Creates a new subnet gateway with an initially empty port list. Requires
// WRITE privilege; successful additions are audit-logged.
@RequestMapping(value = "/subnetGateway/add", method = RequestMethod.GET)
@ResponseBody
public StatusJsonBean addSubnetGateways(@RequestParam("gatewayName") String gatewayName,
        @RequestParam("gatewayIPAddress") String gatewayIPAddress, HttpServletRequest request,
        @RequestParam(required = false) String container) {
    String containerName = (container == null) ? GlobalConstants.DEFAULT.toString() : container;
    // Authorization check
    String userName = request.getUserPrincipal().getName();
    if (DaylightWebUtil.getContainerPrivilege(userName, containerName, this) != Privilege.WRITE) {
        return unauthorizedMessage();
    }
    StatusJsonBean response = new StatusJsonBean();
    try {
        ISwitchManager switchManager = (ISwitchManager) ServiceHelper.getInstance(ISwitchManager.class,
                containerName, this);
        // New subnets start with no node-connector (port) assignments
        SubnetConfig subnetConfig = new SubnetConfig(gatewayName, gatewayIPAddress, new ArrayList<String>());
        Status status = switchManager.addSubnet(subnetConfig);
        if (!status.isSuccess()) {
            response.setStatus(false);
            response.setMessage(status.getDescription());
        } else {
            response.setStatus(true);
            response.setMessage("Added gateway address successfully");
            DaylightWebUtil.auditlog("Subnet Gateway", userName, "added", gatewayName, containerName);
        }
    } catch (Exception e) {
        response.setStatus(false);
        response.setMessage(e.getMessage());
    }
    return response;
}
// Removes a comma-separated list of subnet gateways. Stops at the first failed
// removal and reports its error; each successful removal is audit-logged.
@RequestMapping(value = "/subnetGateway/delete", method = RequestMethod.GET)
@ResponseBody
public StatusJsonBean deleteSubnetGateways(@RequestParam("gatewaysToDelete") String gatewaysToDelete,
        HttpServletRequest request, @RequestParam(required = false) String container) {
    String containerName = (container == null) ? GlobalConstants.DEFAULT.toString() : container;
    // Authorization check.
    // FIX: check the privilege against the resolved containerName; previously this
    // passed the raw "container" parameter (possibly null), unlike every other
    // handler in this class.
    String userName = request.getUserPrincipal().getName();
    if (DaylightWebUtil.getContainerPrivilege(userName, containerName, this) != Privilege.WRITE) {
        return unauthorizedMessage();
    }
    StatusJsonBean resultBean = new StatusJsonBean();
    try {
        ISwitchManager switchManager = (ISwitchManager) ServiceHelper.getInstance(ISwitchManager.class,
                containerName, this);
        String[] subnets = gatewaysToDelete.split(",");
        // Assume success; the first failed removal overwrites the status and stops the loop
        resultBean.setStatus(true);
        // FIX: this is the delete handler - the success message previously read
        // "Added gateway address successfully" (copy-paste from the add handler)
        resultBean.setMessage("Successfully deleted selected subnet gateways");
        for (String subnet : subnets) {
            Status result = switchManager.removeSubnet(subnet);
            if (!result.isSuccess()) {
                resultBean.setStatus(false);
                resultBean.setMessage(result.getDescription());
                break;
            }
            DaylightWebUtil.auditlog("Subnet Gateway", userName, "removed", subnet, containerName);
        }
    } catch (Exception e) {
        resultBean.setStatus(false);
        resultBean.setMessage(e.getMessage());
    }
    return resultBean;
}
// Adds one or more ports (comma-separated) to an existing subnet gateway.
// Requires WRITE privilege; each added port is audit-logged on success.
@RequestMapping(value = "/subnetGateway/ports/add", method = RequestMethod.GET)
@ResponseBody
public StatusJsonBean addSubnetGatewayPort(@RequestParam("portsName") String portsName,
        @RequestParam("ports") String ports, @RequestParam("nodeId") String nodeId, HttpServletRequest request,
        @RequestParam(required = false) String container) {
    String containerName = (container == null) ? GlobalConstants.DEFAULT.toString() : container;
    // Authorization check
    String userName = request.getUserPrincipal().getName();
    if (DaylightWebUtil.getContainerPrivilege(userName, containerName, this) != Privilege.WRITE) {
        return unauthorizedMessage();
    }
    StatusJsonBean response = new StatusJsonBean();
    try {
        ISwitchManager switchManager = (ISwitchManager) ServiceHelper.getInstance(ISwitchManager.class,
                containerName, this);
        // The request carries the ports as a single comma-separated string
        List<String> portsToAdd = new ArrayList<String>();
        for (String port : ports.split(",")) {
            portsToAdd.add(port);
        }
        Status status = switchManager.addPortsToSubnet(portsName, portsToAdd);
        if (!status.isSuccess()) {
            response.setStatus(false);
            response.setMessage(status.getDescription());
        } else {
            response.setStatus(true);
            response.setMessage("Added ports to subnet gateway address successfully");
            for (String port : portsToAdd) {
                DaylightWebUtil.auditlog("Port", userName, "added",
                        DaylightWebUtil.getPortName(NodeConnector.fromString(port), switchManager)
                                + " to Subnet Gateway " + portsName, containerName);
            }
        }
    } catch (Exception e) {
        response.setStatus(false);
        response.setMessage(e.getMessage());
    }
    return response;
}
// Removes one or more ports (comma-separated) from a subnet gateway.
// Requires WRITE privilege; each removed port is audit-logged on success.
@RequestMapping(value = "/subnetGateway/ports/delete", method = RequestMethod.GET)
@ResponseBody
public StatusJsonBean deleteSubnetGatewayPort(@RequestParam("gatewayName") String gatewayName,
        @RequestParam("nodePort") String nodePort, HttpServletRequest request,
        @RequestParam(required = false) String container) {
    String containerName = (container == null) ? GlobalConstants.DEFAULT.toString() : container;
    // Authorization check
    String userName = request.getUserPrincipal().getName();
    if (DaylightWebUtil.getContainerPrivilege(userName, containerName, this) != Privilege.WRITE) {
        return unauthorizedMessage();
    }
    StatusJsonBean response = new StatusJsonBean();
    try {
        ISwitchManager switchManager = (ISwitchManager) ServiceHelper.getInstance(ISwitchManager.class,
                containerName, this);
        // The request carries the ports as a single comma-separated string
        List<String> portsToRemove = new ArrayList<String>();
        for (String port : nodePort.split(",")) {
            portsToRemove.add(port);
        }
        Status status = switchManager.removePortsFromSubnet(gatewayName, portsToRemove);
        if (!status.isSuccess()) {
            response.setStatus(false);
            response.setMessage(status.getDescription());
        } else {
            response.setStatus(true);
            response.setMessage("Deleted port from subnet gateway address successfully");
            for (String port : portsToRemove) {
                DaylightWebUtil.auditlog("Port", userName, "removed",
                        DaylightWebUtil.getPortName(NodeConnector.fromString(port), switchManager)
                                + " from Subnet Gateway " + gatewayName, containerName);
            }
        }
    } catch (Exception e) {
        response.setStatus(false);
        response.setMessage(e.getMessage());
    }
    return response;
}
// Returns the SPAN (port mirroring) configurations for the container. Each config
// is serialized to JSON via Gson, then re-parsed with Jackson so display-only
// fields (node and port names) can be merged into the row map.
@RequestMapping(value = "/spanPorts", method = RequestMethod.GET)
@ResponseBody
public DevicesJsonBean getSpanPorts(HttpServletRequest request, @RequestParam(required = false) String container) {
    Gson gson = new Gson();
    List<Map<String, String>> spanConfigs = new ArrayList<Map<String, String>>();
    String containerName = (container == null) ? GlobalConstants.DEFAULT.toString() : container;
    // Derive the privilege this user has on the current container
    String userName = request.getUserPrincipal().getName();
    Privilege privilege = DaylightWebUtil.getContainerPrivilege(userName, containerName, this);
    if (privilege != Privilege.NONE) {
        List<String> spanConfigs_json = new ArrayList<String>();
        ISwitchManager switchManager = (ISwitchManager) ServiceHelper.getInstance(ISwitchManager.class,
                containerName, this);
        if (switchManager != null) {
            for (SpanConfig conf : switchManager.getSpanConfigList()) {
                spanConfigs_json.add(gson.toJson(conf));
            }
        }
        ObjectMapper mapper = new ObjectMapper();
        for (String config_json : spanConfigs_json) {
            try {
                @SuppressWarnings("unchecked")
                Map<String, String> config_data = mapper.readValue(config_json, HashMap.class);
                Map<String, String> config = new HashMap<String, String>();
                for (String name : config_data.keySet()) {
                    config.put(name, config_data.get(name));
                    // Add switch portName value (non-configuration field).
                    // NOTE(review): these derived puts are re-executed for every key in
                    // config_data; each pass produces the same values, so they could be
                    // hoisted out of this loop - confirm before restructuring.
                    config.put("nodeName", getNodeDesc(config_data.get("nodeId"), containerName));
                    NodeConnector spanPortNodeConnector = NodeConnector.fromString(config_data.get("spanPort"));
                    Name ncName = ((Name) switchManager.getNodeConnectorProp(spanPortNodeConnector,
                            Name.NamePropName));
                    String spanPortName = (ncName != null) ? ncName.getValue() : "";
                    config.put("spanPortName", spanPortName);
                }
                config.put("json", config_json);
                spanConfigs.add(config);
            } catch (Exception e) {
                // TODO: Handle the exception - a malformed config is currently dropped silently.
            }
        }
    }
    DevicesJsonBean result = new DevicesJsonBean();
    result.setPrivilege(privilege);
    result.setColumnNames(SpanConfig.getGuiFieldsNames());
    result.setNodeData(spanConfigs);
    return result;
}
// Returns, as JSON, every known node with its list of ports; used to populate
// the node/port pickers. Returns null when the user has no privilege on the
// container or the switch manager service is unavailable.
@RequestMapping(value = "/nodeports")
@ResponseBody
public String getNodePorts(HttpServletRequest request, @RequestParam(required = false) String container) {
    String containerName = (container == null) ? GlobalConstants.DEFAULT.toString() : container;
    // Derive the privilege this user has on the current container
    String userName = request.getUserPrincipal().getName();
    if (DaylightWebUtil.getContainerPrivilege(userName, containerName, this) == Privilege.NONE) {
        return null;
    }
    ISwitchManager switchManager = (ISwitchManager) ServiceHelper.getInstance(ISwitchManager.class, containerName,
            this);
    if (switchManager == null) {
        return null;
    }
    List<NodeJsonBean> nodeJsonBeans = new ArrayList<NodeJsonBean>();
    for (Switch node : switchManager.getNetworkDevices()) {
        NodeJsonBean nodeJsonBean = new NodeJsonBean();
        List<PortJsonBean> port = new ArrayList<PortJsonBean>();
        Set<NodeConnector> nodeConnectorSet = node.getNodeConnectors();
        if (nodeConnectorSet != null) {
            for (NodeConnector nodeConnector : nodeConnectorSet) {
                // FIX: the Name property may be absent for a connector; previously this
                // called .getValue() on the raw lookup result and threw a
                // NullPointerException. Fall back to an empty name, matching the
                // null-guards used by every other handler in this class.
                Name ncName = (Name) switchManager.getNodeConnectorProp(nodeConnector, Name.NamePropName);
                String nodeConnectorName = (ncName != null) ? ncName.getValue() : "";
                port.add(new PortJsonBean(nodeConnector.getID().toString(), nodeConnectorName, nodeConnector
                        .toString()));
            }
        }
        nodeJsonBean.setNodeId(node.getNode().toString());
        nodeJsonBean.setNodeName(getNodeDesc(node.getNode().toString(), containerName));
        nodeJsonBean.setNodePorts(port);
        nodeJsonBeans.add(nodeJsonBean);
    }
    return new Gson().toJson(nodeJsonBeans);
}
@RequestMapping(value = "/spanPorts/add", method = RequestMethod.GET)
@ResponseBody
public StatusJsonBean addSpanPort(@RequestParam("jsonData") String jsonData, HttpServletRequest request,
@RequestParam(required = false) String container) {
String containerName = (container == null) ? GlobalConstants.DEFAULT.toString() : container;
// Authorization check
String userName = request.getUserPrincipal().getName();
if (DaylightWebUtil.getContainerPrivilege(userName, containerName, this) != Privilege.WRITE) {
return unauthorizedMessage();
}
StatusJsonBean resultBean = new StatusJsonBean();
try {
|
[
" Gson gson = new Gson();"
] | 2,177
|
lcc
|
java
| null |
5eaaec0ad17e5ea95bb6b35275422001e3f763eea091799b
|
|
//#############################################################################
//# #
//# Copyright (C) <2015> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//# IMS MAXIMS provides absolutely NO GUARANTEE OF THE CLINICAL SAFTEY of #
//# this program. Users of this software do so entirely at their own risk. #
//# IMS MAXIMS only ensures the Clinical Safety of unaltered run-time #
//# software that it builds, deploys and maintains. #
//# #
//#############################################################################
//#EOH
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5589.25814)
// Copyright (C) 1995-2015 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.clinical.forms.edischargeallergiesetccomponent;
import ims.framework.*;
import ims.framework.controls.*;
import ims.framework.enumerations.*;
import ims.framework.utils.RuntimeAnchoring;
public class GenForm extends FormBridge
{
private static final long serialVersionUID = 1L;
// Forwards custom-control change notifications into the framework's standard
// value-changed event pipeline. (Generated code - see file header.)
protected void fireCustomControlValueChanged()
{
    super.fireValueChanged();
}
// Report support: delegates to a ReportDataProvider built over this form's report fields.
public boolean canProvideData(IReportSeed[] reportSeeds)
{
    return new ReportDataProvider(reportSeeds, this.getFormReportFields()).canProvideData();
}
public boolean hasData(IReportSeed[] reportSeeds)
{
    return new ReportDataProvider(reportSeeds, this.getFormReportFields()).hasData();
}
// Convenience overload: includes null-valued fields by default.
public IReportField[] getData(IReportSeed[] reportSeeds)
{
    return getData(reportSeeds, false);
}
public IReportField[] getData(IReportSeed[] reportSeeds, boolean excludeNulls)
{
    return new ReportDataProvider(reportSeeds, this.getFormReportFields(), excludeNulls).getData();
}
public static class ctnAlertContainer extends ContainerBridge
{
private static final long serialVersionUID = 1L;
// Generated type-safe wrapper around the alert-category combo box; constrains
// row values to the AlertType lookup. (Generated code - see file header.)
public static class cmbAlertCategoryComboBox extends ComboBoxBridge
{
    private static final long serialVersionUID = 1L;
    public void newRow(ims.core.vo.lookups.AlertType value, String text)
    {
        super.control.newRow(value, text);
    }
    public void newRow(ims.core.vo.lookups.AlertType value, String text, ims.framework.utils.Image image)
    {
        super.control.newRow(value, text, image);
    }
    public void newRow(ims.core.vo.lookups.AlertType value, String text, ims.framework.utils.Color textColor)
    {
        super.control.newRow(value, text, textColor);
    }
    public void newRow(ims.core.vo.lookups.AlertType value, String text, ims.framework.utils.Image image, ims.framework.utils.Color textColor)
    {
        super.control.newRow(value, text, image, textColor);
    }
    public boolean removeRow(ims.core.vo.lookups.AlertType value)
    {
        return super.control.removeRow(value);
    }
    public ims.core.vo.lookups.AlertType getValue()
    {
        return (ims.core.vo.lookups.AlertType)super.control.getValue();
    }
    public void setValue(ims.core.vo.lookups.AlertType value)
    {
        super.control.setValue(value);
    }
}
// Generated type-safe wrapper around the alert combo box; constrains row
// values to the AlertType lookup. (Generated code - see file header.)
public static class cmbAlertAlertComboBox extends ComboBoxBridge
{
    private static final long serialVersionUID = 1L;
    public void newRow(ims.core.vo.lookups.AlertType value, String text)
    {
        super.control.newRow(value, text);
    }
    public void newRow(ims.core.vo.lookups.AlertType value, String text, ims.framework.utils.Image image)
    {
        super.control.newRow(value, text, image);
    }
    public void newRow(ims.core.vo.lookups.AlertType value, String text, ims.framework.utils.Color textColor)
    {
        super.control.newRow(value, text, textColor);
    }
    public void newRow(ims.core.vo.lookups.AlertType value, String text, ims.framework.utils.Image image, ims.framework.utils.Color textColor)
    {
        super.control.newRow(value, text, image, textColor);
    }
    public boolean removeRow(ims.core.vo.lookups.AlertType value)
    {
        return super.control.removeRow(value);
    }
    public ims.core.vo.lookups.AlertType getValue()
    {
        return (ims.core.vo.lookups.AlertType)super.control.getValue();
    }
    public void setValue(ims.core.vo.lookups.AlertType value)
    {
        super.control.setValue(value);
    }
}
public static class cmbAlertSourceComboBox extends ComboBoxBridge
{
private static final long serialVersionUID = 1L;
public void newRow(ims.core.vo.lookups.SourceofInformation value, String text)
{
super.control.newRow(value, text);
}
public void newRow(ims.core.vo.lookups.SourceofInformation value, String text, ims.framework.utils.Image image)
{
super.control.newRow(value, text, image);
}
public void newRow(ims.core.vo.lookups.SourceofInformation value, String text, ims.framework.utils.Color textColor)
{
super.control.newRow(value, text, textColor);
}
public void newRow(ims.core.vo.lookups.SourceofInformation value, String text, ims.framework.utils.Image image, ims.framework.utils.Color textColor)
{
super.control.newRow(value, text, image, textColor);
}
public boolean removeRow(ims.core.vo.lookups.SourceofInformation value)
{
return super.control.removeRow(value);
}
public ims.core.vo.lookups.SourceofInformation getValue()
{
return (ims.core.vo.lookups.SourceofInformation)super.control.getValue();
}
public void setValue(ims.core.vo.lookups.SourceofInformation value)
{
super.control.setValue(value);
}
}
protected void setContext(Form form, ims.framework.interfaces.IAppForm appForm, Control control, FormLoader loader, Images form_images_local, ContextMenus contextMenus, Integer startControlID, ims.framework.utils.SizeInfo designSize, ims.framework.utils.SizeInfo runtimeSize, Integer startTabIndex, boolean skipContextValidation) throws Exception
{
if(form == null)
throw new RuntimeException("Invalid form");
if(appForm == null)
throw new RuntimeException("Invalid application form");
if(control == null); // this is to avoid eclipse warning only.
if(loader == null); // this is to avoid eclipse warning only.
if(form_images_local == null); // this is to avoid eclipse warning only.
if(contextMenus == null); // this is to avoid eclipse warning only.
if(startControlID == null)
throw new RuntimeException("Invalid startControlID");
if(designSize == null); // this is to avoid eclipse warning only.
if(runtimeSize == null); // this is to avoid eclipse warning only.
if(startTabIndex == null)
throw new RuntimeException("Invalid startTabIndex");
// Custom Controls
ims.framework.CustomComponent instance1 = factory.getEmptyCustomComponent();
RuntimeAnchoring anchoringHelper1 = new RuntimeAnchoring(designSize, runtimeSize, 448, 56, 344, 56, ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT);
ims.framework.FormUiLogic m_ccAlertAuthorForm = loader.loadComponent(102228, appForm, startControlID * 10 + 1000, anchoringHelper1.getSize(), instance1, startTabIndex.intValue() + 22, skipContextValidation);
//ims.framework.Control m_ccAlertAuthorControl = factory.getControl(CustomComponent.class, new Object[] { control, new Integer(startControlID.intValue() + 1000), new Integer(448), new Integer(56), new Integer(344), new Integer(56), ControlState.DISABLED, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT, new Integer(startTabIndex.intValue() + 22), m_ccAlertAuthorForm, instance1 } );
ims.framework.Control m_ccAlertAuthorControl = factory.getControl(CustomComponent.class, new Object[] { control, new Integer(startControlID.intValue() + 1001), new Integer(anchoringHelper1.getX()), new Integer(anchoringHelper1.getY()), new Integer(anchoringHelper1.getWidth()), new Integer(anchoringHelper1.getHeight()), ControlState.DISABLED, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT, new Integer(startTabIndex.intValue() + 22), m_ccAlertAuthorForm, instance1, Boolean.FALSE } );
super.addControl(m_ccAlertAuthorControl);
Menu[] menus1 = m_ccAlertAuthorForm.getForm().getRegisteredMenus();
for(int x = 0; x < menus1.length; x++)
{
form.registerMenu(menus1[x]);
}
// Label Controls
RuntimeAnchoring anchoringHelper2 = new RuntimeAnchoring(designSize, runtimeSize, 8, 8, 60, 17, ims.framework.enumerations.ControlAnchoring.TOPLEFT);
super.addControl(factory.getControl(Label.class, new Object[] { control, new Integer(startControlID.intValue() + 1002), new Integer(anchoringHelper2.getX()), new Integer(anchoringHelper2.getY()), new Integer(anchoringHelper2.getWidth()), new Integer(anchoringHelper2.getHeight()), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFT, "Category:", new Integer(1), null, new Integer(0)}));
RuntimeAnchoring anchoringHelper3 = new RuntimeAnchoring(designSize, runtimeSize, 8, 32, 36, 17, ims.framework.enumerations.ControlAnchoring.TOPLEFT);
super.addControl(factory.getControl(Label.class, new Object[] { control, new Integer(startControlID.intValue() + 1003), new Integer(anchoringHelper3.getX()), new Integer(anchoringHelper3.getY()), new Integer(anchoringHelper3.getWidth()), new Integer(anchoringHelper3.getHeight()), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFT, "Alert:", new Integer(1), null, new Integer(0)}));
RuntimeAnchoring anchoringHelper4 = new RuntimeAnchoring(designSize, runtimeSize, 8, 56, 63, 17, ims.framework.enumerations.ControlAnchoring.TOPLEFT);
super.addControl(factory.getControl(Label.class, new Object[] { control, new Integer(startControlID.intValue() + 1004), new Integer(anchoringHelper4.getX()), new Integer(anchoringHelper4.getY()), new Integer(anchoringHelper4.getWidth()), new Integer(anchoringHelper4.getHeight()), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFT, "Comment:", new Integer(1), null, new Integer(0)}));
RuntimeAnchoring anchoringHelper5 = new RuntimeAnchoring(designSize, runtimeSize, 456, 8, 47, 17, ims.framework.enumerations.ControlAnchoring.TOPLEFT);
super.addControl(factory.getControl(Label.class, new Object[] { control, new Integer(startControlID.intValue() + 1005), new Integer(anchoringHelper5.getX()), new Integer(anchoringHelper5.getY()), new Integer(anchoringHelper5.getWidth()), new Integer(anchoringHelper5.getHeight()), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFT, "Source:", new Integer(1), null, new Integer(0)}));
RuntimeAnchoring anchoringHelper6 = new RuntimeAnchoring(designSize, runtimeSize, 456, 32, 95, 17, ims.framework.enumerations.ControlAnchoring.TOPLEFT);
super.addControl(factory.getControl(Label.class, new Object[] { control, new Integer(startControlID.intValue() + 1006), new Integer(anchoringHelper6.getX()), new Integer(anchoringHelper6.getY()), new Integer(anchoringHelper6.getWidth()), new Integer(anchoringHelper6.getHeight()), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFT, "Date Identified:", new Integer(1), null, new Integer(0)}));
// TextBox Controls
RuntimeAnchoring anchoringHelper7 = new RuntimeAnchoring(designSize, runtimeSize, 120, 56, 304, 56, ims.framework.enumerations.ControlAnchoring.TOPLEFT);
super.addControl(factory.getControl(TextBox.class, new Object[] { control, new Integer(startControlID.intValue() + 1007), new Integer(anchoringHelper7.getX()), new Integer(anchoringHelper7.getY()), new Integer(anchoringHelper7.getWidth()), new Integer(anchoringHelper7.getHeight()), new Integer(startTabIndex.intValue() + 19), ControlState.DISABLED, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFT,Boolean.TRUE, new Integer(255), Boolean.TRUE, Boolean.FALSE, null, null, Boolean.FALSE, ims.framework.enumerations.CharacterCasing.NORMAL, ims.framework.enumerations.TextTrimming.NONE, "", ""}));
// PartialDateBox Controls
RuntimeAnchoring anchoringHelper8 = new RuntimeAnchoring(designSize, runtimeSize, 586, 32, 142, 20, ims.framework.enumerations.ControlAnchoring.TOPLEFT);
super.addControl(factory.getControl(PartialDateBox.class, new Object[] { control, new Integer(startControlID.intValue() + 1008), new Integer(anchoringHelper8.getX()), new Integer(anchoringHelper8.getY()), new Integer(anchoringHelper8.getWidth()), new Integer(anchoringHelper8.getHeight()), new Integer(startTabIndex.intValue() + 21), ControlState.DISABLED, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFT, "Invalid date entered", Boolean.FALSE, Boolean.FALSE}));
// ComboBox Controls
RuntimeAnchoring anchoringHelper9 = new RuntimeAnchoring(designSize, runtimeSize, 120, 8, 304, 21, ims.framework.enumerations.ControlAnchoring.TOPLEFT);
ComboBox m_cmbAlertCategoryTemp = (ComboBox)factory.getControl(ComboBox.class, new Object[] { control, new Integer(startControlID.intValue() + 1009), new Integer(anchoringHelper9.getX()), new Integer(anchoringHelper9.getY()), new Integer(anchoringHelper9.getWidth()), new Integer(anchoringHelper9.getHeight()), new Integer(startTabIndex.intValue() + 16), ControlState.DISABLED, ControlState.UNKNOWN,ims.framework.enumerations.ControlAnchoring.TOPLEFT ,Boolean.TRUE, Boolean.TRUE, SortOrder.NONE, Boolean.FALSE, new Integer(1), null, Boolean.TRUE, new Integer(-1)});
addControl(m_cmbAlertCategoryTemp);
cmbAlertCategoryComboBox cmbAlertCategory = (cmbAlertCategoryComboBox)ComboBoxFlyweightFactory.getInstance().createComboBoxBridge(cmbAlertCategoryComboBox.class, m_cmbAlertCategoryTemp);
super.addComboBox(cmbAlertCategory);
RuntimeAnchoring anchoringHelper10 = new RuntimeAnchoring(designSize, runtimeSize, 120, 32, 304, 21, ims.framework.enumerations.ControlAnchoring.TOPLEFT);
ComboBox m_cmbAlertAlertTemp = (ComboBox)factory.getControl(ComboBox.class, new Object[] { control, new Integer(startControlID.intValue() + 1010), new Integer(anchoringHelper10.getX()), new Integer(anchoringHelper10.getY()), new Integer(anchoringHelper10.getWidth()), new Integer(anchoringHelper10.getHeight()), new Integer(startTabIndex.intValue() + 18), ControlState.DISABLED, ControlState.UNKNOWN,ims.framework.enumerations.ControlAnchoring.TOPLEFT ,Boolean.TRUE, Boolean.FALSE, SortOrder.NONE, Boolean.FALSE, new Integer(1), null, Boolean.TRUE, new Integer(-1)});
addControl(m_cmbAlertAlertTemp);
cmbAlertAlertComboBox cmbAlertAlert = (cmbAlertAlertComboBox)ComboBoxFlyweightFactory.getInstance().createComboBoxBridge(cmbAlertAlertComboBox.class, m_cmbAlertAlertTemp);
super.addComboBox(cmbAlertAlert);
RuntimeAnchoring anchoringHelper11 = new RuntimeAnchoring(designSize, runtimeSize, 586, 8, 191, 21, ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT);
ComboBox m_cmbAlertSourceTemp = (ComboBox)factory.getControl(ComboBox.class, new Object[] { control, new Integer(startControlID.intValue() + 1011), new Integer(anchoringHelper11.getX()), new Integer(anchoringHelper11.getY()), new Integer(anchoringHelper11.getWidth()), new Integer(anchoringHelper11.getHeight()), new Integer(startTabIndex.intValue() + 20), ControlState.DISABLED, ControlState.UNKNOWN,ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT ,Boolean.TRUE, Boolean.FALSE, SortOrder.NONE, Boolean.FALSE, new Integer(1), null, Boolean.FALSE, new Integer(-1)});
addControl(m_cmbAlertSourceTemp);
cmbAlertSourceComboBox cmbAlertSource = (cmbAlertSourceComboBox)ComboBoxFlyweightFactory.getInstance().createComboBoxBridge(cmbAlertSourceComboBox.class, m_cmbAlertSourceTemp);
super.addComboBox(cmbAlertSource);
}
public ims.core.forms.authoringinfo.IComponent ccAlertAuthor()
{
return (ims.core.forms.authoringinfo.IComponent)((ims.framework.cn.controls.CustomComponent)super.getControl(0)).getLogic();
}
public void setccAlertAuthorValueChangedEvent(ims.framework.delegates.ValueChanged delegate)
{
((CustomComponent)super.getControl(0)).setValueChangedEvent(delegate);
}
public void setccAlertAuthorVisible(boolean value)
{
((ims.framework.Control)super.getControl(0)).setVisible(value);
}
public boolean isccAlertAuthorVisible()
{
return ((ims.framework.Control)super.getControl(0)).isVisible();
}
public void setccAlertAuthorEnabled(boolean value)
{
((ims.framework.Control)super.getControl(0)).setEnabled(value);
}
public boolean isccAlertAuthorEnabled()
{
return ((ims.framework.Control)super.getControl(0)).isEnabled();
}
public TextBox txtAlertComment()
{
return (TextBox)super.getControl(6);
}
public PartialDateBox pdtAlertDateIdentified()
{
return (PartialDateBox)super.getControl(7);
}
public cmbAlertCategoryComboBox cmbAlertCategory()
{
return (cmbAlertCategoryComboBox)super.getComboBox(0);
}
public cmbAlertAlertComboBox cmbAlertAlert()
{
return (cmbAlertAlertComboBox)super.getComboBox(1);
}
public cmbAlertSourceComboBox cmbAlertSource()
{
return (cmbAlertSourceComboBox)super.getComboBox(2);
}
}
	/**
	 * Generated bridge for the "Allergy" details container. Wires up two
	 * custom components (authoring info and allergy term), four labels, an
	 * effect text box, a partial-date box and three lookup combo boxes.
	 * NOTE: accessors fetch controls by registration index — do not reorder
	 * the addControl/addComboBox calls in setContext.
	 */
	public static class ctnAllergyContainer extends ContainerBridge
	{
		private static final long serialVersionUID = 1L;
		/** Typed combo-box wrapper whose row values are {@code AllergenType} lookups. */
		public static class cmbAllergyTypeComboBox extends ComboBoxBridge
		{
			private static final long serialVersionUID = 1L;
			public void newRow(ims.core.vo.lookups.AllergenType value, String text)
			{
				super.control.newRow(value, text);
			}
			public void newRow(ims.core.vo.lookups.AllergenType value, String text, ims.framework.utils.Image image)
			{
				super.control.newRow(value, text, image);
			}
			public void newRow(ims.core.vo.lookups.AllergenType value, String text, ims.framework.utils.Color textColor)
			{
				super.control.newRow(value, text, textColor);
			}
			public void newRow(ims.core.vo.lookups.AllergenType value, String text, ims.framework.utils.Image image, ims.framework.utils.Color textColor)
			{
				super.control.newRow(value, text, image, textColor);
			}
			public boolean removeRow(ims.core.vo.lookups.AllergenType value)
			{
				return super.control.removeRow(value);
			}
			public ims.core.vo.lookups.AllergenType getValue()
			{
				// Unchecked downcast of the untyped control value to the lookup type.
				return (ims.core.vo.lookups.AllergenType)super.control.getValue();
			}
			public void setValue(ims.core.vo.lookups.AllergenType value)
			{
				super.control.setValue(value);
			}
		}
		/** Typed combo-box wrapper whose row values are {@code AllergyReaction} lookups. */
		public static class cmbAllergyReactionComboBox extends ComboBoxBridge
		{
			private static final long serialVersionUID = 1L;
			public void newRow(ims.core.vo.lookups.AllergyReaction value, String text)
			{
				super.control.newRow(value, text);
			}
			public void newRow(ims.core.vo.lookups.AllergyReaction value, String text, ims.framework.utils.Image image)
			{
				super.control.newRow(value, text, image);
			}
			public void newRow(ims.core.vo.lookups.AllergyReaction value, String text, ims.framework.utils.Color textColor)
			{
				super.control.newRow(value, text, textColor);
			}
			public void newRow(ims.core.vo.lookups.AllergyReaction value, String text, ims.framework.utils.Image image, ims.framework.utils.Color textColor)
			{
				super.control.newRow(value, text, image, textColor);
			}
			public boolean removeRow(ims.core.vo.lookups.AllergyReaction value)
			{
				return super.control.removeRow(value);
			}
			public ims.core.vo.lookups.AllergyReaction getValue()
			{
				return (ims.core.vo.lookups.AllergyReaction)super.control.getValue();
			}
			public void setValue(ims.core.vo.lookups.AllergyReaction value)
			{
				super.control.setValue(value);
			}
		}
		/** Typed combo-box wrapper whose row values are {@code SourceofInformation} lookups. */
		public static class cmbAllergySourceComboBox extends ComboBoxBridge
		{
			private static final long serialVersionUID = 1L;
			public void newRow(ims.core.vo.lookups.SourceofInformation value, String text)
			{
				super.control.newRow(value, text);
			}
			public void newRow(ims.core.vo.lookups.SourceofInformation value, String text, ims.framework.utils.Image image)
			{
				super.control.newRow(value, text, image);
			}
			public void newRow(ims.core.vo.lookups.SourceofInformation value, String text, ims.framework.utils.Color textColor)
			{
				super.control.newRow(value, text, textColor);
			}
			public void newRow(ims.core.vo.lookups.SourceofInformation value, String text, ims.framework.utils.Image image, ims.framework.utils.Color textColor)
			{
				super.control.newRow(value, text, image, textColor);
			}
			public boolean removeRow(ims.core.vo.lookups.SourceofInformation value)
			{
				return super.control.removeRow(value);
			}
			public ims.core.vo.lookups.SourceofInformation getValue()
			{
				return (ims.core.vo.lookups.SourceofInformation)super.control.getValue();
			}
			public void setValue(ims.core.vo.lookups.SourceofInformation value)
			{
				super.control.setValue(value);
			}
		}
		/**
		 * Instantiates and registers every child control of this container.
		 * Registration order determines the indices used by the accessors
		 * below: author component (0), term component (1), labels (2-6),
		 * effect text box (7), date-identified box (8), then the three
		 * combo boxes (combo indices 0-2). Coordinates, control IDs and tab
		 * indices are generated values — presumably from the form designer;
		 * verify against the designer output before hand-editing.
		 */
		protected void setContext(Form form, ims.framework.interfaces.IAppForm appForm, Control control, FormLoader loader, Images form_images_local, ContextMenus contextMenus, Integer startControlID, ims.framework.utils.SizeInfo designSize, ims.framework.utils.SizeInfo runtimeSize, Integer startTabIndex, boolean skipContextValidation) throws Exception
		{
			// Guard clauses: only form/appForm/startControlID/startTabIndex are required.
			if(form == null)
				throw new RuntimeException("Invalid form");
			if(appForm == null)
				throw new RuntimeException("Invalid application form");
			if(control == null); // this is to avoid eclipse warning only.
			if(loader == null); // this is to avoid eclipse warning only.
			if(form_images_local == null); // this is to avoid eclipse warning only.
			if(contextMenus == null); // this is to avoid eclipse warning only.
			if(startControlID == null)
				throw new RuntimeException("Invalid startControlID");
			if(designSize == null); // this is to avoid eclipse warning only.
			if(runtimeSize == null); // this is to avoid eclipse warning only.
			if(startTabIndex == null)
				throw new RuntimeException("Invalid startTabIndex");
			// Custom Controls
			ims.framework.CustomComponent instance1 = factory.getEmptyCustomComponent();
			RuntimeAnchoring anchoringHelper12 = new RuntimeAnchoring(designSize, runtimeSize, 448, 112, 344, 56, ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT);
			// Loads the authoring-info component form (form id 102228) for this container.
			ims.framework.FormUiLogic m_ccAllergyAuthorForm = loader.loadComponent(102228, appForm, startControlID * 10 + 2000, anchoringHelper12.getSize(), instance1, startTabIndex.intValue() + 11, skipContextValidation);
			//ims.framework.Control m_ccAllergyAuthorControl = factory.getControl(CustomComponent.class, new Object[] { control, new Integer(startControlID.intValue() + 1012), new Integer(448), new Integer(112), new Integer(344), new Integer(56), ControlState.DISABLED, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT, new Integer(startTabIndex.intValue() + 11), m_ccAllergyAuthorForm, instance1 } );
			ims.framework.Control m_ccAllergyAuthorControl = factory.getControl(CustomComponent.class, new Object[] { control, new Integer(startControlID.intValue() + 1013), new Integer(anchoringHelper12.getX()), new Integer(anchoringHelper12.getY()), new Integer(anchoringHelper12.getWidth()), new Integer(anchoringHelper12.getHeight()), ControlState.DISABLED, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT, new Integer(startTabIndex.intValue() + 11), m_ccAllergyAuthorForm, instance1, Boolean.FALSE } );
			super.addControl(m_ccAllergyAuthorControl);
			// Re-register the embedded component's menus on the hosting form.
			Menu[] menus1 = m_ccAllergyAuthorForm.getForm().getRegisteredMenus();
			for(int x = 0; x < menus1.length; x++)
			{
				form.registerMenu(menus1[x]);
			}
			ims.framework.CustomComponent instance2 = factory.getEmptyCustomComponent();
			RuntimeAnchoring anchoringHelper13 = new RuntimeAnchoring(designSize, runtimeSize, 8, 8, 784, 64, ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT);
			// Loads the clinical-coding term component form (form id 123133).
			ims.framework.FormUiLogic m_ccAllergyTermForm = loader.loadComponent(123133, appForm, startControlID * 10 + 3000, anchoringHelper13.getSize(), instance2, startTabIndex.intValue() + 2, skipContextValidation);
			//ims.framework.Control m_ccAllergyTermControl = factory.getControl(CustomComponent.class, new Object[] { control, new Integer(startControlID.intValue() + 1014), new Integer(8), new Integer(8), new Integer(784), new Integer(64), ControlState.DISABLED, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT, new Integer(startTabIndex.intValue() + 2), m_ccAllergyTermForm, instance2 } );
			ims.framework.Control m_ccAllergyTermControl = factory.getControl(CustomComponent.class, new Object[] { control, new Integer(startControlID.intValue() + 1015), new Integer(anchoringHelper13.getX()), new Integer(anchoringHelper13.getY()), new Integer(anchoringHelper13.getWidth()), new Integer(anchoringHelper13.getHeight()), ControlState.DISABLED, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT, new Integer(startTabIndex.intValue() + 2), m_ccAllergyTermForm, instance2, Boolean.FALSE } );
			super.addControl(m_ccAllergyTermControl);
			Menu[] menus2 = m_ccAllergyTermForm.getForm().getRegisteredMenus();
			for(int x = 0; x < menus2.length; x++)
			{
				form.registerMenu(menus2[x]);
			}
			// Label Controls
			RuntimeAnchoring anchoringHelper14 = new RuntimeAnchoring(designSize, runtimeSize, 16, 72, 36, 17, ims.framework.enumerations.ControlAnchoring.TOPLEFT);
			super.addControl(factory.getControl(Label.class, new Object[] { control, new Integer(startControlID.intValue() + 1016), new Integer(anchoringHelper14.getX()), new Integer(anchoringHelper14.getY()), new Integer(anchoringHelper14.getWidth()), new Integer(anchoringHelper14.getHeight()), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFT, "Type:", new Integer(1), null, new Integer(0)}));
			RuntimeAnchoring anchoringHelper15 = new RuntimeAnchoring(designSize, runtimeSize, 16, 96, 58, 17, ims.framework.enumerations.ControlAnchoring.TOPLEFT);
			super.addControl(factory.getControl(Label.class, new Object[] { control, new Integer(startControlID.intValue() + 1017), new Integer(anchoringHelper15.getX()), new Integer(anchoringHelper15.getY()), new Integer(anchoringHelper15.getWidth()), new Integer(anchoringHelper15.getHeight()), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFT, "Reaction:", new Integer(1), null, new Integer(0)}));
			RuntimeAnchoring anchoringHelper16 = new RuntimeAnchoring(designSize, runtimeSize, 456, 72, 47, 17, ims.framework.enumerations.ControlAnchoring.TOPLEFT);
			super.addControl(factory.getControl(Label.class, new Object[] { control, new Integer(startControlID.intValue() + 1018), new Integer(anchoringHelper16.getX()), new Integer(anchoringHelper16.getY()), new Integer(anchoringHelper16.getWidth()), new Integer(anchoringHelper16.getHeight()), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFT, "Source:", new Integer(1), null, new Integer(0)}));
			RuntimeAnchoring anchoringHelper17 = new RuntimeAnchoring(designSize, runtimeSize, 16, 120, 41, 17, ims.framework.enumerations.ControlAnchoring.TOPLEFT);
			super.addControl(factory.getControl(Label.class, new Object[] { control, new Integer(startControlID.intValue() + 1019), new Integer(anchoringHelper17.getX()), new Integer(anchoringHelper17.getY()), new Integer(anchoringHelper17.getWidth()), new Integer(anchoringHelper17.getHeight()), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFT, "Effect:", new Integer(1), null, new Integer(0)}));
			RuntimeAnchoring anchoringHelper18 = new RuntimeAnchoring(designSize, runtimeSize, 456, 96, 95, 17, ims.framework.enumerations.ControlAnchoring.TOPLEFT);
			super.addControl(factory.getControl(Label.class, new Object[] { control, new Integer(startControlID.intValue() + 1020), new Integer(anchoringHelper18.getX()), new Integer(anchoringHelper18.getY()), new Integer(anchoringHelper18.getWidth()), new Integer(anchoringHelper18.getHeight()), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFT, "Date Identified:", new Integer(1), null, new Integer(0)}));
			// TextBox Controls
			RuntimeAnchoring anchoringHelper19 = new RuntimeAnchoring(designSize, runtimeSize, 120, 120, 304, 40, ims.framework.enumerations.ControlAnchoring.TOPLEFT);
			super.addControl(factory.getControl(TextBox.class, new Object[] { control, new Integer(startControlID.intValue() + 1021), new Integer(anchoringHelper19.getX()), new Integer(anchoringHelper19.getY()), new Integer(anchoringHelper19.getWidth()), new Integer(anchoringHelper19.getHeight()), new Integer(startTabIndex.intValue() + 8), ControlState.DISABLED, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFT,Boolean.TRUE, new Integer(250), Boolean.TRUE, Boolean.FALSE, null, null, Boolean.FALSE, ims.framework.enumerations.CharacterCasing.NORMAL, ims.framework.enumerations.TextTrimming.NONE, "", ""}));
			// PartialDateBox Controls
			RuntimeAnchoring anchoringHelper20 = new RuntimeAnchoring(designSize, runtimeSize, 586, 96, 142, 20, ims.framework.enumerations.ControlAnchoring.TOPLEFT);
			super.addControl(factory.getControl(PartialDateBox.class, new Object[] { control, new Integer(startControlID.intValue() + 1022), new Integer(anchoringHelper20.getX()), new Integer(anchoringHelper20.getY()), new Integer(anchoringHelper20.getWidth()), new Integer(anchoringHelper20.getHeight()), new Integer(startTabIndex.intValue() + 10), ControlState.DISABLED, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFT, "Invalid date entered", Boolean.FALSE, Boolean.FALSE}));
			// ComboBox Controls — each raw ComboBox is wrapped in its typed bridge via the flyweight factory.
			RuntimeAnchoring anchoringHelper21 = new RuntimeAnchoring(designSize, runtimeSize, 120, 72, 304, 21, ims.framework.enumerations.ControlAnchoring.TOPLEFT);
			ComboBox m_cmbAllergyTypeTemp = (ComboBox)factory.getControl(ComboBox.class, new Object[] { control, new Integer(startControlID.intValue() + 1023), new Integer(anchoringHelper21.getX()), new Integer(anchoringHelper21.getY()), new Integer(anchoringHelper21.getWidth()), new Integer(anchoringHelper21.getHeight()), new Integer(startTabIndex.intValue() + 6), ControlState.DISABLED, ControlState.UNKNOWN,ims.framework.enumerations.ControlAnchoring.TOPLEFT ,Boolean.TRUE, Boolean.TRUE, SortOrder.NONE, Boolean.FALSE, new Integer(1), null, Boolean.FALSE, new Integer(-1)});
			addControl(m_cmbAllergyTypeTemp);
			cmbAllergyTypeComboBox cmbAllergyType = (cmbAllergyTypeComboBox)ComboBoxFlyweightFactory.getInstance().createComboBoxBridge(cmbAllergyTypeComboBox.class, m_cmbAllergyTypeTemp);
			super.addComboBox(cmbAllergyType);
			RuntimeAnchoring anchoringHelper22 = new RuntimeAnchoring(designSize, runtimeSize, 120, 96, 304, 21, ims.framework.enumerations.ControlAnchoring.TOPLEFT);
			ComboBox m_cmbAllergyReactionTemp = (ComboBox)factory.getControl(ComboBox.class, new Object[] { control, new Integer(startControlID.intValue() + 1024), new Integer(anchoringHelper22.getX()), new Integer(anchoringHelper22.getY()), new Integer(anchoringHelper22.getWidth()), new Integer(anchoringHelper22.getHeight()), new Integer(startTabIndex.intValue() + 7), ControlState.DISABLED, ControlState.UNKNOWN,ims.framework.enumerations.ControlAnchoring.TOPLEFT ,Boolean.TRUE, Boolean.FALSE, SortOrder.NONE, Boolean.FALSE, new Integer(1), null, Boolean.FALSE, new Integer(-1)});
			addControl(m_cmbAllergyReactionTemp);
			cmbAllergyReactionComboBox cmbAllergyReaction = (cmbAllergyReactionComboBox)ComboBoxFlyweightFactory.getInstance().createComboBoxBridge(cmbAllergyReactionComboBox.class, m_cmbAllergyReactionTemp);
			super.addComboBox(cmbAllergyReaction);
			RuntimeAnchoring anchoringHelper23 = new RuntimeAnchoring(designSize, runtimeSize, 586, 72, 191, 21, ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT);
			ComboBox m_cmbAllergySourceTemp = (ComboBox)factory.getControl(ComboBox.class, new Object[] { control, new Integer(startControlID.intValue() + 1025), new Integer(anchoringHelper23.getX()), new Integer(anchoringHelper23.getY()), new Integer(anchoringHelper23.getWidth()), new Integer(anchoringHelper23.getHeight()), new Integer(startTabIndex.intValue() + 9), ControlState.DISABLED, ControlState.UNKNOWN,ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT ,Boolean.TRUE, Boolean.FALSE, SortOrder.NONE, Boolean.FALSE, new Integer(1), null, Boolean.TRUE, new Integer(-1)});
			addControl(m_cmbAllergySourceTemp);
			cmbAllergySourceComboBox cmbAllergySource = (cmbAllergySourceComboBox)ComboBoxFlyweightFactory.getInstance().createComboBoxBridge(cmbAllergySourceComboBox.class, m_cmbAllergySourceTemp);
			super.addComboBox(cmbAllergySource);
		}
		/** UI logic of the authoring-info custom component (control index 0). */
		public ims.core.forms.authoringinfo.IComponent ccAllergyAuthor()
		{
			return (ims.core.forms.authoringinfo.IComponent)((ims.framework.cn.controls.CustomComponent)super.getControl(0)).getLogic();
		}
		public void setccAllergyAuthorValueChangedEvent(ims.framework.delegates.ValueChanged delegate)
		{
			((CustomComponent)super.getControl(0)).setValueChangedEvent(delegate);
		}
		public void setccAllergyAuthorVisible(boolean value)
		{
			((ims.framework.Control)super.getControl(0)).setVisible(value);
		}
		public boolean isccAllergyAuthorVisible()
		{
			return ((ims.framework.Control)super.getControl(0)).isVisible();
		}
		public void setccAllergyAuthorEnabled(boolean value)
		{
			((ims.framework.Control)super.getControl(0)).setEnabled(value);
		}
		public boolean isccAllergyAuthorEnabled()
		{
			return ((ims.framework.Control)super.getControl(0)).isEnabled();
		}
		/** UI logic of the clinical-coding term custom component (control index 1). */
		public ims.clinical.forms.clinicalcoding.IComponent ccAllergyTerm()
		{
			return (ims.clinical.forms.clinicalcoding.IComponent)((ims.framework.cn.controls.CustomComponent)super.getControl(1)).getLogic();
		}
		public void setccAllergyTermValueChangedEvent(ims.framework.delegates.ValueChanged delegate)
		{
			((CustomComponent)super.getControl(1)).setValueChangedEvent(delegate);
		}
		public void setccAllergyTermVisible(boolean value)
		{
			((ims.framework.Control)super.getControl(1)).setVisible(value);
		}
		public boolean isccAllergyTermVisible()
		{
			return ((ims.framework.Control)super.getControl(1)).isVisible();
		}
		public void setccAllergyTermEnabled(boolean value)
		{
			((ims.framework.Control)super.getControl(1)).setEnabled(value);
		}
		public boolean isccAllergyTermEnabled()
		{
			return ((ims.framework.Control)super.getControl(1)).isEnabled();
		}
		/** "Effect" text box (control index 7 — registration order in setContext). */
		public TextBox txtAllergyEffect()
		{
			return (TextBox)super.getControl(7);
		}
		/** "Date Identified" partial-date box (control index 8). */
		public PartialDateBox pdtAllergyDateIdentified()
		{
			return (PartialDateBox)super.getControl(8);
		}
		public cmbAllergyTypeComboBox cmbAllergyType()
		{
			return (cmbAllergyTypeComboBox)super.getComboBox(0);
		}
		public cmbAllergyReactionComboBox cmbAllergyReaction()
		{
			return (cmbAllergyReactionComboBox)super.getComboBox(1);
		}
		public cmbAllergySourceComboBox cmbAllergySource()
		{
			return (cmbAllergySourceComboBox)super.getComboBox(2);
		}
	}
public static class grdAlertsRow extends GridRowBridge
{
private static final long serialVersionUID = 1L;
		/** Wraps the given grid row; invoked by the generated grid bridge. */
		protected grdAlertsRow(GridRow row)
		{
			super(row);
		}
		/** Delegates {@code showOpened} to the wrapped row for the given column index. */
		public void showOpened(int column)
		{
			super.row.showOpened(column);
		}
		// --- Column 0 ("Date", String cell) accessors ---
		public void setColDateReadOnly(boolean value)
		{
			super.row.setReadOnly(0, value);
		}
		public boolean isColDateReadOnly()
		{
			return super.row.isReadOnly(0);
		}
		public void showColDateOpened()
		{
			super.row.showOpened(0);
		}
		public void setTooltipForColDate(String value)
		{
			super.row.setTooltip(0, value);
		}
		public String getColDate()
		{
			return (String)super.row.get(0);
		}
		public void setColDate(String value)
		{
			super.row.set(0, value);
		}
		// Generated duplicate of setTooltipForColDate — both call setTooltip(0, value).
		public void setCellColDateTooltip(String value)
		{
			super.row.setTooltip(0, value);
		}
		// --- Column 1 ("Category", String cell) accessors ---
		public void setColCategoryReadOnly(boolean value)
		{
			super.row.setReadOnly(1, value);
		}
		public boolean isColCategoryReadOnly()
		{
			return super.row.isReadOnly(1);
		}
		public void showColCategoryOpened()
		{
			super.row.showOpened(1);
		}
		public void setTooltipForColCategory(String value)
		{
			super.row.setTooltip(1, value);
		}
		public String getColCategory()
		{
			return (String)super.row.get(1);
		}
		public void setColCategory(String value)
		{
			super.row.set(1, value);
		}
		// Generated duplicate of setTooltipForColCategory — both call setTooltip(1, value).
		public void setCellColCategoryTooltip(String value)
		{
			super.row.setTooltip(1, value);
		}
		// --- Column 2 ("Alert", String cell) accessors ---
		public void setColAlertReadOnly(boolean value)
		{
			super.row.setReadOnly(2, value);
		}
		public boolean isColAlertReadOnly()
		{
			return super.row.isReadOnly(2);
		}
		public void showColAlertOpened()
		{
			super.row.showOpened(2);
		}
		public void setTooltipForColAlert(String value)
		{
			super.row.setTooltip(2, value);
		}
		public String getColAlert()
		{
			return (String)super.row.get(2);
		}
		public void setColAlert(String value)
		{
			super.row.set(2, value);
		}
		// Generated duplicate of setTooltipForColAlert — both call setTooltip(2, value).
		public void setCellColAlertTooltip(String value)
		{
			super.row.setTooltip(2, value);
		}
		// --- Column 3 ("Source", String cell) accessors ---
		public void setColSourceReadOnly(boolean value)
		{
			super.row.setReadOnly(3, value);
		}
		public boolean isColSourceReadOnly()
		{
			return super.row.isReadOnly(3);
		}
		public void showColSourceOpened()
		{
			super.row.showOpened(3);
		}
		public void setTooltipForColSource(String value)
		{
			super.row.setTooltip(3, value);
		}
		public String getColSource()
		{
			return (String)super.row.get(3);
		}
		public void setColSource(String value)
		{
			super.row.set(3, value);
		}
		// Generated duplicate of setTooltipForColSource — both call setTooltip(3, value).
		public void setCellColSourceTooltip(String value)
		{
			super.row.setTooltip(3, value);
		}
public void setColActiveReadOnly(boolean value)
{
super.row.setReadOnly(4, value);
}
public boolean isColActiveReadOnly()
{
return super.row.isReadOnly(4);
}
public void showColActiveOpened()
{
super.row.showOpened(4);
}
public void setTooltipForColActive(String value)
{
super.row.setTooltip(4, value);
}
public ims.framework.utils.Image getColActive()
{
return (ims.framework.utils.Image)super.row.get(4);
}
public void setColActive(ims.framework.utils.Image value)
{
super.row.set(4, value);
}
public void setCellColActiveTooltip(String value)
{
super.row.setTooltip(4, value);
}
public void setColAuditReadOnly(boolean value)
{
super.row.setReadOnly(5, value);
}
public boolean isColAuditReadOnly()
{
return super.row.isReadOnly(5);
}
public void showColAuditOpened()
{
super.row.showOpened(5);
}
public void setTooltipForColAudit(String value)
{
super.row.setTooltip(5, value);
}
public ims.framework.utils.Image getColAudit()
{
return (ims.framework.utils.Image)super.row.get(5);
}
public void setColAudit(ims.framework.utils.Image value)
{
super.row.set(5, value);
}
public void setCellColAuditTooltip(String value)
{
super.row.setTooltip(5, value);
}
public void setColIncludeReadOnly(boolean value)
{
super.row.setReadOnly(6, value);
}
public boolean isColIncludeReadOnly()
{
return super.row.isReadOnly(6);
}
public void showColIncludeOpened()
{
super.row.showOpened(6);
}
public void setTooltipForColInclude(String value)
{
super.row.setTooltip(6, value);
}
public boolean getColInclude()
{
return ((Boolean)super.row.get(6)).booleanValue();
}
public void setColInclude(boolean value)
{
super.row.set(6, new Boolean(value));
}
public void setCellColIncludeTooltip(String value)
{
super.row.setTooltip(6, value);
}
public ims.core.vo.PatientAlertEDischargeVo getValue()
{
return (ims.core.vo.PatientAlertEDischargeVo)super.row.getValue();
}
public void setValue(ims.core.vo.PatientAlertEDischargeVo value)
{
super.row.setValue(value);
}
}
/**
 * Typed view over the Alerts grid's row collection: every raw GridRow that
 * comes back from the underlying collection is wrapped in a grdAlertsRow.
 */
public static class grdAlertsRowCollection extends GridRowCollectionBridge
{
	private static final long serialVersionUID = 1L;
	private grdAlertsRowCollection(GridRowCollection collection)
	{
		super(collection);
	}
	public grdAlertsRow get(int index)
	{
		GridRow row = super.collection.get(index);
		return new grdAlertsRow(row);
	}
	public grdAlertsRow newRow()
	{
		GridRow row = super.collection.newRow();
		return new grdAlertsRow(row);
	}
	public grdAlertsRow newRow(boolean autoSelect)
	{
		GridRow row = super.collection.newRow(autoSelect);
		return new grdAlertsRow(row);
	}
	public grdAlertsRow newRowAt(int index)
	{
		GridRow row = super.collection.newRowAt(index);
		return new grdAlertsRow(row);
	}
	public grdAlertsRow newRowAt(int index, boolean autoSelect)
	{
		GridRow row = super.collection.newRowAt(index, autoSelect);
		return new grdAlertsRow(row);
	}
}
/**
 * Typed wrapper for the Alerts grid.  Provides the column-construction
 * helpers used during form setup and value/row accessors typed to
 * PatientAlertEDischargeVo.  Column indices: 0=Date, 1=Category, 2=Alert,
 * 3=Source, 4=Active, 5=Audit, 6=Include.
 */
public static class grdAlertsGrid extends GridBridge
{
	private static final long serialVersionUID = 1L;
	// Column-construction helpers; each delegates straight to the wrapped grid.
	private void addStringColumn(String caption, int captionAlignment, int alignment, int width, boolean readOnly, boolean bold, int sortOrder, int maxLength, boolean canGrow, ims.framework.enumerations.CharacterCasing casing)
	{
		super.grid.addStringColumn(caption, captionAlignment, alignment, width, readOnly, bold, sortOrder, maxLength, canGrow, casing);
	}
	private void addImageColumn(String caption, int captionAlignment, int alignment, int width, boolean canGrow, int sortOrder)
	{
		super.grid.addImageColumn(caption, captionAlignment, alignment, width, canGrow, sortOrder);
	}
	private void addBoolColumn(String caption, int captionAlignment, int alignment, int width, boolean readOnly, boolean autoPostBack, int sortOrder, boolean canGrow)
	{
		super.grid.addBoolColumn(caption, captionAlignment, alignment, width, readOnly, autoPostBack, sortOrder, canGrow);
	}
	/** Collects the bound value of every row into a new collection. */
	public ims.core.vo.PatientAlertEDischargeVoCollection getValues()
	{
		ims.core.vo.PatientAlertEDischargeVoCollection listOfValues = new ims.core.vo.PatientAlertEDischargeVoCollection();
		// Fetch the row collection once instead of re-wrapping it on every iteration.
		grdAlertsRowCollection rows = this.getRows();
		for(int x = 0; x < rows.size(); x++)
		{
			listOfValues.add(rows.get(x).getValue());
		}
		return listOfValues;
	}
	public ims.core.vo.PatientAlertEDischargeVo getValue()
	{
		return (ims.core.vo.PatientAlertEDischargeVo)super.grid.getValue();
	}
	public void setValue(ims.core.vo.PatientAlertEDischargeVo value)
	{
		super.grid.setValue(value);
	}
	/** Returns the selected row wrapped in a typed bridge, or null when nothing is selected. */
	public grdAlertsRow getSelectedRow()
	{
		// Read the selection once so the null check and the wrap see the same row.
		GridRow selectedRow = super.grid.getSelectedRow();
		return selectedRow == null ? null : new grdAlertsRow(selectedRow);
	}
	public int getSelectedRowIndex()
	{
		return super.grid.getSelectedRowIndex();
	}
	public grdAlertsRowCollection getRows()
	{
		return new grdAlertsRowCollection(super.grid.getRows());
	}
	/** Finds the row bound to the given value object, or null when not present. */
	public grdAlertsRow getRowByValue(ims.core.vo.PatientAlertEDischargeVo value)
	{
		GridRow row = super.grid.getRowByValue(value);
		return row == null ? null : new grdAlertsRow(row);
	}
	// --- Column-header tooltips (indices as documented on the class) ---
	public void setColDateHeaderTooltip(String value)
	{
		super.grid.setColumnHeaderTooltip(0, value);
	}
	public String getColDateHeaderTooltip()
	{
		return super.grid.getColumnHeaderTooltip(0);
	}
	public void setColCategoryHeaderTooltip(String value)
	{
		super.grid.setColumnHeaderTooltip(1, value);
	}
	public String getColCategoryHeaderTooltip()
	{
		return super.grid.getColumnHeaderTooltip(1);
	}
	public void setColAlertHeaderTooltip(String value)
	{
		super.grid.setColumnHeaderTooltip(2, value);
	}
	public String getColAlertHeaderTooltip()
	{
		return super.grid.getColumnHeaderTooltip(2);
	}
	public void setColSourceHeaderTooltip(String value)
	{
		super.grid.setColumnHeaderTooltip(3, value);
	}
	public String getColSourceHeaderTooltip()
	{
		return super.grid.getColumnHeaderTooltip(3);
	}
	public void setColActiveHeaderTooltip(String value)
	{
		super.grid.setColumnHeaderTooltip(4, value);
	}
	public String getColActiveHeaderTooltip()
	{
		return super.grid.getColumnHeaderTooltip(4);
	}
	public void setColAuditHeaderTooltip(String value)
	{
		super.grid.setColumnHeaderTooltip(5, value);
	}
	public String getColAuditHeaderTooltip()
	{
		return super.grid.getColumnHeaderTooltip(5);
	}
	public void setColIncludeHeaderTooltip(String value)
	{
		super.grid.setColumnHeaderTooltip(6, value);
	}
	public String getColIncludeHeaderTooltip()
	{
		return super.grid.getColumnHeaderTooltip(6);
	}
}
/**
 * Typed wrapper around one row of the Allergies grid.
 * Column layout: 0 = Date (String), 1 = Allergen Description (String),
 * 2 = Reaction (String), 3 = Source (String), 4 = IsActive (Image),
 * 5 = Audit (Image), 6 = Include (boolean).
 * The row's bound value object is a PatientAllergyEDischargeVo.
 */
public static class grdAllergiesRow extends GridRowBridge
{
	private static final long serialVersionUID = 1L;
	protected grdAllergiesRow(GridRow row)
	{
		super(row);
	}
	/** Opens the in-place editor for the given column index. */
	public void showOpened(int column)
	{
		super.row.showOpened(column);
	}
	// --- Column 0: Date (String) ---
	public void setColDateReadOnly(boolean value)
	{
		super.row.setReadOnly(0, value);
	}
	public boolean isColDateReadOnly()
	{
		return super.row.isReadOnly(0);
	}
	public void showColDateOpened()
	{
		super.row.showOpened(0);
	}
	public void setTooltipForColDate(String value)
	{
		super.row.setTooltip(0, value);
	}
	public String getColDate()
	{
		return (String)super.row.get(0);
	}
	public void setColDate(String value)
	{
		super.row.set(0, value);
	}
	// Duplicate of setTooltipForColDate; both are kept for interface compatibility.
	public void setCellColDateTooltip(String value)
	{
		super.row.setTooltip(0, value);
	}
	// --- Column 1: Allergen Description (String) ---
	public void setColAllergenDesReadOnly(boolean value)
	{
		super.row.setReadOnly(1, value);
	}
	public boolean isColAllergenDesReadOnly()
	{
		return super.row.isReadOnly(1);
	}
	public void showColAllergenDesOpened()
	{
		super.row.showOpened(1);
	}
	public void setTooltipForColAllergenDes(String value)
	{
		super.row.setTooltip(1, value);
	}
	public String getColAllergenDes()
	{
		return (String)super.row.get(1);
	}
	public void setColAllergenDes(String value)
	{
		super.row.set(1, value);
	}
	public void setCellColAllergenDesTooltip(String value)
	{
		super.row.setTooltip(1, value);
	}
	// --- Column 2: Reaction (String) ---
	public void setColReactionReadOnly(boolean value)
	{
		super.row.setReadOnly(2, value);
	}
	public boolean isColReactionReadOnly()
	{
		return super.row.isReadOnly(2);
	}
	public void showColReactionOpened()
	{
		super.row.showOpened(2);
	}
	public void setTooltipForColReaction(String value)
	{
		super.row.setTooltip(2, value);
	}
	public String getColReaction()
	{
		return (String)super.row.get(2);
	}
	public void setColReaction(String value)
	{
		super.row.set(2, value);
	}
	public void setCellColReactionTooltip(String value)
	{
		super.row.setTooltip(2, value);
	}
	// --- Column 3: Source (String) ---
	public void setColSourceReadOnly(boolean value)
	{
		super.row.setReadOnly(3, value);
	}
	public boolean isColSourceReadOnly()
	{
		return super.row.isReadOnly(3);
	}
	public void showColSourceOpened()
	{
		super.row.showOpened(3);
	}
	public void setTooltipForColSource(String value)
	{
		super.row.setTooltip(3, value);
	}
	public String getColSource()
	{
		return (String)super.row.get(3);
	}
	public void setColSource(String value)
	{
		super.row.set(3, value);
	}
	public void setCellColSourceTooltip(String value)
	{
		super.row.setTooltip(3, value);
	}
	// --- Column 4: IsActive indicator (Image) ---
	public void setColIsActiveReadOnly(boolean value)
	{
		super.row.setReadOnly(4, value);
	}
	public boolean isColIsActiveReadOnly()
	{
		return super.row.isReadOnly(4);
	}
	public void showColIsActiveOpened()
	{
		super.row.showOpened(4);
	}
	public void setTooltipForColIsActive(String value)
	{
		super.row.setTooltip(4, value);
	}
	public ims.framework.utils.Image getColIsActive()
	{
		return (ims.framework.utils.Image)super.row.get(4);
	}
	public void setColIsActive(ims.framework.utils.Image value)
	{
		super.row.set(4, value);
	}
	public void setCellColIsActiveTooltip(String value)
	{
		super.row.setTooltip(4, value);
	}
	// --- Column 5: Audit indicator (Image) ---
	public void setColAuditReadOnly(boolean value)
	{
		super.row.setReadOnly(5, value);
	}
	public boolean isColAuditReadOnly()
	{
		return super.row.isReadOnly(5);
	}
	public void showColAuditOpened()
	{
		super.row.showOpened(5);
	}
	public void setTooltipForColAudit(String value)
	{
		super.row.setTooltip(5, value);
	}
	public ims.framework.utils.Image getColAudit()
	{
		return (ims.framework.utils.Image)super.row.get(5);
	}
	public void setColAudit(ims.framework.utils.Image value)
	{
		super.row.set(5, value);
	}
	public void setCellColAuditTooltip(String value)
	{
		super.row.setTooltip(5, value);
	}
	// --- Column 6: Include (boolean checkbox) ---
	public void setColIncludeReadOnly(boolean value)
	{
		super.row.setReadOnly(6, value);
	}
	public boolean isColIncludeReadOnly()
	{
		return super.row.isReadOnly(6);
	}
	public void showColIncludeOpened()
	{
		super.row.showOpened(6);
	}
	public void setTooltipForColInclude(String value)
	{
		super.row.setTooltip(6, value);
	}
	public boolean getColInclude()
	{
		return ((Boolean)super.row.get(6)).booleanValue();
	}
	public void setColInclude(boolean value)
	{
		// Boolean.valueOf reuses the cached TRUE/FALSE instances instead of the
		// deprecated Boolean(boolean) constructor.
		super.row.set(6, Boolean.valueOf(value));
	}
	public void setCellColIncludeTooltip(String value)
	{
		super.row.setTooltip(6, value);
	}
	/** Returns the value object bound to this row. */
	public ims.core.vo.PatientAllergyEDischargeVo getValue()
	{
		return (ims.core.vo.PatientAllergyEDischargeVo)super.row.getValue();
	}
	/** Binds the given value object to this row. */
	public void setValue(ims.core.vo.PatientAllergyEDischargeVo value)
	{
		super.row.setValue(value);
	}
}
/**
 * Typed view over the Allergies grid's row collection: every raw GridRow
 * that comes back from the underlying collection is wrapped in a grdAllergiesRow.
 */
public static class grdAllergiesRowCollection extends GridRowCollectionBridge
{
	private static final long serialVersionUID = 1L;
	private grdAllergiesRowCollection(GridRowCollection collection)
	{
		super(collection);
	}
	public grdAllergiesRow get(int index)
	{
		GridRow row = super.collection.get(index);
		return new grdAllergiesRow(row);
	}
	public grdAllergiesRow newRow()
	{
		GridRow row = super.collection.newRow();
		return new grdAllergiesRow(row);
	}
	public grdAllergiesRow newRow(boolean autoSelect)
	{
		GridRow row = super.collection.newRow(autoSelect);
		return new grdAllergiesRow(row);
	}
	public grdAllergiesRow newRowAt(int index)
	{
		GridRow row = super.collection.newRowAt(index);
		return new grdAllergiesRow(row);
	}
	public grdAllergiesRow newRowAt(int index, boolean autoSelect)
	{
		GridRow row = super.collection.newRowAt(index, autoSelect);
		return new grdAllergiesRow(row);
	}
}
/**
 * Typed wrapper for the Allergies grid.  Provides the column-construction
 * helpers used during form setup and value/row accessors typed to
 * PatientAllergyEDischargeVo.  Column indices: 0=Date, 1=Allergen Description,
 * 2=Reaction, 3=Source, 4=IsActive, 5=Audit, 6=Include.
 */
public static class grdAllergiesGrid extends GridBridge
{
	private static final long serialVersionUID = 1L;
	// Column-construction helpers; each delegates straight to the wrapped grid.
	private void addStringColumn(String caption, int captionAlignment, int alignment, int width, boolean readOnly, boolean bold, int sortOrder, int maxLength, boolean canGrow, ims.framework.enumerations.CharacterCasing casing)
	{
		super.grid.addStringColumn(caption, captionAlignment, alignment, width, readOnly, bold, sortOrder, maxLength, canGrow, casing);
	}
	private void addImageColumn(String caption, int captionAlignment, int alignment, int width, boolean canGrow, int sortOrder)
	{
		super.grid.addImageColumn(caption, captionAlignment, alignment, width, canGrow, sortOrder);
	}
	private void addBoolColumn(String caption, int captionAlignment, int alignment, int width, boolean readOnly, boolean autoPostBack, int sortOrder, boolean canGrow)
	{
		super.grid.addBoolColumn(caption, captionAlignment, alignment, width, readOnly, autoPostBack, sortOrder, canGrow);
	}
	/** Collects the bound value of every row into a new collection. */
	public ims.core.vo.PatientAllergyEDischargeVoCollection getValues()
	{
		ims.core.vo.PatientAllergyEDischargeVoCollection listOfValues = new ims.core.vo.PatientAllergyEDischargeVoCollection();
		// Fetch the row collection once instead of re-wrapping it on every iteration.
		grdAllergiesRowCollection rows = this.getRows();
		for(int x = 0; x < rows.size(); x++)
		{
			listOfValues.add(rows.get(x).getValue());
		}
		return listOfValues;
	}
	public ims.core.vo.PatientAllergyEDischargeVo getValue()
	{
		return (ims.core.vo.PatientAllergyEDischargeVo)super.grid.getValue();
	}
	public void setValue(ims.core.vo.PatientAllergyEDischargeVo value)
	{
		super.grid.setValue(value);
	}
	/** Returns the selected row wrapped in a typed bridge, or null when nothing is selected. */
	public grdAllergiesRow getSelectedRow()
	{
		// Read the selection once so the null check and the wrap see the same row.
		GridRow selectedRow = super.grid.getSelectedRow();
		return selectedRow == null ? null : new grdAllergiesRow(selectedRow);
	}
	public int getSelectedRowIndex()
	{
		return super.grid.getSelectedRowIndex();
	}
	public grdAllergiesRowCollection getRows()
	{
		return new grdAllergiesRowCollection(super.grid.getRows());
	}
	/** Finds the row bound to the given value object, or null when not present. */
	public grdAllergiesRow getRowByValue(ims.core.vo.PatientAllergyEDischargeVo value)
	{
		GridRow row = super.grid.getRowByValue(value);
		return row == null ? null : new grdAllergiesRow(row);
	}
	// --- Column-header tooltips (indices as documented on the class) ---
	public void setColDateHeaderTooltip(String value)
	{
		super.grid.setColumnHeaderTooltip(0, value);
	}
	public String getColDateHeaderTooltip()
	{
		return super.grid.getColumnHeaderTooltip(0);
	}
	public void setColAllergenDesHeaderTooltip(String value)
	{
		super.grid.setColumnHeaderTooltip(1, value);
	}
	public String getColAllergenDesHeaderTooltip()
	{
		return super.grid.getColumnHeaderTooltip(1);
	}
	public void setColReactionHeaderTooltip(String value)
	{
		super.grid.setColumnHeaderTooltip(2, value);
	}
	public String getColReactionHeaderTooltip()
	{
		return super.grid.getColumnHeaderTooltip(2);
	}
	public void setColSourceHeaderTooltip(String value)
	{
		super.grid.setColumnHeaderTooltip(3, value);
	}
	public String getColSourceHeaderTooltip()
	{
		return super.grid.getColumnHeaderTooltip(3);
	}
	public void setColIsActiveHeaderTooltip(String value)
	{
		super.grid.setColumnHeaderTooltip(4, value);
	}
	public String getColIsActiveHeaderTooltip()
	{
		return super.grid.getColumnHeaderTooltip(4);
	}
	public void setColAuditHeaderTooltip(String value)
	{
		super.grid.setColumnHeaderTooltip(5, value);
	}
	public String getColAuditHeaderTooltip()
	{
		return super.grid.getColumnHeaderTooltip(5);
	}
	public void setColIncludeHeaderTooltip(String value)
	{
		super.grid.setColumnHeaderTooltip(6, value);
	}
	public String getColIncludeHeaderTooltip()
	{
		return super.grid.getColumnHeaderTooltip(6);
	}
}
/**
 * Verifies that each global context variable used by this form has a
 * supported declared type.  Silently returns when no context is supplied;
 * throws CodingRuntimeException on the first unsupported type.
 */
private void validateContext(ims.framework.Context context)
{
	if(context == null)
		return;
	// Checked in the same order as before; the first failure throws.
	final Class[] requiredTypes = {
		ims.core.vo.CareContextShortVo.class,
		ims.core.vo.PatientShort.class,
		ims.core.vo.EpisodeofCareShortVo.class
	};
	final String[] failureMessages = {
		"The type 'ims.core.vo.CareContextShortVo' of the global context variable 'Core.CurrentCareContext' is not supported.",
		"The type 'ims.core.vo.PatientShort' of the global context variable 'Core.PatientShort' is not supported.",
		"The type 'ims.core.vo.EpisodeofCareShortVo' of the global context variable 'Core.EpisodeofCareShort' is not supported."
	};
	for(int index = 0; index < requiredTypes.length; index++)
	{
		if(!context.isValidContextType(requiredTypes[index]))
			throw new ims.framework.exceptions.CodingRuntimeException(failureMessages[index]);
	}
}
/**
 * Verifies that every mandatory global context variable actually carries a
 * value; throws FormMandatoryContextMissingException on the first one that
 * is null.  Variables are checked in the same order as before.
 */
private void validateMandatoryContext(Context context)
{
	// Each entry is { display name, context-variable path }.
	final String[][] mandatoryVariables = {
		{ "Core.CurrentCareContext", "_cvp_Core.CurrentCareContext" },
		{ "Core.PatientShort", "_cvp_Core.PatientShort" },
		{ "Core.EpisodeofCareShort", "_cvp_Core.EpisodeofCareShort" }
	};
	for(int index = 0; index < mandatoryVariables.length; index++)
	{
		ims.framework.ContextVariable variable = new ims.framework.ContextVariable(mandatoryVariables[index][0], mandatoryVariables[index][1]);
		if(variable.getValueIsNull(context))
			throw new ims.framework.exceptions.FormMandatoryContextMissingException("The required context data '" + mandatoryVariables[index][0] + "' is not available.");
	}
}
// This form does not support marking its record as "recorded in error".
public boolean supportsRecordedInError()
{
return false;
}
// No value object is exposed for recorded-in-error handling; see supportsRecordedInError.
public ims.vo.ValueObject getRecordedInErrorVo()
{
return null;
}
// Convenience overload: no skip of context validation, default start control ID
// and tab index, no runtime size or parent control.
protected void setContext(FormLoader loader, Form form, ims.framework.interfaces.IAppForm appForm, UIFactory factory, Context context) throws Exception
{
setContext(loader, form, appForm, factory, context, Boolean.FALSE, new Integer(0), null, null, new Integer(0));
}
// Convenience overload: caller controls validation skipping; default start
// control ID and tab index, no runtime size or parent control.
protected void setContext(FormLoader loader, Form form, ims.framework.interfaces.IAppForm appForm, UIFactory factory, Context context, Boolean skipContextValidation) throws Exception
{
setContext(loader, form, appForm, factory, context, skipContextValidation, new Integer(0), null, null, new Integer(0));
}
/**
 * Builds the whole form: validates the global context, sizes the form,
 * wires flyweight form/image/context references, then constructs the
 * context menus, panels, Alert/Allergy containers, Save/Cancel buttons
 * and the two grids.  Construction order matters: control and container
 * indices used by the typed accessors below (getControl(4), getGrid(0), ...)
 * are determined by the order of the addControl/addGrid calls here.
 */
protected void setContext(FormLoader loader, Form form, ims.framework.interfaces.IAppForm appForm, UIFactory factory, ims.framework.Context context, Boolean skipContextValidation, Integer startControlID, ims.framework.utils.SizeInfo runtimeSize, ims.framework.Control control, Integer startTabIndex) throws Exception
{
if(loader == null); // this is to avoid eclipse warning only.
if(factory == null); // this is to avoid eclipse warning only.
if(runtimeSize == null); // this is to avoid eclipse warning only.
if(appForm == null)
throw new RuntimeException("Invalid application form");
if(startControlID == null)
throw new RuntimeException("Invalid startControlID");
if(control == null); // this is to avoid eclipse warning only.
if(startTabIndex == null)
throw new RuntimeException("Invalid startTabIndex");
this.context = context;
this.componentIdentifier = startControlID.toString();
this.formInfo = form.getFormInfo();
this.globalContext = new GlobalContext(context);
// Validate the declared and mandatory context unless the caller opted out.
if(skipContextValidation == null || !skipContextValidation.booleanValue())
{
validateContext(context);
validateMandatoryContext(context);
}
super.setContext(form);
// Scale the runtime layout relative to the 848x632 design-time size.
ims.framework.utils.SizeInfo designSize = new ims.framework.utils.SizeInfo(848, 632);
if(runtimeSize == null)
runtimeSize = designSize;
form.setWidth(runtimeSize.getWidth());
form.setHeight(runtimeSize.getHeight());
super.setFormReferences(FormReferencesFlyweightFactory.getInstance().create(Forms.class));
super.setImageReferences(ImageReferencesFlyweightFactory.getInstance().create(Images.class));
super.setGlobalContext(ContextBridgeFlyweightFactory.getInstance().create(GlobalContextBridge.class, context, false));
super.setLocalContext(new LocalContext(context, form.getFormInfo(), componentIdentifier));
// Context Menus -- each item is only added when the logged-in role holds the matching menu-action right.
contextMenus = new ContextMenus();
contextMenus.Clinical.contextMenuEdischargeAlertsEtc = factory.createMenu(startControlID.intValue() + 1);
contextMenus.Clinical.contextMenuEdischargeAlertsEtcNewItem = factory.createMenuItem(startControlID.intValue() + 1, "New Alert", true, false, new Integer(102179), true, false);
if(factory.getUIEngine().getLoggedInRole().hasMenuActionRight(appForm, new ims.framework.MenuAction(4400001)))
contextMenus.Clinical.contextMenuEdischargeAlertsEtc.add(contextMenus.Clinical.contextMenuEdischargeAlertsEtcNewItem);
contextMenus.Clinical.contextMenuEdischargeAlertsEtcEditItem = factory.createMenuItem(startControlID.intValue() + 2, "Edit Alert", true, false, new Integer(102150), true, false);
if(factory.getUIEngine().getLoggedInRole().hasMenuActionRight(appForm, new ims.framework.MenuAction(4400002)))
contextMenus.Clinical.contextMenuEdischargeAlertsEtc.add(contextMenus.Clinical.contextMenuEdischargeAlertsEtcEditItem);
form.registerMenu(contextMenus.Clinical.contextMenuEdischargeAlertsEtc);
contextMenus.Clinical.contextMenuEdischargeAllergiesEtc = factory.createMenu(startControlID.intValue() + 2);
contextMenus.Clinical.contextMenuEdischargeAllergiesEtcNewItem = factory.createMenuItem(startControlID.intValue() + 3, "New Allergy", true, false, new Integer(102179), true, false);
if(factory.getUIEngine().getLoggedInRole().hasMenuActionRight(appForm, new ims.framework.MenuAction(4390001)))
contextMenus.Clinical.contextMenuEdischargeAllergiesEtc.add(contextMenus.Clinical.contextMenuEdischargeAllergiesEtcNewItem);
contextMenus.Clinical.contextMenuEdischargeAllergiesEtcEditItem = factory.createMenuItem(startControlID.intValue() + 4, "Edit Allergy", true, false, new Integer(102150), true, false);
if(factory.getUIEngine().getLoggedInRole().hasMenuActionRight(appForm, new ims.framework.MenuAction(4390002)))
contextMenus.Clinical.contextMenuEdischargeAllergiesEtc.add(contextMenus.Clinical.contextMenuEdischargeAllergiesEtcEditItem);
form.registerMenu(contextMenus.Clinical.contextMenuEdischargeAllergiesEtc);
// Panel Controls -- the "Alerts" and "Allergies" header panels.
RuntimeAnchoring anchoringHelper24 = new RuntimeAnchoring(designSize, runtimeSize, 8, 328, 832, 24, ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT);
super.addControl(factory.getControl(Panel.class, new Object[] { control, new Integer(startControlID.intValue() + 1026), new Integer(anchoringHelper24.getX()), new Integer(anchoringHelper24.getY()), new Integer(anchoringHelper24.getWidth()), new Integer(anchoringHelper24.getHeight()), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT,"Alerts", new Integer(1), ""}));
RuntimeAnchoring anchoringHelper25 = new RuntimeAnchoring(designSize, runtimeSize, 8, 0, 832, 24, ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT);
super.addControl(factory.getControl(Panel.class, new Object[] { control, new Integer(startControlID.intValue() + 1027), new Integer(anchoringHelper25.getX()), new Integer(anchoringHelper25.getY()), new Integer(anchoringHelper25.getWidth()), new Integer(anchoringHelper25.getHeight()), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT,"Allergies", new Integer(1), ""}));
// Container Classes -- nested Alert and Allergy detail containers.
RuntimeAnchoring anchoringHelper26 = new RuntimeAnchoring(designSize, runtimeSize, 24, 440, 808, 152, ims.framework.enumerations.ControlAnchoring.ALL);
Container m_ctnAlert = (Container)factory.getControl(Container.class, new Object[] { control, new Integer(startControlID.intValue() + 1028), new Integer(anchoringHelper26.getX()), new Integer(anchoringHelper26.getY()), new Integer(anchoringHelper26.getWidth()), new Integer(anchoringHelper26.getHeight()), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.ALL, null, new Boolean(false)});
addControl(m_ctnAlert);
ctnAlertContainer ctnAlert = (ctnAlertContainer)ContainerBridgeFlyweightFactory.getInstance().createContainerBridge(ctnAlertContainer.class, m_ctnAlert, factory);
ims.framework.utils.SizeInfo m_ctnAlertDesignSize = new ims.framework.utils.SizeInfo(808, 152);
ims.framework.utils.SizeInfo m_ctnAlertRuntimeSize = new ims.framework.utils.SizeInfo(anchoringHelper26.getWidth(), anchoringHelper26.getHeight());
ctnAlert.setContext(form, appForm, m_ctnAlert, loader, this.getImages(), contextMenus, startControlID, m_ctnAlertDesignSize, m_ctnAlertRuntimeSize, startTabIndex, skipContextValidation);
super.addContainer(ctnAlert);
RuntimeAnchoring anchoringHelper27 = new RuntimeAnchoring(designSize, runtimeSize, 24, 144, 816, 176, ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT);
Container m_ctnAllergy = (Container)factory.getControl(Container.class, new Object[] { control, new Integer(startControlID.intValue() + 1029), new Integer(anchoringHelper27.getX()), new Integer(anchoringHelper27.getY()), new Integer(anchoringHelper27.getWidth()), new Integer(anchoringHelper27.getHeight()), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT, null, new Boolean(false)});
addControl(m_ctnAllergy);
ctnAllergyContainer ctnAllergy = (ctnAllergyContainer)ContainerBridgeFlyweightFactory.getInstance().createContainerBridge(ctnAllergyContainer.class, m_ctnAllergy, factory);
ims.framework.utils.SizeInfo m_ctnAllergyDesignSize = new ims.framework.utils.SizeInfo(816, 176);
ims.framework.utils.SizeInfo m_ctnAllergyRuntimeSize = new ims.framework.utils.SizeInfo(anchoringHelper27.getWidth(), anchoringHelper27.getHeight());
ctnAllergy.setContext(form, appForm, m_ctnAllergy, loader, this.getImages(), contextMenus, startControlID, m_ctnAllergyDesignSize, m_ctnAllergyRuntimeSize, startTabIndex, skipContextValidation);
super.addContainer(ctnAllergy);
// Button Controls -- Save (initially hidden) and Cancel.
RuntimeAnchoring anchoringHelper28 = new RuntimeAnchoring(designSize, runtimeSize, 674, 600, 75, 23, ims.framework.enumerations.ControlAnchoring.BOTTOMRIGHT);
super.addControl(factory.getControl(Button.class, new Object[] { control, new Integer(startControlID.intValue() + 1030), new Integer(anchoringHelper28.getX()), new Integer(anchoringHelper28.getY()), new Integer(anchoringHelper28.getWidth()), new Integer(anchoringHelper28.getHeight()), new Integer(startTabIndex.intValue() + 25), ControlState.HIDDEN, ControlState.ENABLED, ims.framework.enumerations.ControlAnchoring.BOTTOMRIGHT, "Save", Boolean.FALSE, null, Boolean.FALSE, Boolean.TRUE, Boolean.FALSE, null, ims.framework.utils.Color.Default, ims.framework.utils.Color.Default }));
RuntimeAnchoring anchoringHelper29 = new RuntimeAnchoring(designSize, runtimeSize, 754, 600, 75, 23, ims.framework.enumerations.ControlAnchoring.BOTTOMRIGHT);
super.addControl(factory.getControl(Button.class, new Object[] { control, new Integer(startControlID.intValue() + 1031), new Integer(anchoringHelper29.getX()), new Integer(anchoringHelper29.getY()), new Integer(anchoringHelper29.getWidth()), new Integer(anchoringHelper29.getHeight()), new Integer(startTabIndex.intValue() + 26), ControlState.HIDDEN, ControlState.ENABLED, ims.framework.enumerations.ControlAnchoring.BOTTOMRIGHT, "Cancel", Boolean.FALSE, null, Boolean.FALSE, Boolean.FALSE, Boolean.FALSE, null, ims.framework.utils.Color.Default, ims.framework.utils.Color.Default }));
// Grid Controls -- Alerts grid (index 0) then Allergies grid (index 1).
RuntimeAnchoring anchoringHelper30 = new RuntimeAnchoring(designSize, runtimeSize, 24, 360, 808, 76, ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT);
Grid m_grdAlertsTemp = (Grid)factory.getControl(Grid.class, new Object[] { control, new Integer(startControlID.intValue() + 1032), new Integer(anchoringHelper30.getX()), new Integer(anchoringHelper30.getY()), new Integer(anchoringHelper30.getWidth()), new Integer(anchoringHelper30.getHeight()), new Integer(startTabIndex.intValue() + 14), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT,Boolean.TRUE, Boolean.FALSE, new Integer(24), Boolean.TRUE, contextMenus.Clinical.contextMenuEdischargeAlertsEtc, Boolean.FALSE, Boolean.FALSE, new Integer(0), null, Boolean.FALSE, Boolean.TRUE});
addControl(m_grdAlertsTemp);
grdAlertsGrid grdAlerts = (grdAlertsGrid)GridFlyweightFactory.getInstance().createGridBridge(grdAlertsGrid.class, m_grdAlertsTemp);
grdAlerts.addStringColumn("Date", 0, 0, 85, true, false, 0, 0, true, ims.framework.enumerations.CharacterCasing.NORMAL);
grdAlerts.addStringColumn("Category", 0, 0, 200, true, false, 0, 0, true, ims.framework.enumerations.CharacterCasing.NORMAL);
grdAlerts.addStringColumn("Alert", 0, 0, 200, true, false, 0, 0, true, ims.framework.enumerations.CharacterCasing.NORMAL);
grdAlerts.addStringColumn("Source", 0, 0, 170, true, false, 0, 0, true, ims.framework.enumerations.CharacterCasing.NORMAL);
// NOTE(review): this image column uses canGrow=false while the matching
// Allergies column below uses canGrow=true -- confirm whether intentional.
grdAlerts.addImageColumn(" ", 1, 1, 40, false, 0);
grdAlerts.addImageColumn(" ", 0, 0, 40, true, 0);
grdAlerts.addBoolColumn("Include", 0, 0, -1, false, true, 0, true);
super.addGrid(grdAlerts);
RuntimeAnchoring anchoringHelper31 = new RuntimeAnchoring(designSize, runtimeSize, 24, 32, 808, 112, ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT);
Grid m_grdAllergiesTemp = (Grid)factory.getControl(Grid.class, new Object[] { control, new Integer(startControlID.intValue() + 1033), new Integer(anchoringHelper31.getX()), new Integer(anchoringHelper31.getY()), new Integer(anchoringHelper31.getWidth()), new Integer(anchoringHelper31.getHeight()), new Integer(startTabIndex.intValue() + 1), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT,Boolean.TRUE, Boolean.FALSE, new Integer(24), Boolean.TRUE, contextMenus.Clinical.contextMenuEdischargeAllergiesEtc, Boolean.FALSE, Boolean.FALSE, new Integer(0), null, Boolean.FALSE, Boolean.TRUE});
addControl(m_grdAllergiesTemp);
grdAllergiesGrid grdAllergies = (grdAllergiesGrid)GridFlyweightFactory.getInstance().createGridBridge(grdAllergiesGrid.class, m_grdAllergiesTemp);
grdAllergies.addStringColumn("Date", 0, 0, 85, true, false, 0, 0, true, ims.framework.enumerations.CharacterCasing.NORMAL);
grdAllergies.addStringColumn("Allergen Description", 0, 0, 200, true, false, 0, 0, true, ims.framework.enumerations.CharacterCasing.NORMAL);
grdAllergies.addStringColumn("Reaction", 0, 0, 200, true, false, 0, 0, true, ims.framework.enumerations.CharacterCasing.NORMAL);
grdAllergies.addStringColumn("Source", 0, 0, 170, true, false, 0, 0, true, ims.framework.enumerations.CharacterCasing.NORMAL);
grdAllergies.addImageColumn(" ", 1, 1, 40, true, 0);
grdAllergies.addImageColumn(" ", 0, 0, 40, true, 0);
grdAllergies.addBoolColumn("Include", 0, 0, -1, false, true, 0, true);
super.addGrid(grdAllergies);
}
// Typed view over the flyweight form-name references installed in setContext.
public Forms getForms()
{
return (Forms)super.getFormReferences();
}
// Typed view over the flyweight image references installed in setContext.
public Images getImages()
{
return (Images)super.getImageReferences();
}
// Alert detail container; container index 0 (first addContainer call in setContext).
public ctnAlertContainer ctnAlert()
{
return (ctnAlertContainer)super.getContainer(0);
}
// Allergy detail container; container index 1 (second addContainer call in setContext).
public ctnAllergyContainer ctnAllergy()
{
return (ctnAllergyContainer)super.getContainer(1);
}
// "Save" button; control index 4, fixed by the addControl order in setContext.
public Button btnSave()
{
return (Button)super.getControl(4);
}
// "Cancel" button; control index 5, fixed by the addControl order in setContext.
public Button btnCancel()
{
return (Button)super.getControl(5);
}
// Alerts grid; grid index 0 (first addGrid call in setContext).
public grdAlertsGrid grdAlerts()
{
return (grdAlertsGrid)super.getGrid(0);
}
// Allergies grid; grid index 1 (second addGrid call in setContext).
public grdAllergiesGrid grdAllergies()
{
return (grdAllergiesGrid)super.getGrid(1);
}
/** Registry of the form names this component can navigate to, grouped by module. */
public static class Forms implements java.io.Serializable
{
private static final long serialVersionUID = 1L;
// Thin wrapper giving this class access to FormName's protected int constructor.
protected final class LocalFormName extends FormName
{
private static final long serialVersionUID = 1L;
private LocalFormName(int name)
{
super(name);
}
}
private Forms()
{
Core = new CoreForms();
}
/** Form names belonging to the Core module. */
public final class CoreForms implements java.io.Serializable
{
private static final long serialVersionUID = 1L;
private CoreForms()
{
// 102107 is the framework ID of the shared Yes/No dialog form.
YesNoDialog = new LocalFormName(102107);
}
public final FormName YesNoDialog;
}
public CoreForms Core;
}
public static class Images implements java.io.Serializable
{
private static final long serialVersionUID = 1L;
private final class ImageHelper extends ims.framework.utils.ImagePath
{
private static final long serialVersionUID = 1L;
private ImageHelper(int id, String path, Integer width, Integer height)
{
|
[
"\t\t\t\tsuper(id, path, width, height);"
] | 5,167
|
lcc
|
java
| null |
9315cf46a7b8a5186358aa90329c4cf7aa7e8815f99ea810
|
|
/*
* ome.testing
*
* Copyright 2006 University of Dundee. All rights reserved.
* Use is subject to license terms supplied in LICENSE.txt
*/
package ome.testing;
// Java imports
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import javax.sql.DataSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.support.rowset.SqlRowSet;
import org.springframework.jdbc.support.rowset.SqlRowSetMetaData;
// Application-internal dependencies
/**
* abstract data container for testing. Sub-classes can set whatever values it
* would like in <code>init()</code>. After the OMEData instance is inserted
* into the test class by Spring, it SHOULD not be changed, but this is a matter
* of opinion. Setting the same <code>seed</code> value for two independent
* Data instances is also assumed to create identical values.
*
* @author Josh Moore <a
* href="mailto:josh.moore@gmx.de">josh.moore@gmx.de</a>
* @version 1.0 <small> (<b>Internal version:</b> $Rev$ $Date$) </small>
* @since 1.0
*/
public class OMEData {
final static String emptyColl = "Collections may not be empty.\n"
+ "You are currently trying to run a test on an OME database\n"
+ "that does not appear to have the needed data.\n"
+ "\n"
+ "There must be at least one:\n"
+ "project,dataset,image,experimenter,classification,category,category group,image annotation and dataset annotation\n"
+ "\n"
+ "Testing results would be unpredictable without test data.\n"
+ "Please fill your database and retry.";
private static Logger log = LoggerFactory.getLogger(OMEData.class);
boolean initialized = false;
DataSource ds;
Map properties;
Map values = new HashMap();
long seed;
Random rnd;
String[] files = new String[] { "test_data.properties" };
// Injects the JDBC DataSource used by runSql(); typically wired by Spring.
public void setDataSource(DataSource dataSource) {
this.ds = dataSource;
}
// Builds test data from the default properties file (test_data.properties).
public OMEData() {
init();
}
// Builds test data from caller-supplied properties files instead of the default.
public OMEData(String[] files) {
this.files = files;
init();
}
// Parses the property files and seeds the RNG with the current time.
// NOTE(review): the class javadoc says equal seeds give identical values,
// but the seed is wall-clock here and there is no setter in this view —
// confirm how a fixed seed is meant to be supplied.
void init() {
properties = SqlPropertiesParser.parse(files);
seed = System.currentTimeMillis();
rnd = new Random(seed);
}
/* allows for storing arbitrary objects in data */
// Stores a value directly in the result cache under the given key,
// shadowing whatever the properties file would have produced for it.
public void put(String propertyKey, Object value) {
toCache(propertyKey, value);
}
/**
 * Resolves a property key to a list of values, caching the result.
 * A property whose value is already a List is returned as-is; a String
 * value is treated as SQL and executed via {@link #runSql(String)}.
 *
 * @param propertyKey key into the parsed properties files
 * @return cached/derived list, or null when the key is unknown
 * @throws RuntimeException when the property value is neither List nor String
 */
public List get(String propertyKey) {
    if (inCache(propertyKey)) {
        return (List) fromCache(propertyKey);
    }
    Object obj = properties.get(propertyKey);
    if (obj == null) {
        return null;
    } else if (obj instanceof List) {
        toCache(propertyKey, obj);
        return (List) obj;
    } else if (obj instanceof String) {
        String sql = (String) obj;
        List result = runSql(sql);
        toCache(propertyKey, result);
        return result;
    } else {
        // Fixed precedence bug: '+' binds tighter than '?:', so the original
        // evaluated ("Error..." + obj) == null (always false) and threw a
        // message containing only the class name, dropping the prefix.
        // obj is provably non-null in this branch.
        throw new RuntimeException("Error in properties. Not expecting "
                + obj.getClass().getName());
    }
}
/**
 * Draws up to {@code number} distinct elements from {@code l} at random
 * (without replacement), preserving no particular order.
 *
 * @return the picked elements; null when number is null or l is null/empty
 */
List getRandomNumber(List l, Number number) {
    if (number == null) {
        return null;
    }
    if (l == null || l.size() == 0) {
        log.warn(emptyColl);
        return null;
    }
    // Work on a copy so the caller's list is never mutated.
    List pool = new ArrayList(l);
    List picked = new ArrayList();
    while (!pool.isEmpty() && picked.size() < number.longValue()) {
        picked.add(pool.remove(randomChoice(pool.size())));
    }
    return picked;
}
// Returns at most `maximum` randomly chosen values for the property
// (null-safety for the missing key is handled inside getRandomNumber).
public List getMax(String propertyKey, int maximum) {
List l = get(propertyKey);
return getRandomNumber(l, new Integer(maximum));
}
/**
 * Returns roughly {@code percent} (0.0–1.0) of the property's values,
 * chosen at random.
 *
 * @return the sampled values, or null when the property is unknown
 */
public List getPercent(String propertyKey, double percent) {
    List l = get(propertyKey);
    // Fixed NPE: the original called l.size() before getRandomNumber's own
    // null check could run, so an unknown property key crashed instead of
    // returning null like getMax/getRandom do.
    if (l == null) {
        return null;
    }
    return getRandomNumber(l, new Double(l.size() * percent));
}
/**
 * Picks one random value of the property.
 *
 * @return the value, or null when the property is unknown or empty
 */
public Object getRandom(String propertyKey) {
    List one = getRandomNumber(get(propertyKey), new Integer(1));
    if (one == null || one.isEmpty()) {
        return null;
    }
    return one.get(0);
}
/**
 * Returns the first value of the property (deterministic, unlike getRandom).
 *
 * @return the first element, or null (with a warning) when missing/empty
 */
public Object getFirst(String propertyKey) {
    List values = get(propertyKey);
    if (values != null && values.size() > 0) {
        return values.get(0);
    }
    log.warn(emptyColl);
    return null;
}
// True when a value has already been resolved (or put) for this key.
boolean inCache(String key) {
return values.containsKey(key);
}
// Stores a resolved value, replacing any previous entry for the key.
void toCache(String key, Object value) {
values.put(key, value);
}
// Reads a previously cached value; null when absent (check inCache first).
Object fromCache(String key) {
return values.get(key);
}
/**
 * Executes the SQL and collects one entry per row: for a single-column
 * result set the column value itself, otherwise a Map from column name
 * to value.
 *
 * @param sql query to run against the injected DataSource
 * @return list of row values/maps, empty when the query matches nothing
 */
List runSql(String sql) {
    SqlRowSet rows = new JdbcTemplate(ds).queryForRowSet(sql);
    List collected = new ArrayList();
    while (rows.next()) {
        SqlRowSetMetaData meta = rows.getMetaData();
        if (meta.getColumnCount() > 1) {
            // Multi-column row: expose every column keyed by its name.
            String[] names = meta.getColumnNames();
            Map row = new HashMap();
            for (int i = 0; i < names.length; i++) {
                row.put(names[i], rows.getObject(names[i]));
            }
            collected.add(row);
        } else {
            // Single column: add the bare value (JDBC columns are 1-based).
            collected.add(rows.getObject(1));
        }
    }
    log.debug("SQL:" + sql + "\n\nResult:" + collected);
    return collected;
}
public int randomChoice(int size) {
|
[
" double value = (size - 1) * rnd.nextDouble();"
] | 669
|
lcc
|
java
| null |
0a5fee26c27c916f5e6a6d4e21655aac2bf96a3c71502cab
|
|
import hashlib
from django.db import models
from django.db.models import Q
from opencontext_py.apps.entities.entity.models import Entity
from opencontext_py.apps.ldata.linkannotations.models import LinkAnnotation
from opencontext_py.apps.ldata.linkentities.models import LinkEntity
from opencontext_py.apps.ocitems.assertions.models import Assertion
from opencontext_py.apps.ocitems.manifest.models import Manifest
from opencontext_py.apps.entities.uri.models import URImanagement
from opencontext_py.apps.ocitems.predicates.models import Predicate
from opencontext_py.apps.ldata.linkannotations.recursion import LinkRecursion
from opencontext_py.apps.ldata.linkannotations.equivalence import LinkEquivalence
class LinkAnnoManagement():
"""
Some useful methods for changing linked data annoations.
from opencontext_py.apps.ldata.linkannotations.manage import LinkAnnoManagement
lam = LinkAnnoManagement()
project_uuid = 'A5DDBEA2-B3C8-43F9-8151-33343CBDC857'
lam.make_von_den_driesch_equiv(project_uuid)
from opencontext_py.apps.ldata.linkannotations.manage import LinkAnnoManagement
lam = LinkAnnoManagement()
project_uuid = '81d1157d-28f4-46ff-98dd-94899c1688f8'
old_naa_proj_uuid = 'cbd24bbb-c6fc-44ed-bd67-6f844f120ad5'
lam.make_naa_annotations(project_uuid, old_naa_proj_uuid)
from opencontext_py.apps.ldata.linkannotations.manage import LinkAnnoManagement
lam = LinkAnnoManagement()
parent_uri = 'http://eol.org/pages/2195' # molluscs
child_uri = 'http://eol.org/pages/448836' # cuttlefish
lam.add_skos_hierarachy(parent_uri, child_uri)
"""
PRED_SBJ_IS_SUB_OF_OBJ = 'skos:broader' # default predicate for subject item is subordinate to object item
def __init__(self):
    # '0' marks annotations made outside any specific project;
    # 'manual' is the default provenance tag for hand-made edits.
    self.project_uuid = '0'
    self.source_id = 'manual'
def add_skos_hierarachy(self, parent_uri, child_uri):
    """Assert that child_uri is subordinate to parent_uri.

    Both URIs must already exist as LinkEntity rows, and the child must
    not yet have any parent; otherwise nothing is saved.
    """
    try:
        parent = LinkEntity.objects.get(uri=parent_uri)
    except LinkEntity.DoesNotExist:
        parent = False
    try:
        child = LinkEntity.objects.get(uri=child_uri)
    except LinkEntity.DoesNotExist:
        child = False
    if parent is False or child is False:
        print('Cannot find parent or child')
        return
    current_parents = LinkRecursion().get_entity_parents(child_uri)
    if len(current_parents) >= 1:
        # Refuse to re-parent an entity that already sits in a hierarchy.
        print('Child has parents: ' + str(current_parents))
        return
    la = LinkAnnotation()
    la.subject = child.uri  # the subordinate is the subject
    la.subject_type = 'uri'
    la.project_uuid = self.project_uuid
    la.source_id = self.source_id + '-hierarchy'
    la.predicate_uri = self.PRED_SBJ_IS_SUB_OF_OBJ
    la.object_uri = parent.uri  # the parent is the object
    la.save()
    print('Made: ' + child.uri + ' child of: ' + parent.uri)
def replace_hierarchy(self, old_parent, new_parent):
    """Re-parent hierarchy annotations from old_parent to new_parent.

    Children of old_parent (whether old_parent appears as the super-class
    object or as the super-class subject of the annotation) become
    children of new_parent. Returns True when new_parent dereferences to
    a known Entity, False otherwise (in which case nothing changes).
    """
    ok = False
    lequiv = LinkEquivalence()
    old_parent_ids = lequiv.get_identifier_list_variants(old_parent)
    p_for_superobjs = LinkAnnotation.PREDS_SBJ_IS_SUB_OF_OBJ
    preds_for_superobjs = lequiv.get_identifier_list_variants(p_for_superobjs)
    p_for_subobjs = LinkAnnotation.PREDS_SBJ_IS_SUPER_OF_OBJ
    preds_for_subobjs = lequiv.get_identifier_list_variants(p_for_subobjs)
    new_parent_entity_obj = Entity()
    found = new_parent_entity_obj.dereference(new_parent)
    if found:
        ok = True
        # Children are the subjects where the old parent is the superclass object.
        child_subs_by_superobjs = LinkAnnotation.objects\
            .filter(object_uri__in=old_parent_ids,
                    predicate_uri__in=preds_for_superobjs)
        for child_subj in child_subs_by_superobjs:
            del_hash_id = child_subj.hash_id
            # Point the object (the super class) at the new parent; delete the
            # old row first because hash_id is derived from the annotation.
            child_subj.object_uri = new_parent_entity_obj.uri
            child_subj.source_id = self.source_id
            LinkAnnotation.objects\
                .filter(hash_id=del_hash_id).delete()
            child_subj.save()
        # Children are the objects where the old parent is the superclass subject.
        child_objs_by_subobjs = LinkAnnotation.objects\
            .filter(subject__in=old_parent_ids,
                    predicate_uri__in=preds_for_subobjs)
        for child_obj in child_objs_by_subobjs:
            del_hash_id = child_obj.hash_id
            # BUG FIX: the original read uuid/uri/item_type from
            # 'new_parent_superobj' — the loop variable of the FIRST loop
            # (a LinkAnnotation, and unbound when that loop was empty).
            # The intended source is the dereferenced new-parent Entity.
            if isinstance(new_parent_entity_obj.uuid, str):
                child_obj.subject = new_parent_entity_obj.uuid
            else:
                child_obj.subject = new_parent_entity_obj.uri
            child_obj.subject_type = new_parent_entity_obj.item_type
            child_obj.source_id = self.source_id
            LinkAnnotation.objects\
                .filter(hash_id=del_hash_id).delete()
            child_obj.save()
    return ok
def replace_subject_uri(self,
                        old_subject_uri,
                        new_subject_uri):
    """Rewrite every annotation whose subject matches any identifier
    variant of old_subject_uri so it uses new_subject_uri instead.

    The old row is deleted only after the rewritten row saves cleanly.
    """
    variants = LinkEquivalence().get_identifier_list_variants(old_subject_uri)
    annos = LinkAnnotation.objects\
        .filter(subject__in=variants)
    print('Change subjects for annotations: ' + str(len(annos)))
    for anno in annos:
        stale_hash = anno.hash_id
        anno.subject = new_subject_uri
        try:
            anno.save()
        except Exception as error:
            print("Error: " + str(error))
            continue
        LinkAnnotation.objects\
            .filter(hash_id=stale_hash).delete()
def replace_predicate_uri(self,
                          old_pred_uri,
                          new_pred_uri):
    """Rewrite every annotation using any identifier variant of
    old_pred_uri as its predicate so it uses new_pred_uri instead.

    The old row is deleted only after the rewritten row saves cleanly.
    """
    lequiv = LinkEquivalence()
    old_pred_list = lequiv.get_identifier_list_variants(old_pred_uri)
    la_preds = LinkAnnotation.objects\
        .filter(predicate_uri__in=old_pred_list)
    print('Change predicates for annotations: ' + str(len(la_preds)))
    for la_pred in la_preds:
        old_hash = la_pred.hash_id
        new_la = la_pred
        new_la.predicate_uri = new_pred_uri
        try:
            new_la.save()
            ok = True
        except Exception as error:
            ok = False
            # Consistency fix: replace_subject_uri / replace_object_uri
            # report save failures; this method silently swallowed them.
            print("Error: " + str(error))
        if ok:
            LinkAnnotation.objects\
                .filter(hash_id=old_hash).delete()
def replace_predicate_uri_narrow(self,
                                 old_pred_uri,
                                 new_pred_uri,
                                 limits_dict):
    """ replaces annotations using
        a given old_predicate with a new one,
        limited to annotations matching limits_dict:
          'object_uri_root' (required; method is a no-op without it),
          'subject_type'    (optional exact match),
          'data_type'       (optional; subject must be a Predicate row
                             with this data_type)
    """
    if 'object_uri_root' in limits_dict:
        object_uri_root = limits_dict['object_uri_root']
        # self.make_alt_uri is defined elsewhere in this class —
        # presumably returns the http/https (or prefixed) variant of the
        # URI; TODO confirm against the full class source.
        alt_old_pred = self.make_alt_uri(old_pred_uri)
        la_objs = LinkAnnotation.objects\
            .filter(Q(predicate_uri=old_pred_uri) |
                    Q(predicate_uri=alt_old_pred))\
            .filter(object_uri__startswith=object_uri_root)
        print('Change predicates for annotations: ' + str(len(la_objs)))
        for la_obj in la_objs:
            ok_edit = True
            if 'subject_type' in limits_dict:
                if la_obj.subject_type != limits_dict['subject_type']:
                    ok_edit = False
            if 'data_type' in limits_dict:
                data_type = limits_dict['data_type']
                predicate = False
                try:  # try to find the predicate with a given data_type
                    predicate = Predicate.objects.get(uuid=la_obj.subject)
                except Predicate.DoesNotExist:
                    print('Cant find predicate: ' + str(la_obj.subject))
                    predicate = False
                if predicate is False:
                    ok_edit = False
                else:
                    if predicate.data_type != data_type:
                        # NOTE(review): a data_type mismatch is reported but
                        # does NOT set ok_edit = False, so the edit still
                        # happens — confirm whether that is intended.
                        print(str(predicate.data_type) + ' wrong data_type in: ' + str(la_obj.subject))
            if ok_edit:
                print('Editing annotation to subject: ' + str(la_obj.subject))
                new_la = la_obj
                new_la.predicate_uri = new_pred_uri
                # Delete first: hash_id is derived from the annotation row.
                LinkAnnotation.objects\
                    .filter(hash_id=la_obj.hash_id).delete()
                new_la.save()
            else:
                print('NO EDIT to subject: ' + str(la_obj.subject))
def replace_object_uri(self,
                       old_object_uri,
                       new_object_uri):
    """Rewrite every annotation whose object_uri matches any identifier
    variant of old_object_uri so it uses new_object_uri instead.

    The old row is deleted only after the rewritten row saves cleanly.
    """
    variants = LinkEquivalence().get_identifier_list_variants(old_object_uri)
    annos = LinkAnnotation.objects\
        .filter(object_uri__in=variants)
    print('Change object_uri for annotations: ' + str(len(annos)))
    for anno in annos:
        stale_hash = anno.hash_id
        anno.object_uri = new_object_uri
        try:
            anno.save()
        except Exception as error:
            print("Error: " + str(error))
            continue
        LinkAnnotation.objects\
            .filter(hash_id=stale_hash).delete()
def make_von_den_driesch_equiv(self,
                               project_uuid,
                               equiv_pred='skos:closeMatch'):
    """ makes a skos:closeMatch equivalence relation
        between entities in the zooarch measurement
        ontology and predicates in a project

        Matching is purely by identical label against the
        open-context-zooarch vocabulary; each match also gets an
        rdfs:range annotation marking the unit as millimetres.
    """
    # Only numeric (xsd:double) predicates can be measurements.
    preds = Predicate.objects\
        .filter(project_uuid=project_uuid,
                data_type='xsd:double')
    for pred in preds:
        man_obj = False
        try:
            # try to find the manifest item
            man_obj = Manifest.objects.get(uuid=pred.uuid)
        except Manifest.DoesNotExist:
            man_obj = False
        if man_obj is not False:
            # Look for a zooarch vocabulary entity with the same label.
            l_ents = LinkEntity.objects\
                .filter(label=man_obj.label,
                        vocab_uri='http://opencontext.org/vocabularies/open-context-zooarch/')[:1]
            if len(l_ents) > 0:
                # a Match! Now let's make a close match assertion
                uri = l_ents[0].uri
                print(str(man_obj.label) + ' matches ' + uri)
                la = LinkAnnotation()
                la.subject = man_obj.uuid  # the subordinate is the subject
                la.subject_type = man_obj.item_type
                la.project_uuid = man_obj.project_uuid
                la.source_id = 'label-match'
                la.predicate_uri = equiv_pred
                la.object_uri = uri
                la.save()
                # save also that the unit of measurement is in MM
                # (wikidata Q174789 = millimetre)
                la = LinkAnnotation()
                la.subject = man_obj.uuid  # the subordinate is the subject
                la.subject_type = man_obj.item_type
                la.project_uuid = man_obj.project_uuid
                la.source_id = 'label-match'
                la.predicate_uri = 'http://www.w3.org/2000/01/rdf-schema#range'
                la.object_uri = 'http://www.wikidata.org/wiki/Q174789'
                la.save()
def make_naa_annotations(self,
project_uuid,
naa_annotated_proj_uuid):
""" makes annotations to describe NAA
(Neutron Activation Analysis) attributes by
copying annoations from another project
with NAA attributes.
"""
old_pred_uuids = []
old_preds = Predicate.objects\
.filter(project_uuid=naa_annotated_proj_uuid,
data_type='xsd:double')
for old_pred in old_preds:
old_pred_uuids.append(old_pred.uuid)
old_pred_mans = Manifest.objects\
.filter(uuid__in=old_pred_uuids,
project_uuid=naa_annotated_proj_uuid)\
.order_by('label')
for old_pred_man in old_pred_mans:
new_man_pred = None
if len(old_pred_man.label) < 4:
# this has a short label, so more likely about a chemical
# element
new_man_preds = Manifest.objects\
.filter(item_type='predicates',
project_uuid=project_uuid,
label=old_pred_man.label)[:1]
if len(new_man_preds) > 0:
# the new project has a predicate with a matching label
new_man_pred = new_man_preds[0]
if new_man_pred is not None:
# we have a match between a predicate label in the old NAA project
# and the new project
print('-----------------------------')
print('Copy annotations from: ' + old_pred_man.label + ' (' + old_pred_man.uuid + ')')
print('To: ' + new_man_pred.uuid)
print('-----------------------------')
old_link_annos = LinkAnnotation.objects\
.filter(subject=old_pred_man.uuid)
for old_link_anno in old_link_annos:
new_link_anno = old_link_anno
new_link_anno.hash_id = None
new_link_anno.subject = new_man_pred.uuid
new_link_anno.subject_type = new_man_pred.item_type
new_link_anno.project_uuid = new_man_pred.project_uuid
new_link_anno.source_id = 'naa-link-annotations-method'
try:
new_link_anno.save()
except:
pass
preds = Predicate.objects\
.filter(project_uuid=project_uuid,
data_type='xsd:double')
for pred in preds:
man_obj = False
try:
# try to find the manifest item
man_obj = Manifest.objects.get(uuid=pred.uuid)
except Manifest.DoesNotExist:
man_obj = False
if man_obj is not False:
l_ents = LinkEntity.objects\
.filter(label=man_obj.label,
vocab_uri='http://opencontext.org/vocabularies/open-context-zooarch/')[:1]
if len(l_ents) > 0:
# a Match! Now let's make a close match assertion
uri = l_ents[0].uri
print(str(man_obj.label) + ' matches ' + uri)
|
[
" la = LinkAnnotation()"
] | 1,128
|
lcc
|
python
| null |
aa0355191116e9da144d262dfbaa191cd76f4cdbf90c55e2
|
|
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) Under one or more
contributor license agreements. See the NOTICE file distributed with
this work for Additional information regarding copyright ownership.
The ASF licenses this file to You Under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed Under the License is distributed on an "AS Is" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations Under the License.
==================================================================== */
namespace AL.Utils.NPOI.HSSF.Record
{
using System;
using System.Text;
using AL.Utils.NPOI.Util;
/**
* Title: Extended Format Record
* Description: Probably one of the more complex records. There are two breeds:
* Style and Cell.
*
* It should be noted that fields in the extended format record are
* somewhat arbitrary. Almost all of the fields are bit-level, but
* we name them as best as possible by functional Group. In some
* places this Is better than others.
*
*
* REFERENCE: PG 426 Microsoft Excel 97 Developer's Kit (ISBN: 1-57231-498-2)
* @author Andrew C. Oliver (acoliver at apache dot org)
* @version 2.0-pre
*/
public class ExtendedFormatRecord : StandardRecord
{
public const short sid = 0xE0;
// null constant
public const short NULL = unchecked((short)0xfff0);
// xf type
public const short XF_STYLE = 1;
public const short XF_CELL = 0;
// borders
public const short NONE = 0x0;
public const short THIN = 0x1;
public const short MEDIUM = 0x2;
public const short DASHED = 0x3;
public const short DOTTED = 0x4;
public const short THICK = 0x5;
public const short DOUBLE = 0x6;
public const short HAIR = 0x7;
public const short MEDIUM_DASHED = 0x8;
public const short DASH_DOT = 0x9;
public const short MEDIUM_DASH_DOT = 0xA;
public const short DASH_DOT_DOT = 0xB;
public const short MEDIUM_DASH_DOT_DOT = 0xC;
public const short SLANTED_DASH_DOT = 0xD;
// alignment
public const short GENERAL = 0x0;
public const short LEFT = 0x1;
public const short CENTER = 0x2;
public const short RIGHT = 0x3;
public const short FILL = 0x4;
public const short JUSTIFY = 0x5;
public const short CENTER_SELECTION = 0x6;
// vertical alignment
public const short VERTICAL_TOP = 0x0;
public const short VERTICAL_CENTER = 0x1;
public const short VERTICAL_BOTTOM = 0x2;
public const short VERTICAL_JUSTIFY = 0x3;
// fill
public const short NO_FILL = 0;
public const short SOLID_FILL = 1;
public const short FINE_DOTS = 2;
public const short ALT_BARS = 3;
public const short SPARSE_DOTS = 4;
public const short THICK_HORZ_BANDS = 5;
public const short THICK_VERT_BANDS = 6;
public const short THICK_BACKWARD_DIAG = 7;
public const short THICK_FORWARD_DIAG = 8;
public const short BIG_SPOTS = 9;
public const short BRICKS = 10;
public const short THIN_HORZ_BANDS = 11;
public const short THIN_VERT_BANDS = 12;
public const short THIN_BACKWARD_DIAG = 13;
public const short THIN_FORWARD_DIAG = 14;
public const short SQUARES = 15;
public const short DIAMONDS = 16;
// fields in BOTH style and Cell XF records
private short field_1_font_index; // not bit-mapped
private short field_2_format_index; // not bit-mapped
// field_3_cell_options bit map
static private BitField _locked = BitFieldFactory.GetInstance(0x0001);
static private BitField _hidden = BitFieldFactory.GetInstance(0x0002);
static private BitField _xf_type = BitFieldFactory.GetInstance(0x0004);
static private BitField _123_prefix = BitFieldFactory.GetInstance(0x0008);
static private BitField _parent_index = BitFieldFactory.GetInstance(0xFFF0);
private short field_3_cell_options;
// field_4_alignment_options bit map
static private BitField _alignment = BitFieldFactory.GetInstance(0x0007);
static private BitField _wrap_text = BitFieldFactory.GetInstance(0x0008);
static private BitField _vertical_alignment = BitFieldFactory.GetInstance(0x0070);
static private BitField _justify_last = BitFieldFactory.GetInstance(0x0080);
static private BitField _rotation = BitFieldFactory.GetInstance(0xFF00);
private short field_4_alignment_options;
// field_5_indention_options
static private BitField _indent =
BitFieldFactory.GetInstance(0x000F);
static private BitField _shrink_to_fit =
BitFieldFactory.GetInstance(0x0010);
static private BitField _merge_cells =
BitFieldFactory.GetInstance(0x0020);
static private BitField _Reading_order =
BitFieldFactory.GetInstance(0x00C0);
// apparently bits 8 and 9 are Unused
static private BitField _indent_not_parent_format =
BitFieldFactory.GetInstance(0x0400);
static private BitField _indent_not_parent_font =
BitFieldFactory.GetInstance(0x0800);
static private BitField _indent_not_parent_alignment =
BitFieldFactory.GetInstance(0x1000);
static private BitField _indent_not_parent_border =
BitFieldFactory.GetInstance(0x2000);
static private BitField _indent_not_parent_pattern =
BitFieldFactory.GetInstance(0x4000);
static private BitField _indent_not_parent_cell_options =
BitFieldFactory.GetInstance(0x8000);
private short field_5_indention_options;
// field_6_border_options bit map
static private BitField _border_left = BitFieldFactory.GetInstance(0x000F);
static private BitField _border_right = BitFieldFactory.GetInstance(0x00F0);
static private BitField _border_top = BitFieldFactory.GetInstance(0x0F00);
static private BitField _border_bottom = BitFieldFactory.GetInstance(0xF000);
private short field_6_border_options;
// all three of the following attributes are palette options
// field_7_palette_options bit map
static private BitField _left_border_palette_idx =
BitFieldFactory.GetInstance(0x007F);
static private BitField _right_border_palette_idx =
BitFieldFactory.GetInstance(0x3F80);
static private BitField _diag =
BitFieldFactory.GetInstance(0xC000);
private short field_7_palette_options;
// field_8_adtl_palette_options bit map
static private BitField _top_border_palette_idx =
BitFieldFactory.GetInstance(0x0000007F);
static private BitField _bottom_border_palette_idx =
BitFieldFactory.GetInstance(0x00003F80);
static private BitField _adtl_diag =
BitFieldFactory.GetInstance(0x001fc000);
static private BitField _adtl_diag_line_style =
BitFieldFactory.GetInstance(0x01e00000);
// apparently bit 25 Is Unused
static private BitField _adtl_Fill_pattern =
BitFieldFactory.GetInstance(unchecked((int)0xfc000000));
private int field_8_adtl_palette_options; // Additional to avoid 2
// field_9_fill_palette_options bit map
static private BitField _fill_foreground = BitFieldFactory.GetInstance(0x007F);
static private BitField _fill_background = BitFieldFactory.GetInstance(0x3f80);
// apparently bits 15 and 14 are Unused
private short field_9_fill_palette_options;
/// <summary>
/// Creates an empty ExtendedFormatRecord; all fields start at their
/// default (zero) values.
/// </summary>
public ExtendedFormatRecord()
{
}
/// <summary>
/// Constructs an ExtendedFormat record from the stream. The nine reads
/// below follow the BIFF XF record layout exactly — do not reorder.
/// </summary>
/// <param name="in1">the RecordInputStream to read the record from</param>
public ExtendedFormatRecord(RecordInputStream in1)
{
field_1_font_index = in1.ReadShort();
field_2_format_index = in1.ReadShort();
field_3_cell_options = in1.ReadShort();
field_4_alignment_options = in1.ReadShort();
field_5_indention_options = in1.ReadShort();
field_6_border_options = in1.ReadShort();
field_7_palette_options = in1.ReadShort();
field_8_adtl_palette_options = in1.ReadInt(); // only 32-bit field in the record
field_9_fill_palette_options = in1.ReadShort();
}
/// <summary>
/// Copies all nine style fields from another ExtendedFormatRecord onto
/// this one, so this record holds the same style options.
/// </summary>
/// <remarks>
/// If the source record comes from a different workbook the copied font
/// and format indices refer to that workbook's tables — remap them
/// yourself.
/// </remarks>
/// <param name="source">record to copy style information from</param>
public void CloneStyleFrom(ExtendedFormatRecord source)
{
field_1_font_index = source.field_1_font_index;
field_2_format_index = source.field_2_format_index;
field_3_cell_options = source.field_3_cell_options;
field_4_alignment_options = source.field_4_alignment_options;
field_5_indention_options = source.field_5_indention_options;
field_6_border_options = source.field_6_border_options;
field_7_palette_options = source.field_7_palette_options;
field_8_adtl_palette_options = source.field_8_adtl_palette_options;
field_9_fill_palette_options = source.field_9_fill_palette_options;
}
/// <summary>Index into the workbook's FONT records (0-based).</summary>
public short FontIndex
{
    get { return field_1_font_index; }
    set { field_1_font_index = value; }
}
/// <summary>Index into the workbook's FORMAT records (0-based).</summary>
public short FormatIndex
{
    get { return field_2_format_index; }
    set { field_2_format_index = value; }
}
/// <summary>
/// Raw cell-options bitmask; individual bits are exposed through
/// IsLocked, IsHidden, XFType, _123Prefix and ParentIndex.
/// </summary>
public short CellOptions
{
    get { return field_3_cell_options; }
    set { field_3_cell_options = value; }
}
/// <summary>Whether the cell is locked (bit 0 of the cell options).</summary>
public bool IsLocked
{
    get { return _locked.IsSet(field_3_cell_options); }
    set { field_3_cell_options = _locked.SetShortBoolean(field_3_cell_options, value); }
}
/// <summary>Whether the cell is hidden (bit 1 of the cell options).</summary>
public bool IsHidden
{
    get { return _hidden.IsSet(field_3_cell_options); }
    set { field_3_cell_options = _hidden.SetShortBoolean(field_3_cell_options, value); }
}
/// <summary>Record kind: XF_CELL (0) or XF_STYLE (1).</summary>
public short XFType
{
    get { return _xf_type.GetShortValue(field_3_cell_options); }
    set { field_3_cell_options = _xf_type.SetShortValue(field_3_cell_options, value); }
}
/// <summary>
/// Lotus 1-2-3 compatibility prefix flag (bit 3); a historical holdover.
/// </summary>
public bool _123Prefix
{
    get { return _123_prefix.IsSet(field_3_cell_options); }
    set { field_3_cell_options = _123_prefix.SetShortBoolean(field_3_cell_options, value); }
}
/// <summary>
/// For cell XFs the parent style index (usually 0/normal); style XFs
/// should use NULL.
/// </summary>
public short ParentIndex
{
    get { return _parent_index.GetShortValue(field_3_cell_options); }
    set { field_3_cell_options = _parent_index.SetShortValue(field_3_cell_options, value); }
}
/// <summary>
/// Raw alignment-options bitmask; see Alignment, WrapText,
/// VerticalAlignment, JustifyLast and Rotation for the individual bits.
/// </summary>
public short AlignmentOptions
{
    get { return field_4_alignment_options; }
    set { field_4_alignment_options = value; }
}
/// <summary>Horizontal alignment (GENERAL..CENTER_SELECTION constants).</summary>
public short Alignment
{
    get { return _alignment.GetShortValue(field_4_alignment_options); }
    set { field_4_alignment_options = _alignment.SetShortValue(field_4_alignment_options, value); }
}
/// <summary>Whether text wraps inside the cell.</summary>
public bool WrapText
{
    get { return _wrap_text.IsSet(field_4_alignment_options); }
    set { field_4_alignment_options = _wrap_text.SetShortBoolean(field_4_alignment_options, value); }
}
/// <summary>Vertical alignment (VERTICAL_TOP..VERTICAL_JUSTIFY constants).</summary>
public short VerticalAlignment
{
    get { return _vertical_alignment.GetShortValue(field_4_alignment_options); }
    set { field_4_alignment_options = _vertical_alignment.SetShortValue(field_4_alignment_options, value); }
}
/// <summary>
/// Justify-last flag; documented as used only in Far-East versions
/// (always 0 for US builds).
/// </summary>
public short JustifyLast
{
    get { return _justify_last.GetShortValue(field_4_alignment_options); }
    set { field_4_alignment_options = _justify_last.SetShortValue(field_4_alignment_options, value); }
}
/// <summary>Text rotation stored in the high byte of the alignment options.</summary>
public short Rotation
{
    get { return _rotation.GetShortValue(field_4_alignment_options); }
    set { field_4_alignment_options = _rotation.SetShortValue(field_4_alignment_options, value); }
}
/// <summary>
/// Raw indention-options bitmask; individual bits are exposed through
/// Indent, ShrinkToFit, MergeCells, ReadingOrder and the
/// IsIndentNotParent* properties.
/// </summary>
public short IndentionOptions
{
    get { return field_5_indention_options; }
    set { field_5_indention_options = value; }
}
/// <summary>Indention amount (low 4 bits; units not documented here).</summary>
public short Indent
{
    get { return _indent.GetShortValue(field_5_indention_options); }
    set { field_5_indention_options = _indent.SetShortValue(field_5_indention_options, value); }
}
/// <summary>Whether text shrinks to fit the cell.</summary>
public bool ShrinkToFit
{
    get { return _shrink_to_fit.IsSet(field_5_indention_options); }
    set { field_5_indention_options = _shrink_to_fit.SetShortBoolean(field_5_indention_options, value); }
}
/// <summary>Whether cells are merged.</summary>
public bool MergeCells
{
    get { return _merge_cells.IsSet(field_5_indention_options); }
    set { field_5_indention_options = _merge_cells.SetShortBoolean(field_5_indention_options, value); }
}
/// <summary>
/// Reading order for Far-East versions (0 = context, 1 = left-to-right,
/// 2 = right-to-left); always 0 in US builds.
/// </summary>
public short ReadingOrder
{
    get { return _Reading_order.GetShortValue(field_5_indention_options); }
    set { field_5_indention_options = _Reading_order.SetShortValue(field_5_indention_options, value); }
}
/// <summary>Use this XF's format rather than the parent XF's.</summary>
public bool IsIndentNotParentFormat
{
    get { return _indent_not_parent_format.IsSet(field_5_indention_options); }
    set { field_5_indention_options = _indent_not_parent_format.SetShortBoolean(field_5_indention_options, value); }
}
/// <summary>Use this XF's font rather than the parent XF's.</summary>
public bool IsIndentNotParentFont
{
    get { return _indent_not_parent_font.IsSet(field_5_indention_options); }
    set { field_5_indention_options = _indent_not_parent_font.SetShortBoolean(field_5_indention_options, value); }
}
/// <summary>Use this XF's alignment rather than the parent XF's.</summary>
public bool IsIndentNotParentAlignment
{
    get { return _indent_not_parent_alignment.IsSet(field_5_indention_options); }
    set { field_5_indention_options = _indent_not_parent_alignment.SetShortBoolean(field_5_indention_options, value); }
}
/// <summary>Use this XF's border rather than the parent XF's.</summary>
public bool IsIndentNotParentBorder
{
    get { return _indent_not_parent_border.IsSet(field_5_indention_options); }
    set { field_5_indention_options = _indent_not_parent_border.SetShortBoolean(field_5_indention_options, value); }
}
/// <summary>
/// Use this XF's fill pattern (foreground/background) rather than the
/// parent XF's.
/// </summary>
public bool IsIndentNotParentPattern
{
    get { return _indent_not_parent_pattern.IsSet(field_5_indention_options); }
    set { field_5_indention_options = _indent_not_parent_pattern.SetShortBoolean(field_5_indention_options, value); }
}
/// <summary>
/// Use this XF's locking/hidden flags rather than the parent XF's.
/// </summary>
public bool IsIndentNotParentCellOptions
{
    get { return _indent_not_parent_cell_options.IsSet(field_5_indention_options); }
    set { field_5_indention_options = _indent_not_parent_cell_options.SetShortBoolean(field_5_indention_options, value); }
}
/// <summary>
/// Raw border-options bitmask; see BorderLeft/Right/Top/Bottom for the
/// individual 4-bit line styles.
/// </summary>
public short BorderOptions
{
    get { return field_6_border_options; }
    set { field_6_border_options = value; }
}
/// <summary>
/// Get the borderline style for the left border
/// </summary>
public short BorderLeft
{
get{return _border_left.GetShortValue(field_6_border_options);}
set
{
field_6_border_options =
_border_left.SetShortValue(field_6_border_options, value);
}
}
/// <summary>
/// Get the borderline style for the right border
/// </summary>
public short BorderRight
{
get{return _border_right.GetShortValue(field_6_border_options);}
set
{
field_6_border_options =
_border_right.SetShortValue(field_6_border_options, value);
}
}
/// <summary>
/// Get the borderline style for the top border
/// </summary>
public short BorderTop
{
get{return _border_top.GetShortValue(field_6_border_options);}
set {
field_6_border_options =_border_top.SetShortValue(field_6_border_options, value);
}
}
/// <summary>
/// Get the borderline style for the bottom border
/// </summary>
public short BorderBottom
{
get{return _border_bottom.GetShortValue(field_6_border_options);}
set {
field_6_border_options =_border_bottom.SetShortValue(field_6_border_options, value);
}
}
/// <summary>
/// The raw palette options bitmask (the individual palette-index
/// properties pack into this field).
/// </summary>
public short PaletteOptions
{
    get { return field_7_palette_options; }
    set { field_7_palette_options = value; }
}
/// <summary>
/// Palette index for the left border color.
/// </summary>
public short LeftBorderPaletteIdx
{
    get { return _left_border_palette_idx.GetShortValue(field_7_palette_options); }
    set { field_7_palette_options = _left_border_palette_idx.SetShortValue(field_7_palette_options, value); }
}
/// <summary>
/// Palette index for the right border color.
/// </summary>
public short RightBorderPaletteIdx
{
    get { return _right_border_palette_idx.GetShortValue(field_7_palette_options); }
    set { field_7_palette_options = _right_border_palette_idx.SetShortValue(field_7_palette_options, value); }
}
/// <summary>
/// Not sure what this Is for (maybe Fill lines?) 1 = down, 2 = up, 3 = both, 0 for none..
/// </summary>
public short Diag
{
    get { return _diag.GetShortValue(field_7_palette_options); }
    set { field_7_palette_options = _diag.SetShortValue(field_7_palette_options, value); }
}
/// <summary>
/// The raw additional palette options bitmask (the individual bit
/// properties pack into this field).
/// </summary>
public int AdtlPaletteOptions
{
    get { return field_8_adtl_palette_options; }
    set { field_8_adtl_palette_options = value; }
}
/// <summary>
/// Palette index for the top border color.
/// </summary>
public short TopBorderPaletteIdx
{
    get { return (short)_top_border_palette_idx.GetValue(field_8_adtl_palette_options); }
    set { field_8_adtl_palette_options = _top_border_palette_idx.SetValue(field_8_adtl_palette_options, value); }
}
/// <summary>
/// Palette index for the bottom border color.
/// </summary>
public short BottomBorderPaletteIdx
{
    get { return (short)_bottom_border_palette_idx.GetValue(field_8_adtl_palette_options); }
    set { field_8_adtl_palette_options = _bottom_border_palette_idx.SetValue(field_8_adtl_palette_options, value); }
}
/// <summary>
/// Palette index for diagonal borders.
/// </summary>
public short AdtlDiag
{
    get { return (short)_adtl_diag.GetValue(field_8_adtl_palette_options); }
    set { field_8_adtl_palette_options = _adtl_diag.SetValue(field_8_adtl_palette_options, value); }
}
/// <summary>
/// Line style of the diagonal border.
/// </summary>
public short AdtlDiagLineStyle
{
    get { return (short)_adtl_diag_line_style.GetValue(field_8_adtl_palette_options); }
    set { field_8_adtl_palette_options = _adtl_diag_line_style.SetValue(field_8_adtl_palette_options, value); }
}
/// <summary>
/// The additional fill pattern.
/// </summary>
public short AdtlFillPattern
{
    get { return (short)_adtl_Fill_pattern.GetValue(field_8_adtl_palette_options); }
    set { field_8_adtl_palette_options = _adtl_Fill_pattern.SetValue(field_8_adtl_palette_options, value); }
}
/// <summary>
/// The raw fill palette options bitmask (FillForeground/FillBackground
/// pack into this field).
/// </summary>
public short FillPaletteOptions
{
    get { return field_9_fill_palette_options; }
    set { field_9_fill_palette_options = value; }
}
/// <summary>
/// Palette color index of the fill foreground.
/// </summary>
public short FillForeground
{
    get { return _fill_foreground.GetShortValue(field_9_fill_palette_options); }
    set { field_9_fill_palette_options = _fill_foreground.SetShortValue(field_9_fill_palette_options, value); }
}
/// <summary>
/// Palette color index of the fill background.
/// </summary>
public short FillBackground
{
    get { return _fill_background.GetShortValue(field_9_fill_palette_options); }
    set { field_9_fill_palette_options = _fill_background.SetShortValue(field_9_fill_palette_options, value); }
}
public override String ToString()
{
StringBuilder buffer = new StringBuilder();
buffer.Append("[EXTENDEDFORMAT]\n");
if (XFType == XF_STYLE)
{
buffer.Append(" STYLE_RECORD_TYPE\n");
}
else if (XFType == XF_CELL)
{
buffer.Append(" CELL_RECORD_TYPE\n");
}
buffer.Append(" .fontindex = ")
.Append(StringUtil.ToHexString(FontIndex)).Append("\n");
buffer.Append(" .formatindex = ")
.Append(StringUtil.ToHexString(FormatIndex)).Append("\n");
buffer.Append(" .celloptions = ")
.Append(StringUtil.ToHexString(CellOptions)).Append("\n");
buffer.Append(" .Islocked = ").Append(IsLocked)
.Append("\n");
buffer.Append(" .Ishidden = ").Append(IsHidden)
.Append("\n");
buffer.Append(" .recordtype= ")
.Append(StringUtil.ToHexString(XFType)).Append("\n");
buffer.Append(" .parentidx = ")
.Append(StringUtil.ToHexString(ParentIndex)).Append("\n");
buffer.Append(" .alignmentoptions= ")
.Append(StringUtil.ToHexString(AlignmentOptions)).Append("\n");
buffer.Append(" .alignment = ").Append(Alignment)
.Append("\n");
buffer.Append(" .wraptext = ").Append(WrapText)
.Append("\n");
buffer.Append(" .valignment= ")
.Append(StringUtil.ToHexString(VerticalAlignment)).Append("\n");
buffer.Append(" .justlast = ")
.Append(StringUtil.ToHexString(JustifyLast)).Append("\n");
buffer.Append(" .rotation = ")
.Append(StringUtil.ToHexString(Rotation)).Append("\n");
buffer.Append(" .indentionoptions= ")
.Append(StringUtil.ToHexString(IndentionOptions)).Append("\n");
buffer.Append(" .indent = ")
.Append(StringUtil.ToHexString(Indent)).Append("\n");
|
[
" buffer.Append(\" .shrinktoft= \").Append(ShrinkToFit)"
] | 2,537
|
lcc
|
csharp
| null |
36b953ee4b12b6a4faa25120b4dd3ff257e71cd2c3c9e231
|
|
/*
* CP51932.cs - Japanese EUC-JP code page.
*
* It is based on CP932.cs from Portable.NET
*
* Author:
* Atsushi Enomoto <atsushi@ximian.com>
*
* Below are original (CP932.cs) copyright lines
*
* (C)2004 Novell Inc.
*
* Copyright (c) 2002 Southern Storm Software, Pty Ltd
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
/*
Well, there looks no jis.table source. Thus, it seems like it is
generated from text files from Unicode Home Page such like
ftp://ftp.unicode.org/Public/MAPPINGS/OBSOLETE/EASTASIA/JIS/JIS0208.TXT
However, it is non-normative and in Japan it is contains many problem.
FIXME: Some characters such as 0xFF0B (wide "plus") are missing in
that table.
*/
/*
0x00-0x1F, 0x7F : control characters
0x20-0x7E : ASCII
0xA1A1-0xFEFE : Kanji (precisely, both bytes contain only A1-FE)
0x8EA1-0x8EDF : half-width Katakana
0x8FA1A1-0x8FFEFE : Complemental Kanji
*/
namespace I18N.CJK
{
using System;
using System.Text;
using I18N.Common;
#if DISABLE_UNSAFE
using MonoEncoder = I18N.Common.MonoSafeEncoder;
using MonoEncoding = I18N.Common.MonoSafeEncoding;
#endif
[Serializable]
public class CP51932 : MonoEncoding
{
	// Magic number used by Windows for the EUC-JP code page.
	private const int EUC_JP_CODE_PAGE = 51932;

	// Constructor.
	public CP51932 () : base (EUC_JP_CODE_PAGE, 932)
	{
	}

#if !DISABLE_UNSAFE
	// Encoding work is delegated to a fresh CP51932Encoder per call.
	public unsafe override int GetByteCountImpl (char* chars, int count)
	{
		return new CP51932Encoder (this).GetByteCountImpl (chars, count, true);
	}

	public unsafe override int GetBytesImpl (char* chars, int charCount, byte* bytes, int byteCount)
	{
		return new CP51932Encoder (this).GetBytesImpl (chars, charCount, bytes, byteCount, true);
	}
#else
	public override int GetByteCount (char [] chars, int index, int length)
	{
		return new CP51932Encoder (this).GetByteCount (chars, index, length, true);
	}

	public override int GetBytes(char[] chars, int charIndex, int charCount, byte[] bytes, int byteIndex)
	{
		return new CP51932Encoder (this).GetBytes (chars, charIndex, charCount, bytes, byteIndex, true);
	}
#endif

	// Decoding work is delegated to a fresh CP51932Decoder per call.
	public override int GetCharCount (byte [] bytes, int index, int count)
	{
		return new CP51932Decoder ().GetCharCount (
			bytes, index, count, true);
	}

	public override int GetChars (
		byte [] bytes, int byteIndex, int byteCount,
		char [] chars, int charIndex)
	{
		return new CP51932Decoder ().GetChars (bytes,
			byteIndex, byteCount, chars, charIndex, true);
	}

	// Get the maximum number of bytes needed to encode a
	// specified number of characters (worst case 3 bytes per char
	// for the 0x8F-prefixed complemental kanji plane).
	public override int GetMaxByteCount(int charCount)
	{
		if(charCount < 0)
		{
			throw new ArgumentOutOfRangeException
				("charCount",
				 Strings.GetString("ArgRange_NonNegative"));
		}
		return charCount * 3;
	}

	// Get the maximum number of characters needed to decode a
	// specified number of bytes (worst case one char per byte).
	public override int GetMaxCharCount(int byteCount)
	{
		if(byteCount < 0)
		{
			throw new ArgumentOutOfRangeException
				("byteCount",
				 Strings.GetString ("ArgRange_NonNegative"));
		}
		return byteCount;
	}

	public override Encoder GetEncoder ()
	{
		return new CP51932Encoder (this);
	}

	public override Decoder GetDecoder ()
	{
		return new CP51932Decoder ();
	}

#if !ECMA_COMPAT
	// Get the mail body name for this encoding.
	public override String BodyName {
		get { return "euc-jp"; }
	}

	// Get the human-readable name for this encoding.
	public override String EncodingName {
		get { return "Japanese (EUC)"; }
	}

	// Get the mail agent header name for this encoding.
	public override String HeaderName {
		get { return "euc-jp"; }
	}

	// Determine if this encoding can be displayed in a Web browser.
	public override bool IsBrowserDisplay {
		get { return true; }
	}

	// Determine if this encoding can be saved from a Web browser.
	public override bool IsBrowserSave {
		get { return true; }
	}

	// Determine if this encoding can be displayed in a mail/news agent.
	public override bool IsMailNewsDisplay {
		get { return true; }
	}

	// Determine if this encoding can be saved from a mail/news agent.
	public override bool IsMailNewsSave {
		get { return true; }
	}

	// Get the IANA-preferred Web name for this encoding.
	public override String WebName {
		get { return "euc-jp"; }
	}
#endif // !ECMA_COMPAT
// FIX: the class's closing brace used to sit INSIDE the #if !ECMA_COMPAT
// region, so building with ECMA_COMPAT defined removed the brace and the
// file no longer compiled. It now follows the #endif.
} // CP51932
public class CP51932Encoder : MonoEncoder
{
// Constructor: the encoder keeps a reference to its owning encoding
// (used by the MonoEncoder base for fallback handling).
public CP51932Encoder (MonoEncoding encoding)
	: base (encoding)
{
}
#if !DISABLE_UNSAFE
// Get the number of bytes needed to encode a character buffer.
// This is a counting pre-pass that mirrors the range logic of
// GetBytesImpl: 1 byte for ASCII, 2 bytes for mapped double-byte code
// points; any character outside the handled ranges is counted as a
// single byte. NOTE(review): surrogate pairs and the 0x8F (3-byte)
// plane are not counted here — confirm against the decoder tables.
public unsafe override int GetByteCountImpl (
	char* chars, int count, bool refresh)
{
	// Determine the length of the final output.
	int index = 0;
	int length = 0;
	int ch, value;
	// JIS lookup tables provided by the shared JISConvert instance.
	byte [] cjkToJis = JISConvert.Convert.cjkToJis;
	byte [] extraToJis = JISConvert.Convert.extraToJis;
	while (count > 0) {
		ch = chars [index++];
		--count;
		++length; // every input char yields at least one byte
		if (ch < 0x0080) {
			// Character maps to itself.
			continue;
		} else if (ch < 0x0100) {
			// Check for special Latin 1 characters that
			// can be mapped to double-byte code points.
			if(ch == 0x00A2 || ch == 0x00A3 || ch == 0x00A7 ||
			   ch == 0x00A8 || ch == 0x00AC || ch == 0x00B0 ||
			   ch == 0x00B1 || ch == 0x00B4 || ch == 0x00B6 ||
			   ch == 0x00D7 || ch == 0x00F7)
			{
				++length;
			}
		} else if (ch >= 0x0391 && ch <= 0x0451) {
			// Greek subset characters (always double-byte).
			++length;
		} else if (ch >= 0x2010 && ch <= 0x9FA5) {
			// This range contains the bulk of the CJK set.
			// Each table entry is a 16-bit little-endian JIS code;
			// values below 0x0100 mean "no double-byte mapping".
			value = (ch - 0x2010) * 2;
			value = ((int) (cjkToJis[value])) | (((int)(cjkToJis[value + 1])) << 8);
			if(value >= 0x0100)
				++length;
		} else if(ch >= 0xFF01 && ch < 0xFF60) {
			// This range contains extra (fullwidth form) characters.
			value = (ch - 0xFF01) * 2;
			value = ((int)(extraToJis[value])) |
					(((int)(extraToJis[value + 1])) << 8);
			if(value >= 0x0100)
				++length;
		} else if(ch >= 0xFF60 && ch <= 0xFFA0) {
			++length; // half-width kana: 0x8E prefix + 1 byte
		}
	}
	// Return the length to the caller.
	return length;
}
// Get the bytes that result from encoding a character buffer.
public unsafe override int GetBytesImpl (
char* chars, int charCount, byte* bytes, int byteCount, bool refresh)
{
int charIndex = 0;
int byteIndex = 0;
int end = charCount;
// Convert the characters into their byte form.
int posn = byteIndex;
int byteLength = byteCount;
int ch, value;
byte[] cjkToJis = JISConvert.Convert.cjkToJis;
byte[] greekToJis = JISConvert.Convert.greekToJis;
byte[] extraToJis = JISConvert.Convert.extraToJis;
for (int i = charIndex; i < end; i++, charCount--) {
ch = chars [i];
if (posn >= byteLength) {
throw new ArgumentException (Strings.GetString ("Arg_InsufficientSpace"), "bytes");
}
if (ch < 0x0080) {
// Character maps to itself.
bytes[posn++] = (byte)ch;
continue;
} else if (ch >= 0x0391 && ch <= 0x0451) {
// Greek subset characters.
value = (ch - 0x0391) * 2;
value = ((int)(greekToJis[value])) |
(((int)(greekToJis[value + 1])) << 8);
} else if (ch >= 0x2010 && ch <= 0x9FA5) {
// This range contains the bulk of the CJK set.
value = (ch - 0x2010) * 2;
value = ((int) (cjkToJis[value])) |
(((int)(cjkToJis[value + 1])) << 8);
} else if (ch >= 0xFF01 && ch <= 0xFF60) {
// This range contains extra characters,
// including half-width katakana.
value = (ch - 0xFF01) * 2;
value = ((int) (extraToJis [value])) |
(((int) (extraToJis [value + 1])) << 8);
} else if (ch >= 0xFF60 && ch <= 0xFFA0) {
|
[
"\t\t\t\tvalue = ch - 0xFF60 + 0x8EA0;"
] | 1,301
|
lcc
|
csharp
| null |
774fe32bf5c3e50a3060d52bef095025decb114d4ab2a37b
|
|
# -*- coding: utf-8 -*-
# This code is part of Amoco
# Copyright (C) 2021 Axel Tillequin (bdcht3@gmail.com)
# published under GPLv2 license
from amoco.arch.tricore import env
from amoco.arch.core import *
# -------------------------------------------------------
# from TriCore TC1.6.2 core architecture manual V1.2.2
# (32-bit Unified Processor Core), 2020-01-15
# define all except FPU instructions
# -------------------------------------------------------
# List of instruction specifications for this ISA module; presumably
# populated by the @ispec decorators below (amoco.arch.core) -- TODO confirm.
ISPECS = []
@ispec("32<[ disp1(16) disp2(8) {6d} ]", mnemonic="CALL")
@ispec("32<[ disp1(16) disp2(8) {61} ]", mnemonic="FCALL")
@ispec("32<[ disp1(16) disp2(8) {1d} ]", mnemonic="J")
@ispec("32<[ disp1(16) disp2(8) {5d} ]", mnemonic="JL")
def tricore_branch(obj, disp1, disp2):
v = env.cst(((disp2<<16)+disp1)<<1,24)
obj.operands = [disp.signextend(32)]
obj.type = type_control_flow
@ispec("32<[ disp1(16) disp2(8) {ed} ]", mnemonic="CALLA")
@ispec("32<[ disp1(16) disp2(8) {e1} ]", mnemonic="FCALLA")
@ispec("32<[ disp1(16) disp2(8) {9d} ]", mnemonic="JA")
@ispec("32<[ disp1(16) disp2(8) {dd} ]", mnemonic="JLA")
def tricore_branch(obj, disp1, disp2):
v = env.cst((disp2<<16)+disp1,24)
addr = composer([env.bit0,v[0:20],env.cst(0,7),v[20:24]])
obj.operands = [addr]
obj.type = type_control_flow
@ispec("32<[ ---- {00} ---- ---- a(4) {2d} ]", mnemonic="CALLI")
@ispec("32<[ ---- {01} ---- ---- a(4) {2d} ]", mnemonic="FCALLI")
@ispec("32<[ ---- {03} ---- ---- a(4) {2d} ]", mnemonic="JI")
@ispec("32<[ ---- {02} ---- ---- a(4) {2d} ]", mnemonic="JLI")
def tricore_branchI(obj, a):
src = env.A[a]
obj.operands = [src]
obj.type = type_control_flow
@ispec("16<[ disp(8) {5c} ]", mnemonic="CALL")
@ispec("16<[ disp(8) {3c} ]", mnemonic="J")
@ispec("16<[ disp(8) {ee} ]", mnemonic="JNZ")
@ispec("16<[ disp(8) {6e} ]", mnemonic="JZ")
def tricore_branch(obj, disp):
disp = env.cst(disp<<1,8)
obj.operands = [disp.signextend(32)]
obj.type = type_control_flow
@ispec("32<[ ---- 0000000 const9(9) ---- {ad} ]", mnemonic="BISR")
@ispec("32<[ ---- 0000100 const9(9) ---- {ad} ]", mnemonic="SYSCALL")
def tricore_system(obj, const9):
obj.operands = [env.cst(const9,9)]
obj.type = type_system
@ispec("32<[ c(4) {1c} ---- b(4) ---- {0b} ]", mnemonic="ABS")
@ispec("32<[ c(4) {5c} ---- b(4) ---- {0b} ]", mnemonic="ABS_B")
@ispec("32<[ c(4) {7c} ---- b(4) ---- {0b} ]", mnemonic="ABS_H")
@ispec("32<[ c(4) {1d} ---- b(4) ---- {0b} ]", mnemonic="ABSS")
@ispec("32<[ c(4) {7d} ---- b(4) ---- {0b} ]", mnemonic="ABSS_H")
@ispec("32<[ c(4) {1f} ---- b(4) ---- {0b} ]", mnemonic="MOV")
def tricore_dd_arithmetic(obj, c, b):
src = env.D[b]
dst = env.D[c]
obj.operands = [dst, src]
obj.type = type_data_processing
@ispec("32<[ c(4) {80} ---- b(4) ---- {0b} ]", mnemonic="MOV")
def tricore_dd_arithmetic(obj, c, b):
src = env.D[b]
dst = env.E[c]
obj.operands = [dst, src.signextend(64)]
obj.type = type_data_processing
@ispec("32<[ c(4) {81} ---- b(4) a(4) {0b} ]", mnemonic="MOV")
def tricore_dd_arithmetic(obj, c, b, a):
src2 = env.D[b]
dst = env.E[c]
obj.operands = [dst, composer([src2,src1])]
obj.type = type_data_processing
@ispec("32<[ c(4) {0e} ---- b(4) a(4) {0b} ]", mnemonic="ABSDIF")
@ispec("32<[ c(4) {4e} ---- b(4) a(4) {0b} ]", mnemonic="ABSDIF_B")
@ispec("32<[ c(4) {6e} ---- b(4) a(4) {0b} ]", mnemonic="ABSDIF_H")
@ispec("32<[ c(4) {0f} ---- b(4) a(4) {0b} ]", mnemonic="ABSDIFS")
@ispec("32<[ c(4) {6f} ---- b(4) a(4) {0b} ]", mnemonic="ABSDIFS_H")
@ispec("32<[ c(4) {00} ---- b(4) a(4) {0b} ]", mnemonic="ADD")
@ispec("32<[ c(4) {40} ---- b(4) a(4) {0b} ]", mnemonic="ADD_B")
@ispec("32<[ c(4) {60} ---- b(4) a(4) {0b} ]", mnemonic="ADD_H")
@ispec("32<[ c(4) {05} ---- b(4) a(4) {0b} ]", mnemonic="ADDC")
@ispec("32<[ c(4) {02} ---- b(4) a(4) {0b} ]", mnemonic="ADDS")
@ispec("32<[ c(4) {62} ---- b(4) a(4) {0b} ]", mnemonic="ADDS_H")
@ispec("32<[ c(4) {63} ---- b(4) a(4) {0b} ]", mnemonic="ADDS_HU")
@ispec("32<[ c(4) {03} ---- b(4) a(4) {0b} ]", mnemonic="ADDS_U")
@ispec("32<[ c(4) {04} ---- b(4) a(4) {0b} ]", mnemonic="ADDX")
@ispec("32<[ c(4) {08} ---- b(4) a(4) {0f} ]", mnemonic="AND")
@ispec("32<[ c(4) {20} ---- b(4) a(4) {0b} ]", mnemonic="AND_EQ")
@ispec("32<[ c(4) {24} ---- b(4) a(4) {0b} ]", mnemonic="AND_GE")
@ispec("32<[ c(4) {25} ---- b(4) a(4) {0b} ]", mnemonic="AND_GE_U")
@ispec("32<[ c(4) {22} ---- b(4) a(4) {0b} ]", mnemonic="AND_LT")
@ispec("32<[ c(4) {23} ---- b(4) a(4) {0b} ]", mnemonic="AND_LT_U")
@ispec("32<[ c(4) {21} ---- b(4) a(4) {0b} ]", mnemonic="AND_NE")
@ispec("32<[ c(4) {0e} ---- b(4) a(4) {0f} ]", mnemonic="ANDN")
@ispec("32<[ c(4) {10} ---- b(4) a(4) {0b} ]", mnemonic="EQ")
@ispec("32<[ c(4) {50} ---- b(4) a(4) {0b} ]", mnemonic="EQ_B")
@ispec("32<[ c(4) {70} ---- b(4) a(4) {0b} ]", mnemonic="EQ_H")
@ispec("32<[ c(4) {90} ---- b(4) a(4) {0b} ]", mnemonic="EQ_W")
@ispec("32<[ c(4) {56} ---- b(4) a(4) {0b} ]", mnemonic="EQANY_B")
@ispec("32<[ c(4) {76} ---- b(4) a(4) {0b} ]", mnemonic="EQANY_H")
@ispec("32<[ c(4) {14} ---- b(4) a(4) {0b} ]", mnemonic="GE")
@ispec("32<[ c(4) {15} ---- b(4) a(4) {0b} ]", mnemonic="GE_U")
@ispec("32<[ c(4) {12} ---- b(4) a(4) {0b} ]", mnemonic="LT")
@ispec("32<[ c(4) {13} ---- b(4) a(4) {0b} ]", mnemonic="LT_U")
@ispec("32<[ c(4) {52} ---- b(4) a(4) {0b} ]", mnemonic="LT_B")
@ispec("32<[ c(4) {53} ---- b(4) a(4) {0b} ]", mnemonic="LT_BU")
@ispec("32<[ c(4) {72} ---- b(4) a(4) {0b} ]", mnemonic="LT_H")
@ispec("32<[ c(4) {73} ---- b(4) a(4) {0b} ]", mnemonic="LT_HU")
@ispec("32<[ c(4) {92} ---- b(4) a(4) {0b} ]", mnemonic="LT_W")
@ispec("32<[ c(4) {93} ---- b(4) a(4) {0b} ]", mnemonic="LT_WU")
@ispec("32<[ c(4) {1a} ---- b(4) a(4) {0b} ]", mnemonic="MAX")
@ispec("32<[ c(4) {1b} ---- b(4) a(4) {0b} ]", mnemonic="MAX_U")
@ispec("32<[ c(4) {5a} ---- b(4) a(4) {0b} ]", mnemonic="MAX_B")
@ispec("32<[ c(4) {5b} ---- b(4) a(4) {0b} ]", mnemonic="MAX_BU")
@ispec("32<[ c(4) {7a} ---- b(4) a(4) {0b} ]", mnemonic="MAX_H")
@ispec("32<[ c(4) {7b} ---- b(4) a(4) {0b} ]", mnemonic="MAX_HU")
@ispec("32<[ c(4) {18} ---- b(4) a(4) {0b} ]", mnemonic="MIN")
@ispec("32<[ c(4) {19} ---- b(4) a(4) {0b} ]", mnemonic="MIN_U")
@ispec("32<[ c(4) {58} ---- b(4) a(4) {0b} ]", mnemonic="MIN_B")
@ispec("32<[ c(4) {59} ---- b(4) a(4) {0b} ]", mnemonic="MIN_BU")
@ispec("32<[ c(4) {78} ---- b(4) a(4) {0b} ]", mnemonic="MIN_H")
@ispec("32<[ c(4) {79} ---- b(4) a(4) {0b} ]", mnemonic="MIN_HU")
@ispec("32<[ c(4) {09} ---- b(4) a(4) {0f} ]", mnemonic="NAND")
@ispec("32<[ c(4) {11} ---- b(4) a(4) {0b} ]", mnemonic="NE")
@ispec("32<[ c(4) {0b} ---- b(4) a(4) {0f} ]", mnemonic="NOR")
@ispec("32<[ c(4) {0a} ---- b(4) a(4) {0f} ]", mnemonic="OR")
@ispec("32<[ c(4) {27} ---- b(4) a(4) {0b} ]", mnemonic="OR_EQ")
@ispec("32<[ c(4) {2b} ---- b(4) a(4) {0b} ]", mnemonic="OR_GE")
@ispec("32<[ c(4) {2c} ---- b(4) a(4) {0b} ]", mnemonic="OR_GE_U")
@ispec("32<[ c(4) {29} ---- b(4) a(4) {0b} ]", mnemonic="OR_LT")
@ispec("32<[ c(4) {2a} ---- b(4) a(4) {0b} ]", mnemonic="OR_LT_U")
@ispec("32<[ c(4) {28} ---- b(4) a(4) {0b} ]", mnemonic="OR_NE")
@ispec("32<[ c(4) {0f} ---- b(4) a(4) {0f} ]", mnemonic="ORN")
@ispec("32<[ c(4) {00} ---- b(4) a(4) {0f} ]", mnemonic="SH")
@ispec("32<[ c(4) {37} ---- b(4) a(4) {0b} ]", mnemonic="SH_EQ")
@ispec("32<[ c(4) {3b} ---- b(4) a(4) {0b} ]", mnemonic="SH_GE")
@ispec("32<[ c(4) {3c} ---- b(4) a(4) {0b} ]", mnemonic="SH_GE_U")
@ispec("32<[ c(4) {40} ---- b(4) a(4) {0f} ]", mnemonic="SH_H")
@ispec("32<[ c(4) {39} ---- b(4) a(4) {0b} ]", mnemonic="SH_LT")
@ispec("32<[ c(4) {3a} ---- b(4) a(4) {0b} ]", mnemonic="SH_LT_U")
@ispec("32<[ c(4) {38} ---- b(4) a(4) {0b} ]", mnemonic="SH_NE")
@ispec("32<[ c(4) {01} ---- b(4) a(4) {0f} ]", mnemonic="SHA")
@ispec("32<[ c(4) {41} ---- b(4) a(4) {0f} ]", mnemonic="SHA_H")
@ispec("32<[ c(4) {02} ---- b(4) a(4) {0f} ]", mnemonic="SHAS")
@ispec("32<[ c(4) {08} ---- b(4) a(4) {0b} ]", mnemonic="SUB")
@ispec("32<[ c(4) {48} ---- b(4) a(4) {0b} ]", mnemonic="SUB_B")
@ispec("32<[ c(4) {68} ---- b(4) a(4) {0b} ]", mnemonic="SUB_H")
@ispec("32<[ c(4) {0d} ---- b(4) a(4) {0b} ]", mnemonic="SUBC")
@ispec("32<[ c(4) {0a} ---- b(4) a(4) {0b} ]", mnemonic="SUBS")
@ispec("32<[ c(4) {0b} ---- b(4) a(4) {0b} ]", mnemonic="SUBS_U")
@ispec("32<[ c(4) {6a} ---- b(4) a(4) {0b} ]", mnemonic="SUBS_H")
@ispec("32<[ c(4) {6b} ---- b(4) a(4) {0b} ]", mnemonic="SUBS_HU")
@ispec("32<[ c(4) {0c} ---- b(4) a(4) {0b} ]", mnemonic="SUBX")
@ispec("32<[ c(4) {0d} ---- b(4) a(4) {0f} ]", mnemonic="XNOR")
@ispec("32<[ c(4) {0c} ---- b(4) a(4) {0f} ]", mnemonic="XOR")
@ispec("32<[ c(4) {2f} ---- b(4) a(4) {0b} ]", mnemonic="XOR_EQ")
@ispec("32<[ c(4) {30} ---- b(4) a(4) {0b} ]", mnemonic="XOR_NE")
def tricore_ddd_arithmetic(obj, c, b, a):
src1 = env.D[a]
src2 = env.D[b]
dst = env.D[c]
obj.operands = [dst, src1, src2]
obj.type = type_data_processing
@ispec("32<[ c(4) {40} ---- b(4) a(4) {01} ]", mnemonic="EQ_A")
@ispec("32<[ c(4) {43} ---- b(4) a(4) {01} ]", mnemonic="GE_A")
@ispec("32<[ c(4) {42} ---- b(4) a(4) {01} ]", mnemonic="LT_A")
@ispec("32<[ c(4) {41} ---- b(4) a(4) {01} ]", mnemonic="NE_A")
def tricore_daa_arithmetic(obj, c, b, a):
src1 = env.A[a]
src2 = env.A[b]
dst = env.D[c]
obj.operands = [dst, src1, src2]
obj.type = type_data_processing
@ispec("32<[ c(4) {63} ---- b(4) ---- {01} ]", mnemonic="MOV_A", _dst=env.A, _src=env.D)
@ispec("32<[ c(4) {00} ---- b(4) ---- {01} ]", mnemonic="MOV_AA", _dst=env.A, _src=env.A)
@ispec("32<[ c(4) {4c} ---- b(4) ---- {01} ]", mnemonic="MOV_D", _dst=env.D, _src=env.A)
def tricore_daa_arithmetic(obj, c, b, _dst, _src):
dst = _dst[c]
src = _src[b]
obj.operands = [dst, src]
obj.type = type_data_processing
@ispec("32<[ c(4) {48} ---- ---- a(4) {01} ]", mnemonic="EQZ_A")
@ispec("32<[ c(4) {49} ---- ---- a(4) {01} ]", mnemonic="NEZ_A")
def tricore_da_arithmetic(obj, c, a):
src1 = env.A[a]
dst = env.D[c]
obj.operands = [dst, src1]
obj.type = type_data_processing
@ispec("32<[ c(4) {01} --00 b(4) a(4) {4b} ]", mnemonic="BMERGE")
def tricore_ddd_arithmetic(obj, c, b, a):
src1 = env.D[a]
src2 = env.D[b]
dst = env.D[c]
obj.operands = [dst, src1, src2]
obj.type = type_data_processing
@ispec("32<[ c(4) {06} --00 b(4) a(4) {4b} ]", mnemonic="CRC32_B")
@ispec("32<[ c(4) {03} --00 b(4) a(4) {4b} ]", mnemonic="CRC32B_W")
@ispec("32<[ c(4) {03} --00 b(4) a(4) {4b} ]", mnemonic="CRC32L_W")
def tricore_crc32(obj, c, b, a):
src1 = env.D[a]
src2 = env.D[b]
dst = env.D[c]
obj.operands = [dst, src2, src1]
obj.type = type_data_processing
@ispec("32<[ c(4) {20} --01 b(4) a(4) {4b} ]", mnemonic="DIV")
@ispec("32<[ c(4) {21} --01 b(4) a(4) {4b} ]", mnemonic="DIV_U")
@ispec("32<[ c(4) {5a} --00 b(4) a(4) {4b} ]", mnemonic="DVINIT_B")
@ispec("32<[ c(4) {4a} --00 b(4) a(4) {4b} ]", mnemonic="DVINIT_BU")
@ispec("32<[ c(4) {3a} --00 b(4) a(4) {4b} ]", mnemonic="DVINIT_H")
@ispec("32<[ c(4) {2a} --00 b(4) a(4) {4b} ]", mnemonic="DVINIT_HU")
@ispec("32<[ c(4) {1a} --00 b(4) a(4) {4b} ]", mnemonic="DVINIT")
@ispec("32<[ c(4) {0a} --00 b(4) a(4) {4b} ]", mnemonic="DVINIT_U")
def tricore_edd_arithmetic(obj, c, b, a):
src1 = env.D[a]
src2 = env.D[b]
if c%2:
raise InstructionError(obj)
dst = env.E[c]
obj.operands = [dst, src1, src2]
obj.type = type_data_processing
@ispec("32<[ c(4) d(4) 100 ----- b(4) a(4) {17} ]", mnemonic="DEXTR")
def tricore_dddc(obj, c, d, b, a):
shift = env.D[d]
src1 = env.D[a]
src2 = env.D[b]
dst = env.D[c]
obj.operands = [dst, src1, src2, shift]
obj.type = type_data_processing
@ispec("32<[ c(4) d(4) 010 ----- ---- a(4) {17} ]", mnemonic="EXTR")
@ispec("32<[ c(4) d(4) 011 ----- ---- a(4) {17} ]", mnemonic="EXTR_U")
def tricore_extr(obj, c, d, a):
if d%2:
raise InstructionError(obj)
width = env.E[d][32:37]
src1 = env.D[a]
dst = env.D[c]
obj.operands = [dst, src1, width]
obj.type = type_data_processing
@ispec("32<[ c(4) d(4) 000 0--00 ---- a(4) {6b} ]", mnemonic="PACK")
def tricore_extr(obj, c, d, a):
if d%2:
raise InstructionError(obj)
src1 = env.E[d]
src2 = env.D[a]
dst = env.D[c]
obj.operands = [dst, src1, src2]
obj.type = type_data_processing
@ispec("32<[ c(4) {08} -- 00 ---- a(4) {4b} ]", mnemonic="UNPACK")
def tricore_extr(obj, c, d, a):
src = env.D[a]
dst = env.E[c]
obj.operands = [dst, src]
obj.type = type_data_processing
@ispec("32<[ c(4) {02} -- 00 ---- a(4) {4b} ]", mnemonic="PARITY")
@ispec("32<[ c(4) {22} -- 00 ---- a(4) {4b} ]", mnemonic="POPCNT_W")
def tricore_extr(obj, c, d, a):
src = env.D[a]
dst = env.D[c]
obj.operands = [dst, src]
obj.type = type_data_processing
@ispec("32<[ c(4) pos(5) 00 ----- b(4) a(4) {77} ]", mnemonic="DEXTR")
def tricore_dextr(obj, c, pos, b, a):
src1 = env.D[a]
src2 = env.D[b]
dst = env.D[c]
obj.operands = [dst, src1, src2, env.cst(pos,5)]
obj.type = type_data_processing
@ispec("32<[ c(4) pos(5) 10 width(5) ---- a(4) {37} ]", mnemonic="EXTR")
@ispec("32<[ c(4) pos(5) 11 width(5) ---- a(4) {37} ]", mnemonic="EXTR_U")
def tricore_extr(obj, c, pos, width, a):
src1 = env.D[a]
dst = env.D[c]
obj.operands = [dst, src1, env.cst(pos,5), env.cst(width,5)]
obj.type = type_data_processing
@ispec("32<[ c(4) pos(5) 01 width(5) const(4) ---- {b7} ]", mnemonic="IMASK")
def tricore_imask(obj, c, pos, width, const):
if c%2:
raise InstructionError(obj)
dst = env.E[c]
obj.operands = [dst, env.cst(const,4), env.cst(pos,5), env.cst(width,5)]
obj.type = type_data_processing
@ispec("32<[ c(4) d(4) 001 width(5) const(4) ---- {d7} ]", mnemonic="IMASK")
def tricore_imask(obj, c, d, width, const):
src2 = env.D[d]
if c%2:
raise InstructionError(obj)
dst = env.E[c]
obj.operands = [dst, env.cst(const,4), src2, env.cst(width,5)]
obj.type = type_data_processing
@ispec("32<[ c(4) pos(5) 01 width(5) b(4) ---- {37} ]", mnemonic="IMASK")
def tricore_imask(obj, c, pos, width, b):
src1 = env.D[b]
if c%2:
raise InstructionError(obj)
dst = env.E[c]
obj.operands = [dst, src1, env.cst(pos,5), env.cst(width,5)]
obj.type = type_data_processing
@ispec("32<[ c(4) d(4) 001 width(5) b(4) ---- {57} ]", mnemonic="IMASK")
def tricore_imask(obj, c, d, width, b):
src1 = env.D[b]
src2 = env.D[d]
if c%2:
raise InstructionError(obj)
dst = env.E[c]
obj.operands = [dst, src1, src2, env.cst(width,5)]
obj.type = type_data_processing
@ispec("32<[ c(4) pos(5) 00 width(5) const(4) a(4) {b7} ]", mnemonic="INSERT")
def tricore_imask(obj, c, pos, width, const, a):
dst = env.D[c]
src1 = env.D[a]
obj.operands = [dst, src1, env.cst(const,4), env.cst(pos,5), env.cst(width,5)]
obj.type = type_data_processing
@ispec("32<[ c(4) d(4) 000 ----- const(4) a(4) {97} ]", mnemonic="INSERT")
def tricore_imask(obj, c, d, const, a):
src1 = env.D[a]
if d%2:
raise InstructionError(obj)
src3 = env.E[d]
dst = env.D[c]
obj.operands = [dst, src1, env.cst(const,4), src3]
obj.type = type_data_processing
@ispec("32<[ c(4) d(4) 000 width(5) const(4) a(4) {d7} ]", mnemonic="INSERT")
def tricore_imask(obj, c, d, width, const, a):
src1 = env.D[a]
src3 = env.D[d]
dst = env.D[c]
obj.operands = [dst, src1, env.cst(const,4), src3]
obj.type = type_data_processing
@ispec("32<[ c(4) pos(5) 00 width(5) b(4) a(4) {37} ]", mnemonic="INSERT")
def tricore_imask(obj, c, pos, width, b, a):
dst = env.D[c]
src1 = env.D[a]
src2 = env.D[b]
obj.operands = [dst, src1, src2, env.cst(pos,5), env.cst(width,5)]
obj.type = type_data_processing
@ispec("32<[ c(4) d(4) 000 ----- b(4) a(4) {17} ]", mnemonic="INSERT")
def tricore_imask(obj, c, d, b, a):
src1 = env.D[a]
src2 = env.D[b]
if d%2:
raise InstructionError(obj)
src3 = env.E[d]
dst = env.D[c]
obj.operands = [dst, src1, src2, src3]
obj.type = type_data_processing
@ispec("32<[ c(4) d(4) 000 width(5) b(4) a(4) {57} ]", mnemonic="INSERT")
def tricore_imask(obj, c, d, width, b, a):
src1 = env.D[a]
src2 = env.D[b]
src3 = env.D[d]
dst = env.D[c]
obj.operands = [dst, src1, src2, src3, env.cst(width,5)]
obj.type = type_data_processing
@ispec("32<[ c(4) d(4) 010 width(5) ---- a(4) {57} ]", mnemonic="EXTR")
@ispec("32<[ c(4) d(4) 011 width(5) ---- a(4) {57} ]", mnemonic="EXTR_U")
def tricore_extr(obj, c, d, width, a):
src2 = env.D[d]
src1 = env.D[a]
dst = env.D[c]
obj.operands = [dst, src1, src2, env.cst(width,5)]
obj.type = type_data_processing
@ispec("32<[ c(4) {09} --00 ---- a(4) {4b} ]", mnemonic="BSPLIT")
def tricore_edd_arithmetic(obj, c, a):
src1 = env.D[a]
dst = env.E[c]
obj.operands = [dst, src1]
obj.type = type_data_processing
@ispec("32<[ c(4) 0001110 ~const9(9) a(4) {8b} ]", mnemonic="ABSDIF")
@ispec("32<[ c(4) 0001111 ~const9(9) a(4) {8b} ]", mnemonic="ABSDIFS")
@ispec("32<[ c(4) 0000000 ~const9(9) a(4) {8b} ]", mnemonic="ADD")
@ispec("32<[ c(4) 0000101 ~const9(9) a(4) {8b} ]", mnemonic="ADDC")
@ispec("32<[ c(4) 0000010 ~const9(9) a(4) {8b} ]", mnemonic="ADDS")
@ispec("32<[ c(4) 0000011 ~const9(9) a(4) {8b} ]", mnemonic="ADDS_U") #const9 is signed
@ispec("32<[ c(4) 0000100 ~const9(9) a(4) {8b} ]", mnemonic="ADDX")
@ispec("32<[ c(4) 0100000 ~const9(9) a(4) {8b} ]", mnemonic="AND_EQ")
@ispec("32<[ c(4) 0100100 ~const9(9) a(4) {8b} ]", mnemonic="AND_GE")
@ispec("32<[ c(4) 0100010 ~const9(9) a(4) {8b} ]", mnemonic="AND_LT")
@ispec("32<[ c(4) 0100001 ~const9(9) a(4) {8b} ]", mnemonic="AND_NE")
@ispec("32<[ c(4) 0010000 ~const9(9) a(4) {8b} ]", mnemonic="EQ")
@ispec("32<[ c(4) 1010110 ~const9(9) a(4) {8b} ]", mnemonic="EQANY_B")
@ispec("32<[ c(4) 1110110 ~const9(9) a(4) {8b} ]", mnemonic="EQANY_H")
@ispec("32<[ c(4) 0010100 ~const9(9) a(4) {8b} ]", mnemonic="GE")
@ispec("32<[ c(4) 0010010 ~const9(9) a(4) {8b} ]", mnemonic="LT")
@ispec("32<[ c(4) 0011010 ~const9(9) a(4) {8b} ]", mnemonic="MAX")
@ispec("32<[ c(4) 0010001 ~const9(9) a(4) {8b} ]", mnemonic="NE")
@ispec("32<[ c(4) 0100111 ~const9(9) a(4) {8b} ]", mnemonic="OR_EQ")
@ispec("32<[ c(4) 0101011 ~const9(9) a(4) {8b} ]", mnemonic="OR_GE")
@ispec("32<[ c(4) 0101001 ~const9(9) a(4) {8b} ]", mnemonic="OR_LT")
@ispec("32<[ c(4) 0001000 ~const9(9) a(4) {8b} ]", mnemonic="RSUB")
@ispec("32<[ c(4) 0001001 ~const9(9) a(4) {8b} ]", mnemonic="RSUBS")
@ispec("32<[ c(4) 0001011 ~const9(9) a(4) {8b} ]", mnemonic="RSUBS_U") #const9 is signed
@ispec("32<[ c(4) 0000000 ~const9(9) a(4) {8f} ]", mnemonic="SH")
@ispec("32<[ c(4) 1000000 ~const9(9) a(4) {8f} ]", mnemonic="SH_H")
@ispec("32<[ c(4) 0110111 ~const9(9) a(4) {8b} ]", mnemonic="SH_EQ")
@ispec("32<[ c(4) 0111011 ~const9(9) a(4) {8b} ]", mnemonic="SH_GE")
@ispec("32<[ c(4) 0111001 ~const9(9) a(4) {8b} ]", mnemonic="SH_LT")
@ispec("32<[ c(4) 0111000 ~const9(9) a(4) {8b} ]", mnemonic="SH_NE")
@ispec("32<[ c(4) 0000001 ~const9(9) a(4) {8f} ]", mnemonic="SHA")
@ispec("32<[ c(4) 1000001 ~const9(9) a(4) {8f} ]", mnemonic="SHA_H")
@ispec("32<[ c(4) 0000010 ~const9(9) a(4) {8f} ]", mnemonic="SHAS")
@ispec("32<[ c(4) 0101111 ~const9(9) a(4) {8b} ]", mnemonic="XOR_EQ")
@ispec("32<[ c(4) 0110011 ~const9(9) a(4) {8b} ]", mnemonic="XOR_GE")
@ispec("32<[ c(4) 0110001 ~const9(9) a(4) {8b} ]", mnemonic="XOR_LT")
@ispec("32<[ c(4) 0110000 ~const9(9) a(4) {8b} ]", mnemonic="XOR_NE")
def tricore_ddc_arithmetic(obj, c, const9, a):
    # RC-format ALU ops with a sign-extended const9: D[c] = D[a] op const9.
    # Shift instructions only encode a narrower shift count, so the
    # constant is truncated first (6 bits for word, 5 for halfword shifts).
    if obj.mnemonic in ("SH", "SHA", "SHAS"):
        const9 = const9[0:6]
    elif obj.mnemonic in ("SH_H", "SHA_H"):
        const9 = const9[0:5]
    obj.operands = [env.D[c], env.D[a], env.cst(const9.int(-1), 32)]
    obj.type = type_data_processing
@ispec("32<[ c(4) pos2(5) 00 pos1(5) b(4) a(4) {47} ]", mnemonic="AND_AND_T")
@ispec("32<[ c(4) pos2(5) 11 pos1(5) b(4) a(4) {47} ]", mnemonic="AND_ANDN_T")
@ispec("32<[ c(4) pos2(5) 10 pos1(5) b(4) a(4) {47} ]", mnemonic="AND_NOR_T")
@ispec("32<[ c(4) pos2(5) 01 pos1(5) b(4) a(4) {47} ]", mnemonic="AND_OR_T")
@ispec("32<[ c(4) pos2(5) 00 pos1(5) b(4) a(4) {87} ]", mnemonic="AND_T")
@ispec("32<[ c(4) pos2(5) 11 pos1(5) b(4) a(4) {87} ]", mnemonic="ANDN_T")
@ispec("32<[ c(4) pos2(5) 00 pos1(5) b(4) a(4) {67} ]", mnemonic="INS_T")
@ispec("32<[ c(4) pos2(5) 01 pos1(5) b(4) a(4) {67} ]", mnemonic="INSN_T")
@ispec("32<[ c(4) pos2(5) 00 pos1(5) b(4) a(4) {07} ]", mnemonic="NAND_T")
@ispec("32<[ c(4) pos2(5) 10 pos1(5) b(4) a(4) {87} ]", mnemonic="NOR_T")
@ispec("32<[ c(4) pos2(5) 00 pos1(5) b(4) a(4) {c7} ]", mnemonic="OR_AND_T")
@ispec("32<[ c(4) pos2(5) 11 pos1(5) b(4) a(4) {c7} ]", mnemonic="OR_ANDN_T")
@ispec("32<[ c(4) pos2(5) 10 pos1(5) b(4) a(4) {c7} ]", mnemonic="OR_NOR_T")
@ispec("32<[ c(4) pos2(5) 01 pos1(5) b(4) a(4) {c7} ]", mnemonic="OR_OR_T")
@ispec("32<[ c(4) pos2(5) 01 pos1(5) b(4) a(4) {87} ]", mnemonic="OR_T")
@ispec("32<[ c(4) pos2(5) 01 pos1(5) b(4) a(4) {07} ]", mnemonic="ORN_T")
@ispec("32<[ c(4) pos2(5) 00 pos1(5) b(4) a(4) {27} ]", mnemonic="SH_AND_T")
@ispec("32<[ c(4) pos2(5) 11 pos1(5) b(4) a(4) {27} ]", mnemonic="SH_ANDN_T")
@ispec("32<[ c(4) pos2(5) 00 pos1(5) b(4) a(4) {a7} ]", mnemonic="SH_NAND_T")
@ispec("32<[ c(4) pos2(5) 10 pos1(5) b(4) a(4) {27} ]", mnemonic="SH_NOR_T")
@ispec("32<[ c(4) pos2(5) 01 pos1(5) b(4) a(4) {27} ]", mnemonic="SH_OR_T")
@ispec("32<[ c(4) pos2(5) 01 pos1(5) b(4) a(4) {a7} ]", mnemonic="SH_ORN_T")
@ispec("32<[ c(4) pos2(5) 10 pos1(5) b(4) a(4) {a7} ]", mnemonic="SH_XNOR_T")
@ispec("32<[ c(4) pos2(5) 11 pos1(5) b(4) a(4) {a7} ]", mnemonic="SH_XOR_T")
@ispec("32<[ c(4) pos2(5) 10 pos1(5) b(4) a(4) {07} ]", mnemonic="XNOR_T")
@ispec("32<[ c(4) pos2(5) 11 pos1(5) b(4) a(4) {07} ]", mnemonic="XOR_T")
def tricore_ddd_arithmetic(obj, c, pos2, pos1, b, a):
    # Single-bit logic instructions: the sources are the individual bits
    # D[a][pos1] and D[b][pos2]; the result is written to D[c].
    bit1 = env.D[a][pos1:pos1 + 1]
    bit2 = env.D[b][pos2:pos2 + 1]
    obj.operands = [env.D[c], bit1, bit2]
    obj.type = type_data_processing
@ispec("32<[ c(4) 0001000 const9(9) a(4) {8f} ]", mnemonic="AND")
@ispec("32<[ c(4) 0100101 const9(9) a(4) {8b} ]", mnemonic="AND_GE_U")
@ispec("32<[ c(4) 0100011 const9(9) a(4) {8b} ]", mnemonic="AND_LT_U")
@ispec("32<[ c(4) 0001110 const9(9) a(4) {8f} ]", mnemonic="ANDN")
@ispec("32<[ c(4) 0001001 const9(9) a(4) {8f} ]", mnemonic="NAND")
@ispec("32<[ c(4) 0001011 const9(9) a(4) {8f} ]", mnemonic="NOR")
@ispec("32<[ c(4) 0010101 const9(9) a(4) {8b} ]", mnemonic="GE_U")
@ispec("32<[ c(4) 0001010 const9(9) a(4) {8f} ]", mnemonic="OR")
@ispec("32<[ c(4) 0101100 const9(9) a(4) {8b} ]", mnemonic="OR_GE_U")
@ispec("32<[ c(4) 0101010 const9(9) a(4) {8b} ]", mnemonic="OR_LT_U")
@ispec("32<[ c(4) 0101000 const9(9) a(4) {8b} ]", mnemonic="OR_NE")
@ispec("32<[ c(4) 0001111 const9(9) a(4) {8f} ]", mnemonic="ORN")
@ispec("32<[ c(4) 0000111 const9(9) a(4) {8f} ]", mnemonic="SHUFFLE")
@ispec("32<[ c(4) 0001101 const9(9) a(4) {8f} ]", mnemonic="XNOR")
@ispec("32<[ c(4) 0001100 const9(9) a(4) {8f} ]", mnemonic="XOR")
@ispec("32<[ c(4) 0111100 const9(9) a(4) {8b} ]", mnemonic="SH_GE_U")
@ispec("32<[ c(4) 0111010 const9(9) a(4) {8b} ]", mnemonic="SH_LT_U")
@ispec("32<[ c(4) 0110100 const9(9) a(4) {8b} ]", mnemonic="XOR_GE_U")
@ispec("32<[ c(4) 0110010 const9(9) a(4) {8b} ]", mnemonic="XOR_LT_U")
@ispec("32<[ c(4) 0011011 const9(9) a(4) {8b} ]", mnemonic="MAX_U")
@ispec("32<[ c(4) 0010011 const9(9) a(4) {8b} ]", mnemonic="LT_U")
def tricore_ddc_arithmetic(obj, c, const9, a):
    # RC-format logical/unsigned ops with zero-extended const9:
    # D[c] = D[a] op const9.
    # Fix: XOR.LT.U used op2 0110011 (0x33), which collides with the
    # signed XOR.GE encoding; per the TriCore ISA manual XOR.LT.U is
    # op2 0x32 (0110010).
    obj.operands = [env.D[c], env.D[a], env.cst(const9, 32)]
    obj.type = type_data_processing
@ispec("16<[ ~const4(4) a(4) {c2} ]", mnemonic="ADD")
@ispec("16<[ ~const4(4) a(4) {06} ]", mnemonic="SH")
@ispec("16<[ ~const4(4) a(4) {86} ]", mnemonic="SHA")
def tricore_ddc_arithmetic(obj, const4, a):
    # SRC format: D[a] = D[a] op sign-extended const4.
    reg = env.D[a]
    obj.operands = [reg, reg, env.cst(const4.int(-1), 32)]
    obj.type = type_data_processing
@ispec("16<[ ~const4(4) a(4) {92} ]", mnemonic="ADD")
@ispec("16<[ ~const4(4) a(4) {8a} ]", mnemonic="CADD")
@ispec("16<[ ~const4(4) a(4) {ca} ]", mnemonic="CADDN")
@ispec("16<[ ~const4(4) a(4) {aa} ]", mnemonic="CMOV")
@ispec("16<[ ~const4(4) a(4) {ea} ]", mnemonic="CMOVN")
def tricore_ddc_arithmetic(obj, const4, a):
    # SRC format with implicit D[15] source; CADD/CADDN are conditional
    # on D[15] and also read the destination register itself.
    dst = env.D[a]
    imm = env.cst(const4.int(-1), 32)
    if "CADD" in obj.mnemonic:
        obj.operands = [dst, env.D[15], dst, imm]
    else:
        obj.operands = [dst, env.D[15], imm]
    obj.type = type_data_processing
@ispec("16<[ ~const4(4) a(4) {9a} ]", mnemonic="ADD")
@ispec("16<[ ~const4(4) a(4) {ba} ]", mnemonic="EQ")
@ispec("16<[ ~const4(4) a(4) {fa} ]", mnemonic="LT")
@ispec("16<[ ~const4(4) a(4) {82} ]", mnemonic="MOV")
def tricore_ddc_arithmetic(obj, const4, a):
    # SRC format writing the implicit D[15]; MOV is a plain two-operand
    # register-immediate move into D[a].
    imm = env.cst(const4.int(-1), 32)
    if obj.mnemonic == "MOV":
        obj.operands = [env.D[a], imm]
    else:
        obj.operands = [env.D[15], env.D[a], imm]
    obj.type = type_data_processing
@ispec("16<[ ~const4(4) a(4) {d2} ]", mnemonic="MOV")
def tricore_ec_arithmetic(obj, const4, a):
    # MOV E[a], const4: sign-extend the constant to 64 bits.
    obj.operands = [env.E[a], env.cst(const4.int(-1), 64)]
    obj.type = type_data_processing
@ispec("16<[ const4(4) a(4) {a0} ]", mnemonic="MOV_A")
def tricore_ec_arithmetic(obj, const4, a):
    # MOV.A A[a], const4: zero-extended constant into an address register.
    obj.operands = [env.A[a], env.cst(const4, 32)]
    obj.type = type_data_processing
@ispec("16<[ const8(8) {16} ]", mnemonic="AND")
@ispec("16<[ const8(8) {da} ]", mnemonic="MOV")
@ispec("16<[ const8(8) {96} ]", mnemonic="OR")
def tricore_ddc_arithmetic(obj, const8):
    # SC format: implicit D[15] destination/source with a zero-extended
    # const8; MOV only takes the register and the constant.
    d15 = env.D[15]
    imm = env.cst(const8, 32)
    if obj.mnemonic == "MOV":
        obj.operands = [d15, imm]
    else:
        obj.operands = [d15, d15, imm]
    obj.type = type_data_processing
@ispec("16<[ b(4) a(4) {42} ]", mnemonic="ADD")
@ispec("16<[ b(4) a(4) {26} ]", mnemonic="AND")
@ispec("16<[ b(4) a(4) {a6} ]", mnemonic="OR")
@ispec("16<[ b(4) a(4) {a2} ]", mnemonic="SUB")
@ispec("16<[ b(4) a(4) {62} ]", mnemonic="SUBS")
@ispec("16<[ b(4) a(4) {c6} ]", mnemonic="XOR")
def tricore_dd_arithmetic(obj, b, a):
    # SRR format: D[a] = D[a] op D[b].
    reg = env.D[a]
    obj.operands = [reg, reg, env.D[b]]
    obj.type = type_data_processing
@ispec("16<[ b(4) a(4) {02} ]", mnemonic="MOV" , _dst=env.D, _src=env.D)
@ispec("16<[ b(4) a(4) {60} ]", mnemonic="MOV_A" , _dst=env.A, _src=env.D)
@ispec("16<[ b(4) a(4) {40} ]", mnemonic="MOV_AA" , _dst=env.A, _src=env.A)
@ispec("16<[ b(4) a(4) {80} ]", mnemonic="MOV_D" , _dst=env.D, _src=env.A)
def tricore_mov(obj, b, a, _dst, _src):
    # Register-to-register moves; the source/destination bank (D or A)
    # is selected by the spec-level _dst/_src parameters.
    obj.operands = [_dst[a], _src[b]]
    obj.type = type_data_processing
@ispec("16<[ b(4) a(4) {12} ]", mnemonic="ADD")
@ispec("16<[ b(4) a(4) {2a} ]", mnemonic="CMOV")
@ispec("16<[ b(4) a(4) {6a} ]", mnemonic="CMOVN")
@ispec("16<[ b(4) a(4) {52} ]", mnemonic="SUB")
def tricore_dd_arithmetic(obj, b, a):
    # SRR format with implicit D[15] as the first source operand.
    obj.operands = [env.D[a], env.D[15], env.D[b]]
    obj.type = type_data_processing
@ispec("16<[ b(4) a(4) {1a} ]", mnemonic="ADD")
@ispec("16<[ b(4) a(4) {22} ]", mnemonic="ADDS")
@ispec("16<[ b(4) a(4) {3a} ]", mnemonic="EQ")
@ispec("16<[ b(4) a(4) {7a} ]", mnemonic="LT")
@ispec("16<[ b(4) a(4) {5a} ]", mnemonic="SUB")
def tricore_dd_arithmetic(obj, b, a):
    # SRR format writing the implicit D[15] register.
    obj.operands = [env.D[15], env.D[a], env.D[b]]
    obj.type = type_data_processing
@ispec("32<[ c(4) {01} ---- b(4) a(4) {01} ]", mnemonic="ADD_A")
@ispec("32<[ c(4) {02} ---- b(4) a(4) {01} ]", mnemonic="SUB_A")
def tricore_aaa_arithmetic(obj, c, b, a):
    # Address arithmetic: A[c] = A[a] op A[b].
    obj.operands = [env.A[c], env.A[a], env.A[b]]
    obj.type = type_data_processing
@ispec("16<[ ~const4(4) a(4) {b0} ]", mnemonic="ADD_A")
def tricore_aac_arithmetic(obj, const4, a):
    # ADD.A A[a], const4 (sign-extended).
    reg = env.A[a]
    obj.operands = [reg, reg, env.cst(const4.int(-1), 32)]
    obj.type = type_data_processing
@ispec("16<[ const8(8) {20} ]", mnemonic="SUB_A")
def tricore_aac_arithmetic(obj, const8):
    # SUB.A SP, const8: decrement the stack pointer A[10] by the
    # zero-extended 8-bit constant.
    # Fix: the spec has no 'a' field, so the former 'a' parameter could
    # never be bound by the ispec decoder and the callback would fail.
    sp = env.A[10]
    obj.operands = [sp, sp, env.cst(const8, 32)]
    obj.type = type_data_processing
@ispec("16<[ b(4) a(4) {30} ]", mnemonic="ADD_A")
def tricore_aa_arithmetic(obj, b, a):
    # ADD.A A[a], A[b].
    reg = env.A[a]
    obj.operands = [reg, reg, env.A[b]]
    obj.type = type_data_processing
@ispec("32<[ c(4) ~const16(16) a(4) {1b} ]", mnemonic="ADDI")
@ispec("32<[ c(4) ~const16(16) a(4) {9b} ]", mnemonic="ADDIH")
def tricore_di_arithmetic(obj, c, const16, a):
    # ADDI/ADDIH: D[c] = D[a] + const16, with ADDIH placing the
    # sign-extended constant in the upper halfword.
    src2 = env.cst(const16.int(-1), 32)
    # Fix: this is a plain function, not a method — 'self' was undefined
    # and raised NameError whenever an ADDI/ADDIH was decoded; the
    # mnemonic lives on the instruction object 'obj'.
    if obj.mnemonic == "ADDIH":
        src2 = src2 << 16
    obj.operands = [env.D[c], env.D[a], src2]
    obj.type = type_data_processing
@ispec("32<[ c(4) ~const16(16) a(4) {11} ]", mnemonic="ADDIH_A")
def tricore_ai_arithmetic(obj, c, const16, a):
    # ADDIH.A: A[c] = A[a] + (sign-extended const16 << 16).
    obj.operands = [env.A[c], env.A[a], env.cst(const16.int(-1), 32) << 16]
    obj.type = type_data_processing
@ispec("32<[ c(4) {60} -- n(2) b(4) a(4) {01} ]", mnemonic="ADDSC_A")
def tricore_aaa_arithmetic(obj, c, n, b, a):
    # ADDSC.A: scaled-index address computation A[c] = A[b] + (D[a] << n).
    obj.operands = [env.A[c], env.A[b], env.D[a], env.cst(n, 2)]
    obj.type = type_data_processing
@ispec("32<[ c(4) {62} ---- b(4) a(4) {01} ]", mnemonic="ADDSC_AT")
def tricore_aaa_arithmetic(obj, c, b, a):
    # ADDSC.AT: bit-index addressing form of ADDSC using A[b] and D[a].
    obj.operands = [env.A[c], env.A[b], env.D[a]]
    obj.type = type_data_processing
@ispec("16<[ b(4) a(4) n(2) 010000 ]", mnemonic="ADDSC_A")
def tricore_aa_arithmetic(obj, b, a, n):
    # 16-bit ADDSC.A: A[a] = A[b] + (D[15] << n).
    obj.operands = [env.A[a], env.A[b], env.D[15], env.cst(n, 2)]
    obj.type = type_data_processing
@ispec("32<[ off2(4) 10 1110 off1(6) b(4) ---- {89} ]", mnemonic="CACHEA_I", mode="Short-offset")
@ispec("32<[ off2(4) 00 1110 off1(6) b(4) ---- {a9} ]", mnemonic="CACHEA_I", mode="Bit-reverse")
@ispec("32<[ off2(4) 01 1110 off1(6) b(4) ---- {a9} ]", mnemonic="CACHEA_I", mode="Circular")
@ispec("32<[ off2(4) 00 1110 off1(6) b(4) ---- {89} ]", mnemonic="CACHEA_I", mode="Post-increment")
@ispec("32<[ off2(4) 01 1110 off1(6) b(4) ---- {89} ]", mnemonic="CACHEA_I", mode="Pre-increment")
@ispec("32<[ off2(4) 10 1100 off1(6) b(4) ---- {89} ]", mnemonic="CACHEA_W", mode="Short-offset")
@ispec("32<[ off2(4) 00 1100 off1(6) b(4) ---- {a9} ]", mnemonic="CACHEA_W", mode="Bit-reverse")
@ispec("32<[ off2(4) 01 1100 off1(6) b(4) ---- {a9} ]", mnemonic="CACHEA_W", mode="Circular")
@ispec("32<[ off2(4) 00 1100 off1(6) b(4) ---- {89} ]", mnemonic="CACHEA_W", mode="Post-increment")
@ispec("32<[ off2(4) 01 1100 off1(6) b(4) ---- {89} ]", mnemonic="CACHEA_W", mode="Pre-increment")
@ispec("32<[ off2(4) 10 1101 off1(6) b(4) ---- {89} ]", mnemonic="CACHEA_WI", mode="Short-offset")
@ispec("32<[ off2(4) 00 1101 off1(6) b(4) ---- {a9} ]", mnemonic="CACHEA_WI", mode="Bit-reverse")
@ispec("32<[ off2(4) 01 1101 off1(6) b(4) ---- {a9} ]", mnemonic="CACHEA_WI", mode="Circular")
@ispec("32<[ off2(4) 00 1101 off1(6) b(4) ---- {89} ]", mnemonic="CACHEA_WI", mode="Post-increment")
@ispec("32<[ off2(4) 01 1101 off1(6) b(4) ---- {89} ]", mnemonic="CACHEA_WI", mode="Pre-increment")
@ispec("32<[ off2(4) 10 1011 off1(6) b(4) ---- {89} ]", mnemonic="CACHEI_W", mode="Short-offset")
@ispec("32<[ off2(4) 00 1011 off1(6) b(4) ---- {89} ]", mnemonic="CACHEI_W", mode="Post-increment")
@ispec("32<[ off2(4) 01 1011 off1(6) b(4) ---- {89} ]", mnemonic="CACHEI_W", mode="Pre-increment")
@ispec("32<[ off2(4) 10 1010 off1(6) b(4) ---- {89} ]", mnemonic="CACHEI_I", mode="Short-offset")
@ispec("32<[ off2(4) 00 1010 off1(6) b(4) ---- {89} ]", mnemonic="CACHEI_I", mode="Post-increment")
@ispec("32<[ off2(4) 01 1010 off1(6) b(4) ---- {89} ]", mnemonic="CACHEI_I", mode="Pre-increment")
@ispec("32<[ off2(4) 10 1111 off1(6) b(4) ---- {89} ]", mnemonic="CACHEI_WI", mode="Short-offset")
@ispec("32<[ off2(4) 00 1111 off1(6) b(4) ---- {89} ]", mnemonic="CACHEI_WI", mode="Post-increment")
@ispec("32<[ off2(4) 01 1111 off1(6) b(4) ---- {89} ]", mnemonic="CACHEI_WI", mode="Pre-increment")
def tricore_cache(obj, off2, off1, b):
    # Cache maintenance: base register A[b] plus a 10-bit offset built
    # from off2 (high 4 bits) and off1 (low 6 bits).
    obj.operands = [env.A[b], env.cst((off2 << 6) + off1, 10)]
    obj.type = type_system
@ispec("32<[ off2(4) 10 0011 off1(6) b(4) a(4) {49} ]", mnemonic="CMPSWAP_W", mode="Short-offset")
@ispec("32<[ off2(4) 00 0011 off1(6) b(4) a(4) {69} ]", mnemonic="CMPSWAP_W", mode="Bit-reverse")
@ispec("32<[ off2(4) 01 0011 off1(6) b(4) a(4) {69} ]", mnemonic="CMPSWAP_W", mode="Circular")
@ispec("32<[ off2(4) 00 0011 off1(6) b(4) a(4) {49} ]", mnemonic="CMPSWAP_W", mode="Post-increment")
@ispec("32<[ off2(4) 01 0011 off1(6) b(4) a(4) {49} ]", mnemonic="CMPSWAP_W", mode="Pre-increment")
@ispec("32<[ off2(4) 10 0010 off1(6) b(4) a(4) {49} ]", mnemonic="SWAPMSK_W", mode="Short-offset")
@ispec("32<[ off2(4) 00 0010 off1(6) b(4) a(4) {69} ]", mnemonic="SWAPMSK_W", mode="Bit-reverse")
@ispec("32<[ off2(4) 01 0010 off1(6) b(4) a(4) {69} ]", mnemonic="SWAPMSK_W", mode="Circular")
@ispec("32<[ off2(4) 00 0010 off1(6) b(4) a(4) {49} ]", mnemonic="SWAPMSK_W", mode="Post-increment")
@ispec("32<[ off2(4) 01 0010 off1(6) b(4) a(4) {49} ]", mnemonic="SWAPMSK_W", mode="Pre-increment")
def tricore_swap(obj, off2, off1, b, a):
    # CMPSWAP.W/SWAPMSK.W use the pair E[a], so the data register index
    # must be even.
    if a % 2:
        raise InstructionError(obj)
    offset = env.cst((off2 << 6) + off1, 10)
    obj.operands = [env.D[a], env.A[b], offset, env.E[a]]
    obj.type = type_data_processing
@ispec("32<[ c(4) d(4) 000 ~const9(9) a(4) {ab} ]", mnemonic="CADD")
@ispec("32<[ c(4) d(4) 001 ~const9(9) a(4) {ab} ]", mnemonic="CADDN")
@ispec("32<[ c(4) d(4) 001 ~const9(9) a(4) {13} ]", mnemonic="MADD", opt4="32+(32+K9)->32")
@ispec("32<[ c(4) d(4) 101 ~const9(9) a(4) {13} ]", mnemonic="MADDS", opt4="32+(32+K9)->32")
@ispec("32<[ c(4) d(4) 100 ~const9(9) a(4) {13} ]", mnemonic="MADDS_U", opt4="32+(32+K9)->32")
@ispec("32<[ c(4) d(4) 001 ~const9(9) a(4) {33} ]", mnemonic="MSUB", opt4="32+(32+K9)->32")
@ispec("32<[ c(4) d(4) 101 ~const9(9) a(4) {33} ]", mnemonic="MSUBS", opt4="32+(32+K9)->32")
@ispec("32<[ c(4) d(4) 100 ~const9(9) a(4) {33} ]", mnemonic="MSUBS_U", opt4="32+(32+K9)->32")
@ispec("32<[ c(4) d(4) 100 ~const9(9) a(4) {ab} ]", mnemonic="SEL")
@ispec("32<[ c(4) d(4) 101 ~const9(9) a(4) {ab} ]", mnemonic="SELN")
def tricore_cond_ddc(obj, c, d, const9, a):
    # RCR-format conditional/accumulating ops with sign-extended const9;
    # D[d] provides the condition (or accumulator) operand.
    obj.operands = [env.D[c], env.D[d], env.D[a], env.cst(const9.int(-1), 32)]
    obj.type = type_data_processing
@ispec("32<[ c(4) d(4) 011 ~const9(9) a(4) {13} ]", mnemonic="MADD", opt4="64+(32+K9)->64")
@ispec("32<[ c(4) d(4) 111 ~const9(9) a(4) {13} ]", mnemonic="MADDS", opt4="64+(32+K9)->64")
@ispec("32<[ c(4) d(4) 010 ~const9(9) a(4) {13} ]", mnemonic="MADD_U", opt4="64+(32+K9)->64")
@ispec("32<[ c(4) d(4) 111 ~const9(9) a(4) {13} ]", mnemonic="MADDS_U", opt4="64+(32+K9)->64")
@ispec("32<[ c(4) d(4) 011 ~const9(9) a(4) {33} ]", mnemonic="MSUB", opt4="64+(32+K9)->64")
@ispec("32<[ c(4) d(4) 111 ~const9(9) a(4) {33} ]", mnemonic="MSUBS", opt4="64+(32+K9)->64")
@ispec("32<[ c(4) d(4) 010 ~const9(9) a(4) {33} ]", mnemonic="MSUB_U", opt4="64+(32+K9)->64")
@ispec("32<[ c(4) d(4) 111 ~const9(9) a(4) {33} ]", mnemonic="MSUBS_U", opt4="64+(32+K9)->64")
def tricore_cond_eec(obj, c, d, const9, a):
    # 64-bit accumulating MADD/MSUB: E[d] is the accumulator, E[c] the
    # destination pair, const9 is sign-extended.
    obj.operands = [env.E[c], env.E[d], env.D[a], env.cst(const9.int(-1), 32)]
    obj.type = type_data_processing
@ispec("32<[ c(4) d(4) 011010 n(2) b(4) a(4) {83} ]", mnemonic="MADD_H", op4="LL")
@ispec("32<[ c(4) d(4) 011001 n(2) b(4) a(4) {83} ]", mnemonic="MADD_H", op4="LU")
@ispec("32<[ c(4) d(4) 011000 n(2) b(4) a(4) {83} ]", mnemonic="MADD_H", op4="UL")
@ispec("32<[ c(4) d(4) 011011 n(2) b(4) a(4) {83} ]", mnemonic="MADD_H", op4="UU")
@ispec("32<[ c(4) d(4) 111010 n(2) b(4) a(4) {83} ]", mnemonic="MADDS_H", op4="LL")
@ispec("32<[ c(4) d(4) 111001 n(2) b(4) a(4) {83} ]", mnemonic="MADDS_H", op4="LU")
@ispec("32<[ c(4) d(4) 111000 n(2) b(4) a(4) {83} ]", mnemonic="MADDS_H", op4="UL")
@ispec("32<[ c(4) d(4) 111011 n(2) b(4) a(4) {83} ]", mnemonic="MADDS_H", op4="UU")
@ispec("32<[ c(4) d(4) 000010 n(2) b(4) a(4) {43} ]", mnemonic="MADD_Q", op4="32+(32*32)Up->32")
@ispec("32<[ c(4) d(4) 011011 n(2) b(4) a(4) {43} ]", mnemonic="MADD_Q", op4="64+(32*32)->64")
@ispec("32<[ c(4) d(4) 000001 n(2) b(4) a(4) {43} ]", mnemonic="MADD_Q", op4="32+(16L*32)Up->32")
@ispec("32<[ c(4) d(4) 011001 n(2) b(4) a(4) {43} ]", mnemonic="MADD_Q", op4="64+(16L*32)->64")
@ispec("32<[ c(4) d(4) 000000 n(2) b(4) a(4) {43} ]", mnemonic="MADD_Q", op4="32+(16U*32)Up->32")
@ispec("32<[ c(4) d(4) 011000 n(2) b(4) a(4) {43} ]", mnemonic="MADD_Q", op4="64+(16U*32)->64")
@ispec("32<[ c(4) d(4) 000101 n(2) b(4) a(4) {43} ]", mnemonic="MADD_Q", op4="32+(16L*16L)->32")
@ispec("32<[ c(4) d(4) 011101 n(2) b(4) a(4) {43} ]", mnemonic="MADD_Q", op4="64+(16L*16L)->64")
@ispec("32<[ c(4) d(4) 000100 n(2) b(4) a(4) {43} ]", mnemonic="MADD_Q", op4="32+(16U*16U)->32")
@ispec("32<[ c(4) d(4) 011100 n(2) b(4) a(4) {43} ]", mnemonic="MADD_Q", op4="64+(16U*16U)->64")
@ispec("32<[ c(4) d(4) 100010 n(2) b(4) a(4) {43} ]", mnemonic="MADDS_Q", op4="32+(32*32)Up->32")
@ispec("32<[ c(4) d(4) 111011 n(2) b(4) a(4) {43} ]", mnemonic="MADDS_Q", op4="64+(32*32)->64")
@ispec("32<[ c(4) d(4) 100001 n(2) b(4) a(4) {43} ]", mnemonic="MADDS_Q", op4="32+(16L*32)Up->32")
@ispec("32<[ c(4) d(4) 111001 n(2) b(4) a(4) {43} ]", mnemonic="MADDS_Q", op4="64+(16L*32)->64")
@ispec("32<[ c(4) d(4) 100000 n(2) b(4) a(4) {43} ]", mnemonic="MADDS_Q", op4="32+(16U*32)Up->32")
@ispec("32<[ c(4) d(4) 111000 n(2) b(4) a(4) {43} ]", mnemonic="MADDS_Q", op4="64+(16U*32)->64")
@ispec("32<[ c(4) d(4) 100101 n(2) b(4) a(4) {43} ]", mnemonic="MADDS_Q", op4="32+(16L*16L)->32")
@ispec("32<[ c(4) d(4) 111101 n(2) b(4) a(4) {43} ]", mnemonic="MADDS_Q", op4="64+(16L*16L)->64")
@ispec("32<[ c(4) d(4) 100100 n(2) b(4) a(4) {43} ]", mnemonic="MADDS_Q", op4="32+(16U*16U)->32")
@ispec("32<[ c(4) d(4) 111100 n(2) b(4) a(4) {43} ]", mnemonic="MADDS_Q", op4="64+(16U*16U)->64")
@ispec("32<[ c(4) d(4) 011010 n(2) b(4) a(4) {a3} ]", mnemonic="MSUB_H", op4="LL")
@ispec("32<[ c(4) d(4) 011001 n(2) b(4) a(4) {a3} ]", mnemonic="MSUB_H", op4="LU")
@ispec("32<[ c(4) d(4) 011000 n(2) b(4) a(4) {a3} ]", mnemonic="MSUB_H", op4="UL")
@ispec("32<[ c(4) d(4) 011011 n(2) b(4) a(4) {a3} ]", mnemonic="MSUB_H", op4="UU")
@ispec("32<[ c(4) d(4) 111010 n(2) b(4) a(4) {a3} ]", mnemonic="MSUBS_H", op4="LL")
@ispec("32<[ c(4) d(4) 111001 n(2) b(4) a(4) {a3} ]", mnemonic="MSUBS_H", op4="LU")
@ispec("32<[ c(4) d(4) 111000 n(2) b(4) a(4) {a3} ]", mnemonic="MSUBS_H", op4="UL")
@ispec("32<[ c(4) d(4) 111011 n(2) b(4) a(4) {a3} ]", mnemonic="MSUBS_H", op4="UU")
@ispec("32<[ c(4) d(4) 000010 n(2) b(4) a(4) {63} ]", mnemonic="MSUB_Q", op4="32+(32*32)Up->32")
@ispec("32<[ c(4) d(4) 011011 n(2) b(4) a(4) {63} ]", mnemonic="MSUB_Q", op4="64+(32*32)->64")
@ispec("32<[ c(4) d(4) 000001 n(2) b(4) a(4) {63} ]", mnemonic="MSUB_Q", op4="32+(16L*32)Up->32")
@ispec("32<[ c(4) d(4) 011001 n(2) b(4) a(4) {63} ]", mnemonic="MSUB_Q", op4="64+(16L*32)->64")
@ispec("32<[ c(4) d(4) 000000 n(2) b(4) a(4) {63} ]", mnemonic="MSUB_Q", op4="32+(16U*32)Up->32")
@ispec("32<[ c(4) d(4) 011000 n(2) b(4) a(4) {63} ]", mnemonic="MSUB_Q", op4="64+(16U*32)->64")
@ispec("32<[ c(4) d(4) 000101 n(2) b(4) a(4) {63} ]", mnemonic="MSUB_Q", op4="32+(16L*16L)->32")
@ispec("32<[ c(4) d(4) 011101 n(2) b(4) a(4) {63} ]", mnemonic="MSUB_Q", op4="64+(16L*16L)->64")
@ispec("32<[ c(4) d(4) 000100 n(2) b(4) a(4) {63} ]", mnemonic="MSUB_Q", op4="32+(16U*16U)->32")
@ispec("32<[ c(4) d(4) 011100 n(2) b(4) a(4) {63} ]", mnemonic="MSUB_Q", op4="64+(16U*16U)->64")
@ispec("32<[ c(4) d(4) 100010 n(2) b(4) a(4) {63} ]", mnemonic="MSUBS_Q", op4="32+(32*32)Up->32")
@ispec("32<[ c(4) d(4) 111011 n(2) b(4) a(4) {63} ]", mnemonic="MSUBS_Q", op4="64+(32*32)->64")
@ispec("32<[ c(4) d(4) 100001 n(2) b(4) a(4) {63} ]", mnemonic="MSUBS_Q", op4="32+(16L*32)Up->32")
@ispec("32<[ c(4) d(4) 111001 n(2) b(4) a(4) {63} ]", mnemonic="MSUBS_Q", op4="64+(16L*32)->64")
@ispec("32<[ c(4) d(4) 100000 n(2) b(4) a(4) {63} ]", mnemonic="MSUBS_Q", op4="32+(16U*32)Up->32")
@ispec("32<[ c(4) d(4) 111000 n(2) b(4) a(4) {63} ]", mnemonic="MSUBS_Q", op4="64+(16U*32)->64")
@ispec("32<[ c(4) d(4) 100101 n(2) b(4) a(4) {63} ]", mnemonic="MSUBS_Q", op4="32+(16L*16L)->32")
@ispec("32<[ c(4) d(4) 111101 n(2) b(4) a(4) {63} ]", mnemonic="MSUBS_Q", op4="64+(16L*16L)->64")
@ispec("32<[ c(4) d(4) 100100 n(2) b(4) a(4) {63} ]", mnemonic="MSUBS_Q", op4="32+(16U*16U)->32")
@ispec("32<[ c(4) d(4) 111100 n(2) b(4) a(4) {63} ]", mnemonic="MSUBS_Q", op4="64+(16U*16U)->64")
def tricore_cond_eec(obj, c, d, n, b, a):
    # Packed/Q-format MADD/MSUB: accumulator pair E[d], register sources
    # D[a] and D[b], plus the 2-bit shift amount n.
    obj.operands = [env.E[c], env.E[d], env.D[a], env.D[b], env.cst(n, 2)]
    obj.type = type_data_processing
@ispec("32<[ c(4) d(4) 0000 ---- b(4) a(4) {2b} ]", mnemonic="CADD")
@ispec("32<[ c(4) d(4) 0001 ---- b(4) a(4) {2b} ]", mnemonic="CADDN")
@ispec("32<[ c(4) d(4) 0010 ---- b(4) a(4) {2b} ]", mnemonic="CSUB")
@ispec("32<[ c(4) d(4) 0011 ---- b(4) a(4) {2b} ]", mnemonic="CSUBN")
@ispec("32<[ c(4) d(4) {0a} b(4) a(4) {03} ]", mnemonic="MADD", opt4="32+(32*32)->32")
@ispec("32<[ c(4) d(4) {8a} b(4) a(4) {03} ]", mnemonic="MADDS", opt4="32+(32*32)->32")
@ispec("32<[ c(4) d(4) {88} b(4) a(4) {03} ]", mnemonic="MADDS_U", opt4="32+(32*32)->32")
@ispec("32<[ c(4) d(4) 0100 ---- b(4) a(4) {2b} ]", mnemonic="SEL")
@ispec("32<[ c(4) d(4) 0101 ---- b(4) a(4) {2b} ]", mnemonic="SELN")
def tricore_cond_ddd(obj, c, d, b, a):
    # Conditional add/sub/select and 32-bit MADD: D[d] carries the
    # condition (or accumulator) value.
    obj.operands = [env.D[c], env.D[d], env.D[a], env.D[b]]
    obj.type = type_data_processing
@ispec("32<[ c(4) d(4) {6a} b(4) a(4) {03} ]", mnemonic="MADD", opt4="64+(32*32)->64")
@ispec("32<[ c(4) d(4) {ea} b(4) a(4) {03} ]", mnemonic="MADDS", opt4="64+(32*32)->64")
@ispec("32<[ c(4) d(4) {68} b(4) a(4) {03} ]", mnemonic="MADD_U", opt4="64+(32*32)->64")
@ispec("32<[ c(4) d(4) {e8} b(4) a(4) {03} ]", mnemonic="MADDS_U", opt4="64+(32*32)->64")
def tricore_cond_ddd(obj, c, d, b, a):
    # 64-bit MADD forms: accumulator and destination use E register pairs.
    obj.operands = [env.E[c], env.E[d], env.D[a], env.D[b]]
    obj.type = type_data_processing
@ispec("32<[ c(4) {1c} ---- ---- a(4) {0f} ]", mnemonic="CLO")
@ispec("32<[ c(4) {7d} ---- ---- a(4) {0f} ]", mnemonic="CLO_H")
@ispec("32<[ c(4) {1d} ---- ---- a(4) {0f} ]", mnemonic="CLS")
@ispec("32<[ c(4) {7e} ---- ---- a(4) {0f} ]", mnemonic="CLS_H")
@ispec("32<[ c(4) {1b} ---- ---- a(4) {0f} ]", mnemonic="CLZ")
@ispec("32<[ c(4) {7c} ---- ---- a(4) {0f} ]", mnemonic="CLZ_H")
@ispec("32<[ c(4) {5e} ---- ---- a(4) {0b} ]", mnemonic="SAT_B")
@ispec("32<[ c(4) {5f} ---- ---- a(4) {0b} ]", mnemonic="SAT_BU")
@ispec("32<[ c(4) {7e} ---- ---- a(4) {0b} ]", mnemonic="SAT_H")
@ispec("32<[ c(4) {7f} ---- ---- a(4) {0b} ]", mnemonic="SAT_HU")
def tricore_dd_arithmetic(obj, c, a):
    # Unary count-leading/saturate operations: D[c] = op(D[a]).
    obj.operands = [env.D[c], env.D[a]]
    obj.type = type_data_processing
@ispec("16<[ 1010 ---- {00} ]", mnemonic="DEBUG")
@ispec("16<[ 0000 ---- {00} ]", mnemonic="NOP")
def tricore_system(obj):
    # Operand-less 16-bit system instructions.
    obj.operands = []
    obj.type = type_system
@ispec("16<[ 0111 ---- {00} ]", mnemonic="FRET")
@ispec("16<[ 1001 ---- {00} ]", mnemonic="RET")
@ispec("16<[ 1000 ---- {00} ]", mnemonic="RFE")
def tricore_ret(obj):
    # 16-bit return instructions (no explicit operands).
    obj.operands = []
    obj.type = type_control_flow
@ispec("32<[ ---- 000100 ---------- ---- {0d} ]", mnemonic="DEBUG")
@ispec("32<[ ---- 001101 ---------- ---- {0d} ]", mnemonic="DISABLE")
@ispec("32<[ ---- 010010 ---------- ---- {0d} ]", mnemonic="DSYNC")
@ispec("32<[ ---- 001100 ---------- ---- {0d} ]", mnemonic="ENABLE")
@ispec("32<[ ---- 010011 ---------- ---- {0d} ]", mnemonic="ISYNC")
@ispec("32<[ ---- 010101 ---------- ---- {0d} ]", mnemonic="TRAPSV")
@ispec("32<[ ---- 010100 ---------- ---- {0d} ]", mnemonic="TRAPV")
@ispec("32<[ ---- 000000 ---------- ---- {0d} ]", mnemonic="NOP")
@ispec("32<[ ---- 001001 ---------- ---- {0d} ]", mnemonic="RSLCX")
@ispec("32<[ ---- 000000 ---------- ---- {2f} ]", mnemonic="RSTV")
@ispec("32<[ ---- 001000 ---------- ---- {0d} ]", mnemonic="SVLCX")
@ispec("32<[ ---- 010110 ---------- ---- {0d} ]", mnemonic="WAIT")
def tricore_system(obj):
    # Operand-less 32-bit system instructions.
    obj.operands = []
    obj.type = type_system
@ispec("32<[ ---- 000011 ---------- ---- {0d} ]", mnemonic="FRET")
@ispec("32<[ ---- 000110 ---------- ---- {0d} ]", mnemonic="RET")
@ispec("32<[ ---- 000111 ---------- ---- {0d} ]", mnemonic="RFE")
@ispec("32<[ ---- 000101 ---------- ---- {0d} ]", mnemonic="RFM")
def tricore_ret(obj):
    # 32-bit return instructions (no explicit operands).
    obj.operands = []
    obj.type = type_control_flow
@ispec("32<[ ---- 001111 ---------- a(4) {0d} ]", mnemonic="DISABLE")
@ispec("32<[ ---- 001110 ---------- a(4) {0d} ]", mnemonic="RESTORE")
def tricore_system(obj, a):
    # System instructions taking a single data-register operand D[a].
    obj.operands = [env.D[a]]
    obj.type = type_system
@ispec("32<[ c(4) d(4) 1101 -- 00 b(4) ---- {6b} ]", mnemonic="DVADJ")
@ispec("32<[ c(4) d(4) 1111 -- 00 b(4) ---- {6b} ]", mnemonic="DVSTEP")
@ispec("32<[ c(4) d(4) 1110 -- 00 b(4) ---- {6b} ]", mnemonic="DVSTEP_U")
@ispec("32<[ c(4) d(4) 1010 -- 00 b(4) ---- {6b} ]", mnemonic="IXMAX")
@ispec("32<[ c(4) d(4) 1011 -- 00 b(4) ---- {6b} ]", mnemonic="IXMAX_U")
@ispec("32<[ c(4) d(4) 1000 -- 00 b(4) ---- {6b} ]", mnemonic="IXMIN")
@ispec("32<[ c(4) d(4) 1001 -- 00 b(4) ---- {6b} ]", mnemonic="IXMIN_U")
def tricore_eee(obj, c, d, b):
    # Divide-step / index ops on 64-bit pairs: every register number
    # must be even to address a valid E pair.
    if d % 2 or b % 2 or c % 2:
        raise InstructionError(obj)
    obj.operands = [env.E[c], env.E[d], env.E[b]]
    obj.type = type_data_processing
@ispec("16<[ ~const4(4) disp(4) {1e} ]", mnemonic="JEQ", _off=0)
@ispec("16<[ ~const4(4) disp(4) {9e} ]", mnemonic="JEQ", _off=16)
@ispec("16<[ ~const4(4) disp(4) {5e} ]", mnemonic="JNE", _off=0)
@ispec("16<[ ~const4(4) disp(4) {de} ]", mnemonic="JNE", _off=16)
def tricore_jcc(obj, const4, disp, _off):
    # 16-bit JEQ/JNE comparing D[15] with a sign-extended const4; _off
    # extends the displacement range for the second encoding pair.
    obj.operands = [env.D[15], env.cst(const4.int(-1), 32), env.cst(disp, 32) + _off]
    obj.type = type_control_flow
@ispec("16<[ b(4) disp(4) {3e} ]", mnemonic="JEQ", _off=0)
@ispec("16<[ b(4) disp(4) {be} ]", mnemonic="JEQ", _off=16)
@ispec("16<[ b(4) disp(4) {7e} ]", mnemonic="JNE", _off=0)
@ispec("16<[ b(4) disp(4) {fe} ]", mnemonic="JNE", _off=16)
def tricore_jcc(obj, b, disp, _off):
    # 16-bit JEQ/JNE comparing D[15] with D[b].
    obj.operands = [env.D[15], env.D[b], env.cst(disp, 32) + _off]
    obj.type = type_control_flow
@ispec("16<[ b(4) disp(4) {ce} ]", mnemonic="JGEZ")
@ispec("16<[ b(4) disp(4) {4e} ]", mnemonic="JGTZ")
@ispec("16<[ b(4) disp(4) {8e} ]", mnemonic="JLEZ")
@ispec("16<[ b(4) disp(4) {0e} ]", mnemonic="JLTZ")
@ispec("16<[ b(4) disp(4) {f6} ]", mnemonic="JNZ")
@ispec("16<[ b(4) disp(4) {76} ]", mnemonic="JZ")
def tricore_jcc(obj, b, disp):
    # 16-bit compare-with-zero jumps on D[b].
    obj.operands = [env.D[b], env.cst(disp, 32)]
    obj.type = type_control_flow
@ispec("32<[ 0 ~disp(15) const(4) a(4) {df} ]", mnemonic="JEQ")
@ispec("32<[ 1 ~disp(15) const(4) a(4) {df} ]", mnemonic="JNE")
@ispec("32<[ 0 ~disp(15) const(4) a(4) {ff} ]", mnemonic="JGE")
@ispec("32<[ 1 ~disp(15) const(4) a(4) {ff} ]", mnemonic="JGE_U")
@ispec("32<[ 0 ~disp(15) const(4) a(4) {bf} ]", mnemonic="JLT")
@ispec("32<[ 1 ~disp(15) const(4) a(4) {bf} ]", mnemonic="JLT_U")
@ispec("32<[ 1 ~disp(15) const(4) a(4) {9f} ]", mnemonic="JNED")
@ispec("32<[ 0 ~disp(15) const(4) a(4) {9f} ]", mnemonic="JNEI")
def tricore_jcc(obj, disp, const, a):
    # 32-bit conditional jumps comparing D[a] to a 4-bit constant; the
    # 15-bit displacement is sign-extended.
    obj.operands = [env.D[a], env.cst(const, 4), env.cst(disp.int(-1), 32)]
    obj.type = type_control_flow
@ispec("32<[ 0 ~disp(15) b(4) a(4) {5f} ]", mnemonic="JEQ")
@ispec("32<[ 1 ~disp(15) b(4) a(4) {5f} ]", mnemonic="JNE")
@ispec("32<[ 0 ~disp(15) b(4) a(4) {7f} ]", mnemonic="JGE")
@ispec("32<[ 1 ~disp(15) b(4) a(4) {7f} ]", mnemonic="JGE_U")
@ispec("32<[ 0 ~disp(15) b(4) a(4) {3f} ]", mnemonic="JLT")
@ispec("32<[ 1 ~disp(15) b(4) a(4) {3f} ]", mnemonic="JLT_U")
@ispec("32<[ 1 ~disp(15) b(4) a(4) {1f} ]", mnemonic="JNED")
@ispec("32<[ 0 ~disp(15) b(4) a(4) {1f} ]", mnemonic="JNEI")
def tricore_jcc(obj, disp, b, a):
    # 32-bit register/register conditional jumps with sign-extended
    # displacement.
    obj.operands = [env.D[a], env.D[b], env.cst(disp.int(-1), 32)]
    obj.type = type_control_flow
@ispec("32<[ 0 ~disp(15) b(4) a(4) {7d} ]", mnemonic="JEQ_A")
@ispec("32<[ 1 ~disp(15) b(4) a(4) {7d} ]", mnemonic="JNE_A")
def tricore_jcc(obj, disp, b, a):
    # Address-register compare-and-jump.
    obj.operands = [env.A[a], env.A[b], env.cst(disp.int(-1), 32)]
    obj.type = type_control_flow
@ispec("32<[ 1 ~disp(15) ---- a(4) {bd} ]", mnemonic="JNZ_A")
@ispec("32<[ 0 ~disp(15) ---- a(4) {bd} ]", mnemonic="JZ_A")
def tricore_jcc(obj, disp, a):
    # JNZ.A/JZ.A: jump if address register A[a] is (non-)zero.
    # Fix: the previous body referenced env.A[b], but 'b' is not a field
    # of these encodings (the b nibble is don't-care) and was unbound,
    # raising NameError at decode. These tests take only the register
    # and the sign-extended displacement, like the 16-bit forms.
    obj.operands = [env.A[a], env.cst(disp.int(-1), 32)]
    obj.type = type_control_flow
@ispec("32<[ 0 ~disp(15) b(4) ---- {fd} ]", mnemonic="LOOP")
@ispec("32<[ 1 ~disp(15) b(4) ---- {fd} ]", mnemonic="LOOPU")
def tricore_jcc(obj, disp, b):
    # LOOP/LOOPU: sign-extended displacement scaled by 2; LOOPU is
    # unconditional and carries no counter register operand.
    target = env.cst(disp.int(-1) * 2, 32)
    if obj.mnemonic == "LOOPU":
        obj.operands = [target]
    else:
        obj.operands = [env.A[b], target]
    obj.type = type_control_flow
@ispec("16<[ b(4) disp(4) {7c} ]", mnemonic="JNZ_A")
@ispec("16<[ b(4) disp(4) {bc} ]", mnemonic="JZ_A")
def tricore_jcc(obj, b, disp):
    # 16-bit JNZ.A/JZ.A with zero-extended displacement.
    obj.operands = [env.A[b], env.cst(disp, 32)]
    obj.type = type_control_flow
@ispec("16<[ b(4) #disp(4) {fc} ]", mnemonic="LOOP")
def tricore_jcc(obj, b, disp):
    # 16-bit LOOP on A[b]. '#disp' yields the raw bit string; the branch
    # is always backwards: 27 one-bits, the 4 displacement bits, then a
    # trailing 0 (i.e. sign-extended and doubled).
    obj.operands = [env.A[b], env.cst(int("1" * 27 + disp + "0", 2), 32)]
    obj.type = type_control_flow
@ispec("16<[ 0000 a(4) {dc} ]", mnemonic="JI")
def tricore_ji(obj, a):
    # JI: indirect jump to the address held in A[a].
    obj.operands = [env.A[a]]
    obj.type = type_control_flow
@ispec("16<[ 0000 a(4) {46} ]", mnemonic="NOT")
@ispec("16<[ 0101 a(4) {32} ]", mnemonic="RSUB")
@ispec("16<[ 0000 a(4) {32} ]", mnemonic="SAT_B")
@ispec("16<[ 0001 a(4) {32} ]", mnemonic="SAT_BU")
@ispec("16<[ 0010 a(4) {32} ]", mnemonic="SAT_H")
@ispec("16<[ 0011 a(4) {32} ]", mnemonic="SAT_HU")
def tricore_a(obj, a):
    # Single-operand 16-bit ops acting on D[a] in place.
    obj.operands = [env.D[a]]
    obj.type = type_data_processing
@ispec("16<[ n(4) disp(4) {ae} ]", mnemonic="JNZ_T")
@ispec("16<[ n(4) disp(4) {2e} ]", mnemonic="JZ_T")
def tricore_ji(obj, n, disp):
    # Jump on bit n of D[15].
    obj.operands = [env.D[15][n:n + 1], env.cst(disp, 32)]
    obj.type = type_control_flow
@ispec("32<[ 1 ~disp(15) n(4) a(4) h 1101111 ]", mnemonic="JNZ_T")
@ispec("32<[ 0 ~disp(15) n(4) a(4) h 1101111 ]", mnemonic="JZ_T")
def tricore_jcc(obj, disp, n, a, h):
    # Jump on a single bit of D[a]; h is the top bit of the 5-bit index.
    pos = n + (h << 4)
    obj.operands = [env.D[a][pos:pos + 1], env.cst(disp.int(-1), 32)]
    obj.type = type_control_flow
@ispec("32<[ ~off2(4) 10 ~off3(4) ~off1(6) ~off4(4) a(4) {85} ]", mnemonic="LD_A", mode="Absolute")
@ispec("32<[ ~off2(4) 00 ~off3(4) ~off1(6) ~off4(4) a(4) {05} ]", mnemonic="LD_B", mode="Absolute")
@ispec("32<[ ~off2(4) 01 ~off3(4) ~off1(6) ~off4(4) a(4) {05} ]", mnemonic="LD_BU", mode="Absolute")
@ispec("32<[ ~off2(4) 01 ~off3(4) ~off1(6) ~off4(4) a(4) {85} ]", mnemonic="LD_D", mode="Absolute")
@ispec("32<[ ~off2(4) 11 ~off3(4) ~off1(6) ~off4(4) a(4) {85} ]", mnemonic="LD_DA", mode="Absolute")
@ispec("32<[ ~off2(4) 10 ~off3(4) ~off1(6) ~off4(4) a(4) {05} ]", mnemonic="LD_H", mode="Absolute")
@ispec("32<[ ~off2(4) 11 ~off3(4) ~off1(6) ~off4(4) a(4) {05} ]", mnemonic="LD_HU", mode="Absolute")
@ispec("32<[ ~off2(4) 00 ~off3(4) ~off1(6) ~off4(4) a(4) {45} ]", mnemonic="LD_Q", mode="Absolute")
@ispec("32<[ ~off2(4) 00 ~off3(4) ~off1(6) ~off4(4) a(4) {85} ]", mnemonic="LD_W", mode="Absolute")
@ispec("32<[ ~off2(4) 00 ~off3(4) ~off1(6) ~off4(4) a(4) {c5} ]", mnemonic="LEA", mode="Absolute")
def tricore_ld(obj, off2, off3, off1, off4, a):
    # Absolute-mode loads / LEA.  The destination bank depends on the
    # mnemonic: A for addresses, E for 64-bit data, P for register pairs,
    # D otherwise.  The address is the concatenated offset fields with
    # off4 as the top nibble.
    if obj.mnemonic in ("LD_A", "LEA"):
        dst = env.A[a]
    elif obj.mnemonic in ("LD_D", "LDMST"):
        dst = env.E[a]
    elif obj.mnemonic == "LD_DA":
        dst = env.P[a]
    else:
        dst = env.D[a]
    addr = off1 // off2 // off3
    obj.operands = [dst, composer([env.cst(addr.int(), 28), env.cst(off4, 4)])]
    obj.type = type_data_processing
@ispec("32<[ ~off2(4) 01 ~off3(4) ~off1(6) ~off4(4) a(4) {c5} ]", mnemonic="LHA", mode="Absolute")
def tricore_ld(obj, off2, off3, off1, off4, a):
    # LHA A[a], off18: loads the 18 offset bits into the upper part of the
    # effective address (low 14 bits are zero).
    ea = off1 // off2 // off3 // off4
    obj.operands = [env.A[a], composer([env.cst(0, 14), env.cst(ea.int(), 18)])]
    obj.type = type_data_processing
@ispec("32<[ ~off2(4) 10 ~off3(4) ~off1(6) ~off4(4) a(4) {a5} ]", mnemonic="ST_A", mode="Absolute")
@ispec("32<[ ~off2(4) 00 ~off3(4) ~off1(6) ~off4(4) a(4) {25} ]", mnemonic="ST_B", mode="Absolute")
@ispec("32<[ ~off2(4) 01 ~off3(4) ~off1(6) ~off4(4) a(4) {a5} ]", mnemonic="ST_D", mode="Absolute")
@ispec("32<[ ~off2(4) 11 ~off3(4) ~off1(6) ~off4(4) a(4) {a5} ]", mnemonic="ST_DA", mode="Absolute")
@ispec("32<[ ~off2(4) 10 ~off3(4) ~off1(6) ~off4(4) a(4) {25} ]", mnemonic="ST_H", mode="Absolute")
@ispec("32<[ ~off2(4) 00 ~off3(4) ~off1(6) ~off4(4) a(4) {65} ]", mnemonic="ST_Q", mode="Absolute")
@ispec("32<[ ~off2(4) 00 ~off3(4) ~off1(6) ~off4(4) a(4) {a5} ]", mnemonic="ST_W", mode="Absolute")
@ispec("32<[ ~off2(4) 00 ~off3(4) ~off1(6) ~off4(4) a(4) {e5} ]", mnemonic="SWAP_W", mode="Absolute")
@ispec("32<[ ~off2(4) 01 ~off3(4) ~off1(6) ~off4(4) a(4) {e5} ]", mnemonic="LDMST", mode="Absolute")
def tricore_st(obj, off2, off3, off1, off4, a):
    # Absolute-mode stores (and SWAP.W / LDMST).  The source register bank
    # mirrors the load case: A for addresses, E for 64-bit, P for pairs.
    if obj.mnemonic == "ST_A":
        src = env.A[a]
    elif obj.mnemonic in ("ST_D", "LDMST"):
        src = env.E[a]
    elif obj.mnemonic == "ST_DA":
        src = env.P[a]
    else:
        src = env.D[a]
    addr = off1 // off2 // off3
    obj.operands = [composer([env.cst(addr.int(), 28), env.cst(off4, 4)]), src]
    obj.type = type_data_processing
@ispec("32<[ ~off2(4) 00 ~off3(4) ~off1(6) ~off4(4) b bpos(3) {d5} ]", mnemonic="ST_T", mode="Absolute")
def tricore_st(obj, off2, off3, off1, off4, b, bpos):
    # ST.T: store the single bit b at bit position bpos of an absolute
    # address.
    # BUG FIX: 'src' was used without being defined (NameError); the
    # address is assembled from the offset fields exactly as in the other
    # Absolute-mode specs above.
    src = off1 // off2 // off3
    obj.operands = [composer([env.cst(src.int(), 28), env.cst(off4, 4)]),
                    env.cst(bpos, 3), env.cst(b, 1)]
    obj.type = type_data_processing
@ispec("32<[ ~off2(4) 00 ~off3(4) ~off1(6) ~off4(4) ---- {15} ]", mnemonic="STLCX", mode="Absolute")
def tricore_st(obj, off2, off3, off1, off4):
    # STLCX: store lower context to an absolute address.
    # BUG FIX: 'src' was used without being defined (NameError); build the
    # address from the decoded offset fields as the sibling specs do.
    src = off1 // off2 // off3
    obj.operands = [composer([env.cst(src.int(), 28), env.cst(off4, 4)])]
    obj.type = type_data_processing
@ispec("32<[ ~off2(4) 10 ~off3(4) ~off1(6) ~off4(4) a(4) {15} ]", mnemonic="LDLCX", mode="Absolute")
@ispec("32<[ ~off2(4) 11 ~off3(4) ~off1(6) ~off4(4) a(4) {15} ]", mnemonic="LDUCX", mode="Absolute")
def tricore_ld(obj, off2, off3, off1, off4, a):
    # LDLCX / LDUCX: load lower/upper context from an absolute address.
    # Field 'a' is decoded by the spec but not an operand.
    ea = off1 // off2 // off3
    obj.operands = [composer([env.cst(ea.int(), 28), env.cst(off4, 4)])]
    obj.type = type_data_processing
@ispec("32<[ ~off2(4) 10 0110 ~off1(6) b(4) a(4) {09} ]", mnemonic="LD_A", mode="Short-offset")
@ispec("32<[ ~off2(4) 00 0110 ~off1(6) b(4) a(4) {29} ]", mnemonic="LD_A", mode="Bit-reverse")
@ispec("32<[ ~off2(4) 01 0110 ~off1(6) b(4) a(4) {29} ]", mnemonic="LD_A", mode="Circular")
@ispec("32<[ ~off2(4) 00 0110 ~off1(6) b(4) a(4) {09} ]", mnemonic="LD_A", mode="Post-increment")
@ispec("32<[ ~off2(4) 01 0110 ~off1(6) b(4) a(4) {09} ]", mnemonic="LD_A", mode="Pre-increment")
@ispec("32<[ ~off2(4) 10 0000 ~off1(6) b(4) a(4) {09} ]", mnemonic="LD_B", mode="Short-offset")
@ispec("32<[ ~off2(4) 00 0000 ~off1(6) b(4) a(4) {29} ]", mnemonic="LD_B", mode="Bit-reverse")
@ispec("32<[ ~off2(4) 01 0000 ~off1(6) b(4) a(4) {29} ]", mnemonic="LD_B", mode="Circular")
@ispec("32<[ ~off2(4) 00 0000 ~off1(6) b(4) a(4) {09} ]", mnemonic="LD_B", mode="Post-increment")
@ispec("32<[ ~off2(4) 01 0000 ~off1(6) b(4) a(4) {09} ]", mnemonic="LD_B", mode="Pre-increment")
@ispec("32<[ ~off2(4) 10 0001 ~off1(6) b(4) a(4) {09} ]", mnemonic="LD_BU", mode="Short-offset")
@ispec("32<[ ~off2(4) 00 0001 ~off1(6) b(4) a(4) {29} ]", mnemonic="LD_BU", mode="Bit-reverse")
@ispec("32<[ ~off2(4) 01 0001 ~off1(6) b(4) a(4) {29} ]", mnemonic="LD_BU", mode="Circular")
@ispec("32<[ ~off2(4) 00 0001 ~off1(6) b(4) a(4) {09} ]", mnemonic="LD_BU", mode="Post-increment")
@ispec("32<[ ~off2(4) 01 0001 ~off1(6) b(4) a(4) {09} ]", mnemonic="LD_BU", mode="Pre-increment")
@ispec("32<[ ~off2(4) 10 0101 ~off1(6) b(4) a(4) {09} ]", mnemonic="LD_D", mode="Short-offset")
@ispec("32<[ ~off2(4) 00 0101 ~off1(6) b(4) a(4) {29} ]", mnemonic="LD_D", mode="Bit-reverse")
@ispec("32<[ ~off2(4) 01 0101 ~off1(6) b(4) a(4) {29} ]", mnemonic="LD_D", mode="Circular")
@ispec("32<[ ~off2(4) 00 0101 ~off1(6) b(4) a(4) {09} ]", mnemonic="LD_D", mode="Post-increment")
@ispec("32<[ ~off2(4) 01 0101 ~off1(6) b(4) a(4) {09} ]", mnemonic="LD_D", mode="Pre-increment")
@ispec("32<[ ~off2(4) 10 0111 ~off1(6) b(4) a(4) {09} ]", mnemonic="LD_DA", mode="Short-offset")
@ispec("32<[ ~off2(4) 00 0111 ~off1(6) b(4) a(4) {29} ]", mnemonic="LD_DA", mode="Bit-reverse")
@ispec("32<[ ~off2(4) 01 0111 ~off1(6) b(4) a(4) {29} ]", mnemonic="LD_DA", mode="Circular")
@ispec("32<[ ~off2(4) 00 0111 ~off1(6) b(4) a(4) {09} ]", mnemonic="LD_DA", mode="Post-increment")
@ispec("32<[ ~off2(4) 01 0111 ~off1(6) b(4) a(4) {09} ]", mnemonic="LD_DA", mode="Pre-increment")
@ispec("32<[ ~off2(4) 10 0010 ~off1(6) b(4) a(4) {09} ]", mnemonic="LD_H", mode="Short-offset")
@ispec("32<[ ~off2(4) 00 0010 ~off1(6) b(4) a(4) {29} ]", mnemonic="LD_H", mode="Bit-reverse")
@ispec("32<[ ~off2(4) 01 0010 ~off1(6) b(4) a(4) {29} ]", mnemonic="LD_H", mode="Circular")
@ispec("32<[ ~off2(4) 00 0010 ~off1(6) b(4) a(4) {09} ]", mnemonic="LD_H", mode="Post-increment")
@ispec("32<[ ~off2(4) 01 0010 ~off1(6) b(4) a(4) {09} ]", mnemonic="LD_H", mode="Pre-increment")
@ispec("32<[ ~off2(4) 10 0011 ~off1(6) b(4) a(4) {09} ]", mnemonic="LD_HU", mode="Short-offset")
@ispec("32<[ ~off2(4) 00 0011 ~off1(6) b(4) a(4) {29} ]", mnemonic="LD_HU", mode="Bit-reverse")
@ispec("32<[ ~off2(4) 01 0011 ~off1(6) b(4) a(4) {29} ]", mnemonic="LD_HU", mode="Circular")
@ispec("32<[ ~off2(4) 00 0011 ~off1(6) b(4) a(4) {09} ]", mnemonic="LD_HU", mode="Post-increment")
@ispec("32<[ ~off2(4) 01 0011 ~off1(6) b(4) a(4) {09} ]", mnemonic="LD_HU", mode="Pre-increment")
@ispec("32<[ ~off2(4) 10 1000 ~off1(6) b(4) a(4) {09} ]", mnemonic="LD_Q", mode="Short-offset")
@ispec("32<[ ~off2(4) 00 1000 ~off1(6) b(4) a(4) {29} ]", mnemonic="LD_Q", mode="Bit-reverse")
@ispec("32<[ ~off2(4) 01 1000 ~off1(6) b(4) a(4) {29} ]", mnemonic="LD_Q", mode="Circular")
@ispec("32<[ ~off2(4) 00 1000 ~off1(6) b(4) a(4) {09} ]", mnemonic="LD_Q", mode="Post-increment")
@ispec("32<[ ~off2(4) 01 1000 ~off1(6) b(4) a(4) {09} ]", mnemonic="LD_Q", mode="Pre-increment")
@ispec("32<[ ~off2(4) 10 0100 ~off1(6) b(4) a(4) {09} ]", mnemonic="LD_W", mode="Short-offset")
@ispec("32<[ ~off2(4) 00 0100 ~off1(6) b(4) a(4) {29} ]", mnemonic="LD_W", mode="Bit-reverse")
@ispec("32<[ ~off2(4) 01 0100 ~off1(6) b(4) a(4) {29} ]", mnemonic="LD_W", mode="Circular")
@ispec("32<[ ~off2(4) 00 0100 ~off1(6) b(4) a(4) {09} ]", mnemonic="LD_W", mode="Post-increment")
@ispec("32<[ ~off2(4) 01 0100 ~off1(6) b(4) a(4) {09} ]", mnemonic="LD_W", mode="Pre-increment")
@ispec("32<[ ~off2(4) 10 1000 ~off1(6) b(4) a(4) {49} ]", mnemonic="LEA", mode="Short-offset")
def tricore_ld(obj, off2, off1, b, a):
    # Base+offset loads for all register addressing modes (short-offset,
    # bit-reverse, circular, post-/pre-increment).  The destination bank
    # depends on the mnemonic, the base is A[b], and the 10-bit offset is
    # sign-extended.
    dst = env.D[a]
    if obj.mnemonic in ("LD_A", "LEA"):
        dst = env.A[a]
    elif obj.mnemonic in ("LD_D", "LDMST"):
        dst = env.E[a]
    elif obj.mnemonic == "LD_DA":
        dst = env.P[a]
    obj.b = b
    src1 = env.A[b]
    off10 = off1 // off2
    src2 = env.cst(off10.int(-1), 10)
    obj.operands = [dst, src1, src2]
    # BUG FIX: the specs above declare mode="Bit-reverse", but the
    # original compared against "Bit-Reverse", so the offset operand was
    # never dropped for bit-reverse addressing (which carries no offset).
    if obj.mode == "Bit-reverse":
        obj.operands.pop()
    obj.type = type_data_processing
@ispec("32<[ ~off2(4) 10 0110 ~off1(6) b(4) a(4) {89} ]", mnemonic="ST_A", mode="Short-offset")
@ispec("32<[ ~off2(4) 00 0110 ~off1(6) b(4) a(4) {a9} ]", mnemonic="ST_A", mode="Bit-reverse")
@ispec("32<[ ~off2(4) 01 0110 ~off1(6) b(4) a(4) {a9} ]", mnemonic="ST_A", mode="Circular")
@ispec("32<[ ~off2(4) 00 0110 ~off1(6) b(4) a(4) {89} ]", mnemonic="ST_A", mode="Post-increment")
@ispec("32<[ ~off2(4) 01 0110 ~off1(6) b(4) a(4) {89} ]", mnemonic="ST_A", mode="Pre-increment")
@ispec("32<[ ~off2(4) 10 0000 ~off1(6) b(4) a(4) {89} ]", mnemonic="ST_B", mode="Short-offset")
@ispec("32<[ ~off2(4) 00 0000 ~off1(6) b(4) a(4) {a9} ]", mnemonic="ST_B", mode="Bit-reverse")
@ispec("32<[ ~off2(4) 01 0000 ~off1(6) b(4) a(4) {a9} ]", mnemonic="ST_B", mode="Circular")
@ispec("32<[ ~off2(4) 00 0000 ~off1(6) b(4) a(4) {89} ]", mnemonic="ST_B", mode="Post-increment")
@ispec("32<[ ~off2(4) 01 0000 ~off1(6) b(4) a(4) {89} ]", mnemonic="ST_B", mode="Pre-increment")
@ispec("32<[ ~off2(4) 10 0101 ~off1(6) b(4) a(4) {89} ]", mnemonic="ST_D", mode="Short-offset")
@ispec("32<[ ~off2(4) 00 0101 ~off1(6) b(4) a(4) {a9} ]", mnemonic="ST_D", mode="Bit-reverse")
@ispec("32<[ ~off2(4) 01 0101 ~off1(6) b(4) a(4) {a9} ]", mnemonic="ST_D", mode="Circular")
@ispec("32<[ ~off2(4) 00 0101 ~off1(6) b(4) a(4) {89} ]", mnemonic="ST_D", mode="Post-increment")
@ispec("32<[ ~off2(4) 01 0101 ~off1(6) b(4) a(4) {89} ]", mnemonic="ST_D", mode="Pre-increment")
@ispec("32<[ ~off2(4) 10 0111 ~off1(6) b(4) a(4) {89} ]", mnemonic="ST_DA", mode="Short-offset")
@ispec("32<[ ~off2(4) 00 0111 ~off1(6) b(4) a(4) {a9} ]", mnemonic="ST_DA", mode="Bit-reverse")
@ispec("32<[ ~off2(4) 01 0111 ~off1(6) b(4) a(4) {a9} ]", mnemonic="ST_DA", mode="Circular")
@ispec("32<[ ~off2(4) 00 0111 ~off1(6) b(4) a(4) {89} ]", mnemonic="ST_DA", mode="Post-increment")
@ispec("32<[ ~off2(4) 01 0111 ~off1(6) b(4) a(4) {89} ]", mnemonic="ST_DA", mode="Pre-increment")
@ispec("32<[ ~off2(4) 10 0010 ~off1(6) b(4) a(4) {89} ]", mnemonic="ST_H", mode="Short-offset")
@ispec("32<[ ~off2(4) 00 0010 ~off1(6) b(4) a(4) {a9} ]", mnemonic="ST_H", mode="Bit-reverse")
@ispec("32<[ ~off2(4) 01 0010 ~off1(6) b(4) a(4) {a9} ]", mnemonic="ST_H", mode="Circular")
@ispec("32<[ ~off2(4) 00 0010 ~off1(6) b(4) a(4) {89} ]", mnemonic="ST_H", mode="Post-increment")
@ispec("32<[ ~off2(4) 01 0010 ~off1(6) b(4) a(4) {89} ]", mnemonic="ST_H", mode="Pre-increment")
@ispec("32<[ ~off2(4) 10 1000 ~off1(6) b(4) a(4) {89} ]", mnemonic="ST_Q", mode="Short-offset")
@ispec("32<[ ~off2(4) 00 1000 ~off1(6) b(4) a(4) {a9} ]", mnemonic="ST_Q", mode="Bit-reverse")
@ispec("32<[ ~off2(4) 01 1000 ~off1(6) b(4) a(4) {a9} ]", mnemonic="ST_Q", mode="Circular")
@ispec("32<[ ~off2(4) 00 1000 ~off1(6) b(4) a(4) {89} ]", mnemonic="ST_Q", mode="Post-increment")
@ispec("32<[ ~off2(4) 01 1000 ~off1(6) b(4) a(4) {89} ]", mnemonic="ST_Q", mode="Pre-increment")
@ispec("32<[ ~off2(4) 10 0100 ~off1(6) b(4) a(4) {89} ]", mnemonic="ST_W", mode="Short-offset")
@ispec("32<[ ~off2(4) 00 0100 ~off1(6) b(4) a(4) {a9} ]", mnemonic="ST_W", mode="Bit-reverse")
@ispec("32<[ ~off2(4) 01 0100 ~off1(6) b(4) a(4) {a9} ]", mnemonic="ST_W", mode="Circular")
@ispec("32<[ ~off2(4) 00 0100 ~off1(6) b(4) a(4) {89} ]", mnemonic="ST_W", mode="Post-increment")
@ispec("32<[ ~off2(4) 01 0100 ~off1(6) b(4) a(4) {89} ]", mnemonic="ST_W", mode="Pre-increment")
@ispec("32<[ ~off2(4) 10 0001 ~off1(6) b(4) a(4) {49} ]", mnemonic="LDMST", mode="Short-offset")
@ispec("32<[ ~off2(4) 00 0001 ~off1(6) b(4) a(4) {69} ]", mnemonic="LDMST", mode="Bit-reverse")
@ispec("32<[ ~off2(4) 01 0001 ~off1(6) b(4) a(4) {69} ]", mnemonic="LDMST", mode="Circular")
@ispec("32<[ ~off2(4) 00 0001 ~off1(6) b(4) a(4) {49} ]", mnemonic="LDMST", mode="Post-increment")
@ispec("32<[ ~off2(4) 01 0001 ~off1(6) b(4) a(4) {49} ]", mnemonic="LDMST", mode="Pre-increment")
def tricore_st(obj, off2, off1, b, a):
    # Base+offset stores (and LDMST) for all register addressing modes.
    # The source bank depends on the mnemonic; base register is A[b] with
    # a sign-extended 10-bit offset.
    dst = env.D[a]
    if obj.mnemonic == "ST_A":
        dst = env.A[a]
    elif obj.mnemonic in ("ST_D", "LDMST"):
        dst = env.E[a]
    elif obj.mnemonic == "ST_DA":
        dst = env.P[a]
    obj.b = b
    src1 = env.A[b]
    off10 = off1 // off2
    src2 = env.cst(off10.int(-1), 10)
    obj.operands = [src1, src2, dst]
    # BUG FIX: the specs above declare mode="Bit-reverse", but the
    # original compared against "Bit-Reverse", so the offset operand was
    # never dropped for bit-reverse addressing (which carries no offset).
    if obj.mode == "Bit-reverse":
        obj.operands.pop()
    obj.type = type_data_processing
@ispec("32<[ ~off2(4) 10 1000 ~off1(6) b(4) a(4) {49} ]", mnemonic="SWAP_W", mode="Short-offset")
@ispec("32<[ ~off2(4) 00 1000 ~off1(6) b(4) a(4) {69} ]", mnemonic="SWAP_W", mode="Bit-reverse")
@ispec("32<[ ~off2(4) 01 1000 ~off1(6) b(4) a(4) {69} ]", mnemonic="SWAP_W", mode="Circular")
@ispec("32<[ ~off2(4) 00 1000 ~off1(6) b(4) a(4) {49} ]", mnemonic="SWAP_W", mode="Post-increment")
@ispec("32<[ ~off2(4) 01 1000 ~off1(6) b(4) a(4) {49} ]", mnemonic="SWAP_W", mode="Pre-increment")
def tricore_ld(obj, off2, off1, b, a):
    # SWAP.W: exchange D[a] with memory addressed via the pair P[b] plus a
    # sign-extended 10-bit offset.
    off10 = off1 // off2
    obj.operands = [env.P[b], env.cst(off10.int(-1), 10), env.D[a]]
    obj.type = type_data_processing
@ispec("32<[ ~off2(4) 10 0100 ~off1(6) b(4) ---- {49} ]", mnemonic="LDLCX", mode="Short-offset")
@ispec("32<[ ~off2(4) 10 0101 ~off1(6) b(4) ---- {49} ]", mnemonic="LDUCX", mode="Short-offset")
@ispec("32<[ ~off2(4) 10 0110 ~off1(6) b(4) ---- {49} ]", mnemonic="STLCX", mode="Short-offset")
@ispec("32<[ ~off2(4) 10 0111 ~off1(6) b(4) ---- {49} ]", mnemonic="STUCX", mode="Short-offset")
def tricore_ld(obj, off2, off1, b):
    # Context save/restore addressed as base A[b] plus a sign-extended
    # 10-bit offset.
    base = env.A[b]
    disp = env.cst((off1 // off2).int(-1), 10)
    obj.operands = [base, disp]
    obj.type = type_data_processing
@ispec("32<[ ~off2(4) ~off3(6) ~off1(6) b(4) a(4) {99} ]", mnemonic="LD_A", mode="Long-offset")
@ispec("32<[ ~off2(4) ~off3(6) ~off1(6) b(4) a(4) {79} ]", mnemonic="LD_B", mode="Long-offset")
@ispec("32<[ ~off2(4) ~off3(6) ~off1(6) b(4) a(4) {39} ]", mnemonic="LD_BU", mode="Long-offset")
@ispec("32<[ ~off2(4) ~off3(6) ~off1(6) b(4) a(4) {09} ]", mnemonic="LD_H", mode="Long-offset")
@ispec("32<[ ~off2(4) ~off3(6) ~off1(6) b(4) a(4) {b9} ]", mnemonic="LD_HU", mode="Long-offset")
@ispec("32<[ ~off2(4) ~off3(6) ~off1(6) b(4) a(4) {19} ]", mnemonic="LD_W", mode="Long-offset")
@ispec("32<[ ~off2(4) ~off3(6) ~off1(6) b(4) a(4) {d9} ]", mnemonic="LEA", mode="Long-offset")
def tricore_ld(obj, off2, off3, off1, b, a):
dst = env.D[a]
|
[
" if obj.mnemonic in (\"LD_A\", \"LEA\"): dst = env.A[a]"
] | 7,935
|
lcc
|
python
| null |
1af3862e060454bfb42e00c39c490c31b97f3330caa0e1f1
|
|
/*
* JasperReports - Free Java Reporting Library.
* Copyright (C) 2001 - 2011 Jaspersoft Corporation. All rights reserved.
* http://www.jaspersoft.com
*
* Unless you have purchased a commercial license agreement from Jaspersoft,
* the following license terms apply:
*
* This program is part of JasperReports.
*
* JasperReports is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* JasperReports is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with JasperReports. If not, see <http://www.gnu.org/licenses/>.
*/
package org.oss.pdfreporter.engine;
import java.io.IOException;
import java.io.InputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import org.oss.pdfreporter.engine.design.events.JRPropertyChangeSupport;
import org.oss.pdfreporter.net.IURL;
import org.oss.pdfreporter.uses.java.util.Properties;
/**
* Properties map of an JR element.
* <p/>
* The order of the properties (obtained by {@link #getPropertyNames() getPropertyNames()}
* is the same as the order in which the properties were added.
*
* @author Lucian Chirita (lucianc@users.sourceforge.net)
* @version $Id: JRPropertiesMap.java 5738 2012-10-23 08:24:25Z lucianc $
*/
public class JRPropertiesMap implements Serializable, Cloneable
{
private static final long serialVersionUID = JRConstants.SERIAL_VERSION_UID;
public static final String PROPERTY_VALUE = "value";
private Map<String, String> propertiesMap;
private List<String> propertiesList;
private JRPropertiesMap base;
	/**
	 * Creates an empty properties map with no own properties and no base
	 * properties.  The internal collections are allocated lazily on the
	 * first {@link #setProperty(String, String)} call.
	 */
	public JRPropertiesMap()
	{
	}
/**
* Clones a properties map.
*
* @param propertiesMap the original properties map
*/
public JRPropertiesMap(JRPropertiesMap propertiesMap)
{
this();
this.base = propertiesMap.base;
String[] propertyNames = propertiesMap.getPropertyNames();
if (propertyNames != null && propertyNames.length > 0)
{
for(int i = 0; i < propertyNames.length; i++)
{
setProperty(propertyNames[i], propertiesMap.getProperty(propertyNames[i]));
}
}
}
protected synchronized void ensureInit()
{
if (propertiesMap == null)
{
init();
}
}
	/**
	 * Allocates the backing collections: a name-to-value map plus a list
	 * that records insertion order for {@link #getPropertyNames()}.
	 */
	private void init()
	{
		// start with small collections; most holders carry few properties
		propertiesMap = new HashMap<String, String>(4, 0.75f);
		propertiesList = new ArrayList<String>(2);
	}
/**
* Returns the names of the properties.
*
* @return the names of the properties
*/
public String[] getPropertyNames()
{
String[] names;
if (hasOwnProperties())
{
if (base == null)
{
names = propertiesList.toArray(new String[propertiesList.size()]);
}
else
{
LinkedHashSet<String> namesSet = new LinkedHashSet<String>();
collectPropertyNames(namesSet);
names = namesSet.toArray(new String[namesSet.size()]);
}
}
else if (base != null)
{
names = base.getPropertyNames();
}
else
{
names = new String[0];
}
return names;
}
	/**
	 * Adds all visible property names to the given collection: base map
	 * names first (recursively), then this map's own names.
	 *
	 * @param names the collection that receives the names
	 */
	protected void collectPropertyNames(Collection<String> names)
	{
		if (base != null)
		{
			base.collectPropertyNames(names);
		}
		// propertiesList is null until the lazy init has run
		if (propertiesList != null)
		{
			names.addAll(propertiesList);
		}
	}
/**
* Returns the value of a property.
*
* @param propName the name of the property
* @return the value
*/
public String getProperty(String propName)
{
String val;
if (hasOwnProperty(propName))
{
val = getOwnProperty(propName);
}
else if (base != null)
{
val = base.getProperty(propName);
}
else
{
val = null;
}
return val;
}
/**
* Decides whether the map contains a specified property.
*
* The method returns true even if the property value is null.
*
* @param propName the property name
* @return <code>true</code> if and only if the map contains the property
*/
public boolean containsProperty(String propName)
{
return hasOwnProperty(propName)
|| base != null && base.containsProperty(propName);
}
	/**
	 * Checks whether the property is set directly on this map (base map is
	 * not consulted).
	 *
	 * @param propName the property name
	 * @return whether the property is an own property
	 */
	protected boolean hasOwnProperty(String propName)
	{
		return propertiesMap != null && propertiesMap.containsKey(propName);
	}
protected String getOwnProperty(String propName)
{
return propertiesMap != null ? (String) propertiesMap.get(propName) : null;
}
/**
* Adds/sets a property value.
*
* @param propName the name of the property
* @param value the value of the property
*/
public void setProperty(String propName, String value)
{
Object old = getOwnProperty(propName);
ensureInit();
if (!hasOwnProperty(propName))
{
propertiesList.add(propName);
}
propertiesMap.put(propName, value);
if (hasEventSupport())
{
getEventSupport().firePropertyChange(PROPERTY_VALUE, old, value);
}
}
/**
* Removes a property.
*
* @param propName the property name
*/
public void removeProperty(String propName)
{
//FIXME base properties?
if (hasOwnProperty(propName))
{
propertiesList.remove(propName);
propertiesMap.remove(propName);
}
}
	/**
	 * Clones this property map via the copy constructor (own properties are
	 * copied; the base reference is shared).
	 *
	 * @return a clone of this property map
	 */
	public JRPropertiesMap cloneProperties()
	{
		return new JRPropertiesMap(this);
	}
	/**
	 * Delegates to {@link #cloneProperties()}.
	 */
	public Object clone()
	{
		return this.cloneProperties();
	}
public String toString()
{
return propertiesMap == null ? "" : propertiesMap.toString();
}
// TODO: Daniel (19.4.2013) - Removed, unused
// private void readObject(java.io.ObjectInputStream in) throws IOException, ClassNotFoundException
// {
// in.defaultReadObject();
//
// if (propertiesList == null && propertiesMap != null)// an instance from an old version has been deserialized
// {
// //recreate the properties list and map
// propertiesList = new ArrayList<String>(propertiesMap.keySet());
// propertiesMap = new HashMap<String, String>(propertiesMap);
// }
// }
/**
* Checks whether there are any properties.
*
* @return whether there are any properties
*/
public boolean hasProperties()
{
return hasOwnProperties()
|| base != null && base.hasProperties();
}
	/**
	 * Checks whether this object has properties of its own
	 * (i.e. not inherited from the base properties).
	 *
	 * @return whether this object has properties of its own
	 * @see #setBaseProperties(JRPropertiesMap)
	 */
	public boolean hasOwnProperties()
	{
		return propertiesList != null && !propertiesList.isEmpty();
	}
/**
* Clones the properties map of a properties holder.
* If the holder does not have any properties, null is returned.
*
* @param propertiesHolder the properties holder
* @return a clone of the holder's properties map, or <code>null</code>
* if the holder does not have any properties
*/
public static JRPropertiesMap getPropertiesClone(JRPropertiesHolder propertiesHolder)
{
JRPropertiesMap clone;
if (propertiesHolder.hasProperties())
{
clone = propertiesHolder.getPropertiesMap().cloneProperties();
}
else
{
clone = null;
}
return clone;
}
	/**
	 * Returns the base properties map, if any.
	 *
	 * @return the base properties map, or <code>null</code> when none is set
	 * @see #setBaseProperties(JRPropertiesMap)
	 */
	public JRPropertiesMap getBaseProperties()
	{
		return base;
	}
	/**
	 * Sets the base properties map.
	 *
	 * <p>
	 * The base properties map are used as base/default properties for this
	 * instance. All of the {@link #containsProperty(String)},
	 * {@link #getProperty(String)}, {@link #getPropertyNames()} and
	 * {@link #hasProperties()} methods include base properties as well.
	 * </p>
	 *
	 * @param base the base properties map; may be <code>null</code> to clear
	 */
	public void setBaseProperties(JRPropertiesMap base)
	{
		this.base = base;
	}
/**
* Loads a properties file from a location.
*
* @param location the properties file URL
* @return the properties file loaded as a in-memory properties map
*/
public static JRPropertiesMap loadProperties(IURL location)
{
boolean close = true;
InputStream stream = null;
try
{
|
[
"\t\t\tstream = location.openStream();"
] | 1,068
|
lcc
|
java
| null |
ff129ecf1bae342708a06c6afdaf4c0c520b36db7f9fe934
|
|
package net.minecraft.world;
import net.minecraft.entity.player.PlayerCapabilities;
import net.minecraft.world.storage.WorldInfo;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
public final class WorldSettings
{
/** The seed for the map. */
private final long seed;
/** The EnumGameType. */
private final WorldSettings.GameType theGameType;
/** Switch for the map features. 'true' for enabled, 'false' for disabled. */
private final boolean mapFeaturesEnabled;
/** True if hardcore mode is enabled */
private final boolean hardcoreEnabled;
private final WorldType terrainType;
/** True if Commands (cheats) are allowed. */
private boolean commandsAllowed;
/** True if the Bonus Chest is enabled. */
private boolean bonusChestEnabled;
private String worldName;
    public WorldSettings(long seedIn, WorldSettings.GameType gameType, boolean enableMapFeatures, boolean hardcoreMode, WorldType worldTypeIn)
    {
        // World name defaults to empty; callers set it via setWorldName.
        this.worldName = "";
        this.seed = seedIn;
        this.theGameType = gameType;
        this.mapFeaturesEnabled = enableMapFeatures;
        this.hardcoreEnabled = hardcoreMode;
        this.terrainType = worldTypeIn;
    }
    // Convenience constructor: copies seed, game type, map-features flag,
    // hardcore flag and terrain type from saved world info.
    public WorldSettings(WorldInfo info)
    {
        this(info.getSeed(), info.getGameType(), info.isMapFeaturesEnabled(), info.isHardcoreModeEnabled(), info.getTerrainType());
    }
    /**
     * Enables the bonus chest.
     *
     * @return this settings instance, for call chaining
     */
    public WorldSettings enableBonusChest()
    {
        this.bonusChestEnabled = true;
        return this;
    }
    /**
     * Sets the world name.
     *
     * @return this settings instance, for call chaining
     */
    public WorldSettings setWorldName(String name)
    {
        this.worldName = name;
        return this;
    }
    /**
     * Enables Commands (cheats).  Client-side only.
     *
     * @return this settings instance, for call chaining
     */
    @SideOnly(Side.CLIENT)
    public WorldSettings enableCommands()
    {
        this.commandsAllowed = true;
        return this;
    }
    /**
     * Returns true if the Bonus Chest is enabled.
     */
    public boolean isBonusChestEnabled()
    {
        return this.bonusChestEnabled;
    }
    /**
     * Returns the seed for the world.
     */
    public long getSeed()
    {
        return this.seed;
    }
    /**
     * Gets the game type (survival, creative, etc.).
     */
    public WorldSettings.GameType getGameType()
    {
        return this.theGameType;
    }
    /**
     * Returns true if hardcore mode is enabled, otherwise false.
     */
    public boolean getHardcoreEnabled()
    {
        return this.hardcoreEnabled;
    }
    /**
     * Get whether the map features (e.g. strongholds) generation is enabled or disabled.
     */
    public boolean isMapFeaturesEnabled()
    {
        return this.mapFeaturesEnabled;
    }
    /**
     * Returns the terrain (world) type used for generation.
     */
    public WorldType getTerrainType()
    {
        return this.terrainType;
    }
    /**
     * Returns true if Commands (cheats) are allowed.
     */
    public boolean areCommandsAllowed()
    {
        return this.commandsAllowed;
    }
    /**
     * Gets the GameType by numeric ID; delegates to {@link GameType#getByID}.
     */
    public static WorldSettings.GameType getGameTypeById(int id)
    {
        return WorldSettings.GameType.getByID(id);
    }
    /**
     * Returns the world name (empty string unless set).
     */
    public String getWorldName()
    {
        return this.worldName;
    }
public static enum GameType
{
NOT_SET(-1, ""),
SURVIVAL(0, "survival"),
CREATIVE(1, "creative"),
ADVENTURE(2, "adventure"),
SPECTATOR(3, "spectator");
int id;
String name;
        // Associates each game type with its numeric ID and serialized name.
        private GameType(int typeId, String nameIn)
        {
            this.id = typeId;
            this.name = nameIn;
        }
        /**
         * Returns the ID of this game type.
         */
        public int getID()
        {
            return this.id;
        }
        /**
         * Returns the name of this game type.
         */
        public String getName()
        {
            return this.name;
        }
/**
* Configures the player capabilities based on the game type
*/
public void configurePlayerCapabilities(PlayerCapabilities capabilities)
{
if (this == CREATIVE)
{
capabilities.allowFlying = true;
capabilities.isCreativeMode = true;
capabilities.disableDamage = true;
}
else if (this == SPECTATOR)
{
capabilities.allowFlying = true;
capabilities.isCreativeMode = false;
capabilities.disableDamage = true;
capabilities.isFlying = true;
}
else
{
capabilities.allowFlying = false;
capabilities.isCreativeMode = false;
capabilities.disableDamage = false;
capabilities.isFlying = false;
}
capabilities.allowEdit = !this.isAdventure();
}
        /**
         * Returns true for game types where world editing is restricted:
         * ADVENTURE and SPECTATOR (despite the method name covering only
         * the former).
         */
        public boolean isAdventure()
        {
            return this == ADVENTURE || this == SPECTATOR;
        }
/**
* Returns true if this is the CREATIVE game type
*/
public boolean isCreative()
{
|
[
" return this == CREATIVE;"
] | 497
|
lcc
|
java
| null |
812a9c309ebcd63dc97a9d9c993e5afbcef9d934b3a428bf
|
|
// CommonSecurityDescriptorTest.cs - NUnit Test Cases for CommonSecurityDescriptor
//
// Authors:
// James Bellinger <jfb@zer7.com>
//
// Copyright (C) 2012 James Bellinger
using System;
using System.Collections.Generic;
using System.Security.AccessControl;
using System.Security.Principal;
using NUnit.Framework;
namespace MonoTests.System.Security.AccessControl
{
[TestFixture]
public class CommonSecurityDescriptorTest
{
[Test]
public void DefaultOwnerAndGroup ()
{
CommonSecurityDescriptor csd = new CommonSecurityDescriptor
(false, false, ControlFlags.None, null, null, null, null);
Assert.IsNull (csd.Owner);
Assert.IsNull (csd.Group);
Assert.AreEqual (ControlFlags.DiscretionaryAclPresent
| ControlFlags.SelfRelative, csd.ControlFlags);
}
		[Test]
		public void GetBinaryForm ()
		{
			CommonSecurityDescriptor csd = new CommonSecurityDescriptor
				(false, false, ControlFlags.None, null, null, null, null);
			// A bare descriptor serializes to just the 20-byte header.
			Assert.AreEqual (20, csd.BinaryLength);
			byte[] binaryForm = new byte[csd.BinaryLength];
			csd.GetBinaryForm (binaryForm, 0);
			Assert.AreEqual (ControlFlags.DiscretionaryAclPresent | ControlFlags.SelfRelative,
				csd.ControlFlags);
			// The default 'Allow Everyone Full Access' serializes as NOT having a
			// DiscretionaryAcl, as the above demonstrates (byte 3 is 0 not 4).
			Assert.AreEqual (new byte[20] {
				1, 0, 0, 128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
			}, binaryForm);

			// Changing SystemAcl protection does nothing special.
			csd.SetSystemAclProtection (true, true);
			Assert.AreEqual (20, csd.BinaryLength);

			// Modifying the DiscretionaryAcl (even effective no-ops like this) causes serialization.
			csd.SetDiscretionaryAclProtection (false, true);
			Assert.AreEqual (48, csd.BinaryLength);
		}
[Test, ExpectedException (typeof (ArgumentOutOfRangeException))]
public void GetBinaryFormOffset ()
{
CommonSecurityDescriptor csd = new CommonSecurityDescriptor
(false, false, ControlFlags.None, null, null, null, null);
csd.GetBinaryForm (new byte[csd.BinaryLength], 1);
}
[Test, ExpectedException (typeof (ArgumentNullException))]
public void GetBinaryFormNull ()
{
CommonSecurityDescriptor csd = new CommonSecurityDescriptor
(false, false, ControlFlags.None, null, null, null, null);
csd.GetBinaryForm (null, 0);
}
		[Test]
		public void AefaModifiedFlagIsStoredOnDiscretionaryAcl ()
		{
			CommonSecurityDescriptor csd1, csd2;

			// Incidentally this shows the DiscretionaryAcl is NOT cloned.
			csd1 = new CommonSecurityDescriptor (false, false, ControlFlags.None, null, null, null, null);
			csd2 = new CommonSecurityDescriptor (false, false, ControlFlags.None, null, null, null, csd1.DiscretionaryAcl);
			Assert.AreSame (csd1.DiscretionaryAcl, csd2.DiscretionaryAcl);

			// Before any modification the default DACL renders as empty SDDL.
			Assert.AreEqual ("", csd1.GetSddlForm (AccessControlSections.Access));
			// Touching the shared ACL through one descriptor is visible through both.
			csd2.SetDiscretionaryAclProtection (false, true);
			Assert.AreEqual ("D:(A;;0xffffffff;;;WD)", csd1.GetSddlForm (AccessControlSections.Access));
			Assert.AreEqual ("D:(A;;0xffffffff;;;WD)", csd2.GetSddlForm (AccessControlSections.Access));
		}
[Test]
public void AefaRoundtrip ()
{
CommonSecurityDescriptor csd;
csd = new CommonSecurityDescriptor (false, false, ControlFlags.None, null, null, null, null);
Assert.AreEqual (20, csd.BinaryLength);
byte[] binaryForm1 = new byte[csd.BinaryLength];
csd.GetBinaryForm (binaryForm1, 0);
csd = new CommonSecurityDescriptor (false, false, new RawSecurityDescriptor (binaryForm1, 0));
byte[] binaryForm2 = new byte[csd.BinaryLength];
csd.GetBinaryForm (binaryForm2, 0);
Assert.AreEqual (binaryForm1, binaryForm2);
}
		[Test]
		public void GetSddlFormAefaRemovesDacl ()
		{
			CommonSecurityDescriptor csd = new CommonSecurityDescriptor
				(false, false, ControlFlags.None, null, null, null, null);
			// Default DACL is a single Allow-Everyone-Full-Access ACE, hidden from SDDL.
			Assert.AreEqual (1, csd.DiscretionaryAcl.Count);
			Assert.AreEqual ("", csd.GetSddlForm (AccessControlSections.Access));
			Assert.AreEqual (ControlFlags.DiscretionaryAclPresent
				| ControlFlags.SelfRelative,
				csd.ControlFlags);
			// The ACL wrapper is cached but each ACE access allocates a new object.
			Assert.AreSame (csd.DiscretionaryAcl, csd.DiscretionaryAcl);
			Assert.AreNotSame (csd.DiscretionaryAcl[0], csd.DiscretionaryAcl[0]);
			Assert.AreEqual ("", csd.GetSddlForm (AccessControlSections.Access));

			// Any modification makes the DACL appear in SDDL output.
			csd.SetDiscretionaryAclProtection (false, true);
			Assert.AreEqual ("D:(A;;0xffffffff;;;WD)", csd.GetSddlForm (AccessControlSections.Access));
			Assert.AreSame (csd.DiscretionaryAcl, csd.DiscretionaryAcl);
			Assert.AreNotSame (csd.DiscretionaryAcl[0], csd.DiscretionaryAcl[0]);
			Assert.AreEqual (ControlFlags.DiscretionaryAclPresent
				| ControlFlags.SelfRelative,
				csd.ControlFlags);

			// Protecting the DACL adds the 'P' flag and the protected control bit.
			csd.SetDiscretionaryAclProtection (true, true);
			Assert.AreEqual (1, csd.DiscretionaryAcl.Count);
			Assert.AreEqual ("D:P(A;;0xffffffff;;;WD)", csd.GetSddlForm (AccessControlSections.Access));
			Assert.AreEqual (ControlFlags.DiscretionaryAclPresent
				| ControlFlags.DiscretionaryAclProtected
				| ControlFlags.SelfRelative,
				csd.ControlFlags);

			// Unprotecting removes the flag but the DACL stays visible.
			csd.SetDiscretionaryAclProtection (false, false);
			Assert.AreEqual (1, csd.DiscretionaryAcl.Count);
			Assert.AreEqual ("D:(A;;0xffffffff;;;WD)", csd.GetSddlForm (AccessControlSections.Access));
			Assert.AreEqual (ControlFlags.DiscretionaryAclPresent
				| ControlFlags.SelfRelative,
				csd.ControlFlags);
		}
[Test, ExpectedException (typeof (ArgumentException))]
public void ContainerAndDSConsistencyEnforcedA ()
{
    // A DACL created with isDS: true...
    DiscretionaryAcl dsDacl = new DiscretionaryAcl (true, true, 0);
    SecurityIdentifier owner = new SecurityIdentifier (WellKnownSidType.LocalSystemSid, null);
    SecurityIdentifier group = new SecurityIdentifier (WellKnownSidType.BuiltinAdministratorsSid, null);

    // ...cannot be attached to a descriptor constructed with isDS: false.
    new CommonSecurityDescriptor (true, false, ControlFlags.None, owner, group, null, dsDacl);
}
[Test, ExpectedException (typeof (ArgumentException))]
public void ContainerAndDSConsistencyEnforcedB ()
{
    // A SACL created with isContainer: false...
    SystemAcl flatSacl = new SystemAcl (false, false, 0);
    SecurityIdentifier owner = new SecurityIdentifier (WellKnownSidType.LocalSystemSid, null);
    SecurityIdentifier group = new SecurityIdentifier (WellKnownSidType.BuiltinAdministratorsSid, null);

    // ...cannot be attached to a descriptor constructed with isContainer: true.
    new CommonSecurityDescriptor (true, false, ControlFlags.None, owner, group, flatSacl, null);
}
[Test, ExpectedException (typeof (ArgumentException))]
public void ContainerAndDSConsistencyEnforcedInSetter ()
{
    SecurityIdentifier owner = new SecurityIdentifier (WellKnownSidType.LocalSystemSid, null);
    SecurityIdentifier group = new SecurityIdentifier (WellKnownSidType.BuiltinAdministratorsSid, null);

    // Descriptor is created with isDS: false...
    CommonSecurityDescriptor descriptor = new CommonSecurityDescriptor
        (true, false, ControlFlags.None, owner, group, null, null);

    // ...so assigning a DACL built with isDS: true must throw.
    descriptor.DiscretionaryAcl = new DiscretionaryAcl (true, true, 0);
}
[Test]
public void DefaultDaclIsAllowEveryoneFullAccess ()
{
    SecurityIdentifier userSid = new SecurityIdentifier ("SY");
    SecurityIdentifier groupSid = new SecurityIdentifier ("BA");
    SecurityIdentifier everyoneSid = new SecurityIdentifier ("WD");
    CommonSecurityDescriptor csd; DiscretionaryAcl dacl; CommonAce ace;

    // Non-container: the default DACL is a single access-allowed ACE for
    // Everyone (WD) with the full access mask (-1 == 0xffffffff) and no
    // inheritance flags.
    csd = new CommonSecurityDescriptor (false, false, ControlFlags.None, userSid, groupSid, null, null);
    dacl = csd.DiscretionaryAcl;
    Assert.AreEqual (1, dacl.Count);
    ace = (CommonAce)dacl [0];
    Assert.AreEqual (-1, ace.AccessMask);
    Assert.AreEqual (AceFlags.None, ace.AceFlags);
    Assert.AreEqual (AceType.AccessAllowed, ace.AceType);
    Assert.AreEqual (20, ace.BinaryLength);
    Assert.IsFalse (ace.IsCallback);
    Assert.IsFalse (ace.IsInherited);
    Assert.AreEqual (0, ace.OpaqueLength);
    Assert.AreEqual (ace.SecurityIdentifier, everyoneSid);

    // Container: the same ACE, but marked to propagate to child objects
    // and child containers.
    csd = new CommonSecurityDescriptor (true, false, ControlFlags.None, userSid, groupSid, null, null);
    dacl = csd.DiscretionaryAcl;
    Assert.AreEqual (1, dacl.Count);
    ace = (CommonAce)dacl [0];
    Assert.AreEqual (-1, ace.AccessMask);
    Assert.AreEqual (AceFlags.ObjectInherit | AceFlags.ContainerInherit, ace.AceFlags);
    Assert.AreEqual (AceType.AccessAllowed, ace.AceType);
    Assert.AreEqual (20, ace.BinaryLength);
    Assert.IsFalse (ace.IsCallback);
    Assert.IsFalse (ace.IsInherited);
    Assert.AreEqual (0, ace.OpaqueLength);
    Assert.AreEqual (ace.SecurityIdentifier, everyoneSid);
}
[Test]
public void PurgeDefaultDacl ()
{
|
[
"\t\t\tSecurityIdentifier userSid = new SecurityIdentifier (\"SY\");"
] | 692
|
lcc
|
csharp
| null |
a179ca21c89046426dbe560130d12a91ef853ad8e707e8c2
|
|
# -*- coding: utf-8 -*-
# Copyright 2011,2013 Christoph Reiter
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import json
import collections
import threading
import gzip
from xml.dom.minidom import parseString
from gi.repository import GLib
from quodlibet.util import print_w
from quodlibet.compat import iteritems, urlencode, queue, cBytesIO
from quodlibet.util.urllib import urlopen, Request
from .util import get_api_key, GateKeeper
APP_KEY = "C6IduH7D"
gatekeeper = GateKeeper(requests_per_sec=3)
class AcoustidSubmissionThread(threading.Thread):
    """Submits chromaprint fingerprints to the AcoustID submission API.

    All network work happens on this thread; progress_cb and done_cb are
    invoked through the GLib main loop via idle callbacks.
    """

    URL = "https://api.acoustid.org/v2/submit"
    SONGS_PER_SUBMISSION = 50  # batch size per HTTP request
    TIMEOUT = 10.0  # socket timeout in seconds

    def __init__(self, results, progress_cb, done_cb):
        # results: fingerprint results to submit (each has .song,
        #   .length, .chromaprint)
        # progress_cb: called on the main loop with a float fraction
        # done_cb: called on the main loop after the last batch was sent
        super(AcoustidSubmissionThread, self).__init__()
        self.__callback = done_cb
        self.__results = results
        self.__stopped = False
        self.__progress_cb = progress_cb
        self.__done = 0
        # the thread starts itself right away
        self.start()

    def __idle(self, func, *args, **kwargs):
        # Schedule func(*args, **kwargs) on the GLib main loop; the call
        # is dropped if the thread was stopped in the meantime.
        def delayed():
            if self.__stopped:
                return
            func(*args, **kwargs)

        GLib.idle_add(delayed)

    def __send(self, urldata):
        # Submit one batch of per-song url-encoded tuples; rate limited
        # by the module-level gatekeeper. Failures are only logged.
        if self.__stopped:
            return

        gatekeeper.wait()
        self.__done += len(urldata)

        basedata = urlencode({
            "format": "xml",
            "client": APP_KEY,
            "user": get_api_key(),
        })

        # gzip the whole urlencoded body to keep the request small
        urldata = "&".join([basedata] + list(map(urlencode, urldata)))
        obj = cBytesIO()
        gzip.GzipFile(fileobj=obj, mode="wb").write(urldata.encode())
        urldata = obj.getvalue()

        headers = {
            "Content-Encoding": "gzip",
            "Content-type": "application/x-www-form-urlencoded"
        }
        req = Request(self.URL, urldata, headers)

        error = None
        try:
            response = urlopen(req, timeout=self.TIMEOUT)
        except EnvironmentError as e:
            error = "urllib error: " + str(e)
        else:
            xml = response.read()
            try:
                dom = parseString(xml)
            except:
                error = "xml error"
            else:
                # the submit API answers with <status>ok</status> on success
                status = dom.getElementsByTagName("status")
                if not status or not status[0].childNodes or not \
                        status[0].childNodes[0].nodeValue == "ok":
                    error = "response status error"

        if error:
            print_w("[fingerprint] Submission failed: " + error)

        # emit progress
        self.__idle(self.__progress_cb,
                float(self.__done) / len(self.__results))

    def run(self):
        urldata = []
        for i, result in enumerate(self.__results):
            song = result.song
            track = {
                "duration": int(round(result.length)),
                "fingerprint": result.chromaprint,
                "bitrate": song("~#bitrate"),
                "fileformat": song("~format"),
                "mbid": song("musicbrainz_trackid"),
                "track": song("title"),
                "artist": song.list("artist"),
                "album": song("album"),
                "albumartist": song("albumartist"),
                "year": song("~year"),
                "trackno": song("~#track"),
                "discno": song("~#disc"),
            }

            tuples = []
            for key, value in iteritems(track):
                # this also dismisses 0.. which should be ok here.
                if not value:
                    continue
                # the postfixes don't have to start at a specific point,
                # they just need to be different and numbers
                key += ".%d" % i
                if isinstance(value, list):
                    # list-valued tags become repeated key/value pairs
                    for val in value:
                        tuples.append((key, val))
                else:
                    tuples.append((key, value))

            urldata.append(tuples)

            # flush a full batch
            if len(urldata) >= self.SONGS_PER_SUBMISSION:
                self.__send(urldata)
                urldata = []

            if self.__stopped:
                return

        # submit the remainder, then signal completion
        if urldata:
            self.__send(urldata)

        self.__idle(self.__callback)

    def stop(self):
        # Cooperative stop: checked between songs and before idle callbacks.
        self.__stopped = True
class LookupResult(object):
    """Pairs a fingerprint result with the matched releases.

    error is a message string describing what went wrong, if anything.
    """

    def __init__(self, fresult, releases, error):
        self.fresult, self.releases, self.error = fresult, releases, error

    @property
    def song(self):
        """The song the fingerprint result was computed for."""
        return self.fresult.song
# One release candidate for a song: MusicBrainz release id, the acoustid
# match score, per-recording and per-result source counts, the number of
# mediums and the resulting tag dict.
Release = collections.namedtuple(
    "Release", ["id", "score", "sources", "all_sources",
                "medium_count", "tags"])
def parse_acoustid_response(json_data):
    """Get all possible tag combinations including the release ID and score.

    The idea is that for multiple songs the variant for each wins where
    the release ID is present for more songs and if equal
    (one song for example) the score wins.

    Needs meta=releases+recordings+tracks responses.

    Returns a list of Release namedtuples.
    """

    VARIOUS_ARTISTS_ARTISTID = "89ad4ac3-39f7-470e-963a-56509c546377"

    releases = []
    for res in json_data.get("results", []):
        score = res["score"]
        all_sources = 0
        recordings = []
        for rec in res.get("recordings", []):
            sources = rec["sources"]
            all_sources += sources
            rec_id = rec["id"]
            artists = [a["name"] for a in rec.get("artists", [])]
            artist_ids = [a["id"] for a in rec.get("artists", [])]

            for release in rec.get("releases", []):
                # release
                id_ = release["id"]
                date = release.get("date", {})
                album = release.get("title", "")
                album_id = release["id"]
                # build "YYYY-MM-DD" (or a prefix) from whatever parts exist
                parts = [date.get(k) for k in ["year", "month", "day"]]
                date = "-".join([u"%02d" % p for p in parts if p is not None])
                albumartists = []
                albumartist_ids = []
                # "Various Artists" is dropped from the albumartist tag
                for artist in release.get("artists", []):
                    if artist["id"] != VARIOUS_ARTISTS_ARTISTID:
                        albumartists.append(artist["name"])
                        albumartist_ids.append(artist["id"])
                discs = release.get("medium_count", 1)

                # medium
                medium = release["mediums"][0]
                disc = medium.get("position", 0)
                tracks = medium.get("track_count", 1)

                # track
                track_info = medium["tracks"][0]
                track_id = track_info["id"]
                track = track_info.get("position", 0)
                title = track_info.get("title", "")

                if disc and discs > 1:
                    discnumber = u"%d/%d" % (disc, discs)
                else:
                    discnumber = u""

                if track and tracks > 1:
                    tracknumber = u"%d/%d" % (track, tracks)
                else:
                    tracknumber = u""

                tags = {
                    "title": title,
                    "artist": "\n".join(artists),
                    "albumartist": "\n".join(albumartists),
                    "date": date,
                    "discnumber": discnumber,
                    "tracknumber": tracknumber,
                    "album": album,
                }

                mb = {
                    "musicbrainz_releasetrackid": track_id,
                    "musicbrainz_trackid": rec_id,
                    "musicbrainz_albumid": album_id,
                    "musicbrainz_albumartistid": "\n".join(albumartist_ids),
                    "musicbrainz_artistid": "\n".join(artist_ids),
                }
                # not that useful, ignore for now
                del mb["musicbrainz_releasetrackid"]
                tags.update(mb)

                # all_sources (slot 3) is only known after the whole result
                # was scanned; filled in below
                recordings.append([id_, score, sources, 0, discs, tags])

        for rec in recordings:
            rec[3] = all_sources
            releases.append(Release(*rec))

    return releases
class AcoustidLookupThread(threading.Thread):
URL = "https://api.acoustid.org/v2/lookup"
MAX_SONGS_PER_SUBMISSION = 5
TIMEOUT = 10.0
def __init__(self, progress_cb):
    # progress_cb: scheduled on the GLib main loop for processed results
    # (exact call signature depends on code past this view --
    # NOTE(review): confirm against __process/run)
    super(AcoustidLookupThread, self).__init__()
    self.__progress_cb = progress_cb
    self.__queue = queue.Queue()
    self.__stopped = False
    # the thread starts itself right away
    self.start()
def put(self, result):
    """Queue a FingerPrintResult"""
    # queue.Queue is thread safe, so this may be called from any thread
    self.__queue.put(result)
def __idle(self, func, *args, **kwargs):
    # Run func(*args, **kwargs) on the GLib main loop; the call is
    # dropped if the thread was stopped before the idle callback fires.
    def delayed():
        if self.__stopped:
            return
        func(*args, **kwargs)

    GLib.idle_add(delayed)
def __process(self, results):
req_data = []
req_data.append(urlencode({
"format": "json",
"client": APP_KEY,
"batch": "1",
}))
for i, result in enumerate(results):
postfix = ".%d" % i
req_data.append(urlencode({
"duration" + postfix: str(int(round(result.length))),
"fingerprint" + postfix: result.chromaprint,
}))
req_data.append("meta=releases+recordings+tracks+sources")
urldata = "&".join(req_data)
obj = cBytesIO()
gzip.GzipFile(fileobj=obj, mode="wb").write(urldata.encode())
urldata = obj.getvalue()
headers = {
"Content-Encoding": "gzip",
"Content-type": "application/x-www-form-urlencoded"
}
req = Request(self.URL, urldata, headers)
releases = {}
error = ""
try:
response = urlopen(req, timeout=self.TIMEOUT)
except EnvironmentError as e:
error = "urllib error: " + str(e)
else:
try:
data = response.read()
data = json.loads(data.decode())
except ValueError as e:
error = str(e)
else:
if data["status"] == "ok":
for result_data in data.get("fingerprints", []):
if "index" not in result_data:
continue
index = result_data["index"]
releases[index] = parse_acoustid_response(result_data)
|
[
" for i, result in enumerate(results):"
] | 864
|
lcc
|
python
| null |
fbaaf263e4f759d000390c802a4c3c1ccb343adcb8654c23
|
|
// ----------------------------------------------------------------------------
// <copyright file="PhotonEditor.cs" company="Exit Games GmbH">
// PhotonNetwork Framework for Unity - Copyright (C) 2011 Exit Games GmbH
// </copyright>
// <summary>
// MenuItems and in-Editor scripts for PhotonNetwork.
// </summary>
// <author>developer@exitgames.com</author>
// ----------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
using ExitGames.Client.Photon;
using UnityEditor;
using UnityEditorInternal;
using UnityEngine;
/// <summary>
/// Holds every user-facing string of the PUN editor wizard in one place,
/// so the whole UI text set could be swapped out (e.g. for translation).
/// </summary>
public class Text
{
    // window / generic
    public string WindowTitle = "PUN Wizard";
    public string SetupWizardWarningTitle = "Warning";
    public string SetupWizardWarningMessage = "You have not yet run the Photon setup wizard! Your game won't be able to connect. See Windows -> Photon Unity Networking.";
    public string MainMenuButton = "Main Menu";

    // cloud registration screen
    public string ConnectButton = "Connect to Photon Cloud";
    public string UsePhotonLabel = "Using the Photon Cloud is free for development. If you don't have an account yet, enter your email and register.";
    public string SendButton = "Send";
    public string EmailLabel = "Email:";
    public string SignedUpAlreadyLabel = "I am already signed up. Let me enter my AppId.";
    public string SetupButton = "Setup";
    public string RegisterByWebsiteLabel = "I want to register by a website.";
    public string AccountWebsiteButton = "Open account website";
    public string SelfHostLabel = "I want to host my own server. Let me set it up.";
    public string SelfHostSettingsButton = "Open self-hosting settings";
    public string MobileExportNoteLabel = "Build for mobiles impossible. Get PUN+ or Unity Pro for mobile.";
    public string MobilePunPlusExportNoteLabel = "PUN+ available. Using native sockets for iOS/Android.";
    public string EmailInUseLabel = "The provided e-mail-address has already been registered.";
    public string KnownAppIdLabel = "Ah, I know my Application ID. Get me to setup.";
    public string SeeMyAccountLabel = "Mh, see my account page";
    public string SelfHostSettingButton = "Open self-hosting settings";
    public string OopsLabel = "Oops!";
    public string SeeMyAccountPage = "";
    public string CancelButton = "Cancel";
    public string PhotonCloudConnect = "Connect to Photon Cloud";
    public string SetupOwnHostLabel = "Setup own Photon Host";

    // main wizard screen
    public string PUNWizardLabel = "Photon Unity Networking (PUN) Wizard";
    public string SettingsButton = "Settings";
    public string SetupServerCloudLabel = "Setup wizard for setting up your own server or the cloud.";
    public string WarningPhotonDisconnect = "";
    public string ConverterLabel = "Converter";
    public string StartButton = "Start";
    public string UNtoPUNLabel = "Converts pure Unity Networking to Photon Unity Networking.";
    public string SettingsFileLabel = "Settings File";
    public string LocateSettingsButton = "Locate settings asset";
    public string SettingsHighlightLabel = "Highlights the used photon settings file in the project.";

    // documentation links
    public string DocumentationLabel = "Documentation";
    public string OpenPDFText = "Open PDF";
    public string OpenPDFTooltip = "Opens the local documentation pdf.";
    public string OpenDevNetText = "Open DevNet";
    public string OpenDevNetTooltip = "Online documentation for Photon.";
    public string OpenCloudDashboardText = "Open Cloud Dashboard";
    public string OpenCloudDashboardTooltip = "Review Cloud App information and statistics.";
    public string OpenForumText = "Open Forum";
    public string OpenForumTooltip = "Online support for Photon.";
    public string QuestionsLabel = "Questions? Need help or want to give us feedback? You are most welcome!";
    public string SeeForumButton = "See the Photon Forum";
    public string OpenDashboardButton = "Open Dashboard (web)";

    // cloud setup screen
    public string AppIdLabel = "Your AppId";
    public string AppIdInfoLabel = "The AppId a Guid that identifies your game in the Photon Cloud. Find it on your dashboard page.";
    public string CloudRegionLabel = "Cloud Region";
    public string RegionalServersInfo = "Photon Cloud has regional servers. Picking one near your customers improves ping times. You could use more than one but this setup does not support it.";
    public string SaveButton = "Save";
    public string SettingsSavedTitle = "Success";
    public string SettingsSavedMessage = "Saved your settings.\nConnectUsingSettings() will use the settings file.";
    public string OkButton = "Ok";
    public string SeeMyAccountPageButton = "Mh, see my account page";

    // self-hosting screen
    public string SetupOwnServerLabel = "Running my app in the cloud was fun but...\nLet me setup my own Photon server.";
    public string OwnHostCloudCompareLabel = "I am not quite sure how 'my own host' compares to 'cloud'.";
    public string ComparisonPageButton = "See comparison page";
    public string YourPhotonServerLabel = "Your Photon Server";
    public string AddressIPLabel = "Address/ip:";
    public string PortLabel = "Port:";
    public string LicensesLabel = "Licenses";
    public string LicenseDownloadText = "Free License Download";
    public string LicenseDownloadTooltip = "Get your free license for up to 100 concurrent players.";
    public string TryPhotonAppLabel = "Running my own server is too much hassle..\nI want to give Photon's free app a try.";
    public string GetCloudAppButton = "Get the free cloud app";

    // connection / error dialogs
    public string ConnectionTitle = "Connecting";
    public string ConnectionInfo = "Connecting to the account service..";
    public string ErrorTextTitle = "Error";
    public string ServerSettingsMissingLabel = "Photon Unity Networking (PUN) is missing the 'ServerSettings' script. Re-import PUN to fix this.";
    public string MoreThanOneLabel = "There are more than one ";
    public string FilesInResourceFolderLabel = " files in 'Resources' folder. Check your project to keep only one. Using: ";

    // RPC-list maintenance dialogs
    public string IncorrectRPCListTitle = "Warning: RPC-list becoming incompatible!";
    public string IncorrectRPCListLabel = "Your project's RPC-list is full, so we can't add some RPCs just compiled.\n\nBy removing outdated RPCs, the list will be long enough but incompatible with older client builds!\n\nMake sure you change the game version where you use PhotonNetwork.ConnectUsingSettings().";
    public string RemoveOutdatedRPCsLabel = "Remove outdated RPCs";
    public string FullRPCListTitle = "Warning: RPC-list is full!";
    public string FullRPCListLabel = "Your project's RPC-list is too long for PUN.\n\nYou can change PUN's source to use short-typed RPC index. Look for comments 'LIMITS RPC COUNT'\n\nAlternatively, remove some RPC methods (use more parameters per RPC maybe).\n\nAfter a RPC-list refresh, make sure you change the game version where you use PhotonNetwork.ConnectUsingSettings().";
    public string SkipRPCListUpdateLabel = "Skip RPC-list update";
    public string PUNNameReplaceTitle = "Warning: RPC-list Compatibility";
    public string PUNNameReplaceLabel = "PUN replaces RPC names with numbers by using the RPC-list. All clients must use the same list for that.\n\nClearing it most likely makes your client incompatible with previous versions! Change your game version or make sure the RPC-list matches other clients.";
    public string RPCListCleared = "Clear RPC-list";
    public string ServerSettingsCleanedWarning = "Cleared the PhotonServerSettings.RpcList! This makes new builds incompatible with older ones. Better change game version in PhotonNetwork.ConnectUsingSettings().";
    public string BestRegionLabel = "best";
}
[InitializeOnLoad]
public class PhotonEditor : EditorWindow
{
// Localizable UI texts (see the Text class above).
public static Text CurrentLang = new Text();

// Identifies this product when registering accounts via the account service.
protected static AccountService.Origin RegisterOrigin = AccountService.Origin.Pun;

protected Vector2 scrollPos = Vector2.zero;

// Static file paths and URLs used by the wizard.
protected static string DocumentationLocation = "Assets/Photon Unity Networking/PhotonNetwork-Documentation.pdf";
protected static string UrlFreeLicense = "https://www.exitgames.com/en/OnPremise/Dashboard";
protected static string UrlDevNet = "http://doc.exitgames.com/en/pun/current/getting-started";
protected static string UrlForum = "http://forum.exitgames.com";
protected static string UrlCompare = "http://doc.exitgames.com/en/realtime/current/getting-started/onpremise-or-saas";
protected static string UrlHowToSetup = "http://doc.exitgames.com/en/onpremise/current/getting-started/photon-server-in-5min";
protected static string UrlAppIDExplained = "http://doc.exitgames.com/en/realtime/current/getting-started/obtain-your-app-id";
protected static string UrlAccountPage = "https://www.exitgames.com/Account/SignIn?email="; // opened in browser
protected static string UrlCloudDashboard = "https://www.exitgames.com/Dashboard?email=";

// Which top-level screen the window shows.
private enum GUIState
{
    Uninitialized,
    Main,
    Setup
}

// Which step of the setup flow is active.
private enum PhotonSetupStates
{
    RegisterForPhotonCloud,
    EmailAlreadyRegistered,
    SetupPhotonCloud,
    SetupSelfHosted
}

private GUIState guiState = GUIState.Uninitialized;
private bool isSetupWizard = false;  // set true when opened via ShowRegistrationWizard; such a window closes itself on leaving the setup state
bool open = false;                   // AppId help toggle state
private PhotonSetupStates photonSetupState = PhotonSetupStates.RegisterForPhotonCloud;

private static double lastWarning = 0;  // editor time of the last disconnect warning (see EditorUpdate)
private static bool postCompileActionsDone;

// custom (self-hosted) server connection values
private string photonAddress = "127.0.0.1"; // custom server
private int photonPort = 5055;
private ConnectionProtocol photonProtocol;

private string emailAddress = string.Empty;
private string cloudAppId = string.Empty;

private static bool dontCheckPunSetupField;  // backing field for dontCheckPunSetup
private static Texture2D WizardIcon;
protected static Type WindowType = typeof(PhotonEditor);

// The toolbar needs the region enum as a string[]; filled once in the static ctor.
private static readonly string[] CloudServerRegionNames;
private static CloudRegionCode selectedRegion;
private bool helpRegion;  // region help toggle state

// Results of CheckPunPlus() (native socket libs for mobile).
private static bool isPunPlus;
private static bool androidLibExists;
private static bool iphoneLibExists;
/// <summary>
/// Can be used to (temporarily) disable the checks for PUN Setup and scene PhotonViews.
/// This will prevent scene PhotonViews from being updated, so be careful.
/// When you re-set this value, checks are used again and scene PhotonViews get IDs as needed.
/// </summary>
protected static bool dontCheckPunSetup
{
    get
    {
        return dontCheckPunSetupField;
    }

    set
    {
        // Only touch the backing field when the value actually changes.
        if (dontCheckPunSetupField == value)
        {
            return;
        }

        dontCheckPunSetupField = value;
    }
}
static PhotonEditor()
{
    // Hook the editor callbacks so the wizard can react to project and
    // hierarchy changes, play-mode switches and the regular update tick.
    EditorApplication.projectWindowChanged += EditorUpdate;
    EditorApplication.hierarchyWindowChanged += EditorUpdate;
    EditorApplication.playmodeStateChanged += PlaymodeStateChanged;
    EditorApplication.update += OnUpdate;

    WizardIcon = AssetDatabase.LoadAssetAtPath("Assets/Photon Unity Networking/photoncloud-icon.png", typeof(Texture2D)) as Texture2D;

    // to be used in toolbar, the enum needs conversion to string[] being done here, once.
    Array enumValues = Enum.GetValues(typeof(CloudRegionCode));
    CloudServerRegionNames = new string[enumValues.Length];
    for (int i = 0; i < CloudServerRegionNames.Length; i++)
    {
        CloudServerRegionNames[i] = enumValues.GetValue(i).ToString();
        if (CloudServerRegionNames[i].Equals("none"))
        {
            // "none" stands for automatic region selection; show a friendlier label.
            CloudServerRegionNames[i] = PhotonEditor.CurrentLang.BestRegionLabel;
        }
    }

    // detect optional packages
    PhotonEditor.CheckPunPlus();
}
internal protected static bool CheckPunPlus()
{
    // PUN+ ships native socket plugin libraries; their presence on disk
    // is what identifies a PUN+ installation.
    iphoneLibExists = File.Exists("Assets/Plugins/IPhone/libPhotonSocketPlugin.a");
    androidLibExists = File.Exists("Assets/Plugins/Android/libPhotonSocketPlugin.so");

    isPunPlus = androidLibExists || iphoneLibExists;
    return isPunPlus;
}
/// <summary>Imports the bundled Win8/WP8 assembly package once, on Unity versions that need it.</summary>
private static void ImportWin8Support()
{
    if (EditorApplication.isCompiling || EditorApplication.isPlayingOrWillChangePlaymode)
    {
        return; // don't import while compiling
    }

#if UNITY_4_2 || UNITY_4_3 || UNITY_4_4 || UNITY_4_5 || UNITY_4_6 || UNITY_5_0 || UNITY_5_1 || UNITY_5_2
    const string win8Package = "Assets/Plugins/Photon3Unity3D-Win8.unitypackage";

    // only import when the libs are missing and the package actually ships with the project
    bool win8LibsExist = File.Exists("Assets/Plugins/WP8/Photon3Unity3D.dll") && File.Exists("Assets/Plugins/Metro/Photon3Unity3D.dll");
    if (!win8LibsExist && File.Exists(win8Package))
    {
        AssetDatabase.ImportPackage(win8Package, false);
    }
#endif
}
[MenuItem("Window/Photon Unity Networking/Locate Settings Asset %#&p")]
protected static void Inspect()
{
    // Ping and select the PhotonServerSettings asset in the Project window.
    EditorGUIUtility.PingObject(PhotonNetwork.PhotonServerSettings);
    Selection.activeObject = PhotonNetwork.PhotonServerSettings;
}
[MenuItem("Window/Photon Unity Networking/PUN Wizard &p")]
protected static void Init()
{
    // Open (or focus) the wizard window and show the main menu.
    PhotonEditor win = GetWindow(WindowType, false, CurrentLang.WindowTitle, true) as PhotonEditor;
    win.InitPhotonSetupWindow();
    win.isSetupWizard = false;  // opened manually, not by the auto-setup check
    win.SwitchMenuState(GUIState.Main);
}
/// <summary>Creates an Editor window, showing the cloud-registration wizard for Photon (entry point to setup PUN).</summary>
protected static void ShowRegistrationWizard()
{
    PhotonEditor win = GetWindow(WindowType, false, CurrentLang.WindowTitle, true) as PhotonEditor;
    win.isSetupWizard = true;  // wizard windows close themselves when leaving the setup state
    win.InitPhotonSetupWindow();
}
/// <summary>Re-initializes the Photon Setup window and shows one of three states: register cloud, setup cloud, setup self-hosted.</summary>
protected void InitPhotonSetupWindow()
{
    this.minSize = MinSize;
    this.SwitchMenuState(GUIState.Setup);
    this.ReApplySettingsToWindow();

    // Map the configured hosting option onto the matching setup screen.
    ServerSettings.HostingOption hostType = PhotonEditor.Current.HostType;
    if (hostType == ServerSettings.HostingOption.PhotonCloud || hostType == ServerSettings.HostingOption.BestRegion)
    {
        this.photonSetupState = PhotonSetupStates.SetupPhotonCloud;
    }
    else if (hostType == ServerSettings.HostingOption.SelfHosted)
    {
        this.photonSetupState = PhotonSetupStates.SetupSelfHosted;
    }
    else
    {
        // HostingOption.NotSet and anything unexpected start the registration flow.
        this.photonSetupState = PhotonSetupStates.RegisterForPhotonCloud;
    }
}
// called 100 times / sec
private static void OnUpdate()
{
    // after a compile, check RPCs to create a cache-list
    if (!postCompileActionsDone && !EditorApplication.isCompiling && !EditorApplication.isPlayingOrWillChangePlaymode && PhotonEditor.Current != null)
    {
#if UNITY_4_2 || UNITY_4_3 || UNITY_4_4 || UNITY_4_5 || UNITY_4_6 || UNITY_5_0 || UNITY_5_1 || UNITY_5_2
        // wait for asset import/refresh to finish first
        if (EditorApplication.isUpdating) return;
#endif

        PhotonEditor.UpdateRpcList();
        postCompileActionsDone = true; // on compile, this falls back to false (without actively doing anything)

#if UNITY_4_2 || UNITY_4_3 || UNITY_4_4 || UNITY_4_5 || UNITY_4_6 || UNITY_5_0 || UNITY_5_1 || UNITY_5_2
        PhotonEditor.ImportWin8Support();
#endif
    }
}
// called in editor, opens wizard for initial setup, keeps scene PhotonViews up to date and closes connections when compiling (to avoid issues)
private static void EditorUpdate()
{
    if (dontCheckPunSetup || PhotonEditor.Current == null)
    {
        return;
    }

    // serverSetting is null when the file gets deleted. otherwise, the wizard should only run once and only if hosting option is not (yet) set
    if (!PhotonEditor.Current.DisableAutoOpenWizard && PhotonEditor.Current.HostType == ServerSettings.HostingOption.NotSet)
    {
        ShowRegistrationWizard();
    }

    // Workaround for TCP crash. Plus this surpresses any other recompile errors.
    if (EditorApplication.isCompiling)
    {
        if (PhotonNetwork.connected)
        {
            // Prevent error spam: warn at most once every 3 seconds.
            // BUGFIX: the comparison used to be inverted
            // (lastWarning > timeSinceStartup - 3), which suppressed the
            // very first warning entirely and only logged while a warning
            // was already recent.
            if (lastWarning < EditorApplication.timeSinceStartup - 3)
            {
                Debug.LogWarning(CurrentLang.WarningPhotonDisconnect);
                lastWarning = EditorApplication.timeSinceStartup;
            }

            PhotonNetwork.Disconnect();
        }
    }
}
// called in editor on change of play-mode (used to show a message popup that connection settings are incomplete)
private static void PlaymodeStateChanged()
{
    if (dontCheckPunSetup)
    {
        return;
    }

    // Only react when the editor is about to enter play mode.
    bool enteringPlayMode = !EditorApplication.isPlaying && EditorApplication.isPlayingOrWillChangePlaymode;
    if (!enteringPlayMode)
    {
        return;
    }

    // No hosting option picked yet: the game cannot connect, so warn the user.
    if (PhotonEditor.Current.HostType == ServerSettings.HostingOption.NotSet)
    {
        EditorUtility.DisplayDialog(CurrentLang.SetupWizardWarningTitle, CurrentLang.SetupWizardWarningMessage, CurrentLang.OkButton);
    }
}
private void SwitchMenuState(GUIState newState)
{
    // Remember the requested state before deciding whether to close.
    this.guiState = newState;

    // A window opened as setup wizard only exists for the setup state;
    // leaving that state closes it.
    bool leavingSetup = newState != GUIState.Setup;
    if (this.isSetupWizard && leavingSetup)
    {
        this.Close();
    }
}
/// <summary>Top-level GUI dispatch: lazily picks main vs. setup screen, then renders it inside a scroll view.</summary>
protected virtual void OnGUI()
{
    PhotonSetupStates oldGuiState = this.photonSetupState; // used to fix an annoying Editor input field issue: wont refresh until focus is changed.

    GUI.SetNextControlName("");
    this.scrollPos = GUILayout.BeginScrollView(this.scrollPos);

    if (this.guiState == GUIState.Uninitialized)
    {
        // First OnGUI call after (re)load: choose the screen from the settings file.
        this.ReApplySettingsToWindow();
        this.guiState = (PhotonEditor.Current.HostType == ServerSettings.HostingOption.NotSet) ? GUIState.Setup : GUIState.Main;
    }

    if (this.guiState == GUIState.Main)
    {
        this.OnGuiMainWizard();
    }
    else
    {
        this.OnGuiRegisterCloudApp();
    }

    GUILayout.EndScrollView();

    if (oldGuiState != this.photonSetupState)
    {
        // Setup state changed this frame: drop keyboard focus so text fields refresh.
        GUI.FocusControl("");
    }
}
/// <summary>Draws the setup GUI: registration form, "email already registered",
/// cloud setup or self-hosted setup, depending on photonSetupState.</summary>
protected virtual void OnGuiRegisterCloudApp()
{
    GUI.skin.label.wordWrap = true;
    if (!this.isSetupWizard)
    {
        // Opened from the main menu: offer a way back.
        GUILayout.BeginHorizontal();
        GUILayout.FlexibleSpace();
        if (GUILayout.Button(CurrentLang.MainMenuButton, GUILayout.ExpandWidth(false)))
        {
            this.SwitchMenuState(GUIState.Main);
        }
        GUILayout.EndHorizontal();

        GUILayout.Space(15);
    }

    if (this.photonSetupState == PhotonSetupStates.RegisterForPhotonCloud)
    {
        // Email registration form plus the alternatives: existing AppId,
        // website registration, or self-hosting.
        GUI.skin.label.fontStyle = FontStyle.Bold;
        GUILayout.Label(CurrentLang.ConnectButton);
        EditorGUILayout.Separator();
        GUI.skin.label.fontStyle = FontStyle.Normal;

        GUILayout.Label(CurrentLang.UsePhotonLabel);
        EditorGUILayout.Separator();
        this.emailAddress = EditorGUILayout.TextField(CurrentLang.EmailLabel, this.emailAddress);
        if (GUILayout.Button(CurrentLang.SendButton))
        {
            GUIUtility.keyboardControl = 0;
            this.RegisterWithEmail(this.emailAddress);
        }

        GUILayout.Space(20);
        GUILayout.Label(CurrentLang.SignedUpAlreadyLabel);
        if (GUILayout.Button(CurrentLang.SetupButton))
        {
            this.photonSetupState = PhotonSetupStates.SetupPhotonCloud;
        }
        EditorGUILayout.Separator();

        GUILayout.Label(CurrentLang.RegisterByWebsiteLabel);
        if (GUILayout.Button(CurrentLang.AccountWebsiteButton))
        {
            EditorUtility.OpenWithDefaultApp(UrlAccountPage + Uri.EscapeUriString(this.emailAddress));
        }
        EditorGUILayout.Separator();

        GUILayout.Label(CurrentLang.SelfHostLabel);
        if (GUILayout.Button(CurrentLang.SelfHostSettingsButton))
        {
            this.photonSetupState = PhotonSetupStates.SetupSelfHosted;
        }
        GUILayout.FlexibleSpace();

        // mobile export needs either PUN+ or an advanced license
        if (!InternalEditorUtility.HasAdvancedLicenseOnBuildTarget(BuildTarget.Android) || !InternalEditorUtility.HasAdvancedLicenseOnBuildTarget(BuildTarget.iPhone))
        {
            GUILayout.Label(CurrentLang.MobileExportNoteLabel);
        }
        EditorGUILayout.Separator();
    }
    else if (this.photonSetupState == PhotonSetupStates.EmailAlreadyRegistered)
    {
        // The account service reported the email as already registered.
        GUI.skin.label.fontStyle = FontStyle.Bold;
        GUILayout.Label(CurrentLang.OopsLabel);
        GUI.skin.label.fontStyle = FontStyle.Normal;

        GUILayout.Label(CurrentLang.EmailInUseLabel);
        if (GUILayout.Button(CurrentLang.SeeMyAccountPageButton))
        {
            EditorUtility.OpenWithDefaultApp(UrlCloudDashboard + Uri.EscapeUriString(this.emailAddress));
        }
        EditorGUILayout.Separator();

        GUILayout.Label(CurrentLang.KnownAppIdLabel);
        GUILayout.BeginHorizontal();
        if (GUILayout.Button(CurrentLang.CancelButton))
        {
            this.photonSetupState = PhotonSetupStates.RegisterForPhotonCloud;
        }
        if (GUILayout.Button(CurrentLang.SetupButton))
        {
            this.photonSetupState = PhotonSetupStates.SetupPhotonCloud;
        }
        GUILayout.EndHorizontal();
    }
    else if (this.photonSetupState == PhotonSetupStates.SetupPhotonCloud)
    {
        // cloud setup
        GUI.skin.label.fontStyle = FontStyle.Bold;
        GUILayout.Label(CurrentLang.PhotonCloudConnect);
        GUI.skin.label.fontStyle = FontStyle.Normal;

        EditorGUILayout.Separator();
        this.OnGuiSetupCloudAppId();
        this.OnGuiCompareAndHelpOptions();
    }
    else if (this.photonSetupState == PhotonSetupStates.SetupSelfHosted)
    {
        // self-hosting setup
        GUI.skin.label.fontStyle = FontStyle.Bold;
        GUILayout.Label(CurrentLang.SetupOwnHostLabel);
        GUI.skin.label.fontStyle = FontStyle.Normal;

        EditorGUILayout.Separator();
        this.OnGuiSetupSelfhosting();
        this.OnGuiCompareAndHelpOptions();
    }
}
/// <summary>Draws the main wizard screen: logo, setup/settings/converter shortcuts and documentation links.</summary>
protected virtual void OnGuiMainWizard()
{
    // centered wizard icon
    GUILayout.BeginHorizontal();
    GUILayout.FlexibleSpace();
    GUILayout.Label(WizardIcon);
    GUILayout.FlexibleSpace();
    GUILayout.EndHorizontal();
    EditorGUILayout.Separator();

    GUILayout.Label(CurrentLang.PUNWizardLabel, EditorStyles.boldLabel);
    if (isPunPlus)
    {
        GUILayout.Label(CurrentLang.MobilePunPlusExportNoteLabel);
    }
    else if (!InternalEditorUtility.HasAdvancedLicenseOnBuildTarget(BuildTarget.Android) || !InternalEditorUtility.HasAdvancedLicenseOnBuildTarget(BuildTarget.iPhone))
    {
        GUILayout.Label(CurrentLang.MobileExportNoteLabel);
    }

    EditorGUILayout.Separator();

    // settings button
    GUILayout.BeginHorizontal();
    GUILayout.Label(CurrentLang.SettingsButton, EditorStyles.boldLabel, GUILayout.Width(100));
    if (GUILayout.Button(new GUIContent(CurrentLang.SetupButton, CurrentLang.SetupServerCloudLabel)))
    {
        this.InitPhotonSetupWindow();
    }
    GUILayout.EndHorizontal();
    EditorGUILayout.Separator();

    // find / select settings asset
    GUILayout.BeginHorizontal();
    GUILayout.Label(CurrentLang.SettingsFileLabel, EditorStyles.boldLabel, GUILayout.Width(100));
    if (GUILayout.Button(new GUIContent(CurrentLang.LocateSettingsButton, CurrentLang.SettingsHighlightLabel)))
    {
        EditorGUIUtility.PingObject(PhotonEditor.Current);
    }
    GUILayout.EndHorizontal();
    GUILayout.FlexibleSpace();

    // converter
    GUILayout.BeginHorizontal();
    GUILayout.Label(CurrentLang.ConverterLabel, EditorStyles.boldLabel, GUILayout.Width(100));
    if (GUILayout.Button(new GUIContent(CurrentLang.StartButton, CurrentLang.UNtoPUNLabel)))
    {
        PhotonConverter.RunConversion();
    }
    GUILayout.EndHorizontal();
    EditorGUILayout.Separator();

    // documentation
    GUILayout.BeginHorizontal();
    GUILayout.Label(CurrentLang.DocumentationLabel, EditorStyles.boldLabel, GUILayout.Width(100));
    GUILayout.BeginVertical();
    if (GUILayout.Button(new GUIContent(CurrentLang.OpenPDFText, CurrentLang.OpenPDFTooltip)))
    {
        EditorUtility.OpenWithDefaultApp(DocumentationLocation);
    }

    if (GUILayout.Button(new GUIContent(CurrentLang.OpenDevNetText, CurrentLang.OpenDevNetTooltip)))
    {
        EditorUtility.OpenWithDefaultApp(UrlDevNet);
    }

    if (GUILayout.Button(new GUIContent(CurrentLang.OpenCloudDashboardText, CurrentLang.OpenCloudDashboardTooltip)))
    {
        EditorUtility.OpenWithDefaultApp(UrlCloudDashboard + Uri.EscapeUriString(this.emailAddress));
    }

    if (GUILayout.Button(new GUIContent(CurrentLang.OpenForumText, CurrentLang.OpenForumTooltip)))
    {
        EditorUtility.OpenWithDefaultApp(UrlForum);
    }

    GUILayout.EndVertical();
    GUILayout.EndHorizontal();
}
/// <summary>
/// Draws the footer shared by the setup pages: a forum button and - unless the
/// self-hosted page is active - a button opening the cloud dashboard.
/// </summary>
protected virtual void OnGuiCompareAndHelpOptions()
{
    GUILayout.FlexibleSpace();
    GUILayout.Label(CurrentLang.QuestionsLabel);
    if (GUILayout.Button(CurrentLang.SeeForumButton))
    {
        Application.OpenURL(UrlForum);
    }
    if (photonSetupState != PhotonSetupStates.SetupSelfHosted)
    {
        if (GUILayout.Button(CurrentLang.OpenDashboardButton))
        {
            // NOTE(review): EscapeUriString leaves '&'/'+' unescaped in the e-mail;
            // EscapeDataString would be safer for a query value - confirm before changing.
            EditorUtility.OpenWithDefaultApp(UrlCloudDashboard + Uri.EscapeUriString(this.emailAddress));
        }
    }
}
/// <summary>
/// Draws the Photon Cloud setup page: AppId text field, region toolbar,
/// cancel/save buttons, plus shortcuts to the self-hosted setup page and the
/// cloud-versus-own-host comparison page.
/// </summary>
protected virtual void OnGuiSetupCloudAppId()
{
    GUILayout.Label(CurrentLang.AppIdLabel);
    GUILayout.BeginHorizontal();
    this.cloudAppId = EditorGUILayout.TextField(this.cloudAppId);
    open = GUILayout.Toggle(open, PhotonGUI.HelpIcon, GUIStyle.none, GUILayout.ExpandWidth(false));
    GUILayout.EndHorizontal();
    if (open) GUILayout.Label(CurrentLang.AppIdInfoLabel);
    EditorGUILayout.Separator();

    GUILayout.Label(CurrentLang.CloudRegionLabel);
    GUILayout.BeginHorizontal();
    int toolbarValue = GUILayout.Toolbar((int)selectedRegion, CloudServerRegionNames); // the enum CloudRegionCode is converted into a string[] in init (toolbar can't use enum)
    helpRegion = GUILayout.Toggle( helpRegion, PhotonGUI.HelpIcon, GUIStyle.none, GUILayout.ExpandWidth( false ) );
    GUILayout.EndHorizontal();
    if (helpRegion) GUILayout.Label(CurrentLang.RegionalServersInfo);
    PhotonEditor.selectedRegion = (CloudRegionCode)toolbarValue;
    EditorGUILayout.Separator();

    GUILayout.BeginHorizontal();
    if (GUILayout.Button(CurrentLang.CancelButton))
    {
        // drop edits: clear keyboard focus and re-read values from the settings asset
        GUIUtility.keyboardControl = 0;
        this.ReApplySettingsToWindow();
    }
    if (GUILayout.Button(CurrentLang.SaveButton))
    {
        GUIUtility.keyboardControl = 0;
        this.cloudAppId = this.cloudAppId.Trim();
        PhotonEditor.Current.UseCloud(this.cloudAppId);
        PhotonEditor.Current.PreferredRegion = PhotonEditor.selectedRegion;
        // region "none" means: let PUN pick the best region at runtime
        PhotonEditor.Current.HostType = (PhotonEditor.Current.PreferredRegion == CloudRegionCode.none)
            ? ServerSettings.HostingOption.BestRegion
            : ServerSettings.HostingOption.PhotonCloud;
        PhotonEditor.Save();
        Inspect();
        EditorUtility.DisplayDialog(CurrentLang.SettingsSavedTitle, CurrentLang.SettingsSavedMessage, CurrentLang.OkButton);
    }
    GUILayout.EndHorizontal();

    GUILayout.Space(20);
    GUILayout.Label(CurrentLang.SetupOwnServerLabel);
    if (GUILayout.Button(CurrentLang.SelfHostSettingsButton))
    {
        //this.photonAddress = ServerSettings.DefaultServerAddress;
        //this.photonPort = ServerSettings.DefaultMasterPort;
        this.photonSetupState = PhotonSetupStates.SetupSelfHosted;
    }
    EditorGUILayout.Separator();
    GUILayout.Label(CurrentLang.OwnHostCloudCompareLabel);
    if (GUILayout.Button(CurrentLang.ComparisonPageButton))
    {
        Application.OpenURL(UrlCompare);
    }
}
/// <summary>
/// Draws the self-hosted Photon Server setup page: address/port/protocol
/// fields, cancel/save buttons, license download, and shortcuts back to the
/// cloud registration and the comparison page.
/// </summary>
protected virtual void OnGuiSetupSelfhosting()
{
    GUILayout.Label(CurrentLang.YourPhotonServerLabel);
    this.photonAddress = EditorGUILayout.TextField(CurrentLang.AddressIPLabel, this.photonAddress);
    this.photonPort = EditorGUILayout.IntField(CurrentLang.PortLabel, this.photonPort);
    this.photonProtocol = (ConnectionProtocol)EditorGUILayout.EnumPopup("Protocol", this.photonProtocol);
    EditorGUILayout.Separator();

    GUILayout.BeginHorizontal();
    if (GUILayout.Button(CurrentLang.CancelButton))
    {
        // drop edits: clear keyboard focus and re-read values from the settings asset
        GUIUtility.keyboardControl = 0;
        this.ReApplySettingsToWindow();
    }
    if (GUILayout.Button(CurrentLang.SaveButton))
    {
        GUIUtility.keyboardControl = 0;
        PhotonEditor.Current.UseMyServer(this.photonAddress, this.photonPort, null);
        PhotonEditor.Current.Protocol = this.photonProtocol;
        PhotonEditor.Save();
        Inspect();
        EditorUtility.DisplayDialog(CurrentLang.SettingsSavedTitle, CurrentLang.SettingsSavedMessage, CurrentLang.OkButton);
    }
    GUILayout.EndHorizontal();

    GUILayout.Space(20);
    // license
    GUILayout.BeginHorizontal();
    GUILayout.Label(CurrentLang.LicensesLabel, EditorStyles.boldLabel, GUILayout.Width(100));
    if (GUILayout.Button(new GUIContent(CurrentLang.LicenseDownloadText, CurrentLang.LicenseDownloadTooltip)))
    {
        EditorUtility.OpenWithDefaultApp(UrlFreeLicense);
    }
    GUILayout.EndHorizontal();

    GUILayout.Space(20);
    GUILayout.Label(CurrentLang.TryPhotonAppLabel);
    if (GUILayout.Button(CurrentLang.GetCloudAppButton))
    {
        // switch to cloud registration; clear the AppId so the user enters a fresh one
        this.cloudAppId = string.Empty;
        this.photonSetupState = PhotonSetupStates.RegisterForPhotonCloud;
    }
    EditorGUILayout.Separator();
    GUILayout.Label(CurrentLang.OwnHostCloudCompareLabel);
    if (GUILayout.Button(CurrentLang.ComparisonPageButton))
    {
        Application.OpenURL(UrlCompare);
    }
}
/// <summary>
/// Registers the given e-mail with the Photon Cloud account service (blocking
/// web call) and, on success, applies the returned AppId to the project
/// settings. On failure the wizard state is switched so the GUI shows the
/// appropriate page.
/// </summary>
/// <param name="email">E-mail address to register with the Photon Cloud.</param>
protected virtual void RegisterWithEmail(string email)
{
    EditorUtility.DisplayProgressBar(CurrentLang.ConnectionTitle, CurrentLang.ConnectionInfo, 0.5f);
    var client = new AccountService();
    try
    {
        client.RegisterByEmail(email, RegisterOrigin); // this is the synchronous variant using the static RegisterOrigin. "result" is in the client
    }
    finally
    {
        // Fix: clear the progress bar even if the web request throws; otherwise
        // the editor would be left with a stuck modal progress bar.
        EditorUtility.ClearProgressBar();
    }

    if (client.ReturnCode == 0)
    {
        PhotonEditor.Current.UseCloud(client.AppId, 0);
        PhotonEditor.Save();
        this.ReApplySettingsToWindow();
        this.photonSetupState = PhotonSetupStates.SetupPhotonCloud;
    }
    else
    {
        if (client.Message.Contains(CurrentLang.EmailInUseLabel))
        {
            // account already exists: show the "e-mail already registered" page
            this.photonSetupState = PhotonSetupStates.EmailAlreadyRegistered;
        }
        else
        {
            EditorUtility.DisplayDialog(CurrentLang.ErrorTextTitle, client.Message, CurrentLang.OkButton);
            // Debug.Log(client.Exception);
            this.photonSetupState = PhotonSetupStates.RegisterForPhotonCloud;
        }
    }
}
#region SettingsFileHandling
// Cached singleton reference to the loaded/created ServerSettings asset.
private static ServerSettings currentSettings;
// NOTE(review): instance field among otherwise static settings members; looks
// like the editor window's minimum size - confirm where it is consumed.
private Vector2 MinSize = new Vector2(350, 400);
/// <summary>
/// The active ServerSettings asset. On first access it verifies the
/// ServerSettings script exists, loads the asset from Resources, and if none
/// is found creates one (including its folder) at
/// PhotonNetwork.serverSettingsAssetPath. Returns null only if the
/// ServerSettings script itself is missing.
/// </summary>
public static ServerSettings Current
{
    get
    {
        if (currentSettings == null)
        {
            // find out if ServerSettings can be instantiated (existing script check)
            ScriptableObject serverSettingTest = CreateInstance("ServerSettings");
            if (serverSettingTest == null)
            {
                Debug.LogError(CurrentLang.ServerSettingsMissingLabel);
                return null;
            }
            DestroyImmediate(serverSettingTest);
            // try to load settings from file
            ReLoadCurrentSettings();
            // if still not loaded, create one
            if (currentSettings == null)
            {
                string settingsPath = Path.GetDirectoryName(PhotonNetwork.serverSettingsAssetPath);
                if (!Directory.Exists(settingsPath))
                {
                    Directory.CreateDirectory(settingsPath);
                    AssetDatabase.ImportAsset(settingsPath);
                }
                currentSettings = (ServerSettings)ScriptableObject.CreateInstance("ServerSettings");
                if (currentSettings != null)
                {
                    AssetDatabase.CreateAsset(currentSettings, PhotonNetwork.serverSettingsAssetPath);
                }
                else
                {
                    Debug.LogError(CurrentLang.ServerSettingsMissingLabel);
                }
            }
            // settings were loaded or created. set this editor's initial selected region now (will be changed in GUI)
            if (currentSettings != null)
            {
                selectedRegion = currentSettings.PreferredRegion;
            }
        }
        return currentSettings;
    }
    protected set
    {
        currentSettings = value;
    }
}
/// <summary>
/// Marks the active ServerSettings asset dirty so Unity persists it with the
/// next asset-database save.
/// </summary>
public static void Save()
{
    ServerSettings settings = PhotonEditor.Current;
    EditorUtility.SetDirty(settings);
}
/// <summary>
/// Reloads the ServerSettings asset from the Resources folders. The first
/// asset found becomes the current one; if more than one exists, a warning is
/// logged naming the file that won.
/// </summary>
public static void ReLoadCurrentSettings()
{
    // this now warns developers if there are more than one settings files in resources folders. first will be used.
    UnityEngine.Object[] foundAssets = Resources.LoadAll(PhotonNetwork.serverSettingsAssetFile, typeof(ServerSettings));
    if (foundAssets == null || foundAssets.Length == 0)
    {
        return;
    }

    PhotonEditor.Current = (ServerSettings)foundAssets[0];
    if (foundAssets.Length > 1)
    {
        Debug.LogWarning(CurrentLang.MoreThanOneLabel + PhotonNetwork.serverSettingsAssetFile + CurrentLang.FilesInResourceFolderLabel + AssetDatabase.GetAssetPath(PhotonEditor.Current));
    }
}
/// <summary>
/// Copies the persisted values from the current ServerSettings asset back into
/// this window's editable fields, discarding any unsaved edits.
/// </summary>
protected void ReApplySettingsToWindow()
{
    ServerSettings settings = PhotonEditor.Current;
    this.cloudAppId = settings.AppID ?? string.Empty;
    this.photonAddress = settings.ServerAddress ?? string.Empty;
    this.photonPort = settings.ServerPort;
    this.photonProtocol = settings.Protocol;
}
public static void UpdateRpcList()
{
List<string> additionalRpcs = new List<string>();
|
[
" HashSet<string> currentRpcs = new HashSet<string>();"
] | 2,652
|
lcc
|
csharp
| null |
752a89a5b36c7b8f6f0e35bfd40db4c72421fcad3b20b85b
|
|
/*
* Copyright (c) 2016-2017 Viktor Fedenyov <me@ii-net.tk> <https://ii-net.tk>
*
* This file is part of IDEC Mobile.
*
* IDEC Mobile is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* IDEC Mobile is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with IDEC Mobile. If not, see <http://www.gnu.org/licenses/>.
*/
package vit01.idecmobile.GUI.Reading;
import android.content.ClipData;
import android.content.ClipboardManager;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.text.Html;
import android.util.Patterns;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.fragment.app.Fragment;
import com.mikepenz.google_material_typeface_library.GoogleMaterial;
import com.mikepenz.iconics.IconicsDrawable;
import java.util.regex.Matcher;
import vit01.idecmobile.Core.AbstractTransport;
import vit01.idecmobile.Core.GlobalTransport;
import vit01.idecmobile.Core.IIMessage;
import vit01.idecmobile.Core.SimpleFunctions;
import vit01.idecmobile.GUI.Drafts.DraftEditor;
import vit01.idecmobile.QuoteEditActivity;
import vit01.idecmobile.R;
import vit01.idecmobile.gui_helpers.CustomLinkMovementMethod;
import vit01.idecmobile.gui_helpers.MyTextView;
import vit01.idecmobile.prefs.Config;
public class MessageView_full extends Fragment {
// Transport the message is loaded from - presumably the global storage
// backend; TODO confirm against where it is assigned.
public AbstractTransport transport;
// messageStarred: starred state of the shown message; is_corrupt: set when the
// message could not be loaded/parsed correctly.
public boolean messageStarred = false, is_corrupt = false;
// Options-menu item for jumping back within a discussion.
MenuItem discussionBack;
// Header text views of the message card (subject, addresses, date, ids, echo).
TextView full_subj, full_from_to, full_date, full_msgid, full_repto, full_echo;
// Message body view with custom link handling.
MyTextView full_msg;
// NOTE(review): appears unused in the visible code - verify before removing.
Fragment parentContext;
// "write new message in this echo" button.
Button fullNewMessageBtn;
// Message id received via fragment arguments.
private String msgid;
// The message currently displayed.
private IIMessage message;
public MessageView_full() {
    // Required empty public constructor: the framework re-instantiates
    // fragments reflectively and passes state via setArguments().
}
/**
 * Factory method: builds a fragment instance carrying the given message id
 * in its argument bundle (the framework-safe way to pass parameters).
 */
public static MessageView_full newInstance(String msgid) {
    Bundle arguments = new Bundle();
    arguments.putString("msgid", msgid);

    MessageView_full fragment = new MessageView_full();
    fragment.setArguments(arguments);
    return fragment;
}
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    // Pull the message id out of the argument bundle; only a fragment created
    // via newInstance() carries arguments and contributes menu items.
    Bundle arguments = getArguments();
    if (arguments == null) {
        return;
    }
    msgid = arguments.getString("msgid");
    setHasOptionsMenu(true);
}
@Override
public View onCreateView(@NonNull LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View rootLayout = inflater.inflate(R.layout.message_view, null, false);
full_subj = rootLayout.findViewById(R.id.full_subj);
full_msg = rootLayout.findViewById(R.id.full_text);
full_from_to = rootLayout.findViewById(R.id.full_from_to);
full_date = rootLayout.findViewById(R.id.full_date);
full_msgid = rootLayout.findViewById(R.id.full_msgid);
full_repto = rootLayout.findViewById(R.id.full_repto);
full_echo = rootLayout.findViewById(R.id.full_echo);
full_msgid.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
ClipboardManager clipboard = (ClipboardManager)
getActivity().getSystemService(Context.CLIPBOARD_SERVICE);
ClipData clip = ClipData.newPlainText("idec msgid", message.id);
clipboard.setPrimaryClip(clip);
Toast.makeText(getActivity(), R.string.msgid_clipboard_done, Toast.LENGTH_SHORT).show();
}
});
full_msg.setMovementMethod(CustomLinkMovementMethod.getInstance());
int secondaryColor = SimpleFunctions.colorFromTheme(getActivity(), android.R.attr.textColorSecondary);
Button fullAnswerBtn = rootLayout.findViewById(R.id.full_answer_button);
fullAnswerBtn.setCompoundDrawablesWithIntrinsicBounds(null, new IconicsDrawable(getActivity(), GoogleMaterial.Icon.gmd_reply).sizeDp(20).color(secondaryColor), null, null);
fullAnswerBtn.setCompoundDrawablePadding(30);
fullAnswerBtn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent intent = new Intent(getActivity(), DraftEditor.class);
intent.putExtra("task", "new_answer");
intent.putExtra("nodeindex",
SimpleFunctions.getPreferredOutboxId(message.echo));
intent.putExtra("message", message);
intent.putExtra("quote", false);
startActivity(intent);
}
});
Button fullQuoteAnswerBtn = rootLayout.findViewById(R.id.full_quote_answer_button);
fullQuoteAnswerBtn.setCompoundDrawablesWithIntrinsicBounds(null, new IconicsDrawable(getActivity(), GoogleMaterial.Icon.gmd_format_quote).sizeDp(20).color(secondaryColor), null, null);
fullQuoteAnswerBtn.setCompoundDrawablePadding(30);
fullQuoteAnswerBtn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent intent = new Intent(getActivity(), DraftEditor.class);
intent.putExtra("task", "new_answer");
intent.putExtra("nodeindex",
SimpleFunctions.getPreferredOutboxId(message.echo));
intent.putExtra("message", message);
intent.putExtra("quote", true);
startActivity(intent);
}
});
fullQuoteAnswerBtn.setOnLongClickListener(new View.OnLongClickListener() {
@Override
public boolean onLongClick(View v) {
Intent intent = new Intent(getActivity(), QuoteEditActivity.class);
intent.putExtra("nodeindex",
SimpleFunctions.getPreferredOutboxId(message.echo));
intent.putExtra("message", message);
startActivity(intent);
return true;
}
});
fullNewMessageBtn = rootLayout.findViewById(R.id.full_new_button);
fullNewMessageBtn.setCompoundDrawablesWithIntrinsicBounds(null, new IconicsDrawable(getActivity(), GoogleMaterial.Icon.gmd_create).sizeDp(20).color(secondaryColor), null, null);
fullNewMessageBtn.setCompoundDrawablePadding(30);
fullNewMessageBtn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Intent intent = new Intent(getActivity(), DraftEditor.class);
intent.putExtra("task", "new_in_echo");
intent.putExtra("echoarea", message.echo);
intent.putExtra("nodeindex", SimpleFunctions.getPreferredOutboxId(message.echo));
startActivity(intent);
}
});
Button kdeconnectBtn = rootLayout.findViewById(R.id.full_share_kdeconnect);
kdeconnectBtn.setCompoundDrawablesWithIntrinsicBounds(null, new IconicsDrawable(getActivity(), GoogleMaterial.Icon.gmd_cast).sizeDp(20).color(secondaryColor), null, null);
kdeconnectBtn.setCompoundDrawablePadding(30);
kdeconnectBtn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
String currentLink = "";
Matcher lnk = Patterns.WEB_URL.matcher(message.msg);
while (lnk.find()) currentLink = lnk.group();
if (!currentLink.equals("")) {
try {
Intent launchIntent = new Intent(Intent.ACTION_SEND);
launchIntent.setClassName("org.kde.kdeconnect_tp", "org.kde.kdeconnect.Plugins.SharePlugin.ShareActivity");
launchIntent.putExtra(Intent.EXTRA_TEXT, currentLink);
startActivity(launchIntent);
}
catch (Exception e) {
SimpleFunctions.debug(e.getMessage());
}
} else {
Toast.makeText(getActivity(), R.string.error_no_links, Toast.LENGTH_SHORT).show();
}
}
});
|
[
" if (!Config.isKDEConnectInstalled) {"
] | 551
|
lcc
|
java
| null |
4ba808cbdaf8b17ba189c38980dbfe9f2db8c50e68c78815
|
|
# Copyright (C) 2003 CAMP
# Please see the accompanying LICENSE file for further information.
"""K-point/spin combination-descriptors
This module contains classes for defining combinations of two indices:
* Index k for irreducible kpoints in the 1st Brillouin zone.
* Index s for spin up/down if spin-polarized (otherwise ignored).
"""
import numpy as np
from ase.units import Bohr
from ase.dft.kpoints import monkhorst_pack, get_monkhorst_pack_size_and_offset
from gpaw.symmetry import Symmetry
from gpaw.kpoint import KPoint
import gpaw.mpi as mpi
import _gpaw
class KPointDescriptor:
"""Descriptor-class for k-points."""
def __init__(self, kpts, nspins=1, collinear=True):
    """Construct descriptor object for kpoint/spin combinations (ks-pair).

    Parameters
    ----------
    kpts: None, sequence of 3 ints, or (n,3) shaped ndarray
        Specification of the k-point grid. None=Gamma, list of
        ints=Monkhorst-Pack, ndarray=user specified.
    nspins: int
        Number of spins.

    Attributes
    ============ ======================================================
    ``N_c``      Number of k-points in the different directions.
    ``nspins``   Number of spins in total.
    ``mynspins`` Number of spins on this CPU.
    ``nibzkpts`` Number of irreducible kpoints in 1st Brillouin zone.
    ``nks``      Number of k-point/spin combinations in total.
    ``mynks``    Number of k-point/spin combinations on this CPU.
    ``gamma``    Boolean indicator for gamma point calculation.
    ``comm``     MPI-communicator for kpoint distribution.
    ============ ======================================================
    """
    if kpts is None:
        # No k-points given: single Gamma point.
        self.bzk_kc = np.zeros((1, 3))
        self.N_c = np.array((1, 1, 1), dtype=int)
        self.offset_c = np.zeros(3)
    elif isinstance(kpts[0], int):
        # Three integers: generate a Monkhorst-Pack grid.
        self.bzk_kc = monkhorst_pack(kpts)
        self.N_c = np.array(kpts, dtype=int)
        self.offset_c = np.zeros(3)
    else:
        # Explicit (n, 3) array of scaled k-points; try to recognize it as a
        # (possibly shifted) Monkhorst-Pack grid.
        self.bzk_kc = np.array(kpts, float)
        try:
            self.N_c, self.offset_c = \
                get_monkhorst_pack_size_and_offset(self.bzk_kc)
        except ValueError:
            # Not a regular grid: no size/offset available.
            self.N_c = None
            self.offset_c = None
    self.collinear = collinear
    self.nspins = nspins
    self.nbzkpts = len(self.bzk_kc)
    # Gamma-point calculation?
    self.gamma = self.nbzkpts == 1 and not self.bzk_kc[0].any()
    # Start without symmetry reduction and with a serial communicator; callers
    # may re-invoke set_symmetry()/set_communicator() later.
    self.set_symmetry(None, None, usesymm=None)
    self.set_communicator(mpi.serial_comm)
    # Build a human-readable description of the k-point sampling.
    if self.gamma:
        self.description = '1 k-point (Gamma)'
    else:
        self.description = '%d k-points' % self.nbzkpts
        if self.N_c is not None:
            self.description += (' (%d x %d x %d Monkhorst-Pack grid' %
                                 tuple(self.N_c))
            if self.offset_c.any():
                # Print simple fractional offsets as 1/n where possible.
                self.description += ' + ['
                for x in self.offset_c:
                    if x != 0 and abs(round(1 / x) - 1 / x) < 1e-12:
                        self.description += '1/%d,' % (1 / x)
                    else:
                        self.description += '%f,' % x
                self.description = self.description[:-1] + ']'
            self.description += ')'
def __len__(self):
"""Return number of k-point/spin combinations of local CPU."""
return self.mynks
def set_symmetry(self, atoms, setups, magmom_av=None,
                 usesymm=False, N_c=None, comm=None):
    """Create symmetry object and construct irreducible Brillouin zone.

    atoms: Atoms object
        Defines atom positions and types and also unit cell and
        boundary conditions.
    setups: instance of class Setups
        PAW setups for the atoms.
    magmom_av: ndarray
        Initial magnetic moments.
    usesymm: bool
        Symmetry flag.  None means: neglect symmetries entirely.
    N_c: three int's or None
        If not None: Check also symmetry of grid.
    """
    if atoms is not None:
        # Non-periodic directions must not carry k-point components.
        if (~atoms.pbc & self.bzk_kc.any(0)).any():
            raise ValueError('K-points can only be used with PBCs!')
        if magmom_av is None:
            # Default: only z-components from the initial magnetic moments.
            magmom_av = np.zeros((len(atoms), 3))
            magmom_av[:, 2] = atoms.get_initial_magnetic_moments()
        magmom_av = magmom_av.round(decimals=3)  # round off
        # Atom "identity" = setup id plus magnetic moment; atoms are only
        # symmetry-equivalent if both match.
        id_a = zip(setups.id_a, *magmom_av.T)
        # Construct a Symmetry instance containing the identity operation
        # only
        self.symmetry = Symmetry(id_a, atoms.cell / Bohr, atoms.pbc)
    else:
        self.symmetry = None
    if self.gamma or usesymm is None:
        # Point group and time-reversal symmetry neglected: the "irreducible"
        # set is just the full BZ, all maps are identities.
        self.weight_k = np.ones(self.nbzkpts) / self.nbzkpts
        self.ibzk_kc = self.bzk_kc.copy()
        self.sym_k = np.zeros(self.nbzkpts, int)
        self.time_reversal_k = np.zeros(self.nbzkpts, bool)
        self.bz2ibz_k = np.arange(self.nbzkpts)
        self.ibz2bz_k = np.arange(self.nbzkpts)
        self.bz2bz_ks = np.arange(self.nbzkpts)[:, np.newaxis]
    else:
        if usesymm:
            # Find symmetry operations of atoms
            self.symmetry.analyze(atoms.get_scaled_positions())
            if N_c is not None:
                self.symmetry.prune_symmetries_grid(N_c)
        # Reduce the BZ to its irreducible wedge (with time reversal).
        (self.ibzk_kc, self.weight_k,
         self.sym_k,
         self.time_reversal_k,
         self.bz2ibz_k,
         self.ibz2bz_k,
         self.bz2bz_ks) = self.symmetry.reduce(self.bzk_kc, comm)
    if setups is not None:
        setups.set_symmetry(self.symmetry)
    # Number of irreducible k-points and k-point/spin combinations.
    self.nibzkpts = len(self.ibzk_kc)
    if self.collinear:
        self.nks = self.nibzkpts * self.nspins
    else:
        self.nks = self.nibzkpts
    # Wrap k-points to 1. BZ:
    self.i1bzk_kc = self.ibzk_kc.copy()
    if atoms is not None:
        # Subtract the reciprocal lattice vector (from the 27 neighbors of the
        # origin) whose Cartesian image is closest to each k-point.
        B_cv = 2.0 * np.pi * np.linalg.inv(atoms.cell / Bohr).T
        K_kv = np.dot(self.ibzk_kc, B_cv)
        N_xc = np.indices((3, 3, 3)).reshape((3, 27)).T - 1
        G_xv = np.dot(N_xc, B_cv)
        for k, K_v in enumerate(K_kv):
            x = ((G_xv - K_v)**2).sum(1).argmin()
            self.i1bzk_kc[k] -= N_xc[x]
def set_communicator(self, comm):
    """Set k-point communicator and distribute ks-pairs over its ranks."""
    # Ranks < self.rank0 have mynks0 k-point/spin combinations and
    # ranks >= self.rank0 have mynks0+1 k-point/spin combinations.
    mynks0, x = divmod(self.nks, comm.size)
    self.rank0 = comm.size - x
    self.comm = comm
    # My number and offset of k-point/spin combinations
    self.mynks, self.ks0 = self.get_count(), self.get_offset()
    if self.nspins == 2 and comm.size == 1:  # NCXXXXXXXX
        # Avoid duplicating k-points in local list of k-points.
        # NOTE(review): the serial spin-polarized case keeps a single copy
        # while the else-branch stacks the list twice before slicing -
        # verify intended interaction with get_slice().
        self.ibzk_qc = self.ibzk_kc.copy()
        self.i1bzk_qc = self.i1bzk_kc.copy()
    else:
        # Stack the IBZ list once per spin, then slice out this rank's part.
        self.ibzk_qc = np.vstack((self.ibzk_kc,
                                  self.ibzk_kc))[self.get_slice()]
        self.i1bzk_qc = np.vstack((self.i1bzk_kc,
                                   self.i1bzk_kc))[self.get_slice()]
def create_k_points(self, gd):
    """Return a list of KPoints for this rank's ks-pairs.

    gd: grid descriptor providing ``sdisp_cd`` (scaled displacements used
    to build Bloch phase factors at the cell boundaries).
    """
    sdisp_cd = gd.sdisp_cd
    kpt_u = []
    # Global ks index runs over [ks0, ks0 + mynks); s is the spin index,
    # k the global IBZ k index, q the local k index on this rank.
    for ks in range(self.ks0, self.ks0 + self.mynks):
        s, k = divmod(ks, self.nibzkpts)
        q = (ks - self.ks0) % self.nibzkpts
        if self.collinear:
            # Factor 2/nspins: weights sum to 2 for spin-paired, 1 per spin
            # channel for spin-polarized.
            weight = self.weight_k[k] * 2 / self.nspins
        else:
            weight = self.weight_k[k]
        if self.gamma:
            # Gamma point: all phase factors are unity.
            phase_cd = np.ones((3, 2), complex)
        else:
            phase_cd = np.exp(2j * np.pi *
                              sdisp_cd * self.ibzk_kc[k, :, np.newaxis])
        kpt_u.append(KPoint(weight, s, k, q, phase_cd))
    return kpt_u
def collect(self, a_ux, broadcast=True):
    """Collect distributed per-ks-pair data onto rank 0 (and optionally all).

    a_ux: local array, first axis indexes this rank's ks-pairs.
    broadcast: if True, every rank returns the full (nspins, nibzkpts, ...)
    array; otherwise only rank 0 does (others return None).
    """
    if self.comm.rank == 0 or broadcast:
        xshape = a_ux.shape[1:]
        # a_skx is the (spin, k, ...) result; a_Ux is a flat view of it used
        # for rank-ordered assembly.
        a_skx = np.empty((self.nspins, self.nibzkpts) + xshape, a_ux.dtype)
        a_Ux = a_skx.reshape((-1,) + xshape)
    else:
        a_skx = None
    if self.comm.rank > 0:
        # Non-root ranks just ship their local slab to rank 0.
        self.comm.send(a_ux, 0)
    else:
        # Rank 0 places its own slab, then receives each other rank's slab
        # into the matching slice (non-blocking, waited on below).
        u1 = self.get_count(0)
        a_Ux[0:u1] = a_ux
        requests = []
        for rank in range(1, self.comm.size):
            u2 = u1 + self.get_count(rank)
            requests.append(self.comm.receive(a_Ux[u1:u2], rank,
                                              block=False))
            u1 = u2
        assert u1 == len(a_Ux)
        self.comm.waitall(requests)
    if broadcast:
        self.comm.broadcast(a_Ux, 0)
    return a_skx
def transform_wave_function(self, psit_G, k):
"""Transform wave function from IBZ to BZ.
k is the index of the desired k-point in the full BZ.
"""
s = self.sym_k[k]
time_reversal = self.time_reversal_k[k]
op_cc = np.linalg.inv(self.symmetry.op_scc[s]).round().astype(int)
# Identity
if (np.abs(op_cc - np.eye(3, dtype=int)) < 1e-10).all():
if time_reversal:
return psit_G.conj()
else:
return psit_G
# General point group symmetry
else:
|
[
" ik = self.bz2ibz_k[k]"
] | 906
|
lcc
|
python
| null |
887af1daa5b7038bf77928f7810b6046e0a6e79e65f0db63
|
|
/*
* Copyright (C) 2022 Inera AB (http://www.inera.se)
*
* This file is part of sklintyg (https://github.com/sklintyg).
*
* sklintyg is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* sklintyg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package se.inera.intyg.webcert.web.web.controller.api;
import com.google.common.base.Strings;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
import se.inera.intyg.common.fk7263.support.Fk7263EntryPoint;
import se.inera.intyg.common.luse.support.LuseEntryPoint;
import se.inera.intyg.common.services.texts.IntygTextsService;
import se.inera.intyg.common.support.model.UtkastStatus;
import se.inera.intyg.common.support.model.common.internal.Patient;
import se.inera.intyg.common.support.modules.registry.IntygModule;
import se.inera.intyg.common.support.modules.registry.IntygModuleRegistry;
import se.inera.intyg.common.support.modules.registry.ModuleNotFoundException;
import se.inera.intyg.infra.integration.hsatk.model.legacy.SelectableVardenhet;
import se.inera.intyg.infra.integration.hsatk.model.legacy.Vardenhet;
import se.inera.intyg.infra.integration.hsatk.model.legacy.Vardgivare;
import se.inera.intyg.infra.integration.hsatk.services.HsatkEmployeeService;
import se.inera.intyg.infra.security.common.model.AuthoritiesConstants;
import se.inera.intyg.infra.security.common.model.Feature;
import se.inera.intyg.infra.security.common.model.Privilege;
import se.inera.intyg.infra.security.common.model.RequestOrigin;
import se.inera.intyg.schemas.contract.Personnummer;
import se.inera.intyg.webcert.common.model.SekretessStatus;
import se.inera.intyg.webcert.persistence.utkast.model.Utkast;
import se.inera.intyg.webcert.persistence.utkast.model.VardpersonReferens;
import se.inera.intyg.webcert.web.converter.util.IntygDraftDecorator;
import se.inera.intyg.webcert.web.service.access.AccessEvaluationParameters;
import se.inera.intyg.webcert.web.service.access.AccessResult;
import se.inera.intyg.webcert.web.service.access.DraftAccessServiceHelper;
import se.inera.intyg.webcert.web.service.log.LogService;
import se.inera.intyg.webcert.web.service.patient.PatientDetailsResolver;
import se.inera.intyg.webcert.web.service.patient.PatientDetailsResolverResponse;
import se.inera.intyg.webcert.web.service.user.WebCertUserService;
import se.inera.intyg.webcert.web.service.user.dto.WebCertUser;
import se.inera.intyg.webcert.web.service.utkast.UtkastService;
import se.inera.intyg.webcert.web.service.utkast.dto.CreateNewDraftRequest;
import se.inera.intyg.webcert.web.service.utkast.dto.PreviousIntyg;
import se.inera.intyg.webcert.web.web.controller.api.dto.CreateUtkastRequest;
import se.inera.intyg.webcert.web.web.controller.api.dto.QueryIntygParameter;
import se.inera.intyg.webcert.web.web.controller.api.dto.QueryIntygResponse;
import se.riv.infrastructure.directory.v1.PersonInformationType;
import javax.ws.rs.core.Response;
import javax.xml.ws.WebServiceException;
import java.time.LocalDateTime;
import java.util.*;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
import static javax.ws.rs.core.Response.Status.OK;
import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.*;
import static org.mockito.Mockito.*;
@RunWith(MockitoJUnitRunner.Silent.class)
public class UtkastApiControllerTest {
// Test patient identity used throughout the tests.
private static final String PATIENT_EFTERNAMN = "Tolvansson";
private static final String PATIENT_FORNAMN = "Tolvan";
private static final String PATIENT_MELLANNAMN = "Von";
private static final String PATIENT_POSTADRESS = "Testadress";
private static final String PATIENT_POSTNUMMER = "12345";
private static final String PATIENT_POSTORT = "Testort";
private static final Personnummer PATIENT_PERSONNUMMER = createPnr("19121212-1212");
// Person number set up elsewhere to represent a protected (sekretess) patient.
private static final Personnummer PATIENT_PERSONNUMMER_PU_SEKRETESS = createPnr("20121212-1212");
// Certificate type version returned by the stubbed IntygTextsService.
// NOTE(review): fully-qualified java.lang.String is redundant here.
private static final java.lang.String INTYG_TYPE_VERSION = "1.2";
// Collaborators mocked and injected into the controller under test.
@Mock
private UtkastService utkastService;
@Mock
private WebCertUserService webcertUserService;
@Mock
private PatientDetailsResolver patientDetailsResolver;
@Mock
private IntygModuleRegistry moduleRegistry;
@Mock
private IntygTextsService intygTextsService;
@Mock
private DraftAccessServiceHelper draftAccessServiceHelper;
@Mock
private HsatkEmployeeService hsaEmployeeService;
@Mock
private IntygDraftDecorator intygDraftDecorator;
@Mock
private LogService logService;
// System under test, with the mocks above injected.
@InjectMocks
private UtkastApiController utkastController;
/**
 * Common stubbing for all tests: a resolvable, non-protected patient, a
 * deprecated fk7263 module vs an active luse module, an existing previous
 * luse certificate, HSA employees echoing their hsaId as name, and a
 * patient-status map reporting alive/non-test/non-protected.
 */
@Before
public void setup() throws ModuleNotFoundException {
    when(patientDetailsResolver.getSekretessStatus(eq(PATIENT_PERSONNUMMER))).thenReturn(SekretessStatus.FALSE);
    when(patientDetailsResolver.resolvePatient(any(Personnummer.class), anyString(), anyString())).thenReturn(buildPatient());
    // last constructor flag marks the module deprecated: luse=false, fk7263=true
    when(moduleRegistry.getIntygModule(eq(LuseEntryPoint.MODULE_ID)))
        .thenReturn(new IntygModule("luse", "", "", "", "", "", "", "", "", false));
    when(moduleRegistry.getIntygModule(eq(Fk7263EntryPoint.MODULE_ID)))
        .thenReturn(new IntygModule("fk7263", "", "", "", "", "", "", "", "", true));
    // patient already has a previous luse certificate on another unit
    Map<String, Map<String, PreviousIntyg>> hasPrevious = new HashMap<>();
    Map<String, PreviousIntyg> hasPreviousIntyg = new HashMap<>();
    hasPreviousIntyg.put("luse", PreviousIntyg.of(true, false, false, "Enhet", "intygsId", null));
    hasPrevious.put("intyg", hasPreviousIntyg);
    when(utkastService.checkIfPersonHasExistingIntyg(eq(PATIENT_PERSONNUMMER), any(), any())).thenReturn(hasPrevious);
    when(intygTextsService.getLatestVersion(any(String.class))).thenReturn(INTYG_TYPE_VERSION);
    // Return hsaId as name
    when(hsaEmployeeService.getEmployee(anyString(), any())).thenAnswer(invocation -> {
        PersonInformationType personInformation = new PersonInformationType();
        personInformation.setMiddleAndSurName((String) invocation.getArguments()[0]);
        List<PersonInformationType> personInformationTypeList = new ArrayList<>();
        personInformationTypeList.add(personInformation);
        return personInformationTypeList;
    });
    // any person number resolves to a living, non-test, non-protected patient
    Map<Personnummer, PatientDetailsResolverResponse> statusMap = mock(Map.class);
    PatientDetailsResolverResponse response = new PatientDetailsResolverResponse();
    response.setTestIndicator(false);
    response.setDeceased(false);
    response.setProtectedPerson(SekretessStatus.FALSE);
    when(statusMap.get(any(Personnummer.class))).thenReturn(response);
    Mockito.when(patientDetailsResolver.getPersonStatusesForList(any())).thenReturn(statusMap);
}
@Test
public void testCreateUtkastFailsForDeprecated() {
    // fk7263 is registered as deprecated in setup(), so draft creation must be rejected.
    final String certificateType = "fk7263";
    setupUser(AuthoritiesConstants.PRIVILEGE_SKRIVA_INTYG, certificateType, AuthoritiesConstants.FEATURE_HANTERA_INTYGSUTKAST);

    Response response = utkastController.createUtkast(certificateType, buildRequest("fk7263"));

    assertEquals(BAD_REQUEST.getStatusCode(), response.getStatus());
}
@Test
public void testCreateUtkast() {
    // A user with full privileges creating an active (luse) draft gets HTTP 200.
    final String certificateType = "luse";
    setupUser(AuthoritiesConstants.PRIVILEGE_SKRIVA_INTYG, certificateType, AuthoritiesConstants.FEATURE_HANTERA_INTYGSUTKAST);
    doReturn(AccessResult.noProblem()).when(draftAccessServiceHelper).evaluateAllowToCreateUtkast(anyString(), any(Personnummer.class));
    when(utkastService.createNewDraft(any(CreateNewDraftRequest.class))).thenReturn(new Utkast());

    Response response = utkastController.createUtkast(certificateType, buildRequest("luse"));

    assertEquals(OK.getStatusCode(), response.getStatus());
}
/**
 * Verifies that the controller concatenates first, middle and last name into
 * the draft request's "fullstandigtNamn" field.
 */
@Test
public void testCreateUtkastSetsPatientFullName() {
    String intygsTyp = "luse";
    setupUser(AuthoritiesConstants.PRIVILEGE_SKRIVA_INTYG, intygsTyp, AuthoritiesConstants.FEATURE_HANTERA_INTYGSUTKAST);
    doReturn(AccessResult.noProblem()).when(draftAccessServiceHelper).evaluateAllowToCreateUtkast(anyString(), any(Personnummer.class));
    when(utkastService.createNewDraft(any(CreateNewDraftRequest.class))).thenReturn(new Utkast());

    Response response = utkastController.createUtkast(intygsTyp, buildRequest("luse"));
    assertEquals(OK.getStatusCode(), response.getStatus());

    // capture the request handed to the service and inspect the composed name
    ArgumentCaptor<CreateNewDraftRequest> requestCaptor = ArgumentCaptor.forClass(CreateNewDraftRequest.class);
    verify(utkastService).createNewDraft(requestCaptor.capture());
    assertNotNull(requestCaptor.getValue().getPatient().getFullstandigtNamn());
    assertEquals(PATIENT_FORNAMN + " " + PATIENT_MELLANNAMN + " " + PATIENT_EFTERNAMN,
        requestCaptor.getValue().getPatient().getFullstandigtNamn());
}
@Test
public void testCreateUtkastSetsPatientFullNameWithoutMiddlename() {
    // When no middle name is supplied, the full name must be "<first> <last>"
    // with a single separating space (no doubled space where the middle name was).
    final String draftType = "luse";
    setupUser(AuthoritiesConstants.PRIVILEGE_SKRIVA_INTYG, draftType, AuthoritiesConstants.FEATURE_HANTERA_INTYGSUTKAST);
    when(utkastService.createNewDraft(any(CreateNewDraftRequest.class))).thenReturn(new Utkast());
    // Fake PU service being down
    when(patientDetailsResolver.resolvePatient(PATIENT_PERSONNUMMER, draftType, INTYG_TYPE_VERSION)).thenReturn(null);
    doReturn(AccessResult.noProblem()).when(draftAccessServiceHelper).evaluateAllowToCreateUtkast(anyString(), any(Personnummer.class));

    final CreateUtkastRequest request = buildRequest(draftType);
    request.setPatientMellannamn(null); // no middlename

    final Response result = utkastController.createUtkast(draftType, request);

    assertEquals(OK.getStatusCode(), result.getStatus());
    final ArgumentCaptor<CreateNewDraftRequest> captor = ArgumentCaptor.forClass(CreateNewDraftRequest.class);
    verify(utkastService).createNewDraft(captor.capture());
    final String fullName = captor.getValue().getPatient().getFullstandigtNamn();
    assertNotNull(fullName);
    assertEquals(PATIENT_FORNAMN + " " + PATIENT_EFTERNAMN, fullName);
}
@Test
public void testCreateUtkastFornamnOk() {
    // Boundary case: 255 characters is the longest accepted first name.
    final String draftType = "luse";
    setupUser(AuthoritiesConstants.PRIVILEGE_SKRIVA_INTYG, draftType, AuthoritiesConstants.FEATURE_HANTERA_INTYGSUTKAST);
    doReturn(AccessResult.noProblem()).when(draftAccessServiceHelper).evaluateAllowToCreateUtkast(anyString(), any(Personnummer.class));
    when(utkastService.createNewDraft(any(CreateNewDraftRequest.class))).thenReturn(new Utkast());

    final CreateUtkastRequest request = buildRequest(draftType);
    request.setPatientFornamn(Strings.repeat("a", 255));

    final Response result = utkastController.createUtkast(draftType, request);

    assertEquals(OK.getStatusCode(), result.getStatus());
}
@Test
public void testCreateUtkastFornamnTooLong() {
    // Boundary case: a 256-character first name exceeds the limit and must be rejected.
    final String draftType = "luse";
    setupUser(AuthoritiesConstants.PRIVILEGE_SKRIVA_INTYG, draftType, AuthoritiesConstants.FEATURE_HANTERA_INTYGSUTKAST);

    final CreateUtkastRequest request = buildRequest(draftType);
    request.setPatientFornamn(Strings.repeat("a", 256));

    final Response result = utkastController.createUtkast(draftType, request);

    assertEquals(BAD_REQUEST.getStatusCode(), result.getStatus());
}
@Test
public void testCreateUtkastEfternamnOk() {
String intygsTyp = "luse";
setupUser(AuthoritiesConstants.PRIVILEGE_SKRIVA_INTYG, intygsTyp, AuthoritiesConstants.FEATURE_HANTERA_INTYGSUTKAST);
when(utkastService.createNewDraft(any(CreateNewDraftRequest.class))).thenReturn(new Utkast());
doReturn(AccessResult.noProblem()).when(draftAccessServiceHelper).evaluateAllowToCreateUtkast(anyString(), any(Personnummer.class));
|
[
" CreateUtkastRequest utkastRequest = buildRequest(intygsTyp);"
] | 675
|
lcc
|
java
| null |
641e1c881986075c2c1d4de3afcfdf28d45ea43407457b7d
|
|
// Copyright (c) 2011 AlphaSierraPapa for the SharpDevelop Team
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this
// software and associated documentation files (the "Software"), to deal in the Software
// without restriction, including without limitation the rights to use, copy, modify, merge,
// publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
// to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or
// substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
// INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
// PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
// FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using ICSharpCode.Decompiler.FlowAnalysis;
using ICSharpCode.NRefactory.Utils;
using dnlib.DotNet;
using dnlib.DotNet.Emit;
namespace ICSharpCode.Decompiler.ILAst
{
/// <summary>
/// The individual passes of the ILAst optimizer, listed in execution order.
/// <c>Optimize()</c> takes one of these as <c>abortBeforeStep</c> and stops just
/// before the named pass, which lets callers inspect intermediate ASTs.
/// </summary>
public enum ILAstOptimizationStep
{
	RemoveRedundantCode,
	ReduceBranchInstructionSet,
	InlineVariables,
	CopyPropagation,
	YieldReturn,
	AsyncAwait,
	PropertyAccessInstructions,
	SplitToMovableBlocks,
	TypeInference,
	HandlePointerArithmetic,
	// The following steps run repeatedly, per block, until a fixed point is reached.
	SimplifyShortCircuit,
	SimplifyTernaryOperator,
	SimplifyNullCoalescing,
	JoinBasicBlocks,
	SimplifyLogicNot,
	SimplifyShiftOperators,
	TypeConversionSimplifications,
	SimplifyLdObjAndStObj,
	SimplifyCustomShortCircuit,
	SimplifyLiftedOperators,
	TransformArrayInitializers,
	TransformMultidimensionalArrayInitializers,
	TransformObjectInitializers,
	MakeAssignmentExpression,
	IntroducePostIncrement,
	InlineExpressionTreeParameterDeclarations,
	InlineVariables2,
	// Control-flow structuring and clean-up passes.
	FindLoops,
	FindConditions,
	FlattenNestedMovableBlocks,
	RemoveEndFinally,
	RemoveRedundantCode2,
	GotoRemoval,
	DuplicateReturns,
	GotoRemoval2,
	ReduceIfNesting,
	InlineVariables3,
	CachedDelegateInitialization,
	IntroduceFixedStatements,
	RecombineVariables,
	TypeInference2,
	RemoveRedundantCode3,
	None // run the complete pipeline without aborting
}
public partial class ILAstOptimizer
{
int nextLabelIndex = 0;
DecompilerContext context;
ICorLibTypes corLib;
ILBlock method;
/// <summary>
/// Runs the full ILAst optimization pipeline over <paramref name="method"/> in place.
/// Pass order is significant; inline comments note the known ordering constraints.
/// </summary>
/// <param name="context">Decompiler context for the method being decompiled.</param>
/// <param name="method">Root ILBlock of the method body; mutated in place.</param>
/// <param name="abortBeforeStep">If not <c>None</c>, processing stops just before this
/// step so intermediate ASTs can be inspected.</param>
public void Optimize(DecompilerContext context, ILBlock method, ILAstOptimizationStep abortBeforeStep = ILAstOptimizationStep.None)
{
	this.context = context;
	this.corLib = context.CurrentMethod.Module.CorLibTypes;
	this.method = method;

	if (abortBeforeStep == ILAstOptimizationStep.RemoveRedundantCode) return;
	RemoveRedundantCode(method);

	if (abortBeforeStep == ILAstOptimizationStep.ReduceBranchInstructionSet) return;
	foreach(ILBlock block in method.GetSelfAndChildrenRecursive<ILBlock>()) {
		ReduceBranchInstructionSet(block);
	}
	// ReduceBranchInstructionSet runs before inlining because the non-aggressive inlining heuristic
	// looks at which type of instruction consumes the inlined variable.

	if (abortBeforeStep == ILAstOptimizationStep.InlineVariables) return;
	// Works better after simple goto removal because of the following debug pattern: stloc X; br Next; Next:; ldloc X
	ILInlining inlining1 = new ILInlining(method);
	inlining1.InlineAllVariables();

	if (abortBeforeStep == ILAstOptimizationStep.CopyPropagation) return;
	inlining1.CopyPropagation();

	if (abortBeforeStep == ILAstOptimizationStep.YieldReturn) return;
	// Detect compiler-generated iterator / async state machines.
	YieldReturnDecompiler.Run(context, method);
	AsyncDecompiler.RunStep1(context, method);

	if (abortBeforeStep == ILAstOptimizationStep.AsyncAwait) return;
	AsyncDecompiler.RunStep2(context, method);

	if (abortBeforeStep == ILAstOptimizationStep.PropertyAccessInstructions) return;
	IntroducePropertyAccessInstructions(method);

	if (abortBeforeStep == ILAstOptimizationStep.SplitToMovableBlocks) return;
	foreach(ILBlock block in method.GetSelfAndChildrenRecursive<ILBlock>()) {
		SplitToBasicBlocks(block);
	}

	if (abortBeforeStep == ILAstOptimizationStep.TypeInference) return;
	// Types are needed for the ternary operator optimization
	TypeAnalysis.Run(context, method);

	if (abortBeforeStep == ILAstOptimizationStep.HandlePointerArithmetic) return;
	HandlePointerArithmetic(method);

	// Per-block peephole optimizations, each block iterated to a fixed point.
	foreach(ILBlock block in method.GetSelfAndChildrenRecursive<ILBlock>()) {
		bool modified;
		do {
			modified = false;

			if (abortBeforeStep == ILAstOptimizationStep.SimplifyShortCircuit) return;
			modified |= block.RunOptimization(new SimpleControlFlow(context, method).SimplifyShortCircuit);

			if (abortBeforeStep == ILAstOptimizationStep.SimplifyTernaryOperator) return;
			modified |= block.RunOptimization(new SimpleControlFlow(context, method).SimplifyTernaryOperator);

			if (abortBeforeStep == ILAstOptimizationStep.SimplifyNullCoalescing) return;
			modified |= block.RunOptimization(new SimpleControlFlow(context, method).SimplifyNullCoalescing);

			if (abortBeforeStep == ILAstOptimizationStep.JoinBasicBlocks) return;
			modified |= block.RunOptimization(new SimpleControlFlow(context, method).JoinBasicBlocks);

			if (abortBeforeStep == ILAstOptimizationStep.SimplifyLogicNot) return;
			modified |= block.RunOptimization(SimplifyLogicNot);

			if (abortBeforeStep == ILAstOptimizationStep.SimplifyShiftOperators) return;
			modified |= block.RunOptimization(SimplifyShiftOperators);

			if (abortBeforeStep == ILAstOptimizationStep.TypeConversionSimplifications) return;
			modified |= block.RunOptimization(TypeConversionSimplifications);

			if (abortBeforeStep == ILAstOptimizationStep.SimplifyLdObjAndStObj) return;
			modified |= block.RunOptimization(SimplifyLdObjAndStObj);

			if (abortBeforeStep == ILAstOptimizationStep.SimplifyCustomShortCircuit) return;
			modified |= block.RunOptimization(new SimpleControlFlow(context, method).SimplifyCustomShortCircuit);

			if (abortBeforeStep == ILAstOptimizationStep.SimplifyLiftedOperators) return;
			modified |= block.RunOptimization(SimplifyLiftedOperators);

			if (abortBeforeStep == ILAstOptimizationStep.TransformArrayInitializers) return;
			modified |= block.RunOptimization(TransformArrayInitializers);

			if (abortBeforeStep == ILAstOptimizationStep.TransformMultidimensionalArrayInitializers) return;
			modified |= block.RunOptimization(TransformMultidimensionalArrayInitializers);

			if (abortBeforeStep == ILAstOptimizationStep.TransformObjectInitializers) return;
			modified |= block.RunOptimization(TransformObjectInitializers);

			if (abortBeforeStep == ILAstOptimizationStep.MakeAssignmentExpression) return;
			if (context.Settings.MakeAssignmentExpressions) {
				modified |= block.RunOptimization(MakeAssignmentExpression);
			}
			modified |= block.RunOptimization(MakeCompoundAssignments);

			if (abortBeforeStep == ILAstOptimizationStep.IntroducePostIncrement) return;
			if (context.Settings.IntroduceIncrementAndDecrement) {
				modified |= block.RunOptimization(IntroducePostIncrement);
			}

			if (abortBeforeStep == ILAstOptimizationStep.InlineExpressionTreeParameterDeclarations) return;
			if (context.Settings.ExpressionTrees) {
				modified |= block.RunOptimization(InlineExpressionTreeParameterDeclarations);
			}

			if (abortBeforeStep == ILAstOptimizationStep.InlineVariables2) return;
			modified |= new ILInlining(method).InlineAllInBlock(block);
			new ILInlining(method).CopyPropagation();
		} while(modified);
	}

	if (abortBeforeStep == ILAstOptimizationStep.FindLoops) return;
	foreach(ILBlock block in method.GetSelfAndChildrenRecursive<ILBlock>()) {
		new LoopsAndConditions(context).FindLoops(block);
	}

	if (abortBeforeStep == ILAstOptimizationStep.FindConditions) return;
	foreach(ILBlock block in method.GetSelfAndChildrenRecursive<ILBlock>()) {
		new LoopsAndConditions(context).FindConditions(block);
	}

	if (abortBeforeStep == ILAstOptimizationStep.FlattenNestedMovableBlocks) return;
	FlattenBasicBlocks(method);

	if (abortBeforeStep == ILAstOptimizationStep.RemoveEndFinally) return;
	RemoveEndFinally(method);

	if (abortBeforeStep == ILAstOptimizationStep.RemoveRedundantCode2) return;
	RemoveRedundantCode(method);

	if (abortBeforeStep == ILAstOptimizationStep.GotoRemoval) return;
	new GotoRemoval().RemoveGotos(method);

	if (abortBeforeStep == ILAstOptimizationStep.DuplicateReturns) return;
	DuplicateReturnStatements(method);

	if (abortBeforeStep == ILAstOptimizationStep.GotoRemoval2) return;
	new GotoRemoval().RemoveGotos(method);

	if (abortBeforeStep == ILAstOptimizationStep.ReduceIfNesting) return;
	ReduceIfNesting(method);

	if (abortBeforeStep == ILAstOptimizationStep.InlineVariables3) return;
	// The 2nd inlining pass is necessary because DuplicateReturns and the introduction of ternary operators
	// open up additional inlining possibilities.
	new ILInlining(method).InlineAllVariables();

	if (abortBeforeStep == ILAstOptimizationStep.CachedDelegateInitialization) return;
	if (context.Settings.AnonymousMethods) {
		foreach(ILBlock block in method.GetSelfAndChildrenRecursive<ILBlock>()) {
			for (int i = 0; i < block.Body.Count; i++) {
				// TODO: Move before loops
				CachedDelegateInitializationWithField(block, ref i);
				CachedDelegateInitializationWithLocal(block, ref i);
			}
		}
	}

	if (abortBeforeStep == ILAstOptimizationStep.IntroduceFixedStatements) return;
	// we need post-order traversal, not pre-order, for "fixed" to work correctly
	foreach (ILBlock block in TreeTraversal.PostOrder<ILNode>(method, n => n.GetChildren()).OfType<ILBlock>()) {
		for (int i = block.Body.Count - 1; i >= 0; i--) {
			// TODO: Move before loops
			if (i < block.Body.Count)
				IntroduceFixedStatements(block, block.Body, i);
		}
	}

	if (abortBeforeStep == ILAstOptimizationStep.RecombineVariables) return;
	RecombineVariables(method);

	if (abortBeforeStep == ILAstOptimizationStep.TypeInference2) return;
	// Variables may have been recombined, so all inferred types must be recomputed.
	TypeAnalysis.Reset(method);
	TypeAnalysis.Run(context, method);

	if (abortBeforeStep == ILAstOptimizationStep.RemoveRedundantCode3) return;
	GotoRemoval.RemoveRedundantCode(method);

	// ReportUnassignedILRanges(method);
}
/// <summary>
/// Removes redundant Br, Nop, Dup and Pop instructions, and drops the (always ldloc)
/// arguments of 'leave'. ILRanges of removed nodes are merged into neighbouring nodes
/// so debug information is preserved.
/// </summary>
/// <param name="method">Root ILBlock of the method; blocks are rewritten in place.</param>
internal static void RemoveRedundantCode(ILBlock method)
{
	// Count how many branches target each label so unreferenced labels can be dropped.
	Dictionary<ILLabel, int> labelRefCount = new Dictionary<ILLabel, int>();
	foreach (ILLabel target in method.GetSelfAndChildrenRecursive<ILExpression>(e => e.IsBranch()).SelectMany(e => e.GetBranchTargets())) {
		labelRefCount[target] = labelRefCount.GetOrDefault(target) + 1;
	}

	foreach(ILBlock block in method.GetSelfAndChildrenRecursive<ILBlock>()) {
		List<ILNode> body = block.Body;
		List<ILNode> newBody = new List<ILNode>(body.Count);
		for (int i = 0; i < body.Count; i++) {
			ILLabel target;
			ILExpression popExpr;
			if (body[i].Match(ILCode.Br, out target) && i+1 < body.Count && body[i+1] == target) {
				// 'br' to the immediately following label: drop the branch, and drop
				// the label too when this branch was its only reference.
				ILNode prev = newBody.Count > 0 ? newBody[newBody.Count - 1] : null;
				ILNode label = null;
				ILNode br = body[i];
				// Ignore the branch
				if (labelRefCount[target] == 1) {
					label = body[i + 1];
					i++; // Ignore the label as well
				}
				ILNode next = i + 1 < body.Count ? body[i + 1] : null;
				Utils.AddILRangesTryPreviousFirst(br, prev, next, block);
				if (label != null)
					Utils.AddILRangesTryPreviousFirst(label, prev, next, block);
			} else if (body[i].Match(ILCode.Nop)){
				// Ignore nop
				Utils.NopMergeILRanges(block, newBody, i);
			} else if (body[i].Match(ILCode.Pop, out popExpr)) {
				ILVariable v;
				if (!popExpr.Match(ILCode.Ldloc, out v))
					throw new Exception("Pop should have just ldloc at this stage");
				// Best effort to move the ILRange to previous statement
				ILVariable prevVar;
				ILExpression prevExpr;
				if (i - 1 >= 0 && body[i - 1].Match(ILCode.Stloc, out prevVar, out prevExpr) && prevVar == v)
					prevExpr.ILRanges.AddRange(((ILExpression)body[i]).ILRanges);
				else
					Utils.AddILRangesTryPreviousFirst(newBody, body, i, block);
				// Ignore pop
			} else {
				ILLabel label = body[i] as ILLabel;
				if (label != null) {
					// Keep only labels that are still branch targets.
					if (labelRefCount.GetOrDefault(label) > 0)
						newBody.Add(label);
					else
						Utils.LabelMergeILRanges(block, newBody, i);
				} else {
					newBody.Add(body[i]);
				}
			}
		}
		block.Body = newBody;
	}

	// Ignore arguments of 'leave'
	foreach (ILExpression expr in method.GetSelfAndChildrenRecursive<ILExpression>(e => e.Code == ILCode.Leave)) {
		if (expr.Arguments.Any(arg => !arg.Match(ILCode.Ldloc)))
			throw new Exception("Leave should have just ldloc at this stage");
		foreach (var arg in expr.Arguments)
			expr.ILRanges.AddRange(arg.GetSelfAndChildrenRecursiveILRanges());
		expr.Arguments.Clear();
	}

	// 'dup' removal: replace dup(child) with child everywhere, merging ILRanges.
	foreach (ILExpression expr in method.GetSelfAndChildrenRecursive<ILExpression>()) {
		for (int i = 0; i < expr.Arguments.Count; i++) {
			ILExpression child;
			if (expr.Arguments[i].Match(ILCode.Dup, out child)) {
				child.ILRanges.AddRange(expr.Arguments[i].AllILRanges);
				expr.Arguments[i] = child;
			}
		}
	}
}
/// <summary>
/// Normalizes the conditional-branch instruction set down to just 'br' and 'brtrue':
/// every comparison branch (beq, blt, ...) becomes 'brtrue(comparison)', and brfalse
/// becomes 'brtrue(logicnot(...))'. ILRanges are moved onto the branch condition so
/// no debug information is lost.
/// </summary>
void ReduceBranchInstructionSet(ILBlock block)
{
	for (int pos = 0; pos < block.Body.Count; pos++) {
		ILExpression branch = block.Body[pos] as ILExpression;
		if (branch == null || branch.Prefixes != null)
			continue;

		ILCode comparison;
		switch (branch.Code) {
			case ILCode.Switch:
			case ILCode.Brtrue:
				// Already in the reduced set; just push the ranges down into the condition.
				branch.Arguments.Single().ILRanges.AddRange(branch.ILRanges);
				branch.ILRanges.Clear();
				continue;
			case ILCode.Brfalse: comparison = ILCode.LogicNot; break;
			case ILCode.Beq: comparison = ILCode.Ceq; break;
			case ILCode.Bne_Un: comparison = ILCode.Cne; break;
			case ILCode.Bgt: comparison = ILCode.Cgt; break;
			case ILCode.Bgt_Un: comparison = ILCode.Cgt_Un; break;
			case ILCode.Ble: comparison = ILCode.Cle; break;
			case ILCode.Ble_Un: comparison = ILCode.Cle_Un; break;
			case ILCode.Blt: comparison = ILCode.Clt; break;
			case ILCode.Blt_Un: comparison = ILCode.Clt_Un; break;
			case ILCode.Bge: comparison = ILCode.Cge; break;
			case ILCode.Bge_Un: comparison = ILCode.Cge_Un; break;
			default:
				continue;
		}

		// Replace 'bXX(args) -> target' with 'brtrue(cXX(args)) -> target',
		// carrying the branch's ILRanges on the synthesized comparison.
		ILExpression condition = new ILExpression(comparison, null, branch.Arguments);
		condition.ILRanges.AddRange(branch.ILRanges);
		block.Body[pos] = new ILExpression(ILCode.Brtrue, branch.Operand, condition);
	}
}
/// <summary>
/// Converts call and callvirt instructions that read/write properties into CallGetter/CallSetter instructions.
///
/// CallGetter/CallSetter is used to allow the ILAst to represent "while ((SomeProperty = value) != null)".
///
/// Also simplifies 'newobj(SomeDelegate, target, ldvirtftn(F, target))' to 'newobj(SomeDelegate, target, ldvirtftn(F))'
/// </summary>
void IntroducePropertyAccessInstructions(ILNode node)
{
	ILExpression expression = node as ILExpression;
	if (expression == null) {
		// Non-expression node: recurse into children and treat any expression
		// children as roots (no parent expression, position -1).
		foreach (ILNode child in node.GetChildren()) {
			IntroducePropertyAccessInstructions(child);
			ILExpression childExpr = child as ILExpression;
			if (childExpr != null)
				IntroducePropertyAccessInstructions(childExpr, null, -1);
		}
		return;
	}

	// Expression node: visit each argument recursively, then rewrite it in the
	// context of this parent so 'Address' results can be wrapped in AddressOf.
	for (int argIndex = 0; argIndex < expression.Arguments.Count; argIndex++) {
		ILExpression argument = expression.Arguments[argIndex];
		IntroducePropertyAccessInstructions(argument);
		IntroducePropertyAccessInstructions(argument, expression, argIndex);
	}
}
/// <summary>
/// Rewrites a single expression in place:
/// call/callvirt of property accessors become CallGetter/CallSetter (and the virt
/// variants); calls to multi-dimensional array Get/Set/Address methods become
/// CallGetter/CallSetter (Address is additionally wrapped in AddressOf within the
/// parent); and 'newobj(SomeDelegate, target, ldvirtftn(F, target))' loses the
/// redundant 'target' argument of ldvirtftn.
/// </summary>
/// <param name="expr">Expression to examine and possibly rewrite.</param>
/// <param name="parentExpr">Parent expression, or null for a statement-level expression.</param>
/// <param name="posInParent">Index of expr within parentExpr.Arguments, or -1.</param>
void IntroducePropertyAccessInstructions(ILExpression expr, ILExpression parentExpr, int posInParent)
{
	if (expr.Code == ILCode.Call || expr.Code == ILCode.Callvirt) {
		IMethod cecilMethod = (IMethod)expr.Operand;
		var declType = cecilMethod.DeclaringType as dnlib.DotNet.TypeSpec;
		var declArrayType = declType == null ? null : declType.TypeSig.RemovePinnedAndModifiers() as ArraySigBase;
		if (declArrayType != null) {
			// Accessor methods on multi-dimensional arrays.
			switch (cecilMethod.Name) {
				case "Get":
					expr.Code = ILCode.CallGetter;
					break;
				case "Set":
					expr.Code = ILCode.CallSetter;
					break;
				case "Address":
					// 'Address' returns a byref; represent it as AddressOf(CallGetter(...))
					// by synthesizing a 'Get' member reference whose return type is the
					// array element type.
					ByRefSig brt = cecilMethod.MethodSig.GetRetType() as ByRefSig;
					if (brt != null) {
						IMethod getMethod = new MemberRefUser(cecilMethod.Module, "Get", cecilMethod.MethodSig == null ? null : cecilMethod.MethodSig.Clone(), declArrayType.ToTypeDefOrRef());
						if (getMethod.MethodSig != null)
							getMethod.MethodSig.RetType = declArrayType.Next;
						expr.Operand = getMethod;
					}
					expr.Code = ILCode.CallGetter;
					if (parentExpr != null) {
						parentExpr.Arguments[posInParent] = new ILExpression(ILCode.AddressOf, null, expr);
					}
					break;
			}
		} else {
			// Ordinary methods: mark property getters/setters so later passes can
			// treat property access specially.
			MethodDef cecilMethodDef = cecilMethod.Resolve();
			if (cecilMethodDef != null) {
				if (cecilMethodDef.IsGetter)
					expr.Code = (expr.Code == ILCode.Call) ? ILCode.CallGetter : ILCode.CallvirtGetter;
				else if (cecilMethodDef.IsSetter)
					expr.Code = (expr.Code == ILCode.Call) ? ILCode.CallSetter : ILCode.CallvirtSetter;
			}
		}
	} else if (expr.Code == ILCode.Newobj && expr.Arguments.Count == 2) {
		// Might be 'newobj(SomeDelegate, target, ldvirtftn(F, target))'.
		ILVariable target;
		if (expr.Arguments[0].Match(ILCode.Ldloc, out target)
			&& expr.Arguments[1].Code == ILCode.Ldvirtftn
			&& expr.Arguments[1].Arguments.Count == 1
			&& expr.Arguments[1].Arguments[0].MatchLdloc(target))
		{
			// Remove the 'target' argument from the ldvirtftn instruction.
			// It's not needed in the translation to C#, and needs to be eliminated so that the target expression
			// can be inlined.
			expr.Arguments[1].ILRanges.AddRange(expr.Arguments[1].Arguments[0].GetSelfAndChildrenRecursiveILRanges());
			expr.Arguments[1].Arguments.Clear();
		}
	}
}
/// <summary>
/// Groups the block's body into a set of basic blocks that can later be arbitrarily
/// shuffled. Explicit branches are added between consecutive blocks so control flow
/// becomes order-independent.
/// </summary>
/// <param name="block">Block whose body is rewritten into a list of ILBasicBlocks.</param>
void SplitToBasicBlocks(ILBlock block)
{
	List<ILNode> basicBlocks = new List<ILNode>();

	// Reuse the block's leading label as the entry label when there is one.
	ILLabel entryLabel = block.Body.FirstOrDefault() as ILLabel ?? CreateBlockLabel();
	ILBasicBlock basicBlock = new ILBasicBlock();
	basicBlocks.Add(basicBlock);
	basicBlock.Body.Add(entryLabel);
	block.EntryGoto = new ILExpression(ILCode.Br, entryLabel);

	if (block.Body.Count > 0) {
		if (block.Body[0] != entryLabel)
			basicBlock.Body.Add(block.Body[0]);

		for (int i = 1; i < block.Body.Count; i++) {
			ILNode lastNode = block.Body[i - 1];
			ILNode currNode = block.Body[i];

			// Start a new basic block at every label (try/catch counts as a label)
			// and after every control-flow instruction.
			if (currNode is ILLabel ||
				currNode is ILTryCatchBlock ||
				lastNode.IsConditionalControlFlow() ||
				lastNode.IsUnconditionalControlFlow())
			{
				// Try to reuse the label
				ILLabel label = currNode as ILLabel ?? CreateBlockLabel();

				// Terminate the last block with an explicit fall-through branch.
				if (!lastNode.IsUnconditionalControlFlow()) {
					basicBlock.Body.Add(new ILExpression(ILCode.Br, label));
				}

				// Start the new block
				basicBlock = new ILBasicBlock();
				basicBlocks.Add(basicBlock);
				basicBlock.Body.Add(label);

				// Add the node to the basic block
				if (currNode != label)
					basicBlock.Body.Add(currNode);
			} else {
				basicBlock.Body.Add(currNode);
			}
		}
	}

	block.Body = basicBlocks;
}

/// <summary>Creates a fresh, uniquely named label for an implicitly started basic block.</summary>
ILLabel CreateBlockLabel()
{
	return new ILLabel() { Name = "Block_" + (nextLabelIndex++).ToString() };
}
/// <summary>
/// Replaces branches that jump (directly, or through a chain of labels) to a 'ret'
/// with an inline copy of that return statement; a branch to the very end of the
/// method body is likewise turned into a plain 'return'. Only argument-less returns
/// and returns of a local or an I4 constant are duplicated.
/// </summary>
void DuplicateReturnStatements(ILBlock method)
{
	Dictionary<ILLabel, ILNode> nextSibling = new Dictionary<ILLabel, ILNode>();

	// Build navigation data: for every label, the node that follows it in its block.
	foreach(ILBlock block in method.GetSelfAndChildrenRecursive<ILBlock>()) {
		for (int i = 0; i < block.Body.Count - 1; i++) {
			ILLabel curr = block.Body[i] as ILLabel;
			if (curr != null) {
				nextSibling[curr] = block.Body[i + 1];
			}
		}
	}

	// Duplicate returns
	foreach(ILBlock block in method.GetSelfAndChildrenRecursive<ILBlock>()) {
		for (int i = 0; i < block.Body.Count; i++) {
			ILLabel targetLabel;
			if (block.Body[i].Match(ILCode.Br, out targetLabel) || block.Body[i].Match(ILCode.Leave, out targetLabel)) {
				// Skip extra labels
				while(nextSibling.ContainsKey(targetLabel) && nextSibling[targetLabel] is ILLabel) {
					targetLabel = (ILLabel)nextSibling[targetLabel];
				}

				// Inline return statement
				ILNode target;
				List<ILExpression> retArgs;
				if (nextSibling.TryGetValue(targetLabel, out target)) {
					if (target.Match(ILCode.Ret, out retArgs)) {
						ILVariable locVar;
						object constValue;
						if (retArgs.Count == 0) {
							block.Body[i] = new ILExpression(ILCode.Ret, null).WithILRanges(block.Body[i].GetSelfAndChildrenRecursiveILRanges());
						} else if (retArgs.Single().Match(ILCode.Ldloc, out locVar)) {
							block.Body[i] = new ILExpression(ILCode.Ret, null, new ILExpression(ILCode.Ldloc, locVar)).WithILRanges(block.Body[i].GetSelfAndChildrenRecursiveILRanges());
						} else if (retArgs.Single().Match(ILCode.Ldc_I4, out constValue)) {
							block.Body[i] = new ILExpression(ILCode.Ret, null, new ILExpression(ILCode.Ldc_I4, constValue)).WithILRanges(block.Body[i].GetSelfAndChildrenRecursiveILRanges());
						}
					}
				} else {
					if (method.Body.Count > 0 && method.Body.Last() == targetLabel) {
						// It exits the main method - so it is same as return;
						block.Body[i] = new ILExpression(ILCode.Ret, null).WithILRanges(block.Body[i].GetSelfAndChildrenRecursiveILRanges());
					}
				}
			}
		}
	}
}
/// <summary>
/// Flattens all nested basic blocks, except the the top level 'node' argument
/// </summary>
void FlattenBasicBlocks(ILNode node)
{
ILBlock block = node as ILBlock;
if (block != null) {
ILBasicBlock prevChildAsBB = null;
List<ILNode> flatBody = new List<ILNode>();
|
[
"\t\t\t\tforeach (ILNode child in block.GetChildren()) {"
] | 2,070
|
lcc
|
csharp
| null |
d2cc14959cc14d9e5f5c314ff857cbc70f1ff16fa1441fa2
|
|
using UnityEngine;
using System;
using System.Collections;
using System.Collections.Generic;
using Frontiers;
using Frontiers.World.Gameplay;
using TNet;
using Frontiers.World;
namespace Frontiers.World
{
public class WorldBody : TNBehaviour
{
//used as a base for CharacterBody and CreatureBody
//central hub for animation components, sound components etc.
//this is also where the bulk of networking is done for creatures / characters
//also handles the really messy business of converting a body to ragdoll and back
public TNObject NObject;
public IBodyOwner Owner;
public bool DisplayMode = false;
public Transform RotationPivot;
public Transform MovementPivot;
public Rigidbody rb;
public bool IsRagdoll;
public bool HasSpawned = false;
public int OverrideMovementMode;
public float VerticalAxisVelocityMultiplier = 0.5f;
public float JumpForceMultiplier = 5f;
#if UNITY_EDITOR
public bool DebugMovement = false;
#endif
// True once Initialize() has completed for this body.
public bool IsInitialized {
	get {
		return mInitialized;
	}
}

// Network-synchronized position. While ragdolled the authoritative position is the
// hip (root) body part's transform; otherwise it is the last value the owner set.
[NObjectSync]
public Vector3 SmoothPosition {
	get {
		if (IsRagdoll && RootBodyPart != null) {
			return RootBodyPart.tr.position;
		}
		return mSmoothPosition;
	}
	set {
		mSmoothPosition = value;
	}
}

// Network-synchronized rotation set by the owner.
[NObjectSync]
public Quaternion SmoothRotation {
	get {
		return mSmoothRotation;
	}
	set {
		mSmoothRotation = value;
	}
}

// Last computed movement velocity. Read-only here; presumably updated by the
// movement code in Update (not fully visible in this chunk) — confirm before relying on it.
public Vector3 Velocity {
	get {
		return mVelocity;
	}
}

// Current facing direction. Read-only here; updated elsewhere (same caveat as Velocity).
public Vector3 LookDirection {
	get {
		return mLookDir;
	}
}
public BodyAnimator Animator = null;
public BodyTransforms Transforms = null;
public BodySounds Sounds = null;
public BodyPart RootBodyPart = null;
public BodyPart BaseBodyPart = null;
public float FootstepDistance = 0.15f;
public System.Collections.Generic.List <BodyPart> BodyParts = new System.Collections.Generic.List <BodyPart>();
public System.Collections.Generic.List <WearablePart> WearableParts = new System.Collections.Generic.List <WearablePart>();
public System.Collections.Generic.List <EquippablePart> EquippableParts = new System.Collections.Generic.List <EquippablePart>();
public System.Collections.Generic.List <Renderer> Renderers = new System.Collections.Generic.List <Renderer>();
public string TransformPrefix = "Base_Human";
// Per-body main material. The setter re-binds every renderer tagged "BodyGeneral" so
// it uses the new main material plus a per-body blood splatter material (created on
// first use from the shared BloodSplatterMaterial; splatter starts invisible via
// _Cutoff = 1). Material slots are identified by name convention ("Blood" / "Body").
public Material MainMaterial {
	get {
		return mMainMaterial;
	}
	set {
		try {
			mMainMaterial = value;
			if (mBloodSplatterMaterial == null) {
				//make a copy of the local blood splatter material
				if (BloodSplatterMaterial == null) {
					BloodSplatterMaterial = Mats.Get.BloodSplatterMaterial;
				}
				mBloodSplatterMaterial = new Material(BloodSplatterMaterial);
				// Cutoff 1 keeps the splatter fully cut away until blood opacity is applied.
				mBloodSplatterMaterial.SetFloat("_Cutoff", 1f);
			}
			for (int i = 0; i < Renderers.Count; i++) {
				Renderer r = Renderers[i];
				if (r.CompareTag("BodyGeneral")) {
					Material[] currentSharedMaterials = r.sharedMaterials;
					if (currentSharedMaterials.Length > 1) {
						//we'll have to check for blood splatter mats
						System.Collections.Generic.List <Material> newSharedMaterials = new System.Collections.Generic.List <Material>(currentSharedMaterials);
						bool foundBloodMat = false;
						for (int j = 0; j < newSharedMaterials.Count; j++) {
							if (newSharedMaterials[j].name.Contains("Blood")) {
								newSharedMaterials[j] = mBloodSplatterMaterial;
								foundBloodMat = true;
							} else if (newSharedMaterials[j].name.Contains("Body")) {
								newSharedMaterials[j] = mMainMaterial;
							}
						}
						// Ensure a blood splatter slot exists even when none was present.
						if (!foundBloodMat) {
							newSharedMaterials.Add(mBloodSplatterMaterial);
						}
						r.sharedMaterials = newSharedMaterials.ToArray();
						newSharedMaterials.Clear();
					} else if (r.sharedMaterial != null && r.sharedMaterial.name.Contains("Body")) {
						// Single-material renderer: expand to [body, blood splatter].
						Material[] newSharedMaterials = new Material [2];
						newSharedMaterials[0] = mMainMaterial;
						newSharedMaterials[1] = mBloodSplatterMaterial;
						r.sharedMaterials = newSharedMaterials;
					}
				}
			}
		} catch (Exception e) {
			// NOTE(review): any failure is logged and swallowed — presumably to survive
			// destroyed renderers mid-iteration; confirm before tightening this.
			Debug.Log(e);
		}
	}
}
public Material BloodSplatterMaterial;
public System.Collections.Generic.List <Renderer> EyeRenderers = new System.Collections.Generic.List <Renderer>();
public Material EyeMaterial;
public Color EyeColor;
public float EyeBrightness;
public Color ScaredEyeColor;
public Color TimidEyeColor;
public Color AggressiveEyeColor;
public Color HostileEyeColor;
public Color TargetEyeColor;
public float TargetEyeBrightness;
public bool IsVisible = false;
// Behavioral eye state. Setting a new mode selects the matching target eye color;
// Dead additionally zeroes brightness, forces black, and applies the change
// immediately via RefreshEyes(). For the other modes, EyeColor presumably blends
// toward TargetEyeColor elsewhere — the update loop is not visible in this chunk.
public BodyEyeMode EyeMode {
	get {
		return mEyeMode;
	}
	set {
		if (mEyeMode != value) {
			mEyeMode = value;
			switch (EyeMode) {
			case BodyEyeMode.Scared:
				TargetEyeColor = ScaredEyeColor;
				break;
			case BodyEyeMode.Timid:
			default:
				// Timid doubles as the fallback for any unhandled mode.
				TargetEyeColor = TimidEyeColor;
				break;
			case BodyEyeMode.Aggressive:
				TargetEyeColor = AggressiveEyeColor;
				break;
			case BodyEyeMode.Hostile:
				TargetEyeColor = HostileEyeColor;
				break;
			case BodyEyeMode.Dead:
				// Dead eyes: black, no glow, applied immediately rather than blended.
				TargetEyeBrightness = 0f;
				TargetEyeColor = Color.black;
				EyeColor = TargetEyeColor;
				EyeBrightness = TargetEyeBrightness;
				RefreshEyes();
				break;
			}
		}
	}
}
protected BodyEyeMode mEyeMode = BodyEyeMode.Timid;
// True when an IBodyOwner (character/creature) has claimed this body (see OnSpawn).
public bool HasOwner {
	get {
		return Owner != null;
	}
}
/// <summary>
/// One-time setup of materials, body parts and eyes. Safe to call repeatedly:
/// returns immediately once mInitialized has been set.
/// </summary>
/// <param name="bodyPartOwner">Owner passed through to each BodyPart; may be null,
/// in which case (per the comment below) parts tag themselves as unrecognizable.</param>
public virtual void Initialize(IItemOfInterest bodyPartOwner)
{
	if (mInitialized) {
		return;
	}
	gameObject.tag = "BodyGeneral";
	//if we haven't created our main texture set it now
	if (mMainMaterial == null) {
		//TEMP
		//TODO figure this out another way
		try {
			// Renderer.material instantiates a per-renderer copy of the shared material.
			MainMaterial = Renderers[0].material;
		} catch (Exception e) {
			// Best effort: a body without renderers simply keeps no main material.
			//Debug.LogError (e);
		}
	}
	for (int i = 0; i < BodyParts.Count; i++) {
		//if this is set to null the body part will set its tag so that it won't be recognized
		BodyParts[i].Initialize(bodyPartOwner, BodyParts);
	}
	// Body parts must never collide with one another (ragdoll self-collision).
	for (int i = 0; i < BodyParts.Count; i++) {
		for (int j = 0; j < BodyParts.Count; j++) {
			if (i != j) {
				#if UNITY_EDITOR
				if (BodyParts [i] == BodyParts [j]) {
					Debug.Log ("Body part was the same in world body " + name);
				}
				#endif
				Physics.IgnoreCollision(BodyParts[i].PartCollider, BodyParts[j].PartCollider);
			}
		}
	}
	// Share one eye material instance across all eye renderers so color/brightness
	// changes apply to every eye at once.
	if (EyeRenderers.Count > 0) {
		EyeMaterial = EyeRenderers[0].material;
		for (int i = 0; i < EyeRenderers.Count; i++) {
			EyeRenderers[i].sharedMaterial = EyeMaterial;
		}
	}
	RefreshShadowCasters();
	mInitialized = true;
}
/// <summary>
/// Tints this body's blood splatter. The shared splatter material is cloned on
/// first use so the tint stays per-body.
/// </summary>
public virtual void SetBloodColor(Color bloodColor)
{
	Material splatter = mBloodSplatterMaterial;
	if (splatter == null) {
		splatter = new Material(BloodSplatterMaterial);
		mBloodSplatterMaterial = splatter;
	}
	splatter.color = bloodColor;
}
/// <summary>
/// Sets how much blood splatter is visible by adjusting the splatter material's
/// alpha cutoff (higher opacity lowers the cutoff; the cutoff never drops below 0.025).
/// The shared splatter material is cloned on first use so the value stays per-body.
/// </summary>
public virtual void SetBloodOpacity(float bloodOpacity)
{
	Material splatter = mBloodSplatterMaterial;
	if (splatter == null) {
		splatter = new Material(BloodSplatterMaterial);
		mBloodSplatterMaterial = splatter;
	}
	splatter.SetFloat("_Cutoff", Mathf.Max(1.0f - bloodOpacity, 0.025f));
}
/// <summary>
/// Enables or disables collision detection for this body. In ragdoll mode every
/// body part's rigidbody is toggled (kinematic + no collisions when ignoring);
/// otherwise only the main rigidbody's collision detection is toggled.
/// </summary>
public void IgnoreCollisions(bool ignore)
{
	if (!IsRagdoll) {
		rb.detectCollisions = !ignore;
		return;
	}
	for (int partIndex = 0; partIndex < BodyParts.Count; partIndex++) {
		Rigidbody partRigidbody = BodyParts[partIndex].RagdollRB;
		partRigidbody.isKinematic = ignore;
		partRigidbody.detectCollisions = !ignore;
	}
}
/// <summary>
/// Toggles renderer visibility. Only renderers tagged "BodyGeneral" or
/// "NonInteractive" follow <paramref name="visible"/>; every other renderer is
/// always disabled. No-op once the body has been destroyed.
/// </summary>
public void SetVisible(bool visible)
{
	if (mDestroyed) {
		return;
	}
	try {
		for (int i = 0; i < Renderers.Count; i++) {
			if (Renderers[i].CompareTag("BodyGeneral") || Renderers [i].CompareTag ("NonInteractive")) {
				Renderers[i].enabled = visible;
			} else {
				Renderers[i].enabled = false;
			}
		}
	} catch (Exception e) {
		// On any failure the whole component is disabled. NOTE(review): the caught
		// exception 'e' is never logged — consider including it in the message.
		Debug.LogError("Warning: Renderer null in " + name + ", disabling");
		IsVisible = false;
		enabled = false;
	}
	//Animator.animator.enabled = !visible;
	//IsVisible = visible;
}
public bool LockVisible = false;
// Called when an owner takes control of this body: lazily resolves the
// hip (root) and base body parts, wires the owner <-> body references,
// makes the body visible and collidable, and teleports the rigidbody to
// the owner's current transform so interpolation starts from there.
public virtual void OnSpawn(IBodyOwner owner)
{
if (RootBodyPart == null || BaseBodyPart == null) {
// Scan once; stop as soon as both parts are found.
for (int i = 0; i < BodyParts.Count; i++) {
if (BodyParts[i].Type == BodyPartType.Hip) {
RootBodyPart = BodyParts[i];
} else if (BodyParts[i].Type == BodyPartType.Base) {
BaseBodyPart = BodyParts[i];
}
if (RootBodyPart != null && BaseBodyPart != null) {
break;
}
}
}
Owner = owner;
owner.Body = this;
SetVisible(true);
IgnoreCollisions(false);
Animator.enabled = true;
enabled = true;
// Snap both the physics body and the smoothing targets to the owner.
rb.MovePosition(Owner.Position);
rb.MoveRotation(Owner.Rotation);
SmoothPosition = Owner.Position;
SmoothRotation = Owner.Rotation;
HasSpawned = true;
}
// One-time component wiring: creates/configures the rigidbody, caches the
// networking object, pivots, animator, transforms and sounds, then starts
// hidden and non-colliding until OnSpawn runs.
public virtual void Awake()
{ //we're guaranteed to have this
rb = gameObject.GetOrAdd <Rigidbody>();
rb.interpolation = RigidbodyInterpolation.None;
rb.useGravity = false;
rb.isKinematic = true;
gameObject.layer = Globals.LayerNumBodyPart;
NObject = gameObject.GetComponent <TNObject>();
MovementPivot = transform;
// RotationPivot defaults to the movement pivot when not assigned in the editor.
if (RotationPivot == null) {
RotationPivot = MovementPivot;
}
MovementPivot.localRotation = Quaternion.identity;
RotationPivot.localRotation = Quaternion.identity;
// _worldBodyNetworkUpdateTime = NetworkManager.WorldBodyUpdateRate;
// _bodyAnimatorNetworkUpdateTime = NetworkManager.BodyAnimatorUpdateRate;
Animator = gameObject.GetComponent <BodyAnimator>();
Animator.animator = gameObject.GetComponent <Animator>();
// The Unity Animator may live on the rotation pivot instead of the root.
if (Animator.animator == null) {
Animator.animator = RotationPivot.gameObject.GetComponent <Animator>();
}
Transforms = gameObject.GetComponent <BodyTransforms>();
Sounds = gameObject.GetComponent <BodySounds>();
if (Sounds != null) {
Sounds.Animator = Animator;
}
// Hidden and collision-free until an owner spawns into this body.
SetVisible(false);
IgnoreCollisions(true);
}
// Per-frame sync between the owning entity and this physical body:
// bails out unless fully initialized and in-game, handles death /
// destruction / ragdoll transitions, then (when this client is the
// authority) pulls position+rotation from the owner or the rigidbody
// and applies the smoothed transform.
public virtual void Update()
{
if (!GameManager.Is(FGameState.InGame) || DisplayMode)
return;
if (!mInitialized || !HasOwner || !Owner.Initialized || Owner.IsImmobilized) {
/*#if UNITY_EDITOR
if (DebugMovement) {
Debug.Log ("WORLD BODY Returning: initialized "
+ mInitialized.ToString ()
+ ", Has Owner: "
+ HasOwner.ToString ()
+ ", Owner initialized: "
+ Owner.Initialized.ToString ()
+ ", Owner immobilized: "
+ Owner.IsImmobilized.ToString ());
}
#endif*/
return;
}
if (Owner.IsDead) {
Animator.Dead = true;
/*#if UNITY_EDITOR
Debug.Log ("Body is dead");
#endif*/
return;
}
if (Owner.IsDestroyed) {
GameObject.Destroy(gameObject);
enabled = false;
return;
}
// Ragdoll state changed - start the transition and skip the rest of
// this frame so SetRagdoll can finish first.
if (IsRagdoll != Owner.IsRagdoll) {
SetRagdoll(Owner.IsRagdoll, 0.1f);
//wait for this to finish before the next update
return;
}
//if we're the brain then we're the one setting the position
//update the position based on the owner's position
if (NObject.isMine && HasOwner) {
if (Owner.IsRagdoll) {
//don't do anything
//let the owner pick up its position from our position
return;
}
//otherwise update the movement and smooth movement
//TODO reenable
/*
Decrease Timer
_worldBodyNetworkUpdateTime -= Time.deltaTime;
if (_worldBodyNetworkUpdateTime <= 0) {
tno.Send ("OnNetworkWorldBodyUpdate", Target.Others, new WorldBodyUpdate (
SmoothPosition, SmoothRotation));
// Reset to send again
_worldBodyNetworkUpdateTime = NetworkManager.WorldBodyUpdateRate;
}
_bodyAnimatorNetworkUpdateTime -= Time.deltaTime;
if (_bodyAnimatorNetworkUpdateTime <= 0) {
tno.Send ("OnBodyAnimatorUpdate", Target.Others, new BodyAnimatorUpdate (Animator));
_bodyAnimatorNetworkUpdateTime = NetworkManager.BodyAnimatorUpdateRate;
}
*/
SmoothRotation = Owner.Rotation;
if (rb.isKinematic) {
// Kinematic: track the owner directly.
SmoothPosition = Owner.Position;
} else {
// Dynamic: track the physics body and derive velocity/facing from it.
SmoothPosition = rb.position;
mVelocity = rb.velocity;
// Kill tiny residual velocities so the body settles instead of drifting.
if (mVelocity.magnitude < gMinWorldBodyVelocity) {
rb.velocity = Vector3.zero;
mVelocity = Vector3.zero;
}
mLookDir = mVelocity;
mLookDir.y = 0f;
mLookDir.Normalize();
}
}
if (rb.isKinematic) {
// Halfway lerp per frame gives a simple exponential smoothing.
rb.MovePosition(Vector3.Lerp(rb.position, mSmoothPosition, 0.5f));
}
rb.MoveRotation(SmoothRotation);
}
// Physics-tick upkeep: recomputes renderer visibility, mirrors the
// owner's life/kinematic state onto the rigidbody (constraints, drag,
// gravity, mass), drives the locomotion animator values, and accumulates
// distance for footstep sounds.
public virtual void FixedUpdate()
{
if (!mInitialized && !HasSpawned) {
// Not ready yet - just pin the body to the owner if we have one.
if (HasOwner) {
rb.position = Owner.Position;
rb.rotation = Owner.Rotation;
}
return;
}
if (!LockVisible) {
// Visible when any renderer is visible to at least one camera.
if (Renderers.Count > 0) {
IsVisible = false;
for (int i = 0; i < Renderers.Count; i++) {
if (Renderers[i].isVisible) {
IsVisible = true;
break;
}
}
} else {
// No renderers at all: treat as always visible.
IsVisible = true;
}
}
if (rb != null) {
if (Owner == null) {
Debug.Log("Owner null in body " + name + " setting to kinematic");
rb.isKinematic = true;
return;
}
if (Owner.IsDead) {
// Dead bodies go fully physical: gravity on, no constraints, light drag.
Animator.Dead = true;
rb.isKinematic = false;
rb.useGravity = rb.detectCollisions && Owner.UseGravity;
rb.constraints = RigidbodyConstraints.None;
rb.drag = 0.25f;
rb.angularDrag = 0.25f;
rb.mass = 1f;
return;
}
if (IsVisible) {
rb.isKinematic = Owner.IsKinematic;
/*#if UNITY_EDITOR
if (DebugMovement) {
Debug.Log ("WORLDBODY: Is visible and setting kinematic " + Owner.IsKinematic.ToString () + " in " + name);
}
#endif*/
} else {
// NOTE(review): this assigns isKinematic from the same expression
// used for useGravity below, which looks odd for a kinematic flag --
// confirm intended behavior before changing.
rb.isKinematic = rb.detectCollisions && Owner.UseGravity;
/*#if UNITY_EDITOR
if (DebugMovement) {
Debug.Log ("WORLDBODY: Is NOT visible, detect collisions? " + rb.detectCollisions.ToString () + ", Owner use gravity? " + Owner.UseGravity.ToString () + " in " + name);
}
#endif*/
}
if (rb.isKinematic) {
// Kinematic bodies are frozen; movement comes from MovePosition in Update.
rb.useGravity = false;
rb.constraints = RigidbodyConstraints.FreezeAll;
} else {
// Dynamic bodies: heavier drag/mass while grounded so they don't slide.
rb.useGravity = rb.detectCollisions && Owner.UseGravity;
rb.constraints = RigidbodyConstraints.FreezeRotation;
rb.drag = Owner.IsGrounded ? 0.95f : 0.25f;
rb.angularDrag = Owner.IsGrounded ? 0.95f : 0.25f;
rb.mass = Owner.IsGrounded ? Globals.WorldBodyMass : 1f;
}
if (IsVisible) {
mDistanceThisFrame = Vector3.Distance(MovementPivot.position, mSmoothPosition);
Animator.YRotation = SmoothRotation.y;
//use the distance this frame to set the movement speed
if (rb.isKinematic) {
Animator.VerticalAxisMovement = (float)Owner.CurrentMovementSpeed;
} else {
float mag = Mathf.Round(mVelocity.magnitude * VerticalAxisVelocityMultiplier);
Animator.VerticalAxisMovement = mag;
}
Animator.HorizontalAxisMovement = (float)Owner.CurrentRotationSpeed;
Animator.ForceWalk = Owner.ForceWalk;
Animator.IdleAnimation = Owner.CurrentIdleAnimation;
RefreshEyes();
}
//do this regardelss of network state
//this will ensure a smooth transition even if the updates don't happen very often
if (!IsRagdoll) {
mDistanceSinceLastFootstep += mDistanceThisFrame;
if (mDistanceSinceLastFootstep > FootstepDistance) {
Sounds.MakeFootStep();
mDistanceSinceLastFootstep = 0f;
}
// NOTE(review): this compares the *accumulated* footstep distance, not
// the per-frame distance, before snapping the smooth position -- looks
// like it may have been meant to use mDistanceThisFrame; confirm.
if (mDistanceSinceLastFootstep > gSnapDistance) {
mSmoothPosition = MovementPivot.position;
}
}
}
}
// Eases the eye rim color and brightness toward their targets each frame
// (rate scaled by WorldClock.ARTDeltaTime) and pushes the result into the
// shared eye material's "_RimColor".
protected void RefreshEyes()
{
if (EyeMaterial != null) {
EyeColor = Color.Lerp(EyeColor, TargetEyeColor, (float)WorldClock.ARTDeltaTime);
EyeBrightness = Mathf.Lerp(EyeBrightness, TargetEyeBrightness, (float)WorldClock.ARTDeltaTime);
EyeMaterial.SetColor("_RimColor", Colors.Alpha(EyeColor, EyeBrightness));
}
}
// Applies locomotion forces to the dynamic rigidbody: compensates for
// steep ground using the ground normal, applies a small constant lift,
// an optional jump impulse, and the main movement force. A zero target
// speed clears the angular velocity cap instead.
// NOTE(review): the `position` parameter is unused in this body.
public void UpdateForces(Vector3 position, Vector3 forceDirection, Vector3 groundNormal, bool isGrounded, float jumpForce, float targetMovementSpeed)
{ //use the normal of the ground we're on to determine if we need to add upwards force
if (targetMovementSpeed > 0f) {
if (isGrounded) {
float dot = Vector3.Dot(groundNormal, Vector3.up);
if (dot < 0.75f && dot > 0) {
//a dot of 1 would mean the ground is straight up
//a dot of less than 0 is impossible / wrong in this case
//anything less than 0.75 is going to offer substantial resistance
//so add force in the up direction
forceDirection.y = forceDirection.y + (1f - dot);
}
// Blend the push slightly into the surface to keep the body grounded.
forceDirection = Vector3.Lerp(forceDirection, -groundNormal, 0.25f);
}
// Small mass-proportional lift applied every call while moving.
forceDirection += Vector3.up * rb.mass * 0.25f;
if (jumpForce > 0f) {
Animator.Jump = true;
//add an impulse force immediately
rb.AddForce(Vector3.up * jumpForce * JumpForceMultiplier, ForceMode.Force);
} else {
Animator.Jump = false;
}
if (forceDirection != Vector3.zero) {
rb.AddForce(forceDirection * targetMovementSpeed);
}
rb.maxAngularVelocity = targetMovementSpeed;
} else {
rb.maxAngularVelocity = 0f;
}
}
#region Network Specific Code
// Internal network timer, decreased and reset based on the update function
internal float _worldBodyNetworkUpdateTime = 1f;
internal float _bodyAnimatorNetworkUpdateTime = 1f;
// Wire-format snapshot of a body's smoothed transform, sent to other
// clients via the "OnNetworkWorldBodyUpdate" RFC.
public class WorldBodyUpdate
{
public Vector3 Position;
public Quaternion Rotation;
public WorldBodyUpdate(Vector3 position, Quaternion rotation)
{
Position = position;
Rotation = rotation;
}
}
// Wire-format snapshot of a BodyAnimator's state, sent to other clients
// via the "OnBodyAnimatorUpdate" RFC.
public class BodyAnimatorUpdate
{
    public int BaseMovementMode;
    public int OverrideMovementMode;
    public float VerticalAxisMovement;
    public float HorizontalAxisMovement;
    public bool TakingDamage;
    public bool Dead;
    public bool Warn;
    public bool Attack1;
    public bool Attack2;
    public bool Grounded;
    public bool Jump;
    public bool Paused;
    public bool Idling;
    public BodyAnimatorUpdate(BodyAnimator target)
    {
        BaseMovementMode = target.BaseMovementMode;
        // FIX: previously copied BaseMovementMode here as well (copy-paste
        // error), so the override movement mode was never transmitted.
        OverrideMovementMode = target.OverrideMovementMode;
        VerticalAxisMovement = target.VerticalAxisMovement;
        HorizontalAxisMovement = target.HorizontalAxisMovement;
        TakingDamage = target.TakingDamage;
        Dead = target.Dead;
        Warn = target.Warn;
        Attack1 = target.Attack1;
        Attack2 = target.Attack2;
        Grounded = target.Grounded;
        Jump = target.Jump;
        Paused = target.Paused;
        Idling = target.Idling;
    }
}
[RFC]
// Remote-call handler: applies a transform snapshot received from the
// authoritative client to this body's smoothing targets.
public void OnNetworkWorldBodyUpdate(WorldBodyUpdate update)
{
SmoothPosition = update.Position;
SmoothRotation = update.Rotation;
}
[RFC]
// Remote-call handler: copies a received animator snapshot onto the local
// BodyAnimator. NOTE(review): update.OverrideMovementMode is never applied
// here even though it is part of the snapshot -- confirm whether that is
// intentional.
public void OnBodyAnimatorUpdate(BodyAnimatorUpdate update)
{
if (Animator == null)
return;
Animator.BaseMovementMode = update.BaseMovementMode;
Animator.VerticalAxisMovement = update.VerticalAxisMovement;
Animator.HorizontalAxisMovement = update.HorizontalAxisMovement;
Animator.TakingDamage = update.TakingDamage;
Animator.Dead = update.Dead;
Animator.Warn = update.Warn;
Animator.Attack1 = update.Attack1;
Animator.Attack2 = update.Attack2;
Animator.Grounded = update.Grounded;
Animator.Jump = update.Jump;
Animator.Paused = update.Paused;
Animator.Idling = update.Idling;
}
#endregion
// Finds the first body part of the requested type.
// Returns true and sets `part` when found; otherwise `part` is null.
public bool GetBodyPart(BodyPartType type, out BodyPart part)
{
    foreach (var candidate in BodyParts) {
        if (candidate.Type == type) {
            part = candidate;
            return true;
        }
    }
    part = null;
    return false;
}
public void SetRagdoll(bool ragdoll, float delay)
{
//don't do it again if we're already a ragdoll
|
[
"\t if (mConvertingToRagdoll) {"
] | 2,058
|
lcc
|
csharp
| null |
73339a8013cf28c56a4c1187df921540e969f355e17c82b2
|
|
import sys
import os
from gm_base.json_data import *
import gm_base.geometry_files.layers_io as lfc
class LayerType(IntEnum):
    """Kind of geological layer: a 3D stratum, a 2D fracture plane,
    or a shadow (placeholder) layer."""
    stratum = 0
    fracture = 1
    shadow = 2
class TopologyType(IntEnum):
    """How a layer's node set is obtained: given directly, or interpolated
    between two interface node sets."""
    given = 0
    interpolated = 1
class RegionDim(IntEnum):
    """Real-world dimension of a region; `invalid` and `none` mark
    unset / absent regions."""
    invalid = -2
    none = -1
    point = 0
    well = 1
    fracture = 2
    bulk = 3
class TopologyDim(IntEnum):
    """Dimension of a topology primitive within a layer."""
    invalid = -1
    node = 0
    segment = 1
    polygon = 2
class Curve(JsonData):
    """Serialization placeholder for curves; carries no data of its own yet."""
    def __init__(self, config={}):
        # NOTE(review): the mutable default is the project-wide JsonData
        # convention -- presumably JsonData copies it; confirm before changing.
        super().__init__(config)
class SurfaceApproximation(JsonData):
    """
    Serialization class for Z_Surface.

    Attribute values assigned before super().__init__ act as type/shape
    templates (and defaults) for the JsonData deserialization machinery.
    """
    def __init__(self, config={}):
        # B-spline knot vectors in the two parametric directions.
        self.u_knots = [float]
        self.v_knots = [float]
        # Spline degrees; quadratic by default.
        self.u_degree = 2
        self.v_degree = 2
        self.rational = False
        # 3D control-point grid.
        self.poles = [ [ [float] ] ]
        # Template: four 2D corner points of the original quad.
        self.orig_quad = 4*(2*(float,),)
        # Affine XY mapping (2x3) and linear Z mapping (scale, shift).
        self.xy_map = [[1.0, 0.0, 0.0], [0.0, 1.0, 0.0]]
        self.z_map = [1.0, 0.0]
        super().__init__(config)
class Surface(JsonData):
    """A named surface backed by a grid file and its spline approximation."""
    def __init__(self, config={}):
        self.grid_file = ""
        """File with approximated points (grid of 3D points). None for plane"""
        self.file_skip_lines = 0
        """Number of header lines to skip. """
        self.file_delimiter = ' '
        """ Delimiter of data fields on a single line."""
        self.name = ""
        """Surface name"""
        self.approximation = ClassFactory(SurfaceApproximation)
        """Serialization of the Z_Surface."""
        self.regularization = 1.0
        """Regularization weight."""
        self.approx_error = 0.0
        """L-inf error of aproximation"""
        super().__init__(config)

    # @staticmethod
    # def make_surface():
    #     surf = Surface()
    #     surf.approximation = None
    #     return surf

    @property
    def quad(self):
        # Delegates to the approximation; assumes it has been deserialized.
        return self.approximation.quad

    @classmethod
    def convert(cls, other):
        """Convert a surface from an older format, filling in the fields
        that did not exist in that format with their defaults."""
        new_surf = lfc.convert_json_data(sys.modules[__name__], other, cls)
        new_surf.approx_error = 0.0
        new_surf.regularization = 1.0
        new_surf.file_skip_lines = 0
        new_surf.file_delimiter = ' '
        return new_surf
class Interface(JsonData):
    """A surface reference with a Z transform, placed at a representative
    elevation."""
    def __init__(self, config={}):
        self.surface_id = int
        """Surface index"""
        self.transform_z = 2*(float,)
        """Transformation in Z direction (scale and shift)."""
        self.elevation = float
        """ Representative Z coord of the surface."""
        # Grid polygon should be in SurfaceApproximation, however
        # what for the case of planar interfaces without surface reference.
        #self.grid_polygon = 4*(2*(float,))
        """Vertices of the boundary polygon of the grid."""
        super().__init__(config)

    def __eq__(self, other):
        """Value equality: same elevation, same Z transform, same surface.

        FIX: surface_id was compared with `!=`, which meant two interfaces
        could only compare "equal" when they referenced *different* surfaces.
        """
        return self.elevation == other.elevation \
            and self.transform_z == other.transform_z \
            and self.surface_id == other.surface_id
class Segment(JsonData):
    """Line segment between two nodes, optionally bound to an interface."""
    def __init__(self, config={}):
        self.node_ids = ( int, int )
        """Indices of the first and second end point."""
        self.interface_id = None
        """Interface index"""
        super().__init__(config)

    def __eq__(self, other):
        # FIX: the original read `other.node.ids` (AttributeError) and
        # compared `surface_id`, an attribute neither Segment defines;
        # compare the attributes that actually exist.
        return self.node_ids == other.node_ids \
            and self.interface_id == other.interface_id
class Polygon(JsonData):
    """Polygon defined by an outer wire of segments, optional holes and
    free points, optionally bound to an interface."""
    def __init__(self, config={}):
        self.segment_ids = [ int ]
        """List of segments index of the outer wire."""
        self.holes = []
        """List of lists of segments of hole's wires"""
        self.free_points = [ int ]
        """List of free points in polygon."""
        self.interface_id = None
        """Interface index"""
        super().__init__(config)

    def __eq__(self, other):
        # FIX: `surface_id` does not exist on Polygon (it stores
        # interface_id), so the original comparison raised AttributeError.
        return self.segment_ids == other.segment_ids \
            and self.holes == other.holes \
            and self.free_points == other.free_points \
            and self.interface_id == other.interface_id
class Topology(JsonData):
    """Topological presentation of geometry objects: shared segments and
    polygons referenced by one or more node sets."""
    def __init__(self, config={}):
        self.segments = [ ClassFactory(Segment) ]
        """List of topology segments (line)"""
        self.polygons = [ ClassFactory(Polygon) ]
        """List of topology polygons"""
        super().__init__(config)

    def __eq__(self, other):
        # FIX: the expression previously ended with a dangling
        # line-continuation backslash, which made the statement that
        # followed it a syntax error.
        return self.segments == other.segments \
            and self.polygons == other.polygons
class NodeSet(JsonData):
    """Set of 2D points (nodes) sharing a single topology, optionally
    linked node-by-node to another node set on the same interface."""
    def __init__(self, config={}):
        self.topology_id = int
        """Topology index"""
        self.nodes = [ (float, float) ]
        """list of Nodes"""
        self.linked_node_set_id = None
        """node_set_idx of pair interface node set or None"""
        self.linked_node_ids = [ ]
        """List of node IDs that match node ids in other nodesets on the same interface. I.e. arbitrary number of nodesets can be linkedIf linked_node_set is not None there is list od pair indexes of nodes or none
        if node has not pair"""
        super().__init__(config)

    def reset(self):
        """Reset node set"""
        # Drops the points only; topology id and linking info are kept.
        self.nodes = []
class InterfaceNodeSet(JsonData):
    """Node set in space for transformation(x,y) ->(u,v).
    Only for GL"""
    # layer's topology is given directly; marker not serialized to JSON
    _not_serialized_attrs_ = ['interface_type']
    def __init__(self, config={}):
        self.nodeset_id = int
        """Node set index"""
        self.interface_id = int
        """Interface index"""
        super().__init__(config)
        # Set after deserialization so it never appears in the JSON output.
        self.interface_type = TopologyType.given
class InterpolatedNodeSet(JsonData):
    """Two node set with same Topology in space for transformation(x,y) ->(u,v).
    If both node sets is same, topology is vertical
    Only for GL"""
    _not_serialized_attrs_ = ['interface_type']
    def __init__(self, config={}):
        self.surf_nodesets = ( ClassFactory([InterfaceNodeSet]), ClassFactory([InterfaceNodeSet]) )
        """Top and bottom node set index"""
        self.interface_id = int
        """Interface index"""
        super().__init__(config)
        # Set after deserialization so it never appears in the JSON output.
        self.interface_type = TopologyType.interpolated
class Region(JsonData):
    """Description of a disjoint geometric area, sorted by dimension
    (dim=1 well, dim=2 fracture, dim=3 bulk)."""
    def __init__(self, config={}):
        self.color = ""
        """8-bite region color"""
        self.name = ""
        """region name"""
        self.dim = RegionDim.invalid
        """ Real dimension of the region. (0,1,2,3)"""
        self.topo_dim = TopologyDim.invalid
        """For backward compatibility. Dimension (0,1,2) in Stratum layer: node, segment, polygon"""
        self.boundary = False
        """Is boundary region"""
        self.not_used = False
        """is used """
        self.mesh_step = 0.0
        """mesh step - 0.0 is automatic choice"""
        self.brep_shape_ids = [ ]
        """List of shape indexes - in BREP geometry """
        super().__init__(config)

    def fix_dim(self, extruded):
        """Backfill `dim` from the legacy `topo_dim` (+1 when the layer is
        an extruded stratum) and assert the result is consistent."""
        if self.topo_dim != TopologyDim.invalid:
            # old format
            if self.dim == RegionDim.invalid:
                self.dim = RegionDim(self.topo_dim + extruded)
            if self.not_used:
                return
            # FIX: the "{}" placeholder was never filled in; format the
            # region name into the failure message.
            assert self.dim.value == self.topo_dim + extruded, \
                "Region {}, dimension mismatch.".format(self.name)
        assert self.dim != RegionDim.invalid
class GeoLayer(JsonData):
    """Geological layer: a named slab bounded by interface node sets, with
    region assignments for every topology object (polygon/segment/node)."""
    _not_serialized_attrs_ = ['layer_type']
    def __init__(self, config={}):
        self.name = ""
        """Layer Name"""
        self.top = ClassFactory( [InterfaceNodeSet, InterpolatedNodeSet] )
        """Accoding topology type interface node set or interpolated node set"""
        # assign regions to every topology object
        self.polygon_region_ids = [ int ]
        self.segment_region_ids = [ int ]
        self.node_region_ids = [ int ]
        super().__init__(config)
        # Subclasses override this after init; never serialized.
        self.layer_type = LayerType.shadow

    def fix_region_dim(self, regions):
        """Propagate this layer's extrusion to its regions' dimensions."""
        # Stratum layers are extruded to 3D, so topo_dim + 1 == real dim.
        extruded = (self.layer_type == LayerType.stratum)
        for reg_list in [self.polygon_region_ids, self.segment_region_ids, self.node_region_ids]:
            for reg_idx in reg_list:
                if reg_idx>0:
                    # NOTE(review): region index 0 is skipped -- presumably the
                    # implicit "none" region; confirm against the regions list.
                    reg = regions[reg_idx]
                    reg.fix_dim(extruded)

    def fix_region_id(self):
        """Shift legacy region ids down by 2; ids <= 2 collapse to 0.
        NOTE(review): the offset 2 presumably accounts for two removed
        built-in regions in the old format -- confirm."""
        for reg_list in [self.polygon_region_ids, self.segment_region_ids, self.node_region_ids]:
            for i in range(0, len(reg_list)):
                if reg_list[i]>2:
                    reg_list[i] -= 2
                else:
                    reg_list[i] = 0
class FractureLayer(GeoLayer):
|
[
" _not_serialized_attrs_ = ['layer_type', 'top_type']"
] | 878
|
lcc
|
python
| null |
b1cefd5f19084d4e4a500f51ff0fd6a59f8c2c70e47b868c
|
|
/* Copyright Statement:
*
* This software/firmware and related documentation ("MediaTek Software") are
* protected under relevant copyright laws. The information contained herein is
* confidential and proprietary to MediaTek Inc. and/or its licensors. Without
* the prior written permission of MediaTek inc. and/or its licensors, any
* reproduction, modification, use or disclosure of MediaTek Software, and
* information contained herein, in whole or in part, shall be strictly
* prohibited.
*
* MediaTek Inc. (C) 2010. All rights reserved.
*
* BY OPENING THIS FILE, RECEIVER HEREBY UNEQUIVOCALLY ACKNOWLEDGES AND AGREES
* THAT THE SOFTWARE/FIRMWARE AND ITS DOCUMENTATIONS ("MEDIATEK SOFTWARE")
* RECEIVED FROM MEDIATEK AND/OR ITS REPRESENTATIVES ARE PROVIDED TO RECEIVER
* ON AN "AS-IS" BASIS ONLY. MEDIATEK EXPRESSLY DISCLAIMS ANY AND ALL
* WARRANTIES, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED
* WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE OR
* NONINFRINGEMENT. NEITHER DOES MEDIATEK PROVIDE ANY WARRANTY WHATSOEVER WITH
* RESPECT TO THE SOFTWARE OF ANY THIRD PARTY WHICH MAY BE USED BY,
* INCORPORATED IN, OR SUPPLIED WITH THE MEDIATEK SOFTWARE, AND RECEIVER AGREES
* TO LOOK ONLY TO SUCH THIRD PARTY FOR ANY WARRANTY CLAIM RELATING THERETO.
* RECEIVER EXPRESSLY ACKNOWLEDGES THAT IT IS RECEIVER'S SOLE RESPONSIBILITY TO
* OBTAIN FROM ANY THIRD PARTY ALL PROPER LICENSES CONTAINED IN MEDIATEK
* SOFTWARE. MEDIATEK SHALL ALSO NOT BE RESPONSIBLE FOR ANY MEDIATEK SOFTWARE
* RELEASES MADE TO RECEIVER'S SPECIFICATION OR TO CONFORM TO A PARTICULAR
* STANDARD OR OPEN FORUM. RECEIVER'S SOLE AND EXCLUSIVE REMEDY AND MEDIATEK'S
* ENTIRE AND CUMULATIVE LIABILITY WITH RESPECT TO THE MEDIATEK SOFTWARE
* RELEASED HEREUNDER WILL BE, AT MEDIATEK'S OPTION, TO REVISE OR REPLACE THE
* MEDIATEK SOFTWARE AT ISSUE, OR REFUND ANY SOFTWARE LICENSE FEES OR SERVICE
* CHARGE PAID BY RECEIVER TO MEDIATEK FOR SUCH MEDIATEK SOFTWARE AT ISSUE.
*
* The following software/firmware and/or related documentation ("MediaTek
* Software") have been modified by MediaTek Inc. All revisions are subject to
* any receiver's applicable license agreements with MediaTek Inc.
*/
package org.bouncycastle.crypto.modes;
import org.bouncycastle.crypto.BlockCipher;
import org.bouncycastle.crypto.CipherParameters;
import org.bouncycastle.crypto.DataLengthException;
import org.bouncycastle.crypto.params.ParametersWithIV;
/**
* implements the GOST 28147 OFB counter mode (GCTR).
*/
public class GOFBBlockCipher
implements BlockCipher
{
private byte[] IV;
private byte[] ofbV;
private byte[] ofbOutV;
private final int blockSize;
private final BlockCipher cipher;
boolean firstStep = true;
int N3;
int N4;
static final int C1 = 16843012; //00000001000000010000000100000100
static final int C2 = 16843009; //00000001000000010000000100000001
/**
 * Basic constructor.
 *
 * @param cipher the block cipher to be used as the basis of the
 * counter mode (must have a 64 bit block size, i.e. GOST 28147).
 * @throws IllegalArgumentException if the cipher's block size is not 8 bytes.
 */
public GOFBBlockCipher(
    BlockCipher cipher)
{
    this.cipher = cipher;
    this.blockSize = cipher.getBlockSize();
    // GCTR is defined only for 64-bit block ciphers.
    if (blockSize != 8)
    {
        throw new IllegalArgumentException("GCTR only for 64 bit block ciphers");
    }
    // IV holds the user-supplied IV; ofbV is the working feedback/counter
    // block; ofbOutV receives each encryption of ofbV.
    this.IV = new byte[cipher.getBlockSize()];
    this.ofbV = new byte[cipher.getBlockSize()];
    this.ofbOutV = new byte[cipher.getBlockSize()];
}
/**
 * return the underlying block cipher that we are wrapping.
 *
 * @return the underlying block cipher that we are wrapping.
 */
public BlockCipher getUnderlyingCipher()
{
    return cipher;
}
/**
 * Initialise the cipher and, possibly, the initialisation vector (IV).
 * If an IV isn't passed as part of the parameter, the IV will be all zeros.
 * An IV which is too short is handled in FIPS compliant fashion.
 *
 * @param encrypting if true the cipher is initialised for
 * encryption, if false for decryption.
 * @param params the key and other data required by the cipher.
 * @exception IllegalArgumentException if the params argument is
 * inappropriate.
 */
public void init(
    boolean encrypting, //ignored by this CTR mode
    CipherParameters params)
    throws IllegalArgumentException
{
    // Restart the keystream: re-derive the counters on the next block.
    firstStep = true;
    N3 = 0;
    N4 = 0;
    if (params instanceof ParametersWithIV)
    {
        ParametersWithIV ivParam = (ParametersWithIV)params;
        byte[] iv = ivParam.getIV();
        if (iv.length < IV.length)
        {
            // prepend the supplied IV with zeros (per FIPS PUB 81)
            System.arraycopy(iv, 0, IV, IV.length - iv.length, iv.length);
            for (int i = 0; i < IV.length - iv.length; i++)
            {
                IV[i] = 0;
            }
        }
        else
        {
            System.arraycopy(iv, 0, IV, 0, IV.length);
        }
        reset();
        // The underlying cipher always encrypts: in counter/OFB modes the
        // same keystream is XORed for both encryption and decryption.
        cipher.init(true, ivParam.getParameters());
    }
    else
    {
        reset();
        cipher.init(true, params);
    }
}
/**
 * return the algorithm name and mode.
 *
 * @return the name of the underlying algorithm followed by "/GCTR"
 */
public String getAlgorithmName()
{
    return cipher.getAlgorithmName() + "/GCTR";
}
/**
 * return the block size we are operating at (in bytes).
 *
 * @return the block size we are operating at (in bytes); always 8 here.
 */
public int getBlockSize()
{
    return blockSize;
}
/**
 * Process one block of input from the array in and write it to
 * the out array.
 *
 * @param in the array containing the input data.
 * @param inOff offset into the in array the data starts at.
 * @param out the array the output data will be copied into.
 * @param outOff the offset into the out array the output will start at.
 * @exception DataLengthException if there isn't enough data in in, or
 * space in out.
 * @exception IllegalStateException if the cipher isn't initialised.
 * @return the number of bytes processed and produced.
 */
public int processBlock(
    byte[] in,
    int inOff,
    byte[] out,
    int outOff)
    throws DataLengthException, IllegalStateException
{
    if ((inOff + blockSize) > in.length)
    {
        throw new DataLengthException("input buffer too short");
    }
    if ((outOff + blockSize) > out.length)
    {
        throw new DataLengthException("output buffer too short");
    }
    if (firstStep)
    {
        // Seed the two 32-bit counters by encrypting the IV once.
        firstStep = false;
        cipher.processBlock(ofbV, 0, ofbOutV, 0);
        N3 = bytesToint(ofbOutV, 0);
        N4 = bytesToint(ofbOutV, 4);
    }
    // Advance the counters by the GOST constants each block.
    // NOTE(review): plain 32-bit wraparound addition here; the GOST 28147
    // GCTR specification treats one counter addition modulo 2^32-1 --
    // confirm against the spec / reference implementation.
    N3 += C2;
    N4 += C1;
    intTobytes(N3, ofbV, 0);
    intTobytes(N4, ofbV, 4);
    cipher.processBlock(ofbV, 0, ofbOutV, 0);
    //
    // XOR the ofbV with the plaintext producing the cipher text (and
    // the next input block).
    //
    for (int i = 0; i < blockSize; i++)
    {
        out[outOff + i] = (byte)(ofbOutV[i] ^ in[inOff + i]);
    }
    //
    // change over the input block.
    //
    // Since ofbV.length == blockSize the first copy moves zero bytes;
    // the second replaces ofbV entirely with the cipher output.
    System.arraycopy(ofbV, blockSize, ofbV, 0, ofbV.length - blockSize);
    System.arraycopy(ofbOutV, 0, ofbV, ofbV.length - blockSize, blockSize);
    return blockSize;
}
/**
 * reset the feedback vector back to the IV and reset the underlying
 * cipher. Note: does not clear firstStep/N3/N4; init() handles those.
 */
public void reset()
{
    System.arraycopy(IV, 0, ofbV, 0, IV.length);
    cipher.reset();
}
// Decode 4 bytes starting at inOff as a little-endian 32-bit int.
private int bytesToint(
    byte[] in,
    int inOff)
{
    int result = in[inOff] & 0xff;
    result |= (in[inOff + 1] & 0xff) << 8;
    result |= (in[inOff + 2] & 0xff) << 16;
    result |= (in[inOff + 3] & 0xff) << 24;
    return result;
}
//int to array of bytes
private void intTobytes(
int num,
byte[] out,
int outOff)
{
|
[
" out[outOff + 3] = (byte)(num >>> 24);"
] | 1,083
|
lcc
|
java
| null |
fe08dcb281de4c3a0e2cca60ae83b079f30ce3924ae5131b
|
|
/**
* Copyright (c) 2002-2012 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.graphmatching;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.neo4j.graphdb.Node;
import org.neo4j.graphmatching.filter.AbstractFilterExpression;
import org.neo4j.graphmatching.filter.FilterBinaryNode;
import org.neo4j.graphmatching.filter.FilterExpression;
import org.neo4j.graphmatching.filter.FilterValueGetter;
import org.neo4j.helpers.Predicate;
import org.neo4j.helpers.collection.FilteringIterable;
/**
* The PatternMatcher is the engine that performs the matching of a graph
* pattern with the actual graph.
*/
@Deprecated
public class PatternMatcher
{
private static PatternMatcher matcher = new PatternMatcher();
private PatternMatcher()
{
}
/**
 * Get the sole instance of the {@link PatternMatcher}.
 *
 * @return the singleton {@link PatternMatcher} instance.
 */
public static PatternMatcher getMatcher()
{
    return matcher;
}
/**
 * Find occurrences of the pattern defined by the given {@link PatternNode}
 * where the given {@link PatternNode} starts matching at the given
 * {@link Node}. Convenience overload: no variables, no optional
 * sub-patterns.
 *
 * @param start the {@link PatternNode} to start matching at.
 * @param startNode the {@link Node} to start matching at.
 * @return all matching instances of the pattern.
 */
public Iterable<PatternMatch> match( PatternNode start,
    Node startNode )
{
    return match( start, startNode, null );
}
/**
 * Find occurrences of the pattern defined by the given {@link PatternNode}
 * where the given {@link PatternNode} starts matching at the given
 * {@link Node}. Convenience overload: no optional sub-patterns.
 *
 * @param start the {@link PatternNode} to start matching at.
 * @param startNode the {@link Node} to start matching at.
 * @param objectVariables mapping from names to {@link PatternNode}s.
 * @return all matching instances of the pattern.
 */
public Iterable<PatternMatch> match( PatternNode start,
    Node startNode, Map<String, PatternNode> objectVariables )
{
    // The cast picks the Collection overload rather than the varargs one.
    return match( start, startNode, objectVariables,
        ( Collection<PatternNode> ) null );
}
/**
 * Find occurrences of the pattern defined by the given {@link PatternNode},
 * starting from the {@link Node} already associated with {@code start}
 * (no explicit start node is taken by this overload).
 *
 * @param start the {@link PatternNode} to start matching at; must have an
 * associated {@link Node}.
 * @param objectVariables mapping from names to {@link PatternNode}s.
 * @param optional nodes that form sub-patterns connected to this pattern.
 * @return all matching instances of the pattern.
 */
public Iterable<PatternMatch> match( PatternNode start,
    Map<String, PatternNode> objectVariables,
    PatternNode... optional )
{
    return match( start, objectVariables,
        Arrays.asList( optional ) );
}
/**
 * Find occurrences of the pattern defined by the given {@link PatternNode},
 * starting from the {@link Node} already associated with {@code start}.
 *
 * @param start the {@link PatternNode} to start matching at; must have an
 * associated {@link Node}.
 * @param objectVariables mapping from names to {@link PatternNode}s.
 * @param optional nodes that form sub-patterns connected to this pattern.
 * @return all matching instances of the pattern.
 * @throws IllegalStateException if {@code start} has no associated node.
 */
public Iterable<PatternMatch> match( PatternNode start,
    Map<String, PatternNode> objectVariables,
    Collection<PatternNode> optional )
{
    Node startNode = start.getAssociation();
    if ( startNode == null )
    {
        throw new IllegalStateException(
            "Associating node for start pattern node is null" );
    }
    return match( start, startNode, objectVariables, optional );
}
/**
 * Find occurrences of the pattern defined by the given {@link PatternNode}
 * where the given {@link PatternNode} starts matching at the given
 * {@link Node}. This is the overload all the others delegate to.
 *
 * @param start the {@link PatternNode} to start matching at.
 * @param startNode the {@link Node} to start matching at.
 * @param objectVariables mapping from names to {@link PatternNode}s; when
 * non-null the result is additionally filtered by FILTER expressions.
 * @param optional nodes that form sub-patterns connected to this pattern.
 * @return all matching instances of the pattern.
 * @throws IllegalStateException if {@code start} is already associated
 * with a different node than {@code startNode}.
 */
public Iterable<PatternMatch> match( PatternNode start,
    Node startNode, Map<String, PatternNode> objectVariables,
    Collection<PatternNode> optional )
{
    Node currentStartNode = start.getAssociation();
    if ( currentStartNode != null && !currentStartNode.equals( startNode ) )
    {
        // FIX: corrected typo "patter" in the exception message.
        throw new IllegalStateException(
            "Start pattern node already has associated " +
            currentStartNode + ", can not start with " + startNode );
    }
    Iterable<PatternMatch> result = null;
    if ( optional == null || optional.size() < 1 )
    {
        result = new PatternFinder( this, start, startNode );
    }
    else
    {
        result = new PatternFinder( this, start, startNode, false,
            optional );
    }
    if ( objectVariables != null )
    {
        // Uses the FILTER expressions
        result = new FilteredPatternFinder( result, objectVariables );
    }
    return result;
}
/**
 * Find occurrences of the pattern defined by the given {@link PatternNode}
 * where the given {@link PatternNode} starts matching at the given
 * {@link Node}. Varargs convenience wrapper around the Collection overload.
 *
 * @param start the {@link PatternNode} to start matching at.
 * @param startNode the {@link Node} to start matching at.
 * @param objectVariables mapping from names to {@link PatternNode}s.
 * @param optional nodes that form sub-patterns connected to this pattern.
 * @return all matching instances of the pattern.
 */
public Iterable<PatternMatch> match( PatternNode start,
    Node startNode, Map<String, PatternNode> objectVariables,
    PatternNode... optional )
{
    return match( start, startNode, objectVariables,
        Arrays.asList( optional ) );
}
/**
 * Resolves the string values behind a filter label: maps the label to a
 * pattern node (via the caller's variable map) and to a property key
 * (harvested from the filter expression tree), then reads that property
 * from the node matched for the current {@link PatternMatch}.
 */
private static class SimpleRegexValueGetter implements FilterValueGetter
{
    private PatternMatch match;
    // FIX: removed the dead "new HashMap" initializer - the constructor
    // unconditionally replaces this field with the supplied variable map.
    private Map<String, PatternNode> labelToNode;
    private Map<String, String> labelToProperty =
        new HashMap<String, String>();

    SimpleRegexValueGetter( Map<String, PatternNode> objectVariables,
        PatternMatch match, FilterExpression[] expressions )
    {
        this.match = match;
        // Collect label -> property-key pairs from every expression tree.
        for ( FilterExpression expression : expressions )
        {
            mapFromExpression( expression );
        }
        this.labelToNode = objectVariables;
    }

    // Recursively walks binary expression nodes down to the leaves.
    private void mapFromExpression( FilterExpression expression )
    {
        if ( expression instanceof FilterBinaryNode )
        {
            FilterBinaryNode node = ( FilterBinaryNode ) expression;
            mapFromExpression( node.getLeftExpression() );
            mapFromExpression( node.getRightExpression() );
        }
        else
        {
            AbstractFilterExpression pattern =
                ( AbstractFilterExpression ) expression;
            labelToProperty.put( pattern.getLabel(),
                pattern.getProperty() );
        }
    }

    /**
     * @param label a label appearing in the filter expressions.
     * @return the property's values as strings; empty when the property
     * is absent on the matched node.
     */
    public String[] getValues( String label )
    {
        PatternNode pNode = labelToNode.get( label );
        if ( pNode == null )
        {
            throw new RuntimeException( "No node for label '" + label +
                "'" );
        }
        Node node = this.match.getNodeFor( pNode );
        String propertyKey = labelToProperty.get( label );
        if ( propertyKey == null )
        {
            throw new RuntimeException( "No property key for label '" +
                label + "'" );
        }
        Object rawValue = node.getProperty( propertyKey, null );
        if ( rawValue == null )
        {
            return new String[ 0 ];
        }
        // Array-valued properties are flattened into individual strings.
        Collection<Object> values =
            ArrayPropertyUtil.propertyValueToCollection( rawValue );
        String[] result = new String[ values.size() ];
        int counter = 0;
        for ( Object value : values )
        {
            result[ counter++ ] = ( String ) value;
        }
        return result;
    }
}
private static class FilteredPatternFinder
extends FilteringIterable<PatternMatch>
{
public FilteredPatternFinder( Iterable<PatternMatch> source,
final Map<String, PatternNode> objectVariables )
{
|
[
" super( source, new Predicate<PatternMatch>()"
] | 1,124
|
lcc
|
java
| null |
e65940b61936a514284b1b107717706bb95a8df5df726f13
|
|
# -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Handle legislative parameters in XML format (and convert then to JSON)."""
import collections
import logging
import itertools
import datetime
from openfisca_core import conv
from datetime import datetime as dt
#legislation_json_key_by_xml_tag = dict(
# ASSIETTE = 'base', # "base" is singular, because a slice has only one base.
# BAREME = 'scales',
# CODE = 'parameters',
# NODE = 'nodes',
# SEUIL= 'threshold', # "threshold" is singular, because a slice has only one base.
# TAUX = 'rate', # "rate" is singular, because a slice has only one base.
# TRANCHE = 'slices',
# VALUE = 'values',
# )
log = logging.getLogger(__name__)
json_unit_by_xml_json_type = dict(
age = u'year',
days = u'day',
hours = u'hour',
monetary = u'currency',
months = u'month',
)
N_ = lambda message: message
xml_json_formats = (
'bool',
'float',
'integer',
'percent',
'date',
)
def make_validate_values_xml_json_dates(require_consecutive_dates = False):
def validate_values_xml_json_dates(values_xml_json, state = None):
if not values_xml_json:
return values_xml_json, None
if state is None:
state = conv.default_state
errors = {}
for index, value_xml_json in enumerate(values_xml_json):
if value_xml_json['deb'] > value_xml_json['fin']:
errors[index] = dict(fin = state._(u"Last date must be greater than first date"))
sorted_values_xml_json = sorted(values_xml_json, key = lambda value_xml_json: value_xml_json['deb'],
reverse = True)
next_value_xml_json = sorted_values_xml_json[0]
for index, value_xml_json in enumerate(itertools.islice(sorted_values_xml_json, 1, None)):
next_date_str = (datetime.date(*(int(fragment) for fragment in value_xml_json['fin'].split('-')))
+ datetime.timedelta(days = 1)).isoformat()
if require_consecutive_dates and next_date_str < next_value_xml_json['deb']:
errors.setdefault(index, {})['deb'] = state._(u"Dates of values are not consecutive")
elif next_date_str > next_value_xml_json['deb']:
errors.setdefault(index, {})['deb'] = state._(u"Dates of values overlap")
next_value_xml_json = value_xml_json
return sorted_values_xml_json, errors or None
return validate_values_xml_json_dates
def translate_xml_element_to_json_item(xml_element):
json_element = collections.OrderedDict()
text = xml_element.text
if text is not None:
text = text.strip().strip('#').strip() or None
if text is not None:
json_element['text'] = text
json_element.update(xml_element.attrib)
for xml_child in xml_element:
json_child_key, json_child = translate_xml_element_to_json_item(xml_child)
json_element.setdefault(json_child_key, []).append(json_child)
tail = xml_element.tail
if tail is not None:
tail = tail.strip().strip('#').strip() or None
if tail is not None:
json_element['tail'] = tail
return xml_element.tag, json_element
def transform_node_xml_json_to_json(node_xml_json, root = True):
comments = []
node_json = collections.OrderedDict()
if root:
node_json['@context'] = u'http://openfisca.fr/contexts/legislation.jsonld'
node_json['@type'] = 'Node'
child_json_by_code = {}
for key, value in node_xml_json.iteritems():
if key == 'BAREME':
for child_xml_json in value:
child_code, child_json = transform_scale_xml_json_to_json(child_xml_json)
child_json_by_code[child_code] = child_json
elif key == 'VALBYTRANCHES':
for child_xml_json in value:
child_code, child_json = transform_generation_xml_json_to_json(child_xml_json)
child_json_by_code[child_code] = child_json
elif key == 'CODE':
for child_xml_json in value:
child_code, child_json = transform_parameter_xml_json_to_json(child_xml_json)
child_json_by_code[child_code] = child_json
elif key == 'code':
pass
elif key == 'deb':
node_json['from'] = value
elif key == 'fin':
node_json['to'] = value
elif key == 'NODE':
for child_xml_json in value:
child_code, child_json = transform_node_xml_json_to_json(child_xml_json, root = False)
child_json_by_code[child_code] = child_json
elif key in ('tail', 'text'):
comments.append(value)
else:
node_json[key] = value
node_json['children'] = collections.OrderedDict(sorted(child_json_by_code.iteritems()))
if comments:
node_json['comment'] = u'\n\n'.join(comments)
return node_xml_json['code'], node_json
def transform_parameter_xml_json_to_json(parameter_xml_json):
comments = []
parameter_json = collections.OrderedDict()
parameter_json['@type'] = 'Parameter'
xml_json_value_to_json_transformer = float
for key, value in parameter_xml_json.iteritems():
if key in ('code', 'taille'):
pass
elif key == 'format':
parameter_json[key] = dict(
bool = u'boolean',
percent = u'rate',
date = u'date',
).get(value, value)
if value == 'bool':
xml_json_value_to_json_transformer = lambda xml_json_value: bool(int(xml_json_value))
elif value == 'integer':
xml_json_value_to_json_transformer = int
elif key in ('tail', 'text'):
comments.append(value)
elif key == 'type':
parameter_json['unit'] = json_unit_by_xml_json_type.get(value, value)
elif key == 'VALUE':
if 'format' in parameter_xml_json:
if parameter_xml_json['format'] == 'date':
format = 'date'
elif parameter_xml_json['format'] == 'integer':
format = int
elif parameter_xml_json['format'] == 'percent':
format = float
else:
format = eval(parameter_xml_json['format'])
else:
format = float
parameter_json['values'] = [ transform_value_xml_json_to_json(item, format)
for item in value
]
else:
parameter_json[key] = value
if comments:
parameter_json['comment'] = u'\n\n'.join(comments)
return parameter_xml_json['code'], parameter_json
def transform_scale_xml_json_to_json(scale_xml_json):
comments = []
scale_json = collections.OrderedDict()
scale_json['@type'] = 'Scale'
for key, value in scale_xml_json.iteritems():
if key == 'code':
pass
elif key in ('tail', 'text'):
comments.append(value)
elif key == 'TRANCHE':
scale_json['slices'] = [
transform_slice_xml_json_to_json(item)
for item in value
]
elif key == 'type':
scale_json['unit'] = json_unit_by_xml_json_type.get(value, value)
else:
scale_json[key] = value
if comments:
scale_json['comment'] = u'\n\n'.join(comments)
return scale_xml_json['code'], scale_json
def transform_generation_xml_json_to_json(generation_xml_json):
# Note: update with OF ?
comments = []
generation_json = collections.OrderedDict()
generation_json['@type'] = 'Generation'
for key, value in generation_xml_json.iteritems():
if key == 'code':
pass
elif key in ('tail', 'text'):
comments.append(value)
elif key == 'VARCONTROL':
generation_json['control'] = [
transform_value_xml_json_to_json(item, str)
for item in value[0]['CONTROL']
]
elif key == 'TRANCHE':
generation_json['slices'] = [
transform_slice2_xml_json_to_json(item)
for item in value
]
elif key == 'type':
generation_json['unit'] = json_unit_by_xml_json_type.get(value, value)
else:
generation_json[key] = value
if comments:
generation_json['comment'] = u'\n\n'.join(comments)
return generation_xml_json['code'], generation_json
def transform_slice2_xml_json_to_json(slice_xml_json):
comments = []
slice_json = collections.OrderedDict()
for key, value in slice_xml_json.iteritems():
if key == 'code':
pass
elif key == 'SEUIL':
slice_json['threshold'] = transform_values_holder_xml_json_to_json(value[0], format ='date')
elif key in ('tail', 'text'):
comments.append(value)
elif key == 'VALEUR':
slice_json['valeur'] = transform_values_holder_xml_json_to_json(value[0])
else:
slice_json[key] = value
if comments:
slice_json['comment'] = u'\n\n'.join(comments)
return slice_json
def transform_slice_xml_json_to_json(slice_xml_json):
comments = []
slice_json = collections.OrderedDict()
for key, value in slice_xml_json.iteritems():
if key == 'ASSIETTE':
slice_json['base'] = transform_values_holder_xml_json_to_json(value[0])
elif key == 'code':
pass
elif key == 'SEUIL':
slice_json['threshold'] = transform_values_holder_xml_json_to_json(value[0])
elif key in ('tail', 'text'):
comments.append(value)
|
[
" elif key == 'TAUX':"
] | 954
|
lcc
|
python
| null |
acf5e37bfbe33cbb9516c325004620f2cae48ca166d4d0ad
|
|
package org.openswing.swing.mdi.client;
import java.beans.*;
import java.util.*;
import java.awt.*;
import java.awt.event.*;
import java.awt.image.BufferedImage;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.*;
import javax.swing.event.*;
import org.openswing.swing.util.client.*;
import java.util.List;
import java.util.Collections;
/**
* <p>Title: OpenSwing Framework</p>
* <p>Description: Panel used to show the last opened windows and to switch between them.
* It can contains a toggle button for each added internal frame.
* User can click on the button to set to front the related internal frame or
* can reduce to icon or close internal frame by means of the popup menu opened by clicking with the right mouse button on the toggle button or
* can set to front the internal frame by entering the toggle button with the left mouse button clicked.
* <p>Copyright: Copyright (C) 2006 Mauro Carniel</p>
*
* <p> This file is part of OpenSwing Framework.
* This library is free software; you can redistribute it and/or
* modify it under the terms of the (LGPL) Lesser General Public
* License as published by the Free Software Foundation;
*
* GNU LESSER GENERAL PUBLIC LICENSE
* Version 2.1, February 1999
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the Free
* Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*
* The author may be contacted at:
* maurocarniel@tin.it</p>
*
* @author Mauro Carniel
* @version 1.0
*/
public class WinIconsPanel extends JPanel {
FlowLayout flowLayout1 = new FlowLayout();
/** collection of button, linked frame */
private Hashtable buttons = new Hashtable();
/** collection of pairs <frame title, SortedSet of associated Integer number> */
private Hashtable buttonsNr = new Hashtable();
/* toggle button width */
private static final int len = 120;
/** current horizontal position when locating a new toggle button */
private int x = 0;
/** used to show a popup menu containing a "close frame" menu item */
private JPopupMenu menu = new JPopupMenu();
/** menu item inserted into the popup menu */
private JMenuItem closeMenu = new JMenuItem(ClientSettings.getInstance().getResources().getResource("close window"));
/** menu item inserted into the popup menu */
private JMenuItem iconMenu = new JMenuItem(ClientSettings.getInstance().getResources().getResource("reduce to icon"));
/** internal frame to close */
private InternalFrame frameToClose = null;
public WinIconsPanel() {
try {
jbInit();
}
catch(Exception e) {
e.printStackTrace();
}
}
public final void init() {
this.removeAll();
buttons.clear();
buttonsNr.clear();
this.setMinimumSize(new Dimension(2000,26));
this.setPreferredSize(new Dimension(2000,26));
}
private void jbInit() throws Exception {
this.setBorder(BorderFactory.createLoweredBevelBorder());
flowLayout1.setAlignment(FlowLayout.LEFT);
flowLayout1.setHgap(0);
flowLayout1.setVgap(0);
this.setLayout(flowLayout1);
menu.add(closeMenu);
closeMenu.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
try {
x = x-len;
frameToClose.closeFrame();
frameToClose = null;
}
catch (PropertyVetoException ex) {
}
}
});
closeMenu.setVisible(ClientSettings.SHOW_POPUP_MENU_CLOSE);
if(ClientSettings.ICON_MENU_WINDOW_CLOSE!=null)
closeMenu.setIcon(new ImageIcon(ClientUtils.getImage(ClientSettings.ICON_MENU_WINDOW_CLOSE)));
menu.add(iconMenu);
iconMenu.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
try {
frameToClose.setIcon(true);
}
catch (PropertyVetoException ex) {
ex.printStackTrace();
}
frameToClose = null;
}
});
iconMenu.setVisible(ClientSettings.SHOW_ICON_POPUP_MENU_REDUCE_ICON);
if(ClientSettings.ICON_POPUP_MENU_REDUCE_ICON!=null)
iconMenu.setIcon(new ImageIcon(ClientUtils.getImage(ClientSettings.ICON_POPUP_MENU_REDUCE_ICON)));
}
/**
* Add an internal frame icon to the panel.
* Add an internal frame listener.
* @param frame internal frame to add
*/
public final void add(final InternalFrame frame) {
try {
Integer n = null;
SortedSet list = (SortedSet)buttonsNr.get(frame.getTitle());
if (list==null) {
list = new TreeSet();
n = new Integer(1);
list.add(n);
buttonsNr.put(frame.getTitle(),list);
}
else {
n = new Integer( ((Integer)list.last()).intValue()+1 );
for(int i=1;i<n.intValue();i++)
if (!list.contains(new Integer(i))) {
n = new Integer(i);
break;
}
list.add(n);
}
final JToggleButton btn = new JToggleButton((n.intValue()>1?" ["+n.intValue()+"] ":"")+frame.getTitle());
if (ClientSettings.ICON_ENABLE_FRAME!=null)
btn.setIcon(new ImageIcon(ClientUtils.getImage(ClientSettings.ICON_ENABLE_FRAME)));
btn.setHorizontalAlignment(SwingConstants.LEFT);
btn.setToolTipText(frame.getTitle());
// int len = btn.getFontMetrics(btn.getFont()).stringWidth(btn.getText());
// btn.setMinimumSize(new Dimension(len+20,24));
btn.setMinimumSize(new Dimension(len,24));
btn.setMaximumSize(new Dimension(len,24));
btn.setPreferredSize(new Dimension(len,24));
btn.setSize(new Dimension(len,24));
// while (x+len+20>this.getWidth()-200) {
// x = x-this.getComponent(0).getWidth();
// this.remove(0);
//
// this.revalidate();
// this.repaint();
// }
while (x+len+20>this.getWidth()-200) {
if (this.getComponentCount()>0)
x = x-this.getComponent(0).getWidth();
if (this.getComponentCount()>0)
this.remove(0);
this.revalidate();
this.repaint();
}
this.add(btn,null);
//x = x+len+20;
x = x+len;
buttons.put(btn,frame);
btn.setSelected(true);
this.revalidate();
this.repaint();
btn.addMouseMotionListener(new MouseMotionAdapter() {
public void mouseMoved(MouseEvent e) {
if (e.getX()<25) {
if (ClientSettings.ICON_CLOSE_FRAME_SELECTED!=null)
btn.setIcon(new ImageIcon(ClientUtils.getImage(ClientSettings.ICON_CLOSE_FRAME_SELECTED)));
} else {
if (ClientSettings.ICON_CLOSE_FRAME!=null)
btn.setIcon(new ImageIcon(ClientUtils.getImage(ClientSettings.ICON_CLOSE_FRAME)));
}
}
});
btn.addMouseListener(new MouseAdapter() {
public void mouseExited(MouseEvent e) {
if (frame.isSelected()) {
if (ClientSettings.ICON_ENABLE_FRAME!=null)
btn.setIcon(new ImageIcon(ClientUtils.getImage(ClientSettings.ICON_ENABLE_FRAME)));
} else {
if(!btn.isSelected())
if(ClientSettings.ICON_DISABLE_FRAME!=null)
btn.setIcon(new ImageIcon(ClientUtils.getImage(ClientSettings.ICON_DISABLE_FRAME)));
}
}
public void mouseClicked(MouseEvent e) {
if (SwingUtilities.isRightMouseButton(e)) {
frameToClose = (InternalFrame)buttons.get(btn);
if (frameToClose!=null &&
frameToClose.getDesktopPane()!=null &&
((DesktopPane)frameToClose.getDesktopPane()).isModal() &&
!frameToClose.isModal()) {
e.consume();
return;
}
iconMenu.setVisible( frameToClose.isIconifiable() );
menu.show(btn,e.getX(),e.getY());
}else{
if(e.getX() < 25){
frameToClose = (InternalFrame)buttons.get(btn);
try {
frameToClose.closeFrame();
} catch (PropertyVetoException ex) {
} }
}
}
public void mouseEntered(MouseEvent e) {
if (SwingUtilities.isLeftMouseButton(e)) {
btn.setSelected(true);
|
[
" InternalFrame f = (InternalFrame)buttons.get(btn);"
] | 769
|
lcc
|
java
| null |
c15dc271a99f31760a613bea95abc84961ad13d294310c3c
|
|
import os
import zmq
import warnings
TIMEOUT = 1000 # milliseconds
VERBOSE = False
RETRY = True # Should we try to get another server if we can't connect?
SERVERFILE = "serverlist.dat" # Base name of the file containing server names
if __name__ == '__main__':
VERBOSE = True
def printV(*args):
if VERBOSE:
for arg in args:
print arg,
print ''
def getBasePath(): # Get the base directory of this script
return __file__[:__file__.rfind("clientBase.py")]
def getServerFile(): # Return the full path the the server list file
return os.path.join(getBasePath(), SERVERFILE)
def generateConfig(): # generates the config and server files if not found
serverFile = getServerFile()
if os.path.isfile(serverFile):
printV("Server List Found at %s" % serverFile)
else:
printV("No Server List Found")
printV("Generating New Server List at %s" % serverFile)
with open(serverFile,'wb') as f:
f.write("echidna tcp://108.52.218.107:5001")
class SDSSError(Exception): # custom SDSSError that relates to serverside issues
def __init__(self, message, errors=None):
super(SDSSError, self).__init__(message)
self.errors = errors
class ServerList(dict): # dictionary like class that manages the possible servers
def __init__(self, *args, **kwargs):
super(ServerList, self).__init__(*args, **kwargs)
self.best = None
self.priority = {}
def addServer(self, name, address, priority): # add a server to our list of servers
server = {"address": address, "priority": priority}
self[name] = server
self.priority[priority] = name
def addServersFromFile(self, filename):
servers = []
priority = 0
with open(filename, 'rb') as f:
for line in f:
name, address = line.strip().split()
self.addServer(name, address, priority)
priority += 1
def saveServersToFile(self, filename):
lines = []
for p in sorted(self.priority.keys()):
name = self.priority[p]
address = self[name]['address']
lines.append(' '.join((name, address)))
with open(filename, 'wb') as f:
f.write('\n'.join(lines))
def testServer(self, server):
try:
context = zmq.Context()
socket = context.socket(zmq.REQ)
socket.LINGER = False
socket.connect(server['address'])
socket.send(b"ping\n", flags=zmq.NOBLOCK)
if socket.poll(timeout=1000, flags=zmq.POLLIN):
return True
else:
return False
except zmq.ZMQError as e:
raise SDSSError(e.message, e.errno)
def getBestServer(self): # determine the best server
for key, server in sorted(self.items(), key=lambda x: x[1]['priority']):
isGood = self.testServer(server)
if isGood:
printV("Best Server is %s" % key)
self.best = server['address']
break
else:
self.best = None
raise SDSSError("No good servers available at the moment", self.best)
def setBestServer(self, server): # manually override the best server
printV("Testing Server %s" % server)
isGood = self.testServer(self[server])
if isGood:
self.best = self[server]['address']
printV("%s is now connected" % server)
else:
raise SDSSError("Bad Server: %s" % server, server)
generateConfig() # Setup the server list and config if needed
servers = ServerList() # Instantiate a new server list
servers.addServersFromFile(getServerFile()) # Add servers from our server list
servers.getBestServer() # Find the best server based on priority and availability
def getSocket():
context = zmq.Context()
socket = context.socket(zmq.REQ)
socket.LINGER = False
socket.connect(servers.best)
return socket
def zmqSocketDecorator(func): # a decorator that handles the zmq sockets and raises SDSS exceptions
def wrapper(*args, **kwargs):
try:
socket = getSocket()
return func(socket, *args, **kwargs)
except zmq.ZMQError as e:
raise SDSSError(e.message, e.errno)
return wrapper
@zmqSocketDecorator
def getCommandResult(socket, cmd): # send a command to the server and return the result
global RETRY
socket.send(cmd)
if socket.poll(timeout=TIMEOUT, flags=zmq.POLLIN):
result = socket.recv_pyobj(flags=zmq.NOBLOCK)
else:
if RETRY:
printV("Server Disconnected. Attempting to Connect to Another Server")
servers.getBestServer()
RETRY = False
result = getCommandResult(cmd)
RETRY = True
return result
else:
raise SDSSError("Socket timed out", TIMEOUT)
if isinstance(result, Exception):
raise SDSSError(*result.args)
return result
def createCommand(server_func, *args): # get the command string for a function and it's arguments
if len(args):
args = " ".join(map(str, args))
else:
args = ''
cmd = b"%s\n%s" % (server_func, args)
return cmd
def isValid(server_func): # checks if a server_func is valid
cmd = createCommand('isValid', server_func)
result = getCommandResult(cmd)
return result
def commandArgCount(server_func): # gets information about the server func
cmd = createCommand('argCount', server_func)
result = getCommandResult(cmd)
return result
def _createFunction(server_func, docstr=None):
# Create a function object that acts on a server side func with name 'server_func'
if isValid(server_func):
nargs = commandArgCount(server_func) - 1
else:
raise SDSSError("Invalid Function: %s" % server_func, server_func)
def Func(*args):
if len(args) != nargs:
message = "%s takes exactly %i arguments (%i given)" % (server_func, nargs, len(args))
raise TypeError(message)
if docstr is not None:
Func.__doc__ = docstr
cmd = createCommand(server_func, *args)
result = getCommandResult(cmd)
return result
return Func
def createFunction(server_func, docstr=None):
def initialFunc(*args):
initalFunc = _createFunction(server_func, docstr)
return initalFunc(*args)
return initialFunc
# define our client-side functions below
getRandLC = createFunction("randLC",
"""
args: None
returns:
filename, redshift, data (tuple):
filename (str): name of the file on disk
redshift (float): redshift of the object
data (numpy structure array): structured array of the data from the LC file
""")
getLC = createFunction("getLC",
"""
args:
ID (str): SDSS J2000 name
returns:
filename, redshift, data (tuple):
filename (str): name of the file on disk
redshift (float): redshift of the object
data (numpy structure array): structured array of the data from the LC file
""")
getIDList = createFunction("IDList",
"""
args: None
returns:
IDList (list): List of strings of SDSS Objects names on disk
""")
getNearestLC = createFunction('getNearestLC',
"""
args:
ID (str): SDSS J200 name
tol (float): matching tolerance in degrees
returns:
filename, reshift, data (tuple):
see above
""")
if __name__ == '__main__':
import sys
if len(sys.argv) == 1:
print "Test"
if sys.argv[1] == '--check':
for name in sys.argv[2:]:
try:
getNearestLC(name, 2/60.0/60.0)
except SDSSError as e:
if 'No objects in list' in e.message:
print "LC does not exist in data base", 0, name
except IndexError as e:
print "No File Specified"
else:
print "LC does exist in database ", 1, name
elif sys.argv[1] == '--rand':
print getRandLC()
|
[
" elif sys.argv[1] == '--list':"
] | 866
|
lcc
|
python
| null |
2e4d03a695dc0527178e4eddff2b2c0651ddc6a0588d6cb5
|
|
/**
* This file is part of LibLaserCut.
* Copyright (C) 2011 - 2014 Thomas Oster <mail@thomas-oster.de>
*
* LibLaserCut is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* LibLaserCut is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with LibLaserCut. If not, see <http://www.gnu.org/licenses/>.
*
**/
package com.t_oster.liblasercut.drivers;
import com.t_oster.liblasercut.IllegalJobException;
import com.t_oster.liblasercut.JobPart;
import com.t_oster.liblasercut.LaserCutter;
import com.t_oster.liblasercut.LaserJob;
import com.t_oster.liblasercut.LaserProperty;
import com.t_oster.liblasercut.ProgressListener;
import com.t_oster.liblasercut.Raster3dPart;
import com.t_oster.liblasercut.RasterPart;
import com.t_oster.liblasercut.VectorCommand;
import com.t_oster.liblasercut.VectorPart;
import com.t_oster.liblasercut.platform.Point;
import com.t_oster.liblasercut.platform.Util;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
/**
*
* @author Thomas Oster <thomas.oster@rwth-aachen.de>
*
*
* some technical details about the iModela IM-01:
* max. part dimensions (x,y,z): (86mm, 55mm, 26mm)
* operating speed x and y axis: 6 to 240 mm/min
* operating speed z axis: 6 to 180 mm/min
* software resolution: 0.001mm/step (in NC-code mode), 0.01mm/step (RML-1 mode)
* mechanical resolution: 0.000186mm/step
*
* This driver controls the mill using NC codes.
* Reference: http://icreate.rolanddg.com/iModela/download/dl/manual/NC_CODE_EN.pdf
*
* Currently, this driver just engraves/cuts material in 2D. 2.5D data is not supported by VisiCut (yet).
*
*
*/
public class IModelaMill extends LaserCutter
{
private static String HOSTNAME = "Hostname/IP";
private static String PORT = "port";
private static String BED_WIDTH = "bed width";
private static String BED_HEIGHT = "bed height";
private static String FLIP_YAXIS = "flip y axis";
private static String HOME_ON_END = "move home after job";
private Map<String, Object> properties = new LinkedHashMap<String, Object>();
public IModelaMill()
{
properties.put(BED_WIDTH, (Double) 85d);
properties.put(BED_HEIGHT, (Double) 55d);
properties.put(HOSTNAME, "file:///dev/usb/lp0");
properties.put(PORT, (Integer) 5000);
properties.put(HOME_ON_END, (Boolean) true);
properties.put(FLIP_YAXIS, (Boolean) false);
}
private boolean spindleOn = false;
private void setSpindleOn(PrintStream out, boolean spindleOn)
{
if (spindleOn != this.spindleOn)
{
this.spindleOn = spindleOn;
out.println(spindleOn ? "M03" : "M05");//start/stop spindle
}
}
private void writeInitializationCode(PrintStream out)
{
out.println("%");
out.println("O00000001");//program number 00000001 - can be changed to any number, must be 8 digits
out.println("G90");//absolute positioning
out.println("G21");//select mm as input unit
}
private void writeFinalizationCode(PrintStream out)
{
this.setSpindleOn(out, false);
out.println("G0 Z0");//head up
if ((Boolean) properties.get(HOME_ON_END))
{
out.println("G0 X0 Y0");//go back to home
}
out.println("M02");//END_OF_PROGRAM
out.println("%");
}
//all depth values are positive, 0 is top
private double movedepth = 0;
private double linedepth = 0;
private double headdepth = 0;
private double spindleSpeed = 0;
private double feedRate = 0;
private int tool = 0;
//is applied to next G command
private String parameters = "";
private void moveHead(PrintStream out, double depth)
{
if (headdepth > depth)
{//move up fast
out.println(String.format(Locale.ENGLISH, "G00 Z%f%s\n", -depth, parameters));
parameters = "";
}
else if (headdepth < depth)
{//move down slow
out.println(String.format(Locale.ENGLISH, "G01 Z%f%s\n", -depth, parameters));
parameters = "";
}
headdepth = depth;
}
private void move(PrintStream out, double x, double y)
{
moveHead(out, movedepth);
//TODO: check if last command was also move and lies on the
//same line. If so, replace the last move command
out.print(String.format(Locale.ENGLISH, "G00 X%f Y%f%s\n", x, properties.get(FLIP_YAXIS) == Boolean.TRUE ? getBedHeight()-y : y, parameters));
parameters = "";
}
private void line(PrintStream out, double x, double y)
{
setSpindleOn(out, true);
moveHead(out, linedepth);
//TODO: check if last command was also line and lies on the
//same line. If so, replace the last move command
out.print(String.format(Locale.ENGLISH, "G01 X%f Y%f%s\n", x, properties.get(FLIP_YAXIS) == Boolean.TRUE ? getBedHeight()-y : y, parameters));
parameters = "";
}
private void applyProperty(PrintStream out, IModelaProperty pr)
{
linedepth = pr.getDepth();
if (pr.getSpindleSpeed() != spindleSpeed)
{
spindleSpeed = pr.getSpindleSpeed();
parameters += String.format(Locale.ENGLISH, " S%f\n", spindleSpeed);
}
if (pr.getFeedRate() != feedRate)
{
feedRate = pr.getFeedRate();
parameters += String.format(Locale.ENGLISH, " F%f\n", feedRate);
}
if (pr.getTool() != tool)
{
tool = pr.getTool();
//TODO: Maybe stop spindle and move to some location?
out.print(String.format(Locale.ENGLISH, "M06T0\n"));//return current tool
out.print(String.format(Locale.ENGLISH, "M06T%d\n", tool));
}
}
/*
* Returns the percentage of black pixels in a square rectangle with
* side length toolDiameter
* arount x/y in the given raster
*/
private double getBlackPercent(RasterPart p, int cx, int cy, int toolDiameter)
{
double count = toolDiameter*toolDiameter;
double black = 0;
for (int x = Math.max(cx-toolDiameter/2, 0); x < Math.min(cx+toolDiameter/2, p.getRasterWidth()); x++)
{
for (int y = Math.max(cy-toolDiameter/2, 0); y < Math.min(cy+toolDiameter/2, p.getRasterHeight()); y++)
{
if (p.isBlack(x, y))
{
black++;
}
}
}
return black/count;
}
private double getAverageGrey(Raster3dPart p, int cx, int cy, int toolDiameter)
{
double count = toolDiameter*toolDiameter;
double value = 0;
for (int y = Math.max(cy-toolDiameter/2, 0); y < Math.min(cy+toolDiameter/2, p.getRasterHeight()); y++)
{
List<Byte> line = p.getRasterLine(y);
for (int x = Math.max(cx-toolDiameter/2, 0); x < Math.min(cx+toolDiameter/2, p.getRasterWidth()); x++)
{
value += line.get(x);
}
}
return (value/count)/255;
}
/**
 * Emits milling commands for a 1-bit raster part. Each row is scanned in
 * alternating direction (boustrophedon); runs of positions whose black
 * coverage is below the threshold produce a single travel {@code move},
 * runs at or above it produce a single cutting {@code line}.
 *
 * @param p   the raster part to engrave
 * @param out stream receiving the generated commands
 */
private void writeRasterCode(RasterPart p, PrintStream out)
{
    double dpi = p.getDPI();
    // how many pixels (%) have to be black until we move the head down
    double threshold = 0.7;
    IModelaProperty prop = (IModelaProperty) p.getLaserProperty();
    int toolDiameterInPx = (int) Util.mm2px(prop.getToolDiameter(), dpi);
    // Advance at least one pixel per row: for tool diameters that map to
    // fewer than two pixels, toolDiameterInPx/2 would be 0 and the outer
    // loop would never terminate.
    int yStep = Math.max(1, toolDiameterInPx / 2);
    applyProperty(out, prop);
    boolean leftToRight = true;
    Point offset = p.getRasterStart();
    move(out, Util.px2mm(offset.x, dpi), Util.px2mm(offset.y, dpi));
    for (int y = 0; y < p.getRasterHeight(); y += yStep)
    {
        for (int x = leftToRight ? 0 : p.getRasterWidth() - 1;
            (leftToRight && x < p.getRasterWidth()) || (!leftToRight && x >= 0);
            x += leftToRight ? 1 : -1)
        {
            if (getBlackPercent(p, x, y, toolDiameterInPx) < threshold)
            {
                // skip intermediate move commands
                // The bounds check is explicitly grouped before '&&': without
                // the outer parentheses, '&&' binds tighter than '||' and the
                // threshold test was bypassed while scanning left-to-right.
                while (((leftToRight && x + 1 < p.getRasterWidth()) || (!leftToRight && x - 1 >= 0))
                    && getBlackPercent(p, leftToRight ? x + 1 : x - 1, y, toolDiameterInPx) < threshold)
                {
                    x += leftToRight ? 1 : -1;
                }
                move(out, Util.px2mm(offset.x + x, dpi), Util.px2mm(offset.y + y, dpi));
            }
            else
            {
                // skip intermediate line commands (same grouping fix as above)
                while (((leftToRight && x + 1 < p.getRasterWidth()) || (!leftToRight && x - 1 >= 0))
                    && getBlackPercent(p, leftToRight ? x + 1 : x - 1, y, toolDiameterInPx) >= threshold)
                {
                    x += leftToRight ? 1 : -1;
                }
                line(out, Util.px2mm(offset.x + x, dpi), Util.px2mm(offset.y + y, dpi));
            }
        }
        // invert direction for the next row
        leftToRight = !leftToRight;
    }
}
private void writeRaster3dCode(Raster3dPart p, PrintStream out)
{
double dpi = p.getDPI();
IModelaProperty prop = (IModelaProperty) p.getLaserProperty();
int toolDiameterInPx = (int) Util.mm2px(prop.getToolDiameter(), dpi);
applyProperty(out, prop);
boolean leftToRight = true;
Point offset = p.getRasterStart();
|
[
" move(out, Util.px2mm(offset.x, dpi), Util.px2mm(offset.y, dpi));"
] | 1,057
|
lcc
|
java
| null |
9a6eb97bf19cf92ba543d527e5650f3a6198498b114bcbf3
|
|
/*
* Copyright 2013-2015 Daniel Pereira Coelho
*
* This file is part of the Expenses Android Application.
*
* Expenses is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation in version 3.
*
* Expenses is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Expenses. If not, see <http://www.gnu.org/licenses/>.
*
*/
package com.dpcsoftware.mn;
import android.app.Dialog;
import android.content.ContentValues;
import android.content.Context;
import android.content.DialogInterface;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.v4.app.DialogFragment;
import android.support.v4.widget.CursorAdapter;
import android.support.v4.widget.SimpleCursorAdapter;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.CompoundButton;
import android.widget.CompoundButton.OnCheckedChangeListener;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.ListView;
import android.widget.RadioButton;
import android.widget.RadioGroup;
import android.widget.Spinner;
import android.widget.TextView;
public class EditGroups extends AppCompatActivity {
private ListView lv;
private GroupsAdapter adapter;
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // Plain list layout; rows are filled in by renderGroups().
    setContentView(R.layout.listview);
    lv = (ListView) findViewById(R.id.listView1);
    renderGroups();
    // Set the action-bar title from the string resource.
    getSupportActionBar().setTitle(R.string.editgroups_c1);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
    // Contribute the groups menu (the "add" action) to the action bar.
    getMenuInflater().inflate(R.menu.groups, menu);
    return true;
}
public boolean onOptionsItemSelected(MenuItem item) {
    // The only handled action is "add group": open the add/edit dialog
    // in ADD mode; everything else falls through and is reported handled.
    if (item.getItemId() == R.id.item1) {
        Bundle args = new Bundle();
        args.putInt("MODE", AddEditDialog.ADD);
        AddEditDialog dialog = new AddEditDialog();
        dialog.setArguments(args);
        dialog.show(getSupportFragmentManager(), null);
    }
    return true;
}
// Loads all expense groups (id + name, sorted by name) and binds them to the
// ListView: creates the adapter on the first call, swaps the cursor on
// subsequent refreshes.
private void renderGroups() {
    SQLiteDatabase db = DatabaseHelper.quickDb(this, DatabaseHelper.MODE_READ);
    Cursor c = db.rawQuery("SELECT "
        + Db.Table3._ID + ","
        + Db.Table3.GROUP_NAME +
        " FROM " + Db.Table3.TABLE_NAME +
        " ORDER BY " + Db.Table3.GROUP_NAME + " ASC", null);
    if(adapter == null) {
        adapter = new GroupsAdapter(this, c);
        lv.setAdapter(adapter);
        // NOTE(review): onCreate already called setContentView(R.layout.listview)
        // and lv is a child of that layout; calling setContentView(lv) again
        // looks redundant and may fail because lv already has a parent —
        // confirm the intent before relying on this.
        setContentView(lv);
    }
    else {
        // Replace the adapter's cursor with the freshly queried one.
        adapter.swapCursor(c);
        adapter.notifyDataSetChanged();
    }
    // NOTE(review): the cursor handed to the adapter is still open at this
    // point; closing the database here assumes SQLite keeps the cursor data
    // readable — verify against the adapter's lifetime.
    db.close();
}
/**
 * Cursor-backed list adapter showing one row per expense group, each row
 * carrying inline edit and delete buttons. Clicks are handled inside the
 * adapter, which resolves the affected row from the button's tag.
 */
private class GroupsAdapter extends CursorAdapter implements OnClickListener {
    private LayoutInflater inflater;

    public GroupsAdapter(Context context, Cursor c) {
        super(context, c, 0);
        inflater = LayoutInflater.from(context);
    }

    public View newView(Context context, Cursor cursor, ViewGroup parent) {
        return inflater.inflate(R.layout.editgroups_listitem, parent, false);
    }

    public void bindView(View view, Context context, Cursor cursor) {
        TextView groupName = (TextView) view.findViewById(R.id.textViewGroup);
        groupName.setText(cursor.getString(1));
        // Remember the row position on each button so onClick can tell
        // which group was targeted.
        int position = cursor.getPosition();
        ImageButton editButton = (ImageButton) view.findViewById(R.id.imageButtonEdit);
        editButton.setOnClickListener(this);
        editButton.setTag(position);
        ImageButton deleteButton = (ImageButton) view.findViewById(R.id.imageButtonDelete);
        deleteButton.setOnClickListener(this);
        deleteButton.setTag(position);
    }

    @Override
    public void onClick(View v) {
        int position = (Integer) v.getTag();
        int viewId = v.getId();
        if (viewId == R.id.imageButtonDelete) {
            if (getCursor().getCount() == 1) {
                // Refuse to delete the last remaining group; show an
                // explanatory dialog instead.
                new AlertDialog.Builder(EditGroups.this)
                        .setTitle(R.string.editgroups_c2)
                        .setMessage(R.string.editgroups_c3)
                        .create()
                        .show();
            } else {
                Bundle args = new Bundle();
                args.putLong("DELETE_ID", getItemId(position));
                DeleteDialog dialog = new DeleteDialog();
                dialog.setArguments(args);
                dialog.show(getSupportFragmentManager(), null);
            }
        } else if (viewId == R.id.imageButtonEdit) {
            Bundle args = new Bundle();
            args.putLong("EDIT_ID", getItemId(position));
            Cursor c = getCursor();
            c.moveToPosition(position);
            args.putString("CURRENT_NAME", c.getString(c.getColumnIndex(Db.Table3.GROUP_NAME)));
            args.putInt("MODE", AddEditDialog.EDIT);
            AddEditDialog dialog = new AddEditDialog();
            dialog.setArguments(args);
            dialog.show(getSupportFragmentManager(), null);
        }
    }
}
public static class DeleteDialog extends DialogFragment implements OnCheckedChangeListener, DialogInterface.OnClickListener {
private long deleteId;
private EditGroups act;
private App app;
private View layout;
// Builds the "delete group" confirmation dialog: a radio choice between
// deleting the group's expenses or moving them, plus a spinner listing every
// other group as the move target (disabled until "move" is selected).
@NonNull
public Dialog onCreateDialog(Bundle savedInstance) {
    act = (EditGroups) getActivity();
    app = (App) act.getApplication();
    Bundle args = getArguments();
    LayoutInflater li = LayoutInflater.from(act);
    layout = li.inflate(R.layout.editgroupseditcategories_deldialog, null);
    // Id of the group being deleted, passed in by the caller.
    deleteId = args.getLong("DELETE_ID");
    Spinner sp = (Spinner) layout.findViewById(R.id.spinner1);
    SQLiteDatabase db = DatabaseHelper.quickDb(act, DatabaseHelper.MODE_READ);
    // All groups except the one being deleted, as candidate move targets.
    Cursor c = db.rawQuery("SELECT "
        + Db.Table3._ID + ","
        + Db.Table3.GROUP_NAME +
        " FROM " + Db.Table3.TABLE_NAME +
        " WHERE " + Db.Table3._ID + " <> " + deleteId +
        " ORDER BY " + Db.Table3.GROUP_NAME + " ASC", null);
    SimpleCursorAdapter adapter = new SimpleCursorAdapter(act, android.R.layout.simple_spinner_item, c, new String[] {Db.Table3.GROUP_NAME}, new int[] {android.R.id.text1}, 0);
    adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
    sp.setAdapter(adapter);
    // Disabled until the "move expenses" radio button (radio1) is checked;
    // see onCheckedChanged.
    sp.setEnabled(false);
    ((RadioButton) layout.findViewById(R.id.radio1)).setOnCheckedChangeListener(this);
    // NOTE(review): the spinner's cursor is still in use after this close —
    // relies on the same deferred-close behaviour as renderGroups; verify.
    db.close();
    return new AlertDialog.Builder(act)
        .setView(layout)
        .setTitle(R.string.editgroups_c4)
        .setPositiveButton(R.string.gp_2, this)
        .setNegativeButton(R.string.gp_3, this)
        .create();
}
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
    // The target-group spinner is only usable while "move" is selected.
    View spinner = layout.findViewById(R.id.spinner1);
    spinner.setEnabled(isChecked);
}
@Override
public void onClick(DialogInterface dialog, int which) {
    // Positive button confirms the deletion; any other button just closes.
    if (which != DialogInterface.BUTTON_POSITIVE) {
        dismiss();
        return;
    }
    deleteGroup();
}
private void deleteGroup() {
SQLiteDatabase db = DatabaseHelper.quickDb(act, DatabaseHelper.MODE_WRITE);
RadioGroup rg = (RadioGroup) layout.findViewById(R.id.radioGroup1);
int toastString;
int result = db.delete(Db.Table3.TABLE_NAME, Db.Table3._ID + " = " + deleteId, null);
if(result == 1) {
if(rg.getCheckedRadioButtonId() == R.id.radio0) {
//Delete expenses
db.delete(Db.Table1.TABLE_NAME, Db.Table1.ID_GROUP + " = " + deleteId, null);
//Delete budget items
db.delete(Db.Table4.TABLE_NAME, Db.Table4.ID_GROUP + " = " + deleteId, null);
}
else {
long newId = ((Spinner) layout.findViewById(R.id.spinner1)).getSelectedItemId();
//Update expenses
|
[
"\t\t\t\t\tContentValues cv = new ContentValues();"
] | 666
|
lcc
|
java
| null |
f6cf9afee16a6f9eb253d2163fc5f2accb17108da85e51a3
|
|
// Taken from https://stackoverflow.com/questions/6596327/how-to-check-if-a-file-is-signed-in-c
using System;
using System.Runtime.InteropServices;
namespace VisualStudioHelpDownloaderPlus
{
/// <summary>
/// Helper for checking Authenticode signatures through the native
/// WinVerifyTrust API.
/// Taken from https://stackoverflow.com/questions/6596327/how-to-check-if-a-file-is-signed-in-c
/// </summary>
internal static class AuthenticodeTools
{
    [DllImport("Wintrust.dll", PreserveSig = true, SetLastError = false)]
    private static extern uint WinVerifyTrust(IntPtr hWnd, IntPtr pgActionID, IntPtr pWinTrustData);

    /// <summary>
    /// Runs the WINTRUST_ACTION_GENERIC_VERIFY_V2 policy against the file.
    /// </summary>
    /// <param name="fileName">Path of the file to verify.</param>
    /// <returns>0 when trust verification succeeds; a WinTrust error code otherwise.</returns>
    private static uint WinVerifyTrust(string fileName)
    {
        Guid wintrust_action_generic_verify_v2 = new Guid("{00AAC56B-CD44-11d0-8CC2-00C04FC295EE}");
        uint result = 0;
        using (WINTRUST_FILE_INFO fileInfo = new WINTRUST_FILE_INFO(fileName, Guid.Empty))
        using (UnmanagedPointer guidPtr = new UnmanagedPointer(Marshal.AllocHGlobal(Marshal.SizeOf(typeof(Guid))), AllocMethod.HGlobal))
        using (UnmanagedPointer wvtDataPtr = new UnmanagedPointer(Marshal.AllocHGlobal(Marshal.SizeOf(typeof(WINTRUST_DATA))), AllocMethod.HGlobal))
        {
            WINTRUST_DATA data = new WINTRUST_DATA(fileInfo);
            IntPtr pGuid = guidPtr;
            IntPtr pData = wvtDataPtr;

            // fDeleteOld must be false: both buffers were freshly allocated and
            // hold uninitialized garbage, so asking the marshaler to "destroy"
            // their previous contents would interpret that garbage as a valid
            // structure (per the Marshal.StructureToPtr documentation).
            Marshal.StructureToPtr(wintrust_action_generic_verify_v2, pGuid, false);
            Marshal.StructureToPtr(data, pData, false);
            result = WinVerifyTrust(IntPtr.Zero, pGuid, pData);
        }
        return result;
    }

    /// <summary>
    /// Returns true when the given file carries a valid, trusted
    /// Authenticode signature.
    /// </summary>
    public static bool IsTrusted(string fileName)
    {
        return WinVerifyTrust(fileName) == 0;
    }
}
/// <summary>
/// Managed mirror of the native WINTRUST_FILE_INFO structure: identifies the
/// file whose signature WinVerifyTrust should check. Owns the optional
/// unmanaged copy of the subject GUID and frees it in <see cref="Dispose()"/>.
/// </summary>
internal struct WINTRUST_FILE_INFO : IDisposable
{
    public WINTRUST_FILE_INFO(string fileName, Guid subject)
    {
        cbStruct = (uint)Marshal.SizeOf(typeof(WINTRUST_FILE_INFO));
        pcwszFilePath = fileName;
        if (subject != Guid.Empty)
        {
            pgKnownSubject = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(Guid)));
            // fDeleteOld is false because the buffer was just allocated and
            // contains no previous structure the marshaler could destroy.
            Marshal.StructureToPtr(subject, pgKnownSubject, false);
        }
        else
        {
            pgKnownSubject = IntPtr.Zero;
        }
        hFile = IntPtr.Zero;
    }

    public uint cbStruct;          // size of this structure, in bytes
    [MarshalAs(UnmanagedType.LPTStr)]
    public string pcwszFilePath;   // full path of the file to verify
    public IntPtr hFile;           // optional open handle to the file (unused here)
    public IntPtr pgKnownSubject;  // optional pointer to a subject GUID, or zero

    #region IDisposable Members

    public void Dispose()
    {
        Dispose(true);
    }

    private void Dispose(bool disposing)
    {
        if (pgKnownSubject != IntPtr.Zero)
        {
            Marshal.DestroyStructure(pgKnownSubject, typeof(Guid));
            Marshal.FreeHGlobal(pgKnownSubject);
            // Clear the pointer so a second Dispose call (the original left
            // it dangling) cannot free the same block twice.
            pgKnownSubject = IntPtr.Zero;
        }
    }

    #endregion
}
// How an unmanaged buffer wrapped by UnmanagedPointer was allocated, so it
// can be released with the matching Free* call.
enum AllocMethod
{
    HGlobal,
    CoTaskMem
};

// WINTRUST_DATA.dwUnionChoice: which kind of subject the info pointer
// describes (this code always uses File).
enum UnionChoice
{
    File = 1,
    Catalog,
    Blob,
    Signer,
    Cert
};

// WINTRUST_DATA.dwUIChoice: how much UI WinVerifyTrust may display
// (NoUI is used here for silent verification).
enum UiChoice
{
    All = 1,
    NoUI,
    NoBad,
    NoGood
};

// WINTRUST_DATA.fdwRevocationChecks: certificate revocation checking depth.
enum RevocationCheckFlags
{
    None = 0,
    WholeChain
};

// WINTRUST_DATA.dwStateAction: lifecycle action for the verification state
// data held in hWVTStateData.
enum StateAction
{
    Ignore = 0,
    Verify,
    Close,
    AutoCache,
    AutoCacheFlush
};

// WINTRUST_DATA.dwProvFlags: trust-provider behaviour bits (powers of two).
// NOTE(review): these are combined bitwise, so a [Flags] attribute would be
// idiomatic — left unchanged here because attributes alter type metadata.
enum TrustProviderFlags
{
    UseIE4Trust = 1,
    NoIE4Chain = 2,
    NoPolicyUsage = 4,
    RevocationCheckNone = 16,
    RevocationCheckEndCert = 32,
    RevocationCheckChain = 64,
    RecovationCheckChainExcludeRoot = 128,
    Safer = 256,
    HashOnly = 512,
    UseDefaultOSVerCheck = 1024,
    LifetimeSigning = 2048
};

// WINTRUST_DATA.dwUIContext: whether verification is for running or
// installing the subject.
enum UIContext
{
    Execute = 0,
    Install
};
/// <summary>
/// Managed mirror of the native WINTRUST_DATA structure passed to
/// WinVerifyTrust. Owns the unmanaged copy of the WINTRUST_FILE_INFO it was
/// constructed from and releases it (and the file info's own unmanaged
/// allocations) in <see cref="Dispose()"/>.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
internal struct WINTRUST_DATA : IDisposable
{
    public WINTRUST_DATA(WINTRUST_FILE_INFO fileInfo)
    {
        cbStruct = (uint)Marshal.SizeOf(typeof(WINTRUST_DATA));
        // Copy the file info into unmanaged memory; this struct owns that
        // block and frees it in Dispose.
        pInfoStruct = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(WINTRUST_FILE_INFO)));
        Marshal.StructureToPtr(fileInfo, pInfoStruct, false);
        dwUnionChoice = UnionChoice.File;
        pPolicyCallbackData = IntPtr.Zero;
        pSIPCallbackData = IntPtr.Zero;
        dwUIChoice = UiChoice.NoUI;
        fdwRevocationChecks = RevocationCheckFlags.None;
        dwStateAction = StateAction.Ignore;
        hWVTStateData = IntPtr.Zero;
        pwszURLReference = IntPtr.Zero;
        dwProvFlags = TrustProviderFlags.Safer;
        dwUIContext = UIContext.Execute;
    }

    public uint cbStruct;                          // size of this structure, in bytes
    public IntPtr pPolicyCallbackData;             // optional policy callback payload
    public IntPtr pSIPCallbackData;                // optional SIP callback payload
    public UiChoice dwUIChoice;                    // UI policy (NoUI here)
    public RevocationCheckFlags fdwRevocationChecks;
    public UnionChoice dwUnionChoice;              // discriminates pInfoStruct's type
    public IntPtr pInfoStruct;                     // unmanaged WINTRUST_FILE_INFO owned by this struct
    public StateAction dwStateAction;
    public IntPtr hWVTStateData;
    private IntPtr pwszURLReference;               // reserved, always zero
    public TrustProviderFlags dwProvFlags;
    public UIContext dwUIContext;

    #region IDisposable Members

    public void Dispose()
    {
        Dispose(true);
    }

    private void Dispose(bool disposing)
    {
        if (pInfoStruct == IntPtr.Zero)
        {
            return; // already disposed
        }
        if (dwUnionChoice == UnionChoice.File)
        {
            // Rehydrate the marshalled WINTRUST_FILE_INFO so its own
            // unmanaged allocation (pgKnownSubject) is released as well.
            // The original called Marshal.PtrToStructure(IntPtr, object) with
            // a boxed struct, which throws ArgumentException for value types;
            // the typed overload below is the supported way to read a struct.
            WINTRUST_FILE_INFO info = (WINTRUST_FILE_INFO)Marshal.PtrToStructure(pInfoStruct, typeof(WINTRUST_FILE_INFO));
            info.Dispose();
            Marshal.DestroyStructure(pInfoStruct, typeof(WINTRUST_FILE_INFO));
        }
        Marshal.FreeHGlobal(pInfoStruct);
        // Clear the pointer so a repeated Dispose cannot double-free.
        pInfoStruct = IntPtr.Zero;
    }

    #endregion
}
internal sealed class UnmanagedPointer : IDisposable
{
private IntPtr m_ptr;
private AllocMethod m_meth;
internal UnmanagedPointer(IntPtr ptr, AllocMethod method)
{
m_meth = method;
m_ptr = ptr;
}
~UnmanagedPointer()
{
Dispose(false);
}
#region IDisposable Members
private void Dispose(bool disposing)
{
if (m_ptr != IntPtr.Zero)
{
if (m_meth == AllocMethod.HGlobal)
{
Marshal.FreeHGlobal(m_ptr);
}
|
[
" else if (m_meth == AllocMethod.CoTaskMem)"
] | 459
|
lcc
|
csharp
| null |
2237275bf34eb140554aa5a5e98dc8bde6dd285b688c247c
|
|
# Default Django settings. Override these with settings in the module
# pointed-to by the DJANGO_SETTINGS_MODULE environment variable.
# This is defined here as a do-nothing function because we can't import
# django.utils.translation -- that module depends on the settings.
def gettext_noop(s):
    """Return *s* unchanged.

    Stands in for django.utils.translation.gettext_noop, which cannot be
    imported here because that module depends on the settings. Defined with
    ``def`` rather than a name-bound lambda (PEP 8 E731) so the function has
    a proper __name__ for tracebacks and introspection.
    """
    return s
####################
# CORE #
####################
DEBUG = False
TEMPLATE_DEBUG = False
# Whether the framework should propagate raw exceptions rather than catching
# them. This is useful under some testing situations and should never be used
# on a live site.
DEBUG_PROPAGATE_EXCEPTIONS = False
# Whether to use the "Etag" header. This saves bandwidth but slows down performance.
USE_ETAGS = False
# People who get code error notifications.
# In the format (('Full Name', 'email@example.com'), ('Full Name', 'anotheremail@example.com'))
ADMINS = ()
# Tuple of IP addresses, as strings, that:
# * See debug comments, when DEBUG is true
# * Receive x-headers
INTERNAL_IPS = ()
# Hosts/domain names that are valid for this site.
# "*" matches anything, ".example.com" matches example.com and all subdomains
ALLOWED_HOSTS = []
# Local time zone for this installation. All choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name (although not all
# systems may support all possibilities). When USE_TZ is True, this is
# interpreted as the default user time zone.
TIME_ZONE = 'America/Chicago'
# If you set this to True, Django will use timezone-aware datetimes.
USE_TZ = False
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
# Languages we provide translations for, out of the box.
LANGUAGES = (
('af', gettext_noop('Afrikaans')),
('ar', gettext_noop('Arabic')),
('az', gettext_noop('Azerbaijani')),
('bg', gettext_noop('Bulgarian')),
('be', gettext_noop('Belarusian')),
('bn', gettext_noop('Bengali')),
('br', gettext_noop('Breton')),
('bs', gettext_noop('Bosnian')),
('ca', gettext_noop('Catalan')),
('cs', gettext_noop('Czech')),
('cy', gettext_noop('Welsh')),
('da', gettext_noop('Danish')),
('de', gettext_noop('German')),
('el', gettext_noop('Greek')),
('en', gettext_noop('English')),
('en-au', gettext_noop('Australian English')),
('en-gb', gettext_noop('British English')),
('eo', gettext_noop('Esperanto')),
('es', gettext_noop('Spanish')),
('es-ar', gettext_noop('Argentinian Spanish')),
('es-mx', gettext_noop('Mexican Spanish')),
('es-ni', gettext_noop('Nicaraguan Spanish')),
('es-ve', gettext_noop('Venezuelan Spanish')),
('et', gettext_noop('Estonian')),
('eu', gettext_noop('Basque')),
('fa', gettext_noop('Persian')),
('fi', gettext_noop('Finnish')),
('fr', gettext_noop('French')),
('fy', gettext_noop('Frisian')),
('ga', gettext_noop('Irish')),
('gl', gettext_noop('Galician')),
('he', gettext_noop('Hebrew')),
('hi', gettext_noop('Hindi')),
('hr', gettext_noop('Croatian')),
('hu', gettext_noop('Hungarian')),
('ia', gettext_noop('Interlingua')),
('id', gettext_noop('Indonesian')),
('is', gettext_noop('Icelandic')),
('it', gettext_noop('Italian')),
('ja', gettext_noop('Japanese')),
('ka', gettext_noop('Georgian')),
('kk', gettext_noop('Kazakh')),
('km', gettext_noop('Khmer')),
('kn', gettext_noop('Kannada')),
('ko', gettext_noop('Korean')),
('lb', gettext_noop('Luxembourgish')),
('lt', gettext_noop('Lithuanian')),
('lv', gettext_noop('Latvian')),
('mk', gettext_noop('Macedonian')),
('ml', gettext_noop('Malayalam')),
('mn', gettext_noop('Mongolian')),
('my', gettext_noop('Burmese')),
('nb', gettext_noop('Norwegian Bokmal')),
('ne', gettext_noop('Nepali')),
('nl', gettext_noop('Dutch')),
('nn', gettext_noop('Norwegian Nynorsk')),
('os', gettext_noop('Ossetic')),
('pa', gettext_noop('Punjabi')),
('pl', gettext_noop('Polish')),
('pt', gettext_noop('Portuguese')),
('pt-br', gettext_noop('Brazilian Portuguese')),
('ro', gettext_noop('Romanian')),
('ru', gettext_noop('Russian')),
('sk', gettext_noop('Slovak')),
('sl', gettext_noop('Slovenian')),
('sq', gettext_noop('Albanian')),
('sr', gettext_noop('Serbian')),
('sr-latn', gettext_noop('Serbian Latin')),
('sv', gettext_noop('Swedish')),
('sw', gettext_noop('Swahili')),
('ta', gettext_noop('Tamil')),
('te', gettext_noop('Telugu')),
('th', gettext_noop('Thai')),
('tr', gettext_noop('Turkish')),
('tt', gettext_noop('Tatar')),
('udm', gettext_noop('Udmurt')),
('uk', gettext_noop('Ukrainian')),
('ur', gettext_noop('Urdu')),
('vi', gettext_noop('Vietnamese')),
('zh-cn', gettext_noop('Simplified Chinese')),
('zh-hans', gettext_noop('Simplified Chinese')),
('zh-hant', gettext_noop('Traditional Chinese')),
('zh-tw', gettext_noop('Traditional Chinese')),
)
# Languages using BiDi (right-to-left) layout
LANGUAGES_BIDI = ("he", "ar", "fa", "ur")
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
LOCALE_PATHS = ()
# Settings for language cookie
LANGUAGE_COOKIE_NAME = 'django_language'
LANGUAGE_COOKIE_AGE = None
LANGUAGE_COOKIE_DOMAIN = None
LANGUAGE_COOKIE_PATH = '/'
# If you set this to True, Django will format dates, numbers and calendars
# according to user current locale.
USE_L10N = False
# Not-necessarily-technical managers of the site. They get broken link
# notifications and other various emails.
MANAGERS = ADMINS
# Default content type and charset to use for all HttpResponse objects, if a
# MIME type isn't manually specified. These are used to construct the
# Content-Type header.
DEFAULT_CONTENT_TYPE = 'text/html'
DEFAULT_CHARSET = 'utf-8'
# Encoding of files read from disk (template and initial SQL files).
FILE_CHARSET = 'utf-8'
# Email address that error messages come from.
SERVER_EMAIL = 'root@localhost'
# Whether to send broken-link emails. Deprecated, must be removed in 1.8.
SEND_BROKEN_LINK_EMAILS = False
# Database connection info. If left empty, will default to the dummy backend.
DATABASES = {}
# Classes used to implement DB routing behavior.
DATABASE_ROUTERS = []
# The email backend to use. For possible shortcuts see django.core.mail.
# The default is to use the SMTP backend.
# Third-party backends can be specified by providing a Python path
# to a module that defines an EmailBackend class.
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
# Host for sending email.
EMAIL_HOST = 'localhost'
# Port for sending email.
EMAIL_PORT = 25
# Optional SMTP authentication information for EMAIL_HOST.
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_USE_TLS = False
EMAIL_USE_SSL = False
# List of strings representing installed apps.
INSTALLED_APPS = ()
# List of locations of the template source files, in search order.
TEMPLATE_DIRS = ()
# List of callables that know how to import templates from various sources.
# See the comments in django/core/template/loader.py for interface
# documentation.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
# List of processors used by RequestContext to populate the context.
# Each one should be a callable that takes the request object as its
# only parameter and returns a dictionary to add to the context.
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
'django.core.context_processors.static',
'django.core.context_processors.tz',
# 'django.core.context_processors.request',
'django.contrib.messages.context_processors.messages',
)
# Output to use in template system for invalid (e.g. misspelled) variables.
TEMPLATE_STRING_IF_INVALID = ''
# Default email address to use for various automated correspondence from
# the site managers.
DEFAULT_FROM_EMAIL = 'webmaster@localhost'
# Subject-line prefix for email messages send with django.core.mail.mail_admins
# or ...mail_managers. Make sure to include the trailing space.
EMAIL_SUBJECT_PREFIX = '[Django] '
# Whether to append trailing slashes to URLs.
APPEND_SLASH = True
# Whether to prepend the "www." subdomain to URLs that don't have it.
PREPEND_WWW = False
# Override the server-derived value of SCRIPT_NAME
FORCE_SCRIPT_NAME = None
# List of compiled regular expression objects representing User-Agent strings
# that are not allowed to visit any page, systemwide. Use this for bad
# robots/crawlers. Here are a few examples:
# import re
# DISALLOWED_USER_AGENTS = (
# re.compile(r'^NaverBot.*'),
# re.compile(r'^EmailSiphon.*'),
# re.compile(r'^SiteSucker.*'),
# re.compile(r'^sohu-search')
# )
DISALLOWED_USER_AGENTS = ()
ABSOLUTE_URL_OVERRIDES = {}
# Tuple of strings representing allowed prefixes for the {% ssi %} tag.
# Example: ('/home/html', '/var/www')
ALLOWED_INCLUDE_ROOTS = ()
# If this is a admin settings module, this should be a list of
# settings modules (in the format 'foo.bar.baz') for which this admin
# is an admin.
ADMIN_FOR = ()
# List of compiled regular expression objects representing URLs that need not
# be reported by BrokenLinkEmailsMiddleware. Here are a few examples:
# import re
# IGNORABLE_404_URLS = (
# re.compile(r'^/apple-touch-icon.*\.png$'),
# re.compile(r'^/favicon.ico$),
# re.compile(r'^/robots.txt$),
# re.compile(r'^/phpmyadmin/),
# re.compile(r'\.(cgi|php|pl)$'),
# )
IGNORABLE_404_URLS = ()
# A secret key for this particular Django installation. Used in secret-key
# hashing algorithms. Set this in your settings, or Django will complain
# loudly.
SECRET_KEY = ''
# Default file storage mechanism that holds media.
DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage'
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = None
# URL that handles the static files served from STATIC_ROOT.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = None
# List of upload handler classes to be applied in order.
FILE_UPLOAD_HANDLERS = (
'django.core.files.uploadhandler.MemoryFileUploadHandler',
'django.core.files.uploadhandler.TemporaryFileUploadHandler',
)
# Maximum size, in bytes, of a request before it will be streamed to the
# file system instead of into memory.
FILE_UPLOAD_MAX_MEMORY_SIZE = 2621440 # i.e. 2.5 MB
# Directory in which upload streamed files will be temporarily saved. A value of
# `None` will make Django use the operating system's default temporary directory
# (i.e. "/tmp" on *nix systems).
FILE_UPLOAD_TEMP_DIR = None
# The numeric mode to set newly-uploaded files to. The value should be a mode
# you'd pass directly to os.chmod; see http://docs.python.org/lib/os-file-dir.html.
FILE_UPLOAD_PERMISSIONS = None
# The numeric mode to assign to newly-created directories, when uploading files.
# The value should be a mode as you'd pass to os.chmod;
# see http://docs.python.org/lib/os-file-dir.html.
FILE_UPLOAD_DIRECTORY_PERMISSIONS = None
# Python module path where user will place custom format definition.
# The directory where this setting is pointing should contain subdirectories
# named as the locales, containing a formats.py file
# (i.e. "myproject.locale" for myproject/locale/en/formats.py etc. use)
FORMAT_MODULE_PATH = None
# Default formatting for date objects. See all available format strings here:
# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'N j, Y'
# Default formatting for datetime objects. See all available format strings here:
# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATETIME_FORMAT = 'N j, Y, P'
# Default formatting for time objects. See all available format strings here:
# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
TIME_FORMAT = 'P'
# Default formatting for date objects when only the year and month are relevant.
# See all available format strings here:
# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
YEAR_MONTH_FORMAT = 'F Y'
# Default formatting for date objects when only the month and day are relevant.
# See all available format strings here:
# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
MONTH_DAY_FORMAT = 'F j'
# Default short formatting for date objects. See all available format strings here:
# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
SHORT_DATE_FORMAT = 'm/d/Y'
# Default short formatting for datetime objects.
# See all available format strings here:
# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
SHORT_DATETIME_FORMAT = 'm/d/Y P'
# Default formats to be used when parsing dates from input boxes, in order
# See all available format string here:
# http://docs.python.org/library/datetime.html#strftime-behavior
# * Note that these format strings are different from the ones to display dates
DATE_INPUT_FORMATS = (
'%Y-%m-%d', '%m/%d/%Y', '%m/%d/%y', # '2006-10-25', '10/25/2006', '10/25/06'
'%b %d %Y', '%b %d, %Y', # 'Oct 25 2006', 'Oct 25, 2006'
'%d %b %Y', '%d %b, %Y', # '25 Oct 2006', '25 Oct, 2006'
'%B %d %Y', '%B %d, %Y', # 'October 25 2006', 'October 25, 2006'
'%d %B %Y', '%d %B, %Y', # '25 October 2006', '25 October, 2006'
)
# Default formats to be used when parsing times from input boxes, in order
# See all available format string here:
# http://docs.python.org/library/datetime.html#strftime-behavior
# * Note that these format strings are different from the ones to display dates
TIME_INPUT_FORMATS = (
'%H:%M:%S', # '14:30:59'
'%H:%M:%S.%f', # '14:30:59.000200'
'%H:%M', # '14:30'
)
# Default formats to be used when parsing dates and times from input boxes,
# in order
# See all available format string here:
# http://docs.python.org/library/datetime.html#strftime-behavior
# * Note that these format strings are different from the ones to display dates
DATETIME_INPUT_FORMATS = (
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
'%Y-%m-%d %H:%M:%S.%f', # '2006-10-25 14:30:59.000200'
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
'%Y-%m-%d', # '2006-10-25'
'%m/%d/%Y %H:%M:%S', # '10/25/2006 14:30:59'
'%m/%d/%Y %H:%M:%S.%f', # '10/25/2006 14:30:59.000200'
'%m/%d/%Y %H:%M', # '10/25/2006 14:30'
'%m/%d/%Y', # '10/25/2006'
'%m/%d/%y %H:%M:%S', # '10/25/06 14:30:59'
'%m/%d/%y %H:%M:%S.%f', # '10/25/06 14:30:59.000200'
'%m/%d/%y %H:%M', # '10/25/06 14:30'
'%m/%d/%y', # '10/25/06'
)
# First day of week, to be used on calendars
# 0 means Sunday, 1 means Monday...
FIRST_DAY_OF_WEEK = 0
# Decimal separator symbol
DECIMAL_SEPARATOR = '.'
# Boolean that sets whether to add thousand separator when formatting numbers
USE_THOUSAND_SEPARATOR = False
# Number of digits that will be together, when splitting them by
# THOUSAND_SEPARATOR. 0 means no grouping, 3 means splitting by thousands...
NUMBER_GROUPING = 0
# Thousand separator symbol
THOUSAND_SEPARATOR = ','
# Do you want to manage transactions manually?
# Hint: you really don't!
TRANSACTIONS_MANAGED = False
# The tablespaces to use for each model when not specified otherwise.
DEFAULT_TABLESPACE = ''
DEFAULT_INDEX_TABLESPACE = ''
# Default X-Frame-Options header value
X_FRAME_OPTIONS = 'SAMEORIGIN'
USE_X_FORWARDED_HOST = False
# The Python dotted path to the WSGI application that Django's internal servers
# (runserver, runfcgi) will use. If `None`, the return value of
# 'django.core.wsgi.get_wsgi_application' is used, thus preserving the same
# behavior as previous versions of Django. Otherwise this should point to an
# actual WSGI application object.
WSGI_APPLICATION = None
# If your Django app is behind a proxy that sets a header to specify secure
# connections, AND that proxy ensures that user-submitted headers with the
# same name are ignored (so that people can't spoof it), set this value to
# a tuple of (header_name, header_value). For any requests that come in with
# that header/value, request.is_secure() will return True.
# WARNING! Only set this if you fully understand what you're doing. Otherwise,
# you may be opening yourself up to a security risk.
SECURE_PROXY_SSL_HEADER = None
##############
# MIDDLEWARE #
##############
# List of middleware classes to use. Order is important; in the request phase,
# this middleware classes will be applied in the order given, and in the
# response phase the middleware will be applied in reverse order.
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# 'django.middleware.http.ConditionalGetMiddleware',
# 'django.middleware.gzip.GZipMiddleware',
)
############
# SESSIONS #
############
SESSION_CACHE_ALIAS = 'default' # Cache to store session data if using the cache session backend.
SESSION_COOKIE_NAME = 'sessionid' # Cookie name. This can be whatever you want.
SESSION_COOKIE_AGE = 60 * 60 * 24 * 7 * 2 # Age of cookie, in seconds (default: 2 weeks).
SESSION_COOKIE_DOMAIN = None # A string like ".example.com", or None for standard domain cookie.
SESSION_COOKIE_SECURE = False # Whether the session cookie should be secure (https:// only).
SESSION_COOKIE_PATH = '/' # The path of the session cookie.
SESSION_COOKIE_HTTPONLY = True # Whether to use the non-RFC standard httpOnly flag (IE, FF3+, others)
SESSION_SAVE_EVERY_REQUEST = False # Whether to save the session data on every request.
SESSION_EXPIRE_AT_BROWSER_CLOSE = False # Whether a user's session cookie expires when the Web browser is closed.
SESSION_ENGINE = 'django.contrib.sessions.backends.db' # The module to store session data
SESSION_FILE_PATH = None # Directory to store session files if using the file session module. If None, the backend will use a sensible default.
SESSION_SERIALIZER = 'django.contrib.sessions.serializers.JSONSerializer' # class to serialize session data
#########
# CACHE #
#########
# The cache backends to use.
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
}
}
CACHE_MIDDLEWARE_KEY_PREFIX = ''
CACHE_MIDDLEWARE_SECONDS = 600
CACHE_MIDDLEWARE_ALIAS = 'default'
####################
# COMMENTS #
####################
COMMENTS_ALLOW_PROFANITIES = False
# The profanities that will trigger a validation error in
# CommentDetailsForm.clean_comment. All of these should be in lowercase.
PROFANITIES_LIST = ()
##################
# AUTHENTICATION #
##################
AUTH_USER_MODEL = 'auth.User'
AUTHENTICATION_BACKENDS = ('django.contrib.auth.backends.ModelBackend',)
LOGIN_URL = '/accounts/login/'
|
[
"LOGOUT_URL = '/accounts/logout/'"
] | 2,324
|
lcc
|
python
| null |
d9aa0fae1843d2d73e9ac2308f31a3760b5ff6be9bc4face
|
|
#region Copyright & License Information
/*
* Copyright 2007-2015 The OpenRA Developers (see AUTHORS)
* This file is part of OpenRA, which is free software. It is made
* available to you under the terms of the GNU General Public License
* as published by the Free Software Foundation. For more information,
* see COPYING.
*/
#endregion
using System;
using System.Collections.Generic;
using System.Linq;
using OpenRA.Traits;
namespace OpenRA.Mods.Common.Traits
{
[Desc("Attach this to an actor (usually a building) to let it produce units or construct buildings.",
"If one builds another actor of this type, he will get a separate queue to create two actors",
"at the same time. Will only work together with the Production: trait.")]
	public class ProductionQueueInfo : ITraitInfo
	{
		[FieldLoader.Require]
		[Desc("What kind of production will be added (e.g. Building, Infantry, Vehicle, ...)")]
		public readonly string Type = null;
		[Desc("Group queues from separate buildings together into the same tab.")]
		public readonly string Group = null;
		[Desc("Only enable this queue for certain factions.")]
		public readonly HashSet<string> Factions = new HashSet<string>();
		[Desc("Should the prerequisite remain enabled if the owner changes?")]
		public readonly bool Sticky = true;
		[Desc("This value is used to translate the unit cost into build time.")]
		public readonly float BuildSpeed = 0.4f;
		[Desc("The build time is multiplied with this value on low power.")]
		public readonly int LowPowerSlowdown = 3;
		[Desc("Notification played when production is complete.",
			"The filename of the audio is defined per faction in notifications.yaml.")]
		public readonly string ReadyAudio = "UnitReady";
		[Desc("Notification played when you can't train another unit",
			"when the build limit exceeded or the exit is jammed.",
			"The filename of the audio is defined per faction in notifications.yaml.")]
		public readonly string BlockedAudio = "NoBuild";
		[Desc("Notification played when user clicks on the build palette icon.",
			"The filename of the audio is defined per faction in notifications.yaml.")]
		public readonly string QueuedAudio = "Training";
		[Desc("Notification played when player right-clicks on the build palette icon.",
			"The filename of the audio is defined per faction in notifications.yaml.")]
		public readonly string OnHoldAudio = "OnHold";
		[Desc("Notification played when player right-clicks on a build palette icon that is already on hold.",
			"The filename of the audio is defined per faction in notifications.yaml.")]
		public readonly string CancelledAudio = "Cancelled";
		// The queue state is tracked per-player, so the trait instance is bound to
		// the owner's PlayerActor rather than to the producing actor itself.
		public virtual object Create(ActorInitializer init) { return new ProductionQueue(init, init.Self.Owner.PlayerActor, this); }
	}
	public class ProductionQueue : IResolveOrder, ITick, ITechTreeElement, INotifyOwnerChanged, INotifyKilled, INotifySold, ISync, INotifyTransform
	{
		public readonly ProductionQueueInfo Info;
		readonly Actor self;
		// A list of things we could possibly build
		readonly Dictionary<ActorInfo, ProductionState> produceable = new Dictionary<ActorInfo, ProductionState>();
		readonly List<ProductionItem> queue = new List<ProductionItem>();
		// Deferred LINQ views over `produceable` (see the constructor); enumerating
		// them always reflects the current Buildable/Visible flags.
		readonly IEnumerable<ActorInfo> allProduceables;
		readonly IEnumerable<ActorInfo> buildableProduceables;
		// Will change if the owner changes
		PowerManager playerPower;
		PlayerResources playerResources;
		protected DeveloperMode developerMode;
		public Actor Actor { get { return self; } }
		// [Sync] members participate in the multiplayer state checksum; they all
		// describe the item at the head of the queue (or a neutral value when empty).
		[Sync] public int QueueLength { get { return queue.Count; } }
		[Sync] public int CurrentRemainingCost { get { return QueueLength == 0 ? 0 : queue[0].RemainingCost; } }
		[Sync] public int CurrentRemainingTime { get { return QueueLength == 0 ? 0 : queue[0].RemainingTime; } }
		[Sync] public int CurrentSlowdown { get { return QueueLength == 0 ? 0 : queue[0].Slowdown; } }
		[Sync] public bool CurrentPaused { get { return QueueLength != 0 && queue[0].Paused; } }
		[Sync] public bool CurrentDone { get { return QueueLength != 0 && queue[0].Done; } }
		[Sync] public bool Enabled { get; private set; }
		public string Faction { get; private set; }
		public ProductionQueue(ActorInitializer init, Actor playerActor, ProductionQueueInfo info)
		{
			self = init.Self;
			Info = info;
			// Player-level traits live on the owner's PlayerActor, not on the producer.
			playerResources = playerActor.Trait<PlayerResources>();
			playerPower = playerActor.Trait<PowerManager>();
			developerMode = playerActor.Trait<DeveloperMode>();
			// An explicit FactionInit overrides the owning player's faction.
			Faction = init.Contains<FactionInit>() ? init.Get<FactionInit, string>() : self.Owner.Faction.InternalName;
			Enabled = !info.Factions.Any() || info.Factions.Contains(Faction);
			CacheProduceables(playerActor);
			// Deliberately NOT materialized: these deferred queries re-evaluate the
			// produceable dictionary on every enumeration, so they always track the
			// latest tech-tree callbacks.
			allProduceables = produceable.Where(a => a.Value.Buildable || a.Value.Visible).Select(a => a.Key);
			buildableProduceables = produceable.Where(a => a.Value.Buildable).Select(a => a.Key);
		}
void ClearQueue()
{
if (queue.Count == 0)
return;
// Refund the current item
playerResources.GiveCash(queue[0].TotalCost - queue[0].RemainingCost);
queue.Clear();
}
		public void OnOwnerChanged(Actor self, Player oldOwner, Player newOwner)
		{
			// Refund and drop any in-progress production before switching sides.
			ClearQueue();
			// Rebind the player-level traits to the new owner's PlayerActor.
			playerPower = newOwner.PlayerActor.Trait<PowerManager>();
			playerResources = newOwner.PlayerActor.Trait<PlayerResources>();
			developerMode = newOwner.PlayerActor.Trait<DeveloperMode>();
			if (!Info.Sticky)
			{
				Faction = self.Owner.Faction.InternalName;
				Enabled = !Info.Factions.Any() || Info.Factions.Contains(Faction);
			}
			// Regenerate the produceables and tech tree state.
			// Order matters: deregister from the old tech tree before re-caching
			// against the new owner's tree.
			oldOwner.PlayerActor.Trait<TechTree>().Remove(this);
			CacheProduceables(newOwner.PlayerActor);
			newOwner.PlayerActor.Trait<TechTree>().Update();
		}
		// INotifyKilled: shut the queue down (with refund) when the producer dies.
		public void Killed(Actor killed, AttackInfo e) { if (killed == self) { ClearQueue(); Enabled = false; } }
		// INotifySold: production stops as soon as the sale begins; nothing extra on completion.
		public void Selling(Actor self) { ClearQueue(); Enabled = false; }
		public void Sold(Actor self) { }
		// INotifyTransform: a transform is treated like a sale for queue purposes.
		public void BeforeTransform(Actor self) { ClearQueue(); Enabled = false; }
		public void OnTransform(Actor self) { }
		public void AfterTransform(Actor self) { }
void CacheProduceables(Actor playerActor)
{
produceable.Clear();
if (!Enabled)
return;
var ttc = playerActor.Trait<TechTree>();
foreach (var a in AllBuildables(Info.Type))
{
var bi = a.TraitInfo<BuildableInfo>();
produceable.Add(a, new ProductionState());
ttc.Add(a.Name, bi.Prerequisites, bi.BuildLimit, this);
}
}
IEnumerable<ActorInfo> AllBuildables(string category)
{
return self.World.Map.Rules.Actors.Values
.Where(x =>
x.Name[0] != '^' &&
x.HasTraitInfo<BuildableInfo>() &&
x.TraitInfo<BuildableInfo>().Queue.Contains(category));
}
		// ITechTreeElement callbacks: the tech tree notifies us (by actor name) when
		// prerequisite or visibility state changes; we mirror it into `produceable`.
		public void PrerequisitesAvailable(string key)
		{
			produceable[self.World.Map.Rules.Actors[key]].Buildable = true;
		}
		public void PrerequisitesUnavailable(string key)
		{
			produceable[self.World.Map.Rules.Actors[key]].Buildable = false;
		}
		public void PrerequisitesItemHidden(string key)
		{
			produceable[self.World.Map.Rules.Actors[key]].Visible = false;
		}
		public void PrerequisitesItemVisible(string key)
		{
			produceable[self.World.Map.Rules.Actors[key]].Visible = true;
		}
		// The item at the head of the queue, or null when the queue is empty.
		public ProductionItem CurrentItem()
		{
			return queue.ElementAtOrDefault(0);
		}
		// NOTE(review): this returns the live internal list, so callers can observe
		// (and could mutate) the queue directly — confirm callers treat it as read-only.
		public IEnumerable<ProductionItem> AllQueued()
		{
			return queue;
		}
public virtual IEnumerable<ActorInfo> AllItems()
{
if (self.World.AllowDevCommands && developerMode.AllTech)
return produceable.Keys;
return allProduceables;
}
public virtual IEnumerable<ActorInfo> BuildableItems()
{
if (!Enabled)
return Enumerable.Empty<ActorInfo>();
if (self.World.AllowDevCommands && developerMode.AllTech)
return produceable.Keys;
return buildableProduceables;
}
public bool CanBuild(ActorInfo actor)
{
ProductionState ps;
if (!produceable.TryGetValue(actor, out ps))
return false;
return ps.Buildable || (self.World.AllowDevCommands && developerMode.AllTech);
}
		public virtual void Tick(Actor self)
		{
			// Evict head items that are no longer buildable (e.g. a prerequisite
			// was lost), refunding whatever had been invested in them.
			// FinishProduction is defined elsewhere in this class — presumably it
			// pops the head item; confirm before relying on that.
			while (queue.Count > 0 && BuildableItems().All(b => b.Name != queue[0].Item))
			{
				playerResources.GiveCash(queue[0].TotalCost - queue[0].RemainingCost); // refund what's been paid so far.
				FinishProduction();
			}
			// Only the head of the queue makes progress each tick.
			if (queue.Count > 0)
				queue[0].Tick(playerResources);
		}
		// Dispatch player orders aimed at this queue. Start/Pause/Cancel are the
		// only recognised order strings; anything else is silently ignored.
		public void ResolveOrder(Actor self, Order order)
		{
			if (!Enabled)
				return;
			var rules = self.World.Map.Rules;
			switch (order.OrderString)
			{
				case "StartProduction":
					{
						var unit = rules.Actors[order.TargetString];
						var bi = unit.TraitInfo<BuildableInfo>();
						if (!bi.Queue.Contains(Info.Type))
							return; /* Not built by this queue */
						var cost = unit.HasTraitInfo<ValuedInfo>() ? unit.TraitInfo<ValuedInfo>().Cost : 0;
						var time = GetBuildTime(order.TargetString);
						if (BuildableItems().All(b => b.Name != order.TargetString))
							return; /* you can't build that!! */
						// Check if the player is trying to build more units that they are allowed
						var fromLimit = int.MaxValue;
						if (!developerMode.AllTech && bi.BuildLimit > 0)
						{
							// Count both queued and already-fielded copies against the limit.
							var inQueue = queue.Count(pi => pi.Item == order.TargetString);
							var owned = self.Owner.World.ActorsWithTrait<Buildable>().Count(a => a.Actor.Info.Name == order.TargetString && a.Actor.Owner == self.Owner);
							fromLimit = bi.BuildLimit - (inQueue + owned);
							if (fromLimit <= 0)
								return;
						}
						var amountToBuild = Math.Min(fromLimit, order.ExtraData);
						for (var n = 0; n < amountToBuild; n++)
						{
							// hasPlayedSound is captured by the completion closure below so the
							// "ready"/"blocked" notification plays at most once per queued item.
							var hasPlayedSound = false;
							// BeginProduction and BuildUnit are defined elsewhere in this class.
							BeginProduction(new ProductionItem(this, order.TargetString, cost, playerPower, () => self.World.AddFrameEndTask(_ =>
							{
								// Buildings wait for manual placement; units are ejected immediately.
								var isBuilding = unit.HasTraitInfo<BuildingInfo>();
								if (isBuilding && !hasPlayedSound)
									hasPlayedSound = Game.Sound.PlayNotification(rules, self.Owner, "Speech", Info.ReadyAudio, self.Owner.Faction.InternalName);
								else if (!isBuilding)
								{
									if (BuildUnit(order.TargetString))
										Game.Sound.PlayNotification(rules, self.Owner, "Speech", Info.ReadyAudio, self.Owner.Faction.InternalName);
									else if (!hasPlayedSound && time > 0)
										hasPlayedSound = Game.Sound.PlayNotification(rules, self.Owner, "Speech", Info.BlockedAudio, self.Owner.Faction.InternalName);
								}
							})));
						}
						break;
					}
				case "PauseProduction":
					{
						// ExtraData != 0 means "pause"; 0 means "resume". Only the head item can be paused.
						if (queue.Count > 0 && queue[0].Item == order.TargetString)
							queue[0].Pause(order.ExtraData != 0);
						break;
					}
				case "CancelProduction":
					{
						CancelProduction(order.TargetString, order.ExtraData);
						break;
					}
			}
		}
public virtual int GetBuildTime(string unitString)
{
var unit = self.World.Map.Rules.Actors[unitString];
if (unit == null || !unit.HasTraitInfo<BuildableInfo>())
return 0;
if (self.World.AllowDevCommands && self.Owner.PlayerActor.Trait<DeveloperMode>().FastBuild)
return 0;
var time = unit.GetBuildTime() * Info.BuildSpeed;
return (int)time;
}
protected void CancelProduction(string itemName, uint numberToCancel)
{
|
[
"\t\t\tfor (var i = 0; i < numberToCancel; i++)"
] | 1,183
|
lcc
|
csharp
| null |
f928fa8a814e6601eba3952b11bcbc31cc18a3d2f086048f
|
|
from PyQt4 import QtCore,QtGui,Qt
import sys, os
from ui import design
from genericpath import isdir, isfile
from collections import OrderedDict
from src import Utils, showTags
from functools import partial
try:
    # QString.fromUtf8 only exists under PyQt4's API v1 string bindings;
    # fall back to the identity function when it is unavailable.
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    _fromUtf8 = lambda s: s
class WindowSource(QtGui.QMainWindow,design.Ui_Dialog):
    # Class-level attributes used as per-instance defaults; the methods below
    # rebind them on the instance as state changes.
    currentDir = "."        # directory currently shown in the active tree view
    clickedFile = ""        # full path of the last clicked *file*
    clickedFileOrDir = ""   # basename of the last clicked file or directory
    activeTreeview = 0      # 0 = first pane, 1 = second pane
    filter = ""             # current filename filter (note: shadows the builtin)
    ftpParams=[]
    def __init__(self,parent=None):
        # Build the designer-generated UI, wire all signals and show initial state.
        super(WindowSource,self).__init__(parent)
        self.setupUi(self)
        self.connectActions()
        print self.__class__.__name__ + " is initialized"
        # Parallel lists indexed by the active-treeview id (0 or 1).
        self.treeViews = [ self.treeView, self.treeView_2 ]
        self.fileSystemModels = [ self.fileSystemModel, self.fileSystemModel2 ]
        self.roots = [ self.root, self.root2 ]
        self.treeviewClicked(self.root)
        self.currentDirTxtLine2.setText(self.currentDir)
        self.changeActiveTreeview(0)
        self.showTagsOnMainWindow()
    def main(self):
        # Entry point after construction: make the main window visible.
        #self.showMaximized()
        self.show()
        print "window is showed"
    def connectActions(self):
        # Wire every widget signal to its handler. Grouped by concern below.
        #self.showDir.clicked.connect(self.doShowDir)
        # Path entry: pressing Enter navigates the corresponding pane.
        self.currentDirTxtLine.returnPressed.connect(lambda: self.doShowDir(0))
        self.currentDirTxtLine2.returnPressed.connect(lambda: self.doShowDir(1))
        self.newDirButton.triggered.connect(self.callNewDir)
        self.homeTreeView.clicked.connect(self.homeTreeviewClicked)
        # Clicking a pane makes it the active one and records the clicked entry.
        self.treeView.clicked.connect(lambda: self.changeActiveTreeview(0))
        self.treeView_2.clicked.connect(lambda: self.changeActiveTreeview(1))
        self.treeView.clicked.connect(self.changeclickedFileOrDir)
        self.treeView_2.clicked.connect(self.changeclickedFileOrDir)
        self.treeView.doubleClicked.connect(self.treeviewClicked)
        self.treeView_2.doubleClicked.connect(self.treeviewClicked)
        # Toolbar/menu actions.
        self.newFileButton.triggered.connect(self.callNewFile)
        self.parentDir.triggered.connect(self.showParentDir)
        self.openFileButton.triggered.connect(self.callOpenFile)
        self.renameButton.triggered.connect(self.callRename)
        self.deleteButton.triggered.connect(self.callDelete)
        self.fileTypeButton.triggered.connect(self.callFileTypeInfo)
        self.bookmarkButton.triggered.connect(self.callAddToBookmarks)
        self.bookmarkListButton.triggered.connect(self.callListBookmarks)
        self.ftpConnectionButton.triggered.connect(self.callFtp)
        self.createTagButton.triggered.connect(self.callCreateTag)
        self.searchButton.triggered.connect(self.search)
        self.aboutButton.triggered.connect(self.about)
        # Custom right-click context menu on both panes.
        self.treeView.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
        self.treeView.customContextMenuRequested.connect(self.rightClickMenu)
        self.treeView_2.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
        self.treeView_2.customContextMenuRequested.connect(self.rightClickMenu)
        # Live filename filtering.
        self.filterTxtLine.textChanged.connect(self.setFilter)
def showTagsOnMainWindow(self):
tagObj = showTags.showTags()
self.tags = tagObj.getTags()
buts = {}
colorList = []
for i in reversed(self.tags):
buts.update({i['name'] : i['color']})
colorList.append(i['color'])
self.buttons = []
i=0
for name, color in buts.items():
self.buttons.append(QtGui.QPushButton("#"+name, self))
width = self.buttons[-1].fontMetrics().boundingRect(name).width() + 20
self.buttons[-1].setMaximumWidth(width)
self.buttons[-1].clicked.connect(partial(self.callClickedTag, data=name))
self.buttons[-1].setStyleSheet("QPushButton { background-color : transparent; color : "+color+"; }")
self.buttons[-1].setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.tagButtons.addWidget(self.buttons[-1])
i += 1
def clearTagsOnMainWindow(self):
for i in self.buttons:
i.setParent(None)
    def callClickedTag(self, x, data):
        # Open the tag-paths dialog for tag `data`; if the user picked a path,
        # navigate the active pane there. The tag bar is rebuilt either way.
        # `x` is the checked-bool emitted by QPushButton.clicked and is ignored.
        import showTagsPaths
        self.newTagPath = ""
        tag = showTagsPaths.showTagsPaths(data)
        if tag.changePath:
            if self.activeTreeview == 0:
                self.currentDirTxtLine.setText(tag.newTagPath)
            elif self.activeTreeview == 1:
                self.currentDirTxtLine2.setText(tag.newTagPath)
            self.doShowDir(self.activeTreeview)
        self.clearTagsOnMainWindow()
        self.showTagsOnMainWindow()
    def search(self):
        # Open the search dialog rooted at the current directory; if the user
        # selected a result path, navigate the active pane to it.
        import searchFile
        self.newSearchPath = ""
        sf = searchFile.searchFile(self.currentDir)
        if sf.changePath:
            if self.activeTreeview==0:
                self.currentDirTxtLine.setText(sf.newSearchPath)
            elif self.activeTreeview==1:
                self.currentDirTxtLine2.setText(sf.newSearchPath)
            self.doShowDir(self.activeTreeview)
    def about(self):
        # Show the About dialog.
        import about
        about.aboutDialog()
    def setFilter(self):
        # Apply the filename filter typed in the filter line to the active pane.
        self.filter = unicode( self.filterTxtLine.text() )
        self.doShowDir(self.activeTreeview)
    def changeActiveTreeview(self, i):
        # Switch the active pane (0 or 1): sync currentDir from that pane's path
        # line, grey out the inactive pane's path line, refresh the dir info box.
        self.activeTreeview = i
        print "active treeview is now " + str(i)
        if i==0:
            self.currentDir = self.currentDirTxtLine.text()
            self.currentDirTxtLine2.setStyleSheet("QLineEdit { background-color : #ccc; color : #999; }")
            self.currentDirTxtLine.setStyleSheet("")
        elif i==1:
            self.currentDir = self.currentDirTxtLine2.text()
            self.currentDirTxtLine.setStyleSheet("QLineEdit { background-color : #ccc; color : #999; }")
            self.currentDirTxtLine2.setStyleSheet("")
        self.showCurrentDirInfo()
    def rightClickMenu(self, pos):
        # Build and show the context menu for the active pane. actionsList maps
        # menu label -> handler method name; the chosen action is dispatched via
        # getattr on this object.
        print "right clicked"
        menu = QtGui.QMenu()
        actionsList = OrderedDict((('Open', 'callOpenFile'), ('Copy', 'copyFile'), ('Cut' , 'cutFile'), ('Paste', 'pasteFile'), ('Rename', 'callRename'), ('Delete', 'callDelete'), ('Add to Bookmarks', 'callAddToBookmarks'), ('Add Tag', 'callAddToTags'), ('File Type Info', 'callFileTypeInfo'), ('Properties', 'callProperties')))
        # Handlers after which a separator line is inserted in the menu.
        seperatorAfterThis = ['callOpenFile', 'pasteFile', 'callDelete', 'callAddToTags']
        actions = []
        actionFunctions = []
        for k,v in actionsList.iteritems():
            actions.append(menu.addAction(k))
            actionFunctions.append(v)
            if v in seperatorAfterThis:
                menu.addSeparator()
        # exec_ blocks until the user picks an entry (or dismisses the menu).
        action = menu.exec_(self.treeViews[self.activeTreeview].mapToGlobal(pos))
        for i in range(0, len(actions)):
            if action == actions[i]:
                getattr(self, actionFunctions[i])()
    def copyFile(self):
        # Remember a pending copy as [mode, absolute-path]; consumed by pasteFile().
        self.copyCutFile = ['copy', self.currentDir + "/" + self.clickedFileOrDir]
        print self.clickedFileOrDir + " file set to be copied"
    def cutFile(self):
        # Remember a pending cut as [mode, absolute-path]; consumed by pasteFile().
        self.copyCutFile = ['cut', self.currentDir + "/" + self.clickedFileOrDir]
        print self.clickedFileOrDir + " file set to be cut"
def pasteFile(self):
import copyCutPaste
if self.activeTreeview == 0:
p = unicode(self.currentDirTxtLine.text())
elif self.activeTreeview == 1:
p = unicode(self.currentDirTxtLine2.text())
copyCutPaste.copyCutPaste(self.copyCutFile[0], self.copyCutFile[1], p)
    def callFileTypeInfo(self):
        # Show type information for the last clicked entry.
        import fileTypeInfo
        fileTypeInfo.fileTypeInfo(self.clickedFileOrDir)
    def callAddToBookmarks(self):
        # Bookmark the last clicked entry (absolute path).
        import addToBookmarks
        addToBookmarks.addToBookmarks(self.currentDir + "/" + self.clickedFileOrDir)
    def callListBookmarks(self):
        # Show the bookmark list; if the user picked one, navigate the active pane.
        self.newBookmarkPath = ""
        import showBookmarksList
        bm = showBookmarksList.showBookmarksList(self.currentDir + "/" + self.clickedFileOrDir)
        if bm.changePath:
            if self.activeTreeview==0:
                self.currentDirTxtLine.setText(bm.newBookmarkPath)
            elif self.activeTreeview==1:
                self.currentDirTxtLine2.setText(bm.newBookmarkPath)
            self.doShowDir(self.activeTreeview)
    def callDelete(self):
        # Delete the last clicked entry (delegates confirmation to the dialog).
        import deleteFileDir
        deleteFileDir.deleteFileDir(self.currentDir + "/" + self.clickedFileOrDir)
    def callRename(self):
        # Rename the last clicked entry via the rename dialog.
        import renameFileDir
        renameFileDir.renameFileDir(self.currentDir + "/" + self.clickedFileOrDir)
    def callOpenFile(self):
        # Open the last clicked file (no-op for directories / stale paths).
        print "to open"
        import openFile
        from os.path import isfile
        toOpenFile = self.clickedFile
        if(isfile(toOpenFile)):
            openFile.openFile(toOpenFile)
    def callAddToTags(self):
        # Attach an existing tag to the last clicked entry.
        import addToTags
        addToTags.addToTags(self.currentDir + "/" + self.clickedFileOrDir)
    def callCreateTag(self):
        # Create a new tag for the last clicked entry, then rebuild the tag bar.
        import createTag
        newTag = createTag.createTag(self.currentDir + "/" + self.clickedFileOrDir)
        newTag.showNewTagDialog()
        self.clearTagsOnMainWindow()
        self.showTagsOnMainWindow()
    def callProperties(self):
        # Show the properties dialog for the last clicked entry.
        import properties
        properties.properties(self.currentDir + "/" + self.clickedFileOrDir)
    def callNewFile(self):
        # Create a new file in the current directory via its dialog.
        import newFile
        newFile.newFile(self.currentDir)
    def callNewDir(self):
        # Create a new directory in the current directory via its dialog.
        import newDir
        newDir.newDir(self.currentDir)
    def callFtp(self):
        # Open the FTP connection dialog and navigate the active pane to the
        # path it returns.
        import ftpConn
        f = ftpConn.ftpConn()
        if self.activeTreeview==0:
            self.currentDirTxtLine.setText(f.getPath())
        elif self.activeTreeview==1:
            self.currentDirTxtLine2.setText(f.getPath())
        self.doShowDir(self.activeTreeview)
    def callShowDir(self):
        # Resolve the path typed in the active pane's line edit through the
        # showDir helper and adopt the result as the current directory.
        import showDir
        if self.activeTreeview==0:
            self.currentDir = showDir.showDir(self.currentDirTxtLine.text())
        elif self.activeTreeview==1:
            self.currentDir = showDir.showDir(self.currentDirTxtLine2.text())
    def showParentDir(self):
        # Navigate the active pane one directory up (string-level rsplit on '/').
        self.clickedFileOrDir = ""
        parentDir = str(self.currentDir).rsplit('/',1)[0]
        if(isdir(parentDir)):
            # Re-root the active pane's model/view at the parent directory.
            self.roots[self.activeTreeview] = self.fileSystemModels[self.activeTreeview].setRootPath(parentDir)
            self.treeViews[self.activeTreeview].setModel(self.fileSystemModels[self.activeTreeview])
            self.treeViews[self.activeTreeview].setRootIndex(self.roots[self.activeTreeview])
            self.currentDir = parentDir
            if self.activeTreeview==0:
                self.currentDirTxtLine.setText(self.currentDir)
            elif self.activeTreeview==1:
                self.currentDirTxtLine2.setText(self.currentDir)
            self.showCurrentDirInfo()
        else:
            print parentDir + " is not a directory"
    def doShowDir(self, tv):
        # Re-root pane `tv` at the directory typed in its path line, applying
        # the current filename filter. Invalid paths are reported, not raised.
        self.activeTreeview = tv
        if self.activeTreeview==0:
            newDir = unicode(self.currentDirTxtLine.text())
        elif self.activeTreeview==1:
            newDir = unicode(self.currentDirTxtLine2.text())
        self.clickedFileOrDir = ""
        if(isdir(newDir)):
            # Filter matches names by prefix; non-matching entries are hidden,
            # not disabled.
            self.fileSystemModels[self.activeTreeview].setNameFilters([self.filter+"*"])
            self.fileSystemModels[self.activeTreeview].setNameFilterDisables(False)
            self.roots[self.activeTreeview] = self.fileSystemModels[self.activeTreeview].setRootPath(newDir)
            self.treeViews[self.activeTreeview].setModel(self.fileSystemModels[self.activeTreeview])
            self.treeViews[self.activeTreeview].setRootIndex(self.roots[self.activeTreeview])
            self.currentDir = newDir
            self.changeActiveTreeview(tv)
        else:
            print unicode(newDir) + " is not a directory"
    def treeviewClicked(self, index):
        # Double-click handler: descend into a directory, or open a file.
        print "> " + unicode(self.fileSystemModels[self.activeTreeview].filePath(index))
        newPath = self.fileSystemModels[self.activeTreeview].filePath(index)
        print "new path is " + unicode(newPath)
        if isdir(newPath):
            self.currentDir = newPath
            if self.activeTreeview==0:
                self.currentDirTxtLine.setText(self.currentDir)
            elif self.activeTreeview==1:
                self.currentDirTxtLine2.setText(self.currentDir)
            self.doShowDir(self.activeTreeview)
        elif isfile(newPath):
            self.clickedFile = newPath
            self.callOpenFile()
    def homeTreeviewClicked(self, index):
        # Click in the "home" side view: navigate the active pane to that path.
        newPath = unicode(self.fileSystemModel3.filePath(index))
        print "new path is set to" + newPath + " by home treeview"
        self.currentDir = newPath
        if self.activeTreeview==0:
            self.currentDirTxtLine.setText(self.currentDir)
        elif self.activeTreeview==1:
            self.currentDirTxtLine2.setText(self.currentDir)
        self.doShowDir(self.activeTreeview)
    def changeclickedFileOrDir(self, index):
        # Single-click handler: record the clicked entry's basename (and full
        # path when it is a file) and toggle the image-preview panel.
        self.clickedFileOrDir = unicode(self.fileSystemModels[self.activeTreeview].filePath(index)).rsplit('/')[-1]
        from genericpath import isfile
        if isfile(self.currentDir + "/" + self.clickedFileOrDir):
            self.clickedFile = self.currentDir + "/" + self.clickedFileOrDir
        ##elif isdir(self.clickedFileOrDir):
        #	self.currentDir = self.clickedFileOrDir
        print self.clickedFileOrDir + " is clicked"
        # Show an inline preview when the helper recognises the file as an image.
        from preview import preview
        preImg = preview()
        if preImg.showPreview(self.currentDir + "/" + self.clickedFileOrDir):
            self.imageLabel.setPixmap(QtGui.QPixmap.fromImage(QtGui.QImage(self.currentDir + "/" + self.clickedFileOrDir)))
            self.imageLabel.setVisible(True)
            self.scrollArea.setVisible(True)
            self.previewLabel.setVisible(True)
        else:
            self.imageLabel.setVisible(False)
            self.scrollArea.setVisible(False)
            self.previewLabel.setVisible(False)
def showCurrentDirInfo(self):
numberOfFiles = len([item for item in os.listdir(unicode(self.currentDir)) if not item[0] == '.' and os.path.isfile(os.path.join(unicode(self.currentDir), item))])
numberOfDirs = len([item for item in os.listdir(unicode(self.currentDir)) if not item[0] == '.' and os.path.isdir(os.path.join(unicode(self.currentDir), item))])
numberOfHiddenFiles = len([item for item in os.listdir(unicode(self.currentDir)) if item[0] == '.' and os.path.isfile(os.path.join(unicode(self.currentDir), item))])
numberOfHiddenDirs = len([item for item in os.listdir(unicode(self.currentDir)) if item[0] == '.' and os.path.isdir(os.path.join(unicode(self.currentDir), item))])
infoText = "<u>" + Utils.getFileNameFromFullPath(unicode(self.currentDir)) + "</u><br><br>"
infoText += str(numberOfDirs)
infoText += " directory" if numberOfDirs==1 else " directories"
infoText += "<br>"
if numberOfHiddenDirs>0:
infoText += "(+" + str(numberOfHiddenDirs) + " hidden "
infoText += "directory" if numberOfHiddenDirs==1 else "directories"
infoText += ")<br>"
infoText += str(numberOfFiles)
|
[
" infoText += \" file\" if numberOfFiles==1 else \" files\" "
] | 886
|
lcc
|
python
| null |
a6ff01b3019b5f686be4e5365d4e5986aec163213a88b95e
|
|
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2010 (ita)
"""
Task generators
The class :py:class:`waflib.TaskGen.task_gen` encapsulates the creation of task objects (low-level code)
The instances can have various parameters, but the creation of task nodes (Task.py)
is always postponed. To achieve this, various methods are called from the method "apply"
"""
import copy, re
from waflib import Task, Utils, Logs, Errors, ConfigSet
feats = Utils.defaultdict(set)
"""remember the methods declaring features"""
class task_gen(object):
	"""
	Instances of this class create :py:class:`waflib.Task.TaskBase` when
	calling the method :py:meth:`waflib.TaskGen.task_gen.post` from the main thread.
	A few notes:
	* The methods to call (*self.meths*) can be specified dynamically (removing, adding, ..)
	* The 'features' are used to add methods to self.meths and then execute them
	* The attribute 'path' is a node representing the location of the task generator
	* The tasks created are added to the attribute *tasks*
	* The attribute 'idx' is a counter of task generators in the same path
	"""
	# Class-level fallbacks shared by all instances; get_hook and post consult
	# the instance-level tables first, then these.
	mappings = {}
	prec = Utils.defaultdict(list)
	def __init__(self, *k, **kw):
		"""
		The task generator objects predefine various attributes (source, target) for possible
		processing by process_rule (make-like rules) or process_source (extensions, misc methods)
		The tasks are stored on the attribute 'tasks'. They are created by calling methods
		listed in self.meths *or* referenced in the attribute features
		A topological sort is performed to ease the method re-use.
		The extra key/value elements passed in kw are set as attributes
		"""
		# so we will have to play with directed acyclic graphs
		# detect cycles, etc
		self.source = ''
		self.target = ''
		self.meths = []
		"""
		List of method names to execute (it is usually a good idea to avoid touching this)
		"""
		self.prec = Utils.defaultdict(list)
		"""
		Precedence table for sorting the methods in self.meths
		"""
		self.mappings = {}
		"""
		List of mappings {extension -> function} for processing files by extension
		"""
		self.features = []
		"""
		List of feature names for bringing new methods in
		"""
		self.tasks = []
		"""
		List of tasks created.
		"""
		if not 'bld' in kw:
			# task generators without a build context :-/
			self.env = ConfigSet.ConfigSet()
			self.idx = 0
			self.path = None
		else:
			self.bld = kw['bld']
			self.env = self.bld.env.derive()
			self.path = self.bld.path # emulate chdir when reading scripts
			# provide a unique id: a per-path counter stored on the build context
			# (created lazily on first use, hence the AttributeError fallback)
			try:
				self.idx = self.bld.idx[id(self.path)] = self.bld.idx.get(id(self.path), 0) + 1
			except AttributeError:
				self.bld.idx = {}
				self.idx = self.bld.idx[id(self.path)] = 1
		for key, val in kw.items():
			setattr(self, key, val)
	def __str__(self):
		"""for debugging purposes"""
		return "<task_gen %r declared in %s>" % (self.name, self.path.abspath())
	def __repr__(self):
		"""for debugging purposes"""
		lst = []
		for x in self.__dict__.keys():
			if x not in ['env', 'bld', 'compiled_tasks', 'tasks']:
				lst.append("%s=%s" % (x, repr(getattr(self, x))))
		return "bld(%s) in %s" % (", ".join(lst), self.path.abspath())
	def get_name(self):
		"""
		If not set, the name is computed from the target name::
			def build(bld):
				x = bld(name='foo')
				x.get_name() # foo
				y = bld(target='bar')
				y.get_name() # bar
		:rtype: string
		:return: name of this task generator
		"""
		try:
			return self._name
		except AttributeError:
			if isinstance(self.target, list):
				lst = [str(x) for x in self.target]
				name = self._name = ','.join(lst)
			else:
				name = self._name = str(self.target)
			return name
	def set_name(self, name):
		self._name = name
	name = property(get_name, set_name)
	def to_list(self, val):
		"""
		Ensure that a parameter is a list
		:type val: string or list of string
		:param val: input to return as a list
		:rtype: list
		"""
		if isinstance(val, str): return val.split()
		else: return val
	def post(self):
		"""
		Create task objects. The following operations are performed:
		#. The body of this method is called only once and sets the attribute ``posted``
		#. The attribute ``features`` is used to add more methods in ``self.meths``
		#. The methods are sorted by the precedence table ``self.prec`` or `:waflib:attr:waflib.TaskGen.task_gen.prec`
		#. The methods are then executed in order
		#. The tasks created are added to :py:attr:`waflib.TaskGen.task_gen.tasks`
		"""
		# we could add a decorator to let the task run once, but then python 2.3 will be difficult to support
		if getattr(self, 'posted', None):
			#error("OBJECT ALREADY POSTED" + str( self))
			return False
		self.posted = True
		keys = set(self.meths)
		# add the methods listed in the features
		self.features = Utils.to_list(self.features)
		for x in self.features + ['*']:
			st = feats[x]
			if not st:
				if not x in Task.classes:
					Logs.warn('feature %r does not exist - bind at least one method to it' % x)
			keys.update(list(st)) # ironpython 2.7 wants the cast to list
		# copy the precedence table; the instance table wins when non-empty
		prec = {}
		prec_tbl = self.prec or task_gen.prec
		for x in prec_tbl:
			if x in keys:
				prec[x] = prec_tbl[x]
		# elements disconnected (no incoming precedence edge) seed the sort
		tmp = []
		for a in keys:
			for x in prec.values():
				if a in x: break
			else:
				tmp.append(a)
		# TODO waf 1.7
		#tmp.sort()
		# topological sort (Kahn-style: repeatedly emit nodes whose predecessors
		# are all processed; anything left in `prec` afterwards forms a cycle)
		out = []
		while tmp:
			e = tmp.pop()
			if e in keys: out.append(e)
			try:
				nlst = prec[e]
			except KeyError:
				pass
			else:
				del prec[e]
				for x in nlst:
					for y in prec:
						if x in prec[y]:
							break
					else:
						tmp.append(x)
		if prec:
			raise Errors.WafError('Cycle detected in the method execution %r' % prec)
		out.reverse()
		self.meths = out
		# then we run the methods in order
		Logs.debug('task_gen: posting %s %d' % (self, id(self)))
		for x in out:
			try:
				v = getattr(self, x)
			except AttributeError:
				raise Errors.WafError('%r is not a valid task generator method' % x)
			Logs.debug('task_gen: -> %s (%d)' % (x, id(self)))
			v()
		Logs.debug('task_gen: posted %s' % self.name)
		return True
	def get_hook(self, node):
		"""
		:param node: Input file to process
		:type node: :py:class:`waflib.Tools.Node.Node`
		:return: A method able to process the input node by looking at the extension
		:rtype: function
		"""
		# instance-level mappings take precedence over the class-level table
		name = node.name
		for k in self.mappings:
			if name.endswith(k):
				return self.mappings[k]
		for k in task_gen.mappings:
			if name.endswith(k):
				return task_gen.mappings[k]
		raise Errors.WafError("File %r has no mapping in %r (did you forget to load a waf tool?)" % (node, task_gen.mappings.keys()))
	def create_task(self, name, src=None, tgt=None):
		"""
		Wrapper for creating task objects easily
		:param name: task class name
		:type name: string
		:param src: input nodes
		:type src: list of :py:class:`waflib.Tools.Node.Node`
		:param tgt: output nodes
		:type tgt: list of :py:class:`waflib.Tools.Node.Node`
		:return: A task object
		:rtype: :py:class:`waflib.Task.TaskBase`
		"""
		task = Task.classes[name](env=self.env.derive(), generator=self)
		if src:
			task.set_inputs(src)
		if tgt:
			task.set_outputs(tgt)
		self.tasks.append(task)
		return task
	def clone(self, env):
		"""
		Make a copy of a task generator. Once the copy is made, it is necessary to ensure that the
		task generator does not create the same output files as the original, or the same files may
		be compiled twice.
		:param env: A configuration set
		:type env: :py:class:`waflib.ConfigSet.ConfigSet`
		:return: A copy
		:rtype: :py:class:`waflib.TaskGen.task_gen`
		"""
		newobj = self.bld()
		for x in self.__dict__:
			if x in ['env', 'bld']:
				continue
			elif x in ['path', 'features']:
				# shared by reference on purpose; everything else is shallow-copied
				setattr(newobj, x, getattr(self, x))
			else:
				setattr(newobj, x, copy.copy(getattr(self, x)))
		newobj.posted = False
		if isinstance(env, str):
			newobj.env = self.bld.all_envs[env].derive()
		else:
			newobj.env = env.derive()
		return newobj
def declare_chain(name='', rule=None, reentrant=True, color='BLUE',
		ext_in=[], ext_out=[], before=[], after=[], decider=None, scan=None, install_path=None, shell=False):
	"""
	Create a new mapping and a task class for processing files by extension.
	See Tools/flex.py for an example.
	:param name: name for the task class
	:type name: string
	:param rule: function to execute or string to be compiled in a function
	:type rule: string or function
	:param reentrant: re-inject the output file in the process
		(used as a count via ``range(reentrant)``; ``True`` re-injects one output)
	:type reentrant: bool or int
	:param color: color for the task output
	:type color: string
	:param ext_in: execute the task only after the files of such extensions are created
	:type ext_in: list of string
	:param ext_out: execute the task only before files of such extensions are processed
	:type ext_out: list of string
	:param before: execute instances of this task before classes of the given names
	:type before: list of string
	:param after: execute instances of this task after classes of the given names
	:type after: list of string
	:param decider: if present, use it to create the output nodes for the task
	:type decider: function
	:param scan: scanner function for the task
	:type scan: function
	:param install_path: installation path for the output nodes
	:type install_path: string
	:param shell: execute the rule through the system shell
	:type shell: bool
	"""
	ext_in = Utils.to_list(ext_in)
	ext_out = Utils.to_list(ext_out)
	if not name:
		name = rule
	cls = Task.task_factory(name, rule, color=color, ext_in=ext_in, ext_out=ext_out, before=before, after=after, scan=scan, shell=shell)
	def x_file(self, node):
		# output extensions: the decider (if any) wins, else the class default
		ext = decider and decider(self, node) or cls.ext_out
		# NOTE(review): _ext_in is only bound when ext_in is non-empty; the
		# change_ext call below would raise NameError otherwise — confirm that
		# all callers pass a non-empty ext_in.
		if ext_in:
			_ext_in = ext_in[0]
		out_source = [node.change_ext(x, ext_in=_ext_in) for x in ext]
		if reentrant:
			for i in range(reentrant):
				self.source.append(out_source[i])
		tsk = self.create_task(name, node, out_source)
		if install_path:
			self.bld.install_files(install_path, out_source)
		return tsk
	# register the hook for every input extension on the class-level table
	for x in cls.ext_in:
		task_gen.mappings[x] = x_file
	return x_file
def taskgen_method(func):
	"""
	Decorator that attaches a function to :py:class:`waflib.TaskGen.task_gen`
	as a method, so any task generator instance can call it::

		from waflib.TaskGen import taskgen_method
		@taskgen_method
		def mymethod(self):
			pass

	:param func: function taking a task generator as first argument
	:type func: function
	:rtype: function (the same object, unchanged)
	"""
	method_name = func.__name__
	setattr(task_gen, method_name, func)
	return func
def feature(*k):
	"""
	Decorator that attaches the function to the task generator class and
	registers it so that it runs whenever a task generator's ``features``
	attribute contains one of the given names::

		from waflib.Task import feature
		@feature('myfeature')
		def myfunction(self):
			print('that is my feature!')

	:param k: feature names
	:type k: list of string
	"""
	def register(func):
		setattr(task_gen, func.__name__, func)
		for feat_name in k:
			feats[feat_name].update([func.__name__])
		return func
	return register
def before_method(*k):
	"""
	Decorator that attaches the function to the task generator class and
	constrains it to execute before the named methods::

		from waflib.TaskGen import feature, before
		@feature('myfeature')
		@before_method('fun2')
		def fun1(self):
			print('feature 1!')

	:param k: names of methods that must run after this one
	:type k: list of string
	"""
	def wrapper(func):
		setattr(task_gen, func.__name__, func)
		me = func.__name__
		for other in k:
			predecessors = task_gen.prec[other]
			if me not in predecessors:
				predecessors.append(me)
		return func
	return wrapper
before = before_method
def after_method(*k):
	"""
	Decorator that attaches the function to the task generator class and
	constrains it to execute after the named methods::

		from waflib.TaskGen import feature, after
		@feature('myfeature')
		@after_method('fun2')
		def fun1(self):
			print('feature 1!')

	:param k: names of methods that must run before this one
	:type k: list of string
	"""
	def wrapper(func):
		setattr(task_gen, func.__name__, func)
		for fun_name in k:
			predecessors = task_gen.prec[func.__name__]
			if fun_name not in predecessors:
				predecessors.append(fun_name)
		return func
	return wrapper
after = after_method
def extension(*k):
	"""
	Decorator that binds a task generator method to one or more source file
	extensions; the method is invoked for every source node whose name ends
	with one of the given extensions::

		@extension('.moo')
		def create_maa_file(self, node):
			self.create_task('mytask', node, node.change_ext('.maa'))
	"""
	def bind(func):
		setattr(task_gen, func.__name__, func)
		for pattern in k:
			task_gen.mappings[pattern] = func
		return func
	return bind
# ---------------------------------------------------------------
# The following methods are task generator methods commonly used
# they are almost examples, the rest of waf core does not depend on them
@taskgen_method
def to_nodes(self, lst, path=None):
	"""
	Normalize the input into a list of nodes, resolving strings as source
	files relative to *path*. Used by :py:func:`waflib.TaskGen.process_source`
	and :py:func:`waflib.TaskGen.process_rule`; for folders see
	:py:func:`waflib.Tools.ccroot.to_incnodes`.

	:param lst: strings and/or nodes
	:type lst: list of string and nodes
	:param path: base node for lookups (defaults to ``self.path``)
	:type path: :py:class:`waflib.Tools.Node.Node`
	:rtype: list of :py:class:`waflib.Tools.Node.Node`
	:raises Errors.WafError: when a string does not resolve to a source file
	"""
	base = path or self.path
	# a bare node is wrapped into a one-element list
	if isinstance(lst, self.path.__class__):
		lst = [lst]
	nodes = []
	for item in Utils.to_list(lst):
		if isinstance(item, str):
			found = base.find_resource(item)
			if not found:
				raise Errors.WafError("source not found: %r in %r" % (item, self))
			nodes.append(found)
		else:
			nodes.append(item)
	return nodes
@feature('*')
def process_source(self):
	"""
	Dispatch each node in ``self.source`` to its extension handler.

	The source attribute is first normalized through
	:py:meth:`waflib.TaskGen.to_nodes`; the handler returned by ``get_hook``
	may append further nodes to ``self.source``, and those are processed as
	well since the list is iterated in place.
	"""
	self.source = self.to_nodes(getattr(self, 'source', []))
	for node in self.source:
		handler = self.get_hook(node)
		handler(self, node)
@feature('*')
@before_method('process_source')
def process_rule(self):
	"""
	Process the attribute ``rule``. When present, :py:meth:`waflib.TaskGen.process_source` is disabled::
		def build(bld):
			bld(rule='cp ${SRC} ${TGT}', source='wscript', target='bar.txt')
	"""
	if not getattr(self, 'rule', None):
		return
	# create the task class; the name falls back to target or the rule text
	name = str(getattr(self, 'name', None) or self.target or self.rule)
	cls = Task.task_factory(name, self.rule,
		getattr(self, 'vars', []),
		shell=getattr(self, 'shell', True), color=getattr(self, 'color', 'BLUE'))
	# now create one instance
	tsk = self.create_task(name)
	# targets may be strings (declared under self.path) or nodes
	if getattr(self, 'target', None):
		if isinstance(self.target, str):
			self.target = self.target.split()
		if not isinstance(self.target, list):
			self.target = [self.target]
		for x in self.target:
			if isinstance(x, str):
				tsk.outputs.append(self.path.find_or_declare(x))
			else:
				x.parent.mkdir() # if a node was given, create the required folders
				tsk.outputs.append(x)
		if getattr(self, 'install_path', None):
			# from waf 1.5
			# although convenient, it does not 1. allow to name the target file and 2. symlinks
			# TODO remove in waf 1.7
			self.bld.install_files(self.install_path, tsk.outputs)
	if getattr(self, 'source', None):
		tsk.inputs = self.to_nodes(self.source)
		# bypass the execution of process_source by setting the source to an empty list
		self.source = []
	if getattr(self, 'scan', None):
		# the scanner is installed on the class, shared by all instances
		cls.scan = self.scan
	if getattr(self, 'cwd', None):
		tsk.cwd = self.cwd
	# TODO remove on_results in waf 1.7
	if getattr(self, 'update_outputs', None) or getattr(self, 'on_results', None):
		Task.update_outputs(cls)
	if getattr(self, 'always', None):
		Task.always_run(cls)
	# propagate ordering constraints from the task generator to the task class
	for x in ['after', 'before', 'ext_in', 'ext_out']:
		setattr(cls, x, getattr(self, x, []))
@feature('seq')
def sequence_order(self):
	"""
	Add a strict sequential constraint between the tasks generated by task generators.
	It works because task generators are posted in order.
	It will not post objects which belong to other folders.
	Example::
		bld(features='javac seq')
		bld(features='jar seq')
	To start a new sequence, set the attribute seq_start, for example::
		obj = bld(features='seq')
		obj.seq_start = True
	Note that the method is executed in last position. This is more an
	example than a widely-used solution.
	"""
	# re-queue this method so it runs after all other methods of this
	# task generator (it must see the complete self.tasks list)
	if self.meths and self.meths[-1] != 'sequence_order':
		self.meths.append('sequence_order')
		return
	# a generator marked seq_start begins a new chain: no constraint added
	if getattr(self, 'seq_start', None):
		return
	# all the tasks previously declared must be run before these
	if getattr(self.bld, 'prev', None):
		self.bld.prev.post()
		for x in self.bld.prev.tasks:
			for y in self.tasks:
				y.set_run_after(x)
	# remember this generator for the next one in the sequence
	self.bld.prev = self
# Matches @VAR@ substitution tokens. Raw string avoids the invalid
# escape-sequence DeprecationWarning for '\w' in non-raw literals.
re_m4 = re.compile(r'@(\w+)@', re.M)
class subst_pc(Task.Task):
"""
Create *.pc* files from *.pc.in*. The task is executed whenever an input variable used
in the substitution changes.
"""
def run(self):
"Substitutes variables in a .in file"
code = self.inputs[0].read()
# replace all % by %% to prevent errors by % signs
|
[
" code = code.replace('%', '%%')"
] | 2,292
|
lcc
|
python
| null |
c954057ff94e553216187b97466689a94cf01d35b64c1d11
|
|
#!/usr/bin/env python
'''
Created on Jan 28, 2016
@author: cme
'''
#****************************************************************
# \file
#
# \note
# Copyright (c) 2016 \n
# Fraunhofer Institute for Manufacturing Engineering
# and Automation (IPA) \n\n
#
#*****************************************************************
#
# \note
# Project name: Care-O-bot
# \note
# ROS stack name: ipa_pars
# \note
# ROS package name: ipa_pars_main
#
# \author
# Author: Christian Ehrmann
# \author
# Supervised by: Richard Bormann
#
# \date Date of creation: 01.2016
#
# \brief
#
#
#*****************************************************************
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# - Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer. \n
# - Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution. \n
# - Neither the name of the Fraunhofer Institute for Manufacturing
# Engineering and Automation (IPA) nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission. \n
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License LGPL as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License LGPL for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License LGPL along with this program.
# If not, see <http://www.gnu.org/licenses/>.
#
#****************************************************************/
import actionlib
import rospy
import sys
import cv2
import yaml
import os
from yaml import load
# from cv_bridge import CvBridge, CvBridgeError
from ipa_pars_main.msg._LogicPlanAction import *
from ipa_pars_main.msg._PlanSolverAction import *
from ipa_pars_main.msg._KnowledgeParserAction import *
from ipa_pars_main.msg._PlanExecutorAction import *
from std_msgs.msg import String
#from std_msgs import String[]
# import numpy as np
# from sensor_msgs.msg._Image import Image
# import sensor_msgs.msg
# from map_analyzer.srv import MapAnalyzer
# from cob_srvs.srv._SetString import SetString
# from map_analyzer.srv._MapAnalyzer import MapAnalyzerResponse
# from ipa_pars_main.srv._PlanData import PlanData, PlanDataRequest
class PlanningServer(object):
_feedback = ipa_pars_main.msg.LogicPlanFeedback()
_result = ipa_pars_main.msg.LogicPlanResult()
    def __init__(self):
        """
        Connect to the solver, knowledge-parser and executor action servers,
        load the static/dynamic knowledge bases from disk, then start the
        'planning_server' action server. Blocks until all three remote
        servers are available.
        """
        rospy.loginfo("Initialize PlanningServer ...")
        self._planningSolverClient = actionlib.SimpleActionClient('planning_solver_server', PlanSolverAction)
        rospy.logwarn("Waiting for PlanSolverServer to come available ...")
        self._planningSolverClient.wait_for_server()
        rospy.logwarn("PlanningSolverServer is online!")
        self._knowledgeParserClient = actionlib.SimpleActionClient('knowledge_parser_server', KnowledgeParserAction)
        rospy.logwarn("Waiting for KnowledgeParserServer to come available ...")
        self._knowledgeParserClient.wait_for_server()
        rospy.loginfo("Read static and dynamic knowledge from file")
        # knowledge is stored as YAML text (re-serialized from the parsed files)
        # NOTE(review): paths inside readKnowledgeBase are relative to the
        # process working directory -- confirm the node is launched from there
        self._static_knowledge = yaml.dump(self.readKnowledgeBase("static-knowledge-base.yaml"))
        self._dynamic_knowledge = yaml.dump(self.readKnowledgeBase("dynamic-knowledge-base.yaml"))
        rospy.logwarn("KnowledgeParserServer is online!")
        self._planningExecutorClient = actionlib.SimpleActionClient('planning_executor_server', PlanExecutorAction)
        rospy.logwarn("Waiting for PlanExecutorServer to come available ...")
        self._planningExecutorClient.wait_for_server()
        rospy.logwarn("PlanExecutorServer is online!")
        # auto_start=False: start() is called explicitly below
        self._as = actionlib.SimpleActionServer('planning_server', ipa_pars_main.msg.LogicPlanAction, execute_cb=self.execute_cb, auto_start=False)
        self._as.start()
        rospy.loginfo("PlanningServer running! Waiting for a new goal.")
def execute_cb(self, goal):
rospy.loginfo("Executing a new goal!")
rospy.loginfo("GOAL: %s , %s, %s " % (str(goal.goal_type), str(goal.what), str(goal.where)))
rospy.loginfo("in progress ...")
success = False
while not (success):
knowledge_parser_result = self.workOnKnowledge()
print knowledge_parser_result
planning_solver_result = self.workOnPlan(knowledge_parser_result.problem_pddl.data, knowledge_parser_result.domain_pddl.data)
print planning_solver_result
planning_executor_result = self.executeActionPlan(planning_solver_result.action_list)
print "This came back from PlanningExecutor:"
print planning_executor_result
self._dynamic_knowledge = planning_executor_result.dynamic_knowledge.data
if planning_executor_result.success:
success = True
break
print "i am sleeping now"
success = True
rospy.sleep(5)
#===========================
if self._as.is_preempt_requested():
rospy.loginfo('%s: Preempted' % 'pars_server')
success = False
if success:
self._result.success = True
rospy.loginfo("Succeeded the Logic Plan")
self._as.set_succeeded(self._result, "good job")
    def workOnKnowledge(self):
        """
        Send the stored static and dynamic knowledge to the
        KnowledgeParserServer and block until its result arrives.
        Returns the KnowledgeParserResult (used by the caller for its
        problem_pddl and domain_pddl fields).
        """
        knowledge_goal = ipa_pars_main.msg.KnowledgeParserGoal()
        knowledge_goal.static_knowledge.data = self._static_knowledge
        print knowledge_goal.static_knowledge.data
        knowledge_goal.dynamic_knowledge.data = self._dynamic_knowledge
        rospy.loginfo("Sending goal to KnowledgeParserServer ...")
        self._knowledgeParserClient.send_goal(knowledge_goal)
        rospy.loginfo("Waiting for result ...")
        self._knowledgeParserClient.wait_for_result()
        result = self._knowledgeParserClient.get_result()
        rospy.loginfo("Received the result from KnowledgeParserServer!")
        return result
def readKnowledgeBase(self, knowledge_yaml):
listOfInput = []
try:
if os.path.isdir("ipa_pars/knowledge/"):
fileObject = open("ipa_pars/knowledge/"+knowledge_yaml, "r")
yamlfile = load(fileObject)
fileObject.close()
return yamlfile
except IOError:
rospy.loginfo("Reading %s base failed!" % knowledge_yaml)
return None
    def workOnPlan(self, domain, problem):
        """
        Send a PDDL planning request to the PlanSolverServer and block
        until the solver returns.

        domain  -- PDDL domain description (note: FIRST parameter)
        problem -- PDDL problem description (SECOND parameter)
        Returns the PlanSolverResult (contains action_list).
        """
        goal = ipa_pars_main.msg.PlanSolverGoal()
        goal.problem.data = problem
        goal.domain.data = domain
        rospy.loginfo("Sending goal to solver ...")
        self._planningSolverClient.send_goal(goal)
        rospy.loginfo("Waiting for result ...")
        self._planningSolverClient.wait_for_result()
        result = self._planningSolverClient.get_result()
        rospy.loginfo("Received the result from Solver:")
        return result
    def executeActionPlan(self, actionplan):
        """
        Forward the solver's action list to the PlanExecutorServer and
        wait for it to finish.

        actionplan -- list of std_msgs/String actions from the solver;
        parentheses are stripped from each action before sending.
        """
        goal = ipa_pars_main.msg.PlanExecutorGoal()
        #read goals for debug from file
        listOfInput = []
        for itm in actionplan:
            listOfInput.append(itm.data)
        print "this is the action list to send"
        #delete last element
        #del listOfInput[-1:]
        print listOfInput
        listOfOutput = []
        for action_exe in listOfInput:
            new_action = String()
            # strip the PDDL parentheses: the executor expects bare actions
            new_action.data = action_exe.replace("(","").replace(")","")
            listOfOutput.append(new_action)
        print listOfOutput
        goal.action_list = listOfOutput
        rospy.loginfo("Send action list to PlanExecutorServer ...")
        self._planningExecutorClient.send_goal(goal)
        rospy.loginfo("Waiting for result of PlanExecutorServer ...")
        self._planningExecutorClient.wait_for_result()
|
[
" result = self._planningExecutorClient.get_result()"
] | 747
|
lcc
|
python
| null |
e3985a9e93ec883a946ded49a6c0d5a149f870a2551a4bae
|
|
using System;
using System.Collections.Generic;
using System.Text;
using Axiom.Core;
using Axiom.Media;
using Axiom.Graphics;
using Axiom.Overlays;
using Axiom.Animating;
using Axiom.Math;
using System.Runtime.InteropServices;
namespace Axiom.Demos
{
public class DynamicTextures : TechDemo
{
Texture ptex;
HardwarePixelBuffer buffer;
Overlay overlay;
static readonly int reactorExtent = 130; // must be 2^N + 2
uint[] clut = new uint[ 1024 ];
AnimationState swim;
static float fDefDim;
static float fDefVel;
float tim;
List<int[]> chemical = new List<int[]>();
List<int[]> delta = new List<int[]>();
int mSize;
int dt, hdiv0, hdiv1; // diffusion parameters
int F, k; // reaction parameters
bool rpressed;
Random rand = new Random();
public DynamicTextures()
{
chemical.Add( null );
chemical.Add( null );
delta.Add( null );
delta.Add( null );
}
		/// <summary>
		///   Builds the colour lookup table, allocates the reaction buffers and
		///   seeds the Gray-Scott style reaction-diffusion parameters.
		///   Returns false if the base demo setup fails.
		/// </summary>
		public override bool Setup()
		{
			if ( base.Setup() )
			{
				tim = 0;
				rpressed = false;
				// Create colour lookup
				for ( int col = 0; col < 1024; col++ )
				{
					ColorEx c;
					c = HSVtoRGB( ( 1.0f - col / 1024.0f ) * 90.0f + 225.0f, 0.9f, 0.75f + 0.25f * ( 1.0f - col / 1024.0f ) );
					c.a = 1.0f - col / 1024.0f;
					// pack the colour directly into the clut entry (A8R8G8B8)
					unsafe
					{
						fixed ( uint* dest = clut )
						{
							PixelConverter.PackColor( c, PixelFormat.A8R8G8B8, (IntPtr)( &dest[ col ] ) );
						}
					}
				}
				// Setup
				LogManager.Instance.Write( "Creating chemical containment" );
				mSize = reactorExtent * reactorExtent;
				chemical[ 0 ] = new int[ mSize ];
				chemical[ 1 ] = new int[ mSize ];
				delta[ 0 ] = new int[ mSize ];
				delta[ 1 ] = new int[ mSize ];
				// fixed-point simulation constants (FROMFLOAT converts to the
				// integer representation used by the reactor)
				dt = FROMFLOAT( 2.0f );
				hdiv0 = FROMFLOAT( 2.0E-5f / ( 2.0f * 0.01f * 0.01f ) ); // a / (2.0f*h*h); -- really diffusion rate
				hdiv1 = FROMFLOAT( 1.0E-5f / ( 2.0f * 0.01f * 0.01f ) ); // a / (2.0f*h*h); -- really diffusion rate
				//k = FROMFLOAT(0.056f);
				//F = FROMFLOAT(0.020f);
				k = FROMFLOAT( 0.0619f );
				F = FROMFLOAT( 0.0316f );
				resetReactor();
				fireUpReactor();
				updateInfoParamF();
				updateInfoParamK();
				updateInfoParamA0();
				updateInfoParamA1();
				LogManager.Instance.Write( "Cthulhu dawn" );
				return true;
			}
			return false;
		}
		/// <summary>
		///   Creates the dynamic texture, lights, textured plane, knots and
		///   the animated fish, then shows the demo overlay.
		/// </summary>
		public override void CreateScene()
		{
			// Create dynamic texture (interior of the reactor, border excluded)
			ptex = TextureManager.Instance.CreateManual( "DynaTex", ResourceGroupManager.DefaultResourceGroupName, TextureType.TwoD, reactorExtent - 2, reactorExtent - 2, 0, PixelFormat.A8R8G8B8, TextureUsage.DynamicWriteOnly );
			buffer = ptex.GetBuffer( 0, 0 );
			// Set ambient light
			scene.AmbientLight = new ColorEx( 0.6F, 0.6F, 0.6F );
			scene.SetSkyBox( true, "SkyBox/Space", 50 );
			//mRoot->getRenderSystem()->clearFrameBuffer(FBT_COLOUR, ColourValue(255,255,255,0));
			// Create a light
			Light l = scene.CreateLight( "MainLight" );
			l.Diffuse = new ColorEx( 0.75F, 0.75F, 0.80F );
			l.Specular = new ColorEx( 0.9F, 0.9F, 1F );
			l.Position = new Vector3( -100, 80, 50 );
			scene.RootSceneNode.AttachObject( l );
			Entity planeEnt = scene.CreateEntity( "TexPlane1", PrefabEntity.Plane );
			// Give the plane a texture
			planeEnt.MaterialName = "Examples/DynaTest";
			SceneNode node = scene.RootSceneNode.CreateChildSceneNode( new Vector3( -100, -40, -100 ) );
			node.AttachObject( planeEnt );
			node.Scale = new Vector3( 3.0f, 3.0f, 3.0f );
			// Create objects
			SceneNode blaNode = scene.RootSceneNode.CreateChildSceneNode( new Vector3( -200, 0, 50 ) );
			Entity ent2 = scene.CreateEntity( "knot", "knot.mesh" );
			ent2.MaterialName = "Examples/DynaTest4";
			blaNode.AttachObject( ent2 );
			blaNode = scene.RootSceneNode.CreateChildSceneNode( new Vector3( 200, -90, 50 ) );
			ent2 = scene.CreateEntity( "knot2", "knot.mesh" );
			ent2.MaterialName = "Examples/DynaTest2";
			blaNode.AttachObject( ent2 );
			blaNode = scene.RootSceneNode.CreateChildSceneNode( new Vector3( -110, 200, 50 ) );
			// Cloaked fish
			ent2 = scene.CreateEntity( "knot3", "fish.mesh" );
			ent2.MaterialName = "Examples/DynaTest3";
			// the swim animation is advanced every frame in OnFrameStarted
			swim = ent2.GetAnimationState( "swim" );
			swim.IsEnabled = true;
			blaNode.AttachObject( ent2 );
			blaNode.Scale = new Vector3( 50.0f, 50.0f, 50.0f );
			LogManager.Instance.Write( "HardwarePixelBuffer {0} {1} {2} ", buffer.Width, buffer.Height, buffer.Depth );
			// lock/unlock once to log the pixel box layout
			buffer.Lock( BufferLocking.Normal );
			PixelBox pb = buffer.CurrentLock;
			LogManager.Instance.Write( "PixelBox {0} {1} {2} {3} {4} {5} {6}", pb.Width, pb.Height, pb.Depth, pb.RowPitch, pb.SlicePitch, pb.Data, pb.Format );
			buffer.Unlock();
			// show GUI
			overlay = OverlayManager.Instance.GetByName( "Example/DynTexOverlay" );
			overlay.Show();
		}
protected override void OnFrameStarted( object source, FrameEventArgs evt )
{
for ( int x = 0; x < 10; x++ )
runStep();
buildTexture();
swim.AddTime( evt.TimeSinceLastFrame );
base.OnFrameStarted( source, evt );
}
void resetReactor()
{
LogManager.Instance.Write( "Facilitating neutral start up conditions" );
for ( int x = 0; x < mSize; x++ )
{
chemical[ 0 ][ x ] = FROMFLOAT( 1.0f );
chemical[ 1 ][ x ] = FROMFLOAT( 0.0f );
}
}
void fireUpReactor()
{
LogManager.Instance.Write( "Warning: reactor is being fired up" );
int center = reactorExtent / 2;
for ( int x = center - 10; x < center + 10; x++ )
{
for ( int y = center - 10; y < center + 10; y++ )
{
chemical[ 0 ][ y * reactorExtent + x ] = FROMFLOAT( 0.5f ) + rand.Next() % FROMFLOAT( 0.1f );
chemical[ 1 ][ y * reactorExtent + x ] = FROMFLOAT( 0.25f ) + rand.Next() % FROMFLOAT( 0.1f );
}
}
LogManager.Instance.Write( "Warning: reaction has begun" );
}
void runStep()
{
int x, y;
for ( x = 0; x < mSize; x++ )
{
delta[ 0 ][ x ] = 0;
delta[ 1 ][ x ] = 0;
}
// Boundary conditions
int idx;
idx = 0;
for ( y = 0; y < reactorExtent; y++ )
{
chemical[ 0 ][ idx ] = chemical[ 0 ][ idx + reactorExtent - 2 ];
chemical[ 0 ][ idx + reactorExtent - 1 ] = chemical[ 0 ][ idx + 1 ];
chemical[ 1 ][ idx ] = chemical[ 1 ][ idx + reactorExtent - 2 ];
chemical[ 1 ][ idx + reactorExtent - 1 ] = chemical[ 1 ][ idx + 1 ];
idx += reactorExtent;
}
int skip = reactorExtent * ( reactorExtent - 1 );
for ( y = 0; y < reactorExtent; y++ )
{
chemical[ 0 ][ y ] = chemical[ 0 ][ y + skip - reactorExtent ];
chemical[ 0 ][ y + skip ] = chemical[ 0 ][ y + reactorExtent ];
chemical[ 1 ][ y ] = chemical[ 1 ][ y + skip - reactorExtent ];
chemical[ 1 ][ y + skip ] = chemical[ 1 ][ y + reactorExtent ];
}
// Diffusion
idx = reactorExtent + 1;
for ( y = 0; y < reactorExtent - 2; y++ )
{
for ( x = 0; x < reactorExtent - 2; x++ )
{
delta[ 0 ][ idx ] += MULT( chemical[ 0 ][ idx - reactorExtent ] + chemical[ 0 ][ idx - 1 ]
- 4 * chemical[ 0 ][ idx ] + chemical[ 0 ][ idx + 1 ]
|
[
"\t\t\t\t\t\t\t\t\t+ chemical[ 0 ][ idx + reactorExtent ], hdiv0 );"
] | 1,036
|
lcc
|
csharp
| null |
ae83375fd17d59ade51acb7d21f140c348517cad10d87283
|
|
/*******************************************************************************
* ___ _ ____ ____
* / _ \ _ _ ___ ___| |_| _ \| __ )
* | | | | | | |/ _ \/ __| __| | | | _ \
* | |_| | |_| | __/\__ \ |_| |_| | |_) |
* \__\_\\__,_|\___||___/\__|____/|____/
*
* Copyright (c) 2014-2019 Appsicle
* Copyright (c) 2019-2022 QuestDB
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package io.questdb.cutlass.text;
import io.questdb.cairo.ColumnType;
import io.questdb.cutlass.json.JsonException;
import io.questdb.cutlass.json.JsonLexer;
import io.questdb.cutlass.json.JsonParser;
import io.questdb.cutlass.text.types.TypeAdapter;
import io.questdb.cutlass.text.types.TypeManager;
import io.questdb.griffin.SqlKeywords;
import io.questdb.log.Log;
import io.questdb.log.LogFactory;
import io.questdb.std.*;
import io.questdb.std.datetime.DateLocale;
import io.questdb.std.datetime.DateLocaleFactory;
import io.questdb.std.datetime.microtime.TimestampFormatFactory;
import io.questdb.std.datetime.millitime.DateFormatFactory;
import io.questdb.std.str.AbstractCharSequence;
import java.io.Closeable;
public class TextMetadataParser implements JsonParser, Mutable, Closeable {
private static final Log LOG = LogFactory.getLog(TextMetadataParser.class);
private static final int S_NEED_ARRAY = 1;
private static final int S_NEED_OBJECT = 2;
private static final int S_NEED_PROPERTY = 3;
private static final int P_NAME = 1;
private static final int P_TYPE = 2;
private static final int P_PATTERN = 3;
private static final int P_LOCALE = 4;
private static final int P_UTF8 = 5;
private static final int P_INDEX = 6;
private static final CharSequenceIntHashMap propertyNameMap = new CharSequenceIntHashMap();
private final DateLocaleFactory dateLocaleFactory;
private final ObjectPool<FloatingCharSequence> csPool;
private final DateFormatFactory dateFormatFactory;
private final TimestampFormatFactory timestampFormatFactory;
private final ObjList<CharSequence> columnNames;
private final ObjList<TypeAdapter> columnTypes;
private final TypeManager typeManager;
private final DateLocale dateLocale;
private int state = S_NEED_ARRAY;
private CharSequence name;
private int type = -1;
private CharSequence pattern;
private CharSequence locale;
private int propertyIndex;
private long buf;
private long bufCapacity = 0;
private int bufSize = 0;
private CharSequence tableName;
private int localePosition;
private boolean utf8 = false;
private boolean index = false;
    /**
     * Wires the parser to the shared type machinery.
     *
     * @param textConfiguration supplies the string-pool capacity and default date locale
     * @param typeManager       source of the date/timestamp format and locale factories
     */
    public TextMetadataParser(TextConfiguration textConfiguration, TypeManager typeManager) {
        this.columnNames = new ObjList<>();
        this.columnTypes = new ObjList<>();
        this.csPool = new ObjectPool<>(FloatingCharSequence::new, textConfiguration.getMetadataStringPoolCapacity());
        this.dateLocaleFactory = typeManager.getInputFormatConfiguration().getDateLocaleFactory();
        this.dateFormatFactory = typeManager.getInputFormatConfiguration().getDateFormatFactory();
        this.timestampFormatFactory = typeManager.getInputFormatConfiguration().getTimestampFormatFactory();
        this.typeManager = typeManager;
        this.dateLocale = textConfiguration.getDefaultDateLocale();
    }
@Override
public void clear() {
bufSize = 0;
state = S_NEED_ARRAY;
columnNames.clear();
columnTypes.clear();
csPool.clear();
clearStage();
}
@Override
public void close() {
clear();
if (bufCapacity > 0) {
Unsafe.free(buf, bufCapacity, MemoryTag.NATIVE_DEFAULT);
bufCapacity = 0;
}
}
    // Column names collected from the parsed metadata, in document order.
    public ObjList<CharSequence> getColumnNames() {
        return columnNames;
    }
    // Type adapters parallel to getColumnNames(), one per parsed column.
    public ObjList<TypeAdapter> getColumnTypes() {
        return columnTypes;
    }
    /**
     * JSON lexer callback implementing a small state machine. The expected
     * document shape is an array of objects, each describing one column
     * (name, type, pattern, locale, utf8, index). Each completed object is
     * turned into a column via createImportedType.
     *
     * @throws JsonException when the document structure or values are invalid
     */
    @Override
    public void onEvent(int code, CharSequence tag, int position) throws JsonException {
        switch (code) {
            case JsonLexer.EVT_ARRAY_START:
                if (state != S_NEED_ARRAY) {
                    throw JsonException.$(position, "Unexpected array");
                }
                state = S_NEED_OBJECT;
                break;
            case JsonLexer.EVT_OBJ_START:
                if (state != S_NEED_OBJECT) {
                    throw JsonException.$(position, "Unexpected object");
                }
                state = S_NEED_PROPERTY;
                break;
            case JsonLexer.EVT_NAME:
                // unknown property names are logged and their values ignored
                // (propertyIndex stays -1, hitting the default branch below)
                this.propertyIndex = propertyNameMap.get(tag);
                if (this.propertyIndex == -1) {
                    LOG.info().$("unknown [table=").$(tableName).$(", tag=").$(tag).$(']').$();
                }
                break;
            case JsonLexer.EVT_VALUE:
                switch (propertyIndex) {
                    case P_NAME:
                        // tag is lexer-owned; copy() stages it in our own buffer
                        name = copy(tag);
                        break;
                    case P_TYPE:
                        type = ColumnType.tagOf(tag);
                        if (type == -1) {
                            throw JsonException.$(position, "Invalid type");
                        }
                        break;
                    case P_PATTERN:
                        pattern = copy(tag);
                        break;
                    case P_LOCALE:
                        locale = copy(tag);
                        // remember where the locale was, for error reporting later
                        localePosition = position;
                        break;
                    case P_UTF8:
                        utf8 = SqlKeywords.isTrueKeyword(tag);
                        break;
                    case P_INDEX:
                        index = SqlKeywords.isTrueKeyword(tag);
                        break;
                    default:
                        LOG.info().$("ignoring [table=").$(tableName).$(", value=").$(tag).$(']').$();
                        break;
                }
                break;
            case JsonLexer.EVT_OBJ_END:
                // one column definition completed: materialize it
                state = S_NEED_OBJECT;
                createImportedType(position);
                break;
            case JsonLexer.EVT_ARRAY_VALUE:
                throw JsonException.$(position, "Must be an object");
            default:
                break;
        }
    }
private static void strcpyw(final CharSequence value, final int len, final long address) {
for (int i = 0; i < len; i++) {
Unsafe.getUnsafe().putChar(address + ((long) i << 1), value.charAt(i));
}
}
    /**
     * Validates that a column object supplied both mandatory properties.
     *
     * @throws JsonException when 'name' is missing or 'type' was never set
     */
    private static void checkInputs(int position, CharSequence name, int type) throws JsonException {
        if (name == null) {
            throw JsonException.$(position, "Missing 'name' property");
        }
        if (type == -1) {
            throw JsonException.$(position, "Missing 'type' property");
        }
    }
private void clearStage() {
name = null;
type = -1;
pattern = null;
locale = null;
localePosition = 0;
utf8 = false;
index = false;
}
    /**
     * Copies the lexer-owned {@code tag} into this parser's native buffer
     * and returns a pooled CharSequence that views the copied characters.
     * The buffer grows geometrically and previously staged strings are
     * preserved by the memcpy.
     */
    private CharSequence copy(CharSequence tag) {
        // l is the byte length (2 bytes per char); bufSize/bufCapacity are in bytes
        final int l = tag.length() * 2;
        final long n = bufSize + l;
        if (n > bufCapacity) {
            // allocate double the required size to amortize future growth
            long ptr = Unsafe.malloc(n * 2, MemoryTag.NATIVE_DEFAULT);
            Vect.memcpy(ptr, buf, bufSize);
            if (bufCapacity > 0) {
                Unsafe.free(buf, bufCapacity, MemoryTag.NATIVE_DEFAULT);
            }
            buf = ptr;
            bufCapacity = n * 2;
        }
        strcpyw(tag, l / 2, buf + bufSize);
        // the pooled sequence records offset and char count into the shared buffer
        CharSequence cs = csPool.next().of(bufSize, l / 2);
        bufSize += l;
        return cs;
    }
private void createImportedType(int position) throws JsonException {
checkInputs(position, name, type);
columnNames.add(name);
switch (ColumnType.tagOf(type)) {
case ColumnType.DATE:
DateLocale dateLocale = locale == null ? this.dateLocale : dateLocaleFactory.getLocale(locale);
if (dateLocale == null) {
throw JsonException.$(localePosition, "Invalid date locale");
}
// date pattern is required
if (pattern == null) {
throw JsonException.$(0, "DATE format pattern is required");
}
columnTypes.add(typeManager.nextDateAdapter().of(dateFormatFactory.get(pattern), dateLocale));
break;
case ColumnType.TIMESTAMP:
DateLocale timestampLocale =
locale == null ?
this.dateLocale
: dateLocaleFactory.getLocale(locale);
if (timestampLocale == null) {
throw JsonException.$(localePosition, "Invalid timestamp locale");
}
// timestamp pattern is required
|
[
" if (pattern == null) {"
] | 839
|
lcc
|
java
| null |
1b693ff15f56949b8dd6a9b75de17dd49f7f0b08bf33823d
|
|
/*
Classe gerada automaticamente pelo MSTech Code Creator
*/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Data;
using MSTech.Data.Common;
using MSTech.Data.Common.Abstracts;
using MSTech.GestaoEscolar.Entities;
namespace MSTech.GestaoEscolar.DAL.Abstracts
{
/// <summary>
/// Classe abstrata de ORC_ConteudoItem
/// </summary>
public abstract class Abstract_ORC_ConteudoItemDAO : Abstract_DAL<ORC_ConteudoItem>
{
        /// <summary>
        /// Nome da connection string usada por esta DAL (fixa para o banco GestaoEscolar).
        /// </summary>
        protected override string ConnectionStringName
        {
            get
            {
                return "MSTech.GestaoEscolar";
            }
        }
/// <summary>
/// Configura os parametros do metodo de carregar
        /// </summary>
/// <param name="qs">Objeto da Store Procedure</param>
        protected override void ParamCarregar(QuerySelectStoredProcedure qs, ORC_ConteudoItem entity)
        {
            // Composite primary key (@obj_id, @ctd_id, @cti_id) identifying the row to load.
            // NOTE: each assignment reuses the inherited 'Param' field, which ends up
            // referencing the last parameter added.
            Param = qs.NewParameter();
            Param.DbType = DbType.Int32;
            Param.ParameterName = "@obj_id";
            Param.Size = 4;
            Param.Value = entity.obj_id;
            qs.Parameters.Add(Param);
            Param = qs.NewParameter();
            Param.DbType = DbType.Int32;
            Param.ParameterName = "@ctd_id";
            Param.Size = 4;
            Param.Value = entity.ctd_id;
            qs.Parameters.Add(Param);
            Param = qs.NewParameter();
            Param.DbType = DbType.Int32;
            Param.ParameterName = "@cti_id";
            Param.Size = 4;
            Param.Value = entity.cti_id;
            qs.Parameters.Add(Param);
        }
/// <summary>
/// Configura os parametros do metodo de Inserir
/// </summary>
/// <param name="qs">Objeto da Store Procedure</param>
        protected override void ParamInserir(QuerySelectStoredProcedure qs, ORC_ConteudoItem entity)
        {
            // Key columns (@obj_id, @ctd_id, @cti_id).
            Param = qs.NewParameter();
            Param.DbType = DbType.Int32;
            Param.ParameterName = "@obj_id";
            Param.Size = 4;
            Param.Value = entity.obj_id;
            qs.Parameters.Add(Param);
            Param = qs.NewParameter();
            Param.DbType = DbType.Int32;
            Param.ParameterName = "@ctd_id";
            Param.Size = 4;
            Param.Value = entity.ctd_id;
            qs.Parameters.Add(Param);
            Param = qs.NewParameter();
            Param.DbType = DbType.Int32;
            Param.ParameterName = "@cti_id";
            Param.Size = 4;
            Param.Value = entity.cti_id;
            qs.Parameters.Add(Param);
            // Data columns: description (text), situation flag and audit timestamps.
            Param = qs.NewParameter();
            Param.DbType = DbType.AnsiString;
            Param.ParameterName = "@cti_descricao";
            Param.Size = 2147483647;
            Param.Value = entity.cti_descricao;
            qs.Parameters.Add(Param);
            Param = qs.NewParameter();
            Param.DbType = DbType.Byte;
            Param.ParameterName = "@cti_situacao";
            Param.Size = 1;
            Param.Value = entity.cti_situacao;
            qs.Parameters.Add(Param);
            Param = qs.NewParameter();
            Param.DbType = DbType.DateTime;
            Param.ParameterName = "@cti_dataCriacao";
            Param.Size = 16;
            Param.Value = entity.cti_dataCriacao;
            qs.Parameters.Add(Param);
            Param = qs.NewParameter();
            Param.DbType = DbType.DateTime;
            Param.ParameterName = "@cti_dataAlteracao";
            Param.Size = 16;
            Param.Value = entity.cti_dataAlteracao;
            qs.Parameters.Add(Param);
        }
/// <summary>
/// Configures the parameters for the Update stored procedure.
/// </summary>
/// <param name="qs">Stored procedure wrapper that receives the parameters</param>
/// <param name="entity">Entity supplying the parameter values</param>
protected override void ParamAlterar(QueryStoredProcedure qs, ORC_ConteudoItem entity)
{
    // Creates, fills and registers one parameter; the Param field is left
    // pointing at the most recently created parameter, exactly as in the
    // other Param* methods of this class.
    void Add(string name, DbType type, int size, object value)
    {
        Param = qs.NewParameter();
        Param.DbType = type;
        Param.ParameterName = name;
        Param.Size = size;
        Param.Value = value;
        qs.Parameters.Add(Param);
    }

    // Composite key columns.
    Add("@obj_id", DbType.Int32, 4, entity.obj_id);
    Add("@ctd_id", DbType.Int32, 4, entity.ctd_id);
    Add("@cti_id", DbType.Int32, 4, entity.cti_id);
    // Data columns.
    Add("@cti_descricao", DbType.AnsiString, 2147483647, entity.cti_descricao);
    Add("@cti_situacao", DbType.Byte, 1, entity.cti_situacao);
    Add("@cti_dataCriacao", DbType.DateTime, 16, entity.cti_dataCriacao);
    Add("@cti_dataAlteracao", DbType.DateTime, 16, entity.cti_dataAlteracao);
}
/// <summary>
/// Configures the parameters for the Delete stored procedure.
/// </summary>
/// <param name="qs">Stored procedure wrapper that receives the parameters</param>
/// <param name="entity">Entity supplying the key values</param>
protected override void ParamDeletar(QueryStoredProcedure qs, ORC_ConteudoItem entity)
{
    // Deleting only needs the composite key; the Param field is left pointing
    // at the most recently created parameter, as in the other Param* methods.
    void Add(string name, object value)
    {
        Param = qs.NewParameter();
        Param.DbType = DbType.Int32;
        Param.ParameterName = name;
        Param.Size = 4;
        Param.Value = value;
        qs.Parameters.Add(Param);
    }

    Add("@obj_id", entity.obj_id);
    Add("@ctd_id", entity.ctd_id);
    Add("@cti_id", entity.cti_id);
}
/// <summary>
/// Recebe o valor do auto incremento e coloca na propriedade
/// </summary>
/// <param name="qs">Objeto da Store Procedure</param>
protected override bool ReceberAutoIncremento(QuerySelectStoredProcedure qs, ORC_ConteudoItem entity)
{
|
[
" entity.cti_id = Convert.ToInt32(qs.Return.Rows[0][0]);"
] | 515
|
lcc
|
csharp
| null |
39bbeca7715c515fede070fff57d6f18c6608542a2b291d1
|
|
using System;
using System.Linq;
using System.Data.Common;
using NHibernate.Cfg.MappingSchema;
using NHibernate.Engine;
using NHibernate.Mapping.ByCode;
using NHibernate.Mapping.ByCode.Impl;
using NHibernate.Properties;
using NHibernate.SqlTypes;
using NHibernate.Type;
using NHibernate.UserTypes;
using NUnit.Framework;
namespace NHibernate.Test.MappingByCode.MappersTests
{
[TestFixture]
public class PropertyMapperTest
{
    // Enum used to exercise enum-typed property mappings.
    private enum MyEnum
    {
        One
    }
    // Minimal entity: an auto-property, a read-only property and an enum property.
    private class MyClass
    {
        public string Autoproperty { get; set; }
        public string ReadOnly { get { return ""; } }
        public MyEnum EnumProp { get; set; }
    }
    // Spy accessor-mapper used to verify that PropertyMapper delegates Access calls.
    private class MyAccessorMapper : IAccessorPropertyMapper
    {
        public bool AccessorCalled { get; set; }
        public void Access(Accessor accessor)
        {
            AccessorCalled = true;
        }
        public void Access(System.Type accessorType)
        {
        }
    }
    [Test]
    public void WhenCreateWithGivenAccessorMapperThenUseTheGivenAccessoMapper()
    {
        var member = typeof (MyClass).GetProperty("ReadOnly");
        var mapping = new HbmProperty();
        var myAccessorMapper = new MyAccessorMapper();
        var mapper = new PropertyMapper(member, mapping, myAccessorMapper);
        mapper.Access(Accessor.Field);
        Assert.That(myAccessorMapper.AccessorCalled, Is.True);
    }
    [Test]
    public void WhenSettingByTypeThenCheckCompatibility()
    {
        // Access(System.Type) must reject non-IPropertyAccessor types.
        var member = typeof(MyClass).GetProperty("ReadOnly");
        var mapping = new HbmProperty();
        var mapper = new PropertyMapper(member, mapping);
        Assert.That(() => mapper.Access(typeof(object)), Throws.TypeOf<ArgumentOutOfRangeException>());
        Assert.That(() => mapper.Access(typeof(FieldAccessor)), Throws.Nothing);
        Assert.That(mapping.Access, Is.EqualTo(typeof(FieldAccessor).AssemblyQualifiedName));
    }
    [Test]
    public void WhenSetTypeByITypeThenSetTypeName()
    {
        var member = typeof(MyClass).GetProperty("ReadOnly");
        var mapping = new HbmProperty();
        var mapper = new PropertyMapper(member, mapping);
        mapper.Type(NHibernateUtil.String);
        Assert.That(mapping.Type.name, Is.EqualTo("String"));
    }
    [Test]
    public void WhenSetTypeByIUserTypeThenSetTypeName()
    {
        var member = typeof(MyClass).GetProperty("ReadOnly");
        var mapping = new HbmProperty();
        var mapper = new PropertyMapper(member, mapping);
        mapper.Type<MyType>();
        Assert.That(mapping.Type.name, Does.Contain("MyType"));
        Assert.That(mapping.type, Is.Null);
    }
    [Test]
    public void WhenSetTypeByICompositeUserTypeThenSetTypeName()
    {
        var member = typeof(MyClass).GetProperty("ReadOnly");
        var mapping = new HbmProperty();
        var mapper = new PropertyMapper(member, mapping);
        mapper.Type<MyCompoType>();
        Assert.That(mapping.Type.name, Does.Contain("MyCompoType"));
        Assert.That(mapping.type, Is.Null);
    }
    [Test]
    public void WhenSetTypeByIUserTypeWithParamsThenSetType()
    {
        // Anonymous-object properties become <param> elements of the type mapping.
        var member = typeof(MyClass).GetProperty("ReadOnly");
        var mapping = new HbmProperty();
        var mapper = new PropertyMapper(member, mapping);
        mapper.Type<MyType>(new { Param1 = "a", Param2 = 12 });
        Assert.That(mapping.type1, Is.Null);
        Assert.That(mapping.Type.name, Does.Contain("MyType"));
        Assert.That(mapping.Type.param, Has.Length.EqualTo(2));
        Assert.That(mapping.Type.param.Select(p => p.name), Is.EquivalentTo(new [] {"Param1", "Param2"}));
        Assert.That(mapping.Type.param.Select(p => p.GetText()), Is.EquivalentTo(new [] {"a", "12"}));
    }
    [Test]
    public void WhenSetTypeByIUserTypeWithNullParamsThenSetTypeName()
    {
        var member = typeof(MyClass).GetProperty("ReadOnly");
        var mapping = new HbmProperty();
        var mapper = new PropertyMapper(member, mapping);
        mapper.Type<MyType>(null);
        Assert.That(mapping.Type.name, Does.Contain("MyType"));
        Assert.That(mapping.type, Is.Null);
    }
    [Test]
    public void WhenSetTypeByITypeTypeThenSetType()
    {
        var member = For<MyClass>.Property(c => c.EnumProp);
        var mapping = new HbmProperty();
        var mapper = new PropertyMapper(member, mapping);
        mapper.Type<EnumStringType<MyEnum>>();
        Assert.That(mapping.Type.name, Does.Contain(typeof(EnumStringType<MyEnum>).FullName));
        Assert.That(mapping.type, Is.Null);
    }
    [Test]
    public void WhenSetInvalidTypeThenThrow()
    {
        var member = typeof(MyClass).GetProperty("ReadOnly");
        var mapping = new HbmProperty();
        var mapper = new PropertyMapper(member, mapping);
        Assert.That(() => mapper.Type(typeof(object), null), Throws.TypeOf<ArgumentOutOfRangeException>());
        Assert.That(() => mapper.Type(null, null), Throws.TypeOf<ArgumentNullException>());
    }
    [Test]
    public void WhenSetDifferentColumnNameThenSetTheName()
    {
        var member = typeof(MyClass).GetProperty("Autoproperty");
        var mapping = new HbmProperty();
        var mapper = new PropertyMapper(member, mapping);
        mapper.Column(cm => cm.Name("pepe"));
        Assert.That(mapping.Columns.Count(), Is.EqualTo(1));
        Assert.That(mapping.Columns.Single().name, Is.EqualTo("pepe"));
    }
    [Test]
    public void WhenSetDefaultColumnNameThenDoesNotSetTheName()
    {
        // A column name equal to the property name is the default, so no
        // explicit column/name should be emitted.
        var member = typeof(MyClass).GetProperty("Autoproperty");
        var mapping = new HbmProperty();
        var mapper = new PropertyMapper(member, mapping);
        mapper.Column(cm => { cm.Name("Autoproperty"); cm.Length(50); });
        Assert.That(mapping.column, Is.Null);
        Assert.That(mapping.length, Is.EqualTo("50"));
        Assert.That(mapping.Columns, Is.Empty);
    }
    [Test]
    public void WhenSetBasicColumnValuesThenSetPlainValues()
    {
        // Basic values (length/not-null) go on the property element itself,
        // not on a nested <column> element.
        var member = typeof(MyClass).GetProperty("Autoproperty");
        var mapping = new HbmProperty();
        var mapper = new PropertyMapper(member, mapping);
        mapper.Column(cm =>
        {
            cm.Length(50);
            cm.NotNullable(true);
        });
        Assert.That(mapping.Items, Is.Null);
        Assert.That(mapping.length, Is.EqualTo("50"));
        Assert.That(mapping.notnull, Is.EqualTo(true));
        Assert.That(mapping.notnullSpecified, Is.EqualTo(true));
    }
    [Test]
    public void WhenSetColumnValuesThenAddColumnTag()
    {
        // SqlType has no plain attribute on <property>, so a <column> is required.
        var member = typeof(MyClass).GetProperty("Autoproperty");
        var mapping = new HbmProperty();
        var mapper = new PropertyMapper(member, mapping);
        mapper.Column(cm =>
        {
            cm.SqlType("VARCHAR(50)");
            cm.NotNullable(true);
        });
        Assert.That(mapping.Items, Is.Not.Null);
        Assert.That(mapping.Columns.Count(), Is.EqualTo(1));
    }
    [Test]
    public void WhenSetBasicColumnValuesMoreThanOnesThenMergeColumn()
    {
        var member = typeof(MyClass).GetProperty("Autoproperty");
        var mapping = new HbmProperty();
        var mapper = new PropertyMapper(member, mapping);
        mapper.Column(cm => cm.Length(50));
        mapper.Column(cm => cm.NotNullable(true));
        Assert.That(mapping.Items, Is.Null);
        Assert.That(mapping.length, Is.EqualTo("50"));
        Assert.That(mapping.notnull, Is.EqualTo(true));
        Assert.That(mapping.notnullSpecified, Is.EqualTo(true));
    }
    [Test]
    public void WhenSetMultiColumnsValuesThenAddColumns()
    {
        var member = typeof(MyClass).GetProperty("ReadOnly");
        var mapping = new HbmProperty();
        var mapper = new PropertyMapper(member, mapping);
        mapper.Type<MyType>();
        mapper.Columns(cm =>
        {
            cm.Name("column1");
            cm.Length(50);
        }, cm =>
        {
            cm.Name("column2");
            cm.SqlType("VARCHAR(10)");
        });
        Assert.That(mapping.Columns.Count(), Is.EqualTo(2));
    }
    [Test]
    public void WhenSetMultiColumnsValuesThenAutoassignColumnNames()
    {
        var member = typeof(MyClass).GetProperty("ReadOnly");
        var mapping = new HbmProperty();
        var mapper = new PropertyMapper(member, mapping);
        mapper.Columns(cm => cm.Length(50), cm => cm.SqlType("VARCHAR(10)"));
        Assert.That(mapping.Columns.Count(), Is.EqualTo(2));
        Assert.True(mapping.Columns.All(cm => !string.IsNullOrEmpty(cm.name)));
    }
    [Test]
    public void AfterSetMultiColumnsCantSetSimpleColumn()
    {
        var member = typeof(MyClass).GetProperty("ReadOnly");
        var mapping = new HbmProperty();
        var mapper = new PropertyMapper(member, mapping);
        mapper.Columns(cm => cm.Length(50), cm => cm.SqlType("VARCHAR(10)"));
        Assert.That(() => mapper.Column(cm => cm.Length(50)), Throws.TypeOf<MappingException>());
    }
    [Test]
    public void WhenSetBasicColumnValuesThroughShortCutThenMergeColumn()
    {
        // Each shortcut call merges into the same (single) implicit column.
        var member = typeof(MyClass).GetProperty("Autoproperty");
        var mapping = new HbmProperty();
        var mapper = new PropertyMapper(member, mapping);
        mapper.Column("pizza");
        mapper.Length(50);
        mapper.Precision(10);
        mapper.Scale(2);
        mapper.NotNullable(true);
        mapper.Unique(true);
        mapper.UniqueKey("AA");
        mapper.Index("II");
        Assert.That(mapping.Items, Is.Null);
        Assert.That(mapping.column, Is.EqualTo("pizza"));
        Assert.That(mapping.length, Is.EqualTo("50"));
        Assert.That(mapping.precision, Is.EqualTo("10"));
        Assert.That(mapping.scale, Is.EqualTo("2"));
        Assert.That(mapping.notnull, Is.EqualTo(true));
        Assert.That(mapping.unique, Is.EqualTo(true));
        Assert.That(mapping.uniquekey, Is.EqualTo("AA"));
        Assert.That(mapping.index, Is.EqualTo("II"));
    }
    [Test]
    public void WhenSetUpdateThenSetAttributes()
    {
        var member = For<MyClass>.Property(x => x.ReadOnly);
        var mapping = new HbmProperty();
        var mapper = new PropertyMapper(member, mapping);
        mapper.Update(false);
        Assert.That(mapping.update, Is.False);
        Assert.That(mapping.updateSpecified, Is.True);
    }
    [Test]
    public void WhenSetInsertThenSetAttributes()
    {
        var member = For<MyClass>.Property(x => x.ReadOnly);
        var mapping = new HbmProperty();
        var mapper = new PropertyMapper(member, mapping);
        mapper.Insert(false);
        Assert.That(mapping.insert, Is.False);
        Assert.That(mapping.insertSpecified, Is.True);
    }
    [Test]
    public void WhenSetLazyThenSetAttributes()
    {
        var member = For<MyClass>.Property(x => x.ReadOnly);
        var mapping = new HbmProperty();
        var mapper = new PropertyMapper(member, mapping);
        mapper.Lazy(true);
        Assert.That(mapping.lazy, Is.True);
        Assert.That(mapping.IsLazyProperty, Is.True);
    }
}
public class MyType : IUserType
{
    #region Implementation of IUserType
    // Test stub: PropertyMapperTest only needs this type's identity for the
    // mapping metadata, so every IUserType member deliberately throws.
    public new bool Equals(object x, object y) => throw new NotImplementedException();
    public int GetHashCode(object x) => throw new NotImplementedException();
    public object NullSafeGet(DbDataReader rs, string[] names, ISessionImplementor session, object owner) => throw new NotImplementedException();
    public void NullSafeSet(DbCommand cmd, object value, int index, ISessionImplementor session) => throw new NotImplementedException();
    public object DeepCopy(object value) => throw new NotImplementedException();
    public object Replace(object original, object target, object owner) => throw new NotImplementedException();
    public object Assemble(object cached, object owner) => throw new NotImplementedException();
    public object Disassemble(object value) => throw new NotImplementedException();
    public SqlType[] SqlTypes => throw new NotImplementedException();
    public System.Type ReturnedType => throw new NotImplementedException();
    public bool IsMutable => throw new NotImplementedException();
    #endregion
}
public class MyCompoType : ICompositeUserType
{
public object GetPropertyValue(object component, int property)
{
throw new NotImplementedException();
}
public void SetPropertyValue(object component, int property, object value)
{
throw new NotImplementedException();
}
public new bool Equals(object x, object y)
{
throw new NotImplementedException();
}
public int GetHashCode(object x)
{
throw new NotImplementedException();
}
public object NullSafeGet(DbDataReader dr, string[] names, ISessionImplementor session, object owner)
{
throw new NotImplementedException();
}
public void NullSafeSet(DbCommand cmd, object value, int index, bool[] settable, ISessionImplementor session)
{
throw new NotImplementedException();
}
public object DeepCopy(object value)
{
throw new NotImplementedException();
}
public object Disassemble(object value, ISessionImplementor session)
{
throw new NotImplementedException();
}
public object Assemble(object cached, ISessionImplementor session, object owner)
{
|
[
"\t\t\tthrow new NotImplementedException();"
] | 1,017
|
lcc
|
csharp
| null |
590b51d23289d047b57ac6677f20d8695fe1def2a0120e8d
|
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.0'}
DOCUMENTATION = '''
---
module: ce_ntp_auth
version_added: "2.4"
short_description: Manages NTP authentication configuration on HUAWEI CloudEngine switches.
description:
- Manages NTP authentication configuration on HUAWEI CloudEngine switches.
author:
- Zhijin Zhou (@CloudEngine-Ansible)
notes:
- If C(state=absent), the module will attempt to remove the given key configuration.
If a matching key configuration isn't found on the device, the module will fail.
- If C(state=absent) and C(authentication=on), authentication will be turned on.
- If C(state=absent) and C(authentication=off), authentication will be turned off.
options:
key_id:
description:
- Authentication key identifier (numeric).
required: true
auth_pwd:
description:
- Plain text with length of 1 to 255, encrypted text with length of 20 to 392.
required: false
default: null
auth_mode:
description:
- Specify authentication algorithm.
required: false
default: null
choices: ['hmac-sha256', 'md5']
auth_type:
description:
- Whether the given password is in cleartext or
has been encrypted. If in cleartext, the device
will encrypt it before storing it.
required: false
default: encrypt
choices: ['text', 'encrypt']
trusted_key:
description:
- Whether the given key is required to be supplied by a time source
for the device to synchronize to the time source.
required: false
default: 'disable'
choices: ['enable', 'disable']
authentication:
description:
- Configure ntp authentication enable or unconfigure ntp authentication enable.
required: false
default: null
choices: ['enable', 'disable']
state:
description:
- Manage the state of the resource.
required: false
default: present
choices: ['present','absent']
'''
EXAMPLES = '''
- name: NTP AUTH test
hosts: cloudengine
connection: local
gather_facts: no
vars:
cli:
host: "{{ inventory_hostname }}"
port: "{{ ansible_ssh_port }}"
username: "{{ username }}"
password: "{{ password }}"
transport: cli
tasks:
- name: "Configure ntp authentication key-id"
ce_ntp_auth:
key_id: 32
auth_mode: md5
auth_pwd: 11111111111111111111111
provider: "{{ cli }}"
- name: "Configure ntp authentication key-id and trusted authentication keyid"
ce_ntp_auth:
key_id: 32
auth_mode: md5
auth_pwd: 11111111111111111111111
trusted_key: enable
provider: "{{ cli }}"
- name: "Configure ntp authentication key-id and authentication enable"
ce_ntp_auth:
key_id: 32
auth_mode: md5
auth_pwd: 11111111111111111111111
authentication: enable
provider: "{{ cli }}"
- name: "Unconfigure ntp authentication key-id and trusted authentication keyid"
ce_ntp_auth:
key_id: 32
state: absent
provider: "{{ cli }}"
- name: "Unconfigure ntp authentication key-id and authentication enable"
ce_ntp_auth:
key_id: 32
authentication: enable
state: absent
provider: "{{ cli }}"
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: always
type: dict
sample: {
"auth_type": "text",
"authentication": "enable",
"key_id": "32",
"auth_pwd": "1111",
"auth_mode": "md5",
"trusted_key": "enable",
"state": "present"
}
existing:
description: k/v pairs of existing ntp authentication
returned: always
type: dict
sample: {
"authentication": "off",
"authentication-keyid": [
{
"auth_mode": "md5",
"key_id": "1",
"trusted_key": "disable"
}
]
}
end_state:
description: k/v pairs of ntp authentication after module execution
returned: always
type: dict
sample: {
"authentication": "off",
"authentication-keyid": [
{
"auth_mode": "md5",
"key_id": "1",
"trusted_key": "disable"
},
{
"auth_mode": "md5",
"key_id": "32",
"trusted_key": "enable"
}
]
}
state:
description: state as sent in from the playbook
returned: always
type: string
sample: "present"
updates:
description: command sent to the device
returned: always
type: list
sample: [
"ntp authentication-key 32 md5 1111",
"ntp trusted-key 32",
"ntp authentication enable"
]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
'''
import copy
import re
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ce import ce_argument_spec, load_config, get_nc_config, set_nc_config
CE_NC_GET_NTP_AUTH_CONFIG = """
<filter type="subtree">
<ntp xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<ntpAuthKeyCfgs>
<ntpAuthKeyCfg>
<keyId>%s</keyId>
<mode></mode>
<keyVal></keyVal>
<isReliable></isReliable>
</ntpAuthKeyCfg>
</ntpAuthKeyCfgs>
</ntp>
</filter>
"""
CE_NC_GET_ALL_NTP_AUTH_CONFIG = """
<filter type="subtree">
<ntp xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<ntpAuthKeyCfgs>
<ntpAuthKeyCfg>
<keyId></keyId>
<mode></mode>
<keyVal></keyVal>
<isReliable></isReliable>
</ntpAuthKeyCfg>
</ntpAuthKeyCfgs>
</ntp>
</filter>
"""
CE_NC_GET_NTP_AUTH_ENABLE = """
<filter type="subtree">
<ntp xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<ntpSystemCfg>
<isAuthEnable></isAuthEnable>
</ntpSystemCfg>
</ntp>
</filter>
"""
CE_NC_MERGE_NTP_AUTH_CONFIG = """
<config>
<ntp xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<ntpAuthKeyCfgs>
<ntpAuthKeyCfg operation="merge">
<keyId>%s</keyId>
<mode>%s</mode>
<keyVal>%s</keyVal>
<isReliable>%s</isReliable>
</ntpAuthKeyCfg>
</ntpAuthKeyCfgs>
</ntp>
</config>
"""
CE_NC_MERGE_NTP_AUTH_ENABLE = """
<config>
<ntp xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<ntpSystemCfg operation="merge">
<isAuthEnable>%s</isAuthEnable>
</ntpSystemCfg>
</ntp>
</config>
"""
CE_NC_DELETE_NTP_AUTH_CONFIG = """
<config>
<ntp xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<ntpAuthKeyCfgs>
<ntpAuthKeyCfg operation="delete">
<keyId>%s</keyId>
</ntpAuthKeyCfg>
</ntpAuthKeyCfgs>
</ntp>
</config>
"""
class NtpAuth(object):
"""Manage ntp authentication"""
def __init__(self, argument_spec):
    """Collect module arguments and snapshot the device's current NTP auth state.

    Builds the AnsibleModule, copies the user-supplied parameters onto the
    instance, validates them, then queries the switch for the existing
    configuration so that later config methods can diff against it.
    """
    self.spec = argument_spec
    self.module = None
    self.init_module()
    # ntp_auth configuration info (user-supplied arguments; empty strings
    # are normalized to None via `or None`)
    self.key_id = self.module.params['key_id']
    self.password = self.module.params['auth_pwd'] or None
    self.auth_mode = self.module.params['auth_mode'] or None
    self.auth_type = self.module.params['auth_type']
    self.trusted_key = self.module.params['trusted_key']
    self.authentication = self.module.params['authentication'] or None
    self.state = self.module.params['state']
    self.check_params()
    # populated by get_ntp_auth_exist_config()
    self.ntp_auth_conf = dict()
    self.key_id_exist = False
    self.cur_trusted_key = 'disable'
    # state
    self.changed = False
    self.updates_cmd = list()
    self.results = dict()
    self.proposed = dict()
    self.existing = list()
    self.end_state = list()
    self.get_ntp_auth_exist_config()
def check_params(self):
    """Validate key_id and password arguments; fail_json on any violation."""
    if not self.key_id.isdigit():
        self.module.fail_json(
            msg='Error: key_id is not digit.')
    key_num = int(self.key_id)
    if key_num < 1 or key_num > 4294967295:
        self.module.fail_json(
            msg='Error: The length of key_id is between 1 and 4294967295.')
    if self.state != "present":
        return
    # Password length limits depend on whether it is pre-encrypted text.
    if self.auth_type == 'encrypt' and not 20 <= len(self.password) <= 392:
        self.module.fail_json(
            msg='Error: The length of encrypted password is between 20 and 392.')
    elif self.auth_type == 'text' and not 1 <= len(self.password) <= 255:
        self.module.fail_json(
            msg='Error: The length of text password is between 1 and 255.')
def init_module(self):
    """Build the AnsibleModule object.

    Bug fix: ``required_if`` previously referenced a non-existent option
    named "password"; the actual option is "auth_pwd" (see __init__, which
    reads ``params['auth_pwd']``, and the DOCUMENTATION block). With the
    wrong name the requirement could never be satisfied for
    ``state=present``.
    """
    required_if = [("state", "present", ("auth_pwd", "auth_mode"))]
    self.module = AnsibleModule(
        argument_spec=self.spec,
        required_if=required_if,
        supports_check_mode=True
    )
def check_response(self, xml_str, xml_name):
    """Fail the module unless the NETCONF reply contains <ok/>."""
    if "<ok/>" in xml_str:
        return
    self.module.fail_json(msg='Error: %s failed.' % xml_name)
def get_ntp_auth_enable(self):
    """Read whether NTP authentication is globally enabled and record it
    in ``self.ntp_auth_conf['authentication']`` ('enable'/'disable')."""
    reply = get_nc_config(self.module, CE_NC_GET_NTP_AUTH_ENABLE)
    if "<data/>" in reply:
        # Empty data element: nothing configured, leave the dict untouched.
        return
    match = re.search(r'.*<isAuthEnable>(.*)</isAuthEnable>.*', reply)
    if match:
        enabled = match.group(1) == 'true'
        self.ntp_auth_conf['authentication'] = 'enable' if enabled else 'disable'
def get_ntp_all_auth_keyid(self):
    """Collect every configured authentication key.

    Fills ``self.ntp_auth_conf['authentication-keyid']`` and, when the
    requested ``self.key_id`` is found, records its presence and current
    trusted state on the instance. Returns the list of key dicts.
    """
    auth_keys = list()
    reply = get_nc_config(self.module, CE_NC_GET_ALL_NTP_AUTH_CONFIG)
    if "<data/>" in reply:
        self.ntp_auth_conf["authentication-keyid"] = "None"
        return auth_keys
    # Each match: (keyId, mode, keyVal, isReliable)
    pattern = (r'.*<keyId>(.*)</keyId>.*\s*<mode>(.*)</mode>.*\s*'
               r'<keyVal>(.*)</keyVal>.*\s*<isReliable>(.*)</isReliable>.*')
    for key_id, mode, _, reliable in re.findall(pattern, reply):
        trusted = 'enable' if reliable == 'true' else 'disable'
        if key_id == self.key_id:
            self.key_id_exist = True
            self.cur_trusted_key = trusted
        auth_keys.append(dict(key_id=key_id,
                              auth_mode=mode.lower(),
                              trusted_key=trusted))
    self.ntp_auth_conf["authentication-keyid"] = auth_keys
    return auth_keys
def get_ntp_auth_exist_config(self):
    """Snapshot the device's existing NTP auth config: the global
    authentication switch and all configured key ids."""
    self.get_ntp_auth_enable()
    self.get_ntp_all_auth_keyid()
def config_ntp_auth_keyid(self):
    """Merge the authentication key (id, mode, password, trust flag) via NETCONF."""
    # <isReliable> expects the strings 'true'/'false'.
    reliable = 'true' if self.trusted_key == 'enable' else 'false'
    xml_str = CE_NC_MERGE_NTP_AUTH_CONFIG % (
        self.key_id, self.auth_mode.upper(), self.password, reliable)
    reply = set_nc_config(self.module, xml_str)
    self.check_response(reply, "NTP_AUTH_KEYID_CONFIG")
def config_ntp_auth_enable(self):
    """Toggle global NTP authentication when the desired state differs
    from what is currently configured."""
    if self.ntp_auth_conf['authentication'] == self.authentication:
        return
    flag = 'true' if self.authentication == 'enable' else 'false'
    reply = set_nc_config(self.module, CE_NC_MERGE_NTP_AUTH_ENABLE % flag)
    self.check_response(reply, "NTP_AUTH_ENABLE")
def undo_config_ntp_auth_keyid(self):
    """Delete the authentication key identified by ``self.key_id`` via NETCONF."""
    reply = set_nc_config(self.module,
                          CE_NC_DELETE_NTP_AUTH_CONFIG % self.key_id)
    self.check_response(reply, "UNDO_NTP_AUTH_KEYID_CONFIG")
def cli_load_config(self, commands):
    """Push CLI commands to the device, honoring Ansible check mode."""
    if self.module.check_mode:
        return
    load_config(self.module, commands)
def config_ntp_auth_keyid_by_cli(self):
"""Config ntp authentication keyid bye the way of CLI"""
|
[
" commands = list()"
] | 1,208
|
lcc
|
python
| null |
fec6e895c63341559f9f9bba6ca62f48771c6ee896904ae7
|
|
# pylint: disable=no-member
"""
Unit tests for the Mixed Modulestore, with DDT for the various stores (Split, Draft, XML)
"""
from collections import namedtuple
import datetime
import logging
import ddt
import itertools
import mimetypes
from unittest import skip
from uuid import uuid4
from contextlib import contextmanager
from mock import patch
# Mixed modulestore depends on django, so we'll manually configure some django settings
# before importing the module
# TODO remove this import and the configuration -- xmodule should not depend on django!
from django.conf import settings
# This import breaks this test file when run separately. Needs to be fixed! (PLAT-449)
from mock_django import mock_signal_receiver
from nose.plugins.attrib import attr
import pymongo
from pytz import UTC
from shutil import rmtree
from tempfile import mkdtemp
from xmodule.x_module import XModuleMixin
from xmodule.modulestore.edit_info import EditInfoMixin
from xmodule.modulestore.inheritance import InheritanceMixin
from xmodule.modulestore.tests.test_cross_modulestore_import_export import MongoContentstoreBuilder
from xmodule.contentstore.content import StaticContent
from opaque_keys.edx.keys import CourseKey
from xmodule.modulestore.xml_importer import import_course_from_xml
from xmodule.modulestore.xml_exporter import export_course_to_xml
from xmodule.modulestore.django import SignalHandler
if not settings.configured:
settings.configure()
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from opaque_keys.edx.locator import BlockUsageLocator, CourseLocator, LibraryLocator
from xmodule.exceptions import InvalidVersionError
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.draft_and_published import UnsupportedRevisionError, DIRECT_ONLY_CATEGORIES
from xmodule.modulestore.exceptions import ItemNotFoundError, DuplicateCourseError, ReferentialIntegrityError, NoPathToItem
from xmodule.modulestore.mixed import MixedModuleStore
from xmodule.modulestore.search import path_to_location, navigation_index
from xmodule.modulestore.tests.factories import check_mongo_calls, check_exact_number_of_calls, \
mongo_uses_error_check
from xmodule.modulestore.tests.utils import create_modulestore_instance, LocationMixin, mock_tab_from_json
from xmodule.modulestore.tests.mongo_connection import MONGO_PORT_NUM, MONGO_HOST
from xmodule.tests import DATA_DIR, CourseComparisonTest
log = logging.getLogger(__name__)
class CommonMixedModuleStoreSetup(CourseComparisonTest):
"""
Quasi-superclass which tests Location based apps against both split and mongo dbs (Locator and
Location-based dbs)
"""
HOST = MONGO_HOST
PORT = MONGO_PORT_NUM
DB = 'test_mongo_%s' % uuid4().hex[:5]
COLLECTION = 'modulestore'
ASSET_COLLECTION = 'assetstore'
FS_ROOT = DATA_DIR
DEFAULT_CLASS = 'xmodule.raw_module.RawDescriptor'
RENDER_TEMPLATE = lambda t_n, d, ctx=None, nsp='main': ''
MONGO_COURSEID = 'MITx/999/2013_Spring'
XML_COURSEID1 = 'edX/toy/2012_Fall'
XML_COURSEID2 = 'edX/simple/2012_Fall'
BAD_COURSE_ID = 'edX/simple'
modulestore_options = {
'default_class': DEFAULT_CLASS,
'fs_root': DATA_DIR,
'render_template': RENDER_TEMPLATE,
'xblock_mixins': (EditInfoMixin, InheritanceMixin, LocationMixin, XModuleMixin),
}
DOC_STORE_CONFIG = {
'host': HOST,
'port': PORT,
'db': DB,
'collection': COLLECTION,
'asset_collection': ASSET_COLLECTION,
}
MAPPINGS = {
XML_COURSEID1: 'xml',
XML_COURSEID2: 'xml',
BAD_COURSE_ID: 'xml',
}
OPTIONS = {
'stores': [
{
'NAME': 'draft',
'ENGINE': 'xmodule.modulestore.mongo.draft.DraftModuleStore',
'DOC_STORE_CONFIG': DOC_STORE_CONFIG,
'OPTIONS': modulestore_options
},
{
'NAME': 'split',
'ENGINE': 'xmodule.modulestore.split_mongo.split_draft.DraftVersioningModuleStore',
'DOC_STORE_CONFIG': DOC_STORE_CONFIG,
'OPTIONS': modulestore_options
},
{
'NAME': 'xml',
'ENGINE': 'xmodule.modulestore.xml.XMLModuleStore',
'OPTIONS': {
'data_dir': DATA_DIR,
'default_class': 'xmodule.hidden_module.HiddenDescriptor',
'xblock_mixins': modulestore_options['xblock_mixins'],
}
},
],
'xblock_mixins': modulestore_options['xblock_mixins'],
}
def _compare_ignore_version(self, loc1, loc2, msg=None):
    """
    AssertEqual replacement for CourseLocator that ignores the branch/version part.
    """
    if loc1.for_branch(None) == loc2.for_branch(None):
        return
    self.fail(self._formatMessage(msg, u"{} != {}".format(unicode(loc1), unicode(loc2))))
def setUp(self):
    """
    Set up the database for testing
    """
    super(CommonMixedModuleStoreSetup, self).setUp()
    # Fields and asset keys that legitimately differ between stores are
    # excluded from the course-comparison assertions.
    self.exclude_field(None, 'wiki_slug')
    self.exclude_field(None, 'xml_attributes')
    self.exclude_field(None, 'parent')
    self.ignore_asset_key('_id')
    self.ignore_asset_key('uploadDate')
    self.ignore_asset_key('content_son')
    self.ignore_asset_key('thumbnail_location')
    # Subclasses may override `options`; fall back to the class-level OPTIONS.
    self.options = getattr(self, 'options', self.OPTIONS)
    self.connection = pymongo.MongoClient(
        host=self.HOST,
        port=self.PORT,
        tz_aware=True,
    )
    # Start from an empty database and guarantee cleanup afterwards.
    self.connection.drop_database(self.DB)
    self.addCleanup(self.connection.drop_database, self.DB)
    self.addCleanup(self.connection.close)
    # Locator equality in assertEqual should ignore the version/branch part.
    self.addTypeEqualityFunc(BlockUsageLocator, '_compare_ignore_version')
    self.addTypeEqualityFunc(CourseLocator, '_compare_ignore_version')
    # define attrs which get set in initdb to quell pylint
    self.writable_chapter_location = self.store = self.fake_location = self.xml_chapter_location = None
    self.course_locations = {}
    self.user_id = ModuleStoreEnum.UserID.test
# pylint: disable=invalid-name
def _create_course(self, course_key):
    """
    Create a course w/ one item in the persistence store using the given course & item location.
    """
    # create course
    # NOTE(review): indentation was lost in this copy; the statements below are
    # assumed to run inside the bulk_operations block — confirm against upstream.
    with self.store.bulk_operations(course_key):
        self.course = self.store.create_course(course_key.org, course_key.course, course_key.run, self.user_id)
        if isinstance(self.course.id, CourseLocator):
            # Split/locator stores may normalize the key; remember the real location.
            self.course_locations[self.MONGO_COURSEID] = self.course.location
        else:
            self.assertEqual(self.course.id, course_key)
        # create chapter
        chapter = self.store.create_child(self.user_id, self.course.location, 'chapter', block_id='Overview')
        self.writable_chapter_location = chapter.location
def _create_block_hierarchy(self):
    """
    Creates a hierarchy of blocks for testing
    Each block's (version_agnostic) location is assigned as a field of the class and can be easily accessed
    """
    # (field name, category, display name, children)
    BlockInfo = namedtuple('BlockInfo', 'field_name, category, display_name, sub_tree')
    trees = [
        BlockInfo(
            'chapter_x', 'chapter', 'Chapter_x', [
                BlockInfo(
                    'sequential_x1', 'sequential', 'Sequential_x1', [
                        BlockInfo(
                            'vertical_x1a', 'vertical', 'Vertical_x1a', [
                                BlockInfo('problem_x1a_1', 'problem', 'Problem_x1a_1', []),
                                BlockInfo('problem_x1a_2', 'problem', 'Problem_x1a_2', []),
                                BlockInfo('problem_x1a_3', 'problem', 'Problem_x1a_3', []),
                                BlockInfo('html_x1a_1', 'html', 'HTML_x1a_1', []),
                            ]
                        ),
                        BlockInfo(
                            'vertical_x1b', 'vertical', 'Vertical_x1b', []
                        )
                    ]
                ),
                BlockInfo(
                    'sequential_x2', 'sequential', 'Sequential_x2', []
                )
            ]
        ),
        BlockInfo(
            'chapter_y', 'chapter', 'Chapter_y', [
                BlockInfo(
                    'sequential_y1', 'sequential', 'Sequential_y1', [
                        BlockInfo(
                            'vertical_y1a', 'vertical', 'Vertical_y1a', [
                                BlockInfo('problem_y1a_1', 'problem', 'Problem_y1a_1', []),
                                BlockInfo('problem_y1a_2', 'problem', 'Problem_y1a_2', []),
                                BlockInfo('problem_y1a_3', 'problem', 'Problem_y1a_3', []),
                            ]
                        )
                    ]
                )
            ]
        )
    ]

    def create_sub_tree(parent, block_info):
        """
        recursive function that creates the given block and its descendants
        """
        block = self.store.create_child(
            self.user_id, parent.location,
            block_info.category, block_id=block_info.display_name,
            fields={'display_name': block_info.display_name},
        )
        for tree in block_info.sub_tree:
            create_sub_tree(block, tree)
        # Expose the created block's location as e.g. self.chapter_x.
        setattr(self, block_info.field_name, block.location)

    with self.store.bulk_operations(self.course.id):
        for tree in trees:
            create_sub_tree(self.course, tree)
def _course_key_from_string(self, string):
"""
Get the course key for the given course string
"""
return self.course_locations[string].course_key
def _has_changes(self, location):
"""
Helper function that loads the item before calling has_changes
"""
return self.store.has_changes(self.store.get_item(location))
    # pylint: disable=dangerous-default-value
    def _initialize_mixed(self, mappings=MAPPINGS, contentstore=None):
        """
        initializes the mixed modulestore.

        ``mappings`` defaults to the module-level MAPPINGS; the pylint disable
        above acknowledges the shared mutable default (assumed not mutated
        through this path — TODO confirm MixedModuleStore copies it).
        """
        self.store = MixedModuleStore(
            contentstore, create_modulestore_instance=create_modulestore_instance,
            mappings=mappings,
            **self.options
        )
        # ensure connections are released even if the test fails
        self.addCleanup(self.store.close_all_connections)
def initdb(self, default):
"""
Initialize the database and create one test course in it
"""
# set the default modulestore
store_configs = self.options['stores']
for index in range(len(store_configs)):
if store_configs[index]['NAME'] == default:
if index > 0:
store_configs[index], store_configs[0] = store_configs[0], store_configs[index]
break
self._initialize_mixed()
# convert to CourseKeys
self.course_locations = {
course_id: CourseLocator.from_string(course_id)
for course_id in [self.MONGO_COURSEID, self.XML_COURSEID1, self.XML_COURSEID2]
}
# and then to the root UsageKey
self.course_locations = {
course_id: course_key.make_usage_key('course', course_key.run)
for course_id, course_key in self.course_locations.iteritems() # pylint: disable=maybe-no-member
}
mongo_course_key = self.course_locations[self.MONGO_COURSEID].course_key
self.fake_location = self.store.make_course_key(mongo_course_key.org, mongo_course_key.course, mongo_course_key.run).make_usage_key('vertical', 'fake')
self.xml_chapter_location = self.course_locations[self.XML_COURSEID1].replace(
category='chapter', name='Overview'
)
self._create_course(self.course_locations[self.MONGO_COURSEID].course_key)
@ddt.ddt
@attr('mongo')
class TestMixedModuleStore(CommonMixedModuleStoreSetup):
"""
Tests of the MixedModulestore interface methods.
"""
@ddt.data('draft', 'split')
def test_get_modulestore_type(self, default_ms):
"""
Make sure we get back the store type we expect for given mappings
"""
self.initdb(default_ms)
self.assertEqual(self.store.get_modulestore_type(
self._course_key_from_string(self.XML_COURSEID1)), ModuleStoreEnum.Type.xml
)
self.assertEqual(self.store.get_modulestore_type(
self._course_key_from_string(self.XML_COURSEID2)), ModuleStoreEnum.Type.xml
)
mongo_ms_type = ModuleStoreEnum.Type.mongo if default_ms == 'draft' else ModuleStoreEnum.Type.split
self.assertEqual(self.store.get_modulestore_type(
self._course_key_from_string(self.MONGO_COURSEID)), mongo_ms_type
)
# try an unknown mapping, it should be the 'default' store
self.assertEqual(self.store.get_modulestore_type(
SlashSeparatedCourseKey('foo', 'bar', '2012_Fall')), mongo_ms_type
)
    @ddt.data('draft', 'split')
    def test_get_modulestore_cache(self, default_ms):
        """
        Make sure we cache discovered course mappings
        """
        self.initdb(default_ms)
        # unset mappings
        self.store.mappings = {}
        course_key = self.course_locations[self.MONGO_COURSEID].course_key
        # only the first lookup may hit the store; the second must be served from
        # the mappings cache, hence exactly one 'has_course' call in this scope
        with check_exact_number_of_calls(self.store.default_modulestore, 'has_course', 1):
            self.assertEqual(self.store.default_modulestore, self.store._get_modulestore_for_courselike(course_key))  # pylint: disable=protected-access
            self.assertIn(course_key, self.store.mappings)
            self.assertEqual(self.store.default_modulestore, self.store._get_modulestore_for_courselike(course_key))  # pylint: disable=protected-access
    @ddt.data(*itertools.product(
        (ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split),
        (True, False)
    ))
    @ddt.unpack
    def test_duplicate_course_error(self, default_ms, reset_mixed_mappings):
        """
        Make sure creating a course that already exists raises DuplicateCourseError,
        regardless of whether the mixed store's cached mappings have been cleared.
        """
        self._initialize_mixed(mappings={})
        with self.store.default_store(default_ms):
            self.store.create_course('org_x', 'course_y', 'run_z', self.user_id)
            if reset_mixed_mappings:
                # clearing the cache must not allow a duplicate: the underlying
                # store still detects the existing course
                self.store.mappings = {}
            with self.assertRaises(DuplicateCourseError):
                self.store.create_course('org_x', 'course_y', 'run_z', self.user_id)
# Draft:
# problem: One lookup to locate an item that exists
# fake: one w/ wildcard version
# split has one lookup for the course and then one for the course items
    @ddt.data(('draft', [1, 1], 0), ('split', [2, 2], 0))
    @ddt.unpack
    def test_has_item(self, default_ms, max_find, max_send):
        """
        has_item should find existing items (xml and writable), reject missing
        ones, and stay within the expected mongo query budget (max_find/max_send).
        """
        self.initdb(default_ms)
        self._create_block_hierarchy()
        self.assertTrue(self.store.has_item(self.course_locations[self.XML_COURSEID1]))
        with check_mongo_calls(max_find.pop(0), max_send):
            self.assertTrue(self.store.has_item(self.problem_x1a_1))
        # try negative cases
        self.assertFalse(self.store.has_item(
            self.course_locations[self.XML_COURSEID1].replace(name='not_findable', category='problem')
        ))
        with check_mongo_calls(max_find.pop(0), max_send):
            self.assertFalse(self.store.has_item(self.fake_location))
        # verify that an error is raised when the revision is not valid
        with self.assertRaises(UnsupportedRevisionError):
            self.store.has_item(self.fake_location, revision=ModuleStoreEnum.RevisionOption.draft_preferred)
# draft queries:
# problem: find draft item, find all items pertinent to inheritance computation, find parent
# non-existent problem: find draft, find published
# split:
# problem: active_versions, structure
# non-existent problem: ditto
    @ddt.data(('draft', [3, 2], 0), ('split', [2, 2], 0))
    @ddt.unpack
    def test_get_item(self, default_ms, max_find, max_send):
        """
        get_item should return existing items, raise ItemNotFoundError for
        missing ones, and stay within the expected mongo query budget.
        """
        self.initdb(default_ms)
        self._create_block_hierarchy()
        self.assertIsNotNone(self.store.get_item(self.course_locations[self.XML_COURSEID1]))
        with check_mongo_calls(max_find.pop(0), max_send):
            self.assertIsNotNone(self.store.get_item(self.problem_x1a_1))
        # try negative cases
        with self.assertRaises(ItemNotFoundError):
            self.store.get_item(
                self.course_locations[self.XML_COURSEID1].replace(name='not_findable', category='problem')
            )
        with check_mongo_calls(max_find.pop(0), max_send):
            with self.assertRaises(ItemNotFoundError):
                self.store.get_item(self.fake_location)
        # verify that an error is raised when the revision is not valid
        with self.assertRaises(UnsupportedRevisionError):
            self.store.get_item(self.fake_location, revision=ModuleStoreEnum.RevisionOption.draft_preferred)
# Draft:
# wildcard query, 6! load pertinent items for inheritance calls, load parents, course root fetch (why)
# Split:
# active_versions (with regex), structure, and spurious active_versions refetch
    @ddt.data(('draft', 14, 0), ('split', 3, 0))
    @ddt.unpack
    def test_get_items(self, default_ms, max_find, max_send):
        """
        get_items with a category qualifier should find all matching blocks in
        both xml and writable stores within the expected query budget.
        """
        self.initdb(default_ms)
        self._create_block_hierarchy()
        course_locn = self.course_locations[self.XML_COURSEID1]
        # NOTE: use get_course if you just want the course. get_items is expensive
        modules = self.store.get_items(course_locn.course_key, qualifiers={'category': 'course'})
        self.assertEqual(len(modules), 1)
        self.assertEqual(modules[0].location, course_locn)
        course_locn = self.course_locations[self.MONGO_COURSEID]
        with check_mongo_calls(max_find, max_send):
            # _create_block_hierarchy creates 6 problems in the writable course
            modules = self.store.get_items(course_locn.course_key, qualifiers={'category': 'problem'})
            self.assertEqual(len(modules), 6)
        # verify that an error is raised when the revision is not valid
        with self.assertRaises(UnsupportedRevisionError):
            self.store.get_items(
                self.course_locations[self.MONGO_COURSEID].course_key,
                revision=ModuleStoreEnum.RevisionOption.draft_preferred
            )
# draft: get draft, get ancestors up to course (2-6), compute inheritance
# sends: update problem and then each ancestor up to course (edit info)
# split: active_versions, definitions (calculator field), structures
# 2 sends to update index & structure (note, it would also be definition if a content field changed)
    @ddt.data(('draft', 7, 5), ('split', 3, 2))
    @ddt.unpack
    def test_update_item(self, default_ms, max_find, max_send):
        """
        Update should fail for r/o dbs and succeed for r/w ones
        """
        self.initdb(default_ms)
        self._create_block_hierarchy()
        course = self.store.get_course(self.course_locations[self.XML_COURSEID1].course_key)
        # if following raised, then the test is really a noop, change it
        self.assertFalse(course.show_calculator, "Default changed making test meaningless")
        course.show_calculator = True
        # the xml store is read-only, so the write must be rejected
        with self.assertRaises(NotImplementedError):  # ensure it doesn't allow writing
            self.store.update_item(course, self.user_id)
        # now do it for a r/w db
        problem = self.store.get_item(self.problem_x1a_1)
        # if following raised, then the test is really a noop, change it
        self.assertNotEqual(problem.max_attempts, 2, "Default changed making test meaningless")
        problem.max_attempts = 2
        with check_mongo_calls(max_find, max_send):
            problem = self.store.update_item(problem, self.user_id)
        self.assertEqual(problem.max_attempts, 2, "Update didn't persist")
@ddt.data('draft', 'split')
def test_has_changes_direct_only(self, default_ms):
"""
Tests that has_changes() returns false when a new xblock in a direct only category is checked
"""
self.initdb(default_ms)
test_course = self.store.create_course('testx', 'GreekHero', 'test_run', self.user_id)
# Create dummy direct only xblocks
chapter = self.store.create_item(
self.user_id,
test_course.id,
'chapter',
block_id='vertical_container'
)
# Check that neither xblock has changes
self.assertFalse(self.store.has_changes(test_course))
self.assertFalse(self.store.has_changes(chapter))
@ddt.data('draft', 'split')
def test_has_changes(self, default_ms):
"""
Tests that has_changes() only returns true when changes are present
"""
self.initdb(default_ms)
test_course = self.store.create_course('testx', 'GreekHero', 'test_run', self.user_id)
# Create a dummy component to test against
xblock = self.store.create_item(
self.user_id,
test_course.id,
'vertical',
block_id='test_vertical'
)
# Not yet published, so changes are present
self.assertTrue(self.store.has_changes(xblock))
# Publish and verify that there are no unpublished changes
newXBlock = self.store.publish(xblock.location, self.user_id)
self.assertFalse(self.store.has_changes(newXBlock))
# Change the component, then check that there now are changes
component = self.store.get_item(xblock.location)
component.display_name = 'Changed Display Name'
component = self.store.update_item(component, self.user_id)
self.assertTrue(self.store.has_changes(component))
# Publish and verify again
component = self.store.publish(component.location, self.user_id)
self.assertFalse(self.store.has_changes(component))
    @ddt.data('draft', 'split')
    def test_unit_stuck_in_draft_mode(self, default_ms):
        """
        After revert_to_published() the has_changes() should return false if draft has no changes
        """
        self.initdb(default_ms)
        test_course = self.store.create_course('testx', 'GreekHero', 'test_run', self.user_id)
        # Create a dummy component to test against
        xblock = self.store.create_item(
            self.user_id,
            test_course.id,
            'vertical',
            block_id='test_vertical'
        )
        # Not yet published, so changes are present
        self.assertTrue(self.store.has_changes(xblock))
        # Publish and verify that there are no unpublished changes
        component = self.store.publish(xblock.location, self.user_id)
        self.assertFalse(self.store.has_changes(component))
        # reverting an unchanged draft must not leave it flagged as changed
        self.store.revert_to_published(component.location, self.user_id)
        component = self.store.get_item(component.location)
        self.assertFalse(self.store.has_changes(component))
        # Publish and verify again
        component = self.store.publish(component.location, self.user_id)
        self.assertFalse(self.store.has_changes(component))
    @ddt.data('draft', 'split')
    def test_unit_stuck_in_published_mode(self, default_ms):
        """
        After revert_to_published() the has_changes() should return true if draft has changes
        """
        self.initdb(default_ms)
        test_course = self.store.create_course('testx', 'GreekHero', 'test_run', self.user_id)
        # Create a dummy component to test against
        xblock = self.store.create_item(
            self.user_id,
            test_course.id,
            'vertical',
            block_id='test_vertical'
        )
        # Not yet published, so changes are present
        self.assertTrue(self.store.has_changes(xblock))
        # Publish and verify that there are no unpublished changes
        component = self.store.publish(xblock.location, self.user_id)
        self.assertFalse(self.store.has_changes(component))
        # Discard changes and verify that there are no changes
        self.store.revert_to_published(component.location, self.user_id)
        component = self.store.get_item(component.location)
        self.assertFalse(self.store.has_changes(component))
        # Change the component, then check that there now are changes
        component = self.store.get_item(component.location)
        component.display_name = 'Changed Display Name'
        self.store.update_item(component, self.user_id)
        # Verify that changes are present (the block must not be stuck published)
        self.assertTrue(self.store.has_changes(component))
    def setup_has_changes(self, default_ms):
        """
        Common set up for has_changes tests below.
        Returns a dictionary of useful location maps for testing.

        Keys describe each block's relation to 'child' (vertical_x1a):
        grandparent/parent are its ancestors, *_sibling are same-level peers.
        """
        self.initdb(default_ms)
        self._create_block_hierarchy()
        locations = {
            'grandparent': self.chapter_x,
            'parent_sibling': self.sequential_x2,
            'parent': self.sequential_x1,
            'child_sibling': self.vertical_x1b,
            'child': self.vertical_x1a,
        }
        # Publish the vertical units so tests start from a no-changes baseline
        self.store.publish(locations['parent_sibling'], self.user_id)
        self.store.publish(locations['parent'], self.user_id)
        return locations
    @ddt.data('draft', 'split')
    def test_has_changes_ancestors(self, default_ms):
        """
        Tests that has_changes() returns true on ancestors when a child is changed
        """
        locations = self.setup_has_changes(default_ms)
        # Verify that there are no unpublished changes
        for key in locations:
            self.assertFalse(self._has_changes(locations[key]))
        # Change the child
        child = self.store.get_item(locations['child'])
        child.display_name = 'Changed Display Name'
        self.store.update_item(child, self.user_id)
        # All ancestors should have changes, but not siblings
        self.assertTrue(self._has_changes(locations['grandparent']))
        self.assertTrue(self._has_changes(locations['parent']))
        self.assertTrue(self._has_changes(locations['child']))
        self.assertFalse(self._has_changes(locations['parent_sibling']))
        self.assertFalse(self._has_changes(locations['child_sibling']))
        # Publish the unit with changes
        self.store.publish(locations['parent'], self.user_id)
        # Verify that there are no unpublished changes anywhere in the tree
        for key in locations:
            self.assertFalse(self._has_changes(locations[key]))
    @ddt.data('draft', 'split')
    def test_has_changes_publish_ancestors(self, default_ms):
        """
        Tests that has_changes() returns false after a child is published only if all children are unchanged
        """
        locations = self.setup_has_changes(default_ms)
        # Verify that there are no unpublished changes
        for key in locations:
            self.assertFalse(self._has_changes(locations[key]))
        # Change both children
        child = self.store.get_item(locations['child'])
        child_sibling = self.store.get_item(locations['child_sibling'])
        child.display_name = 'Changed Display Name'
        child_sibling.display_name = 'Changed Display Name'
        self.store.update_item(child, user_id=self.user_id)
        self.store.update_item(child_sibling, user_id=self.user_id)
        # Verify that ancestors have changes
        self.assertTrue(self._has_changes(locations['grandparent']))
        self.assertTrue(self._has_changes(locations['parent']))
        # Publish one child
        self.store.publish(locations['child_sibling'], self.user_id)
        # Verify that ancestors still have changes (the other child is still dirty)
        self.assertTrue(self._has_changes(locations['grandparent']))
        self.assertTrue(self._has_changes(locations['parent']))
        # Publish the other child
        self.store.publish(locations['child'], self.user_id)
        # Verify that ancestors now have no changes
        self.assertFalse(self._has_changes(locations['grandparent']))
        self.assertFalse(self._has_changes(locations['parent']))
    @ddt.data('draft', 'split')
    def test_has_changes_add_remove_child(self, default_ms):
        """
        Tests that has_changes() returns true for the parent when a child with changes is added
        and false when that child is removed.
        """
        locations = self.setup_has_changes(default_ms)
        # Test that the ancestors don't have changes
        self.assertFalse(self._has_changes(locations['grandparent']))
        self.assertFalse(self._has_changes(locations['parent']))
        # Create a new child and attach it to parent (new children start unpublished)
        self.store.create_child(
            self.user_id,
            locations['parent'],
            'vertical',
            block_id='new_child',
        )
        # Verify that the ancestors now have changes
        self.assertTrue(self._has_changes(locations['grandparent']))
        self.assertTrue(self._has_changes(locations['parent']))
        # Remove the child from the parent by resetting children to the originals
        parent = self.store.get_item(locations['parent'])
        parent.children = [locations['child'], locations['child_sibling']]
        self.store.update_item(parent, user_id=self.user_id)
        # Verify that ancestors now have no changes
        self.assertFalse(self._has_changes(locations['grandparent']))
        self.assertFalse(self._has_changes(locations['parent']))
    @ddt.data('draft', 'split')
    def test_has_changes_non_direct_only_children(self, default_ms):
        """
        Tests that has_changes() returns true after editing the child of a vertical (both not direct only categories).
        """
        self.initdb(default_ms)
        parent = self.store.create_item(
            self.user_id,
            self.course.id,
            'vertical',
            block_id='parent',
        )
        child = self.store.create_child(
            self.user_id,
            parent.location,
            'html',
            block_id='child',
        )
        # publishing the parent publishes the child along with it
        self.store.publish(parent.location, self.user_id)
        # Verify that there are no changes
        self.assertFalse(self._has_changes(parent.location))
        self.assertFalse(self._has_changes(child.location))
        # Change the child
        child.display_name = 'Changed Display Name'
        self.store.update_item(child, user_id=self.user_id)
        # Verify that both parent and child have changes
        self.assertTrue(self._has_changes(parent.location))
        self.assertTrue(self._has_changes(child.location))
    @ddt.data(*itertools.product(
        ('draft', 'split'),
        (ModuleStoreEnum.Branch.draft_preferred, ModuleStoreEnum.Branch.published_only)
    ))
    @ddt.unpack
    def test_has_changes_missing_child(self, default_ms, default_branch):
        """
        Tests that has_changes() does not throw an exception when a child doesn't exist.
        """
        self.initdb(default_ms)
        with self.store.branch_setting(default_branch, self.course.id):
            # Create the parent and point it to a fake child
            parent = self.store.create_item(
                self.user_id,
                self.course.id,
                'vertical',
                block_id='parent',
            )
            # deliberately dangling child reference
            parent.children += [self.course.id.make_usage_key('vertical', 'does_not_exist')]
            parent = self.store.update_item(parent, self.user_id)
            # Check the parent for changes should return True and not throw an exception
            self.assertTrue(self.store.has_changes(parent))
# Draft
# Find: find parents (definition.children query), get parent, get course (fill in run?),
# find parents of the parent (course), get inheritance items,
# get item (to delete subtree), get inheritance again.
# Sends: delete item, update parent
# Split
# Find: active_versions, 2 structures (published & draft), definition (unnecessary)
# Sends: updated draft and published structures and active_versions
    @ddt.data(('draft', 7, 2), ('split', 4, 3))
    @ddt.unpack
    def test_delete_item(self, default_ms, max_find, max_send):
        """
        Delete should reject on r/o db and work on r/w one
        """
        self.initdb(default_ms)
        if default_ms == 'draft' and mongo_uses_error_check(self.store):
            # error-checking mongo issues one extra find
            max_find += 1
        # r/o try deleting the chapter (is here to ensure it can't be deleted)
        with self.assertRaises(NotImplementedError):
            self.store.delete_item(self.xml_chapter_location, self.user_id)
        with self.store.branch_setting(ModuleStoreEnum.Branch.draft_preferred, self.writable_chapter_location.course_key):
            with check_mongo_calls(max_find, max_send):
                self.store.delete_item(self.writable_chapter_location, self.user_id)
            # verify it's gone
            with self.assertRaises(ItemNotFoundError):
                self.store.get_item(self.writable_chapter_location)
        # verify it's gone from published too
        with self.assertRaises(ItemNotFoundError):
            self.store.get_item(self.writable_chapter_location, revision=ModuleStoreEnum.RevisionOption.published_only)
# Draft:
# queries: find parent (definition.children), count versions of item, get parent, count grandparents,
# inheritance items, draft item, draft child, inheritance
# sends: delete draft vertical and update parent
# Split:
# queries: active_versions, draft and published structures, definition (unnecessary)
# sends: update published (why?), draft, and active_versions
    @ddt.data(('draft', 9, 2), ('split', 2, 2))
    @ddt.unpack
    def test_delete_private_vertical(self, default_ms, max_find, max_send):
        """
        Because old mongo treated verticals as the first layer which could be draft, it has some interesting
        behavioral properties which this deletion test gets at.
        """
        self.initdb(default_ms)
        if default_ms == 'draft' and mongo_uses_error_check(self.store):
            # error-checking mongo issues one extra find
            max_find += 1
        # create and delete a private vertical with private children
        private_vert = self.store.create_child(
            # don't use course_location as it may not be the repr
            self.user_id, self.course_locations[self.MONGO_COURSEID],
            'vertical', block_id='private'
        )
        private_leaf = self.store.create_child(
            # don't use course_location as it may not be the repr
            self.user_id, private_vert.location, 'html', block_id='private_leaf'
        )
        # verify pre delete state (just to verify that the test is valid)
        if hasattr(private_vert.location, 'version_guid'):
            # split keys are versioned: change to the HEAD version
            vert_loc = private_vert.location.for_version(private_leaf.location.version_guid)
        else:
            vert_loc = private_vert.location
        self.assertTrue(self.store.has_item(vert_loc))
        self.assertTrue(self.store.has_item(private_leaf.location))
        course = self.store.get_course(self.course_locations[self.MONGO_COURSEID].course_key, 0)
        self.assertIn(vert_loc, course.children)
        # delete the vertical and ensure the course no longer points to it
        with check_mongo_calls(max_find, max_send):
            self.store.delete_item(vert_loc, self.user_id)
        course = self.store.get_course(self.course_locations[self.MONGO_COURSEID].course_key, 0)
        if hasattr(private_vert.location, 'version_guid'):
            # change to the HEAD version (deletion created a new course version)
            vert_loc = private_vert.location.for_version(course.location.version_guid)
            leaf_loc = private_leaf.location.for_version(course.location.version_guid)
        else:
            vert_loc = private_vert.location
            leaf_loc = private_leaf.location
        # both the vertical and its descendant must be gone
        self.assertFalse(self.store.has_item(vert_loc))
        self.assertFalse(self.store.has_item(leaf_loc))
        self.assertNotIn(vert_loc, course.children)
# Draft:
# find: find parent (definition.children) 2x, find draft item, get inheritance items
# send: one delete query for specific item
# Split:
# find: active_version & structure (cached)
# send: update structure and active_versions
    @ddt.data(('draft', 4, 1), ('split', 2, 2))
    @ddt.unpack
    def test_delete_draft_vertical(self, default_ms, max_find, max_send):
        """
        Test deleting a draft vertical which has a published version.
        """
        self.initdb(default_ms)
        # reproduce bug STUD-1965
        # create and delete a private vertical with private children
        private_vert = self.store.create_child(
            # don't use course_location as it may not be the repr
            self.user_id, self.course_locations[self.MONGO_COURSEID], 'vertical', block_id='publish'
        )
        private_leaf = self.store.create_child(
            self.user_id, private_vert.location, 'html', block_id='bug_leaf'
        )
        # verify that an error is raised when the revision is not valid
        with self.assertRaises(UnsupportedRevisionError):
            self.store.delete_item(
                private_leaf.location,
                self.user_id,
                revision=ModuleStoreEnum.RevisionOption.draft_preferred
            )
        # publish, then dirty the leaf so it has a draft distinct from published
        self.store.publish(private_vert.location, self.user_id)
        private_leaf.display_name = 'change me'
        private_leaf = self.store.update_item(private_leaf, self.user_id)
        # test succeeds if delete succeeds w/o error
        if default_ms == 'draft' and mongo_uses_error_check(self.store):
            max_find += 1
        with check_mongo_calls(max_find, max_send):
            self.store.delete_item(private_leaf.location, self.user_id)
# Draft:
# 1) find all courses (wildcard),
# 2) get each course 1 at a time (1 course),
# 3) wildcard split if it has any (1) but it doesn't
# Split:
# 1) wildcard split search,
# 2-4) active_versions, structure, definition (s/b lazy; so, unnecessary)
# 5) wildcard draft mongo which has none
@ddt.data(('draft', 3, 0), ('split', 5, 0))
@ddt.unpack
def test_get_courses(self, default_ms, max_find, max_send):
self.initdb(default_ms)
# we should have 3 total courses across all stores
with check_mongo_calls(max_find, max_send):
courses = self.store.get_courses()
course_ids = [course.location for course in courses]
self.assertEqual(len(courses), 3, "Not 3 courses: {}".format(course_ids))
self.assertIn(self.course_locations[self.MONGO_COURSEID], course_ids)
self.assertIn(self.course_locations[self.XML_COURSEID1], course_ids)
self.assertIn(self.course_locations[self.XML_COURSEID2], course_ids)
with self.store.branch_setting(ModuleStoreEnum.Branch.draft_preferred):
draft_courses = self.store.get_courses(remove_branch=True)
with self.store.branch_setting(ModuleStoreEnum.Branch.published_only):
published_courses = self.store.get_courses(remove_branch=True)
self.assertEquals([c.id for c in draft_courses], [c.id for c in published_courses])
    @ddt.data('draft', 'split')
    def test_create_child_detached_tabs(self, default_ms):
        """
        test 'create_child' method with a detached category ('static_tab')
        to check that new static tab is not a direct child of the course
        """
        self.initdb(default_ms)
        mongo_course = self.store.get_course(self.course_locations[self.MONGO_COURSEID].course_key)
        # baseline: the course has exactly one child (the Overview chapter)
        self.assertEqual(len(mongo_course.children), 1)
        # create a static tab of the course
        self.store.create_child(
            self.user_id,
            self.course.location,
            'static_tab'
        )
        # now check that the course has same number of children
        mongo_course = self.store.get_course(self.course_locations[self.MONGO_COURSEID].course_key)
        self.assertEqual(len(mongo_course.children), 1)
    def test_xml_get_courses(self):
        """
        Test that the xml modulestore only loaded the courses from the maps.
        """
        self.initdb('draft')
        xml_store = self.store._get_modulestore_by_type(ModuleStoreEnum.Type.xml)  # pylint: disable=protected-access
        courses = xml_store.get_courses()
        self.assertEqual(len(courses), 2)
        course_ids = [course.id for course in courses]
        self.assertIn(self.course_locations[self.XML_COURSEID1].course_key, course_ids)
        self.assertIn(self.course_locations[self.XML_COURSEID2].course_key, course_ids)
        # this course is in the directory from which we loaded courses but not in the map
        self.assertNotIn("edX/toy/TT_2012_Fall", course_ids)
    def test_xml_no_write(self):
        """
        Test that the xml modulestore doesn't allow write ops.
        """
        self.initdb('draft')
        xml_store = self.store._get_modulestore_by_type(ModuleStoreEnum.Type.xml)  # pylint: disable=protected-access
        # the important thing is not which exception it raises but that it raises an exception
        # (AttributeError here because the xml store has no create_course method)
        with self.assertRaises(AttributeError):
            xml_store.create_course("org", "course", "run", self.user_id)
# draft is 2: find out which ms owns course, get item
# split: active_versions, structure, definition (to load course wiki string)
    @ddt.data(('draft', 2, 0), ('split', 3, 0))
    @ddt.unpack
    def test_get_course(self, default_ms, max_find, max_send):
        """
        This test is here for the performance comparison not functionality. It tests the performance
        of getting an item whose scope.content fields are looked at.
        """
        self.initdb(default_ms)
        # writable course fetch must stay within the query budget
        with check_mongo_calls(max_find, max_send):
            course = self.store.get_item(self.course_locations[self.MONGO_COURSEID])
            self.assertEqual(course.id, self.course_locations[self.MONGO_COURSEID].course_key)
        # xml course fetch issues no mongo queries, so no budget check needed
        course = self.store.get_item(self.course_locations[self.XML_COURSEID1])
        self.assertEqual(course.id, self.course_locations[self.XML_COURSEID1].course_key)
    @ddt.data('draft', 'split')
    def test_get_library(self, default_ms):
        """
        Test that create_library and get_library work regardless of the default modulestore.
        Other tests of MixedModulestore support are in test_libraries.py but this one must
        be done here so we can test the configuration where Draft/old is the first modulestore.
        """
        self.initdb(default_ms)
        with self.store.default_store(ModuleStoreEnum.Type.split):  # The CMS also wraps create_library like this
            library = self.store.create_library("org", "lib", self.user_id, {"display_name": "Test Library"})
            library_key = library.location.library_key
        self.assertIsInstance(library_key, LibraryLocator)
        # Now load with get_library and make sure it works:
        library = self.store.get_library(library_key)
        self.assertEqual(library.location.library_key, library_key)
        # Clear the mappings so we can test get_library code path without mapping set:
        self.store.mappings.clear()
        library = self.store.get_library(library_key)
        self.assertEqual(library.location.library_key, library_key)
# notice this doesn't test getting a public item via draft_preferred which draft would have 2 hits (split
# still only 2)
# Draft: get_parent
# Split: active_versions, structure
    @ddt.data(('draft', 1, 0), ('split', 2, 0))
    @ddt.unpack
    def test_get_parent_locations(self, default_ms, max_find, max_send):
        """
        Test a simple get parent for a direct only category (i.e, always published)
        """
        self.initdb(default_ms)
        self._create_block_hierarchy()
        # writable-store parent lookup must stay within the query budget
        with check_mongo_calls(max_find, max_send):
            parent = self.store.get_parent_location(self.problem_x1a_1)
            self.assertEqual(parent, self.vertical_x1a)
        # xml store: the chapter's parent is the course root itself
        parent = self.store.get_parent_location(self.xml_chapter_location)
        self.assertEqual(parent, self.course_locations[self.XML_COURSEID1])
def verify_get_parent_locations_results(self, expected_results):
"""
Verifies the results of calling get_parent_locations matches expected_results.
"""
for child_location, parent_location, revision in expected_results:
self.assertEqual(
parent_location,
self.store.get_parent_location(child_location, revision=revision)
)
    @ddt.data('draft', 'split')
    def test_get_parent_locations_moved_child(self, default_ms):
        """
        Moving a child between draft verticals: the draft branch should see the
        new parent while the published branch keeps the old one until republish.
        """
        self.initdb(default_ms)
        self._create_block_hierarchy()
        # publish the course
        self.course = self.store.publish(self.course.location, self.user_id)
        with self.store.bulk_operations(self.course.id):
            # make drafts of verticals
            self.store.convert_to_draft(self.vertical_x1a, self.user_id)
            self.store.convert_to_draft(self.vertical_y1a, self.user_id)
            # move child problem_x1a_1 to vertical_y1a
            child_to_move_location = self.problem_x1a_1
            new_parent_location = self.vertical_y1a
            old_parent_location = self.vertical_x1a
            with self.store.branch_setting(ModuleStoreEnum.Branch.draft_preferred):
                old_parent = self.store.get_item(child_to_move_location).get_parent()
                self.assertEqual(old_parent_location, old_parent.location)
                # the child key must be mapped into the parent's course before removal
                child_to_move_contextualized = child_to_move_location.map_into_course(old_parent.location.course_key)
                old_parent.children.remove(child_to_move_contextualized)
                self.store.update_item(old_parent, self.user_id)
                new_parent = self.store.get_item(new_parent_location)
                new_parent.children.append(child_to_move_location)
                self.store.update_item(new_parent, self.user_id)
        # draft branch sees the move; published branch still has the old parent
        with self.store.branch_setting(ModuleStoreEnum.Branch.draft_preferred):
            self.assertEqual(new_parent_location, self.store.get_item(child_to_move_location).get_parent().location)
        with self.store.branch_setting(ModuleStoreEnum.Branch.published_only):
            self.assertEqual(old_parent_location, self.store.get_item(child_to_move_location).get_parent().location)
        old_parent_published_location = old_parent_location.for_branch(ModuleStoreEnum.BranchName.published)
        self.verify_get_parent_locations_results([
            (child_to_move_location, new_parent_location, None),
            (child_to_move_location, new_parent_location, ModuleStoreEnum.RevisionOption.draft_preferred),
            (child_to_move_location, old_parent_published_location, ModuleStoreEnum.RevisionOption.published_only),
        ])
        # publish the course again; now published agrees with draft
        self.store.publish(self.course.location, self.user_id)
        new_parent_published_location = new_parent_location.for_branch(ModuleStoreEnum.BranchName.published)
        self.verify_get_parent_locations_results([
            (child_to_move_location, new_parent_location, None),
            (child_to_move_location, new_parent_location, ModuleStoreEnum.RevisionOption.draft_preferred),
            (child_to_move_location, new_parent_published_location, ModuleStoreEnum.RevisionOption.published_only),
        ])
@ddt.data('draft')
def test_get_parent_locations_deleted_child(self, default_ms):
    """
    Verify get_parent_locations behavior when a child is deleted from a
    draft vertical: before republish, all revisions still report the old
    parent; after republish, no revision reports any parent.
    """
    self.initdb(default_ms)
    self._create_block_hierarchy()
    # publish the course
    self.store.publish(self.course.location, self.user_id)
    # make draft of vertical
    self.store.convert_to_draft(self.vertical_y1a, self.user_id)
    # delete child problem_y1a_1
    child_to_delete_location = self.problem_y1a_1
    old_parent_location = self.vertical_y1a
    self.store.delete_item(child_to_delete_location, self.user_id)
    # deletion only touched the draft branch, so every revision option still
    # resolves the (published) parent
    self.verify_get_parent_locations_results([
        (child_to_delete_location, old_parent_location, None),
        # Note: The following could be an unexpected result, but we want to avoid an extra database call
        (child_to_delete_location, old_parent_location, ModuleStoreEnum.RevisionOption.draft_preferred),
        (child_to_delete_location, old_parent_location, ModuleStoreEnum.RevisionOption.published_only),
    ])
    # publish the course again
    self.store.publish(self.course.location, self.user_id)
    # republishing propagates the deletion; the child now has no parent anywhere
    self.verify_get_parent_locations_results([
        (child_to_delete_location, None, None),
        (child_to_delete_location, None, ModuleStoreEnum.RevisionOption.draft_preferred),
        (child_to_delete_location, None, ModuleStoreEnum.RevisionOption.published_only),
    ])
@ddt.data('draft')
def test_get_parent_location_draft(self, default_ms):
    """
    Test that "get_parent_location" method returns first published parent
    for a draft component, if it has many possible parents (including
    draft parents).
    """
    self.initdb(default_ms)
    course_id = self.course_locations[self.MONGO_COURSEID].course_key
    # create parented children
    self._create_block_hierarchy()
    self.store.publish(self.course.location, self.user_id)
    mongo_store = self.store._get_modulestore_for_courselike(course_id)  # pylint: disable=protected-access
    # add another parent (unit) "vertical_x1b" for problem "problem_x1a_1"
    # NOTE: done via a raw collection update to fabricate a multi-parent
    # state the public API would not normally allow
    mongo_store.collection.update(
        self.vertical_x1b.to_deprecated_son('_id.'),
        {'$push': {'definition.children': unicode(self.problem_x1a_1)}}
    )
    # convert first parent (unit) "vertical_x1a" of problem "problem_x1a_1" to draft
    self.store.convert_to_draft(self.vertical_x1a, self.user_id)
    item = self.store.get_item(self.vertical_x1a)
    self.assertTrue(self.store.has_published_version(item))
    # now problem "problem_x1a_1" has 3 parents [vertical_x1a (draft),
    # vertical_x1a (published), vertical_x1b (published)]
    # check that "get_parent_location" method of draft branch returns first
    # published parent "vertical_x1a" without raising "AssertionError" for
    # problem location revision
    with self.store.branch_setting(ModuleStoreEnum.Branch.draft_preferred, course_id):
        parent = mongo_store.get_parent_location(self.problem_x1a_1)
        self.assertEqual(parent, self.vertical_x1a)
# Draft:
# Problem path:
# 1. Get problem
# 2-6. get parent and rest of ancestors up to course
# 7-8. get sequential, compute inheritance
# 8-9. get vertical, compute inheritance
# 10-11. get other vertical_x1b (why?) and compute inheritance
# Split: active_versions & structure
@ddt.data(('draft', [12, 3], 0), ('split', [2, 2], 0))
@ddt.unpack
def test_path_to_location(self, default_ms, num_finds, num_sends):
    """
    Make sure that path_to_location works: resolvable blocks yield a
    (course, chapter, section, position) path within the expected mongo
    call budget; missing blocks raise ItemNotFoundError; orphans raise
    NoPathToItem.
    """
    self.initdb(default_ms)
    course_key = self.course_locations[self.MONGO_COURSEID].course_key
    with self.store.branch_setting(ModuleStoreEnum.Branch.published_only, course_key):
        self._create_block_hierarchy()
    should_work = (
        (self.problem_x1a_2,
         (course_key, u"Chapter_x", u"Sequential_x1", '1')),
        (self.chapter_x,
         (course_key, "Chapter_x", None, None)),
    )
    for location, expected in should_work:
        # each iteration has different find count, pop this iter's find count
        with check_mongo_calls(num_finds.pop(0), num_sends):
            self.assertEqual(path_to_location(self.store, location), expected)
    # locations that don't exist anywhere in the course
    not_found = (
        course_key.make_usage_key('video', 'WelcomeX'),
        course_key.make_usage_key('course', 'NotHome'),
    )
    for location in not_found:
        with self.assertRaises(ItemNotFoundError):
            path_to_location(self.store, location)
    # Orphaned items should not be found.
    orphan = course_key.make_usage_key('chapter', 'OrphanChapter')
    self.store.create_item(
        self.user_id,
        orphan.course_key,
        orphan.block_type,
        block_id=orphan.block_id
    )
    with self.assertRaises(NoPathToItem):
        path_to_location(self.store, orphan)
def test_xml_path_to_location(self):
    """
    Make sure that path_to_location works against the XML modulestore:
    should be passed a modulestore with the toy and simple courses loaded.
    """
    # only needs course_locations set
    self.initdb('draft')
    course_key = self.course_locations[self.XML_COURSEID1].course_key
    # locations that resolve, paired with the expected
    # (course, chapter, section, position) path
    resolvable = (
        (course_key.make_usage_key('video', 'Welcome'),
         (course_key, "Overview", "Welcome", None)),
        (course_key.make_usage_key('chapter', 'Overview'),
         (course_key, "Overview", None, None)),
    )
    for location, expected_path in resolvable:
        self.assertEqual(path_to_location(self.store, location), expected_path)
    # nonexistent blocks must raise rather than produce a bogus path
    missing = (
        course_key.make_usage_key('video', 'WelcomeX'),
        course_key.make_usage_key('course', 'NotHome'),
    )
    for bad_location in missing:
        with self.assertRaises(ItemNotFoundError):
            path_to_location(self.store, bad_location)
def test_navigation_index(self):
    """
    Make sure that navigation_index correctly parses the various position values that we might get from calls to
    path_to_location
    """
    # None passes straight through
    self.assertEqual(None, navigation_index(None))
    # for "N" or "N_..." strings, the leading integer is extracted
    for raw_position, expected_index in (
        ("1", 1),
        ("10", 10),
        ("1_2", 1),
        ("5_2", 5),
        ("7_3_5_6_", 7),
    ):
        self.assertEqual(expected_index, navigation_index(raw_position))
@ddt.data('draft', 'split')
def test_revert_to_published_root_draft(self, default_ms):
    """
    Test calling revert_to_published on draft vertical.

    Deleting a leaf problem implicitly converts its parent vertical into a
    draft with one fewer child; reverting the vertical must restore the
    published child list and clear the "has changes" state.
    """
    self.initdb(default_ms)
    self._create_block_hierarchy()
    vertical = self.store.get_item(self.vertical_x1a)
    vertical_children_num = len(vertical.children)
    self.store.publish(self.course.location, self.user_id)
    self.assertFalse(self._has_changes(self.vertical_x1a))
    # delete leaf problem (will make parent vertical a draft)
    self.store.delete_item(self.problem_x1a_1, self.user_id)
    self.assertTrue(self._has_changes(self.vertical_x1a))
    draft_parent = self.store.get_item(self.vertical_x1a)
    self.assertEqual(vertical_children_num - 1, len(draft_parent.children))
    published_parent = self.store.get_item(
        self.vertical_x1a,
        revision=ModuleStoreEnum.RevisionOption.published_only
    )
    self.assertEqual(vertical_children_num, len(published_parent.children))
    self.store.revert_to_published(self.vertical_x1a, self.user_id)
    reverted_parent = self.store.get_item(self.vertical_x1a)
    # Fixed: assert on the REVERTED block. The original re-checked the stale
    # `published_parent` fetched before the revert (duplicating the assertion
    # above), so the revert's effect on the draft branch was never verified.
    self.assertEqual(vertical_children_num, len(reverted_parent.children))
    self.assertBlocksEqualByFields(reverted_parent, published_parent)
    self.assertFalse(self._has_changes(self.vertical_x1a))
@ddt.data('draft', 'split')
def test_revert_to_published_root_published(self, default_ms):
    """
    Test calling revert_to_published on a published vertical with a draft child.
    """
    self.initdb(default_ms)
    self._create_block_hierarchy()
    self.store.publish(self.course.location, self.user_id)
    draft_problem = self.store.get_item(self.problem_x1a_1)
    original_name = draft_problem.display_name
    # Change display name of problem and update just it (so parent remains published)
    draft_problem.display_name = "updated before calling revert"
    self.store.update_item(draft_problem, self.user_id)
    # reverting the (still published) parent should discard the child's draft edit
    self.store.revert_to_published(self.vertical_x1a, self.user_id)
    problem_after_revert = self.store.get_item(self.problem_x1a_1)
    self.assertEqual(original_name, problem_after_revert.display_name)
@ddt.data('draft', 'split')
def test_revert_to_published_no_draft(self, default_ms):
    """
    Test calling revert_to_published on vertical with no draft content does nothing.
    """
    self.initdb(default_ms)
    self._create_block_hierarchy()
    self.store.publish(self.course.location, self.user_id)
    # snapshot the vertical, revert, then confirm nothing changed
    vertical_before = self.store.get_item(self.vertical_x1a)
    self.store.revert_to_published(self.vertical_x1a, self.user_id)
    vertical_after = self.store.get_item(self.vertical_x1a)
    self.assertBlocksEqualByFields(vertical_before, vertical_after)
@ddt.data('draft', 'split')
def test_revert_to_published_no_published(self, default_ms):
    """
    Test calling revert_to_published on vertical with no published version errors.
    """
    self.initdb(default_ms)
    # hierarchy is created but never published, so there is nothing to revert to
    self._create_block_hierarchy()
    with self.assertRaises(InvalidVersionError):
        self.store.revert_to_published(self.vertical_x1a, self.user_id)
@ddt.data('draft', 'split')
def test_revert_to_published_direct_only(self, default_ms):
    """
    Test calling revert_to_published on a direct-only item is a no-op.
    """
    self.initdb(default_ms)
    self._create_block_hierarchy()
    child_count = len(self.store.get_item(self.sequential_x1).children)
    self.store.revert_to_published(self.sequential_x1, self.user_id)
    sequential_after = self.store.get_item(self.sequential_x1)
    # It does not discard the child vertical, even though that child is a draft (with no published version)
    self.assertEqual(child_count, len(sequential_after.children))
# Draft: get all items which can be or should have parents
# Split: active_versions, structure
@ddt.data(('draft', 1, 0), ('split', 2, 0))
@ddt.unpack
def test_get_orphans(self, default_ms, max_find, max_send):
    """
    Test finding orphans.
    """
    self.initdb(default_ms)
    course_id = self.course_locations[self.MONGO_COURSEID].course_key
    # build the standard parented hierarchy first
    self._create_block_hierarchy()
    # parentless blocks that SHOULD be reported as orphans
    expected_orphans = [
        course_id.make_usage_key(block_type, block_id)
        for block_type, block_id in (
            ('chapter', 'OrphanChapter'),
            ('vertical', 'OrphanVertical'),
            ('problem', 'OrphanProblem'),
            ('html', 'OrphanHTML'),
        )
    ]
    # detached block types are parentless by design and must NOT be flagged
    detached = [
        course_id.make_usage_key('static_tab', 'StaticTab'),
        course_id.make_usage_key('course_info', 'updates'),
    ]
    for location in (expected_orphans + detached):
        self.store.create_item(
            self.user_id,
            location.course_key,
            location.block_type,
            block_id=location.block_id
        )
    with check_mongo_calls(max_find, max_send):
        found_orphans = self.store.get_orphans(self.course_locations[self.MONGO_COURSEID].course_key)
    self.assertItemsEqual(found_orphans, expected_orphans)
@ddt.data('draft')
def test_get_non_orphan_parents(self, default_ms):
    """
    Test finding non orphan parents from many possible parents.

    get_parent_location should skip orphaned parents, but raise
    ReferentialIntegrityError once a block has two valid (non-orphan)
    published parents.
    """
    self.initdb(default_ms)
    course_id = self.course_locations[self.MONGO_COURSEID].course_key
    # create parented children
    self._create_block_hierarchy()
    self.store.publish(self.course.location, self.user_id)
    # test that problem "problem_x1a_1" has only one published parent
    mongo_store = self.store._get_modulestore_for_courselike(course_id)  # pylint: disable=protected-access
    with self.store.branch_setting(ModuleStoreEnum.Branch.published_only, course_id):
        parent = mongo_store.get_parent_location(self.problem_x1a_1)
        self.assertEqual(parent, self.vertical_x1a)
    # add some published orphans
    orphan_sequential = course_id.make_usage_key('sequential', 'OrphanSequential')
    orphan_vertical = course_id.make_usage_key('vertical', 'OrphanVertical')
    orphan_locations = [orphan_sequential, orphan_vertical]
    for location in orphan_locations:
        self.store.create_item(
            self.user_id,
            location.course_key,
            location.block_type,
            block_id=location.block_id
        )
        self.store.publish(location, self.user_id)
    found_orphans = mongo_store.get_orphans(course_id)
    self.assertEqual(set(found_orphans), set(orphan_locations))
    self.assertEqual(len(set(found_orphans)), 2)
    # add orphan vertical and sequential as another parents of problem "problem_x1a_1"
    # (raw collection updates fabricate a multi-parent state the public API disallows)
    mongo_store.collection.update(
        orphan_sequential.to_deprecated_son('_id.'),
        {'$push': {'definition.children': unicode(self.problem_x1a_1)}}
    )
    mongo_store.collection.update(
        orphan_vertical.to_deprecated_son('_id.'),
        {'$push': {'definition.children': unicode(self.problem_x1a_1)}}
    )
    # test that "get_parent_location" method of published branch still returns the correct non-orphan parent for
    # problem "problem_x1a_1" since the two other parents are orphans
    with self.store.branch_setting(ModuleStoreEnum.Branch.published_only, course_id):
        parent = mongo_store.get_parent_location(self.problem_x1a_1)
        self.assertEqual(parent, self.vertical_x1a)
    # now add valid published vertical as another parent of problem
    mongo_store.collection.update(
        self.sequential_x1.to_deprecated_son('_id.'),
        {'$push': {'definition.children': unicode(self.problem_x1a_1)}}
    )
    # now check that "get_parent_location" method of published branch raises "ReferentialIntegrityError" for
    # problem "problem_x1a_1" since it has now 2 valid published parents
    with self.store.branch_setting(ModuleStoreEnum.Branch.published_only, course_id):
        self.assertTrue(self.store.has_item(self.problem_x1a_1))
        with self.assertRaises(ReferentialIntegrityError):
            self.store.get_parent_location(self.problem_x1a_1)
@ddt.data('draft')
def test_create_item_from_parent_location(self, default_ms):
    """
    Test a code path missed by the above: passing an old-style location as parent but no
    new location for the child
    """
    self.initdb(default_ms)
    parent_location = self.course_locations[self.MONGO_COURSEID]
    # create_child should attach the block to the given parent, so no orphan results
    self.store.create_child(
        self.user_id,
        parent_location,
        'problem',
        block_id='orphan'
    )
    orphans = self.store.get_orphans(parent_location.course_key)
    self.assertEqual(len(orphans), 0, "unexpected orphans: {}".format(orphans))
@ddt.data('draft', 'split')
def test_create_item_populates_edited_info(self, default_ms):
    """
    Creating an item should stamp edited_by with the acting user and
    edited_on with a timestamp no later than "now".
    """
    self.initdb(default_ms)
    new_block = self.store.create_item(
        self.user_id,
        self.course.location.course_key,
        'problem'
    )
    self.assertEqual(self.user_id, new_block.edited_by)
    self.assertGreater(datetime.datetime.now(UTC), new_block.edited_on)
@ddt.data('draft', 'split')
def test_create_item_populates_subtree_edited_info(self, default_ms):
    """
    Creating an item should stamp subtree_edited_by with the acting user
    and subtree_edited_on with a timestamp no later than "now".
    """
    self.initdb(default_ms)
    new_block = self.store.create_item(
        self.user_id,
        self.course.location.course_key,
        'problem'
    )
    self.assertEqual(self.user_id, new_block.subtree_edited_by)
    self.assertGreater(datetime.datetime.now(UTC), new_block.subtree_edited_on)
# Draft: wildcard search of draft and split
# Split: wildcard search of draft and split
@ddt.data(('draft', 2, 0), ('split', 2, 0))
@ddt.unpack
def test_get_courses_for_wiki(self, default_ms, max_find, max_send):
    """
    Test the get_courses_for_wiki method
    """
    self.initdb(default_ms)
    # XML-backed wikis: each slug maps to exactly one XML course
    for wiki_slug, courseid in (('toy', self.XML_COURSEID1), ('simple', self.XML_COURSEID2)):
        courses = self.store.get_courses_for_wiki(wiki_slug)
        self.assertEqual(1, len(courses))
        self.assertIn(self.course_locations[courseid].course_key, courses)
    # Mongo-backed wiki, within the expected call budget
    with check_mongo_calls(max_find, max_send):
        courses = self.store.get_courses_for_wiki('999')
    self.assertEqual(1, len(courses))
    self.assertIn(
        self.course_locations[self.MONGO_COURSEID].course_key.replace(branch=None),  # Branch agnostic
        courses
    )
    # slugs with no matching wiki yield empty results
    for missing_slug in ('edX.simple.2012_Fall', 'no_such_wiki'):
        self.assertEqual(0, len(self.store.get_courses_for_wiki(missing_slug)))
# Draft:
# Find: find vertical, find children
# Sends:
# 1. delete all of the published nodes in subtree
# 2. insert vertical as published (deleted in step 1) w/ the deleted problems as children
# 3-6. insert the 3 problems and 1 html as published
# Split: active_versions, 2 structures (pre & post published?)
# Sends:
# - insert structure
# - write index entry
@ddt.data(('draft', 2, 6), ('split', 3, 2))
@ddt.unpack
def test_unpublish(self, default_ms, max_find, max_send):
    """
    Test calling unpublish
    """
    self.initdb(default_ms)
    if default_ms == 'draft' and mongo_uses_error_check(self.store):
        max_find += 1
    self._create_block_hierarchy()
    # publish everything so a published copy of the vertical exists
    self.store.publish(self.course.location, self.user_id)
    published_vertical = self.store.get_item(
        self.vertical_x1a,
        revision=ModuleStoreEnum.RevisionOption.published_only
    )
    self.assertIsNotNone(published_vertical)
    # unpublish within the expected mongo call budget
    with check_mongo_calls(max_find, max_send):
        self.store.unpublish(self.vertical_x1a, self.user_id)
    # the published copy must be gone...
    with self.assertRaises(ItemNotFoundError):
        self.store.get_item(
            self.vertical_x1a,
            revision=ModuleStoreEnum.RevisionOption.published_only
        )
    # ...while the draft copy survives
    draft_vertical = self.store.get_item(
        self.vertical_x1a,
        revision=ModuleStoreEnum.RevisionOption.draft_only
    )
    self.assertIsNotNone(draft_vertical)
# Draft: specific query for revision None
# Split: active_versions, structure
@ddt.data(('draft', 1, 0), ('split', 2, 0))
@ddt.unpack
def test_has_published_version(self, default_ms, max_find, max_send):
    """
    Test the has_published_version method across the full publish
    lifecycle: private -> public -> private -> public -> draft (no
    changes) -> draft (with changes).
    """
    self.initdb(default_ms)
    self._create_block_hierarchy()
    # start off as Private
    item = self.store.create_child(self.user_id, self.writable_chapter_location, 'problem', 'test_compute_publish_state')
    item_location = item.location
    with check_mongo_calls(max_find, max_send):
        self.assertFalse(self.store.has_published_version(item))
    # Private -> Public
    self.store.publish(item_location, self.user_id)
    item = self.store.get_item(item_location)
    self.assertTrue(self.store.has_published_version(item))
    # Public -> Private
    self.store.unpublish(item_location, self.user_id)
    item = self.store.get_item(item_location)
    self.assertFalse(self.store.has_published_version(item))
    # Private -> Public
    self.store.publish(item_location, self.user_id)
    item = self.store.get_item(item_location)
    self.assertTrue(self.store.has_published_version(item))
    # Public -> Draft with NO changes
    # (converting to draft does not remove the published version)
    self.store.convert_to_draft(item_location, self.user_id)
    item = self.store.get_item(item_location)
    self.assertTrue(self.store.has_published_version(item))
    # Draft WITH changes
    # (pending edits still leave the published version in place)
    item.display_name = 'new name'
    item = self.store.update_item(item, self.user_id)
    self.assertTrue(self.store.has_changes(item))
    self.assertTrue(self.store.has_published_version(item))
@ddt.data('draft', 'split')
def test_update_edit_info_ancestors(self, default_ms):
"""
Tests that edited_on, edited_by, subtree_edited_on, and subtree_edited_by are set correctly during update
"""
self.initdb(default_ms)
test_course = self.store.create_course('testx', 'GreekHero', 'test_run', self.user_id)
def check_node(location_key, after, before, edited_by, subtree_after, subtree_before, subtree_by):
"""
Checks that the node given by location_key matches the given edit_info constraints.
"""
node = self.store.get_item(location_key)
if after:
self.assertLess(after, node.edited_on)
self.assertLess(node.edited_on, before)
self.assertEqual(node.edited_by, edited_by)
if subtree_after:
self.assertLess(subtree_after, node.subtree_edited_on)
self.assertLess(node.subtree_edited_on, subtree_before)
self.assertEqual(node.subtree_edited_by, subtree_by)
with self.store.bulk_operations(test_course.id):
# Create a dummy vertical & html to test against
component = self.store.create_child(
self.user_id,
test_course.location,
'vertical',
block_id='test_vertical'
)
child = self.store.create_child(
self.user_id,
component.location,
'html',
block_id='test_html'
)
sibling = self.store.create_child(
self.user_id,
component.location,
'html',
block_id='test_html_no_change'
)
after_create = datetime.datetime.now(UTC)
# Verify that all nodes were last edited in the past by create_user
for block in [component, child, sibling]:
check_node(block.location, None, after_create, self.user_id, None, after_create, self.user_id)
# Change the component, then check that there now are changes
component.display_name = 'Changed Display Name'
editing_user = self.user_id - 2
with self.store.bulk_operations(test_course.id): # TNL-764 bulk ops disabled ancestor updates
|
[
" component = self.store.update_item(component, editing_user)"
] | 4,935
|
lcc
|
python
| null |
c83f71189a57f9b8fbf6439c1c211939a9c292a33024b337
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.