repo_name stringlengths 4 116 | path stringlengths 4 379 | size stringlengths 1 7 | content stringlengths 3 1.05M | license stringclasses 15
values |
|---|---|---|---|---|
StepProgrammer/CommunityServer | web/studio/ASC.Web.Studio/Products/Projects/Controls/Common/CommonList.ascx.designer.cs | 1936 | /*
*
* (c) Copyright Ascensio System Limited 2010-2015
*
* This program is freeware. You can redistribute it and/or modify it under the terms of the GNU
* General Public License (GPL) version 3 as published by the Free Software Foundation (https://www.gnu.org/copyleft/gpl.html).
* In accordance with Section 7(a) of the GNU GPL its Section 15 shall be amended to the effect that
* Ascensio System SIA expressly excludes the warranty of non-infringement of any third-party rights.
*
* THIS PROGRAM IS DISTRIBUTED WITHOUT ANY WARRANTY; WITHOUT EVEN THE IMPLIED WARRANTY OF MERCHANTABILITY OR
* FITNESS FOR A PARTICULAR PURPOSE. For more details, see GNU GPL at https://www.gnu.org/copyleft/gpl.html
*
* You can contact Ascensio System SIA by email at sales@onlyoffice.com
*
* The interactive user interfaces in modified source and object code versions of ONLYOFFICE must display
* Appropriate Legal Notices, as required under Section 5 of the GNU GPL version 3.
*
* Pursuant to Section 7 § 3(b) of the GNU GPL you must retain the original ONLYOFFICE logo which contains
* relevant author attributions when distributing the software. If the display of the logo in its graphic
* form is not reasonably feasible for technical reasons, you must include the words "Powered by ONLYOFFICE"
* in every copy of the program you distribute.
* Pursuant to Section 7 § 3(e) we decline to grant you any rights under trademark law for use of our trademarks.
*
*/
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace ASC.Web.Projects.Controls.Common
{
    // Designer half of the CommonList user control (CommonList.ascx).
    // Auto-generated by the ASP.NET designer tool: server-control field
    // declarations are emitted here when the markup declares any; this
    // control currently declares none, hence the empty partial class.
    public partial class CommonList
    {
    }
}
| agpl-3.0 |
ael-code/libreant | users/test/test_capability.py | 6105 | from . import TestBaseClass
from nose.tools import eq_
from users import User, Group, GroupToCapability, Capability, Action
from peewee import IntegrityError
class TestCapability(TestBaseClass):
    """Tests for Capability objects: creation, assignment to groups,
    action-bitmask handling and domain/action matching."""

    def populate(self):
        """Create a small fixture: two capabilities, two groups (one
        capability each) and one user that belongs to both groups.

        Returns a tuple ``(user, [cap1, cap2])``.
        """
        with self.udb.atomic():
            cap1 = Capability.create(domain='res1', action=Action.READ)
            cap2 = Capability.create(domain='res2', action=Action.UPDATE)
            grp1 = Group.create(name='grp2')
            grp2 = Group.create(name='grp1')
            usr = User.create(name='usr')
            grp1.capabilities.add(cap1)
            grp2.capabilities.add(cap2)
            usr.groups.add([grp1, grp2])
            return usr, [cap1, cap2]

    def test_capability_creation(self):
        Capability.create(domain='res', action=Action.CREATE)
        eq_(Capability.select().count(), 1)

    def test_assign_capability_to_group(self):
        cap = Capability.create(domain='res', action=Action.DELETE)
        anons = Group.create(name='anons')
        anons.capabilities.add(cap)
        anons.save()
        eq_(anons.capabilities.count(), 1)
        eq_(anons.capabilities.get(), cap)

    def test_assign_same_capability_to_group(self):
        # Adding the same capability twice must violate the unique
        # constraint on the through table and leave a single association.
        cap = Capability.create(domain='res', action=Action.DELETE)
        anons = Group.create(name='anons')
        anons.capabilities.add(cap)
        anons.save()
        with self.assertRaises(IntegrityError):
            anons.capabilities.add(cap)
            anons.save()
        eq_(anons.capabilities.count(), 1)
        eq_(anons.capabilities.get(), cap)

    def test_get_capabilities_from_user(self):
        # Capabilities reach the user transitively through its groups.
        usr, caps = self.populate()
        userCapIds = [c.id for c in usr.capabilities]
        eq_(len(userCapIds), 2)
        eq_(set(userCapIds), set([c.id for c in caps]))

    def test_remove_capabilities(self):
        # Deleting a capability must cascade to the group association rows.
        usr, caps = self.populate()
        Capability.delete().where(Capability.domain == caps[0].domain,
                                  Capability.action == caps[0].action).execute()
        eq_(usr.capabilities.count(), 1)
        eq_(usr.capabilities.get(), caps[1])
        eq_(GroupToCapability.select().count(), 1)

    def test_action_creation(self):
        Action.from_list(Action.ACTIONS)
        # A value outside the known actions bitmask must be rejected.
        with self.assertRaises(ValueError):
            Action(Action.from_list(Action.ACTIONS) + 1)

    def test_action_matching(self):
        cap = Capability.create(domain='s',
                                action=(Action.CREATE | Action.READ | Action.UPDATE))
        self.assertTrue(cap.match_action(Action.UPDATE))
        self.assertTrue(cap.match_action(Action.READ | Action.READ))
        self.assertFalse(cap.match_action(Action.DELETE))
        self.assertFalse(cap.match_action(Action.READ | Action.DELETE))
        self.assertFalse(cap.match_action(123123))

    def test_domain_matching_true(self):
        # Leading/trailing slashes in the tested path must be tolerated.
        res = Capability.simToReg('volumes/*/attachments/*')
        cap = Capability.create(domain=res, action='21')
        self.assertTrue(cap.match_domain('volumes/j12j3213j/attachments/z7s71kj23'))
        self.assertTrue(cap.match_domain('/volumes/123nj12j3k/attachments/kj321k'))
        self.assertTrue(cap.match_domain('volumes/123nj12j3k/attachments/kj321k/'))

    def test_domain_matching_false(self):
        # '*' must match exactly one non-empty path segment.
        res = Capability.simToReg('volumes/*/attachments/*')
        cap = Capability.create(domain=res, action='21')
        self.assertFalse(cap.match_domain('volumes//attachments/z7s71kj23'))
        self.assertFalse(cap.match_domain('volumes/123123'))
        self.assertFalse(cap.match_domain('volumes/123123/attachments'))
        self.assertFalse(cap.match_domain('volumes/attachments/z7s71kj23'))
        self.assertFalse(cap.match_domain('volumes/j12j3213j/attachments'))
        self.assertFalse(cap.match_domain('volumes/j12j3213j/attachments/123123/name'))
        self.assertFalse(cap.match_domain('nothere/volumes/j12j3213j/attachments/123123/name'))

    def test_capability_matching(self):
        # Fixed: the original domain used the misspelled segment
        # 'attachemnts' and never asserted the result of match(), so the
        # test passed vacuously regardless of the matching behaviour.
        res = Capability.simToReg('/volumes/*/attachments/*')
        cap = Capability.create(domain=res, action=Action.READ)
        self.assertTrue(cap.match('volumes/1/attachments/3', Action.READ))
        self.assertFalse(cap.match('volumes/1/attachments/3', Action.DELETE))

    def test_simplified_to_reg_conversion(self):
        # simToReg/regToSim must round-trip to the canonical simplified form
        # (no leading or trailing slash).
        self.assertEqual(Capability.regToSim(Capability.simToReg('/volumes/*/attachments')), 'volumes/*/attachments')
        self.assertEqual(Capability.regToSim(Capability.simToReg('volumes/*/attachments/')), 'volumes/*/attachments')
        self.assertEqual(Capability.regToSim(Capability.simToReg('/*/')), '*')

    def test_user_can(self):
        cap1 = Capability.create(domain=Capability.simToReg('volumes/*'),
                                 action=Action.CREATE | Action.READ)
        cap2 = Capability.create(domain=Capability.simToReg('volumes/123'),
                                 action=Action.UPDATE)
        grp1 = Group.create(name='grp2')
        grp2 = Group.create(name='grp1')
        usr = User.create(name='usr')
        grp1.capabilities.add(cap1)
        grp2.capabilities.add(cap2)
        usr.groups.add([grp1, grp2])
        self.assertTrue(usr.can('volumes/61273', action=Action.CREATE))
        self.assertTrue(usr.can('volumes/123', Action.CREATE | Action.READ))
        self.assertFalse(usr.can('volumes/82828', Action.DELETE))
        self.assertFalse(usr.can('volumes/123', Action.DELETE))

    def test_group_can(self):
        cap1 = Capability.create(domain=Capability.simToReg('volumes/*'),
                                 action=Action.CREATE | Action.READ)
        cap2 = Capability.create(domain=Capability.simToReg('users/123'),
                                 action=Action.CREATE | Action.DELETE)
        grp = Group.create(name='grp2')
        grp.capabilities.add([cap1, cap2])
        self.assertTrue(grp.can('volumes/123', Action.CREATE | Action.READ))
        self.assertFalse(grp.can('volumes/82828', Action.DELETE))
        self.assertTrue(grp.can('users/123', Action.DELETE))
        self.assertFalse(grp.can('users/123', Action.UPDATE))

    def test_bitmask(self):
        # from_list/to_list must round-trip the full action list.
        self.assertEqual(Action.from_list(Action.ACTIONS).to_list(), Action.ACTIONS)
| agpl-3.0 |
osgcc/ryzom | ryzom/tools/leveldesign/mission_compiler_lib/main.cpp | 8334 | // Ryzom - MMORPG Framework <http://dev.ryzom.com/projects/ryzom/>
// Copyright (C) 2010 Winch Gate Property Limited
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
#include "nel/misc/i18n.h"
#include "mission_compiler.h"
#include "nel/misc/config_file.h"
using namespace std;
using namespace NLMISC;
using namespace NLLIGO;
class CMissionData;
class IStep;
int main(int argc, char *argv[])
{
new NLMISC::CApplicationContext;
CPath::addSearchPath("L:\\primitives\\", true, false);
bool test = false;
if (argc == 4 && string(argv[3]) == "-test")
{
test = true;
}
else if ( argc != 3)
{
printf("%s <world_edit_class> <primitive_file> [-test]", argv[0]);
return -1;
}
string sourceDocName;
if (!test)
sourceDocName = argv[2];
else
sourceDocName = "test_compilateur.primitive";
// remove the path
sourceDocName = CFile::getFilename(sourceDocName);
// init ligo
NLLIGO::CLigoConfig LigoConfig;
CPrimitiveContext::instance().CurrentLigoConfig = &LigoConfig;
nlinfo("Reading ligo configuration file...");
if (!LigoConfig.readPrimitiveClass (argv[1], false))
{
nlwarning("Can't read '%s' !", argv[1]);
return -1;
}
NLLIGO::Register();
nlinfo("Reading primitive file...");
CPrimitives primDoc;
CPrimitiveContext::instance().CurrentPrimitive = &primDoc;
loadXmlPrimitiveFile(primDoc, sourceDocName, LigoConfig);
CMissionCompiler mc;
if (test)
{
nlinfo("Compiling test mission");
try
{
mc.compileMissions(primDoc.RootNode, sourceDocName);
TMissionDataPtr testMission = mc.getMission(0);
CSString script = testMission->generateMissionScript(sourceDocName);
script += "======================================================"+NL;
script += testMission->generatePhraseFile();
script += "======================================================"+NL;
script += testMission->generateDotScript();
script = script.replace(NL.c_str(), "\n");
char *tmp = ::getenv("TEMP");
FILE *fp = ::fopen((string(tmp)+"/compiled_mission.script").c_str(), "w");
::fwrite(script.data(), script.size(), 1, fp);
::fclose(fp);
system((string("\"C:\\Program Files\\Beyond Compare 2\\bc2.exe\" ")+string(tmp)+"/compiled_mission.script test_compilateur.script").c_str());
}
catch(const EParseException &e)
{
nlwarning(e.Why.c_str());
return -1;
}
return 0;
}
nlinfo("Compiling missions...");
try
{
mc.compileMissions(primDoc.RootNode, sourceDocName);
mc.installCompiledMission(LigoConfig, sourceDocName);
/* std::vector <TMissionDataPtr> &missions = mc.getMissions();
// generate the mission script into the npcs...
{
map<string, TLoadedPrimitive > loadedPrimitives;
// First loop to remove any mission that belong to the compiled primitive file
for (uint i=0; i<missions.size(); ++i)
{
CMissionData &mission = *(missions[i]);
// first, look for the primitive file to load
string fileName = mission.getGiverPrimitive();
if (fileName.empty())
{
// use mission primitive instead
fileName = sourceDocName;
}
if (loadedPrimitives.find(fileName) == loadedPrimitives.end())
{
string fullFileName = CPath::lookup(fileName);
if (fullFileName.empty())
{
nlwarning("Can't find primitive file '%s' in path", fileName.c_str());
throw EParseException(NULL, "Destination primitive file not found");
}
// we need to load this primitive file.
CPrimitives *primDoc = new CPrimitives;
if (loadXmlPrimitiveFile(*primDoc, fullFileName, LigoConfig))
{
// the primitive file is loaded correctly
loadedPrimitives.insert(make_pair(fileName, TLoadedPrimitive(primDoc, fullFileName)));
}
else
throw EParseException(NULL, "Can't read primitive file");
}
TLoadedPrimitive &loadedPrim = loadedPrimitives[fileName];
CPrimitives *primDoc = loadedPrim.PrimDoc;
TPrimitiveSet scripts;
CPrimitiveSet<TPrimitiveClassPredicate> filter;
filter.buildSet(primDoc->RootNode, TPrimitiveClassPredicate("mission"), scripts);
// for each script, check if it was generated, and if so, check the name
// of the source primitive file.
for (uint i=0; i<scripts.size(); ++i)
{
vector<string> *script;
if (scripts[i]->getPropertyByName("script", script) && !script->empty())
{
// Format should be : #compiled from <source_primitive_name>
if (script->front().find("compiled from"))
{
// we have a compiled mission
if (script->front().find(sourceDocName))
{
// ok, this mission is compiled from the same primitive, remove it
scripts[i]->getParent()->removeChild(scripts[i]);
}
}
}
}
}
// second loop to assign compiled mission to giver npc
for (uint i=0; i<missions.size(); ++i)
{
CMissionData &mission = *(missions[i]);
string fileName = mission.getGiverPrimitive();
if (fileName.empty())
{
// no giver primitive file specified in the mission, use the mission primitive instead
fileName = sourceDocName;
}
TLoadedPrimitive &loadedPrim = loadedPrimitives[fileName];
CPrimitives *primDoc = loadedPrim.PrimDoc;
TPrimitiveSet bots;
CPrimitiveSet<TPrimitiveClassAndNamePredicate> filter;
filter.buildSet(primDoc->RootNode, TPrimitiveClassAndNamePredicate("npc_bot", mission.getGiverName()), bots);
if (bots.empty())
{
nlwarning("Can't find bot '%s' in primitive '%s' !",
mission.getGiverName().c_str(),
fileName.c_str());
throw EParseException(NULL, "Can't find giver in primitive");
}
else if (bots.size() > 1)
{
nlwarning("Found more than one bot named '%s' in primitive '%s' !",
mission.getGiverName().c_str(),
fileName.c_str());
throw EParseException(NULL, "More than one bot with giver name in primitive");
}
// ok, all is good, we can add the mission node to the giver
IPrimitive *giver = bots.front();
// create a new node for the mission
IPrimitive *script = new CPrimNode;
// set the class
script->addPropertyByName("class", new CPropertyString("mission"));
// set the name
script->addPropertyByName("name", new CPropertyString(mission.getMissionName()));
// string alias(toString("%u", makeHash32(mission.getMissionName())));
script->addPropertyByName("alias", new CPropertyString(mission.getAlias()));
string scriptLines = mission.generateMissionScript();
vector<string> lines;
explode(scriptLines, NL, lines, false);
script->addPropertyByName("script", new CPropertyStringArray(lines));
// insert the script into the giver
giver->insertChild(script);
}
// Save the modified primitive files
while (!loadedPrimitives.empty())
{
TLoadedPrimitive &loadedPrim = loadedPrimitives.begin()->second;
saveXmlPrimitiveFile(*(loadedPrim.PrimDoc), loadedPrim.FullFileName);
// Free the memory
delete loadedPrim.PrimDoc;
loadedPrimitives.erase(loadedPrimitives.begin());
}
}
// generate the phrase file (in any)
{
string phraseFileName = CFile::getFilenameWithoutExtension(sourceDocName) + "_en.txt";
CSString content;
for (uint i=0; i<missions.size(); ++i)
{
content += missions[i]->generatePhraseFile();
}
// transform NL (\n\r) into single \n
content = content.replace(NL.c_str(), "\n");
ucstring ucs;
ucs.fromUtf8(content);
CI18N::writeTextFile(phraseFileName, ucs, true);
}
*/
}
catch (const EParseException &e)
{
CPrimitiveContext::instance().CurrentLigoConfig = NULL;
nlerror("Compilation error : '%s'", e.Why.c_str());
}
CPrimitiveContext::instance().CurrentLigoConfig = NULL;
}
| agpl-3.0 |
Khamull/CommunityServer | module/ASC.SignalR.Base/Hubs/Chat/Chat.cs | 23062 | /*
*
* (c) Copyright Ascensio System Limited 2010-2015
*
* This program is freeware. You can redistribute it and/or modify it under the terms of the GNU
* General Public License (GPL) version 3 as published by the Free Software Foundation (https://www.gnu.org/copyleft/gpl.html).
* In accordance with Section 7(a) of the GNU GPL its Section 15 shall be amended to the effect that
* Ascensio System SIA expressly excludes the warranty of non-infringement of any third-party rights.
*
* THIS PROGRAM IS DISTRIBUTED WITHOUT ANY WARRANTY; WITHOUT EVEN THE IMPLIED WARRANTY OF MERCHANTABILITY OR
* FITNESS FOR A PARTICULAR PURPOSE. For more details, see GNU GPL at https://www.gnu.org/copyleft/gpl.html
*
* You can contact Ascensio System SIA by email at sales@onlyoffice.com
*
* The interactive user interfaces in modified source and object code versions of ONLYOFFICE must display
* Appropriate Legal Notices, as required under Section 5 of the GNU GPL version 3.
*
* Pursuant to Section 7 § 3(b) of the GNU GPL you must retain the original ONLYOFFICE logo which contains
* relevant author attributions when distributing the software. If the display of the logo in its graphic
* form is not reasonably feasible for technical reasons, you must include the words "Powered by ONLYOFFICE"
* in every copy of the program you distribute.
* Pursuant to Section 7 § 3(e) we decline to grant you any rights under trademark law for use of our trademarks.
*
*/
using ASC.Common.Security.Authentication;
using ASC.Core;
using ASC.Core.Common.Notify.Jabber;
using ASC.Core.Tenants;
using ASC.Core.Users;
using log4net;
using Microsoft.AspNet.SignalR;
using Microsoft.AspNet.SignalR.Hubs;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Security;
using System.Security.Principal;
using System.Threading.Tasks;
namespace ASC.SignalR.Base.Hubs.Chat
{
    /// <summary>
    /// SignalR hub (published under the short name "c") that relays the portal
    /// chat between JavaScript clients and the backend Jabber (XMPP) service.
    /// Hub method and client callback names are abbreviated to one or two
    /// letters, presumably to reduce wire traffic.
    /// </summary>
    [AuthorizeHub]
    [HubName("c")]
    public class Chat : Hub
    {
        // Maps (tenant, user name) to the set of active SignalR connection ids.
        public readonly static ConnectionMapping Connections = new ConnectionMapping();
        // Shared client for the backend Jabber service.
        private readonly static JabberServiceClient jabberServiceClient = new JabberServiceClient();
        private readonly static ILog log = LogManager.GetLogger(typeof(Chat));
        // Total connection count across all users; used only for trace output.
        private volatile static int allConnectionsCount;
        // NOTE(review): the two constants below are not referenced in this
        // class — possibly used elsewhere or leftovers; confirm before removal.
        private const string websockets = "webSockets";
        private const string transport = "transport";
        // Jabber presence state codes: 1 = online, 4 = offline.
        private const byte userOnline = 1;
        public const byte UserOffline = 4;
        // Severity codes for TraceMessage.
        public const byte TraceError = 0;
        public const byte TraceDebug = 1;

        // common methods

        /// <summary>
        /// SignalR lifecycle hook: re-authorizes the request if needed and
        /// performs the same cleanup as an explicit client disconnect.
        /// Exceptions are swallowed so the base disconnect always runs.
        /// </summary>
        public override Task OnDisconnected(bool stopCalled)
        {
            try
            {
                var user = Context.Request.Environment["server.User"] as GenericPrincipal;
                if (user == null)
                {
                    AuthorizeHubAttribute.Authorize(Context.Request);
                }
                DisconnectUser();
            }
            catch
            {
            }
            return base.OnDisconnected(stopCalled);
        }

        // Method for JS-clients

        /// <summary>
        /// Sends a chat message to <paramref name="calleeUserName"/> (empty
        /// string targets no specific user) and mirrors it to the caller's
        /// other connections, then forwards it to the Jabber service.
        /// On failure the caller's "e" (error) callback is invoked.
        /// </summary>
        [HubMethodName("s")]
        public void Send(string calleeUserName, string messageText)
        {
            try
            {
                var user = (IUserAccount)Context.User.Identity;
                CoreContext.TenantManager.SetCurrentTenant(user.Tenant);
                var currentUser = CoreContext.UserManager.GetUsers(user.ID);
                if (calleeUserName != string.Empty && CoreContext.UserManager.GetUserByUserName(calleeUserName).Equals(Core.Users.Constants.LostUser))
                {
                    TraceMessage(TraceError, String.Format("Can't get UserInfo by calleeUserName={0}, TenantId={1}.", calleeUserName, currentUser.Tenant));
                    throw new HubException();
                }
                TraceMessage(TraceDebug, String.Format("Send: calleeUserName={0}, messageText={1}", calleeUserName, messageText));
                var callerUserName = currentUser.UserName.ToLowerInvariant();
                var message = new MessageClass
                {
                    UserName = callerUserName,
                    Text = messageText
                };
                if (calleeUserName != string.Empty)
                {
                    // send
                    Clients.Group(currentUser.Tenant + calleeUserName).s(message, calleeUserName);
                    // send (echo to the caller's other open connections)
                    Clients.OthersInGroup(currentUser.Tenant + callerUserName).s(message, calleeUserName);
                }
                jabberServiceClient.SendMessage(currentUser.Tenant, callerUserName, calleeUserName, messageText);
            }
            catch (Exception e)
            {
                TraceMessage(TraceError, String.Format("Error on sending message to Jabber service. {0} {1} {2}",
                    e.ToString(), e.StackTrace, e.InnerException != null ? e.InnerException.Message : string.Empty));
                // error
                Clients.Caller.e();
            }
        }

        /// <summary>
        /// Pushes the presence states of all tenant users to the caller via
        /// the "sr" (statesRetrieved) callback.
        /// </summary>
        [HubMethodName("gs")]
        public void GetStates()
        {
            try
            {
                var user = (IUserAccount)Context.User.Identity;
                CoreContext.TenantManager.SetCurrentTenant(user.Tenant);
                var currentUserInfo = CoreContext.UserManager.GetUsers(user.ID);
                var currentUserName = currentUserInfo.UserName.ToLowerInvariant();
                TraceMessage(TraceDebug, String.Format("Get States currentUserName={0}", currentUserName));
                // statesRetrieved
                Clients.Caller.sr(jabberServiceClient.GetAllStates(user.Tenant, currentUserName));
            }
            catch (Exception e)
            {
                TraceMessage(TraceError, String.Format("Error on GetStates to Jabber service. {0} {1} {2}",
                    e.ToString(), e.StackTrace, e.InnerException != null ? e.InnerException.Message : string.Empty));
                // error
                Clients.Caller.e();
            }
        }

        /// <summary>
        /// Returns the display name and current presence state for the given
        /// user name, or null after signalling the "e" callback on failure.
        /// </summary>
        [HubMethodName("gci")]
        public Tuple<string, byte> GetContactInfo(string userName)
        {
            try
            {
                var u = (IUserAccount)Context.User.Identity;
                CoreContext.TenantManager.SetCurrentTenant(u.Tenant);
                var user = CoreContext.UserManager.GetUserByUserName(userName);
                TraceMessage(TraceDebug, String.Format("Get Contact Info userName={0}", userName));
                if (user.Equals(Core.Users.Constants.LostUser))
                {
                    TraceMessage(TraceError, String.Format("Can't getUserInfo by userName={0}, TenantId={1}.",
                        userName, CoreContext.TenantManager.GetCurrentTenant().TenantId));
                    throw new HubException();
                }
                return Tuple.Create(user.DisplayUserName(), jabberServiceClient.GetState(user.Tenant, userName));
            }
            catch (Exception e)
            {
                TraceMessage(TraceError, String.Format("Error on GetContactInfo to Jabber service. {0} {1} {2}",
                    e.ToString(), e.StackTrace, e.InnerException != null ? e.InnerException.Message : string.Empty));
                // error
                Clients.Caller.e();
            }
            return null;
        }

        /// <summary>
        /// Sends the caller its own identity, all users with presence states,
        /// the tenant id and the tenant domain via the "idr"
        /// (initDataRetrieved) callback.
        /// </summary>
        [HubMethodName("gid")]
        public void GetInitData()
        {
            try
            {
                var user = (IUserAccount)Context.User.Identity;
                CoreContext.TenantManager.SetCurrentTenant(user.Tenant);
                var currentUserInfo = CoreContext.UserManager.GetUsers(user.ID);
                TraceMessage(TraceDebug, String.Format("Get Init Data userName={0}", currentUserInfo.UserName));
                // initDataRetrieved
                Clients.Caller.idr(currentUserInfo.UserName.ToLowerInvariant(), currentUserInfo.DisplayUserName(),
                    GetUsers(jabberServiceClient.GetAllStates(currentUserInfo.Tenant, currentUserInfo.UserName.ToLowerInvariant())),
                    currentUserInfo.Tenant, CoreContext.TenantManager.GetCurrentTenant().GetTenantDomain());
            }
            catch (Exception e)
            {
                TraceMessage(TraceError, String.Format("Error on GetInitData to Jabber service. {0} {1} {2}",
                    e.ToString(), e.StackTrace, e.InnerException != null ? e.InnerException.Message : string.Empty));
                // error
                Clients.Caller.e();
            }
        }

        /// <summary>
        /// Notifies the callee's connections that the caller is typing, via
        /// the "sts" (sendTypingSignal) callback.
        /// </summary>
        [HubMethodName("st")]
        public void SendTyping(string calleeUserName)
        {
            try
            {
                var user = (IUserAccount)Context.User.Identity;
                CoreContext.TenantManager.SetCurrentTenant(user.Tenant);
                var currentUser = CoreContext.UserManager.GetUsers(user.ID);
                if (CoreContext.UserManager.GetUserByUserName(calleeUserName).Equals(Core.Users.Constants.LostUser))
                {
                    TraceMessage(TraceError, String.Format("Can't getUserInfo by calleeUserName = {0}, TenantId = {1}.",
                        calleeUserName, currentUser.Tenant));
                    throw new HubException();
                }
                // sendTypingSignal
                Clients.Group(currentUser.Tenant + calleeUserName).sts(currentUser.UserName.ToLowerInvariant());
            }
            catch (Exception e)
            {
                TraceMessage(TraceError, String.Format("Error on sending typing to Jabber service. {0} {1} {2}",
                    e.ToString(), e.StackTrace, e.InnerException != null ? e.InnerException.Message : string.Empty));
                // error
                Clients.Caller.e();
            }
        }

        /// <summary>
        /// Publishes the caller's new presence state to the Jabber service and
        /// broadcasts it to the rest of the tenant via the "ss" (setState)
        /// callback.
        /// </summary>
        [HubMethodName("sstt")]
        public void SendStateToTenant(byte state)
        {
            try
            {
                var user = (IUserAccount)Context.User.Identity;
                CoreContext.TenantManager.SetCurrentTenant(user.Tenant);
                var currentUser = CoreContext.UserManager.GetUsers(user.ID);
                var userName = currentUser.UserName.ToLowerInvariant();
                TraceMessage(TraceDebug, String.Format("Send State To Tenant userName={0}, state={1}", userName, state));
                // The Jabber service may adjust the requested state; broadcast
                // the value it returns, not the one the client sent.
                state = jabberServiceClient.SendState(currentUser.Tenant, userName, state);
                // setState
                Clients.OthersInGroup(currentUser.Tenant.ToString(CultureInfo.InvariantCulture)).ss(userName, state, false);
            }
            catch (Exception e)
            {
                TraceMessage(TraceError, String.Format("Error on SendStateToTenant to Jabber. {0} {1} {2}",
                    e.ToString(), e.StackTrace, e.InnerException != null ? e.InnerException.Message : string.Empty));
                // error
                Clients.Caller.e();
            }
        }

        /// <summary>
        /// Fetches recent chat history with <paramref name="calleeUserName"/>
        /// (empty string means all correspondents) starting from message id
        /// <paramref name="id"/>.  Timestamps are converted to tenant-local
        /// time and each message's UserName is normalized to either the
        /// caller or the callee.  Returns null on failure.
        /// </summary>
        [HubMethodName("grm")]
        public MessageClass[] GetRecentMessages(string calleeUserName, int id)
        {
            MessageClass[] recentMessages = null;
            try
            {
                var user = (IUserAccount)Context.User.Identity;
                CoreContext.TenantManager.SetCurrentTenant(user.Tenant);
                var currentUser = CoreContext.UserManager.GetUsers(user.ID);
                var calleeUser = CoreContext.UserManager.GetUserByUserName(calleeUserName);
                if (calleeUserName != string.Empty && calleeUser.Equals(Core.Users.Constants.LostUser))
                {
                    TraceMessage(TraceError, String.Format("Can't getUserInfo by calleeUserName = {0}, TenantId = {1}.", calleeUserName, currentUser.Tenant));
                    throw new HubException();
                }
                var callerUserName = currentUser.UserName.ToLowerInvariant();
                TraceMessage(TraceDebug, String.Format("Get Recent Messages calleeUserName={0}, callerUserName={1}, id={2}", calleeUserName, callerUserName, id));
                recentMessages = jabberServiceClient.GetRecentMessages(currentUser.Tenant,
                    callerUserName, calleeUserName == string.Empty ? null : calleeUserName, id);
                if (recentMessages != null)
                {
                    for (var i = 0; i < recentMessages.Length; i++)
                    {
                        // NOTE(review): the extra millisecond presumably avoids
                        // boundary collisions when paging by timestamp — confirm.
                        recentMessages[i].DateTime = TenantUtil.DateTimeFromUtc(recentMessages[i].DateTime.AddMilliseconds(1));
                        if (recentMessages[i].UserName == null ||
                            String.Equals(recentMessages[i].UserName, calleeUserName, StringComparison.InvariantCultureIgnoreCase))
                        {
                            recentMessages[i].UserName = calleeUserName;
                        }
                        else
                        {
                            recentMessages[i].UserName = callerUserName;
                        }
                    }
                }
            }
            catch (Exception e)
            {
                TraceMessage(TraceError, String.Format("Error on receiving recent messages from Jabber service. {0}, {1}, {2}",
                    e.ToString(), e.StackTrace, e.InnerException != null ? e.InnerException.Message : string.Empty));
                // error
                Clients.Caller.e();
            }
            return recentMessages;
        }

        /// <summary>
        /// Keep-alive from the JS client; forwarded to the Jabber service with
        /// the client's current state.  Failures are logged but not reported
        /// back to the client.
        /// </summary>
        [HubMethodName("p")]
        public void Ping(byte state)
        {
            try
            {
                var user = (IUserAccount)Context.User.Identity;
                CoreContext.TenantManager.SetCurrentTenant(user.Tenant);
                var userInfo = CoreContext.UserManager.GetUsers(user.ID);
                TraceMessage(TraceDebug, String.Format("Ping from JS client: {0}", userInfo.ID));
                jabberServiceClient.Ping(userInfo.ID.ToString(), userInfo.Tenant, userInfo.UserName, state);
            }
            catch (Exception e)
            {
                TraceMessage(TraceError, String.Format("Error on Ping to Jabber. {0} {1} {2}",
                    e.ToString(), e.StackTrace, e.InnerException != null ? e.InnerException.Message : string.Empty));
            }
        }

        /// <summary>
        /// Registers the calling connection: joins the per-user and per-tenant
        /// SignalR groups, records the connection, opens an XMPP connection
        /// for the user's first connection (or just refreshes state for
        /// subsequent ones), and broadcasts the resulting state to the tenant.
        /// </summary>
        [HubMethodName("cu")]
        public void ConnectUser(string stateNumber)
        {
            try
            {
                var user = Context.Request.Environment["server.User"] as GenericPrincipal;
                if (user != null)
                {
                    var userAccount = user.Identity as IUserAccount;
                    if (userAccount == null)
                    {
                        TraceMessage(TraceError, "Unknown user tries to connect to SignalR hub.");
                        throw new SecurityException();
                    }
                    CoreContext.TenantManager.SetCurrentTenant(userAccount.Tenant);
                    byte state;
                    try
                    {
                        state = Convert.ToByte(stateNumber);
                    }
                    catch (Exception e)
                    {
                        // Malformed state from the client: fall back to online.
                        TraceMessage(TraceError, String.Format("Possible wrong state on connecting, state = {0}. {1}", stateNumber, e));
                        state = userOnline;
                    }
                    var currentUser = CoreContext.UserManager.GetUsers(userAccount.ID);
                    if (!currentUser.Equals(Core.Users.Constants.LostUser))
                    {
                        var currentUserName = currentUser.UserName.ToLowerInvariant();
                        Groups.Add(Context.ConnectionId, currentUser.Tenant + currentUserName);
                        Groups.Add(Context.ConnectionId, currentUser.Tenant.ToString(CultureInfo.InvariantCulture));
                        var connectionsCount = Connections.Add(currentUser.Tenant, currentUserName, Context.ConnectionId);
                        TraceMessage(TraceDebug, String.Format("Add Connection. {0}. Count: {1}", currentUserName, ++allConnectionsCount));
                        if (connectionsCount == 1)
                        {
                            // First connection for this user: open the XMPP session.
                            state = jabberServiceClient.AddXmppConnection(currentUser.ID.ToString(), currentUserName, state, currentUser.Tenant);
                        }
                        else
                        {
                            state = jabberServiceClient.SendState(currentUser.Tenant, currentUserName, state);
                            if (state != UserOffline)
                            {
                                // setStatus
                                Clients.OthersInGroup(currentUser.Tenant + currentUserName).sst(state);
                            }
                        }
                        // setState
                        Clients.OthersInGroup(currentUser.Tenant.ToString(CultureInfo.InvariantCulture)).ss(currentUserName, state, false);
                    }
                    else
                    {
                        TraceMessage(TraceError, "Unknown user tries to connect.");
                        throw new SecurityException("Unknown user tries to connect.");
                    }
                }
                else
                {
                    TraceMessage(TraceError, "Unknown user tries to connect.");
                    throw new SecurityException("Unknown user tries to connect.");
                }
            }
            catch (Exception e)
            {
                TraceMessage(TraceError, String.Format("Error on ConnectUser to Jabber. {0} {1} {2}",
                    e.ToString(), e.StackTrace, e.InnerException != null ? e.InnerException.Message : string.Empty));
                // error
                Clients.Caller.e();
            }
        }

        /// <summary>
        /// Unregisters the calling connection: leaves the SignalR groups,
        /// removes the connection record, closes the XMPP session when it was
        /// the user's last connection, and broadcasts the new state.
        /// </summary>
        [HubMethodName("dcu")]
        public void DisconnectUser()
        {
            try
            {
                var user = Context.Request.Environment["server.User"] as GenericPrincipal;
                if (user != null)
                {
                    var userAccount = user.Identity as IUserAccount;
                    if (userAccount != null)
                    {
                        CoreContext.TenantManager.SetCurrentTenant(userAccount.Tenant);
                    }
                    else
                    {
                        TraceMessage(TraceError, String.Format("Unknown request without user.Identity as IUserAccount, url={0}",
                            Context.Request.Url));
                        throw new SecurityException("Unknown request without user.Identity as IUserAccount");
                    }
                    var currentUser = CoreContext.UserManager.GetUsers(userAccount.ID);
                    if (!currentUser.Equals(Core.Users.Constants.LostUser))
                    {
                        var currentUserName = currentUser.UserName.ToLowerInvariant();
                        Groups.Remove(Context.ConnectionId, currentUser.Tenant + currentUserName);
                        Groups.Remove(Context.ConnectionId, currentUser.Tenant.ToString(CultureInfo.InvariantCulture));
                        byte state = UserOffline;
                        bool result;
                        var connectionsCount = Connections.Remove(currentUser.Tenant, currentUserName, Context.ConnectionId, out result);
                        if (result)
                        {
                            TraceMessage(TraceDebug, String.Format("Remove Connection. {0}. Count: {1}", currentUserName, --allConnectionsCount));
                            if (connectionsCount == 0)
                            {
                                // Last connection for this user: close the XMPP session.
                                state = jabberServiceClient.RemoveXmppConnection(currentUser.ID.ToString(), currentUserName, currentUser.Tenant);
                            }
                            else
                            {
                                state = jabberServiceClient.GetState(currentUser.Tenant, currentUserName);
                                if (state != UserOffline)
                                {
                                    // setStatus
                                    Clients.OthersInGroup(currentUser.Tenant + currentUserName).sst(state);
                                }
                            }
                            // setState
                            Clients.OthersInGroup(currentUser.Tenant.ToString(CultureInfo.InvariantCulture)).ss(currentUserName, state, false);
                        }
                    }
                    else
                    {
                        TraceMessage(TraceError, "Unknown user tries to disconnect.");
                        throw new SecurityException("Unknown user tries to disconnect.");
                    }
                }
                else
                {
                    TraceMessage(TraceError, "Unknown user tries to disconnect from SignalR hub.");
                    throw new SecurityException("Unknown user tries to disconnect from SignalR hub.");
                }
            }
            catch (Exception e)
            {
                TraceMessage(TraceError, String.Format("Error on DisconnectUser to Jabber. {0} {1} {2}",
                    e.ToString(), e.StackTrace, e.InnerException != null ? e.InnerException.Message : string.Empty));
                // error
                Clients.Caller.e();
            }
        }

        /// <summary>
        /// Builds the user list for the current tenant (excluding the caller),
        /// sorted by name, pairing each user with its presence state from
        /// <paramref name="states"/> (defaulting to offline when absent).
        /// </summary>
        private static UserClass[] GetUsers(IReadOnlyDictionary<string, byte> states)
        {
            var users = CoreContext.UserManager.GetUsers().Where(user => !user.IsMe()).SortByUserName();
            var usersArray = new UserClass[users.Count];
            for (var i = 0; i < users.Count; i++)
            {
                byte state;
                var userName = users[i].UserName.ToLowerInvariant();
                if (!states.TryGetValue(userName, out state))
                {
                    state = UserOffline;
                }
                usersArray[i] = new UserClass { UserName = userName, DisplayUserName = users[i].DisplayUserName(), State = state };
            }
            return usersArray;
        }

        /// <summary>
        /// Writes a message to the log at the given severity, appending the
        /// call site (file:member:line) captured via caller-info attributes.
        /// </summary>
        public static void TraceMessage(byte messageState, string message, [CallerMemberName] string memberName = "",
            [CallerFilePath] string filePath = "", [CallerLineNumber] int lineNumber = 0)
        {
            switch (messageState)
            {
                case TraceError:
                    log.ErrorFormat(message + " {0}:{1}:{2}", filePath, memberName, lineNumber);
                    break;
                case TraceDebug:
                    log.DebugFormat(message + " {0}:{1}:{2}", filePath, memberName, lineNumber);
                    break;
            }
        }
    }
} | agpl-3.0 |
wlwwt/shopware | engine/Shopware/Bundle/StoreFrontBundle/Struct/Product.php | 6967 | <?php
/**
* Shopware 5
* Copyright (c) shopware AG
*
* According to our dual licensing model, this program can be used either
* under the terms of the GNU Affero General Public License, version 3,
* or under a proprietary license.
*
* The texts of the GNU Affero General Public License with an additional
* permission and of our proprietary license can be found at and
* in the LICENSE file you have received along with this program.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* "Shopware" is a registered trademark of shopware AG.
* The licensing of the program under the AGPLv3 does not imply a
* trademark license. Therefore any rights, title and interest in
* our trademarks remain entirely with us.
*/
namespace Shopware\Bundle\StoreFrontBundle\Struct;
use Shopware\Bundle\StoreFrontBundle\Struct\Configurator\Group;
use Shopware\Bundle\StoreFrontBundle\Struct\Property\Set;
/**
* @category Shopware
*
* @copyright Copyright (c) shopware AG (http://www.shopware.de)
*/
class Product extends ListProduct
{
    /**
     * @var ListProduct[]
     */
    protected $relatedProducts = [];

    /**
     * @var ProductStream[]
     */
    protected $relatedProductStreams = [];

    /**
     * @var ListProduct[]
     */
    protected $similarProducts = [];

    /**
     * @var Product\Download[]
     */
    protected $downloads = [];

    /**
     * @var Product\Link[]
     */
    protected $links = [];

    /**
     * @var Media[]
     */
    protected $media = [];

    /**
     * @var Product\Vote[]
     */
    protected $votes = [];

    /**
     * @var Set
     */
    protected $propertySet;

    /**
     * @var Group[]
     */
    protected $configuration = [];

    /**
     * Creates a full Product struct from a ListProduct, copying every
     * accessible property of the source object onto the new instance.
     *
     * @return Product
     */
    public static function createFromListProduct(ListProduct $listProduct)
    {
        $product = new self(
            $listProduct->getId(),
            $listProduct->getVariantId(),
            $listProduct->getNumber()
        );
        // Iterating over the object yields all of its accessible properties.
        foreach ($listProduct as $key => $value) {
            $product->$key = $value;
        }
        return $product;
    }

    /**
     * @param \Shopware\Bundle\StoreFrontBundle\Struct\Media[] $media
     */
    public function setMedia($media)
    {
        $this->media = $media;
    }

    /**
     * @return \Shopware\Bundle\StoreFrontBundle\Struct\Media[]
     */
    public function getMedia()
    {
        return $this->media;
    }

    /**
     * Returns the thumbnail at the given size index for each image media
     * element (non-image media is skipped).
     *
     * @param int $index
     *
     * @return Thumbnail[]
     */
    public function getThumbnailsBySize($index)
    {
        $result = array_filter($this->media, function (Media $media) {
            return $media->getType() === Media::TYPE_IMAGE;
        });
        return array_map(function (Media $media) use ($index) {
            return $media->getThumbnail($index);
        }, $result);
    }

    /**
     * @param \Shopware\Bundle\StoreFrontBundle\Struct\Property\Set $propertySet
     */
    public function setPropertySet($propertySet)
    {
        $this->propertySet = $propertySet;
    }

    /**
     * @return \Shopware\Bundle\StoreFrontBundle\Struct\Property\Set|null
     */
    public function getPropertySet()
    {
        return $this->propertySet;
    }

    /**
     * @param \Shopware\Bundle\StoreFrontBundle\Struct\Product\Vote[] $votes
     */
    public function setVotes($votes)
    {
        $this->votes = $votes;
    }

    /**
     * @return \Shopware\Bundle\StoreFrontBundle\Struct\Product\Vote[]
     */
    public function getVotes()
    {
        return $this->votes;
    }

    /**
     * @param \Shopware\Bundle\StoreFrontBundle\Struct\ListProduct[] $relatedProducts
     */
    public function setRelatedProducts($relatedProducts)
    {
        $this->relatedProducts = $relatedProducts;
    }

    /**
     * @return \Shopware\Bundle\StoreFrontBundle\Struct\ListProduct[]
     */
    public function getRelatedProducts()
    {
        return $this->relatedProducts;
    }

    /**
     * @return \Shopware\Bundle\StoreFrontBundle\Struct\ProductStream[]
     */
    public function getRelatedProductStreams()
    {
        return $this->relatedProductStreams;
    }

    /**
     * @param \Shopware\Bundle\StoreFrontBundle\Struct\ProductStream[] $relatedProductStreams
     */
    public function setRelatedProductStreams($relatedProductStreams)
    {
        $this->relatedProductStreams = $relatedProductStreams;
    }

    /**
     * @param \Shopware\Bundle\StoreFrontBundle\Struct\ListProduct[] $similarProducts
     */
    public function setSimilarProducts($similarProducts)
    {
        $this->similarProducts = $similarProducts;
    }

    /**
     * @return \Shopware\Bundle\StoreFrontBundle\Struct\ListProduct[]
     */
    public function getSimilarProducts()
    {
        return $this->similarProducts;
    }

    /**
     * @param \Shopware\Bundle\StoreFrontBundle\Struct\Product\Download[] $downloads
     */
    public function setDownloads($downloads)
    {
        $this->downloads = $downloads;
    }

    /**
     * @return \Shopware\Bundle\StoreFrontBundle\Struct\Product\Download[]
     */
    public function getDownloads()
    {
        return $this->downloads;
    }

    /**
     * @param \Shopware\Bundle\StoreFrontBundle\Struct\Product\Link[] $links
     */
    public function setLinks($links)
    {
        $this->links = $links;
    }

    /**
     * @return \Shopware\Bundle\StoreFrontBundle\Struct\Product\Link[]
     */
    public function getLinks()
    {
        return $this->links;
    }

    /**
     * @param \Shopware\Bundle\StoreFrontBundle\Struct\Configurator\Group[] $configuration
     */
    public function setConfiguration($configuration)
    {
        $this->configuration = $configuration;
    }

    /**
     * @return \Shopware\Bundle\StoreFrontBundle\Struct\Configurator\Group[]
     */
    public function getConfiguration()
    {
        return $this->configuration;
    }

    /**
     * Helper function which used to get the configuration selection of
     * the passed product number.
     * The result array contains a simple array which elements are indexed by
     * the configurator group id and the value contains the configurator option id.
     *
     * This function is required to load different product variations on the product
     * detail page via order number.
     *
     * @return array
     */
    public function getSelectedOptions()
    {
        $selection = [];
        foreach ($this->configuration as $group) {
            $options = $group->getOptions();
            // Robustness fix: a group without options previously triggered an
            // undefined-offset notice; such groups simply have no selection.
            if (empty($options)) {
                continue;
            }
            $selection[$group->getId()] = $options[0]->getId();
        }
        return $selection;
    }

    /**
     * {@inheritdoc}
     */
    public function jsonSerialize()
    {
        return get_object_vars($this);
    }
}
| agpl-3.0 |
closeio/nylas | inbox/test/imap/test_update_metadata.py | 3732 | import pytest
import json
from inbox.crispin import GmailFlags, Flags
from inbox.models.backends.imap import ImapUid
from inbox.mailsync.backends.imap.common import (update_metadata,
update_message_metadata)
from inbox.test.util.base import (add_fake_message, add_fake_imapuid,
add_fake_folder, add_fake_thread)
def test_gmail_label_sync(db, default_account, message, folder,
                          imapuid, default_namespace):
    """Gmail labels reported by the server become message categories."""
    # Note that IMAPClient parses numeric labels into integer types. We have
    # to correctly handle those too.
    labels = (u'\\Important', u'\\Starred', u'foo', 42)
    new_flags = {imapuid.msg_uid: GmailFlags((), labels, None)}
    update_metadata(default_namespace.account.id,
                    folder.id, folder.canonical_name, new_flags, db.session)

    canonical_names = {category.name for category in message.categories}
    display_names = {category.display_name for category in message.categories}
    assert 'important' in canonical_names
    assert {'foo', '42'} <= display_names
def test_gmail_drafts_flag_constrained_by_folder(db, default_account, message,
                                                 imapuid, folder):
    """The Gmail \\Draft flag should only take effect in the 'all' folder."""
    draft_flags = {imapuid.msg_uid: GmailFlags((), (u'\\Draft',), None)}

    # Seen in 'all', the flag marks the message as a draft...
    update_metadata(default_account.id, folder.id, 'all', draft_flags,
                    db.session)
    assert message.is_draft

    # ...but seen in 'trash' it clears the draft state again.
    update_metadata(default_account.id, folder.id, 'trash', draft_flags,
                    db.session)
    assert not message.is_draft
@pytest.mark.parametrize('folder_role', ['drafts', 'trash', 'archive'])
def test_generic_drafts_flag_constrained_by_folder(db, generic_account,
                                                   folder_role):
    """For generic IMAP, \\Draft only marks drafts inside the drafts folder."""
    uid = 22
    namespace_id = generic_account.namespace.id
    thread = add_fake_thread(db.session, namespace_id)
    message = add_fake_message(db.session, namespace_id, thread)
    folder = add_fake_folder(db.session, generic_account)
    add_fake_imapuid(db.session, generic_account.id, message, folder, uid)

    update_metadata(generic_account.id, folder.id, folder_role,
                    {uid: Flags(('\\Draft',), None)}, db.session)
    assert message.is_draft == (folder_role == 'drafts')
def test_update_categories_when_actionlog_entry_missing(
        db, default_account, message, imapuid):
    # NOTE(review): presumably this simulates pending category changes whose
    # ActionLog entry is gone; update_message_metadata should then fall back
    # to deriving categories from the uid's folder — confirm against the
    # implementation in inbox.mailsync.backends.imap.common.
    message.categories_changes = True
    db.session.commit()
    update_message_metadata(db.session, imapuid.account, message, False)
    assert message.categories == {imapuid.folder.category}
def test_truncate_imapuid_extra_flags(db, default_account, message, folder):
    """extra_flags must be truncated so its JSON form fits a 255-char column."""
    preamble = ('We the People of the United States in Order to form a more '
                'perfect Union establish Justice insure domestic Tranquility '
                'provide for the common defence promote the general Welfare '
                'and secure the Blessings of Liberty to ourselves and our '
                'Posterity do ordain and establish this Constitution for the '
                'United States of America')
    imapuid = ImapUid(message=message, account_id=default_account.id,
                      msg_uid=2222, folder=folder)
    imapuid.update_flags(preamble.split())

    assert len(json.dumps(imapuid.extra_flags)) < 255
| agpl-3.0 |
dfsilva/actor-platform | actor-sdk/sdk-core/core/core-js/src/main/java/im/actor/core/js/entity/JsContent.java | 6843 | /*
* Copyright (C) 2015 Actor LLC. <https://actor.im>
*/
package im.actor.core.js.entity;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.core.client.JsArray;
import com.google.gwt.core.client.JsArrayString;
import im.actor.core.api.ApiTextModernAttach;
import im.actor.core.api.ApiTextModernField;
import im.actor.core.api.ApiTextModernMessage;
import im.actor.core.entity.ImageLocation;
import im.actor.core.entity.content.AbsContent;
import im.actor.core.entity.content.AnimationContent;
import im.actor.core.entity.content.ContactContent;
import im.actor.core.entity.content.DocumentContent;
import im.actor.core.entity.content.FileLocalSource;
import im.actor.core.entity.content.FileRemoteSource;
import im.actor.core.entity.content.LocationContent;
import im.actor.core.entity.content.PhotoContent;
import im.actor.core.entity.content.ServiceContent;
import im.actor.core.entity.content.StickerContent;
import im.actor.core.entity.content.TextContent;
import im.actor.core.entity.content.VoiceContent;
import im.actor.core.js.JsMessenger;
import im.actor.runtime.crypto.Base64Utils;
/**
 * GWT overlay base type for message content handed to the JavaScript side.
 * The factory method converts core {@link AbsContent} entities into the
 * concrete Js* content wrappers consumed by the web client.
 */
public abstract class JsContent extends JavaScriptObject {

    /**
     * Converts message content into its JavaScript representation.
     *
     * @param src    core content entity to convert
     * @param sender uid of the message sender (needed to format service
     *               messages)
     * @return the matching JsContent subtype, or an "unsupported" stub for
     *         unknown content types
     */
    public static JsContent createContent(AbsContent src, int sender) {
        JsMessenger messenger = JsMessenger.getInstance();
        JsContent content;
        if (src instanceof TextContent) {
            TextContent textContent = (TextContent) src;
            if (textContent.getTextMessageEx() instanceof ApiTextModernMessage) {
                // "Modern" text messages carry paragraph styling and rich
                // attachments in addition to the plain text.
                ApiTextModernMessage modernMessage = (ApiTextModernMessage) textContent.getTextMessageEx();
                String text = modernMessage.getText();
                JsParagraphStyle paragraphStyle = JsParagraphStyle.create(modernMessage.getStyle());
                JsArray<JsAttach> attaches = JsArray.createArray().cast();
                for (ApiTextModernAttach srcAttach : modernMessage.getAttaches()) {
                    JsArray<JsAttachField> fields = JsArray.createArray().cast();
                    for (ApiTextModernField f : srcAttach.getFields()) {
                        // Fields default to "short" when the flag is absent.
                        boolean isShort = f.isShort() != null ? f.isShort() : true;
                        fields.push(JsAttachField.create(f.getTitle(), f.getValue(),
                                isShort));
                    }
                    attaches.push(JsAttach.create(
                            srcAttach.getTitle(),
                            srcAttach.getTitleUrl(),
                            srcAttach.getText(),
                            JsParagraphStyle.create(srcAttach.getStyle()),
                            fields));
                }
                content = JsContentTextModern.create(text, paragraphStyle, attaches);
            } else {
                content = JsContentText.create(((TextContent) src).getText());
            }
        } else if (src instanceof ServiceContent) {
            content = JsContentService.create(messenger.getFormatter().formatFullServiceMessage(sender, (ServiceContent) src, false));
        } else if (src instanceof DocumentContent) {
            // Photos, animations and voice messages are specialized documents;
            // the shared name/size/url/thumbnail data is extracted first.
            DocumentContent doc = (DocumentContent) src;
            String fileName = doc.getName();
            String fileExtension = doc.getExt();
            String fileSize = messenger.getFormatter().formatFileSize(doc.getSource().getSize());
            String fileUrl = null;
            if (doc.getSource() instanceof FileRemoteSource) {
                fileUrl = messenger.getFileUrl(((FileRemoteSource) doc.getSource()).getFileReference());
            }
            // A local source means the upload has not finished yet.
            boolean isUploading = doc.getSource() instanceof FileLocalSource;
            String thumb = null;
            if (doc.getFastThumb() != null) {
                // Inline the fast thumbnail as a base64 data URI.
                String thumbBase64 = Base64Utils.toBase64(doc.getFastThumb().getImage());
                thumb = "data:image/jpg;base64," + thumbBase64;
            }
            if (src instanceof PhotoContent && thumb != null) {
                PhotoContent photoContent = (PhotoContent) src;
                content = JsContentPhoto.create(
                        fileName, fileExtension, fileSize,
                        photoContent.getW(), photoContent.getH(), thumb,
                        fileUrl, isUploading);
            } else if (src instanceof AnimationContent) {
                AnimationContent animationContent = (AnimationContent) src;
                content = JsContentAnimation.create(fileName, fileExtension, fileSize,
                        animationContent.getW(), animationContent.getH(), thumb,
                        fileUrl, isUploading);
            } else if (src instanceof VoiceContent) {
                VoiceContent voiceContent = (VoiceContent) src;
                content = JsContentVoice.create(fileName, fileExtension, fileSize, fileUrl,
                        isUploading, voiceContent.getDuration());
            } else {
                content = JsContentDocument.create(fileName, fileExtension, fileSize,
                        thumb, fileUrl, isUploading);
            }
        } else if (src instanceof StickerContent) {
            StickerContent sticker = (StickerContent) src;
            // Prefer the larger 512px rendition when available.
            ImageLocation stickerImage = sticker.getImage256();
            if (sticker.getImage512() != null) {
                stickerImage = sticker.getImage512();
            }
            String fileUrl = messenger.getFileUrl(stickerImage.getReference());
            String fileSize = messenger.getFormatter().formatFileSize(stickerImage.getReference().getFileSize());
            content = JsContentSticker.create(
                    stickerImage.getReference().getFileName(),
                    ".webp", fileSize,
                    stickerImage.getWidth(), stickerImage.getHeight(), null, fileUrl, false);
        } else if (src instanceof ContactContent) {
            ContactContent contactContent = (ContactContent) src;
            JsArrayString phones = JsArray.createArray().cast();
            JsArrayString emails = JsArray.createArray().cast();
            for (String s : contactContent.getEmails()) {
                emails.push(s);
            }
            for (String s : contactContent.getPhones()) {
                phones.push(s);
            }
            content = JsContentContact.create(contactContent.getName(),
                    contactContent.getPhoto64(), phones, emails);
        } else if (src instanceof LocationContent) {
            LocationContent locationContent = (LocationContent) src;
            content = JsContentLocation.create(locationContent.getLongitude(), locationContent.getLatitude(),
                    locationContent.getStreet(), locationContent.getPlace());
        } else {
            // Unknown content type: render a generic "unsupported" stub.
            content = JsContentUnsupported.create();
        }
        return content;
    }

    // GWT overlay types require a protected no-arg constructor.
    protected JsContent() {
    }
}
| agpl-3.0 |
krisis/mc | cmd/admin-bucket-remote-rm.go | 3014 | // Copyright (c) 2015-2021 MinIO, Inc.
//
// This file is part of MinIO Object Storage stack
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
package cmd
import (
"github.com/fatih/color"
"github.com/minio/cli"
"github.com/minio/mc/pkg/probe"
"github.com/minio/pkg/console"
)
// adminBucketRemoteRmFlags holds the flags accepted by
// "mc admin bucket remote rm".
var adminBucketRemoteRmFlags = []cli.Flag{
	cli.StringFlag{
		Name:  "arn",
		Usage: "ARN to be removed",
	},
}

// adminBucketRemoteRmCmd describes the "rm" subcommand, which removes a
// configured remote replication target from a bucket.
var adminBucketRemoteRmCmd = cli.Command{
	Name:         "rm",
	Usage:        "remove configured remote target",
	Action:       mainAdminBucketRemoteRemove,
	OnUsageError: onUsageError,
	Before:       setGlobalsFromContext,
	Flags:        append(globalFlags, adminBucketRemoteRmFlags...),
	CustomHelpTemplate: `NAME:
  {{.HelpName}} - {{.Usage}}

USAGE:
  {{.HelpName}} TARGET

FLAGS:
  {{range .VisibleFlags}}{{.}}
  {{end}}
EXAMPLES:
  1. Remove existing remote target with arn "arn:minio:replication:us-west-1:993bc6b6-accd-45e3-884f-5f3e652aed2a:dest1"
     for bucket srcbucket on MinIO server.
     {{.Prompt}} {{.HelpName}} myminio/srcbucket --arn "arn:minio:replication:us-west-1:993bc6b6-accd-45e3-884f-5f3e652aed2a:dest1"
`,
}
// checkAdminBucketRemoteRemoveSyntax - validate all the passed arguments.
// Exactly one ALIAS/BUCKET target is required; otherwise print the command
// help and exit.
func checkAdminBucketRemoteRemoveSyntax(ctx *cli.Context) {
	if len(ctx.Args()) != 1 {
		cli.ShowCommandHelpAndExit(ctx, ctx.Command.Name, 1) // last argument is exit code
	}
}
// mainAdminBucketRemoteRemove is the handle for "mc admin bucket remote rm"
// command. It validates the arguments, connects to the aliased deployment and
// removes the remote target identified by --arn from the source bucket.
func mainAdminBucketRemoteRemove(ctx *cli.Context) error {
	checkAdminBucketRemoteRemoveSyntax(ctx)

	console.SetColor("RemoteMessage", color.New(color.FgGreen))

	// Get the alias parameter from cli
	args := ctx.Args()
	aliasedURL := args.Get(0)

	// Create a new MinIO Admin Client
	client, cerr := newAdminClient(aliasedURL)
	fatalIf(cerr.Trace(aliasedURL), "Unable to initialize admin connection.")

	// Consistency: reuse aliasedURL instead of re-indexing args[0].
	_, sourceBucket := url2Alias(aliasedURL)
	if sourceBucket == "" {
		fatalIf(errInvalidArgument(), "Source bucket not specified in `"+aliasedURL+"`.")
	}

	arn := ctx.String("arn")
	if arn == "" {
		fatalIf(errInvalidArgument(), "ARN needs to be specified.")
	}

	fatalIf(probe.NewError(client.RemoveRemoteTarget(globalContext, sourceBucket, arn)).Trace(args...), "Unable to remove remote target")

	printMsg(RemoteMessage{
		op:           ctx.Command.Name,
		SourceBucket: sourceBucket,
		RemoteARN:    arn,
	})
	return nil
}
| agpl-3.0 |
ewheeler/rapidpro | temba/contacts/migrations/0025_unblock_contacts_imported_again_after_being_blocked.py | 874 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.db.models import Count, Q
def unblock_contacts_imported_again(apps, schema_editor):
    """Unblock blocked, non-test contacts whose group count is not exactly one.

    A blocked contact normally belongs to exactly one group; any other count
    indicates the contact was imported into a group again after being blocked,
    so the block flag is cleared.
    """
    Contact = apps.get_model('contacts', 'Contact')

    blocked_contacts = Contact.objects.filter(is_blocked=True, is_test=False).annotate(group_count=Count('all_groups'))
    reimported_contacts = blocked_contacts.filter(Q(group_count__gt=1) | Q(group_count__lt=1))

    updated = Contact.objects.filter(pk__in=reimported_contacts).update(is_blocked=False)
    if updated:
        # Parenthesized call form: valid as both a Python 2 print statement
        # (printing a parenthesized expression) and a Python 3 function call.
        print("Fixed %d contacts that are blocked and has another group" % updated)
class Migration(migrations.Migration):
    """Data migration: unblock contacts that were re-imported after blocking."""

    # Must run after the exportcontactstask uuid migration.
    dependencies = [
        ('contacts', '0024_exportcontactstask_uuid'),
    ]

    operations = [
        migrations.RunPython(unblock_contacts_imported_again)
    ]
| agpl-3.0 |
Turan-no/Turan | media/openlayers/lib/OpenLayers/Layer/TileCache.js | 5309 | /* Copyright (c) 2006-2011 by OpenLayers Contributors (see authors.txt for
* full list of contributors). Published under the Clear BSD license.
* See http://svn.openlayers.org/trunk/openlayers/license.txt for the
* full text of the license. */
/**
* @requires OpenLayers/Layer/Grid.js
* @requires OpenLayers/Tile/Image.js
*/
/**
* Class: OpenLayers.Layer.TileCache
* A read only TileCache layer. Used to requests tiles cached by TileCache in
* a web accessible cache. This means that you have to pre-populate your
* cache before this layer can be used. It is meant only to read tiles
* created by TileCache, and not to make calls to TileCache for tile
* creation. Create a new instance with the
* <OpenLayers.Layer.TileCache> constructor.
*
* Inherits from:
* - <OpenLayers.Layer.Grid>
*/
OpenLayers.Layer.TileCache = OpenLayers.Class(OpenLayers.Layer.Grid, {

    /**
     * APIProperty: isBaseLayer
     * {Boolean} Treat this layer as a base layer. Default is true.
     */
    isBaseLayer: true,

    /**
     * APIProperty: format
     * {String} Mime type of the images returned. Default is image/png.
     */
    format: 'image/png',

    /**
     * APIProperty: serverResolutions
     * {Array} A list of all resolutions available on the server. Only set this
     *     property if the map resolutions differs from the server.
     */
    serverResolutions: null,

    /**
     * Constructor: OpenLayers.Layer.TileCache
     * Create a new read only TileCache layer.
     *
     * Parameters:
     * name - {String} Name of the layer displayed in the interface
     * url - {String} Location of the web accessible cache (not the location of
     *     your tilecache script!)
     * layername - {String} Layer name as defined in the TileCache
     *     configuration
     * options - {Object} Optional object with properties to be set on the
     *     layer. Note that you should speficy your resolutions to match
     *     your TileCache configuration. This can be done by setting
     *     the resolutions array directly (here or on the map), by setting
     *     maxResolution and numZoomLevels, or by using scale based properties.
     */
    initialize: function(name, url, layername, options) {
        this.layername = layername;
        OpenLayers.Layer.Grid.prototype.initialize.apply(this,
                                                         [name, url, {}, options]);
        // Derive the on-disk file extension from the mime type; TileCache
        // stores JPEG tiles with a "jpeg" extension.
        this.extension = this.format.split('/')[1].toLowerCase();
        this.extension = (this.extension == 'jpg') ? 'jpeg' : this.extension;
    },

    /**
     * APIMethod: clone
     * obj - {Object}
     *
     * Returns:
     * {<OpenLayers.Layer.TileCache>} An exact clone of this
     *     <OpenLayers.Layer.TileCache>
     */
    clone: function (obj) {
        if (obj == null) {
            obj = new OpenLayers.Layer.TileCache(this.name,
                                                 this.url,
                                                 this.layername,
                                                 this.getOptions());
        }
        //get all additions from superclasses
        obj = OpenLayers.Layer.Grid.prototype.clone.apply(this, [obj]);
        // copy/set any non-init, non-simple values here
        return obj;
    },

    /**
     * Method: getURL
     *
     * Parameters:
     * bounds - {<OpenLayers.Bounds>}
     *
     * Returns:
     * {String} A string with the layer's url and parameters and also the
     *     passed-in bounds and appropriate tile size specified as parameters.
     */
    getURL: function(bounds) {
        var res = this.map.getResolution();
        var bbox = this.maxExtent;
        var size = this.tileSize;
        // Tile grid coordinates of the requested bounds, relative to the
        // lower-left corner of maxExtent.
        var tileX = Math.round((bounds.left - bbox.left) / (res * size.w));
        var tileY = Math.round((bounds.bottom - bbox.bottom) / (res * size.h));
        var tileZ = this.serverResolutions != null ?
            OpenLayers.Util.indexOf(this.serverResolutions, res) :
            this.map.getZoom();

        /**
         * Zero-pad a positive integer.
         * number - {Int}
         * length - {Int}
         *
         * Returns:
         * {String} A zero-padded string
         */
        function zeroPad(number, length) {
            number = String(number);
            var zeros = [];
            for(var i=0; i<length; ++i) {
                zeros.push('0');
            }
            return zeros.join('').substring(0, length - number.length) + number;
        }
        // TileCache disk layout: <layer>/zz/xxx/xxx/xxx/yyy/yyy/yyy.<ext>,
        // splitting each tile coordinate into three zero-padded groups.
        var components = [
            this.layername,
            zeroPad(tileZ, 2),
            zeroPad(parseInt(tileX / 1000000), 3),
            zeroPad((parseInt(tileX / 1000) % 1000), 3),
            zeroPad((parseInt(tileX) % 1000), 3),
            zeroPad(parseInt(tileY / 1000000), 3),
            zeroPad((parseInt(tileY / 1000) % 1000), 3),
            zeroPad((parseInt(tileY) % 1000), 3) + '.' + this.extension
        ];
        var path = components.join('/');
        var url = this.url;
        if (url instanceof Array) {
            url = this.selectUrl(path, url);
        }
        url = (url.charAt(url.length - 1) == '/') ? url : url + '/';
        return url + path;
    },

    CLASS_NAME: "OpenLayers.Layer.TileCache"
});
alexhuang888/onlogistics | lib/Custom/UploadedDocumentGrid.php | 3941 | <?php
/* vim: set expandtab tabstop=4 shiftwidth=4 softtabstop=4: */
/**
* This file is part of Onlogistics, a web based ERP and supply chain
* management application.
*
* Copyright (C) 2003-2008 ATEOR
*
* This program is free software: you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public
* License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* PHP version 5.1.0+
*
* @package Onlogistics
* @author ATEOR dev team <dev@ateor.com>
* @copyright 2003-2008 ATEOR <contact@ateor.com>
* @license http://www.fsf.org/licensing/licenses/agpl-3.0.html GNU AGPL
* @version SVN: $Id$
* @link http://www.onlogistics.org
* @link http://onlogistics.googlecode.com
* @since File available since release 0.1.0
* @filesource
*/
class UploadedDocumentGrid extends GenericGrid
{
    // Constructor {{{

    /**
     * Constructor.
     *
     * @param array $params array of parameters
     * @return void
     */
    public function __construct($params=array()) {
        $params['itemsperpage'] = 200;
        parent::__construct($params);
    }

    // }}}
    // UploadedDocumentGrid::onAfterBuildSearchForm() {{{

    /**
     * Adds the required search form fields (task, customer name, order
     * number) to the grid's SearchForm.
     *
     * @access protected
     * @return void
     */
    public function onAfterBuildSearchForm() {
        require_once('Objects/Task.inc.php');
        $tasks = SearchTools::createArrayIDFromCollection(
            'Task',
            array('Id' => getConsultingTaskIds()),
            MSG_SELECT_AN_ELEMENT
        );
        $this->searchForm->addElement(
            'select',
            'TaskId',
            _('Task'),
            array($tasks),
            array('Path'=> 'ActivatedChainTask.Task.Id')
        );
        $this->searchForm->addElement(
            'text',
            'CustomerName',
            _('Customer name'),
            array(),
            array('Path'=> 'Customer.Name')
        );
        $this->searchForm->addElement(
            'text',
            'CommandCommandNo',
            _('Order number'),
            array(),
            array('Path'=> 'ActivatedChainTask.ActivatedOperation.ActivatedChain.CommandItem().Command.CommandNo')
        );
    }

    // }}}
    // UploadedDocumentGrid::renderColumnActivatedChainTask() {{{

    /**
     * Overridden to display the task's name rather than the
     * ActivatedChainTask itself.
     *
     * @access protected
     * @return void
     */
    public function renderColumnActivatedChainTask() {
        return $this->grid->newColumn(
            'FieldMapper',
            _('Task'),
            array('Macro' => '%ActivatedChainTask.Task.Name|default@%')
        );
    }

    // }}}
    // UploadedDocumentGrid::additionalGridActions() {{{

    /**
     * Additional grid actions (download, assign, unassign).
     *
     * @access protected
     * @return void
     */
    protected function additionalGridActions() {
        $this->grid->newAction(
            'DownloadUploadedDocument',
            array('Caption'=>_('Download'))
        );
        $this->grid->newAction('Redirect', array(
            'Caption' => _('Assign'),
            'URL' => 'dispatcher.php?entity=ActivatedChainTask&udId=%d'
        ));
        $this->grid->newAction('Redirect', array(
            'Caption' => _('Unassign'),
            'URL' => 'UploadedDocumentActivatedChainTask.php?action=unassign&udId=%d'
        ));
    }

    // }}}
}
?>
| agpl-3.0 |
artofhuman/consul | spec/features/proposal_notifications_spec.rb | 7032 | require 'rails_helper'
feature 'Proposal Notifications' do

  scenario "Send a notification" do
    author = create(:user)
    proposal = create(:proposal, author: author)

    login_as(author)
    visit root_path

    click_link "My activity"

    within("#proposal_#{proposal.id}") do
      click_link "Send notification"
    end

    fill_in 'proposal_notification_title', with: "Thank you for supporting my proposal"
    fill_in 'proposal_notification_body', with: "Please share it with others so we can make it happen!"
    click_button "Send message"

    expect(page).to have_content "Your message has been sent correctly."
    expect(page).to have_content "Thank you for supporting my proposal"
    expect(page).to have_content "Please share it with others so we can make it happen!"
  end

  scenario "Send a notification (Active voter)" do
    author = create(:user)
    proposal = create(:proposal, author: author)

    voter = create(:user, :level_two)
    create(:vote, voter: voter, votable: proposal)

    create_proposal_notification(proposal)

    expect(Notification.count).to eq(1)
  end

  scenario "Send a notification (Follower)" do
    author = create(:user)
    proposal = create(:proposal, author: author)

    user_follower = create(:user)
    create(:follow, :followed_proposal, user: user_follower, followable: proposal)

    create_proposal_notification(proposal)

    expect(Notification.count).to eq(1)
  end

  scenario "Send a notification (Follower and Voter)" do
    author = create(:user)
    proposal = create(:proposal, author: author)

    user_voter_follower = create(:user)
    create(:follow, :followed_proposal, user: user_voter_follower, followable: proposal)
    create(:vote, voter: user_voter_follower, votable: proposal)

    user_follower = create(:user)
    create(:follow, :followed_proposal, user: user_follower, followable: proposal)

    create_proposal_notification(proposal)

    expect(Notification.count).to eq(2)
  end

  scenario "Send a notification (Blocked voter)" do
    author = create(:user)
    proposal = create(:proposal, author: author)

    voter = create(:user, :level_two)
    create(:vote, voter: voter, votable: proposal)
    voter.block

    create_proposal_notification(proposal)

    expect(Notification.count).to eq(0)
  end

  scenario "Send a notification (Erased voter)" do
    author = create(:user)
    proposal = create(:proposal, author: author)

    voter = create(:user, :level_two)
    create(:vote, voter: voter, votable: proposal)
    voter.erase

    create_proposal_notification(proposal)

    expect(Notification.count).to eq(0)
  end

  scenario "Show notifications" do
    proposal = create(:proposal)
    notification1 = create(:proposal_notification, proposal: proposal, title: "Hey guys", body: "Just wanted to let you know that...")
    notification2 = create(:proposal_notification, proposal: proposal, title: "Another update",
                                                   body: "We are almost there please share with your peoples!")

    visit proposal_path(proposal)

    expect(page).to have_content "Hey guys"
    expect(page).to have_content "Just wanted to let you know that..."
    expect(page).to have_content "Another update"
    expect(page).to have_content "We are almost there please share with your peoples!"
  end

  scenario "Message about receivers (Voters)" do
    author = create(:user)
    proposal = create(:proposal, author: author)

    7.times { create(:vote, votable: proposal, vote_flag: true) }

    login_as(author)
    visit new_proposal_notification_path(proposal_id: proposal.id)

    expect(page).to have_content "This message will be send to 7 people and it will be visible in the proposal's page"
    expect(page).to have_link("the proposal's page", href: proposal_path(proposal, anchor: 'comments'))
  end

  scenario "Message about receivers (Followers)" do
    author = create(:user)
    proposal = create(:proposal, author: author)

    7.times { create(:follow, :followed_proposal, followable: proposal) }

    login_as(author)
    visit new_proposal_notification_path(proposal_id: proposal.id)

    expect(page).to have_content "This message will be send to 7 people and it will be visible in the proposal's page"
    expect(page).to have_link("the proposal's page", href: proposal_path(proposal, anchor: 'comments'))
  end

  scenario "Message about receivers (Disctinct Followers and Voters)" do
    author = create(:user)
    proposal = create(:proposal, author: author)

    7.times { create(:follow, :followed_proposal, followable: proposal) }
    7.times { create(:vote, votable: proposal, vote_flag: true) }

    login_as(author)
    visit new_proposal_notification_path(proposal_id: proposal.id)

    expect(page).to have_content "This message will be send to 14 people and it will be visible in the proposal's page"
    expect(page).to have_link("the proposal's page", href: proposal_path(proposal, anchor: 'comments'))
  end

  scenario "Message about receivers (Same Followers and Voters)" do
    author = create(:user)
    proposal = create(:proposal, author: author)

    user_voter_follower = create(:user)
    create(:follow, :followed_proposal, user: user_voter_follower, followable: proposal)
    create(:vote, voter: user_voter_follower, votable: proposal)

    login_as(author)
    visit new_proposal_notification_path(proposal_id: proposal.id)

    expect(page).to have_content "This message will be send to 1 people and it will be visible in the proposal's page"
    expect(page).to have_link("the proposal's page", href: proposal_path(proposal, anchor: 'comments'))
  end

  context "Permissions" do

    scenario "Link to send the message" do
      user = create(:user)
      author = create(:user)
      proposal = create(:proposal, author: author)

      login_as(author)
      visit user_path(author)

      within("#proposal_#{proposal.id}") do
        expect(page).to have_link "Send notification"
      end

      login_as(user)
      visit user_path(author)

      within("#proposal_#{proposal.id}") do
        # Fix: the link is labelled "Send notification"; the old assertion on
        # "Send message" could never match and was vacuously true.
        expect(page).to_not have_link "Send notification"
      end
    end

    scenario "Accessing form directly" do
      user = create(:user)
      author = create(:user)
      proposal = create(:proposal, author: author)

      login_as(user)
      visit new_proposal_notification_path(proposal_id: proposal.id)

      expect(current_path).to eq(proposals_path)
      expect(page).to have_content("You do not have permission to carry out the action")
    end

  end

  scenario "Error messages" do
    author = create(:user)
    proposal = create(:proposal, author: author)

    login_as(author)

    visit new_proposal_notification_path(proposal_id: proposal.id)
    click_button "Send message"

    expect(page).to have_content error_message
  end

  context "Limits" do
    pending "Cannot send more than one notification within established interval"
    pending "use timecop to make sure notifications can be sent after time interval"
  end

end
| agpl-3.0 |
libersoft/fengoffice-ls | library/PEAR/PEAR.php | 34809 | <?php
/**
* PEAR, the PHP Extension and Application Repository
*
* PEAR class and PEAR_Error class
*
* PHP versions 4 and 5
*
* LICENSE: This source file is subject to version 3.0 of the PHP license
* that is available through the world-wide-web at the following URI:
* http://www.php.net/license/3_0.txt. If you did not receive a copy of
* the PHP License and are unable to obtain it through the web, please
* send a note to license@php.net so we can mail you a copy immediately.
*
* @category pear
* @package PEAR
* @author Sterling Hughes <sterling@php.net>
* @author Stig Bakken <ssb@php.net>
* @author Tomas V.V.Cox <cox@idecnet.com>
* @author Greg Beaver <cellog@php.net>
* @copyright 1997-2008 The PHP Group
* @license http://www.php.net/license/3_0.txt PHP License 3.0
* @version CVS: $Id: PEAR.php,v 1.1 2010/01/22 18:11:14 acio Exp $
* @link http://pear.php.net/package/PEAR
* @since File available since Release 0.1
*/
/**#@+
* ERROR constants
*/
define('PEAR_ERROR_RETURN', 1);
define('PEAR_ERROR_PRINT', 2);
define('PEAR_ERROR_TRIGGER', 4);
define('PEAR_ERROR_DIE', 8);
define('PEAR_ERROR_CALLBACK', 16);
/**
* WARNING: obsolete
* @deprecated
*/
define('PEAR_ERROR_EXCEPTION', 32);
/**#@-*/
// True when running under Zend Engine 2 or later (i.e. PHP 5+).
define('PEAR_ZE2', (function_exists('version_compare') &&
                    version_compare(zend_version(), "2-dev", "ge")));
if (substr(PHP_OS, 0, 3) == 'WIN') {
    define('OS_WINDOWS', true);
    define('OS_UNIX', false);
    define('PEAR_OS', 'Windows');
} else {
    define('OS_WINDOWS', false);
    define('OS_UNIX', true);
    define('PEAR_OS', 'Unix'); // blatant assumption
}

// instant backwards compatibility
if (!defined('PATH_SEPARATOR')) {
    if (OS_WINDOWS) {
        define('PATH_SEPARATOR', ';');
    } else {
        define('PATH_SEPARATOR', ':');
    }
}

// Global error-handling defaults and bookkeeping shared by PEAR/PEAR_Error.
$GLOBALS['_PEAR_default_error_mode'] = PEAR_ERROR_RETURN;
$GLOBALS['_PEAR_default_error_options'] = E_USER_NOTICE;
$GLOBALS['_PEAR_destructor_object_list'] = array();
$GLOBALS['_PEAR_shutdown_funcs'] = array();
$GLOBALS['_PEAR_error_handler_stack'] = array();

// track_errors exposes the last PHP error message via $php_errormsg.
@ini_set('track_errors', true);
/**
* Base class for other PEAR classes. Provides rudimentary
* emulation of destructors.
*
* If you want a destructor in your class, inherit PEAR and make a
* destructor method called _yourclassname (same name as the
* constructor, but with a "_" prefix). Also, in your constructor you
* have to call the PEAR constructor: $this->PEAR();.
* The destructor method will be called without parameters. Note that
* at in some SAPI implementations (such as Apache), any output during
* the request shutdown (in which destructors are called) seems to be
* discarded. If you need to get any debug information from your
* destructor, use error_log(), syslog() or something similar.
*
* IMPORTANT! To use the emulated destructors you need to create the
* objects by reference: $obj =& new PEAR_child;
*
* @category pear
* @package PEAR
* @author Stig Bakken <ssb@php.net>
* @author Tomas V.V. Cox <cox@idecnet.com>
* @author Greg Beaver <cellog@php.net>
* @copyright 1997-2006 The PHP Group
* @license http://www.php.net/license/3_0.txt PHP License 3.0
* @version Release: 1.7.2
* @link http://pear.php.net/package/PEAR
* @see PEAR_Error
* @since Class available since PHP 4.0.2
* @link http://pear.php.net/manual/en/core.pear.php#core.pear.pear
*/
class PEAR
{
// {{{ properties
/**
* Whether to enable internal debug messages.
*
* @var bool
* @access private
*/
var $_debug = false;
/**
* Default error mode for this object.
*
* @var int
* @access private
*/
var $_default_error_mode = null;
/**
* Default error options used for this object when error mode
* is PEAR_ERROR_TRIGGER.
*
* @var int
* @access private
*/
var $_default_error_options = null;
/**
* Default error handler (callback) for this object, if error mode is
* PEAR_ERROR_CALLBACK.
*
* @var string
* @access private
*/
var $_default_error_handler = '';
/**
* Which class to use for error objects.
*
* @var string
* @access private
*/
var $_error_class = 'PEAR_Error';
/**
* An array of expected errors.
*
* @var array
* @access private
*/
var $_expected_errors = array();
// }}}
// {{{ constructor
/**
* Constructor. Registers this object in
* $_PEAR_destructor_object_list for destructor emulation if a
* destructor object exists.
*
* @param string $error_class (optional) which class to use for
* error objects, defaults to PEAR_Error.
* @access public
* @return void
*/
function PEAR($error_class = null)
{
$classname = strtolower(get_class($this));
if ($this->_debug) {
print "PEAR constructor called, class=$classname\n";
}
if ($error_class !== null) {
$this->_error_class = $error_class;
}
while ($classname && strcasecmp($classname, "pear")) {
$destructor = "_$classname";
if (method_exists($this, $destructor)) {
global $_PEAR_destructor_object_list;
$_PEAR_destructor_object_list[] = &$this;
if (!isset($GLOBALS['_PEAR_SHUTDOWN_REGISTERED'])) {
register_shutdown_function("_PEAR_call_destructors");
$GLOBALS['_PEAR_SHUTDOWN_REGISTERED'] = true;
}
break;
} else {
$classname = get_parent_class($classname);
}
}
}
// }}}
// {{{ destructor
/**
* Destructor (the emulated type of...). Does nothing right now,
* but is included for forward compatibility, so subclass
* destructors should always call it.
*
* See the note in the class desciption about output from
* destructors.
*
* @access public
* @return void
*/
function _PEAR() {
if ($this->_debug) {
printf("PEAR destructor called, class=%s\n", strtolower(get_class($this)));
}
}
// }}}
// {{{ getStaticProperty()
/**
* If you have a class that's mostly/entirely static, and you need static
* properties, you can use this method to simulate them. Eg. in your method(s)
* do this: $myVar = &PEAR::getStaticProperty('myclass', 'myVar');
* You MUST use a reference, or they will not persist!
*
* @access public
* @param string $class The calling classname, to prevent clashes
* @param string $var The variable to retrieve.
* @return mixed A reference to the variable. If not set it will be
* auto initialised to NULL.
*/
function &getStaticProperty($class, $var)
{
static $properties;
if (!isset($properties[$class])) {
$properties[$class] = array();
}
if (!array_key_exists($var, $properties[$class])) {
$properties[$class][$var] = null;
}
return $properties[$class][$var];
}
// }}}
// {{{ registerShutdownFunc()
/**
* Use this function to register a shutdown method for static
* classes.
*
* @access public
* @param mixed $func The function name (or array of class/method) to call
* @param mixed $args The arguments to pass to the function
* @return void
*/
function registerShutdownFunc($func, $args = array())
{
// if we are called statically, there is a potential
// that no shutdown func is registered. Bug #6445
if (!isset($GLOBALS['_PEAR_SHUTDOWN_REGISTERED'])) {
register_shutdown_function("_PEAR_call_destructors");
$GLOBALS['_PEAR_SHUTDOWN_REGISTERED'] = true;
}
$GLOBALS['_PEAR_shutdown_funcs'][] = array($func, $args);
}
// }}}
// {{{ isError()
/**
* Tell whether a value is a PEAR error.
*
* @param mixed $data the value to test
* @param int $code if $data is an error object, return true
* only if $code is a string and
* $obj->getMessage() == $code or
* $code is an integer and $obj->getCode() == $code
* @access public
* @return bool true if parameter is an error
*/
function isError($data, $code = null)
{
if (is_a($data, 'PEAR_Error')) {
if (is_null($code)) {
return true;
} elseif (is_string($code)) {
return $data->getMessage() == $code;
} else {
return $data->getCode() == $code;
}
}
return false;
}
// }}}
// {{{ setErrorHandling()
/**
* Sets how errors generated by this object should be handled.
* Can be invoked both in objects and statically. If called
* statically, setErrorHandling sets the default behaviour for all
* PEAR objects. If called in an object, setErrorHandling sets
* the default behaviour for that object.
*
* @param int $mode
* One of PEAR_ERROR_RETURN, PEAR_ERROR_PRINT,
* PEAR_ERROR_TRIGGER, PEAR_ERROR_DIE,
* PEAR_ERROR_CALLBACK or PEAR_ERROR_EXCEPTION.
*
* @param mixed $options
* When $mode is PEAR_ERROR_TRIGGER, this is the error level (one
* of E_USER_NOTICE, E_USER_WARNING or E_USER_ERROR).
*
* When $mode is PEAR_ERROR_CALLBACK, this parameter is expected
* to be the callback function or method. A callback
* function is a string with the name of the function, a
* callback method is an array of two elements: the element
* at index 0 is the object, and the element at index 1 is
* the name of the method to call in the object.
*
* When $mode is PEAR_ERROR_PRINT or PEAR_ERROR_DIE, this is
* a printf format string used when printing the error
* message.
*
* @access public
* @return void
* @see PEAR_ERROR_RETURN
* @see PEAR_ERROR_PRINT
* @see PEAR_ERROR_TRIGGER
* @see PEAR_ERROR_DIE
* @see PEAR_ERROR_CALLBACK
* @see PEAR_ERROR_EXCEPTION
*
* @since PHP 4.0.5
*/
function setErrorHandling($mode = null, $options = null)
{
if (isset($this) && is_a($this, 'PEAR')) {
$setmode = &$this->_default_error_mode;
$setoptions = &$this->_default_error_options;
} else {
$setmode = &$GLOBALS['_PEAR_default_error_mode'];
$setoptions = &$GLOBALS['_PEAR_default_error_options'];
}
switch ($mode) {
case PEAR_ERROR_EXCEPTION:
case PEAR_ERROR_RETURN:
case PEAR_ERROR_PRINT:
case PEAR_ERROR_TRIGGER:
case PEAR_ERROR_DIE:
case null:
$setmode = $mode;
$setoptions = $options;
break;
case PEAR_ERROR_CALLBACK:
$setmode = $mode;
// class/object method callback
if (is_callable($options)) {
$setoptions = $options;
} else {
trigger_error("invalid error callback", E_USER_WARNING);
}
break;
default:
trigger_error("invalid error mode", E_USER_WARNING);
break;
}
}
// }}}
// {{{ expectError()
/**
* This method is used to tell which errors you expect to get.
* Expected errors are always returned with error mode
* PEAR_ERROR_RETURN. Expected error codes are stored in a stack,
* and this method pushes a new element onto it. The list of
* expected errors are in effect until they are popped off the
* stack with the popExpect() method.
*
* Note that this method can not be called statically
*
* @param mixed $code a single error code or an array of error codes to expect
*
* @return int the new depth of the "expected errors" stack
* @access public
*/
function expectError($code = '*')
{
if (is_array($code)) {
array_push($this->_expected_errors, $code);
} else {
array_push($this->_expected_errors, array($code));
}
return sizeof($this->_expected_errors);
}
// }}}
// {{{ popExpect()
/**
* This method pops one element off the expected error codes
* stack.
*
* @return array the list of error codes that were popped
*/
function popExpect()
{
return array_pop($this->_expected_errors);
}
// }}}
// {{{ _checkDelExpect()
/**
* This method checks unsets an error code if available
*
* @param mixed error code
* @return bool true if the error code was unset, false otherwise
* @access private
* @since PHP 4.3.0
*/
function _checkDelExpect($error_code)
{
$deleted = false;
foreach ($this->_expected_errors AS $key => $error_array) {
if (in_array($error_code, $error_array)) {
unset($this->_expected_errors[$key][array_search($error_code, $error_array)]);
$deleted = true;
}
// clean up empty arrays
if (0 == count($this->_expected_errors[$key])) {
unset($this->_expected_errors[$key]);
}
}
return $deleted;
}
// }}}
// {{{ delExpect()
/**
* This method deletes all occurences of the specified element from
* the expected error codes stack.
*
* @param mixed $error_code error code that should be deleted
* @return mixed list of error codes that were deleted or error
* @access public
* @since PHP 4.3.0
*/
function delExpect($error_code)
{
$deleted = false;
if ((is_array($error_code) && (0 != count($error_code)))) {
// $error_code is a non-empty array here;
// we walk through it trying to unset all
// values
foreach($error_code as $key => $error) {
if ($this->_checkDelExpect($error)) {
$deleted = true;
} else {
$deleted = false;
}
}
return $deleted ? true : PEAR::raiseError("The expected error you submitted does not exist"); // IMPROVE ME
} elseif (!empty($error_code)) {
// $error_code comes alone, trying to unset it
if ($this->_checkDelExpect($error_code)) {
return true;
} else {
return PEAR::raiseError("The expected error you submitted does not exist"); // IMPROVE ME
}
} else {
// $error_code is empty
return PEAR::raiseError("The expected error you submitted is empty"); // IMPROVE ME
}
}
// }}}
// {{{ raiseError()
/**
* This method is a wrapper that returns an instance of the
* configured error class with this object's default error
* handling applied. If the $mode and $options parameters are not
* specified, the object's defaults are used.
*
* @param mixed $message a text error message or a PEAR error object
*
* @param int $code a numeric error code (it is up to your class
* to define these if you want to use codes)
*
* @param int $mode One of PEAR_ERROR_RETURN, PEAR_ERROR_PRINT,
* PEAR_ERROR_TRIGGER, PEAR_ERROR_DIE,
* PEAR_ERROR_CALLBACK, PEAR_ERROR_EXCEPTION.
*
* @param mixed $options If $mode is PEAR_ERROR_TRIGGER, this parameter
* specifies the PHP-internal error level (one of
* E_USER_NOTICE, E_USER_WARNING or E_USER_ERROR).
* If $mode is PEAR_ERROR_CALLBACK, this
* parameter specifies the callback function or
* method. In other error modes this parameter
* is ignored.
*
* @param string $userinfo If you need to pass along for example debug
* information, this parameter is meant for that.
*
* @param string $error_class The returned error object will be
* instantiated from this class, if specified.
*
* @param bool $skipmsg If true, raiseError will only pass error codes,
* the error message parameter will be dropped.
*
* @access public
* @return object a PEAR error object
* @see PEAR::setErrorHandling
* @since PHP 4.0.5
*/
function &raiseError($message = null,
$code = null,
$mode = null,
$options = null,
$userinfo = null,
$error_class = null,
$skipmsg = false)
{
// The error is yet a PEAR error object
if (is_object($message)) {
$code = $message->getCode();
$userinfo = $message->getUserInfo();
$error_class = $message->getType();
$message->error_message_prefix = '';
$message = $message->getMessage();
}
if (isset($this) && isset($this->_expected_errors) && sizeof($this->_expected_errors) > 0 && sizeof($exp = end($this->_expected_errors))) {
if ($exp[0] == "*" ||
(is_int(reset($exp)) && in_array($code, $exp)) ||
(is_string(reset($exp)) && in_array($message, $exp))) {
$mode = PEAR_ERROR_RETURN;
}
}
// No mode given, try global ones
if ($mode === null) {
// Class error handler
if (isset($this) && isset($this->_default_error_mode)) {
$mode = $this->_default_error_mode;
$options = $this->_default_error_options;
// Global error handler
} elseif (isset($GLOBALS['_PEAR_default_error_mode'])) {
$mode = $GLOBALS['_PEAR_default_error_mode'];
$options = $GLOBALS['_PEAR_default_error_options'];
}
}
if ($error_class !== null) {
$ec = $error_class;
} elseif (isset($this) && isset($this->_error_class)) {
$ec = $this->_error_class;
} else {
$ec = 'PEAR_Error';
}
if (intval(PHP_VERSION) < 5) {
// little non-eval hack to fix bug #12147
include 'PEAR/FixPHP5PEARWarnings.php';
return $a;
}
if ($skipmsg) {
$a = new $ec($code, $mode, $options, $userinfo);
} else {
$a = new $ec($message, $code, $mode, $options, $userinfo);
}
return $a;
}
// }}}
// {{{ throwError()
/**
* Simpler form of raiseError with fewer options. In most cases
* message, code and userinfo are enough.
*
* @param string $message
*
*/
function &throwError($message = null,
$code = null,
$userinfo = null)
{
if (isset($this) && is_a($this, 'PEAR')) {
$a = &$this->raiseError($message, $code, null, null, $userinfo);
return $a;
} else {
$a = &PEAR::raiseError($message, $code, null, null, $userinfo);
return $a;
}
}
// }}}
function staticPushErrorHandling($mode, $options = null)
{
$stack = &$GLOBALS['_PEAR_error_handler_stack'];
$def_mode = &$GLOBALS['_PEAR_default_error_mode'];
$def_options = &$GLOBALS['_PEAR_default_error_options'];
$stack[] = array($def_mode, $def_options);
switch ($mode) {
case PEAR_ERROR_EXCEPTION:
case PEAR_ERROR_RETURN:
case PEAR_ERROR_PRINT:
case PEAR_ERROR_TRIGGER:
case PEAR_ERROR_DIE:
case null:
$def_mode = $mode;
$def_options = $options;
break;
case PEAR_ERROR_CALLBACK:
$def_mode = $mode;
// class/object method callback
if (is_callable($options)) {
$def_options = $options;
} else {
trigger_error("invalid error callback", E_USER_WARNING);
}
break;
default:
trigger_error("invalid error mode", E_USER_WARNING);
break;
}
$stack[] = array($mode, $options);
return true;
}
function staticPopErrorHandling()
{
$stack = &$GLOBALS['_PEAR_error_handler_stack'];
$setmode = &$GLOBALS['_PEAR_default_error_mode'];
$setoptions = &$GLOBALS['_PEAR_default_error_options'];
array_pop($stack);
list($mode, $options) = $stack[sizeof($stack) - 1];
array_pop($stack);
switch ($mode) {
case PEAR_ERROR_EXCEPTION:
case PEAR_ERROR_RETURN:
case PEAR_ERROR_PRINT:
case PEAR_ERROR_TRIGGER:
case PEAR_ERROR_DIE:
case null:
$setmode = $mode;
$setoptions = $options;
break;
case PEAR_ERROR_CALLBACK:
$setmode = $mode;
// class/object method callback
if (is_callable($options)) {
$setoptions = $options;
} else {
trigger_error("invalid error callback", E_USER_WARNING);
}
break;
default:
trigger_error("invalid error mode", E_USER_WARNING);
break;
}
return true;
}
// {{{ pushErrorHandling()
/**
* Push a new error handler on top of the error handler options stack. With this
* you can easily override the actual error handler for some code and restore
* it later with popErrorHandling.
*
* @param mixed $mode (same as setErrorHandling)
* @param mixed $options (same as setErrorHandling)
*
* @return bool Always true
*
* @see PEAR::setErrorHandling
*/
function pushErrorHandling($mode, $options = null)
{
$stack = &$GLOBALS['_PEAR_error_handler_stack'];
if (isset($this) && is_a($this, 'PEAR')) {
$def_mode = &$this->_default_error_mode;
$def_options = &$this->_default_error_options;
} else {
$def_mode = &$GLOBALS['_PEAR_default_error_mode'];
$def_options = &$GLOBALS['_PEAR_default_error_options'];
}
$stack[] = array($def_mode, $def_options);
if (isset($this) && is_a($this, 'PEAR')) {
$this->setErrorHandling($mode, $options);
} else {
PEAR::setErrorHandling($mode, $options);
}
$stack[] = array($mode, $options);
return true;
}
// }}}
// {{{ popErrorHandling()
/**
* Pop the last error handler used
*
* @return bool Always true
*
* @see PEAR::pushErrorHandling
*/
function popErrorHandling()
{
$stack = &$GLOBALS['_PEAR_error_handler_stack'];
array_pop($stack);
list($mode, $options) = $stack[sizeof($stack) - 1];
array_pop($stack);
if (isset($this) && is_a($this, 'PEAR')) {
$this->setErrorHandling($mode, $options);
} else {
PEAR::setErrorHandling($mode, $options);
}
return true;
}
// }}}
// {{{ loadExtension()
/**
* OS independant PHP extension load. Remember to take care
* on the correct extension name for case sensitive OSes.
*
* @param string $ext The extension name
* @return bool Success or not on the dl() call
*/
function loadExtension($ext)
{
if (!extension_loaded($ext)) {
// if either returns true dl() will produce a FATAL error, stop that
if ((ini_get('enable_dl') != 1) || (ini_get('safe_mode') == 1)) {
return false;
}
if (OS_WINDOWS) {
$suffix = '.dll';
} elseif (PHP_OS == 'HP-UX') {
$suffix = '.sl';
} elseif (PHP_OS == 'AIX') {
$suffix = '.a';
} elseif (PHP_OS == 'OSX') {
$suffix = '.bundle';
} else {
$suffix = '.so';
}
return @dl('php_'.$ext.$suffix) || @dl($ext.$suffix);
}
return true;
}
// }}}
}
// {{{ _PEAR_call_destructors()
/**
 * Shutdown handler that emulates destructors for registered PEAR objects.
 *
 * Walks $GLOBALS['_PEAR_destructor_object_list'] (reversed to LIFO order when
 * the static property PEAR::destructlifo is set), calls the first
 * "_ClassName" method found in each object's class hierarchy, then runs any
 * callbacks queued via PEAR::registerShutdownFunc().
 *
 * @return void
 */
function _PEAR_call_destructors()
{
    global $_PEAR_destructor_object_list;
    if (is_array($_PEAR_destructor_object_list) &&
        sizeof($_PEAR_destructor_object_list)) {
        reset($_PEAR_destructor_object_list);
        if (PEAR::getStaticProperty('PEAR', 'destructlifo')) {
            $_PEAR_destructor_object_list = array_reverse($_PEAR_destructor_object_list);
        }
        // foreach instead of each(): each() was deprecated in PHP 7.2 and
        // removed in PHP 8.0, where the old while/list/each loop is a fatal
        // error. Iteration order and per-element behavior are unchanged.
        foreach ($_PEAR_destructor_object_list as $objref) {
            $classname = get_class($objref);
            while ($classname) {
                $destructor = "_$classname";
                if (method_exists($objref, $destructor)) {
                    // Call the emulated destructor for the most-derived class
                    // that defines one, then stop walking the hierarchy.
                    $objref->$destructor();
                    break;
                } else {
                    $classname = get_parent_class($classname);
                }
            }
        }
        // Empty the object list to ensure that destructors are
        // not called more than once.
        $_PEAR_destructor_object_list = array();
    }

    // Now call the shutdown functions registered via registerShutdownFunc().
    if (is_array($GLOBALS['_PEAR_shutdown_funcs']) && !empty($GLOBALS['_PEAR_shutdown_funcs'])) {
        foreach ($GLOBALS['_PEAR_shutdown_funcs'] as $value) {
            call_user_func_array($value[0], $value[1]);
        }
    }
}
// }}}
/**
* Standard PEAR error class for PHP 4
*
* This class is supserseded by {@link PEAR_Exception} in PHP 5
*
* @category pear
* @package PEAR
* @author Stig Bakken <ssb@php.net>
* @author Tomas V.V. Cox <cox@idecnet.com>
* @author Gregory Beaver <cellog@php.net>
* @copyright 1997-2006 The PHP Group
* @license http://www.php.net/license/3_0.txt PHP License 3.0
* @version Release: 1.7.2
* @link http://pear.php.net/manual/en/core.pear.pear-error.php
* @see PEAR::raiseError(), PEAR::throwError()
* @since Class available since PHP 4.0.2
*/
class PEAR_Error
{
// {{{ properties
var $error_message_prefix = '';
var $mode = PEAR_ERROR_RETURN;
var $level = E_USER_NOTICE;
var $code = -1;
var $message = '';
var $userinfo = '';
var $backtrace = null;
// }}}
// {{{ constructor
/**
* PEAR_Error constructor
*
* @param string $message message
*
* @param int $code (optional) error code
*
* @param int $mode (optional) error mode, one of: PEAR_ERROR_RETURN,
* PEAR_ERROR_PRINT, PEAR_ERROR_DIE, PEAR_ERROR_TRIGGER,
* PEAR_ERROR_CALLBACK or PEAR_ERROR_EXCEPTION
*
* @param mixed $options (optional) error level, _OR_ in the case of
* PEAR_ERROR_CALLBACK, the callback function or object/method
* tuple.
*
* @param string $userinfo (optional) additional user/debug info
*
* @access public
*
*/
function PEAR_Error($message = 'unknown error', $code = null,
$mode = null, $options = null, $userinfo = null)
{
if ($mode === null) {
$mode = PEAR_ERROR_RETURN;
}
$this->message = $message;
$this->code = $code;
$this->mode = $mode;
$this->userinfo = $userinfo;
if (!PEAR::getStaticProperty('PEAR_Error', 'skiptrace')) {
$this->backtrace = debug_backtrace();
if (isset($this->backtrace[0]) && isset($this->backtrace[0]['object'])) {
unset($this->backtrace[0]['object']);
}
}
if ($mode & PEAR_ERROR_CALLBACK) {
$this->level = E_USER_NOTICE;
$this->callback = $options;
} else {
if ($options === null) {
$options = E_USER_NOTICE;
}
$this->level = $options;
$this->callback = null;
}
if ($this->mode & PEAR_ERROR_PRINT) {
if (is_null($options) || is_int($options)) {
$format = "%s";
} else {
$format = $options;
}
printf($format, $this->getMessage());
}
if ($this->mode & PEAR_ERROR_TRIGGER) {
trigger_error($this->getMessage(), $this->level);
}
if ($this->mode & PEAR_ERROR_DIE) {
$msg = $this->getMessage();
if (is_null($options) || is_int($options)) {
$format = "%s";
if (substr($msg, -1) != "\n") {
$msg .= "\n";
}
} else {
$format = $options;
}
die(sprintf($format, $msg));
}
if ($this->mode & PEAR_ERROR_CALLBACK) {
if (is_callable($this->callback)) {
call_user_func($this->callback, $this);
}
}
if ($this->mode & PEAR_ERROR_EXCEPTION) {
trigger_error("PEAR_ERROR_EXCEPTION is obsolete, use class PEAR_Exception for exceptions", E_USER_WARNING);
eval('$e = new Exception($this->message, $this->code);throw($e);');
}
}
// }}}
// {{{ getMode()
/**
* Get the error mode from an error object.
*
* @return int error mode
* @access public
*/
function getMode() {
return $this->mode;
}
// }}}
// {{{ getCallback()
/**
* Get the callback function/method from an error object.
*
* @return mixed callback function or object/method array
* @access public
*/
function getCallback() {
return $this->callback;
}
// }}}
// {{{ getMessage()
/**
* Get the error message from an error object.
*
* @return string full error message
* @access public
*/
function getMessage()
{
return ($this->error_message_prefix . $this->message);
}
// }}}
// {{{ getCode()
/**
* Get error code from an error object
*
* @return int error code
* @access public
*/
function getCode()
{
return $this->code;
}
// }}}
// {{{ getType()
/**
* Get the name of this error/exception.
*
* @return string error/exception name (type)
* @access public
*/
function getType()
{
return get_class($this);
}
// }}}
// {{{ getUserInfo()
/**
* Get additional user-supplied information.
*
* @return string user-supplied information
* @access public
*/
function getUserInfo()
{
return $this->userinfo;
}
// }}}
// {{{ getDebugInfo()
/**
* Get additional debug information supplied by the application.
*
* @return string debug information
* @access public
*/
function getDebugInfo()
{
return $this->getUserInfo();
}
// }}}
// {{{ getBacktrace()
/**
* Get the call backtrace from where the error was generated.
* Supported with PHP 4.3.0 or newer.
*
* @param int $frame (optional) what frame to fetch
* @return array Backtrace, or NULL if not available.
* @access public
*/
function getBacktrace($frame = null)
{
if (defined('PEAR_IGNORE_BACKTRACE')) {
return null;
}
if ($frame === null) {
return $this->backtrace;
}
return $this->backtrace[$frame];
}
// }}}
// {{{ addUserInfo()
function addUserInfo($info)
{
if (empty($this->userinfo)) {
$this->userinfo = $info;
} else {
$this->userinfo .= " ** $info";
}
}
// }}}
// {{{ toString()
function __toString()
{
return $this->getMessage();
}
// }}}
// {{{ toString()
/**
* Make a string representation of this object.
*
* @return string a string with an object summary
* @access public
*/
function toString() {
$modes = array();
$levels = array(E_USER_NOTICE => 'notice',
E_USER_WARNING => 'warning',
E_USER_ERROR => 'error');
if ($this->mode & PEAR_ERROR_CALLBACK) {
if (is_array($this->callback)) {
$callback = (is_object($this->callback[0]) ?
strtolower(get_class($this->callback[0])) :
$this->callback[0]) . '::' .
$this->callback[1];
} else {
$callback = $this->callback;
}
return sprintf('[%s: message="%s" code=%d mode=callback '.
'callback=%s prefix="%s" info="%s"]',
strtolower(get_class($this)), $this->message, $this->code,
$callback, $this->error_message_prefix,
$this->userinfo);
}
if ($this->mode & PEAR_ERROR_PRINT) {
$modes[] = 'print';
}
if ($this->mode & PEAR_ERROR_TRIGGER) {
$modes[] = 'trigger';
}
if ($this->mode & PEAR_ERROR_DIE) {
$modes[] = 'die';
}
if ($this->mode & PEAR_ERROR_RETURN) {
$modes[] = 'return';
}
return sprintf('[%s: message="%s" code=%d mode=%s level=%s '.
'prefix="%s" info="%s"]',
strtolower(get_class($this)), $this->message, $this->code,
implode("|", $modes), $levels[$this->level],
$this->error_message_prefix,
$this->userinfo);
}
// }}}
}
/*
* Local Variables:
* mode: php
* tab-width: 4
* c-basic-offset: 4
* End:
*/
?>
| agpl-3.0 |
Loos/bike_index | db/migrate/20140526141810_create_customer_contacts.rb | 354 | class CreateCustomerContacts < ActiveRecord::Migration
def change
create_table :customer_contacts do |t|
t.integer :user_id
t.string :user_email
t.integer :creator_id
t.string :creator_email
t.string :title
t.string :contact_type
t.text :body
t.integer :bike_id
t.timestamps
end
end
end
| agpl-3.0 |
dmeltzer/snipe-it | resources/lang/cy/passwords.php | 165 | <?php
return [
'sent' => 'Mae eich linc cyfrinair wedi\'i yrru!',
'user' => 'Dim defnyddiwr wedi\'i ddarganfod hefo\'r cyfeiriad ebost yna.',
];
| agpl-3.0 |
DerDu/SPHERE-Framework | Library/MOC-V/Component/Document/Document.php | 5319 | <?php
namespace MOC\V\Component\Document;
use MOC\V\Component\Document\Component\Bridge\Repository\DomPdf;
use MOC\V\Component\Document\Component\Bridge\Repository\MPdf;
use MOC\V\Component\Document\Component\Bridge\Repository\PhpExcel;
use MOC\V\Component\Document\Component\Bridge\Repository\PhpWord;
use MOC\V\Component\Document\Component\Bridge\Repository\UniversalXml;
use MOC\V\Component\Document\Component\IBridgeInterface;
use MOC\V\Component\Document\Component\IVendorInterface;
use MOC\V\Component\Document\Component\Parameter\Repository\FileParameter;
use MOC\V\Component\Document\Exception\DocumentTypeException;
use MOC\V\Component\Document\Vendor\Vendor;
/**
* Class Document
*
* @package MOC\V\Component\Document
*/
/**
 * Class Document
 *
 * Facade over the vendor layer: the static factory methods pick the bridge
 * implementation that matches a document location (DomPdf/MPdf for PDF,
 * PhpExcel for spreadsheets, PhpWord for word documents, UniversalXml for
 * XML) and hand back the ready-to-use IBridgeInterface.
 *
 * @package MOC\V\Component\Document
 */
class Document implements IVendorInterface
{

    /** @var IVendorInterface $VendorInterface wrapped vendor implementation */
    private $VendorInterface = null;

    /**
     * @param IVendorInterface $VendorInterface vendor wrapper holding the bridge
     */
    public function __construct(IVendorInterface $VendorInterface)
    {

        $this->setVendorInterface($VendorInterface);
    }

    /**
     * Dispatch on the lower-cased file extension of $Location and delegate to
     * the matching typed factory below.
     *
     * @param string $Location
     *
     * @return IBridgeInterface
     * @throws DocumentTypeException on an unsupported file extension
     */
    public static function getDocument($Location)
    {

        $FileInfo = new \SplFileInfo($Location);

        switch (strtolower($FileInfo->getExtension())) {
            case 'pdf': {
                return self::getPdfDocument($Location);
            }
            case 'csv':
            case 'xls':
            case 'xlsx': {
                return self::getExcelDocument($Location);
            }
            case 'doc':
            case 'docx': {
                return self::getWordDocument($Location);
            }
            case 'xml': {
                return self::getXmlDocument($Location);
            }
            default:
                throw new DocumentTypeException();
        }
    }

    /**
     * Build a DomPdf-backed bridge. Loads the file only if it already exists;
     * otherwise the bridge is returned empty (no new file is created).
     *
     * NOTE(review): file_exists() receives a FileParameter object, so this
     * relies on FileParameter's string conversion -- confirm __toString.
     *
     * @param string $Location
     *
     * @return IBridgeInterface
     */
    public static function getPdfDocument($Location)
    {

        $Document = new Document(
            new Vendor(
                new DomPdf()
            )
        );
        if (file_exists(new FileParameter($Location))) {
            $Document->getBridgeInterface()->loadFile(new FileParameter($Location));
        }
        return $Document->getBridgeInterface();
    }

    /**
     * @return IBridgeInterface the bridge held by the wrapped vendor
     */
    public function getBridgeInterface()
    {

        return $this->VendorInterface->getBridgeInterface();
    }

    /**
     * Build a PhpExcel-backed bridge. Unlike the PDF/XML factories this one
     * creates a new file at $Location when none exists yet.
     *
     * @param string $Location
     *
     * @return IBridgeInterface
     */
    public static function getExcelDocument($Location)
    {

        $Document = new Document(
            new Vendor(
                new PhpExcel()
            )
        );
        /** @var PhpExcel $Bridge */
        $Bridge = $Document->getBridgeInterface();
        if (file_exists(new FileParameter($Location))) {
            $Bridge->loadFile(new FileParameter($Location));
        } else {
            $Bridge->newFile(new FileParameter($Location));
        }
        return $Bridge;
    }

    /**
     * Build a PhpWord-backed bridge. Creates a new file at $Location when
     * none exists yet (load-or-create, same pattern as getExcelDocument).
     *
     * @param string $Location
     *
     * @return IBridgeInterface
     */
    public static function getWordDocument($Location)
    {

        $Document = new Document(
            new Vendor(
                new PhpWord()
            )
        );
        /** @var PhpWord $Bridge */
        $Bridge = $Document->getBridgeInterface();
        if (file_exists(new FileParameter($Location))) {
            $Bridge->loadFile(new FileParameter($Location));
        } else {
            $Bridge->newFile(new FileParameter($Location));
        }
        return $Bridge;
    }

    /**
     * Build a UniversalXml-backed bridge. Load-only: a missing file yields an
     * empty bridge.
     *
     * @param string $Location
     *
     * @return IBridgeInterface
     */
    public static function getXmlDocument($Location)
    {

        $Document = new Document(
            new Vendor(
                new UniversalXml()
            )
        );
        if (file_exists(new FileParameter($Location))) {
            $Document->getBridgeInterface()->loadFile(new FileParameter($Location));
        }
        return $Document->getBridgeInterface();
    }

    /**
     * Build an MPdf-backed bridge (PDF creation, as opposed to the DomPdf
     * bridge used for reading). Load-only when the file already exists.
     *
     * @param string $Location
     *
     * @return IBridgeInterface
     */
    public static function getPdfCreator($Location)
    {

        $Document = new Document(
            new Vendor(
                new MPdf()
            )
        );
        if (file_exists(new FileParameter($Location))) {
            $Document->getBridgeInterface()->loadFile(new FileParameter($Location));
        }
        return $Document->getBridgeInterface();
    }

    /**
     * @return IVendorInterface
     */
    public function getVendorInterface()
    {

        return $this->VendorInterface;
    }

    /**
     * @param IVendorInterface $VendorInterface
     *
     * @return IVendorInterface $this, for fluent chaining
     */
    public function setVendorInterface(IVendorInterface $VendorInterface)
    {

        $this->VendorInterface = $VendorInterface;
        return $this;
    }

    /**
     * @param IBridgeInterface $BridgeInterface
     *
     * @return IBridgeInterface the bridge as stored by the wrapped vendor
     */
    public function setBridgeInterface(IBridgeInterface $BridgeInterface)
    {

        return $this->VendorInterface->setBridgeInterface($BridgeInterface);
    }
}
| agpl-3.0 |
Nekonyx/osu-web | resources/views/changelog/_changelog_header.blade.php | 2249 | {{--
Copyright 2015-2017 ppy Pty. Ltd.
This file is part of osu!web. osu!web is distributed with the hope of
attracting more community contributions to the core ecosystem of osu!.
osu!web is free software: you can redistribute it and/or modify
it under the terms of the Affero GNU General Public License version 3
as published by the Free Software Foundation.
osu!web is distributed WITHOUT ANY WARRANTY; without even the implied
warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with osu!web. If not, see <http://www.gnu.org/licenses/>.
--}}
{{-- Changelog header: breadcrumb, featured + recent build tabs, and the chart mount point. --}}
<div class="osu-layout__row osu-layout__row--changelog-header">
    {{-- Breadcrumb: changelog index -> current page ($url/$breadcrumb are supplied by the including view) --}}
    <ol class="page-mode page-mode--breadcrumb">
        <li class="page-mode__item">
            <a class="page-mode-link" href="{{ route('changelog.index') }}">
                {{ trans("layout.menu.home.changelog-index") }}
                <span class="page-mode-link__stripe">
                </span>
            </a>
        </li>
        <li class="page-mode__item">
            <a class="page-mode-link page-mode-link--is-active" href="{{ $url }}">
                {{ $breadcrumb }}
                <span class="page-mode-link__stripe">
                </span>
            </a>
        </li>
    </ol>
    <div class="changelog-header">
        <div class="changelog-header__builds-box">
            {{-- The featured build gets its own row above the list --}}
            <div class="changelog-header__builds">
                @include('changelog._changelog_build', ['build' => $featuredBuild, 'featured' => true])
            </div>
            {{-- Remaining builds, one partial per build --}}
            <div class="changelog-header__builds">
                @foreach($builds as $build)
                    @include('changelog._changelog_build', ['build' => $build, 'featured' => false])
                @endforeach
            </div>
        </div>
    </div>
    {{-- Empty mount point; presumably filled in by chart JS hooked on the js-changelog-chart class --}}
    <div class="changelog-chart js-changelog-chart"></div>
</div>
{{-- Chart configuration embedded as JSON (the application/json block itself cannot carry comments) --}}
<script id="json-chart-config" type="application/json">
    {
        "buildHistory": {!! json_encode($buildHistory) !!},
        "order": {!! json_encode($chartOrder) !!},
        "isBuild": {!! json_encode(isset($activeBuild)) !!}
    }
</script>
| agpl-3.0 |
cloudbrain/cloudbrain | src/cloudbrain/core/model.py | 1620 | """
Cloudbrain's OO data model.
"""
class MetricBuffer(object):
    """Fixed-size accumulator for multi-channel metric samples.

    Each sample (datum) is a dict carrying one key per channel
    ("channel_0" ... "channel_<num_channels - 1>") plus a "timestamp" key.
    Samples are buffered until ``buffer_size`` of them have been collected;
    at that point :meth:`add` returns the whole batch (oldest first) and the
    internal buffer starts over empty.
    """

    def __init__(self, name, num_channels, buffer_size):
        """
        :param name: metric name, e.g. "eeg"
        :type name: str
        :param num_channels: number of data channels per sample
        :type num_channels: int
        :param buffer_size: number of samples per returned batch
        :type buffer_size: int
        """
        self.name = name
        self.num_channels = num_channels
        self.metric_names = ["channel_%s" % i for i in range(self.num_channels)]
        self.metric_names.append("timestamp")
        self.buffer_size = buffer_size
        self.data_buffer = []
        # Pre-computed expected key set: lets add() validate each datum with a
        # single set comparison instead of sorting both key lists every call.
        self._expected_keys = frozenset(self.metric_names)

    def _validate_datum(self, datum):
        """Raise ``ValueError`` unless ``datum`` has exactly the expected keys.

        :param datum: data point. E.g. for 2 channels:
          {"timestamp": <float>, "channel_0": <float>, "channel_1": <float>}
          (the keys are the channel names from ``metric_names`` -- the old
          docstring wrongly showed "metric_0"-style keys)
        :type datum: dict
        :raises ValueError: if any expected key is missing or extra keys exist
        """
        # Set equality is order-independent, so this matches the original
        # sorted-list comparison for any dict input.
        if frozenset(datum) != self._expected_keys:
            raise ValueError("MetricBuffer keys should be %s but are %s" % (
                self.metric_names, sorted(datum)))

    def add(self, datum):
        """Append ``datum`` to the buffer; return the batch once it is full.

        :param datum: metric data point with the following format:
          {"timestamp": <float>, "channel_0": <float>, ..., "channel_N": <float>}
        :type datum: dict
        :returns: ``None`` while fewer than ``buffer_size`` samples are
          buffered; otherwise the list of buffered data points (oldest
          first). The internal buffer is reset after a batch is returned.
        :rtype: list of dicts or None
        """
        self._validate_datum(datum)
        self.data_buffer.append(datum)
        if len(self.data_buffer) < self.buffer_size:
            return None
        # Hand the filled buffer to the caller and start a fresh one.
        batch = self.data_buffer
        self.data_buffer = []
        return batch
| agpl-3.0 |
guillermong/Tarea1Bigdata | tutorial/src/BitSequenceRGExample.cpp | 1520 | /**
* BitSequenceRGExample.cpp
* Copyright (C) 2011 Francisco Claude F.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <libcdsBasics.h>
#include <BitString.h>
#include <BitSequence.h>
#include <BitSequenceRG.h>
#include <BitSequenceRRR.h>
using namespace std;
using namespace cds_utils;
using namespace cds_static;
// Interactive demo: reads a bitmap of length N from stdin bit by bit,
// builds a BitSequenceRG rank/select structure over it and prints the
// results of a few sample queries.
int main(int argc, char **argv) {
    size_t N;
    cout << "Length of the bitmap: ";
    cin >> N;
    // uint_len(N,1) presumably returns the number of uint words needed to
    // store N 1-bit fields (libcdsBasics) -- TODO confirm.
    uint * bs = new uint[uint_len(N,1)];
    // Every bit in [0, N) is explicitly written below; any trailing bits of
    // the last word stay uninitialized -- NOTE(review): confirm that
    // BitSequenceRG ignores bits at positions >= N.
    for(uint i=0;i<N;i++) {
        uint b;
        cout << "bit at position " << i << ": ";
        cin >> b;
        if(b==0) bitclean(bs,i);  // clear bit i
        else bitset(bs,i);        // set bit i (libcds macro, not std::bitset)
    }
    // Third argument (20) is the structure's sampling factor: a
    // space/time trade-off parameter of the RG representation.
    BitSequenceRG * bsrg = new BitSequenceRG(bs,N,20);
    cout << "rank(" << N/2 << ")=" << bsrg->rank1(N/2) << endl;  // # of 1s in [0, N/2]
    cout << "select(1) = " << bsrg->select1(1) << endl;          // position of the 1st set bit
    cout << "size = " << bsrg->getSize() << endl;                // reported structure size
    delete bsrg;
    delete [] bs;
    return 0;
}
| lgpl-2.1 |
sitya/simplesamlphp | modules/adfs/www/idp/prp.php | 1112 | <?php
/**
* ADFS PRP IDP protocol support for SimpleSAMLphp.
*
* @author Hans Zandbelt, SURFnet bv, <hans.zandbelt@surfnet.nl>
* @package SimpleSAMLphp
*/
// Record every hit on this endpoint for auditing/debugging.
SimpleSAML\Logger::info('ADFS - IdP.prp: Accessing ADFS IdP endpoint prp');

// Resolve the hosted ADFS IdP instance configured in the metadata store.
$metadata = SimpleSAML_Metadata_MetaDataStorageHandler::getMetadataHandler();
$idpEntityId = $metadata->getMetaDataCurrentEntityID('adfs-idp-hosted');
$idp = SimpleSAML_IdP::getById('adfs:' . $idpEntityId);

if (isset($_GET['wa'])) {
    // WS-Federation request: the 'wa' query parameter selects the action.
    if ($_GET['wa'] === 'wsignout1.0') {
        // Sign-out request from a service provider.
        sspmod_adfs_IdP_ADFS::receiveLogoutMessage($idp);
    } else if ($_GET['wa'] === 'wsignin1.0') {
        // Sign-in (authentication) request from a service provider.
        sspmod_adfs_IdP_ADFS::receiveAuthnRequest($idp);
    }
    // Both handlers above presumably send a response and never return, so
    // reaching this line means an unsupported 'wa' value -- TODO confirm.
    // NOTE(review): string-argument assert() is deprecated since PHP 7.2;
    // consider assert(false) when the minimum PHP version allows it.
    assert('FALSE');
} elseif(isset($_GET['assocId'])) {
    // logout response from ADFS SP
    $assocId = $_GET['assocId']; // Association ID of the SP that sent the logout response
    $relayState = $_GET['relayState']; // Data that was sent in the logout request to the SP. Can be null
    $logoutError = NULL; /* NULL on success, or an instance of a SimpleSAML_Error_Exception on failure. */
    $idp->handleLogoutResponse($assocId, $relayState, $logoutError);
}
| lgpl-2.1 |
Airphrame/mapnik | include/mapnik/svg/svg_path_adapter.hpp | 28475 | /*****************************************************************************
*
* This file is part of Mapnik (c++ mapping toolkit)
*
* Copyright (C) 2014 Artem Pavlenko
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*
*****************************************************************************/
#ifndef MAPNIK_SVG_PATH_ADAPTER_HPP
#define MAPNIK_SVG_PATH_ADAPTER_HPP
// mapnik
#include <mapnik/util/noncopyable.hpp>
#include <mapnik/box2d.hpp>
// agg
#include "agg_math.h"
#include "agg_array.h"
#include "agg_bezier_arc.h"
// stl
#include <cmath>
#include <vector>
namespace mapnik {
namespace svg {
using namespace agg;
template<class VertexContainer> class path_adapter : util::noncopyable
{
public:
using container_type = VertexContainer ;
using self_type = path_adapter<VertexContainer>;
//--------------------------------------------------------------------
path_adapter(VertexContainer & vertices) : vertices_(vertices), iterator_(0) {}
//void remove_all() { vertices_.remove_all(); iterator_ = 0; }
//void free_all() { vertices_.free_all(); iterator_ = 0; }
// Make path functions
//--------------------------------------------------------------------
unsigned start_new_path();
void move_to(double x, double y);
void move_rel(double dx, double dy);
void line_to(double x, double y);
void line_rel(double dx, double dy);
void hline_to(double x);
void hline_rel(double dx);
void vline_to(double y);
void vline_rel(double dy);
void arc_to(double rx, double ry,
double angle,
bool large_arc_flag,
bool sweep_flag,
double x, double y);
void arc_rel(double rx, double ry,
double angle,
bool large_arc_flag,
bool sweep_flag,
double dx, double dy);
void curve3(double x_ctrl, double y_ctrl,
double x_to, double y_to);
void curve3_rel(double dx_ctrl, double dy_ctrl,
double dx_to, double dy_to);
void curve3(double x_to, double y_to);
void curve3_rel(double dx_to, double dy_to);
void curve4(double x_ctrl1, double y_ctrl1,
double x_ctrl2, double y_ctrl2,
double x_to, double y_to);
void curve4_rel(double dx_ctrl1, double dy_ctrl1,
double dx_ctrl2, double dy_ctrl2,
double dx_to, double dy_to);
void curve4(double x_ctrl2, double y_ctrl2,
double x_to, double y_to);
void curve4_rel(double x_ctrl2, double y_ctrl2,
double x_to, double y_to);
void end_poly(unsigned flags = path_flags_close);
void close_polygon(unsigned flags = path_flags_none);
// Accessors
//--------------------------------------------------------------------
const container_type& vertices() const { return vertices_; }
container_type& vertices() { return vertices_; }
unsigned total_vertices() const;
void rel_to_abs(double* x, double* y) const;
unsigned last_vertex(double* x, double* y) const;
unsigned prev_vertex(double* x, double* y) const;
double last_x() const;
double last_y() const;
unsigned vertex(unsigned idx, double* x, double* y) const;
unsigned command(unsigned idx) const;
void modify_vertex(unsigned idx, double x, double y);
void modify_vertex(unsigned idx, double x, double y, unsigned cmd);
void modify_command(unsigned idx, unsigned cmd);
// VertexSource interface
//--------------------------------------------------------------------
void rewind(unsigned path_id);
unsigned vertex(double* x, double* y);
// Arrange the orientation of a polygon, all polygons in a path,
// or in all paths. After calling arrange_orientations() or
// arrange_orientations_all_paths(), all the polygons will have
// the same orientation, i.e. path_flags_cw or path_flags_ccw
//--------------------------------------------------------------------
unsigned arrange_polygon_orientation(unsigned start, path_flags_e orientation);
unsigned arrange_orientations(unsigned path_id, path_flags_e orientation);
void arrange_orientations_all_paths(path_flags_e orientation);
void invert_polygon(unsigned start);
// Flip all vertices horizontally or vertically,
// between x1 and x2, or between y1 and y2 respectively
//--------------------------------------------------------------------
void flip_x(double x1, double x2);
void flip_y(double y1, double y2);
// Concatenate path. The path is added as is.
//--------------------------------------------------------------------
template<class VertexSource>
void concat_path(VertexSource& vs, unsigned path_id = 0)
{
double x(0), y(0);
unsigned cmd;
vs.rewind(path_id);
while(!is_stop(cmd = vs.vertex(&x, &y)))
{
vertices_.add_vertex(x, y, cmd);
}
}
//--------------------------------------------------------------------
// Join path. The path is joined with the existing one, that is,
// it behaves as if the pen of a plotter was always down (drawing)
template<class VertexSource>
void join_path(VertexSource& vs, unsigned path_id = 0)
{
double x, y;
unsigned cmd;
vs.rewind(path_id);
cmd = vs.vertex(&x, &y);
if(!is_stop(cmd))
{
if(is_vertex(cmd))
{
double x0, y0;
unsigned cmd0 = last_vertex(&x0, &y0);
if(is_vertex(cmd0))
{
if(calc_distance(x, y, x0, y0) > vertex_dist_epsilon)
{
if(is_move_to(cmd)) cmd = path_cmd_line_to;
vertices_.add_vertex(x, y, cmd);
}
}
else
{
if(is_stop(cmd0))
{
cmd = path_cmd_move_to;
}
else
{
if(is_move_to(cmd)) cmd = path_cmd_line_to;
}
vertices_.add_vertex(x, y, cmd);
}
}
while(!is_stop(cmd = vs.vertex(&x, &y)))
{
vertices_.add_vertex(x, y, is_move_to(cmd) ?
unsigned(path_cmd_line_to) :
cmd);
}
}
}
//--------------------------------------------------------------------
void translate(double dx, double dy, unsigned path_id=0);
void translate_all_paths(double dx, double dy);
//--------------------------------------------------------------------
template<class Trans>
void transform(const Trans& trans, unsigned path_id=0)
{
unsigned num_ver = vertices_.total_vertices();
for(; path_id < num_ver; path_id++)
{
double x, y;
unsigned cmd = vertices_.vertex(path_id, &x, &y);
if(is_stop(cmd)) break;
if(is_vertex(cmd))
{
trans.transform(&x, &y);
vertices_.modify_vertex(path_id, x, y);
}
}
}
//--------------------------------------------------------------------
template<class Trans>
void transform_all_paths(const Trans& trans)
{
unsigned idx;
unsigned num_ver = vertices_.total_vertices();
for(idx = 0; idx < num_ver; idx++)
{
double x, y;
if(is_vertex(vertices_.vertex(idx, &x, &y)))
{
trans.transform(&x, &y);
vertices_.modify_vertex(idx, x, y);
}
}
}
private:
unsigned perceive_polygon_orientation(unsigned start, unsigned end);
void invert_polygon(unsigned start, unsigned end);
VertexContainer & vertices_;
unsigned iterator_;
double start_x_;
double start_y_;
};
//------------------------------------------------------------------------
template<class VC>
unsigned path_adapter<VC>::start_new_path()
{
if(!is_stop(vertices_.last_command()))
{
vertices_.add_vertex(0.0, 0.0, path_cmd_stop);
}
return vertices_.total_vertices();
}
//------------------------------------------------------------------------
template<class VC>
inline void path_adapter<VC>::rel_to_abs(double* x, double* y) const
{
if(vertices_.total_vertices())
{
double x2;
double y2;
if(is_vertex(vertices_.last_vertex(&x2, &y2))
|| !is_stop(vertices_.last_command()))
{
*x += x2;
*y += y2;
}
}
}
//------------------------------------------------------------------------
template<class VC>
inline void path_adapter<VC>::move_to(double x, double y)
{
start_x_ = x;
start_y_ = y;
vertices_.add_vertex(x, y, path_cmd_move_to);
}
//------------------------------------------------------------------------
template<class VC>
inline void path_adapter<VC>::move_rel(double dx, double dy)
{
rel_to_abs(&dx, &dy);
vertices_.add_vertex(dx, dy, path_cmd_move_to);
}
//------------------------------------------------------------------------
template<class VC>
inline void path_adapter<VC>::line_to(double x, double y)
{
vertices_.add_vertex(x, y, path_cmd_line_to);
}
//------------------------------------------------------------------------
template<class VC>
inline void path_adapter<VC>::line_rel(double dx, double dy)
{
rel_to_abs(&dx, &dy);
vertices_.add_vertex(dx, dy, path_cmd_line_to);
}
//------------------------------------------------------------------------
template<class VC>
inline void path_adapter<VC>::hline_to(double x)
{
vertices_.add_vertex(x, last_y(), path_cmd_line_to);
}
//------------------------------------------------------------------------
template<class VC>
inline void path_adapter<VC>::hline_rel(double dx)
{
double dy = 0;
rel_to_abs(&dx, &dy);
vertices_.add_vertex(dx, dy, path_cmd_line_to);
}
//------------------------------------------------------------------------
template<class VC>
inline void path_adapter<VC>::vline_to(double y)
{
vertices_.add_vertex(last_x(), y, path_cmd_line_to);
}
//------------------------------------------------------------------------
template<class VC>
inline void path_adapter<VC>::vline_rel(double dy)
{
double dx = 0;
rel_to_abs(&dx, &dy);
vertices_.add_vertex(dx, dy, path_cmd_line_to);
}
//------------------------------------------------------------------------
//------------------------------------------------------------------------
// Append an elliptical arc from the current point to (x, y), following the
// semantics of the SVG path 'A' command (rx/ry radii, x-axis rotation
// `angle`, large-arc and sweep flags). Degenerate cases per the SVG spec:
// zero radii collapse to a straight line, and an arc whose endpoints
// coincide is omitted entirely. If there is no current point yet, the call
// degenerates to a move_to.
template<class VC>
void path_adapter<VC>::arc_to(double rx, double ry,
                              double angle,
                              bool large_arc_flag,
                              bool sweep_flag,
                              double x, double y)
{
    if(vertices_.total_vertices() && is_vertex(vertices_.last_command()))
    {
        const double epsilon = 1e-30;
        double x0 = 0.0;
        double y0 = 0.0;
        // Arc starts at the path's current point.
        vertices_.last_vertex(&x0, &y0);

        // Negative radii are treated as their absolute values (SVG rule).
        rx = std::fabs(rx);
        ry = std::fabs(ry);

        // Ensure radii are valid
        //-------------------------
        if(rx < epsilon || ry < epsilon)
        {
            // Zero-length radius: the arc degenerates to a straight line.
            line_to(x, y);
            return;
        }

        if(calc_distance(x0, y0, x, y) < epsilon)
        {
            // If the endpoints (x, y) and (x0, y0) are identical, then this
            // is equivalent to omitting the elliptical arc segment entirely.
            return;
        }
        // bezier_arc_svg approximates the arc with cubic Bezier segments
        // and scales up too-small radii internally; radii_ok() reports
        // whether a usable arc could be produced.
        bezier_arc_svg a(x0, y0, rx, ry, angle, large_arc_flag, sweep_flag, x, y);
        if(a.radii_ok())
        {
            // Join so the approximation continues the current subpath.
            join_path(a);
        }
        else
        {
            // Fall back to a straight segment when no valid arc exists.
            line_to(x, y);
        }
    }
    else
    {
        // No current point: an arc cannot be anchored, start a subpath.
        move_to(x, y);
    }
}
//------------------------------------------------------------------------
template<class VC>
void path_adapter<VC>::arc_rel(double rx, double ry,
double angle,
bool large_arc_flag,
bool sweep_flag,
double dx, double dy)
{
rel_to_abs(&dx, &dy);
arc_to(rx, ry, angle, large_arc_flag, sweep_flag, dx, dy);
}
//------------------------------------------------------------------------
template<class VC>
void path_adapter<VC>::curve3(double x_ctrl, double y_ctrl,
double x_to, double y_to)
{
vertices_.add_vertex(x_ctrl, y_ctrl, path_cmd_curve3);
vertices_.add_vertex(x_to, y_to, path_cmd_curve3);
}
//------------------------------------------------------------------------
template<class VC>
void path_adapter<VC>::curve3_rel(double dx_ctrl, double dy_ctrl,
double dx_to, double dy_to)
{
rel_to_abs(&dx_ctrl, &dy_ctrl);
rel_to_abs(&dx_to, &dy_to);
vertices_.add_vertex(dx_ctrl, dy_ctrl, path_cmd_curve3);
vertices_.add_vertex(dx_to, dy_to, path_cmd_curve3);
}
//------------------------------------------------------------------------
//------------------------------------------------------------------------
// Smooth (shorthand) quadratic Bezier, SVG 'T' command: the control point
// is the reflection of the previous segment's control point about the
// current point when the previous segment was itself a curve; otherwise
// the control point coincides with the current point, yielding a straight
// segment. No-op when the path has no current vertex.
template<class VC>
void path_adapter<VC>::curve3(double x_to, double y_to)
{
    double x0;
    double y0;
    if(is_vertex(vertices_.last_vertex(&x0, &y0)))
    {
        double x_ctrl;
        double y_ctrl;
        // Previous vertex: the control point of the preceding curve, if any.
        unsigned cmd = vertices_.prev_vertex(&x_ctrl, &y_ctrl);
        if(is_curve(cmd))
        {
            // Reflect the previous control point about the current point:
            // ctrl' = 2*p0 - ctrl.
            x_ctrl = x0 + x0 - x_ctrl;
            y_ctrl = y0 + y0 - y_ctrl;
        }
        else
        {
            // No preceding curve: control point collapses onto the
            // current point (SVG shorthand rule).
            x_ctrl = x0;
            y_ctrl = y0;
        }
        curve3(x_ctrl, y_ctrl, x_to, y_to);
    }
}
//------------------------------------------------------------------------
template<class VC>
void path_adapter<VC>::curve3_rel(double dx_to, double dy_to)
{
rel_to_abs(&dx_to, &dy_to);
curve3(dx_to, dy_to);
}
//------------------------------------------------------------------------
template<class VC>
void path_adapter<VC>::curve4(double x_ctrl1, double y_ctrl1,
double x_ctrl2, double y_ctrl2,
double x_to, double y_to)
{
vertices_.add_vertex(x_ctrl1, y_ctrl1, path_cmd_curve4);
vertices_.add_vertex(x_ctrl2, y_ctrl2, path_cmd_curve4);
vertices_.add_vertex(x_to, y_to, path_cmd_curve4);
}
//------------------------------------------------------------------------
template<class VC>
void path_adapter<VC>::curve4_rel(double dx_ctrl1, double dy_ctrl1,
double dx_ctrl2, double dy_ctrl2,
double dx_to, double dy_to)
{
rel_to_abs(&dx_ctrl1, &dy_ctrl1);
rel_to_abs(&dx_ctrl2, &dy_ctrl2);
rel_to_abs(&dx_to, &dy_to);
vertices_.add_vertex(dx_ctrl1, dy_ctrl1, path_cmd_curve4);
vertices_.add_vertex(dx_ctrl2, dy_ctrl2, path_cmd_curve4);
vertices_.add_vertex(dx_to, dy_to, path_cmd_curve4);
}
//------------------------------------------------------------------------
//------------------------------------------------------------------------
// Smooth (shorthand) cubic Bezier, SVG 'S' command: the first control
// point is the reflection of the previous segment's second control point
// about the current point when the previous segment was a curve;
// otherwise it coincides with the current point. No-op when the path has
// no current vertex.
template<class VC>
void path_adapter<VC>::curve4(double x_ctrl2, double y_ctrl2,
                              double x_to, double y_to)
{
    double x0;
    double y0;
    if(is_vertex(last_vertex(&x0, &y0)))
    {
        double x_ctrl1;
        double y_ctrl1;
        // Previous vertex: second control point of the preceding curve.
        unsigned cmd = prev_vertex(&x_ctrl1, &y_ctrl1);
        if(is_curve(cmd))
        {
            // Reflect about the current point: ctrl1 = 2*p0 - prev_ctrl2.
            x_ctrl1 = x0 + x0 - x_ctrl1;
            y_ctrl1 = y0 + y0 - y_ctrl1;
        }
        else
        {
            // No preceding curve: first control point collapses onto the
            // current point (SVG shorthand rule).
            x_ctrl1 = x0;
            y_ctrl1 = y0;
        }
        curve4(x_ctrl1, y_ctrl1, x_ctrl2, y_ctrl2, x_to, y_to);
    }
}
//------------------------------------------------------------------------
template<class VC>
void path_adapter<VC>::curve4_rel(double dx_ctrl2, double dy_ctrl2,
double dx_to, double dy_to)
{
rel_to_abs(&dx_ctrl2, &dy_ctrl2);
rel_to_abs(&dx_to, &dy_to);
curve4(dx_ctrl2, dy_ctrl2, dx_to, dy_to);
}
//------------------------------------------------------------------------
template<class VC>
inline void path_adapter<VC>::end_poly(unsigned flags)
{
if(is_vertex(vertices_.last_command()))
{
vertices_.add_vertex(start_x_, start_y_, path_cmd_end_poly | flags);
}
}
//------------------------------------------------------------------------
template<class VC>
inline void path_adapter<VC>::close_polygon(unsigned flags)
{
end_poly(path_flags_close | flags);
}
//------------------------------------------------------------------------
template<class VC>
inline unsigned path_adapter<VC>::total_vertices() const
{
return vertices_.total_vertices();
}
//------------------------------------------------------------------------
template<class VC>
inline unsigned path_adapter<VC>::last_vertex(double* x, double* y) const
{
return vertices_.last_vertex(x, y);
}
//------------------------------------------------------------------------
template<class VC>
inline unsigned path_adapter<VC>::prev_vertex(double* x, double* y) const
{
return vertices_.prev_vertex(x, y);
}
//------------------------------------------------------------------------
template<class VC>
inline double path_adapter<VC>::last_x() const
{
return vertices_.last_x();
}
//------------------------------------------------------------------------
template<class VC>
inline double path_adapter<VC>::last_y() const
{
return vertices_.last_y();
}
//------------------------------------------------------------------------
template<class VC>
inline unsigned path_adapter<VC>::vertex(unsigned idx, double* x, double* y) const
{
return vertices_.vertex(idx, x, y);
}
//------------------------------------------------------------------------
template<class VC>
inline unsigned path_adapter<VC>::command(unsigned idx) const
{
return vertices_.command(idx);
}
//------------------------------------------------------------------------
template<class VC>
void path_adapter<VC>::modify_vertex(unsigned idx, double x, double y)
{
vertices_.modify_vertex(idx, x, y);
}
//------------------------------------------------------------------------
template<class VC>
void path_adapter<VC>::modify_vertex(unsigned idx, double x, double y, unsigned cmd)
{
vertices_.modify_vertex(idx, x, y, cmd);
}
//------------------------------------------------------------------------
template<class VC>
void path_adapter<VC>::modify_command(unsigned idx, unsigned cmd)
{
vertices_.modify_command(idx, cmd);
}
//------------------------------------------------------------------------
template<class VC>
inline void path_adapter<VC>::rewind(unsigned path_id)
{
iterator_ = path_id;
}
//------------------------------------------------------------------------
template<class VC>
inline unsigned path_adapter<VC>::vertex(double* x, double* y)
{
if(iterator_ >= vertices_.total_vertices()) return path_cmd_stop;
return vertices_.vertex(iterator_++, x, y);
}
//------------------------------------------------------------------------
//------------------------------------------------------------------------
// Determine the winding of the polygon stored in vertices [start, end)
// using the shoelace formula. Returns path_flags_cw for negative signed
// area, path_flags_ccw otherwise. (Which winding a given sign corresponds
// to on screen depends on the renderer's y-axis direction.)
template<class VC>
unsigned path_adapter<VC>::perceive_polygon_orientation(unsigned start,
                                                        unsigned end)
{
    // Calculate signed area (double area to be exact)
    //---------------------
    unsigned np = end - start;
    double area = 0.0;
    unsigned i;
    for(i = 0; i < np; i++)
    {
        // Cross-product term of each edge; the modulo wraps the last
        // vertex back to the first so the polygon is implicitly closed.
        double x1, y1, x2, y2;
        vertices_.vertex(start + i,       &x1, &y1);
        vertices_.vertex(start + (i + 1) % np, &x2, &y2);
        area += x1 * y2 - y1 * x2;
    }
    return (area < 0.0) ? path_flags_cw : path_flags_ccw;
}
//------------------------------------------------------------------------
//------------------------------------------------------------------------
// Reverse the vertex order of the polygon in [start, end) in place.
// The path commands are first rotated one slot to the left so that, after
// the vertex reversal, the initial command (e.g. move_to) still lands on
// the polygon's first vertex rather than its last.
template<class VC>
void path_adapter<VC>::invert_polygon(unsigned start, unsigned end)
{
    unsigned i;
    // Remember the starting command (typically move_to) before rotating.
    unsigned tmp_cmd = vertices_.command(start);

    --end; // Make "end" inclusive

    // Shift all commands to one position
    for(i = start; i < end; i++)
    {
        vertices_.modify_command(i, vertices_.command(i + 1));
    }

    // Assign starting command to the ending command
    vertices_.modify_command(end, tmp_cmd);

    // Reverse the polygon
    while(end > start)
    {
        vertices_.swap_vertices(start++, end--);
    }
}
//------------------------------------------------------------------------
template<class VC>
void path_adapter<VC>::invert_polygon(unsigned start)
{
// Skip all non-vertices at the beginning
while(start < vertices_.total_vertices() &&
!is_vertex(vertices_.command(start))) ++start;
// Skip all insignificant move_to
while(start+1 < vertices_.total_vertices() &&
is_move_to(vertices_.command(start)) &&
is_move_to(vertices_.command(start+1))) ++start;
// Find the last vertex
unsigned end = start + 1;
while(end < vertices_.total_vertices() &&
!is_next_poly(vertices_.command(end))) ++end;
invert_polygon(start, end);
}
//------------------------------------------------------------------------
template<class VC>
unsigned path_adapter<VC>::arrange_polygon_orientation(unsigned start,
path_flags_e orientation)
{
if(orientation == path_flags_none) return start;
// Skip all non-vertices at the beginning
while(start < vertices_.total_vertices() &&
!is_vertex(vertices_.command(start))) ++start;
// Skip all insignificant move_to
while(start+1 < vertices_.total_vertices() &&
is_move_to(vertices_.command(start)) &&
is_move_to(vertices_.command(start+1))) ++start;
// Find the last vertex
unsigned end = start + 1;
while(end < vertices_.total_vertices() &&
!is_next_poly(vertices_.command(end))) ++end;
if(end - start > 2)
{
if(perceive_polygon_orientation(start, end) != unsigned(orientation))
{
// Invert polygon, set orientation flag, and skip all end_poly
invert_polygon(start, end);
unsigned cmd;
while(end < vertices_.total_vertices() &&
is_end_poly(cmd = vertices_.command(end)))
{
vertices_.modify_command(end++, set_orientation(cmd, orientation));
}
}
}
return end;
}
//------------------------------------------------------------------------
template<class VC>
unsigned path_adapter<VC>::arrange_orientations(unsigned start,
path_flags_e orientation)
{
if(orientation != path_flags_none)
{
while(start < vertices_.total_vertices())
{
start = arrange_polygon_orientation(start, orientation);
if(is_stop(vertices_.command(start)))
{
++start;
break;
}
}
}
return start;
}
//------------------------------------------------------------------------
template<class VC>
void path_adapter<VC>::arrange_orientations_all_paths(path_flags_e orientation)
{
if(orientation != path_flags_none)
{
unsigned start = 0;
while(start < vertices_.total_vertices())
{
start = arrange_orientations(start, orientation);
}
}
}
//------------------------------------------------------------------------
template<class VC>
void path_adapter<VC>::flip_x(double x1, double x2)
{
unsigned i;
double x, y;
for(i = 0; i < vertices_.total_vertices(); i++)
{
unsigned cmd = vertices_.vertex(i, &x, &y);
if(is_vertex(cmd))
{
vertices_.modify_vertex(i, x2 - x + x1, y);
}
}
}
//------------------------------------------------------------------------
template<class VC>
void path_adapter<VC>::flip_y(double y1, double y2)
{
unsigned i;
double x, y;
for(i = 0; i < vertices_.total_vertices(); i++)
{
unsigned cmd = vertices_.vertex(i, &x, &y);
if(is_vertex(cmd))
{
vertices_.modify_vertex(i, x, y2 - y + y1);
}
}
}
//------------------------------------------------------------------------
template<class VC>
void path_adapter<VC>::translate(double dx, double dy, unsigned path_id)
{
unsigned num_ver = vertices_.total_vertices();
for(; path_id < num_ver; path_id++)
{
double x, y;
unsigned cmd = vertices_.vertex(path_id, &x, &y);
if(is_stop(cmd)) break;
if(is_vertex(cmd))
{
x += dx;
y += dy;
vertices_.modify_vertex(path_id, x, y);
}
}
}
//------------------------------------------------------------------------
template<class VC>
void path_adapter<VC>::translate_all_paths(double dx, double dy)
{
unsigned idx;
unsigned num_ver = vertices_.total_vertices();
for(idx = 0; idx < num_ver; idx++)
{
double x, y;
if(is_vertex(vertices_.vertex(idx, &x, &y)))
{
x += dx;
y += dy;
vertices_.modify_vertex(idx, x, y);
}
}
}
template<class Container> class vertex_stl_adapter : util::noncopyable
{
public:
using vertex_type = typename Container::value_type;
using value_type = typename vertex_type::value_type;
explicit vertex_stl_adapter(Container & vertices)
: vertices_(vertices) {}
void add_vertex(double x, double y, unsigned cmd)
{
vertices_.push_back(vertex_type(value_type(x),
value_type(y),
int8u(cmd)));
}
void modify_vertex(unsigned idx, double x, double y)
{
vertex_type& v = vertices_[idx];
v.x = value_type(x);
v.y = value_type(y);
}
void modify_vertex(unsigned idx, double x, double y, unsigned cmd)
{
vertex_type& v = vertices_[idx];
v.x = value_type(x);
v.y = value_type(y);
v.cmd = int8u(cmd);
}
void modify_command(unsigned idx, unsigned cmd)
{
vertices_[idx].cmd = int8u(cmd);
}
void swap_vertices(unsigned v1, unsigned v2)
{
vertex_type t = vertices_[v1];
vertices_[v1] = vertices_[v2];
vertices_[v2] = t;
}
unsigned last_command() const
{
return vertices_.size() ?
vertices_[vertices_.size() - 1].cmd :
(unsigned)path_cmd_stop;
}
unsigned last_vertex(double* x, double* y) const
{
if(vertices_.size() == 0)
{
*x = *y = 0.0;
return path_cmd_stop;
}
return vertex(vertices_.size() - 1, x, y);
}
unsigned prev_vertex(double* x, double* y) const
{
if(vertices_.size() < 2)
{
*x = *y = 0.0;
return path_cmd_stop;
}
return vertex(vertices_.size() - 2, x, y);
}
double last_x() const
{
return vertices_.size() ? vertices_[vertices_.size() - 1].x : 0.0;
}
double last_y() const
{
return vertices_.size() ? vertices_[vertices_.size() - 1].y : 0.0;
}
std::size_t total_vertices() const
{
return vertices_.size();
}
unsigned vertex(unsigned idx, double* x, double* y) const
{
const vertex_type& v = vertices_[idx];
*x = v.x;
*y = v.y;
return v.cmd;
}
unsigned command(unsigned idx) const
{
return vertices_[idx].cmd;
}
private:
Container & vertices_;
};
using svg_path_storage = std::vector<vertex_d>;
using svg_path_adapter = path_adapter<vertex_stl_adapter<svg_path_storage> >;
}}
#endif // MAPNIK_SVG_PATH_ADAPTER
| lgpl-2.1 |
rspavel/spack | var/spack/repos/builtin/packages/py-azure-storage-nspkg/package.py | 669 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
class PyAzureStorageNspkg(PythonPackage):
    """Microsoft Azure Storage Namespace Package."""

    homepage = "https://github.com/Azure/azure-storage-python"
    url = "https://pypi.io/packages/source/a/azure-storage-nspkg/azure-storage-nspkg-3.1.0.tar.gz"

    version('3.1.0', sha256='6f3bbe8652d5f542767d8433e7f96b8df7f518774055ac7c92ed7ca85f653811')

    # setuptools is only required to build the package.
    depends_on('py-setuptools', type='build')
    # The namespace package needs azure-nspkg >= 2.0.0 at build and run time.
    depends_on('py-azure-nspkg@2.0.0:', type=('build', 'run'))
| lgpl-2.1 |
hibernate/hibernate-ogm | mongodb/src/main/java/org/hibernate/ogm/datastore/mongodb/type/GeoLineString.java | 2908 | /*
* Hibernate OGM, Domain model persistence for NoSQL datastores
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.ogm.datastore.mongodb.type;
import java.util.Arrays;
import java.util.List;
import org.bson.BsonArray;
import org.bson.Document;
import org.hibernate.ogm.util.Experimental;
import org.hibernate.ogm.util.impl.Contracts;
/**
 * Type used to represent a GeoJSON LineString in MongoDB and support spatial queries.
 *
 * @author Guillaume Smet
 */
@Experimental
public class GeoLineString extends AbstractGeoJsonObject {

    // GeoJSON "type" member value for this geometry.
    public static final String TYPE = "LineString";

    /**
     * The start point of the line.
     */
    private GeoPoint startPoint;

    /**
     * The end point of the line.
     */
    private GeoPoint endPoint;

    /**
     * Instantiates a new LineString.
     *
     * @param startPoint the start point of the line
     * @param endPoint the end point of the line
     */
    public GeoLineString(GeoPoint startPoint, GeoPoint endPoint) {
        super( TYPE );

        Contracts.assertNotNull( startPoint, "startPoint" );
        Contracts.assertNotNull( endPoint, "endPoint" );

        this.startPoint = startPoint;
        this.endPoint = endPoint;
    }

    /**
     * @return the start point of the line
     */
    public GeoPoint getStartPoint() {
        return startPoint;
    }

    /**
     * @return the end point of the line
     */
    public GeoPoint getEndPoint() {
        return endPoint;
    }

    @Override
    protected BsonArray toCoordinates() {
        // GeoJSON LineString coordinates: an array of two positions,
        // [start, end], each itself a [longitude, latitude] array.
        BsonArray coordinates = new BsonArray( Arrays.asList(
                startPoint.toCoordinates(),
                endPoint.toCoordinates()
        ) );

        return coordinates;
    }

    // Builds a GeoLineString from a raw coordinates list; null-safe.
    // NOTE(review): assumes the list holds at least two positions -- confirm
    // callers (e.g. fromDocument) guarantee this.
    static GeoLineString fromCoordinates(List<List<Double>> coordinates) {
        if ( coordinates == null ) {
            return null;
        }

        return new GeoLineString( GeoPoint.fromCoordinates( coordinates.get( 0 ) ), GeoPoint.fromCoordinates( coordinates.get( 1 ) ) );
    }

    /**
     * Builds a GeoLineString from a MongoDB document, validating the
     * GeoJSON "type" member first. Returns {@code null} for a {@code null}
     * document.
     */
    @SuppressWarnings("unchecked")
    public static GeoLineString fromDocument(Document document) {
        if ( document == null ) {
            return null;
        }

        checkType( TYPE, document );

        List<List<Double>> coordinates = (List<List<Double>>) document.get( "coordinates" );

        return fromCoordinates( coordinates );
    }

    @Override
    public boolean equals(Object obj) {
        if ( this == obj ) {
            return true;
        }
        if ( obj == null || getClass() != obj.getClass() ) {
            return false;
        }

        // Two line strings are equal iff both endpoints are equal.
        GeoLineString that = (GeoLineString) obj;

        if ( !that.startPoint.equals( startPoint ) ) {
            return false;
        }
        if ( !that.endPoint.equals( endPoint ) ) {
            return false;
        }

        return true;
    }

    @Override
    public int hashCode() {
        // Combine both endpoints with the conventional 31 multiplier,
        // consistent with equals().
        int hashCode = startPoint.hashCode();
        hashCode = hashCode * 31 + endPoint.hashCode();
        return hashCode;
    }

    @Override
    public String toString() {
        return "GeoLineString [startPoint=" + startPoint + ", endPoint=" + endPoint + "]";
    }
}
| lgpl-2.1 |
luisbg/gst-introspection | gstgengui/event.py | 7694 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Very easy to use event-system
@author: Damien Boucard
"""
import logging
import gobject
logger = logging.getLogger('event')
try:
    # Prefer the shared easycast implementation when it is installed; the
    # class below is a drop-in fallback with the same public interface.
    from easycast.utils.event import EventManager
except ImportError:
    class EventManager:
        """Manages the event-system.

        This class is instantiated on importing the module, so it is not
        normally used directly but via EventLaunch and EventListener.

        @cvar instance: The instance created on importing the module.
        @type instance: C{L{EventManager}}
        @ivar listeners: Dictionary mapping an event type (C{str}) to the
            list of C{L{EventListener}} objects registered for it.
        @type listeners: C{dict<str, list<L{EventListener}>>}
        """

        def __init__(self):
            """EventManager constructor."""
            # Publish the singleton on the class itself.
            EventManager.instance = self
            self.listeners = {}

        def addListener(self, obj, event_type):
            """Add a listener to a specific event.

            Adding the same listener twice for the same event is a no-op.

            @param obj: Listener to add.
            @type obj: C{L{EventListener}}
            @param event_type: Type of the event to listen.
            @type event_type: C{str}
            """
            if event_type in self.listeners:
                if obj not in self.listeners[event_type]:
                    self.listeners[event_type].append(obj)
            else:
                self.listeners[event_type] = [obj]

        def removeListener(self, obj, event_type):
            """Remove a listener from a specific event.

            Unknown listeners or event types are silently ignored.

            @param obj: Listener to remove.
            @type obj: C{L{EventListener}}
            @param event_type: Type of the event that was listening.
            @type event_type: C{str}
            """
            if event_type in self.listeners and obj in self.listeners[event_type]:
                self.listeners[event_type].remove(obj)

        def dispatchEvent(self, event):
            """Dispatch a launched event to all affected listeners.

            Handlers are scheduled asynchronously via gobject.idle_add.

            @param event: Event launched.
            @type event: C{L{Event}}
            @raise UnhandledEventError: if a listener provides neither an
                event-specific nor a default handler and is not silent.
            """
            if event.type in self.listeners:
                for obj in self.listeners[event.type]:
                    # Try to call event-specific handle method
                    fctname = obj.event_pattern % (event.type)
                    if hasattr(obj, fctname):
                        function = getattr(obj, fctname)
                        if callable(function):
                            gobject.idle_add(function, event)
                            continue
                        else:
                            logger.warning('Event-specific handler found but not callable')
                    # Try to call default handle method
                    if hasattr(obj, obj.event_default):
                        function = getattr(obj, obj.event_default)
                        if callable(function):
                            gobject.idle_add(function, event)
                            continue
                    # No handle method found, raise error ?
                    if not obj.event_silent:
                        # BUG FIX: the message must be %-formatted here --
                        # unlike logger calls, exception constructors do not
                        # apply lazy %-formatting to extra positional args.
                        raise UnhandledEventError(
                            "%s has no method to handle %s" % (obj, event))
            else:
                logger.error("Pas de event.type %s" % event.type)

EventManager()
class EventListener:
    """ Generic base class for objects that listen to events.
    Subclass this and register for event types; incoming events are handled
    by an event-specific method (name built from C{prefix} + event type +
    C{suffix}) or, failing that, by the C{default} handler method.
    If neither exists and C{silent} is C{False}, dispatching raises
    C{L{UnhandledEventError}}.
    @ivar event_manager: The event manager instance.
    @type event_manager: C{L{EventManager}}
    @ivar event_pattern: Event-specific handler name pattern ("%s" is the type).
    @type event_pattern: C{str}
    @ivar event_default: Default handler function name.
    @type event_default: C{str}
    @ivar event_silent: If C{True}, unhandled events are ignored instead of
    raising C{L{UnhandledEventError}}.
    @type event_silent: C{bool}
    """
    def __init__(self, prefix="evt_", suffix="", default="eventPerformed", silent=False):
        """ EventListener constructor.
        @param prefix: Prefix for event-specific handler names.
        @type prefix: C{str}
        @param suffix: Suffix for event-specific handler names.
        @type suffix: C{str}
        @param default: Default handler function name.
        @type default: C{str}
        @param silent: Silent flag.
        @type silent: C{bool}
        """
        self.event_manager = EventManager.instance
        # Equivalent to prefix + "%s" + suffix.
        self.event_pattern = "".join((prefix, "%s", suffix))
        self.event_default = default
        self.event_silent = silent

    def registerEvent(self, event_type):
        """ Start listening to a new event type.
        @param event_type: Type of the event to listen.
        @type event_type: C{str}
        """
        self.event_manager.addListener(self, event_type)

    def unregisterEvent(self, event_type):
        """ Stop listening to an event type.
        @param event_type: Type of the event which was listening.
        @type event_type: C{str}
        """
        self.event_manager.removeListener(self, event_type)
class EventLauncher:
    """ Generic base class for objects that emit events.
    Inheriting from this class provides C{L{launchEvent}}.
    @ivar event_manager: The event manager instance.
    @type event_manager: C{L{EventManager}}
    """
    def __init__(self):
        """ EventLauncher constructor. """
        self.event_manager = EventManager.instance

    def launchEvent(self, event_type, content=None):
        """ Launches a new event to the listeners.
        @param event_type: Type of the event to launch.
        @type event_type: C{str}
        @param content: Content to attach with the event (Optional).
        @type content: any
        """
        event = Event(event_type, self, content)
        self.event_manager.dispatchEvent(event)
class Event:
    """ Represents an event entity.
    @ivar type: Type of the event.
    @type type: C{str}
    @ivar source: Instance which launched the event.
    @type source: C{L{EventLauncher}}
    @ivar content: Content attached to the event (C{None} if none).
    @type content: any
    """
    def __init__(self, type, source, content):
        """ Event constructor.
        @param type: Type of the event.
        @type type: C{str}
        @param source: Instance which launched the event.
        @type source: C{L{EventLauncher}}
        @param content: Content attached to the event (C{None} if none).
        @type content: any
        """
        self.type = type
        self.source = source
        self.content = content

    def __str__(self):
        """ Converts object itself to string.
        @return: Object converted string.
        @rtype: C{str}
        """
        # Same textual form as the historical %-formatting version.
        return "<{0}.{1} type={2} source={3} content={4}>".format(
            __name__, self.__class__.__name__, self.type, self.source, self.content)
class UnhandledEventError(AttributeError):
    """Raised by event dispatching when a registered listener has neither an
    event-specific nor a default handler method, unless the listener's
    C{L{silent<EventListener.silent>}} flag is C{True}.
    """
| lgpl-2.1 |
youfoh/webkit-efl | Source/WebCore/platform/graphics/cg/ImageBufferCG.cpp | 22543 | /*
* Copyright (C) 2006 Nikolas Zimmermann <zimmermann@kde.org>
* Copyright (C) 2008 Apple Inc. All rights reserved.
* Copyright (C) 2010 Torch Mobile (Beijing) Co. Ltd. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE COMPUTER, INC. OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "config.h"
#include "ImageBuffer.h"
#include "BitmapImage.h"
#include "GraphicsContext.h"
#include "GraphicsContextCG.h"
#include "ImageData.h"
#include "MIMETypeRegistry.h"
#include <ApplicationServices/ApplicationServices.h>
#include <math.h>
#include <wtf/Assertions.h>
#include <wtf/CheckedArithmetic.h>
#include <wtf/MainThread.h>
#include <wtf/OwnArrayPtr.h>
#include <wtf/RetainPtr.h>
#include <wtf/UnusedParam.h>
#include <wtf/text/Base64.h>
#include <wtf/text/WTFString.h>
#if PLATFORM(MAC) || PLATFORM(CHROMIUM)
#include "WebCoreSystemInterface.h"
#endif
#if USE(IOSURFACE_CANVAS_BACKING_STORE)
#include <IOSurface/IOSurface.h>
#endif
#if !PLATFORM(IOS) && __MAC_OS_X_VERSION_MIN_REQUIRED == 1070
#include <wtf/CurrentTime.h>
#endif
using namespace std;
namespace WebCore {
#if USE(IOSURFACE_CANVAS_BACKING_STORE)
static const int maxIOSurfaceDimension = 4096;
static const int minIOSurfaceArea = 50 * 100;
// Allocates an IOSurface suitable for use as an accelerated canvas backing
// store: 32-bit 'BGRA', with the row stride and total allocation rounded up
// to the alignment IOSurface requires. Returns 0 on failure.
static RetainPtr<IOSurfaceRef> createIOSurface(const IntSize& size)
{
    unsigned pixelFormat = 'BGRA';
    unsigned bytesPerElement = 4;
    int width = size.width();
    int height = size.height();

    // A zero result from IOSurfaceAlignProperty is treated as failure.
    unsigned long bytesPerRow = IOSurfaceAlignProperty(kIOSurfaceBytesPerRow, size.width() * bytesPerElement);
    if (!bytesPerRow)
        return 0;

    unsigned long allocSize = IOSurfaceAlignProperty(kIOSurfaceAllocSize, size.height() * bytesPerRow);
    if (!allocSize)
        return 0;

    // Build the surface-properties dictionary by hand.
    const void *keys[6];
    const void *values[6];
    keys[0] = kIOSurfaceWidth;
    values[0] = CFNumberCreate(0, kCFNumberIntType, &width);
    keys[1] = kIOSurfaceHeight;
    values[1] = CFNumberCreate(0, kCFNumberIntType, &height);
    keys[2] = kIOSurfacePixelFormat;
    values[2] = CFNumberCreate(0, kCFNumberIntType, &pixelFormat);
    keys[3] = kIOSurfaceBytesPerElement;
    values[3] = CFNumberCreate(0, kCFNumberIntType, &bytesPerElement);
    keys[4] = kIOSurfaceBytesPerRow;
    values[4] = CFNumberCreate(0, kCFNumberLongType, &bytesPerRow);
    keys[5] = kIOSurfaceAllocSize;
    values[5] = CFNumberCreate(0, kCFNumberLongType, &allocSize);

    RetainPtr<CFDictionaryRef> dict(AdoptCF, CFDictionaryCreate(0, keys, values, 6, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks));
    // The dictionary retains the values (kCFTypeDictionaryValueCallBacks),
    // so release our +1 references to the CFNumbers.
    for (unsigned i = 0; i < 6; i++)
        CFRelease(values[i]);

    // IOSurfaceCreate returns a +1 reference, which we adopt.
    return RetainPtr<IOSurfaceRef>(AdoptCF, IOSurfaceCreate(dict.get()));
}
#endif
// CGDataProvider release callback: frees the pixel buffer allocated with
// tryFastCalloc() once CoreGraphics no longer references it.
static void releaseImageData(void*, const void* data, size_t)
{
    fastFree(const_cast<void*>(data));
}
// Allocates the backing store (IOSurface-backed if accelerated rendering was
// requested and is possible, otherwise a software bitmap) and wraps it in a
// GraphicsContext flipped to WebCore's top-left-origin convention.
// NOTE(review): the first early return happens before `success` is assigned;
// callers are expected to have initialized it to false — confirm at call sites.
ImageBuffer::ImageBuffer(const IntSize& size, float resolutionScale, ColorSpace imageColorSpace, RenderingMode renderingMode, DeferralMode, bool& success)
    : m_data(size) // NOTE: The input here isn't important as ImageBufferDataCG's constructor just ignores it.
    , m_logicalSize(size)
    , m_resolutionScale(resolutionScale)
{
    // The backing store lives at device resolution: logical size scaled by
    // resolutionScale, rounded up.
    float scaledWidth = ceilf(resolutionScale * size.width());
    float scaledHeight = ceilf(resolutionScale * size.height());

    // FIXME: Should we automatically use a lower resolution?
    if (!FloatSize(scaledWidth, scaledHeight).isExpressibleAsIntSize())
        return;
    m_size = IntSize(scaledWidth, scaledHeight);

    success = false; // Make early return mean failure.
    bool accelerateRendering = renderingMode == Accelerated;
    if (m_size.width() <= 0 || m_size.height() <= 0)
        return;

    // Checked arithmetic guards the stride/size computations below.
    Checked<int, RecordOverflow> width = m_size.width();
    Checked<int, RecordOverflow> height = m_size.height();

    // Prevent integer overflows
    m_data.m_bytesPerRow = 4 * width;
    Checked<size_t, RecordOverflow> numBytes = height * m_data.m_bytesPerRow;
    if (numBytes.hasOverflowed())
        return;

#if USE(IOSURFACE_CANVAS_BACKING_STORE)
    // Fall back to software rendering for buffers too large for an IOSurface
    // or too small to be worth the acceleration overhead.
    if (width.unsafeGet() >= maxIOSurfaceDimension || height.unsafeGet() >= maxIOSurfaceDimension || (width * height).unsafeGet() < minIOSurfaceArea)
        accelerateRendering = false;
#else
    ASSERT(renderingMode == Unaccelerated);
#endif

    switch (imageColorSpace) {
    case ColorSpaceDeviceRGB:
        m_data.m_colorSpace = deviceRGBColorSpaceRef();
        break;
    case ColorSpaceSRGB:
        m_data.m_colorSpace = sRGBColorSpaceRef();
        break;
    case ColorSpaceLinearRGB:
        m_data.m_colorSpace = linearRGBColorSpaceRef();
        break;
    }

    RetainPtr<CGContextRef> cgContext;
    if (accelerateRendering) {
#if USE(IOSURFACE_CANVAS_BACKING_STORE)
        m_data.m_surface = createIOSurface(m_size);
        cgContext.adoptCF(wkIOSurfaceContextCreate(m_data.m_surface.get(), width.unsafeGet(), height.unsafeGet(), m_data.m_colorSpace));
#endif
        if (!cgContext)
            accelerateRendering = false; // If allocation fails, fall back to non-accelerated path.
    }

    if (!accelerateRendering) {
        // Software path: zero-initialized RGBA buffer owned by the data
        // provider (freed via releaseImageData).
        if (!tryFastCalloc(height.unsafeGet(), m_data.m_bytesPerRow.unsafeGet()).getValue(m_data.m_data))
            return;
        ASSERT(!(reinterpret_cast<size_t>(m_data.m_data) & 2));

        m_data.m_bitmapInfo = kCGImageAlphaPremultipliedLast;
        cgContext.adoptCF(CGBitmapContextCreate(m_data.m_data, width.unsafeGet(), height.unsafeGet(), 8, m_data.m_bytesPerRow.unsafeGet(), m_data.m_colorSpace, m_data.m_bitmapInfo));
        // Create a live image that wraps the data.
        m_data.m_dataProvider.adoptCF(CGDataProviderCreateWithData(0, m_data.m_data, numBytes.unsafeGet(), releaseImageData));
    }

    if (!cgContext)
        return;

    m_context = adoptPtr(new GraphicsContext(cgContext.get()));
    m_context->applyDeviceScaleFactor(m_resolutionScale);
    // Flip the CG coordinate system so the origin is at the top-left corner.
    m_context->scale(FloatSize(1, -1));
    m_context->translate(0, -size.height());
    m_context->setIsAcceleratedContext(accelerateRendering);
#if !PLATFORM(IOS) && __MAC_OS_X_VERSION_MIN_REQUIRED == 1070
    m_data.m_lastFlushTime = currentTimeMS();
#endif

    success = true;
}
// All resources are owned by smart pointers (RetainPtr/OwnPtr); the software
// pixel buffer is freed by the data provider's release callback.
ImageBuffer::~ImageBuffer()
{
}
// Returns the buffer's drawing context. On the 10.7-only build, accelerated
// (IOSurface-backed) contexts are flushed at most every 20ms as a side
// effect of being fetched.
GraphicsContext* ImageBuffer::context() const
{
#if !PLATFORM(IOS) && __MAC_OS_X_VERSION_MIN_REQUIRED == 1070
    // Force a flush if last flush was more than 20ms ago
    if (m_context->isAcceleratedContext()) {
        double elapsedTime = currentTimeMS() - m_data.m_lastFlushTime;
        double maxFlushInterval = 20; // in ms
        if (elapsedTime > maxFlushInterval) {
            CGContextRef context = m_context->platformContext();
            CGContextFlush(context);
            m_data.m_lastFlushTime = currentTimeMS();
        }
    }
#endif
    return m_context.get();
}
// Returns the buffer contents as a BitmapImage. When the buffer carries a
// resolution scale, the device-resolution image is first resampled into a
// bitmap context at the logical size.
PassRefPtr<Image> ImageBuffer::copyImage(BackingStoreCopy copyBehavior) const
{
    RetainPtr<CGImageRef> image;
    // copyNativeImage() and CGBitmapContextCreateImage() both follow the CF
    // "Create" rule and return +1 references, so they must be adopted.
    // Plain RetainPtr assignment would retain again and leak the image
    // (compare the adoptCF() calls in draw() and toDataURL()).
    if (m_resolutionScale == 1)
        image.adoptCF(copyNativeImage(copyBehavior));
    else {
        image.adoptCF(copyNativeImage(DontCopyBackingStore));
        RetainPtr<CGContextRef> context(AdoptCF, CGBitmapContextCreate(0, logicalSize().width(), logicalSize().height(), 8, 4 * logicalSize().width(), deviceRGBColorSpaceRef(), kCGImageAlphaPremultipliedLast));
        CGContextSetBlendMode(context.get(), kCGBlendModeCopy);
        CGContextDrawImage(context.get(), CGRectMake(0, 0, logicalSize().width(), logicalSize().height()), image.get());
        image.adoptCF(CGBitmapContextCreateImage(context.get()));
    }

    if (!image)
        return 0;

    return BitmapImage::create(image.get());
}
// Returns a newly created CGImageRef (+1 reference — callers adopt it).
// DontCopyBackingStore wraps the live backing-store bytes without copying;
// CopyBackingStore snapshots the current pixels.
NativeImagePtr ImageBuffer::copyNativeImage(BackingStoreCopy copyBehavior) const
{
    CGImageRef image = 0;
    if (!m_context->isAcceleratedContext()) {
        switch (copyBehavior) {
        case DontCopyBackingStore:
            // Wraps m_data via the data provider created in the constructor.
            image = CGImageCreate(internalSize().width(), internalSize().height(), 8, 32, m_data.m_bytesPerRow.unsafeGet(), m_data.m_colorSpace, m_data.m_bitmapInfo, m_data.m_dataProvider.get(), 0, true, kCGRenderingIntentDefault);
            break;
        case CopyBackingStore:
            image = CGBitmapContextCreateImage(context()->platformContext());
            break;
        default:
            ASSERT_NOT_REACHED();
            break;
        }
    }
#if USE(IOSURFACE_CANVAS_BACKING_STORE)
    else {
        // Accelerated path: snapshot the IOSurface-backed context.
        image = wkIOSurfaceContextCreateImage(context()->platformContext());
#if !PLATFORM(IOS) && __MAC_OS_X_VERSION_MIN_REQUIRED == 1070
        m_data.m_lastFlushTime = currentTimeMS();
#endif
    }
#endif
    return image;
}
// Draws this buffer's contents into destContext. Drawing into our own
// context (or an accelerated one) requires a deep copy of the pixels first.
void ImageBuffer::draw(GraphicsContext* destContext, ColorSpace styleColorSpace, const FloatRect& destRect, const FloatRect& srcRect, CompositeOperator op, bool useLowQualityScale)
{
    UNUSED_PARAM(useLowQualityScale);
    // When drawing into ourselves, skip any colorspace conversion.
    ColorSpace colorSpace = (destContext == m_context) ? ColorSpaceDeviceRGB : styleColorSpace;

    RetainPtr<CGImageRef> image;
    if (destContext == m_context || destContext->isAcceleratedContext())
        image.adoptCF(copyNativeImage(CopyBackingStore)); // Drawing into our own buffer, need to deep copy.
    else
        image.adoptCF(copyNativeImage(DontCopyBackingStore));

    // srcRect is in logical coordinates; map it to device pixels.
    FloatRect adjustedSrcRect = srcRect;
    adjustedSrcRect.scale(m_resolutionScale, m_resolutionScale);
    destContext->drawNativeImage(image.get(), internalSize(), colorSpace, destRect, adjustedSrcRect, op);
}
// Tiles this buffer's contents into destContext. Same deep-copy rule as
// draw(): reading our own pixels while drawing into ourselves (or into an
// accelerated context) requires a snapshot.
void ImageBuffer::drawPattern(GraphicsContext* destContext, const FloatRect& srcRect, const AffineTransform& patternTransform, const FloatPoint& phase, ColorSpace styleColorSpace, CompositeOperator op, const FloatRect& destRect)
{
    // srcRect is in logical coordinates; map it to device pixels.
    FloatRect adjustedSrcRect = srcRect;
    adjustedSrcRect.scale(m_resolutionScale, m_resolutionScale);

    if (!m_context->isAcceleratedContext()) {
        if (destContext == m_context || destContext->isAcceleratedContext()) {
            RefPtr<Image> copy = copyImage(CopyBackingStore); // Drawing into our own buffer, need to deep copy.
            copy->drawPattern(destContext, adjustedSrcRect, patternTransform, phase, styleColorSpace, op, destRect);
        } else {
            RefPtr<Image> imageForRendering = copyImage(DontCopyBackingStore);
            imageForRendering->drawPattern(destContext, adjustedSrcRect, patternTransform, phase, styleColorSpace, op, destRect);
        }
    } else {
        // Accelerated source: always snapshot before tiling.
        RefPtr<Image> copy = copyImage(CopyBackingStore);
        copy->drawPattern(destContext, adjustedSrcRect, patternTransform, phase, styleColorSpace, op, destRect);
    }
}
// Uses this buffer's contents as an alpha mask to clip contextToClip within
// rect. The CTM is flipped around the rect before CGContextClipToMask (which
// expects CG's bottom-up orientation) and restored afterwards.
void ImageBuffer::clip(GraphicsContext* contextToClip, const FloatRect& rect) const
{
    CGContextRef platformContextToClip = contextToClip->platformContext();
    // FIXME: This image needs to be grayscale to be used as an alpha mask here.
    RetainPtr<CGImageRef> image(AdoptCF, copyNativeImage(DontCopyBackingStore));
    CGContextTranslateCTM(platformContextToClip, rect.x(), rect.y() + rect.height());
    CGContextScaleCTM(platformContextToClip, 1, -1);
    CGContextClipToMask(platformContextToClip, FloatRect(FloatPoint(), rect.size()), image.get());
    // Undo the flip; the clip itself remains in effect.
    CGContextScaleCTM(platformContextToClip, 1, -1);
    CGContextTranslateCTM(platformContextToClip, -rect.x(), -rect.y() - rect.height());
}
// Reads back pixels as un-premultiplied RGBA. Accelerated contexts are
// flushed first so pending GPU work is visible to the readback.
PassRefPtr<Uint8ClampedArray> ImageBuffer::getUnmultipliedImageData(const IntRect& rect, CoordinateSystem coordinateSystem) const
{
    if (m_context->isAcceleratedContext()) {
        CGContextFlush(context()->platformContext());
#if !PLATFORM(IOS) && __MAC_OS_X_VERSION_MIN_REQUIRED == 1070
        m_data.m_lastFlushTime = currentTimeMS();
#endif
    }
    // true => unmultiplied output; logical rects are scaled to device pixels.
    return m_data.getData(rect, internalSize(), m_context->isAcceleratedContext(), true, coordinateSystem == LogicalCoordinateSystem ? m_resolutionScale : 1);
}
// Reads back pixels as premultiplied RGBA. Accelerated contexts are flushed
// first so pending GPU work is visible to the readback.
PassRefPtr<Uint8ClampedArray> ImageBuffer::getPremultipliedImageData(const IntRect& rect, CoordinateSystem coordinateSystem) const
{
    if (m_context->isAcceleratedContext()) {
        CGContextFlush(context()->platformContext());
#if !PLATFORM(IOS) && __MAC_OS_X_VERSION_MIN_REQUIRED == 1070
        m_data.m_lastFlushTime = currentTimeMS();
#endif
    }
    // false => premultiplied output; logical rects are scaled to device pixels.
    return m_data.getData(rect, internalSize(), m_context->isAcceleratedContext(), false, coordinateSystem == LogicalCoordinateSystem ? m_resolutionScale : 1);
}
// Writes raw pixel data into the buffer. Software buffers take the direct
// path; accelerated buffers stage the pixels in a temporary unaccelerated
// ImageBuffer and blit it with drawImage configured as an exact bit copy.
void ImageBuffer::putByteArray(Multiply multiplied, Uint8ClampedArray* source, const IntSize& sourceSize, const IntRect& sourceRect, const IntPoint& destPoint, CoordinateSystem coordinateSystem)
{
    if (!m_context->isAcceleratedContext()) {
        m_data.putData(source, sourceSize, sourceRect, destPoint, internalSize(), m_context->isAcceleratedContext(), multiplied == Unmultiplied, coordinateSystem == LogicalCoordinateSystem ? m_resolutionScale : 1);
        return;
    }

#if USE(IOSURFACE_CANVAS_BACKING_STORE)
    // Make a copy of the source to ensure the bits don't change before being drawn
    IntSize sourceCopySize(sourceRect.width(), sourceRect.height());
    OwnPtr<ImageBuffer> sourceCopy = ImageBuffer::create(sourceCopySize, 1, ColorSpaceDeviceRGB, Unaccelerated);
    if (!sourceCopy)
        return;
    sourceCopy->m_data.putData(source, sourceSize, sourceRect, IntPoint(-sourceRect.x(), -sourceRect.y()), sourceCopy->internalSize(), sourceCopy->context()->isAcceleratedContext(), multiplied == Unmultiplied, 1);

    // Set up context for using drawImage as a direct bit copy
    CGContextRef destContext = context()->platformContext();
    CGContextSaveGState(destContext);
    // Undo the current user-space transform so the copy lands on exact
    // device pixels.
    if (coordinateSystem == LogicalCoordinateSystem)
        CGContextConcatCTM(destContext, AffineTransform(wkGetUserToBaseCTM(destContext)).inverse());
    else
        CGContextConcatCTM(destContext, AffineTransform(CGContextGetCTM(destContext)).inverse());
    wkCGContextResetClip(destContext);
    // Disable interpolation, blending, alpha and shadows: the draw must be a
    // bit-exact copy.
    CGContextSetInterpolationQuality(destContext, kCGInterpolationNone);
    CGContextSetAlpha(destContext, 1.0);
    CGContextSetBlendMode(destContext, kCGBlendModeCopy);
    CGContextSetShadowWithColor(destContext, CGSizeZero, 0, 0);

    // Draw the image in CG coordinate space
    IntPoint destPointInCGCoords(destPoint.x() + sourceRect.x(), (coordinateSystem == LogicalCoordinateSystem ? logicalSize() : internalSize()).height() - (destPoint.y() + sourceRect.y()) - sourceRect.height());
    IntRect destRectInCGCoords(destPointInCGCoords, sourceCopySize);
    RetainPtr<CGImageRef> sourceCopyImage(AdoptCF, sourceCopy->copyNativeImage());
    CGContextDrawImage(destContext, destRectInCGCoords, sourceCopyImage.get());
    CGContextRestoreGState(destContext);
#endif
}
// Returns the JPEG uniform type identifier. On Windows the system
// kUTTypeJPEG constant is apparently not available, so an equivalent local
// constant shadows it.
static inline CFStringRef jpegUTI()
{
#if PLATFORM(WIN)
    static const CFStringRef kUTTypeJPEG = CFSTR("public.jpeg");
#endif
    return kUTTypeJPEG;
}
// Maps a MIME type to a uniform type identifier for CGImageDestination.
// Mac asks Launch Services; other ports support only PNG, JPEG and GIF via
// hard-coded constants (falling back to PNG on unexpected input).
static RetainPtr<CFStringRef> utiFromMIMEType(const String& mimeType)
{
#if PLATFORM(MAC)
    RetainPtr<CFStringRef> mimeTypeCFString(AdoptCF, mimeType.createCFString());
    return RetainPtr<CFStringRef>(AdoptCF, UTTypeCreatePreferredIdentifierForTag(kUTTagClassMIMEType, mimeTypeCFString.get(), 0));
#else
    ASSERT(isMainThread()); // It is unclear if CFSTR is threadsafe.
    // FIXME: Add Windows support for all the supported UTIs when a way to convert from MIMEType to UTI reliably is found.
    // For now, only support PNG, JPEG, and GIF. See <rdar://problem/6095286>.
    static const CFStringRef kUTTypePNG = CFSTR("public.png");
    static const CFStringRef kUTTypeGIF = CFSTR("com.compuserve.gif");

    if (equalIgnoringCase(mimeType, "image/png"))
        return kUTTypePNG;
    if (equalIgnoringCase(mimeType, "image/jpeg"))
        return jpegUTI();
    if (equalIgnoringCase(mimeType, "image/gif"))
        return kUTTypeGIF;

    ASSERT_NOT_REACHED();
    return kUTTypePNG;
#endif
}
// Encodes image into data using the codec identified by uti. For JPEG, an
// in-range quality (0..1) is applied as the lossy compression quality.
// Returns false on any failure or invalid argument.
static bool CGImageEncodeToData(CGImageRef image, CFStringRef uti, const double* quality, CFMutableDataRef data)
{
    if (!image || !uti || !data)
        return false;

    RetainPtr<CGImageDestinationRef> destination(AdoptCF, CGImageDestinationCreateWithData(data, uti, 1, 0));
    if (!destination)
        return false;

    RetainPtr<CFDictionaryRef> imageProperties = 0;
    if (CFEqual(uti, jpegUTI()) && quality && *quality >= 0.0 && *quality <= 1.0) {
        // Apply the compression quality to the JPEG image destination.
        RetainPtr<CFNumberRef> compressionQuality(AdoptCF, CFNumberCreate(kCFAllocatorDefault, kCFNumberDoubleType, quality));
        const void* key = kCGImageDestinationLossyCompressionQuality;
        const void* value = compressionQuality.get();
        imageProperties.adoptCF(CFDictionaryCreate(0, &key, &value, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks));
    }

    // Setting kCGImageDestinationBackgroundColor to black for JPEG images in imageProperties would save some math
    // in the calling functions, but it doesn't seem to work.

    CGImageDestinationAddImage(destination.get(), image, imageProperties.get());
    return CGImageDestinationFinalize(destination.get());
}
// Encodes image with the given MIME type and returns it as a base64 data
// URL. Encoding failures yield the canonical empty data URL "data:,".
static String CGImageToDataURL(CGImageRef image, const String& mimeType, const double* quality)
{
    RetainPtr<CFStringRef> uti = utiFromMIMEType(mimeType);
    ASSERT(uti);

    RetainPtr<CFMutableDataRef> data(AdoptCF, CFDataCreateMutable(kCFAllocatorDefault, 0));
    if (!CGImageEncodeToData(image, uti.get(), quality, data.get()))
        return "data:,";

    Vector<char> base64Data;
    base64Encode(reinterpret_cast<const char*>(CFDataGetBytePtr(data.get())), CFDataGetLength(data.get()), base64Data);

    return "data:" + mimeType + ";base64," + base64Data;
}
// Serializes the buffer as a data URL. JPEG has no alpha channel, so for
// JPEG the premultiplied pixels are wrapped with alpha ignored — which is
// equivalent to compositing on black. Scaled buffers are first resampled to
// the logical size.
String ImageBuffer::toDataURL(const String& mimeType, const double* quality, CoordinateSystem) const
{
    ASSERT(MIMETypeRegistry::isSupportedImageMIMETypeForEncoding(mimeType));

    RetainPtr<CFStringRef> uti = utiFromMIMEType(mimeType);
    ASSERT(uti);

    RefPtr<Uint8ClampedArray> premultipliedData;
    RetainPtr<CGImageRef> image;

    if (CFEqual(uti.get(), jpegUTI())) {
        // JPEGs don't have an alpha channel, so we have to manually composite on top of black.
        premultipliedData = getPremultipliedImageData(IntRect(IntPoint(0, 0), logicalSize()));
        if (!premultipliedData)
            return "data:,";

        RetainPtr<CGDataProviderRef> dataProvider;
        // The data provider does not take ownership of the pixel buffer;
        // premultipliedData keeps it alive for the duration of this call.
        dataProvider.adoptCF(CGDataProviderCreateWithData(0, premultipliedData->data(), 4 * logicalSize().width() * logicalSize().height(), 0));
        if (!dataProvider)
            return "data:,";

        // kCGImageAlphaNoneSkipLast: the alpha byte is present but ignored.
        image.adoptCF(CGImageCreate(logicalSize().width(), logicalSize().height(), 8, 32, 4 * logicalSize().width(),
                                    deviceRGBColorSpaceRef(), kCGBitmapByteOrderDefault | kCGImageAlphaNoneSkipLast,
                                    dataProvider.get(), 0, false, kCGRenderingIntentDefault));
    } else if (m_resolutionScale == 1)
        image.adoptCF(copyNativeImage(CopyBackingStore));
    else {
        // Resample the device-resolution image down to the logical size.
        image.adoptCF(copyNativeImage(DontCopyBackingStore));
        RetainPtr<CGContextRef> context(AdoptCF, CGBitmapContextCreate(0, logicalSize().width(), logicalSize().height(), 8, 4 * logicalSize().width(), deviceRGBColorSpaceRef(), kCGImageAlphaPremultipliedLast));
        CGContextSetBlendMode(context.get(), kCGBlendModeCopy);
        CGContextDrawImage(context.get(), CGRectMake(0, 0, logicalSize().width(), logicalSize().height()), image.get());
        image.adoptCF(CGBitmapContextCreateImage(context.get()));
    }

    return CGImageToDataURL(image.get(), mimeType, quality);
}
// Serializes raw ImageData (un-premultiplied RGBA) as a data URL. For JPEG
// the pixels are premultiplied into a scratch buffer first and the alpha
// byte is then ignored, i.e. composited on black.
String ImageDataToDataURL(const ImageData& source, const String& mimeType, const double* quality)
{
    ASSERT(MIMETypeRegistry::isSupportedImageMIMETypeForEncoding(mimeType));

    RetainPtr<CFStringRef> uti = utiFromMIMEType(mimeType);
    ASSERT(uti);

    CGImageAlphaInfo dataAlphaInfo = kCGImageAlphaLast;
    unsigned char* data = source.data()->data();

    Vector<uint8_t> premultipliedData;

    if (CFEqual(uti.get(), jpegUTI())) {
        // JPEGs don't have an alpha channel, so we have to manually composite on top of black.
        size_t size = 4 * source.width() * source.height();
        if (!premultipliedData.tryReserveCapacity(size))
            return "data:,";

        // NOTE(review): only capacity is reserved — the vector's size stays 0
        // and the bytes are written through data() into the reserved storage.
        // The buffer is later consumed via the raw pointer with an explicit
        // length, so this works, but it is fragile; confirm before touching.
        unsigned char *buffer = premultipliedData.data();
        for (size_t i = 0; i < size; i += 4) {
            unsigned alpha = data[i + 3];
            // Premultiply each channel by alpha; alpha == 255 is a no-op.
            if (alpha != 255) {
                buffer[i + 0] = data[i + 0] * alpha / 255;
                buffer[i + 1] = data[i + 1] * alpha / 255;
                buffer[i + 2] = data[i + 2] * alpha / 255;
            } else {
                buffer[i + 0] = data[i + 0];
                buffer[i + 1] = data[i + 1];
                buffer[i + 2] = data[i + 2];
            }
        }

        dataAlphaInfo = kCGImageAlphaNoneSkipLast; // Ignore the alpha channel.
        data = premultipliedData.data();
    }

    RetainPtr<CGDataProviderRef> dataProvider;
    // No release callback: the pixel buffer outlives the provider here.
    dataProvider.adoptCF(CGDataProviderCreateWithData(0, data, 4 * source.width() * source.height(), 0));
    if (!dataProvider)
        return "data:,";

    RetainPtr<CGImageRef> image;
    image.adoptCF(CGImageCreate(source.width(), source.height(), 8, 32, 4 * source.width(),
                                deviceRGBColorSpaceRef(), kCGBitmapByteOrderDefault | dataAlphaInfo,
                                dataProvider.get(), 0, false, kCGRenderingIntentDefault));

    return CGImageToDataURL(image.get(), mimeType, quality);
}
} // namespace WebCore
| lgpl-2.1 |
microcosmx/jade | src/jade/domain/introspection/DeadAgent.java | 2961 | /*****************************************************************
JADE - Java Agent DEvelopment Framework is a framework to develop
multi-agent systems in compliance with the FIPA specifications.
Copyright (C) 2000 CSELT S.p.A.
GNU Lesser General Public License
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation,
version 2.1 of the License.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the
Free Software Foundation, Inc., 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.
*****************************************************************/
package jade.domain.introspection;
import jade.core.AID;
import jade.core.ContainerID;
/**
An introspection event, recording the death of an agent within the
platform.
@author Giovanni Rimassa - Universita' di Parma
@version $Date: 2006-02-21 08:56:26 +0100 (mar, 21 feb 2006) $ $Revision: 5860 $
*/
public class DeadAgent implements Event {

    /**
       A string constant for the name of this event.
    */
    public static final String NAME = "Dead-Agent";

    // The identifier of the newly dead agent.
    private AID agent;
    // The container where the dead agent was deployed.
    private ContainerID where;
    // Presumably whether the agent's container was removed along with the
    // agent -- confirm against the platform code that fires this event.
    private Boolean containerRemoved;

    /**
       Default constructor. A default constructor is necessary for
       ontological classes.
    */
    public DeadAgent() {
    }

    /**
       Retrieve the name of this event.
       @return A constant value for the event name.
    */
    public String getName() {
        return NAME;
    }

    /**
       Set the <code>where</code> slot of this event.
       @param id The container identifier of the container where the
       newly dead agent was deployed.
    */
    public void setWhere(ContainerID id) {
        where = id;
    }

    /**
       Retrieve the value of the <code>where</code> slot of this
       event, containing the container identifier of the container
       where the newly dead agent was deployed.
       @return The value of the <code>where</code> slot, or
       <code>null</code> if no value was set.
    */
    public ContainerID getWhere() {
        return where;
    }

    /**
       Set the <code>agent</code> slot of this event.
       @param id The agent identifier of the newly dead agent.
    */
    public void setAgent(AID id) {
        agent = id;
    }

    /**
       Retrieve the value of the <code>agent</code> slot of this
       event, containing the agent identifier of the newly dead agent.
       @return The value of the <code>agent</code> slot, or
       <code>null</code> if no value was set.
    */
    public AID getAgent() {
        return agent;
    }

    /**
       Retrieve the value of the <code>container-removed</code> flag.
       @return The flag value, or <code>null</code> if it was never set.
    */
    public Boolean getContainerRemoved() {
        return containerRemoved;
    }

    /**
       Set the <code>container-removed</code> flag of this event.
       @param cr The new flag value.
    */
    public void setContainerRemoved(Boolean cr) {
        containerRemoved = cr;
    }
}
| lgpl-2.1 |
mwtoews/libgeos | tests/unit/noding/NodedSegmentStringTest.cpp | 9486 | //
// Test Suite for geos::noding::NodedSegmentString class.
#include <tut/tut.hpp>
#include <utility.h>
// geos
#include <geos/io/WKTReader.h>
#include <geos/noding/NodedSegmentString.h>
#include <geos/noding/SegmentString.h>
#include <geos/noding/Octant.h>
#include <geos/geom/Coordinate.h>
#include <geos/geom/CoordinateSequence.h>
#include <geos/geom/CoordinateArraySequence.h>
#include <geos/geom/CoordinateArraySequenceFactory.h>
#include <geos/geom/GeometryFactory.h>
#include <geos/util.h>
// std
#include <memory>
using geos::io::WKTReader;
using geos::geom::CoordinateSequence;
using geos::geom::Geometry;
//using geos::geom::LineString;
using geos::geom::GeometryFactory;
using geos::noding::SegmentString;
namespace tut {
//
// Test Group
//
// Common data used by tests
// Common data used by tests
struct test_nodedsegmentstring_data {
    // Owning-pointer convenience typedefs for the fixtures below.
    typedef std::unique_ptr<geos::geom::CoordinateSequence> \
    CoordinateSequenceAutoPtr;
    typedef std::unique_ptr<geos::noding::NodedSegmentString> \
    SegmentStringAutoPtr;

    const geos::geom::CoordinateSequenceFactory* csFactory;
    WKTReader r;

    // Wraps a coordinate sequence (ownership is transferred to the new
    // NodedSegmentString) together with optional user data.
    SegmentStringAutoPtr
    makeSegmentString(geos::geom::CoordinateSequence* cs, void* d = nullptr)
    {
        return SegmentStringAutoPtr(
            new geos::noding::NodedSegmentString(cs, d)
        );
    }

    // Converts a set of segment strings into a MultiLineString for
    // geometry comparison. The raw vector and the LineStrings are handed to
    // createMultiLineString, which takes ownership (GEOS convention for the
    // raw-pointer overloads -- verify against the GEOS headers in use).
    std::unique_ptr<Geometry>
    toLines(SegmentString::NonConstVect& ss, const GeometryFactory* gf)
    {
        std::vector<Geometry *> *lines = new std::vector<Geometry *>();
        for (auto s: ss)
        {
            std::unique_ptr<CoordinateSequence> cs = s->getCoordinates()->clone();
            lines->push_back(gf->createLineString(*cs));
        }
        return std::unique_ptr<Geometry>(gf->createMultiLineString(lines));
    }

    // Adds the given node points to a NodedSegmentString built from wktLine
    // (node i lies on segment segmentIndex[i]), splits the string at the
    // nodes, and checks that the resulting edges equal wktExpected.
    void
    checkNoding(const std::string& wktLine, const std::string& wktNodes, std::vector<size_t> segmentIndex, const std::string& wktExpected)
    {
        using geos::noding::NodedSegmentString;

        std::unique_ptr<Geometry> line = r.read(wktLine);
        std::unique_ptr<Geometry> pts = r.read(wktNodes);
        NodedSegmentString nss(line->getCoordinates().release(), 0);
        std::unique_ptr<CoordinateSequence> node = pts->getCoordinates();
        for (std::size_t i = 0, n=node->size(); i < n; ++i) {
            nss.addIntersection(node->getAt(i), segmentIndex.at(i));
        }

        SegmentString::NonConstVect nodedSS;
        nss.getNodeList().addSplitEdges(nodedSS);

        std::unique_ptr<Geometry> result = toLines(nodedSS, line->getFactory());
        //System.out.println(result);
        // addSplitEdges allocates the split strings; free them now that
        // their coordinates have been copied into the result geometry.
        for (auto ss: nodedSS) {
            delete ss;
        }
        std::unique_ptr<Geometry> expected = r.read(wktExpected);
        ensure_equals_geometry(expected.get(), result.get());
    }

    test_nodedsegmentstring_data()
        :
        csFactory(geos::geom::CoordinateArraySequenceFactory::instance())
    {
    }

    ~test_nodedsegmentstring_data()
    {
    }
};
// tut boilerplate: bind the shared fixture to a named test group registered
// with the runner.
typedef test_group<test_nodedsegmentstring_data> group;
typedef group::object object;

group test_nodedsegmentstring_group("geos::noding::NodedSegmentString");
//
// Test Cases
//
// test constructor with 2 equal points
template<>
template<>
void object::test<1>
()
{
    // A two-point string whose endpoints coincide: still size 2, and
    // considered closed because first == last.
    auto coords = geos::detail::make_unique<geos::geom::CoordinateArraySequence>(0u, 2u);
    ensure(nullptr != coords.get());

    geos::geom::Coordinate start(0, 0);
    geos::geom::Coordinate end(0, 0);
    coords->add(start);
    coords->add(end);
    ensure_equals(coords->size(), 2u);

    SegmentStringAutoPtr segstr(makeSegmentString(coords.release()));
    ensure(nullptr != segstr.get());

    ensure_equals(segstr->size(), 2u);
    ensure_equals(segstr->getData(), (void*)nullptr);
    ensure_equals(segstr->getCoordinate(0), start);
    ensure_equals(segstr->getCoordinate(1), end);
    ensure_equals(segstr->isClosed(), true);
    ensure_equals(segstr->getNodeList().size(), 0u);
    ensure_equals(segstr->getSegmentOctant(0), 0);
}
// test constructor with 2 different points
template<>
template<>
void object::test<2>
()
{
    // Two distinct endpoints: an open, single-segment string.
    geos::geom::Coordinate start(0, 0);
    geos::geom::Coordinate end(1, 0);
    auto coords = geos::detail::make_unique<geos::geom::CoordinateArraySequence>(0u, 2u);
    ensure(coords.get() != nullptr);
    coords->add(start);
    coords->add(end);
    ensure_equals(coords->size(), 2u);

    // The segment string takes ownership of the sequence.
    SegmentStringAutoPtr segStr(makeSegmentString(coords.release()));
    ensure(segStr.get() != nullptr);

    ensure_equals(segStr->size(), 2u);
    ensure_equals(segStr->getData(), (void*)nullptr);
    ensure_equals(segStr->getCoordinate(0), start);
    ensure_equals(segStr->getCoordinate(1), end);
    // Distinct endpoints: not a ring.
    ensure_equals(segStr->isClosed(), false);
    ensure_equals(segStr->getSegmentOctant(0), 0);
    ensure_equals(segStr->getNodeList().size(), 0u);
}
// test constructor with 4 points (3 distinct vertices plus a closing point) forming a ring
template<>
template<>
void object::test<3>
()
{
    // A triangle: three distinct vertices plus a repeat of the first to close it.
    geos::geom::Coordinate v0(0, 0);
    geos::geom::Coordinate v1(1, 0);
    geos::geom::Coordinate v2(1, 1);
    auto coords = geos::detail::make_unique<geos::geom::CoordinateArraySequence>(0u, 2u);
    ensure(coords.get() != nullptr);
    coords->add(v0);
    coords->add(v1);
    coords->add(v2);
    coords->add(v0);
    ensure_equals(coords->size(), 4u);

    // The segment string takes ownership of the sequence.
    SegmentStringAutoPtr segStr(makeSegmentString(coords.release()));
    ensure(segStr.get() != nullptr);

    ensure_equals(segStr->size(), 4u);
    ensure_equals(segStr->getData(), (void*)nullptr);
    ensure_equals(segStr->getCoordinate(0), v0);
    ensure_equals(segStr->getCoordinate(1), v1);
    ensure_equals(segStr->getCoordinate(2), v2);
    ensure_equals(segStr->getCoordinate(3), v0);
    // First and last coordinates coincide, so the string is a ring.
    ensure_equals(segStr->isClosed(), true);
    // Each of the three segments reports its expected octant.
    ensure_equals(segStr->getSegmentOctant(2), 4);
    ensure_equals(segStr->getSegmentOctant(1), 1);
    ensure_equals(segStr->getSegmentOctant(0), 0);
    ensure_equals(segStr->getNodeList().size(), 0u);
}
// test Octant class
template<>
template<>
void object::test<4>
()
{
    // Verify that the two Octant::octant overloads (by reference and by
    // pointer) agree for a representative coordinate pair and do not throw.
    geos::geom::Coordinate p0(0, 0);
    geos::geom::Coordinate p1(5, -5);
    int octant_rc1 = 0;
    int octant_rc2 = 0;
    // bool, not int: this flag only ever holds a pass/fail outcome.
    bool testPassed = true;
    try {
        octant_rc1 = geos::noding::Octant::octant(p0, p1);
        octant_rc2 = geos::noding::Octant::octant(&p0, &p1);
        // Both overloads must compute the same octant code.
        testPassed = (octant_rc1 == octant_rc2);
    }
    catch(...) {
        // Any exception from either overload counts as a failure.
        testPassed = false;
    }
    ensure(testPassed);
}
// test adding intersections
template<>
template<>
void object::test<5>
()
{
    // Build a single horizontal segment to add node points to.
    geos::geom::Coordinate start(0, 0);
    geos::geom::Coordinate end(10, 0);
    auto coords = geos::detail::make_unique<geos::geom::CoordinateArraySequence>(0u, 2u);
    coords->add(start);
    coords->add(end);
    SegmentStringAutoPtr segStr(makeSegmentString(coords.release()));
    ensure_equals(segStr->getNodeList().size(), 0u);

    // the intersection is invalid, but SegmentString trusts us
    segStr->addIntersection(start, 0);
    ensure_equals(segStr->getNodeList().size(), 1u);

    // Re-adding the same node must not grow the list.
    segStr->addIntersection(start, 0);
    ensure_equals(segStr->getNodeList().size(), 1u);

    // A second, distinct node is accepted.
    segStr->addIntersection(end, 0);
    ensure_equals(segStr->getNodeList().size(), 2u);

    // Duplicates of either existing node are still ignored.
    segStr->addIntersection(end, 0);
    ensure_equals(segStr->getNodeList().size(), 2u);
    segStr->addIntersection(start, 0);
    ensure_equals(segStr->getNodeList().size(), 2u);
}
/**
* Tests a case which involves nodes added when using the SnappingNoder.
* In this case one of the added nodes is relatively "far" from its segment,
* and "near" the start vertex of the segment.
* Computing the noding correctly requires the fix to {@link SegmentNode#compareTo(Object)}
* added in https://github.com/locationtech/jts/pull/399
*
* See https://trac.osgeo.org/geos/ticket/1051
*/
template<>
template<>
void object::test<6>
()
{
    // Segment index for each of the four snap nodes below, in order.
    std::vector<size_t> segmentIndex{0, 0, 1, 1};
    // Node the line at the four points and compare against the expected split.
    checkNoding("LINESTRING(655103.6628454948 1794805.456674405, 655016.20226 1794940.10998, 655014.8317182435 1794941.5196832407)",
                "MULTIPOINT((655016.29615051334 1794939.965427252),(655016.20226531825 1794940.1099718122), (655016.20226 1794940.10998),(655016.20225819293 1794940.1099794197))",
                segmentIndex,
                "MULTILINESTRING ((655014.8317182435 1794941.5196832407,655016.2022581929 1794940.1099794197), (655016.2022581929 1794940.1099794197, 655016.20226 1794940.10998), (655016.20226 1794940.10998, 655016.2022653183 1794940.1099718122), (655016.2022653183 1794940.1099718122, 655016.2961505133 1794939.965427252), (655016.2961505133 1794939.965427252, 655103.6628454948 1794805.456674405))");
}
// TODO: test getting noded substrings
// template<>
// template<>
// void object::test<6>()
// {
// geos::geom::Coordinate cs1p0(0, 0);
// geos::geom::Coordinate cs1p1(10, 0);
// CoordinateSequenceAutoPtr cs1(csFactory->create(0, 2));
// cs1->add(cs1p0);
// cs1->add(cs1p1);
//
// geos::geom::Coordinate cs2p0(5, -5);
// geos::geom::Coordinate cs2p1(5, 5);
// CoordinateSequenceAutoPtr cs2(csFactory->create(0, 2));
// cs2->add(cs2p0);
// cs2->add(cs2p1);
//
// using geos::noding::SegmentString;
// using geos::noding::NodedSegmentString;
//
// SegmentString::NonConstVect inputStrings;
// inputStrings.push_back(makeSegmentString(cs2.get()).get());
//
// std::unique_ptr<SegmentString::NonConstVect> nodedStrings(
// NodedSegmentString::getNodedSubstrings(inputStrings)
// );
//
// ensure_equals(nodedStrings->size(), 0u);
// }
} // namespace tut
| lgpl-2.1 |
OuluPulu/strqtwln | src/client/qwaylandqtkey.cpp | 3843 | /****************************************************************************
**
** Copyright (C) 2012 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the plugins of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qwaylandqtkey_p.h"
#include "qwaylandinputdevice_p.h"
QT_BEGIN_NAMESPACE
// Binds the qt_key_extension global (advertised with the given registry id)
// and remembers the display so key events can be routed to its input devices.
QWaylandQtKeyExtension::QWaylandQtKeyExtension(QWaylandDisplay *display, uint32_t id)
    : QtWayland::qt_key_extension(display->wl_registry(), id)
    , m_display(display)
{
}
/**
 * Handles a qtkey event from the compositor and forwards it into Qt as an
 * extended key event on the window owning \a surface, or — when no surface
 * is given — on the keyboard-focus window of the first input device.
 *
 * Events are dropped (with a warning) when no target window can be resolved.
 */
void QWaylandQtKeyExtension::key_extension_qtkey(struct wl_surface *surface,
                                                 uint32_t time,
                                                 uint32_t type,
                                                 uint32_t key,
                                                 uint32_t modifiers,
                                                 uint32_t nativeScanCode,
                                                 uint32_t nativeVirtualKey,
                                                 uint32_t nativeModifiers,
                                                 const QString &text,
                                                 uint32_t autorep,
                                                 uint32_t count)
{
    QList<QWaylandInputDevice *> inputDevices = m_display->inputDevices();

    // Fix: the previous code called inputDevices.first() unconditionally,
    // which crashed when the device list was empty but a surface was
    // supplied. A device is only needed on the surface-less fallback path.
    QWaylandWindow *win = nullptr;
    if (surface) {
        win = QWaylandWindow::fromWlSurface(surface);
    } else {
        if (inputDevices.isEmpty()) {
            qWarning("qt_key_extension: handle_qtkey: No input device");
            return;
        }
        win = inputDevices.first()->keyboardFocus();
    }

    if (!win || !win->window()) {
        qWarning("qt_key_extension: handle_qtkey: No keyboard focus");
        return;
    }

    QWindow *window = win->window();
    QWindowSystemInterface::handleExtendedKeyEvent(window, time, QEvent::Type(type), key, Qt::KeyboardModifiers(modifiers),
                                                   nativeScanCode, nativeVirtualKey, nativeModifiers, text,
                                                   autorep, count);
}
QT_END_NAMESPACE
| lgpl-2.1 |
Fat-Zer/FreeCAD_sf_master | src/Mod/PartDesign/Gui/Resources/translations/PartDesign_sl.ts | 130589 | <?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.0" language="sl" sourcelanguage="en">
<context>
<name>CmdPartDesignAdditiveLoft</name>
<message>
<location filename="../../Command.cpp" line="1403"/>
<source>PartDesign</source>
<translation>PartDesign</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1404"/>
<source>Additive loft</source>
<translation>Dodajni navleček</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1405"/>
<source>Loft a selected profile through other profile sections</source>
<translation>Ostreši izbran prerez preko ostalih prerezov</translation>
</message>
</context>
<context>
<name>CmdPartDesignAdditivePipe</name>
<message>
<location filename="../../Command.cpp" line="1303"/>
<source>PartDesign</source>
<translation>PartDesign</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1304"/>
<source>Additive pipe</source>
<translation>Dodajna cev</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1305"/>
<source>Sweep a selected sketch along a path or to other profiles</source>
<translation>Povleči izbrano skico vzdolž poti ali do drugih prerezov</translation>
</message>
</context>
<context>
<name>CmdPartDesignBody</name>
<message>
<location filename="../../CommandBody.cpp" line="91"/>
<source>PartDesign</source>
<translation>PartDesign</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="92"/>
<source>Create body</source>
<translation>Ustvari telo</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="93"/>
<source>Create a new body and make it active</source>
<translation>Ustvari novo telo in ga naredi dejavnega</translation>
</message>
</context>
<context>
<name>CmdPartDesignBoolean</name>
<message>
<location filename="../../Command.cpp" line="2235"/>
<source>PartDesign</source>
<translation>PartDesign</translation>
</message>
<message>
<location filename="../../Command.cpp" line="2236"/>
<source>Boolean operation</source>
<translation>Logična operacija</translation>
</message>
<message>
<location filename="../../Command.cpp" line="2237"/>
<source>Boolean operation with two or more bodies</source>
<translation>Logična opeacija z dvema ali več telesi</translation>
</message>
</context>
<context>
<name>CmdPartDesignCS</name>
<message>
<location filename="../../Command.cpp" line="242"/>
<source>PartDesign</source>
<translation>PartDesign</translation>
</message>
<message>
<location filename="../../Command.cpp" line="243"/>
<source>Create a local coordinate system</source>
<translation>Ustvari krajeven koordinatni sistem</translation>
</message>
<message>
<location filename="../../Command.cpp" line="244"/>
<source>Create a new local coordinate system</source>
<translation>Ustvari nov krajevni koordinatni sistem</translation>
</message>
</context>
<context>
<name>CmdPartDesignChamfer</name>
<message>
<location filename="../../Command.cpp" line="1635"/>
<source>PartDesign</source>
<translation>PartDesign</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1636"/>
<source>Chamfer</source>
<translation>Prisekaj</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1637"/>
<source>Chamfer the selected edges of a shape</source>
<translation>Prisekaj izbrane robove oblike</translation>
</message>
</context>
<context>
<name>CmdPartDesignClone</name>
<message>
<location filename="../../Command.cpp" line="339"/>
<source>PartDesign</source>
<translation>PartDesign</translation>
</message>
<message>
<location filename="../../Command.cpp" line="340"/>
<source>Create a clone</source>
<translation>Ustvari dvojnika</translation>
</message>
<message>
<location filename="../../Command.cpp" line="341"/>
<source>Create a new clone</source>
<translation>Ustvari nov dvojnik</translation>
</message>
</context>
<context>
<name>CmdPartDesignDraft</name>
<message>
<location filename="../../Command.cpp" line="1664"/>
<source>PartDesign</source>
<translation>PartDesign</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1665"/>
<source>Draft</source>
<translation>Ugrez</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1666"/>
<source>Make a draft on a face</source>
<translation>Ustvari nagib na ploskvi</translation>
</message>
</context>
<context>
<name>CmdPartDesignDuplicateSelection</name>
<message>
<location filename="../../CommandBody.cpp" line="632"/>
<source>PartDesign</source>
<translation>PartDesign</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="633"/>
<source>Duplicate selected object</source>
<translation>Podvoji izbrani predmet</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="634"/>
<source>Duplicates the selected object and adds it to the active body</source>
<translation>Podvoji izbran predmet in ga doda dejavnemu telesu</translation>
</message>
</context>
<context>
<name>CmdPartDesignFillet</name>
<message>
<location filename="../../Command.cpp" line="1607"/>
<source>PartDesign</source>
<translation>PartDesign</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1608"/>
<source>Fillet</source>
<translation>Zaokrožitev</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1609"/>
<source>Make a fillet on an edge, face or body</source>
<translation>Ustvari zaokrožitev roba, ploskve ali telesa</translation>
</message>
</context>
<context>
<name>CmdPartDesignGroove</name>
<message>
<location filename="../../Command.cpp" line="1235"/>
<source>PartDesign</source>
<translation>PartDesign</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1236"/>
<source>Groove</source>
<translation>Žlebič</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1237"/>
<source>Groove a selected sketch</source>
<translation>Vžlebiči izbrani očrt</translation>
</message>
</context>
<context>
<name>CmdPartDesignHole</name>
<message>
<location filename="../../Command.cpp" line="1129"/>
<source>PartDesign</source>
<translation>PartDesign</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1130"/>
<source>Hole</source>
<translation>Luknja</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1131"/>
<source>Create a hole with the selected sketch</source>
<translation>Z izbranim očrtom naredi luknjo</translation>
</message>
</context>
<context>
<name>CmdPartDesignLine</name>
<message>
<location filename="../../Command.cpp" line="186"/>
<source>PartDesign</source>
<translation>PartDesign</translation>
</message>
<message>
<location filename="../../Command.cpp" line="187"/>
<source>Create a datum line</source>
<translation>Ustvari referenčno črto</translation>
</message>
<message>
<location filename="../../Command.cpp" line="188"/>
<source>Create a new datum line</source>
<translation>Ustvari novo sklicno črto</translation>
</message>
</context>
<context>
<name>CmdPartDesignLinearPattern</name>
<message>
<location filename="../../Command.cpp" line="1941"/>
<source>PartDesign</source>
<translation>PartDesign</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1942"/>
<source>LinearPattern</source>
<translation>Premočrtni vzorec</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1943"/>
<source>Create a linear pattern feature</source>
<translation>Ustvari premočrtni vzorec</translation>
</message>
</context>
<context>
<name>CmdPartDesignMigrate</name>
<message>
<location filename="../../CommandBody.cpp" line="338"/>
<source>PartDesign</source>
<translation>PartDesign</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="339"/>
<source>Migrate</source>
<translation>Selitev</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="340"/>
<source>Migrate document to the modern PartDesign workflow</source>
<translation>Preseli dokument v sodobni PartDesignov delotok</translation>
</message>
</context>
<context>
<name>CmdPartDesignMirrored</name>
<message>
<location filename="../../Command.cpp" line="1877"/>
<source>PartDesign</source>
<translation>PartDesign</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1878"/>
<source>Mirrored</source>
<translation>Zrcaljeno</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1879"/>
<source>Create a mirrored feature</source>
<translation>Ustvari zrcaljeno značilnost</translation>
</message>
</context>
<context>
<name>CmdPartDesignMoveFeature</name>
<message>
<location filename="../../CommandBody.cpp" line="688"/>
<source>PartDesign</source>
<translation>PartDesign</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="689"/>
<source>Move object to other body</source>
<translation>Premakni predmet k drugemu telesu</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="690"/>
<source>Moves the selected object to another body</source>
<translation>Premakne izbran predmet k drugemu telesu</translation>
</message>
</context>
<context>
<name>CmdPartDesignMoveFeatureInTree</name>
<message>
<location filename="../../CommandBody.cpp" line="851"/>
<source>PartDesign</source>
<translation>PartDesign</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="852"/>
<source>Move object after other object</source>
<translation>Premakni en predmet za drugega</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="853"/>
<source>Moves the selected object and insert it after another object</source>
<translation>Premakne izbran predmet in ga postavi za drugi predmet</translation>
</message>
</context>
<context>
<name>CmdPartDesignMoveTip</name>
<message>
<location filename="../../CommandBody.cpp" line="552"/>
<source>PartDesign</source>
<translation>PartDesign</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="553"/>
<source>Set tip</source>
<translation>Določi vrh</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="554"/>
<source>Move the tip of the body</source>
<translation>Premakni vrh telesa</translation>
</message>
</context>
<context>
<name>CmdPartDesignMultiTransform</name>
<message>
<location filename="../../Command.cpp" line="2114"/>
<source>PartDesign</source>
<translation>PartDesign</translation>
</message>
<message>
<location filename="../../Command.cpp" line="2115"/>
<source>Create MultiTransform</source>
<translation>Ustvari VečkratnoPreoblikovanje</translation>
</message>
<message>
<location filename="../../Command.cpp" line="2116"/>
<source>Create a multitransform feature</source>
<translation>Ustvari večkratno preoblikovanje</translation>
</message>
</context>
<context>
<name>CmdPartDesignNewSketch</name>
<message>
<location filename="../../Command.cpp" line="400"/>
<source>PartDesign</source>
<translation>PartDesign</translation>
</message>
<message>
<location filename="../../Command.cpp" line="401"/>
<source>Create sketch</source>
<translation>Ustvari očrt</translation>
</message>
<message>
<location filename="../../Command.cpp" line="402"/>
<source>Create a new sketch</source>
<translation>Ustvari nov očrt</translation>
</message>
</context>
<context>
<name>CmdPartDesignPad</name>
<message>
<location filename="../../Command.cpp" line="1031"/>
<source>PartDesign</source>
<translation>PartDesign</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1032"/>
<source>Pad</source>
<translation>Izboklina</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1033"/>
<source>Pad a selected sketch</source>
<translation>Izboči izbrano skico</translation>
</message>
</context>
<context>
<name>CmdPartDesignPlane</name>
<message>
<location filename="../../Command.cpp" line="158"/>
<source>PartDesign</source>
<translation>PartDesign</translation>
</message>
<message>
<location filename="../../Command.cpp" line="159"/>
<source>Create a datum plane</source>
<translation>Ustvari sklicno ravnino</translation>
</message>
<message>
<location filename="../../Command.cpp" line="160"/>
<source>Create a new datum plane</source>
<translation>Ustvari novo sklicno ravnino</translation>
</message>
</context>
<context>
<name>CmdPartDesignPocket</name>
<message>
<location filename="../../Command.cpp" line="1082"/>
<source>PartDesign</source>
<translation>PartDesign</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1083"/>
<source>Pocket</source>
<translation>Ugrez</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1084"/>
<source>Create a pocket with the selected sketch</source>
<translation>Ustvari ugrez z izbranim očrtom</translation>
</message>
</context>
<context>
<name>CmdPartDesignPoint</name>
<message>
<location filename="../../Command.cpp" line="214"/>
<source>PartDesign</source>
<translation>PartDesign</translation>
</message>
<message>
<location filename="../../Command.cpp" line="215"/>
<source>Create a datum point</source>
<translation>Ustvari sklicno točko</translation>
</message>
<message>
<location filename="../../Command.cpp" line="216"/>
<source>Create a new datum point</source>
<translation>Ustvari novo sklicno točko</translation>
</message>
</context>
<context>
<name>CmdPartDesignPolarPattern</name>
<message>
<location filename="../../Command.cpp" line="2007"/>
<source>PartDesign</source>
<translation>PartDesign</translation>
</message>
<message>
<location filename="../../Command.cpp" line="2008"/>
<source>PolarPattern</source>
<translation>Krožni vzorec</translation>
</message>
<message>
<location filename="../../Command.cpp" line="2009"/>
<source>Create a polar pattern feature</source>
<translation>Ustvari krožni vzorec</translation>
</message>
</context>
<context>
<name>CmdPartDesignRevolution</name>
<message>
<location filename="../../Command.cpp" line="1175"/>
<source>PartDesign</source>
<translation>PartDesign</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1176"/>
<source>Revolution</source>
<translation>Zavrti</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1177"/>
<source>Revolve a selected sketch</source>
<translation>Zavrti izbrano skico</translation>
</message>
</context>
<context>
<name>CmdPartDesignScaled</name>
<message>
<location filename="../../Command.cpp" line="2074"/>
<source>PartDesign</source>
<translation>PartDesign</translation>
</message>
<message>
<location filename="../../Command.cpp" line="2075"/>
<source>Scaled</source>
<translation>Povečava</translation>
</message>
<message>
<location filename="../../Command.cpp" line="2076"/>
<source>Create a scaled feature</source>
<translation>Ustvari povečavo značilnosti</translation>
</message>
</context>
<context>
<name>CmdPartDesignShapeBinder</name>
<message>
<location filename="../../Command.cpp" line="274"/>
<source>PartDesign</source>
<translation>PartDesign</translation>
</message>
<message>
<location filename="../../Command.cpp" line="275"/>
<source>Create a shape binder</source>
<translation>Ustvari povezovalnik oblik</translation>
</message>
<message>
<location filename="../../Command.cpp" line="276"/>
<source>Create a new shape binder</source>
<translation>Ustvari nov povezovalnik oblik</translation>
</message>
</context>
<context>
<name>CmdPartDesignSubtractiveLoft</name>
<message>
<location filename="../../Command.cpp" line="1453"/>
<source>PartDesign</source>
<translation>PartDesign</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1454"/>
<source>Subtractive loft</source>
<translation>Odvzemno ostrešje</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1455"/>
<source>Loft a selected profile through other profile sections and remove it from the body</source>
<translation>Ostreši izbran prerez preko drugih prerezov in ga odstrani iz telesa</translation>
</message>
</context>
<context>
<name>CmdPartDesignSubtractivePipe</name>
<message>
<location filename="../../Command.cpp" line="1353"/>
<source>PartDesign</source>
<translation>PartDesign</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1354"/>
<source>Subtractive pipe</source>
<translation>Odvzemna cev</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1355"/>
<source>Sweep a selected sketch along a path or to other profiles and remove it from the body</source>
<translation>Povleči izbrano skico vzdolž poti ali do drugih prerezov in odstrani obliko iz telesa</translation>
</message>
</context>
<context>
<name>CmdPartDesignThickness</name>
<message>
<location filename="../../Command.cpp" line="1721"/>
<source>PartDesign</source>
<translation>PartDesign</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1722"/>
<source>Thickness</source>
<translation>Debelina</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1723"/>
<source>Make a thick solid</source>
<translation>Ustvari telo z debelino</translation>
</message>
</context>
<context>
<name>CmdPrimtiveCompAdditive</name>
<message>
<location filename="../../CommandPrimitive.cpp" line="68"/>
<source>PartDesign</source>
<translation>PartDesign</translation>
</message>
<message>
<location filename="../../CommandPrimitive.cpp" line="69"/>
<location filename="../../CommandPrimitive.cpp" line="70"/>
<source>Create an additive primitive</source>
<translation>Dodaj osnovnik</translation>
</message>
<message>
<location filename="../../CommandPrimitive.cpp" line="191"/>
<source>Additive Box</source>
<translation>Dodajni kvader</translation>
</message>
<message>
<location filename="../../CommandPrimitive.cpp" line="195"/>
<source>Additive Cylinder</source>
<translation>Dodajni valj</translation>
</message>
<message>
<location filename="../../CommandPrimitive.cpp" line="199"/>
<source>Additive Sphere</source>
<translation>Dodajna krogla</translation>
</message>
<message>
<location filename="../../CommandPrimitive.cpp" line="203"/>
<source>Additive Cone</source>
<translation>Dodajni stožec</translation>
</message>
<message>
<location filename="../../CommandPrimitive.cpp" line="207"/>
<source>Additive Ellipsoid</source>
<translation>Dodajni elipsoid</translation>
</message>
<message>
<location filename="../../CommandPrimitive.cpp" line="211"/>
<source>Additive Torus</source>
<translation>Dodajni svitek</translation>
</message>
<message>
<location filename="../../CommandPrimitive.cpp" line="215"/>
<source>Additive Prism</source>
<translation>Dodajna prizma</translation>
</message>
<message>
<location filename="../../CommandPrimitive.cpp" line="219"/>
<source>Additive Wedge</source>
<translation>Dodajni klin</translation>
</message>
</context>
<context>
<name>CmdPrimtiveCompSubtractive</name>
<message>
<location filename="../../CommandPrimitive.cpp" line="235"/>
<source>PartDesign</source>
<translation>PartDesign</translation>
</message>
<message>
<location filename="../../CommandPrimitive.cpp" line="236"/>
<location filename="../../CommandPrimitive.cpp" line="237"/>
<source>Create a subtractive primitive</source>
<translation>Odvzemi osnovnik</translation>
</message>
<message>
<location filename="../../CommandPrimitive.cpp" line="356"/>
<source>Subtractive Box</source>
<translation>Odvzemni kvader</translation>
</message>
<message>
<location filename="../../CommandPrimitive.cpp" line="360"/>
<source>Subtractive Cylinder</source>
<translation>Odvzemni Valj</translation>
</message>
<message>
<location filename="../../CommandPrimitive.cpp" line="364"/>
<source>Subtractive Sphere</source>
<translation>Odvzemna Krogla</translation>
</message>
<message>
<location filename="../../CommandPrimitive.cpp" line="368"/>
<source>Subtractive Cone</source>
<translation>Odvzemni Stožec</translation>
</message>
<message>
<location filename="../../CommandPrimitive.cpp" line="372"/>
<source>Subtractive Ellipsoid</source>
<translation>Odvzemni Elipsoid</translation>
</message>
<message>
<location filename="../../CommandPrimitive.cpp" line="376"/>
<source>Subtractive Torus</source>
<translation>Odvzemni Svitek</translation>
</message>
<message>
<location filename="../../CommandPrimitive.cpp" line="380"/>
<source>Subtractive Prism</source>
<translation>Odvzemna Prizma</translation>
</message>
<message>
<location filename="../../CommandPrimitive.cpp" line="384"/>
<source>Subtractive Wedge</source>
<translation>Odvzemni Klin</translation>
</message>
</context>
<context>
<name>FeaturePickDialog</name>
<message>
<location filename="../../FeaturePickDialog.cpp" line="45"/>
<source>Valid</source>
<translation>Veljavno</translation>
</message>
<message>
<location filename="../../FeaturePickDialog.cpp" line="46"/>
<source>Invalid shape</source>
<translation>Neveljavna oblika</translation>
</message>
<message>
<location filename="../../FeaturePickDialog.cpp" line="47"/>
<source>No wire in sketch</source>
<translation>V očrtu ni nobenega črtovja</translation>
</message>
<message>
<location filename="../../FeaturePickDialog.cpp" line="48"/>
<source>Sketch already used by other feature</source>
<translation>Očrt je že uporabljena za drugo značilnost</translation>
</message>
<message>
<location filename="../../FeaturePickDialog.cpp" line="49"/>
<source>Sketch belongs to another Body feature</source>
<translation>Očrt pripada drugi značilnosti Telesa</translation>
</message>
<message>
<location filename="../../FeaturePickDialog.cpp" line="50"/>
<source>Base plane</source>
<translation>Osnovna ravnina</translation>
</message>
<message>
<location filename="../../FeaturePickDialog.cpp" line="51"/>
<source>Feature is located after the Tip feature</source>
<translation>Značilnost je za Vrhom</translation>
</message>
</context>
<context>
<name>Gui::TaskView::TaskWatcherCommands</name>
<message>
<location filename="../../Workbench.cpp" line="52"/>
<source>Face tools</source>
<translation>Orodja za ploskve</translation>
</message>
<message>
<location filename="../../Workbench.cpp" line="53"/>
<source>Sketch tools</source>
<translation>Očrtovalna orodja</translation>
</message>
<message>
<location filename="../../Workbench.cpp" line="54"/>
<source>Create Geometry</source>
<translation>Ustvari geometrijo</translation>
</message>
</context>
<context>
<name>InvoluteGearParameter</name>
<message>
<location filename="../../../InvoluteGearFeature.ui" line="14"/>
<source>Involute parameter</source>
<translation>Določilka evolvente</translation>
</message>
<message>
<location filename="../../../InvoluteGearFeature.ui" line="20"/>
<source>Number of teeth:</source>
<translation>Število zob:</translation>
</message>
<message>
<location filename="../../../InvoluteGearFeature.ui" line="40"/>
<source>Modules:</source>
<translation>Modul:</translation>
</message>
<message>
<location filename="../../../InvoluteGearFeature.ui" line="84"/>
<source>Pressure angle:</source>
<translation>Vpadni kot:</translation>
</message>
<message>
<location filename="../../../InvoluteGearFeature.ui" line="125"/>
<source>High precision:</source>
<translation>Visoka natančnost:</translation>
</message>
<message>
<location filename="../../../InvoluteGearFeature.ui" line="139"/>
<location filename="../../../InvoluteGearFeature.ui" line="166"/>
<source>True</source>
<translation>Je</translation>
</message>
<message>
<location filename="../../../InvoluteGearFeature.ui" line="144"/>
<location filename="../../../InvoluteGearFeature.ui" line="171"/>
<source>False</source>
<translation>Ni</translation>
</message>
<message>
<location filename="../../../InvoluteGearFeature.ui" line="152"/>
<source>External gear:</source>
<translation>Zunanji zobnik:</translation>
</message>
</context>
<context>
<name>PartDesign::Groove</name>
<message>
<location filename="../../../App/FeatureGroove.cpp" line="106"/>
<source>The requested feature cannot be created. The reason may be that:
• the active Body does not contain a base shape, so there is no
material to be removed;
• the selected sketch does not belong to the active Body.</source>
<translation>Željene značilnosti ni bilo mogoče ustvariti. Razlog za to je lahko, da:
• dejavno telo ne vsebuje izhodiščne oblike, zato ni kaj odstraniti;
• izbrani očrt ne pripada dejavnemu telesu.</translation>
</message>
</context>
<context>
<name>PartDesign::Hole</name>
<message>
<location filename="../../../App/FeatureHole.cpp" line="956"/>
<source>The requested feature cannot be created. The reason may be that:
• the active Body does not contain a base shape, so there is no
material to be removed;
• the selected sketch does not belong to the active Body.</source>
<translation>Željene značilnosti ni bilo mogoče ustvariti. Razlog za to je lahko, da:
• dejavno telo ne vsebuje izhodiščne oblike, zato ni kaj odstraniti;
• izbrani očrt ne pripada dejavnemu telesu.</translation>
</message>
</context>
<context>
<name>PartDesign::Pocket</name>
<message>
<location filename="../../../App/FeaturePocket.cpp" line="121"/>
<source>The requested feature cannot be created. The reason may be that:
• the active Body does not contain a base shape, so there is no
material to be removed;
• the selected sketch does not belong to the active Body.</source>
<translation>Željene značilnosti ni bilo mogoče ustvariti. Razlog za to je lahko, da:
• dejavno telo ne vsebuje izhodiščne oblike, zato ni kaj odstraniti;
• izbrani očrt ne pripada dejavnemu telesu.</translation>
</message>
</context>
<context>
<name>PartDesignGui::DlgPrimitives</name>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="14"/>
<source>Geometric Primitives</source>
<translation>Geometrijski osnovniki</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="54"/>
<location filename="../../TaskPrimitiveParameters.ui" line="161"/>
<source>Width:</source>
<translation>Širina:</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="61"/>
<location filename="../../TaskPrimitiveParameters.ui" line="154"/>
<source>Length:</source>
<translation>Dolžina:</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="147"/>
<location filename="../../TaskPrimitiveParameters.ui" line="258"/>
<location filename="../../TaskPrimitiveParameters.ui" line="378"/>
<location filename="../../TaskPrimitiveParameters.ui" line="913"/>
<location filename="../../TaskPrimitiveParameters.ui" line="1152"/>
<source>Height:</source>
<translation>Višina:</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="198"/>
<location filename="../../TaskPrimitiveParameters.ui" line="305"/>
<location filename="../../TaskPrimitiveParameters.ui" line="1159"/>
<source>Angle:</source>
<translation>Kót:</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="251"/>
<location filename="../../TaskPrimitiveParameters.ui" line="525"/>
<location filename="../../TaskPrimitiveParameters.ui" line="1138"/>
<location filename="../../TaskPrimitiveParameters.ui" line="1260"/>
<location filename="../../TaskPrimitiveParameters.ui" line="1325"/>
<source>Radius:</source>
<translation>Polmer:</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="385"/>
<location filename="../../TaskPrimitiveParameters.ui" line="585"/>
<location filename="../../TaskPrimitiveParameters.ui" line="839"/>
<source>Radius 1:</source>
<translation>Polmer 1:</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="392"/>
<location filename="../../TaskPrimitiveParameters.ui" line="592"/>
<location filename="../../TaskPrimitiveParameters.ui" line="832"/>
<source>Radius 2:</source>
<translation>Polmer 2:</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="455"/>
<location filename="../../TaskPrimitiveParameters.ui" line="646"/>
<source>U parameter:</source>
<translation>Določilka U:</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="462"/>
<source>V parameters:</source>
<translation>Določilke V:</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="599"/>
<source>Radius 3:</source>
<translation>Polmer 3:</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="669"/>
<location filename="../../TaskPrimitiveParameters.ui" line="759"/>
<source>V parameter:</source>
<translation>Določilka V:</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="736"/>
<source>U Parameter:</source>
<translation>Določilka U:</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="886"/>
<location filename="../../TaskPrimitiveParameters.ui" line="1768"/>
<source>Polygon:</source>
<translation>Mnogokotnik:</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="906"/>
<location filename="../../TaskPrimitiveParameters.ui" line="1788"/>
<source>Circumradius:</source>
<translation>Polmer očrtane krožnice:</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="957"/>
<source>X min/max:</source>
<translation>Najv./najm. X:</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="964"/>
<source>Y min/max:</source>
<translation>Najv./najm. Y:</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="971"/>
<source>Z min/max:</source>
<translation>Najv./najm. Z:</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="978"/>
<source>X2 min/max:</source>
<translation>Najv./najm. X2:</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="985"/>
<source>Z2 min/max:</source>
<translation>Najv./najm. Z2:</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="1145"/>
<source>Pitch:</source>
<translation>Naklon:</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="1166"/>
<source>Coordinate system:</source>
<translation>Koordinatni sistem:</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="1174"/>
<source>Right-handed</source>
<translation>Desnosučni</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="1179"/>
<source>Left-handed</source>
<translation>Levosučni</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="1267"/>
<source>Growth:</source>
<translation>Rast:</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="1274"/>
<source>Number of rotations:</source>
<translation>Število vrtljajev:</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="1332"/>
<location filename="../../TaskPrimitiveParameters.ui" line="1435"/>
<source>Angle 1:</source>
<translation>Kot 1:</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="1339"/>
<location filename="../../TaskPrimitiveParameters.ui" line="1442"/>
<source>Angle 2:</source>
<translation>Kot 2:</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="1393"/>
<source>From three points</source>
<translation>Iz treh točk</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="1421"/>
<source>Major radius:</source>
<translation>Veliki polmer:</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="1428"/>
<source>Minor radius:</source>
<translation>Mali polmer:</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="1510"/>
<location filename="../../TaskPrimitiveParameters.ui" line="1591"/>
<location filename="../../TaskPrimitiveParameters.ui" line="1645"/>
<source>X:</source>
<translation>X:</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="1520"/>
<location filename="../../TaskPrimitiveParameters.ui" line="1601"/>
<location filename="../../TaskPrimitiveParameters.ui" line="1655"/>
<source>Y:</source>
<translation>Y:</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="1530"/>
<location filename="../../TaskPrimitiveParameters.ui" line="1611"/>
<location filename="../../TaskPrimitiveParameters.ui" line="1665"/>
<source>Z:</source>
<translation>Z:</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="1628"/>
<source>End point</source>
<translation>Končna točka</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.ui" line="1638"/>
<source>Start point</source>
<translation>Začetna točka</translation>
</message>
</context>
<context>
<name>PartDesignGui::DlgReference</name>
<message>
<location filename="../../DlgReference.ui" line="14"/>
<source>Reference</source>
<translation>Osnova</translation>
</message>
<message>
<location filename="../../DlgReference.ui" line="20"/>
<source>You selected geometries which are not part of the active body. Please define how to handle those selections. If you do not want those references cancel the command.</source>
<translation>Izbrali ste geometrije, ki niso deli dejavnega telesa. Določite, kako naj se ta izbor obravnava. Če teh sklicev nočete, prekličite ukaz.</translation>
</message>
<message>
<location filename="../../DlgReference.ui" line="42"/>
<source>Make independent copy (recommended)</source>
<translation>Naredi neodvisen dvojnik (priporočeno)</translation>
</message>
<message>
<location filename="../../DlgReference.ui" line="52"/>
<source>Make dependent copy</source>
<translation>Naredi odvisen dvojnik</translation>
</message>
<message>
<location filename="../../DlgReference.ui" line="59"/>
<source>Create cross-reference</source>
<translation>Ustvari navzkrižni sklic</translation>
</message>
</context>
<context>
<name>PartDesignGui::NoDependentsSelection</name>
<message>
<location filename="../../ReferenceSelection.cpp" line="183"/>
<source>Selecting this will cause circular dependency.</source>
<translation>Če izberete to, povzročite krožno odvisnost.</translation>
</message>
</context>
<context>
<name>PartDesignGui::TaskBooleanParameters</name>
<message>
<location filename="../../TaskBooleanParameters.ui" line="14"/>
<source>Form</source>
<translation>Oblika</translation>
</message>
<message>
<location filename="../../TaskBooleanParameters.ui" line="22"/>
<source>Add body</source>
<translation>Dodaj telo</translation>
</message>
<message>
<location filename="../../TaskBooleanParameters.ui" line="32"/>
<source>Remove body</source>
<translation>Odstrani telo</translation>
</message>
<message>
<location filename="../../TaskBooleanParameters.ui" line="48"/>
<source>Fuse</source>
<translation>Zlij</translation>
</message>
<message>
<location filename="../../TaskBooleanParameters.ui" line="53"/>
<source>Cut</source>
<translation>Izreži</translation>
</message>
<message>
<location filename="../../TaskBooleanParameters.ui" line="58"/>
<source>Common</source>
<translation>Presek</translation>
</message>
<message>
<location filename="../../TaskBooleanParameters.cpp" line="55"/>
<source>Boolean parameters</source>
<translation>Določilke logičnih vrednosti</translation>
</message>
<message>
<location filename="../../TaskBooleanParameters.cpp" line="81"/>
<source>Remove</source>
<translation>Odstrani</translation>
</message>
</context>
<context>
<name>PartDesignGui::TaskBoxPrimitives</name>
<message>
<location filename="../../TaskPrimitiveParameters.cpp" line="54"/>
<source>Primitive parameters</source>
<translation>Določilke osnovnika</translation>
</message>
<message>
<location filename="../../TaskPrimitiveParameters.cpp" line="646"/>
<source>Create primitive</source>
<translation>Ustvari osnovnik</translation>
</message>
</context>
<context>
<name>PartDesignGui::TaskChamferParameters</name>
<message>
<location filename="../../TaskChamferParameters.ui" line="14"/>
<source>Form</source>
<translation>Oblika</translation>
</message>
<message>
<location filename="../../TaskChamferParameters.ui" line="22"/>
<source>Add ref</source>
<translation>Dodaj sklic</translation>
</message>
<message>
<location filename="../../TaskChamferParameters.ui" line="32"/>
<source>Remove ref</source>
<translation>Odstrani sklic</translation>
</message>
<message>
<location filename="../../TaskChamferParameters.ui" line="50"/>
<source>Size:</source>
<translation>Velikost:</translation>
</message>
<message>
<location filename="../../TaskChamferParameters.cpp" line="87"/>
<source>Remove</source>
<translation>Odstrani</translation>
</message>
</context>
<context>
<name>PartDesignGui::TaskDatumParameters</name>
<message>
<location filename="../../TaskDatumParameters.cpp" line="73"/>
<source> parameters</source>
<translation> določilke</translation>
</message>
</context>
<context>
<name>PartDesignGui::TaskDlgBooleanParameters</name>
<message>
<location filename="../../TaskBooleanParameters.cpp" line="327"/>
<source>Empty body list</source>
<translation>Prazen seznam teles</translation>
</message>
<message>
<location filename="../../TaskBooleanParameters.cpp" line="328"/>
<source>The body list cannot be empty</source>
<translation>Seznam teles ne more biti prazen</translation>
</message>
<message>
<location filename="../../TaskBooleanParameters.cpp" line="339"/>
<source>Boolean: Accept: Input error</source>
<translation>Logične vrednosti: Potrdi: Vnosna napaka</translation>
</message>
</context>
<context>
<name>PartDesignGui::TaskDlgDatumParameters</name>
<message>
<location filename="../../TaskDatumParameters.cpp" line="121"/>
<source>Incompatible reference set</source>
<translation>Nezdružljiv nabor sklicev</translation>
</message>
<message>
<location filename="../../TaskDatumParameters.cpp" line="122"/>
<source>There is no attachment mode that fits the current set of references. If you choose to continue, the feature will remain where it is now, and will not be moved as the references change. Continue?</source>
<translation>Noben način pripenjanja ne ustreza trenutnemu naboru sklicev. Če se odločite za nadaljevanje, bo zmožnost ostala, kjer je, in se ne bo premaknila pri spreminjanju sklica. Želite nadaljevati?</translation>
</message>
</context>
<context>
<name>PartDesignGui::TaskDlgFeatureParameters</name>
<message>
<location filename="../../TaskFeatureParameters.cpp" line="140"/>
<source>Input error</source>
<translation>Napaka vnosa</translation>
</message>
</context>
<context>
<name>PartDesignGui::TaskDlgPipeParameters</name>
<message>
<location filename="../../TaskPipeParameters.cpp" line="886"/>
<source>Input error</source>
<translation>Napaka vnosa</translation>
</message>
</context>
<context>
<name>PartDesignGui::TaskDlgShapeBinder</name>
<message>
<location filename="../../TaskShapeBinder.cpp" line="331"/>
<source>Input error</source>
<translation>Napaka vnosa</translation>
</message>
</context>
<context>
<name>PartDesignGui::TaskDraftParameters</name>
<message>
<location filename="../../TaskDraftParameters.ui" line="14"/>
<source>Form</source>
<translation>Oblika</translation>
</message>
<message>
<location filename="../../TaskDraftParameters.ui" line="22"/>
<source>Add face</source>
<translation>Dodaj ploskev</translation>
</message>
<message>
<location filename="../../TaskDraftParameters.ui" line="32"/>
<source>Remove face</source>
<translation>Odstrani ploskev</translation>
</message>
<message>
<location filename="../../TaskDraftParameters.ui" line="49"/>
<source>Draft angle</source>
<translation>Nagibni kot</translation>
</message>
<message>
<location filename="../../TaskDraftParameters.ui" line="79"/>
<source>Neutral plane</source>
<translation>Nevtralna ravnina</translation>
</message>
<message>
<location filename="../../TaskDraftParameters.ui" line="96"/>
<source>Pull direction</source>
<translation>Smer vlečenja</translation>
</message>
<message>
<location filename="../../TaskDraftParameters.ui" line="111"/>
<source>Reverse pull direction</source>
<translation>Obratna smer vlečenja</translation>
</message>
<message>
<location filename="../../TaskDraftParameters.cpp" line="100"/>
<source>Remove</source>
<translation>Odstrani</translation>
</message>
</context>
<context>
<name>PartDesignGui::TaskFeaturePick</name>
<message>
<location filename="../../TaskFeaturePick.ui" line="14"/>
<source>Form</source>
<translation>Oblika</translation>
</message>
<message>
<location filename="../../TaskFeaturePick.ui" line="23"/>
<source>Allow used features</source>
<translation>Omogoči uporabljene značilke</translation>
</message>
<message>
<location filename="../../TaskFeaturePick.ui" line="30"/>
<source>Allow external features</source>
<translation>Dovolji zunanje zmožnosti</translation>
</message>
<message>
<location filename="../../TaskFeaturePick.ui" line="42"/>
<source>From other bodies of the same part</source>
<translation>Iz drugih teles istega dela</translation>
</message>
<message>
<location filename="../../TaskFeaturePick.ui" line="49"/>
<source>From different parts or free features</source>
<translation>Iz različnih delov ali prostih zmožnosti</translation>
</message>
<message>
<location filename="../../TaskFeaturePick.ui" line="66"/>
<source>Make independent copy (recommended)</source>
<translation>Naredi neodvisen dvojnik (priporočeno)</translation>
</message>
<message>
<location filename="../../TaskFeaturePick.ui" line="79"/>
<source>Make dependent copy</source>
<translation>Naredi odvisen dvojnik</translation>
</message>
<message>
<location filename="../../TaskFeaturePick.ui" line="89"/>
<source>Create cross-reference</source>
<translation>Ustvari navzkrižni sklic</translation>
</message>
<message>
<location filename="../../TaskFeaturePick.cpp" line="65"/>
<source>Valid</source>
<translation>Veljavno</translation>
</message>
<message>
<location filename="../../TaskFeaturePick.cpp" line="66"/>
<source>Invalid shape</source>
<translation>Neveljavna oblika</translation>
</message>
<message>
<location filename="../../TaskFeaturePick.cpp" line="67"/>
<source>No wire in sketch</source>
<translation>V očrtu ni nobenega črtovja</translation>
</message>
<message>
<location filename="../../TaskFeaturePick.cpp" line="68"/>
<source>Sketch already used by other feature</source>
<translation>Očrt je že uporabljen za drugo značilnost</translation>
</message>
<message>
<location filename="../../TaskFeaturePick.cpp" line="69"/>
<source>Belongs to another body</source>
<translation>Pripada drugemu telesu</translation>
</message>
<message>
<location filename="../../TaskFeaturePick.cpp" line="70"/>
<source>Belongs to another part</source>
<translation>Pripada drugemu delu</translation>
</message>
<message>
<location filename="../../TaskFeaturePick.cpp" line="71"/>
<source>Doesn't belong to any body</source>
<translation>Ne pripada nobenemu telesu</translation>
</message>
<message>
<location filename="../../TaskFeaturePick.cpp" line="72"/>
<source>Base plane</source>
<translation>Osnovna ravnina</translation>
</message>
<message>
<location filename="../../TaskFeaturePick.cpp" line="73"/>
<source>Feature is located after the tip feature</source>
<translation>Značilnost je za Vrhom</translation>
</message>
<message>
<location filename="../../TaskFeaturePick.cpp" line="83"/>
<source>Select feature</source>
<translation>Izberi značilnost</translation>
</message>
</context>
<context>
<name>PartDesignGui::TaskFilletParameters</name>
<message>
<location filename="../../TaskFilletParameters.ui" line="14"/>
<source>Form</source>
<translation>Oblika</translation>
</message>
<message>
<location filename="../../TaskFilletParameters.ui" line="22"/>
<source>Add ref</source>
<translation>Dodaj sklic</translation>
</message>
<message>
<location filename="../../TaskFilletParameters.ui" line="32"/>
<source>Remove ref</source>
<translation>Odstrani sklic</translation>
</message>
<message>
<location filename="../../TaskFilletParameters.ui" line="49"/>
<source>Radius:</source>
<translation>Polmer:</translation>
</message>
<message>
<location filename="../../TaskFilletParameters.cpp" line="87"/>
<source>Remove</source>
<translation>Odstrani</translation>
</message>
</context>
<context>
<name>PartDesignGui::TaskHoleParameters</name>
<message>
<location filename="../../TaskHoleParameters.cpp" line="48"/>
<source>Hole parameters</source>
<translation>Določilke luknje</translation>
</message>
<message>
<location filename="../../TaskHoleParameters.cpp" line="69"/>
<source>None</source>
<translation>Brez</translation>
</message>
<message>
<location filename="../../TaskHoleParameters.cpp" line="70"/>
<source>ISO metric coarse profile</source>
<translation>Grobi metrični ISO profil</translation>
</message>
<message>
<location filename="../../TaskHoleParameters.cpp" line="71"/>
<source>ISO metric fine profile</source>
<translation>Podrobni metrični ISO profil</translation>
</message>
<message>
<location filename="../../TaskHoleParameters.cpp" line="72"/>
<source>UTS coarse profile</source>
<translation>Grobi UTS profil</translation>
</message>
<message>
<location filename="../../TaskHoleParameters.cpp" line="73"/>
<source>UTS fine profile</source>
<translation>Podrobni UTS profil</translation>
</message>
<message>
<location filename="../../TaskHoleParameters.cpp" line="74"/>
<source>UTS extra fine profile</source>
<translation>Izredno podroben UTS profil</translation>
</message>
</context>
<context>
<name>PartDesignGui::TaskLinearPatternParameters</name>
<message>
<location filename="../../TaskLinearPatternParameters.ui" line="14"/>
<source>Form</source>
<translation>Oblika</translation>
</message>
<message>
<location filename="../../TaskLinearPatternParameters.ui" line="22"/>
<source>Add feature</source>
<translation>Dodaj značilnost</translation>
</message>
<message>
<location filename="../../TaskLinearPatternParameters.ui" line="32"/>
<source>Remove feature</source>
<translation>Odstrani značilnost</translation>
</message>
<message>
<location filename="../../TaskLinearPatternParameters.ui" line="49"/>
<source>Direction</source>
<translation>Smer</translation>
</message>
<message>
<location filename="../../TaskLinearPatternParameters.ui" line="61"/>
<source>Reverse direction</source>
<translation>Obratna smer</translation>
</message>
<message>
<location filename="../../TaskLinearPatternParameters.ui" line="70"/>
<source>Length</source>
<translation>Dolžina</translation>
</message>
<message>
<location filename="../../TaskLinearPatternParameters.ui" line="91"/>
<source>Occurrences</source>
<translation>Pojavitve</translation>
</message>
<message>
<location filename="../../TaskLinearPatternParameters.ui" line="105"/>
<source>OK</source>
<translation>Potrdi</translation>
</message>
<message>
<location filename="../../TaskLinearPatternParameters.ui" line="114"/>
<source>Update view</source>
<translation>Posodobi pogled</translation>
</message>
<message>
<location filename="../../TaskLinearPatternParameters.cpp" line="114"/>
<source>Remove</source>
<translation>Odstrani</translation>
</message>
<message>
<location filename="../../TaskLinearPatternParameters.cpp" line="328"/>
<source>Error</source>
<translation>Napaka</translation>
</message>
</context>
<context>
<name>PartDesignGui::TaskLoftParameters</name>
<message>
<location filename="../../TaskLoftParameters.ui" line="14"/>
<source>Form</source>
<translation>Oblika</translation>
</message>
<message>
<location filename="../../TaskLoftParameters.ui" line="20"/>
<source>Ruled surface</source>
<translation>Premonosna ploskev</translation>
</message>
<message>
<location filename="../../TaskLoftParameters.ui" line="27"/>
<source>Closed</source>
<translation>Zaprto</translation>
</message>
<message>
<location filename="../../TaskLoftParameters.ui" line="34"/>
<source>Profile</source>
<translation>Profil</translation>
</message>
<message>
<location filename="../../TaskLoftParameters.ui" line="42"/>
<source>Object</source>
<translation>Predmet</translation>
</message>
<message>
<location filename="../../TaskLoftParameters.ui" line="65"/>
<source>Add Section</source>
<translation>Dodaj Prerez</translation>
</message>
<message>
<location filename="../../TaskLoftParameters.ui" line="78"/>
<source>Remove Section</source>
<translation>Odstrani Prerez</translation>
</message>
<message>
<location filename="../../TaskLoftParameters.ui" line="113"/>
<source>Update view</source>
<translation>Posodobi pogled</translation>
</message>
<message>
<location filename="../../TaskLoftParameters.cpp" line="59"/>
<source>Loft parameters</source>
<translation>Določilke navlečka</translation>
</message>
<message>
<location filename="../../TaskLoftParameters.cpp" line="80"/>
<source>Remove</source>
<translation>Odstrani</translation>
</message>
</context>
<context>
<name>PartDesignGui::TaskMirroredParameters</name>
<message>
<location filename="../../TaskMirroredParameters.ui" line="14"/>
<source>Form</source>
<translation>Oblika</translation>
</message>
<message>
<location filename="../../TaskMirroredParameters.ui" line="22"/>
<source>Add feature</source>
<translation>Dodaj značilnost</translation>
</message>
<message>
<location filename="../../TaskMirroredParameters.ui" line="32"/>
<source>Remove feature</source>
<translation>Odstrani značilnost</translation>
</message>
<message>
<location filename="../../TaskMirroredParameters.ui" line="49"/>
<source>Plane</source>
<translation>Ravnina</translation>
</message>
<message>
<location filename="../../TaskMirroredParameters.ui" line="63"/>
<source>OK</source>
<translation>Potrdi</translation>
</message>
<message>
<location filename="../../TaskMirroredParameters.ui" line="72"/>
<source>Update view</source>
<translation>Posodobi pogled</translation>
</message>
<message>
<location filename="../../TaskMirroredParameters.cpp" line="111"/>
<source>Remove</source>
<translation>Odstrani</translation>
</message>
<message>
<location filename="../../TaskMirroredParameters.cpp" line="245"/>
<source>Error</source>
<translation>Napaka</translation>
</message>
</context>
<context>
<name>PartDesignGui::TaskMultiTransformParameters</name>
<message>
<location filename="../../TaskMultiTransformParameters.ui" line="14"/>
<source>Form</source>
<translation>Oblika</translation>
</message>
<message>
<location filename="../../TaskMultiTransformParameters.ui" line="22"/>
<source>Add feature</source>
<translation>Dodaj značilnost</translation>
</message>
<message>
<location filename="../../TaskMultiTransformParameters.ui" line="32"/>
<source>Remove feature</source>
<translation>Odstrani značilnost</translation>
</message>
<message>
<location filename="../../TaskMultiTransformParameters.ui" line="47"/>
<source>Transformations</source>
<translation>Preoblikovanja</translation>
</message>
<message>
<location filename="../../TaskMultiTransformParameters.ui" line="64"/>
<source>Update view</source>
<translation>Posodobi pogled</translation>
</message>
<message>
<location filename="../../TaskMultiTransformParameters.cpp" line="76"/>
<source>Remove</source>
<translation>Odstrani</translation>
</message>
<message>
<location filename="../../TaskMultiTransformParameters.cpp" line="82"/>
<source>Edit</source>
<translation>Uredi</translation>
</message>
<message>
<location filename="../../TaskMultiTransformParameters.cpp" line="86"/>
<source>Delete</source>
<translation>Izbriši</translation>
</message>
<message>
<location filename="../../TaskMultiTransformParameters.cpp" line="90"/>
<source>Add mirrored transformation</source>
<translation>Dodaj zrcalno preoblikovanje</translation>
</message>
<message>
<location filename="../../TaskMultiTransformParameters.cpp" line="94"/>
<source>Add linear pattern</source>
<translation>Dodaj premočrtni vzorec</translation>
</message>
<message>
<location filename="../../TaskMultiTransformParameters.cpp" line="98"/>
<source>Add polar pattern</source>
<translation>Dodaj krožni vzorec</translation>
</message>
<message>
<location filename="../../TaskMultiTransformParameters.cpp" line="102"/>
<source>Add scaled transformation</source>
<translation>Dodaj velikostno preoblikovanje</translation>
</message>
<message>
<location filename="../../TaskMultiTransformParameters.cpp" line="106"/>
<source>Move up</source>
<translation>Premakni gor</translation>
</message>
<message>
<location filename="../../TaskMultiTransformParameters.cpp" line="110"/>
<source>Move down</source>
<translation>Premakni dol</translation>
</message>
<message>
<location filename="../../TaskMultiTransformParameters.cpp" line="137"/>
<source>Right-click to add</source>
<translation>Desni klik za dodajanje</translation>
</message>
</context>
<context>
<name>PartDesignGui::TaskPadParameters</name>
<message>
<location filename="../../TaskPadParameters.ui" line="14"/>
<source>Form</source>
<translation>Oblika</translation>
</message>
<message>
<location filename="../../TaskPadParameters.ui" line="22"/>
<source>Type</source>
<translation>Vrsta</translation>
</message>
<message>
<location filename="../../TaskPadParameters.ui" line="30"/>
<location filename="../../TaskPadParameters.cpp" line="124"/>
<location filename="../../TaskPadParameters.cpp" line="412"/>
<source>Dimension</source>
<translation>Mera</translation>
</message>
<message>
<location filename="../../TaskPadParameters.ui" line="42"/>
<source>Length</source>
<translation>Dolžina</translation>
</message>
<message>
<location filename="../../TaskPadParameters.ui" line="60"/>
<source>Offset</source>
<translation>Odmik</translation>
</message>
<message>
<location filename="../../TaskPadParameters.ui" line="75"/>
<source>Symmetric to plane</source>
<translation>Simetrično na ravnino</translation>
</message>
<message>
<location filename="../../TaskPadParameters.ui" line="82"/>
<source>Reversed</source>
<translation>Obratno</translation>
</message>
<message>
<location filename="../../TaskPadParameters.ui" line="91"/>
<source>2nd length</source>
<translation>2. dolžina</translation>
</message>
<message>
<location filename="../../TaskPadParameters.ui" line="109"/>
<location filename="../../TaskPadParameters.cpp" line="112"/>
<location filename="../../TaskPadParameters.cpp" line="435"/>
<source>Face</source>
<translation>Ploskev</translation>
</message>
<message>
<location filename="../../TaskPadParameters.ui" line="128"/>
<source>Update view</source>
<translation>Posodobi pogled</translation>
</message>
<message>
<location filename="../../TaskPadParameters.cpp" line="56"/>
<source>Pad parameters</source>
<translation>Določilke izbokline</translation>
</message>
<message>
<location filename="../../TaskPadParameters.cpp" line="63"/>
<location filename="../../TaskPadParameters.cpp" line="420"/>
<source>No face selected</source>
<translation>Nobena ploskev ni izbrana</translation>
</message>
<message>
<location filename="../../TaskPadParameters.cpp" line="125"/>
<location filename="../../TaskPadParameters.cpp" line="413"/>
<source>To last</source>
<translation>Do zadnje</translation>
</message>
<message>
<location filename="../../TaskPadParameters.cpp" line="126"/>
<location filename="../../TaskPadParameters.cpp" line="414"/>
<source>To first</source>
<translation>Do prve</translation>
</message>
<message>
<location filename="../../TaskPadParameters.cpp" line="127"/>
<location filename="../../TaskPadParameters.cpp" line="415"/>
<source>Up to face</source>
<translation>Do ploskve</translation>
</message>
<message>
<location filename="../../TaskPadParameters.cpp" line="128"/>
<location filename="../../TaskPadParameters.cpp" line="416"/>
<source>Two dimensions</source>
<translation>Dve meri</translation>
</message>
</context>
<context>
<name>PartDesignGui::TaskPipeOrientation</name>
<message>
<location filename="../../TaskPipeOrientation.ui" line="14"/>
<source>Form</source>
<translation>Oblika</translation>
</message>
<message>
<location filename="../../TaskPipeOrientation.ui" line="22"/>
<source>Orientation mode</source>
<translation>Orientacijski način</translation>
</message>
<message>
<location filename="../../TaskPipeOrientation.ui" line="36"/>
<source>Standard</source>
<translation>Običajno</translation>
</message>
<message>
<location filename="../../TaskPipeOrientation.ui" line="41"/>
<source>Fixed</source>
<translation>Nepremičen</translation>
</message>
<message>
<location filename="../../TaskPipeOrientation.ui" line="46"/>
<source>Frenet</source>
<translation>Frenet</translation>
</message>
<message>
<location filename="../../TaskPipeOrientation.ui" line="51"/>
<source>Auxiliary</source>
<translation>Pomožno</translation>
</message>
<message>
<location filename="../../TaskPipeOrientation.ui" line="56"/>
<source>Binormal</source>
<translation>Binormala</translation>
</message>
<message>
<location filename="../../TaskPipeOrientation.ui" line="76"/>
<source>Curvelinear equivalence</source>
<translation>Krivočrtna enakovrednost</translation>
</message>
<message>
<location filename="../../TaskPipeOrientation.ui" line="83"/>
<source>Profile</source>
<translation>Profil</translation>
</message>
<message>
<location filename="../../TaskPipeOrientation.ui" line="91"/>
<source>Object</source>
<translation>Predmet</translation>
</message>
<message>
<location filename="../../TaskPipeOrientation.ui" line="117"/>
<source>Add Edge</source>
<translation>Dodaj rob</translation>
</message>
<message>
<location filename="../../TaskPipeOrientation.ui" line="136"/>
<source>Remove Edge</source>
<translation>Odstrani rob</translation>
</message>
<message>
<location filename="../../TaskPipeOrientation.ui" line="158"/>
<source>Set the constant binormal vector used to calculate the profiles orientation</source>
<translation>Nastavi stalni binormalni vektor za izračunavanje usmeritve prereza</translation>
</message>
<message>
<location filename="../../TaskPipeOrientation.ui" line="182"/>
<source>X</source>
<translation>X</translation>
</message>
<message>
<location filename="../../TaskPipeOrientation.ui" line="189"/>
<source>Y</source>
<translation>Y</translation>
</message>
<message>
<location filename="../../TaskPipeOrientation.ui" line="196"/>
<source>Z</source>
<translation>Z</translation>
</message>
<message>
<location filename="../../TaskPipeParameters.cpp" line="365"/>
<source>Section orientation</source>
<translation>Usmerjenost preseka</translation>
</message>
</context>
<context>
<name>PartDesignGui::TaskPipeParameters</name>
<message>
<location filename="../../TaskPipeParameters.ui" line="14"/>
<source>Form</source>
<translation>Oblika</translation>
</message>
<message>
<location filename="../../TaskPipeParameters.ui" line="20"/>
<source>Profile</source>
<translation>Profil</translation>
</message>
<message>
<location filename="../../TaskPipeParameters.ui" line="28"/>
<location filename="../../TaskPipeParameters.ui" line="93"/>
<source>Object</source>
<translation>Predmet</translation>
</message>
<message>
<location filename="../../TaskPipeParameters.ui" line="51"/>
<source>Corner Transition</source>
<translation>Kotni prehod</translation>
</message>
<message>
<location filename="../../TaskPipeParameters.ui" line="65"/>
<source>Transformed</source>
<translation>Preoblikovano</translation>
</message>
<message>
<location filename="../../TaskPipeParameters.ui" line="70"/>
<source>Right Corner</source>
<translation>Pravi kot</translation>
</message>
<message>
<location filename="../../TaskPipeParameters.ui" line="75"/>
<source>Round Corner</source>
<translation>Zaobljen vogal</translation>
</message>
<message>
<location filename="../../TaskPipeParameters.ui" line="85"/>
<source>Path to sweep along</source>
<translation>Pot vzdolž katere povleči</translation>
</message>
<message>
<location filename="../../TaskPipeParameters.ui" line="119"/>
<source>Add Edge</source>
<translation>Dodaj rob</translation>
</message>
<message>
<location filename="../../TaskPipeParameters.ui" line="138"/>
<source>Remove Edge</source>
<translation>Odstrani rob</translation>
</message>
<message>
<location filename="../../TaskPipeParameters.cpp" line="74"/>
<source>Pipe parameters</source>
<translation>Določilke cevi</translation>
</message>
</context>
<context>
<name>PartDesignGui::TaskPipeScaling</name>
<message>
<location filename="../../TaskPipeScaling.ui" line="14"/>
<source>Form</source>
<translation>Oblika</translation>
</message>
<message>
<location filename="../../TaskPipeScaling.ui" line="22"/>
<source>Transform mode</source>
<translation>Preoblikovalni način</translation>
</message>
<message>
<location filename="../../TaskPipeScaling.ui" line="36"/>
<source>Constant</source>
<translation>Nespremenljivo</translation>
</message>
<message>
<location filename="../../TaskPipeScaling.ui" line="41"/>
<source>Multisection</source>
<translation>Večkratni presek</translation>
</message>
<message>
<location filename="../../TaskPipeScaling.ui" line="64"/>
<source>Add Section</source>
<translation>Dodaj prerez</translation>
</message>
<message>
<location filename="../../TaskPipeScaling.ui" line="77"/>
<source>Remove Section</source>
<translation>Odstrani prerez</translation>
</message>
<message>
<location filename="../../TaskPipeParameters.cpp" line="617"/>
<source>Section transformation</source>
<translation>Preoblikovanje preseka</translation>
</message>
</context>
<context>
<name>PartDesignGui::TaskPocketParameters</name>
<message>
<location filename="../../TaskPocketParameters.ui" line="14"/>
<source>Form</source>
<translation>Oblika</translation>
</message>
<message>
<location filename="../../TaskPocketParameters.ui" line="22"/>
<source>Type</source>
<translation>Vrsta</translation>
</message>
<message>
<location filename="../../TaskPocketParameters.ui" line="30"/>
<location filename="../../TaskPocketParameters.cpp" line="119"/>
<location filename="../../TaskPocketParameters.cpp" line="439"/>
<source>Dimension</source>
<translation>Mera</translation>
</message>
<message>
<location filename="../../TaskPocketParameters.ui" line="42"/>
<source>Length</source>
<translation>Dolžina</translation>
</message>
<message>
<location filename="../../TaskPocketParameters.ui" line="60"/>
<source>Offset</source>
<translation>Odmik</translation>
</message>
<message>
<location filename="../../TaskPocketParameters.ui" line="75"/>
<source>Symmetric to plane</source>
<translation>Simetrično na ravnino</translation>
</message>
<message>
<location filename="../../TaskPocketParameters.ui" line="82"/>
<source>Reversed</source>
<translation>Obratno</translation>
</message>
<message>
<location filename="../../TaskPocketParameters.ui" line="91"/>
<source>2nd length</source>
<translation>2. dolžina</translation>
</message>
<message>
<location filename="../../TaskPocketParameters.ui" line="109"/>
<location filename="../../TaskPocketParameters.cpp" line="107"/>
<location filename="../../TaskPocketParameters.cpp" line="462"/>
<source>Face</source>
<translation>Ploskev</translation>
</message>
<message>
<location filename="../../TaskPocketParameters.ui" line="128"/>
<source>Update view</source>
<translation>Posodobi pogled</translation>
</message>
<message>
<location filename="../../TaskPocketParameters.cpp" line="56"/>
<source>Pocket parameters</source>
<translation>Določilke ugreza</translation>
</message>
<message>
<location filename="../../TaskPocketParameters.cpp" line="64"/>
<location filename="../../TaskPocketParameters.cpp" line="447"/>
<source>No face selected</source>
<translation>Nobena ploskev ni izbrana</translation>
</message>
<message>
<location filename="../../TaskPocketParameters.cpp" line="120"/>
<location filename="../../TaskPocketParameters.cpp" line="440"/>
<source>Through all</source>
<translation>Skozi vse</translation>
</message>
<message>
<location filename="../../TaskPocketParameters.cpp" line="121"/>
<location filename="../../TaskPocketParameters.cpp" line="441"/>
<source>To first</source>
<translation>Do prve</translation>
</message>
<message>
<location filename="../../TaskPocketParameters.cpp" line="122"/>
<location filename="../../TaskPocketParameters.cpp" line="442"/>
<source>Up to face</source>
<translation>Do ploskve</translation>
</message>
<message>
<location filename="../../TaskPocketParameters.cpp" line="123"/>
<location filename="../../TaskPocketParameters.cpp" line="443"/>
<source>Two dimensions</source>
<translation>Dve meri</translation>
</message>
</context>
<context>
<name>PartDesignGui::TaskPolarPatternParameters</name>
<message>
<location filename="../../TaskPolarPatternParameters.ui" line="14"/>
<source>Form</source>
<translation>Oblika</translation>
</message>
<message>
<location filename="../../TaskPolarPatternParameters.ui" line="22"/>
<source>Add feature</source>
<translation>Dodaj značilnost</translation>
</message>
<message>
<location filename="../../TaskPolarPatternParameters.ui" line="32"/>
<source>Remove feature</source>
<translation>Odstrani značilnost</translation>
</message>
<message>
<location filename="../../TaskPolarPatternParameters.ui" line="49"/>
<source>Axis</source>
<translation>Os</translation>
</message>
<message>
<location filename="../../TaskPolarPatternParameters.ui" line="61"/>
<source>Reverse direction</source>
<translation>Obrni smer</translation>
</message>
<message>
<location filename="../../TaskPolarPatternParameters.ui" line="70"/>
<source>Angle</source>
<translation>Kot</translation>
</message>
<message>
<location filename="../../TaskPolarPatternParameters.ui" line="97"/>
<source>Occurrences</source>
<translation>Pojavitve</translation>
</message>
<message>
<location filename="../../TaskPolarPatternParameters.ui" line="111"/>
<source>OK</source>
<translation>Potrdi</translation>
</message>
<message>
<location filename="../../TaskPolarPatternParameters.ui" line="120"/>
<source>Update view</source>
<translation>Posodobi pogled</translation>
</message>
<message>
<location filename="../../TaskPolarPatternParameters.cpp" line="112"/>
<source>Remove</source>
<translation>Odstrani</translation>
</message>
<message>
<location filename="../../TaskPolarPatternParameters.cpp" line="318"/>
<source>Error</source>
<translation>Napaka</translation>
</message>
</context>
<context>
<name>PartDesignGui::TaskRevolutionParameters</name>
<message>
<location filename="../../TaskRevolutionParameters.ui" line="14"/>
<source>Form</source>
<translation>Oblika</translation>
</message>
<message>
<location filename="../../TaskRevolutionParameters.ui" line="22"/>
<source>Axis:</source>
<translation>Os:</translation>
</message>
<message>
<location filename="../../TaskRevolutionParameters.ui" line="30"/>
<location filename="../../TaskRevolutionParameters.cpp" line="181"/>
<source>Base X axis</source>
<translation>Osnovna X os</translation>
</message>
<message>
<location filename="../../TaskRevolutionParameters.ui" line="35"/>
<location filename="../../TaskRevolutionParameters.cpp" line="182"/>
<source>Base Y axis</source>
<translation>Osnovna Y os</translation>
</message>
<message>
<location filename="../../TaskRevolutionParameters.ui" line="40"/>
<location filename="../../TaskRevolutionParameters.cpp" line="183"/>
<source>Base Z axis</source>
<translation>Osnovna Z os</translation>
</message>
<message>
<location filename="../../TaskRevolutionParameters.ui" line="45"/>
<source>Horizontal sketch axis</source>
<translation>Vodoravna os skice</translation>
</message>
<message>
<location filename="../../TaskRevolutionParameters.ui" line="50"/>
<source>Vertical sketch axis</source>
<translation>Navpična os skice</translation>
</message>
<message>
<location filename="../../TaskRevolutionParameters.ui" line="55"/>
<location filename="../../TaskRevolutionParameters.cpp" line="190"/>
<source>Select reference...</source>
<translation>Izberite osnovo …</translation>
</message>
<message>
<location filename="../../TaskRevolutionParameters.ui" line="67"/>
<source>Angle:</source>
<translation>Kót:</translation>
</message>
<message>
<location filename="../../TaskRevolutionParameters.ui" line="98"/>
<source>Symmetric to plane</source>
<translation>Simetrično na ravnino</translation>
</message>
<message>
<location filename="../../TaskRevolutionParameters.ui" line="105"/>
<source>Reversed</source>
<translation>Obratno</translation>
</message>
<message>
<location filename="../../TaskRevolutionParameters.ui" line="119"/>
<source>Update view</source>
<translation>Posodobi pogled</translation>
</message>
<message>
<location filename="../../TaskRevolutionParameters.cpp" line="61"/>
<source>Revolution parameters</source>
<translation>Določilke zvrtenja</translation>
</message>
</context>
<context>
<name>PartDesignGui::TaskScaledParameters</name>
<message>
<location filename="../../TaskScaledParameters.ui" line="14"/>
<source>Form</source>
<translation>Oblika</translation>
</message>
<message>
<location filename="../../TaskScaledParameters.ui" line="22"/>
<source>Add feature</source>
<translation>Dodaj značilnost</translation>
</message>
<message>
<location filename="../../TaskScaledParameters.ui" line="32"/>
<source>Remove feature</source>
<translation>Odstrani značilnost</translation>
</message>
<message>
<location filename="../../TaskScaledParameters.ui" line="49"/>
<source>Factor</source>
<translation>Količnik</translation>
</message>
<message>
<location filename="../../TaskScaledParameters.ui" line="63"/>
<source>Occurrences</source>
<translation>Pojavitve</translation>
</message>
<message>
<location filename="../../TaskScaledParameters.ui" line="77"/>
<source>OK</source>
<translation>Potrdi</translation>
</message>
<message>
<location filename="../../TaskScaledParameters.ui" line="86"/>
<source>Update view</source>
<translation>Posodobi pogled</translation>
</message>
<message>
<location filename="../../TaskScaledParameters.cpp" line="98"/>
<source>Remove</source>
<translation>Odstrani</translation>
</message>
</context>
<context>
<name>PartDesignGui::TaskShapeBinder</name>
<message>
<location filename="../../TaskShapeBinder.ui" line="14"/>
<source>Form</source>
<translation>Oblika</translation>
</message>
<message>
<location filename="../../TaskShapeBinder.ui" line="22"/>
<source>Object</source>
<translation>Predmet</translation>
</message>
<message>
<location filename="../../TaskShapeBinder.ui" line="48"/>
<source>Add Geometry</source>
<translation>Dodaj geometrijo</translation>
</message>
<message>
<location filename="../../TaskShapeBinder.ui" line="67"/>
<source>Remove Geometry</source>
<translation>Odstrani geometrijo</translation>
</message>
<message>
<location filename="../../TaskShapeBinder.cpp" line="61"/>
<source>Datum shape parameters</source>
<translation>Določilke sklicne oblike</translation>
</message>
</context>
<context>
<name>PartDesignGui::TaskSketchBasedParameters</name>
<message>
<location filename="../../TaskSketchBasedParameters.cpp" line="155"/>
<source>Face</source>
<translation>Ploskev</translation>
</message>
</context>
<context>
<name>PartDesignGui::TaskThicknessParameters</name>
<message>
<location filename="../../TaskThicknessParameters.ui" line="14"/>
<source>Form</source>
<translation>Oblika</translation>
</message>
<message>
<location filename="../../TaskThicknessParameters.ui" line="22"/>
<source>Add face</source>
<translation>Dodaj ploskev</translation>
</message>
<message>
<location filename="../../TaskThicknessParameters.ui" line="32"/>
<source>Remove face</source>
<translation>Odstrani ploskev</translation>
</message>
<message>
<location filename="../../TaskThicknessParameters.ui" line="49"/>
<source>Thickness</source>
<translation>Debelina</translation>
</message>
<message>
<location filename="../../TaskThicknessParameters.ui" line="75"/>
<source>Mode</source>
<translation>Način</translation>
</message>
<message>
<location filename="../../TaskThicknessParameters.ui" line="82"/>
<source>Join Type</source>
<translation>Vrsta spojitve</translation>
</message>
<message>
<location filename="../../TaskThicknessParameters.ui" line="90"/>
<source>Skin</source>
<translation>Površina</translation>
</message>
<message>
<location filename="../../TaskThicknessParameters.ui" line="95"/>
<source>Pipe</source>
<translation>Cev</translation>
</message>
<message>
<location filename="../../TaskThicknessParameters.ui" line="100"/>
<source>Recto Verso</source>
<translation>Dvostransko</translation>
</message>
<message>
<location filename="../../TaskThicknessParameters.ui" line="109"/>
<source>Arc</source>
<translation>Lok</translation>
</message>
<message>
<location filename="../../TaskThicknessParameters.ui" line="114"/>
<source>Intersection</source>
<translation>Sečišče</translation>
</message>
<message>
<location filename="../../TaskThicknessParameters.ui" line="124"/>
<source>Make thickness inwards</source>
<translation>Naredi debelino navznoter</translation>
</message>
<message>
<location filename="../../TaskThicknessParameters.cpp" line="98"/>
<source>Remove</source>
<translation>Odstrani</translation>
</message>
</context>
<context>
<name>PartDesignGui::TaskTransformedMessages</name>
<message>
<location filename="../../TaskTransformedMessages.cpp" line="43"/>
<source>Transformed feature messages</source>
<translation>Sporočila preoblikovane značilnosti</translation>
</message>
</context>
<context>
<name>PartDesignGui::TaskTransformedParameters</name>
<message>
<location filename="../../TaskTransformedParameters.cpp" line="194"/>
<source>Normal sketch axis</source>
<translation>Os normale skice</translation>
</message>
<message>
<location filename="../../TaskTransformedParameters.cpp" line="195"/>
<source>Vertical sketch axis</source>
<translation>Navpična os skice</translation>
</message>
<message>
<location filename="../../TaskTransformedParameters.cpp" line="196"/>
<source>Horizontal sketch axis</source>
<translation>Vodoravna os skice</translation>
</message>
<message>
<location filename="../../TaskTransformedParameters.cpp" line="198"/>
<location filename="../../TaskTransformedParameters.cpp" line="234"/>
<source>Construction line %1</source>
<translation>Pomožna črta %1</translation>
</message>
<message>
<location filename="../../TaskTransformedParameters.cpp" line="212"/>
<source>Base X axis</source>
<translation>Osnovna X os</translation>
</message>
<message>
<location filename="../../TaskTransformedParameters.cpp" line="213"/>
<source>Base Y axis</source>
<translation>Osnovna Y os</translation>
</message>
<message>
<location filename="../../TaskTransformedParameters.cpp" line="214"/>
<source>Base Z axis</source>
<translation>Osnovna Z os</translation>
</message>
<message>
<location filename="../../TaskTransformedParameters.cpp" line="221"/>
<location filename="../../TaskTransformedParameters.cpp" line="257"/>
<source>Select reference...</source>
<translation>Izberite osnovo …</translation>
</message>
<message>
<location filename="../../TaskTransformedParameters.cpp" line="248"/>
<source>Base XY plane</source>
<translation>Osnovna XY ravnina</translation>
</message>
<message>
<location filename="../../TaskTransformedParameters.cpp" line="249"/>
<source>Base YZ plane</source>
<translation>Osnovna YZ ravnina</translation>
</message>
<message>
<location filename="../../TaskTransformedParameters.cpp" line="250"/>
<source>Base XZ plane</source>
<translation>Osnovna XZ ravnina</translation>
</message>
</context>
<context>
<name>PartDesignGui::ViewProviderBody</name>
<message>
<location filename="../../ViewProviderBody.cpp" line="137"/>
<source>Toggle active body</source>
<translation>Preklopi dejavno telo</translation>
</message>
</context>
<context>
<name>PartDesign_CompPrimitiveAdditive</name>
<message>
<location filename="../../CommandPrimitive.cpp" line="192"/>
<source>Create an additive box by its width, height and length</source>
<translation>Dodaj kvader z njegovo širino, višino in dolžino</translation>
</message>
<message>
<location filename="../../CommandPrimitive.cpp" line="196"/>
<source>Create an additive cylinder by its radius, height and angle</source>
<translation>Dodaj valj z njegovim polmerom, višino in kotom</translation>
</message>
<message>
<location filename="../../CommandPrimitive.cpp" line="200"/>
<source>Create an additive sphere by its radius and various angles</source>
<translation>Dodaj kroglo z njenim polmerom in kotoma</translation>
</message>
<message>
<location filename="../../CommandPrimitive.cpp" line="204"/>
<source>Create an additive cone</source>
<translation>Dodaj stožec</translation>
</message>
<message>
<location filename="../../CommandPrimitive.cpp" line="208"/>
<source>Create an additive ellipsoid</source>
<translation>Dodaj elipsoid</translation>
</message>
<message>
<location filename="../../CommandPrimitive.cpp" line="212"/>
<source>Create an additive torus</source>
<translation>Dodaj svitek</translation>
</message>
<message>
<location filename="../../CommandPrimitive.cpp" line="216"/>
<source>Create an additive prism</source>
<translation>Dodaj prizmo</translation>
</message>
<message>
<location filename="../../CommandPrimitive.cpp" line="220"/>
<source>Create an additive wedge</source>
<translation>Dodaj klin</translation>
</message>
</context>
<context>
<name>PartDesign_CompPrimitiveSubtractive</name>
<message>
<location filename="../../CommandPrimitive.cpp" line="357"/>
<source>Create a subtractive box by its width, height and length</source>
<translation>Odvzemi kvader z njegovo širino, višino in dolžino</translation>
</message>
<message>
<location filename="../../CommandPrimitive.cpp" line="361"/>
<source>Create a subtractive cylinder by its radius, height and angle</source>
<translation>Odvzemi valj z njegovim polmerom, višino in kotom</translation>
</message>
<message>
<location filename="../../CommandPrimitive.cpp" line="365"/>
<source>Create a subtractive sphere by its radius and various angles</source>
<translation>Odvzemi kroglo z njenim polmerom in kotoma</translation>
</message>
<message>
<location filename="../../CommandPrimitive.cpp" line="369"/>
<source>Create a subtractive cone</source>
<translation>Odvzemi stožec</translation>
</message>
<message>
<location filename="../../CommandPrimitive.cpp" line="373"/>
<source>Create a subtractive ellipsoid</source>
<translation>Odvzemi elipsoid</translation>
</message>
<message>
<location filename="../../CommandPrimitive.cpp" line="377"/>
<source>Create a subtractive torus</source>
<translation>Odvzemi svitek</translation>
</message>
<message>
<location filename="../../CommandPrimitive.cpp" line="381"/>
<source>Create a subtractive prism</source>
<translation>Odvzemi prizmo</translation>
</message>
<message>
<location filename="../../CommandPrimitive.cpp" line="385"/>
<source>Create a subtractive wedge</source>
<translation>Odvzemi klin</translation>
</message>
</context>
<context>
<name>PartDesign_MoveFeature</name>
<message>
<location filename="../../CommandBody.cpp" line="755"/>
<source>Select body</source>
<translation>Izberi telo</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="756"/>
<source>Select a body from the list</source>
<translation>Izberi telo s seznama</translation>
</message>
</context>
<context>
<name>PartDesign_MoveFeatureInTree</name>
<message>
<location filename="../../CommandBody.cpp" line="906"/>
<source>Select feature</source>
<translation>Izberi značilnost</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="907"/>
<source>Select a feature from the list</source>
<translation>Izberi značilnost s seznama</translation>
</message>
</context>
<context>
<name>QObject</name>
<message>
<location filename="../../Command.cpp" line="135"/>
<source>Invalid selection</source>
<translation>Neveljavna izbira</translation>
</message>
<message>
<location filename="../../Command.cpp" line="135"/>
<source>There are no attachment modes that fit selected objects. Select something else.</source>
<translation>Izbranim predmetom ne ustreza noben način pripenjanja. Izberite nekaj drugega.</translation>
</message>
<message>
<location filename="../../Command.cpp" line="141"/>
<location filename="../../Command.cpp" line="144"/>
<location filename="../../Command.cpp" line="146"/>
<source>Error</source>
<translation>Napaka</translation>
</message>
<message>
<location filename="../../Command.cpp" line="141"/>
<source>There is no active body. Please make a body active before inserting a datum entity.</source>
<translation>Ni dejavnega telesa. Naredite telo dejavno pred vstavljanjem sklicne enote.</translation>
</message>
<message>
<location filename="../../Command.cpp" line="475"/>
<source>Several sub-elements selected</source>
<translation>Izbranih več podelementov</translation>
</message>
<message>
<location filename="../../Command.cpp" line="476"/>
<source>You have to select a single face as support for a sketch!</source>
<translation>Izbrati morate eno ploskev kot podporo za skico!</translation>
</message>
<message>
<location filename="../../Command.cpp" line="486"/>
<source>No support face selected</source>
<translation>Nobena podporna ploskev ni izbrana</translation>
</message>
<message>
<location filename="../../Command.cpp" line="487"/>
<source>You have to select a face as support for a sketch!</source>
<translation>Izbrati morate ploskev kot podporo za skico!</translation>
</message>
<message>
<location filename="../../Command.cpp" line="496"/>
<source>No planar support</source>
<translation>Ni ravninske podpore</translation>
</message>
<message>
<location filename="../../Command.cpp" line="497"/>
<source>You need a planar face as support for a sketch!</source>
<translation>Izbrati morate ravninsko ploskev kot podporo za skico!</translation>
</message>
<message>
<location filename="../../Command.cpp" line="679"/>
<source>No valid planes in this document</source>
<translation>Ni veljavnih ravnin v tem dokumentu</translation>
</message>
<message>
<location filename="../../Command.cpp" line="680"/>
<source>Please create a plane first or select a face to sketch on</source>
<translation>Ustvarite najprej ravnino ali izberite ploskev, na katero želite očrtavati</translation>
</message>
<message>
<location filename="../../Command.cpp" line="693"/>
<location filename="../../Command.cpp" line="983"/>
<location filename="../../Command.cpp" line="1825"/>
<location filename="../../ViewProvider.cpp" line="114"/>
<location filename="../../ViewProviderBoolean.cpp" line="85"/>
<location filename="../../ViewProviderDatum.cpp" line="251"/>
<location filename="../../ViewProviderHole.cpp" line="81"/>
<location filename="../../ViewProviderPrimitive.cpp" line="93"/>
<location filename="../../ViewProviderShapeBinder.cpp" line="92"/>
<source>A dialog is already open in the task panel</source>
<translation>Pogovorno okno je že odprto v opravilni plošči</translation>
</message>
<message>
<location filename="../../Command.cpp" line="694"/>
<location filename="../../Command.cpp" line="984"/>
<location filename="../../Command.cpp" line="1826"/>
<location filename="../../ViewProvider.cpp" line="115"/>
<location filename="../../ViewProviderBoolean.cpp" line="86"/>
<location filename="../../ViewProviderDatum.cpp" line="252"/>
<location filename="../../ViewProviderHole.cpp" line="82"/>
<location filename="../../ViewProviderPrimitive.cpp" line="94"/>
<location filename="../../ViewProviderShapeBinder.cpp" line="93"/>
<source>Do you want to close this dialog?</source>
<translation>Ali želite zapreti to pogovorno okno?</translation>
</message>
<message>
<location filename="../../Command.cpp" line="907"/>
<source>No sketch to work on</source>
<translation>Ni očrta za obravnavo</translation>
</message>
<message>
<location filename="../../Command.cpp" line="908"/>
<source>No sketch is available in the document</source>
<translation>V dokumentu ni nobenega razpoložljivega očrta</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1513"/>
<location filename="../../Command.cpp" line="1517"/>
<location filename="../../Command.cpp" line="1543"/>
<location filename="../../Command.cpp" line="1555"/>
<source>Wrong selection</source>
<translation>Napačen izbor</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1514"/>
<source>Select an edge, face or body.</source>
<translation>Izberi rob, ploskev ali telo.</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1518"/>
<source>Select an edge, face or body from a single body.</source>
<translation>Izberi rob, ploskev ali telo enega samega telesa.</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1522"/>
<location filename="../../Command.cpp" line="1855"/>
<source>Selection is not in Active Body</source>
<translation>Izbira ni v dejavnem telesu</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1523"/>
<source>Select an edge, face or body from an active body.</source>
<translation>Izberi rob, ploskev ali telo iz dejavnega telesa.</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1533"/>
<source>Wrong object type</source>
<translation>Napačna vrsta objekta</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1534"/>
<source>%1 works only on parts.</source>
<translation>%1 deluje samo na delih.</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1544"/>
<source>Shape of the selected Part is empty</source>
<translation>Oblika izbranega Dela je prazna</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1556"/>
<source> not possible on selected faces/edges.</source>
<translation> ni mogoče na izbranih ploskvah/robovih.</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1842"/>
<source>No valid features in this document</source>
<translation>Ni veljavnih značilnosti v tem dokumentu</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1843"/>
<source>Please create a subtractive or additive feature first.</source>
<translation>Najprej ustvari značilko dodajanja ali odvzemanja.</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1848"/>
<source>Multiple Features Selected</source>
<translation>Izbranih več značilnosti</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1849"/>
<source>Please select only one subtractive or additive feature first.</source>
<translation>Najprej izberite samo eno odvzemno ali dodajno značilnost.</translation>
</message>
<message>
<location filename="../../Command.cpp" line="1856"/>
<source>Please select only one subtractive or additive feature in an active body.</source>
<translation>Najprej izberite samo eno odvzemno ali dodajno značilnost v dejavnem telesu.</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="73"/>
<source>Part creation failed</source>
<translation>Ustvarjanje dela spodletelo</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="74"/>
<source>Failed to create a part object.</source>
<translation>Dela ni bilo mogoče ustvariti.</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="122"/>
<location filename="../../CommandBody.cpp" line="127"/>
<location filename="../../CommandBody.cpp" line="134"/>
<location filename="../../CommandBody.cpp" line="142"/>
<location filename="../../CommandBody.cpp" line="191"/>
<source>Bad base feature</source>
<translation>Slaba osnovna značilnost</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="123"/>
<source>Body can't be based on a PartDesign feature.</source>
<translation>Telo ne more biti osnovano na PartDesign značilnosti.</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="128"/>
<source>%1 already belongs to a body, can't use it as base feature for another body.</source>
<translation>%1 že pripada telesu, zato ne more biti uporabljeno kot osnovna značilnost za drugo telo.</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="135"/>
<source>Body can't be based on another body.</source>
<translation>Telo ne more biti osnovano na drugem telesu.</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="143"/>
<source>Base feature (%1) belongs to other part.</source>
<translation>Osnovna značilnost(%1) pripada drugemu delu.</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="167"/>
<source>The selected shape consists of multiple solids.
This may lead to unexpected results.</source>
<translation>Izbrana oblika je sestavljena iz več teles.
To lahko pripelje do nepričakovanih rezultatov.</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="171"/>
<source>The selected shape consists of multiple shells.
This may lead to unexpected results.</source>
<translation>Izbrana oblika je sestavljena iz več lupin.
To lahko pripelje do nepričakovanih rezultatov.</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="175"/>
<source>The selected shape consists of only a shell.
This may lead to unexpected results.</source>
<translation>Izbrana oblika je sestavljena samo iz lupine.
To lahko pripelje do nepričakovanih rezultatov.</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="179"/>
<source>The selected shape consists of multiple solids or shells.
This may lead to unexpected results.</source>
<translation>Izbrana oblika je sestavljena iz več teles ali lupin.
To lahko pripelje do nepričakovanih rezultatov.</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="184"/>
<source>Base feature</source>
<translation>Osnovna značilnost</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="192"/>
<source>Body may be based on no more than one feature.</source>
<translation>Telo ne sme biti osnovano na več kot eni značilnosti.</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="368"/>
<source>Nothing to migrate</source>
<translation>Ni kaj seliti</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="369"/>
<source>No PartDesign features found that don't belong to a body.Nothing to migrate.</source>
<translation>Ni bilo mogoče najti nobene značilnosti PartDesigna, ki ne bi pripadala telesu. Ni kaj seliti.</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="517"/>
<source>Sketch plane cannot be migrated</source>
<translation>Očrtovalne ravnine ni mogoče seliti</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="518"/>
<source>Please edit '%1' and redefine it to use a Base or Datum plane as the sketch plane.</source>
<translation>Uredite '%1' in ga ponovno definirajte za uporabo Osnovne ali Sklicne ravnine kot očrtovalno ravnino.</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="580"/>
<location filename="../../CommandBody.cpp" line="584"/>
<location filename="../../CommandBody.cpp" line="589"/>
<location filename="../../CommandBody.cpp" line="878"/>
<location filename="../../CommandBody.cpp" line="885"/>
<source>Selection error</source>
<translation>Napaka izbire</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="581"/>
<source>Select exactly one PartDesign feature or a body.</source>
<translation>Izberi natanko eno PartDesign značilnost ali telo.</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="585"/>
<source>Couldn't determine a body for the selected feature '%s'.</source>
<translation>Ni mogoče določiti telesa za izbrano značilnost '%s'.</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="590"/>
<source>Only a solid feature can be the tip of a body.</source>
<translation>Le polno telo je lahko konca telesa.</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="706"/>
<location filename="../../CommandBody.cpp" line="728"/>
<location filename="../../CommandBody.cpp" line="743"/>
<source>Features cannot be moved</source>
<translation>Značilnosti ni mogoče premakniti</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="707"/>
<source>Some of the selected features have dependencies in the source body</source>
<translation>Nekatere od izbranih značilnosti so odvisne od izhodiščnega telesa</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="729"/>
<source>Only features of a single source Body can be moved</source>
<translation>Le značilnosti enega samega telesa je mogoče premikati</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="744"/>
<source>There are no other bodies to move to</source>
<translation>Ni drugega telesa za premikanje</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="879"/>
<source>Impossible to move the base feature of a body.</source>
<translation>Osnovne značilnosti telesa ni mogoče premikati.</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="886"/>
<source>Select one or more features from the same body.</source>
<translation>Izberite eno ali več značilnosti istega telesa.</translation>
</message>
<message>
<location filename="../../CommandBody.cpp" line="899"/>
<source>Beginning of the body</source>
<translation>Začetek telesa</translation>
</message>
<message>
<location filename="../../CommandPrimitive.cpp" line="261"/>
<source>No previous feature found</source>
<translation>Predhodnih značilnosti ni mogoče najti</translation>
</message>
<message>
<location filename="../../CommandPrimitive.cpp" line="262"/>
<source>It is not possible to create a subtractive feature without a base feature available</source>
<translation>Značilke odvzemanja ni mogoče ustvariti, če osnovna značilnost ni na voljo</translation>
</message>
<message>
<location filename="../../TaskDraftParameters.cpp" line="303"/>
<source>Missing neutral plane</source>
<translation>Manjkajoča nevtralna ravnina</translation>
</message>
<message>
<location filename="../../TaskDraftParameters.cpp" line="304"/>
<source>Please select a plane or an edge plus a pull direction</source>
<translation>Izberite ravnino ali rob ter smer vlečenja</translation>
</message>
<message>
<location filename="../../TaskRevolutionParameters.cpp" line="164"/>
<location filename="../../TaskTransformedParameters.cpp" line="231"/>
<source>Vertical sketch axis</source>
<translation>Navpična os skice</translation>
</message>
<message>
<location filename="../../TaskRevolutionParameters.cpp" line="165"/>
<location filename="../../TaskTransformedParameters.cpp" line="232"/>
<source>Horizontal sketch axis</source>
<translation>Vodoravna os skice</translation>
</message>
<message>
<location filename="../../TaskRevolutionParameters.cpp" line="167"/>
<source>Construction line %1</source>
<translation>Pomožna črta %1</translation>
</message>
<message>
<location filename="../../TaskSketchBasedParameters.cpp" line="84"/>
<source>Face</source>
<translation>Ploskev</translation>
</message>
<message>
<location filename="../../Utils.cpp" line="88"/>
<source>No active Body</source>
<translation>Nobenega dejavnega telesa</translation>
</message>
<message>
<location filename="../../Utils.cpp" line="89"/>
<source>In order to use PartDesign you need an active Body object in the document. Please make one active (double click) or create one.
If you have a legacy document with PartDesign objects without Body, use the migrate function in PartDesign to put them into a Body.</source>
<translation>Če želite uporabljati PartDesign, potrebujete v dokumentu dejavno Telo. Naredite enega dejavnega (dvoklik) ali ga ustvarite.
Če je vaš dokument podedovan in ima PartDesign predmete brez Teles, uporabite PartDesignovo zmožnost pretvorbe, da jih spremenite v Telo.</translation>
</message>
<message>
<location filename="../../Utils.cpp" line="104"/>
<source>Active Body Required</source>
<translation>Potrebno dejavno Telo</translation>
</message>
<message>
<location filename="../../Utils.cpp" line="105"/>
<source>To create a new PartDesign object, there must be an active Body object in the document. Please make one active (double click) or create a new Body.</source>
<translation>Če želite ustvariti nov PartDesign predmet, mora biti v dokumentu dejavno telo. Naredite enega dejavnega (dvoklik) ali ustvarite novega.</translation>
</message>
<message>
<location filename="../../Utils.cpp" line="142"/>
<source>Feature is not in a body</source>
<translation>Značilnost ni v telesu</translation>
</message>
<message>
<location filename="../../Utils.cpp" line="143"/>
<source>In order to use this feature it needs to belong to a body object in the document.</source>
<translation>Če želite uporabiti to značilnost, mora pripadati telesu v dokumentu.</translation>
</message>
<message>
<location filename="../../Utils.cpp" line="175"/>
<source>Feature is not in a part</source>
<translation>Značilnost ni v delu</translation>
</message>
<message>
<location filename="../../Utils.cpp" line="176"/>
<source>In order to use this feature it needs to belong to a part object in the document.</source>
<translation>Če želite uporabiti to značilnost, mora pripadati delu v dokumentu.</translation>
</message>
<message>
<location filename="../../ViewProvider.cpp" line="96"/>
<source>Set colors...</source>
<translation>Nastavi barve …</translation>
</message>
<message>
<location filename="../../ViewProviderBoolean.cpp" line="68"/>
<source>Edit boolean</source>
<translation>Uredi logične vrednosti</translation>
</message>
<message>
<location filename="../../ViewProviderDatum.cpp" line="121"/>
<location filename="../../ViewProviderDatum.cpp" line="211"/>
<source>Plane</source>
<translation>Ravnina</translation>
</message>
<message>
<location filename="../../ViewProviderDatum.cpp" line="125"/>
<location filename="../../ViewProviderDatum.cpp" line="207"/>
<source>Line</source>
<translation>Črta</translation>
</message>
<message>
<location filename="../../ViewProviderDatum.cpp" line="129"/>
<location filename="../../ViewProviderDatum.cpp" line="215"/>
<source>Point</source>
<translation>Točka</translation>
</message>
<message>
<location filename="../../ViewProviderDatum.cpp" line="133"/>
<source>Coordinate System</source>
<translation>Koordinatni sistem</translation>
</message>
<message>
<location filename="../../ViewProviderDatum.cpp" line="232"/>
<source>Edit datum</source>
<translation>Uredi osnovne mere</translation>
</message>
<message>
<location filename="../../ViewProviderDressUp.cpp" line="49"/>
<location filename="../../ViewProviderTransformed.cpp" line="68"/>
<source>Edit %1</source>
<translation>Uredi %1</translation>
</message>
<message>
<location filename="../../ViewProviderDressUp.cpp" line="70"/>
<source>Feature error</source>
<translation>Napaka značilnosti</translation>
</message>
<message>
<location filename="../../ViewProviderDressUp.cpp" line="71"/>
<source>%1 misses a base feature.
This feature is broken and can't be edited.</source>
<translation>%1 je brez osnovne značilnosti.
Ta značilnost je pokvarjena in je ni mogoče urejati.</translation>
</message>
<message>
<location filename="../../ViewProviderGroove.cpp" line="51"/>
<source>Edit groove</source>
<translation>Uredi zarezo</translation>
</message>
<message>
<location filename="../../ViewProviderHole.cpp" line="64"/>
<source>Edit hole</source>
<translation>Uredi luknjo</translation>
</message>
<message>
<location filename="../../ViewProviderLoft.cpp" line="77"/>
<source>Edit loft</source>
<translation>Uredi ostrešje</translation>
</message>
<message>
<location filename="../../ViewProviderPad.cpp" line="54"/>
<source>Edit pad</source>
<translation>Uredi izboklino</translation>
</message>
<message>
<location filename="../../ViewProviderPipe.cpp" line="80"/>
<source>Edit pipe</source>
<translation>Uredi cev</translation>
</message>
<message>
<location filename="../../ViewProviderPocket.cpp" line="53"/>
<source>Edit pocket</source>
<translation>Uredi ugrez</translation>
</message>
<message>
<location filename="../../ViewProviderPrimitive.cpp" line="76"/>
<source>Edit primitive</source>
<translation>Uredi osnovnik</translation>
</message>
<message>
<location filename="../../ViewProviderRevolution.cpp" line="51"/>
<source>Edit revolution</source>
<translation>Uredi vrtenino</translation>
</message>
<message>
<location filename="../../ViewProviderShapeBinder.cpp" line="184"/>
<source>Edit shape binder</source>
<translation>Uredi povezovalnik oblik</translation>
</message>
<message>
<location filename="../../ViewProviderTransformed.cpp" line="152"/>
<source>One transformed shape does not intersect support</source>
<translation>Ena preoblikovana oblika se ne seka s podporo</translation>
</message>
<message>
<location filename="../../ViewProviderTransformed.cpp" line="154"/>
<source>%1 transformed shapes do not intersect support</source>
<translation>%1 preoblikovanih oblik se ne seka s podporo</translation>
</message>
<message>
<location filename="../../ViewProviderTransformed.cpp" line="163"/>
<source>Transformation succeeded</source>
<translation>Preoblikovanje uspelo</translation>
</message>
<message>
<location filename="../../WorkflowManager.cpp" line="144"/>
<source>The document "%1" you are editing was designed with an old version of PartDesign workbench.</source>
<translation>Dokument "%1", ki ga urejate, je bil narejen v starejši različici delavnega okolja PartDesign.</translation>
</message>
<message>
<location filename="../../WorkflowManager.cpp" line="147"/>
<source>Do you want to migrate in order to use modern PartDesign features?</source>
<translation>Želite preseliti, da bi lahko uporabljali nove zmožnosti PartDesigna?</translation>
</message>
<message>
<location filename="../../WorkflowManager.cpp" line="150"/>
<source>The document "%1" seems to be either in the middle of the migration process from legacy PartDesign or have a slightly broken structure.</source>
<translation>Kaže, da je dokument "%1" bodisi sredi selitve iz podedovanega PartDesigna, bodisi ima malo pokvarjeno zasnovo.</translation>
</message>
<message>
<location filename="../../WorkflowManager.cpp" line="154"/>
<source>Do you want to make the migration automatically?</source>
<translation>Želite, da bi bila selitev samodejna?</translation>
</message>
<message>
<location filename="../../WorkflowManager.cpp" line="156"/>
<source>Note: If you choose to migrate you won't be able to edit the file with an older FreeCAD version.
If you refuse to migrate you won't be able to use new PartDesign features like Bodies and Parts. As a result you also won't be able to use your parts in the assembly workbench.
Although you will be able to migrate any moment later with 'Part Design->Migrate...'.</source>
<translation>Opomba: Če se odločite za selitev, datoteke ne boste mogli več urejati s starejšimi različicami FreeCADa.
Če zavrnete selitev, ne boste mogli uporabljati novih zmožnosti PartDesigna, kot so Telesa in Deli. Posledično ne boste mogli uporabljati svojih delov v sestavljalnem delavnem okolju.
Ne glede na to je selitev v bodoče z "Oblikovanje delov -> Preseli" mogoča kadarkoli.</translation>
</message>
<message>
<location filename="../../WorkflowManager.cpp" line="165"/>
<source>Migrate manually</source>
<translation>Preseli ročno</translation>
</message>
</context>
<context>
<name>TaskHole</name>
<message>
<location filename="../../../FeatureHole/TaskHole.ui" line="14"/>
<source>Form</source>
<translation>Oblika</translation>
</message>
<message>
<location filename="../../../FeatureHole/TaskHole.ui" line="24"/>
<source>Position</source>
<translation type="unfinished">Position</translation>
</message>
<message>
<location filename="../../../FeatureHole/TaskHole.ui" line="35"/>
<source>Face</source>
<translation>Ploskev</translation>
</message>
<message>
<location filename="../../../FeatureHole/TaskHole.ui" line="49"/>
<location filename="../../../FeatureHole/TaskHole.ui" line="87"/>
<source>Edge</source>
<translation>Rob</translation>
</message>
<message>
<location filename="../../../FeatureHole/TaskHole.ui" line="63"/>
<location filename="../../../FeatureHole/TaskHole.ui" line="101"/>
<source>Distance</source>
<translation type="unfinished">Distance</translation>
</message>
<message>
<location filename="../../../FeatureHole/TaskHole.ui" line="137"/>
<source>Type</source>
<translation>Vrsta</translation>
</message>
<message>
<location filename="../../../FeatureHole/TaskHole.ui" line="145"/>
<source>Through</source>
<translation>Skozi</translation>
</message>
<message>
<location filename="../../../FeatureHole/TaskHole.ui" line="152"/>
<location filename="../../../FeatureHole/TaskHole.ui" line="492"/>
<source>Depth</source>
<translation>Globina</translation>
</message>
<message>
<location filename="../../../FeatureHole/TaskHole.ui" line="161"/>
<source>Threaded</source>
<translation>Z navojem</translation>
</message>
<message>
<location filename="../../../FeatureHole/TaskHole.ui" line="168"/>
<source>Countersink</source>
<translation>Kotno grezenje</translation>
</message>
<message>
<location filename="../../../FeatureHole/TaskHole.ui" line="175"/>
<source>Counterbore</source>
<translation>Valjno grezenje</translation>
</message>
<message>
<location filename="../../../FeatureHole/TaskHole.ui" line="196"/>
<source>Hole norm</source>
<translation>Norma luknje</translation>
</message>
<message>
<location filename="../../../FeatureHole/TaskHole.ui" line="202"/>
<source>Custom dimensions</source>
<translation>Mere po izbiri</translation>
</message>
<message>
<location filename="../../../FeatureHole/TaskHole.ui" line="218"/>
<source>Tolerance</source>
<translation>Dopustno odstopanje</translation>
</message>
<message>
<location filename="../../../FeatureHole/TaskHole.ui" line="249"/>
<location filename="../../../FeatureHole/TaskHole.ui" line="368"/>
<location filename="../../../FeatureHole/TaskHole.ui" line="474"/>
<source>Diameter</source>
<translation>Premer</translation>
</message>
<message>
<location filename="../../../FeatureHole/TaskHole.ui" line="280"/>
<source>Bolt/Washer</source>
<translation>Vijak/Podložka</translation>
</message>
<message>
<location filename="../../../FeatureHole/TaskHole.ui" line="329"/>
<location filename="../../../FeatureHole/TaskHole.ui" line="337"/>
<source>Thread norm</source>
<translation>Norma navoja</translation>
</message>
<message>
<location filename="../../../FeatureHole/TaskHole.ui" line="399"/>
<source> Custom thread length</source>
<translation> Dolžina navoja po meri</translation>
</message>
<message>
<location filename="../../../FeatureHole/TaskHole.ui" line="423"/>
<source>Finish depth</source>
<translation>Končna globina</translation>
</message>
<message>
<location filename="../../../FeatureHole/TaskHole.ui" line="466"/>
<source>Data</source>
<translation>Podatki</translation>
</message>
<message>
<location filename="../../../FeatureHole/TaskHole.ui" line="510"/>
<source>Counterbore/sink dia</source>
<translation>Premer izvrtine</translation>
</message>
<message>
<location filename="../../../FeatureHole/TaskHole.ui" line="528"/>
<source>Counterbore depth</source>
<translation>Globina valjaste izvrtine</translation>
</message>
<message>
<location filename="../../../FeatureHole/TaskHole.ui" line="546"/>
<source>Countersink angle</source>
<translation>Kot stožčaste izvrtine</translation>
</message>
<message>
<location filename="../../../FeatureHole/TaskHole.ui" line="564"/>
<source>Thread length</source>
<translation>Dolžina navoja</translation>
</message>
</context>
<context>
<name>TaskHoleParameters</name>
<message>
<location filename="../../TaskHoleParameters.ui" line="14"/>
<source>Task Hole Parameters</source>
<translation>Določilke luknje</translation>
</message>
<message>
<location filename="../../TaskHoleParameters.ui" line="33"/>
<location filename="../../TaskHoleParameters.ui" line="300"/>
<source>Type</source>
<translation>Vrsta</translation>
</message>
<message>
<location filename="../../TaskHoleParameters.ui" line="40"/>
<location filename="../../TaskHoleParameters.ui" line="316"/>
<source>Diameter</source>
<translation>Premer</translation>
</message>
<message>
<location filename="../../TaskHoleParameters.ui" line="47"/>
<location filename="../../TaskHoleParameters.ui" line="332"/>
<source>Depth</source>
<translation>Globina</translation>
</message>
<message>
<location filename="../../TaskHoleParameters.ui" line="57"/>
<source>Cutoff inner</source>
<translation>Vreži notranji</translation>
</message>
<message>
<location filename="../../TaskHoleParameters.ui" line="70"/>
<source>Class</source>
<translation>Razred</translation>
</message>
<message>
<location filename="../../TaskHoleParameters.ui" line="77"/>
<source>Tapered</source>
<translation>Koničast</translation>
</message>
<message>
<location filename="../../TaskHoleParameters.ui" line="97"/>
<source>Direction</source>
<translation>Smer</translation>
</message>
<message>
<location filename="../../TaskHoleParameters.ui" line="110"/>
<source>Fit</source>
<translation>Prilagodi</translation>
</message>
<message>
<location filename="../../TaskHoleParameters.ui" line="158"/>
<source>Flat</source>
<translation>Plosko</translation>
</message>
<message>
<location filename="../../TaskHoleParameters.ui" line="173"/>
<source>Angled</source>
<translation>Pod kotom</translation>
</message>
<message>
<location filename="../../TaskHoleParameters.ui" line="195"/>
<source>Pitch</source>
<translation>Naklon</translation>
</message>
<message>
<location filename="../../TaskHoleParameters.ui" line="220"/>
<source>Right hand</source>
<translation>Desnosučni</translation>
</message>
<message>
<location filename="../../TaskHoleParameters.ui" line="227"/>
<source>Left hand</source>
<translation>Levosučni</translation>
</message>
<message>
<location filename="../../TaskHoleParameters.ui" line="240"/>
<source>Model actual thread</source>
<translation>Ustvari dejanski navoj</translation>
</message>
<message>
<location filename="../../TaskHoleParameters.ui" line="247"/>
<source>Threaded</source>
<translation>Z navojem</translation>
</message>
<message>
<location filename="../../TaskHoleParameters.ui" line="257"/>
<source>Angle</source>
<translation>Kot</translation>
</message>
<message>
<location filename="../../TaskHoleParameters.ui" line="283"/>
<source>Profile</source>
<translation>Profil</translation>
</message>
<message>
<location filename="../../TaskHoleParameters.ui" line="293"/>
<source>Countersink angle</source>
<translation>Kot stožčaste izvrtine</translation>
</message>
<message>
<location filename="../../TaskHoleParameters.ui" line="353"/>
<source>Dimension</source>
<translation>Mera</translation>
</message>
<message>
<location filename="../../TaskHoleParameters.ui" line="358"/>
<source>Through all</source>
<translation>Skozi vse</translation>
</message>
<message>
<location filename="../../TaskHoleParameters.ui" line="372"/>
<source>Size</source>
<translation>Velikost</translation>
</message>
<message>
<location filename="../../TaskHoleParameters.ui" line="380"/>
<source>Standard fit</source>
<translation>Običajno prileganje</translation>
</message>
<message>
<location filename="../../TaskHoleParameters.ui" line="385"/>
<source>Close fit</source>
<translation>Tesno prileganje</translation>
</message>
<message>
<location filename="../../TaskHoleParameters.ui" line="396"/>
<source>Cutoff outer</source>
<translation>Zunanji obrez</translation>
</message>
<message>
<location filename="../../TaskHoleParameters.ui" line="449"/>
<source><b>Drill point</b></source>
<translation><b>Konica vrtanja</b></translation>
</message>
<message>
<location filename="../../TaskHoleParameters.ui" line="459"/>
<source><b>Misc</b></source>
<translation><b>Ostalo</b></translation>
</message>
<message>
<location filename="../../TaskHoleParameters.ui" line="466"/>
<source><b>Hole cut</b></source>
<translation><b>Izvrtina</b></translation>
</message>
<message>
<location filename="../../TaskHoleParameters.ui" line="473"/>
<source><b>Threading and size</b></source>
<translation><b>Navoj in velikost</b></translation>
</message>
</context>
<context>
<name>TaskTransformedMessages</name>
<message>
<location filename="../../TaskTransformedMessages.ui" line="14"/>
<source>Form</source>
<translation>Oblika</translation>
</message>
<message>
<location filename="../../TaskTransformedMessages.ui" line="26"/>
<source>No message</source>
<translation>Ni sporočila</translation>
</message>
</context>
<context>
<name>Workbench</name>
<message>
<location filename="../../Workbench.cpp" line="51"/>
<source>Part Design</source>
<translation>Oblikovanje delov</translation>
</message>
</context>
</TS>
| lgpl-2.1 |
garnertb/python-mapnik | src/mapnik_color.cpp | 5252 | /*****************************************************************************
*
* This file is part of Mapnik (c++ mapping toolkit)
*
* Copyright (C) 2015 Artem Pavlenko, Jean-Francois Doyon
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*
*****************************************************************************/
#include <mapnik/config.hpp>
#include "boost_std_shared_shim.hpp"
// boost
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunused-parameter"
#pragma GCC diagnostic ignored "-Wunused-local-typedef"
#pragma GCC diagnostic ignored "-Wmissing-field-initializers"
#pragma GCC diagnostic ignored "-Wshadow"
#pragma GCC diagnostic ignored "-Wshorten-64-to-32"
#include <boost/python.hpp>
#pragma GCC diagnostic pop
//mapnik
#include <mapnik/color.hpp>
using mapnik::color;
// Pickle support for mapnik::color: when a Color is unpickled it is
// rebuilt by calling its (r, g, b, a) constructor with the values
// returned here.
struct color_pickle_suite : boost::python::pickle_suite
{
    // Return the constructor arguments (red, green, blue, alpha — each
    // 0..255) needed to reconstruct `c`.
    static boost::python::tuple
    getinitargs(const color& c)
    {
        using namespace boost::python;
        return boost::python::make_tuple(c.red(),c.green(),c.blue(),c.alpha());
    }
};
// Register mapnik::color with Boost.Python under the Python name "Color".
// Exposes constructors from RGB(A) components, packed 32-bit values and
// CSS strings; per-channel r/g/b/a properties; equality operators;
// pickling; and the premultiplied-alpha helpers.
void export_color ()
{
    using namespace boost::python;
    // --- constructors ---------------------------------------------------
    class_<color>("Color", init<int,int,int,int>(
        ( arg("r"), arg("g"), arg("b"), arg("a") ),
        "Creates a new color from its RGB components\n"
        "and an alpha value.\n"
        "All values between 0 and 255.\n")
        )
        .def(init<int,int,int,int,bool>(
                 ( arg("r"), arg("g"), arg("b"), arg("a"), arg("premultiplied") ),
                 "Creates a new color from its RGB components\n"
                 "and an alpha value.\n"
                 "All values between 0 and 255.\n")
            )
        .def(init<int,int,int>(
                 ( arg("r"), arg("g"), arg("b") ),
                 "Creates a new color from its RGB components.\n"
                 "All values between 0 and 255.\n")
            )
        .def(init<uint32_t>(
                 ( arg("val") ),
                 "Creates a new color from an unsigned integer.\n"
                 "All values between 0 and 2^32-1\n")
            )
        .def(init<uint32_t, bool>(
                 ( arg("val"), arg("premultiplied") ),
                 "Creates a new color from an unsigned integer.\n"
                 "All values between 0 and 2^32-1\n")
            )
        .def(init<std::string>(
                 ( arg("color_string") ),
                 "Creates a new color from its CSS string representation.\n"
                 "The string may be a CSS color name (e.g. 'blue')\n"
                 "or a hex color string (e.g. '#0000ff').\n")
            )
        .def(init<std::string, bool>(
                 ( arg("color_string"), arg("premultiplied") ),
                 "Creates a new color from its CSS string representation.\n"
                 "The string may be a CSS color name (e.g. 'blue')\n"
                 "or a hex color string (e.g. '#0000ff').\n")
            )
        // --- per-channel read/write properties --------------------------
        .add_property("r",
                      &color::red,
                      &color::set_red,
                      "Gets or sets the red component.\n"
                      "The value is between 0 and 255.\n")
        .add_property("g",
                      &color::green,
                      &color::set_green,
                      "Gets or sets the green component.\n"
                      "The value is between 0 and 255.\n")
        .add_property("b",
                      &color::blue,
                      &color::set_blue,
                      "Gets or sets the blue component.\n"
                      "The value is between 0 and 255.\n")
        .add_property("a",
                      &color::alpha,
                      &color::set_alpha,
                      "Gets or sets the alpha component.\n"
                      "The value is between 0 and 255.\n")
        // --- comparison, pickling and misc methods ----------------------
        .def(self == self)
        .def(self != self)
        .def_pickle(color_pickle_suite())
        .def("__str__",&color::to_string)
        // premultiplied-alpha handling
        .def("set_premultiplied",&color::set_premultiplied)
        .def("get_premultiplied",&color::get_premultiplied)
        .def("premultiply",&color::premultiply)
        .def("demultiply",&color::demultiply)
        // packed() returns the color as a single rgba integer
        .def("packed",&color::rgba)
        .def("to_hex_string",&color::to_hex_string,
             "Returns the hexadecimal representation of this color.\n"
             "\n"
             "Example:\n"
             ">>> c = Color('blue')\n"
             ">>> c.to_hex_string()\n"
             "'#0000ff'\n")
        ;
}
| lgpl-2.1 |
modius/railo | railo-java/railo-core/src/railo/runtime/tag/Wddx.java | 6570 | package railo.runtime.tag;
import java.io.IOException;
import java.util.Locale;

import javax.xml.parsers.FactoryConfigurationError;

import railo.runtime.converter.ConverterException;
import railo.runtime.converter.JSConverter;
import railo.runtime.converter.WDDXConverter;
import railo.runtime.exp.ApplicationException;
import railo.runtime.exp.ExpressionException;
import railo.runtime.exp.PageException;
import railo.runtime.ext.tag.TagImpl;
import railo.runtime.op.Caster;
/**
* Serializes and de-serializes CFML data structures to the XML-based WDDX format.
* Generates JavaScript statements to instantiate JavaScript objects equivalent to the contents of a
* WDDX packet or some CFML data structures.
*
*
*
**/
public final class Wddx extends TagImpl {
    /** The value to be processed: CFML data, a WDDX packet string, etc.,
     ** depending on the configured action. */
    private Object input;
    /** Specifies the action taken by the cfwddx tag; stored lower-cased
     ** (e.g. "cfml2wddx", "wddx2cfml"). */
    private String action;
    /** The name of the variable to hold the output of the operation. This attribute is required for
     ** action = 'WDDX2CFML'. For all other actions, if this attribute is not provided, the result of the
     ** WDDX processing is outputted in the HTML stream. */
    private String output;
    /** Whether the WDDX input should be validated before processing. */
    private boolean validate;
    /** The name of the top-level JavaScript object created by the deserialization process. The object
     ** created is an instance of the WddxRecordset object, explained in WddxRecordset Object. */
    private String toplevelvariable;
    /** Indicates whether to output time-zone information when serializing CFML to WDDX. If time-zone
     ** information is taken into account, the hour-minute offset, as represented in the ISO8601 format, is
     ** calculated in the date-time output. If time-zone information is not taken into account, the local
     ** time is output. The default is Yes. */
    private boolean usetimezoneinfo;
    /** Whether the generated output should be strictly XML conform
     ** rather than plain WDDX conform. */
    private boolean xmlConform;
@Override
public void release() {
super.release();
input=null;
action=null;
output=null;
validate=false;
toplevelvariable=null;
usetimezoneinfo=false;
xmlConform=false;
}
    /** Sets the input attribute.
     * The value to be processed (CFML data or a WDDX packet, depending on
     * the action).
     * @param input value to set
     **/
    public void setInput(Object input) {
        this.input=input;
    }
/** set the value action
* Specifies the action taken by the cfwddx tag.
* @param action value to set
**/
public void setAction(String action) {
this.action=action.toLowerCase();
}
/** set the value output
* The name of the variable to hold the output of the operation. This attribute is required for
* action = 'WDDX2CFML'. For all other actions, if this attribute is not provided, the result of the
* WDDX processing is outputted in the HTML stream.
* @param output value to set
**/
public void setOutput(String output) {
this.output=output;
}
/** set the value validate
*
* @param validate value to set
**/
public void setValidate(boolean validate) {
this.validate=validate;
}
/** set the value toplevelvariable
* The name of the top-level JavaScript object created by the deserialization process. The object
* created is an instance of the WddxRecordset object, explained in WddxRecordset Object.
* @param toplevelvariable value to set
**/
public void setToplevelvariable(String toplevelvariable) {
this.toplevelvariable=toplevelvariable;
}
/** set the value usetimezoneinfo
* Indicates whether to output time-zone information when serializing CFML to WDDX. If time-zone
* information is taken into account, the hour-minute offset, as represented in the ISO8601 format, is
* calculated in the date-time output. If time-zone information is not taken into account, the local
* time is output. The default is Yes.
* @param usetimezoneinfo value to set
**/
public void setUsetimezoneinfo(boolean usetimezoneinfo) {
this.usetimezoneinfo=usetimezoneinfo;
}
/**
* sets if generated code is xml or wddx conform
* @param xmlConform
*/
public void setXmlconform(boolean xmlConform) {
this.xmlConform=xmlConform;
}
@Override
public int doStartTag() throws PageException {
try {
doIt();
} catch (Exception e) {
throw Caster.toPageException(e);
}
return SKIP_BODY;
}
private void doIt() throws ExpressionException, PageException, ConverterException, IOException, FactoryConfigurationError {
// cfml > wddx
if(action.equals("cfml2wddx")) {
if(output!=null) pageContext.setVariable(output,cfml2wddx(input));
else pageContext.forceWrite(cfml2wddx(input));
}
// wddx > cfml
else if(action.equals("wddx2cfml")) {
if(output==null) throw new ApplicationException("at tag cfwddx the attribute output is required if you set action==wddx2cfml");
pageContext.setVariable(output,wddx2cfml(Caster.toString(input)));
}
// cfml > js
else if(action.equals("cfml2js")) {
if(output!=null) pageContext.setVariable(output,cfml2js(input));
else pageContext.forceWrite(cfml2js(input));
}
// wddx > js
else if(action.equals("wddx2js")) {
if(output!=null) pageContext.setVariable(output,wddx2js(Caster.toString(input)));
else pageContext.forceWrite(wddx2js(Caster.toString(input)));
}
else throw new ExpressionException("invalid attribute action for tag cfwddx, attributes are [cfml2wddx, wddx2cfml,cfml2js, wddx2js].");
}
private String cfml2wddx(Object input) throws ConverterException {
WDDXConverter converter =new WDDXConverter(pageContext.getTimeZone(),xmlConform,true);
if(!usetimezoneinfo)converter.setTimeZone(null);
return converter.serialize(input);
}
private Object wddx2cfml(String input) throws ConverterException, IOException, FactoryConfigurationError {
WDDXConverter converter =new WDDXConverter(pageContext.getTimeZone(),xmlConform,true);
converter.setTimeZone(pageContext.getTimeZone());
return converter.deserialize(input,validate);
}
private String cfml2js(Object input) throws ConverterException {
if(toplevelvariable==null)missingTopLevelVariable();
JSConverter converter =new JSConverter();
return converter.serialize(input,toplevelvariable);
}
private String wddx2js(String input) throws ConverterException, IOException, FactoryConfigurationError {
if(toplevelvariable==null)missingTopLevelVariable();
JSConverter converter =new JSConverter();
return converter.serialize(wddx2cfml(input),toplevelvariable);
}
private ApplicationException missingTopLevelVariable() {
return new ApplicationException("at tag cfwddx the attribute topLevelVariable is required if you set action equal wddx2js or cfml2js");
}
@Override
public int doEndTag() {
return EVAL_PAGE;
}
} | lgpl-2.1 |
lefou/kdepim-noakonadi | kresources/groupwise/kcal_resourcegroupwise.cpp | 8612 | /*
This file is part of kdepim.
Copyright (c) 2004 Cornelius Schumacher <schumacher@kde.org>
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Library General Public
License as published by the Free Software Foundation; either
version 2 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Library General Public License for more details.
You should have received a copy of the GNU Library General Public License
along with this library; see the file COPYING.LIB. If not, write to
the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.
*/
#include "kcal_resourcegroupwise.h"
#include "kcal_groupwiseprefsbase.h"
#include "kcal_resourcegroupwiseconfig.h"
#include "soap/groupwiseserver.h"
#include <kcal/icalformat.h>
#include <kcal/calendarlocal.h>
#include <kcal/confirmsavedialog.h>
#include <QApplication>
#include <QDateTime>
#include <QStringList>
#include <QTimer>
#include <kabc/locknull.h>
#include <kdebug.h>
#include <klocale.h>
#include <kresources/configwidget.h>
#include <kstandarddirs.h>
#include <kstringhandler.h>
#include <kurl.h>
#include <libkdepim/kpimprefs.h>
using namespace KCal;
// Creates a stand-alone (not KConfig-configured) GroupWise calendar resource.
ResourceGroupwise::ResourceGroupwise()
  : ResourceCached(), mLock( true )
{
  init();
  // Scope the preference entries to this resource instance.
  mPrefs->addGroupPrefix( identifier() );
}
// Creates a resource configured from the given KConfig group.
ResourceGroupwise::ResourceGroupwise( const KConfigGroup &group )
  : ResourceCached( group ), mLock( true )
{
  init();
  // Scope the preference entries to this resource instance.
  mPrefs->addGroupPrefix( identifier() );
  readConfig( group );
}
ResourceGroupwise::~ResourceGroupwise()
{
  // Stop emitting change notifications while the resource is torn down.
  disableChangeNotification();
  delete mPrefs;
  mPrefs = 0;
}
// Shared initialisation for both constructors.
void ResourceGroupwise::init()
{
  mDownloadJob = 0;        // no download in progress yet
  mProgress = 0;           // no progress item shown yet
  mIsShowingError = false; // guards against re-entrant loads from error dialogs
  mPrefs = new GroupwisePrefsBase();
  setType( "groupwise" );
  enableChangeNotification();
}
// Accessor for the resource preferences (owned by this object).
GroupwisePrefsBase *ResourceGroupwise::prefs()
{
  return mPrefs;
}
// Loads the GroupWise specific preferences, then the generic cached-resource
// settings from the KConfig group.
void ResourceGroupwise::readConfig( const KConfigGroup &group )
{
  kDebug() <<"KCal::ResourceGroupwise::readConfig()";
  mPrefs->readConfig();
  ResourceCached::readConfig( group );
}
// Persists the resource settings: base resource data, the GroupWise specific
// preferences and the cached-resource settings.
void ResourceGroupwise::writeConfig( KConfigGroup &group )
{
  kDebug() <<"KCal::ResourceGroupwise::writeConfig()";
  ResourceCalendar::writeConfig( group );
  mPrefs->writeConfig();
  ResourceCached::writeConfig( group );
}
// Nothing to do on open; the server connection is established lazily on
// load/save.
bool ResourceGroupwise::doOpen()
{
  return true;
}
// Delegates to the cached-resource close (clears the in-memory calendar).
void ResourceGroupwise::doClose()
{
  ResourceCached::doClose();
}
// Loads the calendar: the local cache is read first (so data is visible
// immediately), then an asynchronous KIO download of the server calendar is
// started.  Returns true when the load could be initiated (or is already
// running).
bool ResourceGroupwise::doLoad( bool )
{
  kDebug() <<"ResourceGroupwise::load()";
  if ( mIsShowingError ) {
    kDebug() <<"Still showing error";
    return true;
  }
  if ( mDownloadJob ) {
    kDebug() <<"Download still in progress";
    return true;
  }

  // Populate the calendar from the cache without emitting per-incidence
  // change notifications.
  calendar()->close();
  disableChangeNotification();
  loadFromCache();
  enableChangeNotification();
  emit resourceChanged( this );
  clearChanges();

  // Build the groupwise(s) URL from the configured http(s) endpoint.
  KUrl url( prefs()->url() );
  if ( url.protocol() == "http" ) url.setProtocol( "groupwise" );
  else url.setProtocol( "groupwises" );
  url.setPath( "/calendar/" );
  url.setUser( prefs()->user() );
  url.setPass( prefs()->password() );
  kDebug() <<"Download URL:" << url;

  mJobData.clear();
  mDownloadJob = KIO::get( url, KIO::NoReload, KIO::HideProgressInfo );
  connect( mDownloadJob, SIGNAL( result( KJob * ) ),
           SLOT( slotJobResult( KJob * ) ) );
  connect( mDownloadJob, SIGNAL( data( KIO::Job *, const QByteArray & ) ),
           SLOT( slotJobData( KIO::Job *, const QByteArray & ) ) );

  // Progress feedback, with the possibility to cancel the download.
  mProgress = KPIM::ProgressManager::instance()->createProgressItem(
      KPIM::ProgressManager::getUniqueID(), i18n("Downloading calendar") );
  connect( mProgress,
           SIGNAL( progressItemCanceled( KPIM::ProgressItem * ) ),
           SLOT( cancelLoad() ) );

  return true;
}
// Invoked when the calendar download job finishes.  On success the collected
// payload (an iCalendar document) is parsed, incidences carrying a GroupWise
// remote UID are cloned into this resource (mapping remote to local UIDs),
// and the result is written back to the cache.
void ResourceGroupwise::slotJobResult( KJob *job )
{
  kDebug() <<"ResourceGroupwise::slotJobResult():";
  if ( job->error() ) {
    // Guard against re-entrant load attempts while the error is shown.
    mIsShowingError = true;
    loadError( job->errorString() );
    mIsShowingError = false;
  } else {
    disableChangeNotification();
    clearCache();
    // FIXME: This does not take into account the time zone!
    CalendarLocal calendar( QString::fromLatin1("UTC") );
    ICalFormat ical;
    if ( !ical.fromString( &calendar, mJobData ) ) {
      loadError( i18n("Error parsing calendar data.") );
    } else {
      Incidence::List incidences = calendar.incidences();
      Incidence::List::ConstIterator it;
      for( it = incidences.constBegin(); it != incidences.constEnd(); ++it ) {
        // kDebug() <<"INCIDENCE:" << (*it)->summary();
        QString remote = (*it)->customProperty( "GWRESOURCE", "UID" );
        if ( remote.isEmpty() ) {
          // Incidences without a remote UID cannot be synchronised; skip them.
          kDebug() <<"INCIDENCE:" << (*it)->summary() << " HAS NO REMOTE UID, REJECTING!";
        } else {
          // BUG FIX: clone only after the remote UID check; previously the
          // clone was created unconditionally and leaked for every rejected
          // incidence.
          Incidence *i = (*it)->clone();
          QString local = idMapper().localId( remote );
          if ( local.isEmpty() ) {
            // First time we see this remote incidence: remember the mapping.
            idMapper().setRemoteId( i->uid(), remote );
          } else {
            // Known incidence: reuse the previously assigned local UID.
            i->setUid( local );
          }
          addIncidence( i );
        }
      }
    }
    saveToCache();
    enableChangeNotification();
    clearChanges();
    emit resourceChanged( this );
    emit resourceLoaded( this );
  }
  mDownloadJob = 0;
  if ( mProgress ) mProgress->setComplete();
  mProgress = 0;
}
// Accumulates the downloaded iCalendar payload chunk by chunk.
// NOTE(review): appending via data.data() assumes the payload contains no
// embedded NUL bytes — acceptable for iCalendar text, but verify if mJobData
// ever needs to hold binary data.
void ResourceGroupwise::slotJobData( KIO::Job *, const QByteArray &data )
{
  // kDebug() <<"ResourceGroupwise::slotJobData()";
  mJobData.append( data.data() );
}
// Saves the calendar: writes the cache, then — after user confirmation —
// pushes the recorded additions, changes and deletions to the GroupWise
// server.  Returns false when login or confirmation fails.
bool ResourceGroupwise::doSave( bool )
{
  kDebug() <<"KCal::ResourceGroupwise::doSave()";
  saveToCache();
  if ( !hasChanges() ) {
    kDebug() <<"No changes";
    return true;
  }

  if ( !confirmSave() ) return false;

  GroupwiseServer server( mPrefs->url(), mPrefs->user(), mPrefs->password(),
                          timeSpec(), 0 );
  if ( !server.login() ) {
    kError() <<"Unable to login to server" << server.error();
    emit resourceSaveError( this, i18n( "Unable to login to server: " ) +server.errorText() );
    return false;
  }

  Incidence::List::ConstIterator it;

  // Upload new incidences; the cache is re-written after each successful add
  // so the server-assigned identifiers survive a later failure.
  Incidence::List added = addedIncidences();
  for( it = added.constBegin(); it != added.constEnd(); ++it ) {
    if ( server.addIncidence( *it, this ) ) {
      clearChange( *it );
      saveToCache();
    }
  }

  Incidence::List changed = changedIncidences();
  for( it = changed.constBegin(); it != changed.constEnd(); ++it ) {
    if ( server.changeIncidence( *it ) ) clearChange( *it );
  }

  Incidence::List deleted = deletedIncidences();
  for( it = deleted.constBegin(); it != deleted.constEnd(); ++it ) {
    if ( server.deleteIncidence( *it ) ) clearChange( *it );
  }

  server.logout();

  return true;
}
// FIXME: Put this into ResourceCached
// Shows a confirmation dialog listing the pending additions, changes and
// deletions; returns true when the user accepts (or nothing changed).
bool ResourceGroupwise::confirmSave()
{
  if ( !hasChanges() ) return true;

  ConfirmSaveDialog dlg( resourceName(), 0 );

  dlg.addIncidences( addedIncidences(), i18n("Added") );
  dlg.addIncidences( changedIncidences(), i18n("Changed") );
  dlg.addIncidences( deletedIncidences(), i18n("Deleted") );

  int result = dlg.exec();
  return result == QDialog::Accepted;
}
// Returns the (null) lock object used by the resource framework.
KABC::Lock *ResourceGroupwise::lock()
{
  return &mLock;
}
// Aborts a running calendar download and finishes the progress item.
void ResourceGroupwise::cancelLoad()
{
  if ( mDownloadJob ) mDownloadJob->kill();
  mDownloadJob = 0;
  if ( mProgress ) mProgress->setComplete();
  mProgress = 0;
}
// Fetches the user settings from the GroupWise server into *settings.
// Returns true on success.
bool ResourceGroupwise::userSettings( ngwt__Settings *&settings )
{
  kDebug() <<"ResourceGroupwise::userSettings()";

  GroupwiseServer server( prefs()->url(),
                          prefs()->user(),
                          prefs()->password(), timeSpec(), this );
  // BUG FIX: the login result used to be ignored (unlike doSave()); bail out
  // instead of querying an unauthenticated session.
  if ( !server.login() ) {
    kError() <<"Unable to login to server" << server.error();
    return false;
  }

  // get these out again, once we discover their format.
  bool success = server.readUserSettings( settings );

  server.logout();

  return success;
}
// Writes the given changed settings back to the GroupWise server.
// Returns true on success; an empty map is treated as failure.
bool ResourceGroupwise::modifyUserSettings( QMap<QString, QString> & settings )
{
  kDebug() <<"ResourceGroupwise::modifyUserSettings()";
  if ( settings.isEmpty() )
  {
    kDebug() <<"ResourceGroupwise::modifyUserSettings(): no changed settings";
    return false;
  }

  GroupwiseServer server( prefs()->url(),
                          prefs()->user(),
                          prefs()->password(), timeSpec(), this );
  // BUG FIX: the login result used to be ignored (unlike doSave()); bail out
  // instead of modifying settings on an unauthenticated session.
  if ( !server.login() ) {
    kError() <<"Unable to login to server" << server.error();
    return false;
  }

  // get these out again, once we discover their format.
  bool success = server.modifyUserSettings( settings );

  server.logout();

  return success;
}
#include "kcal_resourcegroupwise.moc"
| lgpl-2.1 |
golovnin/wildfly | clustering/infinispan/extension/src/main/java/org/jboss/as/clustering/infinispan/subsystem/GlobalConfigurationServiceConfigurator.java | 10337 | /*
* JBoss, Home of Professional Open Source.
* Copyright 2015, Red Hat, Inc., and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.jboss.as.clustering.infinispan.subsystem;
import static org.jboss.as.clustering.infinispan.subsystem.CacheContainerResourceDefinition.Attribute.DEFAULT_CACHE;
import static org.jboss.as.clustering.infinispan.subsystem.CacheContainerResourceDefinition.Attribute.STATISTICS_ENABLED;
import static org.jboss.as.clustering.infinispan.subsystem.CacheContainerResourceDefinition.Capability.CONFIGURATION;
import java.util.EnumMap;
import java.util.EnumSet;
import java.util.Map;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;
import javax.management.MBeanServer;
import org.infinispan.commons.marshall.Ids;
import org.infinispan.configuration.global.GlobalConfiguration;
import org.infinispan.configuration.global.SerializationConfigurationBuilder;
import org.infinispan.configuration.global.ShutdownHookBehavior;
import org.infinispan.configuration.global.SiteConfiguration;
import org.infinispan.configuration.global.ThreadPoolConfiguration;
import org.infinispan.configuration.global.TransportConfiguration;
import org.infinispan.configuration.internal.PrivateGlobalConfigurationBuilder;
import org.infinispan.globalstate.ConfigurationStorage;
import org.jboss.as.clustering.controller.CapabilityServiceNameProvider;
import org.jboss.as.clustering.controller.CommonRequirement;
import org.jboss.as.clustering.controller.ResourceServiceConfigurator;
import org.jboss.as.clustering.infinispan.InfinispanLogger;
import org.jboss.as.clustering.infinispan.MBeanServerProvider;
import org.jboss.as.controller.OperationContext;
import org.jboss.as.controller.OperationFailedException;
import org.jboss.as.controller.PathAddress;
import org.jboss.as.server.Services;
import org.jboss.dmr.ModelNode;
import org.jboss.marshalling.ModularClassResolver;
import org.jboss.modules.Module;
import org.jboss.modules.ModuleLoader;
import org.jboss.msc.Service;
import org.jboss.msc.service.ServiceBuilder;
import org.jboss.msc.service.ServiceController;
import org.jboss.msc.service.ServiceTarget;
import org.wildfly.clustering.marshalling.Externalizer;
import org.wildfly.clustering.marshalling.infinispan.AdvancedExternalizerAdapter;
import org.wildfly.clustering.marshalling.spi.DefaultExternalizer;
import org.wildfly.clustering.service.CompositeDependency;
import org.wildfly.clustering.service.Dependency;
import org.wildfly.clustering.service.FunctionalService;
import org.wildfly.clustering.service.ServiceConfigurator;
import org.wildfly.clustering.service.ServiceSupplierDependency;
import org.wildfly.clustering.service.SupplierDependency;
/**
 * Builds the Infinispan {@link GlobalConfiguration} for a cache container from
 * the resolved management model and installs it as an MSC service.
 *
 * @author Paul Ferraro
 */
public class GlobalConfigurationServiceConfigurator extends CapabilityServiceNameProvider implements ResourceServiceConfigurator, Supplier<GlobalConfiguration> {

    // Module whose class loader the container uses for (de)serialization.
    private final SupplierDependency<Module> module;
    private final SupplierDependency<TransportConfiguration> transport;
    private final SupplierDependency<SiteConfiguration> site;
    // Thread pool configurations, keyed by the resource definitions that declare them.
    private final Map<ThreadPoolResourceDefinition, SupplierDependency<ThreadPoolConfiguration>> pools = new EnumMap<>(ThreadPoolResourceDefinition.class);
    private final Map<ScheduledThreadPoolResourceDefinition, SupplierDependency<ThreadPoolConfiguration>> schedulers = new EnumMap<>(ScheduledThreadPoolResourceDefinition.class);
    private final String name;
    // Only resolved when an MBean server capability is present (see configure()).
    private volatile SupplierDependency<MBeanServer> server;
    private volatile Supplier<ModuleLoader> loader;
    private volatile String defaultCache;
    private volatile boolean statisticsEnabled;

    GlobalConfigurationServiceConfigurator(PathAddress address) {
        super(CONFIGURATION, address);
        this.name = address.getLastElement().getValue();
        this.module = new ServiceSupplierDependency<>(CacheContainerComponent.MODULE.getServiceName(address));
        this.transport = new ServiceSupplierDependency<>(CacheContainerComponent.TRANSPORT.getServiceName(address));
        this.site = new ServiceSupplierDependency<>(CacheContainerComponent.SITE.getServiceName(address));
        // All thread pools except CLIENT contribute to the global configuration.
        for (ThreadPoolResourceDefinition pool : EnumSet.complementOf(EnumSet.of(ThreadPoolResourceDefinition.CLIENT))) {
            this.pools.put(pool, new ServiceSupplierDependency<>(pool.getServiceName(address)));
        }
        for (ScheduledThreadPoolResourceDefinition pool : EnumSet.allOf(ScheduledThreadPoolResourceDefinition.class)) {
            this.schedulers.put(pool, new ServiceSupplierDependency<>(pool.getServiceName(address)));
        }
    }

    /**
     * Resolves the model attributes needed by {@link #get()} and records
     * whether an MBean server is available in this server configuration.
     */
    @Override
    public ServiceConfigurator configure(OperationContext context, ModelNode model) throws OperationFailedException {
        this.server = context.hasOptionalCapability(CommonRequirement.MBEAN_SERVER.getName(), null, null) ? new ServiceSupplierDependency<>(CommonRequirement.MBEAN_SERVER.getServiceName(context)) : null;
        this.defaultCache = DEFAULT_CACHE.resolveModelAttribute(context, model).asStringOrNull();
        this.statisticsEnabled = STATISTICS_ENABLED.resolveModelAttribute(context, model).asBoolean();
        return this;
    }

    /**
     * Assembles the {@link GlobalConfiguration} from the resolved dependencies.
     * Invoked by the MSC service when it starts.
     */
    @Override
    public GlobalConfiguration get() {
        org.infinispan.configuration.global.GlobalConfigurationBuilder builder = new org.infinispan.configuration.global.GlobalConfigurationBuilder();
        if (this.defaultCache != null) {
            builder.defaultCacheName(this.defaultCache);
        }
        TransportConfiguration transport = this.transport.get();

        // This fails due to ISPN-4755 !!
        // this.builder.transport().read(this.transport.getValue());
        // Workaround this by copying relevant fields individually
        builder.transport().transport(transport.transport())
                .distributedSyncTimeout(transport.distributedSyncTimeout())
                .clusterName(transport.clusterName())
                .machineId(transport.machineId())
                .rackId(transport.rackId())
                .siteId(transport.siteId())
                ;

        Module module = this.module.get();
        builder.serialization().classResolver(ModularClassResolver.getInstance(this.loader.get()));
        builder.classLoader(module.getClassLoader());
        // Register externalizers with ids above Infinispan's reserved range.
        int id = Ids.MAX_ID;
        SerializationConfigurationBuilder serialization = builder.serialization();
        for (Externalizer<?> externalizer : EnumSet.allOf(DefaultExternalizer.class)) {
            serialization.addAdvancedExternalizer(new AdvancedExternalizerAdapter<>(id++, externalizer));
        }
        // Also pick up externalizers contributed by the deployment module.
        for (Externalizer<?> externalizer : module.loadService(Externalizer.class)) {
            InfinispanLogger.ROOT_LOGGER.debugf("Cache container %s will use an externalizer for %s", this.name, externalizer.getTargetClass().getName());
            serialization.addAdvancedExternalizer(new AdvancedExternalizerAdapter<>(id++, externalizer));
        }

        // Wire the per-purpose thread pool configurations.
        builder.transport().transportThreadPool().read(this.pools.get(ThreadPoolResourceDefinition.TRANSPORT).get());
        builder.transport().remoteCommandThreadPool().read(this.pools.get(ThreadPoolResourceDefinition.REMOTE_COMMAND).get());

        builder.asyncThreadPool().read(this.pools.get(ThreadPoolResourceDefinition.ASYNC_OPERATIONS).get());
        builder.expirationThreadPool().read(this.schedulers.get(ScheduledThreadPoolResourceDefinition.EXPIRATION).get());
        builder.listenerThreadPool().read(this.pools.get(ThreadPoolResourceDefinition.LISTENER).get());
        builder.stateTransferThreadPool().read(this.pools.get(ThreadPoolResourceDefinition.STATE_TRANSFER).get());
        builder.persistenceThreadPool().read(this.pools.get(ThreadPoolResourceDefinition.PERSISTENCE).get());

        // The server controls the container lifecycle; no JVM shutdown hook.
        builder.shutdown().hookBehavior(ShutdownHookBehavior.DONT_REGISTER);

        builder.globalJmxStatistics()
                .enabled(this.statisticsEnabled)
                .cacheManagerName(this.name)
                .mBeanServerLookup(new MBeanServerProvider((this.server != null) ? this.server.get() : null))
                .jmxDomain("org.wildfly.clustering.infinispan")
                .allowDuplicateDomains(true);

        builder.site().read(this.site.get());

        // Disable triangle algorithm
        // We optimize for originator as primary owner
        builder.addModule(PrivateGlobalConfigurationBuilder.class).serverMode(true);

        // Disable configuration storage
        builder.globalState().configurationStorage(ConfigurationStorage.IMMUTABLE).disable();

        return builder.build();
    }

    /**
     * Registers all dependencies and installs the PASSIVE service that
     * supplies the global configuration.
     */
    @Override
    public ServiceBuilder<?> build(ServiceTarget target) {
        ServiceBuilder<?> builder = target.addService(this.getServiceName());
        Consumer<GlobalConfiguration> global = new CompositeDependency(this.module, this.transport, this.site, this.server).register(builder).provides(this.getServiceName());
        this.loader = builder.requires(Services.JBOSS_SERVICE_MODULE_LOADER);
        for (Dependency dependency: this.pools.values()) {
            dependency.register(builder);
        }
        for (Dependency dependency : this.schedulers.values()) {
            dependency.register(builder);
        }
        Service service = new FunctionalService<>(global, Function.identity(), this);
        return builder.setInstance(service).setInitialMode(ServiceController.Mode.PASSIVE);
    }
}
| lgpl-2.1 |
limb-php-framework/limb | dbal/src/drivers/lmbDbInfo.class.php | 1296 | <?php
/*
* Limb PHP Framework
*
* @link http://limb-project.com
* @copyright Copyright © 2004-2009 BIT(http://bit-creative.com)
* @license LGPL http://www.gnu.org/copyleft/lesser.html
*/
lmb_require('limb/dbal/src/exception/lmbDbException.class.php');
/**
 * Base class for database schema meta information.
 *
 * Concrete drivers implement loadTables(), which is expected to populate
 * $this->tables (name => table object) and flip $this->isTablesLoaded.
 *
 * @package dbal
 * @version $Id: lmbDbInfo.class.php 8072 2010-01-20 08:33:41Z korchasa $
 */
abstract class lmbDbInfo
{
  protected $tables = array();
  protected $name;
  protected $isTablesLoaded = false;

  function __construct($name)
  {
    $this->name = $name;
  }

  /** Returns the name of the database this object describes. */
  function getName()
  {
    return $this->name;
  }

  /**
   * Returns the table object registered under $name.
   * @throws lmbDbException when no such table exists
   */
  function getTable($name)
  {
    if($this->hasTable($name))
      return $this->tables[$name];

    throw new lmbDbException("Table '$name' does not exist");
  }

  /** Tells whether a table named $name exists (loads tables lazily). */
  function hasTable($name)
  {
    $this->_ensureTablesLoaded();
    return array_key_exists($name, $this->tables);
  }

  /** Returns the list of known table names (loads tables lazily). */
  function getTableList()
  {
    $this->_ensureTablesLoaded();
    return array_keys($this->tables);
  }

  /** Returns a map of table name => table object for every known table. */
  function getTables()
  {
    $result = array();
    foreach($this->getTableList() as $table_name)
      $result[$table_name] = $this->getTable($table_name);
    return $result;
  }

  /** Invokes loadTables() the first time meta data is requested. */
  protected function _ensureTablesLoaded()
  {
    if(!$this->isTablesLoaded)
      $this->loadTables();
  }

  abstract function loadTables();
}
| lgpl-2.1 |
hdeling/sofa | applications/plugins/DEPRECATED/PhysicsBasedInteractiveModeler/gui/qt/QMouseOperations.cpp | 6803 | /******************************************************************************
* SOFA, Simulation Open-Framework Architecture, development version *
* (c) 2006-2017 INRIA, USTL, UJF, CNRS, MGH *
* *
* This program is free software; you can redistribute it and/or modify it *
* under the terms of the GNU Lesser General Public License as published by *
* the Free Software Foundation; either version 2.1 of the License, or (at *
* your option) any later version. *
* *
* This program is distributed in the hope that it will be useful, but WITHOUT *
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or *
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License *
* for more details. *
* *
* You should have received a copy of the GNU Lesser General Public License *
* along with this program. If not, see <http://www.gnu.org/licenses/>. *
*******************************************************************************
* Authors: The SOFA Team and external contributors (see Authors.txt) *
* *
* Contact information: contact@sofa-framework.org *
******************************************************************************/
#include "QMouseOperations.h"
#include <PhysicsBasedInteractiveModeler/pim/SculptBodyPerformer.h>
#ifdef SOFA_QT4
#include <QVBoxLayout>
#include <QHBoxLayout>
#include <QGridLayout>
#include <QLabel>
#include <QRadioButton>
#include <QPushButton>
#else
#include <qlayout.h>
#include <qlabel.h>
#include <qradiobutton.h>
#include <qpushbutton.h>
#endif
namespace plugins
{
namespace pim
{
namespace gui
{
namespace qt
{
using namespace sofa::defaulttype;
// Builds the sculpt-operation option panel: sculpt mode radio buttons,
// scale/force sliders, material property line edits and the button that
// triggers simulation of the added material.
QSculptOperation::QSculptOperation()
{
  //Building the GUI for Sculpt Operation
  QVBoxLayout *layout=new QVBoxLayout(this);
  options = new QGroupBox(QString("Options"),this);
  VLayout = new QVBoxLayout(options);

  // Row 1: sculpt mode selection (inflate is the default).
  QHBoxLayout *HLayout = new QHBoxLayout();
  inflateRadioButton = new QRadioButton(QString("Inflate"), options);
  inflateRadioButton->setChecked(true);
  deflateRadioButton = new QRadioButton(QString("Deflate"), options);
  fixRadioButton = new QRadioButton(QString("Fix"), options);
  HLayout->addWidget(inflateRadioButton);
  HLayout->addWidget(deflateRadioButton);
  HLayout->addWidget(fixRadioButton);
  VLayout->addLayout(HLayout);

  // Row 2: brush scale.
  QHBoxLayout *HLayout1 = new QHBoxLayout();
  QLabel *scaleLabel=new QLabel(QString("Scale"), this);
  scaleSlider=new QSlider(Qt::Horizontal, this);
  scaleValue=new QSpinBox(0,100,1,this);
  scaleValue->setEnabled(true);
  HLayout1->addWidget(scaleLabel);
  HLayout1->addWidget(scaleSlider);
  HLayout1->addWidget(scaleValue);
  VLayout->addLayout(HLayout1);

  // Row 3: sculpt force (member layout; hidden while "Fix" is selected).
  HLayout2 = new QHBoxLayout();
  forceLabel=new QLabel(QString("Force"), this);
  forceSlider=new QSlider(Qt::Horizontal, this);
  forceValue=new QSpinBox(0,100,1,this);
  forceValue->setEnabled(true);
  HLayout2->addWidget(forceLabel);
  HLayout2->addWidget(forceSlider);
  HLayout2->addWidget(forceValue);
  VLayout->addLayout(HLayout2);

  // Row 4: material properties (member layout).
  HLayout3 = new QHBoxLayout();
  massLabel=new QLabel(QString("Mass"), this);
  massValue=new QLineEdit(this);
  stiffnessLabel=new QLabel(QString("Stiffness"), this);
  stiffnessValue=new QLineEdit(this);
  dampingLabel=new QLabel(QString("Damping"), this);
  dampingValue=new QLineEdit(this);
  HLayout3->addWidget(massLabel);
  HLayout3->addWidget(massValue);
  HLayout3->addWidget(stiffnessLabel);
  HLayout3->addWidget(stiffnessValue);
  HLayout3->addWidget(dampingLabel);
  HLayout3->addWidget(dampingValue);
  VLayout->addLayout(HLayout3);

  // Row 5: animate button.
  // BUG FIX: this local layout used to be declared with the same name as the
  // member 'HLayout3' assigned above, shadowing it; use a distinct name.
  QHBoxLayout *animateLayout = new QHBoxLayout();
  animatePushButton = new QPushButton(QString("Animate"), options);
  animatePushButton->setMaximumSize(75,30);
  animateLayout->addWidget(animatePushButton);
  VLayout->addLayout(animateLayout);

  layout->addWidget(options);

  // Keep sliders and spin boxes in sync, and react to mode/scale changes.
  connect(forceSlider,SIGNAL(valueChanged(int)), forceValue, SLOT(setValue(int)));
  connect(scaleSlider,SIGNAL(valueChanged(int)), scaleValue, SLOT(setValue(int)));
  connect(scaleSlider,SIGNAL(valueChanged(int)), this, SLOT(setScale()));

  /* Add solver, mass and forcefield to simulate added materia */
  connect(animatePushButton,SIGNAL(clicked()), this, SLOT(animate()));

  connect(fixRadioButton,SIGNAL(toggled(bool)), this, SLOT(updateInterface(bool)));

  // Sensible defaults.
  forceSlider->setValue(60);
  scaleSlider->setValue(70);
  massValue->insert("10");
  stiffnessValue->insert("100");
  dampingValue->insert("0.2");
}
// Shows or hides the force controls and the animate button.  They only make
// sense for inflate/deflate, so they are hidden while the "Fix" radio button
// is checked.
void QSculptOperation::updateInterface(bool checked)
{
  // Collapse of the original if/else duplication: setHidden(checked) covers
  // both branches (checked == true hides, checked == false shows).
  forceLabel->setHidden(checked);
  forceSlider->setHidden(checked);
  forceValue->setHidden(checked);
  animatePushButton->setHidden(checked);
}
// Returns the current sculpt force (0-100, from the force spin box).
double QSculptOperation::getForce() const
{
  return forceValue->value();
}
// Returns the current brush scale (0-100, from the scale spin box).
double QSculptOperation::getScale() const
{
  return scaleValue->value();
}
// Returns the mass entered in the material properties row.
double QSculptOperation::getMass() const
{
  return (massValue->text()).toDouble();
}
// Returns the stiffness entered in the material properties row.
double QSculptOperation::getStiffness() const
{
  return (stiffnessValue->text()).toDouble();
}
// Returns the damping entered in the material properties row.
double QSculptOperation::getDamping() const
{
  return (dampingValue->text()).toDouble();
}
// True while the "Inflate" sculpt mode is selected.
bool QSculptOperation::isCheckedInflate() const
{
  return inflateRadioButton->isChecked();
}
// True while the "Deflate" sculpt mode is selected.
bool QSculptOperation::isCheckedDeflate() const
{
  return deflateRadioButton->isChecked();
}
// True while the "Fix" sculpt mode is selected.
bool QSculptOperation::isCheckedFix() const
{
  return fixRadioButton->isChecked();
}
// Pushes the current scale value to the active performer, if one is attached
// and supports scale configuration.
void QSculptOperation::setScale()
{
  if (performer == NULL) return;
  SculptBodyPerformerConfiguration *performerConfiguration=dynamic_cast<SculptBodyPerformerConfiguration*>(performer);
  // Not every performer accepts a scale; silently ignore the others.
  if (performerConfiguration == NULL) return;
  performerConfiguration->setScale(getScale());
}
// Triggers the simulation of the material added by the sculpt performer.
void QSculptOperation::animate()
{
  if (performer == NULL) return;
  SculptBodyPerformer<Vec3Types>* sculptPerformer=dynamic_cast<SculptBodyPerformer<Vec3Types>*>(performer);
  // BUG FIX: guard the dynamic_cast result before dereferencing, consistent
  // with setScale(); previously a non-sculpt performer caused a crash here.
  if (sculptPerformer == NULL) return;
  sculptPerformer->animate();
}
} // namespace qt
} // namespace gui
} // namespace pim
} // namespace plugins
| lgpl-2.1 |
raghavaggarwal/moose | modules/phase_field/src/action/CHPFCRFFSplitKernelAction.C | 3612 | #include "CHPFCRFFSplitKernelAction.h"
#include "Factory.h"
#include "Parser.h"
#include "FEProblem.h"
// Declares the input parameters accepted by the CHPFCRFFSplitKernelAction
// block in the input file.
template<>
InputParameters validParams<CHPFCRFFSplitKernelAction>()
{
  InputParameters params = validParams<Action>();
  params.addRequiredParam<unsigned int>("num_L", "specifies the number of complex L variables will be solved for");
  params.addRequiredParam<NonlinearVariableName>("n_name", "Variable name used for the n variable");
  params.addRequiredParam<std::string>("L_name_base", "Base name for the complex L variables");
  params.addParam<MaterialPropertyName>("mob_name", "M", "The mobility used for n in this model");
  // Strategies for handling the natural log term in the free energy.
  MooseEnum log_options("tolerance cancelation expansion");
  params.addRequiredParam<MooseEnum>("log_approach", log_options, "Which approach will be used to handle the natural log");
  params.addParam<Real>("tol", 1.0e-9, "Tolerance used when the tolerance approach is chosen");
  params.addParam<Real>("n_exp_terms", 4.0, "Number of terms used in the Taylor expansion of the natural log term");
  params.addParam<bool>("use_displaced_mesh", false, "Whether to use displaced mesh in the kernels");
  return params;
}
// Caches the parameters used by act() when building the kernels.
CHPFCRFFSplitKernelAction::CHPFCRFFSplitKernelAction(const InputParameters & params) :
    Action(params),
    _num_L(getParam<unsigned int>("num_L")),
    _L_name_base(getParam<std::string>("L_name_base")),
    _n_name(getParam<NonlinearVariableName>("n_name"))
{
}
// Adds the kernels for the n variable: a TimeDerivative kernel and the
// CHPFCRFF bulk kernel coupled to the real parts of the L variables.
void
CHPFCRFFSplitKernelAction::act()
{
#ifdef DEBUG
  Moose::err << "Inside the CHPFCRFFSplitKernelAction Object\n";
  Moose::err << "L name base:" << _L_name_base;
#endif

  // Create the two kernels required for the n_variable, starting with the time derivative
  InputParameters poly_params = _factory.getValidParams("TimeDerivative");
  poly_params.set<NonlinearVariableName>("variable") = _n_name;
  poly_params.set<bool>("use_displaced_mesh") = getParam<bool>("use_displaced_mesh");
  _problem->addKernel("TimeDerivative", "IE_n", poly_params);

  // Now the CHPFCRFF kernel
  // First, we have to create the vector containing the names of the real L
  // variables ("<base><index>_real").
  std::vector<VariableName> real_v;
  real_v.resize(_num_L);
  for (unsigned int l = 0; l < _num_L; ++l)
  {
    std::string L_name = _L_name_base;
    std::stringstream out;
    out << l;
    L_name.append(out.str());
    L_name.append("_real");
    real_v[l] = L_name;
  }

  // Forward the user-facing parameters to the bulk kernel.
  poly_params = _factory.getValidParams("CHPFCRFF");
  poly_params.set<NonlinearVariableName>("variable") = _n_name;
  poly_params.set<std::vector<VariableName> >("v") = real_v;
  poly_params.set<MaterialPropertyName>("mob_name") = getParam<MaterialPropertyName>("mob_name");
  poly_params.set<MooseEnum>("log_approach") = getParam<MooseEnum>("log_approach");
  poly_params.set<Real>("tol") = getParam<Real>("tol");
  poly_params.set<Real>("n_exp_terms") = getParam<Real>("n_exp_terms");
  poly_params.set<bool>("use_displaced_mesh") = getParam<bool>("use_displaced_mesh");
  _problem->addKernel("CHPFCRFF", "CH_bulk_n", poly_params);

  // Loop over the L_variables
  // \todo This looks like it is not done yet
  // NOTE(review): this loop only builds names and discards them — it has no
  // effect; presumably per-L kernels were intended here.
  for (unsigned int l = 0; l < _num_L; ++l)
  {
    // Create L base name
    std::string L_name = _L_name_base;
    std::stringstream out;
    out << l;
    L_name.append(out.str());
  }
}
// DEPRECATED CONSTRUCTOR
// Kept only for backward compatibility with the old Action API that passed
// the object name separately; forwards to the deprecated base-class
// constructor and caches the same parameters as the primary constructor.
CHPFCRFFSplitKernelAction::CHPFCRFFSplitKernelAction(const std::string & deprecated_name, InputParameters params) :
Action(deprecated_name, params),
_num_L(getParam<unsigned int>("num_L")),
_L_name_base(getParam<std::string>("L_name_base")),
_n_name(getParam<NonlinearVariableName>("n_name"))
{
}
| lgpl-2.1 |
step21/inkscape-osx-packaging-native | src/ui/view/edit.cpp | 515 | /**
* \brief Empty file left in repo for current desktop.cpp
*/
#ifdef HAVE_CONFIG_H
# include <config.h>
#endif
// Intentionally empty namespace hierarchy: per the file header comment this
// translation unit is a stub kept in the repository (its contents moved to
// desktop.cpp), so the build still has a file to compile.
namespace Inkscape {
namespace UI {
namespace View {
} // namespace View
} // namespace UI
} // namespace Inkscape
/*
Local Variables:
mode:c++
c-file-style:"stroustrup"
c-file-offsets:((innamespace . 0)(inline-open . 0)(case-label . +))
indent-tabs-mode:nil
fill-column:99
End:
*/
// vim: filetype=cpp:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:encoding=utf-8:textwidth=99 :
| lgpl-2.1 |
rspavel/spack | var/spack/repos/builtin.mock/packages/raiser/package.py | 1204 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from six.moves import builtins
from spack import *
class Raiser(Package):
    """Mock package whose install step raises a configurable built-in
    exception instead of installing anything.

    The exception class name and its message are both supplied through
    string variants, so tests can provoke any built-in error on demand.
    """

    homepage = "http://www.example.com"
    url = "http://www.example.com/a-1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')
    version('2.0', '2.0_a_hash')

    variant(
        'exc_type',
        values=lambda v: isinstance(v, str),
        default='RuntimeError',
        description='type of the exception to be raised',
        multi=False
    )
    variant(
        'msg',
        values=lambda v: isinstance(v, str),
        default='Unknown Exception',
        description='message that will be tied to the exception',
        multi=False
    )

    def install(self, spec, prefix):
        """Resolve the configured exception class among the Python
        built-ins and raise it with the configured message."""
        print('Raiser will raise ')
        variants = self.spec.variants
        exception_cls = getattr(builtins, variants['exc_type'].value)
        raise exception_cls(variants['msg'].value)
| lgpl-2.1 |
georgebyelas/molgenis-lifelines | src/main/java/org/molgenis/lifelines/catalog/LifeLinesCatalogManagerService.java | 2684 | package org.molgenis.lifelines.catalog;
import java.util.List;
import org.molgenis.catalog.Catalog;
import org.molgenis.catalog.CatalogMeta;
import org.molgenis.catalog.UnknownCatalogException;
import org.molgenis.catalogmanager.CatalogManagerService;
import org.molgenis.lifelines.studymanager.StudyDefinitionIdConverter;
import org.molgenis.omx.catalogmanager.OmxCatalogManagerService;
import org.molgenis.study.UnknownStudyDefinitionException;
/**
 * {@link CatalogManagerService} for LifeLines that forwards every call to an
 * OMX-backed implementation, translating study definition identifiers into
 * OMX identifiers where the delegate requires them.
 */
public class LifeLinesCatalogManagerService implements CatalogManagerService
{
	// The OMX-backed service that performs the actual catalog work.
	private final OmxCatalogManagerService delegate;

	public LifeLinesCatalogManagerService(OmxCatalogManagerService omxCatalogManagerService)
	{
		if (omxCatalogManagerService == null) throw new IllegalArgumentException("omxCatalogManagerService is null");
		this.delegate = omxCatalogManagerService;
	}

	/**
	 * Converts a LifeLines study definition id into the OMX identifier
	 * understood by the wrapped catalog manager service.
	 */
	private static String toOmxId(String id)
	{
		return StudyDefinitionIdConverter.studyDefinitionIdToOmxIdentifier(id);
	}

	@Override
	public List<CatalogMeta> findCatalogs()
	{
		return delegate.findCatalogs();
	}

	@Override
	public Catalog getCatalog(String id) throws UnknownCatalogException
	{
		return delegate.getCatalog(id);
	}

	@Override
	public Catalog getCatalogOfStudyDefinition(String id) throws UnknownCatalogException,
			UnknownStudyDefinitionException
	{
		return delegate.getCatalogOfStudyDefinition(toOmxId(id));
	}

	@Override
	public void loadCatalog(String id) throws UnknownCatalogException
	{
		delegate.loadCatalog(id);
	}

	@Override
	public void unloadCatalog(String id) throws UnknownCatalogException
	{
		delegate.unloadCatalog(id);
	}

	@Override
	public boolean isCatalogLoaded(String id) throws UnknownCatalogException
	{
		return delegate.isCatalogLoaded(id);
	}

	@Override
	public void loadCatalogOfStudyDefinition(String id) throws UnknownCatalogException, UnknownStudyDefinitionException
	{
		delegate.loadCatalogOfStudyDefinition(toOmxId(id));
	}

	@Override
	public void unloadCatalogOfStudyDefinition(String id) throws UnknownCatalogException,
			UnknownStudyDefinitionException
	{
		delegate.unloadCatalogOfStudyDefinition(toOmxId(id));
	}

	@Override
	public boolean isCatalogOfStudyDefinitionLoaded(String id) throws UnknownCatalogException,
			UnknownStudyDefinitionException
	{
		return delegate.isCatalogOfStudyDefinitionLoaded(toOmxId(id));
	}
}
| lgpl-3.0 |
wmaddisn/MesquiteCore | Source/mesquite/ornamental/ManagePictures/ManagePictures.java | 11832 | /* Mesquite source code. Copyright 1997 and onward, W. Maddison and D. Maddison.
Disclaimer: The Mesquite source code is lengthy and we are few. There are no doubt inefficiencies and goofs in this code.
The commenting leaves much to be desired. Please approach this source code with the spirit of helping out.
Perhaps with your help we can be more than a few, and make Mesquite better.
Mesquite is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY.
Mesquite's web site is http://mesquiteproject.org
This source code and its compiled class files are free and modifiable under the terms of
GNU Lesser General Public License. (http://www.gnu.org/copyleft/lesser.html)
*/
package mesquite.ornamental.ManagePictures;
/*~~ */
import java.util.*;
import java.awt.*;
import mesquite.lib.*;
import mesquite.lib.duties.*;
import mesquite.lib.characters.*;
/* ======================================================================== */
/**
 * File-management module that reads picture attachments for taxa, characters
 * and matrix cells from PICTURE commands in the NEXUS NOTES block, storing
 * them as AttachedNote objects on the addressed element.
 */
public class ManagePictures extends FileInit /*implements ElementManager*/ {
/*.................................................................................................................*/
// No startup work is needed; report success so the module is hired.
public boolean startJob(String arguments, Object condition, boolean hiredByName) {
return true;
}
// This module does not itself affect the substance of analyses.
public boolean isSubstantive(){
return false;
}
// Pictures are not managed as separate file elements, so no NEXUS block is created for them.
public NexusBlock elementAdded(FileElement e){
return null;
}
public void elementDisposed(FileElement e){
//nothing needs doing since separate reference not stored locally
}
public Class getElementClass(){
return null;
}
/*.................................................................................................................*
public String getNexusCommands(MesquiteFile file, String blockName){
if (blockName.equalsIgnoreCase("NOTES")) {
String s ="";
boolean found = false;
NameReference ref = NameReference.getNameReference("image");
MesquiteProject project = file.getProject();
for (int i=0; i<project.getNumberTaxas(); i++){
Taxa taxa = getProject().getTaxa(i);
ObjectArray taxNotes = taxa.getWhichAssociatedObject(ref);
for (int it = 0; it<taxa.getNumTaxa(); it++){
String path = taxa.getTaxon(it).getIllustrationPath();
if (path!=null){
s += "\tPICTURE TAXA = " + StringUtil.tokenize(taxa.getName()) + " TAXON = " + (it+1) + " LOC = " + StringUtil.tokenize(MesquiteFile.decomposePath(getProject().getHomeDirectoryName(), path)) + ";" + StringUtil.lineEnding();
found = true;
}
if (taxNotes != null){
Object obj = taxNotes.getValue(it);
if (obj!=null && obj instanceof AttachedNotesVector){
AttachedNotesVector notes = (AttachedNotesVector)obj;
for (int iim=0; iim<notes.getNumNotes(); iim++){
AttachedNote im = notes.getAttachedNote(iim);
s += "\tNOTE TAXA = " + StringUtil.tokenize(taxa.getName()) + " TAXON = " + (it+1) + " " + im.getNexusString() + ";" + StringUtil.lineEnding();
found = true;
}
}
}
}
}
for (int i=0; i<project.getNumberCharMatrices(); i++){
CharacterData data = getProject().getCharacterMatrix(i);
ObjectArray charImages = data.getWhichAssociatedObject(ref);
if (charImages !=null){
for (int ic = 0; ic<data.getNumChars(); ic++){
Object obj = charImages.getValue(ic);
if (obj!=null && obj instanceof AttachedNotesVector){
AttachedNotesVector notes = (AttachedNotesVector)obj;
for (int iim=0; iim<notes.getNumNotes(); iim++){
AttachedNote im = notes.getAttachedNote(iim);
s += "\tNOTE TAXA = " + StringUtil.tokenize(data.getTaxa().getName()) + " CHARACTERS = " + StringUtil.tokenize(data.getName()) + " CHARACTER = " + (ic+1) + " " + im.getNexusString() + ";" + StringUtil.lineEnding();
found = true;
}
}
}
}
Object2DArray cellImages = data.getWhichCellObjects(ref);
if (cellImages !=null){
for (int ic = 0; ic<data.getNumChars(); ic++){
for (int it = 0; it<data.getNumTaxa(); it++){
Object obj = cellImages.getValue(ic, it);
if (obj!=null && obj instanceof AttachedNotesVector){
AttachedNotesVector notes = (AttachedNotesVector)obj;
for (int iim=0; iim<notes.getNumNotes(); iim++){
AttachedNote im = notes.getAttachedNote(iim);
s += "\tNOTE TAXA = " + StringUtil.tokenize(data.getTaxa().getName()) + " CHARACTERS = " + StringUtil.tokenize(data.getName()) + " TAXON = " + (it+1) + " CHARACTER = " + (ic+1) + " " + im.getNexusString() + ";" + StringUtil.lineEnding();
found = true;
}
}
}
}
}
}
if (found)
return s;
else
return null;
}
return null;
}
/*.................................................................................................................*/
/**
 * Reads a single PICTURE command from a NOTES block and attaches the image
 * (with optional comment) to the addressed taxon, whole character, or matrix
 * cell as an AttachedNote.  Subcommands (TAXA, TAXON, CHARACTERS, CHARACTER,
 * SOURCE, COMMENT, PICTURE) may appear in any order before the terminating
 * semicolon; TAXON and CHARACTER numbers are 1-based in the file and
 * converted to 0-based internally.  Returns true whenever the command was
 * consumed -- even if the picture could not be used, so that unusable
 * commands are dropped from the file rather than retained -- and false for
 * commands this module does not handle.
 */
public boolean readNexusCommand(MesquiteFile file, NexusBlock nBlock, String blockName, String command, MesquiteString comment){
if (blockName.equalsIgnoreCase("NOTES")) {
// When blocks are being fused during reading, consume the command without applying it.
boolean fuse = parser.hasFileReadingArgument(file.fileReadingArguments, "fuseTaxaCharBlocks");
if (fuse)
return true;
MesquiteProject project = file.getProject();
MesquiteInteger startCharT = new MesquiteInteger(0);
String commandName = ParseUtil.getToken(command, startCharT);
Taxon taxon = null;
if (!commandName.equalsIgnoreCase("PICTURE"))
return false;
String token = ParseUtil.getToken(command, startCharT);
String dummy;
String pathName = "";
String commentString = null;
// Defaults: first taxa block and first character matrix of the project.
int taxonNumber=MesquiteInteger.unassigned;
Taxa taxa = getProject().getTaxa(0);
int charNumber=MesquiteInteger.unassigned;
CharacterData data = null;
if (getProject().getNumberCharMatrices()>0)
data = getProject().getCharacterMatrix(0);
// Walk subcommand tokens until the terminating semicolon.
while (!StringUtil.blank(token) && !token.equals(";")) {
if (token.equalsIgnoreCase("TAXON")) {
dummy =ParseUtil.getToken(command, startCharT); // =
String whichItem = (ParseUtil.getToken(command, startCharT)); // name of taxon/etc
taxonNumber = MesquiteInteger.fromString(whichItem);
if (MesquiteInteger.isCombinable(taxonNumber))
taxonNumber--; //to convert to internal
}
else if (token.equalsIgnoreCase("TAXA")) {
dummy =ParseUtil.getToken(command, startCharT); // =
String taxaTitle = (ParseUtil.getToken(command, startCharT));
taxa = getProject().getTaxaLastFirst(taxaTitle);
if (taxa == null) {
taxa = getProject().getTaxa(0);
}
}
else if (token.equalsIgnoreCase("SOURCE")) {
dummy =ParseUtil.getToken(command, startCharT); // =
String source = (ParseUtil.getToken(command, startCharT));
if (!("file".equalsIgnoreCase(source))) { //TODO: what if it is "file"? why not deal with it?
file.setOpenAsUntitled("A picture source (\"" + source + "\", in NOTES block) was not recognized.  Mesquite may be unable to read and use the picture.");
if (taxon != null)
taxon.setIllustration(null, pathName);
return true; //returns true without saving object so that the note is deleted from the file
}
}
else if (token.equalsIgnoreCase("CHARACTER")) {
dummy =ParseUtil.getToken(command, startCharT); // =
String whichItem = (ParseUtil.getToken(command, startCharT)); // name of taxon/etc
charNumber = MesquiteInteger.fromString(whichItem);
if (MesquiteInteger.isCombinable(charNumber))
charNumber--; //to convert to internal
}
else if (token.equalsIgnoreCase("CHARACTERS")) {
dummy =ParseUtil.getToken(command, startCharT); // =
String matrixName = (ParseUtil.getToken(command, startCharT));
//logln("   for taxa " + taxaTitle);
data = getProject().getCharacterMatrixByReference(file, matrixName);
if (data == null) {
data = getProject().getCharacterMatrix(0);
}
}
else if (token.equalsIgnoreCase("COMMENT")) {
dummy =ParseUtil.getToken(command, startCharT); // =
commentString = (ParseUtil.getToken(command, startCharT));
}
else if (token.equalsIgnoreCase("PICTURE")) {
if (taxa!=null){
dummy =ParseUtil.getToken(command, startCharT); // =
pathName = ParseUtil.getToken(command, startCharT);
if ((MesquiteInteger.isCombinable(taxonNumber) || MesquiteInteger.isCombinable(charNumber)) && !StringUtil.blank(pathName)) {
//figure out if this is for character, cell, or taxon
// Dispatch on which addresses were supplied: character+taxon = cell,
// character only = whole character, taxon only = taxon.
if (MesquiteInteger.isCombinable(charNumber) && charNumber>=0){ // character or cell
if (MesquiteInteger.isCombinable(taxonNumber) && taxonNumber>=0) { //cell
if (data == null)
return true; //returns true without saving object so that the note is deleted from the file
NameReference imageNameRef = NameReference.getNameReference("notes");
AttachedNotesVector aim = (AttachedNotesVector)data.getCellObject(imageNameRef, charNumber, taxonNumber);
if (aim == null)
aim = new AttachedNotesVector(data);
AttachedNote hL = new AttachedNote();
aim.addNote(hL, false);
hL.setImagePath(pathName, MesquiteFile.composePath(getProject().getHomeDirectoryName(), pathName), false);
hL.setComment(commentString, false);
data.setCellObject(imageNameRef, charNumber, taxonNumber, aim);
data.setCellObjectDisplay(charNumber, taxonNumber);
}
else { //whole character
NameReference imageNameRef = data.makeAssociatedObjects("notes");
AttachedNotesVector aim =(AttachedNotesVector)data.getAssociatedObject(imageNameRef, charNumber);
if (aim == null)
aim = new AttachedNotesVector(data);
AttachedNote hL = new AttachedNote();
aim.addNote(hL, false);
hL.setImagePath(pathName, MesquiteFile.composePath(getProject().getHomeDirectoryName(), pathName), false);
hL.setComment(commentString, false);
data.setAssociatedObject(imageNameRef, charNumber, aim);
}
}
else { //taxon
NameReference imageNameRef = taxa.makeAssociatedObjects("notes");
AttachedNotesVector aim = (AttachedNotesVector)taxa.getAssociatedObject(imageNameRef, taxonNumber);
if (aim == null)
aim = new AttachedNotesVector(taxa);
AttachedNote hL = new AttachedNote();
aim.addNote(hL, false);
hL.setImagePath(pathName, MesquiteFile.composePath(getProject().getHomeDirectoryName(), pathName), false);
hL.setComment(commentString, false);
taxa.setAssociatedObject(imageNameRef, taxonNumber, aim);
}
}
}
}
token = ParseUtil.getToken(command, startCharT);
}
return true;
}
return false;
}
/*.................................................................................................................*/
// Supplies the test object that tells the NEXUS reader which commands this module handles.
public NexusCommandTest getNexusCommandTest(){
return new MPCT();
}
/*.................................................................................................................*/
public String getName() {
return "Manage pictures";
}
/*.................................................................................................................*/
/** returns an explanation of what the module does.*/
public String getExplanation() {
return "Manages (including NEXUS read/write) pictures." ;
}
/*.................................................................................................................*/
}
/** Command test that accepts only the PICTURE command inside a NOTES block. */
class MPCT extends NexusCommandTest{
	public boolean readsWritesCommand(String blockName, String commandName, String command){ //returns whether or not can deal with command
		if (!blockName.equalsIgnoreCase("NOTES"))
			return false;
		return commandName.equalsIgnoreCase("PICTURE");
	}
}
| lgpl-3.0 |
Alfresco/community-edition | projects/repository/source/java/org/alfresco/repo/virtual/bundle/VirtualVersionServiceExtension.java | 24876 | /*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.repo.virtual.bundle;
import java.io.Serializable;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;
import org.alfresco.repo.version.Version2Model;
import org.alfresco.repo.version.VersionModel;
import org.alfresco.repo.version.VersionServicePolicies.CalculateVersionLabelPolicy;
import org.alfresco.repo.version.common.VersionImpl;
import org.alfresco.repo.version.traitextender.VersionServiceExtension;
import org.alfresco.repo.version.traitextender.VersionServiceTrait;
import org.alfresco.repo.virtual.ref.GetParentReferenceMethod;
import org.alfresco.repo.virtual.ref.NodeProtocol;
import org.alfresco.repo.virtual.ref.Reference;
import org.alfresco.repo.virtual.store.VirtualStore;
import org.alfresco.service.cmr.repository.AspectMissingException;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.version.ReservedVersionNameException;
import org.alfresco.service.cmr.version.Version;
import org.alfresco.service.cmr.version.VersionHistory;
import org.alfresco.service.namespace.QName;
import org.alfresco.traitextender.SpringBeanExtension;
public class VirtualVersionServiceExtension extends SpringBeanExtension<VersionServiceExtension, VersionServiceTrait>
implements VersionServiceExtension
{
private VirtualStore smartStore;
public class VirtualVersionHistory implements VersionHistory
{
/**
*
*/
private static final long serialVersionUID = 2640439550254763191L;
private Reference versionedReference;
private VersionHistory actualHistory;
public VirtualVersionHistory(Reference versionedReference, VersionHistory actualHistory)
{
super();
this.versionedReference = versionedReference;
this.actualHistory = actualHistory;
}
@Override
public Version getRootVersion()
{
Version actualRootVersion = actualHistory.getRootVersion();
return VirtualVersionServiceExtension.this.virtualizeVersion(versionedReference,
actualRootVersion);
}
@Override
public Version getHeadVersion()
{
Version actualHeadVersion = actualHistory.getRootVersion();
return VirtualVersionServiceExtension.this.virtualizeVersion(versionedReference,
actualHeadVersion);
}
@Override
public Collection<Version> getAllVersions()
{
Collection<Version> allActualVersions = actualHistory.getAllVersions();
return VirtualVersionServiceExtension.this.virtualizeVersions(versionedReference,
allActualVersions);
}
@Override
public Version getPredecessor(Version version)
{
Version actualVersion = VirtualVersionServiceExtension.this.materializeVersionIfReference(version);
Version actualPredecesor = actualHistory.getPredecessor(actualVersion);
return VirtualVersionServiceExtension.this.virtualizeVersion(versionedReference,
actualPredecesor);
}
@Override
public Collection<Version> getSuccessors(Version version)
{
Version actualVersion = VirtualVersionServiceExtension.this.materializeVersionIfReference(version);
Collection<Version> actualSuccessors = actualHistory.getSuccessors(actualVersion);
return VirtualVersionServiceExtension.this.virtualizeVersions(versionedReference,
actualSuccessors);
}
@Override
public Version getVersion(String versionLabel)
{
Version actualVersion = actualHistory.getVersion(versionLabel);
return VirtualVersionServiceExtension.this.virtualizeVersion(versionedReference,
actualVersion);
}
}
public VirtualVersionServiceExtension()
{
super(VersionServiceTrait.class);
}
public void setSmartStore(VirtualStore smartStore)
{
this.smartStore = smartStore;
}
@Override
public StoreRef getVersionStoreReference()
{
return getTrait().getVersionStoreReference();
}
@Override
public boolean isAVersion(NodeRef nodeRef)
{
VersionServiceTrait theTrait = getTrait();
if (!Reference.isReference(nodeRef))
{
return theTrait.isAVersion(nodeRef);
}
else
{
Reference reference = Reference.fromNodeRef(nodeRef);
NodeRef materialNode = smartStore.materialize(reference);
return theTrait.isAVersion(materialNode);
}
}
@Override
public boolean isVersioned(NodeRef nodeRef)
{
VersionServiceTrait theTrait = getTrait();
if (!Reference.isReference(nodeRef))
{
return theTrait.isVersioned(nodeRef);
}
else
{
Reference reference = Reference.fromNodeRef(nodeRef);
NodeRef materialNode = smartStore.materialize(reference);
return theTrait.isVersioned(materialNode);
}
}
private Version materializeVersionIfReference(Version virtualVersion)
{
NodeRef frozenStateNodeRef = virtualVersion.getFrozenStateNodeRef();
StoreRef frozenStoreRef = frozenStateNodeRef.getStoreRef();
NodeRef materialFrozenNodeRef = frozenStateNodeRef;
if (Reference.isReference(frozenStateNodeRef))
{
Reference frozenReference = Reference.fromNodeRef(frozenStateNodeRef);
materialFrozenNodeRef = smartStore.materialize(frozenReference);
}
Map<String, Serializable> virtualProperties = virtualVersion.getVersionProperties();
Map<String, Serializable> actualProperties = new HashMap<>(virtualProperties);
if (frozenStoreRef.getIdentifier().equals(Version2Model.STORE_ID))
{
// V2 version store (eg. workspace://version2Store)
NodeRef propFrozenNode = (NodeRef) virtualProperties.get(Version2Model.PROP_FROZEN_NODE_REF);
NodeRef propActualFrozenNode = propFrozenNode;
if (Reference.isReference(propFrozenNode))
{
Reference propFrozenReference = Reference.fromNodeRef(propFrozenNode);
propActualFrozenNode = smartStore.materialize(propFrozenReference);
}
actualProperties.put(Version2Model.PROP_FROZEN_NODE_REF,
propActualFrozenNode);
}
else if (frozenStoreRef.getIdentifier().equals(VersionModel.STORE_ID))
{
// Deprecated V1 version store (eg.
// workspace://lightWeightVersionStore)
String frozenNodeStoreProtocol = (String) virtualProperties
.get(VersionModel.PROP_FROZEN_NODE_STORE_PROTOCOL);
String frozenNodeStoreId = (String) virtualProperties.get(VersionModel.PROP_FROZEN_NODE_STORE_ID);
String frozenNodeId = (String) virtualProperties.get(VersionModel.PROP_FROZEN_NODE_ID);
NodeRef propFrozenNode = new NodeRef(frozenNodeStoreProtocol,
frozenNodeStoreId,
frozenNodeId);
NodeRef propActualFrozenNode = propFrozenNode;
if (Reference.isReference(propFrozenNode))
{
Reference propFrozenReference = Reference.fromNodeRef(propFrozenNode);
propActualFrozenNode = smartStore.materialize(propFrozenReference);
}
StoreRef propActualStoreRef = propFrozenNode.getStoreRef();
actualProperties.put(VersionModel.PROP_FROZEN_NODE_STORE_PROTOCOL,
propActualStoreRef.getProtocol());
actualProperties.put(VersionModel.PROP_FROZEN_NODE_STORE_ID,
propActualStoreRef.getIdentifier());
actualProperties.put(VersionModel.PROP_FROZEN_NODE_ID,
propActualFrozenNode.getId());
}
Version actualVersion = new VersionImpl(actualProperties,
materialFrozenNodeRef);
return actualVersion;
}
private Version virtualizeVersion(Reference versionedReference, Version actualVersion)
{
if (actualVersion == null)
{
return null;
}
NodeRef frozenStateNodeRef = actualVersion.getFrozenStateNodeRef();
StoreRef frozenStoreRef = frozenStateNodeRef.getStoreRef();
Reference parentReference = versionedReference.execute(new GetParentReferenceMethod());
Reference virtualFrozenReference = NodeProtocol.newReference(frozenStateNodeRef,
parentReference);
Map<String, Serializable> properties = actualVersion.getVersionProperties();
Map<String, Serializable> virtualProperties = new HashMap<String, Serializable>(properties);
// Switch VersionStore depending on configured impl
if (frozenStoreRef.getIdentifier().equals(Version2Model.STORE_ID))
{
// V2 version store (eg. workspace://version2Store)
NodeRef propFrozenNodeRef = (NodeRef) virtualProperties.get(Version2Model.PROP_FROZEN_NODE_REF);
Reference virtualPropFrozenReference = NodeProtocol.newReference(propFrozenNodeRef,
parentReference);
virtualProperties.put(Version2Model.PROP_FROZEN_NODE_REF,
virtualPropFrozenReference.toNodeRef(propFrozenNodeRef.getStoreRef()));
}
else if (frozenStoreRef.getIdentifier().equals(VersionModel.STORE_ID))
{
// Deprecated V1 version store (eg.
// workspace://lightWeightVersionStore)
String frozenNodeStoreProtocol = (String) virtualProperties
.get(VersionModel.PROP_FROZEN_NODE_STORE_PROTOCOL);
String frozenNodeStoreId = (String) virtualProperties.get(VersionModel.PROP_FROZEN_NODE_STORE_ID);
String frozenNodeId = (String) virtualProperties.get(VersionModel.PROP_FROZEN_NODE_ID);
StoreRef propFrozenStoreRef = new StoreRef(frozenNodeStoreProtocol,
frozenNodeStoreId);
NodeRef propFrozenNode = new NodeRef(propFrozenStoreRef,
frozenNodeId);
Reference virtualPropFrozenReference = NodeProtocol.newReference(propFrozenNode,
parentReference);
NodeRef virtualPropFrozenNodeRef = virtualPropFrozenReference.toNodeRef(propFrozenStoreRef);
virtualProperties.put(VersionModel.PROP_FROZEN_NODE_STORE_PROTOCOL,
propFrozenStoreRef.getProtocol());
virtualProperties.put(VersionModel.PROP_FROZEN_NODE_STORE_ID,
propFrozenStoreRef.getIdentifier());
virtualProperties.put(VersionModel.PROP_FROZEN_NODE_ID,
virtualPropFrozenNodeRef.getId());
}
return new VersionImpl(virtualProperties,
virtualFrozenReference.toNodeRef(frozenStateNodeRef.getStoreRef()));
}
private Collection<Version> virtualizeVersions(Reference versionedReference, Collection<Version> actualVersions)
{
Collection<Version> virtualizedVersions = new LinkedList<>();
for (Version actualVersion : actualVersions)
{
Version virtualizedVersion = virtualizeVersion(versionedReference,
actualVersion);
virtualizedVersions.add(virtualizedVersion);
}
return virtualizedVersions;
}
@Override
public Version createVersion(NodeRef nodeRef, Map<String, Serializable> versionProperties)
throws ReservedVersionNameException, AspectMissingException
{
VersionServiceTrait theTrait = getTrait();
if (!Reference.isReference(nodeRef))
{
return theTrait.createVersion(nodeRef,
versionProperties);
}
else
{
NodeRef materialNode = smartStore.materializeIfPossible(nodeRef);
Version actualVersion = theTrait.createVersion(materialNode,
versionProperties);
Reference reference = Reference.fromNodeRef(nodeRef);
return virtualizeVersion(reference,
actualVersion);
}
}
@Override
public Collection<Version> createVersion(NodeRef nodeRef, Map<String, Serializable> versionProperties,
boolean versionChildren) throws ReservedVersionNameException, AspectMissingException
{
VersionServiceTrait theTrait = getTrait();
if (!Reference.isReference(nodeRef))
{
return theTrait.createVersion(nodeRef,
versionProperties,
versionChildren);
}
else
{
NodeRef materialNode = smartStore.materializeIfPossible(nodeRef);
Collection<Version> actualVersions = theTrait.createVersion(materialNode,
versionProperties,
versionChildren);
Reference reference = Reference.fromNodeRef(nodeRef);
return virtualizeVersions(reference,
actualVersions);
}
}
@Override
public Collection<Version> createVersion(Collection<NodeRef> nodeRefs, Map<String, Serializable> versionProperties)
throws ReservedVersionNameException, AspectMissingException
{
VersionServiceTrait theTrait = getTrait();
Collection<NodeRef> materialNodeRefs = new LinkedList<>();
Map<NodeRef, Reference> materializedNodeRefs = new HashMap<>();
for (NodeRef nodeRef : nodeRefs)
{
if (!Reference.isReference(nodeRef))
{
materialNodeRefs.add(nodeRef);
}
else
{
NodeRef materialNode = smartStore.materializeIfPossible(nodeRef);
materialNodeRefs.add(materialNode);
materializedNodeRefs.put(materialNode,
Reference.fromNodeRef(nodeRef));
}
}
Collection<Version> versions = theTrait.createVersion(materialNodeRefs,
versionProperties);
Collection<Version> virtualizedVersions = new LinkedList<>();
for (Version version : versions)
{
NodeRef versionedNodeRef = version.getVersionedNodeRef();
Reference reference = materializedNodeRefs.get(versionedNodeRef);
if (reference != null)
{
Version virtualizedVersion = virtualizeVersion(reference,
version);
virtualizedVersions.add(virtualizedVersion);
}
else
{
virtualizedVersions.add(version);
}
}
return virtualizedVersions;
}
@Override
public VersionHistory getVersionHistory(NodeRef nodeRef) throws AspectMissingException
{
VersionServiceTrait theTrait = getTrait();
if (!Reference.isReference(nodeRef))
{
return theTrait.getVersionHistory(nodeRef);
}
else
{
Reference reference = Reference.fromNodeRef(nodeRef);
NodeRef materialNode = smartStore.materialize(reference);
VersionHistory actualVersionHistory = theTrait.getVersionHistory(materialNode);
if (actualVersionHistory == null)
{
return null;
}
else
{
Reference versionedReference = Reference.fromNodeRef(nodeRef);
return new VirtualVersionHistory(versionedReference,
actualVersionHistory);
}
}
}
@Override
public Version getCurrentVersion(NodeRef nodeRef)
{
VersionServiceTrait theTrait = getTrait();
if (!Reference.isReference(nodeRef))
{
return theTrait.getCurrentVersion(nodeRef);
}
else
{
Reference reference = Reference.fromNodeRef(nodeRef);
NodeRef materialNode = smartStore.materialize(reference);
Reference versionedReference = Reference.fromNodeRef(nodeRef);
Version actualVersion = theTrait.getCurrentVersion(materialNode);
return virtualizeVersion(versionedReference,
actualVersion);
}
}
@Override
public void revert(NodeRef nodeRef)
{
VersionServiceTrait theTrait = getTrait();
if (!Reference.isReference(nodeRef))
{
theTrait.revert(nodeRef);
}
else
{
Reference reference = Reference.fromNodeRef(nodeRef);
NodeRef materialNode = smartStore.materialize(reference);
theTrait.revert(materialNode);
}
}
@Override
public void revert(NodeRef nodeRef, boolean deep)
{
VersionServiceTrait theTrait = getTrait();
if (!Reference.isReference(nodeRef))
{
theTrait.revert(nodeRef,
deep);
}
else
{
Reference reference = Reference.fromNodeRef(nodeRef);
NodeRef materialNode = smartStore.materialize(reference);
theTrait.revert(materialNode,
deep);
}
}
@Override
public void revert(NodeRef nodeRef, Version version)
{
VersionServiceTrait theTrait = getTrait();
if (!Reference.isReference(nodeRef))
{
theTrait.revert(nodeRef,
version);
}
else
{
Reference reference = Reference.fromNodeRef(nodeRef);
NodeRef materialNode = smartStore.materialize(reference);
Version actualVersion = VirtualVersionServiceExtension.this.materializeVersionIfReference(version);
theTrait.revert(materialNode,
actualVersion);
}
}
@Override
public void revert(NodeRef nodeRef, Version version, boolean deep)
{
VersionServiceTrait theTrait = getTrait();
if (!Reference.isReference(nodeRef))
{
theTrait.revert(nodeRef,
version,
deep);
}
else
{
Reference reference = Reference.fromNodeRef(nodeRef);
NodeRef materialNode = smartStore.materialize(reference);
Version actualVersion = VirtualVersionServiceExtension.this.materializeVersionIfReference(version);
theTrait.revert(materialNode,
actualVersion,
deep);
}
}
@Override
public NodeRef restore(NodeRef nodeRef, NodeRef parentNodeRef, QName assocTypeQName, QName assocQName)
{
VersionServiceTrait theTrait = getTrait();
if (!Reference.isReference(nodeRef))
{
return theTrait.restore(nodeRef,
parentNodeRef,
assocTypeQName,
assocQName);
}
else
{
Reference reference = Reference.fromNodeRef(nodeRef);
NodeRef materialNode = smartStore.materialize(reference);
return theTrait.restore(materialNode,
parentNodeRef,
assocTypeQName,
assocQName);
}
}
@Override
public NodeRef restore(NodeRef nodeRef, NodeRef parentNodeRef, QName assocTypeQName, QName assocQName, boolean deep)
{
VersionServiceTrait theTrait = getTrait();
if (!Reference.isReference(nodeRef))
{
return theTrait.restore(nodeRef,
parentNodeRef,
assocTypeQName,
assocQName,
deep);
}
else
{
Reference reference = Reference.fromNodeRef(nodeRef);
NodeRef materialNode = smartStore.materialize(reference);
return theTrait.restore(materialNode,
parentNodeRef,
assocTypeQName,
assocQName,
deep);
}
}
@Override
public void deleteVersionHistory(NodeRef nodeRef) throws AspectMissingException
{
    // Virtual references have no version history of their own; operate on
    // the material node they wrap.
    NodeRef targetNode = Reference.isReference(nodeRef)
            ? smartStore.materialize(Reference.fromNodeRef(nodeRef))
            : nodeRef;
    getTrait().deleteVersionHistory(targetNode);
}
@Override
public void deleteVersion(NodeRef nodeRef, Version version)
{
    VersionServiceTrait trait = getTrait();
    if (Reference.isReference(nodeRef))
    {
        // Both the node and the version may be virtual; materialize each
        // before delegating.
        NodeRef actualNode = smartStore.materialize(Reference.fromNodeRef(nodeRef));
        trait.deleteVersion(actualNode, materializeVersionIfReference(version));
    }
    else
    {
        trait.deleteVersion(nodeRef, version);
    }
}
@Override
public void ensureVersioningEnabled(NodeRef nodeRef, Map<QName, Serializable> versionProperties)
{
    // Versioning is enabled on the material node; the properties map is
    // passed through untouched.
    NodeRef targetNode = Reference.isReference(nodeRef)
            ? smartStore.materialize(Reference.fromNodeRef(nodeRef))
            : nodeRef;
    getTrait().ensureVersioningEnabled(targetNode, versionProperties);
}
@Override
public void registerVersionLabelPolicy(QName typeQName, CalculateVersionLabelPolicy policy)
{
    // Policy registration has no virtualization concerns; pass straight
    // through to the underlying trait.
    getTrait().registerVersionLabelPolicy(typeQName,
                                          policy);
}
}
| lgpl-3.0 |
kenguest/phing | tests/Phing/Support/HangDetectorProperties.php | 1435 | <?php
/**
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* This software consists of voluntary contributions made by many individuals
* and is licensed under the LGPL. For more information please see
* <http://phing.info>.
*/
namespace Phing\Test\Support;
use Exception;
use Phing\Util\Properties;
/**
 * Properties subclass used by tests to detect runaway (circular) property
 * resolution: it counts lookups and aborts with an exception once a fixed
 * threshold is exceeded, instead of letting the test suite hang.
 *
 * @author Hans Lellelid (Phing)
 * @author Conor MacNeill (Ant)
 */
class HangDetectorProperties extends Properties
{
    /** @var int Number of getProperty() calls observed so far. */
    private $accesses = 0;

    /**
     * Count the lookup and fail fast once the threshold is exceeded.
     *
     * @param string $prop property name being resolved
     * @return mixed the resolved property value
     * @throws Exception after more than 100 lookups, which indicates a
     *                   circular property definition that would otherwise hang
     */
    public function getProperty($prop)
    {
        ++$this->accesses;
        // 100 lookups is far more than any legitimate resolution chain needs.
        if ($this->accesses > 100) {
            // Message typo fixed (was "Cirular definition Hanged!").
            throw new Exception('Circular definition detected: property resolution hung!');
        }

        return parent::getProperty($prop);
    }
}
| lgpl-3.0 |
David-Desmaisons/MVVM.CEF.Glue | Examples/Example.Dictionary.Cfx.Vue/View/Main/src/install.js | 430 | import "bootstrap/dist/css/bootstrap.css";
import "font-awesome/less/font-awesome.less";
/*eslint no-unused-vars: ["error", { "args": "none" }]*/
// Hook for registering global Vue plugins (Vue.use(...)); intentionally a
// no-op in this example application.
function install(Vue) {
  //Call vue use here if needed
}
/*eslint no-unused-vars: ["error", { "args": "none" }]*/
function vueInstanceOption(vm) {
//Return vue global option here, such as vue-router, vue-i18n, mix-ins, ....
return {};
}
export { install, vueInstanceOption };
| lgpl-3.0 |
wmaddisn/MesquiteCore | Source/mesquite/lib/TextContentArea.java | 1775 | /* Mesquite source code. Copyright 1997 and onward, W. Maddison and D. Maddison.
Disclaimer: The Mesquite source code is lengthy and we are few. There are no doubt inefficiencies and goofs in this code.
The commenting leaves much to be desired. Please approach this source code with the spirit of helping out.
Perhaps with your help we can be more than a few, and make Mesquite better.
Mesquite is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY.
Mesquite's web site is http://mesquiteproject.org
This source code and its compiled class files are free and modifiable under the terms of
GNU Lesser General Public License. (http://www.gnu.org/copyleft/lesser.html)
*/
package mesquite.lib;
import java.awt.*;
import java.awt.event.*;
import mesquite.lib.duties.*;
/* ======================================================================== */
/** A ContentArea specifically for text display. Has methods to control text, like a TextArea.*/
class TextContentArea extends ContentArea {
    // The single AWT text widget that fills this content area.
    TextArea tA;

    public TextContentArea () {
        super(null);
        // CardLayout so the text area fills the panel; only one card ("text") is used.
        mainPanel.setLayout(new CardLayout());
        // Monospaced font so columnar text lines up.
        Font fontToSet = new Font ("Monospaced", 0, 12);
        if (fontToSet!=null)
            setFont(fontToSet);
        tA= new TextArea("", 50, 50, TextArea.SCROLLBARS_BOTH); //or SCROLLBARS_VERTICAL_ONLY???
        // Read-only by default; callers opt in to editing via setEditable.
        tA.setEditable(false);
        setBackground(Color.white);
        tA.setBackground(Color.white);
        tA.setVisible(true);
        add(tA, "text");
    }
    /** Toggles whether the user can edit the displayed text. */
    public void setEditable(boolean ed) {
        tA.setEditable(ed);
    }
    /** Paints the text area onto the given graphics context (used for printing). */
    public void print(Graphics g){
        tA.printAll(g);
    }
    /** Appends the given string to the end of the current text. */
    public void append(String s) {
        tA.append(s);
    }
    /** Replaces all current text with the given string. */
    public void setText(String s) {
        tA.setText(s);
    }
    /** Returns the full text currently displayed. */
    public String getText() {
        return tA.getText();
    }
    /** Exposes the underlying AWT TextArea for direct manipulation. */
    public TextArea getTextArea() {
        return tA;
    }
}
| lgpl-3.0 |
marissaDubbelaar/GOAD3.1.1 | molgenis-data/src/main/java/org/molgenis/data/transaction/TransactionalRepositoryDecorator.java | 5223 | package org.molgenis.data.transaction;
import org.molgenis.data.*;
import org.molgenis.data.aggregation.AggregateQuery;
import org.molgenis.data.aggregation.AggregateResult;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.transaction.support.TransactionTemplate;
import java.util.Iterator;
import java.util.List;
import java.util.function.Consumer;
import java.util.stream.Stream;
import static java.util.Objects.requireNonNull;
/**
* Repository decorator that wraps CRUD operations in a (read-only) transaction. Classes that extend from
* {@link AbstractRepositoryDecorator} might not be managed by Spring, so {@link TransactionTemplate} is used instead
* of the {@link Transactional} annotation.
*
* @param <E> entity type
*/
public class TransactionalRepositoryDecorator<E extends Entity> extends AbstractRepositoryDecorator<E>
{
    // The wrapped repository all calls are forwarded to.
    private final Repository<E> decoratedRepo;
    // Supplies the TransactionTemplates that bracket every forwarded call.
    private final PlatformTransactionManager transactionManager;

    public TransactionalRepositoryDecorator(Repository<E> decoratedRepo, PlatformTransactionManager transactionManager)
    {
        this.decoratedRepo = requireNonNull(decoratedRepo);
        this.transactionManager = requireNonNull(transactionManager);
    }

    @Override
    protected Repository<E> delegate()
    {
        return decoratedRepo;
    }

    // --- Query operations: each runs inside its own read-only transaction ---

    @Override
    public void forEachBatched(Consumer<List<E>> consumer, int batchSize)
    {
        createReadonlyTransactionTemplate().execute((status) ->
        {
            decoratedRepo.forEachBatched(consumer, batchSize);
            return null;
        });
    }

    @Override
    public void forEachBatched(Fetch fetch, Consumer<List<E>> consumer, int batchSize)
    {
        createReadonlyTransactionTemplate().execute((status) ->
        {
            decoratedRepo.forEachBatched(fetch, consumer, batchSize);
            return null;
        });
    }

    @Override
    public long count()
    {
        return createReadonlyTransactionTemplate().execute((status) -> decoratedRepo.count());
    }

    @Override
    public long count(Query<E> q)
    {
        return createReadonlyTransactionTemplate().execute((status) -> decoratedRepo.count(q));
    }

    @Override
    public Stream<E> findAll(Query<E> q)
    {
        // NOTE(review): the returned Stream may be consumed after the
        // transaction has committed — verify the delegate materializes it.
        return createReadonlyTransactionTemplate().execute((status) -> decoratedRepo.findAll(q));
    }

    @Override
    public E findOne(Query<E> q)
    {
        return createReadonlyTransactionTemplate().execute((status) -> decoratedRepo.findOne(q));
    }

    @Override
    public E findOneById(Object id)
    {
        return createReadonlyTransactionTemplate().execute((status) -> decoratedRepo.findOneById(id));
    }

    @Override
    public E findOneById(Object id, Fetch fetch)
    {
        return createReadonlyTransactionTemplate().execute((status) -> decoratedRepo.findOneById(id, fetch));
    }

    @Override
    public Stream<E> findAll(Stream<Object> ids)
    {
        return createReadonlyTransactionTemplate().execute((status) -> decoratedRepo.findAll(ids));
    }

    @Override
    public Stream<E> findAll(Stream<Object> ids, Fetch fetch)
    {
        return createReadonlyTransactionTemplate().execute((status) -> decoratedRepo.findAll(ids, fetch));
    }

    @Override
    public AggregateResult aggregate(AggregateQuery aggregateQuery)
    {
        return createReadonlyTransactionTemplate().execute((status) -> decoratedRepo.aggregate(aggregateQuery));
    }

    // --- Mutating operations: each runs inside its own read-write transaction ---

    @Override
    public void update(E entity)
    {
        createWriteTransactionTemplate().execute((status) ->
        {
            decoratedRepo.update(entity);
            return null;
        });
    }

    @Override
    public void update(Stream<E> entities)
    {
        createWriteTransactionTemplate().execute((status) ->
        {
            decoratedRepo.update(entities);
            return null;
        });
    }

    @Override
    public void delete(E entity)
    {
        createWriteTransactionTemplate().execute((status) ->
        {
            decoratedRepo.delete(entity);
            return null;
        });
    }

    @Override
    public void delete(Stream<E> entities)
    {
        createWriteTransactionTemplate().execute((status) ->
        {
            decoratedRepo.delete(entities);
            return null;
        });
    }

    @Override
    public void deleteById(Object id)
    {
        createWriteTransactionTemplate().execute((status) ->
        {
            decoratedRepo.deleteById(id);
            return null;
        });
    }

    @Override
    public void deleteAll(Stream<Object> ids)
    {
        createWriteTransactionTemplate().execute((status) ->
        {
            decoratedRepo.deleteAll(ids);
            return null;
        });
    }

    @Override
    public void deleteAll()
    {
        createWriteTransactionTemplate().execute((status) ->
        {
            decoratedRepo.deleteAll();
            return null;
        });
    }

    @Override
    public void add(E entity)
    {
        createWriteTransactionTemplate().execute((status) ->
        {
            decoratedRepo.add(entity);
            return null;
        });
    }

    @Override
    public Integer add(Stream<E> entities)
    {
        return createWriteTransactionTemplate().execute((status) -> decoratedRepo.add(entities));
    }

    @Override
    public Iterator<E> iterator()
    {
        // NOTE(review): the Iterator outlives the transaction that created it;
        // confirm the delegate's iterator does not require an open transaction.
        return createReadonlyTransactionTemplate().execute((status) -> decoratedRepo.iterator());
    }

    /** Template for mutating operations: default (read-write) transaction settings. */
    private TransactionTemplate createWriteTransactionTemplate()
    {
        return new TransactionTemplate(transactionManager);
    }

    /** Template for query operations, marked read-only as a hint to the transaction manager. */
    private TransactionTemplate createReadonlyTransactionTemplate()
    {
        TransactionTemplate transactionTemplate = new TransactionTemplate(transactionManager);
        transactionTemplate.setReadOnly(true);
        return transactionTemplate;
    }
}
| lgpl-3.0 |
edwinspire/VSharp | class/System.Web.DynamicData/Test/WebPages/DynamicData/FieldTemplates_NonDefault/MonoTests.Common.FooEmpty.ascx.cs | 582 | using System;
using System.Data;
using System.Configuration;
using System.Collections;
using System.Collections.Specialized;
using System.Linq;
using System.Web;
using System.Web.Security;
using System.Web.UI;
using System.Web.UI.WebControls;
using System.Web.UI.WebControls.WebParts;
using System.Web.UI.HtmlControls;
using System.Xml.Linq;
using System.Web.DynamicData;
// Field template used by the DynamicData test pages; exposes the markup's
// Literal control as the template's data control.
public partial class MonoTestsCommonFooEmpty_Field : System.Web.DynamicData.FieldTemplateUserControl {
    // The control that renders the field's value (declared in the .ascx markup).
    public override Control DataControl {
        get {
            return Literal1;
        }
    }
}
| lgpl-3.0 |
David-Desmaisons/MVVM.CEF.Glue | Examples/Example.ChromiumFX.Ko.UI/Properties/Settings.Designer.cs | 1081 | //------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
// Runtime Version:4.0.30319.42000
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace Example.ChromiumFX.Ko.UI.Properties {

    // Designer-generated application-settings singleton; access via Settings.Default.
    [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
    [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "15.7.0.0")]
    internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase {

        // Single shared instance, wrapped by Synchronized for thread-safe access.
        private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));

        public static Settings Default {
            get {
                return defaultInstance;
            }
        }
    }
}
| lgpl-3.0 |
edwinspire/VSharp | class/Managed.Windows.Forms/System.Windows.Forms/DataGridViewSelectedColumnCollection.cs | 3743 | // Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
// Copyright (c) 2005 Novell, Inc. (http://www.novell.com)
//
// Author:
// Pedro Martínez Juliá <pedromj@gmail.com>
//
using System.Collections;
using System.ComponentModel;
namespace System.Windows.Forms
{
[ListBindable (false)]
// Read-only collection of the currently selected grid columns: every public
// mutator throws NotSupportedException; only the grid mutates the contents
// through the Internal* members.
public class DataGridViewSelectedColumnCollection : BaseCollection, IList, ICollection, IEnumerable
{
    internal DataGridViewSelectedColumnCollection ()
    {
    }

    bool IList.IsFixedSize {
        get { return base.List.IsFixedSize; }
    }

    object IList.this [int index] {
        get { return this [index]; }
        set { throw new NotSupportedException("Can't insert or modify this collection."); }
    }

    public DataGridViewColumn this [int index] {
        get { return (DataGridViewColumn) base.List [index]; }
    }

    int IList.Add (object value)
    {
        throw new NotSupportedException ("Can't add elements to this collection.");
    }

    void IList.Clear ()
    {
        Clear ();
    }

    [EditorBrowsable (EditorBrowsableState.Never)]
    public void Clear ()
    {
        throw new NotSupportedException ("This collection cannot be cleared.");
    }

    bool IList.Contains (object value)
    {
        return Contains (value as DataGridViewColumn);
    }

    public bool Contains (DataGridViewColumn dataGridViewColumn)
    {
        return base.List.Contains (dataGridViewColumn);
    }

    public void CopyTo (DataGridViewColumn [] array, int index)
    {
        base.List.CopyTo (array, index);
    }

    int IList.IndexOf (object value)
    {
        return base.List.IndexOf (value);
    }

    void IList.Insert (int index, object value)
    {
        Insert (index, value as DataGridViewColumn);
    }

    [EditorBrowsable (EditorBrowsableState.Never)]
    public void Insert (int index, DataGridViewColumn dataGridViewColumn)
    {
        throw new NotSupportedException ("Insert is not allowed.");
    }

    void IList.Remove (object value)
    {
        throw new NotSupportedException ("Can't remove elements of this collection.");
    }

    void IList.RemoveAt (int index)
    {
        throw new NotSupportedException ("Can't remove elements of this collection.");
    }

    protected override ArrayList List {
        get { return base.List; }
    }

    // --- Internal mutators used by the grid itself ---

    internal void InternalAdd (DataGridViewColumn dataGridViewColumn)
    {
        base.List.Add (dataGridViewColumn);
    }

    internal void InternalAddRange (DataGridViewSelectedColumnCollection columns)
    {
        if (columns == null)
            return;
        // Believe it or not, MS adds the columns in reverse order...
        for (int i = columns.Count - 1; i >= 0; i--)
            base.List.Add (columns [i]);
    }

    internal void InternalClear ()
    {
        List.Clear ();
    }

    internal void InternalRemove (DataGridViewColumn dataGridViewColumn)
    {
        base.List.Remove(dataGridViewColumn);
    }
}
| lgpl-3.0 |
oleneveu/SharpKit-SDK | Defs/Qooxdoo/theme/modern/Decoration.cs | 515 | // Generated by SharpKit.QooxDoo.Generator
using System;
using System.Collections.Generic;
using SharpKit.Html;
using SharpKit.JavaScript;
namespace qx.theme.modern
{
/// <summary>
/// <para>The modern decoration theme.</para>
/// </summary>
[JsType(JsMode.Prototype, Name = "qx.theme.modern.Decoration", OmitOptionalParameters = true, Export = false)]
public partial class Decoration
{
    #region Methods
    // Generated binding stub: exists only to map the JavaScript type and
    // must not be constructed from .NET code (always throws).
    public Decoration() { throw new NotImplementedException(); }
    #endregion Methods
}
} | lgpl-3.0 |
ethereum/go-ethereum | cmd/puppeth/ssh.go | 8633 | // Copyright 2017 The go-ethereum Authors
// This file is part of go-ethereum.
//
// go-ethereum is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// go-ethereum is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with go-ethereum. If not, see <http://www.gnu.org/licenses/>.
package main
import (
"bufio"
"bytes"
"errors"
"fmt"
"io/ioutil"
"net"
"os"
"os/user"
"path/filepath"
"strings"
"github.com/ethereum/go-ethereum/log"
"golang.org/x/crypto/ssh"
"golang.org/x/crypto/ssh/agent"
"golang.org/x/crypto/ssh/terminal"
)
// sshClient is a small wrapper around Go's SSH client with a few utility methods
// implemented on top.
type sshClient struct {
	server  string // Server name or IP without port number
	address string // IP address of the remote server
	pubkey  []byte // RSA public key to authenticate the server
	client  *ssh.Client // Live SSH connection all sessions are created from
	logger  log.Logger  // Logger tagged with this server's name
}

// EnvSSHAuthSock is the environment variable holding the SSH agent socket path.
const EnvSSHAuthSock = "SSH_AUTH_SOCK"
// dial establishes an SSH connection to a remote node using the current user and
// the user's configured private RSA key. If that fails, password authentication
// is fallen back to. server can be a string like user:identity@server:port.
func dial(server string, pubkey []byte) (*sshClient, error) {
	// Figure out username, identity, hostname and port
	hostname := ""
	hostport := server
	username := ""
	identity := "id_rsa" // default
	if strings.Contains(server, "@") {
		prefix := server[:strings.Index(server, "@")]
		if strings.Contains(prefix, ":") {
			username = prefix[:strings.Index(prefix, ":")]
			identity = prefix[strings.Index(prefix, ":")+1:]
		} else {
			username = prefix
		}
		hostport = server[strings.Index(server, "@")+1:]
	}
	if strings.Contains(hostport, ":") {
		hostname = hostport[:strings.Index(hostport, ":")]
	} else {
		hostname = hostport
		hostport += ":22"
	}
	logger := log.New("server", server)
	logger.Debug("Attempting to establish SSH connection")

	user, err := user.Current()
	if err != nil {
		return nil, err
	}
	if username == "" {
		username = user.Username
	}
	// Configure the supported authentication methods (ssh agent, private key and password)
	var (
		auths []ssh.AuthMethod
		conn  net.Conn
	)
	if conn, err = net.Dial("unix", os.Getenv(EnvSSHAuthSock)); err != nil {
		log.Warn("Unable to dial SSH agent, falling back to private keys", "err", err)
	} else {
		client := agent.NewClient(conn)
		auths = append(auths, ssh.PublicKeysCallback(client.Signers))
	}
	// NOTE(review): key-file and password fallbacks are only registered when
	// the agent socket dial above failed (err is still the net.Dial error).
	if err != nil {
		path := filepath.Join(user.HomeDir, ".ssh", identity)
		if buf, err := ioutil.ReadFile(path); err != nil {
			log.Warn("No SSH key, falling back to passwords", "path", path, "err", err)
		} else {
			key, err := ssh.ParsePrivateKey(buf)
			if err != nil {
				// Key is likely passphrase-protected: prompt and retry the parse.
				fmt.Printf("What's the decryption password for %s? (won't be echoed)\n>", path)
				blob, err := terminal.ReadPassword(int(os.Stdin.Fd()))
				fmt.Println()
				if err != nil {
					log.Warn("Couldn't read password", "err", err)
				}
				key, err := ssh.ParsePrivateKeyWithPassphrase(buf, blob)
				if err != nil {
					log.Warn("Failed to decrypt SSH key, falling back to passwords", "path", path, "err", err)
				} else {
					auths = append(auths, ssh.PublicKeys(key))
				}
			} else {
				auths = append(auths, ssh.PublicKeys(key))
			}
		}
		// Last resort: interactive password prompt at connect time.
		auths = append(auths, ssh.PasswordCallback(func() (string, error) {
			fmt.Printf("What's the login password for %s at %s? (won't be echoed)\n> ", username, server)
			blob, err := terminal.ReadPassword(int(os.Stdin.Fd()))
			fmt.Println()
			return string(blob), err
		}))
	}
	// Resolve the IP address of the remote server
	addr, err := net.LookupHost(hostname)
	if err != nil {
		return nil, err
	}
	if len(addr) == 0 {
		return nil, errors.New("no IPs associated with domain")
	}
	// Try to dial in to the remote server
	logger.Trace("Dialing remote SSH server", "user", username)
	// Host-key verification: on first contact (pubkey == nil) the user is
	// asked to confirm and the accepted key is captured into the closed-over
	// pubkey; afterwards the presented key must match it exactly.
	keycheck := func(hostname string, remote net.Addr, key ssh.PublicKey) error {
		// If no public key is known for SSH, ask the user to confirm
		if pubkey == nil {
			fmt.Println()
			fmt.Printf("The authenticity of host '%s (%s)' can't be established.\n", hostname, remote)
			fmt.Printf("SSH key fingerprint is %s [MD5]\n", ssh.FingerprintLegacyMD5(key))
			fmt.Printf("Are you sure you want to continue connecting (yes/no)? ")
			for {
				text, err := bufio.NewReader(os.Stdin).ReadString('\n')
				switch {
				case err != nil:
					return err
				case strings.TrimSpace(text) == "yes":
					pubkey = key.Marshal()
					return nil
				case strings.TrimSpace(text) == "no":
					return errors.New("users says no")
				default:
					fmt.Println("Please answer 'yes' or 'no'")
					continue
				}
			}
		}
		// If a public key exists for this SSH server, check that it matches
		if bytes.Equal(pubkey, key.Marshal()) {
			return nil
		}
		// We have a mismatch, forbid connecting
		return errors.New("ssh key mismatch, readd the machine to update")
	}
	client, err := ssh.Dial("tcp", hostport, &ssh.ClientConfig{User: username, Auth: auths, HostKeyCallback: keycheck})
	if err != nil {
		return nil, err
	}
	// Connection established, return our utility wrapper
	c := &sshClient{
		server:  hostname,
		address: addr[0],
		pubkey:  pubkey,
		client:  client,
		logger:  logger,
	}
	if err := c.init(); err != nil {
		client.Close()
		return nil, err
	}
	return c, nil
}
// init runs some initialization commands on the remote server to ensure it's
// capable of acting as puppeth target.
func (client *sshClient) init() error {
	// Both docker and docker-compose must be installed and runnable remotely;
	// probe each in turn and surface the tool's own output when it misbehaves.
	checks := []struct {
		logMsg string
		cmd    string
		errFmt string
	}{
		{"Verifying if docker is available", "docker version", "docker configured incorrectly: %s"},
		{"Verifying if docker-compose is available", "docker-compose version", "docker-compose configured incorrectly: %s"},
	}
	for _, check := range checks {
		client.logger.Debug(check.logMsg)
		out, err := client.Run(check.cmd)
		if err == nil {
			continue
		}
		if len(out) == 0 {
			return err
		}
		return fmt.Errorf(check.errFmt, out)
	}
	return nil
}
// Close terminates the connection to an SSH server.
func (client *sshClient) Close() error {
	// Delegates to the underlying ssh.Client connection.
	return client.client.Close()
}
// Run executes a command on the remote server and returns the combined output
// along with any error status.
func (client *sshClient) Run(cmd string) ([]byte, error) {
	// Establish a single command session (SSH sessions are one-shot: one
	// command per session, hence a fresh one per call).
	session, err := client.client.NewSession()
	if err != nil {
		return nil, err
	}
	defer session.Close()

	// Execute the command and return any output
	client.logger.Trace("Running command on remote server", "cmd", cmd)
	return session.CombinedOutput(cmd)
}
// Stream executes a command on the remote server and streams all outputs into
// the local stdout and stderr streams.
func (client *sshClient) Stream(cmd string) error {
	// Establish a single command session
	session, err := client.client.NewSession()
	if err != nil {
		return err
	}
	defer session.Close()

	// Wire the remote command's output directly to this process's streams
	// instead of buffering it (contrast with Run).
	session.Stdout = os.Stdout
	session.Stderr = os.Stderr

	// Execute the command and return any output
	client.logger.Trace("Streaming command on remote server", "cmd", cmd)
	return session.Run(cmd)
}
// Upload copies the set of files to a remote server via SCP, creating any non-
// existing folders in the mean time.
func (client *sshClient) Upload(files map[string][]byte) ([]byte, error) {
	// Establish a single command session
	session, err := client.client.NewSession()
	if err != nil {
		return nil, err
	}
	defer session.Close()

	// Create a goroutine that streams the SCP content
	go func() {
		out, _ := session.StdinPipe()
		defer out.Close()
		// NOTE(review): Go map iteration order is random, so files arrive in
		// no particular order; each entry enters its directory and leaves it
		// again ("E") immediately to keep the protocol stateless per file.
		for file, content := range files {
			client.logger.Trace("Uploading file to server", "file", file, "bytes", len(content))

			fmt.Fprintln(out, "D0755", 0, filepath.Dir(file))             // Ensure the folder exists
			fmt.Fprintln(out, "C0644", len(content), filepath.Base(file)) // Create the actual file
			out.Write(content)                                            // Stream the data content
			fmt.Fprint(out, "\x00")                                       // Transfer end with \x00
			fmt.Fprintln(out, "E")                                        // Leave directory (simpler)
		}
	}()
	// Remote scp in "to"/recursive mode consumes the stream written above.
	return session.CombinedOutput("/usr/bin/scp -v -tr ./")
}
| lgpl-3.0 |
nponeccop/HNC | hn_tests/comp-1.cpp | 469 | #include <hn/lib.hpp>
// Machine-generated (HNC test output) lowering of a 'comp' combinator:
// comp(f, g) captures g in a functor object whose h(x) evaluates g(5) + 1.
struct ii_impl
{
	struct comp_impl
	{
		// The captured second argument of comp.
		boost::function<int (int)> g;

		template <typename t2>
		int h(t2 x)
		{
			// NOTE(review): the argument x (and comp's first argument f) are
			// unused — this appears intentional in the generated test case.
			return g(5) + 1;
		};
	};
	template <typename t0, typename t5>
	static boost::function<int (t5)> comp(t0 f, boost::function<int (int)> g)
	{
		typedef comp_impl local;
		local impl = { g };
		// Bind the captured-state functor to its member template h<t5>.
		return hn::bind(impl, &local::h<t5>);
	};
};
// Entry point of the generated test: builds comp(6, ff::incr) and invokes
// the resulting closure (yielding incr(5) + 1 per comp_impl::h).
int ii()
{
	typedef ii_impl local;
	return local::comp<int, hn::unused>(6, &ff::incr);
};
| lgpl-3.0 |
wilywampa/python-mode | pymode/libs/rope/base/pyobjects.py | 8875 | from rope.base.fscommands import _decode_data
from rope.base import ast, exceptions, utils
class PyObject(object):
    """Base of rope's object model; the root type has itself as its `type`."""

    def __init__(self, type_):
        # The root base type is created with type_=None and becomes its own type.
        if type_ is None:
            type_ = self
        self.type = type_

    def get_attributes(self):
        # The root type (self.type is self) has no attributes of its own.
        if self.type is self:
            return {}
        return self.type.get_attributes()

    def get_attribute(self, name):
        """Return the attribute `name` or raise AttributeNotFoundError."""
        if name not in self.get_attributes():
            raise exceptions.AttributeNotFoundError(
                'Attribute %s not found' % name)
        return self.get_attributes()[name]

    def get_type(self):
        return self.type

    def __getitem__(self, key):
        """The same as ``get_attribute(key)``"""
        return self.get_attribute(key)

    def __contains__(self, key):
        """The same as ``key in self.get_attributes()``"""
        return key in self.get_attributes()

    def __eq__(self, obj):
        """Check the equality of two `PyObject`\s

        Currently it is assumed that instances (the direct instances
        of `PyObject`, not the instances of its subclasses) are equal
        if their types are equal.  For every other object like
        defineds or builtins rope assumes objects are reference
        objects and their identities should match.

        """
        if self.__class__ != obj.__class__:
            return False
        if type(self) == PyObject:
            if self is not self.type:
                return self.type == obj.type
            else:
                # The root type is only equal to itself.
                return self.type is obj.type
        return self is obj

    def __ne__(self, obj):
        return not self.__eq__(obj)

    def __hash__(self):
        """See docs for `__eq__()` method"""
        # Plain instances hash by their type (consistent with type-based __eq__).
        if type(self) == PyObject and self != self.type:
            return hash(self.type) + 1
        else:
            return super(PyObject, self).__hash__()

    def __iter__(self):
        """The same as ``iter(self.get_attributes())``"""
        return iter(self.get_attributes())

    # Lazily-built singletons shared by all PyObjects (see _get_base_type
    # and the module-level get_unknown()).
    _types = None
    _unknown = None

    @staticmethod
    def _get_base_type(name):
        # Build the four base types once, on first access; 'Type' is the
        # root and the other three have it as their type.
        if PyObject._types is None:
            PyObject._types = {}
            base_type = PyObject(None)
            PyObject._types['Type'] = base_type
            PyObject._types['Module'] = PyObject(base_type)
            PyObject._types['Function'] = PyObject(base_type)
            PyObject._types['Unknown'] = PyObject(base_type)
        return PyObject._types[name]
def get_base_type(name):
    """Return the base type with name `name`.

    The base types are 'Type', 'Function', 'Module' and 'Unknown'.  It
    was used to check the type of a `PyObject` but currently its use
    is discouraged.  Use classes defined in this module instead.
    For example instead of
    ``pyobject.get_type() == get_base_type('Function')`` use
    ``isinstance(pyobject, AbstractFunction)``.

    You can use `AbstractClass` for classes, `AbstractFunction` for
    functions, and `AbstractModule` for modules.  You can also use
    `PyFunction` and `PyClass` for testing if an object is
    defined somewhere and rope can access its source.  These classes
    provide more methods.

    """
    # Thin wrapper over the lazily-built singleton table on PyObject.
    return PyObject._get_base_type(name)
def get_unknown():
    """Return a pyobject whose type is unknown

    Note that two unknown objects are equal.  So for example you can
    write::

      if pyname.get_object() == get_unknown():
          print('cannot determine what this pyname holds')

    Rope could have used `None` for indicating unknown objects but
    we had to check that in many places.  So actually this method
    returns a null object.

    """
    # Lazily create the shared "unknown" singleton on first use.
    if PyObject._unknown is None:
        PyObject._unknown = PyObject(get_base_type('Unknown'))
    return PyObject._unknown
class AbstractClass(PyObject):
    """Base for class-like pyobjects; typed as the 'Type' base type."""

    def __init__(self):
        super(AbstractClass, self).__init__(get_base_type('Type'))

    def get_name(self):
        # Default stub: subclasses override; returns None when unknown.
        pass

    def get_doc(self):
        pass

    def get_superclasses(self):
        return []
class AbstractFunction(PyObject):
    """Base for function-like pyobjects; typed as the 'Function' base type."""

    def __init__(self):
        super(AbstractFunction, self).__init__(get_base_type('Function'))

    def get_name(self):
        # Default stub: subclasses override; returns None when unknown.
        pass

    def get_doc(self):
        pass

    def get_param_names(self, special_args=True):
        return []

    def get_returned_object(self, args):
        # Without more information the best guess is the unknown object.
        return get_unknown()
class AbstractModule(PyObject):
    """Base for module-like pyobjects; typed as the 'Module' base type."""

    def __init__(self, doc=None):
        # NOTE(review): the `doc` parameter is accepted but unused here.
        super(AbstractModule, self).__init__(get_base_type('Module'))

    def get_doc(self):
        pass

    def get_resource(self):
        pass
class PyDefinedObject(object):
    """Python defined names that rope can access their sources"""

    def __init__(self, pycore, ast_node, parent):
        self.pycore = pycore
        self.ast_node = ast_node
        self.scope = None
        self.parent = parent
        self.structural_attributes = None
        # Cached results registered with the enclosing module so they can be
        # invalidated together (see _PyModule._forget_concluded_data).
        self.concluded_attributes = self.get_module()._get_concluded_data()
        self.attributes = self.get_module()._get_concluded_data()
        self.defineds = None

    # AST visitor class used to collect structural attributes; None means
    # "no children to collect" (subclasses set this).
    visitor_class = None

    @utils.prevent_recursion(lambda: {})
    def _get_structural_attributes(self):
        if self.structural_attributes is None:
            self.structural_attributes = self._create_structural_attributes()
        return self.structural_attributes

    @utils.prevent_recursion(lambda: {})
    def _get_concluded_attributes(self):
        if self.concluded_attributes.get() is None:
            # Structural attributes must exist before concluding extra ones.
            self._get_structural_attributes()
            self.concluded_attributes.set(self._create_concluded_attributes())
        return self.concluded_attributes.get()

    def get_attributes(self):
        # Structural attributes take precedence over concluded ones.
        if self.attributes.get() is None:
            result = dict(self._get_concluded_attributes())
            result.update(self._get_structural_attributes())
            self.attributes.set(result)
        return self.attributes.get()

    def get_attribute(self, name):
        if name in self._get_structural_attributes():
            return self._get_structural_attributes()[name]
        if name in self._get_concluded_attributes():
            return self._get_concluded_attributes()[name]
        raise exceptions.AttributeNotFoundError('Attribute %s not found' %
                                                name)

    def get_scope(self):
        if self.scope is None:
            self.scope = self._create_scope()
        return self.scope

    def get_module(self):
        # Walk parents up to the root, which is the enclosing module.
        current_object = self
        while current_object.parent is not None:
            current_object = current_object.parent
        return current_object

    def get_doc(self):
        # The docstring is the first statement if it is a string expression;
        # it is decoded using the module's source encoding.
        if len(self.get_ast().body) > 0:
            expr = self.get_ast().body[0]
            if isinstance(expr, ast.Expr) and \
               isinstance(expr.value, ast.Str):
                docstring = expr.value.s
                coding = self.get_module().coding
                return _decode_data(docstring, coding)

    def _get_defined_objects(self):
        if self.defineds is None:
            # Visiting for structural attributes also populates self.defineds.
            self._get_structural_attributes()
        return self.defineds

    def _create_structural_attributes(self):
        if self.visitor_class is None:
            return {}
        new_visitor = self.visitor_class(self.pycore, self)
        for child in ast.get_child_nodes(self.ast_node):
            ast.walk(child, new_visitor)
        self.defineds = new_visitor.defineds
        return new_visitor.names

    def _create_concluded_attributes(self):
        return {}

    def get_ast(self):
        return self.ast_node

    def _create_scope(self):
        pass
class PyFunction(PyDefinedObject, AbstractFunction):
    """Only a placeholder; the concrete implementation lives elsewhere."""


class PyClass(PyDefinedObject, AbstractClass):
    """Only a placeholder; the concrete implementation lives elsewhere."""
class _ConcludedData(object):
    """Mutable cell caching a concluded value; ``None`` means "not computed"."""

    def __init__(self):
        self.data_ = None

    def set(self, data):
        """Store *data* as the cached value."""
        self.data_ = data

    def get(self):
        """Return the cached value (``None`` when empty or invalidated)."""
        return self.data_

    # Attribute-style access to the same cell.
    data = property(get, set)

    def _invalidate(self):
        # Drop the cached value so it is recomputed on next access.
        self.set(None)

    def __str__(self):
        return '<%s>' % (self.data,)
class _PyModule(PyDefinedObject, AbstractModule):
    """Common base of modules and packages; owns the concluded-data registry."""

    def __init__(self, pycore, ast_node, resource):
        self.resource = resource
        # All _ConcludedData cells handed out for this module, kept so they
        # can be invalidated together when the module changes.
        self.concluded_data = []
        AbstractModule.__init__(self)
        PyDefinedObject.__init__(self, pycore, ast_node, None)

    def _get_concluded_data(self):
        new_data = _ConcludedData()
        self.concluded_data.append(new_data)
        return new_data

    def _forget_concluded_data(self):
        # Invalidate every cached conclusion derived from this module.
        for data in self.concluded_data:
            data._invalidate()

    def get_resource(self):
        return self.resource
class PyModule(_PyModule):
    """Only a placeholder"""


class PyPackage(_PyModule):
    """Only a placeholder"""


# Name suggests this guards against re-entrant type inference; verify at
# the raise sites before relying on that.
class IsBeingInferredError(exceptions.RopeError):
    pass
| lgpl-3.0 |
xrealm/moc_dev | openCVLibrary343/src/main/java/org/opencv/photo/MergeExposures.java | 1418 | //
// This file is auto-generated. Please don't modify it!
//
package org.opencv.photo;
import java.util.ArrayList;
import java.util.List;
import org.opencv.core.Algorithm;
import org.opencv.core.Mat;
import org.opencv.utils.Converters;
// C++: class MergeExposures
//javadoc: MergeExposures
// Auto-generated JNI wrapper for the native cv::MergeExposures algorithm.
// Do not hand-edit behavior: the file header states it is regenerated.
public class MergeExposures extends Algorithm {

    // Wraps an existing native object; addr is the C++ object's address.
    protected MergeExposures(long addr) { super(addr); }

    // internal usage only
    public static MergeExposures __fromPtr__(long addr) { return new MergeExposures(addr); }

    //
    // C++: void cv::MergeExposures::process(vector_Mat src, Mat& dst, Mat times, Mat response)
    //

    //javadoc: MergeExposures::process(src, dst, times, response)
    public void process(List<Mat> src, Mat dst, Mat times, Mat response)
    {
        // Flatten the Java list of Mats into the single Mat form the JNI
        // layer expects before crossing into native code.
        Mat src_mat = Converters.vector_Mat_to_Mat(src);
        process_0(nativeObj, src_mat.nativeObj, dst.nativeObj, times.nativeObj, response.nativeObj);
        return;
    }

    @Override
    protected void finalize() throws Throwable {
        // Release this wrapper's native object. super.finalize() is
        // deliberately not called here, matching the generated pattern
        // -- presumably to avoid the base class deleting the same native
        // handle again; confirm against the generator before changing.
        delete(nativeObj);
    }

    // C++: void cv::MergeExposures::process(vector_Mat src, Mat& dst, Mat times, Mat response)
    private static native void process_0(long nativeObj, long src_mat_nativeObj, long dst_nativeObj, long times_nativeObj, long response_nativeObj);

    // native support for java finalize()
    private static native void delete(long nativeObj);
}
| lgpl-3.0 |
drpicox/mcxx | tests/02_typecalc_cxx.dg/success_314.cpp | 2007 | /*--------------------------------------------------------------------
(C) Copyright 2006-2012 Barcelona Supercomputing Center
Centro Nacional de Supercomputacion
This file is part of Mercurium C/C++ source-to-source compiler.
See AUTHORS file in the top level directory for information
regarding developers and contributors.
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 3 of the License, or (at your option) any later version.
Mercurium C/C++ source-to-source compiler is distributed in the hope
that it will be useful, but WITHOUT ANY WARRANTY; without even the
implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU Lesser General Public License for more
details.
You should have received a copy of the GNU Lesser General Public
License along with Mercurium C/C++ source-to-source compiler; if
not, write to the Free Software Foundation, Inc., 675 Mass Ave,
Cambridge, MA 02139, USA.
--------------------------------------------------------------------*/
/*
<testinfo>
test_generator=config/mercurium
</testinfo>
*/
// Compile-time assertion machinery (pre-C++11 static_assert emulation).
// STATIC_ASSERTION_FAILURE<false> is declared but never defined, so
// sizeof() on it only compiles when the condition is true.
template <bool x> struct STATIC_ASSERTION_FAILURE;
template <> struct STATIC_ASSERTION_FAILURE<true> { enum { value = 1 }; };

// Helper whose only purpose is to consume the sizeof result as a
// non-type template argument.
template<int x> struct static_assert_test{};

// Trait stub: unconditionally reports the two types as interoperable.
template <typename A, typename B>
struct is_interoperable
{
    static const int value = 1;
};
// Compiler regression test: a conditional expression inside a template
// argument of a dependent sizeof must typecheck.  The typedef forces the
// whole static-assert expression to be evaluated at instantiation time.
// (Code intentionally left exactly as-is -- it IS the test input.)
template < typename Derived1, typename Derived2>
void foo()
{
    typedef static_assert_test< sizeof(STATIC_ASSERTION_FAILURE<is_interoperable<Derived1, Derived2>::value == 0 ? false : true>)> blah;
}
// Trait stub: unconditionally reports A as convertible to B.
template <typename A, typename B>
struct is_convertible
{
    static const int value = 1;
};

// Same regression pattern as foo(), exercised with a single template
// parameter used for both trait arguments.
template < typename Boo1>
void bar()
{
    typedef static_assert_test< sizeof(STATIC_ASSERTION_FAILURE<is_convertible<Boo1, Boo1>::value == 0 ? false : true>)> blah;
}
| lgpl-3.0 |
HarryXR/SimpleNews | swipeback/src/main/java/me/imid/swipebacklayout/lib/ViewDragHelper.java | 62159 | /*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package me.imid.swipebacklayout.lib;
import android.content.Context;
import android.support.v4.view.MotionEventCompat;
import android.support.v4.view.VelocityTrackerCompat;
import android.support.v4.view.ViewCompat;
import android.support.v4.widget.ScrollerCompat;
import android.view.MotionEvent;
import android.view.VelocityTracker;
import android.view.View;
import android.view.ViewConfiguration;
import android.view.ViewGroup;
import android.view.animation.Interpolator;
import java.util.Arrays;
/**
 * ViewDragHelper is a utility class for writing custom ViewGroups. It offers a
 * number of useful operations and state tracking for allowing a user to drag
 * and reposition views within their parent ViewGroup.
 */
public class ViewDragHelper {
    private static final String TAG = "ViewDragHelper";

    /**
     * A null/invalid pointer ID.
     */
    public static final int INVALID_POINTER = -1;

    /**
     * A view is not currently being dragged or animating as a result of a
     * fling/snap.
     */
    public static final int STATE_IDLE = 0;

    /**
     * A view is currently being dragged. The position is currently changing as
     * a result of user input or simulated user input.
     */
    public static final int STATE_DRAGGING = 1;

    /**
     * A view is currently settling into place as a result of a fling or
     * predefined non-interactive motion.
     */
    public static final int STATE_SETTLING = 2;

    /**
     * Edge flag indicating that the left edge should be affected.
     */
    public static final int EDGE_LEFT = 1 << 0;

    /**
     * Edge flag indicating that the right edge should be affected.
     */
    public static final int EDGE_RIGHT = 1 << 1;

    /**
     * Edge flag indicating that the top edge should be affected.
     */
    public static final int EDGE_TOP = 1 << 2;

    /**
     * Edge flag indicating that the bottom edge should be affected.
     */
    public static final int EDGE_BOTTOM = 1 << 3;

    /**
     * Edge flag set indicating all edges should be affected.
     */
    public static final int EDGE_ALL = EDGE_LEFT | EDGE_TOP | EDGE_RIGHT | EDGE_BOTTOM;

    /**
     * Indicates that a check should occur along the horizontal axis
     */
    public static final int DIRECTION_HORIZONTAL = 1 << 0;

    /**
     * Indicates that a check should occur along the vertical axis
     */
    public static final int DIRECTION_VERTICAL = 1 << 1;

    /**
     * Indicates that a check should occur along all axes
     */
    public static final int DIRECTION_ALL = DIRECTION_HORIZONTAL | DIRECTION_VERTICAL;

    // Default width of the edge-detection region, converted to px in the ctor.
    public static final int EDGE_SIZE = 20; // dp

    private static final int BASE_SETTLE_DURATION = 256; // ms

    private static final int MAX_SETTLE_DURATION = 600; // ms

    // Current drag state; idle, dragging or settling
    private int mDragState;

    // Distance to travel before a drag may begin
    private int mTouchSlop;

    // Last known position/pointer tracking
    private int mActivePointerId = INVALID_POINTER;

    // Per-pointer motion history; all of these arrays are indexed by
    // pointer id and grown on demand (see ensureMotionHistorySizeForId).
    private float[] mInitialMotionX;

    private float[] mInitialMotionY;

    private float[] mLastMotionX;

    private float[] mLastMotionY;

    // Per-pointer EDGE_* flag sets recorded at touch-down / while dragging.
    private int[] mInitialEdgeTouched;

    private int[] mEdgeDragsInProgress;

    private int[] mEdgeDragsLocked;

    // Bitmask of pointers currently down: bit n set <=> pointer id n is down.
    private int mPointersDown;

    private VelocityTracker mVelocityTracker;

    private float mMaxVelocity;

    private float mMinVelocity;

    // Width in pixels of the region along each edge that detects edge touches.
    private int mEdgeSize;

    // Combination of EDGE_* flags currently subscribed for edge tracking.
    private int mTrackingEdges;

    private ScrollerCompat mScroller;

    private final Callback mCallback;

    // View currently captured for dragging/settling, or null when idle.
    private View mCapturedView;

    // True only while Callback#onViewReleased is being dispatched; gates
    // settleCapturedViewAt/flingCapturedView.
    private boolean mReleaseInProgress;

    private final ViewGroup mParentView;
    /**
     * A Callback is used as a communication channel with the ViewDragHelper
     * back to the parent view using it. <code>on*</code> methods are invoked on
     * significant events and several accessor methods are expected to provide
     * the ViewDragHelper with more information about the state of the parent
     * view upon request. The callback also makes decisions governing the range
     * and draggability of child views.
     */
    public static abstract class Callback {
        /**
         * Called when the drag state changes. See the <code>STATE_*</code>
         * constants for more information.
         *
         * @param state The new drag state
         * @see #STATE_IDLE
         * @see #STATE_DRAGGING
         * @see #STATE_SETTLING
         */
        public void onViewDragStateChanged(int state) {
        }

        /**
         * Called when the captured view's position changes as the result of a
         * drag or settle.
         *
         * @param changedView View whose position changed
         * @param left New X coordinate of the left edge of the view
         * @param top New Y coordinate of the top edge of the view
         * @param dx Change in X position from the last call
         * @param dy Change in Y position from the last call
         */
        public void onViewPositionChanged(View changedView, int left, int top, int dx, int dy) {
        }

        /**
         * Called when a child view is captured for dragging or settling. The ID
         * of the pointer currently dragging the captured view is supplied. If
         * activePointerId is identified as {@link #INVALID_POINTER} the capture
         * is programmatic instead of pointer-initiated.
         *
         * @param capturedChild Child view that was captured
         * @param activePointerId Pointer id tracking the child capture
         */
        public void onViewCaptured(View capturedChild, int activePointerId) {
        }

        /**
         * Called when the child view is no longer being actively dragged. The
         * fling velocity is also supplied, if relevant. The velocity values may
         * be clamped to system minimums or maximums.
         * <p>
         * Calling code may decide to fling or otherwise release the view to let
         * it settle into place. It should do so using
         * {@link #settleCapturedViewAt(int, int)} or
         * {@link #flingCapturedView(int, int, int, int)}. If the Callback
         * invokes one of these methods, the ViewDragHelper will enter
         * {@link #STATE_SETTLING} and the view capture will not fully end until
         * it comes to a complete stop. If neither of these methods is invoked
         * before <code>onViewReleased</code> returns, the view will stop in
         * place and the ViewDragHelper will return to {@link #STATE_IDLE}.
         * </p>
         *
         * @param releasedChild The captured child view now being released
         * @param xvel X velocity of the pointer as it left the screen in pixels
         *            per second.
         * @param yvel Y velocity of the pointer as it left the screen in pixels
         *            per second.
         */
        public void onViewReleased(View releasedChild, float xvel, float yvel) {
        }

        /**
         * Called when one of the subscribed edges in the parent view has been
         * touched by the user while no child view is currently captured.
         *
         * @param edgeFlags A combination of edge flags describing the edge(s)
         *            currently touched
         * @param pointerId ID of the pointer touching the described edge(s)
         * @see #EDGE_LEFT
         * @see #EDGE_TOP
         * @see #EDGE_RIGHT
         * @see #EDGE_BOTTOM
         */
        public void onEdgeTouched(int edgeFlags, int pointerId) {
        }

        /**
         * Called when the given edge may become locked. This can happen if an
         * edge drag was preliminarily rejected before beginning, but after
         * {@link #onEdgeTouched(int, int)} was called. This method should
         * return true to lock this edge or false to leave it unlocked. The
         * default behavior is to leave edges unlocked.
         *
         * @param edgeFlags A combination of edge flags describing the edge(s)
         *            locked
         * @return true to lock the edge, false to leave it unlocked
         */
        public boolean onEdgeLock(int edgeFlags) {
            return false;
        }

        /**
         * Called when the user has started a deliberate drag away from one of
         * the subscribed edges in the parent view while no child view is
         * currently captured.
         *
         * @param edgeFlags A combination of edge flags describing the edge(s)
         *            dragged
         * @param pointerId ID of the pointer touching the described edge(s)
         * @see #EDGE_LEFT
         * @see #EDGE_TOP
         * @see #EDGE_RIGHT
         * @see #EDGE_BOTTOM
         */
        public void onEdgeDragStarted(int edgeFlags, int pointerId) {
        }

        /**
         * Called to determine the Z-order of child views.
         *
         * @param index the ordered position to query for
         * @return index of the view that should be ordered at position
         *         <code>index</code>
         */
        public int getOrderedChildIndex(int index) {
            return index;
        }

        /**
         * Return the magnitude of a draggable child view's horizontal range of
         * motion in pixels. This method should return 0 for views that cannot
         * move horizontally.
         *
         * @param child Child view to check
         * @return range of horizontal motion in pixels
         */
        public int getViewHorizontalDragRange(View child) {
            return 0;
        }

        /**
         * Return the magnitude of a draggable child view's vertical range of
         * motion in pixels. This method should return 0 for views that cannot
         * move vertically.
         *
         * @param child Child view to check
         * @return range of vertical motion in pixels
         */
        public int getViewVerticalDragRange(View child) {
            return 0;
        }

        /**
         * Called when the user's input indicates that they want to capture the
         * given child view with the pointer indicated by pointerId. The
         * callback should return true if the user is permitted to drag the
         * given view with the indicated pointer.
         * <p>
         * ViewDragHelper may call this method multiple times for the same view
         * even if the view is already captured; this indicates that a new
         * pointer is trying to take control of the view.
         * </p>
         * <p>
         * If this method returns true, a call to
         * {@link #onViewCaptured(android.view.View, int)} will follow if the
         * capture is successful.
         * </p>
         *
         * @param child Child the user is attempting to capture
         * @param pointerId ID of the pointer attempting the capture
         * @return true if capture should be allowed, false otherwise
         */
        public abstract boolean tryCaptureView(View child, int pointerId);

        /**
         * Restrict the motion of the dragged child view along the horizontal
         * axis. The default implementation does not allow horizontal motion;
         * the extending class must override this method and provide the desired
         * clamping.
         *
         * @param child Child view being dragged
         * @param left Attempted motion along the X axis
         * @param dx Proposed change in position for left
         * @return The new clamped position for left
         */
        public int clampViewPositionHorizontal(View child, int left, int dx) {
            return 0;
        }

        /**
         * Restrict the motion of the dragged child view along the vertical
         * axis. The default implementation does not allow vertical motion; the
         * extending class must override this method and provide the desired
         * clamping.
         *
         * @param child Child view being dragged
         * @param top Attempted motion along the Y axis
         * @param dy Proposed change in position for top
         * @return The new clamped position for top
         */
        public int clampViewPositionVertical(View child, int top, int dy) {
            return 0;
        }
    }
    /**
     * Interpolator defining the animation curve for mScroller
     */
    private static final Interpolator sInterpolator = new Interpolator() {
        public float getInterpolation(float t) {
            // Ease-out quintic: (t-1)^5 + 1 -- fast start, gentle stop.
            t -= 1.0f;
            return t * t * t * t * t + 1.0f;
        }
    };

    private final Runnable mSetIdleRunnable = new Runnable() {
        public void run() {
            // Posted from continueSettling(true) so the IDLE transition
            // happens outside of layout/draw.
            setDragState(STATE_IDLE);
        }
    };
    /**
     * Factory method to create a new ViewDragHelper.
     *
     * @param forParent Parent view to monitor
     * @param cb Callback to provide information and receive events
     * @return a new ViewDragHelper instance
     */
    public static ViewDragHelper create(ViewGroup forParent, Callback cb) {
        return new ViewDragHelper(forParent.getContext(), forParent, cb);
    }

    /**
     * Factory method to create a new ViewDragHelper.
     *
     * @param forParent Parent view to monitor
     * @param sensitivity Multiplier for how sensitive the helper should be
     *            about detecting the start of a drag. Larger values are more
     *            sensitive. 1.0f is normal.
     * @param cb Callback to provide information and receive events
     * @return a new ViewDragHelper instance
     */
    public static ViewDragHelper create(ViewGroup forParent, float sensitivity, Callback cb) {
        final ViewDragHelper helper = create(forParent, cb);
        // Larger sensitivity => smaller touch slop => drags start sooner.
        helper.mTouchSlop = (int) (helper.mTouchSlop * (1 / sensitivity));
        return helper;
    }
    /**
     * Apps should use ViewDragHelper.create() to get a new instance. This will
     * allow VDH to use internal compatibility implementations for different
     * platform versions.
     *
     * @param context Context to initialize config-dependent params from
     * @param forParent Parent view to monitor
     */
    private ViewDragHelper(Context context, ViewGroup forParent, Callback cb) {
        if (forParent == null) {
            throw new IllegalArgumentException("Parent view may not be null");
        }
        if (cb == null) {
            throw new IllegalArgumentException("Callback may not be null");
        }
        mParentView = forParent;
        mCallback = cb;

        // Seed touch/fling thresholds from the device's ViewConfiguration.
        final ViewConfiguration vc = ViewConfiguration.get(context);
        final float density = context.getResources().getDisplayMetrics().density;
        // Convert the default edge size from dp to px (+0.5f to round).
        mEdgeSize = (int) (EDGE_SIZE * density + 0.5f);

        mTouchSlop = vc.getScaledTouchSlop();
        mMaxVelocity = vc.getScaledMaximumFlingVelocity();
        mMinVelocity = vc.getScaledMinimumFlingVelocity();
        mScroller = ScrollerCompat.create(context, sInterpolator);
    }
/**
* Sets the sensitivity of the dragger.
*
* @param context The application context.
* @param sensitivity value between 0 and 1, the final value for touchSlop =
* ViewConfiguration.getScaledTouchSlop * (1 / s);
*/
public void setSensitivity(Context context, float sensitivity) {
float s = Math.max(0f, Math.min(1.0f, sensitivity));
ViewConfiguration viewConfiguration = ViewConfiguration.get(context);
mTouchSlop = (int) (viewConfiguration.getScaledTouchSlop() * (1 / s));
}
    /**
     * Set the minimum velocity that will be detected as having a magnitude
     * greater than zero in pixels per second. Callback methods accepting a
     * velocity will be clamped appropriately.
     *
     * @param minVel minimum velocity to detect
     */
    public void setMinVelocity(float minVel) {
        mMinVelocity = minVel;
    }

    /**
     * Set the maximum velocity that will be reported, in pixels per second.
     * Velocity values passed to callback methods will be clamped so that
     * their magnitude never exceeds this value.
     *
     * @param maxVel max velocity to detect
     */
    public void setMaxVelocity(float maxVel) {
        mMaxVelocity = maxVel;
    }

    /**
     * Return the currently configured minimum velocity. Any flings with a
     * magnitude less than this value in pixels per second are treated as
     * having zero velocity: callback methods accepting a velocity will
     * receive zero as a velocity value if the real detected velocity was
     * below this threshold.
     *
     * @return the minimum velocity that will be detected
     */
    public float getMinVelocity() {
        return mMinVelocity;
    }

    /**
     * Retrieve the current drag state of this helper. This will return one of
     * {@link #STATE_IDLE}, {@link #STATE_DRAGGING} or {@link #STATE_SETTLING}.
     *
     * @return The current drag state
     */
    public int getViewDragState() {
        return mDragState;
    }

    /**
     * Enable edge tracking for the selected edges of the parent view. The
     * callback's
     * {@link me.imid.swipebacklayout.lib.ViewDragHelper.Callback#onEdgeTouched(int, int)}
     * and
     * {@link me.imid.swipebacklayout.lib.ViewDragHelper.Callback#onEdgeDragStarted(int, int)}
     * methods will only be invoked for edges for which edge tracking has been
     * enabled.
     *
     * @param edgeFlags Combination of edge flags describing the edges to watch
     * @see #EDGE_LEFT
     * @see #EDGE_TOP
     * @see #EDGE_RIGHT
     * @see #EDGE_BOTTOM
     */
    public void setEdgeTrackingEnabled(int edgeFlags) {
        mTrackingEdges = edgeFlags;
    }

    /**
     * Return the size of an edge. This is the range in pixels along the edges
     * of this view that will actively detect edge touches or drags if edge
     * tracking is enabled.
     *
     * @return The size of an edge in pixels
     * @see #setEdgeTrackingEnabled(int)
     */
    public int getEdgeSize() {
        return mEdgeSize;
    }

    /**
     * Set the size of an edge. This is the range in pixels along the edges of
     * this view that will actively detect edge touches or drags if edge
     * tracking is enabled.
     *
     * @param size The size of an edge in pixels
     */
    public void setEdgeSize(int size) {
        mEdgeSize = size;
    }
    /**
     * Capture a specific child view for dragging within the parent. The
     * callback will be notified but
     * {@link me.imid.swipebacklayout.lib.ViewDragHelper.Callback#tryCaptureView(android.view.View, int)}
     * will not be asked permission to capture this view.
     *
     * @param childView Child view to capture
     * @param activePointerId ID of the pointer that is dragging the captured
     *            child view
     * @throws IllegalArgumentException if childView's parent is not the
     *             tracked parent view
     */
    public void captureChildView(View childView, int activePointerId) {
        if (childView.getParent() != mParentView) {
            throw new IllegalArgumentException("captureChildView: parameter must be a descendant "
                    + "of the ViewDragHelper's tracked parent view (" + mParentView + ")");
        }

        mCapturedView = childView;
        mActivePointerId = activePointerId;
        mCallback.onViewCaptured(childView, activePointerId);
        setDragState(STATE_DRAGGING);
    }

    /**
     * @return The currently captured view, or null if no view has been
     *         captured.
     */
    public View getCapturedView() {
        return mCapturedView;
    }

    /**
     * @return The ID of the pointer currently dragging the captured view, or
     *         {@link #INVALID_POINTER}.
     */
    public int getActivePointerId() {
        return mActivePointerId;
    }

    /**
     * @return The minimum distance in pixels that the user must travel to
     *         initiate a drag
     */
    public int getTouchSlop() {
        return mTouchSlop;
    }

    /**
     * The result of a call to this method is equivalent to
     * {@link #processTouchEvent(android.view.MotionEvent)} receiving an
     * ACTION_CANCEL event.
     */
    public void cancel() {
        mActivePointerId = INVALID_POINTER;
        clearMotionHistory();

        // Return the tracker to the pool; a new one is obtained on the
        // next touch sequence.
        if (mVelocityTracker != null) {
            mVelocityTracker.recycle();
            mVelocityTracker = null;
        }
    }
    /**
     * {@link #cancel()}, but also abort all motion in progress and snap to the
     * end of any animation.
     */
    public void abort() {
        cancel();
        if (mDragState == STATE_SETTLING) {
            // Jump the scroller to its final position and report the
            // resulting delta as a single position change.
            final int oldX = mScroller.getCurrX();
            final int oldY = mScroller.getCurrY();
            mScroller.abortAnimation();
            final int newX = mScroller.getCurrX();
            final int newY = mScroller.getCurrY();
            mCallback.onViewPositionChanged(mCapturedView, newX, newY, newX - oldX, newY - oldY);
        }
        setDragState(STATE_IDLE);
    }

    /**
     * Animate the view <code>child</code> to the given (left, top) position. If
     * this method returns true, the caller should invoke
     * {@link #continueSettling(boolean)} on each subsequent frame to continue
     * the motion until it returns false. If this method returns false there is
     * no further work to do to complete the movement.
     * <p>
     * This operation does not count as a capture event, though
     * {@link #getCapturedView()} will still report the sliding view while the
     * slide is in progress.
     * </p>
     *
     * @param child Child view to capture and animate
     * @param finalLeft Final left position of child
     * @param finalTop Final top position of child
     * @return true if animation should continue through
     *         {@link #continueSettling(boolean)} calls
     */
    public boolean smoothSlideViewTo(View child, int finalLeft, int finalTop) {
        mCapturedView = child;
        mActivePointerId = INVALID_POINTER;

        // Zero velocity: duration is derived purely from distance.
        return forceSettleCapturedViewAt(finalLeft, finalTop, 0, 0);
    }

    /**
     * Settle the captured view at the given (left, top) position. The
     * appropriate velocity from prior motion will be taken into account. If
     * this method returns true, the caller should invoke
     * {@link #continueSettling(boolean)} on each subsequent frame to continue
     * the motion until it returns false. If this method returns false there is
     * no further work to do to complete the movement.
     *
     * @param finalLeft Settled left edge position for the captured view
     * @param finalTop Settled top edge position for the captured view
     * @return true if animation should continue through
     *         {@link #continueSettling(boolean)} calls
     * @throws IllegalStateException if called outside of
     *             Callback#onViewReleased
     */
    public boolean settleCapturedViewAt(int finalLeft, int finalTop) {
        if (!mReleaseInProgress) {
            throw new IllegalStateException("Cannot settleCapturedViewAt outside of a call to "
                    + "Callback#onViewReleased");
        }

        return forceSettleCapturedViewAt(finalLeft, finalTop,
                (int) VelocityTrackerCompat.getXVelocity(mVelocityTracker, mActivePointerId),
                (int) VelocityTrackerCompat.getYVelocity(mVelocityTracker, mActivePointerId));
    }
    /**
     * Settle the captured view at the given (left, top) position.
     *
     * @param finalLeft Target left position for the captured view
     * @param finalTop Target top position for the captured view
     * @param xvel Horizontal velocity
     * @param yvel Vertical velocity
     * @return true if animation should continue through
     *         {@link #continueSettling(boolean)} calls
     */
    private boolean forceSettleCapturedViewAt(int finalLeft, int finalTop, int xvel, int yvel) {
        final int startLeft = mCapturedView.getLeft();
        final int startTop = mCapturedView.getTop();
        final int dx = finalLeft - startLeft;
        final int dy = finalTop - startTop;

        if (dx == 0 && dy == 0) {
            // Nothing to do. Send callbacks, be done.
            mScroller.abortAnimation();
            setDragState(STATE_IDLE);
            return false;
        }

        final int duration = computeSettleDuration(mCapturedView, dx, dy, xvel, yvel);
        mScroller.startScroll(startLeft, startTop, dx, dy, duration);

        setDragState(STATE_SETTLING);
        return true;
    }

    // Blend the per-axis durations, weighting each axis by its share of the
    // total velocity (or of the total distance when that axis has no
    // velocity). Callers guarantee dx/dy are not both zero, so
    // addedDistance is nonzero whenever a distance weight is used.
    private int computeSettleDuration(View child, int dx, int dy, int xvel, int yvel) {
        xvel = clampMag(xvel, (int) mMinVelocity, (int) mMaxVelocity);
        yvel = clampMag(yvel, (int) mMinVelocity, (int) mMaxVelocity);
        final int absDx = Math.abs(dx);
        final int absDy = Math.abs(dy);
        final int absXVel = Math.abs(xvel);
        final int absYVel = Math.abs(yvel);
        final int addedVel = absXVel + absYVel;
        final int addedDistance = absDx + absDy;

        final float xweight = xvel != 0 ? (float) absXVel / addedVel : (float) absDx
                / addedDistance;
        final float yweight = yvel != 0 ? (float) absYVel / addedVel : (float) absDy
                / addedDistance;

        int xduration = computeAxisDuration(dx, xvel, mCallback.getViewHorizontalDragRange(child));
        int yduration = computeAxisDuration(dy, yvel, mCallback.getViewVerticalDragRange(child));

        return (int) (xduration * xweight + yduration * yweight);
    }

    // Duration for one axis: velocity-based when a velocity is available,
    // otherwise proportional to the fraction of the motion range covered.
    private int computeAxisDuration(int delta, int velocity, int motionRange) {
        if (delta == 0) {
            return 0;
        }

        final int width = mParentView.getWidth();
        final int halfWidth = width / 2;
        final float distanceRatio = Math.min(1f, (float) Math.abs(delta) / width);
        // Non-linear easing of the effective distance (see
        // distanceInfluenceForSnapDuration).
        final float distance = halfWidth + halfWidth
                * distanceInfluenceForSnapDuration(distanceRatio);

        int duration;
        velocity = Math.abs(velocity);
        if (velocity > 0) {
            duration = 4 * Math.round(1000 * Math.abs(distance / velocity));
        } else {
            final float range = (float) Math.abs(delta) / motionRange;
            duration = (int) ((range + 1) * BASE_SETTLE_DURATION);
        }
        return Math.min(duration, MAX_SETTLE_DURATION);
    }
/**
* Clamp the magnitude of value for absMin and absMax. If the value is below
* the minimum, it will be clamped to zero. If the value is above the
* maximum, it will be clamped to the maximum.
*
* @param value Value to clamp
* @param absMin Absolute value of the minimum significant value to return
* @param absMax Absolute value of the maximum value to return
* @return The clamped value with the same sign as <code>value</code>
*/
private int clampMag(int value, int absMin, int absMax) {
final int absValue = Math.abs(value);
if (absValue < absMin)
return 0;
if (absValue > absMax)
return value > 0 ? absMax : -absMax;
return value;
}
/**
* Clamp the magnitude of value for absMin and absMax. If the value is below
* the minimum, it will be clamped to zero. If the value is above the
* maximum, it will be clamped to the maximum.
*
* @param value Value to clamp
* @param absMin Absolute value of the minimum significant value to return
* @param absMax Absolute value of the maximum value to return
* @return The clamped value with the same sign as <code>value</code>
*/
private float clampMag(float value, float absMin, float absMax) {
final float absValue = Math.abs(value);
if (absValue < absMin)
return 0;
if (absValue > absMax)
return value > 0 ? absMax : -absMax;
return value;
}
private float distanceInfluenceForSnapDuration(float f) {
f -= 0.5f; // center the values about 0.
f *= 0.3f * Math.PI / 2.0f;
return (float) Math.sin(f);
}
    /**
     * Settle the captured view based on standard free-moving fling behavior.
     * The caller should invoke {@link #continueSettling(boolean)} on each
     * subsequent frame to continue the motion until it returns false.
     *
     * @param minLeft Minimum X position for the view's left edge
     * @param minTop Minimum Y position for the view's top edge
     * @param maxLeft Maximum X position for the view's left edge
     * @param maxTop Maximum Y position for the view's top edge
     * @throws IllegalStateException if called outside of
     *             Callback#onViewReleased
     */
    public void flingCapturedView(int minLeft, int minTop, int maxLeft, int maxTop) {
        if (!mReleaseInProgress) {
            throw new IllegalStateException("Cannot flingCapturedView outside of a call to "
                    + "Callback#onViewReleased");
        }

        mScroller.fling(mCapturedView.getLeft(), mCapturedView.getTop(),
                (int) VelocityTrackerCompat.getXVelocity(mVelocityTracker, mActivePointerId),
                (int) VelocityTrackerCompat.getYVelocity(mVelocityTracker, mActivePointerId),
                minLeft, maxLeft, minTop, maxTop);

        setDragState(STATE_SETTLING);
    }

    /**
     * Move the captured settling view by the appropriate amount for the current
     * time. If <code>continueSettling</code> returns true, the caller should
     * call it again on the next frame to continue.
     *
     * @param deferCallbacks true if state callbacks should be deferred via
     *            posted message. Set this to true if you are calling this
     *            method from {@link android.view.View#computeScroll()} or
     *            similar methods invoked as part of layout or drawing.
     * @return true if settle is still in progress
     */
    public boolean continueSettling(boolean deferCallbacks) {
        if (mDragState == STATE_SETTLING) {
            boolean keepGoing = mScroller.computeScrollOffset();
            final int x = mScroller.getCurrX();
            final int y = mScroller.getCurrY();
            final int dx = x - mCapturedView.getLeft();
            final int dy = y - mCapturedView.getTop();

            if (dx != 0) {
                mCapturedView.offsetLeftAndRight(dx);
            }
            if (dy != 0) {
                mCapturedView.offsetTopAndBottom(dy);
            }

            if (dx != 0 || dy != 0) {
                mCallback.onViewPositionChanged(mCapturedView, x, y, dx, dy);
            }

            if (keepGoing && x == mScroller.getFinalX() && y == mScroller.getFinalY()) {
                // Close enough. The interpolator/scroller might think we're
                // still moving
                // but the user sure doesn't.
                mScroller.abortAnimation();
                keepGoing = mScroller.isFinished();
            }

            if (!keepGoing) {
                if (deferCallbacks) {
                    // Defer the IDLE transition out of layout/draw.
                    mParentView.post(mSetIdleRunnable);
                } else {
                    setDragState(STATE_IDLE);
                }
            }
        }

        return mDragState == STATE_SETTLING;
    }

    /**
     * Like all callback events this must happen on the UI thread, but release
     * involves some extra semantics. During a release (mReleaseInProgress) is
     * the only time it is valid to call {@link #settleCapturedViewAt(int, int)}
     * or {@link #flingCapturedView(int, int, int, int)}.
     */
    private void dispatchViewReleased(float xvel, float yvel) {
        mReleaseInProgress = true;
        mCallback.onViewReleased(mCapturedView, xvel, yvel);
        mReleaseInProgress = false;

        if (mDragState == STATE_DRAGGING) {
            // onViewReleased didn't call a method that would have changed this.
            // Go idle.
            setDragState(STATE_IDLE);
        }
    }
    // Reset all per-pointer tracking state. No-op before the arrays have
    // been allocated (first touch allocates them lazily).
    private void clearMotionHistory() {
        if (mInitialMotionX == null) {
            return;
        }
        Arrays.fill(mInitialMotionX, 0);
        Arrays.fill(mInitialMotionY, 0);
        Arrays.fill(mLastMotionX, 0);
        Arrays.fill(mLastMotionY, 0);
        Arrays.fill(mInitialEdgeTouched, 0);
        Arrays.fill(mEdgeDragsInProgress, 0);
        Arrays.fill(mEdgeDragsLocked, 0);
        mPointersDown = 0;
    }

    // Reset tracking state for a single pointer id and clear its "down" bit.
    private void clearMotionHistory(int pointerId) {
        if (mInitialMotionX == null) {
            return;
        }
        mInitialMotionX[pointerId] = 0;
        mInitialMotionY[pointerId] = 0;
        mLastMotionX[pointerId] = 0;
        mLastMotionY[pointerId] = 0;
        mInitialEdgeTouched[pointerId] = 0;
        mEdgeDragsInProgress[pointerId] = 0;
        mEdgeDragsLocked[pointerId] = 0;
        mPointersDown &= ~(1 << pointerId);
    }

    // Grow (never shrink) the per-pointer arrays so pointerId is a valid
    // index, preserving existing contents.
    private void ensureMotionHistorySizeForId(int pointerId) {
        if (mInitialMotionX == null || mInitialMotionX.length <= pointerId) {
            float[] imx = new float[pointerId + 1];
            float[] imy = new float[pointerId + 1];
            float[] lmx = new float[pointerId + 1];
            float[] lmy = new float[pointerId + 1];
            int[] iit = new int[pointerId + 1];
            int[] edip = new int[pointerId + 1];
            int[] edl = new int[pointerId + 1];

            if (mInitialMotionX != null) {
                System.arraycopy(mInitialMotionX, 0, imx, 0, mInitialMotionX.length);
                System.arraycopy(mInitialMotionY, 0, imy, 0, mInitialMotionY.length);
                System.arraycopy(mLastMotionX, 0, lmx, 0, mLastMotionX.length);
                System.arraycopy(mLastMotionY, 0, lmy, 0, mLastMotionY.length);
                System.arraycopy(mInitialEdgeTouched, 0, iit, 0, mInitialEdgeTouched.length);
                System.arraycopy(mEdgeDragsInProgress, 0, edip, 0, mEdgeDragsInProgress.length);
                System.arraycopy(mEdgeDragsLocked, 0, edl, 0, mEdgeDragsLocked.length);
            }

            mInitialMotionX = imx;
            mInitialMotionY = imy;
            mLastMotionX = lmx;
            mLastMotionY = lmy;
            mInitialEdgeTouched = iit;
            mEdgeDragsInProgress = edip;
            mEdgeDragsLocked = edl;
        }
    }

    // Record the touch-down position and edge flags for a pointer, and mark
    // it as down in the mPointersDown bitmask.
    private void saveInitialMotion(float x, float y, int pointerId) {
        ensureMotionHistorySizeForId(pointerId);
        mInitialMotionX[pointerId] = mLastMotionX[pointerId] = x;
        mInitialMotionY[pointerId] = mLastMotionY[pointerId] = y;
        mInitialEdgeTouched[pointerId] = getEdgeTouched((int) x, (int) y);
        mPointersDown |= 1 << pointerId;
    }

    // Update the last-known position of every pointer present in the event.
    private void saveLastMotion(MotionEvent ev) {
        final int pointerCount = MotionEventCompat.getPointerCount(ev);
        for (int i = 0; i < pointerCount; i++) {
            final int pointerId = MotionEventCompat.getPointerId(ev, i);
            final float x = MotionEventCompat.getX(ev, i);
            final float y = MotionEventCompat.getY(ev, i);
            mLastMotionX[pointerId] = x;
            mLastMotionY[pointerId] = y;
        }
    }

    /**
     * Check if the given pointer ID represents a pointer that is currently down
     * (to the best of the ViewDragHelper's knowledge).
     * <p>
     * The state used to report this information is populated by the methods
     * {@link #shouldInterceptTouchEvent(android.view.MotionEvent)} or
     * {@link #processTouchEvent(android.view.MotionEvent)}. If one of these
     * methods has not been called for all relevant MotionEvents to track, the
     * information reported by this method may be stale or incorrect.
     * </p>
     *
     * @param pointerId pointer ID to check; corresponds to IDs provided by
     *            MotionEvent
     * @return true if the pointer with the given ID is still down
     */
    public boolean isPointerDown(int pointerId) {
        return (mPointersDown & 1 << pointerId) != 0;
    }

    // Central state transition: notifies the callback and drops the captured
    // view reference when returning to IDLE.
    void setDragState(int state) {
        if (mDragState != state) {
            mDragState = state;
            mCallback.onViewDragStateChanged(state);
            if (state == STATE_IDLE) {
                mCapturedView = null;
            }
        }
    }
/**
* Attempt to capture the view with the given pointer ID. The callback will
* be involved. This will put us into the "dragging" state. If we've already
* captured this view with this pointer this method will immediately return
* true without consulting the callback.
*
* @param toCapture View to capture
* @param pointerId Pointer to capture with
* @return true if capture was successful
*/
boolean tryCaptureViewForDrag(View toCapture, int pointerId) {
if (toCapture == mCapturedView && mActivePointerId == pointerId) {
// Already done!
return true;
}
if (toCapture != null && mCallback.tryCaptureView(toCapture, pointerId)) {
mActivePointerId = pointerId;
captureChildView(toCapture, pointerId);
return true;
}
return false;
}
/**
* Tests scrollability within child views of v given a delta of dx.
*
* @param v View to test for horizontal scrollability
* @param checkV Whether the view v passed should itself be checked for
* scrollability (true), or just its children (false).
* @param dx Delta scrolled in pixels along the X axis
* @param dy Delta scrolled in pixels along the Y axis
* @param x X coordinate of the active touch point
* @param y Y coordinate of the active touch point
* @return true if child views of v can be scrolled by delta of dx.
*/
protected boolean canScroll(View v, boolean checkV, int dx, int dy, int x, int y) {
if (v instanceof ViewGroup) {
final ViewGroup group = (ViewGroup) v;
final int scrollX = v.getScrollX();
final int scrollY = v.getScrollY();
final int count = group.getChildCount();
// Count backwards - let topmost views consume scroll distance
// first.
for (int i = count - 1; i >= 0; i--) {
// TODO: Add versioned support here for transformed views.
// This will not work for transformed views in Honeycomb+
final View child = group.getChildAt(i);
if (x + scrollX >= child.getLeft()
&& x + scrollX < child.getRight()
&& y + scrollY >= child.getTop()
&& y + scrollY < child.getBottom()
&& canScroll(child, true, dx, dy, x + scrollX - child.getLeft(), y
+ scrollY - child.getTop())) {
return true;
}
}
}
return checkV
&& (ViewCompat.canScrollHorizontally(v, -dx) || ViewCompat.canScrollVertically(v,
-dy));
}
    /**
     * Check if this event as provided to the parent view's
     * onInterceptTouchEvent should cause the parent to intercept the touch
     * event stream.
     *
     * @param ev MotionEvent provided to onInterceptTouchEvent
     * @return true if the parent view should return true from
     *         onInterceptTouchEvent
     */
    public boolean shouldInterceptTouchEvent(MotionEvent ev) {
        final int action = MotionEventCompat.getActionMasked(ev);
        final int actionIndex = MotionEventCompat.getActionIndex(ev);
        if (action == MotionEvent.ACTION_DOWN) {
            // Reset things for a new event stream, just in case we didn't get
            // the whole previous stream.
            cancel();
        }
        // Feed every event to the velocity tracker so release velocity is
        // available when the gesture ends; the tracker is created lazily.
        if (mVelocityTracker == null) {
            mVelocityTracker = VelocityTracker.obtain();
        }
        mVelocityTracker.addMovement(ev);
        switch (action) {
            case MotionEvent.ACTION_DOWN: {
                final float x = ev.getX();
                final float y = ev.getY();
                final int pointerId = MotionEventCompat.getPointerId(ev, 0);
                saveInitialMotion(x, y, pointerId);
                final View toCapture = findTopChildUnder((int) x, (int) y);
                // Catch a settling view if possible.
                if (toCapture == mCapturedView && mDragState == STATE_SETTLING) {
                    tryCaptureViewForDrag(toCapture, pointerId);
                }
                // Report only edges that the client asked to track.
                final int edgesTouched = mInitialEdgeTouched[pointerId];
                if ((edgesTouched & mTrackingEdges) != 0) {
                    mCallback.onEdgeTouched(edgesTouched & mTrackingEdges, pointerId);
                }
                break;
            }
            case MotionEventCompat.ACTION_POINTER_DOWN: {
                final int pointerId = MotionEventCompat.getPointerId(ev, actionIndex);
                final float x = MotionEventCompat.getX(ev, actionIndex);
                final float y = MotionEventCompat.getY(ev, actionIndex);
                saveInitialMotion(x, y, pointerId);
                // A ViewDragHelper can only manipulate one view at a time.
                if (mDragState == STATE_IDLE) {
                    final int edgesTouched = mInitialEdgeTouched[pointerId];
                    if ((edgesTouched & mTrackingEdges) != 0) {
                        mCallback.onEdgeTouched(edgesTouched & mTrackingEdges, pointerId);
                    }
                } else if (mDragState == STATE_SETTLING) {
                    // Catch a settling view if possible.
                    final View toCapture = findTopChildUnder((int) x, (int) y);
                    if (toCapture == mCapturedView) {
                        tryCaptureViewForDrag(toCapture, pointerId);
                    }
                }
                break;
            }
            case MotionEvent.ACTION_MOVE: {
                // First to cross a touch slop over a draggable view wins. Also
                // report edge drags.
                final int pointerCount = MotionEventCompat.getPointerCount(ev);
                for (int i = 0; i < pointerCount; i++) {
                    final int pointerId = MotionEventCompat.getPointerId(ev, i);
                    final float x = MotionEventCompat.getX(ev, i);
                    final float y = MotionEventCompat.getY(ev, i);
                    // Deltas are measured from each pointer's down position.
                    final float dx = x - mInitialMotionX[pointerId];
                    final float dy = y - mInitialMotionY[pointerId];
                    reportNewEdgeDrags(dx, dy, pointerId);
                    if (mDragState == STATE_DRAGGING) {
                        // Callback might have started an edge drag
                        break;
                    }
                    final View toCapture = findTopChildUnder((int) x, (int) y);
                    if (toCapture != null && checkTouchSlop(toCapture, dx, dy)
                            && tryCaptureViewForDrag(toCapture, pointerId)) {
                        break;
                    }
                }
                saveLastMotion(ev);
                break;
            }
            case MotionEventCompat.ACTION_POINTER_UP: {
                // A secondary pointer lifted: forget its history only.
                final int pointerId = MotionEventCompat.getPointerId(ev, actionIndex);
                clearMotionHistory(pointerId);
                break;
            }
            case MotionEvent.ACTION_UP:
            case MotionEvent.ACTION_CANCEL: {
                // Gesture over: reset all tracking state.
                cancel();
                break;
            }
        }
        // Intercept only once a drag has actually begun.
        return mDragState == STATE_DRAGGING;
    }
    /**
     * Process a touch event received by the parent view. This method will
     * dispatch callback events as needed before returning. The parent view's
     * onTouchEvent implementation should call this.
     *
     * @param ev The touch event received by the parent view
     */
    public void processTouchEvent(MotionEvent ev) {
        final int action = MotionEventCompat.getActionMasked(ev);
        final int actionIndex = MotionEventCompat.getActionIndex(ev);
        if (action == MotionEvent.ACTION_DOWN) {
            // Reset things for a new event stream, just in case we didn't get
            // the whole previous stream.
            cancel();
        }
        // Feed every event to the velocity tracker so release velocity is
        // available when the gesture ends; the tracker is created lazily.
        if (mVelocityTracker == null) {
            mVelocityTracker = VelocityTracker.obtain();
        }
        mVelocityTracker.addMovement(ev);
        switch (action) {
            case MotionEvent.ACTION_DOWN: {
                final float x = ev.getX();
                final float y = ev.getY();
                final int pointerId = MotionEventCompat.getPointerId(ev, 0);
                final View toCapture = findTopChildUnder((int) x, (int) y);
                saveInitialMotion(x, y, pointerId);
                // Since the parent is already directly processing this touch
                // event,
                // there is no reason to delay for a slop before dragging.
                // Start immediately if possible.
                tryCaptureViewForDrag(toCapture, pointerId);
                // Report only edges that the client asked to track.
                final int edgesTouched = mInitialEdgeTouched[pointerId];
                if ((edgesTouched & mTrackingEdges) != 0) {
                    mCallback.onEdgeTouched(edgesTouched & mTrackingEdges, pointerId);
                }
                break;
            }
            case MotionEventCompat.ACTION_POINTER_DOWN: {
                final int pointerId = MotionEventCompat.getPointerId(ev, actionIndex);
                final float x = MotionEventCompat.getX(ev, actionIndex);
                final float y = MotionEventCompat.getY(ev, actionIndex);
                saveInitialMotion(x, y, pointerId);
                // A ViewDragHelper can only manipulate one view at a time.
                if (mDragState == STATE_IDLE) {
                    // If we're idle we can do anything! Treat it like a normal
                    // down event.
                    final View toCapture = findTopChildUnder((int) x, (int) y);
                    tryCaptureViewForDrag(toCapture, pointerId);
                    final int edgesTouched = mInitialEdgeTouched[pointerId];
                    if ((edgesTouched & mTrackingEdges) != 0) {
                        mCallback.onEdgeTouched(edgesTouched & mTrackingEdges, pointerId);
                    }
                } else if (isCapturedViewUnder((int) x, (int) y)) {
                    // We're still tracking a captured view. If the same view is
                    // under this
                    // point, we'll swap to controlling it with this pointer
                    // instead.
                    // (This will still work if we're "catching" a settling
                    // view.)
                    tryCaptureViewForDrag(mCapturedView, pointerId);
                }
                break;
            }
            case MotionEvent.ACTION_MOVE: {
                if (mDragState == STATE_DRAGGING) {
                    // Already dragging: move the captured view by the delta
                    // since the last event of the active pointer.
                    final int index = MotionEventCompat.findPointerIndex(ev, mActivePointerId);
                    final float x = MotionEventCompat.getX(ev, index);
                    final float y = MotionEventCompat.getY(ev, index);
                    final int idx = (int) (x - mLastMotionX[mActivePointerId]);
                    final int idy = (int) (y - mLastMotionY[mActivePointerId]);
                    dragTo(mCapturedView.getLeft() + idx, mCapturedView.getTop() + idy, idx, idy);
                    saveLastMotion(ev);
                } else {
                    // Check to see if any pointer is now over a draggable view.
                    final int pointerCount = MotionEventCompat.getPointerCount(ev);
                    for (int i = 0; i < pointerCount; i++) {
                        final int pointerId = MotionEventCompat.getPointerId(ev, i);
                        final float x = MotionEventCompat.getX(ev, i);
                        final float y = MotionEventCompat.getY(ev, i);
                        final float dx = x - mInitialMotionX[pointerId];
                        final float dy = y - mInitialMotionY[pointerId];
                        reportNewEdgeDrags(dx, dy, pointerId);
                        if (mDragState == STATE_DRAGGING) {
                            // Callback might have started an edge drag.
                            break;
                        }
                        final View toCapture = findTopChildUnder((int) x, (int) y);
                        if (checkTouchSlop(toCapture, dx, dy)
                                && tryCaptureViewForDrag(toCapture, pointerId)) {
                            break;
                        }
                    }
                    saveLastMotion(ev);
                }
                break;
            }
            case MotionEventCompat.ACTION_POINTER_UP: {
                final int pointerId = MotionEventCompat.getPointerId(ev, actionIndex);
                if (mDragState == STATE_DRAGGING && pointerId == mActivePointerId) {
                    // Try to find another pointer that's still holding on to
                    // the captured view.
                    int newActivePointer = INVALID_POINTER;
                    final int pointerCount = MotionEventCompat.getPointerCount(ev);
                    for (int i = 0; i < pointerCount; i++) {
                        final int id = MotionEventCompat.getPointerId(ev, i);
                        if (id == mActivePointerId) {
                            // This one's going away, skip.
                            continue;
                        }
                        final float x = MotionEventCompat.getX(ev, i);
                        final float y = MotionEventCompat.getY(ev, i);
                        if (findTopChildUnder((int) x, (int) y) == mCapturedView
                                && tryCaptureViewForDrag(mCapturedView, id)) {
                            // NOTE: a successful tryCaptureViewForDrag has
                            // already set mActivePointerId to id, so this is
                            // equivalent to assigning id directly.
                            newActivePointer = mActivePointerId;
                            break;
                        }
                    }
                    if (newActivePointer == INVALID_POINTER) {
                        // We didn't find another pointer still touching the
                        // view, release it.
                        releaseViewForPointerUp();
                    }
                }
                clearMotionHistory(pointerId);
                break;
            }
            case MotionEvent.ACTION_UP: {
                if (mDragState == STATE_DRAGGING) {
                    // Normal release: compute fling velocity and notify.
                    releaseViewForPointerUp();
                }
                cancel();
                break;
            }
            case MotionEvent.ACTION_CANCEL: {
                if (mDragState == STATE_DRAGGING) {
                    // Cancelled gesture: release with zero velocity.
                    dispatchViewReleased(0, 0);
                }
                cancel();
                break;
            }
        }
    }
private void reportNewEdgeDrags(float dx, float dy, int pointerId) {
int dragsStarted = 0;
if (checkNewEdgeDrag(dx, dy, pointerId, EDGE_LEFT)) {
dragsStarted |= EDGE_LEFT;
}
if (checkNewEdgeDrag(dy, dx, pointerId, EDGE_TOP)) {
dragsStarted |= EDGE_TOP;
}
if (checkNewEdgeDrag(dx, dy, pointerId, EDGE_RIGHT)) {
dragsStarted |= EDGE_RIGHT;
}
if (checkNewEdgeDrag(dy, dx, pointerId, EDGE_BOTTOM)) {
dragsStarted |= EDGE_BOTTOM;
}
if (dragsStarted != 0) {
mEdgeDragsInProgress[pointerId] |= dragsStarted;
mCallback.onEdgeDragStarted(dragsStarted, pointerId);
}
}
    /**
     * Decide whether this pointer's motion should begin a new drag from the
     * given edge.
     *
     * @param delta motion along the edge's primary axis
     * @param odelta motion along the perpendicular axis
     * @param pointerId pointer being examined
     * @param edge single EDGE_* flag to test
     * @return true if a new edge drag should start from this edge
     */
    private boolean checkNewEdgeDrag(float delta, float odelta, int pointerId, int edge) {
        final float absDelta = Math.abs(delta);
        final float absODelta = Math.abs(odelta);
        // Bail out if the pointer didn't go down on this edge, the edge isn't
        // tracked, the edge is locked or already dragging for this pointer,
        // or neither axis has crossed the touch slop yet.
        if ((mInitialEdgeTouched[pointerId] & edge) != edge || (mTrackingEdges & edge) == 0
                || (mEdgeDragsLocked[pointerId] & edge) == edge
                || (mEdgeDragsInProgress[pointerId] & edge) == edge
                || (absDelta <= mTouchSlop && absODelta <= mTouchSlop)) {
            return false;
        }
        // Mostly-perpendicular motion may lock this edge for the rest of the
        // gesture if the callback requests it via onEdgeLock.
        if (absDelta < absODelta * 0.5f && mCallback.onEdgeLock(edge)) {
            mEdgeDragsLocked[pointerId] |= edge;
            return false;
        }
        return (mEdgeDragsInProgress[pointerId] & edge) == 0 && absDelta > mTouchSlop;
    }
/**
* Check if we've crossed a reasonable touch slop for the given child view.
* If the child cannot be dragged along the horizontal or vertical axis,
* motion along that axis will not count toward the slop check.
*
* @param child Child to check
* @param dx Motion since initial position along X axis
* @param dy Motion since initial position along Y axis
* @return true if the touch slop has been crossed
*/
private boolean checkTouchSlop(View child, float dx, float dy) {
if (child == null) {
return false;
}
final boolean checkHorizontal = mCallback.getViewHorizontalDragRange(child) > 0;
final boolean checkVertical = mCallback.getViewVerticalDragRange(child) > 0;
if (checkHorizontal && checkVertical) {
return dx * dx + dy * dy > mTouchSlop * mTouchSlop;
} else if (checkHorizontal) {
return Math.abs(dx) > mTouchSlop;
} else if (checkVertical) {
return Math.abs(dy) > mTouchSlop;
}
return false;
}
/**
* Check if any pointer tracked in the current gesture has crossed the
* required slop threshold.
* <p>
* This depends on internal state populated by
* {@link #shouldInterceptTouchEvent(android.view.MotionEvent)} or
* {@link #processTouchEvent(android.view.MotionEvent)}. You should only
* rely on the results of this method after all currently available touch
* data has been provided to one of these two methods.
* </p>
*
* @param directions Combination of direction flags, see
* {@link #DIRECTION_HORIZONTAL}, {@link #DIRECTION_VERTICAL},
* {@link #DIRECTION_ALL}
* @return true if the slop threshold has been crossed, false otherwise
*/
public boolean checkTouchSlop(int directions) {
final int count = mInitialMotionX.length;
for (int i = 0; i < count; i++) {
if (checkTouchSlop(directions, i)) {
return true;
}
}
return false;
}
/**
* Check if the specified pointer tracked in the current gesture has crossed
* the required slop threshold.
* <p>
* This depends on internal state populated by
* {@link #shouldInterceptTouchEvent(android.view.MotionEvent)} or
* {@link #processTouchEvent(android.view.MotionEvent)}. You should only
* rely on the results of this method after all currently available touch
* data has been provided to one of these two methods.
* </p>
*
* @param directions Combination of direction flags, see
* {@link #DIRECTION_HORIZONTAL}, {@link #DIRECTION_VERTICAL},
* {@link #DIRECTION_ALL}
* @param pointerId ID of the pointer to slop check as specified by
* MotionEvent
* @return true if the slop threshold has been crossed, false otherwise
*/
public boolean checkTouchSlop(int directions, int pointerId) {
if (!isPointerDown(pointerId)) {
return false;
}
final boolean checkHorizontal = (directions & DIRECTION_HORIZONTAL) == DIRECTION_HORIZONTAL;
final boolean checkVertical = (directions & DIRECTION_VERTICAL) == DIRECTION_VERTICAL;
final float dx = mLastMotionX[pointerId] - mInitialMotionX[pointerId];
final float dy = mLastMotionY[pointerId] - mInitialMotionY[pointerId];
if (checkHorizontal && checkVertical) {
return dx * dx + dy * dy > mTouchSlop * mTouchSlop;
} else if (checkHorizontal) {
return Math.abs(dx) > mTouchSlop;
} else if (checkVertical) {
return Math.abs(dy) > mTouchSlop;
}
return false;
}
/**
* Check if any of the edges specified were initially touched in the
* currently active gesture. If there is no currently active gesture this
* method will return false.
*
* @param edges Edges to check for an initial edge touch. See
* {@link #EDGE_LEFT}, {@link #EDGE_TOP}, {@link #EDGE_RIGHT},
* {@link #EDGE_BOTTOM} and {@link #EDGE_ALL}
* @return true if any of the edges specified were initially touched in the
* current gesture
*/
public boolean isEdgeTouched(int edges) {
final int count = mInitialEdgeTouched.length;
for (int i = 0; i < count; i++) {
if (isEdgeTouched(edges, i)) {
return true;
}
}
return false;
}
/**
* Check if any of the edges specified were initially touched by the pointer
* with the specified ID. If there is no currently active gesture or if
* there is no pointer with the given ID currently down this method will
* return false.
*
* @param edges Edges to check for an initial edge touch. See
* {@link #EDGE_LEFT}, {@link #EDGE_TOP}, {@link #EDGE_RIGHT},
* {@link #EDGE_BOTTOM} and {@link #EDGE_ALL}
* @return true if any of the edges specified were initially touched in the
* current gesture
*/
public boolean isEdgeTouched(int edges, int pointerId) {
return isPointerDown(pointerId) && (mInitialEdgeTouched[pointerId] & edges) != 0;
}
private void releaseViewForPointerUp() {
mVelocityTracker.computeCurrentVelocity(1000, mMaxVelocity);
final float xvel = clampMag(
VelocityTrackerCompat.getXVelocity(mVelocityTracker, mActivePointerId),
mMinVelocity, mMaxVelocity);
final float yvel = clampMag(
VelocityTrackerCompat.getYVelocity(mVelocityTracker, mActivePointerId),
mMinVelocity, mMaxVelocity);
dispatchViewReleased(xvel, yvel);
}
private void dragTo(int left, int top, int dx, int dy) {
int clampedX = left;
int clampedY = top;
final int oldLeft = mCapturedView.getLeft();
final int oldTop = mCapturedView.getTop();
if (dx != 0) {
clampedX = mCallback.clampViewPositionHorizontal(mCapturedView, left, dx);
mCapturedView.offsetLeftAndRight(clampedX - oldLeft);
}
if (dy != 0) {
clampedY = mCallback.clampViewPositionVertical(mCapturedView, top, dy);
mCapturedView.offsetTopAndBottom(clampedY - oldTop);
}
if (dx != 0 || dy != 0) {
final int clampedDx = clampedX - oldLeft;
final int clampedDy = clampedY - oldTop;
mCallback
.onViewPositionChanged(mCapturedView, clampedX, clampedY, clampedDx, clampedDy);
}
}
/**
* Determine if the currently captured view is under the given point in the
* parent view's coordinate system. If there is no captured view this method
* will return false.
*
* @param x X position to test in the parent's coordinate system
* @param y Y position to test in the parent's coordinate system
* @return true if the captured view is under the given point, false
* otherwise
*/
public boolean isCapturedViewUnder(int x, int y) {
return isViewUnder(mCapturedView, x, y);
}
/**
* Determine if the supplied view is under the given point in the parent
* view's coordinate system.
*
* @param view Child view of the parent to hit test
* @param x X position to test in the parent's coordinate system
* @param y Y position to test in the parent's coordinate system
* @return true if the supplied view is under the given point, false
* otherwise
*/
public boolean isViewUnder(View view, int x, int y) {
if (view == null) {
return false;
}
return x >= view.getLeft() && x < view.getRight() && y >= view.getTop()
&& y < view.getBottom();
}
/**
* Find the topmost child under the given point within the parent view's
* coordinate system. The child order is determined using
* {@link me.imid.swipebacklayout.lib.ViewDragHelper.Callback#getOrderedChildIndex(int)}
* .
*
* @param x X position to test in the parent's coordinate system
* @param y Y position to test in the parent's coordinate system
* @return The topmost child view under (x, y) or null if none found.
*/
public View findTopChildUnder(int x, int y) {
final int childCount = mParentView.getChildCount();
for (int i = childCount - 1; i >= 0; i--) {
final View child = mParentView.getChildAt(mCallback.getOrderedChildIndex(i));
if (x >= child.getLeft() && x < child.getRight() && y >= child.getTop()
&& y < child.getBottom()) {
return child;
}
}
return null;
}
private int getEdgeTouched(int x, int y) {
int result = 0;
if (x < mParentView.getLeft() + mEdgeSize)
result = EDGE_LEFT;
if (y < mParentView.getTop() + mEdgeSize)
result = EDGE_TOP;
if (x > mParentView.getRight() - mEdgeSize)
result = EDGE_RIGHT;
if (y > mParentView.getBottom() - mEdgeSize)
result = EDGE_BOTTOM;
return EDGE_LEFT;
}
}
| apache-2.0 |
mashengchen/incubator-trafodion | core/sql/parser/ParAll.cpp | 2470 | /* -*-C++-*- */
/**********************************************************************
// @@@ START COPYRIGHT @@@
//
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//
// @@@ END COPYRIGHT @@@
**********************************************************************/
#define SQLPARSERGLOBALS_FLAGS
#define SQLPARSERGLOBALS_NADEFAULTS
#include "BindStmtDDL.cpp"
#include "ElemDDLCol.cpp"
#include "ElemDDLCreateMVOneAttributeTableList.cpp"
#include "ElemDDLConstraint.cpp"
#include "ElemDDLConstraintAttr.cpp"
#include "ElemDDLFileAttr.cpp"
#include "ElemDDLLike.cpp"
#include "ElemDDLLikeOptions.cpp"
#include "ElemDDLList.cpp"
#include "ElemDDLNode.cpp"
#include "ElemDDLParam.cpp"
#include "ElemDDLPartition.cpp"
#include "ElemDDLPassThroughParamDef.cpp"
#include "ElemDDLPrivActions.cpp"
#include "ElemDDLQualName.cpp"
#include "ElemDDLRefActions.cpp"
#include "ElemDDLRefTrigActions.cpp"
#include "ElemDDLSGOption.cpp"
#include "ElemDDLSGOptions.cpp"
#include "ElemDDLStoreOptions.cpp"
#include "ElemDDLUdr.cpp"
#include "ItemConstValueArray.cpp"
#include "ParDDLFileAttrs.cpp"
#include "ParDDLLikeOpts.cpp"
#include "ParKeyWords.cpp"
#include "ParScannedTokenQueue.cpp"
#include "ParTableUsageList.cpp"
#include "StmtCompilationMode.cpp"
#include "StmtDDLAlter.cpp"
#include "StmtDDLAlterTableAlterColumn.cpp"
#include "StmtDDLCreate.cpp"
#include "StmtDDLDrop.cpp"
#include "StmtDDLGive.cpp"
#include "StmtDDLNode.cpp"
#include "StmtDMLSetTransaction.cpp"
#include "StmtNode.cpp"
#include "StmtDDLRegisterUser.cpp"
#include "StmtDDLRegOrUnregHive.cpp"
#include "StmtDDLCreateRole.cpp"
#include "StmtDDLRoleGrant.cpp"
#include "StmtDDLMisc.cpp"
| apache-2.0 |
dump247/aws-sdk-java | aws-java-sdk-cloudfront/src/main/java/com/amazonaws/services/cloudfront_2012_03_15/model/transform/NoSuchDistributionExceptionUnmarshaller.java | 1574 | /*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.cloudfront_2012_03_15.model.transform;
import org.w3c.dom.Node;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.util.XpathUtils;
import com.amazonaws.transform.StandardErrorUnmarshaller;
import com.amazonaws.services.cloudfront_2012_03_15.model.NoSuchDistributionException;
/**
 * Error unmarshaller that recognizes the CloudFront "NoSuchDistribution"
 * error code and converts the XML error node into a
 * {@link NoSuchDistributionException}.
 */
public class NoSuchDistributionExceptionUnmarshaller extends StandardErrorUnmarshaller {
    public NoSuchDistributionExceptionUnmarshaller() {
        super(NoSuchDistributionException.class);
    }
    public AmazonServiceException unmarshall(Node node) throws Exception {
        // Only handle the error code this unmarshaller understands; a null
        // return lets another unmarshaller claim the node. The constant-first
        // equals() check also covers a missing (null) error code.
        final String errorCode = parseErrorCode(node);
        if (!"NoSuchDistribution".equals(errorCode)) {
            return null;
        }
        return (NoSuchDistributionException) super.unmarshall(node);
    }
}
| apache-2.0 |
zhengxgs/elasticsearch-2.4.1 | core/src/main/java/org/elasticsearch/index/mapper/Mapper.java | 6448 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.similarity.SimilarityLookupService;
import java.util.Map;
/**
 * A node in the mapping tree. Each mapper has a simple (local) name and a
 * full dotted name, can merge with an updated version of itself, renders
 * itself back to XContent, and iterates over its sub-mappers.
 */
public abstract class Mapper implements ToXContent, Iterable<Mapper> {
    /** Context handed to {@link Builder#build(BuilderContext)}: index settings plus the current field path. */
    public static class BuilderContext {
        private final Settings indexSettings;
        private final ContentPath contentPath;
        public BuilderContext(Settings indexSettings, ContentPath contentPath) {
            this.contentPath = contentPath;
            this.indexSettings = indexSettings;
        }
        /** Path of the field currently being built, relative to the document root. */
        public ContentPath path() {
            return this.contentPath;
        }
        /** Settings of the index this mapping belongs to; may be null. */
        @Nullable
        public Settings indexSettings() {
            return this.indexSettings;
        }
        /** Version the index was created with, derived from the settings; null when no settings are available. */
        @Nullable
        public Version indexCreatedVersion() {
            if (indexSettings == null) {
                return null;
            }
            return Version.indexCreated(indexSettings);
        }
    }
    /** Builder for a mapper of type Y; T is the concrete builder type for fluent chaining. */
    public static abstract class Builder<T extends Builder, Y extends Mapper> {
        public String name;
        // Concrete subclasses assign 'this' here so fluent setters can return the right type.
        protected T builder;
        protected Builder(String name) {
            this.name = name;
        }
        /** Simple name of the mapper being built. */
        public String name() {
            return this.name;
        }
        /** Construct the mapper from the accumulated settings. */
        public abstract Y build(BuilderContext context);
    }
    /** Parses one field's mapping definition into a {@link Builder}. */
    public interface TypeParser {
        /** Services and registries available while parsing a mapping. */
        class ParserContext {
            private final String type;
            private final AnalysisService analysisService;
            private final SimilarityLookupService similarityLookupService;
            private final MapperService mapperService;
            private final Map<String, TypeParser> typeParsers;
            private final Version indexVersionCreated;
            private final ParseFieldMatcher parseFieldMatcher;
            public ParserContext(String type, AnalysisService analysisService, SimilarityLookupService similarityLookupService,
                                 MapperService mapperService, Map<String, TypeParser> typeParsers,
                                 Version indexVersionCreated, ParseFieldMatcher parseFieldMatcher) {
                this.type = type;
                this.analysisService = analysisService;
                this.similarityLookupService = similarityLookupService;
                this.mapperService = mapperService;
                this.typeParsers = typeParsers;
                this.indexVersionCreated = indexVersionCreated;
                this.parseFieldMatcher = parseFieldMatcher;
            }
            public String type() {
                return type;
            }
            public AnalysisService analysisService() {
                return analysisService;
            }
            public SimilarityLookupService similarityLookupService() {
                return similarityLookupService;
            }
            public MapperService mapperService() {
                return mapperService;
            }
            /** Look up the parser registered for the given field type name (normalized to underscore case). */
            public TypeParser typeParser(String type) {
                return typeParsers.get(Strings.toUnderscoreCase(type));
            }
            public Version indexVersionCreated() {
                return indexVersionCreated;
            }
            public ParseFieldMatcher parseFieldMatcher() {
                return parseFieldMatcher;
            }
            /** True when parsing happens inside a multi-field definition; overridden by the multi-field context. */
            public boolean isWithinMultiField() { return false; }
            protected Map<String, TypeParser> typeParsers() { return typeParsers; }
            /** Wrap this context so {@link #isWithinMultiField()} reports true for nested sub-field parsing. */
            public ParserContext createMultiFieldContext(ParserContext in) {
                return new MultiFieldParserContext(in) {
                    @Override
                    public boolean isWithinMultiField() { return true; }
                };
            }
            // Copy of a ParserContext used while parsing a multi-field's sub-fields.
            static class MultiFieldParserContext extends ParserContext {
                MultiFieldParserContext(ParserContext in) {
                    super(in.type(), in.analysisService, in.similarityLookupService(), in.mapperService(), in.typeParsers(), in.indexVersionCreated(), in.parseFieldMatcher());
                }
            }
        }
        Mapper.Builder<?,?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException;
    }
    private final String simpleName;
    public Mapper(String simpleName) {
        this.simpleName = simpleName;
    }
    /** Returns the simple name, which identifies this mapper against other mappers at the same level in the mappers hierarchy
     * TODO: make this protected once Mapper and FieldMapper are merged together */
    public final String simpleName() {
        return simpleName;
    }
    /** Returns the canonical name which uniquely identifies the mapper against other mappers in a type. */
    public abstract String name();
    /** Return the merge of {@code mergeWith} into this.
     * Both {@code this} and {@code mergeWith} will be left unmodified. */
    public abstract Mapper merge(Mapper mergeWith, boolean updateAllTypes);
    /**
     * Update the field type of this mapper. This is necessary because some mapping updates
     * can modify mappings across several types. This method must return a copy of the mapper
     * so that the current mapper is not modified.
     */
    public abstract Mapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType);
}
| apache-2.0 |
anuruddhal/stratos | components/org.apache.stratos.cloud.controller/src/main/java/org/apache/stratos/cloud/controller/messaging/topology/TopologyManager.java | 3460 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.stratos.cloud.controller.messaging.topology;
import com.google.gson.Gson;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.stratos.cloud.controller.util.CloudControllerUtil;
import org.apache.stratos.common.concurrent.locks.ReadWriteLock;
import org.apache.stratos.messaging.domain.topology.Topology;
/**
* Persistence and retrieval of Topology from Registry
*/
/**
 * Persistence and retrieval of Topology from Registry
 */
public class TopologyManager {
    private static final Log log = LogFactory.getLog(TopologyManager.class);
    // Read/write lock exposed to callers via acquireWriteLock/releaseWriteLock.
    private static volatile ReadWriteLock lock = new ReadWriteLock("topology-manager");
    // Lazily initialized singleton; volatile so the double-checked locking in
    // getTopology() publishes the instance safely across threads.
    private static volatile Topology topology;
    // Static utility class: not instantiable.
    private TopologyManager() {
    }
    /** Acquire the topology write lock; callers must pair with releaseWriteLock(). */
    public static void acquireWriteLock() {
        lock.acquireWriteLock();
        if (log.isDebugEnabled()) {
            log.debug("Write lock acquired");
        }
    }
    /** Release the topology write lock previously acquired with acquireWriteLock(). */
    public static void releaseWriteLock() {
        lock.releaseWriteLock();
        if (log.isDebugEnabled()) {
            log.debug("Write lock released");
        }
    }
    /**
     * Return the topology singleton, lazily loading it from the registry on
     * first access (double-checked locking). Falls back to a fresh, empty
     * Topology when the registry has none.
     */
    public static Topology getTopology() {
        if (topology == null) {
            synchronized (TopologyManager.class) {
                // Re-check under the lock: another thread may have
                // initialized the field while we were waiting.
                if (topology == null) {
                    if (log.isDebugEnabled()) {
                        log.debug("Trying to retrieve topology from registry");
                    }
                    topology = CloudControllerUtil.retrieveTopology();
                    if (topology == null) {
                        if (log.isDebugEnabled()) {
                            log.debug("Topology not found in registry, creating new");
                        }
                        topology = new Topology();
                    }
                    if (log.isDebugEnabled()) {
                        log.debug("Topology initialized");
                    }
                }
            }
        }
        return topology;
    }
    /**
     * Update in-memory topology and persist it in registry.
     *
     * @param topology_ Topology
     */
    public static void updateTopology(Topology topology_) {
        // Serialize writers against each other; readers see the new value via
        // the volatile field.
        synchronized (TopologyManager.class) {
            if (log.isDebugEnabled()) {
                log.debug("Updating topology");
            }
            topology = topology_;
            CloudControllerUtil.persistTopology(topology);
            if (log.isDebugEnabled()) {
                // JSON rendering is only done when debug logging is on.
                log.debug(String.format("Topology updated: %s", toJson(topology)));
            }
        }
    }
    // Render any object as JSON for debug logging.
    private static String toJson(Object object) {
        Gson gson = new Gson();
        return gson.toJson(object);
    }
}
| apache-2.0 |
thomasdarimont/hibernate-master-class | core/src/test/java/com/vladmihalcea/hibernate/masterclass/laboratory/cache/CollectionCacheTest.java | 12049 | package com.vladmihalcea.hibernate.masterclass.laboratory.cache;
import com.vladmihalcea.hibernate.masterclass.laboratory.util.AbstractTest;
import org.hibernate.ObjectNotFoundException;
import org.hibernate.annotations.CacheConcurrencyStrategy;
import org.hibernate.jdbc.Work;
import org.junit.Before;
import org.junit.Test;
import javax.persistence.*;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
/**
* CollectionCacheTest - Test to check Collection Cache
*
* @author Vlad Mihalcea
*/
public class CollectionCacheTest extends AbstractTest {

    /** Entities under test: a Repository owning a cached collection of Commits. */
    @Override
    protected Class<?>[] entities() {
        return new Class<?>[] {
                Repository.class,
                Commit.class
        };
    }

    /** Enable the EhCache-backed second-level cache for every test in this class. */
    @Override
    protected Properties getProperties() {
        Properties properties = super.getProperties();
        properties.put("hibernate.cache.use_second_level_cache", Boolean.TRUE.toString());
        properties.put("hibernate.cache.region.factory_class", "org.hibernate.cache.ehcache.EhCacheRegionFactory");
        return properties;
    }

    /**
     * Seed one Repository with two Commits (and their embedded Change lists),
     * then read the collections back once so they land in the collection cache
     * before each test runs.
     */
    @Before
    public void init() {
        super.init();
        doInTransaction(session -> {
            Repository repository = new Repository("Hibernate-Master-Class");
            session.persist(repository);
            Commit commit1 = new Commit();
            commit1.getChanges().add(new Change("README.txt", "0a1,5..."));
            commit1.getChanges().add(new Change("web.xml", "17c17..."));
            Commit commit2 = new Commit();
            commit2.getChanges().add(new Change("README.txt", "0b2,5..."));
            repository.addCommit(commit1);
            repository.addCommit(commit2);
            // commit2 is not persisted explicitly; presumably the CascadeType.ALL
            // on Repository.commits saves it at flush — TODO confirm.
            session.persist(commit1);
        });
        doInTransaction(session -> {
            LOGGER.info("Load collections for the first time");
            // First read populates both the entity and collection cache regions.
            Repository repository = (Repository) session.get(Repository.class, 1L);
            for (Commit commit : repository.getCommits()) {
                assertFalse(commit.getChanges().isEmpty());
            }
        });
    }

    /** The seeded commits collection should now be served from the cache. */
    @Test
    public void testLoadFromCollectionCache() {
        LOGGER.info("Load collections from cache");
        doInTransaction(session -> {
            Repository repository = (Repository) session.get(Repository.class, 1L);
            assertEquals(2, repository.getCommits().size());
        });
    }

    /** Adding a child through the association invalidates the collection cache entry. */
    @Test
    public void testInvalidateEntityCollectionCacheOnAddingEntries() {
        LOGGER.info("Adding invalidates Collection Cache");
        doInTransaction(session -> {
            Repository repository = (Repository)
                session.get(Repository.class, 1L);
            assertEquals(2, repository.getCommits().size());
            Commit commit = new Commit();
            commit.getChanges().add(
                new Change("Main.java", "0b3,17...")
            );
            repository.addCommit(commit);
        });
        doInTransaction(session -> {
            Repository repository = (Repository)
                session.get(Repository.class, 1L);
            assertEquals(3, repository.getCommits().size());
        });
    }

    /** Removing a child through the association invalidates the collection cache entry. */
    @Test
    public void testInvalidateEntityCollectionCacheOnRemovingEntries() {
        LOGGER.info("Removing invalidates Collection Cache");
        doInTransaction(session -> {
            Repository repository = (Repository)
                session.get(Repository.class, 1L);
            assertEquals(2, repository.getCommits().size());
            Commit removable = repository.getCommits().get(0);
            repository.removeCommit(removable);
        });
        doInTransaction(session -> {
            Repository repository = (Repository)
                session.get(Repository.class, 1L);
            assertEquals(1, repository.getCommits().size());
        });
    }

    /**
     * Deleting a child entity directly (bypassing the parent association)
     * leaves a stale id in the cached collection, so resolving it may throw
     * ObjectNotFoundException.
     */
    @Test
    public void testConsistencyIssuesWhenRemovingChildDirectly() {
        LOGGER.info("Removing Child causes inconsistencies");
        doInTransaction(session -> {
            Commit commit = (Commit) session.get(Commit.class, 1L);
            session.delete(commit);
        });
        try {
            doInTransaction(session -> {
                Repository repository = (Repository) session.get(Repository.class, 1L);
                assertEquals(1, repository.getCommits().size());
            });
        } catch (ObjectNotFoundException e) {
            LOGGER.warn("Object not found", e);
        }
    }

    /** Bulk HQL updates keep the second-level cache consistent automatically. */
    @Test
    public void testConsistencyWhenHQLUpdating() {
        LOGGER.info("Updating Child entities using HQL");
        doInTransaction(session -> {
            Repository repository = (Repository)
                session.get(Repository.class, 1L);
            for (Commit commit : repository.getCommits()) {
                assertFalse(commit.review);
            }
        });
        doInTransaction(session -> {
            session.createQuery(
                "update Commit c " +
                "set c.review = true ")
            .executeUpdate();
        });
        doInTransaction(session -> {
            Repository repository = (Repository)
                session.get(Repository.class, 1L);
            for(Commit commit : repository.getCommits()) {
                assertTrue(commit.review);
            }
        });
    }

    /**
     * Native SQL updates also stay consistent, provided the affected entity
     * class is declared via addSynchronizedEntityClass so Hibernate can evict
     * the relevant cache regions.
     */
    @Test
    public void testConsistencyWhenSQLUpdating() {
        LOGGER.info("Updating Child entities using SQL");
        doInTransaction(session -> {
            Repository repository = (Repository)
                session.get(Repository.class, 1L);
            for (Commit commit : repository.getCommits()) {
                assertFalse(commit.review);
            }
        });
        doInTransaction(session -> {
            session.createSQLQuery(
                "update Commit c " +
                "set c.review = true ")
            .addSynchronizedEntityClass(Commit.class)
            .executeUpdate();
        });
        doInTransaction(session -> {
            Repository repository = (Repository)
                session.get(Repository.class, 1L);
            for(Commit commit : repository.getCommits()) {
                assertTrue(commit.review);
            }
        });
    }

    /**
     * When updating behind Hibernate's back via plain JDBC, the stale
     * collection cache entry must be evicted manually through the Cache API.
     */
    @Test
    public void testConsistencyWhenManuallySQLUpdating() {
        LOGGER.info("Manually updating Child entities using SQL");
        final Repository repository = doInTransaction(session -> {
            Repository _repository = (Repository)
                session.get(Repository.class, 1L);
            for (Commit commit : _repository.getCommits()) {
                assertFalse(commit.review);
            }
            return _repository;
        });
        doInTransaction(session -> {
            session.doWork(connection -> {
                try (PreparedStatement statement = connection.prepareStatement(
                    "update Commit c " +
                    "set c.review = true "
                )) {
                    statement.executeUpdate();
                }
            });
            // Manual eviction: region name is "<EntityClass>.<collectionField>".
            session.getSessionFactory().getCache().evictCollection(
                Repository.class.getName() + ".commits",
                repository.getId()
            );
        });
        doInTransaction(session -> {
            Repository _repository = (Repository)
                session.get(Repository.class, 1L);
            for(Commit commit : _repository.getCommits()) {
                assertTrue(commit.review);
            }
        });
    }

    /** Removing an element from a cached @ElementCollection invalidates its cache entry. */
    @Test
    public void testInvalidateEmbeddableCollectionCacheOnRemovingEntries() {
        LOGGER.info("Invalidate embeddable collection cache on removing entries");
        doInTransaction(session -> {
            Commit commit = (Commit) session.get(Commit.class, 1L);
            assertEquals(2, commit.getChanges().size());
            commit.getChanges().remove(0);
        });
        doInTransaction(session -> {
            Commit commit = (Commit) session.get(Commit.class, 1L);
            assertEquals(1, commit.getChanges().size());
        });
    }

    /** Adding an element to a cached @ElementCollection invalidates its cache entry. */
    @Test
    public void testInvalidateEmbeddableCollectionCacheOnAddingEntries() {
        LOGGER.info("Invalidate embeddable collection cache on adding entries");
        doInTransaction(session -> {
            Commit commit = (Commit) session.get(Commit.class, 1L);
            assertEquals(2, commit.getChanges().size());
            commit.getChanges().add(new Change("Main.java", "0b3,17..."));
        });
        doInTransaction(session -> {
            Commit commit = (Commit) session.get(Commit.class, 1L);
            assertEquals(3, commit.getChanges().size());
        });
    }

    /**
     * Repository - parent entity with a READ_WRITE cached one-to-many of Commits.
     *
     * @author Vlad Mihalcea
     */
    @Entity(name = "Repository")
    @org.hibernate.annotations.Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
    public static class Repository {

        @Id
        @GeneratedValue(strategy = GenerationType.AUTO)
        private Long id;

        private String name;

        // The collection itself is cached; modifications must go through
        // addCommit/removeCommit so both association sides stay in sync.
        @org.hibernate.annotations.Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
        @OneToMany(mappedBy = "repository", cascade = CascadeType.ALL, orphanRemoval = true)
        private List<Commit> commits = new ArrayList<>();

        public Repository() {
        }

        public Repository(String name) {
            this.name = name;
        }

        public String getName() {
            return name;
        }

        public void setName(String name) {
            this.name = name;
        }

        public Long getId() {
            return id;
        }

        public void setId(Long id) {
            this.id = id;
        }

        public List<Commit> getCommits() {
            return commits;
        }

        /** Add a commit, maintaining both sides of the bidirectional association. */
        public void addCommit(Commit commit) {
            commits.add(commit);
            commit.setRepository(this);
        }

        /** Remove a commit, maintaining both sides; orphanRemoval deletes it. */
        public void removeCommit(Commit commit) {
            commits.remove(commit);
            commit.setRepository(null);
        }
    }

    /**
     * Commit - child entity with a cached embeddable element collection of Changes.
     *
     * @author Vlad Mihalcea
     */
    @Entity(name = "Commit")
    @Table(name = "commit")
    @org.hibernate.annotations.Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
    public static class Commit {

        @Id
        @GeneratedValue(strategy = GenerationType.AUTO)
        private Long id;

        // Package-visible on purpose: tests assert on it directly.
        private boolean review;

        @ManyToOne(fetch = FetchType.LAZY)
        private Repository repository;

        // Ordered, cached element collection stored in the commit_change table.
        @ElementCollection
        @CollectionTable(
            name="commit_change",
            joinColumns=@JoinColumn(name="commit_id")
        )
        @org.hibernate.annotations.Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
        @OrderColumn(name = "index_id")
        private List<Change> changes = new ArrayList<>();

        public Commit() {
        }

        public Repository getRepository() {
            return repository;
        }

        public void setRepository(Repository repository) {
            this.repository = repository;
        }

        public List<Change> getChanges() {
            return changes;
        }
    }

    /**
     * Change - immutable embeddable holding one file path and its diff text.
     *
     * @author Vlad Mihalcea
     */
    @Embeddable
    public static class Change {

        @Column(name = "path", nullable = false)
        private String path;

        @Column(name = "diff", nullable = false)
        private String diff;

        public Change() {
        }

        public Change(String path, String diff) {
            this.path = path;
            this.diff = diff;
        }

        public String getPath() {
            return path;
        }

        public String getDiff() {
            return diff;
        }
    }
}
| apache-2.0 |
steveloughran/hadoop-mapreduce | src/java/org/apache/hadoop/mapred/join/CompositeInputFormat.java | 6745 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred.join;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Reporter;
/**
* An InputFormat capable of performing joins over a set of data sources sorted
* and partitioned the same way.
* @see #setFormat
*
* A user may define new join types by setting the property
* <tt>mapred.join.define.<ident></tt> to a classname. In the expression
* <tt>mapred.join.expr</tt>, the identifier will be assumed to be a
* ComposableRecordReader.
* <tt>mapred.join.keycomparator</tt> can be a classname used to compare keys
* in the join.
* @see JoinRecordReader
* @see MultiFilterRecordReader
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.join.CompositeInputFormat} instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class CompositeInputFormat<K extends WritableComparable>
      implements ComposableInputFormat<K,TupleWritable> {

  // expression parse tree to which IF requests are proxied
  private Parser.Node root;

  public CompositeInputFormat() { }


  /**
   * Interpret a given string as a composite expression.
   * {@code
   *   func  ::= <ident>([<func>,]*<func>)
   *   func  ::= tbl(<class>,"<path>")
   *   class ::= @see java.lang.Class#forName(java.lang.String)
   *   path  ::= @see org.apache.hadoop.fs.Path#Path(java.lang.String)
   * }
   * Reads expression from the <tt>mapred.join.expr</tt> property and
   * user-supplied join types from <tt>mapred.join.define.&lt;ident&gt;</tt>
   *  types. Paths supplied to <tt>tbl</tt> are given as input paths to the
   * InputFormat class listed.
   * @see #compose(java.lang.String, java.lang.Class, java.lang.String...)
   */
  public void setFormat(JobConf job) throws IOException {
    // Register built-in join identifiers first, then user-defined ones,
    // so the parser can resolve every identifier in the expression.
    addDefaults();
    addUserIdentifiers(job);
    root = Parser.parse(job.get("mapred.join.expr", null), job);
  }

  /**
   * Adds the default set of identifiers to the parser.
   */
  protected void addDefaults() {
    try {
      Parser.CNode.addIdentifier("inner", InnerJoinRecordReader.class);
      Parser.CNode.addIdentifier("outer", OuterJoinRecordReader.class);
      Parser.CNode.addIdentifier("override", OverrideRecordReader.class);
      Parser.WNode.addIdentifier("tbl", WrappedRecordReader.class);
    } catch (NoSuchMethodException e) {
      // The built-in reader classes are part of this package; a missing
      // constructor here indicates a broken build, not a recoverable state.
      throw new RuntimeException("FATAL: Failed to init defaults", e);
    }
  }

  /**
   * Inform the parser of user-defined types.
   */
  private void addUserIdentifiers(JobConf job) throws IOException {
    // Scan the job configuration for mapred.join.define.<ident> entries and
    // register each named ComposableRecordReader with the parser.
    Pattern x = Pattern.compile("^mapred\\.join\\.define\\.(\\w+)$");
    for (Map.Entry<String,String> kv : job) {
      Matcher m = x.matcher(kv.getKey());
      if (m.matches()) {
        try {
          Parser.CNode.addIdentifier(m.group(1),
              job.getClass(m.group(0), null, ComposableRecordReader.class));
        } catch (NoSuchMethodException e) {
          throw (IOException)new IOException(
              "Invalid define for " + m.group(1)).initCause(e);
        }
      }
    }
  }

  /**
   * Build a CompositeInputSplit from the child InputFormats by assigning the
   * ith split from each child to the ith composite split.
   */
  public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException {
    setFormat(job);
    // Force each child to emit one split per file so the ith splits of all
    // children line up — presumably relies on identical partitioning of the
    // sources; confirm against the parser's split logic.
    job.setLong("mapred.min.split.size", Long.MAX_VALUE);
    return root.getSplits(job, numSplits);
  }

  /**
   * Construct a CompositeRecordReader for the children of this InputFormat
   * as defined in the init expression.
   * The outermost join need only be composable, not necessarily a composite.
   * Mandating TupleWritable isn't strictly correct.
   */
  @SuppressWarnings("unchecked") // child types unknown
  public ComposableRecordReader<K,TupleWritable> getRecordReader(
      InputSplit split, JobConf job, Reporter reporter) throws IOException {
    setFormat(job);
    return root.getRecordReader(split, job, reporter);
  }

  /**
   * Convenience method for constructing composite formats.
   * Given InputFormat class (inf), path (p) return:
   * {@code tbl(<inf>, <p>) }
   */
  public static String compose(Class<? extends InputFormat> inf, String path) {
    return compose(inf.getName().intern(), path, new StringBuffer()).toString();
  }

  /**
   * Convenience method for constructing composite formats.
   * Given operation (op), Object class (inf), set of paths (p) return:
   * {@code <op>(tbl(<inf>,<p1>),tbl(<inf>,<p2>),...,tbl(<inf>,<pn>)) }
   */
  public static String compose(String op, Class<? extends InputFormat> inf,
      String... path) {
    final String infname = inf.getName();
    StringBuffer ret = new StringBuffer(op + '(');
    for (String p : path) {
      compose(infname, p, ret);
      ret.append(',');
    }
    // Overwrite the trailing comma with the closing parenthesis.
    // NOTE(review): with an empty path array this overwrites the '(' instead.
    ret.setCharAt(ret.length() - 1, ')');
    return ret.toString();
  }

  /**
   * Convenience method for constructing composite formats.
   * Given operation (op), Object class (inf), set of paths (p) return:
   * {@code <op>(tbl(<inf>,<p1>),tbl(<inf>,<p2>),...,tbl(<inf>,<pn>)) }
   */
  public static String compose(String op, Class<? extends InputFormat> inf,
      Path... path) {
    ArrayList<String> tmp = new ArrayList<String>(path.length);
    for (Path p : path) {
      tmp.add(p.toString());
    }
    return compose(op, inf, tmp.toArray(new String[0]));
  }

  /** Append one {@code tbl(<inf>,"<path>")} term to sb and return it. */
  private static StringBuffer compose(String inf, String path,
      StringBuffer sb) {
    sb.append("tbl(" + inf + ",\"");
    sb.append(path);
    sb.append("\")");
    return sb;
  }
}
| apache-2.0 |
operasoftware/tlsprober | probedb/resultdb2/summary_models.py | 51449 | # Copyright 2010-2012 Opera Software ASA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.db import models
from django.db import IntegrityError
from django.db import transaction
from django.db.models.signals import post_init
from django.db import transaction
from django.db import connection
from django.db.models import Q
import time
from probedb.resultdb2.models import *
import probedb.probedata2.models as ProbeData
from django.db import DatabaseError
from django.db import IntegrityError
from django.views.decorators.http import condition
import datetime, sys
# Create your models here.
class ResultSummaryList(models.Model):
    """Top result manager for a probe run.

    Links a run to all of its results, condition (flag) sets, domain/IP
    hierarchies, cipher-suite and server-agent summaries, and performs
    searches over the collected results.
    """
    # One summary list per probe run.
    part_of_run = models.ForeignKey(ProbeData.ProbeRun, unique=True, db_index = True)
    summaries = models.ManyToManyField(ResultEntry, null=True)
    # Per-run flag entries and the flag-combination sets observed.
    conditions = models.ManyToManyField(ResultCondition,null=True)
    condition_groups = models.ManyToManyField(ResultConditionSet,null=True)
    IPDomainTopEntry0 = models.ForeignKey(ProbeData.IPAddressDomain, null=True, related_name="IPDomainTopEntry0" ) #Ip top
    IPDomainEntries0 = models.ManyToManyField(ProbeData.IPAddressDomain, null=True) #Ip top
    DomainTopEntry0 = models.ForeignKey(ProbeData.ServerDomain, null=True, related_name="DomainTopEntry0") #TLDs
    DomainEntries0 = models.ManyToManyField(ProbeData.ServerDomain, null=True) #TLDs
    # Cipher-suite and server-agent aggregations for the run.
    CipherSuiteEntries = models.ManyToManyField(ResultCipherSuite, null=True)
    CipherSuiteGroupEntries = models.ManyToManyField(ResultCipherSuiteGroupEntry, null=True)
    PrimaryShortServerAgentSummary = models.ManyToManyField(ResultPrimaryServerAgentFamily, null=True)
    SecondaryShortServerAgentSummary = models.ManyToManyField(ResultSecondaryServerAgentFamily, null=True)
    PrimaryServerAgentSummary = models.ManyToManyField(ResultPrimaryServerAgent, null=True)
    SecondaryServerAgentSummary = models.ManyToManyField(ResultSecondaryServerAgent, null=True)
    ip_address_probed = models.ManyToManyField(ProbeData.ServerIPProbed, null=True)
    dhe_keysizes = models.ManyToManyField(ResultDHEKeySize, null=True)

    # Query-criterion keys accepted by the search machinery.
    QUERY_CONDITION = "condition"
    QUERY_DOMAIN ="domains"
    QUERY_IP="ipdomain"
    QUERY_PRIMARYAGENT = "primaryagent"
    QUERY_SHORTPRIMARYAGENT = "shortprimaryagent"
    QUERY_SECONDARYAGENT = "secondaryagent"
    QUERY_SHORTSECONDARYAGENT = "shortsecondaryagent"
    QUERY_CIPHER = "cipher"
    QUERY_CIPHERGROUP = "ciphergroup"
    QUERY_SPECINTOL = "specintol"
    QUERY_DHEKEYSIZE = "dhekeysize"
    QUERY_ALEXA_RESTRICT = "alexa"
    QUERY_RESTRICT_RUN = "restrict_run"
    QUERY_PROTOCOL_RESTRICT = "protocol"
    QUERY_CIPHER_EXCLUDE = "cipher_exclude"
    QUERY_AUTO = "auto"

    # NOTE: QUERY_ALEXA_RESTRICT and QUERY_AUTO are deliberately(?) absent
    # from this tuple of recognized query types — confirm before relying on it.
    QUERY_TYPES = (
        QUERY_CONDITION,
        QUERY_DOMAIN,
        QUERY_IP,
        QUERY_PRIMARYAGENT,
        QUERY_SHORTPRIMARYAGENT,
        QUERY_SECONDARYAGENT,
        QUERY_SHORTSECONDARYAGENT,
        QUERY_CIPHER,
        QUERY_CIPHERGROUP,
        QUERY_SPECINTOL,
        QUERY_DHEKEYSIZE,
        QUERY_RESTRICT_RUN,
        QUERY_PROTOCOL_RESTRICT,
        QUERY_CIPHER_EXCLUDE,
        )

    # Result-presentation keys: which summary/grouping to produce.
    RESULT_ID = "id"
    RESULT_CONDITION = "condition"
    RESULT_DOMAIN ="domains"
    RESULT_IP="ipdomain"
    RESULT_PRIMARYAGENT = "primaryagent"
    RESULT_SHORTPRIMARYAGENT = "shortprimaryagent"
    RESULT_SECONDARYAGENT = "secondaryagent"
    RESULT_SHORTSECONDARYAGENT = "shortsecondaryagent"
    RESULT_CIPHER = "cipher"
    RESULT_CIPHERGROUP = "ciphergroup"
    RESULT_PROTOCOLS = "protocols"
    RESULT_HOSTS = "hosts"
    RESULT_HOSTS_ALEXA = "hosts_alexa"
    RESULT_URLS_TEXT = "urls_text"
    RESULT_HOST_RUNLIST = "run_urls_cvs"
    RESULT_HOST_PROFILES = "hosts_profiles"
    RESULT_HOST_BASEPROFILES = "hosts_baseprofiles"
    RESULT_HOST_FUNDPROFILES = "hosts_fundprofiles"

    RESULT_TYPES = (
        RESULT_CONDITION,
        RESULT_DOMAIN,
        RESULT_IP,
        RESULT_PRIMARYAGENT,
        RESULT_SHORTPRIMARYAGENT,
        RESULT_SECONDARYAGENT,
        RESULT_SHORTSECONDARYAGENT,
        RESULT_CIPHER,
        RESULT_CIPHERGROUP,
        RESULT_PROTOCOLS,
        RESULT_HOSTS,
        RESULT_HOSTS_ALEXA,
        RESULT_URLS_TEXT,
        RESULT_HOST_RUNLIST,
        RESULT_HOST_PROFILES,
        RESULT_HOST_BASEPROFILES,
        RESULT_HOST_FUNDPROFILES,
        )

    # (key, human-readable label) choices for result-type selection UIs.
    RESULT_TYPE_VALUES = (
        (RESULT_ID, "Result entry ID"),
        (RESULT_CONDITION,"Condition"),
        (RESULT_DOMAIN,"Domains"),
        (RESULT_IP,"IP Address"),
        (RESULT_PRIMARYAGENT,"Primary Agent"),
        (RESULT_SHORTPRIMARYAGENT,"Primary Agent Family"),
        (RESULT_SECONDARYAGENT,"Secondary Agent"),
        (RESULT_SHORTSECONDARYAGENT,"Secondary Agent Family"),
        (RESULT_CIPHER,"Cipher suite"),
        (RESULT_CIPHERGROUP,"Cipher Suite group"),
        (RESULT_PROTOCOLS,"Protocol versions supported"),
        (RESULT_HOSTS,"Hostnames"),
        (RESULT_HOSTS_ALEXA,"Hostnames sorted by Alexa"),
        (RESULT_URLS_TEXT,"URL textfile (Qouted)"),
        (RESULT_HOST_RUNLIST, "TLS Prober CSV run configuration file"),
        (RESULT_HOST_PROFILES, "Server profiles"),
        (RESULT_HOST_BASEPROFILES, "Server base profiles"),
        (RESULT_HOST_FUNDPROFILES, "Server fundamental profiles"),
        )

    # Alexa-ranking filter thresholds (-1 = no limit, 0 = non-Alexa only).
    NO_ALEXA_LIMIT = -1
    ALEXA_TOP_100 = 100
    ALEXA_TOP_1K = 1000
    ALEXA_TOP_10K = 10000
    ALEXA_TOP_100K = 100000
    ALEXA_TOP_1M = 1000000
    NONALEXA = 0

    ALEXA_TYPE_VALUES = (
        (NO_ALEXA_LIMIT,"All"),
        (ALEXA_TOP_100,"Alexa Top 100"),
        (ALEXA_TOP_1K,"Alexa Top 1K"),
        (ALEXA_TOP_10K,"Alexa Top 10K"),
        (ALEXA_TOP_100K,"Alexa Top 100K"),
        (ALEXA_TOP_1M,"Alexa Top 1M"),
        (NONALEXA,"Exclude Alexa sites"),
        )
def __unicode__(self):
return unicode(self.part_of_run)
def setup(self):
"""Initiate a run, creating common resources, such as the flag database"""
for (c, t) in ResultCondition.RESULTC_VALUES:
common_cond,created = ResultCommonCondition.objects.get_or_create(condition=c);
self.conditions.get_or_create(part_of_run=self.part_of_run,condition=c, defaults = {"common_condition": common_cond})
while True:
try:
self.DomainSummary0, created = ProbeData.ServerDomain.objects.get_or_create(domain_parent=None, domain_name="", full_domain_name="", level=0)
self.DomainEntries0.add(self.DomainSummary0)
except:
time.sleep(.1)
continue;
break;
while True:
try:
self.IPDomainTopEntry0, created = ProbeData.IPAddressDomain.objects.get_or_create(ip_parent=None, ip_domain=0, full_ip_mask="0.0.0.0", level=0)
self.IPDomainEntries0.add(self.IPDomainTopEntry0)
except:
time.sleep(.1)
continue;
break;
self.save();
def check_condition(self,cond):
"""Make sure all flags are available"""
common_cond,created = ResultCommonCondition.objects.get_or_create(condition=cond);
self.conditions.get_or_create(part_of_run=self.part_of_run,condition=cond, defaults = {"common_condition": common_cond})
def ready(self):
"""Check if the object is ready for use"""
return (self.conditions.count() != 0 and
self.DomainEntries0.filter(level=0).count() != 0 and
self.IPDomainEntries0.filter(level=0).count()!= 0
)
def start(self):
"""Start an object by loading the necessary resources from the database"""
self.condition_list = {}
for c in list(self.conditions.all()):
self.condition_list[c.condition] = c
self.condition_group_list = {}
update = False
if self.IPDomainTopEntry0:
self.ipdomain_top0 = self.IPDomainTopEntry0
else:
try:
self.ipdomain_top0 = self.IPDomainEntries0.get(level=0)
except:
while True:
try:
self.ipdomain_top0, created = ProbeData.IPAddressDomain.objects.get_or_create(ip_parent=None, ip_domain=0, full_ip_mask="0.0.0.0", level=0)
self.IPDomainEntries0.add(self.ipdomain_top0)
except:
time.sleep(.1)
continue;
break;
self.IPDomainTopEntry0 = self.ipdomain_top0
update = True
if self.DomainTopEntry0:
self.domain_top = self.DomainTopEntry0
else :
self.domain_top = self.DomainEntries0.get(level=0)
self.DomainTopEntry0 = self.domain_top
update = True
if update:
try:
self.save();
except:
pass # ignore errors
@transaction.commit_on_success
def get_condition_group(self, run, condition_group, lock=None):
"""Get a condition group, if necessary by creating it"""
condition_string = "_".join(sorted(list(condition_group)))
group = self.condition_group_list.get(condition_string,None)
if group:
return group
try:
group = self.condition_groups.get(result_summary_string = condition_string)
except:
group = None
if not group:
group = ResultConditionSet.FindSet(run, condition_string,
create=[self.condition_list[c] for c in condition_group])
if group:
if lock:
lock.acquire()
if not lock or condition_string not in self.condition_group_list:
self.condition_group_list[condition_string] = group
while True:
try:
sid = transaction.savepoint()
self.condition_groups.add(group);
except:
transaction.savepoint_rollback(sid)
time.sleep(0.1)
continue;
transaction.savepoint_commit(sid)
break;
else:
group = self.condition_group_list.get(condition_string,None)
if lock:
lock.release()
return group;
    @transaction.commit_on_success
    def GetResultAgents(self, agent, target_class, target_short_class, target_source_class):
        """Get a Server agent entry, can be used for multiple classes.

        Returns a (full_result, short_result) pair of per-run summary rows for
        *agent*, creating them on demand. *target_class*/*target_short_class*
        are the per-run result models (e.g. ResultPrimaryServerAgent and its
        Family variant); *target_source_class* is the source agent model used
        for the shared "N/A" placeholder. Each create is wrapped in a
        savepoint and retried on transient DatabaseError/IntegrityError.
        """
        # No agent detected: substitute the shared "N/A" placeholder row.
        if not agent:
            if target_source_class.NotAvailable:
                agent = target_source_class.NotAvailable
            else:
                while True:
                    try:
                        sid = transaction.savepoint()
                        agent,created = target_source_class.objects.get_or_create(agent_name ="N/A", major_version="0", minor_version="0", patch_version="0")
                        transaction.savepoint_commit(sid)
                    except AssertionError,error:
                        #print str(error)
                        raise
                    except DatabaseError:
                        transaction.savepoint_rollback(sid)
                        time.sleep(0.1)
                        continue
                    except IntegrityError:
                        transaction.savepoint_rollback(sid)
                        time.sleep(0.1)
                        continue
                    except Exception, error:
                        #print str(error)
                        time.sleep(0.1)
                        continue
                    break;
        short_agent = agent.agent_shortname
        # Agent has no family ("short") name yet: assign one and save it.
        if not short_agent:
            name = agent.agent_name
            short_agent_name = name.strip('()').partition('/')[0] if name != "N/A" else name
            if not short_agent_name:
                name = "N/A"
            # NOTE(review): short_agent_name is computed above but never used
            # below; the get_or_create always stores "N/A". Looks like it was
            # meant to be agent_shortname=short_agent_name — confirm intent.
            while True:
                try:
                    sid = transaction.savepoint()
                    if AgentShortName.NotAvailable:
                        short_agent = AgentShortName.NotAvailable
                    else:
                        short_agent,created = AgentShortName.objects.get_or_create(agent_shortname ="N/A")
                    agent.agent_shortname =short_agent
                    agent.save()
                    transaction.savepoint_commit(sid)
                    AgentShortName.NotAvailable = short_agent
                except AssertionError,error:
                    #print str(error)
                    raise
                except DatabaseError:
                    transaction.savepoint_rollback(sid)
                    time.sleep(0.1)
                    continue
                except IntegrityError:
                    transaction.savepoint_rollback(sid)
                    time.sleep(0.1)
                    continue
                except Exception, error:
                    #print str(error)
                    time.sleep(0.1)
                    continue
                break;
        full_result = None
        short_result = None
        # Consult the in-process caches (class attributes keyed by agent id)
        # before touching the database.
        if agent.id in target_class.AgentCache:
            full_result = target_class.AgentCache[agent.id]
        if short_agent.id in target_short_class.AgentCache:
            short_result = target_short_class.AgentCache[short_agent.id]
        if full_result and short_result:
            return (full_result, short_result)
        # Create/fetch the family-level summary first, since the full summary
        # references it via its "short_name" default.
        if not short_result:
            while True:
                try:
                    sid = transaction.savepoint()
                    short_result, created = target_short_class.objects.get_or_create(part_of_run = self.part_of_run,
                                                        agent_name=short_agent)
                    transaction.savepoint_commit(sid)
                    target_short_class.AgentCache[short_agent.id] = short_result
                except AssertionError,error:
                    #print str(error)
                    raise
                except DatabaseError:
                    transaction.savepoint_rollback(sid)
                    time.sleep(0.1)
                    continue
                except IntegrityError:
                    transaction.savepoint_rollback(sid)
                    time.sleep(0.1)
                    continue
                except Exception, error:
                    #print str(error)
                    time.sleep(0.1)
                    continue
                break;
        if not full_result:
            while True:
                try:
                    sid = transaction.savepoint()
                    full_result, created = target_class.objects.get_or_create(part_of_run = self.part_of_run,
                                            agent_name=agent,
                                            defaults={"short_name":short_result}
                                            )
                    transaction.savepoint_commit(sid)
                    target_class.AgentCache[agent.id] = full_result
                except DatabaseError:
                    transaction.savepoint_rollback(sid)
                    time.sleep(0.1)
                    continue
                except IntegrityError:
                    transaction.savepoint_rollback(sid)
                    time.sleep(0.1)
                    continue
                except:
                    time.sleep(0.1)
                    continue
                break;
        return (full_result, short_result)
    @transaction.commit_on_success
    def _migrate_server_alias(self, x):
        """
        Migrate a server alias set from the ServerIPProbed list to
        CommonServerIPProbed, reducing overhead.

        Locates the canonical ProbeResult for x.server in this run (preferring
        the summary-linked ResultEntry, then falling back to ProbeResult,
        resolving duplicates by picking the oldest entry that has a result
        summary string), attaches the shared alias to it, and deletes the
        per-run ServerIPProbed row.
        """
        result = None
        try:
            # Fast path: the server already has a summary entry for this run.
            result = ResultEntry.objects.select_related("result_entry").get(part_of_run=self.part_of_run, servername = x.server).result_entry
        except:
            try:
                result = ProbeData.ProbeResult.objects.get(part_of_run=self.part_of_run, servername = x.server)
            except ProbeData.ProbeResult.DoesNotExist:
                pass
            except ProbeData.ProbeResult.MultipleObjectsReturned:
                # Duplicate results: prefer one with a populated result
                # summary string; among candidates, take the earliest
                # (oldest date, then lowest id).
                result = None
                for y in ProbeData.ProbeResult.objects.select_related("result_summary_group").filter(part_of_run=self.part_of_run, servername = x.server):
                    if result:
                        if ((y.result_summary_group and y.result_summary_group.result_summary_string) or
                            ( not y.result_summary_group and (not result.result_summary_group or (result.result_summary_group and not result.result_summary_group.result_summary_string)))):
                            if y.date < result.date or (y.date == result.date and y.id < result.id):
                                result = y
                    else:
                        result = y
        if result:
            # Move the alias to the shared table and drop the per-run row.
            alias = ProbeData.CommonServerIPProbed.FetchOrCreateItem(x)
            result.common_server_aliases.add(alias)
            x.delete()
def __do_migrate_thread(self,migrate_queue,report_queue):
"""Perform the migrate task for a given queue"""
import Queue
while self.__threads_active:
try:
item = migrate_queue.get(timeout=1)
except Queue.Empty:
continue
try:
self._migrate_server_alias(item)
except:
pass
if report_queue:
report_queue.put(True)
migrate_queue.task_done()
def __report_migrate_thread(self,queue):
"""report progress for the migration"""
import Queue
i=0
while self.__threads_active:
try:
result = queue.get(timeout=1)
except Queue.Empty:
continue
queue.task_done()
i += 1
if i%100 == 0:
print "Migrate Aliases", i
    def migrate_server_aliases(self, report=False, checkactive = None):
        """
        Migrate server aliases from the ServerIPProbed list to
        CommonServerIPProbed, reducing overhead.

        Spawns 40 daemon worker threads that drain a bounded queue of the
        run's ServerIPProbed rows, plus an optional progress-reporting thread.

        :param report: when True, print totals and progress every 100 items.
        :param checkactive: optional callable polled roughly every 10 seconds
            while feeding/draining the queue; if it returns a false value the
            whole process exits via sys.exit().
        """
        import threading
        import Queue
        self.__threads_active = True
        # Bounded so the producer blocks instead of loading every row at once.
        migrate_queue = Queue.Queue(100000)
        report_queue = None
        num_probers = 40  # worker threads draining the migrate queue
        threads = []
        if report:
            report_queue = Queue.Queue(100000)
            new_thread = threading.Thread(target=self.__report_migrate_thread, args=(report_queue,))
            new_thread.daemon = True
            new_thread.start()
            threads.append(new_thread)
        for i in range(num_probers):
            new_thread = threading.Thread(target=self.__do_migrate_thread, args=(migrate_queue,report_queue))
            new_thread.daemon = True
            new_thread.start()
            threads.append(new_thread)
        last_check = datetime.datetime.now()
        if report:
            print "(",self.part_of_run_id, ") Migrate", ProbeData.ServerIPProbed.objects.filter(part_of_run=self.part_of_run).count() ,"aliases"
        # Feed every per-run alias row to the workers, checking liveness
        # periodically when a checkactive callback was supplied.
        for x in ProbeData.ServerIPProbed.objects.filter(part_of_run=self.part_of_run).select_related("server","ip_address"):
            if checkactive and callable(checkactive):
                if (datetime.datetime.now() - last_check).seconds >= 10.0:
                    last_check = datetime.datetime.now()
                    if not checkactive():
                        sys.exit()
            migrate_queue.put(x)
        if checkactive and callable(checkactive):
            while not migrate_queue.empty():
                if not checkactive():
                    sys.exit()
                time.sleep(10)
        # Wait for all queued work to be acknowledged, then shut the
        # workers down and join them.
        migrate_queue.join()
        if report_queue:
            report_queue.join()
        self.__threads_active = False
        for t in threads:
            t.join()
def __generatesummary(self, result_list, summary_items):
    """
    Generate a result summary based on the requested datafields

    result_list:   ResultEntry ids the summary is restricted to.
    summary_items: list of ResultSummaryList.RESULT_* values; the first one
                   selects the grouping of this level, the remaining ones
                   are applied recursively per item (subset_results).
    Returns a list of model instances annotated with filtered_count,
    total_count and subset_results, ordered by descending filtered_count;
    None when summary_items is empty.
    """
    from django.db.models import Count
    if not summary_items:
        return None
    summary_name = summary_items[0]
    # source is queried for the filtered counts, source2 for the totals.
    source = None
    source2 = None
    # Reverse-relation path from the summarized model back to ResultEntry.
    result_id_field = "resultentry"
    if summary_name == ResultSummaryList.RESULT_ID:
        return list(result_list)
    elif summary_name == ResultSummaryList.RESULT_PRIMARYAGENT:
        source = self.PrimaryServerAgentSummary.all().filter(part_of_run=self.part_of_run)
        source2 = ResultPrimaryServerAgent.objects.select_related("agent_name").filter(part_of_run=self.part_of_run)
        result_entry_field = "PrimaryServerAgentSummary"
    elif summary_name == ResultSummaryList.RESULT_SECONDARYAGENT:
        source = ResultSecondaryServerAgent.objects.filter(part_of_run=self.part_of_run)
        source2 = ResultSecondaryServerAgent.objects.select_related("agent_name").filter(part_of_run=self.part_of_run)
        result_entry_field = "SecondaryServerAgentSummary"
    elif summary_name == ResultSummaryList.RESULT_DOMAIN:
        source = ProbeData.ServerDomain.objects.filter(level__gt=0)
        source2 = ProbeData.ServerDomain.objects
        result_entry_field = "DomainSummary0"
    elif summary_name == ResultSummaryList.RESULT_CONDITION:
        # NOTE(review): this branch sets result_id_field but never
        # result_entry_field; a nested summary level (len(summary_items)>1)
        # would hit an unbound result_entry_field below — confirm.
        source = ResultCondition.objects.filter(part_of_run=self.part_of_run)
        source2 = ResultCondition.objects.filter(part_of_run=self.part_of_run)
        result_id_field = "resultconditionset__resultentry"
    elif summary_name == ResultSummaryList.RESULT_SHORTPRIMARYAGENT:
        source = ResultPrimaryServerAgentFamily.objects.filter(part_of_run=self.part_of_run)
        source2 = ResultPrimaryServerAgentFamily.objects.select_related("agent_name").filter(part_of_run=self.part_of_run)
        result_entry_field = "PrimaryShortServerAgentSummary"
    elif summary_name == ResultSummaryList.RESULT_SHORTSECONDARYAGENT:
        source = ResultSecondaryServerAgentFamily.objects.filter(part_of_run=self.part_of_run)
        source2 = ResultSecondaryServerAgentFamily.objects.select_related("agent_name").filter(part_of_run=self.part_of_run)
        result_entry_field = "SecondaryShortServerAgentSummary"
    elif summary_name == ResultSummaryList.RESULT_CIPHER:
        source = ProbeData.CipherName.objects
        source2 = ProbeData.CipherName.objects
        result_id_field = "resultciphersuite__resultciphersuitegroupentry__resultentry"
    elif summary_name == ResultSummaryList.RESULT_CIPHERGROUP:
        source = ResultCipherSuiteGroupEntry.objects.filter(part_of_run=self.part_of_run)
        source2 = ResultCipherSuiteGroupEntry.objects.select_related("cipher_suites").filter(part_of_run=self.part_of_run)
        result_entry_field = "cipher_suite_group"
    elif summary_name == ResultSummaryList.RESULT_IP:
        source = ProbeData.IPAddressDomain.objects
        source2 = ProbeData.IPAddressDomain.objects
        result_entry_field = "IPDomainSummary0"
    elif summary_name in [ResultSummaryList.RESULT_HOSTS, ResultSummaryList.RESULT_HOSTS_ALEXA, ResultSummaryList.RESULT_URLS_TEXT, ResultSummaryList.RESULT_HOST_RUNLIST]:
        # Host-like summaries: every ResultEntry is its own group.
        source = ResultEntry.objects.filter(part_of_run=self.part_of_run)
        source2 = ResultEntry.objects.select_related("servername").filter(part_of_run=self.part_of_run)
        result_entry_field = None
    elif summary_name == ResultSummaryList.RESULT_PROTOCOLS:
        # Only the "highest supported protocol" conditions.
        source = self.conditions.filter(condition__in = [ResultCondition.RESULTC_SUPPORT_HIGHEST_SSLV3,
                                                         ResultCondition.RESULTC_SUPPORT_HIGHEST_TLS_1_0,
                                                         ResultCondition.RESULTC_SUPPORT_HIGHEST_TLS_1_1,
                                                         ResultCondition.RESULTC_SUPPORT_HIGHEST_TLS_1_2]).filter(part_of_run=self.part_of_run)
        source2 = ResultCondition.objects
        result_id_field = "resultconditionset__resultentry"
    elif summary_name == ResultSummaryList.RESULT_HOST_PROFILES:
        source = ProbeData.ProbeCommonResult.objects.filter(proberesult__part_of_run=self.part_of_run)
        source2 = ProbeData.ProbeCommonResult.objects.filter(proberesult__part_of_run=self.part_of_run)
        result_entry_field = "key"
        result_id_field = "proberesult__resultentry"
    elif summary_name == ResultSummaryList.RESULT_HOST_BASEPROFILES:
        source = ProbeData.ProbeCommonResult.objects.filter(probecommonresult__proberesult__part_of_run=self.part_of_run)
        source2 = ProbeData.ProbeCommonResult.objects.filter(probecommonresult__proberesult__part_of_run=self.part_of_run)
        result_entry_field = "key"
        result_id_field = "probecommonresult__proberesult__resultentry"
    elif summary_name == ResultSummaryList.RESULT_HOST_FUNDPROFILES:
        source = ProbeData.ProbeCommonResult.objects.filter(fundamental_commonresult__proberesult__part_of_run=self.part_of_run)
        source2 = ProbeData.ProbeCommonResult.objects.filter(fundamental_commonresult__proberesult__part_of_run=self.part_of_run)
        result_entry_field = "key"
        result_id_field = "fundamental_commonresult__proberesult__resultentry"
    else:
        raise Exception()  # unknown RESULT_* summary requested
    # Count how many of the filtered entries fall into each group.
    if summary_name in [ResultSummaryList.RESULT_HOSTS, ResultSummaryList.RESULT_HOSTS_ALEXA, ResultSummaryList.RESULT_URLS_TEXT, ResultSummaryList.RESULT_HOST_RUNLIST]:
        q = list(source.filter(part_of_run= self.part_of_run,id__in = result_list).annotate(filtered_count=Count("id")).order_by("-filtered_count"))
    else:
        q = list(source.distinct().filter(**{result_id_field+"__in": result_list, result_id_field+"__part_of_run":self.part_of_run}).annotate(filtered_count=Count(result_id_field)).order_by("-filtered_count"))
    ids = [x.id for x in q]
    #print summary_name, " ", len(ids)
    # Second pass: totals over the whole run for the same group ids.
    if summary_name in [ResultSummaryList.RESULT_HOSTS,
                        ResultSummaryList.RESULT_HOSTS_ALEXA,
                        ResultSummaryList.RESULT_URLS_TEXT,
                        ResultSummaryList.RESULT_HOST_RUNLIST]:
        q1 = source2.filter(part_of_run= self.part_of_run).filter(id__in=ids).annotate(total_count=Count("id"))
    else:
        q1 = source2.filter(id__in=ids, **{result_id_field+"__part_of_run":self.part_of_run}).annotate(total_count=Count(result_id_field))
    # Map group id -> total (unfiltered) count.
    q2 = dict(q1.values_list("id","total_count"))#[(x.id,x) for x in q1])
    for x in q:
        x.total_count = q2.get(x.id,0)
        #print x.id, ":",x.total_count
        x.subset_results = None
        if len(summary_items)>1:
            #print "-------------"
            if result_entry_field:
                item_results = list(set(ResultEntry.objects.filter(part_of_run= self.part_of_run, **{result_entry_field:x.id}).values_list("id", flat=True)) & set(result_list))
            else:
                #item_results = list(x.resultentry_set.filter(id__in = result_list).values_list("id", flat=True))
                item_results = list(set(x.resultentry_set.filter(part_of_run= self.part_of_run).values_list("id", flat=True)) & set(result_list))
            # Recurse for the next summary level, restricted to this group.
            x.subset_results = self.__generatesummary(item_results, summary_items[1:])
            #print "-------------"
    return list(q)
def __generatesummary_cached(self, result_list, summary_items, id=0):
    """Generate a summary based on cached information and the requested information

    In-memory variant of __generatesummary(): instead of running aggregate
    queries it intersects the cached per-item ResultEntry id sets held in
    self.summary_cache (populated by init_cache()/_pre_fill_cache()) with
    result_list.  When self.prefilled is not set, missing cache entries are
    loaded lazily from the database; None is used in the cache as a
    "currently loading" marker so concurrent callers wait instead of
    re-loading.
    """
    if not summary_items:
        return None
    if not hasattr(self, "prefilled"):
        self.prefilled = False
    timebase = time.clock()  # NOTE(review): unused timing probe, kept as-is
    summary_name = summary_items[0]
    source = None
    extra_fields = []
    if summary_name == ResultSummaryList.RESULT_ID:
        return list(result_list)
    elif summary_name == ResultSummaryList.RESULT_PRIMARYAGENT:
        if not self.prefilled:
            source = ResultPrimaryServerAgent.objects.all().filter(part_of_run=self.part_of_run)
        result_entry_field = "PrimaryServerAgentSummary"
        extra_fields = ["agent_name__agent_name"]
    elif summary_name == ResultSummaryList.RESULT_SECONDARYAGENT:
        if not self.prefilled:
            source = ResultSecondaryServerAgent.objects.all().filter(part_of_run=self.part_of_run)
        result_entry_field = "SecondaryServerAgentSummary"
        extra_fields = ["agent_name__agent_name"]
    elif summary_name == ResultSummaryList.RESULT_DOMAIN:
        if not self.prefilled:
            source = ProbeData.ServerDomain.objects.all().filter(level__gt=0)
        result_entry_field = "DomainSummary0"
        extra_fields = [ "level", "domain_name", "full_domain_name", "domain_parent_id"]
    elif summary_name == ResultSummaryList.RESULT_CONDITION:
        if not self.prefilled:
            source = ResultCondition.objects.all().filter(part_of_run=self.part_of_run)
        result_entry_field = "result_summary_group"
        extra_fields = ["condition"]
    elif summary_name == ResultSummaryList.RESULT_SHORTPRIMARYAGENT:
        if not self.prefilled:
            source = ResultPrimaryServerAgentFamily.objects.all().filter(part_of_run=self.part_of_run)
        result_entry_field = "PrimaryShortServerAgentSummary"
        extra_fields = ["agent_name__agent_shortname"]
    elif summary_name == ResultSummaryList.RESULT_SHORTSECONDARYAGENT:
        if not self.prefilled:
            source = ResultSecondaryServerAgentFamily.objects.all().filter(part_of_run=self.part_of_run)
        result_entry_field = "SecondaryShortServerAgentSummary"
        extra_fields = ["agent_name__agent_shortname"]
    elif summary_name == ResultSummaryList.RESULT_CIPHER:
        if not self.prefilled:
            source = ProbeData.CipherName.objects.all()
        result_entry_field = "resultciphersuite__resultciphersuitegroupentry__resultentry"
        extra_fields = ["ciphername"]
    elif summary_name == ResultSummaryList.RESULT_CIPHERGROUP:
        if not self.prefilled:
            source = ResultCipherSuiteGroupEntry.objects.all().filter(part_of_run=self.part_of_run)
        result_entry_field = "cipher_suite_group"
        extra_fields = ["cipher_suites__cipher_suites__ciphername"]
    elif summary_name == ResultSummaryList.RESULT_IP:
        if not self.prefilled:
            source = IPAddressDomain.objects.all()
        result_entry_field = "IPDomainSummary0"
        extra_fields = [ "level", "ip_domain", "full_ip_mask", "ip_parent_id",]
    elif summary_name in [ResultSummaryList.RESULT_HOSTS, ResultSummaryList.RESULT_HOSTS_ALEXA]:
        if not self.prefilled:
            source = ResultEntry.objects.all().filter(part_of_run=self.part_of_run)
        result_entry_field = None
    elif summary_name == ResultSummaryList.RESULT_PROTOCOLS:
        if not self.prefilled:
            source = self.conditions.filter(condition__in = [ResultCondition.RESULTC_SUPPORT_HIGHEST_SSLV3,
                                                             ResultCondition.RESULTC_SUPPORT_HIGHEST_TLS_1_0,
                                                             ResultCondition.RESULTC_SUPPORT_HIGHEST_TLS_1_1,
                                                             ResultCondition.RESULTC_SUPPORT_HIGHEST_TLS_1_2]).filter(part_of_run=self.part_of_run)
        result_entry_field = None
        extra_fields = ["condition"]
    else:
        raise Exception()  # unknown RESULT_* summary requested
    q = []
    if summary_name in [ResultSummaryList.RESULT_HOSTS, ResultSummaryList.RESULT_HOSTS_ALEXA]:
        cache = self.summary_cache.setdefault("entries", {})
        if not self.prefilled:
            # Reserve the ids we are about to load (None = "loading").
            self.cache_lock.acquire()
            already_loaded = set(cache.iterkeys())
            to_be_loaded = set(result_list) - already_loaded
            for x in to_be_loaded:
                if x not in cache:
                    cache[x] = None
            self.cache_lock.release()
            items = []
            try:
                items = list(source.filter(id__in = list(to_be_loaded)).values("id","servername__servername", "servername__port",*(extra_fields if extra_fields else [])));
            except:
                pass  # NOTE(review): DB errors leave reserved slots as None
            self.cache_lock.acquire()
            for x in items:
                if x["id"] not in cache or cache[x["id"]] == None:
                    x.update({"name":x["servername__servername"] + ":" + str(x["servername__port"]),"_result_entry_list":[], "__cache__":cache})
                    cache[x["id"]] = x
            self.cache_lock.release()
        # Wait (poll) until concurrent loaders finished every entry we need.
        waiting = set(result_list)
        while waiting:
            waiting = set([x for x in waiting if cache[x] == None])
            if not waiting:
                break;
            time.sleep(1)
        def do_search3(q, result_list, cache):
            # One summary row per requested host entry.
            for x in result_list:
                if x in cache:
                    q.append({"value":cache[x], "filtered_id":[x], "filtered_count":1, "total_count":1, "subset_results": None})
        do_search3(q, result_list, cache)
        # NOTE(review): these rows carry no "sort_key"; sort_list() below
        # looks like it would raise KeyError for a non-empty host summary
        # — confirm intended behavior.
    else:
        cache = self.summary_cache.setdefault(summary_name, {})
        cache_host = self.summary_cache.setdefault("entries", {})
        already_loaded = set(cache.iterkeys())
        if not self.prefilled:
            items = list(source.exclude(id__in=list(already_loaded)).filter(resultentry__part_of_run= self.part_of_run, resultentry__id__in = result_list).
                         values("id", *(extra_fields if extra_fields else []))
                         )
            if items or already_loaded:
                items2 = []
                if items:
                    # Reserve the new ids under the lock; items2 collects
                    # only those this caller is responsible for loading.
                    self.cache_lock.acquire()
                    for x in items:
                        already_loaded.add(x["id"])
                        if x["id"] not in cache or cache[x["id"]] == None:
                            cache[x["id"]] = None
                            items2.append(x)
                    self.cache_lock.release()
                    # Fetch each item's ResultEntry id set outside the lock.
                    for x in items2:
                        try:
                            x["_result_entry_list"]= set(ResultEntry.objects.filter(part_of_run= self.part_of_run, **{result_entry_field+"__id":x["id"]}).values_list("id",flat=True))
                        except:
                            raise
                    self.cache_lock.acquire()
                    for x in items2:
                        if x["id"] not in cache or cache[x["id"]] == None:
                            x.update({"__cache__":cache})
                            cache[x["id"]] = x
                            # Maintain the reverse index: entry id -> item ids.
                            for y in x["_result_entry_list"]:
                                if y in cache_host:
                                    cache_host[y].setdefault(summary_name,set()).add(x["id"])
                    self.cache_lock.release()
        # Wait for concurrent loaders still holding None placeholders.
        while any([cache[x] == None for x in already_loaded]):
            time.sleep(1)
        result_list_set = set(result_list)
        if len(already_loaded) < len(result_list):
            # Fewer cached items than requested entries: scan the items.
            def do_search(q,already_loaded,result_list_set, cache):
                for xi in already_loaded:
                    x = cache[xi]
                    match = x["_result_entry_list"] & result_list_set
                    if match:
                        add_item = {"value":x, "filtered_id":match, "filtered_count":len(match), "total_count":len(x["_result_entry_list"]),"subset_results": None}
                        add_item["sort_key"] = [add_item["filtered_count"], add_item["total_count"]] + [x[z] for z in extra_fields]
                        q.append(add_item)
            do_search(q,already_loaded,result_list_set, cache)
        else:
            # Otherwise walk only the items referenced by the requested
            # entries, using the reverse index stored on the host entries.
            def do_search2(self,q,already_loaded,result_list_set, cache, cache_host, summary_name):
                visited = set()
                timebase = time.clock()  # NOTE(review): unused, kept as-is
                for xi1 in [xi2 for xi2 in result_list_set if xi2 in cache_host]:
                    visited.update(cache_host[xi1].get(summary_name,[]))
                for xi in visited:
                    timebase2 = time.clock()  # NOTE(review): unused, kept as-is
                    x = cache[xi]
                    match = x["_result_entry_list"] & result_list_set
                    if match:
                        add_item = {"value":x, "filtered_id":match, "filtered_count":len(match), "total_count":len(x["_result_entry_list"]),"subset_results": None}
                        add_item["sort_key"] = [add_item["filtered_count"], add_item["total_count"]]+[x[z] for z in extra_fields]
                        q.append(add_item)
            do_search2(self,q,already_loaded,result_list_set, cache, cache_host, summary_name)
    # Sort rows by (filtered_count, total_count, extra fields), descending.
    def sort_list(q,extra_fields):
        return sorted(q, reverse=True, key = lambda x: x["sort_key"])
    q = sort_list(q,extra_fields)
    if len(summary_items)>1:
        for x in q:
            #print "-------------"
            item_results = x["filtered_id"]
            # Deeper summary levels always use the uncached implementation.
            x["subset_results"] = self.__generatesummary(item_results, summary_items[1:])
            #print "-------------"
    return list(q)
def _pre_fill_cache(self, summary_name, entry_event, extra_query):
    """Fill the cache with the requested information

    Loads every item of the given RESULT_* summary kind into
    self.summary_cache, together with the set of ResultEntry ids each item
    maps to.  Run from worker threads by init_cache(); the "entries" loader
    sets entry_event once host entries are cached so the other loaders can
    build their reverse indexes against them.

    summary_name: one of ResultSummaryList.RESULT_* (subset supported here).
    entry_event:  threading.Event shared between the loader threads.
    extra_query:  optional Q object restricting the host-entry load
                  (applied only for RESULT_HOSTS).
    """
    source = None
    source2 = None  # NOTE(review): assigned but never used in this method
    extra_fields = None
    if summary_name == ResultSummaryList.RESULT_PRIMARYAGENT:
        source = self.PrimaryServerAgentSummary.filter(part_of_run=self.part_of_run)
        result_entry_field = "PrimaryServerAgentSummary"
        extra_fields = ["agent_name__agent_name"]
    elif summary_name == ResultSummaryList.RESULT_SECONDARYAGENT:
        source = self.SecondaryServerAgentSummary.filter(part_of_run=self.part_of_run)
        result_entry_field = "SecondaryServerAgentSummary"
        extra_fields = ["agent_name__agent_name"]
    elif summary_name == ResultSummaryList.RESULT_DOMAIN:
        source = self.DomainEntries0
        result_entry_field = "DomainSummary0"
        extra_fields = [ "level", "domain_name", "full_domain_name", "domain_parent_id"]
    elif summary_name == ResultSummaryList.RESULT_CONDITION:
        source = self.conditions
        result_entry_field = "result_summary_group"
        extra_fields = ["condition"]
    elif summary_name == ResultSummaryList.RESULT_SHORTPRIMARYAGENT:
        source = self.PrimaryShortServerAgentSummary.filter(part_of_run=self.part_of_run)
        result_entry_field = "PrimaryShortServerAgentSummary"
        extra_fields = ["agent_name__agent_shortname"]
    elif summary_name == ResultSummaryList.RESULT_SHORTSECONDARYAGENT:
        source = self.SecondaryShortServerAgentSummary.filter(part_of_run=self.part_of_run)
        result_entry_field = "SecondaryShortServerAgentSummary"
        extra_fields = ["agent_name__agent_shortname"]
    elif summary_name == ResultSummaryList.RESULT_CIPHER:
        source = ProbeData.CipherName.objects
        result_entry_field = "ciphersuitegroup__resultciphersuitegroupentry__resultentry"
        extra_fields = ["ciphername"]
    elif summary_name == ResultSummaryList.RESULT_CIPHERGROUP:
        source = self.CipherSuiteGroupEntries.filter(part_of_run=self.part_of_run)
        result_entry_field = "cipher_suite_group"
        extra_fields = ["cipher_suites__cipher_suites__ciphername"]
    elif summary_name == ResultSummaryList.RESULT_IP:
        source = self.IPDomainEntries0
        result_entry_field = "IPDomainSummary0"
        extra_fields = [ "level", "ip_domain", "full_ip_mask", "ip_parent_id",]
    elif summary_name in [ResultSummaryList.RESULT_HOSTS, ResultSummaryList.RESULT_HOSTS_ALEXA]:
        source = ResultEntry.objects.filter(part_of_run= self.part_of_run)
        if extra_query and summary_name in [ResultSummaryList.RESULT_HOSTS]:
            source = source.filter(extra_query)
        result_entry_field = None
    else:
        raise Exception()  # unsupported RESULT_* kind for pre-filling
    q = []
    if summary_name in [ResultSummaryList.RESULT_HOSTS, ResultSummaryList.RESULT_HOSTS_ALEXA]:
        cache = self.summary_cache.setdefault("entries", {})
        items = []
        temp_group = {}
        try:
            items = list(source.all().values("id","servername__servername", "servername__port",
                                             "result_summary_group_id",
                                             *(extra_fields if extra_fields else [])));
        except:
            pass  # NOTE(review): DB errors silently yield an empty cache
        #print summary_name, len(items)
        # Group entry ids by their result_summary_group for condition lookup.
        for x in items:
            temp_group.setdefault(x["result_summary_group_id"],set()).add(x["id"])
        try:
            cond_items = [{"id":x.id, "condition":x.condition,
                           "groups_id":list(x.resultconditionset_set.all().values_list("id",flat=True)),}
                          for x in self.conditions.all()]
        except:
            raise
            pass  # NOTE(review): unreachable after raise, kept as-is
        # Resolve each condition's entry id set via the group mapping.
        for x in cond_items:
            result_items = set()
            for y in x["groups_id"]:
                if y in temp_group:
                    result_items.update(temp_group[y])
            x["_result_entry_list"] = result_items
        self.cache_lock.acquire()
        for x in items:
            if x["id"] not in cache or cache[x["id"]] == None:
                x.update({"name":x["servername__servername"] + ":" + str(x["servername__port"]),"_result_entry_list":[], "__cache__":cache})
                cache[x["id"]] = x
        # Conditions are cached under both their id and condition value.
        cond_cache = self.summary_cache.setdefault("conditions", {})
        for x in cond_items:
            if x["id"] not in cond_cache:
                cond_cache[x["id"]] = x
            if x["condition"] not in cond_cache:
                cond_cache[x["condition"]] = x
        self.cache_lock.release()
        # Unblock the other loader threads waiting for the host entries.
        entry_event.set()
    else:
        cache = self.summary_cache.setdefault(summary_name, {})
        items = list(source.all().values("id", *(extra_fields if extra_fields else [])) )
        def fetch_data(summary_name, list_items, query, result_entry_field, cache, cache_lock, entry_event):
            # Per-chunk worker: resolve every item's ResultEntry id set,
            # then (after the host entries exist) publish into the cache.
            for x in list_items:
                try:
                    x["_result_entry_list"]= set(query.filter(**{result_entry_field+"__id":x["id"]}).values_list("id",flat=True))
                    #i+=1;
                    #if i % 100 == 0:
                    #    print summary_name, i,"/",len(list_items)
                except:
                    raise
            entry_event.wait()  # host entries must be cached first
            cache_lock.acquire()
            cache_host = self.summary_cache["entries"]
            for x in list_items:
                if x["id"] not in cache or cache[x["id"]] == None:
                    x.update({"__cache__":cache})
                    cache[x["id"]] = x
                    # Reverse index: host entry id -> item ids of this kind.
                    for y in x["_result_entry_list"]:
                        if y in cache_host:
                            cache_host[y].setdefault(summary_name,set()).add(x["id"])
            cache_lock.release()
        threads = []
        # Split the items over up to ~20 fetch threads, at least 50 per chunk.
        step = max((len(items)+20)/20, 50)
        import threading
        for i in range(0, len(items), step) :
            new_thread = threading.Thread(target=fetch_data, args=(summary_name,items[i:min(len(items),i+step)],
                                                                   ResultEntry.objects.filter(part_of_run= self.part_of_run),
                                                                   result_entry_field, cache, self.cache_lock, entry_event
                                                                   ))
            new_thread.start()
            threads.append(new_thread)
        for x in threads:
            x.join()
        #print summary_name
def init_cache(self, summaries, fQ=None):
    """Pre-populate the summary cache.

    Spawns one _pre_fill_cache() loader thread per requested summary kind,
    all sharing a single event (set by the host-entry loader), waits for
    every loader to finish, and finally marks the cache as prefilled.
    """
    import threading
    # Lazily create the shared cache structures on first use.
    if not hasattr(self, "summary_cache"):
        self.summary_cache = {}
        self.cache_lock = threading.Lock()
    entry_event = threading.Event()
    # One loader thread per requested summary kind.
    workers = [threading.Thread(target=self._pre_fill_cache, args=(summary_name, entry_event, fQ))
               for summary_name in summaries]
    for worker in workers:
        worker.start()
    # Block until every loader completed before declaring the cache ready.
    for worker in workers:
        worker.join()
    self.prefilled = True
def GetAnalyze(self,filter=None,summaries=None, use_cache=False, id=0,limitresult = None):
    """
    Analyze this specific result entry, by selecting entries from the run, limited by
    the filter parameters, then produce the summaries specified

    filter is a dictionary of with QUERY_* as names for the entries with the following meaning.
    An empty dictionary means all entries in the run is used.

        QUERY_CONDITION: List of ANDed ResultCondition conditions the entries must include (if present, this is the primary criteria)

        The following are a list that can be specified in any combination using strings,
        the associated database objects, or primary key ids for the table

        QUERY_DOMAIN, ProbeData.Server
        QUERY_IP, ProbeData.IP_Address
        QUERY_PRIMARYAGENT, ProbeData.PrimaryServerAgent
        QUERY_SECONDARYAGENT, ProbeData.SecondaryServerAgent, ResultSecondaryServerAgent
        QUERY_SHORTPRIMARYAGENT, ProbeData.AgentShortName
        QUERY_SHORTSECONDARYAGENT, ProbeData.AgentShortName
        QUERY_CIPHER, ProbeData.CipherName
        QUERY_CIPHERGROUP, Resultdb2.CipherSuiteGroup
        QUERY_SPECINTOL, ProbeData.CommonSpecificExtensionIntolerance

        QUERY_AUTO , discover the queries from each element class (one of the above, unknowns trigger exception)

    Summaries is a dictionary with caller specified names. Each value in the dictionary is a list
    of RESULT_* enums, with the results using the associated resultdb2 class

        RESULT_ID, id(ResultEntry)
        RESULT_CONDITION, ResultCondition,
        RESULT_DOMAIN, ProbeData.ServerDomain
        RESULT_IP, ProbeData.IPAddressDomain
        RESULT_PRIMARYAGENT, ResultPrimaryServerAgent
        RESULT_SHORTPRIMARYAGENT, ResultPrimaryServerAgentFamily
        RESULT_SECONDARYAGENT, ResultSecondaryServerAgent
        RESULT_SHORTSECONDARYAGENT, ResultSecondaryServerAgentFamily
        RESULT_CIPHER, ResultCipherSuite
        RESULT_CIPHERGROUP, ResultCipherSuiteGroupEntry
        RESULT_PROTOCOLS, ResultCondition: The highest supported TLS protocol
        RESULT_HOSTS, ResultEntry
        RESULT_HOSTS_ALEXA, ResultEntry
        RESULT_URLS_TEXT, URLs as a list of text entry
        RESULT_HOST_RUNLIST, (num,host,port) as a CSV file

    For each name in the dictionary, the returned result dictionary returns a list of objects
    of the associated class that also contain these extra attributes:

        filtered_count: The number of entries in the run matching the filter that have this attribute
        total_count: The number of total entries in the run that have this attribute
        subset_results : If the summaries list contained multiple entries this entry
                is generated for each entry to do summaries for the next level.
                Please note that specifying multiple levels of summaries can make
                the result generation phase take a long time.

    Additionally the dictionary contains these entries (which must not be specified by the caller)

        "_total" : The total number of entries in the run
        "_matching": The number of entries matching the filter.
    """
    #from django.db.models import Count
    result_list = []
    extra_Q=None
    # Lazily create the cache structures (normally done by init_cache()).
    if not hasattr(self, "summary_cache"):
        self.summary_cache = {}
        import threading
        self.cache_lock = threading.Lock()
    # QUERY_AUTO: sort each element into the matching typed query bucket.
    if filter and ResultSummaryList.QUERY_AUTO in filter:
        for x in filter[ResultSummaryList.QUERY_AUTO]:
            found = False
            for name, type in [
                (ResultSummaryList.QUERY_DOMAIN, ProbeData.Server),
                (ResultSummaryList.QUERY_IP, ProbeData.IP_Address),
                (ResultSummaryList.QUERY_PRIMARYAGENT, ProbeData.PrimaryServerAgent),
                (ResultSummaryList.QUERY_SECONDARYAGENT, ProbeData.SecondaryServerAgent),
                (ResultSummaryList.QUERY_SHORTPRIMARYAGENT, ProbeData.AgentShortName),
                (ResultSummaryList.QUERY_SHORTSECONDARYAGENT, ProbeData.AgentShortName),
                (ResultSummaryList.QUERY_CIPHER, ProbeData.CipherName),
                (ResultSummaryList.QUERY_CIPHER, ResultCipherSuite),
                (ResultSummaryList.QUERY_CIPHERGROUP, CipherSuiteGroup),
                (ResultSummaryList.QUERY_CIPHERGROUP, ResultCipherSuiteGroupEntry),
                (ResultSummaryList.QUERY_SPECINTOL, ProbeData.CommonSpecificExtensionIntolerance),
                (ResultSummaryList.QUERY_DHEKEYSIZE, ResultDHEKeySize)
                ]:
                if isinstance(x, type):
                    found = True;
                    filter.setdefault(name,[]).append(x)
                    break;
            if not found:
                raise Exception("Unknown query type")
        del filter[ResultSummaryList.QUERY_AUTO]
        if not filter:
            raise Exception("No query")
    if not filter:
        # No filter at all: every entry of the run matches.
        q = ResultEntry.objects.filter(part_of_run= self.part_of_run)
        if limitresult:
            q = q.filter(limitresult)
        result_list = list(q.values_list("id", flat=True)) if not use_cache else self.summary_cache["entries"].iterkeys()
    else:
        while True:  # single-pass loop, used only to allow early "break"
            is_finished = False
            has_conditions = False
            # Build the combined restriction Q object (AND of all parts).
            extra_Q = None
            if limitresult:
                extra_Q = extra_Q & limitresult if extra_Q else limitresult
            if ResultSummaryList.QUERY_ALEXA_RESTRICT in filter:
                limit = int(filter[ResultSummaryList.QUERY_ALEXA_RESTRICT])
                if limit > ResultSummaryList.NO_ALEXA_LIMIT:
                    if limit == ResultSummaryList.NONALEXA:
                        # Only servers without an Alexa rating.
                        server_Q = Q(servername__alexa_rating = 0)
                    else:
                        server_Q = Q(servername__alexa_rating__gt = 0) & Q(servername__alexa_rating__lte = limit)
                    extra_Q = extra_Q & server_Q if extra_Q else server_Q
            if ResultSummaryList.QUERY_PROTOCOL_RESTRICT in filter:
                protocol = filter[ResultSummaryList.QUERY_PROTOCOL_RESTRICT]
                if protocol:
                    server_Q = Q(servername__protocol__in = protocol)
                    extra_Q = extra_Q & server_Q if extra_Q else server_Q
            if ResultSummaryList.QUERY_RESTRICT_RUN in filter:
                # Restrict to IP addresses that were also seen in other runs.
                q = ProbeData.IP_Address.objects.filter(resultentry__part_of_run__id__in = filter[ResultSummaryList.QUERY_RESTRICT_RUN]).distinct().values_list("id", flat=True)
                run_q = Q(ip_addresses__id__in = q)
                extra_Q = extra_Q & run_q if extra_Q else run_q
            # Resolve the requested conditions (unknown values are ignored).
            qlist = []
            for f in filter.get(ResultSummaryList.QUERY_CONDITION, []):
                try:
                    has_conditions = True
                    if use_cache:
                        condition = self.summary_cache["conditions"][f]
                    else:
                        condition = self.conditions.get(condition = f)
                    qlist.append(condition)
                except:
                    pass
            # Intersect (AND) the entry sets of all requested conditions.
            condition_list = None
            if qlist:
                for c in qlist:
                    if use_cache:
                        condition_list = condition_list & set(c["_result_entry_list"]) if condition_list else set(c["_result_entry_list"])
                    else:
                        q = c.resultconditionset_set.filter(id__in=condition_list) if condition_list else c.resultconditionset_set
                        condition_list = list(q.values_list("id", flat=True))
                    if not condition_list:
                        break;
            if condition_list or not has_conditions:
                if use_cache:
                    result_list = list(condition_list) if condition_list or has_conditions else list(self.summary_cache["entries"].iterkeys())
                    if extra_Q:
                        q = ResultEntry.objects.filter(extra_Q,id__in = result_list)
                        result_list= list(q.distinct().values_list("id", flat=True))
                else:
                    # Non-cached path: condition_list holds condition-set ids.
                    q = ResultEntry.objects.filter(part_of_run= self.part_of_run, result_summary_group__in = condition_list) if condition_list or has_conditions else ResultEntry.objects.filter(part_of_run= self.part_of_run)
                    if extra_Q:
                        q = q.filter(extra_Q)
                    result_list = list(q.distinct().values_list("id", flat=True))
            else:
                result_list = []
            if not result_list:
                is_finished =True
            # Apply each remaining typed query; every match set is ANDed
            # (intersected) with the running result_list.
            for (desc, query_mode,
                 filter_rec, filter_summary,
                 self_entries,
                 entry_fieldname, fieldname,
                 f_fun, extra_cond) in [
                ("Domain",ResultSummaryList.QUERY_DOMAIN,
                 ProbeData.Server, ProbeData.ServerDomain,
                 self.DomainEntries0,
                 "fullservername", "domainentries0",
                 lambda f:f.servername, None),
                ("IP Domain",ResultSummaryList.QUERY_IP,
                 ProbeData.IP_Address, ProbeData.IPAddressDomain,
                 self.IPDomainEntries0,
                 "full_ip_mask", "ipdomainentries",
                 lambda f:f.ip_address, None),
                ("Primary Agent",ResultSummaryList.QUERY_PRIMARYAGENT,
                 ProbeData.PrimaryServerAgent, (ResultPrimaryServerAgent, ResultPrimaryServerAgent.objects.filter(part_of_run=self.part_of_run)),
                 self.PrimaryServerAgentSummary.filter(part_of_run=self.part_of_run),
                 "agent_name", "primaryserveragentsummary",
                 lambda f:f.agent_name, None),
                ("Secondary Agent",ResultSummaryList.QUERY_SECONDARYAGENT,
                 ProbeData.SecondaryServerAgent, (ResultSecondaryServerAgent, ResultSecondaryServerAgent.objects.filter(part_of_run=self.part_of_run)),
                 self.SecondaryServerAgentSummary.filter(part_of_run=self.part_of_run),
                 "agent_name", "secondaryserveragentsummary",
                 lambda f:f.agent_name, None),
                ("Primary Agent Family",ResultSummaryList.QUERY_SHORTPRIMARYAGENT,
                 ProbeData.AgentShortName, (ResultPrimaryServerAgentFamily, ResultPrimaryServerAgentFamily.objects.filter(part_of_run=self.part_of_run)),
                 self.PrimaryShortServerAgentSummary.filter(part_of_run=self.part_of_run),
                 "agent_shortname", "primaryshortserveragentsummary",
                 lambda f:f.agent_shortname, None),
                ("Secondary Agent Family",ResultSummaryList.QUERY_SHORTSECONDARYAGENT,
                 ProbeData.AgentShortName, (ResultSecondaryServerAgentFamily, ResultSecondaryServerAgentFamily.objects.filter(part_of_run=self.part_of_run)),
                 self.SecondaryShortServerAgentSummary.filter(part_of_run=self.part_of_run),
                 "agent_shortname", "secondaryshortserveragentsummary",
                 lambda f:f.agent_shortname, None),
                ("Cipher Suite",ResultSummaryList.QUERY_CIPHER,
                 ProbeData.CipherName, None,
                 self.CipherSuiteGroupEntries.filter(part_of_run = self.part_of_run),
                 "cipher_support__cipher_name", "cipher_suite_group",
                 lambda f:f.id, None),
                ("Cipher Suite exclude",ResultSummaryList.QUERY_CIPHER_EXCLUDE,
                 ProbeData.CipherName, None,
                 self.CipherSuiteGroupEntries.filter(part_of_run = self.part_of_run),
                 "cipher_support__cipher_name", "cipher_suite_group",
                 lambda f:f.id, None),
                ("Cipher Suite Group",ResultSummaryList.QUERY_CIPHERGROUP,
                 CipherSuiteGroup, (ResultCipherSuiteGroupEntry, ResultCipherSuiteGroupEntry.objects.filter(part_of_run=self.part_of_run)),
                 self.CipherSuiteGroupEntries.filter(part_of_run = self.part_of_run),
                 "cipher_suites_string", "cipher_suite_group",
                 lambda f:f.cipher_suites_string, None),
                ("Specific Intolerance",ResultSummaryList.QUERY_SPECINTOL,
                 None, ProbeData.CommonSpecificExtensionIntolerance,
                 None,
                 "intolerant_for_extension", "result_entry__common_result__intolerant_for_extension",
                 lambda f:f.intolerant_for_extension, {"result_entry__part_of_run":self.part_of_run}),
                ("DHE Keysize",ResultSummaryList.QUERY_DHEKEYSIZE,
                 None, ResultDHEKeySize,
                 self.dhe_keysizes.filter(part_of_run=self.part_of_run),
                 "dhe_keysize", "dhe_keysize",
                 lambda f:f.dhe_keysize, None),
                ]:
                if not is_finished and query_mode in filter:
                    # Translate each filter element into summary-table ids.
                    serverids = []
                    for f in filter.get(query_mode, []):
                        if filter_rec and isinstance(f, filter_rec):
                            param = {entry_fieldname:f_fun(f)}
                            if extra_cond:
                                param.update(extra_cond)
                            serverids+=list(self_entries.filter(**param).values_list("id",flat=True))
                        else:
                            try:
                                # NOTE(review): when filter_summary is a
                                # (class, queryset) tuple, the second
                                # isinstance() receives that tuple; for a
                                # non-matching f this raises TypeError on
                                # the queryset element — confirm whether
                                # "elif" was intended here.
                                if filter_summary and isinstance(filter_summary, tuple) and isinstance(f, filter_summary[0]):
                                    serverids.append(f.id);
                                if filter_summary and isinstance(f, filter_summary):
                                    serverids.append(f.id);
                                elif isinstance(f, int):
                                    serverids.append(f);
                                elif isinstance(f, str):
                                    param = {entry_fieldname:f}
                                    if extra_cond:
                                        param.update(extra_cond)
                                    serverids+=list(self_entries.filter(**param).values_list("id",flat=True))
                                else:
                                    raise Exception("TypeError in %s query: %s" %(desc,f))
                            except:
                                print "a:", filter_summary
                                print "b:",f
                                raise
                    if not serverids:
                        result_list = []
                        is_finished =True
                        break
                    #q = (self.summaries.filter(id__in = result_list) if result_list else self.summaries)
                    if query_mode in [ResultSummaryList.QUERY_CIPHER_EXCLUDE]:
                        # Exclusion query: remove matching entries instead.
                        q = ResultEntry.objects.filter(part_of_run= self.part_of_run).filter(**{fieldname+"__in": list(set(serverids))})
                        temp_list = set(result_list) - set(q.distinct().values_list("id", flat=True))
                    else:
                        q = ResultEntry.objects.filter(part_of_run= self.part_of_run).filter(**{fieldname+"__in": serverids})
                        temp_list = set(q.distinct().values_list("id", flat=True))
                    result_list = list(temp_list & set(result_list)) if result_list else list(temp_list)
                    if not result_list:
                        is_finished =True
                        break
            break #Single run
    # Total count over the run (only entries with a summary group).
    tot_q = ResultEntry.objects.filter(part_of_run= self.part_of_run).filter(result_summary_group__id__gt=0)
    if extra_Q:
        tot_q = tot_q.filter(extra_Q)
    result = {"_total": tot_q.count(), "_matching":len(result_list)}
    # Generate each requested summary over the matching entries.
    if use_cache:
        for (summary_name, summary_items) in summaries.iteritems():
            result[summary_name] = self.__generatesummary_cached(result_list, summary_items, id)
    else:
        for (summary_name, summary_items) in summaries.iteritems():
            result[summary_name] = self.__generatesummary(result_list, summary_items)
    return result
| apache-2.0 |
gradle/gradle | subprojects/dependency-management/src/main/java/org/gradle/api/internal/artifacts/transform/DefaultTransformer.java | 35641 | /*
* Copyright 2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.internal.artifacts.transform;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSortedMap;
import com.google.common.reflect.TypeToken;
import org.gradle.api.InvalidUserDataException;
import org.gradle.api.artifacts.transform.InputArtifact;
import org.gradle.api.artifacts.transform.InputArtifactDependencies;
import org.gradle.api.artifacts.transform.TransformAction;
import org.gradle.api.artifacts.transform.TransformParameters;
import org.gradle.api.artifacts.transform.VariantTransformConfigurationException;
import org.gradle.api.file.FileSystemLocation;
import org.gradle.api.internal.DocumentationRegistry;
import org.gradle.api.internal.DomainObjectContext;
import org.gradle.api.internal.attributes.ImmutableAttributes;
import org.gradle.api.internal.file.FileCollectionFactory;
import org.gradle.api.internal.file.FileLookup;
import org.gradle.api.internal.plugins.DslObject;
import org.gradle.api.internal.project.ProjectInternal;
import org.gradle.api.internal.tasks.NodeExecutionContext;
import org.gradle.api.internal.tasks.TaskDependencyResolveContext;
import org.gradle.api.internal.tasks.properties.FileParameterUtils;
import org.gradle.api.internal.tasks.properties.InputFilePropertyType;
import org.gradle.api.internal.tasks.properties.InputParameterUtils;
import org.gradle.api.internal.tasks.properties.OutputFilePropertyType;
import org.gradle.api.internal.tasks.properties.PropertyValue;
import org.gradle.api.internal.tasks.properties.PropertyVisitor;
import org.gradle.api.internal.tasks.properties.PropertyWalker;
import org.gradle.api.provider.Provider;
import org.gradle.api.reflect.InjectionPointQualifier;
import org.gradle.api.tasks.FileNormalizer;
import org.gradle.internal.Describables;
import org.gradle.internal.deprecation.DeprecationLogger;
import org.gradle.internal.exceptions.DefaultMultiCauseException;
import org.gradle.internal.execution.fingerprint.InputFingerprinter;
import org.gradle.internal.execution.fingerprint.InputFingerprinter.FileValueSupplier;
import org.gradle.internal.fingerprint.AbsolutePathInputNormalizer;
import org.gradle.internal.fingerprint.CurrentFileCollectionFingerprint;
import org.gradle.internal.fingerprint.DirectorySensitivity;
import org.gradle.internal.fingerprint.LineEndingSensitivity;
import org.gradle.internal.hash.ClassLoaderHierarchyHasher;
import org.gradle.internal.hash.HashCode;
import org.gradle.internal.hash.Hasher;
import org.gradle.internal.hash.Hashing;
import org.gradle.internal.instantiation.InstanceFactory;
import org.gradle.internal.instantiation.InstantiationScheme;
import org.gradle.internal.isolated.IsolationScheme;
import org.gradle.internal.isolation.Isolatable;
import org.gradle.internal.isolation.IsolatableFactory;
import org.gradle.internal.logging.text.TreeFormatter;
import org.gradle.internal.model.CalculatedValueContainer;
import org.gradle.internal.model.CalculatedValueContainerFactory;
import org.gradle.internal.model.ModelContainer;
import org.gradle.internal.model.ValueCalculator;
import org.gradle.internal.operations.BuildOperationContext;
import org.gradle.internal.operations.BuildOperationDescriptor;
import org.gradle.internal.operations.BuildOperationExecutor;
import org.gradle.internal.operations.BuildOperationType;
import org.gradle.internal.operations.RunnableBuildOperation;
import org.gradle.internal.reflect.DefaultTypeValidationContext;
import org.gradle.internal.reflect.problems.ValidationProblemId;
import org.gradle.internal.reflect.validation.Severity;
import org.gradle.internal.reflect.validation.TypeValidationContext;
import org.gradle.internal.service.ServiceLookup;
import org.gradle.internal.service.ServiceLookupException;
import org.gradle.internal.service.UnknownServiceException;
import org.gradle.internal.snapshot.ValueSnapshot;
import org.gradle.model.internal.type.ModelType;
import org.gradle.work.InputChanges;
import javax.annotation.Nullable;
import java.io.File;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import java.util.Map;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import static org.gradle.api.internal.tasks.properties.AbstractValidatingProperty.reportValueNotSet;
/**
 * Runtime representation of a registered artifact transform backed by a user-provided
 * {@link TransformAction} implementation.
 *
 * <p>Responsible for: (1) isolating and fingerprinting the transform's parameter object
 * (lazily, via {@link #isolatedParameters}), (2) instantiating the action with the
 * injectable services it asks for ({@code @InputArtifact}, {@code @InputArtifactDependencies},
 * {@link InputChanges}), and (3) running the action against an output directory.</p>
 */
public class DefaultTransformer extends AbstractTransformer<TransformAction<?>> {
    // Normalization strategies used when fingerprinting the input artifact and its dependencies.
    private final Class<? extends FileNormalizer> fileNormalizer;
    private final Class<? extends FileNormalizer> dependenciesNormalizer;
    private final FileLookup fileLookup;
    private final ServiceLookup internalServices;
    // Derived from the action type: does its constructor trigger injection of
    // @InputArtifactDependencies / InputChanges?
    private final boolean requiresDependencies;
    private final boolean requiresInputChanges;
    private final InstanceFactory<? extends TransformAction<?>> instanceFactory;
    private final boolean cacheable;
    // Lazily calculated: the isolated copy of the parameter object plus the secondary inputs hash.
    private final CalculatedValueContainer<IsolatedParameters, IsolateTransformerParameters> isolatedParameters;
    private final DirectorySensitivity artifactDirectorySensitivity;
    private final DirectorySensitivity dependenciesDirectorySensitivity;
    private final LineEndingSensitivity artifactLineEndingSensitivity;
    private final LineEndingSensitivity dependenciesLineEndingSensitivity;

    /**
     * Creates a transformer for a freshly registered transform. The parameter object is not
     * isolated here; isolation is deferred into the {@link IsolateTransformerParameters}
     * calculated value.
     */
    public DefaultTransformer(
        Class<? extends TransformAction<?>> implementationClass,
        @Nullable TransformParameters parameterObject,
        ImmutableAttributes fromAttributes,
        Class<? extends FileNormalizer> inputArtifactNormalizer,
        Class<? extends FileNormalizer> dependenciesNormalizer,
        boolean cacheable,
        DirectorySensitivity artifactDirectorySensitivity,
        DirectorySensitivity dependenciesDirectorySensitivity,
        LineEndingSensitivity artifactLineEndingSensitivity,
        LineEndingSensitivity dependenciesLineEndingSensitivity,
        BuildOperationExecutor buildOperationExecutor,
        ClassLoaderHierarchyHasher classLoaderHierarchyHasher,
        IsolatableFactory isolatableFactory,
        FileCollectionFactory fileCollectionFactory,
        FileLookup fileLookup,
        PropertyWalker parameterPropertyWalker,
        InstantiationScheme actionInstantiationScheme,
        DomainObjectContext owner,
        CalculatedValueContainerFactory calculatedValueContainerFactory,
        ServiceLookup internalServices,
        DocumentationRegistry documentationRegistry
    ) {
        super(implementationClass, fromAttributes);
        this.fileNormalizer = inputArtifactNormalizer;
        this.dependenciesNormalizer = dependenciesNormalizer;
        this.fileLookup = fileLookup;
        this.internalServices = internalServices;
        this.instanceFactory = actionInstantiationScheme.forType(implementationClass);
        this.requiresDependencies = instanceFactory.serviceInjectionTriggeredByAnnotation(InputArtifactDependencies.class);
        this.requiresInputChanges = instanceFactory.requiresService(InputChanges.class);
        this.cacheable = cacheable;
        this.artifactDirectorySensitivity = artifactDirectorySensitivity;
        this.dependenciesDirectorySensitivity = dependenciesDirectorySensitivity;
        this.artifactLineEndingSensitivity = artifactLineEndingSensitivity;
        this.dependenciesLineEndingSensitivity = dependenciesLineEndingSensitivity;
        this.isolatedParameters = calculatedValueContainerFactory.create(Describables.of("parameters of", this),
            new IsolateTransformerParameters(parameterObject, implementationClass, cacheable, owner, parameterPropertyWalker, isolatableFactory, buildOperationExecutor, classLoaderHierarchyHasher,
                fileCollectionFactory, documentationRegistry));
    }

    /**
     * Used to recreate a transformer from the configuration cache. The already-isolated
     * parameters are supplied directly instead of being recalculated.
     */
    public DefaultTransformer(
        Class<? extends TransformAction<?>> implementationClass,
        CalculatedValueContainer<IsolatedParameters, IsolateTransformerParameters> isolatedParameters,
        ImmutableAttributes fromAttributes,
        Class<? extends FileNormalizer> inputArtifactNormalizer,
        Class<? extends FileNormalizer> dependenciesNormalizer,
        boolean cacheable,
        FileLookup fileLookup,
        InstantiationScheme actionInstantiationScheme,
        ServiceLookup internalServices,
        DirectorySensitivity artifactDirectorySensitivity,
        DirectorySensitivity dependenciesDirectorySensitivity,
        LineEndingSensitivity artifactLineEndingSensitivity,
        LineEndingSensitivity dependenciesLineEndingSensitivity
    ) {
        super(implementationClass, fromAttributes);
        this.fileNormalizer = inputArtifactNormalizer;
        this.dependenciesNormalizer = dependenciesNormalizer;
        this.fileLookup = fileLookup;
        this.internalServices = internalServices;
        this.instanceFactory = actionInstantiationScheme.forType(implementationClass);
        this.requiresDependencies = instanceFactory.serviceInjectionTriggeredByAnnotation(InputArtifactDependencies.class);
        this.requiresInputChanges = instanceFactory.requiresService(InputChanges.class);
        this.cacheable = cacheable;
        this.isolatedParameters = isolatedParameters;
        this.artifactDirectorySensitivity = artifactDirectorySensitivity;
        this.dependenciesDirectorySensitivity = dependenciesDirectorySensitivity;
        this.artifactLineEndingSensitivity = artifactLineEndingSensitivity;
        this.dependenciesLineEndingSensitivity = dependenciesLineEndingSensitivity;
    }

    /**
     * Reports a validation error when a cacheable transform declares an input file property
     * with absolute-path sensitivity: absolute paths are machine-specific, which defeats
     * sharing cached transform results between machines.
     */
    public static void validateInputFileNormalizer(String propertyName, @Nullable Class<? extends FileNormalizer> normalizer, boolean cacheable, TypeValidationContext validationContext) {
        if (cacheable) {
            if (normalizer == AbsolutePathInputNormalizer.class) {
                validationContext.visitPropertyProblem(problem ->
                    problem.withId(ValidationProblemId.CACHEABLE_TRANSFORM_CANT_USE_ABSOLUTE_SENSITIVITY)
                        .reportAs(Severity.ERROR)
                        .forProperty(propertyName)
                        .withDescription("is declared to be sensitive to absolute paths")
                        .happensBecause("This is not allowed for cacheable transforms")
                        .withLongDescription("Absolute path sensitivity does not allow sharing the transform result between different machines, although that is the goal of cacheable transforms.")
                        .addPossibleSolution("Use a different normalization strategy via @PathSensitive, @Classpath or @CompileClasspath")
                        .documentedAt("validation_problems", "cacheable_transform_cant_use_absolute_sensitivity"));
            }
        }
    }

    @Override
    public Class<? extends FileNormalizer> getInputArtifactNormalizer() {
        return fileNormalizer;
    }

    @Override
    public Class<? extends FileNormalizer> getInputArtifactDependenciesNormalizer() {
        return dependenciesNormalizer;
    }

    @Override
    public boolean isIsolated() {
        // Isolation has happened once the calculated value has been produced.
        return isolatedParameters.getOrNull() != null;
    }

    @Override
    public boolean requiresDependencies() {
        return requiresDependencies;
    }

    @Override
    public boolean requiresInputChanges() {
        return requiresInputChanges;
    }

    @Override
    public boolean isCacheable() {
        return cacheable;
    }

    @Override
    public DirectorySensitivity getInputArtifactDirectorySensitivity() {
        return artifactDirectorySensitivity;
    }

    @Override
    public DirectorySensitivity getInputArtifactDependenciesDirectorySensitivity() {
        return dependenciesDirectorySensitivity;
    }

    @Override
    public LineEndingSensitivity getInputArtifactLineEndingNormalization() {
        return artifactLineEndingSensitivity;
    }

    @Override
    public LineEndingSensitivity getInputArtifactDependenciesLineEndingNormalization() {
        return dependenciesLineEndingSensitivity;
    }

    @Override
    public HashCode getSecondaryInputHash() {
        // Forces isolation if it has not happened yet.
        return isolatedParameters.get().getSecondaryInputsHash();
    }

    /**
     * Instantiates the action and runs it, collecting the outputs it registers.
     */
    @Override
    public ImmutableList<File> transform(Provider<FileSystemLocation> inputArtifactProvider, File outputDir, ArtifactTransformDependencies dependencies, @Nullable InputChanges inputChanges) {
        TransformAction<?> transformAction = newTransformAction(inputArtifactProvider, dependencies, inputChanges);
        DefaultTransformOutputs transformOutputs = new DefaultTransformOutputs(inputArtifactProvider.get().getAsFile(), outputDir, fileLookup);
        transformAction.transform(transformOutputs);
        return transformOutputs.getRegisteredOutputs();
    }

    @Override
    public void visitDependencies(TaskDependencyResolveContext context) {
        // The isolated parameters carry the task dependencies of the parameter object.
        context.add(isolatedParameters);
    }

    @Override
    public void isolateParametersIfNotAlready() {
        isolatedParameters.finalizeIfNotAlready();
    }

    /**
     * Validates the parameter object's properties and folds their value snapshots and file
     * fingerprints into {@code hasher}, producing part of the transform's secondary inputs hash.
     * Output properties on a transform's parameters are a validation error.
     */
    private static void fingerprintParameters(
        DocumentationRegistry documentationRegistry,
        InputFingerprinter inputFingerprinter,
        FileCollectionFactory fileCollectionFactory,
        PropertyWalker propertyWalker,
        Hasher hasher,
        Object parameterObject,
        boolean cacheable
    ) {
        DefaultTypeValidationContext validationContext = DefaultTypeValidationContext.withoutRootType(documentationRegistry, cacheable);
        InputFingerprinter.Result result = inputFingerprinter.fingerprintInputProperties(
            // No previously-known snapshots or fingerprints: everything is computed fresh.
            ImmutableSortedMap.of(),
            ImmutableSortedMap.of(),
            ImmutableSortedMap.of(),
            ImmutableSortedMap.of(),
            visitor -> propertyWalker.visitProperties(parameterObject, validationContext, new PropertyVisitor.Adapter() {
                @Override
                public void visitInputProperty(
                    String propertyName,
                    PropertyValue value,
                    boolean optional
                ) {
                    try {
                        Object preparedValue = InputParameterUtils.prepareInputParameterValue(value);
                        if (preparedValue == null && !optional) {
                            reportValueNotSet(propertyName, validationContext);
                        }
                        visitor.visitInputProperty(propertyName, () -> preparedValue);
                    } catch (Throwable e) {
                        throw new InvalidUserDataException(String.format(
                            "Error while evaluating property '%s' of %s",
                            propertyName,
                            getParameterObjectDisplayName(parameterObject)
                        ), e);
                    }
                }

                @Override
                public void visitInputFileProperty(
                    String propertyName,
                    boolean optional,
                    boolean skipWhenEmpty,
                    DirectorySensitivity directorySensitivity,
                    LineEndingSensitivity lineEndingNormalization,
                    boolean incremental,
                    @Nullable Class<? extends FileNormalizer> fileNormalizer,
                    PropertyValue value,
                    InputFilePropertyType filePropertyType
                ) {
                    validateInputFileNormalizer(propertyName, fileNormalizer, cacheable, validationContext);
                    visitor.visitInputFileProperty(
                        propertyName,
                        incremental ? InputFingerprinter.InputPropertyType.INCREMENTAL : InputFingerprinter.InputPropertyType.NON_INCREMENTAL,
                        new FileValueSupplier(
                            value,
                            // Absolute-path normalization is the default when none is declared.
                            fileNormalizer == null ? AbsolutePathInputNormalizer.class : fileNormalizer,
                            directorySensitivity,
                            lineEndingNormalization,
                            () -> FileParameterUtils.resolveInputFileValue(fileCollectionFactory, filePropertyType, value)));
                }

                @Override
                public void visitOutputFileProperty(
                    String propertyName,
                    boolean optional,
                    PropertyValue value,
                    OutputFilePropertyType filePropertyType
                ) {
                    // Transform parameters must not declare outputs; outputs come from TransformOutputs.
                    validationContext.visitPropertyProblem(problem ->
                        problem.withId(ValidationProblemId.ARTIFACT_TRANSFORM_SHOULD_NOT_DECLARE_OUTPUT)
                            .reportAs(Severity.ERROR)
                            .forProperty(propertyName)
                            .withDescription("declares an output")
                            .happensBecause("is annotated with an output annotation")
                            .addPossibleSolution("Remove the output property and use the TransformOutputs parameter from transform(TransformOutputs) instead")
                            .documentedAt("validation_problems", "artifact_transform_should_not_declare_output")
                    );
                }
            })
        );
        ImmutableMap<String, Severity> validationMessages = validationContext.getProblems();
        if (!validationMessages.isEmpty()) {
            throw new DefaultMultiCauseException(
                String.format(validationMessages.size() == 1
                        ? "A problem was found with the configuration of the artifact transform parameter %s."
                        : "Some problems were found with the configuration of the artifact transform parameter %s.",
                    getParameterObjectDisplayName(parameterObject)),
                validationMessages.keySet().stream()
                    .sorted()
                    .map(InvalidUserDataException::new)
                    .collect(Collectors.toList())
            );
        }
        // Fold the property name alongside each snapshot/fingerprint so that moving a value
        // between properties changes the hash.
        for (Map.Entry<String, ValueSnapshot> entry : result.getValueSnapshots().entrySet()) {
            hasher.putString(entry.getKey());
            entry.getValue().appendToHasher(hasher);
        }
        for (Map.Entry<String, CurrentFileCollectionFingerprint> entry : result.getFileFingerprints().entrySet()) {
            hasher.putString(entry.getKey());
            hasher.putHash(entry.getValue().getHash());
        }
    }

    private static String getParameterObjectDisplayName(Object parameterObject) {
        return ModelType.of(new DslObject(parameterObject).getDeclaredType()).getDisplayName();
    }

    /**
     * Instantiates the user's TransformAction, wiring up injection of the parameters object,
     * the input artifact, its dependencies (only when the action asks for them), and InputChanges.
     */
    private TransformAction<?> newTransformAction(Provider<FileSystemLocation> inputArtifactProvider, ArtifactTransformDependencies artifactTransformDependencies, @Nullable InputChanges inputChanges) {
        TransformParameters parameters = isolatedParameters.get().getIsolatedParameterObject().isolate();
        ServiceLookup services = new IsolationScheme<>(TransformAction.class, TransformParameters.class, TransformParameters.None.class).servicesForImplementation(parameters, internalServices);
        services = new TransformServiceLookup(inputArtifactProvider, requiresDependencies ? artifactTransformDependencies : null, inputChanges, services);
        return instanceFactory.newInstance(services);
    }

    public CalculatedValueContainer<IsolatedParameters, IsolateTransformerParameters> getIsolatedParameters() {
        return isolatedParameters;
    }

    /**
     * Service lookup that satisfies the transform-specific injection points
     * ({@code @InputArtifact}, {@code @InputArtifactDependencies}, {@link InputChanges})
     * and delegates everything else to the wrapped lookup.
     */
    private static class TransformServiceLookup implements ServiceLookup {
        private static final Type FILE_SYSTEM_LOCATION_PROVIDER = new TypeToken<Provider<FileSystemLocation>>() {
        }.getType();

        private final ImmutableList<InjectionPoint> injectionPoints;
        private final ServiceLookup delegate;

        public TransformServiceLookup(Provider<FileSystemLocation> inputFileProvider, @Nullable ArtifactTransformDependencies artifactTransformDependencies, @Nullable InputChanges inputChanges, ServiceLookup delegate) {
            this.delegate = delegate;
            ImmutableList.Builder<InjectionPoint> builder = ImmutableList.builder();
            // Injecting the artifact as a plain File is deprecated; nag when it is actually used.
            builder.add(InjectionPoint.injectedByAnnotation(InputArtifact.class, File.class, () -> {
                DeprecationLogger
                    .deprecate("Injecting the input artifact of a transform as a File")
                    .withAdvice("Declare the input artifact as Provider<FileSystemLocation> instead.")
                    .willBecomeAnErrorInGradle8()
                    .withUserManual("artifact_transforms", "sec:implementing-artifact-transforms")
                    .nagUser();
                return inputFileProvider.get().getAsFile();
            }));
            builder.add(InjectionPoint.injectedByAnnotation(InputArtifact.class, FILE_SYSTEM_LOCATION_PROVIDER, () -> inputFileProvider));
            if (artifactTransformDependencies != null) {
                builder.add(InjectionPoint.injectedByAnnotation(InputArtifactDependencies.class, () -> artifactTransformDependencies.getFiles().orElseThrow(() -> new IllegalStateException("Transform does not use artifact dependencies."))));
            }
            if (inputChanges != null) {
                builder.add(InjectionPoint.injectedByType(InputChanges.class, () -> inputChanges));
            }
            this.injectionPoints = builder.build();
        }

        @Nullable
        private Object find(Type serviceType, @Nullable Class<? extends Annotation> annotatedWith) {
            TypeToken<?> serviceTypeToken = TypeToken.of(serviceType);
            for (InjectionPoint injectionPoint : injectionPoints) {
                if (annotatedWith == injectionPoint.getAnnotation() && serviceTypeToken.isSupertypeOf(injectionPoint.getInjectedType())) {
                    return injectionPoint.getValueToInject();
                }
            }
            return null;
        }

        @Nullable
        @Override
        public Object find(Type serviceType) throws ServiceLookupException {
            Object result = find(serviceType, null);
            if (result != null) {
                return result;
            }
            return delegate.find(serviceType);
        }

        @Override
        public Object get(Type serviceType) throws UnknownServiceException, ServiceLookupException {
            Object result = find(serviceType);
            if (result == null) {
                throw new UnknownServiceException(serviceType, "No service of type " + serviceType + " available.");
            }
            return result;
        }

        @Override
        public Object get(Type serviceType, Class<? extends Annotation> annotatedWith) throws UnknownServiceException, ServiceLookupException {
            Object result = find(serviceType, annotatedWith);
            if (result != null) {
                return result;
            }
            return delegate.get(serviceType, annotatedWith);
        }

        /**
         * One injectable value: a type plus optionally the qualifying annotation, and a
         * supplier producing the value lazily at injection time.
         */
        private static class InjectionPoint {
            private final Class<? extends Annotation> annotation;
            private final Type injectedType;
            private final Supplier<Object> valueToInject;

            public static InjectionPoint injectedByAnnotation(Class<? extends Annotation> annotation, Supplier<Object> valueToInject) {
                return new InjectionPoint(annotation, determineTypeFromAnnotation(annotation), valueToInject);
            }

            public static InjectionPoint injectedByAnnotation(Class<? extends Annotation> annotation, Type injectedType, Supplier<Object> valueToInject) {
                return new InjectionPoint(annotation, injectedType, valueToInject);
            }

            public static InjectionPoint injectedByType(Class<?> injectedType, Supplier<Object> valueToInject) {
                return new InjectionPoint(null, injectedType, valueToInject);
            }

            private InjectionPoint(@Nullable Class<? extends Annotation> annotation, Type injectedType, Supplier<Object> valueToInject) {
                this.annotation = annotation;
                this.injectedType = injectedType;
                this.valueToInject = valueToInject;
            }

            private static Class<?> determineTypeFromAnnotation(Class<? extends Annotation> annotation) {
                // The injected type is read from the annotation's @InjectionPointQualifier metadata.
                Class<?>[] supportedTypes = annotation.getAnnotation(InjectionPointQualifier.class).supportedTypes();
                if (supportedTypes.length != 1) {
                    throw new IllegalArgumentException("Cannot determine supported type for annotation " + annotation.getName());
                }
                return supportedTypes[0];
            }

            @Nullable
            public Class<? extends Annotation> getAnnotation() {
                return annotation;
            }

            public Type getInjectedType() {
                return injectedType;
            }

            public Object getValueToInject() {
                return valueToInject.get();
            }
        }
    }

    /**
     * The result of isolating the parameter object: the isolated copy itself plus the
     * hash of the transform's secondary inputs (implementation + parameters).
     */
    public static class IsolatedParameters {
        private final HashCode secondaryInputsHash;
        private final Isolatable<? extends TransformParameters> isolatedParameterObject;

        public IsolatedParameters(Isolatable<? extends TransformParameters> isolatedParameterObject, HashCode secondaryInputsHash) {
            this.secondaryInputsHash = secondaryInputsHash;
            this.isolatedParameterObject = isolatedParameterObject;
        }

        public HashCode getSecondaryInputsHash() {
            return secondaryInputsHash;
        }

        public Isolatable<? extends TransformParameters> getIsolatedParameterObject() {
            return isolatedParameterObject;
        }
    }

    /**
     * Deferred calculation that isolates the transform's parameter object and computes the
     * secondary inputs hash, acquiring the owning project's mutable state when needed.
     */
    public static class IsolateTransformerParameters implements ValueCalculator<IsolatedParameters> {
        private final TransformParameters parameterObject;
        private final DomainObjectContext owner;
        private final IsolatableFactory isolatableFactory;
        private final PropertyWalker parameterPropertyWalker;
        private final BuildOperationExecutor buildOperationExecutor;
        private final ClassLoaderHierarchyHasher classLoaderHierarchyHasher;
        private final FileCollectionFactory fileCollectionFactory;
        private final DocumentationRegistry documentationRegistry;
        private final boolean cacheable;
        private final Class<?> implementationClass;

        public IsolateTransformerParameters(@Nullable TransformParameters parameterObject,
                                            Class<?> implementationClass,
                                            boolean cacheable,
                                            DomainObjectContext owner,
                                            PropertyWalker parameterPropertyWalker,
                                            IsolatableFactory isolatableFactory,
                                            BuildOperationExecutor buildOperationExecutor,
                                            ClassLoaderHierarchyHasher classLoaderHierarchyHasher,
                                            FileCollectionFactory fileCollectionFactory,
                                            DocumentationRegistry documentationRegistry) {
            this.parameterObject = parameterObject;
            this.implementationClass = implementationClass;
            this.cacheable = cacheable;
            this.owner = owner;
            this.parameterPropertyWalker = parameterPropertyWalker;
            this.isolatableFactory = isolatableFactory;
            this.buildOperationExecutor = buildOperationExecutor;
            this.classLoaderHierarchyHasher = classLoaderHierarchyHasher;
            this.fileCollectionFactory = fileCollectionFactory;
            this.documentationRegistry = documentationRegistry;
        }

        @Nullable
        public TransformParameters getParameterObject() {
            return parameterObject;
        }

        public boolean isCacheable() {
            return cacheable;
        }

        public Class<?> getImplementationClass() {
            return implementationClass;
        }

        @Override
        public boolean usesMutableProjectState() {
            return owner.getProject() != null;
        }

        @Nullable
        @Override
        public ProjectInternal getOwningProject() {
            return owner.getProject();
        }

        @Override
        public void visitDependencies(TaskDependencyResolveContext context) {
            if (parameterObject != null) {
                // Every input file property of the parameter object contributes its task dependencies.
                parameterPropertyWalker.visitProperties(parameterObject, TypeValidationContext.NOOP, new PropertyVisitor.Adapter() {
                    @Override
                    public void visitInputFileProperty(
                        String propertyName,
                        boolean optional,
                        boolean skipWhenEmpty,
                        DirectorySensitivity directorySensitivity,
                        LineEndingSensitivity lineEndingSensitivity,
                        boolean incremental,
                        @Nullable Class<? extends FileNormalizer> fileNormalizer,
                        PropertyValue value,
                        InputFilePropertyType filePropertyType
                    ) {
                        context.add(value.getTaskDependencies());
                    }
                });
            }
        }

        @Override
        public IsolatedParameters calculateValue(NodeExecutionContext context) {
            InputFingerprinter inputFingerprinter = context.getService(InputFingerprinter.class);
            return isolateParameters(inputFingerprinter);
        }

        private IsolatedParameters isolateParameters(InputFingerprinter inputFingerprinter) {
            ModelContainer<?> model = owner.getModel();
            if (!model.hasMutableState()) {
                // This may happen when a task visits artifacts using a FileCollection instance created from a Configuration instance in a different project (not an artifact produced by a different project, these work fine)
                // There is a check in DefaultConfiguration that deprecates resolving dependencies via FileCollection instance created by a different project, however that check may not
                // necessarily be triggered. For example, the configuration may be legitimately resolved by some other task prior to the problematic task running
                // TODO - hoist this up into configuration file collection visiting (and not when visiting the upstream dependencies of a transform), and deprecate this in Gradle 7.x
                //
                // This may also happen when a transform takes upstream dependencies and the dependencies are transformed using a different transform
                // In this case, the main thread that schedules the work should isolate the transform parameters prior to scheduling the work. However, the dependencies may
                // be filtered from the result, so that the transform is not visited by the main thread, or the transform worker may start work before the main thread
                // has a chance to isolate the upstream transform
                // TODO - ensure all transform parameters required by a transform worker are isolated prior to starting the worker
                //
                // Force access to the state of the owner, regardless of whether any other thread has access. This is because attempting to acquire a lock for a project may deadlock
                // when performed from a worker thread (see DefaultBuildOperationQueue.waitForCompletion() which intentionally does not release the project locks while waiting)
                // TODO - add validation to fail eagerly when a worker attempts to lock a project
                //
                return model.forceAccessToMutableState(o -> doIsolateParameters(inputFingerprinter));
            } else {
                return doIsolateParameters(inputFingerprinter);
            }
        }

        private IsolatedParameters doIsolateParameters(InputFingerprinter inputFingerprinter) {
            try {
                return isolateParametersExclusively(inputFingerprinter);
            } catch (Exception e) {
                // Wrap any isolation failure with context identifying the transform and its parameters.
                TreeFormatter formatter = new TreeFormatter();
                formatter.node("Could not isolate parameters ").appendValue(parameterObject).append(" of artifact transform ").appendType(implementationClass);
                throw new VariantTransformConfigurationException(formatter.toString(), e);
            }
        }

        private IsolatedParameters isolateParametersExclusively(InputFingerprinter inputFingerprinter) {
            Isolatable<TransformParameters> isolatedParameterObject = isolatableFactory.isolate(parameterObject);
            Hasher hasher = Hashing.newHasher();
            // Secondary inputs hash = action implementation hash + fingerprints of the parameters.
            appendActionImplementation(implementationClass, hasher, classLoaderHierarchyHasher);
            if (parameterObject != null) {
                TransformParameters isolatedTransformParameters = isolatedParameterObject.isolate();
                buildOperationExecutor.run(new RunnableBuildOperation() {
                    @Override
                    public void run(BuildOperationContext context) {
                        // TODO wolfs - schedule fingerprinting separately, it can be done without having the project lock
                        fingerprintParameters(
                            documentationRegistry,
                            inputFingerprinter,
                            fileCollectionFactory,
                            parameterPropertyWalker,
                            hasher,
                            isolatedTransformParameters,
                            cacheable
                        );
                        context.setResult(FingerprintTransformInputsOperation.Result.INSTANCE);
                    }

                    @Override
                    public BuildOperationDescriptor.Builder description() {
                        return BuildOperationDescriptor
                            .displayName("Fingerprint transformation inputs")
                            .details(FingerprintTransformInputsOperation.Details.INSTANCE);
                    }
                });
            }
            HashCode secondaryInputsHash = hasher.hash();
            return new IsolatedParameters(isolatedParameterObject, secondaryInputsHash);
        }
    }

    /*
     * This operation is only used here temporarily. Should be replaced with a more stable operation in the long term.
     */
    public interface FingerprintTransformInputsOperation extends BuildOperationType<FingerprintTransformInputsOperation.Details, FingerprintTransformInputsOperation.Result> {
        interface Details {
            Details INSTANCE = new Details() {
            };
        }

        interface Result {
            Result INSTANCE = new Result() {
            };
        }
    }
}
| apache-2.0 |
ibnc/gocd | server/src/main/java/com/thoughtworks/go/config/update/ConfigUpdateCheckFailedException.java | 721 | /*
* Copyright 2019 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.config.update;
/**
 * Thrown when a pre-flight check of a configuration update fails.
 *
 * <p>The original class declared no constructors, so callers could not attach a
 * diagnostic message or underlying cause. The standard {@link RuntimeException}
 * constructor overloads are provided here; the no-arg constructor preserves the
 * previously implicit default constructor, so existing callers are unaffected.</p>
 */
public class ConfigUpdateCheckFailedException extends RuntimeException {
    /** Creates an exception without a detail message (backward-compatible default). */
    public ConfigUpdateCheckFailedException() {
    }

    /** Creates an exception with the given detail message. */
    public ConfigUpdateCheckFailedException(String message) {
        super(message);
    }

    /** Creates an exception with the given detail message and cause. */
    public ConfigUpdateCheckFailedException(String message, Throwable cause) {
        super(message, cause);
    }
}
| apache-2.0 |
msebire/intellij-community | platform/analysis-api/src/com/intellij/codeInspection/reference/RefFile.java | 1110 | /*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInspection.reference;
import com.intellij.psi.PsiFile;
/**
 * A node in the reference graph corresponding to a file.
 *
 * @author anna
 */
public interface RefFile extends RefElement {
  /**
   * Returns the file to which the node corresponds.
   *
   * <p>The default implementation delegates to the deprecated {@link #getElement()} so that
   * legacy implementations which only override that method keep working. New implementations
   * should override this method directly; otherwise the inherited {@code getElement()} default
   * throws {@link UnsupportedOperationException}.</p>
   *
   * @return the file for the node.
   */
  @Override
  default PsiFile getPsiElement() {
    return getElement();
  }

  /**
   * @deprecated implement/call {@link #getPsiElement()} instead. This default exists only as a
   * migration stub and always throws {@link UnsupportedOperationException} unless overridden.
   */
  @Deprecated
  @Override
  default PsiFile getElement() {
    throw new UnsupportedOperationException();
  }
}
| apache-2.0 |
atopuzov/nitro-python | nssrc/com/citrix/netscaler/nitro/resource/config/appfw/appfwprofile_denyurl_binding.py | 9677 | #
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class appfwprofile_denyurl_binding(base_resource) :
""" Binding class showing the denyurl that can be bound to appfwprofile.
"""
def __init__(self) :
    """Initialize every bound attribute to its empty default value.

    The assignments are mutually independent, so their order is arbitrary.
    """
    self._name = ""
    self._denyurl = ""
    self._comment = ""
    self._state = ""
    # Internal record counter used by count-style API responses.
    self.___count = 0
@property
def state(self) :
ur"""Enabled.<br/>Possible values = ENABLED, DISABLED.
"""
try :
return self._state
except Exception as e:
raise e
@state.setter
def state(self, state) :
ur"""Enabled.<br/>Possible values = ENABLED, DISABLED
"""
try :
self._state = state
except Exception as e:
raise e
@property
def denyurl(self) :
ur"""A regular expression that designates a URL on the Deny URL list.
"""
try :
return self._denyurl
except Exception as e:
raise e
@denyurl.setter
def denyurl(self, denyurl) :
ur"""A regular expression that designates a URL on the Deny URL list.
"""
try :
self._denyurl = denyurl
except Exception as e:
raise e
@property
def name(self) :
ur"""Name of the profile to which to bind an exemption or rule.<br/>Minimum length = 1.
"""
try :
return self._name
except Exception as e:
raise e
@name.setter
def name(self, name) :
ur"""Name of the profile to which to bind an exemption or rule.<br/>Minimum length = 1
"""
try :
self._name = name
except Exception as e:
raise e
@property
def comment(self) :
ur"""Any comments about the purpose of profile, or other useful information about the profile.
"""
try :
return self._comment
except Exception as e:
raise e
@comment.setter
def comment(self, comment) :
ur"""Any comments about the purpose of profile, or other useful information about the profile.
"""
try :
self._comment = comment
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
ur""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(appfwprofile_denyurl_binding_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.appfwprofile_denyurl_binding
except Exception as e :
raise e
def _get_object_name(self) :
ur""" Returns the value of object identifier argument
"""
try :
if self.name is not None :
return str(self.name)
return None
except Exception as e :
raise e
@classmethod
def add(cls, client, resource) :
try :
if resource and type(resource) is not list :
updateresource = appfwprofile_denyurl_binding()
updateresource.name = resource.name
updateresource.denyurl = resource.denyurl
updateresource.comment = resource.comment
updateresource.state = resource.state
return updateresource.update_resource(client)
else :
if resource and len(resource) > 0 :
updateresources = [appfwprofile_denyurl_binding() for _ in range(len(resource))]
for i in range(len(resource)) :
updateresources[i].name = resource[i].name
updateresources[i].denyurl = resource[i].denyurl
updateresources[i].comment = resource[i].comment
updateresources[i].state = resource[i].state
return cls.update_bulk_request(client, updateresources)
except Exception as e :
raise e
@classmethod
def delete(cls, client, resource) :
try :
if resource and type(resource) is not list :
deleteresource = appfwprofile_denyurl_binding()
deleteresource.name = resource.name
deleteresource.denyurl = resource.denyurl
return deleteresource.delete_resource(client)
else :
if resource and len(resource) > 0 :
deleteresources = [appfwprofile_denyurl_binding() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].name = resource[i].name
deleteresources[i].denyurl = resource[i].denyurl
return cls.delete_bulk_request(client, deleteresources)
except Exception as e :
raise e
@classmethod
def get(cls, service, name) :
ur""" Use this API to fetch appfwprofile_denyurl_binding resources.
"""
try :
obj = appfwprofile_denyurl_binding()
obj.name = name
response = obj.get_resources(service)
return response
except Exception as e:
raise e
@classmethod
def get_filtered(cls, service, name, filter_) :
ur""" Use this API to fetch filtered set of appfwprofile_denyurl_binding resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = appfwprofile_denyurl_binding()
obj.name = name
option_ = options()
option_.filter = filter_
response = obj.getfiltered(service, option_)
return response
except Exception as e:
raise e
@classmethod
def count(cls, service, name) :
ur""" Use this API to count appfwprofile_denyurl_binding resources configued on NetScaler.
"""
try :
obj = appfwprofile_denyurl_binding()
obj.name = name
option_ = options()
option_.count = True
response = obj.get_resources(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
@classmethod
def count_filtered(cls, service, name, filter_) :
ur""" Use this API to count the filtered set of appfwprofile_denyurl_binding resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = appfwprofile_denyurl_binding()
obj.name = name
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
class As_scan_location_xmlsql:
ELEMENT = "ELEMENT"
ATTRIBUTE = "ATTRIBUTE"
class Xmlmaxelementdepthcheck:
ON = "ON"
OFF = "OFF"
class Xmlmaxattachmentsizecheck:
ON = "ON"
OFF = "OFF"
class Xmlsoaparraycheck:
ON = "ON"
OFF = "OFF"
class State:
ENABLED = "ENABLED"
DISABLED = "DISABLED"
class Xmlmaxelementnamelengthcheck:
ON = "ON"
OFF = "OFF"
class Isregex_ff:
REGEX = "REGEX"
NOTREGEX = "NOTREGEX"
class Xmlmaxelementscheck:
ON = "ON"
OFF = "OFF"
class Xmlendpointcheck:
ABSOLUTE = "ABSOLUTE"
RELATIVE = "RELATIVE"
class Xmlmaxnamespacescheck:
ON = "ON"
OFF = "OFF"
class Xmlmaxfilesizecheck:
ON = "ON"
OFF = "OFF"
class Xmlmaxattributenamelengthcheck:
ON = "ON"
OFF = "OFF"
class Xmlblockdtd:
ON = "ON"
OFF = "OFF"
class Xmlblockpi:
ON = "ON"
OFF = "OFF"
class Isregex_sql:
REGEX = "REGEX"
NOTREGEX = "NOTREGEX"
class Xmlvalidateresponse:
ON = "ON"
OFF = "OFF"
class Xmlmaxelementchildrencheck:
ON = "ON"
OFF = "OFF"
class Isregex:
REGEX = "REGEX"
NOTREGEX = "NOTREGEX"
class Xmlmaxentityexpansionscheck:
ON = "ON"
OFF = "OFF"
class Xmlmaxnamespaceurilengthcheck:
ON = "ON"
OFF = "OFF"
class As_scan_location_xss:
FORMFIELD = "FORMFIELD"
HEADER = "HEADER"
COOKIE = "COOKIE"
class Xmlmaxentityexpansiondepthcheck:
ON = "ON"
OFF = "OFF"
class As_scan_location_xmlxss:
ELEMENT = "ELEMENT"
ATTRIBUTE = "ATTRIBUTE"
class Xmlmaxattributevaluelengthcheck:
ON = "ON"
OFF = "OFF"
class As_scan_location_sql:
FORMFIELD = "FORMFIELD"
HEADER = "HEADER"
COOKIE = "COOKIE"
class Isregex_ffc:
REGEX = "REGEX"
NOTREGEX = "NOTREGEX"
class Xmlattachmentcontenttypecheck:
ON = "ON"
OFF = "OFF"
class Isregex_xmlsql:
REGEX = "REGEX"
NOTREGEX = "NOTREGEX"
class Xmlvalidatesoapenvelope:
ON = "ON"
OFF = "OFF"
class Xmlmaxchardatalengthcheck:
ON = "ON"
OFF = "OFF"
class Xmlminfilesizecheck:
ON = "ON"
OFF = "OFF"
class Isregex_xss:
REGEX = "REGEX"
NOTREGEX = "NOTREGEX"
class Isregex_xmlxss:
REGEX = "REGEX"
NOTREGEX = "NOTREGEX"
class Xmladditionalsoapheaders:
ON = "ON"
OFF = "OFF"
class Xmlmaxattributescheck:
ON = "ON"
OFF = "OFF"
class Action:
none = "none"
block = "block"
log = "log"
remove = "remove"
stats = "stats"
xout = "xout"
class Xmlblockexternalentities:
ON = "ON"
OFF = "OFF"
class appfwprofile_denyurl_binding_response(base_response) :
    """Response wrapper deserialized from a NITRO GET on the binding.

    :param length: number of empty binding objects to pre-allocate for the
        payload formatter to populate.
    """
    def __init__(self, length=1) :
        # Standard NITRO status fields.
        self.errorcode = 0
        self.message = ""
        self.severity = ""
        self.sessionid = ""
        # Pre-allocate the result list once.  (The original assigned an
        # empty list first and immediately overwrote it -- dead code removed.)
        self.appfwprofile_denyurl_binding = [appfwprofile_denyurl_binding() for _ in range(length)]
| apache-2.0 |
cloudbase/neutron | neutron/api/extensions.py | 30424 | # Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import collections
import imp
import os
from oslo_config import cfg
from oslo_log import log as logging
from oslo_middleware import base
import routes
import six
import webob.dec
import webob.exc
from neutron._i18n import _, _LE, _LI, _LW
from neutron.common import exceptions
import neutron.extensions
from neutron import manager
from neutron.plugins.common import constants as const
from neutron.services import provider_configuration
from neutron import wsgi
# Module-level logger.
LOG = logging.getLogger(__name__)
# Maps extension alias -> zero-argument callable returning True when the
# extension is supported (bypasses the plugin supported-aliases check).
EXTENSION_SUPPORTED_CHECK_MAP = {}
# Aliases registered with plugin_agnostic=True: no plugin has to claim them.
_PLUGIN_AGNOSTIC_EXTENSIONS = set()
def register_custom_supported_check(alias, f, plugin_agnostic=False):
    """Register a custom zero-arg check telling whether *alias* is supported.

    Registering the same alias again simply replaces the previous check.

    :param alias: API extension alias name
    :param f: callable returning True when the extension is supported
    :param plugin_agnostic: when True, a plugin must still claim the
        extension via supported_extension_aliases; when False, no plugin
        needs to claim support for it.
    """
    EXTENSION_SUPPORTED_CHECK_MAP[alias] = f
    if plugin_agnostic:
        _PLUGIN_AGNOSTIC_EXTENSIONS.add(alias)
@six.add_metaclass(abc.ABCMeta)
class PluginInterface(object):
    """Abstract base used to express the interface a plugin must provide."""

    @classmethod
    def __subclasshook__(cls, klass):
        """Checking plugin class.

        The __subclasshook__ method is a class method
        that will be called every time a class is tested
        using issubclass(klass, PluginInterface).
        In that case, it will check that every method
        marked with the abstractmethod decorator is
        provided by the plugin class.
        """
        if not cls.__abstractmethods__:
            return NotImplemented
        for method in cls.__abstractmethods__:
            # Loop variable renamed from ``base`` to ``ancestor``: the
            # original name shadowed the module-level ``oslo_middleware.base``
            # import inside the generator expression.
            if any(method in ancestor.__dict__ for ancestor in klass.__mro__):
                continue
            return NotImplemented
        return True
@six.add_metaclass(abc.ABCMeta)
class ExtensionDescriptor(object):
    """Base class that defines the contract for extensions.

    Concrete extensions must implement the four abstract identity methods;
    all other hooks have no-op defaults.
    """

    @abc.abstractmethod
    def get_name(self):
        """The name of the extension.

        e.g. 'Fox In Socks'
        """

    @abc.abstractmethod
    def get_alias(self):
        """The alias for the extension.

        e.g. 'FOXNSOX'
        """

    @abc.abstractmethod
    def get_description(self):
        """Friendly description for the extension.

        e.g. 'The Fox In Socks Extension'
        """

    @abc.abstractmethod
    def get_updated(self):
        """The timestamp when the extension was last updated.

        e.g. '2011-01-22T13:25:27-06:00'
        """

    # NOTE(justinsb): Not sure of the purpose of this is, vs the XML NS
    def get_resources(self):
        """List of extensions.ResourceExtension extension objects.

        Resources define new nouns, and are accessible through URLs.
        """
        resources = []
        return resources

    def get_actions(self):
        """List of extensions.ActionExtension extension objects.

        Actions are verbs callable from the API.
        """
        actions = []
        return actions

    def get_request_extensions(self):
        """List of extensions.RequestException extension objects.

        Request extensions are used to handle custom request data.
        """
        request_exts = []
        return request_exts

    def get_extended_resources(self, version):
        """Retrieve extended resources or attributes for core resources.

        Extended attributes are implemented by a core plugin similarly
        to the attributes defined in the core, and can appear in
        request and response messages. Their names are scoped with the
        extension's prefix. The core API version is passed to this
        function, which must return a
        map[<resource_name>][<attribute_name>][<attribute_property>]
        specifying the extended resource attribute properties required
        by that API version.

        Extension can add resources and their attr definitions too.
        The returned map can be integrated into RESOURCE_ATTRIBUTE_MAP.
        """
        return {}

    def get_plugin_interface(self):
        """Returns an abstract class which defines contract for the plugin.

        The abstract class should inherit from extensions.PluginInterface,
        Methods in this abstract class should be decorated as abstractmethod
        """
        return None

    def get_required_extensions(self):
        """Returns a list of extensions to be processed before this one."""
        return []

    def get_optional_extensions(self):
        """Returns a list of extensions to be processed before this one.

        Unlike get_required_extensions. This will not fail the loading of
        the extension if one of these extensions is not present. This is
        useful for an extension that extends multiple resources across
        other extensions that should still work for the remaining extensions
        when one is missing.
        """
        return []

    def update_attributes_map(self, extended_attributes,
                              extension_attrs_map=None):
        """Update attributes map for this extension.

        This is default method for extending an extension's attributes map.
        An extension can use this method and supplying its own resource
        attribute map in extension_attrs_map argument to extend all its
        attributes that needs to be extended.

        If an extension does not implement update_attributes_map, the method
        does nothing and just return.
        """
        if not extension_attrs_map:
            return
        for resource, attrs in six.iteritems(extension_attrs_map):
            extended_attrs = extended_attributes.get(resource)
            if extended_attrs:
                attrs.update(extended_attrs)

    def get_pecan_resources(self):
        """List of PecanResourceExtension extension objects.

        Resources define new nouns, and are accessible through URLs.
        The controllers associated with each instance of
        extensions.ResourceExtension should be a subclass of
        neutron.pecan_wsgi.controllers.utils.NeutronPecanController.

        If a resource is defined in both get_resources and get_pecan_resources,
        the resource defined in get_pecan_resources will take precedence.
        """
        return []
class ActionExtensionController(wsgi.Controller):
    """Dispatches custom POST /<collection>/<id>/action requests."""

    def __init__(self, application):
        """Remember the wrapped WSGI app and start with no handlers."""
        self.application = application
        self.action_handlers = {}

    def add_action(self, action_name, handler):
        """Map *action_name* to *handler*; later registrations win."""
        self.action_handlers[action_name] = handler

    def action(self, request, id):
        """Invoke the first registered handler named in the request body."""
        body = self._deserialize(request.body,
                                 request.get_content_type())
        for name, handler in six.iteritems(self.action_handlers):
            if name in body:
                return handler(body, request, id)
        # no action handler found (bump to downstream application)
        return self.application
class RequestExtensionController(wsgi.Controller):
    """Applies registered request handlers to a downstream response."""

    def __init__(self, application):
        # Downstream WSGI application whose response the handlers decorate.
        self.application = application
        self.handlers = []

    def add_handler(self, handler):
        # Handler signature: handler(request, response) -> response.
        self.handlers.append(handler)

    def process(self, request, *args, **kwargs):
        res = request.get_response(self.application)
        # currently request handlers are un-ordered
        # NOTE(review): each handler receives the original downstream
        # response ``res`` and only the last handler's return value is
        # returned -- confirm handlers are meant to mutate ``res`` in place.
        for handler in self.handlers:
            response = handler(request, res)
        return response
class ExtensionController(wsgi.Controller):
    """REST controller exposing the set of loaded API extensions."""

    def __init__(self, extension_manager):
        self.extension_manager = extension_manager

    @staticmethod
    def _translate(ext):
        """Serialize a single extension descriptor into a response dict."""
        return {
            'name': ext.get_name(),
            'alias': ext.get_alias(),
            'description': ext.get_description(),
            'updated': ext.get_updated(),
            'links': [],  # TODO(dprince): implement extension links
        }

    def index(self, request):
        """List every loaded extension."""
        loaded = [self._translate(ext) for _alias, ext
                  in six.iteritems(self.extension_manager.extensions)]
        return dict(extensions=loaded)

    def show(self, request, id):
        """Show one extension; its alias doubles as the 'id'."""
        ext = self.extension_manager.extensions.get(id, None)
        if not ext:
            raise webob.exc.HTTPNotFound(
                _("Extension with alias %s does not exist") % id)
        return dict(extension=self._translate(ext))

    def delete(self, request, id):
        """Extensions cannot be deleted through the API."""
        raise webob.exc.HTTPNotFound(_('Resource not found.'))

    def create(self, request):
        """Extensions cannot be created through the API."""
        raise webob.exc.HTTPNotFound(_('Resource not found.'))
class ExtensionMiddleware(base.ConfigurableMiddleware):
    """Extensions middleware for WSGI."""

    def __init__(self, application,
                 ext_mgr=None):
        # Build a routes.Mapper wiring every extended resource, action and
        # request extension in front of the core application.
        self.ext_mgr = (ext_mgr
                        or ExtensionManager(get_extensions_path()))
        mapper = routes.Mapper()

        # extended resources
        for resource in self.ext_mgr.get_resources():
            path_prefix = resource.path_prefix
            if resource.parent:
                # Nest sub-resources under /<parent_collection>/{parent_id}.
                path_prefix = (resource.path_prefix +
                               "/%s/{%s_id}" %
                               (resource.parent["collection_name"],
                                resource.parent["member_name"]))
            LOG.debug('Extended resource: %s',
                      resource.collection)
            # Collection-level custom actions, e.g. POST /things/do_stuff.
            for action, method in six.iteritems(resource.collection_actions):
                conditions = dict(method=[method])
                path = "/%s/%s" % (resource.collection, action)
                with mapper.submapper(controller=resource.controller,
                                      action=action,
                                      path_prefix=path_prefix,
                                      conditions=conditions) as submap:
                    submap.connect(path_prefix + path, path)
                    submap.connect(path_prefix + path + "_format",
                                   "%s.:(format)" % path)
            # Extra HTTP methods on the collection itself.
            for action, method in resource.collection_methods.items():
                conditions = dict(method=[method])
                path = "/%s" % resource.collection
                with mapper.submapper(controller=resource.controller,
                                      action=action,
                                      path_prefix=path_prefix,
                                      conditions=conditions) as submap:
                    submap.connect(path_prefix + path, path)
                    submap.connect(path_prefix + path + "_format",
                                   "%s.:(format)" % path)
            # Standard RESTful routes for the resource.
            mapper.resource(resource.collection, resource.collection,
                            controller=resource.controller,
                            member=resource.member_actions,
                            parent_resource=resource.parent,
                            path_prefix=path_prefix)

        # extended actions
        action_controllers = self._action_ext_controllers(application,
                                                          self.ext_mgr, mapper)
        for action in self.ext_mgr.get_actions():
            LOG.debug('Extended action: %s', action.action_name)
            controller = action_controllers[action.collection]
            controller.add_action(action.action_name, action.handler)

        # extended requests
        req_controllers = self._request_ext_controllers(application,
                                                        self.ext_mgr, mapper)
        for request_ext in self.ext_mgr.get_request_extensions():
            LOG.debug('Extended request: %s', request_ext.key)
            controller = req_controllers[request_ext.key]
            controller.add_handler(request_ext.handler)

        self._router = routes.middleware.RoutesMiddleware(self._dispatch,
                                                          mapper)
        super(ExtensionMiddleware, self).__init__(application)

    @classmethod
    def factory(cls, global_config, **local_config):
        """Paste factory."""
        # NOTE(review): ``__init__`` only accepts (application, ext_mgr=None),
        # so passing ``global_config`` positionally here binds it to
        # ``ext_mgr`` -- confirm against the expected paste factory contract.
        def _factory(app):
            return cls(app, global_config, **local_config)
        return _factory

    def _action_ext_controllers(self, application, ext_mgr, mapper):
        """Return a dict of ActionExtensionController-s by collection."""
        action_controllers = {}
        for action in ext_mgr.get_actions():
            if action.collection not in action_controllers.keys():
                controller = ActionExtensionController(application)
                mapper.connect("/%s/:(id)/action.:(format)" %
                               action.collection,
                               action='action',
                               controller=controller,
                               conditions=dict(method=['POST']))
                mapper.connect("/%s/:(id)/action" % action.collection,
                               action='action',
                               controller=controller,
                               conditions=dict(method=['POST']))
                action_controllers[action.collection] = controller
        return action_controllers

    def _request_ext_controllers(self, application, ext_mgr, mapper):
        """Returns a dict of RequestExtensionController-s by collection."""
        request_ext_controllers = {}
        for req_ext in ext_mgr.get_request_extensions():
            if req_ext.key not in request_ext_controllers.keys():
                controller = RequestExtensionController(application)
                mapper.connect(req_ext.url_route + '.:(format)',
                               action='process',
                               controller=controller,
                               conditions=req_ext.conditions)
                mapper.connect(req_ext.url_route,
                               action='process',
                               controller=controller,
                               conditions=req_ext.conditions)
                request_ext_controllers[req_ext.key] = controller
        return request_ext_controllers

    @webob.dec.wsgify(RequestClass=wsgi.Request)
    def __call__(self, req):
        """Route the incoming request with router."""
        # Stash the core app so _dispatch can fall back to it.
        req.environ['extended.app'] = self.application
        return self._router

    @staticmethod
    @webob.dec.wsgify(RequestClass=wsgi.Request)
    def _dispatch(req):
        """Dispatch the request.

        Returns the routed WSGI app's response or defers to the extended
        application.
        """
        match = req.environ['wsgiorg.routing_args'][1]
        if not match:
            # No extension route matched: fall through to the core app.
            return req.environ['extended.app']
        app = match['controller']
        return app
def plugin_aware_extension_middleware_factory(global_config, **local_config):
    """Paste factory building an ExtensionMiddleware with the shared manager."""
    def _factory(app):
        return ExtensionMiddleware(
            app, ext_mgr=PluginAwareExtensionManager.get_instance())
    return _factory
class ExtensionManager(object):
    """Load extensions from the configured extension path.

    See tests/unit/extensions/foxinsocks.py for an
    example extension implementation.
    """

    def __init__(self, path):
        # ``path`` is a ':'-separated list of directories to scan.
        LOG.info(_LI('Initializing extension manager.'))
        self.path = path
        self.extensions = {}
        self._load_all_extensions()

    def get_resources(self):
        """Returns a list of ResourceExtension objects."""
        resources = []
        # The /extensions resource itself is always exposed.
        resources.append(ResourceExtension('extensions',
                                           ExtensionController(self)))
        for ext in self.extensions.values():
            resources.extend(ext.get_resources())
        return resources

    def get_pecan_resources(self):
        """Returns a list of PecanResourceExtension objects."""
        resources = []
        for ext in self.extensions.values():
            # TODO(blogan): this is being called because there are side effects
            # that the get_resources method does, like registering plural
            # mappings and quotas. The side effects that get_resources does
            # should probably be moved to another extension method, but that
            # should be done some other time.
            ext.get_resources()
            resources.extend(ext.get_pecan_resources())
        return resources

    def get_actions(self):
        """Returns a list of ActionExtension objects."""
        actions = []
        for ext in self.extensions.values():
            actions.extend(ext.get_actions())
        return actions

    def get_request_extensions(self):
        """Returns a list of RequestExtension objects."""
        request_exts = []
        for ext in self.extensions.values():
            request_exts.extend(ext.get_request_extensions())
        return request_exts

    def extend_resources(self, version, attr_map):
        """Extend resources with additional resources or attributes.

        :param attr_map: the existing mapping from resource name to
        attrs definition.

        After this function, we will extend the attr_map if an extension
        wants to extend this map.
        """
        processed_exts = {}
        exts_to_process = self.extensions.copy()
        check_optionals = True
        # Iterate until there are unprocessed extensions or if no progress
        # is made in a whole iteration
        while exts_to_process:
            processed_ext_count = len(processed_exts)
            for ext_name, ext in list(exts_to_process.items()):
                # Process extension only if all required extensions
                # have been processed already
                required_exts_set = set(ext.get_required_extensions())
                if required_exts_set - set(processed_exts):
                    continue
                optional_exts_set = set(ext.get_optional_extensions())
                if check_optionals and optional_exts_set - set(processed_exts):
                    continue
                extended_attrs = ext.get_extended_resources(version)
                for res, resource_attrs in six.iteritems(extended_attrs):
                    attr_map.setdefault(res, {}).update(resource_attrs)
                processed_exts[ext_name] = ext
                del exts_to_process[ext_name]
            if len(processed_exts) == processed_ext_count:
                # if we hit here, it means there are unsatisfied
                # dependencies. try again without optionals since optionals
                # are only necessary to set order if they are present.
                if check_optionals:
                    check_optionals = False
                    continue
                # Exit loop as no progress was made
                break
        if exts_to_process:
            unloadable_extensions = set(exts_to_process.keys())
            LOG.error(_LE("Unable to process extensions (%s) because "
                          "the configured plugins do not satisfy "
                          "their requirements. Some features will not "
                          "work as expected."),
                      ', '.join(unloadable_extensions))
            self._check_faulty_extensions(unloadable_extensions)
        # Extending extensions' attributes map.
        for ext in processed_exts.values():
            ext.update_attributes_map(attr_map)

    def _check_faulty_extensions(self, faulty_extensions):
        """Raise for non-default faulty extensions.

        Gracefully fail for defective default extensions, which will be
        removed from the list of loaded extensions.
        """
        default_extensions = set(const.DEFAULT_SERVICE_PLUGINS.values())
        if not faulty_extensions <= default_extensions:
            raise exceptions.ExtensionsNotFound(
                extensions=list(faulty_extensions))
        else:
            # Remove the faulty extensions so that they do not show during
            # ext-list
            for ext in faulty_extensions:
                try:
                    del self.extensions[ext]
                except KeyError:
                    pass

    def _check_extension(self, extension):
        """Checks for required methods in extension objects."""
        try:
            LOG.debug('Ext name: %s', extension.get_name())
            LOG.debug('Ext alias: %s', extension.get_alias())
            LOG.debug('Ext description: %s', extension.get_description())
            LOG.debug('Ext updated: %s', extension.get_updated())
        except AttributeError:
            LOG.exception(_LE("Exception loading extension"))
            return False
        return isinstance(extension, ExtensionDescriptor)

    def _load_all_extensions(self):
        """Load extensions from the configured path.

        The extension name is constructed from the module_name. If your
        extension module is named widgets.py, the extension class within that
        module should be 'Widgets'.

        See tests/unit/extensions/foxinsocks.py for an example extension
        implementation.
        """
        for path in self.path.split(':'):
            if os.path.exists(path):
                self._load_all_extensions_from_path(path)
            else:
                LOG.error(_LE("Extension path '%s' doesn't exist!"), path)

    def _load_all_extensions_from_path(self, path):
        # Sorting the extension list makes the order in which they
        # are loaded predictable across a cluster of load-balanced
        # Neutron Servers
        for f in sorted(os.listdir(path)):
            try:
                LOG.debug('Loading extension file: %s', f)
                mod_name, file_ext = os.path.splitext(os.path.split(f)[-1])
                ext_path = os.path.join(path, f)
                if file_ext.lower() == '.py' and not mod_name.startswith('_'):
                    mod = imp.load_source(mod_name, ext_path)
                    # Convention: module widgets.py holds class 'Widgets'.
                    ext_name = mod_name[0].upper() + mod_name[1:]
                    new_ext_class = getattr(mod, ext_name, None)
                    if not new_ext_class:
                        LOG.warning(_LW('Did not find expected name '
                                        '"%(ext_name)s" in %(file)s'),
                                    {'ext_name': ext_name,
                                     'file': ext_path})
                        continue
                    new_ext = new_ext_class()
                    self.add_extension(new_ext)
            except Exception as exception:
                LOG.warning(_LW("Extension file %(f)s wasn't loaded due to "
                                "%(exception)s"),
                            {'f': f, 'exception': exception})

    def add_extension(self, ext):
        # Do nothing if the extension doesn't check out
        if not self._check_extension(ext):
            return
        alias = ext.get_alias()
        LOG.info(_LI('Loaded extension: %s'), alias)
        if alias in self.extensions:
            raise exceptions.DuplicatedExtension(alias=alias)
        self.extensions[alias] = ext
class PluginAwareExtensionManager(ExtensionManager):
    """ExtensionManager that only keeps extensions some plugin supports."""

    # Process-wide singleton created by get_instance().
    _instance = None

    def __init__(self, path, plugins):
        self.plugins = plugins
        super(PluginAwareExtensionManager, self).__init__(path)
        self.check_if_plugin_extensions_loaded()

    def _check_extension(self, extension):
        """Check if an extension is supported by any plugin."""
        extension_is_valid = super(PluginAwareExtensionManager,
                                   self)._check_extension(extension)
        if not extension_is_valid:
            return False

        alias = extension.get_alias()
        # Custom checks registered via register_custom_supported_check()
        # take precedence over plugin-declared aliases.
        if alias in EXTENSION_SUPPORTED_CHECK_MAP:
            return EXTENSION_SUPPORTED_CHECK_MAP[alias]()

        return (self._plugins_support(extension) and
                self._plugins_implement_interface(extension))

    def _plugins_support(self, extension):
        alias = extension.get_alias()
        supports_extension = alias in self.get_supported_extension_aliases()
        if not supports_extension:
            LOG.info(_LI("Extension %s not supported by any of loaded "
                         "plugins"),
                     alias)
        return supports_extension

    def _plugins_implement_interface(self, extension):
        if extension.get_plugin_interface() is None:
            return True
        for plugin in self.plugins.values():
            if isinstance(plugin, extension.get_plugin_interface()):
                return True
        LOG.warning(_LW("Loaded plugins do not implement extension "
                        "%s interface"),
                    extension.get_alias())
        return False

    @classmethod
    def get_instance(cls):
        # Lazily create the singleton from the currently loaded plugins.
        if cls._instance is None:
            service_plugins = manager.NeutronManager.get_service_plugins()
            cls._instance = cls(get_extensions_path(service_plugins),
                                service_plugins)
        return cls._instance

    def get_plugin_supported_extension_aliases(self, plugin):
        """Return extension aliases supported by a given plugin"""
        aliases = set()
        # we also check all classes that the plugins inherit to see if they
        # directly provide support for an extension
        for item in [plugin] + plugin.__class__.mro():
            try:
                aliases |= set(
                    getattr(item, "supported_extension_aliases", []))
            except TypeError:
                # we land here if a class has a @property decorator for
                # supported extension aliases. They only work on objects.
                pass
        return aliases

    def get_supported_extension_aliases(self):
        """Gets extension aliases supported by all plugins."""
        aliases = set()
        for plugin in self.plugins.values():
            aliases |= self.get_plugin_supported_extension_aliases(plugin)
        # Also include every alias whose custom check currently passes.
        aliases |= {
            alias
            for alias, func in EXTENSION_SUPPORTED_CHECK_MAP.items()
            if func()
        }
        return aliases

    @classmethod
    def clear_instance(cls):
        # Drop the singleton (used by tests / reconfiguration).
        cls._instance = None

    def check_if_plugin_extensions_loaded(self):
        """Check if an extension supported by a plugin has been loaded."""
        plugin_extensions = self.get_supported_extension_aliases()
        missing_aliases = plugin_extensions - set(self.extensions)
        # Plugin-agnostic aliases need not be backed by a loaded extension.
        missing_aliases -= _PLUGIN_AGNOSTIC_EXTENSIONS
        if missing_aliases:
            raise exceptions.ExtensionsNotFound(
                extensions=list(missing_aliases))
class RequestExtension(object):
    """Extend requests and responses of core Neutron OpenStack API controllers.

    Provide a way to add data to responses and handle custom request data
    that is sent to core Neutron OpenStack API controllers.
    """

    def __init__(self, method, url_route, handler):
        # Route, callback and routing conditions for one request extension.
        self.url_route = url_route
        self.handler = handler
        self.conditions = {'method': [method]}
        self.key = "%s-%s" % (method, url_route)
class ActionExtension(object):
    """Add custom actions to core Neutron OpenStack API controllers."""

    def __init__(self, collection, action_name, handler):
        # Plain value object: which collection, which action, which callback.
        self.collection, self.action_name, self.handler = (
            collection, action_name, handler)
class ResourceExtension(object):
    """Add top level resources to the OpenStack API in Neutron."""

    def __init__(self, collection, controller, parent=None, path_prefix="",
                 collection_actions=None, member_actions=None, attr_map=None,
                 collection_methods=None):
        self.collection = collection
        self.controller = controller
        self.parent = parent
        # None defaults avoid the shared-mutable-default pitfall; substitute
        # a fresh empty dict for each unsupplied argument.
        self.collection_actions = collection_actions or {}
        self.collection_methods = collection_methods or {}
        self.member_actions = member_actions or {}
        self.path_prefix = path_prefix
        self.attr_map = attr_map or {}
# Returns the extension paths from a config entry and the __path__
# of neutron.extensions
def get_extensions_path(service_plugins=None):
    """Build the ':'-joined, de-duplicated list of extension directories."""
    # OrderedDict used as an ordered set: keys are paths, values ignored.
    paths = collections.OrderedDict()

    # Add Neutron core extensions
    paths[neutron.extensions.__path__[0]] = 1
    if service_plugins:
        # Add Neutron *-aas extensions
        for plugin in service_plugins.values():
            neutron_mod = provider_configuration.NeutronModule(
                plugin.__module__.split('.')[0])
            try:
                paths[neutron_mod.module().extensions.__path__[0]] = 1
            except AttributeError:
                # Occurs normally if module has no extensions sub-module
                pass

    # Add external/other plugins extensions
    if cfg.CONF.api_extensions_path:
        for path in cfg.CONF.api_extensions_path.split(":"):
            paths[path] = 1

    LOG.debug("get_extension_paths = %s", paths)

    # Re-build the extension string
    return ':'.join(paths)
def append_api_extensions_path(paths):
    """Merge *paths* into CONF.api_extensions_path, dropping empties/dupes."""
    merged = set(paths)
    merged.add(cfg.CONF.api_extensions_path)
    cfg.CONF.set_override(
        'api_extensions_path', ':'.join(p for p in merged if p))
| apache-2.0 |
estesp/elastistack | Godeps/_workspace/src/github.com/mattbaird/elastigo/lib/request_test.go | 6395 | // Copyright 2013 Matthew Baird
// Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package elastigo
import (
"bytes"
"compress/gzip"
"fmt"
"io/ioutil"
"net/http"
"strings"
"testing"
"github.com/bmizerany/assert"
)
// TestQueryString exercises Escape for every supported value type
// (nil, string, the integer widths, both float widths, string slices),
// a combined multi-key map, and an unsupported type.
func TestQueryString(t *testing.T) {
	// Test nil argument
	s, err := Escape(nil)
	assert.T(t, s == "" && err == nil, fmt.Sprintf("Nil should not fail and yield empty string"))

	// Test single string argument
	s, err = Escape(map[string]interface{}{"foo": "bar"})
	exp := "foo=bar"
	assert.T(t, s == exp && err == nil, fmt.Sprintf("Expected %s, got: %s", exp, s))

	// Test single int argument
	s, err = Escape(map[string]interface{}{"foo": int(1)})
	exp = "foo=1"
	assert.T(t, s == exp && err == nil, fmt.Sprintf("Expected %s, got: %s", exp, s))

	// Test single int64 argument
	s, err = Escape(map[string]interface{}{"foo": int64(1)})
	exp = "foo=1"
	assert.T(t, s == exp && err == nil, fmt.Sprintf("Expected %s, got: %s", exp, s))

	// Test single int32 argument
	s, err = Escape(map[string]interface{}{"foo": int32(1)})
	exp = "foo=1"
	assert.T(t, s == exp && err == nil, fmt.Sprintf("Expected %s, got: %s", exp, s))

	// Test single float64 argument
	s, err = Escape(map[string]interface{}{"foo": float64(3.141592)})
	exp = "foo=3.141592"
	assert.T(t, s == exp && err == nil, fmt.Sprintf("Expected %s, got: %s", exp, s))

	// Test single float32 argument
	s, err = Escape(map[string]interface{}{"foo": float32(3.141592)})
	exp = "foo=3.141592"
	assert.T(t, s == exp && err == nil, fmt.Sprintf("Expected %s, got: %s", exp, s))

	// Test single []string argument; elements are joined with an
	// URL-escaped comma ("%2C").
	s, err = Escape(map[string]interface{}{"foo": []string{"bar", "baz"}})
	exp = "foo=bar%2Cbaz"
	assert.T(t, s == exp && err == nil, fmt.Sprintf("Expected %s, got: %s", exp, s))

	// Test combination of all arguments
	s, err = Escape(map[string]interface{}{
		"foo":  "bar",
		"bar":  1,
		"baz":  3.141592,
		"test": []string{"a", "b"},
	})
	// url.Values also orders arguments alphabetically.
	exp = "bar=1&baz=3.141592&foo=bar&test=a%2Cb"
	assert.T(t, s == exp && err == nil, fmt.Sprintf("Expected %s, got: %s", exp, s))

	// Test invalid datatype
	s, err = Escape(map[string]interface{}{"foo": []int{}})
	assert.T(t, err != nil, fmt.Sprintf("Expected err to not be nil"))
}
// TestDoResponseError verifies DoResponse's error handling for a 500 reply
// in three shapes: a JSON body (decoded into v, no Go error), a text/html
// body (Go error carrying the status text), and a missing/invalid mime type.
func TestDoResponseError(t *testing.T) {
	v := make(map[string]string)
	conn := NewConn()
	req, _ := conn.NewRequest("GET", "http://mock.com", "")
	req.Client = http.DefaultClient
	// Restore the real transport no matter how the test exits.
	defer func() {
		req.Client.Transport = http.DefaultTransport
	}()

	// application/json: body is decoded into v, the error stays nil.
	req.Client.Transport = newMockTransport(500, "application/json", `{"error":"internal_server_error"}`)
	res, bodyBytes, err := req.DoResponse(&v)
	assert.NotEqual(t, nil, res)
	assert.Equal(t, nil, err)
	assert.Equal(t, 500, res.StatusCode)
	assert.Equal(t, "application/json", res.Header.Get("Content-Type"))
	assert.Equal(t, "internal_server_error", v["error"])
	assert.Equal(t, []byte(`{"error":"internal_server_error"}`), bodyBytes)

	// text/html: no decoding, the status text comes back as the error.
	v = make(map[string]string)
	req.Client.Transport = newMockTransport(500, "text/html", "HTTP 500 Internal Server Error")
	res, bodyBytes, err = req.DoResponse(&v)
	assert.T(t, res == nil, fmt.Sprintf("Expected nil, got: %v", res))
	assert.NotEqual(t, nil, err)
	assert.Equal(t, 0, len(v))
	assert.Equal(t, []byte("HTTP 500 Internal Server Error"), bodyBytes)
	assert.Equal(t, fmt.Errorf(http.StatusText(500)), err)

	// mime error: an empty Content-Type yields a mime parsing error, not
	// the plain status-text error.
	v = make(map[string]string)
	req.Client.Transport = newMockTransport(500, "", "HTTP 500 Internal Server Error")
	res, bodyBytes, err = req.DoResponse(&v)
	assert.T(t, res == nil, fmt.Sprintf("Expected nil, got: %v", res))
	assert.NotEqual(t, nil, err)
	assert.Equal(t, 0, len(v))
	assert.Equal(t, []byte("HTTP 500 Internal Server Error"), bodyBytes)
	assert.NotEqual(t, fmt.Errorf(http.StatusText(500)), err)
}
// mockTransport is a stub http.RoundTripper that never touches the network
// and always answers with the configured status, Content-Type and body.
type mockTransport struct {
	statusCode  int
	contentType string
	body        string
}
// newMockTransport returns a RoundTripper whose every RoundTrip yields the
// given status code, Content-Type header and body.
func newMockTransport(statusCode int, contentType, body string) http.RoundTripper {
	mt := mockTransport{
		statusCode:  statusCode,
		contentType: contentType,
		body:        body,
	}
	return &mt
}
// RoundTrip implements http.RoundTripper by returning the transport's canned
// response; the incoming request is only echoed back in Response.Request.
func (t *mockTransport) RoundTrip(req *http.Request) (*http.Response, error) {
	header := make(http.Header)
	header.Set("Content-Type", t.contentType)
	return &http.Response{
		Header:     header,
		Request:    req,
		StatusCode: t.statusCode,
		Body:       ioutil.NopCloser(strings.NewReader(t.body)),
	}, nil
}
// TestSetBodyGzip round-trips each accepted body type ([]byte, string,
// io.Reader, arbitrary JSON-marshalable struct) through SetBodyGzip via
// gzipHelper and checks the decompressed bytes match the input.
func TestSetBodyGzip(t *testing.T) {
	s := "foo"

	// test []byte
	expB := []byte(s)
	actB, err := gzipHelper(t, expB)
	assert.T(t, err == nil, fmt.Sprintf("Expected err to be nil"))
	assert.T(t, bytes.Compare(actB, expB) == 0, fmt.Sprintf("Expected: %s, got: %s", expB, actB))

	// test string
	expS := s
	actS, err := gzipHelper(t, expS)
	assert.T(t, err == nil, fmt.Sprintf("Expected err to be nil"))
	assert.T(t, string(actS) == expS, fmt.Sprintf("Expected: %s, got: %s", expS, actS))

	// test io.Reader
	expR := strings.NewReader(s)
	actR, err := gzipHelper(t, expR)
	assert.T(t, err == nil, fmt.Sprintf("Expected err to be nil"))
	assert.T(t, bytes.Compare([]byte(s), actR) == 0, fmt.Sprintf("Expected: %s, got: %s", s, actR))

	// test other: falls back to JSON marshaling of the value.
	expO := testStruct{Name: "Travis"}
	actO, err := gzipHelper(t, expO)
	assert.T(t, err == nil, fmt.Sprintf("Expected err to not be nil"))
	assert.T(t, bytes.Compare([]byte(`{"name":"Travis"}`), actO) == 0, fmt.Sprintf("Expected: %s, got: %s", s, actO))
}
// testStruct is a minimal JSON-serializable fixture used by TestSetBodyGzip.
type testStruct struct {
	Name string `json:"name"`
}
// gzipHelper pushes data through Request.SetBodyGzip and returns the
// decompressed request body, so callers can compare it with the input.
// The *testing.T parameter is unused but kept for interface stability.
func gzipHelper(t *testing.T, data interface{}) ([]byte, error) {
	r, err := http.NewRequest("GET", "http://google.com", nil)
	if err != nil {
		return nil, err
	}
	req := &Request{
		Request: r,
	}
	if err = req.SetBodyGzip(data); err != nil {
		return nil, err
	}
	gr, err := gzip.NewReader(req.Body)
	if err != nil {
		return nil, err
	}
	// Close the gzip reader when done; the original implementation leaked it.
	defer gr.Close()
	return ioutil.ReadAll(gr)
}
| apache-2.0 |
twitter/heron | examples/src/python/window_size_topology.py | 2110 | #!/usr/bin/env python3
# -*- encoding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
'''Example WindowSizeTopology'''
import sys
import heronpy.api.api_constants as constants
from heronpy.api.topology import TopologyBuilder
from heronpy.api.stream import Grouping
from heronpy.api.bolt.window_bolt import SlidingWindowBolt
from heron.examples.src.python.spout import WordSpout
from examples.src.python.bolt import WindowSizeBolt
# Topology is defined using a topology builder
# Refer to multi_stream_topology for defining a topology by subclassing Topology
# pylint: disable=superfluous-parens
# Entry point: build the sliding-window word-count topology and submit it.
# The single required CLI argument is the topology name.
if __name__ == '__main__':
    if len(sys.argv) != 2:
        print("Topology's name is not specified")
        sys.exit(1)
    builder = TopologyBuilder(name=sys.argv[1])
    # Two spout instances emit words; fields-grouping on 'word' sends each
    # distinct word to a fixed bolt instance so per-word counts stay local.
    word_spout = builder.add_spout("word_spout", WordSpout, par=2)
    # 10-second sliding windows advancing every 2 seconds.
    count_bolt = builder.add_bolt("count_bolt", WindowSizeBolt, par=2,
                                  inputs={word_spout: Grouping.fields('word')},
                                  config={SlidingWindowBolt.WINDOW_DURATION_SECS: 10,
                                          SlidingWindowBolt.WINDOW_SLIDEINTERVAL_SECS: 2})
    topology_config = {constants.TOPOLOGY_RELIABILITY_MODE:
                       constants.TopologyReliabilityMode.ATLEAST_ONCE}
    builder.set_config(topology_config)
    builder.build_and_submit()
| apache-2.0 |
dcadevil/vitess | go/mysql/mysql56_gtid.go | 3532 | /*
Copyright 2017 Google Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package mysql
import (
"encoding/hex"
"fmt"
"strconv"
"strings"
"vitess.io/vitess/go/vt/proto/vtrpc"
"vitess.io/vitess/go/vt/vterrors"
)
const mysql56FlavorID = "MySQL56"
// parseMysql56GTID is registered as a GTID parser. It accepts strings of the
// form "<server-uuid>:<sequence>" and returns the corresponding Mysql56GTID.
func parseMysql56GTID(s string) (GTID, error) {
	// Exactly one ':' must separate the server UUID from the sequence number.
	sep := strings.Index(s, ":")
	if sep < 0 || strings.Contains(s[sep+1:], ":") {
		return nil, vterrors.Errorf(vtrpc.Code_INTERNAL, "invalid MySQL 5.6 GTID (%v): expecting UUID:Sequence", s)
	}
	uuidField := s[:sep]
	seqField := s[sep+1:]

	// Parse Server ID.
	sid, err := ParseSID(uuidField)
	if err != nil {
		return nil, vterrors.Wrapf(err, "invalid MySQL 5.6 GTID Server ID (%v)", uuidField)
	}

	// Parse Sequence number.
	seq, err := strconv.ParseInt(seqField, 10, 64)
	if err != nil {
		return nil, vterrors.Wrapf(err, "invalid MySQL 5.6 GTID Sequence number (%v)", seqField)
	}

	return Mysql56GTID{Server: sid, Sequence: seq}, nil
}
// SID is the 16-byte unique ID of a MySQL 5.6 server.
type SID [16]byte

// String prints an SID in the canonical hyphenated 8-4-4-4-12 lowercase hex
// form used by MySQL 5.6, e.g. "00010203-0405-0607-0809-0a0b0c0d0e0f".
func (sid SID) String() string {
	groups := [][]byte{sid[:4], sid[4:6], sid[6:8], sid[8:10], sid[10:16]}
	parts := make([]string, len(groups))
	for i, g := range groups {
		parts[i] = hex.EncodeToString(g)
	}
	return strings.Join(parts, "-")
}
// ParseSID parses an SID in the hyphenated hex form used by MySQL 5.6.
func ParseSID(s string) (sid SID, err error) {
	// Shape check first: 36 chars with dashes at the four fixed positions.
	if len(s) != 36 || s[8] != '-' || s[13] != '-' || s[18] != '-' || s[23] != '-' {
		return sid, vterrors.Errorf(vtrpc.Code_INTERNAL, "invalid MySQL 5.6 SID %q", s)
	}

	// Strip the dashes so a single hex.Decode validates the remaining digits.
	hexDigits := s[:8] + s[9:13] + s[14:18] + s[19:23] + s[24:]
	if _, err := hex.Decode(sid[:], []byte(hexDigits)); err != nil {
		return sid, vterrors.Wrapf(err, "invalid MySQL 5.6 SID %q", s)
	}
	return sid, nil
}
// Mysql56GTID implements GTID for the MySQL 5.6 flavor: a single transaction
// identified by the originating server's SID plus a per-server sequence number.
type Mysql56GTID struct {
	// Server is the SID of the server that originally committed the transaction.
	Server SID
	// Sequence is the sequence number of the transaction within a given Server's
	// scope.
	Sequence int64
}
// String implements GTID.String(); the form is "<server-uuid>:<sequence>".
func (gtid Mysql56GTID) String() string {
	return gtid.Server.String() + ":" + strconv.FormatInt(gtid.Sequence, 10)
}
// Flavor implements GTID.Flavor().
// Every MySQL 5.6 GTID shares the constant "MySQL56" flavor identifier.
func (gtid Mysql56GTID) Flavor() string {
	return mysql56FlavorID
}
// SequenceDomain implements GTID.SequenceDomain().
// MySQL 5.6 has no sequence-domain concept, so this is always nil.
func (gtid Mysql56GTID) SequenceDomain() interface{} {
	return nil
}
// SourceServer implements GTID.SourceServer().
// It returns the SID of the server that committed the transaction.
func (gtid Mysql56GTID) SourceServer() interface{} {
	return gtid.Server
}
// SequenceNumber implements GTID.SequenceNumber().
func (gtid Mysql56GTID) SequenceNumber() interface{} {
	return gtid.Sequence
}
// GTIDSet implements GTID.GTIDSet(): a singleton set containing only this GTID.
func (gtid Mysql56GTID) GTIDSet() GTIDSet {
	return Mysql56GTIDSet{}.AddGTID(gtid)
}
// init registers the MySQL 5.6 parser under its flavor ID so that
// flavored GTID strings can be dispatched to parseMysql56GTID.
func init() {
	gtidParsers[mysql56FlavorID] = parseMysql56GTID
}
| apache-2.0 |
paulsmithkc/ghostbird | bapprill/ghostbird/Assets/Scripts/TileObject.cs | 197 | using UnityEngine;
/// <summary>
/// Base class for interactive objects placed on a tile. Subclasses must
/// implement the Unity mouse-event callbacks declared below.
/// </summary>
public abstract class TileObject : MonoBehaviour
{
    /// <summary>Called by Unity when the pointer enters this object's collider.</summary>
    public abstract void OnMouseEnter();
    /// <summary>Called by Unity when the pointer leaves this object's collider.</summary>
    public abstract void OnMouseExit();
    /// <summary>Called by Unity when the pointer is pressed over this object's collider.</summary>
    public abstract void OnMouseDown();
}
| apache-2.0 |
lshain-android-source/tools-idea | java/java-impl/src/com/intellij/usages/impl/rules/MethodGroupingRule.java | 6726 | /*
* Copyright 2000-2011 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.usages.impl.rules;
import com.intellij.lang.injection.InjectedLanguageManager;
import com.intellij.navigation.NavigationItemFileStatus;
import com.intellij.openapi.actionSystem.DataKey;
import com.intellij.openapi.actionSystem.DataSink;
import com.intellij.openapi.actionSystem.LangDataKeys;
import com.intellij.openapi.actionSystem.TypeSafeDataProvider;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Iconable;
import com.intellij.openapi.util.Segment;
import com.intellij.openapi.vcs.FileStatus;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiFormatUtil;
import com.intellij.psi.util.PsiFormatUtilBase;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.usageView.UsageInfo;
import com.intellij.usages.*;
import com.intellij.usages.rules.PsiElementUsage;
import com.intellij.usages.rules.UsageGroupingRule;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
/**
 * Usage-grouping rule that clusters usages by the Java method containing them.
 * Produces one {@link MethodUsageGroup} per enclosing {@link PsiMethod};
 * usages outside Java files or outside any method yield no group.
 *
 * @author max
 */
public class MethodGroupingRule implements UsageGroupingRule {
  private static final Logger LOG = Logger.getInstance("#com.intellij.usages.impl.rules.MethodGroupingRule");

  @Override
  public UsageGroup groupUsage(@NotNull Usage usage) {
    if (!(usage instanceof PsiElementUsage)) return null;
    PsiElement psiElement = ((PsiElementUsage)usage).getElement();
    PsiFile containingFile = psiElement.getContainingFile();
    InjectedLanguageManager manager = InjectedLanguageManager.getInstance(containingFile.getProject());
    PsiFile topLevelFile = manager.getTopLevelFile(containingFile);
    if (topLevelFile instanceof PsiJavaFile) {
      // For injected fragments, start the search from the injection host
      // inside the top-level Java file.
      PsiElement containingMethod = topLevelFile == containingFile ? psiElement : manager.getInjectionHost(containingFile);
      if (usage instanceof UsageInfo2UsageAdapter && topLevelFile == containingFile) {
        int offset = ((UsageInfo2UsageAdapter)usage).getUsageInfo().getNavigationOffset();
        containingMethod = containingFile.findElementAt(offset);
      }
      // Walk up the PSI tree until a method whose containing class has a
      // qualified name is found (skips methods of local/anonymous classes).
      do {
        containingMethod = PsiTreeUtil.getParentOfType(containingMethod, PsiMethod.class, true);
        if (containingMethod == null) break;
        final PsiClass containingClass = ((PsiMethod)containingMethod).getContainingClass();
        if (containingClass == null || containingClass.getQualifiedName() != null) break;
      }
      while (true);
      if (containingMethod != null) {
        return new MethodUsageGroup((PsiMethod)containingMethod);
      }
    }
    return null;
  }

  /**
   * A usage group representing one method. Equality is based on the formatted
   * method signature plus the underlying PSI element, so overloads in the
   * same class land in distinct groups.
   */
  private static class MethodUsageGroup implements UsageGroup, TypeSafeDataProvider {
    private final SmartPsiElementPointer<PsiMethod> myMethodPointer;
    private final String myName;
    private final Icon myIcon;
    private final Project myProject;

    public MethodUsageGroup(PsiMethod psiMethod) {
      // Render "name(paramTypes)" as the group's display text.
      myName = PsiFormatUtil.formatMethod(
        psiMethod,
        PsiSubstitutor.EMPTY,
        PsiFormatUtilBase.SHOW_NAME | PsiFormatUtilBase.SHOW_PARAMETERS,
        PsiFormatUtilBase.SHOW_TYPE
      );
      myProject = psiMethod.getProject();
      // Hold the method via a smart pointer so the group survives PSI edits.
      myMethodPointer = SmartPointerManager.getInstance(myProject).createSmartPsiElementPointer(psiMethod);
      myIcon = getIconImpl(psiMethod);
    }

    @Override
    public void update() {
    }

    private static Icon getIconImpl(PsiMethod psiMethod) {
      return psiMethod.getIcon(Iconable.ICON_FLAG_VISIBILITY | Iconable.ICON_FLAG_READ_STATUS);
    }

    public int hashCode() {
      return myName.hashCode();
    }

    public boolean equals(Object object) {
      if (!(object instanceof MethodUsageGroup)) {
        return false;
      }
      MethodUsageGroup group = (MethodUsageGroup) object;
      return Comparing.equal(myName, ((MethodUsageGroup)object).myName)
             && SmartPointerManager.getInstance(myProject).pointToTheSameElement(myMethodPointer, group.myMethodPointer);
    }

    @Override
    public Icon getIcon(boolean isOpen) {
      return myIcon;
    }

    private PsiMethod getMethod() {
      return myMethodPointer.getElement();
    }

    @Override
    @NotNull
    public String getText(UsageView view) {
      return myName;
    }

    @Override
    public FileStatus getFileStatus() {
      return isValid() ? NavigationItemFileStatus.get(getMethod()) : null;
    }

    @Override
    public boolean isValid() {
      final PsiMethod method = getMethod();
      return method != null && method.isValid();
    }

    @Override
    public void navigate(boolean focus) throws UnsupportedOperationException {
      if (canNavigate()) {
        getMethod().navigate(focus);
      }
    }

    @Override
    public boolean canNavigate() {
      return isValid();
    }

    @Override
    public boolean canNavigateToSource() {
      return canNavigate();
    }

    @Override
    public int compareTo(UsageGroup usageGroup) {
      if (!(usageGroup instanceof MethodGroupingRule.MethodUsageGroup)) {
        LOG.error("MethodUsageGroup expected but " + usageGroup.getClass() + " found");
      }
      MethodUsageGroup other = (MethodUsageGroup)usageGroup;
      if (SmartPointerManager.getInstance(myProject).pointToTheSameElement(myMethodPointer, other.myMethodPointer)) {
        return 0;
      }
      // Unless alphabetical sorting is requested, order groups by the
      // methods' source positions.
      if (!UsageViewSettings.getInstance().IS_SORT_MEMBERS_ALPHABETICALLY) {
        Segment segment1 = myMethodPointer.getRange();
        Segment segment2 = other.myMethodPointer.getRange();
        if (segment1 != null && segment2 != null) {
          return segment1.getStartOffset() - segment2.getStartOffset();
        }
      }
      return myName.compareToIgnoreCase(other.myName);
    }

    @Override
    public void calcData(final DataKey key, final DataSink sink) {
      if (!isValid()) return;
      if (LangDataKeys.PSI_ELEMENT == key) {
        sink.put(LangDataKeys.PSI_ELEMENT, getMethod());
      }
      if (UsageView.USAGE_INFO_KEY == key) {
        PsiMethod method = getMethod();
        if (method != null) {
          sink.put(UsageView.USAGE_INFO_KEY, new UsageInfo(method));
        }
      }
    }
  }
}
| apache-2.0 |
serge-rider/dbeaver | plugins/org.jkiss.dbeaver.ext.import_config/src/org/jkiss/dbeaver/ext/import_config/wizards/ImportDriverInfo.java | 2589 | /*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2019 Serge Rider (serge@jkiss.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.ext.import_config.wizards;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Value object describing a database driver discovered during configuration
 * import: identity, JDBC metadata, driver library paths and arbitrary
 * driver properties.
 */
public class ImportDriverInfo {

    private String id;
    private String name;
    private String sampleURL;
    private String driverClass;
    private List<String> libraries = new ArrayList<>();
    private Map<Object, Object> properties = new HashMap<>();
    private String defaultPort;
    private String description;

    public ImportDriverInfo(String id, String name, String sampleURL, String driverClass) {
        this.id = id;
        this.name = name;
        this.sampleURL = sampleURL;
        this.driverClass = driverClass;
    }

    /** @return the driver's unique identifier */
    public String getId() {
        return this.id;
    }

    /** @return the human-readable driver name */
    public String getName() {
        return this.name;
    }

    /** @return the JDBC URL template, e.g. {@code jdbc:vendor://host:port/db} */
    public String getSampleURL() {
        return this.sampleURL;
    }

    public void setSampleURL(String sampleURL) {
        this.sampleURL = sampleURL;
    }

    /** @return the fully-qualified JDBC driver class name */
    public String getDriverClass() {
        return this.driverClass;
    }

    public String getDefaultPort() {
        return this.defaultPort;
    }

    public void setDefaultPort(String defaultPort) {
        this.defaultPort = defaultPort;
    }

    /** @return the mutable list of driver library paths */
    public List<String> getLibraries() {
        return this.libraries;
    }

    /** Appends one library path to the driver's library list. */
    public void addLibrary(String path) {
        this.libraries.add(path);
    }

    /** @return the mutable map of driver properties */
    public Map<Object, Object> getProperties() {
        return this.properties;
    }

    /** Stores one driver property, replacing any previous value. */
    public void setProperty(String name, String value) {
        this.properties.put(name, value);
    }

    public String getDescription() {
        return this.description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    @Override
    public String toString() {
        return this.name + " - " + this.driverClass + " - " + this.sampleURL;
    }
}
| apache-2.0 |
ddumontatibm/JavascriptAggregator | jaggr-service/src/main/java/com/ibm/jaggr/service/util/ConsoleHttpServletRequest.java | 9997 | /*
* (C) Copyright IBM Corp. 2012, 2016 All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ibm.jaggr.service.util;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.security.Principal;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
import javax.servlet.AsyncContext;
import javax.servlet.DispatcherType;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.ServletInputStream;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import javax.servlet.http.Part;
/**
 * Implementation of the HttpServletRequest interface used for processing request URLs
 * specified using the processRequestUrl console command. This console command is used
 * primarily for cache priming purposes by automation scripts.
 *
 * Only attributes, query-string parameters and a synthetic "Date" header are
 * meaningfully implemented; the remaining interface methods are inert stubs
 * returning null/0/false.
 */
public class ConsoleHttpServletRequest implements HttpServletRequest {

    // RFC-1123-style pattern for the synthetic "Date" header.
    private static final String dateFormatString = "EEE, dd MMM yyyy HH:mm:ss z"; //$NON-NLS-1$

    // Mutable request state backed by plain maps.
    private Map<String, Object> requestAttributes = new HashMap<String, Object>();
    private Map<String, String[]> requestParams = new HashMap<String, String[]>();
    private Map<String, String> headers = new HashMap<String, String>();
    private String charEncoding = null;
    private final ServletContext context;
    private final String queryString;

    /**
     * Constructs a request object from the specified servlet context and request url. The
     * content of the request URL preceding the query args is ignored.
     *
     * @param context the servlet context
     * @param requestUrl the request url
     * @throws IOException
     */
    public ConsoleHttpServletRequest(ServletContext context, String requestUrl) throws IOException{
        this.context = context;
        int idx = requestUrl.indexOf("?"); //$NON-NLS-1$
        // NOTE(review): when no '?' is present, the entire URL becomes the
        // query string -- confirm this is the intended console behavior.
        this.queryString = idx == -1 ? requestUrl : requestUrl.substring(idx+1);

        // set Date header
        Calendar calendar = Calendar.getInstance();
        SimpleDateFormat dateFormat = new SimpleDateFormat(dateFormatString, Locale.US);
        dateFormat.setTimeZone(TimeZone.getTimeZone("GMT")); //$NON-NLS-1$
        headers.put("Date", dateFormat.format(calendar.getTime())); //$NON-NLS-1$

        // Set request parameters. Repeated parameter names accumulate into a
        // de-duplicated value set (order of duplicates is not preserved).
        String[] parts = queryString.split("[?&]"); //$NON-NLS-1$
        for (String part : parts) {
            idx = part.indexOf("="); //$NON-NLS-1$
            String name = idx == -1 ? part : part.substring(0, idx);
            String value = ""; //$NON-NLS-1$
            if (idx != -1) {
                value = URLDecoder.decode(part.substring(idx+1), "UTF-8"); //$NON-NLS-1$
            }
            String[] values = requestParams.get(name);
            if (values == null) {
                values = new String[]{value};
            } else {
                Set<String> valueSet = new HashSet<String>(Arrays.asList(values));
                valueSet.add(value);
                values = valueSet.toArray(new String[valueSet.size()]);
            }
            requestParams.put(name, values);
        }
    }

    @Override
    public Object getAttribute(String name) {
        return requestAttributes.get(name);
    }

    @Override
    public Enumeration<String> getAttributeNames() {
        return Collections.enumeration(requestAttributes.keySet());
    }

    @Override
    public String getCharacterEncoding() {
        return charEncoding;
    }

    @Override
    public void setCharacterEncoding(String env) throws UnsupportedEncodingException {
        charEncoding = env;
    }

    @Override
    public int getContentLength() {
        return 0;
    }

    @Override
    public String getContentType() {
        return null;
    }

    @Override
    public ServletInputStream getInputStream() throws IOException {
        return null;
    }

    // Returns the first value registered for the parameter, or null.
    @Override
    public String getParameter(String name) {
        String[] values = getParameterValues(name);
        return values == null ? null : values[0];
    }

    @Override
    public Enumeration<String> getParameterNames() {
        return Collections.enumeration(requestParams.keySet());
    }

    @Override
    public String[] getParameterValues(String name) {
        return requestParams.get(name);
    }

    @Override
    public Map<String, String[]> getParameterMap() {
        return Collections.unmodifiableMap(requestParams);
    }

    @Override
    public String getProtocol() {
        return "HTTP/1.1"; //$NON-NLS-1$
    }

    @Override
    public String getScheme() {
        return "http"; //$NON-NLS-1$
    }

    // Synthetic host name marking console-originated requests.
    @Override
    public String getServerName() {
        return "osgi.console"; //$NON-NLS-1$
    }

    @Override
    public int getServerPort() {
        return 0;
    }

    @Override
    public BufferedReader getReader() throws IOException {
        return null;
    }

    @Override
    public String getRemoteAddr() {
        return null;
    }

    @Override
    public String getRemoteHost() {
        return null;
    }

    @Override
    public void setAttribute(String name, Object o) {
        requestAttributes.put(name, o);
    }

    @Override
    public void removeAttribute(String name) {
        requestAttributes.remove(name);
    }

    @Override
    public Locale getLocale() {
        return null;
    }

    @Override
    public Enumeration<Locale> getLocales() {
        return null;
    }

    @Override
    public boolean isSecure() {
        return false;
    }

    @Override
    public RequestDispatcher getRequestDispatcher(String path) {
        return null;
    }

    @Deprecated
    @Override
    public String getRealPath(String path) {
        return null;
    }

    @Override
    public int getRemotePort() {
        return 0;
    }

    @Override
    public String getLocalName() {
        return null;
    }

    @Override
    public String getLocalAddr() {
        return null;
    }

    @Override
    public int getLocalPort() {
        return 0;
    }

    @Override
    public ServletContext getServletContext() {
        return context;
    }

    @Override
    public AsyncContext startAsync() throws IllegalStateException {
        return null;
    }

    @Override
    public AsyncContext startAsync(ServletRequest servletRequest, ServletResponse servletResponse)
            throws IllegalStateException {
        return null;
    }

    @Override
    public boolean isAsyncStarted() {
        return false;
    }

    @Override
    public boolean isAsyncSupported() {
        return false;
    }

    @Override
    public AsyncContext getAsyncContext() {
        return null;
    }

    @Override
    public DispatcherType getDispatcherType() {
        return null;
    }

    @Override
    public String getAuthType() {
        return null;
    }

    @Override
    public Cookie[] getCookies() {
        return new Cookie[]{};
    }

    @Override
    public long getDateHeader(String name) {
        return 0;
    }

    @Override
    public String getHeader(String name) {
        return headers.get(name);
    }

    // NOTE(review): yields a one-element enumeration even when the header is
    // absent (the element is then null) -- callers must tolerate this.
    @Override
    public Enumeration<String> getHeaders(String name) {
        return Collections.enumeration(Arrays.asList(new String[] {getHeader(name)}));
    }

    @Override
    public Enumeration<String> getHeaderNames() {
        return Collections.enumeration(headers.keySet());
    }

    @Override
    public int getIntHeader(String name) {
        return Integer.parseInt(getHeader(name));
    }

    @Override
    public String getMethod() {
        return "GET"; //$NON-NLS-1$
    }

    @Override
    public String getPathInfo() {
        return null;
    }

    @Override
    public String getPathTranslated() {
        return null;
    }

    @Override
    public String getContextPath() {
        return ""; //$NON-NLS-1$
    }

    @Override
    public String getQueryString() {
        return queryString;
    }

    @Override
    public String getRemoteUser() {
        return null;
    }

    @Override
    public boolean isUserInRole(String role) {
        return false;
    }

    @Override
    public Principal getUserPrincipal() {
        return null;
    }

    @Override
    public String getRequestedSessionId() {
        return null;
    }

    // NOTE(review): getPathInfo() is always null here, so the URI renders as
    // "null?<query>" -- acceptable for console/logging use only.
    @Override
    public String getRequestURI() {
        return getPathInfo() + "?" + getQueryString(); //$NON-NLS-1$
    }

    @Override
    public StringBuffer getRequestURL() {
        return new StringBuffer(getRequestURI());
    }

    @Override
    public String getServletPath() {
        return ""; //$NON-NLS-1$
    }

    @Override
    public HttpSession getSession(boolean create) {
        return null;
    }

    @Override
    public HttpSession getSession() {
        return null;
    }

    @Override
    public boolean isRequestedSessionIdValid() {
        return false;
    }

    @Override
    public boolean isRequestedSessionIdFromCookie() {
        return false;
    }

    @Override
    public boolean isRequestedSessionIdFromURL() {
        return false;
    }

    @Deprecated
    @Override
    public boolean isRequestedSessionIdFromUrl() {
        return false;
    }

    @Override
    public boolean authenticate(HttpServletResponse response) throws IOException, ServletException {
        return false;
    }

    @Override
    public void login(String username, String password) throws ServletException {
    }

    @Override
    public void logout() throws ServletException {
    }

    @Override
    public Collection<Part> getParts() throws IOException, ServletException {
        return null;
    }

    @Override
    public Part getPart(String name) throws IOException, ServletException {
        return null;
    }
}
| apache-2.0 |
Cyan3/oodt | catalog/src/main/java/org/apache/oodt/cas/catalog/mapping/DataSourceIngestMapper.java | 11265 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.oodt.cas.catalog.mapping;
//JDK imports
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.Calendar;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
//SQL imports
import javax.sql.DataSource;
//OODT imports
import org.apache.oodt.cas.catalog.exception.CatalogRepositoryException;
import org.apache.oodt.cas.catalog.page.CatalogReceipt;
import org.apache.oodt.cas.catalog.page.IndexPager;
import org.apache.oodt.cas.catalog.page.IngestReceipt;
import org.apache.oodt.cas.catalog.struct.TransactionId;
import org.apache.oodt.cas.catalog.struct.TransactionIdFactory;
import org.apache.oodt.commons.database.DatabaseConnectionBuilder;
import org.apache.oodt.commons.date.DateUtils;
/**
* @author bfoster
* @version $Revision$
*
* <p>
* A Ingest Mapper that indexes to an DataSource Database
* <p>
*/
public class DataSourceIngestMapper implements IngestMapper {
protected DataSource dataSource;
    /**
     * Builds the mapper's backing {@link DataSource} from standard JDBC
     * connection parameters.
     *
     * @throws InstantiationException if the data source cannot be created
     */
    public DataSourceIngestMapper(String user, String pass, String driver,
            String jdbcUrl) throws InstantiationException {
        this.dataSource = DatabaseConnectionBuilder.buildDataSource(user, pass,
                driver, jdbcUrl);
    }
public synchronized void deleteAllMappingsForCatalog(String catalogId)
throws CatalogRepositoryException {
Connection conn = null;
Statement stmt = null;
try {
conn = this.dataSource.getConnection();
stmt = conn.createStatement();
stmt.execute("DELETE FROM CatalogServiceMapper WHERE CATALOG_ID = '" + catalogId + "'");
}catch (Exception e) {
throw new CatalogRepositoryException(e.getMessage(), e);
}finally {
try {
conn.close();
}catch(Exception e) {}
try {
stmt.close();
}catch(Exception e) {}
}
}
public synchronized void deleteAllMappingsForCatalogServiceTransactionId(
TransactionId<?> catalogServiceTransactionId)
throws CatalogRepositoryException {
Connection conn = null;
Statement stmt = null;
try {
conn = this.dataSource.getConnection();
stmt = conn.createStatement();
stmt.execute("DELETE FROM CatalogServiceMapper WHERE CAT_SERV_TRANS_ID = '" + catalogServiceTransactionId + "'");
conn.commit();
}catch (Exception e) {
throw new CatalogRepositoryException(e.getMessage(), e);
}finally {
try {
conn.close();
}catch(Exception e) {}
try {
stmt.close();
}catch(Exception e) {}
}
}
public synchronized void deleteTransactionIdMapping(
TransactionId<?> catalogTransactionId, String catalogId)
throws CatalogRepositoryException {
Connection conn = null;
Statement stmt = null;
try {
conn = this.dataSource.getConnection();
stmt = conn.createStatement();
stmt.execute("DELETE FROM CatalogServiceMapper WHERE CAT_TRANS_ID = '" + catalogTransactionId + "' AND CATALOG_ID = '" + catalogId + "'");
conn.commit();
}catch (Exception e) {
throw new CatalogRepositoryException(e.getMessage(), e);
}finally {
try {
conn.close();
}catch(Exception e) {}
try {
stmt.close();
}catch(Exception e) {}
}
}
public synchronized TransactionId<?> getCatalogServiceTransactionId(
TransactionId<?> catalogTransactionId, String catalogId)
throws CatalogRepositoryException {
Connection conn = null;
Statement stmt = null;
ResultSet rs = null;
try {
conn = this.dataSource.getConnection();
stmt = conn.createStatement();
rs = stmt.executeQuery("SELECT CAT_SERV_TRANS_ID,CAT_SERV_TRANS_FACTORY FROM CatalogServiceMapper WHERE CAT_TRANS_ID = '"+ catalogTransactionId + "' AND CATALOG_ID = '" + catalogId + "'");
while(rs.next())
return ((TransactionIdFactory) Class.forName(rs.getString("CAT_SERV_TRANS_FACTORY")).newInstance()).createTransactionId(rs.getString("CAT_SERV_TRANS_ID"));
return null;
}catch (Exception e) {
throw new CatalogRepositoryException(e.getMessage(), e);
}finally {
try {
conn.close();
}catch(Exception e) {}
try {
stmt.close();
}catch(Exception e) {}
try {
rs.close();
}catch(Exception e) {}
}
}
public synchronized TransactionId<?> getCatalogTransactionId(
TransactionId<?> catalogServiceTransactionId, String catalogId)
throws CatalogRepositoryException {
Connection conn = null;
Statement stmt = null;
ResultSet rs = null;
try {
conn = this.dataSource.getConnection();
stmt = conn.createStatement();
rs = stmt.executeQuery("SELECT CAT_TRANS_ID,CAT_TRANS_FACTORY FROM CatalogServiceMapper WHERE CAT_SERV_TRANS_ID = '"+ catalogServiceTransactionId + "' AND CATALOG_ID = '" + catalogId + "'");
while(rs.next())
return ((TransactionIdFactory) Class.forName(rs.getString("CAT_TRANS_FACTORY")).newInstance()).createTransactionId(rs.getString("CAT_TRANS_ID"));
return null;
}catch (Exception e) {
throw new CatalogRepositoryException(e.getMessage(), e);
}finally {
try {
conn.close();
}catch(Exception e) {}
try {
stmt.close();
}catch(Exception e) {}
try {
rs.close();
}catch(Exception e) {}
}
}
public synchronized Set<String> getCatalogIds(
TransactionId<?> catalogServiceTransactionId)
throws CatalogRepositoryException {
Connection conn = null;
Statement stmt = null;
ResultSet rs = null;
try {
conn = this.dataSource.getConnection();
stmt = conn.createStatement();
rs = stmt.executeQuery("SELECT CATALOG_ID FROM CatalogServiceMapper WHERE CAT_SERV_TRANS_ID = '"+ catalogServiceTransactionId + "'");
Set<String> catalogIds = new HashSet<String>();
while(rs.next())
catalogIds.add(rs.getString("CATALOG_ID"));
return catalogIds;
}catch (Exception e) {
throw new CatalogRepositoryException(e.getMessage(), e);
}finally {
try {
conn.close();
}catch(Exception e) {}
try {
stmt.close();
}catch(Exception e) {}
try {
rs.close();
}catch(Exception e) {}
}
}
public synchronized Set<TransactionId<?>> getPageOfCatalogTransactionIds(
IndexPager indexPager, String catalogId)
throws CatalogRepositoryException {
Connection conn = null;
Statement stmt = null;
ResultSet rs = null;
try {
conn = this.dataSource.getConnection();
stmt = conn.createStatement();
rs = stmt.executeQuery(
"SELECT * FROM "
+"( SELECT a.*, ROWNUM r FROM "
+ "( SELECT CAT_TRANS_FACTORY,CAT_TRANS_ID FROM CatalogServiceMapper WHERE CatalogServiceMapper.CATALOG_ID = '" + catalogId + "' ORDER BY CatalogServiceMapper.CAT_SERV_TRANS_ID DESC ) a "
+ "WHERE ROWNUM <= " + (indexPager.getPageSize() * (indexPager.getPageNum() + 1)) + " ) "
+ "WHERE r >= " + ((indexPager.getPageSize() * indexPager.getPageNum()) + 1));
Set<TransactionId<?>> transactionIds = new HashSet<TransactionId<?>>();
while(rs.next())
transactionIds.add(((TransactionIdFactory) Class.forName(rs.getString("CAT_TRANS_FACTORY")).newInstance()).createTransactionId(rs.getString("CAT_TRANS_ID")));
return transactionIds;
}catch (Exception e) {
throw new CatalogRepositoryException(e.getMessage(), e);
}finally {
try {
conn.close();
}catch(Exception e) {}
try {
stmt.close();
}catch(Exception e) {}
try {
rs.close();
}catch(Exception e) {}
}
}
public synchronized boolean hasCatalogServiceTransactionId(
TransactionId<?> catalogServiceTransactionId)
throws CatalogRepositoryException {
Connection conn = null;
Statement stmt = null;
ResultSet rs = null;
try {
conn = this.dataSource.getConnection();
stmt = conn.createStatement();
rs = stmt.executeQuery("SELECT CAT_SERV_TRANS_ID FROM CatalogServiceMapper WHERE CAT_SERV_TRANS_ID = '"+ catalogServiceTransactionId + "'");
return rs.next();
}catch (Exception e) {
throw new CatalogRepositoryException(e.getMessage(), e);
}finally {
try {
conn.close();
}catch(Exception e) {}
try {
stmt.close();
}catch(Exception e) {}
try {
rs.close();
}catch(Exception e) {}
}
}
public synchronized void storeTransactionIdMapping(
TransactionId<?> catalogServiceTransactionId,
TransactionIdFactory catalogServiceTransactionIdFactory,
CatalogReceipt catalogReceipt,
TransactionIdFactory catalogTransactionIdFactory)
throws CatalogRepositoryException {
Connection conn = null;
Statement stmt = null;
try {
conn = this.dataSource.getConnection();
stmt = conn.createStatement();
Calendar calTime = DateUtils.getCurrentUtcTime();
calTime.setTime(catalogReceipt.getTransactionDate());
stmt.execute("INSERT INTO CatalogServiceMapper (CAT_SERV_TRANS_ID, CAT_SERV_TRANS_FACTORY, CAT_TRANS_ID, CAT_TRANS_FACTORY, CAT_TRANS_DATE, CATALOG_ID) VALUES ('"
+ catalogServiceTransactionId + "', '"
+ catalogServiceTransactionIdFactory.getClass().getName() + "', '"
+ catalogReceipt.getTransactionId() + "', '"
+ catalogTransactionIdFactory.getClass().getName() + "', '"
+ DateUtils.toString(calTime) + "', '"
+ catalogReceipt.getCatalogId() + "')");
conn.commit();
}catch (Exception e) {
throw new CatalogRepositoryException(e.getMessage(), e);
}finally {
try {
conn.close();
}catch(Exception e) {}
try {
stmt.close();
}catch(Exception e) {}
}
}
public CatalogReceipt getCatalogReceipt(
TransactionId<?> catalogServiceTransactionId, String catalogId)
throws CatalogRepositoryException {
Connection conn = null;
Statement stmt = null;
ResultSet rs = null;
try {
conn = this.dataSource.getConnection();
stmt = conn.createStatement();
rs = stmt.executeQuery("SELECT CAT_TRANS_ID, CAT_TRANS_FACTORY, CAT_TRANS_DATE FROM CatalogServiceMapper WHERE CAT_SERV_TRANS_ID = '"+ catalogServiceTransactionId + "' AND CATALOG_ID = '" + catalogId + "'");
if(rs.next()) {
TransactionId<?> catalogTransactionId = ((TransactionIdFactory) Class.forName(rs.getString("CAT_TRANS_FACTORY")).newInstance()).createTransactionId(rs.getString("CAT_TRANS_ID"));
Date transactionDate = DateUtils.toCalendar(rs.getString("CAT_TRANS_DATE"), DateUtils.FormatType.UTC_FORMAT).getTime();
return new CatalogReceipt(new IngestReceipt(catalogTransactionId, transactionDate), catalogId);
}else {
return null;
}
}catch (Exception e) {
throw new CatalogRepositoryException(e.getMessage(), e);
}finally {
try {
conn.close();
}catch(Exception e) {}
try {
stmt.close();
}catch(Exception e) {}
try {
rs.close();
}catch(Exception e) {}
}
}
}
| apache-2.0 |
DustinCampbell/roslyn | src/Compilers/CSharp/Portable/Lowering/LocalRewriter/LocalRewriter_SuppressNullableWarningExpression.cs | 460 | // Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
namespace Microsoft.CodeAnalysis.CSharp
{
    internal sealed partial class LocalRewriter
    {
        /// <summary>
        /// Lowers a null-suppression expression (<c>expr!</c>). The operator
        /// exists only for nullable-warning analysis and has no runtime
        /// effect, so lowering simply drops the suppression node and rewrites
        /// the operand in its place.
        /// </summary>
        public override BoundNode VisitSuppressNullableWarningExpression(BoundSuppressNullableWarningExpression node)
        {
            return VisitExpression(node.Expression);
        }
    }
}
| apache-2.0 |
markflyhigh/incubator-beam | sdks/java/extensions/sorter/src/main/java/org/apache/beam/sdk/extensions/sorter/NativeFileSorter.java | 10484 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.extensions.sorter;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import org.apache.beam.sdk.coders.ByteArrayCoder;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterators;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.primitives.UnsignedBytes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* External Sorter based on <a
* href="https://github.com/lemire/externalsortinginjava">lemire/externalsortinginjava</a>.
*/
/**
 * External Sorter based on <a
 * href="https://github.com/lemire/externalsortinginjava">lemire/externalsortinginjava</a>.
 *
 * <p>Records are appended to a temp file via {@link #add}; {@link #sort} then
 * splits that file into memory-sized sorted runs on disk and returns a lazy
 * merge-sorted view over the runs.
 */
class NativeFileSorter {

  private static final Logger LOG = LoggerFactory.getLogger(NativeFileSorter.class);

  private static final int MAX_TEMP_FILES = 1024;
  private static final long OBJECT_OVERHEAD = getObjectOverhead();

  // Keys compare as unsigned byte strings.
  private static final Comparator<byte[]> COMPARATOR = UnsignedBytes.lexicographicalComparator();
  private static final Comparator<KV<byte[], byte[]>> KV_COMPARATOR =
      (x, y) -> COMPARATOR.compare(x.getKey(), y.getKey());

  private static final ByteArrayCoder CODER = ByteArrayCoder.of();

  private final Path tempDir;
  private final long maxMemory;

  private final File dataFile;
  private final OutputStream dataStream;

  private boolean sortCalled = false;

  /** Create a new file sorter. */
  public NativeFileSorter(Path tempDir, long maxMemory) throws IOException {
    this.tempDir = tempDir;
    this.maxMemory = maxMemory;
    this.dataFile = Files.createTempFile(tempDir, "input", "seq").toFile();
    this.dataStream = new BufferedOutputStream(new FileOutputStream(dataFile));
    dataFile.deleteOnExit();
    LOG.debug("Created input file {}", dataFile);
  }

  /**
   * Adds a given record to the sorter.
   *
   * <p>Records can only be added before calling {@link #sort()}.
   */
  public void add(byte[] key, byte[] value) throws IOException {
    Preconditions.checkState(!sortCalled, "Records can only be added before sort()");
    CODER.encode(key, dataStream);
    CODER.encode(value, dataStream);
  }

  /**
   * Sorts the added elements and returns an {@link Iterable} over the sorted elements.
   *
   * <p>Can be called at most once.
   */
  public Iterable<KV<byte[], byte[]>> sort() throws IOException {
    Preconditions.checkState(!sortCalled, "sort() can only be called once.");
    sortCalled = true;
    dataStream.close();
    return mergeSortedFiles(sortInBatch());
  }

  ////////////////////////////////////////////////////////////////////////////////

  /**
   * Loads the file by blocks of records, sorts in memory, and writes the result to temporary files
   * that have to be merged later.
   */
  private List<File> sortInBatch() throws IOException {
    final long fileSize = Files.size(dataFile.toPath());
    final long memory = maxMemory > 0 ? maxMemory : estimateAvailableMemory();
    final long blockSize = estimateBestBlockSize(fileSize, memory); // in bytes
    LOG.debug(
        "Sort in batch with fileSize: {}, memory: {}, blockSize: {}", fileSize, memory, blockSize);

    final List<File> files = new ArrayList<>();
    // try-with-resources replaces the manual try/finally close.
    try (InputStream inputStream = new BufferedInputStream(new FileInputStream(dataFile))) {
      final List<KV<byte[], byte[]>> tempList = new ArrayList<>();
      // Non-null sentinel so the outer loop is entered at least once.
      KV<byte[], byte[]> kv = KV.of(null, null);
      while (kv != null) {
        long currentBlockSize = 0;
        while ((currentBlockSize < blockSize) && (kv = readKeyValue(inputStream)) != null) {
          // as long as you have enough memory
          tempList.add(kv);
          currentBlockSize += estimateSizeOf(kv);
        }
        // Skip the empty trailing batch; the previous code wrote a useless
        // empty run file when the input ended exactly on a block boundary.
        if (!tempList.isEmpty()) {
          files.add(sortAndSave(tempList));
          tempList.clear();
        }
      }
    }
    return files;
  }

  /** Sorts a batch in memory and saves it to a new temporary run file. */
  private File sortAndSave(List<KV<byte[], byte[]>> tempList) throws IOException {
    final File tempFile = Files.createTempFile(tempDir, "sort", "seq").toFile();
    tempFile.deleteOnExit();
    LOG.debug("Sort and save {}", tempFile);

    tempList.sort(KV_COMPARATOR);
    try (OutputStream outputStream = new BufferedOutputStream(new FileOutputStream(tempFile))) {
      for (KV<byte[], byte[]> kv : tempList) {
        CODER.encode(kv.getKey(), outputStream);
        CODER.encode(kv.getValue(), outputStream);
      }
    }
    return tempFile;
  }

  /** Merges a list of temporary flat files into one lazily merge-sorted view. */
  private Iterable<KV<byte[], byte[]>> mergeSortedFiles(List<File> files) {
    return () -> {
      final List<Iterator<KV<byte[], byte[]>>> iterators = new ArrayList<>();
      for (File file : files) {
        try {
          iterators.add(iterateFile(file));
        } catch (FileNotFoundException e) {
          throw new IllegalStateException(e);
        }
      }
      return Iterators.mergeSorted(iterators, KV_COMPARATOR);
    };
  }

  /**
   * Creates an {@link Iterator} over the key-value pairs in a file.
   *
   * <p>The underlying stream is now closed as soon as the iterator is
   * exhausted (or fails); the previous implementation leaked the handle.
   */
  private Iterator<KV<byte[], byte[]>> iterateFile(File file) throws FileNotFoundException {
    final InputStream inputStream = new BufferedInputStream(new FileInputStream(file));
    return new Iterator<KV<byte[], byte[]>>() {
      KV<byte[], byte[]> nextKv = readNext();

      @Override
      public boolean hasNext() {
        return nextKv != null;
      }

      @Override
      public KV<byte[], byte[]> next() {
        KV<byte[], byte[]> r = nextKv;
        nextKv = readNext();
        return r;
      }

      private KV<byte[], byte[]> readNext() {
        try {
          KV<byte[], byte[]> kv = readKeyValue(inputStream);
          if (kv == null) {
            // End of run file: release the file handle eagerly.
            closeQuietly();
          }
          return kv;
        } catch (EOFException e) {
          closeQuietly();
          return null;
        } catch (IOException e) {
          closeQuietly();
          throw new IllegalStateException(e);
        }
      }

      private void closeQuietly() {
        try {
          inputStream.close();
        } catch (IOException e) {
          LOG.warn("Failed to close {}", file, e);
        }
      }
    };
  }

  /** Reads the next key-value pair from a stream, or null at end of stream. */
  private KV<byte[], byte[]> readKeyValue(InputStream inputStream) throws IOException {
    try {
      final byte[] keyBytes = CODER.decode(inputStream);
      final byte[] valueBytes = CODER.decode(inputStream);
      return KV.of(keyBytes, valueBytes);
    } catch (EOFException e) {
      // Clean EOF between records: no more pairs.
      return null;
    }
  }

  ////////////////////////////////////////////////////////////////////////////////

  // NOTE(review): currently unreferenced within this class; retained from the
  // original source rather than deleted. Candidate for removal.
  private int bufferSize(int numFiles) {
    final long memory = maxMemory > 0 ? maxMemory : estimateAvailableMemory();
    return (int) (memory / numFiles / 2);
  }

  /**
   * This method calls the garbage collector and then returns the free memory. This avoids problems
   * with applications where the GC hasn't reclaimed memory and reports no available memory.
   */
  @SuppressFBWarnings("DM_GC")
  private static long estimateAvailableMemory() {
    System.gc();
    // http://stackoverflow.com/questions/12807797/java-get-available-memory
    final Runtime r = Runtime.getRuntime();
    final long allocatedMemory = r.totalMemory() - r.freeMemory();
    return r.maxMemory() - allocatedMemory;
  }

  /**
   * We divide the file into small blocks. If the blocks are too small, we shall create too many
   * temporary files. If they are too big, we shall be using too much memory.
   *
   * @param sizeOfFile how much data (in bytes) can we expect
   * @param maxMemory Maximum memory to use (in bytes)
   */
  private static long estimateBestBlockSize(final long sizeOfFile, final long maxMemory) {
    // we don't want to open up much more than MAX_TEMP_FILES temporary files, better run out of
    // memory first.
    long blockSize = sizeOfFile / MAX_TEMP_FILES + (sizeOfFile % MAX_TEMP_FILES == 0 ? 0 : 1);
    // on the other hand, we don't want to create many temporary files for naught. If blockSize is
    // smaller than half the free memory, grow it.
    if (blockSize < maxMemory / 2) {
      blockSize = maxMemory / 2;
    }
    return blockSize;
  }

  /** Rough per-record JVM overhead used when estimating batch memory usage. */
  private static long getObjectOverhead() {
    // By default we assume 64 bit JVM
    // (defensive approach since we will get larger estimations in case we are not sure)
    boolean is64BitJvm = true;
    // check the system property "sun.arch.data.model"
    // not very safe, as it might not work for all JVM implementations
    // nevertheless the worst thing that might happen is that the JVM is 32bit
    // but we assume its 64bit, so we will be counting a few extra bytes per string object
    // no harm done here since this is just an approximation.
    String arch = System.getProperty("sun.arch.data.model");
    if (arch != null && arch.contains("32")) {
      // If exists and is 32 bit then we assume a 32bit JVM
      is64BitJvm = false;
    }
    // The sizes below are a bit rough as we don't take into account
    // advanced JVM options such as compressed oops
    // however if our calculation is not accurate it'll be a bit over
    // so there is no danger of an out of memory error because of this.
    long objectHeader = is64BitJvm ? 16 : 8;
    long arrayHeader = is64BitJvm ? 24 : 12;
    long objectRef = is64BitJvm ? 8 : 4;
    return objectHeader + (objectRef + arrayHeader) * 2;
  }

  /** Estimated in-memory footprint of one key-value pair. */
  private static long estimateSizeOf(KV<byte[], byte[]> kv) {
    return kv.getKey().length + kv.getValue().length + OBJECT_OVERHEAD;
  }
}
| apache-2.0 |
jpettitt/amphtml | extensions/amp-youtube/1.0/amp-youtube.js | 671 | import {isExperimentOn} from '#experiments';
import {BaseElement} from './base-element';
import {CSS} from '../../../build/amp-youtube-1.0.css';
import {userAssert} from '../../../src/log';
/** @const {string} */
const TAG = 'amp-youtube';
class AmpYoutube extends BaseElement {
  /** @override */
  isLayoutSupported(layout) {
    // The Bento build of this component requires either the global "bento"
    // experiment or the component-specific one to be turned on.
    const bentoEnabled =
      isExperimentOn(this.win, 'bento') ||
      isExperimentOn(this.win, 'bento-youtube');
    userAssert(
      bentoEnabled,
      'expected global "bento" or specific "bento-youtube" experiment to be enabled'
    );
    return super.isLayoutSupported(layout);
  }
}
// Register the v1.0 amp-youtube implementation with the AMP runtime,
// attaching the component's compiled styles.
AMP.extension(TAG, '1.0', (AMP) => {
  AMP.registerElement(TAG, AmpYoutube, CSS);
});
| apache-2.0 |
chizou/geowave | test/src/main/java/mil/nga/giat/geowave/test/GeoWaveITSuiteRunner.java | 2749 | package mil.nga.giat.geowave.test;
import java.lang.reflect.Method;
import java.util.Collections;
import java.util.List;
import org.junit.internal.runners.statements.RunAfters;
import org.junit.runner.Runner;
import org.junit.runner.notification.RunNotifier;
import org.junit.runners.Suite;
import org.junit.runners.model.FrameworkMethod;
import org.junit.runners.model.InitializationError;
import org.junit.runners.model.RunnerBuilder;
import org.junit.runners.model.Statement;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * JUnit {@link Suite} runner that cooperates with {@link GeoWaveITRunner} so
 * the shared test environment is torn down only after the whole suite
 * (including all suite-level after-class logic) has finished running.
 */
public class GeoWaveITSuiteRunner extends
    Suite
{
  private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveITSuiteRunner.class);

  @Override
  protected Statement withAfterClasses(
      Statement statement ) {
    try {
      Statement newStatement = super.withAfterClasses(statement);
      // Look up our own tearDown() reflectively so it can be appended as a
      // FrameworkMethod that runs after every other @AfterClass statement.
      // The string "tearDown" must match the method name below.
      final Method tearDownMethod = GeoWaveITSuiteRunner.class.getDeclaredMethod("tearDown");
      tearDownMethod.setAccessible(true);
      return new RunAfters(
          newStatement,
          Collections.singletonList(new FrameworkMethod(
              tearDownMethod)),
          this);
    }
    catch (NoSuchMethodException | SecurityException e) {
      LOGGER.warn(
          "Unable to find tearDown method",
          e);
    }
    // Reflection failed: fall back to the stock Suite behavior.
    return super.withAfterClasses(statement);
  }

  // Captured in runChild() so the suite-level tearDown can delegate to the
  // last child runner that owns the test environment.
  private GeoWaveITRunner itRunner;

  // Invoked reflectively from withAfterClasses(); do not rename.
  protected void tearDown()
      throws Exception {
    if (itRunner != null) {
      itRunner.tearDown();
    }
  }

  @Override
  protected void runChild(
      Runner runner,
      RunNotifier notifier ) {
    // this is kinda a hack but the intent is to ensure that each individual
    // test is able to tear down the environment *after* the
    // suite.tearDown() method is called, in general the child runner
    // methods are always called before the parent runner
    if (runner instanceof GeoWaveITRunner) {
      itRunner = (GeoWaveITRunner) runner;
    }
    super.runChild(
        runner,
        notifier);
  }

  // The constructors below simply mirror JUnit's Suite constructors and
  // delegate directly to super.

  public GeoWaveITSuiteRunner(
      final Class<?> klass,
      final List<Runner> runners )
      throws InitializationError {
    super(
        klass,
        runners);
  }

  public GeoWaveITSuiteRunner(
      final Class<?> klass,
      final RunnerBuilder builder )
      throws InitializationError {
    super(
        klass,
        builder);
  }

  public GeoWaveITSuiteRunner(
      final RunnerBuilder builder,
      final Class<?> klass,
      final Class<?>[] suiteClasses )
      throws InitializationError {
    super(
        builder,
        klass,
        suiteClasses);
  }

  public GeoWaveITSuiteRunner(
      final RunnerBuilder builder,
      final Class<?>[] classes )
      throws InitializationError {
    super(
        builder,
        classes);
  }

  protected GeoWaveITSuiteRunner(
      final Class<?> klass,
      final Class<?>[] suiteClasses )
      throws InitializationError {
    super(
        klass,
        suiteClasses);
  }
}
| apache-2.0 |
waynedovey/fig | fig/packages/docker/client.py | 31414 | # Copyright 2013 dotCloud inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import re
import shlex
import struct
import warnings
import requests
import requests.exceptions
from fig.packages import six
from .auth import auth
from .unixconn import unixconn
from .utils import utils
from . import errors
if not six.PY3:
import websocket
DEFAULT_DOCKER_API_VERSION = '1.12'
DEFAULT_TIMEOUT_SECONDS = 60
STREAM_HEADER_SIZE_BYTES = 8
class Client(requests.Session):
def __init__(self, base_url=None, version=DEFAULT_DOCKER_API_VERSION,
             timeout=DEFAULT_TIMEOUT_SECONDS):
    """HTTP client session for the Docker remote API.

    :param base_url: daemon endpoint; accepts ``unix:///path``,
        ``unix:/path``, ``tcp://host:port`` or plain http(s) URLs.
        Defaults to the standard unix socket.
    :param version: remote API version string used when building URLs.
    :param timeout: default per-request timeout in seconds.
    """
    super(Client, self).__init__()
    if base_url is None:
        base_url = "http+unix://var/run/docker.sock"
    # Collapse unix:///path to unix://path so the http+unix adapter
    # receives the form it expects.
    if 'unix:///' in base_url:
        base_url = base_url.replace('unix:/', 'unix:')
    if base_url.startswith('unix:'):
        # http+unix:// URLs are routed through the UnixAdapter mounted below.
        base_url = "http+" + base_url
    if base_url.startswith('tcp:'):
        base_url = base_url.replace('tcp:', 'http:')
    if base_url.endswith('/'):
        # Strip a single trailing slash so path joining stays predictable.
        base_url = base_url[:-1]
    self.base_url = base_url
    self._version = version
    self._timeout = timeout
    # Registry auth data loaded from the user's docker config, if any.
    self._auth_configs = auth.load_config()
    self.mount('http+unix://', unixconn.UnixAdapter(base_url, timeout))
def _set_request_timeout(self, kwargs):
    """Prepare the kwargs for an HTTP request by inserting the timeout
    parameter, if not already present."""
    kwargs.setdefault('timeout', self._timeout)
    return kwargs

# Thin wrappers over the requests.Session verbs that always apply the
# client's configured default timeout.
def _post(self, url, **kwargs):
    return self.post(url, **self._set_request_timeout(kwargs))

def _get(self, url, **kwargs):
    return self.get(url, **self._set_request_timeout(kwargs))

def _delete(self, url, **kwargs):
    return self.delete(url, **self._set_request_timeout(kwargs))

def _url(self, path):
    # Prefix every path with the versioned API root, e.g. /v1.12/containers.
    return '{0}/v{1}{2}'.format(self.base_url, self._version, path)
def _raise_for_status(self, response, explanation=None):
    """Raises stored :class:`APIError`, if one occurred."""
    try:
        response.raise_for_status()
    except requests.exceptions.HTTPError as e:
        # Wrap the HTTP error so callers only ever need to catch APIError.
        raise errors.APIError(e, response, explanation=explanation)

def _result(self, response, json=False, binary=False):
    """Extract a response payload as text, decoded JSON, or raw bytes.

    ``json`` and ``binary`` are mutually exclusive selectors.
    """
    assert not (json and binary)
    self._raise_for_status(response)

    if json:
        return response.json()
    if binary:
        return response.content
    return response.text
def _container_config(self, image, command, hostname=None, user=None,
                      detach=False, stdin_open=False, tty=False,
                      mem_limit=0, ports=None, environment=None, dns=None,
                      volumes=None, volumes_from=None,
                      network_disabled=False, entrypoint=None,
                      cpu_shares=None, working_dir=None, domainname=None,
                      memswap_limit=0):
    """Translate keyword arguments into the JSON body expected by the
    daemon's /containers/create endpoint.

    Accepts several convenience forms (string commands, dict
    environments, list ports/volumes) and normalizes each of them.
    """
    if isinstance(command, six.string_types):
        command = shlex.split(str(command))
    if isinstance(environment, dict):
        # The API wants a list of KEY=VALUE strings.
        environment = [
            '{0}={1}'.format(k, v) for k, v in environment.items()
        ]

    if isinstance(ports, list):
        # Normalize [port, (port, proto), ...] into {"port/proto": {}}.
        exposed_ports = {}
        for port_definition in ports:
            port = port_definition
            proto = 'tcp'
            if isinstance(port_definition, tuple):
                if len(port_definition) == 2:
                    proto = port_definition[1]
                port = port_definition[0]
            exposed_ports['{0}/{1}'.format(port, proto)] = {}
        ports = exposed_ports

    if isinstance(volumes, list):
        # Normalize a list of mount points into the {path: {}} mapping form.
        volumes_dict = {}
        for vol in volumes:
            volumes_dict[vol] = {}
        volumes = volumes_dict

    if volumes_from:
        if not isinstance(volumes_from, six.string_types):
            volumes_from = ','.join(volumes_from)
    else:
        # Force None, an empty list or dict causes client.start to fail
        volumes_from = None

    attach_stdin = False
    attach_stdout = False
    attach_stderr = False
    stdin_once = False

    if not detach:
        attach_stdout = True
        attach_stderr = True

        if stdin_open:
            attach_stdin = True
            stdin_once = True

    if utils.compare_version('1.10', self._version) >= 0:
        # As of API 1.10 these options belong to start(), not create().
        message = ('{0!r} parameter has no effect on create_container().'
                   ' It has been moved to start()')
        if dns is not None:
            raise errors.DockerException(message.format('dns'))
        if volumes_from is not None:
            raise errors.DockerException(message.format('volumes_from'))

    return {
        'Hostname': hostname,
        'Domainname': domainname,
        'ExposedPorts': ports,
        'User': user,
        'Tty': tty,
        'OpenStdin': stdin_open,
        'StdinOnce': stdin_once,
        'Memory': mem_limit,
        'AttachStdin': attach_stdin,
        'AttachStdout': attach_stdout,
        'AttachStderr': attach_stderr,
        'Env': environment,
        'Cmd': command,
        'Dns': dns,
        'Image': image,
        'Volumes': volumes,
        'VolumesFrom': volumes_from,
        'NetworkDisabled': network_disabled,
        'Entrypoint': entrypoint,
        'CpuShares': cpu_shares,
        'WorkingDir': working_dir,
        'MemorySwap': memswap_limit
    }
def _post_json(self, url, data, **kwargs):
    """POST ``data`` as a JSON request body.

    Go < 1.1 on the daemon side cannot unserialize null into a string,
    so None-valued entries are stripped before encoding.
    """
    payload = {}
    if data is not None:
        payload = {
            key: value
            for key, value in six.iteritems(data)
            if value is not None
        }
    kwargs.setdefault('headers', {})
    kwargs['headers']['Content-Type'] = 'application/json'
    return self._post(url, data=json.dumps(payload), **kwargs)
def _attach_params(self, override=None):
    """Return attach query parameters.

    Any falsy ``override`` (None or an empty mapping) selects the default
    stdout/stderr/stream parameters, mirroring the original ``or``
    short-circuit.
    """
    if override:
        return override
    return {
        'stdout': 1,
        'stderr': 1,
        'stream': 1,
    }
def _attach_websocket(self, container, params=None):
    """Open a websocket attached to *container*'s stdio streams.

    The vendored websocket library is only imported on Python 2
    (see the module-level import guard), hence the PY3 rejection.
    """
    if six.PY3:
        raise NotImplementedError("This method is not currently supported "
                                  "under python 3")
    url = self._url("/containers/{0}/attach/ws".format(container))
    req = requests.Request("POST", url, params=self._attach_params(params))
    full_url = req.prepare().url
    # Swap the scheme: the attach endpoint is spoken over a websocket.
    full_url = full_url.replace("http://", "ws://", 1)
    full_url = full_url.replace("https://", "wss://", 1)
    return self._create_websocket_connection(full_url)

def _create_websocket_connection(self, url):
    # Single point through which websocket connections are created.
    return websocket.create_connection(url)
def _get_raw_response_socket(self, response):
    """Return the underlying socket of a streamed *response*.

    Reaches into urllib3/httplib private attributes, whose layout
    differs between Python 2 and Python 3.
    """
    self._raise_for_status(response)
    if six.PY3:
        return response.raw._fp.fp.raw._sock
    else:
        return response.raw._fp.fp._sock
def _stream_helper(self, response):
    """Generator for data coming from a chunked-encoded HTTP response."""
    socket_fp = self._get_raw_response_socket(response)
    socket_fp.setblocking(1)
    socket = socket_fp.makefile()
    while True:
        # Because Docker introduced newlines at the end of chunks in v0.9,
        # and only on some API endpoints, we have to cater for both cases.
        size_line = socket.readline()
        if size_line == '\r\n':
            size_line = socket.readline()

        # Chunk sizes in HTTP chunked transfer encoding are hexadecimal.
        size = int(size_line, 16)
        if size <= 0:
            # A zero-length chunk marks the end of the stream.
            break
        data = socket.readline()
        if not data:
            break
        yield data
def _multiplexed_buffer_helper(self, response):
    """Yield the payload of each multiplexed stream frame in *response*.

    Each frame starts with an 8-byte header whose last four bytes hold a
    big-endian payload length ('>BxxxL'); the payload follows directly.
    """
    buf = self._result(response, binary=True)
    offset = 0
    while len(buf) - offset >= STREAM_HEADER_SIZE_BYTES:
        # unpack_from with an explicit offset avoids re-slicing the buffer.
        _, payload_size = struct.unpack_from('>BxxxL', buf, offset)
        payload_start = offset + STREAM_HEADER_SIZE_BYTES
        offset = payload_start + payload_size
        yield buf[payload_start:offset]
def _multiplexed_socket_stream_helper(self, response):
    """A generator of multiplexed data blocks coming from a response
    socket."""
    socket = self._get_raw_response_socket(response)

    def recvall(socket, size):
        # Keep reading until exactly `size` bytes have arrived, or the
        # peer closes the connection (then return None).
        blocks = []
        while size > 0:
            block = socket.recv(size)
            if not block:
                return None

            blocks.append(block)
            size -= len(block)

        sep = bytes() if six.PY3 else str()
        data = sep.join(blocks)
        return data

    while True:
        socket.settimeout(None)
        # 8-byte frame header: one stream-type byte, three pad bytes, then
        # a big-endian uint32 payload length ('>BxxxL').
        header = recvall(socket, STREAM_HEADER_SIZE_BYTES)
        if not header:
            break
        _, length = struct.unpack('>BxxxL', header)
        if not length:
            break
        data = recvall(socket, length)
        if not data:
            break
        yield data
def attach(self, container, stdout=True, stderr=True,
           stream=False, logs=False):
    """Attach to *container*'s output streams.

    Returns a generator of output blocks when ``stream`` is true,
    otherwise the full (joined) output. The fragile ``cond and 1 or 0``
    idioms were replaced with explicit conditional expressions;
    behavior is unchanged.
    """
    if isinstance(container, dict):
        container = container.get('Id')
    params = {
        'logs': 1 if logs else 0,
        'stdout': 1 if stdout else 0,
        'stderr': 1 if stderr else 0,
        'stream': 1 if stream else 0,
    }
    u = self._url("/containers/{0}/attach".format(container))
    response = self._post(u, params=params, stream=stream)

    # Stream multi-plexing was only introduced in API v1.6. Anything before
    # that needs old-style streaming.
    if utils.compare_version('1.6', self._version) < 0:
        def stream_result():
            self._raise_for_status(response)
            for line in response.iter_lines(chunk_size=1,
                                            decode_unicode=True):
                # filter out keep-alive new lines
                if line:
                    yield line

        return stream_result() if stream else \
            self._result(response, binary=True)

    sep = bytes() if six.PY3 else str()
    if stream:
        return self._multiplexed_socket_stream_helper(response)
    return sep.join(
        [x for x in self._multiplexed_buffer_helper(response)]
    )
def attach_socket(self, container, params=None, ws=False):
    """Return a raw socket (or a websocket when ``ws``) attached to
    *container*'s stdio streams."""
    if params is None:
        params = {
            'stdout': 1,
            'stderr': 1,
            'stream': 1
        }

    if ws:
        return self._attach_websocket(container, params)

    if isinstance(container, dict):
        container = container.get('Id')

    u = self._url("/containers/{0}/attach".format(container))
    return self._get_raw_response_socket(self.post(
        u, None, params=self._attach_params(params), stream=True))
def build(self, path=None, tag=None, quiet=False, fileobj=None,
          nocache=False, rm=False, stream=False, timeout=None,
          custom_context=False, encoding=None):
    """Build an image from a directory, tar context, file object, or
    remote URL.

    Returns a stream of build output when streaming (forced for
    API >= 1.8), otherwise an ``(image_id_or_None, output)`` tuple.
    """
    remote = context = headers = None
    if path is None and fileobj is None:
        raise TypeError("Either path or fileobj needs to be provided.")

    if custom_context:
        if not fileobj:
            raise TypeError("You must specify fileobj with custom_context")
        context = fileobj
    elif fileobj is not None:
        # Wrap a lone Dockerfile-like object into a minimal tar context.
        context = utils.mkbuildcontext(fileobj)
    elif path.startswith(('http://', 'https://',
                          'git://', 'github.com/')):
        remote = path
    else:
        context = utils.tar(path)

    if utils.compare_version('1.8', self._version) >= 0:
        # API >= 1.8 always streams build output.
        stream = True

    u = self._url('/build')
    params = {
        't': tag,
        'remote': remote,
        'q': quiet,
        'nocache': nocache,
        'rm': rm
    }
    if context is not None:
        headers = {'Content-Type': 'application/tar'}
        if encoding:
            headers['Content-Encoding'] = encoding

    if utils.compare_version('1.9', self._version) >= 0:
        # If we don't have any auth data so far, try reloading the config
        # file one more time in case anything showed up in there.
        if not self._auth_configs:
            self._auth_configs = auth.load_config()

        # Send the full auth configuration (if any exists), since the build
        # could use any (or all) of the registries.
        if self._auth_configs:
            headers['X-Registry-Config'] = auth.encode_full_header(
                self._auth_configs
            )

    response = self._post(
        u,
        data=context,
        params=params,
        headers=headers,
        stream=stream,
        timeout=timeout,
    )

    if context is not None:
        context.close()

    if stream:
        return self._stream_helper(response)
    else:
        output = self._result(response)
        # Extract the new image id from the final "Successfully built" line.
        srch = r'Successfully built ([0-9a-f]+)'
        match = re.search(srch, output)
        if not match:
            return None, output
        return match.group(1), output
def commit(self, container, repository=None, tag=None, message=None,
           author=None, conf=None):
    """Commit a container's filesystem into a new image, returning the
    daemon's decoded JSON reply."""
    query = {
        'container': container,
        'repo': repository,
        'tag': tag,
        'comment': message,
        'author': author,
    }
    endpoint = self._url("/commit")
    response = self._post_json(endpoint, data=conf, params=query)
    return self._result(response, json=True)
def containers(self, quiet=False, all=False, trunc=True, latest=False,
               since=None, before=None, limit=-1, size=False):
    """List containers, optionally reduced to ``{'Id': ...}`` dicts when
    *quiet* is requested."""
    query = {
        # `latest` wins over any explicit limit.
        'limit': 1 if latest else limit,
        'all': 1 if all else 0,
        'size': 1 if size else 0,
        'trunc_cmd': 1 if trunc else 0,
        'since': since,
        'before': before,
    }
    endpoint = self._url("/containers/json")
    listing = self._result(self._get(endpoint, params=query), True)
    if quiet:
        return [{'Id': entry['Id']} for entry in listing]
    return listing
def copy(self, container, resource):
    """Retrieve *resource* (a path inside *container*) as a raw
    streamed response body (tar archive per the daemon's /copy API)."""
    if isinstance(container, dict):
        container = container.get('Id')
    res = self._post_json(
        self._url("/containers/{0}/copy".format(container)),
        data={"Resource": resource},
        stream=True
    )
    self._raise_for_status(res)
    return res.raw
    def create_container(self, image, command=None, hostname=None, user=None,
                         detach=False, stdin_open=False, tty=False,
                         mem_limit=0, ports=None, environment=None, dns=None,
                         volumes=None, volumes_from=None,
                         network_disabled=False, name=None, entrypoint=None,
                         cpu_shares=None, working_dir=None, domainname=None,
                         memswap_limit=0):
        """Create (but do not start) a container from ``image``.

        Every argument except ``name`` is forwarded positionally to
        ``self._container_config`` — the order below must stay in sync
        with that helper's signature.  ``name`` is passed separately as a
        query parameter of the create call.
        """
        config = self._container_config(
            image, command, hostname, user, detach, stdin_open, tty, mem_limit,
            ports, environment, dns, volumes, volumes_from, network_disabled,
            entrypoint, cpu_shares, working_dir, domainname, memswap_limit
        )
        return self.create_container_from_config(config, name)
def create_container_from_config(self, config, name=None):
u = self._url("/containers/create")
params = {
'name': name
}
res = self._post_json(u, data=config, params=params)
return self._result(res, True)
def diff(self, container):
if isinstance(container, dict):
container = container.get('Id')
return self._result(self._get(self._url("/containers/{0}/changes".
format(container))), True)
def events(self):
return self._stream_helper(self.get(self._url('/events'), stream=True))
def export(self, container):
if isinstance(container, dict):
container = container.get('Id')
res = self._get(self._url("/containers/{0}/export".format(container)),
stream=True)
self._raise_for_status(res)
return res.raw
def get_image(self, image):
res = self._get(self._url("/images/{0}/get".format(image)),
stream=True)
self._raise_for_status(res)
return res.raw
def history(self, image):
res = self._get(self._url("/images/{0}/history".format(image)))
self._raise_for_status(res)
return self._result(res)
def images(self, name=None, quiet=False, all=False, viz=False):
if viz:
if utils.compare_version('1.7', self._version) >= 0:
raise Exception('Viz output is not supported in API >= 1.7!')
return self._result(self._get(self._url("images/viz")))
params = {
'filter': name,
'only_ids': 1 if quiet else 0,
'all': 1 if all else 0,
}
res = self._result(self._get(self._url("/images/json"), params=params),
True)
if quiet:
return [x['Id'] for x in res]
return res
def import_image(self, src=None, repository=None, tag=None, image=None):
u = self._url("/images/create")
params = {
'repo': repository,
'tag': tag
}
if src:
try:
# XXX: this is ways not optimal but the only way
# for now to import tarballs through the API
fic = open(src)
data = fic.read()
fic.close()
src = "-"
except IOError:
# file does not exists or not a file (URL)
data = None
if isinstance(src, six.string_types):
params['fromSrc'] = src
return self._result(self._post(u, data=data, params=params))
return self._result(self._post(u, data=src, params=params))
if image:
params['fromImage'] = image
return self._result(self._post(u, data=None, params=params))
raise Exception("Must specify a src or image")
def info(self):
return self._result(self._get(self._url("/info")),
True)
def insert(self, image, url, path):
if utils.compare_version('1.12', self._version) >= 0:
raise errors.DeprecatedMethod(
'insert is not available for API version >=1.12'
)
api_url = self._url("/images/" + image + "/insert")
params = {
'url': url,
'path': path
}
return self._result(self._post(api_url, params=params))
def inspect_container(self, container):
if isinstance(container, dict):
container = container.get('Id')
return self._result(
self._get(self._url("/containers/{0}/json".format(container))),
True)
def inspect_image(self, image_id):
return self._result(
self._get(self._url("/images/{0}/json".format(image_id))),
True
)
def kill(self, container, signal=None):
if isinstance(container, dict):
container = container.get('Id')
url = self._url("/containers/{0}/kill".format(container))
params = {}
if signal is not None:
params['signal'] = signal
res = self._post(url, params=params)
self._raise_for_status(res)
def load_image(self, data):
res = self._post(self._url("/images/load"), data=data)
self._raise_for_status(res)
    def login(self, username, password=None, email=None, registry=None,
              reauth=False):
        """Authenticate against ``registry`` (the public index when None).

        Returns the cached auth config when one already exists for this
        username/registry pair (unless ``reauth`` forces a round trip);
        otherwise POSTs the credentials to /auth and, on HTTP 200,
        caches them for later pushes/pulls.
        """
        # If we don't have any auth data so far, try reloading the config file
        # one more time in case anything showed up in there.
        if not self._auth_configs:
            self._auth_configs = auth.load_config()
        registry = registry or auth.INDEX_URL
        authcfg = auth.resolve_authconfig(self._auth_configs, registry)
        # If we found an existing auth config for this registry and username
        # combination, we can return it immediately unless reauth is requested.
        if authcfg and authcfg.get('username', None) == username \
                and not reauth:
            return authcfg
        req_data = {
            'username': username,
            'password': password,
            'email': email,
            'serveraddress': registry,
        }
        response = self._post_json(self._url('/auth'), data=req_data)
        if response.status_code == 200:
            # Only cache credentials the daemon actually accepted.
            self._auth_configs[registry] = req_data
        return self._result(response, json=True)
    def logs(self, container, stdout=True, stderr=True, stream=False,
             timestamps=False):
        """Fetch a container's logs.

        On API >= 1.11 this hits /containers/<id>/logs: with ``stream``
        it returns a demultiplexed generator, otherwise the fully
        buffered output (bytes on Python 3, str on Python 2).  Older
        daemons have no /logs endpoint, so we fall back to
        attach(logs=True), which replays the existing output.
        """
        if isinstance(container, dict):
            container = container.get('Id')
        if utils.compare_version('1.11', self._version) >= 0:
            params = {'stderr': stderr and 1 or 0,
                      'stdout': stdout and 1 or 0,
                      'timestamps': timestamps and 1 or 0,
                      'follow': stream and 1 or 0}
            url = self._url("/containers/{0}/logs".format(container))
            res = self._get(url, params=params, stream=stream)
            if stream:
                return self._multiplexed_socket_stream_helper(res)
            elif six.PY3:
                return bytes().join(
                    [x for x in self._multiplexed_buffer_helper(res)]
                )
            else:
                return str().join(
                    [x for x in self._multiplexed_buffer_helper(res)]
                )
        return self.attach(
            container,
            stdout=stdout,
            stderr=stderr,
            stream=stream,
            logs=True
        )
def ping(self):
return self._result(self._get(self._url('/_ping')))
def port(self, container, private_port):
if isinstance(container, dict):
container = container.get('Id')
res = self._get(self._url("/containers/{0}/json".format(container)))
self._raise_for_status(res)
json_ = res.json()
s_port = str(private_port)
h_ports = None
h_ports = json_['NetworkSettings']['Ports'].get(s_port + '/udp')
if h_ports is None:
h_ports = json_['NetworkSettings']['Ports'].get(s_port + '/tcp')
return h_ports
    def pull(self, repository, tag=None, stream=False):
        """Pull an image (POST /images/create).

        When no explicit ``tag`` is given it is split off the repository
        reference.  On API >= 1.5 any known credentials for the image's
        registry are sent in the X-Registry-Auth header; missing
        credentials are not an error (anonymous pulls are allowed).
        """
        if not tag:
            repository, tag = utils.parse_repository_tag(repository)
        registry, repo_name = auth.resolve_repository_name(repository)
        # NOTE(review): this second split assumes a remaining ':' in the
        # registry-stripped name is a tag separator — confirm against
        # parse_repository_tag for registries with ports.
        if repo_name.count(":") == 1:
            repository, tag = repository.rsplit(":", 1)
        params = {
            'tag': tag,
            'fromImage': repository
        }
        headers = {}
        if utils.compare_version('1.5', self._version) >= 0:
            # If we don't have any auth data so far, try reloading the config
            # file one more time in case anything showed up in there.
            if not self._auth_configs:
                self._auth_configs = auth.load_config()
            authcfg = auth.resolve_authconfig(self._auth_configs, registry)
            # Do not fail here if no authentication exists for this specific
            # registry as we can have a readonly pull. Just put the header if
            # we can.
            if authcfg:
                headers['X-Registry-Auth'] = auth.encode_header(authcfg)
        response = self._post(self._url('/images/create'), params=params,
                              headers=headers, stream=stream, timeout=None)
        if stream:
            return self._stream_helper(response)
        else:
            return self._result(response)
    def push(self, repository, stream=False):
        """Push ``repository`` to its registry (POST /images/<name>/push).

        On API >= 1.5 known credentials for the registry are forwarded
        via the X-Registry-Auth header; missing credentials are not an
        error.  With ``stream`` the daemon's progress output is returned
        as a generator, otherwise buffered.
        """
        registry, repo_name = auth.resolve_repository_name(repository)
        u = self._url("/images/{0}/push".format(repository))
        headers = {}
        if utils.compare_version('1.5', self._version) >= 0:
            # If we don't have any auth data so far, try reloading the config
            # file one more time in case anything showed up in there.
            if not self._auth_configs:
                self._auth_configs = auth.load_config()
            authcfg = auth.resolve_authconfig(self._auth_configs, registry)
            # Do not fail here if no authentication exists for this specific
            # registry as we can have a readonly pull. Just put the header if
            # we can.
            if authcfg:
                headers['X-Registry-Auth'] = auth.encode_header(authcfg)
            response = self._post_json(u, None, headers=headers, stream=stream)
        else:
            response = self._post_json(u, None, stream=stream)
        return stream and self._stream_helper(response) \
            or self._result(response)
def remove_container(self, container, v=False, link=False, force=False):
if isinstance(container, dict):
container = container.get('Id')
params = {'v': v, 'link': link, 'force': force}
res = self._delete(self._url("/containers/" + container),
params=params)
self._raise_for_status(res)
def remove_image(self, image, force=False, noprune=False):
params = {'force': force, 'noprune': noprune}
res = self._delete(self._url("/images/" + image), params=params)
self._raise_for_status(res)
def restart(self, container, timeout=10):
if isinstance(container, dict):
container = container.get('Id')
params = {'t': timeout}
url = self._url("/containers/{0}/restart".format(container))
res = self._post(url, params=params)
self._raise_for_status(res)
def search(self, term):
return self._result(self._get(self._url("/images/search"),
params={'term': term}),
True)
    def start(self, container, binds=None, port_bindings=None, lxc_conf=None,
              publish_all_ports=False, links=None, privileged=False,
              dns=None, dns_search=None, volumes_from=None, network_mode=None):
        """Start a created container (POST /containers/<id>/start).

        Builds the HostConfig-style start payload from the keyword
        arguments.  ``dns`` and ``volumes_from`` require API >= 1.10; on
        older daemons they are dropped with a DeprecationWarning.
        """
        if isinstance(container, dict):
            container = container.get('Id')
        # The daemon expects LxcConf as a list of {Key, Value} pairs, not
        # a mapping.
        if isinstance(lxc_conf, dict):
            formatted = []
            for k, v in six.iteritems(lxc_conf):
                formatted.append({'Key': k, 'Value': str(v)})
            lxc_conf = formatted
        start_config = {
            'LxcConf': lxc_conf
        }
        if binds:
            start_config['Binds'] = utils.convert_volume_binds(binds)
        if port_bindings:
            start_config['PortBindings'] = utils.convert_port_bindings(
                port_bindings
            )
        start_config['PublishAllPorts'] = publish_all_ports
        if links:
            if isinstance(links, dict):
                links = six.iteritems(links)
            # Sorted so the payload ordering is deterministic.
            formatted_links = [
                '{0}:{1}'.format(k, v) for k, v in sorted(links)
            ]
            start_config['Links'] = formatted_links
        start_config['Privileged'] = privileged
        if utils.compare_version('1.10', self._version) >= 0:
            if dns is not None:
                start_config['Dns'] = dns
            if volumes_from is not None:
                # Accept either a list or a comma-separated string.
                if isinstance(volumes_from, six.string_types):
                    volumes_from = volumes_from.split(',')
                start_config['VolumesFrom'] = volumes_from
        else:
            warning_message = ('{0!r} parameter is discarded. It is only'
                               ' available for API version greater or equal'
                               ' than 1.10')
            if dns is not None:
                warnings.warn(warning_message.format('dns'),
                              DeprecationWarning)
            if volumes_from is not None:
                warnings.warn(warning_message.format('volumes_from'),
                              DeprecationWarning)
        if dns_search:
            start_config['DnsSearch'] = dns_search
        if network_mode:
            start_config['NetworkMode'] = network_mode
        url = self._url("/containers/{0}/start".format(container))
        res = self._post_json(url, data=start_config)
        self._raise_for_status(res)
def resize(self, container, height, width):
if isinstance(container, dict):
container = container.get('Id')
params = {'h': height, 'w': width}
url = self._url("/containers/{0}/resize".format(container))
res = self._post(url, params=params)
self._raise_for_status(res)
    def stop(self, container, timeout=10):
        """Stop a container, allowing it ``timeout`` seconds to exit
        before the daemon kills it (POST /containers/<id>/stop).
        """
        if isinstance(container, dict):
            container = container.get('Id')
        params = {'t': timeout}
        url = self._url("/containers/{0}/stop".format(container))
        # The HTTP timeout must outlast the daemon-side grace period or
        # the request would abort before the container finishes stopping.
        # NOTE(review): assumes self._timeout is comparable with an int
        # here — confirm it is always numeric in __init__.
        res = self._post(url, params=params,
                         timeout=max(timeout, self._timeout))
        self._raise_for_status(res)
def tag(self, image, repository, tag=None, force=False):
params = {
'tag': tag,
'repo': repository,
'force': 1 if force else 0
}
url = self._url("/images/{0}/tag".format(image))
res = self._post(url, params=params)
self._raise_for_status(res)
return res.status_code == 201
def top(self, container):
u = self._url("/containers/{0}/top".format(container))
return self._result(self._get(u), True)
def version(self):
return self._result(self._get(self._url("/version")), True)
def wait(self, container):
if isinstance(container, dict):
container = container.get('Id')
url = self._url("/containers/{0}/wait".format(container))
res = self._post(url, timeout=None)
self._raise_for_status(res)
json_ = res.json()
if 'StatusCode' in json_:
return json_['StatusCode']
return -1
| apache-2.0 |
harunurhan/presto | presto-main/src/main/java/com/facebook/presto/sql/planner/PlanOptimizersFactory.java | 7768 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.planner;
import com.facebook.presto.metadata.Metadata;
import com.facebook.presto.sql.analyzer.FeaturesConfig;
import com.facebook.presto.sql.parser.SqlParser;
import com.facebook.presto.sql.planner.optimizations.AddExchanges;
import com.facebook.presto.sql.planner.optimizations.AddLocalExchanges;
import com.facebook.presto.sql.planner.optimizations.BeginTableWrite;
import com.facebook.presto.sql.planner.optimizations.CanonicalizeExpressions;
import com.facebook.presto.sql.planner.optimizations.CountConstantOptimizer;
import com.facebook.presto.sql.planner.optimizations.DesugaringOptimizer;
import com.facebook.presto.sql.planner.optimizations.EmptyDeleteOptimizer;
import com.facebook.presto.sql.planner.optimizations.HashGenerationOptimizer;
import com.facebook.presto.sql.planner.optimizations.ImplementIntersectAsUnion;
import com.facebook.presto.sql.planner.optimizations.ImplementSampleAsFilter;
import com.facebook.presto.sql.planner.optimizations.IndexJoinOptimizer;
import com.facebook.presto.sql.planner.optimizations.LimitPushDown;
import com.facebook.presto.sql.planner.optimizations.MergeProjections;
import com.facebook.presto.sql.planner.optimizations.MetadataDeleteOptimizer;
import com.facebook.presto.sql.planner.optimizations.MetadataQueryOptimizer;
import com.facebook.presto.sql.planner.optimizations.PickLayout;
import com.facebook.presto.sql.planner.optimizations.PlanOptimizer;
import com.facebook.presto.sql.planner.optimizations.PredicatePushDown;
import com.facebook.presto.sql.planner.optimizations.ProjectionPushDown;
import com.facebook.presto.sql.planner.optimizations.PruneIdentityProjections;
import com.facebook.presto.sql.planner.optimizations.PruneUnreferencedOutputs;
import com.facebook.presto.sql.planner.optimizations.PushTableWriteThroughUnion;
import com.facebook.presto.sql.planner.optimizations.SetFlatteningOptimizer;
import com.facebook.presto.sql.planner.optimizations.SimplifyExpressions;
import com.facebook.presto.sql.planner.optimizations.SingleDistinctOptimizer;
import com.facebook.presto.sql.planner.optimizations.TransformUncorrelatedInPredicateSubqueryToSemiJoin;
import com.facebook.presto.sql.planner.optimizations.TransformUncorrelatedScalarToJoin;
import com.facebook.presto.sql.planner.optimizations.UnaliasSymbolReferences;
import com.facebook.presto.sql.planner.optimizations.WindowFilterPushDown;
import com.google.common.collect.ImmutableList;
import com.google.inject.Inject;
import javax.inject.Provider;
import java.util.List;
/**
 * Guice {@link Provider} that assembles the ordered list of
 * {@link PlanOptimizer}s applied to every query plan.
 *
 * <p>Ordering is significant: the inline comments on individual passes
 * document the constraints between them.  The list is built once in the
 * constructor and is immutable thereafter.
 */
public class PlanOptimizersFactory
        implements Provider<List<PlanOptimizer>>
{
    // Immutable pipeline, built once in the constructor.
    private final List<PlanOptimizer> optimizers;
    @Inject
    public PlanOptimizersFactory(Metadata metadata, SqlParser sqlParser, FeaturesConfig featuresConfig)
    {
        this(metadata, sqlParser, featuresConfig, false);
    }
    // forceSingleNode skips the distributed-exchange passes (used when the
    // plan will execute on a single node).
    public PlanOptimizersFactory(Metadata metadata, SqlParser sqlParser, FeaturesConfig featuresConfig, boolean forceSingleNode)
    {
        ImmutableList.Builder<PlanOptimizer> builder = ImmutableList.builder();
        builder.add(new DesugaringOptimizer(metadata, sqlParser), // Clean up all the sugar in expressions, e.g. AtTimeZone, must be run before all the other optimizers
                new TransformUncorrelatedScalarToJoin(),
                new TransformUncorrelatedInPredicateSubqueryToSemiJoin(),
                new ImplementSampleAsFilter(),
                new CanonicalizeExpressions(),
                new SimplifyExpressions(metadata, sqlParser),
                new UnaliasSymbolReferences(),
                new PruneIdentityProjections(),
                new SetFlatteningOptimizer(),
                new ImplementIntersectAsUnion(),
                new LimitPushDown(), // Run the LimitPushDown after flattening set operators to make it easier to do the set flattening
                new PredicatePushDown(metadata, sqlParser),
                new MergeProjections(),
                new SimplifyExpressions(metadata, sqlParser), // Re-run the SimplifyExpressions to simplify any recomposed expressions from other optimizations
                new ProjectionPushDown(),
                new UnaliasSymbolReferences(), // Run again because predicate pushdown and projection pushdown might add more projections
                new PruneUnreferencedOutputs(), // Make sure to run this before index join. Filtered projections may not have all the columns.
                new IndexJoinOptimizer(metadata), // Run this after projections and filters have been fully simplified and pushed down
                new CountConstantOptimizer(),
                new WindowFilterPushDown(metadata), // This must run after PredicatePushDown and LimitPushDown so that it squashes any successive filter nodes and limits
                new MergeProjections(),
                new PruneUnreferencedOutputs(), // Make sure to run this at the end to help clean the plan for logging/execution and not remove info that other optimizers might need at an earlier point
                new PruneIdentityProjections(), // This MUST run after PruneUnreferencedOutputs as it may introduce new redundant projections
                new MetadataQueryOptimizer(metadata));
        if (featuresConfig.isOptimizeSingleDistinct()) {
            builder.add(new SingleDistinctOptimizer());
            builder.add(new PruneUnreferencedOutputs());
        }
        if (!forceSingleNode) {
            builder.add(new PushTableWriteThroughUnion()); // Must run before AddExchanges
            builder.add(new AddExchanges(metadata, sqlParser));
        }
        builder.add(new PickLayout(metadata));
        builder.add(new EmptyDeleteOptimizer()); // Run after table scan is removed by PickLayout
        builder.add(new PredicatePushDown(metadata, sqlParser)); // Run predicate push down one more time in case we can leverage new information from layouts' effective predicate
        builder.add(new ProjectionPushDown());
        builder.add(new MergeProjections());
        builder.add(new UnaliasSymbolReferences()); // Run unalias after merging projections to simplify projections more efficiently
        builder.add(new PruneUnreferencedOutputs());
        builder.add(new PruneIdentityProjections());
        // Optimizers above this don't understand local exchanges, so be careful moving this.
        builder.add(new AddLocalExchanges(metadata, sqlParser));
        // DO NOT add optimizers that change the plan shape (computations) after this point
        // Precomputed hashes - this assumes that partitioning will not change
        builder.add(new HashGenerationOptimizer());
        builder.add(new MetadataDeleteOptimizer(metadata));
        builder.add(new BeginTableWrite(metadata)); // HACK! see comments in BeginTableWrite
        // TODO: consider adding a formal final plan sanitization optimizer that prepares the plan for transmission/execution/logging
        // TODO: figure out how to improve the set flattening optimizer so that it can run at any point
        this.optimizers = builder.build();
    }
    @Override
    // NOTE(review): synchronized looks unnecessary for reading a final
    // immutable list — confirm before removing.
    public synchronized List<PlanOptimizer> get()
    {
        return optimizers;
    }
}
| apache-2.0 |
wangcy6/storm_app | frame/c++/webrtc-master/video/video_stream_decoder.cc | 5438 | /*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "video/video_stream_decoder.h"
#include <algorithm>
#include <map>
#include <vector>
#include "common_video/include/frame_callback.h"
#include "modules/video_coding/video_coding_impl.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "system_wrappers/include/metrics.h"
#include "video/call_stats.h"
#include "video/payload_router.h"
#include "video/receive_statistics_proxy.h"
namespace webrtc {
// Wires the VCM video receiver to this object's callbacks and configures
// packet-loss protection (NACK/FEC) according to the flags.
VideoStreamDecoder::VideoStreamDecoder(
    vcm::VideoReceiver* video_receiver,
    VCMFrameTypeCallback* vcm_frame_type_callback,
    VCMPacketRequestCallback* vcm_packet_request_callback,
    bool enable_nack,
    bool enable_fec,
    ReceiveStatisticsProxy* receive_statistics_proxy,
    rtc::VideoSinkInterface<VideoFrame>* incoming_video_stream)
    : video_receiver_(video_receiver),
      receive_stats_callback_(receive_statistics_proxy),
      incoming_video_stream_(incoming_video_stream),
      last_rtt_ms_(0) {
  RTC_DCHECK(video_receiver_);
  static const int kMaxPacketAgeToNack = 450;
  static const int kMaxNackListSize = 250;
  video_receiver_->SetNackSettings(kMaxNackListSize,
                                   kMaxPacketAgeToNack, 0);
  video_receiver_->RegisterReceiveCallback(this);
  video_receiver_->RegisterFrameTypeCallback(vcm_frame_type_callback);
  video_receiver_->RegisterReceiveStatisticsCallback(this);
  VCMVideoProtection video_protection =
      enable_nack ? (enable_fec ? kProtectionNackFEC : kProtectionNack)
                  : kProtectionNone;
  // With NACK we wait for complete frames (kNoErrors); without it we
  // decode despite missing packets (kWithErrors).
  VCMDecodeErrorMode decode_error_mode = enable_nack ? kNoErrors : kWithErrors;
  video_receiver_->SetVideoProtection(video_protection, true);
  video_receiver_->SetDecodeErrorMode(decode_error_mode);
  VCMPacketRequestCallback* packet_request_callback =
      enable_nack ? vcm_packet_request_callback : nullptr;
  video_receiver_->RegisterPacketRequestCallback(packet_request_callback);
}
VideoStreamDecoder::~VideoStreamDecoder() {
  // Note: There's an assumption at this point that the decoder thread is
  // *not* running. If it was, then there could be a race for each of these
  // callbacks.
  // Unset all the callback pointers that we set in the ctor.
  video_receiver_->RegisterPacketRequestCallback(nullptr);
  video_receiver_->RegisterReceiveStatisticsCallback(nullptr);
  video_receiver_->RegisterFrameTypeCallback(nullptr);
  video_receiver_->RegisterReceiveCallback(nullptr);
}
// Do not acquire the lock of |video_receiver_| in this function. Decode
// callback won't necessarily be called from the decoding thread. The decoding
// thread may have held the lock when calling VideoDecoder::Decode, Reset, or
// Release. Acquiring the same lock in the path of decode callback can deadlock.
int32_t VideoStreamDecoder::FrameToRender(VideoFrame& video_frame,
                                          rtc::Optional<uint8_t> qp,
                                          VideoContentType content_type) {
  receive_stats_callback_->OnDecodedFrame(qp, content_type);
  incoming_video_stream_->OnFrame(video_frame);
  return 0;
}
int32_t VideoStreamDecoder::ReceivedDecodedReferenceFrame(
    const uint64_t picture_id) {
  // Intentionally unimplemented: this path is never expected here.
  RTC_NOTREACHED();
  return 0;
}
// The callbacks below forward VCM notifications to the stats proxy.
void VideoStreamDecoder::OnIncomingPayloadType(int payload_type) {
  receive_stats_callback_->OnIncomingPayloadType(payload_type);
}
void VideoStreamDecoder::OnDecoderImplementationName(
    const char* implementation_name) {
  receive_stats_callback_->OnDecoderImplementationName(implementation_name);
}
void VideoStreamDecoder::OnReceiveRatesUpdated(uint32_t bit_rate,
                                               uint32_t frame_rate) {
  receive_stats_callback_->OnIncomingRate(frame_rate, bit_rate);
}
void VideoStreamDecoder::OnDiscardedPacketsUpdated(int discarded_packets) {
  receive_stats_callback_->OnDiscardedPacketsUpdated(discarded_packets);
}
void VideoStreamDecoder::OnFrameCountsUpdated(const FrameCounts& frame_counts) {
  receive_stats_callback_->OnFrameCountsUpdated(frame_counts);
}
// Empty on purpose: timing/complete-frame notifications are ignored here.
void VideoStreamDecoder::OnFrameBufferTimingsUpdated(int decode_ms,
                                                     int max_decode_ms,
                                                     int current_delay_ms,
                                                     int target_delay_ms,
                                                     int jitter_buffer_ms,
                                                     int min_playout_delay_ms,
                                                     int render_delay_ms) {}
void VideoStreamDecoder::OnTimingFrameInfoUpdated(const TimingFrameInfo& info) {
}
void VideoStreamDecoder::OnCompleteFrame(bool is_keyframe,
                                         size_t size_bytes,
                                         VideoContentType content_type) {}
void VideoStreamDecoder::OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) {
  video_receiver_->SetReceiveChannelParameters(max_rtt_ms);
  rtc::CritScope lock(&crit_);
  last_rtt_ms_ = avg_rtt_ms;
}
}  // namespace webrtc
| apache-2.0 |
P1start/rust | src/libsyntax/ext/tt/macro_rules.rs | 10609 | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use ast::{Ident, Matcher_, Matcher, MatchTok, MatchNonterminal, MatchSeq, TTDelim};
use ast;
use codemap::{Span, Spanned, DUMMY_SP};
use ext::base::{ExtCtxt, MacResult, MacroDef};
use ext::base::{NormalTT, TTMacroExpander};
use ext::tt::macro_parser::{Success, Error, Failure};
use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
use ext::tt::macro_parser::{parse, parse_or_else};
use parse::lexer::new_tt_reader;
use parse::parser::Parser;
use parse::attr::ParserAttr;
use parse::token::{special_idents, gensym_ident};
use parse::token::{FAT_ARROW, SEMI, NtMatchers, NtTT, EOF};
use parse::token;
use print;
use ptr::P;
use util::small_vector::SmallVector;
use std::cell::RefCell;
use std::rc::Rc;
/// Wraps a `Parser` primed with a macro's expanded token stream so the
/// expansion can be parsed on demand as whichever AST fragment the call
/// site expects (see the `MacResult` impl below).
struct ParserAnyMacro<'a> {
    parser: RefCell<Parser<'a>>,
}
impl<'a> ParserAnyMacro<'a> {
    /// Make sure we don't have any tokens left to parse, so we don't
    /// silently drop anything. `allow_semi` is so that "optional"
    /// semicolons at the end of normal expressions aren't complained
    /// about e.g. the semicolon in `macro_rules! kapow( () => {
    /// fail!(); } )` doesn't get picked up by .parse_expr(), but it's
    /// allowed to be there.
    fn ensure_complete_parse(&self, allow_semi: bool) {
        let mut parser = self.parser.borrow_mut();
        if allow_semi && parser.token == SEMI {
            parser.bump()
        }
        if parser.token != EOF {
            let token_str = parser.this_token_to_string();
            let msg = format!("macro expansion ignores token `{}` and any \
                               following",
                              token_str);
            let span = parser.span;
            parser.span_err(span, msg.as_slice());
        }
    }
}
/// Each `make_*` method drives the wrapped parser to produce the AST
/// fragment the expansion site needs, then verifies no tokens were
/// silently dropped.
impl<'a> MacResult for ParserAnyMacro<'a> {
    fn make_expr(self: Box<ParserAnyMacro<'a>>) -> Option<P<ast::Expr>> {
        let ret = self.parser.borrow_mut().parse_expr();
        // A trailing semicolon is tolerated after an expression.
        self.ensure_complete_parse(true);
        Some(ret)
    }
    fn make_pat(self: Box<ParserAnyMacro<'a>>) -> Option<P<ast::Pat>> {
        let ret = self.parser.borrow_mut().parse_pat();
        self.ensure_complete_parse(false);
        Some(ret)
    }
    fn make_items(self: Box<ParserAnyMacro<'a>>) -> Option<SmallVector<P<ast::Item>>> {
        let mut ret = SmallVector::zero();
        loop {
            let mut parser = self.parser.borrow_mut();
            // so... do outer attributes attached to the macro invocation
            // just disappear? This question applies to make_methods, as
            // well.
            match parser.parse_item_with_outer_attributes() {
                Some(item) => ret.push(item),
                None => break
            }
        }
        self.ensure_complete_parse(false);
        Some(ret)
    }
    fn make_methods(self: Box<ParserAnyMacro<'a>>) -> Option<SmallVector<P<ast::Method>>> {
        let mut ret = SmallVector::zero();
        loop {
            let mut parser = self.parser.borrow_mut();
            match parser.token {
                EOF => break,
                _ => {
                    let attrs = parser.parse_outer_attributes();
                    ret.push(parser.parse_method(attrs, ast::Inherited))
                }
            }
        }
        self.ensure_complete_parse(false);
        Some(ret)
    }
    fn make_stmt(self: Box<ParserAnyMacro<'a>>) -> Option<P<ast::Stmt>> {
        let attrs = self.parser.borrow_mut().parse_outer_attributes();
        let ret = self.parser.borrow_mut().parse_stmt(attrs);
        self.ensure_complete_parse(true);
        Some(ret)
    }
}
/// The expander stored for a `macro_rules!`-defined macro: its name plus
/// the parsed left-hand-side matchers and right-hand-side transcribers.
struct MacroRulesMacroExpander {
    name: Ident,
    lhses: Vec<Rc<NamedMatch>>,
    rhses: Vec<Rc<NamedMatch>>,
}
impl TTMacroExpander for MacroRulesMacroExpander {
    // Delegates every invocation to `generic_extension`, which tries each
    // lhs arm against `arg` in turn.
    fn expand<'cx>(&self,
                   cx: &'cx mut ExtCtxt,
                   sp: Span,
                   arg: &[ast::TokenTree])
                   -> Box<MacResult+'cx> {
        generic_extension(cx,
                          sp,
                          self.name,
                          arg,
                          self.lhses.as_slice(),
                          self.rhses.as_slice())
    }
}
/// One-shot carrier for a freshly parsed macro definition; `make_def`
/// hands the definition out exactly once.
struct MacroRulesDefiner {
    def: Option<MacroDef>
}
impl MacResult for MacroRulesDefiner {
    fn make_def(&mut self) -> Option<MacroDef> {
        // Panics if called twice: the definition is moved out on first use.
        Some(self.def.take().expect("empty MacroRulesDefiner"))
    }
}
/// Given `lhses` and `rhses`, this is the new macro we create
///
/// Tries each lhs matcher against `arg` in order; on the first success,
/// builds a transcribing reader over the corresponding rhs and returns a
/// `ParserAnyMacro` that parses the expansion lazily.
fn generic_extension<'cx>(cx: &'cx ExtCtxt,
                          sp: Span,
                          name: Ident,
                          arg: &[ast::TokenTree],
                          lhses: &[Rc<NamedMatch>],
                          rhses: &[Rc<NamedMatch>])
                          -> Box<MacResult+'cx> {
    if cx.trace_macros() {
        println!("{}! {} {} {}",
                 token::get_ident(name),
                 "{",
                 print::pprust::tt_to_string(&TTDelim(Rc::new(arg.iter()
                                                                .map(|x| (*x).clone())
                                                                .collect()))),
                 "}");
    }
    // Which arm's failure should we report? (the one furthest along)
    let mut best_fail_spot = DUMMY_SP;
    let mut best_fail_msg = "internal error: ran no matchers".to_string();
    for (i, lhs) in lhses.iter().enumerate() { // try each arm's matchers
        match **lhs {
            MatchedNonterminal(NtMatchers(ref mtcs)) => {
                // `None` is because we're not interpolating
                let arg_rdr = new_tt_reader(&cx.parse_sess().span_diagnostic,
                                            None,
                                            arg.iter()
                                               .map(|x| (*x).clone())
                                               .collect());
                match parse(cx.parse_sess(), cx.cfg(), arg_rdr, mtcs.as_slice()) {
                    Success(named_matches) => {
                        let rhs = match *rhses[i] {
                            // okay, what's your transcriber?
                            MatchedNonterminal(NtTT(ref tt)) => {
                                match **tt {
                                    // cut off delimiters; don't parse 'em
                                    TTDelim(ref tts) => {
                                        (*tts).slice(1u,(*tts).len()-1u)
                                              .iter()
                                              .map(|x| (*x).clone())
                                              .collect()
                                    }
                                    _ => cx.span_fatal(
                                        sp, "macro rhs must be delimited")
                                }
                            },
                            _ => cx.span_bug(sp, "bad thing in rhs")
                        };
                        // rhs has holes ( `$id` and `$(...)` that need filled)
                        let trncbr = new_tt_reader(&cx.parse_sess().span_diagnostic,
                                                   Some(named_matches),
                                                   rhs);
                        let p = Parser::new(cx.parse_sess(), cx.cfg(), box trncbr);
                        // Let the context choose how to interpret the result.
                        // Weird, but useful for X-macros.
                        return box ParserAnyMacro {
                            parser: RefCell::new(p),
                        } as Box<MacResult+'cx>
                    }
                    Failure(sp, ref msg) => if sp.lo >= best_fail_spot.lo {
                        best_fail_spot = sp;
                        best_fail_msg = (*msg).clone();
                    },
                    Error(sp, ref msg) => cx.span_fatal(sp, msg.as_slice())
                }
            }
            _ => cx.bug("non-matcher found in parsed lhses")
        }
    }
    // No arm matched: report the failure that got furthest into the input.
    cx.span_fatal(best_fail_spot, best_fail_msg.as_slice());
}
/// This procedure performs the expansion of the
/// macro_rules! macro. It parses the RHS and adds
/// an extension to the current context.
pub fn add_new_extension<'cx>(cx: &'cx mut ExtCtxt,
                              sp: Span,
                              name: Ident,
                              arg: Vec<ast::TokenTree> )
                              -> Box<MacResult+'cx> {
    // these spans won't matter, anyways
    fn ms(m: Matcher_) -> Matcher {
        Spanned {
            node: m.clone(),
            span: DUMMY_SP
        }
    }
    let lhs_nm = gensym_ident("lhs");
    let rhs_nm = gensym_ident("rhs");
    // The pattern that macro_rules matches.
    // The grammar for macro_rules! is:
    // $( $lhs:mtcs => $rhs:tt );+
    // ...quasiquoting this would be nice.
    let argument_gram = vec!(
        ms(MatchSeq(vec!(
            ms(MatchNonterminal(lhs_nm, special_idents::matchers, 0u)),
            ms(MatchTok(FAT_ARROW)),
            ms(MatchNonterminal(rhs_nm, special_idents::tt, 1u))), Some(SEMI), false, 0u, 2u)),
        //to phase into semicolon-termination instead of
        //semicolon-separation
        ms(MatchSeq(vec!(ms(MatchTok(SEMI))), None, true, 2u, 2u)));
    // Parse the macro_rules! invocation (`none` is for no interpolations):
    let arg_reader = new_tt_reader(&cx.parse_sess().span_diagnostic,
                                   None,
                                   arg.clone());
    let argument_map = parse_or_else(cx.parse_sess(),
                                     cx.cfg(),
                                     arg_reader,
                                     argument_gram);
    // Extract the arguments:
    let lhses = match **argument_map.get(&lhs_nm) {
        MatchedSeq(ref s, _) => /* FIXME (#2543) */ (*s).clone(),
        _ => cx.span_bug(sp, "wrong-structured lhs")
    };
    let rhses = match **argument_map.get(&rhs_nm) {
        MatchedSeq(ref s, _) => /* FIXME (#2543) */ (*s).clone(),
        _ => cx.span_bug(sp, "wrong-structured rhs")
    };
    let exp = box MacroRulesMacroExpander {
        name: name,
        lhses: lhses,
        rhses: rhses,
    };
    // Hand back a definer so the expansion machinery registers the macro.
    box MacroRulesDefiner {
        def: Some(MacroDef {
            name: token::get_ident(name).to_string(),
            ext: NormalTT(exp, Some(sp))
        })
    } as Box<MacResult+'cx>
}
| apache-2.0 |
devendermishrajio/nova | nova/tests/unit/scheduler/test_filter_scheduler.py | 14803 | # Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests For Filter Scheduler.
"""
import mock
from nova import exception
from nova import objects
from nova.scheduler import filter_scheduler
from nova.scheduler import host_manager
from nova.scheduler import utils as scheduler_utils
from nova.scheduler import weights
from nova import test # noqa
from nova.tests.unit.scheduler import fakes
from nova.tests.unit.scheduler import test_scheduler
def fake_get_filtered_hosts(hosts, filter_properties, index):
return list(hosts)
class FilterSchedulerTestCase(test_scheduler.SchedulerTestCase):
"""Test case for Filter Scheduler."""
driver_cls = filter_scheduler.FilterScheduler
@mock.patch('nova.objects.ServiceList.get_by_binary',
return_value=fakes.SERVICES)
@mock.patch('nova.objects.InstanceList.get_by_host')
@mock.patch('nova.objects.ComputeNodeList.get_all',
return_value=fakes.COMPUTE_NODES)
@mock.patch('nova.db.instance_extra_get_by_instance_uuid',
return_value={'numa_topology': None,
'pci_requests': None})
def test_schedule_happy_day(self, mock_get_extra, mock_get_all,
mock_by_host, mock_get_by_binary):
"""Make sure there's nothing glaringly wrong with _schedule()
by doing a happy day pass through.
"""
self.next_weight = 1.0
def _fake_weigh_objects(_self, functions, hosts, options):
self.next_weight += 2.0
host_state = hosts[0]
return [weights.WeighedHost(host_state, self.next_weight)]
self.stubs.Set(self.driver.host_manager, 'get_filtered_hosts',
fake_get_filtered_hosts)
self.stubs.Set(weights.HostWeightHandler,
'get_weighed_objects', _fake_weigh_objects)
spec_obj = objects.RequestSpec(
num_instances=10,
flavor=objects.Flavor(memory_mb=512,
root_gb=512,
ephemeral_gb=0,
vcpus=1),
project_id=1,
os_type='Linux',
uuid='fake-uuid',
pci_requests=None,
numa_topology=None,
instance_group=None)
self.mox.ReplayAll()
weighed_hosts = self.driver._schedule(self.context, spec_obj)
self.assertEqual(len(weighed_hosts), 10)
for weighed_host in weighed_hosts:
self.assertIsNotNone(weighed_host.obj)
def test_max_attempts(self):
self.flags(scheduler_max_attempts=4)
self.assertEqual(4, scheduler_utils._max_attempts())
def test_invalid_max_attempts(self):
self.flags(scheduler_max_attempts=0)
self.assertRaises(exception.NovaException,
scheduler_utils._max_attempts)
def test_add_retry_host(self):
retry = dict(num_attempts=1, hosts=[])
filter_properties = dict(retry=retry)
host = "fakehost"
node = "fakenode"
scheduler_utils._add_retry_host(filter_properties, host, node)
hosts = filter_properties['retry']['hosts']
self.assertEqual(1, len(hosts))
self.assertEqual([host, node], hosts[0])
def test_post_select_populate(self):
# Test addition of certain filter props after a node is selected.
retry = {'hosts': [], 'num_attempts': 1}
filter_properties = {'retry': retry}
host_state = host_manager.HostState('host', 'node')
host_state.limits['vcpus'] = 5
scheduler_utils.populate_filter_properties(filter_properties,
host_state)
self.assertEqual(['host', 'node'],
filter_properties['retry']['hosts'][0])
self.assertEqual({'vcpus': 5}, host_state.limits)
@mock.patch('nova.objects.ServiceList.get_by_binary',
return_value=fakes.SERVICES)
@mock.patch('nova.objects.InstanceList.get_by_host')
@mock.patch('nova.objects.ComputeNodeList.get_all',
return_value=fakes.COMPUTE_NODES)
@mock.patch('nova.db.instance_extra_get_by_instance_uuid',
return_value={'numa_topology': None,
'pci_requests': None})
def test_schedule_host_pool(self, mock_get_extra, mock_get_all,
mock_by_host, mock_get_by_binary):
"""Make sure the scheduler_host_subset_size property works properly."""
self.flags(scheduler_host_subset_size=2)
self.stubs.Set(self.driver.host_manager, 'get_filtered_hosts',
fake_get_filtered_hosts)
spec_obj = objects.RequestSpec(
num_instances=1,
project_id=1,
os_type='Linux',
uuid='fake-uuid',
flavor=objects.Flavor(root_gb=512,
memory_mb=512,
ephemeral_gb=0,
vcpus=1),
pci_requests=None,
numa_topology=None,
instance_group=None)
self.mox.ReplayAll()
hosts = self.driver._schedule(self.context, spec_obj)
# one host should be chosen
self.assertEqual(len(hosts), 1)
@mock.patch('nova.objects.ServiceList.get_by_binary',
return_value=fakes.SERVICES)
@mock.patch('nova.objects.InstanceList.get_by_host')
@mock.patch('nova.objects.ComputeNodeList.get_all',
return_value=fakes.COMPUTE_NODES)
@mock.patch('nova.db.instance_extra_get_by_instance_uuid',
return_value={'numa_topology': None,
'pci_requests': None})
def test_schedule_large_host_pool(self, mock_get_extra, mock_get_all,
mock_by_host, mock_get_by_binary):
"""Hosts should still be chosen if pool size
is larger than number of filtered hosts.
"""
self.flags(scheduler_host_subset_size=20)
self.stubs.Set(self.driver.host_manager, 'get_filtered_hosts',
fake_get_filtered_hosts)
spec_obj = objects.RequestSpec(
num_instances=1,
project_id=1,
os_type='Linux',
uuid='fake-uuid',
flavor=objects.Flavor(root_gb=512,
memory_mb=512,
ephemeral_gb=0,
vcpus=1),
pci_requests=None,
numa_topology=None,
instance_group=None)
self.mox.ReplayAll()
hosts = self.driver._schedule(self.context, spec_obj)
# one host should be chose
self.assertEqual(len(hosts), 1)
@mock.patch('nova.scheduler.host_manager.HostManager._add_instance_info')
@mock.patch('nova.objects.ServiceList.get_by_binary',
return_value=fakes.SERVICES)
@mock.patch('nova.objects.ComputeNodeList.get_all',
return_value=fakes.COMPUTE_NODES)
@mock.patch('nova.db.instance_extra_get_by_instance_uuid',
return_value={'numa_topology': None,
'pci_requests': None})
def test_schedule_chooses_best_host(self, mock_get_extra, mock_cn_get_all,
mock_get_by_binary,
mock_add_inst_info):
"""If scheduler_host_subset_size is 1, the largest host with greatest
weight should be returned.
"""
self.flags(scheduler_host_subset_size=1)
self.stubs.Set(self.driver.host_manager, 'get_filtered_hosts',
fake_get_filtered_hosts)
self.next_weight = 50
def _fake_weigh_objects(_self, functions, hosts, options):
this_weight = self.next_weight
self.next_weight = 0
host_state = hosts[0]
return [weights.WeighedHost(host_state, this_weight)]
spec_obj = objects.RequestSpec(
num_instances=1,
project_id=1,
os_type='Linux',
uuid='fake-uuid',
flavor=objects.Flavor(root_gb=512,
memory_mb=512,
ephemeral_gb=0,
vcpus=1),
pci_requests=None,
numa_topology=None,
instance_group=None)
self.stubs.Set(weights.HostWeightHandler,
'get_weighed_objects', _fake_weigh_objects)
self.mox.ReplayAll()
hosts = self.driver._schedule(self.context, spec_obj)
# one host should be chosen
self.assertEqual(1, len(hosts))
self.assertEqual(50, hosts[0].weight)
@mock.patch('nova.objects.ServiceList.get_by_binary',
return_value=fakes.SERVICES)
@mock.patch('nova.objects.InstanceList.get_by_host')
@mock.patch('nova.objects.ComputeNodeList.get_all',
return_value=fakes.COMPUTE_NODES)
@mock.patch('nova.db.instance_extra_get_by_instance_uuid',
return_value={'numa_topology': None,
'pci_requests': None})
def test_select_destinations(self, mock_get_extra, mock_get_all,
mock_by_host, mock_get_by_binary):
"""select_destinations is basically a wrapper around _schedule().
Similar to the _schedule tests, this just does a happy path test to
ensure there is nothing glaringly wrong.
"""
self.next_weight = 1.0
selected_hosts = []
selected_nodes = []
def _fake_weigh_objects(_self, functions, hosts, options):
self.next_weight += 2.0
host_state = hosts[0]
selected_hosts.append(host_state.host)
selected_nodes.append(host_state.nodename)
return [weights.WeighedHost(host_state, self.next_weight)]
self.stubs.Set(self.driver.host_manager, 'get_filtered_hosts',
fake_get_filtered_hosts)
self.stubs.Set(weights.HostWeightHandler,
'get_weighed_objects', _fake_weigh_objects)
request_spec = {'instance_type': objects.Flavor(memory_mb=512,
root_gb=512,
ephemeral_gb=0,
vcpus=1),
'instance_properties': {'project_id': 1,
'root_gb': 512,
'memory_mb': 512,
'ephemeral_gb': 0,
'vcpus': 1,
'os_type': 'Linux',
'uuid': 'fake-uuid'},
'num_instances': 1}
self.mox.ReplayAll()
dests = self.driver.select_destinations(self.context, request_spec, {})
(host, node) = (dests[0]['host'], dests[0]['nodename'])
self.assertEqual(host, selected_hosts[0])
self.assertEqual(node, selected_nodes[0])
@mock.patch.object(filter_scheduler.FilterScheduler, '_schedule')
def test_select_destinations_notifications(self, mock_schedule):
mock_schedule.return_value = [mock.Mock()]
with mock.patch.object(self.driver.notifier, 'info') as mock_info:
request_spec = {'num_instances': 1,
'instance_properties': {'project_id': '1',
'root_gb': 512,
'memory_mb': 512,
'ephemeral_gb': 0,
'vcpus': 1,
'uuid': '1',
'pci_requests': None,
'availability_zone': None,
'numa_topology': None},
'instance_type': objects.Flavor(memory_mb=512,
root_gb=512,
ephemeral_gb=0,
vcpus=1),
'image': {'properties': {}}}
self.driver.select_destinations(self.context, request_spec, {})
expected = [
mock.call(self.context, 'scheduler.select_destinations.start',
dict(request_spec=request_spec)),
mock.call(self.context, 'scheduler.select_destinations.end',
dict(request_spec=request_spec))]
self.assertEqual(expected, mock_info.call_args_list)
def test_select_destinations_no_valid_host(self):
def _return_no_host(*args, **kwargs):
return []
self.stubs.Set(self.driver, '_schedule', _return_no_host)
self.assertRaises(exception.NoValidHost,
self.driver.select_destinations, self.context,
{'num_instances': 1}, {})
def test_select_destinations_no_valid_host_not_enough(self):
# Tests that we have fewer hosts available than number of instances
# requested to build.
consumed_hosts = [mock.MagicMock(), mock.MagicMock()]
with mock.patch.object(self.driver, '_schedule',
return_value=consumed_hosts):
try:
self.driver.select_destinations(
self.context, {'num_instances': 3}, {})
self.fail('Expected NoValidHost to be raised.')
except exception.NoValidHost as e:
# Make sure that we provided a reason why NoValidHost.
self.assertIn('reason', e.kwargs)
self.assertTrue(len(e.kwargs['reason']) > 0)
# Make sure that the consumed hosts have chance to be reverted.
for host in consumed_hosts:
self.assertIsNone(host.obj.updated)
| apache-2.0 |
ty1er/incubator-asterixdb | asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/FuzzyEqRule.java | 6591 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.optimizer.rules;
import java.util.ArrayList;
import java.util.List;
import org.apache.asterix.lang.common.util.FunctionUtil;
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.om.base.IAObject;
import org.apache.asterix.om.constants.AsterixConstantValue;
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.optimizer.base.FuzzyUtils;
import org.apache.commons.lang3.mutable.Mutable;
import org.apache.commons.lang3.mutable.MutableObject;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
import org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
import org.apache.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
import org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
import org.apache.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
import org.apache.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
import org.apache.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
public class FuzzyEqRule implements IAlgebraicRewriteRule {
@Override
public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
throws AlgebricksException {
AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
// current operator is INNERJOIN or LEFTOUTERJOIN or SELECT
Mutable<ILogicalExpression> expRef;
if (op.getOperatorTag() == LogicalOperatorTag.INNERJOIN
|| op.getOperatorTag() == LogicalOperatorTag.LEFTOUTERJOIN) {
AbstractBinaryJoinOperator joinOp = (AbstractBinaryJoinOperator) op;
expRef = joinOp.getCondition();
} else if (op.getOperatorTag() == LogicalOperatorTag.SELECT) {
SelectOperator selectOp = (SelectOperator) op;
expRef = selectOp.getCondition();
} else {
return false;
}
MetadataProvider metadataProvider = ((MetadataProvider) context.getMetadataProvider());
IVariableTypeEnvironment env = context.getOutputTypeEnvironment(op);
if (expandFuzzyEq(expRef, context, env, metadataProvider)) {
context.computeAndSetTypeEnvironmentForOperator(op);
return true;
}
return false;
}
private boolean expandFuzzyEq(Mutable<ILogicalExpression> expRef, IOptimizationContext context,
IVariableTypeEnvironment env, MetadataProvider metadataProvider) throws AlgebricksException {
ILogicalExpression exp = expRef.getValue();
if (exp.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
return false;
}
boolean expanded = false;
AbstractFunctionCallExpression funcExp = (AbstractFunctionCallExpression) exp;
FunctionIdentifier fi = funcExp.getFunctionIdentifier();
if (fi.equals(BuiltinFunctions.FUZZY_EQ)) {
List<Mutable<ILogicalExpression>> inputExps = funcExp.getArguments();
String simFuncName = FuzzyUtils.getSimFunction(metadataProvider);
ArrayList<Mutable<ILogicalExpression>> similarityArgs = new ArrayList<Mutable<ILogicalExpression>>();
for (int i = 0; i < inputExps.size(); ++i) {
Mutable<ILogicalExpression> inputExpRef = inputExps.get(i);
similarityArgs.add(inputExpRef);
}
FunctionIdentifier simFunctionIdentifier = FuzzyUtils.getFunctionIdentifier(simFuncName);
ScalarFunctionCallExpression similarityExp = new ScalarFunctionCallExpression(
FunctionUtil.getFunctionInfo(simFunctionIdentifier), similarityArgs);
// Add annotations from the original fuzzy-eq function.
similarityExp.getAnnotations().putAll(funcExp.getAnnotations());
ArrayList<Mutable<ILogicalExpression>> cmpArgs = new ArrayList<Mutable<ILogicalExpression>>();
cmpArgs.add(new MutableObject<ILogicalExpression>(similarityExp));
IAObject simThreshold = FuzzyUtils.getSimThreshold(metadataProvider, simFuncName);
cmpArgs.add(new MutableObject<ILogicalExpression>(
new ConstantExpression(new AsterixConstantValue(simThreshold))));
ScalarFunctionCallExpression cmpExpr = FuzzyUtils.getComparisonExpr(simFuncName, cmpArgs);
expRef.setValue(cmpExpr);
return true;
} else if (fi.equals(AlgebricksBuiltinFunctions.AND) || fi.equals(AlgebricksBuiltinFunctions.OR)) {
for (int i = 0; i < 2; i++) {
if (expandFuzzyEq(funcExp.getArguments().get(i), context, env, metadataProvider)) {
expanded = true;
}
}
}
return expanded;
}
@Override
public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
throws AlgebricksException {
return false;
}
}
| apache-2.0 |
gencer/libphonenumber-for-php | src/data/ShortNumberMetadata_BA.php | 2263 | <?php
/**
* This file has been @generated by a phing task by {@link BuildMetadataPHPFromXml}.
* See [README.md](README.md#generating-data) for more information.
*
* Pull requests changing data in these files will not be accepted. See the
* [FAQ in the README](README.md#problems-with-invalid-numbers] on how to make
* metadata changes.
*
* Do not modify this file directly!
*/
return array (
'generalDesc' =>
array (
'NationalNumberPattern' => '1\\d{2,5}',
'PossibleLength' =>
array (
0 => 3,
1 => 4,
2 => 5,
3 => 6,
),
'PossibleLengthLocalOnly' =>
array (
),
),
'tollFree' =>
array (
'NationalNumberPattern' => '116\\d{3}',
'ExampleNumber' => '116000',
'PossibleLength' =>
array (
0 => 6,
),
'PossibleLengthLocalOnly' =>
array (
),
),
'premiumRate' =>
array (
'PossibleLength' =>
array (
0 => -1,
),
'PossibleLengthLocalOnly' =>
array (
),
),
'emergency' =>
array (
'NationalNumberPattern' => '12[2-4]',
'ExampleNumber' => '122',
'PossibleLength' =>
array (
0 => 3,
),
'PossibleLengthLocalOnly' =>
array (
),
),
'shortCode' =>
array (
'NationalNumberPattern' => '1(?:1(?:6(?:00[06]|1(?:1[17]|23))|8\\d{1,2})|2(?:0[0-7]|[2-5]|6[0-26]|[78]\\d{1,2})|[3-5]\\d{2}|7\\d{3})',
'ExampleNumber' => '122',
'PossibleLength' =>
array (
),
'PossibleLengthLocalOnly' =>
array (
),
),
'standardRate' =>
array (
'PossibleLength' =>
array (
0 => -1,
),
'PossibleLengthLocalOnly' =>
array (
),
),
'carrierSpecific' =>
array (
'PossibleLength' =>
array (
0 => -1,
),
'PossibleLengthLocalOnly' =>
array (
),
),
'smsServices' =>
array (
'PossibleLength' =>
array (
0 => -1,
),
'PossibleLengthLocalOnly' =>
array (
),
),
'id' => 'BA',
'countryCode' => 0,
'internationalPrefix' => '',
'sameMobileAndFixedLinePattern' => false,
'numberFormat' =>
array (
),
'intlNumberFormat' =>
array (
),
'mainCountryForCode' => false,
'leadingZeroPossible' => false,
'mobileNumberPortableRegion' => false,
);
| apache-2.0 |
aStonegod/Mycat-Server | src/main/java/org/opencloudb/sqlengine/Catlet.java | 389 | package org.opencloudb.sqlengine;
/**
* mycat catlet ,used to execute sql and return result to client,some like
* database's procedure.
* must implemented as a stateless class and can process many SQL concurrently
*
* @author wuzhih
*
*/
public interface Catlet {
/*
* execute sql in EngineCtx and return result to client
*/
void processSQL(String sql, EngineCtx ctx);
}
| apache-2.0 |
apache/helix | helix-core/src/test/java/org/apache/helix/TestHelixConfigAccessor.java | 9907 | package org.apache.helix;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.Date;
import java.util.List;
import org.apache.helix.manager.zk.ZKHelixAdmin;
import org.apache.helix.model.HelixConfigScope;
import org.apache.helix.model.HelixConfigScope.ConfigScopeProperty;
import org.apache.helix.model.InstanceConfig;
import org.apache.helix.model.builder.HelixConfigScopeBuilder;
import org.testng.Assert;
import org.testng.annotations.Test;
public class TestHelixConfigAccessor extends ZkUnitTestBase {
@Test
public void testBasic() throws Exception {
String className = TestHelper.getTestClassName();
String methodName = TestHelper.getTestMethodName();
String clusterName = className + "_" + methodName;
System.out.println("START " + clusterName + " at " + new Date(System.currentTimeMillis()));
TestHelper.setupCluster(clusterName, ZK_ADDR, 12918, "localhost", "TestDB", 1, 10, 5, 3,
"MasterSlave", true);
ConfigAccessor configAccessor = new ConfigAccessor(_gZkClient);
HelixConfigScope clusterScope =
new HelixConfigScopeBuilder(ConfigScopeProperty.CLUSTER).forCluster(clusterName).build();
// cluster scope config
String clusterConfigValue = configAccessor.get(clusterScope, "clusterConfigKey");
Assert.assertNull(clusterConfigValue);
configAccessor.set(clusterScope, "clusterConfigKey", "clusterConfigValue");
clusterConfigValue = configAccessor.get(clusterScope, "clusterConfigKey");
Assert.assertEquals(clusterConfigValue, "clusterConfigValue");
// resource scope config
HelixConfigScope resourceScope =
new HelixConfigScopeBuilder(ConfigScopeProperty.RESOURCE).forCluster(clusterName)
.forResource("testResource").build();
configAccessor.set(resourceScope, "resourceConfigKey", "resourceConfigValue");
String resourceConfigValue = configAccessor.get(resourceScope, "resourceConfigKey");
Assert.assertEquals(resourceConfigValue, "resourceConfigValue");
// partition scope config
HelixConfigScope partitionScope =
new HelixConfigScopeBuilder(ConfigScopeProperty.PARTITION).forCluster(clusterName)
.forResource("testResource").forPartition("testPartition").build();
configAccessor.set(partitionScope, "partitionConfigKey", "partitionConfigValue");
String partitionConfigValue = configAccessor.get(partitionScope, "partitionConfigKey");
Assert.assertEquals(partitionConfigValue, "partitionConfigValue");
// participant scope config
HelixConfigScope participantScope =
new HelixConfigScopeBuilder(ConfigScopeProperty.PARTICIPANT).forCluster(clusterName)
.forParticipant("localhost_12918").build();
configAccessor.set(participantScope, "participantConfigKey", "participantConfigValue");
String participantConfigValue = configAccessor.get(participantScope, "participantConfigKey");
Assert.assertEquals(participantConfigValue, "participantConfigValue");
// test get-keys
List<String> keys =
configAccessor.getKeys(new HelixConfigScopeBuilder(ConfigScopeProperty.RESOURCE)
.forCluster(clusterName).build());
Assert.assertEquals(keys.size(), 1, "should be [testResource]");
Assert.assertEquals(keys.get(0), "testResource");
// keys = configAccessor.getKeys(new HelixConfigScopeBuilder(ConfigScopeProperty.CLUSTER)
// .forCluster(clusterName)
// .build());
// Assert.assertEquals(keys.size(), 1, "should be [" + clusterName + "]");
// Assert.assertEquals(keys.get(0), clusterName);
keys =
configAccessor.getKeys(new HelixConfigScopeBuilder(ConfigScopeProperty.PARTICIPANT)
.forCluster(clusterName).build());
Assert.assertEquals(keys.size(), 5, "should be [localhost_12918~22] sorted");
Assert.assertTrue(keys.contains("localhost_12918"));
Assert.assertTrue(keys.contains("localhost_12922"));
keys =
configAccessor.getKeys(new HelixConfigScopeBuilder(ConfigScopeProperty.PARTITION)
.forCluster(clusterName).forResource("testResource").build());
Assert.assertEquals(keys.size(), 1, "should be [testPartition]");
Assert.assertEquals(keys.get(0), "testPartition");
keys =
configAccessor.getKeys(new HelixConfigScopeBuilder(ConfigScopeProperty.RESOURCE)
.forCluster(clusterName).forResource("testResource").build());
Assert.assertEquals(keys.size(), 1, "should be [resourceConfigKey]");
Assert.assertTrue(keys.contains("resourceConfigKey"));
keys =
configAccessor.getKeys(new HelixConfigScopeBuilder(ConfigScopeProperty.CLUSTER).forCluster(
clusterName).build());
Assert.assertEquals(keys.size(), 1, "should be [clusterConfigKey]");
Assert.assertTrue(keys.contains("clusterConfigKey"));
keys =
configAccessor.getKeys(new HelixConfigScopeBuilder(ConfigScopeProperty.PARTICIPANT)
.forCluster(clusterName).forParticipant("localhost_12918").build());
Assert.assertEquals(keys.size(), 5,
"should be [HELIX_ENABLED, HELIX_ENABLED_TIMESTAMP, HELIX_PORT, HELIX_HOST, participantConfigKey]");
Assert.assertTrue(keys.contains("participantConfigKey"));
keys =
configAccessor.getKeys(new HelixConfigScopeBuilder(ConfigScopeProperty.PARTITION)
.forCluster(clusterName).forResource("testResource").forPartition("testPartition")
.build());
Assert.assertEquals(keys.size(), 1, "should be [partitionConfigKey]");
Assert.assertEquals(keys.get(0), "partitionConfigKey");
// test configAccessor.remove
configAccessor.remove(clusterScope, "clusterConfigKey");
clusterConfigValue = configAccessor.get(clusterScope, "clusterConfigKey");
Assert.assertNull(clusterConfigValue, "Should be null since it's removed");
configAccessor.remove(resourceScope, "resourceConfigKey");
resourceConfigValue = configAccessor.get(resourceScope, "resourceConfigKey");
Assert.assertNull(resourceConfigValue, "Should be null since it's removed");
configAccessor.remove(partitionScope, "partitionConfigKey");
partitionConfigValue = configAccessor.get(partitionScope, "partitionConfigKey");
Assert.assertNull(partitionConfigValue, "Should be null since it's removed");
configAccessor.remove(participantScope, "participantConfigKey");
participantConfigValue = configAccessor.get(partitionScope, "participantConfigKey");
Assert.assertNull(participantConfigValue, "Should be null since it's removed");
// negative tests
try {
new HelixConfigScopeBuilder(ConfigScopeProperty.PARTITION).forPartition("testPartition")
.build();
Assert.fail("Should fail since cluster name is not set");
} catch (Exception e) {
// OK
}
try {
new HelixConfigScopeBuilder(ConfigScopeProperty.PARTICIPANT)
.forParticipant("testParticipant").build();
Assert.fail("Should fail since cluster name is not set");
} catch (Exception e) {
// OK
}
try {
new HelixConfigScopeBuilder(ConfigScopeProperty.PARTITION).forCluster("testCluster")
.forPartition("testPartition").build();
Assert.fail("Should fail since resource name is not set");
} catch (Exception e) {
// OK
// e.printStackTrace();
}
deleteCluster(clusterName);
System.out.println("END " + clusterName + " at " + new Date(System.currentTimeMillis()));
}
// HELIX-25: set participant Config should check existence of instance
@Test
public void testSetNonexistentParticipantConfig() throws Exception {
String className = TestHelper.getTestClassName();
String methodName = TestHelper.getTestMethodName();
String clusterName = className + "_" + methodName;
System.out.println("START " + clusterName + " at " + new Date(System.currentTimeMillis()));
ZKHelixAdmin admin = new ZKHelixAdmin(_gZkClient);
admin.addCluster(clusterName, true);
ConfigAccessor configAccessor = new ConfigAccessor(_gZkClient);
HelixConfigScope participantScope =
new HelixConfigScopeBuilder(ConfigScopeProperty.PARTICIPANT).forCluster(clusterName)
.forParticipant("localhost_12918").build();
try {
configAccessor.set(participantScope, "participantConfigKey", "participantConfigValue");
Assert
.fail("Except fail to set participant-config because participant: localhost_12918 is not added to cluster yet");
} catch (HelixException e) {
// OK
}
admin.addInstance(clusterName, new InstanceConfig("localhost_12918"));
try {
configAccessor.set(participantScope, "participantConfigKey", "participantConfigValue");
} catch (Exception e) {
Assert
.fail("Except succeed to set participant-config because participant: localhost_12918 has been added to cluster");
}
String participantConfigValue = configAccessor.get(participantScope, "participantConfigKey");
Assert.assertEquals(participantConfigValue, "participantConfigValue");
deleteCluster(clusterName);
System.out.println("END " + clusterName + " at " + new Date(System.currentTimeMillis()));
}
}
| apache-2.0 |
apache/jackrabbit-oak | oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/nodetype/write/NodeTypeRegistry.java | 5611 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.plugins.nodetype.write;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.StringReader;
import javax.jcr.NamespaceRegistry;
import javax.jcr.RepositoryException;
import javax.jcr.ValueFactory;
import javax.jcr.nodetype.NodeTypeManager;
import com.google.common.base.Charsets;
import org.apache.jackrabbit.commons.cnd.CndImporter;
import org.apache.jackrabbit.commons.cnd.ParseException;
import org.apache.jackrabbit.oak.api.Root;
import org.apache.jackrabbit.oak.api.Tree;
import org.apache.jackrabbit.oak.commons.properties.SystemPropertySupplier;
import org.apache.jackrabbit.oak.namepath.impl.GlobalNameMapper;
import org.apache.jackrabbit.oak.namepath.impl.NamePathMapperImpl;
import org.apache.jackrabbit.oak.plugins.name.ReadWriteNamespaceRegistry;
import org.apache.jackrabbit.oak.plugins.value.jcr.ValueFactoryImpl;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.jackrabbit.oak.spi.nodetype.NodeTypeConstants.NODE_TYPES_PATH;
/**
 * {@code NodeTypeRegistry} is a utility class that registers the built-in
 * node types required for a JCR repository running on Oak.
 */
public final class NodeTypeRegistry {

    private static final Logger LOG = LoggerFactory.getLogger(NodeTypeRegistry.class);

    // OAK-9134: by default nt:frozenNode no longer extends mix:referenceable;
    // the "oak.referenceableFrozenNode" system property can add it back.
    private static final boolean DEFAULT_REFERENCEABLE_FROZEN_NODE = false;

    private final NodeTypeManager ntMgr;   // registers the parsed node types
    private final NamespaceRegistry nsReg; // registers namespaces referenced by the CND
    private final ValueFactory vf;         // creates values needed during registration

    private NodeTypeRegistry(final Root root) {
        this.ntMgr = new ReadWriteNodeTypeManager() {
            @NotNull
            @Override
            protected Tree getTypes() {
                return root.getTree(NODE_TYPES_PATH);
            }

            @NotNull
            @Override
            protected Root getWriteRoot() {
                return root;
            }
        };

        this.nsReg = new ReadWriteNamespaceRegistry(root) {
            @Override
            protected Root getWriteRoot() {
                return root;
            }
        };

        this.vf = new ValueFactoryImpl(
                root, new NamePathMapperImpl(new GlobalNameMapper(root)));
    }

    /**
     * Register the node type definitions contained in the specified {@code input}
     * using the given {@link Root}.
     *
     * @param root The {@code Root} to register the node types.
     * @param input The input stream containing the node type definitions to be registered.
     * @param systemId An informative id of the given input.
     */
    public static void register(Root root, InputStream input, String systemId) {
        new NodeTypeRegistry(root).registerNodeTypes(input, systemId);
    }

    private void registerNodeTypes(InputStream stream, String systemId) {
        try {
            Reader reader = new InputStreamReader(stream, Charsets.UTF_8);
            // OAK-9134: nt:frozenNode is not implementing mix:referenceable from JCR 2.0.
            // This system property allows to add it back when initializing a repository.
            // PS: To keep supporting tests in fiddling this setting, the SystemPropertySupplier
            // is evaluated here rather than in static code, where this is typically done.
            final boolean referenceableFrozenNode = SystemPropertySupplier
                    .create("oak.referenceableFrozenNode", DEFAULT_REFERENCEABLE_FROZEN_NODE)
                    .loggingTo(LOG)
                    // BUG FIX: the format arguments were swapped — the log message printed
                    // the property name where the value belongs and vice versa.
                    .formatSetMessage((name, value) -> String.format(
                            "oak.referenceableFrozenNode set to: %s (using system property %s)", value, name))
                    .get();
            if (referenceableFrozenNode) {
                // Rewrite the CND on the fly so nt:frozenNode declares mix:referenceable.
                BufferedReader bufferedReader = new BufferedReader(reader);
                StringBuilder result = new StringBuilder();
                String line;
                while ((line = bufferedReader.readLine()) != null) {
                    if (line.trim().equals("[nt:frozenNode]")) {
                        line = "[nt:frozenNode] > mix:referenceable";
                    }
                    result.append(line).append(System.lineSeparator());
                }
                reader = new StringReader(result.toString());
            }
            CndImporter.registerNodeTypes(reader, systemId, ntMgr, nsReg, vf, false);
        } catch (IOException e) {
            throw new IllegalStateException("Unable to read " + systemId, e);
        } catch (ParseException e) {
            throw new IllegalStateException("Unable to parse " + systemId, e);
        } catch (RepositoryException e) {
            throw new IllegalStateException("Unable to register " + systemId, e);
        }
    }
}
| apache-2.0 |
skotzko/akka.net | src/core/Akka/Util/StandardOutWriter.cs | 3068 | //-----------------------------------------------------------------------
// <copyright file="StandardOutWriter.cs" company="Akka.NET Project">
// Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com>
// Copyright (C) 2013-2016 Akka.NET project <https://github.com/akkadotnet/akka.net>
// </copyright>
//-----------------------------------------------------------------------
using System;
namespace Akka.Util
{
/// <summary>
/// This class contains methods for thread safe writing to the standard output stream.
/// </summary>
/// <summary>
/// This class contains methods for thread safe writing to the standard output stream.
/// </summary>
public static class StandardOutWriter
{
    private static readonly object _lock = new object();

    /// <summary>
    /// Writes the specified <see cref="string"/> value to the standard output stream. Optionally
    /// you may specify which colors should be used.
    /// </summary>
    /// <param name="message">The <see cref="string"/> value to write</param>
    /// <param name="foregroundColor">Optional: The foreground color</param>
    /// <param name="backgroundColor">Optional: The background color</param>
    public static void Write(string message, ConsoleColor? foregroundColor = null, ConsoleColor? backgroundColor = null)
    {
        WriteToConsole(() => Console.Write(message), foregroundColor, backgroundColor);
    }

    /// <summary>
    /// Writes the specified <see cref="string"/> value, followed by the current line terminator,
    /// to the standard output stream. Optionally you may specify which colors should be used.
    /// </summary>
    /// <param name="message">The <see cref="string"/> value to write</param>
    /// <param name="foregroundColor">Optional: The foreground color</param>
    /// <param name="backgroundColor">Optional: The background color</param>
    public static void WriteLine(string message, ConsoleColor? foregroundColor = null, ConsoleColor? backgroundColor = null)
    {
        WriteToConsole(() => Console.WriteLine(message), foregroundColor, backgroundColor);
    }

    /// <summary>
    /// Runs <paramref name="write"/> under the shared lock, temporarily applying the
    /// requested console colors and restoring the previous ones afterwards.
    /// </summary>
    private static void WriteToConsole(Action write, ConsoleColor? foregroundColor = null, ConsoleColor? backgroundColor = null)
    {
        lock(_lock)
        {
            ConsoleColor? fg = null;
            ConsoleColor? bg = null;
            try
            {
                if(foregroundColor.HasValue)
                {
                    fg = Console.ForegroundColor;
                    Console.ForegroundColor = foregroundColor.Value;
                }
                if(backgroundColor.HasValue)
                {
                    bg = Console.BackgroundColor;
                    Console.BackgroundColor = backgroundColor.Value;
                }

                write();
            }
            finally
            {
                // BUG FIX: previously an exception thrown by write() skipped the
                // restore code and left the console colors permanently changed.
                if(fg.HasValue)
                {
                    Console.ForegroundColor = fg.Value;
                }
                if(bg.HasValue)
                {
                    Console.BackgroundColor = bg.Value;
                }
            }
        }
    }
}
}
| apache-2.0 |
AndroidX/androidx | core/core-google-shortcuts/src/main/java/androidx/core/google/shortcuts/converters/IndexableKeys.java | 1186 | /*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.core.google.shortcuts.converters;
import static androidx.annotation.RestrictTo.Scope.LIBRARY;
import androidx.annotation.RestrictTo;
/**
* Constants for the keys for common {@link com.google.firebase.appindexing.Indexable} fields.
*
* @hide
*/
@RestrictTo(LIBRARY)
public class IndexableKeys {
    // General
    /** Key for the namespace field of an Indexable. */
    public static final String NAMESPACE = "_namespace";
    /** Key for the time-to-live field of an Indexable, in milliseconds. */
    public static final String TTL_MILLIS = "_ttlMillis";
    /** Key for the creation timestamp field of an Indexable, in milliseconds. */
    public static final String CREATION_TIMESTAMP_MILLIS = "_creationTimestampMillis";

    // Non-instantiable: this class is a pure constants holder.
    private IndexableKeys() {}
}
| apache-2.0 |
imeoer/hypercli | pkg/sysinfo/sysinfo.go | 3277 | package sysinfo
import "github.com/hyperhq/hypercli/pkg/parsers"
// SysInfo stores information about which features a kernel supports.
// TODO Windows: Factor out platform specific capabilities.
type SysInfo struct {
	// Whether the kernel supports AppArmor or not
	AppArmor bool
	// Whether the kernel supports Seccomp or not
	Seccomp bool

	// Per-cgroup-controller capability flags, embedded so callers can read
	// e.g. SysInfo.MemoryLimit directly.
	cgroupMemInfo
	cgroupCPUInfo
	cgroupBlkioInfo
	cgroupCpusetInfo

	// Whether IPv4 forwarding is supported or not, if this was disabled, networking will not work
	IPv4ForwardingDisabled bool

	// Whether bridge-nf-call-iptables is supported or not
	BridgeNfCallIptablesDisabled bool

	// Whether bridge-nf-call-ip6tables is supported or not
	BridgeNfCallIP6tablesDisabled bool

	// Whether the cgroup has the mountpoint of "devices" or not
	CgroupDevicesEnabled bool
}

// cgroupMemInfo records which memory-controller knobs the kernel exposes.
type cgroupMemInfo struct {
	// Whether memory limit is supported or not
	MemoryLimit bool

	// Whether swap limit is supported or not
	SwapLimit bool

	// Whether soft limit is supported or not
	MemoryReservation bool

	// Whether OOM killer disable is supported or not
	OomKillDisable bool

	// Whether memory swappiness is supported or not
	MemorySwappiness bool

	// Whether kernel memory limit is supported or not
	KernelMemory bool
}

// cgroupCPUInfo records which CPU-controller knobs the kernel exposes.
type cgroupCPUInfo struct {
	// Whether CPU shares is supported or not
	CPUShares bool

	// Whether CPU CFS(Completely Fair Scheduler) period is supported or not
	CPUCfsPeriod bool

	// Whether CPU CFS(Completely Fair Scheduler) quota is supported or not
	CPUCfsQuota bool
}

// cgroupBlkioInfo records which block-IO throttling knobs the kernel exposes.
type cgroupBlkioInfo struct {
	// Whether Block IO weight is supported or not
	BlkioWeight bool

	// Whether Block IO weight_device is supported or not
	BlkioWeightDevice bool

	// Whether Block IO read limit in bytes per second is supported or not
	BlkioReadBpsDevice bool

	// Whether Block IO write limit in bytes per second is supported or not
	BlkioWriteBpsDevice bool

	// Whether Block IO read limit in IO per second is supported or not
	BlkioReadIOpsDevice bool

	// Whether Block IO write limit in IO per second is supported or not
	BlkioWriteIOpsDevice bool
}

// cgroupCpusetInfo records cpuset support plus the cpu/memory-node sets
// actually available to this process.
type cgroupCpusetInfo struct {
	// Whether Cpuset is supported or not
	Cpuset bool

	// Available Cpuset's cpus
	Cpus string

	// Available Cpuset's memory nodes
	Mems string
}
// IsCpusetCpusAvailable returns `true` if the provided string set is contained
// in cgroup's cpuset.cpus set, `false` otherwise.
// If error is not nil a parsing error occurred.
// `provided` uses cpuset list syntax as accepted by parsers.ParseUintList
// (e.g. "0-3,7" — confirm exact grammar against that helper).
func (c cgroupCpusetInfo) IsCpusetCpusAvailable(provided string) (bool, error) {
	return isCpusetListAvailable(provided, c.Cpus)
}

// IsCpusetMemsAvailable returns `true` if the provided string set is contained
// in cgroup's cpuset.mems set, `false` otherwise.
// If error is not nil a parsing error occurred.
func (c cgroupCpusetInfo) IsCpusetMemsAvailable(provided string) (bool, error) {
	return isCpusetListAvailable(provided, c.Mems)
}
// isCpusetListAvailable reports whether every index in the `provided` list
// expression also appears in the `available` list expression. A parse
// failure of either expression is surfaced as a non-nil error.
func isCpusetListAvailable(provided, available string) (bool, error) {
	wanted, err := parsers.ParseUintList(provided)
	if err != nil {
		return false, err
	}
	have, err := parsers.ParseUintList(available)
	if err != nil {
		return false, err
	}
	// Subset check: fail on the first requested index that is not available.
	for idx := range wanted {
		if !have[idx] {
			return false, nil
		}
	}
	return true, nil
}
| apache-2.0 |
frosenberg/kubernetes | pkg/expapi/v1/types.go | 8488 | /*
Copyright 2015 The Kubernetes Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package v1
import (
"k8s.io/kubernetes/pkg/api/resource"
"k8s.io/kubernetes/pkg/api/v1"
"k8s.io/kubernetes/pkg/util"
)
// ScaleSpec describes the attributes a Scale subresource
type ScaleSpec struct {
	// Replicas is the number of desired replicas.
	Replicas int `json:"replicas,omitempty" description:"number of replicas desired; see http://releases.k8s.io/HEAD/docs/user-guide/replication-controller.md#what-is-a-replication-controller"`
}

// ScaleStatus represents the current status of a Scale subresource.
type ScaleStatus struct {
	// Replicas is the number of actual replicas.
	// FIX: description tag previously read "oberved".
	Replicas int `json:"replicas" description:"most recently observed number of replicas; see http://releases.k8s.io/HEAD/docs/user-guide/replication-controller.md#what-is-a-replication-controller"`

	// Selector is a label query over pods that should match the replicas count.
	Selector map[string]string `json:"selector,omitempty" description:"label keys and values that must match in order to be controlled by this replication controller, if empty defaulted to labels on Pod template; see http://releases.k8s.io/HEAD/docs/user-guide/labels.md#label-selectors"`
}

// Scale subresource, applicable to ReplicationControllers and (in future) Deployment.
type Scale struct {
	v1.TypeMeta `json:",inline"`
	v1.ObjectMeta `json:"metadata,omitempty" description:"standard object metadata; see http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#metadata"`

	// Spec defines the behavior of the scale.
	Spec ScaleSpec `json:"spec,omitempty" description:"specification of the desired behavior of the scale; http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#spec-and-status"`

	// Status represents the current status of the scale.
	// FIX: description previously said "status of the service" (copy-paste).
	Status ScaleStatus `json:"status,omitempty" description:"most recently observed status of the scale; populated by the system, read-only; http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#spec-and-status"`
}
// Dummy definition
type ReplicationControllerDummy struct {
	v1.TypeMeta `json:",inline"`
}

// SubresourceReference contains enough information to let you inspect or modify the referred subresource.
type SubresourceReference struct {
	Kind string `json:"kind,omitempty" description:"kind of the referent; see http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#types-kinds"`
	Namespace string `json:"namespace,omitempty" description:"namespace of the referent; see http://releases.k8s.io/HEAD/docs/user-guide/namespaces.md"`
	Name string `json:"name,omitempty" description:"name of the referent; see http://releases.k8s.io/HEAD/docs/user-guide/identifiers.md#names"`
	APIVersion string `json:"apiVersion,omitempty" description:"API version of the referent"`
	// FIX: tag key was misspelled `decription:`, so tooling reading the
	// `description` tag key silently missed this field.
	Subresource string `json:"subresource,omitempty" description:"subresource name of the referent"`
}

// ResourceConsumption is an object for specifying average resource consumption of a particular resource.
type ResourceConsumption struct {
	Resource v1.ResourceName `json:"resource,omitempty"`
	Quantity resource.Quantity `json:"quantity,omitempty"`
}
// HorizontalPodAutoscalerSpec is the specification of a horizontal pod autoscaler.
type HorizontalPodAutoscalerSpec struct {
	// ScaleRef is a reference to Scale subresource. HorizontalPodAutoscaler will learn the current resource consumption from its status,
	// and will set the desired number of pods by modifying its spec.
	// FIX: description tag previously contained the typos "quering" and "cosumption".
	ScaleRef *SubresourceReference `json:"scaleRef" description:"reference to scale subresource for querying the current resource consumption and for setting the desired number of pods"`
	// MinCount is the lower limit for the number of pods that can be set by the autoscaler.
	MinCount int `json:"minCount" description:"lower limit for the number of pods"`
	// MaxCount is the upper limit for the number of pods that can be set by the autoscaler. It cannot be smaller than MinCount.
	MaxCount int `json:"maxCount" description:"upper limit for the number of pods"`
	// Target is the target average consumption of the given resource that the autoscaler will try to maintain by adjusting the desired number of pods.
	// Currently two types of resources are supported: "cpu" and "memory".
	Target ResourceConsumption `json:"target" description:"target average consumption of resource that the autoscaler will try to maintain by adjusting the desired number of pods"`
}

// HorizontalPodAutoscalerStatus contains the current status of a horizontal pod autoscaler
type HorizontalPodAutoscalerStatus struct {
	// CurrentReplicas is the number of replicas of pods managed by this autoscaler.
	CurrentReplicas int `json:"currentReplicas" description:"number of replicas observed by the autoscaler"`

	// DesiredReplicas is the desired number of replicas of pods managed by this autoscaler.
	DesiredReplicas int `json:"desiredReplicas" description:"number of desired replicas"`

	// CurrentConsumption is the current average consumption of the given resource that the autoscaler will
	// try to maintain by adjusting the desired number of pods.
	// Two types of resources are supported: "cpu" and "memory".
	CurrentConsumption ResourceConsumption `json:"currentConsumption" description:"current resource consumption"`

	// LastScaleTimestamp is the last time the HorizontalPodAutoscaler scaled the number of pods.
	// This is used by the autoscaler to control how often the number of pods is changed.
	LastScaleTimestamp *util.Time `json:"lastScaleTimestamp,omitempty" description:"last time the autoscaler made decision about changing the number of pods"`
}

// HorizontalPodAutoscaler represents the configuration of a horizontal pod autoscaler.
type HorizontalPodAutoscaler struct {
	v1.TypeMeta `json:",inline"`
	v1.ObjectMeta `json:"metadata,omitempty"`

	// Spec defines the behaviour of autoscaler.
	Spec HorizontalPodAutoscalerSpec `json:"spec,omitempty" description:"specification of the desired behavior of the autoscaler; http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#spec-and-status"`

	// Status represents the current information about the autoscaler.
	Status HorizontalPodAutoscalerStatus `json:"status,omitempty"`
}

// HorizontalPodAutoscalerList is a collection of pod autoscalers.
// FIX: the comment previously named the wrong type (HorizontalPodAutoscaler).
type HorizontalPodAutoscalerList struct {
	v1.TypeMeta `json:",inline"`
	v1.ListMeta `json:"metadata,omitempty"`

	Items []HorizontalPodAutoscaler `json:"items" description:"list of horizontal pod autoscalers"`
}
// A ThirdPartyResource is a generic representation of a resource, it is used by add-ons and plugins to add new resource
// types to the API. It consists of one or more Versions of the api.
type ThirdPartyResource struct {
	v1.TypeMeta `json:",inline"`
	v1.ObjectMeta `json:"metadata,omitempty" description:"standard object metadata"`

	Description string `json:"description,omitempty" description:"The description of this object"`

	Versions []APIVersion `json:"versions,omitempty" description:"The versions for this third party object"`
}

// ThirdPartyResourceList is a list of ThirdPartyResource objects.
type ThirdPartyResourceList struct {
	v1.TypeMeta `json:",inline"`
	v1.ListMeta `json:"metadata,omitempty" description:"standard list metadata; see http://docs.k8s.io/api-conventions.md#metadata"`
	Items []ThirdPartyResource `json:"items" description:"items is a list of schema objects"`
}

// An APIVersion represents a single concrete version of an object model.
type APIVersion struct {
	Name string `json:"name,omitempty" description:"name of this version (e.g. 'v1')"`
	APIGroup string `json:"apiGroup,omitempty" description:"The API group to add this object into, default 'experimental'"`
}

// An internal object, used for versioned storage in etcd. Not exposed to the end user.
type ThirdPartyResourceData struct {
	v1.TypeMeta `json:",inline"`
	v1.ObjectMeta `json:"metadata,omitempty" description:"standard object metadata"`
	// NOTE(review): the json tag says "name" although the field holds raw JSON
	// data — this looks like a copy-paste mistake, but changing the tag would
	// change the wire format; confirm before fixing.
	Data []byte `json:"name,omitempty" description:"the raw JSON data for this data"`
}
| apache-2.0 |
porcelli-forks/uberfire | uberfire-nio2-backport/uberfire-nio2-impls/uberfire-nio2-jgit/src/test/java/org/uberfire/java/nio/fs/jgit/JGitFileSystemProviderUnsupportedOpTest.java | 4849 | /*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.uberfire.java.nio.fs.jgit;
import java.net.URI;
import java.util.Set;

import org.junit.Test;
import org.uberfire.java.nio.file.FileSystem;
import org.uberfire.java.nio.file.OpenOption;
import org.uberfire.java.nio.file.Path;

import static java.util.Collections.emptySet;
import static org.fest.assertions.api.Assertions.assertThat;
import static org.fest.assertions.api.Assertions.failBecauseExceptionWasNotThrown;
/**
 * Verifies that the NIO.2 operations the JGit provider does not implement fail
 * with {@link UnsupportedOperationException}.
 *
 * BUG FIX: every test previously used an empty {@code catch (Exception e)}
 * block, which swallowed *any* exception — the tests passed even when a wrong
 * exception type was thrown. Each catch now asserts the exception type.
 */
public class JGitFileSystemProviderUnsupportedOpTest extends AbstractTestInfra {

    @Test
    public void testNewFileSystemUnsupportedOp() {
        final URI newRepo = URI.create("git://someunsup-repo-name");
        final FileSystem fs = provider.newFileSystem(newRepo, EMPTY_ENV);

        final Path path = JGitPathImpl.create((JGitFileSystem) fs, "", "repo2-name", false);

        try {
            // Creating a file system from a Path is not supported.
            provider.newFileSystem(path, EMPTY_ENV);
            failBecauseExceptionWasNotThrown(UnsupportedOperationException.class);
        } catch (Exception e) {
            assertThat(e).isInstanceOf(UnsupportedOperationException.class);
        }
    }

    @Test
    public void testNewFileChannelUnsupportedOp() {
        final URI newRepo = URI.create("git://newfcrepo-name");
        provider.newFileSystem(newRepo, EMPTY_ENV);

        final Path path = provider.getPath(URI.create("git://newfcrepo-name/file.txt"));

        final Set<? extends OpenOption> options = emptySet();
        try {
            provider.newFileChannel(path, options);
            failBecauseExceptionWasNotThrown(UnsupportedOperationException.class);
        } catch (Exception e) {
            assertThat(e).isInstanceOf(UnsupportedOperationException.class);
        }
    }

    @Test
    public void testNewAsynchronousFileChannelUnsupportedOp() {
        final URI newRepo = URI.create("git://newasyncrepo-name");
        provider.newFileSystem(newRepo, EMPTY_ENV);

        final Path path = provider.getPath(URI.create("git://newasyncrepo-name/file.txt"));

        final Set<? extends OpenOption> options = emptySet();
        try {
            provider.newAsynchronousFileChannel(path, options, null);
            failBecauseExceptionWasNotThrown(UnsupportedOperationException.class);
        } catch (Exception e) {
            assertThat(e).isInstanceOf(UnsupportedOperationException.class);
        }
    }

    @Test
    public void testCreateSymbolicLinkUnsupportedOp() {
        final URI newRepo = URI.create("git://symbolic-repo-name");
        provider.newFileSystem(newRepo, EMPTY_ENV);

        final Path link = provider.getPath(URI.create("git://symbolic-repo-name/link.lnk"));
        final Path path = provider.getPath(URI.create("git://symbolic-repo-name/file.txt"));

        try {
            provider.createSymbolicLink(link, path);
            failBecauseExceptionWasNotThrown(UnsupportedOperationException.class);
        } catch (Exception e) {
            assertThat(e).isInstanceOf(UnsupportedOperationException.class);
        }
    }

    @Test
    public void testCreateLinkUnsupportedOp() {
        final URI newRepo = URI.create("git://link-repo-name");
        provider.newFileSystem(newRepo, EMPTY_ENV);

        final Path link = provider.getPath(URI.create("git://link-repo-name/link.lnk"));
        final Path path = provider.getPath(URI.create("git://link-repo-name/file.txt"));

        try {
            provider.createLink(link, path);
            failBecauseExceptionWasNotThrown(UnsupportedOperationException.class);
        } catch (Exception e) {
            assertThat(e).isInstanceOf(UnsupportedOperationException.class);
        }
    }

    @Test
    public void testReadSymbolicLinkUnsupportedOp() {
        final URI newRepo = URI.create("git://read-link-repo-name");
        provider.newFileSystem(newRepo, EMPTY_ENV);

        final Path link = provider.getPath(URI.create("git://read-link-repo-name/link.lnk"));

        try {
            provider.readSymbolicLink(link);
            failBecauseExceptionWasNotThrown(UnsupportedOperationException.class);
        } catch (Exception e) {
            assertThat(e).isInstanceOf(UnsupportedOperationException.class);
        }
    }
}
| apache-2.0 |
adit-chandra/tensorflow | tensorflow/lite/examples/experimental_new_converter/stack_trace_example.py | 3058 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""CodeLab for displaying error stack trace w/ MLIR-based converter."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
from absl import app
import tensorflow as tf # TF2
# Try to enable TensorFlow V2 behavior.
try:
  from tensorflow import enable_v2_behavior  # pylint: disable=g-import-not-at-top
  enable_v2_behavior()
except ImportError:
  # `enable_v2_behavior` is not available in pip build.
  # Ignore if the symbol isn't found. This should work in
  # TensorFlow 2 nightly pip.
  pass
def suppress_exception(f):
  """Returns a wrapper around `f` that swallows any exception it raises.

  Used in this demo so that a failing conversion does not abort the script.
  The wrapper forwards positional and keyword arguments and returns `f`'s
  result, or None when `f` raised.
  """
  from functools import wraps  # local import keeps the module imports unchanged

  @wraps(f)  # preserve f's name/docstring for introspection and logging
  def wrapped(*args, **kwargs):
    try:
      return f(*args, **kwargs)
    except:  # pylint: disable=bare-except
      # Deliberate blanket suppression: the demo should continue past
      # converter failures. Intentionally returns None in that case.
      return None

  return wrapped
class TestModule(tf.Module):
  """The test model has unsupported op."""

  @tf.function(input_signature=[tf.TensorSpec(shape=[3, 3], dtype=tf.float32)])
  def model(self, x):
    # `tf.math.reciprocal` is used here precisely because the TFLite converter
    # does not support it, forcing the error/stack-trace path this demo shows.
    y = tf.math.reciprocal(x)  # Not supported
    return y + y
# comment out the `@suppress_exception` to display the stack trace
@suppress_exception
def test_from_saved_model():
  """displaying stack trace when converting saved model."""
  export_dir = '/tmp/test.saved_model'
  export_options = tf.saved_model.SaveOptions(save_debug_info=True)
  tf.saved_model.save(TestModule(), export_dir, options=export_options)

  # Reload the exported model and run the MLIR-based converter on it; the
  # unsupported op inside TestModule makes the conversion fail.
  saved_model_converter = tf.lite.TFLiteConverter.from_saved_model(export_dir)
  saved_model_converter.experimental_new_converter = True
  saved_model_converter.convert()
# comment out the `@suppress_exception` to display the stack trace
# @suppress_exception
def test_from_concrete_function():
  """displaying stack trace when converting concrete function."""

  @tf.function(input_signature=[tf.TensorSpec(shape=[3, 3], dtype=tf.float32)])
  def model(x):
    y = tf.math.reciprocal(x)  # not supported
    return y + y

  # Trace the function and hand the resulting ConcreteFunction to the
  # MLIR-based converter; the unsupported op makes the conversion fail.
  concrete_fn = model.get_concrete_function()
  concrete_converter = tf.lite.TFLiteConverter.from_concrete_functions([concrete_fn])
  concrete_converter.experimental_new_converter = True
  concrete_converter.convert()
def main(argv):
  """Runs both conversion demos, printing a banner before each one."""
  if len(argv) > 1:
    raise app.UsageError('Too many command-line arguments.')

  demos = (
      ('==== Testing from_concrete_functions ====\n', test_from_concrete_function),
      ('==== Testing from_saved_model ====\n', test_from_saved_model),
  )
  for banner, demo in demos:
    sys.stdout.write(banner)
    demo()
if __name__ == '__main__':
  # absl entry point: parses flags and then invokes main(argv).
  app.run(main)
| apache-2.0 |
mikedougherty/docker | vendor/github.com/docker/swarmkit/ca/certificates.go | 22258 | package ca
import (
"bytes"
"crypto"
"crypto/ecdsa"
"crypto/rand"
"crypto/tls"
"crypto/x509"
"encoding/pem"
"io"
"io/ioutil"
"os"
"path/filepath"
"time"
cfcsr "github.com/cloudflare/cfssl/csr"
"github.com/cloudflare/cfssl/helpers"
"github.com/cloudflare/cfssl/initca"
cflog "github.com/cloudflare/cfssl/log"
cfsigner "github.com/cloudflare/cfssl/signer"
"github.com/cloudflare/cfssl/signer/local"
"github.com/docker/go-events"
"github.com/docker/swarmkit/api"
"github.com/docker/swarmkit/ioutils"
"github.com/docker/swarmkit/remotes"
"github.com/opencontainers/go-digest"
"github.com/pkg/errors"
"golang.org/x/net/context"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/credentials"
)
const (
	// Security Strength Equivalence
	//-----------------------------------
	//| ECC | DH/DSA/RSA |
	//| 256 | 3072 |
	//| 384 | 7680 |
	//-----------------------------------

	// RootKeySize is the default size of the root CA key
	// It would be ideal for the root key to use P-384, but in P-384 is not optimized in go yet :(
	RootKeySize = 256
	// RootKeyAlgo defines the default algorithm for the root CA Key
	RootKeyAlgo = "ecdsa"
	// PassphraseENVVar defines the environment variable to look for the
	// root CA private key material encryption key
	PassphraseENVVar = "SWARM_ROOT_CA_PASSPHRASE"
	// PassphraseENVVarPrev defines the alternate environment variable to look for the
	// root CA private key material encryption key. It can be used for seamless
	// KEK rotations.
	PassphraseENVVarPrev = "SWARM_ROOT_CA_PASSPHRASE_PREV"
	// RootCAExpiration represents the expiration for the root CA in seconds (20 years)
	RootCAExpiration = "630720000s"
	// DefaultNodeCertExpiration represents the default expiration for node certificates (3 months)
	DefaultNodeCertExpiration = 2160 * time.Hour
	// CertBackdate represents the amount of time each certificate is backdated to try to avoid
	// clock drift issues.
	CertBackdate = 1 * time.Hour
	// CertLowerRotationRange represents the minimum fraction of time that we will wait when randomly
	// choosing our next certificate rotation
	// (i.e. rotation happens no earlier than 50% of the certificate's lifetime).
	CertLowerRotationRange = 0.5
	// CertUpperRotationRange represents the maximum fraction of time that we will wait when randomly
	// choosing our next certificate rotation
	// (i.e. rotation happens no later than 80% of the certificate's lifetime).
	CertUpperRotationRange = 0.8
	// MinNodeCertExpiration represents the minimum expiration for node certificates
	MinNodeCertExpiration = 1 * time.Hour
)

// A recoverableErr is a non-fatal error encountered signing a certificate,
// which means that the certificate issuance may be retried at a later time.
type recoverableErr struct {
	err error
}

// Error implements the error interface by delegating to the wrapped error.
func (r recoverableErr) Error() string {
	return r.err.Error()
}

// ErrNoLocalRootCA is an error type used to indicate that the local root CA
// certificate file does not exist.
var ErrNoLocalRootCA = errors.New("local root CA certificate does not exist")

// ErrNoValidSigner is an error type used to indicate that our RootCA doesn't have the ability to
// sign certificates.
var ErrNoValidSigner = recoverableErr{err: errors.New("no valid signer found")}

func init() {
	// NOTE(review): 5 appears to be cfssl's quietest log level, suppressing its
	// verbose output — confirm against the cfssl/log level constants.
	cflog.Level = 5
}

// CertPaths is a helper struct that keeps track of the paths of a
// Cert and corresponding Key
type CertPaths struct {
	Cert, Key string
}

// RootCA is the representation of everything we need to sign certificates
type RootCA struct {
	// Key will only be used by the original manager to put the private
	// key-material in raft, no signing operations depend on it.
	Key []byte
	// Cert includes the PEM encoded Certificate for the Root CA
	Cert []byte
	Pool *x509.CertPool
	// Digest of the serialized bytes of the certificate
	Digest digest.Digest
	// This signer will be nil if the node doesn't have the appropriate key material
	Signer cfsigner.Signer
}
// CanSign reports whether this RootCA carries all three elements required to
// issue certificates: a certificate, a cert pool, and a signer.
func (rca *RootCA) CanSign() bool {
	return rca.Cert != nil && rca.Pool != nil && rca.Signer != nil
}
// IssueAndSaveNewCertificates generates a new key-pair, signs it with the local root-ca, and returns a
// tls certificate
func (rca *RootCA) IssueAndSaveNewCertificates(kw KeyWriter, cn, ou, org string) (*tls.Certificate, error) {
	csr, key, err := GenerateNewCSR()
	if err != nil {
		return nil, errors.Wrap(err, "error when generating new node certs")
	}

	// Without local signing material we cannot proceed; ErrNoValidSigner is a
	// recoverableErr, so callers may retry later.
	if !rca.CanSign() {
		return nil, ErrNoValidSigner
	}

	// Obtain a signed Certificate
	certChain, err := rca.ParseValidateAndSignCSR(csr, cn, ou, org)
	if err != nil {
		return nil, errors.Wrap(err, "failed to sign node certificate")
	}

	// Create a valid TLSKeyPair out of the PEM encoded private key and certificate
	tlsKeyPair, err := tls.X509KeyPair(certChain, key)
	if err != nil {
		return nil, err
	}

	// Persist the new material before handing it back to the caller.
	if err := kw.Write(certChain, key, nil); err != nil {
		return nil, err
	}

	return &tlsKeyPair, nil
}
// RequestAndSaveNewCertificates gets new certificates issued, either by signing them locally if a signer is
// available, or by requesting them from the remote server at remoteAddr.
func (rca *RootCA) RequestAndSaveNewCertificates(ctx context.Context, kw KeyWriter, config CertificateRequestConfig) (*tls.Certificate, error) {
	// Create a new key/pair and CSR
	csr, key, err := GenerateNewCSR()
	if err != nil {
		return nil, errors.Wrap(err, "error when generating new node certs")
	}

	// Get the remote manager to issue a CA signed certificate for this node
	// Retry up to 5 times in case the manager we first try to contact isn't
	// responding properly (for example, it may have just been demoted).
	var signedCert []byte
	for i := 0; i != 5; i++ {
		signedCert, err = GetRemoteSignedCertificate(ctx, csr, rca.Pool, config)
		if err == nil {
			break
		}
	}
	if err != nil {
		return nil, err
	}

	// Trust, but verify.
	// Before we overwrite our local key + certificate, let's make sure the server gave us one that is valid
	// Create an X509Cert so we can .Verify()
	certBlock, _ := pem.Decode(signedCert)
	if certBlock == nil {
		return nil, errors.New("failed to parse certificate PEM")
	}
	X509Cert, err := x509.ParseCertificate(certBlock.Bytes)
	if err != nil {
		return nil, err
	}
	// Include our current root pool
	opts := x509.VerifyOptions{
		Roots: rca.Pool,
	}
	// Check to see if this certificate was signed by our CA, and isn't expired
	if _, err := X509Cert.Verify(opts); err != nil {
		return nil, err
	}

	// Create a valid TLSKeyPair out of the PEM encoded private key and certificate
	tlsKeyPair, err := tls.X509KeyPair(signedCert, key)
	if err != nil {
		return nil, err
	}

	// Managers also need the current cluster unlock key (KEK); retry a few
	// times since the remote may be momentarily unavailable.
	var kekUpdate *KEKData
	for i := 0; i < 5; i++ {
		kekUpdate, err = rca.getKEKUpdate(ctx, X509Cert, tlsKeyPair, config.Remotes)
		if err == nil {
			break
		}
	}
	if err != nil {
		return nil, err
	}

	// Persist certificate, key and KEK update together via the KeyWriter.
	if err := kw.Write(signedCert, key, kekUpdate); err != nil {
		return nil, err
	}

	return &tlsKeyPair, nil
}
// getKEKUpdate fetches the latest cluster unlock key (KEK) when this node's
// certificate carries the manager role. Workers — and managers talking to
// servers that do not implement GetUnlockKey — get an empty KEKData instead.
func (rca *RootCA) getKEKUpdate(ctx context.Context, cert *x509.Certificate, keypair tls.Certificate, r remotes.Remotes) (*KEKData, error) {
	// The node's role is encoded as an OU entry in the certificate subject.
	var managerRole bool
	for _, ou := range cert.Subject.OrganizationalUnit {
		if ou == ManagerRole {
			managerRole = true
			break
		}
	}

	if managerRole {
		// Dial a CA server over mutual TLS using the freshly issued keypair.
		mtlsCreds := credentials.NewTLS(&tls.Config{ServerName: CARole, RootCAs: rca.Pool, Certificates: []tls.Certificate{keypair}})
		conn, peer, err := getGRPCConnection(mtlsCreds, r)
		if err != nil {
			return nil, err
		}
		defer conn.Close()

		client := api.NewCAClient(conn)
		ctx, cancel := context.WithTimeout(ctx, 5*time.Second)
		defer cancel()
		response, err := client.GetUnlockKey(ctx, &api.GetUnlockKeyRequest{})
		if err != nil {
			if grpc.Code(err) == codes.Unimplemented { // if the server does not support keks, return as if no encryption key was specified
				return &KEKData{}, nil
			}

			// Down-weight the failing peer so the remotes picker prefers
			// healthier managers on the next attempt.
			r.Observe(peer, -remotes.DefaultObservationWeight)
			return nil, err
		}

		r.Observe(peer, remotes.DefaultObservationWeight)
		return &KEKData{KEK: response.UnlockKey, Version: response.Version.Index}, nil
	}

	// If this is a worker, set to never encrypt. We always want to set to the lock key to nil,
	// in case this was a manager that was demoted to a worker.
	return &KEKData{}, nil
}
// PrepareCSR wraps a raw CSR into a CFSSL sign request, overriding the
// subject (CN, OU, O) and the certificate's DNS alt names.
func PrepareCSR(csrBytes []byte, cn, ou, org string) cfsigner.SignRequest {
	// Every certificate carries its OU and CN as alt names so clients can dial
	// the role name directly; managers additionally get the CA alt name, since
	// they can be used for cert issuance.
	names := make([]string, 0, 3)
	names = append(names, ou, cn)
	if ou == ManagerRole {
		names = append(names, CARole)
	}

	return cfsigner.SignRequest{
		Request: string(csrBytes),
		// OU is used for Authentication of the node type. The CN has the random
		// node ID.
		Subject: &cfsigner.Subject{CN: cn, Names: []cfcsr.Name{{OU: ou, O: org}}},
		// Adding ou as DNS alt name, so clients can connect to ManagerRole and CARole
		Hosts: names,
	}
}
// ParseValidateAndSignCSR returns a signed certificate from a particular rootCA and a CSR.
func (rca *RootCA) ParseValidateAndSignCSR(csrBytes []byte, cn, ou, org string) ([]byte, error) {
	// Refuse outright when this RootCA holds no private key / signer.
	if !rca.CanSign() {
		return nil, ErrNoValidSigner
	}
	signedCert, err := rca.Signer.Sign(PrepareCSR(csrBytes, cn, ou, org))
	if err != nil {
		return nil, errors.Wrap(err, "failed to sign node certificate")
	}
	// Return the leaf with the first root certificate appended as its chain.
	return rca.AppendFirstRootPEM(signedCert)
}
// AppendFirstRootPEM appends the first certificate from this RootCA's cert
// bundle to the given cert bundle (which should already be encoded as a series
// of PEM-encoded certificate blocks).
func (rca *RootCA) AppendFirstRootPEM(cert []byte) ([]byte, error) {
	// Append the first root CA Cert to the certificate, to create a valid chain
	// Get the first Root CA Cert on the bundle
	firstRootCA, _, err := helpers.ParseOneCertificateFromPEM(rca.Cert)
	if err != nil {
		return nil, err
	}
	if len(firstRootCA) < 1 {
		return nil, errors.New("no valid Root CA certificates found")
	}
	// Convert the first root CA back to PEM
	firstRootCAPEM := helpers.EncodeCertificatePEM(firstRootCA[0])
	if firstRootCAPEM == nil {
		return nil, errors.New("error while encoding the Root CA certificate")
	}
	// Append this Root CA to the certificate to make [Cert PEM]\n[Root PEM][EOF]
	certChain := append(cert, firstRootCAPEM...)
	return certChain, nil
}
// NewRootCA creates a new RootCA object from unparsed PEM cert bundle and key byte
// slices. key may be nil, and in this case NewRootCA will return a RootCA
// without a signer.
func NewRootCA(certBytes, keyBytes []byte, certExpiry time.Duration) (RootCA, error) {
	// Parse all the certificates in the cert bundle
	parsedCerts, err := helpers.ParseCertificatesPEM(certBytes)
	if err != nil {
		return RootCA{}, err
	}
	// Check to see if we have at least one valid cert
	if len(parsedCerts) < 1 {
		return RootCA{}, errors.New("no valid Root CA certificates found")
	}
	// Create a Pool with all of the certificates found
	pool := x509.NewCertPool()
	for _, cert := range parsedCerts {
		// Check to see if all of the certificates are valid, self-signed root CA certs
		// (a root is self-signed, so its own key must verify its own signature).
		if err := cert.CheckSignature(cert.SignatureAlgorithm, cert.RawTBSCertificate, cert.Signature); err != nil {
			return RootCA{}, errors.Wrap(err, "error while validating Root CA Certificate")
		}
		pool.AddCert(cert)
	}
	// Calculate the digest for our Root CA bundle
	digest := digest.FromBytes(certBytes)
	if len(keyBytes) == 0 {
		// This RootCA does not have a valid signer.
		return RootCA{Cert: certBytes, Digest: digest, Pool: pool}, nil
	}
	var (
		passphraseStr              string
		passphrase, passphrasePrev []byte
		priv                       crypto.Signer
	)
	// Attempt two distinct passphrases, so we can do a hitless passphrase rotation
	if passphraseStr = os.Getenv(PassphraseENVVar); passphraseStr != "" {
		passphrase = []byte(passphraseStr)
	}
	if p := os.Getenv(PassphraseENVVarPrev); p != "" {
		passphrasePrev = []byte(p)
	}
	// Attempt to decrypt the current private-key with the passphrases provided
	// (current first, then the previous one to support rotation).
	priv, err = helpers.ParsePrivateKeyPEMWithPassword(keyBytes, passphrase)
	if err != nil {
		priv, err = helpers.ParsePrivateKeyPEMWithPassword(keyBytes, passphrasePrev)
		if err != nil {
			return RootCA{}, errors.Wrap(err, "malformed private key")
		}
	}
	// We will always use the first certificate inside of the root bundle as the active one
	if err := ensureCertKeyMatch(parsedCerts[0], priv.Public()); err != nil {
		return RootCA{}, err
	}
	signer, err := local.NewSigner(priv, parsedCerts[0], cfsigner.DefaultSigAlgo(priv), SigningPolicy(certExpiry))
	if err != nil {
		return RootCA{}, err
	}
	// If the key was loaded from disk unencrypted, but there is a passphrase set,
	// ensure it is encrypted, so it doesn't hit raft in plain-text
	keyBlock, _ := pem.Decode(keyBytes)
	if keyBlock == nil {
		// This RootCA does not have a valid signer.
		return RootCA{Cert: certBytes, Digest: digest, Pool: pool}, nil
	}
	if passphraseStr != "" && !x509.IsEncryptedPEMBlock(keyBlock) {
		keyBytes, err = EncryptECPrivateKey(keyBytes, passphraseStr)
		if err != nil {
			return RootCA{}, err
		}
	}
	return RootCA{Signer: signer, Key: keyBytes, Digest: digest, Cert: certBytes, Pool: pool}, nil
}
func ensureCertKeyMatch(cert *x509.Certificate, key crypto.PublicKey) error {
switch certPub := cert.PublicKey.(type) {
// TODO: Handle RSA keys.
case *ecdsa.PublicKey:
ecKey, ok := key.(*ecdsa.PublicKey)
if ok && certPub.X.Cmp(ecKey.X) == 0 && certPub.Y.Cmp(ecKey.Y) == 0 {
return nil
}
default:
return errors.New("unknown or unsupported certificate public key algorithm")
}
return errors.New("certificate key mismatch")
}
// GetLocalRootCA validates if the contents of the file are a valid self-signed
// CA certificate, and returns the PEM-encoded Certificate if so
func GetLocalRootCA(paths CertPaths) (RootCA, error) {
	// Check if we have a Certificate file
	cert, err := ioutil.ReadFile(paths.Cert)
	if err != nil {
		// Translate "file absent" into the sentinel error callers test for.
		if os.IsNotExist(err) {
			err = ErrNoLocalRootCA
		}
		return RootCA{}, err
	}
	key, err := ioutil.ReadFile(paths.Key)
	if err != nil {
		if !os.IsNotExist(err) {
			return RootCA{}, err
		}
		// There may not be a local key. It's okay to pass in a nil
		// key. We'll get a root CA without a signer.
		key = nil
	}
	return NewRootCA(cert, key, DefaultNodeCertExpiration)
}
// getGRPCConnection selects a remote peer and dials it with the given
// transport credentials, returning the connection and the peer chosen so
// callers can report the peer's success/failure back to the remotes tracker.
func getGRPCConnection(creds credentials.TransportCredentials, r remotes.Remotes) (*grpc.ClientConn, api.Peer, error) {
	peer, err := r.Select()
	if err != nil {
		return nil, api.Peer{}, err
	}
	// 5s dial timeout and a capped exponential reconnect backoff.
	opts := []grpc.DialOption{
		grpc.WithTransportCredentials(creds),
		grpc.WithTimeout(5 * time.Second),
		grpc.WithBackoffMaxDelay(5 * time.Second),
	}
	conn, err := grpc.Dial(peer.Addr, opts...)
	if err != nil {
		return nil, api.Peer{}, err
	}
	return conn, peer, nil
}
// GetRemoteCA returns the remote endpoint's CA certificate. If a non-empty
// digest d is supplied, the fetched certificate must hash to it, which lets
// us bootstrap trust over an intentionally-unverified TLS connection.
func GetRemoteCA(ctx context.Context, d digest.Digest, r remotes.Remotes) (RootCA, error) {
	// This TLS Config is intentionally using InsecureSkipVerify. We use the
	// digest instead to check the integrity of the CA certificate.
	insecureCreds := credentials.NewTLS(&tls.Config{InsecureSkipVerify: true})
	conn, peer, err := getGRPCConnection(insecureCreds, r)
	if err != nil {
		return RootCA{}, err
	}
	defer conn.Close()
	client := api.NewCAClient(conn)
	ctx, cancel := context.WithTimeout(ctx, 5*time.Second)
	defer cancel()
	// Observe the peer's health based on whether we ultimately succeed; this
	// deliberately reads the named return-path err at function exit.
	defer func() {
		if err != nil {
			r.Observe(peer, -remotes.DefaultObservationWeight)
			return
		}
		r.Observe(peer, remotes.DefaultObservationWeight)
	}()
	response, err := client.GetRootCACertificate(ctx, &api.GetRootCACertificateRequest{})
	if err != nil {
		return RootCA{}, err
	}
	if d != "" {
		verifier := d.Verifier()
		// BUG FIX: the original code tested a stale `err` here (d.Verifier()
		// returns no error) and ignored io.Copy's error, so a failed hash
		// write could silently skip digest verification.
		if _, err = io.Copy(verifier, bytes.NewReader(response.Certificate)); err != nil {
			return RootCA{}, errors.Wrap(err, "unexpected error hashing remote CA certificate")
		}
		if !verifier.Verified() {
			return RootCA{}, errors.Errorf("remote CA does not match fingerprint. Expected: %s", d.Hex())
		}
	}
	// Check the validity of the remote Cert
	_, err = helpers.ParseCertificatePEM(response.Certificate)
	if err != nil {
		return RootCA{}, err
	}
	// Create a Pool with our RootCACertificate
	pool := x509.NewCertPool()
	if !pool.AppendCertsFromPEM(response.Certificate) {
		return RootCA{}, errors.New("failed to append certificate to cert pool")
	}
	return RootCA{Cert: response.Certificate, Digest: digest.FromBytes(response.Certificate), Pool: pool}, nil
}
// CreateRootCA creates a Certificate authority for a new Swarm Cluster, potentially
// overwriting any existing CAs.
func CreateRootCA(rootCN string, paths CertPaths) (RootCA, error) {
	// Create a simple CSR for the CA using the default CA validator and policy
	req := cfcsr.CertificateRequest{
		CN:         rootCN,
		KeyRequest: &cfcsr.BasicKeyRequest{A: RootKeyAlgo, S: RootKeySize},
		CA:         &cfcsr.CAConfig{Expiry: RootCAExpiration},
	}
	// Generate the CA and get the certificate and private key
	cert, _, key, err := initca.New(&req)
	if err != nil {
		return RootCA{}, err
	}
	// Build the in-memory RootCA (validates the cert and wires up the signer).
	rootCA, err := NewRootCA(cert, key, DefaultNodeCertExpiration)
	if err != nil {
		return RootCA{}, err
	}
	// save the cert to disk
	if err := saveRootCA(rootCA, paths); err != nil {
		return RootCA{}, err
	}
	return rootCA, nil
}
// GetRemoteSignedCertificate submits a CSR to a remote CA server address,
// and that is part of a CA identified by a specific certificate pool.
// It then polls the CA (with exponential backoff) until a certificate
// matching our CSR is issued, and returns the signed certificate bytes.
func GetRemoteSignedCertificate(ctx context.Context, csr []byte, rootCAPool *x509.CertPool, config CertificateRequestConfig) ([]byte, error) {
	if rootCAPool == nil {
		return nil, errors.New("valid root CA pool required")
	}
	creds := config.Credentials
	if creds == nil {
		// This is our only non-MTLS request, and it happens when we are bootstrapping our TLS certs
		// We're using CARole as server name, so an external CA doesn't also have to have ManagerRole in the cert SANs
		creds = credentials.NewTLS(&tls.Config{ServerName: CARole, RootCAs: rootCAPool})
	}
	conn, peer, err := getGRPCConnection(creds, config.Remotes)
	if err != nil {
		return nil, err
	}
	defer conn.Close()
	// Create a CAClient to retrieve a new Certificate
	caClient := api.NewNodeCAClient(conn)
	// Send the Request and retrieve the request token
	issueRequest := &api.IssueNodeCertificateRequest{CSR: csr, Token: config.Token, Availability: config.Availability}
	issueResponse, err := caClient.IssueNodeCertificate(ctx, issueRequest)
	if err != nil {
		config.Remotes.Observe(peer, -remotes.DefaultObservationWeight)
		return nil, err
	}
	statusRequest := &api.NodeCertificateStatusRequest{NodeID: issueResponse.NodeID}
	expBackoff := events.NewExponentialBackoff(events.ExponentialBackoffConfig{
		Base:   time.Second,
		Factor: time.Second,
		Max:    30 * time.Second,
	})
	// Exponential backoff with Max of 30 seconds to wait for a new retry
	for {
		// Send the Request and retrieve the certificate
		// BUG FIX: the per-request context used to be cancelled via `defer`
		// inside this loop, leaking one timer/context per iteration until the
		// function returned. Cancel explicitly as soon as the RPC completes.
		reqCtx, cancel := context.WithTimeout(ctx, 5*time.Second)
		statusResponse, err := caClient.NodeCertificateStatus(reqCtx, statusRequest)
		cancel()
		if err != nil {
			config.Remotes.Observe(peer, -remotes.DefaultObservationWeight)
			return nil, err
		}
		// If the certificate was issued, return
		if statusResponse.Status.State == api.IssuanceStateIssued {
			if statusResponse.Certificate == nil {
				return nil, errors.New("no certificate in CertificateStatus response")
			}
			// The certificate in the response must match the CSR
			// we submitted. If we are getting a response for a
			// certificate that was previously issued, we need to
			// retry until the certificate gets updated per our
			// current request.
			if bytes.Equal(statusResponse.Certificate.CSR, csr) {
				config.Remotes.Observe(peer, remotes.DefaultObservationWeight)
				return statusResponse.Certificate.Certificate, nil
			}
		}
		// If we're still pending, the issuance failed, or the state is unknown
		// let's continue trying.
		expBackoff.Failure(nil, nil)
		time.Sleep(expBackoff.Proceed(nil))
	}
}
// readCertValidity returns the certificate issue and expiration time
func readCertValidity(kr KeyReader) (time.Time, time.Time, error) {
	var zero time.Time
	// Read the raw PEM certificate bytes from the reader.
	pemBytes, _, err := kr.Read()
	if err != nil {
		return zero, zero, err
	}
	// Decode the first PEM block and parse it as an x509 certificate.
	block, _ := pem.Decode(pemBytes)
	if block == nil {
		return zero, zero, errors.New("failed to decode certificate block")
	}
	parsed, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		return zero, zero, err
	}
	return parsed.NotBefore, parsed.NotAfter, nil
}
// saveRootCA persists only the root CA *certificate* to disk (the private
// key, if any, is not written here).
func saveRootCA(rootCA RootCA, paths CertPaths) error {
	// Make sure the necessary dirs exist and they are writable
	err := os.MkdirAll(filepath.Dir(paths.Cert), 0755)
	if err != nil {
		return err
	}
	// If the root certificate got returned successfully, save the rootCA to disk.
	// AtomicWriteFile avoids leaving a partially-written cert on crash.
	return ioutils.AtomicWriteFile(paths.Cert, rootCA.Cert, 0644)
}
// GenerateNewCSR returns a newly generated key and CSR signed with said key
func GenerateNewCSR() (csr, key []byte, err error) {
	// Use cfssl's default key parameters; ParseRequest generates the key and
	// produces a CSR signed with it in one step.
	req := &cfcsr.CertificateRequest{KeyRequest: cfcsr.NewBasicKeyRequest()}
	csr, key, err = cfcsr.ParseRequest(req)
	return
}
// EncryptECPrivateKey receives a PEM encoded private key and returns an encrypted
// AES256 version using a passphrase
// TODO: Make this method generic to handle RSA keys
func EncryptECPrivateKey(key []byte, passphraseStr string) ([]byte, error) {
	passphrase := []byte(passphraseStr)
	cipherType := x509.PEMCipherAES256
	keyBlock, _ := pem.Decode(key)
	if keyBlock == nil {
		// This RootCA does not have a valid signer.
		return nil, errors.New("error while decoding PEM key")
	}
	// Re-encode the decoded DER bytes as a passphrase-encrypted PEM block.
	// NOTE(review): x509.EncryptPEMBlock is deprecated in modern Go releases;
	// acceptable here given the codebase's toolchain, but worth revisiting.
	encryptedPEMBlock, err := x509.EncryptPEMBlock(rand.Reader,
		"EC PRIVATE KEY",
		keyBlock.Bytes,
		passphrase,
		cipherType)
	if err != nil {
		return nil, err
	}
	// A missing header map indicates the encryption metadata was not written.
	if encryptedPEMBlock.Headers == nil {
		return nil, errors.New("unable to encrypt key - invalid PEM file produced")
	}
	return pem.EncodeToMemory(encryptedPEMBlock), nil
}
| apache-2.0 |
sbhowmik89/oppia | scripts/pre_commit_linter.py | 11724 | # coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Pre-commit script for Oppia.
This script lints Python and JavaScript code, and prints a
list of lint errors to the terminal. If the directory path is passed,
it will lint all Python and JavaScript files in that directory; otherwise,
it will only lint files that have been touched in this commit.
This script ignores all filepaths contained within the excludeFiles
argument in .jscsrc. Note that, as a side-effect, these filepaths will also
prevent Python files in those paths from being linted.
IMPORTANT NOTES:
1. Before running this script, you must install third-party dependencies by
running
bash scripts/start.sh
at least once.
=====================
CUSTOMIZATION OPTIONS
=====================
1. To lint only files that have been touched in this commit
python scripts/pre_commit_linter.py
2. To lint all files in the folder or to lint just a specific file
python scripts/pre_commit_linter.py --path filepath
3. To lint a specific list of files (*.js/*.py only). Separate files by spaces
python scripts/pre_commit_linter.py --files file_1 file_2 ... file_n
Note that the root folder MUST be named 'oppia'.
"""
# Pylint has issues with the import order of argparse.
# pylint: disable=wrong-import-order
import argparse
import fnmatch
import multiprocessing
import os
import json
import subprocess
import sys
import time
# pylint: enable=wrong-import-order
_PARSER = argparse.ArgumentParser()
_EXCLUSIVE_GROUP = _PARSER.add_mutually_exclusive_group()
_EXCLUSIVE_GROUP.add_argument(
'--path',
help='path to the directory with files to be linted',
action='store')
_EXCLUSIVE_GROUP.add_argument(
'--files',
nargs='+',
help='specific files to be linted. Space separated list',
action='store')
if not os.getcwd().endswith('oppia'):
print ''
print 'ERROR Please run this script from the oppia root directory.'
_PARENT_DIR = os.path.abspath(os.path.join(os.getcwd(), os.pardir))
_PYLINT_PATH = os.path.join(_PARENT_DIR, 'oppia_tools', 'pylint-1.5.2')
if not os.path.exists(_PYLINT_PATH):
print ''
print 'ERROR Please run start.sh first to install pylint '
print ' and its dependencies.'
sys.exit(1)
_PATHS_TO_INSERT = [
_PYLINT_PATH,
os.getcwd(),
os.path.join(
_PARENT_DIR, 'oppia_tools', 'google_appengine_1.9.19',
'google_appengine', 'lib', 'webapp2-2.3'),
os.path.join(
_PARENT_DIR, 'oppia_tools', 'google_appengine_1.9.19',
'google_appengine', 'lib', 'yaml-3.10'),
os.path.join(
_PARENT_DIR, 'oppia_tools', 'google_appengine_1.9.19',
'google_appengine', 'lib', 'jinja2-2.6'),
os.path.join(
_PARENT_DIR, 'oppia_tools', 'google_appengine_1.9.19',
'google_appengine'),
os.path.join(_PARENT_DIR, 'oppia_tools', 'webtest-1.4.2'),
os.path.join(_PARENT_DIR, 'oppia_tools', 'numpy-1.6.1'),
os.path.join('third_party', 'gae-pipeline-1.9.17.0'),
os.path.join('third_party', 'bleach-1.2.2'),
os.path.join('third_party', 'gae-mapreduce-1.9.17.0'),
]
for path in _PATHS_TO_INSERT:
sys.path.insert(0, path)
from pylint import lint # pylint: disable=wrong-import-position
_MESSAGE_TYPE_SUCCESS = 'SUCCESS'
_MESSAGE_TYPE_FAILED = 'FAILED'
def _get_changed_filenames():
    """Returns a list of modified files (both staged and unstaged)

    Returns:
        a list of filenames of modified files
    """
    # Unstaged changes in the working tree.
    unstaged = subprocess.check_output(
        ['git', 'diff', '--name-only']).splitlines()
    # Staged (added/copied/modified) changes in the index.
    staged = subprocess.check_output(
        ['git', 'diff', '--cached', '--name-only',
         '--diff-filter=ACM']).splitlines()
    return unstaged + staged
def _get_glob_patterns_excluded_from_jscsrc(config_jscsrc):
"""Collects excludeFiles from jscsrc file.
Args:
- config_jscsrc: str. Path to .jscsrc file.
Returns:
a list of files in excludeFiles.
"""
with open(config_jscsrc) as f:
f.readline() # First three lines are comments
f.readline()
f.readline()
json_data = json.loads(f.read())
return json_data['excludeFiles']
def _get_all_files_in_directory(dir_path, excluded_glob_patterns):
"""Recursively collects all files in directory and
subdirectories of specified path.
Args:
- dir_path: str. Path to the folder to be linted.
- excluded_glob_patterns: set. Set of all files to be excluded.
Returns:
a list of files in directory and subdirectories without excluded files.
"""
files_in_directory = []
for _dir, _, files in os.walk(dir_path):
for file_name in files:
filename = os.path.relpath(
os.path.join(_dir, file_name), os.getcwd())
if not any([fnmatch.fnmatch(filename, gp) for gp in
excluded_glob_patterns]):
files_in_directory.append(filename)
return files_in_directory
def _lint_js_files(node_path, jscs_path, config_jscsrc, files_to_lint, stdout,
                   result):
    """Prints a list of lint errors in the given list of JavaScript files.

    Args:
    - node_path: str. Path to the node binary.
    - jscs_path: str. Path to the JSCS binary.
    - config_jscsrc: str. Configuration args for the call to the JSCS binary.
    - files_to_lint: list of str. A list of filepaths to lint.
    - stdout: multiprocessing.Queue. A queue to store JSCS outputs
    - result: multiprocessing.Queue. A queue to put results of test

    Returns:
        None
    """
    start_time = time.time()
    num_files_with_errors = 0

    num_js_files = len(files_to_lint)
    if not files_to_lint:
        # Still push an (empty) summary so the consumer's result.get() does
        # not block forever.
        result.put('')
        print 'There are no JavaScript files to lint.'
        return

    # One JSCS subprocess per file; any output on stdout means lint errors.
    jscs_cmd_args = [node_path, jscs_path, config_jscsrc]
    for _, filename in enumerate(files_to_lint):
        proc_args = jscs_cmd_args + [filename]
        proc = subprocess.Popen(
            proc_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)

        linter_stdout, linter_stderr = proc.communicate()
        # stderr output indicates the linter itself broke, not a lint error.
        if linter_stderr:
            print 'LINTER FAILED'
            print linter_stderr
            sys.exit(1)

        if linter_stdout:
            num_files_with_errors += 1
            stdout.put(linter_stdout)

    if num_files_with_errors:
        result.put('%s    %s JavaScript files' % (
            _MESSAGE_TYPE_FAILED, num_files_with_errors))
    else:
        result.put('%s   %s JavaScript files linted (%.1f secs)' % (
            _MESSAGE_TYPE_SUCCESS, num_js_files, time.time() - start_time))
def _lint_py_files(config_pylint, files_to_lint, result):
    """Prints a list of lint errors in the given list of Python files.

    Args:
    - config_pylint: str. Path to the .pylintrc file.
    - files_to_lint: list of str. A list of filepaths to lint.
    - result: multiprocessing.Queue. A queue to put results of test

    Returns:
        None
    """
    start_time = time.time()
    are_there_errors = False

    num_py_files = len(files_to_lint)
    if not files_to_lint:
        # Still push an (empty) summary so the consumer's result.get() does
        # not block forever.
        result.put('')
        print 'There are no Python files to lint.'
        return

    try:
        # This prints output to the console.
        lint.Run(files_to_lint + [config_pylint])
    except SystemExit as e:
        # pylint's Run() always exits via SystemExit; a non-zero code means
        # lint errors were found.
        if str(e) != '0':
            are_there_errors = True

    if are_there_errors:
        result.put('%s    Python linting failed' % _MESSAGE_TYPE_FAILED)
    else:
        result.put('%s   %s Python files linted (%.1f secs)' % (
            _MESSAGE_TYPE_SUCCESS, num_py_files, time.time() - start_time))
def _pre_commit_linter():
    """This function is used to check if this script is ran from
    root directory, node-jscs dependencies are installed
    and pass JSCS binary path
    """
    jscsrc_path = os.path.join(os.getcwd(), '.jscsrc')
    pylintrc_path = os.path.join(os.getcwd(), '.pylintrc')

    parsed_args = _PARSER.parse_args()
    # Determine the set of files to lint: an explicit directory/file via
    # --path, an explicit list via --files, or (default) git-changed files.
    if parsed_args.path:
        input_path = os.path.join(os.getcwd(), parsed_args.path)
        if not os.path.exists(input_path):
            print 'Could not locate file or directory %s. Exiting.' % input_path
            print '----------------------------------------'
            sys.exit(1)
        if os.path.isfile(input_path):
            all_files = [input_path]
        else:
            excluded_glob_patterns = _get_glob_patterns_excluded_from_jscsrc(
                jscsrc_path)
            all_files = _get_all_files_in_directory(
                input_path, excluded_glob_patterns)
    elif parsed_args.files:
        valid_filepaths = []
        invalid_filepaths = []
        for f in parsed_args.files:
            if os.path.isfile(f):
                valid_filepaths.append(f)
            else:
                invalid_filepaths.append(f)
        if invalid_filepaths:
            print ('The following file(s) do not exist: %s\n'
                   'Exiting.' % invalid_filepaths)
            sys.exit(1)
        all_files = valid_filepaths
    else:
        all_files = _get_changed_filenames()

    config_jscsrc = '--config=%s' % jscsrc_path
    config_pylint = '--rcfile=%s' % pylintrc_path

    parent_dir = os.path.abspath(os.path.join(os.getcwd(), os.pardir))
    node_path = os.path.join(
        parent_dir, 'oppia_tools', 'node-4.2.1', 'bin', 'node')
    jscs_path = os.path.join(
        parent_dir, 'node_modules', 'jscs', 'bin', 'jscs')

    if not os.path.exists(jscs_path):
        print ''
        print 'ERROR    Please run start.sh first to install node-jscs '
        print '         and its dependencies.'
        sys.exit(1)

    js_files_to_lint = [
        filename for filename in all_files if filename.endswith('.js')]
    py_files_to_lint = [
        filename for filename in all_files if filename.endswith('.py')]

    # Run the JS and Python linters in parallel, each in its own process,
    # communicating back through multiprocessing queues.
    js_result = multiprocessing.Queue()
    linting_processes = []
    js_stdout = multiprocessing.Queue()
    linting_processes.append(multiprocessing.Process(
        target=_lint_js_files, args=(node_path, jscs_path, config_jscsrc,
                                     js_files_to_lint, js_stdout, js_result)))
    py_result = multiprocessing.Queue()
    linting_processes.append(multiprocessing.Process(
        target=_lint_py_files,
        args=(config_pylint, py_files_to_lint, py_result)))
    print 'Starting Javascript and Python Linting'
    print '----------------------------------------'
    for process in linting_processes:
        process.start()

    for process in linting_processes:
        process.join()

    # Drain and print the per-file JSCS output collected by the JS process.
    js_messages = []
    while not js_stdout.empty():
        js_messages.append(js_stdout.get())

    print ''
    print '\n'.join(js_messages)
    print '----------------------------------------'
    summary_messages = []
    # Each linter process puts exactly one summary message on its queue.
    summary_messages.append(js_result.get())
    summary_messages.append(py_result.get())
    print '\n'.join(summary_messages)
    print ''

    # Exit non-zero if either linter reported failures.
    if any([message.startswith(_MESSAGE_TYPE_FAILED) for message in
            summary_messages]):
        sys.exit(1)
# Script entry point: run the linter only when executed directly, not when
# this module is imported.
if __name__ == '__main__':
    _pre_commit_linter()
| apache-2.0 |
jt70471/aws-sdk-cpp | aws-cpp-sdk-awstransfer/source/model/ListedUser.cpp | 2777 | /**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#include <aws/awstransfer/model/ListedUser.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <utility>
using namespace Aws::Utils::Json;
using namespace Aws::Utils;
namespace Aws
{
namespace Transfer
{
namespace Model
{
// Default-construct a ListedUser with every field marked as "not set", so
// Jsonize() emits nothing until fields are explicitly assigned.
ListedUser::ListedUser()
{
  m_arnHasBeenSet = false;
  m_homeDirectoryHasBeenSet = false;
  m_homeDirectoryType = HomeDirectoryType::NOT_SET;
  m_homeDirectoryTypeHasBeenSet = false;
  m_roleHasBeenSet = false;
  m_sshPublicKeyCount = 0;
  m_sshPublicKeyCountHasBeenSet = false;
  m_userNameHasBeenSet = false;
}
// Construct from a JSON view: delegate to the default constructor to reset
// all "has been set" flags, then apply the JSON-driven assignment operator.
ListedUser::ListedUser(JsonView jsonValue)
  : ListedUser()
{
  *this = jsonValue;
}
// Assigns fields from a JSON view. Only keys present in the JSON are copied;
// each copied field's "...HasBeenSet" flag is raised so that Jsonize() can
// round-trip exactly the fields that were provided.
ListedUser& ListedUser::operator =(JsonView jsonValue)
{
  if(jsonValue.ValueExists("Arn"))
  {
    m_arn = jsonValue.GetString("Arn");
    m_arnHasBeenSet = true;
  }
  if(jsonValue.ValueExists("HomeDirectory"))
  {
    m_homeDirectory = jsonValue.GetString("HomeDirectory");
    m_homeDirectoryHasBeenSet = true;
  }
  if(jsonValue.ValueExists("HomeDirectoryType"))
  {
    // Enum values arrive as strings and are mapped via the generated mapper.
    m_homeDirectoryType = HomeDirectoryTypeMapper::GetHomeDirectoryTypeForName(jsonValue.GetString("HomeDirectoryType"));
    m_homeDirectoryTypeHasBeenSet = true;
  }
  if(jsonValue.ValueExists("Role"))
  {
    m_role = jsonValue.GetString("Role");
    m_roleHasBeenSet = true;
  }
  if(jsonValue.ValueExists("SshPublicKeyCount"))
  {
    m_sshPublicKeyCount = jsonValue.GetInteger("SshPublicKeyCount");
    m_sshPublicKeyCountHasBeenSet = true;
  }
  if(jsonValue.ValueExists("UserName"))
  {
    m_userName = jsonValue.GetString("UserName");
    m_userNameHasBeenSet = true;
  }
  return *this;
}
// Serializes this object to JSON, emitting only the fields whose
// "...HasBeenSet" flag is raised (the inverse of operator=).
JsonValue ListedUser::Jsonize() const
{
  JsonValue payload;

  if(m_arnHasBeenSet)
  {
   payload.WithString("Arn", m_arn);
  }
  if(m_homeDirectoryHasBeenSet)
  {
   payload.WithString("HomeDirectory", m_homeDirectory);
  }
  if(m_homeDirectoryTypeHasBeenSet)
  {
   // Enum is serialized back to its canonical string name.
   payload.WithString("HomeDirectoryType", HomeDirectoryTypeMapper::GetNameForHomeDirectoryType(m_homeDirectoryType));
  }
  if(m_roleHasBeenSet)
  {
   payload.WithString("Role", m_role);
  }
  if(m_sshPublicKeyCountHasBeenSet)
  {
   payload.WithInteger("SshPublicKeyCount", m_sshPublicKeyCount);
  }
  if(m_userNameHasBeenSet)
  {
   payload.WithString("UserName", m_userName);
  }
  return payload;
}
} // namespace Model
} // namespace Transfer
} // namespace Aws
| apache-2.0 |
nbuchanan/node-gdal | deps/libgdal/gdal/ogr/ogrsf_frmts/db2/ogrdb2cli.cpp | 59095 | /*****************************************************************************
*
* Project: DB2 Spatial driver
* Purpose: Implements DB2-specific SQL support.
* Author: David Adler, dadler at adtechgeospatial dot com
*
*****************************************************************************
* Copyright (c) 2015, David Adler
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
****************************************************************************/
#include "cpl_conv.h"
#include "ogr_db2.h"
// From cpl_odbc.cpp
#include "cpl_vsi.h"
#include "cpl_string.h"
#include "cpl_error.h"
#ifndef SQLColumns_TABLE_CAT
#define SQLColumns_TABLE_CAT 1
#define SQLColumns_TABLE_SCHEM 2
#define SQLColumns_TABLE_NAME 3
#define SQLColumns_COLUMN_NAME 4
#define SQLColumns_DATA_TYPE 5
#define SQLColumns_TYPE_NAME 6
#define SQLColumns_COLUMN_SIZE 7
#define SQLColumns_BUFFER_LENGTH 8
#define SQLColumns_DECIMAL_DIGITS 9
#define SQLColumns_NUM_PREC_RADIX 10
#define SQLColumns_NULLABLE 11
#define SQLColumns_REMARKS 12
#define SQLColumns_COLUMN_DEF 13
#define SQLColumns_SQL_DATA_TYPE 14
#define SQLColumns_SQL_DATETIME_SUB 15
#define SQLColumns_CHAR_OCTET_LENGTH 16
#define SQLColumns_ORDINAL_POSITION 17
#define SQLColumns_IS_NULLABLE 18
#endif /* ndef SQLColumns_TABLE_CAT */
// Most of the code here is copied from cpl_odbc.cpp
// This was done to resolve issues with different header files
// for MS ODBC and DB2 CLI.
/************************************************************************/
/* OGRDB2Session() */
/************************************************************************/
// Construct an unconnected session: no CLI environment or connection handle
// is allocated here (that happens in EstablishSession()), and the session
// starts outside any explicit transaction, in auto-commit mode.
OGRDB2Session::OGRDB2Session()
{
    DB2_DEBUG_ENTER("OGRDB2Session::OGRDB2Session");
    m_szLastError[0] = '\0';
    m_hEnv = NULL;
    m_hDBC = NULL;
    m_bInTransaction = FALSE;
    m_bAutoCommit = TRUE;
}
/************************************************************************/
/* ~OGRDB2Session() */
/************************************************************************/
// Destructor: releases the connection and environment handles via
// CloseSession() (a no-op if the session was never established).
OGRDB2Session::~OGRDB2Session()
{
    DB2_DEBUG_ENTER("OGRDB2Session::~OGRDB2Session");
    CloseSession();
}
/************************************************************************/
/* RollbackTransaction() */
/************************************************************************/
// Override parent method, don't invoke Fail() which clears the message
// Rolls back the current transaction, if any. Returns TRUE on success or
// when no transaction was active; FALSE if SQLEndTran() reports an error.
int OGRDB2Session::RollbackTransaction()
{
    DB2_DEBUG_ENTER("OGRDB2Session::RollbackTransaction");
    if (m_bInTransaction)
    {
        // Clear the flag before issuing the rollback so a failure inside
        // SQLEndTran() cannot re-trigger a rollback from Failed().
        m_bInTransaction = FALSE;
        CPLDebug("OGRDB2Session::RollbackTransaction", "In transaction, rollback");
        if( SQLEndTran( SQL_HANDLE_DBC, m_hDBC, SQL_ROLLBACK ))
        {
            // Any non-zero CLI return code is treated as failure here.
            return FALSE;
        }
    }
    return TRUE;
}
/************************************************************************/
/* CloseSession() */
/************************************************************************/
// Disconnects and frees the CLI connection and environment handles.
// Safe to call repeatedly; always returns TRUE.
int OGRDB2Session::CloseSession()
{
    if( m_hDBC!=NULL )
    {
        // Warn (but proceed) if the caller left a transaction open; the
        // disconnect below will implicitly end it.
        if ( IsInTransaction() )
            CPLError( CE_Warning, CPLE_AppDefined, "Closing session with active transactions." );
        CPLDebug( "ODBC", "SQLDisconnect()" );
        SQLDisconnect( m_hDBC );
        SQLFreeConnect( m_hDBC );
        m_hDBC = NULL;
    }
    if( m_hEnv!=NULL )
    {
        SQLFreeEnv( m_hEnv );
        m_hEnv = NULL;
    }
    return TRUE;
}
/************************************************************************/
/* ClearTransaction() */
/************************************************************************/
// Returns the connection to its default auto-commit state, leaving any
// manual-commit transaction mode. Returns TRUE on success (or when already
// in auto-commit), FALSE if a CLI call fails.
int OGRDB2Session::ClearTransaction()
{
#if (ODBCVER >= 0x0300)
    if (m_bAutoCommit)
        return TRUE;

    SQLUINTEGER bAutoCommit;
    /* See if we already in manual commit mode */
    if ( Failed( SQLGetConnectAttr( m_hDBC, SQL_ATTR_AUTOCOMMIT, &bAutoCommit, sizeof(SQLUINTEGER), NULL) ) )
        return FALSE;

    if (bAutoCommit == SQL_AUTOCOMMIT_OFF)
    {
        /* switch the connection to auto commit mode (default) */
        if( Failed( SQLSetConnectAttr( m_hDBC, SQL_ATTR_AUTOCOMMIT, (SQLPOINTER)SQL_AUTOCOMMIT_ON, 0 ) ) )
            return FALSE;
    }

    m_bInTransaction = FALSE;
    m_bAutoCommit = TRUE;
#endif
    return TRUE;
}
/************************************************************************/
/* CommitTransaction() */
/************************************************************************/
// Starts an explicit transaction by switching the connection to manual
// commit mode. Returns TRUE on success, FALSE if a CLI call fails.
int OGRDB2Session::BeginTransaction()
{
#if (ODBCVER >= 0x0300)
    CPLDebug("int OGRDB2Session::BeginTransaction","Enter");

    SQLUINTEGER bAutoCommit;
    /* See if we already in manual commit mode */
    if ( Failed( SQLGetConnectAttr( m_hDBC, SQL_ATTR_AUTOCOMMIT, &bAutoCommit, sizeof(SQLUINTEGER), NULL) ) )
        return FALSE;

    if (bAutoCommit == SQL_AUTOCOMMIT_ON)
    {
        /* switch the connection to manual commit mode */
        if( Failed( SQLSetConnectAttr( m_hDBC, SQL_ATTR_AUTOCOMMIT, (SQLPOINTER)SQL_AUTOCOMMIT_OFF, 0 ) ) )
            return FALSE;
    }

    m_bInTransaction = TRUE;
    m_bAutoCommit = FALSE;
#endif
    return TRUE;
}
/************************************************************************/
/* CommitTransaction() */
/************************************************************************/
// Commits the current transaction, if any. The connection stays in manual
// commit mode; use ClearTransaction() to return to auto-commit. Returns
// TRUE on success or when no transaction was active.
int OGRDB2Session::CommitTransaction()
{
#if (ODBCVER >= 0x0300)
    if (m_bInTransaction)
    {
        if( Failed( SQLEndTran( SQL_HANDLE_DBC, m_hDBC, SQL_COMMIT ) ) )
        {
            return FALSE;
        }
        m_bInTransaction = FALSE;
    }
#endif
    return TRUE;
}
/************************************************************************/
/* Failed() */
/* */
/* Test if a return code indicates failure, return TRUE if that */
/* is the case. Also update error text. */
/************************************************************************/
int OGRDB2Session::Failed( int nRetCode, HSTMT hStmt )
{
    SQLCHAR achSQLState[SQL_MAX_MESSAGE_LENGTH];
    SQLINTEGER nNativeError;
    SQLSMALLINT nTextLength=0;

    // Reset the saved error text; GetLastError() returns this buffer.
    m_szLastError[0] = '\0';

    // Only SQL_SUCCESS and SQL_SUCCESS_WITH_INFO count as success;
    // everything else (including SQL_NO_DATA) is treated as failure.
    if( nRetCode == SQL_SUCCESS || nRetCode == SQL_SUCCESS_WITH_INFO )
        return FALSE;

    // Capture the driver diagnostic into m_szLastError.
    // NOTE(review): SQLError() is deprecated in ODBC 3.x in favour of
    // SQLGetDiagRec(); retained here as-is.
    SQLError( m_hEnv, m_hDBC, hStmt, achSQLState, &nNativeError,
              (SQLCHAR *) m_szLastError, sizeof(m_szLastError)-1,
              &nTextLength );
    m_szLastError[nTextLength] = '\0';

    // A hard error while a transaction is open aborts the transaction.
    if( nRetCode == SQL_ERROR && m_bInTransaction )
        RollbackTransaction();

    return TRUE;
}
/************************************************************************/
/* EstablishSession() */
/************************************************************************/
/**
* Connect to database and logon.
*
* @param pszDSN The name of the DSN being used to connect. This is not
* optional.
*
 * @param pszUserid the userid to logon as, may be NULL if not required,
* or provided by the DSN.
*
* @param pszPassword the password to logon with. May be NULL if not required
* or provided by the DSN.
*
* @return TRUE on success or FALSE on failure. Call GetLastError() to get
* details on failure.
*/
int OGRDB2Session::EstablishSession( const char *pszDSN,
                                     const char *pszUserid,
                                     const char *pszPassword )
{
    // Drop any previous connection before allocating new handles.
    CloseSession();

    if( Failed( SQLAllocEnv( &m_hEnv ) ) )
        return FALSE;

    if( Failed( SQLAllocConnect( m_hEnv, &m_hDBC ) ) )
    {
        CloseSession();
        return FALSE;
    }

#ifdef _MSC_VER
#pragma warning( push )
#pragma warning( disable : 4996 ) /* warning C4996: 'SQLSetConnectOption': ODBC API: SQLSetConnectOption is deprecated. Please use SQLSetConnectAttr instead */
#endif
    SQLSetConnectOption( m_hDBC,SQL_LOGIN_TIMEOUT,30 );
#ifdef _MSC_VER
#pragma warning( pop )
#endif

    // Normalize missing credentials to empty strings for SQLConnect().
    if( pszUserid == NULL )
        pszUserid = "";
    if( pszPassword == NULL )
        pszPassword = "";

    CPLDebug( "OGRDB2Session::EstablishSession",
              "pszDSN: '%s'", pszDSN );

    int bFailed;

    // An '=' in the DSN means it is a full connection string rather
    // than a plain data source name.
    if( strstr(pszDSN,"=") != NULL )
    {
        SQLCHAR szOutConnString[1024];
        SQLSMALLINT nOutConnStringLen = 0;

        // Bug fix: SQLDriverConnect() may leave the output buffer
        // untouched on failure; initialize it so the CPLDebug() below
        // never prints uninitialized memory.
        szOutConnString[0] = '\0';

        CPLDebug( "OGRDB2Session::EstablishSession",
                  "SQLDriverConnect(%s)", pszDSN );
        bFailed = Failed(
            SQLDriverConnect( m_hDBC, NULL,
                              (SQLCHAR *) pszDSN, (SQLSMALLINT)strlen(pszDSN),
                              szOutConnString, sizeof(szOutConnString),
                              &nOutConnStringLen, SQL_DRIVER_NOPROMPT ) );
        CPLDebug( "OGRDB2Session::EstablishSession",
                  "szOutConnString: '%s'", szOutConnString );
    }
    else
    {
        CPLDebug( "OGRDB2Session::EstablishSession",
                  "SQLConnect(%s)", pszDSN );
        bFailed = Failed(
            SQLConnect( m_hDBC, (SQLCHAR *) pszDSN, SQL_NTS,
                        (SQLCHAR *) pszUserid, SQL_NTS,
                        (SQLCHAR *) pszPassword, SQL_NTS ) );
    }

    if( bFailed )
    {
        CPLDebug( "OGRDB2Session::EstablishSession",
                  "... failed: %s", GetLastError() );
        CloseSession();
        return FALSE;
    }

    return TRUE;
}
/************************************************************************/
/* GetLastError() */
/************************************************************************/
/**
* Returns the last ODBC error message.
*
* @return pointer to an internal buffer with the error message in it.
* Do not free or alter. Will be an empty (but not NULL) string if there is
* no pending error info.
*/
const char *OGRDB2Session::GetLastError()
{
    // Internal buffer filled by Failed(); empty string when no error
    // is pending.  Callers must not free or modify it.
    return m_szLastError;
}
/************************************************************************/
/* ==================================================================== */
/* OGRDB2Statement */
/* ==================================================================== */
/************************************************************************/
/************************************************************************/
/* OGRDB2Statement() */
/************************************************************************/
OGRDB2Statement::OGRDB2Statement( OGRDB2Session *poSession )
{
    DB2_DEBUG_ENTER("OGRDB2Statement::OGRDB2Statement");
    // Bug fix: initialize every member BEFORE the statement-handle
    // allocation.  The original returned early on SQLAllocStmt()
    // failure without initializing the column/statement members, and
    // the destructor's Clear() then operated on garbage pointers.
    m_bPrepared = FALSE;
    m_poSession = poSession;
    m_hStmt = NULL;

    m_nColCount = 0;
    m_papszColNames = NULL;
    m_panColType = NULL;
    m_papszColTypeNames = NULL;
    m_panColSize = NULL;
    m_panColPrecision = NULL;
    m_panColNullable = NULL;
    m_papszColColumnDef = NULL;

    m_papszColValues = NULL;
    m_panColValueLengths = NULL;

    m_pszStatement = NULL;
    m_nStatementMax = 0;
    m_nStatementLen = 0;

    // Allocate the ODBC statement handle last so a failure leaves the
    // object in a fully initialized (empty) state.
    if( Failed(
            SQLAllocStmt( poSession->GetConnection(), &m_hStmt ) ) )
    {
        m_hStmt = NULL;
        return;
    }
}
/************************************************************************/
/* ~OGRDB2Statement() */
/************************************************************************/
OGRDB2Statement::~OGRDB2Statement()
{
    DB2_DEBUG_ENTER("OGRDB2Statement::~OGRDB2Statement");
    // Clear() closes the cursor (SQL_CLOSE) and releases all column
    // buffers; it must run before the handle itself is dropped.
    Clear();

    if( m_hStmt != NULL )
        SQLFreeStmt( m_hStmt, SQL_DROP );
}
/************************************************************************/
/* DB2Prepare() */
/************************************************************************/
int OGRDB2Statement::DB2Prepare(const char *pszCallingFunction)
{
if ( m_poSession == NULL || m_hStmt == NULL )
{
// we should post an error.
return FALSE;
}
CPLDebug(pszCallingFunction, "prepare: '%s'", m_pszStatement);
m_nLastRetCode = SQLPrepare( m_hStmt, (SQLCHAR *) m_pszStatement, SQL_NTS );
if (m_nLastRetCode != SQL_SUCCESS
&& m_nLastRetCode != SQL_SUCCESS_WITH_INFO)
{
Failed(m_nLastRetCode);
return FALSE;
}
m_bPrepared = TRUE;
return TRUE;
}
/************************************************************************/
/* DB2BindParameterIn() */
/************************************************************************/
int OGRDB2Statement::DB2BindParameterIn(const char *pszCallingFunction,
int nBindNum,
int nValueType,
int nParameterType,
int nLen,
void * pValuePointer)
{
if ( m_poSession == NULL || m_hStmt == NULL )
{
// we should post an error.
return FALSE;
}
CPLDebug(pszCallingFunction,
"bind: nBindNum: %d; p: %p; nLen: %d; vType: %d; pType: %d",
nBindNum, pValuePointer, nLen, nValueType, nParameterType);
m_nLastRetCode = SQLBindParameter(m_hStmt,
(SQLUSMALLINT) (nBindNum),
SQL_PARAM_INPUT,
(SQLSMALLINT) nValueType,
(SQLSMALLINT) nParameterType,
nLen, 0, pValuePointer,
0, NULL);
if (m_nLastRetCode != SQL_SUCCESS
&& m_nLastRetCode != SQL_SUCCESS_WITH_INFO)
{
Failed(m_nLastRetCode);
return FALSE;
}
return TRUE;
}
/************************************************************************/
/* DB2Execute() */
/************************************************************************/
int OGRDB2Statement::DB2Execute(const char *pszCallingFunction)
{
if ( m_poSession == NULL || m_hStmt == NULL )
{
// we should post an error.
return FALSE;
}
CPLDebug(pszCallingFunction, "execute: '%s'", m_pszStatement);
if (m_bPrepared)
{
m_nLastRetCode = SQLExecute(m_hStmt);
} else {
m_nLastRetCode = SQLExecDirect( m_hStmt, (SQLCHAR *) m_pszStatement, SQL_NTS );
}
if (m_nLastRetCode != SQL_SUCCESS
&& m_nLastRetCode != SQL_SUCCESS_WITH_INFO)
{
Failed(m_nLastRetCode);
return FALSE;
}
return CollectResultsInfo();
}
/************************************************************************/
/* ExecuteSQL() */
/************************************************************************/
/**
* Execute an SQL statement.
*
* This method will execute the passed (or stored) SQL statement,
* and initialize information about the resultset if there is one.
* If a NULL statement is passed, the internal stored statement that
* has been previously set via Append() or Appendf() calls will be used.
*
* @param pszStatement the SQL statement to execute, or NULL if the
* internally saved one should be used.
*
* @return TRUE on success or FALSE if there is an error. Error details
* can be fetched with OGRODBCSession::GetLastError().
*/
int OGRDB2Statement::ExecuteSQL( const char *pszStatement )
{
    if( m_poSession == NULL || m_hStmt == NULL )
    {
        // we should post an error.
        return FALSE;
    }

    // A non-NULL argument replaces any statement text accumulated via
    // Append()/Appendf(); NULL executes the stored text.
    if( pszStatement != NULL )
    {
        Clear();
        Append( pszStatement );
    }

#if (ODBCVER >= 0x0300)
    if ( !m_poSession->IsInTransaction() )
    {
        /* commit pending transactions and set to autocommit mode*/
        m_poSession->ClearTransaction();
    }
#endif

    if( Failed(
            SQLExecDirect( m_hStmt, (SQLCHAR *) m_pszStatement, SQL_NTS ) ) )
        return FALSE;

    // Populate the per-column metadata arrays for the resultset.
    return CollectResultsInfo();
}
/************************************************************************/
/* CollectResultsInfo() */
/************************************************************************/
/* Gather column count, names, types and related metadata for the       */
/* current resultset into the m_papszCol*/m_panCol* member arrays.      */
/* Returns TRUE on success, FALSE on any ODBC failure.                  */
int OGRDB2Statement::CollectResultsInfo()
{
    if( m_poSession == NULL || m_hStmt == NULL )
    {
        // we should post an error.
        return FALSE;
    }

    if( Failed( SQLNumResultCols(m_hStmt,&m_nColCount) ) )
        return FALSE;

/* -------------------------------------------------------------------- */
/*      Allocate per column information.                                */
/* -------------------------------------------------------------------- */
    // The char* arrays are allocated one slot larger and zero-filled so
    // they remain NULL-terminated, as Clear() releases them with
    // CSLDestroy()/CPLFree().
    m_papszColNames = (char **) CPLCalloc(sizeof(char *),(m_nColCount+1));
    m_papszColValues = (char **) CPLCalloc(sizeof(char *),(m_nColCount+1));
    m_panColValueLengths = (CPL_SQLLEN *) CPLCalloc(sizeof(CPL_SQLLEN),(m_nColCount+1));

    m_panColType = (SQLSMALLINT *) CPLCalloc(sizeof(SQLSMALLINT),m_nColCount);
    m_papszColTypeNames = (char **) CPLCalloc(sizeof(char *),(m_nColCount+1));
    m_panColSize = (CPL_SQLULEN *) CPLCalloc(sizeof(CPL_SQLULEN),m_nColCount);
    m_panColPrecision = (SQLSMALLINT *) CPLCalloc(sizeof(SQLSMALLINT),m_nColCount);
    m_panColNullable = (SQLSMALLINT *) CPLCalloc(sizeof(SQLSMALLINT),m_nColCount);
    m_papszColColumnDef = (char **) CPLCalloc(sizeof(char *),(m_nColCount+1));

/* -------------------------------------------------------------------- */
/*      Fetch column descriptions.                                      */
/* -------------------------------------------------------------------- */
    for( SQLUSMALLINT iCol = 0; iCol < m_nColCount; iCol++ )
    {
        SQLCHAR szName[256];
        SQLSMALLINT nNameLength = 0;

        // ODBC column numbers are 1-based, hence iCol+1.
        if ( Failed( SQLDescribeCol(m_hStmt, iCol+1,
                                    szName, sizeof(szName), &nNameLength,
                                    m_panColType + iCol,
                                    m_panColSize + iCol,
                                    m_panColPrecision + iCol,
                                    m_panColNullable + iCol) ) )
            return FALSE;

        szName[nNameLength] = '\0';  // Paranoid; the string should be
                                     // null-terminated by the driver
        m_papszColNames[iCol] = CPLStrdup((const char*)szName);

        // SQLDescribeCol() fetches just a subset of column attributes.
        // In addition to above data we need data type name.
        if ( Failed( SQLColAttribute(m_hStmt, iCol + 1, SQL_DESC_TYPE_NAME,
                                     szName, sizeof(szName),
                                     &nNameLength, NULL) ) )
            return FALSE;

        szName[nNameLength] = '\0';  // Paranoid
        m_papszColTypeNames[iCol] = CPLStrdup((const char*)szName);
    }

    return TRUE;
}
/************************************************************************/
/* GetRowCountAffected() */
/************************************************************************/
int OGRDB2Statement::GetRowCountAffected()
{
SQLLEN nResultCount=0;
SQLRowCount( m_hStmt, &nResultCount );
return (int)nResultCount;
}
/************************************************************************/
/* GetColCount() */
/************************************************************************/
/**
* Fetch the resultset column count.
*
* @return the column count, or zero if there is no resultset.
*/
int OGRDB2Statement::GetColCount()
{
    // Set by CollectResultsInfo(); zero when no resultset is active.
    return m_nColCount;
}
/************************************************************************/
/* GetColName() */
/************************************************************************/
/**
* Fetch a column name.
*
* @param iCol the zero based column index.
*
* @return NULL on failure (out of bounds column), or a pointer to an
* internal copy of the column name.
*/
const char *OGRDB2Statement::GetColName( int iCol )
{
    // Valid index: hand back the internal copy of the name.
    if( iCol >= 0 && iCol < m_nColCount )
        return m_papszColNames[iCol];

    return NULL;
}
/************************************************************************/
/* GetColType() */
/************************************************************************/
/**
* Fetch a column data type.
*
 * The return type code is an ODBC SQL_ code, one of SQL_UNKNOWN_TYPE,
 * SQL_CHAR, SQL_NUMERIC, SQL_DECIMAL, SQL_INTEGER, SQL_SMALLINT, SQL_FLOAT,
 * SQL_REAL, SQL_DOUBLE, SQL_DATETIME, SQL_VARCHAR, SQL_TYPE_DATE,
 * SQL_TYPE_TIME, SQL_TYPE_TIMESTAMP.
*
* @param iCol the zero based column index.
*
* @return type code or -1 if the column is illegal.
*/
short OGRDB2Statement::GetColType( int iCol )
{
    // Valid index: return the SQL_ type code recorded by
    // CollectResultsInfo().
    if( iCol >= 0 && iCol < m_nColCount )
        return m_panColType[iCol];

    return -1;
}
/************************************************************************/
/* GetColTypeName() */
/************************************************************************/
/**
* Fetch a column data type name.
*
* Returns data source-dependent data type name; for example, "CHAR",
 * "VARCHAR", "MONEY", "LONG VARBINARY", or "CHAR ( ) FOR BIT DATA".
*
* @param iCol the zero based column index.
*
* @return NULL on failure (out of bounds column), or a pointer to an
 * internal copy of the column data type name.
*/
const char *OGRDB2Statement::GetColTypeName( int iCol )
{
    // Valid index: hand back the internal copy of the type name.
    if( iCol >= 0 && iCol < m_nColCount )
        return m_papszColTypeNames[iCol];

    return NULL;
}
/************************************************************************/
/* GetColSize() */
/************************************************************************/
/**
* Fetch the column width.
*
* @param iCol the zero based column index.
*
 * @return column width (-1 for an out of range column), zero for unknown
 * width columns.
*/
short OGRDB2Statement::GetColSize( int iCol )
{
    // Valid index: the stored width is narrowed to short for the
    // caller, matching the historic interface.
    if( iCol >= 0 && iCol < m_nColCount )
        return (short) m_panColSize[iCol];

    return -1;
}
/************************************************************************/
/* GetColPrecision() */
/************************************************************************/
/**
* Fetch the column precision.
*
* @param iCol the zero based column index.
*
* @return column precision, may be zero or the same as column size for
* columns to which it does not apply.
*/
short OGRDB2Statement::GetColPrecision( int iCol )
{
    // Valid index: precision as recorded by SQLDescribeCol().
    if( iCol >= 0 && iCol < m_nColCount )
        return m_panColPrecision[iCol];

    return -1;
}
/************************************************************************/
/* GetColNullable() */
/************************************************************************/
/**
* Fetch the column nullability.
*
* @param iCol the zero based column index.
*
 * @return TRUE if the column may contain NULL values or FALSE otherwise.
*/
short OGRDB2Statement::GetColNullable( int iCol )
{
    // Valid index: nullability flag as recorded by SQLDescribeCol().
    if( iCol >= 0 && iCol < m_nColCount )
        return m_panColNullable[iCol];

    return -1;
}
/************************************************************************/
/* GetColColumnDef() */
/************************************************************************/
/**
* Fetch a column default value.
*
* Returns the default value of a column.
*
* @param iCol the zero based column index.
*
 * @return NULL if the default value is not specified
* or the internal copy of the default value.
*/
const char *OGRDB2Statement::GetColColumnDef( int iCol )
{
    // Valid index: internal copy of the default value (may itself be
    // NULL when the column has no default recorded).
    if( iCol >= 0 && iCol < m_nColCount )
        return m_papszColColumnDef[iCol];

    return NULL;
}
/************************************************************************/
/* Fetch() */
/************************************************************************/
/**
* Fetch a new record.
*
* Requests the next row in the current resultset using the SQLFetchScroll()
* call. Note that many ODBC drivers only support the default forward
* fetching one record at a time. Only SQL_FETCH_NEXT (the default) should
* be considered reliable on all drivers.
*
* Currently it isn't clear how to determine whether an error or a normal
* out of data condition has occurred if Fetch() fails.
*
* @param nOrientation One of SQL_FETCH_NEXT, SQL_FETCH_LAST, SQL_FETCH_PRIOR,
* SQL_FETCH_ABSOLUTE, or SQL_FETCH_RELATIVE (default is SQL_FETCH_NEXT).
*
* @param nOffset the offset (number of records), ignored for some
* orientations.
*
* @return TRUE if a new row is successfully fetched, or FALSE if not.
*/
int OGRDB2Statement::Fetch( int nOrientation, int nOffset )
{
    // Drop the cached values of the previous row.
    ClearColumnData();

    if( m_hStmt == NULL || m_nColCount < 1 )
        return FALSE;

/* -------------------------------------------------------------------- */
/*      Fetch a new row.  Note that some brain dead drivers (such as    */
/*      the unixodbc text file driver) don't implement                  */
/*      SQLScrollFetch(), so we try to stick to SQLFetch() if we        */
/*      can).                                                           */
/* -------------------------------------------------------------------- */
    SQLRETURN nRetCode;

    if( nOrientation == SQL_FETCH_NEXT && nOffset == 0 )
    {
        nRetCode = SQLFetch( m_hStmt );
        if( Failed(nRetCode) )
        {
            // SQL_NO_DATA is the normal end-of-resultset condition,
            // not an error worth reporting.
            if ( nRetCode != SQL_NO_DATA )
            {
                CPLError( CE_Failure, CPLE_AppDefined, "%s",
                          m_poSession->GetLastError() );
            }
            return FALSE;
        }
    }
    else
    {
        nRetCode = SQLFetchScroll(m_hStmt, (SQLSMALLINT) nOrientation,
                                  nOffset);
        if( Failed(nRetCode) )
        {
            // Bug fix: this condition was inverted (== SQL_NO_DATA),
            // so the error message was emitted at benign end-of-data
            // and suppressed for genuine errors.  Now consistent with
            // the SQLFetch() branch above.
            if ( nRetCode != SQL_NO_DATA )
            {
                CPLError( CE_Failure, CPLE_AppDefined, "%s",
                          m_poSession->GetLastError() );
            }
            return FALSE;
        }
    }

/* -------------------------------------------------------------------- */
/*      Pull out all the column values.                                 */
/* -------------------------------------------------------------------- */
    SQLSMALLINT iCol;

    for( iCol = 0; iCol < m_nColCount; iCol++ )
    {
        char szWrkData[513];
        CPL_SQLLEN cbDataLen;
        SQLSMALLINT nFetchType = GetTypeMapping( m_panColType[iCol] );

        // Handle values other than WCHAR and BINARY as CHAR.
        if( nFetchType != SQL_C_BINARY && nFetchType != SQL_C_WCHAR )
            nFetchType = SQL_C_CHAR;

        szWrkData[0] = '\0';
        szWrkData[sizeof(szWrkData)-1] = '\0';

        nRetCode = SQLGetData( m_hStmt, iCol + 1, nFetchType,
                               szWrkData, sizeof(szWrkData)-1,
                               &cbDataLen );

/* SQLGetData() is giving garbage values in the first 4 bytes of cbDataLen *
 * in some architectures. Converting it to (int) discards the unnecessary *
 * bytes. This should not be a problem unless the buffer size reaches     *
 * 2GB. (#3385) */
        cbDataLen = (int) cbDataLen;

        if( Failed( nRetCode ) )
        {
            // Bug fix: was "== SQL_NO_DATA", which reported the benign
            // end-of-data condition as an error and hid real failures.
            if ( nRetCode != SQL_NO_DATA )
            {
                CPLError( CE_Failure, CPLE_AppDefined, "%s",
                          m_poSession->GetLastError() );
            }
            return FALSE;
        }

        if( cbDataLen == SQL_NULL_DATA )
        {
            m_papszColValues[iCol] = NULL;
            m_panColValueLengths[iCol] = 0;
        }

        // assume big result: should check for state=SQLSATE 01004.
        else if( nRetCode == SQL_SUCCESS_WITH_INFO )
        {
            if( cbDataLen >= (CPL_SQLLEN)(sizeof(szWrkData)-1) )
            {
                cbDataLen = (CPL_SQLLEN)(sizeof(szWrkData)-1);
                if (nFetchType == SQL_C_CHAR)
                    while ((cbDataLen > 1) && (szWrkData[cbDataLen - 1] == 0))
                        --cbDataLen; // trimming the extra terminators: bug 990
                else if (nFetchType == SQL_C_WCHAR)
                    while ((cbDataLen > 1) && (szWrkData[cbDataLen - 1] == 0)
                           && (szWrkData[cbDataLen - 2] == 0))
                        cbDataLen -= 2; // trimming the extra terminators
            }

            // Two extra bytes so CHAR and WCHAR data are both safely
            // terminated.
            m_papszColValues[iCol] = (char *) CPLMalloc(cbDataLen+2);
            memcpy( m_papszColValues[iCol], szWrkData, cbDataLen );
            m_papszColValues[iCol][cbDataLen] = '\0';
            m_papszColValues[iCol][cbDataLen+1] = '\0';
            m_panColValueLengths[iCol] = cbDataLen;

            // Append the remaining chunks of the oversized value.
            while( true )
            {
                CPL_SQLLEN nChunkLen;

                nRetCode = SQLGetData( m_hStmt, (SQLUSMALLINT) iCol+1,
                                       nFetchType,
                                       szWrkData, sizeof(szWrkData)-1,
                                       &cbDataLen );
                if( nRetCode == SQL_NO_DATA )
                    break;

                if( Failed( nRetCode ) )
                {
                    CPLError( CE_Failure, CPLE_AppDefined, "%s",
                              m_poSession->GetLastError() );
                    return FALSE;
                }

                if( cbDataLen >= (int) (sizeof(szWrkData) - 1)
                    || cbDataLen == SQL_NO_TOTAL )
                {
                    nChunkLen = sizeof(szWrkData)-1;
                    if (nFetchType == SQL_C_CHAR)
                        while ( (nChunkLen > 1)
                                && (szWrkData[nChunkLen - 1] == 0) )
                            --nChunkLen; // trimming the extra terminators
                    else if (nFetchType == SQL_C_WCHAR)
                        while ( (nChunkLen > 1)
                                && (szWrkData[nChunkLen - 1] == 0)
                                && (szWrkData[nChunkLen - 2] == 0) )
                            nChunkLen -= 2; // trimming the extra terminators
                }
                else
                    nChunkLen = cbDataLen;
                szWrkData[nChunkLen] = '\0';

                m_papszColValues[iCol] = (char *)
                    CPLRealloc( m_papszColValues[iCol],
                                m_panColValueLengths[iCol] + nChunkLen + 2 );
                memcpy( m_papszColValues[iCol] + m_panColValueLengths[iCol],
                        szWrkData, nChunkLen );
                m_panColValueLengths[iCol] += nChunkLen;
                m_papszColValues[iCol][m_panColValueLengths[iCol]] = '\0';
                m_papszColValues[iCol][m_panColValueLengths[iCol]+1] = '\0';
            }
        }
        else
        {
            m_panColValueLengths[iCol] = cbDataLen;
            m_papszColValues[iCol] = (char *) CPLMalloc(cbDataLen+2);
            memcpy( m_papszColValues[iCol], szWrkData, cbDataLen );
            m_papszColValues[iCol][cbDataLen] = '\0';
            m_papszColValues[iCol][cbDataLen+1] = '\0';
        }

        // Trim white space off end, if there is any.
        if( nFetchType == SQL_C_CHAR && m_papszColValues[iCol] != NULL )
        {
            char *pszTarget = m_papszColValues[iCol];
            size_t iEnd = strlen(pszTarget);

            while ( iEnd > 0 && pszTarget[iEnd - 1] == ' ' )
                pszTarget[--iEnd] = '\0';
        }

        // Convert WCHAR to UTF-8, assuming the WCHAR is UCS-2.
        if( nFetchType == SQL_C_WCHAR && m_papszColValues[iCol] != NULL
            && m_panColValueLengths[iCol] > 0 )
        {
            wchar_t *pwszSrc = (wchar_t *) m_papszColValues[iCol];
            m_papszColValues[iCol] =
                CPLRecodeFromWChar( pwszSrc, CPL_ENC_UCS2, CPL_ENC_UTF8 );
            m_panColValueLengths[iCol] = strlen(m_papszColValues[iCol]);
            CPLFree( pwszSrc );
        }
    }

    return TRUE;
}
/************************************************************************/
/* GetColData() */
/************************************************************************/
/**
* Fetch column data.
*
* Fetches the data contents of the requested column for the currently loaded
* row. The result is returned as a string regardless of the column type.
* NULL is returned if an illegal column is given, or if the actual column
* is "NULL".
*
* @param iCol the zero based column to fetch.
*
* @param pszDefault the value to return if the column does not exist, or is
* NULL. Defaults to NULL.
*
* @return pointer to internal column data or NULL on failure.
*/
const char *OGRDB2Statement::GetColData( int iCol, const char *pszDefault )
{
    // Out-of-range column: caller-supplied fallback.
    if( iCol < 0 || iCol >= m_nColCount )
        return pszDefault;

    const char *pszValue = m_papszColValues[iCol];

    // SQL NULL is stored as a NULL pointer; fall back in that case too.
    return pszValue != NULL ? pszValue : pszDefault;
}
/************************************************************************/
/* GetColData() */
/************************************************************************/
/**
* Fetch column data.
*
* Fetches the data contents of the requested column for the currently loaded
* row. The result is returned as a string regardless of the column type.
* NULL is returned if an illegal column is given, or if the actual column
* is "NULL".
*
* @param pszColName the name of the column requested.
*
* @param pszDefault the value to return if the column does not exist, or is
* NULL. Defaults to NULL.
*
* @return pointer to internal column data or NULL on failure.
*/
const char *OGRDB2Statement::GetColData( const char *pszColName,
                                         const char *pszDefault )
{
    // Resolve the name (case-insensitively) and defer to the indexed
    // overload.
    const int iCol = GetColId( pszColName );

    return iCol == -1 ? pszDefault : GetColData( iCol, pszDefault );
}
/************************************************************************/
/* GetColDataLength() */
/************************************************************************/
/* Length in bytes of the cached value of column iCol for the current   */
/* row; 0 for an out-of-range column or a SQL NULL value.               */
int OGRDB2Statement::GetColDataLength( int iCol )
{
    if( iCol < 0 || iCol >= m_nColCount
        || m_papszColValues[iCol] == NULL )
        return 0;

    return (int)m_panColValueLengths[iCol];
}
/************************************************************************/
/* GetColId() */
/************************************************************************/
/**
* Fetch column index.
*
* Gets the column index corresponding with the passed name. The
* name comparisons are case insensitive.
*
* @param pszColName the name to search for.
*
* @return the column index, or -1 if not found.
*/
int OGRDB2Statement::GetColId( const char *pszColName )
{
    // Linear scan; column counts are small and the comparison is
    // case-insensitive (EQUAL).
    for( SQLSMALLINT iCol = 0; iCol < m_nColCount; iCol++ )
    {
        if( EQUAL(pszColName, m_papszColNames[iCol]) )
            return iCol;
    }

    return -1;
}
/************************************************************************/
/* ClearColumnData() */
/************************************************************************/
/* Release the cached value of every column of the current row.  The    */
/* metadata arrays themselves are left intact for the next Fetch().     */
void OGRDB2Statement::ClearColumnData()
{
    for( int iCol = 0; iCol < m_nColCount; iCol++ )
    {
        char *pszValue = m_papszColValues[iCol];

        if( pszValue != NULL )
        {
            CPLFree( pszValue );
            m_papszColValues[iCol] = NULL;
        }
    }
}
/************************************************************************/
/* Failed() */
/************************************************************************/
int OGRDB2Statement::Failed( int nResultCode )
{
if( m_poSession != NULL )
return m_poSession->Failed( nResultCode, m_hStmt );
return TRUE;
}
/************************************************************************/
/* Append(const char *) */
/************************************************************************/
/**
* Append text to internal command.
*
* The passed text is appended to the internal SQL command text.
*
* @param pszText text to append.
*/
void OGRDB2Statement::Append( const char *pszText )
{
    size_t nTextLen = strlen(pszText);

    // Grow the statement buffer when the new text (plus terminator)
    // will not fit; the doubling-plus-slack policy amortizes reallocs.
    if( m_nStatementMax < m_nStatementLen + nTextLen + 1 )
    {
        m_nStatementMax = (m_nStatementLen + nTextLen) * 2 + 100;
        if( m_pszStatement == NULL )
        {
            // NOTE(review): VSIMalloc()/VSIRealloc() can return NULL on
            // allocation failure; the result is dereferenced unchecked.
            m_pszStatement = (char *) VSIMalloc(m_nStatementMax);
            m_pszStatement[0] = '\0';
        }
        else
        {
            m_pszStatement = (char *) VSIRealloc(m_pszStatement, m_nStatementMax);
        }
    }

    // Append at the tracked end-of-text offset.
    strcpy( m_pszStatement + m_nStatementLen, pszText );

    m_nStatementLen += nTextLen;
}
/************************************************************************/
/* AppendEscaped(const char *) */
/************************************************************************/
/**
* Append text to internal command.
*
* The passed text is appended to the internal SQL command text after
* escaping any special characters so it can be used as a character string
* in an SQL statement.
*
* @param pszText text to append.
*/
void OGRDB2Statement::AppendEscaped( const char *pszText )
{
    const size_t nTextLen = strlen(pszText);
    // Worst case: every character escaped -> twice the length + NUL.
    char *pszEscapedText = (char *) VSIMalloc(nTextLen*2 + 1);
    size_t iOut = 0;

    // NOTE(review): backslash escaping of ' and \ is MySQL-style; the
    // SQL standard (and DB2) doubles single quotes instead -- confirm
    // against the target dialect.
    for( size_t iIn = 0; iIn < nTextLen; iIn++ )
    {
        const char chCur = pszText[iIn];

        if( chCur == '\'' || chCur == '\\' )
            pszEscapedText[iOut++] = '\\';
        pszEscapedText[iOut++] = chCur;
    }
    pszEscapedText[iOut] = '\0';

    Append( pszEscapedText );
    CPLFree( pszEscapedText );
}
/************************************************************************/
/* Append(int) */
/************************************************************************/
/**
* Append to internal command.
*
* The passed value is formatted and appended to the internal SQL command text.
*
* @param nValue value to append to the command.
*/
/* Format an integer and append it to the internal statement text.      */
void OGRDB2Statement::Append( int nValue )
{
    char szBuf[32];

    snprintf( szBuf, sizeof(szBuf), "%d", nValue );
    Append( szBuf );
}
/************************************************************************/
/* Append(double) */
/************************************************************************/
/**
* Append to internal command.
*
* The passed value is formatted and appended to the internal SQL command text.
*
* @param dfValue value to append to the command.
*/
/* Format a double (%24g) and append it to the internal statement text. */
void OGRDB2Statement::Append( double dfValue )
{
    char szBuf[100];

    snprintf( szBuf, sizeof(szBuf), "%24g", dfValue );
    Append( szBuf );
}
/************************************************************************/
/* Appendf() */
/************************************************************************/
/**
* Append to internal command.
*
* The passed format is used to format other arguments and the result is
* appended to the internal command text. Long results may not be formatted
* properly, and should be appended with the direct Append() methods.
*
* @param pszFormat printf() style format string.
*
* @return FALSE if formatting fails due to result being too large.
*/
int OGRDB2Statement::Appendf( const char *pszFormat, ... )
{
    va_list args;
    char szFormattedText[8000];
    int bSuccess;

    va_start( args, pszFormat );
#if defined(HAVE_VSNPRINTF)
    // Success only when the formatted result fits the fixed buffer.
    bSuccess = vsnprintf( szFormattedText, sizeof(szFormattedText)-1,
                          pszFormat, args ) < (int) sizeof(szFormattedText)-1;
#else
    // No vsnprintf(): no overflow detection possible on this platform.
    vsprintf( szFormattedText, pszFormat, args );
    bSuccess = TRUE;
#endif
    va_end( args );

    if( bSuccess )
        Append( szFormattedText );

    return bSuccess;
}
/************************************************************************/
/* Clear() */
/************************************************************************/
/**
* Clear internal command text and result set definitions.
*/
void OGRDB2Statement::Clear()
{
    /* Closing the cursor if opened */
    // SQL_CLOSE only closes the open cursor; the handle itself is kept
    // for reuse (it is dropped in the destructor).
    if( m_hStmt != NULL )
        SQLFreeStmt( m_hStmt, SQL_CLOSE );

    // Free the per-row value strings first ...
    ClearColumnData();

    if( m_pszStatement != NULL )
    {
        CPLFree( m_pszStatement );
        m_pszStatement = NULL;
    }

    m_nStatementLen = 0;
    m_nStatementMax = 0;

    m_nColCount = 0;

    // ... then the column metadata arrays.  m_papszColNames doubles as
    // the "metadata allocated" flag; the char** arrays are
    // NULL-terminated (CPLCalloc'ed one slot larger), as CSLDestroy()
    // requires.  m_papszColValues entries were already freed above, so
    // only the array itself needs CPLFree().
    if( m_papszColNames )
    {
        CPLFree( m_panColType );
        m_panColType = NULL;

        CSLDestroy( m_papszColTypeNames );
        m_papszColTypeNames = NULL;

        CPLFree( m_panColSize );
        m_panColSize = NULL;

        CPLFree( m_panColPrecision );
        m_panColPrecision = NULL;

        CPLFree( m_panColNullable );
        m_panColNullable = NULL;

        CSLDestroy( m_papszColColumnDef );
        m_papszColColumnDef = NULL;

        CSLDestroy( m_papszColNames );
        m_papszColNames = NULL;

        CPLFree( m_papszColValues );
        m_papszColValues = NULL;

        CPLFree( m_panColValueLengths );
        m_panColValueLengths = NULL;
    }
}
/************************************************************************/
/* GetColumns() */
/************************************************************************/
/**
* Fetch column definitions for a table.
*
* The SQLColumn() method is used to fetch the definitions for the columns
* of a table (or other queryable object such as a view). The column
* definitions are digested and used to populate the OGRDB2Statement
* column definitions essentially as if a "SELECT * FROM tablename" had
* been done; however, no resultset will be available.
*
* @param pszTable the name of the table to query information on. This
* should not be empty.
*
* @param pszCatalog the catalog to find the table in, use NULL (the
* default) if no catalog is available.
*
* @param pszSchema the schema to find the table in, use NULL (the
* default) if no schema is available.
*
* @return TRUE on success or FALSE on failure.
*/
int OGRDB2Statement::GetColumns( const char *pszTable,
                                 const char *pszCatalog,
                                 const char *pszSchema )
{
#ifdef notdef
    if( pszCatalog == NULL )
        pszCatalog = "";
    if( pszSchema == NULL )
        pszSchema = "";
#endif

#if (ODBCVER >= 0x0300)
    if ( !m_poSession->IsInTransaction() )
    {
        /* commit pending transactions and set to autocommit mode*/
        m_poSession->ClearTransaction();
    }
#endif
/* -------------------------------------------------------------------- */
/*      Fetch columns resultset for this table.                         */
/* -------------------------------------------------------------------- */
    if( Failed( SQLColumns( m_hStmt,
                            (SQLCHAR *) pszCatalog, SQL_NTS,
                            (SQLCHAR *) pszSchema, SQL_NTS,
                            (SQLCHAR *) pszTable, SQL_NTS,
                            (SQLCHAR *) NULL /* "" */, SQL_NTS ) ) )
        return FALSE;

/* -------------------------------------------------------------------- */
/*      Allocate per column information.                                */
/* -------------------------------------------------------------------- */
#ifdef notdef
    // SQLRowCount() is too unreliable (with unixodbc on AIX for instance)
    // so we now avoid it.
    SQLINTEGER nResultCount=0;

    if( Failed(SQLRowCount( m_hStmt, &nResultCount ) ) )
        nResultCount = 0;

    if( nResultCount < 1 )
        m_nColCount = 500; // Hopefully lots.
    else
        m_nColCount = nResultCount;
#endif

    // Since SQLRowCount() is unreliable (see above), allocate for a
    // fixed maximum; the loop below shrinks m_nColCount to the actual
    // count once SQLFetch() runs out of rows.  NOTE(review): columns
    // beyond the 500th are silently ignored.
    m_nColCount = 500;

    m_papszColNames = (char **) CPLCalloc(sizeof(char *),(m_nColCount+1));
    m_papszColValues = (char **) CPLCalloc(sizeof(char *),(m_nColCount+1));

    m_panColType = (SQLSMALLINT *) CPLCalloc(sizeof(SQLSMALLINT),m_nColCount);
    m_papszColTypeNames = (char **) CPLCalloc(sizeof(char *),(m_nColCount+1));
    m_panColSize = (CPL_SQLULEN *) CPLCalloc(sizeof(CPL_SQLULEN),m_nColCount);
    m_panColPrecision = (SQLSMALLINT *) CPLCalloc(sizeof(SQLSMALLINT),m_nColCount);
    m_panColNullable = (SQLSMALLINT *) CPLCalloc(sizeof(SQLSMALLINT),m_nColCount);
    m_papszColColumnDef = (char **) CPLCalloc(sizeof(char *),(m_nColCount+1));

/* -------------------------------------------------------------------- */
/*      Establish columns to use for key information.                   */
/* -------------------------------------------------------------------- */
    SQLUSMALLINT iCol;

    for( iCol = 0; iCol < m_nColCount; iCol++ )
    {
        char szWrkData[8193];
        CPL_SQLLEN cbDataLen;

        // No more rows: record the real column count and stop.
        if( Failed( SQLFetch( m_hStmt ) ) )
        {
            m_nColCount = iCol;
            break;
        }

        // Each SQLColumns() result row describes one table column;
        // pull out the fields we keep.
        szWrkData[0] = '\0';

        SQLGetData( m_hStmt, SQLColumns_COLUMN_NAME, SQL_C_CHAR,
                    szWrkData, sizeof(szWrkData)-1, &cbDataLen );
        m_papszColNames[iCol] = CPLStrdup(szWrkData);

        SQLGetData( m_hStmt, SQLColumns_DATA_TYPE, SQL_C_CHAR,
                    szWrkData, sizeof(szWrkData)-1, &cbDataLen );
        m_panColType[iCol] = (short) atoi(szWrkData);

        SQLGetData( m_hStmt, SQLColumns_TYPE_NAME, SQL_C_CHAR,
                    szWrkData, sizeof(szWrkData)-1, &cbDataLen );
        m_papszColTypeNames[iCol] = CPLStrdup(szWrkData);

        SQLGetData( m_hStmt, SQLColumns_COLUMN_SIZE, SQL_C_CHAR,
                    szWrkData, sizeof(szWrkData)-1, &cbDataLen );
        m_panColSize[iCol] = atoi(szWrkData);

        SQLGetData( m_hStmt, SQLColumns_DECIMAL_DIGITS, SQL_C_CHAR,
                    szWrkData, sizeof(szWrkData)-1, &cbDataLen );
        m_panColPrecision[iCol] = (short) atoi(szWrkData);

        SQLGetData( m_hStmt, SQLColumns_NULLABLE, SQL_C_CHAR,
                    szWrkData, sizeof(szWrkData)-1, &cbDataLen );
        m_panColNullable[iCol] = atoi(szWrkData) == SQL_NULLABLE;
#if (ODBCVER >= 0x0300)
        SQLGetData( m_hStmt, SQLColumns_COLUMN_DEF, SQL_C_CHAR,
                    szWrkData, sizeof(szWrkData)-1, &cbDataLen );
        if (cbDataLen > 0)
            m_papszColColumnDef[iCol] = CPLStrdup(szWrkData);
#endif
    }

    return TRUE;
}
/************************************************************************/
/* GetPrimaryKeys() */
/************************************************************************/
/**
* Fetch primary keys for a table.
*
* The SQLPrimaryKeys() function is used to fetch a list of fields
* forming the primary key. The result is returned as a result set matching
* the SQLPrimaryKeys() function result set. The 4th column in the result
* set is the column name of the key, and if the result set contains only
* one record then that single field will be the complete primary key.
*
* @param pszTable the name of the table to query information on. This
* should not be empty.
*
* @param pszCatalog the catalog to find the table in, use NULL (the
* default) if no catalog is available.
*
* @param pszSchema the schema to find the table in, use NULL (the
* default) if no schema is available.
*
* @return TRUE on success or FALSE on failure.
*/
int OGRDB2Statement::GetPrimaryKeys( const char *pszTable,
                                     const char *pszCatalog,
                                     const char *pszSchema )
{
    // SQLPrimaryKeys() wants strings, so treat a missing catalog or
    // schema as the empty string rather than NULL.
    const char *pszCatalogIn = (pszCatalog != NULL) ? pszCatalog : "";
    const char *pszSchemaIn  = (pszSchema != NULL) ? pszSchema : "";

#if (ODBCVER >= 0x0300)
    // Commit any pending transaction and return to autocommit mode so
    // the catalog query is not mixed into a user transaction.
    if( !m_poSession->IsInTransaction() )
        m_poSession->ClearTransaction();
#endif

/* -------------------------------------------------------------------- */
/*      Fetch the primary key resultset for this table.                 */
/* -------------------------------------------------------------------- */
    if( Failed( SQLPrimaryKeys( m_hStmt,
                                (SQLCHAR *) pszCatalogIn, SQL_NTS,
                                (SQLCHAR *) pszSchemaIn, SQL_NTS,
                                (SQLCHAR *) pszTable, SQL_NTS ) ) )
        return FALSE;

    return CollectResultsInfo();
}
/************************************************************************/
/* GetTables() */
/************************************************************************/
/**
* Fetch tables in database.
*
 * The SQLTables() function is used to fetch a list of tables in the
 * database. The result is returned as a result set matching
 * the SQLTables() function result set. The 3rd column in the result
 * set is the table name. Only objects of type "TABLE" or "VIEW" are returned.
*
* @param pszCatalog the catalog to find the table in, use NULL (the
* default) if no catalog is available.
*
* @param pszSchema the schema to find the table in, use NULL (the
* default) if no schema is available.
*
* @return TRUE on success or FALSE on failure.
*/
int OGRDB2Statement::GetTables( const char *pszCatalog,
                                const char *pszSchema )
{
    // Both arguments may legitimately be NULL (meaning "no filter" for
    // SQLTables()), but passing NULL to a printf-style %s is undefined
    // behavior, so substitute a placeholder in the debug output only.
    // Also fix the "CatalogNameL" typo in the original message.
    CPLDebug( "ODBC", "Catalog name: %s\nSchema name: %s\n",
              pszCatalog ? pszCatalog : "(null)",
              pszSchema ? pszSchema : "(null)" );

#if (ODBCVER >= 0x0300)
    if ( !m_poSession->IsInTransaction() )
    {
        /* commit pending transactions and set to autocommit mode*/
        m_poSession->ClearTransaction();
    }
#endif

/* -------------------------------------------------------------------- */
/*      Fetch the resultset of tables and views in the database.        */
/* -------------------------------------------------------------------- */
    // NULL catalog/schema are passed through unchanged; only objects of
    // type TABLE or VIEW are requested.
    if( Failed( SQLTables( m_hStmt,
                           (SQLCHAR *) pszCatalog, SQL_NTS,
                           (SQLCHAR *) pszSchema, SQL_NTS,
                           (SQLCHAR *) NULL, SQL_NTS,
                           (SQLCHAR *) "'TABLE','VIEW'", SQL_NTS ) ) )
        return FALSE;
    else
        return CollectResultsInfo();
}
/************************************************************************/
/* DumpResult() */
/************************************************************************/
/**
* Dump resultset to file.
*
* The contents of the current resultset are dumped in a simply formatted
* form to the provided file. If requested, the schema definition will
* be written first.
*
* @param fp the file to write to. stdout or stderr are acceptable.
*
* @param bShowSchema TRUE to force writing schema information for the rowset
* before the rowset data itself. Default is FALSE.
*/
void OGRDB2Statement::DumpResult( FILE *fp, int bShowSchema )
{
    int iCol;

/* -------------------------------------------------------------------- */
/*      Display schema                                                  */
/* -------------------------------------------------------------------- */
    if( bShowSchema )
    {
        fprintf( fp, "Column Definitions:\n" );
        for( iCol = 0; iCol < GetColCount(); iCol++ )
        {
            fprintf( fp, " %2d: %-24s ", iCol, GetColName(iCol) );
            if( GetColPrecision(iCol) > 0
                && GetColPrecision(iCol) != GetColSize(iCol) )
                // Cast the size to int: GetColSize() returns CPL_SQLULEN,
                // which may be wider than int, and passing it to %d is
                // undefined behavior on LP64 platforms.  The precision is
                // a SQLSMALLINT and promotes to int through varargs.
                fprintf( fp, " Size:%3d.%d",
                         (int) GetColSize(iCol), GetColPrecision(iCol) );
            else
                fprintf( fp, " Size:%5d", (int) GetColSize(iCol) );

            CPLString osType = GetTypeName( GetColType(iCol) );
            fprintf( fp, " Type:%s", osType.c_str() );
            if( GetColNullable(iCol) )
                fprintf( fp, " NULLABLE" );
            fprintf( fp, "\n" );
        }
        fprintf( fp, "\n" );
    }

/* -------------------------------------------------------------------- */
/*      Display results                                                 */
/* -------------------------------------------------------------------- */
    int iRecord = 0;
    while( Fetch() )
    {
        fprintf( fp, "Record %d\n", iRecord++ );
        for( iCol = 0; iCol < GetColCount(); iCol++ )
        {
            fprintf( fp, " %s: %s\n", GetColName(iCol), GetColData(iCol) );
        }
    }
}
/************************************************************************/
/* GetTypeName() */
/************************************************************************/
/**
* Get name for SQL column type.
*
* Returns a string name for the indicated type code (as returned
* from OGRDB2Statement::GetColType()).
*
* @param nTypeCode the SQL_ code, such as SQL_CHAR.
*
* @return internal string, "UNKNOWN" if code not recognised.
*/
CPLString OGRDB2Statement::GetTypeName( int nTypeCode )
{
    // Table-driven mapping of SQL_* type codes to display names; all the
    // codes are distinct, so a linear scan matches the original switch.
    static const struct
    {
        int         nCode;
        const char *pszName;
    } aoTypeNames[] = {
        { SQL_CHAR,           "CHAR" },
        { SQL_NUMERIC,        "NUMERIC" },
        { SQL_DECIMAL,        "DECIMAL" },
        { SQL_INTEGER,        "INTEGER" },
        { SQL_SMALLINT,       "SMALLINT" },
        { SQL_FLOAT,          "FLOAT" },
        { SQL_REAL,           "REAL" },
        { SQL_DOUBLE,         "DOUBLE" },
        { SQL_DATETIME,       "DATETIME" },
        { SQL_VARCHAR,        "VARCHAR" },
        { SQL_TYPE_DATE,      "DATE" },
        { SQL_TYPE_TIME,      "TIME" },
        { SQL_TYPE_TIMESTAMP, "TIMESTAMP" }
    };

    for( size_t i = 0;
         i < sizeof(aoTypeNames) / sizeof(aoTypeNames[0]);
         i++ )
    {
        if( aoTypeNames[i].nCode == nTypeCode )
            return aoTypeNames[i].pszName;
    }

    // Unrecognised type code: report it numerically.
    CPLString osResult;
    osResult.Printf( "UNKNOWN:%d", nTypeCode );
    return osResult;
}
/************************************************************************/
/* GetTypeMapping() */
/************************************************************************/
/**
* Get appropriate C data type for SQL column type.
*
* Returns a C data type code, corresponding to the indicated SQL data
* type code (as returned from OGRDB2Statement::GetColType()).
*
* @param nTypeCode the SQL_ code, such as SQL_CHAR.
*
* @return data type code. The valid code is always returned. If SQL
* code is not recognised, SQL_C_BINARY will be returned.
*/
SQLSMALLINT OGRDB2Statement::GetTypeMapping( SQLSMALLINT nTypeCode )
{
    switch( nTypeCode )
    {
        // Binary payloads are fetched as raw bytes.
        case SQL_BINARY:
        case SQL_VARBINARY:
        case SQL_LONGVARBINARY:
            return SQL_C_BINARY;

        // Date/time types map to their dedicated C structures.
        case SQL_DATE:
        case SQL_TYPE_DATE:
            return SQL_C_DATE;

        case SQL_TIME:
        case SQL_TYPE_TIME:
            return SQL_C_TIME;

        case SQL_TIMESTAMP:
        case SQL_TYPE_TIMESTAMP:
            return SQL_C_TIMESTAMP;

        // Exact numerics.
        case SQL_DECIMAL:
        case SQL_NUMERIC:
            return SQL_C_NUMERIC;

        case SQL_SMALLINT:
            return SQL_C_SSHORT;

        case SQL_INTEGER:
            return SQL_C_SLONG;

        case SQL_BIGINT:
            return SQL_C_SBIGINT;

        // Approximate numerics.
        case SQL_REAL:
            return SQL_C_FLOAT;

        case SQL_FLOAT:
        case SQL_DOUBLE:
            return SQL_C_DOUBLE;

        // Narrow and wide character data.
        case SQL_CHAR:
        case SQL_VARCHAR:
        case SQL_LONGVARCHAR:
            return SQL_C_CHAR;

        case SQL_WCHAR:
        case SQL_WVARCHAR:
        case SQL_WLONGVARCHAR:
            return SQL_C_WCHAR;

        // Bit/tiny integers, interval types and GUIDs are transferred
        // as character strings.
        case SQL_BIT:
        case SQL_TINYINT:
/*      case SQL_TYPE_UTCDATETIME:
        case SQL_TYPE_UTCTIME:*/
        case SQL_INTERVAL_MONTH:
        case SQL_INTERVAL_YEAR:
        case SQL_INTERVAL_YEAR_TO_MONTH:
        case SQL_INTERVAL_DAY:
        case SQL_INTERVAL_HOUR:
        case SQL_INTERVAL_MINUTE:
        case SQL_INTERVAL_SECOND:
        case SQL_INTERVAL_DAY_TO_HOUR:
        case SQL_INTERVAL_DAY_TO_MINUTE:
        case SQL_INTERVAL_DAY_TO_SECOND:
        case SQL_INTERVAL_HOUR_TO_MINUTE:
        case SQL_INTERVAL_HOUR_TO_SECOND:
        case SQL_INTERVAL_MINUTE_TO_SECOND:
        case SQL_GUID:
            return SQL_C_CHAR;

        // Anything unrecognised is also fetched as character data.
        default:
            return SQL_C_CHAR;
    }
}
| apache-2.0 |
raphandowski/UDIHISProject | vendor/predis/predis/examples/DispatcherLoop.php | 2125 | <?php
/*
* This file is part of the Predis package.
*
* (c) Daniele Alessandri <suppakilla@gmail.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
require 'SharedConfigurations.php';
/*
This is a basic example on how to use the Predis\DispatcherLoop class.
To see this example in action you can just use redis-cli and publish some
messages to the 'events' and 'control' channel, e.g.:
./redis-cli
PUBLISH events first
PUBLISH events second
PUBLISH events third
PUBLISH control terminate_dispatcher
*/
// Create a client and disable r/w timeout on the socket so the blocking
// pubsub loop below can wait indefinitely for incoming messages.
// NOTE(review): $single_server is presumably defined in
// SharedConfigurations.php — confirm before running standalone.
$client = new Predis\Client($single_server + array('read_write_timeout' => 0));
// Create a Predis\DispatcherLoop instance and attach a bunch of callbacks.
$dispatcher = new Predis\PubSub\DispatcherLoop($client);
// Demonstrate how to use a callable class as a callback for Predis\DispatcherLoop.
class EventsListener implements Countable
{
    // Payloads received so far, in arrival order.
    private $received;

    public function __construct()
    {
        $this->received = array();
    }

    // Countable: how many payloads have been collected.
    public function count()
    {
        return count($this->received);
    }

    // Return every payload collected so far.
    public function getEvents()
    {
        return $this->received;
    }

    // The dispatcher invokes the listener object directly with each payload.
    public function __invoke($payload)
    {
        array_push($this->received, $payload);
    }
}
// Attach our callable class to the dispatcher; every message published on
// the 'events' channel is recorded by the EventsListener instance.
$dispatcher->attachCallback('events', ($events = new EventsListener()));
// Attach a function to control the dispatcher loop termination with a message.
$dispatcher->attachCallback('control', function ($payload) use ($dispatcher) {
    if ($payload === 'terminate_dispatcher') {
        $dispatcher->stop();
    }
});
// Run the dispatcher loop until the callback attached to the 'control' channel
// receives 'terminate_dispatcher' as a message. This call blocks.
$dispatcher->run();
// Display our achievements!
echo "We received {$events->count()} messages!\n";
// Say goodbye :-)
$info = $client->info();
print_r("Goodbye from Redis v{$info['redis_version']}!\n");
| apache-2.0 |