answer stringlengths 15 1.25M |
|---|
package org.eclipse.che.selenium.dashboard.organization;
import static org.eclipse.che.commons.lang.NameGenerator.generate;
import com.google.inject.Inject;
import com.google.inject.name.Named;
import java.util.Arrays;
import java.util.List;
import org.eclipse.che.multiuser.organization.shared.dto.OrganizationDto;
import org.eclipse.che.selenium.core.client.<API key>;
import org.eclipse.che.selenium.core.user.AdminTestUser;
import org.eclipse.che.selenium.core.user.TestUser;
import org.eclipse.che.selenium.pageobject.Loader;
import org.eclipse.che.selenium.pageobject.dashboard.Dashboard;
import org.eclipse.che.selenium.pageobject.dashboard.NavigationBar;
import org.eclipse.che.selenium.pageobject.dashboard.organization.AddMember;
import org.eclipse.che.selenium.pageobject.dashboard.organization.AddOrganization;
import org.eclipse.che.selenium.pageobject.dashboard.organization.<API key>;
import org.eclipse.che.selenium.pageobject.dashboard.organization.OrganizationPage;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
/**
 * Dashboard UI test that exercises organization member management
 * (add / search / change role / delete) and the "create organization"
 * wizard flow.
 *
 * @author Sergey Skorik
 */
public class OrganizationTest {
// Randomized name of the organization created for this run.
private String orgName;
// Emails of the users added as organization members during the test.
private List<String> emailsList;
// Organization created in setUp() and deleted again in tearDown().
private OrganizationDto organization;
@Inject private <API key> <API key>;
@Inject private OrganizationPage organizationPage;
@Inject private NavigationBar navigationBar;
@Inject private AddOrganization addOrganization;
@Inject private AddMember addMember;
@Inject private Loader loader;
@Inject
@Named("admin")
private <API key> <API key>;
@Inject private Dashboard dashboard;
@Inject private TestUser testUser1;
@Inject private TestUser memberUser;
@Inject private AdminTestUser adminTestUser;
// Creates a randomly named organization via the service client and opens the dashboard.
@BeforeClass
public void setUp() throws Exception {
emailsList = Arrays.asList(testUser1.getEmail());
// NOTE(review): firstName and lastName are generated but never used below —
// confirm whether they are leftovers and can be removed.
String firstName = generate("F", 7);
String lastName = generate("L", 7);
dashboard.open();
orgName = generate("orgX", 6);
organization = <API key>.create(orgName);
}
// Removes the organization created in setUp() so test runs stay isolated.
@AfterClass
public void tearDown() throws Exception {
<API key>.deleteById(organization.getId());
}
// Walks through the member-management lifecycle on the organization page:
// add members as 'Admin', search for them, demote them to 'Member', delete them.
@Test
public void <API key>() {
navigationBar.waitNavigationBar();
navigationBar.clickOnMenu(NavigationBar.MenuItem.ORGANIZATIONS);
<API key>.<API key>();
<API key>.<API key>();
<API key>.clickOnOrganization(organization.getQualifiedName());
organizationPage.<API key>(orgName);
// Add members to the members list as 'Admin'
loader.waitOnClosed();
organizationPage.clickMembersTab();
for (String email : emailsList) {
organizationPage.<API key>();
addMember.waitAddMemberWidget();
addMember.setMembersEmail(email);
addMember.clickAdminButton();
addMember.clickAddButton();
organizationPage.<API key>(email);
}
// Search for each member using the first half of the member's name as the query
for (String email : emailsList) {
organizationPage.clearSearchField();
String memberName = organizationPage.<API key>(email);
organizationPage.searchMembers(memberName.substring(0, (memberName.length() / 2)));
organizationPage.<API key>(email);
}
organizationPage.clearSearchField();
// Change each member's role from 'Admin' to 'Member'
for (String email : emailsList) {
loader.waitOnClosed();
addMember.<API key>(email);
addMember.clickMemberButton();
addMember.clickSaveButton();
}
// Delete the members from the members list
for (String email : emailsList) {
organizationPage.deleteMember(email);
}
}
// NOTE(review): the @Test annotation below is commented out, so this scenario
// never runs — confirm whether it should be re-enabled or removed.
//@Test(priority = 1)
public void <API key>() {
String name = generate("orgY", 4);
navigationBar.waitNavigationBar();
navigationBar.clickOnMenu(NavigationBar.MenuItem.ORGANIZATIONS);
<API key>.<API key>();
<API key>.<API key>();
<API key>.<API key>();
addOrganization.waitAddOrganization();
addOrganization.setOrganizationName(name);
addOrganization.<API key>();
addMember.waitAddMemberWidget();
addMember.setMembersEmail(memberUser.getEmail());
addMember.clickAddButton();
addOrganization.<API key>();
addMember.waitAddMemberWidget();
addMember.clickCancelButton();
addOrganization.waitAddOrganization();
loader.waitOnClosed();
addOrganization.<API key>();
organizationPage.<API key>(name);
organizationPage.clickMembersTab();
organizationPage.clickSettingsTab();
}
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_37) on Wed Jan 23 11:08:00 EST 2013 -->
<TITLE>
Uses of Interface cc.mallet.cluster.neighbor_evaluator.NeighborEvaluator (Mallet 2 API)
</TITLE>
<META NAME="date" CONTENT="2013-01-23">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Interface cc.mallet.cluster.neighbor_evaluator.NeighborEvaluator (Mallet 2 API)";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<A NAME="navbar_top"></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../cc/mallet/cluster/neighbor_evaluator/NeighborEvaluator.html" title="interface in cc.mallet.cluster.neighbor_evaluator"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../index.html?cc/mallet/cluster/neighbor_evaluator//<API key>.html" target="_top"><B>FRAMES</B></A>
<A HREF="NeighborEvaluator.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<HR>
<CENTER>
<H2>
<B>Uses of Interface<br>cc.mallet.cluster.neighbor_evaluator.NeighborEvaluator</B></H2>
</CENTER>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Packages that use <A HREF="../../../../../cc/mallet/cluster/neighbor_evaluator/NeighborEvaluator.html" title="interface in cc.mallet.cluster.neighbor_evaluator">NeighborEvaluator</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#cc.mallet.cluster"><B>cc.mallet.cluster</B></A></TD>
<TD>Unsupervised clustering of <CODE>Instance</CODE> objects within an
<CODE>InstanceList</CODE>. </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#cc.mallet.cluster.clustering_scorer"><B>cc.mallet.cluster.clustering_scorer</B></A></TD>
<TD> </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#cc.mallet.cluster.neighbor_evaluator"><B>cc.mallet.cluster.neighbor_evaluator</B></A></TD>
<TD> </TD>
</TR>
</TABLE>
<P>
<A NAME="cc.mallet.cluster"></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Uses of <A HREF="../../../../../cc/mallet/cluster/neighbor_evaluator/NeighborEvaluator.html" title="interface in cc.mallet.cluster.neighbor_evaluator">NeighborEvaluator</A> in <A HREF="../../../../../cc/mallet/cluster/package-summary.html">cc.mallet.cluster</A></FONT></TH>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="<API key>">
<TH ALIGN="left" COLSPAN="2">Fields in <A HREF="../../../../../cc/mallet/cluster/package-summary.html">cc.mallet.cluster</A> declared as <A HREF="../../../../../cc/mallet/cluster/neighbor_evaluator/NeighborEvaluator.html" title="interface in cc.mallet.cluster.neighbor_evaluator">NeighborEvaluator</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE>protected <A HREF="../../../../../cc/mallet/cluster/neighbor_evaluator/NeighborEvaluator.html" title="interface in cc.mallet.cluster.neighbor_evaluator">NeighborEvaluator</A></CODE></FONT></TD>
<TD><CODE><B><API key>.</B><B><A HREF="../../../../../cc/mallet/cluster/<API key>.html#evaluator">evaluator</A></B></CODE>
<BR>
</TD>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="<API key>">
<TH ALIGN="left" COLSPAN="2">Methods in <A HREF="../../../../../cc/mallet/cluster/package-summary.html">cc.mallet.cluster</A> that return <A HREF="../../../../../cc/mallet/cluster/neighbor_evaluator/NeighborEvaluator.html" title="interface in cc.mallet.cluster.neighbor_evaluator">NeighborEvaluator</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> <A HREF="../../../../../cc/mallet/cluster/neighbor_evaluator/NeighborEvaluator.html" title="interface in cc.mallet.cluster.neighbor_evaluator">NeighborEvaluator</A></CODE></FONT></TD>
<TD><CODE><B><API key>.</B><B><A HREF="../../../../../cc/mallet/cluster/<API key>.html#getEvaluator()">getEvaluator</A></B>()</CODE>
<BR>
</TD>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="<API key>">
<TH ALIGN="left" COLSPAN="2">Constructors in <A HREF="../../../../../cc/mallet/cluster/package-summary.html">cc.mallet.cluster</A> with parameters of type <A HREF="../../../../../cc/mallet/cluster/neighbor_evaluator/NeighborEvaluator.html" title="interface in cc.mallet.cluster.neighbor_evaluator">NeighborEvaluator</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><CODE><B><A HREF="../../../../../cc/mallet/cluster/GreedyAgglomerative.html#GreedyAgglomerative(cc.mallet.pipe.Pipe, cc.mallet.cluster.neighbor_evaluator.NeighborEvaluator, double)">GreedyAgglomerative</A></B>(<A HREF="../../../../../cc/mallet/pipe/Pipe.html" title="class in cc.mallet.pipe">Pipe</A> instancePipe,
<A HREF="../../../../../cc/mallet/cluster/neighbor_evaluator/NeighborEvaluator.html" title="interface in cc.mallet.cluster.neighbor_evaluator">NeighborEvaluator</A> evaluator,
double stoppingThreshold)</CODE>
<BR>
</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><CODE><B><A HREF="../../../../../cc/mallet/cluster/<API key>.html#<API key>(cc.mallet.pipe.Pipe, cc.mallet.cluster.neighbor_evaluator.NeighborEvaluator, double, boolean, java.util.Random)"><API key></A></B>(<A HREF="../../../../../cc/mallet/pipe/Pipe.html" title="class in cc.mallet.pipe">Pipe</A> instancePipe,
<A HREF="../../../../../cc/mallet/cluster/neighbor_evaluator/NeighborEvaluator.html" title="interface in cc.mallet.cluster.neighbor_evaluator">NeighborEvaluator</A> evaluator,
double stoppingThreshold,
boolean <API key>,
java.util.Random random)</CODE>
<BR>
</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><CODE><B><A HREF="../../../../../cc/mallet/cluster/<API key>.html#<API key>(cc.mallet.pipe.Pipe, cc.mallet.cluster.neighbor_evaluator.NeighborEvaluator)"><API key></A></B>(<A HREF="../../../../../cc/mallet/pipe/Pipe.html" title="class in cc.mallet.pipe">Pipe</A> instancePipe,
<A HREF="../../../../../cc/mallet/cluster/neighbor_evaluator/NeighborEvaluator.html" title="interface in cc.mallet.cluster.neighbor_evaluator">NeighborEvaluator</A> evaluator)</CODE>
<BR>
</TD>
</TR>
</TABLE>
<P>
<A NAME="cc.mallet.cluster.clustering_scorer"></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Uses of <A HREF="../../../../../cc/mallet/cluster/neighbor_evaluator/NeighborEvaluator.html" title="interface in cc.mallet.cluster.neighbor_evaluator">NeighborEvaluator</A> in <A HREF="../../../../../cc/mallet/cluster/clustering_scorer/package-summary.html">cc.mallet.cluster.clustering_scorer</A></FONT></TH>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="<API key>">
<TH ALIGN="left" COLSPAN="2">Constructors in <A HREF="../../../../../cc/mallet/cluster/clustering_scorer/package-summary.html">cc.mallet.cluster.clustering_scorer</A> with parameters of type <A HREF="../../../../../cc/mallet/cluster/neighbor_evaluator/NeighborEvaluator.html" title="interface in cc.mallet.cluster.neighbor_evaluator">NeighborEvaluator</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><CODE><B><A HREF="../../../../../cc/mallet/cluster/clustering_scorer/PairwiseScorer.html#PairwiseScorer(cc.mallet.cluster.neighbor_evaluator.NeighborEvaluator)">PairwiseScorer</A></B>(<A HREF="../../../../../cc/mallet/cluster/neighbor_evaluator/NeighborEvaluator.html" title="interface in cc.mallet.cluster.neighbor_evaluator">NeighborEvaluator</A> evaluator)</CODE>
<BR>
</TD>
</TR>
</TABLE>
<P>
<A NAME="cc.mallet.cluster.neighbor_evaluator"></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Uses of <A HREF="../../../../../cc/mallet/cluster/neighbor_evaluator/NeighborEvaluator.html" title="interface in cc.mallet.cluster.neighbor_evaluator">NeighborEvaluator</A> in <A HREF="../../../../../cc/mallet/cluster/neighbor_evaluator/package-summary.html">cc.mallet.cluster.neighbor_evaluator</A></FONT></TH>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="<API key>">
<TH ALIGN="left" COLSPAN="2">Classes in <A HREF="../../../../../cc/mallet/cluster/neighbor_evaluator/package-summary.html">cc.mallet.cluster.neighbor_evaluator</A> that implement <A HREF="../../../../../cc/mallet/cluster/neighbor_evaluator/NeighborEvaluator.html" title="interface in cc.mallet.cluster.neighbor_evaluator">NeighborEvaluator</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> class</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../cc/mallet/cluster/neighbor_evaluator/<API key>.html" title="class in cc.mallet.cluster.neighbor_evaluator"><API key></A></B></CODE>
<BR>
A <A HREF="../../../../../cc/mallet/cluster/neighbor_evaluator/NeighborEvaluator.html" title="interface in cc.mallet.cluster.neighbor_evaluator"><CODE>NeighborEvaluator</CODE></A> that is backed by a <A HREF="../../../../../cc/mallet/classify/Classifier.html" title="class in cc.mallet.classify"><CODE>Classifier</CODE></A>.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> class</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../cc/mallet/cluster/neighbor_evaluator/MedoidEvaluator.html" title="class in cc.mallet.cluster.neighbor_evaluator">MedoidEvaluator</A></B></CODE>
<BR>
Uses a <A HREF="../../../../../cc/mallet/classify/Classifier.html" title="class in cc.mallet.classify"><CODE>Classifier</CODE></A> over pairs of <CODE>Instances</CODE> to score
<A HREF="../../../../../cc/mallet/cluster/neighbor_evaluator/Neighbor.html" title="class in cc.mallet.cluster.neighbor_evaluator"><CODE>Neighbor</CODE></A>.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> class</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../cc/mallet/cluster/neighbor_evaluator/PairwiseEvaluator.html" title="class in cc.mallet.cluster.neighbor_evaluator">PairwiseEvaluator</A></B></CODE>
<BR>
Uses a <A HREF="../../../../../cc/mallet/classify/Classifier.html" title="class in cc.mallet.classify"><CODE>Classifier</CODE></A> over pairs of <CODE>Instances</CODE> to score
<A HREF="../../../../../cc/mallet/cluster/neighbor_evaluator/Neighbor.html" title="class in cc.mallet.cluster.neighbor_evaluator"><CODE>Neighbor</CODE></A>.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> class</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../cc/mallet/cluster/neighbor_evaluator/RandomEvaluator.html" title="class in cc.mallet.cluster.neighbor_evaluator">RandomEvaluator</A></B></CODE>
<BR>
Randomly scores <A HREF="../../../../../cc/mallet/cluster/neighbor_evaluator/Neighbor.html" title="class in cc.mallet.cluster.neighbor_evaluator"><CODE>Neighbor</CODE></A>s.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> class</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../cc/mallet/cluster/neighbor_evaluator/<API key>.html" title="class in cc.mallet.cluster.neighbor_evaluator"><API key></A></B></CODE>
<BR>
Uses a <A HREF="../../../../../cc/mallet/classify/Classifier.html" title="class in cc.mallet.classify"><CODE>Classifier</CODE></A> that scores an array of <A HREF="../../../../../cc/mallet/cluster/neighbor_evaluator/Neighbor.html" title="class in cc.mallet.cluster.neighbor_evaluator"><CODE>Neighbor</CODE></A>s.</TD>
</TR>
</TABLE>
<P>
<HR>
<A NAME="navbar_bottom"></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="<API key>"></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../cc/mallet/cluster/neighbor_evaluator/NeighborEvaluator.html" title="interface in cc.mallet.cluster.neighbor_evaluator"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../index.html?cc/mallet/cluster/neighbor_evaluator//<API key>.html" target="_top"><B>FRAMES</B></A>
<A HREF="NeighborEvaluator.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<HR>
</BODY>
</HTML> |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_18) on Mon Feb 08 23:29:08 EST 2010 -->
<TITLE>
Uses of Package splar.core.util
</TITLE>
<META NAME="date" CONTENT="2010-02-08">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Package splar.core.util";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<A NAME="navbar_top"></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Class</FONT> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../index-files/index-1.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../index.html?splar/core/util/package-use.html" target="_top"><B>FRAMES</B></A>
<A HREF="package-use.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<HR>
<CENTER>
<H2>
<B>Uses of Package<br>splar.core.util</B></H2>
</CENTER>
No usage of splar.core.util
<P>
<HR>
<A NAME="navbar_bottom"></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="<API key>"></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Class</FONT> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../index-files/index-1.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../index.html?splar/core/util/package-use.html" target="_top"><B>FRAMES</B></A>
<A HREF="package-use.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<HR>
</BODY>
</HTML> |
package zzb.com.base;
import java.util.LinkedHashMap;
import java.util.Map;
import javax.persistence.Entity;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.Query;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
@Transactional
public abstract class DAOSupport implements DAO {
@PersistenceContext
protected EntityManager em;
@Override
public void save(Object object) {
em.persist(object);
}
@Override
public void update(Object object) {
em.merge(object);
}
@Override
public <T> void delete(Class<T> clazz, Object id) {
em.remove(em.getReference(clazz, id));
}
@Override
public <T> void delete(Class<T> clazz, Object[] objects) {
for (Object id : objects) {
delete(clazz, id);
}
}
@Override
@Transactional(readOnly = true, propagation = Propagation.NOT_SUPPORTED)
public <T> T findById(Class<T> clazz, Object object) {
return em.find(clazz, object);
}
@Override
public <T> QueryResult<T> getScrollData(Class<T> clazz) {
return getScrollData(clazz,-1,-1,null,null,null);
}
@Override
public <T> QueryResult<T> getScrollData(Class<T> clazz, int firstIndex,
int maxResult) {
return getScrollData(clazz,firstIndex,maxResult,null,null,null);
}
@Override
public <T> QueryResult<T> getScrollData(Class<T> clazz, int firstIndex,
int maxResult, LinkedHashMap<String, String> orderBy) {
return getScrollData(clazz,firstIndex,maxResult,null,null,orderBy);
}
@Override
public <T> QueryResult<T> getScrollData(Class<T> clazz, int firstIndex,
int maxResult, String whereSql, Object[] params) {
return getScrollData(clazz,firstIndex,maxResult,whereSql,params,null);
}
@SuppressWarnings("unchecked")
@Override
@Transactional(readOnly = true, propagation = Propagation.NOT_SUPPORTED)
public <T> QueryResult<T> getScrollData(Class<T> clazz, int firstIndex,
int maxResult, String whereSql, Object[] params, LinkedHashMap<String, String> orderBy) {
QueryResult<T> qr = new QueryResult<T>();
String entityName = getEntityName(clazz);
Query query = em.createQuery("Select o from " + entityName + " o " + ((whereSql == null) ? "": whereSql) + buildOrderBy(orderBy, "o"));
buildWhereCondition(query, params);
if(firstIndex != -1){
query.setFirstResult(firstIndex);
}
if(maxResult != -1){
query.setMaxResults(maxResult);
}
qr.setResultList(query.getResultList());
query = em.createQuery("Select count(o) from " + entityName + " o " + ((whereSql == null) ? "": whereSql));
buildWhereCondition(query, params);
qr.setCount((Long) query.getSingleResult());
return qr;
}
protected void buildWhereCondition(Query query,Object[] params) {
if(params != null && params.length != 0){
for(int i=0, len=params.length;i<len;i++){
query.setParameter((i+1), params[i]);
}
}
}
protected String buildOrderBy(LinkedHashMap<String, String> orderBy,
String prefix) {
StringBuffer sb = new StringBuffer("");
if (orderBy != null) {
sb.append(" order by ");
for (Map.Entry<String, String> entry : orderBy.entrySet()) {
if (prefix != null && !"".equals(prefix)) {
sb.append(prefix + ".").append(entry.getKey()).append(" ")
.append(entry.getValue()).append(",");
} else {
sb.append(entry.getKey()).append(" ")
.append(entry.getValue()).append(",");
}
}
sb.deleteCharAt(sb.length() - 1);
}
return sb.toString();
}
protected <T> String getEntityName(Class<T> clazz) {
String entityName = clazz.getSimpleName();
Entity entity = clazz.getAnnotation(Entity.class);
if (entity.name() != null && !"".equals(entity.name())) {
entityName = entity.name();
}
return entityName;
}
} |
<?php
namespace Topxia\Service\File\Impl;
use Symfony\Component\HttpFoundation\File\UploadedFile;
use Topxia\Common\FileToolkit;
use Topxia\Common\ArrayToolkit;
use Topxia\Service\Common\BaseService;
use Topxia\Service\File\FileImplementor;
use Topxia\Service\Util\CloudClientFactory;
class <API key> extends BaseService implements FileImplementor
{
private $cloudClient;
public function getFile($file)
{
$file['metas'] = $this->decodeMetas($file['metas']);
$file['metas2'] = $this->decodeMetas($file['metas2']);
// $file['path'] = $this->getCloudClient()->getFileUrl($file['hashId'],$file['targetId'],$file['targetType']);
return $file;
}
public function addFile($targetType, $targetId, array $fileInfo=array(), UploadedFile $originalFile=null)
{
if (!ArrayToolkit::requireds($fileInfo, array('filename','key', 'size'))) {
throw $this-><API key>('!');
}
$uploadFile = array();
$uploadFile['targetId'] = $targetId;
$uploadFile['targetType'] = $targetType;
$uploadFile['hashId'] = $fileInfo['key'];
$uploadFile['filename'] = $fileInfo['filename'];
$uploadFile['ext'] = pathinfo($uploadFile['filename'], PATHINFO_EXTENSION);
$uploadFile['size'] = (int) $fileInfo['size'];
$uploadFile['etag'] = empty($fileInfo['etag']) ? '' : $fileInfo['etag'];
$uploadFile['metas'] = $this->encodeMetas(empty($fileInfo['metas']) ? array() : $fileInfo['metas']);
$uploadFile['metas2'] = $this->encodeMetas(empty($fileInfo['metas2']) ? array() : $fileInfo['metas2']);
if (empty($fileInfo['convertId']) or empty($fileInfo['convertKey'])) {
$uploadFile['convertHash'] = "ch-{$uploadFile['hashId']}";
$uploadFile['convertStatus'] = 'none';
} else {
$uploadFile['convertHash'] = "{$fileInfo['convertId']}:{$fileInfo['convertKey']}";
$uploadFile['convertStatus'] = 'waiting';
}
$uploadFile['type'] = FileToolkit::<API key>($fileInfo['mimeType']);
$uploadFile['canDownload'] = empty($uploadFile['canDownload']) ? 0 : 1;
$uploadFile['storage'] = 'cloud';
$uploadFile['createdUserId'] = $this->getCurrentUser()->id;
$uploadFile['updatedUserId'] = $uploadFile['createdUserId'];
$uploadFile['updatedTime'] = $uploadFile['createdTime'] = time();
return $uploadFile;
}
public function convertFile($file, $status, $result=null, $callback = null)
{
if ($status != 'success') {
$file['convertStatus'] = $status;
} else {
if ($file['type'] == 'video') {
$cmds = $this->getCloudClient()-><API key>();
} elseif ($file['type'] == 'audio') {
$cmds = $this->getCloudClient()-><API key>();
}
$file['metas2'] = array();
foreach ($result as $item) {
$type = empty($cmds[$item['cmd']]) ? null : $cmds[$item['cmd']];
if (empty($type)) {
continue;
}
if ($item['code'] != 0) {
continue;
}
if (empty($item['key'])) {
continue;
}
$file['metas2'][$type] = array('type' => $type, 'cmd' => $item['cmd'], 'key' => $item['key']);
}
if (empty($file['metas2'])) {
$file['convertStatus'] = 'error';
} else {
$file['convertStatus'] = 'success';
}
}
$file['metas2'] = $this->encodeMetas(empty($file['metas2']) ? array() : $file['metas2']);
return $file;
}
/**
 * Removes a file from cloud storage, optionally including its converted renditions.
 *
 * @param array $file          file record; 'hashId' identifies the master object
 * @param bool  $deleteSubFile when true, also delete the sd/hd/shd convert outputs
 */
public function deleteFile($file, $deleteSubFile = true)
{
    $keys = array($file['hashId']);
    $keyPrefixs = array();
    if ($deleteSubFile) {
        // NOTE(review): assumes 'metas2' is already decoded to an array here — confirm with callers.
        foreach (array('sd', 'hd', 'shd') as $quality) {
            if (!empty($file['metas2'][$quality]['key'])) {
                $keyPrefixs[] = $file['metas2'][$quality]['key'];
            }
        }
    }
    $this->getCloudClient()->deleteFiles($keys, $keyPrefixs);
}
/**
 * Builds the absolute local path of a file: <target dir>/<hashId>.<ext>.
 *
 * @param array $file file record ('targetType', 'targetId', 'hashId', 'ext')
 * @return string absolute path on the local disk
 */
private function getFileFullName($file)
{
    $diskDirectory = $this->getFilePath($file['targetType'], $file['targetId']);
    // Bug fix: the filename was appended (.=) to an undefined variable; assign it directly.
    $filename = "{$file['hashId']}.{$file['ext']}";
    return $diskDirectory.$filename;
}
/**
 * Builds the local storage directory for a target: <base>/<type>/<type>-<id>/.
 *
 * @param string $targetType target category used as directory name
 * @param string|int $targetId target identifier
 * @return string directory path ending with a directory separator
 */
private function getFilePath($targetType, $targetId)
{
    $diskDirectory = $this->getKernel()->getParameter('topxia.disk.local_directory');
    // Bug fix: the method ignored its parameters and read from an undefined $file variable.
    $subDir = DIRECTORY_SEPARATOR.$targetType.DIRECTORY_SEPARATOR;
    $subDir .= "{$targetType}-{$targetId}".DIRECTORY_SEPARATOR;
    return $diskDirectory.$subDir;
}
/**
 * Serializes a metas array to its JSON storage form.
 * Anything that is not a non-empty array is stored as an empty JSON array.
 *
 * @param mixed $metas
 * @return string JSON text
 */
private function encodeMetas($metas)
{
    if (!is_array($metas) || empty($metas)) {
        return json_encode(array());
    }
    return json_encode($metas);
}
/**
 * Restores a metas array from its JSON storage form.
 *
 * @param string|null $metas JSON text as produced by encodeMetas()
 * @return array decoded metas; empty array for empty input
 */
private function decodeMetas($metas)
{
    return empty($metas) ? array() : json_decode($metas, true);
}
/**
 * Lazily creates and caches the cloud storage client on first use.
 *
 * @return object the shared cloud client instance
 */
private function getCloudClient()
{
    if (empty($this->cloudClient)) {
        $clientFactory = new CloudClientFactory();
        $this->cloudClient = $clientFactory->createClient();
    }
    return $this->cloudClient;
}
} |
/**
* generated by Xtext
*/
package org.eclipse.xtext.linking.lazy.lazyLinking.impl;
import java.util.Collection;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.MinimalEObjectImpl;
import org.eclipse.emf.ecore.util.EObjectEList;
import org.eclipse.xtext.linking.lazy.lazyLinking.LazyLinkingPackage;
import org.eclipse.xtext.linking.lazy.lazyLinking.Type;
import org.eclipse.xtext.linking.lazy.lazyLinking.<API key>;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Unresolved Proxy Property</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* </p>
* <ul>
* <li>{@link org.eclipse.xtext.linking.lazy.lazyLinking.impl.<API key>#getType <em>Type</em>}</li>
* <li>{@link org.eclipse.xtext.linking.lazy.lazyLinking.impl.<API key>#getName <em>Name</em>}</li>
* </ul>
*
* @generated
*/
public class <API key> extends MinimalEObjectImpl.Container implements <API key> {
    /**
     * The cached value of the '{@link #getType() <em>Type</em>}' reference list.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getType()
     * @generated
     * @ordered
     */
    protected EList<Type> type;
    /**
     * The default value of the '{@link #getName() <em>Name</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getName()
     * @generated
     * @ordered
     */
    protected static final String NAME_EDEFAULT = null;
    /**
     * The cached value of the '{@link #getName() <em>Name</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getName()
     * @generated
     * @ordered
     */
    protected String name = NAME_EDEFAULT;
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected <API key>() {
        super();
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        // Identifies this object's metaclass to the EMF runtime.
        return LazyLinkingPackage.Literals.<API key>;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EList<Type> getType() {
        // Lazily created settable reference list, as is standard for generated EMF features.
        if (type == null) {
            type = new EObjectEList<Type>(Type.class, this, LazyLinkingPackage.<API key>);
        }
        return type;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public String getName() {
        return name;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setName(String newName) {
        String oldName = name;
        name = newName;
        // Notify attached adapters only when someone is listening.
        if (<API key>())
            eNotify(new ENotificationImpl(this, Notification.SET, LazyLinkingPackage.<API key>, oldName, name));
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        // Reflective feature read used by the EMF runtime.
        switch (featureID) {
            case LazyLinkingPackage.<API key>:
                return getType();
            case LazyLinkingPackage.<API key>:
                return getName();
        }
        return super.eGet(featureID, resolve, coreType);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public void eSet(int featureID, Object newValue) {
        // Reflective feature write used by the EMF runtime.
        switch (featureID) {
            case LazyLinkingPackage.<API key>:
                getType().clear();
                getType().addAll((Collection<? extends Type>)newValue);
                return;
            case LazyLinkingPackage.<API key>:
                setName((String)newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        // Resets a feature to its default value.
        switch (featureID) {
            case LazyLinkingPackage.<API key>:
                getType().clear();
                return;
            case LazyLinkingPackage.<API key>:
                setName(NAME_EDEFAULT);
                return;
        }
        super.eUnset(featureID);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        // Reports whether a feature differs from its default.
        switch (featureID) {
            case LazyLinkingPackage.<API key>:
                return type != null && !type.isEmpty();
            case LazyLinkingPackage.<API key>:
                return NAME_EDEFAULT == null ? name != null : !NAME_EDEFAULT.equals(name);
        }
        return super.eIsSet(featureID);
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public String toString() {
        if (eIsProxy()) return super.toString();
        StringBuffer result = new StringBuffer(super.toString());
        result.append(" (name: ");
        result.append(name);
        result.append(')');
        return result.toString();
    }
} //<API key>
package wsg.action;
import java.sql.Timestamp;
import java.util.Date;
import java.util.List;
import org.apache.struts2.convention.annotation.Action;
import org.apache.struts2.convention.annotation.InterceptorRef;
import org.apache.struts2.convention.annotation.InterceptorRefs;
import org.apache.struts2.convention.annotation.Namespace;
import org.apache.struts2.convention.annotation.Result;
import org.apache.struts2.convention.annotation.Results;
import org.springframework.beans.factory.annotation.Autowired;
import com.sun.jdi.Value;
import wsg.model.Blog;
import wsg.model.Home;
import wsg.model.easyui.Json;
import wsg.service.BlogServiceI;
import wsg.service.HomeServiceI;
@Namespace("/")
@Action("HomeAction")
@InterceptorRefs({
@InterceptorRef(value = "ATIStack")
})
@Results( {
// @Result(name="admin",location="/admin/UserAuthAction!findUncheckedList.action",type="redirectAction"),
// @Result(name="Login",location="/teacher_jsp/teacher_index.jsp",type="redirect"),
@Result(name="home",location="/WEB-INF/home.jsp"),
@Result(name="findAll",location="/WEB-INF/homeList.jsp"),
@Result(name="blogContent",location="/WEB-INF/blogContent.jsp"),
@Result(name="addHome",location="/WEB-INF/addHome.jsp")
})
/**
 * Struts2 action for "home" records: listing, viewing, creating and updating.
 * Struts invokes the public methods by name and binds request parameters to
 * the bean properties below, so method and property names are part of the
 * external contract.
 */
public class HomeAction extends BaseAction<Home> {
    // Services are injected by Spring via the annotated setters below.
    private HomeServiceI homeServiceI;
    private BlogServiceI blogServiceI;
    private List<Blog> blogList;
    private List<Home> homeList;
    private Home home;
    /** Loads every home record and forwards to the list view. */
    public String findAll() {
        homeList = homeServiceI.findAllHome();
        return "findAll";
    }
    /** Reloads the selected record by id and forwards to the content view. */
    public String findBlogContent() {
        // NOTE(review): uses getById here but getHomeById in getHomeView — confirm both service methods are equivalent.
        home = homeServiceI.getById(home.getId());
        return "blogContent";
    }
    /**
     * Creates a new home record unless one with the same title already exists.
     * Writes a JSON success/failure envelope directly to the response.
     */
    public void addHome() {
        Json json = new Json();
        try {
            if (!homeServiceI.existByTitle(home.getTitle())) {
                home.setCreateTime(new Timestamp(new Date().getTime()));
                homeServiceI.addHome(home);
                json.setSuccess(true);
                json.setMsg("");
            } else {
                // Duplicate title: success stays false; message intentionally left empty — verify the UI expects this.
                json.setMsg("");
            }
        } catch (Exception e) {
            e.printStackTrace();
            json.setMsg("");
        } finally {
            // Always answer the AJAX caller, even on failure.
            super.writJson(json);
        }
    }
    /** Loads one record plus the blog id/name list and forwards to the home view. */
    public String getHomeView() {
        try {
            home = homeServiceI.getHomeById(home.getId());
            blogList = blogServiceI.findAllIdName();
        } catch (Exception e) {
            e.printStackTrace();
        }
        return "home";
    }
    /**
     * Updates an existing home record, stamping the update time.
     * Writes a JSON success/failure envelope directly to the response.
     */
    public void updateHome() {
        Json json = new Json();
        try {
            home.setUpdateTime(new Timestamp(new Date().getTime()));
            homeServiceI.updateHome(home);
            json.setSuccess(true);
            json.setMsg("");
        } catch (Exception e) {
            e.printStackTrace();
            json.setMsg("");
        } finally {
            super.writJson(json);
        }
    }
    public HomeServiceI getHomeServiceI() {
        return homeServiceI;
    }
    @Autowired
    public void setHomeServiceI(HomeServiceI homeServiceI) {
        this.homeServiceI = homeServiceI;
    }
    public BlogServiceI getBlogServiceI() {
        return blogServiceI;
    }
    @Autowired
    public void setBlogServiceI(BlogServiceI blogServiceI) {
        this.blogServiceI = blogServiceI;
    }
    public List<Blog> getBlogList() {
        return blogList;
    }
    public void setBlogList(List<Blog> blogList) {
        this.blogList = blogList;
    }
    public Home getHome() {
        return home;
    }
    public void setHome(Home home) {
        this.home = home;
    }
    public List<Home> getHomeList() {
        return homeList;
    }
    public void setHomeList(List<Home> homeList) {
        this.homeList = homeList;
    }
}
package es.uah.aut.srg.micobs.mclev.lang.parser.antlr;
import java.io.InputStream;
import org.eclipse.xtext.parser.antlr.<API key>;
/**
 * Supplies the ANTLR token definition file for the MCLEV language parser.
 * The *.tokens resource is resolved from this bundle's own classpath.
 */
public class <API key> implements <API key> {
    @Override
    public InputStream getAntlrTokenFile() {
        // Use this class' loader so the lookup works inside an OSGi bundle.
        final ClassLoader loader = getClass().getClassLoader();
        return loader.getResourceAsStream("es/uah/aut/srg/micobs/mclev/lang/parser/antlr/internal/InternalSAI.tokens");
    }
}
package nexcore.tool.uml.manager;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import nexcore.tool.uml.model.umldiagram.NodeType;
import nexcore.tool.uml.model.umldiagram.RelationType;
/**
* <ul>
* <li> : nexcore.tool.uml.manager</li>
* <li> : nexcore.tool.uml.manager</li>
* <li> : UMLPolicyManager</li>
* <li> : 2009. 12. 3.</li>
* <li> : </li>
* </ul>
*/
/**
 * Singleton policy manager that decides which UML relation types may connect
 * which node types. Rules live in {@code ruleDictionary}: for each
 * {@link RelationType}, a map from source {@link NodeType} to the list of node
 * types that source may legally target.
 */
public class UMLPolicyManager {
    /**
     * UMLPolicyManager — private: accessed only through the static eInstance.
     */
    private UMLPolicyManager() {
        this.init();
    }
    /**
     * Reports whether {@code sourceType} may act as the source of
     * {@code relationType} (target not yet known).
     *
     * @param relationType relation being created
     * @param sourceType   candidate source node
     * @return boolean true when a rule permits this source
     */
    public static boolean <API key>(RelationType relationType, NodeType sourceType) {
        // Attachments may start from anything except plain text.
        if (RelationType.ATTACHEMENT.equals(relationType)) {
            if (NodeType.TEXT.equals(sourceType)) {
                return false;
            } else {
                return true;
            }
        }
        // Extend/include may start from anything except a package.
        if (RelationType.EXTEND.equals(relationType) || RelationType.INCLUDE.equals(relationType)) {
            if (NodeType.PACKAGE.equals(sourceType)) {
                return false;
            } else {
                return true;
            }
        }
        if (!eInstance.ruleDictionary.containsKey(relationType)) {
            return false;
        }
        HashMap<NodeType, List<NodeType>> relationRule = eInstance.ruleDictionary.get(relationType);
        if (null == relationRule) {
            return false;
        }
        List<NodeType> sourceTypes = relationRule.get(sourceType);
        if (null == sourceTypes) {
            return false;
        } else {
            return true;
        }
    }
    /**
     * Reports whether {@code relationType} may connect {@code sourceType}
     * to {@code targetType}.
     *
     * @param relationType relation being created
     * @param sourceType   source node
     * @param targetType   target node
     * @return boolean true when a rule permits this connection
     */
    public static boolean <API key>(RelationType relationType, NodeType sourceType, NodeType targetType) {
        // Attachments are only legal when one end is a note.
        if (RelationType.ATTACHEMENT.equals(relationType)) {
            if (NodeType.NOTE.equals(sourceType) || NodeType.NOTE.equals(targetType)) {
                return true;
            }
            return false;
        }
        if (!eInstance.ruleDictionary.containsKey(relationType)) {
            return false;
        }
        HashMap<NodeType, List<NodeType>> relationRule = eInstance.ruleDictionary.get(relationType);
        if (null == relationRule) {
            return false;
        }
        List<NodeType> sourceTypes = relationRule.get(sourceType);
        if (null == sourceTypes) {
            return false;
        }
        // Allowed only when the target appears in the source's permitted list.
        for (NodeType type : sourceTypes) {
            if (type.equals(targetType)) {
                return true;
            }
        }
        return false;
    }
    /** eInstance — eagerly created singleton holding all rule tables. */
    private static UMLPolicyManager eInstance = new UMLPolicyManager();
    /** ruleDictionary: RelationType -> (source NodeType -> allowed target NodeTypes) */
    private HashMap<RelationType, HashMap<NodeType, List<NodeType>>> ruleDictionary = new HashMap<RelationType, HashMap<NodeType, List<NodeType>>>();
    /** Relation:Dependency */
    private HashMap<NodeType, List<NodeType>> ruleDependency = new HashMap<NodeType, List<NodeType>>();
    /** Relation:Include */
    private HashMap<NodeType, List<NodeType>> ruleInclude = new HashMap<NodeType, List<NodeType>>();
    /** Relation:Extend */
    private HashMap<NodeType, List<NodeType>> ruleExtend = new HashMap<NodeType, List<NodeType>>();
    /** Relation:Association */
    private HashMap<NodeType, List<NodeType>> ruleAssociation = new HashMap<NodeType, List<NodeType>>();
    /** Relation:ControlFlow */
    private HashMap<NodeType, List<NodeType>> ruleControlFlow = new HashMap<NodeType, List<NodeType>>();
    /** Relation:Generalization */
    private HashMap<NodeType, List<NodeType>> ruleGeneralization = new HashMap<NodeType, List<NodeType>>();
    /** Relation:Message */
    private HashMap<NodeType, List<NodeType>> ruleMessage = new HashMap<NodeType, List<NodeType>>();
    /** Relation:ReplyMessage */
    private HashMap<NodeType, List<NodeType>> ruleReplyMessage = new HashMap<NodeType, List<NodeType>>();
    /** Relation:ObjectFlow */
    private HashMap<NodeType, List<NodeType>> ruleObjectFlow = new HashMap<NodeType, List<NodeType>>();
    /** Relation:Realization */
    private HashMap<NodeType, List<NodeType>> ruleRealization = new HashMap<NodeType, List<NodeType>>();
    /**
     * Populates every rule table; called once from the private constructor.
     * void
     */
    protected void init() {
        initAssociation();
        initControlFlow();
        initGeneralization();
        initIncludeExclude();
        initMessage();
        initReplyMessage();
        initObjectFlow();
        initRealization();
        <API key>();
    }
    /**
     * Relation:Dependency (also reused for Abstraction and Usage) void
     */
    private void <API key>() {
        List<NodeType> artifact = new ArrayList<NodeType>();
        List<NodeType> classes = new ArrayList<NodeType>();
        List<NodeType> datatype = new ArrayList<NodeType>();
        List<NodeType> interfaces = new ArrayList<NodeType>();
        List<NodeType> packages = new ArrayList<NodeType>();
        List<NodeType> usecase = new ArrayList<NodeType>();
        List<NodeType> component = new ArrayList<NodeType>();
        List<NodeType> actor = new ArrayList<NodeType>();
        actor.add(NodeType.CLASS);
        actor.add(NodeType.INTERFACE);
        actor.add(NodeType.COMPONENT);
        usecase.add(NodeType.USE_CASE);
        classes.add(NodeType.CLASS);
        classes.add(NodeType.ARTIFACT);
        classes.add(NodeType.DATA_TYPE);
        classes.add(NodeType.ENUMERATION);
        classes.add(NodeType.INTERFACE);
        component.add(NodeType.INTERFACE);
        component.add(NodeType.COMPONENT);
        artifact.add(NodeType.CLASS);
        artifact.add(NodeType.ARTIFACT);
        artifact.add(NodeType.DATA_TYPE);
        artifact.add(NodeType.ENUMERATION);
        artifact.add(NodeType.INTERFACE);
        artifact.add(NodeType.COMPONENT);
        datatype.add(NodeType.CLASS);
        datatype.add(NodeType.ARTIFACT);
        datatype.add(NodeType.DATA_TYPE);
        datatype.add(NodeType.ENUMERATION);
        datatype.add(NodeType.INTERFACE);
        interfaces.add(NodeType.CLASS);
        interfaces.add(NodeType.ARTIFACT);
        interfaces.add(NodeType.DATA_TYPE);
        interfaces.add(NodeType.ENUMERATION);
        interfaces.add(NodeType.INTERFACE);
        packages.add(NodeType.PACKAGE);
        this.ruleDependency.put(NodeType.ARTIFACT, artifact);
        this.ruleDependency.put(NodeType.CLASS, classes);
        this.ruleDependency.put(NodeType.DATA_TYPE, datatype);
        this.ruleDependency.put(NodeType.INTERFACE, interfaces);
        this.ruleDependency.put(NodeType.PACKAGE, packages);
        this.ruleDependency.put(NodeType.USE_CASE, usecase);
        this.ruleDependency.put(NodeType.COMPONENT, component);
        this.ruleDependency.put(NodeType.ACTOR, actor);
        // Abstraction and Usage deliberately share the dependency rules.
        this.ruleDictionary.put(RelationType.DEPENDENCY, this.ruleDependency);
        this.ruleDictionary.put(RelationType.ABSTRACTION, this.ruleDependency);
        this.ruleDictionary.put(RelationType.USAGE, this.ruleDependency);
        // this.ruleDictionary.put(RelationType.EXTEND, this.ruleDependency);
        // this.ruleDictionary.put(RelationType.INCLUDE, this.ruleDependency);
    }
    /**
     * Relation:Include and Relation:Extend — use-case to use-case only. void
     */
    private void initIncludeExclude() {
        List<NodeType> usecase = new ArrayList<NodeType>();
        usecase.add(NodeType.USE_CASE);
        this.ruleInclude.put(NodeType.USE_CASE, usecase);
        this.ruleExtend.put(NodeType.USE_CASE, usecase);
        this.ruleDictionary.put(RelationType.INCLUDE, this.ruleInclude);
        this.ruleDictionary.put(RelationType.EXTEND, this.ruleExtend);
    }
    /**
     * Relation:Association (also Aggregation/Composition and directed forms) void
     */
    private void initAssociation() {
        List<NodeType> actor = new ArrayList<NodeType>();
        List<NodeType> artifact = new ArrayList<NodeType>();
        List<NodeType> classes = new ArrayList<NodeType>();
        List<NodeType> datatype = new ArrayList<NodeType>();
        List<NodeType> interfaces = new ArrayList<NodeType>();
        List<NodeType> usecase = new ArrayList<NodeType>();
        List<NodeType> component = new ArrayList<NodeType>();
        actor.add(NodeType.USE_CASE);
        actor.add(NodeType.CLASS);
        actor.add(NodeType.INTERFACE);
        actor.add(NodeType.COMPONENT);
        usecase.add(NodeType.ACTOR);
        classes.add(NodeType.CLASS);
        classes.add(NodeType.ARTIFACT);
        classes.add(NodeType.DATA_TYPE);
        classes.add(NodeType.ENUMERATION);
        classes.add(NodeType.INTERFACE);
        classes.add(NodeType.ACTOR);
        classes.add(NodeType.COMPONENT);
        artifact.add(NodeType.CLASS);
        artifact.add(NodeType.ARTIFACT);
        artifact.add(NodeType.DATA_TYPE);
        artifact.add(NodeType.ENUMERATION);
        artifact.add(NodeType.INTERFACE);
        datatype.add(NodeType.CLASS);
        datatype.add(NodeType.ARTIFACT);
        datatype.add(NodeType.DATA_TYPE);
        datatype.add(NodeType.ENUMERATION);
        datatype.add(NodeType.INTERFACE);
        interfaces.add(NodeType.CLASS);
        interfaces.add(NodeType.ARTIFACT);
        interfaces.add(NodeType.DATA_TYPE);
        interfaces.add(NodeType.ENUMERATION);
        interfaces.add(NodeType.INTERFACE);
        interfaces.add(NodeType.ACTOR);
        interfaces.add(NodeType.COMPONENT);
        component.add(NodeType.CLASS);
        component.add(NodeType.ARTIFACT);
        component.add(NodeType.DATA_TYPE);
        component.add(NodeType.ENUMERATION);
        component.add(NodeType.INTERFACE);
        component.add(NodeType.ACTOR);
        component.add(NodeType.COMPONENT);
        this.ruleAssociation.put(NodeType.ACTOR, actor);
        this.ruleAssociation.put(NodeType.ARTIFACT, artifact);
        this.ruleAssociation.put(NodeType.CLASS, classes);
        this.ruleAssociation.put(NodeType.DATA_TYPE, datatype);
        this.ruleAssociation.put(NodeType.INTERFACE, interfaces);
        this.ruleAssociation.put(NodeType.USE_CASE, usecase);
        this.ruleAssociation.put(NodeType.COMPONENT, component);
        // All association variants share one rule table.
        this.ruleDictionary.put(RelationType.ASSOCIATION, this.ruleAssociation);
        this.ruleDictionary.put(RelationType.AGGREGATION, this.ruleAssociation);
        this.ruleDictionary.put(RelationType.COMPOSITION, this.ruleAssociation);
        this.ruleDictionary.put(RelationType.<API key>, this.ruleAssociation);
        this.ruleDictionary.put(RelationType.<API key>, this.ruleAssociation);
        this.ruleDictionary.put(RelationType.<API key>, this.ruleAssociation);
    }
    /**
     * Relation:Generalization — only between nodes of compatible kind. void
     */
    private void initGeneralization() {
        List<NodeType> classes = new ArrayList<NodeType>();
        List<NodeType> datatype = new ArrayList<NodeType>();
        List<NodeType> enumeration = new ArrayList<NodeType>();
        List<NodeType> interfaces = new ArrayList<NodeType>();
        List<NodeType> usecase = new ArrayList<NodeType>();
        List<NodeType> actor = new ArrayList<NodeType>();
        classes.add(NodeType.CLASS);
        datatype.add(NodeType.DATA_TYPE);
        enumeration.add(NodeType.DATA_TYPE);
        enumeration.add(NodeType.ENUMERATION);
        interfaces.add(NodeType.INTERFACE);
        usecase.add(NodeType.USE_CASE);
        actor.add(NodeType.ACTOR);
        this.ruleGeneralization.put(NodeType.CLASS, classes);
        this.ruleGeneralization.put(NodeType.DATA_TYPE, datatype);
        this.ruleGeneralization.put(NodeType.ENUMERATION, enumeration);
        this.ruleGeneralization.put(NodeType.INTERFACE, interfaces);
        this.ruleGeneralization.put(NodeType.USE_CASE, usecase);
        this.ruleGeneralization.put(NodeType.ACTOR, actor);
        this.ruleDictionary.put(RelationType.GENERALIZATION, this.ruleGeneralization);
    }
    /**
     * Relation:Realization void
     */
    private void initRealization() {
        List<NodeType> collaborationuse = new ArrayList<NodeType>();
        List<NodeType> component = new ArrayList<NodeType>();
        // List<NodeType> packages = new ArrayList<NodeType>();
        List<NodeType> usecase = new ArrayList<NodeType>();
        List<NodeType> classes = new ArrayList<NodeType>();
        usecase.add(NodeType.USE_CASE);
        classes.add(NodeType.INTERFACE);
        collaborationuse.add(NodeType.USE_CASE);
        // packages.add(NodeType.CLASS);
        // packages.add(NodeType.INTERFACE);
        component.add(NodeType.INTERFACE);
        component.add(NodeType.COMPONENT);
        component.add(NodeType.ARTIFACT);
        this.ruleRealization.put(NodeType.USE_CASE, usecase);
        this.ruleRealization.put(NodeType.COLLABORATION_USE, collaborationuse);
        this.ruleRealization.put(NodeType.COLLABORATION, collaborationuse);
        // this.ruleRealization.put(NodeType.PACKAGE, packages);
        this.ruleRealization.put(NodeType.COMPONENT, component);
        this.ruleRealization.put(NodeType.CLASS, classes);
        this.ruleDictionary.put(RelationType.REALIZATION, this.ruleRealization);
        this.ruleDictionary.put(RelationType.<API key>, this.ruleRealization);
        this.ruleDictionary.put(RelationType.<API key>, this.ruleRealization);
    }
    /**
     * Relation:Message — between lifelines and behaviors void
     */
    private void initMessage() {
        List<NodeType> line = new ArrayList<NodeType>();
        List<NodeType> lifeLineBehavior = new ArrayList<NodeType>();
        line.add(NodeType.LINE);
        line.add(NodeType.LIFE_LINE_BEHAVIOR);
        lifeLineBehavior.add(NodeType.LINE);
        lifeLineBehavior.add(NodeType.LIFE_LINE_BEHAVIOR);
        this.ruleMessage.put(NodeType.LINE, line);
        // Bug fix: lifeLineBehavior was built but never used — the LIFE_LINE_BEHAVIOR
        // key was mapped to the (content-identical) 'line' list instead. Behavior is
        // unchanged; the dead store is gone.
        this.ruleMessage.put(NodeType.LIFE_LINE_BEHAVIOR, lifeLineBehavior);
        this.ruleDictionary.put(RelationType.MESSAGE, this.ruleMessage);
        this.ruleDictionary.put(RelationType.SYNCHRONOUS_MESSAGE, this.ruleMessage);
        this.ruleDictionary.put(RelationType.<API key>, this.ruleMessage);
        this.ruleDictionary.put(RelationType.DESTROY_MESSAGE, this.ruleMessage);
    }
    /**
     * Relation:ReplyMessage — only from a lifeline behavior. void
     */
    private void initReplyMessage() {
        List<NodeType> line = new ArrayList<NodeType>();
        line.add(NodeType.LINE);
        line.add(NodeType.LIFE_LINE_BEHAVIOR);
        this.ruleReplyMessage.put(NodeType.LIFE_LINE_BEHAVIOR, line);
        this.ruleDictionary.put(RelationType.REPLY_MESSAGE, this.ruleReplyMessage);
    }
    /**
     * Relation:ObjectFlow void
     */
    private void initObjectFlow() {
        List<NodeType> decisionnode = new ArrayList<NodeType>();
        List<NodeType> forknode = new ArrayList<NodeType>();
        List<NodeType> initialnode = new ArrayList<NodeType>();
        List<NodeType> joinnode = new ArrayList<NodeType>();
        List<NodeType> mergenode = new ArrayList<NodeType>();
        List<NodeType> opaqueaction = new ArrayList<NodeType>();
        List<NodeType> dataNode = new ArrayList<NodeType>();
        initialnode.add(NodeType.CENTRAL_BUFFER_NODE);
        initialnode.add(NodeType.DATA_STORE_NODE);
        opaqueaction.add(NodeType.CENTRAL_BUFFER_NODE);
        opaqueaction.add(NodeType.DATA_STORE_NODE);
        decisionnode.add(NodeType.CENTRAL_BUFFER_NODE);
        decisionnode.add(NodeType.DATA_STORE_NODE);
        forknode.add(NodeType.CENTRAL_BUFFER_NODE);
        forknode.add(NodeType.DATA_STORE_NODE);
        joinnode.add(NodeType.CENTRAL_BUFFER_NODE);
        joinnode.add(NodeType.DATA_STORE_NODE);
        mergenode.add(NodeType.CENTRAL_BUFFER_NODE);
        mergenode.add(NodeType.DATA_STORE_NODE);
        dataNode.add(NodeType.OPAQUE_ACTION);
        dataNode.add(NodeType.JOIN_NODE);
        dataNode.add(NodeType.CENTRAL_BUFFER_NODE);
        dataNode.add(NodeType.DATA_STORE_NODE);
        this.ruleObjectFlow.put(NodeType.DECISION_NODE, decisionnode);
        this.ruleObjectFlow.put(NodeType.FORK_NODE, forknode);
        this.ruleObjectFlow.put(NodeType.INITIAL_NODE, initialnode);
        this.ruleObjectFlow.put(NodeType.JOIN_NODE, joinnode);
        this.ruleObjectFlow.put(NodeType.MERGE_NODE, mergenode);
        this.ruleObjectFlow.put(NodeType.OPAQUE_ACTION, opaqueaction);
        this.ruleObjectFlow.put(NodeType.CENTRAL_BUFFER_NODE, dataNode);
        this.ruleObjectFlow.put(NodeType.DATA_STORE_NODE, dataNode);
        this.ruleDictionary.put(RelationType.OBJECT_FLOW, this.ruleObjectFlow);
    }
    /**
     * Relation:ControlFlow void
     */
    private void initControlFlow() {
        List<NodeType> decisionnode = new ArrayList<NodeType>();
        List<NodeType> forknode = new ArrayList<NodeType>();
        List<NodeType> initialnode = new ArrayList<NodeType>();
        List<NodeType> joinnode = new ArrayList<NodeType>();
        List<NodeType> mergenode = new ArrayList<NodeType>();
        List<NodeType> opaqueaction = new ArrayList<NodeType>();
        initialnode.add(NodeType.ACTIVITY_FINAL_NODE);
        initialnode.add(NodeType.DECISION_NODE);
        initialnode.add(NodeType.FORK_NODE);
        initialnode.add(NodeType.OPAQUE_ACTION);
        opaqueaction.add(NodeType.ACTIVITY_FINAL_NODE);
        opaqueaction.add(NodeType.DECISION_NODE);
        opaqueaction.add(NodeType.FORK_NODE);
        opaqueaction.add(NodeType.JOIN_NODE);
        opaqueaction.add(NodeType.MERGE_NODE);
        opaqueaction.add(NodeType.OPAQUE_ACTION);
        decisionnode.add(NodeType.ACTIVITY_FINAL_NODE);
        decisionnode.add(NodeType.DECISION_NODE);
        decisionnode.add(NodeType.FORK_NODE);
        decisionnode.add(NodeType.JOIN_NODE);
        decisionnode.add(NodeType.MERGE_NODE);
        decisionnode.add(NodeType.OPAQUE_ACTION);
        forknode.add(NodeType.ACTIVITY_FINAL_NODE);
        forknode.add(NodeType.DECISION_NODE);
        forknode.add(NodeType.FORK_NODE);
        forknode.add(NodeType.JOIN_NODE);
        forknode.add(NodeType.MERGE_NODE);
        forknode.add(NodeType.OPAQUE_ACTION);
        joinnode.add(NodeType.ACTIVITY_FINAL_NODE);
        joinnode.add(NodeType.DECISION_NODE);
        joinnode.add(NodeType.FORK_NODE);
        joinnode.add(NodeType.JOIN_NODE);
        joinnode.add(NodeType.MERGE_NODE);
        joinnode.add(NodeType.OPAQUE_ACTION);
        mergenode.add(NodeType.ACTIVITY_FINAL_NODE);
        mergenode.add(NodeType.DECISION_NODE);
        mergenode.add(NodeType.FORK_NODE);
        mergenode.add(NodeType.JOIN_NODE);
        mergenode.add(NodeType.MERGE_NODE);
        mergenode.add(NodeType.OPAQUE_ACTION);
        this.ruleControlFlow.put(NodeType.DECISION_NODE, decisionnode);
        this.ruleControlFlow.put(NodeType.FORK_NODE, forknode);
        this.ruleControlFlow.put(NodeType.INITIAL_NODE, initialnode);
        this.ruleControlFlow.put(NodeType.JOIN_NODE, joinnode);
        this.ruleControlFlow.put(NodeType.MERGE_NODE, mergenode);
        this.ruleControlFlow.put(NodeType.OPAQUE_ACTION, opaqueaction);
        this.ruleDictionary.put(RelationType.CONTROL_FLOW, this.ruleControlFlow);
    }
}
package de.berlin.hu.uima.ae.filter;
import de.berlin.hu.util.Constants;
import org.apache.uima.UimaContext;
import org.apache.uima.analysis_component.<API key>;
import org.apache.uima.analysis_engine.<API key>;
import org.apache.uima.cas.FSIndex;
import org.apache.uima.jcas.JCas;
import org.apache.uima.resource.<API key>;
import org.u_compare.shared.semantic.NamedEntity;
import java.io.*;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
/**
* Filters annotations according to a stopword list generated from the Google-N-Gram corpus.
*/
public class StopwordFilter extends <API key> {
private List<NamedEntity> invalidChemicals = null;
private HashSet<String> stopwords = new HashSet<String>();
@Override
public void initialize(UimaContext aContext)
throws <API key> {
super.initialize(aContext);
InputStream stopwordFile = this.getClass().getClassLoader().getResourceAsStream("resources/chemspot_stop_words.txt");
try {
BufferedReader reader = new BufferedReader(new InputStreamReader(stopwordFile));
String line = "";
while(null != (line = reader.readLine()) ) {
stopwords.add(line);
}
} catch (<API key> e) {
throw new <API key>(e);
} catch (IOException e) {
throw new <API key>(e);
}
}
@Override
public void process(JCas aJCas) throws <API key> {
FSIndex chemicalIndex = aJCas.getAnnotationIndex(NamedEntity.type);
Iterator chemicalIterator = chemicalIndex.iterator();
invalidChemicals = new ArrayList<NamedEntity>();
while (chemicalIterator.hasNext()) {
NamedEntity chemical = (NamedEntity) chemicalIterator.next();
if (!Constants.GOLDSTANDARD.equals(chemical.getSource())) {
if (stopwords.contains(chemical.getCoveredText().toLowerCase())) {
invalidChemicals.add(chemical);
}
}
}
for (NamedEntity invalidChemical : invalidChemicals) {
invalidChemical.removeFromIndexes();
}
}
} |
package arduinoml;
import org.eclipse.emf.ecore.EFactory;
/**
* <!-- begin-user-doc -->
* The <b>Factory</b> for the model.
* It provides a create method for each non-abstract class of the model.
* <!-- end-user-doc -->
* @see arduinoml.ArduinomlPackage
* @generated
*/
public interface ArduinomlFactory extends EFactory {
    /**
     * The singleton instance of the factory.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    // Created once by the generated implementation's init() and shared by all clients.
    ArduinomlFactory eINSTANCE = arduinoml.impl.<API key>.init();
    /**
     * Returns a new object of class '<em>Board</em>'.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @return a new object of class '<em>Board</em>'.
     * @generated
     */
    Board createBoard();
    /**
     * Returns a new object of class '<em>State</em>'.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @return a new object of class '<em>State</em>'.
     * @generated
     */
    State createState();
    /**
     * Returns a new object of class '<em>Actuator</em>'.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @return a new object of class '<em>Actuator</em>'.
     * @generated
     */
    Actuator createActuator();
    /**
     * Returns a new object of class '<em>Sensor</em>'.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @return a new object of class '<em>Sensor</em>'.
     * @generated
     */
    Sensor createSensor();
    /**
     * Returns a new object of class '<em>Transition</em>'.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @return a new object of class '<em>Transition</em>'.
     * @generated
     */
    Transition createTransition();
    /**
     * Returns a new object of class '<em>Off</em>'.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @return a new object of class '<em>Off</em>'.
     * @generated
     */
    Off createOff();
    /**
     * Returns a new object of class '<em>On</em>'.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @return a new object of class '<em>On</em>'.
     * @generated
     */
    On createOn();
    /**
     * Returns a new object of class '<em>Actuator State</em>'.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @return a new object of class '<em>Actuator State</em>'.
     * @generated
     */
    ActuatorState createActuatorState();
    /**
     * Returns the package supported by this factory.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @return the package supported by this factory.
     * @generated
     */
    ArduinomlPackage getArduinomlPackage();
} //ArduinomlFactory
package org.yakindu.sct.ui.editor.clipboardsupport;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.gmf.runtime.emf.clipboard.core.IClipboardSupport;
import org.eclipse.gmf.runtime.emf.clipboard.core.<API key>;
/**
 * Clipboard support factory that hands out a single, shared customized
 * {@link IClipboardSupport} instance, independent of the requested EPackage.
 *
 * @author muehlbrandt
 *
 */
public class <API key> implements <API key> {
    /** One shared helper instance is handed to every caller. */
    private final IClipboardSupport sharedSupport = new <API key>();

    public IClipboardSupport newClipboardSupport(EPackage ePackage) {
        // The ePackage argument is ignored: the same support serves all packages.
        return sharedSupport;
    }
}
package de.markusbarchfeld.spreadsheetfitnesse.macrocall;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.apache.poi.hssf.usermodel.HSSFCell;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.DateUtil;
import org.apache.poi.ss.usermodel.FormulaEvaluator;
import org.apache.poi.ss.usermodel.Name;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.ss.util.CellReference;
import de.markusbarchfeld.spreadsheetfitnesse.<API key>;
import de.markusbarchfeld.spreadsheetfitnesse.WikiPage;
import de.markusbarchfeld.spreadsheetfitnesse.sources.PoiCell;
import de.markusbarchfeld.spreadsheetfitnesse.token.AddedTableCell;
import de.markusbarchfeld.spreadsheetfitnesse.token.CellToken;
import de.markusbarchfeld.spreadsheetfitnesse.token.IVisitable;
import de.markusbarchfeld.spreadsheetfitnesse.token.Tokens;
/**
 * Executes a spreadsheet "macro": fills named parameter cells of the backing
 * workbook, re-evaluates every formula, and turns the resulting sheet into
 * wiki tokens / a wiki page.
 */
public class MacroCall implements IMacroCall {
    private <API key> <API key>;

    public MacroCall(<API key> <API key>) {
        this.<API key> = <API key>;
    }

    /**
     * Writes each parameter into its named cell, re-evaluates the workbook and
     * tokenizes the given sheet.
     *
     * @throws MacroCallException when a parameter has no matching named cell
     */
    public Tokens createTokens(String testCaseName, String sheetName,
            KeyValue... params) throws MacroCallException {
        Workbook workbook = <API key>.getWorkbook();
        for (KeyValue keyValue : params) {
            Cell cell = getCellForParameter(keyValue);
            if (cell == null) {
                throw new MacroCallException("There is no cell named '"
                        + keyValue.getKey() + "'");
            }
            // Consistency: use the format-agnostic Cell constant (same value)
            // instead of the HSSF-specific one.
            if (cell.getCellType() == Cell.CELL_TYPE_NUMERIC) {
                if (DateUtil.isCellDateFormatted(cell)) {
                    // Date-formatted numeric cell: parse the parameter as a date.
                    try {
                        Date date = PoiCell.DATE_FORMAT.parse(keyValue.getValue());
                        cell.setCellValue(date);
                    } catch (ParseException e) {
                        throw new RuntimeException(e);
                    }
                } else {
                    // Plain numeric cell: try a number first, fall back to the raw string.
                    try {
                        Number number = PoiCell.getDecimalFormat().parse(keyValue.getValue());
                        cell.setCellValue(number.doubleValue());
                    } catch (ParseException nfe) {
                        cell.setCellValue(keyValue.getValue());
                    }
                }
            } else {
                cell.setCellValue(keyValue.getValue());
            }
        }
        FormulaEvaluator <API key> = workbook.getCreationHelper()
                .<API key>();
        <API key>.evaluateAll();
        return <API key>.createTokens(sheetName);
    }

    /**
     * Resolves the workbook cell behind a named range, or null when the name,
     * its sheet or its row does not exist.
     */
    private Cell getCellForParameter(KeyValue keyValue) {
        Workbook workbook = <API key>.getWorkbook();
        Name name = workbook.getName(keyValue.getKey());
        Cell result = null;
        if (name != null) {
            CellReference cellReference = new CellReference(name.getRefersToFormula());
            Sheet sheetOfReference = workbook.getSheet(name.getSheetName());
            // Bug fix: a name referring to a missing sheet or an empty row caused
            // a NullPointerException; now it yields null and the caller reports
            // "There is no cell named ..." instead.
            if (sheetOfReference != null) {
                Row row = sheetOfReference.getRow(cellReference.getRow());
                if (row != null) {
                    result = row.getCell(cellReference.getCol());
                }
            }
        }
        return result;
    }

    /**
     * Runs the macro and names the resulting page; on failure, produces a page
     * containing the error message instead of propagating the exception.
     */
    @Override
    public void call(String testCaseName, String sheetName, KeyValue... params) {
        try {
            Tokens tokens = this.createTokens(testCaseName, sheetName, params);
            WikiPage page = <API key>.<API key>(tokens);
            page.setName(testCaseName);
        } catch (MacroCallException e) {
            // Surface the error as a one-cell table on the generated page.
            List<IVisitable> tokens = new ArrayList<IVisitable>();
            tokens.add(new AddedTableCell(e.getMessage()));
            Tokens errorTokens = new Tokens(tokens);
            WikiPage page = <API key>
                    .<API key>(errorTokens);
            page.setName(testCaseName);
        }
    }
}
package de.fraunhofer.esk.ernest.core.analysis.framework.preferences;
import org.eclipse.jface.preference.PreferencePage;
import org.eclipse.jface.viewers.<API key>;
import org.eclipse.jface.viewers.ColumnLabelProvider;
import org.eclipse.jface.viewers.<API key>;
import org.eclipse.jface.viewers.<API key>;
import org.eclipse.jface.viewers.StructuredSelection;
import org.eclipse.jface.viewers.TableViewer;
import org.eclipse.jface.viewers.TableViewerColumn;
import org.eclipse.jface.viewers.Viewer;
import org.eclipse.jface.viewers.ViewerComparator;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Table;
import org.eclipse.swt.widgets.TableColumn;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.IWorkbench;
import org.eclipse.ui.<API key>;
import de.fraunhofer.esk.ernest.core.analysis.framework.AnalysisType;
/**
 * Preference page that lists every {@link AnalysisType} registered with the
 * ERNEST Analysis Framework in a sortable table, and shows the name, file
 * extension and implementing class of the currently selected analysis in
 * read-only detail fields below the table.
 * <p>
 * The page is purely informational at the moment: it exposes no editable
 * preference values.
 */
public class <API key> extends PreferencePage implements <API key> {

    /** Viewer over the table of registered analyses. */
    private TableViewer viewer;
    private TableViewerColumn viewerColumn_Name;
    private TableViewerColumn viewerColumn_Ext;
    private TableViewerColumn viewerColumn_Class;
    /** Read-only detail fields mirroring the selected table row. */
    private Text text_name;
    private Text text_ext;
    private Text text_class;
    private final <API key> comparator = new <API key>();

    /**
     * Comparator that sorts the table by one of its three columns.
     * Selecting the same column again toggles the sort direction; a newly
     * selected column starts in descending order.
     */
    public static class <API key> extends ViewerComparator {
        private int colNumber = 0;
        private static final int DESCENDING = 1;
        private int direction = DESCENDING;

        public <API key>() {
            this.colNumber = 0;
            this.direction = DESCENDING;
        }

        /** @return the SWT sort-indicator constant for the current direction */
        public int getDirection() {
            return this.direction == DESCENDING ? SWT.DOWN : SWT.UP;
        }

        /**
         * Selects the column to sort by.
         *
         * @param colNumber index of the clicked column (0 = name,
         *                  1 = extension, 2 = classname)
         */
        public void setColumn(int colNumber) {
            if (this.colNumber == colNumber) {
                // Same column clicked again: toggle between 1 and 0.
                this.direction = 1 - this.direction;
            } else {
                this.colNumber = colNumber;
                this.direction = DESCENDING;
            }
        }

        @Override
        public int compare(Viewer viewer, Object e1, Object e2) {
            AnalysisType a1 = (AnalysisType) e1;
            AnalysisType a2 = (AnalysisType) e2;
            int rc;
            switch (this.colNumber) {
            case 0:
                rc = a1.getName().compareTo(a2.getName());
                break;
            case 1:
                rc = a1.getExtension().compareTo(a2.getExtension());
                break;
            case 2:
                rc = a1.getClassname().compareTo(a2.getClassname());
                break;
            default:
                rc = 0;
                break;
            }
            // If descending order, flip the direction
            if (this.direction == DESCENDING) {
                rc = -rc;
            }
            return rc;
        }
    }

    /**
     * Pushes the attributes of the newly selected analysis into the detail
     * fields. Empty selections are ignored; previously an empty selection
     * caused a NullPointerException on {@code type.getName()}.
     */
    <API key> changeListener = new <API key>() {
        @Override
        public void selectionChanged(<API key> event) {
            StructuredSelection sel = (StructuredSelection) event.getSelection();
            AnalysisType type = (AnalysisType) sel.getFirstElement();
            if (type == null) {
                // Selection was cleared: keep the current field contents.
                return;
            }
            <API key>.this.text_name.setText(type.getName());
            <API key>.this.text_ext.setText(type.getExtension());
            <API key>.this.text_class.setText(type.getClassname());
        }
    };

    public <API key>() {
        super();
        this.setDescription("Options for the ERNEST Analysis Framework");
        this.<API key>();
    }

    @Override
    public void init(IWorkbench workbench) {
        // Nothing to initialize; the table input comes from AnalysisType.
    }

    /**
     * Builds the page contents: a table of all registered analyses, a
     * horizontal separator, and three read-only detail fields.
     */
    @Override
    protected Control createContents(Composite parent) {
        GridLayout gridParent = new GridLayout();
        gridParent.numColumns = 1;
        parent.setLayout(gridParent);
        Label label = new Label(parent, SWT.NULL);
        label.setText("Listing of all registered Analyses:");
        // Table viewer listing all registered analyses.
        this.createTableViewer(parent);
        GridData data = new GridData();
        data.<API key> = true;
        data.horizontalAlignment = GridData.FILL;
        Label separator = new Label(parent, SWT.SEPARATOR | SWT.HORIZONTAL);
        separator.setLayoutData(data);
        Composite container = new Composite(parent, SWT.NULL);
        GridLayout gridLayout = new GridLayout();
        gridLayout.numColumns = 10;
        container.setLayout(gridLayout);
        // Labels span 2 of 10 grid columns, text fields the remaining 8.
        GridData data_label = new GridData();
        data_label.horizontalSpan = 2;
        GridData data_text = new GridData();
        data_text.horizontalSpan = 8;
        data_text.<API key> = true;
        data_text.horizontalAlignment = GridData.FILL;
        container.setLayoutData(data_text);
        Label label_name = new Label(container, SWT.NULL);
        label_name.setText("Name:");
        label_name.setLayoutData(data_label);
        this.text_name = new Text(container, SWT.NULL);
        this.text_name.setText("Select an Analysis");
        this.text_name.setLayoutData(data_text);
        this.text_name.setEditable(false);
        Label label_ext = new Label(container, SWT.NULL);
        label_ext.setText("Extension:");
        label_ext.setLayoutData(data_label);
        this.text_ext = new Text(container, SWT.NULL);
        this.text_ext.setText("Select an Analysis");
        this.text_ext.setLayoutData(data_text);
        this.text_ext.setEditable(false);
        Label label_class = new Label(container, SWT.NULL);
        label_class.setText("Classname:");
        label_class.setLayoutData(data_label);
        this.text_class = new Text(container, SWT.NULL);
        this.text_class.setText("Select an Analysis");
        this.text_class.setLayoutData(data_text);
        this.text_class.setEditable(false);
        // NOTE(review): returning null relies on PreferencePage tolerating a
        // null content control; returning the top-level composite would be
        // the conventional contract - confirm before changing.
        return null;
    }

    /** Creates and configures the table viewer listing the analyses. */
    private void createTableViewer(Composite parent) {
        this.viewer = new TableViewer(parent, SWT.SINGLE | SWT.H_SCROLL | SWT.V_SCROLL | SWT.FULL_SELECTION | SWT.BORDER);
        this.createColumns(parent);
        Table table = this.viewer.getTable();
        table.setHeaderVisible(true);
        table.setLinesVisible(true);
        GridData gridData = new GridData();
        gridData.verticalAlignment = GridData.BEGINNING;
        gridData.<API key> = true;
        gridData.<API key> = false;
        gridData.horizontalAlignment = GridData.FILL;
        this.viewer.getControl().setLayoutData(gridData);
        this.viewer.<API key>(this.changeListener);
        this.viewer.setContentProvider(new <API key>());
        this.viewer.setInput(AnalysisType.getAnalyses());
        this.viewer.setComparator(this.comparator);
        // Size every column to fit its content.
        this.viewerColumn_Name.getColumn().pack();
        this.viewerColumn_Ext.getColumn().pack();
        this.viewerColumn_Class.getColumn().pack();
    }

    /** Creates the three sortable columns: name, extension, classname. */
    private void createColumns(Composite parent) {
        this.viewerColumn_Name = this.<API key>("Name", 100, 0);
        this.viewerColumn_Name.setLabelProvider(new ColumnLabelProvider() {
            @Override
            public String getText(Object element) {
                AnalysisType a = (AnalysisType) element;
                return a.getName();
            }
        });
        this.viewerColumn_Ext = this.<API key>("Extension", 100, 1);
        this.viewerColumn_Ext.setLabelProvider(new ColumnLabelProvider() {
            @Override
            public String getText(Object element) {
                AnalysisType a = (AnalysisType) element;
                return a.getExtension();
            }
        });
        this.viewerColumn_Class = this.<API key>("Full classname", 100, 2);
        this.viewerColumn_Class.setLabelProvider(new ColumnLabelProvider() {
            @Override
            public String getText(Object element) {
                AnalysisType a = (AnalysisType) element;
                return a.getClassname();
            }
        });
    }

    /**
     * Creates a single sortable table column.
     *
     * @param label     column header text
     * @param bounds    initial column width in pixels
     * @param colNumber column index passed to the comparator on header click
     */
    private TableViewerColumn <API key>(final String label, final int bounds, final int colNumber) {
        TableViewerColumn viewerColumn = new TableViewerColumn(this.viewer, SWT.NONE);
        TableColumn column = viewerColumn.getColumn();
        column.setText(label);
        column.setWidth(bounds);
        column.setResizable(true);
        column.setMoveable(true);
        column.<API key>(this.getSelectionAdapter(column, colNumber));
        return viewerColumn;
    }

    /**
     * Returns a listener that sorts the table by the clicked column,
     * toggling the direction on repeated clicks.
     */
    private SelectionAdapter getSelectionAdapter(final TableColumn column, final int index) {
        SelectionAdapter selectionAdapter = new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                <API key>.this.comparator.setColumn(index);
                int dir = <API key>.this.comparator.getDirection();
                <API key>.this.viewer.getTable().setSortDirection(dir);
                <API key>.this.viewer.refresh();
            }
        };
        return selectionAdapter;
    }
}
package org.lunifera.vaaclipse.wizards.project;
import java.io.<API key>;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.reflect.<API key>;
import java.net.<API key>;
import java.net.URL;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.zip.ZipEntry;
import java.util.zip.ZipException;
import java.util.zip.ZipFile;
import org.eclipse.core.resources.IContainer;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IFolder;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.Path;
import org.eclipse.pde.internal.ui.PDEUIMessages;
import org.eclipse.pde.ui.templates.IVariableProvider;
import org.eclipse.ui.actions.<API key>;
import org.lunifera.vaaclipse.wizards.ToolsPlugin;
public class TemplateOperation extends <API key> implements
        IVariableProvider {

    /** Root of the template tree; either a {@code file:} or a {@code jar:} URL. */
    private final URL templateDirectory;
    /** Workspace container the processed template files are generated into. */
    private final IContainer target;
    /** Substitution values for {@code $key$} / {@code @@key@@} references. */
    private final Map<String, String> keys;
    /** Extensions (including the leading dot) copied verbatim, unsubstituted. */
    private final Set<String> binaryExtentions;
    /** When true, only the first level of the template tree is generated. */
    private boolean isMinimalist;

    /**
     * @param source           template root (directory or jar-internal path)
     * @param target           destination container in the workspace
     * @param keys             variable substitution map
     * @param binaryExtentions extensions copied without substitution
     * @param justProduct      generate only the top level of the template
     */
    public TemplateOperation(URL source, IContainer target,
            Map<String, String> keys, Set<String> binaryExtentions, boolean justProduct) {
        templateDirectory = source;
        this.binaryExtentions = binaryExtentions;
        this.target = target;
        this.keys = keys;
        this.isMinimalist = justProduct;
    }

    /**
     * Generates the template files into {@link #target}. The source is either
     * a directory on disk ({@code file:} URL) or a directory inside the
     * contributing plug-in's jar ({@code jar:} URL).
     */
    @Override
    protected void execute(IProgressMonitor monitor) throws CoreException,
            <API key>, <API key> {
        monitor.setTaskName(PDEUIMessages.<API key>);
        if ("jar".equals(templateDirectory.getProtocol())) { //$NON-NLS-1$
            String file = templateDirectory.getFile();
            int exclamation = file.indexOf('!');
            if (exclamation < 0)
                return; // not a valid jar URL of the form "...jar!/path"
            URL fileUrl = null;
            try {
                fileUrl = new URL(file.substring(0, exclamation));
            } catch (<API key> mue) {
                ToolsPlugin.logError(mue);
                return;
            }
            File pluginJar = new File(fileUrl.getFile());
            if (!pluginJar.exists())
                return;
            // Path of the template directory inside the jar, e.g. "/some/path/".
            // (Local renamed from "templateDirectory" to stop shadowing the field.)
            String jarRelativePath = file.substring(exclamation + 1);
            IPath path = new Path(jarRelativePath);
            ZipFile zipFile = null;
            try {
                zipFile = new ZipFile(pluginJar);
                generateFiles(zipFile, path, target, monitor);
            } catch (ZipException ze) {
                ToolsPlugin.logError(ze);
            } catch (IOException ioe) {
                ToolsPlugin.logError(ioe);
            } finally {
                if (zipFile != null) {
                    try {
                        zipFile.close();
                    } catch (IOException e) {
                        // best-effort close
                    }
                }
            }
        } else if ("file".equals(templateDirectory.getProtocol())) { //$NON-NLS-1$
            File directory = new File(templateDirectory.getFile());
            if (!directory.exists())
                return;
            generateFiles(directory, target, true, monitor);
        }
    }

    /**
     * Recursively copies a template directory from disk into {@code dst},
     * substituting variables in folder names, file names and text content.
     * Recursion stops after the first level when {@link #isMinimalist} is set.
     */
    private void generateFiles(File src, IContainer dst, boolean firstLevel,
            IProgressMonitor monitor) throws CoreException {
        if ((!firstLevel) && (isMinimalist))
            return;
        File[] members = src.listFiles();
        if (members == null)
            return; // src is not a directory or vanished concurrently
        for (File member : members) {
            String name = member.getName();
            if (member.isDirectory()) {
                // Skip version-control metadata directories.
                if (".svn".equals(name) || "cvs".equalsIgnoreCase(name))
                    continue;
                String folderName = getProcessedString(name, name);
                IContainer dstContainer = dst.getFolder(new Path(folderName));
                if (!dstContainer.exists())
                    ((IFolder) dstContainer).create(true, true, monitor);
                generateFiles(member, dstContainer, false, monitor);
            } else {
                InputStream in = null;
                try {
                    in = new FileInputStream(member);
                    copyFile(name, in, dst, monitor);
                } catch (IOException ioe) {
                    ToolsPlugin.logError(ioe);
                } finally {
                    if (in != null)
                        try {
                            in.close();
                        } catch (IOException ioe2) {
                            // best-effort close
                        }
                }
            }
        }
    }

    /**
     * Recursively copies the template entries below {@code path} from the
     * plug-in jar into {@code dst}.
     *
     * @param zipFile the plug-in jar containing the template
     * @param path    jar-internal path of the directory to copy
     * @param dst     destination container in the workspace
     * @param monitor progress monitor for subtask reporting
     * @throws CoreException if a workspace resource cannot be created
     */
    private void generateFiles(ZipFile zipFile, IPath path, IContainer dst,
            IProgressMonitor monitor) throws CoreException {
        int pathLength = path.segmentCount();
        // Immediate children of `path`: "dir/" or "dir/file.java".
        Map<String, ZipEntry> childZipEntries = new HashMap<String, ZipEntry>();
        for (Enumeration<? extends ZipEntry> zipEntries = zipFile.entries(); zipEntries
                .hasMoreElements();) {
            ZipEntry zipEntry = zipEntries.nextElement();
            IPath entryPath = new Path(zipEntry.getName());
            if (entryPath.segmentCount() <= pathLength) {
                // ancestor or current directory
                continue;
            }
            if (!path.isPrefixOf(entryPath)) {
                // not a descendant
                continue;
            }
            if (entryPath.segmentCount() == pathLength + 1) {
                childZipEntries.put(zipEntry.getName(), zipEntry);
            } else {
                // Deeper entry: register its first-level ancestor directory,
                // since jars need not contain explicit directory entries.
                String name = entryPath.uptoSegment(pathLength + 1)
                        .<API key>().toString();
                if (!childZipEntries.containsKey(name)) {
                    childZipEntries.put(name, new ZipEntry(name));
                }
            }
        }
        for (ZipEntry zipEntry : childZipEntries.values()) {
            String name = new Path(zipEntry.getName()).lastSegment().toString();
            if (zipEntry.isDirectory()) {
                String folderName = getProcessedString(name, name);
                IContainer dstContainer = dst.getFolder(new Path(folderName));
                if (!dstContainer.exists())
                    ((IFolder) dstContainer).create(true, true, monitor);
                generateFiles(zipFile, path.append(name), dstContainer, monitor);
            } else {
                InputStream in = null;
                try {
                    in = zipFile.getInputStream(zipEntry);
                    copyFile(name, in, dst, monitor);
                } catch (IOException ioe) {
                    ToolsPlugin.logError(ioe);
                } finally {
                    if (in != null)
                        try {
                            in.close();
                        } catch (IOException ioe2) {
                            // best-effort close
                        }
                }
            }
        }
    }

    /**
     * Copies one template file into {@code dst}, substituting variables in
     * the file name and (for non-binary files) in the content.
     */
    private void copyFile(String fileName, InputStream input, IContainer dst,
            IProgressMonitor monitor) throws CoreException {
        String targetFileName = getProcessedString(fileName, fileName);
        monitor.subTask(targetFileName);
        IFile dstFile = dst.getFile(new Path(targetFileName));
        try {
            InputStream stream = isBinary(fileName) ? input
                    : getProcessedStream(fileName, input);
            if (dstFile.exists()) {
                dstFile.setContents(stream, true, true, monitor);
            } else {
                dstFile.create(stream, true, monitor);
            }
            stream.close();
        } catch (IOException e) {
            ToolsPlugin.logError(e);
        }
    }

    /**
     * Copies one template file into {@code dst} under an explicit destination
     * path.
     *
     * @param destPath target path, or {@code null} to derive it from
     *                 {@code fileName} via variable substitution
     */
    protected void copyFile(String fileName, InputStream input, IContainer dst,
            final String destPath, IProgressMonitor monitor)
            throws CoreException {
        String targetFileName = null;
        if (destPath == null) {
            targetFileName = getProcessedString(fileName, fileName);
        } else {
            targetFileName = destPath;
        }
        monitor.subTask(targetFileName);
        IFile dstFile = dst.getFile(new Path(targetFileName));
        try {
            InputStream stream = isBinary(fileName) ? input
                    : getProcessedStream(fileName, input);
            if (dstFile.exists()) {
                dstFile.setContents(stream, true, true, monitor);
            } else {
                dstFile.create(stream, true, monitor);
            }
            stream.close();
        } catch (IOException e) {
            ToolsPlugin.logError(e);
        }
    }

    /**
     * Copies one template file below {@code basePath} inside {@code dst},
     * optionally renaming it to {@code destName}.
     *
     * @param basePath folder prefix inside {@code dst}; when {@code null} or
     *                 empty the file is copied directly into {@code dst}
     * @param destName target file name, or {@code null} to derive it from
     *                 {@code fileName} via variable substitution
     */
    public void copyFile(String fileName, InputStream input, IContainer dst,
            final String basePath, final String destName,
            IProgressMonitor monitor) throws CoreException {
        if (basePath == null || basePath.equals("")) {
            copyFile(fileName, input, dst, monitor);
            // BUGFIX: previously execution fell through here and copied the
            // file a second time to the bogus path "null<name>" / "<name>".
            return;
        }
        String targetFileName = destName == null ? getProcessedString(fileName,
                fileName) : destName;
        monitor.subTask(targetFileName);
        IFile dstFile = dst.getFile(new Path(basePath + targetFileName));
        try {
            InputStream stream = isBinary(fileName) ? input
                    : getProcessedStream(fileName, input);
            if (dstFile.exists()) {
                dstFile.setContents(stream, true, true, monitor);
            } else {
                dstFile.create(stream, true, monitor);
            }
            stream.close();
        } catch (IOException e) {
            ToolsPlugin.logError(e);
        }
    }

    /** @return true when the file's extension is registered as binary */
    private boolean isBinary(String fileName) {
        if (binaryExtentions == null) {
            return false;
        }
        String ext = getfileExtention(fileName);
        if (ext == null)
            return false;
        return binaryExtentions.contains(ext);
    }

    /**
     * @return the extension of {@code name} including the leading dot
     *         (e.g. ".gif"), or {@code null} if there is none; callers and
     *         the {@code binaryExtentions} set rely on the dot being kept
     */
    private String getfileExtention(String name) {
        int indexOf = name.lastIndexOf('.');
        if (indexOf == -1)
            return null;
        return name.substring(indexOf);
    }

    /**
     * Reads {@code stream} fully while expanding {@code @@key@@} variable
     * references; an empty key ({@code @@@@}) yields a literal {@code @@}.
     * A single {@code @} followed by a normal character is passed through.
     *
     * @return an in-memory stream over the substituted content
     */
    private InputStream getProcessedStream(String fileName, InputStream stream)
            throws IOException, CoreException {
        InputStreamReader reader = new InputStreamReader(stream);
        int bufsize = 1024;
        char[] cbuffer = new char[bufsize];
        int read = 0;
        StringBuffer keyBuffer = new StringBuffer();
        StringBuffer outBuffer = new StringBuffer();
        boolean replacementMode = false;
        // True after a single '@' that may open (or close) a delimiter.
        boolean atPending = false;
        while (read != -1) {
            read = reader.read(cbuffer);
            for (int i = 0; i < read; i++) {
                char c = cbuffer[i];
                if (c == '@') {
                    if (replacementMode && atPending) {
                        // First '@' of the closing "@@" delimiter.
                        atPending = false;
                    } else if (replacementMode) {
                        // Second '@' closes the key: substitute it.
                        replacementMode = false;
                        String key = keyBuffer.toString();
                        String value = key.length() == 0 ? "@@" //$NON-NLS-1$
                                : <API key>(key);
                        outBuffer.append(value);
                        keyBuffer.delete(0, keyBuffer.length());
                    } else if (atPending) {
                        // "@@" seen: start collecting the key.
                        replacementMode = true;
                    } else {
                        atPending = true;
                    }
                } else {
                    if (replacementMode)
                        keyBuffer.append(c);
                    else {
                        if (atPending)
                            outBuffer.append('@'); // lone '@' passes through
                        outBuffer.append(c);
                        atPending = false;
                    }
                }
            }
        }
        // NOTE(review): both the reader and getBytes() use the platform
        // default charset; templates with non-ASCII content may be mangled -
        // confirm before switching to an explicit charset.
        return new <API key>(outBuffer.toString().getBytes());
    }

    /**
     * Performs {@code $key$} variable substitution on a single string (used
     * for file and folder names); {@code $$} yields a literal {@code $}.
     *
     * @param fileName origin of the string (unused; kept for signature
     *                 compatibility)
     * @param source   the string to process
     */
    private String getProcessedString(String fileName, String source) {
        if (source.indexOf('$') == -1)
            return source;
        int loc = -1;
        StringBuffer buffer = new StringBuffer();
        boolean replacementMode = false;
        for (int i = 0; i < source.length(); i++) {
            char c = source.charAt(i);
            if (c == '$') {
                if (replacementMode) {
                    String key = source.substring(loc, i);
                    String value = key.length() == 0 ? "$" : <API key>(key); //$NON-NLS-1$
                    buffer.append(value);
                    replacementMode = false;
                } else {
                    replacementMode = true;
                    loc = i + 1;
                    continue;
                }
            } else if (!replacementMode)
                buffer.append(c);
        }
        return buffer.toString();
    }

    /**
     * Resolves a substitution key to its replacement value.
     *
     * @return the value registered for {@code key}, or the key itself when
     *         no mapping exists
     */
    public String <API key>(String key) {
        String result = keys.get(key);
        return result != null ? result : key;
    }

    /** Variables resolve through the same substitution map as template keys. */
    public Object getValue(String variable) {
        return <API key>(variable);
    }
}
% This file was created by matplotlib2tikz v0.5.4.
\begin{tikzpicture}
\definecolor{color1}{rgb}{0.105882352941176,0.619607843137255,0.466666666666667}
\definecolor{color0}{rgb}{0.917647058823529,0.917647058823529,0.949019607843137}
\definecolor{color3}{rgb}{0.686735870818916,0.297270280661284,0.61999231064975}
\definecolor{color2}{rgb}{0.719492502883506,0.416147635524798,0.0888119953863898}
\definecolor{color5}{rgb}{0.488581314878893,0.654440599769319,0.0982698961937716}
\definecolor{color4}{rgb}{0.737254901960784,0.324183006535948,0.4}
\begin{groupplot}[group style={group size=2 by 1}]
\nextgroupplot[
xlabel={Simulated Time (mins)},
ylabel={Weighted Trust Value},
xmin=0, xmax=60,
ymin=0, ymax=1,
ytick={0,0.2,0.4,0.6,0.8,1},
yticklabels={$0.0$,$0.2$,$0.4$,$0.6$,$0.8$,$1.0$},
tick align=outside,
xmajorgrids,
x grid style={white!50.196078431372548!black},
ymajorgrids,
y grid style={white!50.196078431372548!black},
axis line style={white!50.196078431372548!black},
axis background/.style={fill=color0},
legend cell align={left},
legend style={draw=none, fill=color0},
legend entries={{Alfa},{Bravo},{Delta},{Echo},{Foxtrot}},
unbounded coords=jump
]
\addplot [color1, opacity=1.0]
table {%
0 nan
1 0.026737063208103
2 0.0754799107809937
3 0.260706400797834
4 0.24838742520699
5 0.45635715398243
6 0.347449987013856
7 0.259667943326349
8 0.259667943326349
9 0.171767016271126
10 0.259046860314861
11 0.220751556831128
12 0.262409273742005
13 0.22782758566697
14 0.407696003742534
15 0.561752172428499
16 0.641652642860946
17 0.745381955289175
18 0.771847171771387
19 0.572322816823478
20 0.572322816823478
21 0.367144319893529
22 0.293206747390784
23 0.199858625014868
24 0.139591651640801
25 0.0978280818849505
26 0.360213222406644
27 0.533029026388488
28 0.468742884691022
29 0.375209956930509
30 0.544534015175343
31 0.535718814555176
32 0.620303315545404
33 0.680964793005723
34 0.491151730315795
35 0.485078726478564
36 0.349654299704808
37 0.349654299704808
38 0.349654299704808
39 0.408767133762006
40 0.408767133762006
41 0.287529989871777
42 0.526822881741596
43 0.662071344206452
44 0.443658875366055
45 0.412794892073343
46 0.294061917609679
47 0.437376101879421
48 0.549087341204488
49 0.390459060300904
50 0.301185434045411
51 0.328975893164164
52 0.409441570800469
53 0.296849377270403
54 0.448511850211224
55 0.434096277580988
56 0.434096277580988
57 0.557054521148138
58 0.396564517995419
};
\addplot [color2, opacity=0.6]
table {%
0 0.258814925081954
1 0.655994216292884
2 0.801865393044234
3 0.522627934671067
4 0.463141692858778
5 0.518382362071586
6 0.594092886837796
7 0.50878970959637
8 0.550009865298569
9 0.49174649669005
10 0.597397336658566
11 0.707844365480487
12 0.532381636072886
13 0.613665569766303
14 0.488363733497804
15 0.361318378095975
16 0.263511220800529
17 0.199447845410288
18 0.221925525900789
19 0.428783344198751
20 0.590916973813078
21 0.688889795794956
22 0.661417617070129
23 0.693285930837083
24 0.616760096458636
25 0.534530638033971
26 0.570964025585717
27 0.409107811099486
28 0.477041514777081
29 0.349523120493872
30 0.278503045741574
31 0.33938647145709
32 0.423223199656244
33 0.366685208982746
34 0.31420546951456
35 0.457499259987523
36 0.559851582313733
37 0.674837593958227
38 0.756970301731655
39 0.597494016073251
40 0.682414369383628
41 0.648765463674328
42 0.507519566129903
43 0.529690665706771
44 0.379290251759587
45 0.511111017549694
46 0.439674465993191
47 0.413685960469765
48 0.308193027418835
49 0.502875674814477
50 0.622084271029004
51 0.505343104532053
52 0.362335595630645
53 0.259283478048653
54 0.232265350144004
55 0.173211937858327
56 0.398666098459962
57 0.285714957914004
58 0.212457159004271
};
\addplot [color3, opacity=0.6]
table {%
0 0.0155231411707976
1 0.498024944059502
2 0.291089204830054
3 0.516235897084956
4 0.578606563917918
5 0.420121057264944
6 0.294748151138483
7 0.213467641160262
8 0.448795105087049
9 0.574106200971017
10 0.449879751393585
11 0.367747048068367
12 0.455080430553879
13 0.520602434803224
14 0.465031241830959
15 0.459324696877032
16 0.364511914414602
17 0.274831107096845
18 0.392060569952699
19 0.558332606991196
20 0.682372217000713
21 0.686637407395263
22 0.756117920977324
23 0.817442547883517
24 0.846202025528145
25 0.84764436332883
26 0.606527249604505
27 0.709630842464064
28 0.777419714920752
29 0.564761903255276
30 0.6459965764955
31 0.512252717660356
32 0.459894774545612
33 0.556757488548897
34 0.477160877039494
35 0.385699074196469
36 0.553168970924161
37 0.510699769526839
38 0.394920848744741
39 0.524996879452274
40 0.385768630041853
41 0.524419112360336
42 0.382448022246415
43 0.392374051992175
44 0.560245804791583
45 0.43832733141774
46 0.356995317278829
47 0.521674657508633
48 0.592442118567104
49 0.512713676680888
50 0.543462146168497
51 0.442635484758232
52 0.550598836547709
53 0.40968942598709
54 0.482912869732856
55 0.606235666135819
56 0.436310967742653
57 0.557763734000834
58 0.45654851822093
};
\addplot [color4, opacity=0.6]
table {%
0 0.00152385089203702
1 0.279741649879982
2 0.444512246870207
3 0.587899664451127
4 0.42150388841922
5 0.291297837430406
6 0.282090664145899
7 0.212815672443523
8 0.149293766116155
9 0.31767760214523
10 0.284136710250994
11 0.396652940425361
12 0.431411002373097
13 0.40139176566045
14 0.301295164992613
15 0.221500305359534
16 0.169576906275585
17 0.165738659323064
18 0.196247268719759
19 0.153195629737861
20 0.114973415853336
21 0.328094865924177
22 0.391599319762764
23 0.395969117318607
24 0.34672986484334
25 0.322256530571307
26 0.317646440081204
27 0.414097944405733
28 0.407788386491844
29 0.53558435272685
30 0.458196691113457
31 0.6019187473605
32 0.66203776411834
33 0.47499098504088
34 0.55919390952415
35 0.665481927855081
36 0.753778653916249
37 0.543312075845323
38 0.477069154303282
39 0.434764385849436
40 0.483509823952024
41 0.530181913871104
42 0.545410879502032
43 0.480365700049282
44 0.614636612748205
45 0.554909681187766
46 0.638701055500204
47 0.540706286255809
48 0.542243682103545
49 0.528771866545848
50 0.382270464253503
51 0.282836031465542
52 0.237730584656335
53 0.446258741140415
54 0.382814056881075
55 0.297378865150156
56 0.481773666491503
57 0.370970376794305
58 0.539474101525829
};
\addplot [color5, opacity=0.6]
table {%
0 0.982476980509236
1 0.489539619786244
2 0.625446258328634
3 0.692562395624054
4 0.745507234847851
5 0.591351180806013
6 0.719944720151854
7 0.789580384732158
8 0.654490257503146
9 0.549360380993938
10 0.413978628283524
11 0.575436590206244
12 0.675166465074393
13 0.548486536695329
14 0.461078480098651
15 0.472369165818577
16 0.400227415451476
17 0.303880316533833
18 0.21900529930869
19 0.427668733904146
20 0.54912224956291
21 0.673049999627038
22 0.48327978503659
23 0.347000212635799
24 0.291691575976772
25 0.485914162694876
26 0.519824705416406
27 0.43517078799779
28 0.595057440038867
29 0.631735984147957
30 0.63589333335197
31 0.7222608484477
32 0.593807692017763
33 0.668859656102664
34 0.715229399682331
35 0.512023781445627
36 0.407700252958044
37 0.534755718331803
38 0.660532341888484
39 0.492449369684725
40 0.635896811240705
41 0.737358158769074
42 0.589475255638077
43 0.453530157473819
44 0.516198813110502
45 0.638015440957174
46 0.740887145704043
47 0.577214425833962
48 0.513192579851031
49 0.373157359627789
50 0.482736780634366
51 0.588352885025627
52 0.462425447983433
53 0.347747647551829
54 0.262114474569842
55 0.420620437098216
56 0.3534000735862
57 0.261393570447189
58 0.453916296659506
};
\path [draw=white, fill opacity=0] (axis cs:0,1)
--(axis cs:60,1);
\path [draw=white, fill opacity=0] (axis cs:1,0)
--(axis cs:1,1);
\path [draw=white!50.196078431372548!black, fill opacity=0] (axis cs:0,0)
--(axis cs:60,0);
\path [draw=white!50.196078431372548!black, fill opacity=0] (axis cs:0,0)
--(axis cs:0,1);
\nextgroupplot[
xmin=0.5, xmax=5.5,
ymin=0, ymax=1,
xtick={1,2,3,4,5},
xticklabels={Alfa,Bravo,Delta,Echo,Foxtrot},
ytick={0,0.2,0.4,0.6,0.8,1},
yticklabels={$0.0$,$0.2$,$0.4$,$0.6$,$0.8$,$1.0$},
tick align=outside,
xmajorgrids,
x grid style={white!50.196078431372548!black},
ymajorgrids,
y grid style={white!50.196078431372548!black},
axis line style={white!50.196078431372548!black},
axis background/.style={fill=color0},
legend cell align={left},
legend style={draw=none, fill=color0},
legend entries={{Alfa},{Bravo},{Delta},{Echo},{Foxtrot}},
unbounded coords=jump
]
\addplot [color1, opacity=1]
table {%
0.75 0.288949179251529
1.25 0.288949179251529
1.25 0.517905093885146
0.75 0.517905093885146
0.75 0.288949179251529
};
\addplot [blue, opacity=1, dashed]
table {%
1 0.288949179251529
1 0.026737063208103
};
\addplot [blue, opacity=1, dashed]
table {%
1 0.517905093885146
1 0.771847171771387
};
\addplot [black]
table {%
0.875 0.026737063208103
1.125 0.026737063208103
};
\addplot [black]
table {%
0.875 0.771847171771387
1.125 0.771847171771387
};
\addplot [red, opacity=1]
table {%
0.75 0.402130260868977
1.25 0.402130260868977
};
\addplot [blue, opacity=0.8]
table {%
0.75 0.396878325900266
1.25 0.396878325900266
};
\addplot [blue, mark=+, mark size=3, mark options={draw=black}, only marks]
table {%
};
\addplot [color2, opacity=1]
table {%
1.75 0.36182698686331
2.25 0.36182698686331
2.25 0.595745111748181
1.75 0.595745111748181
1.75 0.36182698686331
};
\addplot [blue, opacity=1, dashed]
table {%
2 0.36182698686331
2 0.173211937858327
};
\addplot [blue, opacity=1, dashed]
table {%
2 0.595745111748181
2 0.801865393044234
};
\addplot [black]
table {%
1.875 0.173211937858327
2.125 0.173211937858327
};
\addplot [black]
table {%
1.875 0.801865393044234
2.125 0.801865393044234
};
\addplot [red, opacity=1]
table {%
1.75 0.502875674814477
2.25 0.502875674814477
};
\addplot [blue, opacity=0.8]
table {%
1.75 0.476538653003854
2.25 0.476538653003854
};
\addplot [blue, mark=+, mark size=3, mark options={draw=black}, only marks]
table {%
};
\addplot [color3, opacity=1]
table {%
2.75 0.414905241626017
3.25 0.414905241626017
3.25 0.56250385402343
2.75 0.56250385402343
2.75 0.414905241626017
};
\addplot [blue, opacity=1, dashed]
table {%
3 0.414905241626017
3 0.213467641160262
};
\addplot [blue, opacity=1, dashed]
table {%
3 0.56250385402343
3 0.777419714920752
};
\addplot [black]
table {%
2.875 0.213467641160262
3.125 0.213467641160262
};
\addplot [black]
table {%
2.875 0.777419714920752
3.125 0.777419714920752
};
\addplot [red, opacity=1]
table {%
2.75 0.510699769526839
3.25 0.510699769526839
};
\addplot [blue, opacity=0.8]
table {%
2.75 0.502169326276954
3.25 0.502169326276954
};
\addplot [blue, mark=+, mark size=3, mark options={draw=black}, only marks]
table {%
3 0.0155231411707976
3 0.817442547883517
3 0.846202025528145
3 0.84764436332883
};
\addplot [color4, opacity=1]
table {%
3.75 0.294338351290281
4.25 0.294338351290281
4.25 0.532883133298977
3.75 0.532883133298977
3.75 0.294338351290281
};
\addplot [blue, opacity=1, dashed]
table {%
4 0.294338351290281
4 0.00152385089203702
};
\addplot [blue, opacity=1, dashed]
table {%
4 0.532883133298977
4 0.753778653916249
};
\addplot [black]
table {%
3.875 0.00152385089203702
4.125 0.00152385089203702
};
\addplot [black]
table {%
3.875 0.753778653916249
4.125 0.753778653916249
};
\addplot [red, opacity=1]
table {%
3.75 0.407788386491844
4.25 0.407788386491844
};
\addplot [blue, opacity=0.8]
table {%
3.75 0.403320883371058
4.25 0.403320883371058
};
\addplot [blue, mark=+, mark size=3, mark options={draw=black}, only marks]
table {%
};
\addplot [color5, opacity=1]
table {%
4.75 0.444350472735805
5.25 0.444350472735805
5.25 0.636956126098939
4.75 0.636956126098939
4.75 0.444350472735805
};
\addplot [blue, opacity=1, dashed]
table {%
5 0.444350472735805
5 0.21900529930869
};
\addplot [blue, opacity=1, dashed]
table {%
5 0.636956126098939
5 0.789580384732158
};
\addplot [black]
table {%
4.875 0.21900529930869
5.125 0.21900529930869
};
\addplot [black]
table {%
4.875 0.789580384732158
5.125 0.789580384732158
};
\addplot [red, opacity=1]
table {%
4.75 0.534755718331803
5.25 0.534755718331803
};
\addplot [blue, opacity=0.8]
table {%
4.75 0.537297459380788
5.25 0.537297459380788
};
\addplot [blue, mark=+, mark size=3, mark options={draw=black}, only marks]
table {%
5 0.982476980509236
};
\path [draw=white, fill opacity=0] (axis cs:0.5,1)
--(axis cs:5.5,1);
\path [draw=white, fill opacity=0] (axis cs:1,0)
--(axis cs:1,1);
\path [draw=white!50.196078431372548!black, fill opacity=0] (axis cs:0.5,0)
--(axis cs:5.5,0);
\path [draw=white!50.196078431372548!black, fill opacity=0] (axis cs:0,0)
--(axis cs:0,1);
\end{groupplot}
\end{tikzpicture} |
package markdowikitext.commonmark.refspec.cases;
import markdowikitext.commonmark.refspec.RefSpecCase;
/**
 * CommonMark reference-spec case: a space before the closing '*' means the
 * star run cannot close emphasis, so the text is rendered literally.
 */
public class <API key> extends RefSpecCase {

    public <API key>() {
        super(createInput(), createOutput());
    }

    /** @return the raw markdown input for this case */
    public static String createInput() {
        return "and *foo bar *";
    }

    /** @return the expected HTML rendering (no emphasis is produced) */
    public static String createOutput() {
        return "<p>and *foo bar *</p>";
    }
}
package org.eclipse.titan.designer.core.makefile;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;

import org.eclipse.core.filesystem.EFS;
import org.eclipse.core.filesystem.IFileInfo;
import org.eclipse.core.filesystem.IFileStore;
import org.eclipse.core.resources.IContainer;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IFolder;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.resources.IResourceVisitor;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.Path;
import org.eclipse.titan.common.logging.ErrorReporter;
import org.eclipse.titan.common.utils.ResourceUtils;
import org.eclipse.titan.designer.commonFilters.<API key>;
import org.eclipse.titan.designer.core.ProjectBasedBuilder;
import org.eclipse.titan.designer.properties.data.<API key>;
import org.eclipse.titan.designer.properties.data.<API key>;
/**
* This visitor while analyzing a project also fills the data structures
* needed by the Makefile generator.
* <p/>
* If the project being processed is not the one for which we generate
* the Makefile, the working directory of the project is treated as a
* central storage.
*
* @author Szabolcs Beres
*/
public final class <API key> implements IResourceVisitor {
    private final <API key> makefileGenerator;
    /**
     * The project being processed.
     */
    private final IProject projectVisited;
    /** Working directories of the visited project; these are never scanned for modules. */
    private final IContainer[] workingDirectories;
    /** OS-specific path of the visited project's working directory, or null if it has none. */
    private final String <API key>;
    /** Central storage folders encountered so far during the visit. */
    private final List<URI> centralStorages = new ArrayList<URI>();
    /** Caches the project's excluded-resource regular expressions. */
    private <API key> helper;

    /**
     * Creates a visitor that feeds the given Makefile generator with the
     * files and base directories found in {@code project}.
     *
     * @param makefileGenerator the generator collecting the results
     * @param project the project whose resources are visited
     */
    public <API key>(final <API key> makefileGenerator, final IProject project) {
        this.makefileGenerator = makefileGenerator;
        this.projectVisited = project;
        helper = new <API key>();
        workingDirectories = ProjectBasedBuilder.<API key>(project).<API key>(false);
        IPath path = ProjectBasedBuilder.<API key>(project).<API key>(false);
        if (path != null) {
            <API key> = path.toOSString();
        } else {
            <API key> = null;
        }
        // The first working directory (or, failing that, its raw path) becomes
        // the generator's initial base directory.
        if (workingDirectories.length > 0) {
            this.makefileGenerator.addBaseDirectory(workingDirectories[0].getLocation());
        } else if (path != null) {
            this.makefileGenerator.addBaseDirectory(path.toOSString());
        }
        this.makefileGenerator.<API key>(
                this.makefileGenerator.<API key>()
                && <API key>.useSymbolicLinks(project));
    }

    /**
     * Classifies the visited resource: records module/source/header files with
     * the generator, registers central storage folders, and prunes excluded
     * or inaccessible subtrees.
     *
     * @return true to continue visiting children, false to prune this subtree
     */
    @Override
    public boolean visit(final IResource resource) throws CoreException {
        if (!resource.isAccessible()) {
            return false;
        }
        URI resourceURI = resource.getLocationURI();
        // Not having a location in the local file system is an
        // error, but should only be reported if the resource is
        // not excluded from build.
        String resourceName = new Path(resourceURI.getPath()).lastSegment();
        if (resourceName.startsWith(".")) {
            // Dot resources (".svn", ".settings", ...) are never part of the build.
            return false;
        }
        try {
            URI resolved = resource.getWorkspace().<API key>().resolveURI(resource.getLocationURI());
            IFileStore store = EFS.getStore(resolved);
            IFileInfo fileInfo = store.fetchInfo();
            if (!fileInfo.exists()) {
                return false;
            }
        } catch (CoreException e) {
            // Best effort: log and keep going with the workspace's own view of the resource.
            ErrorReporter.<API key>(e);
        }
        switch (resource.getType()) {
        case IResource.FILE:
            if (<API key>.isDirectlyExcluded((IFile) resource) || helper.isExcludedByRegexp(resourceName)) {
                return false;
            }
            String folder = projectVisited == makefileGenerator.getProject() ? null : <API key>;
            for (URI centralStorage : getCentralStorages()) {
                // FIX: compare the host components by value. The original used '==' on the
                // Strings returned by URI.getHost(), which tests reference identity only and
                // could miss equal hosts (and both may legitimately be null for file: URIs).
                if (Objects.equals(resourceURI.getHost(), centralStorage.getHost())
                        && resourceURI.getPath().startsWith(centralStorage.getPath())) {
                    folder = centralStorage.getPath();
                    for (BaseDirectoryStruct dir : makefileGenerator.getBaseDirectories()) {
                        if (dir.getDirectory() != null && dir.getDirectory().isPrefixOf(resource.getFullPath())) {
                            dir.setHasModules(true);
                            break;
                        }
                    }
                    break;
                }
            }
            if (resource.getLocation() == null && folder == null) {
                folder = <API key>;
            }
            IFile file = (IFile) resource;
            String extension = file.getFileExtension();
            // Dispatch the file to the generator bucket matching its extension.
            if ("ttcn3".equals(extension) || "ttcn".equals(extension)) {
                makefileGenerator.addTTCN3Module(file, folder);
            } else if ("asn".equals(extension) || "asn1".equals(extension)) {
                makefileGenerator.addASN1Module(file, folder);
            } else if ("ttcnpp".equals(extension)) {
                makefileGenerator.<API key>(file, folder);
            } else if ("ttcnin".equals(extension)) {
                makefileGenerator.addIncludeModule(file, folder);
            } else if ("c".equals(extension) || "cc".equals(extension)) {
                makefileGenerator.addUserSourceFile(file, folder);
            } else if ("h".equals(extension) || "hh".equals(extension)) {
                makefileGenerator.addUserHeaderFile(file, folder);
            } else {
                makefileGenerator.addOtherFiles(file, folder);
            }
            return false;
        case IResource.FOLDER:
            for (IContainer workingDirectory : workingDirectories) {
                if (workingDirectory.equals(resource)) {
                    // A foreign project's working directory acts as a central storage.
                    if (projectVisited != makefileGenerator.getProject()) {
                        makefileGenerator.addBaseDirectory(resource.getLocation());
                    }
                    return false;
                }
            }
            if (<API key>.isDirectlyExcluded((IFolder) resource)
                    || helper.isExcludedByRegexp(resourceName)) {
                return false;
            }
            if (ResourceUtils.<API key>(
                    resource, <API key>.QUALIFIER, <API key>.<API key>)) {
                makefileGenerator.addBaseDirectory(resource.getLocation());
                getCentralStorages().add(resourceURI);
            }
            break;
        default:
            // Projects and other resource types need no special handling here.
        }
        return true;
    }

    /**
     * The list of central storages already encountered. Needed to
     * identify files, which are in central storages.
     */
    public List<URI> getCentralStorages() {
        return centralStorages;
    }
}
// Autogenerated AST node
package org.python.pydev.parser.jython.ast;
import org.python.pydev.parser.jython.SimpleNode;
import java.util.Arrays;
public final class GeneratorExp extends exprType {
public exprType elt;
public comprehensionType[] generators;
public GeneratorExp(exprType elt, comprehensionType[] generators) {
this.elt = elt;
this.generators = generators;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((elt == null) ? 0 : elt.hashCode());
result = prime * result + Arrays.hashCode(generators);
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
GeneratorExp other = (GeneratorExp) obj;
if (elt == null) {
if (other.elt != null)
return false;
} else if (!elt.equals(other.elt))
return false;
if (!Arrays.equals(generators, other.generators))
return false;
return true;
}
@Override
public GeneratorExp createCopy() {
return createCopy(true);
}
@Override
public GeneratorExp createCopy(boolean copyComments) {
comprehensionType[] new0;
if (this.generators != null) {
new0 = new comprehensionType[this.generators.length];
for (int i = 0; i < this.generators.length; i++) {
new0[i] = (comprehensionType) (this.generators[i] != null ? this.generators[i].createCopy(copyComments)
: null);
}
} else {
new0 = this.generators;
}
GeneratorExp temp = new GeneratorExp(elt != null ? (exprType) elt.createCopy(copyComments) : null,
new0);
temp.beginLine = this.beginLine;
temp.beginColumn = this.beginColumn;
if (this.specialsBefore != null && copyComments) {
for (Object o : this.specialsBefore) {
if (o instanceof commentType) {
commentType commentType = (commentType) o;
temp.getSpecialsBefore().add(commentType.createCopy(copyComments));
}
}
}
if (this.specialsAfter != null && copyComments) {
for (Object o : this.specialsAfter) {
if (o instanceof commentType) {
commentType commentType = (commentType) o;
temp.getSpecialsAfter().add(commentType.createCopy(copyComments));
}
}
}
return temp;
}
@Override
public String toString() {
StringBuffer sb = new StringBuffer("GeneratorExp[");
sb.append("elt=");
sb.append(dumpThis(this.elt));
sb.append(", ");
sb.append("generators=");
sb.append(dumpThis(this.generators));
sb.append("]");
return sb.toString();
}
@Override
public Object accept(VisitorIF visitor) throws Exception {
return visitor.visitGeneratorExp(this);
}
@Override
public void traverse(VisitorIF visitor) throws Exception {
if (elt != null) {
elt.accept(visitor);
}
if (generators != null) {
for (int i = 0; i < generators.length; i++) {
if (generators[i] != null) {
generators[i].accept(visitor);
}
}
}
}
} |
package de.braintags.netrelay.controller.authentication;
import java.util.Properties;
import de.braintags.netrelay.controller.AbstractController;
import de.braintags.netrelay.controller.authentication.authprovider.CustomAuthProvider;
import de.braintags.vertx.auth.datastore.IDatastoreAuth;
import de.braintags.vertx.jomnigate.IDataStore;
import de.braintags.vertx.jomnigate.mapping.IMapper;
import de.braintags.vertx.jomnigate.mapping.IProperty;
import de.braintags.vertx.jomnigate.mongo.MongoDataStore;
import de.braintags.vertx.util.exception.InitException;
import io.vertx.core.AsyncResult;
import io.vertx.core.Future;
import io.vertx.core.Handler;
import io.vertx.core.json.JsonObject;
import io.vertx.ext.auth.AuthProvider;
import io.vertx.ext.auth.User;
import io.vertx.ext.auth.mongo.HashSaltStyle;
import io.vertx.ext.auth.mongo.MongoAuth;
import io.vertx.ext.mongo.MongoClient;
/**
* An abstract implementation of IController, which initializes an {@link AuthProvider} to be used to perform
* authentication and authorization in extending controllers
*
* Config-Parameter:<br/>
* possible parameters, which are read from the configuration
* <ul>
* <li>{@value #AUTH_PROVIDER_PROP}</li>
* <ul>
* <li>for {@link MongoAuth}, specific parameters for MongoAuth can be added, like
* <ul>
* <li>{@link MongoAuth#<API key>}</li>
* <li>{@link MongoAuth#<API key>}</li>
* <li>{@link MongoAuth#<API key>}</li>
* <li>{@link MongoAuth#PROPERTY_ROLE_FIELD}</li>
* </ul>
* </li>
* <li>for {@value #<API key>}, the parameter {@value #<API key>} must be filled with the
* class of the auth provider</li>
* </ul>
* <li>{@link #USERNAME_FIELD}</li>
* <li>{@link #PASSWORD_FIELD}</li>
* </ul>
* <br>
*
* Request-Parameter:<br/>
* possible parameters, which are read from a request
* <UL>
* <LI>none
* </UL>
* <br/>
*
* Result-Parameter:<br/>
* possible paramters, which will be placed into the context
* <UL>
* <LI>none
* </UL>
* <br/>
*
*
* @author Michael Remme
*
*/
public abstract class Abstract<API key> extends AbstractController {
  private static final io.vertx.core.logging.Logger LOGGER = io.vertx.core.logging.LoggerFactory
      .getLogger(Abstract<API key>.class);
  /**
   * Used as possible value for property {@link #AUTH_PROVIDER_PROP} and references to an authentication provider
   * connected to a mongo db
   */
  public static final String AUTH_PROVIDER_MONGO = "MongoAuth";
  /**
   * Used as possible value for property {@link #AUTH_PROVIDER_PROP} and references to an authentication provider
   * connected to the internal IDatastoreContainer
   */
  public static final String <API key> = "<API key>;
  /**
   * Used if a custom {@link AuthProvider} should be used
   */
  public static final String <API key> = "CustomAuthProvider";
  /**
   * Property for the class name that implements CustomAuthProvider if the configured provider is
   * {@link #<API key>}
   */
  public static final String <API key> = "<API key>";
  /**
   * The name of the key, which is used, to store the name of the mapper in the {@link User#principal()}
   */
  public static final String <API key> = "mapper";
  /**
   * Defines the name of the {@link AuthProvider} to be used. Currently {@link #AUTH_PROVIDER_MONGO} is supported
   */
  public static final String AUTH_PROVIDER_PROP = "authProvider";
  /**
   * Defines the name of the parameter where the username is stored in a login request
   */
  public static final String USERNAME_FIELD = "usernameField";
  /**
   * Defines the name of the parameter where the password is stored in a login request
   */
  public static final String PASSWORD_FIELD = "passwordField";
  // NOTE(review): this field is static, so the provider created by the most recently
  // initialized controller instance is shared by ALL instances — confirm this is intended.
  private static AuthProvider authProvider;
  /*
   * (non-Javadoc)
   *
   * @see de.braintags.netrelay.controller.AbstractController#initProperties(java.util.Properties)
   */
  @Override
  public void initProperties(Properties properties) {
    // NOTE(review): assigning a static field through 'this' — each init overwrites
    // the provider for every controller instance; verify against the lifecycle.
    this.authProvider = createAuthProvider(properties);
  }
  /**
   * Get the initialized instance of {@link AuthProvider}
   *
   * @return the {@link AuthProvider}
   */
  protected AuthProvider getAuthProvider() {
    return authProvider;
  }
  /**
   * Builds the {@link AuthProvider} selected by {@link #AUTH_PROVIDER_PROP}:
   * MongoAuth, the internal datastore provider, or a custom provider class.
   *
   * @param properties the controller configuration
   * @return the created provider (Mongo/datastore providers are wrapped in
   *         {@link AuthProviderProxy} so the mapper name is stamped into the principal)
   * @throws InitException if a custom provider class cannot be instantiated
   */
  protected AuthProvider createAuthProvider(Properties properties) {
    String tmpAuthProvider = readProperty(AUTH_PROVIDER_PROP, <API key>, false);
    if (tmpAuthProvider.equals(AUTH_PROVIDER_MONGO)) {
      String mapper = readProperty(MongoAuth.<API key>, null, true);
      return new AuthProviderProxy(<API key>(mapper), mapper);
    } else if (tmpAuthProvider.equals(<API key>)) {
      String mapper = readProperty(MongoAuth.<API key>, null, true);
      return new AuthProviderProxy(<API key>(mapper), mapper);
    } else if (tmpAuthProvider.equals(<API key>)) {
      String className = readProperty(<API key>, null, true);
      try {
        CustomAuthProvider provider = (CustomAuthProvider) Class.forName(className).newInstance();
        provider.init(properties, getNetRelay());
        return provider;
      } catch (<API key> | <API key> | <API key> e) {
        // Wrap with the offending class name so misconfiguration is easy to spot.
        throw new InitException("Could not create custom auth provider " + className, e);
      }
    } else {
      throw new <API key>("unsupported authprovider: " + tmpAuthProvider);
    }
  }
  /**
   * Creates the datastore-backed auth provider for the given mapper name.
   * The mapper must be declared in the NetRelay settings.
   */
  private AuthProvider <API key>(String mapper) {
    Class mapperClass = getNetRelay().getSettings().<API key>().getMapperClass(mapper);
    if (mapperClass == null) {
      throw new InitException("Could not find defined mapper class for mapper '" + mapper + "'");
    }
    JsonObject config = new JsonObject();
    config.put(IDatastoreAuth.<API key>, mapperClass.getName());
    return IDatastoreAuth.create(getNetRelay().getDatastore(), config);
  }
  /**
   * Init the Authentication Service
   */
  private AuthProvider <API key>(String mapper) {
    IDataStore store = getNetRelay().getDatastore();
    // MongoAuth can only operate on a MongoDataStore's underlying MongoClient.
    if (!(store instanceof MongoDataStore)) {
      throw new <API key>("MongoAuthProvider expects a MongoDataStore");
    }
    JsonObject config = new JsonObject();
    String saltStyle = readProperty(MongoAuth.PROPERTY_SALT_STYLE, HashSaltStyle.NO_SALT.toString(), false);
    config.put(MongoAuth.PROPERTY_SALT_STYLE, HashSaltStyle.valueOf(saltStyle));
    MongoAuth auth = MongoAuth.create((MongoClient) ((MongoDataStore) store).getClient(), config);
    String passwordFieldName = readProperty(PASSWORD_FIELD, null, true);
    Class mapperClass = getNetRelay().getSettings().<API key>().getMapperClass(mapper);
    if (mapperClass == null) {
      throw new InitException("Could not find mapper with name " + mapper);
    }
    IMapper mapperDef = getNetRelay().getDatastore().getMapperFactory().getMapper(mapperClass);
    IProperty pwField = mapperDef.getField(passwordFieldName);
    // An Encoder annotation on the password field would double-encode credentials.
    if (pwField.getEncoder() != null) {
      throw new InitException("MongoAuth does not support the annotation Encoder, please use DatastoreAuth instead");
    }
    auth.setPasswordField(passwordFieldName);
    auth.setUsernameField(readProperty(USERNAME_FIELD, null, true));
    // The mapper name doubles as the Mongo collection name.
    auth.setCollectionName(mapper);
    String roleField = readProperty(MongoAuth.PROPERTY_ROLE_FIELD, null, false);
    if (roleField != null) {
      auth.setRoleField(roleField);
    }
    String saltField = readProperty(MongoAuth.PROPERTY_SALT_FIELD, null, false);
    if (saltField != null) {
      auth.setSaltField(saltField);
    }
    String authCredentialField = readProperty(MongoAuth.<API key>, null, false);
    if (authCredentialField != null) {
      auth.<API key>(authCredentialField);
    }
    String <API key> = readProperty(MongoAuth.<API key>, null, false);
    if (<API key> != null) {
      auth.<API key>(<API key>);
    }
    return auth;
  }
  /**
   * Decorator around an {@link AuthProvider} that, on successful authentication,
   * stores the configured mapper name in the {@link User#principal()} so later
   * request handling knows which mapper the user record belongs to.
   */
  class AuthProviderProxy implements AuthProvider {
    AuthProvider prov;
    String mapper;
    AuthProviderProxy(AuthProvider prov, String mapper) {
      this.prov = prov;
      this.mapper = mapper;
    }
    /*
     * (non-Javadoc)
     *
     * @see io.vertx.ext.auth.AuthProvider#authenticate(io.vertx.core.json.JsonObject, io.vertx.core.Handler)
     */
    @Override
    public void authenticate(JsonObject arg0, Handler<AsyncResult<User>> handler) {
      prov.authenticate(arg0, result -> {
        if (result.failed()) {
          // Failures are passed through unchanged; only log for diagnostics.
          LOGGER.info("Authentication failed: " + result.cause());
          handler.handle(result);
        } else {
          User user = result.result();
          user.principal().put(<API key>, mapper);
          handler.handle(Future.succeededFuture(user));
        }
      });
    }
    /**
     * Get the internal instance of {@link AuthProvider} to access specific configuration infos
     *
     * @return the internal provider
     */
    public AuthProvider getProvider() {
      return prov;
    }
  }
}
package org.eclipse.persistence.testing.tests.jpa.cacheable;
import junit.framework.*;
import javax.persistence.EntityManager;
import org.eclipse.persistence.sessions.server.ServerSession;
import org.eclipse.persistence.testing.framework.junit.JUnitTestCase;
import org.eclipse.persistence.testing.models.jpa.cacheable.<API key>;
/*
* The test is testing against "MulitPU-4" persistence unit which has <shared-cache-mode> to be DISABLE_SELECTIVE
*/
public class <API key> extends <API key> {
  public <API key>() {
    super();
  }
  public <API key>(String name) {
    super(name);
    // Every helper below is pinned to the "MulitPU-4" persistence unit, whose
    // <shared-cache-mode> is DISABLE_SELECTIVE (see class comment).
    setPuName("MulitPU-4");
  }
  /**
   * Convenience method. Clears the cache of the "MulitPU-4" persistence unit.
   */
  public void clearDSCache() {
    clearCache("MulitPU-4");
  }
  /**
   * Convenience method. Closes the entity manager only if it is still open.
   */
  public void closeEM(EntityManager em) {
    if (em.isOpen()) {
      closeEntityManager(em);
    }
  }
  /**
   * Convenience method. Creates an entity manager for "MulitPU-4".
   */
  public EntityManager <API key>() {
    return createEntityManager("MulitPU-4");
  }
  /**
   * Convenience method. Returns the server session of "MulitPU-4".
   */
  public ServerSession getDSServerSession() {
    return getPUServerSession("MulitPU-4");
  }
  /**
   * Convenience method.
   * Note: deliberately ignores {@code puName} — this suite always resolves to "MulitPU-4".
   */
  @Override
  public ServerSession getPUServerSession(String puName) {
    return JUnitTestCase.getServerSession("MulitPU-4");
  }
  // Builds the suite; all tests are skipped on a pure JPA 1.0 configuration
  // because shared-cache-mode is a JPA 2.0 feature.
  public static Test suite() {
    TestSuite suite = new TestSuite();
    suite.setName("<API key>");
    if (! JUnitTestCase.isJPA10()) {
      suite.addTest(new <API key>("testSetup"));
      suite.addTest(new <API key>("<API key>"));
      // Test cache retrieve mode of BYPASS and USE through the EM.
      suite.addTest(new <API key>("testCreateEntities"));
      suite.addTest(new <API key>("<API key>"));
      suite.addTest(new <API key>("<API key>"));
      suite.addTest(new <API key>("<API key>"));
      suite.addTest(new <API key>("<API key>"));
      // Test various usage scenarios ..
      suite.addTest(new <API key>("<API key>"));
      suite.addTest(new <API key>("<API key>"));
      suite.addTest(new <API key>("<API key>"));
      suite.addTest(new <API key>("<API key>"));
      suite.addTest(new <API key>("<API key>"));
      suite.addTest(new <API key>("<API key>"));
      suite.addTest(new <API key>("<API key>"));
      suite.addTest(new <API key>("<API key>"));
      suite.addTest(new <API key>("<API key>"));
      suite.addTest(new <API key>("<API key>"));
      suite.addTest(new <API key>("<API key>"));
      suite.addTest(new <API key>("<API key>"));
      suite.addTest(new <API key>("<API key>"));
      suite.addTest(new <API key>("<API key>"));
      suite.addTest(new <API key>("testReadOnlyTree"));
      suite.addTest(new <API key>("<API key>"));
      suite.addTest(new <API key>("<API key>"));
      suite.addTest(new <API key>("<API key>"));
      suite.addTest(new <API key>("<API key>"));
      suite.addTest(new <API key>("<API key>"));
      suite.addTest(new <API key>("<API key>"));
      suite.addTest(new <API key>("<API key>"));
      suite.addTest(new <API key>("<API key>"));
      suite.addTest(new <API key>("<API key>"));
      suite.addTest(new <API key>("<API key>"));
      suite.addTest(new <API key>("<API key>"));
      suite.addTest(new <API key>("<API key>"));
      // Bug 340074
      suite.addTest(new <API key>("<API key>"));
      suite.addTest(new <API key>("<API key>"));
      suite.addTest(new <API key>("<API key>"));
      suite.addTest(new <API key>("<API key>"));
      suite.addTest(new <API key>("<API key>"));
    }
    return suite;
  }
  /**
   * The setup is done as a test, both to record its failure, and to allow execution in the server.
   */
  public void testSetup() {
    new <API key>().replaceTables(JUnitTestCase.getServerSession("MulitPU-4"));
    clearDSCache();
  }
}
package bento.language.bentocomp.flowcontrol;
/**
 * <!-- begin-user-doc -->
 * A representation of the model object '<em><b>Boolean Expression</b></em>':
 * a binary expression combining two boolean operands with a boolean operator.
 * <!-- end-user-doc -->
 *
 * <p>
 * The following features are supported:
 * </p>
 * <ul>
 *   <li>{@link bento.language.bentocomp.flowcontrol.BooleanExpression#getLeft <em>Left</em>}</li>
 *   <li>{@link bento.language.bentocomp.flowcontrol.BooleanExpression#getRight <em>Right</em>}</li>
 *   <li>{@link bento.language.bentocomp.flowcontrol.BooleanExpression#getOperator <em>Operator</em>}</li>
 * </ul>
 *
 * @see bento.language.bentocomp.flowcontrol.FlowcontrolPackage#<API key>()
 * @model
 * @generated
 */
public interface BooleanExpression extends Expression, BooleanValue {
	/**
	 * Returns the value of the '<em><b>Left</b></em>' containment reference.
	 * <!-- begin-user-doc -->
	 * <p>
	 * The left-hand operand of this boolean expression.
	 * </p>
	 * <!-- end-user-doc -->
	 * @return the value of the '<em>Left</em>' containment reference.
	 * @see #setLeft(BooleanValue)
	 * @see bento.language.bentocomp.flowcontrol.FlowcontrolPackage#<API key>()
	 * @model containment="true" required="true"
	 * @generated
	 */
	BooleanValue getLeft();
	/**
	 * Sets the value of the '{@link bento.language.bentocomp.flowcontrol.BooleanExpression#getLeft <em>Left</em>}' containment reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @param value the new value of the '<em>Left</em>' containment reference.
	 * @see #getLeft()
	 * @generated
	 */
	void setLeft(BooleanValue value);
	/**
	 * Returns the value of the '<em><b>Right</b></em>' containment reference.
	 * <!-- begin-user-doc -->
	 * <p>
	 * The right-hand operand of this boolean expression.
	 * </p>
	 * <!-- end-user-doc -->
	 * @return the value of the '<em>Right</em>' containment reference.
	 * @see #setRight(BooleanValue)
	 * @see bento.language.bentocomp.flowcontrol.FlowcontrolPackage#<API key>()
	 * @model containment="true" required="true"
	 * @generated
	 */
	BooleanValue getRight();
	/**
	 * Sets the value of the '{@link bento.language.bentocomp.flowcontrol.BooleanExpression#getRight <em>Right</em>}' containment reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @param value the new value of the '<em>Right</em>' containment reference.
	 * @see #getRight()
	 * @generated
	 */
	void setRight(BooleanValue value);
	/**
	 * Returns the value of the '<em><b>Operator</b></em>' attribute.
	 * The literals are from the enumeration {@link bento.language.bentocomp.flowcontrol.BooleanOperator}.
	 * <!-- begin-user-doc -->
	 * <p>
	 * The boolean operator applied to the left and right operands.
	 * </p>
	 * <!-- end-user-doc -->
	 * @return the value of the '<em>Operator</em>' attribute.
	 * @see bento.language.bentocomp.flowcontrol.BooleanOperator
	 * @see #setOperator(BooleanOperator)
	 * @see bento.language.bentocomp.flowcontrol.FlowcontrolPackage#<API key>()
	 * @model required="true"
	 * @generated
	 */
	BooleanOperator getOperator();
	/**
	 * Sets the value of the '{@link bento.language.bentocomp.flowcontrol.BooleanExpression#getOperator <em>Operator</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @param value the new value of the '<em>Operator</em>' attribute.
	 * @see bento.language.bentocomp.flowcontrol.BooleanOperator
	 * @see #getOperator()
	 * @generated
	 */
	void setOperator(BooleanOperator value);
} // BooleanExpression
-- Transmission-line segment with per-phase impedance (R/X), conductance (G)
-- and susceptance (B) parameters, plus audit columns.
CREATE TABLE [dbo].[LineSegment]
(
    [Uid] UNIQUEIDENTIFIER NOT NULL PRIMARY KEY,
    [Id] INT NOT NULL,
    [Number] INT NULL,
    [Name] NVARCHAR(50) NULL,
    [Description] NVARCHAR(200) NULL,
    [<API key>] UNIQUEIDENTIFIER NOT NULL,
    [FromNode] UNIQUEIDENTIFIER NULL,
    [ToNode] UNIQUEIDENTIFIER NULL,
    [R1] FLOAT NOT NULL DEFAULT 0.0,
    [R2] FLOAT NOT NULL DEFAULT 0.0,
    [R3] FLOAT NOT NULL DEFAULT 0.0,
    [R4] FLOAT NOT NULL DEFAULT 0.0,
    [R5] FLOAT NOT NULL DEFAULT 0.0,
    [R6] FLOAT NOT NULL DEFAULT 0.0,
    [X1] FLOAT NOT NULL DEFAULT 0.01,
    [X2] FLOAT NOT NULL DEFAULT 0.0,
    [X3] FLOAT NOT NULL DEFAULT 0.01,
    [X4] FLOAT NOT NULL DEFAULT 0.0,
    [X5] FLOAT NOT NULL DEFAULT 0.0,
    [X6] FLOAT NOT NULL DEFAULT 0.01,
    [G1] FLOAT NOT NULL DEFAULT 0.0,
    [G2] FLOAT NOT NULL DEFAULT 0.0,
    [G3] FLOAT NOT NULL DEFAULT 0.0,
    [G4] FLOAT NOT NULL DEFAULT 0.0,
    [G5] FLOAT NOT NULL DEFAULT 0.0,
    [G6] FLOAT NOT NULL DEFAULT 0.0,
    [B1] FLOAT NOT NULL DEFAULT 0.0,
    [B2] FLOAT NOT NULL DEFAULT 0.0,
    [B3] FLOAT NOT NULL DEFAULT 0.0,
    [B4] FLOAT NOT NULL DEFAULT 0.0,
    [B5] FLOAT NOT NULL DEFAULT 0.0,
    [B6] FLOAT NOT NULL DEFAULT 0.0,
    -- FIX: the audit columns were declared TIME, which stores only a time of day,
    -- so the GETUTCDATE() defaults silently lost the date part. DATETIME2 keeps
    -- the full UTC timestamp.
    [CreatedOn] DATETIME2 NOT NULL CONSTRAINT <API key> DEFAULT(GETUTCDATE()),
    [CreatedBy] NVARCHAR(200) NOT NULL CONSTRAINT <API key> DEFAULT(SUSER_NAME()),
    [LastEditedOn] DATETIME2 NOT NULL CONSTRAINT <API key> DEFAULT(GETUTCDATE()),
    [LastEditedBy] NVARCHAR(200) NOT NULL CONSTRAINT <API key> DEFAULT(SUSER_NAME()),
    CONSTRAINT [<API key>] FOREIGN KEY ([<API key>]) REFERENCES [TransmissionLine]([Uid]) ON DELETE CASCADE ON UPDATE CASCADE,
    CONSTRAINT [<API key>] FOREIGN KEY ([FromNode]) REFERENCES [Node]([Uid]),
    CONSTRAINT [<API key>] FOREIGN KEY ([ToNode]) REFERENCES [Node]([Uid]),
    CONSTRAINT [CK_LineSegment_Id] CHECK (Id > 0),
    CONSTRAINT [<API key>] CHECK (Number > 0)
)
GO
-- Non-unique index supporting lookups by the user-facing Id value
-- (the primary key is the Uid GUID).
CREATE INDEX [IX_LineSegment_Id] ON [dbo].[LineSegment] ([Id])
GO
-- Audit trigger: stamps LastEditedOn/LastEditedBy on every update.
CREATE TRIGGER [dbo].[<API key>]
    ON [dbo].[LineSegment]
    FOR UPDATE
AS
BEGIN
    SET NOCOUNT ON
    -- FIX: the original issued two separate UPDATE statements, touching every
    -- affected row twice and allowing the two audit columns to be stamped at
    -- different instants. A single UPDATE sets both atomically.
    UPDATE [dbo].[LineSegment]
       SET LastEditedOn = GETUTCDATE(),
           LastEditedBy = SUSER_NAME()
     WHERE Uid IN (SELECT Uid FROM INSERTED)
END
package com.google.eclipse.mechanic.core.keybinding;
import java.io.IOException;
import java.util.Set;
import org.eclipse.core.commands.Command;
import org.eclipse.core.commands.<API key>;
import org.eclipse.core.commands.common.NotDefinedException;
import org.eclipse.jface.bindings.Binding;
import org.eclipse.jface.bindings.Scheme;
import org.eclipse.jface.bindings.keys.KeySequence;
import org.eclipse.jface.bindings.keys.ParseException;
import org.eclipse.swt.SWT;
import org.eclipse.ui.IWorkbench;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.commands.ICommandService;
import org.eclipse.ui.keys.IBindingService;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.Objects;
import com.google.common.base.Preconditions;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.eclipse.mechanic.CompositeTask;
import com.google.eclipse.mechanic.<API key>;
import com.google.eclipse.mechanic.core.keybinding.KbaChangeSet.Action;
import com.google.eclipse.mechanic.plugin.core.MechanicLog;
/**
* Configures keyboard preferences for a task.
*
* @author zorzella@google.com
*/
class <API key> extends CompositeTask {
public static final String <API key> = "KBA_ENABLE_REMOVE";
static final boolean ENABLE_EXP_REM() {
return System.getProperty(<API key>, "true").equals("true");
}
private final MechanicLog log;
private final IWorkbench workbench;
private final ICommandService commandService;
private final IBindingService bindingService;
private final KeyBindingsModel model;
private final String id;
public <API key>(KeyBindingsModel model, <API key> taskRef) {
this(
MechanicLog.getDefault(),
PlatformUI.getWorkbench(),
(ICommandService) PlatformUI.getWorkbench().getService(ICommandService.class),
(IBindingService) PlatformUI.getWorkbench().getService(IBindingService.class),
model,
String.format("%s@%s", <API key>.class.getName(), taskRef.getPath()));
}
<API key>(
MechanicLog log,
IWorkbench workbench,
ICommandService commandService,
IBindingService bindingService,
KeyBindingsModel model,
String id) {
this.log = log;
this.workbench = workbench;
this.commandService = commandService;
this.bindingService = bindingService;
this.model = Preconditions.checkNotNull(model);
this.id = id;
}
public String getDescription() {
Set<String> addedBindings = <API key>(Action.ADD);
Set<String> removedBindings = <API key>(Action.REMOVE);
StringBuilder result = new StringBuilder();
if (addedBindings.size() > 0) {
result.append("Add these bindings:\n" +
"\n" +
Joiner.on("\n").join(addedBindings) + "\n\n");
}
if (removedBindings.size() > 0) {
result.append("Remove these bindings:\n" +
"\n" +
Joiner.on("\n").join(removedBindings) + "\n\n");
}
// TODO assert added or removed > 0?
return result.toString();
}
private final Function<Binding, String> <API key> = new Function<Binding, String>() {
public String apply(Binding b) {
try {
return b.getTriggerSequence().format() + " : " + b.<API key>().getName();
} catch (NotDefinedException e) {
log.logError(e);
throw new RuntimeException(e);
} catch (RuntimeException e) {
log.logError(e);
throw e;
}
}
};
private Set<String> <API key>(Action action) {
Set<String> result = Sets.newHashSet();
for(KbaChangeSet changeSet : model.<API key>(action)) {
result.addAll(Lists.newArrayList(
Iterables.transform(doEvaluate(changeSet).keyBindings.addedBindings, <API key>)));
result.addAll(Lists.newArrayList(
Iterables.transform(doEvaluate(changeSet).keyBindings.removedBindings, <API key>)));
}
return result;
}
public String getTitle() {
return "Keyboard binding fixes: " + this.model.getMetadata().getDescription();
}
public boolean evaluate() {
boolean dirty = false;
// If "dirty" is set to true, it means we made some modification that
// we still need to persist.
for(KbaChangeSet changeSet : model.<API key>()) {
dirty = dirty || doEvaluate(changeSet).keyBindings.isDirty();
}
return !dirty;
}
private static final class EvaluationResult {
private final Scheme scheme;
private final KeyBindings keyBindings;
public EvaluationResult(
final Scheme scheme,
final KeyBindings keyBindings) {
this.scheme = scheme;
this.keyBindings = keyBindings;
}
}
private EvaluationResult doEvaluate(
final KbaChangeSet changeSet) {
final KeyBindings bindings = new KeyBindings(bindingService.getBindings());
final Scheme scheme = bindingService.getScheme(changeSet.getSchemeId());
switch (changeSet.getAction()) {
case ADD:
<API key>(changeSet, bindings, scheme);
break;
case REMOVE:
<API key>(changeSet, bindings, scheme);
break;
default:
throw new <API key>();
}
return new EvaluationResult(scheme, bindings);
}
private void <API key>(final KbaChangeSet changeSet,
final KeyBindings bindings, final Scheme scheme) {
if (!ENABLE_EXP_REM()) {
return;
}
for (KbaBinding toRemove : changeSet.getBindingList()) {
Command commandToRemove;
try {
commandToRemove = commandService.getCommand(toRemove.getCid());
} catch (RuntimeException e) {
log.logError(e);
throw e;
}
KeySequence triggerSequence;
try {
triggerSequence = KeySequence.getInstance(toRemove.getKeySequence());
} catch (ParseException e) {
log.logError(e, "Invalid key sequence: %s", toRemove.getKeySequence());
throw new RuntimeException(e);
}
// Removing a system binding means one of:
// 1. if it's a user binding, remove it
// 2. if it's a system binding, create a null-command user binding doppleganger
bindings.<API key>(
scheme,
changeSet.getPlatform(),
changeSet.getContextId(),
triggerSequence,
commandToRemove,
toRemove.getParameters());
// If our remove binding is against the "null" platform, it should apply
// to all platforms. The only one that matters is the current platform
if (changeSet.getPlatform() == null) {
bindings.<API key>(
scheme,
SWT.getPlatform(),
changeSet.getContextId(),
triggerSequence,
commandToRemove,
toRemove.getParameters());
}
}
}
private void <API key>(final KbaChangeSet changeSet,
final KeyBindings bindings, final Scheme scheme) {
for (KbaBinding toAdd : changeSet.getBindingList()) {
Command commandToAdd = commandService.getCommand(toAdd.getCid());
if (!commandToAdd.isDefined()) {
log.logWarning("Command '" + toAdd.getCid() + "' does not exist. Skipping.");
continue;
}
<API key> <API key> =
<API key>.generateCommand(commandToAdd, toAdd.getParameters());
KeySequence triggerSequence;
try {
triggerSequence = KeySequence.getInstance(toAdd.getKeySequence());
} catch (ParseException e) {
log.logError(e, "Invalid key sequence: %s", toAdd.getKeySequence());
throw new RuntimeException(e);
}
bindings.addIfNotPresent(
scheme,
changeSet.getPlatform(),
changeSet.getContextId(),
triggerSequence,
<API key>);
}
}
public void run() {
for(KbaChangeSet changeSet : model.<API key>()) {
final EvaluationResult result = doEvaluate(changeSet);
// If there was any modification, persist it
if (result.keyBindings.isDirty()) {
workbench.getDisplay().syncExec(new Runnable() {
public void run() {
try {
bindingService.savePreferences(result.scheme, result.keyBindings.toArray());
} catch (IOException e) {
throw new RuntimeException(e);
}
}
});
}
}
}
public String getId() {
return id;
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof <API key>)) {
return false;
}
<API key> that = (<API key>)obj;
return Objects.equal(this.id, that.id);
}
@Override
public int hashCode() {
    // equals() compares ids null-safely via Objects.equal, so hashCode must not
    // throw NullPointerException for a null id — that would break the
    // equals/hashCode contract for such instances. 0 matches Objects.hashCode(null).
    return this.id == null ? 0 : this.id.hashCode();
}
@Override
public String toString() {
    // Delegates to the underlying model's textual representation.
    return model.toString();
}
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<title>Uses of Class org.apache.poi.xdgf.usermodel.section.geometry.MoveTo (POI API Documentation)</title>
<link rel="stylesheet" type="text/css" href="../../../../../../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
    if (location.href.indexOf('is-external=true') == -1) {
        parent.document.title="Uses of Class org.apache.poi.xdgf.usermodel.section.geometry.MoveTo (POI API Documentation)";
    }
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<div class="topNav"><a name="navbar_top">
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../../../org/apache/poi/xdgf/usermodel/section/geometry/MoveTo.html" title="class in org.apache.poi.xdgf.usermodel.section.geometry">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../../../index.html?org/apache/poi/xdgf/usermodel/section/geometry/class-use/MoveTo.html" target="_top">Frames</a></li>
<li><a href="MoveTo.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="<API key>">
<li><a href="../../../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
  allClassesLink = document.getElementById("<API key>");
  if(window==top) {
    allClassesLink.style.display = "block";
  }
  else {
    allClassesLink.style.display = "none";
  }
//-->
</script>
</div>
<a name="skip-navbar_top">
</a></div>
<div class="header">
<h2 title="Uses of Class org.apache.poi.xdgf.usermodel.section.geometry.MoveTo" class="title">Uses of Class<br>org.apache.poi.xdgf.usermodel.section.geometry.MoveTo</h2>
</div>
<div class="classUseContainer">No usage of org.apache.poi.xdgf.usermodel.section.geometry.MoveTo</div>
<div class="bottomNav"><a name="navbar_bottom">
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="<API key>">
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../../../org/apache/poi/xdgf/usermodel/section/geometry/MoveTo.html" title="class in org.apache.poi.xdgf.usermodel.section.geometry">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../../../index.html?org/apache/poi/xdgf/usermodel/section/geometry/class-use/MoveTo.html" target="_top">Frames</a></li>
<li><a href="MoveTo.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="<API key>">
<li><a href="../../../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
  allClassesLink = document.getElementById("<API key>");
  if(window==top) {
    allClassesLink.style.display = "block";
  }
  else {
    allClassesLink.style.display = "none";
  }
//-->
</script>
</div>
<a name="skip-navbar_bottom">
</a></div>
<p class="legalCopy"><small>
<i>Copyright 2016 The Apache Software Foundation or
its licensors, as applicable.</i>
</small></p>
</body>
</html> |
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>bREPL Connection</title>
<!--[if lt IE 9]>
<script src="http://html5shiv.googlecode.com/svn/trunk/html5.js"></script>
<![endif]-->
</head>
<body>
<canvas id="canvas" width="800" height="600"></canvas>
<script src="js/reagi_game.js"></script>
</body>
</html> |
package com.intivefdv.rent;
import java.util.logging.Level;
import java.util.logging.Logger;
public class Rental{
private IRent _rental;
private IPromotion _promotion;
protected static final Logger LOGGER = Logger.getLogger(Rental.class.getName());
public Rental(IRent rental, IPromotion promo) {
this._rental = rental;
this._promotion = promo;
}
public Double rent() {
Double total = this._rental.askRent();
LOGGER.log(Level.INFO, "Rent is done!");
return total;
}
public Double promotionRent() {
Double total=this._promotion.askFamilyRent();
return total;
}
} |
package org.gemoc.bcool.transformation.bcool2qvto.helper;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Set;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.common.util.TreeIterator;
import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.ecore.EAttribute;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.ecore.EReference;
import org.eclipse.emf.ecore.resource.Resource;
import org.eclipse.emf.ecore.resource.ResourceSet;
import org.eclipse.emf.ecore.resource.impl.ResourceSetImpl;
import org.eclipse.emf.ecore.util.EcoreUtil;
import org.eclipse.ocl.examples.xtext.base.basecs.ImportCS;
import org.eclipse.ocl.examples.xtext.completeocl.completeoclcs.<API key>;
import org.eclipse.ocl.examples.xtext.completeocl.completeoclcs.DefPropertyCS;
import org.eclipse.ocl.examples.xtext.completeocl.completeoclcs.<API key>;
import org.eclipse.xtext.resource.SaveOptions;
import org.eclipse.xtext.resource.SaveOptions.Builder;
import org.eclipse.xtext.resource.XtextResource;
import org.eclipse.xtext.resource.XtextResourceSet;
import org.gemoc.bcool.model.bcool.BCoolOperatorArg;
import org.gemoc.bcool.model.bcool.BCoolSpecification;
import org.gemoc.bcool.model.bcool.EventExpression;
import org.gemoc.bcool.model.bcool.<API key>;
import org.gemoc.gexpressions.GExpression;
import org.eclipse.ocl.ecore.IntegerLiteralExp;
import com.google.inject.Injector;
import fr.inria.aoste.timesquare.ECL.ECLDocument;
import fr.inria.aoste.timesquare.ccslkernel.model.TimeModel.BasicType.IntegerElement;
import fr.inria.aoste.timesquare.ccslkernel.model.TimeModel.BasicType.impl.IntegerImpl;
import fr.inria.aoste.timesquare.ecl.xtext.EclStandaloneSetup;
import org.gemoc.gexpressions.GAndExpression;
import org.gemoc.gexpressions.GBooleanExpression;
import org.gemoc.gexpressions.GBraceExpression;
import org.gemoc.gexpressions.GEqualityExpression;
import org.gemoc.gexpressions.GIntegerExpression;
import org.gemoc.gexpressions.<API key>;
import org.gemoc.gexpressions.GNegationExpression;
import org.gemoc.gexpressions.<API key>;
import org.gemoc.gexpressions.GStringExpression;
import org.gemoc.gexpressions.GexpressionsFactory;
//import org.gemoc.gexpressions.xtext.<API key>;
import org.eclipse.xtext.serializer.impl.Serializer;
@SuppressWarnings("restriction")
/**
 * Helper used by the BCOoL-to-QVTo transformation: loads ECL documents,
 * resolves the nsURI of the metamodels they import, and serializes
 * GExpression trees (equality/and/brace/negation/operand) to OCL-like text.
 */
public class helperNsURI {

    /**
     * Extracts a short alias from a path-like string: the segment between the last
     * '/' (or the start) and the last '.'.
     * NOTE(review): assumes the string contains a '.'; otherwise lastIndexOf('.')
     * is -1 and substring throws — TODO confirm callers always pass dotted paths.
     */
    public String prettyPrintAlias(String e) {
        return e.substring(maxPositive(0,e.lastIndexOf('/')+1), e.lastIndexOf('.'));
    }

    /** Returns the larger of the two values, treating negatives as absent (0 when both are negative). */
    private int maxPositive(int i1, int i2) {
        if (i1 < 0 && i2 <0)
            return 0;
        if (i1 < 0)
            return i2;
        if (i2 < 0)
            return i1;
        if (i1 < i2)
            return i2;
        else
            return i1;
    }

    /**
     * Loads the ECL document referenced by the given import statement.
     * The import URI is normalized: "platform:/plugin" and "platform:/resource"
     * prefixes are stripped, anything else is treated as a file path.
     * NOTE(review): load failures are only printed; getContents().get(0) below
     * would then fail on an empty resource — TODO confirm inputs are always valid.
     */
    public ECLDocument getEclDocument(<API key> <API key>){
        //init Xtext
        String modelPath = <API key>.getImportURI();
        EclStandaloneSetup ess= new EclStandaloneSetup();
        Injector injector = ess.createInjector();
        XtextResourceSet aSet = injector.getInstance(XtextResourceSet.class);
        aSet.addLoadOption(XtextResource.OPTION_RESOLVE_ALL, Boolean.FALSE);
        EcoreUtil.resolveAll(aSet);
        EclStandaloneSetup.doSetup();
        URI uri =null;
        //filter URI
        if (modelPath.startsWith("platform:/plugin")){
            uri = URI.<API key>(modelPath.replace("platform:/plugin", ""), false);
        }else
        if(modelPath.startsWith("platform:/resource")){
            uri = URI.<API key>(modelPath.replace("platform:/resource", ""),false);
        }else{//relative path
            // throw new <API key>("the path of the library must be platform based (platform:/resource or platform:/plugin)");
            uri = URI.createFileURI(modelPath);
        }
        //load the corresponding resource
        Resource eclResource = aSet.getResource(uri, true);
        HashMap<Object, Object> saveOptions = new HashMap<Object, Object>();
        Builder aBuilder = SaveOptions.newBuilder();
        SaveOptions anOption = aBuilder.getOptions();
        anOption.addTo(saveOptions);
        try {
            eclResource.load(saveOptions);
        } catch (IOException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        }
        ECLDocument eclDoc = (ECLDocument)eclResource.getContents().get(0);
        return eclDoc;
    }

    /**
     * Same as {@link #getEclDocument} but takes the model path directly instead
     * of an import statement, and resolves all proxies eagerly
     * (OPTION_RESOLVE_ALL = TRUE here, FALSE in the other overload).
     * NOTE(review): the two loaders duplicate the URI-normalization and load
     * logic — candidate for extraction into a shared helper.
     */
    public ECLDocument <API key>(String modelPath){
        //init Xtext
        EclStandaloneSetup ess= new EclStandaloneSetup();
        Injector injector = ess.createInjector();
        XtextResourceSet aSet = injector.getInstance(XtextResourceSet.class);
        aSet.addLoadOption(XtextResource.OPTION_RESOLVE_ALL, Boolean.TRUE);
        EcoreUtil.resolveAll(aSet);
        EclStandaloneSetup.doSetup();
        URI uri =null;
        //filter URI
        if (modelPath.startsWith("platform:/plugin")){
            uri = URI.<API key>(modelPath.replace("platform:/plugin", ""), false);
        }else
        if(modelPath.startsWith("platform:/resource")){
            uri = URI.<API key>(modelPath.replace("platform:/resource", ""),false);
        }else{//relative path
            // throw new <API key>("the path of the library must be platform based (platform:/resource or platform:/plugin)");
            uri = URI.createFileURI(modelPath);
        }
        //load the corresponding resource
        Resource eclResource = aSet.getResource(uri, true);
        HashMap<Object, Object> saveOptions = new HashMap<Object, Object>();
        Builder aBuilder = SaveOptions.newBuilder();
        SaveOptions anOption = aBuilder.getOptions();
        anOption.addTo(saveOptions);
        try {
            eclResource.load(saveOptions);
        } catch (IOException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        }
        ECLDocument eclDoc = (ECLDocument)eclResource.getContents().get(0);
        return eclDoc;
    }

    /**
     * Forces resolution of every proxy in the resource set containing the given
     * BCOoL specification. The returned string is only a marker comment used by
     * the generated output.
     */
    public String resolveAll(BCoolSpecification aBCoolSpec){
        EcoreUtil.resolveAll(aBCoolSpec.eResource().getResourceSet());
        return "// This is the result of the compilation of a BCool spec";
    }

    // getNSURI:
    // return the corresponding NSURI for the ecore imported by the ecl
    /**
     * Resolves the nsURI of the first metamodel imported by the ECL document
     * behind the given import. Accepts either a ".ecore" platform URI (the
     * package's nsURI is read from the loaded resource) or a literal "http:/"
     * nsURI; anything else yields the sentinel string "bad metamodel in ecl".
     */
    public String getNSURI(<API key> <API key>){
        ECLDocument eclDoc = getEclDocument(<API key>);
        String oclimport = eclDoc.getOwnedImport().get(0).toString();
        // I get the first import that corresponds with the metamodel
        oclimport = oclimport.substring(oclimport.indexOf('\'')+1, oclimport.lastIndexOf('\''));
        // Depending the kind of import we found differently the NSURI
        if (oclimport.endsWith(".ecore")) {
            // In ECL, if the .ecore is imported as resource, it is changed by plugin
            // WARN: the BCOoL should not be in the same workbench that the languages
            if (oclimport.startsWith("platform:/resource")) {
                oclimport = oclimport.replace("resource","plugin");
            }
            URI metaURI=URI.createURI(oclimport,false);
            ResourceSet resourceSet = new ResourceSetImpl();
            Resource resource1 = resourceSet.getResource(metaURI, true);
            EPackage wdwPackage = (EPackage)resource1.getContents().get(0);
            return wdwPackage.getNsURI();
        // It is a NSURI
        }else if (oclimport.startsWith("http:/")) {
            return oclimport;
        // Not recognized
        }else {
            return "bad metamodel in ecl";
        }
    }

    // getNSURIIndex (i):
    // return the corresponding NSURI for the i-esimo ecore imported by the ecl.
    /**
     * Like {@link #getNSURI} but resolves the i-th import of the ECL document
     * instead of the first one.
     */
    public String getNSURIIndex(<API key> <API key>, Integer i){
        ECLDocument eclDoc = getEclDocument(<API key>);
        // This gets directly the imported statement
        String oclimport = eclDoc.getOwnedImport().get(i).getPathName().toString();
        // some cleaning
        oclimport = oclimport.substring(oclimport.indexOf('\'')+1, oclimport.lastIndexOf('\''));
        // Depending on the kind of import we found differently the NSURI
        if (oclimport.endsWith(".ecore")) {
            // In ECL, if the .ecore is imported as resource, it is changed by plugin
            // WARN: the BCOoL should not be in the same workbench that the languages
            if (oclimport.startsWith("platform:/resource")) {
                oclimport = oclimport.replace("resource","plugin");
            }
            URI metaURI=URI.createURI(oclimport,false);
            ResourceSet resourceSet = new ResourceSetImpl();
            Resource resource1 = resourceSet.getResource(metaURI, true);
            EPackage wdwPackage = (EPackage)resource1.getContents().get(0);
            return wdwPackage.getNsURI();
        // It is a NSURI
        }else if (oclimport.startsWith("http:/")) {
            return oclimport;
        // Not recognized
        }else {
            return "bad metamodel in ecl";
        }
    }

    // public String getpackageIndex(ECLDocument eclDoc, String objectName){
    // EList<<API key>> allpackages = eclDoc.getPackages();
    // for(int i=0; i< allpackages.size(); i++){
    // <API key> pdecl = allpackages.get(i);
    // org.eclipse.ocl.examples.pivot.Package p = pdecl.getPackage();
    // TreeIterator<EObject> it = p.eAllContents();
    // while( it.hasNext()){
    // EObject eo = it.next();
    // String eoName = "";
    // try {
    // if(eo.getClass().getMethod("getName") != null){
    // eoName = (String) eo.getClass().getMethod("getName").invoke(eo, new Object[]{});
    // } catch (Exception e) {
    // // TODO Auto-generated catch block
    // e.printStackTrace();
    // if ((eoName != null) && (eoName.compareTo(objectName)==0)){
    // return (new Integer(i+1)).toString();
    // return (new Integer(1)).toString(); //1 is returned by default since we do not necessarly know the name of the rootElement and it is PAckage by default (for UML)
    /**
     * Returns the 1-based position of the import whose resource URI matches the
     * resource of the given context declaration, or 1 when no import matches
     * (1 is the documented default, see the commented-out variant above).
     */
    public int getpackageIndex(ECLDocument eclDoc, <API key> contextDecl){
        // EList<<API key>> allpackages = eclDoc.getPackages();
        // System.out.println(contextDecl.getPathName().getElement());
        // System.out.println(contextDecl.getPathName().getElement().eResource());
        // System.out.println(contextDecl.getPathName().getElement().eResource().getURI());
        String packageName = contextDecl.getPathName().getElement().eResource().getURI().toString();
        EList<ImportCS> allImports = eclDoc.getOwnedImport();
        for(int i = 0; i < allImports.size(); i++){
            if (allImports.get(i).getPathName().getElement().eResource().getURI().toString().compareTo(packageName) == 0){
                return i+1;
            }
        }
        return 1;
        // for(int i=0; i< allpackages.size(); i++){
        // <API key> pdecl = allpackages.get(i);
        // if ((packageName != null) && (packageName.compareTo(pdecl.getPackage().getName())==0)){
        // return (new Integer(i+1)).toString();
        // return (new Integer(1)).toString(); //1 is returned by default since we do not necessarly know the name of the rootElement and it is PAckage by default (for UML)
    }

    /**
     * NOTE(review): looks unfinished — the result of getNSURI is computed but
     * ignored and the empty string is always returned. TODO confirm intent.
     */
    public String <API key> (<API key> <API key>){
        String uri = getNSURI (<API key>);
        return "";
    }

    /** Returns the file-system path of the resource containing the given EObject. */
    public String getModelPath(EObject d){
        String res ="";
        res = d.eResource().getURI().toFileString();
        return res;
    }

    // given a <API key> returns the serialization
    /**
     * Serializes an operand expression to dotted navigation text
     * (e.g. "arg.feature.subfeature") by walking the body chain down to the
     * BCoolOperatorArg at its root and prepending each feature name.
     */
    public String GOperandtoString (<API key> operand){
        String m = "";
        <API key> test1 = (<API key>) operand;
        while (true)
        {
            if (test1.<API key>() instanceof EAttribute) {
                EAttribute nameatt = (EAttribute) test1.<API key>();
                m = "." + nameatt.getName() + m;
            } else if (test1.<API key>() instanceof EReference) {
                EReference nameatt = (EReference) test1.<API key>();
                m = "." + nameatt.getName() + m;
            }
            if (test1.getBody() instanceof <API key>) break;
            test1 = (<API key>) test1.getBody();
        }
        <API key> test2 = (<API key>) test1.getBody();
        BCoolOperatorArg nameatt = (BCoolOperatorArg) test2.<API key>();
        m = nameatt.getName() + m;
        return m;
    }

    // GEqualitytoString:
    // Convert a GEquality expression to String. The operand can be either a <API key> or String
    // Other operands may cause an exception, e.g., Integer
    /**
     * Serializes an equality expression as "left=right" or "left<>right".
     * Each side may be an operand navigation, a quoted string literal, or a
     * boolean literal; unsupported operand kinds are silently serialized empty.
     */
    public String GEqualitytoString (GEqualityExpression exp)
    {
        String serial = "";
        String leftoperstr = "";
        String rightoperstr = "";
        if (exp.getLeftOperand() instanceof <API key>) {
            <API key> leftoper = (<API key>) exp.getLeftOperand();
            leftoperstr = GOperandtoString (leftoper);
        } else if (exp.getLeftOperand() instanceof GStringExpression){
            GStringExpression leftoper = (GStringExpression) exp.getLeftOperand();
            leftoperstr = '"'+ leftoper.getValue() + '"';
        } else if (exp.getLeftOperand() instanceof GBooleanExpression) {
            GBooleanExpression leftoper = (GBooleanExpression) exp.getLeftOperand();
            if (leftoper.isValue()) {
                leftoperstr = "true";
            } else {leftoperstr = "false";}
        }
        if (exp.getRightOperand() instanceof <API key>) {
            <API key> rightoper = (<API key>) exp.getRightOperand();
            rightoperstr = GOperandtoString (rightoper);
            //serial = serial + "=" +GOperandtoString (rightoper);
        } else if (exp.getRightOperand() instanceof GStringExpression){
            GStringExpression rightoper = (GStringExpression) exp.getRightOperand();
            //serial = serial + "=" + '"' + rightoper.getValue()+ '"';
            rightoperstr = '"' + rightoper.getValue()+ '"';
        } else if (exp.getRightOperand() instanceof GBooleanExpression) {
            GBooleanExpression rightoper = (GBooleanExpression) exp.getRightOperand();
            if (rightoper.isValue()) {
                rightoperstr = "true";
            } else {rightoperstr = "false";}
        }
        // Equal and Not Equal are the same GExpression
        // NOTE: NOTEQUAL must be tested first because it also contains "EQUAL".
        if (exp.getOperator().getName().contains("NOTEQUAL")) {
            serial = leftoperstr + "<>" + rightoperstr;
        }else if (exp.getOperator().getName().contains("EQUAL")){
            serial = leftoperstr + "=" + rightoperstr;
        }
        return serial;
    }

    /** Serializes a parenthesized expression by recursing on its inner expression. */
    public String <API key> (GBraceExpression exp)
    {
        String serial = "";
        if (exp.getInnerExpression() instanceof GEqualityExpression ){
            GEqualityExpression gexp = (GEqualityExpression) exp.getInnerExpression();
            serial = "("+ GEqualitytoString (gexp)+ ")";
        } else if (exp.getInnerExpression() instanceof GAndExpression) {
            GAndExpression gexp = (GAndExpression) exp.getInnerExpression();
            serial = "(" + GAndtoString (gexp) + ")";
        } else if (exp.getInnerExpression() instanceof GBraceExpression) {
            GBraceExpression gexp = (GBraceExpression) exp.getInnerExpression();
            serial = "(" + <API key> (gexp) + ")";
        } else if (exp.getInnerExpression() instanceof <API key>){
            <API key> gexp = (<API key>) exp.getInnerExpression();
            serial = "(" + GOperandtoString(gexp) + ")";
        }else if (exp.getInnerExpression() instanceof GNegationExpression) {
            GNegationExpression gexp = (GNegationExpression) exp.getInnerExpression();
            serial = "(" + <API key>(gexp) + ")";
        }
        return serial;
    }

    /**
     * Serializes a negation: "not(...)" around a brace expression, or recurses
     * through a directly nested negation.
     * NOTE(review): a nested negation is serialized without emitting "not",
     * i.e. double negation collapses — TODO confirm this is intended.
     */
    public String <API key>(GNegationExpression exp) {
        String serial = "";
        if (exp.getOperand() instanceof GBraceExpression){
            GBraceExpression gexp = (GBraceExpression) exp.getOperand();
            serial = "not" + <API key> (gexp);
        }else if (exp.getOperand() instanceof GNegationExpression ) {
            GNegationExpression gexp2 = (GNegationExpression) exp.getOperand();
            serial = <API key> (gexp2);
        }
        return serial;
    }

    /**
     * Serializes a conjunction as "left and right", recursing on each side.
     * NOTE(review): in the right-operand branch for operand expressions the code
     * casts exp.getLeftOperand() instead of getRightOperand() — looks like a
     * copy-paste slip; left byte-identical here, TODO confirm and fix.
     */
    public String GAndtoString (GAndExpression exp)
    {
        String serial ="";
        // first the left operator
        if (exp.getLeftOperand() instanceof GBraceExpression ) {
            GBraceExpression gexp = (GBraceExpression) exp.getLeftOperand();
            serial = <API key> (gexp);
        } else if (exp.getLeftOperand() instanceof <API key> ){
            <API key> gexp = (<API key>) exp.getLeftOperand();
            serial = GOperandtoString (gexp);
        } else if (exp.getLeftOperand() instanceof GAndExpression){
            GAndExpression gexp = (GAndExpression) exp.getLeftOperand();
            serial = GAndtoString (gexp);
        } else if (exp.getLeftOperand() instanceof GNegationExpression) {
            GNegationExpression gexp = (GNegationExpression) exp.getLeftOperand();
            serial = <API key> (gexp);
            //GBraceExpression gexp2 = (GBraceExpression) gexp.getOperand();
            //serial = "not" + <API key> (gexp2);
        }
        serial = serial +" and ";
        // next, the second operator
        if (exp.getRightOperand() instanceof GBraceExpression ) {
            GBraceExpression gexp = (GBraceExpression) exp.getRightOperand();
            serial = serial + <API key> (gexp);
        } else if (exp.getRightOperand() instanceof <API key> ){
            <API key> gexp = (<API key>) exp.getLeftOperand();
            serial = serial + GOperandtoString (gexp);
        } else if (exp.getRightOperand() instanceof GAndExpression ){
            GAndExpression gexp = (GAndExpression) exp.getRightOperand();
            serial = serial + GAndtoString (gexp);
        } else if (exp.getRightOperand() instanceof GNegationExpression) {
            GNegationExpression gexp = (GNegationExpression) exp.getRightOperand();
            serial= serial + <API key> (gexp);
            //GBraceExpression gexp2 = (GBraceExpression) gexp.getOperand();
            //serial = "not" + <API key> (gexp2);
        }
        return serial;
    }

    /**
     * Top-level GExpression serializer: dispatches on the concrete expression
     * kind; any serialization failure is reported and replaced by an error string.
     */
    public String GexpressiontoString (GExpression exp ){
        String serial = "";
        try {
            if (exp instanceof GEqualityExpression) {
                GEqualityExpression gexp = (GEqualityExpression) exp;
                serial = GEqualitytoString (gexp);
            } else if (exp instanceof GAndExpression) {
                GAndExpression gexp = (GAndExpression) exp;
                serial = GAndtoString (gexp);
            }else if (exp instanceof GBraceExpression){
                GBraceExpression gexp = (GBraceExpression) exp;
                serial = <API key> (gexp);
            }else if (exp instanceof <API key>){
                <API key> gexp = (<API key>) exp;
                serial = GOperandtoString(gexp);
            }else if (exp instanceof GNegationExpression){
                GNegationExpression gexp = (GNegationExpression) exp;
                //GBraceExpression gexp2 = (GBraceExpression) gexp.getOperand();
                serial = <API key> (gexp);
            } else if (exp instanceof GStringExpression){
                GStringExpression gexp =(GStringExpression) exp;
                serial = gexp.getValue();
            }
        } catch (Exception ex) { // fall back:
            ex.printStackTrace();
            serial = "Bad GExpression serialization!";
        }
        return serial;
    }

    /**
     * Returns the name of a DSE-like object for any of the supported kinds,
     * or the sentinel "naddda" when the object kind is not recognized.
     */
    public String DSEtoString (EObject dse ){
        if (dse instanceof DefPropertyCS) {
            DefPropertyCS dsetmp = (DefPropertyCS) dse ;
            return dsetmp.getName();
        }
        if (dse instanceof EventExpression) {
            EventExpression dsetmp = (EventExpression) dse ;
            return dsetmp.getName();
        }
        if (dse instanceof IntegerImpl) {
            IntegerImpl dsetmp = (IntegerImpl) dse ;
            return dsetmp.getName();
        }
        if (dse instanceof IntegerElement) {
            IntegerElement dsetmp = (IntegerElement) dse ;
            return dsetmp.getName();
        }
        if (dse instanceof BCoolOperatorArg) {
            BCoolOperatorArg dsetmp= (BCoolOperatorArg) dse;
            return dsetmp.getName();
        }
        return "naddda";
    }
}
package org.eclipse.egit.core.test.models;
import static org.junit.Assert.assertEquals;
import java.io.BufferedReader;
import java.io.<API key>;
import java.io.InputStreamReader;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.resources.mapping.ModelProvider;
import org.eclipse.core.resources.mapping.ResourceMapping;
import org.eclipse.core.resources.mapping.<API key>;
import org.eclipse.core.resources.mapping.ResourceTraversal;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.core.runtime.Platform;
import org.eclipse.core.runtime.content.IContentType;
import org.eclipse.core.runtime.jobs.Job;
import org.eclipse.egit.core.AdapterUtils;
import org.eclipse.egit.core.internal.Utils;
import org.eclipse.egit.core.op.MergeOperation;
import org.eclipse.egit.core.synchronize.<API key>;
import org.eclipse.egit.core.synchronize.<API key>;
import org.eclipse.egit.core.synchronize.<API key>;
import org.eclipse.egit.core.synchronize.dto.GitSynchronizeData;
import org.eclipse.egit.core.synchronize.dto.<API key>;
import org.eclipse.egit.core.test.GitTestCase;
import org.eclipse.egit.core.test.TestRepository;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.Status;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.team.core.mapping.IMergeContext;
import org.eclipse.team.core.mapping.<API key>;
import org.eclipse.team.core.subscribers.<API key>;
import org.junit.Before;
/**
* Provides shared utility methods for unit tests working on logical models. The
* model provider used for tests, {@link SampleModelProvider}, links all
* "*.sample" files from a common directory into a single logical model.
*/
public abstract class ModelTestCase extends GitTestCase {
    // File extension handled by the sample model provider; files with this
    // extension in a common directory form one logical model.
    protected static final String <API key> = SampleModelProvider.<API key>;

    @Override
    @Before
    public void setUp() throws Exception {
        super.setUp();
        // Register the sample extension as a text content type so that
        // text-oriented comparisons apply to "*.sample" files.
        IContentType textType = Platform.<API key>()
                .getContentType("org.eclipse.core.runtime.text");
        textType.addFileSpec(<API key>,
                IContentType.FILE_EXTENSION_SPEC);
    }

    /**
     * Overwrites targetFile with newContents (UTF-8), stages it in the test
     * repository and commits with the given message.
     *
     * @return the resulting commit
     */
    protected RevCommit <API key>(TestRepository testRepository,
            IFile targetFile, String newContents, String commitMessage)
            throws Exception {
        targetFile.setContents(
                new <API key>(newContents.getBytes("UTF-8")),
                IResource.FORCE, new NullProgressMonitor());
        testRepository.addToIndex(targetFile);
        return testRepository.commit(commitMessage);
    }

    /**
     * Asserts that the file's content equals expectedContents, reading the file
     * line by line and normalizing every line terminator to '\n'.
     */
    protected void assertContentEquals(IFile file, String expectedContents)
            throws Exception {
        BufferedReader reader = new BufferedReader(new InputStreamReader(
                file.getContents(), file.getCharset()));
        StringBuilder contentsBuilder = new StringBuilder();
        String line = reader.readLine();
        while (line != null) {
            contentsBuilder.append(line);
            contentsBuilder.append('\n');
            line = reader.readLine();
        }
        reader.close();
        assertEquals(expectedContents, contentsBuilder.toString());
    }

    /** Merges the given ref into the repository's current branch. */
    protected void merge(Repository repository, String refName)
            throws CoreException {
        new MergeOperation(repository, refName).execute(null);
    }

    /** Returns the git status of the repository. */
    protected Status status(Repository repository) throws Exception {
        try (Git git = new Git(repository)) {
            return git.status().call();
        }
    }

    /** Obtains the storage merger registered for the sample model provider. */
    protected <API key> createMerger() throws CoreException {
        final ModelProvider provider = ModelProvider
                .<API key>(
                        SampleModelProvider.SAMPLE_PROVIDER_ID)
                .getModelProvider();
        return Utils.getAdapter(provider, <API key>.class);
    }

    /**
     * Builds a merge context for a single workspace file between srcRev and
     * dstRev, waiting until the asynchronous diff-tree update has finished.
     */
    protected IMergeContext prepareContext(Repository repository,
            IFile workspaceFile, String srcRev, String dstRev) throws Exception {
        GitSynchronizeData gsd = new GitSynchronizeData(repository, srcRev,
                dstRev, true, Collections.<IResource> singleton(workspaceFile));
        <API key> gsds = new <API key>(gsd);
        <API key> subscriber = new <API key>(
                gsds);
        subscriber.init(new NullProgressMonitor());
        ResourceMapping mapping = AdapterUtils.adapt(workspaceFile,
                ResourceMapping.class);
        <API key> manager = new <API key>(
                subscriber.getName(), new ResourceMapping[] { mapping, },
                subscriber, true);
        manager.initialize(new NullProgressMonitor());
        <API key> mergeContext = new <API key>(
                subscriber, manager, gsds);
        // Wait for asynchronous update of the diff tree to end
        Job.getJobManager().join(mergeContext, new NullProgressMonitor());
        return mergeContext;
    }

    /**
     * Builds a model-aware merge context: the set of resources is expanded to a
     * fixpoint through the sample model provider's mappings (so all files of the
     * logical model are included), then a merge context is created over them.
     */
    protected IMergeContext prepareModelContext(Repository repository,
            IFile workspaceFile, String srcRev, String dstRev) throws Exception {
        Set<IResource> includedResources = new HashSet<IResource>(
                Arrays.asList(workspaceFile));
        Set<IResource> newResources = new HashSet<IResource>(includedResources);
        Set<ResourceMapping> allMappings = new HashSet<ResourceMapping>();
        <API key> mappingContext = <API key>.LOCAL_CONTEXT;
        ModelProvider provider = ModelProvider.<API key>(
                SampleModelProvider.SAMPLE_PROVIDER_ID).getModelProvider();
        // Fixpoint: keep expanding until the mappings add no new resources.
        do {
            Set<IResource> copy = newResources;
            newResources = new HashSet<IResource>();
            for (IResource resource : copy) {
                ResourceMapping[] mappings = provider.getMappings(resource,
                        mappingContext, new NullProgressMonitor());
                allMappings.addAll(Arrays.asList(mappings));
                newResources.addAll(collectResources(mappings, mappingContext));
            }
        } while (includedResources.addAll(newResources));
        ResourceMapping[] mappings = allMappings
                .toArray(new ResourceMapping[allMappings.size()]);
        GitSynchronizeData gsd = new GitSynchronizeData(repository, srcRev,
                dstRev, true, includedResources);
        <API key> gsds = new <API key>(gsd);
        <API key> subscriber = new <API key>(
                gsds);
        subscriber.init(new NullProgressMonitor());
        <API key> <API key> = new <API key>(
                subscriber, gsds);
        <API key> manager = new <API key>(
                subscriber.getName(), mappings, subscriber,
                <API key>, true);
        manager.initialize(new NullProgressMonitor());
        <API key> mergeContext = new <API key>(
                subscriber, manager, gsds);
        // Wait for asynchronous update of the diff tree to end
        Job.getJobManager().join(mergeContext, new NullProgressMonitor());
        return mergeContext;
    }

    /** Collects every resource covered by the traversals of the given mappings. */
    private static Set<IResource> collectResources(ResourceMapping[] mappings,
            <API key> mappingContext) throws Exception {
        final Set<IResource> resources = new HashSet<IResource>();
        for (ResourceMapping mapping : mappings) {
            ResourceTraversal[] traversals = mapping.getTraversals(
                    mappingContext, new NullProgressMonitor());
            for (ResourceTraversal traversal : traversals)
                resources.addAll(Arrays.asList(traversal.getResources()));
        }
        return resources;
    }
}
package org.eclipse.titan.designer.editors.ttcn3editor.actions;
import org.eclipse.jface.text.<API key>;
import org.eclipse.jface.text.IDocument;
import org.eclipse.jface.text.<API key>;
import org.eclipse.titan.common.parsers.Interval;
import org.eclipse.titan.common.parsers.Interval.interval_type;
import org.eclipse.titan.designer.editors.actions.<API key>;
import org.eclipse.titan.designer.editors.ttcn3editor.Reconciler;
import org.eclipse.titan.designer.editors.ttcn3editor.TTCN3Editor;
import org.eclipse.ui.IEditorPart;
/**
* @author Kristof Szabados
* */
public final class IndentAction extends <API key> {

    /**
     * Returns the document of the target editor, or null when the target editor
     * is not a TTCN3Editor.
     */
    @Override
    protected IDocument getDocument() {
        IEditorPart editorPart = getTargetEditor();
        if (editorPart instanceof TTCN3Editor) {
            return ((TTCN3Editor) editorPart).getDocument();
        }
        return null;
    }

    /**
     * Computes the indentation level for the line starting at realStartOffset
     * from the depth of its enclosing interval: comments and lines that close a
     * bracket are indented one level less (depth - 2) than ordinary lines
     * (depth - 1); empty lines get 0. Never returns a negative value.
     */
    @Override
    protected int <API key>(final IDocument document, final int realStartOffset, final int lineEndOffset,
            final Interval <API key>) throws <API key> {
        // Empty line: start and end offsets are adjacent.
        if (realStartOffset + 1 == lineEndOffset) {
            return 0;
        }
        if (interval_type.MULTILINE_COMMENT.equals(<API key>.getType())
                || <API key>.getStartOffset() == realStartOffset
                || interval_type.SINGLELINE_COMMENT.equals(<API key>.getType())) {
            // indent comments according to outer interval
            return Math.max(0, <API key>.getDepth() - 2);
        }
        if (<API key>.getEndOffset() < lineEndOffset
                && !<API key>(document.get(realStartOffset,
                        Math.max(<API key>.getEndOffset() - realStartOffset - 1, 0)))) {
            // indent lines containing closing bracket according to
            // the line with the opening bracket.
            return Math.max(0, <API key>.getDepth() - 2);
        }
        return Math.max(0, <API key>.getDepth() - 1);
    }

    /**
     * Applies the queued edits with the editor's reconciler suspended, so the
     * reformat is not re-analyzed mid-flight; reconciling is re-enabled after.
     */
    @Override
    protected void performEdits(final <API key> processor) throws <API key> {
        Reconciler reconciler = ((TTCN3Editor) getTargetEditor()).getReconciler();
        reconciler.<API key>(false);
        processor.performEdits();
        reconciler.<API key>(true);
    }
}
package org.eclipse.ceylon.ide.eclipse.code.editor;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.debug.core.DebugPlugin;
import org.eclipse.debug.core.model.IBreakpoint;
import org.eclipse.debug.ui.actions.<API key>;
import org.eclipse.jface.dialogs.ErrorDialog;
import org.eclipse.jface.text.source.IVerticalRulerInfo;
import org.eclipse.swt.events.MouseEvent;
import org.eclipse.swt.events.MouseListener;
import org.eclipse.swt.widgets.Control;
import org.eclipse.ui.texteditor.ITextEditor;
import org.eclipse.ui.texteditor.IUpdate;
import org.eclipse.ceylon.ide.eclipse.core.external.<API key>;
/**
 * Ruler action that toggles the enabled state of the breakpoint under the
 * cursor. The label and enablement are refreshed on every mouse-down so the
 * context-menu entry matches the breakpoint's current state.
 */
public class <API key> extends <API key> implements IUpdate, MouseListener {
    // Breakpoint under the ruler position at the last update(), or null.
    private IBreakpoint fBreakpoint;

    public <API key>(ITextEditor editor, IVerticalRulerInfo info) {
        super(editor, info);
        setText("&Disable Breakpoint");
        update();
        // Track mouse-downs on the ruler so the action state stays current.
        Control control= info.getControl();
        if (control != null && !control.isDisposed())
            control.addMouseListener(this);
    }

    /* (non-Javadoc)
     * @see org.eclipse.jface.action.Action#run()
     */
    public void run() {
        if (fBreakpoint != null) {
            try {
                fBreakpoint.setEnabled(!fBreakpoint.isEnabled());
                if (fBreakpoint.getMarker() != null &&
                        <API key>.isInSourceArchive(fBreakpoint.getMarker().getResource())) {
                    // Necessary since the breakpoint marker deltas will not be seen by the <API key> since
                    // it ignores the hidden resources, and the fake project used for source archive folders *is* hidden.
                    DebugPlugin.getDefault().<API key>().<API key>(fBreakpoint);
                }
            } catch (CoreException e) {
                ErrorDialog.openError(getEditor().getSite().getShell(), "Error",
                        "Failed to toggle breakpoint enablement", e.getStatus());
            }
        }
    }

    /* (non-Javadoc)
     * @see org.eclipse.ui.texteditor.IUpdate#update()
     */
    public void update() {
        fBreakpoint = getBreakpoint();
        setEnabled(fBreakpoint != null);
        if (fBreakpoint != null) {
            try {
                if (fBreakpoint.isEnabled()) {
                    setText("&Disable Breakpoint");
                } else {
                    setText("&Enable Breakpoint");
                }
            } catch (CoreException e) {
                // Swallowed: breakpoint state could not be read, so the current
                // label is kept. NOTE(review): consider logging this exception.
            }
        } else {
            setText("&Disable Breakpoint");
        }
    }

    public void mouseDoubleClick(MouseEvent e) { }

    public void mouseDown(MouseEvent e) {
        // Refresh label/enablement before a potential context-menu popup.
        update();
    }

    public void mouseUp(MouseEvent e) { }
}
package de.loskutov.eclipseskins;
import java.util.HashMap;
import java.util.Map;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.Font;
import org.eclipse.ui.internal.themes.<API key>;
import org.eclipse.ui.internal.themes.<API key>;
import org.eclipse.ui.internal.themes.CascadingTheme;
import org.eclipse.ui.themes.ITheme;
/**
* @author Andrei
*/
public class ThemeWrapper extends CascadingTheme {

    // Local overrides, consulted before delegating to the wrapped theme.
    // Typed maps replace the previous raw Map fields (private, so no API change).
    private Map<String, Integer> intMap;
    private Map<String, Boolean> booleanMap;

    /**
     * @param currentTheme the theme to wrap; lookups without a local override
     *                     fall through to it
     */
    public ThemeWrapper(ITheme currentTheme) {
        super(currentTheme,
                new <API key>(currentTheme.getColorRegistry()),
                new <API key>(currentTheme.getFontRegistry()));
        intMap = new HashMap<String, Integer>();
        booleanMap = new HashMap<String, Boolean>();
    }

    /** Local override if present, otherwise the wrapped theme's value. */
    public boolean getBoolean(String key) {
        Boolean override = booleanMap.get(key);
        if (override == null) {
            return super.getBoolean(key);
        }
        return override.booleanValue();
    }

    /** Local override if present, otherwise the wrapped theme's value. */
    public int getInt(String key) {
        Integer override = intMap.get(key);
        if (override == null) {
            return super.getInt(key);
        }
        return override.intValue();
    }

    public void setBoolean(String key, boolean value) {
        booleanMap.put(key, Boolean.valueOf(value));
    }

    public void setInt(String key, int value) {
        // Integer.valueOf uses the small-value cache; new Integer(int) is deprecated.
        intMap.put(key, Integer.valueOf(value));
    }

    /** Records a changed Boolean/Integer value locally; other types are ignored. */
    public void propertyChanged(String key, Object value) {
        if (value instanceof Boolean) {
            setBoolean(key, ((Boolean) value).booleanValue());
        } else if (value instanceof Integer) {
            setInt(key, ((Integer) value).intValue());
        }
    }

    /** Color for {@code key}, falling back to the no-focus tab color. */
    public Color getColor(String key) {
        Color c = getColorRegistry().get(key);
        if (c == null) {
            // default if configuration is broken
            c = getColorRegistry().get(ThemeConstants.TAB_COLOR_NOFOCUS);
        }
        return c;
    }

    /** Font for {@code key}, falling back to the tab font. */
    public Font getFont(String key) {
        Font f = getFontRegistry().get(key);
        if (f == null) {
            // default if configuration is broken
            f = getFontRegistry().get(ThemeConstants.TAB_FONT);
        }
        return f;
    }

    public void dispose() {
        // Drop local overrides before disposing the cascading registries.
        intMap = null;
        booleanMap = null;
        super.dispose();
    }
}
package com.flowlikeariver.nivl;
import java.util.Collection;
import java.util.function.Function;
import java.util.logging.Level;
import java.util.logging.Logger;
import static java.util.stream.Collectors.toList;
import java.util.stream.IntStream;
import javafx.geometry.Point3D;
import javafx.scene.*;
import javafx.scene.canvas.Canvas;
import javafx.scene.canvas.GraphicsContext;
import javafx.scene.paint.Color;
import javafx.scene.paint.PhongMaterial;
import javafx.scene.shape.Box;
import javafx.scene.shape.Cylinder;
import javafx.scene.shape.Mesh;
import javafx.scene.shape.MeshView;
import javafx.scene.shape.Shape3D;
import javafx.scene.shape.Sphere;
import javafx.scene.shape.TriangleMesh;
import javafx.scene.text.Font;
/**
 * Static factory methods that turn {@link Item} model objects into JavaFX 3D
 * scene-graph nodes (boxes, spheres, cylinders, hand-built cone/prism/pyramid
 * meshes, point lights and flat text), and convert pre-built nodes back into
 * items.
 */
public class Render {

    /** Ad-hoc tracing helper (currently unused within this class). */
    private static void log(String s) {
        Logger.getLogger(Render.class.getName()).log(Level.INFO, s);
    }

    /**
     * Builds a cone mesh from a ring of base vertices, one peak vertex and one
     * base-centre vertex. Each segment contributes a base triangle and a side
     * triangle.
     * <p>
     * NOTE(review): the base ring has unit radius regardless of
     * {@code size.width} — confirm whether the radius should scale with width.
     */
    public static Mesh createConeMesh(Size size) {
        int segmentCount = 32;
        int vertexCount = segmentCount + 2; // ring + peak + base centre
        int faceCount = segmentCount * 2;
        // Centre vertically: base plane at -height/2, peak at +height/2.
        float baseY = (float) -size.height / 2.0f;

        float[] points = new float[3 * vertexCount];
        IntStream.range(0, segmentCount).forEach(i -> {
            double angle = 2 * Math.PI * ((double) i / segmentCount);
            points[3 * i] = (float) Math.cos(angle);
            points[3 * i + 1] = baseY;
            points[3 * i + 2] = (float) Math.sin(angle);
        });
        // Vertex [segmentCount] is the peak, [segmentCount + 1] the base centre.
        points[3 * segmentCount] = 0;
        points[3 * segmentCount + 1] = (float) size.height + baseY;
        points[3 * segmentCount + 2] = 0;
        points[3 * segmentCount + 3] = 0;
        points[3 * segmentCount + 4] = baseY;
        points[3 * segmentCount + 5] = 0;

        TriangleMesh m = new TriangleMesh();
        m.getPoints().addAll(points, 0, 3 * vertexCount);
        m.getTexCoords().addAll(0, 0, 0, 1, 1, 0, 1, 1);

        int[] faces = new int[6 * faceCount];
        IntStream.range(0, segmentCount).forEach(i -> {
            int next = (i == segmentCount - 1) ? 0 : i + 1; // wrap around the ring
            // Base triangle: ring[i] -> ring[next] -> base centre.
            faces[12 * i] = i;
            faces[12 * i + 1] = 0; // tex
            faces[12 * i + 2] = next;
            faces[12 * i + 3] = 1; // tex
            faces[12 * i + 4] = segmentCount + 1; // centre
            faces[12 * i + 5] = 2; // tex
            // Side triangle: peak -> ring[next] -> ring[i].
            faces[12 * i + 6] = segmentCount; // peak
            faces[12 * i + 7] = 0; // tex
            faces[12 * i + 8] = next;
            faces[12 * i + 9] = 1; // tex
            faces[12 * i + 10] = i;
            faces[12 * i + 11] = 2; // tex
        });
        m.getFaces().addAll(faces, 0, 6 * faceCount);

        // Smoothing group 0 for every face: faceted (flat) shading.
        int[] faceSmoothingGroups = IntStream.range(0, faceCount).map(i -> 0).toArray();
        m.<API key>().addAll(faceSmoothingGroups, 0, faceCount);
        return m;
    }

    /** Wraps the cone mesh in a single {@link MeshView}. */
    private static Node createCone2(Size size) {
        return new MeshView(createConeMesh(size));
    }

    /** Applies {@code pm} to {@code n}, recursing into groups. */
    private static void setMaterial(Node n, PhongMaterial pm) {
        if (n instanceof Shape3D) {
            ((Shape3D) n).setMaterial(pm);
        }
        else if (n instanceof Group) {
            ((Group) n).getChildren().forEach(child -> setMaterial(child, pm));
        }
    }

    /**
     * Triangle-based pyramid: three base corners (O, A, B) plus an apex (C)
     * at height {@code h} above the base plane.
     */
    private static Mesh createPyramid(double w, double h, double d) {
        TriangleMesh mesh = new TriangleMesh();
        float halfW = (float) (w / 2);
        float offsetH = (float) (h / 2);
        float partD = (float) (d / 2);
        mesh.getPoints().addAll(
            -halfW, -offsetH, partD,      // O
            halfW, -offsetH, partD,       // A
            0, -offsetH, -partD,          // B
            0, (float) (h - offsetH), 0   // C (apex)
        );
        mesh.getTexCoords().addAll(
            0.0f, 0.0f,
            0.0f, 1.0f,
            1.0f, 0.0f,
            1.0f, 1.0f
        );
        mesh.getFaces().addAll(
            0, 0, 2, 1, 1, 2, // OBA
            0, 0, 3, 1, 2, 2, // OCB
            0, 0, 1, 1, 3, 2, // OAC
            1, 0, 2, 1, 3, 2  // ABC
        );
        mesh.<API key>().addAll(0, 0, 0, 0);
        return mesh;
    }

    private static Node createPyramid(Size size) {
        return new MeshView(createPyramid(size.width, size.height, size.depth));
    }

    /**
     * Triangular prism: top triangle (O, A, B) and bottom triangle (C, D, E)
     * joined by three rectangular sides (each split into two triangles).
     */
    private static Mesh createPrism(double w, double h, double d) {
        TriangleMesh mesh = new TriangleMesh();
        float halfW = (float) (w / 2);
        float offsetH = (float) h / 2;
        float partD = (float) (d / 2);
        mesh.getPoints().addAll(
            -halfW, offsetH, partD,
            halfW, offsetH, partD,
            0, offsetH, -partD,
            -halfW, -offsetH, partD,
            halfW, -offsetH, partD,
            0, -offsetH, -partD
        );
        mesh.getTexCoords().addAll(
            0.0f, 0.0f,
            0.0f, 1.0f,
            1.0f, 0.0f,
            1.0f, 1.0f
        );
        mesh.getFaces().addAll(
            0, 0, 1, 1, 2, 2, // OAB
            0, 0, 3, 1, 1, 2, // OCA
            1, 0, 3, 1, 4, 2, // ACD
            0, 0, 2, 1, 5, 2, // OBE
            0, 0, 5, 1, 3, 2, // OEC
            2, 0, 1, 1, 4, 2, // BAD
            2, 0, 4, 1, 5, 2, // BDE
            3, 0, 5, 1, 4, 2  // CED
        );
        mesh.<API key>().addAll(0, 0, 0, 0, 0, 0, 0, 0);
        return mesh;
    }

    private static Node createPrism(Size size) {
        return new MeshView(createPrism(size.width, size.height, size.depth));
    }

    /** Fallback shape for unknown kinds: an axis-aligned box. */
    private static Node createDefault(Size size) {
        return new Box(size.width, size.height, size.depth);
    }

    private static Node createPointLight() {
        return new PointLight();
    }

    /**
     * Renders {@code s} as 10pt stroked text on a small canvas, flipped about
     * the X axis (so it is not mirrored in the 3D view), scaled down by 10x
     * and centred on the origin.
     */
    private static Node createText(String s) {
        int h = 14;    // canvas height in px
        int charW = 6; // assumed glyph advance at 10pt — TODO confirm
        Canvas canvas = new Canvas(charW * s.length(), h);
        GraphicsContext graphicsContext = canvas.<API key>();
        graphicsContext.setStroke(Color.BLACK);
        graphicsContext.setFont(Font.font(10));
        graphicsContext.strokeText(s, 0, 10);
        // Centre on the origin; integer division reproduces the original layout.
        canvas.setTranslateX(s.length() * -charW / 2);
        canvas.setTranslateY(-h / 2);
        canvas.setRotate(180);
        canvas.setRotationAxis(new Point3D(1, 0, 0));
        canvas.setScaleX(0.1);
        canvas.setScaleY(0.1);
        return new Group(canvas);
    }

    /** Creates the scene-graph node matching the item's kind and size. */
    private static Node createNode(Item item) {
        Size size = item.getSize();
        switch (item.getKind()) {
        case BOX:
            return new Box(size.width, size.height, size.depth);
        case SPHERE:
            return new Sphere(size.width / 2);
        case CYLINDER:
            return new Cylinder(size.width / 2, size.height);
        case CONE:
            return createCone2(size);
        case PRISM:
            return createPrism(size);
        case PYRAMID:
            return createPyramid(size);
        case POINTLIGHT:
            return createPointLight();
        case TEXT:
            return createText(item.getContent());
        case NODE:
            // Pre-rendered items carry their node directly.
            return ((RenderedItem) item).getNode();
        default:
            return createDefault(size);
        }
    }

    /** Copies the item's location onto the node's translation. */
    private static void translateNode(Item item, Node n) {
        Location loc = item.getLocation();
        n.setTranslateX(loc.getX());
        n.setTranslateY(loc.getY());
        n.setTranslateZ(loc.getZ());
    }

    /**
     * Wraps {@code n} in one rotation {@link Group} per configured rotation,
     * innermost first; {@code vp.n} is in radians. The combiner throws because
     * this reduction is order-dependent and must never run in parallel.
     */
    private static Node rotateNode(Item item, Node n) {
        return item.getRotations().stream().
            reduce(n,
                (acc, vp) -> {
                    Group rotGroup = new Group(acc);
                    rotGroup.setRotationAxis(new Point3D(vp.x, vp.y, vp.z));
                    rotGroup.setRotate(vp.n * 180 / Math.PI);
                    return rotGroup;
                },
                (a, b) -> {
                    throw new RuntimeException("parallel");
                });
    }

    /**
     * Applies color vector properties from the item: EMISSIVE becomes the
     * color of a point light; DIFFUSE/SPECULAR become a Phong material on the
     * node (recursively for groups).
     */
    private static void addItemProperties(Item item, Node n) {
        if (n instanceof PointLight) {
            if (item.getVectorProperties().containsKey(VectorProperty.EMISSIVE)) {
                VectorProperty vp = item.getVectorProperties().get(VectorProperty.EMISSIVE);
                ((LightBase) n).setColor(new Color(vp.x, vp.y, vp.z, 1));
            }
        }
        if (item.getVectorProperties().containsKey(VectorProperty.DIFFUSE)
                || item.getVectorProperties().containsKey(VectorProperty.SPECULAR)) {
            PhongMaterial pm = new PhongMaterial();
            if (item.getVectorProperties().containsKey(VectorProperty.DIFFUSE)) {
                VectorProperty vp = item.getVectorProperties().get(VectorProperty.DIFFUSE);
                pm.setDiffuseColor(new Color(vp.x, vp.y, vp.z, 1));
            }
            if (item.getVectorProperties().containsKey(VectorProperty.SPECULAR)) {
                VectorProperty vp = item.getVectorProperties().get(VectorProperty.SPECULAR);
                pm.setSpecularColor(new Color(vp.x, vp.y, vp.z, 1));
            }
            setMaterial(n, pm);
        }
    }

    /** Renders one item: create, translate, decorate, then apply rotations. */
    public static Node renderItem(Item item) {
        Node n = createNode(item);
        translateNode(item, n);
        addItemProperties(item, n);
        return rotateNode(item, n);
    }

    /** Renders all visible items into one {@link Group}. */
    public static Group render(Collection<Item> items) {
        return new Group(items.stream()
            .filter(Item::isVisible)
            .map(Render::renderItem)
            .collect(toList()));
    }

    /**
     * Smallest value of {@code getDim} over all {@link Shape3D} leaves under
     * {@code node}; {@link Double#MAX_VALUE} when there are none. Previously
     * an empty Group hit {@code Optional.get()} and threw
     * NoSuchElementException; it now uses the same fallback as non-shape leaves.
     */
    private static double minDim(Node node, Function<Node, Double> getDim) {
        if (node instanceof Shape3D) {
            return getDim.apply(node);
        }
        else if (node instanceof Group) {
            return ((Group) node).getChildren().stream()
                .map(child -> minDim(child, getDim))
                .min(Double::compare)
                .orElse(Double.MAX_VALUE);
        }
        else {
            return Double.MAX_VALUE;
        }
    }

    private static double getBoundsX(Node node) {
        return node.getLayoutBounds().getMaxX() - node.getLayoutBounds().getMinX();
    }

    private static double getBoundsY(Node node) {
        return node.getLayoutBounds().getMaxY() - node.getLayoutBounds().getMinY();
    }

    private static double getBoundsZ(Node node) {
        return node.getLayoutBounds().getMaxZ() - node.getLayoutBounds().getMinZ();
    }

    private static double minX(Node node) {
        return minDim(node, (shape -> (shape.getTranslateX() - (getBoundsX(shape) / 2))));
    }

    private static double minY(Node node) {
        return minDim(node, (shape -> (shape.getTranslateY() - (getBoundsY(shape) / 2))));
    }

    private static double minZ(Node node) {
        return minDim(node, (shape -> (shape.getTranslateZ() - (getBoundsZ(shape) / 2))));
    }

    /**
     * Wraps a pre-built node as a rendered {@link Item}, translating it so its
     * bounding box is centred on the origin. The extra Group prevents later
     * item translations from overwriting this centring translate.
     */
    public static Item createItem(Node node) {
        double w = getBoundsX(node);
        double h = getBoundsY(node);
        double d = getBoundsZ(node);
        node.setTranslateX(-w / 2 - minX(node));
        node.setTranslateY(-h / 2 - minY(node));
        node.setTranslateZ(-d / 2 - minZ(node));
        Item item = new RenderedItemImpl(new Group(node));
        item.setSize(new Size(w, h, d));
        return item;
    }
}
package org.openhab.binding.onkyo;
import java.util.Set;
import org.eclipse.smarthome.core.thing.ThingTypeUID;
import com.google.common.collect.ImmutableSet;
/**
* The {@link OnkyoBinding} class defines common constants, which are
* used across the whole binding.
*
* @author Paul Frank - Initial contribution
* @author Pauli Anttila
*/
public class <API key> {
public static final String BINDING_ID = "onkyo";
// List of all supported Onkyo Models
public static final String ONKYO_TYPE_TXNR414 = "TX-NR414";
public static final String ONKYO_TYPE_TXNR509 = "TX-NR509";
public static final String ONKYO_TYPE_TXNR515 = "TX-NR515";
public static final String ONKYO_TYPE_TXNR525 = "TX-NR525";
public static final String ONKYO_TYPE_TXNR535 = "TX-NR535";
public static final String ONKYO_TYPE_TXNR555 = "TX-NR555";
public static final String ONKYO_TYPE_TXNR616 = "TX-NR616";
public static final String ONKYO_TYPE_TXNR626 = "TX-NR626";
public static final String ONKYO_TYPE_TXNR646 = "TX-NR646";
public static final String ONKYO_TYPE_TXNR656 = "TX-NR656";
public static final String ONKYO_TYPE_TXNR717 = "TX-NR717";
public static final String ONKYO_TYPE_TXNR727 = "TX-NR727";
public static final String ONKYO_TYPE_TXNR747 = "TX-NR747";
public static final String ONKYO_TYPE_TXNR757 = "TX-NR757";
public static final String ONKYO_TYPE_TXNR818 = "TX-NR818";
public static final String ONKYO_TYPE_TXNR828 = "TX-NR828";
public static final String ONKYO_TYPE_TXNR838 = "TX-NR838";
// Extend this set with all successfully tested models
public final static Set<String> <API key> = ImmutableSet.of(ONKYO_TYPE_TXNR414, ONKYO_TYPE_TXNR509,
ONKYO_TYPE_TXNR515, ONKYO_TYPE_TXNR525, ONKYO_TYPE_TXNR535, ONKYO_TYPE_TXNR555, ONKYO_TYPE_TXNR616,
ONKYO_TYPE_TXNR626, ONKYO_TYPE_TXNR646, ONKYO_TYPE_TXNR656, ONKYO_TYPE_TXNR717, ONKYO_TYPE_TXNR727,
ONKYO_TYPE_TXNR747, ONKYO_TYPE_TXNR757, ONKYO_TYPE_TXNR818, ONKYO_TYPE_TXNR828, ONKYO_TYPE_TXNR838);
// List of all Thing Type UIDs
public final static ThingTypeUID THING_TYPE_ONKYOAV = new ThingTypeUID(BINDING_ID, "onkyoAVR");
public final static ThingTypeUID <API key> = new ThingTypeUID(BINDING_ID, "onkyoUnsupported");
public final static ThingTypeUID THING_TYPE_TXNR414 = new ThingTypeUID(BINDING_ID, ONKYO_TYPE_TXNR414);
public final static ThingTypeUID THING_TYPE_TXNR509 = new ThingTypeUID(BINDING_ID, ONKYO_TYPE_TXNR509);
public final static ThingTypeUID THING_TYPE_TXNR515 = new ThingTypeUID(BINDING_ID, ONKYO_TYPE_TXNR515);
public final static ThingTypeUID THING_TYPE_TXNR525 = new ThingTypeUID(BINDING_ID, ONKYO_TYPE_TXNR525);
public final static ThingTypeUID THING_TYPE_TXNR535 = new ThingTypeUID(BINDING_ID, ONKYO_TYPE_TXNR535);
public final static ThingTypeUID THING_TYPE_TXNR555 = new ThingTypeUID(BINDING_ID, ONKYO_TYPE_TXNR555);
public final static ThingTypeUID THING_TYPE_TXNR616 = new ThingTypeUID(BINDING_ID, ONKYO_TYPE_TXNR616);
public final static ThingTypeUID THING_TYPE_TXNR626 = new ThingTypeUID(BINDING_ID, ONKYO_TYPE_TXNR626);
public final static ThingTypeUID THING_TYPE_TXNR646 = new ThingTypeUID(BINDING_ID, ONKYO_TYPE_TXNR646);
public final static ThingTypeUID THING_TYPE_TXNR656 = new ThingTypeUID(BINDING_ID, ONKYO_TYPE_TXNR656);
public final static ThingTypeUID THING_TYPE_TXNR717 = new ThingTypeUID(BINDING_ID, ONKYO_TYPE_TXNR717);
public final static ThingTypeUID THING_TYPE_TXNR727 = new ThingTypeUID(BINDING_ID, ONKYO_TYPE_TXNR727);
public final static ThingTypeUID THING_TYPE_TXNR747 = new ThingTypeUID(BINDING_ID, ONKYO_TYPE_TXNR747);
public final static ThingTypeUID THING_TYPE_TXNR757 = new ThingTypeUID(BINDING_ID, ONKYO_TYPE_TXNR757);
public final static ThingTypeUID THING_TYPE_TXNR818 = new ThingTypeUID(BINDING_ID, ONKYO_TYPE_TXNR818);
public final static ThingTypeUID THING_TYPE_TXNR828 = new ThingTypeUID(BINDING_ID, ONKYO_TYPE_TXNR828);
public final static ThingTypeUID THING_TYPE_TXNR838 = new ThingTypeUID(BINDING_ID, ONKYO_TYPE_TXNR838);
public final static Set<ThingTypeUID> <API key> = ImmutableSet.of(THING_TYPE_ONKYOAV,
<API key>, THING_TYPE_TXNR414, THING_TYPE_TXNR515, THING_TYPE_TXNR525,
THING_TYPE_TXNR535, THING_TYPE_TXNR555, THING_TYPE_TXNR616, THING_TYPE_TXNR626, THING_TYPE_TXNR646,
THING_TYPE_TXNR656, THING_TYPE_TXNR717, THING_TYPE_TXNR727, THING_TYPE_TXNR747, THING_TYPE_TXNR757,
THING_TYPE_TXNR818, THING_TYPE_TXNR828, THING_TYPE_TXNR838);
// List of thing parameters names
public final static String HOST_PARAMETER = "ipAddress";
public final static String TCP_PORT_PARAMETER = "port";
public final static String UDN_PARAMETER = "udn";
public final static String REFRESH_INTERVAL = "refreshInterval";
// List of all Channel ids
public final static String CHANNEL_POWER = "zone1#power";
public final static String CHANNEL_INPUT = "zone1#input";
public final static String CHANNEL_MUTE = "zone1#mute";
public final static String CHANNEL_VOLUME = "zone1#volume";
public final static String CHANNEL_POWERZONE2 = "zone2#power";
public final static String CHANNEL_INPUTZONE2 = "zone2#input";
public final static String CHANNEL_MUTEZONE2 = "zone2#mute";
public final static String CHANNEL_VOLUMEZONE2 = "zone2#volume";
public final static String CHANNEL_POWERZONE3 = "zone3#power";
public final static String CHANNEL_INPUTZONE3 = "zone3#input";
public final static String CHANNEL_MUTEZONE3 = "zone3#mute";
public final static String CHANNEL_VOLUMEZONE3 = "zone3#volume";
public final static String CHANNEL_CONTROL = "player#control";
public final static String <API key> = "player#currentPlayingTime";
public final static String CHANNEL_ARTIST = "player#artist";
public final static String CHANNEL_TITLE = "player#title";
public final static String CHANNEL_ALBUM = "player#album";
public static final String CHANNEL_ALBUM_ART = "player#albumArt";
public static final String <API key> = "player#albumArtUrl";
public final static String CHANNEL_LISTENMODE = "player#listenmode";
public static final String CHANNEL_PLAY_URI = "player#playuri";
public final static String <API key> = "netmenu#title";
public final static String <API key> = "netmenu#control";
public final static String <API key> = "netmenu#selection";
public final static String CHANNEL_NET_MENU0 = "netmenu#item0";
public final static String CHANNEL_NET_MENU1 = "netmenu#item1";
public final static String CHANNEL_NET_MENU2 = "netmenu#item2";
public final static String CHANNEL_NET_MENU3 = "netmenu#item3";
public final static String CHANNEL_NET_MENU4 = "netmenu#item4";
public final static String CHANNEL_NET_MENU5 = "netmenu#item5";
public final static String CHANNEL_NET_MENU6 = "netmenu#item6";
public final static String CHANNEL_NET_MENU7 = "netmenu#item7";
public final static String CHANNEL_NET_MENU8 = "netmenu#item8";
public final static String CHANNEL_NET_MENU9 = "netmenu#item9";
// Used for Discovery service
public final static String MANUFACTURER = "ONKYO";
public final static String UPNP_DEVICE_TYPE = "MediaRenderer";
} |
package org.eclipse.kura.net.dns;
import java.util.Set;
import org.eclipse.kura.net.IP6Address;
import org.eclipse.kura.net.NetworkPair;
/**
* DNS server configurations for IPv6 networks
*
* @author eurotech
*
*/
public class DnsServerConfigIP6 extends DnsServerConfigIP<IP6Address> implements DnsServerConfig6 {

    /**
     * @param forwarders      upstream IPv6 DNS servers to forward queries to
     * @param allowedNetworks IPv6 networks permitted to query this DNS server
     */
    public DnsServerConfigIP6(Set<IP6Address> forwarders, Set<NetworkPair<IP6Address>> allowedNetworks) {
        super(forwarders, allowedNetworks);
    }

    @Override
    public boolean isValid() {
        // TODO Auto-generated method stub
        // NOTE(review): this stub unconditionally reports the configuration as
        // invalid; implement real validation or document why false is correct.
        return false;
    }
}
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vJAXB 2.1.10 in JDK 6
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2012.10.26 at 02:11:54 PM CEST
@javax.xml.bind.annotation.XmlSchema(namespace = "http://eurotech.com/esf/2.0", xmlns = {
@XmlNs(namespaceURI = "http://eurotech.com/esf/2.0", prefix = "esf"),
@XmlNs(namespaceURI = "http:
}, elementFormDefault = javax.xml.bind.annotation.XmlNsForm.QUALIFIED, <API key> = javax.xml.bind.annotation.XmlNsForm.UNQUALIFIED)
package org.eclipse.kapua.service.device.call.kura.model.configuration;
import javax.xml.bind.annotation.XmlNs; |
package org.eclipse.n4js.flowgraphs.analysers;
import java.util.LinkedList;
import java.util.List;
import org.eclipse.n4js.flowgraphs.FGUtils;
import org.eclipse.n4js.flowgraphs.dataflow.Assumption;
import org.eclipse.n4js.flowgraphs.dataflow.DataFlowVisitor;
import org.eclipse.n4js.flowgraphs.dataflow.EffectInfo;
import org.eclipse.n4js.flowgraphs.dataflow.EffectType;
import org.eclipse.n4js.flowgraphs.dataflow.PartialResult;
import org.eclipse.n4js.n4JS.ControlFlowElement;
import org.eclipse.n4js.n4JS.IdentifierRef;
/**
* This is a test implementation using the data flow API. It is not executed.
* <p>
* Analysis to detect uses of {@link IdentifierRef}s that are located in the control flow before their corresponding
* variables are declared.
*/
public class <API key> extends DataFlowVisitor {

    /** @return all {@link IdentifierRef}s that are used before declared */
    public List<ControlFlowElement> <API key>() {
        List<ControlFlowElement> useSites = new LinkedList<>();
        for (Assumption assumption : failedAssumptions.values()) {
            for (PartialResult branch : assumption.failedBranches) {
                useSites.add(((UsedBeforeFailed) branch).useLocation);
            }
        }
        return useSites;
    }

    @Override
    public void visitEffect(EffectInfo effect, ControlFlowElement cfe) {
        // Only declarations start a new assumption; all other effects are
        // checked against already-active assumptions by the framework.
        if (effect.type != EffectType.Declaration) {
            return;
        }
        assume(new IsNotUsedBefore(effect));
    }

    /** Assumption that a declared symbol has no uses preceding its declaration. */
    class IsNotUsedBefore extends Assumption {

        IsNotUsedBefore(EffectInfo effect) {
            super(effect.location, effect.symbol);
        }

        IsNotUsedBefore(IsNotUsedBefore copy) {
            super(copy);
        }

        @Override
        public Assumption copy() {
            return new IsNotUsedBefore(this);
        }

        @Override
        protected boolean followAliases() {
            // aliases of the declared symbol are not tracked
            return false;
        }

        @Override
        protected PartialResult holdsOnEffect(EffectInfo effect, ControlFlowElement container) {
            // Any effect on the symbol before its declaration is a violation.
            return new UsedBeforeFailed(effect.location);
        }
    }

    /** Failure result that remembers where the premature use happened. */
    static class UsedBeforeFailed extends PartialResult.Failed {
        /** The use site location */
        public final ControlFlowElement useLocation;

        UsedBeforeFailed(ControlFlowElement useLocation) {
            this.useLocation = useLocation;
        }

        @Override
        public String toString() {
            return type + ": used at " + FGUtils.getSourceText(useLocation);
        }

        @Override
        public Object[] <API key>() {
            return new Object[] { useLocation };
        }
    }
}
/*
* version.c: version string
*/
#include <stdio.h>
#include <ctype.h>
#include <string.h>
#ifndef VERSION
#define VER "anonymous build (" __DATE__ " " __TIME__ ")"
#else
#define VER "version " VERSION
#endif

/* printf template for the public version string; %s receives the SCM tag. */
#define VERSTRFMT "v1.0 (NSIS Custom Build, %s)"
#define VERSTRSCMREVMAX 20

/* -2 drops the "%s" from the template size; the SCM tag replaces it. */
static char versionbuf[sizeof(VERSTRFMT)-2+VERSTRSCMREVMAX];
const char *const version = versionbuf;

/*
 * Fills versionbuf with the build version string, extracting the numeric
 * revision from the SVN $Revision$ keyword (falls back to "?" when the
 * keyword was not expanded).
 */
void initversionstring(void)
{
    char scmverbuf[VERSTRSCMREVMAX+1];
    int cchsvnrev = 0;
    const char *svnproprev = "$Revision: 6192 $";

    if ('$' == *svnproprev++)
    {
        const char *p;
        /* Skip to the first digit (or the closing '$' if none).
           Cast to unsigned char: isdigit() on a negative char is UB. */
        while ('$' != *svnproprev && !isdigit((unsigned char)*svnproprev))
            svnproprev++;
        for (p = svnproprev; isdigit((unsigned char)*p); ++p)
            cchsvnrev++;
    }
    if (!cchsvnrev)
    {
        cchsvnrev = 1;
        svnproprev = "?";
    }
    /* Clamp so "SVN:r" + digits + NUL always fits in scmverbuf. */
    if (cchsvnrev > (int)(sizeof(scmverbuf) - sizeof("SVN:r")))
        cchsvnrev = (int)(sizeof(scmverbuf) - sizeof("SVN:r"));
    strcpy(scmverbuf, "SVN:r");
    strncat(scmverbuf, svnproprev, cchsvnrev);
    /* snprintf instead of sprintf: truncates safely instead of overflowing. */
    snprintf(versionbuf, sizeof(versionbuf), VERSTRFMT, scmverbuf);
}
package com.odcgroup.t24.server.properties.wizards;
import org.eclipse.ui.dialogs.<API key>;
public class <API key> extends
        <API key> {

    public <API key>(String pageName) {
        super(pageName);
        setTitle("Create a Server Project");
        setDescription("Creates a Server Project with a server.properties file");
        setPageComplete(false);
    }

    /**
     * The page is valid when the standard project-page checks pass and the
     * project name, if present, follows the '[component]-server' convention.
     */
    @Override
    protected boolean validatePage() {
        if (!super.validatePage()) {
            return false;
        }
        String projectName = this.getProjectName();
        if (projectName == null || projectName.endsWith("-server")) {
            return true;
        }
        setErrorMessage("Server Project name must comply with the naming convention '[component]-server'!");
        return false;
    }
}
package org.opendaylight.protocol.bgp.linkstate.impl.attribute;
import static org.opendaylight.protocol.bgp.linkstate.impl.attribute.sr.binding.sid.sub.tlvs.Ipv4PrefixSidParser.PREFIX_SID;
import static org.opendaylight.protocol.bgp.linkstate.impl.attribute.sr.binding.sid.sub.tlvs.Ipv6PrefixSidParser.IPV6_PREFIX_SID;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Multimap;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufUtil;
import io.netty.buffer.Unpooled;
import java.util.ArrayList;
import java.util.List;
import java.util.Map.Entry;
import org.opendaylight.protocol.bgp.linkstate.impl.attribute.sr.<API key>;
import org.opendaylight.protocol.bgp.linkstate.impl.attribute.sr.<API key>;
import org.opendaylight.protocol.bgp.linkstate.impl.attribute.sr.RangeTlvParser;
import org.opendaylight.protocol.bgp.linkstate.impl.attribute.sr.<API key>;
import org.opendaylight.protocol.bgp.linkstate.spi.TlvUtil;
import org.opendaylight.protocol.util.BitArray;
import org.opendaylight.protocol.util.ByteArray;
import org.opendaylight.protocol.util.Ipv4Util;
import org.opendaylight.protocol.util.Ipv6Util;
import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.inet.types.rev130715.IpAddressNoZone;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.bgp.linkstate.rev200120.ExtendedRouteTag;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.bgp.linkstate.rev200120.IgpBits.UpDown;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.bgp.linkstate.rev200120.ProtocolId;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.bgp.linkstate.rev200120.RouteTag;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.bgp.linkstate.rev200120.linkstate.path.attribute.LinkStateAttribute;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.bgp.linkstate.rev200120.linkstate.path.attribute.link.state.attribute.<API key>;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.bgp.linkstate.rev200120.linkstate.path.attribute.link.state.attribute.<API key>;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.bgp.linkstate.rev200120.linkstate.path.attribute.link.state.attribute.prefix.attributes._case.PrefixAttributes;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.bgp.linkstate.rev200120.linkstate.path.attribute.link.state.attribute.prefix.attributes._case.<API key>;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.bgp.linkstate.rev200120.prefix.state.IgpBits;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.bgp.linkstate.rev200120.prefix.state.IgpBitsBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.bgp.linkstate.rev200120.prefix.state.Ipv6SrPrefix;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.bgp.linkstate.rev200120.prefix.state.SrBindingSidLabels;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.bgp.linkstate.rev200120.prefix.state.SrPrefix;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.bgp.linkstate.rev200120.prefix.state.SrRange;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.network.concepts.rev131125.IgpMetric;
import org.opendaylight.yangtools.yang.common.netty.ByteBufUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@VisibleForTesting
public final class <API key> {
/* Segment routing TLV */
public static final int BINDING_SID = 1160;
private static final Logger LOG = LoggerFactory.getLogger(<API key>.class);
private static final int ROUTE_TAG_LENGTH = 4;
private static final int <API key> = 8;
private static final int FLAGS_SIZE = 8;
private static final int UP_DOWN_BIT = 0;
private static final int OSPF_NO_UNICAST = 1;
private static final int OSPF_LOCAL_ADDRESS = 2;
private static final int <API key> = 3;
/* Prefix Attribute TLVs */
private static final int IGP_FLAGS = 1152;
private static final int ROUTE_TAG = 1153;
private static final int EXTENDED_ROUTE_TAG = 1154;
private static final int PREFIX_METRIC = 1155;
private static final int FORWARDING_ADDRESS = 1156;
private static final int PREFIX_OPAQUE = 1157;
private static final int RANGE = 1159;
private <API key>() {
    // utility class: static parse/serialize helpers only, not instantiable
}
/**
* Parse prefix attributes.
*
* @param attributes key is the tlv type and value are the value bytes of the tlv
* @param protocolId to differentiate parsing methods
* @return {@link LinkStateAttribute}
*/
static LinkStateAttribute <API key>(final Multimap<Integer, ByteBuf> attributes,
        final ProtocolId protocolId) {
    final <API key> builder = new <API key>();
    // Route tags may arrive spread over several TLV occurrences, so they are
    // accumulated across all entries before being set on the builder.
    final List<RouteTag> routeTags = new ArrayList<>();
    final List<ExtendedRouteTag> exRouteTags = new ArrayList<>();
    for (final Entry<Integer, ByteBuf> entry : attributes.entries()) {
        final int key = entry.getKey();
        final ByteBuf value = entry.getValue();
        LOG.trace("Prefix attribute TLV {}", key);
        // Dispatch by TLV type; unknown types are logged and skipped.
        parseAttribute(key, value, protocolId, builder, routeTags, exRouteTags);
    }
    LOG.trace("Finished parsing Prefix Attributes.");
    // Tag lists are set unconditionally (possibly empty).
    builder.setRouteTags(routeTags);
    builder.setExtendedTags(exRouteTags);
    return new <API key>().setPrefixAttributes(builder.build()).build();
}
/**
 * Parses a single prefix-attribute TLV into {@code builder}, or into the
 * shared tag accumulators for ROUTE_TAG / EXTENDED_ROUTE_TAG. Unknown TLV
 * types are logged at WARN and otherwise ignored.
 */
private static void parseAttribute(final int key, final ByteBuf value, final ProtocolId protocolId,
        final <API key> builder, final List<RouteTag> routeTags,
        final List<ExtendedRouteTag> exRouteTags) {
    switch (key) {
        case IGP_FLAGS:
            parseIgpFags(builder, value);
            break;
        case ROUTE_TAG:
            parseRouteTags(routeTags, value);
            break;
        case EXTENDED_ROUTE_TAG:
            <API key>(exRouteTags, value);
            break;
        case PREFIX_METRIC:
            final IgpMetric metric = new IgpMetric(ByteBufUtils.readUint32(value));
            builder.setPrefixMetric(metric);
            LOG.debug("Parsed Metric: {}", metric);
            break;
        case FORWARDING_ADDRESS:
            // May be null for unsupported address lengths (logged by the helper).
            final IpAddressNoZone fwdAddress = <API key>(value);
            builder.<API key>(fwdAddress);
            LOG.debug("Parsed FWD Address: {}", fwdAddress);
            break;
        case PREFIX_OPAQUE:
            // Opaque data is intentionally dropped, only logged for debugging.
            if (LOG.isDebugEnabled()) {
                LOG.debug("Parsed Opaque value: {}, not preserving it", ByteBufUtil.hexDump(value));
            }
            break;
        case PREFIX_SID:
            final SrPrefix prefix = <API key>.parseSrPrefix(value, protocolId);
            builder.setSrPrefix(prefix);
            LOG.debug("Parsed SR Prefix: {}", prefix);
            break;
        case IPV6_PREFIX_SID:
            final Ipv6SrPrefix ipv6Prefix = <API key>.parseSrIpv6Prefix(value);
            builder.setIpv6SrPrefix(ipv6Prefix);
            LOG.debug("Parsed Ipv6 SR Prefix: {}", ipv6Prefix);
            break;
        case RANGE:
            final SrRange range = RangeTlvParser.parseSrRange(value, protocolId);
            builder.setSrRange(range);
            LOG.debug("Parsed SR Range: {}", range);
            break;
        case BINDING_SID:
            parseBindingSid(builder, value, protocolId);
            break;
        default:
            LOG.warn("TLV {} is not a valid prefix attribute, ignoring it", key);
    }
}
private static void parseIgpFags(final <API key> builder, final ByteBuf value) {
final BitArray flags = BitArray.valueOf(value, FLAGS_SIZE);
final boolean upDownBit = flags.get(UP_DOWN_BIT);
builder.setIgpBits(new IgpBitsBuilder().setUpDown(new UpDown(upDownBit)).setIsIsUpDown(upDownBit)
.setOspfNoUnicast(flags.get(OSPF_NO_UNICAST))
.setOspfLocalAddress(flags.get(OSPF_LOCAL_ADDRESS))
.<API key>(flags.get(<API key>)).build());
LOG.debug("Parsed IGP flag (up/down bit) : {}", upDownBit);
}
/**
 * Parses one Binding SID TLV and appends it to the builder's label list,
 * creating the list on first use.
 */
private static void parseBindingSid(final <API key> builder, final ByteBuf value,
        final ProtocolId protocolId) {
    List<SrBindingSidLabels> labels = builder.<API key>();
    if (labels == null) {
        labels = new ArrayList<>();
        builder.<API key>(labels);
    }
    final SrBindingSidLabels label = <API key>.<API key>(value, protocolId);
    labels.add(label);
    LOG.debug("Parsed SR Binding SID {}", label);
}
/** Consumes fixed-width (4-byte) route tags until the TLV value is exhausted. */
private static void parseRouteTags(final List<RouteTag> routeTags, final ByteBuf value) {
    while (value.isReadable()) {
        final byte[] raw = ByteArray.readBytes(value, ROUTE_TAG_LENGTH);
        final RouteTag routeTag = new RouteTag(raw);
        LOG.debug("Parsed Route Tag: {}", routeTag);
        routeTags.add(routeTag);
    }
}
/**
 * Reads fixed-width extended route tags from the buffer until it is
 * exhausted, appending each one to the supplied list.
 *
 * @param exRouteTags destination list for the parsed extended tags
 * @param value buffer containing zero or more fixed-width extended tags
 */
private static void <API key>(final List<ExtendedRouteTag> exRouteTags, final ByteBuf value) {
while (value.isReadable()) {
final ExtendedRouteTag exRouteTag = new ExtendedRouteTag(ByteArray.readBytes(value,
<API key>));
exRouteTags.add(exRouteTag);
LOG.debug("Parsed Extended Route Tag: {}", exRouteTag);
}
}
private static IpAddressNoZone <API key>(final ByteBuf value) {
switch (value.readableBytes()) {
case Ipv4Util.IP4_LENGTH:
return new IpAddressNoZone(Ipv4Util.addressForByteBuf(value));
case Ipv6Util.IPV6_LENGTH:
return new IpAddressNoZone(Ipv6Util.addressForByteBuf(value));
default:
LOG.debug("Ignoring unsupported forwarding address length {}", value.readableBytes());
return null;
}
}
/**
 * Serializes all present prefix attributes (IGP flags, route tags, metrics,
 * SR prefix TLVs, range, binding SIDs, forwarding address) into the
 * aggregator buffer. Each serialization helper is null-tolerant, so absent
 * attributes are simply skipped.
 *
 * @param <API key> container exposing the PrefixAttributes to serialize
 * @param byteAggregator output buffer receiving the encoded TLVs
 */
static void <API key>(final <API key> <API key>,
        final ByteBuf byteAggregator) {
    // Fixed: local was misspelled "prefixAtrributes".
    final PrefixAttributes prefixAttributes = <API key>.getPrefixAttributes();
    if (prefixAttributes.getIgpBits() != null) {
        final BitArray igpBit = new BitArray(FLAGS_SIZE);
        final IgpBits igpBits = prefixAttributes.getIgpBits();
        // Either representation of the up/down bit sets the serialized flag.
        igpBit.set(UP_DOWN_BIT, igpBits.getUpDown().getUpDown() || igpBits.getIsIsUpDown());
        igpBit.set(OSPF_NO_UNICAST, igpBits.getOspfNoUnicast());
        igpBit.set(OSPF_LOCAL_ADDRESS, igpBits.getOspfLocalAddress());
        igpBit.set(<API key>, igpBits.<API key>());
        TlvUtil.writeTLV(IGP_FLAGS, Unpooled.wrappedBuffer(igpBit.array()), byteAggregator);
    }
    serializeRouteTags(prefixAttributes.getRouteTags(), byteAggregator);
    <API key>(prefixAttributes.getExtendedTags(), byteAggregator);
    <API key>(prefixAttributes.getPrefixMetric(), byteAggregator);
    <API key>(prefixAttributes.<API key>(), byteAggregator);
    serializeSrPrefix(prefixAttributes.getSrPrefix(), byteAggregator);
    <API key>(prefixAttributes.getIpv6SrPrefix(), byteAggregator);
    serializeSrRange(prefixAttributes.getSrRange(), byteAggregator);
    <API key>(prefixAttributes.<API key>(), byteAggregator);
}
/**
 * Serializes each Binding SID entry as its own TLV; a null list writes
 * nothing.
 *
 * @param srBindingSidLabels binding SID entries, may be null
 * @param byteAggregator output buffer receiving one TLV per entry
 */
private static void <API key>(final List<SrBindingSidLabels> srBindingSidLabels,
final ByteBuf byteAggregator) {
if (srBindingSidLabels != null) {
for (final SrBindingSidLabels bindingSid : srBindingSidLabels) {
// Each entry gets an independently built TLV body.
final ByteBuf sidBuffer = Unpooled.buffer();
<API key>.<API key>(bindingSid.getWeight(), bindingSid.getFlags(),
bindingSid.getBindingSubTlvs(), sidBuffer);
TlvUtil.writeTLV(<API key>.BINDING_SID, sidBuffer, byteAggregator);
}
}
}
/**
 * Serializes the SR Range as a RANGE TLV; a null range writes nothing.
 *
 * @param srRange range to serialize, may be null
 * @param byteAggregator output buffer
 */
private static void serializeSrRange(final SrRange srRange, final ByteBuf byteAggregator) {
    if (srRange == null) {
        return;
    }
    final ByteBuf body = Unpooled.buffer();
    RangeTlvParser.serializeSrRange(srRange, body);
    TlvUtil.writeTLV(RANGE, body, byteAggregator);
}
/**
 * Serializes the IPv6 SR Prefix as an IPV6_PREFIX_SID TLV; a null prefix
 * writes nothing.
 *
 * @param ipv6SrPrefix prefix to serialize, may be null
 * @param byteAggregator output buffer
 */
private static void <API key>(final Ipv6SrPrefix ipv6SrPrefix, final ByteBuf byteAggregator) {
if (ipv6SrPrefix != null) {
final ByteBuf buffer = Unpooled.buffer();
<API key>.<API key>(ipv6SrPrefix, buffer);
TlvUtil.writeTLV(IPV6_PREFIX_SID, buffer, byteAggregator);
}
}
/**
 * Serializes the SR Prefix as a PREFIX_SID TLV; a null prefix writes nothing.
 *
 * @param srPrefix prefix to serialize, may be null
 * @param byteAggregator output buffer
 */
private static void serializeSrPrefix(final SrPrefix srPrefix, final ByteBuf byteAggregator) {
    if (srPrefix == null) {
        return;
    }
    final ByteBuf body = Unpooled.buffer();
    <API key>.serializeSrPrefix(srPrefix, body);
    TlvUtil.writeTLV(PREFIX_SID, body, byteAggregator);
}
/**
 * Serializes the prefix metric as a PREFIX_METRIC TLV; a null metric writes
 * nothing.
 *
 * @param prefixMetric metric to serialize, may be null
 * @param byteAggregator output buffer
 */
private static void <API key>(final IgpMetric prefixMetric, final ByteBuf byteAggregator) {
    if (prefixMetric == null) {
        return;
    }
    TlvUtil.writeTLV(PREFIX_METRIC, Unpooled.copyInt(prefixMetric.getValue().intValue()), byteAggregator);
}
/**
 * Concatenates all route tags into one ROUTE_TAG TLV; a null list writes
 * nothing (an empty list writes a TLV with an empty body).
 *
 * @param routeTags tags to serialize, may be null
 * @param byteAggregator output buffer
 */
private static void serializeRouteTags(final List<RouteTag> routeTags, final ByteBuf byteAggregator) {
    if (routeTags == null) {
        return;
    }
    final ByteBuf body = Unpooled.buffer();
    routeTags.forEach(tag -> body.writeBytes(tag.getValue()));
    TlvUtil.writeTLV(ROUTE_TAG, body, byteAggregator);
}
/**
 * Concatenates all extended route tags into one EXTENDED_ROUTE_TAG TLV;
 * a null list writes nothing (an empty list writes a TLV with an empty body).
 *
 * @param exRouteTags tags to serialize, may be null
 * @param byteAggregator output buffer
 */
private static void <API key>(final List<ExtendedRouteTag> exRouteTags,
final ByteBuf byteAggregator) {
if (exRouteTags != null) {
final ByteBuf extendedBuf = Unpooled.buffer();
for (final ExtendedRouteTag exRouteTag : exRouteTags) {
extendedBuf.writeBytes(exRouteTag.getValue());
}
TlvUtil.writeTLV(EXTENDED_ROUTE_TAG, extendedBuf, byteAggregator);
}
}
/**
 * Serializes the forwarding address as a FORWARDING_ADDRESS TLV, choosing
 * the IPv4 or IPv6 encoding based on which address form is populated.
 * A null address writes nothing; if neither form is set, a TLV with an
 * empty body is written.
 *
 * @param forwardingAddress address to serialize, may be null
 * @param byteAggregator output buffer
 */
private static void <API key>(final IpAddressNoZone forwardingAddress,
final ByteBuf byteAggregator) {
if (forwardingAddress != null) {
final ByteBuf ospfBuf = Unpooled.buffer();
if (forwardingAddress.<API key>() != null) {
ospfBuf.writeBytes(Ipv4Util.bytesForAddress(forwardingAddress.<API key>()));
} else if (forwardingAddress.<API key>() != null) {
ospfBuf.writeBytes(Ipv6Util.bytesForAddress(forwardingAddress.<API key>()));
}
TlvUtil.writeTLV(FORWARDING_ADDRESS, ospfBuf, byteAggregator);
}
}
} |
package org.testeditor.ui.wizardpages.teamshare;
import java.io.File;
import java.util.Collection;
import javax.inject.Inject;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.SelectionListener;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.List;
import org.testeditor.core.model.teststructure.TestProject;
import org.testeditor.core.model.teststructure.TestStructure;
import org.testeditor.core.services.interfaces.<API key>;
import org.testeditor.teamshare.svn.TeamShareStatus;
import org.testeditor.ui.constants.IconConstants;
import org.testeditor.ui.utilities.<API key>;
/**
 * Wizard page that lists the local (not yet committed) changes of a
 * {@link TestStructure}, as reported by the team-share SVN status service.
 * A "Developer View" check-box switches the list between raw file paths and
 * condensed TestStructure full names.
 *
 * @author dkuhlmann
 */
public class <API key> extends TeamShareWizardPage {
// Translation service for all user-visible wizard texts.
@Inject
private <API key> translationService;
// Raw change entries (file-system paths) reported by TeamShareStatus.
private Collection<String> changedDatas;
// Structure whose changes are being displayed.
private TestStructure testStructure;
// SWT list widget showing the changes; created in createSpecialGroup().
private List changesList;
// Check-box toggling raw-path ("developer") display.
private Button developerView;
// NOTE(review): appears unused in this class - confirm before removing.
private Point listSize;
@Override
String getTitleValue() {
return translationService.translate("%popupmenu.label.showChanges");
}
@Override
String getDescriptionValue() {
return translationService.translate("%showChanges.element.wizard.msg", testStructure.getFullName());
}
@Override
Image getIcon() {
return IconConstants.ICON_SHOW_CHANGES;
}
/**
 * Creates the control: fetches the current change set for the test
 * structure, builds the page widgets via the superclass, then fills the
 * changes list.
 *
 * @param parent
 *            the parent Composite
 */
@Override
public void createControl(Composite parent) {
TeamShareStatus shareStatus = new TeamShareStatus();
setChangedDatas(shareStatus.<API key>(testStructure));
super.createControl(parent);
// This page has no user input to validate, so it is always complete.
setPageComplete(true);
refreshList();
}
/**
 * Intentionally empty: this page shows no team-share configuration detail
 * fields.
 *
 * @param <API key>
 *            used to get the Fields to display in the detail Composite for
 *            <API key>.
 */
@Override
protected void <API key>(<API key> <API key>) {
}
/**
 * Intentionally empty: this page adds no Team Share Configuration widgets.
 *
 * @param content
 *            to add the TeamShareGroup Widgets.
 */
@Override
protected void <API key>(Composite content) {
}
/**
 * Builds the page-specific widgets: the scrollable changes list and the
 * "Developer View" check-box, which refreshes the list on every toggle.
 *
 * @param parent
 *            the parent-composite of group.
 */
@Override
protected void createSpecialGroup(Composite parent) {
changesList = new List(parent, SWT.BORDER | SWT.SCROLLBAR_OVERLAY | SWT.V_SCROLL | SWT.H_SCROLL);
GridData data = new GridData(GridData.FILL_BOTH);
// Span both columns of the parent's grid layout.
data.horizontalSpan = 2;
changesList.setLayoutData(data);
developerView = new Button(parent, SWT.CHECK);
developerView.setLayoutData(new GridData(NONE, NONE, false, false, 1, 1));
developerView.setText("Developer View");
// Refresh on both selection paths so the list always matches the check-box.
developerView.<API key>(new SelectionListener() {
@Override
public void widgetSelected(SelectionEvent e) {
refreshList();
}
@Override
public void <API key>(SelectionEvent e) {
refreshList();
}
});
}
/**
 * @return the hint text shown on this page
 */
@Override
protected String getHintTextValue() {
return translationService.translate("%wizard.teamShowChanges.msgText");
}
/**
 * @return the header of the hint-text.
 */
@Override
protected String <API key>() {
return translationService.translate("%wizard.teamShare.msgHead");
}
/**
 * Sets the structure whose changes this page displays.
 *
 * @param testStructure
 *            TestStructure
 */
public void setTestStructure(TestStructure testStructure) {
this.testStructure = testStructure;
}
/**
 * @return the raw change entries reported by the status service
 */
public Collection<String> getChangedDatas() {
return changedDatas;
}
/**
 * Repopulates the changes list widget. In developer view every raw entry
 * is shown verbatim; otherwise entries are condensed to TestStructure full
 * names and consecutive duplicates are suppressed.
 */
public void refreshList() {
if (changesList != null) {
changesList.removeAll();
for (String data : changedDatas) {
if (developerView.getSelection()) {
changesList.add(data);
} else {
String name = <API key>(new File(data), testStructure.getRootElement());
// Skip the entry if it condenses to the same name as the previous one.
if (changesList.getItemCount() == 0
|| !changesList.getItem(changesList.getItemCount() - 1).equals(name)) {
changesList.add(name);
}
}
}
}
}
/**
 * Converts the given file's path to a TestStructure full name.
 *
 * <p>NOTE(review): contrary to the original javadoc, this method never
 * returns ""; for the project root itself and for paths under
 * FitNesseRoot/RecentChanges it returns the project name instead.
 *
 * @param file
 *            File to convert the path to a FullName.
 * @param testProject
 *            TestProject where the TestStructure should be.
 * @return TestStructure FullName of the given file.
 */
public String <API key>(File file, TestProject testProject) {
/*
 * Cut the Path before the workspace because everything before
 * .testeditor is not needed. Page content files represent their parent
 * directory, so step up to it.
 */
if (file.isFile()) {
if (file.getName().equals("content.txt") || file.getName().equals("properties.xml")) {
file = file.getParentFile();
}
}
String path;
if (!file.getPath().equals(testProject.<API key>().getProjectPath())) {
path = file.getPath().substring(testProject.<API key>().getProjectPath().length() + 1);
} else {
return testProject.getName();
}
/*
 * Changes in the RecentChanges will not be shown.
 */
if (!path.startsWith("FitNesseRoot" + File.separator + "RecentChanges")) {
if (path.contains("FitNesseRoot" + File.separator)) {
path = path.substring(("FitNesseRoot" + File.separator).length(), path.length());
}
return path;
}
return testProject.getName();
}
/**
 * @param list
 *            the changedDatas to set
 */
public void setChangedDatas(Collection<String> list) {
this.changedDatas = list;
}
}
require "rjava"
# which accompanies this distribution, and is available at
# Contributors:
# IBM Corporation - initial API and implementation
module Org::Eclipse::Core::Internal::Runtime
module ActivatorImports #:nodoc:
class_module.module_eval {
include ::Java::Lang
include ::Org::Eclipse::Core::Internal::Runtime
include_const ::Java::Net, :URL
include ::Java::Util
include_const ::Org::Eclipse::Core::Internal::Boot, :<API key>
include_const ::Org::Eclipse::Core::Internal::Boot, :PlatformURLHandler
include_const ::Org::Eclipse::Core::Runtime, :IAdapterManager
include_const ::Org::Eclipse::Osgi::Framework::Log, :FrameworkLog
include_const ::Org::Eclipse::Osgi::Service::Datalocation, :Location
include_const ::Org::Eclipse::Osgi::Service::Debug, :DebugOptions
include_const ::Org::Eclipse::Osgi::Service::Localization, :BundleLocalization
include_const ::Org::Eclipse::Osgi::Service::Urlconversion, :URLConverter
include ::Org::Osgi::Framework
include_const ::Org::Osgi::Service::Packageadmin, :PackageAdmin
include_const ::Org::Osgi::Service::Url, :URLConstants
include_const ::Org::Osgi::Service::Url, :<API key>
include_const ::Org::Osgi::Util::Tracker, :ServiceTracker
}
end
# The Common runtime plugin class.
# This class can only be used if OSGi plugin is available.
# (rjava-generated bundle activator: registers the platform URL converter and
# adapter manager in start(), hands out lazily created service trackers, and
# tears everything down in stop().)
class Activator
<API key> ActivatorImports
include BundleActivator
class_module.module_eval {
# Table to keep track of all the URL converter services.
def url_trackers
defined?(@@url_trackers) ? @@url_trackers : @@url_trackers= HashMap.new
end
alias_method :attr_url_trackers, :url_trackers
def url_trackers=(value)
@@url_trackers = value
end
alias_method :attr_url_trackers=, :url_trackers=
# Bundle context captured in start(); nil once the bundle is stopped.
def bundle_context
defined?(@@bundle_context) ? @@bundle_context : @@bundle_context= nil
end
alias_method :attr_bundle_context, :bundle_context
def bundle_context=(value)
@@bundle_context = value
end
alias_method :attr_bundle_context=, :bundle_context=
# Singleton instance; set in start(), cleared in stop().
def singleton
defined?(@@singleton) ? @@singleton : @@singleton= nil
end
alias_method :attr_singleton, :singleton
def singleton=(value)
@@singleton = value
end
alias_method :attr_singleton=, :singleton=
}
# Service registration and tracker handles below; all created lazily and
# closed/unregistered in stop().
attr_accessor :<API key>
alias_method :<API key>, :<API key>
undef_method :<API key>
alias_method :<API key>=, :<API key>=
undef_method :<API key>=
attr_accessor :<API key>
alias_method :<API key>, :<API key>
undef_method :<API key>
alias_method :<API key>=, :<API key>=
undef_method :<API key>=
attr_accessor :<API key>
alias_method :<API key>, :<API key>
undef_method :<API key>
alias_method :<API key>=, :<API key>=
undef_method :<API key>=
attr_accessor :<API key>
alias_method :<API key>, :<API key>
undef_method :<API key>
alias_method :<API key>=, :<API key>=
undef_method :<API key>=
attr_accessor :<API key>
alias_method :<API key>, :<API key>
undef_method :<API key>
alias_method :<API key>=, :<API key>=
undef_method :<API key>=
attr_accessor :bundle_tracker
alias_method :attr_bundle_tracker, :bundle_tracker
undef_method :bundle_tracker
alias_method :attr_bundle_tracker=, :bundle_tracker=
undef_method :bundle_tracker=
attr_accessor :debug_tracker
alias_method :attr_debug_tracker, :debug_tracker
undef_method :debug_tracker
alias_method :attr_debug_tracker=, :debug_tracker=
undef_method :debug_tracker=
attr_accessor :log_tracker
alias_method :attr_log_tracker, :log_tracker
undef_method :log_tracker
alias_method :attr_log_tracker=, :log_tracker=
undef_method :log_tracker=
attr_accessor :<API key>
alias_method :<API key>, :<API key>
undef_method :<API key>
alias_method :<API key>=, :<API key>=
undef_method :<API key>=
class_module.module_eval {
typesig { [] }
# Returns the singleton for this Activator. Callers should be aware that
# this will return null if the bundle is not active.
def get_default
return self.attr_singleton
end
typesig { [String] }
# Print a debug message to the console.
# Pre-pend the message with the current date and the name of the current thread.
def message(message)
buffer = StringBuffer.new
buffer.append(JavaDate.new(System.current_time_millis))
buffer.append(" - [") # $NON-NLS-1$
buffer.append(JavaThread.current_thread.get_name)
buffer.append("] ") # $NON-NLS-1$
buffer.append(message)
System.out.println(buffer.to_s)
end
}
typesig { [BundleContext] }
# (non-Javadoc)
# @see org.osgi.framework.BundleActivator#start(org.osgi.framework.BundleContext)
# Registers the platform URL converter and the adapter manager, then
# installs the platform URL protocol support.
def start(context)
self.attr_bundle_context = context
self.attr_singleton = self
url_properties = Hashtable.new
url_properties.put("protocol", "platform") # $NON-NLS-1$ //$NON-NLS-2$
@<API key> = context.register_service(URLConverter.get_name, <API key>.new, url_properties)
@<API key> = context.register_service(IAdapterManager.get_name, AdapterManager.get_default, nil)
<API key>
end
typesig { [] }
# Return the configuration location service, if available.
def <API key>
if ((@<API key>).nil?)
filter = nil
begin
filter = self.attr_bundle_context.create_filter(Location::<API key>)
rescue <API key> => e
# should not happen
end
@<API key> = ServiceTracker.new(self.attr_bundle_context, filter, nil)
@<API key>.open
end
return @<API key>.get_service
end
typesig { [] }
# Return the debug options service, if available.
def get_debug_options
if ((@debug_tracker).nil?)
@debug_tracker = ServiceTracker.new(self.attr_bundle_context, DebugOptions.get_name, nil)
@debug_tracker.open
end
return @debug_tracker.get_service
end
typesig { [] }
# Return the framework log service, if available.
def get_framework_log
if ((@log_tracker).nil?)
@log_tracker = ServiceTracker.new(self.attr_bundle_context, FrameworkLog.get_name, nil)
@log_tracker.open
end
return @log_tracker.get_service
end
typesig { [] }
# Return the instance location service, if available.
def <API key>
if ((@<API key>).nil?)
filter = nil
begin
filter = self.attr_bundle_context.create_filter(Location::INSTANCE_FILTER)
rescue <API key> => e
# ignore this. It should never happen as we have tested the above format.
end
@<API key> = ServiceTracker.new(self.attr_bundle_context, filter, nil)
@<API key>.open
end
return @<API key>.get_service
end
typesig { [String] }
# Return the resolved bundle with the specified symbolic name.
# @see PackageAdmin#getBundles(String, String)
def get_bundle(symbolic_name)
admin = get_bundle_admin
if ((admin).nil?)
return nil
end
bundles = admin.get_bundles(symbolic_name, nil)
if ((bundles).nil?)
return nil
end
# Return the first bundle that is not installed or uninstalled
i = 0
while i < bundles.attr_length
if (((bundles[i].get_state & (Bundle::INSTALLED | Bundle::UNINSTALLED))).equal?(0))
return bundles[i]
end
i += 1
end
return nil
end
typesig { [] }
# Return the package admin service, if available.
def get_bundle_admin
if ((@bundle_tracker).nil?)
@bundle_tracker = ServiceTracker.new(get_context, PackageAdmin.get_name, nil)
@bundle_tracker.open
end
return @bundle_tracker.get_service
end
typesig { [Bundle] }
# Return an array of fragments for the given bundle host.
# Returns an empty array (never nil) when the package admin is unavailable.
def get_fragments(host)
admin = get_bundle_admin
if ((admin).nil?)
return Array.typed(Bundle).new(0) { nil }
end
return admin.get_fragments(host)
end
typesig { [] }
# Return the install location service if available.
def <API key>
if ((@<API key>).nil?)
filter = nil
begin
filter = self.attr_bundle_context.create_filter(Location::INSTALL_FILTER)
rescue <API key> => e
# should not happen
end
@<API key> = ServiceTracker.new(self.attr_bundle_context, filter, nil)
@<API key>.open
end
return @<API key>.get_service
end
typesig { [Object] }
# Returns the bundle id of the bundle that contains the provided object, or
# <code>null</code> if the bundle could not be determined.
def get_bundle_id(object)
if ((object).nil?)
return nil
end
if ((@bundle_tracker).nil?)
message("Bundle tracker is not set") # $NON-NLS-1$
return nil
end
package_admin = @bundle_tracker.get_service
if ((package_admin).nil?)
return nil
end
source = package_admin.get_bundle(object.get_class)
if (!(source).nil? && !(source.get_symbolic_name).nil?)
return source.get_symbolic_name
end
return nil
end
typesig { [Bundle, String] }
# Look up the localization for a bundle/locale pair via the
# BundleLocalization service; returns nil when the service is unavailable.
def get_localization(bundle, locale)
if ((@<API key>).nil?)
context = Activator.get_context
if ((context).nil?)
message("ResourceTranslator called before plugin is started") # $NON-NLS-1$
return nil
end
@<API key> = ServiceTracker.new(context, BundleLocalization.get_name, nil)
@<API key>.open
end
location = @<API key>.get_service
if (!(location).nil?)
return location.get_localization(bundle, locale)
end
return nil
end
typesig { [BundleContext] }
# (non-Javadoc)
# @see org.osgi.framework.BundleActivator#stop(org.osgi.framework.BundleContext)
# Unregisters our services, closes every tracker we may have opened, and
# clears the class-level state so get_default returns nil again.
def stop(context)
<API key>
if (!(@<API key>).nil?)
@<API key>.unregister
@<API key> = nil
end
if (!(@<API key>).nil?)
@<API key>.unregister
@<API key> = nil
end
if (!(@<API key>).nil?)
@<API key>.close
@<API key> = nil
end
if (!(@<API key>).nil?)
@<API key>.close
@<API key> = nil
end
if (!(@bundle_tracker).nil?)
@bundle_tracker.close
@bundle_tracker = nil
end
if (!(@debug_tracker).nil?)
@debug_tracker.close
@debug_tracker = nil
end
if (!(@log_tracker).nil?)
@log_tracker.close
@log_tracker = nil
end
if (!(@<API key>).nil?)
@<API key>.close
@<API key> = nil
end
if (!(@<API key>).nil?)
@<API key>.close
@<API key> = nil
end
self.attr_bundle_context = nil
self.attr_singleton = nil
end
class_module.module_eval {
typesig { [] }
# Return this bundle's context.
def get_context
return self.attr_bundle_context
end
typesig { [] }
# Let go of all the services that we acquired and kept track of.
def <API key>
synchronized((self.attr_url_trackers)) do
if (!self.attr_url_trackers.is_empty)
iter = self.attr_url_trackers.key_set.iterator
while iter.has_next
key = iter.next_
tracker = self.attr_url_trackers.get(key)
tracker.close
end
self.attr_url_trackers = HashMap.new
end
end
end
typesig { [URL] }
# Return the URL Converter for the given URL. Return null if we can't
# find one.
def get_urlconverter(url)
protocol = url.get_protocol
synchronized((self.attr_url_trackers)) do
tracker = self.attr_url_trackers.get(protocol)
if ((tracker).nil?)
# get the right service based on the protocol
filter_prefix = "(&(objectClass=" + RJava.cast_to_string(URLConverter.get_name) + ")(protocol=" # $NON-NLS-1$ //$NON-NLS-2$
filter_postfix = "))" # $NON-NLS-1$
filter = nil
begin
filter = get_context.create_filter(filter_prefix + protocol + filter_postfix)
rescue <API key> => e
return nil
end
tracker = ServiceTracker.new(get_context, filter, nil)
tracker.open
# cache it in the registry
self.attr_url_trackers.put(protocol, tracker)
end
return tracker.get_service
end
end
}
typesig { [] }
# Register the platform URL support as a service to the URLHandler service
def <API key>
<API key>.startup
<API key>.startup
<API key>.startup
<API key>.startup
service = <API key>
if (!(service).nil?)
<API key>.startup(service.get_url)
end
properties = Hashtable.new(1)
properties.put(URLConstants::<API key>, Array.typed(String).new([PlatformURLHandler::PROTOCOL]))
get_context.register_service(<API key>.get_name, PlatformURLHandler.new, properties)
end
typesig { [] }
# Instance state starts out empty; everything is created on demand.
def initialize
@<API key> = nil
@<API key> = nil
@<API key> = nil
@<API key> = nil
@<API key> = nil
@bundle_tracker = nil
@debug_tracker = nil
@log_tracker = nil
@<API key> = nil
end
private
alias_method :<API key>, :initialize
end
end |
<html>
<!--
== $Id$
== This software is subject to the terms of the Eclipse Public License v1.0
== Agreement, available at the following URL:
== http://www.eclipse.org/legal/epl-v10.html.
== Copyright (C) 2005-2009 Julian Hyde and others
== All Rights Reserved.
== You must accept the terms of that agreement to use this software.
-->
<head>
<link rel="stylesheet" type="text/css" href="stylesheet.css"/>
<title>Pentaho Analysis Services: Aggregate Tables</title>
</head>
<body>
<!-- doc2web start -->
<!-- page title -->
<div class="contentheading">Aggregate Tables</div>
<!-- end page title -->
<!-- Contents -->
<h3>Contents</h3>
<ol>
<li><a href="#Introduction">Introduction</a></li>
<li><a href="#What_are_aggregates">What are aggregate tables?</a></li>
<ol>
<li><a href="#<API key>">A simple aggregate table</a></li>
<li><a href="#<API key>">Another aggregate table</a></li>
</ol>
<li><a href="#<API key>">Defining aggregate tables</a></li>
<li><a href="#Building_aggregates">Building aggregate tables</a></li>
<li><a href="#<API key>">How Mondrian recognizes aggregate tables</a></li>
<ol>
<li><a href="#Recognizing_default">Rules</a></li>
<li><a href="#<API key>">Explicit aggregates</a></li>
</ol>
<li><a href="#<API key>">Aggregate tables and parent-child hierarchies</a></li>
<ol>
<li><a href="#<API key>">Aggregate tables at the leaf level of a parent-child hierarchy</a></li>
<li><a href="#<API key>">Combined closure and aggregate tables</a></li>
</ol>
<li><a href="#<API key>">How Mondrian uses aggregate tables</a></li>
<ol>
<li><a href="#<API key>">Choosing between aggregate tables</a></li>
<li><a href="#Distinct_count">Distinct count</a></li>
</ol>
<li><a href="#<API key>">Tools for designing and maintaining aggregate tables</a></li>
<ol>
<li><a href="#<API key>">AggGen (aggregate generator)</a></li>
<li><a href="#<API key>">Aggregate table populater</a></li>
<li><a href="#Script_generator">Script generator</a></li>
<li><a href="#Recommender">Recommender</a></li>
<li><a href="#<API key>">Online/offline control</a></li>
</ol>
<li><a href="#<API key>">Properties that affect aggregates</a></li>
<li><a href="#References">References</a></li>
</ol>
<!-- Introduction -->
<h3>1. Introduction<a name="Introduction"> </a></h3>
<p>Unlike many OLAP servers, Mondrian does not store data on disk: it just works on
the data in the RDBMS, and once it has read a piece of data once, it stores that
data in its cache. This greatly simplifies the process of installing Mondrian,
but it puts limits on Mondrian's performance when Mondrian is applied to a huge
dataset.</p>
<p>Consider what happens when the CEO runs her Sales Report first thing on a Monday
morning. This report contains a single number: the total sales of all products,
in all regions, this year. In order to get this number, Mondrian generates a
query something like this:</p>
<blockquote>
<code>SELECT sum(store_sales)<br>
FROM sales_fact, <br>
time<br>
WHERE sales_fact.time_id = time.time_id<br>
AND time.year = 2005</code>
</blockquote>
<p>and sends it to the DBMS. The DBMS takes several minutes to execute it: which is
understandable because the DBMS has to read all of this year's records in the
fact table (a few million sales, say) and aggregate them into a single total.
Clearly, what is needed in this case, and in others like it, is a pre-computed
summary of the data: an aggregate table.</p>
<p>An <dfn>aggregate table</dfn> coexists with the base fact table,
and contains pre-aggregated measures built from the
fact table. It is registered in Mondrian's schema, so that Mondrian can
choose whether to use the aggregate table rather than the fact table, if it
is applicable for a particular query.</p>
<p>Designing aggregate tables is a fine art.
There is extensive research, both empirical and theoretical, available
on the web concerning different ways to structure aggregate tables and we will not attempt to duplicate any of it here.
</p>
<!-- What are aggregate tables? -->
<h3>2. What are aggregate tables?<a name="What_are_aggregates"> </a></h3>
<p>To explain what aggregate tables are, let's consider a simple star schema.</p>
<p>
<img border="0" src="images/aggregate_tables_1.png" width="500" height="283"></p>
<p>The star schema has a single fact table <code>Sales</code>, two measure
columns (<code>units</code> and <code>dollars</code>) and four dimension tables
(<code>Product</code>, <code>Mfr</code>, <code>Customer</code>, and <code>Time</code>).</p>
<p>On top of this star schema, we create the following multidimensional model:</p>
<ul>
<li>Cube <code>[Sales]</code> has two measures <code>[Unit sales]</code> and
<code>[Dollar sales]</code></li>
<li>Dimension <code>[Product]</code> has levels <code>[All Products]</code>,
<code>[Manufacturer]</code>, <code>[Brand]</code>, <code>[Prodid]</code></li>
<li>Dimension <code>[Time]</code> has levels <code>[All Time]</code>, <code>
[Year]</code>, <code>[Quarter]</code>, <code>[Month]</code>, <code>[Day]</code></li>
<li>Dimension <code>[Customer]</code> has levels <code>[All Customers]</code>,
<code>[State]</code>, <code>[City]</code>, <code>[Custid]</code></li>
<li>Dimension <code>[Payment Method]</code> has levels <code>[All Payment
Methods]</code>, <code>[Payment Method]</code></li>
</ul>
<p>Most of the dimensions have a corresponding dimension table, but there are
two exceptions. The <code>[Product]</code> dimension is a <dfn>snowflake
dimension</dfn>, which means that it is spread across more than one table (in
this case <code>Product</code> and <code>Mfr</code>). The <code>[Payment Method]</code> dimension
is a <dfn>degenerate dimension</dfn>; its sole attribute is the <code>
payment</code> column in the fact table, and so it does not need a dimension
table.</p>
<!-- A simple aggregate table -->
<h4>2.1 A simple aggregate table<a name="<API key>"> </a></h4>
<p>Now let's create an aggregate table, <code>Agg_1</code>:</p>
<p>
<img border="0" src="images/aggregate_tables_2.png" width="417" height="172"></p>
<p>See how the original star schema columns have been combined into the table:</p>
<ul>
<li>The <code>Time</code> dimension has been "collapsed" into the aggregate
table, omitting the <code>month</code> and <code>day</code> columns.</li>
<li>The two tables of the <code>Product</code> dimension have been
"collapsed" into the aggregate table.</li>
<li>The Customer dimension has been "lost".</li>
<li>For each measure column in the fact table (<code>units</code>, <code>dollars</code>),
there are one or more measure columns in the aggregate table (<code>sum units</code>, <code>min
units</code>, <code>max units</code>, <code>sum dollars</code>).</li>
<li>There is also a measure column, <code>row count</code>, representing the
"count" measure.</li>
</ul>
<p><code>Agg_1</code> would be declared like this:</p>
<blockquote>
<code><<a href="#XML_Cube">Cube</a> name="Sales"><br>
<<a href="#XML_Table">Table</a> name="sales"><br>
<<a href="#XML_AggName">AggName</a>
name="agg_1"><br>
<<a href="#XML_AggFactCount">AggFactCount</a> column="row count"/><br>
<<a href="#XML_AggMeasure">AggMeasure</a> name="[Measures].[Unit
Sales]" column="sum units"/><br>
<<a href="#XML_AggMeasure">AggMeasure</a>
name="[Measures].[Min Units]" column="min units"/><br>
<<a href="#XML_AggMeasure">AggMeasure</a> name="[Measures].[Max
Units]" column="max units"/><br>
<<a href="#XML_AggMeasure">AggMeasure</a> name="[Measures].[Dollar
Sales]" column="sum dollars"/><br>
<<a href="#XML_AggLevel">AggLevel</a> name="[Time].[Year]"
column="year"/><br>
<<a href="#XML_AggLevel">AggLevel</a> name="[Time].[Quarter]"
column="quarter"/><br>
<<a href="#XML_AggLevel">AggLevel</a> name="[Product].[Mfrid]"
column="mfrid"/><br>
<<a href="#XML_AggLevel">AggLevel</a> name="[Product].[Brand]"
column="brand"/><br>
<<a href="#XML_AggLevel">AggLevel</a> name="[Product].[Prodid]"
column="prodid"/><br>
</<a href="#XML_AggName">AggName</a>><br>
</<a href="#XML_Table">Table</a>><br>
<br>
<!-- Rest of the cube definition --><br>
</<a href="#XML_Cube">Cube</a>></code>
</blockquote>
<!-- Another aggregate table -->
<h4>2.2 Another aggregate table<a name="<API key>"> </a></h4>
<p>Another aggregate table, <code>Agg_2</code>:</p>
<p>
<img border="0" src="images/aggregate_tables_3.png" width="500" height="148"></p>
<p>and the corresponding XML:</p>
<blockquote>
<code><<a href="#XML_Cube">Cube</a> name="Sales"><br>
<<a href="#XML_Table">Table</a> name="sales"><br>
<<a href="#XML_AggName">AggName</a>
name="agg_1" ... /><br>
<<a href="#XML_AggName">AggName</a>
name="agg_2"><br>
<<a href="#XML_AggFactCount">AggFactCount</a> column="row count"/><br>
<<a href="#XML_AggForeignKey">AggForeignKey</a> factColumn="prodid"
aggColumn="prodid"/><br>
<<a href="#XML_AggMeasure">AggMeasure</a> name="[Measures].[Unit
Sales]" column="sum units"/><br>
<<a href="#XML_AggMeasure">AggMeasure</a>
name="[Measures].[Min Units]" column="min units"/><br>
<<a href="#XML_AggMeasure">AggMeasure</a> name="[Measures].[Max
Units]" column="max units"/><br>
<<a href="#XML_AggMeasure">AggMeasure</a> name="[Measures].[Dollar
Sales]" column="sum dollars"/><br>
<<a href="#XML_AggLevel">AggLevel</a> name="[Time].[Year]"
column="year"/><br>
<<a href="#XML_AggLevel">AggLevel</a> name="[Time].[Quarter]"
column="quarter"/><br>
<<a href="#XML_AggLevel">AggLevel</a> name="[Time].[Month]"
column="month"/><br>
<<a href="#XML_AggLevel">AggLevel</a> name="[Payment
Method].[Payment Method]"
column="payment"/><br>
<<a href="#XML_AggLevel">AggLevel</a> name="[Customer].[State]"
column="state"/><br>
</<a href="#XML_AggName">AggName</a>><br>
</<a href="#XML_Table">Table</a>><br>
<br>
<<a href="schema.html#XML_Dimension">Dimension</a> name="Product"><br>
<<a href="schema.html#XML_Hierarchy">Hierarchy</a> hasAll="true"
primaryKey="prodid" primaryKeyTable="Product"><br>
<<a href="schema.html#XML_Join">Join</a> leftKey="mfrid"
rightKey="mfrid"><br>
<<a href="schema.html#XML_Table">Table</a>
name="Product"/><br>
<<a href="schema.html#XML_Table">Table</a>
name="Mfr"/><br>
</<a href="schema.html#XML_Join">Join</a>><br>
<<a href="schema.html#XML_Level">Level</a>
name="Manufacturer" table="Mfr" column="mfrid"/><br>
<<a href="schema.html#XML_Level">Level</a>
name="Brand" table="Product" column="brand"/><br>
<<a href="schema.html#XML_Level">Level</a>
name="Name" table="Product" column="prodid"/><br>
</<a href="schema.html#XML_Hierarchy">Hierarchy</a>><br>
</<a href="schema.html#XML_Dimension">Dimension</a>><br>
<br>
<!-- Rest of the cube definition --><br>
</<a href="#XML_Cube">Cube</a>></code>
</blockquote>
<p>Several dimensions have been collapsed: <code>[Time]</code> at the <code>
[Quarter]</code> level; <code>[Customer]</code> at the <code>[State]</code>
level; and <code>[Payment Method]</code> at the <code>[Payment Method]</code>
level. But the <code>[Product]</code> dimension has been retained in its
original snowflake form.</p>
<p>The <code><<a href="#XML_AggForeignKey">AggForeignKey</a>></code> element is
used to declare that the column <code>prodid</code> links to the dimension
table, but all other columns remain in the <code>Product</code> and <code>Mfr</code>
dimension tables.</p>
<!-- Defining aggregate tables -->
<h3>3. Defining aggregate tables<a name="<API key>"> </a></h3>
<p>A fact table can have zero or more aggregate tables.
Every aggregate table is associated with just one fact table.
It aggregates the fact table measures over one or more of the dimensions.
As an example, if a particular column in the fact table represents the
number of sales of some product on a given day by a given store, then
an aggregate table might be created that sums the information so that
applies at a month level rather than by day. Such an aggregate might
reasonably be 1/30<sup>th</sup> the size of the fact table (assuming comparable sales
for every day of a month). Now, if one were to execute an MDX query
that needed sales information at a month (or quarter or year) level,
running the query against the aggregate table is faster but yields the
same answer as if it were run against the base fact table.</p>
<p>Further, one might create an aggregate that not only aggregates at the month
level but also, rather than at the individual store level, aggregates at
the state level. If there were, say, 20 stores per state, then this
aggregate table would be 1/600<sup>th</sup> the size of the original fact table. MDX queries interested only at the month or above and state or above levels
would use this table.</p>
<p>When an MDX query runs, what aggregate should be used?
This comes down to what measures are needed and with which dimension levels.
The base fact table always has the correct measures and dimension
levels. But, it might also be true that there is one or more aggregate
tables that also have the measures and levels. Of these, the aggregate
table with the lowest cost to read, the smallest number of rows,
should be the table used to fulfill the query.</p>
<p>Mondrian supports two aggregation techniques which are called "lost"
dimension and "collapsed" dimension. For the creation of any
given aggregate table these can
be applied independently to any number of different dimensions.</p>
<p>A "lost" dimension is one which is completely missing from the aggregate
table. The measures that appear in the table have been aggregated
across all values of the lost dimension. As an example, in a fact table
with dimensions of time, location, and product and measure sales, for an
aggregate table that did not have the location dimension that
dimension would be "lost". Here, the sales measure would be the aggregation
over all locations. An aggregate table where all of the dimensions
are lost is possible - it would have a single row with the measure
aggregated over everything - sales for all time, all locations and all
products.</p>
<blockquote>
<code>
fact table<br />
time_id<br />
product_id<br />
location_id<br />
measure<br />
<br />
lost (time_id) dimension table<br />
product_id<br />
location_id<br />
measure (aggregated over time)<br />
fact_count<br />
<br />
fully lost dimension table<br />
measure (aggregated over everything)<br />
fact_count<br />
</code>
</blockquote>
<p>Note the "fact_count" column in the aggregate table. This additional
column is a general feature of aggregate tables. It is a count of
how many fact table columns were aggregated into the one aggregate
table row. As an example, if for a particular choice of product_id and
location_id, the time_id occurred 5 times in the fact table, then in the
aggregate table the fact_count column would contain 5 for that
product_id/location_id pair (a given product was sold at a given
location at 5 different times).</p>
<p>The second supported aggregation technique provides a finer level of
control, the "collapsed" dimension technique.
Recall that the dimension key in the fact table refers (more or less)
to the
lowest level in the dimension hierarchy.
For a collapsed dimension, the dimension key in the aggregate
table is replaced with a set of dimension levels; the dimension key
column is replaced with a set of columns; a fully denormalized
summary table for that dimension.
As an example, if the time dimension with base fact table foreign key
time_id had the levels: day, month, quarter and
year, and in an aggregate it was collapsed to the month level, then
the aggregate table would not have a time_id column but rather
columns for month, quarter and year. The SQL generated for an
MDX query for which this aggregate table can be used, would no longer
refer to the time dimension's table but rather all time related
information would be gotten from the aggregate table.</p>
<blockquote>
<code>
time dimension table<br />
time_id<br />
day<br />
month<br />
quarter<br />
year<br />
<br />
fact table<br />
time_id<br />
measure<br />
<br />
collapsed dimension table<br />
month<br />
quarter<br />
year<br />
measure (aggregated to month level)<br />
fact_count<br />
</code>
</blockquote>
<p>In the literature, there are other ways of creating aggregate tables
but they are not supported by Mondrian at this time.</p>
<!-- -->
<h3>4. Building aggregate tables<a name="Building_aggregates"> </a></h3>
<p>Aggregate tables must be built.
Generally, they are not real-time; they are rebuilt,
for example, every night for use the following day by the analysts.
Considering the lost and collapsed dimension technique for
aggregate table definition, one can estimate that for a dimension
with N levels, there are N+1 possible aggregate tables (N collapsed and
1 lost). Also, dimensions (with different dimension tables) can
be aggregated independently.
For the FoodMart Sales cube there are 1400 different possible aggregate
tables.</p>
<p>Clearly, one does not want to create all possible aggregate tables.
Which ones to create depends upon two considerations. The first
consideration is application dependent:
the nature of the MDX queries that will be executed.
If many of the queries deal with per month and per state questions,
then an aggregate at those levels might be created.
The second consideration is application independent: per
dimension aggregating from
the lowest level to the next lowest generally gives greater
bang for the buck than aggregating from the N to the N+1 (N>1) level.
This is because 1) a first level aggregation can be used for all
queries at that level and above and 2) dimension fan-out tends to
increase for the lower levels.
Of course, your mileage may vary.</p>
<p>In a sense, picking which aggregate tables to build is analogous to
picking which indexes to build on a table; it is application
dependent and experience helps.</p>
<p>The hardest part about the actual creation and population of
aggregate tables is figuring out how to create the first couple;
what the SQL looks like.
After that they are pretty much all the same.</p>
<p>Four examples will be given. They all concern building
aggregate tables for the sales_fact_1997 fact table.
As a reminder, the sales_fact_1997 fact table looks like:</p>
<blockquote>
<code>
sales_fact_1997<br />
product_id <br />
time_id <br />
customer_id <br />
promotion_id<br />
store_id<br />
store_sales<br />
store_cost<br />
unit_sales
</code>
</blockquote>
<p>The first example is a lost time dimension aggregate table, the
time_id foreign key is missing.</p>
<blockquote>
<code>
CREATE TABLE <API key> (<br />
product_id INTEGER NOT NULL,<br />
customer_id INTEGER NOT NULL,<br />
promotion_id INTEGER NOT NULL,<br />
store_id INTEGER NOT NULL,<br />
store_sales DECIMAL(10,4) NOT NULL,<br />
store_cost DECIMAL(10,4) NOT NULL,<br />
unit_sales DECIMAL(10,4) NOT NULL,<br />
fact_count INTEGER NOT NULL);<br />
<br />
CREATE INDEX i_sls_97_cust_id ON <API key> (customer_id);<br />
CREATE INDEX i_sls_97_prod_id ON <API key> (product_id);<br />
CREATE INDEX i_sls_97_promo_id ON <API key> (promotion_id);<br />
CREATE INDEX i_sls_97_store_id ON <API key> (store_id);<br />
<br />
INSERT INTO <API key> (<br />
product_id,<br />
customer_id,<br />
promotion_id,<br />
store_id,<br />
store_sales,<br />
store_cost,<br />
unit_sales,<br />
fact_count)<br />
SELECT<br />
product_id,<br />
customer_id,<br />
promotion_id,<br />
store_id,<br />
SUM(store_sales) AS store_sales,<br />
SUM(store_cost) AS store_cost,<br />
SUM(unit_sales) AS unit_sales,<br />
COUNT(*) AS fact_count<br />
FROM <br />
sales_fact_1997 <br />
GROUP BY <br />
product_id, <br />
customer_id, <br />
promotion_id, <br />
store_id;<br />
</code>
</blockquote>
<p>A couple of things to note here. </p>
<p>The above is in MySQL's dialect of SQL, and may
not work for your database - but I hope the general idea is clear.
The aggregate table "looks like" the base fact table except the
time_id column is missing and there is a new fact_count column.
The insert statement populates the aggregate table from the base fact
table summing the measure columns and counting to populate the
fact_count column. This done while grouping by the remaining
foreign keys to the remaining dimension tables.</p>
<p>Next, some databases recognize star joins - Oracle for instance.
For such databases one should not create indexes, not on the fact table
and not on the aggregate tables. On the other hand, databases that
do not recognize star joins will require indexes on both the
fact table and the aggregate tables.</p>
<p>For our purposes here, the exact name of the aggregate table is not
important; here it is the prefix "agg_l_05_" followed by the base fact table's name
sales_fact_1997. First, the aggregate table name must be different
from the base fact table name. Next, the aggregate table name ought to be
related to the base fact table name both for human eyeballing of what
aggregate is associated with which fact table, but also, as described
below, Mondrian employs a mechanism to automagically recognize which
tables are aggregates of others.</p>
<p>The following example is a collapsed dimension aggregate table
where the time dimension has been rolled up to the month level.</p>
<blockquote><code>
CREATE TABLE <API key> (<br />
product_id INTEGER NOT NULL,<br />
customer_id INTEGER NOT NULL,<br />
promotion_id INTEGER NOT NULL,<br />
store_id INTEGER NOT NULL,<br />
month_of_year SMALLINT(6) NOT NULL,<br />
quarter VARCHAR(30) NOT NULL,<br />
the_year SMALLINT(6) NOT NULL,<br />
store_sales DECIMAL(10,4) NOT NULL,<br />
store_cost DECIMAL(10,4) NOT NULL,<br />
unit_sales DECIMAL(10,4) NOT NULL,<br />
fact_count INTEGER NOT NULL);<br />
<br />
CREATE INDEX i_sls_97_cust_id ON <API key> (customer_id);<br />
CREATE INDEX i_sls_97_prod_id ON <API key> (product_id);<br />
CREATE INDEX i_sls_97_promo_id ON <API key> (promotion_id);<br />
CREATE INDEX i_sls_97_store_id ON <API key> (store_id);<br />
<br />
INSERT INTO <API key> (<br />
product_id,<br />
customer_id,<br />
promotion_id,<br />
store_id,<br />
month_of_year,<br />
quarter,<br />
the_year,<br />
store_sales,<br />
store_cost,<br />
unit_sales,<br />
fact_count)<br />
SELECT<br />
BASE.product_id,<br />
BASE.customer_id,<br />
BASE.promotion_id,<br />
BASE.store_id,<br />
DIM.month_of_year,<br />
DIM.quarter,<br />
DIM.the_year,<br />
SUM(BASE.store_sales) AS store_sales,<br />
SUM(BASE.store_cost) AS store_cost,<br />
SUM(BASE.unit_sales) AS unit_sales,<br />
COUNT(*) AS fact_count<br />
FROM <br />
sales_fact_1997 AS BASE, time_by_day AS DIM<br />
WHERE<br />
BASE.time_id = DIM.time_id<br />
GROUP BY <br />
BASE.product_id,<br />
BASE.customer_id,<br />
BASE.promotion_id,<br />
BASE.store_id,<br />
DIM.month_of_year,<br />
DIM.quarter,<br />
DIM.the_year;
</code></blockquote>
<p>In this case, one can see that the time_id foreign key in the base
fact table has been replaced with the columns: month_of_year, quarter,
and the_year in the aggregate table. There is, as always, the fact_count
column. The measures are inserted as sums. And, the group by clause
is over the remaining foreign keys as well as the imported time
dimension levels.</p>
<p>When creating a collapsed dimension aggregate one might consider creating
indexes for the columns imported from the dimension that was collapsed.</p>
<p>Below is another aggregate table. This one has two lost dimensions (<code>store_id</code> and
<code>promotion_id</code>) as well as collapsed dimension on time
to the quarter level. This shows how aggregate techniques can be
mixed.</p>
<blockquote><code>
CREATE TABLE <API key> (<br />
product_id INTEGER NOT NULL,<br />
customer_id INTEGER NOT NULL,<br />
quarter VARCHAR(30) NOT NULL,<br />
the_year SMALLINT(6) NOT NULL,<br />
store_sales DECIMAL(10,4) NOT NULL,<br />
store_cost DECIMAL(10,4) NOT NULL,<br />
unit_sales DECIMAL(10,4) NOT NULL,<br />
fact_count INTEGER NOT NULL);<br />
<br />
CREATE INDEX i_sls_97_cust_id ON <API key> (customer_id);<br />
CREATE INDEX i_sls_97_prod_id ON <API key> (product_id);<br />
<br />
INSERT INTO <API key> (<br />
product_id,<br />
customer_id,<br />
quarter,<br />
the_year,<br />
store_sales,<br />
store_cost,<br />
unit_sales,<br />
fact_count)<br />
SELECT<br />
BASE.product_id,<br />
BASE.customer_id,<br />
DIM.quarter,<br />
DIM.the_year,<br />
SUM(BASE.store_sales) AS store_sales,<br />
SUM(BASE.store_cost) AS store_cost,<br />
SUM(BASE.unit_sales) AS unit_sales,<br />
COUNT(*) AS fact_count<br />
FROM sales_fact_1997 AS BASE,<br />
time_by_day AS DIM<br />
WHERE<br />
BASE.time_id = DIM.time_id<br />
GROUP BY <br />
BASE.product_id,<br />
BASE.customer_id,<br />
DIM.quarter,<br />
DIM.the_year;
</code></blockquote>
<p>In the above three examples, for the most part the column names
in the aggregate are the same column names that appear in the fact
table and dimension tables. These tables would all be
recognized by the Mondrian
<a href="#Recognizing_default">default</a>
aggregate recognizer.
It is possible to create an aggregate table and name the columns arbitrarily.
For such an aggregate, an
<a href="#<API key>">explicit</a>
Mondrian recognizer must be specified.</p>
<blockquote><code>
CREATE TABLE <API key> (<br />
PRODUCT_ID INTEGER NOT NULL,<br />
CUSTOMER_ID INTEGER NOT NULL,<br />
PROMOTION_ID INTEGER NOT NULL,<br />
STORE_ID INTEGER NOT NULL,<br />
TIME_MONTH SMALLINT(6) NOT NULL,<br />
TIME_QUARTER VARCHAR(30) NOT NULL,<br />
TIME_YEAR SMALLINT(6) NOT NULL,<br />
STORE_SALES_SUM DECIMAL(10,4) NOT NULL,<br />
STORE_COST_SUM DECIMAL(10,4) NOT NULL,<br />
UNIT_SALES_SUM DECIMAL(10,4) NOT NULL,<br />
FACT_COUNT INTEGER NOT NULL);<br />
<br />
CREATE INDEX i_sls_97_cust_id ON <API key> (CUSTOMER_ID);<br />
CREATE INDEX i_sls_97_prod_id ON <API key> (PRODUCT_ID);<br />
CREATE INDEX i_sls_97_promo_id ON <API key> (PROMOTION_ID);<br />
CREATE INDEX i_sls_97_store_id ON <API key> (STORE_ID);<br />
<br />
INSERT INTO <API key> (<br />
PRODUCT_ID,<br />
CUSTOMER_ID,<br />
PROMOTION_ID,<br />
STORE_ID,<br />
TIME_MONTH,<br />
TIME_QUARTER,<br />
TIME_YEAR,<br />
STORE_SALES_SUM,<br />
STORE_COST_SUM,<br />
UNIT_SALES_SUM,<br />
FACT_COUNT)<br />
SELECT<br />
BASE.product_id,<br />
BASE.customer_id,<br />
BASE.promotion_id,<br />
BASE.store_id,<br />
DIM.month_of_year,<br />
DIM.quarter,<br />
DIM.the_year,<br />
SUM(BASE.store_sales) AS STORE_SALES_SUM,<br />
SUM(BASE.store_cost) AS STORE_COST_SUM,<br />
SUM(BASE.unit_sales) AS UNIT_SALES_SUM,<br />
COUNT(*) AS FACT_COUNT<br />
FROM <br />
sales_fact_1997 BASE, time_by_day DIM<br />
WHERE<br />
BASE.time_id = DIM.time_id<br />
GROUP BY <br />
BASE.product_id,<br />
BASE.customer_id,<br />
BASE.promotion_id,<br />
BASE.store_id,<br />
DIM.month_of_year,<br />
DIM.quarter,<br />
DIM.the_year;
</code></blockquote>
<p>This aggregate table has column names that are not identical to those
found in the base fact table and dimension table. It is still a
valid aggregate but Mondrian has to be told how to map its columns
into those of the base fact table.</p>
<p>Sometimes with multiple aggregate tables, one aggregate table is
an aggregate of not only the base fact table but also another
aggregate table; an aggregate table with lost time and product
dimensions (no time_id and product_id foreign keys)
is an aggregate of the base fact table and an aggregate which only
has a lost time dimension (no time_id foreign key).
In this case, one might first build the aggregate with only the
lost time dimension and then build the aggregate with both lost
time and product dimensions from that first aggregate - it will be
faster (in some cases, much faster)
to populate the second aggregate from the first rather than
from the base fact table.</p>
<p>One last note, when creating aggregate tables from the base fact table
pay attention to the size of the numeric columns - what might be big
enough in the base fact table might not be big enough in an aggregate.</p>
<!-- -->
<h3>5. How Mondrian recognizes Aggregate Tables<a name="<API key>"> </a></h3>
<p>Mondrian has to know about the aggregate tables in order to use them.
You can either define an aggregate explicitly, or set up rules to recognize
several aggregate tables at the same time. </p>
<p>How Mondrian recognizes aggregate table
names and columns pretty much dictates how one must name those
table names and columns when creating them in the first place!</p>
<!-- -->
<h1>5.1 Rules<a name="Recognizing_default"> </a></h1>
<p>Rules are templates, designed to work for all fact table names
and their column
names.
These rules are templates of regular expressions
that are instantiated with the names of a fact table
and its columns. In order to describe the rule templates, the names that
instantiate a rule are represented in a rule by bracketing the name
with "${" and "}". As an example,
"abc_${name}_xyz"
is a rule parameterized
by "name". When name is "john" the template becomes
"abc_john_xyz".</p>
<p>The regular expression engine used here and a definition of
the allowed regular expression grammar is found in
the Java regular expression Pattern class:
<a href="http://java.sun.com/j2se/1.4.2/docs/api/java/util/regex/Pattern.html">java.util.regex.Pattern.</a>
</p>
<p>In order that a table be recognized as an aggregate table, Mondrian
must be able to map from the fact table foreign key columns and measure
columns and those in the aggregate table. In addition, Mondrian
must identify the
fact count column in the aggregate and possible level columns (which
would appear in an aggregate table if it had a "collapsed" dimension).
What follows is a description of the steps taken in the identification
of aggregate tables by the default recognizer.
If at any step, a match fails, the table is rejected as an aggregate
table.</p>
<p>Starting off, the candidate aggregate table's name must comply with
the aggregate table name rule. Represented as a template regular
expression the rule is:</p>
<blockquote>
<code>agg_.+_${fact_table_name}</code>
</blockquote>
<p>which is parameterized with the fact table's name.
(In addition, this rule is applied in "ignore case" mode.)
This means that an aggregate table's name must start with
"agg_" (ignoring character case), followed by at least one
character, then the '_' character and, lastly, the name of
the fact table. The ".+" in the template has special meaning
in a regular expression - it matches one or more characters.</p>
<p>As an example of applying the aggregate table name rule,
let the fact table be called
<code>sales_fact_1997</code>, the
<code>Sales</code>
cube's fact table from the FoodMart schema. Applying the
specific fact table name to the regular expression template
creates the following regular expression:</p>
<blockquote>
<code>agg_.+_sales_fact_1997</code>
</blockquote>
<p>This will match the following table names:</p>
<ul>
<li><code><API key></code></li>
<li><code><API key></code></li>
<li><code><API key></code></li>
<li><code><API key></code></li>
<li><code><API key></code></li>
<li><code><API key></code></li>
</ul>
<p>The aggregate table name recognition mechanism has one additional
programmatic feature, one can specify that only a portion of the
base fact table name be used as the basis of template name.
For instance, if the DBA demanded that all fact tables begin with the
string "fact_", e.g.,
"<API key>",
one would certainly not want that string to have to
be part of each aggregate table's name. The aggregate table name
recognition mechanism allows one to specify a regular expression with
one and only one group clause (a group clause is a pattern bracketed
by '(' and ')'). Whatever is matched by the contents of the
group clause is taken to be the part of the fact table name to be
used in the matching template. This regular expression containing the
group clause is specified as the "basename" attribute.
The default Mondrian aggregate table recognizer does not use this feature.
For more information see the associated
<a href="developer_notes.html">developer's note link.</a></p>
<p>After the default recognizer determines that a table's name matches
the aggregate table template regular expression for a given
fact table, it then attempts to match columns. The first column
tested for is the "fact count" column. Here the candidate
aggregate table must have a column called "fact_count" (ignoring
case) and this column's type must be numeric.
The following examples would match as "fact count" columns.</p>
<blockquote>
<code>fact_count<br>
FACT_COUNT<br>
fact_COUNT</code><br>
</blockquote>
<p>Following matching the "fact count" column, the candidate aggregate
table's columns are examined for possible foreign key matches.
For each of the foreign key column names in the fact table it
is determined if there are any character case independent matches
of the aggregate table's columns. Those columns that match are
noted. It is alright if no columns match; the aggregate might
be a "collapsed" dimension aggregate with no fact table foreign
keys remaining.
If the fact table had foreign key columns "store_id" and "time_id",
then the following aggregate table columns (for example) would match:</p>
<ul>
<li><code>time_id</code></li>
<li><code>store_id</code></li>
<li><code>TIME_ID</code></li>
<li><code>STORE_ID</code></li>
<li><code>time_ID</code></li>
<li><code>STORE_id</code></li>
</ul>
<p>At this point, matches are sought for the level and measure columns.
Both of these matching rules are multi-part - each has sub rules;
each rule has more than
one possible regular expression that might match, where a match on any
one is a match.</p>
<p>There are four sub rules for matching level columns. Each is a template
which is parameterized with 1) the fact table's cube's dimension
hierarchy's name, "hierarchy_name", 2) the fact table's cube's dimension
hierarchy's level name, "level_name", 3) the dimension table's level
column name, "level_column_name",
and 4) a usage prefix, "usage_prefix", which in most cases is null:</p>
<ul>
<li><code>${hierarchy_name}_${level_name}</code></li>
<li><code>${hierarchy_name}_${level_column_name}</code></li>
<li><code>${usage_prefix}${level_column_name}</code></li>
<li><code>${level_column_name}</code></li>
</ul>
<p>The "usage_prefix" is the value of the
<code>DimensionUsage</code>'s
or
private <code>Dimension</code>'s
optional
<code>usagePrefix</code>
attribute. It can be the case that a "level_column_name", the name
of a dimension's level column,
is the same for more than one dimension.
During aggregate recognition for collapsed dimension aggregates
where the base fact table has two or more dimensions with common
column names, the attempted recognition will fail unless in the
schema catalog the
<code>usagePrefix</code>
attribute is used to disambiguate those column names.
Of course, one must also remember to prefix the column in the aggregate
table with the same prefix.</p>
<p>As an example of
<code>usagePrefix</code>, consider a fact table named
<code>ORDERS</code> which has two
<code>DimensionUsage</code>s, one for the
<code>CUSTOMER</code> dimension
and the other for the
<code>WHOLESALER</code> dimension where each dimension has a level
column named
<code>CUST_NM</code>. In this case, a collapsed aggregate table
could not include a column named
<code>CUST_NM</code> because there would be no way to tell which
dimension to associate it with. But if in the
<code>CUSTOMER</code>'s
<code>DimensionUsage</code> the
<code>usagePrefix</code> had the value "CU_", while the
<code>WHOLESALER</code>'s
<code>usagePrefix</code> had the value "WS_", and the aggregate
table column was named
<code>WS_CUST_NM</code>, then the recognizer could associate the
column with the
<code>WHOLESALER</code> dimension.
</p>
<p>
In the case of a private
<code>Dimension</code>, a
<code>usagePrefix</code> need only be used if there is a public,
shared
<code>Dimension</code> that has the same name and has a
"level_column_name" that is also the same.
Without the
<code>usagePrefix</code> there would be no way of disambiguating
collapsed dimension aggregate tables.</p>
<p>If any of these parameters have space characters, ' ', these are
mapped to underscore characters, '_', and, similarly, dot characters,
'.', are also mapped to underscores.
So, if the hierarchy_name is "Time", level_name is "Month" and
level_column_name is month_of_year, the possible aggregate table column
names are:</p>
<ul>
<li><code>time_month</code></li>
<li><code>time_month_of_year</code></li>
<li><code>month_of_year</code></li>
</ul>
<p>For this rule, the "hierarchy_name" and "level_name" are converted to
lower case while the "level_column_name" must match exactly.</p>
<p>Lastly, there is the rule for measures. There are three parameters
to matching aggregate columns to measures: 1) the fact table's
cube's measure name, "measure_name", 2) the fact table's cube's measure
column name, "measure_column_name", and 3) the fact table's cube's measure's
aggregator (sum, avg, max, etc.), "aggregate_name".</p>
<ul>
<li><code>${measure_name}</code></li>
<li><code>${measure_column_name}</code></li>
<li><code>${measure_column_name}_${aggregate_name}</code></li>
</ul>
<p>where the measure name is converted to lower case and both the measure
column name and aggregate name are matched as they appear.
If the fact table's cube's measure name was, "Avg Unit Sales",
the fact table's measure
column name
is "unit_sales", and, lastly, the fact table's cube's measure's
aggregate name is "avg", then possible aggregate table column names
that would match are:</p>
<ul>
<li><code>avg_unit_sales</code></li>
<li><code>unit_sales</code></li>
<li><code>unit_sales_avg</code></li>
</ul>
<p>For Mondrian developers there are
<a href="developer_notes.html">additional notes</a>
describing the default rule recognition schema.</p>
<!-- -->
<h1>5.2 Explicit aggregates<a name="<API key>"> </a></h1>
<p>On a per cube basis, in a schema file a user can both include and
exclude aggregate tables. A table that would have been included as
an aggregate by the default rules can be explicitly excluded. A
table that would not be included by the default rules can be
explicitly included. A table that would have only been partially
recognized by the default rules and, therefore, resulted in a
warning or error message, can be explicitly included via rules
specified in the cube's definition.</p>
<p>Below is an example for the FoodMart
<code>
Sales
</code>
cube
with fact table
<code>sales_fact_1997</code>. There are child elements of the
<code>Table</code>
element that deal with aggregate table recognition.</p>
<blockquote>
<code>
<Cube name="Sales"><br />
<Table name="sales_fact_1997"><br />
<AggExclude name="<API key>" /><br />
<AggExclude name="<API key>" /><br />
<AggExclude name="<API key>" /><br />
<br />
<AggName name="<API key>"><br />
<AggFactCount column="FACT_COUNT"/><br />
<AggIgnoreColumn column="admin_one"/><br />
<AggIgnoreColumn column="admin_two"/><br />
<AggForeignKey factColumn="product_id" aggColumn="PRODUCT_ID" /><br />
<AggForeignKey factColumn="customer_id" aggColumn="CUSTOMER_ID" /><br />
<br />
<AggForeignKey factColumn="promotion_id" aggColumn="PROMOTION_ID" /><br />
<AggForeignKey factColumn="store_id" aggColumn="STORE_ID" /><br />
<AggMeasure name="[Measures].[Unit Sales]" column="UNIT_SALES_SUM" /><br />
<AggMeasure name="[Measures].[Store Cost]" column="STORE_COST_SUM" /><br />
<AggMeasure name="[Measures].[Store Sales]" column="STORE_SALES_SUM" /><br />
<AggLevel name="[Time].[Year]" column="TIME_YEAR" /><br />
<br />
<AggLevel name="[Time].[Quarter]" column="TIME_QUARTER" /><br />
<AggLevel name="[Time].[Month]" column="TIME_MONTH" /><br />
</AggName><br />
<AggPattern pattern="<API key>.*"><br />
....<br />
<AggExclude name="<API key>" /><br />
<AggExclude pattern="<API key>.*" /><br />
<br />
</AggPattern><br />
<br />
</Table><br />
.<br />
</Cube>
</code>
</blockquote>
<p>The
<code>AggExclude</code>
elements define tables that should not be considered aggregates of the
fact table. In this case Mondrian is instructed to ignore the tables
<code><API key></code>,
<code><API key></code>
and
<code><API key></code>.
Following the excludes is the
<code>AggName</code>
element which identifies the name of an aggregate
table,
<code><API key></code>,
and rules for mapping names from the fact table and cube to it.
The two
<code>AggIgnoreColumn</code>
elements are used to specifically state to Mondrian that the columns
<code>admin_one</code>
and
<code>admin_two</code>
are known and should be ignored. If these columns were not so
identified, Mondrian at the end of determining the fitness of
the
<code><API key></code>
table to be an aggregate of the
<code>sales_fact_1997</code>
fact table would complain that there were extra unidentified columns
and that the mapping was incomplete.
The
<code>AggForeignKey</code>
elements define mappings from the
<code>sales_fact_1997</code>
fact table foreign key column
names into the
<code><API key></code>
aggregate table column names.</p>
<p>Both the
<code>
AggMeasure</code>
and
<code>
AggLevel</code>
elements map "logical" name, names defined in the cube's schema,
to the aggregate table's column names.
An aggregate table does not have to have all of the measures
that are found in the base fact table, so it is not a requirement
that all of the fact table measures appear as
<code>
AggMeasure</code>
mappings, though it will certainly be the most common case.
The most notable exception are
<code>distinct-count</code>
measures; such a measure can be aggregated, but one can not
in general aggregate further on the measure - the "distinctness" of the
measure has been lost during the first aggregation.</p>
<p>The
<code>
AggLevel</code> entries correspond to collapsed dimensions. For each collapsed
dimension there is a hierarchy of levels spanning from the top
level down to some intermediate level (with no gaps).</p>
<p>The
<code>
AggName</code>
element is followed by an
<code>AggPattern</code>
element.
This matches candidate aggregate table names using a
regular expression. Included as child elements of the
<code>
AggPattern</code>
element are two
<code>
AggExclude</code>
elements. These specifically state what table names should not
be considered by this
<code>AggPattern</code>
element.</p>
<p>In a given
<code>Table</code>
element, all of the
<code>AggExclude</code>
are applied first, followed by the
<code>AggName</code>
element rules and then the
<code>AggPattern</code>
rules.
In the case where the same fact table is used by multiple cubes,
the above still applies, but it is applied across all of the aggregation
rules in all of the multiple cube's
<code>Table</code>
elements.
The first "Agg" element,
name or pattern, that matches per candidate aggregate table
name has its associated rules applied.</p>
<p>Most of the time, the scope of these
include/exclude statements apply only to the cube in question, but
not always. A cube has a fact table and it is the characteristics
of the fact table (like column names) against
which some of the aggregate table rules are applied. But, a fact table
can actually be the basis of more than one cube. In the FoodMart
schema the
<code>sales_fact_1997</code>
fact table applies to both the
<code>Sales</code>
and the
<code>Sales Ragged</code>
cubes.
What this means is that any explicit rules defined in the
<code>Sales</code>
cube also applies to the
<code>Sales Ragged</code>
cube and vice versa.</p>
<p>One feature of the explicit recognizer is very useful. With a single
line in the cubes definition in the schema file,
one can force Mondrian not to recognize any aggregate tables
for the cube's fact table. As an example, for the FoodMart Sales cube
the following excludes all aggregate tables because the regular expression
pattern
<code>".*"</code>
matches all candidate aggregate table names.</p>
<blockquote>
<code>
<Table name="sales_fact_1997" >
<AggExclude pattern=".*" />
</Table>
</code>
</blockquote>
<p>During aggregate table recognition,
rather than fail silently, Mondrian is rather noisy about things
it can not figure out.</p>
<!-- -->
<h3>
6. Aggregate tables and parent-child hierarchies<a name="<API key>"> </a></h3>
<p>
A <a href="schema.html#<API key>">parent-child hierarchy</a> is a
special kind of hierarchy where members can have arbitrary depth. The classic
example of a parent-child hierarchy is an employee org-chart.</p>
<p>
When dealing with parent-child hierarchies, the challenge is to roll up measures
of child members into parent members. For example, when considering an employee
Bill who is head of a department, we want to report not Bill's salary, but
Bill's salary plus the sum of his direct and indirect reports (Eric, Mark and
Carla). It is difficult to generate efficient SQL to do this rollup, so Mondrian
provides a special structure called a <a href="schema.html#Closure_tables">
closure table</a>, which contains the expanded contents of the hierarchy.</p>
<p>
A closure table serves a similar purpose to an aggregate table: it contains a
redundant copy of the data in the database, organized in such a way that
Mondrian can access the data efficiently. An aggregate table speeds up
aggregation, whereas a closure table makes it more efficient to compute
hierarchical rollups.</p>
<p>
Supposing that a schema contains a large fact table, and one of the hierarchies
is a parent-child hierarchy. Is it possible to make aggregate tables and closure
tables work together, to get better performance? Let's consider a concrete
example.</p>
<blockquote>
<code>Cube:<br>
[Salary]<br>
<br>
Dimensions:<br>
[Employee], with level [Employee]<br>
[Time], with levels [Year], [Quarter], [Month], [Day]<br>
<br>
Fact table:<br>
salary (employee_id, time_id, dollars)<br>
<br>
Parent-child dimension table:<br>
employee (employee_id, supervisor_id, name)</code></blockquote>
<br />
<table id="table6" class="grayTable" width="200">
<tr>
<td colspan="3"><b>employee</b></td>
</tr>
<tr>
<td><b>supervisor_id</b></td>
<td><b>employee_id</b></td>
<td><b>name</b></td>
</tr>
<tr>
<td>null</td>
<td>1</td>
<td>Frank</td>
</tr>
<tr>
<td>1</td>
<td>2</td>
<td>Bill</td>
</tr>
<tr>
<td>2</td>
<td>3</td>
<td>Eric</td>
</tr>
<tr>
<td>1</td>
<td>4</td>
<td>Jane</td>
</tr>
<tr>
<td>3</td>
<td>5</td>
<td>Mark</td>
</tr>
<tr>
<td>2</td>
<td>6</td>
<td>Carla</td>
</tr>
</table>
<blockquote><code>Closure table:<br>
employee_closure (employee_id, supervisor_id, depth)</code></blockquote>
<table id="table7" class="grayTable" width="250">
<tr>
<td colspan="3"><b>employee_closure</b></td>
</tr>
<tr>
<td><b>supervisor_id</b></td>
<td><b>employee_id</b></td>
<td><b>distance</b></td>
</tr>
<tr>
<td>1</td>
<td>1</td>
<td>0</td>
</tr>
<tr>
<td>1</td>
<td>2</td>
<td>1</td>
</tr>
<tr>
<td>1</td>
<td>3</td>
<td>2</td>
</tr>
<tr>
<td>1</td>
<td>4</td>
<td>1</td>
</tr>
<tr>
<td>1</td>
<td>5</td>
<td>3</td>
</tr>
<tr>
<td>1</td>
<td>6</td>
<td>2</td>
</tr>
<tr>
<td>2</td>
<td>2</td>
<td>0</td>
</tr>
<tr>
<td>2</td>
<td>3</td>
<td>1</td>
</tr>
<tr>
<td>2</td>
<td>5</td>
<td>2</td>
</tr>
<tr>
<td>2</td>
<td>6</td>
<td>1</td>
</tr>
<tr>
<td>3</td>
<td>3</td>
<td>0</td>
</tr>
<tr>
<td>3</td>
<td>5</td>
<td>1</td>
</tr>
<tr>
<td>4</td>
<td>4</td>
<td>0</td>
</tr>
<tr>
<td>5</td>
<td>5</td>
<td>0</td>
</tr>
<tr>
<td>6</td>
<td>6</td>
<td>0</td>
</tr>
</table>
<blockquote>
<p><code>Regular dimension table:<br>
time (year, month, quarter, time_id)</code></p>
</blockquote>
<!-- -->
<h1>
6.1 Aggregate tables at the leaf level of a parent-child hierarchy<a name="<API key>"> </a></h1>
<p>
The simplest option is to create an aggregate table which joins at the leaf
level of the parent-child hierarchy. The following aggregate table is for leaf
members of the <code>[Employee]</code> hierarchy, and the <code>[Year]</code>
level of the <code>[Time]</code> hierarchy.</p>
<blockquote>
<code>Aggregate table:<br>
<API key> (employee_id, time_year,
sum_dollars)<br>
<br>
INSERT INTO <API key><br>
SELECT <br>
salary.employee_id,<br>
time.year AS time_year,<br>
sum(salary.dollars) AS sum_dollars<br>
FROM salary,<br>
time<br>
WHERE time.time_id = salary.time_id<br>
GROUP BY salary.employee_id, time.year</code>
</blockquote>
<p>
Mondrian can use the aggregate table to retrieve salaries of leaf employees
(without rolling up salaries of child employees). But because the aggregate
table has the same foreign key as the <code>salary</code> fact table, Mondrian
is able to automatically join <code>salary.employee_id</code> to either <code>
<API key>.employee_id</code> or <code>
<API key>.supervisor_id</code> to rollup employees
efficiently.</p>
<!-- -->
<h1>
6.2 Combined closure and aggregate tables<a name="<API key>"> </a></h1>
<p>
A more advanced option is to combine the closure table and aggregate table into
one:</p>
<blockquote>
<code>Aggregate table:<br>
agg_salary_Employee$Closure_Time_Year (supervisor_id,
time_year, sum_dollars)<br>
<br>
INSERT INTO agg_salary_Employee$Closure_Time_Year<br>
SELECT <br>
ec.supervisor_id,<br>
time.year AS time_year,<br>
sum(salary.dollars) AS sum_dollars<br>
FROM employee_closure AS ec,<br>
salary,<br>
time<br>
WHERE ec.employee_id = salary.employee_id<br>
&nbsp;&nbsp;&nbsp; AND time.time_id = salary.time_id<br>
GROUP BY ec.supervisor_id, time.year</code>
</blockquote>
<p>
The <code>agg_salary_Employee$Closure_Time_Year</code> aggregate table contains
the salary of every employee, rolled up to include their direct and indirect
reports, aggregated to the <code>[Year]</code> level of the <code>[Time]</code>
dimension.</p>
<!-- -->
<h1>
6.2.1 The trick: How combined closure and aggregate tables work</h1>
<p>
Incidentally, this works based upon a 'trick' in Mondrian's internals. Whenever
Mondrian sees a closure table, it creates an auxiliary dimension behind the
scenes. In the case of the <code>[Employee]</code> hierarchy and its <code>
employee_closure</code> table, the auxiliary dimension is called <code>[Employee$Closure]</code>.</p>
<blockquote>
<code>Dimension [Employee$Closure], levels [supervisor_id], [employee_id]
</code>
</blockquote>
<p>
When an MDX query evaluates a cell which uses a rolled up salary measure,
Mondrian translates the coordinates of that cell in the <code>[Employee]</code>
dimension into a corresponding coordinate in the <code>[Employee$Closure]</code>
dimension. This translation happens <em>before</em>
Mondrian starts to search for a suitable aggregate table, so if your aggregate
table contains the name of the auxiliary hierarchy (as <code>
agg_salary_Employee$Closure_Time_Year</code> contains the name of the [Employee$Closure]
hierarchy) it will find and use the aggregate table in the ordinary way.</p>
<!-- -->
<h3>
7. How Mondrian uses aggregate tables<a name="<API key>"> </a></h3>
<!-- -->
<h1>
7.1 Choosing between aggregate tables<a name="<API key>"> </a></h1>
<p>
If more than one aggregate table matches a particular query, Mondrian needs to
choose between them.</p>
<p>
If there is an aggregate table of the same granularity as the query, Mondrian
will use it. If there is no aggregate table at
the desired granularity, Mondrian will pick an aggregate table of lower
granularity and roll up from it. In general, Mondrian chooses the aggregate
table with the fewest rows, which is typically the aggregate table with the
fewest extra dimensions. See property <code>
<a href="configuration.html#Properties">mondrian.rolap.aggregates.ChooseByVolume</a></code>.</p>
<!-- -->
<h1>
7.2 Distinct count<a name="Distinct_count"> </a></h1>
<p>There is an important exception for distinct-count measures: they cannot in general be
rolled up over arbitrary dimensions. To see why, consider the case of a supermarket chain which has two
stores in the same city. Suppose that Store A has 1000 visits from 800 distinct
customers in the month of July, while Store B has 1500 visits from 900 distinct
customers. Clearly the two stores had a total of 2500 customer visits between
them, but how many distinct customers? We can say that there were at least 900,
and maybe as many as 1700, but assuming that some customers visit both stores,
the real total will be somewhere in between. "Distinct customers" is an
example of a distinct-count measure, and cannot be deduced by rolling up
subtotals. You have to go back to the raw data in the fact table.</p>
<p>There is a special case where it is acceptable to roll up distinct count
measures. Suppose that we know that in July, this city's stores (Store A and B
combined) have visits from 600 distinct female customers and 700 distinct male
customers. Can we say that the number of distinct customers in July is 1300? Yes
we can, because we know that the sets of male and female customers cannot
possibly overlap. In technical terms, gender is <i>functionally dependent on</i>
customer id.</p>
<p>The rule for rolling up distinct measures can be stated as follows:</p>
<blockquote>
A distinct count measure over key <i>k</i> can be computed by rolling up
more granular subtotals only if the attributes which are being rolled up are
functionally dependent on <i>k</i>.
</blockquote>
<p>
Even with this special case, it is difficult to create enough aggregate tables
to satisfy every possible query. When evaluating a distinct-count measure, Mondrian can only use
an aggregate
table if it has the same logical/level granularity as the cell being
requested, or can be rolled up to that granularity only by dropping functionally
dependent attributes. If
there is no aggregate table of the desired granularity, Mondrian goes instead
against the fact table. </p>
<p>
This has implications for aggregate design. If your application makes extensive
use of distinct-count measures, you will need to create an aggregate table for
each granularity where it is used. That could be a lot of aggregate tables! (We
hope to have a better solution for this problem in future releases.)</p>
<p>
That said, Mondrian will rollup measures in an aggregate table that contains
one or more distinct-count measures if none of the distinct-count measures
are requested. In that respect an aggregate table containing distinct-count
measures are just like any other aggregate table as long as the
distinct-count measures are not needed. And once in memory, distinct-count
measures are cached like other measures, and can be used for future queries.</p>
<p>
When building an aggregate table that will contain a distinct-count measure,
the measure must be rolled up to a logical dimension level, which is
to say that the aggregate table must be a collapsed dimension aggregate.
If it is rolled up only
to the dimension's foreign key, there is no guarantee that the foreign key
is at the same granularity as the lowest logical level, which is what
is used by MDX requests. So for an aggregate table that only rolls the
distinct-count measure to the foreign key granularity, a request
of that distinct-count measure may result in further rollup and, therefore,
an error.
</p>
<p>
Consider the following aggregate table that has lost dimensions
<code>customer_id</code>, <code>product_id</code>, <code>promotion_id</code>
and <code>store_id</code>.
</p>
<blockquote>
<code>
INSERT INTO "<API key>" (<br>
"time_id",<br>
"store_sales",<br>
"store_cost",<br>
"unit_sales",<br>
"customer_count",<br>
"fact_count"<br>
) SELECT<br>
"time_id",<br>
SUM("store_sales") AS "store_sales",<br>
SUM("store_cost") AS "store_cost",<br>
SUM("unit_sales") AS "unit_sales",<br>
COUNT(DISTINCT "customer_id") AS "customer_count",<br>
COUNT(*) AS "fact_count"<br>
FROM "sales_fact_1997"<br>
GROUP BY "time_id";</code>
</blockquote>
<p>
This aggregate table is useless for computing the <code>"customer_count"</code>
measure. Why? The distinct-count measure is rolled up to the
<code>time_id</code> granularity, the lowest level granularity of the
physical database table <code>time_by_day</code>. Even a query against the lowest level in the
<code>Time</code> dimension would require a rollup from <code>time_id</code> to
<code>month_of_year</code>, and this is impossible to perform.
</p>
<p>
Now consider this collapsed <code>Time</code> dimension aggregate table
that has the same lost dimensions <code>customer_id</code>,
<code>product_id</code>, <code>promotion_id</code> and <code>store_id</code>.
The <code>time_id</code> foreign key is no longer present, rather it
has been replaced with the logical levels <code>the_year</code>,
<code>quarter</code> and <code>month_of_year</code>.
</p>
<blockquote>
<code>
INSERT INTO "<API key>" ( <br>
"month_of_year", <br>
"quarter", <br>
"the_year", <br>
"store_sales", <br>
"store_cost", <br>
"unit_sales", <br>
"customer_count", <br>
"fact_count" <br>
) SELECT <br>
"D"."month_of_year", <br>
"D"."quarter", <br>
"D"."the_year", <br>
SUM("B"."store_sales") AS "store_sales", <br>
SUM("B"."store_cost") AS "store_cost", <br>
SUM("B"."unit_sales") AS "unit_sales", <br>
COUNT(DISTINCT "customer_id") AS "customer_count", <br>
COUNT(*) AS fact_count <br>
FROM<br>
"sales_fact_1997" "B",<br>
"time_by_day" "D" <br>
WHERE <br>
"B"."time_id" = "D"."time_id" <br>
GROUP BY <br>
"D"."month_of_year", <br>
"D"."quarter", <br>
"D"."the_year";</code>
</blockquote>
<p>
This aggregate table of the distinct-count measure can be used to fulfill
a query as long as the query specifies the
<code>Time</code> dimension down to the
<code>month_of_year</code> level.</p>
<p>
The general rule when building aggregate tables involving distinct-count
measures is that there can be NO foreign keys remaining in the aggregate
table - for each base table foreign key, it must either be dropped, a
lost dimension aggregate, or it must be replaced with levels, a collapsed
dimension aggregate.
In fact, this rule, though not required, is useful to follow when
creating any aggregate table; there is no value in maintaining
foreign keys in aggregate tables. They should be replaced by collapsing
to levels unless the larger memory used by such aggregate tables is
too much for one's database system.
</p>
<p>
A better design for the aggregate table would include a few attributes which are
functionally dependent on <code>customer_id</code>, the key for the
distinct-count measure:
</p>
<blockquote>
<code>
INSERT INTO "<API key>" (<br>
"country",<br>
"gender",<br>
"marital_status",<br>
"month_of_year", <br>
"quarter", <br>
"the_year", <br>
"store_sales", <br>
"store_cost", <br>
"unit_sales", <br>
"customer_count", <br>
"fact_count" <br>
) SELECT <br>
"D"."month_of_year", <br>
"D"."quarter", <br>
"D"."the_year", <br>
SUM("B"."store_sales") AS "store_sales", <br>
SUM("B"."store_cost") AS "store_cost", <br>
SUM("B"."unit_sales") AS "unit_sales", <br>
COUNT(DISTINCT "customer_id") AS "customer_count", <br>
COUNT(*) AS fact_count <br>
FROM<br>
"sales_fact_1997" "B",<br>
"time_by_day" "D",<br>
"customer" "C"<br>
WHERE <br>
"B"."time_id" = "D"."time_id"<br>
AND "B"."customer_id" = "C"."customer_id"<br>
GROUP BY <br>
"C"."country",<br>
"C"."gender",<br>
"C"."marital_status",<br>
"D"."month_of_year", <br>
"D"."quarter", <br>
"D"."the_year";</code>
</blockquote>
<p>
The added attributes are <code>"country"</code>, <code>"gender"</code> and <code>
"marital_status"</code>. This table has only approximately 12x the number of rows
of the previous aggregate table (3 values of <code>country</code> x 2 values of
<code>gender</code> x 2 values of <code>marital_status</code>) but can answer
many more potential queries.</p>
<!-- -->
<h3>
8. Tools for designing and maintaining aggregate tables<a name="<API key>"> </a></h3>
<p>
Aggregate tables are difficult to design and maintain. We make no bones about it.
But this is the first release in which aggregate tables have been available, and
we decided to get the internals right rather than building a toolset to make
them easy to use.</p>
<p>
Unless your dataset is very large, Mondrian's performance will be just fine
without aggregate tables. If Mondrian isn't performing well, you should first
check that your DBMS is well-tuned: see our guide to
<a href="performance.html">optimizing performance</a>. If you decide to
build aggregate tables anyway, we don't offer any tools to help administrators
design them, so unless you are blessed with superhuman patience and intuition,
using them won't be smooth sailing.
</p>
<p>
Here are some ideas for tools we'd like to build in the future. I'm thinking of these being utilities, not part of the core runtime engine.
There's plenty of room to wrap these utilities in nice graphical interfaces,
make them smarter.</p>
<!-- -->
<h1>
8.1 AggGen (aggregate generator)<a name="<API key>"> </a></h1>
<p>
<code>AggGen</code> is a tool that generates SQL to support the
creation and maintenance of aggregate tables, and would give a
template for the creation of materialized views for databases that
support those. Given an MDX query, the generated create/insert SQL is
optimal for the given query. The generated SQL covers both the "lost"
and "collapsed" dimensions. For usage, see the documentation for
<a href="command_runner.html#AggGen">CmdRunner</a>.</p>
<!-- -->
<h1>
8.2 Aggregate table populater<a name="<API key>"> </a></h1>
<p>This utility populates (or generates INSERT statements to populate) the agg
tables.</p>
<p>For extra credit: populate the tables in topological order, so that
higher level aggregations can be built from lower level aggregations.
(See [<a href="#Ref_AAD96">AAD<sup>+</sup>96</a>]).</p>
<!-- -->
<h1>
8.3 Script generator<a name="Script_generator"> </a></h1>
<p>This utility generates a script containing CREATE TABLE and CREATE INDEX
statements for all possible aggregate tables (including indexes), XML for these
tables, and comments indicating the estimated number of rows in these
tables. Clearly this will be a huge script, and it would be ridiculous to
create all of these tables. The person designing the schema could copy/paste
from this file to create their own schema.</p>
<!-- -->
<h1>8.4 Recommender<a name="Recommender"> </a></h1>
<p>This utility (maybe graphical, maybe text-based) recommends a set of
aggregate tables. This is essentially an optimization algorithm, and it is
described in the academic literature [<a href="#Ref_AAD96">AAD<sup>+</sup>96</a>]. Constraints on the optimization
process are the amount of storage required, the estimated time to populate
the agg tables.</p><p>The algorithm could also take into account usage
information. A set of sample queries could be an input to the utility, or the
utility could run as a background task, consuming the query log and dynamically
making recommendations.</p>
<!-- -->
<h1>8.5 Online/offline control<a name="<API key>"> </a></h1>
<p>This utility would allow agg tables to be taken offline/online while Mondrian is still
running.</p>
<!-- -->
<h3>9. Properties that affect aggregates<a name="<API key>"> </a></h3>
<p>
Mondrian has properties that control the behavior of its aggregate
table sub-system. (You can find the full set of properties in the
<a href="configuration.html">Configuration Guide</a>.)</p>
<table class="grayTable" width="500" >
<tr>
<td><b>Property</b></td>
<td><b>Type</b></td>
<td><b>Default Value</b></td>
<td><b>Description</b></td>
</tr>
<tr>
<td><code>
<a href="api/mondrian/olap/MondrianProperties.html#UseAggregates">mondrian. rolap. aggregates. Use</a></code></td>
<td><code>boolean</code></td>
<td><code>false</code></td>
<td>
If set to true, then Mondrian uses any aggregate tables that have
been read.
These tables are then candidates
for use in fulfilling MDX queries.
If set to false, then no aggregate table related activity
takes place in Mondrian.
</td>
</tr>
<tr>
<td><code>
<a href="api/mondrian/olap/MondrianProperties.html#ReadAggregates">mondrian. rolap. aggregates. Read</a></code></td>
<td><code>boolean</code></td>
<td><code>false</code></td>
<td>
If set to true, then Mondrian reads the database schema and
recognizes aggregate tables. These tables are then candidates
for use in fulfilling MDX queries.
If set to false, then aggregate tables will not be read from
the database. Of course, after aggregate tables have been read,
they stay read, so setting this property to false after starting
with the property being true has no effect.
Mondrian will not actually use the aggregate tables unless the
<code>mondrian.rolap. aggregates.Use</code> property is set to true.
</td>
</tr>
<tr>
<td><code>
<a href="api/mondrian/olap/MondrianProperties.html#<API key>">mondrian. rolap. aggregates. ChooseByVolume</a></code></td>
<td><code>boolean</code></td>
<td><code>false</code></td>
<td>
Currently, Mondrian supports two algorithms for selecting which
aggregate table to use: the aggregate with smallest row count
or the aggregate with smallest volume (row count * row size).
If set to false, then row count is used. If true, then volume
is used.
</td>
</tr>
<tr>
<td><code>
<a href="api/mondrian/olap/MondrianProperties.html#AggregateRules">mondrian. rolap. aggregates. rules</a></code></td>
<td><code>resource or url</code></td>
<td><code>/Default Rules.xml</code></td>
<td>
This is a developer property, not a user property.
Setting this to a url (e.g., <code>file://c:/myrules.xml</code>) allows one
to use their own "default" Mondrian aggregate table recognition
rules.
In general use this should never be changed from the default value.
</td>
</tr>
<tr>
<td><code>
<a href="api/mondrian/olap/MondrianProperties.html#AggregateRuleTag">mondrian. rolap. aggregates. rule. tag</a></code></td>
<td><code>string</code></td>
<td><code>default</code></td>
<td>
This is also a developer property. It allows one to pick which
named rule in the default rule file to use.
In general use this should never be changed from the default value.
</td>
</tr>
</table>
<!-- -->
<h3>10. References<a name="References"> </a></h3>
<table id="table1" class="grayTable" width="500" >
<tr>
<td>[AAD<sup>+</sup>96]<a name="Ref_AAD96"> </a></td>
<td>S. Agarwal, R. Agrawal, P. M. Deshpande, A. Gupta, J. F. Naughton,
R. Ramakrishnan, and S. Sarawagi. On the computation of multidimensional
aggregates. In Proc. 22nd VLDB, pages 506-521, Mumbai, Sept. 1996. [<a href="http:
</tr>
<tr>
<td>[ABDGHLS99]<a name="ABDGHLS99"> </a></td>
<td>J. Albrecht, A. Bauer, O. Deyerling, H. Gunze, W. Hummer, W. Lehner,
L. Schlesinger. Management of Multidimensional Aggregates for Efficient
Online Analytical Processing. Proceedings of International Database
Engineering and Applications Symposium, 1999, pp. 156&ndash;164. [<a href="http://www6.informatik.uni-erlangen.de/research/projects/cubestar/english/Overview.html">pdf</a>]</td>
</tr>
<tr>
<td>[GBLP96]<a name="GBLP96"> </a></td>
<td>J. Gray, A. Bosworth, A. Layman, and H. Pirahesh. Data cube: A
relational aggregation operator generalizing group-by, cross-tab, and
sub-totals. In Proc. 12th ICDE, pages 152-159, New Orleans, March 1996.
[<a target="_blank" href="http://paul.rutgers.edu/~aminabdu/cs541/cube_op.pdf">pdf</a>]</td>
</tr>
<tr>
<td>[HNSS95]<a name="HNSS95"> </a></td>
<td>P.J. Haas, J.F. Naughton, S. Seshadri, and L. Stokes. Sampling-based
estimation of the number of distinct values of an attribute. Proceedings
of the Eighth International Conference on Very Large Databases (VLDB),
pages 311&ndash;322, Zurich, Switzerland, September 1995. [<a href="http:
</tr>
<tr>
<td>[Rittman05]<a name="Rittman05"> </a></td>
<td>M. Rittman. Compressed Composites (Oracle 10g Compression)
Explained. Online article. [<a href="http:
</tr>
<tr>
<td>[SDNR96]<a name="SDNR96"> </a></td>
<td>Amit Shukla, Prasad Deshpande, Jeffrey F. Naughton, Karthikeyan
Ramasamy. Storage Estimation for Multidimensional Aggregates in the
Presence of Hierarchies. VLDB 1996, pp. 522&ndash;531. [<a href="http:
</tr>
</table>
<br />
<hr noshade size="1"/>
<p>
Author: Richard Emberson; last modified by Julian Hyde, March 2008.<br/>
Version: $Id$
(<a href="http://p4web.eigenbase.org/open/mondrian/doc/aggregate_tables.html?ac=22">log</a>)<br/>
Copyright (C) 2005-2009 Julian Hyde and others
</p>
<br />
<!-- doc2web end -->
</body>
</html> |
package org.eclipse.che.commons.env;
import org.eclipse.che.commons.lang.concurrent.<API key>;
import org.eclipse.che.commons.subject.Subject;
/**
 * Defines a component that holds variables of type {@link ThreadLocal} whose value is
 * required by the component to work normally and cannot be recovered. It is mainly used
 * when a task must run asynchronously: to guarantee the task executes under the same
 * conditions as a synchronous call, the thread context is transferred from the original
 * thread to the executor thread.
 */
public class EnvironmentContext {

    /** Per-thread holder; each thread lazily starts with a fresh, empty context. */
    private static ThreadLocal<EnvironmentContext> current =
            ThreadLocal.withInitial(EnvironmentContext::new);

    static {
        // Register the holder so its value is propagated to threads spawned through
        // the propagation-aware executors.
        <API key>.addThreadLocal(current);
    }

    /** Returns the context bound to the calling thread, creating an empty one if absent. */
    public static EnvironmentContext getCurrent() {
        return current.get();
    }

    /** Binds the given context to the calling thread. */
    public static void setCurrent(EnvironmentContext environment) {
        current.set(environment);
    }

    /** Clears the calling thread's context; the next {@link #getCurrent()} recreates it. */
    public static void reset() {
        current.remove();
    }

    // The subject associated with this context; may be null until explicitly set.
    private Subject subject;

    /** Creates an empty context with no subject bound. */
    public EnvironmentContext() {
    }

    /**
     * Copy constructor. Note that because {@link #getSubject()} substitutes
     * {@link Subject#ANONYMOUS} for a missing subject, the copy of a subject-less
     * context carries {@code ANONYMOUS} explicitly.
     */
    public EnvironmentContext(EnvironmentContext other) {
        this.subject = other.getSubject();
    }

    /** Returns the subject, or {@link Subject#ANONYMOUS} when none has been set. */
    public Subject getSubject() {
        return subject != null ? subject : Subject.ANONYMOUS;
    }

    /** Sets the subject for this context. */
    public void setSubject(Subject subject) {
        this.subject = subject;
    }
}
package io.minimum.minecraft.superbvote.configuration;
import io.minimum.minecraft.superbvote.configuration.message.OfflineVoteMessage;
import lombok.Data;
/**
 * Immutable configuration holder for paginated offline-vote output — presumably the
 * per-page layout of a vote listing; confirm against callers. Lombok's {@code @Data}
 * generates the getters, {@code equals}/{@code hashCode}, {@code toString} and the
 * required-args constructor for the final fields below (constructor parameter order
 * follows field declaration order).
 */
@Data
public class <API key> {
    /** Number of entries rendered on a single page. */
    private final int perPage;
    /** Message rendered once at the top of a page. */
    private final OfflineVoteMessage header;
    /** Message template rendered for each individual entry. */
    private final OfflineVoteMessage entryText;
}
# ingraph workflow

## Initial query compilation
* :page_facing_up: `openCypher query specification`: the query as a string.
* :<API key>: `query parser`: an [Xtext](https:
* :page_facing_up: `query syntax graph`: the abstract syntax graph of the query, represented in an EMF model.
* :<API key>: `compiler`: transforms the syntax graph to a query plan.
* :page_facing_up: `gplan [unresolved]`: a query plan in relational graph algebra without resolved variables for aliases (e.g. in `WITH x AS y RETURN y.a AS b`).
* :<API key>: `resolver`: resolves unresolved variables by tracking aliases.
* :page_facing_up: `gplan [resolved]`: a query plan in relational graph algebra with resolved variables.
## Engine-specific query compilation and execution
* relational engines:
* :<API key>: `gplan to nplan transformer`: transforms the query plan to a join-based plan that can be evaluated with relational techniques.
* :page_facing_up: `nplan`: a query plan in join-based algebra.
* :<API key>: `schema inferencer`: transforms a join-based plan to a flat plan that contains properties required for computations.
* :page_facing_up: `fplan`: a query plan in (join-based) flat relational algebra.
* :<API key>: `ire`: incremental relational engine.
* :<API key>: `nre`: a naive relational engine.
* search-based engine:
* :<API key>: `gplan to constraints transformer`: transforms the query plan to a set of constraints.
* :page_facing_up: `constraints`: Datalog-style constraints that describe the query.
* :<API key>: `planner`: transforms the query plan to a list of operations.
* :page_facing_up: `operations`: operations that define the search plan for the local search engine.
* :<API key>: `local search engine`: an executor for local search. |
package org.eclipse.persistence.internal.jpa.parsing;
import org.eclipse.persistence.expressions.*;
import org.eclipse.persistence.exceptions.JPQLException;
/**
 * INTERNAL
 * <p><b>Purpose</b>: Represents the '=' comparison operator in an EJBQL query.
 * <p><b>Responsibilities</b>:<ul>
 * <li> Validate that an enum operand is only compared against another enum
 * <li> Generate the corresponding equality expression
 * </ul>
 * @author Jon Driscoll and Joel Lucuik
 * @since TopLink 4.0
 */
public class EqualsNode extends BinaryOperatorNode {

    /** Creates an empty '=' node; operands are attached by the parser. */
    public EqualsNode() {
        super();
    }

    /**
     * INTERNAL
     * Validates both operands and assigns this node the boolean type.
     * Comparing an enum operand with a non-enum operand raises a
     * {@link JPQLException} whose message names the enum type first.
     */
    public void validate(ParseTreeContext context) {
        super.validate(context);
        TypeHelper helper = context.getTypeHelper();
        Object lhsType = left.getType();
        Object rhsType = right.getType();
        boolean lhsIsEnum = helper.isEnumType(lhsType);
        boolean rhsIsEnum = helper.isEnumType(rhsType);
        if (lhsIsEnum && !rhsIsEnum) {
            throw JPQLException.<API key>(
                context.getQueryInfo(), getLine(), getColumn(),
                helper.getTypeName(lhsType), helper.getTypeName(rhsType));
        } else if (rhsIsEnum && !lhsIsEnum) {
            throw JPQLException.<API key>(
                context.getQueryInfo(), getLine(), getColumn(),
                helper.getTypeName(rhsType), helper.getTypeName(lhsType));
        }
        setType(helper.getBooleanType());
    }

    /**
     * Builds the expression for this node: the left operand's expression is
     * generated first, then compared for equality against the right operand's.
     * The shared expression builder is propagated through {@code context}.
     */
    public Expression generateExpression(GenerationContext context) {
        Expression lhs = getLeft().generateExpression(context);
        Expression rhs = getRight().generateExpression(context);
        return lhs.equal(rhs);
    }

    /**
     * INTERNAL
     * Returns the textual form of this node, e.g. {@code a = b}.
     */
    public String getAsString() {
        return String.join(" = ", left.getAsString(), right.getAsString());
    }
}
package org.jboss.forge.addon.parser.java.ui;
import java.io.PrintStream;
import java.util.LinkedHashSet;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.Callable;
import javax.inject.Inject;
import org.jboss.forge.addon.facets.constraints.FacetConstraint;
import org.jboss.forge.addon.facets.constraints.FacetConstraintType;
import org.jboss.forge.addon.facets.constraints.FacetConstraints;
import org.jboss.forge.addon.parser.java.facets.JavaSourceFacet;
import org.jboss.forge.addon.parser.java.resources.JavaResource;
import org.jboss.forge.addon.parser.java.resources.JavaResourceVisitor;
import org.jboss.forge.addon.projects.Project;
import org.jboss.forge.addon.projects.ProjectFactory;
import org.jboss.forge.addon.projects.ui.<API key>;
import org.jboss.forge.addon.resource.ResourceException;
import org.jboss.forge.addon.resource.visit.VisitContext;
import org.jboss.forge.addon.ui.context.UIBuilder;
import org.jboss.forge.addon.ui.context.UIContext;
import org.jboss.forge.addon.ui.context.UIExecutionContext;
import org.jboss.forge.addon.ui.context.UIValidationContext;
import org.jboss.forge.addon.ui.hints.InputType;
import org.jboss.forge.addon.ui.input.InputComponent;
import org.jboss.forge.addon.ui.input.UICompleter;
import org.jboss.forge.addon.ui.input.UIInput;
import org.jboss.forge.addon.ui.metadata.UICommandMetadata;
import org.jboss.forge.addon.ui.metadata.WithAttributes;
import org.jboss.forge.addon.ui.output.UIOutput;
import org.jboss.forge.addon.ui.result.Result;
import org.jboss.forge.addon.ui.result.Results;
import org.jboss.forge.addon.ui.util.Categories;
import org.jboss.forge.addon.ui.util.Metadata;
import org.jboss.forge.addon.ui.validate.UIValidator;
import org.jboss.forge.roaster.Roaster;
import org.jboss.forge.roaster.model.SyntaxError;
import org.jboss.forge.roaster.model.source.JavaSource;
import org.jboss.forge.roaster.model.util.Strings;
import org.jboss.forge.roaster.model.util.Types;
/**
 * Base command for wizards that create a new Java source file (class, enum,
 * interface, ...) inside the selected project.
 *
 * <p>Collects a target package, a type name and an overwrite flag from the
 * user, validates them, and on execution builds a Roaster source model, lets
 * subclasses decorate it, and saves it through the project's
 * {@link JavaSourceFacet}.</p>
 *
 * @param <SOURCETYPE> the Roaster source model type this command creates
 *
 * @author <a href="mailto:lincolnbaxter@gmail.com">Lincoln Baxter, III</a>
 * @author <a href="ggastald@redhat.com">George Gastaldi</a>
 */
@FacetConstraints({
         @FacetConstraint(value = JavaSourceFacet.class, type = FacetConstraintType.REQUIRED)
})
public abstract class <API key><SOURCETYPE extends JavaSource<?>> extends <API key>
{
   /** Factory used by {@link #getProjectFactory()} to resolve the selected project. */
   @Inject
   private ProjectFactory projectFactory;

   /** Target package for the new type; defaults to the project's base package. */
   @Inject
   @WithAttributes(label = "Package Name", type = InputType.JAVA_PACKAGE_PICKER, description = "The package name where this type will be created")
   private UIInput<String> targetPackage;

   /** Simple (unqualified) name of the type to create; mandatory. */
   @Inject
   @WithAttributes(label = "Type Name", required = true, description = "The type name")
   private UIInput<String> named;

   /** Whether an existing file of the same name may be overwritten. */
   @Inject
   @WithAttributes(label = "Overwrite", description = "The overwrite flag that is used if the class already exists.", defaultValue = "false")
   private UIInput<Boolean> overwrite;

   /**
    * Wires up validation, enablement and completion for the three inputs and
    * registers them with the builder.
    */
   @Override
   public void initializeUI(UIBuilder builder) throws Exception
   {
      Project project = getSelectedProject(builder);
      final JavaSourceFacet javaSourceFacet = project.getFacet(JavaSourceFacet.class);
      // Setup named: reject anything that is not a simple Java identifier
      named.addValidator(new UIValidator()
      {
         @Override
         public void validate(UIValidationContext context)
         {
            if (!Types.isSimpleName(named.getValue()))
               context.addValidationError(named, "Invalid java type name.");
         }
      });
      // The overwrite flag is only meaningful when the target class already exists
      overwrite.setEnabled(new Callable<Boolean>()
      {
         @Override
         public Boolean call()
         {
            if (named.getValue() == null)
            {
               return false;
            }
            return classExists(javaSourceFacet);
         }
      });
      // Setup targetPackage: offer the packages already present in the project
      // as completion proposals
      if (project.hasFacet(JavaSourceFacet.class))
      {
         final Set<String> packageNames = new TreeSet<>();
         javaSourceFacet.visitJavaSources(new JavaResourceVisitor()
         {
            @Override
            public void visit(VisitContext context, JavaResource javaResource)
            {
               String packageName = javaSourceFacet.calculatePackage(javaResource);
               packageNames.add(packageName);
            }
         });
         targetPackage.setCompleter(new UICompleter<String>()
         {
            @Override
            public Iterable<String> <API key>(UIContext context, InputComponent<?, String> input,
                     String value)
            {
               // Propose every known package that starts with what was typed so far
               Set<String> result = new LinkedHashSet<>();
               for (String packageName : packageNames)
               {
                  if (Strings.isNullOrEmpty(value) || packageName.startsWith(value))
                  {
                     result.add(packageName);
                  }
               }
               return result;
            }
         });
      }
      targetPackage.setDefaultValue(<API key>(builder.getUIContext()));
      builder.add(targetPackage).add(named).add(overwrite);
   }

   /** Command metadata: name and description are derived from {@link #getType()}. */
   @Override
   public UICommandMetadata getMetadata(UIContext context)
   {
      return Metadata.forCommand(getClass()).name("Java: New " + getType())
               .description("Creates a new Java " + getType())
               .category(Categories.create("Java"));
   }

   /**
    * Get the type for which this command should create a new source file. ("Class", "Enum", "Interface", etc.)
    */
   protected abstract String getType();

   /**
    * Get the {@link JavaSource} type for which this command should create a new source file.
    */
   protected abstract Class<SOURCETYPE> getSourceType();

   /**
    * Returns true when a resource for the currently entered package/name pair
    * already exists in the project; a {@link ResourceException} is treated as
    * "does not exist".
    */
   private boolean classExists(JavaSourceFacet javaSourceFacet)
   {
      JavaSource<?> source = buildJavaSource(javaSourceFacet);
      if (source == null)
      {
         return false;
      }
      boolean classAlreadyExists;
      try
      {
         JavaResource parsedJavaResource = javaSourceFacet.getJavaResource(source);
         classAlreadyExists = parsedJavaResource != null && parsedJavaResource.exists();
      }
      catch (ResourceException ex)
      {
         classAlreadyExists = false;
      }
      return classAlreadyExists;
   }

   /**
    * Builds the Roaster source model from the current input values, or null
    * when no type name has been supplied yet. Falls back to the project's
    * base package when no target package was entered.
    */
   @SuppressWarnings("unchecked")
   private SOURCETYPE buildJavaSource(JavaSourceFacet java)
   {
      if (!named.hasValue() && !named.hasDefaultValue())
      {
         return null;
      }
      SOURCETYPE source = (SOURCETYPE) Roaster.create(getSourceType()).setName(named.getValue());
      if (targetPackage.hasValue() || targetPackage.hasDefaultValue())
      {
         source.setPackage(targetPackage.getValue());
      }
      else
      {
         source.setPackage(java.getBasePackage());
      }
      return source;
   }

   /**
    * Flags an error when the target type already exists and overwriting was
    * not requested.
    * NOTE(review): {@code overwrite.getValue() == false} unboxes the Boolean;
    * this relies on the input's "false" default keeping the value non-null — confirm.
    */
   @Override
   public void validate(UIValidationContext validator)
   {
      Project project = getSelectedProject(validator);
      final JavaSourceFacet javaSourceFacet = project.getFacet(JavaSourceFacet.class);
      if (classExists(javaSourceFacet) && overwrite.isEnabled() && overwrite.getValue() == false)
      {
         validator.addValidationError(named, getType() + " " + targetPackage.getValue() + "." + named.getValue()
                  + " already exists. Use the --overwrite flag to allow the overwrite.");
      }
   }

   /**
    * Builds the source model, prints any syntax errors to the UI error stream
    * and fails, otherwise lets {@link #decorateSource} customize the model and
    * saves it. The created resource becomes the new UI selection.
    * NOTE(review): buildJavaSource() can return null when no name is set;
    * "named" is a required input so this should be unreachable here — confirm.
    */
   @Override
   public Result execute(UIExecutionContext context) throws Exception
   {
      UIContext uiContext = context.getUIContext();
      Project project = getSelectedProject(uiContext);
      JavaSourceFacet javaSourceFacet = project.getFacet(JavaSourceFacet.class);
      SOURCETYPE source = buildJavaSource(javaSourceFacet);
      JavaResource javaResource;
      if (source.hasSyntaxErrors())
      {
         UIOutput output = uiContext.getProvider().getOutput();
         PrintStream err = output.err();
         err.println("Syntax Errors:");
         for (SyntaxError error : source.getSyntaxErrors())
         {
            err.println(error);
         }
         err.println();
         return Results.fail("Syntax Errors found. See above");
      }
      else
      {
         SOURCETYPE decorated = decorateSource(context, project, source);
         if (decorated != null)
            source = decorated;
         javaResource = javaSourceFacet.saveJavaSource(source);
      }
      uiContext.setSelection(javaResource);
      return Results.success(getType() + " " + source.getQualifiedName() + " was created");
   }

   /**
    * Override this method to do any necessary work to customize the generated {@link JavaResource}.
    * Returning null keeps the undecorated source.
    */
   public SOURCETYPE decorateSource(UIExecutionContext context, Project project, SOURCETYPE source)
            throws Exception
   {
      return source;
   }

   /** This command only makes sense in the context of a project. */
   @Override
   protected boolean isProjectRequired()
   {
      return true;
   }

   /** @return the "Package Name" input, for subclasses */
   protected UIInput<String> getTargetPackage()
   {
      return targetPackage;
   }

   /** @return the "Type Name" input, for subclasses */
   protected UIInput<String> getNamed()
   {
      return named;
   }

   /** @return the "Overwrite" input, for subclasses */
   protected UIInput<Boolean> getOverwrite()
   {
      return overwrite;
   }

   /**
    * Default package proposal: the selected project's base package, or null
    * when no project is selected.
    */
   protected String <API key>(UIContext context)
   {
      String packageName;
      Project project = getSelectedProject(context);
      if (project != null)
      {
         packageName = project.getFacet(JavaSourceFacet.class).getBasePackage();
      }
      else
      {
         packageName = null;
      }
      return packageName;
   }

   @Override
   protected ProjectFactory getProjectFactory()
   {
      return projectFactory;
   }
}
package mutatorenvironment;
/**
 * <!-- begin-user-doc -->
 * A representation of the model object '<em><b>Double Type</b></em>'.
 * <!-- end-user-doc -->
 *
 * <p>EMF-generated marker interface: a double-valued {@code NumberType} in the
 * mutator environment metamodel. It declares no members of its own; the
 * {@code abstract="true"} model annotation means no concrete instances of this
 * exact type are created.</p>
 *
 * @see mutatorenvironment.<API key>#getDoubleType()
 * @model abstract="true"
 * @generated
 */
public interface DoubleType extends NumberType {
} // DoubleType
package org.eclipse.che.plugin.debugger.ide.debug.expression;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import org.eclipse.che.api.promises.client.Operation;
import org.eclipse.che.api.promises.client.OperationException;
import org.eclipse.che.api.promises.client.PromiseError;
import org.eclipse.che.ide.debug.Debugger;
import org.eclipse.che.ide.debug.DebuggerManager;
import org.eclipse.che.plugin.debugger.ide.<API key>;
/**
 * Presenter for evaluating an expression.
 *
 * <p>Drives the evaluate-expression dialog view: shows the dialog and, on
 * request, sends the typed expression to the active debugger, displaying the
 * result (or a localized failure message) when the promise settles.</p>
 *
 * @author Artem Zatsarynnyi
 */
@Singleton
public class <API key> implements <API key>.ActionDelegate {

  /** Provides access to the currently active debugger, if any. */
  private DebuggerManager debuggerManager;
  /** The dialog view this presenter drives. */
  private <API key> view;
  /** Localization constants used to build the failure message. */
  private <API key> constant;

  /** Wires this presenter up as the view's delegate. */
  @Inject
  public <API key>(
      <API key> view,
      <API key> constant,
      DebuggerManager debuggerManager) {
    this.view = view;
    this.debuggerManager = debuggerManager;
    this.view.setDelegate(this);
    this.constant = constant;
  }

  /** Shows the dialog with a cleared result and gives it focus. */
  public void showDialog() {
    view.setResult("");
    view.<API key>(false);
    view.showDialog();
    view.<API key>();
  }

  /** Close dialog. */
  public void closeDialog() {
    view.close();
  }

  /** {@inheritDoc} */
  @Override
  public void onCloseClicked() {
    view.close();
  }

  /**
   * {@inheritDoc}
   *
   * <p>Evaluates the view's expression against the active debugger. The
   * control is disabled while the evaluation is in flight and re-enabled when
   * the promise resolves or fails. Does nothing when no debugger is active.</p>
   */
  @Override
  public void onEvaluateClicked() {
    Debugger debugger = debuggerManager.getActiveDebugger();
    if (debugger != null) {
      view.<API key>(false);
      debugger
          .evaluate(view.getExpression())
          .then(
              new Operation<String>() {
                @Override
                public void apply(String result) throws OperationException {
                  view.setResult(result);
                  view.<API key>(true);
                }
              })
          .catchError(
              new Operation<PromiseError>() {
                @Override
                public void apply(PromiseError error) throws OperationException {
                  // show localized failure text built from the error message
                  view.setResult(constant.<API key>(error.getMessage()));
                  view.<API key>(true);
                }
              });
    }
  }

  /** {@inheritDoc} */
  @Override
  public void <API key>() {
    // Enable evaluation only when the expression is non-blank
    final String expression = view.getExpression();
    boolean <API key> = !expression.trim().isEmpty();
    view.<API key>(<API key>);
  }
}
package ch.unibe.iam.scg.archie.samples.widgets;
import org.eclipse.swt.widgets.Composite;
import ch.unibe.iam.scg.archie.model.RegexValidation;
import ch.unibe.iam.scg.archie.ui.widgets.ComboWidget;
/**
* <p>
* TODO: DOCUMENT ME!
* </p>
*
* $Id$
*
* @author Peter Siska
* @author Dennis Schenk
* @version $Rev$
*/
public class CustomComboWidget extends ComboWidget {
public static final String DEFAULT_SELECTED = "Twenty";
/**
* @param parent
* @param style
* @param labelText
*/
public CustomComboWidget(Composite parent, int style, final String labelText, RegexValidation regex) {
super(parent, style, labelText, regex);
// Populate combo items in a custom fashion. This can come out of a
// file, database or wherever you like most.
String[] items = new String[] { "Twenty", "Thirty", "Fourty" };
this.setItems(items);
}
} |
package com.codenvy.client.core.model.factory;
import com.codenvy.client.model.factory.FactoryCreator;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.<API key>;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * Factory creator: immutable holder of the factory's creation timestamp,
 * populated by Jackson from the {@code created} JSON property.
 *
 * @author Florent Benoit
 */
@<API key>(ignoreUnknown = true)
public class <API key> implements FactoryCreator {

    // Fix: field is now final — this is a write-once value object populated
    // solely through the @JsonCreator constructor, so there is no reason for
    // it to remain mutable.
    private final long created;

    /**
     * Builds the creator from its JSON representation.
     *
     * @param created value of the {@code created} JSON property
     */
    @JsonCreator
    public <API key>(@JsonProperty("created") long created) {
        this.created = created;
    }

    /** @return the creation timestamp */
    @Override
    public long created() {
        return created;
    }
}
package com.intuit.tank.project;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.Table;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.apache.commons.lang.builder.ToStringBuilder;
import org.hibernate.envers.Audited;
import com.intuit.tank.vm.api.enumerated.<API key>;
/**
 * Persistent filter action of a script filter: what to do ({@code action}),
 * where ({@code scope}) and on which key/value pair.
 */
@Entity
@Audited
@Table(name = "<API key>")
public class ScriptFilterAction extends BaseEntity {

    private static final long serialVersionUID = 1L;

    /** Action to perform when the filter matches; persisted by enum name. */
    @Column(name = "filter_action", length = 255)
    @Enumerated(EnumType.STRING)
    private <API key> action;

    /** Scope the action applies to. */
    @Column(name = "filter_scope", length = 255)
    private String scope;

    /** Key the action operates on. */
    @Column(name = "filter_key", length = 255)
    private String key;

    /** Value used by the action. */
    @Column(name = "filter_value", length = 255)
    private String value;

    // NOTE(review): owning-side mapping kept for reference; currently disabled.
    // @ManyToOne
    // @JoinColumn(name = "filter_id", updatable = false, insertable = false)
    // private ScriptFilter filter;

    /**
     * @return the action
     */
    public <API key> getAction() {
        return action;
    }

    /**
     * @param action
     *            the action to set
     */
    public void setAction(<API key> action) {
        this.action = action;
    }

    /**
     * @return the scope
     */
    public String getScope() {
        return scope;
    }

    /**
     * @param scope
     *            the scope to set
     */
    public void setScope(String scope) {
        this.scope = scope;
    }

    /**
     * @return the key
     */
    public String getKey() {
        return key;
    }

    /**
     * @param key
     *            the key to set
     */
    public void setKey(String key) {
        this.key = key;
    }

    /**
     * @return the value
     */
    public String getValue() {
        return value;
    }

    /**
     * @param value
     *            the value to set
     */
    public void setValue(String value) {
        this.value = value;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String toString() {
        return new ToStringBuilder(this).append("id", getId()).append("action", action).append("scope", scope)
                .append("key", key).append("value", value)
                .toString();
    }

    /**
     * {@inheritDoc}
     *
     * <p>Transient instances (id == 0) compare by field values; persisted
     * instances compare by id only.</p>
     */
    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof ScriptFilterAction)) {
            return false;
        }
        ScriptFilterAction o = (ScriptFilterAction) obj;
        if (getId() == 0) {
            return new EqualsBuilder().append(action, o.action).append(scope, o.scope).append(key, o.key)
                    .append(value, o.value).isEquals();
        }
        return new EqualsBuilder().append(o.getId(), getId()).isEquals();
    }

    /**
     * {@inheritDoc}
     *
     * <p>Fixed to honor the equals/hashCode contract: equals() compares
     * persisted entities (id != 0) by id alone, so hashCode() must be derived
     * from the id for them. Previously the hash always mixed in the mutable
     * fields, so two equal persisted entities could hash differently.</p>
     */
    @Override
    public int hashCode() {
        if (getId() != 0) {
            return new HashCodeBuilder(21, 57).append(getId()).toHashCode();
        }
        return new HashCodeBuilder(21, 57).append(getAction()).append(getScope()).append(getKey()).append(getValue())
                .toHashCode();
    }
}
package at.medevit.elexis.kassen.vgkk;
import org.eclipse.swt.SWT;
import at.medevit.elexis.kassen.core.ui.<API key>;
import at.medevit.elexis.kassen.core.ui.<API key>;
import at.medevit.elexis.kassen.core.ui.<API key>;
import at.medevit.elexis.kassen.vgkk.model.VgkkLeistung;
import ch.elexis.core.ui.util.viewers.CommonViewer;
import ch.elexis.core.ui.util.viewers.<API key>;
import ch.elexis.core.ui.util.viewers.<API key>;
import ch.elexis.core.ui.util.viewers.ViewerConfigurer;
import ch.elexis.core.ui.views.codesystems.CodeSelectorFactory;
/**
 * Code selector factory for VGKK service codes ({@link VgkkLeistung}):
 * supplies the viewer configuration used by the common code-selection UI.
 */
public class <API key> extends CodeSelectorFactory {

	// NOTE(review): never assigned within this class — presumably set
	// elsewhere or unused; confirm.
	<API key> slp;
	// Viewer handed in by the configurer call below; disposed with this factory.
	CommonViewer cv;

	public <API key>(){
	}

	/** Builds the tree-style viewer configurer for VgkkLeistung entries. */
	@Override
	public ViewerConfigurer <API key>(CommonViewer cv){
		this.cv = cv;
		ViewerConfigurer vc =
			new ViewerConfigurer(new <API key>(VgkkLeistung.class),
				new <API key>(), new <API key>(cv),
				new ViewerConfigurer.<API key>(), new <API key>(
					<API key>.TYPE_TREE, SWT.NONE, null));
		return vc;
	}

	/** @return the model class handled by this selector */
	@Override
	public Class getElementClass(){
		return VgkkLeistung.class;
	}

	/** Releases the viewer created for this selector. */
	@Override
	public void dispose(){
		cv.dispose();
	}

	/** @return the display name of this code system */
	@Override
	public String getCodeSystemName(){
		return "VGKK"; //$NON-NLS-1$
	}
}
/*!
EQFPRO00.C - EQF Property Handler
*/
#define INCL_EQF_EDITORAPI // editor API
#include "eqf.h" // General .H for EQF
#include "eqfpro00.h" // Property Handler defines
// activate the following define for property failure logging
//#define PROPLOGGING

// IDA pointer for batch mode: used by OpenProperties() when running in
// FUNCCALL_RUNMODE (initialization site is outside this file's view)
PPROP_IDA pPropBatchIda = NULL;

// Window handle of the property handler window; set in WM_CREATE and
// cleared again in WM_DESTROY of PROPERTYHANDLERWP
HWND hwndPropertyHandler = NULL;
/*!
  Property Handler Window Proc

  Central dispatcher of the property handler.  Besides the window lifecycle
  messages it services the (redacted) EQF property messages: open, create
  and delete a property file, and close / save / get / put the data of an
  opened one.  Request parameters and the error word travel in the
  PPROPMSGPARM pointed to by mp2; results are returned as the MRESULT.
*/
MRESULT APIENTRY PROPERTYHANDLERWP
(
   HWND hwnd,
   WINMSG message,
   WPARAM mp1,
   LPARAM mp2
)
{
   PPROP_IDA pIda;               // Points to instance area
   PPROPMSGPARM pmsg;            // message parameter block (from mp2)
   PPROPHND hprop;               // property handle being worked on
   PPROPCNTL pcntl;              // control block behind the handle
   USHORT size;

   switch( message) {
      case WM_CREATE:
         // Allocate and clear the instance area, fetch the system property
         // object name from the profile and load the system properties.
         // Any failure vetoes creation of the window.
         if( !UtlAlloc( (PVOID *)&pIda, 0L, (LONG)sizeof( *pIda), ERROR_STORAGE ) )
            return( (MRESULT)TRUE);     // do not create the window
         memset( pIda, NULC, sizeof( *pIda));
         size = sizeof(pIda->IdaHead.szObjName);
         pIda->IdaHead.pszObjName = pIda->IdaHead.szObjName;
         <API key>( APPL_Name, KEY_SysProp, pIda->IdaHead.szObjName, size, "" );
         if( GetSysProp( pIda))
            return( (MRESULT)TRUE);     // do not create the window
         ANCHORWNDIDA( hwnd, pIda);
         EqfInstallHandler( PROPERTYHANDLER, hwnd, clsPROPERTY);
         hwndPropertyHandler = hwnd;
         return( 0L);                   // continue with default proc
      case WM_CLOSE:
         // Deregister the handler; actual cleanup happens in WM_DESTROY
         pIda = ACCESSWNDIDA( hwnd, PPROP_IDA );
         if (pIda)
            EqfRemoveHandler( TWBFORCE, PROPERTYHANDLER);
         return( 0L);
      case WM_DESTROY:
         // Free the instance area and clear the global handler window handle
         pIda = ACCESSWNDIDA( hwnd, PPROP_IDA );
         UtlAlloc( (PVOID *)&pIda, 0L, 0L, NOMSG );
         hwndPropertyHandler = NULL;
         return( 0L);
      case WM_EQF_TERMINATE:
         // Flush pending system property changes, then destroy the frame
         pIda = ACCESSWNDIDA( hwnd, PPROP_IDA );
         if( pIda->hSystem->pCntl->lFlgs & PROP_STATUS_UPDATED)
            PutItAway( pIda->hSystem->pCntl);
         WinDestroyWindow( pIda->IdaHead.hFrame);
         return( 0L);                   // continue with TWB shutdown
      case <API key>:
         // (redacted message) query the handle of the system properties
         pIda = ACCESSWNDIDA( hwnd, PPROP_IDA );
         return( (MRESULT)pIda->hSystem);
      case <API key>:
         // (redacted message) open: load a property file and return a handle
         // mp1 = not used
         // mp2 = PPROPMSGPARM
         pIda = ACCESSWNDIDA( hwnd, PPROP_IDA );
         pmsg = (PPROPMSGPARM) PVOIDFROMMP2(mp2);
         pIda->pErrorInfo = pmsg->pErrorInfo;
         *pIda->pErrorInfo = 0L;        // assume a good return
         if( (hprop= MakePropHnd( pIda)) == NULL)
            return( 0L);
         hprop->pCntl = LoadPropFile( pIda, pmsg->pszName, pmsg->pszPath,
                                      pmsg->fFlg);
         if( !hprop->pCntl){
            FreePropHnd( pIda, hprop);
            return( 0L);
         }
         hprop->lFlgs |= pmsg->fFlg;    // remember requested access mode
         return( (MRESULT)hprop);
      case <API key>:
         // (redacted message) create: make a new property file, return a handle
         // mp1 = not used
         // mp2 = PPROPMSGPARM
         pIda = ACCESSWNDIDA( hwnd, PPROP_IDA );
         pmsg = (PPROPMSGPARM) PVOIDFROMMP2(mp2);
         pIda->pErrorInfo = pmsg->pErrorInfo;
         *pIda->pErrorInfo = 0L;        // assume a good return
         if( (hprop= MakePropHnd( pIda)) == NULL)
            return( 0L);
         hprop->pCntl = CreatePropFile( pIda, pmsg->pszName, pmsg->pszPath,
                                        pmsg->fFlg, pmsg->fOverWrite );
         if( !hprop->pCntl)
         {
            FreePropHnd( pIda, hprop);
            return( 0L);
         }
         hprop->lFlgs = <API key>;      // (redacted) initial access flags
         return( (MRESULT)hprop);
      case <API key>:
         // (redacted message) delete: remove a property file from disk;
         // refused while the file is still loaded
         // mp1 = not used
         // mp2 = PPROPMSGPARM
         pIda = ACCESSWNDIDA( hwnd, PPROP_IDA );
         pmsg = (PPROPMSGPARM) PVOIDFROMMP2(mp2);
         pIda->pErrorInfo = pmsg->pErrorInfo;
         *pIda->pErrorInfo = 0L;        // assume a good return
         pcntl = FindPropCntl( pIda->TopCntl, pmsg->pszName, pmsg->pszPath);
         if( pcntl)
         {
            *pIda->pErrorInfo = ErrProp_ObjectBusy;
            return( (MRESULT)-1);       // indicate error
         }
         return( (MRESULT)DeletePropFile( pIda, pmsg->pszName, pmsg->pszPath));
      case <API key>:
         // (redacted message) close: optionally write back updated data,
         // decrement the use count and drop the file when no longer used
         // mp1 = not used
         // mp2 = PPROPMSGPARM
         pIda = ACCESSWNDIDA( hwnd, PPROP_IDA );
         pmsg = (PPROPMSGPARM) PVOIDFROMMP2(mp2);
         pIda->pErrorInfo = pmsg->pErrorInfo;
         *pIda->pErrorInfo = 0L;        // assume a good return
         if( (hprop=FindPropHnd( pIda->TopUsedHnd, (PPROPHND) pmsg->hObject)) == NULL)
         {
            *pIda->pErrorInfo = <API key>;
            return( (MRESULT)-1);       // indicate error
         }
         pcntl = hprop->pCntl;
         // Write-back only when the data was updated AND the caller asked
         // for a file save (PROP_FILE); requires write access on the handle
         if( pcntl->lFlgs & PROP_STATUS_UPDATED)
            if( pmsg->fFlg & PROP_FILE)
            {
               if( !(hprop->lFlgs & PROP_ACCESS_WRITE))
               {
                  *pIda->pErrorInfo = <API key>;
                  return( (MRESULT)-2); // indicate error
               }
               if( (*pIda->pErrorInfo = PutItAway( pcntl)) != 0 )
                  return( (MRESULT)-3); // indicate error
            }
         if( --pcntl->usUser < 1)
            DropPropFile( pIda, pcntl);          // last user: unload the file
         else
            if( hprop->lFlgs & PROP_ACCESS_WRITE)
               pcntl->lFlgs &= ~PROP_ACCESS_WRITE;  // give up the write lock
         FreePropHnd( pIda, hprop);
         return( *pIda->pErrorInfo ? (MRESULT)-4 : 0L);
      case <API key>:
         // (redacted message) save: write updated data back to the file
         // mp1 = not used
         // mp2 = PPROPMSGPARM
         pIda = ACCESSWNDIDA( hwnd, PPROP_IDA );
         pmsg = (PPROPMSGPARM) PVOIDFROMMP2(mp2);
         pIda->pErrorInfo = pmsg->pErrorInfo;
         *pIda->pErrorInfo = 0L;        // assume a good return
         if( (hprop=FindPropHnd( pIda->TopUsedHnd, (PPROPHND) pmsg->hObject)) == NULL)
         {
            *pIda->pErrorInfo = <API key>;
            return( (MRESULT)-1);       // indicate error
         }
         pcntl = hprop->pCntl;
         if( pcntl->lFlgs & PROP_STATUS_UPDATED)
         {
            if( !(hprop->lFlgs & PROP_ACCESS_WRITE))
            {
               *pIda->pErrorInfo = <API key>;
               return( (MRESULT)-2);    // indicate error
            }
            *pIda->pErrorInfo = PutItAway( pcntl);
         }
         return( *pIda->pErrorInfo ? (MRESULT)-3 : 0L);
      case <API key>:
         // (redacted message) get: copy the property data to the caller's buffer
         // mp1 = not used
         // mp2 = PPROPMSGPARM
         pIda = ACCESSWNDIDA( hwnd, PPROP_IDA );
         pmsg = (PPROPMSGPARM) PVOIDFROMMP2(mp2);
         pIda->pErrorInfo = pmsg->pErrorInfo;
         *pIda->pErrorInfo = 0L;        // assume a good return
         if( (hprop=FindPropHnd( pIda->TopUsedHnd, (PPROPHND) pmsg->hObject)) == NULL){
            *pIda->pErrorInfo = <API key>;
            return( (MRESULT)-1);       // indicate error
         }
         if( !(hprop->lFlgs & PROP_ACCESS_READ)){
            *pIda->pErrorInfo = <API key>;
            return( (MRESULT)-2);       // indicate error
         }
         pcntl = hprop->pCntl;
         memcpy( pmsg->pBuffer, pcntl->pHead, pcntl->usFsize);
         return( 0L);
      case <API key>:
         // (redacted message) put: copy caller data into the property buffer
         // and mark the properties as updated
         // mp1 = not used
         // mp2 = PPROPMSGPARM
         pIda = ACCESSWNDIDA( hwnd, PPROP_IDA );
         pmsg = (PPROPMSGPARM) PVOIDFROMMP2(mp2);
         pIda->pErrorInfo = pmsg->pErrorInfo;
         *pIda->pErrorInfo = 0L;        // assume a good return
         if( (hprop=FindPropHnd( pIda->TopUsedHnd, (PPROPHND) pmsg->hObject)) == NULL)
         {
            *pIda->pErrorInfo = <API key>;
            return( (MRESULT)-1);       // indicate error
         }
         if( !(hprop->lFlgs & PROP_ACCESS_WRITE))
         {
            *pIda->pErrorInfo = <API key>;
            return( (MRESULT)-2);       // indicate error
         }
         pcntl = hprop->pCntl;
         if( pmsg->pBuffer)
         {
            memcpy( pcntl->pHead, pmsg->pBuffer, pcntl->usFsize);
            hprop->pCntl->lFlgs |= PROP_STATUS_UPDATED;
         }
         return( 0L);
   }
   return( WinDefWindowProc( hwnd, message, mp1, mp2));
}
/*!
  Create a property handle

  Pops a handle from the free chain (refilling the chain with a freshly
  allocated block of PROPHWND_ENTRIES slots when it is empty) and pushes it
  onto the in-use chain.  Returns NULP with *pIda->pErrorInfo set when
  storage is exhausted.
*/
PPROPHND MakePropHnd( PPROP_IDA pIda)
{
   int      i;
   PPROPHND ptr1, ptr2;                 // Temp. ptr to handles

   if( !pIda->TopFreeHnd){
      // Free list exhausted: allocate one PLUG header followed by
      // PROPHWND_ENTRIES handle slots and chain the slots together.
      i = sizeof( PLUG) + (PROPHWND_ENTRIES * sizeof( PROPHND));
      UtlAlloc( (PVOID *)&ptr1, 0L, (LONG)i, ERROR_STORAGE );
      if (!ptr1) {
         *pIda->pErrorInfo = Err_NoStorage;
         return( (PPROPHND) NULP);
      }
      memset( ptr1, NULC, i);
      UtlPlugIn( &ptr1->Plug, (PPLUG) NULP, pIda->ChainBlkHnd);
      if( !pIda->ChainBlkHnd)
         pIda->ChainBlkHnd = &ptr1->Plug;
      ptr1 = (PPROPHND)( (char *)ptr1 + sizeof( PLUG));
      pIda->TopFreeHnd = ptr1;
      // Link the new slots into the free chain.
      // NOTE(review): the loop header was garbled in the original source
      // ("for( ...; i; i"); reconstructed here as a countdown over the
      // slots, matching the loop body and closing brace that survived.
      for( ptr2= (PPROPHND) NULP, i= PROPHWND_ENTRIES-1; i; i-- ){
         UtlPlugIn( &ptr1->Plug, &ptr2->Plug,(PPLUG) NULP);
         ptr2 = ptr1;
         ptr1++;
      }
   }
   // Pop the first free handle and push it on top of the in-use chain
   ptr1 = (PPROPHND)pIda->TopFreeHnd->Plug.Fw;
   ptr2 = (PPROPHND)UtlPlugOut( &pIda->TopFreeHnd->Plug);
   pIda->TopFreeHnd = ptr1;
   UtlPlugIn( &ptr2->Plug, (PPLUG) NULP, &pIda->TopUsedHnd->Plug);
   pIda->TopUsedHnd = ptr2;
// pIda->TopUsedHnd->pHndID = pIda;     // for verification purposes
   return( ptr2);
}
/*!
  Release a property handle

  Unlinks the handle from the in-use chain, scrubs its contents and pushes
  it back on top of the free chain.
*/
SHORT FreePropHnd( PPROP_IDA pIda, PPROPHND hprop)
{
   // A handle without a backward link is the head of the in-use chain:
   // advance the anchor to its successor before unplugging.
   if( hprop->Plug.Bw == NULP)
      pIda->TopUsedHnd = (PPROPHND)hprop->Plug.Fw;
   UtlPlugOut( &hprop->Plug);                 // detach from the in-use chain
   memset( hprop, NULC, sizeof( *hprop));     // wipe stale contents
   UtlPlugIn( &hprop->Plug, (PPLUG) NULP, &pIda->TopFreeHnd->Plug);
   pIda->TopFreeHnd = hprop;                  // new head of the free chain
   return( 0);
}
/*!
  Search for a property handle

  In the current release of EQF the handle passed in already IS the
  requested pointer, so no lookup in the ptop chain takes place.
*/
PPROPHND FindPropHnd( PPROPHND ptop, PPROPHND hprop)
{
   //! a future version should verify that hprop is a valid handle, e.g.:
   //    if( hprop->pHndID != testid)
   //       return( NULP);
   return( hprop);
}
/*!
  Load the properties file

  Loads the property file identified by pszPath/pszName into memory (or
  reuses an already loaded copy) and returns its control block; NULP on
  failure with *pIda->pErrorInfo set.  usAcc carries the requested
  PROP_ACCESS_READ / PROP_ACCESS_WRITE flags; only one writer per file
  is permitted at a time.
*/
PPROPCNTL LoadPropFile( PPROP_IDA pIda, PSZ pszName, PSZ pszPath, USHORT usAcc)
{
   PPROPCNTL pcntl= (PPROPCNTL) NULP;   // Points to properties cntl buffer
   PROPHEAD prophead;                   // Properties heading area
   CHAR fname[ MAX_EQF_PATH];           // Temporary filename
   USHORT sizeprop = 0;                 // Size of properties area
   USHORT size, sizread;
   HFILE hf=NULLHANDLE;                 // variable for file handle
   USHORT usAction, usrc;
   BOOL fTrueFalse = TRUE&FALSE;        // to avoid compile-w C4127

   *pIda->pErrorInfo = 0L;
   // Already loaded?  Reuse the in-memory copy unless this would create a
   // second writer.  (Note: the else pairs with the inner if.)
   pcntl = FindPropCntl( pIda->TopCntl, pszName, pszPath);
   if( pcntl)
      if( (usAcc & PROP_ACCESS_WRITE) && (pcntl->lFlgs & PROP_ACCESS_WRITE)){
         *pIda->pErrorInfo = <API key>;
         return( (PPROPCNTL) NULP);
      } else {
         pcntl->usUser++;
         return( pcntl);
      }
   MakePropPath( fname, "", pszPath, pszName, ""); // no drive, no .ext
   do {
      usrc = UtlOpen( fname, &hf, &usAction, 0L,
                      FILE_NORMAL, FILE_OPEN,
                      <API key> | <API key>, 0L, 0);
      if( usrc){
         *pIda->pErrorInfo = Err_OpenFile;
         break;
      }
      // Read the header and verify that it really describes the requested
      // object; the drive letter of the path is deliberately ignored.
      usrc = UtlRead( hf, &prophead, sizeof( prophead), &sizread, 0);
      if( usrc || (sizread != sizeof( prophead))){
         *pIda->pErrorInfo = Err_ReadFile;
         break;
      }
      if( _stricmp( pszName, prophead.szName)
          || _stricmp( pszPath + 2, prophead.szPath + 2)) // ignore drive !!!
      {
         *pIda->pErrorInfo = ErrProp_InvalidFile;
         break;
      }
      // The property class determines the expected size of the data area
      if( (size = sizeprop = GetPropSize( prophead.usClass)) == 0)
      {
         *pIda->pErrorInfo = <API key>;
         break;
      }
      UtlAlloc( (PVOID *)&pcntl, 0L, (LONG)(size + sizeof( *pcntl)), ERROR_STORAGE );
      if( !pcntl){
         *pIda->pErrorInfo = Err_NoStorage;
         break;
      }
      size -= sizeof( prophead);               // subtract bytes already read
      memset( pcntl, NULC, sizeof( *pcntl));
      pcntl->pHead = (PPROPHEAD)(pcntl+1);     // data area follows the cntl block
      *pcntl->pHead = prophead;
      usrc = UtlRead( hf, pcntl->pHead+1, size, &sizread, 0);
      if( usrc || (sizread != size))
      {
         // for folder property files it is O.K. to read less than
         // size bytes
         if ( (prophead.usClass == PROP_CLASS_FOLDER) &&
              (sizread >= 2000) )
         {
            // continue ...
         }
         else if ( (prophead.usClass == PROP_CLASS_DOCUMENT) &&
                   (sizread >= 2000) )
         {
            // may be a document property file created using a defect
            // OS/2 version which omitted the filler at the end of the
            // property structure
            // so continue ...
         }
         else if ( (prophead.usClass == <API key>) &&
                   (sizread >= 2000) )
         {
            // smaller property files from old versions are allowed...
         }
         else
         {
            *pIda->pErrorInfo = Err_ReadFile;
            break;
         } /* endif */
      }
   } while( fTrueFalse /*TRUE & FALSE*/);      // one-pass loop; break = error exit
   if( hf)
      UtlClose( hf, 0);
   if( *pIda->pErrorInfo){
      UtlAlloc( (PVOID *)&pcntl, 0L, 0L, NOMSG );
      return( (PPROPCNTL) NULP);
   }
   // initialize control block and append it to the chain of loaded files
   UtlAlloc( (PVOID *)&pcntl->pszFname, 0L, (LONG) sizeof(OBJNAME), ERROR_STORAGE );
   strcpy( pcntl->pszFname, fname );
   pcntl->usFsize = sizeprop;
   pcntl->usUser = 1;
   pcntl->lFlgs = usAcc & (PROP_ACCESS_READ | PROP_ACCESS_WRITE);
   // NOTE(review): LastCntl is only (re)assigned when it was NULP before;
   // appending to a non-empty chain leaves it at the previous tail —
   // confirm against the semantics of UtlPlugIn.
   UtlPlugIn( &pcntl->Plug, &pIda->LastCntl->Plug, (PPLUG) NULP);
   if( !pIda->TopCntl)
      pIda->TopCntl = pcntl;
   if( !pIda->LastCntl)
      pIda->LastCntl = pcntl;
   return( pcntl);
}
/*!
  Create the properties file

  Creates (or, with fOverwriteExisting, truncates) the property file for
  pszPath/pszName on disk, writes an initial header of the size dictated
  by usClass and returns a chained-in control block; NULP on failure with
  *pIda->pErrorInfo set.  Creation is refused while the object is loaded.
*/
PPROPCNTL CreatePropFile( PPROP_IDA pIda, PSZ pszName, PSZ pszPath,
                          USHORT usClass, BOOL fOverwriteExisting )
{
   PPROPCNTL pcntl=(PPROPCNTL) NULP;    // Points to properties cntl buffer
   PPROPHEAD pHead = NULL;              // Properties heading area
   CHAR fname[ MAX_EQF_PATH];           // Temporary filename
   USHORT sizeprop = 0;                 // Size of properties area
   USHORT size = 0;
   USHORT sizwrite = 0;
   HFILE hf=NULLHANDLE;                 // variable for file handle
   USHORT usAction, usrc;
   BOOL fExisted = FALSE;               // file-existed-flag

   *pIda->pErrorInfo = 0;               // everything is ok so far ...
   // An object that is currently loaded must not be re-created
   if( FindPropCntl( pIda->TopCntl, pszName, pszPath))
   {
      *pIda->pErrorInfo = <API key>;
   } /* endif */
   if ( !*pIda->pErrorInfo )
   {
      MakePropPath( fname, "", pszPath, pszName, ""); // no drive, no .ext
      // property class determines the file/buffer size
      if( (size = sizeprop = GetPropSize( usClass)) == 0)
      {
         *pIda->pErrorInfo = <API key>;
      } /* endif */
   } /* endif */
   if ( !*pIda->pErrorInfo )
   {
      size += sizeof( *pcntl);
      // Either truncate an existing file or create a new one; creation of
      // an already existing file is detected and reported further below
      if ( fOverwriteExisting )
      {
         fExisted = FALSE;
         usrc = UtlOpen( fname, &hf, &usAction, (ULONG)sizeprop,
                         FILE_NORMAL, FILE_TRUNCATE,
                         <API key> | <API key>, 0L, 0);
      }
      else
      {
         fExisted = UtlFileExist( fname );
         usrc = UtlOpen( fname, &hf, &usAction, (ULONG)sizeprop,
                         FILE_NORMAL, FILE_CREATE,
                         <API key> | <API key>, 0L, 0);
      } /* endif */
      if ( usrc )
      {
         switch( usrc )
         {
            case ERROR_DISK_FULL:
               *pIda->pErrorInfo = Err_NoDiskSpace;
               break;
            default:
               *pIda->pErrorInfo = Err_OpenFile;
               break;
         } /* endswitch */
      } /* endif */
   } /* endif */
   if ( !*pIda->pErrorInfo )
   {
      if( fExisted )
      {
         *pIda->pErrorInfo = <API key>;
      } /* endif */
   } /* endif */
   if ( !*pIda->pErrorInfo )
   {
      UtlAlloc( (PVOID *)&pcntl, 0L, (LONG)size, ERROR_STORAGE );
      if( !pcntl)
      {
         *pIda->pErrorInfo = Err_NoStorage;
      } /* endif */
   } /* endif */
   if ( !*pIda->pErrorInfo )
   {
      // Build the initial property header right behind the control block
      // and write it out to the new file
      memset( pcntl, NULC, size);
      pHead = (PPROPHEAD)(pcntl+1);
      strcpy( pHead->szName, pszName);
      strcpy( pHead->szPath, pszPath);
      pHead->usClass = usClass;
      pHead->chType = PROP_TYPE_NEW;
      usrc = UtlWrite( hf, pHead, sizeprop, &sizwrite, 0);
      if ( usrc != NO_ERROR)
      {
         switch( usrc )
         {
            case ERROR_DISK_FULL:
               *pIda->pErrorInfo = Err_NoDiskSpace;
               break;
            default:
               *pIda->pErrorInfo = Err_WriteFile;
               break;
         } /* endswitch */
      }
      else if( sizwrite != sizeprop )
      {
         *pIda->pErrorInfo = Err_NoDiskSpace;   // short write = disk full
      } /* endif */
   } /* endif */
   if( hf)
   {
      UtlClose( hf, 0);
      // Remove a partially written file, but never one that existed before
      if( *pIda->pErrorInfo &&
          (*pIda->pErrorInfo != <API key>) &&
          !fExisted )
      {
         UtlDelete( fname, 0L, FALSE );
      } /* endif */
   } /* endif */
   if( *pIda->pErrorInfo )
   {
      if ( pcntl )
         UtlAlloc( (PVOID *)&pcntl, 0L, 0L, NOMSG );
      pcntl = NULL;
   }
   else
   {
      // initialize control block
      UtlAlloc( (PVOID *)&pcntl->pszFname, 0L, (LONG) sizeof(OBJNAME), ERROR_STORAGE );
      strcpy( pcntl->pszFname, fname );
      pcntl->usFsize = sizeprop;
      pcntl->usUser = 1;
      // set UPDATED flag to force a write back when user closes after creation
      pcntl->lFlgs = <API key> | PROP_STATUS_UPDATED;
      pcntl->pHead = pHead;
      // NOTE(review): as in LoadPropFile, LastCntl is only set when it was
      // NULP before — confirm against the semantics of UtlPlugIn.
      UtlPlugIn( &pcntl->Plug, &pIda->LastCntl->Plug, (PPLUG) NULP);
      if( !pIda->TopCntl)
         pIda->TopCntl = pcntl;
      if( !pIda->LastCntl)
         pIda->LastCntl = pcntl;
   } /* endif */
   return( pcntl);
}
/*!
  Read in system properties

  Creates a property handle for the system property file (whose full name
  was stored in pIda->IdaHead.pszObjName during WM_CREATE), reads the
  PROPSYSTEM structure from it and anchors the handle in pIda->hSystem.
  Returns TRUE on failure, 0 on success.
*/
USHORT GetSysProp( PPROP_IDA pIda)
{
   PPROPHND hprop;
   PPROPCNTL pcntl = NULL;       // Points to properties cntl buffer
   USHORT size, sizread;         // Size of properties area
   HFILE hf=NULLHANDLE;          // variable for file handle
   USHORT usAction, usrc;
   BOOL error=TRUE;              // assume failure until the read succeeds
   BOOL fTrueFalse = TRUE&FALSE; // to avoid compile-w C4127

   if( (hprop = MakePropHnd( pIda)) == NULL)
      return( TRUE );
   memset( hprop, NULC, sizeof( *hprop));
   do {
      if( UtlOpen( pIda->IdaHead.pszObjName, &hf, &usAction, 0L,
                   FILE_NORMAL, FILE_OPEN,
                   <API key> | <API key>, 0L, 0))
         break;
      size = sizeof( PROPSYSTEM);
      UtlAlloc( (PVOID *)&pcntl, 0L, (LONG)(size + sizeof( *pcntl)), ERROR_STORAGE );
      if( !pcntl) break;
      memset( pcntl, NULC, sizeof( *pcntl));
      pcntl->pHead = (PPROPHEAD)(pcntl+1);     // data area follows the cntl block
      usrc = UtlRead( hf, pcntl->pHead, size, &sizread, 0);
      if( usrc || (sizread != size)) break;
      error = FALSE;
   } while( fTrueFalse /*TRUE & FALSE*/);      // one-pass loop; break = error exit
   if( hf)
      UtlClose( hf, 0);
   if( error)
   {
      // NOTE(review): hprop is not returned to the free list on this path —
      // confirm whether that is intentional (window creation is vetoed anyway).
      if ( pcntl )
      {
         UtlAlloc( (PVOID *)&pcntl, 0L, 0L, NOMSG );
      } /* endif */
      return( TRUE );
   }
   // initialize control block and chain it in; system properties are
   // opened read-only
   UtlAlloc( (PVOID *)&pcntl->pszFname, 0L, (LONG) sizeof(OBJNAME), ERROR_STORAGE );
   strcpy( pcntl->pszFname, pIda->IdaHead.pszObjName );
   pcntl->usFsize = sizeof( PROPSYSTEM);
   pcntl->usUser = 1;
   pcntl->lFlgs = PROP_ACCESS_READ;
   UtlPlugIn( &pcntl->Plug, &pIda->LastCntl->Plug, (PPLUG) NULP);
   if( !pIda->TopCntl)
      pIda->TopCntl = pcntl;
   if( !pIda->LastCntl)
      pIda->LastCntl = pcntl;
   hprop->pCntl = pcntl;
   hprop->lFlgs = PROP_ACCESS_READ;
   hprop->pCntl->usUser = 1;
   pIda->hSystem = hprop;        // anchor handle to system properties
   return( 0 );
}
/*!
  Reload system properties

  Re-reads the system property file named in the supplied property header
  into *pIda and refreshes the global in-memory copy obtained through
  GetSystemPropPtr().

  Fix: the original computed an error flag but unconditionally returned 0;
  the flag is now propagated (TRUE on failure, 0 on success, matching the
  convention of GetSysProp) so callers can detect a failed reload.
*/
USHORT ReloadSysProp( PPROPSYSTEM pIda)
{
   USHORT size, sizread;         // Size of properties area
   HFILE hf=NULLHANDLE;          // variable for file handle
   USHORT usAction, usrc;
   BOOL error=TRUE;              // assume failure until the read succeeds
   CHAR fname[ MAX_EQF_PATH];    // Temporary filename
   BOOL fTrueFalse = TRUE&FALSE; // to avoid compile-w C4127

   do
   {
      // build the property file name: no drive, no .ext
      MakePropPath( fname, "", pIda->PropHead.szPath, pIda->PropHead.szName, "");
      if( UtlOpen( fname, &hf, &usAction, 0L,
                   FILE_NORMAL, FILE_OPEN,
                   <API key> | <API key>, 0L, 0))
         break;
      size = sizeof( PROPSYSTEM);
      usrc = UtlRead( hf, pIda, size, &sizread, 0);
      if( usrc || (sizread != size)) break;
      // refresh the global in-memory copy of the system properties
      memcpy( (PBYTE)GetSystemPropPtr(), (PBYTE)pIda, size );
      error = FALSE;
   } while( fTrueFalse /*TRUE & FALSE*/);      // one-pass loop; break = error exit
   if( hf)
      UtlClose( hf, 0);
   return( error ? TRUE : 0 );   // was: always 0, silently losing the failure
}
/*!
  PutItAway

  Writes the in-memory property data of a control block back to its file.
  Returns 0 on success or an error code.
*/
USHORT PutItAway( PPROPCNTL pcntl)
{
   USHORT sizwrite;              // number of bytes written
   HFILE hf=NULLHANDLE;          // variable for file handle
   USHORT usAction, usrc;

   // always reset updated flag even if save fails, otherwise
   // the properties cannot be closed
   pcntl->lFlgs &= ~PROP_STATUS_UPDATED;
   if ( (usrc = UtlOpen( pcntl->pszFname, &hf, &usAction, 0L,
                         FILE_NORMAL, FILE_OPEN,
                         <API key> | <API key>,
                         0L, 0)) != NO_ERROR )
   {
      return( usrc);
   }
   usrc = UtlWrite( hf, pcntl->pHead, pcntl->usFsize, &sizwrite, 0);
   if( !usrc && (sizwrite != pcntl->usFsize))
      usrc = Err_WriteFile;      // a short write counts as a failure too
   UtlClose( hf, 0);
   return( usrc);
}
/*!
  Drop properties file from memory

  Frees the file name buffer, unlinks the control block from the chain of
  loaded property files (re-anchoring pIda->TopCntl / pIda->LastCntl when
  the block sat at either end of the chain) and releases the block itself.
*/
VOID DropPropFile( PPROP_IDA pIda, PPROPCNTL pcntl)
{
   UtlAlloc( (PVOID *)&pcntl->pszFname, 0L, 0L, NOMSG );
   if( pcntl->Plug.Fw == NULP)                  // last element of the chain?
      pIda->LastCntl = (PPROPCNTL)pcntl->Plug.Bw;
   if( pcntl->Plug.Bw == NULP)                  // first element of the chain?
      pIda->TopCntl = (PPROPCNTL)pcntl->Plug.Fw;
   UtlPlugOut( &pcntl->Plug);
   UtlAlloc( (PVOID *)&pcntl, 0L, 0L, NOMSG );
}
/*!
  Delete a properties file

  Removes the property file for pszPath/pszName from disk; on failure the
  caller's error word is set to Err_DeleteFile.  Returns the UtlDelete
  return code.
*/
SHORT DeletePropFile( PPROP_IDA pIda, PSZ pszName, PSZ pszPath)
{
   CHAR szFile[ MAX_EQF_PATH];   // property file name: no drive, no .ext
   USHORT usRC;

   MakePropPath( szFile, "", pszPath, pszName, "");
   usRC = UtlDelete( szFile, 0L, 0);
   if( usRC != NO_ERROR )
      *pIda->pErrorInfo = Err_DeleteFile;
   return( usRC);
}
/*!
  Find Properties Control block

  Walks the chain of loaded property control blocks and returns the first
  one whose name AND path both match exactly; NULP when none matches.
*/
PPROPCNTL FindPropCntl( PPROPCNTL ptop, PSZ pszName, PSZ pszPath)
{
   PPROPCNTL pCur = ptop;

   while( pCur )
   {
      if( (strcmp( pCur->pHead->szName, pszName) == 0) &&
          (strcmp( pCur->pHead->szPath, pszPath) == 0) )
         return( pCur);
      pCur = (PPROPCNTL)pCur->Plug.Fw;          // follow the forward link
   }
   return( (PPROPCNTL) NULP);
}
/*!
  Load Properties Message area

  Fills a PROPMSGPARM structure from the individual arguments.  When a name
  but no path is supplied, the name is copied into pm->tmpName and split by
  the (redacted) helper — presumably a path/filename splitter; pszName then
  points at the file-name part and pszPath at the start of the buffer.
  Returns TRUE when the split fails, else 0.
*/
SHORT LoadPropMsg( PPROPMSGPARM pm, PPROPHND hprop, PSZ name, PSZ path, \
                   USHORT flg, PEQFINFO pErrorInfo, BYTE *buffer)
{
   pm->hObject = hprop;
   pm->pBuffer = buffer;
   pm->fFlg = flg;
   pm->pErrorInfo = pErrorInfo;
   // NOTE(review): the else-pairing below is valid but subtle — the first
   // else belongs to the inner if( !path), the second to if( name || path).
   if( name || path)
      if( !path){
         strcpy( (PSZ)(pm->tmpName), name);
         if( ( pm->pszName = <API key>((PSZ) pm->tmpName)) == NULL)
            return( TRUE );
         pm->pszPath = (PSZ)(pm->tmpName);
      } else {
         pm->pszName = name;
         pm->pszPath = path;
      }
   else {
      pm->pszName = (PSZ) NULP;
      pm->pszPath = (PSZ) NULP;
   }
   return( 0);                   // no checks included now
}
/*!
  Notify All

  Broadcasts a (redacted) property-change notification to all installed
  handlers, passing the property class in mp1 and the "path\name" string
  in mp2.  Suppressed when running in function-call mode.
*/
VOID NotifyAll( HPROP hprop)
{
   PPROPHEAD ph;
   char name[ MAX_EQF_PATH];     // buffer for the "path\name" string

   if ( UtlQueryUShort( QS_RUNMODE ) != FUNCCALL_RUNMODE )
   {
      ph = (PPROPHEAD)(((PPROPHND)hprop)->pCntl->pHead);
      strcat( strcat( strcpy( name, ph->szPath), "\\"), ph->szName);
      EqfSend2AllHandlers( <API key>,
                           MP1FROMSHORT( ph->usClass ),
                           MP2FROMP(name) );
   } /* endif */
}
/*!
  Open Properties.

  Opens an existing properties file and returns a handle to it.

  pszObjName - name of the properties object (required)
  pszPath    - path of the properties file
  usAccess   - requested access flags; unknown bits are rejected
  pErrorInfo - receives an error code on failure

  Returns the property handle, or NULL on failure.
*/
HPROP OpenProperties( PSZ pszObjName, PSZ pszPath, USHORT usAccess,
                      PEQFINFO pErrorInfo)
{
  HPROP hprop=NULL;
  PPROPMSGPARM pmsg = NULL;
  // allocate the message parameter block used by both run modes
  UtlAlloc( (PVOID *)&pmsg, 0L, (LONG)sizeof(PROPMSGPARM), ERROR_STORAGE );
  if ( !pmsg )
  {
    *pErrorInfo = Err_NoStorage;
    return( NULL );
  } /* endif */
  if( !pszObjName)
  {
    // object name is mandatory
    *pErrorInfo = <API key>;
  }
  else if( usAccess & ~<API key>)
  {
    // reject access bits outside the known set
    *pErrorInfo = <API key>;
  }
  else if( LoadPropMsg( pmsg, NULL, pszObjName, pszPath, usAccess,
                        pErrorInfo, NULL ) )
  {
    // name/path could not be prepared in the message block
    *pErrorInfo = <API key>;
  }
  else
  {
    if ( UtlQueryUShort( QS_RUNMODE ) == FUNCCALL_RUNMODE )
    {
      // batch (function-call) mode: operate on the shared batch instance area
      PPROP_IDA pIda; // Points to instance area
      PPROPHND hProp;
      HANDLE hMutexSem = NULL;
      // keep other process from doing property related stuff..
      GETMUTEX(hMutexSem);
      pIda = pPropBatchIda;
      pIda->pErrorInfo = pmsg->pErrorInfo;
      *pIda->pErrorInfo = 0L; // assume a good return
      if( (hProp = MakePropHnd( pIda)) != NULL)
      {
        // load (or reuse an already loaded) property file behind the handle
        hProp->pCntl = LoadPropFile( pIda, pmsg->pszName, pmsg->pszPath, pmsg->fFlg);
        if( !hProp->pCntl )
        {
          // load failed: give the freshly made handle back
          FreePropHnd( pIda, hProp );
          hProp = NULL;
        } /* endif */
      } /* endif */
      if ( hProp != NULL )
      {
        // remember the granted access flags on the handle
        hProp->lFlgs |= pmsg->fFlg;
        hprop = (HPROP)hProp;
      } /* endif */
      // release Mutex
      RELEASEMUTEX(hMutexSem);
    }
    else
    {
      // windowed mode: delegate the request to the property handler window
      hprop = (HPROP)<API key>( <API key>,
                                MP1FROMSHORT(0),
                                MP2FROMP(pmsg) );
    } /* endif */
  } /* endif */
  // free the message block in all cases
  UtlAlloc( (PVOID *)&pmsg, 0L, 0L, NOMSG );
  return( hprop);
}
/*!
  Create Properties.

  Thin wrapper around CreatePropertiesEx that never overwrites an
  existing property file.
*/
HPROP CreateProperties(
  PSZ pszObjName, // Name of properties * input
  PSZ pszPath, // Full path to properties * input
  USHORT usClass, // Class of properties * input
  PEQFINFO pErrorInfo // Error indicator * output
)
{
  return( CreatePropertiesEx( pszObjName, pszPath, usClass, pErrorInfo, FALSE ) );
}
/*!
  Create a new properties file and return a handle to it.

  Validates the object name and class, then either creates the file
  directly (batch mode) or delegates to the property handler window.
  Returns the new property handle, or NULL on failure (pErrorInfo set).
*/
HPROP CreatePropertiesEx(
  PSZ pszObjName, // Name of properties * input
  PSZ pszPath, // Full path to properties * input
  USHORT usClass, // Class of properties * input
  PEQFINFO pErrorInfo, // Error indicator * output
  BOOL fOverwriteExisting // TRUE = overwrite any existing property file
)
{
  HPROP hprop=NULL;
  PPROPMSGPARM pmsg;
  BOOL fTrueFalse = TRUE&FALSE; // to avoid compile-w C4127
  // allocate the message parameter block used by both run modes
  UtlAlloc( (PVOID *)&pmsg, 0L, (LONG)sizeof( *pmsg), ERROR_STORAGE );
  if ( !pmsg )
  {
    *pErrorInfo = Err_NoStorage;
    return( NULL );
  } /* endif */
  // single-pass do/while used as a structured "goto cleanup"
  do {
    if( !pszObjName){
      // object name is mandatory
      *pErrorInfo = <API key>;
      break;
    }
    if( !GetPropSize( usClass)){
      // unknown class: no size can be determined for it
      *pErrorInfo = <API key>;
      break;
    }
    if( LoadPropMsg( pmsg, NULL, pszObjName, pszPath, usClass,
                     pErrorInfo, NULL))
    {
      *pErrorInfo = <API key>;
      break;
    }
    if ( UtlQueryUShort( QS_RUNMODE ) == FUNCCALL_RUNMODE )
    {
      // batch mode: create the file directly in the shared instance area
      PPROP_IDA pIda; // Points to instance area
      PPROPHND hProp = NULL;
      HANDLE hMutexSem = NULL;
      // keep other process from doing property related stuff..
      GETMUTEX(hMutexSem);
      pIda = pPropBatchIda;
      pIda->pErrorInfo = pmsg->pErrorInfo;
      *pIda->pErrorInfo = 0L; // assume a good return
      hProp = MakePropHnd( pIda);
      if ( hProp )
      {
        hProp->pCntl = CreatePropFile( pIda, pmsg->pszName, pmsg->pszPath,
                                       pmsg->fFlg, fOverwriteExisting );
        if( !hProp->pCntl)
        {
          // creation failed: give the handle back
          FreePropHnd( pIda, hProp);
          hProp = NULL;
        }
      } /* endif */
      if ( hProp )
      {
        hProp->lFlgs = <API key>;
        hprop = (HPROP)hProp;
      } /* endif */
      // release Mutex
      RELEASEMUTEX(hMutexSem);
    }
    else
    {
      // windowed mode: delegate creation to the property handler window
      pmsg->fOverWrite = fOverwriteExisting;
      hprop = (HPROP)<API key>( <API key>,
                                MP1FROMSHORT(0),
                                MP2FROMP(pmsg) );
    } /* endif */
  } while( fTrueFalse /*TRUE & FALSE*/);
  // free the message block in all cases
  UtlAlloc( (PVOID *)&pmsg, 0L, 0L, NOMSG );
  return( hprop);
}
/*!
  Delete Properties.

  Deletes a properties file by name/path.  Fails with ErrProp_ObjectBusy
  when the file is still loaded (a control block for it exists).
  Returns 0 on success, negative on error (pErrorInfo set).
*/
SHORT DeleteProperties(
  PSZ pszObjName, // Name of properties * input
  PSZ pszPath, // Full path to properties * input
  PEQFINFO pErrorInfo // Error indicator * output
)
{
  PROPMSGPARM PropMsg; // buffer for message structure
  SHORT rc = 0;
  BOOL fTrueFalse = TRUE&FALSE; // to avoid compile-w C4127
  // single-pass do/while used as a structured "goto cleanup"
  do {
    if( !pszObjName)
    {
      // object name is mandatory
      *pErrorInfo = <API key>;
      break;
    }
    if( LoadPropMsg( &PropMsg, NULL, pszObjName, pszPath, 0,
                     pErrorInfo, NULL))
    {
      *pErrorInfo = <API key>;
      break;
    }
    if ( UtlQueryUShort( QS_RUNMODE ) == FUNCCALL_RUNMODE )
    {
      // batch mode: delete directly, guarded against in-memory users
      PPROP_IDA pIda; // Points to instance area
      PPROPCNTL pcntl;
      HANDLE hMutexSem = NULL;
      // keep other process from doing property related stuff..
      GETMUTEX(hMutexSem);
      pIda = pPropBatchIda;
      pIda->pErrorInfo = PropMsg.pErrorInfo;
      *pIda->pErrorInfo = 0L; // assume a good return
      pcntl = FindPropCntl( pIda->TopCntl, PropMsg.pszName,
                            PropMsg.pszPath);
      if( pcntl)
      {
        // the file is still loaded in memory -- refuse to delete it
        *pIda->pErrorInfo = ErrProp_ObjectBusy;
        rc = -1; // indicate error
      }
      else
      {
        rc = DeletePropFile( pIda, PropMsg.pszName, PropMsg.pszPath);
      }
      // release Mutex
      RELEASEMUTEX(hMutexSem);
    }
    else
    {
      // windowed mode: delegate deletion to the property handler window
      rc = SHORT1FROMMR(<API key>( <API key>,
                        MP1FROMSHORT(0),
                        MP2FROMP(&PropMsg) ) );
    } /* endif */
  } while( fTrueFalse /*TRUE & FALSE*/);
  return( rc);
}
/*!
  Close Properties.

  Releases a property handle.  With PROP_FILE in fClose, pending updates
  are flushed to disk first (requires write access on the handle).  The
  last user of a control block drops the file from memory entirely.
  Returns 0 on success, negative on error (pErrorInfo set).
*/
SHORT CloseProperties(
  HPROP hObject, // Handle to object properties *input
  USHORT fClose, // Flags for closeing *input
  PEQFINFO pErrorInfo // Error indicator * output
)
{
  PROPMSGPARM PropMsg; // buffer for message structure
  SHORT rc = TRUE;     // TRUE acts as "no error seen yet" marker below
  BOOL fTrueFalse = TRUE&FALSE; // to avoid compile-w C4127
  // single-pass do/while used as a structured "goto cleanup"
  do {
    if( !hObject)
    {
      *pErrorInfo = <API key>;
      break;
    }
    if( LoadPropMsg( &PropMsg, (PPROPHND) hObject, NULL, NULL, fClose,
                     pErrorInfo, NULL))
    {
      *pErrorInfo = <API key>;
      break;
    }
    if ( UtlQueryUShort( QS_RUNMODE ) == FUNCCALL_RUNMODE )
    {
      PPROP_IDA pIda; // Points to instance area
      PPROPCNTL pcntl;
      PPROPHND hProp = NULL;
      HANDLE hMutexSem = NULL;
      // keep other process from doing property related stuff..
      GETMUTEX(hMutexSem);
      pIda = pPropBatchIda;
      pIda->pErrorInfo = PropMsg.pErrorInfo;
      *pIda->pErrorInfo = 0L; // assume a good return
      if( (hProp=FindPropHnd( pIda->TopUsedHnd, (PPROPHND) PropMsg.hObject)) == NULL)
      {
        // the given handle is not in the list of used handles
        *pIda->pErrorInfo = <API key>;
        rc = -1; // indicate error
      }
      else
      {
        pcntl = hProp->pCntl;
        // flush pending changes to disk when a PROP_FILE close is requested
        if( pcntl->lFlgs & PROP_STATUS_UPDATED)
          if( PropMsg.fFlg & PROP_FILE)
          {
            if( !(hProp->lFlgs & PROP_ACCESS_WRITE))
            {
              // this handle never held write access, so it may not save
              *pIda->pErrorInfo = <API key>;
              rc = -2; // indicate error
            }
            else if( (*pIda->pErrorInfo = PutItAway( pcntl)) != 0 )
            {
              rc = -3; // indicate error
            }
          }
        if ( rc == TRUE )
        {
          // last user: drop the file from memory entirely;
          // otherwise just release this handle's write lock on the file
          if( --pcntl->usUser < 1)
            DropPropFile( pIda, pcntl);
          else
            if( hProp->lFlgs & PROP_ACCESS_WRITE)
              pcntl->lFlgs &= ~PROP_ACCESS_WRITE;
          FreePropHnd( pIda, hProp);
          rc = *pIda->pErrorInfo ? -4 : 0;
        } /* endif */
      } /* endif */
      // release Mutex
      RELEASEMUTEX(hMutexSem);
    }
    else
    {
      // windowed mode: delegate the close to the property handler window
      rc = SHORT1FROMMR(<API key>( <API key>,
                        MP1FROMSHORT(0),
                        MP2FROMP(&PropMsg) ) );
    } /* endif */
  } while( fTrueFalse /*TRUE & FALSE*/);
  return( rc);
}
/*!
  Get All Properties.

  Copies the complete property data area into the caller's buffer.
  Requires read access on the handle.  Returns negative on error
  (pErrorInfo set).

  NOTE(review): on the successful batch-mode path rc keeps its initial
  value TRUE after the memcpy (unlike PutAllProperties, which sets it to
  0) -- confirm whether callers treat TRUE as success here.
*/
SHORT GetAllProperties(
  HPROP hObject, // Handle to object properties *input
  PVOID pBuffer, // Buffer to be loaded * output
  PEQFINFO pErrorInfo // Error indicator * output
)
{
  PPROPMSGPARM pmsg;
  SHORT rc = TRUE;
  BOOL fTrueFalse = TRUE&FALSE; // to avoid compile-w C4127
  // allocate the message parameter block used by both run modes
  UtlAlloc( (PVOID *)&pmsg, 0L, (LONG)sizeof( *pmsg), ERROR_STORAGE );
  if ( !pmsg )
  {
    *pErrorInfo = Err_NoStorage;
    return( rc );
  } /* endif */
  // single-pass do/while used as a structured "goto cleanup"
  do {
    if( !hObject){
      *pErrorInfo = <API key>;
      break;
    }
    if( !pBuffer){
      // an output buffer is mandatory
      *pErrorInfo = <API key>;
      break;
    }
    if( LoadPropMsg( pmsg,(PPROPHND) hObject, NULL, NULL, 0,
                     pErrorInfo, (BYTE *) pBuffer)){
      *pErrorInfo = <API key>;
      break;
    }
    if ( UtlQueryUShort( QS_RUNMODE ) == FUNCCALL_RUNMODE )
    {
      PPROP_IDA pIda; // Points to instance area
      PPROPCNTL pcntl;
      PPROPHND hprop;
      HANDLE hMutexSem = NULL;
      // keep other process from doing property related stuff..
      GETMUTEX(hMutexSem);
      pIda = pPropBatchIda;
      pIda->pErrorInfo = pmsg->pErrorInfo;
      *pIda->pErrorInfo = 0L; // assume a good return
      if( (hprop=FindPropHnd( pIda->TopUsedHnd, (PPROPHND) pmsg->hObject)) == NULL)
      {
        // unknown handle
        *pIda->pErrorInfo = <API key>;
        rc = -1; // indicate error
      }
      else if( !(hprop->lFlgs & PROP_ACCESS_READ))
      {
        // handle lacks read access
        *pIda->pErrorInfo = <API key>;
        rc = -2; // indicate error
      }
      else
      {
        // copy the entire in-memory property image to the caller's buffer
        pcntl = hprop->pCntl;
        memcpy( pmsg->pBuffer, pcntl->pHead, pcntl->usFsize);
      }
      RELEASEMUTEX(hMutexSem);
    }
    else
    {
      // windowed mode: delegate the read to the property handler window
      rc = SHORT1FROMMR(<API key>( <API key>,
                        MP1FROMSHORT(0),
                        MP2FROMP(pmsg) ) );
    } /* endif */
  } while( fTrueFalse /*TRUE & FALSE*/);
  // free the message block in all cases
  UtlAlloc( (PVOID *)&pmsg, 0L, 0L, NOMSG );
  return( rc);
}
/*!
  Put All Properties.

  Overwrites the in-memory property data area with the caller's buffer
  and marks it updated.  Requires write access on the handle.  In
  windowed mode a successful store notifies all handler windows.
  Returns 0 on success, negative on error (pErrorInfo set).
*/
SHORT PutAllProperties(
  HPROP hObject, // Handle to object properties *input
  PVOID pBuffer, // Buffer with properties * input
  PEQFINFO pErrorInfo // Error indicator * output
)
{
  PROPMSGPARM PropMsg; // buffer for message structure
  SHORT rc = TRUE;
  BOOL fTrueFalse = TRUE&FALSE; // to avoid compile-w C4127
  // single-pass do/while used as a structured "goto cleanup"
  do {
    if( !hObject)
    {
      *pErrorInfo = <API key>;
      break;
    }
    if( LoadPropMsg( &PropMsg,(PPROPHND) hObject, NULL, NULL, 0,
                     pErrorInfo, (BYTE *) pBuffer))
    {
      *pErrorInfo = <API key>;
      break;
    }
    if ( UtlQueryUShort( QS_RUNMODE ) == FUNCCALL_RUNMODE )
    {
      PPROP_IDA pIda; // Points to instance area
      PPROPCNTL pcntl;
      PPROPHND hprop;
      HANDLE hMutexSem = NULL;
      // keep other process from doing property related stuff..
      GETMUTEX(hMutexSem);
      pIda = pPropBatchIda;
      pIda->pErrorInfo = PropMsg.pErrorInfo;
      *pIda->pErrorInfo = 0L; // assume a good return
      if( (hprop=FindPropHnd( pIda->TopUsedHnd, (PPROPHND) PropMsg.hObject)) == NULL)
      {
        // unknown handle
        *pIda->pErrorInfo = <API key>;
        rc = -1; // indicate error
      }
      else if( !(hprop->lFlgs & PROP_ACCESS_WRITE))
      {
        // handle lacks write access
        *pIda->pErrorInfo = <API key>;
        rc = -2; // indicate error
      }
      else
      {
        pcntl = hprop->pCntl;
        if( PropMsg.pBuffer)
        {
          // replace the in-memory image and flag a pending disk update
          memcpy( pcntl->pHead, PropMsg.pBuffer, pcntl->usFsize);
          hprop->pCntl->lFlgs |= PROP_STATUS_UPDATED;
          rc = 0;
        }
      } /* endif */
      RELEASEMUTEX(hMutexSem);
    }
    else
    {
      // windowed mode: delegate the store to the property handler window
      rc = SHORT1FROMMR(<API key>( <API key>,
                        MP1FROMSHORT(0),
                        MP2FROMP(&PropMsg) ) );
    } /* endif */
  } while( fTrueFalse /*TRUE & FALSE*/);
  // broadcast the change to handler windows (windowed mode only)
  if( !rc && (UtlQueryUShort( QS_RUNMODE ) != FUNCCALL_RUNMODE) )
  {
    NotifyAll( hObject);
  } /* endif */
  return( rc);
}
/*!
  Save Properties.

  Writes the property data to disk when it carries pending updates.
  Requires write access on the handle.  A clean (non-updated) file is a
  successful no-op.  Returns 0 on success, negative on error.
*/
SHORT SaveProperties(
  HPROP hObject, // Handle to object properties *input
  PEQFINFO pErrorInfo // Error indicator * output
)
{
  PROPMSGPARM PropMsg; // buffer for message structure
  SHORT rc = 0;
  BOOL fTrueFalse = TRUE&FALSE; // to avoid compile-w C4127
  // single-pass do/while used as a structured "goto cleanup"
  do {
    if( !hObject)
    {
      *pErrorInfo = <API key>;
      break;
    }
    if( LoadPropMsg( &PropMsg, (PPROPHND) hObject, NULL, NULL, 0,
                     pErrorInfo, NULL))
    {
      *pErrorInfo = <API key>;
      break;
    }
    if ( UtlQueryUShort( QS_RUNMODE ) == FUNCCALL_RUNMODE )
    {
      PPROP_IDA pIda; // Points to instance area
      PPROPCNTL pcntl;
      PPROPHND hprop;
      HANDLE hMutexSem = NULL;
      // keep other process from doing property related stuff..
      GETMUTEX(hMutexSem);
      pIda = pPropBatchIda;
      pIda->pErrorInfo = PropMsg.pErrorInfo;
      *pIda->pErrorInfo = 0L; // assume a good return
      if( (hprop=FindPropHnd( pIda->TopUsedHnd, (PPROPHND) PropMsg.hObject)) == NULL)
      {
        // unknown handle
        *pIda->pErrorInfo = <API key>;
        rc = -1;
      }
      else
      {
        pcntl = hprop->pCntl;
        // only write when there are pending updates
        if( pcntl->lFlgs & PROP_STATUS_UPDATED)
        {
          if( !(hprop->lFlgs & PROP_ACCESS_WRITE))
          {
            // this handle never acquired write access
            *pIda->pErrorInfo = <API key>;
            rc = -2;
          }
          else
          {
            *pIda->pErrorInfo = PutItAway( pcntl);
          } /* endif */
        }
        if ( !rc ) rc = *pIda->pErrorInfo ? -3 : 0;
      } /* endif */
      // release Mutex
      RELEASEMUTEX(hMutexSem);
    }
    else
    {
      // windowed mode: delegate the save to the property handler window
      rc = SHORT1FROMMR( <API key>( <API key>,
                         MP1FROMSHORT(0),
                         MP2FROMP(&PropMsg) ) );
    } /* endif */
  } while( fTrueFalse /*TRUE & FALSE*/);
  return( rc);
}
/*!
  Miscellaneous properties functions
*/
/* Resolve a property handle to its in-memory property data area.
   Returns NULP when the handle itself is NULL. */
PVOID MakePropPtrFromHnd( HPROP hprop)
{
  PPROPHND pHnd = (PPROPHND)hprop;
  if ( !pHnd )
  {
    return( (PVOID) NULP );
  } /* endif */
  return( (PVOID)(pHnd->pCntl->pHead) );
}
/* Resolve a window handle to the property data area of the property
   handle stored in the window's IDA; NULL when the window has no IDA. */
PVOID MakePropPtrFromHwnd( HWND hObject)
{
  PIDA_HEAD pIdaHead = ACCESSWNDIDA( hObject, PIDA_HEAD );
  if ( pIdaHead == NULL )
  {
    return( NULL );
  } /* endif */
  return( MakePropPtrFromHnd( pIdaHead->hProp ) );
}
/* Return a pointer to the system properties data area, resolving the
   system property handle according to the current run mode. */
PPROPSYSTEM GetSystemPropPtr( VOID )
{
  HPROP hSysProp;
  if ( UtlQueryUShort( QS_RUNMODE ) == FUNCCALL_RUNMODE )
  {
    // batch mode: the system property handle lives in the batch IDA
    PPROP_IDA pIda; // Points to instance area
    pIda = pPropBatchIda;
    hSysProp = pIda->hSystem;
  }
  else
  {
    hSysProp = <API key>();
    assert( hSysProp != NULL );
  } /* endif */
  return( (PPROPSYSTEM)( MakePropPtrFromHnd( hSysProp )));
}
/* Try to acquire the given access on a property handle.  Write access is
   exclusive: it is refused (FALSE) when either this handle or the shared
   control block already holds it.  Granting write also flags the file as
   updated.  Non-write requests always succeed. */
BOOL SetPropAccess( HPROP hprop, USHORT flgs)
{
  PPROPHND pHnd = (PPROPHND)hprop;

  if ( !(flgs & PROP_ACCESS_WRITE) )
  {
    return( TRUE );   // nothing exclusive requested
  } /* endif */
  if ( (pHnd->lFlgs & (ULONG)PROP_ACCESS_WRITE) ||
       (pHnd->pCntl->lFlgs & (ULONG)PROP_ACCESS_WRITE) )
  {
    return( FALSE );  // write access already held somewhere
  } /* endif */
  pHnd->pCntl->lFlgs |= (ULONG)(PROP_ACCESS_WRITE | PROP_STATUS_UPDATED);
  pHnd->lFlgs |= (ULONG)PROP_ACCESS_WRITE;
  return( TRUE );
}
/* Drop the given access flags from both the handle and its shared control
   block; releasing write access broadcasts a change notification. */
VOID ResetPropAccess( HPROP hprop, USHORT flgs)
{
  PPROPHND pHnd = (PPROPHND)hprop;

  pHnd->pCntl->lFlgs &= (ULONG)~flgs;
  pHnd->lFlgs &= (ULONG)~flgs;
  if ( flgs & PROP_ACCESS_WRITE )
  {
    NotifyAll( hprop );
  } /* endif */
}
/*!
  GetPropSize - return size of properties given by its class.

  Memory and TQM-list classes ask their handler for the size (except in
  batch run mode, where a fixed size is used).  Unknown classes yield 0,
  which callers treat as "invalid class".
*/
SHORT GetPropSize( USHORT usClass)
{
  USHORT usSize; // size of properties
  switch ( usClass )
  {
    case PROP_CLASS_SYSTEM :
      usSize = sizeof( PROPSYSTEM );
      break;
    case <API key> :
      usSize = sizeof( PROPFOLDERLIST );
      break;
    case PROP_CLASS_FOLDER :
      usSize = sizeof( PROPFOLDER );
      break;
    case PROP_CLASS_DOCUMENT :
      usSize = sizeof( PROPDOCUMENT );
      break;
    case PROP_CLASS_IMEX :
      usSize = sizeof( PROPIMEX );
      break;
    case PROP_CLASS_EDITOR :
      usSize = sizeof( PROPEDIT );
      break;
    case <API key>:
      usSize = sizeof( PROPDICTIONARY );
      break;
    case PROP_CLASS_DICTLIST:
      usSize = sizeof( PROPDICTLIST );
      break;
    case PROP_CLASS_TAGTABLE:
      usSize = sizeof( PROPTAGTABLE );
      break;
    case PROP_CLASS_LIST:
      usSize = sizeof( PROPLIST );
      break;
    case PROP_CLASS_MEMORY :
    case PROP_CLASS_MEMORYDB :
    case <API key> :
      if ( UtlQueryUShort( QS_RUNMODE ) == FUNCCALL_RUNMODE )
      {
        // batch mode: no memory handler window exists, use a fixed size
        usSize = 2048; // MEM_PROP_SIZE;
      }
      else
      {
        // ask the memory handler for the class-specific size
        usSize = (USHORT) EqfSend2Handler( MEMORYHANDLER, <API key>, MP1FROMSHORT(usClass), 0L);
      } /* endif */
      break;
    case PROP_CLASS_TQMLIST :
      // TQM list sizes always come from the TQM list handler
      usSize = (USHORT) EqfSend2Handler( TQMLISTHANDLER, <API key>, MP1FROMSHORT(usClass), 0L);
      break;
    default :
      usSize = 0;   // unknown class
      break;
  } /* endswitch */
  return( usSize );
}
/*!
  MakePropPath.

  Assembles the full path of a properties file from drive (pd), base path
  (pp), name (pn) and extension (pe), inserting the system-configured
  property sub-directory between path and name.  Returns pbuf, which is
  the empty string when the system property handle is unavailable.
*/
PSZ MakePropPath( PSZ pbuf, PSZ pd, PSZ pp, PSZ pn, PSZ pe)
{
  PPROPSYSTEM pprop;
  PPROPHND hprop;
  CHAR tmppath[ MAX_EQF_PATH];
  *pbuf = NULC;               // default result: empty string
  // the system properties supply the configured property sub-path
  if( (hprop = (PPROPHND) <API key>())== NULL)
    return( pbuf);
  pprop = (PPROPSYSTEM) MakePropPtrFromHnd( hprop);
  // NOTE(review): sprintf/_makepath perform no bounds checking; assumes the
  // combined parts fit into MAX_EQF_PATH -- confirm with callers
  sprintf( tmppath, "%s\\%s", pp, pprop->szPropertyPath );
  _makepath( pbuf, pd, tmppath, pn, pe);
  return( pbuf);
}
// Function <API key>
// Initialize the property handler for non-windows environments;
// i.e. perform WM_CREATE handling to allocate our IDA.
// Returns TRUE on success, FALSE when allocation or the system property
// load fails.
BOOL <API key>( void )
{
  int size;
  BOOL fOK = TRUE;
  // allocate the shared batch instance area (zeroed below)
  if( !UtlAlloc( (PVOID *)&pPropBatchIda, 0L, (LONG)sizeof( *pPropBatchIda), ERROR_STORAGE ) )
    return( FALSE ); // do not create the window
  memset( pPropBatchIda, NULC, sizeof( *pPropBatchIda));
  size = sizeof(pPropBatchIda->IdaHead.szObjName);
  pPropBatchIda->IdaHead.pszObjName = pPropBatchIda->IdaHead.szObjName;
  // read the system property object name from the profile
  <API key>( APPL_Name, KEY_SysProp, pPropBatchIda->IdaHead.pszObjName, size, "" );
  { // keep other process from doing property related stuff..
    HANDLE hMutexSem = NULL;
    GETMUTEX(hMutexSem);
    // load the system properties; a non-zero return is a failure
    if( GetSysProp( pPropBatchIda))
      fOK = FALSE;
    // return( FALSE); // do not create the window
    RELEASEMUTEX(hMutexSem); // release Mutex
  }
  return( fOK );
} /* end of function <API key> */
// Function <API key>
// Terminate the property handler in non-windows environments;
// flushes the system properties to disk when they carry pending updates.
// Always returns TRUE.
BOOL
<API key>( void )
{// keep other process from doing property related stuff..
  HANDLE hMutexSem = NULL;
  PPROP_IDA pIda = pPropBatchIda;
  GETMUTEX(hMutexSem);
  assert( pIda->hSystem != NULL );
  if ( pIda->hSystem != NULL )
  {
    // only write when the system properties were modified
    if( pIda->hSystem->pCntl->lFlgs & PROP_STATUS_UPDATED)
    {
      PutItAway( pIda->hSystem->pCntl);
    }
  } /* endif */
  RELEASEMUTEX(hMutexSem); // release Mutex
  return( TRUE );
} /* end of function <API key> */
// Return the handle of the system properties: from the batch instance
// area in function-call run mode, otherwise from the property handler
// window's IDA (NULL when that window does not exist).
HPROP <API key>( void )
{
  HPROP hprop;
  if ( UtlQueryUShort( QS_RUNMODE ) == FUNCCALL_RUNMODE )
  {
    PPROP_IDA pIda = pPropBatchIda;
    hprop = pIda->hSystem;
  }
  else
  {
    // hprop = (HPROP)WinSendMsg( EqfQueryHandler( PROPERTYHANDLER),
    // <API key>,
    // NULL, NULL);
    if (hwndPropertyHandler)
    {
      // read the handle straight out of the handler window's IDA
      PPROP_IDA pIda = ACCESSWNDIDA( hwndPropertyHandler, PPROP_IDA );
      hprop = (HPROP)pIda->hSystem;
    }
    else
    {
      hprop = NULL;
    }
  } /* endif */
  return( hprop );
} /* end of function <API key> */
package com.odcgroup.t24.server.properties.util;
import java.util.Properties;
/**
 * A {@link Properties} variant with case-insensitive String keys: every key
 * is normalised to lower case on write, and lookups normalise the requested
 * key the same way before delegating to the superclass.
 *
 * @author hdebabrata
 */
public class ServerProperties extends Properties {

    private static final long serialVersionUID = 1L;

    /**
     * Stores the value under the lower-cased form of {@code key}.
     * Non-String keys are stored unchanged (previously they triggered an
     * unguarded ClassCastException).
     */
    @Override
    public synchronized Object put(Object key, Object value) {
        return super.put(normalize(key), value);
    }

    /** Looks the property up under the lower-cased key. */
    @Override
    public String getProperty(String key) {
        return super.getProperty(key.toLowerCase());
    }

    /** Looks the property up under the lower-cased key, with a default. */
    @Override
    public String getProperty(String key, String defaultValue) {
        return super.getProperty(key.toLowerCase(), defaultValue);
    }

    /**
     * BUG FIX: the previous implementation looked the key up verbatim, so
     * {@code get("KeyOne")} could never find an entry stored via
     * {@code put("KeyOne", ...)} (which lower-cases on write).  Normalise
     * here as well so {@code get} is consistent with {@code put} and
     * {@code getProperty}.
     */
    @Override
    public synchronized Object get(Object key) {
        return super.get(normalize(key));
    }

    /** Lower-cases String keys; leaves any other key type untouched. */
    private static Object normalize(Object key) {
        return (key instanceof String) ? ((String) key).toLowerCase() : key;
    }
}
# Southpaw Technology, and is not to be reproduced, transmitted,
__all__ = ["FileException", "File", "FileAccess", "IconCreator", "FileGroup", "FileRange"]
from pyasm.common import Common, Xml, TacticException, Environment, System, Config
from pyasm.search import *
from project import Project
from subprocess import Popen, PIPE
import sys, os, string, re, stat, glob
# Detect the Python Imaging Library.  Prefer the packaged "PIL" namespace;
# fall back to the legacy top-level "Image" module.
#
# BUG FIX: the fallback try-block used to run unconditionally, so a failing
# legacy "import Image" reset HAS_PIL to False even when
# "from PIL import Image" had already succeeded.  The fallback now only
# runs when the first import failed.
try:
    #import Image
    from PIL import Image
    # Test to see if imaging actually works
    import _imaging
    HAS_PIL = True
except:
    HAS_PIL = False

if not HAS_PIL:
    try:
        import Image
        # Test to see if imaging actually works
        import _imaging
        HAS_PIL = True
    except:
        HAS_PIL = False
# check if imagemagick is installed, and find exe if possible
convert_exe = ''
HAS_IMAGE_MAGICK = False
if os.name == "nt":
    # prefer direct exe to not confuse with other convert.exe present on nt systems
    convert_exe_list = glob.glob('C:\\Program Files\\ImageMagick*')
    for exe in convert_exe_list:
        try:
            # "convert -version" prints a banner containing "ImageMagick"
            convert_process = Popen(['%s\\convert.exe'%exe,'-version'], stdout=PIPE, stderr=PIPE)
            convert_return,convert_err = convert_process.communicate()
            if 'ImageMagick' in convert_return:
                convert_exe = '%s\\convert.exe'%exe
                HAS_IMAGE_MAGICK = True
        except:
            print "Running %s failed" %exe
    if not convert_exe_list:
        # IM might not be in Program Files but may still be in PATH
        try:
            convert_process = Popen(['convert','-version'], stdout=PIPE, stderr=PIPE)
            convert_return,convert_err = convert_process.communicate()
            if 'ImageMagick' in convert_return:
                convert_exe = 'convert'
                HAS_IMAGE_MAGICK = True
        except:
            pass
else:
    # in other systems (e.g. unix) 'convert' is expected to be in PATH
    try:
        convert_process = Popen(['convert','-version'], stdout=PIPE, stderr=PIPE)
        convert_return,convert_err = convert_process.communicate()
        if 'ImageMagick' in convert_return:
            convert_exe = 'convert'
            HAS_IMAGE_MAGICK = True
    except:
        pass

# an ffprobe binary on the PATH is taken as evidence that ffmpeg is installed
if Common.which("ffprobe"):
    HAS_FFMPEG = True
else:
    HAS_FFMPEG = False

import subprocess
class FileException(TacticException):
pass
class File(SObject):
    '''SObject wrapper for a row of the sthpw/file table.'''

    # extensions handled as plain documents (no thumbnail generation)
    NORMAL_EXT = ['max','ma','xls' ,'xlsx', 'doc', 'docx','txt', 'rtf', 'odt','fla','psd', 'xsi', 'scn', 'hip', 'xml','eani','pdf', 'fbx',
            'gz', 'zip', 'rar',
            'ini', 'db', 'py', 'pyd', 'spt'
    ]

    # extensions treated as video files
    VIDEO_EXT = ['mov','wmv','mpg','mpeg','m1v','m2v','mp2','mp4','mpa','mpe','mp4','wma','asf','asx','avi','wax',
                 'wm','wvx','ogg','webm','mkv','m4v','mxf','f4v','rmvb']

    # extensions treated as still images
    IMAGE_EXT = ['jpg','png','tif','tiff','gif','dds','dcm']

    SEARCH_TYPE = "sthpw/file"

    # values stored in the "base_type" column
    BASE_TYPE_SEQ = "sequence"
    BASE_TYPE_DIR = "directory"
    BASE_TYPE_FILE = "file"
    def get_code(my):
        '''Return the value of the "code" column.'''
        return my.get_value("code")

    def get_file_name(my):
        '''Return the value of the "file_name" column.'''
        return my.get_value("file_name")

    def get_file_range(my):
        '''Return the value of the "file_range" column.'''
        return my.get_value("file_range")

    def get_type(my):
        '''Return the value of the "type" column.'''
        return my.get_value("type")
    def <API key>(cls, path):
        '''Classify a path by extension into "video", "document" or
        "image" (the default for anything not in VIDEO_EXT/NORMAL_EXT).'''
        tmp, ext = os.path.splitext(path)
        ext = ext.lstrip(".")
        ext = ext.lower()
        if ext in File.VIDEO_EXT:
            return "video"
        elif ext in File.NORMAL_EXT:
            return "document"
        else:
            return "image"
    <API key> = classmethod(<API key>)
    def get_sobject(my):
        '''get the sobject associated with this file (looked up via the
        stored search_type/search_id columns)'''
        search = Search(my.get_value("search_type"))
        search.add_id_filter(my.get_value("search_id"))
        sobject = search.get_sobject()
        return sobject

    def get_full_file_name(my):
        '''Gets the full file name. This is the same as get_file_name'''
        return my.get_file_name()

    def get_lib_dir(my,snapshot=None):
        '''go through the stored snapshot_code to get the actual path'''
        # NOTE: the snapshot argument is currently ignored; the snapshot is
        # always resolved from the stored snapshot_code column
        code = my.get_value("snapshot_code")
        from snapshot import Snapshot
        snapshot = Snapshot.get_by_code(code)
        return snapshot.get_lib_dir()

    def get_env_dir(my,snapshot=None):
        '''go through the stored snapshot_code to get the actual path'''
        code = my.get_value("snapshot_code")
        from snapshot import Snapshot
        snapshot = Snapshot.get_by_code(code)
        return snapshot.get_env_dir()

    def get_web_dir(my,snapshot=None):
        '''go through the stored snapshot_code to get the actual path'''
        code = my.get_value("snapshot_code")
        from snapshot import Snapshot
        snapshot = Snapshot.get_by_code(code)
        return snapshot.get_web_dir()

    def get_lib_path(my):
        '''Return the full library-side path: "<lib_dir>/<file_name>".'''
        filename = my.get_full_file_name()
        return "%s/%s" % (my.get_lib_dir(), filename)

    def get_env_path(my):
        '''path beginning with $TACTIC_ASSET_DIR'''
        filename = my.get_full_file_name()
        return "%s/%s" % (my.get_env_dir(), filename)

    def get_web_path(my):
        '''Return the full web-side path: "<web_dir>/<file_name>".'''
        filename = my.get_full_file_name()
        return "%s/%s" % (my.get_web_dir(), filename)
# Static Methods
"""
# DEPRERECATED
PADDING = 10
# DEPRERECATED
def add_file_code(file_path, file_code):
ext = ".".join( File.get_extensions(file_path) )
padded_id = str(file_code).zfill(File.PADDING)
file_path = file_path.replace(".%s" % ext, "_%s.%s" % (padded_id, ext) )
return file_path
add_file_code = staticmethod(add_file_code)
# DEPRERECATED
def remove_file_code(file_path):
new_path = re.compile(r'_(\w{%s})\.' % File.PADDING).sub(".", file_path)
return new_path
remove_file_code = staticmethod(remove_file_code)
# DEPRERECATED
def extract_file_code(file_path):
p = re.compile(r'_(\w{%s})\.' % File.PADDING)
m = p.search(file_path)
if not m:
return 0
groups = m.groups()
if not groups:
return 0
else:
file_code = groups[0]
# make sure there are only alpha/numberic characters
if file_code.find("_") != -1:
return 0
# make sure the first 3 are numeric
if not re.match('^\d{3}\w+$', file_code):
return 0
# strip out the leading zeros
return file_code.lstrip("0")
extract_file_code = staticmethod(extract_file_code)
# DEPRERECATED
def extract_file_path(file_path):
'''return file path without the unique id'''
p = re.compile(r'_(\w{%s})\.' % File.PADDING)
m = p.search(file_path)
if not m:
return file_path
groups = m.groups()
if not groups:
return file_path
else:
new_path = file_path.replace("_%s" % groups[0], "")
return new_path
extract_file_path = staticmethod(extract_file_path)
# DEPRERECATED
def has_file_code(file_path):
file_code = File.extract_file_code(file_path)
if file_code == 0:
return False
else:
return True
has_file_code = staticmethod(has_file_code)
"""
def get_extension(file_path):
'''get only the final extension'''
parts = os.path.basename(file_path).split(".")
ext = parts[len(parts)-1]
return ext
get_extension = staticmethod(get_extension)
def get_extensions(file_path):
'''get all of the extensions after the first .'''
parts = os.path.basename(file_path).split(".")
ext = parts[1:len(parts)]
return ext
get_extensions = staticmethod(get_extensions)
    def get_by_snapshot(cls, snapshot, file_type=None):
        '''Return the file sobjects referenced by the snapshot's XML,
        optionally filtered by file type.'''
        # file codes are embedded in the snapshot XML description
        xml = snapshot.get_xml_value("snapshot")
        file_codes = xml.get_values("snapshot/file/@file_code")

        search = Search( cls.SEARCH_TYPE)
        search.add_filters("code", file_codes)
        if file_type:
            search.add_filter("type", file_type)
        return search.get_sobjects()
    get_by_snapshot = classmethod(get_by_snapshot)
def get_by_filename(cls, filename, skip_id=None, padding=0):
search = Search(cls.SEARCH_TYPE)
# if this is a file range then convert file name to padding
# FIXME: need some way to know what and where the padding is
if padding:
filename = re.sub("(.*\.)(\d+)", r"\1
search.add_filter("file_name", filename)
project_code = Project.get_project_code()
search.add_filter("project_code", project_code)
if skip_id:
search.add_where('id != %s'%skip_id)
return search.get_sobject()
get_by_filename = classmethod(get_by_filename)
    def get_by_snapshots(cls, snapshots, file_type=None):
        '''Return the file sobjects referenced by a list of snapshots in one
        query, optionally filtered by file type.  The results are also put
        into the SObject cache keyed by "<search_type>|<code>".'''
        all_file_codes = []
        for snapshot in snapshots:
            xml = snapshot.get_xml_value("snapshot")
            file_codes = xml.get_values("snapshot/file/@file_code")
            all_file_codes.extend(file_codes)

        search = Search( cls.SEARCH_TYPE)
        search.add_filters("code", all_file_codes)
        if file_type:
            search.add_filter("type", file_type)
        files = search.get_sobjects()

        # cache these
        for file in files:
            key = "%s|%s" % (file.get_search_type(),file.get_code())
            SObject.cache_sobject(key, file)

        return files
    get_by_snapshots = classmethod(get_by_snapshots)
# DEPRECATED
"""
def get_by_path(path):
file_code = File.extract_file_code(path)
if file_code == 0:
return None
search = Search(File.SEARCH_TYPE)
search.add_id_filter(file_code)
file = search.get_sobject()
return file
get_by_path = staticmethod(get_by_path)
"""
    def get_by_path(path):
        '''Return the file sobject whose relative_dir/file_name match the
        given absolute path under the asset directory.'''
        asset_dir = Environment.get_asset_dir()
        # strip the asset dir prefix to get the repo-relative path
        path = path.replace("%s/" % asset_dir, "")
        relative_dir = os.path.dirname(path)
        file_name = os.path.basename(path)

        # NOTE: this does not work with base_dir_alias
        search = Search("sthpw/file")
        search.add_filter("relative_dir", relative_dir)
        search.add_filter("file_name", file_name)
        sobject = search.get_sobject()
        return sobject
    get_by_path = staticmethod(get_by_path)
    def create( file_path, search_type, search_id, file_type=None, requires_file=True, st_size=None, repo_type=None, search_code = None):
        '''Create and commit a new sthpw/file sobject for file_path.

        file_path     -- path on disk (must exist unless requires_file=False)
        search_type   -- search type of the owning sobject
        search_id     -- id of the owning sobject (only stored when an int)
        file_type     -- optional value for the "type" column
        requires_file -- when True, raise FileException if the path is absent
        st_size       -- explicit size to store when the file does not exist
        repo_type     -- optional value for the "repo_type" column
        search_code   -- optional code of the owning sobject
        '''
        exists = os.path.exists(file_path)
        isdir = os.path.isdir(file_path)
        if requires_file and not os.path.exists(file_path):
            raise FileException("File '%s' does not exist" % file_path)

        file_name = os.path.basename(file_path)
        file = File(File.SEARCH_TYPE)
        file.set_value("file_name", file_name)
        file.set_value("search_type", search_type)
        if search_code:
            file.set_value("search_code", search_code)
        # MongoDb
        # non-integer ids (e.g. MongoDB object ids) are not stored here
        if search_id and isinstance(search_id, int):
            file.set_value("search_id", search_id)
        if file_type:
            file.set_value("type", file_type)
        if isdir:
            file.set_value("base_type", File.BASE_TYPE_DIR)
        else:
            file.set_value("base_type", File.BASE_TYPE_FILE)
        project = Project.get()
        file.set_value("project_code", project.get_code())

        # record the size: measured from disk when present, otherwise from
        # the explicit st_size argument
        if exists:
            if isdir:
                dir_info = Common.get_dir_info(file_path)
                size = dir_info.get("size")
                file.set_value("st_size", size)
            else:
                from stat import ST_SIZE
                size = os.stat(file_path)[ST_SIZE]
                file.set_value("st_size", size)
        elif st_size != None:
            file.set_value("st_size", st_size)

        if repo_type:
            file.set_value("repo_type", repo_type)

        file.commit()
        return file
    create = staticmethod(create)
    def makedirs(dir, mode=None):
        '''wrapper to mkdirs in case it ever needs to be overridden'''
        # deprecated shim: kept only for backwards compatibility
        print "DEPRECATED: use System().makedirs()"
        return System().makedirs(dir,mode)
    makedirs = staticmethod(makedirs)
def get_filesystem_name(name, strict=True):
'''takes a name and converts it to a name that can be saved in
the filesystem.'''
filename = name
filename = filename.replace("/", "__")
filename = filename.replace("|", "__")
filename = filename.replace(":", "__")
filename = filename.replace("?", "__")
filename = filename.replace("=", "__")
if strict:
filename = filename.replace(" ", "_")
filename_base, ext = os.path.splitext(filename)
ext = string.lower(ext)
filename = "%s%s" % (filename_base, ext)
return filename
get_filesystem_name = staticmethod(get_filesystem_name)
    def process_file_path(file_path):
        '''makes a file path completely kosher with the file system. Only do it on basename or it would remove the : from C:/'''
        # delegates to the shared Common implementation
        return Common.get_filesystem_name(file_path)
    process_file_path = staticmethod(process_file_path)
    def get_md5(path):
        '''get md5 checksum'''
        # use the configured python interpreter if one is set
        py_exec = Config.get_value("services", "python")
        if not py_exec:
            py_exec = "python"

        # NOTE(review): Python 2 only -- "unicode" does not exist in Python 3
        if isinstance(path, unicode):
            path = path.encode('utf-8')

        # run the checksum in a separate process via the bundled helper script
        popen = subprocess.Popen([py_exec, '%s/src/bin/get_md5.py'%Environment.get_install_dir(), path], shell=False, stdout=subprocess.PIPE)
        popen.wait()
        output = ''
        value = popen.communicate()
        if value:
            output = value[0].strip()
        if not output:
            # no checksum produced: surface the helper's stderr
            err = value[1]
            print err
        return output
    get_md5 = staticmethod(get_md5)
def is_file_group(file_path):
'''returns True if it is a file group'''
return not (file_path.find('#') == -1 and file_path.find('%') == -1)
is_file_group = staticmethod(is_file_group)
class FileAccess(SObject):
    '''SObject recording an access of a file by the current login
    (sthpw/file_access table).'''

    SEARCH_TYPE = "sthpw/file_access"

    def create(file):
        '''Create and commit a file_access entry for the given file,
        stamped with the current login.'''
        file_code = file.get_code()
        file_access = FileAccess(FileAccess.SEARCH_TYPE)
        file_access.set_value("file_code", file_code)

        # NOTE(review): WebContainer is assumed to be provided by the
        # wildcard "from pyasm.search import *" import -- confirm
        security = WebContainer.get_security()
        user = security.get_user_name()
        file_access.set_value("login", user)
        file_access.commit()
        return file_access
    create = staticmethod(create)
class IconCreator(object):
'''Utility class that creates icons of an image or document in the
same directory as the image'''
    def __init__(my, file_path):
        '''Prepare an icon creator for the given source file.  Raises
        FileException when the file does not exist.  Generated icons/web
        images are written next to the source file.'''
        my.file_path = file_path

        # check if it exists
        if not os.path.exists( file_path ):
            raise FileException( \
                "Error: file [%s] does not exist" % my.file_path )

        # output directory: same directory as the source file
        my.tmp_dir = os.path.dirname(file_path)
        # result paths, filled in by execute(); None when generation failed
        my.icon_path = None
        my.web_path = None

        # generation modes, see set_texture_mode()/set_icon_mode()
        my.texture_mode = False
        my.icon_mode = False
    def set_texture_mode(my):
        '''texture mode down res is 1/4 size'''
        # NOTE(review): _process_image scales textures by 0.5; the
        # "1/4 size" wording above may be stale -- confirm
        my.texture_mode = True

    def set_icon_mode(my):
        '''icon mode down res is 1/4 size'''
        # icon mode generates only the icon, skipping the web image
        my.icon_mode = True

    def get_icon_path(my):
        '''Return the generated icon path (None when not generated).'''
        return my.icon_path

    def get_web_path(my):
        '''Return the generated web-image path (None when not generated).'''
        return my.web_path

    def create_icons(my):
        '''Alias for execute().'''
        my.execute()
    def execute(my):
        '''Generate icon/web images for the source file, dispatching on its
        extension: pdf -> convert, video -> ffmpeg, documents -> skipped,
        everything else -> treated as an image.'''
        # check file name
        file_name = os.path.basename(my.file_path)
        ext = File.get_extension(file_name)
        type = string.lower(ext)

        if type == "pdf":
            my._process_pdf( file_name )
        elif type in File.NORMAL_EXT:
            # skip icon generation for normal or video files
            pass
        elif type in File.VIDEO_EXT:
            try:
                my._process_video( file_name )
            except IOError, e:
                '''This is an unknown file type. Do nothing and except as a
                file'''
                print "WARNING: ", e.__str__()
                Environment.add_warning("Unknown file type", e.__str__())
        else:
            # assume it is an image
            try:
                my._process_image( file_name )
            except IOError, e:
                '''This is an unknown file type. Do nothing and except as a
                file'''
                print "WARNING: ", e.__str__()
                Environment.add_warning("Unknown file type", e.__str__())
    def _process_pdf(my, file_name):
        '''Render the first page of a PDF to "<base>_icon.png" using the
        ImageMagick "convert" tool.  No-op on macOS or when convert is not
        on the PATH.'''
        base, ext = os.path.splitext(file_name)
        icon_file_name = base + "_icon.png"
        tmp_icon_path = "%s/%s" % (my.tmp_dir, icon_file_name)

        if sys.platform == 'darwin':
            return
        else:
            if not Common.which("convert"):
                return
            try:
                my.file_path = my.file_path.encode('utf-8')
                import shlex, subprocess
                # "[0]" selects only the first page of the PDF
                subprocess.call(['convert', '-geometry','80','-raise','2x2','%s[0]'%my.file_path,\
                    "%s"%tmp_icon_path])
            except Exception, e:
                print "Error extracting from pdf [%s]" % e
                return

        # check that it actually got created
        if os.path.exists(tmp_icon_path):
            my.icon_path = tmp_icon_path
        else:
            print "Warning: [%s] did not get created from pdf" % tmp_icon_path
def get_web_file_size(my):
from pyasm.prod.biz import ProdSetting
web_file_size = ProdSetting.get_value_by_key('web_file_size')
thumb_size = (640, 480)
if web_file_size:
parts = re.split('[\Wx]+', web_file_size)
thumb_size = (640, 480)
if len(parts) == 2:
try:
thumb_size = (int(parts[0]), int(parts[1]))
except ValueError:
thumb_size = (640, 480)
return thumb_size
    def _process_video(my, file_name):
        '''Grab a frame at the 1-second mark with ffmpeg and write it as
        the web image ("<base>_web.jpg") and icon ("<base>_icon.png").
        No-op when ffmpeg is not on the PATH; failures are recorded as
        warnings and leave the corresponding path set to None.'''
        ffmpeg = Common.which("ffmpeg")
        if not ffmpeg:
            return

        thumb_web_size = my.get_web_file_size()
        thumb_icon_size = (120, 100)

        exts = File.get_extensions(file_name)
        base, ext = os.path.splitext(file_name)
        icon_file_name = "%s_icon.png" % base
        web_file_name = "%s_web.jpg" % base
        tmp_icon_path = "%s/%s" % (my.tmp_dir, icon_file_name)
        tmp_web_path = "%s/%s" % (my.tmp_dir, web_file_name)

        #cmd = '''"%s" -i "%s" -r 1 -ss 00:00:01 -t 1 -s %sx%s -vframes 1 "%s"''' % (ffmpeg, my.file_path, thumb_web_size[0], thumb_web_size[1], tmp_web_path)
        #os.system(cmd)
        import subprocess

        # web-sized frame grab
        try:
            subprocess.call([ffmpeg, '-i', my.file_path, "-y", "-ss", "00:00:01","-t","1",\
                    "-s","%sx%s"%(thumb_web_size[0], thumb_web_size[1]),"-vframes","1","-f","image2", tmp_web_path])
            if os.path.exists(tmp_web_path):
                my.web_path = tmp_web_path
            else:
                my.web_path = None
        except Exception, e:
            Environment.add_warning("Could not process file", \
                    "%s - %s" % (my.file_path, e.__str__()))
            pass

        # icon-sized frame grab
        try:
            subprocess.call([ffmpeg, '-i', my.file_path, "-y", "-ss", "00:00:01","-t","1",\
                    "-s","%sx%s"%(thumb_icon_size[0], thumb_icon_size[1]),"-vframes","1","-f","image2", tmp_icon_path])
            if os.path.exists(tmp_icon_path):
                my.icon_path = tmp_icon_path
            else:
                my.icon_path = None
        except Exception, e:
            Environment.add_warning("Could not process file", \
                    "%s - %s" % (my.file_path, e.__str__()))
            pass
def _process_image(my, file_name):
    '''Generate web and icon versions of an image file, honoring
    my.texture_mode (half-size web + icon) and my.icon_mode (icon only).
    Sets my.web_path / my.icon_path; either becomes None when generation
    fails or produces an empty file.'''
    base, ext = os.path.splitext(file_name)
    # get all of the extensions
    exts = File.get_extensions(file_name)
    # frame-numbered files look like "name.0001.ext": keep the frame part
    # in the generated names so frames do not overwrite each other
    frame = 0
    if len(exts) == 2:
        try:
            frame = int(exts[0])
            base = base.replace(".%s" % exts[0], '' )
        except ValueError:
            frame = 0
    if frame:
        icon_file_name = "%s_icon.%s.png" % (base, exts[0])
        web_file_name = "%s_web.%s.jpg" % (base, exts[0])
    else:
        icon_file_name = "%s_icon.png" % base
        web_file_name = "%s_web.jpg" % base
    tmp_icon_path = "%s/%s" % (my.tmp_dir, icon_file_name)
    tmp_web_path = "%s/%s" % (my.tmp_dir, web_file_name)
    # create the web image
    try:
        if my.texture_mode:
            # textures: web image is a 50% scaled copy, icon derived from it
            my._resize_texture(my.file_path, tmp_web_path, 0.5)
            my.web_path = tmp_web_path
            # create the icon
            thumb_size = (120,100)
            try:
                my._resize_image(tmp_web_path, tmp_icon_path, thumb_size)
            except TacticException:
                my.icon_path = None
            else:
                my.icon_path = tmp_icon_path
        elif my.icon_mode: # just icon, no web
            # create the icon only
            thumb_size = (120,100)
            try:
                my._resize_image(my.file_path, tmp_icon_path, thumb_size)
            except TacticException:
                my.icon_path = None
            else:
                my.icon_path = tmp_icon_path
        else:
            # default: full web image, then icon derived from the web image
            thumb_size = my.get_web_file_size()
            try:
                my._resize_image(my.file_path, tmp_web_path, thumb_size)
            except TacticException:
                my.web_path = None
            else:
                my.web_path = tmp_web_path
                # create the icon
                thumb_size = (120,100)
                try:
                    my._resize_image(tmp_web_path, tmp_icon_path, thumb_size)
                except TacticException:
                    my.icon_path = None
                else:
                    my.icon_path = tmp_icon_path
        # check icon file size, reset to none if it is empty
        # TODO: use finally in Python 2.5
        if my.web_path:
            web_path_size = os.stat(my.web_path)[stat.ST_SIZE]
            if not web_path_size:
                my.web_path = None
        if my.icon_path:
            icon_path_size = os.stat(my.icon_path)[stat.ST_SIZE]
            if not icon_path_size:
                my.icon_path = None
    except IOError, e:
        Environment.add_warning("Could not process file", \
            "%s - %s" % (my.file_path, e.__str__()))
        my.web_path = None
        my.icon_path = None
def _extract_frame(my, large_path, small_path, thumb_size):
    # Intentionally a no-op placeholder; frame extraction is handled
    # elsewhere (see _process_video).
    pass
def _resize_image(my, large_path, small_path, thumb_size):
    '''Resize large_path into small_path bounded by thumb_size, trying
    ImageMagick first, then PIL, then (on macOS) sips.  Raises
    TacticException when no tool is available or the output file was not
    created.'''
    try:
        large_path = large_path.encode('utf-8')
        small_path = small_path.encode('utf-8')
        if HAS_IMAGE_MAGICK:
            # generate imagemagick command
            convert_cmd = []
            convert_cmd.append(convert_exe)
            # png's and psd's can have multiple layers which need to be flattened to make an accurate thumbnail
            if large_path.lower().endswith('png'):
                convert_cmd.append('-flatten')
            if large_path.lower().endswith('psd'):
                # "[0]" selects only the first (composite) layer of the PSD
                large_path += "[0]"
            convert_cmd.extend(['-resize','%sx%s'%(thumb_size[0], thumb_size[1])])
            # FIXME: needs PIL for this ... should use ImageMagick to find image size
            if HAS_PIL:
                try:
                    im = Image.open(large_path)
                    x,y = im.size
                except Exception, e:
                    print "WARNING: ", e
                    x = 0
                    y = 0
                if x < y:
                    # icons become awkward if height is bigger than width
                    # add white background for more reasonable icons
                    convert_cmd.extend(['-background','white'])
                    convert_cmd.extend(['-gravity','center'])
                    convert_cmd.extend(['-extent','%sx%s'%(thumb_size[0], thumb_size[1])])
            convert_cmd.append('%s'%(large_path))
            convert_cmd.append('%s'%(small_path))
            subprocess.call(convert_cmd)
        # if we don't have ImageMagick, use PIL, if installed (in non-mac os systems)
        elif HAS_PIL:
            # use PIL
            # create the thumbnail
            im = Image.open(large_path)
            # seek(1) succeeding means the image has multiple frames
            # (animated); convert the first frame to RGB in that case
            try:
                im.seek(1)
            except EOFError:
                is_animated = False
            else:
                is_animated = True
                im.seek(0)
                im = im.convert('RGB')
            x,y = im.size
            to_ext = "PNG"
            if small_path.lower().endswith('jpg') or small_path.lower().endswith('jpeg'):
                to_ext = "JPEG"
            if x >= y:
                # landscape: bound by width only
                im.thumbnail( (thumb_size[0],10000), Image.ANTIALIAS )
                im.save(small_path, to_ext)
            else:
                #im.thumbnail( (10000,thumb_size[1]), Image.ANTIALIAS )
                x,y = im.size
                # first resize to match this thumb_size
                base_height = thumb_size[1]
                h_percent = (base_height/float(y))
                base_width = int((float(x) * float(h_percent)))
                im = im.resize((base_width, base_height), Image.ANTIALIAS )
                # then paste to white image
                im2 = Image.new( "RGB", thumb_size, (255,255,255) )
                # center the resized image horizontally on the white canvas
                offset = (thumb_size[0]/2) - (im.size[0]/2)
                im2.paste(im, (offset,0) )
                im2.save(small_path, to_ext)
        # if neither IM nor PIL is installed, check if this is a mac system and use sips if so
        elif sys.platform == 'darwin':
            convert_cmd = ['sips', '--resampleWidth', '%s'%thumb_size[0], '--out', small_path, large_path]
            subprocess.call(convert_cmd)
        else:
            raise TacticException('No image manipulation tool installed')
    except Exception, e:
        print "Error: ", e
    # after these operations, confirm that the icon has been generated
    if not os.path.exists(small_path):
        raise TacticException('Icon generation failed')
def _resize_texture(my, large_path, small_path, scale):
    '''Write a scaled copy of large_path to small_path (width multiplied by
    `scale`, aspect preserved).  Falls back to command-line tools when PIL
    fails; re-raises the original error if no output file was produced.'''
    # create the thumbnail
    try:
        im = Image.open(large_path)
        x, y = im.size
        resize = int(float(x) * scale)
        im.thumbnail((resize, 10000), Image.ANTIALIAS)
        im.save(small_path, "PNG")
    except:
        # PIL unavailable or failed: fall back to an external tool.
        # NOTE: the fallback hard-codes 25% instead of using `scale` —
        # preserved as-is; TODO confirm whether it should honor `scale`.
        if sys.platform == 'darwin':
            # FIX: sips writes to the --out path and reads the trailing
            # argument; the original passed (large_path, small_path), which
            # made the *source* the output file.
            cmd = "sips --resampleWidth 25%% --out %s %s" \
                % (small_path, large_path)
        else:
            # convert takes input first, output last
            cmd = "convert -resize 25%% %s %s" \
                % (large_path, small_path)
        os.system(cmd)
        if not os.path.exists(small_path):
            raise
def add_icons(file_paths):
    '''For every source path, generate its icon and web images and return
    two aligned lists: ([icon_path, web_path, ...], ["icon", "web", ...]).'''
    paths = []
    types = []
    for src_path in file_paths:
        # create icons and add to the list
        creator = IconCreator(src_path)
        creator.create_icons()
        paths.extend([creator.get_icon_path(), creator.get_web_path()])
        types.extend(["icon", "web"])
    return paths, types
add_icons = staticmethod(add_icons)
class FileGroup(File):
    '''A file entry that represents a whole sequence of files (e.g. the
    frames of a render) described by a template path plus a FileRange.'''

    def check_paths(file_path, file_range):
        ''' check existence of files. this expects a FileRange object'''
        expanded = FileGroup.expand_paths(file_path, file_range)
        for expand in expanded:
            if not System().exists(expand):
                raise FileException("File '%s' does not exist!" % expand)
        return expanded
    check_paths = staticmethod(check_paths)

    def create( file_path, file_range, search_type, search_id, file_type=None ):
        '''Create and commit a sequence file entry; st_size is the combined
        size of every file in the range.  Raises FileException when any
        member file is missing.'''
        expanded = FileGroup.check_paths(file_path, file_range)
        file_name = os.path.basename(file_path)
        file = File(File.SEARCH_TYPE)
        file.set_value("file_name", file_name)
        file.set_value("search_type", search_type)
        file.set_value("search_id", search_id)

        from stat import ST_SIZE
        total = 0
        # FIX: the loop variable used to shadow the "expanded" list itself
        for expanded_path in expanded:
            total += os.stat(expanded_path)[ST_SIZE]

        project = Project.get()
        file.set_value("project_code", project.get_code())
        file.set_value("st_size", total)
        file.set_value("file_range", file_range.get_key())
        if file_type:
            file.set_value("type", file_type)
        file.set_value("base_type", File.BASE_TYPE_SEQ)
        file.commit()
        return file
    create = staticmethod(create)

    def expand_paths( file_path, file_range ):
        '''expands the file paths, replacing % or # padding as specified in
        the file_range object'''
        file_paths = []
        # frame_by is not really used here yet
        frame_start, frame_end, frame_by = file_range.get_values()
        # support %0.4d notation
        # FIX(review): this condition was garbled in the reviewed source;
        # reconstructed as a check for printf-style templates
        if file_path.find('%') != -1:
            for i in range(frame_start, frame_end+1, frame_by):
                expanded = file_path % i
                file_paths.append( expanded )
        else:
            # find out the number of #'s in the path
            padding = len( file_path[file_path.index('#'):file_path.rindex('#')] )+1
            for i in range(frame_start, frame_end+1, frame_by):
                expanded = file_path.replace( '#'*padding, str(i).zfill(padding) )
                file_paths.append(expanded)
        return file_paths
    expand_paths = staticmethod(expand_paths)

    def extract_template_and_range(cls, paths):
        '''Given concrete frame paths, derive the '#'-padded template path
        and a frame range string such as "1-10,20-25".

        NOTE(review): this classmethod's name was unreadable in the reviewed
        copy and has been reconstructed — confirm against upstream before
        relying on external callers.'''
        frame = None
        # do we extract a range?
        padding = 0
        # these are loop-invariant: derived from the first path only
        path = paths[0].replace("\\", "/")
        basename = os.path.basename(path)
        dirname = os.path.dirname(path)
        # find the longest run of digits (up to 12) in the first basename;
        # that run is taken to be the frame number
        for i in range(12, 0, -1):
            p = re.compile("(\d{%d,})" % i)
            m = p.search(basename)
            if m:
                frame = m.groups()[0]
                padding = len(frame)
                break
        if not frame:
            # no digits at all: use a dummy 4-character token
            padding = 4
            frame = 'x'*padding
        template = basename.replace(frame, '#'*padding)

        frange = []
        last_frame = None
        p = re.compile("(\d{%s})" % padding)
        for path in paths:
            path = path.replace("\\", "/")
            basename = os.path.basename(path)
            m = p.search(basename)
            if m:
                frame = int(m.groups()[0])
            else:
                frame = 0
            # the first one is always added
            if last_frame == None:
                frange.append(frame)
                frange.append('-')
                frange.append(frame)
                last_frame = frame
                continue
            # contiguous frames extend the current run; a gap starts a new one
            diff = frame - last_frame
            if diff == 1:
                frange[-1] = frame
            else:
                frange.append(frame)
                frange.append('-')
                frange.append(frame)
            last_frame = frame
        template = "%s/%s" % (dirname, template)
        frange = "".join([str(x) for x in frange])
        return template, frange
    extract_template_and_range = classmethod(extract_template_and_range)
class FileRange(object):
    '''An inclusive frame range [frame_start, frame_end] stepped by
    frame_by; e.g. "1-10/2" covers frames 1,3,5,7,9.'''

    def __init__(my, frame_start=1, frame_end=1, frame_by=1):
        # validate before storing anything
        assert(isinstance(frame_start, (int)))
        assert(isinstance(frame_end, (int)))
        assert(isinstance(frame_by, (int)))
        my.frame_start = frame_start
        my.frame_end = frame_end
        my.frame_by = frame_by

    def get_frame_by(my):
        return my.frame_by

    def get_frame_start(my):
        return my.frame_start

    def get_frame_end(my):
        return my.frame_end

    def set_frame_by(my, frame_by):
        assert(isinstance(frame_by, (int)))
        my.frame_by = frame_by

    def set_duration(my, duration):
        '''Reset the range to start at frame 1 and span `duration` frames.'''
        my.frame_start = 1
        my.frame_end = duration

    def get_num_frames(my):
        '''Number of frames in the range.
        FIX: uses floor division so the result is an int under both
        Python 2 and Python 3 (plain "/" became a float under Python 3).'''
        return (my.frame_end - my.frame_start + 1) // my.frame_by

    def get_key(my):
        '''Canonical "start-end/by" string, e.g. "1-10/2".'''
        return "%s-%s/%s" % (my.frame_start, my.frame_end, my.frame_by)

    def get_display(my):
        '''Human-readable form; omits "/by" when stepping by 1.'''
        if my.frame_by == 1:
            return "%s-%s" % (my.frame_start, my.frame_end)
        else:
            return my.get_key()

    def get_values(my):
        return (my.frame_start, my.frame_end, my.frame_by)

    # static method
    def get(file_range):
        ''' build a FileRange obj from a string such as "1-10" or "1-10/2"'''
        frame_by = 1
        if file_range.find("/") != -1:
            file_range, frame_by = file_range.split("/")
        tmps = file_range.split("-")
        if len(tmps) > 2:
            raise FileException("Unable to determine file_range [%s]" %file_range)
        frame_start = int(tmps[0])
        frame_end = int(tmps[1])
        frame_by = int(frame_by)
        return FileRange(frame_start, frame_end, frame_by)
    get = staticmethod(get)
package ch.elexis.core.model;
import java.util.ResourceBundle;
import ch.elexis.core.interfaces.ILocalizedEnum;
import ch.elexis.core.interfaces.INumericEnum;
public enum InvoiceState implements INumericEnum, ILocalizedEnum {
//@formatter:off
UNKNOWN(0),
BILLED(1),
NOT_BILLED(2),
ONGOING(3),
OPEN(4),
OPEN_AND_PRINTED(5),
DEMAND_NOTE_1(6),
<API key>(7),
DEMAND_NOTE_2(8),
<API key>(9),
DEMAND_NOTE_3(10),
<API key>(11),
IN_EXECUTION(12),
PARTIAL_LOSS(13),
TOTAL_LOSS(14),
PARTIAL_PAYMENT(15),
PAID(16),
EXCESSIVE_PAYMENT(17),
CANCELLED(18),
FROM_TODAY(19),
NOT_FROM_TODAY(20),
NOT_FROM_YOU(21),
DEFECTIVE(22),
TO_PRINT(23),
OWING(24),
<API key>(25),
DEPRECIATED(26), // (Abgeschrieben) Storniert und Kons nicht mehr freigegeben
REJECTED(27);
//@formatter:on
private int state;
public static enum REJECTCODE {
<API key>, NO_DIAG, NO_MANDATOR, NO_CASE, NO_DEBITOR,
NO_GUARANTOR, VALIDATION_ERROR, REJECTED_BY_PEER, SUM_MISMATCH, INTERNAL_ERROR;
};
private InvoiceState(int state){
this.state = state;
}
public int getState(){
return state;
}
@Override
public int numericValue(){
return state;
}
/**
* @return the combined (or) states that represent the owing state
*/
public static InvoiceState[] owingStates(){
return new InvoiceState[] {
OPEN_AND_PRINTED, <API key>, <API key>, <API key>
};
}
/**
*
* @return the combined (or) states that represent the to print state
*/
public static InvoiceState[] toPrintStates() {
return new InvoiceState[] {
OPEN, DEMAND_NOTE_1, DEMAND_NOTE_2, DEMAND_NOTE_3
};
}
/**
* Decide whether this state means an "active" state, i.e. the bill is not paid or closed by any
* means
*
* @return true if there are still payments awaited
*/
public boolean isActive(){
if (state > ONGOING.getState() && state < PARTIAL_LOSS.getState()) {
return true;
}
if (state == PARTIAL_PAYMENT.getState()) {
return true;
}
if (state > DEFECTIVE.getState() && state < DEPRECIATED.getState()) {
return true;
}
return false;
}
public static InvoiceState fromState(int value){
for (InvoiceState is : InvoiceState.values()) {
if (value == is.getState()) {
return is;
}
}
return InvoiceState.UNKNOWN;
}
@Override
public String getLocaleText(){
try {
return ResourceBundle.getBundle("ch.elexis.core.model.messages")
.getString(InvoiceState.class.getSimpleName() + "." + this.name());
} catch (Exception e) {
return this.name();
}
}
} |
// Demo data object rendered into the page.
var person = {
  firstName: "John",
  lastName: "Doe",
  id: 5566
};

// Display the person's full name inside the #demo element.
var fullName = person.firstName + " " + person.lastName;
document.getElementById("demo").innerHTML = fullName;
package com.ibm.ws.security.authentication.internal.cache;
/**
* Key-value authentication cache interface.
*/
public interface AuthCache {

    /**
     * Clear all entries in the cache.
     */
    public void clearAllEntries();

    /**
     * Get the value for the specified key from the cache.
     *
     * @param key The key to look for the value for.
     * @return The value mapped to the specified key, or null if no mapping exists.
     */
    public Object get(Object key);

    /**
     * Remove the value for the specified key from the cache.
     *
     * @param key The key to remove the value for.
     */
    public void remove(Object key);

    /**
     * Insert the value for the specified key into the cache.
     * Implementations define the behavior when a mapping already exists.
     *
     * @param key The key to add the value for.
     * @param value The value to map to the specified key.
     */
    public void insert(Object key, Object value);

    /**
     * Stop the eviction task, if any.
     */
    public void stopEvictionTask();
}
package hu.elte.txtuml.export.uml2;
import org.eclipse.uml2.uml.<API key>;
import org.eclipse.uml2.uml.CallOperationAction;
import org.eclipse.uml2.uml.ConditionalNode;
import org.eclipse.uml2.uml.ExpansionRegion;
import org.eclipse.uml2.uml.LoopNode;
import org.eclipse.uml2.uml.Model;
import org.eclipse.uml2.uml.ReadVariableAction;
import org.eclipse.uml2.uml.SequenceNode;
import org.eclipse.uml2.uml.<API key>;
import org.junit.BeforeClass;
import org.junit.Test;
/**
 * Verifies that txtUML action code containing control structures (do-while,
 * for, foreach, if/else, inline if and while) is exported into the expected
 * UML2 activity node structure (LoopNode, ConditionalNode, ExpansionRegion,
 * SequenceNode, ...). Helper methods such as node/loopBody/loopCond are
 * inherited from UMLExportTestBase.
 *
 * NOTE(review): the class name and two imported action types were
 * unreadable (redacted) in the reviewed copy of this file; confirm them
 * against the original repository.
 */
public class <API key> extends UMLExportTestBase {

	@BeforeClass
	public static void setUpBeforeClass() throws Exception {
		// one-time global initialization required before any model export
		<API key>.initialize();
	}

	// do-while: body is exported first, the condition is checked afterwards
	@Test
	public void testDoWhileLoop() throws Exception {
		Model model = model("hu.elte.txtuml.export.uml2.tests.models.do_while_control");
		SequenceNode body = loadActionCode(model, "TestClass", "test");
		LoopNode loopNode = (LoopNode) node(body, 0, "do { ... } while (x>0)", LoopNode.class);
		SequenceNode bodyNode = loopBody(loopNode, 0, null, SequenceNode.class);
		node(bodyNode, 0, "x=--x;", SequenceNode.class);
		loopCond(loopNode, "x>0", CallOperationAction.class);
	}

	// for: setup, condition, body and increment sections are all exported
	@Test
	public void testForLoop() throws Exception {
		Model model = model("hu.elte.txtuml.export.uml2.tests.models.for_control");
		SequenceNode body = loadActionCode(model, "TestClass", "test");
		LoopNode loopNode = node(body, 0, "for (i<limit) { ... }", LoopNode.class);
		SequenceNode bodyNode = loopBody(loopNode, 0, null, SequenceNode.class);
		node(bodyNode, 0, "this.sum=this.sum+i;", SequenceNode.class);
		SequenceNode increment = loopBody(loopNode, 1, "update", SequenceNode.class);
		node(increment, 0, "i=++i", SequenceNode.class);
		loopCond(loopNode, "i<limit", SequenceNode.class);
		SequenceNode setup = loopSetup(loopNode, "setup", SequenceNode.class);
		node(setup, 0, "0", <API key>.class);
		node(setup, 1, "i=0", <API key>.class);
	}

	// foreach: exported as an ExpansionRegion over the collection
	@Test
	public void testForEachLoop() throws Exception {
		Model model = model("hu.elte.txtuml.export.uml2.tests.models.foreach_control");
		SequenceNode body = loadActionCode(model, "TestClass", "test");
		ExpansionRegion exp = node(body, 0, "foreach (coll)", ExpansionRegion.class);
		inputElement(exp, 0, "coll_expansion");
		node(exp, 0, "coll", ReadVariableAction.class);
		SequenceNode inner = node(exp, 1, null, SequenceNode.class);
		node(inner, 0, "this.sum=this.sum+i;", SequenceNode.class);
	}

	// if without else: condition stored in #if_cond, one clause exported
	@Test
	public void testIf() throws Exception {
		Model model = model("hu.elte.txtuml.export.uml2.tests.models.if_control");
		SequenceNode body = loadActionCode(model, "TestClass", "testIf");
		SequenceNode ifNode = node(body, 0, "if (test)", SequenceNode.class);
		node(ifNode, 0, "test", ReadVariableAction.class);
		node(ifNode, 1, "#if_cond=test", <API key>.class);
		ConditionalNode condNode = node(ifNode, 2, null, ConditionalNode.class);
		clauseTest(condNode, 0, "#if_cond", ReadVariableAction.class);
		SequenceNode clauseBody = clauseBody(condNode, 0, null, SequenceNode.class);
		node(clauseBody, 0, "Action.log(\"then\");", SequenceNode.class);
	}

	// if/else: second clause guarded by the negated condition
	@Test
	public void testIfElse() throws Exception {
		Model model = model("hu.elte.txtuml.export.uml2.tests.models.if_control");
		SequenceNode body = loadActionCode(model, "TestClass", "testIfElse");
		SequenceNode ifNode = node(body, 0, "if (test)", SequenceNode.class);
		node(ifNode, 0, "test", ReadVariableAction.class);
		node(ifNode, 1, "#if_cond=test", <API key>.class);
		ConditionalNode condNode = node(ifNode, 2, null, ConditionalNode.class);
		clauseTest(condNode, 0, "#if_cond", ReadVariableAction.class);
		SequenceNode clauseBody = clauseBody(condNode, 0, null, SequenceNode.class);
		node(clauseBody, 0, "Action.log(\"then\");", SequenceNode.class);
		clauseTest(condNode, 1, "!#if_cond", CallOperationAction.class);
		SequenceNode elseBody = clauseBody(condNode, 1, null, SequenceNode.class);
		node(elseBody, 0, "Action.log(\"else\");", SequenceNode.class);
	}

	// single-statement (inline) if body
	@Test
	public void testInlineIf() throws Exception {
		Model model = model("hu.elte.txtuml.export.uml2.tests.models.if_control");
		SequenceNode body = loadActionCode(model, "TestClass", "testInlineIf");
		SequenceNode ifNode = node(body, 0, "if (test)", SequenceNode.class);
		node(ifNode, 0, "test", ReadVariableAction.class);
		node(ifNode, 1, "#if_cond=test", <API key>.class);
		ConditionalNode condNode = node(ifNode, 2, null, ConditionalNode.class);
		clauseTest(condNode, 0, "#if_cond", ReadVariableAction.class);
		clauseBody(condNode, 0, "Action.log(\"then\");", SequenceNode.class);
	}

	// single-statement if body with a single-statement else body
	@Test
	public void testInlineIfElse() throws Exception {
		Model model = model("hu.elte.txtuml.export.uml2.tests.models.if_control");
		SequenceNode body = loadActionCode(model, "TestClass", "testInlineIfElse");
		SequenceNode ifNode = node(body, 0, "if (test)", SequenceNode.class);
		node(ifNode, 0, "test", ReadVariableAction.class);
		node(ifNode, 1, "#if_cond=test", <API key>.class);
		ConditionalNode condNode = node(ifNode, 2, null, ConditionalNode.class);
		clauseTest(condNode, 0, "#if_cond", ReadVariableAction.class);
		clauseBody(condNode, 0, "Action.log(\"then\");", SequenceNode.class);
		clauseTest(condNode, 1, "!#if_cond", CallOperationAction.class);
		clauseBody(condNode, 1, "Action.log(\"else\");", SequenceNode.class);
	}

	// while: condition checked before each iteration
	@Test
	public void testWhileLoop() throws Exception {
		Model model = model("hu.elte.txtuml.export.uml2.tests.models.while_control");
		SequenceNode body = loadActionCode(model, "TestClass", "test");
		LoopNode loopNode = node(body, 0, "while (i>0) { ... }", LoopNode.class);
		SequenceNode bodyNode = loopBody(loopNode, 0, null, SequenceNode.class);
		node(bodyNode, 0, "i=--i;", SequenceNode.class);
		loopCond(loopNode, "i>0", CallOperationAction.class);
	}
}
package org.eclipse.persistence.testing.tests.wdf.jpa1.simple;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;
import java.util.Arrays;
import java.util.Date;
import java.util.GregorianCalendar;
import javax.persistence.EntityManager;
import javax.persistence.<API key>;
import org.eclipse.persistence.testing.framework.wdf.Bugzilla;
import org.eclipse.persistence.testing.framework.wdf.JPAEnvironment;
import org.eclipse.persistence.testing.framework.wdf.Skip;
import org.eclipse.persistence.testing.models.wdf.jpa1.types.<API key>;
import org.eclipse.persistence.testing.models.wdf.jpa1.types.UserDefinedEnum;
import org.eclipse.persistence.testing.models.wdf.jpa1.types.<API key>;
import org.eclipse.persistence.testing.tests.wdf.jpa1.JPA1Base;
import org.junit.Test;
public class TestBasicFieldTypes extends JPA1Base {
@Test
public void testInsert() {
    // Smoke test: persist a fully populated entity, then re-read it by id.
    JPAEnvironment env = getEnvironment();
    EntityManager em = env.getEntityManager();
    try {
        <API key> entity = new <API key>(0);
        entity.fill();
        env.beginTransaction(em);
        em.persist(entity);
        env.<API key>(em);
        verify(true, "no Exception");
        entity = em.find(<API key>.class, new Integer(0));
    } finally {
        closeEntityManager(em);
    }
}
/**
 * Generic round-trip check for a primitive-typed field: persists an entity
 * with the field set, verifies the value survives re-reading, verifies a
 * commit without modification does NOT fire postUpdate, and verifies a real
 * modification does fire postUpdate and is persisted.
 *
 * @param id primary key for the test entity (unique per test method)
 * @param validator call-backs that set/change/compare the field under test
 * @param fieldName field name used in failure messages
 */
private void validatePrimitive(final int id, Validator validator, String fieldName) {
    JPAEnvironment env = getEnvironment();
    EntityManager em = env.getEntityManager();
    try {
        // insert the object
        env.beginTransaction(em);
        <API key> obj = new <API key>(id);
        validator.set(obj);
        em.persist(obj);
        env.<API key>(em);
        verify(true, "no Exception");
        obj = em.find(<API key>.class, new Integer(id));
        verify(!validator.isChanged(obj), fieldName + " not persisted");
        // update unchanged object: no postUpdate callback expected
        env.beginTransaction(em);
        obj = em.find(<API key>.class, new Integer(id));
        obj.clearPostUpdate();
        env.<API key>(em);
        verify(!obj.postUpdateWasCalled(), "postUpdate was called");
        obj = em.find(<API key>.class, new Integer(id));
        verify(!validator.isChanged(obj), fieldName + " is changed");
        // update changed object: postUpdate must fire and the change persist
        env.beginTransaction(em);
        obj = em.find(<API key>.class, new Integer(id));
        obj.clearPostUpdate();
        validator.change(obj);
        env.<API key>(em);
        verify(obj.postUpdateWasCalled(), "postUpdate was not called");
        obj = em.find(<API key>.class, new Integer(id));
        verify(validator.isChanged(obj), fieldName + " is unchanged");
    } finally {
        closeEntityManager(em);
    }
}
/**
 * Generic round-trip check for a nullable reference-typed field: insert
 * with null, insert with a value, commit without modification (must not
 * fire postUpdate), real modification (must fire postUpdate), and update
 * back to null.
 *
 * @param id primary key for the test entity (unique per test method)
 * @param validator call-backs that set/change/null/compare the field
 * @param fieldName field name used in failure messages
 */
private void validateReference(final int id, ReferenceValidator validator, String fieldName) {
    JPAEnvironment env = getEnvironment();
    EntityManager em = env.getEntityManager();
    try {
        <API key> obj = new <API key>(id);
        // insert object with null-field
        env.beginTransaction(em);
        em.persist(obj);
        env.<API key>(em);
        obj = em.find(<API key>.class, new Integer(id));
        verify(validator.isNull(obj), fieldName + " is not null");
        // delete the object again
        env.beginTransaction(em);
        em.remove(em.find(<API key>.class, new Integer(id)));
        env.<API key>(em);
        // insert object with non-null field
        env.beginTransaction(em);
        validator.set(obj);
        em.persist(obj);
        env.<API key>(em);
        verify(true, "no Exception");
        obj = em.find(<API key>.class, new Integer(id));
        verify(!validator.isChanged(obj), fieldName + " not persisted");
        // update unchanged: no postUpdate callback expected
        env.beginTransaction(em);
        obj = em.find(<API key>.class, new Integer(id));
        obj.clearPostUpdate();
        env.<API key>(em);
        verify(!obj.postUpdateWasCalled(), "postUpdate was called");
        obj = em.find(<API key>.class, new Integer(id));
        verify(!validator.isChanged(obj), fieldName + " is changed");
        // update changed object: postUpdate must fire and the change persist
        env.beginTransaction(em);
        obj = em.find(<API key>.class, new Integer(id));
        obj.clearPostUpdate();
        validator.change(obj);
        env.<API key>(em);
        verify(obj.postUpdateWasCalled(), "postUpdate was not called");
        obj = em.find(<API key>.class, new Integer(id));
        verify(validator.isChanged(obj), fieldName + " is unchanged");
        // update to null
        env.beginTransaction(em);
        obj = em.find(<API key>.class, new Integer(id));
        obj.clearPostUpdate();
        validator.setNull(obj);
        env.<API key>(em);
        verify(obj.postUpdateWasCalled(), "postUpdate was not called");
        obj = em.find(<API key>.class, new Integer(id));
        verify(validator.isNull(obj), fieldName + " is not null");
    } finally {
        closeEntityManager(em);
    }
}
/**
 * Generic round-trip check for a mutable reference field (e.g. dates or
 * arrays): insert with null, insert with a value, commit without
 * modification (must not fire postUpdate), replacement with null, restore,
 * in-place mutation (must be detected and fire postUpdate), and update back
 * to null.
 *
 * @param id primary key for the test entity (unique per test method)
 * @param validator call-backs that set/null/mutate/compare the field
 * @param fieldName field name used in failure messages
 */
private void validateMutable(final int id, MutableValidator validator, String fieldName) {
    JPAEnvironment env = getEnvironment();
    EntityManager em = env.getEntityManager();
    try {
        <API key> obj = new <API key>(id);
        // insert object with null-field
        env.beginTransaction(em);
        em.persist(obj);
        env.<API key>(em);
        obj = em.find(<API key>.class, new Integer(id));
        verify(validator.isNull(obj), fieldName + " is not null");
        // delete the object again
        env.beginTransaction(em);
        em.remove(em.find(<API key>.class, new Integer(id)));
        env.<API key>(em);
        // insert object with non-null field
        env.beginTransaction(em);
        validator.set(obj);
        em.persist(obj);
        env.<API key>(em);
        verify(true, "no Exception");
        obj = em.find(<API key>.class, new Integer(id));
        verify(!validator.isChanged(obj), fieldName + " not persisted");
        // update unchanged: no postUpdate callback expected
        env.beginTransaction(em);
        obj = em.find(<API key>.class, new Integer(id));
        obj.clearPostUpdate();
        env.<API key>(em);
        // FIX: the failure message used to read "postUpdate was not called",
        // which is the opposite of what this verify asserts (it checks that
        // postUpdate was NOT called) — now consistent with validateReference.
        verify(!obj.postUpdateWasCalled(), "postUpdate was called");
        obj = em.find(<API key>.class, new Integer(id));
        verify(!validator.isChanged(obj), fieldName + " is changed");
        // update changed (replace value with null)
        env.beginTransaction(em);
        obj = em.find(<API key>.class, new Integer(id));
        obj.clearPostUpdate();
        validator.setNull(obj);
        env.<API key>(em);
        verify(obj.postUpdateWasCalled(), "postUpdate was not called");
        obj = em.find(<API key>.class, new Integer(id));
        verify(validator.isNull(obj), fieldName + " is not null");
        // update original
        env.beginTransaction(em);
        obj = em.find(<API key>.class, new Integer(id));
        validator.set(obj);
        env.<API key>(em);
        obj = em.find(<API key>.class, new Integer(id));
        verify(!validator.isChanged(obj), fieldName + " not persisted");
        // mutate in place: the provider must detect the change
        env.beginTransaction(em);
        obj = em.find(<API key>.class, new Integer(id));
        obj.clearPostUpdate();
        validator.mutate(obj);
        env.<API key>(em);
        verify(obj.postUpdateWasCalled(), "postUpdate was not called");
        obj = em.find(<API key>.class, new Integer(id));
        verify(validator.isChanged(obj), fieldName + " not mutated");
        // update to null
        env.beginTransaction(em);
        obj = em.find(<API key>.class, new Integer(id));
        obj.clearPostUpdate();
        validator.setNull(obj);
        env.<API key>(em);
        verify(obj.postUpdateWasCalled(), "postUpdate was not called");
        obj = em.find(<API key>.class, new Integer(id));
        verify(validator.isNull(obj), fieldName + " is not null");
    } finally {
        closeEntityManager(em);
    }
}
// primitive types
@Test
public void <API key>() {
    // Round-trip a primitive boolean field through insert and update.
    validatePrimitive(1, new Validator() {
        public void set(<API key> entity) {
            entity.setPrimitiveBoolean(true);
        }
        public void change(<API key> entity) {
            entity.setPrimitiveBoolean(false);
        }
        public boolean isChanged(<API key> entity) {
            return !entity.isPrimitiveBoolean();
        }
    }, "primitiveBoolean");
}
@Test
public void testPrimitiveByte() {
    // Round-trip a primitive byte field (note: entity accessors are
    // spelled "Primititve" — kept, they are the entity's API).
    validatePrimitive(2, new Validator() {
        public void set(<API key> entity) {
            entity.setPrimititveByte((byte) 17);
        }
        public void change(<API key> entity) {
            entity.setPrimititveByte((byte) 23);
        }
        public boolean isChanged(<API key> entity) {
            return entity.getPrimititveByte() != 17;
        }
    }, "primitiveByte");
}
@Test
public void testPrimitiveChar() {
    // Round-trip a primitive char field through insert and update.
    validatePrimitive(3, new Validator() {
        public void set(<API key> entity) {
            entity.setPrimitiveChar('A');
        }
        public void change(<API key> entity) {
            entity.setPrimitiveChar('B');
        }
        public boolean isChanged(<API key> entity) {
            return entity.getPrimitiveChar() != 'A';
        }
    }, "primitiveChar");
}
@Test
public void testPrimitiveShort() {
    // Round-trip a primitive short field through insert and update.
    validatePrimitive(4, new Validator() {
        public void set(<API key> entity) {
            entity.setPrimitiveShort((short) 19);
        }
        public void change(<API key> entity) {
            entity.setPrimitiveShort((short) 45);
        }
        public boolean isChanged(<API key> entity) {
            return entity.getPrimitiveShort() != 19;
        }
    }, "primitiveShort");
}
@Test
public void testPrimitiveInt() {
    // Round-trip a primitive int field through insert and update.
    validatePrimitive(5, new Validator() {
        public void set(<API key> entity) {
            entity.setPrimitiveInt(88);
        }
        public void change(<API key> entity) {
            entity.setPrimitiveInt(77);
        }
        public boolean isChanged(<API key> entity) {
            return entity.getPrimitiveInt() != 88;
        }
    }, "primitiveInt");
}
@Test
public void testPrimitiveLong() {
    // Round-trip a primitive long field through insert and update.
    validatePrimitive(6, new Validator() {
        public void set(<API key> entity) {
            entity.setPrimitiveLong(88);
        }
        public void change(<API key> entity) {
            entity.setPrimitiveLong(77);
        }
        public boolean isChanged(<API key> entity) {
            return entity.getPrimitiveLong() != 88;
        }
    }, "primitiveLong");
}
@Test
public void testPrimitiveFloat() {
    // Round-trip a primitive float field through insert and update.
    validatePrimitive(7, new Validator() {
        public void set(<API key> entity) {
            entity.setPrimitiveFloat((float) 88.5);
        }
        public void change(<API key> entity) {
            entity.setPrimitiveFloat((float) 77.5);
        }
        public boolean isChanged(<API key> entity) {
            return entity.getPrimitiveFloat() != 88.5;
        }
    }, "primitiveFloat");
}
@Test
public void testPrimitiveDouble() {
    // Round-trip a primitive double field through insert and update.
    validatePrimitive(8, new Validator() {
        public void set(<API key> entity) {
            entity.setPrimitiveDouble(99.5);
        }
        public void change(<API key> entity) {
            entity.setPrimitiveDouble(77.5);
        }
        public boolean isChanged(<API key> entity) {
            return entity.getPrimitiveDouble() != 99.5;
        }
    }, "primitiveDouble");
}
// wrappers of primitive types
@Test
public void testWrapperBoolean() {
    // Round-trip a nullable Boolean field including null handling.
    validateReference(11, new ReferenceValidator() {
        public void set(<API key> entity) {
            entity.setWrapperBoolean(Boolean.TRUE);
        }
        public boolean isNull(<API key> entity) {
            return entity.getWrapperBoolean() == null;
        }
        public void setNull(<API key> entity) {
            entity.setWrapperBoolean(null);
        }
        public void change(<API key> entity) {
            entity.setWrapperBoolean(Boolean.FALSE);
        }
        public boolean isChanged(<API key> entity) {
            return !entity.getWrapperBoolean().equals(Boolean.TRUE);
        }
    }, "wrapperBoolean");
}
@Test
public void testWrapperByte() {
    // Round-trip a nullable Byte field including null handling.
    validateReference(12, new ReferenceValidator() {
        public void set(<API key> entity) {
            entity.setWrapperByte(new Byte((byte) 17));
        }
        public boolean isNull(<API key> entity) {
            return entity.getWrapperByte() == null;
        }
        public void setNull(<API key> entity) {
            entity.setWrapperByte(null);
        }
        public void change(<API key> entity) {
            entity.setWrapperByte(new Byte((byte) 18));
        }
        public boolean isChanged(<API key> entity) {
            return !entity.getWrapperByte().equals(new Byte((byte) 17));
        }
    }, "wrapperByte");
}
@Test
public void <API key>() {
    // Round-trip a nullable Character field including null handling.
    validateReference(13, new ReferenceValidator() {
        public void set(<API key> entity) {
            entity.setWrapperCharacter(new Character('A'));
        }
        public boolean isNull(<API key> entity) {
            return entity.getWrapperCharacter() == null;
        }
        public void setNull(<API key> entity) {
            entity.setWrapperCharacter(null);
        }
        public void change(<API key> entity) {
            entity.setWrapperCharacter(new Character('B'));
        }
        public boolean isChanged(<API key> entity) {
            return !entity.getWrapperCharacter().equals(new Character('A'));
        }
    }, "wrapperCharacter");
}
@Test
public void testWrapperShort() {
    // Round-trip a nullable Short field including null handling.
    validateReference(14, new ReferenceValidator() {
        public void set(<API key> entity) {
            entity.setWrapperShort(new Short((short) 1));
        }
        public boolean isNull(<API key> entity) {
            return entity.getWrapperShort() == null;
        }
        public void setNull(<API key> entity) {
            entity.setWrapperShort(null);
        }
        public void change(<API key> entity) {
            entity.setWrapperShort(new Short((short) 2));
        }
        public boolean isChanged(<API key> entity) {
            return !entity.getWrapperShort().equals(new Short((short) 1));
        }
    }, "wrapperShort");
}
@Test
public void testWrapperInteger() {
    // Round-trip a nullable Integer field including null handling.
    validateReference(15, new ReferenceValidator() {
        public void set(<API key> entity) {
            entity.setWrapperInteger(new Integer(1));
        }
        public boolean isNull(<API key> entity) {
            return entity.getWrapperInteger() == null;
        }
        public void setNull(<API key> entity) {
            entity.setWrapperInteger(null);
        }
        public void change(<API key> entity) {
            entity.setWrapperInteger(new Integer(2));
        }
        public boolean isChanged(<API key> entity) {
            return !entity.getWrapperInteger().equals(new Integer(1));
        }
    }, "wrapperInteger");
}
@Test
public void testWrapperLong() {
    // Round-trip a nullable Long field including null handling.
    validateReference(16, new ReferenceValidator() {
        public void set(<API key> entity) {
            entity.setWrapperLong(new Long(1));
        }
        public boolean isNull(<API key> entity) {
            return entity.getWrapperLong() == null;
        }
        public void setNull(<API key> entity) {
            entity.setWrapperLong(null);
        }
        public void change(<API key> entity) {
            entity.setWrapperLong(new Long(2));
        }
        public boolean isChanged(<API key> entity) {
            return !entity.getWrapperLong().equals(new Long(1));
        }
    }, "wrapperLong");
}
@Test
public void testWrapperDouble() {
    // Round-trip a nullable Double field including null handling.
    // (Note: uses id 18 while testWrapperFloat uses 17 — preserved.)
    validateReference(18, new ReferenceValidator() {
        public void set(<API key> entity) {
            entity.setWrapperDouble(new Double(1));
        }
        public boolean isNull(<API key> entity) {
            return entity.getWrapperDouble() == null;
        }
        public void setNull(<API key> entity) {
            entity.setWrapperDouble(null);
        }
        public void change(<API key> entity) {
            entity.setWrapperDouble(new Double(2));
        }
        public boolean isChanged(<API key> entity) {
            return !entity.getWrapperDouble().equals(new Double(1));
        }
    }, "wrapperDouble");
}
@Test
public void testWrapperFloat() {
ReferenceValidator validator = new ReferenceValidator() {
public void set(<API key> obj) {
obj.setWrapperFloat(new Float(1));
}
public void change(<API key> obj) {
obj.setWrapperFloat(new Float(2));
}
public void setNull(<API key> obj) {
obj.setWrapperFloat(null);
}
public boolean isNull(<API key> obj) {
return obj.getWrapperFloat() == null;
}
public boolean isChanged(<API key> obj) {
return !obj.getWrapperFloat().equals(new Float(1));
}
};
validateReference(17, validator, "wrapperFloat");
}
// immutable reference types
@Test
public void testString2Varchar() {
    // Exercises the VARCHAR-mapped string field: initial write, overwrite,
    // nulling, and dirty detection.
    final String first = "VC 1";
    final String second = "VC 2";
    ReferenceValidator probe = new ReferenceValidator() {
        public void set(<API key> obj) {
            obj.setString2Varchar(first);
        }
        public void setNull(<API key> obj) {
            obj.setString2Varchar(null);
        }
        public void change(<API key> obj) {
            obj.setString2Varchar(second);
        }
        public boolean isNull(<API key> obj) {
            return obj.getString2Varchar() == null;
        }
        public boolean isChanged(<API key> obj) {
            // Still equal to the value written by set()?
            return !obj.getString2Varchar().equals(first);
        }
    };
    validateReference(21, probe, "string2Varchar");
}

@Test
public void testString2Clob() {
    // Exercises the CLOB-mapped string field: initial write, overwrite,
    // nulling, and dirty detection.
    final String first = "VC 1";
    final String second = "VC 2";
    ReferenceValidator probe = new ReferenceValidator() {
        public void set(<API key> obj) {
            obj.setString2Clob(first);
        }
        public void setNull(<API key> obj) {
            obj.setString2Clob(null);
        }
        public void change(<API key> obj) {
            obj.setString2Clob(second);
        }
        public boolean isNull(<API key> obj) {
            return obj.getString2Clob() == null;
        }
        public boolean isChanged(<API key> obj) {
            return !obj.getString2Clob().equals(first);
        }
    };
    validateReference(22, probe, "string2Clob");
}
@Test
public void testBigDecimal() {
    // Exercises the BigDecimal field. Comparison uses compareTo so that values
    // with differing scales (e.g. 1.1 vs 1.10) still count as unchanged.
    final BigDecimal initial = new BigDecimal("1.1");
    final BigDecimal updated = new BigDecimal("2.2");
    ReferenceValidator probe = new ReferenceValidator() {
        public void set(<API key> obj) {
            obj.setBigDecimal(initial);
        }
        public void setNull(<API key> obj) {
            obj.setBigDecimal(null);
        }
        public void change(<API key> obj) {
            obj.setBigDecimal(updated);
        }
        public boolean isNull(<API key> obj) {
            return obj.getBigDecimal() == null;
        }
        public boolean isChanged(<API key> obj) {
            return obj.getBigDecimal().compareTo(initial) != 0;
        }
    };
    validateReference(23, probe, "bigDecimal");
}

@Test
public void testBigInteger() {
    // Exercises the BigInteger field; BigInteger has no scale, so plain equals
    // is sufficient for the dirty check.
    final BigInteger initial = new BigInteger("11");
    final BigInteger updated = new BigInteger("22");
    ReferenceValidator probe = new ReferenceValidator() {
        public void set(<API key> obj) {
            obj.setBigInteger(initial);
        }
        public void setNull(<API key> obj) {
            obj.setBigInteger(null);
        }
        public void change(<API key> obj) {
            obj.setBigInteger(updated);
        }
        public boolean isNull(<API key> obj) {
            return obj.getBigInteger() == null;
        }
        public boolean isChanged(<API key> obj) {
            return !obj.getBigInteger().equals(initial);
        }
    };
    validateReference(24, probe, "bigInteger");
}
// mutable types
@Test
public void testUtilDate() {
    // Mutable type: besides replacing the reference (change), the held value
    // can be edited in place (mutate). Fresh instances are built per call so
    // that in-place edits never leak into later set() invocations.
    MutableValidator probe = new MutableValidator() {
        public void set(<API key> obj) {
            obj.setUtilDate(new Date(1000L));
        }
        public void setNull(<API key> obj) {
            obj.setUtilDate(null);
        }
        public void change(<API key> obj) {
            obj.setUtilDate(new Date(2000L));
        }
        public void mutate(<API key> obj) {
            // Edit the stored Date in place; the reference stays the same.
            obj.getUtilDate().setTime(2000);
        }
        public boolean isNull(<API key> obj) {
            return obj.getUtilDate() == null;
        }
        public boolean isChanged(<API key> obj) {
            return !obj.getUtilDate().equals(new Date(1000L));
        }
    };
    validateMutable(31, probe, "utilDate");
}

@Test
public void testUtilCalendar() {
    // Mutable java.util.Calendar field; mutate() shifts the day in place.
    MutableValidator probe = new MutableValidator() {
        public void set(<API key> obj) {
            obj.setUtilCalendar(new GregorianCalendar(2005, 9, 8, 10, 49));
        }
        public void setNull(<API key> obj) {
            obj.setUtilCalendar(null);
        }
        public void change(<API key> obj) {
            obj.setUtilCalendar(new GregorianCalendar(2005, 9, 9, 10, 49));
        }
        public void mutate(<API key> obj) {
            obj.getUtilCalendar().set(2005, 9, 9);
        }
        public boolean isNull(<API key> obj) {
            return obj.getUtilCalendar() == null;
        }
        public boolean isChanged(<API key> obj) {
            return !obj.getUtilCalendar().equals(new GregorianCalendar(2005, 9, 8, 10, 49));
        }
    };
    validateMutable(32, probe, "utilCalendar");
}

@Test
public void testSqlDate() {
    // Mutable java.sql.Date field; mutate() rewrites the epoch millis in place.
    MutableValidator probe = new MutableValidator() {
        public void set(<API key> obj) {
            obj.setSqlDate(java.sql.Date.valueOf("2005-09-08"));
        }
        public void setNull(<API key> obj) {
            obj.setSqlDate(null);
        }
        public void change(<API key> obj) {
            obj.setSqlDate(java.sql.Date.valueOf("2005-09-09"));
        }
        public void mutate(<API key> obj) {
            obj.getSqlDate().setTime(java.sql.Date.valueOf("2005-09-09").getTime());
        }
        public boolean isNull(<API key> obj) {
            return obj.getSqlDate() == null;
        }
        public boolean isChanged(<API key> obj) {
            return !obj.getSqlDate().equals(java.sql.Date.valueOf("2005-09-08"));
        }
    };
    validateMutable(33, probe, "sqlDate");
}

@Test
public void testSqlTime() {
    // Mutable java.sql.Time field.
    MutableValidator probe = new MutableValidator() {
        public void set(<API key> obj) {
            obj.setSqlTime(java.sql.Time.valueOf("10:49:00"));
        }
        public void setNull(<API key> obj) {
            obj.setSqlTime(null);
        }
        public void change(<API key> obj) {
            obj.setSqlTime(java.sql.Time.valueOf("11:49:00"));
        }
        public void mutate(<API key> obj) {
            obj.getSqlTime().setTime(java.sql.Time.valueOf("11:49:00").getTime());
        }
        public boolean isNull(<API key> obj) {
            return obj.getSqlTime() == null;
        }
        public boolean isChanged(<API key> obj) {
            return !obj.getSqlTime().equals(java.sql.Time.valueOf("10:49:00"));
        }
    };
    validateMutable(34, probe, "sqlTime");
}

@Test
public void testSqlTimestamp() {
    // Mutable java.sql.Timestamp field.
    MutableValidator probe = new MutableValidator() {
        public void set(<API key> obj) {
            obj.setSqlTimestamp(new java.sql.Timestamp(1000));
        }
        public void setNull(<API key> obj) {
            obj.setSqlTimestamp(null);
        }
        public void change(<API key> obj) {
            obj.setSqlTimestamp(new java.sql.Timestamp(2000));
        }
        public void mutate(<API key> obj) {
            obj.getSqlTimestamp().setTime(2000);
        }
        public boolean isNull(<API key> obj) {
            return obj.getSqlTimestamp() == null;
        }
        public boolean isChanged(<API key> obj) {
            return !obj.getSqlTimestamp().equals(new java.sql.Timestamp(1000));
        }
    };
    validateMutable(35, probe, "sqlTimestamp");
}
// arrays
@Test
public void <API key>() {
    // Baseline contents; the field is "changed" whenever its stored array no
    // longer matches these values element-by-element.
    final byte[] UNCHANGED = new byte[] { 0, 1, 2, 3, 4, 5, 6, 7 };
    MutableValidator probe = new MutableValidator() {
        public void set(<API key> obj) {
            obj.<API key>(UNCHANGED);
        }
        public void setNull(<API key> obj) {
            obj.<API key>(null);
        }
        public void change(<API key> obj) {
            // Same contents as UNCHANGED except for the first element.
            byte[] altered = UNCHANGED.clone();
            altered[0] = 8;
            obj.<API key>(altered);
        }
        public void mutate(<API key> obj) {
            // In-place edit of the stored array; the reference stays the same.
            obj.<API key>()[0] = 8;
        }
        public boolean isNull(<API key> obj) {
            return obj.<API key>() == null;
        }
        public boolean isChanged(<API key> obj) {
            return !Arrays.equals(UNCHANGED, obj.<API key>());
        }
    };
    validateMutable(41, probe, "<API key>");
}

@Test
@Skip(databaseNames = "org.eclipse.persistence.platform.database.MaxDBPlatform")
public void <API key>() {
    // Same scheme as the previous byte[] test, for a different column mapping.
    final byte[] UNCHANGED = new byte[] { 0, 1, 2, 3, 4, 5, 6, 7 };
    MutableValidator probe = new MutableValidator() {
        public void set(<API key> obj) {
            obj.<API key>(UNCHANGED);
        }
        public void setNull(<API key> obj) {
            obj.<API key>(null);
        }
        public void change(<API key> obj) {
            byte[] altered = UNCHANGED.clone();
            altered[0] = 8;
            obj.<API key>(altered);
        }
        public void mutate(<API key> obj) {
            obj.<API key>()[0] = 8;
        }
        public boolean isNull(<API key> obj) {
            return obj.<API key>() == null;
        }
        public boolean isChanged(<API key> obj) {
            return !Arrays.equals(UNCHANGED, obj.<API key>());
        }
    };
    validateMutable(42, probe, "<API key>");
}

@Test
public void <API key>() {
    // Same scheme as the previous byte[] tests, for a different column mapping.
    final byte[] UNCHANGED = new byte[] { 0, 1, 2, 3, 4, 5, 6, 7 };
    MutableValidator probe = new MutableValidator() {
        public void set(<API key> obj) {
            obj.<API key>(UNCHANGED);
        }
        public void setNull(<API key> obj) {
            obj.<API key>(null);
        }
        public void change(<API key> obj) {
            byte[] altered = UNCHANGED.clone();
            altered[0] = 8;
            obj.<API key>(altered);
        }
        public void mutate(<API key> obj) {
            obj.<API key>()[0] = 8;
        }
        public boolean isNull(<API key> obj) {
            return obj.<API key>() == null;
        }
        public boolean isChanged(<API key> obj) {
            return !Arrays.equals(UNCHANGED, obj.<API key>());
        }
    };
    validateMutable(43, probe, "<API key>");
}
@Test
public void <API key>() {
    // Baseline value; identical character sequence to the original literal array.
    final char[] UNCHANGED = "UNCHANGED".toCharArray();
    MutableValidator probe = new MutableValidator() {
        public void set(<API key> obj) {
            obj.<API key>(UNCHANGED);
        }
        public void setNull(<API key> obj) {
            obj.<API key>(null);
        }
        public void change(<API key> obj) {
            // A shorter, different array -- the reference is replaced wholesale.
            obj.<API key>("CHANGED".toCharArray());
        }
        public void mutate(<API key> obj) {
            // In-place edit of the stored array; the reference stays the same.
            obj.<API key>()[0] = 'X';
        }
        public boolean isNull(<API key> obj) {
            return obj.<API key>() == null;
        }
        public boolean isChanged(<API key> obj) {
            return !Arrays.equals(UNCHANGED, obj.<API key>());
        }
    };
    validateMutable(44, probe, "<API key>");
}

@Test
public void <API key>() {
    // Same scheme as the previous char[] test, for a different column mapping.
    final char[] UNCHANGED = "UNCHANGED".toCharArray();
    MutableValidator probe = new MutableValidator() {
        public void set(<API key> obj) {
            obj.<API key>(UNCHANGED);
        }
        public void setNull(<API key> obj) {
            obj.<API key>(null);
        }
        public void change(<API key> obj) {
            obj.<API key>("CHANGED".toCharArray());
        }
        public void mutate(<API key> obj) {
            obj.<API key>()[0] = 'X';
        }
        public boolean isNull(<API key> obj) {
            return obj.<API key>() == null;
        }
        public boolean isChanged(<API key> obj) {
            return !Arrays.equals(UNCHANGED, obj.<API key>());
        }
    };
    validateMutable(45, probe, "<API key>");
}
@Test
public void <API key>() {
final Byte[] UNCHANGED = new Byte[] { Byte.valueOf((byte) 0), Byte.valueOf((byte) 1), Byte.valueOf((byte) 2),
Byte.valueOf((byte) 3), Byte.valueOf((byte) 4), Byte.valueOf((byte) 5), Byte.valueOf((byte) 6),
Byte.valueOf((byte) 7) };
MutableValidator validator = new MutableValidator() {
public void set(<API key> obj) {
obj.<API key>(UNCHANGED);
}
public void change(<API key> obj) {
obj.<API key>(new Byte[] { Byte.valueOf((byte) 8), Byte.valueOf((byte) 1),
Byte.valueOf((byte) 2), Byte.valueOf((byte) 3), Byte.valueOf((byte) 4), Byte.valueOf((byte) 5),
Byte.valueOf((byte) 6), Byte.valueOf((byte) 7) });
}
public void setNull(<API key> obj) {
obj.<API key>(null);
}
public boolean isNull(<API key> obj) {
return obj.<API key>() == null;
}
public boolean isChanged(<API key> obj) {
return !Arrays.equals(UNCHANGED, obj.<API key>());
}
public void mutate(<API key> obj) {
obj.<API key>()[0] = Byte.valueOf((byte) 8);
}
};
validateMutable(46, validator, "<API key>");
}
@Test
public void <API key>() {
final Byte[] UNCHANGED = new Byte[] { Byte.valueOf((byte) 0), Byte.valueOf((byte) 1), Byte.valueOf((byte) 2),
Byte.valueOf((byte) 3), Byte.valueOf((byte) 4), Byte.valueOf((byte) 5), Byte.valueOf((byte) 6),
Byte.valueOf((byte) 7) };
MutableValidator validator = new MutableValidator() {
public void set(<API key> obj) {
obj.<API key>(UNCHANGED);
}
public void change(<API key> obj) {
obj.<API key>(new Byte[] { Byte.valueOf((byte) 8), Byte.valueOf((byte) 1),
Byte.valueOf((byte) 2), Byte.valueOf((byte) 3), Byte.valueOf((byte) 4), Byte.valueOf((byte) 5),
Byte.valueOf((byte) 6), Byte.valueOf((byte) 7) });
}
public void setNull(<API key> obj) {
obj.<API key>(null);
}
public boolean isNull(<API key> obj) {
return obj.<API key>() == null;
}
public boolean isChanged(<API key> obj) {
return !Arrays.equals(UNCHANGED, obj.<API key>());
}
public void mutate(<API key> obj) {
obj.<API key>()[0] = Byte.valueOf((byte) 8);
}
};
validateMutable(47, validator, "<API key>");
}
@Test
public void <API key>() {
final Byte[] UNCHANGED = new Byte[] { Byte.valueOf((byte) 0), Byte.valueOf((byte) 1), Byte.valueOf((byte) 2),
Byte.valueOf((byte) 3), Byte.valueOf((byte) 4), Byte.valueOf((byte) 5), Byte.valueOf((byte) 6),
Byte.valueOf((byte) 7) };
MutableValidator validator = new MutableValidator() {
public void set(<API key> obj) {
obj.<API key>(UNCHANGED);
}
public void change(<API key> obj) {
obj.<API key>(new Byte[] { Byte.valueOf((byte) 8), Byte.valueOf((byte) 1),
Byte.valueOf((byte) 2), Byte.valueOf((byte) 3), Byte.valueOf((byte) 4), Byte.valueOf((byte) 5),
Byte.valueOf((byte) 6), Byte.valueOf((byte) 7) });
}
public void setNull(<API key> obj) {
obj.<API key>(null);
}
public boolean isNull(<API key> obj) {
return obj.<API key>() == null;
}
public boolean isChanged(<API key> obj) {
return !Arrays.equals(UNCHANGED, obj.<API key>());
}
public void mutate(<API key> obj) {
obj.<API key>()[0] = Byte.valueOf((byte) 8);
}
};
validateMutable(48, validator, "<API key>");
}
@SuppressWarnings("boxing")
@Test
public void <API key>() {
    // Baseline contents for the wrapper Character[] field (autoboxed literals).
    final Character[] UNCHANGED = new Character[] { 'U', 'N', 'C', 'H', 'A', 'N', 'G', 'E', 'D' };
    MutableValidator probe = new MutableValidator() {
        public void set(<API key> obj) {
            obj.<API key>(UNCHANGED);
        }
        public void setNull(<API key> obj) {
            obj.<API key>(null);
        }
        public void change(<API key> obj) {
            // A shorter, different array -- the reference is replaced wholesale.
            obj.<API key>(new Character[] { 'C', 'H', 'A', 'N', 'G', 'E', 'D' });
        }
        public void mutate(<API key> obj) {
            // In-place edit of the stored array; the reference stays the same.
            obj.<API key>()[0] = 'X';
        }
        public boolean isNull(<API key> obj) {
            return obj.<API key>() == null;
        }
        public boolean isChanged(<API key> obj) {
            return !Arrays.equals(UNCHANGED, obj.<API key>());
        }
    };
    validateMutable(49, probe, "<API key>");
}

@SuppressWarnings("boxing")
@Test
public void <API key>() {
    // Same scheme as the previous Character[] test, for a different column mapping.
    final Character[] UNCHANGED = new Character[] { 'U', 'N', 'C', 'H', 'A', 'N', 'G', 'E', 'D' };
    MutableValidator probe = new MutableValidator() {
        public void set(<API key> obj) {
            obj.<API key>(UNCHANGED);
        }
        public void setNull(<API key> obj) {
            obj.<API key>(null);
        }
        public void change(<API key> obj) {
            obj.<API key>(new Character[] { 'C', 'H', 'A', 'N', 'G', 'E', 'D' });
        }
        public void mutate(<API key> obj) {
            obj.<API key>()[0] = 'X';
        }
        public boolean isNull(<API key> obj) {
            return obj.<API key>() == null;
        }
        public boolean isChanged(<API key> obj) {
            return !Arrays.equals(UNCHANGED, obj.<API key>());
        }
    };
    validateMutable(50, probe, "<API key>");
}
@Test
public void testSerializable() {
    // Exercises a field holding an arbitrary Serializable value; the value type
    // is mutable via setTxt, so in-place mutation is tested as well.
    MutableValidator probe = new MutableValidator() {
        // Baseline instance; reused so isChanged compares against the original state.
        <API key> BASELINE = new <API key>("Unchanged");
        public void set(<API key> obj) {
            obj.setSerializable(BASELINE);
        }
        public void setNull(<API key> obj) {
            obj.setSerializable(null);
        }
        public void change(<API key> obj) {
            obj.setSerializable(new <API key>("Changed"));
        }
        public void mutate(<API key> obj) {
            // Edit the stored value in place; the reference stays the same.
            ((<API key>) obj.getSerializable()).setTxt("Changed");
        }
        public boolean isNull(<API key> obj) {
            return obj.getSerializable() == null;
        }
        public boolean isChanged(<API key> obj) {
            return !BASELINE.equals(obj.getSerializable());
        }
    };
    validateMutable(51, probe, "serializable");
}
@Test
public void testEnumString() {
    // Exercises an enum field persisted by name (STRING strategy); identity
    // comparison is safe because enum constants are singletons.
    ReferenceValidator probe = new ReferenceValidator() {
        public void set(<API key> obj) {
            obj.setEnumString(UserDefinedEnum.HUGO);
        }
        public void setNull(<API key> obj) {
            obj.setEnumString(null);
        }
        public void change(<API key> obj) {
            obj.setEnumString(UserDefinedEnum.EMIL);
        }
        public boolean isNull(<API key> obj) {
            return obj.getEnumString() == null;
        }
        public boolean isChanged(<API key> obj) {
            return UserDefinedEnum.HUGO != obj.getEnumString();
        }
    };
    validateReference(52, probe, "enumString");
}

@Test
public void testEnumOrdinal() {
    // Exercises an enum field persisted by ordinal (ORDINAL strategy).
    ReferenceValidator probe = new ReferenceValidator() {
        public void set(<API key> obj) {
            obj.setEnumOrdinal(UserDefinedEnum.HUGO);
        }
        public void setNull(<API key> obj) {
            obj.setEnumOrdinal(null);
        }
        public void change(<API key> obj) {
            obj.setEnumOrdinal(UserDefinedEnum.EMIL);
        }
        public boolean isNull(<API key> obj) {
            return obj.getEnumOrdinal() == null;
        }
        public boolean isChanged(<API key> obj) {
            return UserDefinedEnum.HUGO != obj.getEnumOrdinal();
        }
    };
    validateReference(53, probe, "enumOrdinal");
}
@Test
@Bugzilla(bugid=309681)
public void testNullsFAshort() throws SQLException {
    // Persists an entity, then nulls its primitive-short column directly via
    // JDBC; reloading must fail because null cannot be stored in a primitive.
    JPAEnvironment env = getEnvironment();
    EntityManager em = env.getEntityManager();
    <API key> obj = new <API key>(7777);
    try {
        obj.fill();
        env.beginTransaction(em);
        em.persist(obj);
        env.<API key>(em);
        // try-with-resources guarantees connection/statement cleanup even if
        // the update fails (replaces the former nested try/finally blocks).
        try (Connection con = env.getDataSource().getConnection()) {
            String stmt = "update TMP_BASIC_TYPES_FA set P_SHORT = ? where ID = 7777";
            try (PreparedStatement pstmt = con.prepareStatement(stmt)) {
                pstmt.setNull(1, Types.SMALLINT);
                pstmt.executeUpdate();
            }
            if (!con.getAutoCommit()) {
                con.commit();
            }
        }
        // Integer.valueOf replaces the Integer(int) constructor deprecated since Java 9.
        obj = em.find(<API key>.class, Integer.valueOf(7777));
        flop("missing Exception");
    } catch (<API key> iae) {
        // $JL-EXC$ expected behavior
    } finally {
        closeEntityManager(em);
    }
}

@Test
@Bugzilla(bugid = 309681)
public void testNullsFAint() throws SQLException {
    // Same scenario as testNullsFAshort, for the primitive-int column.
    JPAEnvironment env = getEnvironment();
    EntityManager em = env.getEntityManager();
    <API key> obj = new <API key>(7778);
    try {
        obj.fill();
        env.beginTransaction(em);
        em.persist(obj);
        env.<API key>(em);
        try (Connection con = env.getDataSource().getConnection()) {
            String stmt = "update TMP_BASIC_TYPES_FA set P_INT = ? where ID = 7778";
            try (PreparedStatement pstmt = con.prepareStatement(stmt)) {
                pstmt.setNull(1, Types.INTEGER);
                pstmt.executeUpdate();
            }
            if (!con.getAutoCommit()) {
                con.commit();
            }
        }
        obj = em.find(<API key>.class, Integer.valueOf(7778));
        flop("missing exception");
    } catch (<API key> iae) {
        // $JL-EXC$ expected behavior
    } finally {
        closeEntityManager(em);
    }
}
/**
 * Contract for exercising a single persistent field of <API key>:
 * write an initial value, overwrite it, and detect the overwrite.
 */
private interface Validator {
    // Assigns the field's initial value.
    void set(<API key> obj);
    // Assigns a value different from the one written by set().
    void change(<API key> obj);
    // True once the field no longer holds the value written by set().
    boolean isChanged(<API key> obj);
}

/**
 * Validator for nullable reference-typed fields (wrappers, strings, enums, ...).
 */
private interface ReferenceValidator extends Validator {
    // True when the field currently holds null.
    boolean isNull(<API key> obj);
    // Clears the field to null.
    void setNull(<API key> obj);
}

/**
 * Validator for mutable reference types (dates, arrays, serializables) that can
 * additionally be modified in place without replacing the reference.
 */
private interface MutableValidator extends ReferenceValidator {
    // Mutates the currently held value in place (the reference stays the same).
    void mutate(<API key> obj);
}
} |
package org.eclipse.kura.net.admin;
/**
 * Keys identifying network-interface configuration and status properties:
 * general interface settings (MTU, autoconnect, driver), IPv4 addressing,
 * DNS/WINS servers, Wi-Fi settings, USB device descriptors and capabilities.
 *
 * NOTE(review): if any persisted data or remote peer relies on ordinal values,
 * constants must not be reordered or removed -- TODO confirm whether ordinals
 * are ever serialized; persisting by name() is safe either way.
 */
public enum <API key> {
    PLATFORM_INTERFACES,
    CONFIG_MTU,
    CONFIG_AUTOCONNECT,
    CONFIG_DRIVER,
    <API key>,
    CONFIG_IPV4_ADDRESS,
    CONFIG_IPV4_PREFIX,
    CONFIG_IPV4_GATEWAY,
    CONFIG_DNS_SERVERS,
    CONFIG_WINS_SERVERS,
    <API key>,
    <API key>,
    <API key>,
    <API key>,
    <API key>,
    <API key>,
    <API key>,
    <API key>,
    CONFIG_WIFI_MODE,
    <API key>,
    <API key>,
    <API key>,
    <API key>,
    <API key>,
    <API key>,
    <API key>,
    <API key>,
    <API key>,
    <API key>,
    <API key>,
    <API key>,
    <API key>,
    <API key>,
    <API key>,
    <API key>,
    <API key>,
    <API key>,
    <API key>,
    <API key>,
    <API key>,
    <API key>,
    <API key>,
    <API key>,
    USB_PORT,
    USB_MANUFACTURER,
    USB_PRODUCT,
    USB_MANUFACTURER_ID,
    USB_PRODUCT_ID,
    WIFI_CAPABILITIES
}
package ch.elexis.core.ui.dbcheck.semantic;
import org.eclipse.core.runtime.IProgressMonitor;
import ch.rgw.tools.JdbcLink;
/**
 * Base class for semantic (content-level) database consistency checks.
 * Concrete checks run queries through the supplied JdbcLink and accumulate
 * their findings in the two log buffers.
 */
public abstract class SemanticCheck {
    // Accumulates messages for data that passed the check.
    StringBuilder oklog;
    // Accumulates messages for detected inconsistencies.
    StringBuilder errlog;

    /**
     * Returns the accumulated error messages.
     * NOTE(review): assumes the concrete check has initialized errlog before
     * this is called -- an NPE results otherwise; confirm callers run the
     * check first.
     */
    public String getErrorLog(){
        return errlog.toString();
    }

    /**
     * Returns the accumulated success/output messages.
     * NOTE(review): same initialization assumption as getErrorLog().
     */
    public String getOutputLog(){
        return oklog.toString();
    }

    /**
     * Executes the semantic check against the database.
     *
     * @param j open database connection wrapper used to run the queries
     * @param monitor progress monitor for reporting progress and cancellation
     * @return a human-readable result summary
     */
    public abstract String <API key>(JdbcLink j, IProgressMonitor monitor);
}
package org.eclipse.jface.text.templates;
import org.eclipse.core.runtime.Assert;
/**
* A template consisting of a name and a pattern.
* <p>
* Clients may instantiate this class. May become final in the future.
* </p>
* @since 3.0
* @noextend This class is not intended to be subclassed by clients.
*/
public class Template {
/** The name of this template */
private /*final*/ String fName;
/** A description of this template */
private /*final*/ String fDescription;
/** The name of the context type of this template */
private /*final*/ String fContextTypeId;
/** The template pattern. */
private /*final*/ String fPattern;
/**
* The auto insertable property.
* @since 3.1
*/
private final boolean fIsAutoInsertable;
/**
* Creates an empty template.
*/
public Template() {
this("", "", "", "", true); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$
}
/**
* Creates a copy of a template.
*
* @param template the template to copy
*/
public Template(Template template) {
this(template.getName(), template.getDescription(), template.getContextTypeId(), template.getPattern(), template.isAutoInsertable());
}
/**
* Creates a template.
*
* @param name the name of the template
* @param description the description of the template
* @param contextTypeId the id of the context type in which the template can be applied
* @param pattern the template pattern
* @deprecated as of 3.1 replaced by {@link #Template(String, String, String, String, boolean)}
*/
public Template(String name, String description, String contextTypeId, String pattern) {
this(name, description, contextTypeId, pattern, true); // templates are auto insertable per default
}
/**
* Creates a template.
*
* @param name the name of the template
* @param description the description of the template
* @param contextTypeId the id of the context type in which the template can be applied
* @param pattern the template pattern
* @param isAutoInsertable the auto insertable property of the template
* @since 3.1
*/
public Template(String name, String description, String contextTypeId, String pattern, boolean isAutoInsertable) {
Assert.isNotNull(description);
fDescription= description;
fName= name;
Assert.isNotNull(contextTypeId);
fContextTypeId= contextTypeId;
fPattern= pattern;
fIsAutoInsertable= isAutoInsertable;
}
/*
* @see Object#hashCode()
*/
public int hashCode() {
return fName.hashCode() ^ fPattern.hashCode() ^ fContextTypeId.hashCode();
}
/**
* Sets the description of the template.
*
* @param description the new description
* @deprecated Templates should never be modified
*/
public void setDescription(String description) {
Assert.isNotNull(description);
fDescription= description;
}
/**
* Returns the description of the template.
*
* @return the description of the template
*/
public String getDescription() {
return fDescription;
}
/**
* Sets the name of the context type in which the template can be applied.
*
* @param contextTypeId the new context type name
* @deprecated Templates should never be modified
*/
public void setContextTypeId(String contextTypeId) {
Assert.isNotNull(contextTypeId);
fContextTypeId= contextTypeId;
}
/**
* Returns the id of the context type in which the template can be applied.
*
* @return the id of the context type in which the template can be applied
*/
public String getContextTypeId() {
return fContextTypeId;
}
/**
* Sets the name of the template.
*
* @param name the name of the template
* @deprecated Templates should never be modified
*/
public void setName(String name) {
fName= name;
}
/**
* Returns the name of the template.
*
* @return the name of the template
*/
public String getName() {
return fName;
}
/**
* Sets the pattern of the template.
*
* @param pattern the new pattern of the template
* @deprecated Templates should never be modified
*/
public void setPattern(String pattern) {
fPattern= pattern;
}
/**
* Returns the template pattern.
*
* @return the template pattern
*/
public String getPattern() {
return fPattern;
}
/**
* Returns <code>true</code> if template is enabled and matches the context,
* <code>false</code> otherwise.
*
* @param prefix the prefix (e.g. inside a document) to match
* @param contextTypeId the context type id to match
* @return <code>true</code> if template is enabled and matches the context,
* <code>false</code> otherwise
*/
public boolean matches(String prefix, String contextTypeId) {
return fContextTypeId.equals(contextTypeId);
}
/*
* @see java.lang.Object#equals(java.lang.Object)
*/
public boolean equals(Object o) {
if (!(o instanceof Template))
return false;
Template t= (Template) o;
if (t == this)
return true;
return t.fName.equals(fName)
&& t.fPattern.equals(fPattern)
&& t.fContextTypeId.equals(fContextTypeId)
&& t.fDescription.equals(fDescription)
&& t.fIsAutoInsertable == fIsAutoInsertable;
}
/**
* Returns the auto insertable property of the template.
*
* @return the auto insertable property of the template
* @since 3.1
*/
public boolean isAutoInsertable() {
return fIsAutoInsertable;
}
} |
package net.locosoft.CompuCanvas.controller.vitals.internal;
import java.util.Date;
import net.locosoft.CompuCanvas.controller.core.tsd.TSDGroup;
import net.locosoft.CompuCanvas.controller.core.tsd.TSDType;
/**
 * Base class for vital signs that sample a wall-clock time component
 * (seconds, minutes or hours) from a java.util.Date.
 *
 * NOTE(review): _buffer is inherited from VitalSign (not visible here);
 * presumably a time-series buffer keyed by epoch millis -- confirm in VitalSign.
 */
public abstract class TimeVitalSign extends VitalSign {

    public TimeVitalSign(String id, String units, TSDType type, TSDGroup group) {
        super(id, units, type, group);
    }

    /** Records the seconds-of-minute component (0-59) of the given instant. */
    public static class Seconds extends TimeVitalSign {
        public Seconds(TSDGroup group) {
            super("seconds", "time", TSDType.Long, group);
        }

        // Date.getSeconds() is deprecated but used deliberately here; it reads
        // the component in the JVM's default time zone.
        @SuppressWarnings("deprecation")
        public void update(Date date) {
            _buffer.update(date.getTime(), date.getSeconds());
        }
    }

    /** Records the minutes-of-hour component (0-59) of the given instant. */
    public static class Minutes extends TimeVitalSign {
        public Minutes(TSDGroup group) {
            super("minutes", "time", TSDType.Long, group);
        }

        @SuppressWarnings("deprecation")
        public void update(Date date) {
            _buffer.update(date.getTime(), date.getMinutes());
        }
    }

    /** Records the hour-of-day component (0-23) of the given instant. */
    public static class Hours extends TimeVitalSign {
        public Hours(TSDGroup group) {
            super("hours", "time", TSDType.Long, group);
        }

        @SuppressWarnings("deprecation")
        public void update(Date date) {
            _buffer.update(date.getTime(), date.getHours());
        }
    }
}
package com.patternbox.tangocalendar.event.domain.model.shared; |
package org.eclipse.viatra.solver.language.solverLanguage;
import org.eclipse.emf.ecore.EObject;
/**
* <!-- begin-user-doc -->
* A representation of the model object '<em><b>Path Component</b></em>'.
* <!-- end-user-doc -->
*
* <p>
* The following features are supported:
* </p>
* <ul>
* <li>{@link org.eclipse.viatra.solver.language.solverLanguage.PathComponent#getSymbol <em>Symbol</em>}</li>
* <li>{@link org.eclipse.viatra.solver.language.solverLanguage.PathComponent#isInverse <em>Inverse</em>}</li>
* <li>{@link org.eclipse.viatra.solver.language.solverLanguage.PathComponent#isTransitiveClosure <em>Transitive Closure</em>}</li>
* <li>{@link org.eclipse.viatra.solver.language.solverLanguage.PathComponent#<API key> <em>Reflexive Transitive Closure</em>}</li>
* </ul>
*
* @see org.eclipse.viatra.solver.language.solverLanguage.<API key>#getPathComponent()
* @model
* @generated
*/
public interface PathComponent extends EObject {
    // NOTE(review): EMF-generated interface (see the @generated tags). Hand
    // edits will be overwritten when the model is regenerated unless the
    // corresponding @generated marker is removed or changed to "@generated NOT".

    /**
     * Returns the value of the '<em><b>Symbol</b></em>' reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @return the value of the '<em>Symbol</em>' reference.
     * @see #setSymbol(Symbol)
     * @see org.eclipse.viatra.solver.language.solverLanguage.<API key>#<API key>()
     * @model
     * @generated
     */
    Symbol getSymbol();

    /**
     * Sets the value of the '{@link org.eclipse.viatra.solver.language.solverLanguage.PathComponent#getSymbol <em>Symbol</em>}' reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @param value the new value of the '<em>Symbol</em>' reference.
     * @see #getSymbol()
     * @generated
     */
    void setSymbol(Symbol value);

    /**
     * Returns the value of the '<em><b>Inverse</b></em>' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @return the value of the '<em>Inverse</em>' attribute.
     * @see #setInverse(boolean)
     * @see org.eclipse.viatra.solver.language.solverLanguage.<API key>#<API key>()
     * @model
     * @generated
     */
    boolean isInverse();

    /**
     * Sets the value of the '{@link org.eclipse.viatra.solver.language.solverLanguage.PathComponent#isInverse <em>Inverse</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @param value the new value of the '<em>Inverse</em>' attribute.
     * @see #isInverse()
     * @generated
     */
    void setInverse(boolean value);

    /**
     * Returns the value of the '<em><b>Transitive Closure</b></em>' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @return the value of the '<em>Transitive Closure</em>' attribute.
     * @see #<API key>(boolean)
     * @see org.eclipse.viatra.solver.language.solverLanguage.<API key>#<API key>()
     * @model
     * @generated
     */
    boolean isTransitiveClosure();

    /**
     * Sets the value of the '{@link org.eclipse.viatra.solver.language.solverLanguage.PathComponent#isTransitiveClosure <em>Transitive Closure</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @param value the new value of the '<em>Transitive Closure</em>' attribute.
     * @see #isTransitiveClosure()
     * @generated
     */
    void <API key>(boolean value);

    /**
     * Returns the value of the '<em><b>Reflexive Transitive Closure</b></em>' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @return the value of the '<em>Reflexive Transitive Closure</em>' attribute.
     * @see #<API key>(boolean)
     * @see org.eclipse.viatra.solver.language.solverLanguage.<API key>#<API key>()
     * @model
     * @generated
     */
    boolean <API key>();

    /**
     * Sets the value of the '{@link org.eclipse.viatra.solver.language.solverLanguage.PathComponent#<API key> <em>Reflexive Transitive Closure</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @param value the new value of the '<em>Reflexive Transitive Closure</em>' attribute.
     * @see #<API key>()
     * @generated
     */
    void <API key>(boolean value);

} // PathComponent
package org.eclipse.january.geometry.impl;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.impl.AdapterImpl;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.january.geometry.Face;
import org.eclipse.january.geometry.GeometryPackage;
import org.eclipse.january.geometry.<API key>;
import org.eclipse.january.geometry.Vertex;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Triangle Strip Poly Shape</b></em>'.
* <!-- end-user-doc -->
*
* @generated
*/
public class <API key> extends PolyShapeImpl implements <API key> {
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected <API key>() {
        super();
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return GeometryPackage.Literals.<API key>;
    }

    /**
     * Calculates the triangles from the faces given.
     *
     * Each face's vertex index list is interpreted as a triangle strip: every
     * consecutive window of three indices (i, i+1, i+2) forms one triangle.
     *
     * @generated NOT
     */
    @Override
    public void <API key>() {
        if (getMaterial() != null) {
            getMaterial().getMaterialFiles()
                    .addAll(getVertexSource().getMaterialFiles());
        }

        // Check whether indices are specified in 0 or 1 indexing: any face
        // referencing index 0 proves the data is zero-based.
        boolean oneIndexing = true;
        for (Face face : getFaces()) {
            if (face.getVertexIndices().contains(0)) {
                oneIndexing = false;
                break;
            }
        }

        for (Face face : getFaces()) {
            EList<Integer> indices = face.getVertexIndices();
            if (indices.size() > 2) {
                for (int i = 0; i < indices.size() - 2; i++) {
                    // Get the indices into the vertex array
                    int index1 = indices.get(i);
                    int index2 = indices.get(i + 1);
                    int index3 = indices.get(i + 2);
                    // BUG FIX: the 1-to-0 indexing conversion previously
                    // consisted of bare, non-compiling expressions
                    // ("index1" etc. with no operator); decrement each
                    // index to convert to zero-based.
                    if (oneIndexing) {
                        index1--;
                        index2--;
                        index3--;
                    }
                    Vertex v1 = getVertexSource().getVertices().get(index1);
                    Vertex v2 = getVertexSource().getVertices().get(index2);
                    Vertex v3 = getVertexSource().getVertices().get(index3);
                    ComplexTriangle tri = new ComplexTriangle(v1, v2, v3);
                    // Listen to the triangle, passing along any notifications.
                    tri.eAdapters().add(new AdapterImpl() {
                        @Override
                        public void notifyChanged(Notification notification) {
                            eNotify(notification);
                        }
                    });
                    getTriangles().add(tri);
                }
            } else {
                // Degenerate face (fewer than 3 vertex indices): the input file
                // is malformed. Skipped silently for now; consider throwing or
                // logging instead.
            }
        }
    }
} //<API key>
#ifndef <API key>
#define <API key>
#include "<API key>.hh"
namespace efc {
#define <API key> <API key>(__FILE__, __LINE__, errno)
#define <API key>(msg) <API key>(__FILE__, __LINE__, msg)
/**
* Unchecked exception thrown when a <API key> operation
* is invoked upon a channel in the incorrect blocking mode.
*
* @version 1.9, 01/11/19
* @since 1.4
*/
class <API key>: public <API key> {
public:
	// Both constructors simply forward to the base exception class; this type
	// exists only to give the error a distinct, catchable identity.

	/**
	 * Constructs an <code><API key></code> with no
	 * detail message.
	 *
	 * @param _file_ __FILE__
	 * @param _line_ __LINE__
	 * @param errn errno
	 */
	<API key>(const char *_file_, int _line_, int errn = 0) :
			<API key>(_file_, _line_, errn) {
	}

	/**
	 * Constructs an <code><API key></code> with the
	 * specified detail message.
	 *
	 * @param _file_ __FILE__.
	 * @param _line_ __LINE__.
	 * @param s the detail message.
	 */
	<API key>(const char *_file_, int _line_,
			const char *s) :
			<API key>(_file_, _line_, s) {
	}
};
} /* namespace efc */
#endif /* <API key> */ |
package org.opendaylight.controller.config.api;
import java.util.Set;
import javax.management.<API key>;
import javax.management.<API key>;
import javax.management.ObjectName;
/**
* Represents functionality provided by configuration transaction.
*/
public interface <API key> extends LookupRegistry, <API key> {

    /**
     * Create new configuration bean.
     *
     * @param moduleName
     *            name of the module to create; presumably one of the factory
     *            names reported by {@link #<API key>()} — confirm with the
     *            module factory implementation
     * @param instanceName
     *            unique instance name within the given module name
     * @return ObjectName of newly created module
     * @throws <API key>
     *             if given ifcName and instanceName is already registered
     */
    ObjectName createModule(String moduleName, String instanceName)
            throws <API key>;

    /**
     * Re-creates an existing module configuration bean.
     *
     * @param objectName
     *            can be either read-only module name that can be obtained using
     *            {@link ConfigRegistry#lookupConfigBean(String, String)} or
     *            writable module name that must contain current transaction name.
     * @throws <API key>
     *             if module is not found
     * @throws <API key>
     *             if object name contains wrong transaction name or domain
     */
    void reCreateModule(ObjectName objectName) throws <API key>;

    /**
     * Destroy existing module.
     *
     * @param objectName
     *            can be either read-only module name that can be obtained using
     *            {@link ConfigRegistry#lookupConfigBean(String, String)} or
     *            writable module name that must contain current transaction
     *            name.
     * @throws <API key>
     *             if module is not found
     * @throws <API key>
     *             if object name contains wrong transaction name or domain
     */
    void destroyModule(ObjectName objectName) throws <API key>;

    /**
     * Destroy current transaction. After this call the transaction can no
     * longer be committed or validated.
     */
    void abortConfig();

    /**
     * This method can be called multiple times, has no side effects.
     *
     * @throws ValidationException
     *             if validation fails
     */
    void validateConfig() throws ValidationException;

    /**
     * @return transactionName identifying this transaction instance
     */
    String getTransactionName();

    /**
     * @return all known module factory names as reported by {@link org.opendaylight.controller.config.spi.ModuleFactory#<API key>()}
     */
    Set<String> <API key>();
}
/* devExampleVersion.c */
/* Example device support for the lsi (long string input) record
* providing the module version string as the value
*/
#include <stddef.h>
#include <stdio.h>
#include <string.h>
#include "devSup.h"
#include "lsiRecord.h"
#include "ExampleVersion.h"
/* must be last include */
#include "epicsExport.h"
/* Module version string; sizeof includes the trailing NUL. */
const char version[] = ExampleVERSION;

/* lsi "read" routine: copy the module version string into the record's
 * value buffer, truncating to the buffer capacity (prec->sizv) and always
 * NUL-terminating.  Returns 0 (success).
 *
 * Bug fix: the original unconditionally wrote buf[N - 1]; when sizv == 0
 * that underflowed the buffer (buf[-1]).  The write is now guarded.
 */
static long read_string(lsiRecord *prec)
{
    size_t N = sizeof version;
    char *buf = prec->val;

    /* Clamp the copy length to the record's buffer capacity. */
    if (N > prec->sizv)
        N = prec->sizv;

    if (N > 0) {
        memcpy(buf, version, N);
        buf[N - 1] = '\0';  /* guarantee termination even when truncated */
    }
    prec->len = N;
    return 0;
}
/* Device support entry table: 5 routines declared, only the read routine
 * is implemented (report/init/init_record/get_ioint_info are NULL). */
static lsidset devExampleVersion = {
    5, NULL, NULL, NULL, NULL, read_string
};
/* Register the dset so it can be referenced from the IOC database. */
epicsExportAddress(dset,devExampleVersion);
package nexcore.tool.uml.model.umldiagram.impl;
import java.util.Collection;
import nexcore.tool.uml.model.umldiagram.LabelNode;
import nexcore.tool.uml.model.umldiagram.LabelType;
import nexcore.tool.uml.model.umldiagram.UMLDiagramPackage;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.util.<API key>;
import org.eclipse.emf.ecore.util.InternalEList;
/**
* <!-- begin-user-doc --> An implementation of the model object '
* <em><b>Label Node</b></em>'. <!-- end-user-doc -->
* <p>
* The following features are implemented:
* <ul>
* <li>{@link nexcore.tool.uml.model.umldiagram.impl.LabelNodeImpl#getType <em>Type</em>}</li>
* <li>{@link nexcore.tool.uml.model.umldiagram.impl.LabelNodeImpl#getCompartmentList <em>Compartment List</em>}</li>
* </ul>
* </p>
*
* @generated
*/
/**
* <ul>
* <li> : nexcore.tool.uml.model</li>
* <li> : nexcore.tool.uml.model.umldiagram.impl</li>
* <li> : LabelNodeImpl</li>
* <li> : 2015. 10. 6.</li>
* <li> : </li>
* </ul>
*/
// NOTE(review): EMF @generated implementation — structure (eGet/eSet/eUnset/
// eIsSet switches, notification pattern) must match the generator output;
// only comments were added here.
public class LabelNodeImpl extends AbstractNodeImpl implements LabelNode {
    /**
     * The default value of the '{@link #getType() <em>Type</em>}' attribute.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @see #getType()
     * @generated
     * @ordered
     */
    protected static final LabelType TYPE_EDEFAULT = LabelType.LABEL;

    /**
     * The cached value of the '{@link #getType() <em>Type</em>}' attribute.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @see #getType()
     * @generated
     * @ordered
     */
    protected LabelType type = TYPE_EDEFAULT;

    /**
     * This is true if the Type attribute has been set.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     * @ordered
     */
    // Tracks explicit set-state so an "unsettable" attribute can distinguish
    // "never set" from "set to the default value".
    protected boolean typeESet;

    /**
     * The cached value of the '{@link #getCompartmentList()
     * <em>Compartment List</em>}' containment reference list. <!--
     * begin-user-doc --> <!-- end-user-doc -->
     *
     * @see #getCompartmentList()
     * @generated
     * @ordered
     */
    protected EList<LabelNode> compartmentList;

    /**
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    protected LabelNodeImpl() {
        super();
    }

    /**
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return UMLDiagramPackage.Literals.LABEL_NODE;
    }

    /**
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    public LabelType getType() {
        return type;
    }

    /**
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    public void setType(LabelType newType) {
        LabelType oldType = type;
        // null is normalized to the default value, but still counts as "set".
        type = newType == null ? TYPE_EDEFAULT : newType;
        boolean oldTypeESet = typeESet;
        typeESet = true;
        if (<API key>())
            eNotify(new ENotificationImpl(this, Notification.SET, UMLDiagramPackage.LABEL_NODE__TYPE, oldType, type, !oldTypeESet));
    }

    /**
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    public void unsetType() {
        LabelType oldType = type;
        boolean oldTypeESet = typeESet;
        type = TYPE_EDEFAULT;
        typeESet = false;
        if (<API key>())
            eNotify(new ENotificationImpl(this, Notification.UNSET, UMLDiagramPackage.LABEL_NODE__TYPE, oldType, TYPE_EDEFAULT, oldTypeESet));
    }

    /**
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    public boolean isSetType() {
        return typeESet;
    }

    /**
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    public EList<LabelNode> getCompartmentList() {
        // Lazily created containment list; EMF keeps inverse references in sync.
        if (compartmentList == null) {
            compartmentList = new <API key><LabelNode>(LabelNode.class, this, UMLDiagramPackage.<API key>);
        }
        return compartmentList;
    }

    /**
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    @Override
    public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
        switch (featureID) {
        case UMLDiagramPackage.<API key>:
            return ((InternalEList<?>)getCompartmentList()).basicRemove(otherEnd, msgs);
        }
        return super.eInverseRemove(otherEnd, featureID, msgs);
    }

    /**
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
        case UMLDiagramPackage.LABEL_NODE__TYPE:
            return getType();
        case UMLDiagramPackage.<API key>:
            return getCompartmentList();
        }
        return super.eGet(featureID, resolve, coreType);
    }

    /**
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
        case UMLDiagramPackage.LABEL_NODE__TYPE:
            setType((LabelType)newValue);
            return;
        case UMLDiagramPackage.<API key>:
            getCompartmentList().clear();
            getCompartmentList().addAll((Collection<? extends LabelNode>)newValue);
            return;
        }
        super.eSet(featureID, newValue);
    }

    /**
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
        case UMLDiagramPackage.LABEL_NODE__TYPE:
            unsetType();
            return;
        case UMLDiagramPackage.<API key>:
            getCompartmentList().clear();
            return;
        }
        super.eUnset(featureID);
    }

    /**
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
        case UMLDiagramPackage.LABEL_NODE__TYPE:
            return isSetType();
        case UMLDiagramPackage.<API key>:
            return compartmentList != null && !compartmentList.isEmpty();
        }
        return super.eIsSet(featureID);
    }

    /**
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    @Override
    public String toString() {
        if (eIsProxy()) return super.toString();
        StringBuffer result = new StringBuffer(super.toString());
        result.append(" (type: ");
        if (typeESet) result.append(type); else result.append("<unset>");
        result.append(')');
        return result.toString();
    }

} // LabelNodeImpl
package org.eclipse.packagedrone.utils.rpm.header;
import java.io.<API key>;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.function.Function;
import java.util.function.ToIntFunction;
import java.util.function.ToLongFunction;
import org.eclipse.packagedrone.utils.rpm.ReadableHeader;
import org.eclipse.packagedrone.utils.rpm.RpmBaseTag;
import org.eclipse.packagedrone.utils.rpm.RpmTag;
/**
 * A mutable collection of RPM header entries keyed by tag, with typed
 * {@code put*} helpers and serialization to {@link HeaderEntry} arrays.
 * <p>
 * Fix applied in review: the charset used for string serialization was
 * previously held in a <em>mutable static field</em> written by
 * {@link #makeEntries(Charset)} — concurrent serialization of two headers
 * with different charsets raced and could encode with the wrong charset.
 * The charset is now passed explicitly through the private helpers; the
 * public interface is unchanged.
 * </p>
 */
public class Header<T extends RpmBaseTag> implements ReadableHeader<T>
{
    @FunctionalInterface
    public interface ArrayAllocator<T>
    {
        public T[] allocate ( int length );
    }

    @FunctionalInterface
    public interface Putter<T extends RpmBaseTag, V>
    {
        public void put ( Header<T> header, T tag, V[] values );
    }

    @FunctionalInterface
    public interface ToShortFunction<T>
    {
        public short applyAsShort ( T value );
    }

    /**
     * Marker wrapper distinguishing I18N string values from plain string
     * arrays when serialized by {@link #makeEntry}.
     */
    private static final class I18nString
    {
        private final String value;

        public I18nString ( final String value )
        {
            this.value = value;
        }
    }

    /**
     * Tag-to-value map; insertion order is preserved so that
     * {@link #makeEntries()} emits entries in put order.
     */
    private final Map<Integer, Object> entries = new LinkedHashMap<> ();

    public Header ( final HeaderEntry[] entries )
    {
        if ( entries != null )
        {
            for ( final HeaderEntry entry : entries )
            {
                this.entries.put ( entry.getTag (), entry );
            }
        }
    }

    /**
     * Copy constructor: shallow-copies the entry map of {@code other}.
     */
    public Header ( final Header<T> other )
    {
        Objects.requireNonNull ( other );
        this.entries.putAll ( other.entries );
    }

    public Header ()
    {
    }

    /** @return the number of entries currently held */
    public int size ()
    {
        return this.entries.size ();
    }

    public void putNull ( final int tag )
    {
        this.entries.put ( tag, null );
    }

    public void putNull ( final T tag )
    {
        this.entries.put ( tag.getValue (), null );
    }

    public void putByte ( final int tag, final byte... value )
    {
        Objects.requireNonNull ( value );
        this.entries.put ( tag, value );
    }

    public void putByte ( final T tag, final byte... value )
    {
        Objects.requireNonNull ( value );
        this.entries.put ( tag.getValue (), value );
    }

    public void putShort ( final int tag, final short... value )
    {
        Objects.requireNonNull ( value );
        this.entries.put ( tag, value );
    }

    public void putShort ( final T tag, final short... value )
    {
        Objects.requireNonNull ( value );
        this.entries.put ( tag.getValue (), value );
    }

    public void putInt ( final int tag, final int... value )
    {
        Objects.requireNonNull ( value );
        this.entries.put ( tag, value );
    }

    public void putInt ( final T tag, final int... value )
    {
        Objects.requireNonNull ( value );
        this.entries.put ( tag.getValue (), value );
    }

    public void putLong ( final int tag, final long... value )
    {
        Objects.requireNonNull ( value );
        this.entries.put ( tag, value );
    }

    public void putLong ( final T tag, final long... value )
    {
        Objects.requireNonNull ( value );
        this.entries.put ( tag.getValue (), value );
    }

    public void putString ( final int tag, final String value )
    {
        Objects.requireNonNull ( value );
        this.entries.put ( tag, value );
    }

    public void putString ( final T tag, final String value )
    {
        Objects.requireNonNull ( value );
        this.entries.put ( tag.getValue (), value );
    }

    /** Like {@link #putString(int, String)} but silently ignores {@code null}. */
    public void putStringOptional ( final int tag, final String value )
    {
        if ( value == null )
        {
            return;
        }
        this.entries.put ( tag, value );
    }

    public void putStringOptional ( final T tag, final String value )
    {
        if ( value == null )
        {
            return;
        }
        this.entries.put ( tag.getValue (), value );
    }

    public void putStringArray ( final int tag, final String... value )
    {
        Objects.requireNonNull ( value );
        this.entries.put ( tag, value );
    }

    public void putStringArray ( final T tag, final String... value )
    {
        Objects.requireNonNull ( value );
        this.entries.put ( tag.getValue (), value );
    }

    public void putI18nString ( final int tag, final String... value )
    {
        Objects.requireNonNull ( value );
        this.entries.put ( tag, Arrays.stream ( value ).map ( v -> new I18nString ( v ) ).toArray ( I18nString[]::new ) );
    }

    public void putI18nString ( final T tag, final String... value )
    {
        Objects.requireNonNull ( value );
        this.entries.put ( tag.getValue (), Arrays.stream ( value ).map ( v -> new I18nString ( v ) ).toArray ( I18nString[]::new ) );
    }

    public void putBlob ( final int tag, final byte[] value )
    {
        Objects.requireNonNull ( value );
        this.entries.put ( tag, ByteBuffer.wrap ( value ) );
    }

    public void putBlob ( final int tag, final ByteBuffer value )
    {
        Objects.requireNonNull ( value );
        this.entries.put ( tag, value );
    }

    public void putBlob ( final T tag, final byte[] value )
    {
        Objects.requireNonNull ( value );
        this.entries.put ( tag.getValue (), ByteBuffer.wrap ( value ) );
    }

    public void putBlob ( final T tag, final ByteBuffer value )
    {
        Objects.requireNonNull ( value );
        this.entries.put ( tag.getValue (), value );
    }

    /**
     * Store a size value: negative values are clamped to zero, values
     * exceeding {@link Integer#MAX_VALUE} go to {@code longTag}, smaller
     * values to {@code intTag}.
     */
    public void putSize ( long value, final T intTag, final T longTag )
    {
        Objects.requireNonNull ( intTag );
        Objects.requireNonNull ( longTag );

        if ( value <= 0 )
        {
            value = 0;
        }

        if ( value > Integer.MAX_VALUE )
        {
            putLong ( longTag, value );
        }
        else
        {
            putInt ( intTag, (int)value );
        }
    }

    public void remove ( final int tag )
    {
        this.entries.remove ( tag );
    }

    public void remove ( final RpmTag tag )
    {
        this.entries.remove ( tag.getValue () );
    }

    public Object get ( final int tag )
    {
        return this.entries.get ( tag );
    }

    public Object get ( final T tag )
    {
        return this.entries.get ( tag.getValue () );
    }

    @Override
    public Optional<Object> getValue ( final T tag )
    {
        return Optional.ofNullable ( get ( tag ) );
    }

    /**
     * Make an array of header entries with given charset
     * <p>
     * <strong>Note:</strong> Further updates on this instance will not update
     * the returned array. This is actually a copy of the current state.
     * </p>
     *
     * @param charset the charset of choice
     * @return a new array of all header entries, unsorted
     */
    public HeaderEntry[] makeEntries ( Charset charset )
    {
        if ( charset == null )
        {
            throw new <API key> ( "'charset' cannot be null" );
        }
        // The charset is passed through explicitly instead of being stored
        // in a mutable static field (which raced under concurrent use).
        return this.entries.entrySet ().stream ().map ( entry -> makeEntry ( entry, charset ) ).toArray ( num -> new HeaderEntry[num] );
    }

    /**
     * Make an array of header entries
     * <p>
     * <strong>Note:</strong> Further updates on this instance will not update
     * the returned array. This is actually a copy of the current state.
     * </p>
     *
     * @return a new array of all header entries, unsorted
     */
    public HeaderEntry[] makeEntries ()
    {
        return makeEntries ( StandardCharsets.UTF_8 );
    }

    /**
     * Serialize one map entry into a {@link HeaderEntry}, dispatching on the
     * runtime type of the stored value.
     */
    private static HeaderEntry makeEntry ( final Map.Entry<Integer, Object> entry, final Charset charset )
    {
        final Object val = entry.getValue ();
        final int tag = entry.getKey ();

        if ( val instanceof HeaderEntry )
        {
            return (HeaderEntry)val;
        }

        // NULL

        if ( val == null )
        {
            return new HeaderEntry ( Type.NULL, tag, 0, null );
        }

        // FIXME: CHAR

        // BYTE

        if ( val instanceof byte[] )
        {
            final byte[] value = (byte[])val;
            return new HeaderEntry ( Type.BYTE, tag, value.length, value );
        }

        // SHORT

        if ( val instanceof short[] )
        {
            final short[] value = (short[])val;
            final byte[] data = new byte[value.length * 2];
            final ByteBuffer buffer = ByteBuffer.wrap ( data );
            for ( final short v : value )
            {
                buffer.putShort ( v );
            }
            return new HeaderEntry ( Type.SHORT, tag, value.length, data );
        }

        // INT

        if ( val instanceof int[] )
        {
            final int[] value = (int[])val;
            final byte[] data = new byte[value.length * 4];
            final ByteBuffer buffer = ByteBuffer.wrap ( data );
            for ( final int v : value )
            {
                buffer.putInt ( v );
            }
            return new HeaderEntry ( Type.INT, tag, value.length, data );
        }

        // LONG

        if ( val instanceof long[] )
        {
            final long[] value = (long[])val;
            final byte[] data = new byte[value.length * 8];
            final ByteBuffer buffer = ByteBuffer.wrap ( data );
            for ( final long v : value )
            {
                buffer.putLong ( v );
            }
            return new HeaderEntry ( Type.LONG, tag, value.length, data );
        }

        // STRING

        if ( val instanceof String )
        {
            final String value = (String)val;
            return new HeaderEntry ( Type.STRING, tag, 1, makeStringData ( new <API key> (), value, charset ).toByteArray () );
        }

        // BLOB

        if ( val instanceof ByteBuffer )
        {
            final ByteBuffer value = (ByteBuffer)val;
            byte[] data;
            if ( value.hasArray () )
            {
                data = value.array ();
            }
            else
            {
                data = new byte[value.remaining ()];
                value.get ( data );
            }
            return new HeaderEntry ( Type.BLOB, tag, data.length, data );
        }

        // STRING_ARRAY

        if ( val instanceof String[] )
        {
            final String[] value = (String[])val;
            return new HeaderEntry ( Type.STRING_ARRAY, tag, value.length, makeStringsData ( new <API key> (), value, charset ).toByteArray () );
        }

        // I18N_STRING

        if ( val instanceof I18nString[] )
        {
            final I18nString[] value = (I18nString[])val;
            return new HeaderEntry ( Type.I18N_STRING, tag, value.length, makeStringsData ( new <API key> (), value, charset ).toByteArray () );
        }

        throw new <API key> ( String.format ( "Unable to process value type: %s", val.getClass () ) );
    }

    /**
     * Append one NUL-terminated string (encoded with {@code charset}) to
     * {@code out}; a {@code null} string contributes only the terminator.
     */
    private static <T extends OutputStream> T makeStringData ( final T out, final String string, final Charset charset )
    {
        try
        {
            if ( string != null )
            {
                out.write ( string.getBytes ( charset ) );
            }
            out.write ( 0 );
        }
        catch ( final IOException e )
        {
            // in-memory streams should not fail; preserve the cause
            throw new RuntimeException ( e );
        }
        return out;
    }

    private static <T extends OutputStream> T makeStringsData ( final T out, final String[] strings, final Charset charset )
    {
        for ( final String s : strings )
        {
            makeStringData ( out, s, charset );
        }
        return out;
    }

    private static <T extends OutputStream> T makeStringsData ( final T out, final I18nString[] strings, final Charset charset )
    {
        for ( final I18nString s : strings )
        {
            if ( s != null )
            {
                makeStringData ( out, s.value, charset );
            }
            else
            {
                makeStringData ( out, null, charset );
            }
        }
        return out;
    }

    /**
     * Map each entry through {@code func} into an allocated array and hand
     * the result to {@code putter}; no-op on an empty collection.
     */
    public static <E, V, T extends RpmBaseTag> void putFields ( final Header<T> header, final Collection<E> entries, final T tag, final ArrayAllocator<V> arrayAllocator, final Function<E, V> func, final Putter<T, V> putter )
    {
        if ( entries.isEmpty () )
        {
            return;
        }

        final V[] values = arrayAllocator.allocate ( entries.size () );
        int i = 0;
        for ( final E entry : entries )
        {
            values[i] = func.apply ( entry );
            i++;
        }

        putter.put ( header, tag, values );
    }

    public static <E, T extends RpmBaseTag> void putShortFields ( final Header<T> header, final Collection<E> entries, final T tag, final ToShortFunction<E> func )
    {
        if ( entries.isEmpty () )
        {
            return;
        }

        final short[] values = new short[entries.size ()];
        int i = 0;
        for ( final E entry : entries )
        {
            values[i] = func.applyAsShort ( entry );
            i++;
        }

        header.putShort ( tag, values );
    }

    public static <E, T extends RpmBaseTag> void putIntFields ( final Header<T> header, final Collection<E> entries, final T tag, final ToIntFunction<E> func )
    {
        if ( entries.isEmpty () )
        {
            return;
        }

        final int[] values = new int[entries.size ()];
        int i = 0;
        for ( final E entry : entries )
        {
            values[i] = func.applyAsInt ( entry );
            i++;
        }

        header.putInt ( tag, values );
    }

    public static <E, T extends RpmBaseTag> void putLongFields ( final Header<T> header, final Collection<E> entries, final T tag, final ToLongFunction<E> func )
    {
        if ( entries.isEmpty () )
        {
            return;
        }

        final long[] values = new long[entries.size ()];
        int i = 0;
        for ( final E entry : entries )
        {
            values[i] = func.applyAsLong ( entry );
            i++;
        }

        header.putLong ( tag, values );
    }
}
//fgnass.github.com/spin.js#v2.0.1
// Minified build artifact (spin.js v2.0.1) — annotated for review only;
// code lines are unchanged. UMD wrapper: CommonJS export, then AMD,
// else attach as global `Spinner`.
! function(a, b) {
"object" == typeof exports ? module.exports = b() : "function" == typeof define && define.amd ? define(b) : a.Spinner = b()
}(this, function() {
"use strict";

// a(tag, props): create a DOM element (default "div") and copy props onto it.
function a(a, b) {
var c, d = document.createElement(a || "div");
for (c in b) d[c] = b[c];
return d
}

// b(parent, child...): append all remaining arguments to the first; returns parent.
function b(a) {
for (var b = 1, c = arguments.length; c > b; b++) a.appendChild(arguments[b]);
return a
}

// c(opacity, trail, i, lines): build (and cache in `l`) a vendor-prefixed
// @keyframes rule fading one spinner line; inserts into stylesheet `m` and
// returns the generated animation name.
function c(a, b, c, d) {
var e = ["opacity", b, ~~ (100 * a), c, d].join("-"),
f = .01 + c / d * 100,
g = Math.max(1 - (1 - a) / b * (100 - f), a),
h = j.substring(0, j.indexOf("Animation")).toLowerCase(),
i = h && "-" + h + "-" || "";
return l[e] || (m.insertRule("@" + i + "keyframes " + e + "{0%{opacity:" + g + "}" + f + "%{opacity:" + a + "}" + (f + .01) + "%{opacity:1}" + (f + b) % 100 + "%{opacity:" + a + "}100%{opacity:" + g + "}}", m.cssRules.length), l[e] = 1), e
}

// d(el, prop): find the supported (possibly vendor-prefixed) style property
// name on `el`, or undefined when unsupported.
function d(a, b) {
var c, d, e = a.style;
for (b = b.charAt(0).toUpperCase() + b.slice(1), d = 0; d < k.length; d++)
if (c = k[d] + b, void 0 !== e[c]) return c;
return void 0 !== e[b] ? b : void 0
}

// e(el, cssMap): apply a map of CSS properties, resolving prefixes via d().
function e(a, b) {
for (var c in b) a.style[d(a, c) || c] = b[c];
return a
}

// f(target, defaults...): first-wins merge — fill in only missing keys.
function f(a) {
for (var b = 1; b < arguments.length; b++) {
var c = arguments[b];
for (var d in c) void 0 === a[d] && (a[d] = c[d])
}
return a
}

// g(color, i): pick a color — a plain string, or cycle through an array.
function g(a, b) {
return "string" == typeof a ? a : a[b % a.length]
}

// h(options): the Spinner constructor; merges user options over defaults `n`.
function h(a) {
this.opts = f(a || {}, h.defaults, n)
}

// i(): VML fallback for old IE (no CSS transforms) — redefines
// h.prototype.lines/opacity to render the spinner with VML shapes.
function i() {
function c(b, c) {
return a("<" + b + ' xmlns="urn:schemas-microsoft.com:vml" class="spin-vml">', c)
}
m.addRule(".spin-vml", "behavior:url(#default#VML)"), h.prototype.lines = function(a, d) {
function f() {
return e(c("group", {
coordsize: k + " " + k,
coordorigin: -j + " " + -j
}), {
width: k,
height: k
})
}
function h(a, h, i) {
b(m, b(e(f(), {
rotation: 360 / d.lines * a + "deg",
left: ~~h
}), b(e(c("roundrect", {
arcsize: d.corners
}), {
width: j,
height: d.width,
left: d.radius,
top: -d.width >> 1,
filter: i
}), c("fill", {
color: g(d.color, a),
opacity: d.opacity
}), c("stroke", {
opacity: 0
}))))
}
var i, j = d.length + d.width,
k = 2 * j,
l = 2 * -(d.width + d.length) + "px",
m = e(f(), {
position: "absolute",
top: l,
left: l
});
if (d.shadow)
for (i = 1; i <= d.lines; i++) h(i, -2, "progid:DXImageTransform.Microsoft.Blur(pixelradius=2,makeshadow=1,shadowopacity=.3)");
for (i = 1; i <= d.lines; i++) h(i);
return b(a, m)
}, h.prototype.opacity = function(a, b, c, d) {
var e = a.firstChild;
d = d.shadow && d.lines || 0, e && b + d < e.childNodes.length && (e = e.childNodes[b + d], e = e && e.firstChild, e = e && e.firstChild, e && (e.opacity = c))
}
}

// j: detected animation property name; k: vendor prefixes; l: keyframe-name
// cache; m: injected <style> sheet; n: default options.
var j, k = ["webkit", "Moz", "ms", "O"],
l = {},
m = function() {
var c = a("style", {
type: "text/css"
});
return b(document.<API key>("head")[0], c), c.sheet || c.styleSheet
}(),
n = {
lines: 12,
length: 7,
width: 5,
radius: 10,
rotate: 0,
corners: 1,
color: "#000",
direction: 1,
speed: 1,
trail: 100,
opacity: .25,
fps: 20,
zIndex: 2e9,
className: "spinner",
top: "50%",
left: "50%",
position: "absolute"
};

// Public prototype: spin() builds the element and (without CSS animation
// support) drives opacity via setTimeout; stop() tears down; lines() renders
// the line elements; opacity() sets one line's opacity.
h.defaults = {}, f(h.prototype, {
spin: function(b) {
this.stop(); {
var c = this,
d = c.opts,
f = c.el = e(a(0, {
className: d.className
}), {
position: d.position,
width: 0,
zIndex: d.zIndex
});
d.radius + d.length + d.width
}
if (e(f, {
left: d.left,
top: d.top
}), b && b.insertBefore(f, b.firstChild || null), f.setAttribute("role", "progressbar"), c.lines(f, c.opts), !j) {
var g, h = 0,
i = (d.lines - 1) * (1 - d.direction) / 2,
k = d.fps,
l = k / d.speed,
m = (1 - d.opacity) / (l * d.trail / 100),
n = l / d.lines;
! function o() {
h++;
for (var a = 0; a < d.lines; a++) g = Math.max(1 - (h + (d.lines - a) * n) % l * m, d.opacity), c.opacity(f, a * d.direction + i, g, d);
c.timeout = c.el && setTimeout(o, ~~ (1e3 / k))
}()
}
return c
},
stop: function() {
var a = this.el;
return a && (clearTimeout(this.timeout), a.parentNode && a.parentNode.removeChild(a), this.el = void 0), this
},
lines: function(d, f) {
function h(b, c) {
return e(a(), {
position: "absolute",
width: f.length + f.width + "px",
height: f.width + "px",
background: b,
boxShadow: c,
transformOrigin: "left",
transform: "rotate(" + ~~(360 / f.lines * k + f.rotate) + "deg) translate(" + f.radius + "px,0)",
borderRadius: (f.corners * f.width >> 1) + "px"
})
}
for (var i, k = 0, l = (f.lines - 1) * (1 - f.direction) / 2; k < f.lines; k++) i = e(a(), {
position: "absolute",
top: 1 + ~(f.width / 2) + "px",
transform: f.hwaccel ? "translate3d(0,0,0)" : "",
opacity: f.opacity,
animation: j && c(f.opacity, f.trail, l + k * f.direction, f.lines) + " " + 1 / f.speed + "s linear infinite"
}), f.shadow && b(i, e(h("#000", "0 0 4px #000"), {
top: "2px"
})), b(d, b(i, h(g(f.color, k), "0 0 1px rgba(0,0,0,.1)")));
return d
},
opacity: function(a, b, c) {
b < a.childNodes.length && (a.childNodes[b].style.opacity = c)
}
});

// Feature-detect: no transform support but VML available → use the IE
// fallback; otherwise remember the supported animation property in `j`.
var o = e(a("group"), {
behavior: "url(#default#VML)"
});
return !d(o, "transform") && o.adj ? i() : j = d(o, "animation"), h
});
package org.eclipse.dawnsci.nexus.impl;
import java.util.Date;
import java.util.Set;
import java.util.EnumSet;
import java.util.Map;
import org.eclipse.dawnsci.analysis.api.tree.DataNode;
import org.eclipse.january.dataset.IDataset;
import org.eclipse.dawnsci.nexus.*;
/**
* Document an event of data processing, reconstruction, or analysis for this data.
*
* @version 1.0
*/
// Generated NeXus base-class implementation: every accessor delegates to the
// generic dataset/child helpers of NXobjectImpl using the NX_* field-name
// constants declared on the NXprocess interface.
public class NXprocessImpl extends NXobjectImpl implements NXprocess {

    private static final long serialVersionUID = 1L;  // no state in this class, so always compatible

    // Base classes permitted as direct children of an NXprocess group.
    public static final Set<NexusBaseClass> <API key> = EnumSet.of(
        NexusBaseClass.NX_NOTE);

    public NXprocessImpl() {
        super();
    }

    public NXprocessImpl(final long oid) {
        super(oid);
    }

    @Override
    public Class<? extends NXobject> getNXclass() {
        return NXprocess.class;
    }

    @Override
    public NexusBaseClass getNexusBaseClass() {
        return NexusBaseClass.NX_PROCESS;
    }

    @Override
    public Set<NexusBaseClass> <API key>() {
        return <API key>;
    }

    // --- "program" field: name of the processing program ---

    @Override
    public IDataset getProgram() {
        return getDataset(NX_PROGRAM);
    }

    @Override
    public String getProgramScalar() {
        return getString(NX_PROGRAM);
    }

    @Override
    public DataNode setProgram(IDataset program) {
        return setDataset(NX_PROGRAM, program);
    }

    @Override
    public DataNode setProgramScalar(String program) {
        return setString(NX_PROGRAM, program);
    }

    // --- "sequence_index" field ---

    @Override
    public IDataset getSequence_index() {
        return getDataset(NX_SEQUENCE_INDEX);
    }

    @Override
    public Long <API key>() {
        return getLong(NX_SEQUENCE_INDEX);
    }

    @Override
    public DataNode setSequence_index(IDataset sequence_index) {
        return setDataset(NX_SEQUENCE_INDEX, sequence_index);
    }

    @Override
    public DataNode <API key>(Long sequence_index) {
        return setField(NX_SEQUENCE_INDEX, sequence_index);
    }

    // --- "version" field ---

    @Override
    public IDataset getVersion() {
        return getDataset(NX_VERSION);
    }

    @Override
    public String getVersionScalar() {
        return getString(NX_VERSION);
    }

    @Override
    public DataNode setVersion(IDataset version) {
        return setDataset(NX_VERSION, version);
    }

    @Override
    public DataNode setVersionScalar(String version) {
        return setString(NX_VERSION, version);
    }

    // --- "date" field ---

    @Override
    public IDataset getDate() {
        return getDataset(NX_DATE);
    }

    @Override
    public Date getDateScalar() {
        return getDate(NX_DATE);
    }

    @Override
    public DataNode setDate(IDataset date) {
        return setDataset(NX_DATE, date);
    }

    @Override
    public DataNode setDateScalar(Date date) {
        return setDate(NX_DATE, date);
    }

    // --- NXnote children ---

    @Override
    public NXnote getNote() {
        return getChild("note", NXnote.class);
    }

    @Override
    public void setNote(NXnote note) {
        putChild("note", note);
    }

    @Override
    public NXnote getNote(String name) {
        return getChild(name, NXnote.class);
    }

    @Override
    public void setNote(String name, NXnote note) {
        putChild(name, note);
    }

    @Override
    public Map<String, NXnote> getAllNote() {
        return getChildren(NXnote.class);
    }

    @Override
    public void setAllNote(Map<String, NXnote> note) {
        setChildren(note);
    }

}
package org.eclipse.che.api.workspace.server.model.impl;
import org.eclipse.che.api.core.model.workspace.Workspace;
import org.eclipse.che.api.core.model.workspace.WorkspaceConfig;
import org.eclipse.che.api.core.model.workspace.WorkspaceRuntime;
import org.eclipse.che.api.core.model.workspace.WorkspaceStatus;
import org.eclipse.che.commons.lang.NameGenerator;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import static com.google.common.base.MoreObjects.firstNonNull;
import static org.eclipse.che.api.core.model.workspace.WorkspaceStatus.STOPPED;
/**
* Data object for {@link Workspace}.
*
* @author Yevhenii Voevodin
*/
public class WorkspaceImpl implements Workspace {

    public static <API key> builder() {
        return new <API key>();
    }

    private String id;
    private String namespace;
    private WorkspaceConfigImpl config;
    private boolean isTemporary;
    private WorkspaceStatus status;
    private Map<String, String> attributes;
    private <API key> runtime;

    public WorkspaceImpl(String id, String namespace, WorkspaceConfig config) {
        this(id, namespace, config, null, null, false, STOPPED);
    }

    /**
     * Primary constructor: wraps the given config/runtime in their Impl
     * types, defensively copies attributes and defaults status to
     * {@link WorkspaceStatus#STOPPED} when {@code null}.
     */
    public WorkspaceImpl(String id,
                         String namespace,
                         WorkspaceConfig config,
                         WorkspaceRuntime runtime,
                         Map<String, String> attributes,
                         boolean isTemporary,
                         WorkspaceStatus status) {
        this.id = id;
        this.namespace = namespace;
        this.config = new WorkspaceConfigImpl(config);
        if (runtime != null) {
            this.runtime = new <API key>(runtime);
        }
        if (attributes != null) {
            this.attributes = new HashMap<>(attributes);
        }
        this.status = firstNonNull(status, STOPPED);
        this.isTemporary = isTemporary;
    }

    /**
     * Copy constructor.
     * <p>
     * Fix applied in review: delegate to the primary constructor instead of
     * duplicating its wrapping logic — the previous implementation called
     * {@code new HashMap<>(workspace.getAttributes())} unconditionally and
     * threw NPE for {@link Workspace} implementations returning {@code null}
     * attributes, unlike the primary constructor which guards null.
     */
    public WorkspaceImpl(Workspace workspace) {
        this(workspace.getId(),
             workspace.getNamespace(),
             workspace.getConfig(),
             workspace.getRuntime(),
             workspace.getAttributes(),
             workspace.isTemporary(),
             workspace.getStatus());
    }

    @Override
    public String getId() {
        return id;
    }

    @Override
    public String getNamespace() {
        return namespace;
    }

    @Override
    public WorkspaceStatus getStatus() {
        return status;
    }

    public void setStatus(WorkspaceStatus status) {
        this.status = status;
    }

    public void setConfig(WorkspaceConfigImpl config) {
        this.config = config;
    }

    /** Never returns {@code null}; lazily initializes an empty map. */
    @Override
    public Map<String, String> getAttributes() {
        if (attributes == null) {
            attributes = new HashMap<>();
        }
        return attributes;
    }

    public void setAttributes(Map<String, String> attributes) {
        this.attributes = attributes;
    }

    @Override
    public boolean isTemporary() {
        return isTemporary;
    }

    public void setTemporary(boolean isTemporary) {
        this.isTemporary = isTemporary;
    }

    @Override
    public WorkspaceConfigImpl getConfig() {
        return config;
    }

    @Override
    public <API key> getRuntime() {
        return runtime;
    }

    public void setRuntime(<API key> runtime) {
        this.runtime = runtime;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (!(obj instanceof WorkspaceImpl)) return false;
        final WorkspaceImpl other = (WorkspaceImpl)obj;
        return Objects.equals(id, other.id)
               && Objects.equals(namespace, other.namespace)
               && Objects.equals(status, other.status)
               && isTemporary == other.isTemporary
               && getAttributes().equals(other.getAttributes())
               && Objects.equals(config, other.config)
               && Objects.equals(runtime, other.runtime);
    }

    @Override
    public int hashCode() {
        // Must stay consistent with equals: same field set on both sides.
        int hash = 7;
        hash = 31 * hash + Objects.hashCode(id);
        hash = 31 * hash + Objects.hashCode(namespace);
        hash = 31 * hash + Objects.hashCode(status);
        hash = 31 * hash + Objects.hashCode(config);
        hash = 31 * hash + getAttributes().hashCode();
        hash = 31 * hash + Boolean.hashCode(isTemporary);
        hash = 31 * hash + Objects.hashCode(runtime);
        return hash;
    }

    @Override
    public String toString() {
        return "WorkspaceImpl{" +
               "id='" + id + '\'' +
               ", namespace='" + namespace + '\'' +
               ", config=" + config +
               ", isTemporary=" + isTemporary +
               ", status=" + status +
               ", attributes=" + attributes +
               ", runtime=" + runtime +
               '}';
    }

    /**
     * Helps to build complex {@link WorkspaceImpl users workspace instance}.
     *
     * @see WorkspaceImpl#builder()
     */
    public static class <API key> {

        private String id;
        private String namespace;
        private boolean isTemporary;
        private WorkspaceStatus status;
        private WorkspaceConfigImpl config;
        private <API key> runtime;
        private Map<String, String> attributes;

        private <API key>() {}

        public WorkspaceImpl build() {
            return new WorkspaceImpl(id, namespace, config, runtime, attributes, isTemporary, status);
        }

        public <API key> generateId() {
            id = NameGenerator.generate("workspace", 16);
            return this;
        }

        public <API key> setConfig(WorkspaceConfig workspaceConfig) {
            this.config = new WorkspaceConfigImpl(workspaceConfig);
            return this;
        }

        public <API key> setId(String id) {
            this.id = id;
            return this;
        }

        public <API key> setNamespace(String namespace) {
            this.namespace = namespace;
            return this;
        }

        public <API key> setTemporary(boolean isTemporary) {
            this.isTemporary = isTemporary;
            return this;
        }

        public <API key> setStatus(WorkspaceStatus status) {
            this.status = status;
            return this;
        }

        public <API key> setAttributes(Map<String, String> attributes) {
            this.attributes = attributes;
            return this;
        }

        public <API key> setRuntime(<API key> runtime) {
            this.runtime = runtime;
            return this;
        }
    }
}
# Tic-tac-toe Game Analysis
There is only one fundamental thing in tic-tac-toe and that is a
three-in-a-row. We will call this an "attack".
Now, there are four different types of attacks: potential attacks,
threats, shots, and nulls. These distinctions simply describe which
player (if either) has claimed that attack.
A potential attack is one where no player has claimed any of the
squares in it.
A null attack is one where no player can claim because both players
have played in the attack at least once.
A threat is one in which the opposing player has claimed that attack
by placing at least one piece in it, and the other squares are unclaimed. A
shot is the opposite, where the AI has claimed that attack. Both
threats and shots have a priority, which reflects whether the claiming
player holds two of the squares in the attack or only one.
/**
* JMX helpers.
*
* @since 3.0
*/
package org.sonatype.nexus.jmx; |
/* JavaFX application stylesheet (top section) plus embedded-HTML table
 * styles (bottom section). Palette: crimson #ab263c, blue #066db3. */
.anchorpane {
    -fx-background-color: #f2f2f2;
}
.progress-bar .bar {
    -fx-padding: 1px;
    -<API key>: 0;
    -fx-background-color: #ab263c;
}
.text-field-login {
    -fx-background-color: #faffbd;
    -fx-border-radius: 1.0;
    -fx-border-color: #c6c5c5;
}
.text-field-main {
    -fx-background-color: white;
    -fx-border-radius: 1.0;
    -fx-border-color: #b0b0b0;
    -fx-text-fill: #5c5b5b;
}
.button{
    -fx-background-color: #066db3;
    -fx-font-size: 13.0px;
    -fx-text-fill: white;
    /*-fx-border-color: #b0b0b0;*/
}
.button:hover{
    -fx-background-color: #0790ed;
}
.root {
    -fx-accent: #328ff2;
    -fx-focus-color: #328ff2;
    -fx-text-fill: white;
}
.label-info{
    -fx-font-size: 11.0;
    -fx-text-fill: #5c5b5b;
    /*-fx-background-color:#ffff99;*/
    /* NOTE(review): -fx-margin is not a standard JavaFX CSS property and is
       likely ignored — confirm against the JavaFX CSS reference. */
    -fx-margin: 15.0;
    -fx-padding: 0.0 0.0 0.0 8.0 ;
}
.label-lists{
    -fx-text-fill: #066db3;
}
.list-view-intro{
    -fx-padding: 4.0;
    -fx-font-size: 15.0px;
    -fx-background-color: #ab263c;
}
.list-cell:hover {
    -fx-background-color: #ffa1a7;
    -fx-text-fill: black;
}
.list-cell:selected {
    -fx-background-color: #ab263c;
    -fx-text-fill: white;
}
.tree-cell:hover{
    -fx-background-color: #ffa1a7;
    -fx-text-fill: black;
}
.tree-cell:selected{
    -fx-background-color: #ab263c;
    -fx-text-fill: white;
}
.tree-cell .<API key> .arrow {
    /*-fx-shape: square;
    -fx-background-color: blue;*/
    -fx-background-image: url("/img/arrow_collapsed.png");
}
.tree-cell:expanded .<API key> .arrow {
    /*-fx-shape: circle;
    -fx-background-color: green;*/
    -fx-background-image: url("/img/arrow_expanded.png");
}
.pane{
    -fx-background-color:#f2f2f2;
}
.menu-button .label{
    -fx-background-color: #ab263c;
    /*-fx-border-color:#cc0044;*/
    -fx-font-size: 11.0px;
    -fx-text-fill: white;
}
.menu-button{
    -fx-background-color: #ab263c;
    /*-fx-border-color:#cc0044;*/
    -fx-font-size: 11.0px;
    -fx-text-fill: white;
}
.menu-button .arrow-button {
    -fx-mark-color: white;
}
.menu-item{
    -fx-background-color: #ab263c;
}
.menu-item:hover{
    -fx-background-color: #066db3;
    /*-fx-background-color: #ffff99;*/
}
.menu-item .label{
    -fx-background-color: #ab263c;
    -fx-text-fill: white;
}
.menu-item:hover .label{
    -fx-background-color: #066db3;
    -fx-text-fill: white;
}
.menu-bar .label{
    -fx-background-color: #ab263c;
}
/* HTML */
table {
    margin-bottom: 3.0em;
    width: 100.0%;
    font-family:Arial, Verdana, sans-serif;
    text-align:center;
    /*border: 1.0 solid grey;*/
    border-radius: 1;
    border-collapse: collapse;
}
th{
    /* fixed typo: was "heigth", which browsers silently ignored */
    height:20%;
    font-size:80%;
    color: white;
    background: #ab263c;
    padding:1%;
    border-collapse: collapse;
}
td {
    font-size:80%;
    /* fixed typo: was "heigth" */
    height:20%;
    background: white;
    padding:3%;
    /*border: 1.0 solid grey;*/
    border-collapse: collapse;
}
-- Schema for the todo table: one row per task.
CREATE TABLE todo (
    id          SERIAL  PRIMARY KEY,        -- auto-incrementing task id
    description TEXT    NOT NULL,           -- task text, required
    done        BOOLEAN DEFAULT FALSE       -- completion flag, new tasks start open
)
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (version 1.7.0_60) on Tue Dec 23 11:14:56 CET 2014 -->
<meta http-equiv="Content-Type" content="text/html" charset="UTF-8">
<title>Uses of Interface com.datastax.driver.core.policies.ReconnectionPolicy.<API key> (DataStax Java Driver for Apache Cassandra - Binary distribution 2.1.4 API)</title>
<meta name="date" content="2014-12-23">
<link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
    if (location.href.indexOf('is-external=true') == -1) {
        parent.document.title="Uses of Interface com.datastax.driver.core.policies.ReconnectionPolicy.<API key> (DataStax Java Driver for Apache Cassandra - Binary distribution 2.1.4 API)";
    }
//--></script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<div class="topNav"><a name="navbar_top">
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../com/datastax/driver/core/policies/ReconnectionPolicy.<API key>.html" title="interface in com.datastax.driver.core.policies">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../index.html?com/datastax/driver/core/policies/class-use/ReconnectionPolicy.<API key>.html" target="_top">Frames</a></li>
<li><a href="ReconnectionPolicy.<API key>.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="<API key>">
<li><a href="../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
    allClassesLink = document.getElementById("<API key>");
    if(window==top) {
        allClassesLink.style.display = "block";
    }
    else {
        allClassesLink.style.display = "none";
    }
//--></script>
</div>
<a name="skip-navbar_top">
</a></div>
<div class="header">
<h2 title="Uses of Interface com.datastax.driver.core.policies.ReconnectionPolicy.<API key>" class="title">Uses of Interface<br>com.datastax.driver.core.policies.ReconnectionPolicy.<API key></h2>
</div>
<div class="classUseContainer">
<ul class="blockList">
<li class="blockList">
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation">
<caption><span>Packages that use <a href="../../../../../../com/datastax/driver/core/policies/ReconnectionPolicy.<API key>.html" title="interface in com.datastax.driver.core.policies">ReconnectionPolicy.<API key></a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Package</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><a href="#com.datastax.driver.core.policies">com.datastax.driver.core.policies</a></td>
<td class="colLast">
<div class="block">Policies that allow to control some of the behavior of the DataStax Java driver for Cassandra.</div>
</td>
</tr>
</tbody>
</table>
</li>
<li class="blockList">
<ul class="blockList">
<li class="blockList"><a name="com.datastax.driver.core.policies">
</a>
<h3>Uses of <a href="../../../../../../com/datastax/driver/core/policies/ReconnectionPolicy.<API key>.html" title="interface in com.datastax.driver.core.policies">ReconnectionPolicy.<API key></a> in <a href="../../../../../../com/datastax/driver/core/policies/package-summary.html">com.datastax.driver.core.policies</a></h3>
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
<caption><span>Methods in <a href="../../../../../../com/datastax/driver/core/policies/package-summary.html">com.datastax.driver.core.policies</a> that return <a href="../../../../../../com/datastax/driver/core/policies/ReconnectionPolicy.<API key>.html" title="interface in com.datastax.driver.core.policies">ReconnectionPolicy.<API key></a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code><a href="../../../../../../com/datastax/driver/core/policies/ReconnectionPolicy.<API key>.html" title="interface in com.datastax.driver.core.policies">ReconnectionPolicy.<API key></a></code></td>
<td class="colLast"><span class="strong">ReconnectionPolicy.</span><code><strong><a href="../../../../../../com/datastax/driver/core/policies/ReconnectionPolicy.html#newSchedule()">newSchedule</a></strong>()</code>
<div class="block">Creates a new schedule for reconnection attempts.</div>
</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code><a href="../../../../../../com/datastax/driver/core/policies/ReconnectionPolicy.<API key>.html" title="interface in com.datastax.driver.core.policies">ReconnectionPolicy.<API key></a></code></td>
<td class="colLast"><span class="strong"><API key>.</span><code><strong><a href="../../../../../../com/datastax/driver/core/policies/<API key>.html#newSchedule()">newSchedule</a></strong>()</code>
<div class="block">A new schedule that used an exponentially growing delay between reconnection attempts.</div>
</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code><a href="../../../../../../com/datastax/driver/core/policies/ReconnectionPolicy.<API key>.html" title="interface in com.datastax.driver.core.policies">ReconnectionPolicy.<API key></a></code></td>
<td class="colLast"><span class="strong"><API key>.</span><code><strong><a href="../../../../../../com/datastax/driver/core/policies/<API key>.html#newSchedule()">newSchedule</a></strong>()</code>
<div class="block">A new schedule that uses a constant <code>getConstantDelayMs()</code> delay
between reconnection attempt.</div>
</td>
</tr>
</tbody>
</table>
</li>
</ul>
</li>
</ul>
</div>
<div class="bottomNav"><a name="navbar_bottom">
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="<API key>">
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../com/datastax/driver/core/policies/ReconnectionPolicy.<API key>.html" title="interface in com.datastax.driver.core.policies">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../index.html?com/datastax/driver/core/policies/class-use/ReconnectionPolicy.<API key>.html" target="_top">Frames</a></li>
<li><a href="ReconnectionPolicy.<API key>.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="<API key>">
<li><a href="../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
    allClassesLink = document.getElementById("<API key>");
    if(window==top) {
        allClassesLink.style.display = "block";
    }
    else {
        allClassesLink.style.display = "none";
    }
//--></script>
</div>
<a name="skip-navbar_bottom">
</a></div>
<p class="legalCopy"><small>Copyright &copy; 2014. All rights reserved.</small></p>
</body>
</html> |
date: 2013-04-17T23:00:00Z
title: 'Hardening Sprints: Just Say No?'
published: true
categories: [Agile]
type: article
external: false
I was recently asked by a PM in my company if "hardening sprints" are allowed in agile projects and I gave the very pragmatic answer of "you do what needs to be done". I left the conversation not feeling right though and I've been thinking about the question ever since. To be honest my gut instinct is that, with certain exceptions, these sort of sprints are a smell and their origins are firmly rooted in phased/waterfall delivery. I'll address the _"with certain exceptions"_ caveat a bit later but lets first dive into the idea of hardening sprints.
## What is a hardening sprint?

A hardening sprint is a timebox or sprint reserved at the end of a group of sprints (usually prior to release) to allow testing or hardening of the release. This could mean extra testing, different types of testing, refactoring, reviews etc. It brings with it the rather bizarre concept of "Done Done" - i.e. features that have been delivered in previous sprints may be "Done" but not "Done Done". So it may be coded and "kind-of" working and "kind-of" tested and people are "kind-of" happy with it and yeah its "kind-of" done but we'll wait until the end to know if we are really done.
## So What?
It strikes me as "kind-of" odd that this sort of thing happens without people raising an eyebrow. For one thing there is no such thing as "Done Done" and if we don't admit it I fear that we'll see "Done Done Done" pushing its way into our process vocabulary.
The next thing that strikes me - there is absolutely no data available to predict how long this "hardening" process should take. If we take it at face value the term implies a single sprint. How can we be sure whatever unpredictable stuff comes up during that sprint can actually be addressed and adequately resolved in that sprint? Or, taking it to the other extreme (an indefinite number of sprints), how do we know when to stop - after all, nothing is ever perfect.
## Undoing All The Good Work?
So we've spent X amount of sprints refining our approach to delivery, continually improving and learning and now we've thrown the project into this huge dark pit of uncertainty and "hardening". Surely I'm not alone in thinking this sounds somewhat wrong.
Another risk is that this kind of structure creates a potential reduction in ongoing quality. It could be argued that hardening sprints, much like the old phased/waterfall approach to delivery, remove a certain amount of responsibility from the delivery team. Now there is less of a desire for developers to apply as much rigour to their code as they might have - after all, _"the testers will find the bugs so why waste my time being thorough?"_. And if you are deferring your user or penetration testing a few months down the line there is always this notion that the team can just _"throw it in and see what comes out in the wash"_.
## "With Certain Exceptions"
Of course, like every rule, there may be exceptions. One obvious exception is external penetration testing. There can be considerable cost and time associated with getting a third-party specialist in and it's certainly not viable to do it every sprint. So deferring a full pen testing cycle until near the release is an acceptable and often necessary exception. But that doesn't mean the team should throw all care about security out the window - the goal of the penetration testing should be to validate that there are no vulnerabilities, not to discover them.
## Next Time Gadget... Next Time
So are "hardening sprints" a good idea? I'm inclined, as negative as it may be, to start with "no" and take it from there; after all, it's better to start by discovering __why we can't__ do certain things inside of a sprint rather than assume __we can't__ and carry on ignorant of the potential benefits.
package es.uah.aut.srg.micobs.library.ui.handlers;
import java.util.Collection;
import org.eclipse.emf.common.command.Command;
import org.eclipse.emf.edit.domain.<API key>;
import org.eclipse.emf.edit.ui.EMFEditUIPlugin;
import org.eclipse.emf.edit.ui.action.<API key>;
import org.eclipse.ui.IWorkbenchPart;
import es.uah.aut.srg.micobs.library.ILibraryManager;
/**
* Class that implements the action used to delete a new package from a library.
*/
public class <API key> extends <API key> {
// Library manager against which the delete command is executed.
protected ILibraryManager libraryManager;
/** Builds the EMF command that removes the selected elements from the library. */
@Override
public Command createCommand(Collection<?> selection)
{
return new <API key>(libraryManager, selection);
}
/**
 * Creates the action for the given library manager. The editing domain is
 * left null here and picked up later from the active workbench part.
 */
public <API key>(ILibraryManager libraryManager)
{
super(null, EMFEditUIPlugin.INSTANCE.getString("<API key>"));
this.libraryManager = libraryManager;
}
/** Refreshes the editing domain when the active workbench part changes. */
public void <API key>(IWorkbenchPart workbenchPart)
{
if (workbenchPart instanceof <API key>)
{
domain = ((<API key>)workbenchPart).getEditingDomain();
}
}
}
package generator;
import java.util.List;
import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.resource.Resource;
import org.eclipse.emf.ecore.resource.ResourceSet;
import org.eclipse.xtext.diagnostics.Severity;
import org.eclipse.xtext.generator.<API key>;
import org.eclipse.xtext.util.CancelIndicator;
import org.eclipse.xtext.validation.CheckMode;
import org.eclipse.xtext.validation.IResourceValidator;
import org.eclipse.xtext.validation.Issue;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.google.inject.Provider;
import generator.util.ValidationException;
import it.unifi.xtext.facpl.facpl2.Facpl;
import it.unifi.xtext.facpl.generator.Facpl2Generator;
import it.unifi.xtext.facpl.generator.SMT_LIBGenerator;
import it.unifi.xtext.facpl.generator.XMLGenerator;
import it.unifi.xtext.facpl.generator.util.StructuralProperty;
/**
 * Facade over the three FACPL code generators (Java, SMT-LIB, XACML).
 * Exposed through static {@code *_compile} entry points that lazily build a
 * Guice-injected singleton; each generator run loads the given EMF resource,
 * validates it, and hands it to the corresponding Xtext generator.
 */
public class FacplGenerator {

    /** Lazily created, Guice-injected singleton; see {@link #getInstance()}. */
    private static FacplGenerator instance;

    /*
     * The three FACPL code generators:
     * 1 - Java code generator (Facpl2Generator)
     * 2 - SMT code generator (SMT_LIBGenerator)
     * 3 - XACML code generator (XMLGenerator)
     */
    @Inject
    private Facpl2Generator java_generator;

    @Inject
    private SMT_LIBGenerator smt_generator;

    @Inject
    private XMLGenerator xml_generator;

    @Inject
    private Provider<ResourceSet> resourceSetProvider;

    @Inject
    private IResourceValidator validator;

    @Inject
    private <API key> fileAccess;

    /** Builds the singleton through Guice so that all {@code @Inject} fields are populated. */
    private static void getInstance() {
        Injector injector = new it.unifi.xtext.facpl.<API key>()
                .<API key>();
        instance = injector.getInstance(FacplGenerator.class);
    }

    /**
     * Compiles a FACPL file to Java code.
     *
     * @param args (1) path to the FACPL file; (2) output folder path
     * @throws Exception on bad arguments or compile errors; validation
     *         failures are reported as {@link ValidationException}
     */
    public static void java_compile(String[] args) throws Exception {
        if (args.length != 2) {
            throw new Exception("Aborting: no output path and no path to EMF resource provided!");
        }
        if (instance == null) {
            getInstance();
        }
        instance.runGenerator_Java(args[0], args[1]);
    }

    /**
     * Compiles a FACPL file to XACML.
     *
     * @param args (1) path to the FACPL file; (2) output folder path
     */
    public static void xml_compile(String[] args) throws Exception {
        if (args.length != 2) {
            throw new Exception("Aborting: no output path and no path to EMF resource provided!");
        }
        if (instance == null) {
            getInstance();
        }
        instance.runGenerator_SMT(args[0], args[1], n_Prop);
    }

    /**
     * Compiles a FACPL file to SMT-LIB code for property {@code n_Prop}.
     *
     * @param args (1) path to the FACPL file; (2) output folder path
     * @param n_Prop index used to name the generated COMPLETE_* property
     */
    public static void <API key>(String[] args, int n_Prop) throws Exception {
        // Consistency fix: this entry point previously skipped the argument
        // check performed by java_compile/xml_compile and would fail with an
        // ArrayIndexOutOfBoundsException instead of a clear message.
        if (args.length != 2) {
            throw new Exception("Aborting: no output path and no path to EMF resource provided!");
        }
        if (instance == null) {
            getInstance();
        }
        instance.runGenerator_SMT(args[0], args[1], n_Prop);
    }

    /* GENERATORs invocation methods */

    /** Loads the EMF resource at the given URI into a fresh resource set. */
    private Resource loadResource(String path) {
        ResourceSet set = resourceSetProvider.get();
        return set.getResource(URI.createURI(path), true);
    }

    /**
     * Validates the resource and throws a {@link ValidationException} carrying
     * every issue stronger than WARNING. Shared by all three generator runs
     * (this logic was previously copy-pasted three times).
     */
    private void checkErrors(Resource resource) throws ValidationException {
        List<Issue> list = validator.validate(resource, CheckMode.ALL, CancelIndicator.NullImpl);
        if (list.isEmpty()) {
            return;
        }
        ValidationException v = new ValidationException();
        int errors = 0;
        for (Issue issue : list) {
            if (issue.getSeverity() != Severity.INFO && issue.getSeverity() != Severity.WARNING) {
                v.addError(issue);
                errors++;
            }
        }
        if (errors > 0) {
            throw v;
        }
    }

    /**
     * Generates Java code for the FACPL file.
     *
     * @param string     path of the file to compile
     * @param outputPath folder where compiled policies will be written
     * @throws ValidationException if the resource has validation errors
     */
    protected void runGenerator_Java(String string, String outputPath) throws ValidationException {
        Resource resource = loadResource(string);
        checkErrors(resource);
        // Configure and start the generator
        fileAccess.setOutputPath(outputPath);
        java_generator.doGenerate(resource, fileAccess);
        System.out.println("Java Code generation finished.");
    }

    /**
     * Generates XACML code for the FACPL file.
     *
     * @param string     path of the file to compile
     * @param outputPath folder where compiled policies will be written
     */
    protected void runGenerator_XML(String string, String outputPath) throws Exception {
        Resource resource = loadResource(string);
        checkErrors(resource);
        // Configure and start the generator
        fileAccess.setOutputPath(outputPath);
        xml_generator.doGenerateFileXACML(resource, fileAccess);
        System.out.println("XACML Code generation finished.");
    }

    /**
     * Generates SMT-LIB code for the FACPL file, emitting the COMPLETE
     * structural property numbered {@code n_Prop}.
     */
    private void runGenerator_SMT(String string, String outputPath, int n_Prop) throws ValidationException {
        Resource resource = loadResource(string);
        checkErrors(resource);
        // Configure and start the generator
        fileAccess.setOutputPath(outputPath);
        for (EObject e : resource.getContents()) {
            if (e instanceof Facpl) {
                smt_generator.<API key>((Facpl) e, "root", "", "COMPLETE_" + n_Prop,
                        StructuralProperty.COMPLETE, fileAccess);
                /* TO USE THE FOLLOWING CODE IF INTERESTED IN JUST CREATING SMT_LIB CODE */
                // smt_generator.<API key>((Facpl) e,"root");
                System.out.println("End code Generation");
            }
        }
    }
}
// Informa -- RSS Library for Java
// which accompanies this distribution, and is available at
package de.nava.informa.utils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import java.util.Date;
import java.util.Timer;
import java.util.TimerTask;
/**
* Use a Timer and TimerTask to periodically refresh feeds added by
* FeedManager.
*
* @author jga
*/
public class FeedRefreshDaemon {
private static Log logger = LogFactory.getLog(FeedRefreshDaemon.class);
private Timer refreshTimer = null;
public FeedRefreshDaemon() {
logger.info("FeedRefresh Daemon instancied");
this.refreshTimer = new Timer(true);
}
public void addFeed(FeedManagerEntry feed) {
FeedRefreshTask refreshTask = new FeedRefreshTask();
refreshTask.setFeedME(feed);
// verifying every 5 minutes
logger.info("scheduling new feed ");
Date fiveMinuteLater = new Date(new Date().getTime() + (5 * 60 * 1000));
this.refreshTimer.schedule(refreshTask, fiveMinuteLater, (5 * 60 * 1000));
}
private class FeedRefreshTask extends TimerTask {
FeedManagerEntry f = null;
int nbError = 0;
public void setFeedME(FeedManagerEntry feedME) {
f = feedME;
}
public void run() {
try {
f.getFeed(); // call isOutofDate and optionnaly fetch new feed
logger.debug("feed refreshed" + f.getFeed().getLocation());
} catch (Exception e) {
this.nbError++;
logger.warn("Error retrieving feed" + f.toString() + " " + e);
}
}
}
} |
package fr.lip6.move.pnml.hlpn.terms;
import org.eclipse.emf.ecore.EFactory;
/**
* <!-- begin-user-doc -->
* The <b>Factory</b> for the model.
* It provides a create method for each non-abstract class of the model.
* <!-- end-user-doc -->
* @see fr.lip6.move.pnml.hlpn.terms.TermsPackage
* @generated
*/
public interface TermsFactory extends EFactory {
// NOTE: EMF-generated factory interface — regenerate from the model rather
// than editing the @generated members by hand.
/**
 * The singleton instance of the factory.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
TermsFactory eINSTANCE = fr.lip6.move.pnml.hlpn.terms.impl.TermsFactoryImpl.init();
/**
 * Returns a new object of class '<em>Declarations</em>'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return a new object of class '<em>Declarations</em>'.
 * @generated
 */
Declarations createDeclarations();
/**
 * Returns a new object of class '<em>Multiset Sort</em>'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return a new object of class '<em>Multiset Sort</em>'.
 * @generated
 */
MultisetSort createMultisetSort();
/**
 * Returns a new object of class '<em>Variable Decl</em>'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return a new object of class '<em>Variable Decl</em>'.
 * @generated
 */
VariableDecl createVariableDecl();
/**
 * Returns a new object of class '<em>Variable</em>'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return a new object of class '<em>Variable</em>'.
 * @generated
 */
Variable createVariable();
/**
 * Returns a new object of class '<em>Product Sort</em>'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return a new object of class '<em>Product Sort</em>'.
 * @generated
 */
ProductSort createProductSort();
/**
 * Returns a new object of class '<em>Tuple</em>'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return a new object of class '<em>Tuple</em>'.
 * @generated
 */
Tuple createTuple();
/**
 * Returns a new object of class '<em>Named Sort</em>'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return a new object of class '<em>Named Sort</em>'.
 * @generated
 */
NamedSort createNamedSort();
/**
 * Returns a new object of class '<em>User Sort</em>'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return a new object of class '<em>User Sort</em>'.
 * @generated
 */
UserSort createUserSort();
/**
 * Returns a new object of class '<em>Named Operator</em>'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return a new object of class '<em>Named Operator</em>'.
 * @generated
 */
NamedOperator createNamedOperator();
/**
 * Returns a new object of class '<em>User Operator</em>'.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return a new object of class '<em>User Operator</em>'.
 * @generated
 */
UserOperator createUserOperator();
/**
 * Returns the package supported by this factory.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return the package supported by this factory.
 * @generated
 */
TermsPackage getTermsPackage();
} //TermsFactory
/* -*- C++ -*-; c-basic-offset: 4; indent-tabs-mode: nil */
/* This must be included before anything else */
#if HAVE_CONFIG_H
# include <config.h>
#endif
#include <boost/scoped_ptr.hpp>
#include "opflex/engine/internal/OpflexConnection.h"
#include "opflex/engine/internal/OpflexHandler.h"
#include "opflex/engine/internal/OpflexMessage.h"
#include "opflex/logging/internal/logging.hpp"
#include "opflex/util/LockGuard.h"
#include "yajr/transport/ZeroCopyOpenSSL.hpp"
#include "opflex/yajr/rpc/message_factory.hpp"
static uv_once_t ssl_once = UV_ONCE_INIT;
namespace opflex {
namespace engine {
namespace internal {
using rapidjson::Value;
using std::string;
using boost::scoped_ptr;
using yajr::rpc::OutboundRequest;
using yajr::rpc::OutboundResult;
using yajr::rpc::OutboundError;
using yajr::transport::ZeroCopyOpenSSL;
// Constructs the connection, creating its protocol handler via the factory
// and starting at generation 0 / request id 1.
// NOTE(review): connect() is invoked from the constructor; if connect() is
// virtual, derived-class overrides are NOT dispatched here (C++ rules) —
// confirm against the class declaration.
OpflexConnection::OpflexConnection(HandlerFactory& handlerFactory)
: handler(handlerFactory.newHandler(this))
,requestId(1) ,connGeneration(0)
{
uv_mutex_init(&queue_mutex);
connect();
}
// Drains the write queue, frees the handler, and destroys the queue mutex.
OpflexConnection::~OpflexConnection() {
cleanup();
if (handler)
delete handler;
uv_mutex_destroy(&queue_mutex);
}
// One-time OpenSSL initialization callback; guarded by ssl_once.
static void init_ssl() {
ZeroCopyOpenSSL::initOpenSSL(true);
}
// Thread-safe, idempotent SSL setup via uv_once.
void OpflexConnection::initSSL() {
uv_once(&ssl_once, init_ssl);
}
// Base class does nothing; subclasses establish the actual transport.
void OpflexConnection::connect() {}
// Bumps the connection generation (so stale queued messages can be detected
// by processWriteQueue) and frees every message still queued for writing.
void OpflexConnection::cleanup() {
util::LockGuard guard(&queue_mutex);
connGeneration += 1;
while (!write_queue.empty()) {
delete write_queue.front().first;
write_queue.pop_front();
}
}
// Disconnecting simply discards pending writes at this level.
void OpflexConnection::disconnect() {
cleanup();
}
void OpflexConnection::close() {
disconnect();
}
// Delegates readiness to the protocol handler.
bool OpflexConnection::isReady() {
return handler->isReady();
}
// Hook for subclasses; intentionally empty here.
void OpflexConnection::notifyReady() {
}
// Serializes one message to the peer, choosing the yajr outbound wrapper
// that matches the message type. No-op when there is no peer.
// Does NOT take ownership of 'message'; callers manage its lifetime.
void OpflexConnection::doWrite(OpflexMessage* message) {
if (getPeer() == NULL) return;
PayloadWrapper wrapper(message);
switch (message->getType()) {
case OpflexMessage::REQUEST:
{
yajr::rpc::MethodName method(message->getMethod().c_str());
// Use the message's own xid if set; otherwise allocate the next one.
uint64_t xid = message->getReqXid();
if (xid == 0) xid = requestId++;
OutboundRequest outm(wrapper, &method, xid, getPeer());
outm.send();
}
break;
case OpflexMessage::RESPONSE:
{
OutboundResult outm(*getPeer(), wrapper, message->getId());
outm.send();
}
break;
case OpflexMessage::ERROR_RESPONSE:
{
OutboundError outm(*getPeer(), wrapper, message->getId());
outm.send();
}
break;
}
}
// Flushes every queued message to the peer, dropping messages queued before
// the most recent reconnect (their generation is older than connGeneration).
//
// BUG FIX: the previous version hit 'continue' on a stale entry WITHOUT
// popping it, spinning forever on the queue head (while holding queue_mutex)
// and leaking the message. Pop and take ownership before the check.
void OpflexConnection::processWriteQueue() {
    util::LockGuard guard(&queue_mutex);
    while (!write_queue.empty()) {
        const write_queue_item_t qi = write_queue.front();
        write_queue.pop_front();
        // scoped_ptr frees the message on every path out of this iteration.
        scoped_ptr<OpflexMessage> message(qi.first);
        // Avoid writing messages from a previous reconnect attempt
        if (qi.second < connGeneration) {
            LOG(DEBUG) << "Ignoring " << message->getMethod()
                       << " of type " << message->getType();
            continue;
        }
        doWrite(message.get());
    }
}
// Sends a message either synchronously (written and freed immediately) or
// asynchronously (queued with the current generation for processWriteQueue).
// Takes ownership of 'message' in both modes.
// NOTE(review): messagesReady() is called even on the sync path — presumably
// a cheap notification hook; confirm in the subclass implementations.
void OpflexConnection::sendMessage(OpflexMessage* message, bool sync) {
if (sync) {
scoped_ptr<OpflexMessage> messagep(message);
doWrite(message);
} else {
util::LockGuard guard(&queue_mutex);
write_queue.push_back(std::make_pair(message, connGeneration));
}
messagesReady();
}
} /* namespace internal */
} /* namespace engine */
} /* namespace opflex */ |
package org.apache.wicket.markup.html.form;
import org.apache.wicket.markup.resolver.*;
import org.apache.wicket.*;
import org.apache.wicket.markup.*;
import org.apache.wicket.markup.parser.filter.*;
import org.apache.wicket.markup.html.*;
import org.apache.wicket.markup.parser.*;
import org.apache.wicket.util.string.*;
import org.apache.wicket.request.cycle.*;
import org.apache.wicket.markup.html.internal.*;
import org.apache.wicket.model.*;
/**
 * Resolves {@code <wicket:label>} tags: finds the form component the label
 * relates to (via a "for" attribute or an enclosing auto-label) and renders
 * the label text resolved from the component's label model, its default
 * resource label, a "key" attribute, or the literal tag body.
 */
public class <API key> implements IComponentResolver{

    static{
        // Register "label" as a recognized wicket tag name.
        WicketTagIdentifier.<API key>("label");
    }

    public Component resolve(final MarkupContainer container,final MarkupStream markupStream,final ComponentTag tag){
        // Only handle <wicket:label>; any other tag is not ours to resolve.
        if(!(tag instanceof WicketTag)||!"label".equals(((WicketTag)tag).getName())){
            return null;
        }
        Component related=null;
        // 1) An explicit for="..." attribute wins.
        if(tag.getAttribute("for")!=null){
            // Fix: the original assigned this through an unused local variable.
            related=AutoLabelResolver.<API key>(container,tag.getAttribute("for"));
        }
        // 2) Otherwise look for an enclosing auto-label container.
        if(related==null){
            if(container instanceof AutoLabelResolver.AutoLabel){
                related=((AutoLabelResolver.AutoLabel)container).getRelatedComponent();
            }
            if(related==null){
                final AutoLabelResolver.AutoLabel autoLabel=container.findParent((Class<AutoLabelResolver.AutoLabel>)AutoLabelResolver.AutoLabel.class);
                if(autoLabel!=null){
                    related=autoLabel.getRelatedComponent();
                }
            }
        }
        if(related==null){
            throw new <API key>("no related component found for <wicket:label>");
        }
        return new TextLabel("label"+container.getPage().getAutoIndex(),related);
    }

    /** Auto-component that renders the resolved label text in place of the tag body. */
    private static class TextLabel extends WebMarkupContainer{

        /** The component this label describes. */
        private final Component labeled;

        public TextLabel(final String id,final Component labeled){
            super(id);
            this.labeled=labeled;
            // Emit only the body text, not the <wicket:label> tag itself.
            this.setRenderBodyOnly(true);
        }

        protected void onComponentTag(final ComponentTag tag){
            // Force open-close tags open so a body can be rendered.
            if(tag.isOpenClose()){
                tag.setType(XmlTag.TagType.OPEN);
            }
            super.onComponentTag(tag);
        }

        public void onComponentTagBody(final MarkupStream markupStream,final ComponentTag openTag){
            final IModel<String> labelModel=this.findLabelContent(markupStream,openTag);
            this.<API key>(markupStream,openTag,(CharSequence)((labelModel!=null)?labelModel.getObject():""));
            if(labelModel!=null){
                if(this.labeled instanceof FormComponent){
                    // Share the model so validation messages use the same label text.
                    final FormComponent<?> fc=(FormComponent<?>)this.labeled;
                    fc.setLabel(labelModel);
                }
                else{
                    // Nobody keeps the model, so detach it ourselves.
                    labelModel.detach();
                }
            }
        }

        /**
         * Resolves the label text, trying in order: the labeled component's own
         * label model, a form component's default resource label, an explicit
         * key="..." resource lookup, and finally the literal markup body.
         * Returns null when none of these yields text.
         */
        private IModel<String> findLabelContent(final MarkupStream markupStream,final ComponentTag tag){
            if(this.labeled instanceof ILabelProvider){
                final ILabelProvider<String> provider=(ILabelProvider<String>)this.labeled;
                if(provider.getLabel()!=null&&!Strings.isEmpty((CharSequence)provider.getLabel().getObject())){
                    return provider.getLabel();
                }
            }
            if(this.labeled instanceof FormComponent){
                final FormComponent<?> formComponent=(FormComponent<?>)this.labeled;
                final String text=formComponent.getDefaultLabel("wicket:unknown");
                if(!"wicket:unknown".equals(text)&&!Strings.isEmpty((CharSequence)text)){
                    // Re-resolve lazily so locale changes are picked up.
                    return new <API key><String>(){
                        protected String load(){
                            return formComponent.getDefaultLabel("wicket:unknown");
                        }
                    };
                }
            }
            final String resourceKey=tag.getAttribute("key");
            if(resourceKey!=null){
                final String text=this.labeled.getString(resourceKey);
                if(!Strings.isEmpty((CharSequence)text)){
                    return new StringResourceModel(resourceKey,this.labeled,null,new Object[0]);
                }
            }
            // Render the tag body into a buffer and use the captured text.
            final String text2=new ResponseBufferZone(RequestCycle.get(),markupStream){
                protected void <API key>(){
                    TextLabel.this.onComponentTagBody(markupStream,tag);
                }
            }.execute().toString();
            if(!Strings.isEmpty((CharSequence)text2)){
                return Model.of(text2);
            }
            return null;
        }
    }
}
package tk.zcraft.bans.utils;
/**
 * Utility for parsing ban-duration strings of the form {@code "<amount>:<unit>"}
 * (e.g. {@code "10:m"}) into an absolute expiry timestamp.
 */
public class TimeUrils {
    // Millisecond factors per supported unit. These are long literals because the
    // previous int arithmetic overflowed: 1000*60*60*24*30 already exceeds
    // Integer.MAX_VALUE, so month/year durations silently went negative.
    private static final long SECOND = 1000L;
    private static final long MINUTE = 60L * SECOND;
    private static final long HOUR = 60L * MINUTE;
    private static final long DAY = 24L * HOUR;
    private static final long MONTH = 30L * DAY;
    private static final long YEAR = 365L * DAY; // was 30*365 days (~30 years) — a year is 365 days

    /**
     * Parses a duration like {@code "5:h"} and returns the absolute time (epoch
     * millis) at which it expires, i.e. now + duration.
     *
     * @param s duration string {@code "<positive int>:<unit>"} where unit is one
     *          of s, m, h, d, msc (month) or y, compared case-insensitively
     * @return expiry timestamp in millis; the current time unchanged for an
     *         unknown unit (legacy behaviour); or -1 if the string cannot be
     *         parsed or the amount is not positive
     */
    public static long parse(String s) {
        long now = System.currentTimeMillis();
        try {
            String[] time = s.split(":");
            int amount = Integer.parseInt(time[0]);
            if (amount <= 0) {
                System.out.println("[ZBANS] Variable cannot be smaller than 0!");
                return -1;
            }
            String unit = time[1];
            if (unit.equalsIgnoreCase("s")) {
                return now + amount * SECOND;
            } else if (unit.equalsIgnoreCase("m")) {
                return now + amount * MINUTE;
            } else if (unit.equalsIgnoreCase("h")) {
                return now + amount * HOUR;
            } else if (unit.equalsIgnoreCase("d")) {
                return now + amount * DAY;
            } else if (unit.equalsIgnoreCase("msc")) {
                return now + amount * MONTH;
            } else if (unit.equalsIgnoreCase("y")) {
                return now + amount * YEAR;
            }
            // Unknown unit: preserve the original fall-through behaviour of
            // returning "now" with no offset added.
            return now;
        } catch (Exception e) {
            // Malformed input (no ':', non-numeric amount, missing unit, null).
            System.out.println("[ZBANS] Cannot parse time from string!");
        }
        return -1;
    }
}
package p;
// Marker class used as the class bound of the intersection type in Foo (T extends C & I).
class C {
}
// Marker interface used as the interface bound of the intersection type in Foo (T extends C & I).
interface I {
}
// Test fixture exercising generic methods with intersection-type bounds (T extends C & I).
public class Foo<T extends C & I> {
/**
 * Delegates to the instance method {@link #getX()} of the argument.
 *
 * @param foo the instance whose {@code getX()} result is returned
 * @return the {@code Foo<U>} produced by {@code foo.getX()} (always {@code null} here)
 */
public static <T extends C & I, U extends C & I> Foo<U> getX(Foo<T> foo) {
return foo.getX();
}
// Generic instance method; the type argument U is inferred at the call site.
<U extends C & I> Foo<U> getX() {
return null;
}
// Field initialised via the static method; U is inferred as a wildcard-compatible type.
Foo<?> f2 = Foo.getX(this);
}
import sys
import numpy as np
import matplotlib.image as mpimg
import numpy.random as npr
import Image
mode = 'L'

# Shuffle the pixels of every image named on the command line and write the
# result to ../new-images/<same name>.
# NOTE(review): assumes each input is a square, single-channel (grayscale)
# image — only shape[0] is used for both dimensions; confirm against callers.
for path in sys.argv[1:]:
    img = mpimg.imread(path)
    n = np.shape(img)[0]
    # Flatten, randomly permute the pixel order, then restore the square shape.
    flat = np.reshape(img, (n * n))
    flat = npr.permutation(flat)
    shuffled = np.reshape(flat, (n, n))
    im_new = Image.new(mode, np.shape(shuffled))
    data = np.ravel(shuffled)
    # mpimg.imread yields floats in [0, 1]; scale to 0..255. Clamp so that a
    # value of exactly 1.0 maps to 255 instead of 256 (the old floor(data*256)
    # overflowed the 8-bit 'L' range for pure-white pixels).
    data = np.minimum(np.floor(data * 256), 255)
    im_new.putdata(data)
    im_new.save("../new-images/%s" % (path))
package org.python.pydev.debug.ui.actions;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.debug.core.model.ISuspendResume;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.ui.IWorkbenchPart;
/**
 * Contract for debug targets that support "set next statement" / "run to line"
 * style repositioning of the instruction pointer.
 *
 * @author Hussain Bohra
 */
public interface ISetNextTarget {
/**
 * Moves execution of the suspended target to the line given by the selection.
 *
 * @param part the workbench part (typically the editor) in which the line was chosen
 * @param selection the selection identifying the target line
 * @param target the suspended debug element whose next statement is to be changed
 * @return {@code true} if the operation was performed successfully
 * @throws CoreException if the target cannot be repositioned
 */
public boolean setNextToLine(IWorkbenchPart part, ISelection selection, ISuspendResume target) throws CoreException;
/**
 * Tests whether "set next statement" is currently possible for the selection.
 *
 * @param part the workbench part in which the line was chosen
 * @param selection the selection identifying the candidate line
 * @param target the debug element that would be repositioned
 * @return {@code true} if {@link #setNextToLine} may be invoked with these arguments
 */
public boolean canSetNextToLine(IWorkbenchPart part, ISelection selection, ISuspendResume target);
}
package com.opti.rental.ui.providers;
import java.util.List;
import org.eclipse.jface.viewers.IColorProvider;
import org.eclipse.jface.viewers.<API key>;
import org.eclipse.jface.viewers.LabelProvider;
import org.eclipse.jface.viewers.Viewer;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.Image;
import com.opcoach.training.rental.Customer;
import com.opcoach.training.rental.Rental;
import com.opcoach.training.rental.RentalAgency;
import com.opcoach.training.rental.RentalObject;
import com.opti.rental.ui.Palette;
import com.opti.rental.ui.RentalUIActivator;
import com.opti.rental.ui.RentalUIConstants;
/**
 * Content, label and colour provider for the rental-agency tree viewer.
 * Agencies are the root elements; under each agency three intermediate
 * {@link Node}s group its customers, rentals ("locations") and objects to rent.
 * Colours are delegated to the palette selected in the preferences.
 */
public class RentalProvider extends LabelProvider implements <API key>, IColorProvider, RentalUIConstants {
	private static final long serialVersionUID = -<API key>;
	/** Shared empty array, returned whenever an element has no children. */
	private static final Object[] EMPTY_RESULT = new Object[0];

	/**
	 * Returns the root elements. The viewer input is expected to be a
	 * {@code List<RentalAgency>}; any other (or null) input yields an empty array.
	 */
	@Override
	public Object[] getElements(Object inputElement) {
		if (inputElement instanceof List<?>) {
			@SuppressWarnings("unchecked")
			List<RentalAgency> agencies = (List<RentalAgency>) inputElement;
			return agencies.toArray();
		}
		return EMPTY_RESULT;
	}

	/**
	 * An agency expands into its three grouping nodes; a grouping node expands
	 * into the corresponding agency collection.
	 */
	@Override
	public Object[] getChildren(Object parentElement) {
		if (parentElement instanceof RentalAgency) {
			RentalAgency agency = (RentalAgency) parentElement;
			return new Node[] {
					new Node(NodeType.CUSTOMER, agency),
					new Node(NodeType.LOCATION, agency),
					new Node(NodeType.RENTAL_OBJECT, agency)};
		}
		if (parentElement instanceof Node) {
			return ((Node) parentElement).getChildren();
		}
		return EMPTY_RESULT;
	}

	/** Parent links are not tracked; the viewer tolerates a {@code null} parent. */
	@Override
	public Object getParent(Object element) {
		return null;
	}

	@Override
	public boolean hasChildren(Object element) {
		// Only agencies and grouping nodes expand; leaf domain objects do not.
		return (element instanceof Node || element instanceof RentalAgency);
	}

	@Override
	public String getText(Object element) {
		if (element instanceof RentalAgency) {
			return ((RentalAgency) element).getName();
		} else if (element instanceof Customer) {
			return ((Customer) element).getDisplayName();
		} else if (element instanceof RentalObject) {
			return ((RentalObject) element).getName();
		}
		// Node instances fall through here and are rendered via their toString().
		return super.getText(element);
	}

	@Override
	public Color getForeground(Object element) {
		final Palette palette = currentPalette();
		return (palette == null) ? null : palette.getProvider().getForeground(element);
	}

	@Override
	public Color getBackground(Object element) {
		final Palette palette = currentPalette();
		return (palette == null) ? null : palette.getProvider().getBackground(element);
	}

	/**
	 * Resolves the palette currently selected in the preference store.
	 * Shared by {@link #getForeground(Object)} and {@link #getBackground(Object)},
	 * which previously duplicated this lookup verbatim.
	 *
	 * @return the configured palette, or {@code null} if none is set or known
	 */
	private Palette currentPalette() {
		final String paletteId = RentalUIActivator.getDefault().getPreferenceStore().getString(PREF_PALETTE);
		if (paletteId == null) {
			return null;
		}
		return RentalUIActivator.getDefault().getPaletteManager().get(paletteId);
	}

	@Override
	public Image getImage(Object element) {
		if (element instanceof Rental) {
			return RentalUIActivator.getDefault().getImageRegistry().get(IMG_RENTAL);
		} else if (element instanceof Customer) {
			return RentalUIActivator.getDefault().getImageRegistry().get(IMG_CUSTOMER);
		} else if (element instanceof RentalObject) {
			return RentalUIActivator.getDefault().getImageRegistry().get(IMG_RENTAL_OBJECT);
		} else if (element instanceof RentalAgency) {
			return RentalUIActivator.getDefault().getImageRegistry().get(IMG_AGENCY);
		} else if (element instanceof Node) {
			return RentalUIActivator.getDefault().getImageRegistry().get(IMG_NODE);
		}
		return super.getImage(element);
	}

	/** The three grouping categories displayed under each agency. */
	public enum NodeType {
		CUSTOMER("Client"),
		LOCATION("Locations"),
		RENTAL_OBJECT("Objets à louer");
		private String label;
		private NodeType(final String pLabel) {
			label = pLabel;
		}
		/** @return the user-visible label for this category */
		public String getLabel() {
			return this.label;
		}
	}

	/**
	 * Intermediate tree node grouping one category of an agency's contents.
	 * Deliberately a non-static inner class: equality includes the enclosing
	 * provider instance (see {@link #getOuterType()}).
	 */
	public class Node {
		private NodeType type;
		private RentalAgency agency;
		public Node(NodeType type, RentalAgency agency) {
			super();
			this.type = type;
			this.agency = agency;
		}
		/** @return the agency collection matching this node's category */
		public Object[] getChildren() {
			Object[] result = null;
			switch (this.type) {
				case CUSTOMER:
					result = this.agency.getCustomers().toArray();
					break;
				case LOCATION:
					result = this.agency.getRentals().toArray();
					break;
				case RENTAL_OBJECT:
					result = this.agency.getObjectsToRent().toArray();
					break;
				default:
					result = EMPTY_RESULT;
			}
			return (null == result) ? EMPTY_RESULT : result;
		}
		@Override
		public String toString() {
			return this.type.getLabel();
		}
		@Override
		public int hashCode() {
			final int prime = 31;
			int result = 1;
			result = prime * result + getOuterType().hashCode();
			result = prime * result + ((agency == null) ? 0 : agency.hashCode());
			result = prime * result + ((type == null) ? 0 : type.hashCode());
			return result;
		}
		@Override
		public boolean equals(Object obj) {
			if (this == obj)
				return true;
			if (obj == null)
				return false;
			if (getClass() != obj.getClass())
				return false;
			Node other = (Node) obj;
			if (!getOuterType().equals(other.getOuterType()))
				return false;
			if (agency == null) {
				if (other.agency != null)
					return false;
			} else if (!agency.equals(other.agency))
				return false;
			if (type != other.type)
				return false;
			return true;
		}
		private RentalProvider getOuterType() {
			return RentalProvider.this;
		}
	}

	/** No state is cached between inputs, so nothing to do here. */
	@Override
	public void inputChanged(Viewer viewer, Object oldInput, Object newInput) {
	}
}
package org.mapdb;
import java.util.Map;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import javax.swing.plaf.basic.<API key>;
import static org.junit.Assert.fail;
/**
 * Regression test for issue 69: repeatedly overwrite a bounded set of keys in a
 * hash map with values of slowly growing length; any throwable fails the test.
 */
public class Issue69Test {

	private DB db;

	@Before
	public void setUp() {
		db = DBMaker.newTempFileDB()
				.journalDisable()
				.checksumEnable()
				.asyncWriteDisable()
				.<API key>()
				.make();
	}

	@After
	public void tearDown() throws <API key> {
		db.close();
	}

	@Test
	public void <API key>() throws Exception {
		try {
			final Map<String, String> map = db.getHashMap("test");
			final StringBuilder builder = new StringBuilder();
			final long maxIterations = 1000000;
			final long maxKeys = 1000;
			int valueLength = 1024;
			for (long iteration = 1; iteration < maxIterations; iteration++) {
				// Grow the value every 10k iterations so record sizes keep changing.
				if (iteration % 10000 == 0) {
					valueLength++;
					// System.out.println("Iteration: " + iteration + "; Value length: " + valueLength);
				}
				final String key = "key" + (int) (Math.random() * maxKeys);
				// setLength() on the empty builder pads it with NUL chars up to valueLength.
				builder.setLength(valueLength);
				map.put(key, builder.toString());
			}
		} catch (Throwable e) {
			// Dump the whole cause chain before failing, to aid diagnosis.
			for (Throwable cause = e; cause != null; cause = cause.getCause()) {
				for (StackTraceElement element : cause.getStackTrace()) {
					System.out.println(element);
				}
				System.out.println();
			}
			fail();
		}
	}
}
<nav id="onboarding-navbar" class="nav has-shadow">
<div class="container">
<div class="nav-left">
<a class="nav-item is-brand" href="/">
<img alt="Starcity" src="/assets/svg/logo-wordmark.svg"/>
</a>
</div>
<div class="nav-right">
<div class="nav-item" style="justify-content: flex-end;">
<p class="control">
<a id="logout" class="button is-primary is-pulled-right" href="/logout">Logout</a>
</p>
</div>
</div>
</div>
</nav> |
package com.openMap1.mapper.actions;
import java.util.ArrayList;
import java.util.List;
import java.util.Iterator;
import java.util.Vector;
import java.util.Hashtable;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EAttribute;
import org.eclipse.emf.ecore.EReference;
import org.eclipse.emf.edit.domain.EditingDomain;
import org.eclipse.jface.action.IAction;
import org.eclipse.jface.action.IContributionItem;
import org.eclipse.jface.action.MenuManager;
import com.openMap1.mapper.util.GenUtil;
import com.openMap1.mapper.util.ModelUtil;
import com.openMap1.mapper.core.MapperException;
import com.openMap1.mapper.presentation.MapperEditor;
import com.openMap1.mapper.structures.MappableAssociation;
import com.openMap1.mapper.structures.StructureDefinition;
import com.openMap1.mapper.views.LabelledEClass;
import com.openMap1.mapper.views.WorkBenchUtil;
import com.openMap1.mapper.AssocEndMapping;
import com.openMap1.mapper.AssocMapping;
import com.openMap1.mapper.AttributeDef;
import com.openMap1.mapper.ElementDef;
import com.openMap1.mapper.MappedStructure;
import com.openMap1.mapper.MapperPackage;
import com.openMap1.mapper.MaxMult;
import com.openMap1.mapper.NodeDef;
import com.openMap1.mapper.ObjMapping;
/**
* Class to create mapper editor menu actions depending on the model class
* selected in the mapped class model view, and possibly on other criteria.
*
* @author robert
*
*/
public class <API key> {
private MapperEditor mapperEditor;
private MappedStructure mappedStructure;
private List<IAction> <API key>;
public List<IAction> <API key>() {return <API key>;}
private List<IContributionItem> <API key>;
public List<IContributionItem> <API key>() {return <API key>;}
public <API key>(MapperEditor mapperEditor) {
this.mapperEditor = mapperEditor;
Object thing = mapperEditor.getEditingDomain().getResourceSet().getResources().get(0).getContents().get(0);
if (thing instanceof MappedStructure)
{
mappedStructure = (MappedStructure)thing;
}
else System.out.println("Not editing a mapped structure");
}
/**
* Create menu actions depending on the model class
* selected in the mapped class model view, and possibly on other criteria.
* @param domain the mapper editor editing domain
* @param object the object selected in the mapper editor
*/
public void <API key>(EditingDomain domain, Object object, Object <API key>)
{
<API key> = new ArrayList<IAction>();
<API key> = new ArrayList<IContributionItem>();
if (object instanceof ElementDef)
{
ElementDef el = (ElementDef)object;
<API key>(domain, el, <API key>);
}
else if (object instanceof AttributeDef)
{
AttributeDef at = (AttributeDef)object;
<API key>(domain, at, <API key>);
}
else if (object instanceof ObjMapping)
{
ObjMapping om = (ObjMapping)object;
<API key>(om);
}
}
/**
* Actions when an Element node is selected in the mapped structure tree
* @param domain the mapper editor editing domain
* @param el the Element node selected
* @param fromClassModel the object currently selected in the mapped class model view
*/
private void <API key>(EditingDomain domain, ElementDef el, Object fromClassModel)
{
/* for any element of a complex type,
* which has not yet been expanded in the tree, add actions to expand it,
* or to create a new mapping set, or to link to an existing one. */
if ((el.getType() != null) && (!el.getType().equals(""))
&& (attachedStructure() != null))
{
if (!el.isExpanded()) <API key>.add(new <API key>(domain,el,attachedStructure()));
<API key>.add(new <API key>(mapperEditor,domain,el,attachedStructure()));
<API key>.add(new <API key>(mapperEditor,domain,el));
}
/* If the ElementDef has any child ElementDefs of unbounded max multiplicity,
* add an option to add an AttributeDef with name ElementDefImpl.<API key>
* to all its child elements */
if (<API key>(el))
{
<API key>.add(new <API key>(mapperEditor,domain,el));
}
EClass selectedClass = extractEClassFrom(fromClassModel);
// if a Class has been selected in the Class Model View, add the necessary mapping actions
if (selectedClass != null)
{
// always add an action to map to the class
String qualifiedName = ModelUtil.<API key>(selectedClass);
String nextSubset = nextSubset(mappingRoot(el),qualifiedName);
<API key>.add(new <API key>(domain, el, selectedClass.getName(),
selectedClass.getEPackage().getName(),nextSubset,""));
// for V3 Mappings to an RMIM class model, add a chain mapping menu item
if (fromClassModel instanceof LabelledEClass)
<API key>(domain,el,(LabelledEClass)fromClassModel);
/* add Property and Association Mapping actions
only if there is already an object mapping to the selected class */
<API key>(domain, el, selectedClass, fromClassModel);
<API key>(domain, el, selectedClass,fromClassModel);
// add an auto-mapping sub-menu
addAutoMappingMenu(domain, el, selectedClass,fromClassModel);
}
}
/**
* @param el an ElementDef
* @return true if it has any child ElementDefs of unbounded max multiplicity,
* or if it has more than one child ElementDef
*/
private boolean <API key>(ElementDef el)
{
boolean unbounded = false;
int children = 0;
for (Iterator<ElementDef> it = el.getChildElements().iterator();it.hasNext();)
{
ElementDef child = it.next();
if (child.getMaxMultiplicity() == MaxMult.UNBOUNDED) unbounded = true;
children++;
}
if (children > 1) unbounded = true;
return unbounded;
}
/**
* @param fromClassModel The object selected in the class model view is either an EClass (vanilla class model view)
* or a LabelledEClass (RMIM class model view)
* @return the EClass extracted from it, in either case
*/
private EClass extractEClassFrom(Object fromClassModel)
{
EClass selectedClass = null;
if (fromClassModel != null)
{
if (fromClassModel instanceof EClass)
selectedClass = (EClass)fromClassModel;
if (fromClassModel instanceof LabelledEClass)
selectedClass = ((LabelledEClass)fromClassModel).eClass();
}
return selectedClass;
}
/**
* Actions when an Attribute node is selected in the mapped structure tree
* @param domain the mapper editor editing domain
* @param at the Attribute node selected
* @param fromClassModel the object currently selected in the mapped class model view
*/
private void <API key>(EditingDomain domain, AttributeDef at, Object fromClassModel)
{
EClass selectedClass = extractEClassFrom(fromClassModel);
// if a Class has been selected in the Class Model View, add the necessary mapping actions
if (selectedClass != null)
{
/* add Property and Association Mapping actions
only if there is already an object mapping to the selected class */
<API key>(domain, at, selectedClass,fromClassModel);
<API key>(domain, at, selectedClass,fromClassModel);
// add an auto-mapping sub-menu
addAutoMappingMenu(domain, at, selectedClass,fromClassModel);
}
}
private MappedStructure mappingRoot(NodeDef nd)
{
return ModelUtil.getMappedStructure(nd);
}
// Auto-Mapping (advance property mapping) Menu
private void addAutoMappingMenu(EditingDomain domain, NodeDef nd, EClass selectedClass, Object fromClassModel)
{
MenuManager autoSubMenu = new MenuManager("Pre-map");
<API key>.add(autoSubMenu);
// add menu items to pre-map any property of the class, or of a superclass
Vector<String> allProperties = allPropertyNames(selectedClass,"");
for (Iterator<String> it = allProperties.iterator(); it.hasNext();)
{
String classProp = it.next(); // of the form 'class:property'
autoSubMenu.add(new <API key>(domain, nd, classProp,selectedClass,fromClassModel,true));
}
// add a menu item to pre-map the class
String className = selectedClass.getName();
autoSubMenu.add(new <API key>(domain, nd,className,selectedClass,fromClassModel,false));
}
// Property Mapping sub-menu
/**
* If there are any object mappings to the class selected in the mapped class model view,
* make actions to add a property mapping to that class on the selected structure node
* @param domain
* @param nd
* @param selectedClass
*/
private void <API key>(EditingDomain domain, NodeDef nd, EClass selectedClass, Object fromClassModel)
{
Vector<String> subsets = getMappedSubsets(mappingRoot(nd), fromClassModel);
if (subsets.size() > 0)
{
MenuManager propSubMenu = new MenuManager("Map Property");
<API key>.add(propSubMenu);
for (Iterator<String> is = subsets.iterator();is.hasNext();)
{
String subset = is.next();
Vector<String> allProperties = allPropertyNames(selectedClass,subset);
<API key>(selectedClass,nd,allProperties,subset);
for (Iterator<String> it = allProperties.iterator(); it.hasNext();)
{
String classProp = it.next(); // of the form 'class:property'
propSubMenu.add(new <API key>(domain, nd, classProp,selectedClass,subset,""));
}
}
}
}
/**
* If there are any object mappings to the class selected in the mapped class model view,
* make actions to add any allowed association mapping to that class on the selected structure node
* @param domain
* @param nd
* @param selectedClass
*/
private void <API key>(EditingDomain domain, NodeDef nd, EClass selectedClass, Object fromClassModel)
{
Vector<String> subsets = getMappedSubsets(mappingRoot(nd), fromClassModel);
if (subsets.size() > 0)
{
MenuManager assocSubMenu = new MenuManager("Map Association");
<API key>.add(assocSubMenu);
//find existing object mappings with their subsets
Hashtable <String,Vector<String>> theClassMappings = allClassMappings(mappingRoot(nd));
for (Iterator<String> is = subsets.iterator();is.hasNext();)
{
String thisSubset = is.next();
//find associations from the selected class to these classes
Vector<MappableAssociation> <API key> = <API key>(selectedClass,thisSubset, theClassMappings);
for (Iterator<MappableAssociation> it = <API key>.iterator(); it.hasNext();)
{
MappableAssociation am = it.next();
assocSubMenu.add(new <API key>(domain, nd, am,""));
}
}
}
}
/**
* @param root root of the MappedStructure
* @param fromClassModel a node selected in the class model view; EClass or LabelledEClass
* @return the mapped subsets of the class in the mapping. If it s an RMIM view,
* these are only those consistent with the position of the class in the RMIM
*/
private Vector<String> getMappedSubsets(EObject root, Object fromClassModel)
{
Vector<String> subsets = new Vector<String>();
if (fromClassModel instanceof EClass)
subsets = mappedSubsets(root, ModelUtil.<API key>((EClass)fromClassModel));
if (fromClassModel instanceof LabelledEClass)
subsets = <API key>(root,(LabelledEClass)fromClassModel);
return subsets;
}
/**
* @param root the MappedStructure root of a mapping set
* @param qualifiedClassName a class name preceded by its package name, if nonempty
* @return a Vector of all mapped subsets; an empty Vector if there are no mappings
*/
public static Vector<String> mappedSubsets(EObject root, String qualifiedClassName)
{
Vector<String> subsets = new Vector<String>();
if (allClassMappings(root).get(qualifiedClassName) != null)
subsets = allClassMappings(root).get(qualifiedClassName);
return subsets;
}
/**
* @param root the MappedStructure root of a mapping set
* @param qualifiedClassName a class name preceded by its package name, if nonempty
* @return the subset name to use for the next object mapping to that class,
* avoiding all clashes with subset names already used
*/
public static String nextSubset(EObject root, String qualifiedClassName)
{
String subset = "";
Vector<String> subsets = mappedSubsets(root,qualifiedClassName);
if (subsets.size() > 0)
{
boolean clash = true;
int index = 1;
while (clash)
{
subset = "s" + index;
clash = GenUtil.inVector(subset, subsets);
index++;
}
}
return subset;
}
/**
* Find all class mappings in a subtree of the mapped structure tree
* @param EObject root of the subtree
* @return Hashtable<String,Vector<String>> for each qualified class name, a Vector of its mapped subsets,
* or null if there are no mappings
*/
public static Hashtable<String,Vector<String>> allClassMappings(EObject root)
{
Hashtable<String,Vector<String>> addClassMappings = new Hashtable<String,Vector<String>>();
addClassMappings(addClassMappings,root);
return addClassMappings;
}
/**
* recursive descent of a model tree, finding all mapped subsets
* of all classes
* @param classMappings
* @param node
*/
public static void addClassMappings(Hashtable<String,Vector<String>> classMappings, EObject node)
{
if (node instanceof ObjMapping)
{
ObjMapping om = (ObjMapping)node;
String qClassName = om.<API key>(); // package name first
String subset = om.getSubset();
Vector<String> subsetsMapped = classMappings.get(qClassName);
if (subsetsMapped == null) subsetsMapped = new Vector<String>();
subsetsMapped.addElement(subset);
classMappings.put(qClassName, subsetsMapped);
}
for (Iterator<EObject> it = node.eContents().iterator(); it.hasNext();)
addClassMappings(classMappings,it.next());
}
/**
*
* @param selectedClass an ecore EClass object
* @return a Vector of 'className:propertyName' for the Class and all its superclasses
*/
private Vector<String> allPropertyNames(EClass selectedClass,String subset)
{
Vector<String> allProps = new Vector<String>();
addPropertyNames(allProps,selectedClass,subset);
return allProps;
}
/**
* Add to the Vector allProperties an entry 'className:pseudoPropertyName' for every
* pseudo-property of the class (regardless of the mapped subset)
* (Pseudo-properties in the mapped structure are converted to properties in the class
* model by property conversions)
* @param selectedClass
* @param nd a Node in the mapped structure
* @param allProperties the Vector that is to be extended with the pseudo-properties
*/
private void <API key>(EClass selectedClass,NodeDef nd, Vector<String>allProperties, String subset)
{
String className = subClassName(selectedClass,subset);
String qualifiedClassName = <API key>(selectedClass,subset);
Hashtable<String,String> pseudoProps = ModelUtil.getPseudoProperties(qualifiedClassName,nd);
for (Iterator<String> it = pseudoProps.keySet().iterator();it.hasNext();)
{allProperties.add(className + ":" + it.next());}
}
/**
* add 'className:propertyName' for all properties of this class, recursively going
* to superclasses , so properties of this class come first on the list
* @param allProps
* @param c
*/
private void addPropertyNames(Vector<String> allProps,EClass c,String subset)
{
for (Iterator<EAttribute> it = c.getEAttributes().iterator();it.hasNext();)
{allProps.addElement(subClassName(c,subset) + ":" + it.next().getName());}
for (Iterator<EClass> iu = c.getESuperTypes().iterator();iu.hasNext();)
{addPropertyNames(allProps,iu.next(),subset);}
}
/**
* @param ec a class
* @param subset a mapped subset
* @return the qualfied class name , followed by the subset name in brackets if not empty
*/
private String <API key>(EClass ec, String subset)
{
String subName = ModelUtil.<API key>(ec);
if (!subset.equals("")) subName = subName + "(" + subset + ")";
return subName;
}
/**
* @param ec a class
* @param subset a mapped subset
* @return the unqualified class name , followed by the subset name in brackets if not empty
*/
private String subClassName(EClass ec, String subset)
{
String subName = ec.getName();
if (!subset.equals("")) subName = subName + "(" + subset + ")";
return subName;
}
/**
* find all associations from the selected class (or one of its superclasses)
* to some other mapped class (or one of its superclasses)
* @param EClass the selectedClass
* @param Hashtable allClassMappings; the keys to this are the names of mapped classes
* @return
*/
private Vector<MappableAssociation> <API key>
(EClass selectedClass, String thisEndSubset, Hashtable<String,Vector<String>> allClassMappings)
{
Vector<MappableAssociation> assocs = new Vector<MappableAssociation>();
EClass originalClass = selectedClass;
EClass endClass = selectedClass;
<API key>(assocs, originalClass, endClass, thisEndSubset, allClassMappings);
return assocs;
}
/**
* recurse up through superclasses finding associations,
* but labelling them always with the original class
* @param assocs Vector of MappableAssociation objects being built up
* @param originalClass the original class selected in the mapped class model view
* @param endClass the original class or a superclass -end of the association
* @param allClassMappings supplies the names of all classes currently mapped
*/
private void <API key>(Vector<MappableAssociation> assocs,
EClass originalClass, EClass endClass, String thisEndSubset,
Hashtable<String,Vector<String>> allClassMappings)
{
// check all the associations of this class
for (Iterator<EReference> it = endClass.getEReferences().iterator();it.hasNext();)
{
EReference ref = it.next();
/* pick out only those associations whose other end class (or some subclasses of it)
* are mapped, and note the actual mapped subclasses */
for (Iterator<EClassSubset> iw = mappedSubClasses(ref, allClassMappings).iterator();iw.hasNext();)
{
EClassSubset ecs = iw.next();
MappableAssociation ma = new MappableAssociation(originalClass, thisEndSubset,ecs.eClass,ecs.subset, ref,false);
assocs.addElement(ma);
}
}
// repeat the checks for all superclasses of the selected class
for (Iterator<EClass> it = endClass.getESuperTypes().iterator(); it.hasNext();)
{<API key>(assocs,originalClass,it.next(),thisEndSubset, allClassMappings);}
}
/**
* if the other end class of the EReference (or any of its subclasses)
* are mapped classes, return a Vector the mapped classes;
* otherwise return an empty Vector
* @param am
* @param allClassMappings
* @return
*/
private Vector<EClassSubset> mappedSubClasses(EReference am, Hashtable<String,Vector<String>> allClassMappings)
{
Vector<EClassSubset> result = new Vector<EClassSubset>();
EClass endSuperClass = (EClass)am.getEType();
// find all subclasses of the class at the other end of the association
Vector<EClass> endSubClasses = new Vector<EClass>();
try {endSubClasses = mappedStructure.getAllSubClasses(endSuperClass);}
catch (Exception ex) {}
for (Iterator<EClass> it = endSubClasses.iterator();it.hasNext();)
{
EClass endSubClass = it.next();
String qualifiedClassName = ModelUtil.<API key>(endSubClass);
Vector<String> subsets = allClassMappings.get(qualifiedClassName);
// find all mapped subsets of each subclass
if (subsets != null) for (Iterator<String> is = subsets.iterator();is.hasNext();)
{
String subset = is.next();
result.add(new EClassSubset(endSubClass,subset));
}
}
return result;
}
private class EClassSubset{
EClass eClass;
String subset;
EClassSubset(EClass eClass,String subset)
{
this.eClass = eClass;
this.subset = subset;
}
}
/**
*
* @return the Structure definition attached to the current mapping set
*/
private StructureDefinition attachedStructure()
{
StructureDefinition res = null;
try
{
res = WorkBenchUtil.mappingRoot(mapperEditor).<API key>();
}
catch (MapperException ex) {}
return res;
}
// Chain Mapping Menu Item for V3 RMIM mappings
private void <API key>(EditingDomain domain,ElementDef el,LabelledEClass selected)
{
EObject root = mappingRoot(el);
Vector<String> subsets = getMappedSubsets(root, selected);
String ancestorMappedClass = null;
String <API key> = null;
/* You can make chain mappings even if the class is already mapped, because you may
* want to make repeated mappings to data type classes like II */
{
boolean classIsMapped = false;
LabelledEClass current = selected;
// Iterate up through parent classes until you find one that is mapped
while ((current != null) && (!classIsMapped))
{
// find the subset to use when mapping this class
String className = ModelUtil.<API key>(current.eClass());
String subsetToMap = nextSubset(root,className);
current.setSubsetToMap(subsetToMap);
// find the next parent class and check if it exists and is mapped
current = current.parent();
if (current != null)
{
subsets = getMappedSubsets(mappingRoot(el), current);
if (subsets.size() > 0) // found the first ancestor class already mapped
{
classIsMapped = true;
// record the mapped class and subset, for use in the top association mapping
ancestorMappedClass = ModelUtil.<API key>(current.eClass());
<API key> = subsets.get(0);
}
}
} // end of iteration over ancestor classes
if (<API key> != null)
<API key>.add(new <API key>(domain,el,ancestorMappedClass,<API key>,selected));
}
}
/**
 * Restricts the list of mapped subsets for a class selected in the RMIM class
 * model view, keeping only those subsets that are consistent with the class's
 * position in the RMIM tree.
 *
 * @param root the MappedStructure root of a mapping set
 * @param selected a node selected in the RMIM class model view
 * @return the restricted list of mapped subsets for the selected class
 *         (generally expected to have length 1). If the class has one or more
 *         mapped subsets but no chain of association mappings linking it to
 *         its ancestor classes, the result is an empty Vector — an object
 *         mapping on its own does not identify which instance of an RMIM
 *         class (e.g. which instance of a data type class) it refers to.
 */
private Vector<String> <API key>(EObject root, LabelledEClass selected)
{
	// Unrestricted candidate list, keyed only by the qualified class name.
	Vector<String> candidates = mappedSubsets(root, ModelUtil.<API key>(selected.eClass()));
	// Nothing to disambiguate when there are no candidates, or only one.
	if ((candidates == null) || (candidates.size() < 2)) return candidates;
	String role = selected.associationName();
	LabelledEClass parent = selected.parent();
	// The top class of the RMIM has no parent class, so its subsets cannot be filtered.
	if (parent == null) return candidates;
	Vector<String> kept = new Vector<String>();
	// Recursively restrict the parent's mapped subsets; the parent must have
	// one or more object mappings for any candidate here to be confirmed.
	Vector<String> parentSubsets = <API key>(root, parent);
	if ((parentSubsets != null) && (parentSubsets.size() > 0))
	{
		// Retain a candidate subset only if an association mapping links it,
		// via the role name, to some mapped subset of the parent class
		// (there should normally be only one parent subset).
		for (String candidate : candidates)
			for (String parentSubset : parentSubsets)
				if (<API key>(root, parent.eClass(), parentSubset, selected.eClass(), candidate, role))
					kept.add(candidate);
	}
	return kept;
}
/**
 * @param root MappedStructure root
 * @param parent parent class
 * @param parentSubset mapped subset of parent class
 * @param selected class selected in the class model view
 * @param subset a possible subset of the selected class
 * @param assocName role name to go from the parent class to the selected class
 * @return true if the association mapping exists with all these parameters
 */
private boolean <API key>(EObject root, EClass parent,String parentSubset,
		EClass selected,String subset,String assocName)
{
	List<EObject> assocMappings = ModelUtil.getEObjectsUnder(root, MapperPackage.eINSTANCE.getAssocMapping());
	for (EObject eo : assocMappings)
	{
		AssocMapping am = (AssocMapping)eo;
		// end 2 is the navigable end (parent -> selected); end 1 is the parent end
		AssocEndMapping navigable = am.getMappedEnd2();
		AssocEndMapping other = am.getMappedEnd1();
		if ((navigable.<API key>().equals(ModelUtil.<API key>(selected))) &&
			(navigable.getSubset().equals(subset)) &&
			(navigable.getMappedRole().equals(assocName)) &&
			(other.<API key>().equals(ModelUtil.<API key>(parent))) &&
			(other.getSubset().equals(parentSubset)))
			// short-circuit: one matching mapping is enough, no need to scan the rest
			return true;
	}
	return false;
}
// Expanding the class model tree to show the class of the selected mapping
private void <API key>(ObjMapping om)
{
// Wrap the selected object mapping and append it to the pending collection;
// presumably a consumer elsewhere performs the actual tree expansion —
// NOTE(review): confirm against the reader of this collection.
<API key>.add(new <API key>(om));
}
} |
'use strict';

/**
 * Navbar controller: exposes the navigation items and the label of the
 * section currently displayed (derived from the second URL path segment).
 */
angular.module('hexafacture')
  .controller('NavbarCtrl', function ($location) {
    var vm = this;

    // route segment (key) -> human-readable label shown in the navbar
    var itemsMap = {
      'emetteur': 'émetteur',
      'client': 'client',
      'composition': 'composition',
      'facture': 'facture',
      'acompte': 'acompte',
      'mentionLegale': 'mention légale',
      'remiseCommerciale': 'remise commerciale'
    };

    vm.items = _.pairs(itemsMap);

    // Route segment of the current location, i.e. the key into itemsMap.
    function currentKey() {
      return $location.path().split('/')[2];
    }

    // Label of the current section (undefined on unknown routes).
    vm.currentLocation = function () {
      return itemsMap[currentKey()];
    };

    // All items except the one for the current location.
    // BUG FIX: the previous version compared the item KEY (pair[0], e.g.
    // 'emetteur') against the LABEL returned by currentLocation() (e.g.
    // 'émetteur'), so entries whose label differs from their key
    // ('émetteur', 'mention légale', 'remise commerciale') were never
    // filtered out of the menu. Compare key against key instead.
    vm.getItems = function () {
      return _.filter(vm.items, function (pair) {
        return pair[0] !== currentKey();
      });
    };
  });
/** Generated Model - DO NOT CHANGE */
package de.metas.handlingunits.model;
import java.math.BigDecimal;
import java.sql.ResultSet;
import java.util.Properties;
import org.compiere.util.Env;
/** Generated Model for <API key>
* @author Adempiere (generated)
*/
@SuppressWarnings("javadoc")
public class <API key> extends org.compiere.model.PO implements <API key>, org.compiere.model.I_Persistent
{
private static final long serialVersionUID = -250732721L;
/** Standard Constructor */
public <API key> (Properties ctx, int <API key>, String trxName)
{
super (ctx, <API key>, trxName);
/** if (<API key> == 0)
{
} */
}
/** Load Constructor */
public <API key> (Properties ctx, ResultSet rs, String trxName)
{
super (ctx, rs, trxName);
}
/** Load Meta Data */
@Override
protected org.compiere.model.POInfo initPO (Properties ctx)
{
org.compiere.model.POInfo poi = org.compiere.model.POInfo.getPOInfo (ctx, Table_Name, get_TrxName());
return poi;
}
@Override
public org.compiere.model.I_C_UOM getC_UOM() throws RuntimeException
{
return get_ValueAsPO(COLUMNNAME_C_UOM_ID, org.compiere.model.I_C_UOM.class);
}
@Override
public void setC_UOM(org.compiere.model.I_C_UOM C_UOM)
{
set_ValueFromPO(COLUMNNAME_C_UOM_ID, org.compiere.model.I_C_UOM.class, C_UOM);
}
@Override
public void setC_UOM_ID (int C_UOM_ID)
{
if (C_UOM_ID < 1)
set_ValueNoCheck (COLUMNNAME_C_UOM_ID, null);
else
set_ValueNoCheck (COLUMNNAME_C_UOM_ID, Integer.valueOf(C_UOM_ID));
}
@Override
public int getC_UOM_ID ()
{
Integer ii = (Integer)get_Value(COLUMNNAME_C_UOM_ID);
if (ii == null)
return 0;
return ii.intValue();
}
/**
* HUStatus AD_Reference_ID=540478
* Reference name: HUStatus
*/
public static final int <API key>=540478;
/** Planning = P */
public static final String HUSTATUS_Planning = "P";
/** Active = A */
public static final String HUSTATUS_Active = "A";
/** Destroyed = D */
public static final String HUSTATUS_Destroyed = "D";
/** Picked = S */
public static final String HUSTATUS_Picked = "S";
/** Shipped = E */
public static final String HUSTATUS_Shipped = "E";
/** Set Gebinde Status.
@param HUStatus Gebinde Status */
@Override
public void setHUStatus (java.lang.String HUStatus)
{
set_ValueNoCheck (COLUMNNAME_HUStatus, HUStatus);
}
/** Get Gebinde Status.
@return Gebinde Status */
@Override
public java.lang.String getHUStatus ()
{
return (java.lang.String)get_Value(COLUMNNAME_HUStatus);
}
/** Set Waschprobe.
@param IsQualityInspection Waschprobe */
@Override
public void <API key> (boolean IsQualityInspection)
{
set_ValueNoCheck (<API key>, Boolean.valueOf(IsQualityInspection));
}
/** Get Waschprobe.
@return Waschprobe */
@Override
public boolean isQualityInspection ()
{
Object oo = get_Value(<API key>);
if (oo != null)
{
if (oo instanceof Boolean)
return ((Boolean)oo).booleanValue();
return "Y".equals(oo);
}
return false;
}
/** Set Los-Nr..
@param Lot
Los-Nummer (alphanumerisch)
*/
@Override
public void setLot (java.lang.String Lot)
{
set_ValueNoCheck (COLUMNNAME_Lot, Lot);
}
/** Get Los-Nr..
@return Los-Nummer (alphanumerisch)
*/
@Override
public java.lang.String getLot ()
{
return (java.lang.String)get_Value(COLUMNNAME_Lot);
}
@Override
public de.metas.handlingunits.model.I_M_HU getM_HU() throws RuntimeException
{
return get_ValueAsPO(COLUMNNAME_M_HU_ID, de.metas.handlingunits.model.I_M_HU.class);
}
@Override
public void setM_HU(de.metas.handlingunits.model.I_M_HU M_HU)
{
set_ValueFromPO(COLUMNNAME_M_HU_ID, de.metas.handlingunits.model.I_M_HU.class, M_HU);
}
/** Set Handling Units.
@param M_HU_ID Handling Units */
@Override
public void setM_HU_ID (int M_HU_ID)
{
if (M_HU_ID < 1)
set_ValueNoCheck (COLUMNNAME_M_HU_ID, null);
else
set_ValueNoCheck (COLUMNNAME_M_HU_ID, Integer.valueOf(M_HU_ID));
}
/** Get Handling Units.
@return Handling Units */
@Override
public int getM_HU_ID ()
{
Integer ii = (Integer)get_Value(COLUMNNAME_M_HU_ID);
if (ii == null)
return 0;
return ii.intValue();
}
@Override
public org.compiere.model.I_M_InOut getM_InOut_Receipt() throws RuntimeException
{
return get_ValueAsPO(<API key>, org.compiere.model.I_M_InOut.class);
}
@Override
public void setM_InOut_Receipt(org.compiere.model.I_M_InOut M_InOut_Receipt)
{
set_ValueFromPO(<API key>, org.compiere.model.I_M_InOut.class, M_InOut_Receipt);
}
/** Set Material Receipt.
@param M_InOut_Receipt_ID Material Receipt */
@Override
public void <API key> (int M_InOut_Receipt_ID)
{
if (M_InOut_Receipt_ID < 1)
set_ValueNoCheck (<API key>, null);
else
set_ValueNoCheck (<API key>, Integer.valueOf(M_InOut_Receipt_ID));
}
/** Get Material Receipt.
@return Material Receipt */
@Override
public int <API key> ()
{
Integer ii = (Integer)get_Value(<API key>);
if (ii == null)
return 0;
return ii.intValue();
}
@Override
public org.compiere.model.I_M_InOut getM_InOut_Shipment() throws RuntimeException
{
return get_ValueAsPO(<API key>, org.compiere.model.I_M_InOut.class);
}
@Override
public void setM_InOut_Shipment(org.compiere.model.I_M_InOut M_InOut_Shipment)
{
set_ValueFromPO(<API key>, org.compiere.model.I_M_InOut.class, M_InOut_Shipment);
}
/** Set Shipment.
@param M_InOut_Shipment_ID Shipment */
@Override
public void <API key> (int M_InOut_Shipment_ID)
{
if (M_InOut_Shipment_ID < 1)
set_ValueNoCheck (<API key>, null);
else
set_ValueNoCheck (<API key>, Integer.valueOf(M_InOut_Shipment_ID));
}
/** Get Shipment.
@return Shipment */
@Override
public int <API key> ()
{
Integer ii = (Integer)get_Value(<API key>);
if (ii == null)
return 0;
return ii.intValue();
}
@Override
public org.compiere.model.I_M_Locator getM_Locator() throws RuntimeException
{
return get_ValueAsPO(<API key>, org.compiere.model.I_M_Locator.class);
}
@Override
public void setM_Locator(org.compiere.model.I_M_Locator M_Locator)
{
set_ValueFromPO(<API key>, org.compiere.model.I_M_Locator.class, M_Locator);
}
/** Set Lagerort.
@param M_Locator_ID
Lagerort im Lager
*/
@Override
public void setM_Locator_ID (int M_Locator_ID)
{
if (M_Locator_ID < 1)
set_ValueNoCheck (<API key>, null);
else
set_ValueNoCheck (<API key>, Integer.valueOf(M_Locator_ID));
}
/** Get Lagerort.
@return Lagerort im Lager
*/
@Override
public int getM_Locator_ID ()
{
Integer ii = (Integer)get_Value(<API key>);
if (ii == null)
return 0;
return ii.intValue();
}
@Override
public de.metas.materialtracking.model.<API key> <API key>() throws RuntimeException
{
return get_ValueAsPO(<API key>, de.metas.materialtracking.model.<API key>.class);
}
@Override
public void <API key>(de.metas.materialtracking.model.<API key> M_Material_Tracking)
{
set_ValueFromPO(<API key>, de.metas.materialtracking.model.<API key>.class, M_Material_Tracking);
}
/** Set Material-Vorgang-ID.
@param <API key> Material-Vorgang-ID */
@Override
public void <API key> (int <API key>)
{
if (<API key> < 1)
set_ValueNoCheck (<API key>, null);
else
set_ValueNoCheck (<API key>, Integer.valueOf(<API key>));
}
/** Get Material-Vorgang-ID.
@return Material-Vorgang-ID */
@Override
public int <API key> ()
{
Integer ii = (Integer)get_Value(<API key>);
if (ii == null)
return 0;
return ii.intValue();
}
@Override
public org.compiere.model.I_M_Product getM_Product() throws RuntimeException
{
return get_ValueAsPO(<API key>, org.compiere.model.I_M_Product.class);
}
@Override
public void setM_Product(org.compiere.model.I_M_Product M_Product)
{
set_ValueFromPO(<API key>, org.compiere.model.I_M_Product.class, M_Product);
}
/** Set Produkt.
@param M_Product_ID
Produkt, Leistung, Artikel
*/
@Override
public void setM_Product_ID (int M_Product_ID)
{
if (M_Product_ID < 1)
set_ValueNoCheck (<API key>, null);
else
set_ValueNoCheck (<API key>, Integer.valueOf(M_Product_ID));
}
/** Get Produkt.
@return Produkt, Leistung, Artikel
*/
@Override
public int getM_Product_ID ()
{
Integer ii = (Integer)get_Value(<API key>);
if (ii == null)
return 0;
return ii.intValue();
}
/**
* <API key> AD_Reference_ID=131
* Reference name: _Document Status
*/
public static final int <API key>=131;
/** Drafted = DR */
public static final String <API key> = "DR";
/** Completed = CO */
public static final String <API key> = "CO";
/** Approved = AP */
public static final String <API key> = "AP";
/** NotApproved = NA */
public static final String <API key> = "NA";
/** Voided = VO */
public static final String <API key> = "VO";
/** Invalid = IN */
public static final String <API key> = "IN";
/** Reversed = RE */
public static final String <API key> = "RE";
/** Closed = CL */
public static final String <API key> = "CL";
/** Unknown = ?? */
public static final String <API key> = "??";
/** InProgress = IP */
public static final String <API key> = "IP";
/** WaitingPayment = WP */
public static final String <API key> = "WP";
/** WaitingConfirmation = WC */
public static final String <API key> = "WC";
/** Set MO Issue Doc Status.
@param <API key> MO Issue Doc Status */
@Override
public void <API key> (java.lang.String <API key>)
{
set_ValueNoCheck (<API key>, <API key>);
}
/** Get MO Issue Doc Status.
@return MO Issue Doc Status */
@Override
public java.lang.String <API key> ()
{
return (java.lang.String)get_Value(<API key>);
}
@Override
public org.compiere.model.I_C_DocType <API key>() throws RuntimeException
{
return get_ValueAsPO(<API key>, org.compiere.model.I_C_DocType.class);
}
@Override
public void <API key>(org.compiere.model.I_C_DocType <API key>)
{
set_ValueFromPO(<API key>, org.compiere.model.I_C_DocType.class, <API key>);
}
/** Set MO Issue DocType.
@param <API key> MO Issue DocType */
@Override
public void <API key> (int <API key>)
{
if (<API key> < 1)
set_ValueNoCheck (<API key>, null);
else
set_ValueNoCheck (<API key>, Integer.valueOf(<API key>));
}
/** Get MO Issue DocType.
@return MO Issue DocType */
@Override
public int <API key> ()
{
Integer ii = (Integer)get_Value(<API key>);
if (ii == null)
return 0;
return ii.intValue();
}
@Override
public org.eevolution.model.I_PP_Order getPP_Order_Issue() throws RuntimeException
{
return get_ValueAsPO(<API key>, org.eevolution.model.I_PP_Order.class);
}
@Override
public void setPP_Order_Issue(org.eevolution.model.I_PP_Order PP_Order_Issue)
{
set_ValueFromPO(<API key>, org.eevolution.model.I_PP_Order.class, PP_Order_Issue);
}
/** Set Zugeteilt zu Prod.-Auftrag.
@param PP_Order_Issue_ID Zugeteilt zu Prod.-Auftrag */
@Override
public void <API key> (int PP_Order_Issue_ID)
{
if (PP_Order_Issue_ID < 1)
set_ValueNoCheck (<API key>, null);
else
set_ValueNoCheck (<API key>, Integer.valueOf(PP_Order_Issue_ID));
}
/** Get Zugeteilt zu Prod.-Auftrag.
@return Zugeteilt zu Prod.-Auftrag */
@Override
public int <API key> ()
{
Integer ii = (Integer)get_Value(<API key>);
if (ii == null)
return 0;
return ii.intValue();
}
/**
* <API key> AD_Reference_ID=131
* Reference name: _Document Status
*/
public static final int <API key>=131;
/** Drafted = DR */
public static final String <API key> = "DR";
/** Completed = CO */
public static final String <API key> = "CO";
/** Approved = AP */
public static final String <API key> = "AP";
/** NotApproved = NA */
public static final String <API key> = "NA";
/** Voided = VO */
public static final String <API key> = "VO";
/** Invalid = IN */
public static final String <API key> = "IN";
/** Reversed = RE */
public static final String <API key> = "RE";
/** Closed = CL */
public static final String <API key> = "CL";
/** Unknown = ?? */
public static final String <API key> = "??";
/** InProgress = IP */
public static final String <API key> = "IP";
/** WaitingPayment = WP */
public static final String <API key> = "WP";
/** WaitingConfirmation = WC */
public static final String <API key> = "WC";
/** Set MO Receipt DocStatus.
@param <API key> MO Receipt DocStatus */
@Override
public void <API key> (java.lang.String <API key>)
{
set_ValueNoCheck (<API key>, <API key>);
}
/** Get MO Receipt DocStatus.
@return MO Receipt DocStatus */
@Override
public java.lang.String <API key> ()
{
return (java.lang.String)get_Value(<API key>);
}
@Override
public org.compiere.model.I_C_DocType <API key>() throws RuntimeException
{
return get_ValueAsPO(<API key>, org.compiere.model.I_C_DocType.class);
}
@Override
public void <API key>(org.compiere.model.I_C_DocType <API key>)
{
set_ValueFromPO(<API key>, org.compiere.model.I_C_DocType.class, <API key>);
}
/** Set MO Receipt DocType.
@param <API key> MO Receipt DocType */
@Override
public void <API key> (int <API key>)
{
if (<API key> < 1)
set_ValueNoCheck (<API key>, null);
else
set_ValueNoCheck (<API key>, Integer.valueOf(<API key>));
}
/** Get MO Receipt DocType.
@return MO Receipt DocType */
@Override
public int <API key> ()
{
Integer ii = (Integer)get_Value(<API key>);
if (ii == null)
return 0;
return ii.intValue();
}
@Override
public org.eevolution.model.I_PP_Order getPP_Order_Receipt() throws RuntimeException
{
return get_ValueAsPO(<API key>, org.eevolution.model.I_PP_Order.class);
}
@Override
public void setPP_Order_Receipt(org.eevolution.model.I_PP_Order PP_Order_Receipt)
{
set_ValueFromPO(<API key>, org.eevolution.model.I_PP_Order.class, PP_Order_Receipt);
}
/** Set Empf. aus Prod.-Auftrag.
@param PP_Order_Receipt_ID Empf. aus Prod.-Auftrag */
@Override
public void <API key> (int PP_Order_Receipt_ID)
{
if (PP_Order_Receipt_ID < 1)
set_ValueNoCheck (<API key>, null);
else
set_ValueNoCheck (<API key>, Integer.valueOf(PP_Order_Receipt_ID));
}
/** Get Empf. aus Prod.-Auftrag.
@return Empf. aus Prod.-Auftrag */
@Override
public int <API key> ()
{
Integer ii = (Integer)get_Value(<API key>);
if (ii == null)
return 0;
return ii.intValue();
}
/** Set Menge.
@param Qty
Menge
*/
@Override
public void setQty (java.math.BigDecimal Qty)
{
set_ValueNoCheck (COLUMNNAME_Qty, Qty);
}
/** Get Menge.
@return Menge
*/
@Override
public java.math.BigDecimal getQty ()
{
BigDecimal bd = (BigDecimal)get_Value(COLUMNNAME_Qty);
if (bd == null)
return BigDecimal.ZERO;
return bd;
}
/** Set Waschprobe.
@param <API key> Waschprobe */
@Override
public void <API key> (java.lang.String <API key>)
{
set_ValueNoCheck (<API key>, <API key>);
}
/** Get Waschprobe.
@return Waschprobe */
@Override
public java.lang.String <API key> ()
{
return (java.lang.String)get_Value(<API key>);
}
/** Set <API key>.
@param <API key> <API key> */
@Override
public void <API key> (int <API key>)
{
if (<API key> < 1)
set_ValueNoCheck (<API key>, null);
else
set_ValueNoCheck (<API key>, Integer.valueOf(<API key>));
}
/** Get <API key>.
@return <API key> */
@Override
public int <API key> ()
{
Integer ii = (Integer)get_Value(<API key>);
if (ii == null)
return 0;
return ii.intValue();
}
} |
# Makefile.in generated by automake 1.11.3 from Makefile.am.
# gio/win32/Makefile. Generated from Makefile.in by configure.
# Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002,
# 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 Free Software
# Foundation, Inc.
# This Makefile.in is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE.
# GLIB - Library of useful C routines
pkgdatadir = $(datadir)/glib
pkgincludedir = $(includedir)/glib
pkglibdir = $(libdir)/glib
pkglibexecdir = $(libexecdir)/glib
am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
install_sh_DATA = $(install_sh) -c -m 644
install_sh_PROGRAM = $(install_sh) -c
install_sh_SCRIPT = $(install_sh) -c
INSTALL_HEADER = $(INSTALL_DATA)
transform = $(<API key>)
NORMAL_INSTALL = :
PRE_INSTALL = :
POST_INSTALL = :
NORMAL_UNINSTALL = :
PRE_UNINSTALL = :
POST_UNINSTALL = :
build_triplet = i686-pc-linux-gnu
host_triplet = <API key>
DIST_COMMON = $(srcdir)/Makefile.am $(srcdir)/Makefile.in \
$(top_srcdir)/Makefile.decl
subdir = gio/win32
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
am__aclocal_m4_deps = $(top_srcdir)/m4macros/gtk-doc.m4 \
$(top_srcdir)/m4macros/libtool.m4 \
$(top_srcdir)/m4macros/ltoptions.m4 \
$(top_srcdir)/m4macros/ltsugar.m4 \
$(top_srcdir)/m4macros/ltversion.m4 \
$(top_srcdir)/m4macros/lt~obsolete.m4 \
$(top_srcdir)/acinclude.m4 $(top_srcdir)/acglib.m4 \
$(top_srcdir)/glib/libcharset/codeset.m4 \
$(top_srcdir)/glib/libcharset/glibc21.m4 \
$(top_srcdir)/m4macros/glib-gettext.m4 \
$(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(<API key>) \
$(ACLOCAL_M4)
mkinstalldirs = $(SHELL) $(top_srcdir)/mkinstalldirs
CONFIG_HEADER = $(top_builddir)/config.h
CONFIG_CLEAN_FILES =
<API key> =
LTLIBRARIES = $(noinst_LTLIBRARIES)
<API key> =
am__objects_1 =
<API key> = <API key>.lo \
<API key>.lo <API key>.lo \
<API key>.lo \
<API key>.lo $(am__objects_1)
<API key> = $(<API key>)
AM_V_lt = $(am__v_lt_$(V))
am__v_lt_ = $(am__v_lt_$(<API key>))
am__v_lt_0 = --silent
libgiowin32_la_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC \
$(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=link $(CCLD) \
$(<API key>) $(CFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o \
$@
DEFAULT_INCLUDES = -I. -I$(top_builddir)
depcomp = $(SHELL) $(top_srcdir)/depcomp
am__depfiles_maybe = depfiles
am__mv = mv -f
COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \
$(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
LTCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \
$(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) \
$(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \
$(AM_CFLAGS) $(CFLAGS)
AM_V_CC = $(am__v_CC_$(V))
am__v_CC_ = $(am__v_CC_$(<API key>))
am__v_CC_0 = @echo " CC " $@;
AM_V_at = $(am__v_at_$(V))
am__v_at_ = $(am__v_at_$(<API key>))
am__v_at_0 = @
CCLD = $(CC)
LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \
$(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \
$(AM_LDFLAGS) $(LDFLAGS) -o $@
AM_V_CCLD = $(am__v_CCLD_$(V))
am__v_CCLD_ = $(am__v_CCLD_$(<API key>))
am__v_CCLD_0 = @echo " CCLD " $@;
AM_V_GEN = $(am__v_GEN_$(V))
am__v_GEN_ = $(am__v_GEN_$(<API key>))
am__v_GEN_0 = @echo " GEN " $@;
SOURCES = $(<API key>)
DIST_SOURCES = $(<API key>)
ETAGS = etags
CTAGS = ctags
DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
<API key> =
ABS_TAPSET_DIR = $(datadir)/systemtap/tapset
ACLOCAL = ${SHELL} /home/simon/GIT/<API key>/output/build/libglib2-2.30.3/missing --run aclocal-1.11
ALLOCA =
AMTAR = $${TAR-tar}
<API key> = 0
AR = /home/simon/GIT/<API key>/output/host/usr/bin/<API key>
AS = /home/simon/GIT/<API key>/output/host/usr/bin/<API key>
AUTOCONF = ${SHELL} /home/simon/GIT/<API key>/output/build/libglib2-2.30.3/missing --run autoconf
AUTOHEADER = ${SHELL} /home/simon/GIT/<API key>/output/build/libglib2-2.30.3/missing --run autoheader
AUTOMAKE = ${SHELL} /home/simon/GIT/<API key>/output/build/libglib2-2.30.3/missing --run automake-1.11
AWK = gawk
CATALOGS = af.gmo am.gmo ar.gmo as.gmo ast.gmo az.gmo be.gmo be@latin.gmo bg.gmo bn.gmo bn_IN.gmo bs.gmo ca.gmo ca@valencia.gmo cs.gmo cy.gmo da.gmo de.gmo dz.gmo el.gmo en_CA.gmo en_GB.gmo en@shaw.gmo eo.gmo es.gmo et.gmo eu.gmo fa.gmo fi.gmo fr.gmo ga.gmo gl.gmo gu.gmo he.gmo hi.gmo hr.gmo hu.gmo hy.gmo id.gmo is.gmo it.gmo ja.gmo ka.gmo kk.gmo kn.gmo ko.gmo ku.gmo lt.gmo lv.gmo mai.gmo mg.gmo mk.gmo ml.gmo mn.gmo mr.gmo ms.gmo nb.gmo nds.gmo ne.gmo nl.gmo nn.gmo oc.gmo or.gmo pa.gmo pl.gmo ps.gmo pt.gmo pt_BR.gmo ro.gmo ru.gmo rw.gmo si.gmo sk.gmo sl.gmo sq.gmo sr.gmo sr@latin.gmo sr@ije.gmo sv.gmo ta.gmo te.gmo th.gmo tl.gmo tr.gmo ug.gmo tt.gmo uk.gmo vi.gmo wa.gmo xh.gmo yi.gmo zh_CN.gmo zh_HK.gmo zh_TW.gmo
CATOBJEXT = .gmo
CC = /home/simon/GIT/<API key>/output/host/usr/bin/<API key>
CCDEPMODE = depmode=gcc3
CFLAGS = -D_LARGEFILE_SOURCE -<API key> -D_FILE_OFFSET_BITS=64 -pipe -U_FORTIFY_SOURCE -fno-stack-protector -fomit-frame-pointer -fPIC -O2 -Wall
<API key> = $(top_srcdir)/po/LINGUAS
CPP = /home/simon/GIT/<API key>/output/host/usr/bin/<API key>
CPPFLAGS = -D_LARGEFILE_SOURCE -<API key> -D_FILE_OFFSET_BITS=64 -<API key> -pthread
CXX = /home/simon/GIT/<API key>/output/host/usr/bin/<API key>++
CXXCPP = /home/simon/GIT/<API key>/output/host/usr/bin/<API key>++ -E
CXXDEPMODE = depmode=gcc3
CXXFLAGS = -D_LARGEFILE_SOURCE -<API key> -D_FILE_OFFSET_BITS=64 -pipe -U_FORTIFY_SOURCE -fno-stack-protector -fomit-frame-pointer -fPIC -O2
CYGPATH_W = echo
DATADIRNAME = share
DBUS1_CFLAGS = -I/home/simon/GIT/<API key>/output/host/usr/<API key>/sysroot/usr/include/dbus-1.0 -I/home/simon/GIT/<API key>/output/host/usr/<API key>/sysroot/usr/lib/dbus-1.0/include
DBUS1_LIBS = -L/home/simon/GIT/<API key>/output/host/usr/<API key>/sysroot/usr/lib -ldbus-1 -lpthread -lrt
DEFS = -DHAVE_CONFIG_H
DEPDIR = .deps
DISABLE_MEM_POOLS =
DLLTOOL = false
DSYMUTIL =
DTRACE =
DUMPBIN =
ECHO_C =
ECHO_N = -n
ECHO_T =
EGREP = /bin/grep -E
EXEEXT =
FAM_LIBS =
FGREP = /bin/grep -F
GETTEXT_PACKAGE = glib20
GIO = giounix.lo
GIO_MODULE_DIR = ${libdir}/gio/modules
GLIBC21 = yes
GLIB_BINARY_AGE = 3003
<API key> = /home/simon/GIT/<API key>/output/host/usr/bin/<API key>
GLIB_DEBUG_FLAGS = -<API key>
GLIB_EXTRA_CFLAGS =
GLIB_GENMARSHAL = /home/simon/GIT/<API key>/output/host/usr/bin/glib-genmarshal
GLIB_INTERFACE_AGE = 3
GLIB_LINK_FLAGS = -Wl,-Bsymbolic-functions
GLIB_MAJOR_VERSION = 2
GLIB_MICRO_VERSION = 3
GLIB_MINOR_VERSION = 30
GLIB_RT_LIBS = -lrt
GLIB_RUNTIME_LIBDIR =
GLIB_VERSION = 2.30.3
<API key> =
GMOFILES = af.gmo am.gmo ar.gmo as.gmo ast.gmo az.gmo be.gmo be@latin.gmo bg.gmo bn.gmo bn_IN.gmo bs.gmo ca.gmo ca@valencia.gmo cs.gmo cy.gmo da.gmo de.gmo dz.gmo el.gmo en_CA.gmo en_GB.gmo en@shaw.gmo eo.gmo es.gmo et.gmo eu.gmo fa.gmo fi.gmo fr.gmo ga.gmo gl.gmo gu.gmo he.gmo hi.gmo hr.gmo hu.gmo hy.gmo id.gmo is.gmo it.gmo ja.gmo ka.gmo kk.gmo kn.gmo ko.gmo ku.gmo lt.gmo lv.gmo mai.gmo mg.gmo mk.gmo ml.gmo mn.gmo mr.gmo ms.gmo nb.gmo nds.gmo ne.gmo nl.gmo nn.gmo oc.gmo or.gmo pa.gmo pl.gmo ps.gmo pt.gmo pt_BR.gmo ro.gmo ru.gmo rw.gmo si.gmo sk.gmo sl.gmo sq.gmo sr.gmo sr@latin.gmo sr@ije.gmo sv.gmo ta.gmo te.gmo th.gmo tl.gmo tr.gmo ug.gmo tt.gmo uk.gmo vi.gmo wa.gmo xh.gmo yi.gmo zh_CN.gmo zh_HK.gmo zh_TW.gmo
GMSGFMT = /usr/bin/msgfmt
GREP = /bin/grep
GSPAWN = gspawn.lo
<API key> = -<API key> -<API key> -U_OSF_SOURCE
GTKDOC_CHECK =
GTKDOC_DEPS_CFLAGS =
GTKDOC_DEPS_LIBS =
GTKDOC_MKPDF =
GTKDOC_REBASE = true
G_LIBS_EXTRA =
<API key> = 0
<API key> = 1
G_MODULE_IMPL = G_MODULE_IMPL_DL
G_MODULE_LDFLAGS = -Wl,--export-dynamic
G_MODULE_LIBS = -ldl
G_MODULE_LIBS_EXTRA =
<API key> = 0
<API key> =
G_MODULE_SUPPORTED = true
G_THREAD_CFLAGS = -pthread
G_THREAD_LIBS = -pthread -lrt
G_THREAD_LIBS_EXTRA =
<API key> = -lpthread -lrt
HTML_DIR = ${datadir}/gtk-doc/html
ICONV_LIBS =
INDENT =
INSTALL = /usr/bin/install -c
INSTALL_DATA = ${INSTALL} -m 644
INSTALL_PROGRAM = ${INSTALL}
INSTALL_SCRIPT = ${INSTALL}
<API key> = $(install_sh) -c -s
INSTOBJEXT = .mo
INTLLIBS =
LD = /home/simon/GIT/<API key>/output/host/usr/bin/<API key>
LDFLAGS =
LIBFFI_CFLAGS =
LIBFFI_LIBS = -L/home/simon/GIT/<API key>/output/host/usr/<API key>/sysroot/usr/lib -lffi
LIBOBJS =
LIBS =
LIBTOOL = $(SHELL) $(top_builddir)/libtool
<API key> = -<API key> "^g.*"
<API key> = X86
LIPO =
LN_S = ln -s
LTLIBOBJS =
LTP =
LTP_GENHTML =
LT_AGE = 3000
LT_CURRENT = 3000
<API key> = 0
LT_RELEASE = 2.30
LT_REVISION = 3
MAINT =
MAKEINFO = ${SHELL} /home/simon/GIT/<API key>/output/build/libglib2-2.30.3/missing --run makeinfo
MANIFEST_TOOL = :
MKDIR_P = /bin/mkdir -p
MKINSTALLDIRS = ./mkinstalldirs
MSGFMT = /usr/bin/msgfmt
MSGFMT_OPTS = -c
NETWORK_LIBS = -lresolv
NM = /home/simon/GIT/<API key>/output/host/usr/bin/<API key>
NMEDIT =
OBJDUMP = /home/simon/GIT/<API key>/output/host/usr/bin/<API key>
OBJEXT = o
OTOOL =
OTOOL64 =
PACKAGE = glib
PACKAGE_BUGREPORT = http://bugzilla.gnome.org/enter_bug.cgi?product=glib
PACKAGE_NAME = glib
PACKAGE_STRING = glib 2.30.3
PACKAGE_TARNAME = glib
PACKAGE_URL =
PACKAGE_VERSION = 2.30.3
PATH_SEPARATOR = :
PCRE_CFLAGS =
PCRE_LIBS =
PCRE_REQUIRES =
PCRE_WARN_CFLAGS = -Wno-pointer-sign
PERL = perl
PERL_PATH = /usr/bin/perl
PKG_CONFIG = /home/simon/GIT/<API key>/output/host/usr/bin/pkg-config
PKG_CONFIG_LIBDIR =
PKG_CONFIG_PATH =
PLATFORMDEP =
POFILES = af.po am.po ar.po as.po ast.po az.po be.po be@latin.po bg.po bn.po bn_IN.po bs.po ca.po ca@valencia.po cs.po cy.po da.po de.po dz.po el.po en_CA.po en_GB.po en@shaw.po eo.po es.po et.po eu.po fa.po fi.po fr.po ga.po gl.po gu.po he.po hi.po hr.po hu.po hy.po id.po is.po it.po ja.po ka.po kk.po kn.po ko.po ku.po lt.po lv.po mai.po mg.po mk.po ml.po mn.po mr.po ms.po nb.po nds.po ne.po nl.po nn.po oc.po or.po pa.po pl.po ps.po pt.po pt_BR.po ro.po ru.po rw.po si.po sk.po sl.po sq.po sr.po sr@latin.po sr@ije.po sv.po ta.po te.po th.po tl.po tr.po ug.po tt.po uk.po vi.po wa.po xh.po yi.po zh_CN.po zh_HK.po zh_TW.po
POSUB = po
PO_IN_DATADIR_FALSE =
PO_IN_DATADIR_TRUE =
PYTHON = /home/simon/GIT/<API key>/output/host/usr/bin/python
PYTHON_EXEC_PREFIX = ${exec_prefix}
PYTHON_PLATFORM = linux2
PYTHON_PREFIX = ${prefix}
PYTHON_VERSION = 2.7
RANLIB = /home/simon/GIT/<API key>/output/host/usr/bin/<API key>
REBUILD =
SED = /bin/sed
SELINUX_LIBS =
SET_MAKE =
SHELL = /bin/sh
SHTOOL =
STRIP = /home/simon/GIT/<API key>/output/host/usr/bin/<API key>
USE_NLS = yes
VERSION = 2.30.3
WINDRES =
XATTR_LIBS =
XGETTEXT = /usr/bin/xgettext
XMLCATALOG =
XML_CATALOG_FILE =
XSLTPROC =
ZLIB_CFLAGS = -I/home/simon/GIT/<API key>/output/host/usr/<API key>/sysroot/usr/include
ZLIB_LIBS = -L/home/simon/GIT/<API key>/output/host/usr/<API key>/sysroot/usr/lib -lz
abs_builddir = /home/simon/GIT/<API key>/output/build/libglib2-2.30.3/gio/win32
abs_srcdir = /home/simon/GIT/<API key>/output/build/libglib2-2.30.3/gio/win32
abs_top_builddir = /home/simon/GIT/<API key>/output/build/libglib2-2.30.3
abs_top_srcdir = /home/simon/GIT/<API key>/output/build/libglib2-2.30.3
ac_ct_AR =
ac_ct_CC =
ac_ct_CXX =
ac_ct_DUMPBIN =
am__include = include
am__leading_dot = .
am__quote =
am__tar = tar --format=ustar -chf - "$$tardir"
am__untar = tar -xf -
bindir = ${exec_prefix}/bin
build = i686-pc-linux-gnu
build_alias = i686-pc-linux-gnu
build_cpu = i686
build_os = linux-gnu
build_vendor = pc
builddir = .
config_h_INCLUDES = -I$(top_builddir)
datadir = ${datarootdir}
datarootdir = ${prefix}/share
docdir = ${datarootdir}/doc/${PACKAGE_TARNAME}
dvidir = ${docdir}
exec_prefix = /usr
gio_INCLUDES = $(gmodule_INCLUDES)
glib_INCLUDES = $(config_h_INCLUDES) -I$(top_builddir)/glib -I$(top_srcdir)/glib -I$(top_srcdir)
gmodule_INCLUDES = $(glib_INCLUDES) -I$(top_srcdir)/gmodule
gobject_INCLUDES = $(gthread_INCLUDES)
gthread_INCLUDES = $(glib_INCLUDES)
host = <API key>
host_alias = <API key>
host_cpu = arm
host_os = linux-gnueabi
host_vendor = buildroot
htmldir = ${docdir}
includedir = ${prefix}/include
infodir = ${datarootdir}/info
install_sh = ${SHELL} /home/simon/GIT/<API key>/output/build/libglib2-2.30.3/install-sh
libdir = ${exec_prefix}/lib
libexecdir = ${exec_prefix}/libexec
localedir = /usr/share/locale
localstatedir = ${prefix}/var
mandir = ${datarootdir}/man
mkdir_p = /bin/mkdir -p
ms_librarian =
oldincludedir = /usr/include
pdfdir = ${docdir}
pkgpyexecdir = ${pyexecdir}/glib
pkgpythondir = ${pythondir}/glib
prefix = /usr
<API key> = s&^&&
psdir = ${docdir}
pyexecdir = ${exec_prefix}/lib/python2.7/site-packages
pythondir = ${prefix}/lib/python2.7/site-packages
sbindir = ${exec_prefix}/sbin
sharedstatedir = ${prefix}/com
srcdir = .
sysconfdir = /etc
target_alias = <API key>
top_build_prefix = ../../
top_builddir = ../..
top_srcdir = ../..
#GTESTER = gtester # for non-GLIB packages
GTESTER = $(top_builddir)/glib/gtester # for the GLIB package
GTESTER_REPORT = $(top_builddir)/glib/gtester-report # for the GLIB package
# initialize variables for unconditional += appending
EXTRA_DIST =
TEST_PROGS =
NULL =
noinst_LTLIBRARIES = libgiowin32.la
<API key> = \
<API key>.c \
<API key>.h \
gwinhttpvfs.c \
gwinhttpvfs.h \
gwinhttpfile.c \
gwinhttpfile.h \
<API key>.c \
<API key>.h \
<API key>.c \
<API key>.h \
winhttp.h \
$(NULL)
<API key> = \
-DG_LOG_DOMAIN=\"GLib-GIO\" \
$(gio_INCLUDES) \
$(GLIB_DEBUG_FLAGS) \
-DGIO_MODULE_DIR=\"$(GIO_MODULE_DIR)\" \
-DGIO_COMPILATION \
-<API key>
# Default target.
all: all-am
.SUFFIXES:
.SUFFIXES: .c .lo .o .obj
# Regenerate Makefile.in when Makefile.am (or Makefile.decl) changes, by
# re-running automake from the top source directory.
$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(top_srcdir)/Makefile.decl $(am__configure_deps)
@for dep in $?; do \
case '$(am__configure_deps)' in \
*$$dep*) \
( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \
&& { if test -f $@; then exit 0; else break; fi; }; \
exit 1;; \
esac; \
done; \
echo ' cd $(top_srcdir) && $(AUTOMAKE) --gnu gio/win32/Makefile'; \
$(am__cd) $(top_srcdir) && \
$(AUTOMAKE) --gnu gio/win32/Makefile
.PRECIOUS: Makefile
# Regenerate this Makefile from Makefile.in via config.status.
Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
@case '$?' in \
*config.status*) \
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
*) \
echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \
cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \
esac;
$(top_srcdir)/Makefile.decl:
# Keep configure outputs fresh; each delegates to the top-level am--refresh.
$(top_builddir)/config.status: $(top_srcdir)/configure $(<API key>)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(top_srcdir)/configure: $(am__configure_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(ACLOCAL_M4): $(am__aclocal_m4_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(am__aclocal_m4_deps):
# Remove the convenience libraries (and SGI so_locations droppings).
<API key>:
-test -z "$(noinst_LTLIBRARIES)" || rm -f $(noinst_LTLIBRARIES)
@list='$(noinst_LTLIBRARIES)'; for p in $$list; do \
dir="`echo $$p | sed -e 's|/[^/]*$$||'`"; \
test "$$dir" != "$$p" || dir=.; \
echo "rm -f \"$${dir}/so_locations\""; \
rm -f "$${dir}/so_locations"; \
done
# Link the convenience library from its objects.
libgiowin32.la: $(<API key>) $(<API key>) $(<API key>)
$(AM_V_CCLD)$(libgiowin32_la_LINK) $(<API key>) $(<API key>) $(LIBS)
# Object/dependency cleanup helpers.
mostlyclean-compile:
-rm -f *.$(OBJEXT)
distclean-compile:
-rm -f *.tab.c
# Per-source dependency-tracking fragments produced by the compiler/depcomp.
# NOTE(review): the five .Plo names were redacted; they should correspond to
# the five .c files in the sources list above.
include ./$(DEPDIR)/<API key>.Plo
include ./$(DEPDIR)/<API key>.Plo
include ./$(DEPDIR)/<API key>.Plo
include ./$(DEPDIR)/<API key>.Plo
include ./$(DEPDIR)/<API key>.Plo
# Generic suffix rules: compile .c to .o/.obj/.lo with dependency tracking.
# The .Tpo file is renamed to .Po/.Plo only after a successful compile, so
# an interrupted build never leaves a truncated dependency file behind.
.c.o:
$(AM_V_CC)$(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
$(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
#	$(AM_V_CC)source='$<' object='$@' libtool=no \
#	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) \
#	$(AM_V_CC_no)$(COMPILE) -c $<
.c.obj:
$(AM_V_CC)$(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'`
$(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
#	$(AM_V_CC)source='$<' object='$@' libtool=no \
#	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) \
#	$(AM_V_CC_no)$(COMPILE) -c `$(CYGPATH_W) '$<'`
.c.lo:
$(AM_V_CC)$(LTCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
$(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo
#	$(AM_V_CC)source='$<' object='$@' libtool=yes \
#	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) \
#	$(AM_V_CC_no)$(LTCOMPILE) -c -o $@ $<
# Explicit per-file libtool compile rules; these override the generic .c.lo
# rule so the library-specific CPPFLAGS are applied.
<API key>.lo: <API key>.c
$(AM_V_CC)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(<API key>) $(CFLAGS) -MT <API key>.lo -MD -MP -MF $(DEPDIR)/<API key>.Tpo -c -o <API key>.lo `test -f '<API key>.c' || echo '$(srcdir)/'`<API key>.c
$(AM_V_at)$(am__mv) $(DEPDIR)/<API key>.Tpo $(DEPDIR)/<API key>.Plo
#	$(AM_V_CC)source='<API key>.c' object='<API key>.lo' libtool=yes \
#	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) \
#	$(AM_V_CC_no)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(<API key>) $(CFLAGS) -c -o <API key>.lo `test -f '<API key>.c' || echo '$(srcdir)/'`<API key>.c
<API key>.lo: gwinhttpvfs.c
$(AM_V_CC)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(<API key>) $(CFLAGS) -MT <API key>.lo -MD -MP -MF $(DEPDIR)/<API key>.Tpo -c -o <API key>.lo `test -f 'gwinhttpvfs.c' || echo '$(srcdir)/'`gwinhttpvfs.c
$(AM_V_at)$(am__mv) $(DEPDIR)/<API key>.Tpo $(DEPDIR)/<API key>.Plo
#	$(AM_V_CC)source='gwinhttpvfs.c' object='<API key>.lo' libtool=yes \
#	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) \
#	$(AM_V_CC_no)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(<API key>) $(CFLAGS) -c -o <API key>.lo `test -f 'gwinhttpvfs.c' || echo '$(srcdir)/'`gwinhttpvfs.c
<API key>.lo: gwinhttpfile.c
$(AM_V_CC)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(<API key>) $(CFLAGS) -MT <API key>.lo -MD -MP -MF $(DEPDIR)/<API key>.Tpo -c -o <API key>.lo `test -f 'gwinhttpfile.c' || echo '$(srcdir)/'`gwinhttpfile.c
$(AM_V_at)$(am__mv) $(DEPDIR)/<API key>.Tpo $(DEPDIR)/<API key>.Plo
#	$(AM_V_CC)source='gwinhttpfile.c' object='<API key>.lo' libtool=yes \
#	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) \
#	$(AM_V_CC_no)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(<API key>) $(CFLAGS) -c -o <API key>.lo `test -f 'gwinhttpfile.c' || echo '$(srcdir)/'`gwinhttpfile.c
<API key>.lo: <API key>.c
$(AM_V_CC)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(<API key>) $(CFLAGS) -MT <API key>.lo -MD -MP -MF $(DEPDIR)/<API key>.Tpo -c -o <API key>.lo `test -f '<API key>.c' || echo '$(srcdir)/'`<API key>.c
$(AM_V_at)$(am__mv) $(DEPDIR)/<API key>.Tpo $(DEPDIR)/<API key>.Plo
#	$(AM_V_CC)source='<API key>.c' object='<API key>.lo' libtool=yes \
#	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) \
#	$(AM_V_CC_no)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(<API key>) $(CFLAGS) -c -o <API key>.lo `test -f '<API key>.c' || echo '$(srcdir)/'`<API key>.c
<API key>.lo: <API key>.c
$(AM_V_CC)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(<API key>) $(CFLAGS) -MT <API key>.lo -MD -MP -MF $(DEPDIR)/<API key>.Tpo -c -o <API key>.lo `test -f '<API key>.c' || echo '$(srcdir)/'`<API key>.c
$(AM_V_at)$(am__mv) $(DEPDIR)/<API key>.Tpo $(DEPDIR)/<API key>.Plo
#	$(AM_V_CC)source='<API key>.c' object='<API key>.lo' libtool=yes \
#	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) \
#	$(AM_V_CC_no)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(<API key>) $(CFLAGS) -c -o <API key>.lo `test -f '<API key>.c' || echo '$(srcdir)/'`<API key>.c
# Libtool intermediate cleanup.
mostlyclean-libtool:
-rm -f *.lo
clean-libtool:
-rm -rf .libs _libs
# Tag-generation machinery (mkid / etags / ctags / gtags). Each rule builds
# a unique, existing-file list from SOURCES/HEADERS and feeds it to the tool.
ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES)
list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \
unique=`for i in $$list; do \
if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
done | \
$(AWK) '{ files[$$0] = 1; nonempty = 1; } \
END { if (nonempty) { for (i in files) print i; }; }'`; \
mkid -fID $$unique
tags: TAGS
# Emacs-style tags; skipped when there is nothing to index.
TAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \
$(TAGS_FILES) $(LISP)
set x; \
here=`pwd`; \
list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \
unique=`for i in $$list; do \
if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
done | \
$(AWK) '{ files[$$0] = 1; nonempty = 1; } \
END { if (nonempty) { for (i in files) print i; }; }'`; \
shift; \
if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \
test -n "$$unique" || unique=$$empty_fix; \
if test $$# -gt 0; then \
$(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
"$$@" $$unique; \
else \
$(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
$$unique; \
fi; \
fi
ctags: CTAGS
# vi-style tags.
CTAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \
$(TAGS_FILES) $(LISP)
list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \
unique=`for i in $$list; do \
if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
done | \
$(AWK) '{ files[$$0] = 1; nonempty = 1; } \
END { if (nonempty) { for (i in files) print i; }; }'`; \
test -z "$(CTAGS_ARGS)$$unique" \
|| $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
$$unique
# GNU GLOBAL tags, generated from the source tree rooted at top_srcdir.
GTAGS:
here=`$(am__cd) $(top_builddir) && pwd` \
&& $(am__cd) $(top_srcdir) \
&& gtags -i $(GTAGS_ARGS) "$$here"
distclean-tags:
-rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
# Copy every distributed file into $(distdir) for "make dist", recreating
# subdirectories and loosening read-only directory permissions so the copy
# can proceed; fails (exit 1) on the first file that cannot be copied.
distdir: $(DISTFILES)
@srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
list='$(DISTFILES)'; \
dist_files=`for file in $$list; do echo $$file; done | \
sed -e "s|^$$srcdirstrip/||;t" \
-e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
case $$dist_files in \
*/*) $(MKDIR_P) `echo "$$dist_files" | \
sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
sort -u` ;; \
esac; \
for file in $$dist_files; do \
if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
if test -d $$d/$$file; then \
dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
if test -d "$(distdir)/$$file"; then \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \
else \
test -f "$(distdir)/$$file" \
|| cp -p $$d/$$file "$(distdir)/$$file" \
|| exit 1; \
fi; \
done
# "make check" builds everything, then runs the local test hook
# (check-local is wired to test-nonrecursive further below).
check-am: all-am
$(MAKE) $(AM_MAKEFLAGS) check-local
check: check-am
all-am: Makefile $(LTLIBRARIES)
installdirs:
install: install-am
install-exec: install-exec-am
install-data: install-data-am
uninstall: uninstall-am
install-am: all-am
@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
installcheck: installcheck-am
# install-strip re-runs install with STRIPPROG set so installed programs
# are stripped of debug symbols.
install-strip:
if test -z '$(STRIP)'; then \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(<API key>)" \
install_sh_PROGRAM="$(<API key>)" INSTALL_STRIP_FLAG=-s \
install; \
else \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(<API key>)" \
install_sh_PROGRAM="$(<API key>)" INSTALL_STRIP_FLAG=-s \
"INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \
fi
# Stock automake clean / maintainer-clean targets and empty install stubs
# (this subdirectory installs nothing itself).
mostlyclean-generic:
clean-generic:
distclean-generic:
-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
-test . = "$(srcdir)" || test -z "$(<API key>)" || rm -f $(<API key>)
<API key>:
@echo "This command is intended for maintainers to use"
@echo "it deletes files that may require special tools to rebuild."
clean: clean-am
clean-am: clean-generic clean-libtool <API key> \
mostlyclean-am
distclean: distclean-am
-rm -rf ./$(DEPDIR)
-rm -f Makefile
distclean-am: clean-am distclean-compile distclean-generic \
distclean-tags
dvi: dvi-am
dvi-am:
html: html-am
html-am:
info: info-am
info-am:
install-data-am:
install-dvi: install-dvi-am
install-dvi-am:
install-exec-am:
install-html: install-html-am
install-html-am:
install-info: install-info-am
install-info-am:
install-man:
install-pdf: install-pdf-am
install-pdf-am:
install-ps: install-ps-am
install-ps-am:
installcheck-am:
maintainer-clean: maintainer-clean-am
-rm -rf ./$(DEPDIR)
-rm -f Makefile
maintainer-clean-am: distclean-am <API key>
mostlyclean: mostlyclean-am
mostlyclean-am: mostlyclean-compile mostlyclean-generic \
mostlyclean-libtool
pdf: pdf-am
pdf-am:
ps: ps-am
ps-am:
uninstall-am:
.MAKE: check-am install-am install-strip
# Declare every non-file target phony so stray files never shadow them.
.PHONY: CTAGS GTAGS all all-am check check-am check-local clean \
clean-generic clean-libtool <API key> ctags \
distclean distclean-compile distclean-generic \
distclean-libtool distclean-tags distdir dvi dvi-am html \
html-am info info-am install install-am install-data \
install-data-am install-dvi install-dvi-am install-exec \
install-exec-am install-html install-html-am install-info \
install-info-am install-man install-pdf install-pdf-am \
install-ps install-ps-am install-strip installcheck \
installcheck-am installdirs maintainer-clean \
<API key> mostlyclean mostlyclean-compile \
mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \
tags uninstall uninstall-am
### testing rules
# GLib test-framework rules (originating in Makefile.decl): test,
# test-nonrecursive, and the *-report targets drive gtester over
# ${TEST_PROGS}, recursing into $(SUBDIRS) except "." and "po".
# NOTE(review): the recipes below use `exit $?`, which make expands as its
# own $? automatic variable; upstream Makefile.decl writes `exit $$?` so the
# shell's last exit status is used — confirm a `$` was not lost in transit.
# test: run all tests in cwd and subdirs
test: test-nonrecursive
@ for subdir in $(SUBDIRS) . ; do \
test "$$subdir" = "." -o "$$subdir" = "po" || \
( cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) $@ ) || exit $? ; \
done
# test-nonrecursive: run tests only in cwd
# MALLOC_CHECK_/MALLOC_PERTURB_ harden glibc malloc to flush out heap bugs.
test-nonrecursive: ${TEST_PROGS}
@test -z "${TEST_PROGS}" || MALLOC_CHECK_=2 MALLOC_PERTURB_=$$(($${RANDOM:-256} % 256)) ${GTESTER} --verbose ${TEST_PROGS}
#test-nonrecursive:
# test-report: run tests in subdirs and generate report
# perf-report: run tests in subdirs with -m perf and generate report
# full-report: like test-report: with -m perf and -m slow
test-report perf-report full-report: ${TEST_PROGS}
@test -z "${TEST_PROGS}" || { \
case $@ in \
test-report) test_options="-k";; \
perf-report) test_options="-k -m=perf";; \
full-report) test_options="-k -m=perf -m=slow";; \
esac ; \
if test -z "$$GTESTER_LOGDIR" ; then \
${GTESTER} --verbose $$test_options -o test-report.xml ${TEST_PROGS} ; \
elif test -n "${TEST_PROGS}" ; then \
${GTESTER} --verbose $$test_options -o `mktemp "$$GTESTER_LOGDIR/log-XXXXXX"` ${TEST_PROGS} ; \
fi ; \
}
@ ignore_logdir=true ; \
if test -z "$$GTESTER_LOGDIR" ; then \
GTESTER_LOGDIR=`mktemp -d "\`pwd\`/.testlogs-XXXXXX"`; export GTESTER_LOGDIR ; \
ignore_logdir=false ; \
fi ; \
if test -d "$(top_srcdir)/.git" ; then \
REVISION=`git describe` ; \
else \
REVISION=$(VERSION) ; \
fi ; \
for subdir in $(SUBDIRS) . ; do \
test "$$subdir" = "." -o "$$subdir" = "po" || \
( cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) $@ ) || exit $? ; \
done ; \
$$ignore_logdir || { \
echo '<?xml version="1.0"?>' > $@.xml ; \
echo '<report-collection>' >> $@.xml ; \
echo '<info>' >> $@.xml ; \
echo ' <package>$(PACKAGE)</package>' >> $@.xml ; \
echo ' <version>$(VERSION)</version>' >> $@.xml ; \
echo " <revision>$$REVISION</revision>" >> $@.xml ; \
echo '</info>' >> $@.xml ; \
for lf in `ls -L "$$GTESTER_LOGDIR"/.` ; do \
sed '1,1s/^<?xml\b[^>?]*?>//' <"$$GTESTER_LOGDIR"/"$$lf" >> $@.xml ; \
done ; \
echo >> $@.xml ; \
echo '</report-collection>' >> $@.xml ; \
rm -rf "$$GTESTER_LOGDIR"/ ; \
${GTESTER_REPORT} --version 2>/dev/null 1>&2 ; test "$$?" != 0 || ${GTESTER_REPORT} $@.xml >$@.html ; \
}
.PHONY: test test-report perf-report full-report test-nonrecursive
# Coverage targets: lcov runs "make check" (with -k so test failures do not
# abort coverage collection), then genlcov captures *.gcda data with lcov
# ($(LTP)) and renders an HTML report with genhtml.
.PHONY: lcov genlcov lcov-clean
# use recursive makes in order to ignore errors during check
lcov:
-$(MAKE) $(AM_MAKEFLAGS) -k check
$(MAKE) $(AM_MAKEFLAGS) genlcov
# we have to massage the lcov.info file slightly to hide the effect of libtool
# placing the objects files in the .libs/ directory separate from the *.c
# we also have to delete tests/.libs/<API key>*.gcda
genlcov:
rm -f $(top_builddir)/tests/.libs/<API key>*.gcda
$(LTP) --directory $(top_builddir) --capture --output-file glib-lcov.info --test-name GLIB_PERF --no-checksum --compat-libtool
LANG=C $(LTP_GENHTML) --prefix $(top_builddir) --output-directory glib-lcov --title "GLib Code Coverage" --legend --show-details glib-lcov.info
@echo "file://$(abs_top_builddir)/glib-lcov/index.html"
lcov-clean:
-$(LTP) --directory $(top_builddir) -z
-rm -rf glib-lcov.info glib-lcov
-find -name '*.gcda' -print | xargs rm
# run tests in cwd as part of make check
check-local: test-nonrecursive
# Tell versions [3.59,3.63) of GNU make to not export all variables.
# Otherwise a system limit (for SysV at least) may be exceeded.
# (Fixed: a stray trailing " |" — table-formatting residue from extraction —
# was removed; the special target takes no prerequisites here.)
.NOEXPORT:
# (removed: extraneous web-page boilerplate accidentally appended to the file)