blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 7 410 | content_id stringlengths 40 40 | detected_licenses listlengths 0 51 | license_type stringclasses 2
values | repo_name stringlengths 5 132 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 80 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 5.85k 684M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 132
values | src_encoding stringclasses 34
values | language stringclasses 1
value | is_vendor bool 1
class | is_generated bool 2
classes | length_bytes int64 3 9.45M | extension stringclasses 28
values | content stringlengths 3 9.45M | authors listlengths 1 1 | author_id stringlengths 0 352 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
772ccae995ca4327ae19c7c22acdc0c97fc7b251 | 61093dd1de48d37175133d3a874a4c897d05bdfe | /tika-parsers/src/main/java/org/apache/tika/parser/mp4/DirectFileReadDataSource.java | 698a1065bbd197ce889c0b9aef6e9bf755cb9d57 | [
"Apache-2.0",
"GPL-2.0-only",
"LGPL-2.1-or-later",
"CDDL-1.0",
"Classpath-exception-2.0",
"LicenseRef-scancode-unknown-license-reference",
"CDDL-1.1",
"LicenseRef-scancode-unknown",
"EPL-1.0",
"ICU",
"LicenseRef-scancode-bsd-simplified-darwin",
"MPL-2.0",
"LicenseRef-scancode-proprietary-lic... | permissive | fsonntag/tika | ca4dbbe41cb03848735269a35e4762be4935dbe5 | ff762e6c262aa830235b1fe302df474ba4a5cf5b | refs/heads/master | 2020-09-15T11:20:44.930098 | 2019-11-21T13:43:37 | 2019-11-21T13:43:37 | 223,430,660 | 1 | 0 | Apache-2.0 | 2019-11-22T15:21:31 | 2019-11-22T15:21:29 | null | UTF-8 | Java | false | false | 4,896 | java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tika.parser.mp4;
import static com.googlecode.mp4parser.util.CastUtils.l2i;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.nio.channels.WritableByteChannel;
import com.googlecode.mp4parser.DataSource;
/**
* A {@link DataSource} implementation that relies on direct reads from a {@link RandomAccessFile}.
* It should be slower than {@link com.googlecode.mp4parser.FileDataSourceImpl} but does not incur the implicit file locks of
* memory mapped I/O on some JVMs. This implementation allows for a more controlled deletion of files
* and might be preferred when working with temporary files.
* @see <a href="http://bugs.java.com/view_bug.do?bug_id=4724038">JDK-4724038 : (fs) Add unmap method to MappedByteBuffer</a>
* @see <a href="http://bugs.java.com/view_bug.do?bug_id=6359560">JDK-6359560 : (fs) File.deleteOnExit() doesn't work when MappedByteBuffer exists (win)</a>
*/
public class DirectFileReadDataSource implements DataSource {
private static final int TRANSFER_SIZE = 8192;
private RandomAccessFile raf;
/**
 * Opens the given file for read-only random access.
 *
 * @param f the file backing this data source
 * @throws IOException if the file cannot be opened for reading
 */
public DirectFileReadDataSource(File f) throws IOException {
    raf = new RandomAccessFile(f, "r");
}
/**
 * Fills {@code byteBuffer} from the current file position, copying through a
 * fixed-size staging array instead of reading directly into the buffer.
 *
 * Returns the number of bytes read, or -1 if EOF was hit before anything
 * could be read. Throws if EOF at the exact end of the file is reached while
 * the caller's buffer still has room, i.e. the caller asked for more data
 * than the file holds.
 */
public int read(ByteBuffer byteBuffer) throws IOException {
    int len = byteBuffer.remaining();
    int totalRead = 0;
    int bytesRead = 0;
    // staging array: data flows file -> buf -> byteBuffer
    byte[] buf = new byte[TRANSFER_SIZE];
    while (totalRead < len) {
        int bytesToRead = Math.min((len - totalRead), TRANSFER_SIZE);
        bytesRead = raf.read(buf, 0, bytesToRead);
        if (bytesRead < 0) {
            // EOF: leave the loop without touching byteBuffer again
            break;
        } else {
            totalRead += bytesRead;
        }
        byteBuffer.put(buf, 0, bytesRead);
    }
    // EOF combined with a fully-consumed file and an unfilled buffer means the
    // caller requested more bytes than the file contains.
    if (bytesRead < 0 && position() == size() && byteBuffer.hasRemaining()) {
        throw new IOException("End of stream reached earlier than expected");
    }
    // -1 only when EOF was hit immediately; otherwise the total byte count
    return ((bytesRead < 0) && (totalRead == 0)) ? -1 : totalRead;
}
/**
 * Reads {@code byteBuffer.remaining()} bytes with a single call to
 * {@link RandomAccessFile#read(byte[])}.
 *
 * Note that a single read may legitimately return fewer bytes than requested;
 * callers that need the buffer completely filled should use
 * {@link #read(ByteBuffer)} instead.
 *
 * @param byteBuffer destination buffer
 * @return the number of bytes actually read, or -1 if EOF was hit immediately
 * @throws IOException if more bytes are requested than the whole file holds
 */
public int readAllInOnce(ByteBuffer byteBuffer) throws IOException {
    if (byteBuffer.remaining() > raf.length()) {
        throw new IOException("trying to readAllInOnce past end of stream");
    }
    byte[] buf = new byte[byteBuffer.remaining()];
    int read = raf.read(buf);
    // Fix: read can be -1 (EOF) or a short count. The previous code passed it
    // straight to put(), which throws IndexOutOfBoundsException for -1 instead
    // of letting the caller see the EOF return value.
    if (read > 0) {
        byteBuffer.put(buf, 0, read);
    }
    return read;
}
/**
 * @return the total length of the underlying file in bytes
 * @throws IOException if the file length cannot be determined
 */
public long size() throws IOException {
    return this.raf.length();
}
/**
 * @return the current absolute read offset within the file
 * @throws IOException if the file pointer cannot be read
 */
public long position() throws IOException {
    return this.raf.getFilePointer();
}
/**
 * Moves the read offset to {@code nuPos}.
 *
 * @param nuPos new absolute position; must not exceed the file length
 * @throws IOException if the position is past the end of the file
 *                     or the seek fails
 */
public void position(long nuPos) throws IOException {
    long fileLength = raf.length();
    if (nuPos > fileLength) {
        throw new IOException("requesting seek past end of stream");
    }
    raf.seek(nuPos);
}
/**
 * Copies {@code count} bytes starting at {@code position} into the target
 * channel. The bytes are first read into memory via {@link #map(long, long)}.
 *
 * @return the number of bytes written to the channel
 */
public long transferTo(long position, long count, WritableByteChannel target) throws IOException {
    ByteBuffer chunk = map(position, count);
    return target.write(chunk);
}
/**
 * Reads {@code size} bytes starting at {@code startPosition} into a heap
 * buffer. Unlike a true memory map this copies the data, which avoids the
 * implicit JVM file locks of {@code MappedByteBuffer}.
 *
 * @param startPosition absolute offset into the file, must be &gt;= 0
 * @param size number of bytes to read, must be &gt;= 0
 * @return a ByteBuffer wrapping the bytes read
 * @throws IOException if an argument is negative or the requested range
 *         extends past the end of the file
 */
public ByteBuffer map(long startPosition, long size) throws IOException {
    if (startPosition < 0 || size < 0) {
        throw new IOException("startPosition and size must both be >= 0");
    }
    // Use BigInteger so that startPosition + size cannot overflow a long
    // while verifying that the requested range fits inside the file.
    BigInteger end = BigInteger.valueOf(startPosition);
    end = end.add(BigInteger.valueOf(size));
    if (end.compareTo(BigInteger.valueOf(raf.length())) > 0) {
        throw new IOException("requesting read past end of stream");
    }
    raf.seek(startPosition);
    int payLoadSize = l2i(size);
    // Fix: the previous "hack to check for potential overflow" compared
    // Long.MAX_VALUE - payLoadSize against raf.length(), a condition that is
    // true for virtually every real file and therefore made this method throw
    // unconditionally. The BigInteger range check above already rules out
    // overflow and out-of-range requests, so that broken check is removed.
    byte[] payload = new byte[payLoadSize];
    raf.readFully(payload);
    return ByteBuffer.wrap(payload);
}
/** Releases the underlying RandomAccessFile. */
@Override
public void close() throws IOException {
    this.raf.close();
}
} | [
"tallison@apache.org"
] | tallison@apache.org |
2b0ab021fd70b352046ffeffbad4e6e83813505f | e97e5f120fc7fa2686a2817959f90fd82d0b1aa5 | /webAppMVC/src/main/java/com/example/webAppMVC/controller/HomeController.java | 2e6966ae414a4bd869e159126933725b318e6530 | [] | no_license | Goesby1/Summit | 894fc7f4f9f46c7ddfdd812966afb8fa73befc19 | 454f2121c28842cd1df760d962a8728b79030221 | refs/heads/master | 2023-08-10T19:35:25.921991 | 2021-09-28T15:58:51 | 2021-09-28T15:58:51 | 410,915,073 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 1,833 | java | package com.example.webAppMVC.controller;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.servlet.ModelAndView;
import com.example.webAppMVC.model.Country;
import com.example.webAppMVC.service.CountryService;
/**
 * Web controller for the country population pages: listing, per-country
 * details, add/edit forms and the save action. All data access is delegated
 * to {@link CountryService}.
 */
@Controller
public class HomeController {

    @Autowired
    CountryService countryService;

    /** Renders the index page with the full list of countries. */
    @RequestMapping("/")
    public String home(Model model) {
        List<Country> countries = countryService.listCountries();
        model.addAttribute("countries", countries);
        return "index";
    }

    /** Shows capital and population details for a single country. */
    @RequestMapping("/info/{name}")
    public String capAndPop(@PathVariable(name = "name") String name, Model model) {
        model.addAttribute("capital", countryService.capital(name));
        model.addAttribute("population", countryService.population(name));
        return "country";
    }

    /** Presents an empty form for adding a new country's population. */
    @RequestMapping("/add")
    public ModelAndView addPopulation() {
        ModelAndView view = new ModelAndView("add_population");
        view.addObject("country", new Country());
        return view;
    }

    /** Presents a pre-filled form for editing an existing country. */
    @RequestMapping("/edit/{name}")
    public ModelAndView editPopulation(@PathVariable(name = "name") String name) {
        ModelAndView view = new ModelAndView("edit_population");
        view.addObject("country", countryService.get(name));
        return view;
    }

    /** Persists the posted country and redirects back to the index page. */
    @RequestMapping(value="/save", method=RequestMethod.POST)
    public String savePopulation(@ModelAttribute("country") Country con) {
        countryService.saveCountry(con);
        return "redirect:/";
    }
}
| [
"yosepheshete@gmail.com"
] | yosepheshete@gmail.com |
a6a0d2e7384fbf9ed2a9da225e508f1d345dfa6e | 065c1f648e8dd061a20147ff9c0dbb6b5bc8b9be | /checkstyle_cluster/1321/src_1.java | af14844c9bee7751746c869c0a647f6505c5417d | [] | no_license | martinezmatias/GenPat-data-C3 | 63cfe27efee2946831139747e6c20cf952f1d6f6 | b360265a6aa3bb21bd1d64f1fc43c3b37d0da2a4 | refs/heads/master | 2022-04-25T17:59:03.905613 | 2020-04-15T14:41:34 | 2020-04-15T14:41:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 3,041 | java | ////////////////////////////////////////////////////////////////////////////////
// checkstyle: Checks Java source code for adherence to a set of rules.
// Copyright (C) 2001-2002 Oliver Burn
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
////////////////////////////////////////////////////////////////////////////////
package com.puppycrawl.tools.checkstyle.checks;
import java.io.Serializable;
import java.io.ObjectStreamException;
import java.util.Map;
/**
 * Base class for typesafe, serializable option enumerations. Each instance
 * registers itself, under its normalized (trimmed, lower-cased) string form,
 * in a map supplied by the concrete subclass, so instances can be looked up
 * again with {@link #decode(String)} and survive deserialization as
 * singletons via {@link #readResolve()}.
 *
 * @author <a href="mailto:oliver@puppycrawl.com">Oliver Burn</a>
 * @author Rick Giles
 */
public abstract class AbstractOption
    implements Serializable
{
    /** canonical (trimmed, lower-cased) string form of this option **/
    private final String mStrRep;

    /**
     * Creates a new <code>AbstractOption</code> and registers it in the
     * subclass-provided lookup map.
     * @param aStrRep the string representation
     */
    protected AbstractOption(String aStrRep)
    {
        mStrRep = aStrRep.trim().toLowerCase();
        getStrToOpt().put(mStrRep, this);
    }

    /**
     * Returns the map from string representations to options.
     * @return <code>Map</code> from strings to options.
     */
    protected abstract Map getStrToOpt();

    /**
     * Looks up the option registered under a string representation, after
     * normalizing it the same way the constructor does.
     * @param aStrRep the String representation to parse
     * @return the matching <code>AbstractOption</code>, or null if none exists
     */
    public AbstractOption decode(String aStrRep)
    {
        final String normalized = aStrRep.trim().toLowerCase();
        return (AbstractOption) getStrToOpt().get(normalized);
    }

    /**
     * Returns the canonical string representation of this option.
     * @see java.lang.Object
     **/
    public String toString()
    {
        return mStrRep;
    }

    /**
     * Ensures that deserialization yields the already-registered singleton
     * instead of a fresh instance. See Section 3.6 of the Java Object
     * Serialization Specification for details.
     *
     * @return the serialization replacement object
     * @throws ObjectStreamException if a deserialization error occurs
     */
    protected Object readResolve()
        throws ObjectStreamException
    {
        return decode(mStrRep);
    }
}
| [
"375833274@qq.com"
] | 375833274@qq.com |
3b96f4491f7bca74a5af4b7b34041ea1de56e381 | bce328d0f0e3d1ae77e63eea74b39fcf253c9469 | /showphonedemo/src/main/java/com/example/showphonedemo/MainActivity.java | 5c6e712aaa0e4785d3c3f94fdb0e63fd9027de16 | [] | no_license | Android04-sdc/MyApplication4 | 640672698b8f1e307d6523a17542e5888b696af9 | 1ebdb3cf2646864413280acc785ca605f1794268 | refs/heads/master | 2021-01-20T18:43:51.682480 | 2016-07-22T06:52:08 | 2016-07-22T06:52:08 | 63,930,570 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 460 | java | package com.example.showphonedemo;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.widget.Button;
import android.widget.RadioGroup;
// Launcher activity for the demo: inflates the main layout and caches a
// reference to the button declared in it.
public class MainActivity extends AppCompatActivity {
    // Button with id "b" from activity_main.xml; looked up once in onCreate.
    // NOTE(review): nothing is wired to it here -- presumably a click
    // listener is meant to be attached later; confirm intent.
    Button mb;
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // attach the layout defined in res/layout/activity_main.xml
        setContentView(R.layout.activity_main);
        mb= (Button) findViewById(R.id.b);
    }
}
| [
"1984291674@qq.com"
] | 1984291674@qq.com |
79353be7ce024fa5950aef6db3bbdc52d181368e | 5e221b67fdc090de5c8004b8f462ae510e6f6684 | /civcraft/src/main/java/com/avrgaming/civcraft/object/Buff.java | 635b20cc964d6c6b35aecb78ceac26395bb7393e | [] | no_license | cnlimiter/civcraft | 95b6159d7936015e68797410466ad9da33c784f7 | 0cae6fb3ba1aea3a04c0e6b5d7806866900bd9ce | refs/heads/master | 2022-04-06T09:17:06.080416 | 2020-01-07T04:04:46 | 2020-01-07T04:04:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 3,919 | java | /*************************************************************************
*
* AVRGAMING LLC
* __________________
*
* [2013] AVRGAMING LLC
* All Rights Reserved.
*
* NOTICE: All information contained herein is, and remains
* the property of AVRGAMING LLC and its suppliers,
* if any. The intellectual and technical concepts contained
* herein are proprietary to AVRGAMING LLC
* and its suppliers and may be covered by U.S. and Foreign Patents,
* patents in process, and are protected by trade secret or copyright law.
* Dissemination of this information or reproduction of this material
* is strictly forbidden unless prior written permission is obtained
* from AVRGAMING LLC.
*/
package com.avrgaming.civcraft.object;
import java.text.DecimalFormat;
import com.avrgaming.civcraft.config.CivSettings;
import com.avrgaming.civcraft.config.ConfigBuff;
/**
 * An active buff (or debuff) instance, backed by a {@link ConfigBuff} entry
 * loaded from the buffs configuration. Equality and hashing are based solely
 * on the config id.
 */
public class Buff {

    /* Quick redefines for id/name from yml. */
    public static final String FINE_ART = "buff_fine_art";
    public static final String CONSTRUCTION = "buff_construction";
    public static final String GROWTH_RATE = "buff_year_of_plenty";
    public static final String TRADE = "buff_monopoly";
    public static final String REDUCE_CONSUME = "buff_preservative";
    public static final String SCIENCE_RATE = "buff_innovation";
    public static final String ADVANCED_TOOLING = "buff_advanced_tooling";
    public static final String BARRICADE = "buff_barricade";
    public static final String BARTER = "buff_barter";
    public static final String EXTRACTION = "buff_extraction";
    public static final String FIRE_BOMB = "buff_fire_bomb";
    public static final String FISHING = "buff_fishing";
    public static final String MEDICINE = "buff_medicine";
    public static final String RUSH = "buff_rush";
    public static final String DEBUFF_PYRAMID_LEECH = "debuff_pyramid_leech";

    // configuration entry this buff instance is based on
    private ConfigBuff config;
    // identifier of whatever granted this buff
    private String source;
    // key under which this buff instance is registered
    private String key;

    /**
     * Looks up the {@link ConfigBuff} for {@code buffId} and records the key
     * and source this instance was created with.
     */
    public Buff(String buffkey, String buffId, String source) {
        this.config = CivSettings.buffs.get(buffId);
        setKey(buffkey);
        this.source = source;
    }

    /** Hash derives solely from the config id, mirroring equals(). */
    @Override
    public int hashCode() {
        return this.config.id.hashCode();
    }

    /** Two buffs are equal when their config ids match. */
    @Override
    public boolean equals(Object other) {
        if (!(other instanceof Buff)) {
            return false;
        }
        Buff that = (Buff) other;
        return that.getConfig().id.equals(this.getConfig().id);
    }

    /**
     * @return the source
     */
    public String getSource() {
        return this.source;
    }

    /**
     * @param source the source to set
     */
    public void setSource(String source) {
        this.source = source;
    }

    /**
     * @return the config
     */
    public ConfigBuff getConfig() {
        return this.config;
    }

    /**
     * @param config the config to set
     */
    public void setConfig(ConfigBuff config) {
        this.config = config;
    }

    public boolean isStackable() {
        return this.config.stackable;
    }

    public String getId() {
        return this.config.id;
    }

    public String getValue() {
        return this.config.value;
    }

    /** Formats the config value as a percentage, or "NAN!" if unparsable. */
    public String getDisplayDouble() {
        try {
            double parsed = Double.parseDouble(this.config.value);
            DecimalFormat formatter = new DecimalFormat();
            return formatter.format(parsed * 100) + "%";
        } catch (NumberFormatException e) {
            return "NAN!";
        }
    }

    /** Formats the config value as a plain integer, or "NAN!" if unparsable. */
    public String getDisplayInt() {
        try {
            int parsed = Integer.parseInt(this.config.value);
            return "" + parsed;
        } catch (NumberFormatException e) {
            return "NAN!";
        }
    }

    public String getDisplayName() {
        return this.config.name;
    }

    public String getKey() {
        return this.key;
    }

    public void setKey(String key) {
        this.key = key;
    }
}
| [
"799453724@qq.com"
] | 799453724@qq.com |
79b9623b24582500a598a30ca3e5f451b6942066 | 13c2d3db2d49c40c74c2e6420a9cd89377f1c934 | /program_data/JavaProgramData/9/113.java | 2064a58ee6ea71849a4543f86c7411c405c39572 | [
"MIT"
] | permissive | qiuchili/ggnn_graph_classification | c2090fefe11f8bf650e734442eb96996a54dc112 | 291ff02404555511b94a4f477c6974ebd62dcf44 | refs/heads/master | 2021-10-18T14:54:26.154367 | 2018-10-21T23:34:14 | 2018-10-21T23:34:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 1,329 | java | package <missing>;
/**
 * Machine-translated C++ -> Java code (note the tangible.* and ConsoleInput
 * helper calls emitted by the converter).
 *
 * Intended behavior: read n records (a string field xh and a numeric field
 * ag), split them into those with ag >= 60 and the rest, bubble-sort the
 * first group by ag in descending order, then print the xh field of the
 * sorted group followed by the xh field of the remaining records.
 *
 * NOTE(review): this does not compile as-is -- the 'member' struct the
 * converter could not translate is still commented out, tangible.Arrays and
 * ConsoleInput are converter support classes not present in this project,
 * a String token is assigned to the int field ag below, and n is only
 * assigned inside a null-check so its later use is potentially
 * uninitialized. Also note b is sized 101 because b[100] is used as a
 * scratch slot for the bubble-sort swap.
 */
public class GlobalMembers
{
    public static void Main()
    {
        int n;
        int i;
        int j;
        int z;
        int y;
        y = 0;
        z = 0;
        // The original C++ declared this struct inside the function; the
        // converter left it commented out:
//C++ TO JAVA CONVERTER TODO TASK: Java does not allow declaring types within methods:
//	struct member
//	{
//		char xh[10];
//		int ag;
//	};

        member[] a = tangible.Arrays.initializeWithDefaultmemberInstances(100);
        member[] b = tangible.Arrays.initializeWithDefaultmemberInstances(101);
        member[] c = tangible.Arrays.initializeWithDefaultmemberInstances(100);
        // read the record count
        String tempVar = ConsoleInput.scanfRead();
        if (tempVar != null)
        {
            n = Integer.parseInt(tempVar);
        }
        // read the n records into a
        for (i = 0;i < n;i++)
        {
            String tempVar2 = ConsoleInput.scanfRead();
            if (tempVar2 != null)
            {
                a[i].xh = tempVar2;
            }
            // NOTE(review): raw String token assigned to the int field ag --
            // another artifact of the broken automatic conversion.
            String tempVar3 = ConsoleInput.scanfRead(" ");
            if (tempVar3 != null)
            {
                a[i].ag = tempVar3;
            }
        }
        // partition: ag >= 60 goes to b (count y), the rest to c (count z)
        for (i = 0;i < n;i++)
        {
            if (a[i].ag >= 60)
            {
                b[y] = a[i];
                y++;
            }
            else
            {
                c[z] = a[i];
                z++;
            }
        }
        // bubble-sort b[0..y) by ag, descending, using b[100] as swap space
        for (i = 1;i < y;i++)
        {
            for (j = 0;j < y - i;j++)
            {
                if (b[j].ag < b[j + 1].ag)
                {
                    b[100] = b[j];
                    b[j] = b[j + 1];
                    b[j + 1] = b[100];
                }
            }
        }
        // output: the passing group first, then the rest
        for (i = 0;i < y;i++)
        {
            System.out.printf("%s\n",b[i].xh);
        }
        for (i = 0;i < z;i++)
        {
            System.out.printf("%s\n",c[i].xh);
        }
    }
}
| [
"y.yu@open.ac.uk"
] | y.yu@open.ac.uk |
d58ed9f94b208c283ac891813b8bc64479a9c17c | 72ea1d1190e9648a1fb85ec965a5a588754939f9 | /docs/maven-idea-plugin/src/main/java/org/apache/maven/plugin/idea/IdeaModuleMojo.java | efc98be362f65352a7f816efebfba53fc91ca099 | [] | no_license | wangxingyong/docs | 45fdaec7f27332be50a10ec18f8bd89388db66d9 | 194e12fd6cb0c976bfd5f154281abbb4086264ef | refs/heads/master | 2016-09-08T01:35:30.311245 | 2015-01-12T10:49:19 | 2015-01-12T10:49:19 | 20,048,116 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 40,617 | java | package org.apache.maven.plugin.idea;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.manager.WagonManager;
import org.apache.maven.artifact.metadata.ArtifactMetadataSource;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.resolver.ArtifactResolver;
import org.apache.maven.model.Resource;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.logging.Log;
import org.apache.maven.project.MavenProject;
import org.apache.maven.wagon.ResourceDoesNotExistException;
import org.apache.maven.wagon.TransferFailedException;
import org.codehaus.plexus.util.StringUtils;
import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.Element;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Creates the module files (*.iml) for IntelliJ IDEA.
*
* @author Edwin Punzalan
* @goal module
* @execute phase="generate-sources"
*/
public class IdeaModuleMojo
extends AbstractIdeaMojo
{
/**
* The reactor projects in a multi-module build.
*
* @parameter expression="${reactorProjects}"
* @required
* @readonly
*/
private List reactorProjects;
/**
* @component
*/
private WagonManager wagonManager;
/**
* Whether to link the reactor projects as dependency modules or as libraries.
*
* @parameter expression="${linkModules}" default-value="true"
*/
private boolean linkModules;
/**
* Specify the location of the deployment descriptor file, if one is provided.
*
* @parameter expression="${deploymentDescriptorFile}"
*/
private String deploymentDescriptorFile;
/**
* Whether to use full artifact names when referencing libraries.
*
* @parameter expression="${useFullNames}" default-value="false"
*/
private boolean useFullNames;
/**
* Enables/disables the downloading of source attachments.
*
* @parameter expression="${downloadSources}" default-value="false"
*/
private boolean downloadSources;
/**
* Enables/disables the downloading of javadoc attachments.
*
* @parameter expression="${downloadJavadocs}" default-value="false"
*/
private boolean downloadJavadocs;
/**
* Sets the classifier string attached to an artifact source archive name.
*
* @parameter expression="${sourceClassifier}" default-value="sources"
*/
private String sourceClassifier;
/**
* Sets the classifier string attached to an artifact javadoc archive name.
*
* @parameter expression="${javadocClassifier}" default-value="javadoc"
*/
private String javadocClassifier;
/**
* An optional set of Library objects that allow you to specify a comma separated list of source dirs, class dirs,
* or to indicate that the library should be excluded from the module. For example:
* <p/>
* <pre>
* <libraries>
* <library>
* <name>webwork</name>
* <sources>file://$webwork$/src/java</sources>
* <!--
* <classes>...</classes>
* <exclude>true</exclude>
* -->
* </library>
* </libraries>
* </pre>
*
* @parameter
*/
private Library[] libraries;
/**
* A comma-separated list of directories that should be excluded. These directories are in addition to those
* already excluded, such as target.
*
* @parameter
*/
private String exclude;
/**
* Causes the module libraries to use a short name for all dependencies. This is very convenient but has been
* reported to cause problems with IDEA.
*
* @parameter default-value="false"
*/
private boolean dependenciesAsLibraries;
/**
* A temporary cache of artifacts that's already been downloaded or
* attempted to be downloaded. This is to refrain from trying to download a
* dependency that we have already tried to download.
*
* @todo this is nasty! the only reason this is static is to use the same cache between reactor calls
*/
private static Map attemptedDownloads = new HashMap();
/**
* Tell IntelliJ IDEA that this module is an IntelliJ IDEA Plugin.
*
* @parameter default-value="false"
*/
private boolean ideaPlugin;
/**
* Specify the version of IDEA to target. This is needed to identify the default formatting of
* project-jdk-name used by IDEA. Currently supports 4.x and 5.x.
* <p/>
* This will only be used when parameter jdkName is not set.
*
* @parameter expression="${ideaVersion}" default-value="5.x"
*/
private String ideaVersion;
private Set macros;
/**
 * Copies the shared mojo configuration plus all module-specific settings
 * into this mojo so it can be invoked programmatically (rather than only
 * via plexus parameter injection). Each parameter maps one-to-one onto the
 * field of the same name.
 */
public void initParam( MavenProject project, ArtifactFactory artifactFactory, ArtifactRepository localRepo,
                       ArtifactResolver artifactResolver, ArtifactMetadataSource artifactMetadataSource, Log log,
                       boolean overwrite, MavenProject executedProject, List reactorProjects,
                       WagonManager wagonManager, boolean linkModules, boolean useFullNames,
                       boolean downloadSources, String sourceClassifier, boolean downloadJavadocs,
                       String javadocClassifier, Library[] libraries, Set macros, String exclude,
                       boolean useShortDependencyNames, String deploymentDescriptorFile, boolean ideaPlugin,
                       String ideaVersion )
{
    // common parameters handled by the abstract base mojo
    super.initParam( project, artifactFactory, localRepo, artifactResolver, artifactMetadataSource, log,
                     overwrite );

    // reactor / repository access
    this.reactorProjects = reactorProjects;
    this.wagonManager = wagonManager;

    // module linking and naming behaviour
    this.linkModules = linkModules;
    this.useFullNames = useFullNames;
    this.dependenciesAsLibraries = useShortDependencyNames;

    // source / javadoc attachment downloads
    this.downloadSources = downloadSources;
    this.sourceClassifier = sourceClassifier;
    this.downloadJavadocs = downloadJavadocs;
    this.javadocClassifier = javadocClassifier;

    // library customisation and exclusions
    this.libraries = libraries;
    this.macros = macros;
    this.exclude = exclude;

    // packaging specifics
    this.deploymentDescriptorFile = deploymentDescriptorFile;
    this.ideaPlugin = ideaPlugin;
    this.ideaVersion = ideaVersion;
}
/**
 * Creates the IDEA (.iml) module file: first resolves the project's
 * dependencies, then rewrites the module file on disk.
 *
 * @throws org.apache.maven.plugin.MojoExecutionException
 *         if dependency resolution fails
 */
public void execute()
    throws MojoExecutionException
{
    try
    {
        doDependencyResolution( executedProject, localRepo );
    }
    catch ( Exception cause )
    {
        throw new MojoExecutionException( "Unable to build project dependencies.", cause );
    }

    rewriteModule();
}
/**
 * Rewrites the IDEA module (.iml) file for the executed project: applies
 * packaging-specific settings, refreshes the compiler output folders,
 * rebuilds the source/resource folder list and the exclude-folder list,
 * refreshes dependencies, and writes the document back to disk.
 *
 * @throws MojoExecutionException if the .iml file cannot be read or written
 */
public void rewriteModule()
    throws MojoExecutionException
{
    // The module file lives next to the POM and is named artifactId-version.iml.
    File moduleFile = new File( executedProject.getBasedir(), executedProject.getArtifactId() +"-"+executedProject.getVersion()+ ".iml" );
    try
    {
        // Start from the existing file, falling back to the module.xml template.
        Document document = readXmlDocument( moduleFile, "module.xml" );
        Element module = document.getRootElement();
        // Add packaging-specific module configuration.
        // TODO: how can we let the WAR/EJBs plugin hook in and provide this?
        // TODO: merge in ejb-module, etc.
        if ( "war".equals( executedProject.getPackaging() ) )
        {
            addWebModule( module );
        }
        else if ( "ejb".equals( executedProject.getPackaging() ) )
        {
            addEjbModule( module );
        }
        else if ( "ear".equals( executedProject.getPackaging() ) )
        {
            addEarModule( module );
        }
        else if ( ideaPlugin )
        {
            addPluginModule( module );
        }
        // Point the compiler output folders at Maven's target directories.
        Element component = findComponent( module, "NewModuleRootManager" );
        Element output = findElement( component, "output" );
        output.addAttribute( "url", getModuleFileUrl( executedProject.getBuild().getOutputDirectory() ) );
        Element outputTest = findElement( component, "output-test" );
        outputTest.addAttribute( "url", getModuleFileUrl( executedProject.getBuild().getTestOutputDirectory() ) );
        // Rebuild the <sourceFolder> entries from the current POM state.
        Element content = findElement( component, "content" );
        removeOldElements( content, "sourceFolder" );
        for ( Iterator i = executedProject.getCompileSourceRoots().iterator(); i.hasNext(); )
        {
            String directory = (String) i.next();
            addSourceFolder( content, directory, false );
        }
        for ( Iterator i = executedProject.getTestCompileSourceRoots().iterator(); i.hasNext(); )
        {
            String directory = (String) i.next();
            addSourceFolder( content, directory, true );
        }
        // Resource dirs can only be mapped when used verbatim -- IDEA has no
        // equivalent of Maven target paths or filtering.
        for ( Iterator i = executedProject.getBuild().getResources().iterator(); i.hasNext(); )
        {
            Resource resource = (Resource) i.next();
            String directory = resource.getDirectory();
            if ( resource.getTargetPath() == null && !resource.isFiltering() )
            {
                addSourceFolder( content, directory, false );
            }
            else
            {
                getLog().info(
                    "Not adding resource directory as it has an incompatible target path or filtering: "
                        + directory );
            }
        }
        for ( Iterator i = executedProject.getBuild().getTestResources().iterator(); i.hasNext(); )
        {
            Resource resource = (Resource) i.next();
            String directory = resource.getDirectory();
            if ( resource.getTargetPath() == null && !resource.isFiltering() )
            {
                addSourceFolder( content, directory, true );
            }
            else
            {
                getLog().info(
                    "Not adding test resource directory as it has an incompatible target path or filtering: "
                        + directory );
            }
        }
        // Rebuild the <excludeFolder> entries.
        removeOldElements( content, "excludeFolder" );
        //For excludeFolder
        File target = new File( executedProject.getBuild().getDirectory() );
        File classes = new File( executedProject.getBuild().getOutputDirectory() );
        File testClasses = new File( executedProject.getBuild().getTestOutputDirectory() );
        List sourceFolders = content.elements( "sourceFolder" );
        // Candidate exclusions: target, classes, test-classes, plus any
        // user-configured directories from the 'exclude' parameter.
        List filteredExcludes = new ArrayList();
        filteredExcludes.addAll( getExcludedDirectories( target, filteredExcludes, sourceFolders ) );
        filteredExcludes.addAll( getExcludedDirectories( classes, filteredExcludes, sourceFolders ) );
        filteredExcludes.addAll( getExcludedDirectories( testClasses, filteredExcludes, sourceFolders ) );
        if ( exclude != null )
        {
            String[] dirs = exclude.split( "[,\\s]+" );
            for ( int i = 0; i < dirs.length; i++ )
            {
                File excludedDir = new File( executedProject.getBasedir(), dirs[i] );
                filteredExcludes.addAll( getExcludedDirectories( excludedDir, filteredExcludes, sourceFolders ) );
            }
        }
        // even though we just ran all the directories in the filteredExcludes List through the intelligent
        // getExcludedDirectories method, we never actually were guaranteed the order that they were added was
        // in the order required to make the most optimized exclude list. In addition, the smart logic from
        // that method is entirely skipped if the directory doesn't currently exist. A simple string matching
        // will do pretty much the same thing and make the list more concise.
        ArrayList actuallyExcluded = new ArrayList();
        Collections.sort( filteredExcludes );
        for ( Iterator i = filteredExcludes.iterator(); i.hasNext(); )
        {
            String dirToExclude = i.next().toString();
            // normalize Windows separators for the prefix comparisons below
            String dirToExcludeTemp = dirToExclude.replace( '\\', '/' );
            boolean addExclude = true;
            for ( Iterator iterator = actuallyExcluded.iterator(); iterator.hasNext(); )
            {
                String dir = iterator.next().toString();
                String dirTemp = dir.replace( '\\', '/' );
                // an ancestor directory is already excluded -> skip this one
                if ( dirToExcludeTemp.startsWith( dirTemp + "/" ) )
                {
                    addExclude = false;
                    break;
                }
                // this directory is an ancestor of an already-added entry ->
                // drop the more specific entry in favour of this one
                else if ( dir.startsWith( dirToExcludeTemp + "/" ) )
                {
                    actuallyExcluded.remove( dir );
                }
            }
            if ( addExclude )
            {
                actuallyExcluded.add( dirToExclude );
                addExcludeFolder( content, dirToExclude );
            }
        }
        //Remove default exclusion for output dirs if there are sources in it
        String outputModuleUrl = getModuleFileUrl( executedProject.getBuild().getOutputDirectory() );
        String testOutputModuleUrl = getModuleFileUrl( executedProject.getBuild().getTestOutputDirectory() );
        for ( Iterator i = content.elements( "sourceFolder" ).iterator(); i.hasNext(); )
        {
            Element sourceFolder = (Element) i.next();
            String sourceUrl = sourceFolder.attributeValue( "url" ).replace( '\\', '/' );
            if ( sourceUrl.startsWith( outputModuleUrl + "/" ) || sourceUrl.startsWith( testOutputModuleUrl ) )
            {
                component.remove( component.element( "exclude-output" ) );
                break;
            }
        }
        // Refresh the dependency (orderEntry) elements, then persist.
        rewriteDependencies( component );
        writeXmlDocument( moduleFile, document );
    }
    catch ( DocumentException e )
    {
        throw new MojoExecutionException( "Error parsing existing IML file " + moduleFile.getAbsolutePath(), e );
    }
    catch ( IOException e )
    {
        throw new MojoExecutionException( "Error parsing existing IML file " + moduleFile.getAbsolutePath(), e );
    }
}
/**
 * Synchronises the {@code <orderEntry>} children of the module root manager
 * {@code <component>} with the executed project's test-scope artifacts:
 * existing entries are reused (matched by module/library name, or by the
 * first CLASSES root URL for unnamed libraries), missing ones are created,
 * and entries whose artifact is no longer present are removed.
 *
 * @param component the module root manager <component> element of the .iml file
 */
private void rewriteDependencies( Element component )
{
    // Index the entries already present in the existing .iml file.
    Map modulesByName = new HashMap();
    Map modulesByUrl = new HashMap();
    Set unusedModules = new HashSet();
    for ( Iterator children = component.elementIterator( "orderEntry" ); children.hasNext(); )
    {
        Element orderEntry = (Element) children.next();
        String type = orderEntry.attributeValue( "type" );
        if ( "module".equals( type ) )
        {
            modulesByName.put( orderEntry.attributeValue( "module-name" ), orderEntry );
        }
        else if ( "module-library".equals( type ) )
        {
            // keep track for later so we know what is left
            unusedModules.add( orderEntry );
            Element lib = orderEntry.element( "library" );
            String name = lib.attributeValue( "name" );
            if ( name != null )
            {
                modulesByName.put( name, orderEntry );
            }
            else
            {
                // Unnamed library: fall back to matching on the first CLASSES root URL.
                Element classesChild = lib.element( "CLASSES" );
                if ( classesChild != null )
                {
                    Element rootChild = classesChild.element( "root" );
                    if ( rootChild != null )
                    {
                        String url = rootChild.attributeValue( "url" );
                        if ( url != null )
                        {
                            // Need to ignore case because of Windows drive letters
                            modulesByUrl.put( url.toLowerCase(), orderEntry );
                        }
                    }
                }
            }
        }
    }
    // Walk the project's test artifacts and create/refresh one entry per artifact.
    List testClasspathElements = executedProject.getTestArtifacts();
    for ( Iterator i = testClasspathElements.iterator(); i.hasNext(); )
    {
        Artifact a = (Artifact) i.next();
        Library library = findLibrary( a );
        if ( library != null && library.isExclude() )
        {
            // Explicitly excluded via plugin configuration.
            continue;
        }
        String moduleName;
        if ( useFullNames )
        {
            moduleName = a.getGroupId() + ':' + a.getArtifactId() + ':' + a.getType() + ':' + a.getVersion();
        }
        else
        {
            moduleName = a.getArtifactId() + "-" + a.getVersion();
        }
        Element dep = (Element) modulesByName.get( moduleName );
        if ( dep == null )
        {
            // Need to ignore case because of Windows drive letters
            dep = (Element) modulesByUrl.get( getLibraryUrl( a ).toLowerCase() );
        }
        if ( dep != null )
        {
            // Reusing an existing entry; it is no longer a removal candidate.
            unusedModules.remove( dep );
        }
        else
        {
            dep = createElement( component, "orderEntry" );
        }
        boolean isIdeaModule = false;
        if ( linkModules )
        {
            // Reactor siblings become inter-module dependencies instead of jars.
            isIdeaModule = isReactorProject( a.getGroupId(), a.getArtifactId() );
            if ( isIdeaModule )
            {
                dep.addAttribute( "type", "module" );
                dep.addAttribute( "module-name", moduleName );
            }
        }
        if ( a.getFile() != null && !isIdeaModule )
        {
            dep.addAttribute( "type", "module-library" );
            Element lib = dep.element( "library" );
            if ( lib == null )
            {
                lib = createElement( dep, "library" );
            }
            if ( dependenciesAsLibraries )
            {
                lib.addAttribute( "name", moduleName );
            }
            // replace classes
            removeOldElements( lib, "CLASSES" );
            Element classes = createElement( lib, "CLASSES" );
            if ( library != null && library.getSplitClasses().length > 0 )
            {
                // User-configured split classpaths override the artifact's own jar.
                lib.addAttribute( "name", moduleName );
                String[] libraryClasses = library.getSplitClasses();
                for ( int k = 0; k < libraryClasses.length; k++ )
                {
                    String classpath = libraryClasses[k];
                    extractMacro( classpath );
                    Element classEl = createElement( classes, "root" );
                    classEl.addAttribute( "url", classpath );
                }
            }
            else
            {
                createElement( classes, "root" ).addAttribute( "url", getLibraryUrl( a ) );
            }
            if ( library != null && library.getSplitSources().length > 0 )
            {
                removeOldElements( lib, "SOURCES" );
                Element sourcesElement = createElement( lib, "SOURCES" );
                String[] sources = library.getSplitSources();
                for ( int k = 0; k < sources.length; k++ )
                {
                    String source = sources[k];
                    extractMacro( source );
                    Element sourceEl = createElement( sourcesElement, "root" );
                    sourceEl.addAttribute( "url", source );
                }
            }
            else if ( downloadSources )
            {
                resolveClassifier( createOrGetElement( lib, "SOURCES" ), a, sourceClassifier );
            }
            if ( library != null && library.getSplitJavadocs().length > 0 )
            {
                removeOldElements( lib, "JAVADOC" );
                Element javadocsElement = createElement( lib, "JAVADOC" );
                String[] javadocs = library.getSplitJavadocs();
                for ( int k = 0; k < javadocs.length; k++ )
                {
                    String javadoc = javadocs[k];
                    extractMacro( javadoc );
                    Element sourceEl = createElement( javadocsElement, "root" );
                    sourceEl.addAttribute( "url", javadoc );
                }
            }
            else if ( downloadJavadocs )
            {
                resolveClassifier( createOrGetElement( lib, "JAVADOC" ), a, javadocClassifier );
            }
        }
    }
    // Drop the entries that no current artifact claimed.
    for ( Iterator i = unusedModules.iterator(); i.hasNext(); )
    {
        Element orderEntry = (Element) i.next();
        component.remove( orderEntry );
    }
}
/**
 * Returns the child element with the given name, creating it under
 * {@code lib} first when it does not yet exist.
 *
 * @param lib  parent element to search (and, if needed, extend)
 * @param name name of the wanted child element
 * @return the existing or newly created child element
 */
private Element createOrGetElement( Element lib, String name )
{
    Element existing = lib.element( name );
    return existing != null ? existing : createElement( lib, name );
}
/**
 * Configures the module as a J2EE application (EAR) module and registers the
 * application.xml deployment descriptor (version 1.3) expected in the build
 * output directory.
 *
 * @param module the root <module> element of the .iml document
 */
private void addEarModule( Element module )
{
    module.addAttribute( "type", "J2EE_APPLICATION_MODULE" );
    Element component = findComponent( module, "ApplicationModuleProperties" );
    addDeploymentDescriptor( component, "application.xml", "1.3",
                             executedProject.getBuild().getDirectory() + "/application.xml" );
}
/**
 * Configures the module as a J2EE EJB module: sets the exploded build URL,
 * registers the ejb-jar.xml descriptor (optional for EJB 3.x), and rebuilds
 * the list of packaged container elements from the project's test artifacts.
 * Reactor siblings are packaged as module references, everything else as
 * module-level libraries.
 *
 * @param module the root <module> element of the .iml document
 */
private void addEjbModule( Element module )
{
    String ejbVersion = getPluginSetting( "maven-ejb-plugin", "ejbVersion", "2.x" );
    module.addAttribute( "type", "J2EE_EJB_MODULE" );
    String explodedDir = executedProject.getBuild().getDirectory() + "/" + executedProject.getArtifactId();
    Element component = findComponent( module, "EjbModuleBuildComponent" );
    Element setting = findSetting( component, "EXPLODED_URL" );
    setting.addAttribute( "value", getModuleFileUrl( explodedDir ) );
    component = findComponent( module, "EjbModuleProperties" );
    Element deployDescElement =
        addDeploymentDescriptor( component, "ejb-jar.xml", ejbVersion, "src/main/resources/META-INF/ejb-jar.xml" );
    // The descriptor is optional from EJB 3.x onwards (annotations replace it).
    deployDescElement.addAttribute( "optional", ejbVersion.startsWith( "3" ) + "" );
    // Rebuild the container element list from scratch.
    removeOldElements( component, "containerElement" );
    List artifacts = executedProject.getTestArtifacts();
    for ( Iterator i = artifacts.iterator(); i.hasNext(); )
    {
        Artifact artifact = (Artifact) i.next();
        Element containerElement = createElement( component, "containerElement" );
        if ( linkModules && isReactorProject( artifact.getGroupId(), artifact.getArtifactId() ) )
        {
            containerElement.addAttribute( "type", "module" );
            containerElement.addAttribute( "name", artifact.getArtifactId() );
            Element methodAttribute = createElement( containerElement, "attribute" );
            methodAttribute.addAttribute( "name", "method" );
            // "method" values are IDEA packaging-method codes (opaque to this plugin).
            methodAttribute.addAttribute( "value", "6" );
            Element uriAttribute = createElement( containerElement, "attribute" );
            uriAttribute.addAttribute( "name", "URI" );
            uriAttribute.addAttribute( "value", "/lib/" + artifact.getArtifactId() + ".jar" );
        }
        else if ( artifact.getFile() != null )
        {
            containerElement.addAttribute( "type", "library" );
            containerElement.addAttribute( "level", "module" );
            //no longer needed in IntelliJ 6
            if ( StringUtils.isEmpty( ideaVersion ) || !ideaVersion.startsWith( "6" ) )
            {
                containerElement.addAttribute( "name", artifact.getArtifactId() );
            }
            Element methodAttribute = createElement( containerElement, "attribute" );
            methodAttribute.addAttribute( "name", "method" );
            methodAttribute.addAttribute( "value", "2" );
            Element uriAttribute = createElement( containerElement, "attribute" );
            uriAttribute.addAttribute( "name", "URI" );
            uriAttribute.addAttribute( "value", "/lib/" + artifact.getFile().getName() );
            Element urlElement = createElement( containerElement, "url" );
            urlElement.setText( getLibraryUrl( artifact ) );
        }
    }
}
/**
 * Records every {@code $NAME$} path-variable macro occurring in the given
 * path into the {@code macros} set (no-op when macro collection is disabled,
 * i.e. {@code macros} is null).
 *
 * @param path a classpath/sources/javadoc root path that may embed macros
 */
private void extractMacro( String path )
{
    if ( macros != null )
    {
        // Bug fix: the previous pattern ".*\$([^\$]+)\$.*" made a single
        // find() consume the entire string, so at most one macro per path
        // (the last one, via greedy backtracking) was ever recorded.
        // Matching each $NAME$ occurrence individually lets the loop
        // collect all of them.
        Pattern p = Pattern.compile( "\\$([^\\$]+)\\$" );
        Matcher matcher = p.matcher( path );
        while ( matcher.find() )
        {
            String macro = matcher.group( 1 );
            macros.add( macro );
        }
    }
}
/**
 * Looks up the user-configured <library> entry whose name equals the
 * artifact's artifactId.
 *
 * @param a the artifact to find a configuration for
 * @return the matching library configuration, or null when none is
 *         configured or none matches
 */
private Library findLibrary( Artifact a )
{
    if ( libraries == null )
    {
        return null;
    }
    int idx = 0;
    while ( idx < libraries.length )
    {
        Library candidate = libraries[idx];
        if ( a.getArtifactId().equals( candidate.getName() ) )
        {
            return candidate;
        }
        idx++;
    }
    return null;
}
/**
 * Recursively computes the directories under {@code target} that should be
 * excluded from the IDEA module: every subdirectory that is neither a source
 * folder nor an ancestor of one, and is not already excluded. When every
 * child of {@code target} would be excluded, the whole {@code target} is
 * excluded instead of listing each child. A non-existent target is excluded
 * pre-emptively.
 *
 * @param target        directory to scan
 * @param excludeList   absolute paths already excluded (skipped here)
 * @param sourceFolders existing <sourceFolder> elements (dom4j Elements with a "url" attribute)
 * @return list of absolute paths to exclude
 */
private List getExcludedDirectories( File target, List excludeList, List sourceFolders )
{
    List foundFolders = new ArrayList();
    int totalDirs = 0, excludedDirs = 0;
    if ( target.exists() && !excludeList.contains( target.getAbsolutePath() ) )
    {
        File[] files = target.listFiles();
        for ( int i = 0; i < files.length; i++ )
        {
            File file = files[i];
            if ( file.isDirectory() && !excludeList.contains( file.getAbsolutePath() ) )
            {
                totalDirs++;
                String absolutePath = file.getAbsolutePath();
                String url = getModuleFileUrl( absolutePath );
                boolean addToExclude = true;
                for ( Iterator sources = sourceFolders.iterator(); sources.hasNext(); )
                {
                    String source = ( (Element) sources.next() ).attributeValue( "url" );
                    if ( source.equals( url ) )
                    {
                        // The directory itself is a source folder: keep it.
                        addToExclude = false;
                        break;
                    }
                    else if ( source.indexOf( url ) == 0 )
                    {
                        // A source folder lives somewhere below: recurse so
                        // only the non-source siblings get excluded.
                        foundFolders.addAll(
                            getExcludedDirectories( new File( absolutePath ), excludeList, sourceFolders ) );
                        addToExclude = false;
                        break;
                    }
                }
                if ( addToExclude )
                {
                    excludedDirs++;
                    foundFolders.add( absolutePath );
                }
            }
        }
        //if all directories are excluded, then just exclude the parent directory
        if ( totalDirs > 0 && totalDirs == excludedDirs )
        {
            foundFolders.clear();
            foundFolders.add( target.getAbsolutePath() );
        }
    }
    else if ( !target.exists() )
    {
        //might as well exclude a non-existent dir so that it won't show when it suddenly appears
        foundFolders.add( target.getAbsolutePath() );
    }
    return foundFolders;
}
/**
 * Adds the Web module to the (.iml) project file: marks the module as a
 * J2EE web module, sets the exploded WAR URL, rebuilds the packaged
 * container elements from the project's test artifacts, registers the
 * web.xml descriptor (servlet 2.3) and the web root.
 *
 * @param module Xpp3Dom element (the root <module> element of the .iml document)
 */
private void addWebModule( Element module )
{
    // TODO: this is bad - reproducing war plugin defaults, etc!
    // --> this is where the OGNL out of a plugin would be helpful as we could run package first and
    // grab stuff from the mojo
    String warWebapp = executedProject.getBuild().getDirectory() + "/" + executedProject.getArtifactId();
    String warSrc = getPluginSetting( "maven-war-plugin", "warSourceDirectory", "src/main/webapp" );
    String webXml = warSrc + "/WEB-INF/web.xml";
    module.addAttribute( "type", "J2EE_WEB_MODULE" );
    Element component = findComponent( module, "WebModuleBuildComponent" );
    Element setting = findSetting( component, "EXPLODED_URL" );
    setting.addAttribute( "value", getModuleFileUrl( warWebapp ) );
    component = findComponent( module, "WebModuleProperties" );
    // Rebuild the container element list from scratch.
    removeOldElements( component, "containerElement" );
    List artifacts = executedProject.getTestArtifacts();
    for ( Iterator i = artifacts.iterator(); i.hasNext(); )
    {
        Artifact artifact = (Artifact) i.next();
        Element containerElement = createElement( component, "containerElement" );
        if ( linkModules && isReactorProject( artifact.getGroupId(), artifact.getArtifactId() ) )
        {
            // Reactor sibling: reference the IDEA module instead of a jar.
            containerElement.addAttribute( "type", "module" );
            containerElement.addAttribute( "name", artifact.getArtifactId() );
            Element methodAttribute = createElement( containerElement, "attribute" );
            methodAttribute.addAttribute( "name", "method" );
            methodAttribute.addAttribute( "value", "5" );
            Element uriAttribute = createElement( containerElement, "attribute" );
            uriAttribute.addAttribute( "name", "URI" );
            uriAttribute.addAttribute( "value", "/WEB-INF/lib/" + artifact.getArtifactId() + "-"
                + artifact.getVersion() + ".jar" );
        }
        else if ( artifact.getFile() != null )
        {
            containerElement.addAttribute( "type", "library" );
            containerElement.addAttribute( "level", "module" );
            Element methodAttribute = createElement( containerElement, "attribute" );
            methodAttribute.addAttribute( "name", "method" );
            if ( Artifact.SCOPE_PROVIDED.equalsIgnoreCase( artifact.getScope() )
                || Artifact.SCOPE_SYSTEM.equalsIgnoreCase( artifact.getScope() )
                || Artifact.SCOPE_TEST.equalsIgnoreCase( artifact.getScope() ) )
            {
                // If scope is provided, system or test - do not package.
                methodAttribute.addAttribute( "value", "0" );
            }
            else
            {
                methodAttribute.addAttribute( "value", "1" ); // IntelliJ 5.0.2 is bugged and doesn't read it
            }
            Element uriAttribute = createElement( containerElement, "attribute" );
            uriAttribute.addAttribute( "name", "URI" );
            uriAttribute.addAttribute( "value", "/WEB-INF/lib/" + artifact.getFile().getName() );
            Element url = createElement( containerElement, "url" );
            url.setText( getLibraryUrl( artifact ) );
        }
    }
    addDeploymentDescriptor( component, "web.xml", "2.3", webXml );
    // Replace the single web root with the configured warSourceDirectory.
    Element element = findElement( component, "webroots" );
    removeOldElements( element, "root" );
    element = createElement( element, "root" );
    element.addAttribute( "relative", "/" );
    element.addAttribute( "url", getModuleFileUrl( warSrc ) );
}
/**
 * Configures the module as an IDEA plugin (DevKit) module and records where
 * the META-INF/plugin.xml descriptor lives.
 *
 * @param module the root <module> element of the .iml document
 */
private void addPluginModule( Element module )
{
    module.addAttribute( "type", "PLUGIN_MODULE" );
    // Register the location of the plugin descriptor with the DevKit component.
    Element devKit = createElement( module, "component" );
    devKit.addAttribute( "name", "DevKit.ModuleBuildProperties" );
    devKit.addAttribute( "url", getModuleFileUrl( "src/main/resources/META-INF/plugin.xml" ) );
}
/**
 * Translate the relative path of the file into module path
 *
 * @param basedir File to use as basedir
 * @param path    Absolute path string to translate to ModuleFileUrl
 * @return moduleFileUrl Translated Module File URL
 */
private String getModuleFileUrl( File basedir, String path )
{
    // IDEA resolves $MODULE_DIR$ to the directory containing the .iml file,
    // so the stored URL stays valid when the checkout moves.
    return "file://$MODULE_DIR$/" + toRelative( basedir.getAbsolutePath(), path );
}
/**
 * Convenience overload: translates {@code file} relative to the executed
 * project's basedir into a $MODULE_DIR$-based URL.
 *
 * @param file path (absolute or project-relative) to translate
 * @return the translated module file URL
 */
private String getModuleFileUrl( String file )
{
    return getModuleFileUrl( executedProject.getBasedir(), file );
}
/**
 * Adds a sourceFolder element to the IDEA (.iml) project file. Nothing is
 * added when the directory name is empty or does not denote an existing
 * directory on disk.
 *
 * @param content   the <content> element to append to
 * @param directory directory to set as url
 * @param isTest    true if the directory holds test sources
 */
private void addSourceFolder( Element content, String directory, boolean isTest )
{
    if ( StringUtils.isEmpty( directory ) || !new File( directory ).isDirectory() )
    {
        return;
    }
    Element sourceFolder = createElement( content, "sourceFolder" );
    sourceFolder.addAttribute( "url", getModuleFileUrl( directory ) );
    sourceFolder.addAttribute( "isTestSource", String.valueOf( isTest ) );
}
/**
 * Appends an <excludeFolder url="..."/> entry for the given directory to the
 * module's <content> element.
 *
 * @param content   the <content> element to append to
 * @param directory directory to exclude from the module
 */
private void addExcludeFolder( Element content, String directory )
{
    createElement( content, "excludeFolder" )
        .addAttribute( "url", getModuleFileUrl( directory ) );
}
/**
 * Tells whether the given groupId/artifactId pair identifies a sibling
 * project of the current reactor build.
 *
 * @param groupId    group id to look for
 * @param artifactId artifact id to look for
 * @return true when a reactor project matches both coordinates
 */
private boolean isReactorProject( String groupId, String artifactId )
{
    if ( reactorProjects == null )
    {
        return false;
    }
    Iterator it = reactorProjects.iterator();
    while ( it.hasNext() )
    {
        MavenProject project = (MavenProject) it.next();
        boolean sameGroup = project.getGroupId().equals( groupId );
        if ( sameGroup && project.getArtifactId().equals( artifactId ) )
        {
            return true;
        }
    }
    return false;
}
/**
 * Points the given library element's single <root> at the artifact's
 * classified companion jar (sources or javadoc), downloading it at most once
 * per plugin run. When resolution fails the element is left untouched.
 *
 * @param element    the SOURCES/JAVADOC element to (re)populate
 * @param a          the main artifact
 * @param classifier companion classifier, e.g. "sources" or "javadoc"
 */
private void resolveClassifier( Element element, Artifact a, String classifier )
{
    String id = a.getId() + '-' + classifier;
    String path;
    if ( attemptedDownloads.containsKey( id ) )
    {
        getLog().debug( id + " was already downloaded." );
        path = (String) attemptedDownloads.get( id );
    }
    else
    {
        getLog().debug( id + " was not attempted to be downloaded yet: trying..." );
        path = resolveClassifiedArtifact( a, classifier );
        // Cache even failed attempts (null path) so we do not retry for every
        // module; assumes the map implementation permits null values.
        attemptedDownloads.put( id, path );
    }
    if ( path != null )
    {
        String jarPath = "jar://" + path + "!/";
        getLog().debug( "Setting " + classifier + " for " + id + " to " + jarPath );
        removeOldElements( element, "root" );
        createElement( element, "root" ).addAttribute( "url", jarPath );
    }
}
/**
 * Resolves the classified companion jar (sources/javadoc) of the artifact
 * into a file next to the main jar, downloading it from the project's remote
 * repositories when not already on disk.
 *
 * @param artifact   the main artifact (must have a resolved local file)
 * @param classifier the companion classifier
 * @return forward-slash absolute path of the companion jar, or null when the
 *         main file is not a .jar or the download fails
 */
private String resolveClassifiedArtifact( Artifact artifact, String classifier )
{
    String basePath = artifact.getFile().getAbsolutePath().replace( '\\', '/' );
    // NOTE(review): indexOf matches the FIRST ".jar" anywhere in the path; a
    // directory whose name contains ".jar" would truncate too early —
    // lastIndexOf would be safer. Confirm before changing.
    int delIndex = basePath.indexOf( ".jar" );
    if ( delIndex < 0 )
    {
        return null;
    }
    List remoteRepos = executedProject.getRemoteArtifactRepositories();
    try
    {
        Artifact classifiedArtifact = artifactFactory.createArtifactWithClassifier( artifact.getGroupId(),
                                                                                    artifact.getArtifactId(),
                                                                                    artifact.getVersion(),
                                                                                    artifact.getType(),
                                                                                    classifier );
        String dstFilename = basePath.substring( 0, delIndex ) + '-' + classifier + ".jar";
        File dstFile = new File( dstFilename );
        classifiedArtifact.setFile( dstFile );
        //this check is here because wagonManager does not seem to check if the remote file is newer
        // or such feature is not working
        if ( !dstFile.exists() )
        {
            wagonManager.getArtifact( classifiedArtifact, remoteRepos );
        }
        return dstFile.getAbsolutePath().replace( '\\', '/' );
    }
    catch ( TransferFailedException e )
    {
        // Download problems are non-fatal: the module simply lacks sources/javadoc.
        getLog().debug( e );
        return null;
    }
    catch ( ResourceDoesNotExistException e )
    {
        getLog().debug( e );
        return null;
    }
}
/**
 * Returns an Xpp3Dom element (setting).
 *
 * Looks up the <setting> child of the component whose "name" attribute
 * matches the given name (presumably creating it when missing — behaviour
 * delegated to {@code findElement}).
 *
 * @param component Xpp3Dom element
 * @param name      Setting attribute to find
 * @return setting Xpp3Dom element
 */
private Element findSetting( Element component, String name )
{
    return findElement( component, "setting", name );
}
/**
 * Builds the IDEA jar URL for the artifact's local file, e.g.
 * {@code jar://C:/repo/foo-1.0.jar!/}: backslashes are normalised to forward
 * slashes and drive-letter handling is delegated to convertDriveLetter.
 *
 * @param artifact artifact with a resolved local file
 * @return the jar:// URL for the artifact
 */
private String getLibraryUrl( Artifact artifact )
{
    return "jar://" + convertDriveLetter( artifact.getFile().getAbsolutePath() ).replace( '\\', '/' ) + "!/";
}
/**
 * Ensures the component has a <deploymentDescriptor> element: name and
 * version are only set when absent (existing values win), "optional" is
 * forced to false, and the url points at the descriptor file. Note the side
 * effect on the {@code deploymentDescriptorFile} field: the FIRST call
 * stores its file path and all later calls reuse that stored path for the
 * url.
 *
 * @param component component element to search/extend
 * @param name      default descriptor name (e.g. "web.xml")
 * @param version   default descriptor spec version
 * @param file      descriptor file path used when none was recorded yet
 * @return the (possibly newly configured) deploymentDescriptor element
 */
private Element addDeploymentDescriptor( Element component, String name, String version, String file )
{
    Element deploymentDescriptor = findElement( component, "deploymentDescriptor" );
    if ( deploymentDescriptor.attributeValue( "version" ) == null )
    {
        deploymentDescriptor.addAttribute( "version", version );
    }
    if ( deploymentDescriptor.attributeValue( "name" ) == null )
    {
        deploymentDescriptor.addAttribute( "name", name );
    }
    deploymentDescriptor.addAttribute( "optional", "false" );
    if ( deploymentDescriptorFile == null )
    {
        deploymentDescriptorFile = file;
    }
    deploymentDescriptor.addAttribute( "url", getModuleFileUrl( deploymentDescriptorFile ) );
    return deploymentDescriptor;
}
}
| [
"xingyong.wangxy@alibaba-inc.com"
] | xingyong.wangxy@alibaba-inc.com |
e24a17758152daba92400f61ca3cdd9ec9045ecd | 9e20645e45cc51e94c345108b7b8a2dd5d33193e | /L2J_Mobius_C4_ScionsOfDestiny/dist/game/data/scripts/quests/Q080_SagaOfTheWindRider/Q080_SagaOfTheWindRider.java | a64c2c5d136941d5392b473cbe36d92010e8cb67 | [] | no_license | Enryu99/L2jMobius-01-11 | 2da23f1c04dcf6e88b770f6dcbd25a80d9162461 | 4683916852a03573b2fe590842f6cac4cc8177b8 | refs/heads/master | 2023-09-01T22:09:52.702058 | 2021-11-02T17:37:29 | 2021-11-02T17:37:29 | 423,405,362 | 2 | 2 | null | null | null | null | UTF-8 | Java | false | false | 2,896 | java | /*
* This file is part of the L2J Mobius project.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package quests.Q080_SagaOfTheWindRider;
import quests.SagasSuperClass;
/**
 * Saga of the Wind Rider (quest id 80). All behaviour lives in the shared
 * {@link SagasSuperClass} engine; this subclass only supplies the per-class
 * configuration arrays (NPC ids, quest item ids, quest monster ids, class
 * transfer ids, teleport coordinates and NPC dialogue lines).
 *
 * @author Emperorc
 */
public class Q080_SagaOfTheWindRider extends SagasSuperClass
{
    public Q080_SagaOfTheWindRider()
    {
        super(80, "Saga of the Wind Rider");
        // NPC ids involved in the saga quest chain.
        _npc = new int[]
        {
            31603,
            31624,
            31284,
            31615,
            31612,
            31646,
            31648,
            31652,
            31654,
            31655,
            31659,
            31616
        };
        // Quest item ids handed out/collected during the saga steps.
        _items = new int[]
        {
            7080,
            7517,
            7081,
            7495,
            7278,
            7309,
            7340,
            7371,
            7402,
            7433,
            7103,
            0
        };
        // Quest monster ids spawned by the saga engine.
        _mob = new int[]
        {
            27300,
            27229,
            27303
        };
        // Target class id granted on completion (101 = Wind Rider).
        _classId = new int[]
        {
            101
        };
        // Required previous class id (0x17 = 23).
        _prevClass = new int[]
        {
            0x17
        };
        // Teleport coordinates (x/y/z triples, index-aligned across the arrays).
        _x = new int[]
        {
            161719,
            124314,
            124355
        };
        _y = new int[]
        {
            -92823,
            82155,
            82155
        };
        _z = new int[]
        {
            -1893,
            -2803,
            -2803
        };
        // NPC dialogue lines; PLAYERNAME is substituted by the saga engine.
        _text = new String[]
        {
            "PLAYERNAME! Pursued to here! However, I jumped out of the Banshouren boundaries! You look at the giant as the sign of power!",
            "... Oh ... good! So it was ... let's begin!",
            "I do not have the patience ..! I have been a giant force ...! Cough chatter ah ah ah!",
            "Paying homage to those who disrupt the orderly will be PLAYERNAME's death!",
            "Now, my soul freed from the shackles of the millennium, Halixia, to the back side I come ...",
            "Why do you interfere others' battles?",
            "This is a waste of time.. Say goodbye...!",
            "...That is the enemy",
            "...Goodness! PLAYERNAME you are still looking?",
            "PLAYERNAME ... Not just to whom the victory. Only personnel involved in the fighting are eligible to share in the victory.",
            "Your sword is not an ornament. Don't you think, PLAYERNAME?",
            "Goodness! I no longer sense a battle there now.",
            "let...",
            "Only engaged in the battle to bar their choice. Perhaps you should regret.",
            "The human nation was foolish to try and fight a giant's strength.",
            "Must...Retreat... Too...Strong.",
            "PLAYERNAME. Defeat...by...retaining...and...Mo...Hacker",
            "....! Fight...Defeat...It...Fight...Defeat...It..."
        };
        registerNPCs();
    }
} | [
"MobiusDevelopment@7325c9f8-25fd-504a-9f63-8876acdc129b"
] | MobiusDevelopment@7325c9f8-25fd-504a-9f63-8876acdc129b |
7d27eb05aaa1d11271e07e87548a0bc08ed4a83c | 8bcf5a608fe60fcfaacdd590efdbd503ffd8177b | /src/main/java/lk/rgd/crs/core/service/ContentRepository.java | f7fff12e9faef3e40a7fb3c84c2708c507b46bcb | [] | no_license | pradeepy2kg/pradeepy2kg-adsfasdf | d3513ec46c473b34daff93ae4c9fdd1856ccf369 | 463209273ec932cd9e53f878a59c95927e9e21b5 | refs/heads/master | 2020-05-29T15:22:22.920310 | 2013-12-09T09:35:48 | 2013-12-09T09:35:48 | 33,045,611 | 0 | 1 | null | null | null | null | UTF-8 | Java | false | false | 500 | java | package lk.rgd.crs.core.service;
import java.io.File;
/**
 * Abstraction over an external store for uploaded binary content (image
 * files), organised by a numeric division and a unique resource id.
 *
 * @author asankha
 */
public interface ContentRepository {
    /**
     * Store given file in the content repository and return its relative path name
     *
     * @param division the first level of hierarchy
     * @param idUKey   the unique ID of the resource stored
     * @param image    the image file
     * @return the relative file path used to store the file
     */
    public String storeFile(long division, String idUKey, File image);
}
| [
"Duminda@localhost"
] | Duminda@localhost |
37c839733472196a974b549c4bd38ad0bfcf05df | 52c36ce3a9d25073bdbe002757f08a267abb91c6 | /src/main/java/com/alipay/api/response/AlipayInsCooperationProductOfflineBatchqueryResponse.java | 7e3a8fcbb25606facdf1ebbde32058e51cebd205 | [
"Apache-2.0"
] | permissive | itc7/alipay-sdk-java-all | d2f2f2403f3c9c7122baa9e438ebd2932935afec | c220e02cbcdda5180b76d9da129147e5b38dcf17 | refs/heads/master | 2022-08-28T08:03:08.497774 | 2020-05-27T10:16:10 | 2020-05-27T10:16:10 | 267,271,062 | 0 | 0 | Apache-2.0 | 2020-05-27T09:02:04 | 2020-05-27T09:02:04 | null | UTF-8 | Java | false | false | 946 | java | package com.alipay.api.response;
import java.util.List;
import com.alipay.api.internal.mapping.ApiField;
import com.alipay.api.internal.mapping.ApiListField;
import com.alipay.api.domain.InsOffilneProduct;
import com.alipay.api.AlipayResponse;
/**
 * ALIPAY API: alipay.ins.cooperation.product.offline.batchquery response.
 *
 * Generated response wrapper: the {@code @ApiListField}/{@code @ApiField}
 * annotations drive the SDK's wire-format (de)serialisation, so the mapped
 * names must not be changed.
 *
 * @author auto create
 * @since 1.0, 2019-01-07 20:51:15
 */
public class AlipayInsCooperationProductOfflineBatchqueryResponse extends AlipayResponse {

    private static final long serialVersionUID = 4328757299832459755L;

    /**
     * 返回给机构的线下产品信息列表
     * (List of offline product entries returned to the institution.)
     */
    @ApiListField("product_list")
    @ApiField("ins_offilne_product")
    private List<InsOffilneProduct> productList;

    public void setProductList(List<InsOffilneProduct> productList) {
        this.productList = productList;
    }

    public List<InsOffilneProduct> getProductList( ) {
        return this.productList;
    }
}
| [
"ben.zy@antfin.com"
] | ben.zy@antfin.com |
ef75472341d635c1ba74c11389becb21f396e5a4 | 81719679e3d5945def9b7f3a6f638ee274f5d770 | /aws-java-sdk-waf/src/main/java/com/amazonaws/services/waf/model/waf_regional/transform/CreateRegexPatternSetResultJsonUnmarshaller.java | 73942541132c18efd8d7bc34a2a948ff63e54df5 | [
"Apache-2.0"
] | permissive | ZeevHayat1/aws-sdk-java | 1e3351f2d3f44608fbd3ff987630b320b98dc55c | bd1a89e53384095bea869a4ea064ef0cf6ed7588 | refs/heads/master | 2022-04-10T14:18:43.276970 | 2020-03-07T12:15:44 | 2020-03-07T12:15:44 | 172,681,373 | 1 | 0 | Apache-2.0 | 2019-02-26T09:36:47 | 2019-02-26T09:36:47 | null | UTF-8 | Java | false | false | 3,186 | java | /*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.waf.model.waf_regional.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.waf.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
 * CreateRegexPatternSetResult JSON Unmarshaller
 *
 * Generated code: walks the JSON token stream and populates the result's
 * "RegexPatternSet" and "ChangeToken" members, stopping when the parser
 * leaves the object depth it started at.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CreateRegexPatternSetResultJsonUnmarshaller implements Unmarshaller<CreateRegexPatternSetResult, JsonUnmarshallerContext> {

    public CreateRegexPatternSetResult unmarshall(JsonUnmarshallerContext context) throws Exception {
        CreateRegexPatternSetResult createRegexPatternSetResult = new CreateRegexPatternSetResult();

        // Remember where we started so nested objects do not end the loop early.
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        if (token == VALUE_NULL) {
            // A null payload yields an empty result object.
            return createRegexPatternSetResult;
        }

        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                if (context.testExpression("RegexPatternSet", targetDepth)) {
                    context.nextToken();
                    createRegexPatternSetResult.setRegexPatternSet(RegexPatternSetJsonUnmarshaller.getInstance().unmarshall(context));
                }
                if (context.testExpression("ChangeToken", targetDepth)) {
                    context.nextToken();
                    createRegexPatternSetResult.setChangeToken(context.getUnmarshaller(String.class).unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return createRegexPatternSetResult;
    }

    private static CreateRegexPatternSetResultJsonUnmarshaller instance;

    // Lazy singleton; the benign race is acceptable because instances are stateless.
    public static CreateRegexPatternSetResultJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new CreateRegexPatternSetResultJsonUnmarshaller();
        return instance;
    }
}
| [
""
] | |
14b8442f7e685013b4f1b8ee8898ada4557957b5 | 34f2167325dc868bc629a49825b03f416f0fd4b2 | /app/src/androidTest/java/org/sairaa/tennisscore/ExampleInstrumentedTest.java | cbf669d3a986800e9b88d17c525b60e69389194b | [] | no_license | prafulnayak/TennisScore | ace6df479fc3adb61bb73ae4b46263fc5b8eb058 | 4d4d59f89ee92bf42d71be052fa175499f6a5966 | refs/heads/master | 2020-03-19T14:59:16.495092 | 2018-06-12T15:52:07 | 2018-06-12T15:52:07 | 136,650,584 | 4 | 1 | null | null | null | null | UTF-8 | Java | false | false | 728 | java | package org.sairaa.tennisscore;
import android.content.Context;
import android.support.test.InstrumentationRegistry;
import android.support.test.runner.AndroidJUnit4;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.junit.Assert.*;
/**
* Instrumented test, which will execute on an Android device.
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
@RunWith(AndroidJUnit4.class)
public class ExampleInstrumentedTest {
@Test
public void useAppContext() {
// Context of the app under test.
Context appContext = InstrumentationRegistry.getTargetContext();
assertEquals("org.sairaa.tennisscore", appContext.getPackageName());
}
}
| [
"prafulnayak1988@gmail.com"
] | prafulnayak1988@gmail.com |
04e97dca073b63e7f15208f660bfaff2875c9dba | db8056922ba69684135bf194c29c3ea649766960 | /cei37/src/com/cei37/sortingSearching/Q11_1.java | 022a03cfc1039b01406223947bb885062c6652ae | [] | no_license | cei37/java | 3aa36daa8003cae5de352136ec2df998d8e706f6 | 5e0692842df843bf5da25e50373a284467c5e6c5 | refs/heads/master | 2021-01-10T13:28:17.951163 | 2015-11-11T17:21:09 | 2015-11-11T17:21:09 | 45,993,264 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 923 | java | package com.cei37.sortingSearching;
public class Q11_1 {
/**
* You are given two sorted arrays, A and B, where A has a large enough buffer at the
* end to hold B. Write a method to merge B into A in sorted order.
*
* A --> n
* B --> m
* O(m (log n))
*/
public static void main(String[] args) {
int[] A = new int[10];
A[0] = 1;
A[1] = 2;
A[2] = 4;
A[3] = 7;
A[4] = 8;
A[5] = 9;
A[6] = 10;
int[] B = {3,5,6};
merge(A,B);
for (int i=0; i<A.length; i++) {
System.out.println(i + " --> "+A[i]);
}
}
public static void merge(int A[], int[] B) {
int pointerA = A.length - 1, cB = B.length -1, cA = A.length - B.length - 1;
while(cB>=0 && cA>=0) {
if (B[cB]>=A[cA]) {
A[pointerA] = B[cB];
pointerA--;
cB--;
} else {
while(B[cB]<=A[cA]) {
A[pointerA] = A[cA];
pointerA--;
cA--;
}
}
}
}
} | [
"cei37@hotmail.com"
] | cei37@hotmail.com |
612d1cc82a08422c3d24effe72ec82ebd5c4fadd | 067cbf679024c610d74ce78ecac952bb67db2f08 | /src/SortArrayFromNumToK.java | a6a84287386915ced3ae70481b948d3043e3ff6e | [] | no_license | KennanObura/ProblemSolving | 78935dbddf941ebcc3d0bd112796c712f4f60221 | 845002d2a9784162fc7817d809bf708e631f04e7 | refs/heads/master | 2021-03-13T09:56:26.880566 | 2020-07-28T04:08:58 | 2020-07-28T04:08:58 | 246,667,470 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 1,629 | java | import java.lang.Math;
import java.util.Arrays;
public class SortArrayFromNumToK {

    public static void main(String[] args) {
        int[] nums = {2, 6, 3, 12, 56, 8};
        Sort sort = new Sort(nums);
        sort.calculate();
    }

    /** Sorts, in place, the stretch of an array lying between two given values. */
    static class Sort {
        int[] nums;

        Sort(int[] nums) {
            this.nums = nums;
        }

        /** Demo entry point: sorts between the values 6 and 3 and prints the result. */
        void calculate() {
            calculate(6, 3);
            System.out.println(Arrays.toString(nums));
        }

        /**
         * Bubble-sorts the inclusive range between the positions of
         * element1 and element2 (in either order). Logs and does nothing
         * when either value is absent from the array.
         */
        void calculate(int element1, int element2) {
            int index_1 = findIndex(element1);
            int index_2 = findIndex(element2);

            if (index_1 == -1 || index_2 == -1) {
                System.out.println("Nums out of bond");
                return;
            }

            int start = Math.min(index_1, index_2);
            int end = Math.max(index_1, index_2);

            // Bubble sort over indices [start, end] inclusive.
            // Bug fix: the inner bound was "j < end - 1", which left the
            // element at index `end` out of every comparison — a range of two
            // adjacent elements (like the demo's 6 and 3) was never swapped.
            for (int i = start; i < end; i++) {
                for (int j = start; j < end; j++) {
                    if (nums[j] > nums[j + 1]) swap(j, j + 1);
                }
            }
        }

        // Swaps nums[i] and nums[j], logging the exchange.
        private void swap(int i, int j) {
            System.out.println("Swaping " + nums[i] + " with " + nums[j]);
            int temp = nums[i];
            nums[i] = nums[j];
            nums[j] = temp;
        }

        // Linear scan for the first index holding k; -1 when absent.
        private int findIndex(int k) {
            int index = -1;
            for (int i = 0; i < nums.length; i++)
                if (nums[i] == k) {
                    index = i;
                    break;
                }
            System.out.println("index at :" + index);
            return index;
        }
    }
}
| [
"kennan.obura@gmail.com"
] | kennan.obura@gmail.com |
880c528de9b688221f4a4d01a5cb883fc7685b33 | dc1dbb7e5a4b95bf44170d2f51fd08b3814f2ac9 | /data_defect4j/preprossed_method_corpus/Chart/3/org/jfree/chart/text/TextUtilities_drawRotatedString_457.java | 72ffdee7935c24776fb8ec0f448a513ecc28f85c | [] | no_license | hvdthong/NetML | dca6cf4d34c5799b400d718e0a6cd2e0b167297d | 9bb103da21327912e5a29cbf9be9ff4d058731a5 | refs/heads/master | 2021-06-30T15:03:52.618255 | 2020-10-07T01:58:48 | 2020-10-07T01:58:48 | 150,383,588 | 1 | 1 | null | 2018-09-26T07:08:45 | 2018-09-26T07:08:44 | null | UTF-8 | Java | false | false | 1,190 | java |
org jfree chart text
util method work text
text util textutil
util method draw rotat text
common rotat math draw text 'vertically'
top charact left
param text text
param graphic devic
param angl angl clockwis rotat radian
param coordin
param coordin
draw rotat string drawrotatedstr string text graphics2 graphics2d
angl
draw rotat string drawrotatedstr text angl
| [
"hvdthong@gmail.com"
] | hvdthong@gmail.com |
060e90371d48e91057f566775b08dc36849b3979 | 0d835dfcff37e22c85492e602a2649a87d6f4ba0 | /Android/LawQingHai/idcardlibs/src/main/java/com/scxd/idcardlibs/BitmapUtil.java | f3a0e353d53fa38cab3db9f493121c2f6cfe7cea | [] | no_license | wvvwcom/QH_YDJC | 9076f881202096c36f1928ad6420886a6107a17a | c764faddf791625e5fbc9ba68c13295b79ed39c2 | refs/heads/master | 2021-10-19T09:49:10.598521 | 2019-02-20T03:41:23 | 2019-02-20T03:41:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 9,435 | java | package com.scxd.idcardlibs;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.graphics.Rect;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.os.Build;
import android.provider.MediaStore;
import android.util.Base64;
import android.util.Log;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
public class BitmapUtil {
    private static final String TAG = BitmapUtil.class.getSimpleName();

    /** Encodes the bitmap as a lossless PNG byte array. */
    public static byte[] bitmapToByte(Bitmap b) {
        ByteArrayOutputStream o = new ByteArrayOutputStream();
        b.compress(Bitmap.CompressFormat.PNG, 100, o);
        return o.toByteArray();
    }

    /** Decodes a byte array back into a Bitmap; returns null for null/empty input. */
    public static Bitmap byteToBitmap(byte[] b) {
        return (b == null || b.length == 0) ? null : BitmapFactory.decodeByteArray(b, 0, b.length);
    }

    /** Encodes the bitmap's PNG bytes as a Base64 string (Base64.DEFAULT flags). */
    public static String bitmapToString(Bitmap bitmap) {
        return Base64.encodeToString(bitmapToByte(bitmap), Base64.DEFAULT);
    }

    /**
     * Extracts the Bitmap backing a drawable.
     * NOTE(review): assumes the drawable is a BitmapDrawable; any other Drawable
     * subtype will throw ClassCastException.
     */
    public static Bitmap drawableToBitmap(Drawable drawable) {
        return drawable == null ? null : ((BitmapDrawable) drawable).getBitmap();
    }

    /** Wraps a bitmap in a BitmapDrawable (Resources-less ctor kept for compatibility). */
    public static Drawable bitmapToDrawable(Bitmap bitmap) {
        return bitmap == null ? null : new BitmapDrawable(bitmap);
    }

    /** Scales the bitmap to an absolute pixel size. */
    public static Bitmap scaleImageTo(Bitmap org, int newWidth, int newHeight) {
        return scaleImage(org, (float) newWidth / org.getWidth(), (float) newHeight / org.getHeight());
    }

    /** Scales the bitmap by independent width/height factors; returns null for null input. */
    public static Bitmap scaleImage(Bitmap org, float scaleWidth, float scaleHeight) {
        if (org == null) {
            return null;
        }
        Matrix matrix = new Matrix();
        matrix.postScale(scaleWidth, scaleHeight);
        return Bitmap.createBitmap(org, 0, 0, org.getWidth(), org.getHeight(), matrix, true);
    }

    /**
     * Clips the bitmap to a circle.
     * BUGFIX: {@code width} was previously read from {@code getHeight()}, so
     * non-square sources produced a wrongly sized output; it now uses the real width.
     * NOTE(review): the mask is painted with Color.TRANSPARENT (the opaque color is
     * commented out upstream), which likely yields a fully transparent result under
     * SRC_IN — confirm against the original author's intent before relying on this.
     */
    public static Bitmap toRoundCorner(Bitmap bitmap) {
        int height = bitmap.getHeight();
        int width = bitmap.getWidth();
        Bitmap output = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
        Canvas canvas = new Canvas(output);
        final Paint paint = new Paint();
        final Rect rect = new Rect(0, 0, width, height);
        paint.setAntiAlias(true);
        canvas.drawARGB(0, 0, 0, 0);
        //paint.setColor(0xff424242);
        paint.setColor(Color.TRANSPARENT);
        // Paint the circular mask, then composite the source bitmap through it.
        canvas.drawCircle(width / 2, height / 2, width / 2, paint);
        paint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.SRC_IN));
        canvas.drawBitmap(bitmap, rect, rect, paint);
        return output;
    }

    /** Creates a thumbnail scaled to newWidth x newHeight, optionally recycling the source. */
    public static Bitmap createBitmapThumbnail(Bitmap bitMap, boolean needRecycle, int newHeight, int newWidth) {
        int width = bitMap.getWidth();
        int height = bitMap.getHeight();
        // Scale factors from the source size to the requested size.
        float scaleWidth = ((float) newWidth) / width;
        float scaleHeight = ((float) newHeight) / height;
        Matrix matrix = new Matrix();
        matrix.postScale(scaleWidth, scaleHeight);
        Bitmap newBitMap = Bitmap.createBitmap(bitMap, 0, 0, width, height, matrix, true);
        if (needRecycle)
            bitMap.recycle();
        return newBitMap;
    }

    /** Writes the bitmap to the given file as JPEG (quality 100); returns true on success. */
    public static boolean saveBitmap(Bitmap bitmap, File file) {
        if (bitmap == null)
            return false;
        FileOutputStream fos = null;
        try {
            fos = new FileOutputStream(file);
            bitmap.compress(Bitmap.CompressFormat.JPEG, 100, fos);
            fos.flush();
            return true;
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (fos != null) {
                try {
                    fos.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
        return false;
    }

    /** Deletes any existing regular file at absPath, then writes the bitmap there. */
    public static boolean saveBitmap(Bitmap bitmap, String absPath) {
        File file = new File(absPath);
        if (file.exists() && file.isFile()) {
            file.delete();
        }
        // Reuse the File instance instead of constructing a second one for the same path.
        return saveBitmap(bitmap, file);
    }

    /** Builds an image-pick intent with a 1:1 aspect ratio. */
    public static Intent buildImageGetIntent(Uri saveTo, int outputX, int outputY, boolean returnData) {
        return buildImageGetIntent(saveTo, 1, 1, outputX, outputY, returnData);
    }

    /**
     * Builds an image-pick intent; uses ACTION_OPEN_DOCUMENT on KitKat+ and
     * ACTION_GET_CONTENT on older releases.
     */
    public static Intent buildImageGetIntent(Uri saveTo, int aspectX, int aspectY,
                                             int outputX, int outputY, boolean returnData) {
        Log.i(TAG, "Build.VERSION.SDK_INT : " + Build.VERSION.SDK_INT);
        Intent intent = new Intent();
        if (Build.VERSION.SDK_INT < 19) {
            intent.setAction(Intent.ACTION_GET_CONTENT);
        } else {
            intent.setAction(Intent.ACTION_OPEN_DOCUMENT);
            intent.addCategory(Intent.CATEGORY_OPENABLE);
        }
        intent.setType("image/*");
        intent.putExtra("output", saveTo);
        intent.putExtra("aspectX", aspectX);
        intent.putExtra("aspectY", aspectY);
        intent.putExtra("outputX", outputX);
        intent.putExtra("outputY", outputY);
        intent.putExtra("scale", true);
        intent.putExtra("return-data", returnData);
        intent.putExtra("outputFormat", Bitmap.CompressFormat.PNG.toString());
        return intent;
    }

    /** Builds a crop intent with a 1:1 aspect ratio. */
    public static Intent buildImageCropIntent(Uri uriFrom, Uri uriTo, int outputX, int outputY, boolean returnData) {
        return buildImageCropIntent(uriFrom, uriTo, 1, 1, outputX, outputY, returnData);
    }

    /** Builds an intent for the (vendor-dependent) com.android.camera crop action. */
    public static Intent buildImageCropIntent(Uri uriFrom, Uri uriTo, int aspectX, int aspectY,
                                              int outputX, int outputY, boolean returnData) {
        Intent intent = new Intent("com.android.camera.action.CROP");
        intent.setDataAndType(uriFrom, "image/*");
        intent.putExtra("crop", "true");
        intent.putExtra("output", uriTo);
        intent.putExtra("aspectX", aspectX);
        intent.putExtra("aspectY", aspectY);
        intent.putExtra("outputX", outputX);
        intent.putExtra("outputY", outputY);
        intent.putExtra("scale", true);
        intent.putExtra("return-data", returnData);
        intent.putExtra("outputFormat", Bitmap.CompressFormat.PNG.toString());
        return intent;
    }

    /** Builds a camera-capture intent whose full-size result is written to {@code uri}. */
    public static Intent buildImageCaptureIntent(Uri uri) {
        Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
        intent.putExtra(MediaStore.EXTRA_OUTPUT, uri);
        return intent;
    }

    /**
     * Computes a BitmapFactory inSampleSize that shrinks the decoded image to at
     * most roughly reqWidth x reqHeight; never returns less than 1.
     */
    public static int calculateInSampleSize(BitmapFactory.Options options, int reqWidth, int reqHeight) {
        int h = options.outHeight;
        int w = options.outWidth;
        int inSampleSize = 0;
        if (h > reqHeight || w > reqWidth) {
            float ratioW = (float) w / reqWidth;
            float ratioH = (float) h / reqHeight;
            inSampleSize = (int) Math.min(ratioH, ratioW);
        }
        inSampleSize = Math.max(1, inSampleSize);
        return inSampleSize;
    }

    /** Decodes the file subsampled to roughly reqWidth x reqHeight (two-pass decode). */
    public static Bitmap getSmallBitmap(String filePath, int reqWidth, int reqHeight) {
        BitmapFactory.Options options = new BitmapFactory.Options();
        options.inJustDecodeBounds = true;
        BitmapFactory.decodeFile(filePath, options);
        options.inSampleSize = calculateInSampleSize(options, reqWidth, reqHeight);
        options.inJustDecodeBounds = false;
        return BitmapFactory.decodeFile(filePath, options);
    }

    /** Decodes, subsamples, and JPEG-compresses the file to a byte array. */
    public byte[] compressBitmapToBytes(String filePath, int reqWidth, int reqHeight, int quality) {
        Bitmap bitmap = getSmallBitmap(filePath, reqWidth, reqHeight);
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        bitmap.compress(Bitmap.CompressFormat.JPEG, quality, baos);
        byte[] bytes = baos.toByteArray();
        bitmap.recycle();
        Log.i(TAG, "Bitmap compressed success, size: " + bytes.length);
        return bytes;
    }

    /**
     * Repeatedly halves JPEG quality until the result fits in maxLenth bytes
     * (or quality reaches 0, whichever comes first).
     */
    public byte[] compressBitmapSmallTo(String filePath, int reqWidth, int reqHeight, int maxLenth) {
        int quality = 100;
        byte[] bytes = compressBitmapToBytes(filePath, reqWidth, reqHeight, quality);
        while (bytes.length > maxLenth && quality > 0) {
            quality = quality / 2;
            bytes = compressBitmapToBytes(filePath, reqWidth, reqHeight, quality);
        }
        return bytes;
    }

    /** Convenience: 480x800 at quality 50. */
    public byte[] compressBitmapQuikly(String filePath) {
        return compressBitmapToBytes(filePath, 480, 800, 50);
    }

    /** Convenience: 480x800, shrunk until it fits in maxLenth bytes. */
    public byte[] compressBitmapQuiklySmallTo(String filePath, int maxLenth) {
        return compressBitmapSmallTo(filePath, 480, 800, maxLenth);
    }
}
| [
"920123"
] | 920123 |
ea7a6889cd108cae0c3ca7ab8435864448b8b762 | 508da7012b304f5d698adcaa21ef1ea444417851 | /connectors/camel-pulsar-kafka-connector/src/main/java/org/apache/camel/kafkaconnector/pulsar/CamelPulsarSourceTask.java | 8079b601672c2fa8b5a5ae9c6b89d587043ae6d0 | [
"Apache-2.0"
] | permissive | jboss-fuse/camel-kafka-connector | 02fd86c99ee4d2ac88ac5cf8b4cf56bd0bbcc007 | 8411f2f772a00d1d4a53ca8f24e13128306f5c02 | refs/heads/master | 2021-07-02T19:43:30.085652 | 2020-10-19T13:17:53 | 2020-10-19T13:18:04 | 181,911,766 | 16 | 7 | Apache-2.0 | 2019-12-04T08:44:47 | 2019-04-17T14:45:05 | Java | UTF-8 | Java | false | false | 1,685 | java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.kafkaconnector.pulsar;
import java.util.HashMap;
import java.util.Map;
import javax.annotation.Generated;
import org.apache.camel.kafkaconnector.CamelSourceConnectorConfig;
import org.apache.camel.kafkaconnector.CamelSourceTask;
@Generated("This class has been generated by camel-kafka-connector-generator-maven-plugin, remove this annotation to prevent it from being generated.")
public class CamelPulsarSourceTask extends CamelSourceTask {
@Override
protected CamelSourceConnectorConfig getCamelSourceConnectorConfig(
Map<String, String> props) {
return new CamelPulsarSourceConnectorConfig(props);
}
@Override
protected Map<String, String> getDefaultConfig() {
return new HashMap<String, String>() {{
put(CamelSourceConnectorConfig.CAMEL_SOURCE_COMPONENT_CONF, "pulsar");
}};
}
} | [
"andrea.tarocchi@gmail.com"
] | andrea.tarocchi@gmail.com |
073977341ab6eca8d6cb6be4b985094baec886c5 | e70eb98cf3efc23d74dd08d37f1067cb2aacb7bf | /src/main/java/pers/zander/edu/arithmetic/sort/MySort.java | 29cfc5ee253924e648c07b910d3f199e1a7e3afc | [] | no_license | Zander829/edupro1.0 | 59cf1326b4ae99c0b2ed2973130fcc0db0c83313 | ee989483970acb441b8c4a4e9f9cab45d89e5e96 | refs/heads/master | 2021-01-20T13:13:34.787668 | 2018-04-16T12:42:57 | 2018-04-16T12:42:57 | 82,680,412 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 243 | java | package pers.zander.edu.arithmetic.sort;
/**
 * Strategy interface for sorting an int array in place; implementations
 * choose the concrete algorithm.
 */
@FunctionalInterface
public interface MySort {
    /**
     * Sorts {@code array} in place.
     *
     * @param name  a label identifying this sort run
     * @param array the data to sort, modified in place
     */
    // Interface members are implicitly public; the redundant modifier was dropped.
    void insertSort(String name, int array[]);
}
| [
"zhao0829zhao@163.com"
] | zhao0829zhao@163.com |
5195b5dd047529f6746c43ff6a92f8bc8c599b2b | 062258aedb21af4755275737e412952255c415df | /src/test/java/exercises/Day12Test.java | 7602cbf22e56120a60fc2a10c1973e31e88c8a7a | [] | no_license | luigiDB/adventOfCode2020 | 5f55675a518f3aac88c615039858a0052fefb697 | ffeb8d7522a86e3b9d21ed11cb400803a34adfc3 | refs/heads/master | 2023-02-07T23:34:56.863335 | 2021-01-02T00:05:47 | 2021-01-02T00:05:47 | 318,427,675 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 1,161 | java | package exercises;
import org.jooq.lambda.tuple.Tuple2;
import org.junit.Test;
import java.util.stream.Stream;
import static org.junit.Assert.assertEquals;
import static utilities.AOCTestFramework.getTestInput;
import static utilities.AOCTestFramework.parseInput;
public class Day12Test {
private String testInput = "F10\n" +
"N3\n" +
"F7\n" +
"R90\n" +
"F11";
@Test
public void es1Test() {
Stream<Tuple2<Day12.Movement, Integer>> parseInput = parseInput(testInput, Day12::parser);
assertEquals(25, Day12.es1(parseInput));
}
@Test
public void es1() {
int x = Day12.es1(parseInput(getTestInput("12"), Day12::parser));
assertEquals(1032, x);
System.out.println(x);
}
@Test
public void es2Test() {
Stream<Tuple2<Day12.Movement, Integer>> parseInput = parseInput(testInput, Day12::parser);
assertEquals(286, Day12.es2(parseInput));
}
@Test
public void es2() {
int x = Day12.es2(parseInput(getTestInput("12"), Day12::parser));
assertEquals(156735, x);
System.out.println(x);
}
} | [
"luigi.debianchi@gmail.com"
] | luigi.debianchi@gmail.com |
4df686ee7101543da24b60c7a61e8e9ba8fbbcc2 | 81ab6cfca35fdace73526fe8df53fc55691f6832 | /tutorial-jsf-samples/CRUDExam/src/main/java/org/javaee7/crudexam/web/PersonController.java | c2b25c98106923718a107335e19f023d0b34868d | [] | no_license | luiz158/JavaEE-7-Samples | 1f584377665360bd9f7b34f8d1d092efa1a8fe9e | 1915a161eb6cb68ee7c5922ddfe8f76a6a6a01b0 | refs/heads/master | 2020-12-25T08:43:31.273379 | 2014-05-11T16:47:30 | 2014-05-11T16:47:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 900 | java | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.javaee7.crudexam.web;
import java.util.Date;
import javax.enterprise.context.RequestScoped;
import javax.inject.Inject;
import javax.inject.Named;
import org.javaee7.crudexam.model.Person;
import org.javaee7.crudexam.sessions.PersonFacade;
/**
*
* @author Masudul Haque
*/
@Named
@RequestScoped
public class PersonController {

    /** Date of birth bound from the JSF form. */
    private Date dob;

    /** Facade handling Person persistence. */
    @Inject
    PersonFacade pf;

    public Date getDob() {
        return dob;
    }

    public void setDob(Date dob) {
        this.dob = dob;
    }

    /**
     * Persists a new Person built from the submitted date of birth.
     * Returns null so JSF stays on the current view.
     */
    public String save() {
        final Person person = new Person();
        person.setDob(dob);
        pf.save(person);
        System.out.println("Person is saved");
        return null;
    }
}
| [
"masud_cse_05@hotmail.com"
] | masud_cse_05@hotmail.com |
efbb9b88441845bd0f27eefcf94856eec6ad2852 | 8542e91136a909a4ee9b8f1d8a5ecc9a925ecf1c | /java-common-struts2/src/common/struts2/logs/LoggerStruts2.java | 1341886909621333d1ad4492bfef6b5e7d73d61b | [] | no_license | nativebinary/java-common | d7c404c60e7fc2213338d8c06dda59a23258df56 | e471dfb004962b51b2d7472e010a83f241497f1b | refs/heads/master | 2021-06-02T13:39:22.095979 | 2019-06-10T05:43:36 | 2019-06-10T05:43:36 | 20,173,645 | 1 | 2 | null | 2015-07-01T12:35:31 | 2014-05-26T04:07:49 | Java | UTF-8 | Java | false | false | 3,319 | java | //package common.struts2.logs;
//
//import common.basic.logs.ILogger;
//import common.basic.logs.Level;
//import common.basic.logs.LogUtil;
//import org.apache.commons.logging.Log;
//import org.apache.commons.logging.LogFactory;
//
//public class LoggerStruts2 implements ILogger {
// private boolean debug = false;
// private Level level = Level.Verbose;
// private final int stackRewindCount = 4;
// private final Log log = LogFactory.getLog("STRUTS2");
//
// @Override
// public void setDebug(boolean debug) {
// this.debug = debug;
// }
//
// @Override
// public boolean isDebug() {
// return debug;
// }
//
// @Override
// public boolean isLoggable(Level level) {
// return debug && level.isLoggable(this.level);
// }
//
// @Override
// public void v(Object... arrayObject) {
// if(!isLoggable(Level.Verbose))
// return;
//
// log.trace(LogUtil.makeMessage(arrayObject, Level.Verbose, stackRewindCount));
// }
//
// @Override
// public void d(Object... arrayObject) {
// if(!isLoggable(Level.Debug))
// return;
//
// log.debug(LogUtil.makeMessage(arrayObject, Level.Debug, stackRewindCount));
// }
//
// @Override
// public void i(Object... arrayObject) {
// if(!isLoggable(Level.Info))
// return;
//
// log.info(LogUtil.makeMessage(arrayObject, Level.Info, stackRewindCount));
// }
//
// @Override
// public void w(Object... arrayObject) {
// if(!isLoggable(Level.Warn))
// return;
//
// log.warn(LogUtil.makeMessage(arrayObject, Level.Warn, stackRewindCount));
// }
//
// @Override
// public void e(Object... arrayObject) {
// if(!isLoggable(Level.Error))
// return;
//
// log.error(LogUtil.makeMessage(arrayObject, Level.Error, stackRewindCount));
// }
//
//
// @Override
// public void rv(int additionalStackRewindCount, Object... arrayObject) {
// if(!isLoggable(Level.Verbose))
// return;
//
// log.trace(LogUtil.makeMessage(arrayObject, Level.Verbose, stackRewindCount + additionalStackRewindCount));
// }
//
// @Override
// public void rd(int additionalStackRewindCount, Object... arrayObject) {
// if(!isLoggable(Level.Debug))
// return;
//
// log.debug(LogUtil.makeMessage(arrayObject, Level.Debug, stackRewindCount + additionalStackRewindCount));
// }
//
// @Override
// public void ri(int additionalStackRewindCount, Object... arrayObject) {
// if(!isLoggable(Level.Info))
// return;
//
// log.info(LogUtil.makeMessage(arrayObject, Level.Info, stackRewindCount + additionalStackRewindCount));
// }
//
// @Override
// public void rw(int additionalStackRewindCount, Object... arrayObject) {
// if(!isLoggable(Level.Warn))
// return;
//
// log.warn(LogUtil.makeMessage(arrayObject, Level.Warn, stackRewindCount + additionalStackRewindCount));
// }
//
// @Override
// public void re(int additionalStackRewindCount, Object... arrayObject) {
// if(!isLoggable(Level.Error))
// return;
//
// log.error(LogUtil.makeMessage(arrayObject, Level.Error, stackRewindCount + additionalStackRewindCount));
// }
//
//}
| [
"aha00a@gmail.com"
] | aha00a@gmail.com |
1a55f2aa7618ee77dd879534e194dd18ad054bd7 | f0a21142d452a18ff5c2fd2438557ef30604c405 | /app/src/main/java/org/wikipedia/activity/FragmentUtil.java | 4355b25562fee7592f141137292a24fc1bd461a9 | [
"Apache-2.0"
] | permissive | tanvir-irfan/Wikipedia-Test-Espresso | 0ad9a73f63d64d7e8e3d6ba6c9864c7b218ead07 | 6d6ad71702b45ccfb5b6e6a9529367cb241c5433 | refs/heads/master | 2021-06-18T17:22:35.655561 | 2019-03-26T18:08:25 | 2019-03-26T18:08:25 | 75,755,110 | 0 | 0 | Apache-2.0 | 2021-04-01T04:02:46 | 2016-12-06T17:31:28 | Java | UTF-8 | Java | false | false | 842 | java | package org.wikipedia.activity;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
public final class FragmentUtil {
@Nullable public static <T> T getCallback(@NonNull Fragment fragment, @NonNull Class<T> callback) {
if (callback.isInstance(fragment.getTargetFragment())) {
//noinspection unchecked
return (T) fragment.getTargetFragment();
}
if (callback.isInstance(fragment.getParentFragment())) {
//noinspection unchecked
return (T) fragment.getParentFragment();
}
if (callback.isInstance(fragment.getActivity())) {
//noinspection unchecked
return (T) fragment.getActivity();
}
return null;
}
private FragmentUtil() { }
} | [
"tanvir_cse072@yahoo.com"
] | tanvir_cse072@yahoo.com |
be8275dfb4a456c76b90b04d201d78c42766917f | 672ee6d4679f5a435561da8d3e482fe90b9c9943 | /ee/src/cdi/CDI9.java | 92746efe2d039facee891cde25c2f5f3bc7152f0 | [] | no_license | stea1th/FirstServletLesson | 8859dc899348c9e286bec2b29e2e5e3e475d6b78 | 783475a1f08a81fbfa455619216c78d853f472f2 | refs/heads/master | 2020-07-27T02:12:03.795668 | 2019-09-19T11:54:48 | 2019-09-19T11:54:48 | 208,825,488 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 550 | java | package cdi;
import interfaces.A;
import javax.inject.Inject;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
@WebServlet("/test9")
public class CDI9 extends HttpServlet {

    /** CDI-injected implementation of {@link A}. */
    @Inject
    A a;

    /** Handles GET /test9 by delegating to the injected bean. */
    @Override
    protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
        this.a.print();
    }
}
| [
"stea1th@mail.ru"
] | stea1th@mail.ru |
44eddca783430ac5c35df324eab6f382fa9b22ec | 3e9a23209145406ea1a3033ce22506fb93f3ff04 | /src/main/java/Database/UserProfileDb.java | 21ad96294a2861bf93dd6728bb767f99f2fe007c | [] | no_license | jainsarthak3/eCFHackathonIceBreaker | 727f7adeb37cbe23a152613f100bcb37cc46fca9 | 1752b4ae7cea9a3fc4f9ae886482a12eaf88d9b3 | refs/heads/master | 2022-12-18T06:06:10.536592 | 2020-09-18T06:27:06 | 2020-09-18T06:27:06 | 296,445,958 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 512 | java | package Database;
import java.util.HashMap;
import java.util.Map;
public class UserProfileDb {

    /** Backing store, keyed by profile id. */
    private Map<String, ProfileData> userProfileMap = new HashMap<>();

    /** Returns the full profile map (a live reference, not a copy). */
    public Map<String, ProfileData> getUserPoolData() {
        return this.userProfileMap;
    }

    /** Replaces the backing map wholesale with the supplied one. */
    public void populateUserPoolData(Map<String, ProfileData> userProfileMap) {
        this.userProfileMap = userProfileMap;
    }

    /** Looks up a single profile; returns null when the id is unknown. */
    public ProfileData getUserProfileData(String profileId) {
        return this.userProfileMap.get(profileId);
    }
}
| [
"jsarthak@amazon.com"
] | jsarthak@amazon.com |
57b2f0c137086edb29d6f7da3bb5a7c5595cad79 | 3f5f0a2890d52229c77bc4c9c9112333e2c67885 | /src/main/java/utils/DriverFactory.java | 6f93316c75a428d177218e4c51107dce07f69106 | [] | no_license | ArtistaVeKaras/ElsevierTest | 5401d1025e51f5a3e0a7981caafd3c4a7d2d5b1b | fd6001a0c0a42dafaee7856b1b2da0cb32039cd6 | refs/heads/feature | 2021-04-02T12:54:50.911650 | 2020-03-20T15:24:20 | 2020-03-20T15:24:20 | 248,277,406 | 0 | 0 | null | 2020-10-13T20:27:55 | 2020-03-18T15:57:48 | HTML | UTF-8 | Java | false | false | 1,889 | java | package utils;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.firefox.FirefoxDriver;
import org.openqa.selenium.remote.DesiredCapabilities;
import org.openqa.selenium.support.PageFactory;
import pageObject.BasePage;
import pageObject.IndexPage;
import stepDefinitions.IndexSteps;
import java.io.FileInputStream;
import java.util.Properties;
import java.util.concurrent.TimeUnit;
public class DriverFactory {
    public static WebDriver driver;
    public static BasePage basePage;
    public static IndexPage indexPage;

    /**
     * Lazily creates the singleton WebDriver for the browser named in
     * src/main/java/properties/config.properties ("firefox" or "chrome"),
     * configures the page-load timeout, and initializes the page objects.
     *
     * @return the shared driver, or null if it could not be created
     */
    public WebDriver getDriver() {
        // try-with-resources closes the config stream (it previously leaked).
        try (FileInputStream fis = new FileInputStream(
                System.getProperty("user.dir") + "/src/main/java/properties/config.properties")) {
            Properties p = new Properties();
            p.load(fis);
            String browserName = p.getProperty("browser");
            switch (browserName) {
                case "firefox":
                    if (null == driver) {
                        System.setProperty("webdriver.gecko.driver", Constant.GECKO_DRIVER_DIRECTORY);
                        DesiredCapabilities capabilities = DesiredCapabilities.firefox();
                        capabilities.setCapability("marionette", true);
                        driver = new FirefoxDriver();
                    }
                    break;
                case "chrome":
                    if (null == driver) {
                        System.setProperty("webdriver.chrome.driver", Constant.CHROME_DRIVER_DIRECTORY);
                        driver = new ChromeDriver();
                        driver.manage().window().maximize();
                    }
            }
        } catch (Exception e) {
            System.out.println("Unable to load browser: " + e.getMessage());
        } finally {
            // BUGFIX: the finally block used to dereference `driver`
            // unconditionally and threw NPE whenever creation failed above.
            if (driver != null) {
                driver.manage().timeouts().pageLoadTimeout(150, TimeUnit.SECONDS);
                basePage = PageFactory.initElements(driver, BasePage.class);
                indexPage = PageFactory.initElements(driver, IndexPage.class);
            }
        }
        return driver;
    }
}
| [
"claudiooartista@hotmail.co.uk"
] | claudiooartista@hotmail.co.uk |
fbf6e8c7aae3e91a0ed59c2454ad96c2d19ac7fa | 159b05e7a03ee513f3a361d1da820b25a58f323f | /src/modelos/Jugador_Juego.java | 53bc34e00464f81b1e8a34a352d4dec69e632350 | [] | no_license | pablozam/idnum | 14c1abd3e7163d37efba4fe4bd89152490921aec | 335ac74803e3834a0a5f6dfea50c429f3e8b69c4 | refs/heads/master | 2022-01-16T21:18:52.229327 | 2019-08-11T20:49:00 | 2019-08-11T20:49:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 9,025 | java |
package modelos;
import static idnum.Idnum.conexion;
import java.sql.ResultSet;
public class Jugador_Juego implements DatabaseAble{
    // Join entity linking a Jugador (player) to a Juego (game) at a timestamp.
    // NOTE(review): every query below is built by string concatenation; if any
    // interpolated value can originate from user input this is a SQL-injection
    // risk. Left unchanged because the `conexion` wrapper's API is not visible
    // from this file — confirm whether it offers parameterized queries.
    Jugador jugador;
    Juego juego;
    String fecha;

    public Jugador_Juego(Jugador jugador, Juego juego, String fecha) {
        this.jugador = jugador;
        this.juego = juego;
        this.fecha = fecha;
    }

    /**
     * Returns the per-content correct-answer counts for this session's date,
     * joined with '-' (e.g. "3-5-2-").
     */
    public String getAciertos(){
        String sentencia = "SELECT pregunta.id_contenido AS Contenido, SUM(IF (pregunta.respuesta= contestacion.contestacion_objetiva,1,0)) AS Resultado FROM contestacion, pregunta, literal, jugador_juego"
                + " WHERE  jugador_juego.fecha = contestacion.fecha AND pregunta.id_pregunta = contestacion.id_pregunta"
                + " AND jugador_juego.fecha = '"+getFecha()+"'"
                + " AND ((pregunta.literal_1 = literal.id_literal AND contestacion.contestacion_objetiva='A')"
                + " OR (pregunta.literal_2 = literal.id_literal AND contestacion.contestacion_objetiva='B') "
                + " OR (pregunta.literal_3 = literal.id_literal AND contestacion.contestacion_objetiva='C') "
                + " OR (pregunta.literal_4 = literal.id_literal AND contestacion.contestacion_objetiva='D')) "
                + " GROUP BY pregunta.id_contenido ORDER BY contestacion.fecha ASC";
        ResultSet rs;
        try {
            idnum.Idnum.conexion.conectaBD();
            rs = idnum.Idnum.conexion.consultaBD(sentencia);
            String valor = "";
            while(rs.next()){
                valor += rs.getString("Resultado")+"-";
            }
            idnum.Idnum.conexion.cerrar_conexionBD();
            return valor;
        } catch (Exception ex) {
            System.out.println("Jugador_Juego - getAciertos: "+ex);
            return null;
        }
    }

    /**
     * Builds a table of this session's exam answers: row 0 is the header, each
     * following row holds index, chosen answer, content label, and result.
     * Returns null on any database error.
     */
    public String [][] getRespuestasExamen(){
        String respuestas [][] = new String[100][4];
        String sentencia = "SELECT pregunta.id_contenido AS contenido, contestacion_objetiva AS Respuesta, IF (pregunta.respuesta= contestacion.contestacion_objetiva,'Correcta','Incorrecta') AS Resultado FROM contestacion, pregunta, literal\n" +
            "WHERE contestacion.id_pregunta = pregunta.id_pregunta\n" +
            "AND contestacion.fecha = '"+ getFecha() +"'" +
            "AND ((pregunta.literal_1 = literal.id_literal AND contestacion.contestacion_objetiva='A')\n" +
            "OR (pregunta.literal_2 = literal.id_literal AND contestacion.contestacion_objetiva='B')\n" +
            "OR (pregunta.literal_3 = literal.id_literal AND contestacion.contestacion_objetiva='C')\n" +
            "OR (pregunta.literal_4 = literal.id_literal AND contestacion.contestacion_objetiva='D'))";
        ResultSet rs;
        try {
            idnum.Idnum.conexion.conectaBD();
            rs = idnum.Idnum.conexion.consultaBD(sentencia);
            int contador = 0;
            int id_contenido = 0;
            String nombre_contenido = "Primer contenido";
            while(rs.next()){
                if(contador == 0){
                    // First iteration: emit the header row and remember the
                    // first content id so transitions can be detected below.
                    respuestas[contador][0] = "";
                    respuestas[contador][1] = "Respuesta";
                    respuestas[contador][2] = "Contenido";
                    respuestas[contador][3] = "Resultado";
                    id_contenido = rs.getInt("contenido");
                }
                if(id_contenido != rs.getInt("contenido")){
                    // Content id changed: advance the human-readable label.
                    id_contenido = rs.getInt("contenido");
                    switch(nombre_contenido){
                        case "Primer contenido": nombre_contenido = "Segundo contenido";
                            break;
                        case "Segundo contenido": nombre_contenido = "Tercer contenido";
                    }
                }
                respuestas[contador+1][0] = ""+(contador+1);
                respuestas[contador+1][1] = rs.getString("Respuesta");
                respuestas[contador+1][2] = nombre_contenido;
                respuestas[contador+1][3] = rs.getString("Resultado");
                contador++;
            }
            idnum.Idnum.conexion.cerrar_conexionBD();
            return respuestas;
        } catch (Exception ex) {
            System.out.println("Jugador_Juego - getAciertos: "+ex);
            return null;
        }
    }

    /**
     * Loads all jugador_juego rows into a fixed-size array (capacity 20;
     * unused slots stay null). Returns null on error.
     */
    public static Jugador_Juego [] getJugador_Juegos(){
        Jugador_Juego [] jugador_juegos = new Jugador_Juego[20];
        ResultSet rs;
        String sentencia = "SELECT id_jugador, id_juego, fecha FROM jugador_juego";
        try{
            conexion.conectaBD();
            rs = idnum.Idnum.conexion.consultaBD(sentencia);
            int contador = 0;
            while(rs.next()){
                Jugador jugador = new Jugador(rs.getInt("id_jugador"));
                Juego juego = new Juego(rs.getInt("id_juego"));
                jugador.consultarBD();
                juego.consultarBD();
                jugador_juegos[contador] = new Jugador_Juego(jugador, juego, rs.getString("fecha"));
                contador++;
            }
            rs.close();
            conexion.cerrar_conexionBD();
            return jugador_juegos;
        }catch(Exception ex){
            System.out.println("Jugador_Juego - getJugador_Juegos: "+ex);
        }
        return null;
    }

    /** Inserts this association with the current database timestamp (NOW()). */
    @Override
    public void ingresarBD() {
        String sentencia = "INSERT INTO jugador_juego (id_jugador, id_juego, fecha) "
                + "VALUES ('"+getJugador().getId_jugador()+"','"+getJuego().getId_juego()+"',NOW())";
        try{
            conexion.conectaBD();
            conexion.actualizaBD(sentencia);
            System.out.println("Llego");
            conexion.cerrar_conexionBD();
        }catch(Exception ex){
            System.out.println("Jugador_Juego juego - ingresar: "+ex);
        }
    }

    /** Not supported for this entity (intentionally a no-op). */
    @Override
    public void actualizarBD() {
    }

    /** Not supported for this entity; always reports failure. */
    @Override
    public boolean borrarBD() {
        return false;
    }

    /** Loads the most recent fecha for this (jugador, juego) pair into this object. */
    @Override
    public void consultarBD() {
        String sentencia = "SELECT * FROM jugador_juego WHERE id_jugador = '"+getJugador().getId_jugador()+"'"
                + "AND id_juego = '"+getJuego().getId_juego()+"' ORDER BY fecha DESC LIMIT 1";
        ResultSet rs;
        try{
            conexion.conectaBD();
            rs = idnum.Idnum.conexion.consultaBD(sentencia);
            if(rs.next()){
                this.setFecha(rs.getString("fecha"));
            }
            conexion.cerrar_conexionBD();
        }catch(Exception ex){
            System.out.println("Jugador_Juego - consultar: "+ex);
        }
    }

    /**
     * Loads the jugador and juego (fully populated) for the row matching this
     * pair at the given fecha; does not update this.fecha.
     */
    public void consultarBD(String fecha) {
        String sentencia = "SELECT * FROM jugador_juego WHERE id_jugador = '"+getJugador().getId_jugador()+"'"
                + "AND id_juego = '"+getJuego().getId_juego()+"' AND fecha = '"+fecha+"' ORDER BY fecha DESC LIMIT 1";
        ResultSet rs;
        try{
            conexion.conectaBD();
            rs = idnum.Idnum.conexion.consultaBD(sentencia);
            if(rs.next()){
                Jugador jugador_aux = new Jugador(rs.getInt("id_jugador"));
                Juego juego_aux = new Juego(rs.getInt("id_juego"));
                jugador_aux.consultarBD();
                juego_aux.consultarBD();
                this.setJugador(jugador_aux);
                this.setJuego(juego_aux);
            }
            conexion.cerrar_conexionBD();
        }catch(Exception ex){
            System.out.println("Jugador_Juego - consultar: "+ex);
        }
    }

    public Jugador getJugador() {
        return jugador;
    }

    public void setJugador(Jugador jugador) {
        this.jugador = jugador;
    }

    public Juego getJuego() {
        return juego;
    }

    /**
     * BUGFIX: this setter previously ignored its parameter (named id_juego) and
     * executed the self-assignment {@code this.juego = juego}, making it a no-op;
     * it now actually stores the supplied game.
     */
    public void setJuego(Juego juego) {
        this.juego = juego;
    }

    public String getFecha() {
        return fecha;
    }

    public void setFecha(String fecha) {
        this.fecha = fecha;
    }
}
| [
"wilmerdavid97@hotmail.com"
] | wilmerdavid97@hotmail.com |
c5b88962e08141922e31147ea67ff37e0a340a54 | eb36f508ff386bfcc79cc2c130b9c6bd0dfff1e2 | /app/src/main/java/jaavajaava/smarthome/UserActivity.java | 97152b1a0fec54dda1bb319abcaeef2a79914608 | [] | no_license | Vaintti/Smarthome | 6628ec0530f1ee8419bb23810ab3641e0fe7778a | 095ef33250b8a011fad2044918fa3a56a549a420 | refs/heads/master | 2021-01-10T15:23:44.024360 | 2016-03-31T09:27:30 | 2016-03-31T09:27:30 | 49,932,862 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 2,643 | java | package jaavajaava.smarthome;
import android.content.Context;
import android.content.Intent;
import android.database.Cursor;
import android.os.AsyncTask;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.CursorAdapter;
import android.widget.ListView;
import android.widget.TextView;
import java.util.List;
// Activity listing the estates owned by one user. The user id arrives via the
// "EXTRA_UID" intent extra; estates are loaded off the UI thread and shown in
// a ListView through a CursorAdapter.
public class UserActivity extends AppCompatActivity {
    // List showing one row per estate.
    ListView listView;
    // User id received from the launching intent (defaults to 0 when absent).
    long uid;
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_user);
        listView = (ListView)findViewById(R.id.userView);
        Intent intent = getIntent();
        uid = intent.getLongExtra("EXTRA_UID", 0);
    }
    @Override
    protected void onResume() {
        // Reload on every resume so edits made elsewhere show up on return.
        super.onResume();
        new AdminAsyncTask().execute();
    }
    // Background loader: queries the user's estates off the UI thread, then
    // binds the resulting Cursor to the list. NOTE(review): as a non-static
    // inner class this task holds a reference to the Activity for its whole
    // run; the cursor is never explicitly closed here — presumably the adapter
    // owns it. Confirm before refactoring.
    private class AdminAsyncTask extends AsyncTask<Void, Void, Cursor> {
        @Override
        protected void onPreExecute() {
            super.onPreExecute();
        }
        @Override
        protected void onPostExecute(Cursor cursor) {
            // Runs on the UI thread with the query result.
            super.onPostExecute(cursor);
            EstateCursorAdapter adapter = new EstateCursorAdapter(getApplicationContext(), cursor, 0);
            listView.setAdapter(adapter);
        }
        @Override
        protected Cursor doInBackground(Void... params) {
            // Runs on a worker thread; performs the actual database query.
            SmarthomeOpenHelper db = new SmarthomeOpenHelper(getApplicationContext());
            return db.getUserEstates(uid);
        }
    }
    // Maps each estate row of the cursor to the item_estate layout
    // (name + address text views).
    public class EstateCursorAdapter extends CursorAdapter {
        public EstateCursorAdapter(Context context, Cursor cursor, int flags) {
            // The flags parameter is intentionally ignored; 0 is always passed up.
            super(context, cursor, 0);
        }
        @Override
        public View newView(Context context, Cursor cursor, ViewGroup parent) {
            // Inflate a fresh, unbound row view.
            return LayoutInflater.from(context).inflate(R.layout.item_estate, parent, false);
        }
        @Override
        public void bindView(View view, Context context, Cursor cursor) {
            // Copy the current row's name and address into the row's views.
            TextView tvName = (TextView) view.findViewById(R.id.estateNameView);
            TextView tvAddress = (TextView) view.findViewById(R.id.estateAddressView);
            String name = cursor.getString(cursor.getColumnIndexOrThrow(SmarthomeContract.Estate.COLUMN_NAME_ESTATENAME));
            String address = cursor.getString(cursor.getColumnIndexOrThrow(SmarthomeContract.Estate.COLUMN_NAME_ADDRESS));
            tvName.setText(name);
            tvAddress.setText(address);
        }
    };
}
| [
"antti.jk.vainikka@gmail.com"
] | antti.jk.vainikka@gmail.com |
fe46949c8676df5dc18ebc61d0abb28ba8873332 | f58192e415ec3244470598194350ef1c2bd9283f | /src/main/java/com/gemsrobotics/lib/controls/DriveMotionPlanner.java | 8751c0caa81f2bdf4cca26d92fb127ed059d0f32 | [] | no_license | frc4362/gemlib | 200af997edfbcd5375cb5b12d5437aed1b3c521d | 8318332285a27e8fc0b9a57574bbdd03b50a402a | refs/heads/master | 2022-12-10T20:33:33.251666 | 2022-12-03T14:02:36 | 2022-12-03T14:02:36 | 221,908,057 | 3 | 0 | null | null | null | null | UTF-8 | Java | false | false | 7,572 | java | package com.gemsrobotics.lib.controls;
import com.gemsrobotics.lib.subsystems.drivetrain.ChassisState;
import com.gemsrobotics.lib.subsystems.drivetrain.DifferentialDriveModel;
import com.gemsrobotics.lib.subsystems.drivetrain.WheelState;
import com.gemsrobotics.lib.math.se2.RigidTransform;
import com.gemsrobotics.lib.math.se2.RigidTransformWithCurvature;
import com.gemsrobotics.lib.trajectory.TrajectoryIterator;
import com.gemsrobotics.lib.trajectory.parameterization.*;
import io.github.oblarg.oblog.Loggable;
import io.github.oblarg.oblog.annotations.Config;
import io.github.oblarg.oblog.annotations.Log;
import java.util.Objects;
import java.util.Optional;
import static com.gemsrobotics.lib.utils.MathUtils.*;
import static java.lang.Double.isInfinite;
import static java.lang.Math.abs;
import static java.lang.Math.sqrt;
// consider refactoring into a strategy-pattern with a PathController interface
public class DriveMotionPlanner implements Loggable {
    @Override
    public String configureLogName() {
        return "Motion Planner";
    }

    protected transient final DifferentialDriveModel m_model;
    protected transient final MotionPlanner.MotionConfig m_config;

    protected FollowerType m_followerType;

    /** Path-following algorithm: pure feedforward, or Ramsete feedback control. */
    public enum FollowerType {
        FEEDFORWARD,
        RAMSETE
    }

    public void setFollowerType(final FollowerType newType) {
        m_followerType = newType;
    }

    @Log.ToString(name="Follower Type (Algorithm)")
    public FollowerType getFollowerType() {
        return m_followerType;
    }

    protected TrajectoryIterator<TimedState<RigidTransformWithCurvature>> m_trajectory;
    @Log.ToString(name="Error (Pose)")
    protected RigidTransform m_error;
    @Log.ToString(name="Setpoint (Timed Pose w/ Curvature)")
    protected TimedState<RigidTransformWithCurvature> m_setpoint;
    @Log(name="Reversed? (Boolean)")
    protected boolean m_isReversed;
    protected double m_lastTime;
    protected MotionPlanner.Output m_output;
    protected ChassisState m_previousVelocity;

    /**
     * @param config motion constraints for planning
     * @param model drivetrain kinematics/dynamics model
     * @param followerType which following algorithm to use
     */
    public DriveMotionPlanner(
            final MotionPlanner.MotionConfig config,
            final DifferentialDriveModel model,
            final FollowerType followerType
    ) {
        m_config = config;
        m_model = model;
        m_followerType = followerType;

        reset();
    }

    /**
     * Installs a new trajectory and determines whether it is driven in reverse
     * by scanning for the first state with a non-negligible velocity.
     */
    public final void setTrajectory(final TrajectoryIterator<TimedState<RigidTransformWithCurvature>> trajectory) {
        m_trajectory = trajectory;
        m_setpoint = trajectory.getState();

        for (int i = 0; i < trajectory.getTrajectory().length(); i++) {
            final var state = trajectory.getTrajectory().getState(i);

            if (state.getVelocity() > Epsilon) {
                m_isReversed = false;
                break;
            } else if (state.getVelocity() < -Epsilon) {
                // BUGFIX: was "< Epsilon", which also matched small POSITIVE
                // velocities (anything below +Epsilon) and could mark a forward
                // trajectory as reversed. Only clearly-negative velocity counts.
                m_isReversed = true;
                break;
            }
        }
    }

    /** Clears error, output, and timing state ahead of a new trajectory. */
    public final void reset() {
        m_error = RigidTransform.identity();
        m_output = new MotionPlanner.Output();
        m_lastTime = Double.POSITIVE_INFINITY;
        m_previousVelocity = new ChassisState();
    }

    /**
     * Advances the trajectory by the elapsed time and computes wheel setpoints.
     *
     * @param timestamp current time, seconds
     * @param currentPose measured robot pose
     * @param isHighGear current gearing, forwarded to the drive model
     * @return follower output, or empty when there is no trajectory or it is finished
     */
    public Optional<MotionPlanner.Output> update(final double timestamp, final RigidTransform currentPose, final boolean isHighGear) {
        if (Objects.isNull(m_trajectory)) {
            return Optional.empty();
        }

        // First call after reset(): establish the time base.
        if (m_trajectory.getProgress() == 0.0 && isInfinite(m_lastTime)) {
            m_lastTime = timestamp;
        }

        final double dt = timestamp - m_lastTime;
        m_lastTime = timestamp;

        final var samplePoint = m_trajectory.advance(dt);
        m_setpoint = samplePoint.getState();

        if (!m_trajectory.isDone()) {
            final var velocityMetersPerSecond = m_setpoint.getVelocity();
            final var curvatureRadiansPerMeter = m_setpoint.getState().getCurvature();
            final var curvatureDsRadiansPerMeterSquared = m_setpoint.getState().getDCurvatureDs();
            final var accelerationMetersPerSecondSquared = m_setpoint.getAcceleration();

            // Invert the drive dynamics at the setpoint: chassis (v, w) and
            // (a, alpha) -> required wheel speeds/accelerations and voltages.
            final var setpointDynamics = m_model.solveInverseDynamics(
                    new ChassisState(velocityMetersPerSecond, velocityMetersPerSecond * curvatureRadiansPerMeter),
                    new ChassisState(accelerationMetersPerSecondSquared,
                            accelerationMetersPerSecondSquared * curvatureRadiansPerMeter
                            + velocityMetersPerSecond * velocityMetersPerSecond * curvatureDsRadiansPerMeterSquared),
                    isHighGear
            );

            // Pose error expressed in the setpoint's frame.
            m_error = currentPose.inverse().transformBy(m_setpoint.getState().getRigidTransform());

            switch (m_followerType) {
                case FEEDFORWARD:
                    m_output.velocityRadiansPerSecond = setpointDynamics.wheelVelocityRadiansPerSecond;
                    m_output.accelerationRadiansPerSecondSquared = setpointDynamics.wheelAccelerationRadiansPerSecondSquared;
                    m_output.feedforwardVoltage = setpointDynamics.voltage;
                    break;
                case RAMSETE:
                    m_output = updateRamsete(dt, setpointDynamics, isHighGear);
                    break;
            }
        } else {
            return Optional.empty();
        }

        return Optional.of(m_output);
    }

    // Implements eqn. 5.12 from https://www.dis.uniroma1.it/~labrob/pub/papers/Ramsete01.pdf
    // Gains: b = 2.0, zeta = 0.7 (hard-coded below).
    protected MotionPlanner.Output updateRamsete(final double dt, final DifferentialDriveModel.Dynamics ref, final boolean isHighGear) {
        final double k = 2.0 * 0.7 * sqrt(2.0 * ref.chassisVelocity.linear * ref.chassisVelocity.linear + ref.chassisVelocity.angular * ref.chassisVelocity.angular);
        final var angularErrorRadians = m_error.getRotation().getRadians();

        // adjust for error
        ref.chassisVelocity = new ChassisState(
                ref.chassisVelocity.linear * m_error.getRotation().cos()
                        + k * m_error.getTranslation().x(),
                ref.chassisVelocity.angular
                        + k * angularErrorRadians
                        + ref.chassisVelocity.linear * 2.0 * sinc(angularErrorRadians, 0.01) * m_error.getTranslation().y());

        // this is where everything goes from meters to wheel radians/s!!
        ref.wheelVelocityRadiansPerSecond = m_model.inverseKinematics(ref.chassisVelocity);

        if (dt == 0.0) {
            ref.chassisAcceleration.linear = 0.0;
            ref.chassisAcceleration.angular = 0.0;
        } else {
            // Finite-difference acceleration from the previous commanded velocity.
            ref.chassisAcceleration.linear = (ref.chassisVelocity.linear - m_previousVelocity.linear) / dt;
            ref.chassisAcceleration.angular = (ref.chassisVelocity.angular - m_previousVelocity.angular) / dt;
        }

        // store previous velocity, allows the user to only have to worry about passing the new state
        // this is superior to passing velocity and acceleration in, like 1678 does, since it allows the user to worry
        // about fewer calculations up front and works fine with a variant dt. However, where it lacks is in application-
        // our Ramsete controller is highly coupled. This should be fine. - Ethan, 9/24/19
        m_previousVelocity = ref.chassisVelocity;

        final var output = new MotionPlanner.Output();
        output.velocityRadiansPerSecond = ref.wheelVelocityRadiansPerSecond;
        output.accelerationRadiansPerSecondSquared = ref.wheelAccelerationRadiansPerSecondSquared;
        output.feedforwardVoltage = m_model.solveInverseDynamics(ref.chassisVelocity, ref.chassisAcceleration, isHighGear).voltage;
        return output;
    }

    public final TimedState<RigidTransformWithCurvature> getReference() {
        return m_setpoint;
    }

    public final RigidTransform getError() {
        return m_error;
    }

    @Log.Graph(name="Pose Error (m)", visibleTime=15.0)
    protected double getErrorMagnitude() {
        return m_error.getTranslation().norm();
    }

    @Log.Graph(name="Pose Error (deg)", visibleTime=15.0)
    protected double getErrorHeading() {
        return abs(m_error.getRotation().getDegrees());
    }

    @Log.BooleanBox(name="Complete? (Boolean)")
    public final boolean isDone() {
        return !Objects.isNull(m_trajectory) && m_trajectory.isDone();
    }
}
| [
"ejmalzone@gmail.com"
] | ejmalzone@gmail.com |
096f94f8db89fe0516e669de83d1f2d21144a1c4 | a258e00326e68a7df37f57660983e90e46582fe1 | /drill_data_cache/src/test/java/com/henry/chapter08_0503/Chapter080503ApplicationTests.java | 422dcac687097c3d8264a739082ba72fcd3e4a0b | [] | no_license | henryinshanghai/spring_boot_series | 0b995d3eabe2ba9459d4b39a2b56a84cad79e7ae | 372c7aeee84d214abfed90e4cbe5bc2ea876522c | refs/heads/master | 2023-06-07T11:43:52.728369 | 2021-06-29T15:37:36 | 2021-06-29T15:37:36 | 359,640,200 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 517 | java | package com.henry.chapter08_0503;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.boot.test.SpringApplicationConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.web.WebAppConfiguration;
@RunWith(SpringJUnit4ClassRunner.class)
@SpringApplicationConfiguration(classes = Chapter080503Application.class)
@WebAppConfiguration
public class Chapter080503ApplicationTests {

    /**
     * Smoke test: passes when the Spring application context starts cleanly.
     *
     * BUGFIX: JUnit 4 (org.junit.Test + SpringJUnit4ClassRunner) requires the
     * test class and its @Test methods to be public; the previous
     * package-private declarations made the runner fail with an
     * initialization error instead of running the test.
     */
    @Test
    public void contextLoads() {
    }
}
| [
"2291972433@qq.com"
] | 2291972433@qq.com |
27994709de967a6ee1794749c4c9750c923f935a | 301123f0a2f4f383c4135fb3dfc1bfb527ac195b | /spring-boot-rest-pdf/src/main/java/com/mkyong/pdf/ServletInitializer.java | 129a5b6e926f0233ae00531e3123c594c11e9c64 | [] | no_license | asepmaryana/mkyong | 7da49e6b2a2123a2af21982cd31338c8b1893e83 | 21b097654a7fc14719b708ca5371612f5fb881a6 | refs/heads/master | 2021-01-20T01:10:27.253698 | 2017-08-25T06:58:59 | 2017-08-25T06:58:59 | 101,282,150 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 407 | java | package com.mkyong.pdf;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.boot.web.support.SpringBootServletInitializer;
/**
 * Bootstraps the Spring Boot application when it is deployed as a WAR inside
 * an external servlet container (rather than via the embedded server).
 */
public class ServletInitializer extends SpringBootServletInitializer {

    @Override
    protected SpringApplicationBuilder configure(SpringApplicationBuilder application) {
        // Register the application's primary configuration class.
        SpringApplicationBuilder builder = application.sources(SpringBootRestPdfApplication.class);
        return builder;
    }
}
| [
"asep.maryana@gmail.com"
] | asep.maryana@gmail.com |
52cf7988036acb96ef9e626a8c6897b08f3c5880 | 5f0b4bfe7e417af753403f966c9a536ee46405c9 | /src/main/java/com/jae/app/ServletInitializer.java | 8b7a162b7f4d5065fb9e688e8a810989caa74492 | [] | no_license | zhunengfei/SpringBootTutorials-angularjs | 7bad2c42ea7859f436c16963bcab815020dca167 | 0f4e93a66d64ee22977288675b57241a0988483f | refs/heads/master | 2020-03-19T14:24:39.756714 | 2018-06-08T13:12:15 | 2018-06-08T13:12:15 | 136,621,353 | 0 | 1 | null | null | null | null | UTF-8 | Java | false | false | 423 | java | package com.jae.app;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.boot.web.servlet.support.SpringBootServletInitializer;
/**
 * Servlet-container entry point: points an external container's deployment
 * at the application's main configuration class.
 */
public class ServletInitializer extends SpringBootServletInitializer {

    @Override
    protected SpringApplicationBuilder configure(SpringApplicationBuilder application) {
        Class<?> primarySource = SpringBootTutorialsAngularjsApplication.class;
        return application.sources(primarySource);
    }
}
| [
"zhunengfeisvip@163.com"
] | zhunengfeisvip@163.com |
7b7446473d55c99bb2f0e93f2580213f9fc6742f | eb194155216aa8871901cc4a618734a278e40c2b | /src/main/java/com/aholdusa/am/audittracking/service/ActivityLogServiceImpl.java | 2ceddbfdfe6409008abb8664cf66c240c7ac0bc8 | [] | no_license | ansateesh/audittracking | a6ba3e111d6d598346f8d5ba7a0459deee661734 | 980c18d03a7d87e62e815ca90d0c74144298a1c3 | refs/heads/master | 2022-12-21T04:26:23.885415 | 2020-03-05T09:02:53 | 2020-03-05T09:02:53 | 244,287,184 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 9,479 | java | package com.aholdusa.am.audittracking.service;
import java.sql.Date;
import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;
import org.apache.commons.collections4.CollectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.aholdusa.am.audittracking.dao.ActivityLogDAO;
import com.aholdusa.am.audittracking.dao.EmployeeDAO;
import com.aholdusa.am.audittracking.dto.ReportDTO;
import com.aholdusa.am.audittracking.entity.ActivityLogs;
import com.aholdusa.am.audittracking.entity.Employee;
import com.aholdusa.am.audittracking.entity.Notification;
import com.aholdusa.am.audittracking.entity.ReportActivityLogs;
import com.aholdusa.am.audittracking.entity.ReportConstants;
@Service("activityLogService")
public class ActivityLogServiceImpl extends AMServiceImpl<ActivityLogs>
implements ActivityLogService {
@Autowired
private ActivityLogDAO activityLogDao;
@Autowired
private EmployeeDAO employeeDao;
private final Logger LOGGER = LoggerFactory.getLogger(this.getClass());
public void insert(ActivityLogs entity) {
activityLogDao.insert(entity);
}
@Override
public List<ActivityLogs> findByJsonObject(String jsonString) {
// TODO Auto-generated method stub
return null;
}
@Override
public List<ReportActivityLogs> findActivityByManagerBeginEndDate(
ActivityLogs activityLog, Date beginDate, Date endDate) {
List<ReportActivityLogs>reportActivityLogList=null;
List <ActivityLogs>activityLogList=activityLogDao.findActivityByManagerBeginEndDate(activityLog, beginDate, endDate);
if(activityLogList!=null && activityLogList.size() >0){
reportActivityLogList=new ArrayList<ReportActivityLogs>();
for(ActivityLogs activity:activityLogList){
ReportActivityLogs reportActivity= new ReportActivityLogs();
reportActivity.setActivityLog(activity);
reportActivityLogList.add(reportActivity);
//Set Manager
List <Employee> managerList= employeeDao.findByOperatorNumber(activity.getStoreNumber(), activity.getMgrOpNum());
reportActivity.setManager(managerList!=null?managerList.get(0):null);
//Set Employee
List <Employee> employeeList = employeeDao.findByOperatorNumber(activity.getStoreNumber(), activity.getEmpOpNum());
reportActivity.setEmployee(employeeList!=null?employeeList.get(0):null);
}
}
return reportActivityLogList;
}
@Override
public List<ReportActivityLogs> findActivityByBeginEndDate(
ActivityLogs activityLog, Date beginDate, Date endDate) {
List<ReportActivityLogs>reportActivityLogList=null;
List <ActivityLogs>activityLogList=null;
if(activityLog.getEmpOpNum()!=null){
activityLogList=activityLogDao.findActivityOperatorNumberByBeginEndDate(activityLog, beginDate, endDate);
}else{
activityLogList=activityLogDao.findActivityByBeginEndDate(activityLog, beginDate, endDate);
}
if(activityLogList!=null && activityLogList.size() >0){
reportActivityLogList=new ArrayList<ReportActivityLogs>();
for(ActivityLogs activity:activityLogList){
ReportActivityLogs reportActivity= new ReportActivityLogs();
reportActivity.setActivityLog(activity);
reportActivityLogList.add(reportActivity);
//Set Manager
List <Employee> managerList= employeeDao.findByOperatorNumber(activity.getStoreNumber(), activity.getMgrOpNum());
if(managerList!=null && managerList.size()>0){
reportActivity.setManager( managerList.get(0) );
}
//Set Employee
List <Employee> employeeList = employeeDao.findByOperatorNumber(activity.getStoreNumber(), activity.getEmpOpNum());
if(employeeList!=null && employeeList.size()>0){
reportActivity.setEmployee( employeeList.get(0) );
}
}
}
return reportActivityLogList;
}
@Override
public List<ActivityLogs> findActivityByActivityType(
ActivityLogs activityLog, Date currentDate) {
List <ActivityLogs>activityLogList = activityLogDao.findActivityByActivityType(activityLog,currentDate);
return activityLogList;
}
@Override
public List<ActivityLogs> findActivityTillContentsByOpManagerNumber(
ActivityLogs activityLog, Date currentDate) {
return activityLogDao.findActivityTillContentsByOpManagerNumber(activityLog, currentDate);
}
@Override
public ActivityLogs findActivityById(ActivityLogs activityLog) {
// TODO Auto-generated method stub
return activityLogDao.findActivityById(activityLog);
}
@Override
public Notification findNotificationById(Notification notification) {
// TODO Auto-generated method stub
ActivityLogs activity = new ActivityLogs();
String completeMessage;
String filteredReason = "";
activity.setId(notification.getActivityId());
activity= activityLogDao.findActivityById(activity);
if(activity != null){
// Mapping Activity vs. Notification
notification.setActivityType(activity.getActivityType());
notification.setStoreNumber(activity.getStoreNumber().longValue());
completeMessage = activity.getMgrReason()!=null?activity.getMgrReason():"";
if(completeMessage.contains("Reason")) filteredReason = completeMessage.split("Reason:")[1];
notification.setMessage(filteredReason);
/*
* Extracting TerminalId
*/
boolean prevTokenIsTerminal=false;
String reason=activity.getMgrReason()!=null?activity.getMgrReason():"";
String terminalNumber="0";
StringTokenizer strTokenizer=new StringTokenizer(reason,"|");
while(strTokenizer.hasMoreTokens() && !prevTokenIsTerminal){
String element=strTokenizer.nextToken();
StringTokenizer strTokenizerInner=new StringTokenizer(element,":");
String token="";
while(strTokenizerInner.hasMoreTokens() && !prevTokenIsTerminal){
if(token.contains("TerminalId")){
prevTokenIsTerminal=true;
}
token=strTokenizerInner.nextToken();
if(prevTokenIsTerminal){
terminalNumber=token;
}
}
}
notification.setTerminalId(new Long(terminalNumber));
notification.setOperatorNumber(activity.getEmpOpNum().longValue());
notification.setActivityType(activity.getActivityType());
}else{
notification=null;
}
return notification;
}
@Override
public List<ActivityLogs> findActivityByActivityTypeAndOperatorNumber(
ActivityLogs activityLog) {
return activityLogDao.findActivityByActivityTypeAndOperatorNumber(activityLog);
}
/**
* On SignOnDecline, query the ActivityLog Table for a SignOnDecline entry
* with the same operator number, lane number (terminal id), store number,
* and trunc(created_date) with is_deleted = 0. If any entries match these
* parameters,
* create a new ActivityLog entry for the SignOnDecline but with is_deleted = 1.
* ActivityLogs al = new ActivityLogs();
query.setParameter("storeNumber", activityLog.getStoreNumber());
query.setParameter("activityType", activityLog.getActivityType());
query.setParameter("empOpNum",activityLog.getEmpOpNum() );
query.setParameter("createdDate", activityLog.getCreateDate());
return activityLogDao.findActivityByActivityTypeAndOperatorNumber(activityLog);
*/
@Override
public boolean checkSignOnDeclineActivity(ActivityLogs activityLog, Integer laneNumber) {
//return activityLogDao.findActivityByActivityTypeAndOperatorNumber(activityLog);
boolean activityExist = false;
try{
List<ActivityLogs> list = activityLogDao.findActivityByActivityTypeAndOperatorNumber(activityLog);
if(list != null && list.size() > 1 ){
activityExist = true;
}
}catch(Exception e){
LOGGER.info("Exception: "+e);
}
return activityExist;
}
@Override
public void deletePrevOverrideActivities(ActivityLogs activityLog) {
List<ActivityLogs> activities= activityLogDao.findActivityByActivityTypeAndOperatorNumber(activityLog);
if(activities!=null && activities.size()>0){
for (ActivityLogs activity : activities){
activity.setIsDeleted(true);
activityLogDao.update(activity);
}
}
}
@Override
public List<ReportActivityLogs> getActivityLogsBySearchCriteria(ReportDTO reportDTO) {
// TODO Auto-generated method stub
List<ReportActivityLogs> reportActivityLogList=new ArrayList<ReportActivityLogs>();;
List<ActivityLogs> activityLogs = activityLogDao.getActivityLogsBySearchCriteria(reportDTO);
for (ActivityLogs activityLogsFromDao : activityLogs) {
ReportActivityLogs reportActivityLogObj = new ReportActivityLogs();
if(activityLogsFromDao.getEmpOpNum()!=null){
List<Employee> employeesList = employeeDao.findByOperatorNumber(activityLogsFromDao.getStoreNumber(), activityLogsFromDao.getEmpOpNum());
if(CollectionUtils.isNotEmpty(employeesList)){
reportActivityLogObj.setEmployee(employeesList.get(0));
}
}
if(activityLogsFromDao.getMgrOpNum()!=null){
List<Employee> employeesList = employeeDao.findByOperatorNumber(activityLogsFromDao.getStoreNumber(), activityLogsFromDao.getMgrOpNum());
if(CollectionUtils.isNotEmpty(employeesList)){
reportActivityLogObj.setManager(employeesList.get(0));
}
}
activityLogsFromDao.setObjectType(ReportConstants.REPORT_ACTIVITY_OBJECT_TYPE_EMP.getValue());
reportActivityLogObj.setActivityLog(activityLogsFromDao);
reportActivityLogList.add(reportActivityLogObj);
}
return reportActivityLogList;
}
} | [
"vn05523@delhaize.com"
] | vn05523@delhaize.com |
98a306110c8233eed9295365486ac46751a90940 | 6585e33e7d55c038b8027e66c87f298f18f2915a | /06-Interfaces/examples/EventHandler/src/javafx/event/EventHandler.java | 09a04890ed0dcd6fc8445225eef6a5d00cfed994 | [] | no_license | Theasker/ctajava | ea8ffcf538f183d6c0c6985657422f3735ba78e3 | e4b551364a73b1d210a01ab5bd55908d265fe250 | refs/heads/master | 2021-01-11T16:38:39.285921 | 2017-02-01T18:06:39 | 2017-02-01T18:06:39 | 80,130,028 | 3 | 2 | null | null | null | null | UTF-8 | Java | false | false | 86 | java | package javafx.event;
/**
 * Handler invoked when an event of type {@code T} occurs.
 * Being a single-abstract-method type, it can be implemented with a lambda.
 *
 * @param <T> the event type this handler consumes
 */
@FunctionalInterface
interface EventHandler<T> {
    /**
     * Reacts to the given event.
     *
     * @param event the event to handle
     */
    public void handle(T event);
}
| [
"theasker@gmail.com"
] | theasker@gmail.com |
5ac93948da61c2d8d540c73169c83f454d86a832 | ec3e9fff4641a206ec07911e9ac80b8694215e83 | /src/edu/ArturKim/JavaSintax/task011/Solution.java | fc0c185387f5eaf2785f39bcf6c2f4be383b513f | [] | no_license | DadakhodjaevRustam/JavaCoreLearningTasks | f64175396eea266d1ca60de9e1128ba75d694bf1 | 43b860afe491ca32359c6273a50998020443bfb8 | refs/heads/master | 2023-01-06T16:19:48.554460 | 2020-11-11T12:30:31 | 2020-11-11T12:30:31 | 312,291,587 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 545 | java | package edu.ArturKim.JavaSintax.task011;
/**
 * Demonstrates polymorphism: a Cat held through a Pet reference still
 * exposes the Pet name accessors.
 */
public class Solution {
    public static void main(String[] args) {
        Pet fluffy = new Cat();
        fluffy.setName("Я - пушистик");
        System.out.println(fluffy.getName());
    }

    /** Base pet type carrying a mutable display name. */
    public static class Pet {
        protected String name;

        public Pet() {
        }

        public String getName() {
            return name;
        }

        public void setName(String name) {
            this.name = name;
        }
    }

    /** A cat is a pet; inherits all name handling from Pet. */
    public static class Cat extends Pet {
    }
}
| [
"archi.baldi@bk.ru"
] | archi.baldi@bk.ru |
3d140df4739ae8927e88cc00a68a76132b3e014c | bf2966abae57885c29e70852243a22abc8ba8eb0 | /aws-java-sdk-migrationhub/src/main/java/com/amazonaws/services/migrationhub/model/transform/AccessDeniedExceptionUnmarshaller.java | 40997fa7c5b07b6bc9ad0cdb87ef3d4a24d23313 | [
"Apache-2.0"
] | permissive | kmbotts/aws-sdk-java | ae20b3244131d52b9687eb026b9c620da8b49935 | 388f6427e00fb1c2f211abda5bad3a75d29eef62 | refs/heads/master | 2021-12-23T14:39:26.369661 | 2021-07-26T20:09:07 | 2021-07-26T20:09:07 | 246,296,939 | 0 | 0 | Apache-2.0 | 2020-03-10T12:37:34 | 2020-03-10T12:37:33 | null | UTF-8 | Java | false | false | 2,862 | java | /*
* Copyright 2016-2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.migrationhub.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.migrationhub.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
* AccessDeniedException JSON Unmarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AccessDeniedExceptionUnmarshaller extends EnhancedJsonErrorUnmarshaller {

    // Private: instances are obtained through the lazily-initialized getInstance().
    private AccessDeniedExceptionUnmarshaller() {
        super(com.amazonaws.services.migrationhub.model.AccessDeniedException.class, "AccessDeniedException");
    }

    /**
     * Builds an AccessDeniedException from the JSON error payload. The
     * exception carries no service-specific fields, so the token loop below
     * only consumes the payload (skipping all fields) until it steps back out
     * of the object it started in.
     *
     * @param context positioned at the error payload's current token
     * @return the unmarshalled exception, or null when the payload is a JSON null
     */
    @Override
    public com.amazonaws.services.migrationhub.model.AccessDeniedException unmarshallFromContext(JsonUnmarshallerContext context) throws Exception {
        com.amazonaws.services.migrationhub.model.AccessDeniedException accessDeniedException = new com.amazonaws.services.migrationhub.model.AccessDeniedException(
                null);

        // Remember where parsing started so the loop knows when it has left this object.
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        if (token == VALUE_NULL) {
            // JSON null payload: no exception object to build.
            return null;
        }

        // Advance through every token of the payload; nothing is extracted
        // because this exception type defines no additional members.
        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                // No members to unmarshall — intentionally empty.
            } else if (token == END_ARRAY || token == END_OBJECT) {
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    // Stepped back to (or above) the starting depth: payload consumed.
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return accessDeniedException;
    }

    private static AccessDeniedExceptionUnmarshaller instance;

    // NOTE(review): lazy init without synchronization — benign here because the
    // instance is stateless and re-creation is idempotent, but not strictly
    // thread-safe; generated AWS SDK code follows this pattern throughout.
    public static AccessDeniedExceptionUnmarshaller getInstance() {
        if (instance == null)
            instance = new AccessDeniedExceptionUnmarshaller();
        return instance;
    }
}
| [
""
] | |
21ad60d79ab798d84ef03c7983404dd217d6c028 | a262ce7e8c005dcff31e0eedaa0d50e2a97dd32e | /src/test/java/com/vumc/pf/finders/PalindromeFinderCLITest.java | 3a3f518fdf83d4dcbece0b95cc95ada4b419a11b | [] | no_license | zmcmackin/PalindromeFinder | c072196a154906736e2b176764de5aedc61d0613 | 4f822d82c59e51e9e847286c65675e99f48fa55a | refs/heads/master | 2021-01-25T09:15:26.103593 | 2017-06-09T12:40:31 | 2017-06-09T12:40:31 | 93,809,666 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 686 | java | package com.vumc.pf.finders;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import com.vumc.pf.PalindromeFinderCLI;
/** Tests for the palindrome-finder command-line front end. */
public class PalindromeFinderCLITest {

    PalindromeFinderCLI cliApp;

    @Before
    public void testSetup() {
        cliApp = new PalindromeFinderCLI();
    }

    @Test
    public void test_executeFinder() {
        // BUGFIX(consistency): use the fixture created in testSetup() instead of
        // constructing a second, untracked instance that ignored the setup/teardown
        // lifecycle entirely.
        cliApp.executeFinder(3);
    }

    @Test
    public void test_displayResult() {
        PalindromeResult result = new PalindromeResult();
        result.setFactorLength(2);
        result.setFactor1(99);
        result.setFactor2(91);
        result.setPalindrome(9009);
        // TODO(review): this test builds a result but never passes it to the CLI
        // nor asserts anything — it currently only verifies the setters don't throw.
    }

    @After
    public void testTearDown() {
        cliApp = null;
    }
}
| [
"zack@blueplanetsolutions.com"
] | zack@blueplanetsolutions.com |
3707eba2574aa5051e2bff71994ff67b9ba6b0b3 | 0c2621f91b80b7476861eac33c2b73ae261eef31 | /Perpustakaan/src/perpustakaan/viewers/jFrmKembali.java | 589ffd34bcea32581836b465734af30bc010bfd6 | [] | no_license | ocpyosep78/SI-Perpustakaan | 179b2247b2f0197c5f5ebe2dff40ebf20cf1fdea | 03f876c2e052b50fffa9fca0e629967e54c51585 | refs/heads/master | 2021-01-17T23:32:26.344097 | 2012-08-26T15:31:51 | 2012-08-26T15:31:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 19,779 | java | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package perpustakaan.viewers;
import java.sql.Date;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.JTextField;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import javax.swing.table.DefaultTableModel;
import perpustakaan.controllers.database.cDatabaseConnection;
import perpustakaan.controllers.utils.cUtils;
import perpustakaan.main;
import perpustakaan.models.mBuku;
import perpustakaan.models.mTransaksi;
import perpustakaan.models.mTransaksiDetil;
import perpustakaan.viewers.pop.jFrmPopBuku;
/**
*
* @author ophicxs
*/
public class jFrmKembali extends javax.swing.JInternalFrame {
private java.util.Date today = new java.util.Date();
private Date skg = new Date(today.getTime());
public jFrmKembali() {
initComponents();
String[] column = new String[]{"Kode Buku", "Judul Buku", "Harga Sewa", "Status"};
cUtils.TabCreateColumn(jTabDetil, column);
txtTglKembali.setText(skg.toString());
//<editor-fold defaultstate="collapsed" desc="Create Listener Kode Buku">
txtKodeBuku.getDocument().addDocumentListener(new DocumentListener() {
@Override
public void insertUpdate(DocumentEvent e) {
if (txtKodeBuku.getText().length()>0){
fillCurrentPeminjamanDetil(txtKodeBuku.getText());
}
}
@Override
public void removeUpdate(DocumentEvent e) {
if (txtKodeBuku.getText().length()>0){
fillCurrentPeminjamanDetil(txtKodeBuku.getText());
}
}
@Override
public void changedUpdate(DocumentEvent e) {
if (txtKodeBuku.getText().length()>0){
fillCurrentPeminjamanDetil(txtKodeBuku.getText());
}
}
});
//</editor-fold>
//<editor-fold defaultstate="collapsed" desc="Create Listener JTabDetil">
jTabDetil.getSelectionModel().addListSelectionListener(new ListSelectionListener() {
@Override
public void valueChanged(ListSelectionEvent e) {
if (jTabDetil.getSelectedRow()>-1){
btnKembali.setEnabled(true);
} else {
btnKembali.setEnabled(false);
}
}
});
//</editor-fold>
//<editor-fold defaultstate="collapsed" desc="Create Listener NoPeminjaman">
txtNoPinjam.getDocument().addDocumentListener(new DocumentListener() {
@Override
public void insertUpdate(DocumentEvent e) {
if (txtNoPinjam.getText().length()>0){ btnProses.setEnabled(true); lblDenda.setText("Denda : 0"); } else { btnProses.setEnabled(false); }
}
@Override
public void removeUpdate(DocumentEvent e) {
if (txtNoPinjam.getText().length()>0){ btnProses.setEnabled(true); lblDenda.setText("Denda : 0"); } else { btnProses.setEnabled(false); }
}
@Override
public void changedUpdate(DocumentEvent e) {
if (txtNoPinjam.getText().length()>0){ btnProses.setEnabled(true); lblDenda.setText("Denda : 0"); } else { btnProses.setEnabled(false); }
}
});
//</editor-fold>
}
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    // NetBeans GUI-builder output: instantiate widgets, wire listeners, build GroupLayouts.
    // Do not hand-edit logic here; change the .form in the designer instead.
    private void initComponents() {
        // Widget construction.
        jLabel1 = new javax.swing.JLabel();
        txtKodeBuku = new javax.swing.JTextField();
        btnBrowse = new javax.swing.JButton();
        jPanel1 = new javax.swing.JPanel();
        jLabel2 = new javax.swing.JLabel();
        txtNoPinjam = new javax.swing.JTextField();
        jLabel3 = new javax.swing.JLabel();
        txtTglPinjam = new javax.swing.JTextField();
        txtTglBatas = new javax.swing.JTextField();
        jLabel4 = new javax.swing.JLabel();
        jLabel5 = new javax.swing.JLabel();
        txtTglKembali = new javax.swing.JTextField();
        jScrollPane1 = new javax.swing.JScrollPane();
        jTabDetil = new javax.swing.JTable();
        lblDenda = new javax.swing.JLabel();
        btnKembali = new javax.swing.JButton();
        jLabel7 = new javax.swing.JLabel();
        txtJudulBuku = new javax.swing.JTextField();
        btnProses = new javax.swing.JButton();
        setClosable(true);
        setTitle("Transaksi Pengembalian");
        jLabel1.setText("Kode Buku");
        txtKodeBuku.setEditable(false);
        btnBrowse.setText("...");
        // Browse button opens the book-picker popup (see btnBrowseActionPerformed).
        btnBrowse.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                btnBrowseActionPerformed(evt);
            }
        });
        jPanel1.setBorder(javax.swing.BorderFactory.createTitledBorder("Data Peminjaman"));
        jLabel2.setText("No. Peminjaman");
        txtNoPinjam.setEditable(false);
        txtNoPinjam.setHorizontalAlignment(javax.swing.JTextField.RIGHT);
        jLabel3.setText("Tgl. Pinjam");
        txtTglPinjam.setEditable(false);
        txtTglBatas.setEditable(false);
        jLabel4.setText("Tgl. batas");
        jLabel5.setText("Tgl. Kembali");
        txtTglKembali.setEditable(false);
        // Detail table starts empty; rows/columns are filled at runtime.
        jTabDetil.setModel(new javax.swing.table.DefaultTableModel(
            new Object [][] {
            },
            new String [] {
            }
        ));
        jScrollPane1.setViewportView(jTabDetil);
        lblDenda.setFont(new java.awt.Font("Tahoma", 1, 14)); // NOI18N
        lblDenda.setText("Denda : 0");
        btnKembali.setText("Kembali");
        btnKembali.setEnabled(false);
        btnKembali.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                btnKembaliActionPerformed(evt);
            }
        });
        // Layout of the "Data Peminjaman" panel.
        javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1);
        jPanel1.setLayout(jPanel1Layout);
        jPanel1Layout.setHorizontalGroup(
            jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel1Layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(jPanel1Layout.createSequentialGroup()
                        .addComponent(jLabel3)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addComponent(txtTglPinjam, javax.swing.GroupLayout.PREFERRED_SIZE, 90, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addComponent(jLabel4)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addComponent(txtTglBatas, javax.swing.GroupLayout.PREFERRED_SIZE, 93, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addComponent(jLabel5)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addComponent(txtTglKembali, javax.swing.GroupLayout.PREFERRED_SIZE, 96, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addComponent(btnKembali, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
                    .addGroup(jPanel1Layout.createSequentialGroup()
                        .addComponent(jLabel2)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                        .addComponent(txtNoPinjam, javax.swing.GroupLayout.PREFERRED_SIZE, 114, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addComponent(lblDenda))
                    .addComponent(jScrollPane1))
                .addContainerGap())
        );
        jPanel1Layout.setVerticalGroup(
            jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel1Layout.createSequentialGroup()
                .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(jPanel1Layout.createSequentialGroup()
                        .addContainerGap()
                        .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                            .addComponent(jLabel2)
                            .addComponent(txtNoPinjam, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                        .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                            .addComponent(jLabel3)
                            .addComponent(txtTglPinjam, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                            .addComponent(jLabel4)
                            .addComponent(txtTglBatas, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                            .addComponent(jLabel5)
                            .addComponent(txtTglKembali, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                            .addComponent(btnKembali)))
                    .addComponent(lblDenda))
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 301, Short.MAX_VALUE)
                .addGap(5, 5, 5))
        );
        jLabel7.setText("Judul Buku");
        txtJudulBuku.setEditable(false);
        btnProses.setText("Proses");
        btnProses.setEnabled(false);
        btnProses.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                btnProsesActionPerformed(evt);
            }
        });
        // Layout of the frame's content pane.
        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
        getContentPane().setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(layout.createSequentialGroup()
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                            .addComponent(jLabel7)
                            .addComponent(jLabel1))
                        .addGap(25, 25, 25)
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                            .addComponent(txtJudulBuku)
                            .addGroup(layout.createSequentialGroup()
                                .addComponent(txtKodeBuku, javax.swing.GroupLayout.PREFERRED_SIZE, 111, javax.swing.GroupLayout.PREFERRED_SIZE)
                                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                                .addComponent(btnBrowse, javax.swing.GroupLayout.PREFERRED_SIZE, 34, javax.swing.GroupLayout.PREFERRED_SIZE)
                                .addGap(0, 0, Short.MAX_VALUE))))
                    .addComponent(jPanel1, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                    .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
                        .addGap(0, 0, Short.MAX_VALUE)
                        .addComponent(btnProses)))
                .addContainerGap())
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(jLabel1)
                    .addComponent(txtKodeBuku, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addComponent(btnBrowse))
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(jLabel7)
                    .addComponent(txtJudulBuku, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(jPanel1, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(btnProses)
                .addContainerGap())
        );
        pack();
    }// </editor-fold>//GEN-END:initComponents
private void fillCurrentPeminjamanDetil(String idBuku){
try {
String sql = "SELECT "
+ " transaksi.no_peminjaman "
+ " , transaksi.tgl_pinjam "
+ " , transaksi.tgl_batas "
+ " , detil_transaksi.id_buku "
+ " , buku.judul "
+ " , detil_transaksi.nilai_sewa "
+ "FROM "
+ " buku "
+ " INNER JOIN detil_transaksi "
+ " ON (buku.id = detil_transaksi.id_buku) "
+ " INNER JOIN transaksi "
+ " ON (transaksi.no_peminjaman = detil_transaksi.no_peminjaman) "
+ "WHERE (ISNULL(transaksi.tgl_kembali) "
+ " AND detil_transaksi.id_buku = ?);";
PreparedStatement command = cDatabaseConnection.dbConn.prepareStatement(sql);
command.setString(1, idBuku);
ResultSet rs = command.executeQuery();
DefaultTableModel tblModel = (DefaultTableModel) jTabDetil.getModel();
cUtils.ResetTableContent(tblModel);
while (rs.next()){
txtNoPinjam.setText(rs.getString("no_peminjaman"));
txtTglPinjam.setText(rs.getString("tgl_pinjam"));
txtTglBatas.setText(rs.getString("tgl_batas"));
tblModel.addRow(new Object[]{rs.getString("id_buku"),
rs.getString("judul"),
rs.getInt("nilai_sewa"),
" - "});
}
} catch (SQLException ex) {
Logger.getLogger(jFrmKembali.class.getName()).log(Level.SEVERE, null, ex);
}
}
private void btnBrowseActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnBrowseActionPerformed
jFrmPopBuku fBuku = new jFrmPopBuku(txtKodeBuku, txtJudulBuku, new JTextField(), mBuku.EnumBukuStatus.keluar);
fBuku.setVisible(true);
}//GEN-LAST:event_btnBrowseActionPerformed
private void btnKembaliActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnKembaliActionPerformed
DefaultTableModel tblModel = (DefaultTableModel) jTabDetil.getModel();
tblModel.setValueAt("kembali", jTabDetil.getSelectedRow(), 3);
}//GEN-LAST:event_btnKembaliActionPerformed
private void btnProsesActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnProsesActionPerformed
mTransaksiDetil trxDetil = new mTransaksiDetil(Integer.parseInt(txtNoPinjam.getText()));
mTransaksi trx = new mTransaksi();
trx.setNoPeminjaman(Integer.parseInt(txtNoPinjam.getText()));
DefaultTableModel tblModel = (DefaultTableModel) jTabDetil.getModel();
for (int i = 0; i < jTabDetil.getRowCount(); i++){
if (tblModel.getValueAt(i, 3) == "kembali") {
trxDetil.setIdBuku(tblModel.getValueAt(i, 0).toString());
trxDetil.UpdateBukuStatus(0);
}
}
trx.updateTglKembali(skg);
this.CalculateDenda(Integer.parseInt(txtNoPinjam.getText()));
}//GEN-LAST:event_btnProsesActionPerformed
private void CalculateDenda(int noPeminjaman){
try {
int denda = 0;
mTransaksi trx = new mTransaksi();
trx.setNoPeminjaman(noPeminjaman);
String sql = "SELECT DATEDIFF(tgl_kembali, tgl_batas) * (SELECT COUNT(det.no_peminjaman) FROM detil_transaksi det WHERE det.no_peminjaman = ?) * ? AS denda "
+ "FROM transaksi "
+ "WHERE transaksi.no_peminjaman = ?";
PreparedStatement command = cDatabaseConnection.dbConn.prepareStatement(sql);
command.setInt(1, noPeminjaman);
command.setInt(2, main.DefaultDenda);
command.setInt(3, noPeminjaman);
ResultSet rs = command.executeQuery();
while (rs.next()){
denda = rs.getInt("denda");
lblDenda.setText("Denda : "+String.valueOf(denda));
trx.updateDenda(denda);
break;
}
} catch (SQLException ex) {
Logger.getLogger(jFrmKembali.class.getName()).log(Level.SEVERE, null, ex);
}
}
    // Variables declaration - do not modify//GEN-BEGIN:variables
    // NetBeans form fields (regenerated from the .form file; edit via the GUI builder).
    private javax.swing.JButton btnBrowse;
    private javax.swing.JButton btnKembali;
    private javax.swing.JButton btnProses;
    private javax.swing.JLabel jLabel1;
    private javax.swing.JLabel jLabel2;
    private javax.swing.JLabel jLabel3;
    private javax.swing.JLabel jLabel4;
    private javax.swing.JLabel jLabel5;
    private javax.swing.JLabel jLabel7;
    private javax.swing.JPanel jPanel1;
    private javax.swing.JScrollPane jScrollPane1;
    // Detail table of the open loan; column 3 holds the "kembali" return marker.
    private javax.swing.JTable jTabDetil;
    private javax.swing.JLabel lblDenda;
    private javax.swing.JTextField txtJudulBuku;
    private javax.swing.JTextField txtKodeBuku;
    private javax.swing.JTextField txtNoPinjam;
    private javax.swing.JTextField txtTglBatas;
    private javax.swing.JTextField txtTglKembali;
    private javax.swing.JTextField txtTglPinjam;
    // End of variables declaration//GEN-END:variables
}
| [
"ophicxs@shinobi_kuuga.mshome.net"
] | ophicxs@shinobi_kuuga.mshome.net |
6ef47351137c879fcb1697c5d38bc21bb3f5e659 | 2f5f18f4f3c705f48f117747c4f3cd3030f78152 | /src/main/java/server/categories/CategoryTag.java | 9f411d038b321d629b4483500c279048a8ada6ca | [] | no_license | kkraft7/NoteElements | f895507f6bd736649eebc587cd2cf686bfa9b477 | ffc3126934e1e0f3dce8b30d548fc99c9579a936 | refs/heads/master | 2022-12-06T06:15:46.245666 | 2020-08-23T07:58:12 | 2020-08-23T07:58:12 | 270,917,478 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 338 | java | package server.categories;
import java.util.*;
/**
 * Mixin interface for enum-based category tags: any implementor that can list
 * its values gets uniform random-tag selection for free.
 */
public interface CategoryTag {
    // Interface fields are implicitly public static final; shared RNG for all tags.
    Random rand = new Random();

    /** Returns a uniformly random element of {@link #getTagValues()}. */
    default CategoryTag getRandomTag() {
        return getTagValues().get(rand.nextInt(getTagValues().size()));
    }

    /** All values of this tag category; must be non-empty for {@link #getRandomTag()}. */
    List<CategoryTag> getTagValues();
}
| [
"kkraft7@gmail.com"
] | kkraft7@gmail.com |
b620ddfd6e0154657533382cdcb91470423c8383 | a1205bb2751ce15192ed547061c2962c806cd5f4 | /src/main/java/kz/koko/agaionline/service/EmailServiceImpl.java | 2e78fe245b614267b81d5c37d68e0b739e2222f8 | [] | no_license | K0rlan/project | 6908cd590a781d1deac786af6c833c3ca83d13bc | 9ccd1949dacca2e51dd2e6396d67b37966ee29ab | refs/heads/master | 2023-04-29T16:34:00.026483 | 2021-05-25T17:55:12 | 2021-05-25T17:55:12 | 361,545,989 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 658 | java | package kz.koko.agaionline.service;
import kz.koko.agaionline.models.Email;
import kz.koko.agaionline.repo.EmailRepository;
import kz.koko.agaionline.service.Interfaces.EmailService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
 * Default {@link EmailService} implementation: a thin delegation layer over
 * {@link EmailRepository}.
 */
@Service
public class EmailServiceImpl implements EmailService {
    // NOTE(review): field injection; constructor injection would ease testing,
    // but adding a constructor would remove the implicit no-arg one.
    @Autowired
    private EmailRepository emailRepository;
    /** Persists the given e-mail record and returns the saved entity. */
    @Override
    public Email addEmailUserInfo(Email email) {
        return emailRepository.save(email);
    }
    /** Looks up an e-mail record by id (return on miss depends on the repository — TODO confirm). */
    @Override
    public Email getEmailUserById(Integer id) {
        return emailRepository.getEmailUserById(id);
    }
}
| [
"omar_korlan@mail.ru"
] | omar_korlan@mail.ru |
f32792ee5fed407a68039e3096fdd45a2f06078b | 49c50a05d3904a314798f66d6d037a8d22b96b56 | /app/src/main/java/com/example/jnuelibrary/ReturnBook.java | 07c28d6d44a85c990c119c2a507d94554ddcb07d | [] | no_license | sudiptacsejnu/JnU_eLibrary | 5b89939db5fbebe99d6e52e30ad9f75da1ee9e52 | d121176ec4469f27a073683f902fbc1dcde210d9 | refs/heads/master | 2023-03-16T22:51:50.113043 | 2023-03-11T11:49:57 | 2023-03-11T11:49:57 | 244,022,927 | 0 | 1 | null | null | null | null | UTF-8 | Java | false | false | 6,944 | java | package com.example.jnuelibrary;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import android.app.AlarmManager;
import android.app.PendingIntent;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.widget.TextView;
import android.widget.Toast;
import com.google.android.gms.tasks.OnCompleteListener;
import com.google.android.gms.tasks.Task;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.database.DataSnapshot;
import com.google.firebase.database.DatabaseError;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;
import com.google.firebase.database.ValueEventListener;
import java.util.ArrayList;
import java.util.List;
/**
 * Screen that processes a book return: shows the open loan for a book picked
 * via the launching Intent, bumps the book's stock count, records a
 * ReturnInformation entry and flags the matching BorrowInformation rows.
 */
public class ReturnBook extends AppCompatActivity {
    TextView returnUserNameTV, returnBookNameTV;
    private String returnBookID;        // book id passed in via Intent extra "returnBookID"
    private String returnUserName;      // shown in the UI; copied from the Intent extra
    private int bookQuantity;           // stock count of the book at launch time
    private int returnStatus = 1;       // value written to BorrowInformation.status on return
    private String UserName;
    private String uID;
    private long maxid = 0;             // child count of ReturnInformation, used as next key
    DatabaseReference databaseReferenceBook;
    DatabaseReference databaseReferenceUser;
    DatabaseReference databaseReferenceReturn;
    DatabaseReference databaseReferenceBorrow;
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_return_book);
        returnUserNameTV = findViewById(R.id.returnUserNameTV);
        returnBookNameTV = findViewById(R.id.returnBookNameTV);
        returnBookID = getIntent().getStringExtra("returnBookID");
        bookQuantity = getIntent().getIntExtra("returnBookQuantity",0);
        UserName = getIntent().getStringExtra("returnBookUserName");
        //bookQuantityInt = Integer.parseInt(bookQuantity);
        //Toast.makeText(this, bookQuantity, Toast.LENGTH_SHORT).show();
        uID = FirebaseAuth.getInstance().getUid();
        databaseReferenceBook = FirebaseDatabase.getInstance().getReference("Books");
        databaseReferenceUser = FirebaseDatabase.getInstance().getReference("Users");
        databaseReferenceReturn = FirebaseDatabase.getInstance().getReference("ReturnInformation");
        // Track the number of ReturnInformation children; maxid becomes the key of
        // the next entry. NOTE(review): addValueEventListener keeps firing for the
        // activity's lifetime and two concurrent returns could pick the same key.
        databaseReferenceReturn.addValueEventListener(new ValueEventListener() {
            @Override
            public void onDataChange(@NonNull DataSnapshot dataSnapshot) {
                if (dataSnapshot.exists()) {
                    maxid = (dataSnapshot.getChildrenCount());
                }
            }
            @Override
            public void onCancelled(@NonNull DatabaseError databaseError) {
                // Errors intentionally ignored here.
            }
        });
        // Shows the user name. The Users snapshot itself is unused; the name comes
        // from the Intent extra (the original lookup is kept commented out).
        databaseReferenceUser.addValueEventListener(new ValueEventListener() {
            @Override
            public void onDataChange(@NonNull DataSnapshot dataSnapshot) {
                //returnUserName = dataSnapshot.child(uID).child("name").getValue().toString();
                returnUserName = UserName;
                returnUserNameTV.setText(returnUserName);
            }
            @Override
            public void onCancelled(@NonNull DatabaseError databaseError) {
            }
        });
        // Shows the title of the book being returned.
        databaseReferenceBook.addValueEventListener(new ValueEventListener() {
            @Override
            public void onDataChange(@NonNull DataSnapshot dataSnapshot) {
                String returnBookName = dataSnapshot.child(returnBookID).child("bname").getValue().toString();
                returnBookNameTV.setText(returnBookName);
            }
            @Override
            public void onCancelled(@NonNull DatabaseError databaseError) {
            }
        });
    }
    /**
     * Click handler: confirms the return. Increments the book's stock, saves a
     * ReturnInformation record, then marks the matching BorrowInformation rows
     * as returned and navigates back to MainActivity.
     */
    public void BookReturnConform(View view) {
        int returnBookQuantityUpdate = bookQuantity + 1 ;
        databaseReferenceBook.child(returnBookID)
                .child("bquantity").setValue(Integer.toString(returnBookQuantityUpdate)).addOnCompleteListener(new OnCompleteListener<Void>() {
            @Override
            public void onComplete(@NonNull Task<Void> task) {
                if (task.isSuccessful()) {
                    ReturnInformation returnInformation = new ReturnInformation(returnUserName, returnBookID);
                    databaseReferenceReturn.child(String.valueOf(maxid))
                            .setValue(returnInformation).addOnCompleteListener(new OnCompleteListener<Void>() {
                        @Override
                        public void onComplete(@NonNull Task<Void> task) {
                            if (task.isSuccessful()) {
                                Toast.makeText(ReturnBook.this, "Book Information saved Successfully", Toast.LENGTH_SHORT).show();
                                databaseReferenceBorrow = FirebaseDatabase.getInstance().getReference("BorrowInformation");
                                // NOTE(review): String.matches() treats its argument as a
                                // regex; equals() is almost certainly what was intended here
                                // (ids/names with regex metacharacters would misbehave).
                                databaseReferenceBorrow.addValueEventListener(new ValueEventListener() {
                                    @Override
                                    public void onDataChange(@NonNull DataSnapshot dataSnapshot) {
                                        for (DataSnapshot dataSnapshot1 : dataSnapshot.getChildren()) {
                                            BorrowInformation borrowInformation = dataSnapshot1.getValue(BorrowInformation.class);
                                            if (borrowInformation.getBookID().matches(returnBookID)) {
                                                if(borrowInformation.getUserName().matches(returnUserName)) {
                                                    databaseReferenceBorrow.child(dataSnapshot1.getRef().getKey().toString())
                                                            .child("status").setValue(returnStatus);
                                                }
                                            }
                                        }
                                    }
                                    @Override
                                    public void onCancelled(@NonNull DatabaseError databaseError) {
                                    }
                                });
                                Intent intent = new Intent(ReturnBook.this, MainActivity.class);
                                startActivity(intent);
                                finish();
                            }
                            else {
                                Toast.makeText(ReturnBook.this, "Error!" + task.getException().getMessage(), Toast.LENGTH_SHORT).show();
                            }
                        }
                    });
                }
                else {
                    //display a failure message
                    Toast.makeText(ReturnBook.this, "Error!" + task.getException().getMessage(), Toast.LENGTH_SHORT).show();
                }
            }
        });
    }
}
"sudiptacsejnu@gmail.com"
] | sudiptacsejnu@gmail.com |
a7912ff51bb7daac4c7f20f7ad2d5284ecc3360f | 92682142308cc6692234b2e8495975e089789a39 | /src/project1/FileSender.java | 3a3d73383a46f403e4804c815133b6456310e446 | [
"MIT"
] | permissive | Dawtt/dntp | 3e164cc617c48f3a92025df91a4741963ac24a2c | c7abad77c5b413e4f62aca40832a311c1bac37a4 | refs/heads/master | 2020-03-29T02:07:27.987405 | 2019-03-29T14:53:18 | 2019-03-29T14:53:18 | 149,421,456 | 0 | 1 | MIT | 2019-03-29T14:53:19 | 2018-09-19T08:55:54 | Java | UTF-8 | Java | false | false | 4,917 | java | /**
* File Created by Joshua Zierman on Sep 25, 2018
*/
package project1;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.LinkedList;
import log.Log;
import log.Loggable;
/**
 * Sends one file as a sequence of chunks to a destination address/port.
 *
 * @author Joshua Zierman [py1422xs@metrostate.edu]
 *
 *         compile with the following line (from project root): javac -d bin -cp
 *         bin -sourcepath src src\project1\FileSender.java
 *
 *         run with the following line (from project root): java -cp bin
 *         project1.FileSender
 */
public class FileSender implements Sender, Loggable
{
	private Log log = new Log();

	private InetAddress destinationIp;

	private int destinationPort, bytesPerChunk;

	private String filename;

	private LinkedList<Chunk> chunks = new LinkedList<Chunk>();

	// Number of sends performed so far; hasNext() is true only while this is 0,
	// i.e. the sender sends exactly one file per instance.
	private int next;

	FileSender(String filename, InetAddress ip, int port, int bytesPerChunk)
	{
		setBytesPerChunk(bytesPerChunk);
		setFilename(filename);
		setToAddress(ip);
		setToPort(port);
	}

	/**
	 * Sends a File to a destination.
	 *
	 * @param args
	 *            filename, bytesPerChunk, toIp, toPort (all optional; defaults
	 *            come from Project1)
	 */
	public static void main(String[] args)
	{
		String filename = Project1.getInputFilename();
		int bytesPerChunk = Project1.getBytesPerChunk();
		InetAddress toIp = Project1.getDestinationIp();
		int toPort = Project1.getPort();

		// handle inline args (and default bytesPerChunk override for large
		// files)
		if (args.length > 0)
		{
			filename = args[0];
		}
		if (args.length > 1)
		{
			bytesPerChunk = Integer.parseInt(args[1]);
		}
		else
		{
			// No explicit chunk size: for large files, split into ~20 chunks.
			try
			{
				long bytesInFile = new File(filename).length();
				if (bytesInFile > Project1.getMinBytesInFileBeforeBpcOverride())
				{
					bytesPerChunk = (int) (bytesInFile / 20);
				}
			}
			catch (Exception e)
			{
				// dont do anything — best-effort override; the default size is kept.
			}
		}
		if (args.length > 2)
		{
			try
			{
				toIp = InetAddress.getByName(args[2]);
			}
			catch (UnknownHostException e)
			{
				throw new IllegalArgumentException("Invalid 3rd argument value. Must be a valid InetAddress name");
			}
		}
		if (args.length > 3)
		{
			toPort = Integer.parseInt(args[3]);
		}

		// sendAllFiles ... incidentally only one file
		FileSender sender = new FileSender(filename, toIp, toPort, bytesPerChunk);
		while (sender.hasNext())
		{
			sender.sendNext();
		}

		// print log
		sender.printLog(System.out);
		sender.clearLog();
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see log.Loggable#absorbLog(log.Loggable)
	 */
	@Override
	public void absorbLog(Loggable l)
	{
		log.absorb(l.getLog());
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see log.Loggable#clearLog()
	 */
	@Override
	public void clearLog()
	{
		log.clear();
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see log.Loggable#getLog()
	 */
	@Override
	public Log getLog()
	{
		return log;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see project1.Sender#hasNext()
	 */
	@Override
	public boolean hasNext()
	{
		// True until sendNext() has been called once: one send per instance
		// (unless resetNextPosition() is called).
		return next < 1;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see log.Loggable#printLog(java.io.PrintStream)
	 */
	@Override
	public void printLog(PrintStream printStream)
	{
		printStream.println("<FileSender Log Start>");
		log.print(printStream);
		printStream.println("<FileSender Log End>");
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see project1.Sender#resetNextPosition()
	 */
	@Override
	public void resetNextPosition()
	{
		next = 0;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see project1.Sender#sendNext()
	 */
	@Override
	public void sendNext()
	{
		send();
		next++;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see project1.Sender#setToAddress(java.net.InetAddress)
	 */
	@Override
	public void setToAddress(InetAddress ip)
	{
		destinationIp = ip;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see project1.Sender#setToPort(int)
	 */
	@Override
	public void setToPort(int port)
	{
		destinationPort = port;
	}

	/**
	 * Sends the file: splits it into chunks, then pushes every chunk to the
	 * destination. NOTE(review): IOException is only printed, not propagated.
	 */
	private void send()
	{
		try
		{
			FileSplitter splitter = new FileSplitter(filename, bytesPerChunk);
			splitter.overwrite(chunks);
			absorbLog(splitter);
			ChunkSender chunkSender = new ChunkSender(destinationIp, destinationPort);
			chunkSender.load(chunks);
			while (chunkSender.hasNext())
			{
				chunkSender.sendNext();
			}
			absorbLog(chunkSender);
		}
		catch (IOException e)
		{
			e.printStackTrace();
		}
	}

	/**
	 * sets the number of bytes per chunk
	 *
	 * @param bytesPerChunk
	 *            an integer value that represents the number of bytes per chunk;
	 *            must be positive
	 */
	private void setBytesPerChunk(int bytesPerChunk)
	{
		if (bytesPerChunk < 1)
		{
			throw new IllegalArgumentException();
		}
		this.bytesPerChunk = bytesPerChunk;
	}

	/**
	 * sets the filename
	 *
	 * @param filename
	 *            the String containing the filename; must be non-empty and not
	 *            end with a dot
	 */
	private void setFilename(String filename)
	{
		if (filename.length() < 1 || filename.endsWith("."))
		{
			throw new IllegalArgumentException();
		}
		this.filename = filename;
	}
}
"py1422xs@metrostate.edu"
] | py1422xs@metrostate.edu |
691e7fb1ffc843bb1e71d7bb04fb330931a87816 | 1cedb98670494d598273ca8933b9d989e7573291 | /ezyfox-server-core/src/main/java/com/tvd12/ezyfoxserver/wrapper/EzyUserAddableManager.java | 7fc23161733efdfbbde60564932797091894b514 | [
"Apache-2.0"
] | permissive | thanhdatbkhn/ezyfox-server | ee00e1e23a2b38597bac94de7103bdc3a0e0bedf | 069e70c8a7d962df8341444658b198ffadc3ce61 | refs/heads/master | 2020-03-10T11:08:10.921451 | 2018-01-14T16:50:37 | 2018-01-14T16:50:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 124 | java | package com.tvd12.ezyfoxserver.wrapper;
/**
 * Aggregate interface: a user manager that also supports adding users.
 * Declares no members of its own; it only combines the two parent contracts.
 */
public interface EzyUserAddableManager extends EzyUserManager, EzyUserAddable {
}
| [
"itprono3@gmail.com"
] | itprono3@gmail.com |
da7d05cfc2aa86f691c758fd6b4bfccef1d45d5e | 3a30d8a1c7992e18c784ffa59ee5543b9da8d8e2 | /src/main/java/com/heshun/dsm/ui/ListPanel.java | 04507f695ab63320d4ec45852bafbf637f724013 | [] | no_license | jhongwei80/ElecGateway103 | f9c26663fb90377152d54d851d0e98faf1985150 | 9dc067b76f6a90ac9c133388383df762e41d8301 | refs/heads/master | 2023-03-16T22:34:45.704671 | 2018-10-29T08:51:15 | 2018-10-29T08:51:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 4,092 | java | package com.heshun.dsm.ui;
import java.awt.HeadlessException;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.Map;
import java.util.Map.Entry;
import javax.swing.DefaultListModel;
import javax.swing.JFrame;
import javax.swing.JList;
import javax.swing.JMenuItem;
import javax.swing.JOptionPane;
import javax.swing.JPopupMenu;
import javax.swing.SwingUtilities;
import org.apache.mina.core.session.IoSession;
import com.heshun.dsm.service.SystemHelper;
import com.heshun.dsm.util.SessionUtils;
/**
 * Swing panel that lists the gateway's active Mina sessions and offers a
 * right-click menu (inspect / kick / add) plus a double-click detail view.
 */
public class ListPanel extends JFrame {
	private static final long serialVersionUID = 1L;
	private long clickTime = 0;                  // timestamp of the previous click, for double-click detection
	private JPopupMenu jpm;                      // right-click context menu
	private DefaultListModel<String> dlm = new DefaultListModel<>();// dynamic list contents
	private JList<String> jList;// the list widget
	private int currentPoint;// index of the currently highlighted item
	// private IoSession currentSession;
	private Map<Long, IoSession> sessions;       // live sessions keyed by session id
	private Long[] mapIndex;// maps list row index -> session key
	public ListPanel(JList<String> jList) throws HeadlessException {
		this.jList = jList;
		jpm = new JPopupMenu();
		// initialize the right-click menu
		initPopMenu();
		initJListPanel();
	}
	/**
	 * Rebuilds the list from the acceptor's managed sessions (on the EDT).
	 * Each entry shows "key#ip:port-->logotype"; -1 means no logotype attribute.
	 */
	public void upData() {
		SwingUtilities.invokeLater(new Runnable() {
			public void run() {
				dlm.clear();
				StringBuffer sb;
				sessions = SystemHelper.minaAcceptor.getManagedSessions();
				int index = 0;
				mapIndex = new Long[sessions.size()];
				for (Entry<Long, IoSession> entry : sessions.entrySet()) {
					sb = new StringBuffer();
					IoSession session = entry.getValue();
					int logtype = session.getAttribute("logotype") == null ? -1 : (int) session
							.getAttribute("logotype");
					sb.append(entry.getKey()).append("#").append(SessionUtils.getIpFromSession(session)).append(":")
							.append(SessionUtils.getPortFromSession(session)).append("-->")
							.append(String.valueOf(logtype));
					dlm.addElement(sb.toString());
					mapIndex[index++] = entry.getKey();
				}
			}
		});
	}
	/**
	 * JList entry point: installs the model and the mouse handling
	 * (right-click menu + double-click detail window).
	 */
	public void initJListPanel() {
		jList.setModel(dlm);
		jList.addMouseListener(new MouseAdapter() {
			@Override
			public void mouseClicked(MouseEvent e) {
				// right-click event (3 == MouseEvent.BUTTON3)
				if (e.getButton() == 3) {
					currentPoint = jList.locationToIndex(e.getPoint());
					jList.setSelectedIndex(currentPoint);// highlight this entry
					jpm.show(jList, e.getX(), e.getY());
				}
				// double-click event: two clicks within 500 ms open the detail frame.
				// NOTE(review): uses currentPoint (set on right-click) to pick the
				// session but index (clicked row) for logging — confirm intended.
				if (System.currentTimeMillis() - clickTime < 500) {
					int index;
					index = jList.locationToIndex(e.getPoint());
					new FrameCenter(sessions.get(mapIndex[currentPoint])).detailFrame().setVisible(true);
					System.out.println("" + dlm.get(index));
				}
				clickTime = System.currentTimeMillis();
			}
		});
	}
	/**
	 * Initializes the right-click context menu (inspect / kick / add).
	 */
	public void initPopMenu() {
		JMenuItem item = new JMenuItem("查看设备");
		item.addActionListener(new ActionListener() {
			@Override
			public void actionPerformed(ActionEvent e) {
				System.out.println(String.format("查看%s的设备", dlm.getElementAt(currentPoint)));
			}
		});
		jpm.add(item);
		item = new JMenuItem("踢出管理机");
		// kick-out event
		item.addActionListener(new ActionListener() {
			public void actionPerformed(ActionEvent e) {
				if (e.getModifiers() == 16) {// 16 == left-button modifier mask
					int action_id = JOptionPane.showConfirmDialog(null, "确定踢出?", "警告", JOptionPane.WARNING_MESSAGE);// i=0/1
					if (action_id == 0) {// execute after confirmation
						// NOTE(review): the row is removed from dlm before mapIndex is
						// consulted; mapIndex is only rebuilt by upData(), so it still
						// holds the pre-removal mapping here.
						dlm.remove(currentPoint);
						jList.setModel(dlm);
						IoSession s = sessions.get(mapIndex[currentPoint]);// close the connection
						s.getWriteRequestQueue().clear(s);
						s.closeNow();
						System.out.println(e.getActionCommand());
					}
				}
			}
		});
		jpm.add(item);// add device menu item follows
		item = new JMenuItem("添加设备");
		item.addActionListener(new ActionListener() {
			public void actionPerformed(ActionEvent e) {
				if (e.getModifiers() == 16) {
					System.out.println(e.getActionCommand());
				}
			}
		});
		jpm.add(item);
	}
}
"sun_flo@yeah.net"
] | sun_flo@yeah.net |
73b4e926e622fca99c0fbdeb299c35baaf615cdc | 04d921370164fb8fd82f94d47002fb9132d09058 | /src/day49_Abstraction/RemoteDriverTask/TestCases.java | 779bb95e7d1781f3ce0dd4679157344d7d125d55 | [] | no_license | KatrinSi/Batch21_Java | 794ec7dd23504d3830193917199228d95f6642ea | e6139be4f01d741a21259d0466f83831578efc89 | refs/heads/master | 2023-03-26T20:04:51.913387 | 2021-03-23T13:41:19 | 2021-03-23T13:41:19 | 311,477,737 | 0 | 3 | null | null | null | null | UTF-8 | Java | false | false | 521 | java | package day49_Abstraction.RemoteDriverTask;
/** Demo driver: exercises both concrete drivers plus an upcast to the abstraction. */
public class TestCases {
    public static void main(String[] args) {
        // Chrome: screenshot first, then navigate, then shut down.
        ChromeDriver chrome = new ChromeDriver();
        chrome.takeScreenShot("pic1");
        chrome.get("google");
        chrome.close();

        System.out.println("------------------------");

        // Firefox: navigate first, then screenshot, then shut down.
        FirefoxDriver firefox = new FirefoxDriver();
        firefox.get("google");
        firefox.takeScreenShot("pic");
        firefox.close();

        // A concrete driver can be referenced through the abstract type.
        WebDriver polymorphic = new ChromeDriver();
    }
}
| [
"katrinsi@me.com"
] | katrinsi@me.com |
3d45c82b27f5154a1b5a1a9c3e510f4ebc7d4d4d | 5d39e833cca9e4778d2ad5ca2df9155334f1ae6e | /src/main/java/org/aacish/disease_prediction/classifier/PrepareInputParameter.java | f6d53d536bcf4dd839a3d37eead6a12e6c3347d4 | [] | no_license | sraashis/diseaseprediction | feb0c2e08aab1e6a4e01a644955046b1cc13101b | c85fe48e2ac4115f7c769e3d1b560ab7855fc6e8 | refs/heads/master | 2021-11-24T13:14:30.418126 | 2021-10-28T13:31:29 | 2021-10-28T13:31:29 | 43,866,847 | 13 | 10 | null | null | null | null | UTF-8 | Java | false | false | 1,320 | java | package org.aacish.disease_prediction.classifier;
import java.util.ArrayList;
import java.util.List;
import org.aacish.disease_prediction.DAO.VocabDAO;
/**
 * Builds a document-frequency feature vector over the symptom vocabulary
 * supplied by a {@link VocabDAO}.
 */
public class PrepareInputParameter implements InputParameter{
    private VocabDAO vocabDAO = null;
    private ArrayList<Integer> featureVector;
    // Tokens of the document currently being scanned. Kept as a field for
    // backward compatibility; it is only written inside prepareFeaturevector.
    String[] tknDoc;
    public VocabDAO getVocabDAO() {
        return vocabDAO;
    }
    public void setVocabDAO(VocabDAO vocabDAO) {
        this.vocabDAO = vocabDAO;
    }
    public PrepareInputParameter() {}
    /**
     * For every symptom in the vocabulary, counts in how many of the given
     * documents it occurs (each document counted at most once per symptom).
     *
     * @param docs raw documents, tokenised on runs of spaces
     * @return one count per vocabulary symptom, in vocabulary order
     */
    public ArrayList<Integer> prepareFeaturevector(List<String> docs) {
        List<String> symptoms = this.vocabDAO.getSymptomsList();
        featureVector = new ArrayList<Integer>();
        for(String s: symptoms){
            int tokenInNOofDocs = 0;
            for(String doc: docs){
                /* Each doc prepares a feature vector.*/
                tknDoc = doc.split(" +");
                for(String tkD : tknDoc){
                    if(s.equals(alphaOnly(tkD))){
                        tokenInNOofDocs++;
                        break; // count this document once and move on
                    }
                }
            }
            featureVector.add(tokenInNOofDocs);
        }
        return featureVector;
    }
    /**
     * Lower-cases the input and strips every non-letter character.
     * Uses StringBuilder instead of the original O(n^2) String concatenation.
     */
    public String alphaOnly(String ip){
        ip = ip.toLowerCase();
        StringBuilder formatted = new StringBuilder(ip.length());
        for(int i = 0; i < ip.length();i++){
            if(Character.isLetter(ip.charAt(i))){
                formatted.append(ip.charAt(i));
            }
        }
        return formatted.toString();
    }
}
| [
"aashis.khanal@tektak.com"
] | aashis.khanal@tektak.com |
a8ec83a0846e823f411dac2f2fca82ead56f15b9 | 83dced5bea77d0aaed5964754fcdd3c8da3d67f7 | /itest/src/it/regression-multi/src/test/java/org/ops4j/pax/exam/regression/multi/junit/BeforeAfterParent.java | 2673c7a28da1a40977fa10e6c821b3103a21de1b | [
"Apache-2.0"
] | permissive | carrot-garden/pax_org.ops4j.pax.exam2 | 66742252e9ea1c20c41ab6e3cefde900ae8702e6 | ca8369105c7c793a3dc59401be76942697114cbe | refs/heads/master | 2021-01-17T22:37:55.869970 | 2012-09-29T14:44:10 | 2012-09-29T14:44:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 2,415 | java | /*
* Copyright (C) 2011 Harald Wellmann
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ops4j.pax.exam.regression.multi.junit;
import static org.ops4j.pax.exam.CoreOptions.junitBundles;
import static org.ops4j.pax.exam.CoreOptions.options;
import static org.ops4j.pax.exam.CoreOptions.url;
import static org.ops4j.pax.exam.regression.multi.RegressionConfiguration.regressionDefaults;
import java.util.prefs.BackingStoreException;
import java.util.prefs.Preferences;
import org.junit.After;
import org.junit.Before;
import org.ops4j.pax.exam.Option;
import org.ops4j.pax.exam.TestContainerException;
import org.ops4j.pax.exam.junit.Configuration;
import org.ops4j.pax.exam.util.PathUtils;
public class BeforeAfterParent
{
    /**
     * Pax Exam configuration: regression defaults, the PDE regression bundle
     * referenced from the build output, and JUnit support bundles.
     */
    @Configuration
    public Option[] config()
    {
        String bundleUrl = "reference:file:" + PathUtils.getBaseDir() +
            "/target/regression-pde-bundle.jar";
        return options(
            regressionDefaults(),
            url( bundleUrl ),
            junitBundles() );
    }

    /** Records that the parent's @Before hook ran. */
    @Before
    public void before()
    {
        addMessage( "Before in parent" );
    }

    /** Records that the parent's @After hook ran. */
    @After
    public void after()
    {
        addMessage( "After in parent" );
    }

    /** Wipes every message recorded so far under this class's preference node. */
    public static void clearMessages() throws BackingStoreException
    {
        Preferences node = Preferences.userNodeForPackage( BeforeAfterParent.class );
        node.clear();
        node.sync();
    }

    /**
     * Appends a message under the key "message.&lt;n&gt;" and bumps the
     * "numMessages" counter, flushing the node afterwards.
     */
    public static void addMessage( String message )
    {
        Preferences node = Preferences.userNodeForPackage( BeforeAfterParent.class );
        int count = node.getInt( "numMessages", 0 );
        node.put( "message." + count, message );
        node.putInt( "numMessages", count + 1 );
        try
        {
            node.sync();
        }
        catch ( BackingStoreException cause )
        {
            throw new TestContainerException( cause );
        }
    }
}
| [
"harald.wellmann@gmx.de"
] | harald.wellmann@gmx.de |
f702347683e32f8e4ab76fd3be1886ba2d75d674 | 21e092f59107e5cceaca12442bc2e90835473c8b | /eorderfile/src/main/java/com/basoft/file/application/FileData.java | b5c98318e876e800a77c8e88be2943027002e8df | [] | no_license | kim50012/smart-menu-endpoint | 55095ac22dd1af0851c04837190b7b6652d884a0 | d45246f341f315f8810429b3a4ec1d80cb894d7f | refs/heads/master | 2023-06-17T02:52:01.135480 | 2021-07-15T15:05:19 | 2021-07-15T15:05:19 | 381,788,497 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 3,176 | java | package com.basoft.file.application;
import java.util.Arrays;
/**
 * In-memory representation of a stored file: naming metadata, content type,
 * size, an optional lookup key/URL, and the raw payload bytes. Extends the
 * project's {@code FileService.FileRef} with the (id, full path, full name)
 * triple passed to the super constructor.
 */
public class FileData extends FileService.FileRef {

    private String name;          // stored file name
    private String originalName;  // file name as supplied by the uploader
    private String type;          // content/MIME type
    private int size;             // payload size in bytes
    private byte[] payload;       // raw file contents (may be null)
    private String keyUrl;        // lookup key / URL of the stored file

    public FileData(String fileId, String fullFilePath, String fullName) {
        super(fileId, fullFilePath, fullName, null);
    }

    public String name() {
        return name;
    }

    public String originalName() {
        return originalName;
    }

    public String type() {
        return type;
    }

    public int size() {
        return size;
    }

    public byte[] payload() {
        return payload;
    }

    public String getKeyUrl() {
        return keyUrl;
    }

    /** Entry point for the fluent builder. */
    public static final Builder newBuilder() {
        return new Builder();
    }

    /** Fluent builder assembling a {@link FileData} instance field by field. */
    public static final class Builder {
        private String id;
        private String name;
        private String originalName;
        private String type;
        private int size;
        private String fullUrl;
        private byte[] payload;
        private String keyUrl;

        public Builder id(String id) {
            this.id = id;
            return this;
        }

        public Builder name(String name) {
            this.name = name;
            return this;
        }

        public Builder originalName(String originalName) {
            this.originalName = originalName;
            return this;
        }

        public Builder contentsType(String type) {
            this.type = type;
            return this;
        }

        public Builder size(int size) {
            this.size = size;
            return this;
        }

        public Builder fullUrl(String url) {
            this.fullUrl = url;
            return this;
        }

        public Builder payload(byte[] payload) {
            this.payload = payload;
            return this;
        }

        public Builder keyUrl(String key_url) {
            this.keyUrl = key_url;
            return this;
        }

        public String getId() {
            return id;
        }

        public String getName() {
            return name;
        }

        public String getOriginalName() {
            return originalName;
        }

        public String getType() {
            return type;
        }

        public String getFullUrl() {
            return fullUrl;
        }

        /** Builds the FileData, copying every configured field onto the instance. */
        public FileData build() {
            FileData fd = new FileData(id, fullUrl, name);
            fd.name = this.name;
            fd.originalName = this.originalName;
            fd.size = this.size;
            fd.type = this.type;
            fd.payload = this.payload;
            fd.keyUrl = this.keyUrl;
            return fd;
        }
    }

    @Override
    public String toString() {
        // Fix: the original dumped the whole payload via Arrays.toString,
        // which floods logs for large files; report only its size instead.
        return "FileData{" +
                "name='" + name + '\'' +
                ", originalName='" + originalName + '\'' +
                ", type='" + type + '\'' +
                ", size=" + size +
                ", payload=" + (payload == null ? "null" : "byte[" + payload.length + "]") +
                ", keyUrl='" + keyUrl + '\'' +
                '}';
    }
}
| [
"kim50012@naver.com"
] | kim50012@naver.com |
4e2e8c8f20fd98b3dfbef7fa590efd76b8ac91bc | aea01209ae2af58dacfc12b218dd47281b81df33 | /src/main/java/io/ennate/trucker/repository/VehicleRepository.java | 26e943916bd2d19412f060907966340cd4a104d1 | [] | no_license | niketpatel2525/trucker | 075a7d956bb1f8ff41d7dc2e8ec1e0bbf2e77077 | 4aec3caa98e48febfa4f17eb3e8ba4e1f5f1ca93 | refs/heads/master | 2022-07-01T08:07:44.546658 | 2020-11-15T19:22:01 | 2020-11-15T19:22:01 | 134,105,122 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 354 | java | package io.ennate.trucker.repository;
import io.ennate.trucker.entity.Vehicle;
import org.springframework.data.repository.CrudRepository;
import org.springframework.stereotype.Repository;
import java.util.Optional;
/**
 * Spring Data CRUD repository for {@code Vehicle} entities keyed by a String id.
 */
@Repository
public interface VehicleRepository extends CrudRepository<Vehicle, String> {

    /**
     * Looks a vehicle up by its VIN (query derived from the method name);
     * empty when no vehicle matches.
     */
    Optional<Vehicle> findByVin(String vin);
}
| [
"niketpatel2525@gmail.com"
] | niketpatel2525@gmail.com |
0129cd5f6822a408862690b94d29dd95bedead4b | 77cca4b6d518eeac6d88fe8fcdf0b6b8a6a503d7 | /src/java/Entidade/Tiposituacao.java | 76cd75733a97b79515d93fbb6d3e9ba0585c2613 | [] | no_license | TheProjecter/projetopibid | d3ee552a7fab0a7c085e0561bc2ff17ecfc67dec | d0888458b778664013c1b483836340b58b3e67da | refs/heads/master | 2021-01-10T15:13:20.973412 | 2013-03-20T15:14:03 | 2013-03-20T15:14:03 | 43,161,576 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 5,560 | java | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package Entidade;
import java.io.Serializable;
import java.util.Collection;
import javax.persistence.Basic;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import javax.validation.constraints.Size;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;
/**
*
* @author jp
*/
// JPA entity for the "tiposituacao" lookup table: a situation/status type
// referenced by several other entities (bolsista, projeto, supervisor, ...).
@Entity
@Table(name = "tiposituacao")
@XmlRootElement
@NamedQueries({
    @NamedQuery(name = "Tiposituacao.findAll", query = "SELECT t FROM Tiposituacao t"),
    @NamedQuery(name = "Tiposituacao.findById", query = "SELECT t FROM Tiposituacao t WHERE t.id = :id"),
    @NamedQuery(name = "Tiposituacao.findBySituacao", query = "SELECT t FROM Tiposituacao t WHERE t.situacao = :situacao")})
public class Tiposituacao implements Serializable {
    private static final long serialVersionUID = 1L;
    // Auto-incremented primary key.
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @Basic(optional = false)
    @Column(name = "id")
    private Integer id;
    // Human-readable situation label, at most 45 characters.
    @Size(max = 45)
    @Column(name = "situacao")
    private String situacao;
    // NOTE(review): CascadeType.ALL on every inverse collection means deleting
    // a Tiposituacao would cascade to all referencing rows — confirm that is
    // intended for what looks like a lookup/reference entity.
    @OneToMany(cascade = CascadeType.ALL, mappedBy = "tipoSituacaoid")
    private Collection<Bolsista> bolsistaCollection;
    @OneToMany(cascade = CascadeType.ALL, mappedBy = "tipoSituacaoid")
    private Collection<Projeto> projetoCollection;
    @OneToMany(cascade = CascadeType.ALL, mappedBy = "tipoSituacaoid")
    private Collection<Supervisor> supervisorCollection;
    @OneToMany(cascade = CascadeType.ALL, mappedBy = "tipoSituacaoid")
    private Collection<Professorcolaborador> professorcolaboradorCollection;
    @OneToMany(cascade = CascadeType.ALL, mappedBy = "tipoSituacaoid")
    private Collection<Escola> escolaCollection;
    @OneToMany(cascade = CascadeType.ALL, mappedBy = "tipoSituacaoid")
    private Collection<Tarefa> tarefaCollection;
    @OneToMany(cascade = CascadeType.ALL, mappedBy = "tipoSituacaoid")
    private Collection<Alunocolaborador> alunocolaboradorCollection;
    // No-arg constructor required by JPA.
    public Tiposituacao() {
    }
    public Tiposituacao(Integer id) {
        this.id = id;
    }
    public Integer getId() {
        return id;
    }
    public void setId(Integer id) {
        this.id = id;
    }
    public String getSituacao() {
        return situacao;
    }
    public void setSituacao(String situacao) {
        this.situacao = situacao;
    }
    // Collection accessors are @XmlTransient to keep the inverse sides out of
    // JAXB serialization (avoids cycles).
    @XmlTransient
    public Collection<Bolsista> getBolsistaCollection() {
        return bolsistaCollection;
    }
    public void setBolsistaCollection(Collection<Bolsista> bolsistaCollection) {
        this.bolsistaCollection = bolsistaCollection;
    }
    @XmlTransient
    public Collection<Projeto> getProjetoCollection() {
        return projetoCollection;
    }
    public void setProjetoCollection(Collection<Projeto> projetoCollection) {
        this.projetoCollection = projetoCollection;
    }
    @XmlTransient
    public Collection<Supervisor> getSupervisorCollection() {
        return supervisorCollection;
    }
    public void setSupervisorCollection(Collection<Supervisor> supervisorCollection) {
        this.supervisorCollection = supervisorCollection;
    }
    @XmlTransient
    public Collection<Professorcolaborador> getProfessorcolaboradorCollection() {
        return professorcolaboradorCollection;
    }
    public void setProfessorcolaboradorCollection(Collection<Professorcolaborador> professorcolaboradorCollection) {
        this.professorcolaboradorCollection = professorcolaboradorCollection;
    }
    @XmlTransient
    public Collection<Escola> getEscolaCollection() {
        return escolaCollection;
    }
    public void setEscolaCollection(Collection<Escola> escolaCollection) {
        this.escolaCollection = escolaCollection;
    }
    @XmlTransient
    public Collection<Tarefa> getTarefaCollection() {
        return tarefaCollection;
    }
    public void setTarefaCollection(Collection<Tarefa> tarefaCollection) {
        this.tarefaCollection = tarefaCollection;
    }
    @XmlTransient
    public Collection<Alunocolaborador> getAlunocolaboradorCollection() {
        return alunocolaboradorCollection;
    }
    public void setAlunocolaboradorCollection(Collection<Alunocolaborador> alunocolaboradorCollection) {
        this.alunocolaboradorCollection = alunocolaboradorCollection;
    }
    // hashCode/equals are id-based (standard NetBeans entity template).
    @Override
    public int hashCode() {
        int hash = 0;
        hash += (id != null ? id.hashCode() : 0);
        return hash;
    }
    @Override
    public boolean equals(Object object) {
        // TODO: Warning - this method won't work in the case the id fields are not set
        if (!(object instanceof Tiposituacao)) {
            return false;
        }
        Tiposituacao other = (Tiposituacao) object;
        if ((this.id == null && other.id != null) || (this.id != null && !this.id.equals(other.id))) {
            return false;
        }
        return true;
    }
    @Override
    public String toString() {
        return "Entidade.Tiposituacao[ id=" + id + " ]";
    }
}
| [
"jpinfoagraria@gmail.com"
] | jpinfoagraria@gmail.com |
7c73afd0cc8880fc22c36bb54443872bfaec8ddc | 9f8304a649e04670403f5dc1cb049f81266ba685 | /common/src/test/java/com/cmcc/vrp/province/service/impl/ApprovalRequestTest.java | 80206880b6ae65e9a6ba2912126b77e6ef99cab3 | [] | no_license | hasone/pdata | 632d2d0df9ddd9e8c79aca61a87f52fc4aa35840 | 0a9cfd988e8a414f3bdbf82ae96b82b61d8cccc2 | refs/heads/master | 2020-03-25T04:28:17.354582 | 2018-04-09T00:13:55 | 2018-04-09T00:13:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 10,293 | java | package com.cmcc.vrp.province.service.impl;
import com.cmcc.vrp.enums.ApprovalType;
import com.cmcc.vrp.province.dao.AdminManagerMapper;
import com.cmcc.vrp.province.dao.ApprovalDetailDefinitionMapper;
import com.cmcc.vrp.province.dao.ApprovalProcessDefinitionMapper;
import com.cmcc.vrp.province.dao.EntManagerMapper;
import com.cmcc.vrp.province.dao.EnterpriseApprovalRecordMapper;
import com.cmcc.vrp.province.dao.EnterpriseMapper;
import com.cmcc.vrp.province.model.ApprovalDetailDefinition;
import com.cmcc.vrp.province.model.ApprovalProcessDefinition;
import com.cmcc.vrp.province.model.Enterprise;
import com.cmcc.vrp.province.model.EnterpriseApprovalRecord;
import com.cmcc.vrp.province.model.Manager;
import org.junit.Ignore;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import java.io.FileWriter;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.List;
/**
 * One-off data-migration helper (kept {@code @Ignore}d): reads the legacy
 * enterprise-approval data through the mappers and writes a script of
 * INSERT/UPDATE statements for the new approval_request / approval_record
 * schema into "approval-sql.txt".
 *
 * Created by qinqinyan on 2016/9/9.
 */
@Ignore
public class ApprovalRequestTest {

    @Autowired
    ApprovalProcessDefinitionMapper approvalProcessDefinitionMapper;

    @Autowired
    ApprovalDetailDefinitionMapper approvalDetailDefinitionMapper;

    @Autowired
    EnterpriseMapper enterpriseMapper;

    @Autowired
    EnterpriseApprovalRecordMapper enterpriseApprovalRecordMapper;

    @Autowired
    EntManagerMapper entManagerMapper;

    @Autowired
    AdminManagerMapper adminManagerMapper;

    @Ignore
    @Test
    public void createApproval() throws IOException {
        ApprovalProcessDefinition process = approvalProcessDefinitionMapper
                .selectByType(ApprovalType.Enterprise_Approval.getCode());
        List<ApprovalDetailDefinition> steps = approvalDetailDefinitionMapper
                .getByApprovalProcessId(process.getId());
        List<Enterprise> enterprises = enterpriseMapper.getEnterprisesByStatus();

        // Fix: the original never closed the writer, so the buffered tail of
        // the generated script could be lost; try-with-resources closes it.
        try (FileWriter writer = new FileWriter("approval-sql.txt")) {
            if (enterprises == null) {
                return;
            }
            long requestId = 1L;  // one approval_request row per enterprise
            long recordId = 1L;   // running approval_record primary key
            for (Enterprise enterprise : enterprises) {
                // Step 1: the request row. Status starts at '0' and is moved
                // forward below when a pending legacy record maps to a step.
                Manager manager = entManagerMapper.getManagerForEnter(enterprise.getId());
                Long adminId = adminManagerMapper.selectAdminIdByManagerId(manager.getParentId()).get(0);
                // Numbers are pre-stringified: MessageFormat's {n} would
                // otherwise render 1000 as "1,000" (grouping separators).
                writeLine(writer, MessageFormat.format(
                        "insert into approval_request (id, process_id, ent_id, creator_id,"
                                + "status, create_time, update_time, delete_flag) "
                                + "values ({0}, {1}, {2}, {3}, ''0'', now(), now(), ''0'');",
                        String.valueOf(requestId), String.valueOf(process.getId()),
                        String.valueOf(enterprise.getId()), String.valueOf(adminId)));

                // Steps 2 and 3: one approval_record row per legacy record.
                // The original accumulated pending records in a list outside
                // the enterprise loop and reprocessed the whole list on every
                // iteration, duplicating output; handling each record inline
                // keeps them scoped to their own enterprise.
                List<EnterpriseApprovalRecord> records =
                        enterpriseApprovalRecordMapper.selectByEntId(enterprise.getId());
                if (records != null) {
                    for (EnterpriseApprovalRecord record : records) {
                        if ("1".equals(record.getIsnew().toString())) {
                            // Pending record: legacy statuses 4/5/6 map to the
                            // first/second/third step of the new process;
                            // legacy status 10 emits nothing (as before).
                            int stepIndex = stepIndexFor(record.getNewStatus().toString());
                            if (stepIndex >= 0) {
                                writeLine(writer, recordSql(recordId++, requestId,
                                        record.getCreatorId(), null, null,
                                        record.getDescription(), false));
                                Integer status = steps.get(stepIndex).getPrecondition();
                                writeLine(writer, MessageFormat.format(
                                        "update approval_request set status = {0} where ent_id = {1};",
                                        String.valueOf(status), String.valueOf(record.getEntId())));
                            }
                        } else {
                            // Already-handled record.
                            writeLine(writer, recordSql(recordId++, requestId,
                                    record.getCreatorId(), record.getOperatorId(),
                                    record.getOperatorComment(), record.getDescription(), true));
                        }
                    }
                }
                // Fix: the original never advanced the request id, producing
                // duplicate approval_request primary keys after the first row.
                requestId++;
            }
        }
    }

    /**
     * Builds one approval_record INSERT statement.
     *
     * Fixes two template bugs from the original: the {4} (comment) placeholder
     * was missing, leaving 9 values for 10 columns, and bare single quotes are
     * swallowed by MessageFormat (the quote is its escape character), so
     * ''...'' is required to emit a literal quote into the SQL.
     *
     * Note: pending records are written with is_new '0' as in the original —
     * TODO confirm that pending rows should not carry is_new '1' instead.
     */
    private String recordSql(long id, long requestId, Long creatorId, Long operatorId,
                             String comment, String description, boolean handled) {
        // Handled records carry an update_time; pending ones leave it NULL.
        String updateTime = handled ? "now()" : "null";
        return MessageFormat.format(
                "insert into approval_record (id, request_id, creator_id, operator_id,"
                        + "comment, description, delete_flag, create_time, update_time, is_new) "
                        + "values ({0}, {1}, {2}, {3}, {4}, {5}, ''0'', now(), {6}, ''0'');",
                String.valueOf(id), String.valueOf(requestId), String.valueOf(creatorId),
                String.valueOf(operatorId), quote(comment), quote(description), updateTime);
    }

    /** Renders a string as a quoted SQL literal; null becomes a bare NULL. */
    private String quote(String value) {
        return value == null ? "null" : "'" + value.replace("'", "''") + "'";
    }

    /**
     * Maps a legacy pending status to the index of the corresponding new
     * approval step, or -1 when no step applies (e.g. legacy status "10").
     */
    private int stepIndexFor(String legacyStatus) {
        if ("4".equals(legacyStatus)) {
            return 0;
        }
        if ("5".equals(legacyStatus)) {
            return 1;
        }
        if ("6".equals(legacyStatus)) {
            return 2;
        }
        return -1;
    }

    /** Writes one SQL statement followed by a newline. */
    private void writeLine(FileWriter writer, String sql) throws IOException {
        writer.write(sql);
        writer.write("\n");
    }
}
| [
"fromluozuwu@qq.com"
] | fromluozuwu@qq.com |
c4b553aa77462647bf629fabff70ab5345cd7477 | 46b51f93d09df1d77684bc52cf5bc703bf06c5b5 | /microservice-order/src/main/java/com/hao/microservice/Model/OrderDetail.java | b983e643845366e7c140205f08d317301694f062 | [] | no_license | MuggleLee/microservice | 9665ebff94187263ec0f28c709127a39e51bf090 | c375f4392f95a5ef407f9276f212c5c96d937877 | refs/heads/master | 2020-04-14T13:10:22.874990 | 2019-01-08T09:58:47 | 2019-01-08T09:58:47 | 163,861,139 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 1,765 | java | package com.hao.microservice.Model;
import lombok.Data;
import javax.persistence.Entity;
import javax.persistence.Id;
import java.math.BigDecimal;
// JPA entity for one line item of an order.
// NOTE(review): the class carries Lombok @Data *and* hand-written
// getters/setters; Lombok skips accessors that already exist, so the manual
// ones win — consider keeping only one of the two mechanisms.
@Data
@Entity
public class OrderDetail {
    // Primary key of the detail row.
    @Id
    private String detailId;
    /** Order id. */
    private String orderId;
    /** Product id. */
    private String productId;
    /** Product name. */
    private String productName;
    /** Product unit price. */
    private BigDecimal productPrice;
    /** Product quantity. */
    private Integer productQuantity;
    /** Product thumbnail icon. */
    private String productIcon;
    public String getDetailId() {
        return detailId;
    }
    public void setDetailId(String detailId) {
        this.detailId = detailId;
    }
    public String getOrderId() {
        return orderId;
    }
    public void setOrderId(String orderId) {
        this.orderId = orderId;
    }
    public String getProductId() {
        return productId;
    }
    public void setProductId(String productId) {
        this.productId = productId;
    }
    public String getProductName() {
        return productName;
    }
    public void setProductName(String productName) {
        this.productName = productName;
    }
    public BigDecimal getProductPrice() {
        return productPrice;
    }
    public void setProductPrice(BigDecimal productPrice) {
        this.productPrice = productPrice;
    }
    public Integer getProductQuantity() {
        return productQuantity;
    }
    public void setProductQuantity(Integer productQuantity) {
        this.productQuantity = productQuantity;
    }
    public String getProductIcon() {
        return productIcon;
    }
    public void setProductIcon(String productIcon) {
        this.productIcon = productIcon;
    }
}
| [
"770796059@qq.com"
] | 770796059@qq.com |
464edbf27dc1b8e72549f039c21919d65cd213f1 | c3db327c084abf1eeddbf241bc5ffc059abf3310 | /src/main/java/util/AllureReporter.java | c21a538bf198f67cda86ca3001d9a295a1e55248 | [] | no_license | tvarderesyan/Selenium_course | b0fd872cdea3282b18dbd3b2aede2f81e125b1e6 | 45e5fde7c01c8a0843f678000c6e9a83312d04b5 | refs/heads/master | 2021-05-11T19:45:43.636985 | 2018-01-29T18:38:32 | 2018-01-29T18:38:32 | 117,888,247 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 779 | java | package util;
import Steps.BaseSteps;
import gherkin.formatter.model.Result;
import org.openqa.selenium.OutputType;
import org.openqa.selenium.TakesScreenshot;
import ru.yandex.qatools.allure.Allure;
import ru.yandex.qatools.allure.annotations.Attachment;
/**
* Created by Varderesyan Tsolak on 27.01.2018.
*/
/**
 * Allure/Cucumber reporter that captures a screenshot whenever a step result
 * comes back as "failed", before delegating to the base reporter.
 */
public class AllureReporter extends ru.yandex.qatools.allure.cucumberjvm.AllureReporter {

    @Override
    public void result(Result result) {
        boolean stepFailed = "failed".equals(result.getStatus());
        if (stepFailed) {
            takeScreenShot();
        }
        super.result(result);
    }

    /** Grabs a PNG screenshot from the shared driver and attaches it to the report. */
    @Attachment(type = "image/png", value = "Screenshot")
    public static byte[] takeScreenShot() {
        TakesScreenshot camera = (TakesScreenshot) BaseSteps.driver();
        return camera.getScreenshotAs(OutputType.BYTES);
    }
}
| [
"colovarderesyan@yahoo.com"
] | colovarderesyan@yahoo.com |
d6f086281851b2190cd06ab4ee10ab6384975ce5 | b8bcfda4679f74a0c9e53dfb2b7415031e950419 | /AirPorts/src/main/java/com/fedorova/airPorts/services/PlaneService.java | 287e5082ad785bccd3f151bf393a6a79c838d076 | [] | no_license | nastya0715/Projects | f7fb793efd067a7273f7c3e14b2cfc543906ca21 | 43da8222dc69279bf8c749a12761fe7a06c8ead4 | refs/heads/master | 2020-05-06T15:37:15.992760 | 2019-04-08T18:15:49 | 2019-04-08T18:15:49 | 180,199,925 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 598 | java | package com.fedorova.airPorts.services;
import org.apache.log4j.Logger;
import com.fedorova.airPorts.dao.IPlaneDAO;
import com.fedorova.airPorts.dao.jdbcImplement.PlaneDAO;
import com.fedorova.airPorts.models.planes.Plane;
/**
 * Service that loads a plane and enriches it with its full model details.
 */
public class PlaneService {

    private final static Logger logger = Logger.getLogger(PlaneService.class);

    /** DAO used to load the raw plane record. */
    private IPlaneDAO planeDao = new PlaneDAO();

    /**
     * Loads the plane with the given id, resolves its model via ModelService,
     * logs the assembled plane, and returns it.
     */
    public Plane getInf(int id) {
        Plane plane = planeDao.getById(id);
        ModelService modelService = new ModelService();
        plane.setModel(modelService.getInf(plane.getModel().getId()));
        logger.info(plane.toString());
        return plane;
    }
}
| [
"375291699298@yandex.by"
] | 375291699298@yandex.by |
954d57b5aa44480b97b44a8ac5bea6706f375cef | 2bfd6167e387ce29119a9634bb08bdf7d63b6474 | /maven_practice/mavenproject/src/main/java/aivick/ListUtil.java | 5a7c76e4f8514cbe618f6ec52aee6116ce9d7ac6 | [] | no_license | yukis1996/Java | 32db1fac3a32f383b679b3c81171afcef9cddee9 | 8b324531d48a525d5b81427637bf011a1dbe306e | refs/heads/master | 2022-10-22T22:08:16.171476 | 2020-06-10T08:07:54 | 2020-06-10T08:07:54 | 262,509,738 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 5,291 | java | package aivick;
import java.util.ArrayList;
import java.util.List;
public class ListUtil {
// private static Object object;
public static void main(String[] args) {
// ②リストから偶数のみを抽出する
var intList = List.of(1, 2, 3, 4, 5, 6);
List<Integer> evenList = ListUtil.evensof(intList);
for (Integer e : evenList) {
System.out.println(e);
}
// ③指定した大きさと文字列からなるリストを作成
List<String> hogeList = ListUtil.replicate(3, "hoge");
System.out.println(hogeList);
// ④zipの引数同士を組み合わせて抽出
var zipList = ListUtil.zip(List.of(1, 2, 3), List.of(10, 9));
System.out.println(zipList.toString());
// toString()の記載せずとも自動的に呼び出し可能
// ⑤値の約数を抽出しリスト化する
var factorList = ListUtil.factors(15);
System.out.println(factorList);
// ⑥完全数を一覧として返す
var perfect = ListUtil.perfects(500);
System.out.println(perfect);
var pa = ListUtil.pairs(List.of(1, 2, 3, 4));
// var st = ListUtil.pairs(List.of("a", "b", "c"));
System.out.println(pa);
var sortedTest1 = ListUtil.sorted(List.of(1, 2, 3, 4));
var sortedTest2 = ListUtil.sorted(List.of(3, 2, 5, 6));
System.out.println(sortedTest1);
System.out.println(sortedTest2);
var pos = ListUtil.positions(10, List.of(10, 15, 20, 10, 10, 33));
System.out.println(pos);
var scalar = ListUtil.scalarProduct(List.of(1, 2, 3), List.of(4, 5, 6));
System.out.println(scalar);
var tetrads = ListUtil.zip4(List.of(1, 2, 3), List.of(10, 20, 30), List.of(100, 200, 300),
List.of(1000, 2000, 3000));
var firstTetrad = tetrads.get(0); //リストの最初の4つ組
System.out.println(firstTetrad);
var first = firstTetrad.first(); //1
System.out.println(first);
var tuple = firstTetrad.second();
var second = tuple.first(); //10
System.out.println(second);
var pair = tuple.second();
var thrid = pair.first(); //100
System.out.println(thrid);
var fourh = pair.second(); //1000
System.out.println(fourh);
}
// ②evensofの処理
public static List<Integer> evensof(List<Integer> intList) {
List<Integer> evensList = new ArrayList<>();
for (Integer e : intList) {
if (e % 2 == 0) {
evensList.add(e);
}
}
return evensList;
}
// ③replicateの処理
public static List<String> replicate(int item, String word) {
List<String> strList = new ArrayList<>();
for (int n = 0; n < item; n++) {
strList.add(word);
}
return strList;
}
// ④zipの処理
public static List<Pair<Integer, Integer>> zip(List<Integer> key, List<Integer> value) {
List<Pair<Integer, Integer>> zipsList = new ArrayList<>();
int small = Math.min(key.size(), value.size());
for (int j = 0; j < small; j++) {
Pair<Integer, Integer> pp = new Pair<>(key.get(j), value.get(j));
zipsList.add(pp);
}
return zipsList;
}
// ⑤factorsの処理
public static List<Integer> factors(int number) {
List<Integer> fcList = new ArrayList<>();
for (int n = 1; n <= number; n++) {
if (number % n == 0) {
fcList.add(n);
}
}
return fcList;
}
// ⑥perfectsの処理
public static List<Integer> perfects(int count) {
List<Integer> pfList = new ArrayList<>();
for (int p = 1; p <= count; p++) {
int sum = 0;
List<Integer> fact = factors(p);
for (int ff : fact) {
sum += ff;
}
if (sum - p == p) {
pfList.add(p);
}
}
return pfList;
}
public static List<Pair<Integer, Integer>> pairs(List<Integer> a) {
List<Pair<Integer, Integer>> pList = new ArrayList<>();
for (int d = 0; d < a.size() - 1; d++) {
Pair<Integer, Integer> tt = new Pair<>(a.get(d), a.get(d + 1));
pList.add(tt);
}
return pList;
}
public static boolean sorted(List<Integer> number) {
var pairList = pairs(number);
for (var pair : pairList) {
if (pair.first() > pair.second()) {
return false;
}
}
return true;
}
public static List<Integer> positions(int num, List<Integer> numList) {
List<Integer> posList = new ArrayList<>();
for (int i = 0; i < numList.size(); i++) {
posList.add(i);
}
List<Integer> position = new ArrayList<Integer>();
var pairs = zip(posList, numList);
for (var pair : pairs) {
if (pair.second() == num) {
position.add(pair.first());
}
}
return position;
}
public static int scalarProduct(List<Integer> key, List<Integer> value) {
int product = 0;
int total = 0;
for (int c = 0; c < key.size(); c++) {
product = key.get(c) * value.get(c);
total += product;
}
return total;
}
public static List<Pair<Integer, Pair<Integer, Pair<Integer, Integer>>>> zip4(List<Integer> first,
List<Integer> second, List<Integer> third, List<Integer> fourth) {
var min = Math.min(Math.min(first.size(), second.size()),
Math.min(third.size(), fourth.size()));
List<Pair<Integer, Pair<Integer, Pair<Integer, Integer>>>> zip4List = new ArrayList<>();
for (int e = 0; e < min; e++) {
zip4List.add(tetrad(first.get(e), second.get(e), third.get(e), fourth.get(e)));
}
return zip4List;
}
private static Pair<Integer, Pair<Integer, Pair<Integer, Integer>>> tetrad(Integer a, Integer b,
Integer c, Integer d) {
return new Pair<>(a, new Pair<>(b, new Pair<>(c, d)));
}
} | [
"58467980+yukis1996@users.noreply.github.com"
] | 58467980+yukis1996@users.noreply.github.com |
1f1b7eedc84c9a429a89d0d7536392c6894842a9 | c36842d81ca5df57da61b263dd639fb8ac9ae096 | /src/main/java/com/ixcode/framework/parameter/model/Category.java | e3d57bd1081bacc478eef471ac9a353f35f3d9d1 | [] | no_license | jimbarritt/bugsim | ebbc7ee7fb10df678b6c3e6107bf90169c01dfec | 7f9a83770fff9bac0d9e07c560cd0b604eb1c937 | refs/heads/master | 2016-09-06T08:32:19.941440 | 2010-03-13T10:13:50 | 2010-03-13T10:13:50 | 32,143,814 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 6,954 | java | package com.ixcode.framework.parameter.model;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Stack;
public class Category implements IParameterModel{
public boolean hasParent() {
return getParentCategory() != null;
}
public IParameterModel getParent() {
return getParentCategory();
}
public Category() {
}
public Category(String name) {
_name = name;
}
public String getName() {
return _name;
}
public IParameterModel findParentCalled(String name) {
IParameterModel found = null;
if (_name.equals(name)) {
found = this;
} else if (hasParent()) {
found = getParent().findParentCalled(name) ;
}
return found;
}
public void setName(String name) {
_name = name;
}
public List getParameters() {
return _parameters;
}
public List getSubCategories() {
return _subCategories;
}
public void addParameter(Parameter parameter) {
_parameters.add(parameter);
parameter.setParentCategory(this);
}
public void addSubCategory(Category subCategory) {
_subCategories.add(subCategory);
subCategory.setParent(this);
}
public Category findSubCategory(String name) {
Category found = null;
for (Iterator itr = _subCategories.iterator(); itr.hasNext();) {
Category category = (Category)itr.next();
if (category.getName().equals(name)) {
found = category;
break;
}
}
return found;
}
public Parameter findParameter(String name) {
Parameter found = null;
for (Iterator itr = _parameters.iterator(); itr.hasNext();) {
Parameter parameter = (Parameter)itr.next();
if (parameter.findParameter(name)!= null) {
found = parameter;
break;
}
}
return found;
}
public String getFullyQualifiedName() {
Category current = this;
String fullName = current.getName();
while (current.getParentCategory() != null) {
current = current.getParentCategory();
fullName = current.getName() + "." + fullName;
}
return fullName;
}
public Category getParentCategory() {
return _parent;
}
/**
* @todo probably need to do the opposit aswell although at this point we have no plans to be able to dynamically replace categoreis!@!
* @param parent
*/
public void setParent(Category parent) {
_parent = parent;
if (_parent.isConnectedToParameterMap()) {
fireConnectedEvents(this);
}
}
private void fireConnectedEvents(Category category) {
for (Iterator itr = category.getParameters().iterator(); itr.hasNext();) {
Parameter parameter = (Parameter)itr.next();
parameter.fireParameterConnectedEvent(new Stack());
}
for (Iterator itr = category.getSubCategories().iterator(); itr.hasNext();) {
Category subCategory= (Category)itr.next();
fireConnectedEvents(subCategory);
}
}
public void setRoot(ParameterMap parent) {
_root = parent;
fireConnectedEvents(this);
}
public List getAllParameters() {
List allParams = new ArrayList();
addParameters(allParams, _parameters);
for (Iterator itr = _subCategories.iterator(); itr.hasNext();) {
Category category = (Category)itr.next();
allParams.addAll(category.getAllParameters());
}
return allParams;
}
private void addParameters(List allParams, List currentParameters) {
for (Iterator itr = currentParameters.iterator(); itr.hasNext();) {
Parameter parameter = (Parameter)itr.next();
allParams.add(parameter);
if (parameter instanceof StrategyDefinitionParameter) {
addAllAlgorithmParameters(allParams, (StrategyDefinitionParameter)parameter);
} else if (parameter.containsStrategy()) {
allParams.add(parameter.getValue());
addAllAlgorithmParameters(allParams, (StrategyDefinitionParameter)parameter.getValue());
}
}
}
private void addAllAlgorithmParameters(List allParams, StrategyDefinitionParameter algorithmParameter) {
addParameters(allParams, algorithmParameter.getParameters());
}
public String toString() {
return _name;
}
/**
* Recursively searches for an object
* @param nameStack
* @return
*/
public Object findObject(Stack nameStack) {
String currentLevelName = (String)nameStack.peek();
Object found = null;
if (currentLevelName.equals(_name)) {
nameStack.pop();
found = recurseCategories(nameStack);
if (found == null) {
found = recurseParameters(nameStack);
}
}
return found;
}
/**
 * Asks each parameter of this category to resolve the remaining path;
 * the first match wins.
 */
private Object recurseParameters(Stack nameStack) {
    for (Object element : _parameters) {
        Object match = ((Parameter) element).findObject(nameStack);
        if (match != null) {
            return match;
        }
    }
    return null;
}
/**
 * Asks each sub-category to resolve the remaining path; the first match
 * wins.
 */
private Object recurseCategories(Stack nameStack) {
    for (Object element : _subCategories) {
        Object match = ((Category) element).findObject(nameStack);
        if (match != null) {
            return match;
        }
    }
    return null;
}
/**
 * Looks up a direct child category by name (no recursion into deeper
 * levels).
 *
 * @param name exact name to match
 * @return the matching child, or {@code null} if none exists
 */
public Category findCategory(String name) {
    for (Object element : _subCategories) {
        Category candidate = (Category) element;
        if (candidate.getName().equals(name)) {
            return candidate;
        }
    }
    return null;
}
/**
 * Resolves the owning {@code ParameterMap} by walking up the parent chain,
 * falling back to a directly attached root.
 *
 * @return the owning map, or {@code null} while this subtree is detached
 */
public ParameterMap getParameterMap() {
    if (_parent != null) {
        return _parent.getParameterMap();
    }
    // _root is non-null only on a category attached directly to the map;
    // returning it covers both the attached and the detached (null) case.
    return _root;
}
// True once this category is reachable (via parents or directly) from a
// ParameterMap; used to decide whether connect events should fire.
public boolean isConnectedToParameterMap() {
return getParameterMap() != null;
}
// Display/lookup name of this category.
private String _name;
// Direct Parameter children of this category.
private List _parameters = new ArrayList();
// Direct Category children of this category.
private List _subCategories = new ArrayList();
// Enclosing category; null for a top-level category.
private Category _parent;
// Set only on a category attached directly to a ParameterMap (see setRoot).
private ParameterMap _root;
// Presumably a bound-property key for name-change events — TODO confirm usage.
public static final String P_NAME = "name";
}
| [
"jim.barritt@cada1ade-4555-11de-b25d-9f9778916a3c"
] | jim.barritt@cada1ade-4555-11de-b25d-9f9778916a3c |
8cf7fb8fdb65f3697d30255a02561b4477825aa0 | 7716da2d98134f31d07c4935336217d4a9869421 | /app/src/main/java/com/stormphoenix/ogit/mvp/ui/activities/MainActivity.java | 4a88ab8858fe22ab69a1c16f6cb3758ebd8ed7fc | [
"Apache-2.0"
] | permissive | RepoForks/OGit | 7993eb06ba83b2e0bdea51742ab25d5742b28d8e | 6062b3f1459d8105a6c999545d5d1c340d9a8fc0 | refs/heads/master | 2021-01-21T20:33:47.279886 | 2017-04-21T01:43:04 | 2017-04-21T01:43:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 13,544 | java | package com.stormphoenix.ogit.mvp.ui.activities;
import android.app.SearchManager;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.design.widget.NavigationView;
import android.support.design.widget.Snackbar;
import android.support.v4.view.MenuItemCompat;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.ActionBarDrawerToggle;
import android.support.v7.widget.AppCompatEditText;
import android.support.v7.widget.SearchView;
import android.support.v7.widget.Toolbar;
import android.text.TextUtils;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.ProgressBar;
import android.widget.TextView;
import com.afollestad.materialdialogs.DialogAction;
import com.afollestad.materialdialogs.MaterialDialog;
import com.ogaclejapan.smarttablayout.SmartTabLayout;
import com.stormphoenix.httpknife.github.GitNotification;
import com.stormphoenix.ogit.R;
import com.stormphoenix.ogit.adapters.base.FragmentsAdapter;
import com.stormphoenix.ogit.dagger2.component.DaggerActivityComponent;
import com.stormphoenix.ogit.dagger2.module.ContextModule;
import com.stormphoenix.ogit.mvp.presenter.MainPresenter;
import com.stormphoenix.ogit.mvp.presenter.issue.IssuePresenter;
import com.stormphoenix.ogit.mvp.presenter.user.FolloweringsPresenter;
import com.stormphoenix.ogit.mvp.presenter.user.FollowersPresenter;
import com.stormphoenix.ogit.mvp.presenter.user.UserOwnRepoListPresenter;
import com.stormphoenix.ogit.mvp.presenter.user.UserReceivedEventsPresenter;
import com.stormphoenix.ogit.mvp.presenter.user.UserStaredRepoListPresenter;
import com.stormphoenix.ogit.mvp.ui.activities.base.TabPagerActivity;
import com.stormphoenix.ogit.mvp.ui.dialog.ActionDialogGenerator;
import com.stormphoenix.ogit.mvp.ui.fragments.base.BaseFragment;
import com.stormphoenix.ogit.mvp.ui.fragments.base.EventsFragment;
import com.stormphoenix.ogit.mvp.ui.fragments.base.UsersFragment;
import com.stormphoenix.ogit.mvp.ui.fragments.repository.ReposListFragment;
import com.stormphoenix.ogit.mvp.view.IssueView;
import com.stormphoenix.ogit.mvp.view.MainView;
import com.stormphoenix.ogit.utils.ActivityUtils;
import com.stormphoenix.ogit.utils.ImageUtils;
import com.stormphoenix.ogit.utils.PreferenceUtils;
import com.stormphoenix.ogit.utils.ViewUtils;
import com.stormphoenix.ogit.widget.manager.NotifyMenuManager;
import java.util.ArrayList;
import java.util.List;
import javax.inject.Inject;
import butterknife.BindView;
import de.hdodenhof.circleimageview.CircleImageView;
/**
 * Home screen: a tab pager (events / starred / repos / followers /
 * followings) inside a navigation drawer, plus a toolbar with search and
 * notification actions. Presenters are injected via Dagger.
 */
public class MainActivity extends TabPagerActivity<FragmentsAdapter> implements NavigationView.OnNavigationItemSelectedListener, MainView, IssueView {
private static final String TAG = MainActivity.class.getSimpleName();
@BindView(R.id.tab_layout)
SmartTabLayout mTabLayout;
@BindView(R.id.toolbar)
Toolbar mToolbar;
@BindView(R.id.drawer_layout)
DrawerLayout mDrawerLayout;
// User info layout (navigation drawer header views, resolved in initUserView)
View mUserInfoView;
CircleImageView mHeaderImage;
TextView mTextUsername;
// Anchor view of the toolbar notification action; lazily resolved on first click.
View menuNotify;
FragmentsAdapter mAdapter;
ActionBarDrawerToggle mDrawerToggle;
// Lazily created feedback dialog (see createActionDialog).
ActionDialogGenerator actionDialogGenerator = null;
@BindView(R.id.nav_view)
NavigationView mNavView;
@Inject
public MainPresenter mainPresenter;
@Inject
public IssuePresenter issuePresenter;
// Unread notifications pushed by the presenter (see saveNotificationMessage).
private List<GitNotification> mNotifications = null;
// Factory intent for launching this activity.
public static Intent newIntent(Context context) {
Intent intent = new Intent(context, MainActivity.class);
return intent;
}
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Wire both presenters to this view before their lifecycle starts.
mainPresenter.onAttachView(this);
issuePresenter.onAttachView(this);
mainPresenter.onCreate(savedInstanceState);
issuePresenter.onCreate(savedInstanceState);
initViews();
loadPagerData();
}
private void initViews() {
initToolbar();
initDrawerView();
initUserView();
}
// Populates the drawer header (avatar + username) and builds the tab pager.
private void loadPagerData() {
if (!TextUtils.isEmpty(PreferenceUtils.getString(this, PreferenceUtils.AVATAR_URL))) {
setHeaderImage(PreferenceUtils.getString(this, PreferenceUtils.AVATAR_URL));
}
mTextUsername.setText(PreferenceUtils.getUsername(this));
configureTabPager();
}
/**
 * Resolves the avatar and username views from the drawer header.
 */
private void initUserView() {
mUserInfoView = mNavView.getHeaderView(0);
mHeaderImage = (CircleImageView) mUserInfoView.findViewById(R.id.img_owner_header);
mTextUsername = (TextView) mUserInfoView.findViewById(R.id.text_owner_name);
}
@Override
protected FragmentsAdapter createAdapter() {
String[] titleList = {"Event", "Starred", "Repos", "Followers", "Followings"};
List<BaseFragment> fragmentList = new ArrayList<>();
// Each tab gets its own presenter; scrolling hides the notification popup.
EventsFragment receiveEventsFragment = EventsFragment.newInstance(new UserReceivedEventsPresenter(this));
receiveEventsFragment.setOnScrollListener(NotifyMenuManager.getInstance());
ReposListFragment staredReposFragment = ReposListFragment.newInstance(new UserStaredRepoListPresenter(this));
staredReposFragment.setOnScrollListener(NotifyMenuManager.getInstance());
ReposListFragment reposListFragment = ReposListFragment.newInstance(new UserOwnRepoListPresenter(this));
reposListFragment.setOnScrollListener(NotifyMenuManager.getInstance());
UsersFragment followersFragment = UsersFragment.newInstance(new FollowersPresenter(this));
followersFragment.setOnScrollListener(NotifyMenuManager.getInstance());
UsersFragment followingsFragment = UsersFragment.newInstance(new FolloweringsPresenter(this));
followingsFragment.setOnScrollListener(NotifyMenuManager.getInstance());
fragmentList.add(receiveEventsFragment);
fragmentList.add(staredReposFragment);
fragmentList.add(reposListFragment);
fragmentList.add(followersFragment);
fragmentList.add(followingsFragment);
mAdapter = new FragmentsAdapter(this.getSupportFragmentManager());
mAdapter.setFragmentList(fragmentList, titleList);
return mAdapter;
}
@Override
protected int getLayoutId() {
return R.layout.activity_main;
}
@Override
public void initializeInjector() {
DaggerActivityComponent.builder()
.contextModule(new ContextModule(this))
.build()
.inject(this);
}
/**
 * Initializes the toolbar (title + up indicator).
 */
public void initToolbar() {
mToolbar.setTitle(this.getString(R.string.ogit));
setSupportActionBar(mToolbar);
getSupportActionBar().setHomeAsUpIndicator(R.drawable.ic_arrow_back_white_24dp);
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
}
/**
 * Initializes the navigation drawer and its toggle.
 * NOTE(review): setDrawerListener is deprecated in the support library —
 * addDrawerListener is the suggested replacement; confirm library version.
 */
public void initDrawerView() {
mDrawerToggle = new ActionBarDrawerToggle(
this,
mDrawerLayout,
mToolbar,
R.string.navigation_drawer_open,
R.string.navigation_drawer_close);
mDrawerLayout.setDrawerListener(mDrawerToggle);
mDrawerToggle.syncState();
mNavView.setNavigationItemSelectedListener(this);
}
/**
 * Loads the user's avatar into the drawer header.
 *
 * @param url locator of the user's avatar image
 */
public void setHeaderImage(String url) {
ImageUtils.getInstance().displayImage(url, mHeaderImage);
}
/**
 * Creates the toolbar menu and wires the SearchView to SearchActivity;
 * the action view collapses after a query is submitted or a suggestion
 * is tapped.
 *
 * @param menu the toolbar menu being built
 * @return true to display the menu
 */
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.menu_home_toolbar, menu);
final MenuItem searchMenuItem = menu.findItem(R.id.action_search);
final SearchView searchView = (SearchView) MenuItemCompat.getActionView(searchMenuItem);
SearchManager searchManager = (SearchManager) getSystemService(SEARCH_SERVICE);
searchView.setSearchableInfo(searchManager.getSearchableInfo(new ComponentName(this, SearchActivity.class)));
searchView.setOnQueryTextListener(new SearchView.OnQueryTextListener() {
@Override
public boolean onQueryTextSubmit(String query) {
MenuItemCompat.collapseActionView(searchMenuItem);
return false;
}
@Override
public boolean onQueryTextChange(String newText) {
return false;
}
});
searchView.setOnSuggestionListener(new SearchView.OnSuggestionListener() {
@Override
public boolean onSuggestionSelect(int position) {
MenuItemCompat.collapseActionView(searchMenuItem);
return false;
}
@Override
public boolean onSuggestionClick(int position) {
MenuItemCompat.collapseActionView(searchMenuItem);
return false;
}
});
return super.onCreateOptionsMenu(menu);
}
/**
 * Handles clicks on the toolbar menu actions (search / notifications).
 *
 * @param item the selected menu item
 * @return true when the event was consumed
 */
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.action_search:
onSearchRequested();
return true;
case R.id.action_notification:
if (menuNotify == null) {
// NOTE(review): `toolbar` is not declared in this class (the ButterKnife
// field is mToolbar); presumably inherited from TabPagerActivity — confirm.
menuNotify = toolbar.findViewById(R.id.action_notification);
}
NotifyMenuManager.getInstance().toggleMenuFromView(menuNotify);
return true;
default:
return super.onOptionsItemSelected(item);
}
}
/**
 * Handles clicks on the navigation drawer items.
 *
 * @param item the selected drawer item
 * @return true when the event was consumed
 */
@Override
public boolean onNavigationItemSelected(@NonNull MenuItem item) {
switch (item.getItemId()) {
case R.id.nav_home:
return true;
case R.id.nav_trends:
mDrawerLayout.closeDrawers();
ActivityUtils.startActivity(this, TrendActivity.newIntent(this));
return true;
case R.id.nav_org:
mDrawerLayout.closeDrawers();
Bundle bundle = new Bundle();
bundle.putInt(ToolbarActivity.TYPE, ToolbarActivity.TYPE_ORGANIZATION);
ActivityUtils.startActivity(this, ToolbarActivity.newIntent(this, bundle));
return true;
case R.id.nav_send:
/* Feedback dialog (lazily built on first use) */
if (actionDialogGenerator == null) {
createActionDialog();
}
actionDialogGenerator.show();
/* */
return true;
case R.id.nav_exit:
// Log out: clear the login flag and close the activity.
PreferenceUtils.putBoolean(this, PreferenceUtils.IS_LOGIN, false);
// ActivityUtils.startActivity(this, LoginActivity.newIntent(this));
finish();
// NOTE(review): no break/return here — control falls through to the
// default branch and returns false after finish(); confirm intended.
default:
return false;
}
}
// Builds the feedback dialog; the positive button files a GitHub issue.
private void createActionDialog() {
actionDialogGenerator = new ActionDialogGenerator(this);
actionDialogGenerator.title("Feedback(I will consider your suggestion carefully)");
View actionDialogView = (View) getLayoutInflater().inflate(R.layout.ui_empty_background_edit_text, mDrawerLayout, false);
AppCompatEditText editText = (AppCompatEditText) actionDialogView.findViewById(R.id.dialog_edit_text);
ProgressBar progressBar = (ProgressBar) actionDialogView.findViewById(R.id.dialog_progress);
actionDialogGenerator.customView(actionDialogView);
actionDialogGenerator.onPositive(new MaterialDialog.SingleButtonCallback() {
@Override
public void onClick(@NonNull MaterialDialog dialog, @NonNull DialogAction which) {
issuePresenter.sendAnIssue("An feedback from " + PreferenceUtils.getUsername(MainActivity.this), editText.getText().toString().trim());
// progressBar.setVisibility(View.VISIBLE);
}
});
actionDialogGenerator.cancelable(false);
actionDialogGenerator.setActionButton(DialogAction.POSITIVE, getString(R.string.send));
}
@Override
public void showMessage(String message) {
Snackbar.make(mDrawerLayout, message, Snackbar.LENGTH_LONG).show();
}
// Caches the unread notifications and forwards them to the popup menu.
@Override
public void saveNotificationMessage(List<GitNotification> notifications) {
this.mNotifications = notifications;
String notifyMessage = String.valueOf(mNotifications.size()) + " unread messages";
NotifyMenuManager.getInstance().setNotifyContent(notifyMessage, notifications);
}
@Override
public void onSendIssueSuccess() {
ViewUtils.showMessage(mDrawerLayout, getString(R.string.send_feedback_successful));
}
@Override
public void onSendIssueFailed() {
ViewUtils.showMessage(mDrawerLayout, getString(R.string.send_feedback_failed));
}
// Progress callbacks from MainView are intentionally no-ops on this screen.
@Override
public void showProgress() {
}
@Override
public void hideProgress() {
}
}
| [
"hzau.cs.wangcheng@gmail.com"
] | hzau.cs.wangcheng@gmail.com |
05047d6883e25c5f9c5612f9ad629a3d4167ad1d | 9a79a30ad2ef2c51f3cd0c53806d918b2394524e | /src/plasticcraft/ContainerDisassembler.java | f8e34a65ea2e7a44254b05c15ec15bcfb4569ad5 | [] | no_license | TheBoy2795/PlasticCraft | 9929fa1a51e4a93bc30be58f941026de2bf63137 | 4d10b4c30157bec86dac385c6a75dce079e59b1f | refs/heads/master | 2021-01-15T18:31:18.708873 | 2012-01-18T21:17:02 | 2012-01-18T21:17:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 2,918 | java | package net.minecraft.src.plasticcraft;
import java.util.List;
import net.minecraft.src.*;
/**
 * Crafting container for the disassembler block: one input slot (index 0),
 * a 3x3 grid of output slots (indices 1..9), then the 36 player inventory
 * slots. Progress is synced to listeners via updateCraftingResults.
 */
public class ContainerDisassembler extends Container {
private TileEntityDisassembler disassembler;
// Last craft-progress value pushed to crafting listeners.
private int lastCraftTime;
public ContainerDisassembler(InventoryPlayer inventoryplayer, TileEntityDisassembler tileentitydisassembler) {
lastCraftTime = 0;
disassembler = tileentitydisassembler;
addSlot(new Slot(tileentitydisassembler, 0, 34, 35)); // item
int id1 = 1;
for (int j=92; j<=128; j+=18) { // row 1
addSlot(new SlotFurnace(inventoryplayer.player, tileentitydisassembler, id1, j, 17));
id1++;
}
int id2 = 4;
for (int j=92; j<=128; j+=18) { // row 2
addSlot(new SlotFurnace(inventoryplayer.player, tileentitydisassembler, id2, j, 35));
id2++;
}
int id3 = 7;
for (int j=92; j<=128; j+=18) { // row 3
addSlot(new SlotFurnace(inventoryplayer.player, tileentitydisassembler, id3, j, 53));
id3++;
}
// player inventory stuff
for (int i=0; i<3; i++)
for (int k = 0; k < 9; k++)
addSlot(new Slot(inventoryplayer, k + i * 9 + 9, 8 + k * 18, 84 + i * 18));
for (int j = 0; j < 9; j++)
addSlot(new Slot(inventoryplayer, j, 8 + j * 18, 142));
}
// Pushes craft progress to listeners only when it changed since last tick.
public void updateCraftingResults() {
super.updateCraftingResults();
for (int i=0; i<crafters.size(); i++) {
ICrafting icrafting = (ICrafting)crafters.get(i);
if (lastCraftTime != disassembler.disassemblerCraftTime)
icrafting.updateCraftingInventoryInfo(this, 0, disassembler.disassemblerCraftTime);
}
lastCraftTime = disassembler.disassemblerCraftTime;
}
// Client-side: receives the progress value sent above (id 0).
public void updateProgressBar(int i, int j) {
if (i == 0)
disassembler.disassemblerCraftTime = j;
}
public boolean canInteractWith(EntityPlayer entityplayer) {
return disassembler.isUseableByPlayer(entityplayer);
}
// Shift-click transfer between container and player inventory.
// NOTE(review): the index ranges below (2, 3..39) match the vanilla furnace
// layout (3 container slots + 36 player slots), but this container registers
// 10 container slots (0..9) before the player inventory — these ranges look
// copied from ContainerFurnace; verify shift-click behavior in game.
public ItemStack transferStackInSlot(int i) {
ItemStack itemstack = null;
Slot slot = (Slot)inventorySlots.get(i);
if (slot != null && slot.getHasStack()) {
ItemStack itemstack1 = slot.getStack();
itemstack = itemstack1.copy();
if (i == 2) {
if (!mergeItemStack(itemstack1, 3, 39, true))
return null;
} else if (i >= 3 && i < 30) {
if (!mergeItemStack(itemstack1, 30, 39, false))
return null;
} else if (i >= 30 && i < 39) {
if (!mergeItemStack(itemstack1, 3, 30, false))
return null;
} else if (!mergeItemStack(itemstack1, 3, 39, false))
return null;
if (itemstack1.stackSize == 0)
slot.putStack(null);
else slot.onSlotChanged();
if (itemstack1.stackSize != itemstack.stackSize)
slot.onPickupFromSlot(itemstack1);
else return null;
}
return itemstack;
}
} | [
"tehkrush@gmail.com"
] | tehkrush@gmail.com |
fc4f4aed9bf370b406358e54500b08b85f5f5e3f | 9dff9c90cf2296b35828c9b0adf6cfd78c54591c | /app/src/main/java/com/example/Smarttouch/wheel/OnRecyclerItemClickListener.java | 505b74cc82e8c67b4739d1ca8ffaca3e5ccba837 | [] | no_license | Nirmals502/Android_TTLock_Demo-master | 851bd3e7783e53fa086f3b7fd7f6812a7e614356 | b71c2b34ff88d36bb9ef2e25737923830c04289d | refs/heads/master | 2020-03-25T20:12:01.998387 | 2018-10-22T16:23:54 | 2018-10-22T16:23:57 | 144,120,487 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 2,057 | java | package com.example.Smarttouch.wheel;
import android.support.v4.view.GestureDetectorCompat;
import android.support.v7.widget.RecyclerView;
import android.view.GestureDetector;
import android.view.MotionEvent;
import android.view.View;
/**
* Created by TTLock on 2017/3/24.
*/
/**
 * RecyclerView touch listener that turns raw touch events into item
 * click / long-click callbacks via a gesture detector. Subclasses
 * implement the two abstract callbacks.
 */
public abstract class OnRecyclerItemClickListener implements RecyclerView.OnItemTouchListener{
    private final RecyclerView hostRecyclerView;
    private final GestureDetectorCompat gestureDetector;

    public OnRecyclerItemClickListener(RecyclerView recyclerView) {
        this.hostRecyclerView = recyclerView;
        this.gestureDetector =
                new GestureDetectorCompat(recyclerView.getContext(), new ItemTouchHelperGestureListener());
    }

    @Override
    public boolean onInterceptTouchEvent(RecyclerView rv, MotionEvent e) {
        // Feed every event to the detector but never intercept the stream.
        gestureDetector.onTouchEvent(e);
        return false;
    }

    @Override
    public void onTouchEvent(RecyclerView rv, MotionEvent e) {
        gestureDetector.onTouchEvent(e);
    }

    @Override
    public void onRequestDisallowInterceptTouchEvent(boolean disallowIntercept) {
        // Intentionally empty: this listener never requests interception.
    }

    /** Invoked for a confirmed single tap on an item. */
    public abstract void onItemClick(RecyclerView.ViewHolder viewHolder);

    /** Invoked for a long press on an item. */
    public abstract void onItemLOngClick(RecyclerView.ViewHolder viewHolder);

    private class ItemTouchHelperGestureListener extends GestureDetector.SimpleOnGestureListener {
        @Override
        public boolean onSingleTapUp(MotionEvent event) {
            View touched = hostRecyclerView.findChildViewUnder(event.getX(), event.getY());
            if (touched != null) {
                onItemClick(hostRecyclerView.getChildViewHolder(touched));
            }
            return true;
        }

        @Override
        public void onLongPress(MotionEvent event) {
            View touched = hostRecyclerView.findChildViewUnder(event.getX(), event.getY());
            if (touched != null) {
                onItemLOngClick(hostRecyclerView.getChildViewHolder(touched));
            }
        }
    }
} | [
"nirmals502@gmail.com"
] | nirmals502@gmail.com |
edbeb8f5c5b7f41a574f2680a35ee322c0923cc0 | 362561242927a7cc170aacba24e335e6f009b34a | /lg-rpc的副本/lg-rpc-provider/src/main/java/com/lagou/rpc/provider/config/RegistrationConfig.java | 6afa8f304d2d762fabbf614d4f18ced38fbeb5ea | [] | no_license | yutingcaicyt/zookeeper | 311a585e97ee11716be263e6a4ef76ba680cfbe8 | ce0f6772b8d8b4afc9eb641fbe4d7209aeb99db4 | refs/heads/master | 2023-04-03T02:53:44.121654 | 2021-04-17T13:13:41 | 2021-04-17T13:13:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 466 | java | package com.lagou.rpc.provider.config;
import com.lagou.rpc.registrationcenter.service.ProviderService;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
 * Spring configuration that exposes the registration-center provider
 * service as a singleton bean, initialized eagerly at creation time.
 */
@Configuration
public class RegistrationConfig {

    /** Builds and initializes the {@code ProviderService} bean. */
    @Bean
    public ProviderService getProviderService() {
        ProviderService service = new ProviderService();
        service.init();
        return service;
    }
}
| [
"199522cytCYT"
] | 199522cytCYT |
951d2ac6590de6a3573acd3439432a969d109a0b | 58d97d43a8979f4cdf4cfb164f9d6fd7384d9aa6 | /RailwayTicketPlatform/src/cn/lanqiao/ui/AlterPTelJDialog.java | 363a6314c10f2dd18df12944343f6957d93e05b0 | [] | no_license | W1106H/RailwayTicketPlatform | 1fbf158d9c58e613dd94385fd0a1a405e91dc0c4 | 13d000c4b003bf497952fd0e3862879bd76fdea3 | refs/heads/master | 2023-04-03T14:33:21.947782 | 2021-04-21T12:30:31 | 2021-04-21T12:30:31 | 356,285,107 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 4,790 | java | /*
* Created by JFormDesigner on Thu Apr 15 17:19:52 CST 2021
*/
package cn.lanqiao.ui;
import cn.lanqiao.entity.Peoples.Passenger;
import cn.lanqiao.entity.Peoples.User;
import cn.lanqiao.service.PassengerService;
import cn.lanqiao.service.impl.PassengerServiceImpl;
import cn.lanqiao.util.TelMathches;
import java.awt.*;
import java.awt.event.*;
import javax.swing.*;
import javax.swing.table.DefaultTableModel;
/**
* @author Brainrain
*/
/**
 * Modal dialog for changing a passenger's phone number. The selected row
 * of the caller's JTable identifies the passenger; on success the table
 * model is refreshed. UI sections marked GEN-BEGIN/GEN-END are generated
 * by JFormDesigner and must not be edited by hand.
 */
public class AlterPTelJDialog extends JDialog {
// Column headers used when refreshing the table model.
// NOTE(review): these labels (id/name/gender/birthday/email/major/password/
// account) look copied from a student table rather than a passenger table —
// confirm they match PassengerService.getAllPassenger's columns.
private String[] header ={"编号","姓名","性别","生日","邮箱","专业","密码","账号"};
private JTable jTable=null;
private User currentUser = null;
public AlterPTelJDialog(JTable jTable,User user) {
this.jTable = jTable;
this.currentUser = user;
initComponents();
}
private void CancleButtonActionPerformed(ActionEvent e) {
// Cancel: just close the dialog.
this.dispose();
}
private void SureButtonActionPerformed(ActionEvent e) {
// Confirm: validate the new phone number and persist it.
if (currentUser == null) {
return;
}
String PTel = PTelTextField.getText().trim();
if (this.jTable == null) {
return;
}
// NOTE(review): no guard for getSelectedRow() == -1 (no selection), and
// column 1 is assumed to hold the passenger id even though header[1] is
// the name column — confirm the table's column mapping.
int rowindex = jTable.getSelectedRow();
String passengerId=jTable.getValueAt(rowindex,1).toString().trim();
PassengerService passengerService = new PassengerServiceImpl();
boolean pTelMatches = TelMathches.judgeLegal(PTel);
if (pTelMatches) {
int result = passengerService.updatePTel(PTel, passengerId);
if (result > 0) {
// Refresh the table data.
jTable.setModel(new DefaultTableModel(
passengerService.getAllPassenger(currentUser),
header
));
// Reset the dialog's input state and close.
JOptionPane.showMessageDialog(null, "修改成功!");
PTelTextField.setText(null);
this.dispose();
} else {
JOptionPane.showMessageDialog(null, "该电话号码已被其他乘客绑定!");
}
} else {
JOptionPane.showMessageDialog(null, "电话号码输入格式错误!");
}
}
private void initComponents() {
// JFormDesigner - Component initialization - DO NOT MODIFY //GEN-BEGIN:initComponents
SureButton = new JButton();
CancleButton = new JButton();
label1 = new JLabel();
PTelTextField = new JTextField();
panel1 = new JPanel();
//======== this ========
setBackground(new Color(102, 102, 255));
setResizable(false);
setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
var contentPane = getContentPane();
contentPane.setLayout(null);
//---- SureButton ----
SureButton.setText("\u786e\u8ba4\u4fee\u6539");
SureButton.setBackground(Color.white);
SureButton.setFont(SureButton.getFont().deriveFont(SureButton.getFont().getStyle() | Font.BOLD));
SureButton.addActionListener(e -> SureButtonActionPerformed(e));
contentPane.add(SureButton);
SureButton.setBounds(140, 95, 95, 30);
//---- CancleButton ----
CancleButton.setText("\u53d6\u6d88");
CancleButton.setBackground(Color.white);
CancleButton.setFont(CancleButton.getFont().deriveFont(CancleButton.getFont().getStyle() | Font.BOLD));
CancleButton.addActionListener(e -> CancleButtonActionPerformed(e));
contentPane.add(CancleButton);
CancleButton.setBounds(20, 95, 80, 30);
//---- label1 ----
label1.setText("\u8bf7\u8f93\u5165\u65b0\u7684\u7535\u8bdd\u53f7\u7801");
label1.setForeground(Color.white);
contentPane.add(label1);
label1.setBounds(75, 40, 125, label1.getPreferredSize().height);
contentPane.add(PTelTextField);
PTelTextField.setBounds(30, 5, 190, PTelTextField.getPreferredSize().height);
//======== panel1 ========
{
panel1.setBackground(new Color(102, 102, 255));
panel1.setLayout(null);
}
contentPane.add(panel1);
panel1.setBounds(0, 0, 260, 140);
contentPane.setPreferredSize(new Dimension(260, 170));
setSize(260, 170);
setLocationRelativeTo(getOwner());
// JFormDesigner - End of component initialization //GEN-END:initComponents
}
// JFormDesigner - Variables declaration - DO NOT MODIFY //GEN-BEGIN:variables
private JButton SureButton;
private JButton CancleButton;
private JLabel label1;
private JTextField PTelTextField;
private JPanel panel1;
// JFormDesigner - End of variables declaration //GEN-END:variables
}
| [
"497171989@qq.com"
] | 497171989@qq.com |
9526a60768f57be2f661aedefcedad7115dd931a | ed5345426f096e102666c07a2a1d9182c7694cce | /app/src/main/java/io/github/xudaojie/developersettings/Constants.java | e5e70fb441ee55c77d20a98511364b28361ffa09 | [
"Apache-2.0"
] | permissive | csotf/DeveloperSettings | 4989d456b89dff5ea42a93f65b5330cc90889081 | 237babce42c4e0c8bc9d8c6246c5e07078a9dffb | refs/heads/master | 2021-06-12T03:31:54.310346 | 2017-03-08T06:04:41 | 2017-03-08T06:04:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 815 | java | package io.github.xudaojie.developersettings;
/**
* Created by xdj on 2017/3/7.
*/
// Keys of hidden framework settings / system properties used to toggle
// Android developer options programmatically. Each key mirrors a hidden
// constant in the framework class named in the comment above it.
public class Constants {
// android.provider.Settings.System.SHOW_TOUCHES
public static final String SETTINGS_SYSTEM_SHOW_TOUCHES = "show_touches";
// android.provider.Settings.System.POINTER_LOCATION
public static final String SETTINGS_SYSTEM_POINTER_LOCATION = "pointer_location";
// android.view.View.DEBUG_LAYOUT_PROPERTY
public static final String VIEW_DEBUG_LAYOUT_PROPERTY = "debug.layout";
// android.view.ThreadedRenderer.DEBUG_OVERDRAW_PROPERTY
public static final String THREADED_RENDERER_DEBUG_OVERDRAW_PROPERTY = "debug.hwui.overdraw";
// android.view.ThreadedRenderer.PROFILE_PROPERTY
public static final String THREADED_RENDERER_PROFILE_PROPERTY = "debug.hwui.profile";
}
| [
"xudaojieg@gmail.com"
] | xudaojieg@gmail.com |
29ac2c799b6a718536bbea64cd4fe0d88883eb0a | d6a17c85c1311857aa23d9d902463b56a747973b | /or-tools/src/main/java/com/google/ortools/constraintsolver/ConstraintSolverParameters.java | 9b90c2374cf7c05907b40beb5dcd6db805709cc4 | [
"MIT"
] | permissive | ejif/or-tools-java | 89a5ac34a4a4043459e776dfeb9901550f82c776 | 2ac926f0b70a75dd3c59fb5351188c579841663b | refs/heads/master | 2021-01-05T00:56:58.997347 | 2020-02-17T20:00:16 | 2020-02-17T21:08:01 | 240,820,551 | 2 | 0 | null | null | null | null | UTF-8 | Java | false | true | 83,584 | java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: ortools/constraint_solver/solver_parameters.proto
package com.google.ortools.constraintsolver;
/**
* <pre>
* Solver parameters.
* </pre>
*
* Protobuf type {@code operations_research.ConstraintSolverParameters}
*/
public final class ConstraintSolverParameters extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:operations_research.ConstraintSolverParameters)
ConstraintSolverParametersOrBuilder {
private static final long serialVersionUID = 0L;
// Use ConstraintSolverParameters.newBuilder() to construct.
// Generated by protoc — edit the .proto, not this file.
private ConstraintSolverParameters(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Generated default constructor: initializes fields to proto3 defaults.
private ConstraintSolverParameters() {
compressTrail_ = 0;
profileFile_ = "";
}
// Generated reflection hook used by the protobuf runtime.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new ConstraintSolverParameters();
}
// Generated accessor for fields not recognized during parsing.
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Generated wire-format parsing constructor (protoc output): reads fields
// tag-by-tag until end of stream; unrecognized tags are kept in
// unknownFields. NOTE(review): generated code — regenerate from the .proto
// rather than editing by hand.
private ConstraintSolverParameters(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8: {
int rawValue = input.readEnum();
compressTrail_ = rawValue;
break;
}
case 16: {
trailBlockSize_ = input.readInt32();
break;
}
case 24: {
arraySplitSize_ = input.readInt32();
break;
}
case 32: {
storeNames_ = input.readBool();
break;
}
case 40: {
nameCastVariables_ = input.readBool();
break;
}
case 48: {
nameAllVariables_ = input.readBool();
break;
}
case 56: {
profilePropagation_ = input.readBool();
break;
}
case 66: {
java.lang.String s = input.readStringRequireUtf8();
profileFile_ = s;
break;
}
case 72: {
tracePropagation_ = input.readBool();
break;
}
case 80: {
traceSearch_ = input.readBool();
break;
}
case 88: {
printModel_ = input.readBool();
break;
}
case 96: {
printModelStats_ = input.readBool();
break;
}
case 104: {
printAddedConstraints_ = input.readBool();
break;
}
case 120: {
disableSolve_ = input.readBool();
break;
}
case 128: {
profileLocalSearch_ = input.readBool();
break;
}
case 136: {
printLocalSearchProfile_ = input.readBool();
break;
}
case 808: {
useSmallTable_ = input.readBool();
break;
}
case 840: {
useCumulativeEdgeFinder_ = input.readBool();
break;
}
case 848: {
useCumulativeTimeTable_ = input.readBool();
break;
}
case 856: {
useSequenceHighDemandTasks_ = input.readBool();
break;
}
case 864: {
useAllPossibleDisjunctions_ = input.readBool();
break;
}
case 872: {
maxEdgeFinderSize_ = input.readInt32();
break;
}
case 880: {
diffnUseCumulative_ = input.readBool();
break;
}
case 888: {
useElementRmq_ = input.readBool();
break;
}
case 896: {
useCumulativeTimeTableSync_ = input.readBool();
break;
}
case 904: {
skipLocallyOptimalPaths_ = input.readBool();
break;
}
case 912: {
checkSolutionPeriod_ = input.readInt32();
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Generated accessor for the message's descriptor.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ortools.constraintsolver.SolverParameters.internal_static_operations_research_ConstraintSolverParameters_descriptor;
}
// Generated reflection table binding descriptor fields to Java accessors.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ortools.constraintsolver.SolverParameters.internal_static_operations_research_ConstraintSolverParameters_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ortools.constraintsolver.ConstraintSolverParameters.class, com.google.ortools.constraintsolver.ConstraintSolverParameters.Builder.class);
}
/**
 * <pre>
 * Internal parameters of the solver.
 * </pre>
 *
 * Protobuf enum {@code operations_research.ConstraintSolverParameters.TrailCompression}
 */
// Generated enum (protoc output); UNRECOGNIZED covers wire values unknown
// to this generated version.
public enum TrailCompression
implements com.google.protobuf.ProtocolMessageEnum {
/**
 * <code>NO_COMPRESSION = 0;</code>
 */
NO_COMPRESSION(0),
/**
 * <code>COMPRESS_WITH_ZLIB = 1;</code>
 */
COMPRESS_WITH_ZLIB(1),
UNRECOGNIZED(-1),
;
/**
 * <code>NO_COMPRESSION = 0;</code>
 */
public static final int NO_COMPRESSION_VALUE = 0;
/**
 * <code>COMPRESS_WITH_ZLIB = 1;</code>
 */
public static final int COMPRESS_WITH_ZLIB_VALUE = 1;
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
 * @param value The numeric wire value of the corresponding enum entry.
 * @return The enum associated with the given numeric wire value.
 * @deprecated Use {@link #forNumber(int)} instead.
 */
@java.lang.Deprecated
public static TrailCompression valueOf(int value) {
return forNumber(value);
}
/**
 * @param value The numeric wire value of the corresponding enum entry.
 * @return The enum associated with the given numeric wire value.
 */
public static TrailCompression forNumber(int value) {
switch (value) {
case 0: return NO_COMPRESSION;
case 1: return COMPRESS_WITH_ZLIB;
default: return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<TrailCompression>
internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<
TrailCompression> internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<TrailCompression>() {
public TrailCompression findValueByNumber(int number) {
return TrailCompression.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return com.google.ortools.constraintsolver.ConstraintSolverParameters.getDescriptor().getEnumTypes().get(0);
}
private static final TrailCompression[] VALUES = values();
public static TrailCompression valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private TrailCompression(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:operations_research.ConstraintSolverParameters.TrailCompression)
}
  // NOTE(review): protoc-generated field storage and read-only accessors for
  // fields 1-8 and 16-17. Regenerate from the .proto file; do not hand-edit.
  public static final int COMPRESS_TRAIL_FIELD_NUMBER = 1;
  private int compressTrail_;
  /**
   * <pre>
   * This parameter indicates if the solver should compress the trail
   * during the search. No compression means that the solver will be faster,
   * but will use more memory.
   * </pre>
   *
   * <code>.operations_research.ConstraintSolverParameters.TrailCompression compress_trail = 1;</code>
   * @return The enum numeric value on the wire for compressTrail.
   */
  public int getCompressTrailValue() {
    return compressTrail_;
  }
  /**
   * <pre>
   * This parameter indicates if the solver should compress the trail
   * during the search. No compression means that the solver will be faster,
   * but will use more memory.
   * </pre>
   *
   * <code>.operations_research.ConstraintSolverParameters.TrailCompression compress_trail = 1;</code>
   * @return The compressTrail.
   */
  public com.google.ortools.constraintsolver.ConstraintSolverParameters.TrailCompression getCompressTrail() {
    @SuppressWarnings("deprecation")
    com.google.ortools.constraintsolver.ConstraintSolverParameters.TrailCompression result = com.google.ortools.constraintsolver.ConstraintSolverParameters.TrailCompression.valueOf(compressTrail_);
    // Wire values not known to this generated code surface as UNRECOGNIZED.
    return result == null ? com.google.ortools.constraintsolver.ConstraintSolverParameters.TrailCompression.UNRECOGNIZED : result;
  }
  public static final int TRAIL_BLOCK_SIZE_FIELD_NUMBER = 2;
  private int trailBlockSize_;
  /**
   * <pre>
   * This parameter indicates the default size of a block of the trail.
   * Compression applies at the block level.
   * </pre>
   *
   * <code>int32 trail_block_size = 2;</code>
   * @return The trailBlockSize.
   */
  public int getTrailBlockSize() {
    return trailBlockSize_;
  }
  public static final int ARRAY_SPLIT_SIZE_FIELD_NUMBER = 3;
  private int arraySplitSize_;
  /**
   * <pre>
   * When a sum/min/max operation is applied on a large array, this
   * array is recursively split into blocks of size 'array_split_size'.
   * </pre>
   *
   * <code>int32 array_split_size = 3;</code>
   * @return The arraySplitSize.
   */
  public int getArraySplitSize() {
    return arraySplitSize_;
  }
  public static final int STORE_NAMES_FIELD_NUMBER = 4;
  private boolean storeNames_;
  /**
   * <pre>
   * This parameter indicates if the solver should store the names of
   * the objects it manages.
   * </pre>
   *
   * <code>bool store_names = 4;</code>
   * @return The storeNames.
   */
  public boolean getStoreNames() {
    return storeNames_;
  }
  public static final int NAME_CAST_VARIABLES_FIELD_NUMBER = 5;
  private boolean nameCastVariables_;
  /**
   * <pre>
   * Create names for cast variables.
   * </pre>
   *
   * <code>bool name_cast_variables = 5;</code>
   * @return The nameCastVariables.
   */
  public boolean getNameCastVariables() {
    return nameCastVariables_;
  }
  public static final int NAME_ALL_VARIABLES_FIELD_NUMBER = 6;
  private boolean nameAllVariables_;
  /**
   * <pre>
   * Should anonymous variables be given a name.
   * </pre>
   *
   * <code>bool name_all_variables = 6;</code>
   * @return The nameAllVariables.
   */
  public boolean getNameAllVariables() {
    return nameAllVariables_;
  }
  public static final int PROFILE_PROPAGATION_FIELD_NUMBER = 7;
  private boolean profilePropagation_;
  /**
   * <pre>
   * Activate propagation profiling.
   * </pre>
   *
   * <code>bool profile_propagation = 7;</code>
   * @return The profilePropagation.
   */
  public boolean getProfilePropagation() {
    return profilePropagation_;
  }
  public static final int PROFILE_FILE_FIELD_NUMBER = 8;
  // Holds either a ByteString (as parsed from the wire) or a String (once
  // decoded); volatile so the lazily-cached decoded form publishes safely.
  private volatile java.lang.Object profileFile_;
  /**
   * <pre>
   * Export propagation profiling data to file.
   * </pre>
   *
   * <code>string profile_file = 8;</code>
   * @return The profileFile.
   */
  public java.lang.String getProfileFile() {
    java.lang.Object ref = profileFile_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so subsequent calls skip UTF-8 decoding.
      profileFile_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Export propagation profiling data to file.
   * </pre>
   *
   * <code>string profile_file = 8;</code>
   * @return The bytes for profileFile.
   */
  public com.google.protobuf.ByteString
      getProfileFileBytes() {
    java.lang.Object ref = profileFile_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      // Cache the encoded ByteString (mirror of the caching in getProfileFile).
      profileFile_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int PROFILE_LOCAL_SEARCH_FIELD_NUMBER = 16;
  private boolean profileLocalSearch_;
  /**
   * <pre>
   * Activate local search profiling.
   * </pre>
   *
   * <code>bool profile_local_search = 16;</code>
   * @return The profileLocalSearch.
   */
  public boolean getProfileLocalSearch() {
    return profileLocalSearch_;
  }
  public static final int PRINT_LOCAL_SEARCH_PROFILE_FIELD_NUMBER = 17;
  private boolean printLocalSearchProfile_;
  /**
   * <pre>
   * Print local search profiling data after solving.
   * </pre>
   *
   * <code>bool print_local_search_profile = 17;</code>
   * @return The printLocalSearchProfile.
   */
  public boolean getPrintLocalSearchProfile() {
    return printLocalSearchProfile_;
  }
  // NOTE(review): protoc-generated field storage and read-only accessors for
  // fields 9-15 and 101-114. Regenerate from the .proto file; do not hand-edit.
  public static final int TRACE_PROPAGATION_FIELD_NUMBER = 9;
  private boolean tracePropagation_;
  /**
   * <pre>
   * Activate propagate tracing.
   * </pre>
   *
   * <code>bool trace_propagation = 9;</code>
   * @return The tracePropagation.
   */
  public boolean getTracePropagation() {
    return tracePropagation_;
  }
  public static final int TRACE_SEARCH_FIELD_NUMBER = 10;
  private boolean traceSearch_;
  /**
   * <pre>
   * Trace search.
   * </pre>
   *
   * <code>bool trace_search = 10;</code>
   * @return The traceSearch.
   */
  public boolean getTraceSearch() {
    return traceSearch_;
  }
  public static final int PRINT_MODEL_FIELD_NUMBER = 11;
  private boolean printModel_;
  /**
   * <pre>
   * Print the model before solving.
   * </pre>
   *
   * <code>bool print_model = 11;</code>
   * @return The printModel.
   */
  public boolean getPrintModel() {
    return printModel_;
  }
  public static final int PRINT_MODEL_STATS_FIELD_NUMBER = 12;
  private boolean printModelStats_;
  /**
   * <pre>
   * Print model statistics before solving.
   * </pre>
   *
   * <code>bool print_model_stats = 12;</code>
   * @return The printModelStats.
   */
  public boolean getPrintModelStats() {
    return printModelStats_;
  }
  public static final int PRINT_ADDED_CONSTRAINTS_FIELD_NUMBER = 13;
  private boolean printAddedConstraints_;
  /**
   * <pre>
   * Print added constraints.
   * </pre>
   *
   * <code>bool print_added_constraints = 13;</code>
   * @return The printAddedConstraints.
   */
  public boolean getPrintAddedConstraints() {
    return printAddedConstraints_;
  }
  public static final int DISABLE_SOLVE_FIELD_NUMBER = 15;
  private boolean disableSolve_;
  /**
   * <code>bool disable_solve = 15;</code>
   * @return The disableSolve.
   */
  public boolean getDisableSolve() {
    return disableSolve_;
  }
  public static final int USE_SMALL_TABLE_FIELD_NUMBER = 101;
  private boolean useSmallTable_;
  /**
   * <pre>
   * Control the implementation of the table constraint.
   * </pre>
   *
   * <code>bool use_small_table = 101;</code>
   * @return The useSmallTable.
   */
  public boolean getUseSmallTable() {
    return useSmallTable_;
  }
  public static final int USE_CUMULATIVE_EDGE_FINDER_FIELD_NUMBER = 105;
  private boolean useCumulativeEdgeFinder_;
  /**
   * <pre>
   * Control the propagation of the cumulative constraint.
   * </pre>
   *
   * <code>bool use_cumulative_edge_finder = 105;</code>
   * @return The useCumulativeEdgeFinder.
   */
  public boolean getUseCumulativeEdgeFinder() {
    return useCumulativeEdgeFinder_;
  }
  public static final int USE_CUMULATIVE_TIME_TABLE_FIELD_NUMBER = 106;
  private boolean useCumulativeTimeTable_;
  /**
   * <code>bool use_cumulative_time_table = 106;</code>
   * @return The useCumulativeTimeTable.
   */
  public boolean getUseCumulativeTimeTable() {
    return useCumulativeTimeTable_;
  }
  public static final int USE_CUMULATIVE_TIME_TABLE_SYNC_FIELD_NUMBER = 112;
  private boolean useCumulativeTimeTableSync_;
  /**
   * <code>bool use_cumulative_time_table_sync = 112;</code>
   * @return The useCumulativeTimeTableSync.
   */
  public boolean getUseCumulativeTimeTableSync() {
    return useCumulativeTimeTableSync_;
  }
  public static final int USE_SEQUENCE_HIGH_DEMAND_TASKS_FIELD_NUMBER = 107;
  private boolean useSequenceHighDemandTasks_;
  /**
   * <code>bool use_sequence_high_demand_tasks = 107;</code>
   * @return The useSequenceHighDemandTasks.
   */
  public boolean getUseSequenceHighDemandTasks() {
    return useSequenceHighDemandTasks_;
  }
  public static final int USE_ALL_POSSIBLE_DISJUNCTIONS_FIELD_NUMBER = 108;
  private boolean useAllPossibleDisjunctions_;
  /**
   * <code>bool use_all_possible_disjunctions = 108;</code>
   * @return The useAllPossibleDisjunctions.
   */
  public boolean getUseAllPossibleDisjunctions() {
    return useAllPossibleDisjunctions_;
  }
  public static final int MAX_EDGE_FINDER_SIZE_FIELD_NUMBER = 109;
  private int maxEdgeFinderSize_;
  /**
   * <code>int32 max_edge_finder_size = 109;</code>
   * @return The maxEdgeFinderSize.
   */
  public int getMaxEdgeFinderSize() {
    return maxEdgeFinderSize_;
  }
  public static final int DIFFN_USE_CUMULATIVE_FIELD_NUMBER = 110;
  private boolean diffnUseCumulative_;
  /**
   * <pre>
   * Control the propagation of the diffn constraint.
   * </pre>
   *
   * <code>bool diffn_use_cumulative = 110;</code>
   * @return The diffnUseCumulative.
   */
  public boolean getDiffnUseCumulative() {
    return diffnUseCumulative_;
  }
  public static final int USE_ELEMENT_RMQ_FIELD_NUMBER = 111;
  private boolean useElementRmq_;
  /**
   * <pre>
   * Control the implementation of the element constraint.
   * </pre>
   *
   * <code>bool use_element_rmq = 111;</code>
   * @return The useElementRmq.
   */
  public boolean getUseElementRmq() {
    return useElementRmq_;
  }
  public static final int SKIP_LOCALLY_OPTIMAL_PATHS_FIELD_NUMBER = 113;
  private boolean skipLocallyOptimalPaths_;
  /**
   * <pre>
   * Skip locally optimal pairs of paths in PathOperators. Setting this
   * parameter to true might skip valid neighbors if there are constraints
   * linking paths together (such as precedences). In any other case this
   * should only speed up the search without omitting any neighbors.
   * </pre>
   *
   * <code>bool skip_locally_optimal_paths = 113;</code>
   * @return The skipLocallyOptimalPaths.
   */
  public boolean getSkipLocallyOptimalPaths() {
    return skipLocallyOptimalPaths_;
  }
  public static final int CHECK_SOLUTION_PERIOD_FIELD_NUMBER = 114;
  private int checkSolutionPeriod_;
  /**
   * <pre>
   * Control the behavior of local search.
   * </pre>
   *
   * <code>int32 check_solution_period = 114;</code>
   * @return The checkSolutionPeriod.
   */
  public int getCheckSolutionPeriod() {
    return checkSolutionPeriod_;
  }
  // Tri-state memo: -1 = not yet computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  // Generated initialization check; this message has no required fields, so
  // once computed the answer is always true.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Generated serializer: emits each field in ascending field-number order
  // (1..17, then 101..114) and, proto3-style, skips fields holding their
  // default value (0 / false / empty string). Unknown fields are appended last.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (compressTrail_ != com.google.ortools.constraintsolver.ConstraintSolverParameters.TrailCompression.NO_COMPRESSION.getNumber()) {
      output.writeEnum(1, compressTrail_);
    }
    if (trailBlockSize_ != 0) {
      output.writeInt32(2, trailBlockSize_);
    }
    if (arraySplitSize_ != 0) {
      output.writeInt32(3, arraySplitSize_);
    }
    if (storeNames_ != false) {
      output.writeBool(4, storeNames_);
    }
    if (nameCastVariables_ != false) {
      output.writeBool(5, nameCastVariables_);
    }
    if (nameAllVariables_ != false) {
      output.writeBool(6, nameAllVariables_);
    }
    if (profilePropagation_ != false) {
      output.writeBool(7, profilePropagation_);
    }
    if (!getProfileFileBytes().isEmpty()) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 8, profileFile_);
    }
    if (tracePropagation_ != false) {
      output.writeBool(9, tracePropagation_);
    }
    if (traceSearch_ != false) {
      output.writeBool(10, traceSearch_);
    }
    if (printModel_ != false) {
      output.writeBool(11, printModel_);
    }
    if (printModelStats_ != false) {
      output.writeBool(12, printModelStats_);
    }
    if (printAddedConstraints_ != false) {
      output.writeBool(13, printAddedConstraints_);
    }
    if (disableSolve_ != false) {
      output.writeBool(15, disableSolve_);
    }
    if (profileLocalSearch_ != false) {
      output.writeBool(16, profileLocalSearch_);
    }
    if (printLocalSearchProfile_ != false) {
      output.writeBool(17, printLocalSearchProfile_);
    }
    if (useSmallTable_ != false) {
      output.writeBool(101, useSmallTable_);
    }
    if (useCumulativeEdgeFinder_ != false) {
      output.writeBool(105, useCumulativeEdgeFinder_);
    }
    if (useCumulativeTimeTable_ != false) {
      output.writeBool(106, useCumulativeTimeTable_);
    }
    if (useSequenceHighDemandTasks_ != false) {
      output.writeBool(107, useSequenceHighDemandTasks_);
    }
    if (useAllPossibleDisjunctions_ != false) {
      output.writeBool(108, useAllPossibleDisjunctions_);
    }
    if (maxEdgeFinderSize_ != 0) {
      output.writeInt32(109, maxEdgeFinderSize_);
    }
    if (diffnUseCumulative_ != false) {
      output.writeBool(110, diffnUseCumulative_);
    }
    if (useElementRmq_ != false) {
      output.writeBool(111, useElementRmq_);
    }
    if (useCumulativeTimeTableSync_ != false) {
      output.writeBool(112, useCumulativeTimeTableSync_);
    }
    if (skipLocallyOptimalPaths_ != false) {
      output.writeBool(113, skipLocallyOptimalPaths_);
    }
    if (checkSolutionPeriod_ != 0) {
      output.writeInt32(114, checkSolutionPeriod_);
    }
    unknownFields.writeTo(output);
  }
  // Generated size computation mirroring writeTo(): only non-default fields
  // contribute. The result is cached in memoizedSize (-1 = not yet computed);
  // caching is safe because the message is immutable once built.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (compressTrail_ != com.google.ortools.constraintsolver.ConstraintSolverParameters.TrailCompression.NO_COMPRESSION.getNumber()) {
      size += com.google.protobuf.CodedOutputStream
        .computeEnumSize(1, compressTrail_);
    }
    if (trailBlockSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream
        .computeInt32Size(2, trailBlockSize_);
    }
    if (arraySplitSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream
        .computeInt32Size(3, arraySplitSize_);
    }
    if (storeNames_ != false) {
      size += com.google.protobuf.CodedOutputStream
        .computeBoolSize(4, storeNames_);
    }
    if (nameCastVariables_ != false) {
      size += com.google.protobuf.CodedOutputStream
        .computeBoolSize(5, nameCastVariables_);
    }
    if (nameAllVariables_ != false) {
      size += com.google.protobuf.CodedOutputStream
        .computeBoolSize(6, nameAllVariables_);
    }
    if (profilePropagation_ != false) {
      size += com.google.protobuf.CodedOutputStream
        .computeBoolSize(7, profilePropagation_);
    }
    if (!getProfileFileBytes().isEmpty()) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(8, profileFile_);
    }
    if (tracePropagation_ != false) {
      size += com.google.protobuf.CodedOutputStream
        .computeBoolSize(9, tracePropagation_);
    }
    if (traceSearch_ != false) {
      size += com.google.protobuf.CodedOutputStream
        .computeBoolSize(10, traceSearch_);
    }
    if (printModel_ != false) {
      size += com.google.protobuf.CodedOutputStream
        .computeBoolSize(11, printModel_);
    }
    if (printModelStats_ != false) {
      size += com.google.protobuf.CodedOutputStream
        .computeBoolSize(12, printModelStats_);
    }
    if (printAddedConstraints_ != false) {
      size += com.google.protobuf.CodedOutputStream
        .computeBoolSize(13, printAddedConstraints_);
    }
    if (disableSolve_ != false) {
      size += com.google.protobuf.CodedOutputStream
        .computeBoolSize(15, disableSolve_);
    }
    if (profileLocalSearch_ != false) {
      size += com.google.protobuf.CodedOutputStream
        .computeBoolSize(16, profileLocalSearch_);
    }
    if (printLocalSearchProfile_ != false) {
      size += com.google.protobuf.CodedOutputStream
        .computeBoolSize(17, printLocalSearchProfile_);
    }
    if (useSmallTable_ != false) {
      size += com.google.protobuf.CodedOutputStream
        .computeBoolSize(101, useSmallTable_);
    }
    if (useCumulativeEdgeFinder_ != false) {
      size += com.google.protobuf.CodedOutputStream
        .computeBoolSize(105, useCumulativeEdgeFinder_);
    }
    if (useCumulativeTimeTable_ != false) {
      size += com.google.protobuf.CodedOutputStream
        .computeBoolSize(106, useCumulativeTimeTable_);
    }
    if (useSequenceHighDemandTasks_ != false) {
      size += com.google.protobuf.CodedOutputStream
        .computeBoolSize(107, useSequenceHighDemandTasks_);
    }
    if (useAllPossibleDisjunctions_ != false) {
      size += com.google.protobuf.CodedOutputStream
        .computeBoolSize(108, useAllPossibleDisjunctions_);
    }
    if (maxEdgeFinderSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream
        .computeInt32Size(109, maxEdgeFinderSize_);
    }
    if (diffnUseCumulative_ != false) {
      size += com.google.protobuf.CodedOutputStream
        .computeBoolSize(110, diffnUseCumulative_);
    }
    if (useElementRmq_ != false) {
      size += com.google.protobuf.CodedOutputStream
        .computeBoolSize(111, useElementRmq_);
    }
    if (useCumulativeTimeTableSync_ != false) {
      size += com.google.protobuf.CodedOutputStream
        .computeBoolSize(112, useCumulativeTimeTableSync_);
    }
    if (skipLocallyOptimalPaths_ != false) {
      size += com.google.protobuf.CodedOutputStream
        .computeBoolSize(113, skipLocallyOptimalPaths_);
    }
    if (checkSolutionPeriod_ != 0) {
      size += com.google.protobuf.CodedOutputStream
        .computeInt32Size(114, checkSolutionPeriod_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Generated structural equality: compares every declared field plus the
  // retained unknown-field set. The enum field is compared by its raw wire
  // value (compressTrail_), so two UNRECOGNIZED values with different wire
  // numbers are not equal.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ortools.constraintsolver.ConstraintSolverParameters)) {
      return super.equals(obj);
    }
    com.google.ortools.constraintsolver.ConstraintSolverParameters other = (com.google.ortools.constraintsolver.ConstraintSolverParameters) obj;
    if (compressTrail_ != other.compressTrail_) return false;
    if (getTrailBlockSize()
        != other.getTrailBlockSize()) return false;
    if (getArraySplitSize()
        != other.getArraySplitSize()) return false;
    if (getStoreNames()
        != other.getStoreNames()) return false;
    if (getNameCastVariables()
        != other.getNameCastVariables()) return false;
    if (getNameAllVariables()
        != other.getNameAllVariables()) return false;
    if (getProfilePropagation()
        != other.getProfilePropagation()) return false;
    if (!getProfileFile()
        .equals(other.getProfileFile())) return false;
    if (getProfileLocalSearch()
        != other.getProfileLocalSearch()) return false;
    if (getPrintLocalSearchProfile()
        != other.getPrintLocalSearchProfile()) return false;
    if (getTracePropagation()
        != other.getTracePropagation()) return false;
    if (getTraceSearch()
        != other.getTraceSearch()) return false;
    if (getPrintModel()
        != other.getPrintModel()) return false;
    if (getPrintModelStats()
        != other.getPrintModelStats()) return false;
    if (getPrintAddedConstraints()
        != other.getPrintAddedConstraints()) return false;
    if (getDisableSolve()
        != other.getDisableSolve()) return false;
    if (getUseSmallTable()
        != other.getUseSmallTable()) return false;
    if (getUseCumulativeEdgeFinder()
        != other.getUseCumulativeEdgeFinder()) return false;
    if (getUseCumulativeTimeTable()
        != other.getUseCumulativeTimeTable()) return false;
    if (getUseCumulativeTimeTableSync()
        != other.getUseCumulativeTimeTableSync()) return false;
    if (getUseSequenceHighDemandTasks()
        != other.getUseSequenceHighDemandTasks()) return false;
    if (getUseAllPossibleDisjunctions()
        != other.getUseAllPossibleDisjunctions()) return false;
    if (getMaxEdgeFinderSize()
        != other.getMaxEdgeFinderSize()) return false;
    if (getDiffnUseCumulative()
        != other.getDiffnUseCumulative()) return false;
    if (getUseElementRmq()
        != other.getUseElementRmq()) return false;
    if (getSkipLocallyOptimalPaths()
        != other.getSkipLocallyOptimalPaths()) return false;
    if (getCheckSolutionPeriod()
        != other.getCheckSolutionPeriod()) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }
  // Generated hash, consistent with equals(): folds each field tagged by its
  // FIELD_NUMBER constant using protobuf's fixed 19/37/53/29 multipliers.
  // The result is cached in memoizedHashCode; 0 doubles as the
  // "not yet computed" sentinel.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + COMPRESS_TRAIL_FIELD_NUMBER;
    hash = (53 * hash) + compressTrail_;
    hash = (37 * hash) + TRAIL_BLOCK_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getTrailBlockSize();
    hash = (37 * hash) + ARRAY_SPLIT_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getArraySplitSize();
    hash = (37 * hash) + STORE_NAMES_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
        getStoreNames());
    hash = (37 * hash) + NAME_CAST_VARIABLES_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
        getNameCastVariables());
    hash = (37 * hash) + NAME_ALL_VARIABLES_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
        getNameAllVariables());
    hash = (37 * hash) + PROFILE_PROPAGATION_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
        getProfilePropagation());
    hash = (37 * hash) + PROFILE_FILE_FIELD_NUMBER;
    hash = (53 * hash) + getProfileFile().hashCode();
    hash = (37 * hash) + PROFILE_LOCAL_SEARCH_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
        getProfileLocalSearch());
    hash = (37 * hash) + PRINT_LOCAL_SEARCH_PROFILE_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
        getPrintLocalSearchProfile());
    hash = (37 * hash) + TRACE_PROPAGATION_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
        getTracePropagation());
    hash = (37 * hash) + TRACE_SEARCH_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
        getTraceSearch());
    hash = (37 * hash) + PRINT_MODEL_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
        getPrintModel());
    hash = (37 * hash) + PRINT_MODEL_STATS_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
        getPrintModelStats());
    hash = (37 * hash) + PRINT_ADDED_CONSTRAINTS_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
        getPrintAddedConstraints());
    hash = (37 * hash) + DISABLE_SOLVE_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
        getDisableSolve());
    hash = (37 * hash) + USE_SMALL_TABLE_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
        getUseSmallTable());
    hash = (37 * hash) + USE_CUMULATIVE_EDGE_FINDER_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
        getUseCumulativeEdgeFinder());
    hash = (37 * hash) + USE_CUMULATIVE_TIME_TABLE_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
        getUseCumulativeTimeTable());
    hash = (37 * hash) + USE_CUMULATIVE_TIME_TABLE_SYNC_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
        getUseCumulativeTimeTableSync());
    hash = (37 * hash) + USE_SEQUENCE_HIGH_DEMAND_TASKS_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
        getUseSequenceHighDemandTasks());
    hash = (37 * hash) + USE_ALL_POSSIBLE_DISJUNCTIONS_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
        getUseAllPossibleDisjunctions());
    hash = (37 * hash) + MAX_EDGE_FINDER_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getMaxEdgeFinderSize();
    hash = (37 * hash) + DIFFN_USE_CUMULATIVE_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
        getDiffnUseCumulative());
    hash = (37 * hash) + USE_ELEMENT_RMQ_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
        getUseElementRmq());
    hash = (37 * hash) + SKIP_LOCALLY_OPTIMAL_PATHS_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
        getSkipLocallyOptimalPaths());
    hash = (37 * hash) + CHECK_SOLUTION_PERIOD_FIELD_NUMBER;
    hash = (53 * hash) + getCheckSolutionPeriod();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Generated static parsers. Byte/buffer overloads delegate to PARSER and
  // throw InvalidProtocolBufferException on malformed input; stream overloads
  // wrap parsing in the GeneratedMessageV3 IOException helpers. The
  // parseDelimitedFrom variants read a varint length prefix first, allowing
  // several messages to be read back-to-back from one stream.
  public static com.google.ortools.constraintsolver.ConstraintSolverParameters parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ortools.constraintsolver.ConstraintSolverParameters parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ortools.constraintsolver.ConstraintSolverParameters parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ortools.constraintsolver.ConstraintSolverParameters parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ortools.constraintsolver.ConstraintSolverParameters parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ortools.constraintsolver.ConstraintSolverParameters parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ortools.constraintsolver.ConstraintSolverParameters parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ortools.constraintsolver.ConstraintSolverParameters parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ortools.constraintsolver.ConstraintSolverParameters parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ortools.constraintsolver.ConstraintSolverParameters parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ortools.constraintsolver.ConstraintSolverParameters parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ortools.constraintsolver.ConstraintSolverParameters parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Generated builder factories. newBuilder() seeds from the shared default
  // instance; toBuilder() avoids the merge cost when called on the default
  // instance itself.
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ortools.constraintsolver.ConstraintSolverParameters prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
* <pre>
* Solver parameters.
* </pre>
*
* Protobuf type {@code operations_research.ConstraintSolverParameters}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:operations_research.ConstraintSolverParameters)
com.google.ortools.constraintsolver.ConstraintSolverParametersOrBuilder {
    // Generated reflection plumbing: exposes the same descriptor and field
    // accessor table as the message class so the builder supports the
    // reflective setField/getField APIs.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ortools.constraintsolver.SolverParameters.internal_static_operations_research_ConstraintSolverParameters_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ortools.constraintsolver.SolverParameters.internal_static_operations_research_ConstraintSolverParameters_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ortools.constraintsolver.ConstraintSolverParameters.class, com.google.ortools.constraintsolver.ConstraintSolverParameters.Builder.class);
    }
    // Construct using com.google.ortools.constraintsolver.ConstraintSolverParameters.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    // Parent-attached variant used for nested-message field builders.
    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // No-op unless the runtime is configured to always use field builders;
    // this message has no sub-message fields, so the body is empty.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    // Resets every field to its proto3 default (0 / false / empty string)
    // after clearing the base builder's state (unknown fields etc.).
    @java.lang.Override
    public Builder clear() {
      super.clear();
      compressTrail_ = 0;
      trailBlockSize_ = 0;
      arraySplitSize_ = 0;
      storeNames_ = false;
      nameCastVariables_ = false;
      nameAllVariables_ = false;
      profilePropagation_ = false;
      profileFile_ = "";
      profileLocalSearch_ = false;
      printLocalSearchProfile_ = false;
      tracePropagation_ = false;
      traceSearch_ = false;
      printModel_ = false;
      printModelStats_ = false;
      printAddedConstraints_ = false;
      disableSolve_ = false;
      useSmallTable_ = false;
      useCumulativeEdgeFinder_ = false;
      useCumulativeTimeTable_ = false;
      useCumulativeTimeTableSync_ = false;
      useSequenceHighDemandTasks_ = false;
      useAllPossibleDisjunctions_ = false;
      maxEdgeFinderSize_ = 0;
      diffnUseCumulative_ = false;
      useElementRmq_ = false;
      skipLocallyOptimalPaths_ = false;
      checkSolutionPeriod_ = 0;
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ortools.constraintsolver.SolverParameters.internal_static_operations_research_ConstraintSolverParameters_descriptor;
    }
    @java.lang.Override
    public com.google.ortools.constraintsolver.ConstraintSolverParameters getDefaultInstanceForType() {
      return com.google.ortools.constraintsolver.ConstraintSolverParameters.getDefaultInstance();
    }
    // build() adds an initialization check on top of buildPartial(); with no
    // required fields isInitialized() always holds, so the throw is dead in
    // practice but kept for the generated-code contract.
    @java.lang.Override
    public com.google.ortools.constraintsolver.ConstraintSolverParameters build() {
      com.google.ortools.constraintsolver.ConstraintSolverParameters result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    // Copies every builder field into a fresh immutable message, then fires
    // the generated onBuilt() hook. Field order here mirrors the declaration
    // order in the message class.
    @java.lang.Override
    public com.google.ortools.constraintsolver.ConstraintSolverParameters buildPartial() {
      com.google.ortools.constraintsolver.ConstraintSolverParameters result = new com.google.ortools.constraintsolver.ConstraintSolverParameters(this);
      result.compressTrail_ = compressTrail_;
      result.trailBlockSize_ = trailBlockSize_;
      result.arraySplitSize_ = arraySplitSize_;
      result.storeNames_ = storeNames_;
      result.nameCastVariables_ = nameCastVariables_;
      result.nameAllVariables_ = nameAllVariables_;
      result.profilePropagation_ = profilePropagation_;
      result.profileFile_ = profileFile_;
      result.profileLocalSearch_ = profileLocalSearch_;
      result.printLocalSearchProfile_ = printLocalSearchProfile_;
      result.tracePropagation_ = tracePropagation_;
      result.traceSearch_ = traceSearch_;
      result.printModel_ = printModel_;
      result.printModelStats_ = printModelStats_;
      result.printAddedConstraints_ = printAddedConstraints_;
      result.disableSolve_ = disableSolve_;
      result.useSmallTable_ = useSmallTable_;
      result.useCumulativeEdgeFinder_ = useCumulativeEdgeFinder_;
      result.useCumulativeTimeTable_ = useCumulativeTimeTable_;
      result.useCumulativeTimeTableSync_ = useCumulativeTimeTableSync_;
      result.useSequenceHighDemandTasks_ = useSequenceHighDemandTasks_;
      result.useAllPossibleDisjunctions_ = useAllPossibleDisjunctions_;
      result.maxEdgeFinderSize_ = maxEdgeFinderSize_;
      result.diffnUseCumulative_ = diffnUseCumulative_;
      result.useElementRmq_ = useElementRmq_;
      result.skipLocallyOptimalPaths_ = skipLocallyOptimalPaths_;
      result.checkSolutionPeriod_ = checkSolutionPeriod_;
      onBuilt();
      return result;
    }
// The following overrides only narrow the return type to Builder and delegate
// to the reflective GeneratedMessageV3.Builder implementations. No local logic.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Type-dispatching merge: same-type messages use the efficient field-wise
// overload below; any other Message type falls back to the reflective
// super.mergeFrom.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ortools.constraintsolver.ConstraintSolverParameters) {
return mergeFrom((com.google.ortools.constraintsolver.ConstraintSolverParameters)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-wise merge with proto3 semantics: each scalar field of 'other' is
// copied only when it holds a non-default value (non-zero int/enum, true
// boolean, non-empty string), so defaults in 'other' never clobber values
// already set on this builder. Unknown fields are merged at the end.
public Builder mergeFrom(com.google.ortools.constraintsolver.ConstraintSolverParameters other) {
// Fast path: merging the default instance is a no-op.
if (other == com.google.ortools.constraintsolver.ConstraintSolverParameters.getDefaultInstance()) return this;
if (other.compressTrail_ != 0) {
setCompressTrailValue(other.getCompressTrailValue());
}
if (other.getTrailBlockSize() != 0) {
setTrailBlockSize(other.getTrailBlockSize());
}
if (other.getArraySplitSize() != 0) {
setArraySplitSize(other.getArraySplitSize());
}
if (other.getStoreNames() != false) {
setStoreNames(other.getStoreNames());
}
if (other.getNameCastVariables() != false) {
setNameCastVariables(other.getNameCastVariables());
}
if (other.getNameAllVariables() != false) {
setNameAllVariables(other.getNameAllVariables());
}
if (other.getProfilePropagation() != false) {
setProfilePropagation(other.getProfilePropagation());
}
// String field is assigned directly (not via setter) so the lazily cached
// String/ByteString representation in other.profileFile_ is reused as-is.
if (!other.getProfileFile().isEmpty()) {
profileFile_ = other.profileFile_;
onChanged();
}
if (other.getProfileLocalSearch() != false) {
setProfileLocalSearch(other.getProfileLocalSearch());
}
if (other.getPrintLocalSearchProfile() != false) {
setPrintLocalSearchProfile(other.getPrintLocalSearchProfile());
}
if (other.getTracePropagation() != false) {
setTracePropagation(other.getTracePropagation());
}
if (other.getTraceSearch() != false) {
setTraceSearch(other.getTraceSearch());
}
if (other.getPrintModel() != false) {
setPrintModel(other.getPrintModel());
}
if (other.getPrintModelStats() != false) {
setPrintModelStats(other.getPrintModelStats());
}
if (other.getPrintAddedConstraints() != false) {
setPrintAddedConstraints(other.getPrintAddedConstraints());
}
if (other.getDisableSolve() != false) {
setDisableSolve(other.getDisableSolve());
}
if (other.getUseSmallTable() != false) {
setUseSmallTable(other.getUseSmallTable());
}
if (other.getUseCumulativeEdgeFinder() != false) {
setUseCumulativeEdgeFinder(other.getUseCumulativeEdgeFinder());
}
if (other.getUseCumulativeTimeTable() != false) {
setUseCumulativeTimeTable(other.getUseCumulativeTimeTable());
}
if (other.getUseCumulativeTimeTableSync() != false) {
setUseCumulativeTimeTableSync(other.getUseCumulativeTimeTableSync());
}
if (other.getUseSequenceHighDemandTasks() != false) {
setUseSequenceHighDemandTasks(other.getUseSequenceHighDemandTasks());
}
if (other.getUseAllPossibleDisjunctions() != false) {
setUseAllPossibleDisjunctions(other.getUseAllPossibleDisjunctions());
}
if (other.getMaxEdgeFinderSize() != 0) {
setMaxEdgeFinderSize(other.getMaxEdgeFinderSize());
}
if (other.getDiffnUseCumulative() != false) {
setDiffnUseCumulative(other.getDiffnUseCumulative());
}
if (other.getUseElementRmq() != false) {
setUseElementRmq(other.getUseElementRmq());
}
if (other.getSkipLocallyOptimalPaths() != false) {
setSkipLocallyOptimalPaths(other.getSkipLocallyOptimalPaths());
}
if (other.getCheckSolutionPeriod() != 0) {
setCheckSolutionPeriod(other.getCheckSolutionPeriod());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
// proto3 messages have no required fields, so the generated check is a
// constant true.
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Parses a message from the wire and merges it into this builder. On a parse
// failure, the partially parsed message (carried on the exception) is still
// merged in the finally block before the exception is rethrown as an
// IOException, so successfully read fields are not lost.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.ortools.constraintsolver.ConstraintSolverParameters parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.ortools.constraintsolver.ConstraintSolverParameters) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int compressTrail_ = 0;
/**
* <pre>
* This parameter indicates if the solver should compress the trail
* during the search. No compression means that the solver will be faster,
* but will use more memory.
* </pre>
*
* <code>.operations_research.ConstraintSolverParameters.TrailCompression compress_trail = 1;</code>
* @return The enum numeric value on the wire for compressTrail.
*/
public int getCompressTrailValue() {
return compressTrail_;
}
/**
* <pre>
* This parameter indicates if the solver should compress the trail
* during the search. No compression means that the solver will be faster,
* but will use more memory.
* </pre>
*
* <code>.operations_research.ConstraintSolverParameters.TrailCompression compress_trail = 1;</code>
* @param value The enum numeric value on the wire for compressTrail to set.
* @return This builder for chaining.
*/
public Builder setCompressTrailValue(int value) {
compressTrail_ = value;
onChanged();
return this;
}
/**
* <pre>
* This parameter indicates if the solver should compress the trail
* during the search. No compression means that the solver will be faster,
* but will use more memory.
* </pre>
*
* <code>.operations_research.ConstraintSolverParameters.TrailCompression compress_trail = 1;</code>
* @return The compressTrail.
*/
// Maps the raw wire value to the TrailCompression enum; any wire value not
// known to this generated code yields UNRECOGNIZED rather than null.
// TrailCompression.valueOf(int) is deprecated in newer protobuf runtimes
// (hence the suppression); the generator still emits it here.
public com.google.ortools.constraintsolver.ConstraintSolverParameters.TrailCompression getCompressTrail() {
@SuppressWarnings("deprecation")
com.google.ortools.constraintsolver.ConstraintSolverParameters.TrailCompression result = com.google.ortools.constraintsolver.ConstraintSolverParameters.TrailCompression.valueOf(compressTrail_);
return result == null ? com.google.ortools.constraintsolver.ConstraintSolverParameters.TrailCompression.UNRECOGNIZED : result;
}
/**
* <pre>
* This parameter indicates if the solver should compress the trail
* during the search. No compression means that the solver will be faster,
* but will use more memory.
* </pre>
*
* <code>.operations_research.ConstraintSolverParameters.TrailCompression compress_trail = 1;</code>
* @param value The compressTrail to set.
* @return This builder for chaining.
*/
public Builder setCompressTrail(com.google.ortools.constraintsolver.ConstraintSolverParameters.TrailCompression value) {
if (value == null) {
throw new NullPointerException();
}
compressTrail_ = value.getNumber();
onChanged();
return this;
}
/**
* <pre>
* This parameter indicates if the solver should compress the trail
* during the search. No compression means that the solver will be faster,
* but will use more memory.
* </pre>
*
* <code>.operations_research.ConstraintSolverParameters.TrailCompression compress_trail = 1;</code>
* @return This builder for chaining.
*/
public Builder clearCompressTrail() {
compressTrail_ = 0;
onChanged();
return this;
}
private int trailBlockSize_ ;
/**
* <pre>
* This parameter indicates the default size of a block of the trail.
* Compression applies at the block level.
* </pre>
*
* <code>int32 trail_block_size = 2;</code>
* @return The trailBlockSize.
*/
public int getTrailBlockSize() {
return trailBlockSize_;
}
/**
* <pre>
* This parameter indicates the default size of a block of the trail.
* Compression applies at the block level.
* </pre>
*
* <code>int32 trail_block_size = 2;</code>
* @param value The trailBlockSize to set.
* @return This builder for chaining.
*/
public Builder setTrailBlockSize(int value) {
trailBlockSize_ = value;
onChanged();
return this;
}
/**
* <pre>
* This parameter indicates the default size of a block of the trail.
* Compression applies at the block level.
* </pre>
*
* <code>int32 trail_block_size = 2;</code>
* @return This builder for chaining.
*/
public Builder clearTrailBlockSize() {
trailBlockSize_ = 0;
onChanged();
return this;
}
private int arraySplitSize_ ;
/**
* <pre>
* When a sum/min/max operation is applied on a large array, this
* array is recursively split into blocks of size 'array_split_size'.
* </pre>
*
* <code>int32 array_split_size = 3;</code>
* @return The arraySplitSize.
*/
public int getArraySplitSize() {
return arraySplitSize_;
}
/**
* <pre>
* When a sum/min/max operation is applied on a large array, this
* array is recursively split into blocks of size 'array_split_size'.
* </pre>
*
* <code>int32 array_split_size = 3;</code>
* @param value The arraySplitSize to set.
* @return This builder for chaining.
*/
public Builder setArraySplitSize(int value) {
arraySplitSize_ = value;
onChanged();
return this;
}
/**
* <pre>
* When a sum/min/max operation is applied on a large array, this
* array is recursively split into blocks of size 'array_split_size'.
* </pre>
*
* <code>int32 array_split_size = 3;</code>
* @return This builder for chaining.
*/
public Builder clearArraySplitSize() {
arraySplitSize_ = 0;
onChanged();
return this;
}
private boolean storeNames_ ;
/**
* <pre>
* This parameters indicates if the solver should store the names of
* the objets it manages.
* </pre>
*
* <code>bool store_names = 4;</code>
* @return The storeNames.
*/
public boolean getStoreNames() {
return storeNames_;
}
/**
* <pre>
* This parameters indicates if the solver should store the names of
* the objets it manages.
* </pre>
*
* <code>bool store_names = 4;</code>
* @param value The storeNames to set.
* @return This builder for chaining.
*/
public Builder setStoreNames(boolean value) {
storeNames_ = value;
onChanged();
return this;
}
/**
* <pre>
* This parameters indicates if the solver should store the names of
* the objets it manages.
* </pre>
*
* <code>bool store_names = 4;</code>
* @return This builder for chaining.
*/
public Builder clearStoreNames() {
storeNames_ = false;
onChanged();
return this;
}
private boolean nameCastVariables_ ;
/**
* <pre>
* Create names for cast variables.
* </pre>
*
* <code>bool name_cast_variables = 5;</code>
* @return The nameCastVariables.
*/
public boolean getNameCastVariables() {
return nameCastVariables_;
}
/**
* <pre>
* Create names for cast variables.
* </pre>
*
* <code>bool name_cast_variables = 5;</code>
* @param value The nameCastVariables to set.
* @return This builder for chaining.
*/
public Builder setNameCastVariables(boolean value) {
nameCastVariables_ = value;
onChanged();
return this;
}
/**
* <pre>
* Create names for cast variables.
* </pre>
*
* <code>bool name_cast_variables = 5;</code>
* @return This builder for chaining.
*/
public Builder clearNameCastVariables() {
nameCastVariables_ = false;
onChanged();
return this;
}
private boolean nameAllVariables_ ;
/**
* <pre>
* Should anonymous variables be given a name.
* </pre>
*
* <code>bool name_all_variables = 6;</code>
* @return The nameAllVariables.
*/
public boolean getNameAllVariables() {
return nameAllVariables_;
}
/**
* <pre>
* Should anonymous variables be given a name.
* </pre>
*
* <code>bool name_all_variables = 6;</code>
* @param value The nameAllVariables to set.
* @return This builder for chaining.
*/
public Builder setNameAllVariables(boolean value) {
nameAllVariables_ = value;
onChanged();
return this;
}
/**
* <pre>
* Should anonymous variables be given a name.
* </pre>
*
* <code>bool name_all_variables = 6;</code>
* @return This builder for chaining.
*/
public Builder clearNameAllVariables() {
nameAllVariables_ = false;
onChanged();
return this;
}
private boolean profilePropagation_ ;
/**
* <pre>
* Activate propagation profiling.
* </pre>
*
* <code>bool profile_propagation = 7;</code>
* @return The profilePropagation.
*/
public boolean getProfilePropagation() {
return profilePropagation_;
}
/**
* <pre>
* Activate propagation profiling.
* </pre>
*
* <code>bool profile_propagation = 7;</code>
* @param value The profilePropagation to set.
* @return This builder for chaining.
*/
public Builder setProfilePropagation(boolean value) {
profilePropagation_ = value;
onChanged();
return this;
}
/**
* <pre>
* Activate propagation profiling.
* </pre>
*
* <code>bool profile_propagation = 7;</code>
* @return This builder for chaining.
*/
public Builder clearProfilePropagation() {
profilePropagation_ = false;
onChanged();
return this;
}
private java.lang.Object profileFile_ = "";
/**
* <pre>
* Export propagation profiling data to file.
* </pre>
*
* <code>string profile_file = 8;</code>
* @return The profileFile.
*/
// profileFile_ is stored as either a String or a ByteString. If it is still
// the ByteString parsed from the wire, decode it as UTF-8 once and cache the
// String back into the field for subsequent calls.
public java.lang.String getProfileFile() {
java.lang.Object ref = profileFile_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
profileFile_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Export propagation profiling data to file.
* </pre>
*
* <code>string profile_file = 8;</code>
* @return The bytes for profileFile.
*/
// Symmetric to getProfileFile(): if the field currently holds a String,
// encode it to a UTF-8 ByteString once and cache that representation back
// into the field; otherwise return the existing ByteString.
public com.google.protobuf.ByteString
getProfileFileBytes() {
java.lang.Object ref = profileFile_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
profileFile_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Export propagation profiling data to file.
* </pre>
*
* <code>string profile_file = 8;</code>
* @param value The profileFile to set.
* @return This builder for chaining.
*/
public Builder setProfileFile(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
profileFile_ = value;
onChanged();
return this;
}
/**
* <pre>
* Export propagation profiling data to file.
* </pre>
*
* <code>string profile_file = 8;</code>
* @return This builder for chaining.
*/
public Builder clearProfileFile() {
profileFile_ = getDefaultInstance().getProfileFile();
onChanged();
return this;
}
/**
* <pre>
* Export propagation profiling data to file.
* </pre>
*
* <code>string profile_file = 8;</code>
* @param value The bytes for profileFile to set.
* @return This builder for chaining.
*/
public Builder setProfileFileBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
profileFile_ = value;
onChanged();
return this;
}
private boolean profileLocalSearch_ ;
/**
* <pre>
* Activate local search profiling.
* </pre>
*
* <code>bool profile_local_search = 16;</code>
* @return The profileLocalSearch.
*/
public boolean getProfileLocalSearch() {
return profileLocalSearch_;
}
/**
* <pre>
* Activate local search profiling.
* </pre>
*
* <code>bool profile_local_search = 16;</code>
* @param value The profileLocalSearch to set.
* @return This builder for chaining.
*/
public Builder setProfileLocalSearch(boolean value) {
profileLocalSearch_ = value;
onChanged();
return this;
}
/**
* <pre>
* Activate local search profiling.
* </pre>
*
* <code>bool profile_local_search = 16;</code>
* @return This builder for chaining.
*/
public Builder clearProfileLocalSearch() {
profileLocalSearch_ = false;
onChanged();
return this;
}
private boolean printLocalSearchProfile_ ;
/**
* <pre>
* Print local search profiling data after solving.
* </pre>
*
* <code>bool print_local_search_profile = 17;</code>
* @return The printLocalSearchProfile.
*/
public boolean getPrintLocalSearchProfile() {
return printLocalSearchProfile_;
}
/**
* <pre>
* Print local search profiling data after solving.
* </pre>
*
* <code>bool print_local_search_profile = 17;</code>
* @param value The printLocalSearchProfile to set.
* @return This builder for chaining.
*/
public Builder setPrintLocalSearchProfile(boolean value) {
printLocalSearchProfile_ = value;
onChanged();
return this;
}
/**
* <pre>
* Print local search profiling data after solving.
* </pre>
*
* <code>bool print_local_search_profile = 17;</code>
* @return This builder for chaining.
*/
public Builder clearPrintLocalSearchProfile() {
printLocalSearchProfile_ = false;
onChanged();
return this;
}
private boolean tracePropagation_ ;
/**
* <pre>
* Activate propagate tracing.
* </pre>
*
* <code>bool trace_propagation = 9;</code>
* @return The tracePropagation.
*/
public boolean getTracePropagation() {
return tracePropagation_;
}
/**
* <pre>
* Activate propagate tracing.
* </pre>
*
* <code>bool trace_propagation = 9;</code>
* @param value The tracePropagation to set.
* @return This builder for chaining.
*/
public Builder setTracePropagation(boolean value) {
tracePropagation_ = value;
onChanged();
return this;
}
/**
* <pre>
* Activate propagate tracing.
* </pre>
*
* <code>bool trace_propagation = 9;</code>
* @return This builder for chaining.
*/
public Builder clearTracePropagation() {
tracePropagation_ = false;
onChanged();
return this;
}
private boolean traceSearch_ ;
/**
* <pre>
* Trace search.
* </pre>
*
* <code>bool trace_search = 10;</code>
* @return The traceSearch.
*/
public boolean getTraceSearch() {
return traceSearch_;
}
/**
* <pre>
* Trace search.
* </pre>
*
* <code>bool trace_search = 10;</code>
* @param value The traceSearch to set.
* @return This builder for chaining.
*/
public Builder setTraceSearch(boolean value) {
traceSearch_ = value;
onChanged();
return this;
}
/**
* <pre>
* Trace search.
* </pre>
*
* <code>bool trace_search = 10;</code>
* @return This builder for chaining.
*/
public Builder clearTraceSearch() {
traceSearch_ = false;
onChanged();
return this;
}
private boolean printModel_ ;
/**
* <pre>
* Print the model before solving.
* </pre>
*
* <code>bool print_model = 11;</code>
* @return The printModel.
*/
public boolean getPrintModel() {
return printModel_;
}
/**
* <pre>
* Print the model before solving.
* </pre>
*
* <code>bool print_model = 11;</code>
* @param value The printModel to set.
* @return This builder for chaining.
*/
public Builder setPrintModel(boolean value) {
printModel_ = value;
onChanged();
return this;
}
/**
* <pre>
* Print the model before solving.
* </pre>
*
* <code>bool print_model = 11;</code>
* @return This builder for chaining.
*/
public Builder clearPrintModel() {
printModel_ = false;
onChanged();
return this;
}
private boolean printModelStats_ ;
/**
* <pre>
* Print model statistics before solving.
* </pre>
*
* <code>bool print_model_stats = 12;</code>
* @return The printModelStats.
*/
public boolean getPrintModelStats() {
return printModelStats_;
}
/**
* <pre>
* Print model statistics before solving.
* </pre>
*
* <code>bool print_model_stats = 12;</code>
* @param value The printModelStats to set.
* @return This builder for chaining.
*/
public Builder setPrintModelStats(boolean value) {
printModelStats_ = value;
onChanged();
return this;
}
/**
* <pre>
* Print model statistics before solving.
* </pre>
*
* <code>bool print_model_stats = 12;</code>
* @return This builder for chaining.
*/
public Builder clearPrintModelStats() {
printModelStats_ = false;
onChanged();
return this;
}
private boolean printAddedConstraints_ ;
/**
* <pre>
* Print added constraints.
* </pre>
*
* <code>bool print_added_constraints = 13;</code>
* @return The printAddedConstraints.
*/
public boolean getPrintAddedConstraints() {
return printAddedConstraints_;
}
/**
* <pre>
* Print added constraints.
* </pre>
*
* <code>bool print_added_constraints = 13;</code>
* @param value The printAddedConstraints to set.
* @return This builder for chaining.
*/
public Builder setPrintAddedConstraints(boolean value) {
printAddedConstraints_ = value;
onChanged();
return this;
}
/**
* <pre>
* Print added constraints.
* </pre>
*
* <code>bool print_added_constraints = 13;</code>
* @return This builder for chaining.
*/
public Builder clearPrintAddedConstraints() {
printAddedConstraints_ = false;
onChanged();
return this;
}
private boolean disableSolve_ ;
/**
* <code>bool disable_solve = 15;</code>
* @return The disableSolve.
*/
public boolean getDisableSolve() {
return disableSolve_;
}
/**
* <code>bool disable_solve = 15;</code>
* @param value The disableSolve to set.
* @return This builder for chaining.
*/
public Builder setDisableSolve(boolean value) {
disableSolve_ = value;
onChanged();
return this;
}
/**
* <code>bool disable_solve = 15;</code>
* @return This builder for chaining.
*/
public Builder clearDisableSolve() {
disableSolve_ = false;
onChanged();
return this;
}
private boolean useSmallTable_ ;
/**
* <pre>
* Control the implementation of the table constraint.
* </pre>
*
* <code>bool use_small_table = 101;</code>
* @return The useSmallTable.
*/
public boolean getUseSmallTable() {
return useSmallTable_;
}
/**
* <pre>
* Control the implementation of the table constraint.
* </pre>
*
* <code>bool use_small_table = 101;</code>
* @param value The useSmallTable to set.
* @return This builder for chaining.
*/
public Builder setUseSmallTable(boolean value) {
useSmallTable_ = value;
onChanged();
return this;
}
/**
* <pre>
* Control the implementation of the table constraint.
* </pre>
*
* <code>bool use_small_table = 101;</code>
* @return This builder for chaining.
*/
public Builder clearUseSmallTable() {
useSmallTable_ = false;
onChanged();
return this;
}
private boolean useCumulativeEdgeFinder_ ;
/**
* <pre>
* Control the propagation of the cumulative constraint.
* </pre>
*
* <code>bool use_cumulative_edge_finder = 105;</code>
* @return The useCumulativeEdgeFinder.
*/
public boolean getUseCumulativeEdgeFinder() {
return useCumulativeEdgeFinder_;
}
/**
* <pre>
* Control the propagation of the cumulative constraint.
* </pre>
*
* <code>bool use_cumulative_edge_finder = 105;</code>
* @param value The useCumulativeEdgeFinder to set.
* @return This builder for chaining.
*/
public Builder setUseCumulativeEdgeFinder(boolean value) {
useCumulativeEdgeFinder_ = value;
onChanged();
return this;
}
/**
* <pre>
* Control the propagation of the cumulative constraint.
* </pre>
*
* <code>bool use_cumulative_edge_finder = 105;</code>
* @return This builder for chaining.
*/
public Builder clearUseCumulativeEdgeFinder() {
useCumulativeEdgeFinder_ = false;
onChanged();
return this;
}
private boolean useCumulativeTimeTable_ ;
/**
* <code>bool use_cumulative_time_table = 106;</code>
* @return The useCumulativeTimeTable.
*/
public boolean getUseCumulativeTimeTable() {
return useCumulativeTimeTable_;
}
/**
* <code>bool use_cumulative_time_table = 106;</code>
* @param value The useCumulativeTimeTable to set.
* @return This builder for chaining.
*/
public Builder setUseCumulativeTimeTable(boolean value) {
useCumulativeTimeTable_ = value;
onChanged();
return this;
}
/**
* <code>bool use_cumulative_time_table = 106;</code>
* @return This builder for chaining.
*/
public Builder clearUseCumulativeTimeTable() {
useCumulativeTimeTable_ = false;
onChanged();
return this;
}
private boolean useCumulativeTimeTableSync_ ;
/**
* <code>bool use_cumulative_time_table_sync = 112;</code>
* @return The useCumulativeTimeTableSync.
*/
public boolean getUseCumulativeTimeTableSync() {
return useCumulativeTimeTableSync_;
}
/**
* <code>bool use_cumulative_time_table_sync = 112;</code>
* @param value The useCumulativeTimeTableSync to set.
* @return This builder for chaining.
*/
public Builder setUseCumulativeTimeTableSync(boolean value) {
useCumulativeTimeTableSync_ = value;
onChanged();
return this;
}
/**
* <code>bool use_cumulative_time_table_sync = 112;</code>
* @return This builder for chaining.
*/
public Builder clearUseCumulativeTimeTableSync() {
useCumulativeTimeTableSync_ = false;
onChanged();
return this;
}
private boolean useSequenceHighDemandTasks_ ;
/**
* <code>bool use_sequence_high_demand_tasks = 107;</code>
* @return The useSequenceHighDemandTasks.
*/
public boolean getUseSequenceHighDemandTasks() {
return useSequenceHighDemandTasks_;
}
/**
* <code>bool use_sequence_high_demand_tasks = 107;</code>
* @param value The useSequenceHighDemandTasks to set.
* @return This builder for chaining.
*/
public Builder setUseSequenceHighDemandTasks(boolean value) {
useSequenceHighDemandTasks_ = value;
onChanged();
return this;
}
/**
* <code>bool use_sequence_high_demand_tasks = 107;</code>
* @return This builder for chaining.
*/
public Builder clearUseSequenceHighDemandTasks() {
useSequenceHighDemandTasks_ = false;
onChanged();
return this;
}
private boolean useAllPossibleDisjunctions_ ;
/**
* <code>bool use_all_possible_disjunctions = 108;</code>
* @return The useAllPossibleDisjunctions.
*/
public boolean getUseAllPossibleDisjunctions() {
return useAllPossibleDisjunctions_;
}
/**
* <code>bool use_all_possible_disjunctions = 108;</code>
* @param value The useAllPossibleDisjunctions to set.
* @return This builder for chaining.
*/
public Builder setUseAllPossibleDisjunctions(boolean value) {
useAllPossibleDisjunctions_ = value;
onChanged();
return this;
}
/**
* <code>bool use_all_possible_disjunctions = 108;</code>
* @return This builder for chaining.
*/
public Builder clearUseAllPossibleDisjunctions() {
useAllPossibleDisjunctions_ = false;
onChanged();
return this;
}
private int maxEdgeFinderSize_ ;
/**
* <code>int32 max_edge_finder_size = 109;</code>
* @return The maxEdgeFinderSize.
*/
public int getMaxEdgeFinderSize() {
return maxEdgeFinderSize_;
}
/**
* <code>int32 max_edge_finder_size = 109;</code>
* @param value The maxEdgeFinderSize to set.
* @return This builder for chaining.
*/
public Builder setMaxEdgeFinderSize(int value) {
maxEdgeFinderSize_ = value;
onChanged();
return this;
}
/**
* <code>int32 max_edge_finder_size = 109;</code>
* @return This builder for chaining.
*/
public Builder clearMaxEdgeFinderSize() {
maxEdgeFinderSize_ = 0;
onChanged();
return this;
}
private boolean diffnUseCumulative_ ;
/**
* <pre>
* Control the propagation of the diffn constraint.
* </pre>
*
* <code>bool diffn_use_cumulative = 110;</code>
* @return The diffnUseCumulative.
*/
public boolean getDiffnUseCumulative() {
return diffnUseCumulative_;
}
/**
* <pre>
* Control the propagation of the diffn constraint.
* </pre>
*
* <code>bool diffn_use_cumulative = 110;</code>
* @param value The diffnUseCumulative to set.
* @return This builder for chaining.
*/
public Builder setDiffnUseCumulative(boolean value) {
diffnUseCumulative_ = value;
onChanged();
return this;
}
/**
* <pre>
* Control the propagation of the diffn constraint.
* </pre>
*
* <code>bool diffn_use_cumulative = 110;</code>
* @return This builder for chaining.
*/
public Builder clearDiffnUseCumulative() {
diffnUseCumulative_ = false;
onChanged();
return this;
}
private boolean useElementRmq_ ;
/**
* <pre>
* Control the implementation of the element constraint.
* </pre>
*
* <code>bool use_element_rmq = 111;</code>
* @return The useElementRmq.
*/
public boolean getUseElementRmq() {
return useElementRmq_;
}
/**
* <pre>
* Control the implementation of the element constraint.
* </pre>
*
* <code>bool use_element_rmq = 111;</code>
* @param value The useElementRmq to set.
* @return This builder for chaining.
*/
public Builder setUseElementRmq(boolean value) {
useElementRmq_ = value;
onChanged();
return this;
}
/**
* <pre>
* Control the implementation of the element constraint.
* </pre>
*
* <code>bool use_element_rmq = 111;</code>
* @return This builder for chaining.
*/
public Builder clearUseElementRmq() {
useElementRmq_ = false;
onChanged();
return this;
}
private boolean skipLocallyOptimalPaths_ ;
/**
* <pre>
* Skip locally optimal pairs of paths in PathOperators. Setting this
* parameter to true might skip valid neighbors if there are constraints
* linking paths together (such as precedences). In any other case this
* should only speed up the search without omitting any neighbors.
* </pre>
*
* <code>bool skip_locally_optimal_paths = 113;</code>
* @return The skipLocallyOptimalPaths.
*/
public boolean getSkipLocallyOptimalPaths() {
return skipLocallyOptimalPaths_;
}
/**
* <pre>
* Skip locally optimal pairs of paths in PathOperators. Setting this
* parameter to true might skip valid neighbors if there are constraints
* linking paths together (such as precedences). In any other case this
* should only speed up the search without omitting any neighbors.
* </pre>
*
* <code>bool skip_locally_optimal_paths = 113;</code>
* @param value The skipLocallyOptimalPaths to set.
* @return This builder for chaining.
*/
public Builder setSkipLocallyOptimalPaths(boolean value) {
skipLocallyOptimalPaths_ = value;
onChanged();
return this;
}
/**
* <pre>
* Skip locally optimal pairs of paths in PathOperators. Setting this
* parameter to true might skip valid neighbors if there are constraints
* linking paths together (such as precedences). In any other case this
* should only speed up the search without omitting any neighbors.
* </pre>
*
* <code>bool skip_locally_optimal_paths = 113;</code>
* @return This builder for chaining.
*/
public Builder clearSkipLocallyOptimalPaths() {
skipLocallyOptimalPaths_ = false;
onChanged();
return this;
}
private int checkSolutionPeriod_ ;
/**
* <pre>
* Control the behavior of local search.
* </pre>
*
* <code>int32 check_solution_period = 114;</code>
* @return The checkSolutionPeriod.
*/
public int getCheckSolutionPeriod() {
return checkSolutionPeriod_;
}
/**
* <pre>
* Control the behavior of local search.
* </pre>
*
* <code>int32 check_solution_period = 114;</code>
* @param value The checkSolutionPeriod to set.
* @return This builder for chaining.
*/
public Builder setCheckSolutionPeriod(int value) {
checkSolutionPeriod_ = value;
onChanged();
return this;
}
/**
* <pre>
* Control the behavior of local search.
* </pre>
*
* <code>int32 check_solution_period = 114;</code>
* @return This builder for chaining.
*/
public Builder clearCheckSolutionPeriod() {
checkSolutionPeriod_ = 0;
onChanged();
return this;
}
// Unknown-field passthrough: both overrides only narrow the return type and
// delegate to GeneratedMessageV3.Builder.
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:operations_research.ConstraintSolverParameters)
}
// @@protoc_insertion_point(class_scope:operations_research.ConstraintSolverParameters)
// Eagerly created singleton default instance (all fields at proto3 defaults),
// returned by both static and instance getDefaultInstance accessors.
private static final com.google.ortools.constraintsolver.ConstraintSolverParameters DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ortools.constraintsolver.ConstraintSolverParameters();
}
public static com.google.ortools.constraintsolver.ConstraintSolverParameters getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Shared singleton wire parser; delegates each parse to the generated
// parsing constructor, which reads fields directly from the CodedInputStream.
private static final com.google.protobuf.Parser<ConstraintSolverParameters>
PARSER = new com.google.protobuf.AbstractParser<ConstraintSolverParameters>() {
@java.lang.Override
public ConstraintSolverParameters parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new ConstraintSolverParameters(input, extensionRegistry);
}
};
// Static and instance accessors for the singleton PARSER, plus the outer
// message's default-instance accessor required by MessageLite.
public static com.google.protobuf.Parser<ConstraintSolverParameters> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ConstraintSolverParameters> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ortools.constraintsolver.ConstraintSolverParameters getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| [
"jnliang90@gmail.com"
] | jnliang90@gmail.com |
921e65fc1174495f63de51f88f03a2a9c2e72e7e | fc59aeeb55577739535facc80a2b2c62a4fc9d2c | /src/main/java/jdk/com/sun/corba/se/spi/activation/RepositoryPackage/ServerDefHelper.java | 286c17405b5aafc88ffb4cf71cb0d5a4f448abc4 | [] | no_license | goubo/javaDemo | 8dc1037b824ce71fab4b246fc437fa8aaa66b859 | 35234d9a2b410e17f1672c0b05d2f370b85f806a | refs/heads/main | 2023-03-12T08:27:41.194350 | 2021-02-24T02:08:14 | 2021-02-24T02:08:14 | 340,292,219 | 1 | 0 | null | null | null | null | UTF-8 | Java | false | false | 3,824 | java | package com.sun.corba.se.spi.activation.RepositoryPackage;
/**
* com/sun/corba/se/spi/activation/RepositoryPackage/ServerDefHelper.java .
* Generated by the IDL-to-Java compiler (portable), version "3.2"
* from /jenkins/workspace/8-2-build-macosx-x86_64/jdk8u271/605/corba/src/share/classes/com/sun/corba/se/spi/activation/activation.idl
* Wednesday, September 16, 2020 4:54:59 PM GMT
*/
/**
 * IDL-compiler-generated helper for the {@code ServerDef} struct: provides
 * CORBA {@code Any} insertion/extraction, the struct's {@code TypeCode},
 * and CDR stream marshalling. NOTE: generated code - regenerate from the
 * IDL rather than hand-editing the logic.
 */
abstract public class ServerDefHelper
{
  // Repository ID identifying ServerDef in the CORBA interface repository.
  private static String _id = "IDL:activation/Repository/ServerDef:1.0";
  /** Stores a ServerDef value (typed with its TypeCode) into the given Any. */
  public static void insert (org.omg.CORBA.Any a, com.sun.corba.se.spi.activation.RepositoryPackage.ServerDef that)
  {
    org.omg.CORBA.portable.OutputStream out = a.create_output_stream ();
    a.type (type ());
    write (out, that);
    a.read_value (out.create_input_stream (), type ());
  }
  /** Reads a ServerDef value back out of the given Any. */
  public static com.sun.corba.se.spi.activation.RepositoryPackage.ServerDef extract (org.omg.CORBA.Any a)
  {
    return read (a.create_input_stream ());
  }
  // Lazily-built TypeCode; __active guards against infinite recursion while
  // the TypeCode graph is under construction. Access is safe because type()
  // itself is a synchronized method.
  private static org.omg.CORBA.TypeCode __typeCode = null;
  private static boolean __active = false;
  /** Returns (building on first use) the TypeCode describing ServerDef. */
  synchronized public static org.omg.CORBA.TypeCode type ()
  {
    if (__typeCode == null)
    {
      synchronized (org.omg.CORBA.TypeCode.class)
      {
        if (__typeCode == null)
        {
          if (__active)
          {
            // Re-entered while already building: hand back a recursive placeholder.
            return org.omg.CORBA.ORB.init().create_recursive_tc ( _id );
          }
          __active = true;
          org.omg.CORBA.StructMember[] _members0 = new org.omg.CORBA.StructMember [5];
          org.omg.CORBA.TypeCode _tcOf_members0 = null;
          // All five struct members are unbounded strings (length bound 0).
          _tcOf_members0 = org.omg.CORBA.ORB.init ().create_string_tc (0);
          _members0[0] = new org.omg.CORBA.StructMember (
            "applicationName",
            _tcOf_members0,
            null);
          _tcOf_members0 = org.omg.CORBA.ORB.init ().create_string_tc (0);
          _members0[1] = new org.omg.CORBA.StructMember (
            "serverName",
            _tcOf_members0,
            null);
          _tcOf_members0 = org.omg.CORBA.ORB.init ().create_string_tc (0);
          _members0[2] = new org.omg.CORBA.StructMember (
            "serverClassPath",
            _tcOf_members0,
            null);
          _tcOf_members0 = org.omg.CORBA.ORB.init ().create_string_tc (0);
          _members0[3] = new org.omg.CORBA.StructMember (
            "serverArgs",
            _tcOf_members0,
            null);
          _tcOf_members0 = org.omg.CORBA.ORB.init ().create_string_tc (0);
          _members0[4] = new org.omg.CORBA.StructMember (
            "serverVmArgs",
            _tcOf_members0,
            null);
          __typeCode = org.omg.CORBA.ORB.init ().create_struct_tc (com.sun.corba.se.spi.activation.RepositoryPackage.ServerDefHelper.id (), "ServerDef", _members0);
          __active = false;
        }
      }
    }
    return __typeCode;
  }
  /** Returns the repository ID string for ServerDef. */
  public static String id ()
  {
    return _id;
  }
  /** Unmarshals a ServerDef from a CDR input stream; field order is fixed by the IDL. */
  public static com.sun.corba.se.spi.activation.RepositoryPackage.ServerDef read (org.omg.CORBA.portable.InputStream istream)
  {
    com.sun.corba.se.spi.activation.RepositoryPackage.ServerDef value = new com.sun.corba.se.spi.activation.RepositoryPackage.ServerDef ();
    value.applicationName = istream.read_string ();
    value.serverName = istream.read_string ();
    value.serverClassPath = istream.read_string ();
    value.serverArgs = istream.read_string ();
    value.serverVmArgs = istream.read_string ();
    return value;
  }
  /** Marshals a ServerDef to a CDR output stream in IDL declaration order. */
  public static void write (org.omg.CORBA.portable.OutputStream ostream, com.sun.corba.se.spi.activation.RepositoryPackage.ServerDef value)
  {
    ostream.write_string (value.applicationName);
    ostream.write_string (value.serverName);
    ostream.write_string (value.serverClassPath);
    ostream.write_string (value.serverArgs);
    ostream.write_string (value.serverVmArgs);
  }
}
| [
"goujinbo@outlook.com"
] | goujinbo@outlook.com |
4663362d6174a549ce9fcffb53187147bf7e3b1d | f92cbfcc13a845b68e036710c3931d30f1363c57 | /older/wayMQ-html5/src/ananas/waymq/ht5/litegroup/JSONResponderForEvent.java | da8d33bf54c50671ff391c7b15503bf4e419b509 | [
"Apache-2.0"
] | permissive | xukun0217/wayMQ | 298d807eff07feec2e7e5162356a4bc90eef5bb3 | 0f162f9930d9bd72c5f5bc9ac559719d7acc325a | refs/heads/master | 2021-03-12T19:35:25.574458 | 2014-06-24T15:02:47 | 2014-06-24T15:02:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 4,004 | java | package ananas.waymq.ht5.litegroup;
import java.io.File;
import java.util.Calendar;
import java.util.Set;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
/**
 * JSON responder for "event" requests in the lite-group module.
 *
 * Supported actions (taken from {@code rc._do}):
 * "get"   - return the event details, served from a cached output JSON
 *           that is rebuilt on demand;
 * "join"  - register a phone number as a participant;
 * "leave" - mark a participant as having left.
 */
public class JSONResponderForEvent implements JSONResponder {

	/**
	 * Dispatches the request to the matching handler, or reports
	 * {@code no_event} when the event directory does not exist.
	 */
	public void process(RequestContext rc) {
		DaoForEvent dao = new DaoForEvent(rc._repo, rc._target_path);
		File path = dao.getPath();
		if (!path.exists()) {
			System.err.println("no path " + path);
			rc._json.put("error", "no_event");
			return;
		}
		String todo = rc._do;
		if (todo.equals("get")) {
			this.doGet(rc, dao);
		} else if (todo.equals("join")) {
			this.doJoin(rc, dao);
		} else if (todo.equals("leave")) {
			this.doLeave(rc, dao);
		}
	}

	/**
	 * Copies the (possibly cached) event output JSON into the response
	 * and marks the request successful.
	 */
	private void doGet(RequestContext rc, DaoForEvent dao) {
		JSONObject out = dao.loadOutput();
		if (out == null) {
			out = this.buildOutput(rc, dao);
		}
		Set<String> keys = out.keySet();
		for (String key : keys) {
			Object value = out.get(key);
			rc._json.put(key, value);
		}
		rc._json.put("success", true);
	}

	/**
	 * Builds the event output JSON from the event info plus all join-ship
	 * records, adds derived fields (readable open time, group title),
	 * caches it via the DAO, and returns it.
	 */
	private JSONObject buildOutput(RequestContext rc, DaoForEvent dao) {
		JSONObject group = dao.getGroupDao().loadInfo();
		JSONObject json = dao.loadInfo();
		JSONArray joinList = new JSONArray();
		File[] jfs = dao.listJoinFile();
		for (File jf : jfs) {
			JSONObject join = dao.loadJoinShip(jf);
			joinList.add(join);
		}
		// Derived fields added on top of the raw event info.
		json.put("join", joinList);
		Helper.genStringByTime(json, "time_open");
		{
			String groupName = group.getString("title");
			json.put("group_title", groupName);
		}
		// Cache so later "get" requests skip the rebuild.
		dao.saveOutput(json);
		return json;
	}

	/**
	 * Marks the participant identified by the "phone" parameter as having
	 * left and invalidates the cached output. Unknown phone numbers are
	 * ignored silently (no "success" flag is set in that case).
	 */
	private void doLeave(RequestContext rc, DaoForEvent dao) {
		String phone = rc._request.getParameter("phone");
		JSONObject json = dao.loadJoinShip(phone);
		if (json == null) {
			return;
		}
		json.put("join", false);
		dao.saveJoinShip(phone, json);
		dao.clearOutput();
		rc._json.put("success", true);
	}

	/**
	 * Registers (or re-activates) a participant. The phone number is
	 * validated; a missing or blank nickname falls back to "unnamed".
	 */
	private void doJoin(RequestContext rc, DaoForEvent dao) {
		String phone = rc._request.getParameter("phone");
		String nickname = rc._request.getParameter("nickname");
		if (!Util.checkPhoneNumber(phone)) {
			rc._json.put("error", "bad_phone_number");
			return;
		}
		if (nickname == null || nickname.trim().length() == 0) {
			nickname = "unnamed";
		}
		JSONObject json = dao.loadJoinShip(phone);
		if (json == null) {
			json = new JSONObject();
			json.put("phone", phone);
			json.put("nickname", nickname);
		}
		json.put("join", true);
		dao.saveJoinShip(phone, json);
		dao.clearOutput();
		rc._json.put("success", true);
	}

	private static JSONResponderForEvent _inst;

	/**
	 * Returns the shared responder instance. Synchronized so concurrent
	 * first calls (e.g. from parallel servlet requests) cannot create two
	 * instances - the original unsynchronized lazy check was not
	 * thread-safe.
	 */
	public static synchronized JSONResponderForEvent getInstance() {
		if (_inst == null) {
			_inst = new JSONResponderForEvent();
		}
		return _inst;
	}

	/** Time-formatting helpers for the output JSON. */
	static class Helper {

		/**
		 * If {@code json} holds an epoch-millis value under {@code key},
		 * adds a human-readable copy under {@code key + "_text"}.
		 */
		public static void genStringByTime(JSONObject json, String key) {
			Object obj = json.get(key);
			if (obj == null)
				return;
			long time = json.getLongValue(key);
			json.put(key + "_text", timeToString(time));
		}

		// Chinese weekday names, index 0..6 = Sunday..Saturday.
		final static String[] weekday = { "日", "一", "二", "三", "四", "五", "六" };

		/**
		 * Formats epoch millis as {@code yyyy-MM-dd(weekday)HH:mm:ss}.
		 *
		 * Bug fixes vs. the original:
		 * - Calendar.DAY_OF_WEEK runs 1 (Sunday) .. 7 (Saturday), so the
		 *   correct array index is {@code week - 1}; the previous
		 *   {@code week % 7} shifted every day name forward by one.
		 * - Month/day and hour/minute/second are zero-padded, so e.g.
		 *   9:05:03 no longer renders as "9:5:3".
		 */
		public static String timeToString(long time) {
			Calendar cal = Calendar.getInstance();
			cal.setTimeInMillis(time);
			int week = cal.get(Calendar.DAY_OF_WEEK);
			return String.format("%d-%02d-%02d(%s)%02d:%02d:%02d",
					cal.get(Calendar.YEAR),
					cal.get(Calendar.MONTH) + 1,
					cal.get(Calendar.DAY_OF_MONTH),
					weekday[(week - 1) % weekday.length],
					cal.get(Calendar.HOUR_OF_DAY),
					cal.get(Calendar.MINUTE),
					cal.get(Calendar.SECOND));
		}
	}
}
| [
"xukun@tot"
] | xukun@tot |
f0431afd218e9a4190136c284ffce8d79a4522ac | d76e76e2b6322b4a821ec18f67c03156d5b1e70f | /src/com/zsy/frame/sample/java/control/designmode/structural/adapter/interfaceof/single/SourceSub1.java | e16b9d5094c76dffa41f958d9f740d993fe51350 | [] | no_license | sy-and/sample_java | f20749562741bedde83e824fc6653ab5859d0176 | c54afe0509873499c59cf9f63d8ebe978e5ef0dd | refs/heads/main | 2023-02-28T10:38:16.749939 | 2017-04-16T07:01:57 | 2017-04-16T07:01:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 247 | java | package com.zsy.frame.sample.java.control.designmode.structural.adapter.interfaceof.single;
public class SourceSub1 extends Wrapper {
public void method1(){
System.out.println("the sourceable interface's first Sub1!");
}
} | [
"shengyouzhang@qq.com"
] | shengyouzhang@qq.com |
89aca586e03ea91d7e24672bc034a91c5a7cd555 | b56c0a5696d40f7780667ea61dfde10dc11b5b92 | /src/main/java/entity/cards/TimeRip.java | 20a0ce8b2ceac4975275834b0fa4282a42b82a36 | [
"MIT"
] | permissive | godinj/EntityMod | deb85814992ef362f8327463367e859cc785abb0 | 64cd0d0743e31fc5d7b703f7c3283cc67fe50e84 | refs/heads/master | 2020-12-14T17:20:00.330988 | 2020-02-13T20:14:01 | 2020-02-13T20:14:01 | 234,822,715 | 0 | 0 | MIT | 2020-01-28T17:06:41 | 2020-01-19T01:37:10 | Java | UTF-8 | Java | false | false | 3,221 | java | package entity.cards;
import static entity.EntityMod.makeCardPath;
import com.megacrit.cardcrawl.actions.common.GainBlockAction;
import com.megacrit.cardcrawl.actions.common.GainEnergyAction;
import com.megacrit.cardcrawl.actions.common.MakeTempCardInDiscardAction;
import com.megacrit.cardcrawl.cards.status.VoidCard;
import com.megacrit.cardcrawl.characters.AbstractPlayer;
import com.megacrit.cardcrawl.core.CardCrawlGame;
import com.megacrit.cardcrawl.dungeons.AbstractDungeon;
import com.megacrit.cardcrawl.localization.CardStrings;
import com.megacrit.cardcrawl.monsters.AbstractMonster;
import entity.EntityMod;
import entity.characters.Entity;
/**
 * "Time Rip" - a 0-cost skill card for the Entity character: gains Block
 * and energy, at the price of shuffling Void status cards into the
 * discard pile.
 */
public class TimeRip extends AbstractDynamicCard {

    public static final String ID = EntityMod.makeID(TimeRip.class.getSimpleName());
    public static final String IMG = makeCardPath("AetherForm.png");

    private static final CardStrings cardStrings = CardCrawlGame.languagePack.getCardStrings(ID);
    public static final String DESCRIPTION = cardStrings.DESCRIPTION;
    public static final String[] EXTENDED_DESCRIPTION = cardStrings.EXTENDED_DESCRIPTION;

    private static final CardRarity RARITY = CardRarity.UNCOMMON;
    private static final CardTarget TARGET = CardTarget.SELF;
    private static final CardType TYPE = CardType.SKILL;
    public static final CardColor COLOR = Entity.Enums.COLOR_TEAL;

    private static final int COST = 0;
    private static final int BLOCK = 4;
    // Amount of energy gained on play.
    private static final int MAGIC = 1;
    private static final int UPGRADE_PLUS_MAGIC = 1;
    // Number of Void cards added to the discard pile.
    private static final int SELF_MAGIC = 1;

    public TimeRip() {
        super(ID, IMG, COST, TYPE, COLOR, RARITY, TARGET);
        this.baseBlock = BLOCK;
        this.block = BLOCK;
        this.baseMagicNumber = MAGIC;
        this.magicNumber = MAGIC;
        this.baseSelfMagicNumber = SELF_MAGIC;
        this.selfMagicNumber = SELF_MAGIC;
        generateAndInitializeExtendedDescription();
    }

    /**
     * Rebuilds the card text: base description, one energy icon per point
     * of magicNumber, then the Void-card clause (singular or plural).
     */
    public void generateAndInitializeExtendedDescription() {
        StringBuilder text = new StringBuilder(DESCRIPTION);
        for (int remaining = magicNumber; remaining > 0; remaining--) {
            text.append("[E] ");
        }
        text.append(EXTENDED_DESCRIPTION[0]);
        if (selfMagicNumber != 1) {
            text.append(selfMagicNumber).append(EXTENDED_DESCRIPTION[2]);
        } else {
            text.append(EXTENDED_DESCRIPTION[1]);
        }
        this.rawDescription = text.toString();
        initializeDescription();
    }

    @Override
    public void use(AbstractPlayer p, AbstractMonster m) {
        // Queue order matters: Block first, then energy, then the Void penalty.
        AbstractDungeon.actionManager.addToBottom(new GainBlockAction(p, p, this.block));
        AbstractDungeon.actionManager.addToBottom(new GainEnergyAction(this.magicNumber));
        AbstractDungeon.actionManager.addToBottom(
            new MakeTempCardInDiscardAction(new VoidCard(), this.selfMagicNumber));
    }

    @Override
    public void upgrade() {
        if (upgraded) {
            return;
        }
        upgradeName();
        upgradeMagicNumber(UPGRADE_PLUS_MAGIC);
        generateAndInitializeExtendedDescription();
    }
}
| [
"jon.m.godin@gmail.com"
] | jon.m.godin@gmail.com |
cc75513ffc5907d4899d9b2f8853eafd4e470f91 | f65fd4e60f10618e301ac50ae2cc0ac341255ea6 | /app/src/main/java/com/bignerdranch/android/photogallery/PhotoGalleryFragment.java | d5588a0dd9818ac429ec02bbbffe26ce35f63377 | [] | no_license | liruiAXE/PhotoGallery | 5bf30d52d3d3d4e29a5eb99dc41034a2bdbe6e06 | d3fc08d99469f688251b811cd5af561c73c13f3f | refs/heads/master | 2021-01-12T13:32:59.195907 | 2016-09-23T02:31:34 | 2016-09-23T02:31:34 | 68,981,163 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 13,892 | java | package com.bignerdranch.android.photogallery;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.graphics.Bitmap;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Handler;
import android.provider.ContactsContract;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.SearchView;
import android.util.Log;
import android.util.LruCache;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewTreeObserver;
import android.view.inputmethod.InputMethodManager;
import android.widget.ImageView;
import android.widget.ProgressBar;
import android.widget.TextView;
import android.content.Context;
import com.squareup.picasso.Picasso;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* Created by dlw on 2016/8/6.
*/
public class PhotoGalleryFragment extends VisibleFragment{
private boolean isSetLayoutManager;
private RecyclerView mPhotoRecycleView;
public static PhotoGalleryFragment newInstance() {
return new PhotoGalleryFragment();
}
private int page;
boolean loading;
PhotoAdapter adapter;
private ThumbnailDownloader<PhotoHolder> thumbnailDownloader;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setRetainInstance(true);
setHasOptionsMenu(true);
Intent i=PollService.newIntent(getContext());
getContext().startService(i);
isSetLayoutManager=false;
page=0;
// fetchItemsTask=new FetchItemsTask(null);
// fetchItemsTask.execute(new Integer(page+1));
updateItems();
loading=false;
Handler responseHandler=new Handler();
// thumbnailDownloader=new ThumbnailDownloader<>(responseHandler);//T??
// thumbnailDownloader.setThumbnailDownloadListener(new ThumbnailDownloader.ThumbnailDownloadListener<PhotoHolder>() {
// @Override
// public void onThumbnailDownloaded(PhotoHolder target, Bitmap thumbnail) {
// Drawable drawable = new BitmapDrawable(getResources(), thumbnail);
// target.bindDrawable(drawable);
// }
// });
// thumbnailDownloader.start();
// thumbnailDownloader.getLooper();
Log.i(TAG, "Background thread started");
GalleryItem a=new GalleryItem();
a.setmCaption("a");
GalleryItem b=new GalleryItem();
b.setmCaption("b");
test(a, b);
Log.i(TAG, a.getmCaption());
Log.i(TAG, b.getmCaption());
}
@Override
public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
super.onCreateOptionsMenu(menu, inflater);
inflater.inflate(R.menu.fragment_photo_gallery, menu);
final MenuItem searchItem=menu.findItem(R.id.menu_item_search);
searchItem.setShowAsAction(MenuItem.SHOW_AS_ACTION_COLLAPSE_ACTION_VIEW|MenuItem.SHOW_AS_ACTION_ALWAYS);
searchItem.setIcon(android.R.drawable.ic_menu_search);
final SearchView searchView=(SearchView)searchItem.getActionView();
searchView.setOnSearchClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
String query = QueryPreferences.getStoredQuery(getContext());
searchView.setQuery(query, false);
}
});
searchView.setOnQueryTextListener(new SearchView.OnQueryTextListener() {
@Override
public boolean onQueryTextSubmit(String query) {
InputMethodManager im = (InputMethodManager) getContext().getSystemService(Context.INPUT_METHOD_SERVICE);
im.hideSoftInputFromWindow(searchView.getWindowToken(), InputMethodManager.HIDE_IMPLICIT_ONLY);
searchItem.collapseActionView();
Log.d(TAG, "QueryTextSubmit: " + query);
QueryPreferences.setStoredQuery(getContext(), query);
updateItems();
return true;
}
@Override
public boolean onQueryTextChange(String newText) {
Log.d(TAG, "QueryTextChange: " + newText);
return false;
}
});
MenuItem toggleItem=menu.findItem(R.id.menu_item_toggle_polling);
if (PollService.isServiceAlarmOn(getContext())){
toggleItem.setTitle(R.string.stop_polling);
} else {
toggleItem.setTitle(R.string.start_polling);
}
}
private void updateItems(){
String query=QueryPreferences.getStoredQuery(getContext());
new FetchItemsTask(query).execute(1);
}
private void test(GalleryItem a,GalleryItem b){
GalleryItem t;
t=a;
a=b;
b=t;
Log.i(TAG,a.getmCaption());
Log.i(TAG,b.getmCaption());
}
private class PhotoHolder extends RecyclerView.ViewHolder implements View.OnClickListener{
private ImageView photo;
private GalleryItem item;
public PhotoHolder(View itemView) {
super(itemView);
photo=(ImageView)itemView.findViewById(R.id.fragment_photo_gallery_image_view);
photo.setOnClickListener(this);
}
// public void bindGalleryItem(GalleryItem item){
// mTitleTextView.setText(item.getmCaption());
// }
public void bindDrawable(Drawable drawable){
photo.setImageDrawable(drawable);
}
public void bindGalleryItem(GalleryItem item){
this.item=item;
Picasso.with(getContext())
.load(item.getmUrl())
.placeholder(R.drawable.ai)
.into(photo);
}
@Override
public void onClick(View v) {
Intent intent=new Intent(Intent.ACTION_VIEW,item.getPhotoPageUri());
Intent intent1=PhotoPageActivity.newIntent(getActivity(),item.getPhotoPageUri());
startActivity(intent1);
}
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()){
case R.id.menu_item_clear:
QueryPreferences.setStoredQuery(getContext(), null);
updateItems();
return true;
case R.id.menu_item_toggle_polling:
boolean shouldStartAlarm=!PollService.isServiceAlarmOn(getContext());
PollService.setServiceAlarm(getContext(),shouldStartAlarm);
getActivity().supportInvalidateOptionsMenu();
return true;
default: return super.onOptionsItemSelected(item);
}
}
private class PhotoAdapter extends RecyclerView.Adapter<PhotoHolder>{
private List<GalleryItem> items;
public PhotoAdapter(List<GalleryItem> items){
this.items=items;
}
@Override
public PhotoHolder onCreateViewHolder(ViewGroup parent, int viewType) {
View view=LayoutInflater.from(getContext()).inflate(R.layout.gallery_item,parent,false);
return new PhotoHolder(view);
}
public void preLoad(int num){
// int l=(page-1)*100;
// int r=l+num;
// Log.i(TAG,"preload num "+l);
// Log.i(TAG,"preload num "+r);
// for (int i=l;i<r;i++){
// String url=items.get(i).getmUrl();
// thumbnailDownloader.preLoad(url);
// }
}
@Override
public void onBindViewHolder(PhotoHolder holder, int position) {
// Drawable placeholder=getResources().getDrawable(R.drawable.bill_up_close);
// holder.bindDrawable(placeholder);
// GalleryItem galleryItem=items.get(position);
// thumbnailDownloader.queueThumbnail(holder,galleryItem.getmUrl());
// GridLayoutManager manager=(GridLayoutManager)mPhotoRecycleView.getLayoutManager();
// int firstp=manager.findFirstVisibleItemPosition();
// int lastp=manager.findLastVisibleItemPosition();
// int j1=position-firstp;
// int j2=lastp-position;
// int preLoadPos;
// if (j1<j2){
// preLoadPos=position-9;
// } else {
// preLoadPos=position+9;
// }
//
// if (0<=preLoadPos&&preLoadPos<page*100){
// String url=items.get(preLoadPos).getmUrl();
// thumbnailDownloader.preLoad(url);
// }
holder.bindGalleryItem(items.get(position));
}
@Override
public int getItemCount() {
return items.size();
}
}
@Nullable
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
View v=inflater.inflate(R.layout.fragment_photo_gallery,container,false);
mPhotoRecycleView=(RecyclerView)v.findViewById(R.id.fragment_photo_gallery_recycler_view);
// mPhotoRecycleView.setLayoutManager(new GridLayoutManager(getActivity(), 3));
mPhotoRecycleView.addOnScrollListener(new RecyclerView.OnScrollListener() {
@Override
public void onScrollStateChanged(RecyclerView recyclerView, int newState) {
super.onScrollStateChanged(recyclerView, newState);
// GridLayoutManager manager = (GridLayoutManager) recyclerView.getLayoutManager();
// if (newState == RecyclerView.SCROLL_STATE_IDLE) {
// int lastVisiblePosition = manager.findLastVisibleItemPosition();
// if (lastVisiblePosition > manager.getItemCount() -9) {
// Log.i(TAG, "Reach the END");
// if (!loading && page < 10) {
// loading = true;
// new FetchItemsTask(null).execute(page+1);
// }
// }
// }
}
});
setupAdapter(null);//maybe none
ViewTreeObserver observer=mPhotoRecycleView.getViewTreeObserver();
observer.addOnGlobalLayoutListener(new ViewTreeObserver.OnGlobalLayoutListener() {
@Override
public void onGlobalLayout() {
if (isSetLayoutManager) return;
isSetLayoutManager=true;
float desity=getResources().getDisplayMetrics().density;
int dp= (int)(mPhotoRecycleView.getWidth()/desity);
mPhotoRecycleView.setLayoutManager(new GridLayoutManager(getContext(), dp/120));
Log.i(TAG, "tree " + mPhotoRecycleView.getWidth());
Log.i(TAG,"tree "+dp);
}
});
return v;
}
private List<GalleryItem> mItems=new ArrayList<>();
private void setupAdapter(List<GalleryItem> list){
if (isAdded()){
if (mPhotoRecycleView.getAdapter()==null){
adapter=new PhotoAdapter(mItems);
mPhotoRecycleView.setAdapter(adapter);
} else {
mItems.addAll(list);
adapter.notifyDataSetChanged();
}
}
}
private void setupSearchAdapter(List<GalleryItem> items){
if (isAdded()){
if (mPhotoRecycleView.getAdapter()==null){
mItems.addAll(items);
adapter=new PhotoAdapter(items);
mPhotoRecycleView.setAdapter(adapter);
} else {
mItems.clear();
mItems.addAll(items);
adapter.notifyDataSetChanged();
mPhotoRecycleView.smoothScrollToPosition(0);
}
}
}
private static final String TAG="PhotoFrag";
private class FetchItemsTask extends AsyncTask<Integer,Void,List<GalleryItem>>{
private String mQuery;
public FetchItemsTask(String query){
mQuery=query;
}
ProgressDialog dialog;
@Override
protected List<GalleryItem> doInBackground(Integer... params) {
publishProgress();
if (mQuery==null){
Log.d(TAG, "doin mQuery==null");
Log.d(TAG,"EX",new Exception());
return new FlickrFetch().fetchRecentPhotos(params[0]);
} else {
Log.d(TAG, "doin search");
return new FlickrFetch().searchPhotos(mQuery);
}
}
@Override
protected void onPostExecute(List<GalleryItem> galleryItems) {
if (mQuery==null){
setupAdapter(galleryItems);
loading = false;
page=page+1;
adapter.preLoad(30);
} else {
setupSearchAdapter(galleryItems);
}
dialog.dismiss();
}
@Override
protected void onProgressUpdate(Void... values) {
super.onProgressUpdate(values);
dialog=new ProgressDialog(getContext());
dialog.setIndeterminate(true);
dialog.setTitle("从Flickr下载图片中。。。");
dialog.show();
}
}
@Override
public void onDestroy() {
super.onDestroy();
// thumbnailDownloader.quit();
// thumbnailDownloader.clearQueue();
Log.i(TAG,"Background thread destroy");
}
}
| [
"1420692335@qq.com"
] | 1420692335@qq.com |
4d8dda0288046821c2c1ca95aff7ad4f8a008262 | 850efe57551253e32648d3638f8680c362fc939e | /src/main/java/com/dryork/vision/base/util/Validate.java | 7bf35176a77b51b0c4a8a3467dae92ca4599d5f3 | [] | no_license | vivs0/dcmServer | 26713fbf0b51a2f4b8e33adbf1876e4fb7e0102f | 2b361ee391a6f33c0b6bc139eb41fef67130a3e6 | refs/heads/master | 2020-03-23T15:03:09.160143 | 2017-07-24T02:32:58 | 2017-07-24T02:32:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 5,111 | java | package com.dryork.vision.base.util;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Regex-based validators for common input formats: usernames, passwords,
 * Chinese mobile numbers, e-mail addresses, Chinese text, ID-card numbers,
 * URLs, MAC addresses, IPv4 addresses, and dates.
 *
 * <p>All patterns are compiled once into {@code static final Pattern}
 * fields and reused ({@code Pattern.matches} recompiles on every call).
 * The public {@code REGEX_*} String constants are kept byte-identical for
 * source compatibility.
 */
public class Validate {

	/** Regex: username - a letter followed by 5-17 word characters. */
	public static final String REGEX_USERNAME = "^[a-zA-Z]\\w{5,17}$";

	/** Regex: password - 6-16 letters or digits. */
	public static final String REGEX_PASSWORD = "^[a-zA-Z0-9]{6,16}$";

	/** Regex: Chinese mobile number (13x / 15x / 18x prefixes, 11 digits). */
	public static final String REGEX_MOBILE = "^((13[0-9])|(15[^4,\\D])|(18[0,5-9]))\\d{8}$";

	/** Regex: e-mail address. */
	public static final String REGEX_EMAIL = "^([a-z0-9A-Z]+[-|\\.]?)+[a-z0-9A-Z]@([a-z0-9A-Z]+(-[a-z0-9A-Z]+)?\\.)+[a-zA-Z]{2,}$";

	/**
	 * Regex: Chinese characters. NOTE: this historical value only matches a
	 * single Chinese character followed by literal commas (the "," is almost
	 * certainly a typo for a quantifier). It is kept unchanged for source
	 * compatibility; {@link #isChinese(String)} uses a corrected pattern.
	 */
	public static final String REGEX_CHINESE = "^[\u4e00-\u9fa5],{0,}$";

	/** Regex: ID-card number - exactly 15 or 18 digits. */
	public static final String REGEX_ID_CARD = "(^\\d{18}$)|(^\\d{15}$)";

	/** Regex: HTTP/HTTPS URL. */
	public static final String REGEX_URL = "http(s)?://([\\w-]+\\.)+[\\w-]+(/[\\w- ./?%&=]*)?";

	/** Regex: MAC address - upper-case hex pairs separated by "-". */
	public static final String REGEX_MAC = "^[A-F0-9]{2}(-[A-F0-9]{2}){5}$";

	/**
	 * Regex: a single IPv4 octet (0-255). NOTE: on its own this does not
	 * describe a full address; {@link #isIPAddr(String)} joins four octets.
	 */
	public static final String REGEX_IP_ADDR = "(25[0-5]|2[0-4]\\d|[0-1]\\d{2}|[1-9]?\\d)";

	/** Regex: Gregorian date such as 2016-12-12 (leap years handled). */
	public static final String REGEX_DATE= "^((\\d{2}(([02468][048])|([13579][26]))[\\-\\/\\s]?((((0?[13578])|(1[02]))[\\-\\/\\s]?((0?[1-9])|([1-2][0-9])|(3[01])))|(((0?[469])|(11))[\\-\\/\\s]?((0?[1-9])|([1-2][0-9])|(30)))|(0?2[\\-\\/\\s]?((0?[1-9])|([1-2][0-9])))))|(\\d{2}(([02468][1235679])|([13579][01345789]))[\\-\\/\\s]?((((0?[13578])|(1[02]))[\\-\\/\\s]?((0?[1-9])|([1-2][0-9])|(3[01])))|(((0?[469])|(11))[\\-\\/\\s]?((0?[1-9])|([1-2][0-9])|(30)))|(0?2[\\-\\/\\s]?((0?[1-9])|(1[0-9])|(2[0-8]))))))";

	// --- Precompiled patterns (compiled once, reused on every call). ---
	private static final Pattern USERNAME_PATTERN = Pattern.compile(REGEX_USERNAME);
	private static final Pattern PASSWORD_PATTERN = Pattern.compile(REGEX_PASSWORD);
	private static final Pattern MOBILE_PATTERN = Pattern.compile(REGEX_MOBILE);
	private static final Pattern EMAIL_PATTERN = Pattern.compile(REGEX_EMAIL);
	// Corrected: one or more Chinese characters (see the REGEX_CHINESE note).
	private static final Pattern CHINESE_PATTERN = Pattern.compile("^[\u4e00-\u9fa5]+$");
	private static final Pattern ID_CARD_PATTERN = Pattern.compile(REGEX_ID_CARD);
	private static final Pattern URL_PATTERN = Pattern.compile(REGEX_URL);
	private static final Pattern MAC_PATTERN = Pattern.compile(REGEX_MAC);
	// Full IPv4 address: four octets joined by dots.
	private static final Pattern IP_ADDR_PATTERN =
			Pattern.compile(REGEX_IP_ADDR + "(\\." + REGEX_IP_ADDR + "){3}");
	private static final Pattern DATE_PATTERN = Pattern.compile(REGEX_DATE);
	// Dotted quad where each segment is 0-255 or the "*" wildcard.
	private static final Pattern IP_WILDCARD_PATTERN = Pattern.compile(
			"^((\\d|[1-9]\\d|1\\d\\d|2[0-4]\\d|25[0-5]|[*])\\.){3}(\\d|[1-9]\\d|1\\d\\d|2[0-4]\\d|25[0-5]|[*])$");

	/**
	 * Validates a username.
	 *
	 * @param username candidate username
	 * @return true if it matches {@link #REGEX_USERNAME}
	 */
	public static boolean isUsername(String username) {
		return USERNAME_PATTERN.matcher(username).matches();
	}

	/**
	 * Validates a password.
	 *
	 * @param password candidate password
	 * @return true if it matches {@link #REGEX_PASSWORD}
	 */
	public static boolean isPassword(String password) {
		return PASSWORD_PATTERN.matcher(password).matches();
	}

	/**
	 * Validates a Chinese mobile phone number.
	 *
	 * @param mobile candidate number
	 * @return true if it matches {@link #REGEX_MOBILE}
	 */
	public static boolean isMobile(String mobile) {
		return MOBILE_PATTERN.matcher(mobile).matches();
	}

	/**
	 * Validates an e-mail address.
	 *
	 * @param email candidate address
	 * @return true if it matches {@link #REGEX_EMAIL}
	 */
	public static boolean isEmail(String email) {
		return EMAIL_PATTERN.matcher(email).matches();
	}

	/**
	 * Validates that the string consists entirely of Chinese characters.
	 * Bug fix: the original pattern (see {@link #REGEX_CHINESE}) matched
	 * only a single character followed by literal commas, so multi-character
	 * Chinese input such as "汉字" was rejected.
	 *
	 * @param chinese candidate string
	 * @return true if the string is one or more Chinese characters
	 */
	public static boolean isChinese(String chinese) {
		return CHINESE_PATTERN.matcher(chinese).matches();
	}

	/**
	 * Validates an ID-card number (15 or 18 digits).
	 *
	 * @param idCard candidate number
	 * @return true if it matches {@link #REGEX_ID_CARD}
	 */
	public static boolean isIDCard(String idCard) {
		return ID_CARD_PATTERN.matcher(idCard).matches();
	}

	/**
	 * Validates an HTTP/HTTPS URL.
	 *
	 * @param url candidate URL
	 * @return true if it matches {@link #REGEX_URL}
	 */
	public static boolean isUrl(String url) {
		return URL_PATTERN.matcher(url).matches();
	}

	/**
	 * Validates a complete dotted-quad IPv4 address.
	 * Bug fix: the original implementation matched {@link #REGEX_IP_ADDR}
	 * directly, which only accepts a single octet (e.g. "200") and could
	 * never accept a full address like "192.168.1.1".
	 *
	 * @param ipAddr candidate address
	 * @return true if the string is four octets (0-255) joined by dots
	 */
	public static boolean isIPAddr(String ipAddr) {
		return IP_ADDR_PATTERN.matcher(ipAddr).matches();
	}

	public static void main(String[] args) {
		String username = "fdsdfsdj";
		System.out.println(Validate.isUsername(username));
		System.out.println(Validate.isChinese(username));
	}

	/**
	 * Validates a MAC address (upper-case, "-" separated).
	 *
	 * @param mac candidate address
	 * @return true if the format is valid
	 */
	public static boolean getMac(String mac) {
		// The pattern is ^...$-anchored, so find() and matches() agree here.
		return MAC_PATTERN.matcher(mac).find();
	}

	/**
	 * Validates a dotted-quad IPv4 address where each segment may also be
	 * the "*" wildcard (e.g. "10.0.0.*").
	 *
	 * @param ip candidate address
	 * @return true if the format is valid
	 */
	public static boolean getIp(String ip) {
		return IP_WILDCARD_PATTERN.matcher(ip).find();
	}

	/**
	 * Validates a date string such as "2016-12-12" (also accepts "/" or
	 * whitespace separators; leap years are handled).
	 *
	 * @param date candidate date string
	 * @return true if the format is valid
	 */
	public static boolean getDate(String date) {
		return DATE_PATTERN.matcher(date).matches();
	}
}
| [
"guo_yuhao@foxmail.com"
] | guo_yuhao@foxmail.com |
a9738f932208673a71356820c97ff190a6a2b71c | 236aa54730c9c540a36b62457e2447e6098da3fe | /CloudyMail/lib_src/net/fortuna/ical4j/model/component/VAlarm.java | 3b910ec52021015566e6ac00c64d277d91db64bf | [
"Apache-2.0"
] | permissive | cocalele/GloryMail | f38ea753bac2ea464253a04bcc26ecc3e5c5ff7b | f5250bd53f0b45e4dd25adc3940902d20f5597c2 | refs/heads/master | 2020-08-06T21:06:16.125665 | 2017-10-11T15:26:56 | 2017-10-11T15:26:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 16,121 | java | /**
* Copyright (c) 2012, Ben Fortuna
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* o Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* o Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* o Neither the name of Ben Fortuna nor the names of any other contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.fortuna.ical4j.model.component;
import java.util.HashMap;
import java.util.Map;
import net.fortuna.ical4j.model.DateTime;
import net.fortuna.ical4j.model.Dur;
import net.fortuna.ical4j.model.Property;
import net.fortuna.ical4j.model.PropertyList;
import net.fortuna.ical4j.model.ValidationException;
import net.fortuna.ical4j.model.Validator;
import net.fortuna.ical4j.model.property.Action;
import net.fortuna.ical4j.model.property.Attach;
import net.fortuna.ical4j.model.property.Description;
import net.fortuna.ical4j.model.property.Duration;
import net.fortuna.ical4j.model.property.Method;
import net.fortuna.ical4j.model.property.Repeat;
import net.fortuna.ical4j.model.property.Summary;
import net.fortuna.ical4j.model.property.Trigger;
import net.fortuna.ical4j.util.PropertyValidator;
/**
* $Id$ [Apr 5, 2004]
*
* Defines an iCalendar VALARM component.
*
* <pre>
* 4.6.6 Alarm Component
*
* Component Name: VALARM
*
* Purpose: Provide a grouping of component properties that define an
* alarm.
*
* Formal Definition: A "VALARM" calendar component is defined by the
* following notation:
*
* alarmc = "BEGIN" ":" "VALARM" CRLF
* (audioprop / dispprop / emailprop / procprop)
* "END" ":" "VALARM" CRLF
*
* audioprop = 2*(
*
* ; 'action' and 'trigger' are both REQUIRED,
* ; but MUST NOT occur more than once
*
* action / trigger /
*
* ; 'duration' and 'repeat' are both optional,
* ; and MUST NOT occur more than once each,
* ; but if one occurs, so MUST the other
*
* duration / repeat /
*
* ; the following is optional,
* ; but MUST NOT occur more than once
*
* attach /
*
* ; the following is optional,
* ; and MAY occur more than once
*
* x-prop
*
* )
*
*
*
* dispprop = 3*(
*
* ; the following are all REQUIRED,
* ; but MUST NOT occur more than once
*
* action / description / trigger /
*
* ; 'duration' and 'repeat' are both optional,
* ; and MUST NOT occur more than once each,
* ; but if one occurs, so MUST the other
*
* duration / repeat /
*
* ; the following is optional,
* ; and MAY occur more than once
*
* *x-prop
*
* )
*
*
*
* emailprop = 5*(
*
* ; the following are all REQUIRED,
* ; but MUST NOT occur more than once
*
* action / description / trigger / summary
*
* ; the following is REQUIRED,
* ; and MAY occur more than once
*
* attendee /
*
* ; 'duration' and 'repeat' are both optional,
* ; and MUST NOT occur more than once each,
* ; but if one occurs, so MUST the other
*
* duration / repeat /
*
* ; the following are optional,
* ; and MAY occur more than once
*
* attach / x-prop
*
* )
*
*
*
* procprop = 3*(
*
* ; the following are all REQUIRED,
* ; but MUST NOT occur more than once
*
* action / attach / trigger /
*
* ; 'duration' and 'repeat' are both optional,
* ; and MUST NOT occur more than once each,
* ; but if one occurs, so MUST the other
*
* duration / repeat /
*
* ; 'description' is optional,
* ; and MUST NOT occur more than once
*
* description /
*
* ; the following is optional,
* ; and MAY occur more than once
*
* x-prop
*
* )
* </pre>
*
* Example 1 - Creating an alarm to trigger at a specific time:
*
* <pre><code>
* java.util.Calendar cal = java.util.Calendar.getInstance();
* cal.set(java.util.Calendar.MONTH, java.util.Calendar.DECEMBER);
* cal.set(java.util.Calendar.DAY_OF_MONTH, 25);
*
* VAlarm christmas = new VAlarm(cal.getTime());
* </code></pre>
*
* Example 2 - Creating an alarm to trigger one (1) hour before the scheduled start of the parent event/the parent todo
* is due:
*
* <pre><code>
* VAlarm reminder = new VAlarm(new Dur(0, -1, 0, 0));
*
* // repeat reminder four (4) more times every fifteen (15) minutes..
* reminder.getProperties().add(new Repeat(4));
* reminder.getProperties().add(new Duration(new Dur(0, 0, 15, 0)));
*
* // display a message..
* reminder.getProperties().add(Action.DISPLAY);
* reminder.getProperties().add(new Description("Progress Meeting at 9:30am"));
* </code></pre>
*
* @author Ben Fortuna
*/
public class VAlarm extends CalendarComponent {
private static final long serialVersionUID = -8193965477414653802L;
private final Map<Action, Validator> actionValidators = new HashMap<Action, Validator>();
{
actionValidators.put(Action.AUDIO, new AudioValidator());
actionValidators.put(Action.DISPLAY, new DisplayValidator());
actionValidators.put(Action.EMAIL, new EmailValidator());
actionValidators.put(Action.PROCEDURE, new ProcedureValidator());
}
private final Validator itipValidator = new ITIPValidator();
/**
* Default constructor.
*/
public VAlarm() {
super(VALARM);
}
/**
* Constructor.
* @param properties a list of properties
*/
public VAlarm(final PropertyList properties) {
super(VALARM, properties);
}
/**
* Constructs a new VALARM instance that will trigger at the specified time.
* @param trigger the time the alarm will trigger
*/
public VAlarm(final DateTime trigger) {
this();
getProperties().add(new Trigger(trigger));
}
/**
* Constructs a new VALARM instance that will trigger at the specified time relative to the event/todo component.
* @param trigger a duration of time relative to the parent component that the alarm will trigger at
*/
public VAlarm(final Dur trigger) {
this();
getProperties().add(new Trigger(trigger));
}
/**
* {@inheritDoc}
*/
public final void validate(final boolean recurse)
throws ValidationException {
/*
* ; 'action' and 'trigger' are both REQUIRED, ; but MUST NOT occur more than once action / trigger /
*/
PropertyValidator.getInstance().assertOne(Property.ACTION, getProperties());
PropertyValidator.getInstance().assertOne(Property.TRIGGER, getProperties());
/*
* ; 'duration' and 'repeat' are both optional, ; and MUST NOT occur more than once each, ; but if one occurs,
* so MUST the other duration / repeat /
*/
PropertyValidator.getInstance().assertOneOrLess(Property.DURATION, getProperties());
PropertyValidator.getInstance().assertOneOrLess(Property.REPEAT, getProperties());
try {
PropertyValidator.getInstance().assertNone(Property.DURATION, getProperties());
PropertyValidator.getInstance().assertNone(Property.REPEAT, getProperties());
}
catch (ValidationException ve) {
PropertyValidator.getInstance().assertOne(Property.DURATION, getProperties());
PropertyValidator.getInstance().assertOne(Property.REPEAT, getProperties());
}
/*
* ; the following is optional, ; and MAY occur more than once x-prop
*/
final Validator actionValidator = (Validator) actionValidators.get(getAction());
if (actionValidator != null) {
actionValidator.validate();
}
if (recurse) {
validateProperties();
}
}
/**
* {@inheritDoc}
*/
protected Validator getValidator(Method method) {
return itipValidator;
}
private class AudioValidator implements Validator {
private static final long serialVersionUID = 1L;
/**
* {@inheritDoc}
*/
public void validate() throws ValidationException {
/*
* ; the following is optional, ; but MUST NOT occur more than once attach /
*/
PropertyValidator.getInstance().assertOneOrLess(Property.ATTACH, getProperties());
}
}
private class DisplayValidator implements Validator {
private static final long serialVersionUID = 1L;
/**
* {@inheritDoc}
*/
public void validate() throws ValidationException {
/*
* ; the following are all REQUIRED, ; but MUST NOT occur more than once action / description / trigger /
*/
PropertyValidator.getInstance().assertOne(Property.DESCRIPTION, getProperties());
}
}
private class EmailValidator implements Validator {
private static final long serialVersionUID = 1L;
/**
* {@inheritDoc}
*/
public void validate() throws ValidationException {
/*
* ; the following are all REQUIRED,
* ; but MUST NOT occur more than once action / description / trigger / summary
* ; the following is REQUIRED,
* ; and MAY occur more than once attendee /
* ; 'duration' and 'repeat' are both optional,
* ; and MUST NOT occur more than once each,
* ; but if one occurs, so MUST the other duration / repeat /
* ; the following are optional,
* ; and MAY occur more than once attach / x-prop
*/
PropertyValidator.getInstance().assertOne(Property.DESCRIPTION, getProperties());
PropertyValidator.getInstance().assertOne(Property.SUMMARY, getProperties());
PropertyValidator.getInstance().assertOneOrMore(Property.ATTENDEE, getProperties());
}
}
private class ProcedureValidator implements Validator {
private static final long serialVersionUID = 1L;
/**
* {@inheritDoc}
*/
public void validate() throws ValidationException {
/*
* ; the following are all REQUIRED,
* ; but MUST NOT occur more than once action / attach / trigger /
* ; 'duration' and 'repeat' are both optional,
* ; and MUST NOT occur more than once each,
* ; but if one occurs, so MUST the other duration / repeat /
* ; 'description' is optional,
* ; and MUST NOT occur more than once description /
* ; the following is optional, ; and MAY occur more than once x-prop
*/
PropertyValidator.getInstance().assertOne(Property.ATTACH, getProperties());
PropertyValidator.getInstance().assertOneOrLess(Property.DESCRIPTION, getProperties());
}
}
/**
* Common validation for all iTIP methods.
*
* <pre>
* Component/Property Presence
* ------------------- ----------------------------------------------
* VALARM 0+
* ACTION 1
* ATTACH 0+
* DESCRIPTION 0 or 1
* DURATION 0 or 1 if present REPEAT MUST be present
* REPEAT 0 or 1 if present DURATION MUST be present
* SUMMARY 0 or 1
* TRIGGER 1
* X-PROPERTY 0+
* </pre>
*/
private class ITIPValidator implements Validator {
private static final long serialVersionUID = 1L;
/**
* {@inheritDoc}
*/
public void validate() throws ValidationException {
PropertyValidator.getInstance().assertOne(Property.ACTION, getProperties());
PropertyValidator.getInstance().assertOne(Property.TRIGGER, getProperties());
PropertyValidator.getInstance().assertOneOrLess(Property.DESCRIPTION, getProperties());
PropertyValidator.getInstance().assertOneOrLess(Property.DURATION, getProperties());
PropertyValidator.getInstance().assertOneOrLess(Property.REPEAT, getProperties());
PropertyValidator.getInstance().assertOneOrLess(Property.SUMMARY, getProperties());
}
}
/**
* Returns the mandatory action property.
* @return the ACTION property or null if not specified
*/
public final Action getAction() {
return (Action) getProperty(Property.ACTION);
}
/**
* Returns the mandatory trigger property.
* @return the TRIGGER property or null if not specified
*/
public final Trigger getTrigger() {
return (Trigger) getProperty(Property.TRIGGER);
}
/**
* Returns the optional duration property.
* @return the DURATION property or null if not specified
*/
public final Duration getDuration() {
return (Duration) getProperty(Property.DURATION);
}
/**
* Returns the optional repeat property.
* @return the REPEAT property or null if not specified
*/
public final Repeat getRepeat() {
return (Repeat) getProperty(Property.REPEAT);
}
/**
* Returns the optional attachment property.
* @return the ATTACH property or null if not specified
*/
public final Attach getAttachment() {
return (Attach) getProperty(Property.ATTACH);
}
/**
* Returns the optional description property.
* @return the DESCRIPTION property or null if not specified
*/
public final Description getDescription() {
return (Description) getProperty(Property.DESCRIPTION);
}
/**
* Returns the optional summary property.
* @return the SUMMARY property or null if not specified
*/
public final Summary getSummary() {
return (Summary) getProperty(Property.SUMMARY);
}
}
| [
"support@glorycloud.com.cn"
] | support@glorycloud.com.cn |
3c7e2a0a14f5d1a0dfb241a0c519f0bb47a394f6 | e755cfe2a740da115834e46f7e54f49f759d4cb0 | /src/engineTester/Score.java | 80f5c68f12392f95d9dbce5d7f3c364b453af747 | [
"MIT"
] | permissive | harryi3t/Save-The-Ball | d961dd3bc82df3c629f3447bccc5d420ae7f94b2 | ee726c01ac79203e2c78e67f12e207d7fa7f5a4a | refs/heads/master | 2020-05-18T08:14:25.232292 | 2016-12-15T06:53:33 | 2016-12-15T06:53:33 | 32,817,364 | 2 | 1 | null | null | null | null | UTF-8 | Java | false | false | 6,130 | java | package engineTester;
import java.awt.Font;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import org.lwjgl.opengl.Display;
import org.lwjgl.opengl.GL11;
import org.newdawn.slick.Color;
import org.newdawn.slick.TrueTypeFont;
import org.newdawn.slick.opengl.Texture;
import org.newdawn.slick.opengl.TextureLoader;
import org.newdawn.slick.util.ResourceLoader;
public class Score {
private Texture texture1;
private Texture texture2;
/** The fonts to draw to the screen */
private TrueTypeFont font;
private TrueTypeFont font2;
private int width;
private int height;
private float currentScore = 0;
private int currentSessionScore;
private int highScore;
/** Boolean flag on whether AntiAliasing is enabled or not */
private boolean antiAlias = true;
/**
* Initialise resources
*/
public Score() {
readHighScore();
//load a default java font
Font awtFont = new Font("Times New Roman", Font.BOLD, 24);
font = new TrueTypeFont(awtFont, antiAlias);
// load font from file
try {
InputStream inputStream = ResourceLoader.getResourceAsStream("res/fonts/brick.ttf");
Font awtFont2 = Font.createFont(Font.TRUETYPE_FONT, inputStream);
awtFont2 = awtFont2.deriveFont(50f); // set font size
font2 = new TrueTypeFont(awtFont2, antiAlias);
} catch (Exception e) {
e.printStackTrace();
}
}
public void prepare(){
GL11.glDisable(GL11.GL_TEXTURE_2D);
GL11.glShadeModel(GL11.GL_SMOOTH);
GL11.glDisable(GL11.GL_DEPTH_TEST);
GL11.glDisable(GL11.GL_LIGHTING);
GL11.glClearDepth(1);
GL11.glEnable(GL11.GL_BLEND);
GL11.glBlendFunc(GL11.GL_SRC_ALPHA, GL11.GL_ONE_MINUS_SRC_ALPHA);
GL11.glViewport(0,0,width,height);
GL11.glMatrixMode(GL11.GL_MODELVIEW);
GL11.glMatrixMode(GL11.GL_PROJECTION);
GL11.glLoadIdentity();
GL11.glOrtho(0, width, height, 0, 1, -1);
GL11.glMatrixMode(GL11.GL_MODELVIEW);
}
public void updateScore(float ballSpeed,int width, int height) {
this.height = height;
this.width = width;
if(currentSessionScore==0) // the ball has not yet fallen
currentScore += ballSpeed/10;
prepare();
font2.drawString(0, 0, "", Color.yellow);
font.drawString(width-200, 20, "Score "+(int)currentScore, Color.green);
}
public void showScore(){
prepare();
int x = width/2-300;
int y = height/2-100;
GL11.glColor3f(1, 1, 1);
GL11.glRectf(x-50, y-150, x+640, y+200);
font2.drawString(x, y, "Your Score = "+(int)currentScore, Color.red);
font2.drawString(x, y + 100, "High Score = "+highScore, Color.black);
font2.drawString(x+100,y-100, "GAME OVER", Color.magenta);
font.drawString(x+100,y+170, "Please <Return> to Restart . . .", Color.black);
}
public void storeSessionScore() {
currentSessionScore = (int) currentScore;
if(currentSessionScore > highScore){
writeHighScore(currentSessionScore);
highScore = currentSessionScore;
}
}
public void setCurrentSessionScore(int currentSessionScore) {
this.currentSessionScore = currentSessionScore;
}
public void setCurrentScore(float currentScore) {
this.currentScore = currentScore;
}
public void readHighScore(){
try {
FileReader reader = new FileReader("res/score/highScore.txt");
char[] score = new char[5];
char r;
StringBuilder builder = new StringBuilder();
while(Character.isDigit(r=(char) reader.read())){
builder.append(r);
}
highScore = Integer.parseInt(builder.toString());
} catch (IOException | NumberFormatException e) {
// No high score
highScore = 0;
System.out.println("No high score found");
}
}
public void writeHighScore(int score){
try {
FileWriter writer = new FileWriter("res/score/highScore.txt", false);
writer.write(String.valueOf(score));
writer.close();
} catch (IOException e) {
e.printStackTrace();
}
}
public void loadPlayImage(String image1, String image2) {
try {
texture1 = TextureLoader.getTexture("PNG", new FileInputStream(image1));
texture2 = TextureLoader.getTexture("PNG", new FileInputStream(image2));
} catch (IOException e) {
e.printStackTrace();
}
}
public void renderPlayButton(boolean isPlayButtonHovered){
int w = Display.getWidth(), h = Display.getHeight();
int mw = Display.getDesktopDisplayMode().getWidth(); // max width
int mh = Display.getDesktopDisplayMode().getHeight(); // max height
int size = (int) (200*(h*w/(float)(mw*mh)));
int x=w/2 - size/2, y=h/2 + size/2;
GL11.glEnable(GL11.GL_TEXTURE_2D);
GL11.glShadeModel(GL11.GL_SMOOTH);
GL11.glDisable(GL11.GL_DEPTH_TEST);
GL11.glDisable(GL11.GL_LIGHTING);
GL11.glClearDepth(1);
GL11.glEnable(GL11.GL_BLEND);
GL11.glBlendFunc(GL11.GL_SRC_ALPHA, GL11.GL_ONE_MINUS_SRC_ALPHA);
GL11.glViewport(0,0,width,height);
GL11.glMatrixMode(GL11.GL_MODELVIEW);
GL11.glMatrixMode(GL11.GL_PROJECTION);
GL11.glLoadIdentity();
GL11.glOrtho(0, width, height, 0, 1, -1);
GL11.glMatrixMode(GL11.GL_MODELVIEW);
if(isPlayButtonHovered){
texture2.bind();
}
else{
GL11.glColor3f(1, 1, 1);
texture1.bind();
}
GL11.glBegin(GL11.GL_QUADS);
GL11.glTexCoord2f(0, 0);
GL11.glVertex2i(x, y);
GL11.glTexCoord2f(0, 1);
GL11.glVertex2i(x, y+size);
GL11.glTexCoord2f(1, 1);
GL11.glVertex2i(x+size, y+size);
GL11.glTexCoord2f(1, 0);
GL11.glVertex2i(x+size, y);
GL11.glEnd();
}
} | [
"2011195@iiitdmj.ac.in"
] | 2011195@iiitdmj.ac.in |
2ff14469645069533eccea5b59bf03f56f212aa6 | de622e401c2ff3d540ff4e822140cab986204bad | /app/src/main/java/com/cdkj/hydz/module/model/OrderBodyModel.java | c92c39d8a6f459699eb203db2ccf5ebc3cd7de77 | [] | no_license | ibisTime/xn-dzt-bfront-android | e6d41d0c36c64b17c7cdd0539bdaa9975968afc5 | 8c2ebd49cf87956a293252162106b63910c8f209 | refs/heads/master | 2021-01-19T14:59:34.125763 | 2017-11-30T02:05:49 | 2017-11-30T02:05:49 | 100,936,020 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 1,145 | java | package com.cdkj.hydz.module.model;
/**
* Created by lei on 2017/8/23.
*/
/**
 * Bean describing one line of an order form: an identifier key, a display
 * name, the current value (with its own key), an optional remark, and a flag
 * saying whether the line is selectable. All string fields default to the
 * empty string so callers never see null unless a setter stored one.
 */
public class OrderBodyModel {

    private String key = "";
    private String name = "";
    private String value = "";
    private String valueKey = "";
    private String remark = "";
    private boolean canSelect;

    /** @return the identifier key of this entry (defaults to ""). */
    public String getKey() {
        return key;
    }

    public void setKey(String key) {
        this.key = key;
    }

    /** @return the display name of this entry (defaults to ""). */
    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    /** @return the current value of this entry (defaults to ""). */
    public String getValue() {
        return value;
    }

    public void setValue(String value) {
        this.value = value;
    }

    /** @return the key associated with the current value (defaults to ""). */
    public String getValueKey() {
        return valueKey;
    }

    public void setValueKey(String valueKey) {
        this.valueKey = valueKey;
    }

    /** @return the free-form remark for this entry (defaults to ""). */
    public String getRemark() {
        return remark;
    }

    public void setRemark(String remark) {
        this.remark = remark;
    }

    /** @return whether this entry may be selected (defaults to false). */
    public boolean isCanSelect() {
        return canSelect;
    }

    public void setCanSelect(boolean canSelect) {
        this.canSelect = canSelect;
    }
}
| [
"lei@leideMacBook-Pro.local"
] | lei@leideMacBook-Pro.local |
86eac1928c57213118f198a3a691c1c450e4b963 | 24b4b8ae8f1318e04239c40650531d73c04929d0 | /agile.java/code/02.base/test/test/EnumTest.java | 5515410481e511db2d88d4d85af9bb1202682ae4 | [] | no_license | houziershi/study | 11c4a855013cab1e868165beb4eeea89ca59b46a | 2c6dcdf392abb58d37635a4641a98738554f93b9 | refs/heads/master | 2020-12-25T22:29:11.165117 | 2014-02-15T03:46:18 | 2014-02-15T03:46:18 | 16,856,860 | 1 | 0 | null | null | null | null | UTF-8 | Java | false | false | 513 | java | package test;
import static org.junit.Assert.assertEquals;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import stringtools.Gender;
import stringtools.Color;
/**
 * Unit tests for the {@code Gender} and {@code Color} enums: round-trips
 * each constant through {@code toString()} and {@code valueOf(String)}.
 */
public class EnumTest {

    @Test
    public void testGender() {
        Gender g = Gender.male;
        assertEquals("male", g.toString());
        assertEquals(g, Gender.valueOf("male"));
    }

    @Test
    public void testColor() {
        Color c = Color.RED;
        assertEquals("RED(255,0,0)", c.toString());
        // valueOf is a static method: call it on the type rather than through
        // an instance (the original used c.valueOf("RED")).
        assertEquals(Color.RED, Color.valueOf("RED"));
    }
}
| [
"evokeralucard@gmail.com"
] | evokeralucard@gmail.com |
95de99cf23cb3032b6dc99fd77431d1545216adc | 5a9c3f3305428c5669c3d536f30780472ba09968 | /src/main/java/service/AddToCart.java | f9474e3fa2ffb936c88160ba71f9873274714ffd | [] | no_license | yongju6/WebMarket-Cor | a1cd35009d05c0782cd29b30d8fd0ce15804e7b4 | 8c7e909b652e92b757ede4fdf20270101a841f3e | refs/heads/master | 2023-04-22T22:41:39.823151 | 2021-05-12T08:05:36 | 2021-05-12T08:05:36 | 360,821,139 | 0 | 0 | null | null | null | null | UHC | Java | false | false | 2,139 | java | package service;
import java.io.IOException;
import java.util.ArrayList;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import DTO.Product;
/**
* Servlet implementation class AddToCart
*/
@WebServlet("/cart/add")
public class AddToCart extends HttpServlet {
    private static final long serialVersionUID = 1L;

    /**
     * @see HttpServlet#HttpServlet()
     */
    public AddToCart() {
        super();
    }

    /**
     * Adds the product number supplied as the {@code productId} request
     * parameter to the session-scoped cart list ({@code goodsList}),
     * creating the list on first use, then answers 204 No Content.
     *
     * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response)
     */
    protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        String productId = request.getParameter("productId");
        if (productId == null || productId.isEmpty()) {
            // Reject malformed requests: the original stored a literal null
            // in the cart when the parameter was missing.
            response.sendError(HttpServletResponse.SC_BAD_REQUEST);
            return;
        }

        // Fetch the session holding the user's cart information.
        HttpSession session = request.getSession();

        // Take the cart (goodsList) out of the session; the attribute is only
        // ever written as ArrayList<String> below, so the cast is safe.
        @SuppressWarnings("unchecked")
        ArrayList<String> goodsList = (ArrayList<String>) session.getAttribute("goodsList");
        if(goodsList == null) {
            goodsList = new ArrayList<String>();
        }

        goodsList.add(productId);
        session.setAttribute("goodsList", goodsList);

        response.setStatus(204);
    }

    /**
     * @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse response)
     */
    protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        // POST behaves identically to GET.
        doGet(request, response);
    }
}
| [
"jyongju928@gmail.com"
] | jyongju928@gmail.com |
24f2849bf51a32c7cbdc2f173d9762ceed1d8e60 | e207c6bae632964c4eb76690275ab195c60f789a | /spring-kafka-producer/src/test/java/com/saber/spring/kafka/producer/SpringKafkaProducerTestApplicationTests.java | a0e01b9b62de29a9319b69fd419d7152f2869825 | [] | no_license | saberlee1987/spring-kafka-test | 2a01c9fdf16b7bd1ea9b8c704d54ddb64a7191f6 | 74c0a5266e533cb3b4b9cb68707c0f12172395ff | refs/heads/master | 2023-04-05T17:26:30.982359 | 2021-05-07T22:28:35 | 2021-05-07T22:28:35 | 365,320,587 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 249 | java | package com.saber.spring.kafka.producer;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
@SpringBootTest
class SpringKafkaProducerTestApplicationTests {

    /**
     * Smoke test: passes when the Spring application context starts without
     * throwing, i.e. all beans and configuration can be wired. The empty body
     * is intentional — {@code @SpringBootTest} performs the context load.
     */
    @Test
    void contextLoads() {
    }

}
| [
"saberazizi66@yahoo.com"
] | saberazizi66@yahoo.com |
da34fc3ff314967cefbd363c3263d662b7f8ebfe | 34e0d0aac782a234ca9a56b349a8f86006ea16e4 | /src/main/java/ro/ubb/olympics/ui/command/impl/ManageParticipations.java | 303750c39cf7acfdc5005621566ef869e6763569 | [] | no_license | fazecasdavid/OlympicsManager | e58cd2879425905213ceecfbd7c0e3a46c663cca | 4442b947a0a9a5d74042a5c214b1e7f7052e5392 | refs/heads/main | 2023-03-17T19:05:58.517934 | 2021-03-16T12:14:36 | 2021-03-16T12:14:36 | 348,334,762 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 2,487 | java | package ro.ubb.olympics.ui.command.impl;
import ro.ubb.olympics.controller.Controller;
import ro.ubb.olympics.ui.command.Command;
import ro.ubb.olympics.ui.command.impl.participation.AddParticipation;
import ro.ubb.olympics.ui.command.impl.participation.DeleteParticipation;
import ro.ubb.olympics.ui.command.impl.participation.FindParticipationById;
import ro.ubb.olympics.ui.command.impl.participation.ManageParticipationFilters;
import ro.ubb.olympics.ui.command.impl.participation.SeeAllParticipations;
import ro.ubb.olympics.ui.command.impl.participation.UpdateParticipation;
import ro.ubb.olympics.utils.ConsoleUtils;
import java.util.HashMap;
import java.util.Map;
import java.util.Scanner;
/**
* Submenu that manages participations.
*/
public class ManageParticipations extends Command {
private final Controller controller;
private final Scanner scanner;
private final Map<String, Command> commands;
/**
* Initializes the command.
*
* @param key the key of the command
* @param description the command's description
* @param controller the controller used in the operations
* @param scanner the scanner which provides user input
*/
public ManageParticipations(final String key, final String description, final Controller controller, final Scanner scanner) {
super(key, description);
this.controller = controller;
this.scanner = scanner;
this.commands = new HashMap<>();
this.initializeCommands();
}
/**
* Initializes the commands available to the user.
*/
private void initializeCommands() {
ConsoleUtils.addCommand(new AddParticipation("1", "Add a new participation.", controller, scanner), commands);
ConsoleUtils.addCommand(new DeleteParticipation("2", "Delete a participation.", controller, scanner), commands);
ConsoleUtils.addCommand(new UpdateParticipation("3", "Update a participation.", controller, scanner), commands);
ConsoleUtils.addCommand(new FindParticipationById("4", "Find a participation by its ID.", controller, scanner), commands);
ConsoleUtils.addCommand(new SeeAllParticipations("5", "See all participations.", controller, scanner), commands);
ConsoleUtils.addCommand(new ManageParticipationFilters("6", "Filter participations.", controller, scanner), commands);
}
@Override
public void execute() {
ConsoleUtils.runMenu(scanner, commands);
}
} | [
"fazecasdavid@gmail.com"
] | fazecasdavid@gmail.com |
b3b90e4a34b9e7737e0186bb53245566a8587e56 | 84300cbcad1c21543bbdce68dc7d13fe2913297f | /de.squidward-gui/src/main/java/de/picman/gui/components/PictureTransferHandler.java | ac2f61929cdfb90d9114b7c0b8733d6bebac1889 | [] | no_license | Jerady/squidward | b414d3014d478e73aea150a54fd5a1fb30ce94da | 3213a3dc299d2c7c2bba53432e54a0f7987c5140 | refs/heads/master | 2021-01-02T09:19:52.367813 | 2012-01-18T22:03:42 | 2012-01-18T22:03:42 | 2,156,089 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 3,273 | java | package de.picman.gui.components;
import java.awt.datatransfer.DataFlavor;
import java.awt.datatransfer.Transferable;
import java.awt.datatransfer.UnsupportedFlavorException;
import java.io.IOException;
import javax.swing.JComponent;
import javax.swing.JList;
import javax.swing.TransferHandler;
import de.picman.backend.db.Picture;
/**
 * Drag-and-drop handler that transfers {@code Picture} objects between Swing
 * components inside the same JVM, importing dropped pictures into the
 * {@code PictureClipboard} singleton.
 */
public class PictureTransferHandler extends TransferHandler {

    private static final long serialVersionUID = 1615536956826455661L;

    /** Flavor describing an in-JVM Picture object; null if creation failed. */
    private DataFlavor pictureFlavor;
    private String pictureFlavorType = DataFlavor.javaJVMLocalObjectMimeType+";class=de.picman.backend.db.Picture";
    /** The JList the last drag originated from. */
    private JList source;

    public PictureTransferHandler() {
        try {
            pictureFlavor = new DataFlavor(pictureFlavorType);
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        }
    }

    /**
     * Returns true if one of the given flavors is the picture flavor.
     * Always false when the flavor could not be created in the constructor.
     */
    private boolean hasPictureFlavor(DataFlavor[] flavors) {
        if (pictureFlavor == null) {
            return false;
        }
        for (DataFlavor flavor : flavors) {
            if (flavor.equals(pictureFlavor)) {
                return true;
            }
        }
        return false;
    }

    @Override
    public boolean canImport(JComponent comp, DataFlavor[] transferFlavors) {
        return hasPictureFlavor(transferFlavors);
    }

    @Override
    public boolean canImport(TransferSupport support) {
        return hasPictureFlavor(support.getDataFlavors());
    }

    @Override
    public boolean importData(JComponent comp, Transferable t) {
        if (!canImport(comp, t.getTransferDataFlavors())) {
            return false;
        }
        Picture pic = null;
        try {
            if (hasPictureFlavor(t.getTransferDataFlavors())) {
                pic = (Picture) t.getTransferData(pictureFlavor);
            } else {
                return false;
            }
        } catch (UnsupportedFlavorException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
        if (pic == null) {
            // Extraction failed (exception above): report failure instead of
            // storing a null entry in the clipboard, as the original did.
            return false;
        }
        PictureClipboard.getInstance().addPicture(pic);
        return true;
    }

    @Override
    protected Transferable createTransferable(JComponent c) {
        if (c instanceof JList) {
            source = (JList) c;
            Object value = source.getSelectedValue();
            if (value == null || !(value instanceof Picture)) {
                return null;
            }
            Picture pic = (Picture) value;
            return new PictureTransferable(pic);
        }
        return super.createTransferable(c);
    }

    @Override
    public int getSourceActions(JComponent c) {
        return COPY_OR_MOVE;
    }

    /**
     * Transferable wrapper exposing a single Picture under the picture flavor.
     */
    public class PictureTransferable implements Transferable {

        private Picture picture;

        public PictureTransferable(Picture picture) {
            this.picture = picture;
        }

        @Override
        public boolean isDataFlavorSupported(DataFlavor flavor) {
            return pictureFlavor.equals(flavor);
        }

        @Override
        public DataFlavor[] getTransferDataFlavors() {
            return new DataFlavor[] {
                pictureFlavor
            };
        }

        @Override
        public Object getTransferData(DataFlavor flavor)
                throws UnsupportedFlavorException, IOException {
            if (!isDataFlavorSupported(flavor)) {
                throw new UnsupportedFlavorException(flavor);
            }
            return picture;
        }
    }
}
| [
"mail@jensd.de"
] | mail@jensd.de |
9faefa42d731afb169833e2b6816008bc7d19b83 | 8dc84558f0058d90dfc4955e905dab1b22d12c08 | /third_party/android_tools/sdk/sources/android-25/com/android/internal/app/procstats/PssTable.java | b6df983d2ac8c0067aa4dec2ca9d849df93f97d1 | [
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause",
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MIT",
"Apache-2.0"
] | permissive | meniossin/src | 42a95cc6c4a9c71d43d62bc4311224ca1fd61e03 | 44f73f7e76119e5ab415d4593ac66485e65d700a | refs/heads/master | 2022-12-16T20:17:03.747113 | 2020-09-03T10:43:12 | 2020-09-03T10:43:12 | 263,710,168 | 1 | 0 | BSD-3-Clause | 2020-05-13T18:20:09 | 2020-05-13T18:20:08 | null | UTF-8 | Java | false | false | 4,224 | java | /*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.internal.app.procstats;
import static com.android.internal.app.procstats.ProcessStats.PSS_SAMPLE_COUNT;
import static com.android.internal.app.procstats.ProcessStats.PSS_MINIMUM;
import static com.android.internal.app.procstats.ProcessStats.PSS_AVERAGE;
import static com.android.internal.app.procstats.ProcessStats.PSS_MAXIMUM;
import static com.android.internal.app.procstats.ProcessStats.PSS_USS_MINIMUM;
import static com.android.internal.app.procstats.ProcessStats.PSS_USS_AVERAGE;
import static com.android.internal.app.procstats.ProcessStats.PSS_USS_MAXIMUM;
import static com.android.internal.app.procstats.ProcessStats.PSS_COUNT;
/**
* Class to accumulate PSS data.
*/
/**
 * Class to accumulate PSS data.
 */
public class PssTable extends SparseMappingTable.Table {
    /**
     * Construct the PssTable with 'tableData' as backing store
     * for the longs data.
     */
    public PssTable(SparseMappingTable tableData) {
        super(tableData);
    }

    /**
     * Merge the values from the other table into this one.
     */
    public void mergeStats(PssTable that) {
        final int N = that.getKeyCount();
        for (int i=0; i<N; i++) {
            final int key = that.getKeyAt(i);
            final int state = SparseMappingTable.getIdFromKey(key);
            mergeStats(state, (int)that.getValue(key, PSS_SAMPLE_COUNT),
                    that.getValue(key, PSS_MINIMUM),
                    that.getValue(key, PSS_AVERAGE),
                    that.getValue(key, PSS_MAXIMUM),
                    that.getValue(key, PSS_USS_MINIMUM),
                    that.getValue(key, PSS_USS_AVERAGE),
                    that.getValue(key, PSS_USS_MAXIMUM));
        }
    }

    /**
     * Merge the supplied PSS data in. The new min pss will be the minimum of the existing
     * one and the new one, the average will now incorporate the new average, etc.
     */
    public void mergeStats(int state, int inCount, long minPss, long avgPss, long maxPss,
            long minUss, long avgUss, long maxUss) {
        final int key = getOrAddKey((byte)state, PSS_COUNT);
        final long count = getValue(key, PSS_SAMPLE_COUNT);
        if (count == 0) {
            // First sample for this state: take the incoming values verbatim.
            setValue(key, PSS_SAMPLE_COUNT, inCount);
            setValue(key, PSS_MINIMUM, minPss);
            setValue(key, PSS_AVERAGE, avgPss);
            setValue(key, PSS_MAXIMUM, maxPss);
            setValue(key, PSS_USS_MINIMUM, minUss);
            setValue(key, PSS_USS_AVERAGE, avgUss);
            setValue(key, PSS_USS_MAXIMUM, maxUss);
        } else {
            setValue(key, PSS_SAMPLE_COUNT, count + inCount);

            long val;

            val = getValue(key, PSS_MINIMUM);
            if (val > minPss) {
                setValue(key, PSS_MINIMUM, minPss);
            }

            // Weighted average of the stored and incoming PSS averages.
            val = getValue(key, PSS_AVERAGE);
            setValue(key, PSS_AVERAGE,
                    (long)(((val*(double)count)+(avgPss*(double)inCount)) / (count+inCount)));

            val = getValue(key, PSS_MAXIMUM);
            if (val < maxPss) {
                setValue(key, PSS_MAXIMUM, maxPss);
            }

            val = getValue(key, PSS_USS_MINIMUM);
            if (val > minUss) {
                setValue(key, PSS_USS_MINIMUM, minUss);
            }

            // Weighted average of the stored and incoming USS averages.
            // BUG FIX: the original wrote this result to PSS_AVERAGE, which
            // clobbered the PSS average with USS data and left the USS
            // average never updated.
            val = getValue(key, PSS_USS_AVERAGE);
            setValue(key, PSS_USS_AVERAGE,
                    (long)(((val*(double)count)+(avgUss*(double)inCount)) / (count+inCount)));

            val = getValue(key, PSS_USS_MAXIMUM);
            if (val < maxUss) {
                setValue(key, PSS_USS_MAXIMUM, maxUss);
            }
        }
    }
}
| [
"arnaud@geometry.ee"
] | arnaud@geometry.ee |
a7d492aa199d92cebc24369323caa890031e3fbe | 4a589f392f9c021e1b7e7541df342f9e3c05a170 | /src/main/java/com/evertix/masterregister/controller/commons/MessageResponse.java | b9cc2b93b9a7cefc4819f9b5fc720b7eb42ce819 | [] | no_license | MSAlbert40/master-register-api | 6ba061796f0117f307ae1cb731caf5afaf7f4a9b | 5053b766fecf134740fa8ac06a2c87bc060e6761 | refs/heads/master | 2023-08-29T14:58:17.068728 | 2021-11-08T19:30:16 | 2021-11-08T19:30:16 | 408,036,267 | 0 | 0 | null | 2021-09-22T23:25:36 | 2021-09-19T05:03:33 | Java | UTF-8 | Java | false | false | 438 | java | package com.evertix.masterregister.controller.commons;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import lombok.Builder;
import lombok.Data;
import java.io.Serializable;
import java.util.Date;
/**
 * Generic envelope for API responses: an application status code, a human
 * readable message, an arbitrary payload, and the moment the response
 * instance was built. Getters/setters, equals/hashCode/toString and the
 * builder are generated by Lombok ({@code @Data}, {@code @Builder}).
 */
@Data
@Builder
@JsonPropertyOrder
public class MessageResponse implements Serializable {
    // Application-level status code carried in the response body.
    private Integer code;
    // Human-readable description of the outcome.
    private String message;
    // Optional payload; type depends on the endpoint producing the response.
    private Object data;
    // Creation timestamp; @Builder.Default keeps this initializer when the
    // instance is produced through the generated builder.
    @Builder.Default
    private Date date = new Date();
}
"54085998+MSAlbert40@users.noreply.github.com"
] | 54085998+MSAlbert40@users.noreply.github.com |
49f010d25c4d15609b608cc40bcf5d82d1a1988b | 3f1c66ba4e86ff25ddacf8fe741e4b087c65e221 | /singleton/src/test/java/com/iluwatar/singleton/ignore/StaticClassTest.java | 6ba661f2d2df9623cdbe7efefd7694262645ce53 | [
"MIT"
] | permissive | moxingwang/java-design-patterns | 8d189379b9906126c235395d6d83598c008eae10 | 47b7bef783b7ddc6d65449dd2cacae08d9592648 | refs/heads/master | 2020-03-23T09:16:20.651857 | 2018-07-19T08:21:05 | 2018-07-19T08:21:05 | 141,377,581 | 0 | 0 | MIT | 2018-07-18T03:44:01 | 2018-07-18T03:44:00 | null | UTF-8 | Java | false | false | 649 | java | package com.iluwatar.singleton.ignore;
/**
* @description:
* @author: MoXingwang 2018-07-18 14:45
**/
public class StaticClassTest {
static {
System.out.println("outer static block");
}
public static void main(String[] args) {
System.out.println("outer");
//验证内部类的加载顺序和机制
StaticClassTest.Inner.getInner();
}
public static final class Inner {
public static Inner getInner() {
System.out.println("init a inner");
return new Inner();
}
static {
System.out.println("inner static block");
}
}
}
| [
"xingwang.mo@chinaredstar.com"
] | xingwang.mo@chinaredstar.com |
ea4a681a27be9f29e9253228c544fe13306ab54e | e977c424543422f49a25695665eb85bfc0700784 | /benchmark/icse15/1140355/buggy-version/db/derby/code/branches/10.7/java/engine/org/apache/derby/impl/sql/catalog/DataDictionaryImpl.java | 1608b0144d687eeb30fc3d74315e94bab6cfd880 | [] | no_license | amir9979/pattern-detector-experiment | 17fcb8934cef379fb96002450d11fac62e002dd3 | db67691e536e1550245e76d7d1c8dced181df496 | refs/heads/master | 2022-02-18T10:24:32.235975 | 2019-09-13T15:42:55 | 2019-09-13T15:42:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 448,686 | java | /*
Derby - Class org.apache.derby.impl.sql.catalog.DataDictionaryImpl
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derby.impl.sql.catalog;
import org.apache.derby.iapi.reference.Attribute;
import org.apache.derby.iapi.reference.EngineType;
import org.apache.derby.iapi.reference.JDBC30Translation;
import org.apache.derby.iapi.reference.Property;
import org.apache.derby.iapi.reference.SQLState;
import org.apache.derby.iapi.reference.Limits;
import org.apache.derby.iapi.sql.conn.Authorizer;
import org.apache.derby.iapi.sql.dictionary.AliasDescriptor;
import org.apache.derby.iapi.sql.dictionary.CatalogRowFactory;
import org.apache.derby.iapi.sql.dictionary.ColumnDescriptor;
import org.apache.derby.iapi.sql.dictionary.ColumnDescriptorList;
import org.apache.derby.iapi.sql.dictionary.FileInfoDescriptor;
import org.apache.derby.iapi.sql.dictionary.ConglomerateDescriptor;
import org.apache.derby.iapi.sql.dictionary.ConstraintDescriptor;
import org.apache.derby.iapi.sql.dictionary.ConglomerateDescriptorList;
import org.apache.derby.iapi.sql.dictionary.ConstraintDescriptorList;
import org.apache.derby.iapi.sql.dictionary.DataDescriptorGenerator;
import org.apache.derby.iapi.sql.dictionary.DataDictionary;
import org.apache.derby.iapi.sql.dictionary.DefaultDescriptor;
import org.apache.derby.iapi.sql.dictionary.DependencyDescriptor;
import org.apache.derby.iapi.sql.dictionary.ForeignKeyConstraintDescriptor;
import org.apache.derby.iapi.sql.dictionary.GenericDescriptorList;
import org.apache.derby.iapi.sql.dictionary.TupleDescriptor;
import org.apache.derby.iapi.sql.dictionary.IndexRowGenerator;
import org.apache.derby.iapi.sql.dictionary.KeyConstraintDescriptor;
import org.apache.derby.iapi.sql.dictionary.TablePermsDescriptor;
import org.apache.derby.iapi.sql.dictionary.ColPermsDescriptor;
import org.apache.derby.iapi.sql.dictionary.RoutinePermsDescriptor;
import org.apache.derby.iapi.sql.dictionary.PermissionsDescriptor;
import org.apache.derby.iapi.sql.dictionary.ReferencedKeyConstraintDescriptor;
import org.apache.derby.iapi.sql.dictionary.RoleGrantDescriptor;
import org.apache.derby.iapi.sql.dictionary.RoleClosureIterator;
import org.apache.derby.iapi.sql.dictionary.SPSDescriptor;
import org.apache.derby.iapi.sql.dictionary.SchemaDescriptor;
import org.apache.derby.iapi.sql.dictionary.CheckConstraintDescriptor;
import org.apache.derby.iapi.sql.dictionary.SubCheckConstraintDescriptor;
import org.apache.derby.iapi.sql.dictionary.SubConstraintDescriptor;
import org.apache.derby.iapi.sql.dictionary.SubKeyConstraintDescriptor;
import org.apache.derby.iapi.sql.dictionary.TableDescriptor;
import org.apache.derby.iapi.sql.dictionary.TriggerDescriptor;
import org.apache.derby.iapi.sql.dictionary.ViewDescriptor;
import org.apache.derby.iapi.sql.dictionary.SystemColumn;
import org.apache.derby.iapi.sql.dictionary.SequenceDescriptor;
import org.apache.derby.iapi.sql.dictionary.PermDescriptor;
import org.apache.derby.iapi.sql.depend.DependencyManager;
import org.apache.derby.impl.sql.compile.CollectNodesVisitor;
import org.apache.derby.impl.sql.compile.ColumnReference;
import org.apache.derby.impl.sql.compile.FromBaseTable;
import org.apache.derby.impl.sql.compile.QueryTreeNode;
import org.apache.derby.impl.sql.compile.StatementNode;
import org.apache.derby.impl.sql.compile.TableName;
import org.apache.derby.impl.sql.depend.BasicDependencyManager;
import org.apache.derby.iapi.sql.execute.ExecIndexRow;
import org.apache.derby.iapi.sql.execute.ExecutionContext;
import org.apache.derby.iapi.sql.execute.ExecutionFactory;
import org.apache.derby.iapi.sql.execute.ScanQualifier;
import org.apache.derby.iapi.types.DataValueFactory;
import org.apache.derby.iapi.types.NumberDataValue;
import org.apache.derby.iapi.types.SQLBoolean;
import org.apache.derby.iapi.types.SQLChar;
import org.apache.derby.iapi.types.SQLLongint;
import org.apache.derby.iapi.types.SQLVarchar;
import org.apache.derby.iapi.types.StringDataValue;
import org.apache.derby.iapi.types.TypeId;
import org.apache.derby.iapi.types.UserType;
import org.apache.derby.iapi.types.DataTypeDescriptor;
import org.apache.derby.iapi.types.DataValueDescriptor;
import org.apache.derby.iapi.sql.conn.LanguageConnectionContext;
import org.apache.derby.iapi.sql.conn.LanguageConnectionFactory;
import org.apache.derby.iapi.store.access.AccessFactory;
import org.apache.derby.iapi.store.access.ConglomerateController;
import org.apache.derby.iapi.types.Orderable;
import org.apache.derby.iapi.types.RowLocation;
import org.apache.derby.iapi.store.access.RowUtil;
import org.apache.derby.iapi.store.access.ScanController;
import org.apache.derby.iapi.store.access.TransactionController;
import org.apache.derby.iapi.store.access.Qualifier;
import org.apache.derby.iapi.services.monitor.Monitor;
import org.apache.derby.iapi.services.monitor.ModuleControl;
import org.apache.derby.iapi.services.monitor.ModuleSupportable;
import org.apache.derby.iapi.services.context.ContextManager;
import org.apache.derby.iapi.services.context.ContextService;
import org.apache.derby.iapi.error.StandardException;
// RESOLVE - paulat - remove this import when track 3677 is fixed
import org.apache.derby.iapi.services.sanity.AssertFailure;
import org.apache.derby.iapi.sql.execute.ExecRow;
import org.apache.derby.iapi.sql.execute.TupleFilter;
import org.apache.derby.iapi.services.sanity.SanityManager;
import org.apache.derby.iapi.services.cache.CacheFactory;
import org.apache.derby.iapi.services.cache.CacheManager;
import org.apache.derby.iapi.services.cache.Cacheable;
import org.apache.derby.iapi.services.cache.CacheableFactory;
import org.apache.derby.iapi.services.locks.LockFactory;
import org.apache.derby.iapi.services.locks.C_LockFactory;
import org.apache.derby.iapi.services.property.PropertyUtil;
import org.apache.derby.impl.services.locks.Timeout;
import org.apache.derby.iapi.services.uuid.UUIDFactory;
import org.apache.derby.catalog.AliasInfo;
import org.apache.derby.catalog.DefaultInfo;
import org.apache.derby.catalog.TypeDescriptor;
import org.apache.derby.catalog.UUID;
import org.apache.derby.catalog.types.BaseTypeIdImpl;
import org.apache.derby.catalog.types.RoutineAliasInfo;
import org.apache.derby.iapi.services.io.FormatableBitSet;
import org.apache.derby.iapi.services.locks.CompatibilitySpace;
import org.apache.derby.iapi.services.locks.ShExLockable;
import org.apache.derby.iapi.services.locks.ShExQual;
import org.apache.derby.iapi.util.IdUtil;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.Hashtable;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Properties;
import java.util.Vector;
import java.util.List;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Enumeration;
import java.io.InputStream;
import java.io.IOException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.sql.Types;
// LOBStoredProcedure is imported only to get hold of a constant.
import org.apache.derby.impl.jdbc.LOBStoredProcedure;
/**
* Standard database implementation of the data dictionary
* that stores the information in the system catlogs.
*/
public final class DataDictionaryImpl
implements DataDictionary, CacheableFactory, ModuleControl, ModuleSupportable,java.security.PrivilegedAction
{
private static final String CFG_SYSTABLES_ID = "SystablesIdentifier";
private static final String CFG_SYSTABLES_INDEX1_ID = "SystablesIndex1Identifier";
private static final String CFG_SYSTABLES_INDEX2_ID = "SystablesIndex2Identifier";
private static final String CFG_SYSCOLUMNS_ID = "SyscolumnsIdentifier";
private static final String CFG_SYSCOLUMNS_INDEX1_ID = "SyscolumnsIndex1Identifier";
private static final String CFG_SYSCOLUMNS_INDEX2_ID = "SyscolumnsIndex2Identifier";
private static final String CFG_SYSCONGLOMERATES_ID = "SysconglomeratesIdentifier";
private static final String CFG_SYSCONGLOMERATES_INDEX1_ID = "SysconglomeratesIndex1Identifier";
private static final String CFG_SYSCONGLOMERATES_INDEX2_ID = "SysconglomeratesIndex2Identifier";
private static final String CFG_SYSCONGLOMERATES_INDEX3_ID = "SysconglomeratesIndex3Identifier";
private static final String CFG_SYSSCHEMAS_ID = "SysschemasIdentifier";
private static final String CFG_SYSSCHEMAS_INDEX1_ID = "SysschemasIndex1Identifier";
private static final String CFG_SYSSCHEMAS_INDEX2_ID = "SysschemasIndex2Identifier";
private static final int SYSCONGLOMERATES_CORE_NUM = 0;
private static final int SYSTABLES_CORE_NUM = 1;
private static final int SYSCOLUMNS_CORE_NUM = 2;
private static final int SYSSCHEMAS_CORE_NUM = 3;
private static final int NUM_CORE = 4;
/**
* SYSFUN functions. Table of functions that automatically appear
* in the SYSFUN schema. These functions are resolved to directly
* if no schema name is given, e.g.
*
* <code>
* SELECT COS(angle) FROM ROOM_WALLS
* </code>
*
* Adding a function here is suitable when the function defintion
* can have a single return type and fixed parameter types.
*
* Functions that need to have a return type based upon the
* input type(s) are not supported here. Typically those are
* added into the parser and methods added into the DataValueDescriptor interface.
* Examples are character based functions whose return type
* length is based upon the passed in type, e.g. passed a CHAR(10)
* returns a CHAR(10).
*
*
* This simple table can handle an arbitrary number of arguments
* and RETURNS NULL ON NULL INPUT. The scheme could be expanded
* to handle other function options such as other parameters if needed.
*[0] = FUNCTION name
*[1] = RETURNS type
*[2] = Java class
*[3] = method name and signature
*[4] = "true" or "false" depending on whether the function is DETERMINSTIC
*[5..N] = arguments (optional, if not present zero arguments is assumed)
*
*/
private static final String[][] SYSFUN_FUNCTIONS = {
{"ACOS", "DOUBLE", "java.lang.StrictMath", "acos(double)", "true", "DOUBLE"},
{"ASIN", "DOUBLE", "java.lang.StrictMath", "asin(double)", "true", "DOUBLE"},
{"ATAN", "DOUBLE", "java.lang.StrictMath", "atan(double)", "true", "DOUBLE"},
{"ATAN2", "DOUBLE", "java.lang.StrictMath", "atan2(double,double)", "true", "DOUBLE", "DOUBLE"},
{"COS", "DOUBLE", "java.lang.StrictMath", "cos(double)", "true", "DOUBLE"},
{"SIN", "DOUBLE", "java.lang.StrictMath", "sin(double)", "true", "DOUBLE"},
{"TAN", "DOUBLE", "java.lang.StrictMath", "tan(double)", "true", "DOUBLE"},
{"PI", "DOUBLE", "org.apache.derby.catalog.SystemProcedures", "PI()", "true"},
{"DEGREES", "DOUBLE", "java.lang.StrictMath", "toDegrees(double)", "true", "DOUBLE"},
{"RADIANS", "DOUBLE", "java.lang.StrictMath", "toRadians(double)", "true", "DOUBLE"},
{"LN", "DOUBLE", "java.lang.StrictMath", "log(double)", "true", "DOUBLE"},
{"LOG", "DOUBLE", "java.lang.StrictMath", "log(double)", "true", "DOUBLE"}, // Same as LN
{"LOG10", "DOUBLE", "org.apache.derby.catalog.SystemProcedures", "LOG10(double)", "true", "DOUBLE"},
{"EXP", "DOUBLE", "java.lang.StrictMath", "exp(double)", "true", "DOUBLE"},
{"CEIL", "DOUBLE", "java.lang.StrictMath", "ceil(double)", "true", "DOUBLE"},
{"CEILING", "DOUBLE", "java.lang.StrictMath", "ceil(double)", "true", "DOUBLE"}, // Same as CEIL
{"FLOOR", "DOUBLE", "java.lang.StrictMath", "floor(double)", "true", "DOUBLE"},
{"SIGN", "INTEGER", "org.apache.derby.catalog.SystemProcedures", "SIGN(double)", "true", "DOUBLE"},
{"RANDOM", "DOUBLE", "java.lang.StrictMath", "random()", "false" },
{"RAND", "DOUBLE", "org.apache.derby.catalog.SystemProcedures", "RAND(int)", "false", "INTEGER"}, // Escape function spec.
{"COT", "DOUBLE", "org.apache.derby.catalog.SystemProcedures", "COT(double)", "true", "DOUBLE"},
{"COSH", "DOUBLE", "org.apache.derby.catalog.SystemProcedures", "COSH(double)", "true", "DOUBLE"},
{"SINH", "DOUBLE", "org.apache.derby.catalog.SystemProcedures", "SINH(double)", "true", "DOUBLE"},
{"TANH", "DOUBLE", "org.apache.derby.catalog.SystemProcedures", "TANH(double)", "true", "DOUBLE"}
};
/**
* Index into SYSFUN_FUNCTIONS of the DETERMINISTIC indicator.
* Used to determine whether the system function is DETERMINISTIC
*/
private static final int SYSFUN_DETERMINISTIC_INDEX = 4;
/**
* The index of the first parameter in entries in the SYSFUN_FUNCTIONS
* table. Used to determine the parameter count (zero to many).
*/
private static final int SYSFUN_FIRST_PARAMETER_INDEX = 5;
/**
* Runtime definition of the functions from SYSFUN_FUNCTIONS.
* Populated dynamically as functions are called.
*/
private static final AliasDescriptor[] SYSFUN_AD =
new AliasDescriptor[SYSFUN_FUNCTIONS.length];
// the structure that holds all the core table info
private TabInfoImpl[] coreInfo;
/*
** SchemaDescriptors for system and app schemas. Both
** are canonical. We cache them for fast lookup.
*/
private SchemaDescriptor systemSchemaDesc;
private SchemaDescriptor sysIBMSchemaDesc;
private SchemaDescriptor declaredGlobalTemporaryTablesSchemaDesc;
private SchemaDescriptor systemUtilSchemaDesc;
// This array of non-core table names *MUST* be in the same order
// as the non-core table numbers, above.
private static final String[] nonCoreNames = {
"SYSCONSTRAINTS",
"SYSKEYS",
"SYSDEPENDS",
"SYSALIASES",
"SYSVIEWS",
"SYSCHECKS",
"SYSFOREIGNKEYS",
"SYSSTATEMENTS",
"SYSFILES",
"SYSTRIGGERS",
"SYSSTATISTICS",
"SYSDUMMY1",
"SYSTABLEPERMS",
"SYSCOLPERMS",
"SYSROUTINEPERMS",
"SYSROLES",
"SYSSEQUENCES",
"SYSPERMS"
};
private static final int NUM_NONCORE = nonCoreNames.length;
/**
* List of all "system" schemas
* <p>
* This list should contain all schema's used by the system and are
* created when the database is created. Users should not be able to
* create or drop these schema's and should not be able to create or
* drop objects in these schema's. This list is used by code that
* needs to check if a particular schema is a "system" schema.
**/
private static final String[] systemSchemaNames = {
SchemaDescriptor.IBM_SYSTEM_CAT_SCHEMA_NAME,
SchemaDescriptor.IBM_SYSTEM_FUN_SCHEMA_NAME,
SchemaDescriptor.IBM_SYSTEM_PROC_SCHEMA_NAME,
SchemaDescriptor.IBM_SYSTEM_STAT_SCHEMA_NAME,
SchemaDescriptor.IBM_SYSTEM_NULLID_SCHEMA_NAME,
SchemaDescriptor.STD_SYSTEM_DIAG_SCHEMA_NAME,
SchemaDescriptor.STD_SYSTEM_UTIL_SCHEMA_NAME,
SchemaDescriptor.IBM_SYSTEM_SCHEMA_NAME,
SchemaDescriptor.STD_SQLJ_SCHEMA_NAME,
SchemaDescriptor.STD_SYSTEM_SCHEMA_NAME
};
/** Dictionary version of the on-disk database */
private DD_Version dictionaryVersion;
/** Dictionary version of the currently running engine */
private DD_Version softwareVersion;
private String authorizationDatabaseOwner;
private boolean usesSqlAuthorization;
/*
** This property and value are written into the database properties
** when the database is created, and are used to determine whether
** the system catalogs need to be upgraded.
*/
// the structure that holds all the noncore info
private TabInfoImpl[] noncoreInfo;
// no other system tables have id's in the configuration.
public DataDescriptorGenerator dataDescriptorGenerator;
private DataValueFactory dvf;
AccessFactory af;
//DataDictionaryContext ddc;
private ExecutionFactory exFactory;
protected UUIDFactory uuidFactory;
Properties startupParameters;
int engineType;
/* Information about whether or not we are at boot time */
protected boolean booting;
private TransactionController bootingTC;
protected DependencyManager dmgr;
/* Cache of table descriptors */
CacheManager OIDTdCache;
CacheManager nameTdCache;
private CacheManager spsNameCache;
private CacheManager sequenceGeneratorCache;
private Hashtable spsIdHash;
// private Hashtable spsTextHash;
int tdCacheSize;
int stmtCacheSize;
private int seqgenCacheSize;
/* Cache of permissions data */
CacheManager permissionsCache;
int permissionsCacheSize;
/*
** Lockable object for synchronizing transition from caching to non-caching
*/
ShExLockable cacheCoordinator;
public LockFactory lockFactory;
volatile int cacheMode = DataDictionary.COMPILE_ONLY_MODE;
/* Number of DDL users */
volatile int ddlUsers;
/* Number of readers that start in DDL_MODE */
volatile int readersInDDLMode;
/**
True if the database is read only and requires
some form of upgrade, that makes the stored prepared
statements invalid.
With this case the engine is running at a different
version to the underlying stored database. This
can happen in 5.1 if the database is read only
and a different point release (later than 5.1.25?)
to the one that created it, has been booted. (Beetle 5170).
<P>
In 5.2 and newer this will be the case if the engine
booting the database is newer than the engine
that created it.
*/
public boolean readOnlyUpgrade;
//systemSQLNameNumber is the number used as the last digit during the previous call to getSystemSQLName.
//If it is 9 for a given calendarForLastSystemSQLName, we will restart the counter to 0
//and increment the calendarForLastSystemSQLName by 10ms.
private int systemSQLNameNumber;
private GregorianCalendar calendarForLastSystemSQLName = new GregorianCalendar();
private long timeForLastSystemSQLName;
/**
* List of procedures in SYSCS_UTIL schema with PUBLIC access
*/
private static final String[] sysUtilProceduresWithPublicAccess = {
"SYSCS_SET_RUNTIMESTATISTICS",
"SYSCS_SET_STATISTICS_TIMING",
"SYSCS_INPLACE_COMPRESS_TABLE",
"SYSCS_COMPRESS_TABLE",
"SYSCS_UPDATE_STATISTICS",
};
/**
* List of functions in SYSCS_UTIL schema with PUBLIC access
*/
private static final String[] sysUtilFunctionsWithPublicAccess = {
"SYSCS_GET_RUNTIMESTATISTICS",
};
/**
* Collation Type for SYSTEM schemas. In Derby 10.3, this will always
* be UCS_BASIC
*/
private int collationTypeOfSystemSchemas;
/**
* Collation Type for user schemas. In Derby 10.3, this is either
* UCS_BASIC or TERRITORY_BASED. The exact value is decided by what has
* user asked for through JDBC url optional attribute COLLATION. If that
* atrribute is set to UCS_BASIC, the collation type for user schemas
* will be UCS_BASIC. If that attribute is set to TERRITORY_BASED, the
* collation type for user schemas will be TERRITORY_BASED. If the user
* has not provide COLLATION attribute value in the JDBC url at database
* create time, then collation type of user schemas will default to
* UCS_BASIC. Pre-10.3 databases after upgrade to Derby 10.3 will also
* use UCS_BASIC for collation type of user schemas.
*/
private int collationTypeOfUserSchemas;
/*
** Constructor
*/
    /**
     * No-op constructor; all real initialization of this module happens
     * later in {@link #boot}.
     */
    public DataDictionaryImpl() {
    }
/**
* This is the data dictionary implementation for
* the standard database engine.
@return true if this service requested is for a database engine.
*/
public boolean canSupport(Properties startParams)
{
return Monitor.isDesiredType(startParams, EngineType.STANDALONE_DB);
}
    /**
     * Start-up method for this instance of the data dictionary.
     *
     * Sets collation types, bootstraps the core catalogs, sizes and creates
     * the descriptor/statement/permissions caches, and then either creates
     * the dictionary tables (database creation) or loads their ids
     * (existing database) inside a boot transaction.
     *
     * @param create      true if the database is being created, false if an
     *                    existing database is being booted
     * @param startParams The start-up parameters
     *
     * @exception StandardException Thrown if the module fails to start
     */
    public void boot(boolean create, Properties startParams)
        throws StandardException
    {
        softwareVersion = new DD_Version(this, DataDictionary.DD_VERSION_DERBY_10_7);

        startupParameters = startParams;

        uuidFactory = Monitor.getMonitor().getUUIDFactory();

        engineType = Monitor.getEngineType( startParams );

        //Set the collation type of system schemas before we start loading
        //built-in schemas's SchemaDescriptor(s). This is because
        //SchemaDescriptor will look to DataDictionary to get the correct
        //collation type for themselves. We can't load SD for SESSION schema
        //just yet because we do not know the collation type for user schemas
        //yet. We will know the right collation for user schema little later
        //in this boot method.
        collationTypeOfSystemSchemas = StringDataValue.COLLATION_TYPE_UCS_BASIC;
        getBuiltinSystemSchemas();

        // REMIND: actually, we're supposed to get the DataValueFactory
        // out of the connection context...this is a bit of a shortcut.
        // We get the DataValueFactory early in order to help bootstrap the system catalogs.
        LanguageConnectionFactory langConnFactory = (LanguageConnectionFactory) Monitor.bootServiceModule(
            create, this, LanguageConnectionFactory.MODULE, startParams);

        dvf = langConnFactory.getDataValueFactory();

        exFactory = (ExecutionFactory) Monitor.bootServiceModule(
                                                        create, this,
                                                        ExecutionFactory.MODULE,
                                                        startParams);

        // initialize the arrays of core and noncore tables
        initializeCatalogInfo();

        // indicate that we are in the process of booting
        booting = true;

        // set only if child class hasn't overridden this already
        if ( dataDescriptorGenerator == null )
        { dataDescriptorGenerator = new DataDescriptorGenerator( this ); }

        if (!create) {
            // For an existing database, pick up the conglomerate numbers of
            // the four core catalogs (and their indexes) from the service
            // properties saved at creation time.

            // SYSTABLES
            coreInfo[SYSTABLES_CORE_NUM].setHeapConglomerate(
                getBootParameter(startParams, CFG_SYSTABLES_ID, true));
            coreInfo[SYSTABLES_CORE_NUM].setIndexConglomerate(SYSTABLESRowFactory.SYSTABLES_INDEX1_ID,
                getBootParameter(startParams, CFG_SYSTABLES_INDEX1_ID, true));
            coreInfo[SYSTABLES_CORE_NUM].setIndexConglomerate(
                SYSTABLESRowFactory.SYSTABLES_INDEX2_ID,
                getBootParameter(startParams, CFG_SYSTABLES_INDEX2_ID, true));

            // SYSCOLUMNS
            coreInfo[SYSCOLUMNS_CORE_NUM].setHeapConglomerate(
                getBootParameter(startParams, CFG_SYSCOLUMNS_ID, true));
            coreInfo[SYSCOLUMNS_CORE_NUM].setIndexConglomerate(
                SYSCOLUMNSRowFactory.SYSCOLUMNS_INDEX1_ID,
                getBootParameter(startParams, CFG_SYSCOLUMNS_INDEX1_ID, true));
            // 2nd syscolumns index added in Xena, hence may not be there
            coreInfo[SYSCOLUMNS_CORE_NUM].setIndexConglomerate(
                SYSCOLUMNSRowFactory.SYSCOLUMNS_INDEX2_ID,
                getBootParameter(startParams, CFG_SYSCOLUMNS_INDEX2_ID, false));

            // SYSCONGLOMERATES
            coreInfo[SYSCONGLOMERATES_CORE_NUM].setHeapConglomerate(
                getBootParameter(startParams, CFG_SYSCONGLOMERATES_ID, true));
            coreInfo[SYSCONGLOMERATES_CORE_NUM].setIndexConglomerate(
                SYSCONGLOMERATESRowFactory.SYSCONGLOMERATES_INDEX1_ID,
                getBootParameter(startParams, CFG_SYSCONGLOMERATES_INDEX1_ID, true));
            coreInfo[SYSCONGLOMERATES_CORE_NUM].setIndexConglomerate(
                SYSCONGLOMERATESRowFactory.SYSCONGLOMERATES_INDEX2_ID,
                getBootParameter(startParams, CFG_SYSCONGLOMERATES_INDEX2_ID, true));
            coreInfo[SYSCONGLOMERATES_CORE_NUM].setIndexConglomerate(
                SYSCONGLOMERATESRowFactory.SYSCONGLOMERATES_INDEX3_ID,
                getBootParameter(startParams, CFG_SYSCONGLOMERATES_INDEX3_ID, true));

            // SYSSCHEMAS
            coreInfo[SYSSCHEMAS_CORE_NUM].setHeapConglomerate(
                getBootParameter(startParams, CFG_SYSSCHEMAS_ID, true));
            coreInfo[SYSSCHEMAS_CORE_NUM].setIndexConglomerate(
                SYSSCHEMASRowFactory.SYSSCHEMAS_INDEX1_ID,
                getBootParameter(startParams, CFG_SYSSCHEMAS_INDEX1_ID, true));
            coreInfo[SYSSCHEMAS_CORE_NUM].setIndexConglomerate(
                SYSSCHEMASRowFactory.SYSSCHEMAS_INDEX2_ID,
                getBootParameter(startParams, CFG_SYSSCHEMAS_INDEX2_ID, true));
        }

        // Cache sizes come from (optional) start-up properties, clamped to
        // sane ranges with built-in defaults.
        String value = startParams.getProperty(Property.LANG_TD_CACHE_SIZE);
        tdCacheSize = PropertyUtil.intPropertyValue(Property.LANG_TD_CACHE_SIZE, value,
                                        0, Integer.MAX_VALUE, Property.LANG_TD_CACHE_SIZE_DEFAULT);

        value = startParams.getProperty(Property.LANG_SPS_CACHE_SIZE);
        stmtCacheSize = PropertyUtil.intPropertyValue(Property.LANG_SPS_CACHE_SIZE, value,
                                        0, Integer.MAX_VALUE, Property.LANG_SPS_CACHE_SIZE_DEFAULT);

        value = startParams.getProperty(Property.LANG_SEQGEN_CACHE_SIZE);
        seqgenCacheSize = PropertyUtil.intPropertyValue(Property.LANG_SEQGEN_CACHE_SIZE, value,
                                        0, Integer.MAX_VALUE, Property.LANG_SEQGEN_CACHE_SIZE_DEFAULT);

        value = startParams.getProperty(Property.LANG_PERMISSIONS_CACHE_SIZE);
        permissionsCacheSize = PropertyUtil.intPropertyValue(Property.LANG_PERMISSIONS_CACHE_SIZE, value,
                                        0, Integer.MAX_VALUE, Property.LANG_PERMISSIONS_CACHE_SIZE_DEFAULT);

        /*
         * data dictionary contexts are only associated with connections.
         * we have to look for the basic data dictionary, as there is
         * no connection, and thus no context stack yet.
         */

        /*
         * Get the table descriptor cache.
         */
        CacheFactory cf =
            (CacheFactory) Monitor.startSystemModule(org.apache.derby.iapi.reference.Module.CacheFactory);

        OIDTdCache =
            cf.newCacheManager(this,
                "TableDescriptorOIDCache",
                tdCacheSize,
                tdCacheSize);

        nameTdCache =
            cf.newCacheManager(this,
                "TableDescriptorNameCache",
                tdCacheSize,
                tdCacheSize);

        // A statement cache size of zero disables the SPS name cache entirely.
        if (stmtCacheSize > 0)
        {
            spsNameCache =
                cf.newCacheManager(this,
                    "SPSNameDescriptorCache",
                    stmtCacheSize,
                    stmtCacheSize);
            spsIdHash = new Hashtable(stmtCacheSize);
            // spsTextHash = new Hashtable(stmtCacheSize);
        }

        sequenceGeneratorCache = cf.newCacheManager
            ( this, "SequenceGeneratorCache", seqgenCacheSize, seqgenCacheSize );

        /* Get the object to coordinate cache transitions */
        cacheCoordinator = new ShExLockable();

        /* Get AccessFactory in order to transaction stuff */
        af = (AccessFactory) Monitor.findServiceModule(this, AccessFactory.MODULE);

        /* Get the lock factory */
        lockFactory = af.getLockFactory();

        /*
         * now we need to setup a context stack for the database creation work.
         * We assume the System boot process has created a context
         * manager already, but not that contexts we need are there.
         */
        ContextService csf = ContextService.getFactory();

        ContextManager cm = csf.getCurrentContextManager();
        if (SanityManager.DEBUG)
            SanityManager.ASSERT((cm != null), "Failed to get current ContextManager");

        // RESOLVE other non-StandardException errors.
        bootingTC = null;
        try
        {
            // Get a transaction controller. This has the side effect of
            // creating a transaction context if there isn't one already.
            bootingTC = af.getTransaction(cm);

            /*
                We need an execution context so that we can generate rows
                REMIND: maybe only for create case?
             */
            exFactory.newExecutionContext(cm);

            DataDescriptorGenerator ddg = getDataDescriptorGenerator();

            //We should set the user schema collation type here now because
            //later on, we are going to create user schema APP. By the time any
            //user schema gets created, we should have the correct collation
            //type set for such schemas to use. For this reason, don't remove
            //the following if else statement and don't move it later in this
            //method.
            String userDefinedCollation;
            if (create) {
                //Get the collation attribute from the JDBC url. It can only
                //have one of 2 possible values - UCS_BASIC or TERRITORY_BASED
                //This attribute can only be specified at database create time.
                //The attribute value has already been verified in DVF.boot and
                //hence we can be assured that the attribute value if provided
                //is either UCS_BASIC or TERRITORY_BASED. If none provided,
                //then we will take it to be the default which is UCS_BASIC.
                userDefinedCollation = startParams.getProperty(
                        Attribute.COLLATION, Property.UCS_BASIC_COLLATION);
                //Persist the collation as a database property so that later
                //boots of an existing database pick up the same value.
                bootingTC.setProperty(Property.COLLATION,userDefinedCollation,true);
            } else {
                userDefinedCollation = startParams.getProperty(
                        Property.COLLATION, Property.UCS_BASIC_COLLATION);
            }

            //Initialize the collation type of user schemas by looking at
            //collation property/attribute.
            collationTypeOfUserSchemas = DataTypeDescriptor.getCollationType(userDefinedCollation);
            if (SanityManager.DEBUG)
                SanityManager.ASSERT((collationTypeOfUserSchemas != -1), "Invalid collation type: "+userDefinedCollation);

            //Now is also a good time to create schema descriptor for global
            //temporary tables. Since this is a user schema, it should use the
            //collation type associated with user schemas. Since we just
            //finished setting up the collation type of user schema, it is
            //safe to create user SchemaDescriptor(s) now.
            declaredGlobalTemporaryTablesSchemaDesc =
                newDeclaredGlobalTemporaryTablesSchemaDesc(
                        SchemaDescriptor.STD_DECLARED_GLOBAL_TEMPORARY_TABLES_SCHEMA_NAME);

            if (create) {
                String userName = IdUtil.getUserNameFromURLProps(startParams);
                authorizationDatabaseOwner = IdUtil.getUserAuthorizationId(userName);

                // Routines created below are collected here so that PUBLIC
                // access can be granted on a subset of them afterwards.
                HashSet newlyCreatedRoutines = new HashSet();

                // create any required tables.
                createDictionaryTables(startParams, bootingTC, ddg);
                //create procedures for network server metadata
                create_SYSIBM_procedures(bootingTC, newlyCreatedRoutines );
                //create metadata sps statement required for network server
                createSystemSps(bootingTC);
                // create the SYSCS_UTIL system procedures)
                create_SYSCS_procedures(bootingTC, newlyCreatedRoutines );
                // now grant execute permission on some of these routines
                grantPublicAccessToSystemRoutines( newlyCreatedRoutines, bootingTC, authorizationDatabaseOwner );
                // log the current dictionary version
                dictionaryVersion = softwareVersion;

                /* Set properties for current and create time
                 * DataDictionary versions.
                 */
                bootingTC.setProperty(
                    DataDictionary.CORE_DATA_DICTIONARY_VERSION,
                    dictionaryVersion, true);

                bootingTC.setProperty(
                    DataDictionary.CREATE_DATA_DICTIONARY_VERSION,
                    dictionaryVersion, true);

                // If SqlAuthorization is set as system property during database
                // creation, set it as database property also, so it gets persisted.
                if (PropertyUtil.getSystemBoolean(Property.SQL_AUTHORIZATION_PROPERTY))
                {
                    bootingTC.setProperty(Property.SQL_AUTHORIZATION_PROPERTY,"true",true);
                    usesSqlAuthorization=true;
                }

                // Set default hash algorithm used to protect passwords stored
                // in the database for BUILTIN authentication.
                bootingTC.setProperty(
                        Property.AUTHENTICATION_BUILTIN_ALGORITHM,
                        findDefaultBuiltinAlgorithm(),
                        false);
            } else {
                // Get the ids for non-core tables
                loadDictionaryTables(bootingTC, ddg, startParams);

                String sqlAuth = PropertyUtil.getDatabaseProperty(bootingTC,
                        Property.SQL_AUTHORIZATION_PROPERTY);

                // Feature compatibility check.
                if (Boolean.valueOf
                        (startParams.getProperty(
                            Attribute.SOFT_UPGRADE_NO_FEATURE_CHECK))
                        .booleanValue()) {
                    // Do not perform check if this boot is the first
                    // phase (soft upgrade boot) of a hard upgrade,
                    // which happens in two phases beginning with
                    // DERBY-2264. In this case, we need to always be
                    // able to boot to authenticate, notwithstanding
                    // any feature properties set
                    // (e.g. derby.database.sqlAuthorization) which
                    // may not yet be supported until that hard
                    // upgrade has happened, normally causing an error
                    // below.
                    //
                    // Feature sqlAuthorization is a special case:
                    // Since database ownership checking only happens
                    // when sqlAuthorization is true, we can't afford
                    // to *not* use it for upgrades from 10.2 or
                    // later, lest we lose the database owner check.
                    // For upgrades from 10.1 and earlier there is no
                    // database owner check at a hard upgrade.
                    if (dictionaryVersion.majorVersionNumber >=
                        DataDictionary.DD_VERSION_DERBY_10_2) {
                        usesSqlAuthorization = Boolean.valueOf(sqlAuth).
                            booleanValue();
                    }
                } else {
                    if (Boolean.valueOf(sqlAuth).booleanValue()) {
                        // SQL authorization requires 10.2 or higher database
                        checkVersion(DataDictionary.DD_VERSION_DERBY_10_2,
                                "sqlAuthorization");
                        usesSqlAuthorization=true;
                    }
                }
            }

            if (SanityManager.DEBUG)
                SanityManager.ASSERT((authorizationDatabaseOwner != null), "Failed to get Database Owner authorization");

            /* Commit & destroy the create database */
            bootingTC.commit();
            cm.getContext(ExecutionContext.CONTEXT_ID).popMe(); // done with ctx
        } finally {
            // Always tear down the boot transaction (and with it the
            // transaction context), even if booting failed part-way.
            if (bootingTC != null) {
                bootingTC.destroy(); // gets rid of the transaction context
                bootingTC = null;
            }
        }

        setDependencyManager();
        booting = false;
    }
/**
 * Determine the message digest algorithm that BUILTIN authentication
 * should use by default to protect passwords stored in this database.
 *
 * @return the preferred algorithm if this JVM provides it, otherwise
 *         the fallback algorithm
 */
private String findDefaultBuiltinAlgorithm() {
    String preferred = Property.AUTHENTICATION_BUILTIN_ALGORITHM_DEFAULT;
    try {
        // Probe for the preferred digest; getInstance() throws if the
        // algorithm is not available in this JVM.
        MessageDigest.getInstance(preferred);
        return preferred;
    } catch (NoSuchAlgorithmException nsae) {
        // Preferred digest unavailable; fall back to the weaker default.
        return Property.AUTHENTICATION_BUILTIN_ALGORITHM_FALLBACK;
    }
}
/**
 * Lazily create and return the cache manager used for permissions
 * descriptors.
 *
 * The cache size is read from the service properties on first use,
 * bounded below by 40 and seeded with the value captured at boot time.
 *
 * NOTE(review): the lazy initialization here is not synchronized —
 * presumably callers already hold an appropriate monitor or races are
 * benign; confirm before relying on this from new call sites.
 *
 * @return the (possibly newly created) permissions cache
 * @exception StandardException thrown if the cache module cannot be started
 */
private CacheManager getPermissionsCache() throws StandardException
{
    if( permissionsCache == null)
    {
        // Start the cache factory module to manufacture the cache.
        CacheFactory cf =
            (CacheFactory) Monitor.startSystemModule(org.apache.derby.iapi.reference.Module.CacheFactory);
        LanguageConnectionContext lcc = getLCC();
        TransactionController tc = lcc.getTransactionExecute();

        // Resolve the configured cache size; 40 is the enforced minimum.
        permissionsCacheSize = PropertyUtil.getServiceInt( tc,
            Property.LANG_PERMISSIONS_CACHE_SIZE,
            40, /* min value */
            Integer.MAX_VALUE,
            permissionsCacheSize /* value from boot time. */);
        permissionsCache = cf.newCacheManager( this,
            "PermissionsCache",
            permissionsCacheSize,
            permissionsCacheSize);
    }
    return permissionsCache;
} // end of getPermissionsCache
/**
 * Install the dependency manager associated with this data dictionary.
 * Subclasses may override to plug in their own implementation.
 */
protected void setDependencyManager()
{
    dmgr = new BasicDependencyManager(this);
}
/**
 * Return the dependency manager installed for this data dictionary.
 *
 * @see DataDictionary#getDependencyManager
 * @return the dependency manager
 */
public DependencyManager getDependencyManager()
{
    return dmgr;
}
/**
 * Stop this module. The data dictionary holds no resources that need
 * explicit teardown, so this is intentionally a no-op.
 */
public void stop()
{
}
/*
** CacheableFactory interface
*/

/**
 * Manufacture an empty cache entry of the kind appropriate for the
 * requesting cache manager. The entry type is chosen by identity
 * comparison against the caches this dictionary owns; any unrecognized
 * manager gets an SPS-name entry.
 */
public Cacheable newCacheable(CacheManager cm) {
    if (cm == OIDTdCache) {
        return new OIDTDCacheable(this);
    }
    if (cm == nameTdCache) {
        return new NameTDCacheable(this);
    }
    if (cm == permissionsCache) {
        return new PermissionsCacheable(this);
    }
    if (cm == sequenceGeneratorCache) {
        return new SequenceUpdater.SyssequenceUpdater(this);
    }
    // Default: the SPS (stored prepared statement) name cache.
    return new SPSNameCacheable(this);
}
/*
** Methods related to ModuleControl
*/

/**
 * Signal that a reader (a session binding a statement) is beginning to
 * use the data dictionary, and return the cache mode in effect for it.
 *
 * In COMPILE_ONLY_MODE a shared lock on {@code cacheCoordinator} is
 * acquired (without waiting while holding this object's monitor); if it
 * cannot be granted immediately, the method waits for the lock outside
 * the monitor and retries. In DDL_MODE the reader is only counted in
 * {@code readersInDDLMode}. Only the outermost nested bind (bindCount
 * == 1) takes the lock.
 *
 * @see org.apache.derby.iapi.sql.dictionary.DataDictionary#startReading
 *
 * @param lcc language connection context of the reading session
 * @return the cache mode observed (COMPILE_ONLY_MODE or DDL_MODE)
 * @exception StandardException Thrown on error
 */
public int startReading(LanguageConnectionContext lcc)
    throws StandardException
{
    int bindCount = lcc.incrementBindCount();
    int localCacheMode;

    boolean needRetry = false;

    do
    {
        if (needRetry)
        {
            // could not get lock while holding the synchronized(this),
            // so now wait until we can get the lock. Once we get the
            // lock it is automatically released, hopefully when we
            // go the the synchronized(this) block we will be able to
            // get the lock, while holding the synchronized(this)
            // monitor now.
            try
            {
                lockFactory.zeroDurationlockObject(
                    lcc.getTransactionExecute().getLockSpace(),
                    cacheCoordinator,
                    ShExQual.SH,
                    C_LockFactory.WAIT_FOREVER);
            }
            catch (StandardException e)
            {
                // DEADLOCK, timeout will not happen with WAIT_FOREVER
                // Undo the bind-count bump before propagating.
                lcc.decrementBindCount();
                throw e;
            }

            needRetry = false;
        }

        // "this" is used to synchronize between startReading,doneReading,
        // and startWriting.
        synchronized(this)
        {
            localCacheMode = getCacheMode();

            /*
            ** Keep track of how deeply nested this bind() operation is.
            ** It's possible for nested binding to happen if the user
            ** prepares SQL statements from within a static initializer
            ** of a class, and calls a method on that class (or uses a
            ** field in the class).
            **
            ** If nested binding is happening, we only want to lock the
            ** DataDictionary on the outermost nesting level.
            */
            if (bindCount == 1)
            {
                if (localCacheMode == DataDictionary.COMPILE_ONLY_MODE)
                {
                    if (SanityManager.DEBUG)
                    {
                        SanityManager.ASSERT(ddlUsers == 0,
                            "Cache mode is COMPILE_ONLY and there are DDL users.");
                    }

                    /*
                    ** If we deadlock while waiting for a lock,
                    ** then be sure to restore things as they
                    ** were.
                    */
                    boolean lockGranted = false;

                    try
                    {
                        // When the C_LockFactory.NO_WAIT is used this
                        // routine will not throw timeout or deadlock
                        // exceptions. The boolean returned will indicate
                        // if the lock was granted or not. If it would
                        // have had to wait, it just returns immediately
                        // and returns false.
                        //
                        // See if we can get this lock granted without
                        // waiting (while holding the dataDictionary
                        // synchronization).
                        CompatibilitySpace space =
                            lcc.getTransactionExecute().getLockSpace();
                        lockGranted =
                            lockFactory.lockObject(
                                space, space.getOwner(),
                                cacheCoordinator,
                                ShExQual.SH,
                                C_LockFactory.NO_WAIT);
                    }
                    catch (StandardException e)
                    {
                        // neither TIMEOUT or DEADLOCK can happen with
                        // NO_WAIT flag. This must be some other exception.
                        lcc.decrementBindCount();
                        throw e;
                    }

                    if (!lockGranted)
                        needRetry = true;
                }
                else
                {
                    // DDL_MODE: no lock; just count this reader so the
                    // mode cannot flip back while readers remain.
                    readersInDDLMode++;
                }
            }
        } // end of sync block

    } while (needRetry);

    return localCacheMode;
}
/**
 * Signal that a reader has finished using the data dictionary.
 *
 * Undoes {@link #startReading}: in COMPILE_ONLY_MODE releases the
 * shared coordinator lock (on the outermost nesting level only); in
 * DDL_MODE decrements the reader count and, when no DDL users or
 * DDL-mode readers remain, clears the caches and switches back to
 * COMPILE_ONLY_MODE.
 *
 * @see org.apache.derby.iapi.sql.dictionary.DataDictionary#doneReading
 *
 * @param mode the cache mode returned by the matching startReading call
 * @param lcc language connection context of the reading session
 * @exception StandardException Thrown on error
 */
public void doneReading(int mode, LanguageConnectionContext lcc)
    throws StandardException
{
    int bindCount = lcc.decrementBindCount();

    /* This is an arbitrary choice of object to synchronize these methods */
    synchronized(this)
    {
        /*
        ** Keep track of how deeply nested this bind() operation is.
        ** It's possible for nested binding to happen if the user
        ** prepares SQL statements from within a static initializer
        ** of a class, and calls a method on that class (or uses a
        ** field in the class).
        **
        ** If nested binding is happening, we only want to unlock the
        ** DataDictionary on the outermost nesting level.
        */
        if (bindCount == 0)
        {
            if (mode == DataDictionary.COMPILE_ONLY_MODE)
            {
                /*
                ** Release the share lock that was acquired by the reader when
                ** it called startReading().
                ** Beetle 4418, during bind, we may even execute something (eg., in a vti
                ** constructor) and if a severe error occured, the transaction is rolled
                ** back and lock released already, so don't try to unlock if statement context
                ** is cleared.
                */
                if ((lcc.getStatementContext() != null) && lcc.getStatementContext().inUse())
                {
                    CompatibilitySpace space =
                        lcc.getTransactionExecute().getLockSpace();
                    int unlockCount =
                        lockFactory.unlock(
                            space, space.getOwner(),
                            cacheCoordinator, ShExQual.SH);
                    if (SanityManager.DEBUG)
                    {
                        if (unlockCount != 1)
                        {
                            SanityManager.THROWASSERT("unlockCount not "+
                                "1 as expected, it is "+unlockCount);
                        }
                    }
                }
            }
            else
            {
                readersInDDLMode--;

                /*
                ** We can only switch back to cached (COMPILE_ONLY)
                ** mode if there aren't any readers that started in
                ** DDL_MODE. Otherwise we could get a reader
                ** in DDL_MODE that reads a cached object that
                ** was brought in by a reader in COMPILE_ONLY_MODE.
                ** If 2nd reader finished and releases it lock
                ** on the cache there is nothing to pevent another
                ** writer from coming along an deleting the cached
                ** object.
                */
                if (ddlUsers == 0 && readersInDDLMode == 0)
                {
                    clearCaches();
                    setCacheMode(DataDictionary.COMPILE_ONLY_MODE);
                }

                if (SanityManager.DEBUG)
                {
                    SanityManager.ASSERT(readersInDDLMode >= 0,
                        "readersInDDLMode is invalid -- should never be < 0");
                }
            }
        }
    }
}
/**
 * Signal that a writer (DDL statement) is beginning to modify the data
 * dictionary.
 *
 * Disallowed while the connection is binding. On the first DDL
 * statement of a transaction this method attempts (under this object's
 * monitor) a no-wait exclusive zero-duration lock on
 * {@code cacheCoordinator}; on success it switches to DDL_MODE, clears
 * the caches and bumps {@code ddlUsers}. If the lock is unavailable it
 * backs off (randomized sleep, and after several attempts an unbounded
 * timed wait outside the monitor) and retries.
 *
 * @see org.apache.derby.iapi.sql.dictionary.DataDictionary#startWriting
 *
 * @param lcc language connection context of the writing session
 * @exception StandardException Thrown on error, including LANG_DDL_IN_BIND
 */
public void startWriting(LanguageConnectionContext lcc)
    throws StandardException
{
    boolean blocked = true;

    /*
    ** Don't allow DDL if we're binding a SQL statement.
    */
    if (lcc.getBindCount() != 0)
    {
        throw StandardException.newException(SQLState.LANG_DDL_IN_BIND);
    }

    /*
    ** Check whether we've already done a DDL statement in this
    ** transaction. If so, we don't want to re-enter DDL mode, or
    ** bump the DDL user count.
    */
    if ( ! lcc.dataDictionaryInWriteMode())
    {
        for (int i = 0; blocked; i++)
        {
            /*
            ** If we already tried 5 times and failed, do
            ** an unbounded wait for the lock w/o
            ** synchronization. Immediately unlock and
            ** sleep a random amount of time and start
            ** the whole process over again.
            */
            if (i > 4 &&
                getCacheMode() == DataDictionary.COMPILE_ONLY_MODE)
            {
                // Wait until the settable timeout value for the lock,
                // and once granted, immediately release the lock. If
                // this wait time's out then a TIMEOUT error is sent
                // up the stack.
                lockFactory.zeroDurationlockObject(
                    lcc.getTransactionExecute().getLockSpace(),
                    cacheCoordinator,
                    ShExQual.EX,
                    C_LockFactory.TIMED_WAIT);

                // Reset the attempt counter after the long wait.
                i = 1;
            }

            if (i > 0)
            {
                // Randomized backoff (< 20ms) to reduce contention
                // between concurrent would-be writers.
                try
                {
                    Thread.sleep(
                        (long)((java.lang.Math.random() * 1131) % 20));
                }
                catch (InterruptedException ie)
                {
                    throw StandardException.interrupt(ie);
                }
            }

            synchronized(this)
            {
                if (getCacheMode() == DataDictionary.COMPILE_ONLY_MODE)
                {
                    // When the C_LockFactory.NO_WAIT is used this routine
                    // will not throw timeout or deadlock exceptions. The
                    // boolean returned will indicate if the lock was
                    // granted or not. If it would have had to wait, it
                    // just returns immediately and returns false.
                    //
                    // See if we can get this lock granted without waiting
                    // (while holding the dataDictionary synchronization).
                    boolean lockGranted =
                        lockFactory.zeroDurationlockObject(
                            lcc.getTransactionExecute().getLockSpace(),
                            cacheCoordinator,
                            ShExQual.EX,
                            C_LockFactory.NO_WAIT);

                    if (!lockGranted)
                        continue;

                    /* Switch the caching mode to DDL */
                    setCacheMode(DataDictionary.DDL_MODE);

                    /* Clear out all the caches */
                    clearCaches();
                }

                /* Keep track of the number of DDL users */
                ddlUsers++;
            } // end synchronized

            /*
            ** Tell the connection the DD is in DDL mode, so it can take
            ** it out of DDL mode when the transaction finishes.
            */
            lcc.setDataDictionaryWriteMode();
            blocked = false;
        }
    }
    else if (SanityManager.DEBUG)
    {
        SanityManager.ASSERT(getCacheMode() == DataDictionary.DDL_MODE,
            "lcc.getDictionaryInWriteMode() but DataDictionary is COMPILE_MODE");
    }
}
/**
 * Called when a DDL transaction finishes; undoes {@link #startWriting}.
 *
 * Decrements the DDL user count and, once no DDL users or DDL-mode
 * readers remain, clears the caches and returns the dictionary to
 * COMPILE_ONLY_MODE.
 *
 * @see org.apache.derby.iapi.sql.dictionary.DataDictionary#transactionFinished
 * @exception StandardException Thrown on error
 */
public void transactionFinished() throws StandardException
{
    /* This is an arbitrary choice of object to synchronize these methods */
    synchronized(this)
    {
        if (SanityManager.DEBUG)
        {
            SanityManager.ASSERT(ddlUsers > 0,
                "Number of DDL Users is <= 0 when finishing a transaction");

            SanityManager.ASSERT(getCacheMode() == DataDictionary.DDL_MODE,
                "transactionFinished called when not in DDL_MODE");
        }

        ddlUsers--;

        /*
        ** We can only switch back to cached (COMPILE_ONLY)
        ** mode if there aren't any readers that started in
        ** DDL_MODE. Otherwise we could get a reader
        ** in DDL_MODE that reads a cached object that
        ** was brought in by a reader in COMPILE_ONLY_MODE.
        ** If 2nd reader finished and releases it lock
        ** on the cache there is nothing to pevent another
        ** writer from coming along an deleting the cached
        ** object.
        */
        if (ddlUsers == 0 && readersInDDLMode == 0)
        {
            clearCaches();
            setCacheMode(DataDictionary.COMPILE_ONLY_MODE);
        }
    }
}
/**
 * Return the current caching mode (COMPILE_ONLY_MODE or DDL_MODE).
 *
 * No synchronization is needed here: int reads and writes are atomic
 * in Java.
 */
public int getCacheMode()
{
    return cacheMode;
}
/**
 * Set the caching mode (COMPILE_ONLY_MODE or DDL_MODE).
 *
 * No synchronization is needed here: int reads and writes are atomic
 * in Java.
 */
private void setCacheMode(int newMode)
{
    cacheMode = newMode;
}
/**
 * Return the DataDescriptorGenerator through which callers create
 * objects destined for storage in this data dictionary.
 *
 * @return the shared DataDescriptorGenerator
 */
public DataDescriptorGenerator getDataDescriptorGenerator()
{
    return dataDescriptorGenerator;
}
/**
 * Return the authorization identifier of the database owner.
 *
 * @return the owner's authorization id
 */
public String getAuthorizationDatabaseOwner()
{
    return authorizationDatabaseOwner;
}
/**
 * Report whether SQL authorization is enabled for this database.
 *
 * @see DataDictionary#usesSqlAuthorization
 */
public boolean usesSqlAuthorization()
{
    return usesSqlAuthorization;
}
/**
 * Return the collation type used by the system schemas.
 *
 * @see DataDictionary#getCollationTypeOfSystemSchemas()
 */
public int getCollationTypeOfSystemSchemas()
{
    return collationTypeOfSystemSchemas;
}
/**
 * Return the collation type used by user schemas.
 *
 * @see DataDictionary#getCollationTypeOfUserSchemas()
 */
public int getCollationTypeOfUserSchemas()
{
    return collationTypeOfUserSchemas;
}
/**
 * Return the DataValueFactory used to create data value objects for
 * this database.
 *
 * @return the DataValueFactory
 */
public DataValueFactory getDataValueFactory()
{
    return dvf;
}
/**
 * Return the ExecutionFactory associated with this database.
 *
 * @return the ExecutionFactory
 */
public ExecutionFactory getExecutionFactory()
{
    return exFactory;
}
/**
 * Lazily construct the in-memory descriptors for the builtin system
 * schemas (SYS, SYSIBM and SYSCS_UTIL). Subsequent calls are no-ops.
 */
private void getBuiltinSystemSchemas()
{
    if (systemSchemaDesc != null)
    {
        // Already initialized. The three descriptors are always built
        // together, so checking one of them is sufficient.
        return;
    }

    systemSchemaDesc =
        newSystemSchemaDesc(SchemaDescriptor.STD_SYSTEM_SCHEMA_NAME,
            SchemaDescriptor.SYSTEM_SCHEMA_UUID);
    sysIBMSchemaDesc =
        newSystemSchemaDesc(SchemaDescriptor.IBM_SYSTEM_SCHEMA_NAME,
            SchemaDescriptor.SYSIBM_SCHEMA_UUID);
    systemUtilSchemaDesc =
        newSystemSchemaDesc(SchemaDescriptor.STD_SYSTEM_UTIL_SCHEMA_NAME,
            SchemaDescriptor.SYSCS_UTIL_SCHEMA_UUID);
}
/**
 * Return the descriptor for the SYS system schema. Schema descriptors
 * include authorization ids and schema ids.
 *
 * SQL92 allows a schema to specify a default character set; that is
 * not supported here.
 *
 * @return the descriptor for the schema
 * @exception StandardException Thrown on failure
 */
public SchemaDescriptor getSystemSchemaDescriptor()
    throws StandardException
{
    return systemSchemaDesc;
}
/**
 * Return the descriptor for the SYSCS_UTIL system schema. Schema
 * descriptors include authorization ids and schema ids.
 *
 * SQL92 allows a schema to specify a default character set; that is
 * not supported here.
 *
 * @return the descriptor for the schema
 * @exception StandardException Thrown on failure
 */
public SchemaDescriptor getSystemUtilSchemaDescriptor()
    throws StandardException
{
    return(systemUtilSchemaDesc);
}
/**
 * Return the descriptor for the SYSIBM system schema. Schema
 * descriptors include authorization ids and schema ids.
 *
 * SQL92 allows a schema to specify a default character set; that is
 * not supported here.
 *
 * @return the descriptor for the schema
 * @exception StandardException Thrown on failure
 */
public SchemaDescriptor getSysIBMSchemaDescriptor()
    throws StandardException
{
    return sysIBMSchemaDesc;
}
/**
 * Return the descriptor for the declared global temporary table
 * schema, which is always named "SESSION".
 *
 * @return the descriptor for the schema
 * @exception StandardException Thrown on failure
 */
public SchemaDescriptor getDeclaredGlobalTemporaryTablesSchemaDescriptor()
    throws StandardException
{
    return declaredGlobalTemporaryTablesSchemaDesc;
}
/**
 * Determine whether a string names one of the builtin system schemas.
 *
 * @param name the schema name to test
 * @return true if the name exactly matches a system schema name
 *
 * @exception StandardException Thrown on failure
 */
public boolean isSystemSchemaName( String name)
    throws StandardException
{
    for (int i = 0; i < systemSchemaNames.length; i++)
    {
        if (systemSchemaNames[i].equals(name))
        {
            return true;
        }
    }
    return false;
}
/**
 * Get the descriptor for the named schema.
 * Schema descriptors include authorization ids and schema ids.
 * SQL92 allows a schema to specify a default character set - we will
 * not support this. Will check default schema for a match
 * before scanning a system table.
 *
 * Lookup order: (1) builtin SYS descriptor, (2) builtin SYSIBM
 * descriptor (only when the on-disk dictionary is at least CS 5.2 —
 * older soft-upgraded databases may have a user-created SYSIBM on disk
 * which must win), (3) a SYSSCHEMAS scan, (4) the in-memory SESSION
 * schema for declared global temporary tables.
 *
 * @param schemaName The name of the schema we're interested in. Must not be null.
 * @param tc TransactionController; if null, the compile transaction is used.
 *
 * @param raiseError whether an exception should be thrown if the schema does not exist.
 *
 * @return The descriptor for the schema. Can be null (not found) if raiseError is false.
 *
 * @exception StandardException Thrown on error
 */
public SchemaDescriptor getSchemaDescriptor(String schemaName,
                    TransactionController tc,
                    boolean raiseError)
    throws StandardException
{
    /*
    ** Check for APP and SYS schemas before going any
    ** further.
    */
    if ( tc == null )
    {
        tc = getTransactionCompile();
    }

    if (getSystemSchemaDescriptor().getSchemaName().equals(schemaName))
    {
        return getSystemSchemaDescriptor();
    }
    else if (getSysIBMSchemaDescriptor().getSchemaName().equals(schemaName))
    {
        // oh you are really asking SYSIBM, if this db is soft upgraded
        // from pre 52, I may have 2 versions for you, one on disk
        // (user SYSIBM), one imaginary (builtin). The
        // one on disk (real one, if it exists), should always be used.
        if (dictionaryVersion.checkVersion(
                DataDictionary.DD_VERSION_CS_5_2, null))
        {
            return getSysIBMSchemaDescriptor();
        }
    }

    /*
    ** Manual lookup
    */
    SchemaDescriptor sd = locateSchemaRow(schemaName, tc);

    //if no schema found and schema name is SESSION, then create an
    //in-memory schema descriptor
    if (sd == null &&
        getDeclaredGlobalTemporaryTablesSchemaDescriptor().getSchemaName().equals(schemaName))
    {
        return getDeclaredGlobalTemporaryTablesSchemaDescriptor();
    }

    if (sd == null && raiseError)
    {
        throw StandardException.newException(
            SQLState.LANG_SCHEMA_DOES_NOT_EXIST, schemaName);
    }
    else
    {
        return sd;
    }
}
/**
 * Look up a schema row in SYSSCHEMAS by schema id, using the default
 * REPEATABLE_READ isolation. Read only scan.
 *
 * @param schemaId the id of the schema of interest
 * @param tc TransactionController. If null, one is gotten off of the
 *           language connection context.
 *
 * @return the descriptor built from the schema's row, or null
 *
 * @exception StandardException Thrown on error
 */
private SchemaDescriptor locateSchemaRow(UUID schemaId,
                        TransactionController tc)
    throws StandardException
{
    // Delegate with the standard isolation level.
    return locateSchemaRowBody(
        schemaId,
        TransactionController.ISOLATION_REPEATABLE_READ,
        tc);
}
/**
 * Look up a schema row in SYSSCHEMAS by schema id under an explicit
 * isolation level. Read only scan.
 *
 * @param schemaId the id of the schema of interest
 * @param isolationLevel explicit isolation level; only
 *           ISOLATION_REPEATABLE_READ (normal usage) or
 *           ISOLATION_READ_UNCOMMITTED (corner cases) are
 *           supported for now.
 * @param tc TransactionController. If null, one is gotten off of the
 *           language connection context.
 *
 * @return the descriptor built from the schema's row, or null
 *
 * @exception StandardException Thrown on error
 */
private SchemaDescriptor locateSchemaRow(UUID schemaId,
                        int isolationLevel,
                        TransactionController tc)
    throws StandardException
{
    // Simple pass-through to the shared implementation.
    return locateSchemaRowBody(schemaId, isolationLevel, tc);
}
/**
 * Shared implementation of the by-UUID schema lookup: scan the second
 * SYSSCHEMAS index (keyed on schema id) for the single matching row.
 *
 * @param schemaId the id of the schema of interest
 * @param isolationLevel isolation level for the read-only scan
 * @param tc transaction controller to scan with
 * @return the matching SchemaDescriptor, or null if none
 * @exception StandardException Thrown on error
 */
private SchemaDescriptor locateSchemaRowBody(UUID schemaId,
                        int isolationLevel,
                        TransactionController tc)
    throws StandardException
{
    TabInfoImpl ti = coreInfo[SYSSCHEMAS_CORE_NUM];

    // The UUID (rendered as a CHAR) serves as both the start and the
    // stop key, so at most one row qualifies.
    DataValueDescriptor idOrderable = getIDValueAsCHAR(schemaId);

    ExecIndexRow keyRow = exFactory.getIndexableRow(1);
    keyRow.setColumn(1, idOrderable);

    return (SchemaDescriptor)
        getDescriptorViaIndex(
            SYSSCHEMASRowFactory.SYSSCHEMAS_INDEX2_ID,
            keyRow,
            (ScanQualifier [][]) null,
            ti,
            (TupleDescriptor) null,
            (List) null,
            false,
            isolationLevel,
            tc);
}
/**
 * Look up a schema row in SYSSCHEMAS by schema name. Read only scan.
 *
 * @param schemaName the name of the schema of interest
 * @param tc TransactionController. If null, one is gotten off of the
 *           language connection context.
 *
 * @return the matching SchemaDescriptor, or null if none
 *
 * @exception StandardException Thrown on error
 */
private SchemaDescriptor locateSchemaRow(String schemaName,
                        TransactionController tc)
    throws StandardException
{
    TabInfoImpl ti = coreInfo[SYSSCHEMAS_CORE_NUM];

    // The name serves as both the start and the stop key for the scan
    // of index 1 (keyed on schema name), so at most one row qualifies.
    DataValueDescriptor nameOrderable = new SQLVarchar(schemaName);

    ExecIndexRow keyRow = exFactory.getIndexableRow(1);
    keyRow.setColumn(1, nameOrderable);

    return (SchemaDescriptor)
        getDescriptorViaIndex(
            SYSSCHEMASRowFactory.SYSSCHEMAS_INDEX1_ID,
            keyRow,
            (ScanQualifier [][]) null,
            ti,
            (TupleDescriptor) null,
            (List) null,
            false,
            TransactionController.ISOLATION_REPEATABLE_READ,
            tc);
}
/**
 * Get the SchemaDescriptor for the given schema identifier, reading
 * with the default REPEATABLE_READ isolation.
 *
 * @param schemaId the id of the schema of interest
 * @param tc the transaction controller to use when scanning SYSSCHEMAS
 *
 * @return the descriptor for the schema, or null if no such schema exists
 *
 * @exception StandardException Thrown on failure
 */
public SchemaDescriptor getSchemaDescriptor(UUID schemaId,
                        TransactionController tc)
    throws StandardException
{
    // Delegate with the standard isolation level.
    return getSchemaDescriptorBody(
        schemaId,
        TransactionController.ISOLATION_REPEATABLE_READ,
        tc);
}
/**
 * Get the SchemaDescriptor for the given schema identifier under an
 * explicit isolation level.
 *
 * @param schemaId the uuid of the schema we want a descriptor for
 * @param isolationLevel use this explicit isolation level. Only
 *           ISOLATION_REPEATABLE_READ (normal usage) or
 *           ISOLATION_READ_UNCOMMITTED (corner cases)
 *           supported for now.
 * @param tc transaction controller
 * @throws StandardException thrown on error
 */
public SchemaDescriptor getSchemaDescriptor(UUID schemaId,
                        int isolationLevel,
                        TransactionController tc)
    throws StandardException
{
    // Simple pass-through to the shared implementation.
    return getSchemaDescriptorBody(schemaId, isolationLevel, tc);
}
/**
 * Shared implementation of the by-UUID schema descriptor lookup.
 *
 * Short-circuits: the builtin SYS and SYSIBM descriptors are returned
 * directly; outside of boot, the session's cached default schema is
 * returned when it matches (or when schemaId is null). Otherwise the
 * row is located in SYSSCHEMAS.
 *
 * @param schemaId the uuid of the schema of interest; may be null
 * @param isolationLevel isolation level for any SYSSCHEMAS scan
 * @param tc transaction controller; if null, the compile transaction is used
 * @return the descriptor for the schema, or null if not found
 * @exception StandardException thrown on error
 */
private SchemaDescriptor getSchemaDescriptorBody(
    UUID schemaId,
    int isolationLevel,
    TransactionController tc) throws StandardException
{
    SchemaDescriptor sd = null;

    if ( tc == null )
    {
        tc = getTransactionCompile();
    }

    /*
    ** Check for APP and SYS schemas before going any
    ** further.
    */

    if (schemaId != null)
    {
        if (getSystemSchemaDescriptor().getUUID().equals(schemaId))
        {
            return getSystemSchemaDescriptor();
        }
        else if (getSysIBMSchemaDescriptor().getUUID().equals(schemaId))
        {
            return getSysIBMSchemaDescriptor();
        }
    }

    /*
    ** If we aren't booting, lets see if we already
    ** have the descriptor. If we are in the middle
    ** of booting we cannot get the LanguageConnectionContext.
    */
    if (!booting)
    {

        LanguageConnectionContext lcc = getLCC();

        if (lcc != null)
        {
            sd = lcc.getDefaultSchema();

            if ((sd != null) &&
                ((schemaId == null) ||
                    schemaId.equals(sd.getUUID())))
            {
                return sd;
            }
        }
    }

    return locateSchemaRow(schemaId, isolationLevel, tc);
}
/**
 * Return true of there exists a schema whose authorizationId equals
 * authid, i.e. SYS.SYSSCHEMAS contains a row whose column
 * (AUTHORIZATIONID) equals authid.
 *
 * The heap is scanned with a qualifier on the AUTHORIZATIONID column;
 * the scan stops at the first qualifying row.
 *
 * @param authid authorizationId
 * @param tc TransactionController
 * @return true iff there is a matching schema
 * @exception StandardException
 */
public boolean existsSchemaOwnedBy(String authid,
                                   TransactionController tc)
        throws StandardException {

    TabInfoImpl ti = coreInfo[SYSSCHEMAS_CORE_NUM];
    SYSSCHEMASRowFactory
        rf = (SYSSCHEMASRowFactory)ti.getCatalogRowFactory();

    // NOTE(review): heapCC is opened but not otherwise used before the
    // close in the finally block — presumably held for its locking
    // side effect on the heap conglomerate; confirm before removing.
    ConglomerateController
        heapCC = tc.openConglomerate(
            ti.getHeapConglomerate(), false, 0,
            TransactionController.MODE_RECORD,
            TransactionController.ISOLATION_REPEATABLE_READ);

    DataValueDescriptor authIdOrderable = new SQLVarchar(authid);
    ScanQualifier[][] scanQualifier = exFactory.getScanQualifier(1);

    // Qualify on AUTHORIZATIONID = authid (column number is 1-based in
    // the row factory, 0-based in the qualifier).
    scanQualifier[0][0].setQualifier(
        SYSSCHEMASRowFactory.SYSSCHEMAS_SCHEMAAID - 1, /* to zero-based */
        authIdOrderable,
        Orderable.ORDER_OP_EQUALS,
        false,
        false,
        false);

    ScanController sc = tc.openScan(
        ti.getHeapConglomerate(),
        false, // don't hold open across commit
        0, // for update
        TransactionController.MODE_RECORD,
        TransactionController.ISOLATION_REPEATABLE_READ,
        (FormatableBitSet) null, // all fields as objects
        (DataValueDescriptor[]) null, // start position -
        0, // startSearchOperation - none
        scanQualifier, //
        (DataValueDescriptor[]) null, // stop position -through last row
        0); // stopSearchOperation - none

    boolean result = false;

    try {
        ExecRow outRow = rf.makeEmptyRow();

        // One fetch suffices: any qualifying row proves existence.
        if (sc.fetchNext(outRow.getRowArray())) {
            result = true;
        }
    } finally {
        // Always release scan and conglomerate resources.
        if (sc != null) {
            sc.close();
        }

        if (heapCC != null) {
            heapCC.close();
        }
    }
    return result;
}
/**
 * Insert a single descriptor row into the catalog identified by
 * catalogNumber, optionally rejecting duplicates.
 *
 * @see DataDictionary#addDescriptor
 */
public void addDescriptor(TupleDescriptor td, TupleDescriptor parent,
                          int catalogNumber, boolean duplicatesAllowed,
                          TransactionController tc)
    throws StandardException
{
    // Core catalogs are indexed directly; non-core catalogs are
    // materialized on demand.
    TabInfoImpl ti = (catalogNumber < NUM_CORE)
        ? coreInfo[catalogNumber]
        : getNonCoreTI(catalogNumber);

    ExecRow row = ti.getCatalogRowFactory().makeRow(td, parent);

    int insertRetCode = ti.insertRow(row, tc);

    if (!duplicatesAllowed && insertRetCode != TabInfoImpl.ROWNOTDUPLICATE)
    {
        throw duplicateDescriptorException(td, parent);
    }
}
/**
 * Build the "object already exists" exception for a duplicate
 * descriptor insert; the message includes the parent object when one
 * is supplied.
 */
private StandardException
    duplicateDescriptorException(TupleDescriptor tuple,
                                 TupleDescriptor parent)
{
    if (parent == null)
    {
        return StandardException.newException(
            SQLState.LANG_OBJECT_ALREADY_EXISTS,
            tuple.getDescriptorType(),
            tuple.getDescriptorName());
    }

    return StandardException.newException(
        SQLState.LANG_OBJECT_ALREADY_EXISTS_IN_OBJECT,
        tuple.getDescriptorType(),
        tuple.getDescriptorName(),
        parent.getDescriptorType(),
        parent.getDescriptorName());
}
/** array version of addDescriptor.
 *
 * All rows are built first and inserted with a single insertRowList
 * call; on a duplicate, the returned index identifies the offending
 * descriptor.
 *
 * @see DataDictionary#addDescriptor
 */
public void addDescriptorArray(TupleDescriptor[] td,
                               TupleDescriptor parent,
                               int catalogNumber,
                               boolean allowDuplicates,
                               TransactionController tc)
    throws StandardException
{
    TabInfoImpl ti = (catalogNumber < NUM_CORE)
        ? coreInfo[catalogNumber]
        : getNonCoreTI(catalogNumber);
    CatalogRowFactory crf = ti.getCatalogRowFactory();

    ExecRow[] rl = new ExecRow[td.length];

    for (int index = 0; index < td.length; index++)
    {
        rl[index] = crf.makeRow(td[index], parent);
    }

    int insertRetCode = ti.insertRowList( rl, tc );
    if (!allowDuplicates && insertRetCode != TabInfoImpl.ROWNOTDUPLICATE)
    {
        throw duplicateDescriptorException(td[insertRetCode], parent);
    }
}
/**
 * Drop a single role grant, identified by the (role, grantee, grantor)
 * triple, from SYSROLES.
 *
 * @see DataDictionary#dropRoleGrant
 */
public void dropRoleGrant(String roleName,
                          String grantee,
                          String grantor,
                          TransactionController tc)
    throws StandardException
{
    TabInfoImpl ti = getNonCoreTI(SYSROLES_CATALOG_NUM);

    // The EE_OR index is keyed on (roleName, grantee, grantor), so the
    // triple identifies exactly one row to delete.
    ExecIndexRow keyRow = exFactory.getIndexableRow(3);
    keyRow.setColumn(1, new SQLVarchar(roleName));
    keyRow.setColumn(2, new SQLVarchar(grantee));
    keyRow.setColumn(3, new SQLVarchar(grantor));

    ti.deleteRow(tc, keyRow,
                 SYSROLESRowFactory.SYSROLES_INDEX_ID_EE_OR_IDX);
}
/**
 * Drop the descriptor for a schema, given the schema's name.
 *
 * @param schemaName the name of the schema to drop
 * @param tc TransactionController for the transaction
 *
 * @exception StandardException Thrown on error
 */
public void dropSchemaDescriptor(String schemaName,
                        TransactionController tc)
    throws StandardException
{
    TabInfoImpl ti = coreInfo[SYSSCHEMAS_CORE_NUM];

    if (SanityManager.DEBUG)
    {
        // Dropping a non-empty schema is a caller bug; assert in
        // sane builds.
        SchemaDescriptor sd = getSchemaDescriptor(schemaName, getTransactionCompile(), true);
        if (!isSchemaEmpty(sd))
        {
            SanityManager.THROWASSERT("Attempt to drop schema "+schemaName+" that is not empty");
        }
    }

    // Index 1 is keyed on schema name; the same key is used as both
    // start and stop position, identifying the single row to delete.
    ExecIndexRow keyRow1 = exFactory.getIndexableRow(1);
    keyRow1.setColumn(1, new SQLVarchar(schemaName));

    ti.deleteRow( tc, keyRow1, SYSSCHEMASRowFactory.SYSSCHEMAS_INDEX1_ID );
}
/**
 * Get the descriptor for the named table within the given schema.
 * If the schema parameter is NULL, it looks for the table in the
 * current (default) schema. Table descriptors include object ids,
 * object types (table, view, etc.)
 *
 * For the diagnostic (SYSCS_DIAG) schema a transient VTI table
 * descriptor is fabricated when a matching VTI class exists. In
 * COMPILE_ONLY_MODE the name-keyed descriptor cache is consulted;
 * otherwise SYSTABLES is scanned.
 *
 * @param tableName The name of the table to get the descriptor for
 * @param schema The descriptor for the schema the table lives in.
 * If null, use the system schema.
 * @return The descriptor for the table, null if table does not
 * exist.
 *
 * @exception StandardException Thrown on failure
 */
public TableDescriptor getTableDescriptor(String tableName,
                SchemaDescriptor schema, TransactionController tc)
    throws StandardException
{
    TableDescriptor retval = null;

    /*
    ** If we didn't get a schema descriptor, we had better
    ** have a system table.
    */
    if (SanityManager.DEBUG)
    {
        if ((schema == null) && !tableName.startsWith("SYS"))
        {
            SanityManager.THROWASSERT("null schema for non system table "+tableName);
        }
    }

    SchemaDescriptor sd = (schema == null) ?
        getSystemSchemaDescriptor()
        : schema;

    UUID schemaUUID = sd.getUUID();

    if (SchemaDescriptor.STD_SYSTEM_DIAG_SCHEMA_NAME.equals(
            sd.getSchemaName()))
    {
        // Diagnostic schema: try to resolve the name as a VTI first.
        TableDescriptor td =
            new TableDescriptor(this, tableName, sd,
                TableDescriptor.VTI_TYPE,
                TableDescriptor.DEFAULT_LOCK_GRANULARITY);

        // ensure a vti class exists
        if (getVTIClass(td, false) != null)
            return td;

        // otherwise just standard search
    }

    TableKey tableKey = new TableKey(schemaUUID, tableName);

    /* Only use the cache if we're in compile-only mode */
    if (getCacheMode() == DataDictionary.COMPILE_ONLY_MODE)
    {
        NameTDCacheable cacheEntry = (NameTDCacheable) nameTdCache.find(tableKey);
        if (cacheEntry != null)
        {
            retval = cacheEntry.getTableDescriptor();
            // bind in previous command might have set refernced cols
            retval.setReferencedColumnMap(null);
            nameTdCache.release(cacheEntry);
        }

        return retval;
    }

    return getTableDescriptorIndex1Scan(tableName, schemaUUID.toString());
}
/**
 * Scan systables_index1 (tablename, schemaid) for a match.
 *
 * @param tableName the name of the table of interest
 * @param schemaUUID the schema id, rendered as a string
 * @return TableDescriptor The matching descriptor, if any.
 *
 * @exception StandardException Thrown on failure
 */
private TableDescriptor getTableDescriptorIndex1Scan(
                            String tableName,
                            String schemaUUID)
        throws StandardException
{
    TabInfoImpl ti = coreInfo[SYSTABLES_CORE_NUM];

    // The (tableName, schemaId) pair is the full index key and is used
    // as both start and stop position, so at most one row qualifies.
    ExecIndexRow keyRow = exFactory.getIndexableRow(2);
    keyRow.setColumn(1, new SQLVarchar(tableName));
    keyRow.setColumn(2, new SQLChar(schemaUUID));

    TableDescriptor td = (TableDescriptor)
        getDescriptorViaIndex(
            SYSTABLESRowFactory.SYSTABLES_INDEX1_ID,
            keyRow,
            (ScanQualifier [][]) null,
            ti,
            (TupleDescriptor) null,
            (List) null,
            false);

    return finishTableDescriptor(td);
}
/**
 * This method can get called from the DataDictionary cache to load a
 * descriptor that is not currently cached.
 *
 * @param tableKey The TableKey of the table
 *
 * @return The descriptor for the table, null if the table does
 * not exist.
 *
 * @exception StandardException Thrown on failure
 */
TableDescriptor getUncachedTableDescriptor(TableKey tableKey)
    throws StandardException
{
    // Bypass the caches and read straight from SYSTABLES.
    String schemaId = tableKey.getSchemaId().toString();
    return getTableDescriptorIndex1Scan(tableKey.getTableName(), schemaId);
}
/**
 * Get the descriptor for the table with the given UUID.
 *
 * NOTE: I'm assuming that the object store will define an UUID for
 * persistent objects. I'm also assuming that UUIDs are unique across
 * schemas, and that the object store will be able to do efficient
 * lookups across schemas (i.e. that no schema descriptor parameter
 * is needed).
 *
 * @param tableID The UUID of the table to get the descriptor for
 *
 * @return The descriptor for the table, null if the table does
 * not exist.
 *
 * @exception StandardException Thrown on failure
 */
public TableDescriptor getTableDescriptor(UUID tableID)
    throws StandardException
{
    /* Only use the cache if we're in compile-only mode */
    if (getCacheMode() == DataDictionary.COMPILE_ONLY_MODE)
    {
        TableDescriptor cached = null;

        OIDTDCacheable cacheEntry = (OIDTDCacheable) OIDTdCache.find(tableID);
        if (cacheEntry != null)
        {
            cached = cacheEntry.getTableDescriptor();
            // bind in previous command might have set refernced cols
            cached.setReferencedColumnMap(null);
            OIDTdCache.release(cacheEntry);
        }

        return cached;
    }

    return getTableDescriptorIndex2Scan(tableID.toString());
}
/**
 * Read the descriptor for a table from SYSTABLES by table UUID.
 * This method can get called from the DataDictionary cache on a miss.
 *
 * @param tableID The UUID of the table to get the descriptor for
 *
 * @return The descriptor for the table, null if the table does
 *         not exist.
 *
 * @exception StandardException Thrown on failure
 */
protected TableDescriptor getUncachedTableDescriptor(UUID tableID)
    throws StandardException
{
    String uuidString = tableID.toString();
    return getTableDescriptorIndex2Scan(uuidString);
}
/**
 * Scan systables_index2 (tableid) for a match.
 *
 * @param tableUUID string form of the table's UUID
 *
 * @return TableDescriptor The matching descriptor, or null if none.
 *
 * @exception StandardException Thrown on failure
 */
private TableDescriptor getTableDescriptorIndex2Scan(
        String tableUUID)
    throws StandardException
{
    TabInfoImpl ti = coreInfo[SYSTABLES_CORE_NUM];

    /* The single-column (tableid) key is both the start and the stop
     * position of the index scan.
     */
    ExecIndexRow indexKey = exFactory.getIndexableRow(1);
    indexKey.setColumn(1, new SQLChar(tableUUID));

    TableDescriptor descriptor = (TableDescriptor)
        getDescriptorViaIndex(
            SYSTABLESRowFactory.SYSTABLES_INDEX2_ID,
            indexKey,
            (ScanQualifier [][]) null,
            ti,
            (TupleDescriptor) null,
            (List) null,
            false);

    // Hang the column and conglomerate lists off the descriptor.
    return finishTableDescriptor(descriptor);
}
/**
 * Finish filling in the TableDescriptor.
 * (Build the column and conglomerate lists that hang off the TD.)
 *
 * @param td The TableDescriptor, may be null.
 *
 * @return The completed TableDescriptor, or null if td was null.
 *
 * @exception StandardException Thrown on failure
 */
private TableDescriptor finishTableDescriptor(TableDescriptor td)
    throws StandardException
{
    if (td == null)
    {
        return null;
    }

    // Serialize concurrent fill-in of the same shared descriptor.
    synchronized (td)
    {
        getColumnDescriptorsScan(td);
        getConglomerateDescriptorsScan(td);
    }
    return td;
}
/**
 * Indicate whether there is anything in the
 * particular schema. Checks for tables in the
 * the schema, on the assumption that there cannot
 * be any other objects in a schema w/o a table.
 *
 * @param sd descriptor
 *
 * @return true/false
 *
 * @exception StandardException on error
 */
public boolean isSchemaEmpty(SchemaDescriptor sd)
    throws StandardException
{
    TransactionController tc = getTransactionCompile();
    DataValueDescriptor schemaIdOrderable = getIDValueAsCHAR(sd.getUUID());

    // Probe each catalog that can reference a schema; short-circuit
    // evaluation stops at the first catalog that holds a reference,
    // preserving the original check order.
    boolean referenced =
        isSchemaReferenced(tc, coreInfo[SYSTABLES_CORE_NUM],
                    SYSTABLESRowFactory.SYSTABLES_INDEX1_ID,
                    SYSTABLESRowFactory.SYSTABLES_INDEX1_SCHEMAID,
                    schemaIdOrderable)
        || isSchemaReferenced(tc, getNonCoreTI(SYSCONSTRAINTS_CATALOG_NUM),
                    SYSCONSTRAINTSRowFactory.SYSCONSTRAINTS_INDEX2_ID,
                    2,
                    schemaIdOrderable)
        || isSchemaReferenced(tc, getNonCoreTI(SYSSTATEMENTS_CATALOG_NUM),
                    SYSSTATEMENTSRowFactory.SYSSTATEMENTS_INDEX2_ID,
                    2,
                    schemaIdOrderable)
        || isSchemaReferenced(tc, getNonCoreTI(SYSTRIGGERS_CATALOG_NUM),
                    SYSTRIGGERSRowFactory.SYSTRIGGERS_INDEX2_ID,
                    2,
                    schemaIdOrderable)
        // don't orphan routines or UDTs
        || isSchemaReferenced(tc, getNonCoreTI(SYSALIASES_CATALOG_NUM),
                    SYSALIASESRowFactory.SYSALIASES_INDEX1_ID,
                    1,
                    schemaIdOrderable);

    if (referenced)
    {
        return false;
    }

    // SYSSEQUENCES was added in 10.6. Don't look for it if we have
    // soft-upgraded from an older release (catalog won't exist).
    if (dictionaryVersion.majorVersionNumber >= DataDictionary.DD_VERSION_DERBY_10_6
        && isSchemaReferenced(tc, getNonCoreTI(SYSSEQUENCES_CATALOG_NUM),
                    SYSSEQUENCESRowFactory.SYSSEQUENCES_INDEX2_ID,
                    1,
                    schemaIdOrderable))
    {
        return false;
    }

    return true;
}
/**
 * Is the schema id referenced by the system table in question?
 * Currently assumes that the schema id is in an index.
 * NOTE: could be generalized a bit, and possibly used
 * elsewhere...
 *
 * @param tc transaction controller
 * @param ti table info for the system table
 * @param indexId index id
 * @param indexCol 1 based index column
 * @param schemaIdOrderable the schemaid in a char orderable
 *
 * @return true if there is a reference to this schema
 *
 * @exception StandardException on error
 */
protected boolean isSchemaReferenced(TransactionController tc,
                    TabInfoImpl ti,
                    int indexId,
                    int indexCol,
                    DataValueDescriptor schemaIdOrderable )
    throws StandardException
{
    // Fix: dropped the unused locals indexRow1, indexTemplateRow and
    // outRow (outRow was built but never passed to the scan, since the
    // existence check fetches no column data).
    ConglomerateController heapCC = null;
    ScanController scanController = null;
    boolean foundRow;
    FormatableBitSet colToCheck = new FormatableBitSet(indexCol);

    if (SanityManager.DEBUG)
    {
        SanityManager.ASSERT(indexId >= 0, "code needs to be enhanced"+
            " to support a table scan to find the index id");
    }

    // Only the schema-id column participates (indexCol is 1-based,
    // the bit set is 0-based).
    colToCheck.set(indexCol - 1);

    // Qualify on schemaid = <given id>.
    ScanQualifier[][] qualifier = exFactory.getScanQualifier(1);
    qualifier[0][0].setQualifier
        (indexCol - 1,
         schemaIdOrderable,
         Orderable.ORDER_OP_EQUALS,
         false,
         false,
         false);

    try
    {
        // Open the heap as well as the index scan; kept from the
        // original code — presumably for locking. TODO confirm it is
        // still required before removing.
        heapCC =
            tc.openConglomerate(
                ti.getHeapConglomerate(), false, 0,
                TransactionController.MODE_RECORD,
                TransactionController.ISOLATION_REPEATABLE_READ);

        scanController = tc.openScan(
            ti.getIndexConglomerate(indexId),  // conglomerate to open
            false,                             // don't hold open across commit
            0,                                 // for read
            TransactionController.MODE_RECORD, // row locking
            TransactionController.ISOLATION_REPEATABLE_READ,
            colToCheck,                        // don't get any rows
            null,                              // start position - first row
            ScanController.GE,                 // startSearchOperation
            qualifier,                         // scanQualifier,
            null,                              // stop position - through last row
            ScanController.GT);                // stopSearchOperation

        // One successful next() means at least one matching row exists.
        foundRow = (scanController.next());
    }
    finally
    {
        if (scanController != null)
        {
            scanController.close();
        }
        if (heapCC != null)
        {
            heapCC.close();
        }
    }

    return foundRow;
}
/**
 * Drop the table descriptor.
 *
 * @param td The table descriptor to drop
 * @param schema A descriptor for the schema the table
 *        is a part of. If this parameter is
 *        NULL, then the table is part of the
 *        current (default) schema
 * @param tc TransactionController for the transaction
 *
 * @exception StandardException Thrown on error
 */
public void dropTableDescriptor(TableDescriptor td, SchemaDescriptor schema,
                TransactionController tc)
    throws StandardException
{
    // Fix: removed the unused local ConglomerateController heapCC.
    TabInfoImpl ti = coreInfo[SYSTABLES_CORE_NUM];

    /* The (tablename, schemaid) key is both start and stop position
     * for the SYSTABLES_INDEX1 scan that locates the row to delete.
     */
    DataValueDescriptor tableNameOrderable = new SQLVarchar(td.getName());
    DataValueDescriptor schemaIDOrderable = getIDValueAsCHAR(schema.getUUID());

    ExecIndexRow keyRow1 = (ExecIndexRow) exFactory.getIndexableRow(2);
    keyRow1.setColumn(1, tableNameOrderable);
    keyRow1.setColumn(2, schemaIDOrderable);

    ti.deleteRow( tc, keyRow1, SYSTABLESRowFactory.SYSTABLES_INDEX1_ID );
}
/**
 * Update the lockGranularity for the specified table.
 *
 * NOTE(review): the lockGranularity parameter itself is not read here;
 * the new SYSTABLES row is built entirely from td, so the caller is
 * presumably expected to have already set the granularity on td —
 * confirm against callers.
 *
 * @param td The TableDescriptor for the table
 * @param schema The SchemaDescriptor for the table
 * @param lockGranularity The new lockGranularity
 * @param tc The TransactionController to use.
 *
 * @exception StandardException Thrown on error
 */
public void updateLockGranularity(TableDescriptor td, SchemaDescriptor schema,
                  char lockGranularity, TransactionController tc)
    throws StandardException
{
    // Fix: removed the unused local ConglomerateController heapCC and
    // the redundant loop that set each boolean[] element to false
    // (Java array elements already default to false).
    TabInfoImpl ti = coreInfo[SYSTABLES_CORE_NUM];
    SYSTABLESRowFactory rf = (SYSTABLESRowFactory) ti.getCatalogRowFactory();

    /* The (tablename, schemaid) key is both start and stop position
     * for the SYSTABLES_INDEX1 scan that locates the row to update.
     */
    DataValueDescriptor tableNameOrderable = new SQLVarchar(td.getName());
    DataValueDescriptor schemaIDOrderable = getIDValueAsCHAR(schema.getUUID());

    ExecIndexRow keyRow1 = (ExecIndexRow) exFactory.getIndexableRow(2);
    keyRow1.setColumn(1, tableNameOrderable);
    keyRow1.setColumn(2, schemaIDOrderable);

    // build the row to be stuffed into SYSTABLES.
    ExecRow row = rf.makeRow(td, schema);

    // update row in catalog (no indexes need maintenance: all elements
    // of bArray are false by default)
    boolean[] bArray = new boolean[2];

    ti.updateRow(keyRow1, row,
             SYSTABLESRowFactory.SYSTABLES_INDEX1_ID,
             bArray,
             (int[])null,
             tc);
}
/**
 * 10.6 upgrade logic to update the return type of SYSIBM.CLOBGETSUBSTRING. The length of the
 * return type was changed in 10.5 but old versions of the metadata were not
 * upgraded at that time. See DERBY-4214.
 *
 * @param tc transaction in which to run the catalog update
 * @exception StandardException on catalog access failure
 */
void upgradeCLOBGETSUBSTRING_10_6( TransactionController tc )
    throws StandardException
{
    TabInfoImpl ti = getNonCoreTI(SYSALIASES_CATALOG_NUM);

    /* Key for SYSALIASES_INDEX1: (schemaid, aliasname, namespace). */
    ExecIndexRow keyRow = exFactory.getIndexableRow(3);
    // Fix: removed a stray empty statement (double semicolon) here.
    DataValueDescriptor aliasNameOrderable = new SQLVarchar( "CLOBGETSUBSTRING" );
    DataValueDescriptor nameSpaceOrderable = new SQLChar
        ( new String( new char[] { AliasInfo.ALIAS_TYPE_FUNCTION_AS_CHAR } ) );

    keyRow.setColumn(1, new SQLChar( SchemaDescriptor.SYSIBM_SCHEMA_UUID ));
    keyRow.setColumn(2, aliasNameOrderable);
    keyRow.setColumn(3, nameSpaceOrderable);

    // Read the existing routine alias row so every other attribute
    // carries over unchanged.
    AliasDescriptor oldAD = (AliasDescriptor) getDescriptorViaIndex
        (
         SYSALIASESRowFactory.SYSALIASES_INDEX1_ID,
         keyRow,
         (ScanQualifier [][]) null,
         ti,
         (TupleDescriptor) null,
         (List) null,
         true,
         TransactionController.ISOLATION_REPEATABLE_READ,
         tc);
    RoutineAliasInfo oldRai = (RoutineAliasInfo) oldAD.getAliasInfo();

    // Widen the declared return type to the 10.5+ maximum CLOB return length.
    TypeDescriptor newReturnType = DataTypeDescriptor.getCatalogType( Types.VARCHAR, LOBStoredProcedure.MAX_CLOB_RETURN_LEN );
    RoutineAliasInfo newRai = new RoutineAliasInfo
        (
         oldRai.getMethodName(),
         oldRai.getParameterCount(),
         oldRai.getParameterNames(),
         oldRai.getParameterTypes(),
         oldRai.getParameterModes(),
         oldRai.getMaxDynamicResultSets(),
         oldRai.getParameterStyle(),
         oldRai.getSQLAllowed(),
         oldRai.isDeterministic(),
         oldRai.hasDefinersRights(),
         oldRai.calledOnNullInput(),
         newReturnType
         );
    AliasDescriptor newAD = new AliasDescriptor
        (
         this,
         oldAD.getUUID(),
         oldAD.getObjectName(),
         oldAD.getSchemaUUID(),
         oldAD.getJavaClassName(),
         oldAD.getAliasType(),
         oldAD.getNameSpace(),
         oldAD.getSystemAlias(),
         newRai,
         oldAD.getSpecificName()
         );
    ExecRow newRow = ti.getCatalogRowFactory().makeRow( newAD, null );

    // In-place update; none of the three indexed key columns change.
    ti.updateRow
        (
         keyRow,
         newRow,
         SYSALIASESRowFactory.SYSALIASES_INDEX1_ID,
         new boolean[] { false, false, false },
         (int[])null,
         tc
         );
}
/**
 * 10.6 upgrade logic to update the permissions granted to SYSCS_UTIL.SYSCS_INPLACE_COMPRESS_TABLE.
 * If a 10.0 database was upgraded to 10.2, 10.3, or 10.4, then there will
 * be an extra permissions tuple in SYSROUTINEPERMS--that tuple will have a
 * null grantor field. We must delete this tuple. See DERBY-4215.
 *
 * @param tc transaction in which to run the catalog update
 * @exception StandardException on catalog access failure
 */
void upgradeSYSROUTINEPERMS_10_6( TransactionController tc )
    throws StandardException
{
    //
    // Get the aliasID of SYSCS_INPLACE_COMPRESS_TABLE
    //
    TabInfoImpl aliasTI = getNonCoreTI(SYSALIASES_CATALOG_NUM);
    ExecIndexRow aliasKeyRow = exFactory.getIndexableRow(3);
    // Fix: removed a stray empty statement (double semicolon) here.
    DataValueDescriptor aliasNameOrderable = new SQLVarchar( "SYSCS_INPLACE_COMPRESS_TABLE" );
    DataValueDescriptor nameSpaceOrderable = new SQLChar
        ( new String( new char[] { AliasInfo.ALIAS_TYPE_PROCEDURE_AS_CHAR } ) );

    aliasKeyRow.setColumn(1, new SQLChar( SchemaDescriptor.SYSCS_UTIL_SCHEMA_UUID ));
    aliasKeyRow.setColumn(2, aliasNameOrderable);
    aliasKeyRow.setColumn(3, nameSpaceOrderable);

    AliasDescriptor oldAD = (AliasDescriptor) getDescriptorViaIndex
        (
         SYSALIASESRowFactory.SYSALIASES_INDEX1_ID,
         aliasKeyRow,
         (ScanQualifier [][]) null,
         aliasTI,
         (TupleDescriptor) null,
         (List) null,
         true,
         TransactionController.ISOLATION_REPEATABLE_READ,
         tc);
    UUID aliasID = oldAD.getUUID();

    //
    // Now delete the permissions tuple which has a null grantor
    //
    TabInfoImpl rpTI = getNonCoreTI(SYSROUTINEPERMS_CATALOG_NUM);
    ExecIndexRow rpKeyRow = exFactory.getIndexableRow(3);

    rpKeyRow.setColumn(1, new SQLVarchar( "PUBLIC" ));
    rpKeyRow.setColumn(2, new SQLChar( aliasID.toString() ));
    rpKeyRow.setColumn(3, new SQLVarchar( (String) null ) );

    // Fix: dropped the unused local that captured the delete count.
    rpTI.deleteRow(tc, rpKeyRow, SYSROUTINEPERMSRowFactory.GRANTEE_ALIAS_GRANTOR_INDEX_NUM);
}
/**
* Drop all table descriptors for a schema.
*
* @param schema A descriptor for the schema to drop the tables
* from.
*
* @return Nothing.
*
* @exception StandardException Thrown on failure
*/
/*
public void dropAllTableDescriptors(SchemaDescriptor schema)
throws StandardException
{
if (SanityManager.DEBUG) SanityManager.NOTREACHED();
}
*/
/**
 * Get a ColumnDescriptor given its Default ID.
 *
 * @param uuid The UUID of the default
 *
 * @return The ColumnDescriptor for the column, or null if none matches.
 *
 * @exception StandardException Thrown on failure
 */
public ColumnDescriptor getColumnDescriptorByDefaultId(UUID uuid)
    throws StandardException
{
    TabInfoImpl ti = coreInfo[SYSCOLUMNS_CORE_NUM];

    /* The default-id key is both the start and the stop position of
     * the SYSCOLUMNS_INDEX2 scan.
     */
    ExecIndexRow indexKey = exFactory.getIndexableRow(1);
    indexKey.setColumn(1, getIDValueAsCHAR(uuid));

    return (ColumnDescriptor)
        getDescriptorViaIndex(
            SYSCOLUMNSRowFactory.SYSCOLUMNS_INDEX2_ID,
            indexKey,
            (ScanQualifier [][]) null,
            ti,
            (DefaultDescriptor) null,
            (List) null,
            false);
}
/**
 * Populate the ColumnDescriptorList for the specified TableDescriptor,
 * keyed by the table's own UUID.
 *
 * MT synchronization: it is assumed that the caller has synchronized
 * on the CDL in the given TD.
 *
 * @param td The TableDescriptor.
 *
 * @exception StandardException Thrown on failure
 */
private void getColumnDescriptorsScan(TableDescriptor td)
    throws StandardException
{
    ColumnDescriptorList cdl = td.getColumnDescriptorList();
    getColumnDescriptorsScan(td.getUUID(), cdl, td);
}
/**
 * Populate the ColumnDescriptorList for the specified TableDescriptor,
 * reading SYSCOLUMNS through SYSCOLUMNS_INDEX1 and then reordering the
 * list by column position.
 *
 * MT synchronization: it is assumed that the caller has synchronized
 * on the CDL in the given TD.
 *
 * @param uuid The referencing UUID (the key for SYSCOLUMNS_INDEX1)
 * @param cdl The column descriptor list to fill in
 * @param td The parent tuple descriptor
 *
 * @exception StandardException Thrown on failure
 */
private void getColumnDescriptorsScan(
UUID uuid,
ColumnDescriptorList cdl,
TupleDescriptor td)
throws StandardException
{
ColumnDescriptor cd;
ColumnDescriptorList cdlCopy = new ColumnDescriptorList();
DataValueDescriptor refIDOrderable = null;
TabInfoImpl ti = coreInfo[SYSCOLUMNS_CORE_NUM];
/* Use refIDOrderable in both start and stop position for scan. */
refIDOrderable = getIDValueAsCHAR(uuid);
/* Set up the start/stop position for the scan */
ExecIndexRow keyRow = exFactory.getIndexableRow(1);
keyRow.setColumn(1, refIDOrderable);
// Fills cdl in index order (tableId, columnName) — NOT column position.
getDescriptorViaIndex(
SYSCOLUMNSRowFactory.SYSCOLUMNS_INDEX1_ID,
keyRow,
(ScanQualifier [][]) null,
ti,
td,
(ColumnDescriptorList) cdl,
false);
/* The TableDescriptor's column descriptor list must be ordered by
 * columnNumber. (It is probably not ordered correctly at this point due
 * to the index on syscolumns being on (tableId, columnName).) The
 * cheapest way to reorder the list appears to be to copy it (below), and
 * then walk the copy and put the elements back into the original in the
 * expected locations.
 */
int cdlSize = cdl.size();
// First pass: snapshot the index-ordered list into cdlCopy.
for (int index = 0; index < cdlSize; index++)
{
cdlCopy.add( cdl.get(index));
}
// Second pass: place each descriptor back into cdl at its 0-based
// slot (getPosition() is 1-based), yielding position order.
for (int index = 0; index < cdlSize; index++)
{
cd = (ColumnDescriptor) cdlCopy.elementAt(index);
cdl.set(cd.getPosition() - 1, cd);
}
}
/**
 * Given a column name and a table ID, drops the column descriptor
 * from the table.
 *
 * @param tableID The UUID of the table to drop the column from
 * @param columnName The name of the column to drop
 * @param tc TransactionController for the transaction
 *
 * @exception StandardException Thrown on error
 */
public void dropColumnDescriptor(UUID tableID,
        String columnName, TransactionController tc)
    throws StandardException
{
    /* The (tableid, columnname) pair identifies exactly one row in
     * SYSCOLUMNS; build it as the scan key.
     */
    ExecIndexRow keyRow = exFactory.getIndexableRow(2);
    keyRow.setColumn(1, getIDValueAsCHAR(tableID));
    keyRow.setColumn(2, new SQLVarchar(columnName));

    dropColumnDescriptorCore( tc, keyRow);
}
/**
 * Drops all column descriptors from the given table. Useful for
 * DROP TABLE.
 *
 * @param tableID The UUID of the table from which to drop
 *        all the column descriptors
 * @param tc TransactionController for the transaction
 *
 * @exception StandardException Thrown on error
 */
public void dropAllColumnDescriptors(UUID tableID, TransactionController tc)
    throws StandardException
{
    /* A key on only the table id matches every column row of the
     * table in SYSCOLUMNS.
     */
    ExecIndexRow keyRow = exFactory.getIndexableRow(1);
    keyRow.setColumn(1, getIDValueAsCHAR(tableID));

    dropColumnDescriptorCore(tc, keyRow);
}
/**
 * Drops all table and column permission descriptors for the given table.
 *
 * @param tableID The UUID of the table from which to drop
 *        all the permission descriptors
 * @param tc TransactionController for the transaction
 *
 * @exception StandardException Thrown on error
 */
public void dropAllTableAndColPermDescriptors(UUID tableID, TransactionController tc)
    throws StandardException
{
    // In Derby authorization mode, permission catalogs may not be present
    if (!usesSqlAuthorization)
        return;

    /* A key on only the table id matches every permission row for
     * the table in SYSTABLEPERMS / SYSCOLPERMS.
     */
    ExecIndexRow keyRow = exFactory.getIndexableRow(1);
    keyRow.setColumn(1, getIDValueAsCHAR(tableID));

    dropTablePermDescriptor(tc, keyRow);
    dropColumnPermDescriptor(tc, keyRow);
}
/**
 * Need to update SYSCOLPERMS for a given table because a new column has
 * been added to that table. SYSCOLPERMS has a column called "COLUMNS"
 * which is a bit map for all the columns in a given user table. Since
 * ALTER TABLE .. ADD COLUMN .. has added one more column, we need to
 * expand "COLUMNS" for that new column.
 *
 * Currently, this code gets called during execution phase of
 * ALTER TABLE .. ADD COLUMN ..
 *
 * @param tableID The UUID of the table to which a column has been added
 * @param tc TransactionController for the transaction
 *
 * @exception StandardException Thrown on error
 */
public void updateSYSCOLPERMSforAddColumnToUserTable(UUID tableID, TransactionController tc)
    throws StandardException
{
    // A null column descriptor tells the workhorse "grow, don't shrink".
    rewriteSYSCOLPERMSforAlterTable(tableID, tc, null);
}
/**
 * Update SYSCOLPERMS due to dropping a column from a table.
 *
 * Since ALTER TABLE .. DROP COLUMN .. has removed a column from the
 * table, we need to shrink COLUMNS by removing the corresponding bit
 * position, and shifting all the subsequent bits "left" one position.
 *
 * @param tableID The UUID of the table from which a col has been dropped
 * @param tc TransactionController for the transaction
 * @param columnDescriptor Information about the dropped column
 *
 * @exception StandardException Thrown on error
 */
public void updateSYSCOLPERMSforDropColumn(UUID tableID,
        TransactionController tc, ColumnDescriptor columnDescriptor)
    throws StandardException
{
    // Passing the dropped column's descriptor tells the workhorse to
    // shrink the bitmap at that column's position.
    rewriteSYSCOLPERMSforAlterTable(tableID, tc, columnDescriptor);
}
/**
 * Workhorse for ALTER TABLE-driven mods to SYSCOLPERMS.
 *
 * This method finds all the SYSCOLPERMS rows for this table. Then it
 * iterates through each row, either adding a new column to the end of
 * the table, or dropping a column from the table, as appropriate. It
 * updates each SYSCOLPERMS row to store the new COLUMNS value.
 *
 * @param tableID The UUID of the table being altered
 * @param tc TransactionController for the transaction
 * @param columnDescriptor Dropped column info, or null if adding
 *
 * @exception StandardException Thrown on error
 */
private void rewriteSYSCOLPERMSforAlterTable(UUID tableID,
        TransactionController tc, ColumnDescriptor columnDescriptor)
    throws StandardException
{
// In Derby authorization mode, permission catalogs may not be present
if (!usesSqlAuthorization)
return;
/* This method has 2 steps to it. First get all the ColPermsDescriptor
for given tableid. And next step is to go back to SYSCOLPERMS to find
unique row corresponding to each of ColPermsDescriptor and update the
"COLUMNS" column in SYSCOLPERMS. The reason for this 2 step process is
that SYSCOLPERMS has a non-unique row on "TABLEID" column and hence
we can't get a unique handle on each of the affected row in SYSCOLPERMS
using just the "TABLEID" column */
// First get all the ColPermsDescriptor for the given tableid from
//SYSCOLPERMS using getDescriptorViaIndex().
List permissionDescriptorsList;//all ColPermsDescriptor for given tableid
DataValueDescriptor tableIDOrderable = getIDValueAsCHAR(tableID);
TabInfoImpl ti = getNonCoreTI(SYSCOLPERMS_CATALOG_NUM);
SYSCOLPERMSRowFactory rf = (SYSCOLPERMSRowFactory) ti.getCatalogRowFactory();
ExecIndexRow keyRow = exFactory.getIndexableRow(1);
keyRow.setColumn(1, tableIDOrderable);
permissionDescriptorsList = newSList();
getDescriptorViaIndex(
SYSCOLPERMSRowFactory.TABLEID_INDEX_NUM,
keyRow,
(ScanQualifier [][]) null,
ti,
(TupleDescriptor) null,
permissionDescriptorsList,
false);
/* Next, using each of the ColPermDescriptor's uuid, get the unique row
in SYSCOLPERMS and adjust the "COLUMNS" column in SYSCOLPERMS to
accomodate the added or dropped column in the tableid*/
ColPermsDescriptor colPermsDescriptor;
ExecRow curRow;
ExecIndexRow uuidKey;
// Not updating any indexes on SYSCOLPERMS; the boolean[] defaults to
// all-false, which updateRow interprets as "no index maintenance".
boolean[] bArray = new boolean[SYSCOLPERMSRowFactory.TOTAL_NUM_OF_INDEXES];
int[] colsToUpdate = {SYSCOLPERMSRowFactory.COLUMNS_COL_NUM};
for (Iterator iterator = permissionDescriptorsList.iterator(); iterator.hasNext(); )
{
colPermsDescriptor = (ColPermsDescriptor) iterator.next();
// Invalidate any cached copy before rewriting the catalog row.
removePermEntryInCache(colPermsDescriptor);
// Re-fetch the row by its unique COLPERMSID so the update hits
// exactly one row (TABLEID alone is non-unique).
uuidKey = rf.buildIndexKeyRow(rf.COLPERMSID_INDEX_NUM, colPermsDescriptor);
curRow=ti.getRow(tc, uuidKey, rf.COLPERMSID_INDEX_NUM);
FormatableBitSet columns = (FormatableBitSet) curRow.getColumn(
SYSCOLPERMSRowFactory.COLUMNS_COL_NUM).getObject();
// See whether this is ADD COLUMN or DROP COLUMN. If ADD, then
// add a new bit to the bit set. If DROP, then remove the bit
// for the dropped column.
if (columnDescriptor == null)
{
// ADD COLUMN: append one (unset) bit for the new column.
int currentLength = columns.getLength();
columns.grow(currentLength+1);
}
else
{
FormatableBitSet modifiedColumns=new FormatableBitSet(columns);
modifiedColumns.shrink(columns.getLength()-1);
// All the bits from 0 ... colPosition-2 are OK. The bits from
// colPosition to the end need to be shifted 1 to the left.
// The bit for colPosition-1 simply disappears from COLUMNS.
// ColumnPosition values count from 1, while bits in the
// FormatableBitSet count from 0.
for (int i = columnDescriptor.getPosition()-1;
i < modifiedColumns.getLength();
i++)
{
if (columns.isSet(i+1))
modifiedColumns.set(i);
else
modifiedColumns.clear(i);
}
columns = modifiedColumns;
}
curRow.setColumn(SYSCOLPERMSRowFactory.COLUMNS_COL_NUM,
new UserType((Object) columns));
ti.updateRow(uuidKey, curRow,
SYSCOLPERMSRowFactory.COLPERMSID_INDEX_NUM,
bArray,
colsToUpdate,
tc);
}
}
/**
 * Remove a PermissionsDescriptor from the permissions cache, if it is
 * currently cached; a no-op otherwise.
 *
 * @param perm the permissions descriptor to evict
 * @exception StandardException on cache access failure
 */
private void removePermEntryInCache(PermissionsDescriptor perm)
    throws StandardException
{
    Cacheable cached = getPermissionsCache().findCached( perm);
    if (cached == null)
    {
        return;
    }
    getPermissionsCache().remove(cached);
}
/**
 * Drops all routine permission descriptors for the given routine.
 *
 * Repeatedly looks up a SYSROUTINEPERMS row by the routine's alias id
 * and deletes it by its unique ROUTINEPERMSID, until no matching rows
 * remain; each dropped row is also evicted from the permissions cache.
 *
 * @param routineID The UUID of the routine from which to drop
 *        all the permission descriptors
 * @param tc TransactionController for the transaction
 *
 * @exception StandardException Thrown on error
 */
public void dropAllRoutinePermDescriptors(UUID routineID, TransactionController tc)
throws StandardException
{
TabInfoImpl ti = getNonCoreTI(SYSROUTINEPERMS_CATALOG_NUM);
SYSROUTINEPERMSRowFactory rf = (SYSROUTINEPERMSRowFactory) ti.getCatalogRowFactory();
DataValueDescriptor routineIdOrderable;
ExecRow curRow;
PermissionsDescriptor perm;
// In Derby authorization mode, permission catalogs may not be present
if (!usesSqlAuthorization)
return;
/* Use tableIDOrderable in both start and stop position for scan. */
routineIdOrderable = getIDValueAsCHAR(routineID);
/* Set up the start/stop position for the scan */
ExecIndexRow keyRow = exFactory.getIndexableRow(1);
keyRow.setColumn(1, routineIdOrderable);
// Loop terminates when the alias-id lookup finds no more rows; each
// iteration deletes exactly one row via its unique permission id.
while ((curRow=ti.getRow(tc, keyRow, rf.ALIASID_INDEX_NUM)) != null)
{
perm = (PermissionsDescriptor)rf.buildDescriptor(curRow, (TupleDescriptor) null, this);
// Evict any cached copy before deleting the catalog row.
removePermEntryInCache(perm);
// Build new key based on UUID and drop the entry as we want to drop
// only this row
ExecIndexRow uuidKey;
uuidKey = rf.buildIndexKeyRow(rf.ROUTINEPERMSID_INDEX_NUM, perm);
ti.deleteRow(tc, uuidKey, rf.ROUTINEPERMSID_INDEX_NUM);
}
}
/**
 * Drop all role grants whose grantee matches the given authorization id.
 *
 * @see DataDictionary#dropRoleGrantsByGrantee
 */
public void dropRoleGrantsByGrantee(String grantee,
                    TransactionController tc)
    throws StandardException
{
    TabInfoImpl ti = getNonCoreTI(SYSROLES_CATALOG_NUM);
    SYSROLESRowFactory rf = (SYSROLESRowFactory)ti.getCatalogRowFactory();

    // Match on the grantee column of the (roleid, grantee, grantor)
    // index and delete every hit.
    visitRoleGrants(ti, rf,
            rf.SYSROLES_GRANTEE_COLPOS_IN_INDEX_ID_EE_OR,
            grantee, tc, DataDictionaryImpl.DROP);
}
/**
 * Return true if there exists a role grant to authorization
 * identifier.
 *
 * @param grantee authorization identifier
 * @param tc Transaction Controller
 *
 * @return true if there exists such a grant
 * @exception StandardException Thrown on failure
 */
private boolean existsRoleGrantByGrantee(String grantee,
                     TransactionController tc)
    throws StandardException
{
    TabInfoImpl ti = getNonCoreTI(SYSROLES_CATALOG_NUM);
    SYSROLESRowFactory rf = (SYSROLESRowFactory)ti.getCatalogRowFactory();

    // Same scan as dropRoleGrantsByGrantee, but stop at first match.
    boolean found = visitRoleGrants(ti, rf,
                    rf.SYSROLES_GRANTEE_COLPOS_IN_INDEX_ID_EE_OR,
                    grantee, tc, DataDictionaryImpl.EXISTS);
    return found;
}
/**
 * Drop all grants of the named role.
 *
 * @see DataDictionary#dropRoleGrantsByName
 */
public void dropRoleGrantsByName(String roleName,
                 TransactionController tc)
    throws StandardException
{
    TabInfoImpl ti = getNonCoreTI(SYSROLES_CATALOG_NUM);
    SYSROLESRowFactory rf = (SYSROLESRowFactory)ti.getCatalogRowFactory();

    // Match on the roleid column of the (roleid, grantee, grantor)
    // index and delete every hit.
    visitRoleGrants(ti, rf,
            rf.SYSROLES_ROLEID_COLPOS_IN_INDEX_ID_EE_OR,
            roleName, tc, DataDictionaryImpl.DROP);
}
/**
 * Scan the {roleid, grantee, grantor} index on SYSROLES,
 * locate rows containing authId in column columnNo.
 *
 * The action argument can be either <code>EXISTS</code> or
 * <code>DROP</code> (to check for existence, or to drop that row).
 *
 * If the scan proves too slow, we should add more indexes. only.
 *
 * @param ti <code>TabInfoImpl</code> for SYSROLES.
 * @param rf row factory for SYSROLES
 * @param columnNo the column number to match <code>authId</code> against
 * @param authId the authorization id to match
 * @param tc transaction controller
 * @param action drop matching rows (<code>DROP</code>), or return
 *        <code>true</code> if there is a matching row
 *        (<code>EXISTS</code>)
 *
 * @return action=EXISTS: return {@code true} if there is a matching row
 *         else return {@code false}.
 * @exception StandardException
 */
private boolean visitRoleGrants(TabInfoImpl ti,
SYSROLESRowFactory rf,
int columnNo,
String authId,
TransactionController tc,
int action)
throws StandardException
{
// The heap is opened for its row-location template below; repeatable
// read + record locking protect the rows we visit.
ConglomerateController heapCC = tc.openConglomerate(
ti.getHeapConglomerate(), false, 0,
TransactionController.MODE_RECORD,
TransactionController.ISOLATION_REPEATABLE_READ);
DataValueDescriptor authIdOrderable = new SQLVarchar(authId);
// Qualify the scan on: index column <columnNo> == authId.
ScanQualifier[][] scanQualifier = exFactory.getScanQualifier(1);
scanQualifier[0][0].setQualifier(
columnNo - 1, /* to zero-based */
authIdOrderable,
Orderable.ORDER_OP_EQUALS,
false,
false,
false);
// Full index scan with the qualifier; no start/stop key.
ScanController sc = tc.openScan(
ti.getIndexConglomerate(rf.SYSROLES_INDEX_ID_EE_OR_IDX),
false, // don't hold open across commit
0, // for update
TransactionController.MODE_RECORD,
TransactionController.ISOLATION_REPEATABLE_READ,
(FormatableBitSet) null, // all fields as objects
(DataValueDescriptor[]) null, // start position -
0, // startSearchOperation - none
scanQualifier, //
(DataValueDescriptor[]) null, // stop position -through last row
0); // stopSearchOperation - none
try {
ExecRow outRow = rf.makeEmptyRow();
// Index row (key + row location) used as the fetch target and,
// in DROP mode, as the delete key.
ExecIndexRow indexRow = getIndexRowFromHeapRow(
ti.getIndexRowGenerator(rf.SYSROLES_INDEX_ID_EE_OR_IDX),
heapCC.newRowLocationTemplate(),
outRow);
while (sc.fetchNext(indexRow.getRowArray())) {
if (action == DataDictionaryImpl.EXISTS) {
// First hit answers the existence question.
return true;
} else if (action == DataDictionaryImpl.DROP) {
// Delete the row just fetched, then continue scanning.
ti.deleteRow(tc, indexRow,
rf.SYSROLES_INDEX_ID_EE_OR_IDX);
}
}
} finally {
// Always release the scan and heap, even on early return/exception.
if (sc != null) {
sc.close();
}
if (heapCC != null) {
heapCC.close();
}
}
return false;
}
/**
 * Return an in-memory representation of the role grant graph (sans
 * grant of roles to users, only role-role relation).
 *
 * @param tc Transaction Controller
 * @param inverse make graph on inverse grant relation
 * @return hash map representing role grant graph.
 *         <ul><li>Key: rolename,</li>
 *             <li>Value: List&lt;RoleGrantDescriptor&gt; representing a
 *                 grant of that rolename to another role (not user).
 *             </li>
 *         </ul>
 *
 * FIXME: Need to cache graph and invalidate when role graph is modified.
 * Currently, we always read from SYSROLES.
 */
HashMap getRoleGrantGraph(TransactionController tc, boolean inverse)
        throws StandardException {

    HashMap hm = new HashMap();

    TabInfoImpl ti = getNonCoreTI(SYSROLES_CATALOG_NUM);
    SYSROLESRowFactory rf = (SYSROLESRowFactory) ti.getCatalogRowFactory();

    // Only grant rows (ISDEF = 'N'); definition rows are skipped.
    DataValueDescriptor isDefOrderable = new SQLVarchar("N");
    ScanQualifier[][] scanQualifier = exFactory.getScanQualifier(1);
    scanQualifier[0][0].setQualifier(
        SYSROLESRowFactory.SYSROLES_ISDEF - 1, /* to zero-based */
        isDefOrderable,
        Orderable.ORDER_OP_EQUALS,
        false,
        false,
        false);

    ScanController sc = tc.openScan(
        ti.getHeapConglomerate(),
        false,   // don't hold open across commit
        0,       // for update
        TransactionController.MODE_RECORD,
        TransactionController.ISOLATION_REPEATABLE_READ,
        (FormatableBitSet) null,      // all fields as objects
        (DataValueDescriptor[]) null, // start position -
        0,                            // startSearchOperation - none
        scanQualifier,                //
        (DataValueDescriptor[]) null, // stop position -through last row
        0);                           // stopSearchOperation - none

    // Fix: close the scan in a finally block so it is released even if
    // buildDescriptor/getRoleDefinitionDescriptor throws mid-scan
    // (previously sc.close() only ran on normal loop exit).
    try {
        ExecRow outRow = rf.makeEmptyRow();
        RoleGrantDescriptor grantDescr;

        while (sc.fetchNext(outRow.getRowArray())) {
            grantDescr = (RoleGrantDescriptor)rf.buildDescriptor(
                outRow,
                (TupleDescriptor) null,
                this);

            // Next call is potentially inefficient. We could read in
            // definitions first in a separate hash table limiting
            // this to a 2-pass scan.
            RoleGrantDescriptor granteeDef = getRoleDefinitionDescriptor
                (grantDescr.getGrantee());

            if (granteeDef == null) {
                // not a role, must be user authid, skip
                continue;
            }

            // Key the edge by grantee (inverse graph) or by the
            // granted role (forward graph).
            String hashKey;
            if (inverse) {
                hashKey = granteeDef.getRoleName();
            } else {
                hashKey = grantDescr.getRoleName();
            }

            List arcs = (List)hm.get(hashKey);
            if (arcs == null) {
                arcs = new LinkedList();
            }

            arcs.add(grantDescr);
            hm.put(hashKey, arcs);
        }
    } finally {
        sc.close();
    }

    return hm;
}
/**
 * Create an iterator over the transitive closure of role grants
 * reachable from the given role.
 *
 * @see DataDictionary#createRoleClosureIterator
 */
public RoleClosureIterator createRoleClosureIterator
    (TransactionController tc,
     String role,
     boolean inverse
        ) throws StandardException {
    // Delegate graph construction and traversal to the iterator impl.
    RoleClosureIteratorImpl it =
        new RoleClosureIteratorImpl(role, inverse, this, tc);
    return it;
}
/**
 * Drop all permission descriptors corresponding to a grant to
 * the named authentication identifier, across the table-, column-
 * and routine-permission catalogs.
 *
 * @param authId The authentication identifier
 * @param tc Transaction Controller
 *
 * @exception StandardException Thrown on failure
 */
public void dropAllPermsByGrantee(String authId,
                  TransactionController tc)
    throws StandardException
{
    // SYSTABLEPERMS
    dropPermsByGrantee(authId, tc,
        SYSTABLEPERMS_CATALOG_NUM,
        SYSTABLEPERMSRowFactory.GRANTEE_TABLE_GRANTOR_INDEX_NUM,
        SYSTABLEPERMSRowFactory.
            GRANTEE_COL_NUM_IN_GRANTEE_TABLE_GRANTOR_INDEX);

    // SYSCOLPERMS
    dropPermsByGrantee(authId, tc,
        SYSCOLPERMS_CATALOG_NUM,
        SYSCOLPERMSRowFactory.GRANTEE_TABLE_TYPE_GRANTOR_INDEX_NUM,
        SYSCOLPERMSRowFactory.
            GRANTEE_COL_NUM_IN_GRANTEE_TABLE_TYPE_GRANTOR_INDEX);

    // SYSROUTINEPERMS
    dropPermsByGrantee(authId, tc,
        SYSROUTINEPERMS_CATALOG_NUM,
        SYSROUTINEPERMSRowFactory.GRANTEE_ALIAS_GRANTOR_INDEX_NUM,
        SYSROUTINEPERMSRowFactory.
            GRANTEE_COL_NUM_IN_GRANTEE_ALIAS_GRANTOR_INDEX);
}
/**
* Presently only used when dropping roles - user dropping is not under
* Derby control (well, built-in users are if properties are stored in
* database), any permissions granted to users remain in place even if the
* user is no more.
*/
private void dropPermsByGrantee(String authId,
TransactionController tc,
int catalog,
int indexNo,
int granteeColnoInIndex)
throws StandardException
{
visitPermsByGrantee(authId,
tc,
catalog,
indexNo,
granteeColnoInIndex,
DataDictionaryImpl.DROP);
}
/**
* Return true if there exists a permission grant descriptor to this
* authorization id.
*/
private boolean existsPermByGrantee(String authId,
TransactionController tc,
int catalog,
int indexNo,
int granteeColnoInIndex)
throws StandardException
{
return visitPermsByGrantee(authId,
tc,
catalog,
indexNo,
granteeColnoInIndex,
DataDictionaryImpl.EXISTS);
}
    /**
     * Action code for visitPermsByGrantee and visitRoleGrants:
     * delete every matching row.
     */
    static final int DROP = 0;
    /**
     * Action code for visitPermsByGrantee and visitRoleGrants:
     * stop at the first match and report that it exists.
     */
    static final int EXISTS = 1;
/**
* Scan <code>indexNo</code> index on a permission table
* <code>catalog</code>, looking for match(es) for the grantee column
* (given by granteeColnoInIndex for the catalog in question).
*
* The action argument can be either <code>EXISTS</code> or
* <code>DROP</code> (to check for existence, or to drop that row).
*
* There is no index on grantee column only on on any of the
* permissions tables, so we use the index which contain grantee
* and scan that, setting up a scan qualifier to match the
* grantee, then fetch the base row.
*
* If this proves too slow, we should add an index on grantee
* only.
*
* @param authId grantee to match against
* @param tc transaction controller
* @param catalog the underlying permission table to visit
* @param indexNo the number of the index by which to access the catalog
* @param granteeColnoInIndex the column number to match
* <code>authId</code> against
* @param action drop matching rows (<code>DROP</code>), or return
* <code>true</code> if there is a matching row
* (<code>EXISTS</code>)
*
* @return action=EXISTS: return {@code true} if there is a matching row
* else return {@code false}.
* @exception StandardException
*/
private boolean visitPermsByGrantee(String authId,
TransactionController tc,
int catalog,
int indexNo,
int granteeColnoInIndex,
int action)
throws StandardException
{
TabInfoImpl ti = getNonCoreTI(catalog);
PermissionsCatalogRowFactory rf =
(PermissionsCatalogRowFactory)ti.getCatalogRowFactory();
ConglomerateController heapCC = tc.openConglomerate(
ti.getHeapConglomerate(), false, 0,
TransactionController.MODE_RECORD,
TransactionController.ISOLATION_REPEATABLE_READ);
DataValueDescriptor authIdOrderable = new SQLVarchar(authId);
ScanQualifier[][] scanQualifier = exFactory.getScanQualifier(1);
scanQualifier[0][0].setQualifier(
granteeColnoInIndex - 1, /* to zero-based */
authIdOrderable,
Orderable.ORDER_OP_EQUALS,
false,
false,
false);
ScanController sc = tc.openScan(
ti.getIndexConglomerate(indexNo),
false, // don't hold open across commit
0, // for update
TransactionController.MODE_RECORD,
TransactionController.ISOLATION_REPEATABLE_READ,
(FormatableBitSet) null, // all fields as objects
(DataValueDescriptor[]) null, // start position -
0, // startSearchOperation - none
scanQualifier, //
(DataValueDescriptor[]) null, // stop position -through last row
0); // stopSearchOperation - none
try {
ExecRow outRow = rf.makeEmptyRow();
ExecIndexRow indexRow = getIndexRowFromHeapRow(
ti.getIndexRowGenerator(indexNo),
heapCC.newRowLocationTemplate(),
outRow);
while (sc.fetchNext(indexRow.getRowArray())) {
RowLocation baseRowLocation = (RowLocation)indexRow.getColumn(
indexRow.nColumns());
boolean base_row_exists =
heapCC.fetch(
baseRowLocation, outRow.getRowArray(),
(FormatableBitSet)null);
if (SanityManager.DEBUG) {
// it can not be possible for heap row to
// disappear while holding scan cursor on index at
// ISOLATION_REPEATABLE_READ.
SanityManager.ASSERT(base_row_exists,
"base row doesn't exist");
}
if (action == DataDictionaryImpl.EXISTS) {
return true;
} else if (action == DataDictionaryImpl.DROP) {
PermissionsDescriptor perm = (PermissionsDescriptor)rf.
buildDescriptor(outRow,
(TupleDescriptor) null,
this);
removePermEntryInCache(perm);
ti.deleteRow(tc, indexRow, indexNo);
}
}
} finally {
if (sc != null) {
sc.close();
}
if (heapCC != null) {
heapCC.close();
}
}
return false;
}
/**
* Delete the appropriate rows from syscolumns when
* dropping 1 or more columns.
*
* @param tc The TransactionController
* @param keyRow Start/stop position.
*
* @exception StandardException Thrown on failure
*/
private void dropColumnDescriptorCore(
TransactionController tc,
ExecIndexRow keyRow)
throws StandardException
{
TabInfoImpl ti = coreInfo[SYSCOLUMNS_CORE_NUM];
ti.deleteRow( tc, keyRow, SYSCOLUMNSRowFactory.SYSCOLUMNS_INDEX1_ID );
}
/**
* Delete the appropriate rows from systableperms when
* dropping a table
*
* @param tc The TransactionController
* @param keyRow Start/stop position.
*
* @exception StandardException Thrown on failure
*/
private void dropTablePermDescriptor(
TransactionController tc,
ExecIndexRow keyRow)
throws StandardException
{
ExecRow curRow;
PermissionsDescriptor perm;
TabInfoImpl ti = getNonCoreTI(SYSTABLEPERMS_CATALOG_NUM);
SYSTABLEPERMSRowFactory rf = (SYSTABLEPERMSRowFactory) ti.getCatalogRowFactory();
while ((curRow=ti.getRow(tc, keyRow, rf.TABLEID_INDEX_NUM)) != null)
{
perm = (PermissionsDescriptor)rf.buildDescriptor(curRow, (TupleDescriptor) null, this);
removePermEntryInCache(perm);
// Build key on UUID and drop the entry as we want to drop only this row
ExecIndexRow uuidKey;
uuidKey = rf.buildIndexKeyRow(rf.TABLEPERMSID_INDEX_NUM, perm);
ti.deleteRow(tc, uuidKey, rf.TABLEPERMSID_INDEX_NUM);
}
}
/**
* Delete the appropriate rows from syscolperms when
* dropping a table
*
* @param tc The TransactionController
* @param keyRow Start/stop position.
*
* @exception StandardException Thrown on failure
*/
private void dropColumnPermDescriptor(
TransactionController tc,
ExecIndexRow keyRow)
throws StandardException
{
ExecRow curRow;
PermissionsDescriptor perm;
TabInfoImpl ti = getNonCoreTI(SYSCOLPERMS_CATALOG_NUM);
SYSCOLPERMSRowFactory rf = (SYSCOLPERMSRowFactory) ti.getCatalogRowFactory();
while ((curRow=ti.getRow(tc, keyRow, rf.TABLEID_INDEX_NUM)) != null)
{
perm = (PermissionsDescriptor)rf.buildDescriptor(curRow, (TupleDescriptor) null, this);
removePermEntryInCache(perm);
// Build key on UUID and drop the entry as we want to drop only this row
ExecIndexRow uuidKey;
uuidKey = rf.buildIndexKeyRow(rf.COLPERMSID_INDEX_NUM, perm);
ti.deleteRow(tc, uuidKey, rf.COLPERMSID_INDEX_NUM);
}
}
    /**
     * Update the column descriptor in question.  Updates
     * every row in the base conglomerate.
     *
     * @param cd         The ColumnDescriptor
     * @param formerUUID The UUID for this column in SYSCOLUMNS,
     *                   may differ from what is in cd if this
     *                   is the column that is being set.
     * @param formerName The name for this column in SYSCOLUMNS
     *                   may differ from what is in cd if this
     *                   is the column that is being set.
     * @param colsToSet  Array of ints of columns to be modified,
     *                   1 based.  May be null (all cols).
     * @param tc         The TransactionController to use
     *
     * @exception StandardException Thrown on failure
     */
    private void updateColumnDescriptor(ColumnDescriptor cd,
                                        UUID formerUUID,
                                        String formerName,
                                        int[] colsToSet,
                                        TransactionController tc)
        throws StandardException
    {
        ExecIndexRow keyRow1 = null;
        ExecRow row;
        DataValueDescriptor refIDOrderable;
        DataValueDescriptor columnNameOrderable;
        TabInfoImpl ti = coreInfo[SYSCOLUMNS_CORE_NUM];
        SYSCOLUMNSRowFactory rf = (SYSCOLUMNSRowFactory) ti.getCatalogRowFactory();

        /* Use objectID/columnName in both start
         * and stop position for index 1 scan.
         */
        refIDOrderable = getIDValueAsCHAR(formerUUID);
        columnNameOrderable = new SQLVarchar(formerName);

        /* Set up the start/stop position for the scan */
        keyRow1 = (ExecIndexRow) exFactory.getIndexableRow(2);
        keyRow1.setColumn(1, refIDOrderable);
        keyRow1.setColumn(2, columnNameOrderable);

        // build the row to be stuffed into SYSCOLUMNS.
        row = rf.makeRow(cd, null);

        /*
        ** Figure out if the index in syscolumns needs
        ** to be updated.
        */
        if (SanityManager.DEBUG)
        {
            SanityManager.ASSERT(rf.getNumIndexes() == 2,
                "There are more indexes on syscolumns than expected, the code herein needs to change");
        }

        // bArray[i] == true means index i must be updated along with the row.
        boolean[] bArray = new boolean[rf.getNumIndexes()];

        /*
        ** Do we need to update indexes?
        */
        if (colsToSet == null)
        {
            // Full-row update: both indexes may be affected.
            bArray[0] = true;
            bArray[1] = true;
        }
        else
        {
            /*
            ** Check the specific columns for indexed
            ** columns.
            **
            ** NOTE(review): each branch breaks out of the loop after
            ** setting a single index flag, so if colsToSet named both a
            ** name/referenceid column AND COLUMNDEFAULTID, only the flag
            ** for whichever appears first would be set.  Presumably
            ** callers never pass both kinds together - confirm before
            ** changing.
            */
            for (int i = 0; i < colsToSet.length; i++)
            {
                if ((colsToSet[i] == rf.SYSCOLUMNS_COLUMNNAME) ||
                    (colsToSet[i] == rf.SYSCOLUMNS_REFERENCEID))
                {
                    bArray[0] = true;
                    break;
                }
                else if (colsToSet[i] == rf.SYSCOLUMNS_COLUMNDEFAULTID)
                {
                    bArray[1] = true;
                    break;
                }
            }
        }

        ti.updateRow(keyRow1, row,
                     SYSCOLUMNSRowFactory.SYSCOLUMNS_INDEX1_ID,
                     bArray,
                     colsToSet,
                     tc);
    }
/**
* Gets the viewDescriptor for the view with the given UUID.
*
* @param uuid The UUID for the view
*
* @return A descriptor for the view
*
* @exception StandardException Thrown on error
*/
public ViewDescriptor getViewDescriptor(UUID uuid)
throws StandardException
{
return getViewDescriptor(getTableDescriptor(uuid));
}
    /**
     * Gets the viewDescriptor for the view given the TableDescriptor.
     *
     * @param td The TableDescriptor for the view.
     *
     * @return A descriptor for the view
     *
     * @exception StandardException Thrown on error
     */
    public ViewDescriptor getViewDescriptor(TableDescriptor td)
        throws StandardException
    {
        TableDescriptor tdi = (TableDescriptor) td;

        /* See if the view info is cached */
        if (tdi.getViewDescriptor() != null)
        {
            return tdi.getViewDescriptor();
        }

        // Double-checked locking on the table descriptor: one thread
        // performs the SYSVIEWS scan, racing threads block here and
        // then reuse the descriptor it stored.
        synchronized(tdi)
        {
            /* See if we were waiting on someone who just filled it in */
            if (tdi.getViewDescriptor() != null)
            {
                return tdi.getViewDescriptor();
            }

            tdi.setViewDescriptor((ViewDescriptor) getViewDescriptorScan(tdi));
        }
        return tdi.getViewDescriptor();
    }
/**
* Get the information for the view from sys.sysviews.
*
* @param tdi The TableDescriptor for the view.
*
* @return ViewDescriptor The ViewDescriptor for the view.
*
* @exception StandardException Thrown on error
*/
private ViewDescriptor getViewDescriptorScan(TableDescriptor tdi)
throws StandardException
{
ViewDescriptor vd;
DataValueDescriptor viewIdOrderable;
TabInfoImpl ti = getNonCoreTI(SYSVIEWS_CATALOG_NUM);
UUID viewID = tdi.getUUID();
/* Use viewIdOrderable in both start
* and stop position for scan.
*/
viewIdOrderable = getIDValueAsCHAR(viewID);
/* Set up the start/stop position for the scan */
ExecIndexRow keyRow = exFactory.getIndexableRow(1);
keyRow.setColumn(1, viewIdOrderable);
vd = (ViewDescriptor)
getDescriptorViaIndex(
SYSVIEWSRowFactory.SYSVIEWS_INDEX1_ID,
keyRow,
(ScanQualifier [][]) null,
ti,
(TupleDescriptor) null,
(List) null,
false);
if (vd != null)
{
vd.setViewName(tdi.getName());
}
return vd;
}
/**
* Drops the view descriptor from the data dictionary.
*
* @param vd A descriptor for the view to be dropped
* @param tc TransactionController to use
*
* @exception StandardException Thrown on error
*/
public void dropViewDescriptor(ViewDescriptor vd,
TransactionController tc)
throws StandardException
{
DataValueDescriptor viewIdOrderable;
TabInfoImpl ti = getNonCoreTI(SYSVIEWS_CATALOG_NUM);
/* Use aliasNameOrderable in both start
* and stop position for scan.
*/
viewIdOrderable = getIDValueAsCHAR(vd.getUUID());
/* Set up the start/stop position for the scan */
ExecIndexRow keyRow = (ExecIndexRow) exFactory.getIndexableRow(1);
keyRow.setColumn(1, viewIdOrderable);
ti.deleteRow( tc, keyRow, SYSVIEWSRowFactory.SYSVIEWS_INDEX1_ID );
}
/**
* Scan sysfiles_index2 (id) for a match.
* @return TableDescriptor The matching descriptor, or null.
* @exception StandardException Thrown on failure
*/
private FileInfoDescriptor
getFileInfoDescriptorIndex2Scan(UUID id)
throws StandardException
{
DataValueDescriptor idOrderable;
TabInfoImpl ti = getNonCoreTI(SYSFILES_CATALOG_NUM);
idOrderable = getIDValueAsCHAR(id);
/* Set up the start/stop position for the scan */
ExecIndexRow keyRow = exFactory.getIndexableRow(1);
keyRow.setColumn(1, idOrderable);
return (FileInfoDescriptor)
getDescriptorViaIndex(
SYSFILESRowFactory.SYSFILES_INDEX2_ID,
keyRow,
(ScanQualifier [][]) null,
ti,
(TupleDescriptor) null,
(List) null,
false);
}
/**
* @see DataDictionary#getFileInfoDescriptor
* @exception StandardException Thrown on failure
*/
public FileInfoDescriptor getFileInfoDescriptor(UUID id)
throws StandardException
{
return getFileInfoDescriptorIndex2Scan(id);
}
/**
* Scan sysfiles_index1 (schemaid,name) for a match.
* @return The matching descriptor or null.
* @exception StandardException Thrown on failure
*/
private FileInfoDescriptor getFileInfoDescriptorIndex1Scan(
UUID schemaId,
String name)
throws StandardException
{
DataValueDescriptor schemaIDOrderable;
DataValueDescriptor nameOrderable;
TabInfoImpl ti = getNonCoreTI(SYSFILES_CATALOG_NUM);
nameOrderable = new SQLVarchar(name);
schemaIDOrderable = getIDValueAsCHAR(schemaId);
/* Set up the start/stop position for the scan */
ExecIndexRow keyRow = exFactory.getIndexableRow(2);
keyRow.setColumn(1, nameOrderable);
keyRow.setColumn(2, schemaIDOrderable);
FileInfoDescriptor r = (FileInfoDescriptor)
getDescriptorViaIndex(
SYSFILESRowFactory.SYSFILES_INDEX1_ID,
keyRow,
(ScanQualifier [][]) null,
ti,
(TupleDescriptor) null,
(List) null,
false);
return r;
}
/**
* @see DataDictionary#getFileInfoDescriptor
* @exception StandardException Thrown on failure
*/
public FileInfoDescriptor getFileInfoDescriptor(SchemaDescriptor sd, String name)
throws StandardException
{
return getFileInfoDescriptorIndex1Scan(sd.getUUID(),name);
}
/**
* @see DataDictionary#dropFileInfoDescriptor
* @exception StandardException Thrown on error
*/
public void dropFileInfoDescriptor(FileInfoDescriptor fid)
throws StandardException
{
ConglomerateController heapCC;
ExecIndexRow keyRow1 = null;
DataValueDescriptor idOrderable;
TabInfoImpl ti = getNonCoreTI(SYSFILES_CATALOG_NUM);
TransactionController tc = getTransactionExecute();
/* Use tableIdOrderable and schemaIdOrderable in both start
* and stop position for index 1 scan.
*/
idOrderable = getIDValueAsCHAR(fid.getUUID());
/* Set up the start/stop position for the scan */
keyRow1 = (ExecIndexRow) exFactory.getIndexableRow(1);
keyRow1.setColumn(1, idOrderable);
ti.deleteRow( tc, keyRow1, SYSFILESRowFactory.SYSFILES_INDEX2_ID );
}
    /**
     * Get a SPSDescriptor given its UUID.
     *
     * @param uuid The UUID
     *
     * @return The SPSDescriptor for the constraint.
     *
     * @exception StandardException Thrown on failure
     */
    public SPSDescriptor getSPSDescriptor(UUID uuid)
        throws StandardException
    {
        SPSDescriptor sps;

        /* Make sure that non-core info is initialized */
        getNonCoreTI(SYSSTATEMENTS_CATALOG_NUM);

        /* Only use the cache if we're in compile-only mode */
        if ((spsNameCache != null) &&
            (getCacheMode() == DataDictionary.COMPILE_ONLY_MODE))
        {
            // Fast path: the id-to-descriptor hash is maintained by the
            // SPS name cache (see spsCacheEntryAdded/spsCacheEntryRemoved).
            sps = (SPSDescriptor)spsIdHash.get(uuid);
            if (sps != null)
            {
                //System.out.println("found in hash table ");
                // System.out.println("stmt text " + sps.getText());
                return sps;
            }

            // Miss: read from SYSSTATEMENTS, then seed the name cache so
            // future by-name and by-id lookups both hit.
            sps = getSPSDescriptorIndex2Scan(uuid.toString());
            TableKey stmtKey = new TableKey(sps.getSchemaDescriptor().getUUID(), sps.getName());
            try
            {
                SPSNameCacheable cacheEntry = (SPSNameCacheable)spsNameCache.create(stmtKey, sps);
                spsNameCache.release(cacheEntry);
            } catch (StandardException se)
            {
                /*
                ** If the error is that the item is already
                ** in the cache, then that is ok.
                */
                if (SQLState.OBJECT_EXISTS_IN_CACHE.equals(se.getMessageId()))
                {
                    return sps;
                }
                else
                {
                    throw se;
                }
            }
        }
        else
        {
            sps = getSPSDescriptorIndex2Scan(uuid.toString());
        }

        return sps;
    }
    /**
     * Callback from the SPS name cache: record the descriptor in the
     * id-to-descriptor hash so getSPSDescriptor(UUID) can find it.
     */
    void spsCacheEntryAdded(SPSDescriptor spsd)
    {
        spsIdHash.put(spsd.getUUID(), spsd);
        // spsTextHash.put(spsd.getText(), spsd);
    }
    /**
     * Callback from the SPS name cache: remove the descriptor from the
     * id-to-descriptor hash when it is evicted from the cache.
     */
    void spsCacheEntryRemoved(SPSDescriptor spsd) {
        spsIdHash.remove(spsd.getUUID());
        // spsTextHash.remove(spsd.getText());
    }

    //public SPSDescriptor getSPSBySQLText(String text) {
    //  return (SPSDescriptor) spsTextHash.get(text);
    //}
/**
* This method can get called from the DataDictionary cache.
*
* @param stmtKey The TableKey of the sps
*
* @return The descriptor for the sps, null if the sps does
* not exist.
*
* @exception StandardException Thrown on failure
*/
public SPSDescriptor getUncachedSPSDescriptor(TableKey stmtKey)
throws StandardException
{
return getSPSDescriptorIndex1Scan(stmtKey.getTableName(),
stmtKey.getSchemaId().toString());
}
/**
* This method can get called from the DataDictionary cache.
*
* @param stmtId The UUID of the stmt to get the descriptor for
*
* @return The descriptor for the stmt, null if the table does
* not exist.
*
* @exception StandardException Thrown on failure
*/
protected SPSDescriptor getUncachedSPSDescriptor(UUID stmtId)
throws StandardException
{
return getSPSDescriptorIndex2Scan(stmtId.toString());
}
/**
* Scan sysstatements_index2 (stmtid) for a match.
* Note that we do not do a lookup of parameter info.
*
* @return SPSDescriptor The matching descriptor, if any.
*
* @exception StandardException Thrown on failure
*/
private SPSDescriptor getSPSDescriptorIndex2Scan(
String stmtUUID)
throws StandardException
{
DataValueDescriptor stmtIDOrderable;
TabInfoImpl ti = getNonCoreTI(SYSSTATEMENTS_CATALOG_NUM);
/* Use stmtIdOrderable in both start
* and stop position for scan.
*/
stmtIDOrderable = new SQLChar(stmtUUID);
/* Set up the start/stop position for the scan */
ExecIndexRow keyRow = exFactory.getIndexableRow(1);
keyRow.setColumn(1, stmtIDOrderable);
SPSDescriptor spsd = (SPSDescriptor)
getDescriptorViaIndex(
SYSSTATEMENTSRowFactory.SYSSTATEMENTS_INDEX1_ID,
keyRow,
(ScanQualifier [][]) null,
ti,
(TupleDescriptor) null,
(List) null,
false);
return spsd;
}
    /**
     * Get a SPSDescriptor given its name.
     * Currently no caching outside of compile-only mode.  With caching
     * we need to be very careful about invalidation.
     * No caching means invalidations block on
     * existing SPSD instances (since they were read in separately).
     *
     * @param stmtName the statement name
     * @param sd       The SchemaDescriptor
     *
     * @return The SPSDescriptor for the constraint.
     *
     * @exception StandardException Thrown on failure
     */
    public SPSDescriptor getSPSDescriptor(String stmtName, SchemaDescriptor sd)
        throws StandardException
    {
        SPSDescriptor sps = null;
        TableKey stmtKey;
        UUID schemaUUID;

        /*
        ** If we didn't get a schema descriptor, we had better
        ** have a system table.
        */
        if (SanityManager.DEBUG)
        {
            if (sd == null)
            {
                SanityManager.THROWASSERT("null schema for statement "+stmtName);
            }
        }

        schemaUUID = sd.getUUID();
        stmtKey = new TableKey(schemaUUID, stmtName);

        /* Only use the cache if we're in compile-only mode */
        if ((spsNameCache != null) &&
            (getCacheMode() == DataDictionary.COMPILE_ONLY_MODE))
        {
            // NOTE(review): on a cache miss this returns null without
            // falling through to the index scan - presumably
            // spsNameCache.find() faults the descriptor in from disk
            // itself, so a null here means the statement truly does not
            // exist.  Confirm against SPSNameCacheable.
            SPSNameCacheable cacheEntry = (SPSNameCacheable) spsNameCache.find(stmtKey);
            if (cacheEntry != null)
            {
                sps = cacheEntry.getSPSDescriptor();
                spsNameCache.release(cacheEntry);
            }
            //System.out.println("found in cache " + stmtName);
            //System.out.println("stmt text " + sps.getText());
            return sps;
        }

        return getSPSDescriptorIndex1Scan(stmtName, schemaUUID.toString());
    }
    /**
     * Scan the (stmtname, schemaid) index on SYSSTATEMENTS for a match.
     * (The scan goes through SYSSTATEMENTS_INDEX2_ID; the previous
     * comment's mention of "sysschemas_index1" was wrong.)
     *
     * @return SPSDescriptor The matching descriptor, if any.
     *
     * @exception StandardException Thrown on failure
     */
    private SPSDescriptor getSPSDescriptorIndex1Scan(
            String stmtName,
            String schemaUUID)
        throws StandardException
    {
        DataValueDescriptor schemaIDOrderable;
        DataValueDescriptor stmtNameOrderable;
        TabInfoImpl ti = getNonCoreTI(SYSSTATEMENTS_CATALOG_NUM);

        /* Use stmtNameOrderable and schemaIdOrderable in both start
         * and stop position for scan.
         */
        stmtNameOrderable = new SQLVarchar(stmtName);
        schemaIDOrderable = new SQLChar(schemaUUID);

        /* Set up the start/stop position for the scan */
        ExecIndexRow keyRow = exFactory.getIndexableRow(2);
        keyRow.setColumn(1, stmtNameOrderable);
        keyRow.setColumn(2, schemaIDOrderable);

        SPSDescriptor spsd = (SPSDescriptor)
            getDescriptorViaIndex(
                SYSSTATEMENTSRowFactory.SYSSTATEMENTS_INDEX2_ID,
                keyRow,
                (ScanQualifier [][]) null,
                ti,
                (TupleDescriptor) null,
                (List) null,
                false);

        /*
        ** Set up the parameter defaults.  We are only
        ** doing this when we look up by name because
        ** this is the only time we cache, and it can
        ** be foolish to look up the parameter defaults
        ** for someone that doesn't need them.
        */
        if (spsd != null)
        {
            Vector v = new Vector();
            spsd.setParams(getSPSParams(spsd, v));
            Object[] defaults = new Object[v.size()];
            v.copyInto(defaults);
            spsd.setParameterDefaults(defaults);
        }

        return spsd;
    }
    /**
     * Adds the given SPSDescriptor to the data dictionary,
     * associated with the given table and constraint type.
     *
     * @param descriptor The descriptor to add
     * @param tc         The transaction controller
     *
     * @exception StandardException Thrown on error
     */
    public void addSPSDescriptor
    (
        SPSDescriptor   descriptor,
        TransactionController tc
    ) throws StandardException
    {
        ExecRow row;
        TabInfoImpl ti = getNonCoreTI(SYSSTATEMENTS_CATALOG_NUM);
        SYSSTATEMENTSRowFactory rf = (SYSSTATEMENTSRowFactory) ti.getCatalogRowFactory();
        int insertRetCode;

        /*
        ** We must make sure the descriptor is locked
        ** while we are writing it out.  Otherwise,
        ** the descriptor could be invalidated while
        ** we are writing.
        */
        synchronized(descriptor)
        {
            // build the row to be stuffed into SYSSTATEMENTS. this will stuff an
            // UUID into the descriptor
            boolean compileMe = descriptor.initiallyCompilable();
            row = rf.makeSYSSTATEMENTSrow(compileMe, descriptor);

            // insert row into catalog and all its indices
            insertRetCode = ti.insertRow(row, tc);
        }

        // Throw an exception duplicate table descriptor
        if (insertRetCode != TabInfoImpl.ROWNOTDUPLICATE)
        {
            throw StandardException.newException(SQLState.LANG_OBJECT_ALREADY_EXISTS_IN_OBJECT,
                                                 descriptor.getDescriptorType(),
                                                 descriptor.getDescriptorName(),
                                                 descriptor.getSchemaDescriptor().getDescriptorType(),
                                                 descriptor.getSchemaDescriptor().getSchemaName());
        }

        // Parameter metadata is stored as SYSCOLUMNS rows keyed by the
        // statement's UUID (assigned by makeSYSSTATEMENTSrow above).
        addSPSParams(descriptor, tc);
    }
/**
* Add a column in SYS.SYSCOLUMNS for each parameter in the
* parameter list.
*/
private void addSPSParams(SPSDescriptor spsd, TransactionController tc)
throws StandardException
{
UUID uuid = spsd.getUUID();
DataTypeDescriptor[] params = spsd.getParams();
Object[] parameterDefaults = spsd.getParameterDefaults();
if (params == null)
return;
/* Create the columns */
int pdlSize = params.length;
for (int index = 0; index < pdlSize; index++)
{
int parameterId = index + 1;
//RESOLVEAUTOINCREMENT
ColumnDescriptor cd =
new ColumnDescriptor(
"PARAM" + parameterId,
parameterId, // position
params[index],
((parameterDefaults == null) || // default
(index >= parameterDefaults.length)) ?
(DataValueDescriptor)null :
(DataValueDescriptor)parameterDefaults[index],
(DefaultInfo) null,
uuid,
(UUID) null, 0, 0, 0);
addDescriptor(cd, null, SYSCOLUMNS_CATALOG_NUM,
false, // no chance of duplicates here
tc);
}
}
/**
* Get all the parameter descriptors for an SPS.
* Look up the params in SYSCOLUMNS and turn them
* into parameter descriptors.
*
* @param spsd sps descriptor
* @param defaults vector for storing column defaults
*
* @return array of data type descriptors
*
* @exception StandardException Thrown on error
*/
public DataTypeDescriptor[] getSPSParams(SPSDescriptor spsd, Vector defaults)
throws StandardException
{
ColumnDescriptorList cdl = new ColumnDescriptorList();
getColumnDescriptorsScan(spsd.getUUID(), cdl, spsd);
int cdlSize = cdl.size();
DataTypeDescriptor[] params = new DataTypeDescriptor[cdlSize];
for (int index = 0; index < cdlSize; index++)
{
ColumnDescriptor cd = (ColumnDescriptor) cdl.elementAt(index);
params[index] = cd.getType();
if (defaults != null)
{
defaults.addElement(cd.getDefaultValue());
}
}
return params;
}
    /**
     * Updates SYS.SYSSTATEMENTS with the info from the
     * SPSD.
     *
     * @param spsd                  The descriptor to add
     * @param tc                    The transaction controller
     * @param recompile             If true, the full set of compiled-state
     *                              columns is rewritten; if false only the
     *                              VALID flag is updated.
     * @param updateParamDescriptors If true, will update the
     *                              parameter descriptors in SYS.SYSCOLUMNS.
     * @param firstCompilation      true, if Statement is getting compiled for first
     *                              time and SPS was created with NOCOMPILE option.
     *
     * @exception StandardException Thrown on error
     */
    public void updateSPS(
            SPSDescriptor spsd,
            TransactionController tc,
            boolean recompile,
            boolean updateParamDescriptors,
            boolean firstCompilation)
        throws StandardException
    {
        ExecIndexRow keyRow1 = null;
        ExecRow row;
        DataValueDescriptor idOrderable;
        TabInfoImpl ti = getNonCoreTI(SYSSTATEMENTS_CATALOG_NUM);
        SYSSTATEMENTSRowFactory rf = (SYSSTATEMENTSRowFactory) ti.getCatalogRowFactory();
        int[] updCols;
        if (recompile)
        {
            if(firstCompilation)
            {
                // First compile of a NOCOMPILE statement: the
                // INITIALLY_COMPILABLE column must be rewritten too.
                updCols = new int[] {SYSSTATEMENTSRowFactory.SYSSTATEMENTS_VALID,
                                     SYSSTATEMENTSRowFactory.SYSSTATEMENTS_TEXT,
                                     SYSSTATEMENTSRowFactory.SYSSTATEMENTS_LASTCOMPILED,
                                     SYSSTATEMENTSRowFactory.SYSSTATEMENTS_USINGTEXT,
                                     SYSSTATEMENTSRowFactory.SYSSTATEMENTS_CONSTANTSTATE,
                                     SYSSTATEMENTSRowFactory.SYSSTATEMENTS_INITIALLY_COMPILABLE};
            }else
            {
                updCols = new int[] {SYSSTATEMENTSRowFactory.SYSSTATEMENTS_VALID,
                                     SYSSTATEMENTSRowFactory.SYSSTATEMENTS_TEXT,
                                     SYSSTATEMENTSRowFactory.SYSSTATEMENTS_LASTCOMPILED,
                                     SYSSTATEMENTSRowFactory.SYSSTATEMENTS_USINGTEXT,
                                     SYSSTATEMENTSRowFactory.SYSSTATEMENTS_CONSTANTSTATE };
            }
        }
        else
        {
            // Not recompiling: only the VALID flag changes.
            updCols = new int[] {SYSSTATEMENTSRowFactory.SYSSTATEMENTS_VALID} ;
        }

        idOrderable = getIDValueAsCHAR(spsd.getUUID());

        /* Set up the start/stop position for the scan */
        keyRow1 = (ExecIndexRow) exFactory.getIndexableRow(1);
        keyRow1.setColumn(1, idOrderable);

        row = rf.makeSYSSTATEMENTSrow(false,    // don't compile
                                      spsd);

        /*
        ** Not updating any indexes
        */
        boolean[] bArray = new boolean[2];

        /*
        ** Partial update
        */
        ti.updateRow(keyRow1, row,
                     SYSSTATEMENTSRowFactory.SYSSTATEMENTS_INDEX1_ID,
                     bArray,
                     updCols,
                     tc);

        /*
        ** If we don't need to update the parameter
        ** descriptors, we are done.
        */
        if (!updateParamDescriptors)
        {
            return;
        }

        /*
        ** Set the defaults and datatypes for the parameters, if
        ** there are parameters.
        */
        DataTypeDescriptor[] params = spsd.getParams();
        if (params == null)
        {
            return;
        }

        if(firstCompilation)
        {
            /*beetle:5119, reason for doing add here instead of update
             *is with NOCOMPILE option of create statement/boot time SPS,
             *SPS statement is not compiled to find out the parameter info.
             *Because of that the parameter info was not inserted at SPSDescriptor
             *creation time.  As this is the first time we are compiling, parameter
             *info should be inserted instead of updated.
             */
            addSPSParams(spsd, tc);
        }
        else
        {
            Object[] parameterDefaults = spsd.getParameterDefaults();

            /*
            ** Update each column with the new defaults and with
            ** the new datatypes.  It is possible that someone has
            ** done a drop/create on the underlying table and
            ** changed the type of a column, which has changed
            ** the type of a parameter to our statement.
            */
            int[] columnsToSet = new int[2];
            columnsToSet[0] = SYSCOLUMNSRowFactory.SYSCOLUMNS_COLUMNDATATYPE;
            columnsToSet[1] = SYSCOLUMNSRowFactory.SYSCOLUMNS_COLUMNDEFAULT;

            UUID uuid = spsd.getUUID();

            for (int index = 0; index < params.length; index++)
            {
                int parameterId = index + 1;

                //RESOLVEAUTOINCREMENT
                ColumnDescriptor cd = new ColumnDescriptor("PARAM" + parameterId,
                                                           parameterId,    // position
                                                           params[index],
                                                           ((parameterDefaults == null) ||    // default
                                                            (index >= parameterDefaults.length)) ?
                                                               (DataValueDescriptor)null :
                                                               (DataValueDescriptor)parameterDefaults[index],
                                                           (DefaultInfo) null,
                                                           uuid,
                                                           (UUID) null,
                                                           0, 0, 0);

                updateColumnDescriptor(cd,
                                       cd.getReferencingUUID(),
                                       cd.getColumnName(),
                                       columnsToSet,
                                       tc);
            }
        }
    }
/**
* @see DataDictionary#invalidateAllSPSPlans
* @exception StandardException Thrown on error
*/
public void invalidateAllSPSPlans() throws StandardException
{
LanguageConnectionContext lcc = (LanguageConnectionContext)
ContextService.getContext(LanguageConnectionContext.CONTEXT_ID);
startWriting(lcc);
for (java.util.Iterator li = getAllSPSDescriptors().iterator(); li.hasNext(); )
{
SPSDescriptor spsd = (SPSDescriptor) li.next();
spsd.makeInvalid(DependencyManager.USER_RECOMPILE_REQUEST, lcc);
}
}
/**
* Mark all SPS plans in the data dictionary invalid. This does
* not invalidate cached plans. This function is for use by
* the boot-up code.
* @exception StandardException Thrown on error
*/
void clearSPSPlans() throws StandardException
{
TabInfoImpl ti = getNonCoreTI(SYSSTATEMENTS_CATALOG_NUM);
faultInTabInfo(ti);
TransactionController tc = getTransactionExecute();
FormatableBitSet columnToReadSet = new FormatableBitSet(SYSSTATEMENTSRowFactory.SYSSTATEMENTS_COLUMN_COUNT);
FormatableBitSet columnToUpdateSet = new FormatableBitSet(SYSSTATEMENTSRowFactory.SYSSTATEMENTS_COLUMN_COUNT);
columnToUpdateSet.set(SYSSTATEMENTSRowFactory.SYSSTATEMENTS_VALID -1);
columnToUpdateSet.set(SYSSTATEMENTSRowFactory.SYSSTATEMENTS_CONSTANTSTATE -1);
DataValueDescriptor[] replaceRow =
new DataValueDescriptor[SYSSTATEMENTSRowFactory.SYSSTATEMENTS_COLUMN_COUNT];
/* Set up a couple of row templates for fetching CHARS */
replaceRow[SYSSTATEMENTSRowFactory.SYSSTATEMENTS_VALID - 1] =
new SQLBoolean(false);
replaceRow[SYSSTATEMENTSRowFactory.SYSSTATEMENTS_CONSTANTSTATE - 1] =
new UserType((Object) null);
/* Scan the entire heap */
ScanController sc =
tc.openScan(
ti.getHeapConglomerate(),
false,
TransactionController.OPENMODE_FORUPDATE,
TransactionController.MODE_TABLE,
TransactionController.ISOLATION_REPEATABLE_READ,
columnToReadSet,
(DataValueDescriptor[]) null,
ScanController.NA,
(Qualifier[][]) null,
(DataValueDescriptor[]) null,
ScanController.NA);
while (sc.fetchNext((DataValueDescriptor[]) null))
{
/* Replace the column in the table */
sc.replace(replaceRow, columnToUpdateSet);
}
sc.close();
}
/**
* Drops the given SPSDescriptor.
*
* @param descriptor The descriptor to drop
* @param tc The TransactionController.
*
* @exception StandardException Thrown on failure
*/
public void dropSPSDescriptor(SPSDescriptor descriptor,
TransactionController tc)
throws StandardException
{
dropSPSDescriptor(descriptor.getUUID(), tc);
}
/**
* Drops the given SPSDescriptor.
*
* @param uuid the statement uuid
* @param tc The TransactionController.
*
* @exception StandardException Thrown on failure
*/
public void dropSPSDescriptor
(
UUID uuid,
TransactionController tc
) throws StandardException
{
DataValueDescriptor stmtIdOrderable;
TabInfoImpl ti = getNonCoreTI(SYSSTATEMENTS_CATALOG_NUM);
stmtIdOrderable = getIDValueAsCHAR(uuid);
/* Set up the start/stop position for the scan */
ExecIndexRow keyRow = (ExecIndexRow) exFactory.getIndexableRow(1);
keyRow.setColumn(1, stmtIdOrderable);
ti.deleteRow( tc, keyRow, SYSSTATEMENTSRowFactory.SYSSTATEMENTS_INDEX1_ID );
/* drop all columns in SYSCOLUMNS */
dropAllColumnDescriptors(uuid, tc);
}
/**
* Get every statement in this database.
* Return the SPSDescriptors in an list.
*
* @return the list of descriptors
*
* @exception StandardException Thrown on failure
*/
public List getAllSPSDescriptors()
throws StandardException
{
TabInfoImpl ti = getNonCoreTI(SYSSTATEMENTS_CATALOG_NUM);
List list = newSList();
getDescriptorViaHeap(
(ScanQualifier[][]) null,
ti,
(TupleDescriptor) null,
list);
return list;
}
/**
 * Returns every constraint in this database via an unqualified heap
 * scan of SYSCONSTRAINTS.
 *
 * Note that the returned ConstraintDescriptors are not the same
 * objects that are typically cached off of the table descriptors,
 * so duplicates of cached descriptors may be instantiated.
 *
 * @return the list of descriptors
 *
 * @exception StandardException Thrown on failure
 */
private ConstraintDescriptorList getAllConstraintDescriptors()
    throws StandardException
{
    ConstraintDescriptorList result = new ConstraintDescriptorList();
    // No qualifiers: every row in the catalog is wanted.
    getConstraintDescriptorViaHeap(
        (ScanQualifier[][]) null,
        getNonCoreTI(SYSCONSTRAINTS_CATALOG_NUM),
        (TupleDescriptor) null,
        result);
    return result;
}
/**
 * Returns every trigger in this database via an unqualified heap
 * scan of SYSTRIGGERS.
 *
 * Note that the returned TriggerDescriptors are not the same objects
 * that are typically cached off of the table descriptors, so
 * duplicates of cached descriptors may be instantiated.
 *
 * @return the list of descriptors
 *
 * @exception StandardException Thrown on failure
 */
private GenericDescriptorList getAllTriggerDescriptors()
    throws StandardException
{
    GenericDescriptorList result = new GenericDescriptorList();
    // No qualifiers: every row in the catalog is wanted.
    getDescriptorViaHeap(
        (ScanQualifier[][]) null,
        getNonCoreTI(SYSTRIGGERS_CATALOG_NUM),
        (TupleDescriptor) null,
        result);
    return result;
}
/**
* Get the trigger action string associated with the trigger after the
* references to old/new transition tables/variables in trigger action
* sql provided by CREATE TRIGGER have been transformed eg
* DELETE FROM t WHERE c = old.c
* turns into
* DELETE FROM t WHERE c = org.apache.derby.iapi.db.Factory::
* getTriggerExecutionContext().getOldRow().
* getInt(columnNumberFor'C'inRuntimeResultset)
* or
* DELETE FROM t WHERE c in (SELECT c FROM OLD)
* turns into
* DELETE FROM t WHERE c in
* (SELECT c FROM new TriggerOldTransitionTable OLD)
*
* @param actionStmt This is needed to get access to the various nodes
* generated by the Parser for the trigger action sql. These nodes will be
* used to find REFERENCEs column nodes.
*
* @param oldReferencingName The name specified by the user for REFERENCEs
* to old row columns
*
* @param newReferencingName The name specified by the user for REFERENCEs
* to new row columns
*
* @param triggerDefinition The original trigger action text provided by
* the user during CREATE TRIGGER time.
*
* @param referencedCols Trigger is defined on these columns (will be null
* in case of INSERT AND DELETE Triggers. Can also be null for DELETE
* Triggers if UPDATE trigger is not defined on specific column(s))
*
* @param referencedColsInTriggerAction what columns does the trigger
* action reference through old/new transition variables (may be null)
*
* @param actionOffset offset of start of action clause
*
* @param triggerTableDescriptor Table descriptor for trigger table
*
* @param triggerEventMask TriggerDescriptor.TRIGGER_EVENT_XXX
*
* @param createTriggerTime True if here for CREATE TRIGGER,
* false if here because an invalidated row level trigger with
* REFERENCEd columns has been fired and hence trigger action
* sql associated with SPSDescriptor may be invalid too.
*
* @return Transformed trigger action sql
* @throws StandardException
*/
public String getTriggerActionString(
    StatementNode actionStmt,
    String oldReferencingName,
    String newReferencingName,
    String triggerDefinition,
    int[] referencedCols,
    int[] referencedColsInTriggerAction,
    int actionOffset,
    TableDescriptor triggerTableDescriptor,
    int triggerEventMask,
    boolean createTriggerTime
) throws StandardException
{
    // DERBY-1482 has caused a regression which is being worked
    // under DERBY-5121. Until DERBY-5121 is fixed, we want
    // Derby to create triggers same as it is done in 10.6 and
    // earlier. This in other words means that do not try to
    // optimize how many columns are read from the trigger table,
    // simply read all the columns from the trigger table.
    // NOTE(review): this flag is hard-coded to false, so every
    // "if (in10_7_orHigherVersion)" branch below is deliberately
    // dead code until DERBY-5121 is resolved.
    boolean in10_7_orHigherVersion = false;
    // Accumulates the transformed trigger action text.
    StringBuffer newText = new StringBuffer();
    // Offset (relative to actionOffset) of the next un-copied chunk
    // of the original trigger definition text.
    int start = 0;
    //Total Number of columns in the trigger table
    int numberOfColsInTriggerTable = triggerTableDescriptor.getNumberOfColumns();
    //The purpose of following array(triggerColsAndTriggerActionCols)
    //is to identify all the trigger columns and all the columns from
    //the trigger action which are referenced though old/new
    //transition variables(in other words, accessed through the
    //REFERENCING clause section of CREATE TRIGGER sql). This array
    //will be initialized to -1 at the beginning. By the end of this
    //method, all the columns referenced by the trigger action
    //through the REFERENCING clause and all the trigger columns will
    //have their column positions in the trigger table noted in this
    //array.
    //eg
    //CREATE TRIGGER tr1 AFTER UPDATE OF c12 ON table1
    //    REFERENCING OLD AS oldt NEW AS newt
    //    FOR EACH ROW UPDATE table2 SET c24=oldt.c14;
    //For the trigger above, triggerColsAndTriggerActionCols will
    //finally have [-1,2,-1,4,-1] This list will include all the
    //columns that need to be fetched into memory during trigger
    //execution. All the columns with their entries marked -1 will
    //not be read into memory because they are not referenced in the
    //trigger action through old/new transition variables and they are
    //not recognized as trigger columns.
    int[] triggerColsAndTriggerActionCols = new int[numberOfColsInTriggerTable];
    if (referencedCols == null) {
        //This means that even though the trigger is defined at row
        //level, it is either an INSERT/DELETE trigger. Or it is an
        //UPDATE trigger with no specific column(s) identified as the
        //trigger column(s). In these cases, Derby is going to read all
        //the columns from the trigger table during trigger execution.
        //eg of an UPDATE trigger with no specific trigger column(s)
        //    CREATE TRIGGER tr1 AFTER UPDATE ON table1
        //    REFERENCING OLD AS oldt NEW AS newt
        //    FOR EACH ROW UPDATE table2 SET c24=oldt.c14;
        for (int i=0; i < numberOfColsInTriggerTable; i++) {
            triggerColsAndTriggerActionCols[i]=i+1;
        }
    } else {
        //This means that this row level trigger is an UPDATE trigger
        //defined on specific column(s).
        java.util.Arrays.fill(triggerColsAndTriggerActionCols, -1);
        for (int i=0; i < referencedCols.length; i++){
            //Make a note of this trigger column's column position in
            //triggerColsAndTriggerActionCols. This will tell us that
            //this column needs to be read in when the trigger fires.
            //eg for the CREATE TRIGGER below, we will make a note of
            //column c12's position in triggerColsAndTriggerActionCols
            //eg
            //CREATE TRIGGER tr1 AFTER UPDATE OF c12 ON table1
            //    REFERENCING OLD AS oldt NEW AS newt
            //    FOR EACH ROW UPDATE table2 SET c24=oldt.c14;
            triggerColsAndTriggerActionCols[referencedCols[i]-1] = referencedCols[i];
        }
    }
    // Collect every ColumnReference node in the trigger action's
    // parse tree; only those referencing OLD/NEW transition
    // tables/variables are acted on below.
    CollectNodesVisitor visitor = new CollectNodesVisitor(ColumnReference.class);
    actionStmt.accept(visitor);
    Vector refs = visitor.getList();
    /* we need to sort on position in string, beetle 4324
     */
    QueryTreeNode[] cols = sortRefs(refs, true);
    if (createTriggerTime) {
        //The purpose of following array(triggerActionColsOnly) is to
        //identify all the columns from the trigger action which are
        //referenced though old/new transition variables(in other words,
        //accessed through the REFERENCING clause section of
        //CREATE TRIGGER sql). This array will be initialized to -1 at the
        //beginning. By the end of this method, all the columns referenced
        //by the trigger action through the REFERENCING clause will have
        //their column positions in the trigger table noted in this array.
        //eg
        //CREATE TABLE table1 (c11 int, c12 int, c13 int, c14 int, c15 int);
        //CREATE TABLE table2 (c21 int, c22 int, c23 int, c24 int, c25 int);
        //CREATE TRIGGER tr1 AFTER UPDATE OF c12 ON table1
        //    REFERENCING OLD AS oldt NEW AS newt
        //    FOR EACH ROW UPDATE table2 SET c24=oldt.c14;
        //For the trigger above, triggerActionColsOnly will finally have
        //[-1,-1,-1,4,-1]. We will note all the entries for this array
        //which are not -1 into SYSTRIGGERS(-1 indiciates columns with
        //those column positions from the trigger table are not being
        //referenced in the trigger action through the old/new transition
        //variables.
        int[] triggerActionColsOnly = new int[numberOfColsInTriggerTable];
        java.util.Arrays.fill(triggerActionColsOnly, -1);
        //By this time, we have collected the positions of the trigger
        //columns in array triggerColsAndTriggerActionCols. Now we need
        //to start looking at the columns in trigger action to collect
        //all the columns referenced through REFERENCES clause. These
        //columns will be noted in triggerColsAndTriggerActionCols and
        //triggerActionColsOnly arrays.
        //At the end of the for loop below, we will have both arrays
        //triggerColsAndTriggerActionCols & triggerActionColsOnly
        //filled up with the column positions of the columns which are
        //either trigger columns or triger action columns which are
        //referenced through old/new transition variables.
        //eg
        //CREATE TRIGGER tr1 AFTER UPDATE OF c12 ON table1
        //    REFERENCING OLD AS oldt NEW AS newt
        //    FOR EACH ROW UPDATE table2 SET c24=oldt.c14;
        //For the above trigger, before the for loop below, the contents
        //of the 2 arrays will be as follows
        //triggerActionColsOnly [-1,-1,-1,-1,-1]
        //triggerColsAndTriggerActionCols [-1,2,-1,-1,-1]
        //After the for loop below, the 2 arrays will look as follows
        //triggerActionColsOnly [-1,-1,-1,4,-1]
        //triggerColsAndTriggerActionCols [-1,2,-1,4,-1]
        //If the database is at 10.6 or earlier version(meaning we are in
        //soft-upgrade mode), then we do not want to collect any
        //information about trigger action columns. The collection and
        //usage of trigger action columns was introduced in 10.7 DERBY-1482
        for (int i = 0; i < cols.length; i++)
        {
            ColumnReference ref = (ColumnReference) cols[i];
            /*
            ** Only occurrences of those OLD/NEW transition tables/variables
            ** are of interest here. There may be intermediate nodes in the
            ** parse tree that have its own RCL which contains copy of
            ** column references(CR) from other nodes. e.g.:
            **
            ** CREATE TRIGGER tt
            ** AFTER INSERT ON x
            ** REFERENCING NEW AS n
            ** FOR EACH ROW
            **    INSERT INTO y VALUES (n.i), (999), (333);
            **
            ** The above trigger action will result in InsertNode that
            ** contains a UnionNode of RowResultSetNodes. The UnionNode
            ** will have a copy of the CRs from its left child and those CRs
            ** will not have its beginOffset set which indicates they are
            ** not relevant for the conversion processing here, so we can
            ** safely skip them.
            */
            if (ref.getBeginOffset() == -1)
            {
                continue;
            }
            TableName tableName = ref.getTableNameNode();
            // Skip references that are not qualified by the OLD or NEW
            // correlation names from the REFERENCING clause.
            if ((tableName == null) ||
                ((oldReferencingName == null || !oldReferencingName.equals(tableName.getTableName())) &&
                (newReferencingName == null || !newReferencingName.equals(tableName.getTableName()))))
            {
                continue;
            }
            if (tableName.getBeginOffset() == -1)
            {
                continue;
            }
            // Reject INSERT triggers that reference OLD and DELETE
            // triggers that reference NEW.
            checkInvalidTriggerReference(tableName.getTableName(),
                    oldReferencingName,
                    newReferencingName,
                    triggerEventMask);
            String colName = ref.getColumnName();
            ColumnDescriptor triggerColDesc;
            //Following will catch the case where an invalid column is
            //used in trigger action through the REFERENCING clause. The
            //following tigger is trying to use oldt.c13 but there is no
            //column c13 in trigger table table1
            //CREATE TRIGGER tr1 AFTER UPDATE OF c12 ON table1
            //    REFERENCING OLD AS oldt NEW AS newt
            //    FOR EACH ROW UPDATE table2 SET c24=oldt.c14567;
            if ((triggerColDesc = triggerTableDescriptor.getColumnDescriptor(colName)) ==
                null) {
                throw StandardException.newException(
                        SQLState.LANG_COLUMN_NOT_FOUND, tableName+"."+colName);
            }
            // Dead while in10_7_orHigherVersion is pinned to false
            // (DERBY-5121 workaround above); when live, it records the
            // trigger action columns, including into the caller's
            // referencedColsInTriggerAction array.
            if (in10_7_orHigherVersion) {
                int triggerColDescPosition = triggerColDesc.getPosition();
                triggerColsAndTriggerActionCols[triggerColDescPosition-1]=triggerColDescPosition;
                triggerActionColsOnly[triggerColDescPosition-1]=triggerColDescPosition;
                referencedColsInTriggerAction[triggerColDescPosition-1] = triggerColDescPosition;
            }
        }
    } else {
        //We are here because we have come across an invalidated trigger
        //which is being fired. This code gets called for such a trigger
        //only if it is a row level trigger with REFERENCEs clause
        //
        // referencedColsInTriggerAction can be null if trigger action
        // does not use any columns through REFERENCING clause. This can
        // happen when we are coming here through ALTER TABLE DROP COLUMN
        // and the trigger being rebuilt does not use any columns through
        // REFERENCING clause. DERBY-4887
        if (referencedCols != null && referencedColsInTriggerAction != null){
            for (int i = 0; i < referencedColsInTriggerAction.length; i++)
            {
                triggerColsAndTriggerActionCols[referencedColsInTriggerAction[i]-1] = referencedColsInTriggerAction[i];
            }
        }
    }
    //Now that we know what columns we need for trigger columns and
    //trigger action columns, we can get rid of remaining -1 entries
    //for the remaining columns from trigger table.
    //eg
    //CREATE TRIGGER tr1 AFTER UPDATE OF c12 ON table1
    //    REFERENCING OLD AS oldt NEW AS newt
    //    FOR EACH ROW UPDATE table2 SET c24=oldt.c14;
    //For the above trigger, before the justTheRequiredColumns() call,
    //the content of triggerColsAndTriggerActionCols array were as
    //follows [-1, 2, -1, 4, -1]
    //After the justTheRequiredColumns() call below,
    //triggerColsAndTriggerActionCols will have [2,4]. What this means
    //that, at run time, during trigger execution, these are the only
    //2 column positions that will be read into memory from the
    //trigger table. The columns in other column positions are not
    //needed for trigger execution.
    triggerColsAndTriggerActionCols = justTheRequiredColumns(
            triggerColsAndTriggerActionCols, triggerTableDescriptor);
    //This is where we do the actual transformation of trigger action
    //sql. An eg of that is
    //    DELETE FROM t WHERE c = old.c
    // turns into
    //    DELETE FROM t WHERE c = org.apache.derby.iapi.db.Factory::
    //      getTriggerExecutionContext().getOldRow().
    //      getInt(columnNumberFor'C'inRuntimeResultset)
    // or
    //    DELETE FROM t WHERE c in (SELECT c FROM OLD)
    // turns into
    //    DELETE FROM t WHERE c in
    //      (SELECT c FROM new TriggerOldTransitionTable OLD)
    for (int i = 0; i < cols.length; i++)
    {
        ColumnReference ref = (ColumnReference) cols[i];
        /*
        ** Only occurrences of those OLD/NEW transition tables/variables
        ** are of interest here. There may be intermediate nodes in the
        ** parse tree that have its own RCL which contains copy of
        ** column references(CR) from other nodes. e.g.:
        **
        ** CREATE TRIGGER tt
        ** AFTER INSERT ON x
        ** REFERENCING NEW AS n
        ** FOR EACH ROW
        **    INSERT INTO y VALUES (n.i), (999), (333);
        **
        ** The above trigger action will result in InsertNode that
        ** contains a UnionNode of RowResultSetNodes. The UnionNode
        ** will have a copy of the CRs from its left child and those CRs
        ** will not have its beginOffset set which indicates they are
        ** not relevant for the conversion processing here, so we can
        ** safely skip them.
        */
        if (ref.getBeginOffset() == -1)
        {
            continue;
        }
        TableName tableName = ref.getTableNameNode();
        if ((tableName == null) ||
            ((oldReferencingName == null || !oldReferencingName.equals(tableName.getTableName())) &&
            (newReferencingName == null || !newReferencingName.equals(tableName.getTableName()))))
        {
            continue;
        }
        int tokBeginOffset = tableName.getBeginOffset();
        int tokEndOffset = tableName.getEndOffset();
        if (tokBeginOffset == -1)
        {
            continue;
        }
        String colName = ref.getColumnName();
        int columnLength = ref.getEndOffset() - ref.getBeginOffset() + 1;
        // Copy the untouched text between the previous reference and
        // this one, then emit the generated replacement for OLD.x/NEW.x.
        newText.append(triggerDefinition.substring(start, tokBeginOffset-actionOffset));
        int colPositionInRuntimeResultSet = -1;
        ColumnDescriptor triggerColDesc = triggerTableDescriptor.getColumnDescriptor(colName);
        //DERBY-5121 We can come here if the column being used in trigger
        // action is getting dropped and we have come here through that
        // ALTER TABLE DROP COLUMN. In that case, we will not find the
        // column in the trigger table.
        if (triggerColDesc == null) {
            throw StandardException.newException(
                    SQLState.LANG_COLUMN_NOT_FOUND, tableName+"."+colName);
        }
        int colPositionInTriggerTable = triggerColDesc.getPosition();
        //This part of code is little tricky and following will help
        //understand what mapping is happening here.
        //eg
        //CREATE TRIGGER tr1 AFTER UPDATE OF c12 ON table1
        //    REFERENCING OLD AS oldt NEW AS newt
        //    FOR EACH ROW UPDATE table2 SET c24=oldt.c14;
        //For the above trigger, triggerColsAndTriggerActionCols will
        //have [2,4]. What this means that, at run time, during trigger
        //execution, these are the only 2 column positions that will be
        //read into memory from the trigger table. The columns in other
        //column positions are not needed for trigger execution. But
        //even though column positions in original trigger table are 2
        //and 4, their relative column positions in the columns read at
        //execution time is really [1,2]. At run time, when the trigger
        //gets fired, column position 2 from the trigger table will be
        //read as the first column and column position 4 from the
        //trigger table will be read as the second column. And those
        //relative column positions at runtime is what should be used
        //during trigger action conversion from
        //UPDATE table2 SET c24=oldt.c14
        //to
        //UPDATE table2 SET c24=
        //  org.apache.derby.iapi.db.Factory::getTriggerExecutionContext().
        //  getOldRow().getInt(2)
        //Note that the generated code above refers to column c14 from
        //table1 by position 2 rather than position 4. Column c14's
        //column position in table1 is 4 but in the relative columns
        //that will be fetched during trigger execution, it's position
        //is 2. That is what the following code is doing.
        if (in10_7_orHigherVersion && triggerColsAndTriggerActionCols != null){
            for (int j=0; j<triggerColsAndTriggerActionCols.length; j++){
                if (triggerColsAndTriggerActionCols[j] == colPositionInTriggerTable)
                    colPositionInRuntimeResultSet=j+1;
            }
        } else
            // Pre-10.7 behavior (currently always taken): all columns
            // are read, so table position == runtime position.
            colPositionInRuntimeResultSet=colPositionInTriggerTable;
        newText.append(genColumnReferenceSQL(triggerTableDescriptor, colName,
                tableName.getTableName(),
                tableName.getTableName().equals(oldReferencingName),
                colPositionInRuntimeResultSet));
        // Skip past "<correlation>.<column>" in the original text; the
        // +2 accounts for the separating dot plus one — NOTE(review):
        // offset arithmetic inherited as-is, TODO confirm against parser
        // offsets before changing.
        start = tokEndOffset- actionOffset + columnLength + 2;
    }
    //By this point, we are finished transforming the trigger action if
    //it has any references to old/new transition variables.
    if (start < triggerDefinition.length())
    {
        newText.append(triggerDefinition.substring(start));
    }
    return newText.toString();
}
/*
 * The array passed in holds either -1 or a 1-based column position at
 * each element. Returns null when every element is -1; otherwise
 * returns a new, right-sized array containing only the non -1 column
 * positions, in their original order.
 */
private int[] justTheRequiredColumns(int[] columnsArrary,
        TableDescriptor triggerTableDescriptor) {
    int colCount = triggerTableDescriptor.getNumberOfColumns();

    // First pass: count the real (non -1) entries.
    int needed = 0;
    for (int idx = 0; idx < colCount; idx++) {
        if (columnsArrary[idx] != -1)
            needed++;
    }

    if (needed == 0)
        return null;

    // Second pass: compact the real entries into a new array.
    int[] compacted = new int[needed];
    int next = 0;
    for (int idx = 0; idx < colCount; idx++) {
        if (columnsArrary[idx] != -1)
            compacted[next++] = columnsArrary[idx];
    }
    return compacted;
}
/*
 * Reject illegal REFERENCING combinations: an INSERT trigger may not
 * reference the OLD transition table/variable, and a DELETE trigger
 * may not reference the NEW one.
 */
private void checkInvalidTriggerReference(String tableName,
        String oldReferencingName,
        String newReferencingName,
        int triggerEventMask) throws StandardException
{
    boolean isInsert =
        (triggerEventMask & TriggerDescriptor.TRIGGER_EVENT_INSERT) == TriggerDescriptor.TRIGGER_EVENT_INSERT;
    boolean isDelete =
        (triggerEventMask & TriggerDescriptor.TRIGGER_EVENT_DELETE) == TriggerDescriptor.TRIGGER_EVENT_DELETE;

    if (isInsert && tableName.equals(oldReferencingName))
    {
        // INSERT has no old row; only "new" may be referenced.
        throw StandardException.newException(SQLState.LANG_TRIGGER_BAD_REF_MISMATCH, "INSERT", "new");
    }
    if (isDelete && tableName.equals(newReferencingName))
    {
        // DELETE has no new row; only "old" may be referenced.
        throw StandardException.newException(SQLState.LANG_TRIGGER_BAD_REF_MISMATCH, "DELETE", "old");
    }
}
/*
 * Verify that the given column exists in the trigger target table and
 * generate the SQL fragment that fetches it from the old or new row of
 * the trigger execution context.
 *
 * For non-XML types the fragment fetches the value with getObject() by
 * column position (position is used to avoid the wrong-column problem
 * of case-insensitive name lookup, DERBY-1258) and CASTs it back to the
 * column's SQL type so the expression behaves exactly like a regular
 * column reference.
 *
 * For XML types getObject() is not available through the JDBC level
 * triggers use (DERBY-2350), so the fragment fetches the value with
 * getString() and rebuilds the XML via
 *     XMLPARSE(DOCUMENT CAST(... AS CLOB) PRESERVE WHITESPACE).
 *
 * @return the generated SQL fragment
 *
 * @exception StandardException on invalid column name
 */
private String genColumnReferenceSQL(
    TableDescriptor td,
    String colName,
    String tabName,
    boolean isOldTable,
    int colPositionInRuntimeResultSet
) throws StandardException
{
    ColumnDescriptor colDesc = td.getColumnDescriptor(colName);
    if (colDesc == null)
    {
        throw StandardException.newException(
                SQLState.LANG_COLUMN_NOT_FOUND, tabName+"."+colName);
    }

    // Row accessor depends on which transition variable was referenced.
    String rowAccessor = isOldTable ? "getOldRow()" : "getNewRow()";
    DataTypeDescriptor dts = colDesc.getType();
    TypeId typeId = dts.getTypeId();
    StringBuffer sql = new StringBuffer();

    if (typeId.isXMLTypeId())
    {
        // XML: fetch as string, CAST to CLOB, then XMLPARSE back to XML.
        sql.append("XMLPARSE(DOCUMENT CAST( ");
        sql.append("org.apache.derby.iapi.db.Factory::getTriggerExecutionContext().");
        sql.append(rowAccessor);
        sql.append(".getString(");
        sql.append(colPositionInRuntimeResultSet);
        sql.append(") AS CLOB) PRESERVE WHITESPACE ) ");
    }
    else
    {
        // Everything else: fetch as object and CAST to the SQL type.
        sql.append("CAST (org.apache.derby.iapi.db.Factory::getTriggerExecutionContext().");
        sql.append(rowAccessor);
        sql.append(".getObject(");
        sql.append(colPositionInRuntimeResultSet);
        sql.append(") AS ");
        // getSQLString() returns <typeName> for user
        // types, so use getSQLTypeName() for those instead.
        sql.append(typeId.userType() ? typeId.getSQLTypeName() : dts.getSQLstring());
        sql.append(") ");
    }
    return sql.toString();
}
/*
 * Sort the references into an array ordered by their begin offset in
 * the original statement text (beetle 4324).
 *
 * For row triggers (isRow == true) the offset comes from the node
 * itself; otherwise it comes from the table name field of the
 * FromBaseTable node.
 *
 * Replaces the previous hand-rolled O(n^2) bubble sort with the
 * stable java.util.Arrays.sort; both sorts are stable, so the
 * resulting order is identical.
 */
private QueryTreeNode[] sortRefs(Vector refs, boolean isRow)
{
    QueryTreeNode[] sorted =
        (QueryTreeNode[]) refs.toArray(new QueryTreeNode[refs.size()]);

    // Copy to a final local so the anonymous comparator can see it.
    final boolean byNodeOffset = isRow;

    java.util.Arrays.sort(sorted, new java.util.Comparator() {
        public int compare(Object o1, Object o2) {
            int off1;
            int off2;
            if (byNodeOffset) {
                off1 = ((QueryTreeNode) o1).getBeginOffset();
                off2 = ((QueryTreeNode) o2).getBeginOffset();
            } else {
                off1 = ((FromBaseTable) o1).getTableNameField().getBeginOffset();
                off2 = ((FromBaseTable) o2).getTableNameField().getBeginOffset();
            }
            // Explicit comparison rather than subtraction to avoid
            // any possibility of integer overflow.
            return (off1 < off2) ? -1 : ((off1 == off2) ? 0 : 1);
        }
    });
    return sorted;
}
/**
 * Look up a TriggerDescriptor by its UUID using the first index on
 * SYSTRIGGERS.
 *
 * @param uuid The UUID
 *
 * @return The TriggerDescriptor for the trigger, per
 *         getDescriptorViaIndex.
 *
 * @exception StandardException Thrown on failure
 */
public TriggerDescriptor getTriggerDescriptor(UUID uuid)
    throws StandardException
{
    TabInfoImpl ti = getNonCoreTI(SYSTRIGGERS_CATALOG_NUM);

    // Single-column key: the trigger id rendered as CHAR, used for
    // both the start and stop positions of the scan.
    ExecIndexRow keyRow = exFactory.getIndexableRow(1);
    keyRow.setColumn(1, getIDValueAsCHAR(uuid));

    return (TriggerDescriptor)
        getDescriptorViaIndex(
            SYSTRIGGERSRowFactory.SYSTRIGGERS_INDEX1_ID,
            keyRow,
            (ScanQualifier [][]) null,
            ti,
            (TupleDescriptor) null,
            (List) null,
            false);
}
/**
 * Look up a TriggerDescriptor by trigger name and schema using the
 * second index on SYSTRIGGERS.
 *
 * @param name The trigger name.
 * @param sd The schema descriptor.
 *
 * @return The TriggerDescriptor for the trigger, per
 *         getDescriptorViaIndex.
 *
 * @exception StandardException Thrown on failure
 */
public TriggerDescriptor getTriggerDescriptor(String name, SchemaDescriptor sd)
    throws StandardException
{
    TabInfoImpl ti = getNonCoreTI(SYSTRIGGERS_CATALOG_NUM);

    // Two-column key: (trigger name, schema id), used for both the
    // start and stop positions of the scan.
    ExecIndexRow keyRow = exFactory.getIndexableRow(2);
    keyRow.setColumn(1, new SQLVarchar(name));
    keyRow.setColumn(2, getIDValueAsCHAR(sd.getUUID()));

    return (TriggerDescriptor)
        getDescriptorViaIndex(
            SYSTRIGGERSRowFactory.SYSTRIGGERS_INDEX2_ID,
            keyRow,
            (ScanQualifier [][]) null,
            ti,
            (TupleDescriptor) null,
            (List) null,
            false);
}
/**
 * Return the trigger descriptor list for the given table, loading it
 * lazily on first use. If the list has already been scanned it is
 * returned as-is; if no table descriptor is supplied, every trigger
 * in the database is returned instead.
 *
 * @param td The table descriptor, or null for all triggers.
 *
 * @return The GenericDescriptorList of triggers for the table
 *
 * @exception StandardException Thrown on failure
 */
public GenericDescriptorList getTriggerDescriptors(TableDescriptor td)
    throws StandardException
{
    if (td == null)
    {
        // No table supplied: fall back to a database-wide scan.
        return getAllTriggerDescriptors();
    }

    GenericDescriptorList gdl = td.getTriggerDescriptorList();

    // The (empty) list object is created along with the TD, so only
    // the population scan needs to be synchronized — not the fetch
    // of the list itself.
    synchronized (gdl)
    {
        if (!gdl.getScanned())
        {
            getTriggerDescriptorsScan(td, false);
        }
    }
    return gdl;
}
/**
 * Populate the GenericDescriptorList of the given TableDescriptor by
 * scanning SYSTRIGGERS' third index on the table id.
 *
 * MT synchronization: the caller must hold the lock on the
 * descriptor list of the given TD.
 *
 * @param td The TableDescriptor.
 * @param forUpdate Whether or not to open scan for update
 *
 * @exception StandardException Thrown on failure
 */
private void getTriggerDescriptorsScan(TableDescriptor td, boolean forUpdate)
    throws StandardException
{
    TabInfoImpl ti = getNonCoreTI(SYSTRIGGERS_CATALOG_NUM);
    GenericDescriptorList gdl = td.getTriggerDescriptorList();

    // Same table id (as CHAR) in both start and stop positions.
    ExecIndexRow keyRow = (ExecIndexRow) exFactory.getIndexableRow(1);
    keyRow.setColumn(1, getIDValueAsCHAR(td.getUUID()));

    getDescriptorViaIndex(
        SYSTRIGGERSRowFactory.SYSTRIGGERS_INDEX3_ID,
        keyRow,
        (ScanQualifier [][]) null,
        ti,
        (TupleDescriptor) null,
        gdl,
        forUpdate);

    // Mark the list populated so callers skip future scans.
    gdl.setScanned(true);
}
/**
 * Drops the given TriggerDescriptor from SYSTRIGGERS. WARNING: does
 * not drop its SPSes!!!
 *
 * @param descriptor The descriptor to drop
 * @param tc The TransactionController.
 *
 * @exception StandardException Thrown on failure
 */
public void dropTriggerDescriptor
(
    TriggerDescriptor descriptor,
    TransactionController tc
) throws StandardException
{
    TabInfoImpl ti = getNonCoreTI(SYSTRIGGERS_CATALOG_NUM);

    // Key the delete on the trigger's id, rendered as CHAR.
    ExecIndexRow keyRow = (ExecIndexRow) exFactory.getIndexableRow(1);
    keyRow.setColumn(1, getIDValueAsCHAR(descriptor.getUUID()));
    ti.deleteRow(tc, keyRow, SYSTRIGGERSRowFactory.SYSTRIGGERS_INDEX1_ID);
}
/**
 * Update the trigger descriptor in question. Updates
 * every row in the base conglomerate that matches the uuid.
 *
 * (Cleanup: removed the unused locals columnNameOrderable and the
 * separately-declared IDOrderable/row/keyRow1 null initializations;
 * behavior is unchanged.)
 *
 * @param triggerd The Trigger descriptor
 * @param formerUUID The UUID for this column in SYSTRIGGERS,
 *                   may differ from what is in triggerd if this
 *                   is the column that is being set.
 * @param colsToSet Array of ints of columns to be modified,
 *                  1 based. May be null (all cols).
 * @param tc The TransactionController to use
 *
 * @exception StandardException Thrown on failure
 */
public void updateTriggerDescriptor
(
    TriggerDescriptor triggerd,
    UUID formerUUID,
    int[] colsToSet,
    TransactionController tc
) throws StandardException
{
    TabInfoImpl ti = getNonCoreTI(SYSTRIGGERS_CATALOG_NUM);
    SYSTRIGGERSRowFactory rf = (SYSTRIGGERSRowFactory) ti.getCatalogRowFactory();

    /* Use objectID in both start
     * and stop position for index 1 scan.
     */
    ExecIndexRow keyRow1 = (ExecIndexRow) exFactory.getIndexableRow(1);
    keyRow1.setColumn(1, getIDValueAsCHAR(formerUUID));

    // build the row to be stuffed into SYSTRIGGERS.
    ExecRow row = rf.makeRow(triggerd, null);

    /*
    ** Figure out if the indexes in systriggers need
    ** to be updated. The code below hard-codes three indexes.
    */
    if (SanityManager.DEBUG)
    {
        SanityManager.ASSERT(rf.getNumIndexes() == 3,
                "There are more indexes on systriggers than expected, the code herein needs to change");
    }

    boolean[] bArray = new boolean[3];
    if (colsToSet == null)
    {
        // All columns may change: maintain every index.
        bArray[0] = true;
        bArray[1] = true;
        bArray[2] = true;
    }
    else
    {
        /*
        ** Check the specific columns being set against the
        ** indexed columns.
        */
        for (int i = 0; i < colsToSet.length; i++)
        {
            switch (colsToSet[i])
            {
                case SYSTRIGGERSRowFactory.SYSTRIGGERS_TRIGGERID:
                    bArray[0] = true;
                    break;
                case SYSTRIGGERSRowFactory.SYSTRIGGERS_TRIGGERNAME:
                case SYSTRIGGERSRowFactory.SYSTRIGGERS_SCHEMAID:
                    bArray[1] = true;
                    break;
                case SYSTRIGGERSRowFactory.SYSTRIGGERS_TABLEID:
                    bArray[2] = true;
                    break;
            }
        }
    }

    ti.updateRow(keyRow1, row,
            SYSTRIGGERSRowFactory.SYSTRIGGERS_INDEX1_ID,
            bArray,
            colsToSet,
            tc);
}
/**
 * Look up a ConstraintDescriptor by its UUID via the first index on
 * SYSCONSTRAINTS. Please use getConstraintDescriptorById() if you
 * have the constraint's table descriptor — it is much faster.
 *
 * @param uuid The UUID
 *
 * @return The ConstraintDescriptor for the constraint.
 *
 * @exception StandardException Thrown on failure
 */
public ConstraintDescriptor getConstraintDescriptor(UUID uuid)
    throws StandardException
{
    TabInfoImpl ti = getNonCoreTI(SYSCONSTRAINTS_CATALOG_NUM);

    // Single-column key (constraint id as CHAR), used for both the
    // start and stop positions of the scan.
    ExecIndexRow keyRow = exFactory.getIndexableRow(1);
    keyRow.setColumn(1, getIDValueAsCHAR(uuid));

    return getConstraintDescriptorViaIndex(
                SYSCONSTRAINTSRowFactory.SYSCONSTRAINTS_INDEX1_ID,
                keyRow,
                ti,
                (TableDescriptor) null,
                (ConstraintDescriptorList) null,
                false);
}
/**
 * Look up a ConstraintDescriptor by constraint name and schema UUID
 * via the second index on SYSCONSTRAINTS. Please use
 * getConstraintDescriptorByName() if you have the constraint's table
 * descriptor — it is much faster.
 *
 * @param constraintName Constraint name.
 * @param schemaID The schema UUID
 *
 * @return The ConstraintDescriptor for the constraint.
 *
 * @exception StandardException Thrown on failure
 */
public ConstraintDescriptor getConstraintDescriptor
(
    String constraintName,
    UUID schemaID
)
    throws StandardException
{
    TabInfoImpl ti = getNonCoreTI(SYSCONSTRAINTS_CATALOG_NUM);

    // Two-column key: (constraint name, schema id), used for both
    // the start and stop positions of the scan.
    ExecIndexRow keyRow = exFactory.getIndexableRow(2);
    keyRow.setColumn(1, new SQLVarchar(constraintName));
    keyRow.setColumn(2, getIDValueAsCHAR(schemaID));

    return getConstraintDescriptorViaIndex(
                SYSCONSTRAINTSRowFactory.SYSCONSTRAINTS_INDEX2_ID,
                keyRow,
                ti,
                (TableDescriptor) null,
                (ConstraintDescriptorList) null,
                false);
}
/**
 * Returns all the statistics descriptors for the given table.
 * <p>
 * NOTE: As opposed to most other data dictionary lookups, this operation is
 * performed with isolation level READ_UNCOMMITTED. The reason is to avoid
 * deadlocks with inserts into the statistics system table.
 *
 * @param td {@code TableDescriptor} for which I need statistics
 * @return A list of tuple descriptors, possibly empty.
 */
public List getStatisticsDescriptors(TableDescriptor td)
    throws StandardException
{
    TabInfoImpl ti = getNonCoreTI(SYSSTATISTICS_CATALOG_NUM);
    List result = newSList();

    // Single-column key (table id as CHAR) bounding the scan.
    ExecIndexRow keyRow = exFactory.getIndexableRow(1);
    keyRow.setColumn(1, getIDValueAsCHAR(td.getUUID()));

    // Scan with READ_UNCOMMITTED to avoid deadlocking against
    // concurrent statistics inserts (see class comment above).
    getDescriptorViaIndex(SYSSTATISTICSRowFactory.SYSSTATISTICS_INDEX1_ID,
            keyRow,
            (ScanQualifier [][])null,
            ti,
            (TupleDescriptor)null,
            result,
            false,
            TransactionController.ISOLATION_READ_UNCOMMITTED,
            getTransactionCompile());
    return result;
}
/**
* Load up the constraint descriptor list for this table
* descriptor and return it. If the descriptor list
* is already loaded up, it is returned without further
* ado. If no table descriptor is passed in, then all
* constraint descriptors are retrieved. Note that in
* this case, the constraint descriptor objects may be
* duplicates of constraint descriptors that are hung
* off of the table descriptor cache.
*
* @param td The table descriptor. If null,
* all constraint descriptors are returned.
*
*
* @return The ConstraintDescriptorList for the table
*
* @exception StandardException Thrown on failure
*/
public ConstraintDescriptorList getConstraintDescriptors(TableDescriptor td)
throws StandardException
{
ConstraintDescriptorList cdl;
if (td == null)
{
// No table supplied: return every constraint in the dictionary.
return getAllConstraintDescriptors();
}
/* RESOLVE - need to look at multi-user aspects of hanging constraint
* descriptor list off of table descriptor when we restore the cache.
*/
/* Build the TableDescriptor's CDL if it is currently empty */
cdl = td.getConstraintDescriptorList();
/*
** Synchronize the building of the CDL. The CDL itself is created
** empty when the TD is created, so there is no need to synchronize
** the getting of the CDL.
*/
synchronized(cdl)
{
if (! cdl.getScanned())
{
// First access for this table: populate the list from
// SYSCONSTRAINTS with a read-only scan.
getConstraintDescriptorsScan(td, false);
}
}
return cdl;
}
/**
* Convert a constraint descriptor list into a list
* of active constraints, that is, constraints which
* must be enforced. For the Core product, these
* are just the constraints on the original list.
* However, during REFRESH we may have deferred some
* constraints until statement end. This method returns
* the corresponding list of constraints which AREN'T
* deferred.
*
* @param cdl The constraint descriptor list to wrap with
* an Active constraint descriptor list.
*
* @return The corresponding Active ConstraintDescriptorList
*
* @exception StandardException Thrown on failure
*/
public ConstraintDescriptorList getActiveConstraintDescriptors(ConstraintDescriptorList cdl)
throws StandardException
// In this implementation every constraint is considered active, so the
// input list is returned unchanged (no deferred constraints exist here).
{ return cdl; }
/**
* Reports whether an individual constraint must be
* enforced. For the Core product, this routine always
* returns true.
*
* However, during REFRESH we may have deferred some
* constraints until statement end. This method returns
* false if the constraint deferred
*
* @param constraint the constraint to check
*
*
* @return true if the constraint must be enforced now
*
* @exception StandardException Thrown on failure
*/
public boolean activeConstraint( ConstraintDescriptor constraint )
throws StandardException
// No deferral support in this implementation: every constraint is active.
{ return true; }
/**
* Get the constraint descriptor given a table and the UUID String
* of the backing index.
*
* @param td The table descriptor.
* @param uuid the UUID for the backing index.
*
* @return The ConstraintDescriptor for the constraint.
*
* @exception StandardException Thrown on failure
*/
public ConstraintDescriptor getConstraintDescriptor(TableDescriptor td,
UUID uuid)
throws StandardException
{
// Delegates to the table's (possibly cached) constraint list, which is
// loaded on first access by getConstraintDescriptors().
return getConstraintDescriptors(td).getConstraintDescriptor(uuid);
}
/**
* Get the constraint descriptor given a table and the UUID String
* of the constraint
*
* @param td The table descriptor.
* @param uuid The UUID for the constraint
*
* @return The ConstraintDescriptor for the constraint.
*
* @exception StandardException Thrown on failure
*/
public ConstraintDescriptor getConstraintDescriptorById
(
TableDescriptor td,
UUID uuid
)
throws StandardException
{
// Simple delegation: look the constraint up by its own UUID in the
// table's (lazily loaded) constraint descriptor list.
return getConstraintDescriptors(td).getConstraintDescriptorById(uuid);
}
/**
* Get the constraint descriptor given a TableDescriptor and the constraint name.
*
* @param td The table descriptor.
* @param sd The schema descriptor for the constraint
* @param constraintName The constraint name.
* @param forUpdate Whether or not access is for update
*
* @return The ConstraintDescriptor for the constraint.
*
* @exception StandardException Thrown on failure
*/
public ConstraintDescriptor getConstraintDescriptorByName(TableDescriptor td,
                                                          SchemaDescriptor sd,
                                                          String constraintName,
                                                          boolean forUpdate)
    throws StandardException
{
    if (forUpdate)
    {
        // Updaters must read from disk: discard any cached descriptors
        // and re-scan SYSCONSTRAINTS with an updatable cursor.
        td.emptyConstraintDescriptorList();
        getConstraintDescriptorsScan(td, true);
    }

    ConstraintDescriptorList cdl = getConstraintDescriptors(td);
    return cdl.getConstraintDescriptorByName(sd, constraintName);
}
/**
* Populate the ConstraintDescriptorList for the specified TableDescriptor.
*
* MT synchronization: it is assumed that the caller has synchronized
* on the CDL in the given TD.
*
* @param td The TableDescriptor.
* @param forUpdate Whether or not to open scan for update
*
* @exception StandardException Thrown on failure
*/
private void getConstraintDescriptorsScan(TableDescriptor td, boolean forUpdate)
    throws StandardException
{
    ConstraintDescriptorList cdl = td.getConstraintDescriptorList();
    TabInfoImpl ti = getNonCoreTI(SYSCONSTRAINTS_CATALOG_NUM);

    // Exact-match key on the table's UUID; index 3 of SYSCONSTRAINTS is
    // keyed on TABLEID, so one scan collects all of the table's constraints.
    ExecIndexRow searchKey = (ExecIndexRow) exFactory.getIndexableRow(1);
    searchKey.setColumn(1, getIDValueAsCHAR(td.getUUID()));

    getConstraintDescriptorViaIndex(
            SYSCONSTRAINTSRowFactory.SYSCONSTRAINTS_INDEX3_ID,
            searchKey,
            ti,
            td,
            cdl,
            forUpdate);

    // Mark the list as fully loaded so later callers skip the scan.
    cdl.setScanned(true);
}
/**
* Return a (single or list of) ConstraintDescriptor(s) from
* SYSCONSTRAINTS where the access is from the index to the heap.
*
* The heap and scan controllers are closed in a finally block so that
* they are released even when descriptor construction throws (the
* previous version leaked both controllers on any error path; the
* sibling getConstraintDescriptorViaHeap already used try/finally).
*
* @param indexId The id of the index (0 to # of indexes on table) to use
* @param keyRow The supplied ExecIndexRow for search
* @param ti The TabInfoImpl to use
* @param td The TableDescriptor, if supplied.
* @param dList The list to build, if supplied. If null, then caller expects
* a single descriptor
* @param forUpdate Whether or not to open scan for update
*
* @return The last matching descriptor
*
* @exception StandardException Thrown on error
*/
protected ConstraintDescriptor getConstraintDescriptorViaIndex(
                    int indexId,
                    ExecIndexRow keyRow,
                    TabInfoImpl ti,
                    TableDescriptor td,
                    ConstraintDescriptorList dList,
                    boolean forUpdate)
    throws StandardException
{
    SYSCONSTRAINTSRowFactory rf = (SYSCONSTRAINTSRowFactory) ti.getCatalogRowFactory();
    ConstraintDescriptor cd = null;
    ExecIndexRow indexRow1;
    ExecRow outRow;
    RowLocation baseRowLocation;
    ConglomerateController heapCC = null;
    ScanController scanController = null;
    TransactionController tc;

    // Get the current transaction controller
    tc = getTransactionCompile();

    outRow = rf.makeEmptyRow();

    try
    {
        heapCC =
            tc.openConglomerate(
                ti.getHeapConglomerate(), false, 0,
                TransactionController.MODE_RECORD,
                TransactionController.ISOLATION_REPEATABLE_READ);

        /* Scan the index and go to the data pages for qualifying rows to
         * build the column descriptor.
         */
        scanController = tc.openScan(
                ti.getIndexConglomerate(indexId),  // conglomerate to open
                false,                             // don't hold open across commit
                (forUpdate) ? TransactionController.OPENMODE_FORUPDATE : 0,
                TransactionController.MODE_RECORD,
                TransactionController.ISOLATION_REPEATABLE_READ,
                (FormatableBitSet) null,           // all fields as objects
                keyRow.getRowArray(),              // start position - exact key match.
                ScanController.GE,                 // startSearchOperation
                null,                              // scanQualifier
                keyRow.getRowArray(),              // stop position - exact key match.
                ScanController.GT);                // stopSearchOperation

        while (scanController.next())
        {
            SubConstraintDescriptor subCD = null;

            // create an index row template
            indexRow1 = getIndexRowFromHeapRow(
                            ti.getIndexRowGenerator(indexId),
                            heapCC.newRowLocationTemplate(),
                            outRow);

            scanController.fetch(indexRow1.getRowArray());

            // The row location is always the last column of the index row.
            baseRowLocation = (RowLocation) indexRow1.getColumn(
                                                indexRow1.nColumns());

            boolean base_row_exists =
                heapCC.fetch(
                    baseRowLocation, outRow.getRowArray(), (FormatableBitSet) null);

            if (SanityManager.DEBUG)
            {
                // it can not be possible for heap row to disappear while
                // holding scan cursor on index at ISOLATION_REPEATABLE_READ.
                SanityManager.ASSERT(base_row_exists, "base row doesn't exist");
            }

            switch (rf.getConstraintType(outRow))
            {
                case DataDictionary.PRIMARYKEY_CONSTRAINT:
                case DataDictionary.FOREIGNKEY_CONSTRAINT:
                case DataDictionary.UNIQUE_CONSTRAINT:
                    subCD = getSubKeyConstraint(
                        rf.getConstraintId(outRow), rf.getConstraintType(outRow));
                    break;

                case DataDictionary.CHECK_CONSTRAINT:
                    subCD = getSubCheckConstraint(
                        rf.getConstraintId(outRow));
                    break;

                default:
                    if (SanityManager.DEBUG)
                    {
                        SanityManager.THROWASSERT("unexpected value "+
                            "from rf.getConstraintType(outRow)" +
                            rf.getConstraintType(outRow));
                    }
            }

            if (SanityManager.DEBUG)
            {
                SanityManager.ASSERT(subCD != null,
                    "subCD is expected to be non-null");
            }

            /* Cache the TD in the SCD so that
             * the row factory doesn't need to go
             * out to disk to get it.
             */
            subCD.setTableDescriptor(td);

            cd = (ConstraintDescriptor) rf.buildDescriptor(
                    outRow,
                    subCD,
                    this);

            /* If dList is null, then caller only wants a single descriptor - we're done
             * else just add the current descriptor to the list.
             */
            if (dList == null)
            {
                break;
            }
            else
            {
                dList.add(cd);
            }
        }
    }
    finally
    {
        // Release both controllers even if fetch/buildDescriptor threw.
        if (scanController != null)
        {
            scanController.close();
        }
        if (heapCC != null)
        {
            heapCC.close();
        }
    }
    return cd;
}
/**
* Return a (single or list of) catalog row descriptor(s) from
* SYSCONSTRAINTS through a heap scan
*
* @param scanQualifiers qualifiers
* @param ti The TabInfoImpl to use
* @param parentTupleDescriptor The parentDescriptor, if applicable.
* @param list The list to build, if supplied.
* If null, then caller expects a single descriptor
*
* @return The last matching descriptor
*
* @exception StandardException Thrown on error
*/
protected TupleDescriptor getConstraintDescriptorViaHeap(
                    ScanQualifier [][] scanQualifiers,
                    TabInfoImpl ti,
                    TupleDescriptor parentTupleDescriptor,
                    List list)
    throws StandardException
{
    // NOTE: unused locals (a never-opened ConglomerateController and an
    // unused template row) were removed from the original version.
    SYSCONSTRAINTSRowFactory rf = (SYSCONSTRAINTSRowFactory) ti.getCatalogRowFactory();
    ExecRow outRow;
    ScanController scanController;
    TransactionController tc;
    ConstraintDescriptor cd = null;

    // Get the current transaction controller
    tc = getTransactionCompile();

    outRow = rf.makeEmptyRow();

    /*
    ** Table scan
    */
    scanController = tc.openScan(
            ti.getHeapConglomerate(),      // conglomerate to open
            false,                         // don't hold open across commit
            0,                             // for read
            TransactionController.MODE_TABLE,
            TransactionController.ISOLATION_REPEATABLE_READ,
            (FormatableBitSet) null,       // all fields as objects
            (DataValueDescriptor[]) null,  // start position - first row
            0,                             // startSearchOperation - none
            scanQualifiers,                // scanQualifier,
            (DataValueDescriptor[]) null,  // stop position - through last row
            0);                            // stopSearchOperation - none

    try
    {
        while (scanController.fetchNext(outRow.getRowArray()))
        {
            SubConstraintDescriptor subCD = null;

            switch (rf.getConstraintType(outRow))
            {
                case DataDictionary.PRIMARYKEY_CONSTRAINT:
                case DataDictionary.FOREIGNKEY_CONSTRAINT:
                case DataDictionary.UNIQUE_CONSTRAINT:
                    subCD = getSubKeyConstraint(
                        rf.getConstraintId(outRow), rf.getConstraintType(outRow));
                    break;

                case DataDictionary.CHECK_CONSTRAINT:
                    subCD = getSubCheckConstraint(
                        rf.getConstraintId(outRow));
                    break;

                default:
                    if (SanityManager.DEBUG)
                    {
                        SanityManager.THROWASSERT("unexpected value from "+
                            " rf.getConstraintType(outRow) "
                            + rf.getConstraintType(outRow));
                    }
            }

            if (SanityManager.DEBUG)
            {
                SanityManager.ASSERT(subCD != null,
                    "subCD is expected to be non-null");
            }

            cd = (ConstraintDescriptor) rf.buildDescriptor(
                    outRow,
                    subCD,
                    this);

            /* If list is null, then caller only wants a single descriptor - we're done
             * else just add the current descriptor to the list.
             */
            if (list == null)
            {
                break;
            }
            else
            {
                list.add(cd);
            }
        }
    }
    finally
    {
        scanController.close();
    }
    return cd;
}
/**
* Return a table descriptor corresponding to the TABLEID
* field in SYSCONSTRAINTS where CONSTRAINTID matches
* the constraintId passed in.
*
* @param constraintId The id of the constraint
*
* @return the corresponding table descriptor, or null if no
* SYSCONSTRAINTS row matches the given constraint id
*
* @exception StandardException Thrown on error
*/
public TableDescriptor getConstraintTableDescriptor(UUID constraintId)
    throws StandardException
{
    // Fetch the TABLEID column of the matching SYSCONSTRAINTS row(s).
    List tableIds = getConstraints(constraintId,
                        SYSCONSTRAINTSRowFactory.SYSCONSTRAINTS_INDEX1_ID,
                        SYSCONSTRAINTSRowFactory.SYSCONSTRAINTS_TABLEID);

    if (tableIds.isEmpty())
    {
        return null;
    }

    // CONSTRAINTID is unique, so the first (only) entry is the table's UUID.
    return getTableDescriptor((UUID) tableIds.get(0));
}
/**
* Return a list of foreign keys constraints referencing
* this constraint. Returns both enabled and disabled
* foreign keys.
*
* @param constraintId The id of the referenced constraint
*
* @return list of constraints, empty if there are none
*
* @exception StandardException Thrown on error
*/
public ConstraintDescriptorList getForeignKeys(UUID constraintId)
    throws StandardException
{
    TabInfoImpl ti = getNonCoreTI(SYSFOREIGNKEYS_CATALOG_NUM);
    List fkList = newSList();

    // Exact-match key on the referenced constraint's id
    // (index 2 of SYSFOREIGNKEYS is keyed on KEYCONSTRAINTID).
    DataValueDescriptor constraintIDOrderable = getIDValueAsCHAR(constraintId);

    /* Set up the start/stop position for the scan */
    ExecIndexRow keyRow = (ExecIndexRow) exFactory.getIndexableRow(1);
    keyRow.setColumn(1, constraintIDOrderable);

    getDescriptorViaIndex(
                SYSFOREIGNKEYSRowFactory.SYSFOREIGNKEYS_INDEX2_ID,
                keyRow,
                (ScanQualifier [][]) null,
                ti,
                (TupleDescriptor) null,
                fkList,
                false);

    // Resolve each SubKeyConstraintDescriptor to its full
    // ConstraintDescriptor via the owning table's constraint list.
    // (An unused local from the original version was removed, and the
    // loop variables were narrowed to loop scope.)
    ConstraintDescriptorList cdl = new ConstraintDescriptorList();

    for (Iterator iterator = fkList.iterator(); iterator.hasNext(); )
    {
        SubKeyConstraintDescriptor cd =
            (SubKeyConstraintDescriptor) iterator.next();
        TableDescriptor td = getConstraintTableDescriptor(cd.getUUID());
        cdl.add(getConstraintDescriptors(td).getConstraintDescriptorById(cd.getUUID()));
    }

    return cdl;
}
/**
* Return an List which of the relevant column matching
* the indexed criteria. If nothing matches, returns an
* empty List (never returns null).
*
* @param uuid The id of the constraint
* @param indexId The index id in SYS.SYSCONSTRAINTS
* @param columnNum The column to retrieve
*
* @return a list of UUIDs in an List.
*
* @exception StandardException Thrown on error
*/
public List getConstraints(UUID uuid, int indexId, int columnNum)
    throws StandardException
{
    ExecIndexRow indexRow1;
    ExecRow outRow;
    RowLocation baseRowLocation;
    ConglomerateController heapCC = null;
    ScanController scanController = null;
    TransactionController tc;
    TabInfoImpl ti = getNonCoreTI(SYSCONSTRAINTS_CATALOG_NUM);
    SYSCONSTRAINTSRowFactory rf = (SYSCONSTRAINTSRowFactory) ti.getCatalogRowFactory();
    List slist = newSList();

    if (SanityManager.DEBUG)
    {
        SanityManager.ASSERT(indexId == SYSCONSTRAINTSRowFactory.SYSCONSTRAINTS_INDEX1_ID ||
                             indexId == SYSCONSTRAINTSRowFactory.SYSCONSTRAINTS_INDEX3_ID,
                             "bad index id, must be one of the indexes on a uuid");
        SanityManager.ASSERT(columnNum > 0 &&
                             columnNum <= SYSCONSTRAINTSRowFactory.SYSCONSTRAINTS_COLUMN_COUNT,
                             "invalid column number for column to be retrieved");
    }

    try
    {
        /* Use the uuid in both start and stop positions for the scan */
        DataValueDescriptor orderable = getIDValueAsCHAR(uuid);

        /* Set up the start/stop position for the scan */
        ExecIndexRow keyRow = (ExecIndexRow) exFactory.getIndexableRow(1);
        keyRow.setColumn(1, orderable);

        // Get the current transaction controller
        tc = getTransactionCompile();

        outRow = rf.makeEmptyRow();

        heapCC =
            tc.openConglomerate(
                ti.getHeapConglomerate(), false, 0,
                TransactionController.MODE_RECORD,
                TransactionController.ISOLATION_REPEATABLE_READ);

        // create an index row template
        indexRow1 = getIndexRowFromHeapRow(
                        ti.getIndexRowGenerator(indexId),
                        heapCC.newRowLocationTemplate(),
                        outRow);

        // just interested in one column
        DataValueDescriptor[] rowTemplate =
            new DataValueDescriptor[SYSCONSTRAINTSRowFactory.SYSCONSTRAINTS_COLUMN_COUNT];
        FormatableBitSet columnToGetSet =
            new FormatableBitSet(SYSCONSTRAINTSRowFactory.SYSCONSTRAINTS_COLUMN_COUNT);
        columnToGetSet.set(columnNum - 1);

        rowTemplate[columnNum - 1] = new SQLChar();

        // Scan the index and go to the data pages for qualifying rows
        scanController = tc.openScan(
                ti.getIndexConglomerate(indexId),  // conglomerate to open
                false,                             // don't hold open across commit
                0,                                 // for read
                TransactionController.MODE_RECORD,
                TransactionController.ISOLATION_REPEATABLE_READ,  // RESOLVE: should be level 2
                (FormatableBitSet) null,           // all fields as objects
                keyRow.getRowArray(),              // start position - exact key match.
                ScanController.GE,                 // startSearchOperation
                null,                              // scanQualifier (none)
                keyRow.getRowArray(),              // stop position - exact key match.
                ScanController.GT);                // stopSearchOperation

        while (scanController.fetchNext(indexRow1.getRowArray()))
        {
            // The row location is the last column of the index row.
            baseRowLocation = (RowLocation)
                indexRow1.getColumn(indexRow1.nColumns());

            // get the row and grab the uuid
            boolean base_row_exists =
                heapCC.fetch(
                    baseRowLocation, rowTemplate, columnToGetSet);

            if (SanityManager.DEBUG)
            {
                // it can not be possible for heap row to disappear while
                // holding scan cursor on index at ISOLATION_REPEATABLE_READ.
                SanityManager.ASSERT(base_row_exists, "base row not found");
            }

            slist.add(uuidFactory.recreateUUID(
                (String) rowTemplate[columnNum - 1].getObject()));
        }
    }
    finally
    {
        // Close both controllers on all paths.  (Unused locals from the
        // original version were removed; behavior is unchanged.)
        if (heapCC != null)
        {
            heapCC.close();
        }
        if (scanController != null)
        {
            scanController.close();
        }
    }
    return slist;
}
/**
* Adds the given ConstraintDescriptor to the data dictionary,
* associated with the given table and constraint type.
*
* Inserts the shared SYSCONSTRAINTS row first, then the
* type-specific sub-row (SYSKEYS/SYSFOREIGNKEYS or SYSCHECKS).
*
* @param descriptor The descriptor to add
* @param tc The transaction controller
*
* @exception StandardException Thrown on error
*/
public void addConstraintDescriptor(
            ConstraintDescriptor descriptor,
            TransactionController tc)
    throws StandardException
{
    // NOTE: several unused locals (an uninitialized row, a TabInfoImpl,
    // a row factory and an insert return code) were removed; none was read.
    int type = descriptor.getConstraintType();

    if (SanityManager.DEBUG)
    {
        if (!(type == DataDictionary.PRIMARYKEY_CONSTRAINT ||
              type == DataDictionary.FOREIGNKEY_CONSTRAINT ||
              type == DataDictionary.UNIQUE_CONSTRAINT ||
              type == DataDictionary.CHECK_CONSTRAINT))
        {
            SanityManager.THROWASSERT("constraint type (" + type +
                ") is unexpected value");
        }
    }

    // Insert the row common to every constraint type.
    addDescriptor(descriptor, descriptor.getSchemaDescriptor(),
                  SYSCONSTRAINTS_CATALOG_NUM, false,
                  tc);

    switch (type)
    {
        case DataDictionary.PRIMARYKEY_CONSTRAINT:
        case DataDictionary.FOREIGNKEY_CONSTRAINT:
        case DataDictionary.UNIQUE_CONSTRAINT:
            if (SanityManager.DEBUG)
            {
                if (!(descriptor instanceof KeyConstraintDescriptor))
                {
                    SanityManager.THROWASSERT(
                        "descriptor expected to be instanceof KeyConstraintDescriptor, " +
                        "not, " + descriptor.getClass().getName());
                }
            }
            addSubKeyConstraint((KeyConstraintDescriptor) descriptor, tc);
            break;

        case DataDictionary.CHECK_CONSTRAINT:
            if (SanityManager.DEBUG)
            {
                if (!(descriptor instanceof CheckConstraintDescriptor))
                {
                    SanityManager.THROWASSERT("descriptor expected "+
                        "to be instanceof CheckConstraintDescriptorImpl, " +
                        "not, " + descriptor.getClass().getName());
                }
            }
            addDescriptor(descriptor, null, SYSCHECKS_CATALOG_NUM, true, tc);
            break;
    }
}
/**
* Update the constraint descriptor in question. Updates
* every row in the base conglomerate.
*
* @param cd The ConstraintDescriptor
* @param formerUUID The UUID for this column in SYSCONSTRAINTS,
* may differ from what is in cd if this
* is the column that is being set.
* @param colsToSet Array of ints of columns to be modified,
* 1 based. May be null (all cols).
* @param tc The TransactionController to use
*
* @exception StandardException Thrown on failure
*/
public void updateConstraintDescriptor(ConstraintDescriptor cd,
                                       UUID formerUUID,
                                       int[] colsToSet,
                                       TransactionController tc)
    throws StandardException
{
    // (An unused columnNameOrderable local was removed from the original.)
    ExecIndexRow keyRow1;
    ExecRow row;
    DataValueDescriptor IDOrderable;
    TabInfoImpl ti = getNonCoreTI(SYSCONSTRAINTS_CATALOG_NUM);
    SYSCONSTRAINTSRowFactory rf = (SYSCONSTRAINTSRowFactory) ti.getCatalogRowFactory();

    /* Use the former UUID in both start and stop positions for the
     * index 1 (CONSTRAINTID) scan.
     */
    IDOrderable = getIDValueAsCHAR(formerUUID);

    /* Set up the start/stop position for the scan */
    keyRow1 = (ExecIndexRow) exFactory.getIndexableRow(1);
    keyRow1.setColumn(1, IDOrderable);

    // build the row to be stuffed into SYSCONSTRAINTS.
    row = rf.makeRow(cd, null);

    /*
    ** Figure out if the index in sysconstraints needs
    ** to be updated.
    */
    if (SanityManager.DEBUG)
    {
        SanityManager.ASSERT(rf.getNumIndexes() == 3,
            "There are more indexes on sysconstraints than expected, the code herein needs to change");
    }

    boolean[] bArray = new boolean[3];

    /*
    ** Do we need to update indexes?
    */
    if (colsToSet == null)
    {
        // All columns change: every index must be maintained.
        bArray[0] = true;
        bArray[1] = true;
        bArray[2] = true;
    }
    else
    {
        /*
        ** Check the specific columns for indexed
        ** columns.
        */
        for (int i = 0; i < colsToSet.length; i++)
        {
            switch (colsToSet[i])
            {
                case SYSCONSTRAINTSRowFactory.SYSCONSTRAINTS_CONSTRAINTID:
                    bArray[0] = true;
                    break;

                case SYSCONSTRAINTSRowFactory.SYSCONSTRAINTS_CONSTRAINTNAME:
                case SYSCONSTRAINTSRowFactory.SYSCONSTRAINTS_SCHEMAID:
                    bArray[1] = true;
                    break;

                case SYSCONSTRAINTSRowFactory.SYSCONSTRAINTS_TABLEID:
                    bArray[2] = true;
                    break;
            }
        }
    }

    ti.updateRow(keyRow1, row,
                 SYSCONSTRAINTSRowFactory.SYSCONSTRAINTS_INDEX1_ID,
                 bArray,
                 colsToSet,
                 tc);
}
/**
* Drops the given ConstraintDescriptor from the data dictionary.
*
* @param descriptor The descriptor to drop
* @param tc The TransactionController
*
* @exception StandardException Thrown on error
*/
public void dropConstraintDescriptor(
            ConstraintDescriptor descriptor,
            TransactionController tc)
    throws StandardException
{
    TabInfoImpl ti = getNonCoreTI(SYSCONSTRAINTS_CATALOG_NUM);

    // Remove the type-specific sub-row first.
    switch (descriptor.getConstraintType())
    {
        case DataDictionary.PRIMARYKEY_CONSTRAINT:
        case DataDictionary.FOREIGNKEY_CONSTRAINT:
        case DataDictionary.UNIQUE_CONSTRAINT:
            dropSubKeyConstraint(descriptor, tc);
            break;

        case DataDictionary.CHECK_CONSTRAINT:
            dropSubCheckConstraint(descriptor.getUUID(), tc);
            break;
    }

    // Delete the SYSCONSTRAINTS row via index 2, keyed on
    // (constraint name, schema id); the same row serves as both
    // start and stop position.
    ExecIndexRow searchKey = (ExecIndexRow) exFactory.getIndexableRow(2);
    searchKey.setColumn(1, new SQLVarchar(descriptor.getConstraintName()));
    searchKey.setColumn(2, getIDValueAsCHAR(descriptor.getSchemaDescriptor().getUUID()));

    ti.deleteRow(tc, searchKey, SYSCONSTRAINTSRowFactory.SYSCONSTRAINTS_INDEX2_ID);
}
/**
* Drops all ConstraintDescriptors from the data dictionary
* that are associated with the given table.
*
* @param table The table from which to drop all
* constraint descriptors
* @param tc The TransactionController
*
* @exception StandardException Thrown on error
*/
public void dropAllConstraintDescriptors(TableDescriptor table,
                                         TransactionController tc)
    throws StandardException
{
    ConstraintDescriptorList cdl = getConstraintDescriptors(table);

    // Drop each constraint on the table, one row at a time.
    for (Iterator it = cdl.iterator(); it.hasNext(); )
    {
        dropConstraintDescriptor((ConstraintDescriptor) it.next(), tc);
    }

    /*
    ** Null out the table's constraint descriptor list. NOTE: This is
    ** not really necessary at the time of this writing (11/3/97), because
    ** we do not cache data dictionary objects while DDL is going on,
    ** but in the future it might be necessary.
    */
    table.setConstraintDescriptorList(null);
}
/**
* Get a SubKeyConstraintDescriptor from syskeys or sysforeignkeys for
* the specified constraint id. For primary, foreign and unique
* key constraints.
*
* @param constraintId The UUID for the constraint.
* @param type The type of the constraint
* (e.g. DataDictionary.FOREIGNKEY_CONSTRAINT)
*
* @return SubKeyConstraintDescriptor The Sub descriptor for the constraint.
*
* @exception StandardException Thrown on failure
*/
public SubKeyConstraintDescriptor getSubKeyConstraint(UUID constraintId, int type)
    throws StandardException
{
    int catalogNum;
    int indexNum;

    // Foreign keys live in SYSFOREIGNKEYS; primary/unique keys in SYSKEYS.
    if (type == DataDictionary.FOREIGNKEY_CONSTRAINT)
    {
        catalogNum = SYSFOREIGNKEYS_CATALOG_NUM;
        indexNum = SYSFOREIGNKEYSRowFactory.SYSFOREIGNKEYS_INDEX1_ID;
    }
    else
    {
        catalogNum = SYSKEYS_CATALOG_NUM;
        indexNum = SYSKEYSRowFactory.SYSKEYS_INDEX1_ID;
    }

    TabInfoImpl ti = getNonCoreTI(catalogNum);

    // Exact-match key on the constraint's id.
    ExecIndexRow searchKey = (ExecIndexRow) exFactory.getIndexableRow(1);
    searchKey.setColumn(1, getIDValueAsCHAR(constraintId));

    return (SubKeyConstraintDescriptor)
                getDescriptorViaIndex(
                    indexNum,
                    searchKey,
                    (ScanQualifier [][]) null,
                    ti,
                    (TupleDescriptor) null,
                    (List) null,
                    false);
}
/**
* Add the matching row to syskeys when adding a unique or primary key constraint
*
* @param descriptor The KeyConstraintDescriptor for the constraint.
* @param tc The TransactionController
*
* @exception StandardException Thrown on failure
*/
private void addSubKeyConstraint(KeyConstraintDescriptor descriptor,
                                 TransactionController tc)
    throws StandardException
{
    ExecRow row;
    TabInfoImpl ti;

    /*
    ** Foreign keys get a row in SYSFOREIGNKEYS, and
    ** all others get a row in SYSKEYS.
    */
    if (descriptor.getConstraintType()
            == DataDictionary.FOREIGNKEY_CONSTRAINT)
    {
        // Check the runtime type BEFORE casting: in the original code the
        // cast preceded the SanityManager check, so a type mismatch raised
        // a ClassCastException and the assertion could never fire.
        if (SanityManager.DEBUG)
        {
            if (!(descriptor instanceof ForeignKeyConstraintDescriptor))
            {
                SanityManager.THROWASSERT("descriptor not an fk descriptor, is "+
                    descriptor.getClass().getName());
            }
        }

        ForeignKeyConstraintDescriptor fkDescriptor =
            (ForeignKeyConstraintDescriptor) descriptor;

        ti = getNonCoreTI(SYSFOREIGNKEYS_CATALOG_NUM);
        SYSFOREIGNKEYSRowFactory fkkeysRF = (SYSFOREIGNKEYSRowFactory) ti.getCatalogRowFactory();

        row = fkkeysRF.makeRow(fkDescriptor, null);

        /*
        ** Now we need to bump the reference count of the
        ** constraint that this FK references
        */
        ReferencedKeyConstraintDescriptor refDescriptor =
            fkDescriptor.getReferencedConstraint();

        refDescriptor.incrementReferenceCount();

        int[] colsToSet = new int[1];
        colsToSet[0] = SYSCONSTRAINTSRowFactory.SYSCONSTRAINTS_REFERENCECOUNT;

        updateConstraintDescriptor(refDescriptor,
                                   refDescriptor.getUUID(),
                                   colsToSet,
                                   tc);
    }
    else
    {
        ti = getNonCoreTI(SYSKEYS_CATALOG_NUM);
        SYSKEYSRowFactory keysRF = (SYSKEYSRowFactory) ti.getCatalogRowFactory();

        // build the row to be stuffed into SYSKEYS
        row = keysRF.makeRow(descriptor, null);
    }

    // insert row into catalog and all its indices
    ti.insertRow(row, tc);
}
/**
* Drop the matching row from syskeys (or sysforeignkeys) when dropping
* a primary key, unique or foreign key constraint.
*
* @param constraint the constraint
* @param tc The TransactionController
*
* @exception StandardException Thrown on failure
*/
private void dropSubKeyConstraint(ConstraintDescriptor constraint, TransactionController tc)
    throws StandardException
{
    TabInfoImpl ti;
    int baseNum;
    int indexNum;

    if (constraint.getConstraintType()
            == DataDictionary.FOREIGNKEY_CONSTRAINT)
    {
        baseNum = SYSFOREIGNKEYS_CATALOG_NUM;
        indexNum = SYSFOREIGNKEYSRowFactory.SYSFOREIGNKEYS_INDEX1_ID;

        /*
        ** Since we have a foreign key, we need to decrement the
        ** reference count of the constraint that this FK references.
        ** We need to do this *before* we drop the foreign key
        ** because of the way FK.getReferencedConstraint() works.
        **
        ** (The original code re-tested the FOREIGNKEY_CONSTRAINT
        ** condition here inside the branch already guarded by it; the
        ** redundant check has been removed.)
        */
        ReferencedKeyConstraintDescriptor refDescriptor =
            (ReferencedKeyConstraintDescriptor)
                getConstraintDescriptor(
                    ((ForeignKeyConstraintDescriptor) constraint).
                        getReferencedConstraintId());

        if (refDescriptor != null)
        {
            refDescriptor.decrementReferenceCount();

            int[] colsToSet = new int[1];
            colsToSet[0] = SYSCONSTRAINTSRowFactory.SYSCONSTRAINTS_REFERENCECOUNT;

            updateConstraintDescriptor(refDescriptor,
                                       refDescriptor.getUUID(),
                                       colsToSet,
                                       tc);
        }
    }
    else
    {
        baseNum = SYSKEYS_CATALOG_NUM;
        indexNum = SYSKEYSRowFactory.SYSKEYS_INDEX1_ID;
    }

    ti = getNonCoreTI(baseNum);

    /* Use the constraint's UUID in both start and stop position for
     * the index 1 scan.
     */
    DataValueDescriptor constraintIdOrderable = getIDValueAsCHAR(constraint.getUUID());

    /* Set up the start/stop position for the scan */
    ExecIndexRow keyRow1 = (ExecIndexRow) exFactory.getIndexableRow(1);
    keyRow1.setColumn(1, constraintIdOrderable);

    ti.deleteRow(tc, keyRow1, indexNum);
}
/**
* Get a SubCheckConstraintDescriptor from syschecks for
* the specified constraint id. (Useful for check constraints.)
*
* @param constraintId The UUID for the constraint.
*
* @return SubCheckConstraintDescriptor The Sub descriptor for the constraint.
*
* @exception StandardException Thrown on failure
*/
private SubCheckConstraintDescriptor getSubCheckConstraint(UUID constraintId)
    throws StandardException
{
    // (An unused SYSCHECKSRowFactory local was removed from the original.)
    TabInfoImpl ti = getNonCoreTI(SYSCHECKS_CATALOG_NUM);

    /* Use the constraint's UUID in both start and stop positions for
     * an exact-match scan on SYSCHECKS index 1.
     */
    DataValueDescriptor constraintIDOrderable = getIDValueAsCHAR(constraintId);

    /* Set up the start/stop position for the scan */
    ExecIndexRow keyRow = (ExecIndexRow) exFactory.getIndexableRow(1);
    keyRow.setColumn(1, constraintIDOrderable);

    return (SubCheckConstraintDescriptor)
                getDescriptorViaIndex(
                    SYSCHECKSRowFactory.SYSCHECKS_INDEX1_ID,
                    keyRow,
                    (ScanQualifier [][]) null,
                    ti,
                    (TupleDescriptor) null,
                    (List) null,
                    false);
}
/**
* Drop the matching row from syschecks when dropping a check constraint.
*
* @param constraintId The constraint id.
* @param tc The TransactionController
*
* @exception StandardException Thrown on failure
*/
private void dropSubCheckConstraint(UUID constraintId, TransactionController tc)
    throws StandardException
{
    TabInfoImpl ti = getNonCoreTI(SYSCHECKS_CATALOG_NUM);

    // Exact-match key on the constraint's id, used as both the start
    // and the stop position of the index 1 scan.
    ExecIndexRow searchKey = (ExecIndexRow) exFactory.getIndexableRow(1);
    searchKey.setColumn(1, getIDValueAsCHAR(constraintId));

    ti.deleteRow(tc, searchKey, SYSCHECKSRowFactory.SYSCHECKS_INDEX1_ID);
}
/**
* Get all of the ConglomerateDescriptors in the database and
* hash them by conglomerate number.
* This is useful as a performance optimization for the locking VTIs.
* NOTE: This method will scan SYS.SYSCONGLOMERATES at READ UNCOMMITTED.
*
* @param tc TransactionController for the transaction
*
* @return A Hashtable with all of the ConglomerateDescriptors
* in the database hashed by conglomerate number.
*
* @exception StandardException Thrown on failure
*/
public Hashtable hashAllConglomerateDescriptorsByNumber(TransactionController tc)
    throws StandardException
{
    Hashtable ht = new Hashtable();
    ConglomerateDescriptor cd;
    ScanController scanController;
    ExecRow outRow;
    TabInfoImpl ti = coreInfo[SYSCONGLOMERATES_CORE_NUM];
    SYSCONGLOMERATESRowFactory rf = (SYSCONGLOMERATESRowFactory) ti.getCatalogRowFactory();

    outRow = rf.makeEmptyRow();

    scanController = tc.openScan(
            ti.getHeapConglomerate(),      // conglomerate to open
            false,                         // don't hold open across commit
            0,                             // for read
            TransactionController.MODE_RECORD,  // scans whole table.
            TransactionController.ISOLATION_READ_UNCOMMITTED,
            (FormatableBitSet) null,       // all fields as objects
            (DataValueDescriptor[]) null,  // start position - first row
            ScanController.GE,             // startSearchOperation
            (ScanQualifier [][]) null,
            (DataValueDescriptor[]) null,  // stop position - through last row
            ScanController.GT);            // stopSearchOperation

    try
    {
        // it is important for read uncommitted scans to use fetchNext() rather
        // than fetch, so that the fetch happens while latch is held, otherwise
        // the next() might position the scan on a row, but the subsequent
        // fetch() may find the row deleted or purged from the table.
        while (scanController.fetchNext(outRow.getRowArray()))
        {
            cd = (ConglomerateDescriptor) rf.buildDescriptor(
                    outRow,
                    (TupleDescriptor) null,
                    this);
            // Long.valueOf avoids allocating a fresh Long for small values
            // (the original used the deprecated new Long(...) constructor).
            ht.put(Long.valueOf(cd.getConglomerateNumber()), cd);
        }
    }
    finally
    {
        // Close the scan on all paths; the original leaked it if
        // buildDescriptor threw mid-scan.
        scanController.close();
    }

    return ht;
}
/**
 * Get all of the TableDescriptors in the database and hash them
 * by TableId. This is useful as a performance optimization for the
 * locking VTIs. NOTE: This method will scan SYS.SYSTABLES and
 * SYS.SYSSCHEMAS at READ UNCOMMITTED.
 *
 * @param tc TransactionController for the transaction
 *
 * @return A Hashtable with all of the Table descriptors in the database
 *         hashed by TableId
 *
 * @exception StandardException Thrown on failure
 */
public Hashtable hashAllTableDescriptorsByTableId(TransactionController tc)
    throws StandardException
{
    Hashtable ht = new Hashtable();
    TabInfoImpl ti = coreInfo[SYSTABLES_CORE_NUM];
    SYSTABLESRowFactory rf = (SYSTABLESRowFactory) ti.getCatalogRowFactory();
    ExecRow outRow = rf.makeEmptyRow();

    ScanController scanController = tc.openScan(
            ti.getHeapConglomerate(),           // sys.systable
            false,                              // don't hold open across commit
            0,                                  // for read
            TransactionController.MODE_RECORD,  // scans whole table.
            TransactionController.ISOLATION_READ_UNCOMMITTED,
            (FormatableBitSet) null,            // all fields as objects
            (DataValueDescriptor[]) null,       // start position - first row
            ScanController.GE,                  // startSearchOperation
            (ScanQualifier[][]) null,           // scanQualifier
            (DataValueDescriptor[]) null,       // stop position - through last row
            ScanController.GT);                 // stopSearchOperation

    // FIX: close the scan in a finally block so the open scan is not
    // leaked when buildDescriptor (or fetchNext) throws.
    try
    {
        // it is important for read uncommitted scans to use fetchNext() rather
        // than fetch, so that the fetch happens while the latch is held;
        // otherwise next() might position the scan on a row, but the
        // subsequent fetch() may find the row deleted or purged from the table.
        while (scanController.fetchNext(outRow.getRowArray()))
        {
            TableDescriptor td = (TableDescriptor)
                rf.buildDescriptor(
                    outRow,
                    (TupleDescriptor) null,
                    this,
                    TransactionController.ISOLATION_READ_UNCOMMITTED);
            ht.put(td.getUUID(), td);
        }
    }
    finally
    {
        scanController.close();
    }

    return ht;
}
/**
 * Get a ConglomerateDescriptor given its UUID. If it is an index
 * conglomerate shared by at least one duplicate index, this returns
 * one of the ConglomerateDescriptors for those indexes.
 *
 * @param uuid The UUID
 *
 * @return A ConglomerateDescriptor for the conglomerate, or null if
 *         no conglomerate has the given UUID.
 *
 * @exception StandardException Thrown on failure
 */
public ConglomerateDescriptor getConglomerateDescriptor(UUID uuid)
    throws StandardException
{
    ConglomerateDescriptor[] matches = getConglomerateDescriptors(uuid);
    return (matches.length == 0) ? null : matches[0];
}
/**
 * Get an array of ConglomerateDescriptors given the UUID. If it is a
 * heap conglomerate or an index conglomerate not shared by a duplicate
 * index, the size of the returned array is 1.
 *
 * @param uuid The UUID
 *
 * @return An array of ConglomerateDescriptors for the conglomerate;
 *         a size-0 array if no such conglomerate exists.
 *
 * @exception StandardException Thrown on failure
 */
public ConglomerateDescriptor[] getConglomerateDescriptors(UUID uuid)
    throws StandardException
{
    TabInfoImpl conglomsTI = coreInfo[SYSCONGLOMERATES_CORE_NUM];

    // The UUID (as a CHAR) is the single key column of index 1; the
    // same value bounds both ends of the scan.
    ExecIndexRow scanKey = exFactory.getIndexableRow(1);
    scanKey.setColumn(1, getIDValueAsCHAR(uuid));

    List matches = newSList();
    getDescriptorViaIndex(
        SYSCONGLOMERATESRowFactory.SYSCONGLOMERATES_INDEX1_ID,
        scanKey,
        (ScanQualifier[][]) null,
        conglomsTI,
        (TupleDescriptor) null,
        matches,
        false);

    ConglomerateDescriptor[] result = new ConglomerateDescriptor[matches.size()];
    matches.toArray(result);
    return result;
}
/**
 * Get a ConglomerateDescriptor given its conglomerate number. If it is an
 * index conglomerate shared by at least one duplicate index, this returns
 * one of the ConglomerateDescriptors for those indexes.
 *
 * @param conglomerateNumber The conglomerate number.
 *
 * @return A ConglomerateDescriptor for the conglomerate, or null if
 *         no such conglomerate exists.
 *
 * @exception StandardException Thrown on failure
 */
public ConglomerateDescriptor getConglomerateDescriptor(
    long conglomerateNumber)
    throws StandardException
{
    ConglomerateDescriptor[] matches =
        getConglomerateDescriptors(conglomerateNumber);
    return (matches.length == 0) ? null : matches[0];
}
/**
 * Get an array of conglomerate descriptors for the given conglomerate
 * number. If it is a heap conglomerate or an index conglomerate not
 * shared by a duplicate index, the size of the returned array is 1.
 *
 * @param conglomerateNumber The number for the conglomerate
 *                           we're interested in
 *
 * @return An array of ConglomerateDescriptors that share the requested
 *         conglomerate. Returns a size-0 array if no such conglomerate.
 *
 * @exception StandardException Thrown on failure
 */
public ConglomerateDescriptor[] getConglomerateDescriptors(
    long conglomerateNumber)
    throws StandardException
{
    // FIX: removed unused locals (scanController, tc, outRow) that were
    // declared but never referenced, and access the column-position
    // constant through the class rather than an instance.
    TabInfoImpl ti = coreInfo[SYSCONGLOMERATES_CORE_NUM];

    // There is no index on the conglomerate number, so qualify a heap
    // scan on that column, matching rows equal to the requested number.
    DataValueDescriptor conglomNumberOrderable =
        new SQLLongint(conglomerateNumber);

    ScanQualifier[][] scanQualifier = exFactory.getScanQualifier(1);
    scanQualifier[0][0].setQualifier(
        SYSCONGLOMERATESRowFactory.SYSCONGLOMERATES_CONGLOMERATENUMBER - 1, /* column number */
        conglomNumberOrderable,
        Orderable.ORDER_OP_EQUALS,
        false,
        false,
        false);

    ConglomerateDescriptorList cdl = new ConglomerateDescriptorList();
    getDescriptorViaHeap(scanQualifier,
                         ti,
                         null,
                         cdl);

    // Copy the matches into a fixed-size result array.
    int size = cdl.size();
    ConglomerateDescriptor[] cda = new ConglomerateDescriptor[size];
    for (int index = 0; index < size; index++)
    {
        cda[index] = (ConglomerateDescriptor) cdl.get(index);
    }
    return cda;
}
/**
 * Populate the ConglomerateDescriptorList of the specified
 * TableDescriptor by scanning sysconglomerates.
 *
 * MT synchronization: it is assumed that the caller has synchronized
 * on the CDL in the given TD.
 *
 * @param td The TableDescriptor.
 *
 * @exception StandardException Thrown on failure
 */
private void getConglomerateDescriptorsScan(TableDescriptor td)
    throws StandardException
{
    TabInfoImpl conglomsTI = coreInfo[SYSCONGLOMERATES_CORE_NUM];

    // Index 3 is keyed on the owning table's id (as a CHAR); the same
    // value bounds both ends of the scan.
    ExecIndexRow scanKey = (ExecIndexRow) exFactory.getIndexableRow(1);
    scanKey.setColumn(1, getIDValueAsCHAR(td.getUUID()));

    getDescriptorViaIndex(
        SYSCONGLOMERATESRowFactory.SYSCONGLOMERATES_INDEX3_ID,
        scanKey,
        (ScanQualifier[][]) null,
        conglomsTI,
        (TupleDescriptor) null,
        td.getConglomerateDescriptorList(),
        false);
}
/**
 * Gets a conglomerate descriptor for the named index in the given schema,
 * getting an exclusive row lock on the matching row in
 * sys.sysconglomerates (for DDL concurrency) if requested.
 *
 * @param indexName The name of the index we're looking for
 * @param sd        The schema descriptor
 * @param forUpdate Whether or not to get an exclusive row
 *                  lock on the row in sys.sysconglomerates.
 *
 * @return A ConglomerateDescriptor describing the requested
 *         conglomerate, or null if no such conglomerate exists.
 *
 * @exception StandardException Thrown on failure
 */
public ConglomerateDescriptor getConglomerateDescriptor(
    String indexName,
    SchemaDescriptor sd,
    boolean forUpdate)
    throws StandardException
{
    TabInfoImpl conglomsTI = coreInfo[SYSCONGLOMERATES_CORE_NUM];

    // Index 2 is keyed on (conglomerate name, schema id); both columns
    // are fixed, so the key bounds both ends of the scan.
    ExecIndexRow scanKey = exFactory.getIndexableRow(2);
    scanKey.setColumn(1, new SQLVarchar(indexName));
    scanKey.setColumn(2, getIDValueAsCHAR(sd.getUUID()));

    return (ConglomerateDescriptor)
        getDescriptorViaIndex(
            SYSCONGLOMERATESRowFactory.SYSCONGLOMERATES_INDEX2_ID,
            scanKey,
            (ScanQualifier[][]) null,
            conglomsTI,
            (TupleDescriptor) null,
            (List) null,
            forUpdate);
}
/**
 * Drops a conglomerate descriptor.
 *
 * @param conglomerate The ConglomerateDescriptor for the conglomerate
 * @param tc           TransactionController for the transaction
 *
 * @exception StandardException Thrown on failure
 */
public void dropConglomerateDescriptor(
    ConglomerateDescriptor conglomerate,
    TransactionController tc)
    throws StandardException
{
    TabInfoImpl conglomsTI = coreInfo[SYSCONGLOMERATES_CORE_NUM];

    // Index 2 is keyed on (conglomerate name, schema id); together they
    // identify exactly the row to remove.
    ExecIndexRow deleteKey = (ExecIndexRow) exFactory.getIndexableRow(2);
    deleteKey.setColumn(1, new SQLVarchar(conglomerate.getConglomerateName()));
    deleteKey.setColumn(2, getIDValueAsCHAR(conglomerate.getSchemaID()));

    conglomsTI.deleteRow(
        tc, deleteKey, SYSCONGLOMERATESRowFactory.SYSCONGLOMERATES_INDEX2_ID);
}
/**
 * Drops all conglomerates associated with a table.
 *
 * @param td The TableDescriptor of the table
 * @param tc TransactionController for the transaction
 *
 * @exception StandardException Thrown on failure
 */
public void dropAllConglomerateDescriptors(
    TableDescriptor td,
    TransactionController tc)
    throws StandardException
{
    TabInfoImpl conglomsTI = coreInfo[SYSCONGLOMERATES_CORE_NUM];

    // Index 3 is keyed on the owning table's id alone, so one key value
    // bounds both ends of the delete scan and removes every row for td.
    ExecIndexRow deleteKey = (ExecIndexRow) exFactory.getIndexableRow(1);
    deleteKey.setColumn(1, getIDValueAsCHAR(td.getUUID()));

    conglomsTI.deleteRow(
        tc, deleteKey, SYSCONGLOMERATESRowFactory.SYSCONGLOMERATES_INDEX3_ID);
}
/**
 * Update the conglomerateNumber for a ConglomerateDescriptor.
 * This is useful, in 1.3, when doing a bulkInsert into an
 * empty table where we insert into a new conglomerate.
 * (This will go away in 1.4.)
 *
 * @param cd                 The ConglomerateDescriptor
 * @param conglomerateNumber The new conglomerate number
 * @param tc                 The TransactionController to use
 *
 * @exception StandardException Thrown on failure
 */
public void updateConglomerateDescriptor(ConglomerateDescriptor cd,
                                         long conglomerateNumber,
                                         TransactionController tc)
    throws StandardException
{
    // Delegate to the array flavor with a singleton array.
    updateConglomerateDescriptor(
        new ConglomerateDescriptor[] { cd }, conglomerateNumber, tc);
}
/**
 * Update all system schemas to have a new authorizationId. This is needed
 * while upgrading pre-10.2 databases to 10.2 or later versions. From 10.2,
 * all system schemas are owned by the database owner's authorizationId.
 *
 * @param aid AuthorizationID of Database Owner
 * @param tc  TransactionController to use
 *
 * @exception StandardException Thrown on failure
 */
public void updateSystemSchemaAuthorization(String aid,
                                            TransactionController tc)
    throws StandardException
{
    // Every system schema whose ownership moves to the database owner.
    String[] systemSchemaNames = {
        SchemaDescriptor.STD_SYSTEM_SCHEMA_NAME,
        SchemaDescriptor.IBM_SYSTEM_SCHEMA_NAME,
        SchemaDescriptor.IBM_SYSTEM_CAT_SCHEMA_NAME,
        SchemaDescriptor.IBM_SYSTEM_FUN_SCHEMA_NAME,
        SchemaDescriptor.IBM_SYSTEM_PROC_SCHEMA_NAME,
        SchemaDescriptor.IBM_SYSTEM_STAT_SCHEMA_NAME,
        SchemaDescriptor.IBM_SYSTEM_NULLID_SCHEMA_NAME,
        SchemaDescriptor.STD_SQLJ_SCHEMA_NAME,
        SchemaDescriptor.STD_SYSTEM_DIAG_SCHEMA_NAME,
        SchemaDescriptor.STD_SYSTEM_UTIL_SCHEMA_NAME,
    };

    for (int i = 0; i < systemSchemaNames.length; i++)
    {
        updateSchemaAuth(systemSchemaNames[i], aid, tc);
    }

    // now reset our understanding of who owns the database
    resetDatabaseOwner(tc);
}
/**
 * Update the authorizationId of the schema with the specified schemaName.
 *
 * @param schemaName Schema Name of system schema
 * @param authorizationId authorizationId of new schema owner
 * @param tc The TransactionController to use
 *
 * @exception StandardException Thrown on failure
 */
public void updateSchemaAuth(String schemaName,
String authorizationId,
TransactionController tc)
throws StandardException
{
ExecIndexRow keyRow;
DataValueDescriptor schemaNameOrderable;
TabInfoImpl ti = coreInfo[SYSSCHEMAS_CORE_NUM];
/* Use schemaNameOrderable in both start
 * and stop position for index 1 scan.
 */
schemaNameOrderable = new SQLVarchar(schemaName);
/* Set up the start/stop position for the scan */
keyRow = (ExecIndexRow) exFactory.getIndexableRow(1);
keyRow.setColumn(1, schemaNameOrderable);
SYSSCHEMASRowFactory rf = (SYSSCHEMASRowFactory) ti.getCatalogRowFactory();
// Build a template row carrying only the new authorization id; the
// colsToUpdate list below restricts the update to that single column.
ExecRow row = rf.makeEmptyRow();
row.setColumn(SYSSCHEMASRowFactory.SYSSCHEMAS_SCHEMAAID,
new SQLVarchar(authorizationId));
// NOTE(review): bArray presumably flags which of the catalog's indexes
// need maintenance; neither is marked since no key column changes --
// confirm against TabInfoImpl.updateRow's contract.
boolean[] bArray = {false, false};
int[] colsToUpdate = {SYSSCHEMASRowFactory.SYSSCHEMAS_SCHEMAAID};
ti.updateRow(keyRow, row,
SYSSCHEMASRowFactory.SYSSCHEMAS_INDEX1_ID,
bArray,
colsToUpdate,
tc);
}
/**
 * Update the conglomerateNumber for an array of ConglomerateDescriptors.
 * In case of more than one ConglomerateDescriptor, each descriptor
 * should be updated separately, conglomerate id is not the same for all
 * the descriptors. Even when indexes are sharing the same
 * conglomerate (conglomerate number), conglomerate ids are unique.
 *
 * This is useful, in 1.3, when doing a bulkInsert into an
 * empty table where we insert into a new conglomerate.
 * (This will go away in 1.4.)
 *
 * @param cds The array of ConglomerateDescriptors
 * @param conglomerateNumber The new conglomerate number
 * @param tc The TransactionController to use
 *
 * @exception StandardException Thrown on failure
 */
public void updateConglomerateDescriptor(ConglomerateDescriptor[] cds,
long conglomerateNumber,
TransactionController tc)
throws StandardException
{
ExecIndexRow keyRow1 = null;
ExecRow row;
DataValueDescriptor conglomIDOrderable;
TabInfoImpl ti = coreInfo[SYSCONGLOMERATES_CORE_NUM];
SYSCONGLOMERATESRowFactory rf = (SYSCONGLOMERATESRowFactory) ti.getCatalogRowFactory();
// NOTE(review): bArray presumably flags which of the three indexes need
// maintenance; the conglomerate number is not an index key here, so none
// are marked -- confirm against TabInfoImpl.updateRow's contract.
boolean[] bArray = {false, false, false};
for (int i = 0; i < cds.length; i++)
{
/* Use conglomIDOrderable in both start
 * and stop position for index 1 scan.
 */
conglomIDOrderable = getIDValueAsCHAR(cds[i].getUUID());
/* Set up the start/stop position for the scan */
keyRow1 = (ExecIndexRow) exFactory.getIndexableRow(1);
keyRow1.setColumn(1, conglomIDOrderable);
// Mutates the in-memory descriptor before rebuilding its catalog row,
// so the stored row reflects the new conglomerate number.
cds[i].setConglomerateNumber(conglomerateNumber);
// build the row to be stuffed into SYSCONGLOMERATES.
row = rf.makeRow(cds[i], null);
// update row in catalog (no indexes)
ti.updateRow(keyRow1, row,
SYSCONGLOMERATESRowFactory.SYSCONGLOMERATES_INDEX1_ID,
bArray,
(int[])null,
tc);
}
}
/**
 * Gets a list of the dependency descriptors for the given dependent's id.
 *
 * @param dependentID The ID of the dependent we're interested in
 *
 * @return List of DependencyDescriptors; empty when the dependent's ID
 *         has no stored dependencies.
 *
 * @exception StandardException Thrown on failure
 */
public List getDependentsDescriptorList(String dependentID)
    throws StandardException
{
    TabInfoImpl dependsTI = getNonCoreTI(SYSDEPENDS_CATALOG_NUM);

    // The dependent's id is the single key column of index 1; the same
    // value bounds both ends of the scan.
    ExecIndexRow scanKey = exFactory.getIndexableRow(1);
    scanKey.setColumn(1, new SQLChar(dependentID));

    List result = newSList();
    getDescriptorViaIndex(
        SYSDEPENDSRowFactory.SYSDEPENDS_INDEX1_ID,
        scanKey,
        (ScanQualifier[][]) null,
        dependsTI,
        (TupleDescriptor) null,
        result,
        false);

    return result;
}
/**
 * Gets a list of the dependency descriptors for the given provider's id.
 *
 * @param providerID The ID of the provider we're interested in
 *
 * @return List of DependencyDescriptors; empty when the provider's ID
 *         has no stored dependencies.
 *
 * @exception StandardException Thrown on failure
 */
public List getProvidersDescriptorList(String providerID)
    throws StandardException
{
    TabInfoImpl dependsTI = getNonCoreTI(SYSDEPENDS_CATALOG_NUM);

    // The provider's id is the single key column of index 2; the same
    // value bounds both ends of the scan.
    ExecIndexRow scanKey = exFactory.getIndexableRow(1);
    scanKey.setColumn(1, new SQLChar(providerID));

    List result = newSList();
    getDescriptorViaIndex(
        SYSDEPENDSRowFactory.SYSDEPENDS_INDEX2_ID,
        scanKey,
        (ScanQualifier[][]) null,
        dependsTI,
        (TupleDescriptor) null,
        result,
        false);

    return result;
}
/**
 * Build and return a List with DependencyDescriptors for
 * all of the stored dependencies.
 * This is useful for consistency checking.
 *
 * @return List of all DependencyDescriptors.
 *
 * @exception StandardException Thrown on failure
 */
public List getAllDependencyDescriptorsList()
    throws StandardException
{
    // FIX: removed the unused templateRow local; close the scan in a
    // finally block so it is not leaked when buildDescriptor throws.
    List ddl = newSList();
    TabInfoImpl ti = getNonCoreTI(SYSDEPENDS_CATALOG_NUM);
    SYSDEPENDSRowFactory rf = (SYSDEPENDSRowFactory) ti.getCatalogRowFactory();

    // Get the current transaction controller
    TransactionController tc = getTransactionCompile();

    ExecRow outRow = rf.makeEmptyRow();

    ScanController scanController = tc.openScan(
            ti.getHeapConglomerate(),          // conglomerate to open
            false,                             // don't hold open across commit
            0,                                 // for read
            TransactionController.MODE_TABLE,  // scans entire table.
            TransactionController.ISOLATION_REPEATABLE_READ,
            (FormatableBitSet) null,           // all fields as objects
            null,                              // start position - first row
            ScanController.GE,                 // startSearchOperation
            null,
            null,                              // stop position - through last row
            ScanController.GT);                // stopSearchOperation

    try
    {
        while (scanController.fetchNext(outRow.getRowArray()))
        {
            DependencyDescriptor dependencyDescriptor =
                (DependencyDescriptor) rf.buildDescriptor(
                    outRow,
                    (TupleDescriptor) null,
                    this);
            ddl.add(dependencyDescriptor);
        }
    }
    finally
    {
        scanController.close();
    }

    return ddl;
}
/**
 * Drop a single dependency from the data dictionary.
 *
 * @param dd The DependencyDescriptor.
 * @param tc TransactionController for the transaction
 *
 * @exception StandardException Thrown on failure
 */
public void dropStoredDependency(DependencyDescriptor dd,
TransactionController tc )
throws StandardException
{
ExecIndexRow keyRow1 = null;
UUID dependentID = dd.getUUID();
UUID providerID = dd.getProviderID();
DataValueDescriptor dependentIDOrderable = getIDValueAsCHAR(dependentID);
TabInfoImpl ti = getNonCoreTI(SYSDEPENDS_CATALOG_NUM);
/* Use dependentIDOrderable in both start
 * and stop position for index 1 scan.
 */
keyRow1 = (ExecIndexRow) exFactory.getIndexableRow(1);
keyRow1.setColumn(1, dependentIDOrderable);
// Index 1 is keyed on the dependent id only, which may match several
// rows; the filter below narrows the delete to rows with this providerID.
// only drop the rows which have this providerID
TupleFilter filter = new DropDependencyFilter( providerID );
ti.deleteRows( tc,
keyRow1, // start row
ScanController.GE,
null, //qualifier
filter, // filter on base row
keyRow1, // stop row
ScanController.GT,
SYSDEPENDSRowFactory.SYSDEPENDS_INDEX1_ID );
}
/**
 * Remove all of the stored dependencies for a given dependent's ID
 * from the data dictionary.
 *
 * @param dependentsUUID Dependent's uuid
 * @param tc TransactionController for the transaction
 *
 * @exception StandardException Thrown on failure
 */
public void dropDependentsStoredDependencies(UUID dependentsUUID,
TransactionController tc)
throws StandardException
{
// Delegates to the three-argument overload with wait == true
// (presumably: block for locks rather than fail fast -- see that overload).
dropDependentsStoredDependencies(dependentsUUID, tc, true);
}
/**
 * Remove all of the stored dependencies for a given dependent's ID
 * from the data dictionary.
 *
 * @param dependentsUUID Dependent's uuid
 * @param tc             TransactionController for the transaction
 * @param wait           passed through to the catalog delete; presumably
 *                       whether to wait for locks -- see TabInfoImpl.deleteRow
 *
 * @exception StandardException Thrown on failure
 */
public void dropDependentsStoredDependencies(UUID dependentsUUID,
                                             TransactionController tc,
                                             boolean wait)
    throws StandardException
{
    TabInfoImpl dependsTI = getNonCoreTI(SYSDEPENDS_CATALOG_NUM);

    // The dependent's id is the sole key of index 1, so one key value
    // bounds both ends of the delete scan.
    ExecIndexRow deleteKey = (ExecIndexRow) exFactory.getIndexableRow(1);
    deleteKey.setColumn(1, getIDValueAsCHAR(dependentsUUID));

    dependsTI.deleteRow(
        tc, deleteKey, SYSDEPENDSRowFactory.SYSDEPENDS_INDEX1_ID, wait);
}
/**
 * Get the UUID Factory. (No need to make the UUIDFactory a module.)
 *
 * @return UUIDFactory The UUID Factory for this DataDictionary.
 */
public UUIDFactory getUUIDFactory()
{
// Simple accessor for the factory cached on this data dictionary.
return uuidFactory;
}
/**
 * Get the alias descriptor for an ANSI UDT.
 *
 * @param tc  The transaction to use: if null, use the compilation transaction
 * @param dtd The UDT's type descriptor
 *
 * @return The UDT's alias descriptor if it is an ANSI UDT; null otherwise.
 */
public AliasDescriptor getAliasDescriptorForUDT(TransactionController tc, DataTypeDescriptor dtd)
    throws StandardException
{
    // Default to the compilation transaction when none was supplied.
    // (Resolved before the null check on dtd, matching the original
    // evaluation order.)
    TransactionController txn = (tc == null) ? getTransactionCompile() : tc;

    if (dtd == null)
    {
        return null;
    }

    BaseTypeIdImpl baseTypeId = dtd.getTypeId().getBaseTypeId();
    if (!baseTypeId.isAnsiUDT())
    {
        return null;
    }

    SchemaDescriptor schema =
        getSchemaDescriptor(baseTypeId.getSchemaName(), txn, true);

    return getAliasDescriptor(
        schema.getUUID().toString(),
        baseTypeId.getUnqualifiedName(),
        AliasInfo.ALIAS_NAME_SPACE_UDT_AS_CHAR);
}
/**
 * Get an AliasDescriptor given its UUID.
 *
 * @param uuid The UUID
 *
 * @return The AliasDescriptor for the alias, or null if none matches.
 *
 * @exception StandardException Thrown on failure
 */
public AliasDescriptor getAliasDescriptor(UUID uuid)
    throws StandardException
{
    TabInfoImpl aliasesTI = getNonCoreTI(SYSALIASES_CATALOG_NUM);

    // The alias id (as a CHAR) is the single key column of index 2; the
    // same value bounds both ends of the scan.
    ExecIndexRow scanKey = exFactory.getIndexableRow(1);
    scanKey.setColumn(1, getIDValueAsCHAR(uuid));

    return (AliasDescriptor)
        getDescriptorViaIndex(
            SYSALIASESRowFactory.SYSALIASES_INDEX2_ID,
            scanKey,
            (ScanQualifier[][]) null,
            aliasesTI,
            (TupleDescriptor) null,
            (List) null,
            false);
}
/**
 * Get an AliasDescriptor by alias name and name space.
 * NOTE: caller is responsible for handling no match.
 *
 * @param schemaId  schema identifier
 * @param aliasName The alias name.
 * @param nameSpace The alias type.
 *
 * @return AliasDescriptor for the alias name and name space, or null
 *         if none matches.
 *
 * @exception StandardException Thrown on failure
 */
public AliasDescriptor getAliasDescriptor(String schemaId, String aliasName, char nameSpace)
    throws StandardException
{
    TabInfoImpl aliasesTI = getNonCoreTI(SYSALIASES_CATALOG_NUM);

    // Index 1 is keyed on (schema id, alias name, name space); all three
    // columns are fixed, so the key bounds both ends of the scan.
    ExecIndexRow scanKey = exFactory.getIndexableRow(3);
    scanKey.setColumn(1, new SQLChar(schemaId));
    scanKey.setColumn(2, new SQLVarchar(aliasName));
    scanKey.setColumn(3, new SQLChar(String.valueOf(nameSpace)));

    return (AliasDescriptor)
        getDescriptorViaIndex(
            SYSALIASESRowFactory.SYSALIASES_INDEX1_ID,
            scanKey,
            (ScanQualifier[][]) null,
            aliasesTI,
            (TupleDescriptor) null,
            (List) null,
            false);
}
/**
Get the list of routines matching the schema and routine name.
While we only support a single alias for a given name, namespace, just
return a list of zero or one item.
If the schema is SYSFUN then do not use the system catalogs,
but instead look up the routines from the in-memory table driven
by the contents of SYSFUN_FUNCTIONS.
*/
public java.util.List getRoutineList(String schemaID, String routineName, char nameSpace)
throws StandardException {
java.util.List list = new java.util.ArrayList();
// Special in-memory table lookup for SYSFUN
if (schemaID.equals(SchemaDescriptor.SYSFUN_SCHEMA_UUID)
&& nameSpace == AliasInfo.ALIAS_NAME_SPACE_FUNCTION_AS_CHAR)
{
for (int f = 0; f < DataDictionaryImpl.SYSFUN_FUNCTIONS.length; f++)
{
String[] details = DataDictionaryImpl.SYSFUN_FUNCTIONS[f];
String name = details[0];
if (!name.equals(routineName))
continue;
// Descriptors for SYSFUN routines are built lazily and cached in
// SYSFUN_AD. NOTE(review): this unsynchronized read-then-write is
// not strictly thread-safe; concurrent callers may each build a
// descriptor and one overwrites the other -- confirm this is benign.
AliasDescriptor ad = DataDictionaryImpl.SYSFUN_AD[f];
if (ad == null)
{
// details[1] Return type
TypeDescriptor rt =
DataTypeDescriptor.getBuiltInDataTypeDescriptor(details[1]).getCatalogType();
boolean isDeterministic = Boolean.valueOf( details[ SYSFUN_DETERMINISTIC_INDEX ] ).booleanValue();
// Determine the number of arguments (could be zero).
int paramCount = details.length - SYSFUN_FIRST_PARAMETER_INDEX;
TypeDescriptor[] pt = new TypeDescriptor[paramCount];
String[] paramNames = new String[paramCount];
int[] paramModes = new int[paramCount];
for (int i = 0; i < paramCount; i++) {
pt[i] = DataTypeDescriptor.getBuiltInDataTypeDescriptor(
details[SYSFUN_FIRST_PARAMETER_INDEX +i]).getCatalogType();
paramNames[i] = "P" + (i +1); // Dummy names
// All parameters must be IN.
paramModes[i] = JDBC30Translation.PARAMETER_MODE_IN;
}
// details[3] = java method
RoutineAliasInfo ai = new RoutineAliasInfo(details[3],
paramCount, paramNames,
pt, paramModes, 0,
RoutineAliasInfo.PS_JAVA, RoutineAliasInfo.NO_SQL, isDeterministic,
false, /* hasDefinersRights */
false, rt);
// details[2] = class name
ad = new AliasDescriptor(this, uuidFactory.createUUID(), name,
uuidFactory.recreateUUID(schemaID),
details[2], AliasInfo.ALIAS_TYPE_FUNCTION_AS_CHAR,
AliasInfo.ALIAS_NAME_SPACE_FUNCTION_AS_CHAR,
true, ai, null);
DataDictionaryImpl.SYSFUN_AD[f] = ad;
}
list.add(ad);
}
return list;
}
// Regular path: look the routine up in SYS.SYSALIASES.
AliasDescriptor ad = getAliasDescriptor(schemaID, routineName, nameSpace);
if (ad != null) {
list.add(ad);
}
return list;
}
/**
 * Drop an AliasDescriptor from the DataDictionary.
 *
 * @param ad The AliasDescriptor to drop
 * @param tc The TransactionController
 *
 * @exception StandardException Thrown on failure
 */
public void dropAliasDescriptor(AliasDescriptor ad,
                                TransactionController tc)
    throws StandardException
{
    TabInfoImpl aliasesTI = getNonCoreTI(SYSALIASES_CATALOG_NUM);

    // Index 1 is keyed on (schema id, alias name, name space); all three
    // columns of the descriptor identify exactly the row(s) to delete.
    ExecIndexRow deleteKey = (ExecIndexRow) exFactory.getIndexableRow(3);
    deleteKey.setColumn(1, getIDValueAsCHAR(ad.getSchemaUUID()));
    deleteKey.setColumn(2, new SQLVarchar(ad.getDescriptorName()));
    deleteKey.setColumn(3, new SQLChar(String.valueOf(ad.getNameSpace())));

    aliasesTI.deleteRow(tc, deleteKey, SYSALIASESRowFactory.SYSALIASES_INDEX1_ID);
}
//
// class implementation
//
/**
 * Initialize system catalogs. This is where we perform upgrade. It is our
 * pious hope that we won't ever have to upgrade the core catalogs, other than
 * to add fields inside Formatable columns in these catalogs.
 *
 * If we do have to upgrade the core catalogs, then we may need to move the
 * loadCatalog calls into the upgrade machinery. It's do-able, just not pretty.
 *
 *
 * @param tc TransactionController
 * @param ddg DataDescriptorGenerator
 * @param startParams boot-time properties, passed through to the upgrade machinery
 *
 * @exception StandardException Thrown on error
 */
protected void loadDictionaryTables(TransactionController tc,
DataDescriptorGenerator ddg,
Properties startParams)
throws StandardException
{
// load the core catalogs first
loadCatalogs(ddg, coreInfo);
// Read the on-disk dictionary version; it drives whether an upgrade
// is needed below.
dictionaryVersion = (DD_Version)tc.getProperty(
DataDictionary.CORE_DATA_DICTIONARY_VERSION);
// The owner's authorization id lives in the catalogs just loaded.
resetDatabaseOwner( tc );
softwareVersion.upgradeIfNeeded(dictionaryVersion, tc, startParams);
}
/**
 * Reset the database owner according to what is stored in the catalogs.
 * This can change at upgrade time, so this logic is factored into a
 * separately callable method.
 *
 * @param tc TransactionController
 *
 * @exception StandardException Thrown on error
 */
public void resetDatabaseOwner(TransactionController tc)
    throws StandardException
{
    // Read the authorization id recorded on the SYSIBM schema row.
    SchemaDescriptor sysIBMRow =
        locateSchemaRow(SchemaDescriptor.IBM_SYSTEM_SCHEMA_NAME, tc);
    authorizationDatabaseOwner = sysIBMRow.getAuthorizationId();

    // Propagate the owner to the cached system schema descriptors.
    SchemaDescriptor[] cachedSystemSchemas =
        { systemSchemaDesc, sysIBMSchemaDesc, systemUtilSchemaDesc };
    for (int i = 0; i < cachedSystemSchemas.length; i++)
    {
        cachedSystemSchemas[i].setAuthorizationId(authorizationDatabaseOwner);
    }
}
/**
 * Initialize indices for an array of catalogs.
 *
 * @param ddg          DataDescriptorGenerator
 * @param catalogArray the catalogs whose index-scan variables to set up
 *
 * @exception StandardException Thrown on error
 */
public void loadCatalogs(DataDescriptorGenerator ddg, TabInfoImpl[] catalogArray)
    throws StandardException
{
    // NOTE: This only works for core catalogs, which are initialized
    // up front.
    for (int c = 0; c < catalogArray.length; c++)
    {
        TabInfoImpl catalog = catalogArray[c];
        int indexCount = catalog.getNumberOfIndexes();

        // Initialize the scan-support variables for every index on this
        // catalog (a zero-trip loop when the catalog has no indexes).
        for (int ix = 0; ix < indexCount; ix++)
        {
            initSystemIndexVariables(ddg, catalog, ix);
        }
    }
}
/*
** Methods related to create
*/
/**
Create all the required dictionary tables. Any classes that extend this class
and need to create new tables should override this method, and then
call this method as the first action in the new method, e.g.
<PRE>
protected Configuration createDictionaryTables(Configuration cfg, TransactionController tc,
DataDescriptorGenerator ddg)
throws StandardException
{
super.createDictionaryTables(params, tc, ddg);
...
}
</PRE>
@exception StandardException Standard Derby error policy
*/
protected void createDictionaryTables(Properties params, TransactionController tc,
                                      DataDescriptorGenerator ddg)
    throws StandardException
{
    /*
    ** Create a new schema descriptor -- with no args
    ** creates the system schema descriptor in which
    ** all tables reside (SYS)
    */
    systemSchemaDesc =
        newSystemSchemaDesc(
            SchemaDescriptor.STD_SYSTEM_SCHEMA_NAME,
            SchemaDescriptor.SYSTEM_SCHEMA_UUID);

    /* Create the core tables and generate the UUIDs for their
     * heaps (before creating the indexes).
     * RESOLVE - This loop will eventually drive all of the
     * work for creating the core tables.
     */
    for (int coreCtr = 0; coreCtr < NUM_CORE; coreCtr++)
    {
        TabInfoImpl ti = coreInfo[coreCtr];
        Properties heapProperties = ti.getCreateHeapProperties();

        // Create the heap conglomerate for this core catalog and record
        // its conglomerate number in the TabInfoImpl.
        ti.setHeapConglomerate(
            createConglomerate(
                ti.getTableName(),
                tc,
                ti.getCatalogRowFactory().makeEmptyRow(),
                heapProperties
                )
            );

        // bootstrap indexes on core tables before bootstrapping the tables themselves
        if (coreInfo[coreCtr].getNumberOfIndexes() > 0)
        {
            bootStrapSystemIndexes(systemSchemaDesc, tc, ddg, ti);
        }
    }

    // bootstrap the core tables into the data dictionary
    for ( int ictr = 0; ictr < NUM_CORE; ictr++ )
    {
        /* RESOLVE - need to do something with COLUMNTYPE in following table creating code */
        TabInfoImpl ti = coreInfo[ictr];

        addSystemTableToDictionary(ti, systemSchemaDesc, tc, ddg);
    }

    // Add the bootstrap information to the configuration: the heap and
    // index conglomerate numbers for each core catalog, so that a later
    // boot can locate them directly.
    params.put(CFG_SYSTABLES_ID,
               Long.toString(
                   coreInfo[SYSTABLES_CORE_NUM].getHeapConglomerate()));
    params.put(CFG_SYSTABLES_INDEX1_ID,
               Long.toString(
                   coreInfo[SYSTABLES_CORE_NUM].getIndexConglomerate(
                       ((SYSTABLESRowFactory) coreInfo[SYSTABLES_CORE_NUM].
                        getCatalogRowFactory()).SYSTABLES_INDEX1_ID)));
    params.put(CFG_SYSTABLES_INDEX2_ID,
               Long.toString(
                   coreInfo[SYSTABLES_CORE_NUM].getIndexConglomerate(
                       ((SYSTABLESRowFactory) coreInfo[SYSTABLES_CORE_NUM].
                        getCatalogRowFactory()).SYSTABLES_INDEX2_ID)));

    params.put(CFG_SYSCOLUMNS_ID,
               Long.toString(
                   coreInfo[SYSCOLUMNS_CORE_NUM].getHeapConglomerate()));
    params.put(CFG_SYSCOLUMNS_INDEX1_ID,
               Long.toString(
                   coreInfo[SYSCOLUMNS_CORE_NUM].getIndexConglomerate(
                       ((SYSCOLUMNSRowFactory) coreInfo[SYSCOLUMNS_CORE_NUM].
                        getCatalogRowFactory()).SYSCOLUMNS_INDEX1_ID)));
    params.put(CFG_SYSCOLUMNS_INDEX2_ID,
               Long.toString(
                   coreInfo[SYSCOLUMNS_CORE_NUM].getIndexConglomerate(
                       ((SYSCOLUMNSRowFactory) coreInfo[SYSCOLUMNS_CORE_NUM].
                        getCatalogRowFactory()).SYSCOLUMNS_INDEX2_ID)));

    params.put(CFG_SYSCONGLOMERATES_ID,
               Long.toString(
                   coreInfo[SYSCONGLOMERATES_CORE_NUM].getHeapConglomerate()));
    params.put(CFG_SYSCONGLOMERATES_INDEX1_ID,
               Long.toString(
                   coreInfo[SYSCONGLOMERATES_CORE_NUM].getIndexConglomerate(
                       ((SYSCONGLOMERATESRowFactory) coreInfo[SYSCONGLOMERATES_CORE_NUM].
                        getCatalogRowFactory()).SYSCONGLOMERATES_INDEX1_ID)));
    params.put(CFG_SYSCONGLOMERATES_INDEX2_ID,
               Long.toString(
                   coreInfo[SYSCONGLOMERATES_CORE_NUM].getIndexConglomerate(
                       ((SYSCONGLOMERATESRowFactory) coreInfo[SYSCONGLOMERATES_CORE_NUM].
                        getCatalogRowFactory()).SYSCONGLOMERATES_INDEX2_ID)));
    params.put(CFG_SYSCONGLOMERATES_INDEX3_ID,
               Long.toString(
                   coreInfo[SYSCONGLOMERATES_CORE_NUM].getIndexConglomerate(
                       ((SYSCONGLOMERATESRowFactory) coreInfo[SYSCONGLOMERATES_CORE_NUM].
                        getCatalogRowFactory()).SYSCONGLOMERATES_INDEX3_ID)));

    params.put(CFG_SYSSCHEMAS_ID,
               Long.toString(
                   coreInfo[SYSSCHEMAS_CORE_NUM].getHeapConglomerate()));
    params.put(CFG_SYSSCHEMAS_INDEX1_ID,
               Long.toString(
                   coreInfo[SYSSCHEMAS_CORE_NUM].getIndexConglomerate(
                       ((SYSSCHEMASRowFactory) coreInfo[SYSSCHEMAS_CORE_NUM].
                        getCatalogRowFactory()).SYSSCHEMAS_INDEX1_ID)));
    params.put(CFG_SYSSCHEMAS_INDEX2_ID,
               Long.toString(
                   coreInfo[SYSSCHEMAS_CORE_NUM].getIndexConglomerate(
                       ((SYSSCHEMASRowFactory) coreInfo[SYSSCHEMAS_CORE_NUM].
                        getCatalogRowFactory()).SYSSCHEMAS_INDEX2_ID)));

    //Add the SYSIBM Schema
    sysIBMSchemaDesc =
        addSystemSchema(
            SchemaDescriptor.IBM_SYSTEM_SCHEMA_NAME,
            SchemaDescriptor.SYSIBM_SCHEMA_UUID, tc);

    /* Create the non-core tables and generate the UUIDs for their
     * heaps (before creating the indexes).
     * RESOLVE - This loop will eventually drive all of the
     * work for creating the non-core tables.
     */
    for (int noncoreCtr = 0; noncoreCtr < NUM_NONCORE; noncoreCtr++)
    {
        int catalogNumber = noncoreCtr + NUM_CORE;
        // SYSDUMMY1 is the one catalog created in the SYSIBM schema.
        boolean isDummy = (catalogNumber == SYSDUMMY1_CATALOG_NUM);

        TabInfoImpl ti = getNonCoreTIByNumber(catalogNumber);

        makeCatalog(ti, isDummy ? sysIBMSchemaDesc : systemSchemaDesc, tc );

        if (isDummy)
            populateSYSDUMMY1(tc);

        // Clear the table entry for this non-core table,
        // to allow it to be garbage-collected. The idea
        // is that a running database might never need to
        // reference a non-core table after it was created.
        clearNoncoreTable(noncoreCtr);
    }

    //Add the System Schema
    addDescriptor(
        systemSchemaDesc, null, SYSSCHEMAS_CATALOG_NUM, false, tc);

    // Add the following system Schema's to be compatible with DB2,
    // currently Derby does not use them, but by creating them as
    // system schema's it will insure applications can't create them,
    // drop them, or create objects in them. This set includes:
    //     SYSCAT
    //     SYSFUN
    //     SYSPROC
    //     SYSSTAT
    //     NULLID

    //Add the SYSCAT Schema
    addSystemSchema(
        SchemaDescriptor.IBM_SYSTEM_CAT_SCHEMA_NAME,
        SchemaDescriptor.SYSCAT_SCHEMA_UUID, tc);

    //Add the SYSFUN Schema
    addSystemSchema(
        SchemaDescriptor.IBM_SYSTEM_FUN_SCHEMA_NAME,
        SchemaDescriptor.SYSFUN_SCHEMA_UUID, tc);

    //Add the SYSPROC Schema
    addSystemSchema(
        SchemaDescriptor.IBM_SYSTEM_PROC_SCHEMA_NAME,
        SchemaDescriptor.SYSPROC_SCHEMA_UUID, tc);

    //Add the SYSSTAT Schema
    addSystemSchema(
        SchemaDescriptor.IBM_SYSTEM_STAT_SCHEMA_NAME,
        SchemaDescriptor.SYSSTAT_SCHEMA_UUID, tc);

    //Add the NULLID Schema
    addSystemSchema(
        SchemaDescriptor.IBM_SYSTEM_NULLID_SCHEMA_NAME,
        SchemaDescriptor.NULLID_SCHEMA_UUID, tc);

    //Add the SQLJ Schema
    addSystemSchema(
        SchemaDescriptor.STD_SQLJ_SCHEMA_NAME,
        SchemaDescriptor.SQLJ_SCHEMA_UUID, tc);

    //Add the SYSCS_DIAG Schema
    addSystemSchema(
        SchemaDescriptor.STD_SYSTEM_DIAG_SCHEMA_NAME,
        SchemaDescriptor.SYSCS_DIAG_SCHEMA_UUID, tc);

    //Add the SYSCS_UTIL Schema
    addSystemSchema(
        SchemaDescriptor.STD_SYSTEM_UTIL_SCHEMA_NAME,
        SchemaDescriptor.SYSCS_UTIL_SCHEMA_UUID, tc);

    //Add the APP schema (the default user schema; not a system schema,
    //hence the "false" nullability of the last constructor argument).
    SchemaDescriptor appSchemaDesc = new SchemaDescriptor(this,
        SchemaDescriptor.STD_DEFAULT_SCHEMA_NAME,
        SchemaDescriptor.DEFAULT_USER_NAME,
        uuidFactory.recreateUUID( SchemaDescriptor.DEFAULT_SCHEMA_UUID),
        false);
    addDescriptor(appSchemaDesc, null, SYSSCHEMAS_CATALOG_NUM, false, tc);
}
/**
* Add a system schema to the database.
* <p>
*
* @param schema_name name of the schema to add.
*
* @exception StandardException Standard exception policy.
**/
private SchemaDescriptor addSystemSchema(
    String schema_name,
    String schema_uuid,
    TransactionController tc)
    throws StandardException
{
    // Build the descriptor for a system schema: owned by the database
    // owner, flagged as a system schema (final "true" argument), with
    // its canonical UUID reconstituted from the supplied string.
    SchemaDescriptor schema_desc = new SchemaDescriptor(
        this,
        schema_name,
        authorizationDatabaseOwner,
        uuidFactory.recreateUUID(schema_uuid),
        true);

    // Persist the descriptor in SYSSCHEMAS.
    addDescriptor(schema_desc, null, SYSSCHEMAS_CATALOG_NUM, false, tc);

    return schema_desc;
}
/** called by the upgrade code (dd_xena etc) to add a new system catalog.
*
* @param tc TransactionController to use.
* @param catalogNumber catalogNumber
*/
protected void upgradeMakeCatalog(TransactionController tc, int catalogNumber)
    throws StandardException
{
    // Core catalogs are held directly in coreInfo; non-core catalogs
    // are looked up by number.
    TabInfoImpl ti = (catalogNumber >= NUM_CORE)
        ? getNonCoreTIByNumber(catalogNumber)
        : coreInfo[catalogNumber];

    // SYSDUMMY1 is the lone catalog that lives in the SYSIBM schema;
    // every other catalog belongs to the system (SYS) schema.
    SchemaDescriptor sd = (catalogNumber == SYSDUMMY1_CATALOG_NUM)
        ? getSysIBMSchemaDescriptor()
        : getSystemSchemaDescriptor();

    makeCatalog(ti, sd, tc);
}
/**
* The dirty work of creating a catalog.
*
* @param ti TabInfoImpl describing catalog to create.
* @param sd Schema to create catalogs in.
* @param tc Transaction context.
*
* @exception StandardException Standard Derby error policy
*/
public void makeCatalog( TabInfoImpl ti,
                         SchemaDescriptor sd,
                         TransactionController tc )
    throws StandardException
{
    DataDescriptorGenerator ddg = getDataDescriptorGenerator();

    // Create the heap conglomerate for the catalog and remember its id.
    ti.setHeapConglomerate(
        createConglomerate(
            ti.getTableName(),
            tc,
            ti.getCatalogRowFactory().makeEmptyRow(),
            ti.getCreateHeapProperties()));

    // Index conglomerates must exist before the catalog is described
    // in the data dictionary.
    if (ti.getNumberOfIndexes() > 0)
    {
        bootStrapSystemIndexes(sd, tc, ddg, ti);
    }

    addSystemTableToDictionary(ti, sd, tc, ddg);
}
/**
* Upgrade an existing system catalog column's definition
* by setting it to the value it would have in a newly
* created database. This is only used to for a couple
* of columns that had incorrectly nullability. Other
* uses (e.g. changing column type) might require more work.
*
* @param columnNumber The column to change
* @param tc Transaction controller
*
* @exception StandardException Standard Derby error policy
*/
public void upgradeFixSystemColumnDefinition(CatalogRowFactory rowFactory,
                                             int columnNumber,
                                             TransactionController tc)
    throws StandardException
{
    SchemaDescriptor sd = getSystemSchemaDescriptor();
    TableDescriptor td =
        getTableDescriptor(rowFactory.getCatalogName(), sd, tc);

    // Rebuild the column's descriptor from the catalog's canonical
    // column list (1-based columnNumber, 0-based array).
    SystemColumn[] columns = rowFactory.buildColumnList();
    SystemColumn theColumn = columns[columnNumber - 1];
    ColumnDescriptor cd = makeColumnDescriptor(theColumn, columnNumber, td);

    // Only the COLUMNDATATYPE column of the SYSCOLUMNS row is rewritten.
    int[] columnNameColArray =
        { SYSCOLUMNSRowFactory.SYSCOLUMNS_COLUMNDATATYPE };

    updateColumnDescriptor(cd,
                           td.getUUID(),
                           cd.getColumnName(),
                           columnNameColArray,
                           tc);
}
/**
* Upgrade an existing catalog by adding columns.
*
* @param rowFactory Associated with this catalog.
* @param newColumnIDs Array of 1-based column ids.
* @param tc Transaction controller
*
* @exception StandardException Standard Derby error policy
*/
/**
 * Upgrade an existing catalog by adding columns: the underlying heap is
 * widened first, then the new columns are described in SYSCOLUMNS.
 *
 * @param rowFactory   Associated with this catalog.
 * @param newColumnIDs Array of 1-based column ids of the columns being added.
 * @param tc           Transaction controller
 *
 * @exception StandardException Standard Derby error policy
 */
public void upgrade_addColumns( CatalogRowFactory rowFactory,
                                int[] newColumnIDs,
                                TransactionController tc )
    throws StandardException
{
    SystemColumn[] columns = rowFactory.buildColumnList();
    ExecRow templateRow = rowFactory.makeEmptyRow();
    int columnCount = newColumnIDs.length;
    SchemaDescriptor sd = getSystemSchemaDescriptor();
    TableDescriptor td;
    long conglomID;

    // Special case when adding a column to systables or syscolumns,
    // since we can't go to systables/syscolumns to get the
    // table/column descriptor until after we add and populate the new column.
    if (rowFactory instanceof SYSTABLESRowFactory)
    {
        td = dataDescriptorGenerator.newTableDescriptor(
            "SYSTABLES",
            sd,
            TableDescriptor.BASE_TABLE_TYPE,
            TableDescriptor.ROW_LOCK_GRANULARITY);
        td.setUUID(getUUIDForCoreTable("SYSTABLES", sd.getUUID().toString(), tc));
        conglomID = coreInfo[SYSTABLES_CORE_NUM].getHeapConglomerate();
    }
    else if (rowFactory instanceof SYSCOLUMNSRowFactory)
    {
        td = dataDescriptorGenerator.newTableDescriptor(
            "SYSCOLUMNS",
            sd,
            TableDescriptor.BASE_TABLE_TYPE,
            TableDescriptor.ROW_LOCK_GRANULARITY);
        td.setUUID(getUUIDForCoreTable("SYSCOLUMNS", sd.getUUID().toString(), tc));
        conglomID = coreInfo[SYSCOLUMNS_CORE_NUM].getHeapConglomerate();
    }
    else
    {
        td = getTableDescriptor( rowFactory.getCatalogName(), sd, tc );
        conglomID = td.getHeapConglomerateId();
    }

    // Physically add the columns to the heap first.
    widenConglomerate( templateRow, newColumnIDs, conglomID, tc );

    // Then describe each new column in SYSCOLUMNS.
    ColumnDescriptor[] cdArray = new ColumnDescriptor[columnCount];

    for ( int ix = 0; ix < columnCount; ix++ )
    {
        int columnID = newColumnIDs[ix];
        SystemColumn currentColumn = columns[ columnID - 1 ]; // from 1 to 0 based

        // BUGFIX: the column must be recorded at its actual 1-based
        // position in the table (columnID), not at ix + 1. newColumnIDs
        // holds the positions of columns appended during upgrade, which
        // never start at 1; using ix + 1 would collide with the
        // positions of the catalog's pre-existing columns.
        cdArray[ix] = makeColumnDescriptor( currentColumn, columnID, td );
    }
    addDescriptorArray(cdArray, td, SYSCOLUMNS_CATALOG_NUM, false, tc);
}
/**
* Add invisible columns to an existing system catalog
*
* @param rowFactory Associated with this catalog.
* @param newColumnIDs Array of 1-based column ids.
* @param tc Transaction controller
*
* @exception StandardException Standard Derby error policy
*/
public void upgrade_addInvisibleColumns
    (
     CatalogRowFactory rowFactory,
     int[] newColumnIDs,
     TransactionController tc
     )
    throws StandardException
{
    // Widen the underlying heap only; no SYSCOLUMNS rows are added,
    // which is what makes the new columns "invisible".
    SchemaDescriptor sd = getSystemSchemaDescriptor();
    long conglomID =
        getTableDescriptor(rowFactory.getCatalogName(), sd, tc)
            .getHeapConglomerateId();

    widenConglomerate(rowFactory.makeEmptyRow(), newColumnIDs, conglomID, tc);
}
/**
* Adds columns to the conglomerate underlying a system table.
*
* @param templateRow Ultimate shape of base row of table
* @param newColumnIDs Array of 1-based column ids
* @param conglomID heap id
* @param tc Transaction controller
*
* @exception StandardException Standard Derby error policy
*/
private void widenConglomerate
    (
     ExecRow templateRow,
     int[] newColumnIDs,
     long conglomID,
     TransactionController tc
     )
    throws StandardException
{
    // Append each new column to the heap. System catalog columns
    // always use UCS_BASIC collation.
    for (int ix = 0; ix < newColumnIDs.length; ix++)
    {
        int columnID = newColumnIDs[ix];

        tc.addColumnToConglomerate(
            conglomID,
            columnID - 1,                       // from 1 to 0 based
            templateRow.getColumn(columnID),
            StringDataValue.COLLATION_TYPE_UCS_BASIC);
    }
}
/**
* Code to add an index to a catalog during upgrade.
*
* @param tc transaction controller
* @param ti information on the catalog that's having a new index added
* @param indexNumber 0-based index number
* @param heapConglomerateNumber what it is
*
* @return The conglomerate number of the new index.
*
* @exception StandardException Thrown on failure
*/
public long upgrade_makeOneIndex
    (
     TransactionController tc,
     TabInfoImpl ti,
     int indexNumber,
     long heapConglomerateNumber
     )
    throws StandardException
{
    SchemaDescriptor sd = getSystemSchemaDescriptor();

    // Physically create the index conglomerate ...
    ConglomerateDescriptor conglomerateDescriptor = bootstrapOneIndex(
        sd, tc, getDataDescriptorGenerator(), ti, indexNumber,
        heapConglomerateNumber);

    // ... then record it in SYSCONGLOMERATES.
    addDescriptor(conglomerateDescriptor, sd,
                  SYSCONGLOMERATES_CATALOG_NUM, false, tc);

    return conglomerateDescriptor.getConglomerateNumber();
}
/**
* Get the UUID for the specified system table. Prior
* to Plato, system tables did not have canonical UUIDs, so
* we need to scan systables to get the UUID when we
* are updating the core tables.
*
* @param tableName Name of the table
* @param schemaUUID UUID of schema
* @param tc TransactionController to user
*
* @return UUID The UUID of the core table.
*
* @exception StandardException Thrown on failure
*/
private UUID getUUIDForCoreTable(String tableName,
                                 String schemaUUID,
                                 TransactionController tc)
    throws StandardException
{
    ConglomerateController heapCC;
    ExecIndexRow indexRow1;
    ExecRow row;
    DataValueDescriptor schemaIDOrderable;
    DataValueDescriptor tableNameOrderable;
    ScanController scanController;
    TabInfoImpl ti = coreInfo[SYSTABLES_CORE_NUM];
    SYSTABLESRowFactory rf = (SYSTABLESRowFactory) ti.getCatalogRowFactory();

    // We only want the 1st column from the heap
    row = exFactory.getValueRow(1);

    /* Use tableNameOrderable and schemaIdOrderable in both start
     * and stop position for scan.
     */
    tableNameOrderable = new SQLVarchar(tableName);
    schemaIDOrderable = new SQLChar(schemaUUID);

    /* Set up the start/stop position for the scan:
     * key is (table name, schema UUID), matching SYSTABLES_INDEX1. */
    ExecIndexRow keyRow = exFactory.getIndexableRow(2);
    keyRow.setColumn(1, tableNameOrderable);
    keyRow.setColumn(2, schemaIDOrderable);

    // Open the heap so the base row can be fetched once the index
    // scan yields its RowLocation.
    heapCC = tc.openConglomerate(
        ti.getHeapConglomerate(), false, 0,
        TransactionController.MODE_RECORD,
        TransactionController.ISOLATION_REPEATABLE_READ);

    ExecRow indexTemplateRow = rf.buildEmptyIndexRow( SYSTABLESRowFactory.SYSTABLES_INDEX1_ID, heapCC.newRowLocationTemplate() );

    /* Scan the index and go to the data pages for qualifying rows to
     * build the column descriptor.
     */
    scanController = tc.openScan(
        ti.getIndexConglomerate(SYSTABLESRowFactory.SYSTABLES_INDEX1_ID), // conglomerate to open
        false,                                       // don't hold open across commit
        0,
        TransactionController.MODE_RECORD,
        TransactionController.ISOLATION_REPEATABLE_READ,
        (FormatableBitSet) null,                     // all fields as objects
        keyRow.getRowArray(),                        // start position - first row
        ScanController.GE,                           // startSearchOperation
        (ScanQualifier[][]) null,                    // scanQualifier
        keyRow.getRowArray(),                        // stop position - through last row
        ScanController.GT);                          // stopSearchOperation

    /* OK to fetch into the template row,
     * since we won't be doing a next.
     */
    if (scanController.fetchNext(indexTemplateRow.getRowArray()))
    {
        RowLocation baseRowLocation;

        // By convention the RowLocation is the last column of an index row.
        baseRowLocation = (RowLocation) indexTemplateRow.getColumn(
            indexTemplateRow.nColumns());

        /* 1st column is TABLEID (UUID - char(36)) */
        row.setColumn(SYSTABLESRowFactory.SYSTABLES_TABLEID, new SQLChar());
        // NOTE(review): bi is built but not passed to fetch below
        // (null is passed, fetching all columns) -- looks vestigial; confirm.
        FormatableBitSet bi = new FormatableBitSet(1);
        bi.set(0);
        boolean base_row_exists =
            heapCC.fetch(
                baseRowLocation, row.getRowArray(), (FormatableBitSet) null);

        if (SanityManager.DEBUG)
        {
            // it can not be possible for heap row to disappear while
            // holding scan cursor on index at ISOLATION_REPEATABLE_READ.
            SanityManager.ASSERT(base_row_exists, "base row not found");
        }
    }

    scanController.close();
    heapCC.close();

    return uuidFactory.recreateUUID(row.getColumn(1).toString());
}
/**
* Initialize noncore columns to fixed values
*
* @param tc The TransactionController for the transaction to do the
* upgrade in.
* @param isCoreTable true if it is a core table
* @param tableNum the noncore table number
* @param columnsToUpdateSet a bit set of columns to update. ZERO BASED
* @param replaceRow an object array of Orderables for the new values
*
* @exception StandardException Thrown on error
*/
void upgrade_initSystemTableCols(
    TransactionController tc,
    boolean isCoreTable,
    int tableNum,
    FormatableBitSet columnsToUpdateSet,
    DataValueDescriptor[] replaceRow
    )
    throws StandardException
{
    // Locate the catalog's bookkeeping info; non-core catalogs may
    // need to be faulted in first.
    TabInfoImpl ti = isCoreTable
        ? coreInfo[tableNum]
        : getNonCoreTIByNumber(tableNum);

    if (!isCoreTable)
    {
        faultInTabInfo(ti);
    }

    // Walk every row of the catalog's heap and overwrite the selected
    // columns with the supplied fixed values.
    ScanController sc = tc.openScan(
        ti.getHeapConglomerate(),
        false,
        TransactionController.OPENMODE_FORUPDATE,
        TransactionController.MODE_TABLE,
        TransactionController.ISOLATION_REPEATABLE_READ,
        RowUtil.EMPTY_ROW_BITSET,
        (DataValueDescriptor[]) null,
        ScanController.NA,
        (Qualifier[][]) null,
        (DataValueDescriptor[]) null,
        ScanController.NA);

    while (sc.next())
    {
        sc.replace(replaceRow, columnsToUpdateSet);
    }

    sc.close();
}
/*
*******************************************************************************
*
* See RepBasicDataDictionary for sample code on how to create a system
* table.
*
* What follows here is special code for the core catalogs. These are catalogs
* which have to exist before any other system tables are created.
*
* Creating a core catalog consists of two steps: 1) creating all the infrastructure
* needed to make generic systemTableCreation work, 2) actually populating the
* Data Dictionary and core conglomerates with tuples.
*
*******************************************************************************
*/
/**
* Infrastructure work for indexes on catalogs.
*
@exception StandardException Standard Derby error policy
*/
private void bootStrapSystemIndexes(
    SchemaDescriptor sd,
    TransactionController tc,
    DataDescriptorGenerator ddg,
    TabInfoImpl ti)
    throws StandardException
{
    int indexCount = ti.getNumberOfIndexes();
    ConglomerateDescriptor[] cgd = new ConglomerateDescriptor[indexCount];

    /* Ordering problem with sysconglomerates: all conglomerates for
     * sysconglomerates must physically exist before any rows can be
     * inserted into it, so first create every index conglomerate ...
     */
    for (int indexCtr = 0; indexCtr < indexCount; indexCtr++)
    {
        cgd[indexCtr] = bootstrapOneIndex(
            sd, tc, ddg, ti, indexCtr, ti.getHeapConglomerate());
    }

    /* ... and only then record them all in the data dictionary. */
    for (int indexCtr = 0; indexCtr < indexCount; indexCtr++)
    {
        addDescriptor(cgd[indexCtr], sd,
                      SYSCONGLOMERATES_CATALOG_NUM, false, tc);
    }
}
/**
* @see DataDictionary#computeAutoincRowLocations
*/
public RowLocation[] computeAutoincRowLocations(TransactionController tc,
                                                TableDescriptor td)
    throws StandardException
{
    // Nothing to compute for tables without autoincrement columns.
    if (!td.tableHasAutoincrement())
    {
        return null;
    }

    int size = td.getNumberOfColumns();
    RowLocation[] rla = new RowLocation[size];

    // Fill in a row location only for autoincrement columns; the other
    // slots remain null.
    for (int i = 0; i < size; i++)
    {
        ColumnDescriptor cd = td.getColumnDescriptor(i + 1);
        if (cd.isAutoincrement())
        {
            rla[i] = computeRowLocation(tc, td, cd.getColumnName());
        }
    }

    return rla;
}
/**
* @see DataDictionary#getSetAutoincrementValue
*/
public NumberDataValue getSetAutoincrementValue(
    RowLocation rl,
    TransactionController tc,
    boolean doUpdate,
    NumberDataValue newValue,
    boolean wait)
    throws StandardException
{
    FormatableBitSet columnToUpdate = new
        FormatableBitSet(SYSCOLUMNSRowFactory.SYSCOLUMNS_COLUMN_COUNT);
    int columnNum = SYSCOLUMNSRowFactory.SYSCOLUMNS_AUTOINCREMENTVALUE;
    TabInfoImpl ti = coreInfo[SYSCOLUMNS_CORE_NUM];
    ConglomerateController heapCC = null;
    SYSCOLUMNSRowFactory rf = (SYSCOLUMNSRowFactory) ti.getCatalogRowFactory();
    ExecRow row = rf.makeEmptyRow();

    FormatableBitSet columnToRead = new
        FormatableBitSet(SYSCOLUMNSRowFactory.SYSCOLUMNS_COLUMN_COUNT);

    // FormatableBitSet is 0 based, so bit (columnNum - 1) corresponds
    // to 1-based column columnNum.
    columnToRead.set(columnNum - 1); // current value.
    columnToRead.set(columnNum);     // start value.
    columnToRead.set(columnNum + 1); // increment value.

    try
    {
        /* if wait is true then we need to do a wait while trying to
           open/fetch from the conglomerate. note we use wait both to
           open as well as fetch from the conglomerate.
        */
        heapCC =
            tc.openConglomerate(
                ti.getHeapConglomerate(),
                false,
                (TransactionController.OPENMODE_FORUPDATE |
                 ((wait) ? 0 : TransactionController.OPENMODE_LOCK_NOWAIT)),
                TransactionController.MODE_RECORD,
                TransactionController.ISOLATION_REPEATABLE_READ);

        // NOTE(review): the fetch result is ignored here -- presumably the
        // SYSCOLUMNS row for rl must exist by this point; confirm.
        boolean baseRowExists =
            heapCC.fetch(rl, row.getRowArray(), columnToRead, wait);

        columnToUpdate.set(columnNum - 1); // current value.

        // while the Row interface is 1 based.
        NumberDataValue currentAI = (NumberDataValue)row.getColumn(columnNum);
        long currentAIValue = currentAI.getLong();
        // increment lives two 1-based columns past the current value.
        NumberDataValue increment = (NumberDataValue)row.getColumn(columnNum + 2);

        if (doUpdate)
        {
            // we increment and store the new value in SYSCOLUMNS
            currentAI = currentAI.plus(currentAI, increment, currentAI);
            row.setColumn(columnNum, currentAI);
            heapCC.replace(rl, row.getRowArray(), columnToUpdate);
        }

        // but we return the "currentAIValue"-- i.e the value before
        // incrementing it.
        if (newValue != null)
        {
            // user has passed in an object; set the current value in there and
            // return it.
            newValue.setValue(currentAIValue);
            return newValue;
        }
        else
        {
            // reuse the object read from row.
            currentAI.setValue(currentAIValue);
            return currentAI;
        }
    }
    finally
    {
        // Always release the conglomerate, even on lock-timeout or
        // other StandardException paths.
        if (heapCC != null)
            heapCC.close();
    }
}
/**
 * Create one index conglomerate for a system catalog and build its
 * ConglomerateDescriptor. The descriptor is NOT added to
 * SYSCONGLOMERATES here -- callers do that, since during bootstrap the
 * catalog being indexed may be SYSCONGLOMERATES itself.
 */
private ConglomerateDescriptor bootstrapOneIndex
    (
     SchemaDescriptor sd,
     TransactionController tc,
     DataDescriptorGenerator ddg,
     TabInfoImpl ti,
     int indexNumber,
     long heapConglomerateNumber
     )
    throws StandardException
{
    boolean isUnique;
    ConglomerateController cc;
    ExecRow baseRow;
    ExecIndexRow indexableRow;
    int numColumns;
    long conglomId;
    RowLocation rl;
    CatalogRowFactory rf = ti.getCatalogRowFactory();
    IndexRowGenerator irg;
    ConglomerateDescriptor conglomerateDescriptor;

    initSystemIndexVariables(ddg, ti, indexNumber);

    irg = ti.getIndexRowGenerator(indexNumber);

    numColumns = ti.getIndexColumnCount(indexNumber);

    /* Is the index unique */
    isUnique = ti.isIndexUnique(indexNumber);

    // create an index row template
    indexableRow = irg.getIndexRowTemplate();

    baseRow = rf.makeEmptyRow();

    // Get a RowLocation template (the heap is opened only for this).
    cc = tc.openConglomerate(
        heapConglomerateNumber, false, 0,
        TransactionController.MODE_RECORD,
        TransactionController.ISOLATION_REPEATABLE_READ);

    rl = cc.newRowLocationTemplate();

    cc.close();

    // Get an index row based on the base row
    irg.getIndexRow(baseRow, rl, indexableRow, (FormatableBitSet) null);

    // Describe the properties of the index to the store using Properties
    // RESOLVE: The following properties assume a BTREE index.
    Properties indexProperties = ti.getCreateIndexProperties(indexNumber);

    // Tell it the conglomerate id of the base table
    indexProperties.put(
        "baseConglomerateId",
        Long.toString( heapConglomerateNumber ) );

    // All indexes are unique because they contain the RowLocation.
    // The number of uniqueness columns must include the RowLocation
    // if the user did not specify a unique index.
    indexProperties.put("nUniqueColumns",
                        Integer.toString(
                            isUnique ? numColumns : numColumns + 1));

    // By convention, the row location column is the last column
    indexProperties.put("rowLocationColumn",
                        Integer.toString(numColumns));

    // For now, all columns are key fields, including the RowLocation
    indexProperties.put("nKeyFields",
                        Integer.toString(numColumns + 1));

    /* Create and add the conglomerate (index) */
    conglomId = tc.createConglomerate(
        "BTREE",                  // we're requesting an index conglomerate
        indexableRow.getRowArray(),
        null,                     // default sort order
        null,                     // default collation id's for columns in all system congloms
        indexProperties,          // default properties
        TransactionController.IS_DEFAULT); // not temporary

    conglomerateDescriptor =
        ddg.newConglomerateDescriptor(conglomId,
                                      rf.getIndexName(indexNumber),
                                      true,
                                      irg,
                                      false,
                                      rf.getCanonicalIndexUUID(indexNumber),
                                      rf.getCanonicalTableUUID(),
                                      sd.getUUID());
    ti.setIndexConglomerate( conglomerateDescriptor );

    return conglomerateDescriptor;
}
/**
 * Build and install the IndexRowGenerator for one index of a system
 * catalog, deriving its key columns from the TabInfoImpl. System
 * indexes are always BTREEs with all-ascending key columns.
 *
 * @param ddg         data descriptor generator (part of the public
 *                    signature; not used in this method body)
 * @param ti          catalog whose index is being initialized
 * @param indexNumber 0-based index number within the catalog
 *
 * @exception StandardException Standard Derby error policy
 */
public void initSystemIndexVariables(DataDescriptorGenerator ddg,
                                     TabInfoImpl ti,
                                     int indexNumber)
    throws StandardException
{
    int numCols = ti.getIndexColumnCount(indexNumber);
    int[] baseColumnPositions = new int[numCols];

    for (int colCtr = 0; colCtr < numCols; colCtr++)
    {
        baseColumnPositions[colCtr] =
            ti.getBaseColumnPosition(indexNumber, colCtr);
    }

    // System index columns are always ascending.
    boolean[] isAscending = new boolean[numCols];
    for (int i = 0; i < numCols; i++)
        isAscending[i] = true;

    IndexRowGenerator irg;
    if (softwareVersion.checkVersion(
            DataDictionary.DD_VERSION_DERBY_10_4, null))
    {
        // 10.4-and-later constructor (extra boolean argument).
        irg = new IndexRowGenerator(
            "BTREE", ti.isIndexUnique(indexNumber),
            false,
            baseColumnPositions,
            isAscending,
            numCols);
    }
    else
    {
        // older version of Data Dictionary:
        // use old constructor
        irg = new IndexRowGenerator(
            "BTREE", ti.isIndexUnique(indexNumber),
            baseColumnPositions,
            isAscending,
            numCols);
    }

    // For now, assume that all index columns are ordered columns
    ti.setIndexRowGenerator(indexNumber, irg);
}
/**
* Populate SYSDUMMY1 table with a single row.
*
* @exception StandardException Standard Derby error policy
*/
/**
 * Populate SYSDUMMY1 table with a single row.
 *
 * @param tc Transaction controller to insert with.
 *
 * @exception StandardException Standard Derby error policy
 */
protected void populateSYSDUMMY1(
    TransactionController tc)
    throws StandardException
{
    TabInfoImpl ti = getNonCoreTI(SYSDUMMY1_CATALOG_NUM);
    // SYSDUMMY1 has no parent descriptor and no parameters: a null/null
    // makeRow() builds its one canonical row.
    ExecRow row = ti.getCatalogRowFactory().makeRow(null, null);

    // Return code was previously captured into an unused local; insert
    // failures surface as StandardException, so it is simply discarded.
    ti.insertRow(row, tc);
}
/**
* Clear all of the DataDictionary caches.
*
* @exception StandardException Standard Derby error policy
*/
public void clearCaches() throws StandardException
{
    // Flush and age out every descriptor cache so no stale entries survive.
    nameTdCache.cleanAll();
    nameTdCache.ageOut();

    OIDTdCache.cleanAll();
    OIDTdCache.ageOut();

    sequenceGeneratorCache.cleanAll();
    sequenceGeneratorCache.ageOut();

    // The stored-prepared-statement cache may not have been created yet.
    if (spsNameCache != null)
    {
        spsNameCache.cleanAll();
        spsNameCache.ageOut();
        spsIdHash.clear();
    }
}
/**
Add the required entries to the data dictionary for a System table.
*/
private void addSystemTableToDictionary(TabInfoImpl ti,
                                        SchemaDescriptor sd,
                                        TransactionController tc,
                                        DataDescriptorGenerator ddg)
    throws StandardException
{
    CatalogRowFactory crf = ti.getCatalogRowFactory();

    String name = ti.getTableName();
    long conglomId = ti.getHeapConglomerate();
    SystemColumn[] columnList = crf.buildColumnList();
    UUID heapUUID = crf.getCanonicalHeapUUID();
    String heapName = crf.getCanonicalHeapName();
    TableDescriptor td;
    UUID toid;
    ColumnDescriptor cd;
    int columnCount;
    SystemColumn column;

    // add table to the data dictionary
    columnCount = columnList.length;
    td = ddg.newTableDescriptor(name, sd, TableDescriptor.SYSTEM_TABLE_TYPE,
                                TableDescriptor.ROW_LOCK_GRANULARITY);
    // System catalogs carry canonical (fixed) UUIDs from the row factory.
    td.setUUID(crf.getCanonicalTableUUID());
    addDescriptor(td, sd, SYSTABLES_CATALOG_NUM,
                  false, tc);
    toid = td.getUUID();

    /* Add the conglomerate for the heap */
    ConglomerateDescriptor cgd = ddg.newConglomerateDescriptor(conglomId,
                                                               heapName,
                                                               false,
                                                               null,
                                                               false,
                                                               heapUUID,
                                                               toid,
                                                               sd.getUUID());

    addDescriptor(cgd, sd, SYSCONGLOMERATES_CATALOG_NUM, false, tc);

    /* Create the columns */
    ColumnDescriptor[] cdlArray = new ColumnDescriptor[columnCount];

    for (int columnNumber = 0; columnNumber < columnCount; columnNumber++)
    {
        column = columnList[columnNumber];

        if (SanityManager.DEBUG)
        {
            if (column == null)
            {
                SanityManager.THROWASSERT("column "+columnNumber+" for table "+ti.getTableName()+" is null");
            }
        }
        // Column positions in the dictionary are 1-based.
        cdlArray[columnNumber] = makeColumnDescriptor( column,
                                                       columnNumber + 1, td );
    }
    addDescriptorArray(cdlArray, td, SYSCOLUMNS_CATALOG_NUM, false, tc);

    // now add the columns to the cdl of the table.
    ColumnDescriptorList cdl = td.getColumnDescriptorList();
    for (int i = 0; i < columnCount; i++)
        cdl.add(cdlArray[i]);
}
/**
* Converts a SystemColumn to a ColumnDescriptor.
*
* @param column a SystemColumn
* @param columnPosition Position of the column in the table, one based.
* @param td descriptor for table that column lives in
*
* @return a ColumnDescriptor
*
* @exception StandardException Standard Derby error policy
*/
private ColumnDescriptor makeColumnDescriptor( SystemColumn column,
                                               int columnPosition,
                                               TableDescriptor td )
    throws StandardException
{
    // System columns carry no default value, no default info, and no
    // autoincrement settings (RESOLVEAUTOINCREMENT).
    return new ColumnDescriptor(
        column.getName(),
        columnPosition,
        column.getType(),
        null,                 // default value
        null,                 // default info
        td,
        (UUID) null,          // no default UUID for system columns
        0,                    // autoinc start
        0);                   // autoinc increment
}
/**
* Create a conglomerate for a system table
*
* @param name Name of new catalog.
* @param tc Transaction context.
* @param rowTemplate Template for rows for the new table
* @param properties Properties for createConglomerate
*
* @return Conglomerate id.
@exception StandardException Standard Derby error policy.
*/
private long createConglomerate(String name, TransactionController tc,
                                ExecRow rowTemplate,
                                Properties properties)
    throws StandardException
{
    // Every system catalog is stored as a plain heap with default sort
    // order, default collation, and permanent (non-temporary) lifetime.
    return tc.createConglomerate(
        "heap",                            // heap conglomerate
        rowTemplate.getRowArray(),         // row template
        null,                              // default sort order
        null,                              // default collation ids
        properties,                        // creation properties
        TransactionController.IS_DEFAULT); // not temporary
}
/**
* Converts a UUID to an DataValueDescriptor.
*
* @return the UUID converted to an DataValueDescriptor
*
*/
private static SQLChar getIDValueAsCHAR(UUID uuid)
{
    // UUIDs are stored in the catalogs in their CHAR string form.
    return new SQLChar(uuid.toString());
}
/**
* Initialize catalog information. This method is overridden by children.
* @exception StandardException Thrown on error
*/
public void initializeCatalogInfo()
    throws StandardException
{
    // Core catalogs first, then the (lazily-faulted) non-core ones.
    initializeCoreInfo();
    initializeNoncoreInfo();
}
/**
* Initialized the core info array.
*/
private void initializeCoreInfo()
    throws StandardException
{
    coreInfo = new TabInfoImpl[NUM_CORE];

    // Each core catalog is wrapped in a TabInfoImpl built around its
    // catalog row factory.
    coreInfo[SYSTABLES_CORE_NUM] =
        new TabInfoImpl(new SYSTABLESRowFactory(uuidFactory, exFactory, dvf));

    coreInfo[SYSCOLUMNS_CORE_NUM] =
        new TabInfoImpl(new SYSCOLUMNSRowFactory(uuidFactory, exFactory, dvf));

    coreInfo[SYSCONGLOMERATES_CORE_NUM] =
        new TabInfoImpl(new SYSCONGLOMERATESRowFactory(uuidFactory, exFactory, dvf));

    coreInfo[SYSSCHEMAS_CORE_NUM] =
        new TabInfoImpl(new SYSSCHEMASRowFactory(uuidFactory, exFactory, dvf));
}
/**
* Initialized the noncore info array.
*/
private void initializeNoncoreInfo()
    throws StandardException
{
    // Slots are filled lazily as non-core catalogs are faulted in.
    noncoreInfo = new TabInfoImpl[NUM_NONCORE];
}
/**
* Get the TransactionController to use, when not
* passed in as a parameter. (This hides logic about
* whether or not we're at boot time in a single
* place. NOTE: There's no LCC at boot time.)
* NOTE: All <get> methods in the DD should call this method.
*
* @return TransactionController The TC to use.
*
* @exception StandardException Thrown on error
*/
public TransactionController getTransactionCompile()
    throws StandardException
{
    // During boot there is no LCC, so the boot transaction is used.
    if (bootingTC != null)
    {
        if (SanityManager.DEBUG)
        {
            SanityManager.ASSERT(booting, "booting is expected to be true");
        }
        return bootingTC;
    }

    if (SanityManager.DEBUG)
    {
        SanityManager.ASSERT(! booting, "booting is expected to be false");
    }

    // Normal (post-boot) path: ask the language connection context.
    LanguageConnectionContext lcc = getLCC();
    return lcc.getTransactionCompile();
}
/**
* Get the TransactionController to use, when not
* passed in as a parameter. (This hides logic about
* whether or not we're at boot time in a single
* place. NOTE: There's no LCC at boot time.)
* NOTE: All <get> methods in the DD should call this method.
*
* @return TransactionController The TC to use.
*
* @exception StandardException Thrown on error
*/
public TransactionController getTransactionExecute()
    throws StandardException
{
    // During boot there is no LCC, so the boot transaction is used.
    if (bootingTC != null)
    {
        if (SanityManager.DEBUG)
        {
            SanityManager.ASSERT(booting, "booting is expected to be true");
        }
        return bootingTC;
    }

    if (SanityManager.DEBUG)
    {
        SanityManager.ASSERT(! booting, "booting is expected to be false");
    }

    // Normal (post-boot) path: ask the language connection context.
    LanguageConnectionContext lcc = getLCC();
    return lcc.getTransactionExecute();
}
/**
* Return a (single or list of) catalog row descriptor(s) from a
* system table where the access is from the index to the heap.
*
* @param indexId The id of the index (0 to # of indexes on table) to use
* @param keyRow The supplied ExecIndexRow for search
* @param ti The TabInfoImpl to use
* @param parentTupleDescriptor The parentDescriptor, if applicable.
* @param list The list to build, if supplied. If null, then
* caller expects a single descriptor
* @param forUpdate Whether or not to open the index for update.
*
* @return The last matching descriptor
*
* @exception StandardException Thrown on error
*/
private final TupleDescriptor getDescriptorViaIndex(
int indexId,
ExecIndexRow keyRow,
ScanQualifier [][] scanQualifiers,
TabInfoImpl ti,
TupleDescriptor parentTupleDescriptor,
List list,
boolean forUpdate)
throws StandardException
{
// Get the current transaction controller
TransactionController tc = getTransactionCompile();
return getDescriptorViaIndexMinion(
indexId,
keyRow,
scanQualifiers,
ti,
parentTupleDescriptor,
list,
forUpdate,
TransactionController.ISOLATION_REPEATABLE_READ,
tc);
}
/**
* Return a (single or list of) catalog row descriptor(s) from a
* system table where the access is from the index to the heap.
*
* This overload variant takes an explicit tc, in contrast to the normal
* one which uses the one returned by getTransactionCompile.
*
* @param indexId The id of the index (0 to # of indexes on table) to use
* @param keyRow The supplied ExecIndexRow for search
* @param ti The TabInfoImpl to use
* @param parentTupleDescriptor The parentDescriptor, if applicable.
* @param list The list to build, if supplied. If null, then
* caller expects a single descriptor
* @param forUpdate Whether or not to open the index for update.
* @param isolationLevel
* Use this explicit isolation level. Only
* ISOLATION_REPEATABLE_READ (normal usage) or
* ISOLATION_READ_UNCOMMITTED (corner cases)
* supported for now.
* @param tc Transaction controller
*
* @return The last matching descriptor. If isolationLevel is
* ISOLATION_READ_UNCOMMITTED, the base row may be gone by the
* time we access it via the index; in such a case a null is
* returned.
*
* @exception StandardException Thrown on error.
*/
private final TupleDescriptor getDescriptorViaIndex(
int indexId,
ExecIndexRow keyRow,
ScanQualifier [][] scanQualifiers,
TabInfoImpl ti,
TupleDescriptor parentTupleDescriptor,
List list,
boolean forUpdate,
int isolationLevel,
TransactionController tc)
throws StandardException
{
if (tc == null) {
tc = getTransactionCompile();
}
return getDescriptorViaIndexMinion(indexId,
keyRow,
scanQualifiers,
ti,
parentTupleDescriptor,
list,
forUpdate,
isolationLevel,
tc);
}
    /**
     * Worker shared by the getDescriptorViaIndex() overloads: scans the
     * given index of a system catalog and, for each qualifying index row,
     * fetches the base row from the heap and builds a tuple descriptor.
     *
     * @param indexId index number on the catalog (0 to # of indexes)
     * @param keyRow start/stop key for the index scan (GE .. GT)
     * @param scanQualifiers additional qualifiers, may be null
     * @param ti catalog table info (heap + index conglomerates)
     * @param parentTupleDescriptor the parent descriptor, if applicable
     * @param list if non-null, every matching descriptor is appended to it;
     *             if null, the scan stops after the first matching row
     * @param forUpdate whether to open the index scan for update
     * @param isolationLevel ISOLATION_REPEATABLE_READ (normal usage) or
     *                       ISOLATION_READ_UNCOMMITTED (corner cases only)
     * @param tc transaction controller to use
     *
     * @return the last matching descriptor; may be null under
     *         ISOLATION_READ_UNCOMMITTED if the base row vanished
     *
     * @exception StandardException Thrown on error
     */
    private final TupleDescriptor getDescriptorViaIndexMinion(
                        int indexId,
                        ExecIndexRow keyRow,
                        ScanQualifier [][] scanQualifiers,
                        TabInfoImpl ti,
                        TupleDescriptor parentTupleDescriptor,
                        List list,
                        boolean forUpdate,
                        int isolationLevel,
                        TransactionController tc)
            throws StandardException
    {
        CatalogRowFactory rf = ti.getCatalogRowFactory();
        ConglomerateController heapCC;
        ExecIndexRow indexRow1;
        ExecRow outRow;
        RowLocation baseRowLocation;
        ScanController scanController;
        TupleDescriptor td = null;

        // Only the two isolation levels documented above are supported.
        if (SanityManager.DEBUG) {
            SanityManager.ASSERT
                (isolationLevel ==
                 TransactionController.ISOLATION_REPEATABLE_READ ||
                 isolationLevel ==
                 TransactionController.ISOLATION_READ_UNCOMMITTED);
        }

        outRow = rf.makeEmptyRow();

        // Open the heap for row-at-a-time fetches by row location.
        heapCC = tc.openConglomerate(
                ti.getHeapConglomerate(), false, 0,
                TransactionController.MODE_RECORD,
                isolationLevel);

        /* Scan the index and go to the data pages for qualifying rows to
         * build the column descriptor.
         */
        scanController = tc.openScan(
                ti.getIndexConglomerate(indexId),  // conglomerate to open
                false,                             // don't hold open across commit
                (forUpdate) ? TransactionController.OPENMODE_FORUPDATE : 0,
                TransactionController.MODE_RECORD,
                isolationLevel,
                (FormatableBitSet) null,           // all fields as objects
                keyRow.getRowArray(),              // start position - first row
                ScanController.GE,                 // startSearchOperation
                scanQualifiers,                    //scanQualifier,
                keyRow.getRowArray(),              // stop position - through last row
                ScanController.GT);                // stopSearchOperation

        while (true)
        {
            // create an index row template
            indexRow1 = getIndexRowFromHeapRow(
                            ti.getIndexRowGenerator(indexId),
                            heapCC.newRowLocationTemplate(),
                            outRow);

            // It is important for read uncommitted scans to use fetchNext()
            // rather than fetch, so that the fetch happens while latch is
            // held, otherwise the next() might position the scan on a row,
            // but the subsequent fetch() may find the row deleted or purged
            // from the table.
            if (!scanController.fetchNext(indexRow1.getRowArray())) {
                break;
            }

            // The row location is always the last column of the index row.
            baseRowLocation = (RowLocation) indexRow1.getColumn(
                                                indexRow1.nColumns());

            // RESOLVE paulat - remove the try catch block when track 3677 is fixed
            // just leave the contents of the try block
            // adding to get more info on track 3677

            boolean base_row_exists = false;
            try
            {
                base_row_exists =
                    heapCC.fetch(
                        baseRowLocation, outRow.getRowArray(), (FormatableBitSet) null);
            }
            catch (RuntimeException re)
            {
                // Diagnostic path only: dump extra state before rethrowing.
                if (SanityManager.DEBUG)
                {
                    if (re instanceof AssertFailure)
                    {
                        StringBuffer strbuf = new StringBuffer("Error retrieving base row in table "+ti.getTableName());
                        strbuf.append(": An ASSERT was thrown when trying to locate a row matching index row "+indexRow1+" from index "+ti.getIndexName(indexId)+", conglom number "+ti.getIndexConglomerate(indexId));
                        debugGenerateInfo(strbuf,tc,heapCC,ti,indexId);
                    }
                }
                throw re;
            }
            catch (StandardException se)
            {
                if (SanityManager.DEBUG)
                {
                    // only look for a specific error i.e. that of record on page
                    // no longer exists
                    // do not want to catch lock timeout errors here
                    if (se.getSQLState().equals("XSRS9"))
                    {
                        StringBuffer strbuf = new StringBuffer("Error retrieving base row in table "+ti.getTableName());
                        strbuf.append(": A StandardException was thrown when trying to locate a row matching index row "+indexRow1+" from index "+ti.getIndexName(indexId)+", conglom number "+ti.getIndexConglomerate(indexId));
                        debugGenerateInfo(strbuf,tc,heapCC,ti,indexId);
                    }
                }
                throw se;
            }

            if (SanityManager.DEBUG)
            {
                // it can not be possible for heap row to disappear while
                // holding scan cursor on index at ISOLATION_REPEATABLE_READ.
                if (! base_row_exists &&
                        (isolationLevel ==
                             TransactionController.ISOLATION_REPEATABLE_READ)) {
                    StringBuffer strbuf = new StringBuffer("Error retrieving base row in table "+ti.getTableName());
                    strbuf.append(": could not locate a row matching index row "+indexRow1+" from index "+ti.getIndexName(indexId)+", conglom number "+ti.getIndexConglomerate(indexId));
                    debugGenerateInfo(strbuf,tc,heapCC,ti,indexId);
                    // RESOLVE: for now, we are going to kill the VM
                    // to help debug this problem.
                    System.exit(1);

                    // RESOLVE: not currently reached
                    //SanityManager.THROWASSERT(strbuf.toString());
                }
            }

            if (!base_row_exists &&
                    (isolationLevel ==
                         TransactionController.ISOLATION_READ_UNCOMMITTED)) {
                // If isolationLevel == ISOLATION_READ_UNCOMMITTED we may
                // possibly see that the base row does not exist even if the
                // index row did. This mode is currently only used by
                // TableNameInfo's call to hashAllTableDescriptorsByTableId,
                // cf. DERBY-3678, and by getStatisticsDescriptors,
                // cf. DERBY-4881.
                //
                // For the former call, a table's schema descriptor is attempted
                // read, and if the base row for the schema has gone between
                // reading the index and the base table, the table that needs
                // this information has gone, too. So, the table should not
                // be needed for printing lock timeout or deadlock
                // information, so we can safely just return an empty (schema)
                // descriptor. Furthermore, neither Timeout or DeadLock
                // diagnostics access the schema of a table descriptor, so it
                // seems safe to just return an empty schema descriptor for
                // the table.
                //
                // There is a theoretical chance another row may have taken
                // the first one's place, but only if a compress of the base
                // table managed to run between the time we read the index and
                // the base row, which seems unlikely so we ignore that.
                //
                // Even the index row may be gone in the above use case, of
                // course, and that case also returns an empty descriptor
                // since no match is found.
                td = null;

            } else {
                // normal case
                td = rf.buildDescriptor(outRow, parentTupleDescriptor, this);
            }

            /* If list is null, then caller only wants a single descriptor - we're done
             * else just add the current descriptor to the list.
             */
            if (list == null)
            {
                break;
            }
            else if (td != null)
            {
                list.add(td);
            }
        }
        // NOTE(review): the scan and heap controllers are not closed in a
        // finally block, so an exception above leaves them open until the
        // transaction cleans up — presumably relied upon; confirm.
        scanController.close();
        heapCC.close();
        return td;
    }
    /**
     * Debug-only helper: appends diagnostic information (lock table dump,
     * conglomerate dumps, consistency-check results) to the given buffer
     * and prints it. Used when a base row cannot be found for an index
     * row (see track 3677 comments in getDescriptorViaIndexMinion).
     * No-op outside SanityManager.DEBUG builds.
     *
     * @param strbuf  buffer already holding the error context; extended here
     * @param tc      transaction controller used to open the conglomerates
     * @param heapCC  open heap conglomerate controller for the catalog
     * @param ti      catalog table info
     * @param indexId index whose conglomerate is dumped/checked
     */
    private void debugGenerateInfo(StringBuffer strbuf,
        TransactionController tc, ConglomerateController heapCC, TabInfoImpl ti,
        int indexId)
    {
        if (SanityManager.DEBUG) {
            try
            {
                strbuf.append("\nadditional information: ");

                // print the lock table
                // will get a NullPointerException if lcc doesn't yet exist e.g. at boot time
                LanguageConnectionContext lcc = (LanguageConnectionContext)
                    ContextService.getContext(LanguageConnectionContext.CONTEXT_ID);
                if (lcc != null)
                {
                    long currentTime = System.currentTimeMillis();
//EXCLUDE-START-lockdiag-
                    Enumeration lockTable = lockFactory.makeVirtualLockTable();
                    String lockTableString = Timeout.buildString(lockTable,currentTime);
                    strbuf.append("lock table at time of failure\n\n");
                    strbuf.append(lockTableString);
//EXCLUDE-END-lockdiag-
                }

                // consistency checking etc.
                // Dump the btree index conglomerate first.
                ConglomerateController btreeCC =
                    tc.openConglomerate(
                        ti.getIndexConglomerate(indexId),
                        false,
                        0, TransactionController.MODE_RECORD,
                        TransactionController.ISOLATION_REPEATABLE_READ);

                btreeCC.debugConglomerate();
                heapCC.debugConglomerate();
                heapCC.checkConsistency();
                strbuf.append("\nheapCC.checkConsistency() = true");

                // Re-open the index at table level for the consistency check.
                ConglomerateController indexCC = tc.openConglomerate(
                    ti.getIndexConglomerate(indexId),
                    false,
                    0,
                    TransactionController.MODE_TABLE,
                    TransactionController.ISOLATION_REPEATABLE_READ);
                indexCC.checkConsistency();
                strbuf.append("\nindexCC.checkConsistency() = true");

                System.err.println("ASSERT FAILURE: "+strbuf.toString());
                System.out.println("ASSERT FAILURE: "+strbuf.toString());
                SanityManager.DEBUG_PRINT("ASSERT FAILURE", strbuf.toString());
            }
            catch (StandardException se)
            {
                // Best-effort diagnostics: record the failure instead of
                // masking the original error with a new exception.
                strbuf.append("\ngot the following error when doing extra consistency checks:\n"+se.toString());
            }
        }
    }
/**
* Return a (single or list of) catalog row descriptor(s) from a
* system table where the access a heap scan
*
* @param scanQualifiers qualifiers
* @param ti The TabInfoImpl to use
* @param parentTupleDescriptor The parentDescriptor, if applicable.
* @param list The list to build, if supplied.
* If null, then caller expects a single descriptor
*
* @return The last matching descriptor
*
* @exception StandardException Thrown on error
*/
    protected TupleDescriptor getDescriptorViaHeap(
                        ScanQualifier [][] scanQualifiers,
                        TabInfoImpl ti,
                        TupleDescriptor parentTupleDescriptor,
                        List list)
            throws StandardException
    {
        CatalogRowFactory rf = ti.getCatalogRowFactory();
        ConglomerateController heapCC;
        ExecRow outRow;
        ScanController scanController;
        TransactionController tc;
        TupleDescriptor td = null;

        // Get the current transaction controller
        tc = getTransactionCompile();

        outRow = rf.makeEmptyRow();

        /*
        ** Table scan: full scan of the catalog's heap conglomerate at
        ** table-level locking and repeatable read, filtered only by the
        ** supplied scan qualifiers (no index is used).
        */
        scanController = tc.openScan(
                ti.getHeapConglomerate(),      // conglomerate to open
                false,                         // don't hold open across commit
                0,                             // for read
                TransactionController.MODE_TABLE,
                TransactionController.ISOLATION_REPEATABLE_READ,
                (FormatableBitSet) null,       // all fields as objects
                (DataValueDescriptor[]) null,  // start position - first row
                0,                             // startSearchOperation - none
                scanQualifiers,                // scanQualifier,
                (DataValueDescriptor[]) null,  // stop position - through last row
                0);                            // stopSearchOperation - none

        while (scanController.fetchNext(outRow.getRowArray()))
        {
            td = rf.buildDescriptor(outRow, parentTupleDescriptor, this);

            /* If dList is null, then caller only wants a single descriptor - we're done
             * else just add the current descriptor to the list.
             */
            if (list == null)
            {
                break;
            }
            else
            {
                list.add(td);
            }
        }
        scanController.close();
        return td;
    }
/**
* Get a TabInfoImpl for a non-core table.
* (We fault in information about non-core tables as needed.)
*
* @param catalogNumber The index into noncoreTable[].
*
* @exception StandardException Thrown on error
*/
private TabInfoImpl getNonCoreTI(int catalogNumber)
throws StandardException
{
TabInfoImpl ti = getNonCoreTIByNumber(catalogNumber);
faultInTabInfo( ti );
return ti;
}
/** returns the tabinfo for a non core system catalog. Input is a
* catalogNumber (defined in DataDictionary).
*/
protected TabInfoImpl getNonCoreTIByNumber(int catalogNumber)
throws StandardException
{
int nonCoreNum = catalogNumber - NUM_CORE;
// Look up the TabInfoImpl in the array. This does not have to be
// synchronized, because getting a reference is atomic.
TabInfoImpl retval = noncoreInfo[nonCoreNum];
if (retval == null)
{
// If we did not find the TabInfoImpl, get the right one and
// load it into the array. There is a small chance that
// two threads will do this at the same time. The code will
// work properly in that case, since storing a reference
// is atomic (although we could get extra object instantiation
// if two threads come through here at the same time.
UUIDFactory luuidFactory = uuidFactory;
switch (catalogNumber)
{
case SYSCONSTRAINTS_CATALOG_NUM:
retval = new TabInfoImpl(new SYSCONSTRAINTSRowFactory(
luuidFactory, exFactory, dvf));
break;
case SYSKEYS_CATALOG_NUM:
retval = new TabInfoImpl(new SYSKEYSRowFactory(
luuidFactory, exFactory, dvf));
break;
case SYSDEPENDS_CATALOG_NUM:
retval = new TabInfoImpl(new SYSDEPENDSRowFactory(
luuidFactory, exFactory, dvf));
break;
case SYSVIEWS_CATALOG_NUM:
retval = new TabInfoImpl(new SYSVIEWSRowFactory(
luuidFactory, exFactory, dvf));
break;
case SYSCHECKS_CATALOG_NUM:
retval = new TabInfoImpl(new SYSCHECKSRowFactory(
luuidFactory, exFactory, dvf));
break;
case SYSFOREIGNKEYS_CATALOG_NUM:
retval = new TabInfoImpl(new SYSFOREIGNKEYSRowFactory(
luuidFactory, exFactory, dvf));
break;
case SYSSTATEMENTS_CATALOG_NUM:
retval = new TabInfoImpl(new SYSSTATEMENTSRowFactory(
luuidFactory, exFactory, dvf));
break;
case SYSFILES_CATALOG_NUM:
retval = new TabInfoImpl(new SYSFILESRowFactory(
luuidFactory, exFactory, dvf));
break;
case SYSALIASES_CATALOG_NUM:
retval = new TabInfoImpl(new SYSALIASESRowFactory(
luuidFactory, exFactory, dvf));
break;
case SYSTRIGGERS_CATALOG_NUM:
retval = new TabInfoImpl(new SYSTRIGGERSRowFactory(
luuidFactory, exFactory, dvf));
break;
case SYSSTATISTICS_CATALOG_NUM:
retval = new TabInfoImpl(new SYSSTATISTICSRowFactory(
luuidFactory, exFactory, dvf));
break;
case SYSDUMMY1_CATALOG_NUM:
retval = new TabInfoImpl(new SYSDUMMY1RowFactory(
luuidFactory, exFactory, dvf));
break;
case SYSTABLEPERMS_CATALOG_NUM:
retval = new TabInfoImpl(new SYSTABLEPERMSRowFactory(
luuidFactory, exFactory, dvf));
break;
case SYSCOLPERMS_CATALOG_NUM:
retval = new TabInfoImpl(new SYSCOLPERMSRowFactory(
luuidFactory, exFactory, dvf));
break;
case SYSROUTINEPERMS_CATALOG_NUM:
retval = new TabInfoImpl(new SYSROUTINEPERMSRowFactory(
luuidFactory, exFactory, dvf));
break;
case SYSROLES_CATALOG_NUM:
retval = new TabInfoImpl(new SYSROLESRowFactory(
luuidFactory, exFactory, dvf));
break;
case SYSSEQUENCES_CATALOG_NUM:
retval = new TabInfoImpl(new SYSSEQUENCESRowFactory(
luuidFactory, exFactory, dvf));
break;
case SYSPERMS_CATALOG_NUM:
retval = new TabInfoImpl(new SYSPERMSRowFactory(
luuidFactory, exFactory, dvf));
break;
}
initSystemIndexVariables(retval);
noncoreInfo[nonCoreNum] = retval;
}
return retval;
}
protected void initSystemIndexVariables(TabInfoImpl ti)
throws StandardException
{
int numIndexes = ti.getNumberOfIndexes();
if (numIndexes > 0)
{
DataDescriptorGenerator ddg = getDataDescriptorGenerator();
for (int indexCtr = 0; indexCtr < numIndexes; indexCtr++)
{
initSystemIndexVariables(ddg, ti, indexCtr);
}
}
}
// Expected to be called only during boot time, so no synchronization.
    private void clearNoncoreTable(int nonCoreNum)
    {
        // Drop the cached TabInfoImpl; it will be rebuilt lazily by
        // getNonCoreTIByNumber() on next access.
        noncoreInfo[nonCoreNum] = null;
    }
/**
* Finishes building a TabInfoImpl if it hasn't already been faulted in.
* NOP if TabInfoImpl has already been faulted in.
*
* @param ti TabInfoImpl to fault in.
*
* @exception StandardException Thrown on error
*/
    public void faultInTabInfo( TabInfoImpl ti )
        throws StandardException
    {
        int numIndexes;

        /* Most of the time, the noncoreInfo will be complete.
         * It's okay to do an unsynchronized check and return
         * if it is complete, since it never becomes "un-complete".
         * If we change the code, for some reason, to allow
         * it to become "un-complete" after being complete,
         * then we will have to do a synchronized check here
         * as well.
         */
        if (ti.isComplete())
        {
            return;
        }

        /* The completing of the noncoreInfo entry must be synchronized.
         * NOTE: We are assuming that we will not access a different
         * noncoreInfo in the course of completing of this noncoreInfo,
         * otherwise a deadlock could occur.
         */
        synchronized(ti)
        {
            /* Now that we can run, the 1st thing that we must do
             * is to verify that we still need to complete the
             * object. (We may have been blocked on another user
             * doing the same.)
             */
            if (ti.isComplete())
            {
                return;
            }

            // Look up the catalog's table descriptor in the SYS schema.
            TableDescriptor td = getTableDescriptor(ti.getTableName(),
                                                    getSystemSchemaDescriptor(), null);

            // It's possible that the system table is not there right
            // now. This can happen, for example, if we're in the
            // process of upgrading a source or target to Xena, in
            // which case SYSSYNCINSTANTS is dropped and re-created.
            // Just return in this case, so we don't get a null pointer
            // exception.
            if (td == null)
            {
                return;
            }

            ConglomerateDescriptor cd = null;
            ConglomerateDescriptor[] cds = td.getConglomerateDescriptors();

            /* Init the heap conglomerate here: the first non-index
             * conglomerate descriptor is the heap. */
            for (int index = 0; index < cds.length; index++)
            {
                cd = cds[index];

                if (! cd.isIndex())
                {
                    ti.setHeapConglomerate(cd.getConglomerateNumber());
                    break;
                }
            }

            if (SanityManager.DEBUG)
            {
                if (cd == null)
                {
                    SanityManager.THROWASSERT("No heap conglomerate found for "
                        + ti.getTableName());
                }
            }

            /* Initialize the index conglomerates */
            numIndexes = ti.getCatalogRowFactory().getNumIndexes();
            if (numIndexes == 0)
            {
                return;
            }

            /* For each index, we get its id from the CDL */
            ConglomerateDescriptor icd = null;
            int indexCount = 0;

            for (int index = 0; index < cds.length; index++)
            {
                icd = cds[index];

                if (icd.isIndex())
                {
                    ti.setIndexConglomerate(icd);
                    indexCount++;
                }
                continue;
            }

            // Sanity check: every expected index must have been found.
            if (SanityManager.DEBUG)
            {
                if (indexCount != ti.getCatalogRowFactory().getNumIndexes())
                {
                    SanityManager.THROWASSERT("Number of indexes found (" + indexCount +
                        ") does not match the number expected (" +
                        ti.getCatalogRowFactory().getNumIndexes() + ")");
                }
            }
        }
    }
/**
* Get an index row based on a row from the heap.
*
* @param irg IndexRowGenerator to use
* @param rl RowLocation for heap
* @param heapRow Row from the heap
*
* @return ExecIndexRow Index row.
*
* @exception StandardException Thrown on error
*/
public static ExecIndexRow getIndexRowFromHeapRow(IndexRowGenerator irg,
RowLocation rl,
ExecRow heapRow)
throws StandardException
{
ExecIndexRow indexRow;
indexRow = irg.getIndexRowTemplate();
// Get an index row based on the base row
irg.getIndexRow(heapRow, rl, indexRow, (FormatableBitSet) null);
return indexRow;
}
    /** @return the engine type this data dictionary was configured with. */
    public int getEngineType()
    {
        return engineType;
    }
/**
* Get the heap conglomerate number for SYS.SYSCOLUMNS.
* (Useful for adding new index to the table.)
*
* @return The heap conglomerate number for SYS.SYSCOLUMNS.
*/
    public long getSYSCOLUMNSHeapConglomerateNumber()
    {
        // SYSCOLUMNS is a core catalog, so its TabInfoImpl is always
        // present in coreInfo (no fault-in needed).
        return coreInfo[SYSCOLUMNS_CORE_NUM].getHeapConglomerate();
    }
void addSYSCOLUMNSIndex2Property(TransactionController tc, long index2ConglomerateNumber)
{
startupParameters.put(CFG_SYSCOLUMNS_INDEX2_ID,
Long.toString(index2ConglomerateNumber));
}
    /**
     * Read a long-valued boot parameter from the start-up properties.
     * Throws a StandardException when the property is missing (if
     * required) or cannot be parsed as a long; returns -1 when an
     * optional property is absent.
     */
private long getBootParameter(Properties startParams, String key, boolean required)
throws StandardException {
String value = startParams.getProperty(key);
if (value == null)
{
if (! required)
{
return -1;
}
throw StandardException.newException(SQLState.PROPERTY_MISSING, key);
}
try {
return Long.parseLong(value);
} catch (NumberFormatException nfe) {
throw StandardException.newException(SQLState.PROPERTY_INVALID_VALUE, key, value);
}
}
/**
* Returns a unique system generated name of the form SQLyymmddhhmmssxxn
* yy - year, mm - month, dd - day of month, hh - hour, mm - minute, ss - second,
* xx - the first 2 digits of millisec because we don't have enough space to keep the exact millisec value,
* n - number between 0-9
*
* The number at the end is to handle more than one system generated name request came at the same time.
* In that case, the timestamp will remain the same, we will just increment n at the end of the name.
*
	 * Following is how we get around the problem of more than 10 system generated name requests at the same time:
* When the database boots up, we start a counter with value -1 for the last digit in the generated name.
* We also keep the time in millisec to keep track of when the last system name was generated. At the
* boot time, it will be default to 0L. In addition, we have a calendar object for the time in millisec
* That calendar object is used to fetch yy, mm, dd, etc for the string SQLyymmddhhmmssxxn
*
* When the first request for the system generated name comes, time of last system generated name will be less than
* the current time. We initialize the counter to 0, set the time of last system generated name to the
* current time truncated off to lower 10ms time. The first name request is the only time we know for sure the
* time of last system generated name will be less than the current time. After this first request, the next request
* could be at any time. We go through the following algorithm for every generated name request.
*
* First check if the current time(truncated off to lower 10ms) is greater than the timestamp for last system generated name
*
* If yes, then we change the timestamp for system generated name to the current timestamp and reset the counter to 0
* and generate the name using the current timestamp and 0 as the number at the end of the generated name.
*
* If no, then it means this request for generated name has come at the same time as last one.
* Or it may come at a time less than the last generated name request. This could be because of seasonal time change
* or somebody manually changing the time on the computer. In any case,
* if the counter is less than 10(meaning this is not yet our 11th request for generated name at a given time),
* we use that in the generated name. But if the counter has reached 10(which means, this is the 11th name request
* at the same time), then we increment the system generated name timestamp by 10ms and reset the counter to 0
* (notice, at this point, the timestamp for system generated names is not in sync with the real current time, but we
* need to have this mechanism to get around the problem of more than 10 generated name requests at a same physical time).
*
* @return system generated unique name
*/
    public String getSystemSQLName()
    {
        // NOTE(review): the code below calls Calendar.setTimeInMillis
        // directly; an older comment here claimed Date was used instead to
        // work around java bug 4243802 (setTimeInMillis was protected before
        // jdk1.4), which no longer matches the code.
        StringBuffer generatedSystemSQLName = new StringBuffer("SQL");
        synchronized (this) {
            // Current timestamp truncated to a 10ms boundary; see the method
            // javadoc for the full collision-avoidance algorithm.
            long timeNow = (System.currentTimeMillis()/10L)*10L;

            //if the current timestamp is greater than last constraint name generation time, then we reset the counter and
            //record the new timestamp
            if (timeNow > timeForLastSystemSQLName) {
                systemSQLNameNumber = 0;
                calendarForLastSystemSQLName.setTimeInMillis(timeNow);
                timeForLastSystemSQLName = timeNow;
            } else {
                //the request has come at the same time as the last generated name request
                //or it has come at a time less than the time the last generated name request. This can happen
                //because of seasonal time change or manual update of computer time.

                //get the number that was last used for the last digit of generated name and increment it by 1.
                systemSQLNameNumber++;
                if (systemSQLNameNumber == 10) { //we have already generated 10 names at the last system generated timestamp value
                    //so reset the counter
                    systemSQLNameNumber = 0;
                    timeForLastSystemSQLName = timeForLastSystemSQLName + 10L;
                    //increment the timestamp for system generated names by 10ms
                    calendarForLastSystemSQLName.setTimeInMillis(timeForLastSystemSQLName);
                }
            }

            // Assemble SQLyymmddhhmmssxxn from the calendar fields.
            generatedSystemSQLName.append(twoDigits(calendarForLastSystemSQLName.get(Calendar.YEAR)));
            //have to add 1 to the month value returned because the method give 0-January, 1-February and so on and so forth
            generatedSystemSQLName.append(twoDigits(calendarForLastSystemSQLName.get(Calendar.MONTH)+1));
            generatedSystemSQLName.append(twoDigits(calendarForLastSystemSQLName.get(Calendar.DAY_OF_MONTH)));
            generatedSystemSQLName.append(twoDigits(calendarForLastSystemSQLName.get(Calendar.HOUR_OF_DAY)));
            generatedSystemSQLName.append(twoDigits(calendarForLastSystemSQLName.get(Calendar.MINUTE)));
            generatedSystemSQLName.append(twoDigits(calendarForLastSystemSQLName.get(Calendar.SECOND)));
            //because we don't have enough space to store the entire millisec value, just store the higher 2 digits.
            generatedSystemSQLName.append(twoDigits((int) (calendarForLastSystemSQLName.get(Calendar.MILLISECOND)/10)));
            generatedSystemSQLName.append(systemSQLNameNumber);
        }
        return generatedSystemSQLName.toString();
    }
private static String twoDigits(int val) {
String retval;
if (val < 10) {
retval = "0" + val;
} else {
int retvalLength = Integer.toString(val).length();
retval = Integer.toString(val).substring(retvalLength-2);
}
return retval;
}
/**
* sets a new value in SYSCOLUMNS for a particular
* autoincrement column.
*
* @param tc Transaction Controller to use.
* @param columnName Name of the column.
* @param aiValue Value to write to SYSCOLUMNS.
* @param incrementNeeded whether to increment the value passed in by the
* user (aiValue) or not before writing it to SYSCOLUMNS.
*/
    public void setAutoincrementValue(TransactionController tc,
                                      UUID tableUUID,
                                      String columnName,
                                      long aiValue, boolean incrementNeeded)
        throws StandardException
    {
        TabInfoImpl ti = coreInfo[SYSCOLUMNS_CORE_NUM];
        ExecIndexRow keyRow = null;

        // Key SYSCOLUMNS' first index on (table UUID, column name).
        keyRow = (ExecIndexRow)exFactory.getIndexableRow(2);
        keyRow.setColumn(1, getIDValueAsCHAR(tableUUID));
        keyRow.setColumn(2, new SQLChar(columnName));

        SYSCOLUMNSRowFactory rf = (SYSCOLUMNSRowFactory) ti.getCatalogRowFactory();
        ExecRow row = rf.makeEmptyRow();

        // No index columns change, so pass all-false for the index flags.
        boolean[] bArray = new boolean[2];
        for (int index = 0; index < 2; index++)
        {
            bArray[index] = false;
        }

        // Only the AUTOINCREMENTVALUE column is updated.
        int[] colsToUpdate = new int[1];

        colsToUpdate[0] = SYSCOLUMNSRowFactory.SYSCOLUMNS_AUTOINCREMENTVALUE;

        if (incrementNeeded)
        {
            // Read the column's increment step from SYSCOLUMNS and bump
            // the caller-supplied value by it before writing.
            ExecRow readRow = ti.getRow(tc, keyRow,
                                        SYSCOLUMNSRowFactory.SYSCOLUMNS_INDEX1_ID);
            NumberDataValue increment =
                (NumberDataValue)readRow.getColumn(SYSCOLUMNSRowFactory.SYSCOLUMNS_AUTOINCREMENTINC);
            aiValue += increment.getLong();
        }
        row.setColumn(SYSCOLUMNSRowFactory.SYSCOLUMNS_AUTOINCREMENTVALUE,
                      new SQLLongint(aiValue));

        ti.updateRow(keyRow, row,
                     SYSCOLUMNSRowFactory.SYSCOLUMNS_INDEX1_ID,
                     bArray,
                     colsToUpdate,
                     tc);
        return;
    }
/**
* Computes the RowLocation in SYSCOLUMNS for a particular
* autoincrement column.
*
* @param tc Transaction Controller to use.
* @param td Table Descriptor.
* @param columnName Name of column which has autoincrement column.
*
* @exception StandardException thrown on failure.
*/
private RowLocation computeRowLocation(TransactionController tc,
TableDescriptor td,
String columnName)
throws StandardException
{
TabInfoImpl ti = coreInfo[SYSCOLUMNS_CORE_NUM];
ExecIndexRow keyRow = null;
ExecRow row;
UUID tableUUID = td.getUUID();
keyRow = (ExecIndexRow)exFactory.getIndexableRow(2);
keyRow.setColumn(1, getIDValueAsCHAR(tableUUID));
keyRow.setColumn(2, new SQLChar(columnName));
return ti.getRowLocation(tc, keyRow,
SYSCOLUMNSRowFactory.SYSCOLUMNS_INDEX1_ID);
}
/**
* Computes the RowLocation in SYSSEQUENCES for a particular sequence. Also
* constructs the sequence descriptor.
*
* @param tc Transaction Controller to use.
* @param sequenceIDstring UUID of the sequence as a string
     * @param rowLocation OUTPUT param for returning the row location
* @param sequenceDescriptor OUTPUT param for return the sequence descriptor
*
* @exception StandardException thrown on failure.
*/
void computeSequenceRowLocation
( TransactionController tc, String sequenceIDstring, RowLocation[] rowLocation, SequenceDescriptor[] sequenceDescriptor )
throws StandardException
{
TabInfoImpl ti = getNonCoreTI(SYSSEQUENCES_CATALOG_NUM);
ExecIndexRow keyRow = null;
keyRow = (ExecIndexRow)exFactory.getIndexableRow(1);
keyRow.setColumn(1, new SQLChar( sequenceIDstring ) );
rowLocation[ 0 ] = ti.getRowLocation( tc, keyRow, SYSSEQUENCESRowFactory.SYSSEQUENCES_INDEX1_ID );
sequenceDescriptor[ 0 ] = (SequenceDescriptor)
getDescriptorViaIndex
(
SYSSEQUENCESRowFactory.SYSSEQUENCES_INDEX1_ID,
keyRow,
(ScanQualifier[][]) null,
ti,
(TupleDescriptor) null,
(List) null,
false,
TransactionController.ISOLATION_REPEATABLE_READ,
tc);
}
/**
* Set the current value of an ANSI/ISO sequence. This method does not perform
* any sanity checking but assumes that the caller knows what they are doing. If the
* old value on disk is not what we expect it to be, then we are in a race with another
* session. They won and we don't update the value on disk. However, if the old value
* is null, that is a signal to us that we should update the value on disk anyway.
*
* @param tc Transaction Controller to use.
* @param rowLocation Row in SYSSEQUENCES to update.
* @param wait True if we should wait for locks
* @param oldValue What we expect to find in the CURRENTVALUE column.
* @param newValue What to stuff into the CURRENTVALUE column.
*
* @return Returns true if the value was successfully updated, false if we lost a race with another session.
*
* @exception StandardException thrown on failure.
*/
    boolean updateCurrentSequenceValue
        ( TransactionController tc, RowLocation rowLocation, boolean wait, Long oldValue, Long newValue )
        throws StandardException
    {
        int columnNum = SYSSEQUENCESRowFactory.SYSSEQUENCES_CURRENT_VALUE;
        FormatableBitSet columnToUpdate = new FormatableBitSet( SYSSEQUENCESRowFactory.SYSSEQUENCES_COLUMN_COUNT );
        TabInfoImpl ti = getNonCoreTI( SYSSEQUENCES_CATALOG_NUM );
        ConglomerateController heapCC = null;
        SYSSEQUENCESRowFactory rf = (SYSSEQUENCESRowFactory) ti.getCatalogRowFactory();
        ExecRow row = rf.makeEmptyRow();

        // FormatableBitSet is 0 based.
        columnToUpdate.set( columnNum - 1 ); // current value.

        try
        {
            /* if wait is true then we need to do a wait while trying to
               open/fetch from the conglomerate. note we use wait both to
               open as well as fetch from the conglomerate.
            */
            heapCC =
                tc.openConglomerate(
                    ti.getHeapConglomerate(),
                    false,
                    (TransactionController.OPENMODE_FORUPDATE |
                     ((wait) ? 0 : TransactionController.OPENMODE_LOCK_NOWAIT)),
                    TransactionController.MODE_RECORD,
                    TransactionController.ISOLATION_REPEATABLE_READ);

            // Fetch only the CURRENTVALUE column of the sequence row.
            heapCC.fetch( rowLocation, row.getRowArray(), columnToUpdate, wait );

            NumberDataValue oldValueOnDisk = (NumberDataValue) row.getColumn( columnNum );

            // A null expected value is a signal to update unconditionally
            // (represented as SQL NULL); see the method javadoc.
            SQLLongint expectedOldValue;
            if ( oldValue == null ) { expectedOldValue = new SQLLongint(); }
            else { expectedOldValue = new SQLLongint( oldValue.longValue() ); }

            // only update value if what's on disk is what we expected
            if ( ( oldValue == null ) || ( expectedOldValue.compare( oldValueOnDisk ) == 0 ) )
            {
                SQLLongint newValueOnDisk;
                if ( newValue == null ) { newValueOnDisk = new SQLLongint(); }
                else { newValueOnDisk = new SQLLongint( newValue.longValue() ); }

                row.setColumn( columnNum, newValueOnDisk );
                heapCC.replace( rowLocation, row.getRowArray(), columnToUpdate );

                return true;
            }
            else
            {
                // Lost the race with another session; leave disk untouched.
                return false;
            }
        }
        finally
        {
            if (heapCC != null) { heapCC.close(); }
        }
    }
/**
* @see org.apache.derby.iapi.sql.dictionary.DataDictionary#getCurrentValueAndAdvance
*/
public void getCurrentValueAndAdvance
( String sequenceUUIDstring, NumberDataValue returnValue )
throws StandardException
{
SequenceUpdater sequenceUpdater = null;
try {
sequenceUpdater = (SequenceUpdater) sequenceGeneratorCache.find( sequenceUUIDstring );
sequenceUpdater.getCurrentValueAndAdvance( returnValue );
}
finally
{
if ( sequenceUpdater != null )
{
sequenceGeneratorCache.release( sequenceUpdater );
}
}
}
public RowLocation getRowLocationTemplate(LanguageConnectionContext lcc,
TableDescriptor td)
throws StandardException
{
    // Open the table's heap conglomerate just long enough to ask it for an
    // empty RowLocation of the right shape, then close it again.
    TransactionController tc = lcc.getTransactionCompile();
    long heapId = td.getHeapConglomerateId();
    ConglomerateController heapCC =
        tc.openConglomerate(
            heapId,
            false,
            0,
            TransactionController.MODE_RECORD,
            TransactionController.ISOLATION_READ_COMMITTED);
    try {
        return heapCC.newRowLocationTemplate();
    } finally {
        heapCC.close();
    }
}
/**
*
* Add a table descriptor to the "other" cache. The other cache is
* determined by the type of the object c.
*
* @param td TableDescriptor to add to the other cache.
* @param c Cacheable Object which lets us figure out the other cache.
*
* @exception StandardException
*/
public void addTableDescriptorToOtherCache(TableDescriptor td,
Cacheable c)
throws StandardException
{
    // If the caller is populating one of the two TD caches, mirror the
    // descriptor into the other one: an OID-keyed entry is mirrored into
    // the name cache, and vice versa.
    CacheManager otherCache =
        (c instanceof OIDTDCacheable) ? nameTdCache : OIDTdCache;

    Object key;
    if (otherCache == nameTdCache) {
        key = new TableKey(td.getSchemaDescriptor().getUUID(), td.getName());
    } else {
        key = td.getUUID();
    }

    TDCacheable mirrored = null;
    try {
        mirrored = (TDCacheable) otherCache.create(key, td);
    } catch (StandardException se) {
        // Someone may have inserted the same descriptor first; that is
        // fine.  Any other failure is a real error and is propagated.
        if (!(se.getMessageId().equals(SQLState.OBJECT_EXISTS_IN_CACHE))) {
            throw se;
        }
    } finally {
        if (mirrored != null) {
            otherCache.release(mirrored);
        }
    }
}
/** @see DataDictionary#dropStatisticsDescriptors */
public void dropStatisticsDescriptors(UUID tableUUID, UUID referenceUUID,
TransactionController tc)
throws StandardException
{
    // Delete SYSSTATISTICS rows for the table.  When a reference UUID is
    // supplied the key has two columns and only that entry is removed;
    // otherwise a one-column key wipes every row for the table.
    TabInfoImpl ti = getNonCoreTI(SYSSTATISTICS_CATALOG_NUM);
    DataValueDescriptor tableIdOrderable = getIDValueAsCHAR(tableUUID);

    ExecIndexRow keyRow;
    if (referenceUUID == null) {
        keyRow = exFactory.getIndexableRow(1);
    } else {
        keyRow = exFactory.getIndexableRow(2);
        keyRow.setColumn(2, getIDValueAsCHAR(referenceUUID));
    }
    keyRow.setColumn(1, tableIdOrderable);

    ti.deleteRow(tc, keyRow,
                 SYSSTATISTICSRowFactory.SYSSTATISTICS_INDEX1_ID);
}
// Fetch the LanguageConnectionContext bound to the current context
// manager, or null when none has been pushed.
private static LanguageConnectionContext getLCC() {
    Object ctx =
        ContextService.getContextOrNull(LanguageConnectionContext.CONTEXT_ID);
    return (LanguageConnectionContext) ctx;
}
// Build the descriptor for a built-in system schema.  System schemas
// carry a fixed, well-known UUID (recreated from its string form) and are
// flagged as system schemas.
private SchemaDescriptor newSystemSchemaDesc(
String name,
String uuid)
{
    return new SchemaDescriptor(
        this,
        name,
        authorizationDatabaseOwner,
        uuidFactory.recreateUUID(uuid),
        true /* system schema */);
}
// Build the descriptor for the schema holding declared global temporary
// tables.  It has no UUID and is not a system schema.
private SchemaDescriptor newDeclaredGlobalTemporaryTablesSchemaDesc( String name)
{
    return new SchemaDescriptor(
        this,
        name,
        authorizationDatabaseOwner,
        (UUID) null,
        false /* not a system schema */);
}
/**
Check to see if a database has been upgraded to the required
level in order to use a language feature.
@param requiredMajorVersion Data Dictionary major version
@param feature Non-null to throw an error, null to return the state of the version match.
@return True if the database has been upgraded to the required level, false otherwise.
*/
public boolean checkVersion(int requiredMajorVersion, String feature) throws StandardException {
    // DD_VERSION_CURRENT is a placeholder meaning "the level this software
    // was built at"; resolve it before delegating to the on-disk version.
    int required =
        (requiredMajorVersion == DataDictionary.DD_VERSION_CURRENT)
            ? softwareVersion.majorVersionNumber
            : requiredMajorVersion;
    return dictionaryVersion.checkVersion(required, feature);
}
/**
** Create system built-in metadata stored prepared statements.
*/
void createSystemSps(TransactionController tc)
throws StandardException
{
    // Stored prepared statements backing DatabaseMetaData go into the SYS
    // schema; those used by the network server go into SYSIBM.
    createSPSSet(tc, false, getSystemSchemaDescriptor().getUUID());
    createSPSSet(tc, true, getSysIBMSchemaDescriptor().getUUID());
}
/**
Create a set of stored prepared statements from a properties file.
Key is the statement name, value is the SQL statement.
*/
protected void createSPSSet(TransactionController tc, boolean net, UUID schemaID)
throws StandardException
{
    // Each property maps a statement name to its SQL text.  The resulting
    // descriptors are stored invalid and uncompiled: no LCC is available
    // during dictionary creation, so compilation is deferred to the first
    // execution of each statement.
    Properties p = getQueryDescriptions(net);
    for (Enumeration e = p.keys(); e.hasMoreElements(); ) {
        String spsName = (String) e.nextElement();
        String spsText = p.getProperty(spsName);
        SPSDescriptor spsd = new SPSDescriptor(
            this,
            spsName,
            getUUIDFactory().createUUID(),
            schemaID,
            schemaID,
            SPSDescriptor.SPS_TYPE_REGULAR,
            false,      // not valid yet: compiled lazily on first execution
            spsText,    // sps text
            false );    // do not compile now
        addSPSDescriptor(spsd, tc);
    }
}
/**
* Generic create procedure routine.
* <p>
* Takes the input procedure and inserts it into the appropriate
* catalog.
*
* Assumes all arguments are "IN" type.
*
* @param routine_name name of the routine in java and the SQL
* procedure name.
*
* @param arg_names String array of procedure argument names in order.
*
* @param arg_types Internal SQL types of the arguments
*
* @param routine_sql_control
* One of the RoutineAliasInfo constants:
* MODIFIES_SQL_DATA
* READS_SQL_DATA
* CONTAINS_SQL
* NO_SQL
*
* @param isDeterministic True if the procedure/function is DETERMINISTIC
*
* @param return_type null for procedure. For functions the return type
* of the function.
*
* @param newlyCreatedRoutines evolving set of routines, some of which may need permissions later on
* @param tc an instance of the TransactionController
*
* @param procClass the fully qualified name of the class that contains
* java definitions for the stored procedures
*
* @return UUID UUID of system routine that got created.
*
* @exception StandardException Standard exception policy.
**/
private final UUID createSystemProcedureOrFunction(
String routine_name,
UUID schema_uuid,
String[] arg_names,
TypeDescriptor[] arg_types,
int num_out_param,
int num_result_sets,
short routine_sql_control,
boolean isDeterministic,
TypeDescriptor return_type,
HashSet newlyCreatedRoutines,
TransactionController tc,
String procClass)
throws StandardException
{
// Number of declared parameters; zero when arg_names is null.
int num_args = 0;
if (arg_names != null)
num_args = arg_names.length;
if (SanityManager.DEBUG)
{
if (num_args != 0)
{
SanityManager.ASSERT(arg_names != null);
SanityManager.ASSERT(arg_types != null);
SanityManager.ASSERT(arg_names.length == arg_types.length);
}
}
// Parameter modes: the first (num_args - num_out_param) parameters are
// IN, the trailing num_out_param parameters are OUT.  (Despite the
// method comment elsewhere, OUT parameters are supported here.)
int[] arg_modes = null;
if (num_args != 0)
{
arg_modes = new int[num_args];
int num_in_param = num_args - num_out_param;
for (int i = 0; i < num_in_param; i++)
arg_modes[i] = JDBC30Translation.PARAMETER_MODE_IN;
for (int i = 0; i < num_out_param; i++)
arg_modes[num_in_param + i] = JDBC30Translation.PARAMETER_MODE_OUT;
}
// Routine metadata: links the SQL-level routine to its Java method and
// records its SQL-access level.  Argument order here is positional and
// must match the RoutineAliasInfo constructor exactly.
RoutineAliasInfo routine_alias_info =
new RoutineAliasInfo(
routine_name, // name of routine
num_args, // number of params
arg_names, // names of params
arg_types, // types of params
arg_modes, // parameter modes (IN first, then OUT)
num_result_sets, // number of result sets
RoutineAliasInfo.PS_JAVA, // link to java routine
routine_sql_control, // one of:
// MODIFIES_SQL_DATA
// READS_SQL_DATA
// CONTAINS_SQL
// NO_SQL
isDeterministic, // whether the procedure/function is DETERMINISTIC
false, // not definer's rights
true, // true - calledOnNullInput
return_type);
UUID routine_uuid = getUUIDFactory().createUUID();
// A null return_type marks a procedure; non-null marks a function.
// This drives both the alias type and its name space below.
AliasDescriptor ads =
new AliasDescriptor(
this,
routine_uuid,
routine_name,
schema_uuid,
procClass,
(return_type == null) ?
AliasInfo.ALIAS_TYPE_PROCEDURE_AS_CHAR :
AliasInfo.ALIAS_TYPE_FUNCTION_AS_CHAR,
(return_type == null) ?
AliasInfo.ALIAS_NAME_SPACE_PROCEDURE_AS_CHAR :
AliasInfo.ALIAS_NAME_SPACE_FUNCTION_AS_CHAR,
false,
routine_alias_info, null);
// Persist the alias in SYSALIASES and remember the name so that
// permissions can be granted to it later (see
// grantPublicAccessToSystemRoutines).
addDescriptor(
ads, null, DataDictionary.SYSALIASES_CATALOG_NUM, false, tc);
newlyCreatedRoutines.add( routine_name );
return routine_uuid;
}
/**
* Generic create procedure routine.
* Takes the input procedure and inserts it into the appropriate
* catalog.
*
* Assumes all arguments are "IN" type.
*
* @param routine_name name of the routine in java and the SQL
* procedure name.
*
* @param arg_names String array of procedure argument names in order.
*
* @param arg_types Internal SQL types of the arguments
*
* @param routine_sql_control
* One of the RoutineAliasInfo constants:
* MODIFIES_SQL_DATA
* READS_SQL_DATA
* CONTAINS_SQL
* NO_SQL
*
*
* @param isDeterministic True if the procedure/function is DETERMINISTIC
*
* @param return_type null for procedure. For functions the return type
* of the function.
*
* @param newlyCreatedRoutines evolving set of routines, some of which may need permissions later on
* @param tc an instance of the TransactionController
*
* @return UUID UUID of system routine that got created.
*
* @throws StandardException Standard exception policy.
**/
private final UUID createSystemProcedureOrFunction(
String routine_name,
UUID schema_uuid,
String[] arg_names,
TypeDescriptor[] arg_types,
int num_out_param,
int num_result_sets,
short routine_sql_control,
boolean isDeterministic,
TypeDescriptor return_type,
HashSet newlyCreatedRoutines,
TransactionController tc)
throws StandardException
{
    // Convenience overload: every built-in routine created through this
    // path is implemented by org.apache.derby.catalog.SystemProcedures.
    return createSystemProcedureOrFunction(
        routine_name,
        schema_uuid,
        arg_names,
        arg_types,
        num_out_param,
        num_result_sets,
        routine_sql_control,
        isDeterministic,
        return_type,
        newlyCreatedRoutines,
        tc,
        "org.apache.derby.catalog.SystemProcedures");
}
/**
* Create system procedures
* <p>
* Used to add the system procedures to the database when
* it is created. System procedures are currently added to
* either SYSCS_UTIL or SQLJ schemas.
* <p>
*
* @param tc transaction controller to use. Counts on caller to
* commit.
* @param newlyCreatedRoutines evolving set of routines which may need to be given permissions later on
*
* @exception StandardException Standard exception policy.
**/
private final void create_SYSCS_procedures(
TransactionController tc, HashSet newlyCreatedRoutines )
throws StandardException
{
// Populates the SYSCS_UTIL and SQLJ schemas with built-in routines.
// Every string literal below becomes persistent catalog content, so
// names and argument lists must not be changed casually.
// Types used for routine parameters and return types, all nullable.
TypeDescriptor varchar32672Type = DataTypeDescriptor.getCatalogType(
Types.VARCHAR, 32672);
/*
** SYSCS_UTIL routines.
*/
// NOTE(review): routine_uuid is assigned by several calls below but
// never read afterwards -- TODO confirm the assignments can be dropped.
UUID routine_uuid = null;
// used to put procedure into the SYSCS_UTIL schema
UUID sysUtilUUID = getSystemUtilSchemaDescriptor().getUUID();
// void SYSCS_UTIL.SYSCS_SET_DATABASE_PROPERTY(
// varchar(128), varchar(Limits.DB2_VARCHAR_MAXWIDTH))
{
// procedure argument names
String[] arg_names = {
"KEY",
"VALUE"};
// procedure argument types
TypeDescriptor[] arg_types = {
CATALOG_TYPE_SYSTEM_IDENTIFIER,
DataTypeDescriptor.getCatalogType(
Types.VARCHAR, Limits.DB2_VARCHAR_MAXWIDTH)
};
createSystemProcedureOrFunction(
"SYSCS_SET_DATABASE_PROPERTY",
sysUtilUUID,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.MODIFIES_SQL_DATA,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
// void SYSCS_UTIL.SYSCS_COMPRESS_TABLE(varchar(128), varchar(128), SMALLINT)
{
// procedure argument names
String[] arg_names = {"SCHEMANAME", "TABLENAME", "SEQUENTIAL"};
// procedure argument types
TypeDescriptor[] arg_types = {
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
TypeDescriptor.SMALLINT
};
routine_uuid = createSystemProcedureOrFunction(
"SYSCS_COMPRESS_TABLE",
sysUtilUUID,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.MODIFIES_SQL_DATA,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
// void SYSCS_UTIL.SYSCS_CHECKPOINT_DATABASE()
{
createSystemProcedureOrFunction(
"SYSCS_CHECKPOINT_DATABASE",
sysUtilUUID,
null,
null,
0,
0,
RoutineAliasInfo.CONTAINS_SQL,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
// void SYSCS_UTIL.SYSCS_FREEZE_DATABASE()
{
createSystemProcedureOrFunction(
"SYSCS_FREEZE_DATABASE",
sysUtilUUID,
null,
null,
0,
0,
RoutineAliasInfo.CONTAINS_SQL,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
// void SYSCS_UTIL.SYSCS_UNFREEZE_DATABASE()
{
createSystemProcedureOrFunction(
"SYSCS_UNFREEZE_DATABASE",
sysUtilUUID,
null,
null,
0,
0,
RoutineAliasInfo.CONTAINS_SQL,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
// void SYSCS_UTIL.SYSCS_BACKUP_DATABASE(varchar Limits.DB2_VARCHAR_MAXWIDTH)
{
// procedure argument names
String[] arg_names = {"BACKUPDIR"};
// procedure argument types
TypeDescriptor[] arg_types = {
DataTypeDescriptor.getCatalogType(
Types.VARCHAR, Limits.DB2_VARCHAR_MAXWIDTH)
};
createSystemProcedureOrFunction(
"SYSCS_BACKUP_DATABASE",
sysUtilUUID,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.MODIFIES_SQL_DATA,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
// void SYSCS_UTIL.SYSCS_BACKUP_DATABASE_AND_ENABLE_LOG_ARCHIVE_MODE(
// varchar Limits.DB2_VARCHAR_MAXWIDTH, smallint)
{
// procedure argument names
String[] arg_names = {"BACKUPDIR", "DELETE_ARCHIVED_LOG_FILES"};
// procedure argument types
TypeDescriptor[] arg_types = {
DataTypeDescriptor.getCatalogType(
Types.VARCHAR, Limits.DB2_VARCHAR_MAXWIDTH),
TypeDescriptor.SMALLINT
};
createSystemProcedureOrFunction(
"SYSCS_BACKUP_DATABASE_AND_ENABLE_LOG_ARCHIVE_MODE",
sysUtilUUID,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.MODIFIES_SQL_DATA,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
// void SYSCS_UTIL.SYSCS_DISABLE_LOG_ARCHIVE_MODE(smallint)
{
// procedure argument names
String[] arg_names = {"DELETE_ARCHIVED_LOG_FILES"};
// procedure argument types
TypeDescriptor[] arg_types = {TypeDescriptor.SMALLINT};
createSystemProcedureOrFunction(
"SYSCS_DISABLE_LOG_ARCHIVE_MODE",
sysUtilUUID,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.MODIFIES_SQL_DATA,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
// void SYSCS_UTIL.SYSCS_SET_RUNTIMESTATISTICS(smallint)
{
// procedure argument names
String[] arg_names = {"ENABLE"};
// procedure argument types
TypeDescriptor[] arg_types = {TypeDescriptor.SMALLINT};
routine_uuid = createSystemProcedureOrFunction(
"SYSCS_SET_RUNTIMESTATISTICS",
sysUtilUUID,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.CONTAINS_SQL,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
// void SYSCS_UTIL.SYSCS_SET_STATISTICS_TIMING(smallint)
{
// procedure argument names
String[] arg_names = {"ENABLE"};
// procedure argument types
TypeDescriptor[] arg_types = {TypeDescriptor.SMALLINT};
routine_uuid = createSystemProcedureOrFunction(
"SYSCS_SET_STATISTICS_TIMING",
sysUtilUUID,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.CONTAINS_SQL,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
// SYSCS_UTIL functions
//
// TODO (mikem) -
// the following need to be functions when that is supported.
// until then calling them will not work.
// VARCHAR(Limits.DB2_VARCHAR_MAXWIDTH)
// SYSCS_UTIL.SYSCS_GET_DATABASE_PROPERTY(varchar(128))
{
// procedure argument names
String[] arg_names = {"KEY"};
// procedure argument types
TypeDescriptor[] arg_types = {CATALOG_TYPE_SYSTEM_IDENTIFIER};
createSystemProcedureOrFunction(
"SYSCS_GET_DATABASE_PROPERTY",
sysUtilUUID,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.READS_SQL_DATA,
false,
DataTypeDescriptor.getCatalogType(
Types.VARCHAR, Limits.DB2_VARCHAR_MAXWIDTH),
newlyCreatedRoutines,
tc);
}
// SMALLINT SYSCS_UTIL.SYSCS_CHECK_TABLE(varchar(128), varchar(128))
{
// procedure argument names
String[] arg_names = {"SCHEMANAME", "TABLENAME"};
// procedure argument types
TypeDescriptor[] arg_types = {
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER
};
createSystemProcedureOrFunction(
"SYSCS_CHECK_TABLE",
sysUtilUUID,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.READS_SQL_DATA,
false,
TypeDescriptor.INTEGER,
newlyCreatedRoutines,
tc);
}
// CLOB SYSCS_UTIL.SYSCS_GET_RUNTIMESTATISTICS()
{
routine_uuid = createSystemProcedureOrFunction(
"SYSCS_GET_RUNTIMESTATISTICS",
sysUtilUUID,
null,
null,
0,
0,
RoutineAliasInfo.CONTAINS_SQL,
false,
DataTypeDescriptor.getCatalogType(
Types.VARCHAR, Limits.DB2_VARCHAR_MAXWIDTH),
newlyCreatedRoutines,
/*
TODO - mikem, wants to be a CLOB, but don't know how to do
that yet. Testing it with varchar for now.
DataTypeDescriptor.getCatalogType(
Types.CLOB, Limits.DB2_LOB_MAXWIDTH),
*/
tc);
}
/*
** SQLJ routine.
*/
UUID sqlJUUID =
getSchemaDescriptor(
SchemaDescriptor.STD_SQLJ_SCHEMA_NAME, tc, true).getUUID();
// SQLJ.INSTALL_JAR(URL VARCHAR(??), JAR VARCHAR(128), DEPLOY INT)
{
String[] arg_names = {"URL", "JAR", "DEPLOY"};
TypeDescriptor[] arg_types = {
DataTypeDescriptor.getCatalogType(
Types.VARCHAR, 256),
CATALOG_TYPE_SYSTEM_IDENTIFIER,
TypeDescriptor.INTEGER
};
createSystemProcedureOrFunction(
"INSTALL_JAR",
sqlJUUID,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.MODIFIES_SQL_DATA,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
// SQLJ.REPLACE_JAR(URL VARCHAR(??), JAR VARCHAR(128))
{
String[] arg_names = {"URL", "JAR"};
TypeDescriptor[] arg_types = {
DataTypeDescriptor.getCatalogType(
Types.VARCHAR, 256),
CATALOG_TYPE_SYSTEM_IDENTIFIER
};
createSystemProcedureOrFunction(
"REPLACE_JAR",
sqlJUUID,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.MODIFIES_SQL_DATA,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
// SQLJ.REMOVE_JAR(JAR VARCHAR(128), UNDEPLOY INT)
{
String[] arg_names = {"JAR", "UNDEPLOY"};
TypeDescriptor[] arg_types = {
CATALOG_TYPE_SYSTEM_IDENTIFIER,
TypeDescriptor.INTEGER
};
createSystemProcedureOrFunction(
"REMOVE_JAR",
sqlJUUID,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.MODIFIES_SQL_DATA,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
/* SYSCS_EXPORT_TABLE (IN SCHEMANAME VARCHAR(128),
* IN TABLENAME VARCHAR(128), IN FILENAME VARCHAR(32672) ,
* IN COLUMNDELIMITER CHAR(1), IN CHARACTERDELIMITER CHAR(1) ,
* IN CODESET VARCHAR(128))
*/
{
// procedure argument names
// NOTE(review): " columnDelimiter" below carries a leading space --
// looks accidental, but changing it would alter the parameter name
// stored in the catalog.  TODO confirm before ever "fixing" it (the
// same string recurs in the export/import routines that follow).
String[] arg_names = {"schemaName", "tableName" ,
"fileName"," columnDelimiter",
"characterDelimiter", "codeset"};
// procedure argument types
TypeDescriptor[] arg_types = {
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
varchar32672Type,
DataTypeDescriptor.getCatalogType(
Types.CHAR, 1),
DataTypeDescriptor.getCatalogType(
Types.CHAR, 1),
CATALOG_TYPE_SYSTEM_IDENTIFIER
};
createSystemProcedureOrFunction(
"SYSCS_EXPORT_TABLE",
sysUtilUUID,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.READS_SQL_DATA,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
/* SYSCS_EXPORT_QUERY (IN SELECTSTATEMENT VARCHAR(32672),
* IN FILENAME VARCHAR(32672) ,
* IN COLUMNDELIMITER CHAR(1), IN CHARACTERDELIMITER CHAR(1) ,
* IN CODESET VARCHAR(128))
*/
{
// procedure argument names
String[] arg_names = {"selectStatement", "fileName",
" columnDelimiter", "characterDelimiter",
"codeset"};
// procedure argument types
TypeDescriptor[] arg_types = {
varchar32672Type,
varchar32672Type,
DataTypeDescriptor.getCatalogType(
Types.CHAR, 1),
DataTypeDescriptor.getCatalogType(
Types.CHAR, 1),
CATALOG_TYPE_SYSTEM_IDENTIFIER
};
createSystemProcedureOrFunction(
"SYSCS_EXPORT_QUERY",
sysUtilUUID,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.READS_SQL_DATA,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
/* SYSCS_IMPORT_TABLE(IN SCHEMANAME VARCHAR(128),
* IN TABLENAME VARCHAR(128), IN FILENAME VARCHAR(32762),
* IN COLUMNDELIMITER CHAR(1), IN CHARACTERDELIMITER CHAR(1),
* IN CODESET VARCHAR(128) , IN REPLACE SMALLINT)
*/
{
// procedure argument names
String[] arg_names = {"schemaName", "tableName", "fileName",
" columnDelimiter", "characterDelimiter",
"codeset", "replace"};
// procedure argument types
TypeDescriptor[] arg_types = {
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
varchar32672Type,
DataTypeDescriptor.getCatalogType(
Types.CHAR, 1),
DataTypeDescriptor.getCatalogType(
Types.CHAR, 1),
CATALOG_TYPE_SYSTEM_IDENTIFIER,
TypeDescriptor.SMALLINT,
};
createSystemProcedureOrFunction(
"SYSCS_IMPORT_TABLE",
sysUtilUUID,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.MODIFIES_SQL_DATA,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
/* SYSCS_IMPORT_DATA(IN SCHEMANAME VARCHAR(128),
* IN TABLENAME VARCHAR(128), IN INSERTCOLUMNLIST VARCHAR(32762),
* IN COLUMNINDEXES VARCHAR(32762), IN FILENAME VARCHAR(32762),
* IN COLUMNDELIMITER CHAR(1), IN CHARACTERDELIMITER CHAR(1),
* IN CODESET VARCHAR(128) , IN REPLACE SMALLINT)
*/
{
// procedure argument names
String[] arg_names = {"schemaName", "tableName", "insertColumnList","columnIndexes",
"fileName", " columnDelimiter", "characterDelimiter",
"codeset", "replace"};
// procedure argument types
TypeDescriptor[] arg_types = {
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
varchar32672Type,
varchar32672Type,
varchar32672Type,
DataTypeDescriptor.getCatalogType(
Types.CHAR, 1),
DataTypeDescriptor.getCatalogType(
Types.CHAR, 1),
CATALOG_TYPE_SYSTEM_IDENTIFIER,
TypeDescriptor.SMALLINT,
};
createSystemProcedureOrFunction(
"SYSCS_IMPORT_DATA",
sysUtilUUID,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.MODIFIES_SQL_DATA,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
/*
* SYSCS_BULK_INSERT(
* IN SCHEMANAME VARCHAR(128),
* IN TABLENAME VARCHAR(128),
* IN VTINAME VARCHAR(32762),
* IN VTIARG VARCHAR(32762))
*/
{
// procedure argument names
String[] arg_names = {"schemaName", "tableName", "vtiName","vtiArg"};
// procedure argument types
TypeDescriptor[] arg_types = {
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
varchar32672Type,
varchar32672Type,
};
createSystemProcedureOrFunction(
"SYSCS_BULK_INSERT",
sysUtilUUID,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.MODIFIES_SQL_DATA,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
// Routines introduced by later releases are added by version-specific
// helpers so that upgrade can reuse them.
// add 10.1 specific system procedures
create_10_1_system_procedures(tc, newlyCreatedRoutines, sysUtilUUID);
// add 10.2 specific system procedures
create_10_2_system_procedures(tc, newlyCreatedRoutines, sysUtilUUID);
// add 10.3 specific system procedures
create_10_3_system_procedures(tc, newlyCreatedRoutines );
// add 10.5 specific system procedures
create_10_5_system_procedures(tc, newlyCreatedRoutines );
// add 10.6 specific system procedures
create_10_6_system_procedures(tc, newlyCreatedRoutines );
}
/**
* Create system procedures in SYSIBM
* <p>
* Used to add the system procedures to the database when
* it is created. Full upgrade from version 5.1 or earlier also
* calls this method.
* <p>
*
* @param newlyCreatedRoutines evolving set of routines which we're adding (some may need permissions later on)
* @param tc transaction controller to use. Counts on caller to
* commit.
*
* @exception StandardException Standard exception policy.
**/
protected final void create_SYSIBM_procedures(
TransactionController tc, HashSet newlyCreatedRoutines )
throws StandardException
{
/*
** SYSIBM routines.
*/
// Metadata procedures backing client/DatabaseMetaData calls.  Every
// string literal below becomes persistent catalog content.
// used to put procedure into the SYSIBM schema
UUID sysIBMUUID = getSysIBMSchemaDescriptor().getUUID();
// SYSIBM.SQLCAMESSAGE(...) -- the last two parameters (MESSAGE,
// RETURNCODE) are OUT parameters (num_out_param == 2 below).
{
// procedure argument names
String[] arg_names = {
"SQLCODE",
"SQLERRML",
"SQLERRMC",
"SQLERRP",
"SQLERRD0",
"SQLERRD1",
"SQLERRD2",
"SQLERRD3",
"SQLERRD4",
"SQLERRD5",
"SQLWARN",
"SQLSTATE",
"FILE",
"LOCALE",
"MESSAGE",
"RETURNCODE"};
// procedure argument types
TypeDescriptor[] arg_types = {
TypeDescriptor.INTEGER,
TypeDescriptor.SMALLINT,
DataTypeDescriptor.getCatalogType(
Types.VARCHAR, Limits.DB2_JCC_MAX_EXCEPTION_PARAM_LENGTH),
DataTypeDescriptor.getCatalogType(Types.CHAR, 8),
TypeDescriptor.INTEGER,
TypeDescriptor.INTEGER,
TypeDescriptor.INTEGER,
TypeDescriptor.INTEGER,
TypeDescriptor.INTEGER,
TypeDescriptor.INTEGER,
DataTypeDescriptor.getCatalogType(Types.CHAR, 11),
DataTypeDescriptor.getCatalogType(Types.CHAR, 5),
DataTypeDescriptor.getCatalogType(Types.VARCHAR, 50),
DataTypeDescriptor.getCatalogType(Types.CHAR, 5),
DataTypeDescriptor.getCatalogType(Types.VARCHAR, 2400),
TypeDescriptor.INTEGER
};
createSystemProcedureOrFunction(
"SQLCAMESSAGE",
sysIBMUUID,
arg_names,
arg_types,
2,
0,
RoutineAliasInfo.READS_SQL_DATA,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
// SYSIBM.SQLPROCEDURES(VARCHAR(128), VARCHAR(128), VARCHAR(128), VARCHAR(4000))
{
// procedure argument names
String[] arg_names = {
"CATALOGNAME",
"SCHEMANAME",
"PROCNAME",
"OPTIONS"};
// procedure argument types
TypeDescriptor[] arg_types = {
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
DataTypeDescriptor.getCatalogType(Types.VARCHAR, 4000)};
createSystemProcedureOrFunction(
"SQLPROCEDURES",
sysIBMUUID,
arg_names,
arg_types,
0,
1,
RoutineAliasInfo.READS_SQL_DATA,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
// SYSIBM.SQLTABLEPRIVILEGES(VARCHAR(128), VARCHAR(128), VARCHAR(128), VARCHAR(4000))
{
// procedure argument names
String[] arg_names = {
"CATALOGNAME",
"SCHEMANAME",
"TABLENAME",
"OPTIONS"};
// procedure argument types
TypeDescriptor[] arg_types = {
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
DataTypeDescriptor.getCatalogType(Types.VARCHAR, 4000)};
createSystemProcedureOrFunction(
"SQLTABLEPRIVILEGES",
sysIBMUUID,
arg_names,
arg_types,
0,
1,
RoutineAliasInfo.READS_SQL_DATA,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
// SYSIBM.SQLPRIMARYKEYS(VARCHAR(128), VARCHAR(128), VARCHAR(128), VARCHAR(4000))
{
// procedure argument names
String[] arg_names = {
"CATALOGNAME",
"SCHEMANAME",
"TABLENAME",
"OPTIONS"};
// procedure argument types
TypeDescriptor[] arg_types = {
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
DataTypeDescriptor.getCatalogType(Types.VARCHAR, 4000)};
createSystemProcedureOrFunction(
"SQLPRIMARYKEYS",
sysIBMUUID,
arg_names,
arg_types,
0,
1,
RoutineAliasInfo.READS_SQL_DATA,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
// SYSIBM.SQLTABLES(VARCHAR(128), VARCHAR(128), VARCHAR(128), VARCHAR(4000), VARCHAR(4000))
{
// procedure argument names
String[] arg_names = {
"CATALOGNAME",
"SCHEMANAME",
"TABLENAME",
"TABLETYPE",
"OPTIONS"};
// procedure argument types
TypeDescriptor[] arg_types = {
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
DataTypeDescriptor.getCatalogType(Types.VARCHAR, 4000),
DataTypeDescriptor.getCatalogType(Types.VARCHAR, 4000)};
createSystemProcedureOrFunction(
"SQLTABLES",
sysIBMUUID,
arg_names,
arg_types,
0,
1,
RoutineAliasInfo.READS_SQL_DATA,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
// SYSIBM.SQLPROCEDURECOLS(VARCHAR(128), VARCHAR(128), VARCHAR(128), VARCHAR(128), VARCHAR(4000))
{
// procedure argument names
String[] arg_names = {
"CATALOGNAME",
"SCHEMANAME",
"PROCNAME",
"PARAMNAME",
"OPTIONS"};
// procedure argument types
TypeDescriptor[] arg_types = {
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
DataTypeDescriptor.getCatalogType(Types.VARCHAR, 4000)};
createSystemProcedureOrFunction(
"SQLPROCEDURECOLS",
sysIBMUUID,
arg_names,
arg_types,
0,
1,
RoutineAliasInfo.READS_SQL_DATA,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
// SYSIBM.SQLCOLUMNS(VARCHAR(128), VARCHAR(128), VARCHAR(128), VARCHAR(128), VARCHAR(4000))
{
// procedure argument names
String[] arg_names = {
"CATALOGNAME",
"SCHEMANAME",
"TABLENAME",
"COLUMNNAME",
"OPTIONS"};
// procedure argument types
TypeDescriptor[] arg_types = {
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
DataTypeDescriptor.getCatalogType(Types.VARCHAR, 4000)};
createSystemProcedureOrFunction(
"SQLCOLUMNS",
sysIBMUUID,
arg_names,
arg_types,
0,
1,
RoutineAliasInfo.READS_SQL_DATA,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
// SYSIBM.SQLCOLPRIVILEGES(VARCHAR(128), VARCHAR(128), VARCHAR(128), VARCHAR(128), VARCHAR(4000))
{
// procedure argument names
String[] arg_names = {
"CATALOGNAME",
"SCHEMANAME",
"TABLENAME",
"COLUMNNAME",
"OPTIONS"};
// procedure argument types
TypeDescriptor[] arg_types = {
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
DataTypeDescriptor.getCatalogType(Types.VARCHAR, 4000)};
createSystemProcedureOrFunction(
"SQLCOLPRIVILEGES",
sysIBMUUID,
arg_names,
arg_types,
0,
1,
RoutineAliasInfo.READS_SQL_DATA,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
// SYSIBM.SQLUDTS(VARCHAR(128), VARCHAR(128), VARCHAR(128), VARCHAR(128), VARCHAR(4000))
{
// procedure argument names
String[] arg_names = {
"CATALOGNAME",
"SCHEMAPATTERN",
"TYPENAMEPATTERN",
"UDTTYPES",
"OPTIONS"};
// procedure argument types
TypeDescriptor[] arg_types = {
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
DataTypeDescriptor.getCatalogType(Types.VARCHAR, 4000)};
createSystemProcedureOrFunction(
"SQLUDTS",
sysIBMUUID,
arg_names,
arg_types,
0,
1,
RoutineAliasInfo.READS_SQL_DATA,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
// SYSIBM.SQLFOREIGNKEYS(VARCHAR(128), VARCHAR(128), VARCHAR(128), VARCHAR(128),
// VARCHAR(128), VARCHAR(128), VARCHAR(4000))
{
// procedure argument names
String[] arg_names = {
"PKCATALOGNAME",
"PKSCHEMANAME",
"PKTABLENAME",
"FKCATALOGNAME",
"FKSCHEMANAME",
"FKTABLENAME",
"OPTIONS"};
// procedure argument types
TypeDescriptor[] arg_types = {
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
DataTypeDescriptor.getCatalogType(Types.VARCHAR, 4000)};
createSystemProcedureOrFunction(
"SQLFOREIGNKEYS",
sysIBMUUID,
arg_names,
arg_types,
0,
1,
RoutineAliasInfo.READS_SQL_DATA,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
// SYSIBM.SQLSPECIALCOLUMNS(SMALLINT, VARCHAR(128), VARCHAR(128), VARCHAR(128),
// SMALLINT, SMALLINT, VARCHAR(4000))
{
// procedure argument names
String[] arg_names = {
"COLTYPE",
"CATALOGNAME",
"SCHEMANAME",
"TABLENAME",
"SCOPE",
"NULLABLE",
"OPTIONS"};
// procedure argument types
TypeDescriptor[] arg_types = {
TypeDescriptor.SMALLINT,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
TypeDescriptor.SMALLINT,
TypeDescriptor.SMALLINT,
DataTypeDescriptor.getCatalogType(Types.VARCHAR, 4000)};
createSystemProcedureOrFunction(
"SQLSPECIALCOLUMNS",
sysIBMUUID,
arg_names,
arg_types,
0,
1,
RoutineAliasInfo.READS_SQL_DATA,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
// SYSIBM.SQLGETTYPEINFO(SMALLINT, VARCHAR(4000))
{
// procedure argument names
String[] arg_names = {
"DATATYPE",
"OPTIONS"};
// procedure argument types
TypeDescriptor[] arg_types = {
TypeDescriptor.SMALLINT,
DataTypeDescriptor.getCatalogType(Types.VARCHAR, 4000)};
createSystemProcedureOrFunction(
"SQLGETTYPEINFO",
sysIBMUUID,
arg_names,
arg_types,
0,
1,
RoutineAliasInfo.READS_SQL_DATA,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
// SYSIBM.SQLSTATISTICS(VARCHAR(128), VARCHAR(128), VARCHAR(128),
// SMALLINT, SMALLINT, VARCHAR(4000))
{
// procedure argument names
String[] arg_names = {
"CATALOGNAME",
"SCHEMANAME",
"TABLENAME",
"UNIQUE",
"RESERVED",
"OPTIONS"};
// procedure argument types
TypeDescriptor[] arg_types = {
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
TypeDescriptor.SMALLINT,
TypeDescriptor.SMALLINT,
DataTypeDescriptor.getCatalogType(Types.VARCHAR, 4000)};
createSystemProcedureOrFunction(
"SQLSTATISTICS",
sysIBMUUID,
arg_names,
arg_types,
0,
1,
RoutineAliasInfo.READS_SQL_DATA,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
// void SYSIBM.METADATA()
{
createSystemProcedureOrFunction(
"METADATA",
sysIBMUUID,
null,
null,
0,
1,
RoutineAliasInfo.READS_SQL_DATA,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
}
/**
* Grant PUBLIC access to specific system routines. Currently, this is
* done for some routines in SYSCS_UTIL schema. We grant access to routines
* which we have just added. Doing it this way lets us declare these
* routines in one place and re-use this logic during database creation and
* during upgrade.
*
* @param tc TransactionController to use
* @param authorizationID authorization ID of the permission grantor
* @throws StandardException Standard exception policy.
*/
public void grantPublicAccessToSystemRoutines(HashSet newlyCreatedRoutines, TransactionController tc,
String authorizationID) throws StandardException {
    // All routines granted here live in the SYSCS_UTIL schema.
    String schemaID = getSystemUtilSchemaDescriptor().getUUID().toString();

    // Procedures first; only routines created in this pass are granted.
    for (int i = 0; i < sysUtilProceduresWithPublicAccess.length; i++) {
        String routineName = sysUtilProceduresWithPublicAccess[i];
        if (newlyCreatedRoutines.contains(routineName)) {
            grantPublicAccessToSystemRoutine(
                schemaID,
                routineName,
                AliasInfo.ALIAS_NAME_SPACE_PROCEDURE_AS_CHAR,
                tc, authorizationID);
        }
    }

    // ... then functions, under the function name space.
    for (int i = 0; i < sysUtilFunctionsWithPublicAccess.length; i++) {
        String routineName = sysUtilFunctionsWithPublicAccess[i];
        if (newlyCreatedRoutines.contains(routineName)) {
            grantPublicAccessToSystemRoutine(
                schemaID,
                routineName,
                AliasInfo.ALIAS_NAME_SPACE_FUNCTION_AS_CHAR,
                tc, authorizationID);
        }
    }
}
/**
 * Grant PUBLIC access to a single system routine. This method should be
 * used only for system routines (other than routines in the SYSFUN
 * schema); it expects the routine to be present in the SYSALIASES catalog.
 *
 * @param schemaID Schema ID
 * @param routineName Routine Name
 * @param nameSpace Indicates whether the routine is a function/procedure.
 * @param tc TransactionController to use
 * @param authorizationID authorization ID of the permission grantor
 * @throws StandardException Standard exception policy.
 */
private void grantPublicAccessToSystemRoutine(String schemaID,
        String routineName,
        char nameSpace,
        TransactionController tc,
        String authorizationID)
        throws StandardException {
    AliasDescriptor alias = getAliasDescriptor(schemaID, routineName, nameSpace);

    // When upgrading from 10.1 not every public routine may exist yet;
    // tolerate a missing alias and simply skip the grant.
    if (alias != null) {
        createRoutinePermPublicDescriptor(alias.getUUID(), tc, authorizationID);
    }
}
/**
 * Create a RoutinePermsDescriptor granting PUBLIC access to this system
 * routine, using the grantor specified in authorizationID, and store it
 * in the SYSROUTINEPERMS catalog.
 *
 * @param routineUUID uuid of the routine
 * @param tc TransactionController to use
 * @param authorizationID authorization ID of the permission grantor
 * @throws StandardException Standard exception policy.
 */
void createRoutinePermPublicDescriptor(
        UUID routineUUID,
        TransactionController tc,
        String authorizationID) throws StandardException
{
    RoutinePermsDescriptor publicPerm = new RoutinePermsDescriptor(
            this, "PUBLIC", authorizationID, routineUUID);

    // Persist the grant; no parent tuple descriptor is involved.
    addDescriptor(publicPerm, null,
            DataDictionary.SYSROUTINEPERMS_CATALOG_NUM, false, tc);
}
/**
 * Create system procedures added in version 10.1.
 * <p>
 * Called either by code creating a new database, or by code doing a
 * hard upgrade from a previous version.
 *
 * @param tc booting transaction
 * @param newlyCreatedRoutines set of routines we are creating (used to add permissions later on)
 * @param sysUtilUUID uuid of the SYSUTIL schema.
 *
 * @exception StandardException Standard exception policy.
 **/
void create_10_1_system_procedures(
    TransactionController tc,
    HashSet newlyCreatedRoutines,
    UUID sysUtilUUID)
    throws StandardException
{
    // void SYSCS_UTIL.SYSCS_INPLACE_COMPRESS_TABLE(
    //     IN SCHEMANAME       VARCHAR(128),
    //     IN TABLENAME        VARCHAR(128),
    //     IN PURGE_ROWS       SMALLINT,
    //     IN DEFRAGMENT_ROWS  SMALLINT,
    //     IN TRUNCATE_END     SMALLINT
    // )
    {
        // procedure argument names
        String[] arg_names = {
            "SCHEMANAME",
            "TABLENAME",
            "PURGE_ROWS",
            "DEFRAGMENT_ROWS",
            "TRUNCATE_END"};

        // procedure argument types
        TypeDescriptor[] arg_types = {
            CATALOG_TYPE_SYSTEM_IDENTIFIER,
            CATALOG_TYPE_SYSTEM_IDENTIFIER,
            TypeDescriptor.SMALLINT,
            TypeDescriptor.SMALLINT,
            TypeDescriptor.SMALLINT
        };

        // The routine UUID returned by the call is not needed here, so
        // the result is deliberately discarded (the original kept it in
        // an unused local).
        createSystemProcedureOrFunction(
            "SYSCS_INPLACE_COMPRESS_TABLE",
            sysUtilUUID,
            arg_names,
            arg_types,
            0,
            0,
            RoutineAliasInfo.MODIFIES_SQL_DATA,
            false,
            (TypeDescriptor) null,
            newlyCreatedRoutines,
            tc);
    }
}
/**
 * Create system procedures added in version 10.2.
 * <p>
 * Called either by code creating a new database, or by code doing a
 * hard upgrade from a previous version.
 *
 * @param tc booting transaction
 * @param newlyCreatedRoutines set of routines we are creating (used to add permissions later on)
 * @param sysUtilUUID uuid of the SYSUTIL schema.
 *
 * @exception StandardException Standard exception policy.
 **/
void create_10_2_system_procedures(
    TransactionController tc,
    HashSet newlyCreatedRoutines,
    UUID sysUtilUUID)
    throws StandardException
{
    // UUID of the SYSIBM schema, used by the two metadata routines
    // below; fetched once instead of once per routine.
    UUID sysIBMUUID = getSysIBMSchemaDescriptor().getUUID();

    // void SYSCS_UTIL.SYSCS_BACKUP_DATABASE_NOWAIT(
    //     IN BACKUPDIR VARCHAR(Limits.DB2_VARCHAR_MAXWIDTH)
    // )
    {
        // procedure argument names
        String[] arg_names = {"BACKUPDIR"};

        // procedure argument types
        TypeDescriptor[] arg_types = {
            DataTypeDescriptor.getCatalogType(
                Types.VARCHAR, Limits.DB2_VARCHAR_MAXWIDTH)
        };

        createSystemProcedureOrFunction(
            "SYSCS_BACKUP_DATABASE_NOWAIT",
            sysUtilUUID,
            arg_names,
            arg_types,
            0,
            0,
            RoutineAliasInfo.MODIFIES_SQL_DATA,
            false,
            (TypeDescriptor) null,
            newlyCreatedRoutines,
            tc);
    }

    // void
    // SYSCS_UTIL.SYSCS_BACKUP_DATABASE_AND_ENABLE_LOG_ARCHIVE_MODE_NOWAIT(
    //     IN BACKUPDIR VARCHAR(Limits.DB2_VARCHAR_MAXWIDTH),
    //     IN DELETE_ARCHIVED_LOG_FILES SMALLINT
    // )
    {
        // procedure argument names
        String[] arg_names =
            {"BACKUPDIR", "DELETE_ARCHIVED_LOG_FILES"};

        // procedure argument types
        TypeDescriptor[] arg_types = {
            DataTypeDescriptor.getCatalogType(
                Types.VARCHAR, Limits.DB2_VARCHAR_MAXWIDTH),
            TypeDescriptor.SMALLINT
        };

        createSystemProcedureOrFunction(
            "SYSCS_BACKUP_DATABASE_AND_ENABLE_LOG_ARCHIVE_MODE_NOWAIT",
            sysUtilUUID,
            arg_names,
            arg_types,
            0,
            0,
            RoutineAliasInfo.MODIFIES_SQL_DATA,
            false,
            (TypeDescriptor) null,
            newlyCreatedRoutines,
            tc);
    }

    // SYSIBM.SQLFUNCTIONS(VARCHAR(128), VARCHAR(128), VARCHAR(128),
    //     VARCHAR(4000))
    {
        // procedure argument names
        String[] arg_names = {
            "CATALOGNAME",
            "SCHEMANAME",
            "FUNCNAME",
            "OPTIONS"};

        // procedure argument types
        TypeDescriptor[] arg_types = {
            CATALOG_TYPE_SYSTEM_IDENTIFIER,
            CATALOG_TYPE_SYSTEM_IDENTIFIER,
            CATALOG_TYPE_SYSTEM_IDENTIFIER,
            DataTypeDescriptor.getCatalogType(Types.VARCHAR, 4000)};

        createSystemProcedureOrFunction(
            "SQLFUNCTIONS",
            sysIBMUUID,
            arg_names,
            arg_types,
            0,
            1,
            RoutineAliasInfo.READS_SQL_DATA,
            false,
            (TypeDescriptor) null,
            newlyCreatedRoutines,
            tc);
    }

    // SYSIBM.SQLFUNCTIONPARAMS(VARCHAR(128), VARCHAR(128),
    //     VARCHAR(128), VARCHAR(128), VARCHAR(4000))
    {
        // procedure argument names
        String[] arg_names = {
            "CATALOGNAME",
            "SCHEMANAME",
            "FUNCNAME",
            "PARAMNAME",
            "OPTIONS"};

        // procedure argument types
        TypeDescriptor[] arg_types = {
            CATALOG_TYPE_SYSTEM_IDENTIFIER,
            CATALOG_TYPE_SYSTEM_IDENTIFIER,
            CATALOG_TYPE_SYSTEM_IDENTIFIER,
            CATALOG_TYPE_SYSTEM_IDENTIFIER,
            DataTypeDescriptor.getCatalogType(Types.VARCHAR, 4000)};

        createSystemProcedureOrFunction(
            "SQLFUNCTIONPARAMS",
            sysIBMUUID,
            arg_names,
            arg_types,
            0,
            1,
            RoutineAliasInfo.READS_SQL_DATA,
            false,
            (TypeDescriptor) null,
            newlyCreatedRoutines,
            tc);
    }
}
/**
* Create system procedures added in version 10.3.
* Create 10.3 system procedures related to the LOB Methods ,
* called by either code creating new
* database, or code doing hard upgrade from previous version.
*
* @param tc an instance of the TransactionController class.
* @param newlyCreatedRoutines set of routines we are creating (used to add permissions later on)
*
* @throws StandardException Standard exception policy.
**/
private void create_10_3_system_procedures_SYSIBM(
TransactionController tc,
HashSet newlyCreatedRoutines )
throws StandardException {
//create 10.3 functions used by LOB methods.
UUID schema_uuid = getSysIBMSchemaDescriptor().getUUID();
{
UUID routine_uuid = null;
String[] arg_names = null;
TypeDescriptor[] arg_types = null;
routine_uuid = createSystemProcedureOrFunction(
"CLOBCREATELOCATOR",
schema_uuid,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.CONTAINS_SQL,
false,
TypeDescriptor.INTEGER,
newlyCreatedRoutines,
tc,
"org.apache.derby.impl.jdbc.LOBStoredProcedure");
}
{
UUID routine_uuid = null;
String[] arg_names = {"LOCATOR"};
TypeDescriptor[] arg_types = {TypeDescriptor.INTEGER};
routine_uuid = createSystemProcedureOrFunction(
"CLOBRELEASELOCATOR",
schema_uuid,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.CONTAINS_SQL,
false,
null,
newlyCreatedRoutines,
tc,
"org.apache.derby.impl.jdbc.LOBStoredProcedure");
}
{
UUID routine_uuid = null;
String[] arg_names = {"LOCATOR","SEARCHSTR","POS"};
// procedure argument types
TypeDescriptor[] arg_types = {
TypeDescriptor.INTEGER,
DataTypeDescriptor.getCatalogType(
Types.VARCHAR),
DataTypeDescriptor.getCatalogType(
Types.BIGINT)
};
routine_uuid = createSystemProcedureOrFunction(
"CLOBGETPOSITIONFROMSTRING",
schema_uuid,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.CONTAINS_SQL,
false,
DataTypeDescriptor.getCatalogType(
Types.BIGINT),
newlyCreatedRoutines,
tc,
"org.apache.derby.impl.jdbc.LOBStoredProcedure");
}
{
UUID routine_uuid = null;
String[] arg_names = {"LOCATOR","SEARCHLOCATOR","POS"};
// procedure argument types
TypeDescriptor[] arg_types = {
TypeDescriptor.INTEGER,
TypeDescriptor.INTEGER,
DataTypeDescriptor.getCatalogType(
Types.BIGINT)
};
routine_uuid = createSystemProcedureOrFunction(
"CLOBGETPOSITIONFROMLOCATOR",
schema_uuid,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.CONTAINS_SQL,
false,
DataTypeDescriptor.getCatalogType(
Types.BIGINT),
newlyCreatedRoutines,
tc,
"org.apache.derby.impl.jdbc.LOBStoredProcedure");
}
{
UUID routine_uuid = null;
String[] arg_names = {"LOCATOR"};
// procedure argument types
TypeDescriptor[] arg_types = {TypeDescriptor.INTEGER};
routine_uuid = createSystemProcedureOrFunction(
"CLOBGETLENGTH",
schema_uuid,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.CONTAINS_SQL,
false,
DataTypeDescriptor.getCatalogType(
Types.BIGINT),
newlyCreatedRoutines,
tc,
"org.apache.derby.impl.jdbc.LOBStoredProcedure");
}
{
UUID routine_uuid = null;
String[] arg_names = {"LOCATOR","POS","LEN"};
// procedure argument types
TypeDescriptor[] arg_types = {
TypeDescriptor.INTEGER,
DataTypeDescriptor.getCatalogType(
Types.BIGINT),
TypeDescriptor.INTEGER
};
routine_uuid = createSystemProcedureOrFunction(
"CLOBGETSUBSTRING",
schema_uuid,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.CONTAINS_SQL,
false,
DataTypeDescriptor.getCatalogType(
Types.VARCHAR,
LOBStoredProcedure.MAX_CLOB_RETURN_LEN),
newlyCreatedRoutines,
tc,
"org.apache.derby.impl.jdbc.LOBStoredProcedure");
}
{
UUID routine_uuid = null;
String[] arg_names = {"LOCATOR","POS","LEN","REPLACESTR"};
// procedure argument types
TypeDescriptor[] arg_types = {
TypeDescriptor.INTEGER,
DataTypeDescriptor.getCatalogType(
Types.BIGINT),
TypeDescriptor.INTEGER,
DataTypeDescriptor.getCatalogType(
Types.VARCHAR)
};
routine_uuid = createSystemProcedureOrFunction(
"CLOBSETSTRING",
schema_uuid,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.CONTAINS_SQL,
false,
null,
newlyCreatedRoutines,
tc,
"org.apache.derby.impl.jdbc.LOBStoredProcedure");
}
{
UUID routine_uuid = null;
String[] arg_names = {"LOCATOR","LEN"};
// procedure argument types
TypeDescriptor[] arg_types = {
TypeDescriptor.INTEGER,
DataTypeDescriptor.getCatalogType(
Types.BIGINT)
};
routine_uuid = createSystemProcedureOrFunction(
"CLOBTRUNCATE",
schema_uuid,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.CONTAINS_SQL,
false,
null,
newlyCreatedRoutines,
tc,
"org.apache.derby.impl.jdbc.LOBStoredProcedure");
}
//Now create the Stored procedures required for BLOB
{
UUID routine_uuid = null;
String[] arg_names = null;
TypeDescriptor[] arg_types = null;
routine_uuid = createSystemProcedureOrFunction(
"BLOBCREATELOCATOR",
schema_uuid,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.CONTAINS_SQL,
false,
TypeDescriptor.INTEGER,
newlyCreatedRoutines,
tc,
"org.apache.derby.impl.jdbc.LOBStoredProcedure");
}
{
UUID routine_uuid = null;
String[] arg_names = {"LOCATOR"};
TypeDescriptor[] arg_types = {TypeDescriptor.INTEGER};
routine_uuid = createSystemProcedureOrFunction(
"BLOBRELEASELOCATOR",
schema_uuid,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.CONTAINS_SQL,
false,
null,
newlyCreatedRoutines,
tc,
"org.apache.derby.impl.jdbc.LOBStoredProcedure");
}
{
UUID routine_uuid = null;
String[] arg_names = {"LOCATOR","SEARCHBYTES","POS"};
// procedure argument types
TypeDescriptor[] arg_types = {
TypeDescriptor.INTEGER,
DataTypeDescriptor.getCatalogType(
Types.VARBINARY),
DataTypeDescriptor.getCatalogType(
Types.BIGINT)
};
routine_uuid = createSystemProcedureOrFunction(
"BLOBGETPOSITIONFROMBYTES",
schema_uuid,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.CONTAINS_SQL,
false,
DataTypeDescriptor.getCatalogType(
Types.BIGINT),
newlyCreatedRoutines,
tc,
"org.apache.derby.impl.jdbc.LOBStoredProcedure");
}
{
UUID routine_uuid = null;
String[] arg_names = {"LOCATOR","SEARCHLOCATOR","POS"};
// procedure argument types
TypeDescriptor[] arg_types = {
TypeDescriptor.INTEGER,
TypeDescriptor.INTEGER,
DataTypeDescriptor.getCatalogType(
Types.BIGINT)
};
routine_uuid = createSystemProcedureOrFunction(
"BLOBGETPOSITIONFROMLOCATOR",
schema_uuid,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.CONTAINS_SQL,
false,
DataTypeDescriptor.getCatalogType(
Types.BIGINT),
newlyCreatedRoutines,
tc,
"org.apache.derby.impl.jdbc.LOBStoredProcedure");
}
{
UUID routine_uuid = null;
String[] arg_names = {"LOCATOR"};
// procedure argument types
TypeDescriptor[] arg_types = {
TypeDescriptor.INTEGER
};
routine_uuid = createSystemProcedureOrFunction(
"BLOBGETLENGTH",
schema_uuid,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.CONTAINS_SQL,
false,
DataTypeDescriptor.getCatalogType(
Types.BIGINT),
newlyCreatedRoutines,
tc,
"org.apache.derby.impl.jdbc.LOBStoredProcedure");
}
{
UUID routine_uuid = null;
String[] arg_names = {"LOCATOR","POS","LEN"};
// procedure argument types
TypeDescriptor[] arg_types = {
TypeDescriptor.INTEGER,
DataTypeDescriptor.getCatalogType(
Types.BIGINT),
TypeDescriptor.INTEGER
};
routine_uuid = createSystemProcedureOrFunction(
"BLOBGETBYTES",
schema_uuid,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.CONTAINS_SQL,
false,
DataTypeDescriptor.getCatalogType(
Types.VARBINARY,
LOBStoredProcedure.MAX_BLOB_RETURN_LEN),
newlyCreatedRoutines,
tc,
"org.apache.derby.impl.jdbc.LOBStoredProcedure");
}
{
UUID routine_uuid = null;
String[] arg_names = {"LOCATOR","POS","LEN","REPLACEBYTES"};
// procedure argument types
TypeDescriptor[] arg_types = {
TypeDescriptor.INTEGER,
DataTypeDescriptor.getCatalogType(
Types.BIGINT),
TypeDescriptor.INTEGER,
DataTypeDescriptor.getCatalogType(
Types.VARBINARY)
};
routine_uuid = createSystemProcedureOrFunction(
"BLOBSETBYTES",
schema_uuid,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.CONTAINS_SQL,
false,
null,
newlyCreatedRoutines,
tc,
"org.apache.derby.impl.jdbc.LOBStoredProcedure");
}
{
UUID routine_uuid = null;
String[] arg_names = {"LOCATOR","LEN"};
// procedure argument types
TypeDescriptor[] arg_types = {
TypeDescriptor.INTEGER,
DataTypeDescriptor.getCatalogType(
Types.BIGINT)
};
routine_uuid = createSystemProcedureOrFunction(
"BLOBTRUNCATE",
schema_uuid,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.CONTAINS_SQL,
false,
null,
newlyCreatedRoutines,
tc,
"org.apache.derby.impl.jdbc.LOBStoredProcedure");
}
}
/**
 * Create the System procedures that are added in 10.5.
 *
 * @param tc an instance of the TransactionController.
 * @param newlyCreatedRoutines set of routines we are creating (used to add permissions later on)
 * @throws StandardException Standard exception policy.
 */
void create_10_5_system_procedures(TransactionController tc, HashSet newlyCreatedRoutines )
    throws StandardException
{
    // Create the procedures in the SYSCS_UTIL schema.
    UUID sysUtilUUID = getSystemUtilSchemaDescriptor().getUUID();

    // void SYSCS_UTIL.SYSCS_UPDATE_STATISTICS(varchar(128), varchar(128), varchar(128))
    {
        // procedure argument names
        String[] arg_names = {"SCHEMANAME", "TABLENAME", "INDEXNAME"};

        // procedure argument types
        TypeDescriptor[] arg_types = {
            CATALOG_TYPE_SYSTEM_IDENTIFIER,
            CATALOG_TYPE_SYSTEM_IDENTIFIER,
            CATALOG_TYPE_SYSTEM_IDENTIFIER
        };

        // The returned routine UUID is not needed, so the result is
        // deliberately discarded (the original kept it in an unused
        // local).
        createSystemProcedureOrFunction(
            "SYSCS_UPDATE_STATISTICS",
            sysUtilUUID,
            arg_names,
            arg_types,
            0,
            0,
            RoutineAliasInfo.MODIFIES_SQL_DATA,
            false,
            (TypeDescriptor) null,
            newlyCreatedRoutines,
            tc);
    }
}
/**
* Create the System procedures that are added to 10.6
* (the XPLAIN statistics-capture mode and schema getters/setters).
*
* @param tc an instance of the TransactionController.
* @param newlyCreatedRoutines set of routines we are creating (used to add permissions later on)
* @throws StandardException Standard exception policy.
*/
void create_10_6_system_procedures(TransactionController tc,
HashSet newlyCreatedRoutines)
throws StandardException
{
// Create the procedures in the SYSCS_UTIL schema.
UUID sysUtilUUID = getSystemUtilSchemaDescriptor().getUUID();
// void SYSCS_UTIL.SYSCS_SET_XPLAIN_MODE(INT ENABLE)
// NOTE: the argument really is INTEGER (see arg_types below); an older
// comment here claimed SMALLINT.
{
// procedure argument names
String[] arg_names = {"ENABLE"};
// procedure argument types
TypeDescriptor[] arg_types = {
TypeDescriptor.INTEGER,
};
createSystemProcedureOrFunction(
"SYSCS_SET_XPLAIN_MODE",
sysUtilUUID,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.CONTAINS_SQL,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
// INTEGER SYSCS_UTIL.SYSCS_GET_XPLAIN_MODE()
// (returns TypeDescriptor.INTEGER, not SMALLINT as previously commented)
{
createSystemProcedureOrFunction(
"SYSCS_GET_XPLAIN_MODE",
sysUtilUUID,
null,
null,
0,
0,
RoutineAliasInfo.READS_SQL_DATA,
false,
TypeDescriptor.INTEGER,
newlyCreatedRoutines,
tc);
}
// void SYSCS_UTIL.SYSCS_SET_XPLAIN_SCHEMA(String schemaName)
{
// procedure argument names
String[] arg_names = {"SCHEMANAME"};
// procedure argument types
TypeDescriptor[] arg_types = {
CATALOG_TYPE_SYSTEM_IDENTIFIER,
};
createSystemProcedureOrFunction(
"SYSCS_SET_XPLAIN_SCHEMA",
sysUtilUUID,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.MODIFIES_SQL_DATA,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
// STRING SYSCS_UTIL.SYSCS_GET_XPLAIN_SCHEMA()
{
createSystemProcedureOrFunction(
"SYSCS_GET_XPLAIN_SCHEMA",
sysUtilUUID,
null,
null,
0,
0,
RoutineAliasInfo.READS_SQL_DATA,
false,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
newlyCreatedRoutines,
tc);
}
}
/**
* Create the System procedures that are added in 10.3.
* Delegates to the SYSCS_UTIL and SYSIBM helpers; both sets are needed
* for a complete 10.3 catalog.
*
* @param tc an instance of the TransactionController.
* @param newlyCreatedRoutines set of routines we are creating (used to add permissions later on)
* @throws StandardException Standard exception policy.
*/
void create_10_3_system_procedures(TransactionController tc, HashSet newlyCreatedRoutines )
throws StandardException {
// Create the procedures in the SYSCS_UTIL schema.
create_10_3_system_procedures_SYSCS_UTIL(tc, newlyCreatedRoutines );
//create the procedures in the SYSIBM schema
create_10_3_system_procedures_SYSIBM(tc, newlyCreatedRoutines );
}
/**
* Create system procedures that are part of the
* SYSCS_UTIL schema added in version 10.3.
* <p>
* Create 10.3 system procedures, called by either code creating new
* database, or code doing hard upgrade from previous version.
* <p>
*
* @param tc an instance of the Transaction Controller.
* @param newlyCreatedRoutines set of routines we are creating (used to add permissions later on)
* @exception StandardException Standard exception policy.
**/
void create_10_3_system_procedures_SYSCS_UTIL( TransactionController tc, HashSet newlyCreatedRoutines )
throws StandardException
{
UUID sysUtilUUID = getSystemUtilSchemaDescriptor().getUUID();
/* SYSCS_EXPORT_TABLE_LOBS_TO_EXTFILE(IN SCHEMANAME VARCHAR(128),
* IN TABLENAME VARCHAR(128), IN FILENAME VARCHAR(32672) ,
* IN COLUMNDELIMITER CHAR(1), IN CHARACTERDELIMITER CHAR(1) ,
* IN CODESET VARCHAR(128), IN LOBSFILENAME VARCHAR(32672) )
*/
{
// procedure argument names
// NOTE(review): " columnDelimiter" below carries a stray leading
// space inside the quoted literal. It looks like a typo, but it is
// part of the published routine signature stored in SYSALIASES —
// do not change it without checking upgrade/compatibility impact.
String[] arg_names = {"schemaName", "tableName" ,
"fileName"," columnDelimiter",
"characterDelimiter", "codeset",
"lobsFileName"};
// procedure argument types
TypeDescriptor[] arg_types = {
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
DataTypeDescriptor.getCatalogType(
Types.VARCHAR, 32672),
DataTypeDescriptor.getCatalogType(
Types.CHAR, 1),
DataTypeDescriptor.getCatalogType(
Types.CHAR, 1),
CATALOG_TYPE_SYSTEM_IDENTIFIER,
DataTypeDescriptor.getCatalogType(
Types.VARCHAR, 32672)
};
createSystemProcedureOrFunction(
"SYSCS_EXPORT_TABLE_LOBS_TO_EXTFILE",
sysUtilUUID,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.READS_SQL_DATA,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
/* SYSCS_EXPORT_QUERY_LOBS_TO_EXTFILE(
* IN SELECTSTATEMENT VARCHAR(32672),
* IN FILENAME VARCHAR(32672) ,
* IN COLUMNDELIMITER CHAR(1), IN CHARACTERDELIMITER CHAR(1) ,
* IN CODESET VARCHAR(128), IN LOBSFILENAME VARCHAR(32672))
*/
{
// procedure argument names
// NOTE(review): " columnDelimiter" again carries a stray leading
// space — kept as-is, see the note on the export-table routine.
String[] arg_names = {"selectStatement", "fileName",
" columnDelimiter", "characterDelimiter",
"codeset", "lobsFileName"};
// procedure argument types
TypeDescriptor[] arg_types = {
DataTypeDescriptor.getCatalogType(
Types.VARCHAR, 32672),
DataTypeDescriptor.getCatalogType(
Types.VARCHAR, 32672),
DataTypeDescriptor.getCatalogType(
Types.CHAR, 1),
DataTypeDescriptor.getCatalogType(
Types.CHAR, 1),
CATALOG_TYPE_SYSTEM_IDENTIFIER,
DataTypeDescriptor.getCatalogType(
Types.VARCHAR, 32672)
};
createSystemProcedureOrFunction(
"SYSCS_EXPORT_QUERY_LOBS_TO_EXTFILE",
sysUtilUUID,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.READS_SQL_DATA,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
/* SYSCS_IMPORT_TABLE_LOBS_FROM_EXTFILE(IN SCHEMANAME VARCHAR(128),
* IN TABLENAME VARCHAR(128), IN FILENAME VARCHAR(32672),
* IN COLUMNDELIMITER CHAR(1), IN CHARACTERDELIMITER CHAR(1),
* IN CODESET VARCHAR(128) , IN REPLACE SMALLINT)
*/
{
// procedure argument names
// NOTE(review): stray leading space in " columnDelimiter" — kept.
String[] arg_names = {"schemaName", "tableName", "fileName",
" columnDelimiter", "characterDelimiter",
"codeset", "replace"};
// procedure argument types
TypeDescriptor[] arg_types = {
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
DataTypeDescriptor.getCatalogType(
Types.VARCHAR, 32672),
DataTypeDescriptor.getCatalogType(
Types.CHAR, 1),
DataTypeDescriptor.getCatalogType(
Types.CHAR, 1),
CATALOG_TYPE_SYSTEM_IDENTIFIER,
TypeDescriptor.SMALLINT,
};
createSystemProcedureOrFunction(
"SYSCS_IMPORT_TABLE_LOBS_FROM_EXTFILE",
sysUtilUUID,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.MODIFIES_SQL_DATA,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
/* SYSCS_IMPORT_DATA_LOBS_FROM_EXTFILE(IN SCHEMANAME VARCHAR(128),
* IN TABLENAME VARCHAR(128), IN INSERTCOLUMNLIST VARCHAR(32672),
* IN COLUMNINDEXES VARCHAR(32672), IN FILENAME VARCHAR(32672),
* IN COLUMNDELIMITER CHAR(1), IN CHARACTERDELIMITER CHAR(1),
* IN CODESET VARCHAR(128) , IN REPLACE SMALLINT)
*/
{
// procedure argument names
// NOTE(review): stray leading space in " columnDelimiter" — kept.
String[] arg_names = {"schemaName", "tableName",
"insertColumnList","columnIndexes",
"fileName", " columnDelimiter",
"characterDelimiter",
"codeset", "replace"};
// procedure argument types
TypeDescriptor[] arg_types = {
CATALOG_TYPE_SYSTEM_IDENTIFIER,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
DataTypeDescriptor.getCatalogType(
Types.VARCHAR, 32672),
DataTypeDescriptor.getCatalogType(
Types.VARCHAR, 32672),
DataTypeDescriptor.getCatalogType(
Types.VARCHAR, 32672),
DataTypeDescriptor.getCatalogType(
Types.CHAR, 1),
DataTypeDescriptor.getCatalogType(
Types.CHAR, 1),
CATALOG_TYPE_SYSTEM_IDENTIFIER,
TypeDescriptor.SMALLINT,
};
createSystemProcedureOrFunction(
"SYSCS_IMPORT_DATA_LOBS_FROM_EXTFILE",
sysUtilUUID,
arg_names,
arg_types,
0,
0,
RoutineAliasInfo.MODIFIES_SQL_DATA,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
// void SYSCS_UTIL.SYSCS_RELOAD_SECURITY_POLICY()
{
createSystemProcedureOrFunction(
"SYSCS_RELOAD_SECURITY_POLICY",
sysUtilUUID,
null,
null,
0,
0,
RoutineAliasInfo.NO_SQL,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
// void SYSCS_UTIL.SYSCS_SET_USER_ACCESS(USER_NAME VARCHAR(128),
// CONNECTION_PERMISSION VARCHAR(128))
{
TypeDescriptor[] arg_types = {CATALOG_TYPE_SYSTEM_IDENTIFIER, CATALOG_TYPE_SYSTEM_IDENTIFIER};
createSystemProcedureOrFunction(
"SYSCS_SET_USER_ACCESS",
sysUtilUUID,
new String[] {"USERNAME", "CONNECTIONPERMISSION"},
arg_types,
0,
0,
RoutineAliasInfo.MODIFIES_SQL_DATA,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
// VARCHAR(128) SYSCS_UTIL.SYSCS_GET_USER_ACCESS(USER_NAME VARCHAR(128))
// (an older comment here said SET_USER_ACCESS; the routine created
// below is the getter)
{
TypeDescriptor[] arg_types = { CATALOG_TYPE_SYSTEM_IDENTIFIER };
createSystemProcedureOrFunction(
"SYSCS_GET_USER_ACCESS",
sysUtilUUID,
new String[] {"USERNAME"},
arg_types,
0,
0,
RoutineAliasInfo.READS_SQL_DATA,
false,
CATALOG_TYPE_SYSTEM_IDENTIFIER,
newlyCreatedRoutines,
tc);
}
// void SYSCS_UTIL.SYSCS_EMPTY_STATEMENT_CACHE()
{
createSystemProcedureOrFunction(
"SYSCS_EMPTY_STATEMENT_CACHE",
sysUtilUUID,
(String[]) null,
(TypeDescriptor[]) null,
0,
0,
RoutineAliasInfo.NO_SQL,
false,
(TypeDescriptor) null,
newlyCreatedRoutines,
tc);
}
}
/*
** Priv block code to load net work server meta data queries.
*/
// Resource name consumed by run() inside the privileged block below.
// Written only by getQueryDescriptions(), which is synchronized, so the
// field hand-off to run() happens under the same lock holder's call.
private String spsSet;
/**
 * Load the properties resource holding the metadata query text.
 * This object doubles as the PrivilegedAction: doPrivileged() invokes
 * run(), which reads {@link #spsSet} set just above.
 *
 * @param net true for the network-server query set, false for embedded
 * @return the loaded query descriptions
 */
private final synchronized Properties getQueryDescriptions(boolean net) {
spsSet = net ? "metadata_net.properties" : "/org/apache/derby/impl/jdbc/metadata.properties";
return (Properties) java.security.AccessController.doPrivileged(this);
}
/**
 * PrivilegedAction body: load the metadata query descriptions named by
 * {@code spsSet} (set by getQueryDescriptions() just before
 * doPrivileged() is called). Loading is best-effort: on any I/O problem
 * the (possibly empty) property set collected so far is returned.
 *
 * @return a {@code Properties} with the query descriptions, possibly empty
 */
public final Object run() {
    // SECURITY PERMISSION - IP3
    Properties p = new Properties();
    try {
        // SECURITY PERMISSION - IP3
        InputStream is = getClass().getResourceAsStream(spsSet);
        // Guard against a missing resource: the original passed null to
        // Properties.load(), which throws NullPointerException.
        if (is != null) {
            try {
                p.load(is);
            } finally {
                // Always release the stream, even if load() fails;
                // previously it leaked on error.
                is.close();
            }
        }
    } catch (IOException ignored) {
        // Deliberately best-effort: fall through and return whatever
        // was loaded (possibly nothing).
    }
    return p;
}
/**
 * Build an empty, thread-safe list backed by a linked list.
 *
 * @return a synchronized wrapper around a fresh LinkedList
 */
private static List newSList() {
    java.util.List backing = new java.util.LinkedList();
    return java.util.Collections.synchronizedList(backing);
}
/**
 * Get one user's privileges on a table.
 *
 * @param tableUUID UUID of the table
 * @param authorizationId The user name
 *
 * @return a TablePermsDescriptor or null if the user has no permissions on the table.
 *
 * @exception StandardException
 */
public TablePermsDescriptor getTablePermissions( UUID tableUUID, String authorizationId)
throws StandardException
{
    // Probe the permissions cache with a key carrying no grantor name.
    TablePermsDescriptor probe =
        new TablePermsDescriptor(this, authorizationId, (String) null, tableUUID);
    return (TablePermsDescriptor) getPermissions(probe);
} // end of getTablePermissions
/* @see org.apache.derby.iapi.sql.dictionary.DataDictionary#getTablePermissions */
public TablePermsDescriptor getTablePermissions( UUID tablePermsUUID)
throws StandardException
{
    // Bypass the permissions cache: read the descriptor straight from
    // the catalog by its own UUID.
    return getUncachedTablePermsDescriptor(
        new TablePermsDescriptor(this, tablePermsUUID));
}
/**
 * Probe the permissions cache for the descriptor matching {@code key}.
 *
 * @param key probe descriptor identifying the permission row
 * @return the cached permissions object, or null when absent
 * @exception StandardException standard exception policy
 */
private Object getPermissions( PermissionsDescriptor key) throws StandardException
{
    // RESOLVE get a READ COMMITTED (shared) lock on the permission row
    Cacheable hit = getPermissionsCache().find(key);
    if (hit == null) {
        return null;
    }
    Object descriptor = hit.getIdentity();
    // Release the cache entry before handing the identity to the caller.
    getPermissionsCache().release(hit);
    return descriptor;
}
/* @see org.apache.derby.iapi.sql.dictionary.DataDictionary#getColumnPermissions */
public ColPermsDescriptor getColumnPermissions( UUID colPermsUUID)
throws StandardException
{
    // Bypass the permissions cache: read the descriptor straight from
    // the catalog by its own UUID.
    return getUncachedColPermsDescriptor(
        new ColPermsDescriptor(this, colPermsUUID));
}
/**
 * Get one user's column privileges for a table.
 *
 * @param tableUUID
 * @param privType (as int) Authorizer.SELECT_PRIV, Authorizer.UPDATE_PRIV, or Authorizer.REFERENCES_PRIV
 * @param forGrant
 * @param authorizationId The user name
 *
 * @return a ColPermsDescriptor or null if the user has no separate column
 * permissions of the specified type on the table. Note that the user may have been granted
 * permission on all the columns of the table (no column list), in which case this routine
 * will return null. You must also call getTablePermissions to see if the user has permission
 * on a set of columns.
 *
 * @exception StandardException
 */
public ColPermsDescriptor getColumnPermissions( UUID tableUUID,
int privType,
boolean forGrant,
String authorizationId)
throws StandardException
{
    // Translate the int privilege code into its one-character catalog
    // encoding, then defer to the string-based overload.
    String privTypeStr =
        forGrant ? colPrivTypeMapForGrant[privType] : colPrivTypeMap[privType];
    if (SanityManager.DEBUG) {
        SanityManager.ASSERT(privTypeStr != null,
            "Invalid column privilege type: " + privType);
    }
    return getColumnPermissions(tableUUID, privTypeStr, forGrant, authorizationId);
} // end of getColumnPermissions
/**
 * Get one user's column privileges for a table. This routine gets called
 * during revoke privilege processing.
 *
 * @param tableUUID
 * @param privTypeStr one-character privilege encoding (see colPrivTypeMap)
 * @param forGrant unused here; the encoding in privTypeStr already
 *        distinguishes grantable privileges
 * @param authorizationId The user name
 *
 * @return a ColPermsDescriptor or null if the user has no separate column
 * permissions of the specified type on the table. Note that the user may have been granted
 * permission on all the columns of the table (no column list), in which case this routine
 * will return null. You must also call getTablePermissions to see if the user has permission
 * on a set of columns.
 *
 * @exception StandardException
 */
public ColPermsDescriptor getColumnPermissions( UUID tableUUID,
String privTypeStr,
boolean forGrant,
String authorizationId)
throws StandardException
{
    // Probe the permissions cache with a key carrying no grantor name.
    ColPermsDescriptor probe = new ColPermsDescriptor(
        this, authorizationId, (String) null, tableUUID, privTypeStr);
    return (ColPermsDescriptor) getPermissions(probe);
}
// One-character catalog encodings for column privilege types, indexed
// by the Authorizer.*_PRIV constants. The ForGrant variant (upper case)
// is selected when callers pass forGrant == true — presumably marking
// grantable (WITH GRANT OPTION) privileges; confirm against the
// SYSCOLPERMS row format before relying on that.
private static final String[] colPrivTypeMap;
private static final String[] colPrivTypeMapForGrant;
static {
colPrivTypeMap = new String[ Authorizer.PRIV_TYPE_COUNT];
colPrivTypeMapForGrant = new String[ Authorizer.PRIV_TYPE_COUNT];
// s = select (MIN_SELECT_PRIV shares the select encoding),
// u = update, r = references.
colPrivTypeMap[ Authorizer.MIN_SELECT_PRIV] = "s";
colPrivTypeMapForGrant[ Authorizer.MIN_SELECT_PRIV] = "S";
colPrivTypeMap[ Authorizer.SELECT_PRIV] = "s";
colPrivTypeMapForGrant[ Authorizer.SELECT_PRIV] = "S";
colPrivTypeMap[ Authorizer.UPDATE_PRIV] = "u";
colPrivTypeMapForGrant[ Authorizer.UPDATE_PRIV] = "U";
colPrivTypeMap[ Authorizer.REFERENCES_PRIV] = "r";
colPrivTypeMapForGrant[ Authorizer.REFERENCES_PRIV] = "R";
}
/**
 * Get one user's permissions for a routine (function or procedure).
 *
 * @param routineUUID
 * @param authorizationId The user's name
 *
 * @return The descriptor of the users permissions for the routine.
 *
 * @exception StandardException
 */
public RoutinePermsDescriptor getRoutinePermissions( UUID routineUUID, String authorizationId)
throws StandardException
{
    // Probe the permissions cache with a key carrying no grantor name.
    RoutinePermsDescriptor probe =
        new RoutinePermsDescriptor(this, authorizationId, (String) null, routineUUID);
    return (RoutinePermsDescriptor) getPermissions(probe);
} // end of getRoutinePermissions
/* @see org.apache.derby.iapi.sql.dictionary.DataDictionary#getRoutinePermissions */
public RoutinePermsDescriptor getRoutinePermissions( UUID routinePermsUUID)
throws StandardException
{
    // Bypass the permissions cache: read the descriptor straight from
    // the catalog by its own UUID.
    return getUncachedRoutinePermsDescriptor(
        new RoutinePermsDescriptor(this, routinePermsUUID));
}
/**
* Add or remove a permission to/from the permission database.
*
* @param add if true then the permission is added, if false the permission is removed
* @param perm
* @param grantee
* @param tc
*
* @return True means revoke has removed a privilege from system
* table and hence the caller of this method should send invalidation
* actions to PermssionDescriptor's dependents.
*/
public boolean addRemovePermissionsDescriptor( boolean add,
PermissionsDescriptor perm,
String grantee,
TransactionController tc)
throws StandardException
{
int catalogNumber = perm.getCatalogNumber();
// It is possible for grant statements to look like following
// grant execute on function f_abs to mamata2, mamata3;
// grant all privileges on t11 to mamata2, mamata3;
// This means that dd.addRemovePermissionsDescriptor will be called
// twice for TablePermsDescriptor and twice for RoutinePermsDescriptor,
// once for each grantee.
// First it's called for mamta2. When a row is inserted for mamta2
// into the correct system table for the permission descriptor, the
// permission descriptor's uuid gets populated with the uuid of
// the row that just got inserted into the system table for mamta2
// Now, when dd.addRemovePermissionsDescriptor gets called again for
// mamta3, the permission descriptor's uuid will still be set to
// the uuid that was used for mamta2. If we do not reset the
// uuid to null, we will think that there is a duplicate row getting
// inserted for the same uuid. In order to get around this, we should
// reset the UUID of passed PermissionDescriptor everytime this method
// is called. This way, there will be no leftover values from previous
// call of this method.
perm.setUUID(null);
perm.setGrantee( grantee);
// Locate the permission catalog and its row factory; the factory knows
// which index acts as the primary key for this kind of permission row.
TabInfoImpl ti = getNonCoreTI( catalogNumber);
PermissionsCatalogRowFactory rf = (PermissionsCatalogRowFactory) ti.getCatalogRowFactory();
int primaryIndexNumber = rf.getPrimaryKeyIndexNumber();
ConglomerateController heapCC = tc.openConglomerate( ti.getHeapConglomerate(),
false, // do not keep open across commits
0,
TransactionController.MODE_RECORD,
TransactionController.ISOLATION_REPEATABLE_READ);
RowLocation rl = null;
// The conglomerate is opened only long enough to obtain a row location
// template; close it again even if newRowLocationTemplate() throws.
try
{
rl = heapCC.newRowLocationTemplate();
}
finally
{
heapCC.close();
heapCC = null;
}
ExecIndexRow key = rf.buildIndexKeyRow( primaryIndexNumber, perm);
ExecRow existingRow = ti.getRow( tc, key, primaryIndexNumber);
if( existingRow == null)
{
if( ! add)
{
//we didn't find an entry in system catalog and this is revoke
//so that means there is nothing to revoke. Simply return.
//No need to reset permission descriptor's uuid because
//no row was ever found in system catalog for the given
//permission and hence uuid can't be non-null
return false;
}
else
{
//We didn't find an entry in system catalog and this is grant so
//so that means we have to enter a new row in system catalog for
//this grant.
ExecRow row = ti.getCatalogRowFactory().makeRow( perm, (TupleDescriptor) null);
int insertRetCode = ti.insertRow(row, tc);
if( SanityManager.DEBUG)
{
SanityManager.ASSERT( insertRetCode == TabInfoImpl.ROWNOTDUPLICATE,
"Race condition in inserting table privilege.");
}
}
}
else
{
// add/remove these permissions to/from the existing permissions
boolean[] colsChanged = new boolean[ existingRow.nColumns()];
boolean[] indicesToUpdate = new boolean[ rf.getNumIndexes()];
int changedColCount = 0;
if( add)
{
changedColCount = rf.orPermissions( existingRow, perm, colsChanged);
}
else
{
changedColCount = rf.removePermissions( existingRow, perm, colsChanged);
}
if( changedColCount == 0)
{
//grant/revoke privilege didn't change anything and hence
//just return
return false;
}
if (!add)
{
//set the uuid of the passed permission descriptor to
//corresponding rows's uuid in permissions system table. The
//permission descriptor's uuid is required to have the
//dependency manager send the revoke privilege action to
//all the dependent objects on that permission descriptor.
rf.setUUIDOfThePassedDescriptor(existingRow, perm);
}
if( changedColCount < 0)
{
// No permissions left in the current row
ti.deleteRow( tc, key, primaryIndexNumber);
}
else if( changedColCount > 0)
{
// Translate the boolean "column changed" flags into the 1-based
// column number array that updateRow() expects.
int[] colsToUpdate = new int[changedColCount];
changedColCount = 0;
for( int i = 0; i < colsChanged.length; i++)
{
if( colsChanged[i])
colsToUpdate[ changedColCount++] = i + 1;
}
if( SanityManager.DEBUG)
{
SanityManager.ASSERT(
changedColCount == colsToUpdate.length,
"return value of " + rf.getClass().getName() +
".orPermissions does not match the number of booleans it set in colsChanged.");
}
ti.updateRow(key, existingRow, primaryIndexNumber,
indicesToUpdate, colsToUpdate, tc);
}
}
// Remove cached permissions data. The cache may hold permissions data for this key even if
// the row in the permissions table is new. In that case the cache may have an entry indicating no
// permissions
removePermEntryInCache(perm);
//If we are dealing with grant, then the caller does not need to send
//any invalidation actions to anyone and hence return false
if (add)
{
return false;
}
else
{
return true;
}
} // end of addPermissionsDescriptor
/**
* Get a table permissions descriptor from the system tables, without going through the cache.
* This method is called to fill the permissions cache.
*
* @return a TablePermsDescriptor that describes the table permissions granted to the grantee, null
* if no table-level permissions have been granted to him on the table.
*
* @exception StandardException
*/
/**
 * Get a table permissions descriptor from the system tables, bypassing the
 * permissions cache. Called to populate the cache.
 *
 * @return the TablePermsDescriptor granted to the grantee on the table, or
 *         null when no table-level permissions have been granted.
 */
TablePermsDescriptor getUncachedTablePermsDescriptor( TablePermsDescriptor key)
    throws StandardException
{
    // When the TABLEPERMSID is unknown, probe by (grantee, table, grantor);
    // otherwise the TABLEPERMSID index locates the row directly.
    int indexNum = (key.getObjectID() == null)
        ? SYSTABLEPERMSRowFactory.GRANTEE_TABLE_GRANTOR_INDEX_NUM
        : SYSTABLEPERMSRowFactory.TABLEPERMSID_INDEX_NUM;
    return (TablePermsDescriptor)
        getUncachedPermissionsDescriptor( SYSTABLEPERMS_CATALOG_NUM, indexNum, key );
} // end of getUncachedTablePermsDescriptor
/**
* Get a column permissions descriptor from the system tables, without going through the cache.
* This method is called to fill the permissions cache.
*
*
* @return a ColPermsDescriptor that describes the column permissions granted to the grantee, null
* if no column permissions have been granted to him on the table.
*
* @exception StandardException
*/
/**
 * Get a column permissions descriptor from the system tables, bypassing the
 * permissions cache. Called to populate the cache.
 *
 * @return the ColPermsDescriptor granted to the grantee on the table, or
 *         null when no column permissions have been granted.
 */
ColPermsDescriptor getUncachedColPermsDescriptor( ColPermsDescriptor key)
    throws StandardException
{
    // Without a COLPERMSID we must probe by (grantee, table, privilege type,
    // grantor); with one, the COLPERMSID index locates the row directly.
    int indexNum = (key.getObjectID() == null)
        ? SYSCOLPERMSRowFactory.GRANTEE_TABLE_TYPE_GRANTOR_INDEX_NUM
        : SYSCOLPERMSRowFactory.COLPERMSID_INDEX_NUM;
    return (ColPermsDescriptor)
        getUncachedPermissionsDescriptor( SYSCOLPERMS_CATALOG_NUM, indexNum, key );
} // end of getUncachedColPermsDescriptor
/**
 * Shared lookup used by all getUncached*PermsDescriptor methods: turns the
 * probe descriptor into an index key and fetches the matching catalog row.
 */
private TupleDescriptor getUncachedPermissionsDescriptor( int catalogNumber,
                                                          int indexNumber,
                                                          PermissionsDescriptor key)
    throws StandardException
{
    TabInfoImpl catalogInfo = getNonCoreTI( catalogNumber );
    PermissionsCatalogRowFactory factory =
        (PermissionsCatalogRowFactory) catalogInfo.getCatalogRowFactory();
    // The row factory knows how to map the descriptor onto the chosen index.
    ExecIndexRow indexKey = factory.buildIndexKeyRow( indexNumber, key );
    return getDescriptorViaIndex( indexNumber,
                                  indexKey,
                                  (ScanQualifier [][]) null,
                                  catalogInfo,
                                  (TupleDescriptor) null,
                                  (List) null,
                                  false );
} // end of getUncachedPermissionsDescriptor
/**
* Get a routine permissions descriptor from the system tables, without going through the cache.
* This method is called to fill the permissions cache.
*
* @return a RoutinePermsDescriptor that describes the table permissions granted to the grantee, null
* if no table-level permissions have been granted to him on the table.
*
* @exception StandardException
*/
/**
 * Get a routine permissions descriptor from the system tables, bypassing the
 * permissions cache. Called to populate the cache.
 *
 * @return the RoutinePermsDescriptor granted to the grantee, or null when no
 *         permissions have been granted on the routine.
 */
RoutinePermsDescriptor getUncachedRoutinePermsDescriptor( RoutinePermsDescriptor key)
    throws StandardException
{
    // Without a ROUTINEPERMSID, probe by (grantee, aliasid, grantor);
    // with one, the ROUTINEPERMSID index locates the row directly.
    int indexNum = (key.getObjectID() == null)
        ? SYSROUTINEPERMSRowFactory.GRANTEE_ALIAS_GRANTOR_INDEX_NUM
        : SYSROUTINEPERMSRowFactory.ROUTINEPERMSID_INDEX_NUM;
    return (RoutinePermsDescriptor)
        getUncachedPermissionsDescriptor( SYSROUTINEPERMS_CATALOG_NUM, indexNum, key );
} // end of getUncachedRoutinePermsDescriptor
/**
 * Diagnostic VTIs exposed as tables, keyed by table name.
 * Made static final: the mapping is identical for every instance, so there
 * is no reason to allocate (mutable) per-instance copies of constant data.
 */
private static final String[][] DIAG_VTI_TABLE_CLASSES =
{
    {"LOCK_TABLE", "org.apache.derby.diag.LockTable"},
    {"STATEMENT_CACHE", "org.apache.derby.diag.StatementCache"},
    {"TRANSACTION_TABLE", "org.apache.derby.diag.TransactionTable"},
    {"ERROR_MESSAGES", "org.apache.derby.diag.ErrorMessages"},
};
/** Diagnostic VTIs exposed as table functions, keyed by function name. */
private static final String[][] DIAG_VTI_TABLE_FUNCTION_CLASSES =
{
    {"SPACE_TABLE", "org.apache.derby.diag.SpaceTable"},
    {"ERROR_LOG_READER", "org.apache.derby.diag.ErrorLogReader"},
    {"STATEMENT_DURATION", "org.apache.derby.diag.StatementDuration"},
    {"CONTAINED_ROLES", "org.apache.derby.diag.ContainedRoles"},
};
/**
* @see DataDictionary#getVTIClass(TableDescriptor, boolean)
*/
/**
 * @see DataDictionary#getVTIClass(TableDescriptor, boolean)
 */
public String getVTIClass(TableDescriptor td, boolean asTableFunction)
    throws StandardException
{
    String schemaName = td.getSchemaName();
    // Built-in diagnostic schema: delegate to the fixed mapping tables.
    if (SchemaDescriptor.STD_SYSTEM_DIAG_SCHEMA_NAME.equals(schemaName))
    {
        return getBuiltinVTIClass( td, asTableFunction );
    }
    // Otherwise treat the name as a possible user-defined table function.
    String functionName = td.getDescriptorName();
    SchemaDescriptor sd = getSchemaDescriptor( schemaName, null, true );
    if ( sd == null ) { return null; }
    AliasDescriptor ad = getAliasDescriptor( sd.getUUID().toString(), functionName, AliasInfo.ALIAS_TYPE_FUNCTION_AS_CHAR );
    if ( (ad != null) && ad.isTableFunction() ) { return ad.getJavaClassName(); }
    throw StandardException.newException
        ( SQLState.LANG_NOT_TABLE_FUNCTION, schemaName, functionName );
}
/**
* @see DataDictionary#getBuiltinVTIClass(TableDescriptor, boolean)
*/
/**
 * @see DataDictionary#getBuiltinVTIClass(TableDescriptor, boolean)
 */
public String getBuiltinVTIClass(TableDescriptor td, boolean asTableFunction)
    throws StandardException
{
    if (SanityManager.DEBUG)
    {
        if (td.getTableType() != TableDescriptor.VTI_TYPE)
            SanityManager.THROWASSERT("getVTIClass: Invalid table type " + td);
    }
    /* Only the system diagnostic schema hosts built-in VTIs. Note that if no
     * schema was specified then td.getSchemaName() reports the current schema.
     */
    if (!SchemaDescriptor.STD_SYSTEM_DIAG_SCHEMA_NAME.equals(td.getSchemaName()))
    {
        return null;
    }
    String[][] vtiMappings = asTableFunction
        ? DIAG_VTI_TABLE_FUNCTION_CLASSES
        : DIAG_VTI_TABLE_CLASSES;
    // Linear scan is fine here: the mapping tables are tiny.
    for (int i = 0; i < vtiMappings.length; i++)
    {
        if (vtiMappings[i][0].equals(td.getDescriptorName()))
        {
            return vtiMappings[i][1];
        }
    }
    return null;
}
/**
* @see DataDictionary#getRoleGrantDescriptor(UUID)
*/
/**
 * @see DataDictionary#getRoleGrantDescriptor(UUID)
 */
public RoleGrantDescriptor getRoleGrantDescriptor(UUID uuid)
    throws StandardException
{
    TabInfoImpl ti = getNonCoreTI(SYSROLES_CATALOG_NUM);
    // The UUID, rendered as a CHAR value, serves as both the start and stop
    // position of the scan over the UUID index.
    DataValueDescriptor roleIdOrderable = getIDValueAsCHAR(uuid);
    ExecIndexRow keyRow = exFactory.getIndexableRow(1);
    keyRow.setColumn(1, roleIdOrderable);
    return (RoleGrantDescriptor)
        getDescriptorViaIndex(
            SYSROLESRowFactory.SYSROLES_INDEX_UUID_IDX,
            keyRow,
            (ScanQualifier [][]) null,
            ti,
            (TupleDescriptor) null,
            (List) null,
            false);
}
/**
* Get the target role definition by searching for a matching row
* in SYSROLES by rolename where isDef==true. Read only scan.
* Uses index on (rolename, isDef) columns.
*
* @param roleName The name of the role we're interested in.
*
* @return The descriptor (row) for the role
* @exception StandardException Thrown on error
*
* @see DataDictionary#getRoleDefinitionDescriptor
*/
/**
 * Find the definition row for a role: the SYSROLES row with the given
 * rolename whose isDef flag is "Y". Read-only scan over the
 * (rolename, isDef) index.
 *
 * @param roleName The name of the role we're interested in.
 * @return The descriptor (row) for the role
 * @exception StandardException Thrown on error
 * @see DataDictionary#getRoleDefinitionDescriptor
 */
public RoleGrantDescriptor getRoleDefinitionDescriptor(String roleName)
    throws StandardException
{
    TabInfoImpl ti = getNonCoreTI(SYSROLES_CATALOG_NUM);
    // Scan key (roleName, "Y") is used for both start and stop position;
    // isDef = "Y" singles out the definition row among all grant rows.
    ExecIndexRow keyRow = exFactory.getIndexableRow(2);
    keyRow.setColumn(1, new SQLVarchar(roleName));
    keyRow.setColumn(2, new SQLVarchar("Y"));
    return (RoleGrantDescriptor)
        getDescriptorViaIndex(
            SYSROLESRowFactory.SYSROLES_INDEX_ID_DEF_IDX,
            keyRow,
            (ScanQualifier [][]) null,
            ti,
            (TupleDescriptor) null,
            (List) null,
            false);
}
/**
* Get the target role by searching for a matching row
* in SYSROLES by rolename, grantee and grantor. Read only scan.
* Uses index on roleid, grantee and grantor columns.
*
* @param roleName The name of the role we're interested in.
* @param grantee The grantee
* @param grantor The grantor
*
* @return The descriptor for the role grant
*
* @exception StandardException Thrown on error
*
* @see DataDictionary#getRoleGrantDescriptor(String, String, String)
*/
/**
 * Find a role grant row in SYSROLES by (rolename, grantee, grantor).
 * Read-only scan over the corresponding index.
 *
 * @param roleName The name of the role we're interested in.
 * @param grantee The grantee
 * @param grantor The grantor
 * @return The descriptor for the role grant
 * @exception StandardException Thrown on error
 * @see DataDictionary#getRoleGrantDescriptor(String, String, String)
 */
public RoleGrantDescriptor getRoleGrantDescriptor(String roleName,
                                                  String grantee,
                                                  String grantor)
    throws StandardException
{
    TabInfoImpl ti = getNonCoreTI(SYSROLES_CATALOG_NUM);
    // The full (roleName, grantee, grantor) triple is both start and stop
    // position of the scan.
    ExecIndexRow keyRow = exFactory.getIndexableRow(3);
    keyRow.setColumn(1, new SQLVarchar(roleName));
    keyRow.setColumn(2, new SQLVarchar(grantee));
    keyRow.setColumn(3, new SQLVarchar(grantor));
    return (RoleGrantDescriptor)
        getDescriptorViaIndex(
            SYSROLESRowFactory.SYSROLES_INDEX_ID_EE_OR_IDX,
            keyRow,
            (ScanQualifier [][]) null,
            ti,
            (TupleDescriptor) null,
            (List) null,
            false);
}
/**
* Check all dictionary tables and return true if there is any GRANT
* descriptor containing <code>authId</code> as its grantee.
*
* @param authId grantee for which a grant exists or not
* @param tc TransactionController for the transaction
* @return boolean true if such a grant exists
*/
/**
 * Check all permission catalogs and return true if any GRANT row names
 * <code>authId</code> as its grantee.
 *
 * @param authId grantee for which a grant exists or not
 * @param tc TransactionController for the transaction
 * @return boolean true if such a grant exists
 */
public boolean existsGrantToAuthid(String authId,
                                   TransactionController tc)
    throws StandardException {
    // Probe each permission catalog in turn and stop at the first hit;
    // this mirrors the short-circuiting of one combined || expression.
    if (existsPermByGrantee(
            authId,
            tc,
            SYSTABLEPERMS_CATALOG_NUM,
            SYSTABLEPERMSRowFactory.GRANTEE_TABLE_GRANTOR_INDEX_NUM,
            SYSTABLEPERMSRowFactory.GRANTEE_COL_NUM_IN_GRANTEE_TABLE_GRANTOR_INDEX)) {
        return true;
    }
    if (existsPermByGrantee(
            authId,
            tc,
            SYSCOLPERMS_CATALOG_NUM,
            SYSCOLPERMSRowFactory.GRANTEE_TABLE_TYPE_GRANTOR_INDEX_NUM,
            SYSCOLPERMSRowFactory.GRANTEE_COL_NUM_IN_GRANTEE_TABLE_TYPE_GRANTOR_INDEX)) {
        return true;
    }
    if (existsPermByGrantee(
            authId,
            tc,
            SYSROUTINEPERMS_CATALOG_NUM,
            SYSROUTINEPERMSRowFactory.GRANTEE_ALIAS_GRANTOR_INDEX_NUM,
            SYSROUTINEPERMSRowFactory.GRANTEE_COL_NUM_IN_GRANTEE_ALIAS_GRANTOR_INDEX)) {
        return true;
    }
    // Finally, check role grants.
    return existsRoleGrantByGrantee(authId, tc);
}
/**
* Remove metadata stored prepared statements.
* @param tc the xact
*
*
*/
/**
 * Remove metadata stored prepared statements: every SPS that lives in a
 * system schema is dropped together with its stored dependencies.
 *
 * @param tc the xact
 */
private void dropJDBCMetadataSPSes(TransactionController tc) throws StandardException
{
    java.util.Iterator it = getAllSPSDescriptors().iterator();
    while (it.hasNext())
    {
        SPSDescriptor spsd = (SPSDescriptor) it.next();
        // Only statements in system schemas are metadata SPSes; leave
        // user-schema statements untouched.
        if (spsd.getSchemaDescriptor().isSystemSchema())
        {
            dropSPSDescriptor(spsd, tc);
            dropDependentsStoredDependencies(spsd.getUUID(), tc);
        }
    }
}
/**
* Drop and recreate metadata stored prepared statements.
*
* @param tc the xact
* @throws StandardException
*/
public void updateMetadataSPSes(TransactionController tc) throws StandardException {
// Drop the existing metadata statements, then recreate them from scratch.
dropJDBCMetadataSPSes(tc);
createSystemSps(tc);
}
/**
* Drops a sequence descriptor
*
* @param descriptor The descriptor to drop
* @param tc The TransactionController.
* @throws StandardException Thrown on failure
*/
/**
 * Drops a sequence descriptor
 *
 * @param descriptor The descriptor to drop
 * @param tc The TransactionController.
 * @throws StandardException Thrown on failure
 */
public void dropSequenceDescriptor(SequenceDescriptor descriptor, TransactionController tc)
    throws StandardException {
    TabInfoImpl ti = getNonCoreTI(SYSSEQUENCES_CATALOG_NUM);
    // Key the delete on the sequence's UUID through the UUID index.
    DataValueDescriptor idOrderable = getIDValueAsCHAR(descriptor.getUUID());
    ExecIndexRow keyRow = (ExecIndexRow) exFactory.getIndexableRow(1);
    keyRow.setColumn(1, idOrderable);
    ti.deleteRow(tc, keyRow, SYSSEQUENCESRowFactory.SYSSEQUENCES_INDEX1_ID);
}
/**
 * Look up a sequence descriptor by its UUID via the SYSSEQUENCES UUID index.
 */
public SequenceDescriptor getSequenceDescriptor(UUID uuid) throws StandardException {
    TabInfoImpl ti = getNonCoreTI(SYSSEQUENCES_CATALOG_NUM);
    // The CHAR form of the UUID is both start and stop key of the scan.
    ExecIndexRow keyRow = exFactory.getIndexableRow(1);
    keyRow.setColumn(1, getIDValueAsCHAR(uuid));
    return (SequenceDescriptor)
        getDescriptorViaIndex(
            SYSSEQUENCESRowFactory.SYSSEQUENCES_INDEX1_ID,
            keyRow,
            (ScanQualifier[][]) null,
            ti,
            (TupleDescriptor) null,
            (List) null,
            false);
}
/**
* Get the sequence descriptor given a sequence name and a schema Id.
*
* @param sequenceName The sequence name, guaranteed to be unique only within its schema.
* @param sd The schema descriptor.
* @return The SequenceDescriptor for the constraints.
* @throws StandardException Thrown on failure
*/
/**
 * Get the sequence descriptor given a sequence name and a schema.
 *
 * @param sequenceName The sequence name, guaranteed to be unique only within its schema.
 * @param sd The schema descriptor.
 * @return The SequenceDescriptor for the sequence.
 * @throws StandardException Thrown on failure
 */
public SequenceDescriptor getSequenceDescriptor(SchemaDescriptor sd, String sequenceName)
    throws StandardException {
    TabInfoImpl ti = getNonCoreTI(SYSSEQUENCES_CATALOG_NUM);
    // Composite key (schemaId, sequenceName) — sequence names are only
    // unique within a schema.  Same key serves as start and stop position.
    ExecIndexRow keyRow = exFactory.getIndexableRow(2);
    keyRow.setColumn(1, getIDValueAsCHAR(sd.getUUID()));
    keyRow.setColumn(2, new SQLVarchar(sequenceName));
    return (SequenceDescriptor)
        getDescriptorViaIndex(
            SYSSEQUENCESRowFactory.SYSSEQUENCES_INDEX2_ID,
            keyRow,
            (ScanQualifier[][]) null,
            ti,
            (TupleDescriptor) null,
            (List) null,
            false);
}
/**
* Get an object's permission descriptor from the system tables, without going through the cache.
* This method is called to fill the permissions cache.
*
* @return a PermDescriptor that describes the table permissions granted to the grantee on an objcet
* , null if no table-level permissions have been granted to him on the table.
* @throws StandardException
*/
/**
 * Get an object's permission descriptor from the system tables, bypassing
 * the permissions cache. Called to populate the cache.
 *
 * @return the PermDescriptor granted to the grantee on the object, or null
 *         when no permissions have been granted.
 * @throws StandardException
 */
PermDescriptor getUncachedGenericPermDescriptor(PermDescriptor key)
    throws StandardException
{
    // Without a PERMISSIONID, probe by (grantee, protected object id,
    // grantor); with one, the UUID index locates the row directly.
    int indexNum = (key.getObjectID() == null)
        ? SYSPERMSRowFactory.GRANTEE_OBJECTID_GRANTOR_INDEX_NUM
        : SYSPERMSRowFactory.PERMS_UUID_IDX_NUM;
    return (PermDescriptor)
        getUncachedPermissionsDescriptor(SYSPERMS_CATALOG_NUM, indexNum, key);
} // end of getUncachedGenericPermDescriptor
/**
* Get permissions granted to one user for an object using the object's Id
* and the user's authorization Id.
*
* @param objectUUID The id of the protected object
* @param objectType Type of the object (e.g., SEQUENCE)
* @param privilege The kind of privilege needed (e.g., PermDescriptor.USAGE_PRIV)
* @param granteeAuthId The user or role who wants to have permission on this object
*
* @return The descriptor of the permissions for the object
*
* @exception StandardException
*/
/**
 * Get the permissions granted to one user on an object, looked up by the
 * object's id, object type, privilege kind and grantee authorization id.
 */
public PermDescriptor getGenericPermissions(UUID objectUUID, String objectType, String privilege, String granteeAuthId)
    throws StandardException
{
    // Probe descriptor: permission UUID and grantor are unknown at this
    // point, so both are left null.
    PermDescriptor lookupKey = new PermDescriptor(
        this, null, objectType, objectUUID, privilege, null, granteeAuthId, false );
    return (PermDescriptor) getPermissions( lookupKey );
}
/**
* Get one user's privileges for an object using the permUUID.
*
* @param permUUID
* @return The descriptor of the user's permissions for the object.
* @throws StandardException
*/
/**
 * Get one user's privileges for an object directly by the permission row's
 * UUID, bypassing the permissions cache.
 */
public PermDescriptor getGenericPermissions(UUID permUUID)
    throws StandardException {
    return getUncachedGenericPermDescriptor( new PermDescriptor( this, permUUID ) );
}
/**
* Drops all permission descriptors for the object whose Id is given.
*
* @param objectID The UUID of the object from which to drop
* all the permission descriptors
* @param tc TransactionController for the transaction
* @throws StandardException Thrown on error
*/
public void dropAllPermDescriptors(UUID objectID, TransactionController tc)
throws StandardException {
TabInfoImpl ti = getNonCoreTI(SYSPERMS_CATALOG_NUM);
SYSPERMSRowFactory rf = (SYSPERMSRowFactory) ti.getCatalogRowFactory();
DataValueDescriptor objIdOrderable;
ExecRow curRow;
PermissionsDescriptor perm;
// In Derby authorization mode, permission catalogs may not be present
if (!usesSqlAuthorization)
return;
/* Use objIDOrderable in both start and stop position for scan. */
objIdOrderable = getIDValueAsCHAR(objectID);
/* Set up the start/stop position for the scan */
ExecIndexRow keyRow = exFactory.getIndexableRow(1);
keyRow.setColumn(1, objIdOrderable);
// Repeatedly fetch the first remaining row for this object id; each pass
// deletes exactly one row (keyed by its own UUID), so the loop ends once
// every permission row for the object is gone.
while ((curRow = ti.getRow(tc, keyRow, rf.PERMS_OBJECTID_IDX_NUM)) != null) {
perm = (PermDescriptor) rf.buildDescriptor(curRow, (TupleDescriptor) null, this);
removePermEntryInCache(perm);
// Build new key based on UUID and drop the entry as we want to drop
// only this row
ExecIndexRow uuidKey;
uuidKey = rf.buildIndexKeyRow(rf.PERMS_UUID_IDX_NUM, perm);
ti.deleteRow(tc, uuidKey, rf.PERMS_UUID_IDX_NUM);
}
}
}
| [
"durieuxthomas@hotmail.com"
] | durieuxthomas@hotmail.com |
0625e0b9512cfffe8ec485182efb01074daf8aea | 4d1fd0e061d2bad1a6562ec58d071164b813d2f5 | /task17/task1711/Solution.java | 49415db5a8c889f3db8a6de266336992d6146f45 | [] | no_license | Zeegen/JavaCore | f12b117d7c9d940e0abc29aa26888c8d354d5387 | 8b366ff2ddac82200e57e8d7f6babb9372c4cb6a | refs/heads/master | 2021-01-19T12:01:19.476752 | 2017-03-26T08:35:31 | 2017-03-26T08:35:31 | 82,290,865 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 3,261 | java | package com.javarush.task.task17.task1711;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Locale;
/*
CRUD 2
*/
public class Solution {
    public static volatile List<Person> allPeople = new ArrayList<Person>();

    static {
        allPeople.add(Person.createMale("Иванов Иван", new Date())); // born today, id=0
        allPeople.add(Person.createMale("Петров Петр", new Date())); // born today, id=1
    }

    /**
     * Entry point. The first argument selects the operation:
     * -c create, -u update, -d "delete" (null out fields), -i print info.
     */
    public static void main(String[] args) {
        // Guard against a missing operation flag — the original code threw
        // ArrayIndexOutOfBoundsException when invoked without arguments.
        if (args == null || args.length == 0) {
            return;
        }
        switch (args[0]) {
            case "-c":
                synchronized (allPeople) {
                    inPeople(args);
                }
                break;
            case "-u":
                synchronized (allPeople) {
                    updatePeople(args);
                }
                break;
            case "-d":
                synchronized (allPeople) {
                    killPeople(args);
                }
                break;
            case "-i":
                synchronized (allPeople) {
                    infoPeople(args);
                }
                break;
        }
    }

    /**
     * Creates people from (name, sex, birth date d/MM/yyyy) triplets starting
     * at args[1] and prints the id (list index) assigned to each triplet.
     */
    public static void inPeople(String[] args) {
        SimpleDateFormat inDate = new SimpleDateFormat("d/MM/yyyy", Locale.ENGLISH);
        for (int i = 1; i < args.length; i = i + 3) {
            try {
                if (args[i + 1].contains("м")) {
                    allPeople.add(Person.createMale(args[i], inDate.parse(args[i + 2])));
                } else {
                    allPeople.add(Person.createFemale(args[i], inDate.parse(args[i + 2])));
                }
            } catch (ParseException e) {
                // Deliberately ignored: a malformed date simply adds no person;
                // the task's input format guarantees parsable dates.
            }
            System.out.println((allPeople.size() - 1));
        }
    }

    /**
     * Updates existing people from (id, name, sex, birth date d/MM/yyyy)
     * quadruplets starting at args[1].
     */
    public static void updatePeople(String[] args) {
        Person tmp;
        SimpleDateFormat inDate = new SimpleDateFormat("d/MM/yyyy", Locale.ENGLISH);
        for (int i = 1; i < args.length; i = i + 4) {
            tmp = allPeople.get(Integer.parseInt(args[i]));
            tmp.setName(args[i + 1]);
            tmp.setSex((args[i + 2].equals("м")) ? Sex.MALE : Sex.FEMALE);
            try {
                tmp.setBirthDay(inDate.parse(args[i + 3]));
            } catch (ParseException e) {
                // Deliberately ignored: on a malformed date the birth day is
                // simply left unchanged.
            }
            allPeople.set(Integer.parseInt(args[i]), tmp);
        }
    }

    /**
     * "Deletes" the people whose ids are listed from args[1] on, by nulling
     * out their fields while keeping their slots (ids stay stable).
     */
    public static void killPeople(String[] args) {
        Person tmp;
        for (int i = 1; i < args.length; i++) {
            tmp = allPeople.get(Integer.parseInt(args[i]));
            tmp.setName(null);
            tmp.setSex(null);
            tmp.setBirthDay(null);
            allPeople.set(Integer.parseInt(args[i]), tmp);
        }
    }

    /**
     * Prints "name sex dd-MMM-yyyy" for every id listed from args[1] on.
     */
    public static void infoPeople(String[] args) {
        Person tmp;
        SimpleDateFormat outDate = new SimpleDateFormat("dd-MMM-yyyy", Locale.ENGLISH);
        for (int i = 1; i < args.length; i++) {
            tmp = allPeople.get(Integer.parseInt(args[i]));
            System.out.println(tmp.getName()
                    + " "
                    + ((tmp.getSex().equals(Sex.MALE)) ? "м " : "ж ")
                    + outDate.format(tmp.getBirthDay()));
        }
    }
}
| [
"dogy@Nikolajs-MacBook-Pro.local"
] | dogy@Nikolajs-MacBook-Pro.local |
fee30d55c479f7843d01d217d293fd1c6b32d28b | 86b2ce8eeee9799fa4358616d85d2f44eee0c3e0 | /maturabeispiele/calendar/Server/CalendarService/src/main/java/at/htl/persistence/ApiResource.java | c652a15d07d3f37d0eae766f8494be412355cf9e | [] | no_license | jakobhanner/GITschule5ahitm | b88a4e95fe9e09f64663edbd7af7da4bf9b2fd30 | f9b939a373f88ecb6fc2f532fba26be46bf40010 | refs/heads/master | 2021-09-06T14:12:26.996718 | 2018-02-07T10:39:55 | 2018-02-07T10:39:55 | 113,782,159 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 715 | java | package at.htl.persistence;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.UriInfo;
import javax.ws.rs.Consumes;
import javax.ws.rs.Produces;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PUT;
import javax.ws.rs.core.MediaType;
/**
* REST Web Service
*
* @author Jakob
*/
@Path("api")
public class ApiResource {
// Per-request URI details, injected by the JAX-RS runtime.
@Context
private UriInfo context;
/** Public no-argument constructor required by the JAX-RS runtime. */
public ApiResource() {
}
/**
 * Retrieves the resource as XML.
 *
 * @return an XML representation of the resource (not yet implemented)
 */
@GET
@Produces(MediaType.APPLICATION_XML)
public String getXml() {
//TODO return proper representation object
throw new UnsupportedOperationException();
}
/**
 * Replaces the resource with the supplied XML representation.
 *
 * @param content the new XML content (currently ignored — not yet implemented)
 */
@PUT
@Consumes(MediaType.APPLICATION_XML)
public void putXml(String content) {
}
}
| [
"jakob.hanner@gmx.at"
] | jakob.hanner@gmx.at |
0e1beb4f2636f97947bd499d5840bc675cd5efbc | 2b869b2bfc93823b4996f8f9f06f7a4d6c6f697a | /src/main/java/com/example/sp/maxsat/Services/ParkingLotServiceImpl.java | 0f1e46e7e3f7471ddddf1cea06a0f99048fa51a7 | [] | no_license | PawelHanzlik/CarParkingMax-SatSolver | 0754ffa633e623324ed5739b347c98c52bc176b9 | d26e1b30a0a25b0482560af2e7be8f73164bd89c | refs/heads/main | 2023-05-31T15:27:13.875992 | 2021-06-27T18:22:06 | 2021-06-27T18:22:06 | 364,345,700 | 0 | 0 | null | 2021-06-27T18:22:06 | 2021-05-04T18:11:34 | Java | UTF-8 | Java | false | false | 2,358 | java | package com.example.sp.maxsat.Services;
import com.example.sp.maxsat.Entities.ParkingLotEntity;
import com.example.sp.maxsat.Exceptions.Classes.NoSuchParkingLotException;
import com.example.sp.maxsat.Repositories.ParkingLotRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.Optional;
@Service
public class ParkingLotServiceImpl implements ParkingLotService {

    private final ParkingLotRepository parkingLotRepository;

    @Autowired
    public ParkingLotServiceImpl(ParkingLotRepository parkingLotRepository) {
        this.parkingLotRepository = parkingLotRepository;
    }

    /**
     * Looks up a parking lot by id.
     *
     * @throws NoSuchParkingLotException if no lot with the given id exists
     */
    @Override
    public ParkingLotEntity getParkingLot(Long parkingLotId) throws NoSuchParkingLotException {
        // orElseThrow replaces the manual isEmpty()/get() dance.
        return this.parkingLotRepository.findById(parkingLotId)
                .orElseThrow(NoSuchParkingLotException::new);
    }

    /** Returns every parking lot known to the repository. */
    @Override
    public List<ParkingLotEntity> getAllParkingLots() {
        return this.parkingLotRepository.findAll();
    }

    /** Persists a new parking lot and returns the saved entity. */
    @Override
    public ParkingLotEntity addParkingLot(ParkingLotEntity parkingLot) {
        return this.parkingLotRepository.save(parkingLot);
    }

    /**
     * Deletes the parking lot with the given id.
     *
     * @throws NoSuchParkingLotException if no lot with the given id exists
     */
    @Override
    public void deleteParkingLot(Long parkingLotId) throws NoSuchParkingLotException {
        ParkingLotEntity parkingLot = this.parkingLotRepository.findById(parkingLotId)
                .orElseThrow(NoSuchParkingLotException::new);
        this.parkingLotRepository.delete(parkingLot);
    }

    /**
     * Updates the number of free spaces of the given parking lot.
     *
     * @param newOccupancy the new free-space count to store
     * @throws NoSuchParkingLotException if no lot with the given id exists
     */
    @Override
    public void changeParkingLotOccupancy(Long parkingLotId, Integer newOccupancy) throws NoSuchParkingLotException {
        ParkingLotEntity parkingLot = this.parkingLotRepository.findById(parkingLotId)
                .orElseThrow(NoSuchParkingLotException::new);
        parkingLot.setFreeSpaces(newOccupancy);
        this.parkingLotRepository.save(parkingLot);
    }
}
| [
"hanzlik_pawel@o2.pl"
] | hanzlik_pawel@o2.pl |
b80ebc12530d411f89382e6f407de60c1ae787a0 | 8b37f37503b8ff9fe1d59b9f5abce70939cd2190 | /app/src/main/java/com/material/travel/WebViewActivity.java | 9c86d8e5da9542f55638dd7b6ee599cd4cb056b8 | [
"Apache-2.0"
] | permissive | ymcao/TravelGuide | 1d2a5fbd2f84401c018e529a29a4ec5e886bd961 | d0a0d7e8767b9352195e7323028a6c97098d2080 | refs/heads/master | 2021-01-10T01:09:49.412985 | 2016-03-21T14:49:13 | 2016-03-21T14:49:13 | 54,396,674 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 1,308 | java | package com.material.travel;
import android.os.Bundle;
import android.text.TextUtils;
import android.view.View;
import android.webkit.WebView;
import android.webkit.WebViewClient;
/**
* Created by caoyamin on 15/10/15.
*/
public class WebViewActivity extends BaseActivity{
// WebView that renders the page requested via the launch intent.
private WebView v_webview;
// Target URL, supplied by the caller through the "load_url" intent extra.
private String url=null;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.w_view);
v_webview=(WebView)findViewById(R.id.v_webview);
// Hide the base activity's title view and show a back arrow instead.
titleView.setVisibility(View.GONE);
//mToolbar.setTitleTextColor(0XFFFFFF);
mToolbar.setNavigationIcon(R.drawable.ic_back);
// Optional subtitle, taken from the "load_title" intent extra when present.
if(!TextUtils.isEmpty(getIntent().getStringExtra("load_title"))) {
mToolbar.setSubtitle(getIntent().getStringExtra("load_title"));
}
url=getIntent().getStringExtra("load_url");
if(!TextUtils.isEmpty(url)) {
v_webview.loadUrl(url);
}
// Keep link navigation inside this WebView rather than handing the URL
// off to an external browser.
v_webview.setWebViewClient(new WebViewClient() {
@Override
public boolean shouldOverrideUrlLoading(WebView view, String url) {
view.loadUrl(url);
return true;
}
});
}
}
| [
"caoyamin@192.168.7.43"
] | caoyamin@192.168.7.43 |
01c13f3006fba9b17cd33452770575aa8528ba76 | 845ada2a01f32c9d2cda48cfd3814684b6991e9a | /src/main/java/ci/weget/web/dao/PaiementRepository.java | 818bf0e2e80ddf81f815f1a3ff05f252b840aa65 | [] | no_license | togetWeget/weget-web-metier-dao-jpa | 03e96cd84a58c70365d3af40352273e9e48d4518 | cc8224e0b7322bf8c3275631a6f2d09945ebdfd3 | refs/heads/master | 2020-03-23T15:19:00.293782 | 2019-01-14T09:58:34 | 2019-01-14T09:58:34 | 141,737,884 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 534 | java | package ci.weget.web.dao;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import ci.weget.web.entites.Paiement;
// Spring Data repository for Paiement entities.
public interface PaiementRepository extends JpaRepository<Paiement, Long> {
// Fetch a payment by its identifier.
@Query("select p from Paiement p where p.id=?1")
Paiement getByid(Long id);
/*// get the order of a payment
@Query("select p from Paiement p where p.commande.id=?1")
Paiement getPaiementDeCommande(Long id);*/
}
| [
"abdoulaye.traore@togetop.com"
] | abdoulaye.traore@togetop.com |
0bcec4c31a1e45903f03e624d8498bbbffdb3913 | 398653679d5ea11a38ad645f35e12c5f709ed8d8 | /petsdemoa/src/main/java/com/nfinity/demo/petsdemoa/application/authorization/userpermission/UserpermissionAppService.java | f8b5bc99f74d228c05b47243b5c985c2ce3ce237 | [] | no_license | musman013/testLab91 | 3ddd868e38629eb45a63c2a526a7b0c0fe90eb7f | 4e52a5551e7b73d9545a4644881b23616d349ddf | refs/heads/master | 2022-09-12T21:46:09.977259 | 2020-04-02T05:31:22 | 2020-04-02T05:31:22 | 252,364,161 | 0 | 0 | null | 2022-09-08T01:07:01 | 2020-04-02T05:32:05 | Java | UTF-8 | Java | false | false | 10,234 | java | package com.nfinity.demo.petsdemoa.application.authorization.userpermission;
import com.nfinity.demo.petsdemoa.application.authorization.userpermission.dto.*;
import com.nfinity.demo.petsdemoa.domain.authorization.userpermission.IUserpermissionManager;
import com.nfinity.demo.petsdemoa.domain.model.QUserpermissionEntity;
import com.nfinity.demo.petsdemoa.domain.model.UserpermissionEntity;
import com.nfinity.demo.petsdemoa.domain.model.UserpermissionId;
import com.nfinity.demo.petsdemoa.domain.authorization.user.IUserManager;
import com.nfinity.demo.petsdemoa.domain.model.UserEntity;
import com.nfinity.demo.petsdemoa.domain.authorization.permission.IPermissionManager;
import com.nfinity.demo.petsdemoa.domain.model.PermissionEntity;
import com.nfinity.demo.petsdemoa.domain.model.RolepermissionEntity;
import com.nfinity.demo.petsdemoa.commons.search.*;
import com.nfinity.demo.petsdemoa.commons.logging.LoggingHelper;
import com.querydsl.core.BooleanBuilder;
import org.springframework.cache.annotation.*;
import java.util.Date;
import java.util.Map;
import java.util.Set;
import java.util.List;
import java.util.HashMap;
import java.util.Iterator;
import java.util.ArrayList;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.validation.annotation.Validated;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
@Service
@Validated
public class UserpermissionAppService implements IUserpermissionAppService {

    static final int case1 = 1;
    static final int case2 = 2;
    static final int case3 = 3;

    @Autowired
    private IUserpermissionManager _userpermissionManager;
    @Autowired
    private IUserManager _userManager;
    @Autowired
    private IPermissionManager _permissionManager;
    @Autowired
    private LoggingHelper logHelper;
    @Autowired
    private UserpermissionMapper mapper;

    /**
     * Creates a user/permission assignment.
     *
     * @param input carries the user id, permission id and revoked flag
     * @return the created assignment, or {@code null} when either id is
     *         missing, either entity cannot be found, or the permission is
     *         already assigned to the user (unchanged contract)
     */
    @Transactional(propagation = Propagation.REQUIRED)
    public CreateUserpermissionOutput create(CreateUserpermissionInput input) {
        UserpermissionEntity userpermission = mapper.createUserpermissionInputToUserpermissionEntity(input);
        // BUGFIX: the generated code used `||` here and below, which let a null
        // userId/permissionId (or a missing entity) slip through and caused an
        // NPE / a half-initialized association. Both ids and both entities are
        // required, so the checks must be conjunctions — written as guard clauses.
        if (input.getUserId() == null || input.getPermissionId() == null) {
            return null;
        }
        UserEntity foundUser = _userManager.findById(input.getUserId());
        PermissionEntity foundPermission = _permissionManager.findById(input.getPermissionId());
        if (foundUser == null || foundPermission == null) {
            return null;
        }
        if (checkIfPermissionAlreadyAssigned(foundUser, foundPermission)) {
            // Duplicate assignment: keep the original "return null" contract.
            return null;
        }
        userpermission.setPermission(foundPermission);
        userpermission.setUser(foundUser);
        userpermission.setRevoked(input.getRevoked());
        UserpermissionEntity createdUserpermission = _userpermissionManager.create(userpermission);
        return mapper.userpermissionEntityToCreateUserpermissionOutput(createdUserpermission);
    }

    /**
     * Updates an existing user/permission assignment. The assignment must
     * already exist; otherwise {@code null} is returned.
     *
     * @param userpermissionId composite key, also used as the cache-evict key
     * @param input            new association data
     * @return the updated assignment, or {@code null} on any validation failure
     */
    @Transactional(propagation = Propagation.REQUIRED)
    @CacheEvict(value = "Userpermission", key = "#p0")
    public UpdateUserpermissionOutput update(UserpermissionId userpermissionId, UpdateUserpermissionInput input) {
        UserpermissionEntity userpermission = mapper.updateUserpermissionInputToUserpermissionEntity(input);
        // Same `||` -> guard-clause fix as in create(); additionally the old code
        // could call checkIfPermissionAlreadyAssigned() with a null foundUser,
        // which threw an NPE on getUserpermissionSet().
        if (input.getUserId() == null || input.getPermissionId() == null) {
            return null;
        }
        UserEntity foundUser = _userManager.findById(input.getUserId());
        PermissionEntity foundPermission = _permissionManager.findById(input.getPermissionId());
        if (foundUser == null || foundPermission == null) {
            return null;
        }
        if (!checkIfPermissionAlreadyAssigned(foundUser, foundPermission)) {
            // Updating an assignment that does not exist is an error.
            return null;
        }
        userpermission.setPermission(foundPermission);
        userpermission.setUser(foundUser);
        userpermission.setRevoked(input.getRevoked());
        UserpermissionEntity updatedUserpermission = _userpermissionManager.update(userpermission);
        return mapper.userpermissionEntityToUpdateUserpermissionOutput(updatedUserpermission);
    }

    /**
     * Returns {@code true} when {@code foundPermission} is already linked to
     * {@code foundUser}.
     */
    public boolean checkIfPermissionAlreadyAssigned(UserEntity foundUser, PermissionEntity foundPermission) {
        // NOTE(review): this relies on reference equality, exactly as the
        // original raw-Iterator loop did. That is only correct while both
        // entities come from the same persistence context; comparing ids would
        // be more robust — confirm entity equals()/id semantics before changing.
        for (UserpermissionEntity pe : foundUser.getUserpermissionSet()) {
            if (pe.getPermission() == foundPermission) {
                return true;
            }
        }
        return false;
    }

    /** Deletes the assignment identified by {@code userpermissionId}. */
    @Transactional(propagation = Propagation.REQUIRED)
    @CacheEvict(value = "Userpermission", key = "#p0")
    public void delete(UserpermissionId userpermissionId) {
        UserpermissionEntity existing = _userpermissionManager.findById(userpermissionId);
        _userpermissionManager.delete(existing);
    }

    /** Looks up a single assignment; returns {@code null} when not found. */
    @Transactional(propagation = Propagation.NOT_SUPPORTED)
    @Cacheable(value = "Userpermission", key = "#p0")
    public FindUserpermissionByIdOutput findById(UserpermissionId userpermissionId) {
        UserpermissionEntity foundUserpermission = _userpermissionManager.findById(userpermissionId);
        if (foundUserpermission == null) {
            return null;
        }
        return mapper.userpermissionEntityToFindUserpermissionByIdOutput(foundUserpermission);
    }

    // User
    // ReST API Call - GET /userpermission/1/user
    /** Resolves the user side of the assignment; {@code null} when the assignment is absent. */
    @Transactional(propagation = Propagation.NOT_SUPPORTED)
    @Cacheable(value = "Userpermission", key = "#p0")
    public GetUserOutput getUser(UserpermissionId userpermissionId) {
        UserpermissionEntity foundUserpermission = _userpermissionManager.findById(userpermissionId);
        if (foundUserpermission == null) {
            logHelper.getLogger().error("There does not exist a userpermission wth a id=%s", userpermissionId);
            return null;
        }
        UserEntity re = _userpermissionManager.getUser(userpermissionId);
        return mapper.userEntityToGetUserOutput(re, foundUserpermission);
    }

    // Permission
    // ReST API Call - GET /userpermission/1/permission
    /** Resolves the permission side of the assignment; {@code null} when the assignment is absent. */
    @Transactional(propagation = Propagation.NOT_SUPPORTED)
    @Cacheable(value = "Userpermission", key = "#p0")
    public GetPermissionOutput getPermission(UserpermissionId userpermissionId) {
        UserpermissionEntity foundUserpermission = _userpermissionManager.findById(userpermissionId);
        if (foundUserpermission == null) {
            logHelper.getLogger().error("There does not exist a userpermission wth a id=%s", userpermissionId);
            return null;
        }
        PermissionEntity re = _userpermissionManager.getPermission(userpermissionId);
        return mapper.permissionEntityToGetPermissionOutput(re, foundUserpermission);
    }

    /**
     * Pages through assignments matching {@code search} and maps each entity
     * to its output DTO.
     */
    @Transactional(propagation = Propagation.NOT_SUPPORTED)
    @Cacheable(value = "Userpermission")
    public List<FindUserpermissionByIdOutput> find(SearchCriteria search, Pageable pageable) throws Exception {
        Page<UserpermissionEntity> foundUserpermission = _userpermissionManager.findAll(search(search), pageable);
        List<FindUserpermissionByIdOutput> output = new ArrayList<>();
        // Enhanced-for replaces the original explicit Iterator; same traversal order.
        for (UserpermissionEntity entity : foundUserpermission.getContent()) {
            output.add(mapper.userpermissionEntityToFindUserpermissionByIdOutput(entity));
        }
        return output;
    }

    /**
     * Translates a {@link SearchCriteria} into a QueryDSL predicate.
     *
     * @return the predicate, or {@code null} when no criteria were supplied
     * @throws Exception when an unsupported search field is requested
     */
    public BooleanBuilder search(SearchCriteria search) throws Exception {
        QUserpermissionEntity userpermission = QUserpermissionEntity.userpermissionEntity;
        if (search == null) {
            return null;
        }
        Map<String, SearchFields> map = new HashMap<>();
        for (SearchFields fieldDetails : search.getFields()) {
            map.put(fieldDetails.getFieldName(), fieldDetails);
        }
        checkProperties(new ArrayList<>(map.keySet()));
        return searchKeyValuePair(userpermission, map, search.getJoinColumns());
    }

    /**
     * Validates that every requested search field is one of the supported
     * properties.
     *
     * @throws Exception when an unknown field is requested (message contains
     *                   the raw, un-normalized field name, as before)
     */
    public void checkProperties(List<String> list) throws Exception {
        for (String raw : list) {
            // Normalize once instead of four times per field (the original
            // repeated replace("%20","").trim() in every comparison).
            String field = raw.replace("%20", "").trim();
            if (!(field.equals("userId")
                    || field.equals("permission")
                    || field.equals("permissionId")
                    || field.equals("user"))) {
                throw new Exception("Wrong URL Format: Property " + raw + " not found!");
            }
        }
    }

    /**
     * Builds the conjunction of join-column restrictions (userId and/or
     * permissionId). The {@code map} of search fields is accepted for
     * interface compatibility but — as in the original — not consulted here.
     */
    public BooleanBuilder searchKeyValuePair(QUserpermissionEntity userpermission, Map<String, SearchFields> map, Map<String, String> joinColumns) {
        BooleanBuilder builder = new BooleanBuilder();
        // One pass instead of the original two; conjunction order is irrelevant
        // for a BooleanBuilder of independent equality predicates.
        for (Map.Entry<String, String> joinCol : joinColumns.entrySet()) {
            if ("userId".equals(joinCol.getKey())) {
                builder.and(userpermission.user.id.eq(Long.parseLong(joinCol.getValue())));
            } else if ("permissionId".equals(joinCol.getKey())) {
                builder.and(userpermission.permission.id.eq(Long.parseLong(joinCol.getValue())));
            }
        }
        return builder;
    }

    /**
     * Parses a composite key of the form {@code "permissionId:<n>,userId:<n>"}
     * (order-independent).
     *
     * @return the parsed id, or {@code null} for malformed input or a missing
     *         key part
     */
    public UserpermissionId parseUserpermissionKey(String keysString) {
        String[] keyEntries = keysString.split(",");
        if (keyEntries.length <= 1) {
            return null;
        }
        Map<String, String> keyMap = new HashMap<>();
        for (String keyEntry : keyEntries) {
            String[] keyEntryArr = keyEntry.split(":");
            if (keyEntryArr.length <= 1) {
                return null;
            }
            keyMap.put(keyEntryArr[0], keyEntryArr[1]);
        }
        String permissionId = keyMap.get("permissionId");
        String userId = keyMap.get("userId");
        // BUGFIX: the original called Long.valueOf(null) when a part was
        // missing, throwing NumberFormatException instead of signalling a
        // malformed key with null like every other branch does.
        if (permissionId == null || userId == null) {
            return null;
        }
        UserpermissionId userpermissionId = new UserpermissionId();
        userpermissionId.setPermissionId(Long.valueOf(permissionId));
        userpermissionId.setUserId(Long.valueOf(userId));
        return userpermissionId;
    }
}
| [
"musman@nfinityllc.com"
] | musman@nfinityllc.com |
ad41a023fc5783b80345236e52b97394387ba7ed | ad8c3e2840a6f213feb2a1c3180bbba68c909070 | /src/test/java/guru/springframework/sfgpetclinic/SfgPetClinicApplicationTests.java | 07820e76db73b0ff696adebc94290da39f8e0916 | [] | no_license | basoltanifarWVUMix/sping-pet-clinic | 11fb5cf9c66e2b837fd059e2936c6c05e96f18db | 6006dc684b038450de46f583ac9019a9d60db9f7 | refs/heads/master | 2020-04-01T16:29:24.772798 | 2018-10-17T02:29:10 | 2018-10-17T02:29:10 | 153,382,596 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 388 | java | //package guru.springframework.sfgpetclinic;
//
//import org.junit.Test;
//import org.junit.runner.RunWith;
//import org.springframework.boot.test.context.SpringBootTest;
//import org.springframework.test.context.junit4.SpringRunner;
//
//@RunWith(SpringRunner.class)
//@SpringBootTest
//public class SfgPetClinicApplicationTests {
//
// @Test
// public void contextLoads() {
// }
//
//}
| [
"basoltanifar@mix.wvu.edu"
] | basoltanifar@mix.wvu.edu |
82dba201ff6c25806a1b8a1da55e783e1326f321 | 9284056ad504e81646f207a94f9c30e59398111e | /SDPlatform_1.0/src/main/java/cn/yufu/posp/queryManager/dao/hibernate/hql/CurTranLsWechatAlipayHibernateHQLImpl.java | 86e377535a72400945a36a7dc4b8f6ff39e9b9b7 | [] | no_license | ice24for/learnCode | 9e3fd6fe5d5c19799b5010e690dc28fa676b9fd5 | 46baa3c97253127852b3b044f48c4c95b24c0c61 | refs/heads/master | 2023-07-24T10:03:13.820723 | 2019-09-30T02:09:02 | 2019-09-30T02:09:02 | null | 0 | 0 | null | null | null | null | GB18030 | Java | false | false | 14,087 | java | /**
*包名:cn.yufu.posp.queryManager.dao.hibernate.hql
*描述:package cn.yufu.posp.queryManager.dao.hibernate.hql;
*/
package cn.yufu.posp.queryManager.dao.hibernate.hql;
import java.math.BigDecimal;
import java.util.LinkedList;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import cn.yufu.posp.common.common.exception.OAException;
import cn.yufu.posp.common.dao.hibernate.hql.OABaseDaoHibernateHQLImpl;
import cn.yufu.posp.common.domain.model.UserData;
import cn.yufu.posp.queryManager.domain.model.TblExpCurTranLog;
/**
 * CurTranLsWechatAlipayHibernateHQLImpl.java
 * Copyright (C) 2017 Yufu Holdings Co., Ltd. (header translated from Chinese)
 * Author: gll
 * Created: 2017-07-21
 *
 * HQL implementation for querying WeChat/Alipay current-transaction logs
 * (TblExpCurTranLog): a count query, a paged query, and an unpaged export
 * query, all built by appending filter clauses for the non-empty fields of
 * the query model.
 *
 * SECURITY NOTE(review): every method concatenates caller-supplied values
 * (tranRrn, scanCode, dates, ...) directly into the HQL string. This is an
 * HQL-injection risk and should be migrated to bound parameters
 * (Query.setParameter) — left unchanged here because the exact quoting
 * behavior is load-bearing for callers.
 */
public class CurTranLsWechatAlipayHibernateHQLImpl extends OABaseDaoHibernateHQLImpl
		implements CurTranLsWechatAlipayHibernateHQL {

	private static final Log log = LogFactory.getLog("query");

	/**
	 * Counts TblExpCurTranLog rows matching the non-empty fields of
	 * {@code queryModel} within [startDate 000000, endDate 235959].
	 *
	 * @return the matching row count (0 when none)
	 * @throws OAException when the underlying query fails
	 */
	@SuppressWarnings("rawtypes")
	@Override
	public int queryCount(TblExpCurTranLog queryModel, UserData sessionUserData, String startDate, String endDate) throws OAException {
		int count = 0;
		try {
			// Build the count query. (Original note, translated: "query by vehicle"
			// — the comment appears copy-pasted from another module.)
			log.info("CurTranLsWechatAlipayHibernateHQLImpl.queryCount开始调用:查询符合条件的数量。");
			// The table name here must be the mapped entity name (HQL), not the
			// physical table name. (Translated from the original Chinese comment.)
			StringBuffer bufferHql = new StringBuffer("select count(*) from TblExpCurTranLog where 1=1 ");
			// Append one clause per supplied filter. Note the inconsistent quoting:
			// merchantId/terminalId and the timestamps are unquoted numerics while
			// tranRrn/scanCode/tranType are quoted strings — preserved as-is.
			if (queryModel.getMerchantId() != null && !queryModel.getMerchantId().equals(""))
				bufferHql.append(" and merchantId = " + queryModel.getMerchantId() + " ");
			if (queryModel.getTerminalId() != null && !queryModel.getTerminalId().equals(""))
				bufferHql.append(" and terminalId = " + queryModel.getTerminalId() + " ");
			if (startDate != null && !startDate.equals(""))
				bufferHql.append(" and sysTimeStamp >= " + startDate + "000000");
			if (endDate != null && !endDate.equals(""))
				bufferHql.append(" and sysTimeStamp <= " + endDate + "235959");
			if (queryModel.getTranRrn() != null && !queryModel.getTranRrn().equals(""))
				bufferHql.append(" and tranRrn = '" + queryModel.getTranRrn() + "' ");
			if (queryModel.getScanCode() != null && !queryModel.getScanCode().equals(""))
				bufferHql.append(" and scanCode = '" + queryModel.getScanCode() + "'");
			if (queryModel.getTranType() != null && !queryModel.getTranType().equals(""))
				bufferHql.append(" and tranType = '" + queryModel.getTranType() + "'");
			// NOTE(review): acqRespCode is compared with new BigDecimal(200) below,
			// so it presumably is a BigDecimal — in that case equals("") is always
			// false and this emptiness check is dead. TODO confirm the field type.
			if (queryModel.getAcqRespCode() != null && !queryModel.getAcqRespCode().equals("")){
				// "200" means success; anything else is treated as failure.
				if("200".equals(queryModel.getAcqRespCode().toString())){
					bufferHql.append(" and acqRespCode = '" + queryModel.getAcqRespCode() + "'");
				}else{
					bufferHql.append(" and acqRespCode != '" + new BigDecimal(200) + "'");
				}
			}
			String hql = bufferHql.toString();
			log.info("流水查询 HQL语句: " + hql );
			List list = findByHQL(hql, sessionUserData);
			// count(*) comes back as the single element of the result list.
			if (list.size() > 0)
				count = Integer.parseInt(list.get(0).toString());
			log.info("CurTranLsWechatAlipayHibernateHQLImpl.queryCount结束调用:查询符合条件的数量。");
		} catch (Exception e) {
			if (log.isDebugEnabled())
				e.printStackTrace();
			log.error("CurTranLsWechatAlipayHibernateHQLImpl.queryCount调用时出现异常。");
			log.error(e, e.fillInStackTrace());
			throw new OAException("查询模板对象时抛异常!");
		}
		return count;
	}

	/**
	 * Paged query: returns the page [startIndex, startIndex + pageSize) of
	 * matching TblExpCurTranLog rows, ordered by terminalId then sysTimeStamp
	 * descending.
	 *
	 * NOTE(review): the {@code orderField}/{@code orderType} parameters are
	 * ignored by the active code path (they were only honored by the
	 * commented-out DetachedCriteria version kept below for reference).
	 * Each selected column is wrapped in nvl(...) so objectTo() never sees a
	 * null cell.
	 */
	@SuppressWarnings({ "rawtypes", "unchecked" })
	@Override
	public List query(TblExpCurTranLog queryModel, int startIndex, int pageSize, String orderField, String orderType,
			UserData sessionUserData, String startDate, String endDate) throws OAException {
		List list = null;
		try {
			// log.info("CurTranLsWechatAlipayHibernateHQLImpl.query开始调用:通过查询条件分页查询信息。");
			//
			// DetachedCriteria dcr = DetachedCriteria.forClass(TblExpCurTranLog.class);
			// // //按名程序模板查询
			// if (queryModel.getMerchantId() != null && !queryModel.getMerchantId().equals(""))
			// dcr.add(Restrictions.eq("merchantId" ,queryModel.getMerchantId()));
			// if (queryModel.getTerminalId() != null && !queryModel.getTerminalId().equals(""))
			// dcr.add(Restrictions.eq("terminalId" ,queryModel.getTerminalId()));
			// if (startDate != null && !startDate.equals(""))
			// dcr.add(Restrictions.ge("sysTimeStamp" ,startDate+"000000"));
			// if (endDate != null && !endDate.equals(""))
			// dcr.add(Restrictions.le("sysTimeStamp" ,endDate+"235959"));
			// if (queryModel.getTranRrn() != null && !queryModel.getTranRrn().equals(""))
			// dcr.add(Restrictions.eq("tranRrn" ,queryModel.getTranRrn()));
			// if (queryModel.getScanCode() != null && !queryModel.getScanCode().equals(""))
			// dcr.add(Restrictions.eq("scanCode" ,queryModel.getScanCode()));
			// if (queryModel.getTranType() != null && !queryModel.getTranType().equals(""))
			// dcr.add(Restrictions.eq("tranType" ,queryModel.getTranType()));
			// if (queryModel.getAcqRespCode() != null && !queryModel.getAcqRespCode().equals("")){
			// if("200".equals(queryModel.getAcqRespCode().toString())){
			// dcr.add(Restrictions.eq("acqRespCode" ,queryModel.getAcqRespCode()));
			// }else{
			// dcr.add(Restrictions.not(Restrictions.in("acqRespCode",new Object[] {new BigDecimal(200)})));
			// }
			// }
			// // 添加排序信息
			// if (orderType != null && orderField != null) {
			// if (orderType.equals("asc"))
			// dcr.addOrder(Order.asc(orderField));
			// else
			// dcr.addOrder(Order.desc(orderField));
			// } else {
			// // 按 id 排序
			//// dcr.addOrder(Order.desc("cardNo"));
			// dcr.addOrder(Order.desc("terminalId"));
			// dcr.addOrder(Order.desc("sysTimeStamp"));
			// }
			// list = findBYCriteria(dcr, startIndex, pageSize, sessionUserData);
			// log.info("CurTranLsWechatAlipayHibernateHQLImpl.query结束调用:通过查询条件分页查询信息。");
			// Active implementation follows. (Log messages were copy-pasted from
			// queryCount; left byte-identical since they are runtime strings.)
			log.info("CurTranLsWechatAlipayHibernateHQLImpl.queryCount开始调用:查询符合条件的数量。");
			// The entity name, not the physical table, is referenced in HQL.
			StringBuffer bufferHql = new StringBuffer("select nvl(merchantId,' '),"
					+ "nvl(terminalId,' '),nvl(tranRrn,' '),nvl(tranType,0),nvl(sysOrderId,' '),"
					+ "nvl(sysVoidOrderId,' '),nvl(sysOrderDtl,' '),nvl(tranAmt,0.00),nvl(tranVoidAmt,0.00),"
					+ "nvl(sysTimeStamp,' '),nvl(acqRespMsg,' ') from TblExpCurTranLog where 1=1 ");
			// Same filter-building logic as queryCount — see the notes there
			// (quoting inconsistencies, acqRespCode.equals("") oddity).
			if (queryModel.getMerchantId() != null && !queryModel.getMerchantId().equals(""))
				bufferHql.append(" and merchantId = " + queryModel.getMerchantId() + " ");
			if (queryModel.getTerminalId() != null && !queryModel.getTerminalId().equals(""))
				bufferHql.append(" and terminalId = " + queryModel.getTerminalId() + " ");
			if (startDate != null && !startDate.equals(""))
				bufferHql.append(" and sysTimeStamp >= " + startDate + "000000");
			if (endDate != null && !endDate.equals(""))
				bufferHql.append(" and sysTimeStamp <= " + endDate + "235959");
			if (queryModel.getTranRrn() != null && !queryModel.getTranRrn().equals(""))
				bufferHql.append(" and tranRrn = '" + queryModel.getTranRrn() + "' ");
			if (queryModel.getScanCode() != null && !queryModel.getScanCode().equals(""))
				bufferHql.append(" and scanCode = '" + queryModel.getScanCode() + "'");
			if (queryModel.getTranType() != null && !queryModel.getTranType().equals(""))
				bufferHql.append(" and tranType = '" + queryModel.getTranType() + "'");
			if (queryModel.getAcqRespCode() != null && !queryModel.getAcqRespCode().equals("")){
				if("200".equals(queryModel.getAcqRespCode().toString())){
					bufferHql.append(" and acqRespCode = '" + queryModel.getAcqRespCode() + "'");
				}else{
					bufferHql.append(" and acqRespCode != '" + new BigDecimal(200) + "'");
				}
			}
			bufferHql.append(" order by terminalId , sysTimeStamp desc");
			String hql = bufferHql.toString();
			log.info("流水查询 HQL语句: " + hql );
			// list = findBySQL(hql, sessionUserData);
			// list = findByHQL(hql, startIndex, pageSize, sessionUserData);
			// Rows come back as Object[] tuples; objectTo() maps them to entities.
			list = objectTo(findByHQL(hql, startIndex, pageSize, sessionUserData));
			log.info("CurTranLsWechatAlipayHibernateHQLImpl.queryCount结束调用:查询符合条件的数量。");
		} catch (Exception e) {
			if (log.isDebugEnabled())
				e.printStackTrace();
			log.error("CurTranLsWechatAlipayHibernateHQLImpl.query通过查询条件分页查询信息,出现异常。");
			log.error(e, e.fillInStackTrace());
			throw new OAException("查询模板对象个数时抛异常!");
		}
		return list;
	}

	/**
	 * Maps the Object[] tuples produced by the projection queries above into
	 * TblExpCurTranLog entities. Column order must match the select list:
	 * merchantId, terminalId, tranRrn, tranType, sysOrderId, sysVoidOrderId,
	 * sysOrderDtl, tranAmt, tranVoidAmt, sysTimeStamp, acqRespMsg.
	 * The nvl(...) wrappers in the queries guarantee no element is null.
	 */
	public List<TblExpCurTranLog> objectTo(List<Object> list ){
		List<TblExpCurTranLog> li = new LinkedList<TblExpCurTranLog>();
		for(int i=0;i<list.size(); i++){
			TblExpCurTranLog tblExpCurTranLog = new TblExpCurTranLog();
			Object[] objects=(Object[])list.get(i);
			tblExpCurTranLog.setMerchantId(objects[0].toString());
			tblExpCurTranLog.setTerminalId(objects[1].toString());
			tblExpCurTranLog.setTranRrn(objects[2].toString());
			tblExpCurTranLog.setTranType(new BigDecimal(objects[3].toString()));
			tblExpCurTranLog.setSysOrderId(objects[4].toString());
			tblExpCurTranLog.setSysVoidOrderId(objects[5].toString());
			tblExpCurTranLog.setSysOrderDtl(objects[6].toString());
			tblExpCurTranLog.setTranAmt(new Double(objects[7].toString()));
			tblExpCurTranLog.setTranVoidAmt(new Double(objects[8].toString()));
			tblExpCurTranLog.setSysTimeStamp(objects[9].toString());
			tblExpCurTranLog.setAcqRespMsg(objects[10].toString());
			li.add(tblExpCurTranLog);
		}
		return li;
	}

	/**
	 * Unpaged variant of {@link #query} used for exports: same filters and
	 * ordering, but returns every matching row. Duplicates the filter-building
	 * code of query() — a candidate for extraction into a shared helper.
	 */
	@SuppressWarnings({ "rawtypes", "unchecked" })
	@Override
	public List queryExport(TblExpCurTranLog queryModel, UserData sessionUserData, String startDate, String endDate) throws OAException {
		List list = null;
		try{
			log.info("CurTranLsWechatAlipayHibernateHQLImpl.query开始调用:通过查询条件分页查询信息。");
			// DetachedCriteria dcr = DetachedCriteria.forClass(TblExpCurTranLog.class);
			// // //按名程序模板查询
			// if (queryModel.getMerchantId() != null && !queryModel.getMerchantId().equals(""))
			// dcr.add(Restrictions.eq("merchantId" ,queryModel.getMerchantId()));
			// if (queryModel.getTerminalId() != null && !queryModel.getTerminalId().equals(""))
			// dcr.add(Restrictions.eq("terminalId" ,queryModel.getTerminalId()));
			// if (startDate != null && !startDate.equals(""))
			// dcr.add(Restrictions.ge("sysTimeStamp" ,startDate+"000000"));
			// if (endDate != null && !endDate.equals(""))
			// dcr.add(Restrictions.le("sysTimeStamp" ,endDate+"235959"));
			// if (queryModel.getTranRrn() != null && !queryModel.getTranRrn().equals(""))
			// dcr.add(Restrictions.eq("tranRrn" ,queryModel.getTranRrn()));
			// if (queryModel.getScanCode() != null && !queryModel.getScanCode().equals(""))
			// dcr.add(Restrictions.eq("scanCode" ,queryModel.getScanCode()));
			// if (queryModel.getTranType() != null && !queryModel.getTranType().equals(""))
			// dcr.add(Restrictions.eq("tranType" ,queryModel.getTranType()));
			// if (queryModel.getAcqRespCode() != null && !queryModel.getAcqRespCode().equals("")){
			// if("200".equals(queryModel.getAcqRespCode().toString())){
			// dcr.add(Restrictions.eq("acqRespCode" ,queryModel.getAcqRespCode()));
			// }else{
			// dcr.add(Restrictions.not(Restrictions.in("acqRespCode",new Object[] {new BigDecimal(200)})));
			// }
			// }
			// dcr.addOrder(Order.desc("terminalId"));
			// dcr.addOrder(Order.desc("sysTimeStamp"));
			// list = findBYCriteria(dcr, sessionUserData);
			// Active implementation follows (log strings copy-pasted from
			// queryCount; kept byte-identical).
			log.info("CurTranLsWechatAlipayHibernateHQLImpl.queryCount开始调用:查询符合条件的数量。");
			// Entity name, not physical table name, in the HQL.
			StringBuffer bufferHql = new StringBuffer("select nvl(merchantId,' '),"
					+ "nvl(terminalId,' '),nvl(tranRrn,' '),nvl(tranType,0),nvl(sysOrderId,' '),"
					+ "nvl(sysVoidOrderId,' '),nvl(sysOrderDtl,' '),nvl(tranAmt,0.00),nvl(tranVoidAmt,0.00),"
					+ "nvl(sysTimeStamp,' '),nvl(acqRespMsg,' ') from TblExpCurTranLog where 1=1 ");
			// Same filter-building logic as queryCount — see notes there.
			if (queryModel.getMerchantId() != null && !queryModel.getMerchantId().equals(""))
				bufferHql.append(" and merchantId = " + queryModel.getMerchantId() + " ");
			if (queryModel.getTerminalId() != null && !queryModel.getTerminalId().equals(""))
				bufferHql.append(" and terminalId = " + queryModel.getTerminalId() + " ");
			if (startDate != null && !startDate.equals(""))
				bufferHql.append(" and sysTimeStamp >= " + startDate + "000000");
			if (endDate != null && !endDate.equals(""))
				bufferHql.append(" and sysTimeStamp <= " + endDate + "235959");
			if (queryModel.getTranRrn() != null && !queryModel.getTranRrn().equals(""))
				bufferHql.append(" and tranRrn = '" + queryModel.getTranRrn() + "' ");
			if (queryModel.getScanCode() != null && !queryModel.getScanCode().equals(""))
				bufferHql.append(" and scanCode = '" + queryModel.getScanCode() + "'");
			if (queryModel.getTranType() != null && !queryModel.getTranType().equals(""))
				bufferHql.append(" and tranType = '" + queryModel.getTranType() + "'");
			if (queryModel.getAcqRespCode() != null && !queryModel.getAcqRespCode().equals("")){
				if("200".equals(queryModel.getAcqRespCode().toString())){
					bufferHql.append(" and acqRespCode = '" + queryModel.getAcqRespCode() + "'");
				}else{
					bufferHql.append(" and acqRespCode != '" + new BigDecimal(200) + "'");
				}
			}
			bufferHql.append(" order by terminalId , sysTimeStamp desc");
			String hql = bufferHql.toString();
			log.info("流水查询 HQL语句: " + hql );
			list = objectTo(findByHQL(hql, sessionUserData));
			log.info("CurTranLsWechatAlipayHibernateHQLImpl.query结束调用:通过查询条件分页查询信息。");
		} catch (Exception e) {
			if (log.isDebugEnabled())
				e.printStackTrace();
			log.error("CurTranLsHibernateHQLImpl.query通过查询条件分页查询信息,出现异常。");
			log.error(e, e.fillInStackTrace());
			throw new OAException("查询模板对象个数时抛异常!");
		}
		return list;
	}
}
| [
"guolonglong80@163.com"
] | guolonglong80@163.com |
85017a5965a11f0be3ae0cefb82669007e50dbf8 | b84d59e44cbdcec6f29ef0ed57e4d8a40ab9d954 | /src/test/java/za/ac/cput/yaseen/set/PersonSetTest.java | 73804167762206a9f7f6f317c6bf630a71503d1a | [] | no_license | leavesza/ADPAssignment2 | 08efea5e648b0239b15ce5bb820d6bd039d5c61e | 0e92607093fe68937116a070f83f91836c2ac44d | refs/heads/master | 2023-04-20T12:40:03.353210 | 2021-05-16T20:20:35 | 2021-05-16T20:20:35 | 367,977,294 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 1,264 | java | package za.ac.cput.yaseen.set;
import org.junit.Assert;
import org.junit.jupiter.api.Test;
import java.util.HashSet;
import java.util.Set;
import static org.junit.jupiter.api.Assertions.*;
public class PersonSetTest {
@Test
public void testAddToSet() {
Set<Person> set = new HashSet<>();
PersonSet personSet = new PersonSet(set);
Person person = new Person();
person.setName("Yaseen");
personSet.add(person);
Assert.assertTrue(set.contains(person));
}
@Test
public void testFindInSet() {
Set<Person> set = new HashSet<>();
PersonSet personSet = new PersonSet(set);
Person person = new Person();
person.setName("Yaseen");
personSet.add(person);
Person foundPerson = personSet.find("Yaseen");
Assert.assertEquals(foundPerson, person);
System.out.println(person.getName());
}
@Test
public void testRemoveFromSet() {
Set<Person> set = new HashSet<>();
PersonSet personSet = new PersonSet(set);
Person person = new Person();
person.setName("Yaseen");
personSet.add(person);
personSet.remove(person);
Assert.assertFalse(set.contains(person));
}
} | [
"yaseenlevy21@gmail.com"
] | yaseenlevy21@gmail.com |
f93fe8f1a82d0d72a3a5caf92d2cc913bb771c69 | 2de23fbcec16cb2b152bd66c82e949c2e3e44b6c | /ProyectoFinalProcesos/src/controlador/VistaMostrarCursosController.java | 8c975c857e95c80e6964b0124da47a8842a83373 | [] | no_license | LuisGerardoRendon/ProyectoProcesos | 1e3e291427e5b8a055e3ca81d1ee46617040e708 | fafe2648e8dd3d5108e5c8f16c36822cfade9380 | refs/heads/master | 2022-09-12T17:27:46.547868 | 2020-06-01T00:05:26 | 2020-06-01T00:05:26 | 263,782,907 | 0 | 1 | null | null | null | null | UTF-8 | Java | false | false | 5,026 | java | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package controlador;
import java.io.IOException;
import java.net.URL;
import java.util.ResourceBundle;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.event.ActionEvent;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.fxml.Initializable;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.control.Button;
import javafx.scene.control.Label;
import javafx.scene.control.RadioButton;
import javafx.scene.control.TableColumn;
import javafx.scene.control.TableView;
import javafx.scene.control.ToggleGroup;
import javafx.scene.control.cell.PropertyValueFactory;
import javafx.stage.Modality;
import javafx.stage.Stage;
import modelo.Alertas;
import modelo.CursoVO;
import modelo.Curso_DAO_Implements;
/**
 * JavaFX controller for the "mostrar cursos" (show courses) view: filters the
 * course table by status via radio buttons and opens modal dialogs to register
 * a course or inspect the selected one.
 */
public class VistaMostrarCursosController implements Initializable {

    @FXML
    private Label labelMostrarCursos;
    @FXML
    private RadioButton radioBotonTodos;
    @FXML
    private RadioButton radioBotonDisponibles;
    @FXML
    private RadioButton radioBotonCursando;
    @FXML
    private RadioButton radioBotonConcluido;
    @FXML
    private TableView<CursoVO> tablaCursos;
    @FXML
    private TableColumn<?, ?> comlumnaCursos;
    @FXML
    private Button botonInformacionCurso;
    @FXML
    private Button botonRegistrarCurso;

    ObservableList<CursoVO> cursos = FXCollections.observableArrayList();
    Curso_DAO_Implements curso_DAO = new Curso_DAO_Implements();
    Alertas alerta = new Alertas();

    @FXML
    private Button botonMostrarCursos;

    /**
     * Called by the FXML loader after field injection; groups the four status
     * radio buttons so they are mutually exclusive.
     */
    @Override
    public void initialize(URL url, ResourceBundle rb) {
        ToggleGroup tg = new ToggleGroup();
        radioBotonConcluido.setToggleGroup(tg);
        radioBotonCursando.setToggleGroup(tg);
        radioBotonDisponibles.setToggleGroup(tg);
        radioBotonTodos.setToggleGroup(tg);
    }

    /**
     * Loads the courses matching the given status filter into the table.
     *
     * @param filtro one of "Todos", "Disponible", "Cursando", "Concluido"
     */
    public void inicializarTabla(String filtro) {
        this.comlumnaCursos.setCellValueFactory(new PropertyValueFactory("nombre"));
        cursos = curso_DAO.recuperarCursos(filtro);
        tablaCursos.setItems(cursos);
    }

    /** "Información" button handler. */
    @FXML
    private void informacionCurso(ActionEvent event) {
        mostrarVistaInformacionCurso();
    }

    /** "Registrar" button handler. */
    @FXML
    private void registrarCurso(ActionEvent event) {
        mostrarVentanaRegistrarCurso();
    }

    /** Opens the "register course" view as an application-modal dialog. */
    public void mostrarVentanaRegistrarCurso() {
        try {
            FXMLLoader loader = new FXMLLoader(getClass().getResource("/vista/vistaRegistrarCurso.fxml"));
            Parent root = loader.load();
            Stage stage = new Stage();
            stage.initModality(Modality.APPLICATION_MODAL);
            stage.setScene(new Scene(root));
            stage.showAndWait();
        } catch (IOException e) {
            // BUGFIX: was an empty catch — a missing/broken FXML failed with no
            // trace at all. At minimum surface it on stderr.
            e.printStackTrace();
        }
    }

    /**
     * "Mostrar" button handler: maps the selected radio button to a status
     * filter (defaulting to "Todos") and reloads the table.
     */
    @FXML
    private void mostrarCursos(ActionEvent event) {
        String filtro = "Todos";
        if (radioBotonConcluido.isSelected()) {
            filtro = "Concluido";
        } else if (radioBotonCursando.isSelected()) {
            filtro = "Cursando";
        } else if (radioBotonDisponibles.isSelected()) {
            filtro = "Disponible";
        }
        // radioBotonTodos needs no branch: "Todos" is already the default.
        inicializarTabla(filtro);
    }

    /**
     * Opens the course-information dialog for the currently selected table
     * row; shows an alert when nothing is selected.
     */
    public void mostrarVistaInformacionCurso() {
        CursoVO curso = tablaCursos.getSelectionModel().getSelectedItem();
        if (curso == null) {
            alerta.alertaInformacion("Por favor, selecciona un curso");
            return;
        }
        try {
            FXMLLoader loader = new FXMLLoader(getClass().getResource("/vista/vistaInformacionDelCurso.fxml"));
            Parent root = loader.load();
            VistaInformacionDelCursoController controlador = loader.getController();
            controlador.initDatos(cursos, curso);
            Stage stage = new Stage();
            stage.initModality(Modality.APPLICATION_MODAL);
            stage.setScene(new Scene(root));
            stage.showAndWait();
        } catch (IOException e) {
            // BUGFIX: was an empty catch — surface load failures on stderr.
            e.printStackTrace();
        }
    }
}
| [
"rendo@DESKTOP-0FGPCGK"
] | rendo@DESKTOP-0FGPCGK |
caaf058fae815e3a1222c6ea2866bc85e556c053 | 5cd2c3cd32532d98f8f7b70d5f86d077c209ed6a | /swocean/src/main/java/com/dct/swocean/entitys/SysRegion.java | be4e335e59b8610505726fc8caa0364feac483cf | [] | no_license | eric218/genealogy-boot1 | 6022ad3410a167fe327775bc2a32c45bde9d3fb6 | d6c1d58af82041b6ff42ba5af701b4fdd944a390 | refs/heads/master | 2020-04-02T22:00:56.211169 | 2018-11-02T10:44:24 | 2018-11-02T10:44:24 | 154,818,792 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 908 | java | package com.dct.swocean.entitys;
/**
 * Plain data holder for a row of the sys_region table: a region in the
 * administrative hierarchy, linked to its parent by code.
 */
public class SysRegion {

    /** Surrogate primary key. */
    private Integer id;

    /** Region code (also referenced by children via parentCode). */
    private Integer code;

    /** Region name; always stored trimmed (see {@link #setName}). */
    private String name;

    /** Code of the enclosing parent region. */
    private Integer parentCode;

    /** Region level/type discriminator. */
    private Integer type;

    public Integer getId() {
        return id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    public Integer getCode() {
        return code;
    }

    public void setCode(Integer code) {
        this.code = code;
    }

    public String getName() {
        return name;
    }

    /**
     * Stores the name with surrounding whitespace removed; a null argument
     * clears the field.
     */
    public void setName(String name) {
        if (name == null) {
            this.name = null;
        } else {
            this.name = name.trim();
        }
    }

    public Integer getParentCode() {
        return parentCode;
    }

    public void setParentCode(Integer parentCode) {
        this.parentCode = parentCode;
    }

    public Integer getType() {
        return type;
    }

    public void setType(Integer type) {
        this.type = type;
    }
}
"554221180@qq.com"
] | 554221180@qq.com |
d2dbdb5c25042c3e5d24cbbb829a7b5aff6404ca | 31220236373645673ec55cc82c782b10c73e0cd6 | /src/main/java/org/clueweb/ranking/PrintDecomDocvector.java | 666db3ee0479d3ddb0f098f4e371e94062422e1c | [
"Apache-2.0"
] | permissive | jiaul/clueweb-working-code | 045c03e876609bd5d751d3b98036ecdc040dec3a | b25e7169f0b47314ae3f7ef1ec5c13632d06ada3 | refs/heads/master | 2020-04-14T17:54:02.374100 | 2015-11-11T17:46:13 | 2015-11-11T17:46:13 | 38,913,397 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 3,240 | java | package org.clueweb.ranking;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Writable;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;
import org.clueweb.data.PForDocVector;
//import org.clueweb.data.MTPForDocVector;
import java.io.*;
import tl.lin.data.array.IntArrayWritable;
/**
 * Command-line tool: reads PFor-compressed document vectors from Hadoop
 * SequenceFiles (a single file or a directory of files), decompresses each
 * vector, and writes "docid length termId termId ..." lines to a text file.
 *
 * Not thread-safe / not reentrant: results are accumulated in mutable static
 * state (data, numDocRead) shared by all methods.
 */
public class PrintDecomDocvector {

    private static final TupleFactory TUPLE_FACTORY = TupleFactory.getInstance();

    // Utility class: no instances.
    private PrintDecomDocvector() {}

    // WARNING(review): 1,000,000,000 array slots of object references are
    // allocated eagerly at class initialization — roughly 4–8 GB before a
    // single record is read. This will OOM on most JVM configurations; an
    // ArrayList grown on demand would be the fix (left unchanged because the
    // public static field is part of the class's visible interface).
    public static DecomKeyValue[] data = new DecomKeyValue[1000000000];
    // Number of documents accumulated into `data` so far (across all files).
    public static int numDocRead = 0;
    // Apparently intended as a file counter, but never updated or read.
    public static int numFile = 0;
    // Scratch decoder reused for every record (fromIntArrayWritable fills it in place).
    private static final PForDocVector DOC = new PForDocVector();

    /**
     * args: [compressed document vectors path] [max records per file] [output file].
     * Reads up to `max` records from each input SequenceFile and dumps the
     * decompressed vectors as text.
     */
    public static void main(String[] args) throws IOException {
        if (args.length < 3) {
            System.out.println("args: [compressed document vectors path] [max-num-of-records-per-file] [output file name]");
            System.exit(-1);
        }
        String f = args[0];
        int numDoc;
        int max = Integer.parseInt(args[1]);
        FileSystem fs = FileSystem.get(new Configuration());
        Path p = new Path(f);
        // Directory => process every contained file; otherwise the single file.
        if (fs.getFileStatus(p).isDir()) {
            numDoc = readSequenceFilesInDir(p, fs, max);
        } else {
            numDoc = readSequenceFile(p, fs, max);
        }
        // NOTE(review): not try-with-resources — if a write throws, the
        // PrintWriter is never closed/flushed and the output file is truncated.
        PrintWriter out = new PrintWriter(new FileWriter(args[2]));
        // print decompressed vectors: one line per doc, "key length t1 t2 ...".
        for(int i = 0; i < numDoc; i++) {
            out.print(data[i].key + " " + data[i].doc.length);
            for(int j = 0; j < data[i].doc.length; j++)
                out.print(" " + data[i].doc[j]);
            out.print("\n");
        }
        out.close();
    }

    /**
     * Reads up to `max` records from one SequenceFile into the static `data`
     * array (starting at numDocRead) and returns how many were read.
     * Key/value classes are instantiated reflectively from the file header,
     * with Pig Tuples special-cased.
     */
    private static int readSequenceFile(Path path, FileSystem fs, int max) throws IOException {
        SequenceFile.Reader reader = new SequenceFile.Reader(fs, path, fs.getConf());
        Writable key;
        IntArrayWritable value;
        int n = 0;
        try {
            if ( Tuple.class.isAssignableFrom(reader.getKeyClass())) {
                key = TUPLE_FACTORY.newTuple();
            } else {
                key = (Writable) reader.getKeyClass().newInstance();
            }
            // NOTE(review): when the value class is a Tuple, a Tuple is cast to
            // IntArrayWritable — this would throw ClassCastException unless
            // those types are related; presumably value files are always
            // IntArrayWritable. TODO confirm against the writing job.
            if ( Tuple.class.isAssignableFrom(reader.getValueClass())) {
                value = (IntArrayWritable) TUPLE_FACTORY.newTuple();
            } else {
                value = (IntArrayWritable) reader.getValueClass().newInstance();
            }
            while (reader.next(key, value)) {
                // Decompress into the shared DOC scratch object, then snapshot
                // the term ids into the global accumulator.
                DOC.fromIntArrayWritable(value, DOC);
                data[numDocRead] = new DecomKeyValue(key.toString(), DOC.getTermIds());
                numDocRead++;
                n++;
                if (n >= max)
                    break;
            }
            // NOTE(review): close() sits inside the try — any exception above
            // leaks the reader (no finally / try-with-resources).
            reader.close();
            System.out.println(n + " records read.\n");
        } catch (Exception e) {
            // Best-effort: log and fall through, returning however many records
            // were read before the failure.
            e.printStackTrace();
        }
        return n;
    }

    /**
     * Reads every file directly under `path` (non-recursive) via
     * readSequenceFile and returns the total record count.
     */
    private static int readSequenceFilesInDir(Path path, FileSystem fs, int max) {
        int n = 0;
        try {
            FileStatus[] stat = fs.listStatus(path);
            for (int i = 0; i < stat.length; ++i) {
                n += readSequenceFile(stat[i].getPath(), fs ,max);
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        System.out.println(n + " records read in total.");
        return n;
    }
}
| [
"jiaul@jiaul.(none)"
] | jiaul@jiaul.(none) |
981b4575400dd2cefb9113084108bc00a2edec15 | f6c8ef02c43ab6ec6d941cf93b9c454e19c6a6ee | /src/com/company/Lesson_34/Task_01/Task_02/SleepingThread.java | 8ec6714edf783025306143f26c09b58eefca0349 | [] | no_license | VasylN/Tutor_project | 0b96e3437b15b4e55c9806dfde13c4338cf28784 | 835080f6c767f5e8d14d800f0d6d032445677626 | refs/heads/master | 2021-01-22T21:48:57.478089 | 2017-05-24T18:02:39 | 2017-05-24T18:02:39 | 85,478,361 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 2,016 | java | package com.company.Lesson_34.Task_01.Task_02;
/**
* Created by Pc on 18.01.2017.
*//* Последовательные выполнения нитей
1. В выполняющем классе создать статическую переменную int COUNT = 4
2. Создать класс SleepingThread, унаследовать его от Thread
3. В классе SleepingThread переопределить метод toString()
- Определить формат вывода, что бы он выводил нити, так как показано в примере: "#" + getName() + ": " + countDownIndex,
где countDownIndex - число от COUNT до 1
4. Сделай так, чтобы все нити выполнялись последовательно: сначала для нити №1 отсчет с COUNT до 1,
потом для нити №2 с COUNT до 1 и т.д.
5. В методе run после всех действий поставь задержку в 10 миллисекунд. Выведи "Нить прервана", если нить будет прервана.
6. Подумать, как должен быть реализован метод main
Пример:
#1: 4
#1: 3
...
#1: 1
#2: 4
...
*/
public class SleepingThread extends Thread {
int countDownIndex = Test_01.COUNT;
static int countThreads = 0;
public SleepingThread() throws InterruptedException {
super(String.valueOf(++countThreads));
start();
// join();
}
@Override
public void run() {
while (true) {
System.out.println(this);
if (--countDownIndex == 0)
{
return;
}
try {
Thread.sleep(10);
} catch (InterruptedException e) {
System.out.println("Thread interrupted");
}
}
}
@Override
public String toString() {
return "#" + getName() + ": " + countDownIndex;
}
}
| [
"vasyln86@gmail.com"
] | vasyln86@gmail.com |
de4ac6e7cf7396afb9eeb0917e615511d07a0099 | 4bb70bda1888f5f17a82a6160fbd53d2687ce411 | /src/main/java/com/controller/LoginController.java | 1cb33a84483a5611f9f3714aa5df367e3db35cd4 | [] | no_license | wudengyu1997/ssmTest | d851a0d69ac909e7836f436bea20f31c297a991f | 174103528a0821744c4a299ca2491f3cd64b2b8d | refs/heads/master | 2022-12-25T06:27:18.546994 | 2019-08-19T01:07:29 | 2019-08-19T01:07:29 | 203,073,677 | 0 | 0 | null | 2022-12-16T07:25:58 | 2019-08-19T00:59:22 | Java | UTF-8 | Java | false | false | 272 | java | package com.controller;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
@Controller
public class LoginController {
@RequestMapping(value = "/login")
public String Login() {
return "login";
}
}
| [
"1654805021@qq.com"
] | 1654805021@qq.com |
db9456121eeb869cf2bb0ae82078987c2a53a5fe | 06a4fd410cdaf423cd5a90a19d3193429b84e012 | /android/app/src/debug/java/com/mtgdecklist/ReactNativeFlipper.java | 102f7ab4b2824f24ea1275d6107b83335b689d5b | [] | no_license | sharkb8i/mtg-deck-list | 70c2a768979e416ab5286de85b521fe4a9a987c6 | 118e68b6414fdc2e78b5646718fe813eb9d2b734 | refs/heads/master | 2022-12-14T03:49:47.309783 | 2020-09-18T05:24:28 | 2020-09-18T05:24:28 | 292,421,837 | 1 | 0 | null | null | null | null | UTF-8 | Java | false | false | 3,266 | java | /**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* <p>This source code is licensed under the MIT license found in the LICENSE file in the root
* directory of this source tree.
*/
package com.mtgdecklist;
import android.content.Context;
import com.facebook.flipper.android.AndroidFlipperClient;
import com.facebook.flipper.android.utils.FlipperUtils;
import com.facebook.flipper.core.FlipperClient;
import com.facebook.flipper.plugins.crashreporter.CrashReporterPlugin;
import com.facebook.flipper.plugins.databases.DatabasesFlipperPlugin;
import com.facebook.flipper.plugins.fresco.FrescoFlipperPlugin;
import com.facebook.flipper.plugins.inspector.DescriptorMapping;
import com.facebook.flipper.plugins.inspector.InspectorFlipperPlugin;
import com.facebook.flipper.plugins.network.FlipperOkhttpInterceptor;
import com.facebook.flipper.plugins.network.NetworkFlipperPlugin;
import com.facebook.flipper.plugins.react.ReactFlipperPlugin;
import com.facebook.flipper.plugins.sharedpreferences.SharedPreferencesFlipperPlugin;
import com.facebook.react.ReactInstanceManager;
import com.facebook.react.bridge.ReactContext;
import com.facebook.react.modules.network.NetworkingModule;
import okhttp3.OkHttpClient;
public class ReactNativeFlipper {
public static void initializeFlipper(Context context, ReactInstanceManager reactInstanceManager) {
if (FlipperUtils.shouldEnableFlipper(context)) {
final FlipperClient client = AndroidFlipperClient.getInstance(context);
client.addPlugin(new InspectorFlipperPlugin(context, DescriptorMapping.withDefaults()));
client.addPlugin(new ReactFlipperPlugin());
client.addPlugin(new DatabasesFlipperPlugin(context));
client.addPlugin(new SharedPreferencesFlipperPlugin(context));
client.addPlugin(CrashReporterPlugin.getInstance());
NetworkFlipperPlugin networkFlipperPlugin = new NetworkFlipperPlugin();
NetworkingModule.setCustomClientBuilder(
new NetworkingModule.CustomClientBuilder() {
@Override
public void apply(OkHttpClient.Builder builder) {
builder.addNetworkInterceptor(new FlipperOkhttpInterceptor(networkFlipperPlugin));
}
});
client.addPlugin(networkFlipperPlugin);
client.start();
// Fresco Plugin needs to ensure that ImagePipelineFactory is initialized
// Hence we run if after all native modules have been initialized
ReactContext reactContext = reactInstanceManager.getCurrentReactContext();
if (reactContext == null) {
reactInstanceManager.addReactInstanceEventListener(
new ReactInstanceManager.ReactInstanceEventListener() {
@Override
public void onReactContextInitialized(ReactContext reactContext) {
reactInstanceManager.removeReactInstanceEventListener(this);
reactContext.runOnNativeModulesQueueThread(
new Runnable() {
@Override
public void run() {
client.addPlugin(new FrescoFlipperPlugin());
}
});
}
});
} else {
client.addPlugin(new FrescoFlipperPlugin());
}
}
}
}
| [
"sharkb8i97@gmail.com"
] | sharkb8i97@gmail.com |
faea6b3a90bbf1123b7ada51a9bdb5e97a12fd21 | 66b8e87bf988de5c39e981a4e5e9ab0b037909e6 | /eureka-client-demo2/src/main/java/com/alex/eureka/client/demo2/MainApplication.java | 2a586aeccc081028b6e0276750dadc8a0646e927 | [] | no_license | solodemonpixy/eureka-demos | 347b4416012833fc5bb68d4ffb516d08b817dc99 | 52f23acefb78e30b66650bdada4c46680adba2ed | refs/heads/master | 2021-07-24T13:47:30.196581 | 2017-11-06T16:10:44 | 2017-11-06T16:10:44 | 109,598,160 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 415 | java | package com.alex.eureka.client.demo2;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
@SpringBootApplication
@EnableDiscoveryClient
public class MainApplication {
public static void main(String[] args) {
SpringApplication.run(MainApplication.class, args);
}
}
| [
"zhouswim@163.com"
] | zhouswim@163.com |
c988997409a0155dd7a69ae7428e199551dde605 | 83dc73eb23148f7aaee832e1f93fcb06e53537c3 | /desafio-testes-unitarios-master/src/main/java/gft/com/ctas/entities/StatusReason.java | 4dc2496dab9b9b0b790ff4aac7da067c641f8395 | [] | no_license | LucasPedrosoTI/desafio-tdd | 8a904ca3d074636b2fabfb49ed919c1b42aa7014 | 03bcc3faefe40afeaacb3c2553d7517f2f13e840 | refs/heads/master | 2023-02-03T17:59:52.180377 | 2020-12-22T17:50:35 | 2020-12-22T17:50:35 | 323,412,764 | 1 | 0 | null | null | null | null | UTF-8 | Java | false | false | 1,192 | java | package gft.com.ctas.entities;
import java.io.Serializable;
import java.math.BigDecimal;
import java.sql.Timestamp;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import lombok.Data;
@Data
@Entity
@Table(name="cyberbank_core.STATUS_REASON")
public class StatusReason implements Serializable {
private static final long serialVersionUID = 1L;
@Id
@SequenceGenerator(name="STATUS_REASON_ID_GENERATOR" )
@GeneratedValue(strategy=GenerationType.SEQUENCE, generator="STATUS_REASON_ID_GENERATOR")
private long id;
@Column(name="LONG_DESC")
private String longDesc;
private String nemotecnico;
@Column(name="OFFICIAL_ID")
private String officialId;
@Column(name="SHORT_DESC")
private String shortDesc;
@Column(name="STAMP_ADDITIONAL")
private String stampAdditional;
@Column(name="STAMP_DATE_TIME")
private Timestamp stampDateTime;
@Column(name="STATUS_DATE")
private Timestamp statusDate;
@Column(name="STATUS_REASON_ID")
private BigDecimal statusReasonId;
} | [
"Lucas.PedrosoTI@gmail.com"
] | Lucas.PedrosoTI@gmail.com |
1c3d6a2b8e4bb9b71bb9a3cd97107c781f865ae0 | 611b2f6227b7c3b4b380a4a410f357c371a05339 | /src/main/java/android/support/v7/widget/AppCompatCompoundButtonHelper.java | cea75242b10518ddc0e4ff2d8a261817a105a9af | [] | no_license | obaby/bjqd | 76f35fcb9bbfa4841646a8888c9277ad66b171dd | 97c56f77380835e306ea12401f17fb688ca1373f | refs/heads/master | 2022-12-04T21:33:17.239023 | 2020-08-25T10:53:15 | 2020-08-25T10:53:15 | 290,186,830 | 3 | 1 | null | null | null | null | UTF-8 | Java | false | false | 5,255 | java | package android.support.v7.widget;
import android.content.res.ColorStateList;
import android.content.res.TypedArray;
import android.graphics.PorterDuff;
import android.graphics.drawable.Drawable;
import android.support.annotation.Nullable;
import android.support.v4.graphics.drawable.DrawableCompat;
import android.support.v4.widget.CompoundButtonCompat;
import android.support.v7.appcompat.R;
import android.support.v7.content.res.AppCompatResources;
import android.util.AttributeSet;
import android.widget.CompoundButton;
class AppCompatCompoundButtonHelper {
private ColorStateList mButtonTintList = null;
private PorterDuff.Mode mButtonTintMode = null;
private boolean mHasButtonTint = false;
private boolean mHasButtonTintMode = false;
private boolean mSkipNextApply;
private final CompoundButton mView;
interface DirectSetButtonDrawableInterface {
void setButtonDrawable(Drawable drawable);
}
AppCompatCompoundButtonHelper(CompoundButton compoundButton) {
this.mView = compoundButton;
}
/* access modifiers changed from: package-private */
public void loadFromAttributes(AttributeSet attributeSet, int i) {
int resourceId;
TypedArray obtainStyledAttributes = this.mView.getContext().obtainStyledAttributes(attributeSet, R.styleable.CompoundButton, i, 0);
try {
if (obtainStyledAttributes.hasValue(R.styleable.CompoundButton_android_button) && (resourceId = obtainStyledAttributes.getResourceId(R.styleable.CompoundButton_android_button, 0)) != 0) {
this.mView.setButtonDrawable(AppCompatResources.getDrawable(this.mView.getContext(), resourceId));
}
if (obtainStyledAttributes.hasValue(R.styleable.CompoundButton_buttonTint)) {
CompoundButtonCompat.setButtonTintList(this.mView, obtainStyledAttributes.getColorStateList(R.styleable.CompoundButton_buttonTint));
}
if (obtainStyledAttributes.hasValue(R.styleable.CompoundButton_buttonTintMode)) {
CompoundButtonCompat.setButtonTintMode(this.mView, DrawableUtils.parseTintMode(obtainStyledAttributes.getInt(R.styleable.CompoundButton_buttonTintMode, -1), (PorterDuff.Mode) null));
}
} finally {
obtainStyledAttributes.recycle();
}
}
/* access modifiers changed from: package-private */
public void setSupportButtonTintList(ColorStateList colorStateList) {
this.mButtonTintList = colorStateList;
this.mHasButtonTint = true;
applyButtonTint();
}
/* access modifiers changed from: package-private */
public ColorStateList getSupportButtonTintList() {
return this.mButtonTintList;
}
/* access modifiers changed from: package-private */
public void setSupportButtonTintMode(@Nullable PorterDuff.Mode mode) {
this.mButtonTintMode = mode;
this.mHasButtonTintMode = true;
applyButtonTint();
}
/* access modifiers changed from: package-private */
public PorterDuff.Mode getSupportButtonTintMode() {
return this.mButtonTintMode;
}
/* access modifiers changed from: package-private */
public void onSetButtonDrawable() {
if (this.mSkipNextApply) {
this.mSkipNextApply = false;
return;
}
this.mSkipNextApply = true;
applyButtonTint();
}
/* access modifiers changed from: package-private */
public void applyButtonTint() {
Drawable buttonDrawable = CompoundButtonCompat.getButtonDrawable(this.mView);
if (buttonDrawable == null) {
return;
}
if (this.mHasButtonTint || this.mHasButtonTintMode) {
Drawable mutate = DrawableCompat.wrap(buttonDrawable).mutate();
if (this.mHasButtonTint) {
DrawableCompat.setTintList(mutate, this.mButtonTintList);
}
if (this.mHasButtonTintMode) {
DrawableCompat.setTintMode(mutate, this.mButtonTintMode);
}
if (mutate.isStateful()) {
mutate.setState(this.mView.getDrawableState());
}
this.mView.setButtonDrawable(mutate);
}
}
/* access modifiers changed from: package-private */
/* JADX WARNING: Code restructure failed: missing block: B:2:0x0006, code lost:
r0 = android.support.v4.widget.CompoundButtonCompat.getButtonDrawable(r2.mView);
*/
/* Code decompiled incorrectly, please refer to instructions dump. */
public int getCompoundPaddingLeft(int r3) {
/*
r2 = this;
int r0 = android.os.Build.VERSION.SDK_INT
r1 = 17
if (r0 >= r1) goto L_0x0013
android.widget.CompoundButton r0 = r2.mView
android.graphics.drawable.Drawable r0 = android.support.v4.widget.CompoundButtonCompat.getButtonDrawable(r0)
if (r0 == 0) goto L_0x0013
int r0 = r0.getIntrinsicWidth()
int r3 = r3 + r0
L_0x0013:
return r3
*/
throw new UnsupportedOperationException("Method not decompiled: android.support.v7.widget.AppCompatCompoundButtonHelper.getCompoundPaddingLeft(int):int");
}
}
| [
"obaby.lh@gmail.com"
] | obaby.lh@gmail.com |
886c87dc686318a8fac40b6fd483c2e58363a388 | 88c535b38d2991651f95b5caeb2b380c0cbb3cb7 | /src/main/java/life/littleliu/community/controller/QuestionController.java | d64ab914eb75545c321b544ccea2aa2a4b484b43 | [] | no_license | icheckmateu/community | e23d131379ba6a5be1c19c56045dd7b68f1cf2ca | 9e20a04fe6ce38752badda0f6706a699404a84ab | refs/heads/master | 2023-04-05T15:18:35.435502 | 2021-04-14T08:49:20 | 2021-04-14T08:49:20 | 352,822,878 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 1,401 | java | package life.littleliu.community.controller;
import life.littleliu.community.dto.CommentDTO;
import life.littleliu.community.dto.QuestionDTO;
import life.littleliu.community.enums.CommentTypeEnum;
import life.littleliu.community.service.CommentService;
import life.littleliu.community.service.QuestionService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import java.util.List;
@Controller
public class QuestionController {
@Autowired
private QuestionService questionService;
@Autowired
private CommentService commentService;
@GetMapping("/question/{id}")
public String question(@PathVariable(name = "id") Long id, Model model) {
QuestionDTO questionDTO = questionService.getById(id);
List<QuestionDTO> relatedQuestions = questionService.selectRelated(questionDTO);
List<CommentDTO> comments = commentService.listByTargetId(id, CommentTypeEnum.QUESTION);
//累加阅读数
questionService.incView(id);
model.addAttribute("question", questionDTO);
model.addAttribute("comments", comments);
model.addAttribute("relatedQuestions", relatedQuestions);
return "question";
}
}
| [
"522448949@qq.com"
] | 522448949@qq.com |
91e9f18f88157600fc733e1bf739c330fc5de565 | 40df58d026d81d8ebf2a71175c00b92f30b6b241 | /src/main/java/com/example/demo/ResponseFilter.java | 73aeae8874a4b0c3fd3afa6d0f7bf4ac7af7fc1b | [] | no_license | muneer-ahmed-ge/blue-green | e44ce3e91c13c8ddab4c9535ca1cd45e2968e01c | 9c0d2389b829e6f87df194d422e04775c77bc4e0 | refs/heads/master | 2020-04-23T19:20:05.757999 | 2019-03-14T04:14:34 | 2019-03-14T04:14:34 | 171,399,588 | 0 | 0 | null | null | null | null | UTF-8 | Java | false | false | 2,140 | java | package com.example.demo;
import java.io.IOException;
import java.util.Random;
import javax.annotation.PostConstruct;
import javax.servlet.FilterChain;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import lombok.extern.slf4j.Slf4j;
import org.slf4j.MDC;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;
import org.springframework.web.filter.OncePerRequestFilter;
/**
* @author Muneer Ahmed
* @version 1.0
* @since 2019-03-13
*/
@Slf4j
@Order(1)
@Component
/**
*
* Add a Host header with value as the Application Version configured by the property gateway.version
*
* It is expected for Client App to send this header in every request
*
* This is useful to route the requests during Blue/Green deployment
*
*/
public class ResponseFilter extends OncePerRequestFilter {
private static final String HOST = "Host";
private static final String SYNC_ID = "SYNC_ID";
@Value("${bg.version}")
private String applicationVersion;
@PostConstruct
private void init() {
applicationVersion += ".com";
}
@Override
protected void doFilterInternal(HttpServletRequest httpServletRequest,
HttpServletResponse httpServletResponse,
FilterChain filterChain)
throws ServletException, IOException {
addSyncId(httpServletRequest);
httpServletResponse.addHeader(HOST, applicationVersion);
filterChain.doFilter(httpServletRequest, httpServletResponse);
}
private void addSyncId(HttpServletRequest httpServletRequest) {
HttpSession session = httpServletRequest.getSession(true);
String syncId = (String) session.getAttribute(SYNC_ID);
if (null == syncId) {
syncId = String.valueOf(new Random().nextInt(900) + 100);
session.setAttribute(SYNC_ID, syncId);
}
MDC.put(SYNC_ID, syncId);
}
}
| [
"muneer.ahmed@ge.com"
] | muneer.ahmed@ge.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.