Dataset Preview
The full dataset viewer is not available (click to read why). Only showing a preview of the rows.
The dataset generation failed
Error code: DatasetGenerationError
Exception: TypeError
Message: Couldn't cast array of type
struct<content_hash: string, timestamp: string, source: string, line_count: int64, max_line_length: int64, avg_line_length: double, alnum_prop: double, repo_name: string, id: string, size: string, binary: bool, copies: string, ref: string, path: string, mode: string, license: string, language: list<item: struct<name: string, bytes: string>>, symlink_target: string>
to
{'content_hash': Value(dtype='string', id=None), 'timestamp': Value(dtype='string', id=None), 'source': Value(dtype='string', id=None), 'line_count': Value(dtype='int64', id=None), 'max_line_length': Value(dtype='int64', id=None), 'avg_line_length': Value(dtype='float64', id=None), 'alnum_prop': Value(dtype='float64', id=None), 'repo_name': Value(dtype='string', id=None), 'id': Value(dtype='string', id=None), 'size': Value(dtype='string', id=None), 'binary': Value(dtype='bool', id=None), 'copies': Value(dtype='string', id=None), 'ref': Value(dtype='string', id=None), 'path': Value(dtype='string', id=None), 'mode': Value(dtype='string', id=None), 'license': Value(dtype='string', id=None), 'language': [{'name': Value(dtype='string', id=None), 'bytes': Value(dtype='string', id=None)}]}
Traceback: Traceback (most recent call last):
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 2011, in _prepare_split_single
writer.write_table(table)
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/arrow_writer.py", line 585, in write_table
pa_table = table_cast(pa_table, self._schema)
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2302, in table_cast
return cast_table_to_schema(table, schema)
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2261, in cast_table_to_schema
arrays = [cast_array_to_feature(table[name], feature) for name, feature in features.items()]
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2261, in <listcomp>
arrays = [cast_array_to_feature(table[name], feature) for name, feature in features.items()]
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 1802, in wrapper
return pa.chunked_array([func(chunk, *args, **kwargs) for chunk in array.chunks])
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 1802, in <listcomp>
return pa.chunked_array([func(chunk, *args, **kwargs) for chunk in array.chunks])
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2122, in cast_array_to_feature
raise TypeError(f"Couldn't cast array of type\n{_short_str(array.type)}\nto\n{_short_str(feature)}")
TypeError: Couldn't cast array of type
struct<content_hash: string, timestamp: string, source: string, line_count: int64, max_line_length: int64, avg_line_length: double, alnum_prop: double, repo_name: string, id: string, size: string, binary: bool, copies: string, ref: string, path: string, mode: string, license: string, language: list<item: struct<name: string, bytes: string>>, symlink_target: string>
to
{'content_hash': Value(dtype='string', id=None), 'timestamp': Value(dtype='string', id=None), 'source': Value(dtype='string', id=None), 'line_count': Value(dtype='int64', id=None), 'max_line_length': Value(dtype='int64', id=None), 'avg_line_length': Value(dtype='float64', id=None), 'alnum_prop': Value(dtype='float64', id=None), 'repo_name': Value(dtype='string', id=None), 'id': Value(dtype='string', id=None), 'size': Value(dtype='string', id=None), 'binary': Value(dtype='bool', id=None), 'copies': Value(dtype='string', id=None), 'ref': Value(dtype='string', id=None), 'path': Value(dtype='string', id=None), 'mode': Value(dtype='string', id=None), 'license': Value(dtype='string', id=None), 'language': [{'name': Value(dtype='string', id=None), 'bytes': Value(dtype='string', id=None)}]}
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/src/services/worker/src/worker/job_runners/config/parquet_and_info.py", line 1529, in compute_config_parquet_and_info_response
parquet_operations = convert_to_parquet(builder)
File "/src/services/worker/src/worker/job_runners/config/parquet_and_info.py", line 1154, in convert_to_parquet
builder.download_and_prepare(
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1027, in download_and_prepare
self._download_and_prepare(
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1122, in _download_and_prepare
self._prepare_split(split_generator, **prepare_split_kwargs)
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1882, in _prepare_split
for job_id, done, content in self._prepare_split_single(
File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 2038, in _prepare_split_single
raise DatasetGenerationError("An error occurred while generating the dataset") from e
datasets.exceptions.DatasetGenerationError: An error occurred while generating the dataset

Need help to make the dataset viewer work? Make sure to review how to configure the dataset viewer, and open a discussion for direct support.
text
string | meta
dict |
|---|---|
# Search extension that orders products for a taxon using grid_orders.
module SpreeProductGridSort
  module Search
    module Ordered
      # Returns the product scope for the current search.
      #
      # With a taxon: active products joined to grid_orders for the taxon's
      # taxonomy, restricted to the taxon. Without one: the default products.
      #
      # BUG FIX: the original built the joined/filtered relation and then
      # discarded it (each line's return value was dropped), and called
      # `join` (not the ActiveRecord `joins`), so the returned scope was the
      # unfiltered `Spree::Product.active.in_taxon(taxon)`. Chain the relation
      # so the join and where clause actually apply.
      def ordered
        if taxon
          Spree::Product.active
            .joins(:grid_orders)
            .where("#{Spree::GridOrder.table_name}.taxonomy_id" => taxon.taxonomy.id)
            .in_taxon(taxon)
        else
          retrieve_products
        end
      end
    end
  end
end
|
{
"content_hash": "c0fbf296f4ab92f6339fc8d5492129f4",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 112,
"avg_line_length": 23.375,
"alnum_prop": 0.606951871657754,
"repo_name": "ginlane/spree_product_grid_sort",
"id": "85d124c1a607ac17bdf5d756623c6fbd0309ecc3",
"size": "374",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "lib/spree_product_grid_sort/search/ordered.rb",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "510"
},
{
"name": "CoffeeScript",
"bytes": "1255"
},
{
"name": "JavaScript",
"bytes": "115666"
},
{
"name": "Ruby",
"bytes": "18764"
}
]
}
|
// Copyright 2015 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.analysis;
import static com.google.common.truth.Truth.assertThat;
import static com.google.devtools.build.lib.testutil.MoreAsserts.assertEventCount;
import static com.google.devtools.build.lib.testutil.MoreAsserts.assertEventCountAtLeast;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.eventbus.EventBus;
import com.google.common.truth.Truth;
import com.google.devtools.build.lib.actions.Action;
import com.google.devtools.build.lib.actions.Actions;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.FailAction;
import com.google.devtools.build.lib.analysis.BuildView.AnalysisResult;
import com.google.devtools.build.lib.analysis.config.ConfigurationFactory;
import com.google.devtools.build.lib.analysis.config.InvalidConfigurationException;
import com.google.devtools.build.lib.analysis.util.AnalysisMock;
import com.google.devtools.build.lib.analysis.util.BuildViewTestBase;
import com.google.devtools.build.lib.analysis.util.ExpectedDynamicConfigurationErrors;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.packages.AspectDescriptor;
import com.google.devtools.build.lib.packages.Attribute;
import com.google.devtools.build.lib.packages.Rule;
import com.google.devtools.build.lib.skyframe.SkyFunctions;
import com.google.devtools.build.lib.skyframe.TargetPatternValue.TargetPatternKey;
import com.google.devtools.build.lib.skyframe.util.SkyframeExecutorTestUtils;
import com.google.devtools.build.lib.testutil.Suite;
import com.google.devtools.build.lib.testutil.TestSpec;
import com.google.devtools.build.lib.testutil.TestUtils;
import com.google.devtools.build.lib.util.Pair;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.skyframe.NotifyingHelper.EventType;
import com.google.devtools.build.skyframe.NotifyingHelper.Listener;
import com.google.devtools.build.skyframe.NotifyingHelper.Order;
import com.google.devtools.build.skyframe.SkyKey;
import com.google.devtools.build.skyframe.TrackingAwaiter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* Tests for the {@link BuildView}.
*/
@TestSpec(size = Suite.SMALL_TESTS)
@RunWith(JUnit4.class)
public class BuildViewTest extends BuildViewTestBase {
// Maps an AnalysisFailureEvent to a (failed target label, failure reason) pair of strings.
// Declared here so tests can compare recorded analysis failures against expected label pairs.
private static final Function<AnalysisFailureEvent, Pair<String, String>>
ANALYSIS_EVENT_TO_STRING_PAIR = new Function<AnalysisFailureEvent, Pair<String, String>>() {
@Override
public Pair<String, String> apply(AnalysisFailureEvent event) {
return Pair.of(
event.getFailedTarget().getLabel().toString(), event.getFailureReason().toString());
}
};
/** Analyzing a rule target yields a ConfiguredTarget backed by the corresponding Rule. */
@Test
public void testRuleConfiguredTarget() throws Exception {
scratch.file("pkg/BUILD",
"genrule(name='foo', ",
" cmd = '',",
" srcs=['a.src'],",
" outs=['a.out'])");
update("//pkg:foo");
Rule ruleTarget = (Rule) getTarget("//pkg:foo");
// Truth assertions for consistency with the rest of this file and clearer failure output.
assertThat(ruleTarget.getRuleClass()).isEqualTo("genrule");
ConfiguredTarget ruleCT = getConfiguredTarget("//pkg:foo");
assertThat(ruleCT.getTarget()).isSameAs(ruleTarget);
}
/** filterTestsByTargets keeps exactly the configured tests whose Target is in the given set. */
@Test
public void testFilterByTargets() throws Exception {
scratch.file("tests/BUILD",
"sh_test(name = 'small_test_1',",
" srcs = ['small_test_1.sh'],",
" data = [':xUnit'],",
" size = 'small',",
" tags = ['tag1'])",
"",
"sh_test(name = 'small_test_2',",
" srcs = ['small_test_2.sh'],",
" size = 'small',",
" tags = ['tag2'])",
"",
"",
"test_suite( name = 'smallTests', tags=['small'])");
//scratch.file("tests/small_test_1.py");
update("//tests:smallTests");
ConfiguredTarget smallTest1 = getConfiguredTarget("//tests:small_test_1");
ConfiguredTarget smallTest2 = getConfiguredTarget("//tests:small_test_2");
ConfiguredTarget testSuite = getConfiguredTarget("//tests:smallTests");
assertNoEvents(); // start from a clean slate
// Filter down to the tests whose underlying Target is in the keep-set.
Collection<ConfiguredTarget> allTargets =
new LinkedHashSet<>(ImmutableList.of(smallTest1, smallTest2, testSuite));
Collection<ConfiguredTarget> filtered =
Lists.newArrayList(
BuildView.filterTestsByTargets(
allTargets, Sets.newHashSet(smallTest1.getTarget(), testSuite.getTarget())));
assertThat(filtered).containsExactlyElementsIn(Sets.newHashSet(smallTest1, testSuite));
}
/** A top-level source file target exposes its artifact, which has no generating action. */
@Test
public void testSourceArtifact() throws Exception {
setupDummyRule();
update("//pkg:a.src");
InputFileConfiguredTarget inputCT = getInputFileConfiguredTarget("//pkg:a.src");
Artifact inputArtifact = inputCT.getArtifact();
// Truth assertions for consistency with the rest of this file.
// Source artifacts are not produced by any action.
assertThat(getGeneratingAction(inputArtifact)).isNull();
assertThat(inputArtifact.getExecPathString()).isEqualTo("pkg/a.src");
}
/**
 * Analyzing an output file target exposes its artifact: the root is the configuration's bin
 * directory for the target's repository, the exec path is the bin fragment plus "pkg/a.out",
 * and the generating action is expected to be a FailAction (set up via setupDummyRule()).
 */
@Test
public void testGeneratedArtifact() throws Exception {
setupDummyRule();
update("//pkg:a.out");
OutputFileConfiguredTarget outputCT = (OutputFileConfiguredTarget)
getConfiguredTarget("//pkg:a.out");
Artifact outputArtifact = outputCT.getArtifact();
assertEquals(
outputCT.getConfiguration().getBinDirectory(
outputCT.getTarget().getLabel().getPackageIdentifier().getRepository()),
outputArtifact.getRoot());
assertEquals(outputCT.getConfiguration().getBinFragment().getRelative("pkg/a.out"),
outputArtifact.getExecPath());
assertEquals(new PathFragment("pkg/a.out"), outputArtifact.getRootRelativePath());
Action action = getGeneratingAction(outputArtifact);
assertSame(FailAction.class, action.getClass());
}
/**
 * A syntax error in a dependency's package is reported while analyzing the depending target
 * in keep_going mode, and the analysis result carries an error.
 */
@Test
public void testSyntaxErrorInDepPackage() throws Exception {
// Check that a loading error in a dependency is properly reported.
scratch.file("a/BUILD",
"genrule(name='x',",
" srcs = ['file.txt'],",
" outs = ['foo'],",
" cmd = 'echo')",
"@"); // syntax error
scratch.file("b/BUILD",
"genrule(name= 'cc',",
" tools = ['//a:x'],",
" outs = ['bar'],",
" cmd = 'echo')");
reporter.removeHandler(failFastHandler);
EventBus eventBus = new EventBus();
AnalysisResult result = update(eventBus, defaultFlags().with(Flag.KEEP_GOING), "//b:cc");
assertContainsEvent("invalid character: '@'");
assertThat(result.hasError()).isTrue();
}
/**
 * An analysis failure (a dep on a //visibility:private target) posts exactly one
 * AnalysisFailureEvent naming the failing dep (//foo:bar) and the failed top-level
 * target (//foo:foo).
 */
@Test
public void testReportsAnalysisRootCauses() throws Exception {
scratch.file("private/BUILD",
"genrule(",
" name='private',",
" outs=['private.out'],",
" cmd='',",
" visibility=['//visibility:private'])");
scratch.file("foo/BUILD",
"genrule(",
" name='foo',",
" tools=[':bar'],",
" outs=['foo.out'],",
" cmd='')",
"genrule(",
" name='bar',",
" tools=['//private'],",
" outs=['bar.out'],",
" cmd='')");
reporter.removeHandler(failFastHandler);
EventBus eventBus = new EventBus();
AnalysisFailureRecorder recorder = new AnalysisFailureRecorder();
eventBus.register(recorder);
AnalysisResult result = update(eventBus, defaultFlags().with(Flag.KEEP_GOING), "//foo");
assertThat(result.hasError()).isTrue();
assertThat(recorder.events).hasSize(1);
AnalysisFailureEvent event = recorder.events.get(0);
// Truth assertions for consistency with the surrounding assertThat calls.
assertThat(event.getFailureReason().toString()).isEqualTo("//foo:bar");
assertThat(event.getFailedTarget().getLabel().toString()).isEqualTo("//foo:foo");
}
/**
 * Two simultaneous loading errors are both recorded: the missing mandatory 'outs' attribute
 * (root cause: the genrule itself) and the reference to a nonexistent package.
 */
@Test
public void testReportsLoadingRootCauses() throws Exception {
// This test checks that two simultaneous errors are both reported:
// - missing outs attribute,
// - package referenced in tools does not exist
scratch.file("pkg/BUILD",
"genrule(name='foo',",
" tools=['//nopackage:missing'],",
" cmd='')");
reporter.removeHandler(failFastHandler);
EventBus eventBus = new EventBus();
LoadingFailureRecorder recorder = new LoadingFailureRecorder();
eventBus.register(recorder);
// Note: no need to run analysis for a loading failure.
AnalysisResult result = update(eventBus, defaultFlags().with(Flag.KEEP_GOING), "//pkg:foo");
assertThat(result.hasError()).isTrue();
assertThat(recorder.events)
.contains(
Pair.of(Label.parseAbsolute("//pkg:foo"), Label.parseAbsolute("//nopackage:missing")));
assertContainsEvent("missing value for mandatory attribute 'outs'");
assertContainsEvent("no such package 'nopackage'");
// Skyframe correctly reports the other root cause as the genrule itself (since it is
// missing attributes).
assertThat(recorder.events).hasSize(2);
assertThat(recorder.events)
.contains(Pair.of(Label.parseAbsolute("//pkg:foo"), Label.parseAbsolute("//pkg:foo")));
}
/**
 * A missing license declaration deep in a third_party chain is attributed to both top-level
 * targets that transitively depend on it: each recorded (top-level, root cause) pair names
 * //third_party/fourth as the root cause.
 */
@Test
public void testConvolutedLoadRootCauseAnalysis() throws Exception {
// You need license declarations in third_party. We use this constraint to
// create targets that are loadable, but are in error.
scratch.file("third_party/first/BUILD",
"sh_library(name='first', deps=['//third_party/second'], licenses=['notice'])");
scratch.file("third_party/second/BUILD",
"sh_library(name='second', deps=['//third_party/third'], licenses=['notice'])");
scratch.file("third_party/third/BUILD",
"sh_library(name='third', deps=['//third_party/fourth'], licenses=['notice'])");
scratch.file("third_party/fourth/BUILD",
"sh_library(name='fourth', deps=['//third_party/fifth'])");
scratch.file("third_party/fifth/BUILD",
"sh_library(name='fifth', licenses=['notice'])");
reporter.removeHandler(failFastHandler);
EventBus eventBus = new EventBus();
LoadingFailureRecorder recorder = new LoadingFailureRecorder();
eventBus.register(recorder);
// Note: no need to run analysis for a loading failure.
AnalysisResult result = update(eventBus, defaultFlags().with(Flag.KEEP_GOING),
"//third_party/first", "//third_party/third");
assertThat(result.hasError()).isTrue();
assertThat(recorder.events).hasSize(2);
// Truth's contains() prints the full collection on failure, replacing the manual
// assertTrue(recorder.events.toString(), ...) idiom and matching the assertion below.
assertThat(recorder.events)
.contains(Pair.of(
Label.parseAbsolute("//third_party/first"),
Label.parseAbsolute("//third_party/fourth")));
assertThat(recorder.events)
.contains(Pair.of(
Label.parseAbsolute("//third_party/third"),
Label.parseAbsolute("//third_party/fourth")));
}
/**
 * When a dep lists two targets missing from otherwise-valid packages, both are recorded as
 * root causes for the top-level target //gp.
 */
@Test
public void testMultipleRootCauseReporting() throws Exception {
scratch.file("gp/BUILD",
"sh_library(name = 'gp', deps = ['//p:p'])");
scratch.file("p/BUILD",
"sh_library(name = 'p', deps = ['//c1:not', '//c2:not'])");
scratch.file("c1/BUILD");
scratch.file("c2/BUILD");
reporter.removeHandler(failFastHandler);
EventBus eventBus = new EventBus();
LoadingFailureRecorder recorder = new LoadingFailureRecorder();
eventBus.register(recorder);
AnalysisResult result = update(eventBus, defaultFlags().with(Flag.KEEP_GOING), "//gp");
assertThat(result.hasError()).isTrue();
assertThat(recorder.events).hasSize(2);
// Truth's contains() prints the full collection on failure, replacing the manual
// assertTrue(recorder.events.toString(), ...) idiom and matching the assertion below.
assertThat(recorder.events)
.contains(Pair.of(Label.parseAbsolute("//gp"), Label.parseAbsolute("//c1:not")));
assertThat(recorder.events)
.contains(Pair.of(Label.parseAbsolute("//gp"), Label.parseAbsolute("//c2:not")));
}
/**
 * Regression test for: "Package group includes are broken"
 *
 * <p>Analyzing a package_group also analyzes the package_groups it includes.
 */
@Test
public void testTopLevelPackageGroup() throws Exception {
scratch.file("tropical/BUILD",
"package_group(name='guava', includes=[':mango'])",
"package_group(name='mango')");
// If the analysis phase results in an error, this will throw an exception
update("//tropical:guava");
// Check if the included package group also got analyzed
assertThat(getConfiguredTarget("//tropical:mango", null)).isNotNull();
}
/** An exported input file can be analyzed as a top-level target. */
@Test
public void testTopLevelInputFile() throws Exception {
scratch.file("tropical/BUILD",
"exports_files(['file.txt'])");
update("//tropical:file.txt");
// Truth assertion for consistency with the rest of this file.
assertThat(getConfiguredTarget("//tropical:file.txt", null)).isNotNull();
}
/** getDirectPrerequisitesForTesting returns exactly the direct deps of the target. */
@Test
public void testGetDirectPrerequisites() throws Exception {
scratch.file(
"package/BUILD",
"filegroup(name='top', srcs=[':inner', 'file'])",
"sh_binary(name='inner', srcs=['script.sh'])");
update("//package:top");
ConfiguredTarget topTarget = getConfiguredTarget("//package:top", getTargetConfiguration());
Iterable<ConfiguredTarget> prereqs = getView().getDirectPrerequisitesForTesting(
reporter, topTarget, getBuildConfigurationCollection());
// Collect the prerequisite labels with a plain loop.
List<Label> labels = new ArrayList<>();
for (ConfiguredTarget prereq : prereqs) {
labels.add(prereq.getLabel());
}
assertThat(labels)
.containsExactly(
Label.parseAbsolute("//package:inner"), Label.parseAbsolute("//package:file"));
}
/**
 * getDirectPrerequisiteDependenciesForTesting returns one Dependency per direct dep. The
 * rule dep's form depends on the configuration mode (transition+aspects under dynamic
 * configurations, a concrete configuration otherwise); the input file always gets a null
 * configuration.
 */
@Test
public void testGetDirectPrerequisiteDependencies() throws Exception {
scratch.file(
"package/BUILD",
"filegroup(name='top', srcs=[':inner', 'file'])",
"sh_binary(name='inner', srcs=['script.sh'])");
update("//package:top");
ConfiguredTarget top = getConfiguredTarget("//package:top", getTargetConfiguration());
Iterable<Dependency> targets = getView().getDirectPrerequisiteDependenciesForTesting(
reporter, top, getBuildConfigurationCollection()).values();
Dependency innerDependency;
Dependency fileDependency;
if (top.getConfiguration().useDynamicConfigurations()) {
innerDependency =
Dependency.withTransitionAndAspects(
Label.parseAbsolute("//package:inner"),
Attribute.ConfigurationTransition.NONE,
ImmutableSet.<AspectDescriptor>of());
} else {
innerDependency =
Dependency.withConfiguration(
Label.parseAbsolute("//package:inner"),
getTargetConfiguration());
}
fileDependency =
Dependency.withNullConfiguration(
Label.parseAbsolute("//package:file"));
assertThat(targets).containsExactly(innerDependency, fileDependency);
}
/**
 * Tests that the {@code --configuration short name} option cannot be used on
 * the command line: setting it must fail the update with an
 * InvalidConfigurationException and report that the option is internal-only.
 */
@Test
public void testConfigurationShortName() throws Exception {
useConfiguration("--output directory name=foo");
reporter.removeHandler(failFastHandler);
try {
update(defaultFlags());
// Reaching here means the invalid option was accepted; that is a failure.
fail();
} catch (InvalidConfigurationException e) {
assertThat(e).hasMessage("Build options are invalid");
assertContainsEvent(
"The internal '--output directory name' option cannot be used on the command line");
}
}
/**
 * Smoke test: analysis of an unrelated target succeeds when --message_translations points
 * at an exported file. Passing means update() completed without error.
 */
@Test
public void testFileTranslations() throws Exception {
scratch.file("foo/file");
scratch.file("foo/BUILD",
"exports_files(['file'])");
useConfiguration("--message_translations=//foo:file");
scratch.file("bar/BUILD",
"sh_library(name = 'bar')");
update("//bar");
}
// Regression test: "output_filter broken (but in a different way)"
/** A match-everything output filter lets the expected warning through. */
@Test
public void testOutputFilterSeeWarning() throws Exception {
runAnalysisWithOutputFilter(Pattern.compile(".*"));
assertContainsEvent("please do not import '//java/a:A.java'");
}
// Regression test: "output_filter broken (but in a different way)"
/** A filter matching only //java/c suppresses events from other packages entirely. */
@Test
public void testOutputFilter() throws Exception {
runAnalysisWithOutputFilter(Pattern.compile("^//java/c"));
assertNoEvents();
}
/**
 * In keep_going mode an analysis error (two srcs on an sh_binary) marks the result as
 * erroneous and emits the per-target "errors encountered" message.
 */
@Test
public void testAnalysisErrorMessageWithKeepGoing() throws Exception {
scratch.file("a/BUILD", "sh_binary(name='a', srcs=['a1.sh', 'a2.sh'])");
reporter.removeHandler(failFastHandler);
AnalysisResult result = update(defaultFlags().with(Flag.KEEP_GOING), "//a");
assertThat(result.hasError()).isTrue();
assertContainsEvent("errors encountered while analyzing target '//a:a'");
}
/**
 * Regression test: Exception in ConfiguredTargetGraph.checkForCycles()
 * when multiple top-level targets depend on the same cycle.
 *
 * <p>Both top-level targets share the rec1 &lt;-&gt; rec2 cycle; the cycle is reported once
 * for rec1 and attributed to the top-level rules.
 */
@Test
public void testCircularDependencyBelowTwoTargets() throws Exception {
scratch.file("foo/BUILD",
"sh_library(name = 'top1', srcs = ['top1.sh'], deps = [':rec1'])",
"sh_library(name = 'top2', srcs = ['top2.sh'], deps = [':rec1'])",
"sh_library(name = 'rec1', srcs = ['rec1.sh'], deps = [':rec2'])",
"sh_library(name = 'rec2', srcs = ['rec2.sh'], deps = [':rec1'])"
);
reporter.removeHandler(failFastHandler);
AnalysisResult result =
update(defaultFlags().with(Flag.KEEP_GOING), "//foo:top1", "//foo:top2");
assertThat(result.hasError()).isTrue();
assertContainsEvent("in sh_library rule //foo:rec1: cycle in dependency graph:\n");
assertContainsEvent("in sh_library rule //foo:top");
}
// Regression test: cycle node depends on error.
/**
 * Without keep_going, a missing target below a dependency cycle makes update() throw
 * ViewCreationFailedException; both the missing-target error and the cycle are reported.
 */
@Test
public void testErrorBelowCycle() throws Exception {
scratch.file("foo/BUILD",
"sh_library(name = 'top', deps = ['mid'])",
"sh_library(name = 'mid', deps = ['bad', 'cycle1'])",
"sh_library(name = 'bad', srcs = ['//badbuild:isweird'])",
"sh_library(name = 'cycle1', deps = ['cycle2', 'mid'])",
"sh_library(name = 'cycle2', deps = ['cycle1'])");
scratch.file("badbuild/BUILD", "");
reporter.removeHandler(failFastHandler);
// Deterministic graph evaluation order so the emitted events are stable across runs.
injectGraphListenerForTesting(
new Listener() {
@Override
public void accept(SkyKey key, EventType type, Order order, Object context) {}
},
/*deterministic=*/ true);
try {
update("//foo:top");
fail();
} catch (ViewCreationFailedException e) {
// Expected.
}
assertContainsEvent("no such target '//badbuild:isweird': target 'isweird' not declared in "
+ "package 'badbuild'");
assertContainsEvent("and referenced by '//foo:bad'");
assertContainsEvent("in sh_library rule //foo");
assertContainsEvent("cycle in dependency graph");
assertEventCountAtLeast(2, eventCollector);
}
/**
 * Same setup as testErrorBelowCycle, but with keep_going: update() completes and both the
 * missing-target error and the cycle are reported as events.
 */
@Test
public void testErrorBelowCycleKeepGoing() throws Exception {
scratch.file("foo/BUILD",
"sh_library(name = 'top', deps = ['mid'])",
"sh_library(name = 'mid', deps = ['bad', 'cycle1'])",
"sh_library(name = 'bad', srcs = ['//badbuild:isweird'])",
"sh_library(name = 'cycle1', deps = ['cycle2', 'mid'])",
"sh_library(name = 'cycle2', deps = ['cycle1'])");
scratch.file("badbuild/BUILD", "");
reporter.removeHandler(failFastHandler);
update(defaultFlags().with(Flag.KEEP_GOING), "//foo:top");
assertContainsEvent("no such target '//badbuild:isweird': target 'isweird' not declared in "
+ "package 'badbuild'");
assertContainsEvent("and referenced by '//foo:bad'");
assertContainsEvent("in sh_library rule //foo");
assertContainsEvent("cycle in dependency graph");
// Dynamic configurations trigger this error both in configuration trimming (which visits
// the transitive target closure) and in the normal configured target cycle detection path.
// So we get an additional instance of this check (which varies depending on whether Skyframe
// loading phase is enabled).
// TODO(gregce): refactor away this variation. Note that the duplicate doesn't make it into
// real user output (it only affects tests).
if (!getTargetConfiguration().useDynamicConfigurations()) {
assertEventCount(3, eventCollector);
}
}
/**
 * Analyzing a cc_binary with linkshared = 1 and this srcs/name combination is expected to
 * fail analysis with a ViewCreationFailedException (reaching fail() means no exception was
 * thrown). NOTE(review): the method name suggests the point is that actions exist despite
 * the error, but this body only checks the exception — confirm against history if extending.
 */
@Test
public void testAnalysisEntryHasActionsEvenWithError() throws Exception {
scratch.file("foo/BUILD",
"cc_binary(name = 'foo', linkshared = 1, srcs = ['foo.cc'])");
reporter.removeHandler(failFastHandler);
try {
update("//foo:foo");
fail(); // Expected ViewCreationFailedException.
} catch (ViewCreationFailedException e) {
// ok.
}
}
/**
 * Referencing a nonexistent target in an existing package produces an error that names the
 * package's BUILD file location and the referring target.
 */
@Test
public void testHelpfulErrorForWrongPackageLabels() throws Exception {
reporter.removeHandler(failFastHandler);
scratch.file("x/BUILD",
"cc_library(name='x', srcs=['x.cc'])");
scratch.file("y/BUILD",
"cc_library(name='y', srcs=['y.cc'], deps=['//x:z'])");
AnalysisResult result = update(defaultFlags().with(Flag.KEEP_GOING), "//y:y");
assertThat(result.hasError()).isTrue();
assertContainsEvent("no such target '//x:z': "
+ "target 'z' not declared in package 'x' "
+ "defined by /workspace/x/BUILD and referenced by '//y:y'");
}
/**
 * Re-analyzing after the genrule's cmd changes produces a generating action that is not
 * shareable (Actions.canBeShared) with the action from the first analysis.
 */
@Test
public void testNewActionsAreDifferentAndDontConflict() throws Exception {
scratch.file("pkg/BUILD",
"genrule(name='a', ",
" cmd='',",
" outs=['a.out'])");
update("//pkg:a.out");
OutputFileConfiguredTarget outputCT = (OutputFileConfiguredTarget)
getConfiguredTarget("//pkg:a.out");
Artifact outputArtifact = outputCT.getArtifact();
Action action = getGeneratingAction(outputArtifact);
assertNotNull(action);
// Change only the command; the artifact identity stays the same, the action must not.
scratch.overwriteFile("pkg/BUILD",
"genrule(name='a', ",
" cmd='false',",
" outs=['a.out'])");
update("//pkg:a.out");
assertFalse("Actions should not be compatible",
Actions.canBeShared(action, getGeneratingAction(outputArtifact)));
}
/**
 * This test exercises the case where we invalidate (mark dirty) a node in one build command
 * invocation and the revalidation (because it did not change) happens in a subsequent build
 * command call.
 *
 * - In the first update call we construct A.
 *
 * - Then we construct B and we make the glob get invalidated. We do that by deleting a file
 * because it depends on the directory listing. Because of that A gets invalidated.
 *
 * - Then we construct A again. The glob gets revalidated because it is still matching just A.java
 * and A configured target gets revalidated too. At the end of the analysis A java action should
 * be in the action graph.
 */
@Test
public void testMultiBuildInvalidationRevalidation() throws Exception {
scratch.file("java/a/A.java", "bla1");
// C.java is deleted below to dirty the glob(['A*.java'])'s directory-listing dep.
scratch.file("java/a/C.java", "bla2");
scratch.file("java/a/BUILD",
"java_test(name = 'A',",
" srcs = glob(['A*.java']))",
"java_test(name = 'B',",
" srcs = ['B.java'])");
update("//java/a:A");
ConfiguredTarget ct = getConfiguredTarget("//java/a:A");
scratch.deleteFile("java/a/C.java");
update("//java/a:B");
update("//java/a:A");
// The deploy-jar action for A must still be present after revalidation.
assertNotNull(getGeneratingAction(
getBinArtifact("A_deploy.jar", ct)));
}
/**
 * Regression test: ClassCastException in SkyframeLabelVisitor.updateRootCauses.
 *
 * <p>Each broken package's syntax error is reported exactly once, even though the bad
 * packages are reached through different paths (directly and transitively).
 */
@Test
public void testDepOnGoodTargetInBadPkgAndTransitivelyBadTarget() throws Exception {
reporter.removeHandler(failFastHandler);
scratch.file("parent/BUILD",
"sh_library(name = 'foo',",
" srcs = ['//badpkg1:okay-target', '//okaypkg:transitively-bad-target'])");
Path badpkg1BuildFile = scratch.file("badpkg1/BUILD",
"exports_files(['okay-target'])",
"invalidbuildsyntax");
scratch.file("okaypkg/BUILD",
"sh_library(name = 'transitively-bad-target',",
" srcs = ['//badpkg2:bad-target'])");
Path badpkg2BuildFile = scratch.file("badpkg2/BUILD",
"sh_library(name = 'bad-target')",
"invalidbuildsyntax");
update(defaultFlags().with(Flag.KEEP_GOING), "//parent:foo");
assertEquals(1, getFrequencyOfErrorsWithLocation(
badpkg1BuildFile.asFragment(), eventCollector));
assertEquals(1, getFrequencyOfErrorsWithLocation(
badpkg2BuildFile.asFragment(), eventCollector));
}
/** Non-incremental variant of the bad-package + transitive-cycle regression scenario. */
@Test
public void testDepOnGoodTargetInBadPkgAndTransitiveCycle_NotIncremental() throws Exception {
runTestDepOnGoodTargetInBadPkgAndTransitiveCycle(/*incremental=*/false);
}
/** Incremental variant of the bad-package + transitive-cycle regression scenario. */
@Test
public void testDepOnGoodTargetInBadPkgAndTransitiveCycle_Incremental() throws Exception {
runTestDepOnGoodTargetInBadPkgAndTransitiveCycle(/*incremental=*/true);
}
/**
 * Regression test: in keep_going mode, cycles in target graph aren't reported
 * if package is in error.
 *
 * <p>Both the syntax error and the a &lt;-&gt; b cycle must appear in the events.
 */
@Test
public void testCycleReporting_TargetCycleWhenPackageInError() throws Exception {
reporter.removeHandler(failFastHandler);
scratch.file("cycles/BUILD",
"sh_library(name = 'a', deps = [':b'])",
"sh_library(name = 'b', deps = [':a'])",
"notvalidbuildsyntax");
update(defaultFlags().with(Flag.KEEP_GOING), "//cycles:a");
assertContainsEvent("'notvalidbuildsyntax'");
assertContainsEvent("cycle in dependency graph");
}
/**
 * In keep_going mode, a loading error in the parent package does not suppress loading of the
 * child: both package errors are reported, exactly once each.
 */
@Test
public void testTransitiveLoadingDoesntShortCircuitInKeepGoing() throws Exception {
reporter.removeHandler(failFastHandler);
scratch.file("parent/BUILD",
"sh_library(name = 'a', deps = ['//child:b'])",
"parentisbad");
scratch.file("child/BUILD",
"sh_library(name = 'b')",
"childisbad");
update(defaultFlags().with(Flag.KEEP_GOING), "//parent:a");
assertContainsEventWithFrequency("parentisbad", 1);
assertContainsEventWithFrequency("childisbad", 1);
assertContainsEventWithFrequency("and referenced by '//parent:a'", 1);
}
/**
 * Smoke test for the Skyframe code path: analyzing an output-file label produces a
 * ConfiguredTarget whose single file-to-build is generated by a FailAction.
 */
@Test
public void testSkyframe() throws Exception {
setupDummyRule();
String aoutLabel = "//pkg:a.out";
update(aoutLabel);
// However, a ConfiguredTarget was actually produced.
ConfiguredTarget target = Iterables.getOnlyElement(getAnalysisResult().getTargetsToBuild());
// Truth assertions for consistency with the rest of this file.
assertThat(target.getLabel().toString()).isEqualTo(aoutLabel);
Artifact aout = Iterables.getOnlyElement(
target.getProvider(FileProvider.class).getFilesToBuild());
Action action = getGeneratingAction(aout);
assertThat(action.getClass()).isSameAs(FailAction.class);
}
/**
 * ConfiguredTargetFunction should not register actions in legacy Blaze ActionGraph unless
 * the creation of the node is successful.
 */
@Test
public void testActionsNotRegisteredInLegacyWhenError() throws Exception {
// First find the artifact we want to make sure is not generated by an action with an error.
// Then update the BUILD file and re-analyze.
scratch.file("actions_not_registered/BUILD",
"cc_binary(name = 'foo', srcs = ['foo.cc'])");
update("//actions_not_registered:foo");
Artifact fooOut = Iterables.getOnlyElement(
getConfiguredTarget("//actions_not_registered:foo")
.getProvider(FileProvider.class).getFilesToBuild());
assertNotNull(getActionGraph().getGeneratingAction(fooOut));
// Drop the previous analysis so the next update starts from scratch for this target.
clearAnalysisResult();
scratch.overwriteFile("actions_not_registered/BUILD",
"cc_binary(name = 'foo', linkshared = 1, srcs = ['foo.cc'])");
reporter.removeHandler(failFastHandler);
try {
update("//actions_not_registered:foo");
fail("This build should fail because: 'linkshared' used in non-shared library");
} catch (ViewCreationFailedException e) {
// The failed analysis must not have (re-)registered a generating action.
assertNull(getActionGraph().getGeneratingAction(fooOut));
}
}
/**
 * Regression test:
 * "skyframe: ArtifactFactory and ConfiguredTargets out of sync".
 *
 * <p>Analyzes two same-named rules in different packages, perturbs one package, then
 * analyzes the first rule's output file directly; the output-file ConfiguredTarget and its
 * generating action must still resolve consistently.
 */
@Test
public void testSkyframeAnalyzeRuleThenItsOutputFile() throws Exception {
scratch.file("pkg/BUILD",
"testing_dummy_rule(name='foo', ",
" srcs=['a.src'],",
" outs=['a.out'])");
scratch.file("pkg2/BUILD",
"testing_dummy_rule(name='foo', ",
" srcs=['a.src'],",
" outs=['a.out'])");
String aoutLabel = "//pkg:a.out";
update("//pkg2:foo");
update("//pkg:foo");
// Perturb pkg2 (comment-only change) so it is reloaded before the next analysis.
scratch.overwriteFile("pkg2/BUILD",
"testing_dummy_rule(name='foo', ",
" srcs=['a.src'],",
" outs=['a.out'])",
"# Comment");
update("//pkg:a.out");
// However, a ConfiguredTarget was actually produced.
ConfiguredTarget target = Iterables.getOnlyElement(getAnalysisResult().getTargetsToBuild());
assertEquals(aoutLabel, target.getLabel().toString());
Artifact aout = Iterables.getOnlyElement(
target.getProvider(FileProvider.class).getFilesToBuild());
Action action = getGeneratingAction(aout);
assertSame(FailAction.class, action.getClass());
}
  /**
   * Tests that skyframe reports the root cause as being the target that depended on the symlink
   * cycle.
   */
  @Test
  public void testRootCauseReportingFileSymlinks() throws Exception {
    scratch.file("gp/BUILD",
        "sh_library(name = 'gp', deps = ['//p'])");
    scratch.file("p/BUILD",
        "sh_library(name = 'p', deps = ['//c'])");
    scratch.file("c/BUILD",
        "sh_library(name = 'c', deps = [':c1', ':c2'])",
        "sh_library(name = 'c1', deps = ['//cycles1'])",
        "sh_library(name = 'c2', deps = ['//cycles2'])");
    Path cycles1BuildFilePath = scratch.file("cycles1/BUILD",
        "sh_library(name = 'cycles1', srcs = glob(['*.sh']))");
    Path cycles2BuildFilePath = scratch.file("cycles2/BUILD",
        "sh_library(name = 'cycles2', srcs = glob(['*.sh']))");
    // Each *.sh file is a symlink pointing at itself, so the glob hits a file symlink cycle.
    cycles1BuildFilePath.getParentDirectory().getRelative("cycles1.sh").createSymbolicLink(
        new PathFragment("cycles1.sh"));
    cycles2BuildFilePath.getParentDirectory().getRelative("cycles2.sh").createSymbolicLink(
        new PathFragment("cycles2.sh"));
    reporter.removeHandler(failFastHandler);
    EventBus eventBus = new EventBus();
    LoadingFailureRecorder recorder = new LoadingFailureRecorder();
    eventBus.register(recorder);
    AnalysisResult result = update(eventBus, defaultFlags().with(Flag.KEEP_GOING), "//gp");
    assertThat(result.hasError()).isTrue();
    // Both broken packages are reported, each with //gp (the top-level target) as the root cause.
    assertThat(recorder.events).hasSize(2);
    assertTrue(recorder.events.toString(), recorder.events.contains(
        Pair.of(Label.parseAbsolute("//gp"), Label.parseAbsolute("//cycles1"))));
    assertTrue(recorder.events.toString(), recorder.events.contains(
        Pair.of(Label.parseAbsolute("//gp"), Label.parseAbsolute("//cycles2"))));
  }
  /**
   * Regression test for bug when a configured target has missing deps, but also depends
   * transitively on an error. We build //foo:query, which depends on a valid and an invalid target
   * pattern. We ensure that by the time it requests its dependent target patterns, the invalid one
   * is ready, and throws (though not before the request is registered). Then, when bubbling the
   * invalid target pattern error up, we ensure that it bubbles into //foo:query, which must cope
   * with the combination of an error and a missing dep.
   */
  @Test
  public void testGenQueryWithBadTargetAndUnfinishedTarget() throws Exception {
    // The target //foo:zquery is used to force evaluation of //foo:nosuchtarget before the target
    // patterns in //foo:query are enqueued for evaluation. That way, //foo:query will depend on one
    // invalid target pattern and two target patterns that haven't been evaluated yet.
    // It is important that 'query' come before 'zquery' alphabetically, so that when the error is
    // bubbling up, it goes to the //foo:query node -- we use a graph implementation in which the
    // reverse deps of each entry are ordered alphabetically. It is also important that a missing
    // target pattern is requested before the exception is thrown, so we have both //foo:b and
    // //foo:z missing from the deps, in the hopes that at least one of them will come before
    // //foo:nosuchtarget.
    scratch.file("foo/BUILD",
        "genquery(name = 'query',",
        "         expression = 'deps(//foo:b) except //foo:nosuchtarget except //foo:z',",
        "         scope = ['//foo:a'])",
        "genquery(name = 'zquery',",
        "         expression = 'deps(//foo:nosuchtarget)',",
        "         scope = ['//foo:a'])",
        "sh_library(name = 'a')",
        "sh_library(name = 'b')",
        "sh_library(name = 'z')"
    );
    // Graph listener that choreographs the race: //foo:nosuchtarget must finish (with an error)
    // before //foo:query's dep request on it, while //foo:b and //foo:z never get evaluated.
    Listener listener =
        new Listener() {
          private final CountDownLatch errorDone = new CountDownLatch(1);
          private final CountDownLatch realQueryStarted = new CountDownLatch(1);

          @Override
          public void accept(SkyKey key, EventType type, Order order, Object context) {
            if (!key.functionName().equals(SkyFunctions.TARGET_PATTERN)) {
              return;
            }
            String label = ((TargetPatternKey) key.argument()).getPattern();
            if (label.equals("//foo:nosuchtarget")) {
              if (type == EventType.SET_VALUE) {
                // Inform //foo:query-dep-registering thread that it may proceed.
                errorDone.countDown();
                // Wait to make sure //foo:query-dep-registering process has started.
                TrackingAwaiter.INSTANCE.awaitLatchAndTrackExceptions(
                    realQueryStarted, "//foo:query did not request dep in time");
              } else if (type == EventType.ADD_REVERSE_DEP
                  && context.toString().contains("foo:query")) {
                // Make sure that when foo:query requests foo:nosuchtarget, it's already done.
                TrackingAwaiter.INSTANCE.awaitLatchAndTrackExceptions(
                    errorDone, "//foo:nosuchtarget did not evaluate in time");
              }
            } else if ((label.equals("//foo:b") || label.equals("//foo:z"))
                && type == EventType.CREATE_IF_ABSENT) {
              // Inform error-evaluating thread that it may throw an exception.
              realQueryStarted.countDown();
              TrackingAwaiter.INSTANCE.awaitLatchAndTrackExceptions(
                  errorDone, "//foo:nosuchtarget did not evaluate in time");
              // Don't let the target pattern //foo:{b,z} get enqueued for evaluation until we
              // receive an interrupt signal from the threadpool. The interrupt means that
              // evaluation is shutting down, and so //foo:{b,z} definitely won't get evaluated.
              CountDownLatch waitForInterrupt = new CountDownLatch(1);
              try {
                waitForInterrupt.await(TestUtils.WAIT_TIMEOUT_SECONDS, TimeUnit.SECONDS);
                throw new IllegalStateException("node was not interrupted in time");
              } catch (InterruptedException e) {
                // Expected.
                Thread.currentThread().interrupt();
              }
            }
          }
        };
    // deterministic=true makes reverse deps visit in a stable (alphabetical) order, which the
    // choreography above depends on.
    injectGraphListenerForTesting(listener, /*deterministic=*/ true);
    reporter.removeHandler(failFastHandler);
    try {
      update("//foo:query", "//foo:zquery");
      fail();
    } catch (ViewCreationFailedException e) {
      Truth.assertThat(e.getMessage())
          .contains("Analysis of target '//foo:query' failed; build aborted");
    }
    // Surface any assertion failures that happened on the listener threads.
    TrackingAwaiter.INSTANCE.assertNoErrors();
  }
  /**
   * Tests that rules with configurable attributes can be accessed through {@link
   * com.google.devtools.build.lib.skyframe.PostConfiguredTargetFunction}.
   * This is a regression test for a Bazel crash.
   */
  @Test
  public void testPostProcessedConfigurableAttributes() throws Exception {
    useConfiguration("--cpu=k8");
    reporter.removeHandler(failFastHandler); // Expect errors from action conflicts.
    // The cc_binary's name deliberately collides with the object-file path derived from x's
    // foo.cc, provoking an action conflict.
    scratch.file("conflict/BUILD",
        "config_setting(name = 'a', values = {'test_arg': 'a'})",
        "cc_library(name='x', srcs=select({':a': ['a.cc'], '//conditions:default': ['foo.cc']}))",
        "cc_binary(name='_objs/x/conflict/foo.pic.o', srcs=['bar.cc'])");
    AnalysisResult result = update(
        defaultFlags().with(Flag.KEEP_GOING),
        "//conflict:_objs/x/conflict/foo.pic.o",
        "//conflict:x");
    assertThat(result.hasError()).isTrue();
    // Expect to reach this line without a Precondition-triggered NullPointerException.
    assertContainsEvent(
        "file 'conflict/_objs/x/conflict/foo.pic.o' is generated by these conflicting actions");
  }
  /**
   * Tests that setting --java_launcher to a cc_binary whose data includes the java_binary creates
   * an analysis-phase dependency cycle that is reported gracefully.
   */
  @Test
  public void testCycleDueToJavaLauncherConfiguration() throws Exception {
    if (defaultFlags().contains(Flag.DYNAMIC_CONFIGURATIONS)) {
      // Dynamic configurations don't yet support late-bound attributes. Development testing already
      // runs all tests with dynamic configurations enabled, so this will still fail for developers
      // and won't get lost in the fog.
      return;
    }
    scratch.file("foo/BUILD",
        "java_binary(name = 'java', srcs = ['DoesntMatter.java'])",
        "cc_binary(name = 'cpp', data = [':java'])");
    // Everything is fine - the dependency graph is acyclic.
    update("//foo:java", "//foo:cpp");
    if (getTargetConfiguration().trimConfigurations()) {
      fail(ExpectedDynamicConfigurationErrors.LATE_BOUND_ATTRIBUTES_UNSUPPORTED);
    }
    // Now there will be an analysis-phase cycle because the java_binary now has an implicit dep on
    // the cc_binary launcher.
    useConfiguration("--java_launcher=//foo:cpp");
    reporter.removeHandler(failFastHandler);
    try {
      update("//foo:java", "//foo:cpp");
      fail();
    } catch (ViewCreationFailedException expected) {
      Truth.assertThat(expected.getMessage())
          .matches("Analysis of target '//foo:(java|cpp)' failed; build aborted.*");
    }
    assertContainsEvent("cycle in dependency graph");
  }
  /**
   * Tests that analysis of a target fails cleanly when one of its deps lives in a package whose
   * BUILD file does not parse.
   */
  @Test
  public void testDependsOnBrokenTarget() throws Exception {
    scratch.file("foo/BUILD",
        "sh_test(name = 'test', srcs = ['test.sh'], data = ['//bar:data'])");
    scratch.file("bar/BUILD",
        "BROKEN BROKEN BROKEN!!!");
    reporter.removeHandler(failFastHandler);
    try {
      update("//foo:test");
      fail();
    } catch (ViewCreationFailedException expected) {
      Truth.assertThat(expected.getMessage())
          .matches("Analysis of target '//foo:test' failed; build aborted.*");
    }
  }
  /**
   * Regression test: IllegalStateException in BuildView.update() on circular dependency instead of
   * graceful failure.
   */
  @Test
  public void testCircularDependency() throws Exception {
    // foo and bar depend on each other, forming a two-node cycle.
    scratch.file("cycle/BUILD",
        "cc_library(name = 'foo', srcs = ['foo.cc'], deps = [':bar'])",
        "cc_library(name = 'bar', srcs = ['bar.cc'], deps = [':foo'])");
    reporter.removeHandler(failFastHandler);
    try {
      update("//cycle:foo");
      fail();
    } catch (ViewCreationFailedException expected) {
      assertContainsEvent("in cc_library rule //cycle:foo: cycle in dependency graph:");
      assertThat(expected.getMessage())
          .contains("Analysis of target '//cycle:foo' failed; build aborted");
    }
  }
  /**
   * Regression test: IllegalStateException in BuildView.update() on circular dependency instead of
   * graceful failure.
   */
  @Test
  public void testCircularDependencyWithKeepGoing() throws Exception {
    // Two independent cycles (foo<->bar and bas<->bau, the latter reached via bat) plus one
    // healthy target (baz).
    scratch.file("cycle/BUILD",
        "cc_library(name = 'foo', srcs = ['foo.cc'], deps = [':bar'])",
        "cc_library(name = 'bar', srcs = ['bar.cc'], deps = [':foo'])",
        "cc_library(name = 'bat', srcs = ['bat.cc'], deps = [':bas'])",
        "cc_library(name = 'bas', srcs = ['bas.cc'], deps = [':bau'])",
        "cc_library(name = 'bau', srcs = ['bas.cc'], deps = [':bas'])",
        "cc_library(name = 'baz', srcs = ['baz.cc'])");
    reporter.removeHandler(failFastHandler);
    EventBus eventBus = new EventBus();
    LoadingFailureRecorder loadingFailureRecorder = new LoadingFailureRecorder();
    AnalysisFailureRecorder analysisFailureRecorder = new AnalysisFailureRecorder();
    eventBus.register(loadingFailureRecorder);
    eventBus.register(analysisFailureRecorder);
    update(eventBus, defaultFlags().with(Flag.KEEP_GOING),
        "//cycle:foo", "//cycle:bat", "//cycle:baz");
    assertContainsEvent("in cc_library rule //cycle:foo: cycle in dependency graph:");
    assertContainsEvent("in cc_library rule //cycle:bas: cycle in dependency graph:");
    assertContainsEvent(
        "errors encountered while analyzing target '//cycle:foo': it will not be built");
    assertContainsEvent(
        "errors encountered while analyzing target '//cycle:bat': it will not be built");
    // With interleaved loading and analysis, we can no longer distinguish loading-phase cycles
    // and analysis-phase cycles. This was previously reported as a loading-phase cycle, as it
    // happens with any configuration (cycle is hard-coded in the BUILD files). Also see the
    // test below.
    assertThat(Iterables.transform(analysisFailureRecorder.events, ANALYSIS_EVENT_TO_STRING_PAIR))
        .containsExactly(
            Pair.of("//cycle:foo", "//cycle:foo"), Pair.of("//cycle:bat", "//cycle:bas"));
  }
  /**
   * Tests that a cycle introduced only via a configuration flag (--experimental_stl pointing back
   * into the build graph) is reported as an analysis-phase failure, not a loading-phase one.
   */
  @Test
  public void testCircularDependencyWithLateBoundLabel() throws Exception {
    scratch.file("cycle/BUILD",
        "cc_library(name = 'foo', deps = [':bar'])",
        "cc_library(name = 'bar')");
    useConfiguration("--experimental_stl=//cycle:foo");
    reporter.removeHandler(failFastHandler);
    EventBus eventBus = new EventBus();
    LoadingFailureRecorder loadingFailureRecorder = new LoadingFailureRecorder();
    AnalysisFailureRecorder analysisFailureRecorder = new AnalysisFailureRecorder();
    eventBus.register(loadingFailureRecorder);
    eventBus.register(analysisFailureRecorder);
    AnalysisResult result = update(eventBus, defaultFlags().with(Flag.KEEP_GOING), "//cycle:foo");
    assertThat(result.hasError()).isTrue();
    assertContainsEvent("in cc_library rule //cycle:foo: cycle in dependency graph:");
    // This needs to be reported as an analysis-phase cycle; the cycle only occurs due to the stl
    // command-line option, which is part of the configuration, and which is used due to the
    // late-bound label.
    assertThat(Iterables.transform(analysisFailureRecorder.events, ANALYSIS_EVENT_TO_STRING_PAIR))
        .containsExactly(Pair.of("//cycle:foo", "//cycle:foo"));
    assertThat(loadingFailureRecorder.events).isEmpty();
  }
  /**
   * Tests that with --keep_going a missing dependency is surfaced as a loading-phase error in the
   * overall analysis result.
   */
  @Test
  public void testLoadingErrorReportedCorrectly() throws Exception {
    scratch.file("a/BUILD", "cc_library(name='a')");
    scratch.file("b/BUILD", "cc_library(name='b', deps = ['//missing:lib'])");
    reporter.removeHandler(failFastHandler);
    AnalysisResult result = update(defaultFlags().with(Flag.KEEP_GOING), "//a", "//b");
    assertThat(result.hasError()).isTrue();
    assertThat(result.getError())
        .contains("command succeeded, but there were loading phase errors");
  }
  /**
   * Tests that a nonexistent label in --crosstool_top fails configuration creation even under
   * --keep_going.
   */
  @Test
  public void testBadLabelInConfiguration() throws Exception {
    useConfiguration("--crosstool_top=//third_party/crosstool/v2");
    reporter.removeHandler(failFastHandler);
    try {
      update(defaultFlags().with(Flag.KEEP_GOING));
      fail();
    } catch (InvalidConfigurationException e) {
      assertThat(e.getMessage()).contains("third_party/crosstool/v2");
    }
  }
  /**
   * Tests that a nonexistent package given to --fdo_optimize is reported as an invalid
   * configuration.
   */
  @Test
  public void testMissingFdoOptimize() throws Exception {
    // The fdo_optimize flag uses a different code path, because it also accepts paths.
    useConfiguration("--fdo_optimize=//does/not/exist");
    reporter.removeHandler(failFastHandler);
    try {
      update(defaultFlags().with(Flag.KEEP_GOING));
      fail();
    } catch (InvalidConfigurationException e) {
      assertContainsEvent(
          "no such package 'does/not/exist': BUILD file not found on package path");
    }
  }
  /**
   * Tests that --javabase pointing at a filegroup whose members don't exist fails configuration
   * creation.
   */
  @Test
  public void testMissingJavabase() throws Exception {
    // The javabase flag uses yet another code path with its own redirection logic on top of the
    // redirect chaser.
    scratch.file("jdk/BUILD",
        "filegroup(name = 'jdk', srcs = [",
        "    '//does/not/exist:a-piii', '//does/not/exist:b-k8', '//does/not/exist:c-default'])");
    // The package exists but contains none of the referenced targets.
    scratch.file("does/not/exist/BUILD");
    useConfigurationFactory(AnalysisMock.get().createConfigurationFactory());
    useConfiguration("--javabase=//jdk");
    reporter.removeHandler(failFastHandler);
    try {
      update(defaultFlags().with(Flag.KEEP_GOING));
      fail();
    } catch (InvalidConfigurationException e) {
      // Expected
    }
  }
  /**
   * Tests that --xcode_version_config pointing at a nonexistent target fails configuration
   * creation with a message naming the bad label.
   */
  @Test
  public void testMissingXcodeVersion() throws Exception {
    // The xcode_version flag uses yet another code path on top of the redirect chaser.
    // Note that the redirect chaser throws if it can't find a package, but doesn't throw if it
    // can't find a label in a package - that's why we use an empty package here.
    scratch.file("xcode/BUILD");
    useConfiguration("--xcode_version=1.2", "--xcode_version_config=//xcode:does_not_exist");
    reporter.removeHandler(failFastHandler);
    try {
      update(defaultFlags().with(Flag.KEEP_GOING));
      fail();
    } catch (InvalidConfigurationException e) {
      assertThat(e.getMessage()).contains("//xcode:does_not_exist");
    }
  }
  /**
   * Tests that a visibility attribute referencing a nonexistent package fails analysis of a
   * dependent target.
   */
  @Test
  public void testVisibilityReferencesNonexistentPackage() throws Exception {
    scratch.file("z/a/BUILD",
        "py_library(name='a', visibility=['//nonexistent:nothing'])");
    scratch.file("z/b/BUILD",
        "py_library(name='b', deps=['//z/a:a'])");
    reporter.removeHandler(failFastHandler);
    try {
      update("//z/b:b");
      fail();
    } catch (ViewCreationFailedException expected) {
      assertContainsEvent("no such package 'nonexistent'");
    }
  }
  // regression test ("java.lang.IllegalStateException: cannot happen")
  /**
   * Tests that a package-level default_visibility referencing a nonexistent package fails analysis
   * instead of crashing.
   */
  @Test
  public void testDefaultVisibilityInNonexistentPackage() throws Exception {
    scratch.file("z/a/BUILD",
        "package(default_visibility=['//b'])",
        "py_library(name='alib')");
    scratch.file("z/b/BUILD",
        "py_library(name='b', deps=['//z/a:alib'])");
    reporter.removeHandler(failFastHandler);
    try {
      update("//z/b:b");
      fail();
    } catch (ViewCreationFailedException expected) {
      assertContainsEvent("no such package 'b'");
    }
  }
  /**
   * Tests that an error in a non-top-level package is reported exactly once even though the
   * package is reached via a dependency edge.
   */
  @Test
  public void testNonTopLevelErrorsPrintedExactlyOnce() throws Exception {
    scratch.file("parent/BUILD",
        "sh_library(name = 'a', deps = ['//child:b'])");
    scratch.file("child/BUILD",
        "sh_library(name = 'b')",
        "undefined_symbol");
    reporter.removeHandler(failFastHandler);
    try {
      update("//parent:a");
      fail();
    } catch (ViewCreationFailedException expected) {
      // Expected; the assertions below check the emitted events.
    }
    assertContainsEventWithFrequency("name 'undefined_symbol' is not defined", 1);
    assertContainsEventWithFrequency(
        "Target '//child:b' contains an error and its package is in error and referenced "
        + "by '//parent:a'", 1);
  }
  /**
   * Same as {@code testNonTopLevelErrorsPrintedExactlyOnce}, but with --keep_going, where the
   * update completes instead of throwing.
   */
  @Test
  public void testNonTopLevelErrorsPrintedExactlyOnce_KeepGoing() throws Exception {
    scratch.file("parent/BUILD",
        "sh_library(name = 'a', deps = ['//child:b'])");
    scratch.file("child/BUILD",
        "sh_library(name = 'b')",
        "undefined_symbol");
    reporter.removeHandler(failFastHandler);
    update(defaultFlags().with(Flag.KEEP_GOING), "//parent:a");
    assertContainsEventWithFrequency("name 'undefined_symbol' is not defined", 1);
    assertContainsEventWithFrequency(
        "Target '//child:b' contains an error and its package is in error and referenced "
        + "by '//parent:a'", 1);
  }
  /**
   * Tests single reporting of a non-top-level error when the broken target is pulled in via
   * --experimental_action_listener rather than an explicit dep.
   */
  @Test
  public void testNonTopLevelErrorsPrintedExactlyOnce_ActionListener() throws Exception {
    scratch.file("parent/BUILD",
        "sh_library(name = 'a', deps = ['//child:b'])");
    scratch.file("child/BUILD",
        "sh_library(name = 'b')",
        "undefined_symbol");
    scratch.file("okay/BUILD",
        "sh_binary(name = 'okay', srcs = ['okay.sh'])");
    useConfiguration("--experimental_action_listener=//parent:a");
    reporter.removeHandler(failFastHandler);
    try {
      update("//okay");
      fail();
    } catch (ViewCreationFailedException e) {
      // Expected; the assertions below check the emitted events.
    }
    assertContainsEventWithFrequency("name 'undefined_symbol' is not defined", 1);
    assertContainsEventWithFrequency(
        "Target '//child:b' contains an error and its package is in error and referenced "
        + "by '//parent:a'", 1);
  }
  /**
   * Same as the action-listener variant above, but with --keep_going, where the update completes
   * instead of throwing.
   */
  @Test
  public void testNonTopLevelErrorsPrintedExactlyOnce_ActionListener_KeepGoing() throws Exception {
    scratch.file("parent/BUILD",
        "sh_library(name = 'a', deps = ['//child:b'])");
    scratch.file("child/BUILD",
        "sh_library(name = 'b')",
        "undefined_symbol");
    scratch.file("okay/BUILD",
        "sh_binary(name = 'okay', srcs = ['okay.sh'])");
    useConfiguration("--experimental_action_listener=//parent:a");
    reporter.removeHandler(failFastHandler);
    update(defaultFlags().with(Flag.KEEP_GOING), "//okay");
    assertContainsEventWithFrequency("name 'undefined_symbol' is not defined", 1);
    assertContainsEventWithFrequency(
        "Target '//child:b' contains an error and its package is in error and referenced "
        + "by '//parent:a'", 1);
  }
  /**
   * Tests that with dynamic configurations enabled, a top-level target's configuration is trimmed
   * down to only the universal fragment.
   */
  @Test
  public void testTopLevelTargetsAreTrimmedWithDynamicConfigurations() throws Exception {
    scratch.file("foo/BUILD",
        "sh_library(name='x', ",
        "           srcs=['x.sh'])");
    useConfiguration("--experimental_dynamic_configs=on");
    AnalysisResult res = update("//foo:x");
    ConfiguredTarget topLevelTarget = Iterables.getOnlyElement(res.getTargetsToBuild());
    assertThat(topLevelTarget.getConfiguration().getAllFragments().keySet()).containsExactly(
        ruleClassProvider.getUniversalFragment());
  }
  /**
   * Tests that a dependency requiring a configuration fragment disabled by flags (here Jvm, via
   * --experimental_disable_jvm) produces a clear analysis error.
   */
  @Test
  public void errorOnMissingDepFragments() throws Exception {
    scratch.file("foo/BUILD",
        "cc_library(",
        "    name = 'ccbin', ",
        "    srcs = ['c.cc'],",
        "    data = [':javalib'])",
        "java_library(",
        "    name = 'javalib',",
        "    srcs = ['javalib.java'])");
    useConfiguration("--experimental_dynamic_configs=on", "--experimental_disable_jvm");
    reporter.removeHandler(failFastHandler);
    try {
      update("//foo:ccbin");
      fail();
    } catch (ViewCreationFailedException e) {
      // Expected.
    }
    assertContainsEvent("//foo:ccbin: dependency //foo:javalib from attribute \"data\" is missing "
        + "required config fragments: Jvm");
  }
  /**
   * Tests that a late-bound split attribute produces one configured dep per split, each with a
   * distinct configuration value.
   */
  @Test
  public void lateBoundSplitAttributeConfigs() throws Exception {
    useRuleClassProvider(LateBoundSplitUtil.getRuleClassProvider());
    // Register the latebound split fragment with the config creation environment.
    useConfigurationFactory(new ConfigurationFactory(
        ruleClassProvider.getConfigurationCollectionFactory(),
        ruleClassProvider.getConfigurationFragments()));
    scratch.file("foo/BUILD",
        "rule_with_latebound_split(",
        "    name = 'foo')",
        "rule_with_test_fragment(",
        "    name = 'latebound_dep')");
    update("//foo:foo");
    assertNotNull(getConfiguredTarget("//foo:foo"));
    // The split yields two configured instances of the same dep label, one per split config.
    Iterable<ConfiguredTarget> deps = SkyframeExecutorTestUtils.getExistingConfiguredTargets(
        skyframeExecutor, Label.parseAbsolute("//foo:latebound_dep"));
    assertThat(deps).hasSize(2);
    assertThat(
        ImmutableList.of(
            LateBoundSplitUtil.getOptions(Iterables.get(deps, 0).getConfiguration()).fooFlag,
            LateBoundSplitUtil.getOptions(Iterables.get(deps, 1).getConfiguration()).fooFlag))
        .containsExactly("one", "two");
  }
  /**
   * Here, injecting_rule injects an aspect which acts on a action_rule() and registers an action.
   * The action_rule() registers another action of its own.
   *
   * <p>This test asserts that both actions are reported.
   */
  @Test
  public void ruleExtraActionsDontHideAspectExtraActions() throws Exception {
    useConfiguration("--experimental_action_listener=//pkg:listener");
    scratch.file(
        "x/BUILD",
        "load(':extension.bzl', 'injecting_rule', 'action_rule')",
        "injecting_rule(name='a', deps=[':b'])",
        "action_rule(name='b')");
    // Both the aspect's empty_action and action_rule's own action use the 'Mnemonic' mnemonic,
    // so the action listener below matches both.
    scratch.file(
        "x/extension.bzl",
        "def _aspect1_impl(target, ctx):",
        "  ctx.empty_action(mnemonic='Mnemonic')",
        "  return struct()",
        "aspect1 = aspect(_aspect1_impl, attr_aspects=['deps'])",
        "",
        "def _injecting_rule_impl(ctx):",
        "  return struct()",
        "injecting_rule = rule(_injecting_rule_impl, ",
        "    attrs = { 'deps' : attr.label_list(aspects = [aspect1]) })",
        "",
        "def _action_rule_impl(ctx):",
        "  out = ctx.new_file(ctx.label.name)",
        "  ctx.action(outputs = [out], command = 'dontcare', mnemonic='Mnemonic')",
        "  return struct()",
        "action_rule = rule(_action_rule_impl, attrs = { 'deps' : attr.label_list() })");
    scratch.file(
        "pkg/BUILD",
        "extra_action(name='xa', cmd='echo dont-care')",
        "action_listener(name='listener', mnemonics=['Mnemonic'], extra_actions=[':xa'])");
    BuildView.AnalysisResult analysisResult = update("//x:a");
    List<String> owners = new ArrayList<>();
    for (Artifact artifact : analysisResult.getAdditionalArtifactsToBuild()) {
      // NOTE(review): filtering on the "xa" extension to pick out extra-action outputs -- confirm
      // this matches only extra-action artifacts.
      if ("xa".equals(artifact.getExtension())) {
        owners.add(artifact.getOwnerLabel().toString());
      }
    }
    // One extra action from the rule's own action, one from the aspect-injected action; both
    // owned by //x:b.
    assertThat(owners).containsExactly("//x:b", "//x:b");
  }
  /**
   * Tests that a missing package_group in a visibility declaration does not produce a misleading
   * "implicitly depends upon" message.
   */
  @Test
  public void testErrorMessageForMissingPackageGroup() throws Exception {
    scratch.file(
        "apple/BUILD",
        "py_library(name='apple', visibility=['//non:existent'])");
    reporter.removeHandler(failFastHandler);
    try {
      update("//apple");
      fail();
    } catch (ViewCreationFailedException e) {
      // Expected.
    }
    assertDoesNotContainEvent("implicitly depends upon");
  }
  /**
   * Runs the whole suite again with the reduced (Skyframe-based) loading phase enabled via
   * {@link Flag#SKYFRAME_LOADING_PHASE}.
   */
  @TestSpec(size = Suite.SMALL_TESTS)
  @RunWith(JUnit4.class)
  public static class WithSkyframeLoadingPhase extends BuildViewTest {
    @Override
    protected FlagBuilder defaultFlags() {
      return super.defaultFlags().with(Flag.SKYFRAME_LOADING_PHASE);
    }
  }
  /**
   * Runs the whole suite again with dynamic configurations enabled via
   * {@link Flag#DYNAMIC_CONFIGURATIONS}.
   */
  @TestSpec(size = Suite.SMALL_TESTS)
  @RunWith(JUnit4.class)
  public static class WithDynamicConfigurations extends BuildViewTest {
    @Override
    protected FlagBuilder defaultFlags() {
      return super.defaultFlags().with(Flag.DYNAMIC_CONFIGURATIONS);
    }
  }
}
|
{
"content_hash": "3dd9e19364001837806fa506aad3a096",
"timestamp": "",
"source": "github",
"line_count": 1345,
"max_line_length": 100,
"avg_line_length": 41.59256505576208,
"alnum_prop": 0.6585928282864395,
"repo_name": "kchodorow/bazel",
"id": "d7be1ffaf465f59cefdcb317831bbeb2a83bcd4b",
"size": "55942",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "src/test/java/com/google/devtools/build/lib/analysis/BuildViewTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "24765"
},
{
"name": "C++",
"bytes": "807610"
},
{
"name": "HTML",
"bytes": "18163"
},
{
"name": "Java",
"bytes": "20635120"
},
{
"name": "JavaScript",
"bytes": "6860"
},
{
"name": "Makefile",
"bytes": "248"
},
{
"name": "PowerShell",
"bytes": "7559"
},
{
"name": "Protocol Buffer",
"bytes": "118919"
},
{
"name": "Python",
"bytes": "292233"
},
{
"name": "Shell",
"bytes": "767720"
}
]
}
|
import {ProjectorEventContext} from './projectorEventContext';
import {CameraType, LabelRenderParams, RenderContext} from './renderContext';
import {BoundingBox, ScatterPlotRectangleSelector} from './scatterPlotRectangleSelector';
import {ScatterPlotVisualizer} from './scatterPlotVisualizer';
import * as util from './util';
import {Point2D, Point3D} from './vector';
const BACKGROUND_COLOR = 0xffffff;

/**
 * The length of the cube (diameter of the circumscribing sphere) where all the
 * points live.
 */
const CUBE_LENGTH = 2;
// Camera dolly limits, expressed relative to the point-cloud cube size.
const MAX_ZOOM = 5 * CUBE_LENGTH;
const MIN_ZOOM = 0.025 * CUBE_LENGTH;

// Constants relating to the camera parameters.
const PERSP_CAMERA_FOV_VERTICAL = 70;
const PERSP_CAMERA_NEAR_CLIP_PLANE = 0.01;
const PERSP_CAMERA_FAR_CLIP_PLANE = 100;
// Half-extent of the orthographic frustum before aspect-ratio correction.
const ORTHO_CAMERA_FRUSTUM_HALF_EXTENT = 1.2;

// Key presses (legacy KeyboardEvent.keyCode values).
const SHIFT_KEY = 16;
const CTRL_KEY = 17;

// Default camera placements for 3D and 2D projections.
const START_CAMERA_POS_3D = new THREE.Vector3(0.45, 0.9, 1.6);
const START_CAMERA_TARGET_3D = new THREE.Vector3(0, 0, 0);
const START_CAMERA_POS_2D = new THREE.Vector3(0, 0, 4);
const START_CAMERA_TARGET_2D = new THREE.Vector3(0, 0, 0);

const ORBIT_MOUSE_ROTATION_SPEED = 1;
const ORBIT_ANIMATION_ROTATION_CYCLE_IN_SECONDS = 7;

/** Callback invoked whenever the camera position or target changes. */
export type OnCameraMoveListener =
    (cameraPosition: THREE.Vector3, cameraTarget: THREE.Vector3) => void;
/** Supported modes of interaction. */
export enum MouseMode {
  // Dragging sweeps out a rectangle that selects the points inside it.
  AREA_SELECT,
  // Dragging moves the camera; clicking selects the nearest point.
  CAMERA_AND_CLICK_SELECT
}
/** Defines a camera, suitable for serialization. */
export class CameraDef {
  // True for the 2D (orthographic) projection, false for 3D (perspective).
  orthographic: boolean = false;
  // World-space camera position as [x, y, z].
  position: Point3D;
  // World-space look-at target as [x, y, z].
  target: Point3D;
  // Camera zoom factor.
  zoom: number;
}
/**
* Maintains a three.js instantiation and context,
* animation state, and all other logic that's
* independent of how a 3D scatter plot is actually rendered. Also holds an
* array of visualizers and dispatches application events to them.
*/
export class ScatterPlot {
  private projectorEventContext: ProjectorEventContext;

  // DOM element hosting the WebGL canvas.
  private containerNode: HTMLElement;

  private visualizers: ScatterPlotVisualizer[] = [];

  private onCameraMoveListeners: OnCameraMoveListener[] = [];

  private height: number;
  private width: number;

  private mouseMode: MouseMode;
  private backgroundColor: number = BACKGROUND_COLOR;

  // 2 or 3; chosen via setDimensions().
  private dimensionality: number = 3;
  private renderer: THREE.WebGLRenderer;

  private scene: THREE.Scene;
  // Off-screen render target used for point picking.
  private pickingTexture: THREE.WebGLRenderTarget;
  private light: THREE.PointLight;

  private cameraDef: CameraDef = null;
  private camera: THREE.Camera;
  private orbitAnimationOnNextCameraCreation: boolean = false;
  // THREE.OrbitControls instance (untyped because it is attached at runtime).
  private orbitCameraControls: any;
  // requestAnimationFrame handle for the orbit animation loop.
  private orbitAnimationId: number;

  private worldSpacePointPositions: Float32Array;
  private pointColors: Float32Array;
  private pointScaleFactors: Float32Array;
  private labels: LabelRenderParams;
  private polylineColors: {[polylineIndex: number]: Float32Array};
  private polylineOpacities: Float32Array;
  private polylineWidths: Float32Array;

  // Interaction state for distinguishing clicks, drags, and area selection.
  private selecting = false;
  private nearestPoint: number;
  private mouseIsDown = false;
  private isDragSequence = false;
  private rectangleSelector: ScatterPlotRectangleSelector;
  /**
   * Builds the three.js scene (renderer, light, camera) inside the given
   * container and wires up selection and interaction listeners.
   */
  constructor(
      container: d3.Selection<any>,
      projectorEventContext: ProjectorEventContext) {
    this.containerNode = container.node() as HTMLElement;
    this.projectorEventContext = projectorEventContext;
    this.getLayoutValues();

    this.scene = new THREE.Scene();
    this.renderer = new THREE.WebGLRenderer(
        {alpha: true, premultipliedAlpha: false, antialias: false});
    this.renderer.setClearColor(BACKGROUND_COLOR, 1);
    this.containerNode.appendChild(this.renderer.domElement);
    this.light = new THREE.PointLight(0xFFECBF, 1, 0);
    this.scene.add(this.light);

    // Start in 3D with a default camera, then render one initial frame.
    this.setDimensions(3);
    this.recreateCamera(this.makeDefaultCameraDef(this.dimensionality));
    this.renderer.render(this.scene, this.camera);

    this.rectangleSelector = new ScatterPlotRectangleSelector(
        this.containerNode,
        (boundingBox: BoundingBox) => this.selectBoundingBox(boundingBox));
    this.addInteractionListeners();
  }
private addInteractionListeners() {
this.containerNode.addEventListener(
'mousemove', this.onMouseMove.bind(this));
this.containerNode.addEventListener(
'mousedown', this.onMouseDown.bind(this));
this.containerNode.addEventListener('mouseup', this.onMouseUp.bind(this));
this.containerNode.addEventListener('click', this.onClick.bind(this));
window.addEventListener('keydown', this.onKeyDown.bind(this), false);
window.addEventListener('keyup', this.onKeyUp.bind(this), false);
}
  /**
   * Subscribes to the OrbitControls lifecycle events so user camera motion
   * stops any running orbit animation, notifies listeners, and re-renders.
   */
  private addCameraControlsEventListeners(cameraControls: any) {
    // Start is called when the user stars interacting with
    // controls.
    cameraControls.addEventListener('start', () => {
      // Manual interaction cancels the automatic orbit animation.
      this.stopOrbitAnimation();
      this.onCameraMoveListeners.forEach(
          l => l(this.camera.position, cameraControls.target));
    });

    // Change is called everytime the user interacts with the controls.
    cameraControls.addEventListener('change', () => {
      this.render();
    });

    // End is called when the user stops interacting with the
    // controls (e.g. on mouse up, after dragging).
    cameraControls.addEventListener('end', () => {});
  }
  /**
   * (Re)creates the OrbitControls for the given camera, configuring rotation
   * and mouse-button bindings for 3D vs 2D interaction.
   */
  private makeOrbitControls(
      camera: THREE.Camera, cameraDef: CameraDef, cameraIs3D: boolean) {
    if (this.orbitCameraControls != null) {
      // Detach the previous controls' DOM listeners before replacing them.
      this.orbitCameraControls.dispose();
    }
    const occ =
        new (THREE as any).OrbitControls(camera, this.renderer.domElement);
    // Save the desired state into the controls' *0 fields; reset() below
    // snaps the controls to this saved state.
    occ.target0 = new THREE.Vector3(
        cameraDef.target[0], cameraDef.target[1], cameraDef.target[2]);
    occ.position0 = new THREE.Vector3().copy(camera.position);
    occ.zoom0 = cameraDef.zoom;
    occ.enableRotate = cameraIs3D;
    occ.autoRotate = false;
    occ.rotateSpeed = ORBIT_MOUSE_ROTATION_SPEED;
    if (cameraIs3D) {
      // 3D: left-drag orbits, right-drag pans.
      occ.mouseButtons.ORBIT = THREE.MOUSE.LEFT;
      occ.mouseButtons.PAN = THREE.MOUSE.RIGHT;
    } else {
      // 2D: no orbiting; left-drag pans.
      occ.mouseButtons.ORBIT = null;
      occ.mouseButtons.PAN = THREE.MOUSE.LEFT;
    }
    occ.reset();

    this.camera = camera;
    this.orbitCameraControls = occ;
    this.addCameraControlsEventListeners(this.orbitCameraControls);
  }
private makeCamera3D(cameraDef: CameraDef, w: number, h: number) {
let camera: THREE.PerspectiveCamera;
{
const aspectRatio = w / h;
camera = new THREE.PerspectiveCamera(
PERSP_CAMERA_FOV_VERTICAL, aspectRatio, PERSP_CAMERA_NEAR_CLIP_PLANE,
PERSP_CAMERA_FAR_CLIP_PLANE);
camera.position.set(
cameraDef.position[0], cameraDef.position[1], cameraDef.position[2]);
const at = new THREE.Vector3(
cameraDef.target[0], cameraDef.target[1], cameraDef.target[2]);
camera.lookAt(at);
camera.zoom = cameraDef.zoom;
camera.updateProjectionMatrix();
}
this.camera = camera;
this.makeOrbitControls(camera, cameraDef, true);
}
private makeCamera2D(cameraDef: CameraDef, w: number, h: number) {
let camera: THREE.OrthographicCamera;
const target = new THREE.Vector3(
cameraDef.target[0], cameraDef.target[1], cameraDef.target[2]);
{
const aspectRatio = w / h;
let left = -ORTHO_CAMERA_FRUSTUM_HALF_EXTENT;
let right = ORTHO_CAMERA_FRUSTUM_HALF_EXTENT;
let bottom = -ORTHO_CAMERA_FRUSTUM_HALF_EXTENT;
let top = ORTHO_CAMERA_FRUSTUM_HALF_EXTENT;
// Scale up the larger of (w, h) to match the aspect ratio.
if (aspectRatio > 1) {
left *= aspectRatio;
right *= aspectRatio;
} else {
top /= aspectRatio;
bottom /= aspectRatio;
}
camera =
new THREE.OrthographicCamera(left, right, top, bottom, -1000, 1000);
camera.position.set(
cameraDef.position[0], cameraDef.position[1], cameraDef.position[2]);
camera.up = new THREE.Vector3(0, 1, 0);
camera.lookAt(target);
camera.zoom = cameraDef.zoom;
camera.updateProjectionMatrix();
}
this.camera = camera;
this.makeOrbitControls(camera, cameraDef, false);
}
/**
 * Builds the default camera definition for the given dimensionality:
 * orthographic with the 2D start pose for 2, perspective with the 3D start
 * pose for 3. Zoom always starts at 1.0.
 */
private makeDefaultCameraDef(dimensionality: number): CameraDef {
  const def = new CameraDef();
  def.orthographic = (dimensionality === 2);
  def.zoom = 1.0;
  const pos = def.orthographic ? START_CAMERA_POS_2D : START_CAMERA_POS_3D;
  const tgt =
      def.orthographic ? START_CAMERA_TARGET_2D : START_CAMERA_TARGET_3D;
  def.position = [pos.x, pos.y, pos.z];
  def.target = [tgt.x, tgt.y, tgt.z];
  return def;
}
/**
 * Recreate the scatter plot camera from a definition structure.
 * Replaces both this.camera and this.orbitCameraControls.
 */
recreateCamera(cameraDef: CameraDef) {
  if (cameraDef.orthographic) {
    this.makeCamera2D(cameraDef, this.width, this.height);
  } else {
    this.makeCamera3D(cameraDef, this.width, this.height);
  }
  // The makeCamera* call above installed fresh controls; clamp their zoom
  // range and sync them before any orbit animation kicks in.
  this.orbitCameraControls.minDistance = MIN_ZOOM;
  this.orbitCameraControls.maxDistance = MAX_ZOOM;
  this.orbitCameraControls.update();
  if (this.orbitAnimationOnNextCameraCreation) {
    this.startOrbitAnimation();
  }
}
/**
 * Click handler. Notifies selection listeners with the nearest point (or an
 * empty selection) unless the click ended a drag or area-select gesture.
 * @param e The originating mouse event; null when invoked programmatically.
 * @param notify Whether to fire the selection-changed notification.
 */
private onClick(e?: MouseEvent, notify = true) {
  // Area-selection clicks are handled by the rectangle selector instead.
  if (e && this.selecting) {
    return;
  }
  // Only call event handlers if the click originated from the scatter plot.
  if (!this.isDragSequence && notify) {
    const selection = (this.nearestPoint != null) ? [this.nearestPoint] : [];
    this.projectorEventContext.notifySelectionChanged(selection);
  }
  this.isDragSequence = false;
  this.render();
}
/**
 * Mouse-down handler. In selection mode it starts a rectangle selection;
 * otherwise it re-syncs the orbit/pan mouse-button assignment with the
 * current ctrl-key state (which may have changed while the tab was hidden).
 */
private onMouseDown(e: MouseEvent) {
  this.isDragSequence = false;
  this.mouseIsDown = true;
  if (this.selecting) {
    // Disable camera movement while dragging out the selection rectangle.
    this.orbitCameraControls.enabled = false;
    this.rectangleSelector.onMouseDown(e.offsetX, e.offsetY);
    this.setNearestPointToMouse(e);
  } else if (
      !e.ctrlKey && this.sceneIs3D() &&
      this.orbitCameraControls.mouseButtons.ORBIT === THREE.MOUSE.RIGHT) {
    // The user happened to press the ctrl key when the tab was active,
    // unpressed the ctrl when the tab was inactive, and now he/she
    // is back to the projector tab.
    this.orbitCameraControls.mouseButtons.ORBIT = THREE.MOUSE.LEFT;
    this.orbitCameraControls.mouseButtons.PAN = THREE.MOUSE.RIGHT;
  } else if (
      e.ctrlKey && this.sceneIs3D() &&
      this.orbitCameraControls.mouseButtons.ORBIT === THREE.MOUSE.LEFT) {
    // Similarly to the situation above.
    this.orbitCameraControls.mouseButtons.ORBIT = THREE.MOUSE.RIGHT;
    this.orbitCameraControls.mouseButtons.PAN = THREE.MOUSE.LEFT;
  }
}
/** When we stop dragging/zooming, return to normal behavior. */
private onMouseUp(e: any) {
  if (this.selecting) {
    // Re-enable camera movement and finalize the selection rectangle.
    this.orbitCameraControls.enabled = true;
    this.rectangleSelector.onMouseUp();
    this.render();
  }
  this.mouseIsDown = false;
}
/**
 * When the mouse moves, find the nearest point (if any) and send it to the
 * hoverlisteners (usually called from embedding.ts)
 */
private onMouseMove(e: MouseEvent) {
  // Any movement while the button is held counts as a drag, which
  // suppresses the click-selection in onClick.
  this.isDragSequence = this.mouseIsDown;
  // Depending if we're selecting or just navigating, handle accordingly.
  if (this.selecting && this.mouseIsDown) {
    this.rectangleSelector.onMouseMove(e.offsetX, e.offsetY);
    this.render();
  } else if (!this.mouseIsDown) {
    this.setNearestPointToMouse(e);
    this.projectorEventContext.notifyHoverOverPoint(this.nearestPoint);
  }
}
/** For using ctrl + left click as right click, and for circle select */
private onKeyDown(e: any) {
  // If ctrl is pressed, use left click to orbit
  if (e.keyCode === CTRL_KEY && this.sceneIs3D()) {
    this.orbitCameraControls.mouseButtons.ORBIT = THREE.MOUSE.RIGHT;
    this.orbitCameraControls.mouseButtons.PAN = THREE.MOUSE.LEFT;
  }
  // If shift is pressed, start selecting
  if (e.keyCode === SHIFT_KEY) {
    this.selecting = true;
    this.containerNode.style.cursor = 'crosshair';
  }
}
/** For using ctrl + left click as right click, and for circle select */
private onKeyUp(e: any) {
  // Releasing ctrl restores the default button mapping (left orbits).
  if (e.keyCode === CTRL_KEY && this.sceneIs3D()) {
    this.orbitCameraControls.mouseButtons.ORBIT = THREE.MOUSE.LEFT;
    this.orbitCameraControls.mouseButtons.PAN = THREE.MOUSE.RIGHT;
  }
  // If shift is released, stop selecting
  if (e.keyCode === SHIFT_KEY) {
    // Stay in selection mode if the UI mouse mode is AREA_SELECT.
    this.selecting = (this.getMouseMode() === MouseMode.AREA_SELECT);
    if (!this.selecting) {
      this.containerNode.style.cursor = 'default';
    }
    this.render();
  }
}
/**
 * Returns a list of indices of points in a bounding box from the picking
 * texture.
 * @param boundingBox The bounding box to select from, in CSS pixels.
 * @return Sorted point indices found in the box, or null when there are no
 *     point positions loaded yet.
 */
private getPointIndicesFromPickingTexture(boundingBox: BoundingBox):
    number[] {
  if (this.worldSpacePointPositions == null) {
    return null;
  }
  // Positions are packed xyz triplets, so one point per 3 floats.
  const pointCount = this.worldSpacePointPositions.length / 3;
  // The picking texture is in device pixels; scale the CSS-pixel box.
  const dpr = window.devicePixelRatio || 1;
  const x = Math.floor(boundingBox.x * dpr);
  const y = Math.floor(boundingBox.y * dpr);
  const width = Math.floor(boundingBox.width * dpr);
  const height = Math.floor(boundingBox.height * dpr);
  // Create buffer for reading all of the pixels from the texture (RGBA).
  const pixelBuffer = new Uint8Array(width * height * 4);
  // Read the pixels from the bounding box. The y axis is flipped because
  // texture rows run bottom-up while mouse coordinates run top-down.
  this.renderer.readRenderTargetPixels(
      this.pickingTexture, x, this.pickingTexture.height - y, width, height,
      pixelBuffer);
  // Keep a flat list of each point and whether they are selected or not. This
  // approach is more efficient than using an object keyed by the index.
  // Fix: allocate one flag per point, not per float coordinate — the old
  // buffer was sized worldSpacePointPositions.length (3x too large) and the
  // collection loop below walked all the unused slots.
  const pointIndicesSelection = new Uint8Array(pointCount);
  for (let i = 0; i < width * height; i++) {
    // Each pixel encodes a point id in its RGB channels (24 bits).
    const id = (pixelBuffer[i * 4] << 16) | (pixelBuffer[i * 4 + 1] << 8) |
        pixelBuffer[i * 4 + 2];
    // 0xffffff is the clear color, i.e. "no point here".
    if (id !== 0xffffff && (id < pointCount)) {
      pointIndicesSelection[id] = 1;
    }
  }
  const pointIndices: number[] = [];
  for (let i = 0; i < pointIndicesSelection.length; i++) {
    if (pointIndicesSelection[i] === 1) {
      pointIndices.push(i);
    }
  }
  return pointIndices;
}
/** Notifies selection listeners with every point inside the bounding box. */
private selectBoundingBox(boundingBox: BoundingBox) {
  const indices = this.getPointIndicesFromPickingTexture(boundingBox);
  this.projectorEventContext.notifySelectionChanged(indices);
}
/**
 * Updates this.nearestPoint with the data point under the mouse cursor, by
 * sampling a 1x1 region of the picking texture at the cursor position.
 */
private setNearestPointToMouse(e: MouseEvent) {
  if (this.pickingTexture == null) {
    this.nearestPoint = null;
    return;
  }
  const boundingBox:
      BoundingBox = {x: e.offsetX, y: e.offsetY, width: 1, height: 1};
  const pointIndices = this.getPointIndicesFromPickingTexture(boundingBox);
  this.nearestPoint = (pointIndices != null) ? pointIndices[0] : null;
}
/**
 * Reads the container's current size, caches it on this.width/this.height,
 * and returns it. Height is clamped to at least 1 to avoid division by zero
 * in aspect-ratio math.
 */
private getLayoutValues(): Point2D {
  this.width = this.containerNode.offsetWidth;
  this.height = Math.max(1, this.containerNode.offsetHeight);
  return [this.width, this.height];
}
/** True when the scatter plot is currently in 3D mode. */
private sceneIs3D(): boolean {
  return this.dimensionality === 3;
}
/**
 * Detaches the named 'axes' helper from the scene and returns it so the
 * caller can re-attach it later; returns the (null-ish) lookup result when
 * no axes object is present.
 */
private remove3dAxisFromScene(): THREE.Object3D {
  const axes = this.scene.getObjectByName('axes');
  if (axes == null) {
    return axes;
  }
  this.scene.remove(axes);
  return axes;
}
/** Adds an axes helper named 'axes' to the scene (used in 3D mode). */
private add3dAxis() {
  const axes = new THREE.AxisHelper();
  // The name lets remove3dAxisFromScene find it again later.
  axes.name = 'axes';
  this.scene.add(axes);
}
/**
 * Set 2d vs 3d mode.
 * Recreates the camera from the stored definition (or a default one) and
 * shows the 3D axes only in 3D mode.
 * @throws RangeError when dimensionality is not 2 or 3.
 */
setDimensions(dimensionality: number) {
  if ((dimensionality !== 2) && (dimensionality !== 3)) {
    throw new RangeError('dimensionality must be 2 or 3');
  }
  this.dimensionality = dimensionality;
  const def = this.cameraDef || this.makeDefaultCameraDef(dimensionality);
  this.recreateCamera(def);
  this.remove3dAxisFromScene();
  if (dimensionality === 3) {
    this.add3dAxis();
  }
}
/** Gets the current camera information, suitable for serialization. */
getCameraDef(): CameraDef {
  const def = new CameraDef();
  const pos = this.camera.position;
  const tgt = this.orbitCameraControls.target;
  // 2D mode uses an orthographic camera, 3D a perspective one.
  def.orthographic = !this.sceneIs3D();
  def.position = [pos.x, pos.y, pos.z];
  def.target = [tgt.x, tgt.y, tgt.z];
  def.zoom = (this.camera as any).zoom;
  return def;
}
/**
 * Sets parameters for the next camera recreation.
 * @param def Camera definition to use on the next recreateCamera call.
 * @param orbitAnimation Whether to start orbiting once the camera exists.
 */
setCameraParametersForNextCameraCreation(
    def: CameraDef, orbitAnimation: boolean) {
  this.cameraDef = def;
  this.orbitAnimationOnNextCameraCreation = orbitAnimation;
}
/** Gets the current camera position. */
getCameraPosition(): Point3D {
  const {x, y, z} = this.camera.position;
  return [x, y, z];
}
/** Gets the current camera target. */
getCameraTarget(): Point3D {
  const {x, y, z} = this.orbitCameraControls.target;
  return [x, y, z];
}
/** Sets up the camera from given position and target coordinates. */
setCameraPositionAndTarget(position: Point3D, target: Point3D) {
  // A programmatic camera move cancels any in-flight orbit animation.
  this.stopOrbitAnimation();
  this.camera.position.set(position[0], position[1], position[2]);
  this.orbitCameraControls.target.set(target[0], target[1], target[2]);
  this.orbitCameraControls.update();
  this.render();
}
/** Starts orbiting the camera around its current lookat target. */
startOrbitAnimation() {
  // Orbiting only makes sense with a 3D (perspective) camera.
  if (!this.sceneIs3D()) {
    return;
  }
  // Restart cleanly if an animation loop is already running.
  if (this.orbitAnimationId != null) {
    this.stopOrbitAnimation();
  }
  this.orbitCameraControls.autoRotate = true;
  this.orbitCameraControls.rotateSpeed =
      ORBIT_ANIMATION_ROTATION_CYCLE_IN_SECONDS;
  this.updateOrbitAnimation();
}
/**
 * One frame of the orbit animation loop: advances the controls, then
 * schedules the next frame. Stopped via cancelAnimationFrame on the
 * stored orbitAnimationId.
 */
private updateOrbitAnimation() {
  this.orbitCameraControls.update();
  this.orbitAnimationId =
      requestAnimationFrame(() => this.updateOrbitAnimation());
}
/** Stops the orbiting animation on the camera. */
stopOrbitAnimation() {
  this.orbitCameraControls.autoRotate = false;
  // Restore the interactive (mouse-drag) rotation speed.
  this.orbitCameraControls.rotateSpeed = ORBIT_MOUSE_ROTATION_SPEED;
  if (this.orbitAnimationId != null) {
    cancelAnimationFrame(this.orbitAnimationId);
    this.orbitAnimationId = null;
  }
}
/** Adds a visualizer to the set, will start dispatching events to it */
addVisualizer(visualizer: ScatterPlotVisualizer) {
  if (this.scene) {
    visualizer.setScene(this.scene);
  }
  // Bring the new visualizer up to date with the current layout and data.
  visualizer.onResize(this.width, this.height);
  visualizer.onPointPositionsChanged(this.worldSpacePointPositions);
  this.visualizers.push(visualizer);
}
/** Removes all visualizers attached to this scatter plot. */
removeAllVisualizers() {
  for (const visualizer of this.visualizers) {
    visualizer.dispose();
  }
  this.visualizers = [];
}
/** Update scatter plot with a new array of packed xyz point positions. */
setPointPositions(worldSpacePointPositions: Float32Array) {
  this.worldSpacePointPositions = worldSpacePointPositions;
  this.visualizers.forEach(
      v => v.onPointPositionsChanged(worldSpacePointPositions));
}
/**
 * Renders the scatter plot in two passes: first into the picking texture
 * (point ids encoded as colors, used for hover/selection hit-testing),
 * then into the visible canvas.
 */
render() {
  {
    // Keep the light slightly offset from the camera so geometry is lit
    // from the viewer's direction.
    const lightPos = this.camera.position.clone();
    lightPos.x += 1;
    lightPos.y += 1;
    this.light.position.set(lightPos.x, lightPos.y, lightPos.z);
  }
  const cameraType = (this.camera instanceof THREE.PerspectiveCamera) ?
      CameraType.Perspective :
      CameraType.Orthographic;
  let cameraSpacePointExtents: [number, number] = [0, 0];
  if (this.worldSpacePointPositions != null) {
    cameraSpacePointExtents = util.getNearFarPoints(
        this.worldSpacePointPositions, this.camera.position,
        this.orbitCameraControls.target);
  }
  const rc = new RenderContext(
      this.camera, cameraType, this.orbitCameraControls.target, this.width,
      this.height, cameraSpacePointExtents[0], cameraSpacePointExtents[1],
      this.backgroundColor, this.pointColors, this.pointScaleFactors,
      this.labels, this.polylineColors, this.polylineOpacities,
      this.polylineWidths);
  // Render first pass to picking target. This render fills pickingTexture
  // with colors that are actually point ids, so that sampling the texture at
  // the mouse's current x,y coordinates will reveal the data point that the
  // mouse is over.
  this.visualizers.forEach(v => v.onPickingRender(rc));
  {
    // The axes helper must not appear in the picking pass, or its pixels
    // would be misread as point ids; remove it and restore it afterwards.
    const axes = this.remove3dAxisFromScene();
    this.renderer.render(this.scene, this.camera, this.pickingTexture);
    if (axes != null) {
      this.scene.add(axes);
    }
  }
  // Render second pass to color buffer, to be displayed on the canvas.
  this.visualizers.forEach(v => v.onRender(rc));
  this.renderer.render(this.scene, this.camera);
}
/**
 * Sets the interaction mode; AREA_SELECT enables rectangle selection and a
 * crosshair cursor, any other mode restores camera navigation.
 */
setMouseMode(mouseMode: MouseMode) {
  this.mouseMode = mouseMode;
  const areaSelect = mouseMode === MouseMode.AREA_SELECT;
  this.selecting = areaSelect;
  this.containerNode.style.cursor = areaSelect ? 'crosshair' : 'default';
}
/** Set the colors for every data point. (RGB triplets) */
setPointColors(colors: Float32Array) {
  this.pointColors = colors;
}
/** Set the scale factors for every data point. (scalars) */
setPointScaleFactors(scaleFactors: Float32Array) {
  this.pointScaleFactors = scaleFactors;
}
/** Set the labels to rendered */
setLabels(labels: LabelRenderParams) {
  this.labels = labels;
}
/** Set the colors for every data polyline. (RGB triplets) */
setPolylineColors(colors: {[polylineIndex: number]: Float32Array}) {
  this.polylineColors = colors;
}
/** Set the opacity for every data polyline. */
setPolylineOpacities(opacities: Float32Array) {
  this.polylineOpacities = opacities;
}
/** Set the width for every data polyline. */
setPolylineWidths(widths: Float32Array) {
  this.polylineWidths = widths;
}
/** Returns the current interaction mode. */
getMouseMode(): MouseMode {
  return this.mouseMode;
}
/** Resets the camera to the default pose for the current dimensionality. */
resetZoom() {
  this.recreateCamera(this.makeDefaultCameraDef(this.dimensionality));
  this.render();
}
/**
 * Toggles night mode by applying (or clearing) a CSS color-inversion
 * filter on all canvases inside the container.
 */
setDayNightMode(isNight: boolean) {
  d3.select(this.containerNode)
      .selectAll('canvas')
      .style('filter', isNight ? 'invert(100%)' : null);
}
/**
 * Resizes renderer, camera frustum and picking texture to match the
 * container's current layout size.
 * @param render Whether to re-render immediately after resizing.
 */
resize(render = true) {
  const [oldW, oldH] = [this.width, this.height];
  const [newW, newH] = this.getLayoutValues();
  if (this.dimensionality === 3) {
    const camera = (this.camera as THREE.PerspectiveCamera);
    camera.aspect = newW / newH;
    camera.updateProjectionMatrix();
  } else {
    const camera = (this.camera as THREE.OrthographicCamera);
    // Scale the ortho frustum by however much the window changed.
    const scaleW = newW / oldW;
    const scaleH = newH / oldH;
    const newCamHalfWidth = ((camera.right - camera.left) * scaleW) / 2;
    const newCamHalfHeight = ((camera.top - camera.bottom) * scaleH) / 2;
    camera.top = newCamHalfHeight;
    camera.bottom = -newCamHalfHeight;
    camera.left = -newCamHalfWidth;
    camera.right = newCamHalfWidth;
    camera.updateProjectionMatrix();
  }
  // Accounting for retina displays.
  const dpr = window.devicePixelRatio || 1;
  this.renderer.setPixelRatio(dpr);
  this.renderer.setSize(newW, newH);
  // the picking texture needs to be exactly the same as the render texture.
  {
    const renderCanvasSize = this.renderer.getSize();
    const pixelRatio = this.renderer.getPixelRatio();
    // Fix: release the GPU resources of the previous render target before
    // replacing it; otherwise every resize leaks a framebuffer.
    if (this.pickingTexture != null) {
      this.pickingTexture.dispose();
    }
    this.pickingTexture = new THREE.WebGLRenderTarget(
        renderCanvasSize.width * pixelRatio,
        renderCanvasSize.height * pixelRatio);
    this.pickingTexture.texture.minFilter = THREE.LinearFilter;
  }
  this.visualizers.forEach(v => v.onResize(newW, newH));
  if (render) {
    this.render();
  }
}
/** Registers a listener to be notified on camera movement. */
onCameraMove(listener: OnCameraMoveListener) {
  this.onCameraMoveListeners.push(listener);
}
/**
 * Programmatically selects a point as if it had been clicked, without
 * firing the selection-changed notification (notify=false).
 */
clickOnPoint(pointIndex: number) {
  this.nearestPoint = pointIndex;
  this.onClick(null, false);
}
}
|
{
"content_hash": "3dc7d0c6b6828cf75b5455ef67fba37b",
"timestamp": "",
"source": "github",
"line_count": 715,
"max_line_length": 89,
"avg_line_length": 33.73006993006993,
"alnum_prop": 0.6825061160177468,
"repo_name": "MoamerEncsConcordiaCa/tensorflow",
"id": "b40863cead1555c7d397caada96ba742b36c15c1",
"size": "24781",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "tensorflow/tensorboard/components/vz_projector/scatterPlot.ts",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "7583"
},
{
"name": "C",
"bytes": "176871"
},
{
"name": "C++",
"bytes": "22197536"
},
{
"name": "CMake",
"bytes": "137754"
},
{
"name": "CSS",
"bytes": "774"
},
{
"name": "Go",
"bytes": "786935"
},
{
"name": "HTML",
"bytes": "579704"
},
{
"name": "Java",
"bytes": "286255"
},
{
"name": "JavaScript",
"bytes": "13406"
},
{
"name": "Jupyter Notebook",
"bytes": "1833623"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "37227"
},
{
"name": "Objective-C",
"bytes": "7056"
},
{
"name": "Objective-C++",
"bytes": "64656"
},
{
"name": "Protocol Buffer",
"bytes": "207866"
},
{
"name": "Python",
"bytes": "19632899"
},
{
"name": "Shell",
"bytes": "334269"
},
{
"name": "TypeScript",
"bytes": "786973"
}
]
}
|
package fi.iki.elonen;
import java.util.HashMap;
import java.util.Map;
import org.junit.Before;
import org.junit.Test;
import fi.iki.elonen.NanoHTTPD.IHTTPSession;
import fi.iki.elonen.NanoHTTPD.Response;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import static junit.framework.Assert.*;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.atLeast;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@RunWith(MockitoJUnitRunner.class)
public class WebSocketResponseHandlerTest {

    @Mock
    private IHTTPSession session;
    @Mock
    private WebSocket webSocket;
    @Mock
    private IWebSocketFactory webSocketFactory;
    @Mock
    private Response response;
    @Captor
    private ArgumentCaptor<String> headerNameCaptor;
    @Captor
    private ArgumentCaptor<String> headerCaptor;

    // Mutable header map shared by all tests; setUp() fills it with a valid
    // handshake, individual tests then corrupt single entries.
    private Map<String, String> headers;
    private WebSocketResponseHandler responseHandler;

    @Before
    public void setUp() {
        // Valid RFC 6455 upgrade request (the sec-websocket-key is the
        // example key from the spec).
        headers = new HashMap<String, String>();
        headers.put("upgrade", "websocket");
        headers.put("connection", "Upgrade");
        headers.put("sec-websocket-key", "x3JJHMbDL1EzLkh9GBhXDw==");
        headers.put("sec-websocket-protocol", "chat, superchat");
        headers.put("sec-websocket-version", "13");
        when(session.getHeaders()).thenReturn(headers);
        when(webSocketFactory.openWebSocket(any(IHTTPSession.class))).thenReturn(webSocket);
        when(webSocket.getHandshakeResponse()).thenReturn(response);
        responseHandler = new WebSocketResponseHandler(webSocketFactory);
    }

    @Test
    public void testHandshakeReturnsResponseWithExpectedHeaders() {
        Response handshakeResponse = responseHandler.serve(session);
        verify(webSocket).getHandshakeResponse();
        assertNotNull(handshakeResponse);
        assertSame(response, handshakeResponse);
        // The accept header must be the SHA-1/base64 digest of the key, and
        // only the first offered subprotocol is echoed back.
        verify(response, atLeast(1)).addHeader(headerNameCaptor.capture(), headerCaptor.capture());
        assertHeader(0, "sec-websocket-accept", "HSmrc0sMlYUkAGmm5OPpG2HaGWk=");
        assertHeader(1, "sec-websocket-protocol", "chat");
    }

    @Test
    public void testWrongWebsocketVersionReturnsErrorResponse() {
        headers.put("sec-websocket-version", "12");
        Response handshakeResponse = responseHandler.serve(session);
        assertNotNull(handshakeResponse);
        assertEquals(Response.Status.BAD_REQUEST, handshakeResponse.getStatus());
    }

    @Test
    public void testMissingKeyReturnsErrorResponse() {
        headers.remove("sec-websocket-key");
        Response handshakeResponse = responseHandler.serve(session);
        assertNotNull(handshakeResponse);
        assertEquals(Response.Status.BAD_REQUEST, handshakeResponse.getStatus());
    }

    @Test
    public void testWrongUpgradeHeaderReturnsNullResponse() {
        // A non-websocket upgrade is simply not handled (null, not an error).
        headers.put("upgrade", "not a websocket");
        Response handshakeResponse = responseHandler.serve(session);
        assertNull(handshakeResponse);
    }

    @Test
    public void testWrongConnectionHeaderReturnsNullResponse() {
        headers.put("connection", "Junk");
        Response handshakeResponse = responseHandler.serve(session);
        assertNull(handshakeResponse);
    }

    @Test
    public void testConnectionHeaderHandlesKeepAlive_FixingFirefoxConnectIssue() {
        // Firefox sends "keep-alive, Upgrade"; the handler must still match.
        headers.put("connection", "keep-alive, Upgrade");
        Response handshakeResponse = responseHandler.serve(session);
        verify(webSocket).getHandshakeResponse();
        assertNotNull(handshakeResponse);
        assertSame(response, handshakeResponse);
    }

    // Asserts the index-th captured addHeader() call had this name/value.
    private void assertHeader(int index, String name, String value) {
        assertEquals(name, headerNameCaptor.getAllValues().get(index));
        assertEquals(value, headerCaptor.getAllValues().get(index));
    }
}
|
{
"content_hash": "72a449778f0be4d9d71db3de7808f683",
"timestamp": "",
"source": "github",
"line_count": 120,
"max_line_length": 99,
"avg_line_length": 33.141666666666666,
"alnum_prop": 0.7196379180286648,
"repo_name": "Omegaphora/external_nanohttpd",
"id": "04a497bad2946c2501d7dcbe6a30324680f2ea54",
"size": "3977",
"binary": false,
"copies": "9",
"ref": "refs/heads/lp5.1",
"path": "websocket/src/test/java/fi/iki/elonen/WebSocketResponseHandlerTest.java",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "2981"
},
{
"name": "Java",
"bytes": "168553"
},
{
"name": "Makefile",
"bytes": "344"
}
]
}
|
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/stock.iml" filepath="$PROJECT_DIR$/.idea/stock.iml" />
</modules>
</component>
</project>
|
{
"content_hash": "428865ee4f8c989657da07a0189a944a",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 104,
"avg_line_length": 32.75,
"alnum_prop": 0.6526717557251909,
"repo_name": "Shaharking/stocksApp",
"id": "20fb10ea0ba4ca4d21c4b3d21ffb50cc8ead549e",
"size": "262",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": ".idea/modules.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2073"
},
{
"name": "HTML",
"bytes": "4477"
},
{
"name": "JavaScript",
"bytes": "25633"
}
]
}
|
package org.oep.dossiermgt.model.impl;
import com.liferay.portal.kernel.util.StringBundler;
import com.liferay.portal.model.CacheModel;
import org.oep.dossiermgt.model.StatisticByDay;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.Date;
/**
* The cache model class for representing StatisticByDay in entity cache.
*
* @author trungdk
* @see StatisticByDay
* @generated
*/
public class StatisticByDayCacheModel implements CacheModel<StatisticByDay>,
    Externalizable {
    // NOTE: @generated Liferay cache model. The field order in
    // readExternal/writeExternal is the wire format and must stay in sync.

    @Override
    public String toString() {
        // 27 = 13 "name=value" pairs plus the closing brace.
        StringBundler sb = new StringBundler(27);
        sb.append("{statisticByDayId=");
        sb.append(statisticByDayId);
        sb.append(", groupId=");
        sb.append(groupId);
        sb.append(", companyId=");
        sb.append(companyId);
        sb.append(", createDate=");
        sb.append(createDate);
        sb.append(", statisticDate=");
        sb.append(statisticDate);
        sb.append(", statisticWeek=");
        sb.append(statisticWeek);
        sb.append(", receiveNumber=");
        sb.append(receiveNumber);
        sb.append(", onlineNumber=");
        sb.append(onlineNumber);
        sb.append(", onlineRatio=");
        sb.append(onlineRatio);
        sb.append(", finishNumber=");
        sb.append(finishNumber);
        sb.append(", ontimeNumber=");
        sb.append(ontimeNumber);
        sb.append(", ontimeRatio=");
        sb.append(ontimeRatio);
        sb.append(", delayDaysAvg=");
        sb.append(delayDaysAvg);
        sb.append("}");
        return sb.toString();
    }

    @Override
    public StatisticByDay toEntityModel() {
        StatisticByDayImpl statisticByDayImpl = new StatisticByDayImpl();
        statisticByDayImpl.setStatisticByDayId(statisticByDayId);
        statisticByDayImpl.setGroupId(groupId);
        statisticByDayImpl.setCompanyId(companyId);
        // Long.MIN_VALUE is the sentinel for a null Date.
        if (createDate == Long.MIN_VALUE) {
            statisticByDayImpl.setCreateDate(null);
        }
        else {
            statisticByDayImpl.setCreateDate(new Date(createDate));
        }
        if (statisticDate == Long.MIN_VALUE) {
            statisticByDayImpl.setStatisticDate(null);
        }
        else {
            statisticByDayImpl.setStatisticDate(new Date(statisticDate));
        }
        statisticByDayImpl.setStatisticWeek(statisticWeek);
        statisticByDayImpl.setReceiveNumber(receiveNumber);
        statisticByDayImpl.setOnlineNumber(onlineNumber);
        statisticByDayImpl.setOnlineRatio(onlineRatio);
        statisticByDayImpl.setFinishNumber(finishNumber);
        statisticByDayImpl.setOntimeNumber(ontimeNumber);
        statisticByDayImpl.setOntimeRatio(ontimeRatio);
        statisticByDayImpl.setDelayDaysAvg(delayDaysAvg);
        statisticByDayImpl.resetOriginalValues();
        return statisticByDayImpl;
    }

    @Override
    public void readExternal(ObjectInput objectInput) throws IOException {
        // Must read fields in exactly the order writeExternal wrote them.
        statisticByDayId = objectInput.readLong();
        groupId = objectInput.readLong();
        companyId = objectInput.readLong();
        createDate = objectInput.readLong();
        statisticDate = objectInput.readLong();
        statisticWeek = objectInput.readInt();
        receiveNumber = objectInput.readLong();
        onlineNumber = objectInput.readLong();
        onlineRatio = objectInput.readDouble();
        finishNumber = objectInput.readInt();
        ontimeNumber = objectInput.readInt();
        ontimeRatio = objectInput.readDouble();
        delayDaysAvg = objectInput.readDouble();
    }

    @Override
    public void writeExternal(ObjectOutput objectOutput)
        throws IOException {
        objectOutput.writeLong(statisticByDayId);
        objectOutput.writeLong(groupId);
        objectOutput.writeLong(companyId);
        objectOutput.writeLong(createDate);
        objectOutput.writeLong(statisticDate);
        objectOutput.writeInt(statisticWeek);
        objectOutput.writeLong(receiveNumber);
        objectOutput.writeLong(onlineNumber);
        objectOutput.writeDouble(onlineRatio);
        objectOutput.writeInt(finishNumber);
        objectOutput.writeInt(ontimeNumber);
        objectOutput.writeDouble(ontimeRatio);
        objectOutput.writeDouble(delayDaysAvg);
    }

    // Dates are stored as epoch millis; Long.MIN_VALUE means null.
    public long statisticByDayId;
    public long groupId;
    public long companyId;
    public long createDate;
    public long statisticDate;
    public int statisticWeek;
    public long receiveNumber;
    public long onlineNumber;
    public double onlineRatio;
    public int finishNumber;
    public int ontimeNumber;
    public double ontimeRatio;
    public double delayDaysAvg;
}
|
{
"content_hash": "58b99abfb8a75f369f77437bef8779bf",
"timestamp": "",
"source": "github",
"line_count": 145,
"max_line_length": 76,
"avg_line_length": 28.44137931034483,
"alnum_prop": 0.7708535402521823,
"repo_name": "openegovplatform/OEPv2",
"id": "88153668a698026a783fa5e3c15e8e40e0ab419f",
"size": "4715",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "oep-dossier-portlet/docroot/WEB-INF/src/org/oep/dossiermgt/model/impl/StatisticByDayCacheModel.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "5166"
},
{
"name": "Java",
"bytes": "45541534"
}
]
}
|
'use strict'

// Prefer the compiled native secp256k1 bindings; when the addon failed to
// build (common on platforms without a toolchain), silently fall back to the
// pure-JS elliptic implementation.
try {
  module.exports = require('./bindings')
} catch (err) {
  // Only surface the fallback notice when DEBUG is set.
  if (process.env.DEBUG) {
    console.error('Secp256k1 bindings are not compiled. Pure JS implementation will be used.')
  }
  module.exports = require('./elliptic')
}
|
{
"content_hash": "01d484035b5d81ade59ff1c60d445169",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 94,
"avg_line_length": 24.6,
"alnum_prop": 0.6747967479674797,
"repo_name": "DigixGlobal/truffle-lightwallet-provider",
"id": "8ccc45339f622103b119620360e4aa81cfa8a73c",
"size": "246",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "node_modules/secp256k1/index.js",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "23336"
}
]
}
|
using System.Data.Common;
namespace Rocks.Profiling.Models
{
/// <summary>
/// Predefined names of profile operations.
/// </summary>
/// <summary>
///     Predefined names of profile operations.
/// </summary>
/// <remarks>
///     NOTE(review): these are mutable <c>public static string</c> fields,
///     so any caller can reassign them at runtime. Consider <c>const</c> or
///     <c>static readonly</c> — confirm no caller relies on reassignment.
/// </remarks>
public static class ProfileOperationNames
{
    /// <summary>
    ///     Execution of <see cref="DbCommand.ExecuteNonQuery"/> method.
    /// </summary>
    public static string DbCommandExecuteNonQuery = "DbCommand_ExecuteNonQuery";

    /// <summary>
    ///     Execution of <see cref="DbCommand.ExecuteNonQueryAsync()"/> method.
    /// </summary>
    public static string DbCommandExecuteNonQueryAsync = "DbCommand_ExecuteNonQueryAsync";

    /// <summary>
    ///     Execution of <see cref="DbCommand.ExecuteScalar"/> method.
    /// </summary>
    public static string DbCommandExecuteScalar = "DbCommand_ExecuteScalar";

    /// <summary>
    ///     Execution of <see cref="DbCommand.ExecuteScalarAsync()"/> method.
    /// </summary>
    public static string DbCommandExecuteScalarAsync = "DbCommand_ExecuteScalarAsync";

    /// <summary>
    ///     Execution of <see cref="DbCommand.ExecuteReader()"/> method.
    /// </summary>
    public static string DbCommandExecuteReader = "DbCommand_ExecuteReader";

    /// <summary>
    ///     Execution of <see cref="DbCommand.ExecuteReaderAsync()"/> method.
    /// </summary>
    public static string DbCommandExecuteReaderAsync = "DbCommand_ExecuteReaderAsync";
}
}
|
{
"content_hash": "18c5a0f58b98bc4745edf66cf2b6b82a",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 94,
"avg_line_length": 37.175,
"alnum_prop": 0.6220578345662408,
"repo_name": "MichaelLogutov/Rocks.Profiling",
"id": "90fbcfa69a0f829a479e610e6dbb6b159cb5493c",
"size": "1489",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Rocks.Profiling/Models/ProfileOperationNames.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP.NET",
"bytes": "114"
},
{
"name": "C#",
"bytes": "165016"
},
{
"name": "PowerShell",
"bytes": "596"
}
]
}
|
// Prints command-line help to stdout and terminates the process with
// exitCode (0 by default, non-zero for invalid-argument errors).
void usage(int exitCode = 0)
{
    QString usage = QLatin1String("usage: ") + QLatin1String(_NAME_) + QLatin1String(" ");
    // Pad continuation lines so they align under the options column.
    QString space = usage;
    space = space.fill(' ');
    QTextStream stream(stdout);
    stream << usage << "[-s|--scheduler] [--iface=<name>]\n";
    stream << space << "[-h|--help] [--version]\n";
    stream << "\n";
    stream << "Arguments:\n";
    stream << " -s or --scheduler Run the daemon as the scheduler for the network\n";
    stream << " --iface=<name> Use the network interface <name> for all connections\n";
    stream << " -h or --help Print help (this message) and exit\n";
    stream << " --version Print version information and exit\n";
    stream.flush();
    exit(exitCode);
}
// Prints "<name> version <version>" to stdout and exits successfully.
void version()
{
    QTextStream stream(stdout);
    stream << _NAME_ << " version " << _VERSION_<< "\n";
    stream.flush();
    exit(0);
}
// Entry point: parses the command line (--scheduler/--iface/--help/--version),
// binds to the first usable IPv4 interface, and runs the server event loop.
int main(int argc, char* argv[])
{
    QCoreApplication app(argc, argv);
    app.setApplicationName(_NAME_);
    app.setApplicationVersion(_VERSION_);
    QStringList arguments = app.arguments();
    Q_ASSERT(!arguments.isEmpty());
    arguments.removeFirst(); // remove the app name
    bool scheduler = false;
    QString iface;
    foreach(QString string, arguments) {
        if (string == QLatin1String("-s") || string == QLatin1String("--scheduler"))
            scheduler = true;
        else if (string.startsWith("--iface="))
            // Everything after '=' is the interface name.
            iface = string.mid(string.indexOf('=') + 1);
        else if (string == QLatin1String("-h") || string == QLatin1String("--help"))
            usage();
        else if (string == QLatin1String("--version"))
            version();
        else
            usage(1); // unknown argument: print help, exit with failure
    }
    // Empty iface means "pick the first up-and-running IPv4 interface".
    QNetworkAddressEntry address = Global::firstIPv4Address(iface);
    if (address.ip().isNull()) {
        qDebug() << "ERROR: Could not find an up and running network interface that supports IPv4 on this machine!";
        exit(1);
    }
    Server server(address, scheduler, &app);
    return app.exec();
}
|
{
"content_hash": "6a97a63b66de17f781e993d2e3a4d15e",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 116,
"avg_line_length": 32.83870967741935,
"alnum_prop": 0.5849705304518664,
"repo_name": "manyoso/mob",
"id": "039cae0264615e548a81bbb2da54dbfd2650d89a",
"size": "2143",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "server/main.cpp",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C++",
"bytes": "92120"
},
{
"name": "IDL",
"bytes": "938"
},
{
"name": "Shell",
"bytes": "877"
}
]
}
|
from __future__ import absolute_import
import os
import platform
import tempfile
# Find the best implementation available
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
import mock
from nose.tools import assert_raises
import numpy as np
import PIL.Image
from . import errors
from . import image as image_utils
import digits
class TestLoadImage():
    """Tests for image_utils.load_image(): bad paths, supported file
    formats, URL loading, and corrupted-file handling."""

    def test_bad_path(self):
        # Neither readable files nor fetchable URLs; all must raise.
        for path in [
                'some string',
                '/tmp/not-a-file',
                'http://not-a-url',
                ]:
            yield self.check_none, path

    def check_none(self, path):
        """Asserts load_image(path) raises LoadImageError."""
        assert_raises(
            errors.LoadImageError,
            image_utils.load_image,
            path,
        )

    def test_good_file(self):
        for args in [
                # created mode, file extension, pixel value, loaded mode (expected)
                # Grayscale
                ('1', 'png', 1, 'L'),
                ('1', 'ppm', 1, 'L'),
                ('L', 'png', 127, 'L'),
                ('L', 'jpg', 127, 'L'),
                ('L', 'ppm', 127, 'L'),
                ('LA', 'png', (127, 255), 'L'),
                # Color
                ('RGB', 'png', (127, 127, 127), 'RGB'),
                ('RGB', 'jpg', (127, 127, 127), 'RGB'),
                ('RGB', 'ppm', (127, 127, 127), 'RGB'),
                ('RGBA', 'png', (127, 127, 127, 255), 'RGB'),
                ('P', 'png', 127, 'RGB'),
                ('CMYK', 'jpg', (127, 127, 127, 127), 'RGB'),
                ('YCbCr', 'jpg', (127, 127, 127), 'RGB'),
                ]:
            yield self.check_good_file, args

    def check_good_file(self, args):
        """Round-trips a tiny image through disk and checks the loaded mode."""
        orig_mode, suffix, pixel, new_mode = args
        orig = PIL.Image.new(orig_mode, (10, 10), pixel)
        # temp files cause permission errors so just generate the name
        tmp = tempfile.mkstemp(suffix='.' + suffix)
        orig.save(tmp[1])
        new = image_utils.load_image(tmp[1])
        try:
            # sometimes on windows the file is not closed yet
            # which can cause an exception
            os.close(tmp[0])
            os.remove(tmp[1])
        except:
            pass
        assert new is not None, 'load_image should never return None'
        assert new.mode == new_mode, 'Image mode should be "%s", not "%s\nargs - %s' % (new_mode, new.mode, args)

    @mock.patch('digits.utils.image.requests')
    def test_good_url(self, mock_requests):
        """Serves a bundled JPEG through a mocked requests.get()."""
        # requests
        response = mock.Mock()
        response.status_code = mock_requests.codes.ok
        img_file = os.path.join(
            os.path.dirname(digits.__file__),
            'static',
            'images',
            'mona_lisa.jpg',
        )
        with open(img_file, 'rb') as infile:
            response.content = infile.read()
        mock_requests.get.return_value = response
        img = image_utils.load_image('http://some-url')
        assert img is not None

    def test_corrupted_file(self):
        image = PIL.Image.fromarray(np.zeros((10, 10, 3), dtype=np.uint8))
        # Save image to a JPEG buffer.
        buffer_io = StringIO()
        image.save(buffer_io, format='jpeg')
        encoded = buffer_io.getvalue()
        buffer_io.close()
        # Corrupt the second half of the image buffer.
        # Fix: use floor division so the slice index stays an int under
        # Python 3 as well ('/' would produce a float and raise TypeError).
        size = len(encoded)
        corrupted = encoded[:size // 2] + encoded[size // 2:][::-1]
        # Save the corrupted image to a temporary file.
        fname = tempfile.mkstemp(suffix='.bin')
        f = os.fdopen(fname[0], 'wb')
        fname = fname[1]
        f.write(corrupted)
        f.close()
        assert_raises(
            errors.LoadImageError,
            image_utils.load_image,
            fname,
        )
        os.remove(fname)
class TestResizeImage():
    """Tests for image_utils.resize_image() over many size/channel/mode
    combinations, with both PIL.Image and numpy.ndarray inputs."""

    @classmethod
    def setup_class(cls):
        # Build random grayscale and color fixtures once for all tests,
        # in both numpy and PIL form.
        cls.np_gray = np.random.randint(0, 255, (10, 10)).astype('uint8')
        cls.np_color = np.random.randint(0, 255, (10, 10, 3)).astype('uint8')
        cls.pil_gray = PIL.Image.fromarray(cls.np_gray)
        cls.pil_color = PIL.Image.fromarray(cls.np_color)

    def test_configs(self):
        # Exercise a grid of height/width/type/channel/mode combinations.
        for height in [10, 15]:
            for width in [10, 16]:
                for image_type in ['gray', 'color']:
                    # channels=None should autodetect the channel count
                    expected = (height, width, 3) if image_type == 'color' else (height, width)
                    yield self.verify_pil, (height, width, None, None, image_type, expected)
                    yield self.verify_np, (height, width, None, None, image_type, expected)
                    # explicit channel counts: 3 (color) and 1 (grayscale)
                    for channels in [3, 1]:
                        for resize_mode in ['squash', 'crop', 'fill', 'half_crop']:
                            expected = (height, width, 3) if channels == 3 else (height, width)
                            yield self.verify_pil, (height, width, channels, resize_mode, image_type, expected)
                            yield self.verify_np, (height, width, channels, resize_mode, image_type, expected)

    def verify_pil(self, args):
        """Resize a PIL.Image and check the result's shape and dtype."""
        height, width, channels, resize_mode, image_type, expected = args
        image = self.pil_gray if image_type == 'gray' else self.pil_color
        result = image_utils.resize_image(image, height, width, channels, resize_mode)
        assert result.shape == expected, 'Resized PIL.Image (orig=%s) should have been %s, but was %s %s' % (image.size, expected, result.shape, self.args_to_str(args))
        assert result.dtype == np.uint8, 'image.dtype should be uint8, not %s' % result.dtype

    def verify_np(self, args):
        """Resize a numpy.ndarray and check the result's shape and dtype."""
        height, width, channels, resize_mode, image_type, expected = args
        image = self.np_gray if image_type == 'gray' else self.np_color
        result = image_utils.resize_image(image, height, width, channels, resize_mode)
        assert result.shape == expected, 'Resized np.ndarray (orig=%s) should have been %s, but was %s %s' % (image.shape, expected, result.shape, self.args_to_str(args))
        assert result.dtype == np.uint8, 'image.dtype should be uint8, not %s' % result.dtype

    def args_to_str(self, args):
        # Pretty-print one test-case tuple for assertion messages.
        return """
height=%s
width=%s
channels=%s
resize_mode=%s
image_type=%s
shape=%s""" % args
|
{
"content_hash": "2a6510debb4ec94d1590dd84724bbbc6",
"timestamp": "",
"source": "github",
"line_count": 188,
"max_line_length": 142,
"avg_line_length": 34.40425531914894,
"alnum_prop": 0.48144712430426717,
"repo_name": "bygreencn/DIGITS",
"id": "a5552e7c4c40c7a6ef29e8f258847e4fef2d5635",
"size": "6537",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "digits/utils/test_image.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "4032"
},
{
"name": "HTML",
"bytes": "282185"
},
{
"name": "JavaScript",
"bytes": "45826"
},
{
"name": "Lua",
"bytes": "110640"
},
{
"name": "Makefile",
"bytes": "87"
},
{
"name": "Protocol Buffer",
"bytes": "384"
},
{
"name": "Python",
"bytes": "918641"
},
{
"name": "Shell",
"bytes": "11290"
}
]
}
|
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<!--
Licensed to Apereo under one or more contributor license
agreements. See the NOTICE file distributed with this work
for additional information regarding copyright ownership.
Apereo licenses this file to you under the Apache License,
Version 2.0 (the "License"); you may not use this file
except in compliance with the License. You may obtain a
copy of the License at the following location:
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<portlet-type xmlns="https://source.jasig.org/schemas/uportal/io/portlet-type" xmlns:ns2="https://source.jasig.org/schemas/uportal/io/permission-owner" xmlns:ns3="https://source.jasig.org/schemas/uportal/io/stylesheet-descriptor" xmlns:ns4="https://source.jasig.org/schemas/uportal/io/portlet-definition" xmlns:ns5="https://source.jasig.org/schemas/uportal" xmlns:ns6="https://source.jasig.org/schemas/uportal/io/user" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" version="4.0" xsi:schemaLocation="https://source.jasig.org/schemas/uportal/io/portlet-type https://source.jasig.org/schemas/uportal/io/portlet-type/portlet-type-4.0.xsd">
<name>Advanced CMS</name>
<description>Displays configured HTML content.</description>
<uri>/org/jasig/portal/portlets/CMS/AdvancedCMSPortlet.cpd.xml</uri>
</portlet-type>
|
{
"content_hash": "c27639c08abbd5dbe5b95f5023cc49b1",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 643,
"avg_line_length": 65.26923076923077,
"alnum_prop": 0.7554507955215085,
"repo_name": "vbonamy/esup-uportal",
"id": "09c1fe068d1f1dd92932a6bbc3883959ff28df14",
"size": "1697",
"binary": false,
"copies": "11",
"ref": "refs/heads/master",
"path": "uportal-war/src/main/data/default_entities/portlet-type/Advanced_CMS.portlet-type.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "907"
},
{
"name": "CSS",
"bytes": "387304"
},
{
"name": "Groovy",
"bytes": "61405"
},
{
"name": "HTML",
"bytes": "262691"
},
{
"name": "Java",
"bytes": "9560820"
},
{
"name": "JavaScript",
"bytes": "817364"
},
{
"name": "Perl",
"bytes": "1769"
},
{
"name": "Shell",
"bytes": "7292"
},
{
"name": "XSLT",
"bytes": "295322"
}
]
}
|
/*************************************************************************/
/* library_godot_webxr.js */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* https://godotengine.org */
/*************************************************************************/
/* Copyright (c) 2007-2020 Juan Linietsky, Ariel Manzur. */
/* Copyright (c) 2014-2020 Godot Engine contributors (cf. AUTHORS.md). */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/
const GodotWebXR = {
	$GodotWebXR__deps: ['$Browser', '$GL', '$GodotRuntime'],
	$GodotWebXR: {
		// WebGL context shared with the engine; set in godot_webxr_initialize().
		gl: null,
		// Per-view (index 0/1) Emscripten GL handle-table ids and the GL
		// texture objects behind them; created lazily by
		// godot_webxr_get_external_texture_for_eye().
		texture_ids: [null, null],
		textures: [null, null],
		// Active XRSession and XRReferenceSpace (null when not in XR).
		session: null,
		space: null,
		// XRFrame and XRViewerPose; only non-null while an XR animation
		// frame callback is running (see requestAnimationFrame below).
		frame: null,
		pose: null,
		// Monkey-patch the requestAnimationFrame() used by Emscripten for the main
		// loop, so that we can swap it out for XRSession.requestAnimationFrame()
		// when an XR session is started.
		orig_requestAnimationFrame: null,
		requestAnimationFrame: (callback) => {
			if (GodotWebXR.session && GodotWebXR.space) {
				const onFrame = function (time, frame) {
					// Expose frame/pose only for the duration of the
					// callback; they are invalid outside of it.
					GodotWebXR.frame = frame;
					GodotWebXR.pose = frame.getViewerPose(GodotWebXR.space);
					callback(time);
					GodotWebXR.frame = null;
					GodotWebXR.pose = null;
				};
				GodotWebXR.session.requestAnimationFrame(onFrame);
			} else {
				GodotWebXR.orig_requestAnimationFrame(callback);
			}
		},
		monkeyPatchRequestAnimationFrame: (enable) => {
			// Remember the original implementation the first time through.
			if (GodotWebXR.orig_requestAnimationFrame === null) {
				GodotWebXR.orig_requestAnimationFrame = Browser.requestAnimationFrame;
			}
			Browser.requestAnimationFrame = enable
				? GodotWebXR.requestAnimationFrame : GodotWebXR.orig_requestAnimationFrame;
		},
		pauseResumeMainLoop: () => {
			// Once both GodotWebXR.session and GodotWebXR.space are set or
			// unset, our monkey-patched requestAnimationFrame() should be
			// enabled or disabled. When using the WebXR API Emulator, this
			// gets picked up automatically, however, in the Oculus Browser
			// on the Quest, we need to pause and resume the main loop.
			Browser.mainLoop.pause();
			window.setTimeout(function () {
				Browser.mainLoop.resume();
			}, 0);
		},
		// Some custom WebGL code for blitting our eye textures to the
		// framebuffer we get from WebXR.
		shaderProgram: null,
		programInfo: null,
		buffer: null,
		// Vertex shader source.
		vsSource: `
			const vec2 scale = vec2(0.5, 0.5);
			attribute vec4 aVertexPosition;
			varying highp vec2 vTextureCoord;
			void main () {
				gl_Position = aVertexPosition;
				vTextureCoord = aVertexPosition.xy * scale + scale;
			}
		`,
		// Fragment shader source.
		fsSource: `
			varying highp vec2 vTextureCoord;
			uniform sampler2D uSampler;
			void main() {
				gl_FragColor = texture2D(uSampler, vTextureCoord);
			}
		`,
		// Compile and link the blit program; returns null (after logging)
		// on link failure.
		initShaderProgram: (gl, vsSource, fsSource) => {
			const vertexShader = GodotWebXR.loadShader(gl, gl.VERTEX_SHADER, vsSource);
			const fragmentShader = GodotWebXR.loadShader(gl, gl.FRAGMENT_SHADER, fsSource);
			const shaderProgram = gl.createProgram();
			gl.attachShader(shaderProgram, vertexShader);
			gl.attachShader(shaderProgram, fragmentShader);
			gl.linkProgram(shaderProgram);
			if (!gl.getProgramParameter(shaderProgram, gl.LINK_STATUS)) {
				GodotRuntime.error(`Unable to initialize the shader program: ${gl.getProgramInfoLog(shaderProgram)}`);
				return null;
			}
			return shaderProgram;
		},
		// Compile a single shader stage; returns null (after logging and
		// deleting the shader) on compile failure.
		loadShader: (gl, type, source) => {
			const shader = gl.createShader(type);
			gl.shaderSource(shader, source);
			gl.compileShader(shader);
			if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
				GodotRuntime.error(`An error occurred compiling the shader: ${gl.getShaderInfoLog(shader)}`);
				gl.deleteShader(shader);
				return null;
			}
			return shader;
		},
		// Upload a full-screen quad (triangle strip) used by blitTexture().
		initBuffer: (gl) => {
			const positionBuffer = gl.createBuffer();
			gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
			const positions = [
				-1.0, -1.0,
				1.0, -1.0,
				-1.0, 1.0,
				1.0, 1.0,
			];
			gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(positions), gl.STATIC_DRAW);
			return positionBuffer;
		},
		// Draw 'texture' over the currently bound framebuffer/viewport,
		// lazily creating the program/buffer and restoring the previous
		// program, buffer and texture bindings afterwards.
		blitTexture: (gl, texture) => {
			if (GodotWebXR.shaderProgram === null) {
				GodotWebXR.shaderProgram = GodotWebXR.initShaderProgram(gl, GodotWebXR.vsSource, GodotWebXR.fsSource);
				GodotWebXR.programInfo = {
					program: GodotWebXR.shaderProgram,
					attribLocations: {
						vertexPosition: gl.getAttribLocation(GodotWebXR.shaderProgram, 'aVertexPosition'),
					},
					uniformLocations: {
						uSampler: gl.getUniformLocation(GodotWebXR.shaderProgram, 'uSampler'),
					},
				};
				GodotWebXR.buffer = GodotWebXR.initBuffer(gl);
			}
			const orig_program = gl.getParameter(gl.CURRENT_PROGRAM);
			gl.useProgram(GodotWebXR.shaderProgram);
			gl.bindBuffer(gl.ARRAY_BUFFER, GodotWebXR.buffer);
			gl.vertexAttribPointer(GodotWebXR.programInfo.attribLocations.vertexPosition, 2, gl.FLOAT, false, 0, 0);
			gl.enableVertexAttribArray(GodotWebXR.programInfo.attribLocations.vertexPosition);
			gl.activeTexture(gl.TEXTURE0);
			gl.bindTexture(gl.TEXTURE_2D, texture);
			gl.uniform1i(GodotWebXR.programInfo.uniformLocations.uSampler, 0);
			gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
			// Restore state.
			gl.bindTexture(gl.TEXTURE_2D, null);
			gl.disableVertexAttribArray(GodotWebXR.programInfo.attribLocations.vertexPosition);
			gl.bindBuffer(gl.ARRAY_BUFFER, null);
			gl.useProgram(orig_program);
		},
		// Holds the controllers list between function calls.
		controllers: [],
		// Updates controllers array, where the left hand (or sole tracker) is
		// the first element, and the right hand is the second element, and any
		// others placed at the 3rd position and up.
		sampleControllers: () => {
			if (!GodotWebXR.session || !GodotWebXR.frame) {
				return;
			}
			let other_index = 2;
			const controllers = [];
			GodotWebXR.session.inputSources.forEach((input_source) => {
				if (input_source.targetRayMode === 'tracked-pointer') {
					if (input_source.handedness === 'right') {
						controllers[1] = input_source;
					} else if (input_source.handedness === 'left' || !controllers[0]) {
						controllers[0] = input_source;
					}
				} else {
					controllers[other_index++] = input_source;
				}
			});
			GodotWebXR.controllers = controllers;
		},
		// Map an XRInputSource back to its index in the controllers array
		// (-1 when not present).
		getControllerId: (input_source) => GodotWebXR.controllers.indexOf(input_source),
	},
godot_webxr_is_supported__proxy: 'sync',
godot_webxr_is_supported__sig: 'i',
godot_webxr_is_supported: function () {
return !!navigator.xr;
},
godot_webxr_is_session_supported__proxy: 'sync',
godot_webxr_is_session_supported__sig: 'vii',
godot_webxr_is_session_supported: function (p_session_mode, p_callback) {
const session_mode = GodotRuntime.parseString(p_session_mode);
const cb = GodotRuntime.get_func(p_callback);
if (navigator.xr) {
navigator.xr.isSessionSupported(session_mode).then(function (supported) {
const c_str = GodotRuntime.allocString(session_mode);
cb(c_str, supported ? 1 : 0);
GodotRuntime.free(c_str);
});
} else {
const c_str = GodotRuntime.allocString(session_mode);
cb(c_str, 0);
GodotRuntime.free(c_str);
}
},
	godot_webxr_initialize__deps: ['emscripten_webgl_get_current_context'],
	godot_webxr_initialize__proxy: 'sync',
	godot_webxr_initialize__sig: 'viiiiiiiiii',
	godot_webxr_initialize: function (p_session_mode, p_required_features, p_optional_features, p_requested_reference_spaces, p_on_session_started, p_on_session_ended, p_on_session_failed, p_on_controller_changed, p_on_input_event, p_on_simple_event) {
		GodotWebXR.monkeyPatchRequestAnimationFrame(true);
		// Decode the C string arguments; the feature and reference-space
		// lists arrive as comma-separated strings.
		const session_mode = GodotRuntime.parseString(p_session_mode);
		const required_features = GodotRuntime.parseString(p_required_features).split(',').map((s) => s.trim()).filter((s) => s !== '');
		const optional_features = GodotRuntime.parseString(p_optional_features).split(',').map((s) => s.trim()).filter((s) => s !== '');
		const requested_reference_space_types = GodotRuntime.parseString(p_requested_reference_spaces).split(',').map((s) => s.trim());
		// Wrap the C callback pointers as callable JS functions.
		const onstarted = GodotRuntime.get_func(p_on_session_started);
		const onended = GodotRuntime.get_func(p_on_session_ended);
		const onfailed = GodotRuntime.get_func(p_on_session_failed);
		const oncontroller = GodotRuntime.get_func(p_on_controller_changed);
		const oninputevent = GodotRuntime.get_func(p_on_input_event);
		const onsimpleevent = GodotRuntime.get_func(p_on_simple_event);
		const session_init = {};
		if (required_features.length > 0) {
			session_init['requiredFeatures'] = required_features;
		}
		if (optional_features.length > 0) {
			session_init['optionalFeatures'] = optional_features;
		}
		navigator.xr.requestSession(session_mode, session_init).then(function (session) {
			GodotWebXR.session = session;
			session.addEventListener('end', function (evt) {
				onended();
			});
			session.addEventListener('inputsourceschange', function (evt) {
				// Only notify the engine when an actual controller
				// (tracked-pointer input source) was added or removed.
				let controller_changed = false;
				[evt.added, evt.removed].forEach((lst) => {
					lst.forEach((input_source) => {
						if (input_source.targetRayMode === 'tracked-pointer') {
							controller_changed = true;
						}
					});
				});
				if (controller_changed) {
					oncontroller();
				}
			});
			// Forward the standard XR select/squeeze events, tagged with
			// the id of the controller that generated them.
			['selectstart', 'select', 'selectend', 'squeezestart', 'squeeze', 'squeezeend'].forEach((input_event) => {
				session.addEventListener(input_event, function (evt) {
					const c_str = GodotRuntime.allocString(input_event);
					oninputevent(c_str, GodotWebXR.getControllerId(evt.inputSource));
					GodotRuntime.free(c_str);
				});
			});
			session.addEventListener('visibilitychange', function (evt) {
				const c_str = GodotRuntime.allocString('visibility_state_changed');
				onsimpleevent(c_str);
				GodotRuntime.free(c_str);
			});
			// Make the engine's WebGL context usable by WebXR and hand it
			// over as the session's base layer.
			const gl_context_handle = _emscripten_webgl_get_current_context(); // eslint-disable-line no-undef
			const gl = GL.getContext(gl_context_handle).GLctx;
			GodotWebXR.gl = gl;
			gl.makeXRCompatible().then(function () {
				session.updateRenderState({
					baseLayer: new XRWebGLLayer(session, gl),
				});
				function onReferenceSpaceSuccess(reference_space, reference_space_type) {
					GodotWebXR.space = reference_space;
					// Using reference_space.addEventListener() crashes when
					// using the polyfill with the WebXR Emulator extension,
					// so we set the event property instead.
					reference_space.onreset = function (evt) {
						const c_str = GodotRuntime.allocString('reference_space_reset');
						onsimpleevent(c_str);
						GodotRuntime.free(c_str);
					};
					// Now that both GodotWebXR.session and GodotWebXR.space are
					// set, we need to pause and resume the main loop for the XR
					// main loop to kick in.
					GodotWebXR.pauseResumeMainLoop();
					// Call in setTimeout() so that errors in the onstarted()
					// callback don't bubble up here and cause Godot to try the
					// next reference space.
					window.setTimeout(function () {
						const c_str = GodotRuntime.allocString(reference_space_type);
						onstarted(c_str);
						GodotRuntime.free(c_str);
					}, 0);
				}
				// Try each requested reference space type in order; report
				// failure only when every one of them has been rejected.
				function requestReferenceSpace() {
					const reference_space_type = requested_reference_space_types.shift();
					session.requestReferenceSpace(reference_space_type)
						.then((refSpace) => {
							onReferenceSpaceSuccess(refSpace, reference_space_type);
						})
						.catch(() => {
							if (requested_reference_space_types.length === 0) {
								const c_str = GodotRuntime.allocString('Unable to get any of the requested reference space types');
								onfailed(c_str);
								GodotRuntime.free(c_str);
							} else {
								requestReferenceSpace();
							}
						});
				}
				requestReferenceSpace();
			}).catch(function (error) {
				const c_str = GodotRuntime.allocString(`Unable to make WebGL context compatible with WebXR: ${error}`);
				onfailed(c_str);
				GodotRuntime.free(c_str);
			});
		}).catch(function (error) {
			const c_str = GodotRuntime.allocString(`Unable to start session: ${error}`);
			onfailed(c_str);
			GodotRuntime.free(c_str);
		});
	},
godot_webxr_uninitialize__proxy: 'sync',
godot_webxr_uninitialize__sig: 'v',
godot_webxr_uninitialize: function () {
if (GodotWebXR.session) {
GodotWebXR.session.end()
// Prevent exception when session has already ended.
.catch((e) => { });
}
// Clean-up the textures we allocated for each view.
const gl = GodotWebXR.gl;
for (let i = 0; i < GodotWebXR.textures.length; i++) {
const texture = GodotWebXR.textures[i];
if (texture !== null) {
gl.deleteTexture(texture);
}
GodotWebXR.textures[i] = null;
const texture_id = GodotWebXR.texture_ids[i];
if (texture_id !== null) {
GL.textures[texture_id] = null;
}
GodotWebXR.texture_ids[i] = null;
}
GodotWebXR.session = null;
GodotWebXR.space = null;
GodotWebXR.frame = null;
GodotWebXR.pose = null;
// Disable the monkey-patched window.requestAnimationFrame() and
// pause/restart the main loop to activate it on all platforms.
GodotWebXR.monkeyPatchRequestAnimationFrame(false);
GodotWebXR.pauseResumeMainLoop();
},
godot_webxr_get_view_count__proxy: 'sync',
godot_webxr_get_view_count__sig: 'i',
godot_webxr_get_view_count: function () {
if (!GodotWebXR.session || !GodotWebXR.pose) {
return 0;
}
return GodotWebXR.pose.views.length;
},
godot_webxr_get_render_targetsize__proxy: 'sync',
godot_webxr_get_render_targetsize__sig: 'i',
godot_webxr_get_render_targetsize: function () {
if (!GodotWebXR.session || !GodotWebXR.pose) {
return 0;
}
const glLayer = GodotWebXR.session.renderState.baseLayer;
const view = GodotWebXR.pose.views[0];
const viewport = glLayer.getViewport(view);
const buf = GodotRuntime.malloc(2 * 4);
GodotRuntime.setHeapValue(buf + 0, viewport.width, 'i32');
GodotRuntime.setHeapValue(buf + 4, viewport.height, 'i32');
return buf;
},
godot_webxr_get_transform_for_eye__proxy: 'sync',
godot_webxr_get_transform_for_eye__sig: 'ii',
godot_webxr_get_transform_for_eye: function (p_eye) {
if (!GodotWebXR.session || !GodotWebXR.pose) {
return 0;
}
const views = GodotWebXR.pose.views;
let matrix;
if (p_eye === 0) {
matrix = GodotWebXR.pose.transform.matrix;
} else {
matrix = views[p_eye - 1].transform.matrix;
}
const buf = GodotRuntime.malloc(16 * 4);
for (let i = 0; i < 16; i++) {
GodotRuntime.setHeapValue(buf + (i * 4), matrix[i], 'float');
}
return buf;
},
godot_webxr_get_projection_for_eye__proxy: 'sync',
godot_webxr_get_projection_for_eye__sig: 'ii',
godot_webxr_get_projection_for_eye: function (p_eye) {
if (!GodotWebXR.session || !GodotWebXR.pose) {
return 0;
}
const view_index = (p_eye === 2 /* ARVRInterface::EYE_RIGHT */) ? 1 : 0;
const matrix = GodotWebXR.pose.views[view_index].projectionMatrix;
const buf = GodotRuntime.malloc(16 * 4);
for (let i = 0; i < 16; i++) {
GodotRuntime.setHeapValue(buf + (i * 4), matrix[i], 'float');
}
return buf;
},
	godot_webxr_get_external_texture_for_eye__proxy: 'sync',
	godot_webxr_get_external_texture_for_eye__sig: 'ii',
	godot_webxr_get_external_texture_for_eye: function (p_eye) {
		if (!GodotWebXR.session) {
			return 0;
		}
		// Eye 2 (ARVRInterface::EYE_RIGHT) maps to view 1, others to view 0.
		const view_index = (p_eye === 2 /* ARVRInterface::EYE_RIGHT */) ? 1 : 0;
		// Return the cached texture id if one was already created.
		if (GodotWebXR.texture_ids[view_index]) {
			return GodotWebXR.texture_ids[view_index];
		}
		// Check pose separately and after returning the cached texture id,
		// because we won't get a pose in some cases if we lose tracking, and
		// we don't want to return 0 just because tracking was lost.
		if (!GodotWebXR.pose) {
			return 0;
		}
		// Size the new texture to match this view's viewport.
		const glLayer = GodotWebXR.session.renderState.baseLayer;
		const view = GodotWebXR.pose.views[view_index];
		const viewport = glLayer.getViewport(view);
		// Allocate an empty RGBA texture (nearest filtering, edge clamping)
		// that is later blitted to the WebXR framebuffer by
		// godot_webxr_commit_for_eye().
		const gl = GodotWebXR.gl;
		const texture = gl.createTexture();
		gl.bindTexture(gl.TEXTURE_2D, texture);
		gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, viewport.width, viewport.height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
		gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
		gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
		gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
		gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
		gl.bindTexture(gl.TEXTURE_2D, null);
		// Register the texture in Emscripten's GL handle table so the
		// engine can refer to it by integer id, then cache both handles.
		const texture_id = GL.getNewId(GL.textures);
		GL.textures[texture_id] = texture;
		GodotWebXR.textures[view_index] = texture;
		GodotWebXR.texture_ids[view_index] = texture_id;
		return texture_id;
	},
	godot_webxr_commit_for_eye__proxy: 'sync',
	godot_webxr_commit_for_eye__sig: 'vi',
	godot_webxr_commit_for_eye: function (p_eye) {
		if (!GodotWebXR.session || !GodotWebXR.pose) {
			return;
		}
		// Eye 2 (ARVRInterface::EYE_RIGHT) maps to view 1, others to view 0.
		const view_index = (p_eye === 2 /* ARVRInterface::EYE_RIGHT */) ? 1 : 0;
		const glLayer = GodotWebXR.session.renderState.baseLayer;
		const view = GodotWebXR.pose.views[view_index];
		const viewport = glLayer.getViewport(view);
		const gl = GodotWebXR.gl;
		// Save the current framebuffer and viewport so the engine's GL
		// state is untouched when we are done.
		const orig_framebuffer = gl.getParameter(gl.FRAMEBUFFER_BINDING);
		const orig_viewport = gl.getParameter(gl.VIEWPORT);
		// Bind to WebXR's framebuffer.
		gl.bindFramebuffer(gl.FRAMEBUFFER, glLayer.framebuffer);
		gl.viewport(viewport.x, viewport.y, viewport.width, viewport.height);
		// Draw this eye's texture into the view's region of the framebuffer.
		GodotWebXR.blitTexture(gl, GodotWebXR.textures[view_index]);
		// Restore state.
		gl.bindFramebuffer(gl.FRAMEBUFFER, orig_framebuffer);
		gl.viewport(orig_viewport[0], orig_viewport[1], orig_viewport[2], orig_viewport[3]);
	},
godot_webxr_sample_controller_data__proxy: 'sync',
godot_webxr_sample_controller_data__sig: 'v',
godot_webxr_sample_controller_data: function () {
GodotWebXR.sampleControllers();
},
godot_webxr_get_controller_count__proxy: 'sync',
godot_webxr_get_controller_count__sig: 'i',
godot_webxr_get_controller_count: function () {
if (!GodotWebXR.session || !GodotWebXR.frame) {
return 0;
}
return GodotWebXR.controllers.length;
},
godot_webxr_is_controller_connected__proxy: 'sync',
godot_webxr_is_controller_connected__sig: 'ii',
godot_webxr_is_controller_connected: function (p_controller) {
if (!GodotWebXR.session || !GodotWebXR.frame) {
return false;
}
return !!GodotWebXR.controllers[p_controller];
},
godot_webxr_get_controller_transform__proxy: 'sync',
godot_webxr_get_controller_transform__sig: 'ii',
godot_webxr_get_controller_transform: function (p_controller) {
if (!GodotWebXR.session || !GodotWebXR.frame) {
return 0;
}
const controller = GodotWebXR.controllers[p_controller];
if (!controller) {
return 0;
}
const frame = GodotWebXR.frame;
const space = GodotWebXR.space;
const pose = frame.getPose(controller.targetRaySpace, space);
if (!pose) {
// This can mean that the controller lost tracking.
return 0;
}
const matrix = pose.transform.matrix;
const buf = GodotRuntime.malloc(16 * 4);
for (let i = 0; i < 16; i++) {
GodotRuntime.setHeapValue(buf + (i * 4), matrix[i], 'float');
}
return buf;
},
godot_webxr_get_controller_buttons__proxy: 'sync',
godot_webxr_get_controller_buttons__sig: 'ii',
godot_webxr_get_controller_buttons: function (p_controller) {
if (GodotWebXR.controllers.length === 0) {
return 0;
}
const controller = GodotWebXR.controllers[p_controller];
if (!controller || !controller.gamepad) {
return 0;
}
const button_count = controller.gamepad.buttons.length;
const buf = GodotRuntime.malloc((button_count + 1) * 4);
GodotRuntime.setHeapValue(buf, button_count, 'i32');
for (let i = 0; i < button_count; i++) {
GodotRuntime.setHeapValue(buf + 4 + (i * 4), controller.gamepad.buttons[i].value, 'float');
}
return buf;
},
godot_webxr_get_controller_axes__proxy: 'sync',
godot_webxr_get_controller_axes__sig: 'ii',
godot_webxr_get_controller_axes: function (p_controller) {
if (GodotWebXR.controllers.length === 0) {
return 0;
}
const controller = GodotWebXR.controllers[p_controller];
if (!controller || !controller.gamepad) {
return 0;
}
const axes_count = controller.gamepad.axes.length;
const buf = GodotRuntime.malloc((axes_count + 1) * 4);
GodotRuntime.setHeapValue(buf, axes_count, 'i32');
for (let i = 0; i < axes_count; i++) {
let value = controller.gamepad.axes[i];
if (i === 1 || i === 3) {
// Invert the Y-axis on thumbsticks and trackpads, in order to
// match OpenXR and other XR platform SDKs.
value *= -1.0;
}
GodotRuntime.setHeapValue(buf + 4 + (i * 4), value, 'float');
}
return buf;
},
godot_webxr_get_visibility_state__proxy: 'sync',
godot_webxr_get_visibility_state__sig: 'i',
godot_webxr_get_visibility_state: function () {
if (!GodotWebXR.session || !GodotWebXR.session.visibilityState) {
return 0;
}
return GodotRuntime.allocString(GodotWebXR.session.visibilityState);
},
godot_webxr_get_bounds_geometry__proxy: 'sync',
godot_webxr_get_bounds_geometry__sig: 'i',
godot_webxr_get_bounds_geometry: function () {
if (!GodotWebXR.space || !GodotWebXR.space.boundsGeometry) {
return 0;
}
const point_count = GodotWebXR.space.boundsGeometry.length;
if (point_count === 0) {
return 0;
}
const buf = GodotRuntime.malloc(((point_count * 3) + 1) * 4);
GodotRuntime.setHeapValue(buf, point_count, 'i32');
for (let i = 0; i < point_count; i++) {
const point = GodotWebXR.space.boundsGeometry[i];
GodotRuntime.setHeapValue(buf + ((i * 3) + 1) * 4, point.x, 'float');
GodotRuntime.setHeapValue(buf + ((i * 3) + 2) * 4, point.y, 'float');
GodotRuntime.setHeapValue(buf + ((i * 3) + 3) * 4, point.z, 'float');
}
return buf;
},
};
autoAddDeps(GodotWebXR, '$GodotWebXR');
mergeInto(LibraryManager.library, GodotWebXR);
|
{
"content_hash": "9c9dbc2563c89e86729fb21aa376fdfb",
"timestamp": "",
"source": "github",
"line_count": 670,
"max_line_length": 249,
"avg_line_length": 35.09701492537314,
"alnum_prop": 0.6638741229002764,
"repo_name": "honix/godot",
"id": "6e19a8ac6e5f975c47922a944588df1561f86f99",
"size": "23515",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "modules/webxr/native/library_godot_webxr.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "50004"
},
{
"name": "C++",
"bytes": "16813390"
},
{
"name": "HTML",
"bytes": "10302"
},
{
"name": "Java",
"bytes": "497061"
},
{
"name": "Makefile",
"bytes": "451"
},
{
"name": "Objective-C",
"bytes": "2644"
},
{
"name": "Objective-C++",
"bytes": "145442"
},
{
"name": "Python",
"bytes": "262658"
},
{
"name": "Shell",
"bytes": "11105"
}
]
}
|
"""Abstraction for Arcyd's git operations."""
# =============================================================================
# CONTENTS
# -----------------------------------------------------------------------------
# abdt_git
#
# Public Classes:
# Repo
# .is_identical
# .get_remote_branches
# .checkout_forced_new_branch
# .raw_diff_range
# .get_range_hashes
# .make_revisions_from_hashes
# .squash_merge
# .archive_to_landed
# .push_landed
# .archive_to_abandoned
# .push_abandoned
# .push_asymmetrical
# .push
# .push_delete
# .checkout_master_fetch_prune
# .hash_ref_pairs
# .checkout_make_raw_diff
# .get_remote
#
# Public Functions:
# get_managed_branches
# checkout_master_fetch_special_refs
#
# Public Assignments:
# ARCYD_LANDED_REF
# ARCYD_LANDED_BRANCH_FQ
# ARCYD_ABANDONED_REF
# ARCYD_ABANDONED_BRANCH_FQ
#
# -----------------------------------------------------------------------------
# (this contents block is generated, edits will be lost)
# =============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import phlgit_branch
import phlgit_checkout
import phlgit_commit
import phlgit_diff
import phlgit_fetch
import phlgit_log
import phlgit_merge
import phlgit_push
import phlgit_showref
import phlgitu_ref
import abdt_branch
import abdt_lander
import abdt_logging
import abdt_naming
# Ref namespace used by Arcyd for its archive refs.
_ARCYD_REFSPACE = 'refs/arcyd'
# Branch namespace for Arcyd's private branches, kept out of users' way.
_PRIVATE_ARCYD_BRANCHSPACE = '__private_arcyd'
# Commit message used when creating the 'landed' archive branch.
_LANDED_ARCHIVE_BRANCH_MESSAGE = """
Create an archive branch for landed branches
Landed branches will be automatically merged here by Arcyd for your
reference.
This branch is useful for:
o: cleaning up branches contained by the landed branch
(see 'git branch --merged')
o: finding the pre-landed version of a commit
(see 'git log --grep' - you can search for the landed sha1)
o: keeping track of Arcyd's landing activity
(see 'git log --first-parent')
""".strip()
# Public ref for the landed archive, plus the private local branch that
# backs it and that branch's fully-qualified 'refs/heads/...' name.
ARCYD_LANDED_REF = "{}/landed".format(_ARCYD_REFSPACE)
_ARCYD_LANDED_BRANCH = "{}/landed".format(_PRIVATE_ARCYD_BRANCHSPACE)
ARCYD_LANDED_BRANCH_FQ = "refs/heads/" + _ARCYD_LANDED_BRANCH
# Commit message used when creating the 'abandoned' archive branch.
_ABANDONED_ARCHIVE_BRANCH_MESSAGE = """
Create an archive branch for abandoned branches
Abandoned branches will be automatically merged here by Arcyd for your
reference.
This branch is useful for:
o: keeping track of Arcyd's abandoning activity
(see 'git log --first-parent')
o: recovering abandoned branches
(use 'git branch <branch name> <commit hash>')
""".strip()
# Public ref for the abandoned archive, plus the private local branch
# that backs it and that branch's fully-qualified name.
ARCYD_ABANDONED_REF = "{}/abandoned".format(_ARCYD_REFSPACE)
_ARCYD_ABANDONED_BRANCH = "{}/abandoned".format(_PRIVATE_ARCYD_BRANCHSPACE)
ARCYD_ABANDONED_BRANCH_FQ = "refs/heads/" + _ARCYD_ABANDONED_BRANCH
class Repo(object):
    def __init__(
            self, refcache_repo, differ_cache, remote, description):
        """Initialise a new Repo.
        :refcache_repo: a callable supporting git commands, e.g. repo("status")
        :differ_cache: cache object stored for later diff operations
        :remote: name of the remote to use
        :description: short identification of the repo for humans
        :returns: None
        """
        super(Repo, self).__init__()
        self._repo = refcache_repo
        self._remote = remote
        self._description = description
        # None means 'not yet determined'.
        self._is_landing_archive_enabled = None
        self._differ_cache = differ_cache
    def is_identical(self, branch1, branch2):
        """Return True if the branches point to the same commit.
        :branch1: string name of branch
        :branch2: string name of branch
        :returns: True if the branches point to the same commit
        """
        # Delegates to phlgit_branch, passing self as the git callable.
        return phlgit_branch.is_identical(self, branch1, branch2)
    def _is_ref(self, ref):
        """Return True if the specified ref exists, otherwise False.
        :ref: the string name of the ref to look up
        :return: True if the specified ref exists, otherwise False
        """
        # Membership test against the full list of ref names in the repo.
        ref_names = phlgit_showref.names(self)
        return ref in ref_names
    def get_remote_branches(self):
        """Return a list of string names of remote branches.
        Only branches on the remote configured for this Repo are listed.
        :returns: list of string names
        """
        return phlgit_branch.get_remote(self, self._remote)
def checkout_forced_new_branch(self, new_name, based_on):
"""Overwrite and checkout 'new_name' as a new branch from 'based_on'.
:new_name: the string name of the branch to create and overwrite
:based_on: the string name of the branch to copy
:returns: None
"""
phlgit_checkout.new_branch_force_based_on(
self, new_name, based_on)
# TODO: split this into more functions with varying context
def raw_diff_range(self, base, to, context=None):
"""Return a string of the unified diff between 'base' and 'to'.
Note that the output is based on 'git diff base...to', so the commits
are diff'ed via thier common ancestry.
:base: the commit or branch name to start from
:to: the commit or branch name to end with
:context: integer amount of surrounding context to include
:returns: string of the unified diff
"""
return phlgit_diff.raw_diff_range(self, base, to, context)
def get_range_hashes(self, start, end):
"""Return a list of strings of commit hashes from 'start' to 'end'.
The list begins with the revision closest to but not including
'start'. Raise a ValueError if any of the returned values are not
valid hexadecimal.
:start: a reference that log will understand
:end: a reference that log will understand
:returns: a list of strings of commit hashes from 'start' to 'end'.
"""
return phlgit_log.get_range_hashes(self, start, end)
def make_revisions_from_hashes(self, hashes):
"""Return a list of 'phlgit_log__Revision' from 'hashes'.
Raise an exception if the repo does not return a valid FullMessage
from any of 'hashes'.
:hashes: a list of commit hash strings
:returns: a list of 'phlgit_log__Revision'
"""
return phlgit_log.make_revisions_from_hashes(self, hashes)
def squash_merge(self, branch, message, author_name, author_email):
"""Return output from Git performing a squash merge.
:branch: string name of branch to merge into HEAD
:message: string message for the merge commit
:author_name: string name of author for the merge commit
:author_email: string email of author for the merge commit
:returns: string of Git output
"""
# TODO: test that the author is set correctly
return phlgit_merge.squash(
self,
branch,
message,
author_name + " <" + author_email + ">")
def _checkout_archive_ref_branch(
self, short_branch_name, fq_branch_name, initial_message):
if self._is_ref(fq_branch_name):
phlgit_checkout.branch(self, short_branch_name)
else:
phlgit_checkout.orphan_clean(self, short_branch_name)
phlgit_commit.allow_empty(self, initial_message)
def archive_to_landed(
self, review_hash, review_branch, base_branch, land_hash, message):
"""Merge the specified review branch to the 'landed' archive branch.
:review_hash: the string of the commit hash to archive
:review_branch: the string name of the branch to archive
:base_branch: the string name of the branch the review is branched off
:land_hash: the string of the commit hash the branch landed with
:message: the string commit message the the branch landed with
:returns: None
"""
self._checkout_archive_ref_branch(
_ARCYD_LANDED_BRANCH,
ARCYD_LANDED_BRANCH_FQ,
_LANDED_ARCHIVE_BRANCH_MESSAGE)
new_message = "landed {} on {} as {}\n\nwith message:\n{}".format(
review_branch, base_branch, land_hash, message)
phlgit_merge.ours(self, review_hash, new_message)
def push_landed(self):
"""Push the 'landed' archive branch to the remote.
:returns: None
"""
self.push_asymmetrical(ARCYD_LANDED_BRANCH_FQ, ARCYD_LANDED_REF)
def archive_to_abandoned(
self, review_hash, review_branch, base_branch):
"""Merge the specified review branch to the 'abandoned' archive branch.
:review_hash: the string of the commit hash to archive
:review_branch: the string name of the branch to archive
:base_branch: the string name of the branch the review is branched off
:returns: None
"""
# get on the archive branch, create new orphan if necessary
self._checkout_archive_ref_branch(
_ARCYD_ABANDONED_BRANCH,
ARCYD_ABANDONED_BRANCH_FQ,
_ABANDONED_ARCHIVE_BRANCH_MESSAGE)
new_message = "abandoned {}, branched from {}".format(
review_branch, base_branch)
phlgit_merge.ours(self, review_hash, new_message)
def push_abandoned(self):
"""Push the 'abandoned' archive branch to the remote.
:returns: None
"""
self.push_asymmetrical(
ARCYD_ABANDONED_BRANCH_FQ, ARCYD_ABANDONED_REF)
def push_asymmetrical(self, local_branch, remote_branch):
"""Push 'local_branch' as 'remote_branch' to the remote.
:local_branch: string name of the branch to push
:remote_branch: string name of the branch on the remote
:returns: None
"""
phlgit_push.push_asymmetrical(
self, local_branch, remote_branch, self._remote)
def push(self, branch):
"""Push 'branch' to the remote.
:branch: string name of the branch to push
:returns: None
"""
phlgit_push.push(self, branch, self._remote)
def push_delete(self, branch, *args):
"""Delete 'branch' from the remote.
:branch: string name of the branch
:*args: (optional) more string names of branches
:returns: None
"""
phlgit_push.delete(self, self._remote, branch, *args)
def checkout_master_fetch_prune(self):
"""Checkout master, fetch from the remote and prune branches.
Please see checkout_master_fetch_special_refs() for why we must
checkout master first.
:returns: None
"""
checkout_master_fetch_special_refs(self, self._remote)
@property
def hash_ref_pairs(self):
"""Return a list of (sha1, name) tuples from the repo's list of refs.
:repo: a callable supporting git commands, e.g. repo("status")
:returns: a list of (sha1, name)
"""
return self._repo.hash_ref_pairs
def checkout_make_raw_diff(
self, from_branch, to_branch, max_diff_size_utf8_bytes):
"""Return an abdt_differ.DiffResult of the changes on the branch.
If the diff would exceed the pre-specified max diff size then take
measures to reduce the diff.
Potentially checkout onto the 'to_branch' so that changes to
.gitattributes files will be considered.
:from_branch: string name of the merge-base of 'branch'
:to_branch: string name of the branch to diff
:max_diff_size_utf8_bytes: the maximum allowed size of the diff as utf8
:returns: the string diff of the changes on the branch
"""
return self._differ_cache.checkout_make_raw_diff(
from_branch, to_branch, max_diff_size_utf8_bytes)
def _log_read_call(self, args, kwargs):
with abdt_logging.remote_io_read_event_context(
'git-{}'.format(args[0]),
'{}: {} {}'.format(
self._description, ' '.join(args), kwargs)):
return self._repo(*args, **kwargs)
def __call__(self, *args, **kwargs):
if args:
if args[0] == 'push':
with abdt_logging.remote_io_write_event_context(
'git-push',
'{}: {} {}'.format(
self._description, ' '.join(args), kwargs)):
return self._repo(*args, **kwargs)
elif args[0] in ('fetch', 'pull', 'ls-remote'):
# N.B. git-archive may also read but we're not using it
return self._log_read_call(args, kwargs)
elif len(args) >= 2 and args[:2] == ('remote', 'prune'):
return self._log_read_call(args, kwargs)
return self._repo(*args, **kwargs)
def get_remote(self):
return self._remote
def _get_branch_to_hash(repo):
    """Return a dict of short branch name to commit hash for remote branches.

    Only refs under the repo's remote are considered; their fully-qualified
    remote names are converted to short local branch names for the keys.

    :repo: a Repo supporting 'get_remote()' and 'hash_ref_pairs'
    :returns: dict mapping string branch name to string commit hash

    """
    remote = repo.get_remote()

    def is_remote(ref):
        return phlgitu_ref.is_under_remote(ref, remote)

    full_to_short = phlgitu_ref.fq_remote_to_short_local

    # (dict comprehension is fine on Python 2.7+; the old linter restriction
    # that forced dict([...]) here no longer applies)
    return {
        full_to_short(r): h
        for h, r in repo.hash_ref_pairs
        if is_remote(r)
    }
def get_managed_branches(repo, repo_desc, naming, branch_link_callable=None):
    """Return a list of abdt_branch.Branch managed by Arcyd in 'repo'.

    :repo: the Repo to examine for review / tracker branch pairs
    :repo_desc: short string description of the repo for humans
    :naming: the naming scheme used to recognise managed branches
    :branch_link_callable: optional callable mapping branch name to URL
    :returns: list of abdt_branch.Branch

    """
    branch_to_hash = _get_branch_to_hash(repo)
    pairs = abdt_naming.get_branch_pairs(branch_to_hash.keys(), naming)
    lander = abdt_lander.squash

    managed = []
    for pair in pairs:
        review = pair.review
        tracker = pair.tracker
        assert review is not None or tracker is not None

        url = None
        review_hash = None
        tracker_hash = None

        if review is not None:
            review_hash = branch_to_hash[review.branch]
            if branch_link_callable:
                url = branch_link_callable(review.branch)

        if tracker is not None:
            tracker_hash = branch_to_hash[tracker.branch]

        managed.append(
            abdt_branch.Branch(
                repo,
                review,
                review_hash,
                tracker,
                tracker_hash,
                lander,
                repo_desc,
                url))

    return managed
def checkout_master_fetch_special_refs(repo, remote):
    """Checkout master, then fetch branches and Arcyd's special refs.

    Fetches the 'landed' and 'abandoned' refs, if they exist.  Master is
    checked out first because fetching the special refs writes to local
    branches; fetch would fail if it tried to overwrite the branch that is
    currently checked out.

    :repo: a callable supporting git commands
    :remote: string name of the remote to fetch from
    :returns: None

    """
    phlgit_checkout.branch(repo, 'master')
    refspec_list = [
        '+refs/heads/*:refs/remotes/origin/*',
        '+{refspace}/*:refs/heads/{branchspace}/*'.format(
            refspace=_ARCYD_REFSPACE,
            branchspace=_PRIVATE_ARCYD_BRANCHSPACE),
    ]
    phlgit_fetch.prune_safe(repo, remote, refspec_list)
# -----------------------------------------------------------------------------
# Copyright (C) 2013-2014 Bloomberg Finance L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------ END-OF-FILE ----------------------------------
|
{
"content_hash": "babefd13a73a00ffb5689b04e2bc9773",
"timestamp": "",
"source": "github",
"line_count": 475,
"max_line_length": 79,
"avg_line_length": 33.265263157894736,
"alnum_prop": 0.6164799696221758,
"repo_name": "cs-shadow/phabricator-tools",
"id": "d87632b3e3c758d65da02884923263acb24dcc52",
"size": "15801",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "py/abd/abdt_git.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "342"
},
{
"name": "HTML",
"bytes": "471"
},
{
"name": "Puppet",
"bytes": "4016"
},
{
"name": "Python",
"bytes": "1068336"
},
{
"name": "Ruby",
"bytes": "1945"
},
{
"name": "Shell",
"bytes": "134929"
}
]
}
|
#import "FBSession.h"
#import "FBSession-Private.h"
@implementation FBSession

// The single currently-active session shared across the process.
static FBSession *_activeSession;

// Return the currently-active session, or nil if none has been marked active.
+ (instancetype)activeSession
{
  return _activeSession;
}

// Replace the currently-active session with 'session'.
+ (void)markSessionActive:(FBSession *)session
{
  _activeSession = session;
}

// Return the active session only if 'identifier' matches its identifier;
// nil for a nil identifier or any non-matching identifier.
+ (instancetype)sessionWithIdentifier:(NSString *)identifier
{
  if (!identifier) {
    return nil;
  }
  if (![identifier isEqualToString:_activeSession.identifier]) {
    return nil;
  }
  return _activeSession;
}

// NOTE(review): clears the shared active session unconditionally, even if
// the receiver is not the active session — presumably -kill is only ever
// invoked on the active session; confirm with callers.
- (void)kill
{
  _activeSession = nil;
}

@end
|
{
"content_hash": "08ed660afec52a643625479e2e04e683",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 64,
"avg_line_length": 14.857142857142858,
"alnum_prop": 0.7076923076923077,
"repo_name": "truveris/WebDriverAgent",
"id": "7f13bb7e49a5eea8fd80d2a3051f487c965f35f4",
"size": "828",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "WebDriverAgentCore/Routing/FBSession.m",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "5834"
},
{
"name": "CSS",
"bytes": "2413"
},
{
"name": "HTML",
"bytes": "183"
},
{
"name": "JavaScript",
"bytes": "16636"
},
{
"name": "Objective-C",
"bytes": "387655"
},
{
"name": "Ruby",
"bytes": "1411"
},
{
"name": "Shell",
"bytes": "2625"
}
]
}
|
<?php
// Basic check: ReflectionExtension::getClasses() returns the classes
// declared by the 'reflection' extension itself (dumped for comparison
// against the expected test output).
$ext = new ReflectionExtension('reflection');
var_dump($ext->getClasses());
?>
==DONE==
|
{
"content_hash": "8e10699fefd798444c13e5e19bca7cd3",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 45,
"avg_line_length": 18.8,
"alnum_prop": 0.6595744680851063,
"repo_name": "evnix/go-php-parser",
"id": "26b2d31368fffa44352b0b2655a6a2083d1c26f7",
"size": "94",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "testdata/fuzzdir/corpus/ext_reflection_tests_ReflectionExtension_getClasses_basic.php",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Go",
"bytes": "176842"
},
{
"name": "PHP",
"bytes": "9752494"
},
{
"name": "Shell",
"bytes": "227"
}
]
}
|
package org.jboss.pnc.integration.endpoints;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.container.test.api.RunAsClient;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.pnc.client.BuildConfigurationClient;
import org.jboss.pnc.client.ClientException;
import org.jboss.pnc.client.GroupConfigurationClient;
import org.jboss.pnc.client.ProductClient;
import org.jboss.pnc.client.ProductVersionClient;
import org.jboss.pnc.client.RemoteCollection;
import org.jboss.pnc.client.RemoteResourceException;
import org.jboss.pnc.client.patch.PatchBuilderException;
import org.jboss.pnc.client.patch.ProductVersionPatchBuilder;
import org.jboss.pnc.constants.Attributes;
import org.jboss.pnc.dto.BuildConfiguration;
import org.jboss.pnc.dto.GroupConfiguration;
import org.jboss.pnc.dto.GroupConfigurationRef;
import org.jboss.pnc.dto.Product;
import org.jboss.pnc.dto.ProductMilestone;
import org.jboss.pnc.dto.ProductRef;
import org.jboss.pnc.dto.ProductRelease;
import org.jboss.pnc.dto.ProductVersion;
import org.jboss.pnc.integration.setup.Deployments;
import org.jboss.pnc.integration.setup.RestClientConfiguration;
import org.jboss.pnc.test.category.ContainerTest;
import org.jboss.shrinkwrap.api.spec.EnterpriseArchive;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
/**
 * Integration tests for the ProductVersion REST endpoint, run as a client
 * against a deployed test EAR.  Fixture data comes from the demo initializer.
 *
 * @author <a href="mailto:jbrazdil@redhat.com">Honza Brazdil</a>
 * @see org.jboss.pnc.demo.data.DatabaseDataInitializer
 */
@RunAsClient
@RunWith(Arquillian.class)
@Category(ContainerTest.class)
public class ProductVersionEndpointTest {

    private static final Logger logger = LoggerFactory.getLogger(ProductVersionEndpointTest.class);

    // Shared fixture: the first product and its first two version ids,
    // resolved once from the demo data before any test runs.
    private static Product product;
    private static String productVersionsId;
    private static String productVersionsId2;

    @Deployment
    public static EnterpriseArchive deploy() {
        return Deployments.testEar();
    }

    @BeforeClass
    public static void prepareData() throws Exception {
        ProductClient productClient = new ProductClient(RestClientConfiguration.asAnonymous());
        product = productClient.getAll().iterator().next();
        Iterator<ProductVersion> it = productClient.getProductVersions(product.getId()).iterator();
        productVersionsId = it.next().getId();
        productVersionsId2 = it.next().getId();
    }

    @Test
    public void testCreateNew() throws ClientException {
        // given
        ProductVersion productVersion = ProductVersion.builder().product(product).version("42.0").build();

        // when
        ProductVersionClient client = new ProductVersionClient(RestClientConfiguration.asUser());
        ProductVersion created = client.createNew(productVersion);

        // then
        assertThat(created.getId()).isNotEmpty();
        ProductVersion retrieved = client.getSpecific(created.getId());
        ProductVersion toCompare = productVersion.toBuilder()
                .productMilestones(Collections.emptyMap()) // query had null, but server responds with empty map
                .productReleases(Collections.emptyMap()) // query had null, but server responds with empty map
                .groupConfigs(Collections.emptyMap()) // query had null, but server responds with empty map
                .buildConfigs(Collections.emptyMap()) // query had null, but server responds with empty map
                .build();
        assertThat(created.getProduct().getId()).isEqualTo(toCompare.getProduct().getId());
        assertThat(created).isEqualToIgnoringGivenFields(toCompare, "id", "product", "attributes");
        assertThat(retrieved).isEqualTo(created);
    }

    @Test
    public void testGetSpecific() throws ClientException {
        ProductVersionClient client = new ProductVersionClient(RestClientConfiguration.asAnonymous());
        ProductVersion dto = client.getSpecific(productVersionsId);
        assertThat(dto.getVersion()).isEqualTo("1.0"); // from DatabaseDataInitializer
        assertThat(dto.getProduct().getId()).isEqualTo(product.getId()); // from DatabaseDataInitializer
    }

    @Test
    public void testUpdate() throws ClientException {
        // given
        ProductVersionClient client = new ProductVersionClient(RestClientConfiguration.asUser());
        final String version = "2.1";
        ProductVersion dto = client.getSpecific(productVersionsId2);
        ProductVersion toUpdate = dto.toBuilder().version(version).build();

        // when
        client.update(productVersionsId2, toUpdate);

        // then
        ProductVersion retrieved = client.getSpecific(dto.getId());
        assertThat(retrieved).isEqualTo(toUpdate);
        assertThat(retrieved).isEqualToIgnoringGivenFields(dto, "version");
        assertThat(retrieved.getVersion()).isEqualTo(version);
    }

    @Test
    public void testGetBuildConfigurations() throws ClientException {
        ProductVersionClient client = new ProductVersionClient(RestClientConfiguration.asAnonymous());
        RemoteCollection<BuildConfiguration> all = client.getBuildConfigs(productVersionsId);
        assertThat(all).hasSize(2).allMatch(v -> v.getProductVersion().getId().equals(productVersionsId));
    }

    @Test
    public void testGetGroupConfigurations() throws ClientException {
        ProductVersionClient client = new ProductVersionClient(RestClientConfiguration.asAnonymous());
        RemoteCollection<GroupConfiguration> all = client.getGroupConfigs(productVersionsId);
        assertThat(all).hasSize(2).allMatch(v -> v.getProductVersion().getId().equals(productVersionsId));
    }

    @Test
    public void testGetMilestones() throws ClientException {
        ProductVersionClient client = new ProductVersionClient(RestClientConfiguration.asAnonymous());
        RemoteCollection<ProductMilestone> all = client.getMilestones(productVersionsId);
        assertThat(all).hasSize(3).allMatch(v -> v.getProductVersion().getId().equals(productVersionsId));
    }

    @Test
    public void testGetReleases() throws ClientException {
        ProductVersionClient client = new ProductVersionClient(RestClientConfiguration.asAnonymous());
        RemoteCollection<ProductRelease> all = client.getReleases(productVersionsId);
        assertThat(all).hasSize(1).allMatch(v -> v.getProductVersion().getId().equals(productVersionsId));
    }

    @Test
    public void shouldFailGracefullyOnNonExistentProduct() {
        // given
        String nonExistentProductId = "384583";
        ProductVersion productVersion = ProductVersion.builder()
                .product(ProductRef.refBuilder().id(nonExistentProductId).build())
                .version("42.2")
                .build();

        // when & then
        ProductVersionClient client = new ProductVersionClient(RestClientConfiguration.asUser());
        assertThatThrownBy(() -> client.createNew(productVersion)).isInstanceOf(ClientException.class);
    }

    @Test
    public void shouldGenerateBrewTagWhenCreatingProductVersion() throws Exception {
        // given
        final String version = "42.3";
        ProductVersion productVersion = ProductVersion.builder().product(product).version(version).build();

        // when
        ProductVersionClient client = new ProductVersionClient(RestClientConfiguration.asUser());
        ProductVersion created = client.createNew(productVersion);

        // then
        assertThat(created.getAttributes()).containsKey(Attributes.BREW_TAG_PREFIX);
        assertThat(created.getAttributes().get(Attributes.BREW_TAG_PREFIX))
                .isEqualTo(product.getAbbreviation().toLowerCase() + "-" + version + "-pnc");
    }

    @Test
    public void shouldUpdateGroupConfigs() throws ClientException {
        // given
        GroupConfiguration gc = GroupConfiguration.builder().name("New GC").build();
        GroupConfigurationClient gcc = new GroupConfigurationClient(RestClientConfiguration.asUser());
        GroupConfiguration gcToAdd = gcc.createNew(gc);
        // NOTE(review): "groupConfis" is a local-variable typo for "groupConfigs"
        Map<String, GroupConfigurationRef> groupConfis = new HashMap<>();

        // when
        ProductVersionClient client = new ProductVersionClient(RestClientConfiguration.asUser());
        ProductVersion productVersion = client.getSpecific(productVersionsId2);
        groupConfis.putAll(productVersion.getGroupConfigs());
        groupConfis.put(gcToAdd.getId(), gcToAdd);
        ProductVersion toUpdate = productVersion.toBuilder().groupConfigs(groupConfis).build();
        client.update(productVersion.getId(), toUpdate);
        ProductVersion retrieved = client.getSpecific(productVersion.getId());

        // then
        assertThat(retrieved.getGroupConfigs()).hasSameSizeAs(groupConfis).containsKey(gcToAdd.getId());
    }

    @Test
    public void shouldUpdateGroupConfigsUsingPatch() throws PatchBuilderException, RemoteResourceException {
        // given
        ProductVersionClient client = new ProductVersionClient(RestClientConfiguration.asUser());
        GroupConfiguration gc = GroupConfiguration.builder().name("GC patch test").build();
        GroupConfigurationClient gcc = new GroupConfigurationClient(RestClientConfiguration.asUser());
        GroupConfiguration gcToAdd = gcc.createNew(gc);
        ProductVersion productVersion = client.getSpecific(productVersionsId2);
        Map<String, GroupConfigurationRef> groupConfigs = productVersion.getGroupConfigs();

        // when
        ProductVersionPatchBuilder patchBuilder = new ProductVersionPatchBuilder();
        ProductVersionPatchBuilder patch = patchBuilder
                .addGroupConfigs(Collections.singletonMap(gcToAdd.getId(), gcToAdd));
        client.patch(productVersionsId2, patch);

        // then
        groupConfigs.put(gcToAdd.getId(), gcToAdd);
        ProductVersion productVersionUpdated = client.getSpecific(productVersionsId2);
        assertThat(productVersionUpdated.getGroupConfigs())
                .containsKeys(groupConfigs.keySet().toArray(new String[groupConfigs.keySet().size()]));
    }

    // NOTE(review): "Asssociated" in the two method names below is a typo;
    // left unchanged because test method names may be referenced externally.
    @Test
    public void shouldNotUpdateGroupConfigsWhenOneIsAlreadyAsssociatedWithAnotherProductVersion()
            throws ClientException {
        // given
        ProductVersionClient client = new ProductVersionClient(RestClientConfiguration.asUser());
        GroupConfigurationRef alreadyAssignedGC = client.getSpecific(productVersionsId)
                .getGroupConfigs()
                .values()
                .iterator()
                .next();
        Map<String, GroupConfigurationRef> groupConfis = new HashMap<>();
        assertThat(alreadyAssignedGC).isNotNull();

        // when
        ProductVersion productVersion = client.getSpecific(productVersionsId2);
        groupConfis.putAll(productVersion.getGroupConfigs());
        groupConfis.put(alreadyAssignedGC.getId(), alreadyAssignedGC);
        ProductVersion toUpdate = productVersion.toBuilder().groupConfigs(groupConfis).build();

        // then
        assertThatThrownBy(() -> client.update(productVersion.getId(), toUpdate)).isInstanceOf(ClientException.class);
    }

    @Test
    public void shouldNotUpdateGroupConfigsWhenOneIsAlreadyAsssociatedWithAnotherProductVersionUsingPatch()
            throws ClientException, PatchBuilderException {
        // given
        ProductVersionClient client = new ProductVersionClient(RestClientConfiguration.asUser());
        GroupConfigurationRef alreadyAssignedGC = client.getSpecific(productVersionsId)
                .getGroupConfigs()
                .values()
                .iterator()
                .next();
        Map<String, GroupConfigurationRef> groupConfis = new HashMap<>();
        assertThat(alreadyAssignedGC).isNotNull();

        // when
        ProductVersionPatchBuilder patchBuilder = new ProductVersionPatchBuilder();
        ProductVersionPatchBuilder patch = patchBuilder
                .addGroupConfigs(Collections.singletonMap(alreadyAssignedGC.getId(), alreadyAssignedGC));

        // then
        assertThatThrownBy(() -> client.patch(productVersionsId2, patch)).isInstanceOf(ClientException.class);
    }

    @Test
    public void shouldNotUpdateGroupConfigsWithNonExistantGroupConfig() throws ClientException {
        // given
        GroupConfigurationRef notExistingGC = GroupConfigurationRef.refBuilder()
                .id("9999")
                .name("i-dont-exist")
                .build();
        Map<String, GroupConfigurationRef> groupConfis = new HashMap<>();

        // when
        ProductVersionClient client = new ProductVersionClient(RestClientConfiguration.asUser());
        ProductVersion productVersion = client.getSpecific(productVersionsId2);
        groupConfis.putAll(productVersion.getGroupConfigs());
        groupConfis.put(notExistingGC.getId(), notExistingGC);
        ProductVersion toUpdate = productVersion.toBuilder().groupConfigs(groupConfis).build();

        // then
        assertThatThrownBy(() -> client.update(productVersion.getId(), toUpdate)).isInstanceOf(ClientException.class);
    }

    @Test
    public void shouldNotUpdateWithClosedMilestone() throws ClientException {
        // given
        ProductVersionClient client = new ProductVersionClient(RestClientConfiguration.asUser());
        ProductVersion productVersion = client.getSpecific(productVersionsId); // has closed milestone, from
                                                                              // DatabaseDataInitializer

        // when
        ProductVersion toUpdate = productVersion.toBuilder().version("2.0").build();

        // then
        assertThatThrownBy(() -> client.update(productVersion.getId(), toUpdate)).isInstanceOf(ClientException.class);
    }

    @Test
    public void shouldAdd2GroupConfigsWithPatch() throws Exception {
        // given #1
        ProductVersionClient client = new ProductVersionClient(RestClientConfiguration.asUser());
        GroupConfigurationClient gcClient = new GroupConfigurationClient(RestClientConfiguration.asUser());
        ProductVersion productVersion = client.getSpecific(productVersionsId2);
        GroupConfiguration toAdd = gcClient.createNew(GroupConfiguration.builder().name("New GC1").build());
        ProductVersionPatchBuilder builder = new ProductVersionPatchBuilder();
        Map<String, GroupConfigurationRef> toAddMap = new HashMap<>();
        toAddMap.put(toAdd.getId(), toAdd);
        builder.addGroupConfigs(toAddMap);

        // when #1
        client.patch(productVersion.getId(), builder);

        // then #1
        ProductVersion refresh = client.getSpecific(productVersionsId2);
        assertThat(refresh.getGroupConfigs()).containsKey(toAdd.getId());

        // given #2 add second GC
        GroupConfiguration toAdd2 = gcClient.createNew(GroupConfiguration.builder().name("New GC2").build());
        builder = new ProductVersionPatchBuilder();
        toAddMap.clear();
        toAddMap.put(toAdd2.getId(), toAdd2);
        builder.addGroupConfigs(toAddMap);

        // when #2
        client.patch(productVersion.getId(), builder);

        // then #2
        refresh = client.getSpecific(productVersionsId2);
        assertThat(refresh.getGroupConfigs()).containsKey(toAdd2.getId()).containsKey(toAdd.getId());
    }

    @Test
    public void shouldDeleteBuildConfigWithPatch() throws Exception {
        // given
        ProductVersionClient client = new ProductVersionClient(RestClientConfiguration.asUser());
        BuildConfigurationClient bcClient = new BuildConfigurationClient(RestClientConfiguration.asUser());
        ProductVersion productVersion = client.getSpecific(productVersionsId2);
        assertThat(productVersion.getBuildConfigs()).isNotEmpty();
        BuildConfiguration toRemove = bcClient.getSpecific(productVersion.getBuildConfigs().keySet().iterator().next());
        ProductVersionPatchBuilder builder = new ProductVersionPatchBuilder();
        builder.removeBuildConfigs(Collections.singletonList(toRemove.getId()));

        // when
        client.patch(productVersion.getId(), builder);

        // then
        ProductVersion refresh = client.getSpecific(productVersionsId2);
        assertThat(refresh.getBuildConfigs().keySet()).doesNotContain(toRemove.getId());
    }
}
|
{
"content_hash": "7b42d3aed4ca63ced8f56e58936b3cf2",
"timestamp": "",
"source": "github",
"line_count": 381,
"max_line_length": 120,
"avg_line_length": 43.485564304461946,
"alnum_prop": 0.714087397392564,
"repo_name": "alexcreasy/pnc",
"id": "d7aea201ce605405e4af11588021a0a114cc5187",
"size": "17276",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "integration-test/src/test/java/org/jboss/pnc/integration/endpoints/ProductVersionEndpointTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "113413"
},
{
"name": "HTML",
"bytes": "394550"
},
{
"name": "Java",
"bytes": "4283733"
},
{
"name": "JavaScript",
"bytes": "3099544"
},
{
"name": "TSQL",
"bytes": "47567"
}
]
}
|
End of preview.
No dataset card yet
- Downloads last month
- 6