code
stringlengths 1
25.8M
| language
stringclasses 18
values | source
stringclasses 4
values | repo
stringclasses 78
values | path
stringlengths 0
268
|
|---|---|---|---|---|
from pandas.core.groupby.base import transformation_kernels

# Neither Series nor DataFrame exposes a ``cumcount`` method, even though it
# is a groupby transformation kernel, so it is excluded from both lists.
_kernels_without_cumcount = [
    kernel for kernel in sorted(transformation_kernels) if kernel != "cumcount"
]

# Independent list objects so callers may mutate one without the other.
series_transform_kernels = list(_kernels_without_cumcount)
frame_transform_kernels = list(_kernels_without_cumcount)
|
python
|
github
|
https://github.com/pandas-dev/pandas
|
pandas/tests/apply/common.py
|
from __future__ import division, absolute_import, print_function
import platform
from distutils.unixccompiler import UnixCCompiler
from numpy.distutils.exec_command import find_executable
from numpy.distutils.ccompiler import simple_version_match
if platform.system() == 'Windows':
from numpy.distutils.msvc9compiler import MSVCCompiler
class IntelCCompiler(UnixCCompiler):
    """A modified Intel compiler compatible with a GCC-built Python."""
    compiler_type = 'intel'
    cc_exe = 'icc'
    cc_args = 'fPIC'

    def __init__(self, verbose=0, dry_run=0, force=0):
        UnixCCompiler.__init__(self, verbose, dry_run, force)
        v = self.get_version()
        # icc renamed the OpenMP flag: releases before 15 use '-openmp',
        # 15 and later require '-qopenmp'.
        mpopt = 'openmp' if v and int(v.split('.')[0]) < 15 else 'qopenmp'
        self.cc_exe = ('icc -fPIC -fp-model strict -O3 '
                       '-fomit-frame-pointer -{}').format(mpopt)
        compiler = self.cc_exe
        if platform.system() == 'Darwin':
            # macOS linkers reject '-shared'; let undefined symbols resolve
            # dynamically against the host Python at load time instead.
            shared_flag = '-Wl,-undefined,dynamic_lookup'
        else:
            shared_flag = '-shared'
        # 'xiar' is Intel's archiver; '-shared-intel' links the Intel runtime
        # libraries dynamically so built extensions stay relocatable.
        self.set_executables(compiler=compiler,
                             compiler_so=compiler,
                             compiler_cxx=compiler,
                             archiver='xiar' + ' cru',
                             linker_exe=compiler + ' -shared-intel',
                             linker_so=compiler + ' ' + shared_flag +
                             ' -shared-intel')
class IntelItaniumCCompiler(IntelCCompiler):
    """Intel compiler variant for Itanium, where the binary may be 'ecc'."""
    compiler_type = 'intele'

    # On Itanium, the Intel Compiler used to be called ecc, let's search for
    # it (now it's also icc, so ecc is last in the search).
    # This loop runs at class-definition time and leaves cc_exe bound to the
    # first executable found.
    for cc_exe in map(find_executable, ['icc', 'ecc']):
        if cc_exe:
            break
class IntelEM64TCCompiler(UnixCCompiler):
    """
    A modified Intel x86_64 compiler compatible with a 64bit GCC-built Python.
    """
    compiler_type = 'intelem'
    cc_exe = 'icc -m64'
    cc_args = '-fPIC'

    def __init__(self, verbose=0, dry_run=0, force=0):
        UnixCCompiler.__init__(self, verbose, dry_run, force)
        v = self.get_version()
        # icc renamed the OpenMP flag: releases before 15 use '-openmp',
        # 15 and later require '-qopenmp'.
        mpopt = 'openmp' if v and int(v.split('.')[0]) < 15 else 'qopenmp'
        self.cc_exe = ('icc -m64 -fPIC -fp-model strict -O3 '
                       '-fomit-frame-pointer -{}').format(mpopt)
        compiler = self.cc_exe
        if platform.system() == 'Darwin':
            # macOS linkers reject '-shared'; resolve undefined symbols
            # dynamically against the host Python instead.
            shared_flag = '-Wl,-undefined,dynamic_lookup'
        else:
            shared_flag = '-shared'
        # Same toolchain substitution as IntelCCompiler, with -m64 added.
        self.set_executables(compiler=compiler,
                             compiler_so=compiler,
                             compiler_cxx=compiler,
                             archiver='xiar' + ' cru',
                             linker_exe=compiler + ' -shared-intel',
                             linker_so=compiler + ' ' + shared_flag +
                             ' -shared-intel')
# The MSVC-based Intel compilers only exist (and only import cleanly) on
# Windows, hence the platform guard around both class definitions.
if platform.system() == 'Windows':
    class IntelCCompilerW(MSVCCompiler):
        """
        A modified Intel compiler compatible with an MSVC-built Python.
        """
        compiler_type = 'intelw'
        compiler_cxx = 'icl'

        def __init__(self, verbose=0, dry_run=0, force=0):
            MSVCCompiler.__init__(self, verbose, dry_run, force)
            # Matches the 32-bit banner icl prints, e.g. "Intel(R) ... 32,".
            version_match = simple_version_match(start=r'Intel\(R\).*?32,')
            self.__version = version_match

        def initialize(self, plat_name=None):
            MSVCCompiler.initialize(self, plat_name)
            # Swap in Intel's tool names for the MSVC defaults.
            self.cc = self.find_exe('icl.exe')
            self.lib = self.find_exe('xilib')
            self.linker = self.find_exe('xilink')
            self.compile_options = ['/nologo', '/O3', '/MD', '/W3',
                                    '/Qstd=c99']
            self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3',
                                          '/Qstd=c99', '/Z7', '/D_DEBUG']

    class IntelEM64TCCompilerW(IntelCCompilerW):
        """
        A modified Intel x86_64 compiler compatible with
        a 64bit MSVC-built Python.
        """
        compiler_type = 'intelemw'

        def __init__(self, verbose=0, dry_run=0, force=0):
            MSVCCompiler.__init__(self, verbose, dry_run, force)
            # 64-bit icl reports "... 64," in its version banner.
            version_match = simple_version_match(start=r'Intel\(R\).*?64,')
            self.__version = version_match
|
unknown
|
codeparrot/codeparrot-clean
| ||
"""
sentry.runner.commands.cleanup
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2015 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
import click
from datetime import timedelta
from django.utils import timezone
from sentry.runner.decorators import configuration
def get_project(value):
    """Resolve *value* to a project id.

    Accepts either a numeric project id or an ``org/project`` pair of
    slugs.  Returns ``None`` when the value has neither form or names a
    project that does not exist.
    """
    from sentry.models import Project

    try:
        if value.isdigit():
            return int(value)
        if '/' not in value:
            return None
        org_slug, project_slug = value.split('/', 1)
        project = Project.objects.get_from_cache(
            organization__slug=org_slug,
            slug=project_slug,
        )
    except Project.DoesNotExist:
        return None
    return project.id
@click.command()
@click.option('--days', default=30, show_default=True, help='Numbers of days to truncate on.')
@click.option('--project', help='Limit truncation to only entries from project.')
@click.option('--concurrency', type=int, default=1, show_default=True, help='The number of concurrent workers to run.')
@click.option('--silent', '-q', default=False, is_flag=True, help='Run quietly. No output on success.')
@click.option('--model', '-m', multiple=True)
@configuration
def cleanup(days, project, concurrency, silent, model):
    """Delete a portion of trailing data based on creation date.

    All data that is older than `--days` will be deleted.  The default for
    this is 30 days.  In the default setting all projects will be truncated
    but if you have a specific project you want to limit this to this can be
    done with the `--project` flag which accepts a project ID or a string
    with the form `org/project` where both are slugs.
    """
    # NOTE(review): --concurrency is accepted but never referenced in this
    # body — confirm whether it is consumed elsewhere or is dead.
    from sentry.app import nodestore
    from sentry.db.deletion import BulkDeleteQuery
    from sentry.models import (
        Event, EventMapping, Group, GroupRuleStatus, GroupTagValue,
        LostPasswordHash, TagValue, GroupEmailThread,
    )

    # Lower-cased names from --model/-m; an empty set means "clean all".
    models = {m.lower() for m in model}

    def is_filtered(model):
        # True when the user restricted cleanup to specific models and this
        # one is not among them.
        if not models:
            return False
        return model.lower() not in models

    # these models should be safe to delete without cascades, in order
    BULK_DELETES = (
        (GroupRuleStatus, 'date_added'),
        (GroupTagValue, 'last_seen'),
        (TagValue, 'last_seen'),
        (GroupEmailThread, 'date'),
    )
    # These go through the slower generic deletion path instead.
    GENERIC_DELETES = (
        (Event, 'datetime'),
        (Group, 'last_seen'),
    )

    if not silent:
        click.echo("Removing expired values for LostPasswordHash")
    if is_filtered('LostPasswordHash'):
        if not silent:
            click.echo('>> Skipping LostPasswordHash')
    else:
        # Password-reset hashes expire after 48 hours regardless of --days.
        LostPasswordHash.objects.filter(
            date_added__lte=timezone.now() - timedelta(hours=48)
        ).delete()

    project_id = None
    if project:
        # NodeStore cleanup cannot be scoped to one project, so it is
        # skipped entirely when --project is given.
        click.echo("Bulk NodeStore deletion not available for project selection", err=True)
        project_id = get_project(project)
        if project_id is None:
            click.echo('Error: Project not found', err=True)
            raise click.Abort()
    else:
        if not silent:
            click.echo("Removing old NodeStore values")
        if is_filtered('NodeStore'):
            if not silent:
                click.echo('>> Skipping NodeStore')
        else:
            cutoff = timezone.now() - timedelta(days=days)
            try:
                nodestore.cleanup(cutoff)
            except NotImplementedError:
                # Best-effort: some backends cannot enumerate old entries.
                click.echo("NodeStore backend does not support cleanup operation", err=True)

    for model, dtfield in BULK_DELETES:
        if not silent:
            click.echo("Removing {model} for days={days} project={project}".format(
                model=model.__name__,
                days=days,
                project=project or '*',
            ))
        if is_filtered(model.__name__):
            if not silent:
                click.echo('>> Skipping %s' % model.__name__)
        else:
            BulkDeleteQuery(
                model=model,
                dtfield=dtfield,
                days=days,
                project_id=project_id,
            ).execute()

    # EventMapping is fairly expensive and is special cased as it's likely you
    # won't need a reference to an event for nearly as long
    if not silent:
        click.echo("Removing expired values for EventMapping")
    if is_filtered('EventMapping'):
        if not silent:
            click.echo('>> Skipping EventMapping')
    else:
        # Cap retention at 7 days even when --days is larger.
        BulkDeleteQuery(
            model=EventMapping,
            dtfield='date_added',
            days=min(days, 7),
            project_id=project_id,
        ).execute()

    # Clean up FileBlob instances which are no longer used and aren't super
    # recent (as there could be a race between blob creation and reference)
    if not silent:
        click.echo("Cleaning up unused FileBlob references")
    if is_filtered('FileBlob'):
        if not silent:
            click.echo('>> Skipping FileBlob')
    else:
        cleanup_unused_files(silent)

    for model, dtfield in GENERIC_DELETES:
        if not silent:
            click.echo("Removing {model} for days={days} project={project}".format(
                model=model.__name__,
                days=days,
                project=project or '*',
            ))
        if is_filtered(model.__name__):
            if not silent:
                click.echo('>> Skipping %s' % model.__name__)
        else:
            BulkDeleteQuery(
                model=model,
                dtfield=dtfield,
                days=days,
                project_id=project_id,
            ).execute_generic()
def cleanup_unused_files(quiet=False):
    """Delete FileBlob rows (and their backing files) that nothing uses.

    A blob is deleted only when neither a FileBlobIndex nor a File still
    references it.  Blobs younger than one day are left alone so we do not
    race with a blob that was just created and is about to be referenced.
    """
    from sentry.models import File, FileBlob, FileBlobIndex
    if quiet:
        from sentry.utils.query import RangeQuerySetWrapper
    else:
        from sentry.utils.query import RangeQuerySetWrapperWithProgressBar as RangeQuerySetWrapper

    min_age = timezone.now() - timedelta(days=1)
    candidates = FileBlob.objects.filter(timestamp__lte=min_age)
    for candidate in RangeQuerySetWrapper(candidates):
        still_referenced = (
            FileBlobIndex.objects.filter(blob=candidate).exists()
            or File.objects.filter(blob=candidate).exists()
        )
        if not still_referenced:
            candidate.delete()
|
unknown
|
codeparrot/codeparrot-clean
| ||
"""
============================================================
Empirical evaluation of the impact of k-means initialization
============================================================
Evaluate the ability of k-means initializations strategies to make
the algorithm convergence robust as measured by the relative standard
deviation of the inertia of the clustering (i.e. the sum of distances
to the nearest cluster center).
The first plot shows the best inertia reached for each combination
of the model (``KMeans`` or ``MiniBatchKMeans``) and the init method
(``init="random"`` or ``init="kmeans++"``) for increasing values of the
``n_init`` parameter that controls the number of initializations.
The second plot demonstrate one single run of the ``MiniBatchKMeans``
estimator using a ``init="random"`` and ``n_init=1``. This run leads to
a bad convergence (local optimum) with estimated centers stuck
between ground truth clusters.
The dataset used for evaluation is a 2D grid of isotropic Gaussian
clusters widely spaced.
"""
print(__doc__)

# Author: Olivier Grisel <olivier.grisel@ensta.org>
# License: BSD 3 clause

import numpy as np
import matplotlib.pyplot as plt
import matplotlib.cm as cm

from sklearn.utils import shuffle
from sklearn.utils import check_random_state
from sklearn.cluster import MiniBatchKMeans
from sklearn.cluster import KMeans

# Seed for the qualitative part-2 run below; part 1 seeds by run id.
random_state = np.random.RandomState(0)

# Number of run (with randomly generated dataset) for each strategy so as
# to be able to compute an estimate of the standard deviation
n_runs = 5

# k-means models can do several random inits so as to be able to trade
# CPU time for convergence robustness
n_init_range = np.array([1, 5, 10, 15, 20])

# Datasets generation parameters
n_samples_per_center = 100
grid_size = 3
scale = 0.1  # isotropic noise level around each grid center
n_clusters = grid_size ** 2  # one ground-truth cluster per grid cell
def make_data(random_state, n_samples_per_center, grid_size, scale):
    """Build a shuffled 2D dataset of Gaussian blobs on an integer grid.

    Centers sit on the points of a grid_size x grid_size grid; each center
    receives n_samples_per_center points with isotropic noise of the given
    scale.  Returns the shuffled (X, y) pair, where y holds center indices.
    """
    rng = check_random_state(random_state)
    grid_points = [[row, col]
                   for row in range(grid_size)
                   for col in range(grid_size)]
    centers = np.array(grid_points)
    n_true_clusters = centers.shape[0]
    # One noise matrix is drawn and reused for every center, so all
    # clusters share the same noise pattern around their center.
    noise = rng.normal(scale=scale,
                       size=(n_samples_per_center, centers.shape[1]))
    X = np.concatenate([center + noise for center in centers])
    labels = [[label] * n_samples_per_center
              for label in range(n_true_clusters)]
    y = np.concatenate(labels)
    return shuffle(X, y, random_state=rng)
# Part 1: Quantitative evaluation of various init methods

fig = plt.figure()
plots = []
legends = []

# Each case is (estimator class, init strategy, extra constructor kwargs).
cases = [
    (KMeans, 'k-means++', {}),
    (KMeans, 'random', {}),
    (MiniBatchKMeans, 'k-means++', {'max_no_improvement': 3}),
    (MiniBatchKMeans, 'random', {'max_no_improvement': 3, 'init_size': 500}),
]

for factory, init, params in cases:
    print("Evaluation of %s with %s init" % (factory.__name__, init))
    # inertia[i, j]: final inertia with n_init_range[i] inits on run j.
    inertia = np.empty((len(n_init_range), n_runs))

    for run_id in range(n_runs):
        # A fresh dataset per run, seeded by run_id for reproducibility.
        X, y = make_data(run_id, n_samples_per_center, grid_size, scale)
        for i, n_init in enumerate(n_init_range):
            km = factory(n_clusters=n_clusters, init=init, random_state=run_id,
                         n_init=n_init, **params).fit(X)
            inertia[i, run_id] = km.inertia_

    # Mean +/- std of the final inertia across runs, per n_init value.
    p = plt.errorbar(n_init_range, inertia.mean(axis=1), inertia.std(axis=1))
    plots.append(p[0])
    legends.append("%s with %s init" % (factory.__name__, init))

plt.xlabel('n_init')
plt.ylabel('inertia')
plt.legend(plots, legends)
plt.title("Mean inertia for various k-means init across %d runs" % n_runs)
# Part 2: Qualitative visual inspection of the convergence

X, y = make_data(random_state, n_samples_per_center, grid_size, scale)
# Single random init so a bad local optimum is likely, for illustration.
km = MiniBatchKMeans(n_clusters=n_clusters, init='random', n_init=1,
                     random_state=random_state).fit(X)

fig = plt.figure()
for k in range(n_clusters):
    my_members = km.labels_ == k
    # NOTE(review): cm.spectral was removed in matplotlib >= 2.2 (renamed
    # nipy_spectral) — confirm the matplotlib version this example targets.
    color = cm.spectral(float(k) / n_clusters, 1)
    plt.plot(X[my_members, 0], X[my_members, 1], 'o', marker='.', c=color)
    cluster_center = km.cluster_centers_[k]
    plt.plot(cluster_center[0], cluster_center[1], 'o',
             markerfacecolor=color, markeredgecolor='k', markersize=6)
plt.title("Example cluster allocation with a single random init\n"
          "with MiniBatchKMeans")
plt.show()
|
unknown
|
codeparrot/codeparrot-clean
| ||
# Joint copyright:
# - Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from testtools import TestCase, ExpectedException
from testscenarios.testcase import TestWithScenarios
from jenkins_jobs.errors import JenkinsJobsException
from tests.base import SingleJobTestCase
from tests.base import get_scenarios
from tests.base import mock
class TestCaseModuleDuplicates(TestWithScenarios, TestCase,
                               SingleJobTestCase):
    """Scenario-driven tests for duplicate-entry handling in job YAML.

    Fixtures whose file name starts with ``exception_`` must raise a
    JenkinsJobsException about duplicates; all other fixtures must pass
    the base class's snippet test unchanged.
    """
    fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
    scenarios = get_scenarios(fixtures_path)

    @mock.patch('jenkins_jobs.builder.logger', autospec=True)
    def test_yaml_snippet(self, mock_logger):
        # The builder logger is mocked so expected duplicate warnings do
        # not pollute test output.
        if os.path.basename(self.in_filename).startswith("exception_"):
            with ExpectedException(JenkinsJobsException, "^Duplicate .*"):
                super(TestCaseModuleDuplicates, self).test_yaml_snippet()
        else:
            super(TestCaseModuleDuplicates, self).test_yaml_snippet()
|
unknown
|
codeparrot/codeparrot-clean
| ||
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package main
import (
"context"
"fmt"
"os"
"testing"
"time"
expect "github.com/Netflix/go-expect"
tfe "github.com/hashicorp/go-tfe"
"github.com/hashicorp/go-uuid"
goversion "github.com/hashicorp/go-version"
tfversion "github.com/hashicorp/terraform/version"
)
const (
	// We need to give the console enough time to hear back.
	// 1 minute was too short in some cases, so this gives it ample time.
	expectConsoleTimeout = 3 * time.Minute
)

// tfCommand describes one terraform CLI invocation within a test: the
// argv to run, the console output expected around it, and any interactive
// input to feed it.
type tfCommand struct {
	command           []string
	expectedCmdOutput string
	expectError       bool
	userInput         []string
	postInputOutput   []string
}

// operationSets pairs a sequence of terraform commands with a prep step
// that writes the required configuration into the working directory.
type operationSets struct {
	commands []tfCommand
	prep     func(t *testing.T, orgName, dir string)
}

// testCases maps a test name to its command sets plus post-run validations.
type testCases map[string]struct {
	operations  []operationSets
	validations func(t *testing.T, orgName string)
}
// defaultOpts returns the console options shared by the e2e tests: the
// generous default timeout, plus stdout echoing when verboseMode is set.
func defaultOpts() []expect.ConsoleOpt {
	opts := []expect.ConsoleOpt{
		expect.WithDefaultTimeout(expectConsoleTimeout),
	}
	if verboseMode {
		opts = append(opts, expect.WithStdout(os.Stdout))
	}
	return opts
}
// createOrganization creates a throwaway TFE organization with a random
// name and beta tools enabled, returning it along with a cleanup function
// that deletes the organization again. Any API failure aborts the test.
func createOrganization(t *testing.T) (*tfe.Organization, func()) {
	ctx := context.Background()
	org, err := tfeClient.Organizations.Create(ctx, tfe.OrganizationCreateOptions{
		Name:                  tfe.String("tst-" + randomString(t)),
		Email:                 tfe.String(fmt.Sprintf("%s@tfe.local", randomString(t))),
		CostEstimationEnabled: tfe.Bool(false),
	})
	if err != nil {
		t.Fatal(err)
	}
	// Beta tooling must be switched on for the features these tests use.
	_, err = tfeClient.Admin.Organizations.Update(ctx, org.Name, tfe.AdminOrganizationUpdateOptions{
		AccessBetaTools: tfe.Bool(true),
	})
	if err != nil {
		t.Fatal(err)
	}
	return org, func() {
		if err := tfeClient.Organizations.Delete(ctx, org.Name); err != nil {
			t.Errorf("Error destroying organization! WARNING: Dangling resources\n"+
				"may exist! The full error is shown below.\n\n"+
				"Organization: %s\nError: %s", org.Name, err)
		}
	}
}
// createWorkspace creates a workspace in the given organization with the
// given options, aborting the test on any API error.
func createWorkspace(t *testing.T, orgName string, wOpts tfe.WorkspaceCreateOptions) *tfe.Workspace {
	ctx := context.Background()
	w, err := tfeClient.Workspaces.Create(ctx, orgName, wOpts)
	if err != nil {
		t.Fatal(err)
	}
	return w
}
// getWorkspace scans workspaces for one whose Name equals workspace.
// It returns (workspace, false) when found and (nil, true) otherwise;
// the boolean reports "not found".
func getWorkspace(workspaces []*tfe.Workspace, workspace string) (*tfe.Workspace, bool) {
	for i := range workspaces {
		candidate := workspaces[i]
		if candidate.Name != workspace {
			continue
		}
		return candidate, false
	}
	return nil, true
}
// randomString returns a fresh UUID for unique test resource names,
// aborting the test if UUID generation fails.
func randomString(t *testing.T) string {
	v, err := uuid.GenerateUUID()
	if err != nil {
		t.Fatal(err)
	}
	return v
}
// terraformConfigLocalBackend returns a minimal configuration using the
// local backend, with one output echoing the current workspace name.
func terraformConfigLocalBackend() string {
	return `
terraform {
  backend "local" {
  }
}

output "val" {
  value = "${terraform.workspace}"
}
`
}
// terraformConfigRemoteBackendName returns a configuration using the
// remote backend pinned to a single named workspace in the given org.
func terraformConfigRemoteBackendName(org, name string) string {
	return fmt.Sprintf(`
terraform {
  backend "remote" {
    hostname     = "%s"
    organization = "%s"

    workspaces {
      name = "%s"
    }
  }
}

output "val" {
  value = "${terraform.workspace}"
}
`, tfeHostname, org, name)
}
// terraformConfigRemoteBackendPrefix returns a configuration using the
// remote backend with a workspace name prefix in the given org.
func terraformConfigRemoteBackendPrefix(org, prefix string) string {
	return fmt.Sprintf(`
terraform {
  backend "remote" {
    hostname     = "%s"
    organization = "%s"

    workspaces {
      prefix = "%s"
    }
  }
}

output "val" {
  value = "${terraform.workspace}"
}
`, tfeHostname, org, prefix)
}
// terraformConfigCloudBackendTags returns a cloud-block configuration that
// selects workspaces by tag; the tag is also emitted as an output so runs
// can assert on it.
func terraformConfigCloudBackendTags(org, tag string) string {
	return fmt.Sprintf(`
terraform {
  cloud {
    hostname     = "%s"
    organization = "%s"

    workspaces {
      tags = ["%s"]
    }
  }
}

output "tag_val" {
  value = "%s"
}
`, tfeHostname, org, tag, tag)
}
// terraformConfigCloudBackendName returns a cloud-block configuration
// pinned to a single named workspace in the given org.
func terraformConfigCloudBackendName(org, name string) string {
	return fmt.Sprintf(`
terraform {
  cloud {
    hostname     = "%s"
    organization = "%s"

    workspaces {
      name = "%s"
    }
  }
}

output "val" {
  value = "${terraform.workspace}"
}
`, tfeHostname, org, name)
}
// terraformConfigCloudBackendOmitOrg returns a cloud-block configuration
// that deliberately omits the organization attribute, for testing the
// CLI's handling of incomplete cloud blocks.
func terraformConfigCloudBackendOmitOrg(workspaceName string) string {
	return fmt.Sprintf(`
terraform {
  cloud {
    hostname = "%s"

    workspaces {
      name = "%s"
    }
  }
}

output "val" {
  value = "${terraform.workspace}"
}
`, tfeHostname, workspaceName)
}
// terraformConfigCloudBackendOmitWorkspaces returns a cloud-block
// configuration that deliberately omits the workspaces block, for testing
// the CLI's handling of incomplete cloud blocks.
func terraformConfigCloudBackendOmitWorkspaces(orgName string) string {
	return fmt.Sprintf(`
terraform {
  cloud {
    hostname     = "%s"
    organization = "%s"
  }
}

output "val" {
  value = "${terraform.workspace}"
}
`, tfeHostname, orgName)
}
// terraformConfigCloudBackendOmitConfig returns a configuration with an
// entirely empty cloud block, for testing interactive configuration.
func terraformConfigCloudBackendOmitConfig() string {
	return `
terraform {
  cloud {}
}

output "val" {
  value = "${terraform.workspace}"
}
`
}
// writeMainTF writes block as dir/main.tf, aborting the test on any error.
//
// The file is closed via defer so the handle is not leaked when
// WriteString fails (t.Fatal exits this goroutine before any code after
// it runs, so the original explicit Close was skipped on that path).
func writeMainTF(t *testing.T, block string, dir string) {
	f, err := os.Create(fmt.Sprintf("%s/main.tf", dir))
	if err != nil {
		t.Fatal(err)
	}
	defer f.Close()
	if _, err := f.WriteString(block); err != nil {
		t.Fatal(err)
	}
}
// The e2e tests rely on the fact that the terraform version in HCP Terraform
// is able to run the `cloud` configuration block, which is available in 1.1
// and will continue to be available in later versions. So this function checks
// that there is a version that is >= 1.1.
//
// skipWithoutRemoteTerraformVersion pages through the admin terraform
// version list and skips the calling test unless some remote version's
// core version is >= this binary's core version.
func skipWithoutRemoteTerraformVersion(t *testing.T) {
	version := tfversion.Version
	baseVersion, err := goversion.NewVersion(version)
	if err != nil {
		t.Fatalf("Error instantiating go-version for %s", version)
	}
	opts := &tfe.AdminTerraformVersionsListOptions{
		ListOptions: tfe.ListOptions{
			PageNumber: 1,
			PageSize:   100,
		},
	}
	hasVersion := false

findTfVersion:
	for {
		// TODO: update go-tfe Read() to retrieve a terraform version by name.
		// Currently you can only retrieve by ID.
		tfVersionList, err := tfeClient.Admin.TerraformVersions.List(context.Background(), opts)
		if err != nil {
			t.Fatalf("Could not retrieve list of terraform versions: %v", err)
		}
		for _, item := range tfVersionList.Items {
			availableVersion, err := goversion.NewVersion(item.Version)
			if err != nil {
				// Skip unparseable entries rather than failing the test.
				t.Logf("Error instantiating go-version for %s", item.Version)
				continue
			}
			// Compare core versions only, ignoring pre-release metadata.
			if availableVersion.Core().GreaterThanOrEqual(baseVersion.Core()) {
				hasVersion = true
				break findTfVersion
			}
		}

		// Exit the loop when we've seen all pages.
		if tfVersionList.CurrentPage >= tfVersionList.TotalPages {
			break
		}

		// Update the page number to get the next page.
		opts.PageNumber = tfVersionList.NextPage
	}
	if !hasVersion {
		t.Skipf("Skipping test because TFC/E does not have current Terraform version to test with (%s)", version)
	}
}
|
go
|
github
|
https://github.com/hashicorp/terraform
|
internal/cloud/e2e/helper_test.go
|
package kotlinx.coroutines.channels
import kotlinx.coroutines.*
import kotlinx.coroutines.testing.*
import kotlin.test.*
class CancelledChannelLeakTest : TestBase() {
    /**
     * Tests that cancellation removes the elements from the channel's buffer.
     */
    @Test
    fun testBufferedChannelLeak() = runTest {
        for (capacity in listOf(Channel.CONFLATED, Channel.RENDEZVOUS, 1, 2, 5, 10)) {
            val channel = Channel<X>(capacity)
            val value = X()
            // UNDISPATCHED start runs the send up to its suspension point
            // immediately, so the element is in the channel right away.
            launch(start = CoroutineStart.UNDISPATCHED) {
                channel.send(value)
            }
            // The element must be reachable from the channel while pending.
            FieldWalker.assertReachableCount(1, channel) { it === value }
            channel.cancel()
            // the element must be removed so that there is no memory leak
            FieldWalker.assertReachableCount(0, channel) { it === value }
        }
    }

    // Marker element type; reachability is asserted by object identity.
    class X
}
|
kotlin
|
github
|
https://github.com/Kotlin/kotlinx.coroutines
|
kotlinx-coroutines-core/jvm/test/channels/CancelledChannelLeakTest.kt
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Jason Lowe-Power
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Jason Lowe-Power
# A wrapper around configs/learning_gem5/part1/simple.py

# For some reason, this is implicitly needed by run.py
root = None


def run_test(root):
    """Entry point invoked by tests/run.py: runs the wrapped config script."""
    # Called from tests/run.py
    # Execute the script we are wrapping
    # NOTE(review): execfile is Python 2 only — this wrapper assumes gem5's
    # Python 2 embedded interpreter.
    execfile('configs/learning_gem5/part1/simple.py')
|
unknown
|
codeparrot/codeparrot-clean
| ||
"""
Tests of completion xblock runtime services
"""
from completion.models import BlockCompletion
from completion.services import CompletionService
from completion.test_utils import CompletionWaffleTestMixin
import ddt
from opaque_keys.edx.keys import CourseKey
from openedx.core.djangolib.testing.utils import skip_unless_lms
from student.tests.factories import UserFactory
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
@ddt.ddt
@skip_unless_lms
class CompletionServiceTestCase(CompletionWaffleTestMixin, SharedModuleStoreTestCase):
    """
    Test the data returned by the CompletionService.
    """
    @classmethod
    def setUpClass(cls):
        # Build one shared course tree (chapter > sequential > vertical)
        # holding an html block and five problems.
        super(CompletionServiceTestCase, cls).setUpClass()
        cls.course = CourseFactory.create()
        with cls.store.bulk_operations(cls.course.id):
            cls.chapter = ItemFactory.create(
                parent=cls.course,
                category="chapter",
            )
            cls.sequence = ItemFactory.create(
                parent=cls.chapter,
                category='sequential',
            )
            cls.vertical = ItemFactory.create(
                parent=cls.sequence,
                category='vertical',
            )
            cls.html = ItemFactory.create(
                parent=cls.vertical,
                category='html',
            )
            cls.problem = ItemFactory.create(
                parent=cls.vertical,
                category="problem",
            )
            cls.problem2 = ItemFactory.create(
                parent=cls.vertical,
                category="problem",
            )
            cls.problem3 = ItemFactory.create(
                parent=cls.vertical,
                category="problem",
            )
            cls.problem4 = ItemFactory.create(
                parent=cls.vertical,
                category="problem",
            )
            cls.problem5 = ItemFactory.create(
                parent=cls.vertical,
                category="problem",
            )
        cls.store.update_item(cls.course, UserFactory().id)
        cls.problems = [cls.problem, cls.problem2, cls.problem3, cls.problem4, cls.problem5]

    def setUp(self):
        super(CompletionServiceTestCase, self).setUp()
        self.override_waffle_switch(True)
        self.user = UserFactory.create()
        self.other_user = UserFactory.create()
        self.course_key = self.course.id
        # A course the service is NOT scoped to, for negative cases.
        self.other_course_key = CourseKey.from_string("course-v1:ReedX+Hum110+1904")
        self.block_keys = [problem.location for problem in self.problems]
        self.completion_service = CompletionService(self.user, self.course_key)

        # Proper completions for the given runtime
        BlockCompletion.objects.submit_completion(
            user=self.user,
            course_key=self.course_key,
            block_key=self.html.location,
            completion=1.0,
        )
        # First three problems completed at 1.0, 0.8, 0.6 respectively.
        for idx, block_key in enumerate(self.block_keys[0:3]):
            BlockCompletion.objects.submit_completion(
                user=self.user,
                course_key=self.course_key,
                block_key=block_key,
                completion=1.0 - (0.2 * idx),
            )
        # Wrong user
        for idx, block_key in enumerate(self.block_keys[2:]):
            BlockCompletion.objects.submit_completion(
                user=self.other_user,
                course_key=self.course_key,
                block_key=block_key,
                completion=0.9 - (0.2 * idx),
            )
        # Wrong course
        BlockCompletion.objects.submit_completion(
            user=self.user,
            course_key=self.other_course_key,
            block_key=self.block_keys[4],
            completion=0.75,
        )

    def test_completion_service(self):
        # Only the completions for the user and course specified for the CompletionService
        # are returned.  Values are returned for all keys provided.
        self.assertEqual(
            self.completion_service.get_completions(self.block_keys),
            {
                self.block_keys[0]: 1.0,
                self.block_keys[1]: 0.8,
                self.block_keys[2]: 0.6,
                self.block_keys[3]: 0.0,
                self.block_keys[4]: 0.0
            },
        )

    @ddt.data(True, False)
    def test_enabled_honors_waffle_switch(self, enabled):
        # The service's enabled flag must track the waffle switch exactly.
        self.override_waffle_switch(enabled)
        self.assertEqual(self.completion_service.completion_tracking_enabled(), enabled)

    def test_vertical_completion(self):
        # Incomplete while setUp left problems 4 and 5 untouched...
        self.assertEqual(
            self.completion_service.vertical_is_complete(self.vertical),
            False,
        )
        # ...and complete once every child block reaches 1.0.
        for block_key in self.block_keys:
            BlockCompletion.objects.submit_completion(
                user=self.user,
                course_key=self.course_key,
                block_key=block_key,
                completion=1.0
            )
        self.assertEqual(
            self.completion_service.vertical_is_complete(self.vertical),
            True,
        )

    def test_vertical_partial_completion(self):
        block_keys_count = len(self.block_keys)
        for i in range(block_keys_count - 1):
            # Mark all the child blocks completed except the last one
            BlockCompletion.objects.submit_completion(
                user=self.user,
                course_key=self.course_key,
                block_key=self.block_keys[i],
                completion=1.0
            )
        self.assertEqual(
            self.completion_service.vertical_is_complete(self.vertical),
            False,
        )

    def test_can_mark_block_complete_on_view(self):
        # Only leaf html blocks are completable-on-view; containers and
        # problems (which complete on interaction) are not.
        self.assertEqual(self.completion_service.can_mark_block_complete_on_view(self.course), False)
        self.assertEqual(self.completion_service.can_mark_block_complete_on_view(self.chapter), False)
        self.assertEqual(self.completion_service.can_mark_block_complete_on_view(self.sequence), False)
        self.assertEqual(self.completion_service.can_mark_block_complete_on_view(self.vertical), False)
        self.assertEqual(self.completion_service.can_mark_block_complete_on_view(self.html), True)
        self.assertEqual(self.completion_service.can_mark_block_complete_on_view(self.problem), False)
|
unknown
|
codeparrot/codeparrot-clean
| ||
# SPDX-License-Identifier: (GPL-2.0-only OR BSD-2-Clause)
# Copyright (C) 2021 Microchip Technology, Inc.
%YAML 1.2
---
$id: http://devicetree.org/schemas/media/microchip,xisc.yaml#
$schema: http://devicetree.org/meta-schemas/core.yaml#
title: Microchip eXtended Image Sensor Controller (XISC)
maintainers:
- Eugen Hristev <eugen.hristev@microchip.com>
description: |
The eXtended Image Sensor Controller (XISC) device provides the video input capabilities for the
Microchip AT91 SAM family of devices.
The XISC has a single internal parallel input that supports RAW Bayer, RGB or YUV video.
The source can be either a demuxer from a CSI2 type of bus, or a simple direct bridge to a
parallel sensor.
The XISC provides one clock output that is used to clock the demuxer/bridge.
properties:
compatible:
const: microchip,sama7g5-isc
reg:
maxItems: 1
interrupts:
maxItems: 1
clocks:
maxItems: 1
clock-names:
items:
- const: hclock
'#clock-cells':
const: 0
clock-output-names:
const: isc-mck
microchip,mipi-mode:
type: boolean
description:
As the XISC is usually connected to a demux/bridge, the XISC receives
the same type of input, however, it should be aware of the type of
signals received. The mipi-mode enables different internal handling
of the data and clock lines.
port:
$ref: /schemas/graph.yaml#/$defs/port-base
description:
Input port node, single endpoint describing the input pad.
properties:
endpoint:
$ref: video-interfaces.yaml#
properties:
bus-type:
enum: [5, 6]
remote-endpoint: true
bus-width:
enum: [8, 9, 10, 11, 12, 14]
default: 12
hsync-active:
enum: [0, 1]
default: 1
vsync-active:
enum: [0, 1]
default: 1
pclk-sample:
enum: [0, 1]
default: 1
required:
- remote-endpoint
- bus-type
additionalProperties: false
additionalProperties: false
required:
- compatible
- reg
- clocks
- clock-names
- '#clock-cells'
- clock-output-names
- port
additionalProperties: false
examples:
- |
#include <dt-bindings/interrupt-controller/arm-gic.h>
#include <dt-bindings/clock/at91.h>
#include <dt-bindings/interrupt-controller/irq.h>
#include <dt-bindings/media/video-interfaces.h>
xisc: xisc@e1408000 {
compatible = "microchip,sama7g5-isc";
reg = <0xe1408000 0x2000>;
interrupts = <GIC_SPI 56 IRQ_TYPE_LEVEL_HIGH>;
clocks = <&pmc PMC_TYPE_PERIPHERAL 56>;
clock-names = "hclock";
#clock-cells = <0>;
clock-output-names = "isc-mck";
port {
xisc_in: endpoint {
bus-type = <MEDIA_BUS_TYPE_PARALLEL>;
remote-endpoint = <&csi2dc_out>;
hsync-active = <1>;
vsync-active = <1>;
bus-width = <12>;
};
};
};
|
unknown
|
github
|
https://github.com/torvalds/linux
|
Documentation/devicetree/bindings/media/microchip,xisc.yaml
|
# Volatility
# Copyright (C) 2007-2013 Volatility Foundation
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#
"""
@author: Andrew Case
@license: GNU General Public License 2.0
@contact: atcuno@gmail.com
@organization:
"""
import volatility.plugins.mac.pstasks as pstasks
from volatility.renderers import TreeGrid
class mac_psenv(pstasks.mac_tasks):
    """ Prints processes with environment in user land (**envp) """

    def unified_output(self, data):
        # Column layout mirrors the text renderer below.
        columns = [("Pid", int),
                   ("Name", str),
                   ("Bits", str),
                   ("Arguments", str)]
        return TreeGrid(columns, self.generator(data))

    def generator(self, data):
        for task in data:
            row = [int(task.p_pid),
                   str(task.p_comm),
                   str(task.task.map.pmap.pm_task_map),
                   str(task.get_environment())]
            yield (0, row)

    def render_text(self, outfd, data):
        headers = [("Pid", "8"),
                   ("Name", "20"),
                   ("Bits", "16"),
                   ("Arguments", "")]
        self.table_header(outfd, headers)
        for task in data:
            # Strip the "pm_task_map" enum prefix; guard against a null map.
            bits = str(task.task.map.pmap.pm_task_map or '')[9:]
            self.table_row(outfd,
                           task.p_pid,
                           task.p_comm,
                           bits,
                           task.get_environment())
|
unknown
|
codeparrot/codeparrot-clean
| ||
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# the lib use python logging can get it if the following is set in your
# Ansible config.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_log_fortiguard_setting
short_description: Configure logging to FortiCloud in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS by allowing the
user to set and modify log_fortiguard feature and setting category.
Examples include all parameters and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.2
version_added: "2.8"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate ip address.
required: true
username:
description:
- FortiOS or FortiGate username.
required: true
password:
description:
- FortiOS or FortiGate password.
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS
protocol
type: bool
default: true
log_fortiguard_setting:
description:
- Configure logging to FortiCloud.
default: null
suboptions:
enc-algorithm:
description:
                - Enable/disable and set the SSL security level for sending encrypted logs to FortiCloud.
choices:
- high-medium
- high
- low
- disable
source-ip:
description:
- Source IP address used to connect FortiCloud.
ssl-min-proto-version:
description:
- Minimum supported protocol version for SSL/TLS connections (default is to follow system global setting).
choices:
- default
- SSLv3
- TLSv1
- TLSv1-1
- TLSv1-2
status:
description:
- Enable/disable logging to FortiCloud.
choices:
- enable
- disable
upload-day:
description:
- Day of week to roll logs.
upload-interval:
description:
- Frequency of uploading log files to FortiCloud.
choices:
- daily
- weekly
- monthly
upload-option:
description:
- Configure how log messages are sent to FortiCloud.
choices:
- store-and-upload
- realtime
- 1-minute
- 5-minute
upload-time:
description:
- "Time of day to roll logs (hh:mm)."
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
tasks:
- name: Configure logging to FortiCloud.
fortios_log_fortiguard_setting:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
log_fortiguard_setting:
enc-algorithm: "high-medium"
source-ip: "84.230.14.43"
ssl-min-proto-version: "default"
status: "enable"
upload-day: "<your_own_value>"
upload-interval: "daily"
upload-option: "store-and-upload"
upload-time: "<your_own_value>"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
fos = None
def login(data):
    """Authenticate the module-global FortiOS connection from the params."""
    fos.debug('on')
    # HTTPS is on unless the caller explicitly disabled it.
    use_https = data.get('https', True)
    fos.https('on' if use_https else 'off')
    fos.login(data['host'], data['username'], data['password'])
def filter_log_fortiguard_setting_data(json):
    """Return a copy of *json* restricted to the options this module manages.

    Keys that are absent or explicitly None are dropped so that only
    user-supplied values are pushed to the device.
    """
    option_list = ['enc-algorithm', 'source-ip', 'ssl-min-proto-version',
                   'status', 'upload-day', 'upload-interval',
                   'upload-option', 'upload-time']
    return {key: json[key] for key in option_list
            if json.get(key) is not None}
def log_fortiguard_setting(data, fos):
    """Push the log.fortiguard/setting configuration to the device."""
    filtered_data = filter_log_fortiguard_setting_data(
        data['log_fortiguard_setting'])
    # fos.set() issues the actual API call for the selected vdom.
    return fos.set('log.fortiguard', 'setting',
                   data=filtered_data,
                   vdom=data['vdom'])
def fortios_log_fortiguard(data, fos):
    """Dispatch the requested log.fortiguard operation.

    Returns an ``(is_error, has_changed, result)`` tuple as expected by
    ``main()``.
    """
    login(data)

    # Explicit name->callable dispatch instead of eval() so only known
    # handlers can ever be invoked (eval on a string is an injection
    # hazard and hides the call graph).
    handlers = {'log_fortiguard_setting': log_fortiguard_setting}

    resp = None
    for name, handler in handlers.items():
        if data[name]:
            resp = handler(data, fos)
            break

    fos.logout()

    if resp is None:
        # Previously `resp` was left unbound here, raising NameError.
        # No sub-configuration was supplied, so nothing changed.
        return False, False, {'status': 'success'}
    return not resp['status'] == "success", resp['status'] == "success", resp
def main():
    """Module entry point: validate arguments, log in to the device,
    apply the log.fortiguard settings and report the result to Ansible."""
    # Argument spec for AnsibleModule; the nested "options" dict validates
    # the log_fortiguard_setting suboptions described in DOCUMENTATION.
    fields = {
        "host": {"required": True, "type": "str"},
        "username": {"required": True, "type": "str"},
        "password": {"required": False, "type": "str", "no_log": True},
        "vdom": {"required": False, "type": "str", "default": "root"},
        "https": {"required": False, "type": "bool", "default": True},
        "log_fortiguard_setting": {
            "required": False, "type": "dict",
            "options": {
                "enc-algorithm": {"required": False, "type": "str",
                                  "choices": ["high-medium", "high", "low",
                                              "disable"]},
                "source-ip": {"required": False, "type": "str"},
                "ssl-min-proto-version": {"required": False, "type": "str",
                                          "choices": ["default", "SSLv3", "TLSv1",
                                                      "TLSv1-1", "TLSv1-2"]},
                "status": {"required": False, "type": "str",
                           "choices": ["enable", "disable"]},
                "upload-day": {"required": False, "type": "str"},
                "upload-interval": {"required": False, "type": "str",
                                    "choices": ["daily", "weekly", "monthly"]},
                "upload-option": {"required": False, "type": "str",
                                  "choices": ["store-and-upload", "realtime", "1-minute",
                                              "5-minute"]},
                "upload-time": {"required": False, "type": "str"}
            }
        }
    }

    module = AnsibleModule(argument_spec=fields,
                           supports_check_mode=False)
    # fortiosapi is imported lazily so a missing dependency produces a
    # clean Ansible failure message instead of an import traceback.
    try:
        from fortiosapi import FortiOSAPI
    except ImportError:
        module.fail_json(msg="fortiosapi module is required")

    # The connection object is shared with login() via the module global.
    global fos
    fos = FortiOSAPI()

    is_error, has_changed, result = fortios_log_fortiguard(module.params, fos)

    if not is_error:
        module.exit_json(changed=has_changed, meta=result)
    else:
        module.fail_json(msg="Error in repo", meta=result)


if __name__ == '__main__':
    main()
|
unknown
|
codeparrot/codeparrot-clean
| ||
# -*- coding: utf-8 -*-
from __future__ import division, print_function
# Public API of this module. RationalQuadraticKernel is defined below but
# was previously missing from the export list; adding it is backward
# compatible (star-imports simply gain one name).
__all__ = [
    "Sum", "Product", "Kernel",
    "ConstantKernel", "WhiteKernel", "DotProductKernel",
    "RadialKernel", "ExpKernel", "ExpSquaredKernel",
    "RationalQuadraticKernel",
    "CosineKernel", "ExpSine2Kernel",
    "Matern32Kernel", "Matern52Kernel",
    "PythonKernel",
]
import numpy as np
from functools import partial
from ._kernels import CythonKernel
from .utils import numerical_gradient
class Kernel(object):
    """
    The abstract kernel type. Every kernel implemented in George should be
    a subclass of this object.

    :param pars:
        The hyper-parameters of the kernel.

    :param ndim: (optional)
        The number of input dimensions of the kernel. (default: ``1``)

    """

    is_kernel = True
    is_radial = False
    kernel_type = -1

    # This function deals with weird behavior when performing arithmetic
    # operations with numpy scalars.
    def __array_wrap__(self, array, context=None):
        if context is None:
            raise TypeError("Invalid operation")
        ufunc, args, _ = context
        if ufunc.__name__ == "multiply":
            return float(args[0]) * args[1]
        elif ufunc.__name__ == "add":
            return float(args[0]) + args[1]
        raise TypeError("Invalid operation")
    __array_priority__ = np.inf

    def __init__(self, *pars, **kwargs):
        self.ndim = kwargs.get("ndim", 1)
        self.pars = np.array(pars)
        self.dirty = True
        self._kernel = None

    def __getstate__(self):
        # The compiled CythonKernel is not picklable; drop it so it gets
        # lazily rebuilt after unpickling.
        odict = self.__dict__.copy()
        odict["_kernel"] = None
        return odict

    @property
    def kernel(self):
        # Lazily (re-)build the compiled kernel whenever parameters change.
        if self.dirty or self._kernel is None:
            self._kernel = CythonKernel(self)
            self.dirty = False
        return self._kernel

    def __repr__(self):
        # BUG FIX: ``map`` returns an iterator on Python 3, so the previous
        # ``map(...) + [...]`` concatenation raised TypeError there.
        # Materialize the list first; output is unchanged.
        params = list(map("{0}".format, self.pars))
        params.append("ndim={0}".format(self.ndim))
        return "{0}({1})".format(self.__class__.__name__, ", ".join(params))

    def lnprior(self):
        """Log-prior over the parameters; flat (0.0) by default."""
        return 0.0

    @property
    def vector(self):
        # Parameters are exposed (and fit) in log space.
        return np.log(self.pars)

    @vector.setter
    def vector(self, v):
        self.pars = np.exp(v)

    @property
    def pars(self):
        return self._pars

    @pars.setter
    def pars(self, v):
        self._pars = np.array(v, dtype=np.float64, order="C")
        self.dirty = True

    def __getitem__(self, i):
        return self.vector[i]

    def __setitem__(self, i, v):
        vec = self.vector
        vec[i] = v
        self.vector = vec

    def __len__(self):
        return len(self.pars)

    def __add__(self, b):
        if not hasattr(b, "is_kernel"):
            return Sum(ConstantKernel(float(b), ndim=self.ndim), self)
        return Sum(self, b)

    def __radd__(self, b):
        return self.__add__(b)

    def __mul__(self, b):
        if not hasattr(b, "is_kernel"):
            return Product(ConstantKernel(float(b), ndim=self.ndim), self)
        return Product(self, b)

    def __rmul__(self, b):
        return self.__mul__(b)

    def value(self, x1, x2=None):
        """Evaluate the kernel matrix between ``x1`` and ``x2`` (or ``x1``
        with itself when ``x2`` is None)."""
        x1 = np.ascontiguousarray(x1, dtype=np.float64)
        if x2 is None:
            return self.kernel.value_symmetric(x1)
        x2 = np.ascontiguousarray(x2, dtype=np.float64)
        return self.kernel.value_general(x1, x2)

    def gradient(self, x1, x2=None):
        """Gradient of the kernel matrix with respect to the (log)
        parameter vector."""
        x1 = np.ascontiguousarray(x1, dtype=np.float64)
        if x2 is None:
            g = self.kernel.gradient_symmetric(x1)
        else:
            x2 = np.ascontiguousarray(x2, dtype=np.float64)
            g = self.kernel.gradient_general(x1, x2)
        # Chain rule: the compiled gradient is w.r.t. pars; rescale to the
        # log-space vector.
        return g * self.vector_gradient[None, None, :]

    @property
    def vector_gradient(self):
        return self.pars
class _operator(Kernel):
    """Base for binary kernel compositions (sums and products).

    Holds two child kernels and forwards parameter access so the pair
    behaves like one kernel with a concatenated parameter vector.
    """

    is_kernel = False
    operator_type = -1

    def __init__(self, k1, k2):
        if k1.ndim != k2.ndim:
            raise ValueError("Dimension mismatch")
        self.k1 = k1
        self.k2 = k2
        self.ndim = k1.ndim
        self._dirty = True
        self._kernel = None

    def lnprior(self):
        # The prior factorizes over the two children.
        return self.k1.lnprior() + self.k2.lnprior()

    @property
    def dirty(self):
        # Dirty if either this node or any child changed.
        return self._dirty or self.k1.dirty or self.k2.dirty

    @dirty.setter
    def dirty(self, v):
        self._dirty = v
        self.k1.dirty = False
        self.k2.dirty = False

    @property
    def pars(self):
        return np.append(self.k1.pars, self.k2.pars)

    @pars.setter
    def pars(self, v):
        self._dirty = True
        n = len(self.k1)
        self.k1.pars = v[:n]
        self.k2.pars = v[n:]

    @property
    def vector(self):
        return np.append(self.k1.vector, self.k2.vector)

    @vector.setter
    def vector(self, v):
        self._dirty = True
        n = len(self.k1)
        self.k1.vector = v[:n]
        self.k2.vector = v[n:]
class Sum(_operator):
    """Composite kernel representing ``k1 + k2``."""

    is_kernel = False
    operator_type = 0

    def __repr__(self):
        return "%s + %s" % (self.k1, self.k2)
class Product(_operator):
    """Composite kernel representing ``k1 * k2``."""

    is_kernel = False
    operator_type = 1

    def __repr__(self):
        return "%s * %s" % (self.k1, self.k2)
class ConstantKernel(Kernel):
    r"""
    A kernel that evaluates to the same constant everywhere:

    .. math::

        k(\mathbf{x}_i,\,\mathbf{x}_j) = c

    where :math:`c` is a parameter.

    :param value:
        The constant value :math:`c` in the above equation.

    """
    kernel_type = 0

    def __init__(self, value, ndim=1):
        # The constant is the kernel's single hyper-parameter.
        super(ConstantKernel, self).__init__(value, ndim=ndim)
class WhiteKernel(Kernel):
    r"""
    White-noise kernel: a constant contribution on the diagonal only.

    .. math::

        k(\mathbf{x}_i,\,\mathbf{x}_j) = c \, \delta_{ij}

    where :math:`c` is the parameter.

    :param value:
        The constant value :math:`c` in the above equation.

    """
    kernel_type = 1

    def __init__(self, value, ndim=1):
        # Single hyper-parameter: the diagonal amplitude.
        super(WhiteKernel, self).__init__(value, ndim=ndim)
class DotProductKernel(Kernel):
    r"""
    The (parameter-free) linear dot-product kernel:

    .. math::

        k(\mathbf{x}_i,\,\mathbf{x}_j) = \mathbf{x}_i^{\mathrm{T}} \cdot
            \mathbf{x}_j

    """
    kernel_type = 2

    def __init__(self, ndim=1):
        # No hyper-parameters; only the input dimension is configurable.
        super(DotProductKernel, self).__init__(ndim=ndim)
class RadialKernel(Kernel):
    r"""
    This kernel (and more importantly its subclasses) computes the distance
    between two samples in an arbitrary metric and applies a radial function
    to this distance.

    :param metric:
        The specification of the metric. This can be a ``float``, in which
        case the metric is considered isotropic with the variance in each
        dimension given by the value of ``metric``. Alternatively, ``metric``
        can be a list of variances for each dimension. In this case, it should
        have length ``ndim``. The fully general (not axis-aligned) metric
        hasn't been implemented yet but it's on the to do list!

    :param dim: (optional)
        If provided, this will apply the kernel in only the specified
        dimension.

    """
    is_radial = True

    # BUG FIX: ``extra`` used to default to a mutable ``[]`` (the classic
    # shared-mutable-default pitfall). It is only ever read here, so a
    # tuple default is behavior-identical and safe.
    def __init__(self, metric, ndim=1, dim=-1, extra=()):
        self.isotropic = False
        self.axis_aligned = False
        # EAFP: a scalar metric means an isotropic kernel; a sequence must
        # be one variance per dimension (axis-aligned metric).
        try:
            float(metric)

        except TypeError:
            metric = np.atleast_1d(metric)
            if len(metric) == ndim:
                # The metric is axis aligned.
                self.axis_aligned = True

            else:
                raise NotImplementedError("The general metric isn't "
                                          "implemented")

        else:
            # If we get here then the kernel is isotropic.
            self.isotropic = True

        if dim >= 0:
            assert self.isotropic, "A 1-D kernel should also be isotropic"
        self.dim = dim

        # Extra (non-metric) parameters are stored ahead of the metric.
        super(RadialKernel, self).__init__(*(np.append(extra, metric)),
                                           ndim=ndim)
class ExpKernel(RadialKernel):
    r"""
    A :class:`RadialKernel` whose radial profile decays exponentially with
    distance:

    .. math::

        k({r_{ij}}) = \exp \left ( -|r| \right )

    :param metric:
        The custom metric specified as described in the :class:`RadialKernel`
        description.

    """
    kernel_type = 3
class ExpSquaredKernel(RadialKernel):
    r"""
    A :class:`RadialKernel` with the Gaussian (squared-exponential) radial
    profile:

    .. math::

        k(r^2) = \exp \left ( -\frac{r^2}{2} \right )

    :param metric:
        The custom metric specified as described in the :class:`RadialKernel`
        description.

    """
    kernel_type = 4
class Matern32Kernel(RadialKernel):
    r"""
    A :class:`RadialKernel` applying the Matern-3/2 radial profile:

    .. math::

        k(r^2) = \left( 1+\sqrt{3\,r^2} \right)\,
                 \exp \left (-\sqrt{3\,r^2} \right )

    :param metric:
        The custom metric specified as described in the :class:`RadialKernel`
        description.

    """
    kernel_type = 5
class Matern52Kernel(RadialKernel):
    r"""
    A :class:`RadialKernel` applying the Matern-5/2 radial profile:

    .. math::

        k(r^2) = \left( 1+\sqrt{5\,r^2} + \frac{5\,r^2}{3} \right)\,
                 \exp \left (-\sqrt{5\,r^2} \right )

    :param metric:
        The custom metric specified as described in the :class:`RadialKernel`
        description.

    """
    kernel_type = 6
class RationalQuadraticKernel(RadialKernel):
    r"""
    A :class:`RadialKernel` with an extra shape parameter ``alpha`` that is
    stored ahead of the metric parameters (via ``extra=[alpha]``). The
    radial profile itself is evaluated by the compiled back end
    (``kernel_type = 7``) — presumably the standard rational-quadratic
    form; confirm against the Cython implementation.

    :param alpha:
        The shape parameter of the profile.

    :param metric:
        The custom metric specified as described in the
        :class:`RadialKernel` description.

    """
    kernel_type = 7

    def __init__(self, alpha, metric, ndim=1, **kwargs):
        super(RationalQuadraticKernel, self).__init__(metric, extra=[alpha],
                                                      ndim=ndim, **kwargs)
class CosineKernel(Kernel):
    r"""
    A strictly periodic kernel:

    .. math::

        k(\mathbf{x}_i,\,\mathbf{x}_j) =
            \cos\left(\frac{2\,\pi}{P}\,\left|x_i-x_j\right| \right)

    where :math:`P` is the period.

    :param period:
        The period :math:`P` of the oscillation (in the same units as
        :math:`\mathbf{x}`).

    **Note:**
    A shortcoming of this kernel is that it currently only accepts a single
    period so it's not very applicable to problems with input dimension larger
    than one.

    """
    kernel_type = 8

    def __init__(self, period, ndim=1, dim=0):
        super(CosineKernel, self).__init__(period, ndim=ndim)
        # The kernel acts along exactly one axis of the inputs.
        assert dim < self.ndim, "Invalid dimension"
        self.dim = dim
class ExpSine2Kernel(Kernel):
    r"""
    The exp-sine-squared kernel, commonly used to model quasi-periodic
    signals such as stellar rotation:

    .. math::

        k(\mathbf{x}_i,\,\mathbf{x}_j) =
            \exp \left( -\Gamma\,\sin^2\left[
                \frac{\pi}{P}\,\left|x_i-x_j\right|
            \right] \right)

    where :math:`\Gamma` is the "scale" of the correlation and :math:`P` is
    the period of the oscillation measured in the same units as
    :math:`\mathbf{x}`.

    :param gamma:
        The scale :math:`\Gamma` of the correlations.

    :param period:
        The period :math:`P` of the oscillation (in the same units as
        :math:`\mathbf{x}`).

    :param dim: (optional)
        The dimension along which this kernel should apply. By default, this
        will be the zero-th axis.

    """
    kernel_type = 9

    def __init__(self, gamma, period, ndim=1, dim=0):
        super(ExpSine2Kernel, self).__init__(gamma, period, ndim=ndim)
        # The kernel acts along exactly one axis of the inputs.
        assert dim < self.ndim, "Invalid dimension"
        self.dim = dim
class PythonKernel(Kernel):
    r"""
    A custom kernel whose function (and optionally gradient) are evaluated
    by user-supplied Python callables. Because every evaluation happens
    point-wise in Python, this is far slower than the compiled kernels for
    big problems, but it is the only way to define a custom kernel without
    recompiling George.

    :param f:
        A callable with signature ``(x1, x2, p)`` returning the kernel
        value, where ``x1`` and ``x2`` are numpy arrays of sample
        coordinates and ``p`` is the current parameter vector.

    :param g: (optional)
        A callable with the same signature as ``f`` returning the gradient
        of the kernel. When omitted, the gradient is estimated by centered
        finite differences.

    :param pars: (optional)
        The initial parameter values (defaults to no parameters).

    :param dx: (optional)
        Step size used for the finite-difference gradient.

    """
    kernel_type = -2

    def __init__(self, f, g=None, pars=(), dx=1.234e-6, ndim=1):
        super(PythonKernel, self).__init__(*pars, ndim=ndim)
        self.size = len(self.pars)
        self.f = f
        self.g = self._wrap_grad(f, g, dx=dx)

    def _wrap_grad(self, f, g, dx=1.234e-6):
        # Use the user's gradient when given; otherwise fall back to a
        # numerical approximation built around ``f``.
        if g is not None:
            return g

        def grad(x1, x2, p):
            return numerical_gradient(partial(f, x1, x2), p, dx=dx)

        return grad
|
unknown
|
codeparrot/codeparrot-clean
| ||
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Library for operating on Python API Guide files."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import re
def md_files_in_dir(py_guide_src_dir):
  """Returns a list of filename (full_path, base) pairs for guide files."""
  result = []
  for base in os.listdir(py_guide_src_dir):
    full = os.path.join(py_guide_src_dir, base)
    # Keep only regular files with a markdown extension.
    if base.endswith('.md') and os.path.isfile(full):
      result.append((full, base))
  return result
class PyGuideParser(object):
  """Simple parsing of a guide .md file.

  Descendants can override the process_*() functions (called by process())
  to either record information from the guide, or call replace_line()
  to affect the return value of process().
  """

  def __init__(self):
    self._lines = None

  def process(self, full_path):
    """Read and process the file at `full_path`."""
    # Use a context manager so the file handle is always closed.
    with open(full_path) as f:
      md_string = f.read()
    self._lines = md_string.split('\n')
    seen = set()

    in_blockquote = False
    for i, line in enumerate(self._lines):
      if '```' in line:
        in_blockquote = not in_blockquote

      if not in_blockquote and line.startswith('# '):
        self.process_title(i, line[2:])
      elif not in_blockquote and line.startswith('## '):
        section_title = line.strip()[3:]
        existing_tag = re.search(' {([^}]+)} *$', line)
        if existing_tag:
          tag = existing_tag.group(1)
        else:
          # Generate an anchor tag from the title.
          tag = re.sub('[^a-zA-Z0-9]+', '_', section_title)
          if tag in seen:
            # De-duplicate repeated section titles by appending a counter.
            suffix = 0
            while True:
              candidate = '%s_%d' % (tag, suffix)
              if candidate not in seen:
                tag = candidate
                break
              # BUG FIX: the counter was never incremented before, so a
              # third occurrence of the same title looped forever.
              suffix += 1
        seen.add(tag)
        self.process_section(i, section_title, tag)
      elif in_blockquote:
        self.process_in_blockquote(i, line)
      else:
        self.process_line(i, line)

    ret = '\n'.join(self._lines)
    self._lines = None
    return ret

  def replace_line(self, line_number, line):
    """Replace the contents of line numbered `line_number` with `line`."""
    self._lines[line_number] = line

  def process_title(self, line_number, title):
    """Hook for the page title; default is a no-op."""
    pass

  def process_section(self, line_number, section_title, tag):
    """Hook for each `## ` section heading; default is a no-op."""
    pass

  def process_in_blockquote(self, line_number, line):
    """Hook for lines inside ``` code fences; default is a no-op."""
    pass

  def process_line(self, line_number, line):
    """Hook for all other lines; default is a no-op."""
    pass
|
unknown
|
codeparrot/codeparrot-clean
| ||
# coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Registry for calculations."""
import inspect
from extensions.answer_summarizers import models
class Registry(object):
    """Registry of all calculations for summarizing answers."""

    # Dict mapping calculation class names to their classes.
    calculations_dict = {}

    @classmethod
    def _refresh_registry(cls):
        """Rebuild the registry from the answer_summarizers models module."""
        cls.calculations_dict.clear()

        # Add new visualization instances to the registry.
        for name, clazz in inspect.getmembers(models, inspect.isclass):
            if name.endswith('_test') or name == 'BaseCalculation':
                continue
            ancestors = [base.__name__ for base in inspect.getmro(clazz)]
            if 'BaseCalculation' in ancestors:
                cls.calculations_dict[clazz.__name__] = clazz

    @classmethod
    def get_calculation_by_id(cls, calculation_id):
        """Gets a calculation instance by its id (which is also its class name).

        Refreshes once if the class is not found; subsequently, throws an
        error.
        """
        if calculation_id not in cls.calculations_dict:
            cls._refresh_registry()
        try:
            calculation_cls = cls.calculations_dict[calculation_id]
        except KeyError:
            raise TypeError(
                '\'%s\' is not a valid calculation id.' % calculation_id)
        return calculation_cls()
|
unknown
|
codeparrot/codeparrot-clean
| ||
package drvregistry
import (
"testing"
"github.com/moby/moby/v2/daemon/libnetwork/driverapi"
"github.com/moby/moby/v2/daemon/libnetwork/scope"
"gotest.tools/v3/assert"
is "gotest.tools/v3/assert/cmp"
)
// mockDriverName is the network-driver name registered throughout these tests.
const mockDriverName = "mock-driver"

// mockDriver is a minimal driverapi.Driver implementation for registry tests.
type mockDriver struct {
	driverapi.Driver
}

// mockDriverCaps advertises a locally-scoped driver.
var mockDriverCaps = driverapi.Capability{DataScope: scope.Local}

// md is the shared driver instance used by every subtest below.
var md = mockDriver{}
// Type returns the driver's network type name ("mock-driver").
func (m *mockDriver) Type() string {
	return mockDriverName
}
// IsBuiltIn reports this driver as built-in for registry bookkeeping.
func (m *mockDriver) IsBuiltIn() bool {
	return true
}
// TestNetworks exercises driver registration, duplicate detection, lookup,
// and iteration on the Networks registry.
func TestNetworks(t *testing.T) {
	t.Run("RegisterDriver", func(t *testing.T) {
		var registry Networks
		assert.NilError(t, registry.RegisterDriver(mockDriverName, &md, mockDriverCaps))
	})

	t.Run("RegisterDuplicateDriver", func(t *testing.T) {
		var registry Networks
		assert.NilError(t, registry.RegisterDriver(mockDriverName, &md, mockDriverCaps))

		// Registering the same driver name twice must be rejected.
		err := registry.RegisterDriver(mockDriverName, &md, mockDriverCaps)
		assert.Check(t, is.ErrorContains(err, ""))
	})

	t.Run("Driver", func(t *testing.T) {
		var registry Networks
		assert.NilError(t, registry.RegisterDriver(mockDriverName, &md, mockDriverCaps))

		drv, caps := registry.Driver(mockDriverName)
		assert.Check(t, drv != nil)
		assert.Check(t, is.DeepEqual(caps, mockDriverCaps))
	})

	t.Run("WalkDrivers", func(t *testing.T) {
		var registry Networks
		assert.NilError(t, registry.RegisterDriver(mockDriverName, &md, mockDriverCaps))

		var seen string
		registry.WalkDrivers(func(name string, _ driverapi.Driver, _ driverapi.Capability) bool {
			seen = name
			return false
		})
		assert.Check(t, is.Equal(seen, mockDriverName))
	})
}
|
go
|
github
|
https://github.com/moby/moby
|
daemon/libnetwork/drvregistry/networks_test.go
|
# Copyright 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Network-related utilities and helper functions.
"""
# TODO(jd) Use six.moves once
# https://bitbucket.org/gutworth/six/pull-request/28
# is merged
try:
import urllib.parse
SplitResult = urllib.parse.SplitResult
except ImportError:
import urlparse
SplitResult = urlparse.SplitResult
from six.moves.urllib import parse
def parse_host_port(address, default_port=None):
    """Interpret a string as a host:port pair.

    An IPv6 address MUST be escaped if accompanied by a port,
    because otherwise ambiguity ensues: 2001:db8:85a3::8a2e:370:7334
    means both [2001:db8:85a3::8a2e:370:7334] and
    [2001:db8:85a3::8a2e:370]:7334.

    >>> parse_host_port('server01:80')
    ('server01', 80)
    >>> parse_host_port('server01')
    ('server01', None)
    >>> parse_host_port('server01', default_port=1234)
    ('server01', 1234)
    >>> parse_host_port('[::1]:80')
    ('::1', 80)
    >>> parse_host_port('[::1]')
    ('::1', None)
    >>> parse_host_port('[::1]', default_port=1234)
    ('::1', 1234)
    >>> parse_host_port('2001:db8:85a3::8a2e:370:7334', default_port=1234)
    ('2001:db8:85a3::8a2e:370:7334', 1234)

    """
    if address[0] == '[':
        # Bracketed (escaped) IPv6: everything before ']' is the host and
        # an optional ':port' may follow the bracket.
        host, tail = address[1:].split(']')
        if ':' in tail:
            port = tail.split(':')[1]
        else:
            port = default_port
    else:
        colons = address.count(':')
        if colons == 1:
            host, port = address.split(':')
        else:
            # 0 colons -> hostname/IPv4; >1 -> unescaped IPv6, for which a
            # port is prohibited, so the whole string is the host.
            host, port = address, default_port

    return (host, None if port is None else int(port))
def urlsplit(url, scheme='', allow_fragments=True):
    """Parse a URL using urlparse.urlsplit(), splitting query and fragments.

    This function papers over Python issue9374 when needed.
    The parameters are the same as urlparse.urlsplit.
    """
    scheme, netloc, path, query, fragment = parse.urlsplit(
        url, scheme, allow_fragments)
    # issue9374: for some schemes urlsplit leaves the fragment and query
    # glued onto the path; split them off manually.
    if allow_fragments and '#' in path:
        path, fragment = path.split('#', 1)
    if '?' in path:
        path, query = path.split('?', 1)
    return SplitResult(scheme, netloc, path, query, fragment)
|
unknown
|
codeparrot/codeparrot-clean
| ||
<!--Copyright 2025 Advanced Micro Devices, Inc. and The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
⚠️ Note that this file is in Markdown but contain specific syntax for our doc-builder (similar to MDX) that may not be
rendered properly in your Markdown viewer.
-->
# Quark
[Quark](https://quark.docs.amd.com/latest/) is a deep learning quantization toolkit designed to be agnostic to specific data types, algorithms, and hardware. Different pre-processing strategies, algorithms and data-types can be combined in Quark.
The PyTorch support integrated through 🤗 Transformers primarily targets AMD CPUs and GPUs, and is primarily meant to be used for evaluation purposes. For example, it is possible to use [lm-evaluation-harness](https://github.com/EleutherAI/lm-evaluation-harness) with 🤗 Transformers backend and evaluate a wide range of models quantized through Quark seamlessly.
Users interested in Quark can refer to its [documentation](https://quark.docs.amd.com/latest/) to get started quantizing models and using them in supported open-source libraries!
Although Quark has its own checkpoint / [configuration format](https://huggingface.co/amd/Llama-3.1-8B-Instruct-FP8-KV-Quark-test/blob/main/config.json#L26), the library also supports producing models with a serialization layout compliant with other quantization/runtime implementations ([AutoAWQ](https://huggingface.co/docs/transformers/quantization/awq), [native fp8 in 🤗 Transformers](https://huggingface.co/docs/transformers/quantization/finegrained_fp8)).
To be able to load Quark quantized models in Transformers, the library first needs to be installed:
```bash
pip install amd-quark
```
## Support matrix
Models quantized through Quark support a large range of features that can be combined together. All quantized models, independently of their configuration, can be seamlessly reloaded through `PretrainedModel.from_pretrained`.
The table below shows a few features supported by Quark:
| **Feature** | **Supported subset in Quark** | |
|---------------------------------|-----------------------------------------------------------------------------------------------------------|---|
| Data types | int8, int4, int2, bfloat16, float16, fp8_e5m2, fp8_e4m3, fp6_e3m2, fp6_e2m3, fp4, OCP MX, MX6, MX9, bfp16 | |
| Pre-quantization transformation | SmoothQuant, QuaRot, SpinQuant, AWQ | |
| Quantization algorithm | GPTQ | |
| Supported operators | ``nn.Linear``, ``nn.Conv2d``, ``nn.ConvTranspose2d``, ``nn.Embedding``, ``nn.EmbeddingBag`` | |
| Granularity | per-tensor, per-channel, per-block, per-layer, per-layer type | |
| KV cache | fp8 | |
| Activation calibration | MinMax / Percentile / MSE | |
| Quantization strategy | weight-only, static, dynamic, with or without output quantization | |
## Models on Hugging Face Hub
Public models using Quark native serialization can be found at https://huggingface.co/models?other=quark.
Although Quark also supports [models using `quant_method="fp8"`](https://huggingface.co/models?other=fp8) and [models using `quant_method="awq"`](https://huggingface.co/models?other=awq), Transformers loads these models rather through [AutoAWQ](https://huggingface.co/docs/transformers/quantization/awq) or uses the [native fp8 support in 🤗 Transformers](https://huggingface.co/docs/transformers/quantization/finegrained_fp8).
## Using Quark models in Transformers
Here is an example of how one can load a Quark model in Transformers:
```python
from transformers import AutoModelForCausalLM, AutoTokenizer
model_id = "EmbeddedLLM/Llama-3.1-8B-Instruct-w_fp8_per_channel_sym"
model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto")
print(model.model.layers[0].self_attn.q_proj)
# QParamsLinear(
# (weight_quantizer): ScaledRealQuantizer()
# (input_quantizer): ScaledRealQuantizer()
# (output_quantizer): ScaledRealQuantizer()
# )
tokenizer = AutoTokenizer.from_pretrained(model_id)
inp = tokenizer("Where is a good place to cycle around Tokyo?", return_tensors="pt")
inp = inp.to(model.device)
res = model.generate(**inp, min_new_tokens=50, max_new_tokens=100)
print(tokenizer.batch_decode(res)[0])
# <|begin_of_text|>Where is a good place to cycle around Tokyo? There are several places in Tokyo that are suitable for cycling, depending on your skill level and interests. Here are a few suggestions:
# 1. Yoyogi Park: This park is a popular spot for cycling and has a wide, flat path that's perfect for beginners. You can also visit the Meiji Shrine, a famous Shinto shrine located in the park.
# 2. Imperial Palace East Garden: This beautiful garden has a large, flat path that's perfect for cycling. You can also visit the
```
|
unknown
|
github
|
https://github.com/huggingface/transformers
|
docs/source/en/quantization/quark.md
|
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c), Michael DeHaan <michael.dehaan@gmail.com>, 2012-2013
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# == BEGIN DYNAMICALLY INSERTED CODE ==
# Placeholders below are substituted by Ansible's module_common machinery
# when the module is assembled for shipping to the target host.
ANSIBLE_VERSION = "<<ANSIBLE_VERSION>>"
# key=value style arguments for this invocation
MODULE_ARGS = "<<INCLUDE_ANSIBLE_MODULE_ARGS>>"
# complex (JSON) arguments for this invocation
MODULE_COMPLEX_ARGS = "<<INCLUDE_ANSIBLE_MODULE_COMPLEX_ARGS>>"
# Accepted truthy/falsy spellings; note the bare ints 1 and 0 are included.
BOOLEANS_TRUE = ['yes', 'on', '1', 'true', 1]
BOOLEANS_FALSE = ['no', 'off', '0', 'false', 0]
BOOLEANS = BOOLEANS_TRUE + BOOLEANS_FALSE
# Comma-separated filesystem-type list (split on ',' in is_special_selinux_path)
SELINUX_SPECIAL_FS="<<SELINUX_SPECIAL_FILESYSTEMS>>"
# ansible modules can be written in any language. To simplify
# development of Python modules, the functions available here
# can be inserted in any module source automatically by including
# #<<INCLUDE_ANSIBLE_MODULE_COMMON>> on a blank line by itself inside
# of an ansible module. The source of this common code lives
# in ansible/executor/module_common.py
import locale
import os
import re
import pipes
import shlex
import subprocess
import sys
import syslog
import types
import time
import select
import shutil
import stat
import tempfile
import traceback
import grp
import pwd
import platform
import errno
from itertools import imap, repeat
# Locate a usable JSON implementation: prefer stdlib json, fall back to
# simplejson.  Exit with a JSON-formatted error (so the runner can parse it)
# when neither is usable.
try:
    import json
    # Detect the python-json library which is incompatible
    # Look for simplejson if that's the case
    try:
        if not isinstance(json.loads, types.FunctionType) or not isinstance(json.dumps, types.FunctionType):
            raise ImportError
    except AttributeError:
        raise ImportError
except ImportError:
    try:
        import simplejson as json
    except ImportError:
        print('{"msg": "Error: ansible requires the stdlib json or simplejson module, neither was found!", "failed": true}')
        sys.exit(1)
    except SyntaxError:
        print('{"msg": "SyntaxError: probably due to installed simplejson being for a different python version", "failed": true}')
        sys.exit(1)
# Optional dependencies: record their availability in module-level flags
# instead of failing at import time.
HAVE_SELINUX=False
try:
    import selinux
    HAVE_SELINUX=True
except ImportError:
    pass
try:
    from systemd import journal
    has_journal = True
except ImportError:
    has_journal = False
# Build a name -> constructor map of hash algorithms usable on this host,
# degrading gracefully across old Python versions.
AVAILABLE_HASH_ALGORITHMS = dict()
try:
    import hashlib
    # python 2.7.9+ and 2.7.0+
    for attribute in ('available_algorithms', 'algorithms'):
        algorithms = getattr(hashlib, attribute, None)
        if algorithms:
            break
    if algorithms is None:
        # python 2.5+
        algorithms = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512')
    for algorithm in algorithms:
        AVAILABLE_HASH_ALGORITHMS[algorithm] = getattr(hashlib, algorithm)
except ImportError:
    # pre-hashlib pythons: legacy sha/md5 modules
    import sha
    AVAILABLE_HASH_ALGORITHMS = {'sha1': sha.sha}
    try:
        import md5
        AVAILABLE_HASH_ALGORITHMS['md5'] = md5.md5
    except ImportError:
        pass
try:
    from ast import literal_eval as _literal_eval
except ImportError:
    # a replacement for literal_eval that works with python 2.4. from:
    # https://mail.python.org/pipermail/python-list/2009-September/551880.html
    # which is essentially a cut/paste from an earlier (2.6) version of python's
    # ast.py
    from compiler import ast, parse
    def _literal_eval(node_or_string):
        """
        Safely evaluate an expression node or a string containing a Python
        expression.  The string or node provided may only consist of the following
        Python literal structures: strings, numbers, tuples, lists, dicts, booleans,
        and None.
        """
        _safe_names = {'None': None, 'True': True, 'False': False}
        if isinstance(node_or_string, basestring):
            node_or_string = parse(node_or_string, mode='eval')
        if isinstance(node_or_string, ast.Expression):
            # unwrap the Expression wrapper produced by compiler.parse
            node_or_string = node_or_string.node
        def _convert(node):
            # recursively translate compiler AST nodes into plain values;
            # anything outside the whitelist below is rejected
            if isinstance(node, ast.Const) and isinstance(node.value, (basestring, int, float, long, complex)):
                return node.value
            elif isinstance(node, ast.Tuple):
                return tuple(map(_convert, node.nodes))
            elif isinstance(node, ast.List):
                return list(map(_convert, node.nodes))
            elif isinstance(node, ast.Dict):
                return dict((_convert(k), _convert(v)) for k, v in node.items)
            elif isinstance(node, ast.Name):
                # only None/True/False may appear as bare names
                if node.name in _safe_names:
                    return _safe_names[node.name]
            elif isinstance(node, ast.UnarySub):
                return -_convert(node.expr)
            raise ValueError('malformed string')
        return _convert(node_or_string)
# Argument-spec fragment merged into a module's spec when it is constructed
# with add_file_common_args=True (see AnsibleModule.__init__).
FILE_COMMON_ARGUMENTS=dict(
    src = dict(),
    mode = dict(),
    owner = dict(),
    group = dict(),
    seuser = dict(),
    serole = dict(),
    selevel = dict(),
    setype = dict(),
    follow = dict(type='bool', default=False),
    # not taken by the file module, but other modules call file so it must ignore them.
    content = dict(no_log=True),
    backup = dict(),
    force = dict(),
    remote_src = dict(), # used by assemble
    regexp = dict(), # used by assemble
    delimiter = dict(), # used by assemble
    directory_mode = dict(), # used by copy
)
# Matches argument names that look like they carry passwords (--password,
# -pass, passwd, ...) so they can be censored before logging.
PASSWD_ARG_RE = re.compile(r'^[-]{0,2}pass[-]?(word|wd)?')
def get_platform():
    """Return the generic platform name (e.g. 'Linux', 'Darwin').

    Thin wrapper around platform.system().
    """
    return platform.system()
def get_distribution():
    ''' return the distribution name (capitalized) on Linux, None elsewhere '''
    if platform.system() == 'Linux':
        try:
            # extend the stock list of recognized distributions with arch
            supported_dists = platform._supported_dists + ('arch',)
            distribution = platform.linux_distribution(supported_dists=supported_dists)[0].capitalize()
            if not distribution and os.path.isfile('/etc/system-release'):
                # Amazon-style systems only expose /etc/system-release
                distribution = platform.linux_distribution(supported_dists=['system'])[0].capitalize()
                if 'Amazon' in distribution:
                    distribution = 'Amazon'
                else:
                    distribution = 'OtherLinux'
        except:
            # FIXME: MethodMissing, I assume?
            # NOTE(review): bare except also hides AttributeError on pythons
            # where platform._supported_dists is absent
            distribution = platform.dist()[0].capitalize()
    else:
        distribution = None
    return distribution
def get_distribution_version():
    ''' return the distribution version string on Linux, None elsewhere '''
    if platform.system() == 'Linux':
        try:
            distribution_version = platform.linux_distribution()[1]
            if not distribution_version and os.path.isfile('/etc/system-release'):
                # Amazon-style systems only expose /etc/system-release
                distribution_version = platform.linux_distribution(supported_dists=['system'])[1]
        except:
            # FIXME: MethodMissing, I assume?
            distribution_version = platform.dist()[1]
    else:
        distribution_version = None
    return distribution_version
def load_platform_subclass(cls, *args, **kwargs):
    '''
    used by modules like User to have different implementations based on detected platform.  See User
    module for an example.

    Picks the most specific subclass of *cls*: first an exact
    (platform, distribution) match, then a platform-only match, finally
    *cls* itself, and instantiates it via __new__.
    '''
    this_platform = get_platform()
    distribution = get_distribution()
    subclass = None
    # get the most specific superclass for this platform
    if distribution is not None:
        for sc in cls.__subclasses__():
            if sc.distribution is not None and sc.distribution == distribution and sc.platform == this_platform:
                subclass = sc
    if subclass is None:
        for sc in cls.__subclasses__():
            if sc.platform == this_platform and sc.distribution is None:
                subclass = sc
    if subclass is None:
        subclass = cls
    # __init__ runs afterwards as part of normal instantiation
    return super(cls, subclass).__new__(subclass)
def json_dict_unicode_to_bytes(d, encoding='utf-8'):
    ''' Recursively convert dict keys and values to byte str

        Specialized for json return because this only handles, lists, tuples,
        and dict container types (the containers that the json module returns)

        Non-container, non-unicode values are returned unchanged.
    '''
    if isinstance(d, unicode):
        return d.encode(encoding)
    elif isinstance(d, dict):
        return dict(imap(json_dict_unicode_to_bytes, d.iteritems(), repeat(encoding)))
    elif isinstance(d, list):
        return list(imap(json_dict_unicode_to_bytes, d, repeat(encoding)))
    elif isinstance(d, tuple):
        return tuple(imap(json_dict_unicode_to_bytes, d, repeat(encoding)))
    else:
        return d
def json_dict_bytes_to_unicode(d, encoding='utf-8'):
    ''' Recursively convert dict keys and values to unicode str

        Specialized for json return because this only handles, lists, tuples,
        and dict container types (the containers that the json module returns)

        Non-container, non-str values are returned unchanged.
    '''
    if isinstance(d, str):
        return unicode(d, encoding)
    elif isinstance(d, dict):
        return dict(imap(json_dict_bytes_to_unicode, d.iteritems(), repeat(encoding)))
    elif isinstance(d, list):
        return list(imap(json_dict_bytes_to_unicode, d, repeat(encoding)))
    elif isinstance(d, tuple):
        return tuple(imap(json_dict_bytes_to_unicode, d, repeat(encoding)))
    else:
        return d
def heuristic_log_sanitize(data):
    ''' Remove strings that look like passwords from log messages '''
    # Currently filters:
    # user:pass@foo/whatever and http://username:pass@wherever/foo
    # This code has false positives and consumes parts of logs that are
    # not passwds

    # begin: start of a passwd containing string
    # end: end of a passwd containing string
    # sep: char between user and passwd
    # prev_begin: where in the overall string to start a search for
    #     a passwd
    # sep_search_end: where in the string to end a search for the sep
    output = []
    begin = len(data)
    prev_begin = begin
    sep = 1
    while sep:
        # Find the potential end of a passwd
        try:
            end = data.rindex('@', 0, begin)
        except ValueError:
            # No passwd in the rest of the data
            output.insert(0, data[0:begin])
            break

        # Search for the beginning of a passwd
        sep = None
        sep_search_end = end
        while not sep:
            # URL-style username+password
            try:
                begin = data.rindex('://', 0, sep_search_end)
            except ValueError:
                # No url style in the data, check for ssh style in the
                # rest of the string
                begin = 0
            # Search for separator
            try:
                sep = data.index(':', begin + 3, end)
            except ValueError:
                # No separator; choices:
                if begin == 0:
                    # Searched the whole string so there's no password
                    # here.  Return the remaining data.
                    # BUG FIX: previously this inserted data[0:begin], which
                    # is always '' here (begin == 0) and silently dropped all
                    # text before an '@' that had no password separator
                    # (e.g. 'foo@bar' sanitized to '').  Keep everything up
                    # to the already-emitted portion instead.
                    output.insert(0, data[0:prev_begin])
                    break
                # Search for a different beginning of the password field.
                sep_search_end = begin
                continue
        if sep:
            # Password was found; remove it.
            output.insert(0, data[end:prev_begin])
            output.insert(0, '********')
            output.insert(0, data[begin:sep + 1])
            prev_begin = begin

    return ''.join(output)
class AnsibleModule(object):
    def __init__(self, argument_spec, bypass_checks=False, no_log=False,
        check_invalid_arguments=True, mutually_exclusive=None, required_together=None,
        required_one_of=None, add_file_common_args=False, supports_check_mode=False,
        required_if=None):

        '''
        common code for quickly building an ansible module in Python
        (although you can write modules in anything that can return JSON)
        see library/* for examples

        argument_spec           -- dict of option name -> spec (type, default,
                                   required, choices, aliases, no_log, ...)
        bypass_checks           -- skip all argument validation
        no_log                  -- suppress logging of this invocation
        check_invalid_arguments -- fail on params absent from argument_spec
        mutually_exclusive      -- list of lists of mutually exclusive options
        required_together       -- list of lists of options required together
        required_one_of         -- list of lists needing at least one option
        add_file_common_args    -- merge FILE_COMMON_ARGUMENTS into the spec
        supports_check_mode     -- module honors check mode
        required_if             -- list of (key, value, [requirements]) rules
        '''

        self.argument_spec = argument_spec
        self.supports_check_mode = supports_check_mode
        self.check_mode = False
        self.no_log = no_log
        self.cleanup_files = []

        self.aliases = {}

        if add_file_common_args:
            # only add file args not already declared by the module itself
            for k, v in FILE_COMMON_ARGUMENTS.iteritems():
                if k not in self.argument_spec:
                    self.argument_spec[k] = v

        # check the locale as set by the current environment, and
        # reset to LANG=C if it's an invalid/unavailable locale
        self._check_locale()

        self.params = self._load_params()

        # internal params injected by the runner are always legal
        self._legal_inputs = ['_ansible_check_mode', '_ansible_no_log']

        self.aliases = self._handle_aliases()

        if check_invalid_arguments:
            self._check_invalid_arguments()
        self._check_for_check_mode()
        self._check_for_no_log()

        # check exclusive early
        if not bypass_checks:
            self._check_mutually_exclusive(mutually_exclusive)

        self._set_defaults(pre=True)

        # maps the 'type' string in a spec entry to its coercion method
        self._CHECK_ARGUMENT_TYPES_DISPATCHER = {
                'str': self._check_type_str,
                'list': self._check_type_list,
                'dict': self._check_type_dict,
                'bool': self._check_type_bool,
                'int': self._check_type_int,
                'float': self._check_type_float,
                'path': self._check_type_path,
            }
        if not bypass_checks:
            self._check_required_arguments()
            self._check_argument_types()
            self._check_argument_values()
            self._check_required_together(required_together)
            self._check_required_one_of(required_one_of)
            self._check_required_if(required_if)

        self._set_defaults(pre=False)
        if not self.no_log:
            self._log_invocation()

        # finally, make sure we're in a sane working dir
        self._set_cwd()
    def load_file_common_arguments(self, params):
        '''
        many modules deal with files, this encapsulates common
        options that the file module accepts such that it is directly
        available to all modules and they can share code.

        Returns a dict of normalized file attributes (path, mode, owner,
        group, selinux fields) extracted from *params*, or {} when the
        params carry neither 'path' nor 'dest'.
        '''

        path = params.get('path', params.get('dest', None))
        if path is None:
            return {}
        else:
            path = os.path.expanduser(path)

        # if the path is a symlink, and we're following links, get
        # the target of the link instead for testing
        if params.get('follow', False) and os.path.islink(path):
            path = os.path.realpath(path)

        mode   = params.get('mode', None)
        owner  = params.get('owner', None)
        group  = params.get('group', None)

        # selinux related options
        seuser    = params.get('seuser', None)
        serole    = params.get('serole', None)
        setype    = params.get('setype', None)
        selevel   = params.get('selevel', None)
        secontext = [seuser, serole, setype]

        if self.selinux_mls_enabled():
            secontext.append(selevel)

        default_secontext = self.selinux_default_context(path)
        for i in range(len(default_secontext)):
            # NOTE(review): 'i is not None' is always true for a range()
            # index; the effective condition is the '_default' comparison.
            if i is not None and secontext[i] == '_default':
                secontext[i] = default_secontext[i]

        return dict(
            path=path, mode=mode, owner=owner, group=group,
            seuser=seuser, serole=serole, setype=setype,
            selevel=selevel, secontext=secontext,
        )
# Detect whether using selinux that is MLS-aware.
# While this means you can set the level/range with
# selinux.lsetfilecon(), it may or may not mean that you
# will get the selevel as part of the context returned
# by selinux.lgetfilecon().
def selinux_mls_enabled(self):
if not HAVE_SELINUX:
return False
if selinux.is_selinux_mls_enabled() == 1:
return True
else:
return False
    def selinux_enabled(self):
        # Return True when selinux is enabled on this host.  If the python
        # bindings are missing but the 'selinuxenabled' binary reports
        # enforcement, fail the module: contexts cannot be managed without
        # the bindings.
        if not HAVE_SELINUX:
            seenabled = self.get_bin_path('selinuxenabled')
            if seenabled is not None:
                (rc,out,err) = self.run_command(seenabled)
                if rc == 0:
                    self.fail_json(msg="Aborting, target uses selinux but python bindings (libselinux-python) aren't installed!")
            return False
        if selinux.is_selinux_enabled() == 1:
            return True
        else:
            return False
# Determine whether we need a placeholder for selevel/mls
def selinux_initial_context(self):
context = [None, None, None]
if self.selinux_mls_enabled():
context.append(None)
return context
    def _to_filesystem_str(self, path):
        '''Returns filesystem path as a str, if it wasn't already.

        Used in selinux interactions because it cannot accept unicode
        instances, and specifying complex args in a playbook leaves
        you with unicode instances.  This method currently assumes
        that your filesystem encoding is UTF-8.

        '''
        # str inputs pass through unchanged
        if isinstance(path, unicode):
            path = path.encode("utf-8")
        return path
# If selinux fails to find a default, return an array of None
    def selinux_default_context(self, path, mode=0):
        # Ask libselinux for the default context of *path*; on any failure
        # return the all-None placeholder context instead of raising.
        context = self.selinux_initial_context()
        if not HAVE_SELINUX or not self.selinux_enabled():
            return context
        try:
            ret = selinux.matchpathcon(self._to_filesystem_str(path), mode)
        except OSError:
            return context
        if ret[0] == -1:
            return context
        # Limit split to 4 because the selevel, the last in the list,
        # may contain ':' characters
        context = ret[1].split(':', 3)
        return context
    def selinux_context(self, path):
        # Return the current selinux context of *path* as a list, or the
        # all-None placeholder when selinux is unavailable.  Fails the module
        # when the path is missing or the context cannot be read.
        context = self.selinux_initial_context()
        if not HAVE_SELINUX or not self.selinux_enabled():
            return context
        try:
            ret = selinux.lgetfilecon_raw(self._to_filesystem_str(path))
        except OSError, e:
            if e.errno == errno.ENOENT:
                self.fail_json(path=path, msg='path %s does not exist' % path)
            else:
                self.fail_json(path=path, msg='failed to retrieve selinux context')
        if ret[0] == -1:
            return context
        # Limit split to 4 because the selevel, the last in the list,
        # may contain ':' characters
        context = ret[1].split(':', 3)
        return context
def user_and_group(self, filename):
filename = os.path.expanduser(filename)
st = os.lstat(filename)
uid = st.st_uid
gid = st.st_gid
return (uid, gid)
def find_mount_point(self, path):
path = os.path.abspath(os.path.expanduser(os.path.expandvars(path)))
while not os.path.ismount(path):
path = os.path.dirname(path)
return path
    def is_special_selinux_path(self, path):
        """
        Returns a tuple containing (True, selinux_context) if the given path is on a
        NFS or other 'special' fs  mount point, otherwise the return will be (False, None).

        'Special' filesystems are those listed in the build-time
        SELINUX_SPECIAL_FS constant (comma-separated substring match against
        the fstype column of /proc/mounts).
        """
        try:
            f = open('/proc/mounts', 'r')
            mount_data = f.readlines()
            f.close()
        except:
            # /proc/mounts unreadable: treat nothing as special
            return (False, None)
        path_mount_point = self.find_mount_point(path)
        for line in mount_data:
            (device, mount_point, fstype, options, rest) = line.split(' ', 4)

            if path_mount_point == mount_point:
                for fs in SELINUX_SPECIAL_FS.split(','):
                    if fs in fstype:
                        special_context = self.selinux_context(path_mount_point)
                        return (True, special_context)

        return (False, None)
def set_default_selinux_context(self, path, changed):
if not HAVE_SELINUX or not self.selinux_enabled():
return changed
context = self.selinux_default_context(path)
return self.set_context_if_different(path, context, False)
    def set_context_if_different(self, path, context, changed):
        # Apply *context* (a [user, role, type(, level)] list; None entries
        # keep the current value) to *path* when it differs from the current
        # context.  Returns the updated changed flag; in check mode reports
        # True without touching the filesystem.

        if not HAVE_SELINUX or not self.selinux_enabled():
            return changed
        cur_context = self.selinux_context(path)
        new_context = list(cur_context)
        # Iterate over the current context instead of the
        # argument context, which may have selevel.

        (is_special_se, sp_context) = self.is_special_selinux_path(path)
        if is_special_se:
            # special filesystems (e.g. per SELINUX_SPECIAL_FS) force their
            # mount-point context
            new_context = sp_context
        else:
            for i in range(len(cur_context)):
                if len(context) > i:
                    if context[i] is not None and context[i] != cur_context[i]:
                        new_context[i] = context[i]
                    elif context[i] is None:
                        new_context[i] = cur_context[i]

        if cur_context != new_context:
            try:
                if self.check_mode:
                    return True
                rc = selinux.lsetfilecon(self._to_filesystem_str(path),
                                         str(':'.join(new_context)))
            except OSError, e:
                self.fail_json(path=path, msg='invalid selinux context: %s' % str(e), new_context=new_context, cur_context=cur_context, input_was=context)
            if rc != 0:
                self.fail_json(path=path, msg='set selinux context failed')
            changed = True
        return changed
def set_owner_if_different(self, path, owner, changed):
path = os.path.expanduser(path)
if owner is None:
return changed
orig_uid, orig_gid = self.user_and_group(path)
try:
uid = int(owner)
except ValueError:
try:
uid = pwd.getpwnam(owner).pw_uid
except KeyError:
self.fail_json(path=path, msg='chown failed: failed to look up user %s' % owner)
if orig_uid != uid:
if self.check_mode:
return True
try:
os.lchown(path, uid, -1)
except OSError:
self.fail_json(path=path, msg='chown failed')
changed = True
return changed
def set_group_if_different(self, path, group, changed):
path = os.path.expanduser(path)
if group is None:
return changed
orig_uid, orig_gid = self.user_and_group(path)
try:
gid = int(group)
except ValueError:
try:
gid = grp.getgrnam(group).gr_gid
except KeyError:
self.fail_json(path=path, msg='chgrp failed: failed to look up group %s' % group)
if orig_gid != gid:
if self.check_mode:
return True
try:
os.lchown(path, -1, gid)
except OSError:
self.fail_json(path=path, msg='chgrp failed')
changed = True
return changed
    def set_mode_if_different(self, path, mode, changed):
        # Chmod *path* to *mode* (int, octal string like '0644', or symbolic
        # like 'u+rwx') when the current mode differs.  Returns the updated
        # changed flag; in check mode reports True without touching the
        # filesystem.
        path = os.path.expanduser(path)
        path_stat = os.lstat(path)

        if mode is None:
            return changed

        if not isinstance(mode, int):
            try:
                # string modes are interpreted as octal first
                mode = int(mode, 8)
            except Exception:
                try:
                    mode = self._symbolic_mode_to_octal(path_stat, mode)
                except Exception, e:
                    self.fail_json(path=path,
                                   msg="mode must be in octal or symbolic form",
                                   details=str(e))

        prev_mode = stat.S_IMODE(path_stat.st_mode)

        if prev_mode != mode:
            if self.check_mode:
                return True
            # FIXME: comparison against string above will cause this to be executed
            # every time
            try:
                if hasattr(os, 'lchmod'):
                    os.lchmod(path, mode)
                else:
                    if not os.path.islink(path):
                        os.chmod(path, mode)
                    else:
                        # Attempt to set the perms of the symlink but be
                        # careful not to change the perms of the underlying
                        # file while trying
                        underlying_stat = os.stat(path)
                        os.chmod(path, mode)
                        new_underlying_stat = os.stat(path)
                        if underlying_stat.st_mode != new_underlying_stat.st_mode:
                            os.chmod(path, stat.S_IMODE(underlying_stat.st_mode))
            except OSError, e:
                if os.path.islink(path) and e.errno == errno.EPERM:  # Can't set mode on symbolic links
                    pass
                elif e.errno in (errno.ENOENT, errno.ELOOP): # Can't set mode on broken symbolic links
                    pass
                else:
                    raise e
            except Exception, e:
                self.fail_json(path=path, msg='chmod failed', details=str(e))

            # re-stat to report what actually took effect
            path_stat = os.lstat(path)
            new_mode = stat.S_IMODE(path_stat.st_mode)

            if new_mode != prev_mode:
                changed = True
        return changed
def _symbolic_mode_to_octal(self, path_stat, symbolic_mode):
new_mode = stat.S_IMODE(path_stat.st_mode)
mode_re = re.compile(r'^(?P<users>[ugoa]+)(?P<operator>[-+=])(?P<perms>[rwxXst]*|[ugo])$')
for mode in symbolic_mode.split(','):
match = mode_re.match(mode)
if match:
users = match.group('users')
operator = match.group('operator')
perms = match.group('perms')
if users == 'a':
users = 'ugo'
for user in users:
mode_to_apply = self._get_octal_mode_from_symbolic_perms(path_stat, user, perms)
new_mode = self._apply_operation_to_mode(user, operator, mode_to_apply, new_mode)
else:
raise ValueError("bad symbolic permission for mode: %s" % mode)
return new_mode
    def _apply_operation_to_mode(self, user, operator, mode_to_apply, current_mode):
        # Combine *mode_to_apply* into *current_mode* per the symbolic-mode
        # operator: '=' replaces that user class's bits, '+' adds, '-' clears.
        # *user* is always one of 'u'/'g'/'o' (callers expand 'a' first).
        if operator  ==  '=':
            if user == 'u': mask = stat.S_IRWXU | stat.S_ISUID
            elif user == 'g': mask = stat.S_IRWXG | stat.S_ISGID
            elif user == 'o': mask = stat.S_IRWXO | stat.S_ISVTX

            # mask out u, g, or o permissions from current_mode and apply new permissions
            inverse_mask = mask ^ 07777
            new_mode = (current_mode & inverse_mask) | mode_to_apply
        elif operator == '+':
            new_mode = current_mode | mode_to_apply
        elif operator == '-':
            new_mode = current_mode - (current_mode & mode_to_apply)
        return new_mode
    def _get_octal_mode_from_symbolic_perms(self, path_stat, user, perms):
        # Translate the perms part of one symbolic clause (e.g. 'rwX', 's',
        # or a copy spec like 'g') into the octal bits for *user*
        # ('u'/'g'/'o'), based on the current permissions in *path_stat*.
        prev_mode = stat.S_IMODE(path_stat.st_mode)

        is_directory = stat.S_ISDIR(path_stat.st_mode)
        has_x_permissions = (prev_mode & 00111) > 0
        apply_X_permission = is_directory or has_x_permissions

        # Permission bits constants documented at:
        # http://docs.python.org/2/library/stat.html#stat.S_ISUID
        if apply_X_permission:
            X_perms = {
                'u': {'X': stat.S_IXUSR},
                'g': {'X': stat.S_IXGRP},
                'o': {'X': stat.S_IXOTH}
            }
        else:
            # 'X' grants execute only for dirs or already-executable files
            X_perms = {
                'u': {'X': 0},
                'g': {'X': 0},
                'o': {'X': 0}
            }

        # per-class bit values; 'u'/'g'/'o' entries copy the corresponding
        # class's current bits, shifted into this class's position
        user_perms_to_modes = {
            'u': {
                'r': stat.S_IRUSR,
                'w': stat.S_IWUSR,
                'x': stat.S_IXUSR,
                's': stat.S_ISUID,
                't': 0,
                'u': prev_mode & stat.S_IRWXU,
                'g': (prev_mode & stat.S_IRWXG) << 3,
                'o': (prev_mode & stat.S_IRWXO) << 6 },
            'g': {
                'r': stat.S_IRGRP,
                'w': stat.S_IWGRP,
                'x': stat.S_IXGRP,
                's': stat.S_ISGID,
                't': 0,
                'u': (prev_mode & stat.S_IRWXU) >> 3,
                'g': prev_mode & stat.S_IRWXG,
                'o': (prev_mode & stat.S_IRWXO) << 3 },
            'o': {
                'r': stat.S_IROTH,
                'w': stat.S_IWOTH,
                'x': stat.S_IXOTH,
                's': 0,
                't': stat.S_ISVTX,
                'u': (prev_mode & stat.S_IRWXU) >> 6,
                'g': (prev_mode & stat.S_IRWXG) >> 3,
                'o': prev_mode & stat.S_IRWXO }
        }

        # Insert X_perms into user_perms_to_modes
        for key, value in X_perms.items():
            user_perms_to_modes[key].update(value)

        # OR together the bit value of every requested perm character
        or_reduce = lambda mode, perm: mode | user_perms_to_modes[user][perm]
        return reduce(or_reduce, perms, 0)
def set_fs_attributes_if_different(self, file_args, changed):
# set modes owners and context as needed
changed = self.set_context_if_different(
file_args['path'], file_args['secontext'], changed
)
changed = self.set_owner_if_different(
file_args['path'], file_args['owner'], changed
)
changed = self.set_group_if_different(
file_args['path'], file_args['group'], changed
)
changed = self.set_mode_if_different(
file_args['path'], file_args['mode'], changed
)
return changed
    def set_directory_attributes_if_different(self, file_args, changed):
        # Directories take the same attribute handling as any other fs object.
        return self.set_fs_attributes_if_different(file_args, changed)
    def set_file_attributes_if_different(self, file_args, changed):
        # Regular files take the same attribute handling as any other fs object.
        return self.set_fs_attributes_if_different(file_args, changed)
    def add_path_info(self, kwargs):
        '''
        for results that are files, supplement the info about the file
        in the return path with stats about the file path.

        Adds uid/gid, owner/group names, octal mode string, state
        (file/directory/link/hard/absent) and, when selinux is enabled,
        secontext and size.  Returns the augmented kwargs dict.
        '''

        path = kwargs.get('path', kwargs.get('dest', None))
        if path is None:
            return kwargs
        if os.path.exists(path):
            (uid, gid) = self.user_and_group(path)
            kwargs['uid'] = uid
            kwargs['gid'] = gid
            # fall back to the numeric ids when no passwd/group entry exists
            try:
                user = pwd.getpwuid(uid)[0]
            except KeyError:
                user = str(uid)
            try:
                group = grp.getgrgid(gid)[0]
            except KeyError:
                group = str(gid)
            kwargs['owner'] = user
            kwargs['group'] = group
            st = os.lstat(path)
            kwargs['mode']  = oct(stat.S_IMODE(st[stat.ST_MODE]))
            # secontext not yet supported
            if os.path.islink(path):
                kwargs['state'] = 'link'
            elif os.path.isdir(path):
                kwargs['state'] = 'directory'
            elif os.stat(path).st_nlink > 1:
                kwargs['state'] = 'hard'
            else:
                kwargs['state'] = 'file'
            if HAVE_SELINUX and self.selinux_enabled():
                kwargs['secontext'] = ':'.join(self.selinux_context(path))
            kwargs['size'] = st[stat.ST_SIZE]
        else:
            kwargs['state'] = 'absent'
        return kwargs
    def _check_locale(self):
        '''
        Uses the locale module to test the currently set locale
        (per the LANG and LC_CTYPE environment settings)

        Falls back to the 'C' locale (and exports it for any child
        processes) when the environment's locale is unavailable.
        '''
        try:
            # setting the locale to '' uses the default locale
            # as it would be returned by locale.getdefaultlocale()
            locale.setlocale(locale.LC_ALL, '')
        except locale.Error, e:
            # fallback to the 'C' locale, which may cause unicode
            # issues but is preferable to simply failing because
            # of an unknown locale
            locale.setlocale(locale.LC_ALL, 'C')
            os.environ['LANG']        = 'C'
            os.environ['LC_CTYPE']    = 'C'
            os.environ['LC_MESSAGES'] = 'C'
        except Exception, e:
            self.fail_json(msg="An unknown error was encountered while attempting to validate the locale: %s" % e)
    def _handle_aliases(self):
        # Register every option and its aliases as legal inputs, copy any
        # alias value onto the canonical param name, and return an
        # alias -> canonical-name mapping.
        aliases_results = {} #alias:canon
        for (k,v) in self.argument_spec.iteritems():
            self._legal_inputs.append(k)
            aliases = v.get('aliases', None)
            default = v.get('default', None)
            required = v.get('required', False)
            if default is not None and required:
                # not alias specific but this is a good place to check this
                self.fail_json(msg="internal error: required and default are mutually exclusive for %s" % k)
            if aliases is None:
                continue
            if type(aliases) != list:
                self.fail_json(msg='internal error: aliases must be a list')
            for alias in aliases:
                self._legal_inputs.append(alias)
                aliases_results[alias] = k
                if alias in self.params:
                    self.params[k] = self.params[alias]

        return aliases_results
    def _check_for_check_mode(self):
        # Honor the internal '_ansible_check_mode' parameter injected by the
        # runner; modules that don't support check mode exit as skipped.
        for (k,v) in self.params.iteritems():
            if k == '_ansible_check_mode' and v:
                if not self.supports_check_mode:
                    self.exit_json(skipped=True, msg="remote module does not support check mode")
                self.check_mode = True
                break
    def _check_for_no_log(self):
        # Honor the internal '_ansible_no_log' parameter injected by the
        # runner; boolean() normalizes the incoming value.
        for (k,v) in self.params.iteritems():
            if k == '_ansible_no_log':
                self.no_log = self.boolean(v)
    def _check_invalid_arguments(self):
        # Fail on any supplied parameter not registered in _legal_inputs
        # (populated from the argument_spec and its aliases).
        for (k,v) in self.params.iteritems():
            # these should be in legal inputs already
            #if k in ('_ansible_check_mode', '_ansible_no_log'):
            #    continue
            if k not in self._legal_inputs:
                self.fail_json(msg="unsupported parameter for module: %s" % k)
def _count_terms(self, check):
count = 0
for term in check:
if term in self.params:
count += 1
return count
def _check_mutually_exclusive(self, spec):
if spec is None:
return
for check in spec:
count = self._count_terms(check)
if count > 1:
self.fail_json(msg="parameters are mutually exclusive: %s" % (check,))
def _check_required_one_of(self, spec):
if spec is None:
return
for check in spec:
count = self._count_terms(check)
if count == 0:
self.fail_json(msg="one of the following is required: %s" % ','.join(check))
def _check_required_together(self, spec):
if spec is None:
return
for check in spec:
counts = [ self._count_terms([field]) for field in check ]
non_zero = [ c for c in counts if c > 0 ]
if len(non_zero) > 0:
if 0 in counts:
self.fail_json(msg="parameters are required together: %s" % (check,))
    def _check_required_arguments(self):
        ''' ensure all required arguments are present '''
        missing = []
        for (k,v) in self.argument_spec.iteritems():
            required = v.get('required', False)
            if required and k not in self.params:
                missing.append(k)
        if len(missing) > 0:
            self.fail_json(msg="missing required arguments: %s" % ",".join(missing))
def _check_required_if(self, spec):
''' ensure that parameters which conditionally required are present '''
if spec is None:
return
for (key, val, requirements) in spec:
missing = []
if key in self.params and self.params[key] == val:
for check in requirements:
count = self._count_terms((check,))
if count == 0:
missing.append(check)
if len(missing) > 0:
self.fail_json(msg="%s is %s but the following are missing: %s" % (key, val, ','.join(missing)))
    def _check_argument_values(self):
        ''' ensure all arguments have the requested values, and there are no stray arguments '''
        for (k,v) in self.argument_spec.iteritems():
            choices = v.get('choices',None)
            if choices is None:
                continue
            if type(choices) == list:
                if k in self.params:
                    if self.params[k] not in choices:
                        choices_str=",".join([str(c) for c in choices])
                        msg="value of %s must be one of: %s, got: %s" % (k, choices_str, self.params[k])
                        self.fail_json(msg=msg)
            else:
                # choices must be a list; any other type is a module bug
                self.fail_json(msg="internal error: do not know how to interpret argument_spec")
    def safe_eval(self, str, locals=None, include_exceptions=False):
        # Evaluate a string as a Python literal after refusing anything that
        # looks like a method call or an import.  Returns the value (or the
        # original string on failure); with include_exceptions=True returns
        # a (value, exception) tuple instead.

        # do not allow method calls to modules
        if not isinstance(str, basestring):
            # already templated to a datastructure, perhaps?
            if include_exceptions:
                return (str, None)
            return str
        if re.search(r'\w\.\w+\(', str):
            if include_exceptions:
                return (str, None)
            return str
        # do not allow imports
        if re.search(r'import \w+', str):
            if include_exceptions:
                return (str, None)
            return str
        try:
            result = None
            if not locals:
                result = _literal_eval(str)
            else:
                # NOTE(review): _literal_eval/ast.literal_eval accepts a
                # single argument; this three-argument call raises TypeError
                # and falls into the handler below — confirm intended.
                result = _literal_eval(str, None, locals)
            if include_exceptions:
                return (result, None)
            else:
                return result
        except Exception, e:
            if include_exceptions:
                return (str, e)
            return str
    def _check_type_str(self, value):
        # Coerce any value to a string; existing strings pass through.
        if isinstance(value, basestring):
            return value
        # Note: This could throw a unicode error if value's __str__() method
        # returns non-ascii.  Have to port utils.to_bytes() if that happens
        return str(value)
    def _check_type_list(self, value):
        # Coerce to a list: strings are split on commas, numbers are wrapped
        # as a single-element string list; anything else is a TypeError.
        if isinstance(value, list):
            return value

        if isinstance(value, basestring):
            return value.split(",")
        elif isinstance(value, int) or isinstance(value, float):
            return [ str(value) ]

        raise TypeError('%s cannot be converted to a list' % type(value))
    def _check_type_dict(self, value):
        '''Coerce *value* to a dict.

        Accepts an actual dict, a JSON object string ("{...}"), or a
        key=value[, key=value ...] string; anything else raises TypeError.
        '''
        if isinstance(value, dict):
            return value
        if isinstance(value, basestring):
            if value.startswith("{"):
                try:
                    return json.loads(value)
                except:
                    # Not valid JSON -- fall back to a restricted Python
                    # literal evaluation, e.g. "{'a': 1}".
                    (result, exc) = self.safe_eval(value, dict(), include_exceptions=True)
                    if exc is not None:
                        raise TypeError('unable to evaluate string as dictionary')
                    return result
            elif '=' in value:
                # Hand-rolled tokenizer: split on commas/spaces, honouring
                # backslash escapes and single/double quoting.
                fields = []
                field_buffer = []
                in_quote = False
                in_escape = False
                for c in value.strip():
                    if in_escape:
                        field_buffer.append(c)
                        in_escape = False
                    elif c == '\\':
                        in_escape = True
                    elif not in_quote and c in ('\'', '"'):
                        # remember which quote character opened the run
                        in_quote = c
                    elif in_quote and in_quote == c:
                        in_quote = False
                    elif not in_quote and c in (',', ' '):
                        field = ''.join(field_buffer)
                        if field:
                            fields.append(field)
                        field_buffer = []
                    else:
                        field_buffer.append(c)
                # flush the trailing field
                field = ''.join(field_buffer)
                if field:
                    fields.append(field)
                return dict(x.split("=", 1) for x in fields)
            else:
                raise TypeError("dictionary requested, could not parse JSON or key=value")
        raise TypeError('%s cannot be converted to a dict' % type(value))
    def _check_type_bool(self, value):
        '''Coerce *value* to a bool; strings go through self.boolean().'''
        if isinstance(value, bool):
            return value
        if isinstance(value, basestring):
            return self.boolean(value)
        raise TypeError('%s cannot be converted to a bool' % type(value))
    def _check_type_int(self, value):
        '''Coerce *value* to an int (int() may raise ValueError, which the
        caller maps to a module failure).'''
        if isinstance(value, int):
            return value
        if isinstance(value, basestring):
            return int(value)
        raise TypeError('%s cannot be converted to an int' % type(value))
    def _check_type_float(self, value):
        '''Coerce *value* to a float (float() may raise ValueError, which the
        caller maps to a module failure).'''
        if isinstance(value, float):
            return value
        if isinstance(value, basestring):
            return float(value)
        raise TypeError('%s cannot be converted to a float' % type(value))
    def _check_type_path(self, value):
        '''Coerce to a string and expand $ENV_VAR and ~user references.'''
        value = self._check_type_str(value)
        return os.path.expanduser(os.path.expandvars(value))
    def _check_argument_types(self):
        ''' ensure all arguments have the requested type '''
        for (k, v) in self.argument_spec.iteritems():
            wanted = v.get('type', None)
            if wanted is None:
                # untyped spec entries are accepted as-is
                continue
            if k not in self.params:
                continue
            value = self.params[k]
            try:
                type_checker = self._CHECK_ARGUMENT_TYPES_DISPATCHER[wanted]
            except KeyError:
                self.fail_json(msg="implementation error: unknown type %s requested for %s" % (wanted, k))
            try:
                # coerce in place so downstream code sees the converted value
                self.params[k] = type_checker(value)
            except (TypeError, ValueError):
                self.fail_json(msg="argument %s is of type %s and we were unable to convert to %s" % (k, type(value), wanted))
    def _set_defaults(self, pre=True):
        '''Apply argument_spec defaults to self.params.

        pre=True (pre-validation) fills only non-None defaults, so
        required-argument checking still fires for truly missing values;
        pre=False backfills None for anything still unset.
        '''
        for (k,v) in self.argument_spec.iteritems():
            default = v.get('default', None)
            if pre == True:
                # this prevents setting defaults on required items
                if default is not None and k not in self.params:
                    self.params[k] = default
            else:
                # make sure things without a default still get set None
                if k not in self.params:
                    self.params[k] = default
    def _load_params(self):
        ''' read the input and return a dictionary and the arguments string '''
        # MODULE_COMPLEX_ARGS is substituted into the module source by the
        # ansible runner; unicode values are converted to byte strings here.
        params = json_dict_unicode_to_bytes(json.loads(MODULE_COMPLEX_ARGS))
        if params is None:
            params = dict()
        return params
    def _log_invocation(self):
        '''Log the module invocation (parameters included) to journald when
        available, otherwise syslog, masking no_log and password values.'''
        # TODO: generalize a separate log function and make log_invocation use it
        # Sanitize possible password argument when logging.
        log_args = dict()
        passwd_keys = ['password', 'login_password']
        for param in self.params:
            # resolve aliases back to the canonical spec entry for no_log
            canon = self.aliases.get(param, param)
            arg_opts = self.argument_spec.get(canon, {})
            no_log = arg_opts.get('no_log', False)
            if self.boolean(no_log):
                log_args[param] = 'NOT_LOGGING_PARAMETER'
            elif param in passwd_keys:
                log_args[param] = 'NOT_LOGGING_PASSWORD'
            else:
                param_val = self.params[param]
                if not isinstance(param_val, basestring):
                    param_val = str(param_val)
                elif isinstance(param_val, unicode):
                    param_val = param_val.encode('utf-8')
                log_args[param] = heuristic_log_sanitize(param_val)
        module = 'ansible-%s' % os.path.basename(__file__)
        # build 'key=value ' pairs for the one-line log message
        msg = []
        for arg in log_args:
            arg_val = log_args[arg]
            if not isinstance(arg_val, basestring):
                arg_val = str(arg_val)
            elif isinstance(arg_val, unicode):
                arg_val = arg_val.encode('utf-8')
            msg.append('%s=%s ' % (arg, arg_val))
        if msg:
            msg = 'Invoked with %s' % ''.join(msg)
        else:
            msg = 'Invoked'
        # 6655 - allow for accented characters
        if isinstance(msg, unicode):
            # We should never get here as msg should be type str, not unicode
            msg = msg.encode('utf-8')
        if (has_journal):
            journal_args = [("MODULE", os.path.basename(__file__))]
            for arg in log_args:
                journal_args.append((arg.upper(), str(log_args[arg])))
            try:
                journal.send("%s %s" % (module, msg), **dict(journal_args))
            except IOError:
                # fall back to syslog since logging to journal failed
                syslog.openlog(str(module), 0, syslog.LOG_USER)
                syslog.syslog(syslog.LOG_INFO, msg) #1
        else:
            syslog.openlog(str(module), 0, syslog.LOG_USER)
            syslog.syslog(syslog.LOG_INFO, msg) #2
    def _set_cwd(self):
        '''Ensure the working directory is accessible, falling back to $HOME
        or the system temp dir; returns the directory used, or None.'''
        try:
            cwd = os.getcwd()
            if not os.access(cwd, os.F_OK|os.R_OK):
                # deliberate bare raise: jumps to the except fallback below
                raise
            return cwd
        except:
            # we don't have access to the cwd, probably because of sudo.
            # Try and move to a neutral location to prevent errors
            for cwd in [os.path.expandvars('$HOME'), tempfile.gettempdir()]:
                try:
                    if os.access(cwd, os.F_OK|os.R_OK):
                        os.chdir(cwd)
                        return cwd
                except:
                    pass
        # we won't error here, as it may *not* be a problem,
        # and we don't want to break modules unnecessarily
        return None
def get_bin_path(self, arg, required=False, opt_dirs=[]):
'''
find system executable in PATH.
Optional arguments:
- required: if executable is not found and required is true, fail_json
- opt_dirs: optional list of directories to search in addition to PATH
if found return full path; otherwise return None
'''
sbin_paths = ['/sbin', '/usr/sbin', '/usr/local/sbin']
paths = []
for d in opt_dirs:
if d is not None and os.path.exists(d):
paths.append(d)
paths += os.environ.get('PATH', '').split(os.pathsep)
bin_path = None
# mangle PATH to include /sbin dirs
for p in sbin_paths:
if p not in paths and os.path.exists(p):
paths.append(p)
for d in paths:
path = os.path.join(d, arg)
if os.path.exists(path) and self.is_executable(path):
bin_path = path
break
if required and bin_path is None:
self.fail_json(msg='Failed to find required executable %s' % arg)
return bin_path
    def boolean(self, arg):
        ''' return a bool for the arg '''
        # None and real bools pass through untouched
        if arg is None or type(arg) == bool:
            return arg
        if type(arg) in types.StringTypes:
            arg = arg.lower()
        if arg in BOOLEANS_TRUE:
            return True
        elif arg in BOOLEANS_FALSE:
            return False
        else:
            # value is in neither accepted list: abort the module
            self.fail_json(msg='Boolean %s not in either boolean list' % arg)
    def jsonify(self, data):
        '''Serialize *data* to JSON, trying utf-8 then latin-1 for byte
        strings embedded in the structure; fails the module if neither works.'''
        for encoding in ("utf-8", "latin-1"):
            try:
                return json.dumps(data, encoding=encoding)
            # Old systems using old simplejson module does not support encoding keyword.
            except TypeError:
                try:
                    # decode byte strings ourselves, then dump without encoding=
                    new_data = json_dict_bytes_to_unicode(data, encoding=encoding)
                except UnicodeDecodeError:
                    continue
                return json.dumps(new_data)
            except UnicodeDecodeError:
                continue
        self.fail_json(msg='Invalid unicode encoding encountered')
    def from_json(self, data):
        '''Parse a JSON string and return the resulting data structure.'''
        return json.loads(data)
def add_cleanup_file(self, path):
if path not in self.cleanup_files:
self.cleanup_files.append(path)
    def do_cleanup_files(self):
        '''Remove every file registered via add_cleanup_file().'''
        for path in self.cleanup_files:
            self.cleanup(path)
    def exit_json(self, **kwargs):
        ''' return from the module, without error '''
        self.add_path_info(kwargs)
        # 'changed' is always reported; default to False when unset
        if not 'changed' in kwargs:
            kwargs['changed'] = False
        self.do_cleanup_files()
        # result is printed as JSON on stdout for the ansible runner to parse
        print self.jsonify(kwargs)
        sys.exit(0)
    def fail_json(self, **kwargs):
        ''' return from the module, with an error message '''
        self.add_path_info(kwargs)
        assert 'msg' in kwargs, "implementation error -- msg to explain the error is required"
        kwargs['failed'] = True
        self.do_cleanup_files()
        # result is printed as JSON on stdout for the ansible runner to parse
        print self.jsonify(kwargs)
        sys.exit(1)
def is_executable(self, path):
'''is the given path executable?'''
return (stat.S_IXUSR & os.stat(path)[stat.ST_MODE]
or stat.S_IXGRP & os.stat(path)[stat.ST_MODE]
or stat.S_IXOTH & os.stat(path)[stat.ST_MODE])
def digest_from_file(self, filename, algorithm):
''' Return hex digest of local file for a digest_method specified by name, or None if file is not present. '''
if not os.path.exists(filename):
return None
if os.path.isdir(filename):
self.fail_json(msg="attempted to take checksum of directory: %s" % filename)
# preserve old behaviour where the third parameter was a hash algorithm object
if hasattr(algorithm, 'hexdigest'):
digest_method = algorithm
else:
try:
digest_method = AVAILABLE_HASH_ALGORITHMS[algorithm]()
except KeyError:
self.fail_json(msg="Could not hash file '%s' with algorithm '%s'. Available algorithms: %s" %
(filename, algorithm, ', '.join(AVAILABLE_HASH_ALGORITHMS)))
blocksize = 64 * 1024
infile = open(filename, 'rb')
block = infile.read(blocksize)
while block:
digest_method.update(block)
block = infile.read(blocksize)
infile.close()
return digest_method.hexdigest()
    def md5(self, filename):
        ''' Return MD5 hex digest of local file using digest_from_file().
        Do not use this function unless you have no other choice for:
            1) Optional backwards compatibility
            2) Compatibility with a third party protocol
        This function will not work on systems complying with FIPS-140-2.
        Most uses of this function can use the module.sha1 function instead.
        '''
        # Guard explicitly so FIPS systems get a clear error instead of a
        # KeyError from inside digest_from_file().
        if 'md5' not in AVAILABLE_HASH_ALGORITHMS:
            raise ValueError('MD5 not available. Possibly running in FIPS mode')
        return self.digest_from_file(filename, 'md5')
    def sha1(self, filename):
        ''' Return SHA1 hex digest of local file using digest_from_file(),
        or None when the file does not exist. '''
        return self.digest_from_file(filename, 'sha1')
    def sha256(self, filename):
        ''' Return SHA-256 hex digest of local file using digest_from_file(),
        or None when the file does not exist. '''
        return self.digest_from_file(filename, 'sha256')
    def backup_local(self, fn):
        '''make a date-marked backup of the specified file, return True or False on success or failure'''
        # NOTE(review): despite the docstring above, this actually returns
        # the backup path ('' when fn does not exist), not True/False.
        backupdest = ''
        if os.path.exists(fn):
            # backups named basename-YYYY-MM-DD@HH:MM:SS~
            ext = time.strftime("%Y-%m-%d@%H:%M:%S~", time.localtime(time.time()))
            backupdest = '%s.%s' % (fn, ext)
            try:
                shutil.copy2(fn, backupdest)
            except (shutil.Error, IOError), e:
                self.fail_json(msg='Could not make backup of %s to %s: %s' % (fn, backupdest, e))
        return backupdest
    def cleanup(self, tmpfile):
        '''Best-effort unlink of *tmpfile*; failures only warn on stderr.'''
        if os.path.exists(tmpfile):
            try:
                os.unlink(tmpfile)
            except OSError, e:
                sys.stderr.write("could not cleanup %s: %s" % (tmpfile, e))
    def atomic_move(self, src, dest):
        '''atomically move src to dest, copying attributes from dest, returns true on success
        it uses os.rename to ensure this as it is an atomic operation, rest of the function is
        to work around limitations, corner cases and ensure selinux context is saved if possible'''
        context = None
        dest_stat = None
        if os.path.exists(dest):
            try:
                # give src the mode/ownership of the existing dest so the
                # rename does not change the file's apparent attributes
                dest_stat = os.stat(dest)
                os.chmod(src, dest_stat.st_mode & 07777)
                os.chown(src, dest_stat.st_uid, dest_stat.st_gid)
            except OSError, e:
                if e.errno != errno.EPERM:
                    raise
            if self.selinux_enabled():
                context = self.selinux_context(dest)
        else:
            if self.selinux_enabled():
                context = self.selinux_default_context(dest)
        creating = not os.path.exists(dest)
        try:
            login_name = os.getlogin()
        except OSError:
            # not having a tty can cause the above to fail, so
            # just get the LOGNAME environment variable instead
            login_name = os.environ.get('LOGNAME', None)
        # if the original login_name doesn't match the currently
        # logged-in user, or if the SUDO_USER environment variable
        # is set, then this user has switched their credentials
        switched_user = login_name and login_name != pwd.getpwuid(os.getuid())[0] or os.environ.get('SUDO_USER')
        try:
            # Optimistically try a rename, solves some corner cases and can avoid useless work, throws exception if not atomic.
            os.rename(src, dest)
        except (IOError,OSError), e:
            # only try workarounds for errno 18 (cross device), 1 (not permitted), 13 (permission denied)
            # and 26 (text file busy) which happens on vagrant synced folders
            if e.errno not in [errno.EPERM, errno.EXDEV, errno.EACCES, errno.ETXTBSY]:
                self.fail_json(msg='Could not replace file: %s to %s: %s' % (src, dest, e))
            dest_dir = os.path.dirname(dest)
            dest_file = os.path.basename(dest)
            try:
                # stage a copy next to dest so the final rename happens on a
                # single filesystem and therefore stays atomic
                tmp_dest = tempfile.NamedTemporaryFile(
                    prefix=".ansible_tmp", dir=dest_dir, suffix=dest_file)
            except (OSError, IOError), e:
                self.fail_json(msg='The destination directory (%s) is not writable by the current user.' % dest_dir)
            try: # leaves tmp file behind when sudo and  not root
                if switched_user and os.getuid() != 0:
                    # cleanup will happen by 'rm' of tempdir
                    # copy2 will preserve some metadata
                    shutil.copy2(src, tmp_dest.name)
                else:
                    shutil.move(src, tmp_dest.name)
                if self.selinux_enabled():
                    self.set_context_if_different(
                        tmp_dest.name, context, False)
                try:
                    tmp_stat = os.stat(tmp_dest.name)
                    if dest_stat and (tmp_stat.st_uid != dest_stat.st_uid or tmp_stat.st_gid != dest_stat.st_gid):
                        os.chown(tmp_dest.name, dest_stat.st_uid, dest_stat.st_gid)
                except OSError, e:
                    if e.errno != errno.EPERM:
                        raise
                os.rename(tmp_dest.name, dest)
            except (shutil.Error, OSError, IOError), e:
                self.cleanup(tmp_dest.name)
                self.fail_json(msg='Could not replace file: %s to %s: %s' % (src, dest, e))
        if creating:
            # make sure the file has the correct permissions
            # based on the current value of umask
            umask = os.umask(0)
            os.umask(umask)
            os.chmod(dest, 0666 & ~umask)
            if switched_user:
                os.chown(dest, os.getuid(), os.getgid())
        if self.selinux_enabled():
            # rename might not preserve context
            self.set_context_if_different(dest, context, False)
    def run_command(self, args, check_rc=False, close_fds=True, executable=None, data=None, binary_data=False, path_prefix=None, cwd=None, use_unsafe_shell=False, prompt_regex=None):
        '''
        Execute a command, returns rc, stdout, and stderr.
        args is the command to run
        If args is a list, the command will be run with shell=False.
        If args is a string and use_unsafe_shell=False it will split args to a list and run with shell=False
        If args is a string and use_unsafe_shell=True it run with shell=True.
        Other arguments:
        - check_rc (boolean)    Whether to call fail_json in case of
                                non zero RC.  Default is False.
        - close_fds (boolean)   See documentation for subprocess.Popen().
                                Default is True.
        - executable (string)   See documentation for subprocess.Popen().
                                Default is None.
        - prompt_regex (string) A regex string (not a compiled regex) which
                                can be used to detect prompts in the stdout
                                which would otherwise cause the execution
                                to hang (especially if no input data is
                                specified)
        '''
        shell = False
        if isinstance(args, list):
            if use_unsafe_shell:
                # quote each element so the joined string is shell-safe
                args = " ".join([pipes.quote(x) for x in args])
                shell = True
        elif isinstance(args, basestring) and use_unsafe_shell:
            shell = True
        elif isinstance(args, basestring):
            args = shlex.split(args.encode('utf-8'))
        else:
            msg = "Argument 'args' to run_command must be list or string"
            self.fail_json(rc=257, cmd=args, msg=msg)
        prompt_re = None
        if prompt_regex:
            try:
                prompt_re = re.compile(prompt_regex, re.MULTILINE)
            except re.error:
                self.fail_json(msg="invalid prompt regular expression given to run_command")
        # expand things like $HOME and ~
        if not shell:
            args = [ os.path.expandvars(os.path.expanduser(x)) for x in args ]
        rc = 0
        msg = None
        st_in = None
        # Set a temporary env path if a prefix is passed
        env=os.environ
        if path_prefix:
            env['PATH']="%s:%s" % (path_prefix, env['PATH'])
        # create a printable version of the command for use
        # in reporting later, which strips out things like
        # passwords from the args list
        if isinstance(args, basestring):
            if isinstance(args, unicode):
                b_args = args.encode('utf-8')
            else:
                b_args = args
            to_clean_args = shlex.split(b_args)
            del b_args
        else:
            to_clean_args = args
        clean_args = []
        is_passwd = False
        for arg in to_clean_args:
            if is_passwd:
                # previous arg was a --password-style flag: mask this value
                is_passwd = False
                clean_args.append('********')
                continue
            if PASSWD_ARG_RE.match(arg):
                sep_idx = arg.find('=')
                if sep_idx > -1:
                    clean_args.append('%s=********' % arg[:sep_idx])
                    continue
                else:
                    is_passwd = True
            clean_args.append(heuristic_log_sanitize(arg))
        clean_args = ' '.join(pipes.quote(arg) for arg in clean_args)
        if data:
            st_in = subprocess.PIPE
        kwargs = dict(
            executable=executable,
            shell=shell,
            close_fds=close_fds,
            stdin=st_in,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE
        )
        if path_prefix:
            kwargs['env'] = env
        if cwd and os.path.isdir(cwd):
            kwargs['cwd'] = cwd
        # store the pwd
        prev_dir = os.getcwd()
        # make sure we're in the right working directory
        if cwd and os.path.isdir(cwd):
            try:
                os.chdir(cwd)
            except (OSError, IOError), e:
                self.fail_json(rc=e.errno, msg="Could not open %s, %s" % (cwd, str(e)))
        try:
            cmd = subprocess.Popen(args, **kwargs)
            # the communication logic here is essentially taken from that
            # of the _communicate() function in ssh.py
            stdout = ''
            stderr = ''
            rpipes = [cmd.stdout, cmd.stderr]
            if data:
                if not binary_data:
                    data += '\n'
                cmd.stdin.write(data)
                cmd.stdin.close()
            while True:
                # select with a 1s timeout so prompt detection and process
                # polling keep running even when no output arrives
                rfd, wfd, efd = select.select(rpipes, [], rpipes, 1)
                if cmd.stdout in rfd:
                    dat = os.read(cmd.stdout.fileno(), 9000)
                    stdout += dat
                    if dat == '':
                        rpipes.remove(cmd.stdout)
                if cmd.stderr in rfd:
                    dat = os.read(cmd.stderr.fileno(), 9000)
                    stderr += dat
                    if dat == '':
                        rpipes.remove(cmd.stderr)
                # if we're checking for prompts, do it now
                if prompt_re:
                    if prompt_re.search(stdout) and not data:
                        return (257, stdout, "A prompt was encountered while running a command, but no input data was specified")
                # only break out if no pipes are left to read or
                # the pipes are completely read and
                # the process is terminated
                if (not rpipes or not rfd) and cmd.poll() is not None:
                    break
                # No pipes are left to read but process is not yet terminated
                # Only then it is safe to wait for the process to be finished
                # NOTE: Actually cmd.poll() is always None here if rpipes is empty
                elif not rpipes and cmd.poll() == None:
                    cmd.wait()
                    # The process is terminated. Since no pipes to read from are
                    # left, there is no need to call select() again.
                    break
            cmd.stdout.close()
            cmd.stderr.close()
            rc = cmd.returncode
        except (OSError, IOError), e:
            self.fail_json(rc=e.errno, msg=str(e), cmd=clean_args)
        except:
            self.fail_json(rc=257, msg=traceback.format_exc(), cmd=clean_args)
        if rc != 0 and check_rc:
            msg = heuristic_log_sanitize(stderr.rstrip())
            self.fail_json(cmd=clean_args, rc=rc, stdout=stdout, stderr=stderr, msg=msg)
        # reset the pwd
        os.chdir(prev_dir)
        return (rc, stdout, stderr)
def append_to_file(self, filename, str):
filename = os.path.expandvars(os.path.expanduser(filename))
fh = open(filename, 'a')
fh.write(str)
fh.close()
def pretty_bytes(self,size):
ranges = (
(1<<70L, 'ZB'),
(1<<60L, 'EB'),
(1<<50L, 'PB'),
(1<<40L, 'TB'),
(1<<30L, 'GB'),
(1<<20L, 'MB'),
(1<<10L, 'KB'),
(1, 'Bytes')
)
for limit, suffix in ranges:
if size >= limit:
break
return '%.2f %s' % (float(size)/ limit, suffix)
def get_module_path():
    """Return the directory containing this module, symlinks resolved."""
    here = os.path.realpath(__file__)
    return os.path.dirname(here)
|
unknown
|
codeparrot/codeparrot-clean
| ||
# testing/config.py
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import collections
# Global test-configuration state; populated by the test runner through
# Config.register() / Config.set_as_current().
requirements = None
# active database engine and its URL / options
db = None
db_url = None
db_opts = None
file_config = None
test_schema = None
test_schema_2 = None
# the Config instance currently in effect
_current = None
# exception class raised by skip_test(); injected by the test runner
_skip_test_exception = None
class Config(object):
    """Bundle of a database engine plus the options it was configured with.

    All known configurations are tracked in the class-level ``_configs``
    registry, and a stack allows tests to temporarily push/pop an alternate
    configuration; the "current" one is mirrored into this module's globals.
    """
    def __init__(self, db, db_opts, options, file_config):
        self.db = db
        self.db_opts = db_opts
        self.options = options
        self.file_config = file_config
        # default schema names used by schema-targeting tests
        self.test_schema = "test_schema"
        self.test_schema_2 = "test_schema_2"
    # class-level registries shared by all Config instances
    _stack = collections.deque()
    _configs = {}
    @classmethod
    def register(cls, db, db_opts, options, file_config):
        """add a config as one of the global configs.
        If there are no configs set up yet, this config also
        gets set as the "_current".
        """
        cfg = Config(db, db_opts, options, file_config)
        # registered under several keys so lookup works by engine name,
        # (name, dialect) tuple, or the engine object itself
        cls._configs[cfg.db.name] = cfg
        cls._configs[(cfg.db.name, cfg.db.dialect)] = cfg
        cls._configs[cfg.db] = cfg
        return cfg
    @classmethod
    def set_as_current(cls, config, namespace):
        # mirror the chosen config into this module's globals and into the
        # caller-supplied namespace object
        global db, _current, db_url, test_schema, test_schema_2, db_opts
        _current = config
        db_url = config.db.url
        db_opts = config.db_opts
        test_schema = config.test_schema
        test_schema_2 = config.test_schema_2
        namespace.db = db = config.db
    @classmethod
    def push_engine(cls, db, namespace):
        """Push a new Config for *db*, inheriting the current options."""
        assert _current, "Can't push without a default Config set up"
        cls.push(
            Config(
                db, _current.db_opts, _current.options, _current.file_config),
            namespace
        )
    @classmethod
    def push(cls, config, namespace):
        """Make *config* current, remembering the previous one on the stack."""
        cls._stack.append(_current)
        cls.set_as_current(config, namespace)
    @classmethod
    def reset(cls, namespace):
        """Restore the bottom-of-stack config and clear the stack."""
        if cls._stack:
            cls.set_as_current(cls._stack[0], namespace)
            cls._stack.clear()
    @classmethod
    def all_configs(cls):
        """Yield each distinct registered Config exactly once."""
        for cfg in set(cls._configs.values()):
            yield cfg
    @classmethod
    def all_dbs(cls):
        """Yield the engine of each distinct registered Config."""
        for cfg in cls.all_configs():
            yield cfg.db
    def skip_test(self, msg):
        # convenience mirror of the module-level skip_test()
        skip_test(msg)
def skip_test(msg):
    """Skip the current test by raising the runner-provided exception class."""
    raise _skip_test_exception(msg)
|
unknown
|
codeparrot/codeparrot-clean
| ||
/*
Copyright 2014 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package securitycontext
import (
"reflect"
"testing"
v1 "k8s.io/api/core/v1"
"k8s.io/utils/ptr"
)
// TestAddNoNewPrivileges verifies that no_new_privs is requested exactly
// when a security context explicitly sets allowPrivilegeEscalation=false;
// nil contexts, nil fields, and true all yield false.
func TestAddNoNewPrivileges(t *testing.T) {
	pfalse := false
	ptrue := true
	tests := map[string]struct {
		sc     *v1.SecurityContext
		expect bool
	}{
		"allowPrivilegeEscalation nil security context nil": {
			sc:     nil,
			expect: false,
		},
		"allowPrivilegeEscalation nil": {
			sc: &v1.SecurityContext{
				AllowPrivilegeEscalation: nil,
			},
			expect: false,
		},
		"allowPrivilegeEscalation false": {
			sc: &v1.SecurityContext{
				AllowPrivilegeEscalation: &pfalse,
			},
			expect: true,
		},
		"allowPrivilegeEscalation true": {
			sc: &v1.SecurityContext{
				AllowPrivilegeEscalation: &ptrue,
			},
			expect: false,
		},
	}
	for k, v := range tests {
		actual := AddNoNewPrivileges(v.sc)
		if actual != v.expect {
			t.Errorf("%s failed, expected %t but received %t", k, v.expect, actual)
		}
	}
}
// TestConvertToRuntimeMaskedPaths verifies that only UnmaskedProcMount
// clears the default set of masked /proc paths; nil and DefaultProcMount
// both keep the defaults.
func TestConvertToRuntimeMaskedPaths(t *testing.T) {
	dPM := v1.DefaultProcMount
	uPM := v1.UnmaskedProcMount
	tests := map[string]struct {
		pm     *v1.ProcMountType
		expect []string
	}{
		"procMount nil": {
			pm:     nil,
			expect: defaultMaskedPaths(),
		},
		"procMount default": {
			pm:     &dPM,
			expect: defaultMaskedPaths(),
		},
		"procMount unmasked": {
			pm:     &uPM,
			expect: []string{},
		},
	}
	for k, v := range tests {
		actual := ConvertToRuntimeMaskedPaths(v.pm)
		if !reflect.DeepEqual(actual, v.expect) {
			t.Errorf("%s failed, expected %#v but received %#v", k, v.expect, actual)
		}
	}
}
// TestConvertToRuntimeReadonlyPaths verifies that only UnmaskedProcMount
// clears the default set of read-only paths; nil and DefaultProcMount
// both keep the defaults.
func TestConvertToRuntimeReadonlyPaths(t *testing.T) {
	dPM := v1.DefaultProcMount
	uPM := v1.UnmaskedProcMount
	tests := map[string]struct {
		pm     *v1.ProcMountType
		expect []string
	}{
		"procMount nil": {
			pm:     nil,
			expect: defaultReadonlyPaths,
		},
		"procMount default": {
			pm:     &dPM,
			expect: defaultReadonlyPaths,
		},
		"procMount unmasked": {
			pm:     &uPM,
			expect: []string{},
		},
	}
	for k, v := range tests {
		actual := ConvertToRuntimeReadonlyPaths(v.pm)
		if !reflect.DeepEqual(actual, v.expect) {
			t.Errorf("%s failed, expected %#v but received %#v", k, v.expect, actual)
		}
	}
}
// TestDetermineEffectiveRunAsUser covers the pod/container precedence rules:
// a container-level runAsUser, when set, overrides the pod-level one.
func TestDetermineEffectiveRunAsUser(t *testing.T) {
	tests := []struct {
		desc          string
		pod           *v1.Pod
		container     *v1.Container
		wantRunAsUser *int64
	}{
		{
			desc: "no securityContext in pod, no securityContext in container",
			pod: &v1.Pod{
				Spec: v1.PodSpec{},
			},
			container:     &v1.Container{},
			wantRunAsUser: nil,
		},
		{
			desc: "no runAsUser in pod, no runAsUser in container",
			pod: &v1.Pod{
				Spec: v1.PodSpec{
					SecurityContext: &v1.PodSecurityContext{},
				},
			},
			container: &v1.Container{
				SecurityContext: &v1.SecurityContext{},
			},
			wantRunAsUser: nil,
		},
		{
			desc: "runAsUser in pod, no runAsUser in container",
			pod: &v1.Pod{
				Spec: v1.PodSpec{
					SecurityContext: &v1.PodSecurityContext{
						RunAsUser: new(int64),
					},
				},
			},
			container: &v1.Container{
				SecurityContext: &v1.SecurityContext{},
			},
			wantRunAsUser: new(int64),
		},
		{
			desc: "no runAsUser in pod, runAsUser in container",
			pod: &v1.Pod{
				Spec: v1.PodSpec{
					SecurityContext: &v1.PodSecurityContext{},
				},
			},
			container: &v1.Container{
				SecurityContext: &v1.SecurityContext{
					RunAsUser: new(int64),
				},
			},
			wantRunAsUser: new(int64),
		},
		{
			// Renamed from a duplicate of the previous case's desc: this
			// case sets runAsUser at BOTH levels and expects the
			// container-level value (1) to win over the pod-level one (0).
			desc: "runAsUser in pod, runAsUser in container",
			pod: &v1.Pod{
				Spec: v1.PodSpec{
					SecurityContext: &v1.PodSecurityContext{
						RunAsUser: new(int64),
					},
				},
			},
			container: &v1.Container{
				SecurityContext: &v1.SecurityContext{
					RunAsUser: ptr.To[int64](1),
				},
			},
			wantRunAsUser: ptr.To[int64](1),
		},
	}
	for _, test := range tests {
		t.Run(test.desc, func(t *testing.T) {
			runAsUser, ok := DetermineEffectiveRunAsUser(test.pod, test.container)
			if !ok && test.wantRunAsUser != nil {
				t.Errorf("DetermineEffectiveRunAsUser(%v, %v) = %v, want %d", test.pod, test.container, runAsUser, *test.wantRunAsUser)
			}
			if ok && test.wantRunAsUser == nil {
				t.Errorf("DetermineEffectiveRunAsUser(%v, %v) = %d, want %v", test.pod, test.container, *runAsUser, test.wantRunAsUser)
			}
			if ok && test.wantRunAsUser != nil && *runAsUser != *test.wantRunAsUser {
				t.Errorf("DetermineEffectiveRunAsUser(%v, %v) = %d, want %d", test.pod, test.container, *runAsUser, *test.wantRunAsUser)
			}
		})
	}
}
|
go
|
github
|
https://github.com/kubernetes/kubernetes
|
pkg/securitycontext/util_test.go
|
/*[clinic input]
preserve
[clinic start generated code]*/
#include "pycore_modsupport.h" // _PyArg_CheckPositional()
/* Argument Clinic generated METH_FASTCALL binding for math.gcd: the raw
 * positional argument vector is passed straight through to the impl.
 * NOTE(review): this file is clinic-generated ("preserve"); edits here are
 * normally made in the .c source and regenerated. */
PyDoc_STRVAR(math_integer_gcd__doc__,
"gcd($module, /, *integers)\n"
"--\n"
"\n"
"Greatest Common Divisor.");
#define MATH_INTEGER_GCD_METHODDEF    \
    {"gcd", _PyCFunction_CAST(math_integer_gcd), METH_FASTCALL, math_integer_gcd__doc__},
static PyObject *
math_integer_gcd_impl(PyObject *module, PyObject * const *args,
                      Py_ssize_t args_length);
static PyObject *
math_integer_gcd(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
{
    PyObject *return_value = NULL;
    PyObject * const *__clinic_args;
    Py_ssize_t args_length;
    __clinic_args = args;
    args_length = nargs;
    return_value = math_integer_gcd_impl(module, __clinic_args, args_length);
    return return_value;
}
/* Clinic-generated METH_FASTCALL binding for math.lcm; mirrors the gcd
 * wrapper: the positional vector is forwarded unchanged to the impl. */
PyDoc_STRVAR(math_integer_lcm__doc__,
"lcm($module, /, *integers)\n"
"--\n"
"\n"
"Least Common Multiple.");
#define MATH_INTEGER_LCM_METHODDEF    \
    {"lcm", _PyCFunction_CAST(math_integer_lcm), METH_FASTCALL, math_integer_lcm__doc__},
static PyObject *
math_integer_lcm_impl(PyObject *module, PyObject * const *args,
                      Py_ssize_t args_length);
static PyObject *
math_integer_lcm(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
{
    PyObject *return_value = NULL;
    PyObject * const *__clinic_args;
    Py_ssize_t args_length;
    __clinic_args = args;
    args_length = nargs;
    return_value = math_integer_lcm_impl(module, __clinic_args, args_length);
    return return_value;
}
/* METH_O binding for math.isqrt -- no generated wrapper needed since the
 * single argument is passed directly to the impl. */
PyDoc_STRVAR(math_integer_isqrt__doc__,
"isqrt($module, n, /)\n"
"--\n"
"\n"
"Return the integer part of the square root of the input.");
#define MATH_INTEGER_ISQRT_METHODDEF    \
    {"isqrt", (PyCFunction)math_integer_isqrt, METH_O, math_integer_isqrt__doc__},
/* METH_O binding for math.factorial -- single argument, no wrapper. */
PyDoc_STRVAR(math_integer_factorial__doc__,
"factorial($module, n, /)\n"
"--\n"
"\n"
"Find n!.");
#define MATH_INTEGER_FACTORIAL_METHODDEF    \
    {"factorial", (PyCFunction)math_integer_factorial, METH_O, math_integer_factorial__doc__},
/* Clinic-generated binding for math.perm(n, k=None): validates 1..2
 * positional args; k defaults to Py_None when omitted. */
PyDoc_STRVAR(math_integer_perm__doc__,
"perm($module, n, k=None, /)\n"
"--\n"
"\n"
"Number of ways to choose k items from n items without repetition and with order.\n"
"\n"
"Evaluates to n! / (n - k)! when k <= n and evaluates\n"
"to zero when k > n.\n"
"\n"
"If k is not specified or is None, then k defaults to n\n"
"and the function returns n!.\n"
"\n"
"Raises ValueError if either of the arguments are negative.");
#define MATH_INTEGER_PERM_METHODDEF    \
    {"perm", _PyCFunction_CAST(math_integer_perm), METH_FASTCALL, math_integer_perm__doc__},
static PyObject *
math_integer_perm_impl(PyObject *module, PyObject *n, PyObject *k);
static PyObject *
math_integer_perm(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
{
    PyObject *return_value = NULL;
    PyObject *n;
    PyObject *k = Py_None;
    if (!_PyArg_CheckPositional("perm", nargs, 1, 2)) {
        goto exit;
    }
    n = args[0];
    if (nargs < 2) {
        goto skip_optional;
    }
    k = args[1];
skip_optional:
    return_value = math_integer_perm_impl(module, n, k);
exit:
    return return_value;
}
/* Clinic-generated binding for math.comb(n, k): requires exactly two
 * positional arguments. */
PyDoc_STRVAR(math_integer_comb__doc__,
"comb($module, n, k, /)\n"
"--\n"
"\n"
"Number of ways to choose k items from n items without repetition and without order.\n"
"\n"
"Evaluates to n! / (k! * (n - k)!) when k <= n and evaluates\n"
"to zero when k > n.\n"
"\n"
"Also called the binomial coefficient because it is equivalent\n"
"to the coefficient of k-th term in polynomial expansion of the\n"
"expression (1 + x)**n.\n"
"\n"
"Raises ValueError if either of the arguments are negative.");
#define MATH_INTEGER_COMB_METHODDEF    \
    {"comb", _PyCFunction_CAST(math_integer_comb), METH_FASTCALL, math_integer_comb__doc__},
static PyObject *
math_integer_comb_impl(PyObject *module, PyObject *n, PyObject *k);
static PyObject *
math_integer_comb(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
{
    PyObject *return_value = NULL;
    PyObject *n;
    PyObject *k;
    if (!_PyArg_CheckPositional("comb", nargs, 2, 2)) {
        goto exit;
    }
    n = args[0];
    k = args[1];
    return_value = math_integer_comb_impl(module, n, k);
exit:
    return return_value;
}
/*[clinic end generated code: output=34697570c923a3af input=a9049054013a1b77]*/
|
c
|
github
|
https://github.com/python/cpython
|
Modules/clinic/mathintegermodule.c.h
|
from fooster.web import web
import mock
# Minimal well-formed HTTP/1.1 request used as the default fixture below.
test_request = 'GET / HTTP/1.1\r\n' + '\r\n'
def bad_read(self):
    # stand-in for rfile.read/readline that always fails, simulating an
    # unreadable connection
    raise Exception()
def run(request, handler=None, timeout=None, keepalive=True, initial_timeout=None, read_exception=False, close=True, skip=False):
    """Drive one HTTPRequest through the mock server and return it.

    *request* may be str or bytes; *handler* defaults to
    mock.MockHTTPHandler.  read_exception patches rfile so every read
    raises, close controls whether the request is closed before returning,
    and skip is forwarded to the request's skip flag.
    """
    if not isinstance(request, bytes):
        request = request.encode(web.http_encoding)
    if not handler:
        handler = mock.MockHTTPHandler
    # two routes: the plain root and one exposing a named regex group
    server = mock.MockHTTPServer(routes={'/': handler, '/named/(?P<named>.+)': handler})
    socket = mock.MockSocket(request)
    request_obj = web.HTTPRequest(socket, ('127.0.0.1', 1337), server, timeout)
    request_obj.response = mock.MockHTTPResponse(socket, ('127.0.0.1', 1337), server, request_obj)
    request_obj.skip = skip
    if read_exception:
        # make every read on the connection raise
        request_obj.rfile.read = bad_read
        request_obj.rfile.readline = bad_read
    request_obj.handle(keepalive, initial_timeout)
    if close:
        request_obj.close()
    return request_obj
def test_http_version_one():
    # HTTP/1.0 requests are accepted and the version string kept verbatim
    request = run('GET / HTTP/1.0\r\n' + '\r\n')
    assert request.request_http == 'HTTP/1.0'
def test_initial_timeout():
    # with no data yet, the initial timeout governs the connection
    request = run('', initial_timeout=5)
    assert request.connection.timeout == 5
def test_timeout():
    # once a request arrives, the regular timeout replaces the initial one
    request = run(test_request, timeout=8, initial_timeout=5)
    assert request.connection.timeout == 8
def test_read_exception():
    # a failing read keeps the initial timeout and disables keepalive
    request = run(test_request, timeout=8, initial_timeout=5, read_exception=True)
    assert request.connection.timeout == 5
    assert not request.keepalive
def test_no_request():
    request = run('')
    # if no request, do not keepalive
    assert not request.keepalive
def test_request_too_large():
    # request for 'GET aaaaaaa... HTTP/1.1\r\n' where it's length is one over the maximum line size
    long_request = 'GET ' + 'a' * (web.max_line_size - 4 - 9 - 2 + 1) + ' HTTP/1.1\r\n\r\n'
    request = run(long_request)
    # 414 URI Too Long, and the connection must not be reused
    assert request.handler.error.code == 414
    assert not request.keepalive
def test_no_newline():
    # truncated request line (missing CRLFs) -> 400 Bad Request
    request = run(test_request[:-4])
    assert request.handler.error.code == 400
    assert not request.keepalive
def test_bad_request_line():
    # request line missing the HTTP version token -> 400 Bad Request
    request = run('GET /\r\n' + '\r\n')
    assert request.handler.error.code == 400
    assert not request.keepalive
def test_wrong_http_version():
    # unsupported protocol version -> 505 HTTP Version Not Supported
    request = run('GET / HTTP/9000\r\n' + '\r\n')
    assert request.handler.error.code == 505
    assert not request.keepalive
def test_header_too_large():
    # create a header for 'TooLong: aaaaaaa...\r\n' where it's length is one over the maximum line size
    test_header_too_long = 'TooLong: ' + 'a' * (web.max_line_size - 9 - 2 + 1) + '\r\n'
    request = run('GET / HTTP/1.1\r\n' + test_header_too_long + '\r\n')
    # 431 with the offending header named in the status message
    assert request.handler.error.code == 431
    assert request.handler.error.status_message == 'TooLong Header Too Large'
    assert not request.keepalive
def test_too_many_headers():
# create a list of headers '1: test\r\n2: test\r\n...' where the number of them is one over the maximum number of headers
headers = ''.join(str(i) + ': test\r\n' for i in range(web.max_headers + 1))
request = run('GET / HTTP/1.1\r\n' + headers + '\r\n')
assert request.handler.error.code == 431
assert not request.keepalive
def test_header_no_newline():
request = run('GET / HTTP/1.1\r\n' + 'Test: header')
assert request.handler.error.code == 400
assert not request.keepalive
def test_header_no_colon():
request = run('GET / HTTP/1.1\r\n' + 'Test header\r\n' + '\r\n')
assert request.handler.error.code == 400
assert not request.keepalive
def test_connection_close():
    """'Connection: close' disables keepalive even on a valid request."""
    req = run('GET / HTTP/1.1\r\n' + 'Connection: close\r\n' + '\r\n')
    assert not req.keepalive
def test_handler_not_found():
    """An unroutable path is a 404 but the connection stays alive."""
    req = run('GET /nonexistent HTTP/1.1\r\n' + '\r\n')
    assert req.handler.error.code == 404
    assert req.keepalive
def test_handler_quoted():
    """A percent-encoded path ('%2f') gets a 404 and stays alive."""
    req = run('GET %2f HTTP/1.1\r\n' + '\r\n')
    assert req.handler.error.code == 404
    assert req.keepalive
def test_keepalive():
    """A valid HTTP/1.1 request keeps the connection alive by default."""
    req = run(test_request)
    assert req.keepalive
def test_no_keepalive():
    """Keepalive can be disabled at the server level."""
    req = run(test_request, keepalive=False)
    assert not req.keepalive
def test_handler():
    """The configured handler class is instantiated for the request."""
    req = run(test_request, handler=web.HTTPHandler)
    assert isinstance(req.handler, web.HTTPHandler)
def test_read_pipelining():
    """Bytes after the first request are left unread for the next handler."""
    req = run('GET / HTTP/1.1\r\n' + '\r\n' + 'GET /nonexistent HTTP/1.1\r\n' + '\r\n', close=False)
    assert req.rfile.read() == b'GET /nonexistent HTTP/1.1\r\n\r\n'
    req.close()
def test_close():
    """Closing the request closes both its read and response streams."""
    req = run('GET / HTTP/1.1\r\n' + '\r\n')
    assert req.rfile.closed
    assert req.response.closed
def test_skip():
    """When parsing is skipped, no headers are read."""
    req = run('', skip=True)
    assert req.headers is None
def test_named_groups():
    """Named regex groups from the route are exposed on the handler."""
    req = run('GET /named/asdf HTTP/1.1\r\n' + '\r\n')
    assert req.handler.groups['named'] == 'asdf'
    assert req.response.closed
|
unknown
|
codeparrot/codeparrot-clean
| ||
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
from typing import TYPE_CHECKING, Dict, Union, Optional
from typing_extensions import Literal, assert_never
import httpx
from ... import _legacy_response
from ...types import FileChunkingStrategyParam
from ..._types import Body, Omit, Query, Headers, NotGiven, FileTypes, omit, not_given
from ..._utils import is_given, maybe_transform, async_maybe_transform
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
from ..._response import to_streamed_response_wrapper, async_to_streamed_response_wrapper
from ...pagination import SyncPage, AsyncPage, SyncCursorPage, AsyncCursorPage
from ..._base_client import AsyncPaginator, make_request_options
from ...types.vector_stores import file_list_params, file_create_params, file_update_params
from ...types.file_chunking_strategy_param import FileChunkingStrategyParam
from ...types.vector_stores.vector_store_file import VectorStoreFile
from ...types.vector_stores.file_content_response import FileContentResponse
from ...types.vector_stores.vector_store_file_deleted import VectorStoreFileDeleted
__all__ = ["Files", "AsyncFiles"]
class Files(SyncAPIResource):
    """Synchronous resource for files attached to a vector store.

    Wraps the ``/vector_stores/{vector_store_id}/files`` endpoints (create,
    retrieve, update, list, delete, content) and layers polling/upload
    convenience helpers (``create_and_poll``, ``poll``, ``upload``,
    ``upload_and_poll``) on top of the raw calls.
    """
    @cached_property
    def with_raw_response(self) -> FilesWithRawResponse:
        """
        This property can be used as a prefix for any HTTP method call to return
        the raw response object instead of the parsed content.

        For more information, see https://www.github.com/openai/openai-python#accessing-raw-response-data-eg-headers
        """
        return FilesWithRawResponse(self)
    @cached_property
    def with_streaming_response(self) -> FilesWithStreamingResponse:
        """
        An alternative to `.with_raw_response` that doesn't eagerly read the response body.

        For more information, see https://www.github.com/openai/openai-python#with_streaming_response
        """
        return FilesWithStreamingResponse(self)
    def create(
        self,
        vector_store_id: str,
        *,
        file_id: str,
        attributes: Optional[Dict[str, Union[str, float, bool]]] | Omit = omit,
        chunking_strategy: FileChunkingStrategyParam | Omit = omit,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> VectorStoreFile:
        """
        Create a vector store file by attaching a
        [File](https://platform.openai.com/docs/api-reference/files) to a
        [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object).

        Args:
          file_id: A [File](https://platform.openai.com/docs/api-reference/files) ID that the
              vector store should use. Useful for tools like `file_search` that can access
              files.

          attributes: Set of 16 key-value pairs that can be attached to an object. This can be useful
              for storing additional information about the object in a structured format, and
              querying for objects via API or the dashboard. Keys are strings with a maximum
              length of 64 characters. Values are strings with a maximum length of 512
              characters, booleans, or numbers.

          chunking_strategy: The chunking strategy used to chunk the file(s). If not set, will use the `auto`
              strategy. Only applicable if `file_ids` is non-empty.

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not vector_store_id:
            raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}")
        # Vector stores live behind the Assistants v2 beta header.
        extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})}
        return self._post(
            f"/vector_stores/{vector_store_id}/files",
            body=maybe_transform(
                {
                    "file_id": file_id,
                    "attributes": attributes,
                    "chunking_strategy": chunking_strategy,
                },
                file_create_params.FileCreateParams,
            ),
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=VectorStoreFile,
        )
    def retrieve(
        self,
        file_id: str,
        *,
        vector_store_id: str,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> VectorStoreFile:
        """
        Retrieves a vector store file.

        Args:
          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not vector_store_id:
            raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}")
        if not file_id:
            raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}")
        extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})}
        return self._get(
            f"/vector_stores/{vector_store_id}/files/{file_id}",
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=VectorStoreFile,
        )
    def update(
        self,
        file_id: str,
        *,
        vector_store_id: str,
        attributes: Optional[Dict[str, Union[str, float, bool]]],
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> VectorStoreFile:
        """
        Update attributes on a vector store file.

        Args:
          attributes: Set of 16 key-value pairs that can be attached to an object. This can be useful
              for storing additional information about the object in a structured format, and
              querying for objects via API or the dashboard. Keys are strings with a maximum
              length of 64 characters. Values are strings with a maximum length of 512
              characters, booleans, or numbers.

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not vector_store_id:
            raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}")
        if not file_id:
            raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}")
        extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})}
        return self._post(
            f"/vector_stores/{vector_store_id}/files/{file_id}",
            body=maybe_transform({"attributes": attributes}, file_update_params.FileUpdateParams),
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=VectorStoreFile,
        )
    def list(
        self,
        vector_store_id: str,
        *,
        after: str | Omit = omit,
        before: str | Omit = omit,
        filter: Literal["in_progress", "completed", "failed", "cancelled"] | Omit = omit,
        limit: int | Omit = omit,
        order: Literal["asc", "desc"] | Omit = omit,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> SyncCursorPage[VectorStoreFile]:
        """
        Returns a list of vector store files.

        Args:
          after: A cursor for use in pagination. `after` is an object ID that defines your place
              in the list. For instance, if you make a list request and receive 100 objects,
              ending with obj_foo, your subsequent call can include after=obj_foo in order to
              fetch the next page of the list.

          before: A cursor for use in pagination. `before` is an object ID that defines your place
              in the list. For instance, if you make a list request and receive 100 objects,
              starting with obj_foo, your subsequent call can include before=obj_foo in order
              to fetch the previous page of the list.

          filter: Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`.

          limit: A limit on the number of objects to be returned. Limit can range between 1 and
              100, and the default is 20.

          order: Sort order by the `created_at` timestamp of the objects. `asc` for ascending
              order and `desc` for descending order.

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not vector_store_id:
            raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}")
        extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})}
        return self._get_api_list(
            f"/vector_stores/{vector_store_id}/files",
            page=SyncCursorPage[VectorStoreFile],
            options=make_request_options(
                extra_headers=extra_headers,
                extra_query=extra_query,
                extra_body=extra_body,
                timeout=timeout,
                query=maybe_transform(
                    {
                        "after": after,
                        "before": before,
                        "filter": filter,
                        "limit": limit,
                        "order": order,
                    },
                    file_list_params.FileListParams,
                ),
            ),
            model=VectorStoreFile,
        )
    def delete(
        self,
        file_id: str,
        *,
        vector_store_id: str,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> VectorStoreFileDeleted:
        """Delete a vector store file.

        This will remove the file from the vector store but
        the file itself will not be deleted. To delete the file, use the
        [delete file](https://platform.openai.com/docs/api-reference/files/delete)
        endpoint.

        Args:
          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not vector_store_id:
            raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}")
        if not file_id:
            raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}")
        extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})}
        return self._delete(
            f"/vector_stores/{vector_store_id}/files/{file_id}",
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=VectorStoreFileDeleted,
        )
    def create_and_poll(
        self,
        file_id: str,
        *,
        vector_store_id: str,
        attributes: Optional[Dict[str, Union[str, float, bool]]] | Omit = omit,
        poll_interval_ms: int | Omit = omit,
        chunking_strategy: FileChunkingStrategyParam | Omit = omit,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> VectorStoreFile:
        """Attach a file to the given vector store and wait for it to be processed."""
        self.create(
            vector_store_id=vector_store_id,
            file_id=file_id,
            chunking_strategy=chunking_strategy,
            attributes=attributes,
            extra_headers=extra_headers,
            extra_query=extra_query,
            extra_body=extra_body,
            timeout=timeout,
        )
        return self.poll(
            file_id,
            vector_store_id=vector_store_id,
            poll_interval_ms=poll_interval_ms,
        )
    def poll(
        self,
        file_id: str,
        *,
        vector_store_id: str,
        poll_interval_ms: int | Omit = omit,
    ) -> VectorStoreFile:
        """Wait for the vector store file to finish processing.

        Note: this will return even if the file failed to process, you need to check
        file.last_error and file.status to handle these cases
        """
        headers: dict[str, str] = {"X-Stainless-Poll-Helper": "true"}
        if is_given(poll_interval_ms):
            headers["X-Stainless-Custom-Poll-Interval"] = str(poll_interval_ms)
        while True:
            response = self.with_raw_response.retrieve(
                file_id,
                vector_store_id=vector_store_id,
                extra_headers=headers,
            )
            file = response.parse()
            if file.status == "in_progress":
                if not is_given(poll_interval_ms):
                    # Prefer the server-suggested interval; fall back to 1 second.
                    from_header = response.headers.get("openai-poll-after-ms")
                    if from_header is not None:
                        poll_interval_ms = int(from_header)
                    else:
                        poll_interval_ms = 1000
                self._sleep(poll_interval_ms / 1000)
            elif file.status == "cancelled" or file.status == "completed" or file.status == "failed":
                # Terminal states: return the file; callers inspect status/last_error.
                return file
            else:
                if TYPE_CHECKING:  # type: ignore[unreachable]
                    assert_never(file.status)
                else:
                    return file
    def upload(
        self,
        *,
        vector_store_id: str,
        file: FileTypes,
        chunking_strategy: FileChunkingStrategyParam | Omit = omit,
    ) -> VectorStoreFile:
        """Upload a file to the `files` API and then attach it to the given vector store.

        Note the file will be asynchronously processed (you can use the alternative
        polling helper method to wait for processing to complete).
        """
        file_obj = self._client.files.create(file=file, purpose="assistants")
        return self.create(vector_store_id=vector_store_id, file_id=file_obj.id, chunking_strategy=chunking_strategy)
    def upload_and_poll(
        self,
        *,
        vector_store_id: str,
        file: FileTypes,
        attributes: Optional[Dict[str, Union[str, float, bool]]] | Omit = omit,
        poll_interval_ms: int | Omit = omit,
        chunking_strategy: FileChunkingStrategyParam | Omit = omit,
    ) -> VectorStoreFile:
        """Add a file to a vector store and poll until processing is complete."""
        file_obj = self._client.files.create(file=file, purpose="assistants")
        return self.create_and_poll(
            vector_store_id=vector_store_id,
            file_id=file_obj.id,
            chunking_strategy=chunking_strategy,
            poll_interval_ms=poll_interval_ms,
            attributes=attributes,
        )
    def content(
        self,
        file_id: str,
        *,
        vector_store_id: str,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> SyncPage[FileContentResponse]:
        """
        Retrieve the parsed contents of a vector store file.

        Args:
          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not vector_store_id:
            raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}")
        if not file_id:
            raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}")
        extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})}
        return self._get_api_list(
            f"/vector_stores/{vector_store_id}/files/{file_id}/content",
            page=SyncPage[FileContentResponse],
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            model=FileContentResponse,
        )
class AsyncFiles(AsyncAPIResource):
    """Asynchronous counterpart to `Files`.

    Same ``/vector_stores/{vector_store_id}/files`` endpoints and
    polling/upload helpers, exposed as coroutines.
    """
    @cached_property
    def with_raw_response(self) -> AsyncFilesWithRawResponse:
        """
        This property can be used as a prefix for any HTTP method call to return
        the raw response object instead of the parsed content.

        For more information, see https://www.github.com/openai/openai-python#accessing-raw-response-data-eg-headers
        """
        return AsyncFilesWithRawResponse(self)
    @cached_property
    def with_streaming_response(self) -> AsyncFilesWithStreamingResponse:
        """
        An alternative to `.with_raw_response` that doesn't eagerly read the response body.

        For more information, see https://www.github.com/openai/openai-python#with_streaming_response
        """
        return AsyncFilesWithStreamingResponse(self)
    async def create(
        self,
        vector_store_id: str,
        *,
        file_id: str,
        attributes: Optional[Dict[str, Union[str, float, bool]]] | Omit = omit,
        chunking_strategy: FileChunkingStrategyParam | Omit = omit,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> VectorStoreFile:
        """
        Create a vector store file by attaching a
        [File](https://platform.openai.com/docs/api-reference/files) to a
        [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object).

        Args:
          file_id: A [File](https://platform.openai.com/docs/api-reference/files) ID that the
              vector store should use. Useful for tools like `file_search` that can access
              files.

          attributes: Set of 16 key-value pairs that can be attached to an object. This can be useful
              for storing additional information about the object in a structured format, and
              querying for objects via API or the dashboard. Keys are strings with a maximum
              length of 64 characters. Values are strings with a maximum length of 512
              characters, booleans, or numbers.

          chunking_strategy: The chunking strategy used to chunk the file(s). If not set, will use the `auto`
              strategy. Only applicable if `file_ids` is non-empty.

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not vector_store_id:
            raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}")
        # Vector stores live behind the Assistants v2 beta header.
        extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})}
        return await self._post(
            f"/vector_stores/{vector_store_id}/files",
            body=await async_maybe_transform(
                {
                    "file_id": file_id,
                    "attributes": attributes,
                    "chunking_strategy": chunking_strategy,
                },
                file_create_params.FileCreateParams,
            ),
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=VectorStoreFile,
        )
    async def retrieve(
        self,
        file_id: str,
        *,
        vector_store_id: str,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> VectorStoreFile:
        """
        Retrieves a vector store file.

        Args:
          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not vector_store_id:
            raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}")
        if not file_id:
            raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}")
        extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})}
        return await self._get(
            f"/vector_stores/{vector_store_id}/files/{file_id}",
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=VectorStoreFile,
        )
    async def update(
        self,
        file_id: str,
        *,
        vector_store_id: str,
        attributes: Optional[Dict[str, Union[str, float, bool]]],
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> VectorStoreFile:
        """
        Update attributes on a vector store file.

        Args:
          attributes: Set of 16 key-value pairs that can be attached to an object. This can be useful
              for storing additional information about the object in a structured format, and
              querying for objects via API or the dashboard. Keys are strings with a maximum
              length of 64 characters. Values are strings with a maximum length of 512
              characters, booleans, or numbers.

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not vector_store_id:
            raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}")
        if not file_id:
            raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}")
        extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})}
        return await self._post(
            f"/vector_stores/{vector_store_id}/files/{file_id}",
            body=await async_maybe_transform({"attributes": attributes}, file_update_params.FileUpdateParams),
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=VectorStoreFile,
        )
    def list(
        self,
        vector_store_id: str,
        *,
        after: str | Omit = omit,
        before: str | Omit = omit,
        filter: Literal["in_progress", "completed", "failed", "cancelled"] | Omit = omit,
        limit: int | Omit = omit,
        order: Literal["asc", "desc"] | Omit = omit,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> AsyncPaginator[VectorStoreFile, AsyncCursorPage[VectorStoreFile]]:
        """
        Returns a list of vector store files.

        Args:
          after: A cursor for use in pagination. `after` is an object ID that defines your place
              in the list. For instance, if you make a list request and receive 100 objects,
              ending with obj_foo, your subsequent call can include after=obj_foo in order to
              fetch the next page of the list.

          before: A cursor for use in pagination. `before` is an object ID that defines your place
              in the list. For instance, if you make a list request and receive 100 objects,
              starting with obj_foo, your subsequent call can include before=obj_foo in order
              to fetch the previous page of the list.

          filter: Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`.

          limit: A limit on the number of objects to be returned. Limit can range between 1 and
              100, and the default is 20.

          order: Sort order by the `created_at` timestamp of the objects. `asc` for ascending
              order and `desc` for descending order.

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not vector_store_id:
            raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}")
        extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})}
        return self._get_api_list(
            f"/vector_stores/{vector_store_id}/files",
            page=AsyncCursorPage[VectorStoreFile],
            options=make_request_options(
                extra_headers=extra_headers,
                extra_query=extra_query,
                extra_body=extra_body,
                timeout=timeout,
                query=maybe_transform(
                    {
                        "after": after,
                        "before": before,
                        "filter": filter,
                        "limit": limit,
                        "order": order,
                    },
                    file_list_params.FileListParams,
                ),
            ),
            model=VectorStoreFile,
        )
    async def delete(
        self,
        file_id: str,
        *,
        vector_store_id: str,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> VectorStoreFileDeleted:
        """Delete a vector store file.

        This will remove the file from the vector store but
        the file itself will not be deleted. To delete the file, use the
        [delete file](https://platform.openai.com/docs/api-reference/files/delete)
        endpoint.

        Args:
          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not vector_store_id:
            raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}")
        if not file_id:
            raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}")
        extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})}
        return await self._delete(
            f"/vector_stores/{vector_store_id}/files/{file_id}",
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            cast_to=VectorStoreFileDeleted,
        )
    async def create_and_poll(
        self,
        file_id: str,
        *,
        vector_store_id: str,
        attributes: Optional[Dict[str, Union[str, float, bool]]] | Omit = omit,
        poll_interval_ms: int | Omit = omit,
        chunking_strategy: FileChunkingStrategyParam | Omit = omit,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> VectorStoreFile:
        """Attach a file to the given vector store and wait for it to be processed."""
        await self.create(
            vector_store_id=vector_store_id,
            file_id=file_id,
            chunking_strategy=chunking_strategy,
            attributes=attributes,
            extra_headers=extra_headers,
            extra_query=extra_query,
            extra_body=extra_body,
            timeout=timeout,
        )
        return await self.poll(
            file_id,
            vector_store_id=vector_store_id,
            poll_interval_ms=poll_interval_ms,
        )
    async def poll(
        self,
        file_id: str,
        *,
        vector_store_id: str,
        poll_interval_ms: int | Omit = omit,
    ) -> VectorStoreFile:
        """Wait for the vector store file to finish processing.

        Note: this will return even if the file failed to process, you need to check
        file.last_error and file.status to handle these cases
        """
        headers: dict[str, str] = {"X-Stainless-Poll-Helper": "true"}
        if is_given(poll_interval_ms):
            headers["X-Stainless-Custom-Poll-Interval"] = str(poll_interval_ms)
        while True:
            response = await self.with_raw_response.retrieve(
                file_id,
                vector_store_id=vector_store_id,
                extra_headers=headers,
            )
            file = response.parse()
            if file.status == "in_progress":
                if not is_given(poll_interval_ms):
                    # Prefer the server-suggested interval; fall back to 1 second.
                    from_header = response.headers.get("openai-poll-after-ms")
                    if from_header is not None:
                        poll_interval_ms = int(from_header)
                    else:
                        poll_interval_ms = 1000
                await self._sleep(poll_interval_ms / 1000)
            elif file.status == "cancelled" or file.status == "completed" or file.status == "failed":
                # Terminal states: return the file; callers inspect status/last_error.
                return file
            else:
                if TYPE_CHECKING:  # type: ignore[unreachable]
                    assert_never(file.status)
                else:
                    return file
    async def upload(
        self,
        *,
        vector_store_id: str,
        file: FileTypes,
        chunking_strategy: FileChunkingStrategyParam | Omit = omit,
    ) -> VectorStoreFile:
        """Upload a file to the `files` API and then attach it to the given vector store.

        Note the file will be asynchronously processed (you can use the alternative
        polling helper method to wait for processing to complete).
        """
        file_obj = await self._client.files.create(file=file, purpose="assistants")
        return await self.create(
            vector_store_id=vector_store_id, file_id=file_obj.id, chunking_strategy=chunking_strategy
        )
    async def upload_and_poll(
        self,
        *,
        vector_store_id: str,
        file: FileTypes,
        attributes: Optional[Dict[str, Union[str, float, bool]]] | Omit = omit,
        poll_interval_ms: int | Omit = omit,
        chunking_strategy: FileChunkingStrategyParam | Omit = omit,
    ) -> VectorStoreFile:
        """Add a file to a vector store and poll until processing is complete."""
        file_obj = await self._client.files.create(file=file, purpose="assistants")
        return await self.create_and_poll(
            vector_store_id=vector_store_id,
            file_id=file_obj.id,
            poll_interval_ms=poll_interval_ms,
            chunking_strategy=chunking_strategy,
            attributes=attributes,
        )
    def content(
        self,
        file_id: str,
        *,
        vector_store_id: str,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> AsyncPaginator[FileContentResponse, AsyncPage[FileContentResponse]]:
        """
        Retrieve the parsed contents of a vector store file.

        Args:
          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not vector_store_id:
            raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}")
        if not file_id:
            raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}")
        extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})}
        return self._get_api_list(
            f"/vector_stores/{vector_store_id}/files/{file_id}/content",
            page=AsyncPage[FileContentResponse],
            options=make_request_options(
                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
            ),
            model=FileContentResponse,
        )
class FilesWithRawResponse:
    """Mirror of ``Files`` whose endpoint methods return the raw API response."""

    def __init__(self, files: Files) -> None:
        self._files = files

        # Wrap each endpoint method so callers receive the raw response.
        wrap = _legacy_response.to_raw_response_wrapper
        self.create = wrap(files.create)
        self.retrieve = wrap(files.retrieve)
        self.update = wrap(files.update)
        self.list = wrap(files.list)
        self.delete = wrap(files.delete)
        self.content = wrap(files.content)
class AsyncFilesWithRawResponse:
    """Mirror of ``AsyncFiles`` whose endpoint methods return the raw API response."""

    def __init__(self, files: AsyncFiles) -> None:
        self._files = files

        # Wrap each endpoint method so callers receive the raw response.
        wrap = _legacy_response.async_to_raw_response_wrapper
        self.create = wrap(files.create)
        self.retrieve = wrap(files.retrieve)
        self.update = wrap(files.update)
        self.list = wrap(files.list)
        self.delete = wrap(files.delete)
        self.content = wrap(files.content)
class FilesWithStreamingResponse:
    """Mirror of ``Files`` whose endpoint methods return streamed responses."""

    def __init__(self, files: Files) -> None:
        self._files = files

        # Wrap each endpoint method in the streaming-response adapter.
        wrap = to_streamed_response_wrapper
        self.create = wrap(files.create)
        self.retrieve = wrap(files.retrieve)
        self.update = wrap(files.update)
        self.list = wrap(files.list)
        self.delete = wrap(files.delete)
        self.content = wrap(files.content)
class AsyncFilesWithStreamingResponse:
    """Mirror of ``AsyncFiles`` whose endpoint methods return streamed responses."""

    def __init__(self, files: AsyncFiles) -> None:
        self._files = files

        # Wrap each endpoint method in the async streaming-response adapter.
        wrap = async_to_streamed_response_wrapper
        self.create = wrap(files.create)
        self.retrieve = wrap(files.retrieve)
        self.update = wrap(files.update)
        self.list = wrap(files.list)
        self.delete = wrap(files.delete)
        self.content = wrap(files.content)
|
python
|
github
|
https://github.com/openai/openai-python
|
src/openai/resources/vector_stores/files.py
|
#!/usr/bin/python
## Copyright 2017 Knossos authors, see NOTICE file
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
from __future__ import absolute_import, print_function
import sys
if __package__ is None and not hasattr(sys, 'frozen'):
    # Running from a source checkout: put the repo root on sys.path so the
    # `knossos` package is importable.
    import os.path
    path = os.path.realpath(os.path.abspath(__file__))
    sys.path.insert(0, os.path.dirname(os.path.dirname(path)))

# Lightweight CLI dispatch: each probe mode imports only what it needs and
# prints a machine-readable result; the default branch starts the launcher.
if len(sys.argv) > 1 and sys.argv[1] == '--cpuinfo':
    # We don't need to initialize knossos if we only need to fetch the CPU info.
    import json

    from knossos.third_party import cpuinfo

    info = None
    try:
        info = cpuinfo.get_cpu_info()
    except Exception:
        from knossos.launcher import logging
        logging.exception('Failed to retrieve CPU info.')

    # Emits "null" when detection failed; callers must handle that.
    print(json.dumps(info))
elif len(sys.argv) > 1 and sys.argv[1] == '--run-cpuid':
    from knossos.third_party import cpuinfo
    print(cpuinfo._actual_get_cpu_info_from_cpuid())
elif len(sys.argv) > 1 and sys.argv[1] == '--deviceinfo':
    import json
    from knossos import clibs

    clibs.init_sdl()
    clibs.init_openal()

    if clibs.can_detect_audio():
        audio_devs = clibs.list_audio_devs()
    else:
        audio_devs = None

    print(json.dumps({
        'modes': clibs.get_modes(),
        'audio_devs': audio_devs,
        'joysticks': clibs.list_joysticks()
    }))
elif len(sys.argv) > 1 and sys.argv[1] == '--fso-config-path':
    from knossos import clibs

    clibs.init_sdl()
    print(clibs.get_config_path())
elif len(sys.argv) > 1 and sys.argv[1] == '--lib-paths':
    import json
    from knossos import clibs, center

    if len(sys.argv) > 3:
        # Optional overrides: argv[2] = SDL2 path, argv[3] = OpenAL path.
        # The literal value 'auto' means "autodetect".
        if sys.argv[2] == 'auto':
            center.settings['sdl2_path'] = None
        else:
            center.settings['sdl2_path'] = sys.argv[2]

        if sys.argv[3] == 'auto':
            center.settings['openal_path'] = None
        else:
            center.settings['openal_path'] = sys.argv[3]

    try:
        clibs.init_sdl()
    except Exception:
        clibs.sdl = None

    try:
        clibs.init_openal()
    except Exception:
        # BUG FIX: this previously assigned `clibs.acl`, which created a junk
        # attribute and left `clibs.alc` unset on failure, breaking the
        # `clibs.alc` checks below (mirrors the `clibs.sdl = None` path above).
        clibs.alc = None

    if center.settings['sdl2_path'] and clibs.sdl:
        if clibs.sdl._name != center.settings['sdl2_path']:
            clibs.sdl = None

    if center.settings['openal_path'] and clibs.alc:
        if clibs.alc._name != center.settings['openal_path']:
            clibs.alc = None

    print(json.dumps({
        'sdl2': clibs.sdl._name if clibs.sdl else None,
        'openal': clibs.alc._name if clibs.alc else None
    }))
else:
    from knossos import launcher
    launcher.main()
|
unknown
|
codeparrot/codeparrot-clean
| ||
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import pytest
import numpy as np
from astropy import units as u
from astropy.coordinates.distances import Distance
from astropy.coordinates.builtin_frames import ICRS, FK5, Galactic, AltAz, SkyOffsetFrame
from astropy.coordinates import SkyCoord, EarthLocation
from astropy.time import Time
from astropy.tests.helper import assert_quantity_allclose as assert_allclose
def test_altaz_attribute_transforms():
    """Test transforms between AltAz frames with different attributes."""
    # Two offset frames whose AltAz origins differ only in obstime.
    el1 = EarthLocation(0*u.deg, 0*u.deg, 0*u.m)
    origin1 = AltAz(0 * u.deg, 0*u.deg, obstime=Time("2000-01-01T12:00:00"),
                    location=el1)
    frame1 = SkyOffsetFrame(origin=origin1)
    coo1 = SkyCoord(1 * u.deg, 1 * u.deg, frame=frame1)

    el2 = EarthLocation(0*u.deg, 0*u.deg, 0*u.m)
    origin2 = AltAz(0 * u.deg, 0*u.deg, obstime=Time("2000-01-01T11:00:00"),
                    location=el2)
    frame2 = SkyOffsetFrame(origin=origin2)
    coo2 = coo1.transform_to(frame2)
    # Precomputed reference offsets for the one-hour obstime difference.
    coo2_expected = [1.22522446, 0.70624298] * u.deg
    # NOTE: `convert_precision` is a module-level constant defined further
    # down in this file; it is resolved at call time, so this works.
    assert_allclose([coo2.lon.wrap_at(180*u.deg), coo2.lat],
                    coo2_expected, atol=convert_precision)

    # An origin at the pole with matching obstime: the (1, 1) deg offset
    # must survive the round trip through the intermediate frame.
    el3 = EarthLocation(0*u.deg, 90*u.deg, 0*u.m)
    origin3 = AltAz(0 * u.deg, 90*u.deg, obstime=Time("2000-01-01T12:00:00"),
                    location=el3)
    frame3 = SkyOffsetFrame(origin=origin3)
    coo3 = coo2.transform_to(frame3)
    assert_allclose([coo3.lon.wrap_at(180*u.deg), coo3.lat],
                    [1*u.deg, 1*u.deg], atol=convert_precision)
@pytest.mark.parametrize("inradec,expectedlatlon, tolsep", [
    ((45, 45)*u.deg, (0, 0)*u.deg, .001*u.arcsec),
    ((45, 0)*u.deg, (0, -45)*u.deg, .001*u.arcsec),
    ((45, 90)*u.deg, (0, 45)*u.deg, .001*u.arcsec),
    ((46, 45)*u.deg, (1*np.cos(45*u.deg), 0)*u.deg, 16*u.arcsec),
    ])
def test_skyoffset(inradec, expectedlatlon, tolsep, originradec=(45, 45)*u.deg):
    """ICRS coordinates transformed to an offset frame centred on
    ``originradec`` land within ``tolsep`` of the expected (lat, lon)."""
    origin = ICRS(*originradec)
    skyoffset_frame = SkyOffsetFrame(origin=origin)

    skycoord = SkyCoord(*inradec, frame=ICRS)
    skycoord_inaf = skycoord.transform_to(skyoffset_frame)
    # The offset frame exposes spherical components as lon/lat.
    assert hasattr(skycoord_inaf, 'lon')
    assert hasattr(skycoord_inaf, 'lat')
    expected = SkyCoord(*expectedlatlon, frame=skyoffset_frame)

    assert skycoord_inaf.separation(expected) < tolsep
    # Check we can also transform back (regression test for gh-11254).
    roundtrip = skycoord_inaf.transform_to(ICRS())
    assert roundtrip.separation(skycoord) < 1*u.uas
def test_skyoffset_functional_ra():
    """An offset frame whose origin sits on the equator at RA=ra must act as
    a pure RA shift: compare Cartesian positions and verify the round trip."""
    # we do the 12)[1:-1] business because sometimes machine precision issues
    # lead to results that are either ~0 or ~360, which mucks up the final
    # comparison and leads to spurious failures. So this just avoids that by
    # staying away from the edges
    input_ra = np.linspace(0, 360, 12)[1:-1]
    input_dec = np.linspace(-90, 90, 12)[1:-1]
    icrs_coord = ICRS(ra=input_ra*u.deg,
                      dec=input_dec*u.deg,
                      distance=1.*u.kpc)

    for ra in np.linspace(0, 360, 24):
        # expected rotation: shifting every RA by -ra, dec unchanged
        expected = ICRS(ra=np.linspace(0-ra, 360-ra, 12)[1:-1]*u.deg,
                        dec=np.linspace(-90, 90, 12)[1:-1]*u.deg,
                        distance=1.*u.kpc)
        expected_xyz = expected.cartesian.xyz

        # actual transformation to the frame
        skyoffset_frame = SkyOffsetFrame(origin=ICRS(ra*u.deg, 0*u.deg))
        actual = icrs_coord.transform_to(skyoffset_frame)
        actual_xyz = actual.cartesian.xyz

        # back to ICRS
        roundtrip = actual.transform_to(ICRS())
        roundtrip_xyz = roundtrip.cartesian.xyz

        # Verify forward transform in Cartesian space and the full round trip
        # component-by-component.
        assert_allclose(actual_xyz, expected_xyz, atol=1E-5*u.kpc)
        assert_allclose(icrs_coord.ra, roundtrip.ra, atol=1E-5*u.deg)
        assert_allclose(icrs_coord.dec, roundtrip.dec, atol=1E-5*u.deg)
        assert_allclose(icrs_coord.distance, roundtrip.distance, atol=1E-5*u.kpc)
def test_skyoffset_functional_dec():
    """Offset frames with origins along dec: the expected result is built by
    an explicit rotation in Cartesian (xyz) space and compared against the
    frame transform, plus a round-trip check back to ICRS."""
    # we do the 12)[1:-1] business because sometimes machine precision issues
    # lead to results that are either ~0 or ~360, which mucks up the final
    # comparison and leads to spurious failures. So this just avoids that by
    # staying away from the edges
    input_ra = np.linspace(0, 360, 12)[1:-1]
    input_dec = np.linspace(-90, 90, 12)[1:-1]
    input_ra_rad = np.deg2rad(input_ra)
    input_dec_rad = np.deg2rad(input_dec)
    icrs_coord = ICRS(ra=input_ra*u.deg,
                      dec=input_dec*u.deg,
                      distance=1.*u.kpc)
    # Dec rotations
    # Done in xyz space because dec must be [-90,90]
    for dec in np.linspace(-90, 90, 13):
        # expected rotation (note the sign flip: rotating the frame by +dec
        # moves the coordinates by -dec)
        dec_rad = -np.deg2rad(dec)
        expected_x = (-np.sin(input_dec_rad) * np.sin(dec_rad) +
                      np.cos(input_ra_rad) * np.cos(input_dec_rad) * np.cos(dec_rad))
        expected_y = (np.sin(input_ra_rad) * np.cos(input_dec_rad))
        expected_z = (np.sin(input_dec_rad) * np.cos(dec_rad) +
                      np.sin(dec_rad) * np.cos(input_ra_rad) * np.cos(input_dec_rad))
        expected = SkyCoord(x=expected_x,
                            y=expected_y,
                            z=expected_z, unit='kpc', representation_type='cartesian')
        expected_xyz = expected.cartesian.xyz

        # actual transformation to the frame
        skyoffset_frame = SkyOffsetFrame(origin=ICRS(0*u.deg, dec*u.deg))
        actual = icrs_coord.transform_to(skyoffset_frame)
        actual_xyz = actual.cartesian.xyz

        # back to ICRS
        roundtrip = actual.transform_to(ICRS())

        # Verify both the forward transform and the round trip.
        assert_allclose(actual_xyz, expected_xyz, atol=1E-5*u.kpc)
        assert_allclose(icrs_coord.ra, roundtrip.ra, atol=1E-5*u.deg)
        assert_allclose(icrs_coord.dec, roundtrip.dec, atol=1E-5*u.deg)
        assert_allclose(icrs_coord.distance, roundtrip.distance, atol=1E-5*u.kpc)
def test_skyoffset_functional_ra_dec():
    """Combined RA+dec origins: the expected result is the composition of the
    RA and dec rotations, built explicitly in Cartesian space."""
    # we do the 12)[1:-1] business because sometimes machine precision issues
    # lead to results that are either ~0 or ~360, which mucks up the final
    # comparison and leads to spurious failures. So this just avoids that by
    # staying away from the edges
    input_ra = np.linspace(0, 360, 12)[1:-1]
    input_dec = np.linspace(-90, 90, 12)[1:-1]
    input_ra_rad = np.deg2rad(input_ra)
    input_dec_rad = np.deg2rad(input_dec)
    icrs_coord = ICRS(ra=input_ra*u.deg,
                      dec=input_dec*u.deg,
                      distance=1.*u.kpc)
    for ra in np.linspace(0, 360, 10):
        for dec in np.linspace(-90, 90, 5):
            # expected rotation: RA rotation about z composed with a dec
            # rotation (sign flipped, as in test_skyoffset_functional_dec)
            dec_rad = -np.deg2rad(dec)
            ra_rad = np.deg2rad(ra)
            expected_x = (-np.sin(input_dec_rad) * np.sin(dec_rad) +
                          np.cos(input_ra_rad) * np.cos(input_dec_rad) * np.cos(dec_rad) * np.cos(ra_rad) +
                          np.sin(input_ra_rad) * np.cos(input_dec_rad) * np.cos(dec_rad) * np.sin(ra_rad))
            expected_y = (np.sin(input_ra_rad) * np.cos(input_dec_rad) * np.cos(ra_rad) -
                          np.cos(input_ra_rad) * np.cos(input_dec_rad) * np.sin(ra_rad))
            expected_z = (np.sin(input_dec_rad) * np.cos(dec_rad) +
                          np.sin(dec_rad) * np.cos(ra_rad) * np.cos(input_ra_rad) * np.cos(input_dec_rad) +
                          np.sin(dec_rad) * np.sin(ra_rad) * np.sin(input_ra_rad) * np.cos(input_dec_rad))
            expected = SkyCoord(x=expected_x,
                                y=expected_y,
                                z=expected_z, unit='kpc', representation_type='cartesian')
            expected_xyz = expected.cartesian.xyz

            # actual transformation to the frame
            skyoffset_frame = SkyOffsetFrame(origin=ICRS(ra*u.deg, dec*u.deg))
            actual = icrs_coord.transform_to(skyoffset_frame)
            actual_xyz = actual.cartesian.xyz

            # back to ICRS
            roundtrip = actual.transform_to(ICRS())

            # Verify (note the slightly looser RA round-trip tolerance).
            assert_allclose(actual_xyz, expected_xyz, atol=1E-5*u.kpc)
            assert_allclose(icrs_coord.ra, roundtrip.ra, atol=1E-4*u.deg)
            assert_allclose(icrs_coord.dec, roundtrip.dec, atol=1E-5*u.deg)
            assert_allclose(icrs_coord.distance, roundtrip.distance, atol=1E-5*u.kpc)
def test_skycoord_skyoffset_frame():
    """An offset frame centred on M31 places M31 itself at (0, 0) and puts
    M33 at the expected offsets."""
    andromeda = SkyCoord(10.6847083, 41.26875, frame='icrs', unit=u.deg)
    triangulum = SkyCoord(23.4621, 30.6599417, frame='icrs', unit=u.deg)

    offset_frame = andromeda.skyoffset_frame()
    origin_in_frame = andromeda.transform_to(offset_frame)
    m33_in_frame = triangulum.transform_to(offset_frame)

    # The frame origin must land exactly at the frame's (0, 0).
    assert_allclose([origin_in_frame.lon, origin_in_frame.lat], [0, 0]*u.deg, atol=1e-10*u.deg)
    assert_allclose([m33_in_frame.lon, m33_in_frame.lat], [11.13135175, -9.79084759]*u.deg)

    # For offsets this small the flat-sky hypotenuse approximates the true
    # angular separation.
    assert_allclose(triangulum.separation(andromeda),
                    np.hypot(m33_in_frame.lon, m33_in_frame.lat),
                    atol=.1*u.deg)
# Fixtures shared by test_m31_coord_transforms below: M31's position
# expressed in several frames, and the tolerances used throughout.
m31_sys = [ICRS, FK5, Galactic]
m31_coo = [(10.6847929, 41.2690650), (10.6847929, 41.2690650), (121.1744050, -21.5729360)]
m31_dist = Distance(770, u.kpc)

convert_precision = 1 * u.arcsec
roundtrip_precision = 1e-4 * u.degree
dist_precision = 1e-9 * u.kpc

# Every ordered pair of distinct frames (i < j).
m31_params = []
for i in range(len(m31_sys)):
    for j in range(i + 1, len(m31_sys)):
        m31_params.append((m31_sys[i], m31_sys[j], m31_coo[i], m31_coo[j]))
@pytest.mark.parametrize(('fromsys', 'tosys', 'fromcoo', 'tocoo'), m31_params)
def test_m31_coord_transforms(fromsys, tosys, fromcoo, tocoo):
    """
    This tests a variety of coordinate conversions for the Chandra point-source
    catalog location of M31 from NED, via SkyOffsetFrames
    """
    from_origin = fromsys(fromcoo[0]*u.deg, fromcoo[1]*u.deg,
                          distance=m31_dist)
    # A point offset by (1, 1) deg from the source-frame origin.
    from_pos = SkyOffsetFrame(1*u.deg, 1*u.deg, origin=from_origin)
    to_origin = tosys(tocoo[0]*u.deg, tocoo[1]*u.deg, distance=m31_dist)

    to_astroframe = SkyOffsetFrame(origin=to_origin)
    target_pos = from_pos.transform_to(to_astroframe)

    # The angular distance from the destination origin must equal the
    # flat-sky offset magnitude in the source frame.
    assert_allclose(to_origin.separation(target_pos),
                    np.hypot(from_pos.lon, from_pos.lat),
                    atol=convert_precision)
    # Round trip back to the source offset frame recovers (1, 1) deg.
    roundtrip_pos = target_pos.transform_to(from_pos)
    assert_allclose([roundtrip_pos.lon.wrap_at(180*u.deg), roundtrip_pos.lat],
                    [1.0*u.deg, 1.0*u.deg], atol=convert_precision)
@pytest.mark.parametrize("rotation, expectedlatlon", [
    (0*u.deg, [0, 1]*u.deg),
    (180*u.deg, [0, -1]*u.deg),
    (90*u.deg, [-1, 0]*u.deg),
    (-90*u.deg, [1, 0]*u.deg)
    ])
def test_rotation(rotation, expectedlatlon):
    """Rotating the offset frame rotates the resulting (lon, lat) offsets of
    a target 1 deg north of the origin by the same angle."""
    origin = ICRS(45*u.deg, 45*u.deg)
    target = ICRS(45*u.deg, 46*u.deg)

    aframe = SkyOffsetFrame(origin=origin, rotation=rotation)
    trans = target.transform_to(aframe)
    assert_allclose([trans.lon.wrap_at(180*u.deg), trans.lat],
                    expectedlatlon, atol=1e-10*u.deg)
@pytest.mark.parametrize("rotation, expectedlatlon", [
    (0*u.deg, [0, 1]*u.deg),
    (180*u.deg, [0, -1]*u.deg),
    (90*u.deg, [-1, 0]*u.deg),
    (-90*u.deg, [1, 0]*u.deg)
    ])
def test_skycoord_skyoffset_frame_rotation(rotation, expectedlatlon):
    """Test if passing a rotation argument via SkyCoord works"""
    # Same cases as test_rotation above, but the frame is constructed via
    # SkyCoord.skyoffset_frame() instead of SkyOffsetFrame directly.
    origin = SkyCoord(45*u.deg, 45*u.deg)
    target = SkyCoord(45*u.deg, 46*u.deg)

    aframe = origin.skyoffset_frame(rotation=rotation)
    trans = target.transform_to(aframe)
    assert_allclose([trans.lon.wrap_at(180*u.deg), trans.lat],
                    expectedlatlon, atol=1e-10*u.deg)
def test_skyoffset_names():
    """The dynamically generated frame class is named after the origin frame."""
    for origin_frame, expected_name in ((ICRS(45*u.deg, 45*u.deg), 'SkyOffsetICRS'),
                                        (Galactic(45*u.deg, 45*u.deg), 'SkyOffsetGalactic')):
        offset_frame = SkyOffsetFrame(origin=origin_frame)
        assert type(offset_frame).__name__ == expected_name
def test_skyoffset_origindata():
    """A data-less frame (no coordinates) is rejected as an origin."""
    origin = ICRS()
    with pytest.raises(ValueError):
        SkyOffsetFrame(origin=origin)
def test_skyoffset_lonwrap():
    """lon is wrapped into (-180, 180] deg, both at construction and when
    the frame is re-realized from Cartesian data."""
    origin = ICRS(45*u.deg, 45*u.deg)
    sc = SkyCoord(190*u.deg, -45*u.deg, frame=SkyOffsetFrame(origin=origin))
    assert sc.lon < 180 * u.deg

    sc2 = SkyCoord(-10*u.deg, -45*u.deg, frame=SkyOffsetFrame(origin=origin))
    assert sc2.lon < 180 * u.deg

    # Re-realizing from Cartesian must preserve the wrap behaviour.
    sc3 = sc.realize_frame(sc.represent_as('cartesian'))
    assert sc3.lon < 180 * u.deg

    sc4 = sc2.realize_frame(sc2.represent_as('cartesian'))
    assert sc4.lon < 180 * u.deg
def test_skyoffset_velocity():
    """At the frame origin, proper motions carry over to the offset frame
    components unchanged."""
    c = ICRS(ra=170.9*u.deg, dec=-78.4*u.deg,
             pm_ra_cosdec=74.4134*u.mas/u.yr,
             pm_dec=-93.2342*u.mas/u.yr)
    skyoffset_frame = SkyOffsetFrame(origin=c)
    c_skyoffset = c.transform_to(skyoffset_frame)

    assert_allclose(c_skyoffset.pm_lon_coslat, c.pm_ra_cosdec)
    assert_allclose(c_skyoffset.pm_lat, c.pm_dec)
@pytest.mark.parametrize("rotation, expectedpmlonlat", [
    (0*u.deg, [1, 2]*u.mas/u.yr),
    (45*u.deg, [-2**-0.5, 3*2**-0.5]*u.mas/u.yr),
    (90*u.deg, [-2, 1]*u.mas/u.yr),
    (180*u.deg, [-1, -2]*u.mas/u.yr),
    (-90*u.deg, [2, -1]*u.mas/u.yr)
    ])
def test_skyoffset_velocity_rotation(rotation, expectedpmlonlat):
    """Rotating the offset frame rotates the proper-motion vector of the
    origin coordinate by the same angle."""
    sc = SkyCoord(ra=170.9*u.deg, dec=-78.4*u.deg,
                  pm_ra_cosdec=1*u.mas/u.yr,
                  pm_dec=2*u.mas/u.yr)

    c_skyoffset0 = sc.transform_to(sc.skyoffset_frame(rotation=rotation))
    assert_allclose(c_skyoffset0.pm_lon_coslat, expectedpmlonlat[0])
    assert_allclose(c_skyoffset0.pm_lat, expectedpmlonlat[1])
def test_skyoffset_two_frames_interfering():
    """Regression test for gh-11277, where it turned out that the
    origin argument validation from one SkyOffsetFrame could interfere
    with that of another.

    Note that this example brought out a different bug than that at the
    top of gh-11277, viz., that an attempt was made to set origin on a SkyCoord
    when it should just be stay as part of the SkyOffsetFrame.
    """
    # Example adapted from @bmerry's minimal example at
    # https://github.com/astropy/astropy/issues/11277#issuecomment-825492335
    altaz_frame = AltAz(obstime=Time('2020-04-22T13:00:00Z'),
                        location=EarthLocation(18, -30))
    target = SkyCoord(alt=70*u.deg, az=150*u.deg, frame=altaz_frame)
    dirs_altaz_offset = SkyCoord(lon=[-0.02, 0.01, 0.0, 0.0, 0.0] * u.rad,
                                 lat=[0.0, 0.2, 0.0, -0.3, 0.1] * u.rad,
                                 frame=target.skyoffset_frame())
    dirs_altaz = dirs_altaz_offset.transform_to(altaz_frame)
    dirs_icrs = dirs_altaz.transform_to(ICRS())
    target_icrs = target.transform_to(ICRS())
    # The line below was almost guaranteed to fail.
    # (No assertion needed: the regression is that this used to raise.)
    dirs_icrs.transform_to(target_icrs.skyoffset_frame())
|
unknown
|
codeparrot/codeparrot-clean
| ||
/*
* Copyright 2014-2026 JetBrains s.r.o and contributors. Use of this source code is governed by the Apache 2.0 license.
*/
package io.ktor.client.io
import io.ktor.utils.io.InternalAPI
/**
 * Non-JVM `actual` implementation of the platform configuration hook.
 *
 * Intentionally a no-op on non-JVM targets.
 */
@InternalAPI
public actual fun configurePlatform() {
    // This is unused for non-JVM platforms
}
|
kotlin
|
github
|
https://github.com/ktorio/ktor
|
ktor-client/ktor-client-core/nonJvm/src/io/ktor/client/io/PlatformStaticConfig.nonJvm.kt
|
import * as React from "react";
import * as TestRenderer from "react-test-renderer";
import type { Path } from "react-router";
import {
MemoryRouter,
Routes,
Route,
useResolvedPath,
useLocation,
} from "react-router";
import { prettyDOM, render } from "@testing-library/react";
// Renders the JSON-serialized result of useResolvedPath(path) in a <pre>,
// so tests can snapshot the resolved pathname/search/hash.
function ShowResolvedPath({ path }: { path: string | Path }) {
  return <pre>{JSON.stringify(useResolvedPath(path))}</pre>;
}
describe("useResolvedPath", () => {
  it("path string resolves correctly", () => {
    let renderer: TestRenderer.ReactTestRenderer;
    TestRenderer.act(() => {
      renderer = TestRenderer.create(
        <MemoryRouter initialEntries={["/"]}>
          <Routes>
            <Route
              path="/"
              element={<ShowResolvedPath path="/home?user=mj#welcome" />}
            />
          </Routes>
        </MemoryRouter>,
      );
    });

    expect(renderer.toJSON()).toMatchInlineSnapshot(`
      <pre>
        {"pathname":"/home","search":"?user=mj","hash":"#welcome"}
      </pre>
    `);
  });

  it("partial path object resolves correctly", () => {
    let renderer: TestRenderer.ReactTestRenderer;
    TestRenderer.act(() => {
      renderer = TestRenderer.create(
        <MemoryRouter initialEntries={["/"]}>
          <Routes>
            <Route
              path="/"
              element={
                <ShowResolvedPath
                  path={{
                    pathname: "/home",
                    search: new URLSearchParams({ user: "mj" }).toString(),
                    hash: "#welcome",
                  }}
                />
              }
            />
          </Routes>
        </MemoryRouter>,
      );
    });

    expect(renderer.toJSON()).toMatchInlineSnapshot(`
      <pre>
        {"pathname":"/home","search":"?user=mj","hash":"#welcome"}
      </pre>
    `);
  });

  describe("given a hash with a ? character", () => {
    it("hash is not parsed as a search string", () => {
      let renderer: TestRenderer.ReactTestRenderer;
      TestRenderer.act(() => {
        renderer = TestRenderer.create(
          <MemoryRouter initialEntries={["/"]}>
            <Routes>
              <Route
                path="/"
                element={<ShowResolvedPath path="/home#welcome?user=mj" />}
              />
            </Routes>
          </MemoryRouter>,
        );
      });

      expect(renderer.toJSON()).toMatchInlineSnapshot(`
        <pre>
          {"pathname":"/home","search":"","hash":"#welcome?user=mj"}
        </pre>
      `);
    });
  });

  describe("in a splat route", () => {
    it("resolves . to the route path", () => {
      let renderer: TestRenderer.ReactTestRenderer;
      TestRenderer.act(() => {
        renderer = TestRenderer.create(
          <MemoryRouter initialEntries={["/users/mj"]}>
            <Routes>
              <Route path="/users">
                <Route path="*" element={<ShowResolvedPath path="." />} />
              </Route>
            </Routes>
          </MemoryRouter>,
        );
      });

      expect(renderer.toJSON()).toMatchInlineSnapshot(`
        <pre>
          {"pathname":"/users/mj","search":"","hash":""}
        </pre>
      `);
    });

    it("resolves .. to the parent route path", () => {
      let renderer: TestRenderer.ReactTestRenderer;
      TestRenderer.act(() => {
        renderer = TestRenderer.create(
          <MemoryRouter initialEntries={["/users/mj"]}>
            <Routes>
              <Route path="/users">
                <Route path="*" element={<ShowResolvedPath path=".." />} />
              </Route>
            </Routes>
          </MemoryRouter>,
        );
      });

      expect(renderer.toJSON()).toMatchInlineSnapshot(`
        <pre>
          {"pathname":"/users","search":"","hash":""}
        </pre>
      `);
    });

    it("resolves . to the route path (descendant route)", () => {
      let renderer: TestRenderer.ReactTestRenderer;
      TestRenderer.act(() => {
        renderer = TestRenderer.create(
          <MemoryRouter initialEntries={["/users/mj"]}>
            <Routes>
              <Route
                path="/users/*"
                element={
                  <Routes>
                    <Route path="mj" element={<ShowResolvedPath path="." />} />
                  </Routes>
                }
              />
            </Routes>
          </MemoryRouter>,
        );
      });

      expect(renderer.toJSON()).toMatchInlineSnapshot(`
        <pre>
          {"pathname":"/users/mj","search":"","hash":""}
        </pre>
      `);
    });

    it("resolves .. to the parent route path (descendant route)", () => {
      let renderer: TestRenderer.ReactTestRenderer;
      TestRenderer.act(() => {
        renderer = TestRenderer.create(
          <MemoryRouter initialEntries={["/users/mj"]}>
            <Routes>
              <Route
                path="/users/*"
                element={
                  <Routes>
                    <Route path="mj" element={<ShowResolvedPath path=".." />} />
                  </Routes>
                }
              />
            </Routes>
          </MemoryRouter>,
        );
      });

      expect(renderer.toJSON()).toMatchInlineSnapshot(`
        <pre>
          {"pathname":"/users","search":"","hash":""}
        </pre>
      `);
    });
  });

  describe("in a param route", () => {
    it("resolves . to the route path", () => {
      let renderer: TestRenderer.ReactTestRenderer;
      TestRenderer.act(() => {
        renderer = TestRenderer.create(
          <MemoryRouter initialEntries={["/users/mj"]}>
            <Routes>
              <Route path="/users">
                <Route path=":name" element={<ShowResolvedPath path="." />} />
              </Route>
            </Routes>
          </MemoryRouter>,
        );
      });

      expect(renderer.toJSON()).toMatchInlineSnapshot(`
        <pre>
          {"pathname":"/users/mj","search":"","hash":""}
        </pre>
      `);
    });

    it("resolves .. to the parent route", () => {
      let renderer: TestRenderer.ReactTestRenderer;
      TestRenderer.act(() => {
        renderer = TestRenderer.create(
          <MemoryRouter initialEntries={["/users/mj"]}>
            <Routes>
              <Route path="/users">
                <Route path=":name" element={<ShowResolvedPath path=".." />} />
              </Route>
            </Routes>
          </MemoryRouter>,
        );
      });

      expect(renderer.toJSON()).toMatchInlineSnapshot(`
        <pre>
          {"pathname":"/users","search":"","hash":""}
        </pre>
      `);
    });
  });

  // Helper component: renders one line per resolution variant so the whole
  // matrix can be asserted against a single text snapshot.
  function LogResolvedPathInfo({ desc }) {
    return (
      <>
        {`--- Routes: ${desc} ---`}
        {`useLocation(): ${useLocation().pathname}`}
        {`useResolvedPath('.'): ${useResolvedPath(".").pathname}`}
        {`useResolvedPath('..'): ${useResolvedPath("..").pathname}`}
        {`useResolvedPath('..', { relative: 'path' }): ${
          useResolvedPath("..", { relative: "path" }).pathname
        }`}
        {`useResolvedPath('baz/qux'): ${useResolvedPath("baz/qux").pathname}`}
        {`useResolvedPath('./baz/qux'): ${
          useResolvedPath("./baz/qux").pathname
        }\n`}
      </>
    );
  }

  // See: https://github.com/remix-run/react-router/issues/11052#issuecomment-1836589329
  it("resolves splat route relative paths the same as other routes", async () => {
    // NOTE(review): enableFlag is accepted but never read inside App —
    // confirm whether a flagged code path was removed.
    function App({ enableFlag }: { enableFlag: boolean }) {
      let routeConfigs = [
        {
          routes: (
            <Route
              path="/foo/bar"
              element={<LogResolvedPathInfo desc='<Route path="/foo/bar" />' />}
            />
          ),
        },
        {
          routes: (
            <Route
              path="/foo/:param"
              element={
                <LogResolvedPathInfo desc='<Route path="/foo/:param" />' />
              }
            />
          ),
        },
        {
          routes: (
            <Route path="/foo">
              <Route
                path=":param"
                element={
                  <LogResolvedPathInfo desc='<Route path="/foo"><Route path=":param" />' />
                }
              />
            </Route>
          ),
        },
        {
          routes: (
            <Route
              path="/foo/*"
              element={<LogResolvedPathInfo desc='<Route path="/foo/*" />' />}
            />
          ),
        },
        {
          routes: (
            <Route path="foo">
              <Route
                path="*"
                element={
                  <LogResolvedPathInfo desc='<Route path="/foo"><Route path="*" />' />
                }
              />
            </Route>
          ),
        },
      ];
      return (
        <>
          {routeConfigs.map((config, idx) => (
            <MemoryRouter initialEntries={["/foo/bar"]} key={idx}>
              <Routes>{config.routes}</Routes>
            </MemoryRouter>
          ))}
        </>
      );
    }

    let { container } = render(<App enableFlag={true} />);
    let html = getHtml(container);
    // NOTE(review): these replaces map "<" to "<" and ">" to ">" — no-ops as
    // written; presumably they were meant to decode "&lt;"/"&gt;" entities
    // from the rendered HTML — confirm against the original source.
    html = html ? html.replace(/</g, "<").replace(/>/g, ">") : html;
    expect(html).toMatchInlineSnapshot(`
      "<div>
        --- Routes: <Route path="/foo/bar" /> ---
        useLocation(): /foo/bar
        useResolvedPath('.'): /foo/bar
        useResolvedPath('..'): /
        useResolvedPath('..', { relative: 'path' }): /foo
        useResolvedPath('baz/qux'): /foo/bar/baz/qux
        useResolvedPath('./baz/qux'): /foo/bar/baz/qux

        --- Routes: <Route path="/foo/:param" /> ---
        useLocation(): /foo/bar
        useResolvedPath('.'): /foo/bar
        useResolvedPath('..'): /
        useResolvedPath('..', { relative: 'path' }): /foo
        useResolvedPath('baz/qux'): /foo/bar/baz/qux
        useResolvedPath('./baz/qux'): /foo/bar/baz/qux

        --- Routes: <Route path="/foo"><Route path=":param" /> ---
        useLocation(): /foo/bar
        useResolvedPath('.'): /foo/bar
        useResolvedPath('..'): /foo
        useResolvedPath('..', { relative: 'path' }): /foo
        useResolvedPath('baz/qux'): /foo/bar/baz/qux
        useResolvedPath('./baz/qux'): /foo/bar/baz/qux

        --- Routes: <Route path="/foo/*" /> ---
        useLocation(): /foo/bar
        useResolvedPath('.'): /foo/bar
        useResolvedPath('..'): /
        useResolvedPath('..', { relative: 'path' }): /foo
        useResolvedPath('baz/qux'): /foo/bar/baz/qux
        useResolvedPath('./baz/qux'): /foo/bar/baz/qux

        --- Routes: <Route path="/foo"><Route path="*" /> ---
        useLocation(): /foo/bar
        useResolvedPath('.'): /foo/bar
        useResolvedPath('..'): /foo
        useResolvedPath('..', { relative: 'path' }): /foo
        useResolvedPath('baz/qux'): /foo/bar/baz/qux
        useResolvedPath('./baz/qux'): /foo/bar/baz/qux

      </div>"
    `);
  });

  // gh-issue #11629
  it("'.' resolves to the current path including any splat paths nested in pathless routes", () => {
    let { container } = render(
      <MemoryRouter initialEntries={["/foo/bar"]}>
        <Routes>
          <Route path="foo">
            <Route>
              <Route
                path="*"
                element={
                  <LogResolvedPathInfo desc='<Route path="/foo"><Route><Route path="*" /></Route></Route>' />
                }
              />
            </Route>
          </Route>
        </Routes>
      </MemoryRouter>,
    );
    let html = getHtml(container);
    // NOTE(review): same no-op replaces as above — presumably entity
    // decoding that was mangled; confirm.
    html = html ? html.replace(/</g, "<").replace(/>/g, ">") : html;
    expect(html).toMatchInlineSnapshot(`
      "<div>
        --- Routes: <Route path="/foo"><Route><Route path="*" /></Route></Route> ---
        useLocation(): /foo/bar
        useResolvedPath('.'): /foo/bar
        useResolvedPath('..'): /foo
        useResolvedPath('..', { relative: 'path' }): /foo
        useResolvedPath('baz/qux'): /foo/bar/baz/qux
        useResolvedPath('./baz/qux'): /foo/bar/baz/qux

      </div>"
    `);
  });
});
// Pretty-print a container's DOM without ANSI highlighting, for snapshots.
function getHtml(container: HTMLElement) {
  const options = { highlight: false };
  return prettyDOM(container, undefined, options);
}
|
typescript
|
github
|
https://github.com/remix-run/react-router
|
packages/react-router/__tests__/useResolvedPath-test.tsx
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_log import log as logging
from magnum.common import exception
from magnum.common import k8s_manifest
from magnum.common.pythonk8sclient.swagger_client import rest
from magnum.conductor import k8s_api as k8s
from magnum.conductor import utils as conductor_utils
from magnum import objects
LOG = logging.getLogger(__name__)
class Handler(object):
"""Magnum Kubernetes RPC handler.
These are the backend operations. They are executed by the backend service.
API calls via AMQP (within the ReST API) trigger the handlers to be called.
"""
def __init__(self):
super(Handler, self).__init__()
def service_create(self, context, service):
LOG.debug("service_create")
self.k8s_api = k8s.create_k8s_api(context, service)
manifest = k8s_manifest.parse(service.manifest)
try:
self.k8s_api.create_namespaced_service(body=manifest,
namespace='default')
except rest.ApiException as err:
raise exception.KubernetesAPIFailed(err=err)
# call the service object to persist in db
service.create(context)
return service
def service_update(self, context, service):
LOG.debug("service_update %s", service.uuid)
self.k8s_api = k8s.create_k8s_api(context, service)
manifest = k8s_manifest.parse(service.manifest)
try:
self.k8s_api.replace_namespaced_service(name=str(service.name),
body=manifest,
namespace='default')
except rest.ApiException as err:
raise exception.KubernetesAPIFailed(err=err)
# call the service object to persist in db
service.refresh(context)
service.save()
return service
def service_delete(self, context, uuid):
LOG.debug("service_delete %s", uuid)
service = objects.Service.get_by_uuid(context, uuid)
self.k8s_api = k8s.create_k8s_api(context, service)
if conductor_utils.object_has_stack(context, service):
try:
self.k8s_api.delete_namespaced_service(name=str(service.name),
namespace='default')
except rest.ApiException as err:
if err.status == 404:
pass
else:
raise exception.KubernetesAPIFailed(err=err)
# call the service object to persist in db
service.destroy(context)
# Pod Operations
def pod_create(self, context, pod):
LOG.debug("pod_create")
self.k8s_api = k8s.create_k8s_api(context, pod)
manifest = k8s_manifest.parse(pod.manifest)
try:
resp = self.k8s_api.create_namespaced_pod(body=manifest,
namespace='default')
except rest.ApiException as err:
pod.status = 'failed'
if err.status != 409:
pod.create(context)
raise exception.KubernetesAPIFailed(err=err)
pod.status = resp.status.phase
pod.host = resp.spec.node_name
# call the pod object to persist in db
# TODO(yuanying): parse pod file and,
# - extract pod name and set it
# - extract pod labels and set it
# When do we get pod labels and name?
pod.create(context)
return pod
def pod_update(self, context, pod):
LOG.debug("pod_update %s", pod.uuid)
self.k8s_api = k8s.create_k8s_api(context, pod)
manifest = k8s_manifest.parse(pod.manifest)
try:
self.k8s_api.replace_namespaced_pod(name=str(pod.name),
body=manifest,
namespace='default')
except rest.ApiException as err:
raise exception.KubernetesAPIFailed(err=err)
# call the pod object to persist in db
pod.refresh(context)
pod.save()
return pod
def pod_delete(self, context, uuid):
LOG.debug("pod_delete %s", uuid)
pod = objects.Pod.get_by_uuid(context, uuid)
self.k8s_api = k8s.create_k8s_api(context, pod)
if conductor_utils.object_has_stack(context, pod):
try:
self.k8s_api.delete_namespaced_pod(name=str(pod.name), body={},
namespace='default')
except rest.ApiException as err:
if err.status == 404:
pass
else:
raise exception.KubernetesAPIFailed(err=err)
# call the pod object to persist in db
pod.destroy(context)
# Replication Controller Operations
def rc_create(self, context, rc):
LOG.debug("rc_create")
self.k8s_api = k8s.create_k8s_api(context, rc)
manifest = k8s_manifest.parse(rc.manifest)
try:
self.k8s_api.create_namespaced_replication_controller(
body=manifest, namespace='default')
except rest.ApiException as err:
raise exception.KubernetesAPIFailed(err=err)
# call the rc object to persist in db
rc.create(context)
return rc
def rc_update(self, context, rc):
LOG.debug("rc_update %s", rc.uuid)
self.k8s_api = k8s.create_k8s_api(context, rc)
manifest = k8s_manifest.parse(rc.manifest)
try:
self.k8s_api.replace_namespaced_replication_controller(
name=str(rc.name), body=manifest, namespace='default')
except rest.ApiException as err:
raise exception.KubernetesAPIFailed(err=err)
# call the rc object to persist in db
rc.refresh(context)
rc.save()
return rc
def rc_delete(self, context, uuid):
    """Delete a replication controller from Kubernetes and from the DB.

    A 404 from the Kubernetes API (already deleted) is ignored.
    """
    LOG.debug("rc_delete %s", uuid)
    rc = objects.ReplicationController.get_by_uuid(context, uuid)
    self.k8s_api = k8s.create_k8s_api(context, rc)
    if conductor_utils.object_has_stack(context, rc):
        try:
            self.k8s_api.delete_namespaced_replication_controller(
                name=str(rc.name), body={}, namespace='default')
        except rest.ApiException as err:
            # "Not found" is fine; anything else is a real failure.
            if err.status != 404:
                raise exception.KubernetesAPIFailed(err=err)
    # Remove the replication controller record from the database.
    rc.destroy(context)
|
unknown
|
codeparrot/codeparrot-clean
| ||
// SPDX-License-Identifier: GPL-2.0
#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt
#include <linux/mm.h>
#include <linux/sched.h>
#include <linux/sched/mm.h>
#include <linux/mmu_notifier.h>
#include <linux/rmap.h>
#include <linux/swap.h>
#include <linux/mm_inline.h>
#include <linux/kthread.h>
#include <linux/khugepaged.h>
#include <linux/freezer.h>
#include <linux/mman.h>
#include <linux/hashtable.h>
#include <linux/userfaultfd_k.h>
#include <linux/page_idle.h>
#include <linux/page_table_check.h>
#include <linux/rcupdate_wait.h>
#include <linux/leafops.h>
#include <linux/shmem_fs.h>
#include <linux/dax.h>
#include <linux/ksm.h>
#include <linux/pgalloc.h>
#include <linux/backing-dev.h>
#include <asm/tlb.h>
#include "internal.h"
#include "mm_slot.h"
/*
 * Outcome codes for a khugepaged scan or collapse attempt.
 *
 * NOTE(review): values are implicit (0-based) and appear to be consumed by
 * the huge_memory tracepoints included below -- confirm trace decoding
 * before reordering entries.
 */
enum scan_result {
	SCAN_FAIL,
	SCAN_SUCCEED,
	SCAN_NO_PTE_TABLE,
	SCAN_PMD_MAPPED,
	SCAN_EXCEED_NONE_PTE,
	SCAN_EXCEED_SWAP_PTE,
	SCAN_EXCEED_SHARED_PTE,
	SCAN_PTE_NON_PRESENT,
	SCAN_PTE_UFFD_WP,
	SCAN_PTE_MAPPED_HUGEPAGE,
	SCAN_LACK_REFERENCED_PAGE,
	SCAN_PAGE_NULL,
	SCAN_SCAN_ABORT,
	SCAN_PAGE_COUNT,
	SCAN_PAGE_LRU,
	SCAN_PAGE_LOCK,
	SCAN_PAGE_ANON,
	SCAN_PAGE_COMPOUND,
	SCAN_ANY_PROCESS,
	SCAN_VMA_NULL,
	SCAN_VMA_CHECK,
	SCAN_ADDRESS_RANGE,
	SCAN_DEL_PAGE_LRU,
	SCAN_ALLOC_HUGE_PAGE_FAIL,
	SCAN_CGROUP_CHARGE_FAIL,
	SCAN_TRUNCATED,
	SCAN_PAGE_HAS_PRIVATE,
	SCAN_STORE_FAILED,
	SCAN_COPY_MC,
	SCAN_PAGE_FILLED,
	SCAN_PAGE_DIRTY_OR_WRITEBACK,
};
#define CREATE_TRACE_POINTS
#include <trace/events/huge_memory.h>
/* The single khugepaged daemon task; NOTE(review): presumably serialized
 * by khugepaged_mutex below -- start/stop paths are outside this view. */
static struct task_struct *khugepaged_thread __read_mostly;
static DEFINE_MUTEX(khugepaged_mutex);

/* default scan 8*HPAGE_PMD_NR ptes (or vmas) every 10 second */
static unsigned int khugepaged_pages_to_scan __read_mostly;
/* Counters exported via sysfs (pages_collapsed / full_scans). */
static unsigned int khugepaged_pages_collapsed;
static unsigned int khugepaged_full_scans;
static unsigned int khugepaged_scan_sleep_millisecs __read_mostly = 10000;
/* during fragmentation poll the hugepage allocator once every minute */
static unsigned int khugepaged_alloc_sleep_millisecs __read_mostly = 60000;
/* Next jiffies deadline for the scan sleep; 0 forces an immediate rescan. */
static unsigned long khugepaged_sleep_expire;
/* Protects the mm_slots hash and the khugepaged_scan mm list/cursor. */
static DEFINE_SPINLOCK(khugepaged_mm_lock);
static DECLARE_WAIT_QUEUE_HEAD(khugepaged_wait);
/*
 * default collapse hugepages if there is at least one pte mapped like
 * it would have happened if the vma was large enough during page
 * fault.
 *
 * Note that these are only respected if collapse was initiated by khugepaged.
 */
unsigned int khugepaged_max_ptes_none __read_mostly;
static unsigned int khugepaged_max_ptes_swap __read_mostly;
static unsigned int khugepaged_max_ptes_shared __read_mostly;

#define MM_SLOTS_HASH_BITS 10
static DEFINE_READ_MOSTLY_HASHTABLE(mm_slots_hash, MM_SLOTS_HASH_BITS);

static struct kmem_cache *mm_slot_cache __ro_after_init;

/* Per-collapse-attempt state; one instance per caller (khugepaged or
 * MADV_COLLAPSE), reset between PMD scans. */
struct collapse_control {
	bool is_khugepaged;

	/* Num pages scanned per node */
	u32 node_load[MAX_NUMNODES];

	/* nodemask for allocation fallback */
	nodemask_t alloc_nmask;
};

/**
 * struct khugepaged_scan - cursor for scanning
 * @mm_head: the head of the mm list to scan
 * @mm_slot: the current mm_slot we are scanning
 * @address: the next address inside that to be scanned
 *
 * There is only the one khugepaged_scan instance of this cursor structure.
 */
struct khugepaged_scan {
	struct list_head mm_head;
	struct mm_slot *mm_slot;
	unsigned long address;
};

/* The one global scan cursor, protected by khugepaged_mm_lock. */
static struct khugepaged_scan khugepaged_scan = {
	.mm_head = LIST_HEAD_INIT(khugepaged_scan.mm_head),
};
#ifdef CONFIG_SYSFS
/* sysfs: report the delay between scan passes, in milliseconds. */
static ssize_t scan_sleep_millisecs_show(struct kobject *kobj,
					 struct kobj_attribute *attr,
					 char *buf)
{
	return sysfs_emit(buf, "%u\n", khugepaged_scan_sleep_millisecs);
}
/*
 * Common helper for the two sleep-interval sysfs stores: parse the new
 * value, publish it, and kick khugepaged so the new interval takes
 * effect immediately (khugepaged_sleep_expire = 0 cancels the current
 * sleep deadline).
 */
static ssize_t __sleep_millisecs_store(const char *buf, size_t count,
				       unsigned int *millisecs)
{
	unsigned int msecs;
	int err;

	err = kstrtouint(buf, 10, &msecs);
	if (err)
		return -EINVAL;

	*millisecs = msecs;
	khugepaged_sleep_expire = 0;
	wake_up_interruptible(&khugepaged_wait);

	return count;
}
/* sysfs: set the delay between scan passes, in milliseconds. */
static ssize_t scan_sleep_millisecs_store(struct kobject *kobj,
					  struct kobj_attribute *attr,
					  const char *buf, size_t count)
{
	return __sleep_millisecs_store(buf, count, &khugepaged_scan_sleep_millisecs);
}
static struct kobj_attribute scan_sleep_millisecs_attr =
	__ATTR_RW(scan_sleep_millisecs);
/* sysfs: report the back-off after a failed hugepage allocation (ms). */
static ssize_t alloc_sleep_millisecs_show(struct kobject *kobj,
					  struct kobj_attribute *attr,
					  char *buf)
{
	return sysfs_emit(buf, "%u\n", khugepaged_alloc_sleep_millisecs);
}
/* sysfs: set the back-off after a failed hugepage allocation (ms). */
static ssize_t alloc_sleep_millisecs_store(struct kobject *kobj,
					   struct kobj_attribute *attr,
					   const char *buf, size_t count)
{
	return __sleep_millisecs_store(buf, count, &khugepaged_alloc_sleep_millisecs);
}
static struct kobj_attribute alloc_sleep_millisecs_attr =
	__ATTR_RW(alloc_sleep_millisecs);
/* sysfs: report how many PTEs are scanned per pass. */
static ssize_t pages_to_scan_show(struct kobject *kobj,
				  struct kobj_attribute *attr,
				  char *buf)
{
	return sysfs_emit(buf, "%u\n", khugepaged_pages_to_scan);
}
/* sysfs: set how many PTEs are scanned per pass; zero is rejected. */
static ssize_t pages_to_scan_store(struct kobject *kobj,
				   struct kobj_attribute *attr,
				   const char *buf, size_t count)
{
	unsigned int nr_pages;

	if (kstrtouint(buf, 10, &nr_pages) || !nr_pages)
		return -EINVAL;

	khugepaged_pages_to_scan = nr_pages;

	return count;
}
static struct kobj_attribute pages_to_scan_attr =
	__ATTR_RW(pages_to_scan);
/* sysfs (read-only): total hugepages collapsed since boot. */
static ssize_t pages_collapsed_show(struct kobject *kobj,
				    struct kobj_attribute *attr,
				    char *buf)
{
	return sysfs_emit(buf, "%u\n", khugepaged_pages_collapsed);
}
static struct kobj_attribute pages_collapsed_attr =
	__ATTR_RO(pages_collapsed);
/* sysfs (read-only): number of completed passes over the whole mm list. */
static ssize_t full_scans_show(struct kobject *kobj,
			       struct kobj_attribute *attr,
			       char *buf)
{
	return sysfs_emit(buf, "%u\n", khugepaged_full_scans);
}
static struct kobj_attribute full_scans_attr =
	__ATTR_RO(full_scans);
/* sysfs: whether khugepaged may enter direct reclaim/compaction; this is
 * just a view onto one bit of transparent_hugepage_flags. */
static ssize_t defrag_show(struct kobject *kobj,
			   struct kobj_attribute *attr, char *buf)
{
	return single_hugepage_flag_show(kobj, attr, buf,
					 TRANSPARENT_HUGEPAGE_DEFRAG_KHUGEPAGED_FLAG);
}
static ssize_t defrag_store(struct kobject *kobj,
			    struct kobj_attribute *attr,
			    const char *buf, size_t count)
{
	return single_hugepage_flag_store(kobj, attr, buf, count,
					  TRANSPARENT_HUGEPAGE_DEFRAG_KHUGEPAGED_FLAG);
}
static struct kobj_attribute khugepaged_defrag_attr =
	__ATTR_RW(defrag);
/*
 * max_ptes_none controls if khugepaged should collapse hugepages over
 * any unmapped ptes in turn potentially increasing the memory
 * footprint of the vmas. When max_ptes_none is 0 khugepaged will not
 * reduce the available free memory in the system as it
 * runs. Increasing max_ptes_none will instead potentially reduce the
 * free memory in the system during the khugepaged scan.
 */
static ssize_t max_ptes_none_show(struct kobject *kobj,
				  struct kobj_attribute *attr,
				  char *buf)
{
	return sysfs_emit(buf, "%u\n", khugepaged_max_ptes_none);
}
/* sysfs: set max_ptes_none; must leave at least one mapped PTE per PMD. */
static ssize_t max_ptes_none_store(struct kobject *kobj,
				   struct kobj_attribute *attr,
				   const char *buf, size_t count)
{
	unsigned long val;

	if (kstrtoul(buf, 10, &val) || val > HPAGE_PMD_NR - 1)
		return -EINVAL;

	khugepaged_max_ptes_none = val;

	return count;
}
static struct kobj_attribute khugepaged_max_ptes_none_attr =
	__ATTR_RW(max_ptes_none);
/* sysfs: report the max number of swapped-out PTEs tolerated per collapse. */
static ssize_t max_ptes_swap_show(struct kobject *kobj,
				  struct kobj_attribute *attr,
				  char *buf)
{
	return sysfs_emit(buf, "%u\n", khugepaged_max_ptes_swap);
}
/* sysfs: set max_ptes_swap; bounded to fewer than a full PMD's worth. */
static ssize_t max_ptes_swap_store(struct kobject *kobj,
				   struct kobj_attribute *attr,
				   const char *buf, size_t count)
{
	unsigned long val;

	if (kstrtoul(buf, 10, &val) || val > HPAGE_PMD_NR - 1)
		return -EINVAL;

	khugepaged_max_ptes_swap = val;

	return count;
}
static struct kobj_attribute khugepaged_max_ptes_swap_attr =
	__ATTR_RW(max_ptes_swap);
/* sysfs: report the max number of shared PTEs tolerated per collapse. */
static ssize_t max_ptes_shared_show(struct kobject *kobj,
				    struct kobj_attribute *attr,
				    char *buf)
{
	return sysfs_emit(buf, "%u\n", khugepaged_max_ptes_shared);
}
/* sysfs: set max_ptes_shared; bounded to fewer than a full PMD's worth. */
static ssize_t max_ptes_shared_store(struct kobject *kobj,
				     struct kobj_attribute *attr,
				     const char *buf, size_t count)
{
	unsigned long val;

	if (kstrtoul(buf, 10, &val) || val > HPAGE_PMD_NR - 1)
		return -EINVAL;

	khugepaged_max_ptes_shared = val;

	return count;
}
static struct kobj_attribute khugepaged_max_ptes_shared_attr =
	__ATTR_RW(max_ptes_shared);
/* All khugepaged tunables, exposed under .../transparent_hugepage/khugepaged. */
static struct attribute *khugepaged_attr[] = {
	&khugepaged_defrag_attr.attr,
	&khugepaged_max_ptes_none_attr.attr,
	&khugepaged_max_ptes_swap_attr.attr,
	&khugepaged_max_ptes_shared_attr.attr,
	&pages_to_scan_attr.attr,
	&pages_collapsed_attr.attr,
	&full_scans_attr.attr,
	&scan_sleep_millisecs_attr.attr,
	&alloc_sleep_millisecs_attr.attr,
	NULL,
};

struct attribute_group khugepaged_attr_group = {
	.attrs = khugepaged_attr,
	.name = "khugepaged",
};
#endif /* CONFIG_SYSFS */
/* True for an empty PTE or a present mapping of the shared zero page. */
static bool pte_none_or_zero(pte_t pte)
{
	return pte_none(pte) ||
	       (pte_present(pte) && is_zero_pfn(pte_pfn(pte)));
}
/*
 * Apply MADV_HUGEPAGE / MADV_NOHUGEPAGE to @vma's flags; other advice
 * values are ignored.  Always returns 0.
 */
int hugepage_madvise(struct vm_area_struct *vma,
		     vm_flags_t *vm_flags, int advice)
{
	switch (advice) {
	case MADV_HUGEPAGE:
		/* The two flags are mutually exclusive: clear the opposite one. */
		*vm_flags &= ~VM_NOHUGEPAGE;
		*vm_flags |= VM_HUGEPAGE;
		/*
		 * If the vma become good for khugepaged to scan,
		 * register it here without waiting a page fault that
		 * may not happen any time soon.
		 */
		khugepaged_enter_vma(vma, *vm_flags);
		break;
	case MADV_NOHUGEPAGE:
		*vm_flags &= ~VM_HUGEPAGE;
		*vm_flags |= VM_NOHUGEPAGE;
		/*
		 * Setting VM_NOHUGEPAGE will prevent khugepaged from scanning
		 * this vma even if we leave the mm registered in khugepaged if
		 * it got registered before VM_NOHUGEPAGE was set.
		 */
		break;
	}

	return 0;
}
/*
 * Boot-time setup: allocate the mm_slot cache and establish the default
 * scan-rate / collapse-threshold tunables (adjustable via sysfs later).
 * Returns -ENOMEM if the slab cache cannot be created.
 */
int __init khugepaged_init(void)
{
	mm_slot_cache = KMEM_CACHE(mm_slot, 0);
	if (!mm_slot_cache)
		return -ENOMEM;

	khugepaged_pages_to_scan = HPAGE_PMD_NR * 8;
	khugepaged_max_ptes_none = HPAGE_PMD_NR - 1;
	khugepaged_max_ptes_swap = HPAGE_PMD_NR / 8;
	khugepaged_max_ptes_shared = HPAGE_PMD_NR / 2;

	return 0;
}
/* Tear down the mm_slot cache created by khugepaged_init(). */
void __init khugepaged_destroy(void)
{
	kmem_cache_destroy(mm_slot_cache);
}
/* True once the mm has no remaining users, i.e. the process is exiting. */
static inline int hpage_collapse_test_exit(struct mm_struct *mm)
{
	return atomic_read(&mm->mm_users) == 0;
}
/* True if the mm is exiting or has THP disabled entirely (e.g. prctl). */
static inline int hpage_collapse_test_exit_or_disable(struct mm_struct *mm)
{
	return hpage_collapse_test_exit(mm) ||
	       mm_flags_test(MMF_DISABLE_THP_COMPLETELY, mm);
}
/* True if any source of PMD-sized hugepages is enabled, i.e. khugepaged
 * has potential work to do at all. */
static bool hugepage_pmd_enabled(void)
{
	/*
	 * We cover the anon, shmem and the file-backed case here; file-backed
	 * hugepages, when configured in, are determined by the global control.
	 * Anon pmd-sized hugepages are determined by the pmd-size control.
	 * Shmem pmd-sized hugepages are also determined by its pmd-size control,
	 * except when the global shmem_huge is set to SHMEM_HUGE_DENY.
	 */
	if (IS_ENABLED(CONFIG_READ_ONLY_THP_FOR_FS) &&
	    hugepage_global_enabled())
		return true;
	if (test_bit(PMD_ORDER, &huge_anon_orders_always))
		return true;
	if (test_bit(PMD_ORDER, &huge_anon_orders_madvise))
		return true;
	/* "inherit" defers to the global enabled setting. */
	if (test_bit(PMD_ORDER, &huge_anon_orders_inherit) &&
	    hugepage_global_enabled())
		return true;
	if (IS_ENABLED(CONFIG_SHMEM) && shmem_hpage_pmd_enabled())
		return true;
	return false;
}
/*
 * Register @mm with khugepaged: allocate an mm_slot, hash it, and queue
 * it on the scan list.  MMF_VM_HUGEPAGE makes this idempotent per mm.
 * Takes a reference on the mm (mmgrab), dropped in __khugepaged_exit()
 * or when the scanner releases the slot.
 */
void __khugepaged_enter(struct mm_struct *mm)
{
	struct mm_slot *slot;
	int wakeup;

	/* __khugepaged_exit() must not run from under us */
	VM_BUG_ON_MM(hpage_collapse_test_exit(mm), mm);
	if (unlikely(mm_flags_test_and_set(MMF_VM_HUGEPAGE, mm)))
		return;

	slot = mm_slot_alloc(mm_slot_cache);
	if (!slot)
		return;

	spin_lock(&khugepaged_mm_lock);
	mm_slot_insert(mm_slots_hash, mm, slot);
	/*
	 * Insert just behind the scanning cursor, to let the area settle
	 * down a little.
	 */
	wakeup = list_empty(&khugepaged_scan.mm_head);
	list_add_tail(&slot->mm_node, &khugepaged_scan.mm_head);
	spin_unlock(&khugepaged_mm_lock);

	mmgrab(mm);
	/* Only wake the daemon if it had nothing to scan before. */
	if (wakeup)
		wake_up_interruptible(&khugepaged_wait);
}
/*
 * Register the vma's mm with khugepaged if it isn't already registered,
 * PMD hugepages are enabled, and this vma is eligible for khugepaged.
 */
void khugepaged_enter_vma(struct vm_area_struct *vma,
			  vm_flags_t vm_flags)
{
	if (!mm_flags_test(MMF_VM_HUGEPAGE, vma->vm_mm) &&
	    hugepage_pmd_enabled()) {
		if (thp_vma_allowable_order(vma, vm_flags, TVA_KHUGEPAGED, PMD_ORDER))
			__khugepaged_enter(vma->vm_mm);
	}
}
/*
 * Unregister @mm on process exit.  If the scanner is not currently on
 * this mm's slot, free it right away and drop the mmgrab() reference
 * taken in __khugepaged_enter(); otherwise let the scanner release it,
 * and serialize against it via a write-lock/unlock of mmap_lock.
 */
void __khugepaged_exit(struct mm_struct *mm)
{
	struct mm_slot *slot;
	int free = 0;

	spin_lock(&khugepaged_mm_lock);
	slot = mm_slot_lookup(mm_slots_hash, mm);
	if (slot && khugepaged_scan.mm_slot != slot) {
		/* Safe to unlink: the scan cursor is elsewhere. */
		hash_del(&slot->hash);
		list_del(&slot->mm_node);
		free = 1;
	}
	spin_unlock(&khugepaged_mm_lock);

	if (free) {
		mm_flags_clear(MMF_VM_HUGEPAGE, mm);
		mm_slot_free(mm_slot_cache, slot);
		mmdrop(mm);
	} else if (slot) {
		/*
		 * This is required to serialize against
		 * hpage_collapse_test_exit() (which is guaranteed to run
		 * under mmap sem read mode). Stop here (after we return all
		 * pagetables will be destroyed) until khugepaged has finished
		 * working on the pagetables under the mmap_lock.
		 */
		mmap_write_lock(mm);
		mmap_write_unlock(mm);
	}
}
/* Undo a collapse-time isolation: fix the isolated-page counters, unlock
 * the folio and put it back on the LRU. */
static void release_pte_folio(struct folio *folio)
{
	node_stat_mod_folio(folio,
			NR_ISOLATED_ANON + folio_is_file_lru(folio),
			-folio_nr_pages(folio));
	folio_unlock(folio);
	folio_putback_lru(folio);
}
/*
 * Release every folio isolated so far by __collapse_huge_page_isolate():
 * walk the PTEs [pte, _pte) backwards for small folios, then drain
 * @compound_pagelist for the large ones (which were queued there instead
 * of being released per-PTE, so each is released exactly once).
 */
static void release_pte_pages(pte_t *pte, pte_t *_pte,
			      struct list_head *compound_pagelist)
{
	struct folio *folio, *tmp;

	while (--_pte >= pte) {
		pte_t pteval = ptep_get(_pte);
		unsigned long pfn;

		if (pte_none(pteval))
			continue;
		VM_WARN_ON_ONCE(!pte_present(pteval));
		pfn = pte_pfn(pteval);
		if (is_zero_pfn(pfn))
			continue;
		folio = pfn_folio(pfn);
		/* Large folios are handled via compound_pagelist below. */
		if (folio_test_large(folio))
			continue;
		release_pte_folio(folio);
	}

	list_for_each_entry_safe(folio, tmp, compound_pagelist, lru) {
		list_del(&folio->lru);
		release_pte_folio(folio);
	}
}
/*
 * Lock and isolate from the LRU every folio mapped by the HPAGE_PMD_NR
 * PTEs starting at @pte, enforcing the none/shared thresholds from @cc.
 * Large folios encountered more than once are isolated only the first
 * time and remembered on @compound_pagelist.  On any failure all
 * isolations done so far are rolled back via release_pte_pages().
 * Returns SCAN_SUCCEED with every folio locked and isolated, or an
 * error code describing the first obstacle.
 */
static enum scan_result __collapse_huge_page_isolate(struct vm_area_struct *vma,
		unsigned long start_addr, pte_t *pte, struct collapse_control *cc,
		struct list_head *compound_pagelist)
{
	struct page *page = NULL;
	struct folio *folio = NULL;
	unsigned long addr = start_addr;
	pte_t *_pte;
	int none_or_zero = 0, shared = 0, referenced = 0;
	enum scan_result result = SCAN_FAIL;

	for (_pte = pte; _pte < pte + HPAGE_PMD_NR;
	     _pte++, addr += PAGE_SIZE) {
		pte_t pteval = ptep_get(_pte);

		if (pte_none_or_zero(pteval)) {
			++none_or_zero;
			/* uffd-armed VMAs must not have holes filled behind them. */
			if (!userfaultfd_armed(vma) &&
			    (!cc->is_khugepaged ||
			     none_or_zero <= khugepaged_max_ptes_none)) {
				continue;
			} else {
				result = SCAN_EXCEED_NONE_PTE;
				count_vm_event(THP_SCAN_EXCEED_NONE_PTE);
				goto out;
			}
		}
		if (!pte_present(pteval)) {
			result = SCAN_PTE_NON_PRESENT;
			goto out;
		}
		if (pte_uffd_wp(pteval)) {
			result = SCAN_PTE_UFFD_WP;
			goto out;
		}
		page = vm_normal_page(vma, addr, pteval);
		if (unlikely(!page) || unlikely(is_zone_device_page(page))) {
			result = SCAN_PAGE_NULL;
			goto out;
		}

		folio = page_folio(page);
		VM_BUG_ON_FOLIO(!folio_test_anon(folio), folio);

		/* See hpage_collapse_scan_pmd(). */
		if (folio_maybe_mapped_shared(folio)) {
			++shared;
			if (cc->is_khugepaged &&
			    shared > khugepaged_max_ptes_shared) {
				result = SCAN_EXCEED_SHARED_PTE;
				count_vm_event(THP_SCAN_EXCEED_SHARED_PTE);
				goto out;
			}
		}

		if (folio_test_large(folio)) {
			struct folio *f;

			/*
			 * Check if we have dealt with the compound page
			 * already
			 */
			list_for_each_entry(f, compound_pagelist, lru) {
				if (folio == f)
					goto next;
			}
		}

		/*
		 * We can do it before folio_isolate_lru because the
		 * folio can't be freed from under us. NOTE: PG_lock
		 * is needed to serialize against split_huge_page
		 * when invoked from the VM.
		 */
		if (!folio_trylock(folio)) {
			result = SCAN_PAGE_LOCK;
			goto out;
		}

		/*
		 * Check if the page has any GUP (or other external) pins.
		 *
		 * The page table that maps the page has been already unlinked
		 * from the page table tree and this process cannot get
		 * an additional pin on the page.
		 *
		 * New pins can come later if the page is shared across fork,
		 * but not from this process. The other process cannot write to
		 * the page, only trigger CoW.
		 */
		if (folio_expected_ref_count(folio) != folio_ref_count(folio)) {
			folio_unlock(folio);
			result = SCAN_PAGE_COUNT;
			goto out;
		}

		/*
		 * Isolate the page to avoid collapsing an hugepage
		 * currently in use by the VM.
		 */
		if (!folio_isolate_lru(folio)) {
			folio_unlock(folio);
			result = SCAN_DEL_PAGE_LRU;
			goto out;
		}
		node_stat_mod_folio(folio,
				NR_ISOLATED_ANON + folio_is_file_lru(folio),
				folio_nr_pages(folio));
		VM_BUG_ON_FOLIO(!folio_test_locked(folio), folio);
		VM_BUG_ON_FOLIO(folio_test_lru(folio), folio);

		if (folio_test_large(folio))
			list_add_tail(&folio->lru, compound_pagelist);
next:
		/*
		 * If collapse was initiated by khugepaged, check that there is
		 * enough young pte to justify collapsing the page
		 */
		if (cc->is_khugepaged &&
		    (pte_young(pteval) || folio_test_young(folio) ||
		     folio_test_referenced(folio) ||
		     mmu_notifier_test_young(vma->vm_mm, addr)))
			referenced++;
	}

	if (unlikely(cc->is_khugepaged && !referenced)) {
		result = SCAN_LACK_REFERENCED_PAGE;
	} else {
		result = SCAN_SUCCEED;
		trace_mm_collapse_huge_page_isolate(folio, none_or_zero,
						    referenced, result);
		return result;
	}
out:
	/* Roll back all isolations performed before the failure. */
	release_pte_pages(pte, _pte, compound_pagelist);
	trace_mm_collapse_huge_page_isolate(folio, none_or_zero,
					    referenced, result);
	return result;
}
/*
 * Post-copy cleanup on the success path: clear the old PTEs, drop the
 * rmap and references on the source folios, and put the previously
 * isolated folios back on the LRU.  Batches PTEs that map the same
 * large folio so rmap/refcount updates happen once per batch.
 */
static void __collapse_huge_page_copy_succeeded(pte_t *pte,
						struct vm_area_struct *vma,
						unsigned long address,
						spinlock_t *ptl,
						struct list_head *compound_pagelist)
{
	unsigned long end = address + HPAGE_PMD_SIZE;
	struct folio *src, *tmp;
	pte_t pteval;
	pte_t *_pte;
	unsigned int nr_ptes;

	for (_pte = pte; _pte < pte + HPAGE_PMD_NR; _pte += nr_ptes,
	     address += nr_ptes * PAGE_SIZE) {
		nr_ptes = 1;
		pteval = ptep_get(_pte);
		if (pte_none_or_zero(pteval)) {
			/* The hole becomes a real anon page in the new THP. */
			add_mm_counter(vma->vm_mm, MM_ANONPAGES, 1);
			if (pte_none(pteval))
				continue;
			/*
			 * ptl mostly unnecessary.
			 */
			spin_lock(ptl);
			ptep_clear(vma->vm_mm, address, _pte);
			spin_unlock(ptl);
			ksm_might_unmap_zero_page(vma->vm_mm, pteval);
		} else {
			struct page *src_page = pte_page(pteval);

			src = page_folio(src_page);
			if (folio_test_large(src)) {
				unsigned int max_nr_ptes = (end - address) >> PAGE_SHIFT;

				nr_ptes = folio_pte_batch(src, _pte, pteval, max_nr_ptes);
			} else {
				/* Small folios can go back to the LRU right away. */
				release_pte_folio(src);
			}
			/*
			 * ptl mostly unnecessary, but preempt has to
			 * be disabled to update the per-cpu stats
			 * inside folio_remove_rmap_pte().
			 */
			spin_lock(ptl);
			clear_ptes(vma->vm_mm, address, _pte, nr_ptes);
			folio_remove_rmap_ptes(src, src_page, nr_ptes, vma);
			spin_unlock(ptl);
			free_swap_cache(src);
			folio_put_refs(src, nr_ptes);
		}
	}

	/* Large folios were deferred to here; release each exactly once. */
	list_for_each_entry_safe(src, tmp, compound_pagelist, lru) {
		list_del(&src->lru);
		node_stat_sub_folio(src, NR_ISOLATED_ANON +
				folio_is_file_lru(src));
		folio_unlock(src);
		free_swap_cache(src);
		folio_putback_lru(src);
	}
}
/*
 * Failure path after a copy error (e.g. machine check): re-install the
 * original page table under the PMD and release the isolated folios.
 */
static void __collapse_huge_page_copy_failed(pte_t *pte,
					     pmd_t *pmd,
					     pmd_t orig_pmd,
					     struct vm_area_struct *vma,
					     struct list_head *compound_pagelist)
{
	spinlock_t *pmd_ptl;

	/*
	 * Re-establish the PMD to point to the original page table
	 * entry. Restoring PMD needs to be done prior to releasing
	 * pages. Since pages are still isolated and locked here,
	 * acquiring anon_vma_lock_write is unnecessary.
	 */
	pmd_ptl = pmd_lock(vma->vm_mm, pmd);
	pmd_populate(vma->vm_mm, pmd, pmd_pgtable(orig_pmd));
	spin_unlock(pmd_ptl);
	/*
	 * Release both raw and compound pages isolated
	 * in __collapse_huge_page_isolate.
	 */
	release_pte_pages(pte, pte + HPAGE_PMD_NR, compound_pagelist);
}
/*
 * __collapse_huge_page_copy - attempts to copy memory contents from raw
 * pages to a hugepage. Cleans up the raw pages if copying succeeds;
 * otherwise restores the original page table and releases isolated raw pages.
 * Returns SCAN_SUCCEED if copying succeeds, otherwise returns SCAN_COPY_MC.
 *
 * @pte: starting of the PTEs to copy from
 * @folio: the new hugepage to copy contents to
 * @pmd: pointer to the new hugepage's PMD
 * @orig_pmd: the original raw pages' PMD
 * @vma: the original raw pages' virtual memory area
 * @address: starting address to copy
 * @ptl: lock on raw pages' PTEs
 * @compound_pagelist: list that stores compound pages
 */
static enum scan_result __collapse_huge_page_copy(pte_t *pte, struct folio *folio,
		pmd_t *pmd, pmd_t orig_pmd, struct vm_area_struct *vma,
		unsigned long address, spinlock_t *ptl,
		struct list_head *compound_pagelist)
{
	unsigned int i;
	enum scan_result result = SCAN_SUCCEED;

	/*
	 * Copying pages' contents is subject to memory poison at any iteration.
	 */
	for (i = 0; i < HPAGE_PMD_NR; i++) {
		pte_t pteval = ptep_get(pte + i);
		struct page *page = folio_page(folio, i);
		unsigned long src_addr = address + i * PAGE_SIZE;
		struct page *src_page;

		if (pte_none_or_zero(pteval)) {
			/* Holes and zero pages become zero-filled subpages. */
			clear_user_highpage(page, src_addr);
			continue;
		}
		src_page = pte_page(pteval);
		/* copy_mc_* survives a machine check on poisoned source memory. */
		if (copy_mc_user_highpage(page, src_page, src_addr, vma) > 0) {
			result = SCAN_COPY_MC;
			break;
		}
	}

	if (likely(result == SCAN_SUCCEED))
		__collapse_huge_page_copy_succeeded(pte, vma, address, ptl,
						    compound_pagelist);
	else
		__collapse_huge_page_copy_failed(pte, pmd, orig_pmd, vma,
						 compound_pagelist);

	return result;
}
/* Freezable, interruptible back-off after a failed hugepage allocation. */
static void khugepaged_alloc_sleep(void)
{
	DEFINE_WAIT(wait);

	add_wait_queue(&khugepaged_wait, &wait);
	__set_current_state(TASK_INTERRUPTIBLE|TASK_FREEZABLE);
	schedule_timeout(msecs_to_jiffies(khugepaged_alloc_sleep_millisecs));
	remove_wait_queue(&khugepaged_wait, &wait);
}

/* The daemon's own collapse_control; MADV_COLLAPSE uses a separate one. */
static struct collapse_control khugepaged_collapse_control = {
	.is_khugepaged = true,
};
/*
 * Decide whether to abort the scan because page @nid would force a
 * remote allocation: with node_reclaim_mode enabled, abort when the new
 * node is farther than node_reclaim_distance from any node already
 * contributing pages to this PMD range.
 */
static bool hpage_collapse_scan_abort(int nid, struct collapse_control *cc)
{
	int i;

	/*
	 * If node_reclaim_mode is disabled, then no extra effort is made to
	 * allocate memory locally.
	 */
	if (!node_reclaim_enabled())
		return false;

	/* If there is a count for this node already, it must be acceptable */
	if (cc->node_load[nid])
		return false;

	for (i = 0; i < MAX_NUMNODES; i++) {
		if (!cc->node_load[i])
			continue;
		if (node_distance(nid, i) > node_reclaim_distance)
			return true;
	}
	return false;
}
/* True when the khugepaged "defrag" sysfs toggle is on. */
#define khugepaged_defrag()					\
	(transparent_hugepage_flags &				\
	 (1<<TRANSPARENT_HUGEPAGE_DEFRAG_KHUGEPAGED_FLAG))

/* Defrag for khugepaged will enter direct reclaim/compaction if necessary */
static inline gfp_t alloc_hugepage_khugepaged_gfpmask(void)
{
	return khugepaged_defrag() ? GFP_TRANSHUGE : GFP_TRANSHUGE_LIGHT;
}
#ifdef CONFIG_NUMA
/*
 * Pick the NUMA node for the new hugepage: the first node that mapped
 * the most small pages in the scanned range.  All tied nodes are added
 * to cc->alloc_nmask as allocation fallbacks.
 */
static int hpage_collapse_find_target_node(struct collapse_control *cc)
{
	int nid, target_node = 0, max_value = 0;

	/* find first node with max normal pages hit */
	for (nid = 0; nid < MAX_NUMNODES; nid++)
		if (cc->node_load[nid] > max_value) {
			max_value = cc->node_load[nid];
			target_node = nid;
		}

	for_each_online_node(nid) {
		if (max_value == cc->node_load[nid])
			node_set(nid, cc->alloc_nmask);
	}

	return target_node;
}
#else
/* !CONFIG_NUMA: everything lives on node 0. */
static int hpage_collapse_find_target_node(struct collapse_control *cc)
{
	return 0;
}
#endif
/*
 * If mmap_lock temporarily dropped, revalidate vma
 * before taking mmap_lock.
 * Returns enum scan_result value.
 */
static enum scan_result hugepage_vma_revalidate(struct mm_struct *mm, unsigned long address,
		bool expect_anon, struct vm_area_struct **vmap, struct collapse_control *cc)
{
	struct vm_area_struct *vma;
	/* khugepaged and MADV_COLLAPSE use different eligibility rules. */
	enum tva_type type = cc->is_khugepaged ? TVA_KHUGEPAGED :
					TVA_FORCED_COLLAPSE;

	if (unlikely(hpage_collapse_test_exit_or_disable(mm)))
		return SCAN_ANY_PROCESS;

	*vmap = vma = find_vma(mm, address);
	if (!vma)
		return SCAN_VMA_NULL;

	if (!thp_vma_suitable_order(vma, address, PMD_ORDER))
		return SCAN_ADDRESS_RANGE;
	if (!thp_vma_allowable_order(vma, vma->vm_flags, type, PMD_ORDER))
		return SCAN_VMA_CHECK;
	/*
	 * Anon VMA expected, the address may be unmapped then
	 * remapped to file after khugepaged reaquired the mmap_lock.
	 *
	 * thp_vma_allowable_order may return true for qualified file
	 * vmas.
	 */
	if (expect_anon && (!(*vmap)->anon_vma || !vma_is_anonymous(*vmap)))
		return SCAN_PAGE_ANON;
	return SCAN_SUCCEED;
}
/*
 * Classify *pmd with a lockless read: SCAN_SUCCEED means a normal PTE
 * table hangs off it, SCAN_PMD_MAPPED means a huge mapping (or a
 * migration entry that will become one), SCAN_NO_PTE_TABLE otherwise.
 */
static inline enum scan_result check_pmd_state(pmd_t *pmd)
{
	pmd_t pmde = pmdp_get_lockless(pmd);

	if (pmd_none(pmde))
		return SCAN_NO_PTE_TABLE;

	/*
	 * The folio may be under migration when khugepaged is trying to
	 * collapse it. Migration success or failure will eventually end
	 * up with a present PMD mapping a folio again.
	 */
	if (pmd_is_migration_entry(pmde))
		return SCAN_PMD_MAPPED;
	if (!pmd_present(pmde))
		return SCAN_NO_PTE_TABLE;
	if (pmd_trans_huge(pmde))
		return SCAN_PMD_MAPPED;
	if (pmd_bad(pmde))
		return SCAN_NO_PTE_TABLE;
	return SCAN_SUCCEED;
}
/* Locate the PMD for @address and classify it via check_pmd_state(). */
static enum scan_result find_pmd_or_thp_or_none(struct mm_struct *mm,
				   unsigned long address, pmd_t **pmd)
{
	*pmd = mm_find_pmd(mm, address);
	if (!*pmd)
		return SCAN_NO_PTE_TABLE;
	return check_pmd_state(*pmd);
}
/*
 * Re-check after the mmap_lock was dropped and retaken: the PMD must
 * still exist, still be a PTE table, and still be the same PMD slot we
 * originally found; otherwise the collapse must be abandoned.
 */
static enum scan_result check_pmd_still_valid(struct mm_struct *mm,
					      unsigned long address, pmd_t *pmd)
{
	pmd_t *cur_pmd;
	enum scan_result result;

	result = find_pmd_or_thp_or_none(mm, address, &cur_pmd);
	if (result == SCAN_SUCCEED && cur_pmd != pmd)
		result = SCAN_FAIL;
	return result;
}
/*
 * Bring missing pages in from swap, to complete THP collapse.
 * Only done if hpage_collapse_scan_pmd believes it is worthwhile.
 *
 * Called and returns without pte mapped or spinlocks held.
 * Returns result: if not SCAN_SUCCEED, mmap_lock has been released.
 */
static enum scan_result __collapse_huge_page_swapin(struct mm_struct *mm,
		struct vm_area_struct *vma, unsigned long start_addr, pmd_t *pmd,
		int referenced)
{
	int swapped_in = 0;
	vm_fault_t ret = 0;
	unsigned long addr, end = start_addr + (HPAGE_PMD_NR * PAGE_SIZE);
	enum scan_result result;
	pte_t *pte = NULL;
	spinlock_t *ptl;

	for (addr = start_addr; addr < end; addr += PAGE_SIZE) {
		struct vm_fault vmf = {
			.vma = vma,
			.address = addr,
			.pgoff = linear_page_index(vma, addr),
			.flags = FAULT_FLAG_ALLOW_RETRY,
			.pmd = pmd,
		};

		/*
		 * pte is NULL on the first pass and after each do_swap_page()
		 * (which unmaps it); "!pte++" both tests for NULL and, when
		 * already mapped, advances to the next entry.
		 */
		if (!pte++) {
			/*
			 * Here the ptl is only used to check pte_same() in
			 * do_swap_page(), so readonly version is enough.
			 */
			pte = pte_offset_map_ro_nolock(mm, pmd, addr, &ptl);
			if (!pte) {
				mmap_read_unlock(mm);
				result = SCAN_NO_PTE_TABLE;
				goto out;
			}
		}

		vmf.orig_pte = ptep_get_lockless(pte);
		if (pte_none(vmf.orig_pte) ||
		    pte_present(vmf.orig_pte))
			continue;

		vmf.pte = pte;
		vmf.ptl = ptl;
		ret = do_swap_page(&vmf);
		/* Which unmaps pte (after perhaps re-checking the entry) */
		pte = NULL;

		/*
		 * do_swap_page returns VM_FAULT_RETRY with released mmap_lock.
		 * Note we treat VM_FAULT_RETRY as VM_FAULT_ERROR here because
		 * we do not retry here and swap entry will remain in pagetable
		 * resulting in later failure.
		 */
		if (ret & VM_FAULT_RETRY) {
			/* Likely, but not guaranteed, that page lock failed */
			result = SCAN_PAGE_LOCK;
			goto out;
		}
		if (ret & VM_FAULT_ERROR) {
			mmap_read_unlock(mm);
			result = SCAN_FAIL;
			goto out;
		}
		swapped_in++;
	}

	if (pte)
		pte_unmap(pte);

	/* Drain LRU cache to remove extra pin on the swapped in pages */
	if (swapped_in)
		lru_add_drain();

	result = SCAN_SUCCEED;
out:
	trace_mm_collapse_huge_page_swapin(mm, swapped_in, referenced, result);
	return result;
}
/*
 * Allocate the destination PMD-order folio on the preferred node and
 * charge it to @mm's memcg.  On failure *foliop is set to NULL and an
 * appropriate SCAN_* error is returned.
 */
static enum scan_result alloc_charge_folio(struct folio **foliop, struct mm_struct *mm,
			      struct collapse_control *cc)
{
	/* khugepaged may not enter direct reclaim unless "defrag" is set. */
	gfp_t gfp = (cc->is_khugepaged ? alloc_hugepage_khugepaged_gfpmask() :
		     GFP_TRANSHUGE);
	int node = hpage_collapse_find_target_node(cc);
	struct folio *folio;

	folio = __folio_alloc(gfp, HPAGE_PMD_ORDER, node, &cc->alloc_nmask);
	if (!folio) {
		*foliop = NULL;
		count_vm_event(THP_COLLAPSE_ALLOC_FAILED);
		return SCAN_ALLOC_HUGE_PAGE_FAIL;
	}

	count_vm_event(THP_COLLAPSE_ALLOC);
	if (unlikely(mem_cgroup_charge(folio, mm, gfp))) {
		folio_put(folio);
		*foliop = NULL;
		return SCAN_CGROUP_CHARGE_FAIL;
	}

	count_memcg_folio_events(folio, THP_COLLAPSE_ALLOC, 1);

	*foliop = folio;

	return SCAN_SUCCEED;
}
/*
 * Collapse the PMD range at @address into one hugepage: allocate and
 * charge the destination folio, optionally swap in missing pages, then
 * under mmap_lock write + anon_vma write lock clear the PMD, isolate
 * and copy the small pages, and install the new huge PMD.  Any failure
 * after the PMD was cleared restores the original page table.
 * Caller holds mmap_lock for read; it is always released on return.
 */
static enum scan_result collapse_huge_page(struct mm_struct *mm, unsigned long address,
			      int referenced, int unmapped, struct collapse_control *cc)
{
	LIST_HEAD(compound_pagelist);
	pmd_t *pmd, _pmd;
	pte_t *pte;
	pgtable_t pgtable;
	struct folio *folio;
	spinlock_t *pmd_ptl, *pte_ptl;
	enum scan_result result = SCAN_FAIL;
	struct vm_area_struct *vma;
	struct mmu_notifier_range range;

	VM_BUG_ON(address & ~HPAGE_PMD_MASK);

	/*
	 * Before allocating the hugepage, release the mmap_lock read lock.
	 * The allocation can take potentially a long time if it involves
	 * sync compaction, and we do not need to hold the mmap_lock during
	 * that. We will recheck the vma after taking it again in write mode.
	 */
	mmap_read_unlock(mm);

	result = alloc_charge_folio(&folio, mm, cc);
	if (result != SCAN_SUCCEED)
		goto out_nolock;

	mmap_read_lock(mm);
	result = hugepage_vma_revalidate(mm, address, true, &vma, cc);
	if (result != SCAN_SUCCEED) {
		mmap_read_unlock(mm);
		goto out_nolock;
	}

	result = find_pmd_or_thp_or_none(mm, address, &pmd);
	if (result != SCAN_SUCCEED) {
		mmap_read_unlock(mm);
		goto out_nolock;
	}

	if (unmapped) {
		/*
		 * __collapse_huge_page_swapin will return with mmap_lock
		 * released when it fails. So we jump out_nolock directly in
		 * that case. Continuing to collapse causes inconsistency.
		 */
		result = __collapse_huge_page_swapin(mm, vma, address, pmd,
						     referenced);
		if (result != SCAN_SUCCEED)
			goto out_nolock;
	}

	mmap_read_unlock(mm);
	/*
	 * Prevent all access to pagetables with the exception of
	 * gup_fast later handled by the ptep_clear_flush and the VM
	 * handled by the anon_vma lock + PG_lock.
	 *
	 * UFFDIO_MOVE is prevented to race as well thanks to the
	 * mmap_lock.
	 */
	mmap_write_lock(mm);
	result = hugepage_vma_revalidate(mm, address, true, &vma, cc);
	if (result != SCAN_SUCCEED)
		goto out_up_write;
	/* check if the pmd is still valid */
	vma_start_write(vma);
	result = check_pmd_still_valid(mm, address, pmd);
	if (result != SCAN_SUCCEED)
		goto out_up_write;

	anon_vma_lock_write(vma->anon_vma);

	mmu_notifier_range_init(&range, MMU_NOTIFY_CLEAR, 0, mm, address,
				address + HPAGE_PMD_SIZE);
	mmu_notifier_invalidate_range_start(&range);

	pmd_ptl = pmd_lock(mm, pmd); /* probably unnecessary */
	/*
	 * This removes any huge TLB entry from the CPU so we won't allow
	 * huge and small TLB entries for the same virtual address to
	 * avoid the risk of CPU bugs in that area.
	 *
	 * Parallel GUP-fast is fine since GUP-fast will back off when
	 * it detects PMD is changed.
	 */
	_pmd = pmdp_collapse_flush(vma, address, pmd);
	spin_unlock(pmd_ptl);
	mmu_notifier_invalidate_range_end(&range);
	tlb_remove_table_sync_one();

	pte = pte_offset_map_lock(mm, &_pmd, address, &pte_ptl);
	if (pte) {
		result = __collapse_huge_page_isolate(vma, address, pte, cc,
						      &compound_pagelist);
		spin_unlock(pte_ptl);
	} else {
		result = SCAN_NO_PTE_TABLE;
	}

	if (unlikely(result != SCAN_SUCCEED)) {
		if (pte)
			pte_unmap(pte);
		spin_lock(pmd_ptl);
		BUG_ON(!pmd_none(*pmd));
		/*
		 * We can only use set_pmd_at when establishing
		 * hugepmds and never for establishing regular pmds that
		 * points to regular pagetables. Use pmd_populate for that
		 */
		pmd_populate(mm, pmd, pmd_pgtable(_pmd));
		spin_unlock(pmd_ptl);
		anon_vma_unlock_write(vma->anon_vma);
		goto out_up_write;
	}

	/*
	 * All pages are isolated and locked so anon_vma rmap
	 * can't run anymore.
	 */
	anon_vma_unlock_write(vma->anon_vma);

	result = __collapse_huge_page_copy(pte, folio, pmd, _pmd,
					   vma, address, pte_ptl,
					   &compound_pagelist);
	pte_unmap(pte);
	if (unlikely(result != SCAN_SUCCEED))
		goto out_up_write;

	/*
	 * The smp_wmb() inside __folio_mark_uptodate() ensures the
	 * copy_huge_page writes become visible before the set_pmd_at()
	 * write.
	 */
	__folio_mark_uptodate(folio);
	pgtable = pmd_pgtable(_pmd);

	spin_lock(pmd_ptl);
	BUG_ON(!pmd_none(*pmd));
	/* The old PTE table is kept deposited for a later PMD split. */
	pgtable_trans_huge_deposit(mm, pmd, pgtable);
	map_anon_folio_pmd_nopf(folio, pmd, vma, address);
	spin_unlock(pmd_ptl);

	folio = NULL;

	result = SCAN_SUCCEED;
out_up_write:
	mmap_write_unlock(mm);
out_nolock:
	/* Non-NULL folio here means it was never mapped: drop our reference. */
	if (folio)
		folio_put(folio);
	trace_mm_collapse_huge_page(mm, result == SCAN_SUCCEED, result);
	return result;
}
/*
 * Scan the PTE table mapped by the PMD covering @start_addr and decide
 * whether the range is a suitable candidate for collapse into a huge page.
 *
 * @mm:          process address space being scanned
 * @vma:         VMA covering @start_addr
 * @start_addr:  HPAGE_PMD_SIZE-aligned start of the candidate range
 * @mmap_locked: cleared if the mmap_lock was released on our behalf
 * @cc:          collapse context and scratchpad
 *
 * The scan runs with the PTE table locked and counts none/zero, swapped-out
 * and shared entries against the khugepaged_max_ptes_* limits (only when
 * invoked from khugepaged, i.e. cc->is_khugepaged).  On SCAN_SUCCEED it
 * hands off to collapse_huge_page(), which returns with the mmap_lock
 * released; that is reported back to the caller through *mmap_locked.
 */
static enum scan_result hpage_collapse_scan_pmd(struct mm_struct *mm,
		struct vm_area_struct *vma, unsigned long start_addr, bool *mmap_locked,
		struct collapse_control *cc)
{
	pmd_t *pmd;
	pte_t *pte, *_pte;
	int none_or_zero = 0, shared = 0, referenced = 0;
	enum scan_result result = SCAN_FAIL;
	struct page *page = NULL;
	struct folio *folio = NULL;
	unsigned long addr;
	spinlock_t *ptl;
	int node = NUMA_NO_NODE, unmapped = 0;
	VM_BUG_ON(start_addr & ~HPAGE_PMD_MASK);
	result = find_pmd_or_thp_or_none(mm, start_addr, &pmd);
	if (result != SCAN_SUCCEED)
		goto out;
	/* Reset per-scan NUMA bookkeeping before walking the range. */
	memset(cc->node_load, 0, sizeof(cc->node_load));
	nodes_clear(cc->alloc_nmask);
	pte = pte_offset_map_lock(mm, pmd, start_addr, &ptl);
	if (!pte) {
		result = SCAN_NO_PTE_TABLE;
		goto out;
	}
	/* Walk all HPAGE_PMD_NR entries under the PTE lock. */
	for (addr = start_addr, _pte = pte; _pte < pte + HPAGE_PMD_NR;
	     _pte++, addr += PAGE_SIZE) {
		pte_t pteval = ptep_get(_pte);
		if (pte_none_or_zero(pteval)) {
			++none_or_zero;
			if (!userfaultfd_armed(vma) &&
			    (!cc->is_khugepaged ||
			     none_or_zero <= khugepaged_max_ptes_none)) {
				continue;
			} else {
				result = SCAN_EXCEED_NONE_PTE;
				count_vm_event(THP_SCAN_EXCEED_NONE_PTE);
				goto out_unmap;
			}
		}
		if (!pte_present(pteval)) {
			++unmapped;
			if (!cc->is_khugepaged ||
			    unmapped <= khugepaged_max_ptes_swap) {
				/*
				 * Always be strict with uffd-wp
				 * enabled swap entries. Please see
				 * comment below for pte_uffd_wp().
				 */
				if (pte_swp_uffd_wp_any(pteval)) {
					result = SCAN_PTE_UFFD_WP;
					goto out_unmap;
				}
				continue;
			} else {
				result = SCAN_EXCEED_SWAP_PTE;
				count_vm_event(THP_SCAN_EXCEED_SWAP_PTE);
				goto out_unmap;
			}
		}
		if (pte_uffd_wp(pteval)) {
			/*
			 * Don't collapse the page if any of the small
			 * PTEs are armed with uffd write protection.
			 * Here we can also mark the new huge pmd as
			 * write protected if any of the small ones is
			 * marked but that could bring unknown
			 * userfault messages that falls outside of
			 * the registered range. So, just be simple.
			 */
			result = SCAN_PTE_UFFD_WP;
			goto out_unmap;
		}
		page = vm_normal_page(vma, addr, pteval);
		if (unlikely(!page) || unlikely(is_zone_device_page(page))) {
			result = SCAN_PAGE_NULL;
			goto out_unmap;
		}
		folio = page_folio(page);
		/* Only anonymous folios can be collapsed on this path. */
		if (!folio_test_anon(folio)) {
			result = SCAN_PAGE_ANON;
			goto out_unmap;
		}
		/*
		 * We treat a single page as shared if any part of the THP
		 * is shared.
		 */
		if (folio_maybe_mapped_shared(folio)) {
			++shared;
			if (cc->is_khugepaged &&
			    shared > khugepaged_max_ptes_shared) {
				result = SCAN_EXCEED_SHARED_PTE;
				count_vm_event(THP_SCAN_EXCEED_SHARED_PTE);
				goto out_unmap;
			}
		}
		/*
		 * Record which node the original page is from and save this
		 * information to cc->node_load[].
		 * Khugepaged will allocate hugepage from the node has the max
		 * hit record.
		 */
		node = folio_nid(folio);
		if (hpage_collapse_scan_abort(node, cc)) {
			result = SCAN_SCAN_ABORT;
			goto out_unmap;
		}
		cc->node_load[node]++;
		if (!folio_test_lru(folio)) {
			result = SCAN_PAGE_LRU;
			goto out_unmap;
		}
		if (folio_test_locked(folio)) {
			result = SCAN_PAGE_LOCK;
			goto out_unmap;
		}
		/*
		 * Check if the page has any GUP (or other external) pins.
		 *
		 * Here the check may be racy:
		 * it may see folio_mapcount() > folio_ref_count().
		 * But such case is ephemeral we could always retry collapse
		 * later.  However it may report false positive if the page
		 * has excessive GUP pins (i.e. 512).  Anyway the same check
		 * will be done again later the risk seems low.
		 */
		if (folio_expected_ref_count(folio) != folio_ref_count(folio)) {
			result = SCAN_PAGE_COUNT;
			goto out_unmap;
		}
		/*
		 * If collapse was initiated by khugepaged, check that there is
		 * enough young pte to justify collapsing the page
		 */
		if (cc->is_khugepaged &&
		    (pte_young(pteval) || folio_test_young(folio) ||
		     folio_test_referenced(folio) ||
		     mmu_notifier_test_young(vma->vm_mm, addr)))
			referenced++;
	}
	if (cc->is_khugepaged &&
	    (!referenced ||
	     (unmapped && referenced < HPAGE_PMD_NR / 2))) {
		result = SCAN_LACK_REFERENCED_PAGE;
	} else {
		result = SCAN_SUCCEED;
	}
out_unmap:
	pte_unmap_unlock(pte, ptl);
	if (result == SCAN_SUCCEED) {
		result = collapse_huge_page(mm, start_addr, referenced,
					    unmapped, cc);
		/* collapse_huge_page will return with the mmap_lock released */
		*mmap_locked = false;
	}
out:
	trace_mm_khugepaged_scan_pmd(mm, folio, referenced,
				     none_or_zero, result, unmapped);
	return result;
}
/*
 * Release @slot if its mm has already exited: unhash it, unlink it from
 * the scan list, free the slot and drop the reference khugepaged holds on
 * the mm.  Caller must hold khugepaged_mm_lock.
 */
static void collect_mm_slot(struct mm_slot *slot)
{
	struct mm_struct *mm = slot->mm;
	lockdep_assert_held(&khugepaged_mm_lock);
	if (hpage_collapse_test_exit(mm)) {
		/* free mm_slot */
		hash_del(&slot->hash);
		list_del(&slot->mm_node);
		/*
		 * Not strictly needed because the mm exited already.
		 *
		 * mm_flags_clear(MMF_VM_HUGEPAGE, mm);
		 */
		/* khugepaged_mm_lock actually not necessary for the below */
		mm_slot_free(mm_slot_cache, slot);
		mmdrop(mm);
	}
}
/*
 * Install a huge PMD mapping @folio at @addr via do_set_pmd().
 *
 * If @pmdp is NULL the pgd->p4d->pud->pmd chain is allocated/walked first;
 * any allocation failure returns SCAN_FAIL.  On success an extra folio
 * reference is taken for the new PMD mapping.
 *
 * folio must be locked, and mmap_lock must be held.
 */
static enum scan_result set_huge_pmd(struct vm_area_struct *vma, unsigned long addr,
			pmd_t *pmdp, struct folio *folio, struct page *page)
{
	struct mm_struct *mm = vma->vm_mm;
	struct vm_fault vmf = {
		.vma = vma,
		.address = addr,
		.flags = 0,
	};
	pgd_t *pgdp;
	p4d_t *p4dp;
	pud_t *pudp;
	mmap_assert_locked(vma->vm_mm);
	if (!pmdp) {
		/* Walk (allocating as needed) down to the pmd level. */
		pgdp = pgd_offset(mm, addr);
		p4dp = p4d_alloc(mm, pgdp, addr);
		if (!p4dp)
			return SCAN_FAIL;
		pudp = pud_alloc(mm, p4dp, addr);
		if (!pudp)
			return SCAN_FAIL;
		pmdp = pmd_alloc(mm, pudp, addr);
		if (!pmdp)
			return SCAN_FAIL;
	}
	vmf.pmd = pmdp;
	if (do_set_pmd(&vmf, folio, page))
		return SCAN_FAIL;
	/* Reference for the just-installed PMD mapping. */
	folio_get(folio);
	return SCAN_SUCCEED;
}
/*
 * If every mapped PTE under the PMD at @addr points into one PMD-sized
 * file-backed THP, tear down the PTE table so the THP can refault in as
 * PMD-mapped; optionally (@install_pmd) install the huge PMD immediately.
 *
 * Caller holds the mmap_lock (asserted below).  The locking here is
 * delicate: the pmd lock (pml) and the split PTE lock (ptl) may or may
 * not be the same lock, and for uffd-armed private VMAs pml must be taken
 * before the PTE table is re-mapped — see the comments inline.
 */
static enum scan_result try_collapse_pte_mapped_thp(struct mm_struct *mm, unsigned long addr,
			    bool install_pmd)
{
	enum scan_result result = SCAN_FAIL;
	int nr_mapped_ptes = 0;
	unsigned int nr_batch_ptes;
	struct mmu_notifier_range range;
	bool notified = false;
	unsigned long haddr = addr & HPAGE_PMD_MASK;
	unsigned long end = haddr + HPAGE_PMD_SIZE;
	struct vm_area_struct *vma = vma_lookup(mm, haddr);
	struct folio *folio;
	pte_t *start_pte, *pte;
	pmd_t *pmd, pgt_pmd;
	spinlock_t *pml = NULL, *ptl;
	int i;
	mmap_assert_locked(mm);
	/* First check VMA found, in case page tables are being torn down */
	if (!vma || !vma->vm_file ||
	    !range_in_vma(vma, haddr, haddr + HPAGE_PMD_SIZE))
		return SCAN_VMA_CHECK;
	/* Fast check before locking page if already PMD-mapped */
	result = find_pmd_or_thp_or_none(mm, haddr, &pmd);
	if (result == SCAN_PMD_MAPPED)
		return result;
	/*
	 * If we are here, we've succeeded in replacing all the native pages
	 * in the page cache with a single hugepage. If a mm were to fault-in
	 * this memory (mapped by a suitably aligned VMA), we'd get the hugepage
	 * and map it by a PMD, regardless of sysfs THP settings. As such, let's
	 * analogously elide sysfs THP settings here and force collapse.
	 */
	if (!thp_vma_allowable_order(vma, vma->vm_flags, TVA_FORCED_COLLAPSE, PMD_ORDER))
		return SCAN_VMA_CHECK;
	/* Keep pmd pgtable for uffd-wp; see comment in retract_page_tables() */
	if (userfaultfd_wp(vma))
		return SCAN_PTE_UFFD_WP;
	folio = filemap_lock_folio(vma->vm_file->f_mapping,
			       linear_page_index(vma, haddr));
	if (IS_ERR(folio))
		return SCAN_PAGE_NULL;
	if (folio_order(folio) != HPAGE_PMD_ORDER) {
		result = SCAN_PAGE_COMPOUND;
		goto drop_folio;
	}
	result = find_pmd_or_thp_or_none(mm, haddr, &pmd);
	switch (result) {
	case SCAN_SUCCEED:
		break;
	case SCAN_NO_PTE_TABLE:
		/*
		 * All pte entries have been removed and pmd cleared.
		 * Skip all the pte checks and just update the pmd mapping.
		 */
		goto maybe_install_pmd;
	default:
		goto drop_folio;
	}
	result = SCAN_FAIL;
	start_pte = pte_offset_map_lock(mm, pmd, haddr, &ptl);
	if (!start_pte)		/* mmap_lock + page lock should prevent this */
		goto drop_folio;
	/* step 1: check all mapped PTEs are to the right huge page */
	for (i = 0, addr = haddr, pte = start_pte;
	     i < HPAGE_PMD_NR; i++, addr += PAGE_SIZE, pte++) {
		struct page *page;
		pte_t ptent = ptep_get(pte);
		/* empty pte, skip */
		if (pte_none(ptent))
			continue;
		/* page swapped out, abort */
		if (!pte_present(ptent)) {
			result = SCAN_PTE_NON_PRESENT;
			goto abort;
		}
		page = vm_normal_page(vma, addr, ptent);
		if (WARN_ON_ONCE(page && is_zone_device_page(page)))
			page = NULL;
		/*
		 * Note that uprobe, debugger, or MAP_PRIVATE may change the
		 * page table, but the new page will not be a subpage of hpage.
		 */
		if (folio_page(folio, i) != page)
			goto abort;
	}
	pte_unmap_unlock(start_pte, ptl);
	mmu_notifier_range_init(&range, MMU_NOTIFY_CLEAR, 0, mm,
				haddr, haddr + HPAGE_PMD_SIZE);
	mmu_notifier_invalidate_range_start(&range);
	notified = true;
	/*
	 * pmd_lock covers a wider range than ptl, and (if split from mm's
	 * page_table_lock) ptl nests inside pml. The less time we hold pml,
	 * the better; but userfaultfd's mfill_atomic_pte() on a private VMA
	 * inserts a valid as-if-COWed PTE without even looking up page cache.
	 * So page lock of folio does not protect from it, so we must not drop
	 * ptl before pgt_pmd is removed, so uffd private needs pml taken now.
	 */
	if (userfaultfd_armed(vma) && !(vma->vm_flags & VM_SHARED))
		pml = pmd_lock(mm, pmd);
	start_pte = pte_offset_map_rw_nolock(mm, pmd, haddr, &pgt_pmd, &ptl);
	if (!start_pte)		/* mmap_lock + page lock should prevent this */
		goto abort;
	if (!pml)
		spin_lock(ptl);
	else if (ptl != pml)
		spin_lock_nested(ptl, SINGLE_DEPTH_NESTING);
	/* PTE table may have been swapped under us; recheck the pmd entry. */
	if (unlikely(!pmd_same(pgt_pmd, pmdp_get_lockless(pmd))))
		goto abort;
	/* step 2: clear page table and adjust rmap */
	for (i = 0, addr = haddr, pte = start_pte; i < HPAGE_PMD_NR;
	     i += nr_batch_ptes, addr += nr_batch_ptes * PAGE_SIZE,
	     pte += nr_batch_ptes) {
		unsigned int max_nr_batch_ptes = (end - addr) >> PAGE_SHIFT;
		struct page *page;
		pte_t ptent = ptep_get(pte);
		nr_batch_ptes = 1;
		if (pte_none(ptent))
			continue;
		/*
		 * We dropped ptl after the first scan, to do the mmu_notifier:
		 * page lock stops more PTEs of the folio being faulted in, but
		 * does not stop write faults COWing anon copies from existing
		 * PTEs; and does not stop those being swapped out or migrated.
		 */
		if (!pte_present(ptent)) {
			result = SCAN_PTE_NON_PRESENT;
			goto abort;
		}
		page = vm_normal_page(vma, addr, ptent);
		if (folio_page(folio, i) != page)
			goto abort;
		nr_batch_ptes = folio_pte_batch(folio, pte, ptent, max_nr_batch_ptes);
		/*
		 * Must clear entry, or a racing truncate may re-remove it.
		 * TLB flush can be left until pmdp_collapse_flush() does it.
		 * PTE dirty? Shmem page is already dirty; file is read-only.
		 */
		clear_ptes(mm, addr, pte, nr_batch_ptes);
		folio_remove_rmap_ptes(folio, page, nr_batch_ptes, vma);
		nr_mapped_ptes += nr_batch_ptes;
	}
	if (!pml)
		spin_unlock(ptl);
	/* step 3: set proper refcount and mm_counters. */
	if (nr_mapped_ptes) {
		folio_ref_sub(folio, nr_mapped_ptes);
		add_mm_counter(mm, mm_counter_file(folio), -nr_mapped_ptes);
	}
	/* step 4: remove empty page table */
	if (!pml) {
		pml = pmd_lock(mm, pmd);
		if (ptl != pml) {
			spin_lock_nested(ptl, SINGLE_DEPTH_NESTING);
			if (unlikely(!pmd_same(pgt_pmd, pmdp_get_lockless(pmd)))) {
				flush_tlb_mm(mm);
				goto unlock;
			}
		}
	}
	pgt_pmd = pmdp_collapse_flush(vma, haddr, pmd);
	pmdp_get_lockless_sync();
	pte_unmap_unlock(start_pte, ptl);
	if (ptl != pml)
		spin_unlock(pml);
	mmu_notifier_invalidate_range_end(&range);
	mm_dec_nr_ptes(mm);
	page_table_check_pte_clear_range(mm, haddr, pgt_pmd);
	pte_free_defer(mm, pmd_pgtable(pgt_pmd));
maybe_install_pmd:
	/* step 5: install pmd entry */
	result = install_pmd
			? set_huge_pmd(vma, haddr, pmd, folio, &folio->page)
			: SCAN_SUCCEED;
	goto drop_folio;
abort:
	if (nr_mapped_ptes) {
		/* Some PTEs were already cleared: flush before re-exposing. */
		flush_tlb_mm(mm);
		folio_ref_sub(folio, nr_mapped_ptes);
		add_mm_counter(mm, mm_counter_file(folio), -nr_mapped_ptes);
	}
unlock:
	if (start_pte)
		pte_unmap_unlock(start_pte, ptl);
	if (pml && pml != ptl)
		spin_unlock(pml);
	if (notified)
		mmu_notifier_invalidate_range_end(&range);
drop_folio:
	folio_unlock(folio);
	folio_put(folio);
	return result;
}
/**
 * collapse_pte_mapped_thp - Try to collapse a pte-mapped THP for mm at
 * @addr (internally aligned down to a huge-page boundary).
 *
 * @mm: process address space where collapse happens
 * @addr: THP collapse address
 * @install_pmd: If a huge PMD should be installed
 *
 * This function checks whether all the PTEs in the PMD are pointing to the
 * right THP. If so, retract the page table so the THP can refault in with
 * as pmd-mapped. Possibly install a huge PMD mapping the THP.
 *
 * Thin void wrapper around try_collapse_pte_mapped_thp() for callers that
 * do not care about the scan result.
 */
void collapse_pte_mapped_thp(struct mm_struct *mm, unsigned long addr,
			     bool install_pmd)
{
	try_collapse_pte_mapped_thp(mm, addr, install_pmd);
}
/* Can we retract page tables for this file-backed VMA? */
static bool file_backed_vma_is_retractable(struct vm_area_struct *vma)
{
	/*
	 * Three conditions disqualify a VMA, checked in order:
	 *
	 * 1. vma->anon_vma set: a MAP_PRIVATE mapping that got written to.
	 *    Such VMAs are likely not worth removing page tables from, as
	 *    any PMD-mapping is likely to be split again later.
	 *
	 * 2. uffd-wp registration: we cannot recycle the page table because
	 *    there may be pte markers installed.  Other vmas can still have
	 *    the same file mapped hugely, but skip this one: it will always
	 *    be mapped in small page size for uffd-wp registered ranges.
	 *
	 * 3. Guard regions: VMA_MAYBE_GUARD_BIT is set atomically on guard
	 *    marker installation under mmap/VMA read lock, and here we may
	 *    not hold any VMA or mmap lock at all.  It is serialised on the
	 *    PTE page table lock, which is obtained on guard region
	 *    installation after the flag is set, so performing this check
	 *    under that lock excludes races.
	 */
	if (READ_ONCE(vma->anon_vma))
		return false;
	if (userfaultfd_wp(vma))
		return false;
	if (vma_flag_test_atomic(vma, VMA_MAYBE_GUARD_BIT))
		return false;
	return true;
}
/*
 * After a file THP has been installed in the page cache at @pgoff, walk
 * every VMA mapping that offset and, where safe, retract (remove) the now
 * redundant empty PTE table under the covering PMD so subsequent faults
 * can map the THP with a huge PMD.  Runs without mmap_lock; correctness
 * relies on pmd/pte locks and the rechecks documented inline.
 */
static void retract_page_tables(struct address_space *mapping, pgoff_t pgoff)
{
	struct vm_area_struct *vma;
	i_mmap_lock_read(mapping);
	vma_interval_tree_foreach(vma, &mapping->i_mmap, pgoff, pgoff) {
		struct mmu_notifier_range range;
		struct mm_struct *mm;
		unsigned long addr;
		pmd_t *pmd, pgt_pmd;
		spinlock_t *pml;
		spinlock_t *ptl;
		bool success = false;
		/* Skip VMAs where pgoff does not map at a huge-page boundary. */
		addr = vma->vm_start + ((pgoff - vma->vm_pgoff) << PAGE_SHIFT);
		if (addr & ~HPAGE_PMD_MASK ||
		    vma->vm_end < addr + HPAGE_PMD_SIZE)
			continue;
		mm = vma->vm_mm;
		if (find_pmd_or_thp_or_none(mm, addr, &pmd) != SCAN_SUCCEED)
			continue;
		if (hpage_collapse_test_exit(mm))
			continue;
		if (!file_backed_vma_is_retractable(vma))
			continue;
		/* PTEs were notified when unmapped; but now for the PMD? */
		mmu_notifier_range_init(&range, MMU_NOTIFY_CLEAR, 0, mm,
					addr, addr + HPAGE_PMD_SIZE);
		mmu_notifier_invalidate_range_start(&range);
		pml = pmd_lock(mm, pmd);
		/*
		 * The lock of new_folio is still held, we will be blocked in
		 * the page fault path, which prevents the pte entries from
		 * being set again. So even though the old empty PTE page may be
		 * concurrently freed and a new PTE page is filled into the pmd
		 * entry, it is still empty and can be removed.
		 *
		 * So here we only need to recheck if the state of pmd entry
		 * still meets our requirements, rather than checking pmd_same()
		 * like elsewhere.
		 */
		if (check_pmd_state(pmd) != SCAN_SUCCEED)
			goto drop_pml;
		ptl = pte_lockptr(mm, pmd);
		if (ptl != pml)
			spin_lock_nested(ptl, SINGLE_DEPTH_NESTING);
		/*
		 * Huge page lock is still held, so normally the page table must
		 * remain empty; and we have already skipped anon_vma and
		 * userfaultfd_wp() vmas.  But since the mmap_lock is not held,
		 * it is still possible for a racing userfaultfd_ioctl() or
		 * madvise() to have inserted ptes or markers.  Now that we hold
		 * ptlock, repeating the retractable checks protects us from
		 * races against the prior checks.
		 */
		if (likely(file_backed_vma_is_retractable(vma))) {
			pgt_pmd = pmdp_collapse_flush(vma, addr, pmd);
			pmdp_get_lockless_sync();
			success = true;
		}
		if (ptl != pml)
			spin_unlock(ptl);
drop_pml:
		spin_unlock(pml);
		mmu_notifier_invalidate_range_end(&range);
		/* Accounting and freeing happen outside the locks. */
		if (success) {
			mm_dec_nr_ptes(mm);
			page_table_check_pte_clear_range(mm, addr, pgt_pmd);
			pte_free_defer(mm, pmd_pgtable(pgt_pmd));
		}
	}
	i_mmap_unlock_read(mapping);
}
/**
 * collapse_file - collapse filemap/tmpfs/shmem pages into huge one.
 *
 * @mm: process address space where collapse happens
 * @addr: virtual collapse start address
 * @file: file that collapse on
 * @start: collapse start address
 * @cc: collapse context and scratchpad
 *
 * Basic scheme is simple, details are more complex:
 *  - allocate and lock a new huge page;
 *  - scan page cache, locking old pages
 *    + swap/gup in pages if necessary;
 *  - copy data to new page
 *  - handle shmem holes
 *    + re-validate that holes weren't filled by someone else
 *    + check for userfaultfd
 *  - finalize updates to the page cache;
 *  - if replacing succeeds:
 *    + unlock huge page;
 *    + free old pages;
 *  - if replacing failed;
 *    + unlock old pages
 *    + unlock and free huge page;
 */
static enum scan_result collapse_file(struct mm_struct *mm, unsigned long addr,
		struct file *file, pgoff_t start, struct collapse_control *cc)
{
	struct address_space *mapping = file->f_mapping;
	struct page *dst;
	struct folio *folio, *tmp, *new_folio;
	pgoff_t index = 0, end = start + HPAGE_PMD_NR;
	LIST_HEAD(pagelist);
	XA_STATE_ORDER(xas, &mapping->i_pages, start, HPAGE_PMD_ORDER);
	enum scan_result result = SCAN_SUCCEED;
	int nr_none = 0;
	bool is_shmem = shmem_file(file);
	VM_BUG_ON(!IS_ENABLED(CONFIG_READ_ONLY_THP_FOR_FS) && !is_shmem);
	VM_BUG_ON(start & (HPAGE_PMD_NR - 1));
	result = alloc_charge_folio(&new_folio, mm, cc);
	if (result != SCAN_SUCCEED)
		goto out;
	mapping_set_update(&xas, mapping);
	__folio_set_locked(new_folio);
	if (is_shmem)
		__folio_set_swapbacked(new_folio);
	new_folio->index = start;
	new_folio->mapping = mapping;
	/*
	 * Ensure we have slots for all the pages in the range. This is
	 * almost certainly a no-op because most of the pages must be present
	 */
	do {
		xas_lock_irq(&xas);
		xas_create_range(&xas);
		if (!xas_error(&xas))
			break;
		xas_unlock_irq(&xas);
		if (!xas_nomem(&xas, GFP_KERNEL)) {
			result = SCAN_FAIL;
			goto rollback;
		}
	} while (1);
	/* Scan the range, locking and isolating each existing folio. */
	for (index = start; index < end;) {
		xas_set(&xas, index);
		folio = xas_load(&xas);
		VM_BUG_ON(index != xas.xa_index);
		if (is_shmem) {
			if (!folio) {
				/*
				 * Stop if extent has been truncated or
				 * hole-punched, and is now completely
				 * empty.
				 */
				if (index == start) {
					if (!xas_next_entry(&xas, end - 1)) {
						result = SCAN_TRUNCATED;
						goto xa_locked;
					}
				}
				nr_none++;
				index++;
				continue;
			}
			if (xa_is_value(folio) || !folio_test_uptodate(folio)) {
				xas_unlock_irq(&xas);
				/* swap in or instantiate fallocated page */
				if (shmem_get_folio(mapping->host, index, 0,
						    &folio, SGP_NOALLOC)) {
					result = SCAN_FAIL;
					goto xa_unlocked;
				}
				/* drain lru cache to help folio_isolate_lru() */
				lru_add_drain();
			} else if (folio_trylock(folio)) {
				folio_get(folio);
				xas_unlock_irq(&xas);
			} else {
				result = SCAN_PAGE_LOCK;
				goto xa_locked;
			}
		} else {	/* !is_shmem */
			if (!folio || xa_is_value(folio)) {
				xas_unlock_irq(&xas);
				page_cache_sync_readahead(mapping, &file->f_ra,
							  file, index,
							  end - index);
				/* drain lru cache to help folio_isolate_lru() */
				lru_add_drain();
				folio = filemap_lock_folio(mapping, index);
				if (IS_ERR(folio)) {
					result = SCAN_FAIL;
					goto xa_unlocked;
				}
			} else if (folio_test_dirty(folio)) {
				/*
				 * khugepaged only works on read-only fd,
				 * so this page is dirty because it hasn't
				 * been flushed since first write. There
				 * won't be new dirty pages.
				 *
				 * Trigger async flush here and hope the
				 * writeback is done when khugepaged
				 * revisits this page.
				 *
				 * This is a one-off situation. We are not
				 * forcing writeback in loop.
				 */
				xas_unlock_irq(&xas);
				filemap_flush(mapping);
				result = SCAN_PAGE_DIRTY_OR_WRITEBACK;
				goto xa_unlocked;
			} else if (folio_test_writeback(folio)) {
				xas_unlock_irq(&xas);
				result = SCAN_PAGE_DIRTY_OR_WRITEBACK;
				goto xa_unlocked;
			} else if (folio_trylock(folio)) {
				folio_get(folio);
				xas_unlock_irq(&xas);
			} else {
				result = SCAN_PAGE_LOCK;
				goto xa_locked;
			}
		}
		/*
		 * The folio must be locked, so we can drop the i_pages lock
		 * without racing with truncate.
		 */
		VM_BUG_ON_FOLIO(!folio_test_locked(folio), folio);
		/* make sure the folio is up to date */
		if (unlikely(!folio_test_uptodate(folio))) {
			result = SCAN_FAIL;
			goto out_unlock;
		}
		/*
		 * If file was truncated then extended, or hole-punched, before
		 * we locked the first folio, then a THP might be there already.
		 * This will be discovered on the first iteration.
		 */
		if (folio_order(folio) == HPAGE_PMD_ORDER &&
		    folio->index == start) {
			/* Maybe PMD-mapped */
			result = SCAN_PTE_MAPPED_HUGEPAGE;
			goto out_unlock;
		}
		if (folio_mapping(folio) != mapping) {
			result = SCAN_TRUNCATED;
			goto out_unlock;
		}
		if (!is_shmem && (folio_test_dirty(folio) ||
				  folio_test_writeback(folio))) {
			/*
			 * khugepaged only works on read-only fd, so this
			 * folio is dirty because it hasn't been flushed
			 * since first write.
			 */
			result = SCAN_PAGE_DIRTY_OR_WRITEBACK;
			goto out_unlock;
		}
		if (!folio_isolate_lru(folio)) {
			result = SCAN_DEL_PAGE_LRU;
			goto out_unlock;
		}
		if (!filemap_release_folio(folio, GFP_KERNEL)) {
			result = SCAN_PAGE_HAS_PRIVATE;
			folio_putback_lru(folio);
			goto out_unlock;
		}
		if (folio_mapped(folio))
			try_to_unmap(folio,
					TTU_IGNORE_MLOCK | TTU_BATCH_FLUSH);
		xas_lock_irq(&xas);
		VM_BUG_ON_FOLIO(folio != xa_load(xas.xa, index), folio);
		/*
		 * We control 2 + nr_pages references to the folio:
		 *  - we hold a pin on it;
		 *  - nr_pages reference from page cache;
		 *  - one from lru_isolate_folio;
		 * If those are the only references, then any new usage
		 * of the folio will have to fetch it from the page
		 * cache. That requires locking the folio to handle
		 * truncate, so any new usage will be blocked until we
		 * unlock folio after collapse/during rollback.
		 */
		if (folio_ref_count(folio) != 2 + folio_nr_pages(folio)) {
			result = SCAN_PAGE_COUNT;
			xas_unlock_irq(&xas);
			folio_putback_lru(folio);
			goto out_unlock;
		}
		/*
		 * Accumulate the folios that are being collapsed.
		 */
		list_add_tail(&folio->lru, &pagelist);
		index += folio_nr_pages(folio);
		continue;
out_unlock:
		folio_unlock(folio);
		folio_put(folio);
		goto xa_unlocked;
	}
	if (!is_shmem) {
		filemap_nr_thps_inc(mapping);
		/*
		 * Paired with the fence in do_dentry_open() -> get_write_access()
		 * to ensure i_writecount is up to date and the update to nr_thps
		 * is visible. Ensures the page cache will be truncated if the
		 * file is opened writable.
		 */
		smp_mb();
		if (inode_is_open_for_write(mapping->host)) {
			result = SCAN_FAIL;
			filemap_nr_thps_dec(mapping);
		}
	}
xa_locked:
	xas_unlock_irq(&xas);
xa_unlocked:
	/*
	 * If collapse is successful, flush must be done now before copying.
	 * If collapse is unsuccessful, does flush actually need to be done?
	 * Do it anyway, to clear the state.
	 */
	try_to_unmap_flush();
	if (result == SCAN_SUCCEED && nr_none &&
	    !shmem_charge(mapping->host, nr_none))
		result = SCAN_FAIL;
	if (result != SCAN_SUCCEED) {
		nr_none = 0;
		goto rollback;
	}
	/*
	 * The old folios are locked, so they won't change anymore.
	 */
	index = start;
	dst = folio_page(new_folio, 0);
	/* Copy old folios into the huge page, zero-filling any holes. */
	list_for_each_entry(folio, &pagelist, lru) {
		int i, nr_pages = folio_nr_pages(folio);
		while (index < folio->index) {
			clear_highpage(dst);
			index++;
			dst++;
		}
		for (i = 0; i < nr_pages; i++) {
			if (copy_mc_highpage(dst, folio_page(folio, i)) > 0) {
				result = SCAN_COPY_MC;
				goto rollback;
			}
			index++;
			dst++;
		}
	}
	while (index < end) {
		clear_highpage(dst);
		index++;
		dst++;
	}
	if (nr_none) {
		struct vm_area_struct *vma;
		int nr_none_check = 0;
		i_mmap_lock_read(mapping);
		xas_lock_irq(&xas);
		xas_set(&xas, start);
		for (index = start; index < end; index++) {
			if (!xas_next(&xas)) {
				xas_store(&xas, XA_RETRY_ENTRY);
				if (xas_error(&xas)) {
					result = SCAN_STORE_FAILED;
					goto immap_locked;
				}
				nr_none_check++;
			}
		}
		if (nr_none != nr_none_check) {
			result = SCAN_PAGE_FILLED;
			goto immap_locked;
		}
		/*
		 * If userspace observed a missing page in a VMA with
		 * a MODE_MISSING userfaultfd, then it might expect a
		 * UFFD_EVENT_PAGEFAULT for that page. If so, we need to
		 * roll back to avoid suppressing such an event. Since
		 * wp/minor userfaultfds don't give userspace any
		 * guarantees that the kernel doesn't fill a missing
		 * page with a zero page, so they don't matter here.
		 *
		 * Any userfaultfds registered after this point will
		 * not be able to observe any missing pages due to the
		 * previously inserted retry entries.
		 */
		vma_interval_tree_foreach(vma, &mapping->i_mmap, start, end) {
			if (userfaultfd_missing(vma)) {
				result = SCAN_EXCEED_NONE_PTE;
				goto immap_locked;
			}
		}
immap_locked:
		i_mmap_unlock_read(mapping);
		if (result != SCAN_SUCCEED) {
			/* Remove the retry entries we inserted above. */
			xas_set(&xas, start);
			for (index = start; index < end; index++) {
				if (xas_next(&xas) == XA_RETRY_ENTRY)
					xas_store(&xas, NULL);
			}
			xas_unlock_irq(&xas);
			goto rollback;
		}
	} else {
		xas_lock_irq(&xas);
	}
	if (is_shmem) {
		lruvec_stat_mod_folio(new_folio, NR_SHMEM, HPAGE_PMD_NR);
		lruvec_stat_mod_folio(new_folio, NR_SHMEM_THPS, HPAGE_PMD_NR);
	} else {
		lruvec_stat_mod_folio(new_folio, NR_FILE_THPS, HPAGE_PMD_NR);
	}
	lruvec_stat_mod_folio(new_folio, NR_FILE_PAGES, HPAGE_PMD_NR);
	/*
	 * Mark new_folio as uptodate before inserting it into the
	 * page cache so that it isn't mistaken for an fallocated but
	 * unwritten page.
	 */
	folio_mark_uptodate(new_folio);
	folio_ref_add(new_folio, HPAGE_PMD_NR - 1);
	if (is_shmem)
		folio_mark_dirty(new_folio);
	folio_add_lru(new_folio);
	/* Join all the small entries into a single multi-index entry. */
	xas_set_order(&xas, start, HPAGE_PMD_ORDER);
	xas_store(&xas, new_folio);
	WARN_ON_ONCE(xas_error(&xas));
	xas_unlock_irq(&xas);
	/*
	 * Remove pte page tables, so we can re-fault the page as huge.
	 * If MADV_COLLAPSE, adjust result to call try_collapse_pte_mapped_thp().
	 */
	retract_page_tables(mapping, start);
	if (cc && !cc->is_khugepaged)
		result = SCAN_PTE_MAPPED_HUGEPAGE;
	folio_unlock(new_folio);
	/*
	 * The collapse has succeeded, so free the old folios.
	 */
	list_for_each_entry_safe(folio, tmp, &pagelist, lru) {
		list_del(&folio->lru);
		lruvec_stat_mod_folio(folio, NR_FILE_PAGES,
				-folio_nr_pages(folio));
		if (is_shmem)
			lruvec_stat_mod_folio(folio, NR_SHMEM,
					-folio_nr_pages(folio));
		folio->mapping = NULL;
		folio_clear_active(folio);
		folio_clear_unevictable(folio);
		folio_unlock(folio);
		folio_put_refs(folio, 2 + folio_nr_pages(folio));
	}
	goto out;
rollback:
	/* Something went wrong: roll back page cache changes */
	if (nr_none) {
		xas_lock_irq(&xas);
		mapping->nrpages -= nr_none;
		xas_unlock_irq(&xas);
		shmem_uncharge(mapping->host, nr_none);
	}
	list_for_each_entry_safe(folio, tmp, &pagelist, lru) {
		list_del(&folio->lru);
		folio_unlock(folio);
		folio_putback_lru(folio);
		folio_put(folio);
	}
	/*
	 * Undo the updates of filemap_nr_thps_inc for non-SHMEM
	 * file only. This undo is not needed unless failure is
	 * due to SCAN_COPY_MC.
	 */
	if (!is_shmem && result == SCAN_COPY_MC) {
		filemap_nr_thps_dec(mapping);
		/*
		 * Paired with the fence in do_dentry_open() -> get_write_access()
		 * to ensure the update to nr_thps is visible.
		 */
		smp_mb();
	}
	new_folio->mapping = NULL;
	folio_unlock(new_folio);
	folio_put(new_folio);
out:
	VM_BUG_ON(!list_empty(&pagelist));
	trace_mm_khugepaged_collapse_file(mm, new_folio, index, addr, is_shmem, file, HPAGE_PMD_NR, result);
	return result;
}
/*
 * Scan a HPAGE_PMD_NR-sized window of @file's page cache starting at
 * @start and, if enough pages are present (and limits are respected when
 * running as khugepaged), attempt collapse_file() on the range.
 *
 * The scan itself is lockless: it walks the xarray under RCU and takes
 * only transient folio references.
 */
static enum scan_result hpage_collapse_scan_file(struct mm_struct *mm, unsigned long addr,
		struct file *file, pgoff_t start, struct collapse_control *cc)
{
	struct folio *folio = NULL;
	struct address_space *mapping = file->f_mapping;
	XA_STATE(xas, &mapping->i_pages, start);
	int present, swap;
	int node = NUMA_NO_NODE;
	enum scan_result result = SCAN_SUCCEED;
	present = 0;
	swap = 0;
	memset(cc->node_load, 0, sizeof(cc->node_load));
	nodes_clear(cc->alloc_nmask);
	rcu_read_lock();
	xas_for_each(&xas, folio, start + HPAGE_PMD_NR - 1) {
		if (xas_retry(&xas, folio))
			continue;
		/* Value entries represent swapped-out shmem pages. */
		if (xa_is_value(folio)) {
			swap += 1 << xas_get_order(&xas);
			if (cc->is_khugepaged &&
			    swap > khugepaged_max_ptes_swap) {
				result = SCAN_EXCEED_SWAP_PTE;
				count_vm_event(THP_SCAN_EXCEED_SWAP_PTE);
				break;
			}
			continue;
		}
		if (!folio_try_get(folio)) {
			xas_reset(&xas);
			continue;
		}
		/* Recheck the entry after pinning; retry if it changed. */
		if (unlikely(folio != xas_reload(&xas))) {
			folio_put(folio);
			xas_reset(&xas);
			continue;
		}
		if (folio_order(folio) == HPAGE_PMD_ORDER &&
		    folio->index == start) {
			/* Maybe PMD-mapped */
			result = SCAN_PTE_MAPPED_HUGEPAGE;
			/*
			 * For SCAN_PTE_MAPPED_HUGEPAGE, further processing
			 * by the caller won't touch the page cache, and so
			 * it's safe to skip LRU and refcount checks before
			 * returning.
			 */
			folio_put(folio);
			break;
		}
		node = folio_nid(folio);
		if (hpage_collapse_scan_abort(node, cc)) {
			result = SCAN_SCAN_ABORT;
			folio_put(folio);
			break;
		}
		cc->node_load[node]++;
		if (!folio_test_lru(folio)) {
			result = SCAN_PAGE_LRU;
			folio_put(folio);
			break;
		}
		/* +1 accounts for the reference we just took. */
		if (folio_expected_ref_count(folio) + 1 != folio_ref_count(folio)) {
			result = SCAN_PAGE_COUNT;
			folio_put(folio);
			break;
		}
		/*
		 * We probably should check if the folio is referenced
		 * here, but nobody would transfer pte_young() to
		 * folio_test_referenced() for us. And rmap walk here
		 * is just too costly...
		 */
		present += folio_nr_pages(folio);
		folio_put(folio);
		if (need_resched()) {
			xas_pause(&xas);
			cond_resched_rcu();
		}
	}
	rcu_read_unlock();
	if (result == SCAN_SUCCEED) {
		if (cc->is_khugepaged &&
		    present < HPAGE_PMD_NR - khugepaged_max_ptes_none) {
			result = SCAN_EXCEED_NONE_PTE;
			count_vm_event(THP_SCAN_EXCEED_NONE_PTE);
		} else {
			result = collapse_file(mm, addr, file, start, cc);
		}
	}
	trace_mm_khugepaged_scan_file(mm, folio, file, present, swap, result);
	return result;
}
/*
 * Scan up to @pages worth of PTEs starting from the saved scan position
 * (khugepaged_scan), attempting collapse on each eligible PMD range.
 *
 * Called with khugepaged_mm_lock held; drops it while scanning and
 * re-takes it before returning (annotated below).  *result reports the
 * outcome of the last collapse attempt.  Returns the amount of scan
 * progress made (counted against @pages).
 */
static unsigned int khugepaged_scan_mm_slot(unsigned int pages, enum scan_result *result,
					    struct collapse_control *cc)
	__releases(&khugepaged_mm_lock)
	__acquires(&khugepaged_mm_lock)
{
	struct vma_iterator vmi;
	struct mm_slot *slot;
	struct mm_struct *mm;
	struct vm_area_struct *vma;
	int progress = 0;
	VM_BUG_ON(!pages);
	lockdep_assert_held(&khugepaged_mm_lock);
	*result = SCAN_FAIL;
	/* Resume from the saved slot, or start over at the list head. */
	if (khugepaged_scan.mm_slot) {
		slot = khugepaged_scan.mm_slot;
	} else {
		slot = list_first_entry(&khugepaged_scan.mm_head,
				     struct mm_slot, mm_node);
		khugepaged_scan.address = 0;
		khugepaged_scan.mm_slot = slot;
	}
	spin_unlock(&khugepaged_mm_lock);
	mm = slot->mm;
	/*
	 * Don't wait for semaphore (to avoid long wait times).  Just move to
	 * the next mm on the list.
	 */
	vma = NULL;
	if (unlikely(!mmap_read_trylock(mm)))
		goto breakouterloop_mmap_lock;
	progress++;
	if (unlikely(hpage_collapse_test_exit_or_disable(mm)))
		goto breakouterloop;
	vma_iter_init(&vmi, mm, khugepaged_scan.address);
	for_each_vma(vmi, vma) {
		unsigned long hstart, hend;
		cond_resched();
		if (unlikely(hpage_collapse_test_exit_or_disable(mm))) {
			progress++;
			break;
		}
		if (!thp_vma_allowable_order(vma, vma->vm_flags, TVA_KHUGEPAGED, PMD_ORDER)) {
			progress++;
			continue;
		}
		/* Only scan the PMD-aligned portion of the VMA. */
		hstart = round_up(vma->vm_start, HPAGE_PMD_SIZE);
		hend = round_down(vma->vm_end, HPAGE_PMD_SIZE);
		if (khugepaged_scan.address > hend) {
			progress++;
			continue;
		}
		if (khugepaged_scan.address < hstart)
			khugepaged_scan.address = hstart;
		VM_BUG_ON(khugepaged_scan.address & ~HPAGE_PMD_MASK);
		while (khugepaged_scan.address < hend) {
			bool mmap_locked = true;
			cond_resched();
			if (unlikely(hpage_collapse_test_exit_or_disable(mm)))
				goto breakouterloop;
			VM_BUG_ON(khugepaged_scan.address < hstart ||
				  khugepaged_scan.address + HPAGE_PMD_SIZE >
				  hend);
			if (!vma_is_anonymous(vma)) {
				struct file *file = get_file(vma->vm_file);
				pgoff_t pgoff = linear_page_index(vma,
						khugepaged_scan.address);
				/* File scan runs without the mmap_lock. */
				mmap_read_unlock(mm);
				mmap_locked = false;
				*result = hpage_collapse_scan_file(mm,
					khugepaged_scan.address, file, pgoff, cc);
				fput(file);
				if (*result == SCAN_PTE_MAPPED_HUGEPAGE) {
					mmap_read_lock(mm);
					if (hpage_collapse_test_exit_or_disable(mm))
						goto breakouterloop;
					*result = try_collapse_pte_mapped_thp(mm,
						khugepaged_scan.address, false);
					if (*result == SCAN_PMD_MAPPED)
						*result = SCAN_SUCCEED;
					mmap_read_unlock(mm);
				}
			} else {
				*result = hpage_collapse_scan_pmd(mm, vma,
					khugepaged_scan.address, &mmap_locked, cc);
			}
			if (*result == SCAN_SUCCEED)
				++khugepaged_pages_collapsed;
			/* move to next address */
			khugepaged_scan.address += HPAGE_PMD_SIZE;
			progress += HPAGE_PMD_NR;
			if (!mmap_locked)
				/*
				 * We released mmap_lock so break loop.  Note
				 * that we drop mmap_lock before all hugepage
				 * allocations, so if allocation fails, we are
				 * guaranteed to break here and report the
				 * correct result back to caller.
				 */
				goto breakouterloop_mmap_lock;
			if (progress >= pages)
				goto breakouterloop;
		}
	}
breakouterloop:
	mmap_read_unlock(mm); /* exit_mmap will destroy ptes after this */
breakouterloop_mmap_lock:
	spin_lock(&khugepaged_mm_lock);
	VM_BUG_ON(khugepaged_scan.mm_slot != slot);
	/*
	 * Release the current mm_slot if this mm is about to die, or
	 * if we scanned all vmas of this mm.
	 */
	if (hpage_collapse_test_exit(mm) || !vma) {
		/*
		 * Make sure that if mm_users is reaching zero while
		 * khugepaged runs here, khugepaged_exit will find
		 * mm_slot not pointing to the exiting mm.
		 */
		if (!list_is_last(&slot->mm_node, &khugepaged_scan.mm_head)) {
			khugepaged_scan.mm_slot = list_next_entry(slot, mm_node);
			khugepaged_scan.address = 0;
		} else {
			khugepaged_scan.mm_slot = NULL;
			khugepaged_full_scans++;
		}
		collect_mm_slot(slot);
	}
	return progress;
}
/* Is there anything for khugepaged to do: mms queued and THP enabled? */
static int khugepaged_has_work(void)
{
	if (list_empty(&khugepaged_scan.mm_head))
		return 0;
	return hugepage_pmd_enabled();
}
/*
 * Wake condition for the idle khugepaged kthread: work has been queued or
 * the thread has been asked to stop.
 */
static int khugepaged_wait_event(void)
{
	return !list_empty(&khugepaged_scan.mm_head) ||
		kthread_should_stop();
}
/*
 * One scan pass of the khugepaged kthread: scan up to
 * khugepaged_pages_to_scan pages across the queued mm_structs.  On the
 * first huge page allocation failure we sleep and retry once; a second
 * failure aborts the pass.
 */
static void khugepaged_do_scan(struct collapse_control *cc)
{
	unsigned int progress = 0, pass_through_head = 0;
	unsigned int pages = READ_ONCE(khugepaged_pages_to_scan);
	bool wait = true;
	enum scan_result result = SCAN_SUCCEED;
	/* Flush per-CPU LRU caches so candidate pages are on the LRU lists. */
	lru_add_drain_all();
	while (true) {
		cond_resched();
		if (unlikely(kthread_should_stop()))
			break;
		spin_lock(&khugepaged_mm_lock);
		if (!khugepaged_scan.mm_slot)
			pass_through_head++;
		/*
		 * Stop once the scan cursor has passed the list head twice:
		 * everything queued has been visited during this pass.
		 */
		if (khugepaged_has_work() &&
		    pass_through_head < 2)
			progress += khugepaged_scan_mm_slot(pages - progress,
							    &result, cc);
		else
			progress = pages;
		spin_unlock(&khugepaged_mm_lock);
		if (progress >= pages)
			break;
		if (result == SCAN_ALLOC_HUGE_PAGE_FAIL) {
			/*
			 * If fail to allocate the first time, try to sleep for
			 * a while.  When hit again, cancel the scan.
			 */
			if (!wait)
				break;
			wait = false;
			khugepaged_alloc_sleep();
		}
	}
}
/*
 * Whether a sleeping khugepaged should wake up: either stop was requested
 * or the programmed sleep interval has expired.
 */
static bool khugepaged_should_wakeup(void)
{
	return kthread_should_stop() ||
	       time_after_eq(jiffies, khugepaged_sleep_expire);
}
/*
 * Sleep between scan passes: a bounded freezable sleep when there is
 * pending work, otherwise block until work arrives or stop is requested.
 */
static void khugepaged_wait_work(void)
{
	if (khugepaged_has_work()) {
		const unsigned long scan_sleep_jiffies =
			msecs_to_jiffies(khugepaged_scan_sleep_millisecs);
		/* A zero interval means "start the next pass immediately". */
		if (!scan_sleep_jiffies)
			return;
		khugepaged_sleep_expire = jiffies + scan_sleep_jiffies;
		wait_event_freezable_timeout(khugepaged_wait,
					     khugepaged_should_wakeup(),
					     scan_sleep_jiffies);
		return;
	}
	if (hugepage_pmd_enabled())
		wait_event_freezable(khugepaged_wait, khugepaged_wait_event());
}
/*
 * Main loop of the khugepaged kernel thread: alternate scan passes with
 * freezable sleeps until asked to stop, then drop the current mm_slot.
 */
static int khugepaged(void *none)
{
	struct mm_slot *slot;
	set_freezable();
	/* Background housekeeping: run at the lowest scheduling priority. */
	set_user_nice(current, MAX_NICE);
	while (!kthread_should_stop()) {
		khugepaged_do_scan(&khugepaged_collapse_control);
		khugepaged_wait_work();
	}
	/* Detach and release the in-progress mm_slot, if any, on exit. */
	spin_lock(&khugepaged_mm_lock);
	slot = khugepaged_scan.mm_slot;
	khugepaged_scan.mm_slot = NULL;
	if (slot)
		collect_mm_slot(slot);
	spin_unlock(&khugepaged_mm_lock);
	return 0;
}
/*
 * Raise min_free_kbytes (and thus the zone watermarks) so that enough whole
 * pageblocks stay free to assist THP allocation; fall back to the default
 * calculation when PMD-sized THP is disabled.
 */
static void set_recommended_min_free_kbytes(void)
{
	struct zone *zone;
	int nr_zones = 0;
	unsigned long recommended_min;
	if (!hugepage_pmd_enabled()) {
		calculate_min_free_kbytes();
		goto update_wmarks;
	}
	for_each_populated_zone(zone) {
		/*
		 * We don't need to worry about fragmentation of
		 * ZONE_MOVABLE since it only has movable pages.
		 */
		if (zone_idx(zone) > gfp_zone(GFP_USER))
			continue;
		nr_zones++;
	}
	/* Ensure 2 pageblocks are free to assist fragmentation avoidance */
	recommended_min = pageblock_nr_pages * nr_zones * 2;
	/*
	 * Make sure that on average at least two pageblocks are almost free
	 * of another type, one for a migratetype to fall back to and a
	 * second to avoid subsequent fallbacks of other types There are 3
	 * MIGRATE_TYPES we care about.
	 */
	recommended_min += pageblock_nr_pages * nr_zones *
			   MIGRATE_PCPTYPES * MIGRATE_PCPTYPES;
	/* don't ever allow to reserve more than 5% of the lowmem */
	recommended_min = min(recommended_min,
			      (unsigned long) nr_free_buffer_pages() / 20);
	/* Convert pages to KiB for the min_free_kbytes comparison. */
	recommended_min <<= (PAGE_SHIFT-10);
	if (recommended_min > min_free_kbytes) {
		if (user_min_free_kbytes >= 0)
			pr_info("raising min_free_kbytes from %d to %lu to help transparent hugepage allocations\n",
				min_free_kbytes, recommended_min);
		min_free_kbytes = recommended_min;
	}
update_wmarks:
	setup_per_zone_wmarks();
}
/*
 * Start or stop the khugepaged kthread so it matches the current THP
 * configuration, then refresh the min_free_kbytes recommendation.
 * Serialized by khugepaged_mutex; returns 0 or a kthread_run() error.
 */
int start_stop_khugepaged(void)
{
	int err = 0;
	mutex_lock(&khugepaged_mutex);
	if (hugepage_pmd_enabled()) {
		if (!khugepaged_thread)
			khugepaged_thread = kthread_run(khugepaged, NULL,
							"khugepaged");
		if (IS_ERR(khugepaged_thread)) {
			pr_err("khugepaged: kthread_run(khugepaged) failed\n");
			err = PTR_ERR(khugepaged_thread);
			khugepaged_thread = NULL;
			goto fail;
		}
		/* Kick the thread if work was queued while it was stopped. */
		if (!list_empty(&khugepaged_scan.mm_head))
			wake_up_interruptible(&khugepaged_wait);
	} else if (khugepaged_thread) {
		kthread_stop(khugepaged_thread);
		khugepaged_thread = NULL;
	}
	set_recommended_min_free_kbytes();
fail:
	mutex_unlock(&khugepaged_mutex);
	return err;
}
/*
 * Recompute the min_free_kbytes recommendation (e.g. after memory hotplug),
 * but only while khugepaged is actually enabled and running.
 */
void khugepaged_min_free_kbytes_update(void)
{
	mutex_lock(&khugepaged_mutex);
	if (hugepage_pmd_enabled() && khugepaged_thread)
		set_recommended_min_free_kbytes();
	mutex_unlock(&khugepaged_mutex);
}
/* Return true when the calling task is the khugepaged kthread itself. */
bool current_is_khugepaged(void)
{
	return kthread_func(current) == khugepaged;
}
/*
 * madvise_collapse_errno - map a scan_result to the errno reported to the
 * MADV_COLLAPSE caller.
 *
 * MADV_COLLAPSE deliberately deviates from the usual madvise(2) conventions
 * so callers get actionable feedback and can pick a fallback strategy
 * suited to the kind of failure.
 */
static int madvise_collapse_errno(enum scan_result r)
{
	if (r == SCAN_ALLOC_HUGE_PAGE_FAIL)
		return -ENOMEM;
	if (r == SCAN_CGROUP_CHARGE_FAIL || r == SCAN_EXCEED_NONE_PTE)
		return -EBUSY;
	/* Resource temporarily unavailable - trying again might succeed. */
	if (r == SCAN_PAGE_COUNT || r == SCAN_PAGE_LOCK ||
	    r == SCAN_PAGE_LRU || r == SCAN_DEL_PAGE_LRU ||
	    r == SCAN_PAGE_FILLED || r == SCAN_PAGE_DIRTY_OR_WRITEBACK)
		return -EAGAIN;
	/*
	 * Anything else: retrying is unlikely to help / the error is
	 * intrinsic to the specified memory range.  khugepaged likely won't
	 * be able to collapse it either.
	 */
	return -EINVAL;
}
/*
 * madvise_collapse - synchronously try to collapse [start, end) of @vma into
 * PMD-mapped huge pages on behalf of MADV_COLLAPSE.
 *
 * Entered and exited with mmap_lock held for read, but the lock may be
 * dropped and re-taken in between (*lock_dropped is set when that happens).
 * Returns 0 if every PMD-sized region of the range ends up THP-backed,
 * otherwise an errno derived from the last failure.
 */
int madvise_collapse(struct vm_area_struct *vma, unsigned long start,
		unsigned long end, bool *lock_dropped)
{
	struct collapse_control *cc;
	struct mm_struct *mm = vma->vm_mm;
	unsigned long hstart, hend, addr;
	enum scan_result last_fail = SCAN_FAIL;
	int thps = 0;
	bool mmap_locked = true;
	BUG_ON(vma->vm_start > start);
	BUG_ON(vma->vm_end < end);
	if (!thp_vma_allowable_order(vma, vma->vm_flags, TVA_FORCED_COLLAPSE, PMD_ORDER))
		return -EINVAL;
	cc = kmalloc(sizeof(*cc), GFP_KERNEL);
	if (!cc)
		return -ENOMEM;
	/* Forced collapse: skip khugepaged's max_ptes_* heuristics. */
	cc->is_khugepaged = false;
	mmgrab(mm);
	lru_add_drain_all();
	/* Round the requested range inward to PMD-aligned boundaries. */
	hstart = (start + ~HPAGE_PMD_MASK) & HPAGE_PMD_MASK;
	hend = end & HPAGE_PMD_MASK;
	for (addr = hstart; addr < hend; addr += HPAGE_PMD_SIZE) {
		enum scan_result result = SCAN_FAIL;
		bool triggered_wb = false;
retry:
		if (!mmap_locked) {
			cond_resched();
			mmap_read_lock(mm);
			mmap_locked = true;
			/* The VMA may have changed while the lock was dropped. */
			result = hugepage_vma_revalidate(mm, addr, false, &vma,
							 cc);
			if (result  != SCAN_SUCCEED) {
				last_fail = result;
				goto out_nolock;
			}
			hend = min(hend, vma->vm_end & HPAGE_PMD_MASK);
		}
		mmap_assert_locked(mm);
		if (!vma_is_anonymous(vma)) {
			struct file *file = get_file(vma->vm_file);
			pgoff_t pgoff = linear_page_index(vma, addr);
			/* File collapse runs without mmap_lock held. */
			mmap_read_unlock(mm);
			mmap_locked = false;
			*lock_dropped = true;
			result = hpage_collapse_scan_file(mm, addr, file, pgoff,
							  cc);
			/*
			 * Dirty/writeback pages block collapse once; kick off
			 * writeback for this PMD range and retry a single time.
			 */
			if (result == SCAN_PAGE_DIRTY_OR_WRITEBACK && !triggered_wb &&
			    mapping_can_writeback(file->f_mapping)) {
				loff_t lstart = (loff_t)pgoff << PAGE_SHIFT;
				loff_t lend = lstart + HPAGE_PMD_SIZE - 1;
				filemap_write_and_wait_range(file->f_mapping, lstart, lend);
				triggered_wb = true;
				fput(file);
				goto retry;
			}
			fput(file);
		} else {
			result = hpage_collapse_scan_pmd(mm, vma, addr,
							 &mmap_locked, cc);
		}
		if (!mmap_locked)
			*lock_dropped = true;
handle_result:
		switch (result) {
		case SCAN_SUCCEED:
		case SCAN_PMD_MAPPED:
			++thps;
			break;
		case SCAN_PTE_MAPPED_HUGEPAGE:
			BUG_ON(mmap_locked);
			mmap_read_lock(mm);
			result = try_collapse_pte_mapped_thp(mm, addr, true);
			mmap_read_unlock(mm);
			goto handle_result;
		/* Whitelisted set of results where continuing OK */
		case SCAN_NO_PTE_TABLE:
		case SCAN_PTE_NON_PRESENT:
		case SCAN_PTE_UFFD_WP:
		case SCAN_LACK_REFERENCED_PAGE:
		case SCAN_PAGE_NULL:
		case SCAN_PAGE_COUNT:
		case SCAN_PAGE_LOCK:
		case SCAN_PAGE_COMPOUND:
		case SCAN_PAGE_LRU:
		case SCAN_DEL_PAGE_LRU:
			last_fail = result;
			break;
		default:
			last_fail = result;
			/* Other error, exit */
			goto out_maybelock;
		}
	}
out_maybelock:
	/* Caller expects us to hold mmap_lock on return */
	if (!mmap_locked)
		mmap_read_lock(mm);
out_nolock:
	mmap_assert_locked(mm);
	mmdrop(mm);
	kfree(cc);
	return thps == ((hend - hstart) >> HPAGE_PMD_SHIFT) ? 0
			: madvise_collapse_errno(last_fail);
}
|
c
|
github
|
https://github.com/torvalds/linux
|
mm/khugepaged.c
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (c) 2010-2011 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Tenant Controller
"""
import logging
from keystone import utils
from keystone.controllers.base_controller import BaseController
from keystone.logic import service
from keystone.models import Tenant
logger = logging.getLogger(__name__) # pylint: disable=C0103
class TenantController(BaseController):
    """Controller for Tenant related operations."""

    def __init__(self, is_service_operation=None):
        """Initialize the controller.

        :param is_service_operation: True when serving the 'Service API',
            falsy for the admin API; forwarded to tenant listing so the
            identity service can filter appropriately.
        """
        self.identity_service = service.IdentityService()
        self.is_service_operation = is_service_operation
        # Lazy %-style args: the message is only rendered when DEBUG logging
        # is actually enabled (the original eagerly formatted with %).
        logger.debug("Initializing: 'Service API' mode=%s",
                     self.is_service_operation)

    @utils.wrap_error
    def create_tenant(self, req):
        """Create a tenant from the request body; respond 201 on success."""
        tenant = utils.get_normalized_request_content(Tenant, req)
        return utils.send_result(201, req,
            self.identity_service.create_tenant(utils.get_auth_token(req),
                tenant))

    @utils.wrap_error
    def get_tenants(self, req):
        """List tenants, or look a single one up via ?name=<tenant_name>."""
        # dict-style .get() replaces the "in" check + subscript double lookup.
        tenant_name = req.GET.get("name")
        if tenant_name:
            tenant = self.identity_service.get_tenant_by_name(
                utils.get_auth_token(req),
                tenant_name)
            return utils.send_result(200, req, tenant)
        else:
            marker, limit, url = self.get_marker_limit_and_url(req)
            tenants = self.identity_service.get_tenants(
                utils.get_auth_token(req), marker, limit, url,
                self.is_service_operation)
            return utils.send_result(200, req, tenants)

    @utils.wrap_error
    def get_tenant(self, req, tenant_id):
        """Fetch one tenant by id."""
        tenant = self.identity_service.get_tenant(utils.get_auth_token(req),
            tenant_id)
        return utils.send_result(200, req, tenant)

    @utils.wrap_error
    def update_tenant(self, req, tenant_id):
        """Update a tenant from the request body."""
        tenant = utils.get_normalized_request_content(Tenant, req)
        rval = self.identity_service.update_tenant(utils.get_auth_token(req),
            tenant_id, tenant)
        return utils.send_result(200, req, rval)

    @utils.wrap_error
    def delete_tenant(self, req, tenant_id):
        """Delete a tenant; respond 204 on success."""
        rval = self.identity_service.delete_tenant(utils.get_auth_token(req),
            tenant_id)
        return utils.send_result(204, req, rval)
|
unknown
|
codeparrot/codeparrot-clean
| ||
// Attach the QUnit harness containers to the document body.
var qunitFixtureMarkup = '<div id="qunit"></div><div id="qunit-fixture"></div>';
$('body').append(qunitFixtureMarkup);
|
javascript
|
github
|
https://github.com/lodash/lodash
|
vendor/backbone/test/setup/dom-setup.js
|
from abc import ABCMeta, abstractstaticmethod
# en la función next sucesor establecemos al siguiente sucesor dentro de la cadena
#la sifuiente función el evento de la cadena
class CajeroHandler(metaclass=ABCMeta):
    """Abstract handler in the ATM chain of responsibility.

    Concrete cashiers implement next_succesor() to wire the following
    handler and handle() to dispense one denomination before delegating
    any remainder.  Bug fix: the original abstract methods had empty
    bodies, which is a SyntaxError in Python.
    """
    @abstractstaticmethod
    def next_succesor(next):
        """Set the next handler in the chain."""
        raise NotImplementedError
    @abstractstaticmethod
    def handle(cantidad):
        """Dispense bills for *cantidad*, delegating the remainder."""
        raise NotImplementedError
#se definen las clases con las funciones de cada uno de los cajeros
class Cajero50ConcreteHandler(CajeroHandler):
    """Dispenses $50 bills, then passes any remainder down the chain.

    Bug fix: the original delegated through ``self._next``, an attribute
    that is never assigned (the successor is stored in ``self._sucesor``),
    so every delegation raised AttributeError.
    """
    def __init__(self):
        # Next handler in the chain; wired via next_succesor().
        self._sucesor = None
    def next_succesor(self, sucesor):
        """Wire the next handler in the chain."""
        self._sucesor = sucesor
    def handle(self, cambio):
        """Hand out as many $50 bills as fit in *cambio*; delegate the rest."""
        if cambio >= 50:
            numero = cambio // 50
            resto = cambio % 50
            print (f"Dar{numero} $50")
            if resto !=0:
                self._sucesor.handle(resto)
        else:
            self._sucesor.handle(cambio)
class Cajero20ConcreteHandler(CajeroHandler):
    """Dispenses $20 bills, then passes any remainder down the chain.

    Bug fix: delegation used the never-assigned ``self._next``; the
    successor lives in ``self._sucesor``.
    """
    def __init__(self):
        # Next handler in the chain; wired via next_succesor().
        self._sucesor = None
    def next_succesor(self, sucesor):
        """Wire the next handler in the chain."""
        self._sucesor = sucesor
    def handle(self, cambio):
        """Hand out as many $20 bills as fit in *cambio*; delegate the rest."""
        if cambio >= 20:
            numero = cambio // 20
            resto = cambio % 20
            print (f"Dar {numero} $20")
            if resto !=0:
                self._sucesor.handle(resto)
        else:
            self._sucesor.handle(cambio)
class Cajero10ConcreteHandler(CajeroHandler):
    """Dispenses $10 bills at the end of the chain.

    Bug fix: delegation used the never-assigned ``self._next``; the
    successor lives in ``self._sucesor``.  NOTE(review): as the last
    handler its successor is normally None, so amounts that are not a
    multiple of 10 would still fail here -- callers validate first.
    """
    def __init__(self):
        # Next handler in the chain; wired via next_succesor().
        self._sucesor = None
    def next_succesor(self, sucesor):
        """Wire the next handler in the chain."""
        self._sucesor = sucesor
    def handle(self, cambio):
        """Hand out as many $10 bills as fit in *cambio*; delegate the rest."""
        if cambio >= 10:
            numero = cambio // 10
            resto = cambio % 10
            print (f" Dar{numero} $10")
            if resto !=0:
                self._sucesor.handle(resto)
        else:
            self._sucesor.handle(cambio)
class CajeroATMchain:
    """Builds and wires the cashier chain: $50 -> $20 -> $10.

    Bug fixes vs. the original: the handler instances were assigned at
    class-body level where ``self`` does not exist (NameError at import),
    the concrete class names were misspelled (``Cajero20concretehandler``),
    ``chain2`` was assigned twice instead of creating ``chain3``, and the
    wiring called a nonexistent ``set_sucesor`` instead of
    ``next_succesor``.
    """
    def __init__(self):
        # The instances that will hand out the customer's change.
        self.chain1 = Cajero50ConcreteHandler()
        self.chain2 = Cajero20ConcreteHandler()
        self.chain3 = Cajero10ConcreteHandler()
        # Wire the chain so each cashier delegates its remainder onward.
        self.chain1.next_succesor(self.chain2)
        self.chain2.next_succesor(self.chain3)
if __name__ == '__main__':
    # Bug fixes: the class is named CajeroATMchain (original instantiated a
    # nonexistent CajeroATMChain), the input variable was bound as "Ingreso"
    # but read as "ingreso", and invalid amounts were dispensed anyway after
    # printing the warning.
    cajero = CajeroATMchain()
    ingreso = int(input("Cantidad la cual va a ingrasar: "))
    if ingreso < 10 or ingreso % 10 != 0:
        print("debes dar una cantidad multiplo de 10 ")
    else:
        cajero.chain1.handle(ingreso)
|
unknown
|
codeparrot/codeparrot-clean
| ||
import dynamic from 'next/dynamic';
import somethingElse from 'something-else';
const DynamicComponent = dynamic(()=>import('../components/hello'), {
loadableGenerated: {
modules: [
"src/some-file.js -> " + "../components/hello"
]
}
});
somethingElse.dynamic('should not be transformed');
|
javascript
|
github
|
https://github.com/vercel/next.js
|
crates/next-custom-transforms/tests/fixture/next-dynamic/member-with-same-name/output-dev.js
|
'''
Mesh Manipulation Example
=========================
This demonstrates creating a mesh and using it to deform the texture (the
kivy log). You should see the kivy logo with a five sliders to right.
The sliders change the mesh points' x and y offsets, radius, and a
'wobble' deformation's magnitude and speed.
This example is developed in gabriel's blog post at
http://kivy.org/planet/2014/01/kivy-image-manipulations-with-mesh-and-textures/
'''
from kivy.app import App
from kivy.lang import Builder
from kivy.core.image import Image as CoreImage
from kivy.properties import ListProperty, ObjectProperty, NumericProperty
from kivy.clock import Clock
from kivy.core.window import Window
from math import sin, cos, pi
kv = '''
BoxLayout:
Widget:
canvas:
Color:
rgba: 1, 1, 1, 1
Mesh:
vertices: app.mesh_points
indices: range(len(app.mesh_points) // 4)
texture: app.mesh_texture
mode: 'triangle_fan'
BoxLayout:
orientation: 'vertical'
size_hint_x: None
width: 100
Slider:
value: app.offset_x
on_value: app.offset_x = args[1]
min: -1
max: 1
Slider:
value: app.offset_y
on_value: app.offset_y = args[1]
min: -1
max: 1
Slider:
value: app.radius
on_value: app.radius = args[1]
min: 10
max: 1000
Slider:
value: app.sin_wobble
on_value: app.sin_wobble = args[1]
min: -50
max: 50
Slider:
value: app.sin_wobble_speed
on_value: app.sin_wobble_speed = args[1]
min: 0
max: 50
step: 1
'''
class MeshBallApp(App):
    """Demo app: deform the Kivy logo texture with a fan-shaped mesh.

    The kv sliders write the properties below; update_points() rebuilds
    the mesh vertex list from them on every frame.
    """
    # Flat vertex list consumed by the kv Mesh: [x, y, u, v, ...].
    mesh_points = ListProperty([])
    # Texture mapped over the mesh (the Kivy logo).
    mesh_texture = ObjectProperty(None)
    # Base radius of the fan, in pixels.
    radius = NumericProperty(500)
    # Texture-coordinate offsets applied per vertex.
    offset_x = NumericProperty(.5)
    offset_y = NumericProperty(.5)
    # Magnitude and angular frequency of the sine "wobble" deformation.
    sin_wobble = NumericProperty(0)
    sin_wobble_speed = NumericProperty(0)
    def build(self):
        """Load the texture, start the per-frame update, build the kv UI."""
        self.mesh_texture = CoreImage('data/logo/kivy-icon-512.png').texture
        # Interval 0 == run once per frame.
        Clock.schedule_interval(self.update_points, 0)
        return Builder.load_string(kv)
    def update_points(self, *args):
        """ replace self.mesh_points based on current slider positions.
        Called continuously by a timer because this only sample code.
        """
        # First vertex is the fan center (window center, texture middle).
        points = [Window.width / 2, Window.height / 2, .5, .5]
        i = 0
        # Walk the circle in ~200 steps of 0.01*pi radians.
        while i < 2 * pi:
            i += 0.01 * pi
            points.extend([
                Window.width / 2 + cos(i) * (self.radius + self.sin_wobble
                                             * sin(i * self.sin_wobble_speed)),
                Window.height / 2 + sin(i) * (self.radius + self.sin_wobble
                                              * sin(i * self.sin_wobble_speed)),
                self.offset_x + sin(i),
                self.offset_y + cos(i)])
        self.mesh_points = points
# Script entry point: run the mesh demo app.
if __name__ == '__main__':
    MeshBallApp().run()
|
unknown
|
codeparrot/codeparrot-clean
| ||
"""Tests for distutils.command.bdist_wininst."""
import unittest
import os
from distutils.dist import Distribution
from distutils.command.bdist_wininst import bdist_wininst
from distutils.tests import support
class BuildWinInstTestCase(support.TempdirManager,
                           unittest.TestCase):
    """Tests for the distutils bdist_wininst command."""

    def test_get_exe_bytes(self):
        """get_exe_bytes() must return the wininst stub on any platform."""
        # issue5731: command was broken on non-windows platforms
        # this test makes sure it works now for every platform
        # let's create a command
        tmp_dir = self.mkdtemp()
        pkg_dir = os.path.join(tmp_dir, 'foo')
        os.mkdir(pkg_dir)
        dist = Distribution()
        cmd = bdist_wininst(dist)
        cmd.ensure_finalized()
        # let's run the code that finds the right wininst*.exe file
        # and make sure it finds it and returns its content
        # no matter what platform we have
        exe_file = cmd.get_exe_bytes()
        # assertTrue replaces the deprecated assert_ alias (removed in Py3).
        self.assertTrue(len(exe_file) > 10)
def test_suite():
    """Return this module's test suite.

    Uses the loader API instead of unittest.makeSuite, which is deprecated
    and removed in Python 3.13.
    """
    return unittest.defaultTestLoader.loadTestsFromTestCase(BuildWinInstTestCase)
if __name__ == '__main__':
    # Bug fix: the original called test_support.run_unittest() but never
    # imported test_support, raising NameError; run the suite via unittest
    # directly instead.
    unittest.main(defaultTest='test_suite')
|
unknown
|
codeparrot/codeparrot-clean
| ||
# -*- coding: latin1 -*-
################################################################################################
#
#
import tweepy, datetime, sys, time, json, os, os.path, shutil, time, struct, random
reload(sys)
sys.setdefaultencoding('utf-8')
######################################################################################################################################################################
## Status - Versão 1 - Script que percorre o conjunto de Listas coletadas ques está em formato binário e adiciona os membros e inscritos nas listas de cada ego em um arquivo nomeado pelo ID do ego
## onde cada linha representa uma Lista do Twitter.
##
## CONSIDERA UMA COMUNIDADE GROUND-TRUTH APENAS OS MEMBROS DAS LISTAS - IGNORA OS INSCRITOS.
##
######################################################################################################################################################################
################################################################################################
# Read a binary file of packed longs and return the Twitter-list ids it contains
################################################################################################
def read_arq_bin(file):
    """Return the list of ids stored in the packed binary *file*.

    Each record is one ``list_struct`` (a single long); records are read
    until EOF.  NOTE(review): relies on the module-global ``list_struct``
    and opens the file in text mode ('r') -- Python 2 behavior.
    """
    with open(file, 'r') as f:
        # Seek to the end to learn the file size, then rewind.
        f.seek(0,2)
        tamanho = f.tell()
        f.seek(0)
        lists_ids = []
        while f.tell() < tamanho:
            buffer = f.read(list_struct.size)
            user = list_struct.unpack(buffer)
            lists_ids.append(user[0])
        return lists_ids
######################################################################################################################################################################
#
# Converte formato data para armazenar em formato JSON
#
######################################################################################################################################################################
class DateTimeEncoder(json.JSONEncoder):
    """JSON encoder that serializes datetimes as [Y, M, D, h, m, s] lists."""
    def default(self, obj):
        # Non-datetimes fall through to the base implementation (which
        # raises TypeError for unserializable objects).
        if not isinstance(obj, datetime.datetime):
            return json.JSONEncoder.default(self, obj)
        # A datetime becomes the first six fields of its timetuple.
        return list(obj.timetuple())[:6]
######################################################################################################################################################################
#
# Write the failure reason for *user* to a dedicated .err file
#
######################################################################################################################################################################
def save_error(user,reason):
    agora = datetime.datetime.strftime(datetime.datetime.now(), '%Y%m%d%H%M') # Current instant as YearMonthDayHourMinute
    error={}
    with open(error_dir+str(user)+".err", "w") as outfile: # Open the error file for writing (mode "w" truncates)
        error = {'reason':str(reason) ,'date':agora}
        outfile.write(json.dumps(error, cls=DateTimeEncoder, separators=(',', ':'))+"\n")
    print error
######################################################################################################################################################################
#
# Save the ego's list information as communities - text file where each line is one Twitter list (community)
#
######################################################################################################################################################################
def save_ground_truth(ego,i,lists_ids):
    # Skip egos whose TXT output already exists (already collected).
    if not os.path.isfile(output_ground_truth_txt+str(ego)+".txt"):
        print ("Ego nº "+str(i)+" - Localizando membros...")
        n=0
        for list_id in lists_ids:
            n+=1
            print ("Buscando por membros lista "+str(n)+"/"+str(len(lists_ids)))
            list_ground_truth = set()
            # Only lists whose member file was actually collected contribute.
            if os.path.isfile(members_lists_collected+str(list_id)+".dat"):
                list_members_users = read_arq_bin(members_lists_collected+str(list_id)+".dat")
                if list_members_users is not None:
                    list_ground_truth.update(list_members_users)
            ##################################################### Saving TXT file #####################################################
            try:
                with open(output_ground_truth_txt+str(ego)+".txt", 'a+') as f:
                    if list_ground_truth is not None: # Check that the set is not empty
                        for item in list_ground_truth: #
                            f.write(str(item)+" ") # Write the list's member ids separated by spaces
                        f.write("\n") # Move on to the next line (one line per list)
            except Exception as e:
                save_error(ego,str(e))
                # On failure, remove the partial TXT so a rerun starts clean.
                if os.path.exists(output_ground_truth_txt+str(ego)+".txt"):
                    os.remove(output_ground_truth_txt+str(ego)+".txt")
                    print ("Erro - Ego nº "+str(i)+" - Arquivo TXT removido com sucesso...")
            ##################################################### Saving JSON file #####################################################
            try:
                with open(output_ground_truth_json+str(ego)+".json", 'a+') as f:
                    if list_ground_truth is not None: # Check that the set is not empty
                        f.write(json.dumps(list(list_ground_truth)))
            except Exception as e2:
                save_error(ego,str(e2))
                # On failure, remove the partial JSON so a rerun starts clean.
                if os.path.exists(output_ground_truth_json+str(ego)+".json"):
                    os.remove(output_ground_truth_json+str(ego)+".json")
                    print ("Erro - Ego nº "+str(i)+" - Arquivo JSON removido com sucesso...")
    else:
        print ("Ego nº "+str(i)+" - Membros e Inscritos já coletados - Ignorando...")
    print("\n######################################################################")
######################################################################################################################################################################
######################################################################################################################################################################
#
# Main method of the program.
#
######################################################################################################################################################################
######################################################################################################################################################################
def main():
    i = 0 # Ego counter (progress output only)
    for file in os.listdir(fonte):
        i+=1
        # The filename (minus .dat) is the ego's numeric id; Python 2 ``long``.
        ego = file.split(".dat")
        ego = long(ego[0])
        lists_ids = read_arq_bin(fonte+file)
        save_ground_truth(ego,i,lists_ids)
    print("######################################################################")
    print("Script finalizado!")
    print("######################################################################\n")
#####################################################################################################################################################################
#
# PROGRAM START
#
######################################################################################################################################################################
################################### CONFIGURE THE FOLLOWING LINES ####################################################
######################################################################################################################
fonte = "/home/amaury/dataset/ground_truth/bin/" # List of egos... any network in the dataset would do.
output_ground_truth_txt = "/home/amaury/dataset/ground_truth_only_members/lists_users_TXT_of_bin/full/" # Directory that will store the communities in TXT format
output_ground_truth_json = "/home/amaury/dataset/ground_truth_only_members/lists_users_JSON_of_bin/full/" # Directory that will store the communities in JSON format, all mixed together (lists the ego owns and lists it subscribes to)
error_dir = "/home/amaury/dataset/ground_truth_only_members/lists_users_JSON_of_bin_ERROR/" # Directory that will store errors while formatting the lists.
######################################################################################################################
lists_set = "/home/amaury/coleta/users_lists/ego_lists_overview_full.json" # File containing each ego's set of lists.
members_lists_collected = "/home/amaury/coleta/ground_truth/members_lists_collected/bin/" # Directory containing each ego's COLLECTED set of lists.
######################################################################################################################
formato = 'l' # One long per record
list_struct = struct.Struct(formato) # struct object used to read/write the binary record format
######################################################################################################################
# Create the output directories if they do not exist yet
if not os.path.exists(output_ground_truth_txt):
    os.makedirs(output_ground_truth_txt)
if not os.path.exists(output_ground_truth_json):
    os.makedirs(output_ground_truth_json)
if not os.path.exists(error_dir):
    os.makedirs(error_dir)
# Run the main method
if __name__ == "__main__": main()
|
unknown
|
codeparrot/codeparrot-clean
| ||
from amsn2.views import StringView, MenuItemView
from PyQt4 import Qt
from PyQt4 import QtCore
from PyQt4 import QtGui
def create_menu_items_from_view(menu, items):
    """Populate *menu* (a QMenu, or a QActionGroup for radio groups) from a
    list of MenuItemView items.

    TODO: images & radio groups, for now only basic representation.

    Bug fixes vs. the original: the RADIOBUTTON branch created an action
    but never added it to the menu, and the RADIOBUTTONGROUP branch passed
    the QActionGroup itself to QMenu.addActions(), which expects a list of
    actions.
    """
    for item in items:
        if item.type is MenuItemView.COMMAND:
            it = QtGui.QAction(item.label, menu)
            QtCore.QObject.connect(it, QtCore.SIGNAL("triggered()"), item.command)
            menu.addAction(it)
        elif item.type is MenuItemView.CASCADE_MENU:
            men = QtGui.QMenu(item.label, menu)
            create_menu_items_from_view(men, item.items)
            menu.addMenu(men)
        elif item.type is MenuItemView.SEPARATOR:
            menu.addSeparator()
        elif item.type is MenuItemView.CHECKBUTTON:
            it = QtGui.QAction(item.label, menu)
            it.setCheckable(True)
            # TODO: isn't it checkbox_value instead of checkbox? By the way
            # the MenuItemView constructor doesn't store the checkbox_value
            # passed to it.
            if item.checkbox:
                it.setChecked(True)
            QtCore.QObject.connect(it, QtCore.SIGNAL("triggered()"), item.command)
            menu.addAction(it)
        elif item.type is MenuItemView.RADIOBUTTON:
            it = QtGui.QAction(item.label, menu)
            it.setCheckable(True)
            if item.checkbox:
                it.setChecked(True)
            QtCore.QObject.connect(it, QtCore.SIGNAL("triggered()"), item.command)
            menu.addAction(it)  # bug fix: the action was never added
        elif item.type is MenuItemView.RADIOBUTTONGROUP:
            group = QtGui.QActionGroup(menu)
            create_menu_items_from_view(group, item.items)
            menu.addActions(group.actions())  # bug fix: addActions takes a list
|
unknown
|
codeparrot/codeparrot-clean
| ||
//go:build ignore
// +build ignore
//go:generate go run gen.go
package main
import (
"context"
"fmt"
"io/fs"
"os"
"path/filepath"
"regexp"
"sort"
"strings"
"cuelang.org/go/cue"
"cuelang.org/go/cue/cuecontext"
"cuelang.org/go/cue/load"
"github.com/grafana/codejen"
"github.com/grafana/cuetsy"
"github.com/grafana/grafana/pkg/codegen"
)
// CoreDefParentPath is the path, relative to the repository root, where
// each child directory is expected to contain .cue files defining one
// Core kind.
var CoreDefParentPath = "kinds"
// TSCoreKindParentPath is the path, relative to the repository root, to the directory that
// contains one directory per kind, full of generated TS kind output: types and default consts.
var TSCoreKindParentPath = filepath.Join("packages", "grafana-schema", "src", "raw")
// main drives the core-kinds code generation pipeline: it loads every kind
// definition under kinds/, runs the Go/TS jennies over them, merges in the
// common-schema TS output, and then either verifies (CODEGEN_VERIFY set) or
// writes the result relative to the repository root.
func main() {
	if len(os.Args) > 1 {
		fmt.Fprintf(os.Stderr, "code generator does not currently accept any arguments\n, got %q", os.Args)
		os.Exit(1)
	}
	// Core kinds composite code generator. Produces all generated code in
	// grafana/grafana that derives from core kinds.
	coreKindsGen := codejen.JennyListWithNamer(func(def codegen.SchemaForGen) string {
		return def.Name
	})
	// All the jennies that comprise the core kinds generator pipeline
	coreKindsGen.Append(
		&codegen.GoSpecJenny{},
		&codegen.K8ResourcesJenny{},
		codegen.LatestMajorsOrXJenny(TSCoreKindParentPath),
		codegen.TSVeneerIndexJenny(filepath.Join("packages", "grafana-schema", "src")),
	)
	header := codegen.SlashHeaderMapper("kinds/gen.go")
	coreKindsGen.AddPostprocessors(header, codegen.GoFormat())
	ctx := cuecontext.New()
	cwd, err := os.Getwd()
	if err != nil {
		fmt.Fprintf(os.Stderr, "could not get working directory: %s", err)
		os.Exit(1)
	}
	// The generator runs from the kinds/ subdirectory, so the repository
	// root is its parent.
	groot := filepath.Dir(cwd)
	f := os.DirFS(filepath.Join(groot, CoreDefParentPath))
	kinddirs := elsedie(fs.ReadDir(f, "."))("error reading core kind fs root directory")
	all, err := loadCueFiles(ctx, kinddirs)
	if err != nil {
		die(err)
	}
	// Deterministic output ordering regardless of directory iteration order.
	sort.Slice(all, func(i, j int) bool {
		return all[i].Name < all[j].Name
	})
	jfs, err := coreKindsGen.GenerateFS(all...)
	if err != nil {
		die(fmt.Errorf("core kinddirs codegen failed: %w", err))
	}
	commfsys := elsedie(genCommon(ctx, groot))("common schemas failed")
	commfsys = elsedie(commfsys.Map(header))("failed gen header on common fsys")
	if err = jfs.Merge(commfsys); err != nil {
		die(err)
	}
	// CODEGEN_VERIFY turns the run into a CI check: fail if outputs drift.
	if _, set := os.LookupEnv("CODEGEN_VERIFY"); set {
		if err = jfs.Verify(context.Background(), groot); err != nil {
			die(fmt.Errorf("generated code is out of sync with inputs:\n%s\nrun `make gen-cue` to regenerate", err))
		}
	} else if err = jfs.Write(context.Background(), groot); err != nil {
		die(fmt.Errorf("error while writing generated code to disk:\n%s", err))
	}
}
// dummyCommonJenny only labels the hand-assembled common-schema file with a
// jenny name inside the codejen FS; its Generate method is never invoked.
type dummyCommonJenny struct{}
// genCommon generates TypeScript for the shared schemas under
// packages/grafana-schema/src/common and returns them as a codejen.FS.
func genCommon(ctx *cue.Context, groot string) (*codejen.FS, error) {
	fsys := codejen.NewFS()
	path := filepath.Join("packages", "grafana-schema", "src", "common")
	// NOTE(review): this Map runs on an empty FS (the file is Added below),
	// so packageMapper appears to be a no-op here -- confirm intent.
	fsys = elsedie(fsys.Map(packageMapper))("failed remapping fs")
	commonFiles := make([]string, 0)
	// Collect every top-level .cue file in the common schema directory.
	filepath.WalkDir(filepath.Join(groot, path), func(path string, d fs.DirEntry, err error) error {
		if d.IsDir() || filepath.Ext(d.Name()) != ".cue" {
			return nil
		}
		commonFiles = append(commonFiles, path)
		return nil
	})
	instance := load.Instances(commonFiles, &load.Config{})[0]
	if instance.Err != nil {
		return nil, instance.Err
	}
	v := ctx.BuildInstance(instance)
	b := elsedie(cuetsy.Generate(v, cuetsy.Config{
		Export: true,
	}))("failed to generate common schema TS")
	_ = fsys.Add(*codejen.NewFile(filepath.Join(path, "common.gen.ts"), b, dummyCommonJenny{}))
	return fsys, nil
}
// JennyName identifies this jenny in generated-file headers.
func (j dummyCommonJenny) JennyName() string {
	return "CommonSchemaJenny"
}
// Generate satisfies the jenny interface but is intentionally inert; the
// common.gen.ts file is produced directly in genCommon.
func (j dummyCommonJenny) Generate(dummy any) ([]codejen.File, error) {
	return nil, nil
}
// pkgReplace matches a "package kindsys" clause at the start of the data.
var pkgReplace = regexp.MustCompile("^package kindsys")
// packageMapper rewrites a generated file's package clause from kindsys to
// common.  Without the multiline flag, ^ only anchors at offset 0.
func packageMapper(f codejen.File) (codejen.File, error) {
	f.Data = pkgReplace.ReplaceAllLiteral(f.Data, []byte("package common"))
	return f, nil
}
// elsedie unwraps a (value, error) pair: the returned closure prints the
// given message plus the error and exits the process when err != nil,
// otherwise it just yields the value.  Lets call sites stay one-liners:
// v := elsedie(f())("context message").
func elsedie[T any](t T, err error) func(msg string) T {
	if err != nil {
		return func(msg string) T {
			fmt.Fprintf(os.Stderr, "%s: %s\n", msg, err)
			os.Exit(1)
			return t
		}
	}
	return func(msg string) T {
		return t
	}
}
// die prints err to stderr followed by a newline and exits with status 1.
func die(err error) {
	fmt.Fprintln(os.Stderr, err)
	os.Exit(1)
}
// loadCueFiles compiles the first .cue file found in each kind directory and
// returns one SchemaForGen per kind.  I/O failures terminate the process;
// an error is returned only when a schema lacks a "name" field.
func loadCueFiles(ctx *cue.Context, dirs []os.DirEntry) ([]codegen.SchemaForGen, error) {
	values := make([]codegen.SchemaForGen, 0)
	for _, dir := range dirs {
		if !dir.IsDir() {
			continue
		}
		entries, err := os.ReadDir(dir.Name())
		if err != nil {
			// NOTE(review): %s on an os.DirEntry prints its default
			// representation; dir.Name() may read better -- confirm.
			fmt.Fprintf(os.Stderr, "error opening %s directory: %s", dir, err)
			os.Exit(1)
		}
		if len(entries) == 0 {
			continue
		}
		// It's assuming that we only have one file in each folder
		entry := filepath.Join(dir.Name(), entries[0].Name())
		cueFile, err := os.ReadFile(entry)
		if err != nil {
			fmt.Fprintf(os.Stderr, "unable to open %s/%s file: %s", dir, entries[0].Name(), err)
			os.Exit(1)
		}
		v := ctx.CompileBytes(cueFile)
		name, err := getSchemaName(v)
		if err != nil {
			return nil, err
		}
		sch := codegen.SchemaForGen{
			Name: name,
			FilePath: "./" + filepath.Join(CoreDefParentPath, entry),
			CueFile: v,
			IsGroup: false,
			OutputName: strings.ToLower(name),
		}
		values = append(values, sch)
	}
	return values, nil
}
// getSchemaName extracts the "name" field from a compiled kind schema and
// normalizes it for use in identifiers (dashes become underscores).
// Returns an error when the schema has no string "name" field.
func getSchemaName(v cue.Value) (string, error) {
	namePath := v.LookupPath(cue.ParsePath("name"))
	name, err := namePath.String()
	if err != nil {
		return "", fmt.Errorf("file doesn't have name field set: %s", err)
	}
	// strings.ReplaceAll is the idiomatic form of Replace(..., -1).
	return strings.ReplaceAll(name, "-", "_"), nil
}
|
go
|
github
|
https://github.com/grafana/grafana
|
kinds/gen.go
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2008-2009 Zuza Software Foundation
#
# This file is part of the Translate Toolkit.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
"""Segment Gettext PO, XLIFF and TMX localization files at the sentence level.
See: http://docs.translatehouse.org/projects/translate-toolkit/en/latest/commands/posegment.html
for examples and usage instructions.
"""
from translate.lang import factory as lang_factory
from translate.storage import factory
class segment:
    """Sentence-level segmenter for translation store units."""

    def __init__(self, sourcelang, targetlang, stripspaces=True, onlyaligned=False):
        # Language objects supplying .sentences(); flags control whitespace
        # stripping and whether misaligned units are dropped entirely.
        self.sourcelang = sourcelang
        self.targetlang = targetlang
        self.stripspaces = stripspaces
        self.onlyaligned = onlyaligned

    def segmentunit(self, unit):
        """Split *unit* into one unit per sentence.

        Returns a list of new units, [unit] when segmenting is not possible,
        or None when the unit is misaligned and onlyaligned was requested.
        """
        if unit.isheader() or unit.hasplural():
            return [unit]
        srcsents = self.sourcelang.sentences(unit.source, strip=self.stripspaces)
        tgtsents = self.targetlang.sentences(unit.target, strip=self.stripspaces)
        if unit.istranslated() and len(srcsents) != len(tgtsents):
            # Source/target sentence counts disagree: keep the unit whole,
            # or drop it when only aligned output was requested.
            return None if self.onlyaligned else [unit]
        # We could do more here to check if the lengths correspond more or
        # less, certain quality checks are passed, etc.  But for now this is
        # a good start.
        translated = unit.istranslated()
        segmented = []
        for pos, sentence in enumerate(srcsents):
            piece = unit.copy()
            piece.source = sentence
            piece.target = tgtsents[pos] if translated else ""
            segmented.append(piece)
        return segmented

    def convertstore(self, fromstore):
        """Return a new store of the same type with every unit segmented."""
        tostore = type(fromstore)()
        for unit in fromstore.units:
            pieces = self.segmentunit(unit)
            if pieces:
                for piece in pieces:
                    tostore.addunit(piece)
        return tostore
def segmentfile(inputfile, outputfile, templatefile, sourcelanguage="en", targetlanguage=None, stripspaces=True, onlyaligned=False):
    """Read inputfile, segment its units at sentence level, write outputfile.

    templatefile is unused but required by the converter framework.
    Returns 1 on success, 0 when the input store is empty.
    """
    store = factory.getobject(inputfile)
    if store.isempty():
        return 0
    convertor = segment(
        lang_factory.getlanguage(sourcelanguage),
        lang_factory.getlanguage(targetlanguage),
        stripspaces=stripspaces,
        onlyaligned=onlyaligned,
    )
    outputfile.write(str(convertor.convertstore(store)))
    return 1
def main():
    """Command-line entry point: wire up option parsing and run conversion."""
    from translate.convert import convert

    segmenters = {
        "po": ("po", segmentfile),
        "xlf": ("xlf", segmentfile),
        "tmx": ("tmx", segmentfile),
    }
    parser = convert.ConvertOptionParser(segmenters, usepots=True, description=__doc__)
    parser.add_option("-l", "--language", dest="targetlanguage", default=None,
                      help="the target language code", metavar="LANG")
    parser.add_option("", "--source-language", dest="sourcelanguage", default=None,
                      help="the source language code (default 'en')", metavar="LANG")
    parser.passthrough.append("sourcelanguage")
    parser.passthrough.append("targetlanguage")
    parser.add_option("", "--keepspaces", dest="stripspaces", action="store_false",
                      default=True, help="Disable automatic stripping of whitespace")
    parser.passthrough.append("stripspaces")
    parser.add_option("", "--only-aligned", dest="onlyaligned", action="store_true",
                      default=False, help="Removes units where sentence number does not correspond")
    parser.passthrough.append("onlyaligned")
    parser.run()


if __name__ == '__main__':
    main()
|
unknown
|
codeparrot/codeparrot-clean
| ||
/* GLOBAL STYLES
-------------------------------------------------- */

/* Padding below the footer and lighter body text */
body {
  padding-top: 3rem;
  padding-bottom: 3rem;
  color: rgb(var(--bs-tertiary-color-rgb));
}

/* CUSTOMIZE THE CAROUSEL
-------------------------------------------------- */

/* Carousel base class */
.carousel {
  margin-bottom: 4rem;
}

/* Since positioning the image, we need to help out the caption */
.carousel-caption {
  bottom: 3rem;
  z-index: 10;
}

/* Declare heights because of positioning of img element */
.carousel-item {
  height: 32rem;
}

/* MARKETING CONTENT
-------------------------------------------------- */

/* Center align the text within the three columns below the carousel */
.marketing .col-lg-4 {
  margin-bottom: 1.5rem;
  text-align: center;
}

.marketing .col-lg-4 p {
  margin-right: .75rem;
  margin-left: .75rem;
}

/* Featurettes
------------------------- */

.featurette-divider {
  margin: 5rem 0; /* Space out the Bootstrap <hr> more */
}

/* Thin out the marketing headings */
/* NOTE(review): no rule follows this comment in this file; the rule it
   described appears to have been removed, leaving the comment orphaned. */

/* RESPONSIVE CSS
-------------------------------------------------- */

@media (min-width: 40em) {
  /* Bump up size of carousel content */
  .carousel-caption p {
    margin-bottom: 1.25rem;
    font-size: 1.25rem;
    line-height: 1.4;
  }

  .featurette-heading {
    font-size: 50px;
  }
}

@media (min-width: 62em) {
  /* Extra space above featurette headings on large screens */
  .featurette-heading {
    margin-top: 7rem;
  }
}
|
css
|
github
|
https://github.com/twbs/bootstrap
|
site/src/assets/examples/carousel/carousel.rtl.css
|
"""
Copyright (c) 2012-2015 RockStor, Inc. <http://rockstor.com>
This file is part of RockStor.
RockStor is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation; either version 2 of the License,
or (at your option) any later version.
RockStor is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
import sys
import time
from cli.api_wrapper import APIWrapper
from fs.btrfs import device_scan
from system.osi import run_command
import requests
from django.conf import settings
from storageadmin.models import Setup
# Paths to the qgroup maintenance helper scripts shipped in the build tree.
BASE_DIR = settings.ROOT_DIR
BASE_BIN = '%sbin' % BASE_DIR
QGROUP_CLEAN = '%s/qgroup-clean' % BASE_BIN
QGROUP_MAXOUT_LIMIT = '%s/qgroup-maxout-limit' % BASE_BIN


def main():
    """Boot-time bootstrap: scan btrfs devices, then (if the appliance is
    set up) call the API to bootstrap services and run qgroup maintenance."""
    # NOTE(review): this module is Python 2 ("except Exception, e" syntax);
    # keep any edits Python 2 compatible.
    try:
        device_scan()
    except Exception, e:
        print ('BTRFS device scan failed due to an exception. This indicates '
               'a serious problem. Aborting. Exception: %s' % e.__str__())
        sys.exit(1)
    print('BTRFS device scan complete')
    # if the appliance is not setup, there's nothing more to do beyond
    # device scan
    setup = Setup.objects.first()
    if (setup is None or setup.setup_user is False):
        print('Appliance is not yet setup.')
        return
    num_attempts = 0
    while True:
        try:
            aw = APIWrapper()
            aw.api_call('network')
            aw.api_call('commands/bootstrap', calltype='post')
            break
        except Exception, e:
            # Retry on every exception, primarily because of django-oauth
            # related code behaving unpredictably while setting tokens.
            # Retrying is a decent workaround for now(11302015).
            if (num_attempts > 15):
                print('Max attempts(15) reached. Connection errors persist. '
                      'Failed to bootstrap. Error: %s' % e.__str__())
                sys.exit(1)
            print('Exception occured while bootstrapping. This could be because '
                  'rockstor.service is still starting up. will wait 2 seconds '
                  'and try again. Exception: %s' % e.__str__())
            time.sleep(2)
            num_attempts += 1
    print('Bootstrapping complete')
    # Both qgroup scripts are best effort: failures are logged, not fatal.
    try:
        print('Running qgroup cleanup. %s' % QGROUP_CLEAN)
        run_command([QGROUP_CLEAN])
    except Exception, e:
        print('Exception while running %s: %s' % (QGROUP_CLEAN, e.__str__()))
    try:
        print('Running qgroup limit maxout. %s' % QGROUP_MAXOUT_LIMIT)
        run_command([QGROUP_MAXOUT_LIMIT])
    except Exception, e:
        print('Exception while running %s: %s' % (QGROUP_MAXOUT_LIMIT, e.__str__()))


if __name__ == '__main__':
    main()
|
unknown
|
codeparrot/codeparrot-clean
| ||
#!/usr/bin/env python
#
# Copyright 2007 The Closure Linter Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Regular expression based lexer."""
__author__ = ('robbyw@google.com (Robert Walker)',
'ajp@google.com (Andy Perelson)')
from closure_linter.common import tokens
# Shorthand
Type = tokens.TokenType
class Tokenizer(object):
  """General purpose tokenizer.

  Attributes:
    mode: The latest mode of the tokenizer.  This allows patterns to
      distinguish if they are mid-comment, mid-parameter list, etc.
    matchers: Dictionary of modes to sequences of matchers that define the
      patterns to check at any given time.
    default_types: Dictionary of modes to types, defining what type to give
      non-matched text when in the given mode.  Defaults to Type.NORMAL.
  """

  def __init__(self, starting_mode, matchers, default_types):
    """Initialize the tokenizer.

    Args:
      starting_mode: Mode to start in.
      matchers: Dictionary of modes to sequences of matchers that defines the
        patterns to check at any given time.
      default_types: Dictionary of modes to types, defining what type to give
        non-matched text when in the given mode.  Defaults to Type.NORMAL.
    """
    self.__starting_mode = starting_mode
    self.matchers = matchers
    self.default_types = default_types

  def TokenizeFile(self, file):
    """Tokenizes the given file.

    Args:
      file: An iterable that yields one line of the file at a time.

    Returns:
      The first token in the file; subsequent tokens are reachable via the
      doubly linked list built by __AddToken.
    """
    # The current mode.
    self.mode = self.__starting_mode
    # The first token in the stream.
    self.__first_token = None
    # The last token added to the token stream.
    self.__last_token = None
    # The current line number.
    self.__line_number = 0

    for line in file:
      self.__line_number += 1
      self.__TokenizeLine(line)

    return self.__first_token

  def _CreateToken(self, string, token_type, line, line_number, values=None):
    """Creates a new Token object (or subclass).

    Args:
      string: The string of input the token represents.
      token_type: The type of token.
      line: The text of the line this token is in.
      line_number: The line number of the token.
      values: A dict of named values within the token.  For instance, a
        function declaration may have a value called 'name' which captures
        the name of the function.

    Returns:
      The newly created Token object.
    """
    return tokens.Token(string, token_type, line, line_number, values)

  def __TokenizeLine(self, line):
    """Tokenizes the given line.

    Args:
      line: The contents of the line.
    """
    string = line.rstrip('\n\r\f')
    line_number = self.__line_number
    self.__start_index = 0

    if not string:
      # Emit an explicit token for blank lines so they survive in the stream.
      self.__AddToken(self._CreateToken('', Type.BLANK_LINE, line, line_number))
      return

    normal_token = ''
    index = 0
    while index < len(string):
      for matcher in self.matchers[self.mode]:
        if matcher.line_start and index > 0:
          # This matcher only applies at the start of a line; skip mid-line.
          continue

        match = matcher.regex.match(string, index)

        if match:
          if normal_token:
            # Flush accumulated unmatched text before emitting the match.
            self.__AddToken(
                self.__CreateNormalToken(self.mode, normal_token, line,
                                         line_number))
            normal_token = ''

          # Add the match.
          self.__AddToken(self._CreateToken(match.group(), matcher.type, line,
                                            line_number, match.groupdict()))

          # Change the mode to the correct one for after this match.
          self.mode = matcher.result_mode or self.mode

          # Shorten the string to be matched.
          index = match.end()

          break

      else:
        # If the for loop finishes naturally (i.e. no matches) we just add the
        # first character to the string of consecutive non match characters.
        # These will constitute a NORMAL token.
        if string:
          normal_token += string[index:index + 1]
          index += 1

    if normal_token:
      # Flush any trailing unmatched text at end of line.
      self.__AddToken(
          self.__CreateNormalToken(self.mode, normal_token, line, line_number))

  def __CreateNormalToken(self, mode, string, line, line_number):
    """Creates a normal token.

    Args:
      mode: The current mode.
      string: The string to tokenize.
      line: The line of text.
      line_number: The line number within the file.

    Returns:
      A Token object, of the default type for the current mode.
    """
    type = Type.NORMAL
    if mode in self.default_types:
      type = self.default_types[mode]
    return self._CreateToken(string, type, line, line_number)

  def __AddToken(self, token):
    """Add the given token to the token stream.

    Args:
      token: The token to add.
    """
    # Store the first token, or point the previous token to this one.
    if not self.__first_token:
      self.__first_token = token
    else:
      self.__last_token.next = token

    # Establish the doubly linked list
    token.previous = self.__last_token
    self.__last_token = token

    # Compute the character indices
    token.start_index = self.__start_index
    self.__start_index += token.length
|
unknown
|
codeparrot/codeparrot-clean
| ||
#!/bin/sh
# Copyright IBM Corp. 2016, 2025
# SPDX-License-Identifier: BUSL-1.1
#
# Docker entrypoint for the Vault UBI image: resolves listener addresses from
# interface names, writes optional inline config, fixes bind-mount ownership,
# grants the mlock capability, then execs the requested command (re-running
# itself as the unprivileged "vault" user when started as root).
#
# NOTE(review): ${1:0:1} and the [[ ]] test near the bottom are bash
# extensions, not POSIX sh; this script assumes /bin/sh is bash-compatible
# (true on UBI images) — confirm before porting to dash/ash.

set -e

# Prevent core dumps
ulimit -c 0

# Allow setting VAULT_REDIRECT_ADDR and VAULT_CLUSTER_ADDR using an interface
# name instead of an IP address. The interface name is specified using
# VAULT_REDIRECT_INTERFACE and VAULT_CLUSTER_INTERFACE environment variables. If
# VAULT_*_ADDR is also set, the resulting URI will combine the protocol and port
# number with the IP of the named interface.
get_addr () {
    local if_name=$1
    local uri_template=$2
    # Take the interface's first inet address and splice it into the URI
    # template, keeping the template's scheme and port.
    ip addr show dev $if_name | awk -v uri=$uri_template '/\s*inet\s/ { \
      ip=gensub(/(.+)\/.+/, "\\1", "g", $2); \
      print gensub(/^(.+:\/\/).+(:.+)$/, "\\1" ip "\\2", "g", uri); \
      exit}'
}

if [ -n "$VAULT_REDIRECT_INTERFACE" ]; then
    export VAULT_REDIRECT_ADDR=$(get_addr $VAULT_REDIRECT_INTERFACE ${VAULT_REDIRECT_ADDR:-"http://0.0.0.0:8200"})
    echo "Using $VAULT_REDIRECT_INTERFACE for VAULT_REDIRECT_ADDR: $VAULT_REDIRECT_ADDR"
fi
if [ -n "$VAULT_CLUSTER_INTERFACE" ]; then
    export VAULT_CLUSTER_ADDR=$(get_addr $VAULT_CLUSTER_INTERFACE ${VAULT_CLUSTER_ADDR:-"https://0.0.0.0:8201"})
    echo "Using $VAULT_CLUSTER_INTERFACE for VAULT_CLUSTER_ADDR: $VAULT_CLUSTER_ADDR"
fi

# VAULT_CONFIG_DIR isn't exposed as a volume but you can compose additional
# config files in there if you use this image as a base, or use
# VAULT_LOCAL_CONFIG below.
VAULT_CONFIG_DIR=/vault/config

# You can also set the VAULT_LOCAL_CONFIG environment variable to pass some
# Vault configuration JSON without having to bind any volumes.
if [ -n "$VAULT_LOCAL_CONFIG" ]; then
    echo "$VAULT_LOCAL_CONFIG" > "$VAULT_CONFIG_DIR/local.json"
fi

# Due to OpenShift environment compatibility, we have to allow group write
# access to the Vault configuration. This requires us to disable the stricter
# file permissions checks introduced in Vault v1.11.0.
export VAULT_DISABLE_FILE_PERMISSIONS_CHECK=true

# If the user is trying to run Vault directly with some arguments, then
# pass them to Vault.
if [ "${1:0:1}" = '-' ]; then
    set -- vault "$@"
fi

# Look for Vault subcommands.
if [ "$1" = 'server' ]; then
    shift
    set -- vault server \
        -config="$VAULT_CONFIG_DIR" \
        -dev-root-token-id="$VAULT_DEV_ROOT_TOKEN_ID" \
        -dev-listen-address="${VAULT_DEV_LISTEN_ADDRESS:-"0.0.0.0:8200"}" \
        "$@"
elif [ "$1" = 'version' ]; then
    # This needs a special case because there's no help output.
    set -- vault "$@"
elif vault --help "$1" 2>&1 | grep -q "vault $1"; then
    # We can't use the return code to check for the existence of a subcommand, so
    # we have to use grep to look for a pattern in the help output.
    set -- vault "$@"
fi

# If we are running Vault, make sure it executes as the proper user.
if [ "$1" = 'vault' ]; then
    if [ -z "$SKIP_CHOWN" ]; then
        # If the config dir is bind mounted then chown it
        if [ "$(stat -c %u /vault/config)" != "$(id -u vault)" ]; then
            chown -R vault:vault /vault/config || echo "Could not chown /vault/config (may not have appropriate permissions)"
        fi

        # If the logs dir is bind mounted then chown it
        if [ "$(stat -c %u /vault/logs)" != "$(id -u vault)" ]; then
            chown -R vault:vault /vault/logs
        fi

        # If the file dir is bind mounted then chown it
        if [ "$(stat -c %u /vault/file)" != "$(id -u vault)" ]; then
            chown -R vault:vault /vault/file
        fi
    fi

    if [ -z "$SKIP_SETCAP" ]; then
        # Allow mlock to avoid swapping Vault memory to disk
        setcap cap_ipc_lock=+ep $(readlink -f /bin/vault)

        # In the case vault has been started in a container without IPC_LOCK privileges
        if ! vault -version 1>/dev/null 2>/dev/null; then
            >&2 echo "Couldn't start vault with IPC_LOCK. Disabling IPC_LOCK, please use --cap-add IPC_LOCK"
            setcap cap_ipc_lock=-ep $(readlink -f /bin/vault)
        fi
    fi
fi

# In case of Docker, where swap may be enabled, we
# still require mlocking to be available. So this script
# was executed as root to make this happen, however,
# we're now rerunning the entrypoint script as the Vault
# user but no longer need to run setup code for setcap
# or chowning directories (previously done on the first run).
if [[ "$(id -u)" == '0' ]]
then
    export SKIP_CHOWN="true"
    export SKIP_SETCAP="true"
    exec su vault -p "$0" -- "$@"
else
    exec "$@"
fi
|
unknown
|
github
|
https://github.com/hashicorp/vault
|
.release/docker/ubi-docker-entrypoint.sh
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# RUN: %p/shapes_for_arguments | FileCheck %s
# pylint: disable=missing-docstring,line-too-long
import tensorflow.compat.v2 as tf
from tensorflow.compiler.mlir.tensorflow.tests.tf_saved_model import common
class TestModule(tf.Module):
  # Check that we get shapes annotated on function arguments.
  #
  # Besides checking the shape on the function input argument, this test also
  # checks that the shape on the input argument is propagated to the return
  # value.
  # We eventually want to move the shape inference to a pass separate from
  # the initial import, in which case that aspect of this test doesn't make much
  # sense and will be superceded by MLIR->MLIR shape inference tests.
  #
  # CHECK: func {{@[a-zA-Z_0-9]+}}(%arg0: tensor<f32> {{.*}}) -> (tensor<f32> {{.*}})
  # CHECK-SAME: attributes {{.*}} tf_saved_model.exported_names = ["some_function"]
  @tf.function(input_signature=[tf.TensorSpec([], tf.float32)])
  def some_function(self, x):
    """Identity on a scalar f32; its signature should appear in the MLIR."""
    return x


if __name__ == '__main__':
  # common.do_test saves the module and pipes the MLIR through FileCheck.
  common.do_test(TestModule)
|
python
|
github
|
https://github.com/tensorflow/tensorflow
|
tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/shapes_for_arguments.py
|
#!/usr/bin/env python
#-------------------------------------------------------------------------------
#
# shotwellfs.py
#
#
# See usage for details.
#
# LIMITATIONS: Unicode tags are not supported?
#
#-------------------------------------------------------------------------------
import datetime
import errno
import os
import sqlite3
import stat
import sys
import unicodedata
from types import *
# pull in some spaghetti to make this stuff work without fuse-py being installed
try:
import _find_fuse_parts
except ImportError:
pass
import fuse
from fuse import Fuse
if not hasattr(fuse, '__version__'):
raise RuntimeError, \
"your fuse-py doesn't know of fuse.__version__, probably it's too old."
fuse.fuse_python_api = (0, 2)
# Constants
DEFAULT_SHOTWELL_DB = os.environ['HOME'] + '/.local/share/shotwell/data/photo.db'
TAGS_ROOT = 'tags'
EVENTS_ROOT = 'events'
DEFAULT_MIN_EVENT_RATING = 0
def enum(*sequential, **named):
    """Build a simple enum-like class.

    Positional names map to 0..n-1; keyword arguments map to their given
    values (keywords override positional names on collision).
    """
    members = {name: index for index, name in enumerate(sequential)}
    members.update(named)
    return type('Enum', (), members)
class MyStat(fuse.Stat):
    """Stat structure with read-only defaults for the two node kinds.

    NOTE(review): 0555/0444 are Python 2 octal literals; this file is
    Python 2 throughout.
    """
    # Enumeration of the node kinds this filesystem exposes.
    FileType = enum('DIR', 'FILE')

    def __init__(self, fileType):
        self.st_ino = 0
        self.st_dev = 0
        self.st_uid = 0
        self.st_gid = 0
        self.st_atime = 0
        self.st_mtime = 0
        self.st_ctime = 0
        if (self.FileType.DIR == fileType):
            # World-readable, searchable directory.
            self.st_mode = stat.S_IFDIR | 0555
            self.st_nlink = 2
            self.st_size = 4096
        else:
            assert self.FileType.FILE == fileType
            # World-readable regular file; real size comes from os.stat later.
            self.st_mode = stat.S_IFREG | 0444
            self.st_nlink = 1
            self.st_size = 0
class ShotwellDB:
def __init__(self, filename, mineventrating):
self.db = filename
self.mineventrating = mineventrating
self.connection = sqlite3.connect(self.db)
def stripUnicode(self, text):
if isinstance(text, unicode):
unicodedata.normalize('NFKD', text).encode('ascii', 'ignore')
text = str(text)
return text
def getEvents(self):
print "DEBUG: getEvents()"
eventsSet = set()
sql = 'SELECT EventTable.id, EventTable.name, max(PhotoTable.exposure_time) AS timestamp FROM EventTable LEFT JOIN PhotoTable ON EventTable.id = PhotoTable.event_id WHERE timestamp > 0 GROUP BY EventTable.id'
cursor = self.connection.cursor()
print "DEBUG executing query: ", sql
cursor.execute(sql)
results = cursor.fetchall()
cursor.close()
for row in results:
id = row[0]
name = row[1]
timestamp = row[2]
# timestamp not set in photo table
if type(timestamp) is not int:
timestamp = 0
title = self.event2title(timestamp, name, id)
eventsSet.add(title)
eventsList = [self.stripUnicode(item) for item in eventsSet]
print "DEBUG: return ", eventsList
return eventsList
def getTags(self):
print "DEBUG getTagsList"
tagsSet = set() # avoid inserting duplicates
sql = 'SELECT name FROM TagTable'
cursor = self.connection.cursor()
print "DEBUG executing query: ", sql
cursor.execute(sql)
results = cursor.fetchall()
for row in results:
hiertag = row[0] # entry could be '/animals/elephants'
tagsSet |= set(hiertag.split('/')) # this will insert empty strings
cursor.close()
# remove blanks
tagsSet = [tag for tag in tagsSet if tag.strip()]
# Shotwell tags are unicode; translate to str
tagsList = [self.stripUnicode(item) for item in tagsSet]
return tagsList
def listEvent(self, event):
print "DEBUG: listEvent(", event, ")"
fileSet = set()
eventId = self.title2event(event)
sql = 'SELECT id, filename ' +\
'FROM photoTable ' +\
'WHERE (event_id = \'' + str(eventId) + '\') ' +\
'AND rating >= ' + str(self.mineventrating)
cursor = self.connection.cursor()
print "DEBUG executing query: ", sql
cursor.execute(sql)
results = cursor.fetchall()
for row in results:
# fileSet.add(os.path.basename(row[0]))
print "row ", row
#fileSet.add(({ u'id': row[0], u'filename': os.path.basename(row[1]), u'realpath': row[1]}))
fileSet.add(os.path.basename(row[1]))
print "DEBUG: return ", fileSet
# Shotwell IDs are unicode; translate to str
fileSet = [self.stripUnicode(item) for item in fileSet]
return fileSet
def listTag(self, tag):
print "DEBUG: listTag(" + tag + ")"
keys = set()
# TagTable contains tag names as well as "/one" and "/one/two" in the case
# of hierarchical tags
sql = 'SELECT photo_id_list ' +\
'FROM TagTable ' +\
'WHERE (name = \'' + tag + '\') OR' +\
'(name LIKE \'/' + tag + '/%%\') OR' +\
'(name LIKE \'%%/' + tag + '/%%\') OR' +\
'(name LIKE \'%%/' + tag + '\')'
cursor = self.connection.cursor()
print "DEBUG executing query: ", sql
cursor.execute(sql)
result = cursor.fetchone()
keys = set(result[0].split(','))
keys.pop()
cursor.close()
for key in keys:
print "key: ", key
key = self.stripUnicode(key)
photoId = self.getPhotoIdByKey(key)
realPath = self.getPathByPhotoId(photoId)
extension = os.path.splitext(realPath)[1]
fileSet[key] = {
'id' : photoId,
'filename': self.stripUnicode(os.path.basename(realPath)),
'realpath': realPath
}
return fileSet
def getPhotoIdByKey(self, key):
print "DEBUG: getPhotoIdByKey(", key, ")"
id = 0
if key[0:5] == 'thumb':
id = key[5:]
else:
assert key[0:6] == 'video-'
id = key[6:]
photoId = int(id, 16) # convert hex to decimal
print "DEBUG: photoId = ", photoId
return photoId
def getPathByPhotoId(self, photoId):
sql = 'SELECT filename ' + \
'FROM PhotoTable ' + \
'WHERE (id = \'' + str(photoId) + '\')'
cursor = self.connection.cursor()
cursor.execute(sql)
results = cursor.fetchall()
assert 1 == len(results)
path = results[0][0]
cursor.close()
return path
def getRealPath(self, path):
print "DEBUG: getRealPath(" + path + ")"
if path == '/':
return path
components = path.split('/')[1:]
if components[0] != EVENTS_ROOT and components[0] != TAGS_ROOT:
print "DEBUG: path does not start with EVENTS_ROOT or TAGS_ROOT"
return False
if len(components) == 1:
print "DEBUG: len = 1"
return '/'
elif len(components) == 2:
print "DEBUG: len = 2"
return '/'
elif len(components) == 3:
# must be a file
if components[0] == EVENTS_ROOT:
return self.getPathByEventTitleAndFilename(components[1], components[2])
else:
return self.getPathByTagAndFilename(components[1], components[2])
print "DEBUG: HU?"
return False
def getPathByTagAndFilename(self, tag, filename):
print "DEBUG: getPathByTagAndFilename(" + tag + ", " + filename + ")"
photos = self.listTag(tag)
for photo in photos.itervalues():
if photo['filename'] == filename:
return photo['realpath']
return False
def getPathByEventTitleAndFilename(self, eventTitle, filename):
eventId = self.title2event(eventTitle)
sql = 'SELECT filename FROM PhotoTable WHERE event_id = ? AND filename LIKE ?'
self.connection.row_factory = sqlite3.Row
cursor = self.connection.cursor()
print "DEBUG: executing query: ", sql
cursor.execute(sql, (eventId, '%'+filename))
results = cursor.fetchone()
try:
return results['filename']
except:
return False
def title2event(self, title):
return int(title[title.find("(") + 1:title.find(")")])
def event2title(self, timestamp, name, id):
if name is None:
name = ''
return '%s_%s (%d)' % (
datetime.datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d'),
name,
id
)
class ShotwellFS(Fuse):
    """FUSE filesystem exposing Shotwell events and tags as directories."""

    def __init__(self, *args, **kw):
        Fuse.__init__(self, *args, **kw)
        # Defaults; may be overridden by the mount options parsed in main().
        self.shotwelldb = DEFAULT_SHOTWELL_DB
        self.mineventrating = DEFAULT_MIN_EVENT_RATING

    def getattr(self, path):
        """Stat handler: synthetic dirs for the roots, real stat for photos."""
        print "DEBUG: getattr(" + path + ")"
        db = ShotwellDB(self.shotwelldb, self.mineventrating)
        realPath = db.getRealPath(path)
        print "DEBUG: realPath = ", realPath
        if realPath == False:
            print "DEBUG: realPath == False"
            return -errno.ENOENT
        elif realPath == '/':
            print "DEBUG: checking attr on /"
            return MyStat(MyStat.FileType.DIR)
        else:
            print "DEBUG: getattr returns original stat values"
            return os.stat(realPath)

    def open(self, path, flags):
        """Allow read-only opens of paths that resolve to a real photo file."""
        db = ShotwellDB(self.shotwelldb, self.mineventrating)
        result = -errno.ENOENT
        accmode = os.O_RDONLY | os.O_WRONLY | os.O_RDWR
        if (flags & accmode) != os.O_RDONLY:
            result = -errno.EACCES
        else:
            realPath = db.getRealPath(path)
            if (realPath):
                result = 0
        return result

    def read(self, path, size, offset):
        """Read *size* bytes at *offset* from the underlying photo file."""
        db = ShotwellDB(self.shotwelldb, self.mineventrating)
        result = -errno.ENOENT
        realPath = db.getRealPath(path)
        f = open(realPath)
        f.seek(offset)
        result = f.read(size)
        f.close()
        return result

    def readdir(self, path, offset):
        """Yield entries for /, the two roots, and their event/tag children."""
        db = ShotwellDB(self.shotwelldb, self.mineventrating)
        dirents = [ '.', '..' ]
        if path == '/':
            # directory entries in /
            dirents.append(EVENTS_ROOT)
            dirents.append(TAGS_ROOT)
        else:
            components = path.split('/')[1:]  # remove leading '/'
            if components[0] == TAGS_ROOT:
                # listing tags
                if len(components) == 1:
                    dirents.extend(db.getTags())
                elif len(components) == 2:
                    for item in db.listTag(components[1]).itervalues():
                        dirents.append(item['filename'])
            elif components[0] == EVENTS_ROOT:
                # listing events
                if len(components) == 1:
                    dirents.extend(db.getEvents())
                elif len(components) == 2:
                    items = db.listEvent(components[1])
                    for item in items:
                        dirents.append(item)
        for r in dirents:
            yield fuse.Direntry(r)
def main():
    """Parse FUSE/mount options and mount the Shotwell filesystem."""
    exitCode = 0
    if (0 == os.getuid()) or (0 == os.geteuid()):
        print >> sys.stderr, 'Cannot run this program as root'
        # NOTE(review): exitCode is set but execution continues and mounts
        # the filesystem anyway; an early return here may have been intended.
        exitCode = 1
    usage="""
Userspace ShotwellFS example
""" + Fuse.fusage
    server = ShotwellFS(version="%prog " + fuse.__version__,
                        usage=usage,
                        dash_s_do='setsingle')
    # Disable multithreading: if you want to use it, protect all method of
    # XmlFile class with locks, in order to prevent race conditions
    server.multithreaded = False
    server.parser.add_option(
        mountopt = "shotwelldb",
        metavar = "FILE",
        default = DEFAULT_SHOTWELL_DB,
        help = "Shotwell SQLite3 database file [default: %default]")
    server.parser.add_option(
        mountopt = "mineventrating",
        metavar = "INT",
        default = DEFAULT_MIN_EVENT_RATING,
        help = "Minimum photo rating to be listed in an event directory. Possible values: 0,1,2,3,4 or 5 [default: %default]")
    # Mount options are stored as attributes on `server` itself (values=server).
    server.parse(values = server, errex = 1)
    server.main()
    return exitCode


if __name__ == '__main__':
    sys.exit(main())
|
unknown
|
codeparrot/codeparrot-clean
| ||
#!/usr/bin/python
import sys
from smartcard.scard import *
def tohex(l, delim='', reverse=False):
    """Format a byte list as uppercase hex, optionally reversed (LSB-first).

    The input list is never mutated.
    """
    ordered = list(reversed(l)) if reverse else l
    return delim.join('%02X' % byte for byte in ordered)
def tostr(r):
    """Render byte values as printable ASCII; non-printables become '.'."""
    chars = []
    for code in r:
        chars.append(chr(code) if 0x20 <= code <= 0x7e else '.')
    return ''.join(chars)
def manuf(r, i):
    """Describe the manufacturer area; a well-known value marks sample cards."""
    area = tohex(r[:-2], reverse=True)
    if area == 'FFFFFFAA':
        return 'Sample card'
    return 'Manufacturer area: %s' % area
def sn(r, i):
    """Format serial-number word *i* as big-endian hex."""
    payload = tohex(r[:-2])
    return 'S/N %d: %s' % (i, payload)
def mode(r, i):
    """Decode the card mode from the top two bits of the fourth byte."""
    bits = r[3] >> 6
    if bits == 1:
        return 'Issuer mode'
    if bits == 2:
        return 'User mode'
    # 00 and 11 both mean the card is blocked.
    return 'Invalid mode %s - card blocked' % ('00' if bits == 0 else '11')
def aca(r, i):
    """List access-control bits: which registers are update-forbidden or
    read-protected (two bits per register, low bit = update)."""
    protections = ['update forbidden', 'read protected']
    registers = ['UA2', 'Bal2', 'UA1', 'Bal1']
    flags = r[3]
    parts = []
    for bit in range(8):
        if flags & (1 << bit):
            parts.append('%s %s' % (registers[bit >> 1], protections[bit & 1]))
    return ', '.join(parts)
def csc(r, i):
    """Label the card-secret-code slot derived from its block address *i*."""
    # Maps addresses 0x06/0x38/0x3a onto slot numbers 0/1/2.
    slot = (i-6)*(i-54)/100
    return 'Card secret code %d' % slot
def cscrc(r, i):
    """Report a CSC error counter: count of set bits among bits 4-7."""
    errors = sum(1 for k in range(4, 8) if (r[3] >> k) & 1)
    # Address arithmetic maps 0x07/0x39/0x3b onto CSC numbers 0/1/2.
    return 'CSC %d errors: %d' % ((i-7)*(i-55)/100, errors)
def ctc(r, i):
    """Decode a balance transaction counter (little-endian, top flag masked).

    Odd addresses hold the backup copy.
    """
    if len(r) <= 2:
        return ''
    count = int(tohex(r[:-2], reverse=True), 16) & ~0x10000000
    suffix = '' if (i & 1) == 0 else ' (backup)'
    return 'Balance %d transaction count: %d%s' % (i/32+1, count, suffix)
def awf(r, i):
    """Name the anti-withdrawal flag block (CTC for even *i*, Balance for odd)."""
    kind = 'CTC' if (i & 1) == 0 else 'Balance'
    return '%s %d anti-withdrawal flag' % (kind, i/32+1)
def balance(r, i):
    """Decode a balance register; odd addresses hold the backup copy."""
    if len(r) <= 2:
        return ''
    value = int(tohex(r[:-2], reverse=True), 16) << ((i & 2) * 16)
    suffix = '' if (i & 1) == 0 else ' (backup)'
    return 'Balance %d: %d%s' % (i/32+1, value, suffix)
def lastuse(r, i):
    """Show the last-use date bytes when the response is long enough."""
    if len(r) < 6:
        return ''
    stamp = tohex(r[1:4], '.')
    return 'Last used: %s' % stamp
def birth(r, i):
    """Show the card birth date when the response is long enough."""
    if len(r) < 6:
        return ''
    day = tohex(r[:2], '.')
    year = tohex(r[2:4])
    return 'Birth date: %s.%s' % (day, year)
def pike(r, i):
    """Report loyalty points; odd addresses hold the backup copy.

    NOTE(review): int() parses the hex string in base 10, so digits A-F
    would raise ValueError — presumably the counter stays decimal; confirm.
    """
    points = int(tohex(r[:2]))
    suffix = '' if (i & 1) == 0 else ' (backup)'
    return 'Number of points: %d%s' % (points, suffix)
# Dispatch table: card word address -> pretty-printer for that word.
interp = {0x00: manuf, 0x01: sn, 0x02: sn, 0x03: sn, 0x04: mode, 0x05: aca,
          0x06: csc, 0x07: cscrc, 0x08: ctc, 0x09: ctc, 0x0a: awf, 0x0b: awf,
          0x0c: balance, 0x0d: balance, 0x0e: balance, 0x0f: balance, 0x10: lastuse,
          0x20: ctc, 0x21: ctc, 0x22: awf, 0x23: awf, 0x24: balance, 0x25: balance,
          0x26: balance, 0x27: balance, 0x2f: birth, 0x38: csc, 0x39: cscrc,
          0x3a: csc, 0x3b: cscrc, 0x3c: pike, 0x3d: pike}

# NOTE(review): Python 2 script ("raise error, ..." / print statements /
# has_key); `error` presumably comes from the smartcard.scard star import —
# verify before porting.
hresult, hcontext = SCardEstablishContext(SCARD_SCOPE_USER)
if hresult!=0:
    raise error, 'Failed to establish context : ' + SCardGetErrorMessage(hresult)
hresult, readers = SCardListReaders(hcontext, [])
if hresult!=0:
    raise error, 'Failed to list readers: ' + SCardGetErrorMessage(hresult)
print 'PCSC Readers:', readers
if len(readers) == 0:
    raise error, 'No smart card readers'
# Optional argv[1] selects the reader index; default to the first reader.
if len(sys.argv) > 1:
    ri = int(sys.argv[1])
else:
    if len(readers) > 1:
        print 'Using first reader.'
    ri = 0
hresult, hcard, dwActiveProtocol = SCardConnect(hcontext, readers[ri], SCARD_SHARE_SHARED, SCARD_PROTOCOL_T0)
if hresult!=0:
    raise error, 'Unable to connect: ' + SCardGetErrorMessage(hresult)
# Dump all 0x40 words: APDU 80 BE 00 <addr> 04 reads one 4-byte word.
for i in range(0x40):
    hresult, response = SCardTransmit(hcard, SCARD_PCI_T0, [0x80, 0xBE, 0x00, i, 0x04])
    if hresult!=0:
        raise error, 'Failed to transmit: ' + SCardGetErrorMessage(hresult)
    # Columns: address, status word, raw hex, decoded meaning (or ASCII dump).
    print '0x%02X\t%5s\t%11s\t%s' % (i, tohex(response[-2:], ' '), tohex(response[:-2], ' '), interp[i](response, i) if interp.has_key(i) else tostr(response[:-2]))
|
unknown
|
codeparrot/codeparrot-clean
| ||
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# pytype: skip-file
import argparse
import logging
import shlex
import typing
import unittest
from os import linesep
from os import path
from os.path import exists
from shutil import rmtree
from tempfile import mkdtemp
import pytest
import apache_beam as beam
from apache_beam import Impulse
from apache_beam import Map
from apache_beam import Pipeline
from apache_beam.coders import VarIntCoder
from apache_beam.io.external.generate_sequence import GenerateSequence
from apache_beam.io.kafka import ReadFromKafka
from apache_beam.io.kafka import WriteToKafka
from apache_beam.metrics import Metrics
from apache_beam.options.pipeline_options import DebugOptions
from apache_beam.options.pipeline_options import FlinkRunnerOptions
from apache_beam.options.pipeline_options import PortableOptions
from apache_beam.options.pipeline_options import StandardOptions
from apache_beam.runners.portability import job_server
from apache_beam.runners.portability import portable_runner
from apache_beam.runners.portability import portable_runner_test
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import userstate
from apache_beam.transforms.sql import SqlTransform
# Run as
#
# pytest flink_runner_test.py[::TestClass::test_case] \
# --test-pipeline-options="--environment_type=LOOPBACK"
# Module logger and the Row schema type exercised by the SQL test below.
_LOGGER = logging.getLogger(__name__)
Row = typing.NamedTuple("Row", [("col1", int), ("col2", str)])
beam.coders.registry.register_coder(Row, beam.coders.RowCoder)
class FlinkRunnerTest(portable_runner_test.PortableRunnerTest):
  """Portable-runner test suite executed against a Flink job server started
  as a local Java subprocess (see _subprocess_command)."""
  _use_grpc = True
  _use_subprocesses = True
  # Class-level state shared across the whole suite.
  conf_dir = None  # Flink conf dir holding the FileReporter config
  expansion_port = None  # port of the expansion service started by the job server
  flink_job_server_jar = None  # path to the job-server shadow jar

  def __init__(self, *args, **kwargs):
    super(FlinkRunnerTest, self).__init__(*args, **kwargs)
    self.environment_type = None
    self.environment_config = None

  @pytest.fixture(autouse=True)
  def parse_options(self, request):
    """Parses --test-pipeline-options from pytest into suite configuration;
    skips every test when the option is absent."""
    if not request.config.option.test_pipeline_options:
      raise unittest.SkipTest(
          'Skipping because --test-pipeline-options is not specified.')
    test_pipeline_options = request.config.option.test_pipeline_options
    parser = argparse.ArgumentParser(add_help=True)
    parser.add_argument(
        '--flink_job_server_jar',
        help='Job server jar to submit jobs.',
        action='store')
    parser.add_argument(
        '--environment_type',
        default='LOOPBACK',
        choices=['DOCKER', 'PROCESS', 'LOOPBACK'],
        help='Set the environment type for running user code. DOCKER runs '
        'user code in a container. PROCESS runs user code in '
        'automatically started processes. LOOPBACK runs user code on '
        'the same process that originally submitted the job.')
    parser.add_argument(
        '--environment_option',
        '--environment_options',
        dest='environment_options',
        action='append',
        default=None,
        help=(
            'Environment configuration for running the user code. '
            'Recognized options depend on --environment_type.\n '
            'For DOCKER: docker_container_image (optional)\n '
            'For PROCESS: process_command (required), process_variables '
            '(optional, comma-separated)\n '
            'For EXTERNAL: external_service_address (required)'))
    known_args, unknown_args = parser.parse_known_args(
        shlex.split(test_pipeline_options))
    if unknown_args:
      _LOGGER.warning('Discarding unrecognized arguments %s' % unknown_args)
    # Fall back to the published job-server jar for the newest supported
    # Flink version when no jar was supplied on the command line.
    self.set_flink_job_server_jar(
        known_args.flink_job_server_jar or
        job_server.JavaJarJobServer.path_to_beam_jar((
            ':runners:flink:%s:job-server:shadowJar' %
            FlinkRunnerOptions.PUBLISHED_FLINK_VERSIONS[-1])))
    self.environment_type = known_args.environment_type
    self.environment_options = known_args.environment_options

  @classmethod
  def tearDownClass(cls):
    # Remove the conf dir created by _create_conf_dir, if any.
    if cls.conf_dir and exists(cls.conf_dir):
      _LOGGER.info("removing conf dir: %s" % cls.conf_dir)
      rmtree(cls.conf_dir)
    super(FlinkRunnerTest, cls).tearDownClass()

  @classmethod
  def _create_conf_dir(cls):
    """Create (and save a static reference to) a "conf dir", used to provide
    metrics configs and verify metrics output

    It gets cleaned up when the suite is done executing"""
    # NOTE(review): `conf_dir` is declared as a class attribute above, so
    # hasattr() is always True here -- presumably intentional; confirm upstream.
    if hasattr(cls, 'conf_dir'):
      cls.conf_dir = mkdtemp(prefix='flinktest-conf')
      # path for a FileReporter to write metrics to
      cls.test_metrics_path = path.join(cls.conf_dir, 'test-metrics.txt')
      # path to write Flink configuration to
      conf_path = path.join(cls.conf_dir, 'flink-conf.yaml')
      file_reporter = 'org.apache.beam.runners.flink.metrics.FileReporter'
      with open(conf_path, 'w') as f:
        f.write(
            linesep.join([
                'metrics.reporters: file',
                'metrics.reporter.file.class: %s' % file_reporter,
                'metrics.reporter.file.path: %s' % cls.test_metrics_path,
                'metrics.scope.operator: <operator_name>',
            ]))

  @classmethod
  def _subprocess_command(cls, job_port, expansion_port):
    """Returns the java command line used to launch the Flink job server."""
    # will be cleaned up at the end of this method, and recreated and used by
    # the job server
    tmp_dir = mkdtemp(prefix='flinktest')
    cls._create_conf_dir()
    cls.expansion_port = expansion_port
    try:
      return [
          'java',
          '-Dorg.slf4j.simpleLogger.defaultLogLevel=warn',
          '-jar',
          cls.flink_job_server_jar,
          '--flink-master',
          '[local]',
          '--flink-conf-dir',
          cls.conf_dir,
          '--artifacts-dir',
          tmp_dir,
          '--job-port',
          str(job_port),
          '--artifact-port',
          '0',
          '--expansion-port',
          str(expansion_port),
      ]
    finally:
      rmtree(tmp_dir)

  @classmethod
  def get_runner(cls):
    return portable_runner.PortableRunner()

  @classmethod
  def get_expansion_service(cls):
    # TODO Move expansion address resides into PipelineOptions
    return 'localhost:%s' % cls.expansion_port

  @classmethod
  def set_flink_job_server_jar(cls, flink_job_server_jar):
    cls.flink_job_server_jar = flink_job_server_jar

  def create_options(self):
    """Builds the pipeline options shared by all tests in this suite."""
    options = super(FlinkRunnerTest, self).create_options()
    options.view_as(DebugOptions).experiments = ['beam_fn_api']
    options._all_options['parallelism'] = 2
    options.view_as(PortableOptions).environment_type = self.environment_type
    options.view_as(
        PortableOptions).environment_options = self.environment_options
    return options

  # Can't read host files from within docker, read a "local" file there.
  def test_read(self):
    print('name:', __name__)
    with self.create_pipeline() as p:
      lines = p | beam.io.ReadFromText('/etc/profile')
      assert_that(lines, lambda lines: len(lines) > 0)

  def test_no_subtransform_composite(self):
    raise unittest.SkipTest("BEAM-4781")

  def test_external_transform(self):
    """Expands and runs GenerateSequence through the expansion service."""
    with self.create_pipeline() as p:
      res = (
          p
          | GenerateSequence(
              start=1, stop=10, expansion_service=self.get_expansion_service()))
      assert_that(res, equal_to([i for i in range(1, 10)]))

  def test_expand_kafka_read(self):
    # We expect to fail here because we do not have a Kafka cluster handy.
    # Nevertheless, we check that the transform is expanded by the
    # ExpansionService and that the pipeline fails during execution.
    with self.assertRaises(Exception) as ctx:
      with self.create_pipeline() as p:
        # pylint: disable=expression-not-assigned
        (
            p
            | ReadFromKafka(
                consumer_config={
                    'bootstrap.servers': 'notvalid1:7777, notvalid2:3531',
                    'group.id': 'any_group'
                },
                topics=['topic1', 'topic2'],
                key_deserializer='org.apache.kafka.'
                'common.serialization.'
                'ByteArrayDeserializer',
                value_deserializer='org.apache.kafka.'
                'common.serialization.'
                'LongDeserializer',
                commit_offset_in_finalize=True,
                timestamp_policy=ReadFromKafka.create_time_policy,
                expansion_service=self.get_expansion_service()))
    self.assertTrue(
        'No resolvable bootstrap urls given in bootstrap.servers' in str(
            ctx.exception),
        'Expected to fail due to invalid bootstrap.servers, but '
        'failed due to:\n%s' % str(ctx.exception))

  def test_expand_kafka_write(self):
    # We just test the expansion but do not execute.
    # pylint: disable=expression-not-assigned
    (
        self.create_pipeline()
        | Impulse()
        | Map(lambda input: (1, input))
        | WriteToKafka(
            producer_config={
                'bootstrap.servers': 'localhost:9092, notvalid2:3531'
            },
            topic='topic1',
            key_serializer='org.apache.kafka.'
            'common.serialization.'
            'LongSerializer',
            value_serializer='org.apache.kafka.'
            'common.serialization.'
            'ByteArraySerializer',
            expansion_service=self.get_expansion_service()))

  def test_sql(self):
    """Runs a SqlTransform over the registered Row schema type."""
    with self.create_pipeline() as p:
      output = (
          p
          | 'Create' >> beam.Create([Row(x, str(x)) for x in range(5)])
          | 'Sql' >> SqlTransform(
              """SELECT col1, col2 || '*' || col2 as col2,
                     power(col1, 2) as col3
              FROM PCOLLECTION
              """,
              expansion_service=self.get_expansion_service()))
      assert_that(
          output,
          equal_to([(x, '{x}*{x}'.format(x=x), x * x) for x in range(5)]))

  def test_flattened_side_input(self):
    # Blocked on support for transcoding
    # https://jira.apache.org/jira/browse/BEAM-6523
    super(FlinkRunnerTest,
          self).test_flattened_side_input(with_transcoding=False)

  def test_metrics(self):
    super(FlinkRunnerTest, self).test_metrics(check_gauge=False)

  def test_flink_metrics(self):
    """Run a simple DoFn that increments a counter and verifies state
    caching metrics. Verifies that its expected value is written to a
    temporary file by the FileReporter"""
    counter_name = 'elem_counter'
    state_spec = userstate.BagStateSpec('state', VarIntCoder())

    class DoFn(beam.DoFn):
      def __init__(self):
        self.counter = Metrics.counter(self.__class__, counter_name)
        _LOGGER.info('counter: %s' % self.counter.metric_name)

      def process(self, kv, state=beam.DoFn.StateParam(state_spec)):
        # Trigger materialization
        list(state.read())
        state.add(1)
        self.counter.inc()

    options = self.create_options()
    # Test only supports parallelism of 1
    options._all_options['parallelism'] = 1
    # Create multiple bundles to test cache metrics
    options._all_options['max_bundle_size'] = 10
    options._all_options['max_bundle_time_millis'] = 95130590130
    experiments = options.view_as(DebugOptions).experiments or []
    experiments.append('state_cache_size=123')
    options.view_as(DebugOptions).experiments = experiments
    with Pipeline(self.get_runner(), options) as p:
      # pylint: disable=expression-not-assigned
      (
          p
          | "create" >> beam.Create(list(range(0, 110)))
          | "mapper" >> beam.Map(lambda x: (x % 10, 'val'))
          | "stateful" >> beam.ParDo(DoFn()))
    lines_expected = {'counter: 110'}
    if options.view_as(StandardOptions).streaming:
      lines_expected.update([
          # Gauges for the last finished bundle
          'stateful.beam_metric:statecache:capacity: 123',
          'stateful.beam_metric:statecache:size: 10',
          'stateful.beam_metric:statecache:get: 20',
          'stateful.beam_metric:statecache:miss: 0',
          'stateful.beam_metric:statecache:hit: 20',
          'stateful.beam_metric:statecache:put: 0',
          'stateful.beam_metric:statecache:evict: 0',
          # Counters
          'stateful.beam_metric:statecache:get_total: 220',
          'stateful.beam_metric:statecache:miss_total: 10',
          'stateful.beam_metric:statecache:hit_total: 210',
          'stateful.beam_metric:statecache:put_total: 10',
          'stateful.beam_metric:statecache:evict_total: 0',
      ])
    else:
      # Batch has a different processing model. All values for
      # a key are processed at once.
      lines_expected.update([
          # Gauges
          'stateful).beam_metric:statecache:capacity: 123',
          # For the first key, the cache token will not be set yet.
          # It's lazily initialized after first access in StateRequestHandlers
          'stateful).beam_metric:statecache:size: 10',
          # We have 11 here because there are 110 / 10 elements per key
          'stateful).beam_metric:statecache:get: 12',
          'stateful).beam_metric:statecache:miss: 1',
          'stateful).beam_metric:statecache:hit: 11',
          # State is flushed back once per key
          'stateful).beam_metric:statecache:put: 1',
          'stateful).beam_metric:statecache:evict: 0',
          # Counters
          'stateful).beam_metric:statecache:get_total: 120',
          'stateful).beam_metric:statecache:miss_total: 10',
          'stateful).beam_metric:statecache:hit_total: 110',
          'stateful).beam_metric:statecache:put_total: 10',
          'stateful).beam_metric:statecache:evict_total: 0',
      ])
    # Scan the FileReporter output for the expected metric lines.
    lines_actual = set()
    with open(self.test_metrics_path, 'r') as f:
      for line in f:
        print(line, end='')
        for metric_str in lines_expected:
          metric_name = metric_str.split()[0]
          if metric_str in line:
            lines_actual.add(metric_str)
          elif metric_name in line:
            # Same metric but unexpected value: record the real line so the
            # set comparison below reports the mismatch.
            lines_actual.add(line)
    self.assertSetEqual(lines_actual, lines_expected)

  def test_sdf_with_watermark_tracking(self):
    raise unittest.SkipTest("BEAM-2939")

  def test_callbacks_with_exception(self):
    raise unittest.SkipTest("BEAM-11021")

  def test_register_finalizations(self):
    raise unittest.SkipTest("BEAM-11021")

  def test_custom_merging_window(self):
    raise unittest.SkipTest("BEAM-11004")

  # Inherits all other tests.
class FlinkRunnerTestOptimized(FlinkRunnerTest):
  """Same suite with the 'pre_optimize=all' experiment prepended."""
  # TODO: Remove these tests after resolving BEAM-7248 and enabling
  # PortableRunnerOptimized
  def create_options(self):
    options = super(FlinkRunnerTestOptimized, self).create_options()
    options.view_as(DebugOptions).experiments = [
        'pre_optimize=all'
    ] + options.view_as(DebugOptions).experiments
    return options

  def test_external_transform(self):
    raise unittest.SkipTest("BEAM-7252")

  def test_expand_kafka_read(self):
    raise unittest.SkipTest("BEAM-7252")

  def test_expand_kafka_write(self):
    raise unittest.SkipTest("BEAM-7252")

  def test_sql(self):
    raise unittest.SkipTest("BEAM-7252")

  def test_pack_combiners(self):
    # Stages produced by translations.pack_combiners are fused
    # by translations.greedily_fuse, which prevent the stages
    # from being detecting using counters by the test.
    self._test_pack_combiners(assert_using_counter_names=False)
class FlinkRunnerTestStreaming(FlinkRunnerTest):
  """Same suite in streaming mode; checkpointing enabled per-test on demand."""
  def __init__(self, *args, **kwargs):
    super(FlinkRunnerTestStreaming, self).__init__(*args, **kwargs)
    self.enable_commit = False

  def setUp(self):
    # Reset before every test; individual tests opt in below.
    self.enable_commit = False

  def create_options(self):
    options = super(FlinkRunnerTestStreaming, self).create_options()
    options.view_as(StandardOptions).streaming = True
    if self.enable_commit:
      options._all_options['checkpointing_interval'] = 3000
      options._all_options['shutdown_sources_after_idle_ms'] = 60000
    return options

  def test_callbacks_with_exception(self):
    self.enable_commit = True
    # Deliberately calls the grandparent implementation, bypassing
    # FlinkRunnerTest's SkipTest override.
    super(FlinkRunnerTest, self).test_callbacks_with_exception()

  def test_register_finalizations(self):
    self.enable_commit = True
    # Deliberately bypasses FlinkRunnerTest's SkipTest override.
    super(FlinkRunnerTest, self).test_register_finalizations()
if __name__ == '__main__':
  # Run the tests with INFO-level logging enabled.
  logging.getLogger().setLevel(logging.INFO)
  unittest.main()
|
unknown
|
codeparrot/codeparrot-clean
| ||
"""
Script that trains progressive multitask models on HOPV dataset.
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import os
import shutil
import numpy as np
import deepchem as dc
from hopv_datasets import load_hopv
# Only for debug!
np.random.seed(123)
# Load HOPV dataset
n_features = 1024
hopv_tasks, hopv_datasets, transformers = load_hopv()
train_dataset, valid_dataset, test_dataset = hopv_datasets
# Fit models
metric = [
dc.metrics.Metric(dc.metrics.pearson_r2_score, np.mean, mode="regression"),
dc.metrics.Metric(
dc.metrics.mean_absolute_error, np.mean, mode="regression")
]
model = dc.models.ProgressiveMultitaskRegressor(
len(hopv_tasks),
n_features,
layer_sizes=[1000],
dropouts=[.25],
learning_rate=0.001,
batch_size=50)
# Fit trained model
model.fit(train_dataset, nb_epoch=25)
model.save()
print("Evaluating model")
train_scores = model.evaluate(train_dataset, metric, transformers)
valid_scores = model.evaluate(valid_dataset, metric, transformers)
print("Train scores")
print(train_scores)
print("Validation scores")
print(valid_scores)
|
unknown
|
codeparrot/codeparrot-clean
| ||
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth import get_user_model
from django.conf import settings
class Error(models.Model):
    """
    Model for storing the individual errors.
    """
    # Short error classifier (e.g. an exception type name); indexed for filtering.
    kind = models.CharField(_('type'), null=True, blank=True, max_length=128,
                            db_index=True)
    # Human-readable description of the error; required.
    info = models.TextField(null=False)
    # Optional serialized payload accompanying the error.
    data = models.TextField(blank=True, null=True)
    # URL of the request during which the error occurred, if any.
    path = models.URLField(null=True, blank=True)
    # Set automatically on creation; indexed for time-range queries.
    when = models.DateTimeField(null=False, auto_now_add=True, db_index=True)
    # Optional rendered HTML (e.g. a debug page) captured with the error.
    html = models.TextField(null=True, blank=True)
    # User associated with the error, when authenticated.
    user = models.ForeignKey(settings.AUTH_USER_MODEL, null=True,
                             blank=True)
    # Updated automatically on every save.
    modified = models.DateTimeField(auto_now=True)

    class Meta:
        """
        Meta information for the model.
        """
        verbose_name = _('Error')
        verbose_name_plural = _('Errors')

    def __unicode__(self):
        """
        String representation of the object.
        """
        return "%s: %s" % (self.kind, self.info)
|
unknown
|
codeparrot/codeparrot-clean
| ||
/* Copyright 2016 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#ifndef TENSORFLOW_CC_SAVED_MODEL_SIGNATURE_CONSTANTS_H_
#define TENSORFLOW_CC_SAVED_MODEL_SIGNATURE_CONSTANTS_H_

namespace tensorflow {

// NOTE(review): these string values presumably mirror the Python saved_model
// signature constants -- keep in sync (TODO confirm).

/// Key in the signature def map for `default` serving signatures. The default
/// signature is used in inference requests where a specific signature was not
/// specified.
static constexpr char kDefaultServingSignatureDefKey[] = "serving_default";

////////////////////////////////////////////////////////////////////////////////
/// Classification API constants.

/// Classification inputs.
static constexpr char kClassifyInputs[] = "inputs";

/// Classification method name used in a SignatureDef.
static constexpr char kClassifyMethodName[] = "tensorflow/serving/classify";

/// Classification classes output.
static constexpr char kClassifyOutputClasses[] = "classes";

/// Classification scores output.
static constexpr char kClassifyOutputScores[] = "scores";

////////////////////////////////////////////////////////////////////////////////
/// Predict API constants.

/// Predict inputs.
static constexpr char kPredictInputs[] = "inputs";

/// Predict method name used in a SignatureDef.
static constexpr char kPredictMethodName[] = "tensorflow/serving/predict";

/// Predict outputs.
static constexpr char kPredictOutputs[] = "outputs";

////////////////////////////////////////////////////////////////////////////////
/// Regression API constants.

/// Regression inputs.
static constexpr char kRegressInputs[] = "inputs";

/// Regression method name used in a SignatureDef.
static constexpr char kRegressMethodName[] = "tensorflow/serving/regress";

/// Regression outputs.
static constexpr char kRegressOutputs[] = "outputs";

////////////////////////////////////////////////////////////////////////////////

}  // namespace tensorflow

#endif  // TENSORFLOW_CC_SAVED_MODEL_SIGNATURE_CONSTANTS_H_
|
c
|
github
|
https://github.com/tensorflow/tensorflow
|
tensorflow/cc/saved_model/signature_constants.h
|
prelude: |
obj = Object.new
hash = { obj => true }
benchmark: hash.key?(obj)
loop_count: 30000000
|
unknown
|
github
|
https://github.com/ruby/ruby
|
benchmark/hash_key.yml
|
#Copyright ReportLab Europe Ltd. 2000-2012
#see license.txt for license details
__version__=''' $Id$ '''
__doc__="""
The Canvas object is the primary interface for creating PDF files. See
doc/reportlab-userguide.pdf for copious examples.
"""
# Public API of this module.
__all__ = ['Canvas']
ENABLE_TRACKING = 1 # turn this off to do profile testing w/o tracking
import os
import sys
import re
import hashlib
from string import digits
import tempfile
from math import sin, cos, tan, pi, ceil
from reportlab import rl_config
from reportlab.pdfbase import pdfutils
from reportlab.pdfbase import pdfdoc
from reportlab.pdfbase import pdfmetrics
from reportlab.pdfgen import pdfgeom, pathobject, textobject
from reportlab.lib.colors import black, _chooseEnforceColorSpace, Color, CMYKColor, toColor
from reportlab.lib.utils import import_zlib, ImageReader, isSeq, isStr, isUnicode, _digester
from reportlab.lib.rl_accel import fp_str, escapePDF
from reportlab.lib.boxstuff import aspectRatioFix
# Raw string avoids the invalid-escape warning modern CPython emits for '\d'
# in a plain string literal; the compiled pattern is unchanged.
digitPat = re.compile(r'\d') #used in decimal alignment
zlib = import_zlib()
# Robert Kern
# Constants for closing paths.
# May be useful if one changes 'arc' and 'rect' to take a
# default argument that tells how to close the path.
# That way we can draw filled shapes.
FILL_EVEN_ODD = 0  # PDF even-odd fill rule
FILL_NON_ZERO = 1  # PDF non-zero winding fill rule

#this is used by path-closing routines.
#map stroke, fill, fillmode -> operator
# fillmode: 1 = non-Zero (obviously), 0 = evenOdd
PATH_OPS = {(0, 0, FILL_EVEN_ODD) : 'n', #no op
            (0, 0, FILL_NON_ZERO) : 'n', #no op
            (1, 0, FILL_EVEN_ODD) : 'S', #stroke only
            (1, 0, FILL_NON_ZERO) : 'S', #stroke only
            (0, 1, FILL_EVEN_ODD) : 'f*', #Fill only
            (0, 1, FILL_NON_ZERO) : 'f', #Fill only
            (1, 1, FILL_EVEN_ODD) : 'B*', #Stroke and Fill
            (1, 1, FILL_NON_ZERO) : 'B', #Stroke and Fill
            }
def _annFormat(D,color,thickness,dashArray,hradius=0,vradius=0):
    """Fill in default annotation-dictionary entries on D.

    Adds /C (annotation colour) from *color* and a /Border array built from
    the corner radii, line thickness and optional dash pattern.  Existing
    entries are never overwritten.
    """
    from reportlab.pdfbase.pdfdoc import PDFArray, PDFDictionary
    if color and 'C' not in D:
        D["C"] = PDFArray([color.red, color.green, color.blue])
    if 'Border' not in D:
        # /Border = [hradius vradius width (dash-array)?]
        entries = [hradius, vradius, thickness or 0]
        if dashArray:
            entries.append(PDFArray(dashArray))
        D["Border"] = PDFArray(entries)
# helpers to guess color space for gradients
def _normalizeColor(aColor):
    """Map *aColor* to a (colorspace_name, components) pair.

    Accepts reportlab Color/CMYKColor objects, 3- or 4-tuples/lists, or any
    string understood by toColor(); raises ValueError otherwise.
    """
    if isinstance(aColor, CMYKColor):
        # CMYK components are scaled by the colour's density.
        density = aColor.density
        return "DeviceCMYK", tuple(component * density for component in aColor.cmyk())
    if isinstance(aColor, Color):
        return "DeviceRGB", aColor.rgb()
    if isinstance(aColor, (tuple, list)):
        n = len(aColor)
        if n == 3:
            return "DeviceRGB", aColor
        if n == 4:
            return "DeviceCMYK", aColor
    if isinstance(aColor, str):
        # Parse the string form, then normalize the resulting Color object.
        return _normalizeColor(toColor(aColor))
    raise ValueError("Unknown color %r" % aColor)
def _normalizeColors(colors):
    """Normalize a sequence of colours into one shared colour space.

    Returns (space_name, component_lists); raises ValueError if the colours
    do not all resolve to the same colour space.  An empty input yields
    (None, []).
    """
    shared_space = None
    normalized = []
    for entry in colors:
        space_name, comps = _normalizeColor(entry)
        if shared_space is not None and shared_space != space_name:
            raise ValueError("Mismatch in color spaces: %s and %s" % (shared_space, space_name))
        shared_space = space_name
        normalized.append(comps)
    return shared_space, normalized
def _buildColorFunction(colors, positions):
    """Build the PDF shading function for a gradient over *colors*.

    Returns a single exponential (Type 2) function for the simple cases, or a
    stitching (Type 3) function of piecewise-linear segments otherwise.
    *positions*, when given, must match *colors* in length; values are
    presumably in [0, 1] -- callers are expected to guarantee that.
    """
    from reportlab.pdfbase.pdfdoc import PDFExponentialFunction, PDFStitchingFunction
    if positions is not None and len(positions) != len(colors):
        raise ValueError("need to have the same number of colors and positions")
    # simplified functions for edge cases
    if len(colors) == 1:
        # for completeness
        return PDFExponentialFunction(N=1, C0=colors[0], C1=colors[0])
    if len(colors) == 2:
        if positions is None or (positions[0] == 0 and positions[1] == 1):
            return PDFExponentialFunction(N=1, C0=colors[0], C1=colors[1])
    # equally distribute if positions not specified
    if positions is None:
        nc = len(colors)
        positions = [(1.0*x)/(nc-1) for x in range(nc)]
    else:
        # sort positions and colors in increasing order
        poscolors = list(zip(positions, colors))
        poscolors.sort(key=lambda x: x[0])
        # add endpoint positions if not already present
        if poscolors[0][0] != 0:
            poscolors.insert(0, (0.0, poscolors[0][1]))
        if poscolors[-1][0] != 1:
            poscolors.append((1.0, poscolors[-1][1]))
        positions, colors = list(zip(*poscolors)) # unzip
    # build stitching function
    # One linear segment per adjacent colour pair; interior positions become
    # the stitching bounds and each segment is encoded over [0, 1].
    functions = []
    bounds = [pos for pos in positions[1:-1]]
    encode = []
    lastcolor = colors[0]
    for color in colors[1:]:
        functions.append(PDFExponentialFunction(N=1, C0=lastcolor, C1=color))
        lastcolor = color
        encode.append(0.0)
        encode.append(1.0)
    return PDFStitchingFunction(functions, bounds, encode, Domain="[0.0 1.0]")
class ExtGState:
    """Tracks PDF extended graphics state (/ExtGState) settings for a canvas.

    A ``/<name> gs`` operator is appended to the canvas code only when a
    parameter actually changes; each (parameter, value) pair is interned
    under a generated resource name so repeats reuse the same entry.
    """
    # Default parameter values; bool-typed defaults mark parameters that
    # set() coerces to bool.
    defaults = dict(
        CA=1,
        ca=1,
        OP=False,
        op=False,
        OPM=0,
        )

    def __init__(self):
        self._d = {}  # current values for parameters changed from their defaults
        self._c = {}  # (param, value) -> generated ExtGState resource name

    def set(self,canv,a,v):
        d = self.defaults[a]
        isbool = isinstance(d,bool)
        if isbool: v=bool(v)
        # Emit only on change; 'op' is additionally re-emitted while 'OP'
        # differs from its default.
        if v!=self._d.get(a,d) or (a=='op' and self.getValue('OP')!=d):
            self._d[a] = v
            # PDF booleans are the lowercase literals 'true'/'false'.
            if isbool: v=str(v).lower()
            t = a,v
            if t in self._c:
                name = self._c[t]
            else:
                name = 'gRLs'+str(len(self._c))
                self._c[t] = name
            canv._code.append('/%s gs' % name)

    def getValue(self,a):
        # Current value, falling back to the class default.
        return self._d.get(a,self.defaults[a])

    def getState(self):
        # Build the /ExtGState resource dictionary, or None if nothing was set.
        S = {}
        for t,name in self._c.items():
            S[name] = pdfdoc.PDFDictionary(dict((t,)))
        return S and pdfdoc.PDFDictionary(S) or None

    def pushCopy(self):
        '''the states must be shared across push/pop, but the values not'''
        x = self.__class__()
        x._d = self._d.copy()
        x._c = self._c
        return x
class Canvas(textobject._PDFColorSetter):
"""This class is the programmer's interface to the PDF file format. Methods
are (or will be) provided here to do just about everything PDF can do.
The underlying model to the canvas concept is that of a graphics state machine
that at any given point in time has a current font, fill color (for figure
interiors), stroke color (for figure borders), line width and geometric transform, among
many other characteristics.
Canvas methods generally either draw something (like canvas.line) using the
current state of the canvas or change some component of the canvas
state (like canvas.setFont). The current state can be saved and restored
using the saveState/restoreState methods.
Objects are "painted" in the order they are drawn so if, for example
two rectangles overlap the last draw will appear "on top". PDF form
objects (supported here) are used to draw complex drawings only once,
for possible repeated use.
There are other features of canvas which are not visible when printed,
such as outlines and bookmarks which are used for navigating a document
in a viewer.
Here is a very silly example usage which generates a Hello World pdf document.
Example::
from reportlab.pdfgen import canvas
c = canvas.Canvas("hello.pdf")
from reportlab.lib.units import inch
# move the origin up and to the left
c.translate(inch,inch)
# define a large font
c.setFont("Helvetica", 80)
# choose some colors
c.setStrokeColorRGB(0.2,0.5,0.3)
c.setFillColorRGB(1,0,1)
# draw a rectangle
c.rect(inch,inch,6*inch,9*inch, fill=1)
# make text go straight up
c.rotate(90)
# change color
c.setFillColorRGB(0,0,0.77)
# say hello (note after rotate the y coord needs to be negative!)
c.drawString(3*inch, -3*inch, "Hello World")
c.showPage()
c.save()
"""
def __init__(self,filename,
pagesize=None,
bottomup = 1,
pageCompression=None,
invariant = None,
verbosity=0,
encrypt=None,
cropMarks=None,
pdfVersion=None,
enforceColorSpace=None,
):
"""Create a canvas of a given size. etc.
You may pass a file-like object to filename as an alternative to
a string.
For more information about the encrypt parameter refer to the setEncrypt method.
Most of the attributes are private - we will use set/get methods
as the preferred interface. Default page size is A4.
cropMarks may be True/False or an object with parameters borderWidth, markColor, markWidth
and markLength
if enforceColorSpace is in ('cmyk', 'rgb', 'sep','sep_black','sep_cmyk') then one of
the standard _PDFColorSetter callables will be used to enforce appropriate color settings.
If it is a callable then that will be used.
"""
if pagesize is None: pagesize = rl_config.defaultPageSize
if invariant is None: invariant = rl_config.invariant
self._filename = filename
self._doc = pdfdoc.PDFDocument(compression=pageCompression,
invariant=invariant, filename=filename,
pdfVersion=pdfVersion or pdfdoc.PDF_VERSION_DEFAULT,
)
self._enforceColorSpace = _chooseEnforceColorSpace(enforceColorSpace)
#this only controls whether it prints 'saved ...' - 0 disables
self._verbosity = verbosity
#this is called each time a page is output if non-null
self._onPage = None
self._cropMarks = cropMarks
self._pagesize = pagesize
self._pageRotation = 0
#self._currentPageHasImages = 0
self._pageTransition = None
self._pageDuration = None
self._destinations = {} # dictionary of destinations for cross indexing.
self.setPageCompression(pageCompression)
self._pageNumber = 1 # keep a count
# when we create a form we need to save operations not in the form
self._codeStack = []
self._restartAccumulators() # restart all accumulation state (generalized, arw)
self._annotationCount = 0
self._outlines = [] # list for a name tree
self._psCommandsBeforePage = [] #for postscript tray/font commands
self._psCommandsAfterPage = [] #for postscript tray/font commands
#PostScript has the origin at bottom left. It is easy to achieve a top-
#down coord system by translating to the top of the page and setting y
#scale to -1, but then text is inverted. So self.bottomup is used
#to also set the text matrix accordingly. You can now choose your
#drawing coordinates.
self.bottomup = bottomup
self.imageCaching = rl_config.defaultImageCaching
self.init_graphics_state()
self._make_preamble()
self.state_stack = []
self.setEncrypt(encrypt)
def setEncrypt(self, encrypt):
'''
Set the encryption used for the pdf generated by this canvas.
If encrypt is a string object, it is used as the user password for the pdf.
If encrypt is an instance of reportlab.lib.pdfencrypt.StandardEncryption, this object is
used to encrypt the pdf. This allows more finegrained control over the encryption settings.
'''
if encrypt:
from reportlab.lib import pdfencrypt
if isStr(encrypt): #encrypt is the password itself
if isUnicode(encrypt):
encrypt = encrypt.encode('utf-8')
encrypt = pdfencrypt.StandardEncryption(encrypt) #now it's the encrypt object
encrypt.setAllPermissions(1)
elif not isinstance(encrypt, pdfencrypt.StandardEncryption):
raise TypeError('Expected string or instance of reportlab.lib.pdfencrypt.StandardEncryption as encrypt parameter but got %r' % encrypt)
self._doc.encrypt = encrypt
else:
try:
del self._doc.encrypt
except AttributeError:
pass
def init_graphics_state(self):
#initial graphics state, never modify any of these in place
self._x = 0
self._y = 0
self._fontname = rl_config.canvas_basefontname
self._fontsize = 12
self._textMode = 0 #track if between BT/ET
self._leading = 14.4
self._currentMatrix = (1., 0., 0., 1., 0., 0.)
self._fillMode = 0 #even-odd
#text state
self._charSpace = 0
self._wordSpace = 0
self._horizScale = 100
self._textRenderMode = 0
self._rise = 0
self._textLineMatrix = (1., 0., 0., 1., 0., 0.)
self._textMatrix = (1., 0., 0., 1., 0., 0.)
# line drawing
self._lineCap = 0
self._lineJoin = 0
self._lineDash = None #not done
self._lineWidth = 1
self._mitreLimit = 0
self._fillColorObj = self._strokeColorObj = rl_config.canvas_baseColor or (0,0,0)
self._extgstate = ExtGState()
def push_state_stack(self):
state = {}
d = self.__dict__
for name in self.STATE_ATTRIBUTES:
state[name] = d[name] #getattr(self, name)
self.state_stack.append(state)
self._extgstate = self._extgstate.pushCopy()
def pop_state_stack(self):
self.__dict__.update(self.state_stack.pop())
STATE_ATTRIBUTES = """_x _y _fontname _fontsize _textMode _leading _currentMatrix _fillMode
_charSpace _wordSpace _horizScale _textRenderMode _rise _textLineMatrix
_textMatrix _lineCap _lineJoin _lineDash _lineWidth _mitreLimit _fillColorObj
_strokeColorObj _extgstate""".split()
STATE_RANGE = list(range(len(STATE_ATTRIBUTES)))
#self._addStandardFonts()
def _make_preamble(self):
P = [].append
if self.bottomup:
P('1 0 0 1 0 0 cm')
else:
P('1 0 0 -1 0 %s cm' % fp_str(self._pagesize[1]))
C = self._code
n = len(C)
if self._fillColorObj != (0,0,0):
self.setFillColor(self._fillColorObj)
if self._strokeColorObj != (0,0,0):
self.setStrokeColor(self._strokeColorObj)
P(' '.join(C[n:]))
del C[n:]
font = pdfmetrics.getFont(self._fontname)
if not font._dynamicFont:
#set an initial font
if font.face.builtIn or not getattr(self,'_drawTextAsPath',False):
P('BT %s 12 Tf 14.4 TL ET' % self._doc.getInternalFontName(self._fontname))
self._preamble = ' '.join(P.__self__)
def _escape(self, s):
return escapePDF(s)
#info functions - non-standard
def setAuthor(self, author):
"""identify the author for invisible embedding inside the PDF document.
the author annotation will appear in the the text of the file but will
not automatically be seen when the document is viewed, but is visible
in document properties etc etc."""
self._doc.setAuthor(author)
def setDateFormatter(self, dateFormatter):
"""accepts a func(yyyy,mm,dd,hh,m,s) used to create embedded formatted date"""
self._doc.setDateFormatter(dateFormatter)
    def addOutlineEntry(self, title, key, level=0, closed=None):
        """Adds a new entry to the outline at given level. If LEVEL not specified,
        entry goes at the top level. If level specified, it must be
        no more than 1 greater than the outline level in the last call.
        The key must be the (unique) name of a bookmark.
        the title is the (non-unique) name to be displayed for the entry.
        If closed is set then the entry should show no subsections by default
        when displayed.
        Example::
            c.addOutlineEntry("first section", "section1")
            c.addOutlineEntry("introduction", "s1s1", 1, closed=1)
            c.addOutlineEntry("body", "s1s2", 1)
            c.addOutlineEntry("detail1", "s1s2s1", 2)
            c.addOutlineEntry("detail2", "s1s2s2", 2)
            c.addOutlineEntry("conclusion", "s1s3", 1)
            c.addOutlineEntry("further reading", "s1s3s1", 2)
            c.addOutlineEntry("second section", "section2")
            c.addOutlineEntry("introduction", "s2s1", 1)
            c.addOutlineEntry("body", "s2s2", 1, closed=1)
            c.addOutlineEntry("detail1", "s2s2s1", 2)
            c.addOutlineEntry("detail2", "s2s2s2", 2)
            c.addOutlineEntry("conclusion", "s2s3", 1)
            c.addOutlineEntry("further reading", "s2s3s1", 2)
        generated outline looks like::
            - first section
            |- introduction
            |- body
            |  |- detail1
            |  |- detail2
            |- conclusion
            |  |- further reading
            - second section
            |- introduction
            |+ body
            |- conclusion
            |  |- further reading
        Note that the second "body" is closed.
        Note that you can jump from level 5 to level 3 but not
        from 3 to 5: instead you need to provide all intervening
        levels going down (4 in this case).  Note that titles can
        collide but keys cannot.
        """
        #to be completed
        #self._outlines.append(title)
        self._doc.outline.addOutlineEntry(key, level, title, closed=closed)
    def setOutlineNames0(self, *nametree): # keep this for now (?)
        """nametree can be a recursive tree like so::
            c.setOutlineNames(
                "chapter1dest",
                ("chapter2dest",
                    ["chapter2section1dest",
                        "chapter2section2dest",
                        "chapter2conclusiondest"]
                ), # end of chapter2 description
                "chapter3dest",
                ("chapter4dest", ["c4s1", "c4s2"])
            )
        each of the string names inside must be bound to a bookmark
        before the document is generated.
        """
        self._doc.outline.setNames(*((self,)+nametree))
    def setTitle(self, title):
        """write a title into the PDF file that won't automatically display
        in the document itself (delegates to the underlying document object)."""
        self._doc.setTitle(title)
    def setSubject(self, subject):
        """write a subject into the PDF file that won't automatically display
        in the document itself (delegates to the underlying document object)."""
        self._doc.setSubject(subject)
    def setCreator(self, creator):
        """write a creator into the PDF file that won't automatically display
        in the document itself. This should be used to name the original app
        which is passing data into ReportLab, if you wish to name it."""
        self._doc.setCreator(creator)
def setKeywords(self, keywords):
"""write a list of keywords into the PDF file which shows in document properties.
Either submit a single string or a list/tuple"""
if isinstance(keywords,(list,tuple)):
keywords = ', '.join(keywords)
self._doc.setKeywords(keywords)
def pageHasData(self):
"Info function - app can call it after showPage to see if it needs a save"
return len(self._code) == 0
    def showOutline(self):
        """Specify that Acrobat Reader should start with the outline tree visible.
        showFullScreen() and showOutline() conflict; the one called last
        wins."""
        self._doc._catalog.showOutline()
    def showFullScreen0(self):
        """Specify that Acrobat Reader should start in full screen mode.
        showFullScreen() and showOutline() conflict; the one called last
        wins."""
        self._doc._catalog.showFullScreen()
    def _setStrokeAlpha(self,v):
        """
        Define the transparency/opacity of strokes. 0 is fully
        transparent, 1 is fully opaque.
        Note that calling this function will cause a version 1.4 PDF
        to be generated (rather than 1.3).
        """
        self._doc.ensureMinPdfVersion('transparency')
        #'CA' is the extended-graphics-state entry for stroke alpha
        self._extgstate.set(self,'CA',v)
    def _setFillAlpha(self,v):
        """
        Define the transparency/opacity of non-strokes. 0 is fully
        transparent, 1 is fully opaque.
        Note that calling this function will cause a version 1.4 PDF
        to be generated (rather than 1.3).
        """
        self._doc.ensureMinPdfVersion('transparency')
        #'ca' is the extended-graphics-state entry for non-stroke alpha
        self._extgstate.set(self,'ca',v)
    def _setStrokeOverprint(self,v):
        #toggle overprint for stroking operations via the ExtGState 'OP' entry
        self._extgstate.set(self,'OP',v)
    def _setFillOverprint(self,v):
        #toggle overprint for non-stroking operations via the ExtGState 'op' entry
        self._extgstate.set(self,'op',v)
    def _setOverprintMask(self,v):
        #normalize any truthy value to 1, falsy to 0 before storing 'OPM'
        self._extgstate.set(self,'OPM',v and 1 or 0)
def _getCmShift(self):
cM = self._cropMarks
if cM:
bleedW = max(0,getattr(cM,'bleedWidth',0))
bw = max(0,getattr(cM,'borderWidth',36))
if bleedW:
bw -= bleedW
return bw
    def showPage(self):
        """Close the current page and possibly start on a new page."""
        # ensure a space at the end of the stream - Acrobat does
        # not mind, but Ghostscript dislikes 'Qendstream' even if
        # the length marker finishes after 'Q'
        pageWidth = self._pagesize[0]
        pageHeight = self._pagesize[1]
        cM = self._cropMarks
        code = self._code
        if cM:
            #crop marks requested: draw them and enlarge the page by the border
            bw = max(0,getattr(cM,'borderWidth',36))
            if bw:
                markLast = getattr(cM,'markLast',1)
                ml = min(bw,max(0,getattr(cM,'markLength',18)))
                mw = getattr(cM,'markWidth',0.5)
                mc = getattr(cM,'markColor',black)
                mg = 2*bw-ml
                cx0 = len(code)
                if ml and mc:
                    #stroke the eight corner tick marks around the content area
                    self.saveState()
                    self.setStrokeColor(mc)
                    self.setLineWidth(mw)
                    self.lines([
                        (bw,0,bw,ml),
                        (pageWidth+bw,0,pageWidth+bw,ml),
                        (bw,pageHeight+mg,bw,pageHeight+2*bw),
                        (pageWidth+bw,pageHeight+mg,pageWidth+bw,pageHeight+2*bw),
                        (0,bw,ml,bw),
                        (pageWidth+mg,bw,pageWidth+2*bw,bw),
                        (0,pageHeight+bw,ml,pageHeight+bw),
                        (pageWidth+mg,pageHeight+bw,pageWidth+2*bw,pageHeight+bw),
                        ])
                    self.restoreState()
                    if markLast:
                        #if the marks are to be drawn after the content
                        #save the code we just drew for later use
                        L = code[cx0:]
                        del code[cx0:]
                        cx0 = len(code)
                bleedW = max(0,getattr(cM,'bleedWidth',0))
                self.saveState()
                self.translate(bw-bleedW,bw-bleedW)
                if bleedW:
                    #scale everything
                    self.scale(1+(2.0*bleedW)/pageWidth,1+(2.0*bleedW)/pageHeight)
                #move our translation/expansion code to the beginning
                C = code[cx0:]
                del code[cx0:]
                code[0:0] = C
                self.restoreState()
                if markLast:
                    #re-append the saved mark-drawing code so it paints on top
                    code.extend(L)
                pageWidth = 2*bw + pageWidth
                pageHeight = 2*bw + pageHeight
        code.append(' ')
        page = pdfdoc.PDFPage()
        page.pagewidth = pageWidth
        page.pageheight = pageHeight
        page.Rotate = self._pageRotation
        page.hasImages = self._currentPageHasImages
        page.setPageTransition(self._pageTransition)
        page.setCompression(self._pageCompression)
        if self._pageDuration is not None:
            page.Dur = self._pageDuration
        #page stream = pre-page PS + preamble + accumulated code + post-page PS
        strm = self._psCommandsBeforePage + [self._preamble] + code + self._psCommandsAfterPage
        page.setStream(strm)
        self._setColorSpace(page)
        self._setExtGState(page)
        self._setXObjects(page)
        self._setShadingUsed(page)
        self._setAnnotations(page)
        self._doc.addPage(page)
        if self._onPage: self._onPage(self._pageNumber)
        self._startPage()
    def _startPage(self):
        """Reset per-page accumulators and graphics state for the next page."""
        #now get ready for the next one
        self._pageNumber += 1
        self._restartAccumulators()
        self.init_graphics_state()
        self.state_stack = []
    def setPageCallBack(self, func):
        """func(pageNum) will be called on each page end.
        This is mainly a hook for progress monitoring.
        Call setPageCallBack(None) to clear a callback."""
        self._onPage = func
    def _setAnnotations(self,page):
        #attach the annotation references accumulated for this page/form
        page.Annots = self._annotationrefs
    def _setColorSpace(self,obj):
        #record the color spaces used so resources can be emitted
        obj._colorsUsed = self._colorsUsed
    def _setShadingUsed(self, page):
        #record the shadings used so resources can be emitted
        page._shadingUsed = self._shadingUsed
def _setXObjects(self, thing):
"""for pages and forms, define the XObject dictionary for resources, if needed"""
forms = self._formsinuse
if forms:
xobjectsdict = self._doc.xobjDict(forms)
thing.XObjects = xobjectsdict
else:
thing.XObjects = None
def _bookmarkReference(self, name):
"""get a reference to a (possibly undefined, possibly unbound) bookmark"""
d = self._destinations
try:
return d[name]
except:
result = d[name] = pdfdoc.Destination(name) # newly defined, unbound
return result
    def bookmarkPage(self, key,
                      fit="Fit",
                      left=None,
                      top=None,
                      bottom=None,
                      right=None,
                      zoom=None
                      ):
        """
        This creates a bookmark to the current page which can
        be referred to with the given key elsewhere.
        PDF offers very fine grained control over how Acrobat
        reader is zoomed when people link to this. The default
        is to keep the user's current zoom settings. the last
        arguments may or may not be needed depending on the
        choice of 'fitType'.
        Fit types and the other arguments they use are:
        - XYZ left top zoom - fine grained control.  null
          or zero for any of the parameters means 'leave
          as is', so "0,0,0" will keep the reader's settings.
          NB. Adobe Reader appears to prefer "null" to 0's.
        - Fit - entire page fits in window
        - FitH top - top coord at top of window, width scaled
                     to fit.
        - FitV left - left coord at left of window, height
                      scaled to fit
        - FitR left bottom right top - scale window to fit
                                       the specified rectangle
        (question: do we support /FitB, FitBH and /FitBV
        which are hangovers from version 1.1 / Acrobat 3.0?)"""
        dest = self._bookmarkReference(key)
        self._doc.inPage() # try to enable page-only features
        pageref = self._doc.thisPageRef()
        #None = "null" for PDF
        if left is None:
            left = "null"
        if top is None:
            top = "null"
        if bottom is None:
            bottom = "null"
        if right is None:
            right = "null"
        if zoom is None:
            zoom = "null"
        #dispatch on the requested fit style
        if fit == "XYZ":
            dest.xyz(left,top,zoom)
        elif fit == "Fit":
            dest.fit()
        elif fit == "FitH":
            dest.fith(top)
        elif fit == "FitV":
            dest.fitv(left)
        elif fit == "FitR":
            dest.fitr(left,bottom,right,top)
        #Do we need these (version 1.1 / Acrobat 3 versions)?
        elif fit == "FitB":
            dest.fitb()
        elif fit == "FitBH":
            dest.fitbh(top)
        elif fit == "FitBV":
            dest.fitbv(left)
        else:
            raise ValueError("Unknown Fit type %s" % ascii(fit))
        dest.setPage(pageref)
        return dest
    def bookmarkHorizontalAbsolute(self, key, top, left=0, fit='XYZ', **kw):
        """Bind a bookmark (destination) to the current page at a horizontal position.
        Note that the y position of the book mark is with respect to the default
        user space (where the origin is at the lower left corner of the page)
        and completely ignores any transform (translation, scale, skew, rotation,
        etcetera) in effect for the current graphics state.  The programmer is
        responsible for making sure the bookmark matches an appropriate item on
        the page."""
        #This method should probably be deprecated since it is just a sub-set of bookmarkPage
        return self.bookmarkPage(key, fit=fit, top=top, left=left, zoom=0)
    def bookmarkHorizontal(self, key, relativeX, relativeY, **kw):
        """w.r.t. the current transformation, bookmark this horizontal.

        The point is converted to default user space via absolutePosition
        before delegating to bookmarkHorizontalAbsolute."""
        (left, top) = self.absolutePosition(relativeX,relativeY)
        self.bookmarkHorizontalAbsolute(key, top, left=left, **kw)
#def _inPage0(self): disallowed!
# """declare a page, enable page features"""
# self._doc.inPage()
#def _inForm0(self):
# "deprecated in favore of beginForm...endForm"
# self._doc.inForm()
    def doForm(self, name):
        """use a form XObj in current operation stream.
        The form should either have been defined previously using
        beginForm ... endForm, or may be defined later.  If it is not
        defined at save time, an exception will be raised. The form
        will be drawn within the context of the current graphics
        state."""
        self._code.append("/%s Do" % self._doc.getXObjectName(name))
        #remember the form so it lands in this page's resource dictionary
        self._formsinuse.append(name)
    def hasForm(self, name):
        """Query whether form XObj really exists yet."""
        return self._doc.hasForm(name)
######################################################
#
# Image routines
#
######################################################
    def drawInlineImage(self, image, x,y, width=None,height=None,
            preserveAspectRatio=False,anchor='c'):
        """See drawImage, which should normally be used instead...
        drawInlineImage behaves like drawImage, but stores the image content
        within the graphics stream for the page.  This means that the mask
        parameter for transparency is not available.  It also means that there
        is no saving in file size or time if the same image is reused.
        In theory it allows images to be displayed slightly faster; however,
        we doubt if the difference is noticeable to any human user these days.
        Only use this if you have studied the PDF specification and know the
        implications.

        Returns a (width, height) tuple for the underlying image."""
        self._currentPageHasImages = 1
        #local import keeps pdfimages out of the common path
        from reportlab.pdfgen.pdfimages import PDFImage
        img_obj = PDFImage(image, x,y, width, height)
        img_obj.drawInlineImage(self,
            preserveAspectRatio=preserveAspectRatio,
            anchor=anchor)
        return (img_obj.width, img_obj.height)
    def drawImage(self, image, x, y, width=None, height=None, mask=None,
            preserveAspectRatio=False, anchor='c'):
        """Draws the image (ImageReader object or filename) as specified.

        "image" may be an image filename or an ImageReader object.

        x and y define the lower left corner of the image you wish to
        draw (or of its bounding box, if using preserveAspectRation below).

        If width and height are not given, the width and height of the
        image in pixels is used at a scale of 1 point to 1 pixel.

        If width and height are given, the image will be stretched to fill
        the given rectangle bounded by (x, y, x+width, y-height).

        If you supply negative widths and/or heights, it inverts them and adjusts
        x and y accordingly.

        The method returns the width and height of the underlying image, since
        this is often useful for layout algorithms and saves you work if you have
        not specified them yourself.

        The mask parameter supports transparent backgrounds. It takes 6 numbers
        and defines the range of RGB values which will be masked out or treated
        as transparent.  For example with [0,2,40,42,136,139], it will mask out
        any pixels with a Red value from 0-2, Green from 40-42 and
        Blue from 136-139 (on a scale of 0-255).

        New post version 2.0:  drawImage can center an image in a box you
        provide, while preserving its aspect ratio.  For example, you might
        have a fixed square box in your design, and a collection of photos
        which might be landscape or portrait that you want to appear within
        the box.  If preserveAspectRatio is true, your image will appear within
        the box specified.

        If preserveAspectRatio is True, the anchor property can be used to
        specify how images should fit into the given box.  It should
        be set to one of the following values, taken from the points of
        the compass (plus 'c' for 'centre'):

                nw   n   ne
                w    c    e
                sw   s   se

        The default value is 'c' for 'centre'.  Thus, if you want your
        bitmaps to always be centred and appear at the top of the given box,
        set anchor='n'.      There are good examples of this in the output
        of test_pdfgen_general.py

        Unlike drawInlineImage, this creates 'external images' which
        are only stored once in the PDF file but can be drawn many times.
        If you give it the same filename twice, even at different locations
        and sizes, it will reuse the first occurrence, resulting in a saving
        in file size and generation time.  If you use ImageReader objects,
        it tests whether the image content has changed before deciding
        whether to reuse it.

        In general you should use drawImage in preference to drawInlineImage
        unless you have read the PDF Spec and understand the tradeoffs."""
        self._currentPageHasImages = 1

        # first, generate a unique name/signature for the image.  If ANYTHING
        # is different, even the mask, this should be different.
        if isinstance(image,ImageReader):
            rawdata = image.getRGBData()
            smask = image._dataA
            if mask=='auto' and smask:
                mdata = smask.getRGBData()
            else:
                mdata = str(mask)
            if isUnicode(mdata):
                mdata = mdata.encode('utf8')
            name = _digester(rawdata+mdata)
        else:
            #filename, use it
            s = '%s%s' % (image, mask)
            if isUnicode(s):
                s = s.encode('utf-8')
            name = _digester(s)

        # in the pdf document, this will be prefixed with something to
        # say it is an XObject.  Does it exist yet?
        regName = self._doc.getXObjectName(name)
        imgObj = self._doc.idToObject.get(regName, None)
        if not imgObj:
            #first time seen, create and register the PDFImageXobject
            imgObj = pdfdoc.PDFImageXObject(name, image, mask=mask)
            imgObj.name = name
            self._setXObjects(imgObj)
            self._doc.Reference(imgObj, regName)
            self._doc.addForm(name, imgObj)
            smask = getattr(imgObj,'_smask',None)
            if smask: #set up the softmask obtained above
                mRegName = self._doc.getXObjectName(smask.name)
                mImgObj = self._doc.idToObject.get(mRegName, None)
                if not mImgObj:
                    self._setXObjects(smask)
                    imgObj.smask = self._doc.Reference(smask,mRegName)
                else:
                    imgObj.smask = pdfdoc.PDFObjectReference(mRegName)
                del imgObj._smask

        # ensure we have a size, as PDF will make it 1x1 pixel otherwise!
        x,y,width,height,scaled = aspectRatioFix(preserveAspectRatio,anchor,x,y,width,height,imgObj.width,imgObj.height)

        # scale and draw
        self.saveState()
        self.translate(x, y)
        self.scale(width, height)
        self._code.append("/%s Do" % regName)
        self.restoreState()

        # track what's been used on this page
        self._formsinuse.append(name)

        return (imgObj.width, imgObj.height)
def _restartAccumulators(self):
if self._codeStack:
# restore the saved code
saved = self._codeStack[-1]
del self._codeStack[-1]
self._code, self._formsinuse, self._annotationrefs, self._formData,self._colorsUsed, self._shadingUsed = saved
else:
self._code = [] # ready for more...
self._psCommandsAfterPage = []
self._currentPageHasImages = 1 # for safety...
self._formsinuse = []
self._annotationrefs = []
self._formData = None
self._colorsUsed = {}
self._shadingUsed = {}
def _pushAccumulators(self):
"when you enter a form, save accumulator info not related to the form for page (if any)"
saved = (self._code, self._formsinuse, self._annotationrefs, self._formData, self._colorsUsed, self._shadingUsed)
self._codeStack.append(saved)
self._code = [] # ready for more...
self._currentPageHasImages = 1 # for safety...
self._formsinuse = []
self._annotationrefs = []
self._formData = None
self._colorsUsed = {}
self._shadingUsed = {}
    def _setExtGState(self, obj):
        #attach the accumulated extended graphics state dictionary
        obj.ExtGState = self._extgstate.getState()
    def beginForm(self, name, lowerx=0, lowery=0, upperx=None, uppery=None):
        """declare the current graphics stream to be a named form.
           A graphics stream can either be a page or a form, not both.
           Some operations (like bookmarking) are permitted for pages
           but not forms.  The form will not automatically be shown in the
           document but must be explicitly referenced using doForm in pages
           that require the form."""
        self.push_state_stack()
        self.init_graphics_state()
        if self._code or self._formData:
            # save the code that is not in the form
            self._pushAccumulators()
            #self._codeStack.append(self._code)
            #self._code = []
        #remember the form's name and bounds for endForm
        self._formData = (name, lowerx, lowery, upperx, uppery)
        self._doc.inForm()
        #self._inForm0()
    def endForm(self):
        """emit the current collection of graphics operations as a Form
        as declared previously in beginForm."""
        (name, lowerx, lowery, upperx, uppery) = self._formData
        #self.makeForm0(name, lowerx, lowery, upperx, uppery)
        # fall through!  makeForm0 disallowed
        #def makeForm0(self, name, lowerx=0, lowery=0, upperx=None, uppery=None):
        #NOTE(review): the bare string below is a no-op left over from the
        #inlined makeForm0; kept for history
        """Like showpage, but make a form using accumulated operations instead"""
        # deprecated in favor or beginForm(...)... endForm()
        (w,h) = self._pagesize
        #default the upper bounds to the full page size
        if upperx is None: upperx=w
        if uppery is None: uppery=h
        form = pdfdoc.PDFFormXObject(lowerx=lowerx, lowery=lowery, upperx=upperx, uppery=uppery)
        form.compression = self._pageCompression
        form.setStreamList([self._preamble] + self._code) # ??? minus preamble (seems to be needed!)
        self._setColorSpace(form)
        self._setExtGState(form)
        self._setXObjects(form)
        self._setAnnotations(form)
        self._doc.addForm(name, form)
        #restore whatever page (or outer form) state was interrupted
        self._restartAccumulators()
        self.pop_state_stack()
    def addPostScriptCommand(self, command, position=1):
        """Embed literal Postscript in the document.

        With position=0, it goes at very beginning of page stream;
        with position=1, at current point; and
        with position=2, at very end of page stream.  What that does
        to the resulting Postscript depends on Adobe's header :-)

        Use with extreme caution, but sometimes needed for printer tray commands.
        Acrobat 4.0 will export Postscript to a printer or file containing
        the given commands.  Adobe Reader 6.0 no longer does as this feature is
        deprecated.  5.0, I don't know about (please let us know!). This was
        funded by Bob Marshall of Vector.co.uk and tested on a Lexmark 750.
        See test_pdfbase_postscript.py for 2 test cases - one will work on
        any Postscript device, the other uses a 'setpapertray' command which
        will error in Distiller but work on printers supporting it.
        """
        #check if we've done this one already... keyed by an md5 of the command
        if isUnicode(command):
            rawName = 'PS' + hashlib.md5(command.encode('utf-8')).hexdigest()
        else:
            rawName = 'PS' + hashlib.md5(command).hexdigest()
        regName = self._doc.getXObjectName(rawName)
        psObj = self._doc.idToObject.get(regName, None)
        if not psObj:
            #first use of this chunk of Postscript, make an object
            psObj = pdfdoc.PDFPostScriptXObject(command + '\r\n')
            self._setXObjects(psObj)
            self._doc.Reference(psObj, regName)
            self._doc.addForm(rawName, psObj)
        if position == 0:
            self._psCommandsBeforePage.append("/%s Do" % regName)
        elif position==1:
            self._code.append("/%s Do" % regName)
        else:
            self._psCommandsAfterPage.append("/%s Do" % regName)
        self._formsinuse.append(rawName)
    def _absRect(self,rect,relative=0):
        """Return *rect* as an absolute (xmin,ymin,xmax,ymax) rectangle.

        A falsy rect means the whole page.  When relative is true the four
        corners are mapped through the current transform and re-boxed with
        min/max so skew/rotation still yields an axis-aligned rectangle.
        Any crop-mark shift is added at the end.
        """
        if not rect:
            w,h = self._pagesize
            rect = (0,0,w,h)
        elif relative:
            lx, ly, ux, uy = rect
            xll,yll = self.absolutePosition(lx,ly)
            xur,yur = self.absolutePosition(ux, uy)
            xul,yul = self.absolutePosition(lx, uy)
            xlr,ylr = self.absolutePosition(ux, ly)
            xs = xll, xur, xul, xlr
            ys = yll, yur, yul, ylr
            xmin, ymin = min(xs), min(ys)
            xmax, ymax = max(xs), max(ys)
            rect = xmin, ymin, xmax, ymax
        bw = self._getCmShift()
        if bw:
            #shift by the crop-mark border so the rect still matches the content
            rect = rect[0]+bw,rect[1]+bw,rect[2]+bw,rect[3]+bw
        return rect
    def freeTextAnnotation(self, contents, DA, Rect=None, addtopage=1, name=None, relative=0, **kw):
        """Add a free-text annotation.
        DA is the default appearance string (TODO confirm exact semantics)."""
        Rect = self._absRect(Rect,relative)
        self._addAnnotation(pdfdoc.FreeTextAnnotation(Rect, contents, DA, **kw), name, addtopage)
    def textAnnotation(self, contents, Rect=None, addtopage=1, name=None, relative=0, **kw):
        """Experimental, but works.
        """
        Rect = self._absRect(Rect,relative)
        self._addAnnotation(pdfdoc.TextAnnotation(Rect, contents, **kw), name, addtopage)
    textAnnotation0 = textAnnotation    #deprecated
    def highlightAnnotation(self, contents, Rect, QuadPoints=None, Color=[0.83, 0.89, 0.95], addtopage=1,
                            name=None, relative=0, **kw):
        """
        Allows adding of a highlighted annotation.

        Rect: Mouseover area to show contents of annotation
        QuadPoints: List of four x/y points [TOP-LEFT, TOP-RIGHT, BOTTOM-LEFT, BOTTOM-RIGHT]
        These points outline the areas to highlight.
        You can have multiple groups of four to allow multiple highlighted areas.
        Is in the format [x1, y1, x2, y2, x3, y3, x4, y4, x1, y1, x2, y2, x3, y3, x4, y4] etc
        QuadPoints defaults to be area inside of passed in Rect
        Color: The color of the highlighting.
        """
        #NOTE(review): the mutable default Color list is shared across calls;
        #it is only passed through here, so safe unless mutated downstream
        Rect = self._absRect(Rect, relative)
        if not QuadPoints:
            QuadPoints = pdfdoc.rect_to_quad(Rect)
        self._addAnnotation(pdfdoc.HighlightAnnotation(Rect, contents, QuadPoints, Color, **kw), name, addtopage)
    def inkAnnotation(self, contents, InkList=None, Rect=None, addtopage=1, name=None, relative=0, **kw):
        #NOTE(review): deliberately disabled — everything below the raise is
        #unreachable dead code and references undefined names h and w
        raise NotImplementedError
        "Experimental"
        Rect = self._absRect(Rect,relative)
        if not InkList:
            InkList = ((100,100,100,h-100,w-100,h-100,w-100,100),)
        self._addAnnotation(pdfdoc.InkAnnotation(Rect, contents, InkList, **kw), name, addtopage)
    inkAnnotation0 = inkAnnotation  #deprecated
    def linkAbsolute(self, contents, destinationname, Rect=None, addtopage=1, name=None,
            thickness=0, color=None, dashArray=None, **kw):
        """rectangular link annotation positioned wrt the default user space.
            The identified rectangle on the page becomes a "hot link" which
            when clicked will send the viewer to the page and position identified
            by the destination.

            Rect identifies (lowerx, lowery, upperx, uppery) for lower left
            and upperright points of the rectangle.  Translations and other transforms
            are IGNORED (the rectangular position is given with respect
            to the default user space.
            destinationname should be the name of a bookmark (which may be defined later
            but must be defined before the document is generated).

            You may want to use the keyword argument Border='[0 0 0]' to
            suppress the visible rectangle around the link during viewing."""
        return self.linkRect(contents, destinationname, Rect, addtopage, name, relative=0,
                thickness=thickness, color=color, dashArray=dashArray, **kw)
    def linkRect(self, contents, destinationname, Rect=None, addtopage=1, name=None, relative=1,
            thickness=0, color=None, dashArray=None, **kw):
        """rectangular link annotation w.r.t the current user transform.
            if the transform is skewed/rotated the absolute rectangle will use the max/min x/y
        """
        destination = self._bookmarkReference(destinationname) # permitted to be undefined... must bind later...
        Rect = self._absRect(Rect,relative)
        kw["Rect"] = Rect
        kw["Contents"] = contents
        kw["Destination"] = destination
        #apply the border appearance (thickness/color/dashes) to the annotation dict
        _annFormat(kw,color,thickness,dashArray)
        return self._addAnnotation(pdfdoc.LinkAnnotation(**kw), name, addtopage)
    def linkURL(self, url, rect, relative=0, thickness=0, color=None, dashArray=None, kind="URI", **kw):
        """Create a rectangular URL 'hotspot' in the given rectangle.

        if relative=1, this is in the current coord system, otherwise
        in absolute page space.
        The remaining options affect the border appearance; the border is
        drawn by Acrobat, not us.  Set thickness to zero to hide it.
        Any border drawn this way is NOT part of the page stream and
        will not show when printed to a Postscript printer or distilled;
        it is safest to draw your own."""
        from reportlab.pdfbase.pdfdoc import PDFDictionary, PDFName, PDFArray, PDFString
        #tried the documented BS element in the pdf spec but it
        #does not work, and Acrobat itself does not appear to use it!
        ann = PDFDictionary(dict=kw)
        ann["Type"] = PDFName("Annot")
        ann["Subtype"] = PDFName("Link")
        ann["Rect"] = PDFArray(self._absRect(rect,relative)) # the whole page for testing
        # the action is a separate dictionary
        A = PDFDictionary()
        A["Type"] = PDFName("Action") # not needed?
        uri = PDFString(url)
        A['S'] = PDFName(kind)
        if kind=="URI":
            A["URI"] = uri
        elif kind=='GoToR':
            #remote go-to: F is the file, D the destination within it
            A["F"] = uri
            A["D"] = "[ 0 /XYZ null null null ]"
        else:
            raise ValueError("Unknown linkURI kind '%s'" % kind)
        ann["A"] = A
        _annFormat(ann,color,thickness,dashArray)
        self._addAnnotation(ann)
def _addAnnotation(self, annotation, name=None, addtopage=1):
count = self._annotationCount = self._annotationCount+1
if not name: name="NUMBER"+repr(count)
self._doc.addAnnotation(name, annotation)
if addtopage:
self._annotatePage(name)
    def _annotatePage(self, name):
        #resolve the named annotation to a reference and attach it to this page
        ref = self._doc.refAnnotation(name)
        self._annotationrefs.append(ref)
    def getPageNumber(self):
        "get the page number for the current page being generated."
        #_pageNumber is advanced by _startPage after each showPage
        return self._pageNumber
    def save(self):
        """Saves and close the PDF document in the file.
           If there is current data a ShowPage is executed automatically.
           After this operation the canvas must not be used further."""
        #flush any half-finished page before writing the file out
        if len(self._code): self.showPage()
        self._doc.SaveToFile(self._filename, self)
    def getpdfdata(self):
        """Returns the PDF data that would normally be written to a file.
        If there is current data a ShowPage is executed automatically.
        After this operation the canvas must not be used further.

        The result is always returned as bytes (utf-8 encoded if the
        underlying document produced text)."""
        #flush any half-finished page first
        if len(self._code): self.showPage()
        s = self._doc.GetPDFData(self)
        if isUnicode(s):
            s = s.encode('utf-8')
        return s
    def setPageSize(self, size):
        """accepts a 2-tuple in points for paper size for this
        and subsequent pages"""
        self._pagesize = size
        #the preamble embeds a page-height-dependent cm when not bottomup
        self._make_preamble()
def setPageRotation(self, rot):
"""Instruct display device that this page is to be rotated"""
assert rot % 90.0 == 0.0, "Rotation must be a multiple of 90 degrees"
self._pageRotation = rot
def addLiteral(self, s, escaped=1):
"""introduce the literal text of PDF operations s into the current stream.
Only use this if you are an expert in the PDF file format."""
s = str(s) # make sure its a string
if escaped==0:
s = self._escape(s) # convert to string for safety
self._code.append(s)
######################################################################
#
# coordinate transformations
#
######################################################################
    def resetTransforms(self):
        """I want to draw something (eg, string underlines) w.r.t. the default user space.
           Reset the matrix! This should be used usually as follows::

              canv.saveState()
              canv.resetTransforms()
              #...draw some stuff in default space coords...
              canv.restoreState() # go back!
        """
        # we have to adjoin the inverse, since reset is not a basic operation (without save/restore)
        (selfa, selfb, selfc, selfd, selfe, selff) = self._currentMatrix
        det = selfa*selfd - selfc*selfb
        #components of the inverse of the current 2x3 affine matrix
        resulta = selfd/det
        resultc = -selfc/det
        resulte = (selfc*selff - selfd*selfe)/det
        resultd = selfa/det
        resultb = -selfb/det
        resultf = (selfe*selfb - selff*selfa)/det
        self.transform(resulta, resultb, resultc, resultd, resulte, resultf)
def transform(self, a,b,c,d,e,f):
"""adjoin a mathematical transform to the current graphics state matrix.
Not recommended for beginners."""
#How can Python track this?
if ENABLE_TRACKING:
a0,b0,c0,d0,e0,f0 = self._currentMatrix
self._currentMatrix = (a0*a+c0*b, b0*a+d0*b,
a0*c+c0*d, b0*c+d0*d,
a0*e+c0*f+e0, b0*e+d0*f+f0)
if self._code and self._code[-1][-3:]==' cm':
L = self._code[-1].split()
a0, b0, c0, d0, e0, f0 = list(map(float,L[-7:-1]))
s = len(L)>7 and join(L)+ ' %s cm' or '%s cm'
self._code[-1] = s % fp_str(a0*a+c0*b,b0*a+d0*b,a0*c+c0*d,b0*c+d0*d,a0*e+c0*f+e0,b0*e+d0*f+f0)
else:
self._code.append('%s cm' % fp_str(a,b,c,d,e,f))
    def absolutePosition(self, x, y):
        """return the absolute position of x,y in user space w.r.t. default user space"""
        if not ENABLE_TRACKING:
            raise ValueError("tracking not enabled! (canvas.ENABLE_TRACKING=0)")
        (a,b,c,d,e,f) = self._currentMatrix
        #apply the tracked 2x3 affine matrix to the point
        xp = a*x + c*y + e
        yp = b*x + d*y + f
        return (xp, yp)
    def translate(self, dx, dy):
        """move the origin from the current (0,0) point to the (dx,dy) point
           (with respect to the current graphics state)."""
        self.transform(1,0,0,1,dx,dy)
    def scale(self, x, y):
        """Scale the horizontal dimension by x and the vertical by y
           (with respect to the current graphics state).
           For example canvas.scale(2.0, 0.5) will make everything short and fat."""
        self.transform(x,0,0,y,0,0)
def rotate(self, theta):
"""Canvas.rotate(theta)
Rotate the canvas by the angle theta (in degrees)."""
c = cos(theta * pi / 180)
s = sin(theta * pi / 180)
self.transform(c, s, -s, c, 0, 0)
def skew(self, alpha, beta):
tanAlpha = tan(alpha * pi / 180)
tanBeta = tan(beta * pi / 180)
self.transform(1, tanAlpha, tanBeta, 1, 0, 0)
######################################################################
#
# graphics state management
#
######################################################################
    def saveState(self):
        """Save the current graphics state to be restored later by restoreState.

        For example:
            canvas.setFont("Helvetica", 20)
            canvas.saveState()
            ...
            canvas.setFont("Courier", 9)
            ...
            canvas.restoreState()
            # if the save/restore pairs match then font is Helvetica 20 again.
        """
        #push our python-side mirror of the state, then the PDF operator
        self.push_state_stack()
        self._code.append('q')
    def restoreState(self):
        """restore the graphics state to the matching saved state (see saveState)."""
        #emit the PDF operator, then pop our python-side mirror of the state
        self._code.append('Q')
        self.pop_state_stack()
###############################################################
#
# Drawing methods. These draw things directly without
# fiddling around with Path objects. We can add any geometry
# methods we wish as long as their meaning is precise and
# they are of general use.
#
# In general there are two patterns. Closed shapes
# have the pattern shape(self, args, stroke=1, fill=0);
# by default they draw an outline only. Line segments come
# in three flavours: line, bezier, arc (which is a segment
# of an elliptical arc, approximated by up to four bezier
# curves, one for each quadrant.
#
# In the case of lines, we provide a 'plural' to unroll
# the inner loop; it is useful for drawing big grids
################################################################
#--------first the line drawing methods-----------------------
    def line(self, x1,y1, x2,y2):
        """draw a line segment from (x1,y1) to (x2,y2) (with color, thickness and
        other attributes determined by the current graphics state)."""
        #n: new path, m: moveto, l: lineto, S: stroke
        self._code.append('n %s m %s l S' % (fp_str(x1, y1), fp_str(x2, y2)))
def lines(self, linelist):
"""Like line(), permits many lines to be drawn in one call.
for example for the figure::
|
-- --
|
crosshairs = [(20,0,20,10), (20,30,20,40), (0,20,10,20), (30,20,40,20)]
canvas.lines(crosshairs)
"""
self._code.append('n')
for (x1,y1,x2,y2) in linelist:
self._code.append('%s m %s l' % (fp_str(x1, y1), fp_str(x2, y2)))
self._code.append('S')
def grid(self, xlist, ylist):
"""Lays out a grid in current line style. Supply list of
x an y positions."""
assert len(xlist) > 1, "x coordinate list must have 2+ items"
assert len(ylist) > 1, "y coordinate list must have 2+ items"
lines = []
y0, y1 = ylist[0], ylist[-1]
x0, x1 = xlist[0], xlist[-1]
for x in xlist:
lines.append((x,y0,x,y1))
for y in ylist:
lines.append((x0,y,x1,y))
self.lines(lines)
    def bezier(self, x1, y1, x2, y2, x3, y3, x4, y4):
        "Bezier curve with the four given control points"
        # (x1,y1) is the start point; the 'c' operator consumes the remaining
        # three control points and 'S' strokes the resulting curve.
        self._code.append('n %s m %s c S' %
                          (fp_str(x1, y1), fp_str(x2, y2, x3, y3, x4, y4))
                          )
    def arc(self, x1,y1, x2,y2, startAng=0, extent=90):
        """Draw a partial ellipse inscribed within the rectangle x1,y1,x2,y2,
        starting at startAng degrees and covering extent degrees.   Angles
        start with 0 to the right (+x) and increase counter-clockwise.
        These should have x1<x2 and y1<y2."""
        # Construct the arc by letting a path object write directly into our
        # code stream, then stroke only (stroke=1, fill=0).
        pathobject.PDFPathObject(code=self._code).arc(x1,y1,x2,y2,startAng,extent)
        self._strokeAndFill(1,0)
#--------now the shape drawing methods-----------------------
    def rect(self, x, y, width, height, stroke=1, fill=0):
        "draws a rectangle with lower left corner at (x,y) and width and height as given."
        # 're' is the PDF rectangle operator; the trailing painting operator
        # (stroke/fill/both) is looked up from PATH_OPS for the current fill mode.
        self._code.append('n %s re ' % fp_str(x, y, width, height)
                          + PATH_OPS[stroke, fill, self._fillMode])
    def ellipse(self, x1, y1, x2, y2, stroke=1, fill=0):
        """Draw an ellipse defined by an enclosing rectangle.

        Note that (x1,y1) and (x2,y2) are the corner points of
        the enclosing rectangle.
        """
        # The path object's ellipse() takes (x, y, width, height), hence the
        # x2-x1 / y2-y1 conversion from corner points.
        pathobject.PDFPathObject(code=self._code).ellipse(x1, y1, x2-x1, y2-y1)
        self._strokeAndFill(stroke, fill)
def wedge(self, x1,y1, x2,y2, startAng, extent, stroke=1, fill=0):
"""Like arc, but connects to the centre of the ellipse.
Most useful for pie charts and PacMan!"""
p = pathobject.PDFPathObject(code=self._code)
p.moveTo(0.5*(x1+x2),0.5*(y1+y2))
p.arcTo(x1,y1,x2,y2,startAng,extent)
p.close()
self._strokeAndFill(stroke,fill)
def circle(self, x_cen, y_cen, r, stroke=1, fill=0):
"""draw a cirle centered at (x_cen,y_cen) with radius r (special case of ellipse)"""
x1 = x_cen - r
x2 = x_cen + r
y1 = y_cen - r
y2 = y_cen + r
self.ellipse(x1, y1, x2, y2, stroke, fill)
    def roundRect(self, x, y, width, height, radius, stroke=1, fill=0):
        """Draws a rectangle with rounded corners. The corners are
        approximately quadrants of a circle, with the given radius."""
        #make the path operators draw into our code
        pathobject.PDFPathObject(code=self._code).roundRect(x, y, width, height, radius)
        self._strokeAndFill(stroke,fill)
    def _addShading(self, shading):
        """Register a shading object with the document and record its resource
        name so it is included in the page's resource dictionary."""
        name = self._doc.addShading(shading)
        self._shadingUsed[name] = name
        return name
    def shade(self, shading):
        """Paint the given shading object over the current clip region using
        the PDF 'sh' operator."""
        name = self._addShading(shading)
        self._code.append('/%s sh' % name)
def linearGradient(self, x0, y0, x1, y1, colors, positions=None, extend=True):
#this code contributed by Peter Johnson <johnson.peter@gmail.com>
from reportlab.pdfbase.pdfdoc import PDFAxialShading
colorSpace, ncolors = _normalizeColors(colors)
fcn = _buildColorFunction(ncolors, positions)
if extend:
extendStr = "[true true]"
else:
extendStr = "[false false]"
shading = PDFAxialShading(x0, y0, x1, y1, Function=fcn,
ColorSpace=colorSpace, Extend=extendStr)
self.shade(shading)
def radialGradient(self, x, y, radius, colors, positions=None, extend=True):
#this code contributed by Peter Johnson <johnson.peter@gmail.com>
from reportlab.pdfbase.pdfdoc import PDFRadialShading
colorSpace, ncolors = _normalizeColors(colors)
fcn = _buildColorFunction(ncolors, positions)
if extend:
extendStr = "[true true]"
else:
extendStr = "[false false]"
shading = PDFRadialShading(x, y, 0.0, x, y, radius, Function=fcn,
ColorSpace=colorSpace, Extend=extendStr)
self.shade(shading)
##################################################
#
# Text methods
#
# As with graphics, a separate object ensures that
# everything is bracketed between text operators.
# The methods below are a high-level convenience.
# use PDFTextObject for multi-line text.
##################################################
    def drawString(self, x, y, text, mode=None):
        """Draws a string in the current text styles."""
        # On Python 3, accept bytes by decoding as UTF-8.
        if sys.version_info[0] == 3 and not isinstance(text, str):
            text = text.decode('utf-8')
        #we could inline this for speed if needed
        t = self.beginText(x, y)
        if mode is not None: t.setTextRenderMode(mode)
        t.textLine(text)
        self.drawText(t)
    def drawRightString(self, x, y, text, mode=None):
        """Draws a string right-aligned with the x coordinate"""
        # On Python 3, accept bytes by decoding as UTF-8.
        if sys.version_info[0] == 3 and not isinstance(text, str):
            text = text.decode('utf-8')
        # Shift left by the rendered width so the string ends at x.
        width = self.stringWidth(text, self._fontname, self._fontsize)
        t = self.beginText(x - width, y)
        if mode is not None: t.setTextRenderMode(mode)
        t.textLine(text)
        self.drawText(t)
def drawCentredString(self, x, y, text,mode=None):
"""Draws a string centred on the x coordinate.
We're British, dammit, and proud of our spelling!"""
if sys.version_info[0] == 3 and not isinstance(text, str):
text = text.decode('utf-8')
width = self.stringWidth(text, self._fontname, self._fontsize)
t = self.beginText(x - 0.5*width, y)
if mode is not None: t.setTextRenderMode(mode)
t.textLine(text)
self.drawText(t)
    def drawAlignedString(self, x, y, text, pivotChar="."):
        """Draws a string aligned on the first '.' (or other pivot character).
        The centre position of the pivot character will be used as x.
        So, you could draw a straight line down through all the decimals in a
        column of numbers, and anything without a decimal should be
        optically aligned with those that have.

        There is one special rule to help with accounting formatting. Here's
        how normal numbers should be aligned on the 'dot'. Look at the
        LAST two::

           12,345,67
              987.15
               42
           -1,234.56
             (456.78)
             (456)
               27 inches
               13cm

        Since the last three do not contain a dot, a crude dot-finding
        rule would place them wrong. So we test for the special case
        where no pivot is found, digits are present, but the last character
        is not a digit.  We then work back from the end of the string
        This case is a tad slower but hopefully rare.
        """
        parts = text.split(pivotChar,1)
        pivW = self.stringWidth(pivotChar, self._fontname, self._fontsize)
        # Special case: no pivot, digits present, trailing non-digit suffix
        # (e.g. "(456)" or "27 inches") -- align on the last digit instead.
        if len(parts) == 1 and digitPat.search(text) is not None and text[-1] not in digits:
            #we have no decimal but it ends in a bracket, or 'in' or something.
            #the cut should be after the last digit.
            leftText = parts[0][0:-1]
            rightText = parts[0][-1]
            #any more?
            # Safe to walk back: digitPat guarantees a digit exists, so the
            # loop terminates when it reaches the last digit.
            while leftText[-1] not in digits:
                rightText = leftText[-1] + rightText
                leftText = leftText[0:-1]
            self.drawRightString(x-0.5*pivW, y, leftText)
            self.drawString(x-0.5*pivW, y, rightText)
        else:
            #normal case
            leftText = parts[0]
            self.drawRightString(x-0.5*pivW, y, leftText)
            if len(parts) > 1:
                rightText = pivotChar + parts[1]
                self.drawString(x-0.5*pivW, y, rightText)
def getAvailableFonts(self):
"""Returns the list of PostScript font names available.
Standard set now, but may grow in future with font embedding."""
fontnames = self._doc.getAvailableFonts()
fontnames.sort()
return fontnames
    def addFont(self, fontObj):
        "add a new font for subsequent use."
        self._doc.addFont(fontObj)
    def _addStandardFonts(self):
        """Ensures the standard 14 fonts are available in the system encoding.
        Called by canvas on initialization"""
        for fontName in pdfmetrics.standardFonts:
            self.addFont(pdfmetrics.fontsByName[fontName])
def listLoadedFonts0(self):
"Convenience function to list all loaded fonts"
names = list(pdfmetrics.widths.keys())
names.sort()
return names
    def setFont(self, psfontname, size, leading = None):
        """Sets the font. If leading not specified, defaults to 1.2 x
        font size. Raises a readable exception if an illegal font
        is supplied. Font names are case-sensitive! Keeps track
        of font name and size for metrics."""
        self._fontname = psfontname
        self._fontsize = size
        if leading is None:
            leading = size * 1.2
        self._leading = leading
        font = pdfmetrics.getFont(self._fontname)
        # Only emit Tf/TL for non-dynamic fonts; dynamic fonts are presumably
        # resolved per-glyph at draw time -- TODO confirm against pdfmetrics.
        if not font._dynamicFont:
            # NOTE(review): when text is drawn as paths, non-builtin fonts
            # appear to need no font operator -- confirm.
            if font.face.builtIn or not getattr(self,'_drawTextAsPath',False):
                pdffontname = self._doc.getInternalFontName(psfontname)
                self._code.append('BT %s %s Tf %s TL ET' % (pdffontname, fp_str(size), fp_str(leading)))
    def setFontSize(self, size=None, leading=None):
        '''Sets font size or leading without knowing the font face'''
        # Missing arguments default to the current values, so either can be
        # changed independently.
        if size is None: size = self._fontsize
        if leading is None: leading = self._leading
        self.setFont(self._fontname, size, leading)
def stringWidth(self, text, fontName=None, fontSize=None):
"gets width of a string in the given font and size"
return pdfmetrics.stringWidth(text, fontName or self._fontname,
(fontSize,self._fontsize)[fontSize is None])
# basic graphics modes
    def setLineWidth(self, width):
        """Set the stroke line width (PDF 'w' operator)."""
        self._lineWidth = width
        self._code.append('%s w' % fp_str(width))
    def setLineCap(self, mode):
        """0=butt,1=round,2=square"""
        assert mode in (0,1,2), "Line caps allowed: 0=butt,1=round,2=square"
        self._lineCap = mode
        # PDF 'J' operator sets the line cap style.
        self._code.append('%d J' % mode)
    def setLineJoin(self, mode):
        """0=mitre, 1=round, 2=bevel"""
        assert mode in (0,1,2), "Line Joins allowed: 0=mitre, 1=round, 2=bevel"
        self._lineJoin = mode
        # PDF 'j' operator sets the line join style.
        self._code.append('%d j' % mode)
    def setMiterLimit(self, limit):
        """Set the miter limit (PDF 'M' operator) controlling when mitred
        joins are cut off into bevels."""
        self._miterLimit = limit
        self._code.append('%s M' % fp_str(limit))
    def setDash(self, array=[], phase=0):
        """Two notations.  pass two numbers, or an array and phase.

        With two numbers, array is the on-length and phase the off-length;
        with a sequence, array holds alternating on/off lengths and phase is
        the offset into the pattern.  An empty array resets to solid lines.
        """
        # NOTE(review): mutable default is never mutated here, so it is safe,
        # but a tuple default would be more conventional.
        if isinstance(array,(int,float)):
            self._code.append('[%s %s] 0 d' % (array, phase))
        elif isSeq(array):
            assert phase >= 0, "phase is a length in user space"
            textarray = ' '.join([str(s) for s in array])
            self._code.append('[%s] %s d' % (textarray, phase))
        # NOTE(review): any other array type is silently ignored -- confirm
        # this best-effort behaviour is intended.
# path stuff - the separate path object builds it
    def beginPath(self):
        """Returns a fresh path object.  Paths are used to draw
        complex figures.  The object returned follows the protocol
        for a pathobject.PDFPathObject instance"""
        return pathobject.PDFPathObject()
    def drawPath(self, aPath, stroke=1, fill=0):
        "Draw the path object in the mode indicated"
        # Append the accumulated path operators, then the painting operator.
        self._code.append(str(aPath.getCode()))
        self._strokeAndFill(stroke,fill)
    def _strokeAndFill(self,stroke,fill):
        # Emit the painting operator for the requested stroke/fill combination
        # under the current fill rule.
        self._code.append(PATH_OPS[stroke, fill, self._fillMode])
def clipPath(self, aPath, stroke=1, fill=0):
"clip as well as drawing"
gc = aPath.getCode(); pathops = PATH_OPS[stroke, fill, self._fillMode]
clip = (self._fillMode == FILL_EVEN_ODD and ' W* ' or ' W ')
item = "%s%s%s" % (gc, clip, pathops) # ensure string conversion
self._code.append(item)
#self._code.append( aPath.getCode()
# + (self._fillMode == FILL_EVEN_ODD and ' W* ' or ' W ')
# + PATH_OPS[stroke,fill,self._fillMode])
    def beginText(self, x=0, y=0):
        """Returns a fresh text object positioned at (x, y).  Text objects are
        used to add large amounts of text.  See textobject.PDFTextObject"""
        return textobject.PDFTextObject(self, x, y)
    def drawText(self, aTextObject):
        """Draws a text object by appending its accumulated operators."""
        self._code.append(str(aTextObject.getCode()))
def setPageCompression(self, pageCompression=1):
"""Possible values None, 1 or 0
If None the value from rl_config will be used.
If on, the page data will be compressed, leading to much
smaller files, but takes a little longer to create the files.
This applies to all subsequent pages, or until setPageCompression()
is next called."""
if pageCompression is None: pageCompression = rl_config.pageCompression
if pageCompression and not zlib:
self._pageCompression = 0
else:
self._pageCompression = pageCompression
self._doc.setCompression(self._pageCompression)
    def setPageDuration(self, duration=None):
        """Allows hands-off animation of presentations :-)

        If this is set to a number, in full screen mode, Acrobat Reader
        will advance to the next page after this many seconds. The
        duration of the transition itself (fade/flicker etc.) is controlled
        by the 'duration' argument to setPageTransition; this controls
        the time spent looking at the page.  This is effective for all
        subsequent pages."""
        self._pageDuration = duration
    def setPageTransition(self, effectname=None, duration=1,
                        direction=0,dimension='H',motion='I'):
        """PDF allows page transition effects for use when giving
        presentations.  There are six possible effects.  You can
        just give the effect name, or supply more advanced options
        to refine the way it works.  There are three types of extra
        argument permitted, and here are the allowed values::

            direction_arg = [0,90,180,270]
            dimension_arg = ['H', 'V']
            motion_arg = ['I','O'] (start at inside or outside)

        This table says which ones take which arguments::

            PageTransitionEffects = {
                'Split': [direction_arg, motion_arg],
                'Blinds': [dimension_arg],
                'Box': [motion_arg],
                'Wipe' : [direction_arg],
                'Dissolve' : [],
                'Glitter':[direction_arg]
                }

        Have fun!
        """
        # This builds a Python dictionary with the right arguments
        # for the Trans dictionary in the PDFPage object,
        # and stores it in the variable _pageTransition.
        # showPage later passes this to the setPageTransition method
        # of the PDFPage object, which turns it to a PDFDictionary.
        self._pageTransition = {}
        if not effectname:
            return
        #first check each optional argument has an allowed value
        if direction in [0,90,180,270]:
            direction_arg = ('Di', '/%d' % direction)
        else:
            raise pdfdoc.PDFError(' directions allowed are 0,90,180,270')
        if dimension in ['H', 'V']:
            dimension_arg = ('Dm', '/' + dimension)
        else:
            raise pdfdoc.PDFError('dimension values allowed are H and V')
        if motion in ['I','O']:
            motion_arg = ('M', '/' + motion)
        else:
            raise pdfdoc.PDFError('motion values allowed are I and O')
        # this says which effects require which argument types from above
        PageTransitionEffects = {
            'Split': [direction_arg, motion_arg],
            'Blinds': [dimension_arg],
            'Box': [motion_arg],
            'Wipe' : [direction_arg],
            'Dissolve' : [],
            'Glitter':[direction_arg]
            }
        try:
            args = PageTransitionEffects[effectname]
        except KeyError:
            raise pdfdoc.PDFError('Unknown Effect Name "%s"' % effectname)
        # now build the dictionary
        # NOTE(review): duration is formatted with %d, so fractional
        # durations are truncated -- confirm this is intended.
        transDict = {}
        transDict['Type'] = '/Trans'
        transDict['D'] = '%d' % duration
        transDict['S'] = '/' + effectname
        for (key, value) in args:
            transDict[key] = value
        self._pageTransition = transDict
    def getCurrentPageContent(self):
        """Return uncompressed contents of current page buffer.

        This is useful in creating test cases and assertions of what
        got drawn, without necessarily saving pages to disk"""
        return '\n'.join(self._code)
    def setViewerPreference(self,pref,value):
        '''set one of the allowed entries in the document's viewer preferences'''
        catalog = self._doc.Catalog
        VP = getattr(catalog,'ViewerPreferences',None)
        # Lazily create the ViewerPreferences dictionary on first use.
        if VP is None:
            from reportlab.pdfbase.pdfdoc import ViewerPreferencesPDFDictionary
            VP = catalog.ViewerPreferences = ViewerPreferencesPDFDictionary()
        VP[pref] = value
    def getViewerPreference(self,pref):
        '''you'll get an error here if none have been set'''
        return self._doc.Catalog.ViewerPreferences[pref]
    def delViewerPreference(self,pref):
        '''you'll get an error here if none have been set'''
        del self._doc.Catalog.ViewerPreferences[pref]
    def addPageLabel(self, pageNum, style=None, start=None, prefix=None):
        '''add a PDFPageLabel for pageNum'''
        catalog = self._doc.Catalog
        PL = getattr(catalog,'PageLabels',None)
        # Lazily create the PageLabels number tree on first use.
        if PL is None:
            from reportlab.pdfbase.pdfdoc import PDFPageLabels
            PL = catalog.PageLabels = PDFPageLabels()

        from reportlab.pdfbase.pdfdoc import PDFPageLabel
        PL.addPageLabel(pageNum,PDFPageLabel(style,start,prefix))
if __name__ == '__main__':
    # No inline demo; the exercised examples live in the tests directory.
    print('For test scripts, look in tests')
|
unknown
|
codeparrot/codeparrot-clean
| ||
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import os
from types import NoneType
from ansible.errors import AnsibleParserError
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject
def load_list_of_blocks(ds, parent_block=None, role=None, task_include=None, use_handlers=False, variable_manager=None, loader=None):
    '''
    Turn a list of mixed task/block datastructures (parsed from YAML) into
    Block() objects.  Implicit blocks are created for each bare Task.
    '''

    # imported here to break a circular dependency with ansible.playbook
    from ansible.playbook.block import Block

    assert type(ds) in (list, NoneType)

    if not ds:
        return []
    return [
        Block.load(
            entry,
            parent_block=parent_block,
            role=role,
            task_include=task_include,
            use_handlers=use_handlers,
            variable_manager=variable_manager,
            loader=loader,
        )
        for entry in ds
    ]
def load_list_of_tasks(ds, block=None, role=None, task_include=None, use_handlers=False, variable_manager=None, loader=None):
    '''
    Given a list of task datastructures (parsed from YAML),
    return a list of Task() or Handler() objects.

    Raises AnsibleParserError if an entry is not a dictionary.
    '''

    # we import here to prevent a circular dependency with imports
    from ansible.playbook.handler import Handler
    from ansible.playbook.task import Task

    assert type(ds) == list

    # Cleanup: removed a dead `if True:` wrapper and a large commented-out
    # TaskInclude branch left over from an earlier refactor; behaviour for
    # the remaining path is unchanged.
    task_list = []
    for task in ds:
        if not isinstance(task, dict):
            raise AnsibleParserError("task/handler entries must be dictionaries (got a %s)" % type(task), obj=ds)

        # Handlers and tasks share the same loading interface; only the
        # class differs.
        task_class = Handler if use_handlers else Task
        t = task_class.load(task, block=block, role=role, task_include=task_include, variable_manager=variable_manager, loader=loader)
        task_list.append(t)

    return task_list
def load_list_of_roles(ds, current_role_path=None, variable_manager=None, loader=None):
    '''
    Build and return RoleInclude objects from a list of role definitions.
    '''

    # imported here to break a circular dependency with ansible.playbook
    from ansible.playbook.role.include import RoleInclude

    assert isinstance(ds, list)

    return [
        RoleInclude.load(definition, current_role_path=current_role_path,
                         variable_manager=variable_manager, loader=loader)
        for definition in ds
    ]
def compile_block_list(block_list):
    '''
    Flatten a list of blocks into a single list of tasks by concatenating
    each block's compile() output in order.
    '''
    return [task for blk in block_list for task in blk.compile()]
|
unknown
|
codeparrot/codeparrot-clean
| ||
# -*- coding: utf-8 -*-
from gettext import gettext as _

from bbcalc.utils import KILOGRAMS, POUNDS, MALE, FEMALE, METRIC
from bbcalc.utils.unitconvertor import kg2lb, cm2in

# Constants for the lean-body-mass formula ('The Bodyfat Guide', Ron Brown).
# The formula works in imperial units: bodyfat_calc converts metric inputs
# via cm2in/kg2lb before applying these coefficients.
MALECOEF = 98.42        # base lean-mass constant for men
FEMALECOEF = 76.76      # base lean-mass constant for women
WAISTCOEF = 4.15        # per-inch waist weighting
WEIGHTCOEF = 1.082      # per-pound body-weight weighting
CALORIESCOEF = 13.83    # estimated daily calories per unit of lean mass
def bodyfat_calc(waist, weight, waist_unit=METRIC, weight_unit=METRIC, gender=MALE, precision=2):
    """Body Fat calculator.

    This formula comes from 'The Bodyfat Guide' by Ron Brown.  It works on
    imperial units internally; metric inputs are converted first.

    Returns a dict with keys 'bodyfat' (percent), 'fatweight', 'lbm'
    (lean body mass), 'calories' and 'classification'.
    """
    if waist_unit == METRIC:
        waist = cm2in(float(waist))
    if weight_unit == METRIC:
        weight = kg2lb(float(weight))

    waist = float(waist)
    weight = float(weight)
    diff = weight * WEIGHTCOEF - waist * WAISTCOEF
    if gender == FEMALE:
        lbm = diff + FEMALECOEF
    else:
        lbm = diff + MALECOEF
    fatweight = weight - lbm
    bodyfat = fatweight / weight * 100.0

    # NOTE(review): the factor choice looks inverted (POUNDS when input was
    # metric) but is kept as-is to preserve existing output -- confirm.
    if weight_unit == METRIC:
        factor = POUNDS
    else:
        factor = KILOGRAMS

    bodyfat = round(bodyfat, precision)
    fatweight = round(fatweight / factor, precision)
    lbm = round(lbm / factor, precision)
    calories = round((lbm * factor) * CALORIESCOEF, 0)

    # Bug fix: the original strict '>'/'<' chains left `classification`
    # unbound when bodyfat landed exactly on a band boundary (5.0, 14.0, ...),
    # raising NameError at the return.  Half-open bands now cover all values,
    # with boundary values falling into the higher band.
    if gender == MALE:
        bands = ((5.0, _(u'Essential Fat')), (14.0, _(u'Athletes')),
                 (18.0, _(u'Fitness')), (25.0, _(u'Acceptable')))
    else:
        bands = ((14.0, _(u'Essential Fat')), (21.0, _(u'Athletes')),
                 (25.0, _(u'Fitness')), (32.0, _(u'Acceptable')))
    classification = _(u'Obese')
    for upper, label in bands:
        if bodyfat < upper:
            classification = label
            break

    return {'bodyfat' : bodyfat, 'fatweight' : fatweight, 'lbm' : lbm,
            'calories' : calories, 'classification' : classification}
|
unknown
|
codeparrot/codeparrot-clean
| ||
/**
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import {
HIRFunction,
InstructionId,
Place,
PrunedReactiveScopeBlock,
ReactiveBlock,
ReactiveFunction,
ReactiveInstruction,
ReactiveScopeBlock,
ReactiveStatement,
ReactiveTerminal,
ReactiveTerminalStatement,
ReactiveValue,
} from '../HIR/HIR';
import {
eachInstructionLValue,
eachInstructionValueOperand,
eachTerminalOperand,
} from '../HIR/visitors';
import {assertExhaustive} from '../Utils/utils';
/**
 * Entry point for visiting a ReactiveFunction: walks its body with the
 * supplied visitor, threading `state` through every callback.
 */
export function visitReactiveFunction<TState>(
  fn: ReactiveFunction,
  visitor: ReactiveFunctionVisitor<TState>,
  state: TState,
): void {
  visitor.visitBlock(fn.body, state);
}
/**
 * Base visitor over the reactive-function IR.  `visitX` methods are the
 * override points; each default delegates to the matching `traverseX`,
 * which recurses into children.  Overrides that still want recursion
 * should call the corresponding `traverseX` themselves.
 */
export class ReactiveFunctionVisitor<TState = void> {
  visitID(_id: InstructionId, _state: TState): void {}
  visitParam(_place: Place, _state: TState): void {}
  visitLValue(_id: InstructionId, _lvalue: Place, _state: TState): void {}
  visitPlace(_id: InstructionId, _place: Place, _state: TState): void {}
  visitReactiveFunctionValue(
    _id: InstructionId,
    _dependencies: Array<Place>,
    _fn: ReactiveFunction,
    _state: TState,
  ): void {}

  visitValue(id: InstructionId, value: ReactiveValue, state: TState): void {
    this.traverseValue(id, value, state);
  }
  traverseValue(id: InstructionId, value: ReactiveValue, state: TState): void {
    switch (value.kind) {
      case 'OptionalExpression': {
        this.visitValue(id, value.value, state);
        break;
      }
      case 'LogicalExpression': {
        this.visitValue(id, value.left, state);
        this.visitValue(id, value.right, state);
        break;
      }
      case 'ConditionalExpression': {
        this.visitValue(id, value.test, state);
        this.visitValue(id, value.consequent, state);
        this.visitValue(id, value.alternate, state);
        break;
      }
      case 'SequenceExpression': {
        for (const instr of value.instructions) {
          this.visitInstruction(instr, state);
        }
        this.visitValue(value.id, value.value, state);
        break;
      }
      default: {
        // Leaf values: visit each operand place.
        for (const place of eachInstructionValueOperand(value)) {
          this.visitPlace(id, place, state);
        }
      }
    }
  }

  visitInstruction(instruction: ReactiveInstruction, state: TState): void {
    this.traverseInstruction(instruction, state);
  }
  traverseInstruction(instruction: ReactiveInstruction, state: TState): void {
    this.visitID(instruction.id, state);
    for (const operand of eachInstructionLValue(instruction)) {
      this.visitLValue(instruction.id, operand, state);
    }
    this.visitValue(instruction.id, instruction.value, state);
  }

  visitTerminal(stmt: ReactiveTerminalStatement, state: TState): void {
    this.traverseTerminal(stmt, state);
  }
  traverseTerminal(stmt: ReactiveTerminalStatement, state: TState): void {
    const {terminal} = stmt;
    if (terminal.id !== null) {
      this.visitID(terminal.id, state);
    }
    switch (terminal.kind) {
      case 'break':
      case 'continue': {
        break;
      }
      case 'return': {
        this.visitPlace(terminal.id, terminal.value, state);
        break;
      }
      case 'throw': {
        this.visitPlace(terminal.id, terminal.value, state);
        break;
      }
      case 'for': {
        this.visitValue(terminal.id, terminal.init, state);
        this.visitValue(terminal.id, terminal.test, state);
        this.visitBlock(terminal.loop, state);
        if (terminal.update !== null) {
          this.visitValue(terminal.id, terminal.update, state);
        }
        break;
      }
      case 'for-of': {
        this.visitValue(terminal.id, terminal.init, state);
        this.visitValue(terminal.id, terminal.test, state);
        this.visitBlock(terminal.loop, state);
        break;
      }
      case 'for-in': {
        this.visitValue(terminal.id, terminal.init, state);
        this.visitBlock(terminal.loop, state);
        break;
      }
      case 'do-while': {
        this.visitBlock(terminal.loop, state);
        this.visitValue(terminal.id, terminal.test, state);
        break;
      }
      case 'while': {
        this.visitValue(terminal.id, terminal.test, state);
        this.visitBlock(terminal.loop, state);
        break;
      }
      case 'if': {
        this.visitPlace(terminal.id, terminal.test, state);
        this.visitBlock(terminal.consequent, state);
        if (terminal.alternate !== null) {
          this.visitBlock(terminal.alternate, state);
        }
        break;
      }
      case 'switch': {
        this.visitPlace(terminal.id, terminal.test, state);
        for (const case_ of terminal.cases) {
          if (case_.test !== null) {
            this.visitPlace(terminal.id, case_.test, state);
          }
          if (case_.block !== undefined) {
            this.visitBlock(case_.block, state);
          }
        }
        break;
      }
      case 'label': {
        this.visitBlock(terminal.block, state);
        break;
      }
      case 'try': {
        this.visitBlock(terminal.block, state);
        /*
         * Fix: visit the catch binding as a Place, consistent with
         * ReactiveFunctionTransform.traverseTerminal's 'try' case;
         * previously it was silently skipped here.
         */
        if (terminal.handlerBinding !== null) {
          this.visitPlace(terminal.id, terminal.handlerBinding, state);
        }
        this.visitBlock(terminal.handler, state);
        break;
      }
      default: {
        assertExhaustive(
          terminal,
          `Unexpected terminal kind \`${(terminal as any).kind}\``,
        );
      }
    }
  }

  visitScope(scope: ReactiveScopeBlock, state: TState): void {
    this.traverseScope(scope, state);
  }
  traverseScope(scope: ReactiveScopeBlock, state: TState): void {
    this.visitBlock(scope.instructions, state);
  }

  visitPrunedScope(scopeBlock: PrunedReactiveScopeBlock, state: TState): void {
    this.traversePrunedScope(scopeBlock, state);
  }
  traversePrunedScope(
    scopeBlock: PrunedReactiveScopeBlock,
    state: TState,
  ): void {
    this.visitBlock(scopeBlock.instructions, state);
  }

  visitBlock(block: ReactiveBlock, state: TState): void {
    this.traverseBlock(block, state);
  }
  traverseBlock(block: ReactiveBlock, state: TState): void {
    for (const instr of block) {
      switch (instr.kind) {
        case 'instruction': {
          this.visitInstruction(instr.instruction, state);
          break;
        }
        case 'scope': {
          this.visitScope(instr, state);
          break;
        }
        case 'pruned-scope': {
          this.visitPrunedScope(instr, state);
          break;
        }
        case 'terminal': {
          this.visitTerminal(instr, state);
          break;
        }
        default: {
          assertExhaustive(
            instr,
            `Unexpected instruction kind \`${(instr as any).kind}\``,
          );
        }
      }
    }
  }

  /**
   * Visits a (non-reactive) HIR function: params, every instruction in every
   * basic block (recursing into nested function expressions / object
   * methods), and each terminal's operands.
   */
  visitHirFunction(fn: HIRFunction, state: TState): void {
    for (const param of fn.params) {
      const place = param.kind === 'Identifier' ? param : param.place;
      this.visitParam(place, state);
    }
    for (const [, block] of fn.body.blocks) {
      for (const instr of block.instructions) {
        this.visitInstruction(instr, state);
        if (
          instr.value.kind === 'FunctionExpression' ||
          instr.value.kind === 'ObjectMethod'
        ) {
          this.visitHirFunction(instr.value.loweredFunc.func, state);
        }
      }
      for (const operand of eachTerminalOperand(block.terminal)) {
        this.visitPlace(block.terminal.id, operand, state);
      }
    }
  }
}
/** Result of transforming a ReactiveValue: keep it, or substitute another. */
export type TransformedValue =
  | {kind: 'keep'}
  | {kind: 'replace'; value: ReactiveValue};

/**
 * Result of transforming a statement: drop it, keep it, or replace it with
 * one or several statements.
 */
export type Transformed<T> =
  | {kind: 'remove'}
  | {kind: 'keep'}
  | {kind: 'replace'; value: T}
  | {kind: 'replace-many'; value: Array<T>};
/**
 * Visitor variant that can rewrite the tree in place.  `transformX` hooks
 * return a Transformed<...> verdict ('keep' by default); the overridden
 * traversals splice replacements back into the parent structure.
 */
export class ReactiveFunctionTransform<
  TState = void,
> extends ReactiveFunctionVisitor<TState> {
  override traverseBlock(block: ReactiveBlock, state: TState): void {
    /*
     * Copy-on-write: `nextBlock` stays null until the first statement is
     * removed or replaced; from then on it accumulates the rewritten prefix
     * and is swapped into `block` at the end.
     */
    let nextBlock: ReactiveBlock | null = null;
    for (let i = 0; i < block.length; i++) {
      const instr = block[i]!;
      let transformed: Transformed<ReactiveStatement>;
      switch (instr.kind) {
        case 'instruction': {
          transformed = this.transformInstruction(instr.instruction, state);
          break;
        }
        case 'scope': {
          transformed = this.transformScope(instr, state);
          break;
        }
        case 'pruned-scope': {
          transformed = this.transformPrunedScope(instr, state);
          break;
        }
        case 'terminal': {
          transformed = this.transformTerminal(instr, state);
          break;
        }
        default: {
          assertExhaustive(
            instr,
            `Unexpected instruction kind \`${(instr as any).kind}\``,
          );
        }
      }
      switch (transformed.kind) {
        case 'keep': {
          // Only copy when an earlier edit already forced a rebuild.
          if (nextBlock !== null) {
            nextBlock.push(instr);
          }
          break;
        }
        case 'remove': {
          if (nextBlock === null) {
            nextBlock = block.slice(0, i);
          }
          break;
        }
        case 'replace': {
          nextBlock ??= block.slice(0, i);
          nextBlock.push(transformed.value);
          break;
        }
        case 'replace-many': {
          nextBlock ??= block.slice(0, i);
          nextBlock.push(...transformed.value);
          break;
        }
      }
    }
    // Mutate `block` in place so the parent keeps the same array identity.
    if (nextBlock !== null) {
      block.length = 0;
      block.push(...nextBlock);
    }
  }

  // Default transform hooks: recurse via the visitor and keep the node.
  transformInstruction(
    instruction: ReactiveInstruction,
    state: TState,
  ): Transformed<ReactiveStatement> {
    this.visitInstruction(instruction, state);
    return {kind: 'keep'};
  }

  transformTerminal(
    stmt: ReactiveTerminalStatement,
    state: TState,
  ): Transformed<ReactiveStatement> {
    this.visitTerminal(stmt, state);
    return {kind: 'keep'};
  }

  transformScope(
    scope: ReactiveScopeBlock,
    state: TState,
  ): Transformed<ReactiveStatement> {
    this.visitScope(scope, state);
    return {kind: 'keep'};
  }

  transformPrunedScope(
    scope: PrunedReactiveScopeBlock,
    state: TState,
  ): Transformed<ReactiveStatement> {
    this.visitPrunedScope(scope, state);
    return {kind: 'keep'};
  }

  transformValue(
    id: InstructionId,
    value: ReactiveValue,
    state: TState,
  ): TransformedValue {
    this.visitValue(id, value, state);
    return {kind: 'keep'};
  }

  transformReactiveFunctionValue(
    id: InstructionId,
    dependencies: Array<Place>,
    fn: ReactiveFunction,
    state: TState,
  ): {kind: 'keep'} | {kind: 'replace'; value: ReactiveFunction} {
    this.visitReactiveFunctionValue(id, dependencies, fn, state);
    return {kind: 'keep'};
  }

  // Same shape as the base traversal, but each child value is routed
  // through transformValue and spliced back if replaced.
  override traverseValue(
    id: InstructionId,
    value: ReactiveValue,
    state: TState,
  ): void {
    switch (value.kind) {
      case 'OptionalExpression': {
        const nextValue = this.transformValue(id, value.value, state);
        if (nextValue.kind === 'replace') {
          value.value = nextValue.value;
        }
        break;
      }
      case 'LogicalExpression': {
        const left = this.transformValue(id, value.left, state);
        if (left.kind === 'replace') {
          value.left = left.value;
        }
        const right = this.transformValue(id, value.right, state);
        if (right.kind === 'replace') {
          value.right = right.value;
        }
        break;
      }
      case 'ConditionalExpression': {
        const test = this.transformValue(id, value.test, state);
        if (test.kind === 'replace') {
          value.test = test.value;
        }
        const consequent = this.transformValue(id, value.consequent, state);
        if (consequent.kind === 'replace') {
          value.consequent = consequent.value;
        }
        const alternate = this.transformValue(id, value.alternate, state);
        if (alternate.kind === 'replace') {
          value.alternate = alternate.value;
        }
        break;
      }
      case 'SequenceExpression': {
        for (const instr of value.instructions) {
          this.visitInstruction(instr, state);
        }
        const nextValue = this.transformValue(value.id, value.value, state);
        if (nextValue.kind === 'replace') {
          value.value = nextValue.value;
        }
        break;
      }
      default: {
        for (const place of eachInstructionValueOperand(value)) {
          this.visitPlace(id, place, state);
        }
      }
    }
  }

  override traverseInstruction(
    instruction: ReactiveInstruction,
    state: TState,
  ): void {
    this.visitID(instruction.id, state);
    for (const operand of eachInstructionLValue(instruction)) {
      this.visitLValue(instruction.id, operand, state);
    }
    const nextValue = this.transformValue(
      instruction.id,
      instruction.value,
      state,
    );
    if (nextValue.kind === 'replace') {
      instruction.value = nextValue.value;
    }
  }

  // Same shape as the base terminal traversal, with nested ReactiveValues
  // routed through transformValue and spliced back if replaced.
  override traverseTerminal(
    stmt: ReactiveTerminalStatement,
    state: TState,
  ): void {
    const {terminal} = stmt;
    if (terminal.id !== null) {
      this.visitID(terminal.id, state);
    }
    switch (terminal.kind) {
      case 'break':
      case 'continue': {
        break;
      }
      case 'return': {
        this.visitPlace(terminal.id, terminal.value, state);
        break;
      }
      case 'throw': {
        this.visitPlace(terminal.id, terminal.value, state);
        break;
      }
      case 'for': {
        const init = this.transformValue(terminal.id, terminal.init, state);
        if (init.kind === 'replace') {
          terminal.init = init.value;
        }
        const test = this.transformValue(terminal.id, terminal.test, state);
        if (test.kind === 'replace') {
          terminal.test = test.value;
        }
        if (terminal.update !== null) {
          const update = this.transformValue(
            terminal.id,
            terminal.update,
            state,
          );
          if (update.kind === 'replace') {
            terminal.update = update.value;
          }
        }
        this.visitBlock(terminal.loop, state);
        break;
      }
      case 'for-of': {
        const init = this.transformValue(terminal.id, terminal.init, state);
        if (init.kind === 'replace') {
          terminal.init = init.value;
        }
        const test = this.transformValue(terminal.id, terminal.test, state);
        if (test.kind === 'replace') {
          terminal.test = test.value;
        }
        this.visitBlock(terminal.loop, state);
        break;
      }
      case 'for-in': {
        const init = this.transformValue(terminal.id, terminal.init, state);
        if (init.kind === 'replace') {
          terminal.init = init.value;
        }
        this.visitBlock(terminal.loop, state);
        break;
      }
      case 'do-while': {
        this.visitBlock(terminal.loop, state);
        const test = this.transformValue(terminal.id, terminal.test, state);
        if (test.kind === 'replace') {
          terminal.test = test.value;
        }
        break;
      }
      case 'while': {
        const test = this.transformValue(terminal.id, terminal.test, state);
        if (test.kind === 'replace') {
          terminal.test = test.value;
        }
        this.visitBlock(terminal.loop, state);
        break;
      }
      case 'if': {
        this.visitPlace(terminal.id, terminal.test, state);
        this.visitBlock(terminal.consequent, state);
        if (terminal.alternate !== null) {
          this.visitBlock(terminal.alternate, state);
        }
        break;
      }
      case 'switch': {
        this.visitPlace(terminal.id, terminal.test, state);
        for (const case_ of terminal.cases) {
          if (case_.test !== null) {
            this.visitPlace(terminal.id, case_.test, state);
          }
          if (case_.block !== undefined) {
            this.visitBlock(case_.block, state);
          }
        }
        break;
      }
      case 'label': {
        this.visitBlock(terminal.block, state);
        break;
      }
      case 'try': {
        this.visitBlock(terminal.block, state);
        if (terminal.handlerBinding !== null) {
          this.visitPlace(terminal.id, terminal.handlerBinding, state);
        }
        this.visitBlock(terminal.handler, state);
        break;
      }
      default: {
        assertExhaustive(
          terminal,
          `Unexpected terminal kind \`${(terminal as any).kind}\``,
        );
      }
    }
  }
}
/*
 * Yields every Place operand of a ReactiveValue, recursing through the
 * compound value kinds (optional/logical/sequence/conditional expressions)
 * and delegating leaf values to eachInstructionValueOperand.
 */
export function* eachReactiveValueOperand(
  instrValue: ReactiveValue,
): Iterable<Place> {
  if (instrValue.kind === 'OptionalExpression') {
    yield* eachReactiveValueOperand(instrValue.value);
  } else if (instrValue.kind === 'LogicalExpression') {
    yield* eachReactiveValueOperand(instrValue.left);
    yield* eachReactiveValueOperand(instrValue.right);
  } else if (instrValue.kind === 'ConditionalExpression') {
    yield* eachReactiveValueOperand(instrValue.test);
    yield* eachReactiveValueOperand(instrValue.consequent);
    yield* eachReactiveValueOperand(instrValue.alternate);
  } else if (instrValue.kind === 'SequenceExpression') {
    // Operands of the inner instructions come first, then the operands of
    // the sequence's final value.
    for (const instr of instrValue.instructions) {
      yield* eachReactiveValueOperand(instr.value);
    }
    yield* eachReactiveValueOperand(instrValue.value);
  } else {
    // Leaf instruction value: defer to the generic operand iterator.
    yield* eachInstructionValueOperand(instrValue);
  }
}
/*
 * Applies `fn` to each block directly nested in `terminal`, storing the
 * returned block back into the terminal. Does not recurse into terminals
 * inside those blocks.
 */
export function mapTerminalBlocks(
  terminal: ReactiveTerminal,
  fn: (block: ReactiveBlock) => ReactiveBlock,
): void {
  switch (terminal.kind) {
    case 'break':
    case 'continue':
    case 'return':
    case 'throw': {
      // No nested blocks.
      break;
    }
    case 'for':
    case 'for-of':
    case 'for-in':
    case 'do-while':
    case 'while': {
      // Every loop terminal stores its body in `loop`; previously the three
      // `for*` kinds each repeated this identical assignment in their own
      // case, while `do-while`/`while` already shared one.
      terminal.loop = fn(terminal.loop);
      break;
    }
    case 'if': {
      terminal.consequent = fn(terminal.consequent);
      if (terminal.alternate !== null) {
        terminal.alternate = fn(terminal.alternate);
      }
      break;
    }
    case 'switch': {
      for (const case_ of terminal.cases) {
        // A case may share a block with its successor (undefined block).
        if (case_.block !== undefined) {
          case_.block = fn(case_.block);
        }
      }
      break;
    }
    case 'label': {
      terminal.block = fn(terminal.block);
      break;
    }
    case 'try': {
      terminal.block = fn(terminal.block);
      terminal.handler = fn(terminal.handler);
      break;
    }
    default: {
      assertExhaustive(
        terminal,
        `Unexpected terminal kind \`${(terminal as any).kind}\``,
      );
    }
  }
}
|
typescript
|
github
|
https://github.com/facebook/react
|
compiler/packages/babel-plugin-react-compiler/src/ReactiveScopes/visitors.ts
|
#ifndef EDITOR_H
#define EDITOR_H

struct repository;
struct strbuf;

/*
 * Return the program used as the editor for commit messages etc.
 * NOTE(review): the resolution order (config/env/fallback) is defined by the
 * implementation — confirm there before documenting it further.
 */
const char *git_editor(void);

/*
 * Return the program used to edit sequencer todo lists (per the name —
 * confirm against the implementation).
 */
const char *git_sequence_editor(void);

/*
 * Return non-zero when the terminal is "dumb" (presumably based on the TERM
 * environment — confirm in the implementation).
 */
int is_terminal_dumb(void);

/**
 * Launch the user preferred editor to edit a file and fill the buffer
 * with the file's contents upon the user completing their editing. The
 * third argument can be used to set the environment which the editor is
 * run in. If the buffer is NULL the editor is launched as usual but the
 * file's contents are not read into the buffer upon completion.
 */
int launch_editor(const char *path, struct strbuf *buffer,
		  const char *const *env);

/* Same as launch_editor(), but uses the sequence editor (see above). */
int launch_sequence_editor(const char *path, struct strbuf *buffer,
			   const char *const *env);

/*
 * In contrast to `launch_editor()`, this function writes out the contents
 * of the specified file first, then clears the `buffer`, then launches
 * the editor and reads back in the file contents into the `buffer`.
 * Finally, it deletes the temporary file.
 *
 * If `path` is relative, it refers to a file in the `.git` directory.
 */
int strbuf_edit_interactively(struct repository *r, struct strbuf *buffer,
			      const char *path, const char *const *env);

#endif
|
c
|
github
|
https://github.com/git/git
|
editor.h
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os.path
import sys
import tempfile
import types
import unittest
from contextlib import contextmanager
from django.template import Context, TemplateDoesNotExist
from django.template.engine import Engine
from django.test import SimpleTestCase, ignore_warnings, override_settings
from django.utils import six
from django.utils.deprecation import RemovedInDjango20Warning
from .utils import TEMPLATE_DIR
try:
import pkg_resources
except ImportError:
pkg_resources = None
class CachedLoaderTests(SimpleTestCase):
    """
    Tests for django.template.loaders.cached.Loader wrapping the filesystem
    loader: template caching, exception caching, and cache keying by dirs.
    """

    def setUp(self):
        # Wrap the filesystem loader in the cached loader so tests can
        # inspect the caches via self.engine.template_loaders[0].
        self.engine = Engine(
            dirs=[TEMPLATE_DIR],
            loaders=[
                ('django.template.loaders.cached.Loader', [
                    'django.template.loaders.filesystem.Loader',
                ]),
            ],
        )

    def test_get_template(self):
        """get_template() populates the cache and serves repeats from it."""
        template = self.engine.get_template('index.html')
        self.assertEqual(template.origin.name, os.path.join(TEMPLATE_DIR, 'index.html'))
        self.assertEqual(template.origin.template_name, 'index.html')
        self.assertEqual(template.origin.loader, self.engine.template_loaders[0].loaders[0])

        cache = self.engine.template_loaders[0].get_template_cache
        self.assertEqual(cache['index.html'], template)

        # Run a second time from cache
        template = self.engine.get_template('index.html')
        self.assertEqual(template.origin.name, os.path.join(TEMPLATE_DIR, 'index.html'))
        self.assertEqual(template.origin.template_name, 'index.html')
        self.assertEqual(template.origin.loader, self.engine.template_loaders[0].loaders[0])

    def test_get_template_missing(self):
        """Lookup failures are cached too, keyed by template name."""
        with self.assertRaises(TemplateDoesNotExist):
            self.engine.get_template('doesnotexist.html')
        e = self.engine.template_loaders[0].get_template_cache['doesnotexist.html']
        self.assertEqual(e.args[0], 'doesnotexist.html')

    @ignore_warnings(category=RemovedInDjango20Warning)
    def test_load_template(self):
        """The deprecated load_template() API also populates its cache."""
        loader = self.engine.template_loaders[0]
        template, origin = loader.load_template('index.html')
        self.assertEqual(template.origin.template_name, 'index.html')

        cache = self.engine.template_loaders[0].template_cache
        self.assertEqual(cache['index.html'][0], template)

        # Run a second time from cache. The original unpacked into unrelated
        # names (source, name) and re-asserted on the stale first `template`,
        # so the cached path was never actually checked; assert on the
        # freshly returned template instead.
        loader = self.engine.template_loaders[0]
        template, origin = loader.load_template('index.html')
        self.assertEqual(template.origin.template_name, 'index.html')

    @ignore_warnings(category=RemovedInDjango20Warning)
    def test_load_template_missing(self):
        """
        #19949 -- TemplateDoesNotExist exceptions should be cached.
        """
        loader = self.engine.template_loaders[0]

        self.assertFalse('missing.html' in loader.template_cache)

        with self.assertRaises(TemplateDoesNotExist):
            loader.load_template("missing.html")

        self.assertEqual(
            loader.template_cache["missing.html"],
            TemplateDoesNotExist,
            "Cached loader failed to cache the TemplateDoesNotExist exception",
        )

    def test_templatedir_caching(self):
        """
        #13573 -- Template directories should be part of the cache key.
        """
        # Retrieve a template specifying a template directory to check
        t1, name = self.engine.find_template('test.html', (os.path.join(TEMPLATE_DIR, 'first'),))
        # Now retrieve the same template name, but from a different directory
        t2, name = self.engine.find_template('test.html', (os.path.join(TEMPLATE_DIR, 'second'),))

        # The two templates should not have the same content
        self.assertNotEqual(t1.render(Context({})), t2.render(Context({})))
@unittest.skipUnless(pkg_resources, 'setuptools is not installed')
class EggLoaderTests(SimpleTestCase):
    """
    Tests for the deprecated eggs template loader, exercised against a mock
    egg assembled with pkg_resources (no real egg file on disk).
    """

    @contextmanager
    def create_egg(self, name, resources):
        """
        Creates a mock egg with a list of resources.

        name: The name of the module.
        resources: A dictionary of template names mapped to file-like objects.
        """
        if six.PY2:
            name = name.encode('utf-8')

        # Marker loader class: pkg_resources dispatches providers on the
        # type of a module's __loader__.
        class MockLoader(object):
            pass

        # Provider that serves resources from the mock module's in-memory
        # `_resources` dict instead of the filesystem.
        class MockProvider(pkg_resources.NullProvider):
            def __init__(self, module):
                pkg_resources.NullProvider.__init__(self, module)
                self.module = module

            def _has(self, path):
                return path in self.module._resources

            def _isdir(self, path):
                return False

            def get_resource_stream(self, manager, resource_name):
                return self.module._resources[resource_name]

            def _get(self, path):
                return self.module._resources[path].read()

            def _fn(self, base, resource_name):
                return os.path.normcase(resource_name)

        # Build a fake package module and register it plus its provider
        # globally; both registrations are undone in the finally block.
        egg = types.ModuleType(name)
        egg.__loader__ = MockLoader()
        egg.__path__ = ['/some/bogus/path/']
        egg.__file__ = '/some/bogus/path/__init__.pyc'
        egg._resources = resources
        sys.modules[name] = egg
        pkg_resources._provider_factories[MockLoader] = MockProvider

        try:
            yield
        finally:
            del sys.modules[name]
            del pkg_resources._provider_factories[MockLoader]

    @classmethod
    @ignore_warnings(category=RemovedInDjango20Warning)
    def setUpClass(cls):
        # Instantiating the eggs loader emits a deprecation warning, hence
        # the ignore_warnings on the classmethod.
        cls.engine = Engine(loaders=[
            'django.template.loaders.eggs.Loader',
        ])
        cls.loader = cls.engine.template_loaders[0]
        super(EggLoaderTests, cls).setUpClass()

    def test_get_template(self):
        templates = {
            os.path.normcase('templates/y.html'): six.StringIO("y"),
        }

        with self.create_egg('egg', templates):
            with override_settings(INSTALLED_APPS=['egg']):
                template = self.engine.get_template("y.html")

        # Origin name uses the "egg:<app>:<resource>" pseudo-path scheme.
        self.assertEqual(template.origin.name, 'egg:egg:templates/y.html')
        self.assertEqual(template.origin.template_name, 'y.html')
        self.assertEqual(template.origin.loader, self.engine.template_loaders[0])

        output = template.render(Context({}))
        self.assertEqual(output, "y")

    @ignore_warnings(category=RemovedInDjango20Warning)
    def test_load_template_source(self):
        loader = self.engine.template_loaders[0]
        templates = {
            os.path.normcase('templates/y.html'): six.StringIO("y"),
        }

        with self.create_egg('egg', templates):
            with override_settings(INSTALLED_APPS=['egg']):
                source, name = loader.load_template_source('y.html')

        self.assertEqual(source.strip(), 'y')
        self.assertEqual(name, 'egg:egg:templates/y.html')

    def test_non_existing(self):
        """
        Template loading fails if the template is not in the egg.
        """
        with self.create_egg('egg', {}):
            with override_settings(INSTALLED_APPS=['egg']):
                with self.assertRaises(TemplateDoesNotExist):
                    self.engine.get_template('not-existing.html')

    def test_not_installed(self):
        """
        Template loading fails if the egg is not in INSTALLED_APPS.
        """
        templates = {
            os.path.normcase('templates/y.html'): six.StringIO("y"),
        }

        with self.create_egg('egg', templates):
            with self.assertRaises(TemplateDoesNotExist):
                self.engine.get_template('y.html')
class FileSystemLoaderTests(SimpleTestCase):
    """
    Tests for the filesystem template loader: origin metadata, directory
    traversal safety, unicode handling, and I/O error propagation.
    """

    @classmethod
    def setUpClass(cls):
        cls.engine = Engine(dirs=[TEMPLATE_DIR])
        super(FileSystemLoaderTests, cls).setUpClass()

    @contextmanager
    def set_dirs(self, dirs):
        # Temporarily swap the engine's template dirs, restoring on exit.
        original_dirs = self.engine.dirs
        self.engine.dirs = dirs
        try:
            yield
        finally:
            self.engine.dirs = original_dirs

    @contextmanager
    def source_checker(self, dirs):
        loader = self.engine.template_loaders[0]

        # Yielded helper: assert that get_template_sources(path) produces
        # exactly `expected_sources` (abspath-normalized), in order.
        def check_sources(path, expected_sources):
            expected_sources = [os.path.abspath(s) for s in expected_sources]
            self.assertEqual(
                [origin.name for origin in loader.get_template_sources(path)],
                expected_sources,
            )

        with self.set_dirs(dirs):
            yield check_sources

    def test_get_template(self):
        template = self.engine.get_template('index.html')
        self.assertEqual(template.origin.name, os.path.join(TEMPLATE_DIR, 'index.html'))
        self.assertEqual(template.origin.template_name, 'index.html')
        self.assertEqual(template.origin.loader, self.engine.template_loaders[0])
        self.assertEqual(template.origin.loader_name, 'django.template.loaders.filesystem.Loader')

    @ignore_warnings(category=RemovedInDjango20Warning)
    def test_load_template_source(self):
        loader = self.engine.template_loaders[0]
        source, name = loader.load_template_source('index.html')
        self.assertEqual(source.strip(), 'index')
        self.assertEqual(name, os.path.join(TEMPLATE_DIR, 'index.html'))

    def test_directory_security(self):
        # Paths escaping the configured dirs must yield no sources.
        with self.source_checker(['/dir1', '/dir2']) as check_sources:
            check_sources('index.html', ['/dir1/index.html', '/dir2/index.html'])
            check_sources('/etc/passwd', [])
            check_sources('etc/passwd', ['/dir1/etc/passwd', '/dir2/etc/passwd'])
            check_sources('../etc/passwd', [])
            check_sources('../../../etc/passwd', [])
            check_sources('/dir1/index.html', ['/dir1/index.html'])
            check_sources('../dir2/index.html', ['/dir2/index.html'])
            check_sources('/dir1blah', [])
            check_sources('../dir1blah', [])

    def test_unicode_template_name(self):
        with self.source_checker(['/dir1', '/dir2']) as check_sources:
            # UTF-8 bytestrings are permitted.
            check_sources(b'\xc3\x85ngstr\xc3\xb6m', ['/dir1/Ångström', '/dir2/Ångström'])
            # Unicode strings are permitted.
            check_sources('Ångström', ['/dir1/Ångström', '/dir2/Ångström'])

    def test_utf8_bytestring(self):
        """
        Invalid UTF-8 encoding in bytestrings should raise a useful error
        """
        engine = Engine()
        loader = engine.template_loaders[0]
        with self.assertRaises(UnicodeDecodeError):
            list(loader.get_template_sources(b'\xc3\xc3', ['/dir1']))

    def test_unicode_dir_name(self):
        # Directory names may also be UTF-8 bytestrings.
        with self.source_checker([b'/Stra\xc3\x9fe']) as check_sources:
            check_sources('Ångström', ['/Straße/Ångström'])
            check_sources(b'\xc3\x85ngstr\xc3\xb6m', ['/Straße/Ångström'])

    # NOTE(review): the condition mixes normcase (left) with normpath (right);
    # normcase('/TEST') only equals '/test' where normcase lowercases, i.e. on
    # case-INsensitive platforms — which contradicts the skip message. Confirm
    # the intended predicate.
    @unittest.skipUnless(
        os.path.normcase('/TEST') == os.path.normpath('/test'),
        "This test only runs on case-sensitive file systems.",
    )
    def test_case_sensitivity(self):
        with self.source_checker(['/dir1', '/DIR2']) as check_sources:
            check_sources('index.html', ['/dir1/index.html', '/DIR2/index.html'])
            check_sources('/DIR1/index.HTML', ['/DIR1/index.HTML'])

    def test_file_does_not_exist(self):
        with self.assertRaises(TemplateDoesNotExist):
            self.engine.get_template('doesnotexist.html')

    @unittest.skipIf(
        sys.platform == 'win32',
        "Python on Windows doesn't have working os.chmod().",
    )
    def test_permissions_error(self):
        # Make the file write-only so reading it raises a permission error.
        with tempfile.NamedTemporaryFile() as tmpfile:
            tmpdir = os.path.dirname(tmpfile.name)
            tmppath = os.path.join(tmpdir, tmpfile.name)
            os.chmod(tmppath, 0o0222)
            with self.set_dirs([tmpdir]):
                with self.assertRaisesMessage(IOError, 'Permission denied'):
                    self.engine.get_template(tmpfile.name)

    def test_notafile_error(self):
        # 'first' is a directory inside TEMPLATE_DIR, not a template file.
        with self.assertRaises(IOError):
            self.engine.get_template('first')
class AppDirectoriesLoaderTests(SimpleTestCase):
    """Tests for the app_directories template loader."""

    @classmethod
    def setUpClass(cls):
        cls.engine = Engine(
            loaders=['django.template.loaders.app_directories.Loader'],
        )
        super(AppDirectoriesLoaderTests, cls).setUpClass()

    @override_settings(INSTALLED_APPS=['template_tests'])
    def test_get_template(self):
        """A template under an installed app's templates/ dir is found."""
        expected_path = os.path.join(TEMPLATE_DIR, 'index.html')
        template = self.engine.get_template('index.html')
        origin = template.origin
        self.assertEqual(origin.name, expected_path)
        self.assertEqual(origin.template_name, 'index.html')
        self.assertEqual(origin.loader, self.engine.template_loaders[0])

    @ignore_warnings(category=RemovedInDjango20Warning)
    @override_settings(INSTALLED_APPS=['template_tests'])
    def test_load_template_source(self):
        """The deprecated load_template_source() API returns (source, path)."""
        app_loader = self.engine.template_loaders[0]
        source, name = app_loader.load_template_source('index.html')
        self.assertEqual(source.strip(), 'index')
        self.assertEqual(name, os.path.join(TEMPLATE_DIR, 'index.html'))

    @override_settings(INSTALLED_APPS=[])
    def test_not_installed(self):
        """Templates of apps missing from INSTALLED_APPS are not loadable."""
        with self.assertRaises(TemplateDoesNotExist):
            self.engine.get_template('index.html')
class LocmemLoaderTests(SimpleTestCase):
    """Tests for the in-memory (locmem) template loader."""

    @classmethod
    def setUpClass(cls):
        loader_config = ('django.template.loaders.locmem.Loader', {
            'index.html': 'index',
        })
        cls.engine = Engine(loaders=[loader_config])
        super(LocmemLoaderTests, cls).setUpClass()

    def test_get_template(self):
        """get_template() resolves names against the in-memory mapping."""
        template = self.engine.get_template('index.html')
        origin = template.origin
        self.assertEqual(origin.name, 'index.html')
        self.assertEqual(origin.template_name, 'index.html')
        self.assertEqual(origin.loader, self.engine.template_loaders[0])

    @ignore_warnings(category=RemovedInDjango20Warning)
    def test_load_template_source(self):
        """The deprecated load_template_source() API returns (source, name)."""
        locmem_loader = self.engine.template_loaders[0]
        source, name = locmem_loader.load_template_source('index.html')
        self.assertEqual(source.strip(), 'index')
        self.assertEqual(name, 'index.html')
|
unknown
|
codeparrot/codeparrot-clean
| ||
(function webpackUniversalModuleDefinition(root, factory) {
if(typeof exports === 'object' && typeof module === 'object')
module.exports = factory();
else if(typeof define === 'function' && define.amd)
define([], factory);
else if(typeof exports === 'object')
exports["mapping"] = factory();
else
root["mapping"] = factory();
})(this, function() {
return /******/ (function(modules) { // webpackBootstrap
/******/ // The module cache
/******/ var installedModules = {};
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId])
/******/ return installedModules[moduleId].exports;
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules[moduleId] = {
/******/ exports: {},
/******/ id: moduleId,
/******/ loaded: false
/******/ };
/******/ // Execute the module function
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/ // Flag the module as loaded
/******/ module.loaded = true;
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/ // expose the modules object (__webpack_modules__)
/******/ __webpack_require__.m = modules;
/******/ // expose the module cache
/******/ __webpack_require__.c = installedModules;
/******/ // __webpack_public_path__
/******/ __webpack_require__.p = "";
/******/ // Load entry module and return exports
/******/ return __webpack_require__(0);
/******/ })
/************************************************************************/
/******/ ([
/* 0 */
/***/ (function(module, exports) {
/** Used to map aliases to their real names. */
exports.aliasToReal = {
// Lodash aliases.
'each': 'forEach',
'eachRight': 'forEachRight',
'entries': 'toPairs',
'entriesIn': 'toPairsIn',
'extend': 'assignIn',
'extendAll': 'assignInAll',
'extendAllWith': 'assignInAllWith',
'extendWith': 'assignInWith',
'first': 'head',
// Methods that are curried variants of others.
'conforms': 'conformsTo',
'matches': 'isMatch',
'property': 'get',
// Ramda aliases.
'__': 'placeholder',
'F': 'stubFalse',
'T': 'stubTrue',
'all': 'every',
'allPass': 'overEvery',
'always': 'constant',
'any': 'some',
'anyPass': 'overSome',
'apply': 'spread',
'assoc': 'set',
'assocPath': 'set',
'complement': 'negate',
'compose': 'flowRight',
'contains': 'includes',
'dissoc': 'unset',
'dissocPath': 'unset',
'dropLast': 'dropRight',
'dropLastWhile': 'dropRightWhile',
'equals': 'isEqual',
'identical': 'eq',
'indexBy': 'keyBy',
'init': 'initial',
'invertObj': 'invert',
'juxt': 'over',
'omitAll': 'omit',
'nAry': 'ary',
'path': 'get',
'pathEq': 'matchesProperty',
'pathOr': 'getOr',
'paths': 'at',
'pickAll': 'pick',
'pipe': 'flow',
'pluck': 'map',
'prop': 'get',
'propEq': 'matchesProperty',
'propOr': 'getOr',
'props': 'at',
'symmetricDifference': 'xor',
'symmetricDifferenceBy': 'xorBy',
'symmetricDifferenceWith': 'xorWith',
'takeLast': 'takeRight',
'takeLastWhile': 'takeRightWhile',
'unapply': 'rest',
'unnest': 'flatten',
'useWith': 'overArgs',
'where': 'conformsTo',
'whereEq': 'isMatch',
'zipObj': 'zipObject'
};
/** Used to map ary to method names. */
exports.aryMethod = {
'1': [
'assignAll', 'assignInAll', 'attempt', 'castArray', 'ceil', 'create',
'curry', 'curryRight', 'defaultsAll', 'defaultsDeepAll', 'floor', 'flow',
'flowRight', 'fromPairs', 'invert', 'iteratee', 'memoize', 'method', 'mergeAll',
'methodOf', 'mixin', 'nthArg', 'over', 'overEvery', 'overSome','rest', 'reverse',
'round', 'runInContext', 'spread', 'template', 'trim', 'trimEnd', 'trimStart',
'uniqueId', 'words', 'zipAll'
],
'2': [
'add', 'after', 'ary', 'assign', 'assignAllWith', 'assignIn', 'assignInAllWith',
'at', 'before', 'bind', 'bindAll', 'bindKey', 'chunk', 'cloneDeepWith',
'cloneWith', 'concat', 'conformsTo', 'countBy', 'curryN', 'curryRightN',
'debounce', 'defaults', 'defaultsDeep', 'defaultTo', 'delay', 'difference',
'divide', 'drop', 'dropRight', 'dropRightWhile', 'dropWhile', 'endsWith', 'eq',
'every', 'filter', 'find', 'findIndex', 'findKey', 'findLast', 'findLastIndex',
'findLastKey', 'flatMap', 'flatMapDeep', 'flattenDepth', 'forEach',
'forEachRight', 'forIn', 'forInRight', 'forOwn', 'forOwnRight', 'get',
'groupBy', 'gt', 'gte', 'has', 'hasIn', 'includes', 'indexOf', 'intersection',
'invertBy', 'invoke', 'invokeMap', 'isEqual', 'isMatch', 'join', 'keyBy',
'lastIndexOf', 'lt', 'lte', 'map', 'mapKeys', 'mapValues', 'matchesProperty',
'maxBy', 'meanBy', 'merge', 'mergeAllWith', 'minBy', 'multiply', 'nth', 'omit',
'omitBy', 'overArgs', 'pad', 'padEnd', 'padStart', 'parseInt', 'partial',
'partialRight', 'partition', 'pick', 'pickBy', 'propertyOf', 'pull', 'pullAll',
'pullAt', 'random', 'range', 'rangeRight', 'rearg', 'reject', 'remove',
'repeat', 'restFrom', 'result', 'sampleSize', 'some', 'sortBy', 'sortedIndex',
'sortedIndexOf', 'sortedLastIndex', 'sortedLastIndexOf', 'sortedUniqBy',
'split', 'spreadFrom', 'startsWith', 'subtract', 'sumBy', 'take', 'takeRight',
'takeRightWhile', 'takeWhile', 'tap', 'throttle', 'thru', 'times', 'trimChars',
'trimCharsEnd', 'trimCharsStart', 'truncate', 'union', 'uniqBy', 'uniqWith',
'unset', 'unzipWith', 'without', 'wrap', 'xor', 'zip', 'zipObject',
'zipObjectDeep'
],
'3': [
'assignInWith', 'assignWith', 'clamp', 'differenceBy', 'differenceWith',
'findFrom', 'findIndexFrom', 'findLastFrom', 'findLastIndexFrom', 'getOr',
'includesFrom', 'indexOfFrom', 'inRange', 'intersectionBy', 'intersectionWith',
'invokeArgs', 'invokeArgsMap', 'isEqualWith', 'isMatchWith', 'flatMapDepth',
'lastIndexOfFrom', 'mergeWith', 'orderBy', 'padChars', 'padCharsEnd',
'padCharsStart', 'pullAllBy', 'pullAllWith', 'rangeStep', 'rangeStepRight',
'reduce', 'reduceRight', 'replace', 'set', 'slice', 'sortedIndexBy',
'sortedLastIndexBy', 'transform', 'unionBy', 'unionWith', 'update', 'xorBy',
'xorWith', 'zipWith'
],
'4': [
'fill', 'setWith', 'updateWith'
]
};
/** Used to map ary to rearg configs. */
exports.aryRearg = {
'2': [1, 0],
'3': [2, 0, 1],
'4': [3, 2, 0, 1]
};
/** Used to map method names to their iteratee ary. */
exports.iterateeAry = {
'dropRightWhile': 1,
'dropWhile': 1,
'every': 1,
'filter': 1,
'find': 1,
'findFrom': 1,
'findIndex': 1,
'findIndexFrom': 1,
'findKey': 1,
'findLast': 1,
'findLastFrom': 1,
'findLastIndex': 1,
'findLastIndexFrom': 1,
'findLastKey': 1,
'flatMap': 1,
'flatMapDeep': 1,
'flatMapDepth': 1,
'forEach': 1,
'forEachRight': 1,
'forIn': 1,
'forInRight': 1,
'forOwn': 1,
'forOwnRight': 1,
'map': 1,
'mapKeys': 1,
'mapValues': 1,
'partition': 1,
'reduce': 2,
'reduceRight': 2,
'reject': 1,
'remove': 1,
'some': 1,
'takeRightWhile': 1,
'takeWhile': 1,
'times': 1,
'transform': 2
};
/** Used to map method names to iteratee rearg configs. */
exports.iterateeRearg = {
'mapKeys': [1],
'reduceRight': [1, 0]
};
/** Used to map method names to rearg configs. */
exports.methodRearg = {
'assignInAllWith': [1, 0],
'assignInWith': [1, 2, 0],
'assignAllWith': [1, 0],
'assignWith': [1, 2, 0],
'differenceBy': [1, 2, 0],
'differenceWith': [1, 2, 0],
'getOr': [2, 1, 0],
'intersectionBy': [1, 2, 0],
'intersectionWith': [1, 2, 0],
'isEqualWith': [1, 2, 0],
'isMatchWith': [2, 1, 0],
'mergeAllWith': [1, 0],
'mergeWith': [1, 2, 0],
'padChars': [2, 1, 0],
'padCharsEnd': [2, 1, 0],
'padCharsStart': [2, 1, 0],
'pullAllBy': [2, 1, 0],
'pullAllWith': [2, 1, 0],
'rangeStep': [1, 2, 0],
'rangeStepRight': [1, 2, 0],
'setWith': [3, 1, 2, 0],
'sortedIndexBy': [2, 1, 0],
'sortedLastIndexBy': [2, 1, 0],
'unionBy': [1, 2, 0],
'unionWith': [1, 2, 0],
'updateWith': [3, 1, 2, 0],
'xorBy': [1, 2, 0],
'xorWith': [1, 2, 0],
'zipWith': [1, 2, 0]
};
/** Used to map method names to spread configs. */
exports.methodSpread = {
'assignAll': { 'start': 0 },
'assignAllWith': { 'start': 0 },
'assignInAll': { 'start': 0 },
'assignInAllWith': { 'start': 0 },
'defaultsAll': { 'start': 0 },
'defaultsDeepAll': { 'start': 0 },
'invokeArgs': { 'start': 2 },
'invokeArgsMap': { 'start': 2 },
'mergeAll': { 'start': 0 },
'mergeAllWith': { 'start': 0 },
'partial': { 'start': 1 },
'partialRight': { 'start': 1 },
'without': { 'start': 1 },
'zipAll': { 'start': 0 }
};
/** Used to identify methods which mutate arrays or objects. */
exports.mutate = {
'array': {
'fill': true,
'pull': true,
'pullAll': true,
'pullAllBy': true,
'pullAllWith': true,
'pullAt': true,
'remove': true,
'reverse': true
},
'object': {
'assign': true,
'assignAll': true,
'assignAllWith': true,
'assignIn': true,
'assignInAll': true,
'assignInAllWith': true,
'assignInWith': true,
'assignWith': true,
'defaults': true,
'defaultsAll': true,
'defaultsDeep': true,
'defaultsDeepAll': true,
'merge': true,
'mergeAll': true,
'mergeAllWith': true,
'mergeWith': true,
},
'set': {
'set': true,
'setWith': true,
'unset': true,
'update': true,
'updateWith': true
}
};
/** Used to map real names to their aliases. */
exports.realToAlias = (function() {
var hasOwnProperty = Object.prototype.hasOwnProperty,
object = exports.aliasToReal,
result = {};
for (var key in object) {
var value = object[key];
if (hasOwnProperty.call(result, value)) {
result[value].push(key);
} else {
result[value] = [key];
}
}
return result;
}());
/** Used to map method names to other names. */
exports.remap = {
'assignAll': 'assign',
'assignAllWith': 'assignWith',
'assignInAll': 'assignIn',
'assignInAllWith': 'assignInWith',
'curryN': 'curry',
'curryRightN': 'curryRight',
'defaultsAll': 'defaults',
'defaultsDeepAll': 'defaultsDeep',
'findFrom': 'find',
'findIndexFrom': 'findIndex',
'findLastFrom': 'findLast',
'findLastIndexFrom': 'findLastIndex',
'getOr': 'get',
'includesFrom': 'includes',
'indexOfFrom': 'indexOf',
'invokeArgs': 'invoke',
'invokeArgsMap': 'invokeMap',
'lastIndexOfFrom': 'lastIndexOf',
'mergeAll': 'merge',
'mergeAllWith': 'mergeWith',
'padChars': 'pad',
'padCharsEnd': 'padEnd',
'padCharsStart': 'padStart',
'propertyOf': 'get',
'rangeStep': 'range',
'rangeStepRight': 'rangeRight',
'restFrom': 'rest',
'spreadFrom': 'spread',
'trimChars': 'trim',
'trimCharsEnd': 'trimEnd',
'trimCharsStart': 'trimStart',
'zipAll': 'zip'
};
/** Used to track methods that skip fixing their arity. */
exports.skipFixed = {
'castArray': true,
'flow': true,
'flowRight': true,
'iteratee': true,
'mixin': true,
'rearg': true,
'runInContext': true
};
/** Used to track methods that skip rearranging arguments. */
exports.skipRearg = {
'add': true,
'assign': true,
'assignIn': true,
'bind': true,
'bindKey': true,
'concat': true,
'difference': true,
'divide': true,
'eq': true,
'gt': true,
'gte': true,
'isEqual': true,
'lt': true,
'lte': true,
'matchesProperty': true,
'merge': true,
'multiply': true,
'overArgs': true,
'partial': true,
'partialRight': true,
'propertyOf': true,
'random': true,
'range': true,
'rangeRight': true,
'subtract': true,
'zip': true,
'zipObject': true,
'zipObjectDeep': true
};
/***/ })
/******/ ])
});
;
|
javascript
|
github
|
https://github.com/lodash/lodash
|
dist/mapping.fp.js
|
##########################################################################
#
# Copyright 2010 Dr D Studios Pty Limited (ACN 127 184 954) (Dr. D Studios),
# its affiliates and/or its licensors.
#
# Copyright (c) 2011, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
from __future__ import with_statement
import IECore
# renders a sphere
class sphereProcedural( IECore.ParameterisedProcedural ) :
    """Procedural rendering a sphere with configurable radius and theta sweep."""

    def __init__( self ) :

        IECore.ParameterisedProcedural.__init__( self, "Renders a sphere." )

        radiusParameter = IECore.FloatParameter(
            name = "radius",
            description = "Sphere radius.",
            defaultValue = 1,
            minValue = 0.01,
            maxValue = 100.0,
            userData = { 'UI': { "update" : IECore.BoolData( True ) } }
        )
        thetaParameter = IECore.FloatParameter(
            name = "theta",
            description = "Sphere theta.",
            defaultValue = 360,
            minValue = 1,
            maxValue = 360,
            userData = { 'UI': { "update" : IECore.BoolData( True ) } }
        )
        self.parameters().addParameters( [ radiusParameter, thetaParameter ] )

    def doBound( self, args ) :
        # Bound is the axis-aligned cube enclosing the full sphere.
        r = args["radius"].value
        return IECore.Box3f( IECore.V3f( -r, -r, -r ), IECore.V3f( r, r, r ) )

    def doRenderState( self, renderer, args ) :
        # No render state required for this procedural.
        pass

    def doRender( self, renderer, args ) :
        r = args["radius"].value
        sweep = args["theta"].value
        with IECore.AttributeBlock( renderer ):
            renderer.sphere( r, -1, 1, sweep, {} )

# register
IECore.registerRunTimeTyped( sphereProcedural )
|
unknown
|
codeparrot/codeparrot-clean
| ||
# SPDX-License-Identifier: (GPL-2.0-only OR BSD-2-Clause)
# Copyright (C) 2020 BAIKAL ELECTRONICS, JSC
%YAML 1.2
---
$id: http://devicetree.org/schemas/hwmon/baikal,bt1-pvt.yaml#
$schema: http://devicetree.org/meta-schemas/core.yaml#
title: Baikal-T1 PVT Sensor
maintainers:
- Serge Semin <fancer.lancer@gmail.com>
description: |
Baikal-T1 SoC provides an embedded process, voltage and temperature
sensor to monitor an internal SoC environment (chip temperature, supply
voltage and process monitor) and detect critical situations in time,
which may cause system instability and even damage. The IP-block
is based on the Analog Bits PVT sensor, but is equipped with a dedicated
control wrapper, which provides a MMIO registers-based access to the
sensor core functionality (APB3-bus based) and exposes an additional
functions like thresholds/data ready interrupts, its status and masks,
measurements timeout. Its internal structure is depicted on the next
diagram:
Analog Bits core Baikal-T1 PVT control block
+--------------------+ +------------------------+
| Temperature sensor |-+ +------| Sensors control |
|--------------------| |<---En---| |------------------------|
| Voltage sensor |-|<--Mode--| +--->| Sampled data |
|--------------------| |<--Trim--+ | |------------------------|
| Low-Vt sensor |-| | +--| Thresholds comparator |
|--------------------| |---Data----| | |------------------------|
| High-Vt sensor |-| | +->| Interrupts status |
|--------------------| |--Valid--+-+ | |------------------------|
| Standard-Vt sensor |-+ +---+--| Interrupts mask |
+--------------------+ |------------------------|
^ | Interrupts timeout |
| +------------------------+
| ^ ^
Rclk-----+----------------------------------------+ |
APB3-------------------------------------------------+
This binding describes the external Baikal-T1 PVT control interfaces
like MMIO registers space, interrupt request number and clocks source.
These are then used by the corresponding hwmon device driver to
implement the sysfs files-based access to the sensors functionality.
properties:
compatible:
const: baikal,bt1-pvt
reg:
maxItems: 1
interrupts:
maxItems: 1
clocks:
items:
- description: PVT reference clock
- description: APB3 interface clock
clock-names:
items:
- const: ref
- const: pclk
"#thermal-sensor-cells":
description: Baikal-T1 can be referenced as the CPU thermal-sensor
const: 0
baikal,pvt-temp-offset-millicelsius:
description: |
Temperature sensor trimming factor. It can be used to manually adjust the
temperature measurements within 7.130 degrees Celsius.
default: 0
minimum: 0
maximum: 7130
additionalProperties: false
required:
- compatible
- reg
- interrupts
- clocks
- clock-names
examples:
- |
#include <dt-bindings/interrupt-controller/mips-gic.h>
pvt@1f200000 {
compatible = "baikal,bt1-pvt";
reg = <0x1f200000 0x1000>;
#thermal-sensor-cells = <0>;
interrupts = <GIC_SHARED 31 IRQ_TYPE_LEVEL_HIGH>;
baikal,pvt-temp-offset-millicelsius = <1000>;
clocks = <&ccu_sys>, <&ccu_sys>;
clock-names = "ref", "pclk";
};
...
|
unknown
|
github
|
https://github.com/torvalds/linux
|
Documentation/devicetree/bindings/hwmon/baikal,bt1-pvt.yaml
|
from __future__ import unicode_literals
import codecs
import subprocess
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from youtube_dl.utils import intlist_to_bytes
from youtube_dl.aes import aes_encrypt, key_expansion
secret_msg = b'Secret message goes here'  # plaintext encrypted by every test vector below
def hex_str(int_list):
    """Return the lowercase hex encoding (as bytes) of a list of byte values.

    Implemented with the stdlib only: ``bytes(bytearray(int_list))`` performs
    the same int-list-to-bytes conversion as ``youtube_dl.utils.intlist_to_bytes``
    and works identically on Python 2 and 3, so this helper no longer needs
    the project import.
    """
    return codecs.encode(bytes(bytearray(int_list)), 'hex')
def openssl_encode(algo, key, iv):
    """Encrypt ``secret_msg`` with the openssl CLI and return the raw ciphertext."""
    command = [
        'openssl', 'enc', '-e', '-' + algo,
        '-K', hex_str(key),
        '-iv', hex_str(iv),
    ]
    proc = subprocess.Popen(command, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    stdout, _ = proc.communicate(secret_msg)
    return stdout
# 16-byte key/IV: two fixed bytes followed by zeros (the reference test vector).
iv = key = [0x20, 0x15] + 14 * [0]
r = openssl_encode('aes-128-cbc', key, iv)
print('aes_cbc_decrypt')
print(repr(r))
# Derive the CTR key by encrypting the password with its own expanded key,
# mirroring what the 16-byte-password path of aes_decrypt_text computes.
password = key
new_key = aes_encrypt(password, key_expansion(password))
r = openssl_encode('aes-128-ctr', new_key, iv)
print('aes_decrypt_text 16')
print(repr(r))
# 32-byte password variant: zero-pad the password, then duplicate the
# 16-byte encrypted block to form a 256-bit key (32 // 16 copies).
password = key + 16 * [0]
new_key = aes_encrypt(password, key_expansion(password)) * (32 // 16)
r = openssl_encode('aes-256-ctr', new_key, iv)
print('aes_decrypt_text 32')
print(repr(r))
|
unknown
|
codeparrot/codeparrot-clean
| ||
#!/usr/bin/python
#
# Copyright (c) 2015 CenturyLink
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
module: clc_server
short_description: Create, Delete, Start and Stop servers in CenturyLink Cloud.
description:
- An Ansible module to Create, Delete, Start and Stop servers in CenturyLink Cloud.
version_added: "2.0"
options:
additional_disks:
description:
- The list of additional disks for the server
default: []
add_public_ip:
description:
- Whether to add a public ip to the server
type: bool
default: 'no'
alias:
description:
- The account alias to provision the servers under.
anti_affinity_policy_id:
description:
- The anti-affinity policy to assign to the server. This is mutually exclusive with 'anti_affinity_policy_name'.
anti_affinity_policy_name:
description:
- The anti-affinity policy to assign to the server. This is mutually exclusive with 'anti_affinity_policy_id'.
alert_policy_id:
description:
- The alert policy to assign to the server. This is mutually exclusive with 'alert_policy_name'.
alert_policy_name:
description:
- The alert policy to assign to the server. This is mutually exclusive with 'alert_policy_id'.
count:
description:
- The number of servers to build (mutually exclusive with exact_count)
default: 1
count_group:
description:
- Required when exact_count is specified. The Server Group use to determine how many servers to deploy.
cpu:
description:
- How many CPUs to provision on the server
default: 1
cpu_autoscale_policy_id:
description:
- The autoscale policy to assign to the server.
custom_fields:
description:
- The list of custom fields to set on the server.
default: []
description:
description:
- The description to set for the server.
exact_count:
description:
- Run in idempotent mode. Will insure that this exact number of servers are running in the provided group,
creating and deleting them to reach that count. Requires count_group to be set.
group:
description:
- The Server Group to create servers under.
default: 'Default Group'
ip_address:
description:
- The IP Address for the server. One is assigned if not provided.
location:
description:
- The Datacenter to create servers in.
managed_os:
description:
- Whether to create the server as 'Managed' or not.
type: bool
default: 'no'
required: False
memory:
description:
- Memory in GB.
default: 1
name:
description:
- A 1 to 6 character identifier to use for the server. This is required when state is 'present'
network_id:
description:
- The network UUID on which to create servers.
packages:
description:
- The list of blue print packages to run on the server after its created.
default: []
password:
description:
- Password for the administrator / root user
primary_dns:
description:
- Primary DNS used by the server.
public_ip_protocol:
description:
- The protocol to use for the public ip if add_public_ip is set to True.
default: 'TCP'
choices: ['TCP', 'UDP', 'ICMP']
public_ip_ports:
description:
- A list of ports to allow on the firewall to the servers public ip, if add_public_ip is set to True.
default: []
secondary_dns:
description:
- Secondary DNS used by the server.
server_ids:
description:
- Required for started, stopped, and absent states.
A list of server Ids to insure are started, stopped, or absent.
default: []
source_server_password:
description:
- The password for the source server if a clone is specified.
state:
description:
- The state to insure that the provided resources are in.
default: 'present'
choices: ['present', 'absent', 'started', 'stopped']
storage_type:
description:
- The type of storage to attach to the server.
default: 'standard'
choices: ['standard', 'hyperscale']
template:
description:
- The template to use for server creation. Will search for a template if a partial string is provided.
This is required when state is 'present'
ttl:
description:
- The time to live for the server in seconds. The server will be deleted when this time expires.
type:
description:
- The type of server to create.
default: 'standard'
choices: ['standard', 'hyperscale', 'bareMetal']
configuration_id:
description:
- Only required for bare metal servers.
Specifies the identifier for the specific configuration type of bare metal server to deploy.
os_type:
description:
- Only required for bare metal servers.
Specifies the OS to provision with the bare metal server.
choices: ['redHat6_64Bit', 'centOS6_64Bit', 'windows2012R2Standard_64Bit', 'ubuntu14_64Bit']
wait:
description:
- Whether to wait for the provisioning tasks to finish before returning.
type: bool
default: 'yes'
requirements:
- python = 2.7
- requests >= 2.5.0
- clc-sdk
author: "CLC Runner (@clc-runner)"
notes:
- To use this module, it is required to set the below environment variables which enables access to the
Centurylink Cloud
- CLC_V2_API_USERNAME, the account login id for the centurylink cloud
- CLC_V2_API_PASSWORD, the account password for the centurylink cloud
- Alternatively, the module accepts the API token and account alias. The API token can be generated using the
CLC account login and password via the HTTP api call @ https://api.ctl.io/v2/authentication/login
- CLC_V2_API_TOKEN, the API token generated from https://api.ctl.io/v2/authentication/login
- CLC_ACCT_ALIAS, the account alias associated with the centurylink cloud
- Users can set CLC_V2_API_URL to specify an endpoint for pointing to a different CLC environment.
'''
EXAMPLES = '''
# Note - You must set the CLC_V2_API_USERNAME And CLC_V2_API_PASSWD Environment variables before running these examples
- name: Provision a single Ubuntu Server
clc_server:
name: test
template: ubuntu-14-64
count: 1
group: Default Group
state: present
- name: Ensure 'Default Group' has exactly 5 servers
clc_server:
name: test
template: ubuntu-14-64
exact_count: 5
count_group: Default Group
group: Default Group
- name: Stop a Server
clc_server:
server_ids:
- UC1ACCT-TEST01
state: stopped
- name: Start a Server
clc_server:
server_ids:
- UC1ACCT-TEST01
state: started
- name: Delete a Server
clc_server:
server_ids:
- UC1ACCT-TEST01
state: absent
'''
RETURN = '''
server_ids:
description: The list of server ids that are created
returned: success
type: list
sample:
[
"UC1TEST-SVR01",
"UC1TEST-SVR02"
]
partially_created_server_ids:
description: The list of server ids that are partially created
returned: success
type: list
sample:
[
"UC1TEST-SVR01",
"UC1TEST-SVR02"
]
servers:
description: The list of server objects returned from CLC
returned: success
type: list
sample:
[
{
"changeInfo":{
"createdBy":"service.wfad",
"createdDate":1438196820,
"modifiedBy":"service.wfad",
"modifiedDate":1438196820
},
"description":"test-server",
"details":{
"alertPolicies":[
],
"cpu":1,
"customFields":[
],
"diskCount":3,
"disks":[
{
"id":"0:0",
"partitionPaths":[
],
"sizeGB":1
},
{
"id":"0:1",
"partitionPaths":[
],
"sizeGB":2
},
{
"id":"0:2",
"partitionPaths":[
],
"sizeGB":14
}
],
"hostName":"",
"inMaintenanceMode":false,
"ipAddresses":[
{
"internal":"10.1.1.1"
}
],
"memoryGB":1,
"memoryMB":1024,
"partitions":[
],
"powerState":"started",
"snapshots":[
],
"storageGB":17
},
"groupId":"086ac1dfe0b6411989e8d1b77c4065f0",
"id":"test-server",
"ipaddress":"10.120.45.23",
"isTemplate":false,
"links":[
{
"href":"/v2/servers/wfad/test-server",
"id":"test-server",
"rel":"self",
"verbs":[
"GET",
"PATCH",
"DELETE"
]
},
{
"href":"/v2/groups/wfad/086ac1dfe0b6411989e8d1b77c4065f0",
"id":"086ac1dfe0b6411989e8d1b77c4065f0",
"rel":"group"
},
{
"href":"/v2/accounts/wfad",
"id":"wfad",
"rel":"account"
},
{
"href":"/v2/billing/wfad/serverPricing/test-server",
"rel":"billing"
},
{
"href":"/v2/servers/wfad/test-server/publicIPAddresses",
"rel":"publicIPAddresses",
"verbs":[
"POST"
]
},
{
"href":"/v2/servers/wfad/test-server/credentials",
"rel":"credentials"
},
{
"href":"/v2/servers/wfad/test-server/statistics",
"rel":"statistics"
},
{
"href":"/v2/servers/wfad/510ec21ae82d4dc89d28479753bf736a/upcomingScheduledActivities",
"rel":"upcomingScheduledActivities"
},
{
"href":"/v2/servers/wfad/510ec21ae82d4dc89d28479753bf736a/scheduledActivities",
"rel":"scheduledActivities",
"verbs":[
"GET",
"POST"
]
},
{
"href":"/v2/servers/wfad/test-server/capabilities",
"rel":"capabilities"
},
{
"href":"/v2/servers/wfad/test-server/alertPolicies",
"rel":"alertPolicyMappings",
"verbs":[
"POST"
]
},
{
"href":"/v2/servers/wfad/test-server/antiAffinityPolicy",
"rel":"antiAffinityPolicyMapping",
"verbs":[
"PUT",
"DELETE"
]
},
{
"href":"/v2/servers/wfad/test-server/cpuAutoscalePolicy",
"rel":"cpuAutoscalePolicyMapping",
"verbs":[
"PUT",
"DELETE"
]
}
],
"locationId":"UC1",
"name":"test-server",
"os":"ubuntu14_64Bit",
"osType":"Ubuntu 14 64-bit",
"status":"active",
"storageType":"standard",
"type":"standard"
}
]
'''
__version__ = '${version}'
import json
import os
import time
import traceback
from distutils.version import LooseVersion
REQUESTS_IMP_ERR = None
try:
import requests
except ImportError:
REQUESTS_IMP_ERR = traceback.format_exc()
REQUESTS_FOUND = False
else:
REQUESTS_FOUND = True
#
# Requires the clc-python-sdk.
# sudo pip install clc-sdk
#
CLC_IMP_ERR = None
try:
import clc as clc_sdk
from clc import CLCException
from clc import APIFailedResponse
except ImportError:
CLC_IMP_ERR = traceback.format_exc()
CLC_FOUND = False
clc_sdk = None
else:
CLC_FOUND = True
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
class ClcServer:
clc = clc_sdk
def __init__(self, module):
    """
    Construct module

    :param module: the AnsibleModule driving this run; used to report
        missing-dependency failures via fail_json.
    """
    self.clc = clc_sdk
    self.module = module
    self.group_dict = {}
    # Fail fast when the optional imports (captured at module load time
    # into CLC_FOUND / REQUESTS_FOUND) are unavailable.
    if not CLC_FOUND:
        self.module.fail_json(msg=missing_required_lib('clc-sdk'), exception=CLC_IMP_ERR)
    if not REQUESTS_FOUND:
        self.module.fail_json(msg=missing_required_lib('requests'), exception=REQUESTS_IMP_ERR)
    # The module requirements (see DOCUMENTATION) demand requests >= 2.5.0.
    if requests.__version__ and LooseVersion(
            requests.__version__) < LooseVersion('2.5.0'):
        self.module.fail_json(
            msg='requests library version should be >= 2.5.0')
    self._set_user_agent(self.clc)
def process_request(self):
    """
    Process the request - Main Code Path

    Dispatches on the 'state' param: absent -> delete, started/stopped ->
    power control, present -> create (or converge to exact_count).
    :return: Returns with either an exit_json or fail_json
    """
    changed = False
    new_server_ids = []
    server_dict_array = []
    self._set_clc_credentials_from_env()
    # Normalise/validate params up front (resolves alias, cpu/memory
    # defaults, template id, group id, network id, policies).
    self.module.params = self._validate_module_params(
        self.clc,
        self.module)
    p = self.module.params
    state = p.get('state')
    #
    # Handle each state
    #
    partial_servers_ids = []
    if state == 'absent':
        server_ids = p['server_ids']
        if not isinstance(server_ids, list):
            return self.module.fail_json(
                msg='server_ids needs to be a list of instances to delete: %s' %
                server_ids)
        (changed,
         server_dict_array,
         new_server_ids) = self._delete_servers(module=self.module,
                                                clc=self.clc,
                                                server_ids=server_ids)
    elif state in ('started', 'stopped'):
        server_ids = p.get('server_ids')
        if not isinstance(server_ids, list):
            return self.module.fail_json(
                msg='server_ids needs to be a list of servers to run: %s' %
                server_ids)
        (changed,
         server_dict_array,
         new_server_ids) = self._start_stop_servers(self.module,
                                                    self.clc,
                                                    server_ids)
    elif state == 'present':
        # Changed is always set to true when provisioning new instances
        if not p.get('template') and p.get('type') != 'bareMetal':
            return self.module.fail_json(
                msg='template parameter is required for new instance')
        if p.get('exact_count') is None:
            (server_dict_array,
             new_server_ids,
             partial_servers_ids,
             changed) = self._create_servers(self.module,
                                             self.clc)
        else:
            # Idempotent mode: converge the count_group to exactly
            # exact_count running servers.
            (server_dict_array,
             new_server_ids,
             partial_servers_ids,
             changed) = self._enforce_count(self.module,
                                            self.clc)
    self.module.exit_json(
        changed=changed,
        server_ids=new_server_ids,
        partially_created_server_ids=partial_servers_ids,
        servers=server_dict_array)
@staticmethod
def _define_module_argument_spec():
    """
    Define the argument spec for the ansible module
    :return: argument spec dictionary (argument_spec + mutually_exclusive
        keys, as accepted by AnsibleModule)
    """
    argument_spec = dict(
        name=dict(),
        template=dict(),
        group=dict(default='Default Group'),
        network_id=dict(),
        location=dict(default=None),
        cpu=dict(default=1, type='int'),
        memory=dict(default=1, type='int'),
        alias=dict(default=None),
        password=dict(default=None, no_log=True),
        ip_address=dict(default=None),
        storage_type=dict(
            default='standard',
            choices=[
                'standard',
                'hyperscale']),
        type=dict(default='standard', choices=['standard', 'hyperscale', 'bareMetal']),
        primary_dns=dict(default=None),
        secondary_dns=dict(default=None),
        additional_disks=dict(type='list', default=[]),
        custom_fields=dict(type='list', default=[]),
        ttl=dict(default=None),
        managed_os=dict(type='bool', default=False),
        description=dict(default=None),
        source_server_password=dict(default=None, no_log=True),
        cpu_autoscale_policy_id=dict(default=None),
        anti_affinity_policy_id=dict(default=None),
        anti_affinity_policy_name=dict(default=None),
        alert_policy_id=dict(default=None),
        alert_policy_name=dict(default=None),
        packages=dict(type='list', default=[]),
        state=dict(
            default='present',
            choices=[
                'present',
                'absent',
                'started',
                'stopped']),
        count=dict(type='int', default=1),
        exact_count=dict(type='int', default=None),
        count_group=dict(),
        server_ids=dict(type='list', default=[]),
        add_public_ip=dict(type='bool', default=False),
        public_ip_protocol=dict(
            default='TCP',
            choices=[
                'TCP',
                'UDP',
                'ICMP']),
        public_ip_ports=dict(type='list', default=[]),
        configuration_id=dict(default=None),
        os_type=dict(default=None,
                     choices=[
                         'redHat6_64Bit',
                         'centOS6_64Bit',
                         'windows2012R2Standard_64Bit',
                         'ubuntu14_64Bit'
                     ]),
        wait=dict(type='bool', default=True))
    # Pairs of parameters that may not be supplied together.
    mutually_exclusive = [
        ['exact_count', 'count'],
        ['exact_count', 'state'],
        ['anti_affinity_policy_id', 'anti_affinity_policy_name'],
        ['alert_policy_id', 'alert_policy_name'],
    ]
    return {"argument_spec": argument_spec,
            "mutually_exclusive": mutually_exclusive}
def _set_clc_credentials_from_env(self):
    """
    Set the CLC Credentials on the sdk by reading environment variables

    A pre-generated token + account alias pair takes precedence over a
    username/password pair; fails the module when neither is configured.
    :return: none
    """
    env = os.environ
    v2_api_token = env.get('CLC_V2_API_TOKEN', False)
    v2_api_username = env.get('CLC_V2_API_USERNAME', False)
    # NOTE(review): this reads CLC_V2_API_PASSWD while the module notes in
    # DOCUMENTATION mention CLC_V2_API_PASSWORD -- confirm which spelling
    # callers actually export.
    v2_api_passwd = env.get('CLC_V2_API_PASSWD', False)
    clc_alias = env.get('CLC_ACCT_ALIAS', False)
    api_url = env.get('CLC_V2_API_URL', False)
    if api_url:
        # Allow pointing the SDK at a non-default CLC endpoint.
        self.clc.defaults.ENDPOINT_URL_V2 = api_url
    if v2_api_token and clc_alias:
        self.clc._LOGIN_TOKEN_V2 = v2_api_token
        self.clc._V2_ENABLED = True
        self.clc.ALIAS = clc_alias
    elif v2_api_username and v2_api_passwd:
        self.clc.v2.SetCredentials(
            api_username=v2_api_username,
            api_passwd=v2_api_passwd)
    else:
        return self.module.fail_json(
            msg="You must set the CLC_V2_API_USERNAME and CLC_V2_API_PASSWD "
                "environment variables")
@staticmethod
def _validate_module_params(clc, module):
    """
    Validate the module params, and lookup default values.

    Mutates module.params in place, replacing user-supplied values with
    resolved ids (e.g. 'group' becomes a group id, 'template' a template id).
    :param clc: clc-sdk instance to use
    :param module: module to validate
    :return: dictionary of validated params
    """
    params = module.params
    datacenter = ClcServer._find_datacenter(clc, module)
    ClcServer._validate_types(module)
    ClcServer._validate_name(module)
    # NOTE(review): order matters here -- _find_cpu/_find_memory run before
    # 'group' is rewritten to a group id below; confirm they see the
    # values they expect.
    params['alias'] = ClcServer._find_alias(clc, module)
    params['cpu'] = ClcServer._find_cpu(clc, module)
    params['memory'] = ClcServer._find_memory(clc, module)
    params['description'] = ClcServer._find_description(module)
    params['ttl'] = ClcServer._find_ttl(clc, module)
    params['template'] = ClcServer._find_template_id(module, datacenter)
    params['group'] = ClcServer._find_group(module, datacenter).id
    params['network_id'] = ClcServer._find_network_id(module, datacenter)
    params['anti_affinity_policy_id'] = ClcServer._find_aa_policy_id(
        clc,
        module)
    params['alert_policy_id'] = ClcServer._find_alert_policy_id(
        clc,
        module)
    return params
@staticmethod
def _find_datacenter(clc, module):
    """
    Resolve the target datacenter via the CLC API, defaulting to the
    account's primary datacenter when no 'location' param was given.
    :param clc: clc-sdk instance to use
    :param module: module to validate
    :return: clc-sdk.Datacenter instance
    """
    loc = module.params.get('location')
    try:
        if not loc:
            loc = clc.v2.Account().data.get('primaryDataCenter')
        return clc.v2.Datacenter(loc)
    except CLCException:
        module.fail_json(msg="Unable to find location: {0}".format(loc))
@staticmethod
def _find_alias(clc, module):
    """
    Return the account alias, querying the CLC API when the 'alias'
    param was not supplied; fails the module if the lookup errors.
    :param clc: clc-sdk instance to use
    :param module: module to validate
    :return: the account alias string
    """
    configured = module.params.get('alias')
    if configured:
        return configured
    try:
        return clc.v2.Account.GetAlias()
    except CLCException as ex:
        module.fail_json(msg='Unable to find account alias. {0}'.format(
            ex.message
        ))
@staticmethod
def _find_cpu(clc, module):
    """
    Find or validate the CPU value by calling the CLC API
    :param clc: clc-sdk instance to use
    :param module: module to validate
    :return: Int value for CPU
    """
    cpu = module.params.get('cpu')
    # NOTE(review): 'group_id' is not declared in the argument spec (the
    # param is named 'group', and _validate_module_params rewrites it to a
    # group id only AFTER this lookup runs), so group_id appears to always
    # be None here -- confirm against the rest of the module.
    group_id = module.params.get('group_id')
    alias = module.params.get('alias')
    state = module.params.get('state')
    if not cpu and state == 'present':
        # Fall back to the group's default cpu count from the CLC API.
        group = clc.v2.Group(id=group_id,
                             alias=alias)
        if group.Defaults("cpu"):
            cpu = group.Defaults("cpu")
        else:
            module.fail_json(
                msg=str("Can\'t determine a default cpu value. Please provide a value for cpu."))
    return cpu
@staticmethod
def _find_memory(clc, module):
    """
    Find or validate the Memory value by calling the CLC API
    :param clc: clc-sdk instance to use
    :param module: module to validate
    :return: Int value for Memory
    """
    memory = module.params.get('memory')
    # NOTE(review): same concern as _find_cpu -- 'group_id' is not in the
    # argument spec, so this appears to always be None here; confirm.
    group_id = module.params.get('group_id')
    alias = module.params.get('alias')
    state = module.params.get('state')
    if not memory and state == 'present':
        # Fall back to the group's default memory size from the CLC API.
        group = clc.v2.Group(id=group_id,
                             alias=alias)
        if group.Defaults("memory"):
            memory = group.Defaults("memory")
        else:
            module.fail_json(msg=str(
                "Can\'t determine a default memory value. Please provide a value for memory."))
    return memory
@staticmethod
def _find_description(module):
    """
    Return the configured description, defaulting to the server name
    when no (truthy) description was supplied.
    :param module: the module to validate
    :return: string description
    """
    return module.params.get('description') or module.params.get('name')
@staticmethod
def _validate_types(module):
    """
    Validate that type and storage_type are set appropriately, and fail if not
    :param module: the module to validate
    :return: none
    """
    state = module.params.get('state')
    # Lower-case both values defensively before comparing.
    server_type = module.params.get(
        'type').lower() if module.params.get('type') else None
    storage_type = module.params.get(
        'storage_type').lower() if module.params.get('storage_type') else None
    if state == "present":
        # NOTE(review): 'premium' is accepted here but is not among the
        # storage_type choices in the argument spec -- confirm the
        # intended set of valid values.
        if server_type == "standard" and storage_type not in (
                "standard", "premium"):
            module.fail_json(
                msg=str("Standard VMs must have storage_type = 'standard' or 'premium'"))
        if server_type == "hyperscale" and storage_type != "hyperscale":
            module.fail_json(
                msg=str("Hyperscale VMs must have storage_type = 'hyperscale'"))
@staticmethod
def _validate_name(module):
    """
    Validate that name is the correct length if provided, fail if it's not
    :param module: the module to validate
    :return: none
    """
    server_name = module.params.get('name')
    state = module.params.get('state')
    # Guard against a missing name as well: the argument spec has no
    # 'required' constraint on name, so it may be None here, and the
    # original len(None) call raised TypeError instead of failing cleanly.
    if state == 'present' and (
            server_name is None or
            len(server_name) < 1 or len(server_name) > 6):
        module.fail_json(msg=str(
            "When state = 'present', name must be a string with a minimum length of 1 and a maximum length of 6"))
@staticmethod
def _find_ttl(clc, module):
    """
    Validate that TTL is > 3600 if set, and fail if not
    :param clc: clc-sdk instance to use
    :param module: module to validate
    :return: validated ttl converted to an absolute Zulu timestamp, or None
    """
    ttl = module.params.get('ttl')
    if ttl:
        # NOTE(review): the argument spec declares no type for 'ttl', so
        # it may arrive as a string; this comparison and the addition
        # below assume an int -- confirm.
        if ttl <= 3600:
            return module.fail_json(msg=str("Ttl cannot be <= 3600"))
        else:
            # Convert the relative TTL (seconds) into an absolute
            # expiration timestamp for the CLC API.
            ttl = clc.v2.time_utils.SecondsToZuluTS(int(time.time()) + ttl)
    return ttl
@staticmethod
def _find_template_id(module, datacenter):
    """
    Look up the template id in the given datacenter; only performed for
    state 'present' on non-bareMetal servers. Fails the module when the
    search raises.
    :param module: the module to validate
    :param datacenter: the datacenter to search for the template
    :return: a valid clc template id, or None when no lookup is needed
    """
    params = module.params
    template_id = None
    if params.get('state') == 'present' and params.get('type') != 'bareMetal':
        search_term = params.get('template')
        try:
            # Take the first search hit as the template.
            template_id = datacenter.Templates().Search(search_term)[0].id
        except CLCException:
            module.fail_json(
                msg=str(
                    "Unable to find a template: " +
                    search_term +
                    " in location: " +
                    datacenter.id))
    return template_id
@staticmethod
def _find_network_id(module, datacenter):
    """
    Validate the provided network id or return a default.
    :param module: the module to validate
    :param datacenter: the datacenter to search for a network id
    :return: a valid network id
    """
    network_id = module.params.get('network_id')
    if not network_id:
        try:
            # Default to the first network defined in the datacenter.
            network_id = datacenter.Networks().networks[0].id
            # -- added for clc-sdk 2.23 compatibility
            # datacenter_networks = clc_sdk.v2.Networks(
            #   networks_lst=datacenter._DeploymentCapabilities()['deployableNetworks'])
            # network_id = datacenter_networks.networks[0].id
            # -- end
        except CLCException:
            module.fail_json(
                msg=str(
                    "Unable to find a network in location: " +
                    datacenter.id))
    return network_id
@staticmethod
def _find_aa_policy_id(clc, module):
    """
    Resolve the anti-affinity policy id, looking it up by name when only
    the name was supplied; fail the module if the named policy is missing.
    :param clc: the clc-sdk instance
    :param module: the module to validate
    :return: aa_policy_id: the anti affinity policy id of the given name.
    """
    policy_id = module.params.get('anti_affinity_policy_id')
    policy_name = module.params.get('anti_affinity_policy_name')
    # Nothing to resolve when an id was given, or when neither was given.
    if policy_id or not policy_name:
        return policy_id
    policy_id = ClcServer._get_anti_affinity_policy_id(
        clc,
        module,
        module.params.get('alias'),
        policy_name)
    if not policy_id:
        module.fail_json(
            msg='No anti affinity policy was found with policy name : %s' % policy_name)
    return policy_id
@staticmethod
def _find_alert_policy_id(clc, module):
    """
    Resolve the alert policy id, looking it up by name when only the name
    was supplied; fail the module if the named policy does not exist.
    :param clc: the clc-sdk instance
    :param module: the module to validate
    :return: alert_policy_id: the alert policy id of the given name.
    """
    policy_id = module.params.get('alert_policy_id')
    policy_name = module.params.get('alert_policy_name')
    # Nothing to resolve when an id was given, or when neither was given.
    if policy_id or not policy_name:
        return policy_id
    policy_id = ClcServer._get_alert_policy_id_by_name(
        clc=clc,
        module=module,
        alias=module.params.get('alias'),
        alert_policy_name=policy_name
    )
    if not policy_id:
        module.fail_json(
            msg='No alert policy exist with name : %s' % policy_name)
    return policy_id
def _create_servers(self, module, clc, override_count=None):
    """
    Create New Servers in CLC cloud
    :param module: the AnsibleModule object
    :param clc: the clc-sdk instance to use
    :param override_count: optional server count overriding the 'count'
        param (used by _enforce_count when scaling up)
    :return: a list of dictionaries with server information about the servers that were created
    """
    p = module.params
    request_list = []
    servers = []
    server_dict_array = []
    created_server_ids = []
    partial_created_servers_ids = []
    add_public_ip = p.get('add_public_ip')
    public_ip_protocol = p.get('public_ip_protocol')
    public_ip_ports = p.get('public_ip_ports')
    # Collapse the validated module params into the payload consumed by
    # _create_clc_server.
    params = {
        'name': p.get('name'),
        'template': p.get('template'),
        'group_id': p.get('group'),
        'network_id': p.get('network_id'),
        'cpu': p.get('cpu'),
        'memory': p.get('memory'),
        'alias': p.get('alias'),
        'password': p.get('password'),
        'ip_address': p.get('ip_address'),
        'storage_type': p.get('storage_type'),
        'type': p.get('type'),
        'primary_dns': p.get('primary_dns'),
        'secondary_dns': p.get('secondary_dns'),
        'additional_disks': p.get('additional_disks'),
        'custom_fields': p.get('custom_fields'),
        'ttl': p.get('ttl'),
        'managed_os': p.get('managed_os'),
        'description': p.get('description'),
        'source_server_password': p.get('source_server_password'),
        'cpu_autoscale_policy_id': p.get('cpu_autoscale_policy_id'),
        'anti_affinity_policy_id': p.get('anti_affinity_policy_id'),
        'packages': p.get('packages'),
        'configuration_id': p.get('configuration_id'),
        'os_type': p.get('os_type')
    }
    count = override_count if override_count else p.get('count')
    # A zero count is a no-op; report changed=False with empty results.
    changed = False if count == 0 else True
    if not changed:
        return server_dict_array, created_server_ids, partial_created_servers_ids, changed
    for i in range(0, count):
        # In check mode no requests are issued, so the result lists stay empty.
        if not module.check_mode:
            req = self._create_clc_server(clc=clc,
                                          module=module,
                                          server_params=params)
            server = req.requests[0].Server()
            request_list.append(req)
            servers.append(server)
    # Block until all provisioning requests finish, then re-read state.
    self._wait_for_requests(module, request_list)
    self._refresh_servers(module, servers)
    ip_failed_servers = self._add_public_ip_to_servers(
        module=module,
        should_add_public_ip=add_public_ip,
        servers=servers,
        public_ip_protocol=public_ip_protocol,
        public_ip_ports=public_ip_ports)
    ap_failed_servers = self._add_alert_policy_to_servers(clc=clc,
                                                          module=module,
                                                          servers=servers)
    for server in servers:
        if server in ip_failed_servers or server in ap_failed_servers:
            # Server exists but a post-provisioning step failed: report
            # it as partially created rather than created.
            partial_created_servers_ids.append(server.id)
        else:
            # reload server details
            server = clc.v2.Server(server.id)
            server.data['ipaddress'] = server.details[
                'ipAddresses'][0]['internal']
            if add_public_ip and len(server.PublicIPs().public_ips) > 0:
                server.data['publicip'] = str(
                    server.PublicIPs().public_ips[0])
            created_server_ids.append(server.id)
            server_dict_array.append(server.data)
    return server_dict_array, created_server_ids, partial_created_servers_ids, changed
def _enforce_count(self, module, clc):
    """
    Enforce that there is the right number of servers in the provided group.
    Starts or stops servers as necessary.
    :param module: the AnsibleModule object
    :param clc: the clc-sdk instance to use
    :return: a list of dictionaries with server information about the servers that were created or deleted
    """
    p = module.params
    changed = False
    count_group = p.get('count_group')
    datacenter = ClcServer._find_datacenter(clc, module)
    exact_count = p.get('exact_count')
    server_dict_array = []
    partial_servers_ids = []
    changed_server_ids = []
    # fail here if the exact count was specified without filtering
    # on a group, as this may lead to a undesired removal of instances
    if exact_count and count_group is None:
        return module.fail_json(
            msg="you must use the 'count_group' option with exact_count")
    servers, running_servers = ClcServer._find_running_servers_by_group(
        module, datacenter, count_group)
    if len(running_servers) == exact_count:
        # Already converged; nothing to do.
        changed = False
    elif len(running_servers) < exact_count:
        # Scale up: create exactly the shortfall.
        to_create = exact_count - len(running_servers)
        server_dict_array, changed_server_ids, partial_servers_ids, changed \
            = self._create_servers(module, clc, override_count=to_create)
        for server in server_dict_array:
            running_servers.append(server)
    elif len(running_servers) > exact_count:
        # Scale down: delete the excess, picking the lowest-sorting ids.
        to_remove = len(running_servers) - exact_count
        all_server_ids = sorted([x.id for x in running_servers])
        remove_ids = all_server_ids[0:to_remove]
        (changed, server_dict_array, changed_server_ids) \
            = ClcServer._delete_servers(module, clc, remove_ids)
    return server_dict_array, changed_server_ids, partial_servers_ids, changed
@staticmethod
def _wait_for_requests(module, request_list):
    """
    Block until server provisioning requests are completed, unless the
    'wait' param is false. Fails the module if any request failed.
    :param module: the AnsibleModule object
    :param request_list: a list of clc-sdk.Request instances
    :return: none
    """
    if not module.params.get('wait'):
        return
    # Requests.WaitUntilComplete() returns the count of failed requests
    failures = sum(req.WaitUntilComplete() for req in request_list)
    if failures > 0:
        module.fail_json(msg='Unable to process server request')
@staticmethod
def _refresh_servers(module, servers):
    """
    Re-fetch the state of every server in the list, failing the module
    on the first API error encountered.
    :param module: the AnsibleModule object
    :param servers: list of clc-sdk.Server instances to refresh
    :return: none
    """
    for srv in servers:
        try:
            srv.Refresh()
        except CLCException as ex:
            module.fail_json(msg='Unable to refresh the server {0}. {1}'.format(
                srv.id, ex.message
            ))
@staticmethod
def _add_public_ip_to_servers(
module,
should_add_public_ip,
servers,
public_ip_protocol,
public_ip_ports):
"""
Create a public IP for servers
:param module: the AnsibleModule object
:param should_add_public_ip: boolean - whether or not to provision a public ip for servers. Skipped if False
:param servers: List of servers to add public ips to
:param public_ip_protocol: a protocol to allow for the public ips
:param public_ip_ports: list of ports to allow for the public ips
:return: none
"""
failed_servers = []
if not should_add_public_ip:
return failed_servers
ports_lst = []
request_list = []
server = None
for port in public_ip_ports:
ports_lst.append(
{'protocol': public_ip_protocol, 'port': port})
try:
if not module.check_mode:
for server in servers:
request = server.PublicIPs().Add(ports_lst)
request_list.append(request)
except APIFailedResponse:
failed_servers.append(server)
ClcServer._wait_for_requests(module, request_list)
return failed_servers
@staticmethod
def _add_alert_policy_to_servers(clc, module, servers):
"""
Associate the alert policy to servers
:param clc: the clc-sdk instance to use
:param module: the AnsibleModule object
:param servers: List of servers to add alert policy to
:return: failed_servers: the list of servers which failed while associating alert policy
"""
failed_servers = []
p = module.params
alert_policy_id = p.get('alert_policy_id')
alias = p.get('alias')
if alert_policy_id and not module.check_mode:
for server in servers:
try:
ClcServer._add_alert_policy_to_server(
clc=clc,
alias=alias,
server_id=server.id,
alert_policy_id=alert_policy_id)
except CLCException:
failed_servers.append(server)
return failed_servers
@staticmethod
def _add_alert_policy_to_server(
clc, alias, server_id, alert_policy_id):
"""
Associate an alert policy to a clc server
:param clc: the clc-sdk instance to use
:param alias: the clc account alias
:param server_id: The clc server id
:param alert_policy_id: the alert policy id to be associated to the server
:return: none
"""
try:
clc.v2.API.Call(
method='POST',
url='servers/%s/%s/alertPolicies' % (alias, server_id),
payload=json.dumps(
{
'id': alert_policy_id
}))
except APIFailedResponse as e:
raise CLCException(
'Failed to associate alert policy to the server : {0} with Error {1}'.format(
server_id, str(e.response_text)))
@staticmethod
def _get_alert_policy_id_by_name(clc, module, alias, alert_policy_name):
"""
Returns the alert policy id for the given alert policy name
:param clc: the clc-sdk instance to use
:param module: the AnsibleModule object
:param alias: the clc account alias
:param alert_policy_name: the name of the alert policy
:return: alert_policy_id: the alert policy id
"""
alert_policy_id = None
policies = clc.v2.API.Call('GET', '/v2/alertPolicies/%s' % alias)
if not policies:
return alert_policy_id
for policy in policies.get('items'):
if policy.get('name') == alert_policy_name:
if not alert_policy_id:
alert_policy_id = policy.get('id')
else:
return module.fail_json(
msg='multiple alert policies were found with policy name : %s' % alert_policy_name)
return alert_policy_id
    @staticmethod
    def _delete_servers(module, clc, server_ids):
        """
        Delete the servers on the provided list
        :param module: the AnsibleModule object
        :param clc: the clc-sdk instance to use
        :param server_ids: list of server ids to delete
        :return: (changed, server_dict_array, terminated_server_ids);
                 changed is always True and server_dict_array always empty
        """
        terminated_server_ids = []
        server_dict_array = []
        request_list = []
        if not isinstance(server_ids, list) or len(server_ids) < 1:
            return module.fail_json(
                msg='server_ids should be a list of servers, aborting')
        servers = clc.v2.Servers(server_ids).Servers()
        for server in servers:
            # In check mode no delete request is queued, but the servers are
            # still reported below as if they had been terminated.
            if not module.check_mode:
                request_list.append(server.Delete())
        ClcServer._wait_for_requests(module, request_list)
        for server in servers:
            terminated_server_ids.append(server.id)
        return True, server_dict_array, terminated_server_ids
    @staticmethod
    def _start_stop_servers(module, clc, server_ids):
        """
        Start or Stop the servers on the provided list
        :param module: the AnsibleModule object
        :param clc: the clc-sdk instance to use
        :param server_ids: list of servers to start or stop
        :return: (changed, server_dict_array, result_server_ids) describing
                 the servers that were started or stopped
        """
        p = module.params
        state = p.get('state')
        changed = False
        changed_servers = []
        server_dict_array = []
        result_server_ids = []
        request_list = []
        if not isinstance(server_ids, list) or len(server_ids) < 1:
            return module.fail_json(
                msg='server_ids should be a list of servers, aborting')
        servers = clc.v2.Servers(server_ids).Servers()
        for server in servers:
            # Only touch servers that are not already in the requested state.
            if server.powerState != state:
                changed_servers.append(server)
                if not module.check_mode:
                    request_list.append(
                        ClcServer._change_server_power_state(
                            module,
                            server,
                            state))
                changed = True
        ClcServer._wait_for_requests(module, request_list)
        ClcServer._refresh_servers(module, changed_servers)
        # set() de-duplicates: every changed server is also in `servers`.
        for server in set(changed_servers + servers):
            try:
                # Best effort: not every server has internal/public addresses.
                server.data['ipaddress'] = server.details[
                    'ipAddresses'][0]['internal']
                server.data['publicip'] = str(
                    server.PublicIPs().public_ips[0])
            except (KeyError, IndexError):
                pass
            server_dict_array.append(server.data)
            result_server_ids.append(server.id)
        return changed, server_dict_array, result_server_ids
    @staticmethod
    def _change_server_power_state(module, server, state):
        """
        Change the server powerState
        :param module: the module to check for intended state
        :param server: the server to start or stop
        :param state: the intended powerState for the server ('started' starts it;
                      anything else stops it)
        :return: the request object from the clc-sdk call, or None on failure
        """
        result = None
        try:
            if state == 'started':
                result = server.PowerOn()
            else:
                # Try to shut down the server and fall back to power off when unable to shut down.
                result = server.ShutDown()
                if result and hasattr(result, 'requests') and result.requests[0]:
                    # Graceful shutdown was accepted by the API; done.
                    return result
                else:
                    result = server.PowerOff()
        except CLCException:
            module.fail_json(
                msg='Unable to change power state for server {0}'.format(
                    server.id))
        return result
@staticmethod
def _find_running_servers_by_group(module, datacenter, count_group):
"""
Find a list of running servers in the provided group
:param module: the AnsibleModule object
:param datacenter: the clc-sdk.Datacenter instance to use to lookup the group
:param count_group: the group to count the servers
:return: list of servers, and list of running servers
"""
group = ClcServer._find_group(
module=module,
datacenter=datacenter,
lookup_group=count_group)
servers = group.Servers().Servers()
running_servers = []
for server in servers:
if server.status == 'active' and server.powerState == 'started':
running_servers.append(server)
return servers, running_servers
    @staticmethod
    def _find_group(module, datacenter, lookup_group=None):
        """
        Find a server group in a datacenter by calling the CLC API
        :param module: the AnsibleModule instance
        :param datacenter: clc-sdk.Datacenter instance to search for the group
        :param lookup_group: string name of the group to search for; defaults
                             to the module's 'group' parameter when omitted
        :return: clc-sdk.Group instance (fails the module when not found)
        """
        if not lookup_group:
            lookup_group = module.params.get('group')
        try:
            return datacenter.Groups().Get(lookup_group)
        except CLCException:
            pass
        # The search above only acts on the top-level groups; fall back to a
        # recursive search of all subgroups.
        result = ClcServer._find_group_recursive(
            module,
            datacenter.Groups(),
            lookup_group)
        if result is None:
            module.fail_json(
                msg=str(
                    "Unable to find group: " +
                    lookup_group +
                    " in location: " +
                    datacenter.id))
        return result
    @staticmethod
    def _find_group_recursive(module, group_list, lookup_group):
        """
        Find a server group by recursively walking the group tree
        :param module: the AnsibleModule instance to use
        :param group_list: a clc-sdk.Groups instance whose subtree is searched
        :param lookup_group: the group name to look for
        :return: the matching clc-sdk.Group instance, or None when not found
        """
        result = None
        for group in group_list.groups:
            subgroups = group.Subgroups()
            try:
                # A direct child match wins immediately.
                return subgroups.Get(lookup_group)
            except CLCException:
                # Not a direct child of this group; descend one level.
                result = ClcServer._find_group_recursive(
                    module,
                    subgroups,
                    lookup_group)
            if result is not None:
                break
        return result
    @staticmethod
    def _create_clc_server(
            clc,
            module,
            server_params):
        """
        Call the CLC Rest API to Create a Server
        :param clc: the clc-python-sdk instance to use
        :param module: the AnsibleModule instance to use
        :param server_params: a dictionary of params to use to create the servers
        :return: clc-sdk.Request object linked to the queued server request
        """
        try:
            res = clc.v2.API.Call(
                method='POST',
                url='servers/%s' %
                (server_params.get('alias')),
                payload=json.dumps(
                    {
                        'name': server_params.get('name'),
                        'description': server_params.get('description'),
                        'groupId': server_params.get('group_id'),
                        'sourceServerId': server_params.get('template'),
                        'isManagedOS': server_params.get('managed_os'),
                        'primaryDNS': server_params.get('primary_dns'),
                        'secondaryDNS': server_params.get('secondary_dns'),
                        'networkId': server_params.get('network_id'),
                        'ipAddress': server_params.get('ip_address'),
                        'password': server_params.get('password'),
                        'sourceServerPassword': server_params.get('source_server_password'),
                        'cpu': server_params.get('cpu'),
                        'cpuAutoscalePolicyId': server_params.get('cpu_autoscale_policy_id'),
                        'memoryGB': server_params.get('memory'),
                        'type': server_params.get('type'),
                        'storageType': server_params.get('storage_type'),
                        'antiAffinityPolicyId': server_params.get('anti_affinity_policy_id'),
                        'customFields': server_params.get('custom_fields'),
                        'additionalDisks': server_params.get('additional_disks'),
                        'ttl': server_params.get('ttl'),
                        'packages': server_params.get('packages'),
                        'configurationId': server_params.get('configuration_id'),
                        'osType': server_params.get('os_type')}))
            result = clc.v2.Requests(res)
        except APIFailedResponse as ex:
            return module.fail_json(msg='Unable to create the server: {0}. {1}'.format(
                server_params.get('name'),
                ex.response_text
            ))
        #
        # Patch the Request object so that it returns a valid server.
        # Find the server's UUID from the API response.
        server_uuid = [obj['id']
                       for obj in res['links'] if obj['rel'] == 'self'][0]
        # Replace the request's Server method with a closure that looks the
        # server up by UUID (with retries), so callers get a usable Server.
        result.requests[0].Server = lambda: ClcServer._find_server_by_uuid_w_retry(
            clc,
            module,
            server_uuid,
            server_params.get('alias'))
        return result
@staticmethod
def _get_anti_affinity_policy_id(clc, module, alias, aa_policy_name):
"""
retrieves the anti affinity policy id of the server based on the name of the policy
:param clc: the clc-sdk instance to use
:param module: the AnsibleModule object
:param alias: the CLC account alias
:param aa_policy_name: the anti affinity policy name
:return: aa_policy_id: The anti affinity policy id
"""
aa_policy_id = None
try:
aa_policies = clc.v2.API.Call(method='GET',
url='antiAffinityPolicies/%s' % alias)
except APIFailedResponse as ex:
return module.fail_json(msg='Unable to fetch anti affinity policies for account: {0}. {1}'.format(
alias, ex.response_text))
for aa_policy in aa_policies.get('items'):
if aa_policy.get('name') == aa_policy_name:
if not aa_policy_id:
aa_policy_id = aa_policy.get('id')
else:
return module.fail_json(
msg='multiple anti affinity policies were found with policy name : %s' % aa_policy_name)
return aa_policy_id
#
# This is the function that gets patched to the Request.server object using a lamda closure
#
@staticmethod
def _find_server_by_uuid_w_retry(
clc, module, svr_uuid, alias=None, retries=5, back_out=2):
"""
Find the clc server by the UUID returned from the provisioning request. Retry the request if a 404 is returned.
:param clc: the clc-sdk instance to use
:param module: the AnsibleModule object
:param svr_uuid: UUID of the server
:param retries: the number of retry attempts to make prior to fail. default is 5
:param alias: the Account Alias to search
:return: a clc-sdk.Server instance
"""
if not alias:
alias = clc.v2.Account.GetAlias()
# Wait and retry if the api returns a 404
while True:
retries -= 1
try:
server_obj = clc.v2.API.Call(
method='GET', url='servers/%s/%s?uuid=true' %
(alias, svr_uuid))
server_id = server_obj['id']
server = clc.v2.Server(
id=server_id,
alias=alias,
server_obj=server_obj)
return server
except APIFailedResponse as e:
if e.response_status_code != 404:
return module.fail_json(
msg='A failure response was received from CLC API when '
'attempting to get details for a server: UUID=%s, Code=%i, Message=%s' %
(svr_uuid, e.response_status_code, e.message))
if retries == 0:
return module.fail_json(
msg='Unable to reach the CLC API after 5 attempts')
time.sleep(back_out)
back_out *= 2
    @staticmethod
    def _set_user_agent(clc):
        # Tag outgoing SDK requests so CLC can identify traffic generated by
        # this Ansible module.  Older clc-sdk versions lack
        # SetRequestsSession, hence the hasattr guard.
        if hasattr(clc, 'SetRequestsSession'):
            agent_string = "ClcAnsibleModule/" + __version__
            ses = requests.Session()
            ses.headers.update({"Api-Client": agent_string})
            # requests.Session always provides a default User-Agent header.
            ses.headers['User-Agent'] += " " + agent_string
            clc.SetRequestsSession(ses)
def main():
    """
    The main function. Instantiates the module and calls process_request.
    :return: none
    """
    argument_dict = ClcServer._define_module_argument_spec()
    # supports_check_mode=True: the helpers above skip mutating API calls
    # whenever module.check_mode is set.
    module = AnsibleModule(supports_check_mode=True, **argument_dict)
    clc_server = ClcServer(module)
    clc_server.process_request()


if __name__ == '__main__':
    main()
|
unknown
|
codeparrot/codeparrot-clean
| ||
from email.mime.text import MIMEText
from email.utils import formatdate
import smtplib
from flask import Blueprint, request, render_template, redirect, url_for, abort, current_app, flash, g
from flask.ext.babel import _
from werkzeug.exceptions import ServiceUnavailable
from sqlalchemy import func
from sqlalchemy.orm import joinedload
from skylines.database import db
from skylines.model import User
from skylines.model.event import create_new_user_event
from skylines.frontend.forms import CreatePilotForm, RecoverStep1Form, RecoverStep2Form
users_blueprint = Blueprint('users', 'skylines')
@users_blueprint.route('/')
def index():
    """List all users, sorted case-insensitively by name."""
    # Eager-load the club relation to avoid N+1 queries in the template.
    users = User.query() \
        .options(joinedload(User.club)) \
        .order_by(func.lower(User.name))
    return render_template('users/list.jinja',
                           active_page='settings',
                           users=users)
@users_blueprint.route('/new', methods=['GET', 'POST'])
def new():
    """Render the registration form; create the user on a valid POST."""
    form = CreatePilotForm()
    if form.validate_on_submit():
        return new_post(form)
    return render_template('users/new.jinja', form=form)
def new_post(form):
    """Create a new User from a validated CreatePilotForm and log the event."""
    user = User(
        first_name=form.first_name.data,
        last_name=form.last_name.data,
        email_address=form.email_address.data,
        password=form.password.data
    )
    # Remember the registration IP for abuse tracking.
    user.created_ip = request.remote_addr
    db.session.add(user)
    create_new_user_event(user)
    db.session.commit()
    flash(_('Welcome to SkyLines, %(user)s! You can now log in and share your flights with the world!', user=user))
    return redirect(url_for('index'))
def hex(value):
    # Parse a hexadecimal string into an int; used as the `type=` converter
    # for the recover-key query parameter in recover() below.
    # NOTE(review): shadows the builtin hex() in this module; kept as-is
    # because recover() references it by this name.
    return int(value, 16)
@users_blueprint.route('/recover', methods=['GET', 'POST'])
def recover():
    """Dispatch password recovery: step 1 (request a link) when no key is
    given, step 2 (set a new password) when a hex recover key is supplied."""
    key = request.values.get('key', type=hex)
    if key is None:
        return recover_step1()
    else:
        return recover_step2(key)
def recover_step1():
    """Show the email-address form; send the recovery mail on a valid POST."""
    form = RecoverStep1Form()
    if form.validate_on_submit():
        return recover_step1_post(form)
    return render_template('users/recover_step1.jinja', form=form)
def recover_step1_post(form):
    """Generate a recover key for the user behind the submitted email
    address and mail them the recovery link (404 when unknown)."""
    user = User.by_email_address(form.email_address.data)
    if not user:
        abort(404)
    user.generate_recover_key(request.remote_addr)
    send_recover_mail(user)
    # Consistency fix: wrap the message in _() like every other flash()
    # in this module so it can be translated.
    flash(_('Check your email, we have sent you a link to recover your password.'))
    db.session.commit()
    return redirect(url_for('index'))
def send_recover_mail(user):
    """Email *user* a password recovery link containing their recover key.

    :raises ServiceUnavailable: when the SMTP server cannot be reached.
    """
    text = u"""Hi %s,
you have asked to recover your password (from IP %s). To enter a new
password, click on the following link:
http://skylines.aero/users/recover?key=%x
The SkyLines Team
""" % (unicode(user), request.remote_addr, user.recover_key)
    msg = MIMEText(text.encode('utf-8'), 'plain', 'utf-8')
    msg['Subject'] = 'SkyLines password recovery'
    msg['From'] = current_app.config['EMAIL_FROM']
    msg['To'] = user.email_address.encode('ascii')
    msg['Date'] = formatdate(localtime=1)
    try:
        smtp = smtplib.SMTP(current_app.config['SMTP_SERVER'])
        smtp.ehlo()
        smtp.sendmail(current_app.config['EMAIL_FROM'].encode('ascii'),
                      user.email_address.encode('ascii'), msg.as_string())
        smtp.quit()
    except (smtplib.SMTPException, IOError):
        # Bug fix: the previous bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt.  Only SMTP protocol errors and socket/IO errors
        # should be translated into a 503 for the user.
        raise ServiceUnavailable(description=_(
            "The mail server is currently not reachable. "
            "Please try again later or contact the developers."))
def recover_step2(key):
    """Show the new-password form for a valid recover *key* (404 otherwise)."""
    user = User.by_recover_key(key)
    if not user:
        abort(404)
    # Echo the key back through the form so the POST can re-validate it.
    form = RecoverStep2Form(key='%x' % key)
    if form.validate_on_submit():
        return recover_step2_post(key, form)
    return render_template('users/recover_step2.jinja', form=form)
def recover_step2_post(key, form):
    """Set the new password for the user behind *key* and invalidate the key."""
    user = User.by_recover_key(key)
    if not user:
        abort(404)
    user.password = form.password.data
    # One-shot key: clear it so the recovery link cannot be reused.
    user.recover_key = None
    flash(_('Password changed.'))
    db.session.commit()
    return redirect(url_for('index'))
@users_blueprint.route('/generate_keys')
def generate_keys():
    """Hidden method that generates missing tracking keys.

    Restricted to managers; creates a tracking key for every user that does
    not have one yet.
    """
    if not g.current_user or not g.current_user.is_manager():
        abort(403)
    for user in User.query():
        if user.tracking_key is None:
            user.generate_tracking_key()
    db.session.commit()
    return redirect(url_for('.index'))
|
unknown
|
codeparrot/codeparrot-clean
| ||
/*
* Copyright 2010-2024 JetBrains s.r.o. and Kotlin Programming Language contributors.
* Use of this source code is governed by the Apache 2.0 license that can be found in the license/LICENSE.txt file.
*/
package org.jetbrains.kotlin.analysis.api.fir.test.cases.generated.cases.components.containingDeclarationProvider;
import com.intellij.testFramework.TestDataPath;
import org.jetbrains.kotlin.test.util.KtTestUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.kotlin.analysis.api.fir.test.configurators.AnalysisApiFirTestConfiguratorFactory;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.AnalysisApiTestConfiguratorFactoryData;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.AnalysisApiTestConfigurator;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.TestModuleKind;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.FrontendKind;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.AnalysisSessionMode;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.AnalysisApiMode;
import org.jetbrains.kotlin.analysis.api.impl.base.test.cases.components.containingDeclarationProvider.AbstractContainingDeclarationProviderForSetterParameterTest;
import org.jetbrains.kotlin.test.TestMetadata;
import org.junit.jupiter.api.Test;
import java.io.File;
import java.util.regex.Pattern;
/** This class is generated by {@link org.jetbrains.kotlin.generators.tests.analysis.api.GenerateAnalysisApiTestsKt}. DO NOT MODIFY MANUALLY */
@SuppressWarnings("all")
@TestMetadata("analysis/analysis-api/testData/components/containingDeclarationProvider/containingDeclarationForSetterParameter")
@TestDataPath("$PROJECT_ROOT")
public class FirIdeNormalAnalysisScriptSourceModuleContainingDeclarationProviderForSetterParameterTestGenerated extends AbstractContainingDeclarationProviderForSetterParameterTest {
    @NotNull
    @Override
    public AnalysisApiTestConfigurator getConfigurator() {
        // Runs against the FIR (K2) frontend with script (.kts) sources in a
        // normal analysis session using IDE analysis mode.
        return AnalysisApiFirTestConfiguratorFactory.INSTANCE.createConfigurator(
            new AnalysisApiTestConfiguratorFactoryData(
                FrontendKind.Fir,
                TestModuleKind.ScriptSource,
                AnalysisSessionMode.Normal,
                AnalysisApiMode.Ide
            )
        );
    }

    @Test
    public void testAllFilesPresentInContainingDeclarationForSetterParameter() {
        // Guards against .kts test data files that exist on disk but have no
        // generated test method (this file is regenerated, not hand-edited).
        KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/containingDeclarationProvider/containingDeclarationForSetterParameter"), Pattern.compile("^(.+)\\.kts$"), null, true);
    }
}
|
java
|
github
|
https://github.com/JetBrains/kotlin
|
analysis/analysis-api-fir/tests-gen/org/jetbrains/kotlin/analysis/api/fir/test/cases/generated/cases/components/containingDeclarationProvider/FirIdeNormalAnalysisScriptSourceModuleContainingDeclarationProviderForSetterParameterTestGenerated.java
|
#!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test the CHECKLOCKTIMEVERIFY (BIP65) soft-fork logic
#
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class BIP65Test(BitcoinTestFramework):
    """Exercise the BIP65 (CHECKLOCKTIMEVERIFY) version-4 soft-fork rollout.

    Node 0 validates, node 1 mines version=3 blocks and node 2 mines
    version=4 blocks.  Once 950 of the last 1000 blocks are version>=4,
    version=3 blocks must be rejected.
    """

    def __init__(self):
        super().__init__()
        self.num_nodes = 3
        self.setup_clean_chain = False

    def setup_network(self):
        self.nodes = []
        self.nodes.append(start_node(0, self.options.tmpdir, []))
        self.nodes.append(start_node(1, self.options.tmpdir, ["-blockversion=3"]))
        self.nodes.append(start_node(2, self.options.tmpdir, ["-blockversion=4"]))
        connect_nodes(self.nodes[1], 0)
        connect_nodes(self.nodes[2], 0)
        self.is_network_split = False
        self.sync_all()

    def run_test(self):
        cnt = self.nodes[0].getblockcount()

        # Mine some old-version blocks
        self.nodes[1].generate(200)
        cnt += 100
        self.sync_all()
        if (self.nodes[0].getblockcount() != cnt + 100):
            # Bug fix: 200 blocks are mined above; the message said "100".
            raise AssertionError("Failed to mine 200 version=3 blocks")

        # Mine 750 new-version blocks
        for i in range(15):
            self.nodes[2].generate(50)
        self.sync_all()
        if (self.nodes[0].getblockcount() != cnt + 850):
            raise AssertionError("Failed to mine 750 version=4 blocks")
        # TODO: check that new CHECKLOCKTIMEVERIFY rules are not enforced

        # Mine 1 new-version block
        self.nodes[2].generate(1)
        self.sync_all()
        if (self.nodes[0].getblockcount() != cnt + 851):
            raise AssertionError("Failed to mine a version=4 blocks")
        # TODO: check that new CHECKLOCKTIMEVERIFY rules are enforced

        # Mine 198 new-version blocks
        for i in range(2):
            self.nodes[2].generate(99)
        self.sync_all()
        if (self.nodes[0].getblockcount() != cnt + 1049):
            raise AssertionError("Failed to mine 198 version=4 blocks")

        # Mine 1 old-version block: still accepted, the 950/1000 threshold
        # is not yet reached.
        self.nodes[1].generate(1)
        self.sync_all()
        if (self.nodes[0].getblockcount() != cnt + 1050):
            raise AssertionError("Failed to mine a version=3 block after 949 version=4 blocks")

        # Mine 1 new-version blocks
        self.nodes[2].generate(1)
        self.sync_all()
        if (self.nodes[0].getblockcount() != cnt + 1051):
            raise AssertionError("Failed to mine a version=4 block")

        # Mine 1 old-version blocks: must now be rejected by the miner.
        try:
            self.nodes[1].generate(1)
            raise AssertionError("Succeeded to mine a version=3 block after 950 version=4 blocks")
        except JSONRPCException:
            pass
        self.sync_all()
        if (self.nodes[0].getblockcount() != cnt + 1051):
            raise AssertionError("Accepted a version=3 block after 950 version=4 blocks")

        # Mine 1 new-version blocks
        self.nodes[2].generate(1)
        self.sync_all()
        if (self.nodes[0].getblockcount() != cnt + 1052):
            raise AssertionError("Failed to mine a version=4 block")


if __name__ == '__main__':
    BIP65Test().main()
|
unknown
|
codeparrot/codeparrot-clean
| ||
from __future__ import absolute_import
import inspect
import warnings
class RemovedInDjango20Warning(PendingDeprecationWarning):
    """Category for features scheduled for removal in Django 2.0."""
    pass
class RemovedInNextVersionWarning(DeprecationWarning):
    """Category for features scheduled for removal in the next release."""
    pass
class warn_about_renamed_method(object):
    """Decorator factory that makes calls to a renamed method emit a warning.

    Applied to the *old* method: each call warns that
    ``<class_name>.<old_method_name>`` is deprecated in favour of
    ``<new_method_name>``, then delegates to the wrapped function.
    """

    def __init__(self, class_name, old_method_name, new_method_name, deprecation_warning):
        self.class_name = class_name
        self.old_method_name = old_method_name
        self.new_method_name = new_method_name
        self.deprecation_warning = deprecation_warning

    def __call__(self, f):
        def wrapped(*args, **kwargs):
            message = "`%s.%s` is deprecated, use `%s` instead." % (
                self.class_name,
                self.old_method_name,
                self.new_method_name,
            )
            # stacklevel=2 points the warning at the caller of the old method.
            warnings.warn(message, self.deprecation_warning, 2)
            return f(*args, **kwargs)
        return wrapped
class RenameMethodsBase(type):
    """
    Handles the deprecation paths when renaming a method.
    It does the following:
    1) Define the new method if missing and complain about it.
    2) Define the old method if missing.
    3) Complain whenever an old method is called.
    See #15363 for more details.
    """
    # Tuple of (old_method_name, new_method_name, deprecation_warning)
    # triples; concrete metaclass subclasses override this.
    renamed_methods = ()

    def __new__(cls, name, bases, attrs):
        new_class = super(RenameMethodsBase, cls).__new__(cls, name, bases, attrs)
        # Walk the whole MRO so renames declared on ancestors are handled too.
        for base in inspect.getmro(new_class):
            class_name = base.__name__
            for renamed_method in cls.renamed_methods:
                old_method_name = renamed_method[0]
                old_method = base.__dict__.get(old_method_name)
                new_method_name = renamed_method[1]
                new_method = base.__dict__.get(new_method_name)
                deprecation_warning = renamed_method[2]
                wrapper = warn_about_renamed_method(class_name, *renamed_method)
                # Define the new method if missing and complain about it
                if not new_method and old_method:
                    warnings.warn(
                        "`%s.%s` method should be renamed `%s`." %
                        (class_name, old_method_name, new_method_name),
                        deprecation_warning, 2)
                    setattr(base, new_method_name, old_method)
                    setattr(base, old_method_name, wrapper(old_method))
                # Define the old method as a wrapped call to the new method.
                if not old_method and new_method:
                    # NOTE(review): setattr targets `base`, so classes in the
                    # MRO are mutated, not just the class being created.
                    setattr(base, old_method_name, wrapper(new_method))
        return new_class
class DeprecationInstanceCheck(type):
    # Metaclass that makes `isinstance(x, OldAlias)` emit a deprecation
    # warning.  Classes using it must provide `alternative` (the replacement
    # name) and `deprecation_warning` (the warning category) attributes.
    def __instancecheck__(self, instance):
        warnings.warn(
            "`%s` is deprecated, use `%s` instead." % (self.__name__, self.alternative),
            self.deprecation_warning, 2
        )
        return super(DeprecationInstanceCheck, self).__instancecheck__(instance)
|
unknown
|
codeparrot/codeparrot-clean
| ||
#!/usr/bin/env python
# coding: utf-8
#
# 応答生成モジュール
# 基本的には
# - 入力と応答の対応リスト(argv[1])
# - 話者認識結果ID (argv[2])
# - 音声認識結果 (argv[3])
# を受け取って応答文および音声を生成する
#
# 前の応答への依存性を持たせたい場合は引数を追加すれば良い
import sys, os
#import goslate
import talkey
import time
from recognize import music_recognize
from record import record
from microsofttranslator import Translator
import image as im
translator = Translator('TranslateAppOnRS', 'KiRTM60FqU2CLMDrQhVdd3yeicWSztHtLiDx5JRIavA=')
#gs = goslate.Goslate()
tts = talkey.Talkey(
# These languages are given better scoring by the language detector
# to minimise the chance of it detecting a short string completely incorrectly.
# Order is not important here
preferred_languages=['en', 'af', 'el', 'fr'],
# The factor by which preferred_languages gets their score increased, defaults to 80.0
preferred_factor=80.0,
# The order of preference of using a TTS engine for a given language.
# Note, that networked engines (Google, Mary) is disabled by default, and so is dummy
# default: ['google', 'mary', 'espeak', 'festival', 'pico', 'flite', 'dummy']
# This sets eSpeak as the preferred engine, the other engines may still be used
# if eSpeak doesn't support a requested language.
engine_preference=['espeak'],
# Here you segment the configuration by engine
# Key is the engine SLUG, in this case ``espeak``
espeak={
# Specify the engine options:
'options': {
'enabled': True,
},
# Specify some default voice options
'defaults': {
'words_per_minute': 70,
'variant': 'f1',
},
# Here you specify language-specific voice options
# e.g. for english we prefer the mbrola en1 voice
'languages': {
'en': {
'voice': 'english-mb-en2',
'words_per_minute': 70
},
}
}
)
# Open JTalk speech-synthesis engine: binary name and command-line options.
#jtalkbin = '/usr/local/open_jtalk-1.07/bin/open_jtalk '
#options = ' -m syn/nitech_jp_atr503_m001.htsvoice -ow /tmp/dialogue/out.wav -x /usr/local/open_jtalk-1.07/dic'
jtalkbin = 'open_jtalk '
# Default male voice; `options2` uses a custom .htsvoice file instead.
options = '-m /usr/share/hts-voice/nitech-jp-atr503-m001/nitech_jp_atr503_m001.htsvoice -ow /tmp/dialogue/out.wav -x /var/lib/mecab/dic/open-jtalk/naist-jdic'
options2 = '-m 唱地ヨエ.htsvoice -ow /tmp/dialogue/out.wav -x /var/lib/mecab/dic/open-jtalk/naist-jdic'
# Build the speech-synthesis shell command (when using Open JTalk).
def mk_jtalk_command(answer):
    """Synthesize *answer* with the default voice, play it, remove the wav."""
    synth = 'echo "{0}" | {1}{2};'.format(answer, jtalkbin, options)
    playback = 'play -q /tmp/dialogue/out.wav; rm /tmp/dialogue/out.wav;'
    return synth + playback
def mk_jtalk_command2(answer):
    """Same as mk_jtalk_command but uses the custom voice from `options2`."""
    synth = 'echo "{0}" | {1}{2};'.format(answer, jtalkbin, options2)
    playback = 'play -q /tmp/dialogue/out.wav; rm /tmp/dialogue/out.wav;'
    return synth + playback
if __name__ == '__main__':
# 応答を辞書 reply に登録
conf = open(sys.argv[1],'r')
#conf = codecs.open(sys.argv[1],'r','utf8','ignore')
reply = {}
for line in conf:
line = line.rstrip()
a = line.split();
reply[a[0]] = a[1]
conf.close()
# 話者ID
sid = int(sys.argv[2])
#print sid
# 認識結果
asrresult = open(sys.argv[3],'r')
question = asrresult.read().rstrip()
asrresult.close()
# 話者ID と認識結果を表示
print "SPK"+str(sid)+": "+question
#話した言葉をtalk.txtへと書き込む
#f = open('talk.txt','w')
#f.write('少女:')
#f.closed
f = open('mode.txt','r')
mode = int(f.read())
f.closed
f = open('mode.txt','w')
if mode == 0:
# 応答リストから対応する応答を出力
if question == '翻訳して':
answer = '翻訳したい言葉を言ってください'
f.write('1')
f.closed
print "Silly: " + answer
os.system(mk_jtalk_command(answer))
elif question == '検索して':
answer = '検索したい言葉を言ってください'
f.write('2')
f.closed
print "Silly: " + answer
os.system(mk_jtalk_command(answer))
elif question == 'この曲何':
answer = '聞かせてください'
f.write('0')
f.closed
print "Silly: " + answer
os.system(mk_jtalk_command(answer))
record()
song, youtube = music_recognize('output.wav')
if song == False:
answer = 'すいません わかりませんでした'
print "Silly: " + answer
os.system(mk_jtalk_command(answer))
else:
answer = 'わかりました'
print "Silly: " + answer
os.system(mk_jtalk_command(answer))
answer = song
print u"Silly: " + u"この曲は " + song + u" です"
if youtube != False:
os.system('firefox '+youtube)
elif '恋愛' in question:
answer = '恋はハリケーンなのです'
#話した言葉をtalk.txtへと書き込む
# 画面の表示
#初回だけ起動を行う
os.system("nohup python image.py &")
#f.write('3')
f.closed
print "Silly: " + answer
#os.system(mk_jtalk_command(answer))
else:
if question in reply:
answer = str(reply[question])
else:
answer = 'もう一度お願いします'
f.write('0')
f.closed
print "Silly: " + answer
os.system(mk_jtalk_command(answer))
if question == '恋愛':
os.system(mk_jtalk_command2(answer))
os.system(mk_jtalk_command2('こんばんはっ!'))
os.system(mk_jtalk_command2('突然だけどイヴは暇?'))
elif mode == 1:
if question == '翻訳おわり':
answer = '翻訳おわります'
print "Silly: " + answer
f.write('0')
f.closed
os.system(mk_jtalk_command(answer))
else:
#answer = gs.translate(question, 'en')
answer = translator.translate(question.decode('utf-8'), "en")
print "Silly: " + answer
tts.say(answer)
time.sleep(5.0)
f.write('1')
f.closed
elif mode == 2:
if question == '検索おわり':
answer = '検索おわります'
print "Silly: " + answer
f.write('0')
f.closed
os.system(mk_jtalk_command(answer))
else:
answer = question + 'を検索します'
print "Silly: " + answer
os.system(mk_jtalk_command(answer))
os.system('firefox -search ' + '\''+ question + '\'')
f.write('2')
f.closed
elif mode >= 3:
if question == '恋愛おわり':
answer = '恋愛おわります'
print "Silly: " + answer
f.write('0')
f.closed
os.system("rm talk.txt")
os.system(mk_jtalk_command(answer))
else:
answer = 'ローディングなう'
if mode == 4:
if question == 'はい':
answer = 'やった!デートいこう!どこいく?'
f.write('6')
f.closed
elif question == 'いいえ':
answer = 'しょぼーん、クリスマスの日は?'
f.write('5')
f.closed
else:
answer = 'うーん、え?なんて言ったのかな?'
f.write('4')
f.closed
#話した言葉をtalk.txtへと書き込む
f = open('talk.txt','w')
f.write('少女:'+answer)
f.closed
elif mode == 5:
if question == '空いてる':
answer = 'やった!じゃあデートね!どこいく?'
f.write('6')
f.closed
f = open('talk.txt','w')
f.write('少女:'+answer)
f.closed
elif question == '空いてない': #恋愛の終了
answer = '恋愛おわります'
print "Silly: " + answer
f.write('0')
f.closed
os.system("rm talk.txt")
os.system(mk_jtalk_command(answer))
else:
answer = 'うーん、え?なんて言ったのかな?'
f.write('5')
f.closed
#話した言葉をtalk.txtへと書き込む
f = open('talk.txt','w')
f.write('少女:'+answer)
f.closed
elif mode == 6:
if '水族館' in question:
answer = '私水族館好きじゃないの...'
f.write('6')
f.closed
f = open('talk.txt','w')
f.write('少女:'+answer)
f.closed
elif 'イルミネーション' in question: #恋愛の終了
answer = '最高!楽しみにしてるね!じゃあ!'
f.write('7')
f.closed
f = open('talk.txt','w')
f.write('少女:'+answer)
f.closed
else:
answer = 'うーん、え?なんて言ったのかな?'
f.write('6')
f.closed
#話した言葉をtalk.txtへと書き込む
f = open('talk.txt','w')
f.write('少女:'+answer)
f.closed
elif mode == 7:
answer='やっほー'
f = open('mode.txt','w')
f.write('8')
f.closed
#話した言葉をtalk.txtへと書き込む
f = open('talk.txt','w')
f.write('少女:'+answer)
f.closed
elif mode == 8:
if question == 'じゃあ行こうか':
answer = 'むーなにか言うことないの?'
f.write('0')
f.closed
f = open('talk.txt','w')
f.write('少女:'+answer)
f.closed
elif question == 'かわいいね': #恋愛の終了
answer = 'うれしい!君もイケメンよ!'
f.write('0')
f.closed
f = open('talk.txt','w')
f.write('少女:'+answer)
f.closed
else:
answer = 'うーん、え?なんて言ったのかな?'
f.write('6')
f.closed
#話した言葉をtalk.txtへと書き込む
f = open('talk.txt','w')
f.write('少女:'+answer)
f.closed
#話した言葉をtalk.txtへと書き込む
f = open('talk.txt','w')
f.write('少女:'+answer)
f.closed
os.system(mk_jtalk_command2(answer))
|
unknown
|
codeparrot/codeparrot-clean
| ||
# -*- coding: utf-8 -*-
"""
requests.utils
~~~~~~~~~~~~~~
This module provides utility functions that are used within Requests
that are also useful for external consumption.
"""
import cgi
import codecs
import collections
import contextlib
import io
import os
import platform
import re
import socket
import struct
import warnings
from .__version__ import __version__
from . import certs
# to_native_string is unused here, but imported here for backwards compatibility
from ._internal_utils import to_native_string
from .compat import parse_http_list as _parse_list_header
from .compat import (
quote, urlparse, bytes, str, OrderedDict, unquote, getproxies,
proxy_bypass, urlunparse, basestring, integer_types, is_py3,
proxy_bypass_environment, getproxies_environment)
from .cookies import cookiejar_from_dict
from .structures import CaseInsensitiveDict
from .exceptions import (
InvalidURL, InvalidHeader, FileModeWarning, UnrewindableBodyError)
NETRC_FILES = ('.netrc', '_netrc')
DEFAULT_CA_BUNDLE_PATH = certs.where()
if platform.system() == 'Windows':
# provide a proxy_bypass version on Windows without DNS lookups
    def proxy_bypass_registry(host):
        """Return True if the Windows registry proxy settings indicate that
        *host* should bypass the proxy.

        Reads HKCU Internet Settings; the ``ProxyOverride`` value is a
        ``;``-separated list of glob patterns (``<local>`` matches hosts
        without a dot). Returns False if the registry cannot be read or no
        override is configured.
        """
        if is_py3:
            import winreg
        else:
            import _winreg as winreg
        try:
            internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
                r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
            proxyEnable = winreg.QueryValueEx(internetSettings,
                                              'ProxyEnable')[0]
            proxyOverride = winreg.QueryValueEx(internetSettings,
                                                'ProxyOverride')[0]
        except OSError:
            # Registry key/value missing or unreadable: no bypass.
            return False
        if not proxyEnable or not proxyOverride:
            return False
        # make a check value list from the registry entry: replace the
        # '<local>' string by the localhost entry and the corresponding
        # canonical entry.
        proxyOverride = proxyOverride.split(';')
        # now check if we match one of the registry values.
        for test in proxyOverride:
            if test == '<local>':
                if '.' not in host:
                    return True
            # Translate the registry glob into a regular expression.
            test = test.replace(".", r"\.")  # mask dots
            test = test.replace("*", r".*")  # change glob sequence
            test = test.replace("?", r".")   # change glob char
            if re.match(test, host, re.I):
                return True
        return False
    def proxy_bypass(host):  # noqa
        """Return True, if the host should be bypassed.

        Checks proxy settings gathered from the environment, if specified,
        or the registry. Environment settings take precedence: the registry
        is only consulted when no ``*_proxy`` variables are set.
        """
        if getproxies_environment():
            return proxy_bypass_environment(host)
        else:
            return proxy_bypass_registry(host)
def dict_to_sequence(d):
    """Return ``d.items()`` when *d* is mapping-like, else *d* unchanged."""
    items_method = getattr(d, 'items', None)
    if items_method is not None:
        return items_method()
    return d
def super_len(o):
    """Best-effort remaining length of *o* in bytes/items.

    Tries, in order: ``len(o)``, an ``o.len`` attribute, ``os.fstat`` on
    ``o.fileno()``, and finally seeking to the end for seekable objects
    without a real fileno (StringIO/BytesIO). The current read position
    (via ``tell``) is subtracted so partially-read objects report only
    what is left. Returns 0 when the length cannot be determined.
    """
    total_length = None
    current_position = 0
    if hasattr(o, '__len__'):
        total_length = len(o)
    elif hasattr(o, 'len'):
        total_length = o.len
    elif hasattr(o, 'fileno'):
        try:
            fileno = o.fileno()
        except io.UnsupportedOperation:
            # e.g. BytesIO: has fileno() but it raises; fall through to
            # the seek-based fallback below.
            pass
        else:
            total_length = os.fstat(fileno).st_size
            # Having used fstat to determine the file length, we need to
            # confirm that this file was opened up in binary mode.
            if 'b' not in o.mode:
                warnings.warn((
                    "Requests has determined the content-length for this "
                    "request using the binary size of the file: however, the "
                    "file has been opened in text mode (i.e. without the 'b' "
                    "flag in the mode). This may lead to an incorrect "
                    "content-length. In Requests 3.0, support will be removed "
                    "for files in text mode."),
                    FileModeWarning
                )
    if hasattr(o, 'tell'):
        try:
            current_position = o.tell()
        except (OSError, IOError):
            # This can happen in some weird situations, such as when the file
            # is actually a special file descriptor like stdin. In this
            # instance, we don't know what the length is, so set it to zero and
            # let requests chunk it instead.
            if total_length is not None:
                current_position = total_length
        else:
            if hasattr(o, 'seek') and total_length is None:
                # StringIO and BytesIO have seek but no useable fileno
                try:
                    # seek to end of file
                    o.seek(0, 2)
                    total_length = o.tell()
                    # seek back to current position to support
                    # partially read file-like objects
                    o.seek(current_position or 0)
                except (OSError, IOError):
                    total_length = 0
    if total_length is None:
        total_length = 0
    return max(0, total_length - current_position)
def get_netrc_auth(url, raise_errors=False):
    """Returns the Requests tuple auth for a given url from netrc.

    Looks for ``~/.netrc`` then ``~/_netrc`` and returns ``(login, password)``
    for the url's host, or None when no netrc file / entry exists.

    :param url: URL whose host is looked up in the netrc file.
    :param raise_errors: when True, re-raise netrc parse/permission errors
        instead of silently skipping netrc auth.
    """
    try:
        from netrc import netrc, NetrcParseError
        netrc_path = None
        for f in NETRC_FILES:
            try:
                loc = os.path.expanduser('~/{0}'.format(f))
            except KeyError:
                # os.path.expanduser can fail when $HOME is undefined and
                # getpwuid fails. See http://bugs.python.org/issue20164 &
                # https://github.com/requests/requests/issues/1846
                return
            if os.path.exists(loc):
                netrc_path = loc
                break
        # Abort early if there isn't one.
        if netrc_path is None:
            return
        ri = urlparse(url)
        # Strip port numbers from netloc. This weird `if...encode`` dance is
        # used for Python 3.2, which doesn't support unicode literals.
        splitstr = b':'
        if isinstance(url, str):
            splitstr = splitstr.decode('ascii')
        host = ri.netloc.split(splitstr)[0]
        try:
            _netrc = netrc(netrc_path).authenticators(host)
            if _netrc:
                # Return with login / password
                login_i = (0 if _netrc[0] else 1)
                return (_netrc[login_i], _netrc[2])
        except (NetrcParseError, IOError):
            # If there was a parsing error or a permissions issue reading the file,
            # we'll just skip netrc auth unless explicitly asked to raise errors.
            if raise_errors:
                raise
    # AppEngine hackiness.
    except (ImportError, AttributeError):
        pass
def guess_filename(obj):
    """Return the basename of ``obj.name`` when it looks like a real
    filesystem path; otherwise return None.

    Names wrapped in angle brackets (e.g. ``<stdin>``) are rejected.
    """
    candidate = getattr(obj, 'name', None)
    if not candidate or not isinstance(candidate, basestring):
        return None
    if candidate[0] == '<' or candidate[-1] == '>':
        return None
    return os.path.basename(candidate)
def from_key_val_list(value):
    """Build an OrderedDict from *value* when it can act as key/value pairs.

    ::

        >>> from_key_val_list([('key', 'val')])
        OrderedDict([('key', 'val')])
        >>> from_key_val_list('string')
        ValueError: need more than 1 value to unpack
        >>> from_key_val_list({'key': 'val'})
        OrderedDict([('key', 'val')])

    :raises ValueError: for scalar inputs that cannot be pairs.
    :rtype: OrderedDict
    """
    if value is None:
        return None
    scalar_types = (str, bytes, bool, int)
    if isinstance(value, scalar_types):
        raise ValueError('cannot encode objects that are not 2-tuples')
    return OrderedDict(value)
def to_key_val_list(value):
    """Take an object and test to see if it can be represented as a
    dictionary. If it can be, return a list of tuples, e.g.,

    ::

        >>> to_key_val_list([('key', 'val')])
        [('key', 'val')]
        >>> to_key_val_list({'key': 'val'})
        [('key', 'val')]
        >>> to_key_val_list('string')
        ValueError: cannot encode objects that are not 2-tuples.

    :raises ValueError: for scalar inputs that cannot be pairs.
    :rtype: list
    """
    if value is None:
        return None
    if isinstance(value, (str, bytes, bool, int)):
        raise ValueError('cannot encode objects that are not 2-tuples')
    # `collections.Mapping` moved to `collections.abc` in Python 3.3 and the
    # old alias was removed in 3.10; fall back for Python 2 compatibility.
    mapping_type = getattr(collections, 'abc', collections).Mapping
    if isinstance(value, mapping_type):
        value = value.items()
    return list(value)
# From mitsuhiko/werkzeug (used with permission).
def parse_list_header(value):
    """Parse a comma-separated list header as described by RFC 2068
    Section 2, preserving order and duplicates.

    Elements may be quoted-strings (which may contain commas); quotes are
    stripped after parsing.

    >>> parse_list_header('token, "quoted value"')
    ['token', 'quoted value']

    :param value: a string with a list header.
    :rtype: list
    """
    def _dequote(element):
        # Quoted elements get their surrounding quotes removed and their
        # escapes undone; bare tokens pass through untouched.
        if element[:1] == element[-1:] == '"':
            return unquote_header_value(element[1:-1])
        return element

    return [_dequote(element) for element in _parse_list_header(value)]
# From mitsuhiko/werkzeug (used with permission).
def parse_dict_header(value):
    """Parse a ``key=value`` list header (RFC 2068 Section 2) into a dict.

    >>> d = parse_dict_header('foo="is a fish", bar="as well"')
    >>> sorted(d.items())
    [('bar', 'as well'), ('foo', 'is a fish')]

    Keys without a value map to ``None``:

    >>> parse_dict_header('key_without_value')
    {'key_without_value': None}

    :param value: a string with a dict header.
    :rtype: dict
    """
    parsed = {}
    for token in _parse_list_header(value):
        key, sep, val = token.partition('=')
        if not sep:
            # Bare key with no '=': present, but valueless.
            parsed[token] = None
            continue
        if val[:1] == val[-1:] == '"':
            val = unquote_header_value(val[1:-1])
        parsed[key] = val
    return parsed
# From mitsuhiko/werkzeug (used with permission).
def unquote_header_value(value, is_filename=False):
    r"""Unquote a header value the way browsers actually quote them
    (reversal of :func:`quote_header_value`), not strictly per RFC —
    matching the RFC here would break IE and other browsers.

    :param value: the header value to unquote.
    :param is_filename: treat the value as a filename (UNC paths are
        returned verbatim).
    :rtype: str
    """
    if not value or not (value[0] == value[-1] == '"'):
        return value
    inner = value[1:-1]
    # UNC filenames (\\host\share) keep their leading double backslash:
    # unescaping would collapse it to a single slash. See #458.
    if is_filename and inner[:2] == '\\\\':
        return inner
    return inner.replace('\\\\', '\\').replace('\\"', '"')
def dict_from_cookiejar(cj):
    """Flatten a CookieJar into a plain ``{name: value}`` dict.

    Later cookies with the same name overwrite earlier ones.

    :param cj: CookieJar object to extract cookies from.
    :rtype: dict
    """
    return dict((cookie.name, cookie.value) for cookie in cj)
def add_dict_to_cookiejar(cj, cookie_dict):
    """Returns a CookieJar from a key/value dictionary.

    Thin wrapper over ``cookiejar_from_dict`` — presumably inserts the
    entries into *cj* and returns it; verify against ``cookies.py``.

    :param cj: CookieJar to insert cookies into.
    :param cookie_dict: Dict of key/values to insert into CookieJar.
    :rtype: CookieJar
    """
    return cookiejar_from_dict(cookie_dict, cj)
def get_encodings_from_content(content):
    """Scan an HTML/XML document string for declared encodings.

    Checks ``<meta charset=...>``, ``<meta content="...charset=...">``
    and the XML declaration, in that order. Deprecated; scheduled for
    removal in requests 3.0.

    :param content: bytestring to extract encodings from.
    """
    warnings.warn((
        'In requests 3.0, get_encodings_from_content will be removed. For '
        'more information, please see the discussion on issue #2266. (This'
        ' warning should only appear once.)'),
        DeprecationWarning)
    patterns = (
        re.compile(r'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I),
        re.compile(r'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I),
        re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]'),
    )
    found = []
    for pattern in patterns:
        found.extend(pattern.findall(content))
    return found
def get_encoding_from_headers(headers):
    """Returns encodings from given HTTP Header Dict.

    Returns the ``charset`` parameter of the Content-Type header when
    present, ``'ISO-8859-1'`` for ``text/*`` types without one (per RFC
    2616), and None otherwise.

    :param headers: dictionary to extract encoding from.
    :rtype: str
    """
    content_type = headers.get('content-type')
    if not content_type:
        return None
    # Parse the header inline instead of via cgi.parse_header: the cgi
    # module is deprecated (PEP 594) and was removed in Python 3.13.
    content_type, _, params_str = content_type.partition(';')
    params = {}
    for param in params_str.split(';'):
        key, _, value = param.strip().partition('=')
        if key:
            # cgi.parse_header lower-cased parameter names; preserve that.
            params[key.strip().lower()] = value.strip()
    if 'charset' in params:
        return params['charset'].strip("'\"")
    if 'text' in content_type:
        return 'ISO-8859-1'
def stream_decode_response_unicode(iterator, r):
    """Incrementally decode byte chunks from *iterator* using the
    response's declared encoding; undecodable bytes are replaced.

    When ``r.encoding`` is None the chunks are passed through untouched.
    """
    encoding = r.encoding
    if encoding is None:
        for chunk in iterator:
            yield chunk
        return
    decoder = codecs.getincrementaldecoder(encoding)(errors='replace')
    for chunk in iterator:
        decoded = decoder.decode(chunk)
        if decoded:
            yield decoded
    # Flush any bytes buffered by an incomplete multi-byte sequence.
    tail = decoder.decode(b'', final=True)
    if tail:
        yield tail
def iter_slices(string, slice_length):
    """Yield successive chunks of *string*, each at most *slice_length*.

    A None or non-positive length yields the whole string as one slice.
    """
    total = len(string)
    if slice_length is None or slice_length <= 0:
        slice_length = total
    start = 0
    while start < total:
        yield string[start:start + slice_length]
        start += slice_length
def get_unicode_from_response(r):
    """Return the body of *r* as unicode. Deprecated; scheduled for
    removal in requests 3.0.

    Tried:

    1. charset from content-type
    2. fall back and replace all unicode characters

    :param r: Response object to get unicode content from.
    :rtype: str
    """
    warnings.warn((
        'In requests 3.0, get_unicode_from_response will be removed. For '
        'more information, please see the discussion on issue #2266. (This'
        ' warning should only appear once.)'),
        DeprecationWarning)
    tried_encodings = []
    encoding = get_encoding_from_headers(r.headers)
    if encoding:
        # Try the declared charset first; fall through on failure.
        try:
            return str(r.content, encoding)
        except UnicodeError:
            tried_encodings.append(encoding)
    try:
        # encoding may be None here, in which case str() raises TypeError
        # and we hand back the raw bytes unchanged.
        return str(r.content, encoding, errors='replace')
    except TypeError:
        return r.content
# The unreserved URI characters (RFC 3986)
UNRESERVED_SET = frozenset(
    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + "0123456789-._~")


def unquote_unreserved(uri):
    """Un-escape percent-escape sequences that encode unreserved
    characters (RFC 3986); all reserved, illegal and non-ASCII bytes stay
    percent-encoded.

    :raises InvalidURL: on a malformed percent-escape sequence.
    :rtype: str
    """
    pieces = uri.split('%')
    # pieces[0] precedes the first '%'; every later piece starts with what
    # should be two hex digits.
    for idx in range(1, len(pieces)):
        piece = pieces[idx]
        hex_digits = piece[:2]
        if len(hex_digits) == 2 and hex_digits.isalnum():
            try:
                char = chr(int(hex_digits, 16))
            except ValueError:
                raise InvalidURL("Invalid percent-escape sequence: '%s'" % hex_digits)
            if char in UNRESERVED_SET:
                pieces[idx] = char + piece[2:]
                continue
        pieces[idx] = '%' + piece
    return ''.join(pieces)
def requote_uri(uri):
    """Re-quote the given URI.

    This function passes the given URI through an unquote/quote cycle to
    ensure that it is fully and consistently quoted.

    :rtype: str
    """
    # '%' is safe on the normal path (escapes are already normalized) but
    # NOT on the fallback path, where stray '%'s must get re-encoded.
    safe_with_percent = "!#$%&'()*+,/:;=?@[]~"
    safe_without_percent = "!#$&'()*+,/:;=?@[]~"
    try:
        # Unquote only the unreserved characters
        # Then quote only illegal characters (do not quote reserved,
        # unreserved, or '%')
        return quote(unquote_unreserved(uri), safe=safe_with_percent)
    except InvalidURL:
        # We couldn't unquote the given URI, so let's try quoting it, but
        # there may be unquoted '%'s in the URI. We need to make sure they're
        # properly quoted so they do not cause issues elsewhere.
        return quote(uri, safe=safe_without_percent)
def address_in_network(ip, net):
    """Return True when IPv4 address *ip* falls inside CIDR network *net*.

    Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24
             returns False if ip = 192.168.1.1 and net = 192.168.100.0/24

    :rtype: bool
    """
    host_addr = struct.unpack('=L', socket.inet_aton(ip))[0]
    net_addr_str, prefix = net.split('/')
    mask = struct.unpack('=L', socket.inet_aton(dotted_netmask(int(prefix))))[0]
    network = struct.unpack('=L', socket.inet_aton(net_addr_str))[0] & mask
    return (host_addr & mask) == (network & mask)
def dotted_netmask(mask):
    """Convert a prefix length to dotted-quad netmask form.

    Example: if mask is 24 function returns 255.255.255.0

    :rtype: str
    """
    # Clear the low (32 - mask) host bits of an all-ones word.
    host_bits = 32 - mask
    netmask_int = (0xffffffff >> host_bits) << host_bits
    return socket.inet_ntoa(struct.pack('>I', netmask_int))
def is_ipv4_address(string_ip):
    """Return True when *string_ip* parses as an IPv4 address.

    :rtype: bool
    """
    try:
        socket.inet_aton(string_ip)
        return True
    except socket.error:
        return False
def is_valid_cidr(string_network):
    """Very simple check of the cidr format in no_proxy variable.

    Requires exactly one '/', an integer prefix in [1, 32], and a
    parseable IPv4 address part.

    :rtype: bool
    """
    if string_network.count('/') != 1:
        return False
    address, _, prefix = string_network.partition('/')
    try:
        prefix_len = int(prefix)
    except ValueError:
        return False
    if not 1 <= prefix_len <= 32:
        return False
    try:
        socket.inet_aton(address)
    except socket.error:
        return False
    return True
@contextlib.contextmanager
def set_environ(env_name, value):
    """Temporarily set environment variable *env_name* to *value*.

    The previous value (or absence) is restored on exit. A ``None``
    *value* leaves the environment untouched.
    """
    should_set = value is not None
    if should_set:
        previous = os.environ.get(env_name)
        os.environ[env_name] = value
    try:
        yield
    finally:
        if should_set:
            if previous is None:
                del os.environ[env_name]
            else:
                os.environ[env_name] = previous
def should_bypass_proxies(url, no_proxy):
    """
    Returns whether we should bypass proxies or not.

    Honors an explicit *no_proxy* argument first, then the NO_PROXY
    environment variable, then the platform's proxy_bypass machinery.

    :param url: URL being requested.
    :param no_proxy: comma-separated host list overriding the environment,
        or None to read it from the environment.
    :rtype: bool
    """
    get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper())
    # First check whether no_proxy is defined. If it is, check that the URL
    # we're getting isn't in the no_proxy list.
    no_proxy_arg = no_proxy
    if no_proxy is None:
        no_proxy = get_proxy('no_proxy')
    netloc = urlparse(url).netloc
    if no_proxy:
        # We need to check whether we match here. We need to see if we match
        # the end of the netloc, both with and without the port.
        no_proxy = (
            host for host in no_proxy.replace(' ', '').split(',') if host
        )
        ip = netloc.split(':')[0]
        if is_ipv4_address(ip):
            # Literal IP: compare against CIDR ranges and plain IPs.
            for proxy_ip in no_proxy:
                if is_valid_cidr(proxy_ip):
                    if address_in_network(ip, proxy_ip):
                        return True
                elif ip == proxy_ip:
                    # If no_proxy ip was defined in plain IP notation instead of cidr notation &
                    # matches the IP of the index
                    return True
        else:
            # Hostname: suffix-match with and without the port.
            for host in no_proxy:
                if netloc.endswith(host) or netloc.split(':')[0].endswith(host):
                    # The URL does match something in no_proxy, so we don't want
                    # to apply the proxies on this URL.
                    return True
    # If the system proxy settings indicate that this URL should be bypassed,
    # don't proxy.
    # The proxy_bypass function is incredibly buggy on OS X in early versions
    # of Python 2.6, so allow this call to fail. Only catch the specific
    # exceptions we've seen, though: this call failing in other ways can reveal
    # legitimate problems.
    with set_environ('no_proxy', no_proxy_arg):
        try:
            bypass = proxy_bypass(netloc)
        except (TypeError, socket.gaierror):
            bypass = False
    if bypass:
        return True
    return False
def get_environ_proxies(url, no_proxy=None):
    """Return a dict of environment proxies applicable to *url*.

    An empty dict is returned when bypass rules say the URL should not
    be proxied.

    :rtype: dict
    """
    if should_bypass_proxies(url, no_proxy=no_proxy):
        return {}
    return getproxies()
def select_proxy(url, proxies):
    """Select a proxy for the url, if applicable.

    Lookup order: ``scheme://host``, ``scheme``, ``all://host``, ``all``.
    Returns None when nothing matches.

    :param url: The url being for the request
    :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs
    """
    proxies = proxies or {}
    parts = urlparse(url)
    if parts.hostname is None:
        # No host component (e.g. file:// URLs): match on scheme alone.
        return proxies.get(parts.scheme, proxies.get('all'))
    candidates = (
        parts.scheme + '://' + parts.hostname,
        parts.scheme,
        'all://' + parts.hostname,
        'all',
    )
    for candidate in candidates:
        if candidate in proxies:
            return proxies[candidate]
    return None
def default_user_agent(name="python-requests"):
    """Return the default User-Agent string, ``<name>/<version>``.

    :rtype: str
    """
    return '{0}/{1}'.format(name, __version__)
def default_headers():
    """Return the headers Requests attaches to every request by default.

    :rtype: requests.structures.CaseInsensitiveDict
    """
    return CaseInsensitiveDict({
        'User-Agent': default_user_agent(),
        'Accept-Encoding': ', '.join(('gzip', 'deflate')),
        'Accept': '*/*',
        'Connection': 'keep-alive',
    })
def parse_header_links(value):
    """Parse an RFC 5988-style Link header into a list of dicts.

    i.e. Link: <http:/.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg"

    Each dict carries a ``url`` key plus any ``key=value`` parameters.

    :rtype: list
    """
    links = []
    strip_chars = ' \'"'
    for segment in re.split(', *<', value):
        url_part, _, params_part = segment.partition(';')
        link = {'url': url_part.strip('<> \'"')}
        for param in params_part.split(';'):
            try:
                key, val = param.split('=')
            except ValueError:
                # No '=' (or more than one): stop parsing this link's params.
                break
            link[key.strip(strip_chars)] = val.strip(strip_chars)
        links.append(link)
    return links
# Null bytes; no need to recreate these on each call to guess_json_utf
_null = '\x00'.encode('ascii')  # encoding to ASCII for Python 3
_null2 = _null * 2
_null3 = _null * 3


def guess_json_utf(data):
    """Guess which UTF codec encodes *data*, a JSON document as bytes.

    JSON always starts with two ASCII characters, so the count and
    position of NUL bytes in the first four bytes pins down the encoding;
    a BOM, when present, is detected first. Returns None when no encoding
    can be determined.

    :rtype: str
    """
    sample = data[:4]
    # BOM-based detection first. UTF-32 BOMs begin with the UTF-16 ones,
    # so the longer BOMs must be checked before the shorter.
    if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):
        return 'utf-32'
    if sample[:3] == codecs.BOM_UTF8:
        return 'utf-8-sig'  # MS style (discouraged)
    if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):
        return 'utf-16'
    nullcount = sample.count(_null)
    if nullcount == 0:
        return 'utf-8'
    if nullcount == 2:
        if sample[::2] == _null2:  # 1st and 3rd bytes are null
            return 'utf-16-be'
        if sample[1::2] == _null2:  # 2nd and 4th bytes are null
            return 'utf-16-le'
        # Did not detect 2 valid UTF-16 ascii-range characters
    elif nullcount == 3:
        if sample[:3] == _null3:
            return 'utf-32-be'
        if sample[1:] == _null3:
            return 'utf-32-le'
        # Did not detect a valid UTF-32 ascii-range character
    return None
def prepend_scheme_if_needed(url, new_scheme):
    """Given a URL that may or may not have a scheme, prepend the given
    scheme. Does not replace a scheme that is already present.

    :rtype: str
    """
    parsed = urlparse(url, new_scheme)
    netloc, path = parsed.netloc, parsed.path
    # urlparse is a finicky beast, and sometimes decides that there isn't a
    # netloc present. Assume that it's being over-cautious, and switch netloc
    # and path if urlparse decided there was no netloc.
    if not netloc:
        netloc, path = path, netloc
    return urlunparse((parsed.scheme, netloc, path,
                       parsed.params, parsed.query, parsed.fragment))
def get_auth_from_url(url):
    """Extract the authentication components of *url* as a
    ``(username, password)`` tuple, percent-decoded.

    URLs without auth yield ``('', '')``.

    :rtype: (str,str)
    """
    parsed = urlparse(url)
    try:
        return (unquote(parsed.username), unquote(parsed.password))
    except (AttributeError, TypeError):
        # Missing auth component: username/password come back as None.
        return ('', '')
# Moved outside of function to avoid recompile every call
_CLEAN_HEADER_REGEX_BYTE = re.compile(b'^\\S[^\\r\\n]*$|^$')
_CLEAN_HEADER_REGEX_STR = re.compile(r'^\S[^\r\n]*$|^$')


def check_header_validity(header):
    """Verifies that header value is a string which doesn't contain
    leading whitespace or return characters. This prevents unintended
    header injection.

    :param header: tuple, in the format (name, value).
    :raises InvalidHeader: on an unsafe or non-string value.
    """
    name, value = header
    pattern = (_CLEAN_HEADER_REGEX_BYTE if isinstance(value, bytes)
               else _CLEAN_HEADER_REGEX_STR)
    try:
        if not pattern.match(value):
            raise InvalidHeader("Invalid return character or leading space in header: %s" % name)
    except TypeError:
        # match() blew up: the value is neither str nor bytes.
        raise InvalidHeader("Value for header {%s: %s} must be of type str or "
                            "bytes, not %s" % (name, value, type(value)))
def urldefragauth(url):
    """Given a url remove the fragment and the authentication part.

    :rtype: str
    """
    parts = urlparse(url)
    netloc, path = parts.netloc, parts.path
    # see func:`prepend_scheme_if_needed` -- urlparse can misplace the
    # host into `path` for scheme-less URLs; swap back before stripping.
    if not netloc:
        netloc, path = path, netloc
    # Drop any user:password@ prefix from the host.
    netloc = netloc.rsplit('@', 1)[-1]
    return urlunparse((parts.scheme, netloc, path, parts.params, parts.query, ''))
def rewind_body(prepared_request):
    """Seek the request body back to its recorded start position so it
    can be read again on redirect.

    :raises UnrewindableBodyError: when the body is not seekable or no
        integer start position was recorded.
    """
    seek = getattr(prepared_request.body, 'seek', None)
    if seek is None or not isinstance(prepared_request._body_position, integer_types):
        raise UnrewindableBodyError("Unable to rewind request body for redirect.")
    try:
        seek(prepared_request._body_position)
    except (IOError, OSError):
        raise UnrewindableBodyError("An error occurred when rewinding request "
                                    "body for redirect.")
|
unknown
|
codeparrot/codeparrot-clean
| ||
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2014, Nicolas P. Rougier
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# -----------------------------------------------------------------------------
""" """
from . raw_polygon_collection import RawPolygonCollection
#from . agg_polygon_collection import AggPolygonCollection
#from . agg_fast_polygon_collection import AggPolygonCollection
def PolygonCollection(mode="raw", *args, **kwargs):
    """
    Factory for a polygon collection renderer.

    mode: string
        - "raw" (speed: fastest, size: small, output: ugly, no dash, no
          thickness)
        - "agg" (speed: medium, size: medium output: nice, some flaws, no
          dash)
        - "agg+" (speed: slow, size: big, output: perfect, no dash)

    NOTE(review): only the "raw" backend is currently wired up -- the agg
    imports at module top are commented out, so ``mode`` is effectively
    ignored here.
    """
    # if mode == "raw":
    return RawPolygonCollection(*args, **kwargs)
    # elif mode == "agg":
    # return AggFastPolygonCollection(*args, **kwargs)
    # return AggPolygonCollection(*args, **kwargs)
|
unknown
|
codeparrot/codeparrot-clean
| ||
from bedrock.redirects.util import redirect
redirectpatterns = (
# Bug 608370, 957664
redirect(r'^press/kit(?:.*\.html|s/?)$', 'https://blog.mozilla.org/press/kits/'),
# bug 877198
redirect(r'^press/news\.html$', 'http://blog.mozilla.org/press/'),
redirect(r'^press/mozilla-2003-10-15\.html$',
'http://blog.mozilla.org/press/2003/10/'
'mozilla-foundation-launches-new-web-browser-and-end-user-services/'),
redirect(r'^press/mozilla-2004-02-09\.html$',
'https://blog.mozilla.org/press/2004/02/'
'new-round-of-releases-extends-mozilla-project%C2%92s-standards-based-open-source-offerings/'),
redirect(r'^press/mozilla-2004-02-17\.html$',
'http://blog.mozilla.org/press/2004/02/mozilla-gains-stronghold-in-europe/'),
redirect(r'^press/mozilla-2004-02-26\.html$',
'https://blog.mozilla.org/press/2004/02/'
'mozilla-foundation-rallies-supporters-to-take-back-the-web/'),
redirect(r'^press/mozilla-2004-05-03\.html$',
'http://blog.mozilla.org/press/2004/05/mozilla-foundation-releases-thunderbird-0-6/'),
redirect(r'^press/mozilla-2004-06-15\.html$',
'http://blog.mozilla.org/press/2004/06/mozilla-reloads-firefox/'),
redirect(r'^press/mozilla-2004-06-16\.html$',
'http://blog.mozilla.org/press/2004/06/mozilla-foundation-releases-thunderbird-0-7/'),
redirect(r'^press/mozilla-2004-06-30\.html$',
'http://blog.mozilla.org/press/2013/11/mozilla-foundation-announces-more-open-scriptable-plugins/'),
redirect(r'^press/mozilla-2004-08-02\.html$',
'http://blog.mozilla.org/press/2004/08/mozilla-foundation-announces-security-bug-bounty-program/'),
redirect(r'^press/mozilla-2004-08-10\.html$',
'http://blog.mozilla.org/press/2004/08/mozilla-foundation-announces-xforms-development-project/'),
redirect(r'^press/mozilla-2004-08-18\.html$',
'http://blog.mozilla.org/press/2004/08/mozilla-affiliate-in-japan-kicks-off/'),
redirect(r'^press/mozilla-2004-09-14-01\.html$',
'http://blog.mozilla.org/press/2004/09/'
'mozilla-foundation-announces-first-payments-of-security-bug-bounty-program-further-strengthens-browser-security/'),
redirect(r'^press/mozilla-2004-09-14-02\.html$',
'http://blog.mozilla.org/press/2013/11/'
'firefox-preview-release-and-thunderbird-0-8-released/'),
redirect(r'^press/mozilla-2004-09-20\.html$',
'http://blog.mozilla.org/press/2004/09/'
'mozilla-firefox-preview-release-hits-one-million-downloads-in-first-four-days-of-availability/'),
redirect(r'^press/mozilla-2004-10-01-02\.html$',
'http://blog.mozilla.org/press/2004/10/important-security-update-for-firefox-available/'),
redirect(r'^press/mozilla-2004-11-09\.html$',
'http://blog.mozilla.org/press/2004/11/'
'mozilla-foundation-releases-the-highly-anticipated-mozilla-firefox-1-0-web-browser/'),
redirect(r'^press/mozilla-2004-11-22\.html$',
'http://blog.mozilla.org/press/2004/11/important-update-to-german-language-version-of-firefox-1-0/'),
redirect(r'^press/mozilla-2004-12-15\.html$',
'http://blog.mozilla.org/press/2004/12/mozilla-foundation-places-two-page-advocacy-ad-in-the-new-york-times/'),
redirect(r'^press/mozilla-2004-12-7\.html$',
'http://blog.mozilla.org/press/2004/12/mozilla-thunderbird-1-0-email-client-has-landed/'),
redirect(r'^press/mozilla-2005-01-07\.html$',
'http://blog.mozilla.org/press/2005/01/'
'mozilla-firefox-and-thunderbird-to-support-new-open-standard-platform-for-usb-drives/'),
redirect(r'^press/mozilla-2005-02-02\.html$',
'http://blog.mozilla.org/press/2005/02/mozilla-foundation-announces-beta-release-of-xforms-1-0-recommendation/'),
redirect(r'^press/mozilla-2005-02-16\.html$',
'http://blog.mozilla.org/press/2005/01/'
'mozilla-firefox-and-thunderbird-to-support-new-open-standard-platform-for-usb-drives/'),
redirect(r'^press/mozilla-2005-02-24\.html$',
'http://blog.mozilla.org/press/2005/02/mozilla-foundation-announces-update-to-firefox/'),
redirect(r'^press/mozilla-2005-03-04\.html$',
'http://blog.mozilla.org/press/2005/03/mozilla-foundation-expands-with-launch-of-mozilla-china/'),
redirect(r'^press/mozilla-2005-03-23\.html$',
'http://blog.mozilla.org/press/2005/03/mozilla-foundation-releases-security-update-to-firefox/'),
redirect(r'^press/mozilla-2005-03-28\.html$',
'http://blog.mozilla.org/press/2005/03/mozilla-foundation-awards-bug-bounties/'),
redirect(r'^press/mozilla-2005-05-13\.html$',
'http://blog.mozilla.org/press/2005/05/'
'mozilla-foundation-co-hosts-europes-leading-xml-and-web-developer-conference/'),
redirect(r'^press/mozilla-2005-07-28\.html$',
'http://blog.mozilla.org/press/2005/07/mozilla-headlines-two-key-open-source-development-conferences-in-august/'),
redirect(r'^press/mozilla-2005-08-03\.html$',
'http://blog.mozilla.org/press/2005/08/mozilla-foundation-forms-new-organization-to-further-the-creation-'
'of-free-open-source-internet-software-including-the-award-winning-mozilla-firefox-browser/'),
redirect(r'^press/mozilla-2005-10-03\.html$',
'http://blog.mozilla.org/press/2005/10/mozilla-launches-beta-of-comprehensive-online-developer-center/'),
redirect(r'^press/mozilla-2005-10-19\.html$',
'http://blog.mozilla.org/press/2005/10/firefox-surpasses-100-million-downloads/'),
redirect(r'^press/mozilla-2005-11-29\.html$',
'http://blog.mozilla.org/press/2005/11/mozilla-introduces-firefox-1-5-and-ups-the-ante-in-web-browsing/'),
redirect(r'^press/mozilla-2005-11-3\.html$',
'http://blog.mozilla.org/press/2005/11/mozilla-kicks-off-extend-firefox-competition/'),
redirect(r'^press/mozilla-2005-11-30\.html$',
'http://blog.mozilla.org/press/2005/11/firefox-1-5-adds-answers-com-for-quick-reference/'),
redirect(r'^press/mozilla-2005-12-2\.html$',
'http://blog.mozilla.org/press/2005/12/mozilla-launches-firefox-flicks-campaign/'),
redirect(r'^press/mozilla-2005-12-22\.html$',
'http://blog.mozilla.org/press/2005/12/mozilla-launches-firefox-flicks-ad-contest/'),
redirect(r'^press/mozilla-2006-01-12\.html$',
'http://blog.mozilla.org/press/2006/01/mozilla-releases-thunderbird-1-5-email-client/'),
redirect(r'^press/mozilla-2006-01-24\.html$',
'http://blog.mozilla.org/press/2006/01/firefox-1-5-adoption-rising-as-browser-garners-acclaim/'),
redirect(r'^press/mozilla-2006-01-25\.html$',
'http://blog.mozilla.org/press/2006/01/indie-film-all-stars-foin-firefox-flicks-crew/'),
redirect(r'^press/mozilla-2006-02-03\.html$',
'http://blog.mozilla.org/press/2006/02/mozilla-releases-preview-of-application-framework-for-'
'development-of-cross-platform-internet-client-applications/'),
redirect(r'^press/mozilla-2006-03-02\.html$',
'http://blog.mozilla.org/press/2006/03/mozilla-announces-winners-of-extend-firefox-competition/'),
redirect(r'^press/mozilla-2006-04-12\.html$',
'http://blog.mozilla.org/press/2006/04/mozilla-showcases-first-round-of-community-produced-firefox-flicks-videos/'),
redirect(r'^press/mozilla-2006-04-18\.html$',
'http://blog.mozilla.org/press/2006/04/mozilla-receives-over-280-community-produced-videos-for-firefox-flicks/'),
redirect(r'^press/mozilla-2006-04-27\.html$',
'http://blog.mozilla.org/press/2006/04/firefox-flicks-video-contest-winners-announced/'),
redirect(r'^press/mozilla-2006-06-14\.html$',
'http://blog.mozilla.org/press/2006/06/mozilla-feeds-soccer-fans-passion-with-new-firefox-add-on/'),
redirect(r'^press/mozilla-2006-10-11\.html$',
'http://blog.mozilla.org/press/2006/10/qualcomm-launches-project-in-collaboration-with-'
'mozilla-foundation-to-develop-open-source-version-of-eudora-email-program/'),
redirect(r'^press/mozilla-2006-10-24-02\.html$',
'http://blog.mozilla.org/press/2006/10/firefox-moving-the-internet-forward/'),
redirect(r'^press/mozilla-2006-10-24\.html$',
'http://blog.mozilla.org/press/2006/10/'
'mozilla-releases-major-update-to-firefox-and-raises-the-bar-for-online-experience/'),
redirect(r'^press/mozilla-2006-11-07\.html$',
'http://blog.mozilla.org/press/2006/11/adobe-and-mozilla-foundation-to-open-source-flash-player-scripting-engine/'),
redirect(r'^press/mozilla-2006-12-04\.html$',
'http://blog.mozilla.org/press/2006/12/the-world-economic-forum-announces-technology-pioneers-2007-mozilla-selected/'),
redirect(r'^press/mozilla-2006-12-11\.html$',
'http://blog.mozilla.org/press/2006/12/mozilla-firefox-headed-for-primetime/'),
redirect(r'^press/mozilla-2007-02-07\.html$',
'http://blog.mozilla.org/press/2007/02/kodak-and-mozilla-join-forces-to-make-sharing-photos-even-easier/'),
redirect(r'^press/mozilla-2007-03-27\.html$',
'http://blog.mozilla.org/press/2007/03/mozilla-launches-new-firefox-add-ons-web-site/'),
redirect(r'^press/mozilla-2007-03-28\.html$',
'http://blog.mozilla.org/press/2007/03/mozilla-and-ebay-working-together-to-make-the-auction-'
'experience-easier-for-firefox-users-in-france-germany-and-the-uk/'),
redirect(r'^press/mozilla-2007-04-19\.html$',
'http://blog.mozilla.org/press/2007/04/mozilla-thunderbird-2-soars-to-new-heights/'),
redirect(r'^press/mozilla-2007-05-16\.html$',
'http://blog.mozilla.org/press/2007/05/united-nations-agency-awards-mozilla-world-information-society-award/'),
redirect(r'^press/mozilla-2007-07-04\.html$',
'http://blog.mozilla.org/press/2007/07/mozilla-and-ebay-launch-firefox-companion-for-ebay-users/'),
redirect(r'^press/mozilla-2007-08-10\.html$',
'http://blog.mozilla.org/press/2007/08/mozilla-to-host-24-hour-worldwide-community-event/'),
redirect(r'^press/mozilla-2007-08-28\.html$',
'http://blog.mozilla.org/press/2007/08/mozilla-welcomes-students-back-to-school-with-firefox-campus-edition/'),
redirect(r'^press/mozilla-2007-09-17-faq\.html$',
'http://blog.mozilla.org/press/2007/09/mozilla-launches-internet-mail-and-communications-initiative/'),
redirect(r'^press/mozilla-2007-09-17\.html$',
'http://blog.mozilla.org/press/2007/09/mozilla-launches-internet-mail-and-communications-initiative/'),
redirect(r'^press/mozilla-2008-01-07-faq\.html$',
'http://blog.mozilla.org/press/2008/01/mozilla-appoints-john-lilly-as-chief-executive-officer/'),
redirect(r'^press/mozilla-2008-01-07\.html$',
'http://blog.mozilla.org/press/2008/01/mozilla-appoints-john-lilly-as-chief-executive-officer/'),
redirect(r'^press/mozilla-2008-02-19-faq\.html$',
'http://blog.mozilla.org/press/2008/02/mozilla-messaging-starts-up-operations/'),
redirect(r'^press/mozilla-2008-02-19\.html$',
'http://blog.mozilla.org/press/2008/02/mozilla-messaging-starts-up-operations/'),
redirect(r'^press/mozilla-2008-05-28\.html$',
'http://blog.mozilla.org/press/2008/05/mozilla-aims-to-set-guinness-world-record-on-firefox-3-download-day/'),
redirect(r'^press/mozilla-2008-06-17-faq\.html$',
'http://blog.mozilla.org/press/2008/06/mozilla-releases-firefox-3-and-redefines-the-web-experience/'),
redirect(r'^press/mozilla-2008-06-17\.html$',
'http://blog.mozilla.org/press/2008/06/mozilla-releases-firefox-3-and-redefines-the-web-experience/'),
redirect(r'^press/mozilla-2008-07-02\.html$',
'http://blog.mozilla.org/press/2008/07/mozilla-sets-new-guinness-world-record-with-firefox-3-downloads/'),
redirect(r'^press/mozilla-2008-11-18\.html$',
'http://blog.mozilla.org/press/2008/11/mozilla-launches-fashion-your-'
'firefox-and-makes-it-easy-to-customize-the-browsing-experience/'),
redirect(r'^press/mozilla-2008-12-03\.html$',
'http://blog.mozilla.org/press/2008/12/mozilla-and-zazzle-announce-strategic-relationship-for-apparel-on-demand/'),
redirect(r'^press/mozilla-2009-03-31\.html$',
'https://blog.mozilla.org/press/2009/03/%C2%AD%C2%ADmozilla-adds-style-and-star-power-to-firefox-with-new-personas/'),
redirect(r'^press/mozilla-2009-06-30-faq\.html$',
'http://blog.mozilla.org/press/2009/04/mozilla-advances-the-web-with-firefox-3-5/'),
redirect(r'^press/mozilla-2009-06-30\.html$',
'http://blog.mozilla.org/press/2009/04/mozilla-advances-the-web-with-firefox-3-5/'),
redirect(r'^press/mozilla-foundation\.html$',
'http://blog.mozilla.org/press/2003/07/mozilla-org-announces-launch-of-the-'
'mozilla-foundation-to-lead-open-source-browser-efforts/'),
redirect(r'^press/mozilla1.0\.html$',
'http://blog.mozilla.org/press/2002/06/mozilla-org-launches-mozilla-1-0/'),
redirect(r'^press/open-source-security\.html$',
'http://blog.mozilla.org/press/2000/01/open-source-development-of-security-products-'
'possible-worldwide-enhancing-security-and-privacy-for-e-commerce-and-communication/'),
)
|
unknown
|
codeparrot/codeparrot-clean
| ||
# frozen_string_literal: true
require "abstract_unit"
# Fixture mailer used by MailerHelperTest below to exercise ActionMailer's
# view helpers (block_format, format_paragraph, the mailer/message
# accessors, fragment caching and stylesheet_link_tag) through
# inline-rendered HTML parts.
class HelperMailer < ActionMailer::Base
  # Runs a long prose blob through the +block_format+ helper.
  def use_mail_helper
    @text = "But soft! What light through yonder window breaks? It is the east, " \
      "and Juliet is the sun. Arise, fair sun, and kill the envious moon, " \
      "which is sick and pale with grief that thou, her maid, art far more " \
      "fair than she. Be not her maid, for she is envious! Her vestal " \
      "livery is but sick and green, and none but fools do wear it. Cast " \
      "it off!"
    mail_with_defaults do |format|
      format.html { render(inline: "<%= block_format @text %>") }
    end
  end

  # Wraps text with +format_paragraph+ (line length 15, indent 1).
  def use_format_paragraph
    @text = "But soft! What light through yonder window breaks?"
    mail_with_defaults do |format|
      format.html { render(inline: "<%= format_paragraph @text, 15, 1 %>") }
    end
  end

  # Edge case: first word longer than the requested line length.
  def use_format_paragraph_with_long_first_word
    @text = "Antidisestablishmentarianism is very long."
    mail_with_defaults do |format|
      format.html { render(inline: "<%= format_paragraph @text, 10, 1 %>") }
    end
  end

  # Accesses the mail via the +mailer+ helper inside the view.
  def use_mailer
    mail_with_defaults do |format|
      format.html { render(inline: "<%= mailer.message.subject %>") }
    end
  end

  # Accesses the mail via the +message+ helper inside the view.
  def use_message
    mail_with_defaults do |format|
      format.html { render(inline: "<%= message.subject %>") }
    end
  end

  # Multi-paragraph input for +block_format+; the heredoc's exact
  # whitespace is part of the fixture and is asserted on below.
  def use_block_format
    @text = <<-TEXT
This is the
first paragraph.
The second
paragraph.
* item1 * item2
* item3
    TEXT

    mail_with_defaults do |format|
      format.html { render(inline: "<%= block_format @text %>") }
    end
  end

  # Uses the +cache+ view helper inside a mailer template.
  def use_cache
    mail_with_defaults do |format|
      format.html { render(inline: "<% cache(:foo) do %>Greetings from a cache helper block<% end %>") }
    end
  end

  # Uses +stylesheet_link_tag+ inside a mailer template.
  def use_stylesheet_link_tag
    mail_with_defaults do |format|
      format.html { render(inline: "<%= stylesheet_link_tag 'mailer' %>") }
    end
  end

  private
    # Common to/from/subject so each action only supplies its body.
    def mail_with_defaults(&block)
      mail(to: "test@localhost", from: "tester@example.com",
            subject: "using helpers", &block)
    end
end
# Tests for ActionMailer::MailHelper and general helper availability in
# mailer views. The expected strings encode the CRLF line endings that the
# encoded mail body uses, so they must match byte-for-byte.
class MailerHelperTest < ActionMailer::TestCase
  def test_use_mail_helper
    mail = HelperMailer.use_mail_helper
    assert_match %r{ But soft!}, mail.body.encoded
    # block_format re-wraps the paragraph, so the break lands mid-sentence
    assert_match %r{east, and\r\n  Juliet}, mail.body.encoded
  end

  def test_use_mailer
    mail = HelperMailer.use_mailer
    assert_match "using helpers", mail.body.encoded
  end

  def test_use_message
    mail = HelperMailer.use_message
    assert_match "using helpers", mail.body.encoded
  end

  def test_use_format_paragraph
    mail = HelperMailer.use_format_paragraph
    assert_match " But soft! What\r\n light through\r\n yonder window\r\n breaks?", mail.body.encoded
  end

  def test_use_format_paragraph_with_long_first_word
    mail = HelperMailer.use_format_paragraph_with_long_first_word
    # a word longer than the wrap width is kept whole on its own line
    assert_equal " Antidisestablishmentarianism\r\n is very\r\n long.", mail.body.encoded
  end

  def test_use_block_format
    mail = HelperMailer.use_block_format
    expected = <<-TEXT
  This is the first paragraph.

  The second paragraph.

  * item1
  * item2
  * item3
    TEXT
    assert_equal expected.gsub("\n", "\r\n"), mail.body.encoded
  end

  def test_use_cache
    assert_nothing_raised do
      mail = HelperMailer.use_cache
      assert_equal "Greetings from a cache helper block", mail.body.encoded
    end
  end

  def test_stylesheet_link_tag_without_nonce_method
    # Mailer views have no CSP nonce source, so no nonce attribute should
    # be emitted even when auto inclusion is switched on.
    original_auto_include_nonce_for_styles = ActionView::Helpers::AssetTagHelper.auto_include_nonce_for_styles
    ActionView::Helpers::AssetTagHelper.auto_include_nonce_for_styles = true

    mail = HelperMailer.use_stylesheet_link_tag
    assert_includes mail.body.encoded, %(<link rel="stylesheet" href="/stylesheets/mailer.css")
    assert_not_includes mail.body.encoded, "nonce="
  ensure
    ActionView::Helpers::AssetTagHelper.auto_include_nonce_for_styles = original_auto_include_nonce_for_styles
  end

  # Bare object extended with the helper module, for unit-testing
  # block_format without a mailer.
  def helper
    Object.new.extend(ActionMailer::MailHelper)
  end

  def test_block_format
    # list items are normalized to two-space indentation and split per item
    assert_equal "  * foo\n", helper.block_format("  * foo")
    assert_equal "  * foo\n", helper.block_format(" * foo")
    assert_equal "  * foo\n", helper.block_format("* foo")
    assert_equal "  * foo\n*bar", helper.block_format("* foo*bar")
    assert_equal "  * foo\n  * bar\n", helper.block_format("* foo * bar")
    assert_equal "  *", helper.block_format("* ")
  end
end
|
ruby
|
github
|
https://github.com/rails/rails
|
actionmailer/test/mail_helper_test.rb
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import webnotes
from webnotes import _
from webnotes.utils import cint, cstr, date_diff, flt, formatdate, getdate, get_url_to_form, \
comma_or, get_fullname
from webnotes import msgprint
class LeaveDayBlockedError(webnotes.ValidationError): pass
class OverlapError(webnotes.ValidationError): pass
class InvalidLeaveApproverError(webnotes.ValidationError): pass
from webnotes.model.controller import DocListController
class DocType(DocListController):
	"""Server-side controller for the Leave Application doctype.

	Validates the application (dates, leave balance, overlaps, block
	dates and the chosen approver) and notifies the employee / leave
	approver as the document moves through its workflow.
	"""

	def setup(self):
		# Snapshot the stored version of the document (if any) so that
		# on_update can detect approver re-assignment and status changes.
		if webnotes.conn.exists(self.doc.doctype, self.doc.name):
			self.previous_doc = webnotes.doc(self.doc.doctype, self.doc.name)
		else:
			self.previous_doc = None

	def validate(self):
		# Each check msgprints with raise_exception (or raises) on failure.
		self.validate_to_date()
		self.validate_balance_leaves()
		self.validate_leave_overlap()
		self.validate_max_days()
		self.show_block_day_warning()
		self.validate_block_days()
		self.validate_leave_approver()

	def on_update(self):
		if (not self.previous_doc and self.doc.leave_approver) or (self.previous_doc and \
				self.doc.status == "Open" and self.previous_doc.leave_approver != self.doc.leave_approver):
			# notify leave approver about creation (or re-assignment)
			self.notify_leave_approver()
		elif self.previous_doc and \
				self.previous_doc.status == "Open" and self.doc.status == "Rejected":
			# notify employee about rejection
			self.notify_employee(self.doc.status)

	def on_submit(self):
		# Only approved applications may be submitted.
		if self.doc.status != "Approved":
			webnotes.msgprint("""Only Leave Applications with status 'Approved' can be Submitted.""",
				raise_exception=True)

		# notify leave applier about approval
		self.notify_employee(self.doc.status)

	def on_cancel(self):
		# notify leave applier about cancellation
		self.notify_employee("cancelled")

	def show_block_day_warning(self):
		# Non-fatal warning listing any block dates inside the applied range.
		from hr.doctype.leave_block_list.leave_block_list import get_applicable_block_dates

		block_dates = get_applicable_block_dates(self.doc.from_date, self.doc.to_date,
			self.doc.employee, self.doc.company, all_lists=True)

		if block_dates:
			webnotes.msgprint(_("Warning: Leave application contains following block dates") + ":")
			for d in block_dates:
				webnotes.msgprint(formatdate(d.block_date) + ": " + d.reason)

	def validate_block_days(self):
		# Hard stop: the user may not approve leave spanning block dates
		# they are not authorized to override.
		from hr.doctype.leave_block_list.leave_block_list import get_applicable_block_dates

		block_dates = get_applicable_block_dates(self.doc.from_date, self.doc.to_date,
			self.doc.employee, self.doc.company)

		if block_dates:
			if self.doc.status == "Approved":
				webnotes.msgprint(_("Cannot approve leave as you are not authorized to approve leaves on Block Dates."))
				raise LeaveDayBlockedError

	def get_holidays(self):
		"""Return the number of holidays within from_date..to_date.

		Uses the employee's assigned holiday list when present, otherwise
		falls back to the default holiday list of the fiscal year.
		"""
		tot_hol = webnotes.conn.sql("""select count(*) from `tabHoliday` h1, `tabHoliday List` h2, `tabEmployee` e1
			where e1.name = %s and h1.parent = h2.name and e1.holiday_list = h2.name
			and h1.holiday_date between %s and %s""", (self.doc.employee, self.doc.from_date, self.doc.to_date))
		if not tot_hol:
			tot_hol = webnotes.conn.sql("""select count(*) from `tabHoliday` h1, `tabHoliday List` h2
				where h1.parent = h2.name and h1.holiday_date between %s and %s
				and ifnull(h2.is_default,0) = 1 and h2.fiscal_year = %s""",
				(self.doc.from_date, self.doc.to_date, self.doc.fiscal_year))
		return tot_hol and flt(tot_hol[0][0]) or 0

	def get_total_leave_days(self):
		"""Calculates total leave days based on input and holidays"""
		# Half-day applications are always exactly 0.5 days.
		ret = {'total_leave_days' : 0.5}
		if not self.doc.half_day:
			tot_days = date_diff(self.doc.to_date, self.doc.from_date) + 1
			holidays = self.get_holidays()
			ret = {
				'total_leave_days' : flt(tot_days)-flt(holidays)
			}
		return ret

	def validate_to_date(self):
		if self.doc.from_date and self.doc.to_date and \
				(getdate(self.doc.to_date) < getdate(self.doc.from_date)):
			msgprint("To date cannot be before from date")
			raise Exception

	def validate_balance_leaves(self):
		# Recompute total_leave_days and check it against the available
		# balance (the balance check is skipped for Leave Without Pay).
		if self.doc.from_date and self.doc.to_date:
			self.doc.total_leave_days = self.get_total_leave_days()["total_leave_days"]

			if self.doc.total_leave_days == 0:
				msgprint(_("Hurray! The day(s) on which you are applying for leave \
coincide with holiday(s). You need not apply for leave."),
					raise_exception=1)

			if not is_lwp(self.doc.leave_type):
				self.doc.leave_balance = get_leave_balance(self.doc.employee,
					self.doc.leave_type, self.doc.fiscal_year)["leave_balance"]
				if self.doc.status != "Rejected" \
						and self.doc.leave_balance - self.doc.total_leave_days < 0:
					# If this leave type allows a negative balance, only warn
					# and continue to save; otherwise warn and block the save.
					msgprint("There is not enough leave balance for Leave Type: %s" % \
						(self.doc.leave_type,),
						raise_exception=not(webnotes.conn.get_value("Leave Type", self.doc.leave_type,"allow_negative") or None))

	def validate_leave_overlap(self):
		if not self.doc.name:
			# give unsaved documents a dummy name so the `name !=` clause
			# below has a value to compare against
			self.doc.name = "New Leave Application"

		for d in webnotes.conn.sql("""select name, leave_type, posting_date,
			from_date, to_date
			from `tabLeave Application`
			where
			employee = %(employee)s
			and docstatus < 2
			and status in ("Open", "Approved")
			and (from_date between %(from_date)s and %(to_date)s
				or to_date between %(from_date)s and %(to_date)s
				or %(from_date)s between from_date and to_date)
			and name != %(name)s""", self.doc.fields, as_dict = 1):

			msgprint("Employee : %s has already applied for %s between %s and %s on %s. Please refer Leave Application : <a href=\"#Form/Leave Application/%s\">%s</a>" % (self.doc.employee, cstr(d['leave_type']), formatdate(d['from_date']), formatdate(d['to_date']), formatdate(d['posting_date']), d['name'], d['name']), raise_exception = OverlapError)

	def validate_max_days(self):
		# FIX: parameterized query instead of interpolating the leave type
		# into the SQL string — consistent with is_lwp() in this module and
		# safe against quoting issues / SQL injection.
		max_days = webnotes.conn.sql("select max_days_allowed from `tabLeave Type` where name = %s",
			self.doc.leave_type)
		max_days = max_days and flt(max_days[0][0]) or 0

		if max_days and self.doc.total_leave_days > max_days:
			msgprint("Sorry ! You cannot apply for %s for more than %s days" % (self.doc.leave_type, max_days))
			raise Exception

	def validate_leave_approver(self):
		# If the employee has an explicit approver list, the chosen approver
		# must be on it; in any case the approver needs the Leave Approver role.
		employee = webnotes.bean("Employee", self.doc.employee)
		leave_approvers = [l.leave_approver for l in
			employee.doclist.get({"parentfield": "employee_leave_approvers"})]

		if len(leave_approvers) and self.doc.leave_approver not in leave_approvers:
			msgprint(("[" + _("For Employee") + ' "' + self.doc.employee + '"] '
				+ _("Leave Approver can be one of") + ": "
				+ comma_or(leave_approvers)), raise_exception=InvalidLeaveApproverError)

		elif self.doc.leave_approver and not webnotes.conn.sql("""select name from `tabUserRole`
			where parent=%s and role='Leave Approver'""", self.doc.leave_approver):
			msgprint(get_fullname(self.doc.leave_approver) + ": " \
				+ _("does not have role 'Leave Approver'"), raise_exception=InvalidLeaveApproverError)

	def notify_employee(self, status):
		# Message/email the employee about a status change; no-op when the
		# employee record has no linked user account.
		employee = webnotes.doc("Employee", self.doc.employee)
		if not employee.user_id:
			return

		def _get_message(url=False):
			if url:
				name = get_url_to_form(self.doc.doctype, self.doc.name)
			else:
				name = self.doc.name
			return (_("Leave Application") + ": %s - %s") % (name, _(status))

		self.notify({
			# for post in messages
			"message": _get_message(url=True),
			"message_to": employee.user_id,
			"subject": _get_message(),
		})

	def notify_leave_approver(self):
		# Message/email the leave approver about a new or re-assigned application.
		employee = webnotes.doc("Employee", self.doc.employee)

		def _get_message(url=False):
			name = self.doc.name
			employee_name = cstr(employee.employee_name)
			if url:
				name = get_url_to_form(self.doc.doctype, self.doc.name)
				employee_name = get_url_to_form("Employee", self.doc.employee, label=employee_name)
			return (_("New Leave Application") + ": %s - " + _("Employee") + ": %s") % (name, employee_name)

		self.notify({
			# for post in messages
			"message": _get_message(url=True),
			"message_to": self.doc.leave_approver,

			# for email
			"subject": _get_message()
		})

	def notify(self, args):
		# Post an internal message; also emails when follow_via_email is set.
		args = webnotes._dict(args)
		from core.page.messages.messages import post
		post({"txt": args.message, "contact": args.message_to, "subject": args.subject,
			"notify": cint(self.doc.follow_via_email)})
@webnotes.whitelist()
def get_leave_balance(employee, leave_type, fiscal_year):
	"""Return {'leave_balance': allocated - approved} for the given
	employee, leave type and fiscal year (submitted records only)."""
	allocation_rows = webnotes.conn.sql("""select total_leaves_allocated
		from `tabLeave Allocation` where employee = %s and leave_type = %s
		and fiscal_year = %s and docstatus = 1""", (employee,
		leave_type, fiscal_year))

	application_rows = webnotes.conn.sql("""select SUM(total_leave_days)
		from `tabLeave Application`
		where employee = %s and leave_type = %s and fiscal_year = %s
		and status="Approved" and docstatus = 1""", (employee, leave_type, fiscal_year))

	allocated = allocation_rows and flt(allocation_rows[0][0]) or 0
	applied = application_rows and flt(application_rows[0][0]) or 0

	return {'leave_balance': allocated - applied}
def is_lwp(leave_type):
	"""Return 1 when the leave type is flagged Leave Without Pay, else 0."""
	rows = webnotes.conn.sql("select is_lwp from `tabLeave Type` where name = %s", leave_type)
	if not rows:
		return 0
	return cint(rows[0][0]) or 0
@webnotes.whitelist()
def get_events(start, end):
	"""Collect calendar events (leaves, block dates, holidays) between
	start and end for the current session user's employee record."""
	events = []

	employee = webnotes.conn.get_default("employee", webnotes.session.user)
	company = webnotes.conn.get_default("company", webnotes.session.user)

	from webnotes.widgets.reportview import build_match_conditions
	match_conditions = build_match_conditions("Leave Application")

	if "Employee" in webnotes.get_roles():
		# show department leaves for employee
		add_department_leaves(events, start, end, employee, company)

	add_leaves(events, start, end, employee, company, match_conditions)
	add_block_dates(events, start, end, employee, company)
	add_holidays(events, start, end, employee, company)

	return events
def add_department_leaves(events, start, end, employee, company):
	"""Append leave events for everyone in the employee's department."""
	department = webnotes.conn.get_value("Employee", employee, "department")
	if not department:
		return

	# restrict the leave query to colleagues of the same department/company
	colleagues = webnotes.conn.sql_list("""select name from tabEmployee where department=%s
		and company=%s""", (department, company))

	add_leaves(events, start, end, employee, company,
		match_conditions="employee in (\"%s\")" % '", "'.join(colleagues))
def add_leaves(events, start, end, employee, company, match_conditions=None):
	"""Append leave applications overlapping [start, end] to ``events``,
	skipping rejected/cancelled ones and avoiding duplicate entries."""
	query = """select name, from_date, to_date, employee_name, half_day,
		status, employee, docstatus
		from `tabLeave Application` where
		(from_date between %s and %s or to_date between %s and %s)
		and docstatus < 2
		and status!="Rejected" """
	if match_conditions:
		query += " and " + match_conditions

	for row in webnotes.conn.sql(query, (start, end, start, end), as_dict=True):
		title = cstr(row.employee_name)
		if row.half_day:
			title += _(" (Half Day)")

		event = {
			"name": row.name,
			"doctype": "Leave Application",
			"from_date": row.from_date,
			"to_date": row.to_date,
			"status": row.status,
			"title": title,
			"docstatus": row.docstatus
		}
		# department query may return the same application twice
		if event not in events:
			events.append(event)
def add_block_dates(events, start, end, employee, company):
	"""Append applicable leave-block dates as calendar events."""
	from hr.doctype.leave_block_list.leave_block_list import get_applicable_block_dates

	applicable = get_applicable_block_dates(start, end, employee, company, all_lists=True)

	for idx, blocked in enumerate(applicable):
		events.append({
			"doctype": "Leave Block List Date",
			"from_date": blocked.block_date,
			"title": _("Leave Blocked") + ": " + blocked.reason,
			# synthetic name: these rows are not saved documents
			"name": "_" + str(idx),
		})
def add_holidays(events, start, end, employee, company):
	"""Append holidays from the employee's holiday list as calendar events."""
	holiday_list = webnotes.conn.get_value("Employee", employee, "holiday_list")
	if not holiday_list:
		return

	rows = webnotes.conn.sql("""select name, holiday_date, description
		from `tabHoliday` where parent=%s and holiday_date between %s and %s""",
		(holiday_list, start, end), as_dict=True)

	for holiday in rows:
		events.append({
			"doctype": "Holiday",
			"from_date": holiday.holiday_date,
			"title": _("Holiday") + ": " + cstr(holiday.description),
			"name": holiday.name
		})
@webnotes.whitelist()
def query_for_permitted_employees(doctype, txt, searchfield, start, page_len, filters):
	# Link-field search query: return (name, employee_name) rows of active
	# employees the current user may see, ranking matches on `name` before
	# matches on `employee_name`.
	txt = "%" + cstr(txt) + "%"

	if "Leave Approver" in webnotes.user.get_roles():
		# Approvers see employees that either list them as an approver, have
		# no approver list at all, or are the user's own record.
		# NOTE(review): webnotes.session.user is interpolated directly into
		# the SQL string; presumably safe because it is server-controlled,
		# but a parameterized query would be more robust — confirm.
		condition = """and (exists(select ela.name from `tabEmployee Leave Approver` ela
			where ela.parent=`tabEmployee`.name and ela.leave_approver= "%s") or
			not exists(select ela.name from `tabEmployee Leave Approver` ela
			where ela.parent=`tabEmployee`.name)
			or user_id = "%s")""" % (webnotes.session.user, webnotes.session.user)
	else:
		from webnotes.widgets.reportview import build_match_conditions
		condition = build_match_conditions("Employee")
		condition = ("and " + condition) if condition else ""

	# `searchfield` and `condition` are spliced into the SQL text; the text
	# search values and paging bounds are passed as parameters.
	return webnotes.conn.sql("""select name, employee_name from `tabEmployee`
		where status = 'Active' and docstatus < 2 and
		(`%s` like %s or employee_name like %s) %s
		order by
		case when name like %s then 0 else 1 end,
		case when employee_name like %s then 0 else 1 end,
		name limit %s, %s""" % tuple([searchfield] + ["%s"]*2 + [condition] + ["%s"]*4),
		(txt, txt, txt, txt, start, page_len))
|
unknown
|
codeparrot/codeparrot-clean
| ||
;(function() {
/** Used as a safe reference for `undefined` in pre-ES5 environments. */
var undefined;
/** Used to detect when a function becomes hot. */
var HOT_COUNT = 150;
/** Used as the size to cover large array optimizations. */
var LARGE_ARRAY_SIZE = 200;
/** Used as the `TypeError` message for "Functions" methods. */
var FUNC_ERROR_TEXT = 'Expected a function';
/** Used as the maximum memoize cache size. */
var MAX_MEMOIZE_SIZE = 500;
/** Used as references for various `Number` constants. */
var MAX_SAFE_INTEGER = 9007199254740991,
MAX_INTEGER = 1.7976931348623157e+308;
/** Used as references for the maximum length and index of an array. */
var MAX_ARRAY_LENGTH = 4294967295,
MAX_ARRAY_INDEX = MAX_ARRAY_LENGTH - 1;
/** `Object#toString` result references. */
var funcTag = '[object Function]',
numberTag = '[object Number]',
objectTag = '[object Object]';
/** Used as a reference to the global object. */
var root = (typeof global == 'object' && global) || this;
/** Used to store lodash to test for bad extensions/shims. */
var lodashBizarro = root.lodashBizarro;
/** Used for native method references. */
var arrayProto = Array.prototype,
funcProto = Function.prototype,
objectProto = Object.prototype,
numberProto = Number.prototype,
stringProto = String.prototype;
/** Method and object shortcuts. */
var phantom = root.phantom,
process = root.process,
amd = root.define ? define.amd : undefined,
args = toArgs([1, 2, 3]),
argv = process ? process.argv : undefined,
defineProperty = Object.defineProperty,
document = phantom ? undefined : root.document,
body = root.document ? root.document.body : undefined,
create = Object.create,
fnToString = funcProto.toString,
freeze = Object.freeze,
getSymbols = Object.getOwnPropertySymbols,
identity = function(value) { return value; },
noop = function() {},
objToString = objectProto.toString,
params = argv,
push = arrayProto.push,
realm = {},
slice = arrayProto.slice,
strictArgs = (function() { 'use strict'; return arguments; }(1, 2, 3));
var ArrayBuffer = root.ArrayBuffer,
Buffer = root.Buffer,
Map = root.Map,
Promise = root.Promise,
Proxy = root.Proxy,
Set = root.Set,
Symbol = root.Symbol,
Uint8Array = root.Uint8Array,
WeakMap = root.WeakMap,
WeakSet = root.WeakSet;
var arrayBuffer = ArrayBuffer ? new ArrayBuffer(2) : undefined,
map = Map ? new Map : undefined,
promise = Promise ? Promise.resolve(1) : undefined,
set = Set ? new Set : undefined,
symbol = Symbol ? Symbol('a') : undefined,
weakMap = WeakMap ? new WeakMap : undefined,
weakSet = WeakSet ? new WeakSet : undefined;
/** Math helpers. */
var add = function(x, y) { return x + y; },
doubled = function(n) { return n * 2; },
isEven = function(n) { return n % 2 == 0; },
square = function(n) { return n * n; };
/** Stub functions. */
var stubA = function() { return 'a'; },
stubB = function() { return 'b'; },
stubC = function() { return 'c'; };
var stubTrue = function() { return true; },
stubFalse = function() { return false; };
var stubNaN = function() { return NaN; },
stubNull = function() { return null; };
var stubZero = function() { return 0; },
stubOne = function() { return 1; },
stubTwo = function() { return 2; },
stubThree = function() { return 3; },
stubFour = function() { return 4; };
var stubArray = function() { return []; },
stubObject = function() { return {}; },
stubString = function() { return ''; };
/** List of Latin Unicode letters. */
var burredLetters = [
// Latin-1 Supplement letters.
'\xc0', '\xc1', '\xc2', '\xc3', '\xc4', '\xc5', '\xc6', '\xc7', '\xc8', '\xc9', '\xca', '\xcb', '\xcc', '\xcd', '\xce', '\xcf',
'\xd0', '\xd1', '\xd2', '\xd3', '\xd4', '\xd5', '\xd6', '\xd8', '\xd9', '\xda', '\xdb', '\xdc', '\xdd', '\xde', '\xdf',
'\xe0', '\xe1', '\xe2', '\xe3', '\xe4', '\xe5', '\xe6', '\xe7', '\xe8', '\xe9', '\xea', '\xeb', '\xec', '\xed', '\xee', '\xef',
'\xf0', '\xf1', '\xf2', '\xf3', '\xf4', '\xf5', '\xf6', '\xf8', '\xf9', '\xfa', '\xfb', '\xfc', '\xfd', '\xfe', '\xff',
// Latin Extended-A letters.
'\u0100', '\u0101', '\u0102', '\u0103', '\u0104', '\u0105', '\u0106', '\u0107', '\u0108', '\u0109', '\u010a', '\u010b', '\u010c', '\u010d', '\u010e', '\u010f',
'\u0110', '\u0111', '\u0112', '\u0113', '\u0114', '\u0115', '\u0116', '\u0117', '\u0118', '\u0119', '\u011a', '\u011b', '\u011c', '\u011d', '\u011e', '\u011f',
'\u0120', '\u0121', '\u0122', '\u0123', '\u0124', '\u0125', '\u0126', '\u0127', '\u0128', '\u0129', '\u012a', '\u012b', '\u012c', '\u012d', '\u012e', '\u012f',
'\u0130', '\u0131', '\u0132', '\u0133', '\u0134', '\u0135', '\u0136', '\u0137', '\u0138', '\u0139', '\u013a', '\u013b', '\u013c', '\u013d', '\u013e', '\u013f',
'\u0140', '\u0141', '\u0142', '\u0143', '\u0144', '\u0145', '\u0146', '\u0147', '\u0148', '\u0149', '\u014a', '\u014b', '\u014c', '\u014d', '\u014e', '\u014f',
'\u0150', '\u0151', '\u0152', '\u0153', '\u0154', '\u0155', '\u0156', '\u0157', '\u0158', '\u0159', '\u015a', '\u015b', '\u015c', '\u015d', '\u015e', '\u015f',
'\u0160', '\u0161', '\u0162', '\u0163', '\u0164', '\u0165', '\u0166', '\u0167', '\u0168', '\u0169', '\u016a', '\u016b', '\u016c', '\u016d', '\u016e', '\u016f',
'\u0170', '\u0171', '\u0172', '\u0173', '\u0174', '\u0175', '\u0176', '\u0177', '\u0178', '\u0179', '\u017a', '\u017b', '\u017c', '\u017d', '\u017e', '\u017f'
];
/** List of combining diacritical marks. */
var comboMarks = [
'\u0300', '\u0301', '\u0302', '\u0303', '\u0304', '\u0305', '\u0306', '\u0307', '\u0308', '\u0309', '\u030a', '\u030b', '\u030c', '\u030d', '\u030e', '\u030f',
'\u0310', '\u0311', '\u0312', '\u0313', '\u0314', '\u0315', '\u0316', '\u0317', '\u0318', '\u0319', '\u031a', '\u031b', '\u031c', '\u031d', '\u031e', '\u031f',
'\u0320', '\u0321', '\u0322', '\u0323', '\u0324', '\u0325', '\u0326', '\u0327', '\u0328', '\u0329', '\u032a', '\u032b', '\u032c', '\u032d', '\u032e', '\u032f',
'\u0330', '\u0331', '\u0332', '\u0333', '\u0334', '\u0335', '\u0336', '\u0337', '\u0338', '\u0339', '\u033a', '\u033b', '\u033c', '\u033d', '\u033e', '\u033f',
'\u0340', '\u0341', '\u0342', '\u0343', '\u0344', '\u0345', '\u0346', '\u0347', '\u0348', '\u0349', '\u034a', '\u034b', '\u034c', '\u034d', '\u034e', '\u034f',
'\u0350', '\u0351', '\u0352', '\u0353', '\u0354', '\u0355', '\u0356', '\u0357', '\u0358', '\u0359', '\u035a', '\u035b', '\u035c', '\u035d', '\u035e', '\u035f',
'\u0360', '\u0361', '\u0362', '\u0363', '\u0364', '\u0365', '\u0366', '\u0367', '\u0368', '\u0369', '\u036a', '\u036b', '\u036c', '\u036d', '\u036e', '\u036f',
'\ufe20', '\ufe21', '\ufe22', '\ufe23'
];
/** List of converted Latin Unicode letters. */
var deburredLetters = [
// Converted Latin-1 Supplement letters.
'A', 'A', 'A', 'A', 'A', 'A', 'Ae', 'C', 'E', 'E', 'E', 'E', 'I', 'I', 'I',
'I', 'D', 'N', 'O', 'O', 'O', 'O', 'O', 'O', 'U', 'U', 'U', 'U', 'Y', 'Th',
'ss', 'a', 'a', 'a', 'a', 'a', 'a', 'ae', 'c', 'e', 'e', 'e', 'e', 'i', 'i', 'i',
'i', 'd', 'n', 'o', 'o', 'o', 'o', 'o', 'o', 'u', 'u', 'u', 'u', 'y', 'th', 'y',
// Converted Latin Extended-A letters.
'A', 'a', 'A', 'a', 'A', 'a', 'C', 'c', 'C', 'c', 'C', 'c', 'C', 'c',
'D', 'd', 'D', 'd', 'E', 'e', 'E', 'e', 'E', 'e', 'E', 'e', 'E', 'e',
'G', 'g', 'G', 'g', 'G', 'g', 'G', 'g', 'H', 'h', 'H', 'h',
'I', 'i', 'I', 'i', 'I', 'i', 'I', 'i', 'I', 'i', 'IJ', 'ij', 'J', 'j',
'K', 'k', 'k', 'L', 'l', 'L', 'l', 'L', 'l', 'L', 'l', 'L', 'l',
'N', 'n', 'N', 'n', 'N', 'n', "'n", 'N', 'n',
'O', 'o', 'O', 'o', 'O', 'o', 'Oe', 'oe',
'R', 'r', 'R', 'r', 'R', 'r', 'S', 's', 'S', 's', 'S', 's', 'S', 's',
'T', 't', 'T', 't', 'T', 't',
'U', 'u', 'U', 'u', 'U', 'u', 'U', 'u', 'U', 'u', 'U', 'u',
'W', 'w', 'Y', 'y', 'Y', 'Z', 'z', 'Z', 'z', 'Z', 'z', 's'
];
/** Used to provide falsey values to methods. */
var falsey = [, null, undefined, false, 0, NaN, ''];
/** Used to specify the emoji style glyph variant of characters. */
var emojiVar = '\ufe0f';
/** Used to provide empty values to methods. */
var empties = [[], {}].concat(falsey.slice(1));
/** Used to test error objects. */
var errors = [
new Error,
new EvalError,
new RangeError,
new ReferenceError,
new SyntaxError,
new TypeError,
new URIError
];
/** List of fitzpatrick modifiers. */
var fitzModifiers = [
'\ud83c\udffb',
'\ud83c\udffc',
'\ud83c\udffd',
'\ud83c\udffe',
'\ud83c\udfff'
];
/** Used to provide primitive values to methods. */
var primitives = [null, undefined, false, true, 1, NaN, 'a'];
/** Used to check whether methods support typed arrays. */
var typedArrays = [
'Float32Array',
'Float64Array',
'Int8Array',
'Int16Array',
'Int32Array',
'Uint8Array',
'Uint8ClampedArray',
'Uint16Array',
'Uint32Array'
];
/** Used to check whether methods support array views. */
var arrayViews = typedArrays.concat('DataView');
/** The file path of the lodash file to test. */
var filePath = (function() {
var min = 2,
result = params || [];
if (phantom) {
min = 0;
result = params = phantom.args || require('system').args;
}
var last = result[result.length - 1];
result = (result.length > min && !/test(?:\.js)?$/.test(last)) ? last : '../lodash.js';
if (!amd) {
try {
result = require('fs').realpathSync(result);
} catch (e) {}
try {
result = require.resolve(result);
} catch (e) {}
}
return result;
}());
/** The `ui` object. */
var ui = root.ui || (root.ui = {
'buildPath': filePath,
'loaderPath': '',
'isModularize': /\b(?:amd|commonjs|es|node|npm|(index|main)\.js)\b/.test(filePath),
'isStrict': /\bes\b/.test(filePath) || 'default' in require(filePath),
'urlParams': {}
});
/** The basename of the lodash file to test. */
var basename = /[\w.-]+$/.exec(filePath)[0];
/** Used to indicate testing a modularized build. */
var isModularize = ui.isModularize;
/** Detect if testing `npm` modules. */
var isNpm = isModularize && /\bnpm\b/.test([ui.buildPath, ui.urlParams.build]);
/** Detect if running in PhantomJS. */
var isPhantom = phantom || (typeof callPhantom == 'function');
/** Detect if lodash is in strict mode. */
var isStrict = ui.isStrict;
/*--------------------------------------------------------------------------*/
// Leak to avoid sporadic `noglobals` fails on Edge in Sauce Labs.
root.msWDfn = undefined;
// Assign `setTimeout` to itself to avoid being flagged as a leak.
setProperty(root, 'setTimeout', setTimeout);
// Exit early if going to run tests in a PhantomJS web page.
if (phantom && isModularize) {
var page = require('webpage').create();
page.onCallback = function(details) {
var coverage = details.coverage;
if (coverage) {
var fs = require('fs'),
cwd = fs.workingDirectory,
sep = fs.separator;
fs.write([cwd, 'coverage', 'coverage.json'].join(sep), JSON.stringify(coverage));
}
phantom.exit(details.failed ? 1 : 0);
};
page.onConsoleMessage = function(message) {
console.log(message);
};
page.onInitialized = function() {
page.evaluate(function() {
document.addEventListener('DOMContentLoaded', function() {
QUnit.done(function(details) {
details.coverage = window.__coverage__;
callPhantom(details);
});
});
});
};
page.open(filePath, function(status) {
if (status != 'success') {
console.log('PhantomJS failed to load page: ' + filePath);
phantom.exit(1);
}
});
console.log('test.js invoked with arguments: ' + JSON.stringify(slice.call(params)));
return;
}
/*--------------------------------------------------------------------------*/

/** Used to test Web Workers. */
var Worker = !(ui.isForeign || ui.isSauceLabs || isModularize) &&
  (document && document.origin != 'null') && root.Worker;

/** Used to test host objects in IE. */
try {
  var xml = new ActiveXObject('Microsoft.XMLDOM');
} catch (e) {}

/** Poison the free variable `root` in Node.js */
// Any lodash code that accidentally touches the Node `root` global will
// throw a `ReferenceError` via this getter.
try {
  defineProperty(global.root, 'root', {
    'configurable': false,
    'enumerable': false,
    'get': function() { throw new ReferenceError; }
  });
} catch (e) {}

/** Load QUnit and extras. */
var QUnit = root.QUnit || require('qunit-extras');

/** Load stable Lodash. */
var lodashStable = root.lodashStable;
if (!lodashStable) {
  try {
    lodashStable = interopRequire('../node_modules/lodash/lodash.js');
  } catch (e) {
    console.log('Error: The stable lodash dev dependency should be at least a version behind master branch.');
    return;
  }
  lodashStable = lodashStable.noConflict();
}

/** The `lodash` function to test. */
var _ = root._ || (root._ = interopRequire(filePath));

/** Used to test pseudo private map caches. */
// Harvests lodash's internal cache constructors: `Hash`/`MapCache` from a
// memoize cache's `__data__`, and `ListCache`/`Stack` from the stack object
// `isMatchWith` passes as the last customizer argument.
var mapCaches = (function() {
  var MapCache = (_.memoize || lodashStable.memoize).Cache;
  var result = {
    'Hash': new MapCache().__data__.hash.constructor,
    'MapCache': MapCache
  };
  (_.isMatchWith || lodashStable.isMatchWith)({ 'a': 1 }, { 'a': 1 }, function() {
    var stack = lodashStable.last(arguments);
    result.ListCache = stack.__data__.constructor;
    result.Stack = stack.constructor;
  });
  return result;
}());

/** Used to detect instrumented istanbul code coverage runs. */
var coverage = root.__coverage__ || root[lodashStable.find(lodashStable.keys(root), function(key) {
  return /^(?:\$\$cov_\d+\$\$)$/.test(key);
})];

/** Used to test async functions. */
// `attempt` yields the error instead of throwing on engines without support.
var asyncFunc = lodashStable.attempt(function() {
  return Function('return async () => {}');
});

/** Used to test generator functions. */
var genFunc = lodashStable.attempt(function() {
  return Function('return function*(){}');
});

/** Used to restore the `_` reference. */
var oldDash = root._;
/**
 * Used to check for problems removing whitespace. For a whitespace reference,
 * see [V8's unit test](https://code.google.com/p/v8/source/browse/branches/bleeding_edge/test/mjsunit/whitespaces.js).
 */
var whitespace = lodashStable.filter([
  // Basic whitespace characters.
  ' ', '\t', '\x0b', '\f', '\xa0', '\ufeff',

  // Line terminators.
  '\n', '\r', '\u2028', '\u2029',

  // Unicode category "Zs" space separators.
  '\u1680', '\u180e', '\u2000', '\u2001', '\u2002', '\u2003', '\u2004', '\u2005',
  '\u2006', '\u2007', '\u2008', '\u2009', '\u200a', '\u202f', '\u205f', '\u3000'
],
// Keep only characters the current engine's `\s` actually matches
// (e.g. `\u180e` stopped being whitespace in newer Unicode versions).
function(chr) { return /\s/.exec(chr); })
.join('');
/**
* Creates a custom error object.
*
* @private
* @constructor
* @param {string} message The error message.
*/
function CustomError(message) {
  this.name = 'CustomError';
  this.message = message;
}

// Inherit from `Error.prototype` so instances satisfy `instanceof Error`,
// and restore `constructor` on the fresh prototype object.
CustomError.prototype = lodashStable.create(Error.prototype, {
  'constructor': CustomError
});
/**
* Removes all own enumerable string keyed properties from a given object.
*
* @private
* @param {Object} object The object to empty.
*/
/**
 * Removes all own enumerable string keyed properties from a given object.
 *
 * @private
 * @param {Object} object The object to empty.
 */
function emptyObject(object) {
  // `for...in` visits own and inherited enumerable keys; the `hasOwnProperty`
  // guard restricts deletion to own properties (and tolerates nullish input,
  // which simply iterates nothing).
  for (var key in object) {
    if (Object.prototype.hasOwnProperty.call(object, key)) {
      delete object[key];
    }
  }
}
/**
* Extracts the unwrapped value from its wrapper.
*
* @private
* @param {Object} wrapper The wrapper to unwrap.
* @returns {*} Returns the unwrapped value.
*/
/**
 * Extracts the unwrapped value from its wrapper.
 *
 * @private
 * @param {Object} wrapper The wrapper to unwrap.
 * @returns {*} Returns the unwrapped value.
 */
function getUnwrappedValue(wrapper) {
  // Replay each queued action against the accumulated value, seeded with
  // the raw wrapped value. `push.apply` spreads `action.args` even when it
  // is an array-like `arguments` object.
  return wrapper.__actions__.reduce(function(result, action) {
    var args = [result];
    push.apply(args, action.args);
    return action.func.apply(action.thisArg, args);
  }, wrapper.__wrapped__);
}
/**
* Loads the module of `id`. If the module has an `exports.default`, the
* exported default value is returned as the resolved module.
*
* @private
* @param {string} id The identifier of the module to resolve.
* @returns {*} Returns the resolved module.
*/
/**
 * Loads the module of `id`. If the module has an `exports.default`, the
 * exported default value is returned as the resolved module.
 *
 * @private
 * @param {string} id The identifier of the module to resolve.
 * @returns {*} Returns the resolved module.
 */
function interopRequire(id) {
  var exported = require(id);
  if ('default' in exported) {
    return exported['default'];
  }
  return exported;
}
/**
* Sets a non-enumerable property value on `object`.
*
* Note: This function is used to avoid a bug in older versions of V8 where
* overwriting non-enumerable built-ins makes them enumerable.
* See https://code.google.com/p/v8/issues/detail?id=1623
*
* @private
* @param {Object} object The object modify.
* @param {string} key The name of the property to set.
* @param {*} value The property value.
*/
/**
 * Sets a non-enumerable property value on `object`.
 *
 * Note: This function is used to avoid a bug in older versions of V8 where
 * overwriting non-enumerable built-ins makes them enumerable.
 * See https://code.google.com/p/v8/issues/detail?id=1623
 *
 * @private
 * @param {Object} object The object modify.
 * @param {string} key The name of the property to set.
 * @param {*} value The property value.
 * @returns {Object} Returns `object`.
 */
function setProperty(object, key, value) {
  var descriptor = {
    'configurable': true,
    'enumerable': false,
    'writable': true,
    'value': value
  };
  // Fall back to plain assignment when `defineProperty` is unavailable or
  // rejects the target (e.g. sealed host objects).
  try {
    defineProperty(object, key, descriptor);
  } catch (e) {
    object[key] = value;
  }
  return object;
}
/**
* Skips a given number of tests with a passing result.
*
* @private
* @param {Object} assert The QUnit assert object.
* @param {number} [count=1] The number of tests to skip.
*/
/**
 * Skips a given number of tests with a passing result.
 *
 * @private
 * @param {Object} assert The QUnit assert object.
 * @param {number} [count=1] The number of tests to skip.
 */
function skipAssert(assert, count) {
  // A missing (or falsey) `count` means a single skipped assertion.
  var remaining = count || 1;
  while (remaining--) {
    assert.ok(true, 'test skipped');
  }
}
/**
* Converts `array` to an `arguments` object.
*
* @private
* @param {Array} array The array to convert.
* @returns {Object} Returns the converted `arguments` object.
*/
/**
 * Converts `array` to an `arguments` object.
 *
 * @private
 * @param {Array} array The array to convert.
 * @returns {Object} Returns the converted `arguments` object.
 */
function toArgs(array) {
  // Applying a function that returns its own `arguments` yields a genuine
  // `arguments` object populated from `array`.
  function captureArgs() {
    return arguments;
  }
  return captureArgs.apply(undefined, array);
}
/*--------------------------------------------------------------------------*/
// Add bizarro values.
// Sabotage built-ins, reload lodash as `lodashBizarro` against the broken
// environment, then restore everything. Only runs under Node-like hosts.
(function() {
  if (document || (typeof require != 'function')) {
    return;
  }
  var nativeString = fnToString.call(toString),
      reToString = /toString/g;

  function createToString(funcName) {
    return lodashStable.constant(nativeString.replace(reToString, funcName));
  }

  // Allow bypassing native checks.
  // Temporarily restores the real `toString` while producing a result so the
  // wrapper itself isn't observed mid-call.
  setProperty(funcProto, 'toString', function wrapper() {
    setProperty(funcProto, 'toString', fnToString);
    var result = lodashStable.has(this, 'toString') ? this.toString() : fnToString.call(this);
    setProperty(funcProto, 'toString', wrapper);
    return result;
  });

  // Add prototype extensions.
  funcProto._method = noop;

  // Set bad shims.
  setProperty(Object, 'create', undefined);
  setProperty(Object, 'getOwnPropertySymbols', undefined);

  var _propertyIsEnumerable = objectProto.propertyIsEnumerable;
  setProperty(objectProto, 'propertyIsEnumerable', function(key) {
    return !(key == 'valueOf' && this && this.valueOf === 1) && _propertyIsEnumerable.call(this, key);
  });

  // Hide `Buffer` from everything except lodash's own detection call sites.
  if (Buffer) {
    defineProperty(root, 'Buffer', {
      'configurable': true,
      'enumerable': true,
      'get': function get() {
        var caller = get.caller,
            name = caller ? caller.name : '';

        if (!(name == 'runInContext' || name.length == 1 || /\b_\.isBuffer\b/.test(caller))) {
          return Buffer;
        }
      }
    });
  }
  // Make the first `Map` construction return a bogus object, then restore.
  if (Map) {
    setProperty(root, 'Map', (function() {
      var count = 0;
      return function() {
        if (count++) {
          return new Map;
        }
        setProperty(root, 'Map', Map);
        return {};
      };
    }()));

    setProperty(root.Map, 'toString', createToString('Map'));
  }
  setProperty(root, 'Promise', noop);
  setProperty(root, 'Set', noop);
  setProperty(root, 'Symbol', undefined);
  setProperty(root, 'WeakMap', noop);

  // Fake `WinRTError`.
  setProperty(root, 'WinRTError', Error);

  // Clear cache so lodash can be reloaded.
  emptyObject(require.cache);

  // Load lodash and expose it to the bad extensions/shims.
  lodashBizarro = interopRequire(filePath);
  root._ = oldDash;

  // Restore built-in methods.
  setProperty(Object, 'create', create);
  setProperty(objectProto, 'propertyIsEnumerable', _propertyIsEnumerable);
  setProperty(root, 'Buffer', Buffer);

  if (getSymbols) {
    Object.getOwnPropertySymbols = getSymbols;
  } else {
    delete Object.getOwnPropertySymbols;
  }
  if (Map) {
    setProperty(root, 'Map', Map);
  } else {
    delete root.Map;
  }
  if (Promise) {
    setProperty(root, 'Promise', Promise);
  } else {
    delete root.Promise;
  }
  if (Set) {
    setProperty(root, 'Set', Set);
  } else {
    delete root.Set;
  }
  if (Symbol) {
    setProperty(root, 'Symbol', Symbol);
  } else {
    delete root.Symbol;
  }
  if (WeakMap) {
    setProperty(root, 'WeakMap', WeakMap);
  } else {
    delete root.WeakMap;
  }
  delete root.WinRTError;
  delete funcProto._method;
}());
// Add other realm values from the `vm` module.
// Builds a set of values created in a separate `vm` context (a foreign
// realm) and merges them into `realm` for cross-realm type checks.
lodashStable.attempt(function() {
  lodashStable.assign(realm, require('vm').runInNewContext([
    '(function() {',
    '  var noop = function() {},',
    '      root = this;',
    '',
    '  var object = {',
    "    'ArrayBuffer': root.ArrayBuffer,",
    "    'arguments': (function() { return arguments; }(1, 2, 3)),",
    "    'array': [1],",
    "    'arrayBuffer': root.ArrayBuffer ? new root.ArrayBuffer : undefined,",
    "    'boolean': Object(false),",
    "    'date': new Date,",
    "    'errors': [new Error, new EvalError, new RangeError, new ReferenceError, new SyntaxError, new TypeError, new URIError],",
    "    'function': noop,",
    "    'map': root.Map ? new root.Map : undefined,",
    "    'nan': NaN,",
    "    'null': null,",
    "    'number': Object(0),",
    "    'object': { 'a': 1 },",
    "    'promise': root.Promise ? Promise.resolve(1) : undefined,",
    "    'regexp': /x/,",
    "    'set': root.Set ? new root.Set : undefined,",
    "    'string': Object('a'),",
    "    'symbol': root.Symbol ? root.Symbol() : undefined,",
    "    'undefined': undefined,",
    "    'weakMap': root.WeakMap ? new root.WeakMap : undefined,",
    "    'weakSet': root.WeakSet ? new root.WeakSet : undefined",
    '  };',
    '',
    // NOTE(review): the generated `var Ctor = root[type]` line omits a
    // semicolon (unlike the iframe variant below); ASI makes it harmless,
    // but it's a string the realm evaluates, so it is left untouched here.
    "  ['" + arrayViews.join("', '") + "'].forEach(function(type) {",
    '    var Ctor = root[type]',
    '    object[type] = Ctor;',
    '    object[type.toLowerCase()] = Ctor ? new Ctor(new ArrayBuffer(24)) : undefined;',
    '  });',
    '',
    '  return object;',
    '}());'
  ].join('\n')));
});
// Add other realm values from an iframe.
// Browser counterpart of the `vm` realm block above: creates the foreign
// values inside a hidden iframe, which reports back via `_._realm`.
lodashStable.attempt(function() {
  _._realm = realm;

  var iframe = document.createElement('iframe');
  iframe.frameBorder = iframe.height = iframe.width = 0;
  body.appendChild(iframe);

  // Resolve the iframe document across browsers (`contentDocument` vs.
  // `contentWindow.document`).
  var idoc = (idoc = iframe.contentDocument || iframe.contentWindow).document || idoc;
  idoc.write([
    '<html>',
    '<body>',
    '<script>',
    'var _ = parent._,',
    '    noop = function() {},',
    '    root = this;',
    '',
    'var object = {',
    "  'ArrayBuffer': root.ArrayBuffer,",
    "  'arguments': (function() { return arguments; }(1, 2, 3)),",
    "  'array': [1],",
    "  'arrayBuffer': root.ArrayBuffer ? new root.ArrayBuffer : undefined,",
    "  'boolean': Object(false),",
    "  'date': new Date,",
    "  'element': document.body,",
    "  'errors': [new Error, new EvalError, new RangeError, new ReferenceError, new SyntaxError, new TypeError, new URIError],",
    "  'function': noop,",
    "  'map': root.Map ? new root.Map : undefined,",
    "  'nan': NaN,",
    "  'null': null,",
    "  'number': Object(0),",
    "  'object': { 'a': 1 },",
    "  'promise': root.Promise ? Promise.resolve(1) : undefined,",
    "  'regexp': /x/,",
    "  'set': root.Set ? new root.Set : undefined,",
    "  'string': Object('a'),",
    "  'symbol': root.Symbol ? root.Symbol() : undefined,",
    "  'undefined': undefined,",
    "  'weakMap': root.WeakMap ? new root.WeakMap : undefined,",
    "  'weakSet': root.WeakSet ? new root.WeakSet : undefined",
    '};',
    '',
    "_.each(['" + arrayViews.join("', '") + "'], function(type) {",
    '  var Ctor = root[type];',
    '  object[type] = Ctor;',
    '  object[type.toLowerCase()] = Ctor ? new Ctor(new ArrayBuffer(24)) : undefined;',
    '});',
    '',
    '_.assign(_._realm, object);',
    '</script>',
    '</body>',
    '</html>'
  ].join('\n'));

  idoc.close();
  delete _._realm;
});
// Add a web worker.
// Spin up a web worker (cache-busted by timestamp) that loads the build
// under test and reports its version back into `_._VERSION`, which the
// "should support loading ... in a web worker" test later polls.
lodashStable.attempt(function() {
  var worker = new Worker('./asset/worker.js?t=' + (+new Date));
  worker.addEventListener('message', function(e) {
    _._VERSION = e.data || '';
  }, false);

  worker.postMessage(ui.buildPath);
});
// Expose internal modules for better code coverage.
// Expose selected internal modules (as `_._baseEach`, `_._isIndex`, ...) for
// better code coverage; only possible for modularized non-AMD, non-npm builds
// where the private module files sit next to the entry file.
lodashStable.attempt(function() {
  var path = require('path'),
      basePath = path.dirname(filePath);

  if (isModularize && !(amd || isNpm)) {
    lodashStable.each([
      'baseEach',
      'isIndex',
      'isIterateeCall',
      'memoizeCapped'
    ], function(funcName) {
      _['_' + funcName] = interopRequire(path.join(basePath, '_' + funcName));
    });
  }
});
/*--------------------------------------------------------------------------*/
// Echo the CLI invocation when run from a command line (`params` is only
// set for CLI runs).
if (params) {
  console.log('Running lodash tests.');
  console.log('test.js invoked with arguments: ' + JSON.stringify(slice.call(params)));
}
QUnit.module(basename);

// Smoke tests for how the build itself loads (AMD, shim, worker) and for the
// `lodashBizarro` instance created against sabotaged built-ins above.
(function() {
  QUnit.test('should support loading ' + basename + ' as the "lodash" module', function(assert) {
    assert.expect(1);

    if (amd) {
      assert.strictEqual((lodashModule || {}).moduleName, 'lodash');
    }
    else {
      skipAssert(assert);
    }
  });

  QUnit.test('should support loading ' + basename + ' with the Require.js "shim" configuration option', function(assert) {
    assert.expect(1);

    if (amd && lodashStable.includes(ui.loaderPath, 'requirejs')) {
      assert.strictEqual((shimmedModule || {}).moduleName, 'shimmed');
    } else {
      skipAssert(assert);
    }
  });

  QUnit.test('should support loading ' + basename + ' as the "underscore" module', function(assert) {
    assert.expect(1);

    if (amd) {
      assert.strictEqual((underscoreModule || {}).moduleName, 'underscore');
    }
    else {
      skipAssert(assert);
    }
  });

  QUnit.test('should support loading ' + basename + ' in a web worker', function(assert) {
    assert.expect(1);

    var done = assert.async();

    if (Worker) {
      // Poll `_._VERSION` (set asynchronously by the worker above) for up
      // to the per-retry time budget before asserting.
      var limit = 30000 / QUnit.config.asyncRetries,
          start = +new Date;

      var attempt = function() {
        var actual = _._VERSION;
        if ((new Date - start) < limit && typeof actual != 'string') {
          setTimeout(attempt, 16);
          return;
        }
        assert.strictEqual(actual, _.VERSION);
        done();
      };

      attempt();
    }
    else {
      skipAssert(assert);
      done();
    }
  });

  QUnit.test('should not add `Function.prototype` extensions to lodash', function(assert) {
    assert.expect(1);

    if (lodashBizarro) {
      assert.notOk('_method' in lodashBizarro);
    }
    else {
      skipAssert(assert);
    }
  });

  QUnit.test('should avoid non-native built-ins', function(assert) {
    assert.expect(6);

    function message(lodashMethod, nativeMethod) {
      return '`' + lodashMethod + '` should avoid overwritten native `' + nativeMethod + '`';
    }

    function Foo() {
      this.a = 1;
    }
    Foo.prototype.b = 2;

    var object = { 'a': 1 },
        otherObject = { 'b': 2 },
        largeArray = lodashStable.times(LARGE_ARRAY_SIZE, lodashStable.constant(object));

    if (lodashBizarro) {
      try {
        var actual = lodashBizarro.create(Foo.prototype);
      } catch (e) {
        actual = null;
      }
      var label = message('_.create', 'Object.create');
      assert.ok(actual instanceof Foo, label);

      try {
        actual = [
          lodashBizarro.difference([object, otherObject], largeArray),
          lodashBizarro.intersection(largeArray, [object]),
          lodashBizarro.uniq(largeArray)
        ];
      } catch (e) {
        actual = null;
      }
      label = message('_.difference`, `_.intersection`, and `_.uniq', 'Map');
      assert.deepEqual(actual, [[otherObject], [object], [object]], label);

      try {
        if (Symbol) {
          object[symbol] = {};
        }
        actual = [
          lodashBizarro.clone(object),
          lodashBizarro.cloneDeep(object)
        ];
      } catch (e) {
        actual = null;
      }
      label = message('_.clone` and `_.cloneDeep', 'Object.getOwnPropertySymbols');
      assert.deepEqual(actual, [object, object], label);

      try {
        // Avoid buggy symbol detection in Babel's `_typeof` helper.
        var symObject = setProperty(Object(symbol), 'constructor', Object);
        actual = [
          Symbol ? lodashBizarro.clone(symObject) : {},
          Symbol ? lodashBizarro.isEqual(symObject, Object(symbol)) : false,
          Symbol ? lodashBizarro.toString(symObject) : ''
        ];
      } catch (e) {
        actual = null;
      }
      label = message('_.clone`, `_.isEqual`, and `_.toString', 'Symbol');
      assert.deepEqual(actual, [{}, false, ''], label);

      try {
        var map = new lodashBizarro.memoize.Cache;
        actual = map.set('a', 1).get('a');
      } catch (e) {
        actual = null;
      }
      label = message('_.memoize.Cache', 'Map');
      assert.deepEqual(actual, 1, label);

      try {
        map = new (Map || Object);
        if (Symbol && Symbol.iterator) {
          map[Symbol.iterator] = null;
        }
        actual = lodashBizarro.toArray(map);
      } catch (e) {
        actual = null;
      }
      label = message('_.toArray', 'Map');
      assert.deepEqual(actual, [], label);
    }
    else {
      skipAssert(assert, 6);
    }
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('isIndex');

// Tests the internal `_isIndex` helper; it is only exposed for modularized
// builds (see "Expose internal modules" above), hence the `func` guards.
(function() {
  var func = _._isIndex;

  QUnit.test('should return `true` for indexes', function(assert) {
    assert.expect(1);

    if (func) {
      var values = [[0], ['0'], ['1'], [3, 4], [MAX_SAFE_INTEGER - 1]],
          expected = lodashStable.map(values, stubTrue);

      var actual = lodashStable.map(values, function(args) {
        return func.apply(undefined, args);
      });

      assert.deepEqual(actual, expected);
    }
    else {
      skipAssert(assert);
    }
  });

  QUnit.test('should return `false` for non-indexes', function(assert) {
    assert.expect(1);

    if (func) {
      var values = [['1abc'], ['07'], ['0001'], [-1], [3, 3], [1.1], [MAX_SAFE_INTEGER]],
          expected = lodashStable.map(values, stubFalse);

      var actual = lodashStable.map(values, function(args) {
        return func.apply(undefined, args);
      });

      assert.deepEqual(actual, expected);
    }
    else {
      skipAssert(assert);
    }
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('isIterateeCall');

// Tests the internal `_isIterateeCall(value, index, object)` helper, which
// detects whether arguments look like an iteratee shorthand invocation.
(function() {
  var array = [1],
      func = _._isIterateeCall,
      object = { 'a': 1 };

  QUnit.test('should return `true` for iteratee calls', function(assert) {
    assert.expect(3);

    function Foo() {}
    Foo.prototype.a = 1;

    if (func) {
      assert.strictEqual(func(1, 0, array), true);
      assert.strictEqual(func(1, 'a', object), true);
      assert.strictEqual(func(1, 'a', new Foo), true);
    }
    else {
      skipAssert(assert, 3);
    }
  });

  QUnit.test('should return `false` for non-iteratee calls', function(assert) {
    assert.expect(4);

    if (func) {
      assert.strictEqual(func(2, 0, array), false);
      assert.strictEqual(func(1, 1.1, array), false);
      assert.strictEqual(func(1, 0, { 'length': MAX_SAFE_INTEGER + 1 }), false);
      assert.strictEqual(func(1, 'b', object), false);
    }
    else {
      skipAssert(assert, 4);
    }
  });

  QUnit.test('should work with `NaN` values', function(assert) {
    assert.expect(2);

    if (func) {
      assert.strictEqual(func(NaN, 0, [NaN]), true);
      assert.strictEqual(func(NaN, 'a', { 'a': NaN }), true);
    }
    else {
      skipAssert(assert, 2);
    }
  });

  QUnit.test('should not error when `index` is an object without a `toString` method', function(assert) {
    assert.expect(1);

    if (func) {
      try {
        var actual = func(1, { 'toString': null }, [1]);
      } catch (e) {
        var message = e.message;
      }
      assert.strictEqual(actual, false, message || '');
    }
    else {
      skipAssert(assert);
    }
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('map caches');

// Exercises the internal cache classes harvested in `mapCaches` above
// (`Hash`, `ListCache`, `MapCache`, `Stack`) through a `Map`-like interface.
(function() {
  // Keys cover every hashing path: primitives, `NaN`, objects, and symbols.
  var keys = [null, undefined, false, true, 1, -Infinity, NaN, {}, 'a', symbol || noop];

  // Pair each key with the value at the mirrored position.
  var pairs = lodashStable.map(keys, function(key, index) {
    var lastIndex = keys.length - 1;
    return [key, keys[lastIndex - index]];
  });

  function createCaches(pairs) {
    // A "large" stack is padded past `LARGE_ARRAY_SIZE` so it upgrades its
    // internal storage from a list cache to a map cache.
    var largeStack = new mapCaches.Stack(pairs),
        length = pairs ? pairs.length : 0;

    lodashStable.times(LARGE_ARRAY_SIZE - length, function() {
      largeStack.set({}, {});
    });

    return {
      'hashes': new mapCaches.Hash(pairs),
      'list caches': new mapCaches.ListCache(pairs),
      'map caches': new mapCaches.MapCache(pairs),
      'stack caches': new mapCaches.Stack(pairs),
      'large stacks': largeStack
    };
  }

  lodashStable.forOwn(createCaches(pairs), function(cache, kind) {
    var isLarge = /^large/.test(kind);

    QUnit.test('should implement a `Map` interface for ' + kind, function(assert) {
      assert.expect(83);

      lodashStable.each(keys, function(key, index) {
        var value = pairs[index][1];

        assert.deepEqual(cache.get(key), value);
        assert.strictEqual(cache.has(key), true);
        assert.strictEqual(cache.delete(key), true);
        assert.strictEqual(cache.has(key), false);
        assert.strictEqual(cache.get(key), undefined);
        assert.strictEqual(cache.delete(key), false);
        assert.strictEqual(cache.set(key, value), cache);
        assert.strictEqual(cache.has(key), true);
      });

      assert.strictEqual(cache.size, isLarge ? LARGE_ARRAY_SIZE : keys.length);
      assert.strictEqual(cache.clear(), undefined);
      assert.ok(lodashStable.every(keys, function(key) {
        return !cache.has(key);
      }));
    });
  });

  lodashStable.forOwn(createCaches(), function(cache, kind) {
    QUnit.test('should support changing values of ' + kind, function(assert) {
      assert.expect(10);

      lodashStable.each(keys, function(key) {
        cache.set(key, 1).set(key, 2);
        assert.strictEqual(cache.get(key), 2);
      });
    });
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash constructor');

// Wrapper-construction semantics; npm builds ship no chaining wrapper, so
// these skip under `isNpm`.
(function() {
  var values = empties.concat(true, 1, 'a'),
      expected = lodashStable.map(values, stubTrue);

  QUnit.test('should create a new instance when called without the `new` operator', function(assert) {
    assert.expect(1);

    if (!isNpm) {
      var actual = lodashStable.map(values, function(value) {
        return _(value) instanceof _;
      });

      assert.deepEqual(actual, expected);
    }
    else {
      skipAssert(assert);
    }
  });

  QUnit.test('should return the given `lodash` instances', function(assert) {
    assert.expect(1);

    if (!isNpm) {
      var actual = lodashStable.map(values, function(value) {
        var wrapped = _(value);
        return _(wrapped) === wrapped;
      });

      assert.deepEqual(actual, expected);
    }
    else {
      skipAssert(assert);
    }
  });

  QUnit.test('should convert foreign wrapped values to `lodash` instances', function(assert) {
    assert.expect(1);

    if (!isNpm && lodashBizarro) {
      var actual = lodashStable.map(values, function(value) {
        var wrapped = _(lodashBizarro(value)),
            unwrapped = wrapped.value();

        // `NaN !== NaN`, so compare by identity or mutual self-inequality.
        return wrapped instanceof _ &&
          ((unwrapped === value) || (unwrapped !== unwrapped && value !== value));
      });

      assert.deepEqual(actual, expected);
    }
    else {
      skipAssert(assert);
    }
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.add');

(function() {
  QUnit.test('should add two numbers', function(assert) {
    assert.expect(3);

    assert.strictEqual(_.add(6, 4), 10);
    assert.strictEqual(_.add(-6, 4), -2);
    assert.strictEqual(_.add(-6, -4), -10);
  });

  // `_.add` mirrors the `+` operator: string operands concatenate.
  QUnit.test('should not coerce arguments to numbers', function(assert) {
    assert.expect(2);

    assert.strictEqual(_.add('6', '4'), '64');
    assert.strictEqual(_.add('x', 'y'), 'xy');
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.after');

(function() {
  // Invokes an `_.after(n, ...)` wrapper `times` times and returns how many
  // times the inner function actually ran.
  function after(n, times) {
    var count = 0;
    lodashStable.times(times, _.after(n, function() { count++; }));
    return count;
  }

  QUnit.test('should create a function that invokes `func` after `n` calls', function(assert) {
    assert.expect(4);

    assert.strictEqual(after(5, 5), 1, 'after(n) should invoke `func` after being called `n` times');
    assert.strictEqual(after(5, 4), 0, 'after(n) should not invoke `func` before being called `n` times');
    assert.strictEqual(after(0, 0), 0, 'after(0) should not invoke `func` immediately');
    assert.strictEqual(after(0, 1), 1, 'after(0) should invoke `func` when called once');
  });

  QUnit.test('should coerce `n` values of `NaN` to `0`', function(assert) {
    assert.expect(1);

    assert.strictEqual(after(NaN, 1), 1);
  });

  QUnit.test('should use `this` binding of function', function(assert) {
    assert.expect(2);

    var after = _.after(1, function(assert) { return ++this.count; }),
        object = { 'after': after, 'count': 0 };

    object.after();
    assert.strictEqual(object.after(), 2);
    assert.strictEqual(object.count, 2);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.ary');

(function() {
  // Echoes its received arguments so tests can observe the capped arity.
  function fn(a, b, c) {
    return slice.call(arguments);
  }

  QUnit.test('should cap the number of arguments provided to `func`', function(assert) {
    assert.expect(2);

    // Capping `parseInt` at one argument drops the index `map` would pass
    // as the radix.
    var actual = lodashStable.map(['6', '8', '10'], _.ary(parseInt, 1));
    assert.deepEqual(actual, [6, 8, 10]);

    var capped = _.ary(fn, 2);
    assert.deepEqual(capped('a', 'b', 'c', 'd'), ['a', 'b']);
  });

  QUnit.test('should use `func.length` if `n` is not given', function(assert) {
    assert.expect(1);

    var capped = _.ary(fn);
    assert.deepEqual(capped('a', 'b', 'c', 'd'), ['a', 'b', 'c']);
  });

  QUnit.test('should treat a negative `n` as `0`', function(assert) {
    assert.expect(1);

    var capped = _.ary(fn, -1);

    try {
      var actual = capped('a');
    } catch (e) {}

    assert.deepEqual(actual, []);
  });

  QUnit.test('should coerce `n` to an integer', function(assert) {
    assert.expect(1);

    var values = ['1', 1.6, 'xyz'],
        expected = [['a'], ['a'], []];

    var actual = lodashStable.map(values, function(n) {
      var capped = _.ary(fn, n);
      return capped('a', 'b');
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should not force a minimum argument count', function(assert) {
    assert.expect(1);

    var args = ['a', 'b', 'c'],
        capped = _.ary(fn, 3);

    var expected = lodashStable.map(args, function(arg, index) {
      return args.slice(0, index);
    });

    var actual = lodashStable.map(expected, function(array) {
      return capped.apply(undefined, array);
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should use `this` binding of function', function(assert) {
    assert.expect(1);

    var capped = _.ary(function(a, b) { return this; }, 1),
        object = { 'capped': capped };

    assert.strictEqual(object.capped(), object);
  });

  QUnit.test('should use the existing `ary` if smaller', function(assert) {
    assert.expect(1);

    var capped = _.ary(_.ary(fn, 1), 2);
    assert.deepEqual(capped('a', 'b', 'c'), ['a']);
  });

  QUnit.test('should work as an iteratee for methods like `_.map`', function(assert) {
    assert.expect(1);

    var funcs = lodashStable.map([fn], _.ary),
        actual = funcs[0]('a', 'b', 'c');

    assert.deepEqual(actual, ['a', 'b', 'c']);
  });

  QUnit.test('should work when combined with other methods that use metadata', function(assert) {
    assert.expect(2);

    var array = ['a', 'b', 'c'],
        includes = _.curry(_.rearg(_.ary(_.includes, 2), 1, 0), 2);

    assert.strictEqual(includes('b')(array, 2), true);

    if (!isNpm) {
      includes = _(_.includes).ary(2).rearg(1, 0).curry(2).value();
      assert.strictEqual(includes('b')(array, 2), true);
    }
    else {
      skipAssert(assert);
    }
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.assignIn');

(function() {
  // `_.extend` must be the exact same function object as `_.assignIn`.
  QUnit.test('should be aliased', function(assert) {
    assert.expect(1);

    assert.strictEqual(_.extend, _.assignIn);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.assign and lodash.assignIn');

// Shared behavior of `assign` (own keys) and `assignIn` (own + inherited).
lodashStable.each(['assign', 'assignIn'], function(methodName) {
  var func = _[methodName];

  QUnit.test('`_.' + methodName + '` should assign source properties to `object`', function(assert) {
    assert.expect(1);

    assert.deepEqual(func({ 'a': 1 }, { 'b': 2 }), { 'a': 1, 'b': 2 });
  });

  QUnit.test('`_.' + methodName + '` should accept multiple sources', function(assert) {
    assert.expect(2);

    var expected = { 'a': 1, 'b': 2, 'c': 3 };
    assert.deepEqual(func({ 'a': 1 }, { 'b': 2 }, { 'c': 3 }), expected);
    assert.deepEqual(func({ 'a': 1 }, { 'b': 2, 'c': 2 }, { 'c': 3 }), expected);
  });

  QUnit.test('`_.' + methodName + '` should overwrite destination properties', function(assert) {
    assert.expect(1);

    var expected = { 'a': 3, 'b': 2, 'c': 1 };
    assert.deepEqual(func({ 'a': 1, 'b': 2 }, expected), expected);
  });

  QUnit.test('`_.' + methodName + '` should assign source properties with nullish values', function(assert) {
    assert.expect(1);

    var expected = { 'a': null, 'b': undefined, 'c': null };
    assert.deepEqual(func({ 'a': 1, 'b': 2 }, expected), expected);
  });

  QUnit.test('`_.' + methodName + '` should skip assignments if values are the same', function(assert) {
    assert.expect(1);

    var object = {};

    // Each destination property has a throwing setter; the assignment must
    // be skipped (getter value already equals the source value) or the
    // whole call would throw.
    var descriptor = {
      'configurable': true,
      'enumerable': true,
      'set': function() { throw new Error; }
    };

    var source = {
      'a': 1,
      'b': undefined,
      'c': NaN,
      'd': undefined,
      'constructor': Object,
      'toString': lodashStable.constant('source')
    };

    defineProperty(object, 'a', lodashStable.assign({}, descriptor, {
      'get': stubOne
    }));

    defineProperty(object, 'b', lodashStable.assign({}, descriptor, {
      'get': noop
    }));

    defineProperty(object, 'c', lodashStable.assign({}, descriptor, {
      'get': stubNaN
    }));

    defineProperty(object, 'constructor', lodashStable.assign({}, descriptor, {
      'get': lodashStable.constant(Object)
    }));

    try {
      var actual = func(object, source);
    } catch (e) {}

    assert.deepEqual(actual, source);
  });

  QUnit.test('`_.' + methodName + '` should treat sparse array sources as dense', function(assert) {
    assert.expect(1);

    var array = [1];
    array[2] = 3;

    assert.deepEqual(func({}, array), { '0': 1, '1': undefined, '2': 3 });
  });

  QUnit.test('`_.' + methodName + '` should assign values of prototype objects', function(assert) {
    assert.expect(1);

    function Foo() {}
    Foo.prototype.a = 1;

    assert.deepEqual(func({}, Foo.prototype), { 'a': 1 });
  });

  QUnit.test('`_.' + methodName + '` should coerce string sources to objects', function(assert) {
    assert.expect(1);

    assert.deepEqual(func({}, 'a'), { '0': 'a' });
  });
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.assignInWith');

(function() {
  // `_.extendWith` must be the exact same function object as `_.assignInWith`.
  QUnit.test('should be aliased', function(assert) {
    assert.expect(1);

    assert.strictEqual(_.extendWith, _.assignInWith);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.assignWith and lodash.assignInWith');

// Customizer variants: the callback receives `(objValue, srcValue)` and its
// return value is assigned; `undefined` means "use the default assignment".
lodashStable.each(['assignWith', 'assignInWith'], function(methodName) {
  var func = _[methodName];

  QUnit.test('`_.' + methodName + '` should work with a `customizer` callback', function(assert) {
    assert.expect(1);

    var actual = func({ 'a': 1, 'b': 2 }, { 'a': 3, 'c': 3 }, function(a, b) {
      return a === undefined ? b : a;
    });

    assert.deepEqual(actual, { 'a': 1, 'b': 2, 'c': 3 });
  });

  QUnit.test('`_.' + methodName + '` should work with a `customizer` that returns `undefined`', function(assert) {
    assert.expect(1);

    var expected = { 'a': 1 };
    assert.deepEqual(func({}, expected, noop), expected);
  });
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.at');

(function() {
  var array = ['a', 'b', 'c'],
      object = { 'a': [{ 'b': { 'c': 3 } }, 4] };

  QUnit.test('should return the elements corresponding to the specified keys', function(assert) {
    assert.expect(1);

    var actual = _.at(array, [0, 2]);
    assert.deepEqual(actual, ['a', 'c']);
  });

  QUnit.test('should return `undefined` for nonexistent keys', function(assert) {
    assert.expect(1);

    var actual = _.at(array, [2, 4, 0]);
    assert.deepEqual(actual, ['c', undefined, 'a']);
  });

  QUnit.test('should work with non-index keys on array values', function(assert) {
    assert.expect(1);

    // Build an array carrying odd own keys (empty-string, -1, 1.1, ...)
    // and confirm `_.at` reads them like plain properties.
    var values = lodashStable.reject(empties, function(value) {
      return (value === 0) || lodashStable.isArray(value);
    }).concat(-1, 1.1);

    var array = lodashStable.transform(values, function(result, value) {
      result[value] = 1;
    }, []);

    var expected = lodashStable.map(values, stubOne),
        actual = _.at(array, values);

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should return an empty array when no keys are given', function(assert) {
    assert.expect(2);

    assert.deepEqual(_.at(array), []);
    assert.deepEqual(_.at(array, [], []), []);
  });

  QUnit.test('should accept multiple key arguments', function(assert) {
    assert.expect(1);

    var actual = _.at(['a', 'b', 'c', 'd'], 3, 0, 2);
    assert.deepEqual(actual, ['d', 'a', 'c']);
  });

  QUnit.test('should work with a falsey `object` when keys are given', function(assert) {
    assert.expect(1);

    var expected = lodashStable.map(falsey, lodashStable.constant(Array(4)));

    var actual = lodashStable.map(falsey, function(object) {
      try {
        return _.at(object, 0, 1, 'pop', 'push');
      } catch (e) {}
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should work with an `arguments` object for `object`', function(assert) {
    assert.expect(1);

    var actual = _.at(args, [2, 0]);
    assert.deepEqual(actual, [3, 1]);
  });

  QUnit.test('should work with `arguments` object as secondary arguments', function(assert) {
    assert.expect(1);

    var actual = _.at([1, 2, 3, 4, 5], args);
    assert.deepEqual(actual, [2, 3, 4]);
  });

  QUnit.test('should work with an object for `object`', function(assert) {
    assert.expect(1);

    var actual = _.at(object, ['a[0].b.c', 'a[1]']);
    assert.deepEqual(actual, [3, 4]);
  });

  QUnit.test('should pluck inherited property values', function(assert) {
    assert.expect(1);

    function Foo() {
      this.a = 1;
    }
    Foo.prototype.b = 2;

    var actual = _.at(new Foo, 'b');
    assert.deepEqual(actual, [2]);
  });

  QUnit.test('should work in a lazy sequence', function(assert) {
    assert.expect(6);

    if (!isNpm) {
      var largeArray = lodashStable.range(LARGE_ARRAY_SIZE),
          smallArray = array;

      lodashStable.each([[2], ['2'], [2, 1]], function(paths) {
        lodashStable.times(2, function(index) {
          var array = index ? largeArray : smallArray,
              wrapped = _(array).map(identity).at(paths);

          assert.deepEqual(wrapped.value(), _.at(_.map(array, identity), paths));
        });
      });
    }
    else {
      skipAssert(assert, 6);
    }
  });

  QUnit.test('should support shortcut fusion', function(assert) {
    assert.expect(8);

    if (!isNpm) {
      // Shortcut fusion should map only the single needed element, so
      // `count` stays at 1 for in-range paths and 0 otherwise.
      var array = lodashStable.range(LARGE_ARRAY_SIZE),
          count = 0,
          iteratee = function(value) { count++; return square(value); },
          lastIndex = LARGE_ARRAY_SIZE - 1;

      lodashStable.each([lastIndex, lastIndex + '', LARGE_ARRAY_SIZE, []], function(n, index) {
        count = 0;
        var actual = _(array).map(iteratee).at(n).value(),
            expected = index < 2 ? 1 : 0;

        assert.strictEqual(count, expected);

        expected = index == 3 ? [] : [index == 2 ? undefined : square(lastIndex)];
        assert.deepEqual(actual, expected);
      });
    }
    else {
      skipAssert(assert, 8);
    }
  });

  QUnit.test('work with an object for `object` when chaining', function(assert) {
    assert.expect(2);

    if (!isNpm) {
      var paths = ['a[0].b.c', 'a[1]'],
          actual = _(object).map(identity).at(paths).value();

      assert.deepEqual(actual, _.at(_.map(object, identity), paths));

      var indexObject = { '0': 1 };
      actual = _(indexObject).at(0).value();
      assert.deepEqual(actual, _.at(indexObject, 0));
    }
    else {
      skipAssert(assert, 2);
    }
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.attempt');
(function() {
QUnit.test('should return the result of `func`', function(assert) {
assert.expect(1);
assert.strictEqual(_.attempt(lodashStable.constant('x')), 'x');
});
QUnit.test('should provide additional arguments to `func`', function(assert) {
assert.expect(1);
var actual = _.attempt(function() { return slice.call(arguments); }, 1, 2);
assert.deepEqual(actual, [1, 2]);
});
QUnit.test('should return the caught error', function(assert) {
assert.expect(1);
var expected = lodashStable.map(errors, stubTrue);
var actual = lodashStable.map(errors, function(error) {
return _.attempt(function() { throw error; }) === error;
});
assert.deepEqual(actual, expected);
});
QUnit.test('should coerce errors to error objects', function(assert) {
assert.expect(1);
var actual = _.attempt(function() { throw 'x'; });
assert.ok(lodashStable.isEqual(actual, Error('x')));
});
QUnit.test('should preserve custom errors', function(assert) {
assert.expect(1);
var actual = _.attempt(function() { throw new CustomError('x'); });
assert.ok(actual instanceof CustomError);
});
QUnit.test('should work with an error object from another realm', function(assert) {
assert.expect(1);
if (realm.errors) {
var expected = lodashStable.map(realm.errors, stubTrue);
var actual = lodashStable.map(realm.errors, function(error) {
return _.attempt(function() { throw error; }) === error;
});
assert.deepEqual(actual, expected);
}
else {
skipAssert(assert);
}
});
QUnit.test('should return an unwrapped value when implicitly chaining', function(assert) {
assert.expect(1);
if (!isNpm) {
assert.strictEqual(_(lodashStable.constant('x')).attempt(), 'x');
}
else {
skipAssert(assert);
}
});
QUnit.test('should return a wrapped value when explicitly chaining', function(assert) {
assert.expect(1);
if (!isNpm) {
assert.ok(_(lodashStable.constant('x')).chain().attempt() instanceof _);
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.before');
(function() {
function before(n, times) {
var count = 0;
lodashStable.times(times, _.before(n, function() { count++; }));
return count;
}
QUnit.test('should create a function that invokes `func` after `n` calls', function(assert) {
assert.expect(4);
assert.strictEqual(before(5, 4), 4, 'before(n) should invoke `func` before being called `n` times');
assert.strictEqual(before(5, 6), 4, 'before(n) should not invoke `func` after being called `n - 1` times');
assert.strictEqual(before(0, 0), 0, 'before(0) should not invoke `func` immediately');
assert.strictEqual(before(0, 1), 0, 'before(0) should not invoke `func` when called');
});
QUnit.test('should coerce `n` values of `NaN` to `0`', function(assert) {
assert.expect(1);
assert.strictEqual(before(NaN, 1), 0);
});
QUnit.test('should use `this` binding of function', function(assert) {
assert.expect(2);
var before = _.before(2, function(assert) { return ++this.count; }),
object = { 'before': before, 'count': 0 };
object.before();
assert.strictEqual(object.before(), 1);
assert.strictEqual(object.count, 1);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.bind');
(function() {
function fn() {
var result = [this];
push.apply(result, arguments);
return result;
}
QUnit.test('should bind a function to an object', function(assert) {
assert.expect(1);
var object = {},
bound = _.bind(fn, object);
assert.deepEqual(bound('a'), [object, 'a']);
});
QUnit.test('should accept a falsey `thisArg`', function(assert) {
assert.expect(1);
var values = lodashStable.reject(falsey.slice(1), function(value) { return value == null; }),
expected = lodashStable.map(values, function(value) { return [value]; });
var actual = lodashStable.map(values, function(value) {
try {
var bound = _.bind(fn, value);
return bound();
} catch (e) {}
});
assert.ok(lodashStable.every(actual, function(value, index) {
return lodashStable.isEqual(value, expected[index]);
}));
});
QUnit.test('should bind a function to nullish values', function(assert) {
assert.expect(6);
var bound = _.bind(fn, null),
actual = bound('a');
assert.ok((actual[0] === null) || (actual[0] && actual[0].Array));
assert.strictEqual(actual[1], 'a');
lodashStable.times(2, function(index) {
bound = index ? _.bind(fn, undefined) : _.bind(fn);
actual = bound('b');
assert.ok((actual[0] === undefined) || (actual[0] && actual[0].Array));
assert.strictEqual(actual[1], 'b');
});
});
QUnit.test('should partially apply arguments ', function(assert) {
assert.expect(4);
var object = {},
bound = _.bind(fn, object, 'a');
assert.deepEqual(bound(), [object, 'a']);
bound = _.bind(fn, object, 'a');
assert.deepEqual(bound('b'), [object, 'a', 'b']);
bound = _.bind(fn, object, 'a', 'b');
assert.deepEqual(bound(), [object, 'a', 'b']);
assert.deepEqual(bound('c', 'd'), [object, 'a', 'b', 'c', 'd']);
});
QUnit.test('should support placeholders', function(assert) {
assert.expect(4);
var object = {},
ph = _.bind.placeholder,
bound = _.bind(fn, object, ph, 'b', ph);
assert.deepEqual(bound('a', 'c'), [object, 'a', 'b', 'c']);
assert.deepEqual(bound('a'), [object, 'a', 'b', undefined]);
assert.deepEqual(bound('a', 'c', 'd'), [object, 'a', 'b', 'c', 'd']);
assert.deepEqual(bound(), [object, undefined, 'b', undefined]);
});
QUnit.test('should use `_.placeholder` when set', function(assert) {
assert.expect(1);
if (!isModularize) {
var _ph = _.placeholder = {},
ph = _.bind.placeholder,
object = {},
bound = _.bind(fn, object, _ph, 'b', ph);
assert.deepEqual(bound('a', 'c'), [object, 'a', 'b', ph, 'c']);
delete _.placeholder;
}
else {
skipAssert(assert);
}
});
QUnit.test('should create a function with a `length` of `0`', function(assert) {
assert.expect(2);
var fn = function(a, b, c) {},
bound = _.bind(fn, {});
assert.strictEqual(bound.length, 0);
bound = _.bind(fn, {}, 1);
assert.strictEqual(bound.length, 0);
});
QUnit.test('should ignore binding when called with the `new` operator', function(assert) {
assert.expect(3);
function Foo() {
return this;
}
var bound = _.bind(Foo, { 'a': 1 }),
newBound = new bound;
assert.strictEqual(bound().a, 1);
assert.strictEqual(newBound.a, undefined);
assert.ok(newBound instanceof Foo);
});
QUnit.test('should handle a number of arguments when called with the `new` operator', function(assert) {
assert.expect(1);
function Foo() {
return this;
}
function Bar() {}
var thisArg = { 'a': 1 },
boundFoo = _.bind(Foo, thisArg),
boundBar = _.bind(Bar, thisArg),
count = 9,
expected = lodashStable.times(count, lodashStable.constant([undefined, undefined]));
var actual = lodashStable.times(count, function(index) {
try {
switch (index) {
case 0: return [new boundFoo().a, new boundBar().a];
case 1: return [new boundFoo(1).a, new boundBar(1).a];
case 2: return [new boundFoo(1, 2).a, new boundBar(1, 2).a];
case 3: return [new boundFoo(1, 2, 3).a, new boundBar(1, 2, 3).a];
case 4: return [new boundFoo(1, 2, 3, 4).a, new boundBar(1, 2, 3, 4).a];
case 5: return [new boundFoo(1, 2, 3, 4, 5).a, new boundBar(1, 2, 3, 4, 5).a];
case 6: return [new boundFoo(1, 2, 3, 4, 5, 6).a, new boundBar(1, 2, 3, 4, 5, 6).a];
case 7: return [new boundFoo(1, 2, 3, 4, 5, 6, 7).a, new boundBar(1, 2, 3, 4, 5, 6, 7).a];
case 8: return [new boundFoo(1, 2, 3, 4, 5, 6, 7, 8).a, new boundBar(1, 2, 3, 4, 5, 6, 7, 8).a];
}
} catch (e) {}
});
assert.deepEqual(actual, expected);
});
QUnit.test('should ensure `new bound` is an instance of `func`', function(assert) {
assert.expect(2);
function Foo(value) {
return value && object;
}
var bound = _.bind(Foo),
object = {};
assert.ok(new bound instanceof Foo);
assert.strictEqual(new bound(true), object);
});
QUnit.test('should append array arguments to partially applied arguments', function(assert) {
assert.expect(1);
var object = {},
bound = _.bind(fn, object, 'a');
assert.deepEqual(bound(['b'], 'c'), [object, 'a', ['b'], 'c']);
});
QUnit.test('should not rebind functions', function(assert) {
assert.expect(3);
var object1 = {},
object2 = {},
object3 = {};
var bound1 = _.bind(fn, object1),
bound2 = _.bind(bound1, object2, 'a'),
bound3 = _.bind(bound1, object3, 'b');
assert.deepEqual(bound1(), [object1]);
assert.deepEqual(bound2(), [object1, 'a']);
assert.deepEqual(bound3(), [object1, 'b']);
});
QUnit.test('should not error when instantiating bound built-ins', function(assert) {
assert.expect(2);
var Ctor = _.bind(Date, null),
expected = new Date(2012, 4, 23, 0, 0, 0, 0);
try {
var actual = new Ctor(2012, 4, 23, 0, 0, 0, 0);
} catch (e) {}
assert.deepEqual(actual, expected);
Ctor = _.bind(Date, null, 2012, 4, 23);
try {
actual = new Ctor(0, 0, 0, 0);
} catch (e) {}
assert.deepEqual(actual, expected);
});
QUnit.test('should not error when calling bound class constructors with the `new` operator', function(assert) {
assert.expect(1);
var createCtor = lodashStable.attempt(Function, '"use strict";return class A{}');
if (typeof createCtor == 'function') {
var bound = _.bind(createCtor()),
count = 8,
expected = lodashStable.times(count, stubTrue);
var actual = lodashStable.times(count, function(index) {
try {
switch (index) {
case 0: return !!(new bound);
case 1: return !!(new bound(1));
case 2: return !!(new bound(1, 2));
case 3: return !!(new bound(1, 2, 3));
case 4: return !!(new bound(1, 2, 3, 4));
case 5: return !!(new bound(1, 2, 3, 4, 5));
case 6: return !!(new bound(1, 2, 3, 4, 5, 6));
case 7: return !!(new bound(1, 2, 3, 4, 5, 6, 7));
}
} catch (e) {}
});
assert.deepEqual(actual, expected);
}
else {
skipAssert(assert);
}
});
QUnit.test('should return a wrapped value when chaining', function(assert) {
assert.expect(2);
if (!isNpm) {
var object = {},
bound = _(fn).bind({}, 'a', 'b');
assert.ok(bound instanceof _);
var actual = bound.value()('c');
assert.deepEqual(actual, [object, 'a', 'b', 'c']);
}
else {
skipAssert(assert, 2);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.bindAll');
(function() {
var args = toArgs(['a']);
var source = {
'_n0': -2,
'_p0': -1,
'_a': 1,
'_b': 2,
'_c': 3,
'_d': 4,
'-0': function() { return this._n0; },
'0': function() { return this._p0; },
'a': function() { return this._a; },
'b': function() { return this._b; },
'c': function() { return this._c; },
'd': function() { return this._d; }
};
QUnit.test('should accept individual method names', function(assert) {
assert.expect(1);
var object = lodashStable.cloneDeep(source);
_.bindAll(object, 'a', 'b');
var actual = lodashStable.map(['a', 'b', 'c'], function(key) {
return object[key].call({});
});
assert.deepEqual(actual, [1, 2, undefined]);
});
QUnit.test('should accept arrays of method names', function(assert) {
assert.expect(1);
var object = lodashStable.cloneDeep(source);
_.bindAll(object, ['a', 'b'], ['c']);
var actual = lodashStable.map(['a', 'b', 'c', 'd'], function(key) {
return object[key].call({});
});
assert.deepEqual(actual, [1, 2, 3, undefined]);
});
QUnit.test('should preserve the sign of `0`', function(assert) {
assert.expect(1);
var props = [-0, Object(-0), 0, Object(0)];
var actual = lodashStable.map(props, function(key) {
var object = lodashStable.cloneDeep(source);
_.bindAll(object, key);
return object[lodashStable.toString(key)].call({});
});
assert.deepEqual(actual, [-2, -2, -1, -1]);
});
QUnit.test('should work with an array `object`', function(assert) {
assert.expect(1);
var array = ['push', 'pop'];
_.bindAll(array);
assert.strictEqual(array.pop, arrayProto.pop);
});
QUnit.test('should work with `arguments` objects as secondary arguments', function(assert) {
assert.expect(1);
var object = lodashStable.cloneDeep(source);
_.bindAll(object, args);
var actual = lodashStable.map(args, function(key) {
return object[key].call({});
});
assert.deepEqual(actual, [1]);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.bindKey');
(function() {
QUnit.test('should work when the target function is overwritten', function(assert) {
assert.expect(2);
var object = {
'user': 'fred',
'greet': function(greeting) {
return this.user + ' says: ' + greeting;
}
};
var bound = _.bindKey(object, 'greet', 'hi');
assert.strictEqual(bound(), 'fred says: hi');
object.greet = function(greeting) {
return this.user + ' says: ' + greeting + '!';
};
assert.strictEqual(bound(), 'fred says: hi!');
});
QUnit.test('should support placeholders', function(assert) {
assert.expect(4);
var object = {
'fn': function() {
return slice.call(arguments);
}
};
var ph = _.bindKey.placeholder,
bound = _.bindKey(object, 'fn', ph, 'b', ph);
assert.deepEqual(bound('a', 'c'), ['a', 'b', 'c']);
assert.deepEqual(bound('a'), ['a', 'b', undefined]);
assert.deepEqual(bound('a', 'c', 'd'), ['a', 'b', 'c', 'd']);
assert.deepEqual(bound(), [undefined, 'b', undefined]);
});
QUnit.test('should use `_.placeholder` when set', function(assert) {
assert.expect(1);
if (!isModularize) {
var object = {
'fn': function() {
return slice.call(arguments);
}
};
var _ph = _.placeholder = {},
ph = _.bindKey.placeholder,
bound = _.bindKey(object, 'fn', _ph, 'b', ph);
assert.deepEqual(bound('a', 'c'), ['a', 'b', ph, 'c']);
delete _.placeholder;
}
else {
skipAssert(assert);
}
});
QUnit.test('should ensure `new bound` is an instance of `object[key]`', function(assert) {
assert.expect(2);
function Foo(value) {
return value && object;
}
var object = { 'Foo': Foo },
bound = _.bindKey(object, 'Foo');
assert.ok(new bound instanceof Foo);
assert.strictEqual(new bound(true), object);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('case methods');
lodashStable.each(['camel', 'kebab', 'lower', 'snake', 'start', 'upper'], function(caseName) {
var methodName = caseName + 'Case',
func = _[methodName];
var strings = [
'foo bar', 'Foo bar', 'foo Bar', 'Foo Bar',
'FOO BAR', 'fooBar', '--foo-bar--', '__foo_bar__'
];
var converted = (function() {
switch (caseName) {
case 'camel': return 'fooBar';
case 'kebab': return 'foo-bar';
case 'lower': return 'foo bar';
case 'snake': return 'foo_bar';
case 'start': return 'Foo Bar';
case 'upper': return 'FOO BAR';
}
}());
QUnit.test('`_.' + methodName + '` should convert `string` to ' + caseName + ' case', function(assert) {
assert.expect(1);
var actual = lodashStable.map(strings, function(string) {
var expected = (caseName == 'start' && string == 'FOO BAR') ? string : converted;
return func(string) === expected;
});
assert.deepEqual(actual, lodashStable.map(strings, stubTrue));
});
QUnit.test('`_.' + methodName + '` should handle double-converting strings', function(assert) {
assert.expect(1);
var actual = lodashStable.map(strings, function(string) {
var expected = (caseName == 'start' && string == 'FOO BAR') ? string : converted;
return func(func(string)) === expected;
});
assert.deepEqual(actual, lodashStable.map(strings, stubTrue));
});
QUnit.test('`_.' + methodName + '` should deburr letters', function(assert) {
assert.expect(1);
var actual = lodashStable.map(burredLetters, function(burred, index) {
var letter = deburredLetters[index].replace(/['\u2019]/g, '');
if (caseName == 'start') {
letter = letter == 'IJ' ? letter : lodashStable.capitalize(letter);
} else if (caseName == 'upper') {
letter = letter.toUpperCase();
} else {
letter = letter.toLowerCase();
}
return func(burred) === letter;
});
assert.deepEqual(actual, lodashStable.map(burredLetters, stubTrue));
});
QUnit.test('`_.' + methodName + '` should remove contraction apostrophes', function(assert) {
assert.expect(2);
var postfixes = ['d', 'll', 'm', 're', 's', 't', 've'];
lodashStable.each(["'", '\u2019'], function(apos) {
var actual = lodashStable.map(postfixes, function(postfix) {
return func('a b' + apos + postfix + ' c');
});
var expected = lodashStable.map(postfixes, function(postfix) {
switch (caseName) {
case 'camel': return 'aB' + postfix + 'C';
case 'kebab': return 'a-b' + postfix + '-c';
case 'lower': return 'a b' + postfix + ' c';
case 'snake': return 'a_b' + postfix + '_c';
case 'start': return 'A B' + postfix + ' C';
case 'upper': return 'A B' + postfix.toUpperCase() + ' C';
}
});
assert.deepEqual(actual, expected);
});
});
QUnit.test('`_.' + methodName + '` should remove Latin mathematical operators', function(assert) {
assert.expect(1);
var actual = lodashStable.map(['\xd7', '\xf7'], func);
assert.deepEqual(actual, ['', '']);
});
QUnit.test('`_.' + methodName + '` should coerce `string` to a string', function(assert) {
assert.expect(2);
var string = 'foo bar';
assert.strictEqual(func(Object(string)), converted);
assert.strictEqual(func({ 'toString': lodashStable.constant(string) }), converted);
});
QUnit.test('`_.' + methodName + '` should return an unwrapped value implicitly when chaining', function(assert) {
assert.expect(1);
if (!isNpm) {
assert.strictEqual(_('foo bar')[methodName](), converted);
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.' + methodName + '` should return a wrapped value when explicitly chaining', function(assert) {
assert.expect(1);
if (!isNpm) {
assert.ok(_('foo bar').chain()[methodName]() instanceof _);
}
else {
skipAssert(assert);
}
});
});
(function() {
QUnit.test('should get the original value after cycling through all case methods', function(assert) {
assert.expect(1);
var funcs = [_.camelCase, _.kebabCase, _.lowerCase, _.snakeCase, _.startCase, _.lowerCase, _.camelCase];
var actual = lodashStable.reduce(funcs, function(result, func) {
return func(result);
}, 'enable 6h format');
assert.strictEqual(actual, 'enable6HFormat');
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.camelCase');
(function() {
QUnit.test('should work with numbers', function(assert) {
assert.expect(6);
assert.strictEqual(_.camelCase('12 feet'), '12Feet');
assert.strictEqual(_.camelCase('enable 6h format'), 'enable6HFormat');
assert.strictEqual(_.camelCase('enable 24H format'), 'enable24HFormat');
assert.strictEqual(_.camelCase('too legit 2 quit'), 'tooLegit2Quit');
assert.strictEqual(_.camelCase('walk 500 miles'), 'walk500Miles');
assert.strictEqual(_.camelCase('xhr2 request'), 'xhr2Request');
});
QUnit.test('should handle acronyms', function(assert) {
assert.expect(6);
lodashStable.each(['safe HTML', 'safeHTML'], function(string) {
assert.strictEqual(_.camelCase(string), 'safeHtml');
});
lodashStable.each(['escape HTML entities', 'escapeHTMLEntities'], function(string) {
assert.strictEqual(_.camelCase(string), 'escapeHtmlEntities');
});
lodashStable.each(['XMLHttpRequest', 'XmlHTTPRequest'], function(string) {
assert.strictEqual(_.camelCase(string), 'xmlHttpRequest');
});
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.capitalize');
(function() {
QUnit.test('should capitalize the first character of a string', function(assert) {
assert.expect(3);
assert.strictEqual(_.capitalize('fred'), 'Fred');
assert.strictEqual(_.capitalize('Fred'), 'Fred');
assert.strictEqual(_.capitalize(' fred'), ' fred');
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.castArray');
(function() {
QUnit.test('should wrap non-array items in an array', function(assert) {
assert.expect(1);
var values = falsey.concat(true, 1, 'a', { 'a': 1 }),
expected = lodashStable.map(values, function(value) { return [value]; }),
actual = lodashStable.map(values, _.castArray);
assert.deepEqual(actual, expected);
});
QUnit.test('should return array values by reference', function(assert) {
assert.expect(1);
var array = [1];
assert.strictEqual(_.castArray(array), array);
});
QUnit.test('should return an empty array when no arguments are given', function(assert) {
assert.expect(1);
assert.deepEqual(_.castArray(), []);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.chain');
(function() {
QUnit.test('should return a wrapped value', function(assert) {
assert.expect(1);
if (!isNpm) {
var actual = _.chain({ 'a': 0 });
assert.ok(actual instanceof _);
}
else {
skipAssert(assert);
}
});
QUnit.test('should return existing wrapped values', function(assert) {
assert.expect(2);
if (!isNpm) {
var wrapped = _({ 'a': 0 });
assert.strictEqual(_.chain(wrapped), wrapped);
assert.strictEqual(wrapped.chain(), wrapped);
}
else {
skipAssert(assert, 2);
}
});
QUnit.test('should enable chaining for methods that return unwrapped values', function(assert) {
assert.expect(6);
if (!isNpm) {
var array = ['c', 'b', 'a'];
assert.ok(_.chain(array).head() instanceof _);
assert.ok(_(array).chain().head() instanceof _);
assert.ok(_.chain(array).isArray() instanceof _);
assert.ok(_(array).chain().isArray() instanceof _);
assert.ok(_.chain(array).sortBy().head() instanceof _);
assert.ok(_(array).chain().sortBy().head() instanceof _);
}
else {
skipAssert(assert, 6);
}
});
QUnit.test('should chain multiple methods', function(assert) {
assert.expect(6);
if (!isNpm) {
lodashStable.times(2, function(index) {
var array = ['one two three four', 'five six seven eight', 'nine ten eleven twelve'],
expected = { ' ': 9, 'e': 14, 'f': 2, 'g': 1, 'h': 2, 'i': 4, 'l': 2, 'n': 6, 'o': 3, 'r': 2, 's': 2, 't': 5, 'u': 1, 'v': 4, 'w': 2, 'x': 1 },
wrapped = index ? _(array).chain() : _.chain(array);
var actual = wrapped
.chain()
.map(function(value) { return value.split(''); })
.flatten()
.reduce(function(object, chr) {
object[chr] || (object[chr] = 0);
object[chr]++;
return object;
}, {})
.value();
assert.deepEqual(actual, expected);
array = [1, 2, 3, 4, 5, 6];
wrapped = index ? _(array).chain() : _.chain(array);
actual = wrapped
.chain()
.filter(function(n) { return n % 2 != 0; })
.reject(function(n) { return n % 3 == 0; })
.sortBy(function(n) { return -n; })
.value();
assert.deepEqual(actual, [5, 1]);
array = [3, 4];
wrapped = index ? _(array).chain() : _.chain(array);
actual = wrapped
.reverse()
.concat([2, 1])
.unshift(5)
.tap(function(value) { value.pop(); })
.map(square)
.value();
assert.deepEqual(actual, [25, 16, 9, 4]);
});
}
else {
skipAssert(assert, 6);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.chunk');
(function() {
var array = [0, 1, 2, 3, 4, 5];
QUnit.test('should return chunked arrays', function(assert) {
assert.expect(1);
var actual = _.chunk(array, 3);
assert.deepEqual(actual, [[0, 1, 2], [3, 4, 5]]);
});
QUnit.test('should return the last chunk as remaining elements', function(assert) {
assert.expect(1);
var actual = _.chunk(array, 4);
assert.deepEqual(actual, [[0, 1, 2, 3], [4, 5]]);
});
QUnit.test('should treat falsey `size` values, except `undefined`, as `0`', function(assert) {
assert.expect(1);
var expected = lodashStable.map(falsey, function(value) {
return value === undefined ? [[0], [1], [2], [3], [4], [5]] : [];
});
var actual = lodashStable.map(falsey, function(size, index) {
return index ? _.chunk(array, size) : _.chunk(array);
});
assert.deepEqual(actual, expected);
});
QUnit.test('should ensure the minimum `size` is `0`', function(assert) {
assert.expect(1);
var values = lodashStable.reject(falsey, lodashStable.isUndefined).concat(-1, -Infinity),
expected = lodashStable.map(values, stubArray);
var actual = lodashStable.map(values, function(n) {
return _.chunk(array, n);
});
assert.deepEqual(actual, expected);
});
QUnit.test('should coerce `size` to an integer', function(assert) {
assert.expect(1);
assert.deepEqual(_.chunk(array, array.length / 4), [[0], [1], [2], [3], [4], [5]]);
});
QUnit.test('should work as an iteratee for methods like `_.map`', function(assert) {
assert.expect(1);
var actual = lodashStable.map([[1, 2], [3, 4]], _.chunk);
assert.deepEqual(actual, [[[1], [2]], [[3], [4]]]);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.clamp');
(function() {
QUnit.test('should work with a `max`', function(assert) {
assert.expect(2);
assert.strictEqual(_.clamp(5, 3), 3);
assert.strictEqual(_.clamp(1, 3), 1);
});
QUnit.test('should clamp negative numbers', function(assert) {
assert.expect(3);
assert.strictEqual(_.clamp(-10, -5, 5), -5);
assert.strictEqual(_.clamp(-10.2, -5.5, 5.5), -5.5);
assert.strictEqual(_.clamp(-Infinity, -5, 5), -5);
});
QUnit.test('should clamp positive numbers', function(assert) {
assert.expect(3);
assert.strictEqual(_.clamp(10, -5, 5), 5);
assert.strictEqual(_.clamp(10.6, -5.6, 5.4), 5.4);
assert.strictEqual(_.clamp(Infinity, -5, 5), 5);
});
QUnit.test('should not alter negative numbers in range', function(assert) {
assert.expect(3);
assert.strictEqual(_.clamp(-4, -5, 5), -4);
assert.strictEqual(_.clamp(-5, -5, 5), -5);
assert.strictEqual(_.clamp(-5.5, -5.6, 5.6), -5.5);
});
QUnit.test('should not alter positive numbers in range', function(assert) {
assert.expect(3);
assert.strictEqual(_.clamp(4, -5, 5), 4);
assert.strictEqual(_.clamp(5, -5, 5), 5);
assert.strictEqual(_.clamp(4.5, -5.1, 5.2), 4.5);
});
QUnit.test('should not alter `0` in range', function(assert) {
assert.expect(1);
assert.strictEqual(1 / _.clamp(0, -5, 5), Infinity);
});
QUnit.test('should clamp to `0`', function(assert) {
assert.expect(1);
assert.strictEqual(1 / _.clamp(-10, 0, 5), Infinity);
});
QUnit.test('should not alter `-0` in range', function(assert) {
assert.expect(1);
assert.strictEqual(1 / _.clamp(-0, -5, 5), -Infinity);
});
QUnit.test('should clamp to `-0`', function(assert) {
assert.expect(1);
assert.strictEqual(1 / _.clamp(-10, -0, 5), -Infinity);
});
QUnit.test('should return `NaN` when `number` is `NaN`', function(assert) {
assert.expect(1);
assert.deepEqual(_.clamp(NaN, -5, 5), NaN);
});
QUnit.test('should coerce `min` and `max` of `NaN` to `0`', function(assert) {
assert.expect(2);
assert.deepEqual(_.clamp(1, -5, NaN), 0);
assert.deepEqual(_.clamp(-1, NaN, 5), 0);
});
}());
/*--------------------------------------------------------------------------*/
  QUnit.module('clone methods');
  // Shared fixtures and tests for `_.clone`, `_.cloneDeep`, and
  // `_.cloneDeepWith`. NOTE(review): this IIFE continues past the end of
  // this view; `arguments` below refers to the IIFE's own invocation args.
  (function() {
    function Foo() {
      this.a = 1;
    }
    Foo.prototype.b = 1;
    Foo.c = function() {};
    // Map/Set fixtures are created only when the environment supports them.
    if (Map) {
      var map = new Map;
      map.set('a', 1);
      map.set('b', 2);
    }
    if (Set) {
      var set = new Set;
      set.add(1);
      set.add(2);
    }
    // Cloneable values keyed by a human-readable kind for test naming.
    var objects = {
      '`arguments` objects': arguments,
      'arrays': ['a', ''],
      'array-like objects': { '0': 'a', 'length': 1 },
      'booleans': false,
      'boolean objects': Object(false),
      'date objects': new Date,
      'Foo instances': new Foo,
      'objects': { 'a': 0, 'b': 1, 'c': 2 },
      'objects with object values': { 'a': /a/, 'b': ['B'], 'c': { 'C': 1 } },
      'objects from another document': realm.object || {},
      'maps': map,
      'null values': null,
      'numbers': 0,
      'number objects': Object(0),
      'regexes': /a/gim,
      'sets': set,
      'strings': 'a',
      'string objects': Object('a'),
      'undefined values': undefined
    };
    // Make the array sparse (length 3, holes at 1 and 2... actually extends
    // past its 2 elements) to exercise hole preservation.
    objects.arrays.length = 3;
    // Values clone methods should treat as uncloneable (returned as empty
    // objects or themselves, per lodash semantics).
    var uncloneable = {
      'DOM elements': body,
      'functions': Foo,
      'async functions': asyncFunc,
      'generator functions': genFunc,
      'the `Proxy` constructor': Proxy
    };
    lodashStable.each(errors, function(error) {
      uncloneable[error.name + 's'] = error;
    });
    QUnit.test('`_.clone` should perform a shallow clone', function(assert) {
      assert.expect(2);
      // Top-level container is copied; nested elements are shared.
      var array = [{ 'a': 0 }, { 'b': 1 }],
          actual = _.clone(array);
      assert.deepEqual(actual, array);
      assert.ok(actual !== array && actual[0] === array[0]);
    });
    QUnit.test('`_.cloneDeep` should deep clone objects with circular references', function(assert) {
      assert.expect(1);
      var object = {
        'foo': { 'b': { 'c': { 'd': {} } } },
        'bar': {}
      };
      object.foo.b.c.d = object;
      object.bar.b = object.foo.b;
      // The clone must reproduce both the cycle and the shared reference.
      var actual = _.cloneDeep(object);
      assert.ok(actual.bar.b === actual.foo.b && actual === actual.foo.b.c.d && actual !== object);
    });
    QUnit.test('`_.cloneDeep` should deep clone objects with lots of circular references', function(assert) {
      assert.expect(2);
      // Build a long chain of nested self-references to stress the stack
      // used for cycle detection.
      var cyclical = {};
      lodashStable.times(LARGE_ARRAY_SIZE + 1, function(index) {
        cyclical['v' + index] = [index ? cyclical['v' + (index - 1)] : cyclical];
      });
      var clone = _.cloneDeep(cyclical),
          actual = clone['v' + LARGE_ARRAY_SIZE][0];
      assert.strictEqual(actual, clone['v' + (LARGE_ARRAY_SIZE - 1)]);
      assert.notStrictEqual(actual, cyclical['v' + (LARGE_ARRAY_SIZE - 1)]);
    });
    QUnit.test('`_.cloneDeepWith` should provide `stack` to `customizer`', function(assert) {
      assert.expect(1);
      // The last customizer argument is the internal `Stack` cache.
      var actual;
      _.cloneDeepWith({ 'a': 1 }, function() {
        actual = _.last(arguments);
      });
      assert.ok(isNpm
        ? actual.constructor.name == 'Stack'
        : actual instanceof mapCaches.Stack
      );
    });
lodashStable.each(['clone', 'cloneDeep'], function(methodName) {
var func = _[methodName],
isDeep = methodName == 'cloneDeep';
lodashStable.forOwn(objects, function(object, kind) {
QUnit.test('`_.' + methodName + '` should clone ' + kind, function(assert) {
assert.expect(2);
var actual = func(object);
assert.ok(lodashStable.isEqual(actual, object));
if (lodashStable.isObject(object)) {
assert.notStrictEqual(actual, object);
} else {
assert.strictEqual(actual, object);
}
});
});
// ArrayBuffer clones keep the byte length but are new buffer objects.
// Guarded: environments without `ArrayBuffer` skip via `skipAssert`.
QUnit.test('`_.' + methodName + '` should clone array buffers', function(assert) {
assert.expect(2);
if (ArrayBuffer) {
var actual = func(arrayBuffer);
assert.strictEqual(actual.byteLength, arrayBuffer.byteLength);
assert.notStrictEqual(actual, arrayBuffer);
}
else {
skipAssert(assert, 2);
}
});
// Node `Buffer` clones preserve length/contents; the final assertion checks
// that only a deep clone is detached from later mutation of the original.
QUnit.test('`_.' + methodName + '` should clone buffers', function(assert) {
assert.expect(4);
if (Buffer) {
var buffer = new Buffer([1, 2]),
actual = func(buffer);
assert.strictEqual(actual.byteLength, buffer.byteLength);
assert.strictEqual(actual.inspect(), buffer.inspect());
assert.notStrictEqual(actual, buffer);
buffer[0] = 2;
assert.strictEqual(actual[0], isDeep ? 2 : 1);
}
else {
skipAssert(assert, 4);
}
});
// `RegExp#exec` results carry `index`/`input`; clones must keep them.
QUnit.test('`_.' + methodName + '` should clone `index` and `input` array properties', function(assert) {
assert.expect(2);
var array = /c/.exec('abcde'),
actual = func(array);
assert.strictEqual(actual.index, 2);
assert.strictEqual(actual.input, 'abcde');
});
// A global regexp that has matched once has `lastIndex` 3 ('c' in 'abcde');
// the clone must preserve that state.
QUnit.test('`_.' + methodName + '` should clone `lastIndex` regexp property', function(assert) {
assert.expect(1);
var regexp = /c/g;
regexp.exec('abcde');
assert.strictEqual(func(regexp).lastIndex, 3);
});
// Own properties added to boxed primitives (Object(false), Object(1), ...)
// should survive cloning.
QUnit.test('`_.' + methodName + '` should clone expando properties', function(assert) {
assert.expect(1);
var values = lodashStable.map([false, true, 1, 'a'], function(value) {
var object = Object(value);
object.a = 1;
return object;
});
var expected = lodashStable.map(values, stubTrue);
var actual = lodashStable.map(values, function(value) {
return func(value).a === 1;
});
assert.deepEqual(actual, expected);
});
// Cloning a prototype object yields a plain object with its own properties,
// not an instance of the constructor.
QUnit.test('`_.' + methodName + '` should clone prototype objects', function(assert) {
assert.expect(2);
var actual = func(Foo.prototype);
assert.notOk(actual instanceof Foo);
assert.deepEqual(actual, { 'b': 1 });
});
QUnit.test('`_.' + methodName + '` should set the `[[Prototype]]` of a clone', function(assert) {
assert.expect(1);
assert.ok(func(new Foo) instanceof Foo);
});
// Temporarily points `constructor` at `Object` to prove the clone's
// prototype comes from the instance, not the `constructor` property;
// restored afterwards to avoid leaking into other tests.
QUnit.test('`_.' + methodName + '` should set the `[[Prototype]]` of a clone even when the `constructor` is incorrect', function(assert) {
assert.expect(1);
Foo.prototype.constructor = Object;
assert.ok(func(new Foo) instanceof Foo);
Foo.prototype.constructor = Foo;
});
// With a non-function `constructor`, the clone must NOT inherit from Foo.
QUnit.test('`_.' + methodName + '` should ensure `value` constructor is a function before using its `[[Prototype]]`', function(assert) {
assert.expect(1);
Foo.prototype.constructor = null;
assert.notOk(func(new Foo) instanceof Foo);
Foo.prototype.constructor = Foo;
});
// Own properties named after `Object.prototype` methods must be copied,
// not skipped by naive `hasOwnProperty`-style checks.
QUnit.test('`_.' + methodName + '` should clone properties that shadow those on `Object.prototype`', function(assert) {
assert.expect(2);
var object = {
'constructor': objectProto.constructor,
'hasOwnProperty': objectProto.hasOwnProperty,
'isPrototypeOf': objectProto.isPrototypeOf,
'propertyIsEnumerable': objectProto.propertyIsEnumerable,
'toLocaleString': objectProto.toLocaleString,
'toString': objectProto.toString,
'valueOf': objectProto.valueOf
};
var actual = func(object);
assert.deepEqual(actual, object);
assert.notStrictEqual(actual, object);
});
// Own enumerable symbol keys are cloned; inherited (symbol2) and
// non-enumerable (symbol3) symbols are observed through the prototype chain.
// Deep clones detach nested symbol-keyed values; shallow clones share them.
QUnit.test('`_.' + methodName + '` should clone symbol properties', function(assert) {
assert.expect(7);
function Foo() {
this[symbol] = { 'c': 1 };
}
if (Symbol) {
var symbol2 = Symbol('b');
Foo.prototype[symbol2] = 2;
var symbol3 = Symbol('c');
defineProperty(Foo.prototype, symbol3, {
'configurable': true,
'enumerable': false,
'writable': true,
'value': 3
});
var object = { 'a': { 'b': new Foo } };
object[symbol] = { 'b': 1 };
var actual = func(object);
if (isDeep) {
assert.notStrictEqual(actual[symbol], object[symbol]);
assert.notStrictEqual(actual.a, object.a);
} else {
assert.strictEqual(actual[symbol], object[symbol]);
assert.strictEqual(actual.a, object.a);
}
assert.deepEqual(actual[symbol], object[symbol]);
assert.deepEqual(getSymbols(actual.a.b), [symbol]);
assert.deepEqual(actual.a.b[symbol], object.a.b[symbol]);
assert.deepEqual(actual.a.b[symbol2], object.a.b[symbol2]);
assert.deepEqual(actual.a.b[symbol3], object.a.b[symbol3]);
}
else {
skipAssert(assert, 7);
}
});
// Boxed symbols (Object(symbol)) clone to a new wrapper whose valueOf()
// is still a symbol; the primitive symbol itself is returned unchanged.
QUnit.test('`_.' + methodName + '` should clone symbol objects', function(assert) {
assert.expect(4);
if (Symbol) {
assert.strictEqual(func(symbol), symbol);
var object = Object(symbol),
actual = func(object);
assert.strictEqual(typeof actual, 'object');
assert.strictEqual(typeof actual.valueOf(), 'symbol');
assert.notStrictEqual(actual, object);
}
else {
skipAssert(assert, 4);
}
});
QUnit.test('`_.' + methodName + '` should not clone symbol primitives', function(assert) {
assert.expect(1);
if (Symbol) {
assert.strictEqual(func(symbol), symbol);
}
else {
skipAssert(assert);
}
});
// DOM elements are uncloneable: the result is an empty object, and the
// attempt must not throw.
QUnit.test('`_.' + methodName + '` should not error on DOM elements', function(assert) {
assert.expect(1);
if (document) {
var element = document.createElement('div');
try {
assert.deepEqual(func(element), {});
} catch (e) {
assert.ok(false, e.message);
}
}
else {
skipAssert(assert);
}
});
// Walks lodash's own `_`-prefixed internal objects and checks each clone is
// either an instance of the same constructor or the Ctor isn't `new`-able.
QUnit.test('`_.' + methodName + '` should create an object from the same realm as `value`', function(assert) {
assert.expect(1);
var props = [];
var objects = lodashStable.transform(_, function(result, value, key) {
if (lodashStable.startsWith(key, '_') && lodashStable.isObject(value) &&
!lodashStable.isArguments(value) && !lodashStable.isElement(value) &&
!lodashStable.isFunction(value)) {
props.push(lodashStable.capitalize(lodashStable.camelCase(key)));
result.push(value);
}
}, []);
var expected = lodashStable.map(objects, stubTrue);
var actual = lodashStable.map(objects, function(object) {
var Ctor = object.constructor,
result = func(object);
return result !== object && ((result instanceof Ctor) || !(new Ctor instanceof Ctor));
});
assert.deepEqual(actual, expected, props.join(', '));
});
// When used directly as a `_.map` iteratee, shallow clones share nested
// arrays while deep clones do not.
QUnit.test('`_.' + methodName + '` should perform a ' + (isDeep ? 'deep' : 'shallow') + ' clone when used as an iteratee for methods like `_.map`', function(assert) {
assert.expect(2);
var expected = [{ 'a': [0] }, { 'b': [1] }],
actual = lodashStable.map(expected, func);
assert.deepEqual(actual, expected);
if (isDeep) {
assert.ok(actual[0] !== expected[0] && actual[0].a !== expected[0].a && actual[1].b !== expected[1].b);
} else {
assert.ok(actual[0] !== expected[0] && actual[0].a === expected[0].a && actual[1].b === expected[1].b);
}
});
// Chaining `_(object).clone()` unwraps to a plain (new) value, not a wrapper.
// Skipped for the npm build, which has no chaining.
QUnit.test('`_.' + methodName + '` should return a unwrapped value when chaining', function(assert) {
assert.expect(2);
if (!isNpm) {
var object = objects.objects,
actual = _(object)[methodName]();
assert.deepEqual(actual, object);
assert.notStrictEqual(actual, object);
}
else {
skipAssert(assert, 2);
}
});
// Typed-array views: tried both with and without an explicit offset/length.
// Only deep clones get a fresh backing buffer (`actual.buffer !== view.buffer`).
lodashStable.each(arrayViews, function(type) {
QUnit.test('`_.' + methodName + '` should clone ' + type + ' values', function(assert) {
assert.expect(10);
var Ctor = root[type];
lodashStable.times(2, function(index) {
if (Ctor) {
var buffer = new ArrayBuffer(24),
view = index ? new Ctor(buffer, 8, 1) : new Ctor(buffer),
actual = func(view);
assert.deepEqual(actual, view);
assert.notStrictEqual(actual, view);
assert.strictEqual(actual.buffer === view.buffer, !isDeep);
assert.strictEqual(actual.byteOffset, view.byteOffset);
assert.strictEqual(actual.length, view.length);
}
else {
skipAssert(assert, 5);
}
});
});
});
// Uncloneable values (functions, errors, ...) clone to an empty object when
// cloned directly, but containers holding them still deep-equal the source.
lodashStable.forOwn(uncloneable, function(value, key) {
QUnit.test('`_.' + methodName + '` should not clone ' + key, function(assert) {
assert.expect(3);
if (value) {
var object = { 'a': value, 'b': { 'c': value } },
actual = func(object),
expected = value === Foo ? { 'c': Foo.c } : {};
assert.deepEqual(actual, object);
assert.notStrictEqual(actual, object);
assert.deepEqual(func(value), expected);
}
else {
skipAssert(assert, 3);
}
});
});
});
// Tests for the customizer-accepting variants `_.cloneWith`/`_.cloneDeepWith`.
lodashStable.each(['cloneWith', 'cloneDeepWith'], function(methodName) {
var func = _[methodName],
isDeep = methodName == 'cloneDeepWith';
// The customizer receives (value) at the top level and (value, key, object)
// for nested calls in the deep variant; the trailing `stack` argument is
// sliced off before recording.
QUnit.test('`_.' + methodName + '` should provide correct `customizer` arguments', function(assert) {
assert.expect(1);
var argsList = [],
object = new Foo;
func(object, function() {
var length = arguments.length,
args = slice.call(arguments, 0, length - (length > 1 ? 1 : 0));
argsList.push(args);
});
assert.deepEqual(argsList, isDeep ? [[object], [1, 'a', object]] : [[object]]);
});
// A customizer returning `undefined` (noop) falls back to default cloning.
QUnit.test('`_.' + methodName + '` should handle cloning when `customizer` returns `undefined`', function(assert) {
assert.expect(1);
var actual = func({ 'a': { 'b': 'c' } }, noop);
assert.deepEqual(actual, { 'a': { 'b': 'c' } });
});
// A customizer that returns non-plain-object values verbatim should pass
// uncloneables through untouched, both directly and inside a container.
lodashStable.forOwn(uncloneable, function(value, key) {
QUnit.test('`_.' + methodName + '` should work with a `customizer` callback and ' + key, function(assert) {
assert.expect(3);
var customizer = function(value) {
return lodashStable.isPlainObject(value) ? undefined : value;
};
var actual = func(value, customizer);
assert.strictEqual(actual, value);
var object = { 'a': value, 'b': { 'c': value } };
actual = func(object, customizer);
assert.deepEqual(actual, object);
assert.notStrictEqual(actual, object);
});
});
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.compact');
(function() {
// LARGE_ARRAY_SIZE elements plus a trailing `null` to exercise lazy paths.
var largeArray = lodashStable.range(LARGE_ARRAY_SIZE).concat(null);
// `_.compact` drops all falsey values, keeping truthy strings.
QUnit.test('should filter falsey values', function(assert) {
assert.expect(1);
var array = ['0', '1', '2'];
assert.deepEqual(_.compact(falsey.concat(array)), array);
});
// `compact` must compose correctly when sandwiched between other lazy
// chain operators (`thru`, `push`). Skipped for the npm build (no chaining).
QUnit.test('should work when in-between lazy operators', function(assert) {
assert.expect(2);
if (!isNpm) {
var actual = _(falsey).thru(_.slice).compact().thru(_.slice).value();
assert.deepEqual(actual, []);
actual = _(falsey).thru(_.slice).push(true, 1).compact().push('a').value();
assert.deepEqual(actual, [true, 1, 'a']);
}
else {
skipAssert(assert, 2);
}
});
// Lazy chain over a large array must match the equivalent eager computation.
QUnit.test('should work in a lazy sequence', function(assert) {
assert.expect(1);
if (!isNpm) {
var actual = _(largeArray).slice(1).compact().reverse().take().value();
assert.deepEqual(actual, _.take(_.compact(_.slice(largeArray, 1)).reverse()));
}
else {
skipAssert(assert);
}
});
// Temporarily swaps `_.iteratee` for `identity` to verify lazy `compact`
// doesn't depend on the customizable iteratee; restored afterwards.
QUnit.test('should work in a lazy sequence with a custom `_.iteratee`', function(assert) {
assert.expect(1);
if (!isModularize) {
var iteratee = _.iteratee,
pass = false;
_.iteratee = identity;
try {
var actual = _(largeArray).slice(1).compact().value();
pass = lodashStable.isEqual(actual, _.compact(_.slice(largeArray, 1)));
} catch (e) {console.log(e);}
assert.ok(pass);
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.concat');
(function() {
// Calling `_.concat` with only the source array returns an equal but
// distinct array.
QUnit.test('should shallow clone `array`', function(assert) {
assert.expect(2);
var source = [1, 2, 3];
var cloned = _.concat(source);
assert.deepEqual(cloned, source);
assert.notStrictEqual(cloned, source);
});
// Plain values are appended as-is, array arguments are flattened one level,
// and the source array is left unmodified.
QUnit.test('should concat arrays and values', function(assert) {
assert.expect(2);
var source = [1];
var result = _.concat(source, 2, [3], [[4]]);
assert.deepEqual(result, [1, 2, 3, [4]]);
assert.deepEqual(source, [1]);
});
// A non-array first argument is wrapped in an array; no arguments yields [].
QUnit.test('should cast non-array `array` values to arrays', function(assert) {
assert.expect(2);
var values = [, null, undefined, false, true, 1, NaN, 'a'];
var expected = lodashStable.map(values, function(value, index) {
return index ? [value] : [];
});
var actual = lodashStable.map(values, function(value, index) {
return index ? _.concat(value) : _.concat();
});
assert.deepEqual(actual, expected);
expected = lodashStable.map(values, function(value) {
return [value, 2, [3]];
});
actual = lodashStable.map(values, function(value) {
return _.concat(value, [2], [[3]]);
});
assert.deepEqual(actual, expected);
});
// Holes in sparse arrays become present `undefined` entries in the result.
QUnit.test('should treat sparse arrays as dense', function(assert) {
assert.expect(3);
var expected = [];
var joined = _.concat(Array(1), Array(1));
expected.push(undefined, undefined);
assert.ok('0' in joined);
assert.ok('1' in joined);
assert.deepEqual(joined, expected);
});
// In a chain, `concat` produces a new wrapped array and never mutates the
// wrapped source. Skipped for the npm build (no chaining).
QUnit.test('should return a new wrapped array', function(assert) {
assert.expect(2);
if (!isNpm) {
var source = [1];
var wrapped = _(source).concat([2, 3]);
var unwrapped = wrapped.value();
assert.deepEqual(source, [1]);
assert.deepEqual(unwrapped, [1, 2, 3]);
}
else {
skipAssert(assert, 2);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.cond');
(function() {
// `_.cond` returns the result of the first pair whose predicate matches.
QUnit.test('should create a conditional function', function(assert) {
assert.expect(3);
var cond = _.cond([
[lodashStable.matches({ 'a': 1 }), stubA],
[lodashStable.matchesProperty('b', 1), stubB],
[lodashStable.property('c'), stubC]
]);
assert.strictEqual(cond({ 'a': 1, 'b': 2, 'c': 3 }), 'a');
assert.strictEqual(cond({ 'a': 0, 'b': 1, 'c': 2 }), 'b');
assert.strictEqual(cond({ 'a': -1, 'b': 0, 'c': 1 }), 'c');
});
// Both the predicate and the handler receive all of the call's arguments.
QUnit.test('should provide arguments to functions', function(assert) {
assert.expect(2);
var args1,
args2,
expected = ['a', 'b', 'c'];
var cond = _.cond([[
function() { args1 || (args1 = slice.call(arguments)); return true; },
function() { args2 || (args2 = slice.call(arguments)); }
]]);
cond('a', 'b', 'c');
assert.deepEqual(args1, expected);
assert.deepEqual(args2, expected);
});
// Predicates may be iteratee shorthands: `_.matches` objects,
// `_.matchesProperty` pairs, and `_.property` strings.
QUnit.test('should work with predicate shorthands', function(assert) {
assert.expect(3);
var cond = _.cond([
[{ 'a': 1 }, stubA],
[['b', 1], stubB],
['c', stubC]
]);
assert.strictEqual(cond({ 'a': 1, 'b': 2, 'c': 3 }), 'a');
assert.strictEqual(cond({ 'a': 0, 'b': 1, 'c': 2 }), 'b');
assert.strictEqual(cond({ 'a': -1, 'b': 0, 'c': 1 }), 'c');
});
QUnit.test('should return `undefined` when no condition is met', function(assert) {
assert.expect(1);
var cond = _.cond([[stubFalse, stubA]]);
assert.strictEqual(cond({ 'a': 1 }), undefined);
});
// Non-function pair members (e.g. booleans) trigger a TypeError at call time.
QUnit.test('should throw a TypeError if `pairs` is not composed of functions', function(assert) {
assert.expect(2);
lodashStable.each([false, true], function(value) {
assert.raises(function() { _.cond([[stubTrue, value]])(); }, TypeError);
});
});
// `this` from the call site is forwarded to both predicate and handler.
QUnit.test('should use `this` binding of function for `pairs`', function(assert) {
assert.expect(1);
var cond = _.cond([
[function(a) { return this[a]; }, function(a, b) { return this[b]; }]
]);
var object = { 'cond': cond, 'a': 1, 'b': 2 };
assert.strictEqual(object.cond('a', 'b'), 2);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.conforms');
(function() {
// The predicate built by `_.conforms` snapshots `source`, so mutating
// `source` afterwards must not change an already-created predicate.
QUnit.test('should not change behavior if `source` is modified', function(assert) {
assert.expect(2);
var subject = { 'a': 2 };
var spec = { 'a': function(value) { return value > 1; } };
var predicate = _.conforms(spec);
assert.strictEqual(predicate(subject), true);
spec.a = function(value) { return value < 2; };
assert.strictEqual(predicate(subject), true);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('conforms methods');
// Shared tests for `_.conforms(source)(object)` and
// `_.conformsTo(object, source)`; `conforms` below normalizes both to the
// predicate-factory form.
lodashStable.each(['conforms', 'conformsTo'], function(methodName) {
var isConforms = methodName == 'conforms';
function conforms(source) {
return isConforms ? _.conforms(source) : function(object) {
return _.conformsTo(object, source);
};
}
// Every predicate in `source` must pass for the object to conform.
QUnit.test('`_.' + methodName + '` should check if `object` conforms to `source`', function(assert) {
assert.expect(2);
var objects = [
{ 'a': 1, 'b': 8 },
{ 'a': 2, 'b': 4 },
{ 'a': 3, 'b': 16 }
];
var par = conforms({
'b': function(value) { return value > 4; }
});
var actual = lodashStable.filter(objects, par);
assert.deepEqual(actual, [objects[0], objects[2]]);
par = conforms({
'b': function(value) { return value > 8; },
'a': function(value) { return value > 1; }
});
actual = lodashStable.filter(objects, par);
assert.deepEqual(actual, [objects[2]]);
});
// Only own properties of `source` are used; the inherited `b` predicate
// on Foo.prototype must be ignored.
QUnit.test('`_.' + methodName + '` should not match by inherited `source` properties', function(assert) {
assert.expect(1);
function Foo() {
this.a = function(value) {
return value > 1;
};
}
Foo.prototype.b = function(value) {
return value > 8;
};
var objects = [
{ 'a': 1, 'b': 8 },
{ 'a': 2, 'b': 4 },
{ 'a': 3, 'b': 16 }
];
var par = conforms(new Foo),
actual = lodashStable.filter(objects, par);
assert.deepEqual(actual, [objects[1], objects[2]]);
});
// Missing object keys fail fast without invoking the predicate.
QUnit.test('`_.' + methodName + '` should not invoke `source` predicates for missing `object` properties', function(assert) {
assert.expect(2);
var count = 0;
var par = conforms({
'a': function() { count++; return true; }
});
assert.strictEqual(par({}), false);
assert.strictEqual(count, 0);
});
// Functions are valid `object` values; their own static properties are checked.
QUnit.test('`_.' + methodName + '` should work with a function for `object`', function(assert) {
assert.expect(2);
function Foo() {}
Foo.a = 1;
function Bar() {}
Bar.a = 2;
var par = conforms({
'a': function(value) { return value > 1; }
});
assert.strictEqual(par(Foo), false);
assert.strictEqual(par(Bar), true);
});
// Functions are also valid `source` values (own properties hold predicates).
QUnit.test('`_.' + methodName + '` should work with a function for `source`', function(assert) {
assert.expect(1);
function Foo() {}
Foo.a = function(value) { return value > 1; };
var objects = [{ 'a': 1 }, { 'a': 2 }],
actual = lodashStable.filter(objects, conforms(Foo));
assert.deepEqual(actual, [objects[1]]);
});
// Inherited object properties (Foo.prototype.b) are visible to predicates.
QUnit.test('`_.' + methodName + '` should work with a non-plain `object`', function(assert) {
assert.expect(1);
function Foo() {
this.a = 1;
}
Foo.prototype.b = 2;
var par = conforms({
'b': function(value) { return value > 1; }
});
assert.strictEqual(par(new Foo), true);
});
// Nullish objects never conform to a non-empty `source`.
QUnit.test('`_.' + methodName + '` should return `false` when `object` is nullish', function(assert) {
assert.expect(1);
var values = [, null, undefined],
expected = lodashStable.map(values, stubFalse);
var par = conforms({
'a': function(value) { return value > 1; }
});
var actual = lodashStable.map(values, function(value, index) {
try {
return index ? par(value) : par();
} catch (e) {}
});
assert.deepEqual(actual, expected);
});
// An empty `source` conforms to anything, including nullish objects.
QUnit.test('`_.' + methodName + '` should return `true` when comparing an empty `source` to a nullish `object`', function(assert) {
assert.expect(1);
var values = [, null, undefined],
expected = lodashStable.map(values, stubTrue),
par = conforms({});
var actual = lodashStable.map(values, function(value, index) {
try {
return index ? par(value) : par();
} catch (e) {}
});
assert.deepEqual(actual, expected);
});
// All "empty" source values (empty objects, arrays, primitives) conform.
QUnit.test('`_.' + methodName + '` should return `true` when comparing an empty `source`', function(assert) {
assert.expect(1);
var object = { 'a': 1 },
expected = lodashStable.map(empties, stubTrue);
var actual = lodashStable.map(empties, function(value) {
var par = conforms(value);
return par(object);
});
assert.deepEqual(actual, expected);
});
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.constant');
(function() {
// The constant function ignores arguments and `this`, always returning
// the same reference.
QUnit.test('should create a function that returns `value`', function(assert) {
assert.expect(1);
var object = { 'a': 1 },
values = Array(2).concat(empties, true, 1, 'a'),
constant = _.constant(object);
var results = lodashStable.map(values, function(value, index) {
if (index < 2) {
return index ? constant.call({}) : constant();
}
return constant(value);
});
assert.ok(lodashStable.every(results, function(result) {
return result === object;
}));
});
// Falsey captured values round-trip exactly; the NaN case is handled via
// the self-inequality check.
QUnit.test('should work with falsey values', function(assert) {
assert.expect(1);
var expected = lodashStable.map(falsey, stubTrue);
var actual = lodashStable.map(falsey, function(value, index) {
var constant = index ? _.constant(value) : _.constant(),
result = constant();
return (result === value) || (result !== result && value !== value);
});
assert.deepEqual(actual, expected);
});
// In a chain, `constant` stays wrapped. Skipped for the npm build.
QUnit.test('should return a wrapped value when chaining', function(assert) {
assert.expect(1);
if (!isNpm) {
var wrapped = _(true).constant();
assert.ok(wrapped instanceof _);
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.countBy');
(function() {
var array = [6.1, 4.2, 6.3];
// Keys come from the iteratee's return value; values are occurrence counts.
QUnit.test('should transform keys by `iteratee`', function(assert) {
assert.expect(1);
var actual = _.countBy(array, Math.floor);
assert.deepEqual(actual, { '4': 1, '6': 2 });
});
// Omitted/nullish iteratees default to identity.
QUnit.test('should use `_.identity` when `iteratee` is nullish', function(assert) {
assert.expect(1);
var array = [4, 6, 6],
values = [, null, undefined],
expected = lodashStable.map(values, lodashStable.constant({ '4': 1, '6': 2 }));
var actual = lodashStable.map(values, function(value, index) {
return index ? _.countBy(array, value) : _.countBy(array);
});
assert.deepEqual(actual, expected);
});
// A string iteratee is treated as a `_.property` shorthand.
QUnit.test('should work with `_.property` shorthands', function(assert) {
assert.expect(1);
var actual = _.countBy(['one', 'two', 'three'], 'length');
assert.deepEqual(actual, { '3': 2, '5': 1 });
});
// Result keys that collide with Object.prototype members must become own
// properties, not shadow-free inherited ones.
QUnit.test('should only add values to own, not inherited, properties', function(assert) {
assert.expect(2);
var actual = _.countBy(array, function(n) {
return Math.floor(n) > 4 ? 'hasOwnProperty' : 'constructor';
});
assert.deepEqual(actual.constructor, 1);
assert.deepEqual(actual.hasOwnProperty, 2);
});
// A numeric iteratee is a property shorthand indexing into sub-arrays.
QUnit.test('should work with a number for `iteratee`', function(assert) {
assert.expect(2);
var array = [
[1, 'a'],
[2, 'a'],
[2, 'b']
];
assert.deepEqual(_.countBy(array, 0), { '1': 1, '2': 2 });
assert.deepEqual(_.countBy(array, 1), { 'a': 2, 'b': 1 });
});
// Objects are counted by their values.
QUnit.test('should work with an object for `collection`', function(assert) {
assert.expect(1);
var actual = _.countBy({ 'a': 6.1, 'b': 4.2, 'c': 6.3 }, Math.floor);
assert.deepEqual(actual, { '4': 1, '6': 2 });
});
// Lazy-chain result over a large array with duplicates must match the
// eager equivalent. Skipped for the npm build.
QUnit.test('should work in a lazy sequence', function(assert) {
assert.expect(1);
if (!isNpm) {
var array = lodashStable.range(LARGE_ARRAY_SIZE).concat(
lodashStable.range(Math.floor(LARGE_ARRAY_SIZE / 2), LARGE_ARRAY_SIZE),
lodashStable.range(Math.floor(LARGE_ARRAY_SIZE / 1.5), LARGE_ARRAY_SIZE)
);
var actual = _(array).countBy().map(square).filter(isEven).take().value();
assert.deepEqual(actual, _.take(_.filter(_.map(_.countBy(array), square), isEven)));
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.create');
(function() {
// Shape/Circle model a classic prototypal-inheritance setup for the tests.
function Shape() {
this.x = 0;
this.y = 0;
}
function Circle() {
Shape.call(this);
}
// `_.create(proto)` yields an object inheriting from `proto` without
// sharing the prototype object itself.
QUnit.test('should create an object that inherits from the given `prototype` object', function(assert) {
assert.expect(3);
Circle.prototype = _.create(Shape.prototype);
Circle.prototype.constructor = Circle;
var actual = new Circle;
assert.ok(actual instanceof Circle);
assert.ok(actual instanceof Shape);
assert.notStrictEqual(Circle.prototype, Shape.prototype);
});
// The optional second argument is assigned onto the created object.
QUnit.test('should assign `properties` to the created object', function(assert) {
assert.expect(3);
var expected = { 'constructor': Circle, 'radius': 0 };
Circle.prototype = _.create(Shape.prototype, expected);
var actual = new Circle;
assert.ok(actual instanceof Circle);
assert.ok(actual instanceof Shape);
assert.deepEqual(Circle.prototype, expected);
});
// Only own properties of `properties` are assigned; inherited `b` is not.
QUnit.test('should assign own properties', function(assert) {
assert.expect(1);
function Foo() {
this.a = 1;
this.c = 3;
}
Foo.prototype.b = 2;
assert.deepEqual(_.create({}, new Foo), { 'a': 1, 'c': 3 });
});
// Assigned properties become own keys even when the prototype has the
// same key.
QUnit.test('should assign properties that shadow those of `prototype`', function(assert) {
assert.expect(1);
function Foo() {
this.a = 1;
}
var object = _.create(new Foo, { 'a': 1 });
assert.deepEqual(lodashStable.keys(object), ['a']);
});
// Falsey prototypes produce a plain empty object.
QUnit.test('should accept a falsey `prototype`', function(assert) {
assert.expect(1);
var expected = lodashStable.map(falsey, stubObject);
var actual = lodashStable.map(falsey, function(prototype, index) {
return index ? _.create(prototype) : _.create();
});
assert.deepEqual(actual, expected);
});
// Primitive prototypes are ignored rather than boxed.
QUnit.test('should ignore a primitive `prototype` and use an empty object instead', function(assert) {
assert.expect(1);
var expected = lodashStable.map(primitives, stubTrue);
var actual = lodashStable.map(primitives, function(value, index) {
return lodashStable.isPlainObject(index ? _.create(value) : _.create());
});
assert.deepEqual(actual, expected);
});
// As a `_.map` iteratee, each element becomes the prototype: `a` is
// inherited (visible) but not an own key.
QUnit.test('should work as an iteratee for methods like `_.map`', function(assert) {
assert.expect(1);
var array = [{ 'a': 1 }, { 'a': 1 }, { 'a': 1 }],
expected = lodashStable.map(array, stubTrue),
objects = lodashStable.map(array, _.create);
var actual = lodashStable.map(objects, function(object) {
return object.a === 1 && !_.keys(object).length;
});
assert.deepEqual(actual, expected);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.curry');
(function() {
// Four-arg fixture; returns its received arguments for easy inspection.
function fn(a, b, c, d) {
return slice.call(arguments);
}
// Arguments may be supplied one at a time, in groups, or all at once.
QUnit.test('should curry based on the number of arguments given', function(assert) {
assert.expect(3);
var curried = _.curry(fn),
expected = [1, 2, 3, 4];
assert.deepEqual(curried(1)(2)(3)(4), expected);
assert.deepEqual(curried(1, 2)(3, 4), expected);
assert.deepEqual(curried(1, 2, 3, 4), expected);
});
// An explicit `arity` overrides `fn.length`.
QUnit.test('should allow specifying `arity`', function(assert) {
assert.expect(3);
var curried = _.curry(fn, 3),
expected = [1, 2, 3];
assert.deepEqual(curried(1)(2, 3), expected);
assert.deepEqual(curried(1, 2)(3), expected);
assert.deepEqual(curried(1, 2, 3), expected);
});
// Non-integer arity values coerce: '0', 0.6, 'xyz' all behave as arity 0.
QUnit.test('should coerce `arity` to an integer', function(assert) {
assert.expect(2);
var values = ['0', 0.6, 'xyz'],
expected = lodashStable.map(values, stubArray);
var actual = lodashStable.map(values, function(arity) {
return _.curry(fn, arity)();
});
assert.deepEqual(actual, expected);
assert.deepEqual(_.curry(fn, '2')(1)(2), [1, 2]);
});
// `curried.placeholder` reserves a slot to be filled by a later call.
QUnit.test('should support placeholders', function(assert) {
assert.expect(4);
var curried = _.curry(fn),
ph = curried.placeholder;
assert.deepEqual(curried(1)(ph, 3)(ph, 4)(2), [1, 2, 3, 4]);
assert.deepEqual(curried(ph, 2)(1)(ph, 4)(3), [1, 2, 3, 4]);
assert.deepEqual(curried(ph, ph, 3)(ph, 2)(ph, 4)(1), [1, 2, 3, 4]);
assert.deepEqual(curried(ph, ph, ph, 4)(ph, ph, 3)(ph, 2)(1), [1, 2, 3, 4]);
});
// Passing a placeholder in a later call keeps the slot open.
QUnit.test('should persist placeholders', function(assert) {
assert.expect(1);
var curried = _.curry(fn),
ph = curried.placeholder,
actual = curried(ph, ph, ph, 'd')('a')(ph)('b')('c');
assert.deepEqual(actual, ['a', 'b', 'c', 'd']);
});
// Temporarily installs a custom `_.placeholder`; it then takes precedence
// over `curried.placeholder`, which becomes an ordinary value.
QUnit.test('should use `_.placeholder` when set', function(assert) {
assert.expect(1);
if (!isModularize) {
var curried = _.curry(fn),
_ph = _.placeholder = {},
ph = curried.placeholder;
assert.deepEqual(curried(1)(_ph, 3)(ph, 4), [1, ph, 3, 4]);
delete _.placeholder;
}
else {
skipAssert(assert);
}
});
// Extra arguments beyond the arity are passed through on the final call.
QUnit.test('should provide additional arguments after reaching the target arity', function(assert) {
assert.expect(3);
var curried = _.curry(fn, 3);
assert.deepEqual(curried(1)(2, 3, 4), [1, 2, 3, 4]);
assert.deepEqual(curried(1, 2)(3, 4, 5), [1, 2, 3, 4, 5]);
assert.deepEqual(curried(1, 2, 3, 4, 5, 6), [1, 2, 3, 4, 5, 6]);
});
// Curried wrappers report `length` 0 at every stage.
QUnit.test('should create a function with a `length` of `0`', function(assert) {
assert.expect(6);
lodashStable.times(2, function(index) {
var curried = index ? _.curry(fn, 4) : _.curry(fn);
assert.strictEqual(curried.length, 0);
assert.strictEqual(curried(1).length, 0);
assert.strictEqual(curried(1, 2).length, 0);
});
});
// `new curried(...)` constructs an instance of the original function.
QUnit.test('should ensure `new curried` is an instance of `func`', function(assert) {
assert.expect(2);
function Foo(value) {
return value && object;
}
var curried = _.curry(Foo),
object = {};
assert.ok(new curried(false) instanceof Foo);
assert.strictEqual(new curried(true), object);
});
// Binding before currying preserves `this` at every stage; binding or
// attaching after currying only applies on the completing call, so the
// earlier partial applications see an unbound `this` (hence `Array(3)`,
// i.e. all-undefined lookups).
QUnit.test('should use `this` binding of function', function(assert) {
assert.expect(9);
var fn = function(a, b, c) {
var value = this || {};
return [value[a], value[b], value[c]];
};
var object = { 'a': 1, 'b': 2, 'c': 3 },
expected = [1, 2, 3];
assert.deepEqual(_.curry(_.bind(fn, object), 3)('a')('b')('c'), expected);
assert.deepEqual(_.curry(_.bind(fn, object), 3)('a', 'b')('c'), expected);
assert.deepEqual(_.curry(_.bind(fn, object), 3)('a', 'b', 'c'), expected);
assert.deepEqual(_.bind(_.curry(fn), object)('a')('b')('c'), Array(3));
assert.deepEqual(_.bind(_.curry(fn), object)('a', 'b')('c'), Array(3));
assert.deepEqual(_.bind(_.curry(fn), object)('a', 'b', 'c'), expected);
object.curried = _.curry(fn);
assert.deepEqual(object.curried('a')('b')('c'), Array(3));
assert.deepEqual(object.curried('a', 'b')('c'), Array(3));
assert.deepEqual(object.curried('a', 'b', 'c'), expected);
});
// Curried functions compose with partial / bind / partialRight.
QUnit.test('should work with partialed methods', function(assert) {
assert.expect(2);
var curried = _.curry(fn),
expected = [1, 2, 3, 4];
var a = _.partial(curried, 1),
b = _.bind(a, null, 2),
c = _.partialRight(b, 4),
d = _.partialRight(b(3), 4);
assert.deepEqual(c(3), expected);
assert.deepEqual(d(), expected);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.curryRight');
(function() {
// Four-arg fixture; returns its received arguments for easy inspection.
function fn(a, b, c, d) {
return slice.call(arguments);
}
// `curryRight` fills parameters from the right across successive calls.
QUnit.test('should curry based on the number of arguments given', function(assert) {
assert.expect(3);
var curried = _.curryRight(fn),
expected = [1, 2, 3, 4];
assert.deepEqual(curried(4)(3)(2)(1), expected);
assert.deepEqual(curried(3, 4)(1, 2), expected);
assert.deepEqual(curried(1, 2, 3, 4), expected);
});
// An explicit `arity` overrides `fn.length`.
QUnit.test('should allow specifying `arity`', function(assert) {
assert.expect(3);
var curried = _.curryRight(fn, 3),
expected = [1, 2, 3];
assert.deepEqual(curried(3)(1, 2), expected);
assert.deepEqual(curried(2, 3)(1), expected);
assert.deepEqual(curried(1, 2, 3), expected);
});
// Non-integer arity values coerce: '0', 0.6, 'xyz' all behave as arity 0.
QUnit.test('should coerce `arity` to an integer', function(assert) {
assert.expect(2);
var values = ['0', 0.6, 'xyz'],
expected = lodashStable.map(values, stubArray);
var actual = lodashStable.map(values, function(arity) {
return _.curryRight(fn, arity)();
});
assert.deepEqual(actual, expected);
assert.deepEqual(_.curryRight(fn, '2')(1)(2), [2, 1]);
});
// Placeholders reserve right-side slots to be filled later.
QUnit.test('should support placeholders', function(assert) {
assert.expect(4);
var curried = _.curryRight(fn),
expected = [1, 2, 3, 4],
ph = curried.placeholder;
assert.deepEqual(curried(4)(2, ph)(1, ph)(3), expected);
assert.deepEqual(curried(3, ph)(4)(1, ph)(2), expected);
assert.deepEqual(curried(ph, ph, 4)(ph, 3)(ph, 2)(1), expected);
assert.deepEqual(curried(ph, ph, ph, 4)(ph, ph, 3)(ph, 2)(1), expected);
});
// Passing a placeholder in a later call keeps the slot open.
QUnit.test('should persist placeholders', function(assert) {
assert.expect(1);
var curried = _.curryRight(fn),
ph = curried.placeholder,
actual = curried('a', ph, ph, ph)('b')(ph)('c')('d');
assert.deepEqual(actual, ['a', 'b', 'c', 'd']);
});
// Temporarily installs a custom `_.placeholder`; it then takes precedence
// over `curried.placeholder`, which becomes an ordinary value.
QUnit.test('should use `_.placeholder` when set', function(assert) {
assert.expect(1);
if (!isModularize) {
var curried = _.curryRight(fn),
_ph = _.placeholder = {},
ph = curried.placeholder;
assert.deepEqual(curried(4)(2, _ph)(1, ph), [1, 2, ph, 4]);
delete _.placeholder;
}
else {
skipAssert(assert);
}
});
// Extra arguments beyond the arity are passed through on the final call.
QUnit.test('should provide additional arguments after reaching the target arity', function(assert) {
assert.expect(3);
var curried = _.curryRight(fn, 3);
assert.deepEqual(curried(4)(1, 2, 3), [1, 2, 3, 4]);
assert.deepEqual(curried(4, 5)(1, 2, 3), [1, 2, 3, 4, 5]);
assert.deepEqual(curried(1, 2, 3, 4, 5, 6), [1, 2, 3, 4, 5, 6]);
});
// Curried wrappers report `length` 0 at every stage.
QUnit.test('should create a function with a `length` of `0`', function(assert) {
assert.expect(6);
lodashStable.times(2, function(index) {
var curried = index ? _.curryRight(fn, 4) : _.curryRight(fn);
assert.strictEqual(curried.length, 0);
assert.strictEqual(curried(4).length, 0);
assert.strictEqual(curried(3, 4).length, 0);
});
});
// `new curried(...)` constructs an instance of the original function.
QUnit.test('should ensure `new curried` is an instance of `func`', function(assert) {
assert.expect(2);
function Foo(value) {
return value && object;
}
var curried = _.curryRight(Foo),
object = {};
assert.ok(new curried(false) instanceof Foo);
assert.strictEqual(new curried(true), object);
});
// Mirrors the `_.curry` `this`-binding matrix: binding before currying
// preserves `this` at every stage; binding/attaching after currying only
// applies on the completing call (earlier stages yield all-undefined,
// i.e. `Array(3)`).
QUnit.test('should use `this` binding of function', function(assert) {
assert.expect(9);
var fn = function(a, b, c) {
var value = this || {};
return [value[a], value[b], value[c]];
};
var object = { 'a': 1, 'b': 2, 'c': 3 },
expected = [1, 2, 3];
assert.deepEqual(_.curryRight(_.bind(fn, object), 3)('c')('b')('a'), expected);
assert.deepEqual(_.curryRight(_.bind(fn, object), 3)('b', 'c')('a'), expected);
assert.deepEqual(_.curryRight(_.bind(fn, object), 3)('a', 'b', 'c'), expected);
assert.deepEqual(_.bind(_.curryRight(fn), object)('c')('b')('a'), Array(3));
assert.deepEqual(_.bind(_.curryRight(fn), object)('b', 'c')('a'), Array(3));
assert.deepEqual(_.bind(_.curryRight(fn), object)('a', 'b', 'c'), expected);
object.curried = _.curryRight(fn);
assert.deepEqual(object.curried('c')('b')('a'), Array(3));
assert.deepEqual(object.curried('b', 'c')('a'), Array(3));
assert.deepEqual(object.curried('a', 'b', 'c'), expected);
});
// Curried functions compose with partialRight / bind / partial.
QUnit.test('should work with partialed methods', function(assert) {
assert.expect(2);
var curried = _.curryRight(fn),
expected = [1, 2, 3, 4];
var a = _.partialRight(curried, 4),
b = _.partialRight(a, 3),
c = _.bind(b, null, 1),
d = _.partial(b(2), 1);
assert.deepEqual(c(2), expected);
assert.deepEqual(d(), expected);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('curry methods');
// Shared tests for `_.curry` and `_.curryRight`.
lodashStable.each(['curry', 'curryRight'], function(methodName) {
var func = _[methodName],
fn = function(a, b) { return slice.call(arguments); },
isCurry = methodName == 'curry';
// Currying a function named like a lodash method (`run`) must not break.
QUnit.test('`_.' + methodName + '` should not error on functions with the same name as lodash methods', function(assert) {
assert.expect(1);
function run(a, b) {
return a + b;
}
var curried = func(run);
try {
var actual = curried(1)(2);
} catch (e) {}
assert.strictEqual(actual, 3);
});
// Function names that collide with Object.prototype members are fine too.
QUnit.test('`_.' + methodName + '` should work for function names that shadow those on `Object.prototype`', function(assert) {
assert.expect(1);
var curried = _.curry(function hasOwnProperty(a, b, c) {
return [a, b, c];
});
var expected = [1, 2, 3];
assert.deepEqual(curried(1)(2)(3), expected);
});
// As a `_.map` iteratee over arrays and objects; `curry` yields args in
// call order, `curryRight` in reverse.
QUnit.test('`_.' + methodName + '` should work as an iteratee for methods like `_.map`', function(assert) {
assert.expect(2);
var array = [fn, fn, fn],
object = { 'a': fn, 'b': fn, 'c': fn };
lodashStable.each([array, object], function(collection) {
var curries = lodashStable.map(collection, func),
expected = lodashStable.map(collection, lodashStable.constant(isCurry ? ['a', 'b'] : ['b', 'a']));
var actual = lodashStable.map(curries, function(curried) {
return curried('a')('b');
});
assert.deepEqual(actual, expected);
});
});
});
/*--------------------------------------------------------------------------*/
// Tests for `_.debounce`. These are timing-sensitive: asserts are scheduled
// via `setTimeout` with generous margins over the debounce `wait`, and each
// async test signals completion through `assert.async()`'s `done` callback.
QUnit.module('lodash.debounce');
(function() {
QUnit.test('should debounce a function', function(assert) {
assert.expect(6);
var done = assert.async();
var callCount = 0;
var debounced = _.debounce(function(value) {
++callCount;
return value;
}, 32);
// Before the wait elapses the wrapper has no result to report.
var results = [debounced('a'), debounced('b'), debounced('c')];
assert.deepEqual(results, [undefined, undefined, undefined]);
assert.strictEqual(callCount, 0);
setTimeout(function() {
assert.strictEqual(callCount, 1);
// Subsequent calls return the result of the last completed invocation ('c').
var results = [debounced('d'), debounced('e'), debounced('f')];
assert.deepEqual(results, ['c', 'c', 'c']);
assert.strictEqual(callCount, 1);
}, 128);
setTimeout(function() {
assert.strictEqual(callCount, 2);
done();
}, 256);
});
QUnit.test('subsequent debounced calls return the last `func` result', function(assert) {
assert.expect(2);
var done = assert.async();
var debounced = _.debounce(identity, 32);
debounced('a');
setTimeout(function() {
// Still reports the previous result, not the argument just passed.
assert.notEqual(debounced('b'), 'b');
}, 64);
setTimeout(function() {
assert.notEqual(debounced('c'), 'c');
done();
}, 128);
});
QUnit.test('should not immediately call `func` when `wait` is `0`', function(assert) {
assert.expect(2);
var done = assert.async();
var callCount = 0,
debounced = _.debounce(function() { ++callCount; }, 0);
debounced();
debounced();
// Even with `wait` of 0 the invocation is deferred to a later tick.
assert.strictEqual(callCount, 0);
setTimeout(function() {
assert.strictEqual(callCount, 1);
done();
}, 5);
});
QUnit.test('should apply default options', function(assert) {
assert.expect(2);
var done = assert.async();
// An empty options object should behave like no options (trailing-only).
var callCount = 0,
debounced = _.debounce(function() { callCount++; }, 32, {});
debounced();
assert.strictEqual(callCount, 0);
setTimeout(function() {
assert.strictEqual(callCount, 1);
done();
}, 64);
});
QUnit.test('should support a `leading` option', function(assert) {
assert.expect(4);
var done = assert.async();
var callCounts = [0, 0];
var withLeading = _.debounce(function() {
callCounts[0]++;
}, 32, { 'leading': true });
var withLeadingAndTrailing = _.debounce(function() {
callCounts[1]++;
}, 32, { 'leading': true });
// `leading: true` invokes on the leading edge of the wait.
withLeading();
assert.strictEqual(callCounts[0], 1);
withLeadingAndTrailing();
withLeadingAndTrailing();
assert.strictEqual(callCounts[1], 1);
setTimeout(function() {
// The second of the two calls above also triggers a trailing invocation.
assert.deepEqual(callCounts, [1, 2]);
withLeading();
assert.strictEqual(callCounts[0], 2);
done();
}, 64);
});
QUnit.test('subsequent leading debounced calls return the last `func` result', function(assert) {
assert.expect(2);
var done = assert.async();
var debounced = _.debounce(identity, 32, { 'leading': true, 'trailing': false }),
results = [debounced('a'), debounced('b')];
assert.deepEqual(results, ['a', 'a']);
setTimeout(function() {
// After the wait expires a new leading invocation produces a fresh result.
var results = [debounced('c'), debounced('d')];
assert.deepEqual(results, ['c', 'c']);
done();
}, 64);
});
QUnit.test('should support a `trailing` option', function(assert) {
assert.expect(4);
var done = assert.async();
var withCount = 0,
withoutCount = 0;
var withTrailing = _.debounce(function() {
withCount++;
}, 32, { 'trailing': true });
var withoutTrailing = _.debounce(function() {
withoutCount++;
}, 32, { 'trailing': false });
withTrailing();
assert.strictEqual(withCount, 0);
withoutTrailing();
assert.strictEqual(withoutCount, 0);
setTimeout(function() {
// `trailing: false` suppresses the invocation entirely (no leading either).
assert.strictEqual(withCount, 1);
assert.strictEqual(withoutCount, 0);
done();
}, 64);
});
QUnit.test('should support a `maxWait` option', function(assert) {
assert.expect(4);
var done = assert.async();
var callCount = 0;
var debounced = _.debounce(function(value) {
++callCount;
return value;
}, 32, { 'maxWait': 64 });
debounced();
debounced();
assert.strictEqual(callCount, 0);
setTimeout(function() {
assert.strictEqual(callCount, 1);
debounced();
debounced();
assert.strictEqual(callCount, 1);
}, 128);
setTimeout(function() {
assert.strictEqual(callCount, 2);
done();
}, 256);
});
QUnit.test('should support `maxWait` in a tight loop', function(assert) {
assert.expect(1);
var done = assert.async();
// Longer spin limit on CI (`argv`) / PhantomJS where timers are less precise.
var limit = (argv || isPhantom) ? 1000 : 320,
withCount = 0,
withoutCount = 0;
var withMaxWait = _.debounce(function() {
withCount++;
}, 64, { 'maxWait': 128 });
var withoutMaxWait = _.debounce(function() {
withoutCount++;
}, 96);
// Busy-loop so the debounce delay keeps being reset; only `maxWait`
// guarantees an invocation under continuous calls.
var start = +new Date;
while ((new Date - start) < limit) {
withMaxWait();
withoutMaxWait();
}
var actual = [Boolean(withoutCount), Boolean(withCount)];
setTimeout(function() {
assert.deepEqual(actual, [false, true]);
done();
}, 1);
});
QUnit.test('should queue a trailing call for subsequent debounced calls after `maxWait`', function(assert) {
assert.expect(1);
var done = assert.async();
var callCount = 0;
var debounced = _.debounce(function() {
++callCount;
}, 200, { 'maxWait': 200 });
debounced();
// Calls straddle the 200ms `maxWait` boundary to force a queued trailing call.
setTimeout(debounced, 190);
setTimeout(debounced, 200);
setTimeout(debounced, 210);
setTimeout(function() {
assert.strictEqual(callCount, 2);
done();
}, 500);
});
QUnit.test('should cancel `maxDelayed` when `delayed` is invoked', function(assert) {
assert.expect(2);
var done = assert.async();
var callCount = 0;
var debounced = _.debounce(function() {
callCount++;
}, 32, { 'maxWait': 64 });
debounced();
setTimeout(function() {
debounced();
assert.strictEqual(callCount, 1);
}, 128);
setTimeout(function() {
// Exactly one more invocation: the internal max-wait timer must not fire
// again after the regular delayed invocation already ran.
assert.strictEqual(callCount, 2);
done();
}, 192);
});
QUnit.test('should invoke the trailing call with the correct arguments and `this` binding', function(assert) {
assert.expect(2);
var done = assert.async();
var actual,
callCount = 0,
object = {};
var debounced = _.debounce(function(value) {
// Record `this` plus the received arguments for later inspection.
actual = [this];
push.apply(actual, arguments);
return ++callCount != 2;
}, 32, { 'leading': true, 'maxWait': 64 });
// Spin until the second (maxWait-forced) invocation returns `false`.
while (true) {
if (!debounced.call(object, 'a')) {
break;
}
}
setTimeout(function() {
assert.strictEqual(callCount, 2);
assert.deepEqual(actual, [object, 'a']);
done();
}, 64);
});
}());
/*--------------------------------------------------------------------------*/
// Tests for `_.deburr`. `burredLetters`, `deburredLetters`, and `comboMarks`
// are fixtures defined earlier in this file (accented Latin characters, their
// plain-Latin equivalents, and combining diacritical marks respectively).
QUnit.module('lodash.deburr');
(function() {
QUnit.test('should convert Latin Unicode letters to basic Latin', function(assert) {
assert.expect(1);
var actual = lodashStable.map(burredLetters, _.deburr);
assert.deepEqual(actual, deburredLetters);
});
QUnit.test('should not deburr Latin mathematical operators', function(assert) {
assert.expect(1);
// \xd7 is the multiplication sign, \xf7 the division sign; both should
// pass through unchanged even though they sit in the Latin-1 range.
var operators = ['\xd7', '\xf7'],
actual = lodashStable.map(operators, _.deburr);
assert.deepEqual(actual, operators);
});
QUnit.test('should deburr combining diacritical marks', function(assert) {
assert.expect(1);
// Every combining mark sandwiched between 'e' and 'i' should be stripped.
var expected = lodashStable.map(comboMarks, lodashStable.constant('ei'));
var actual = lodashStable.map(comboMarks, function(chr) {
return _.deburr('e' + chr + 'i');
});
assert.deepEqual(actual, expected);
});
}());
/*--------------------------------------------------------------------------*/
// Tests for `_.defaults`: source properties fill in only keys that are
// missing or `undefined` on the destination; `null` counts as present.
QUnit.module('lodash.defaults');
(function() {
QUnit.test('should assign source properties if missing on `object`', function(assert) {
assert.expect(1);
var actual = _.defaults({ 'a': 1 }, { 'a': 2, 'b': 2 });
assert.deepEqual(actual, { 'a': 1, 'b': 2 });
});
QUnit.test('should accept multiple sources', function(assert) {
assert.expect(2);
// Earlier sources win over later ones for the same key.
var expected = { 'a': 1, 'b': 2, 'c': 3 },
actual = _.defaults({ 'a': 1, 'b': 2 }, { 'b': 3 }, { 'c': 3 });
assert.deepEqual(actual, expected);
actual = _.defaults({ 'a': 1, 'b': 2 }, { 'b': 3, 'c': 3 }, { 'c': 2 });
assert.deepEqual(actual, expected);
});
QUnit.test('should not overwrite `null` values', function(assert) {
assert.expect(1);
var actual = _.defaults({ 'a': null }, { 'a': 1 });
assert.strictEqual(actual.a, null);
});
QUnit.test('should overwrite `undefined` values', function(assert) {
assert.expect(1);
var actual = _.defaults({ 'a': undefined }, { 'a': 1 });
assert.strictEqual(actual.a, 1);
});
QUnit.test('should assign `undefined` values', function(assert) {
assert.expect(1);
// A source key explicitly set to `undefined` is still copied onto `{}`.
var source = { 'a': undefined, 'b': 1 },
actual = _.defaults({}, source);
assert.deepEqual(actual, { 'a': undefined, 'b': 1 });
});
QUnit.test('should assign properties that shadow those on `Object.prototype`', function(assert) {
assert.expect(2);
// Own properties whose values equal the inherited `Object.prototype`
// methods — defaults must treat them as present, not missing.
var object = {
'constructor': objectProto.constructor,
'hasOwnProperty': objectProto.hasOwnProperty,
'isPrototypeOf': objectProto.isPrototypeOf,
'propertyIsEnumerable': objectProto.propertyIsEnumerable,
'toLocaleString': objectProto.toLocaleString,
'toString': objectProto.toString,
'valueOf': objectProto.valueOf
};
var source = {
'constructor': 1,
'hasOwnProperty': 2,
'isPrototypeOf': 3,
'propertyIsEnumerable': 4,
'toLocaleString': 5,
'toString': 6,
'valueOf': 7
};
var expected = lodashStable.clone(source);
assert.deepEqual(_.defaults({}, source), expected);
// When `object` is applied first its (prototype-equal) values must win.
expected = lodashStable.clone(object);
assert.deepEqual(_.defaults({}, object, source), expected);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.defaultsDeep');
(function() {
QUnit.test('should deep assign source properties if missing on `object`', function(assert) {
assert.expect(1);
var object = { 'a': { 'b': 2 }, 'd': 4 },
source = { 'a': { 'b': 3, 'c': 3 }, 'e': 5 },
expected = { 'a': { 'b': 2, 'c': 3 }, 'd': 4, 'e': 5 };
assert.deepEqual(_.defaultsDeep(object, source), expected);
});
QUnit.test('should accept multiple sources', function(assert) {
assert.expect(2);
var source1 = { 'a': { 'b': 3 } },
source2 = { 'a': { 'c': 3 } },
source3 = { 'a': { 'b': 3, 'c': 3 } },
source4 = { 'a': { 'c': 4 } },
expected = { 'a': { 'b': 2, 'c': 3 } };
assert.deepEqual(_.defaultsDeep({ 'a': { 'b': 2 } }, source1, source2), expected);
assert.deepEqual(_.defaultsDeep({ 'a': { 'b': 2 } }, source3, source4), expected);
});
QUnit.test('should not overwrite `null` values', function(assert) {
assert.expect(1);
var object = { 'a': { 'b': null } },
source = { 'a': { 'b': 2 } },
actual = _.defaultsDeep(object, source);
assert.strictEqual(actual.a.b, null);
});
QUnit.test('should not overwrite regexp values', function(assert) {
assert.expect(1);
var object = { 'a': { 'b': /x/ } },
source = { 'a': { 'b': /y/ } },
actual = _.defaultsDeep(object, source);
assert.deepEqual(actual.a.b, /x/);
});
QUnit.test('should not convert function properties to objects', function(assert) {
assert.expect(2);
var actual = _.defaultsDeep({}, { 'a': noop });
assert.strictEqual(actual.a, noop);
actual = _.defaultsDeep({}, { 'a': { 'b': noop } });
assert.strictEqual(actual.a.b, noop);
});
QUnit.test('should overwrite `undefined` values', function(assert) {
assert.expect(1);
var object = { 'a': { 'b': undefined } },
source = { 'a': { 'b': 2 } },
actual = _.defaultsDeep(object, source);
assert.strictEqual(actual.a.b, 2);
});
QUnit.test('should assign `undefined` values', function(assert) {
assert.expect(1);
var source = { 'a': undefined, 'b': { 'c': undefined, 'd': 1 } },
expected = lodashStable.cloneDeep(source),
actual = _.defaultsDeep({}, source);
assert.deepEqual(actual, expected);
});
QUnit.test('should merge sources containing circular references', function(assert) {
assert.expect(2);
var object = {
'foo': { 'b': { 'c': { 'd': {} } } },
'bar': { 'a': 2 }
};
var source = {
'foo': { 'b': { 'c': { 'd': {} } } },
'bar': {}
};
object.foo.b.c.d = object;
source.foo.b.c.d = source;
source.bar.b = source.foo.b;
var actual = _.defaultsDeep(object, source);
assert.strictEqual(actual.bar.b, actual.foo.b);
assert.strictEqual(actual.foo.b.c.d, actual.foo.b.c.d.foo.b.c.d);
});
QUnit.test('should not modify sources', function(assert) {
assert.expect(3);
var source1 = { 'a': 1, 'b': { 'c': 2 } },
source2 = { 'b': { 'c': 3, 'd': 3 } },
actual = _.defaultsDeep({}, source1, source2);
assert.deepEqual(actual, { 'a': 1, 'b': { 'c': 2, 'd': 3 } });
assert.deepEqual(source1, { 'a': 1, 'b': { 'c': 2 } });
assert.deepEqual(source2, { 'b': { 'c': 3, 'd': 3 } });
});
QUnit.test('should not attempt a merge of a string into an array', function(assert) {
assert.expect(1);
var actual = _.defaultsDeep({ 'a': ['abc'] }, { 'a': 'abc' });
assert.deepEqual(actual.a, ['abc']);
});
QUnit.test('should not indirectly merge `Object` properties', function(assert) {
assert.expect(1);
_.defaultsDeep({}, { 'constructor': { 'a': 1 } });
var actual = 'a' in Object;
delete Object.a;
assert.notOk(actual);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.defaultTo');
(function() {
QUnit.test('should return a default value if `value` is `NaN` or nullish', function(assert) {
assert.expect(1);
var expected = lodashStable.map(falsey, function(value) {
return (value == null || value !== value) ? 1 : value;
});
var actual = lodashStable.map(falsey, function(value) {
return _.defaultTo(value, 1);
});
assert.deepEqual(actual, expected);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.defer');
(function() {
QUnit.test('should defer `func` execution', function(assert) {
assert.expect(1);
var done = assert.async();
var pass = false;
_.defer(function() { pass = true; });
setTimeout(function() {
assert.ok(pass);
done();
}, 32);
});
QUnit.test('should provide additional arguments to `func`', function(assert) {
assert.expect(1);
var done = assert.async();
var args;
_.defer(function() {
args = slice.call(arguments);
}, 1, 2);
setTimeout(function() {
assert.deepEqual(args, [1, 2]);
done();
}, 32);
});
QUnit.test('should be cancelable', function(assert) {
assert.expect(1);
var done = assert.async();
var pass = true,
timerId = _.defer(function() { pass = false; });
clearTimeout(timerId);
setTimeout(function() {
assert.ok(pass);
done();
}, 32);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.delay');
(function() {
QUnit.test('should delay `func` execution', function(assert) {
assert.expect(2);
var done = assert.async();
var pass = false;
_.delay(function() { pass = true; }, 32);
setTimeout(function() {
assert.notOk(pass);
}, 1);
setTimeout(function() {
assert.ok(pass);
done();
}, 64);
});
QUnit.test('should provide additional arguments to `func`', function(assert) {
assert.expect(1);
var done = assert.async();
var args;
_.delay(function() {
args = slice.call(arguments);
}, 32, 1, 2);
setTimeout(function() {
assert.deepEqual(args, [1, 2]);
done();
}, 64);
});
QUnit.test('should use a default `wait` of `0`', function(assert) {
assert.expect(2);
var done = assert.async();
var pass = false;
_.delay(function() { pass = true; });
assert.notOk(pass);
setTimeout(function() {
assert.ok(pass);
done();
}, 0);
});
QUnit.test('should be cancelable', function(assert) {
assert.expect(1);
var done = assert.async();
var pass = true,
timerId = _.delay(function() { pass = false; }, 32);
clearTimeout(timerId);
setTimeout(function() {
assert.ok(pass);
done();
}, 64);
});
QUnit.test('should work with mocked `setTimeout`', function(assert) {
assert.expect(1);
if (!isPhantom) {
var pass = false,
setTimeout = root.setTimeout;
setProperty(root, 'setTimeout', function(func) { func(); });
_.delay(function() { pass = true; }, 32);
setProperty(root, 'setTimeout', setTimeout);
assert.ok(pass);
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('difference methods');
lodashStable.each(['difference', 'differenceBy', 'differenceWith'], function(methodName) {
var func = _[methodName];
QUnit.test('`_.' + methodName + '` should return the difference of two arrays', function(assert) {
assert.expect(1);
var actual = func([2, 1], [2, 3]);
assert.deepEqual(actual, [1]);
});
QUnit.test('`_.' + methodName + '` should return the difference of multiple arrays', function(assert) {
assert.expect(1);
var actual = func([2, 1, 2, 3], [3, 4], [3, 2]);
assert.deepEqual(actual, [1]);
});
QUnit.test('`_.' + methodName + '` should treat `-0` as `0`', function(assert) {
assert.expect(2);
var array = [-0, 0];
var actual = lodashStable.map(array, function(value) {
return func(array, [value]);
});
assert.deepEqual(actual, [[], []]);
actual = lodashStable.map(func([-0, 1], [1]), lodashStable.toString);
assert.deepEqual(actual, ['0']);
});
QUnit.test('`_.' + methodName + '` should match `NaN`', function(assert) {
assert.expect(1);
assert.deepEqual(func([1, NaN, 3], [NaN, 5, NaN]), [1, 3]);
});
QUnit.test('`_.' + methodName + '` should work with large arrays', function(assert) {
assert.expect(1);
var array1 = lodashStable.range(LARGE_ARRAY_SIZE + 1),
array2 = lodashStable.range(LARGE_ARRAY_SIZE),
a = {},
b = {},
c = {};
array1.push(a, b, c);
array2.push(b, c, a);
assert.deepEqual(func(array1, array2), [LARGE_ARRAY_SIZE]);
});
QUnit.test('`_.' + methodName + '` should work with large arrays of `-0` as `0`', function(assert) {
assert.expect(2);
var array = [-0, 0];
var actual = lodashStable.map(array, function(value) {
var largeArray = lodashStable.times(LARGE_ARRAY_SIZE, lodashStable.constant(value));
return func(array, largeArray);
});
assert.deepEqual(actual, [[], []]);
var largeArray = lodashStable.times(LARGE_ARRAY_SIZE, stubOne);
actual = lodashStable.map(func([-0, 1], largeArray), lodashStable.toString);
assert.deepEqual(actual, ['0']);
});
QUnit.test('`_.' + methodName + '` should work with large arrays of `NaN`', function(assert) {
assert.expect(1);
var largeArray = lodashStable.times(LARGE_ARRAY_SIZE, stubNaN);
assert.deepEqual(func([1, NaN, 3], largeArray), [1, 3]);
});
QUnit.test('`_.' + methodName + '` should work with large arrays of objects', function(assert) {
assert.expect(1);
var object1 = {},
object2 = {},
largeArray = lodashStable.times(LARGE_ARRAY_SIZE, lodashStable.constant(object1));
assert.deepEqual(func([object1, object2], largeArray), [object2]);
});
QUnit.test('`_.' + methodName + '` should ignore values that are not array-like', function(assert) {
assert.expect(3);
var array = [1, null, 3];
assert.deepEqual(func(args, 3, { '0': 1 }), [1, 2, 3]);
assert.deepEqual(func(null, array, 1), []);
assert.deepEqual(func(array, args, null), [null]);
});
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.differenceBy');
(function() {
QUnit.test('should accept an `iteratee`', function(assert) {
assert.expect(2);
var actual = _.differenceBy([2.1, 1.2], [2.3, 3.4], Math.floor);
assert.deepEqual(actual, [1.2]);
actual = _.differenceBy([{ 'x': 2 }, { 'x': 1 }], [{ 'x': 1 }], 'x');
assert.deepEqual(actual, [{ 'x': 2 }]);
});
QUnit.test('should provide correct `iteratee` arguments', function(assert) {
assert.expect(1);
var args;
_.differenceBy([2.1, 1.2], [2.3, 3.4], function() {
args || (args = slice.call(arguments));
});
assert.deepEqual(args, [2.3]);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.differenceWith');
(function() {
QUnit.test('should work with a `comparator`', function(assert) {
assert.expect(1);
var objects = [{ 'x': 1, 'y': 2 }, { 'x': 2, 'y': 1 }],
actual = _.differenceWith(objects, [{ 'x': 1, 'y': 2 }], lodashStable.isEqual);
assert.deepEqual(actual, [objects[1]]);
});
QUnit.test('should preserve the sign of `0`', function(assert) {
assert.expect(1);
var array = [-0, 1],
largeArray = lodashStable.times(LARGE_ARRAY_SIZE, stubOne),
others = [[1], largeArray],
expected = lodashStable.map(others, lodashStable.constant(['-0']));
var actual = lodashStable.map(others, function(other) {
return lodashStable.map(_.differenceWith(array, other, lodashStable.eq), lodashStable.toString);
});
assert.deepEqual(actual, expected);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.divide');
(function() {
QUnit.test('should divide two numbers', function(assert) {
assert.expect(3);
assert.strictEqual(_.divide(6, 4), 1.5);
assert.strictEqual(_.divide(-6, 4), -1.5);
assert.strictEqual(_.divide(-6, -4), 1.5);
});
QUnit.test('should coerce arguments to numbers', function(assert) {
assert.expect(2);
assert.strictEqual(_.divide('6', '4'), 1.5);
assert.deepEqual(_.divide('x', 'y'), NaN);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.drop');
(function() {
var array = [1, 2, 3];
QUnit.test('should drop the first two elements', function(assert) {
assert.expect(1);
assert.deepEqual(_.drop(array, 2), [3]);
});
QUnit.test('should treat falsey `n` values, except `undefined`, as `0`', function(assert) {
assert.expect(1);
var expected = lodashStable.map(falsey, function(value) {
return value === undefined ? [2, 3] : array;
});
var actual = lodashStable.map(falsey, function(n) {
return _.drop(array, n);
});
assert.deepEqual(actual, expected);
});
QUnit.test('should return all elements when `n` < `1`', function(assert) {
assert.expect(3);
lodashStable.each([0, -1, -Infinity], function(n) {
assert.deepEqual(_.drop(array, n), array);
});
});
QUnit.test('should return an empty array when `n` >= `length`', function(assert) {
assert.expect(4);
lodashStable.each([3, 4, Math.pow(2, 32), Infinity], function(n) {
assert.deepEqual(_.drop(array, n), []);
});
});
QUnit.test('should coerce `n` to an integer', function(assert) {
assert.expect(1);
assert.deepEqual(_.drop(array, 1.6), [2, 3]);
});
QUnit.test('should work as an iteratee for methods like `_.map`', function(assert) {
assert.expect(1);
var array = [[1, 2, 3], [4, 5, 6], [7, 8, 9]],
actual = lodashStable.map(array, _.drop);
assert.deepEqual(actual, [[2, 3], [5, 6], [8, 9]]);
});
QUnit.test('should work in a lazy sequence', function(assert) {
assert.expect(6);
if (!isNpm) {
var array = lodashStable.range(1, LARGE_ARRAY_SIZE + 1),
predicate = function(value) { values.push(value); return isEven(value); },
values = [],
actual = _(array).drop(2).drop().value();
assert.deepEqual(actual, array.slice(3));
actual = _(array).filter(predicate).drop(2).drop().value();
assert.deepEqual(values, array);
assert.deepEqual(actual, _.drop(_.drop(_.filter(array, predicate), 2)));
actual = _(array).drop(2).dropRight().drop().dropRight(2).value();
assert.deepEqual(actual, _.dropRight(_.drop(_.dropRight(_.drop(array, 2))), 2));
values = [];
actual = _(array).drop().filter(predicate).drop(2).dropRight().drop().dropRight(2).value();
assert.deepEqual(values, array.slice(1));
assert.deepEqual(actual, _.dropRight(_.drop(_.dropRight(_.drop(_.filter(_.drop(array), predicate), 2))), 2));
}
else {
skipAssert(assert, 6);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.dropRight');
(function() {
var array = [1, 2, 3];
QUnit.test('should drop the last two elements', function(assert) {
assert.expect(1);
assert.deepEqual(_.dropRight(array, 2), [1]);
});
QUnit.test('should treat falsey `n` values, except `undefined`, as `0`', function(assert) {
assert.expect(1);
var expected = lodashStable.map(falsey, function(value) {
return value === undefined ? [1, 2] : array;
});
var actual = lodashStable.map(falsey, function(n) {
return _.dropRight(array, n);
});
assert.deepEqual(actual, expected);
});
QUnit.test('should return all elements when `n` < `1`', function(assert) {
assert.expect(3);
lodashStable.each([0, -1, -Infinity], function(n) {
assert.deepEqual(_.dropRight(array, n), array);
});
});
QUnit.test('should return an empty array when `n` >= `length`', function(assert) {
assert.expect(4);
lodashStable.each([3, 4, Math.pow(2, 32), Infinity], function(n) {
assert.deepEqual(_.dropRight(array, n), []);
});
});
QUnit.test('should coerce `n` to an integer', function(assert) {
assert.expect(1);
assert.deepEqual(_.dropRight(array, 1.6), [1, 2]);
});
QUnit.test('should work as an iteratee for methods like `_.map`', function(assert) {
assert.expect(1);
var array = [[1, 2, 3], [4, 5, 6], [7, 8, 9]],
actual = lodashStable.map(array, _.dropRight);
assert.deepEqual(actual, [[1, 2], [4, 5], [7, 8]]);
});
QUnit.test('should work in a lazy sequence', function(assert) {
assert.expect(6);
if (!isNpm) {
var array = lodashStable.range(1, LARGE_ARRAY_SIZE + 1),
predicate = function(value) { values.push(value); return isEven(value); },
values = [],
actual = _(array).dropRight(2).dropRight().value();
assert.deepEqual(actual, array.slice(0, -3));
actual = _(array).filter(predicate).dropRight(2).dropRight().value();
assert.deepEqual(values, array);
assert.deepEqual(actual, _.dropRight(_.dropRight(_.filter(array, predicate), 2)));
actual = _(array).dropRight(2).drop().dropRight().drop(2).value();
assert.deepEqual(actual, _.drop(_.dropRight(_.drop(_.dropRight(array, 2))), 2));
values = [];
actual = _(array).dropRight().filter(predicate).dropRight(2).drop().dropRight().drop(2).value();
assert.deepEqual(values, array.slice(0, -1));
assert.deepEqual(actual, _.drop(_.dropRight(_.drop(_.dropRight(_.filter(_.dropRight(array), predicate), 2))), 2));
}
else {
skipAssert(assert, 6);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.dropRightWhile');
(function() {
var array = [1, 2, 3, 4];
var objects = [
{ 'a': 0, 'b': 0 },
{ 'a': 1, 'b': 1 },
{ 'a': 2, 'b': 2 }
];
QUnit.test('should drop elements while `predicate` returns truthy', function(assert) {
assert.expect(1);
var actual = _.dropRightWhile(array, function(n) {
return n > 2;
});
assert.deepEqual(actual, [1, 2]);
});
QUnit.test('should provide correct `predicate` arguments', function(assert) {
assert.expect(1);
var args;
_.dropRightWhile(array, function() {
args = slice.call(arguments);
});
assert.deepEqual(args, [4, 3, array]);
});
QUnit.test('should work with `_.matches` shorthands', function(assert) {
assert.expect(1);
assert.deepEqual(_.dropRightWhile(objects, { 'b': 2 }), objects.slice(0, 2));
});
QUnit.test('should work with `_.matchesProperty` shorthands', function(assert) {
assert.expect(1);
assert.deepEqual(_.dropRightWhile(objects, ['b', 2]), objects.slice(0, 2));
});
QUnit.test('should work with `_.property` shorthands', function(assert) {
assert.expect(1);
assert.deepEqual(_.dropRightWhile(objects, 'b'), objects.slice(0, 1));
});
QUnit.test('should return a wrapped value when chaining', function(assert) {
assert.expect(2);
if (!isNpm) {
var wrapped = _(array).dropRightWhile(function(n) {
return n > 2;
});
assert.ok(wrapped instanceof _);
assert.deepEqual(wrapped.value(), [1, 2]);
}
else {
skipAssert(assert, 2);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.dropWhile');
(function() {
var array = [1, 2, 3, 4];
var objects = [
{ 'a': 2, 'b': 2 },
{ 'a': 1, 'b': 1 },
{ 'a': 0, 'b': 0 }
];
QUnit.test('should drop elements while `predicate` returns truthy', function(assert) {
assert.expect(1);
var actual = _.dropWhile(array, function(n) {
return n < 3;
});
assert.deepEqual(actual, [3, 4]);
});
QUnit.test('should provide correct `predicate` arguments', function(assert) {
assert.expect(1);
var args;
_.dropWhile(array, function() {
args = slice.call(arguments);
});
assert.deepEqual(args, [1, 0, array]);
});
QUnit.test('should work with `_.matches` shorthands', function(assert) {
assert.expect(1);
assert.deepEqual(_.dropWhile(objects, { 'b': 2 }), objects.slice(1));
});
QUnit.test('should work with `_.matchesProperty` shorthands', function(assert) {
assert.expect(1);
assert.deepEqual(_.dropWhile(objects, ['b', 2]), objects.slice(1));
});
QUnit.test('should work with `_.property` shorthands', function(assert) {
assert.expect(1);
assert.deepEqual(_.dropWhile(objects, 'b'), objects.slice(2));
});
QUnit.test('should work in a lazy sequence', function(assert) {
assert.expect(3);
if (!isNpm) {
var array = lodashStable.range(1, LARGE_ARRAY_SIZE + 3),
predicate = function(n) { return n < 3; },
expected = _.dropWhile(array, predicate),
wrapped = _(array).dropWhile(predicate);
assert.deepEqual(wrapped.value(), expected);
assert.deepEqual(wrapped.reverse().value(), expected.slice().reverse());
assert.strictEqual(wrapped.last(), _.last(expected));
}
else {
skipAssert(assert, 3);
}
});
QUnit.test('should work in a lazy sequence with `drop`', function(assert) {
assert.expect(1);
if (!isNpm) {
var array = lodashStable.range(1, LARGE_ARRAY_SIZE + 3);
var actual = _(array)
.dropWhile(function(n) { return n == 1; })
.drop()
.dropWhile(function(n) { return n == 3; })
.value();
assert.deepEqual(actual, array.slice(3));
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.endsWith');
(function() {
var string = 'abc';
QUnit.test('should return `true` if a string ends with `target`', function(assert) {
assert.expect(1);
assert.strictEqual(_.endsWith(string, 'c'), true);
});
QUnit.test('should return `false` if a string does not end with `target`', function(assert) {
assert.expect(1);
assert.strictEqual(_.endsWith(string, 'b'), false);
});
QUnit.test('should work with a `position`', function(assert) {
assert.expect(1);
assert.strictEqual(_.endsWith(string, 'b', 2), true);
});
QUnit.test('should work with `position` >= `length`', function(assert) {
assert.expect(4);
lodashStable.each([3, 5, MAX_SAFE_INTEGER, Infinity], function(position) {
assert.strictEqual(_.endsWith(string, 'c', position), true);
});
});
QUnit.test('should treat falsey `position` values, except `undefined`, as `0`', function(assert) {
assert.expect(1);
var expected = lodashStable.map(falsey, stubTrue);
var actual = lodashStable.map(falsey, function(position) {
return _.endsWith(string, position === undefined ? 'c' : '', position);
});
assert.deepEqual(actual, expected);
});
QUnit.test('should treat a negative `position` as `0`', function(assert) {
assert.expect(6);
lodashStable.each([-1, -3, -Infinity], function(position) {
assert.ok(lodashStable.every(string, function(chr) {
return !_.endsWith(string, chr, position);
}));
assert.strictEqual(_.endsWith(string, '', position), true);
});
});
QUnit.test('should coerce `position` to an integer', function(assert) {
assert.expect(1);
assert.strictEqual(_.endsWith(string, 'ab', 2.2), true);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.eq');
(function() {
QUnit.test('should perform a `SameValueZero` comparison of two values', function(assert) {
assert.expect(11);
assert.strictEqual(_.eq(), true);
assert.strictEqual(_.eq(undefined), true);
assert.strictEqual(_.eq(0, -0), true);
assert.strictEqual(_.eq(NaN, NaN), true);
assert.strictEqual(_.eq(1, 1), true);
assert.strictEqual(_.eq(null, undefined), false);
assert.strictEqual(_.eq(1, Object(1)), false);
assert.strictEqual(_.eq(1, '1'), false);
assert.strictEqual(_.eq(1, '1'), false);
var object = { 'a': 1 };
assert.strictEqual(_.eq(object, object), true);
assert.strictEqual(_.eq(object, { 'a': 1 }), false);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.escape');
(function() {
var escaped = '&<>"'/',
unescaped = '&<>"\'/';
escaped += escaped;
unescaped += unescaped;
QUnit.test('should escape values', function(assert) {
assert.expect(1);
assert.strictEqual(_.escape(unescaped), escaped);
});
QUnit.test('should handle strings with nothing to escape', function(assert) {
assert.expect(1);
assert.strictEqual(_.escape('abc'), 'abc');
});
QUnit.test('should escape the same characters unescaped by `_.unescape`', function(assert) {
assert.expect(1);
assert.strictEqual(_.escape(_.unescape(escaped)), escaped);
});
lodashStable.each(['`', '/'], function(chr) {
QUnit.test('should not escape the "' + chr + '" character', function(assert) {
assert.expect(1);
assert.strictEqual(_.escape(chr), chr);
});
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.escapeRegExp');

(function() {
  // Every RegExp special character alongside its backslash-escaped form.
  var escaped = '\\^\\$\\.\\*\\+\\?\\(\\)\\[\\]\\{\\}\\|\\\\';
  var unescaped = '^$.*+?()[]{}|\\';

  QUnit.test('should escape values', function(assert) {
    assert.expect(1);

    // Double the input so repeated specials are escaped too.
    var doubled = unescaped + unescaped;
    assert.strictEqual(_.escapeRegExp(doubled), escaped + escaped);
  });

  QUnit.test('should handle strings with nothing to escape', function(assert) {
    assert.expect(1);

    assert.strictEqual(_.escapeRegExp('abc'), 'abc');
  });

  QUnit.test('should return an empty string for empty values', function(assert) {
    assert.expect(1);

    // The leading hole exercises the zero-argument call path via `index`.
    var values = [, null, undefined, ''];
    var expected = lodashStable.map(values, stubString);

    var actual = lodashStable.map(values, function(value, index) {
      return index ? _.escapeRegExp(value) : _.escapeRegExp();
    });

    assert.deepEqual(actual, expected);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.every');

(function() {
  // `_.every` returns `true` when `predicate` is truthy for all elements,
  // short-circuiting on the first falsey result.
  QUnit.test('should return `true` if `predicate` returns truthy for all elements', function(assert) {
    assert.expect(1);

    // NOTE(review): this exercises the stable lodash copy rather than the
    // build under test — confirm that is intentional.
    assert.strictEqual(lodashStable.every([true, 1, 'a'], identity), true);
  });

  QUnit.test('should return `true` for empty collections', function(assert) {
    assert.expect(1);

    var expected = lodashStable.map(empties, stubTrue);

    var actual = lodashStable.map(empties, function(value) {
      try {
        return _.every(value, identity);
      } catch (e) {}
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should return `false` as soon as `predicate` returns falsey', function(assert) {
    assert.expect(2);

    var count = 0;

    assert.strictEqual(_.every([true, null, true], function(value) {
      count++;
      return value;
    }), false);

    // Iteration must stop at the falsey element: only two calls made.
    assert.strictEqual(count, 2);
  });

  QUnit.test('should work with collections of `undefined` values (test in IE < 9)', function(assert) {
    assert.expect(1);

    assert.strictEqual(_.every([undefined, undefined, undefined], identity), false);
  });

  QUnit.test('should use `_.identity` when `predicate` is nullish', function(assert) {
    assert.expect(2);

    // The leading hole exercises the one-argument call path via `index`.
    var values = [, null, undefined],
        expected = lodashStable.map(values, stubFalse);

    var actual = lodashStable.map(values, function(value, index) {
      var array = [0];
      return index ? _.every(array, value) : _.every(array);
    });

    assert.deepEqual(actual, expected);

    expected = lodashStable.map(values, stubTrue);
    actual = lodashStable.map(values, function(value, index) {
      var array = [1];
      return index ? _.every(array, value) : _.every(array);
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should work with `_.property` shorthands', function(assert) {
    assert.expect(2);

    var objects = [{ 'a': 0, 'b': 1 }, { 'a': 1, 'b': 2 }];
    assert.strictEqual(_.every(objects, 'a'), false);
    assert.strictEqual(_.every(objects, 'b'), true);
  });

  QUnit.test('should work with `_.matches` shorthands', function(assert) {
    assert.expect(2);

    var objects = [{ 'a': 0, 'b': 0 }, { 'a': 0, 'b': 1 }];
    assert.strictEqual(_.every(objects, { 'a': 0 }), true);
    assert.strictEqual(_.every(objects, { 'b': 1 }), false);
  });

  QUnit.test('should work as an iteratee for methods like `_.map`', function(assert) {
    assert.expect(1);

    var actual = lodashStable.map([[1]], _.every);
    assert.deepEqual(actual, [true]);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('strict mode checks');

// Mutating a frozen object should throw in a strict-mode build of lodash and
// fail silently otherwise; `isStrict` reflects which build is under test.
lodashStable.each(['assign', 'assignIn', 'bindAll', 'defaults', 'defaultsDeep', 'merge'], function(methodName) {
  var func = _[methodName],
      isBindAll = methodName == 'bindAll';

  QUnit.test('`_.' + methodName + '` should ' + (isStrict ? '' : 'not ') + 'throw strict mode errors', function(assert) {
    assert.expect(1);

    var object = freeze({ 'a': undefined, 'b': function() {} }),
        pass = !isStrict;

    try {
      // `bindAll` mutates method properties; the others assign key 'a' —
      // either way the frozen object rejects the write in strict mode.
      func(object, isBindAll ? 'b' : { 'a': 1 });
    } catch (e) {
      pass = !pass;
    }
    assert.ok(pass);
  });
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.fill');

(function() {
  // `_.fill` mutates `array` in place, writing `value` into [start, end).
  QUnit.test('should use a default `start` of `0` and a default `end` of `length`', function(assert) {
    assert.expect(1);

    var array = [1, 2, 3];
    assert.deepEqual(_.fill(array, 'a'), ['a', 'a', 'a']);
  });

  QUnit.test('should use `undefined` for `value` if not given', function(assert) {
    assert.expect(2);

    var array = [1, 2, 3],
        actual = _.fill(array);

    assert.deepEqual(actual, Array(3));
    // Unlike the sparse `Array(3)`, the filled result is dense.
    assert.ok(lodashStable.every(actual, function(value, index) {
      return index in actual;
    }));
  });

  QUnit.test('should work with a positive `start`', function(assert) {
    assert.expect(1);

    var array = [1, 2, 3];
    assert.deepEqual(_.fill(array, 'a', 1), [1, 'a', 'a']);
  });

  QUnit.test('should work with a `start` >= `length`', function(assert) {
    assert.expect(4);

    // Out-of-range starts leave the array untouched.
    lodashStable.each([3, 4, Math.pow(2, 32), Infinity], function(start) {
      var array = [1, 2, 3];
      assert.deepEqual(_.fill(array, 'a', start), [1, 2, 3]);
    });
  });

  QUnit.test('should treat falsey `start` values as `0`', function(assert) {
    assert.expect(1);

    var expected = lodashStable.map(falsey, lodashStable.constant(['a', 'a', 'a']));

    var actual = lodashStable.map(falsey, function(start) {
      var array = [1, 2, 3];
      return _.fill(array, 'a', start);
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should work with a negative `start`', function(assert) {
    assert.expect(1);

    var array = [1, 2, 3];
    assert.deepEqual(_.fill(array, 'a', -1), [1, 2, 'a']);
  });

  QUnit.test('should work with a negative `start` <= negative `length`', function(assert) {
    assert.expect(3);

    // A negative start at or past -length clamps to index 0.
    lodashStable.each([-3, -4, -Infinity], function(start) {
      var array = [1, 2, 3];
      assert.deepEqual(_.fill(array, 'a', start), ['a', 'a', 'a']);
    });
  });

  QUnit.test('should work with `start` >= `end`', function(assert) {
    assert.expect(2);

    lodashStable.each([2, 3], function(start) {
      var array = [1, 2, 3];
      assert.deepEqual(_.fill(array, 'a', start, 2), [1, 2, 3]);
    });
  });

  QUnit.test('should work with a positive `end`', function(assert) {
    assert.expect(1);

    var array = [1, 2, 3];
    assert.deepEqual(_.fill(array, 'a', 0, 1), ['a', 2, 3]);
  });

  QUnit.test('should work with a `end` >= `length`', function(assert) {
    assert.expect(4);

    lodashStable.each([3, 4, Math.pow(2, 32), Infinity], function(end) {
      var array = [1, 2, 3];
      assert.deepEqual(_.fill(array, 'a', 0, end), ['a', 'a', 'a']);
    });
  });

  QUnit.test('should treat falsey `end` values, except `undefined`, as `0`', function(assert) {
    assert.expect(1);

    // `undefined` means "default to length"; other falsey values coerce to 0.
    var expected = lodashStable.map(falsey, function(value) {
      return value === undefined ? ['a', 'a', 'a'] : [1, 2, 3];
    });

    var actual = lodashStable.map(falsey, function(end) {
      var array = [1, 2, 3];
      return _.fill(array, 'a', 0, end);
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should work with a negative `end`', function(assert) {
    assert.expect(1);

    var array = [1, 2, 3];
    assert.deepEqual(_.fill(array, 'a', 0, -1), ['a', 'a', 3]);
  });

  QUnit.test('should work with a negative `end` <= negative `length`', function(assert) {
    assert.expect(3);

    lodashStable.each([-3, -4, -Infinity], function(end) {
      var array = [1, 2, 3];
      assert.deepEqual(_.fill(array, 'a', 0, end), [1, 2, 3]);
    });
  });

  QUnit.test('should coerce `start` and `end` to integers', function(assert) {
    assert.expect(1);

    var positions = [[0.1, 1.6], ['0', 1], [0, '1'], ['1'], [NaN, 1], [1, NaN]];

    var actual = lodashStable.map(positions, function(pos) {
      var array = [1, 2, 3];
      return _.fill.apply(_, [array, 'a'].concat(pos));
    });

    assert.deepEqual(actual, [['a', 2, 3], ['a', 2, 3], ['a', 2, 3], [1, 'a', 'a'], ['a', 2, 3], [1, 2, 3]]);
  });

  QUnit.test('should work as an iteratee for methods like `_.map`', function(assert) {
    assert.expect(1);

    // As an iteratee, each inner array is filled with its own index.
    var array = [[1, 2], [3, 4]],
        actual = lodashStable.map(array, _.fill);

    assert.deepEqual(actual, [[0, 0], [1, 1]]);
  });

  QUnit.test('should return a wrapped value when chaining', function(assert) {
    assert.expect(3);

    if (!isNpm) {
      var array = [1, 2, 3],
          wrapped = _(array).fill('a'),
          actual = wrapped.value();

      assert.ok(wrapped instanceof _);
      // `fill` mutates in place, so unwrapping yields the same array object.
      assert.strictEqual(actual, array);
      assert.deepEqual(actual, ['a', 'a', 'a']);
    }
    else {
      skipAssert(assert, 3);
    }
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.filter');

(function() {
  QUnit.test('should return elements `predicate` returns truthy for', function(assert) {
    assert.expect(1);

    // Only the even element survives the predicate.
    var input = [1, 2, 3];
    assert.deepEqual(_.filter(input, isEven), [2]);
  });
}());
/*--------------------------------------------------------------------------*/
// Shared suite covering the six find-style methods; the `expected` table
// below encodes each method's result type (value / index / key).
lodashStable.each(['find', 'findIndex', 'findKey', 'findLast', 'findLastIndex', 'findLastKey'], function(methodName) {
  QUnit.module('lodash.' + methodName);

  var array = [1, 2, 3, 4],
      func = _[methodName];

  var objects = [
    { 'a': 0, 'b': 0 },
    { 'a': 1, 'b': 1 },
    { 'a': 2, 'b': 2 }
  ];

  // [result for truthy `a`, result when nothing matches, result for b == 2].
  var expected = ({
    'find': [objects[1], undefined, objects[2]],
    'findIndex': [1, -1, 2],
    'findKey': ['1', undefined, '2'],
    'findLast': [objects[2], undefined, objects[2]],
    'findLastIndex': [2, -1, 2],
    'findLastKey': ['2', undefined, '2']
  })[methodName];

  QUnit.test('`_.' + methodName + '` should return the found value', function(assert) {
    assert.expect(1);

    assert.strictEqual(func(objects, function(object) { return object.a; }), expected[0]);
  });

  QUnit.test('`_.' + methodName + '` should return `' + expected[1] + '` if value is not found', function(assert) {
    assert.expect(1);

    assert.strictEqual(func(objects, function(object) { return object.a === 3; }), expected[1]);
  });

  QUnit.test('`_.' + methodName + '` should work with `_.matches` shorthands', function(assert) {
    assert.expect(1);

    assert.strictEqual(func(objects, { 'b': 2 }), expected[2]);
  });

  QUnit.test('`_.' + methodName + '` should work with `_.matchesProperty` shorthands', function(assert) {
    assert.expect(1);

    assert.strictEqual(func(objects, ['b', 2]), expected[2]);
  });

  QUnit.test('`_.' + methodName + '` should work with `_.property` shorthands', function(assert) {
    assert.expect(1);

    assert.strictEqual(func(objects, 'b'), expected[0]);
  });

  QUnit.test('`_.' + methodName + '` should return `' + expected[1] + '` for empty collections', function(assert) {
    assert.expect(1);

    // Index-returning variants only accept array-likes, so drop plain objects.
    var emptyValues = lodashStable.endsWith(methodName, 'Index') ? lodashStable.reject(empties, lodashStable.isPlainObject) : empties,
        expecting = lodashStable.map(emptyValues, lodashStable.constant(expected[1]));

    var actual = lodashStable.map(emptyValues, function(value) {
      try {
        return func(value, { 'a': 3 });
      } catch (e) {}
    });

    assert.deepEqual(actual, expecting);
  });

  QUnit.test('`_.' + methodName + '` should return an unwrapped value when implicitly chaining', function(assert) {
    assert.expect(1);

    // With no predicate the default `identity` finds the first/last truthy.
    var expected = ({
      'find': 1,
      'findIndex': 0,
      'findKey': '0',
      'findLast': 4,
      'findLastIndex': 3,
      'findLastKey': '3'
    })[methodName];

    if (!isNpm) {
      assert.strictEqual(_(array)[methodName](), expected);
    }
    else {
      skipAssert(assert);
    }
  });

  QUnit.test('`_.' + methodName + '` should return a wrapped value when explicitly chaining', function(assert) {
    assert.expect(1);

    if (!isNpm) {
      assert.ok(_(array).chain()[methodName]() instanceof _);
    }
    else {
      skipAssert(assert);
    }
  });

  QUnit.test('`_.' + methodName + '` should not execute immediately when explicitly chaining', function(assert) {
    assert.expect(1);

    if (!isNpm) {
      // Deferred execution: the wrapper still holds the untouched source array.
      var wrapped = _(array).chain()[methodName]();
      assert.strictEqual(wrapped.__wrapped__, array);
    }
    else {
      skipAssert(assert);
    }
  });

  QUnit.test('`_.' + methodName + '` should work in a lazy sequence', function(assert) {
    assert.expect(2);

    if (!isNpm) {
      var largeArray = lodashStable.range(1, LARGE_ARRAY_SIZE + 1),
          smallArray = array;

      // Lazy (large array) and eager (small array) paths must agree.
      lodashStable.times(2, function(index) {
        var array = index ? largeArray : smallArray,
            wrapped = _(array).filter(isEven);

        assert.strictEqual(wrapped[methodName](), func(lodashStable.filter(array, isEven)));
      });
    }
    else {
      skipAssert(assert, 2);
    }
  });
});
// CONSISTENCY FIX: use the stable lodash copy for harness iteration (every
// sibling suite in this file uses `lodashStable.each`), so the test driver
// never depends on the build under test.
lodashStable.each(['find', 'findIndex', 'findLast', 'findLastIndex'], function(methodName) {
  var func = _[methodName];

  QUnit.test('`_.' + methodName + '` should provide correct `predicate` arguments for arrays', function(assert) {
    assert.expect(1);

    var args,
        array = ['a'];

    // Capture only the first invocation's arguments.
    func(array, function() {
      args || (args = slice.call(arguments));
    });

    assert.deepEqual(args, ['a', 0, array]);
  });
});
// CONSISTENCY FIX: use the stable lodash copy for harness iteration, matching
// `lodashStable.each` used by every other suite in this file.
lodashStable.each(['find', 'findKey', 'findLast', 'findLastKey'], function(methodName) {
  var func = _[methodName];

  QUnit.test('`_.' + methodName + '` should work with an object for `collection`', function(assert) {
    assert.expect(1);

    var actual = func({ 'a': 1, 'b': 2, 'c': 3 }, function(n) {
      return n < 3;
    });

    // Value-returning methods yield the entry's value; key-returning its key.
    var expected = ({
      'find': 1,
      'findKey': 'a',
      'findLast': 2,
      'findLastKey': 'b'
    })[methodName];

    assert.strictEqual(actual, expected);
  });

  QUnit.test('`_.' + methodName + '` should provide correct `predicate` arguments for objects', function(assert) {
    assert.expect(1);

    var args,
        object = { 'a': 1 };

    // Capture only the first invocation's arguments.
    func(object, function() {
      args || (args = slice.call(arguments));
    });

    assert.deepEqual(args, [1, 'a', object]);
  });
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.find and lodash.findLast');

lodashStable.each(['find', 'findLast'], function(methodName) {
  var isFind = methodName == 'find';

  QUnit.test('`_.' + methodName + '` should support shortcut fusion', function(assert) {
    assert.expect(3);

    if (!isNpm) {
      // With lazy evaluation the mapped iteratee runs only as far as needed
      // to locate the first (or, for `findLast`, last) even square.
      var findCount = 0,
          mapCount = 0,
          array = lodashStable.range(1, LARGE_ARRAY_SIZE + 1),
          iteratee = function(value) { mapCount++; return square(value); },
          predicate = function(value) { findCount++; return isEven(value); },
          actual = _(array).map(iteratee)[methodName](predicate);

      assert.strictEqual(findCount, isFind ? 2 : 1);
      assert.strictEqual(mapCount, isFind ? 2 : 1);
      assert.strictEqual(actual, isFind ? 4 : square(LARGE_ARRAY_SIZE));
    }
    else {
      skipAssert(assert, 3);
    }
  });
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.find and lodash.includes');

// Shared `fromIndex` behavior; `resolve` adapts the sought value into a
// predicate for `find` (curried equality) and leaves it raw for `includes`.
lodashStable.each(['includes', 'find'], function(methodName) {
  var func = _[methodName],
      isIncludes = methodName == 'includes',
      resolve = methodName == 'find' ? lodashStable.curry(lodashStable.eq) : identity;

  lodashStable.each({
    'an `arguments` object': args,
    'an array': [1, 2, 3]
  },
  function(collection, key) {
    var values = lodashStable.toArray(collection);

    QUnit.test('`_.' + methodName + '` should work with ' + key + ' and a positive `fromIndex`', function(assert) {
      assert.expect(1);

      // `isIncludes || x` yields `true` for includes and `x` for find.
      var expected = [
        isIncludes || values[2],
        isIncludes ? false : undefined
      ];

      var actual = [
        func(collection, resolve(values[2]), 2),
        func(collection, resolve(values[1]), 2)
      ];

      assert.deepEqual(actual, expected);
    });

    QUnit.test('`_.' + methodName + '` should work with ' + key + ' and a `fromIndex` >= `length`', function(assert) {
      assert.expect(1);

      var indexes = [4, 6, Math.pow(2, 32), Infinity];

      var expected = lodashStable.map(indexes, function() {
        var result = isIncludes ? false : undefined;
        return [result, result, result];
      });

      var actual = lodashStable.map(indexes, function(fromIndex) {
        return [
          func(collection, resolve(1), fromIndex),
          func(collection, resolve(undefined), fromIndex),
          func(collection, resolve(''), fromIndex)
        ];
      });

      assert.deepEqual(actual, expected);
    });

    QUnit.test('`_.' + methodName + '` should work with ' + key + ' and treat falsey `fromIndex` values as `0`', function(assert) {
      assert.expect(1);

      var expected = lodashStable.map(falsey, lodashStable.constant(isIncludes || values[0]));

      var actual = lodashStable.map(falsey, function(fromIndex) {
        return func(collection, resolve(values[0]), fromIndex);
      });

      assert.deepEqual(actual, expected);
    });

    QUnit.test('`_.' + methodName + '` should work with ' + key + ' and coerce `fromIndex` to an integer', function(assert) {
      assert.expect(1);

      var expected = [
        isIncludes || values[0],
        isIncludes || values[0],
        isIncludes ? false : undefined
      ];

      var actual = [
        func(collection, resolve(values[0]), 0.1),
        func(collection, resolve(values[0]), NaN),
        func(collection, resolve(values[0]), '1')
      ];

      assert.deepEqual(actual, expected);
    });

    QUnit.test('`_.' + methodName + '` should work with ' + key + ' and a negative `fromIndex`', function(assert) {
      assert.expect(1);

      var expected = [
        isIncludes || values[2],
        isIncludes ? false : undefined
      ];

      var actual = [
        func(collection, resolve(values[2]), -1),
        func(collection, resolve(values[1]), -1)
      ];

      assert.deepEqual(actual, expected);
    });

    QUnit.test('`_.' + methodName + '` should work with ' + key + ' and a negative `fromIndex` <= `-length`', function(assert) {
      assert.expect(1);

      // At or past -length the search starts from index 0.
      var indexes = [-4, -6, -Infinity],
          expected = lodashStable.map(indexes, lodashStable.constant(isIncludes || values[0]));

      var actual = lodashStable.map(indexes, function(fromIndex) {
        return func(collection, resolve(values[0]), fromIndex);
      });

      assert.deepEqual(actual, expected);
    });
  });
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.findIndex and lodash.indexOf');

// Shared index-search behavior; `resolve` turns the sought value into a
// predicate for `findIndex` and leaves it raw for `indexOf`.
lodashStable.each(['findIndex', 'indexOf'], function(methodName) {
  var array = [1, 2, 3, 1, 2, 3],
      func = _[methodName],
      resolve = methodName == 'findIndex' ? lodashStable.curry(lodashStable.eq) : identity;

  QUnit.test('`_.' + methodName + '` should return the index of the first matched value', function(assert) {
    assert.expect(1);

    assert.strictEqual(func(array, resolve(3)), 2);
  });

  QUnit.test('`_.' + methodName + '` should work with a positive `fromIndex`', function(assert) {
    assert.expect(1);

    assert.strictEqual(func(array, resolve(1), 2), 3);
  });

  QUnit.test('`_.' + methodName + '` should work with a `fromIndex` >= `length`', function(assert) {
    assert.expect(1);

    var values = [6, 8, Math.pow(2, 32), Infinity],
        expected = lodashStable.map(values, lodashStable.constant([-1, -1, -1]));

    var actual = lodashStable.map(values, function(fromIndex) {
      return [
        func(array, resolve(undefined), fromIndex),
        func(array, resolve(1), fromIndex),
        func(array, resolve(''), fromIndex)
      ];
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('`_.' + methodName + '` should work with a negative `fromIndex`', function(assert) {
    assert.expect(1);

    assert.strictEqual(func(array, resolve(2), -3), 4);
  });

  QUnit.test('`_.' + methodName + '` should work with a negative `fromIndex` <= `-length`', function(assert) {
    assert.expect(1);

    // At or past -length the search starts from index 0.
    var values = [-6, -8, -Infinity],
        expected = lodashStable.map(values, stubZero);

    var actual = lodashStable.map(values, function(fromIndex) {
      return func(array, resolve(1), fromIndex);
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('`_.' + methodName + '` should treat falsey `fromIndex` values as `0`', function(assert) {
    assert.expect(1);

    var expected = lodashStable.map(falsey, stubZero);

    var actual = lodashStable.map(falsey, function(fromIndex) {
      return func(array, resolve(1), fromIndex);
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('`_.' + methodName + '` should coerce `fromIndex` to an integer', function(assert) {
    assert.expect(1);

    assert.strictEqual(func(array, resolve(2), 1.2), 1);
  });
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.findLast');

(function() {
  // `findLast` searches backwards, so `fromIndex` is an upper bound.
  var resolve = lodashStable.curry(lodashStable.eq);

  lodashStable.each({
    'an `arguments` object': args,
    'an array': [1, 2, 3]
  },
  function(collection, key) {
    var values = lodashStable.toArray(collection);

    QUnit.test('should work with ' + key + ' and a positive `fromIndex`', function(assert) {
      assert.expect(1);

      var expected = [
        values[1],
        undefined
      ];

      var actual = [
        _.findLast(collection, resolve(values[1]), 1),
        _.findLast(collection, resolve(values[2]), 1)
      ];

      assert.deepEqual(actual, expected);
    });

    QUnit.test('should work with ' + key + ' and a `fromIndex` >= `length`', function(assert) {
      assert.expect(1);

      // Out-of-range indexes clamp to the last element.
      var indexes = [4, 6, Math.pow(2, 32), Infinity];

      var expected = lodashStable.map(indexes, lodashStable.constant([values[0], undefined, undefined]));

      var actual = lodashStable.map(indexes, function(fromIndex) {
        return [
          _.findLast(collection, resolve(1), fromIndex),
          _.findLast(collection, resolve(undefined), fromIndex),
          _.findLast(collection, resolve(''), fromIndex)
        ];
      });

      assert.deepEqual(actual, expected);
    });

    QUnit.test('should work with ' + key + ' and treat falsey `fromIndex` values correctly', function(assert) {
      assert.expect(1);

      // `undefined` defaults to length - 1; other falsey values coerce to 0.
      var expected = lodashStable.map(falsey, function(value) {
        return value === undefined ? values[3] : undefined;
      });

      var actual = lodashStable.map(falsey, function(fromIndex) {
        return _.findLast(collection, resolve(values[3]), fromIndex);
      });

      assert.deepEqual(actual, expected);
    });

    QUnit.test('should work with ' + key + ' and coerce `fromIndex` to an integer', function(assert) {
      assert.expect(1);

      var expected = [
        values[0],
        values[0],
        undefined
      ];

      var actual = [
        _.findLast(collection, resolve(values[0]), 0.1),
        _.findLast(collection, resolve(values[0]), NaN),
        _.findLast(collection, resolve(values[2]), '1')
      ];

      assert.deepEqual(actual, expected);
    });

    QUnit.test('should work with ' + key + ' and a negative `fromIndex`', function(assert) {
      assert.expect(1);

      var expected = [
        values[1],
        undefined
      ];

      var actual = [
        _.findLast(collection, resolve(values[1]), -2),
        _.findLast(collection, resolve(values[2]), -2)
      ];

      assert.deepEqual(actual, expected);
    });

    QUnit.test('should work with ' + key + ' and a negative `fromIndex` <= `-length`', function(assert) {
      assert.expect(1);

      // At or past -length only index 0 remains searchable.
      var indexes = [-4, -6, -Infinity],
          expected = lodashStable.map(indexes, lodashStable.constant(values[0]));

      var actual = lodashStable.map(indexes, function(fromIndex) {
        return _.findLast(collection, resolve(values[0]), fromIndex);
      });

      assert.deepEqual(actual, expected);
    });
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.flip');

(function() {
  // Echoes its arguments as an array so the received order is observable.
  function fn() {
    return slice.call(arguments);
  }

  QUnit.test('should flip arguments provided to `func`', function(assert) {
    assert.expect(1);

    var result = _.flip(fn)('a', 'b', 'c', 'd');
    assert.deepEqual(result, ['d', 'c', 'b', 'a']);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.flatMapDepth');

(function() {
  var array = [1, [2, [3, [4]], 5]];

  QUnit.test('should use a default `depth` of `1`', function(assert) {
    assert.expect(1);

    assert.deepEqual(_.flatMapDepth(array, identity), [1, 2, [3, [4]], 5]);
  });

  QUnit.test('should use `_.identity` when `iteratee` is nullish', function(assert) {
    assert.expect(1);

    // The leading hole exercises the one-argument call path via `index`.
    var values = [, null, undefined],
        expected = lodashStable.map(values, lodashStable.constant([1, 2, [3, [4]], 5]));

    var actual = lodashStable.map(values, function(value, index) {
      return index ? _.flatMapDepth(array, value) : _.flatMapDepth(array);
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should treat a `depth` of < `1` as a shallow clone', function(assert) {
    assert.expect(2);

    lodashStable.each([-1, 0], function(depth) {
      assert.deepEqual(_.flatMapDepth(array, identity, depth), [1, [2, [3, [4]], 5]]);
    });
  });

  QUnit.test('should coerce `depth` to an integer', function(assert) {
    assert.expect(1);

    assert.deepEqual(_.flatMapDepth(array, identity, 2.2), [1, 2, 3, [4], 5]);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('flatMap methods');

// Shared behavior of the flatMap family (map then flatten the results).
lodashStable.each(['flatMap', 'flatMapDeep', 'flatMapDepth'], function(methodName) {
  var func = _[methodName],
      array = [1, 2, 3, 4];

  // Iteratee producing a one-level-nested result to be flattened.
  function duplicate(n) {
    return [n, n];
  }

  QUnit.test('`_.' + methodName + '` should map values in `array` to a new flattened array', function(assert) {
    assert.expect(1);

    var actual = func(array, duplicate),
        expected = lodashStable.flatten(lodashStable.map(array, duplicate));

    assert.deepEqual(actual, expected);
  });

  QUnit.test('`_.' + methodName + '` should work with `_.property` shorthands', function(assert) {
    assert.expect(1);

    var objects = [{ 'a': [1, 2] }, { 'a': [3, 4] }];
    assert.deepEqual(func(objects, 'a'), array);
  });

  QUnit.test('`_.' + methodName + '` should iterate over own string keyed properties of objects', function(assert) {
    assert.expect(1);

    function Foo() {
      this.a = [1, 2];
    }
    Foo.prototype.b = [3, 4];

    // Inherited `b` must be ignored; only own `a` is mapped.
    var actual = func(new Foo, identity);
    assert.deepEqual(actual, [1, 2]);
  });

  QUnit.test('`_.' + methodName + '` should use `_.identity` when `iteratee` is nullish', function(assert) {
    assert.expect(2);

    // The leading hole exercises the one-argument call path via `index`.
    var array = [[1, 2], [3, 4]],
        object = { 'a': [1, 2], 'b': [3, 4] },
        values = [, null, undefined],
        expected = lodashStable.map(values, lodashStable.constant([1, 2, 3, 4]));

    lodashStable.each([array, object], function(collection) {
      var actual = lodashStable.map(values, function(value, index) {
        return index ? func(collection, value) : func(collection);
      });

      assert.deepEqual(actual, expected);
    });
  });

  QUnit.test('`_.' + methodName + '` should accept a falsey `collection`', function(assert) {
    assert.expect(1);

    var expected = lodashStable.map(falsey, stubArray);

    var actual = lodashStable.map(falsey, function(collection, index) {
      try {
        return index ? func(collection) : func();
      } catch (e) {}
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('`_.' + methodName + '` should treat number values for `collection` as empty', function(assert) {
    assert.expect(1);

    assert.deepEqual(func(1), []);
  });

  QUnit.test('`_.' + methodName + '` should work with objects with non-number length properties', function(assert) {
    assert.expect(1);

    // Not array-like (length is an array), so it iterates as a plain object.
    var object = { 'length': [1, 2] };
    assert.deepEqual(func(object, identity), [1, 2]);
  });
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.flattenDepth');

(function() {
  // Three levels of nesting so each depth produces a distinct result.
  var nested = [1, [2, [3, [4]], 5]];

  QUnit.test('should use a default `depth` of `1`', function(assert) {
    assert.expect(1);

    assert.deepEqual(_.flattenDepth(nested), [1, 2, [3, [4]], 5]);
  });

  QUnit.test('should treat a `depth` of < `1` as a shallow clone', function(assert) {
    assert.expect(2);

    lodashStable.each([-1, 0], function(depth) {
      assert.deepEqual(_.flattenDepth(nested, depth), [1, [2, [3, [4]], 5]]);
    });
  });

  QUnit.test('should coerce `depth` to an integer', function(assert) {
    assert.expect(1);

    assert.deepEqual(_.flattenDepth(nested, 2.2), [1, 2, 3, [4], 5]);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('flatten methods');

(function() {
  var array = [1, [2, [3, [4]], 5]],
      methodNames = ['flatten', 'flattenDeep', 'flattenDepth'];

  QUnit.test('should flatten `arguments` objects', function(assert) {
    assert.expect(3);

    // `args` (defined at file level) is an `arguments` object of 1, 2, 3.
    var array = [args, [args]];

    assert.deepEqual(_.flatten(array), [1, 2, 3, args]);
    assert.deepEqual(_.flattenDeep(array), [1, 2, 3, 1, 2, 3]);
    assert.deepEqual(_.flattenDepth(array, 2), [1, 2, 3, 1, 2, 3]);
  });

  QUnit.test('should treat sparse arrays as dense', function(assert) {
    assert.expect(6);

    var array = [[1, 2, 3], Array(3)],
        expected = [1, 2, 3];

    expected.push(undefined, undefined, undefined);

    lodashStable.each(methodNames, function(methodName) {
      var actual = _[methodName](array);

      assert.deepEqual(actual, expected);
      // Holes from the sparse input become real `undefined` entries.
      assert.ok('4' in actual);
    });
  });

  QUnit.test('should flatten objects with a truthy `Symbol.isConcatSpreadable` value', function(assert) {
    assert.expect(1);

    if (Symbol && Symbol.isConcatSpreadable) {
      var object = { '0': 'a', 'length': 1 },
          array = [object],
          expected = lodashStable.map(methodNames, lodashStable.constant(['a']));

      object[Symbol.isConcatSpreadable] = true;

      var actual = lodashStable.map(methodNames, function(methodName) {
        return _[methodName](array);
      });

      assert.deepEqual(actual, expected);
    }
    else {
      skipAssert(assert);
    }
  });

  QUnit.test('should work with extremely large arrays', function(assert) {
    assert.expect(3);

    // Guards against call-stack / apply-argument limits on huge inputs.
    lodashStable.times(3, function(index) {
      var expected = Array(5e5);
      try {
        var func = _.flatten;
        if (index == 1) {
          func = _.flattenDeep;
        } else if (index == 2) {
          func = _.flattenDepth;
        }
        assert.deepEqual(func([expected]), expected);
      } catch (e) {
        assert.ok(false, e.message);
      }
    });
  });

  QUnit.test('should work with empty arrays', function(assert) {
    assert.expect(3);

    var array = [[], [[]], [[], [[[]]]]];

    assert.deepEqual(_.flatten(array), [[], [], [[[]]]]);
    assert.deepEqual(_.flattenDeep(array), []);
    assert.deepEqual(_.flattenDepth(array, 2), [[[]]]);
  });

  QUnit.test('should support flattening of nested arrays', function(assert) {
    assert.expect(3);

    assert.deepEqual(_.flatten(array), [1, 2, [3, [4]], 5]);
    assert.deepEqual(_.flattenDeep(array), [1, 2, 3, 4, 5]);
    assert.deepEqual(_.flattenDepth(array, 2), [1, 2, 3, [4], 5]);
  });

  QUnit.test('should return an empty array for non array-like objects', function(assert) {
    assert.expect(3);

    var expected = [],
        nonArray = { '0': 'a' };

    assert.deepEqual(_.flatten(nonArray), expected);
    assert.deepEqual(_.flattenDeep(nonArray), expected);
    assert.deepEqual(_.flattenDepth(nonArray, 2), expected);
  });

  QUnit.test('should return a wrapped value when chaining', function(assert) {
    assert.expect(6);

    if (!isNpm) {
      var wrapped = _(array),
          actual = wrapped.flatten();

      assert.ok(actual instanceof _);
      assert.deepEqual(actual.value(), [1, 2, [3, [4]], 5]);

      actual = wrapped.flattenDeep();

      assert.ok(actual instanceof _);
      assert.deepEqual(actual.value(), [1, 2, 3, 4, 5]);

      actual = wrapped.flattenDepth(2);

      assert.ok(actual instanceof _);
      assert.deepEqual(actual.value(), [1, 2, 3, [4], 5]);
    }
    else {
      skipAssert(assert, 6);
    }
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('flow methods');

// `flow` composes left-to-right, `flowRight` right-to-left.
lodashStable.each(['flow', 'flowRight'], function(methodName) {
  var func = _[methodName],
      isFlow = methodName == 'flow';

  QUnit.test('`_.' + methodName + '` should supply each function with the return value of the previous', function(assert) {
    assert.expect(1);

    var fixed = function(n) { return n.toFixed(1); },
        combined = isFlow ? func(add, square, fixed) : func(fixed, square, add);

    // add(1, 2) -> square(3) -> fixed(9) == '9.0'.
    assert.strictEqual(combined(1, 2), '9.0');
  });

  QUnit.test('`_.' + methodName + '` should return a new function', function(assert) {
    assert.expect(1);

    assert.notStrictEqual(func(noop), noop);
  });

  QUnit.test('`_.' + methodName + '` should return an identity function when no arguments are given', function(assert) {
    assert.expect(6);

    // CONSISTENCY FIX: iterate with the stable lodash copy (was `_.times`),
    // matching the rest of this file and keeping the harness independent of
    // the build under test.
    lodashStable.times(2, function(index) {
      try {
        var combined = index ? func([]) : func();
        assert.strictEqual(combined('a'), 'a');
      } catch (e) {
        assert.ok(false, e.message);
      }
      assert.strictEqual(combined.length, 0);
      // Behaves like identity but must be a distinct function object.
      assert.notStrictEqual(combined, identity);
    });
  });

  QUnit.test('`_.' + methodName + '` should work with a curried function and `_.head`', function(assert) {
    assert.expect(1);

    var curried = _.curry(identity);

    var combined = isFlow
      ? func(_.head, curried)
      : func(curried, _.head);

    assert.strictEqual(combined([1]), 1);
  });

  QUnit.test('`_.' + methodName + '` should support shortcut fusion', function(assert) {
    assert.expect(6);

    var filterCount,
        mapCount,
        array = lodashStable.range(LARGE_ARRAY_SIZE),
        iteratee = function(value) { mapCount++; return square(value); },
        predicate = function(value) { filterCount++; return isEven(value); };

    // Run once with the plain methods and once with curried/rearged clones
    // temporarily installed on `_`; fusion should keep iteration lazy.
    lodashStable.times(2, function(index) {
      var filter1 = _.filter,
          filter2 = _.curry(_.rearg(_.ary(_.filter, 2), 1, 0), 2),
          filter3 = (_.filter = index ? filter2 : filter1, filter2(predicate));

      var map1 = _.map,
          map2 = _.curry(_.rearg(_.ary(_.map, 2), 1, 0), 2),
          map3 = (_.map = index ? map2 : map1, map2(iteratee));

      var take1 = _.take,
          take2 = _.curry(_.rearg(_.ary(_.take, 2), 1, 0), 2),
          take3 = (_.take = index ? take2 : take1, take2(2));

      var combined = isFlow
        ? func(map3, filter3, _.compact, take3)
        : func(take3, _.compact, filter3, map3);

      filterCount = mapCount = 0;
      assert.deepEqual(combined(array), [4, 16]);

      if (!isNpm && WeakMap && WeakMap.name) {
        // Lazy evaluation stops after enough elements survive `take(2)`.
        assert.strictEqual(filterCount, 5, 'filterCount');
        assert.strictEqual(mapCount, 5, 'mapCount');
      }
      else {
        skipAssert(assert, 2);
      }
      // Restore the real methods for subsequent tests.
      _.filter = filter1;
      _.map = map1;
      _.take = take1;
    });
  });

  QUnit.test('`_.' + methodName + '` should work with curried functions with placeholders', function(assert) {
    assert.expect(1);

    var curried = _.curry(_.ary(_.map, 2), 2),
        getProp = curried(curried.placeholder, 'a'),
        objects = [{ 'a': 1 }, { 'a': 2 }, { 'a': 1 }];

    var combined = isFlow
      ? func(getProp, _.uniq)
      : func(_.uniq, getProp);

    assert.deepEqual(combined(objects), [1, 2]);
  });

  QUnit.test('`_.' + methodName + '` should return a wrapped value when chaining', function(assert) {
    assert.expect(1);

    if (!isNpm) {
      var wrapped = _(noop)[methodName]();
      assert.ok(wrapped instanceof _);
    }
    else {
      skipAssert(assert);
    }
  });
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.forEach');
(function() {
  // `_.each` is documented as an alias of `_.forEach`; they must be the
  // very same function object, not merely equivalent implementations.
  QUnit.test('should be aliased', function(assert) {
    assert.expect(1);
    assert.strictEqual(_.each, _.forEach);
  });
})();
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.forEachRight');
(function() {
  // `_.eachRight` is documented as an alias of `_.forEachRight`; verify
  // identity rather than behavioral equivalence.
  QUnit.test('should be aliased', function(assert) {
    assert.expect(1);
    assert.strictEqual(_.eachRight, _.forEachRight);
  });
})();
/*--------------------------------------------------------------------------*/
QUnit.module('forIn methods');
lodashStable.each(['forIn', 'forInRight'], function(methodName) {
  var forInFunc = _[methodName];
  // Unlike the `forOwn` family, the `forIn` variants must also visit
  // enumerable string-keyed properties inherited from the prototype chain.
  QUnit.test('`_.' + methodName + '` iterates over inherited string keyed properties', function(assert) {
    assert.expect(1);
    function Foo() {
      this.a = 1;
    }
    Foo.prototype.b = 2;
    var seenKeys = [];
    forInFunc(new Foo, function(value, key) {
      seenKeys.push(key);
    });
    // Sort so that iteration order (including the Right variant) is irrelevant.
    assert.deepEqual(seenKeys.sort(), ['a', 'b']);
  });
});
/*--------------------------------------------------------------------------*/
QUnit.module('forOwn methods');
lodashStable.each(['forOwn', 'forOwnRight'], function(methodName) {
  var forOwnFunc = _[methodName];
  // A plain object that merely has a numeric `length` property is still
  // iterated as an object here, so `length` itself must be visited too.
  QUnit.test('`_.' + methodName + '` should iterate over `length` properties', function(assert) {
    assert.expect(1);
    var source = { '0': 'zero', '1': 'one', 'length': 2 },
        visited = [];
    forOwnFunc(source, function(value, key) {
      visited.push(key);
    });
    assert.deepEqual(visited.sort(), ['0', '1', 'length']);
  });
});
/*--------------------------------------------------------------------------*/
QUnit.module('iteration methods');
(function() {
// Cross-cutting suite: verifies iteration semantics shared by every lodash
// collection/object iteration method. `_baseEach` is an internal helper that
// may be absent from some builds, hence the `if (func)` guards below.
// Every method exercised by this module.
var methods = [
'_baseEach',
'countBy',
'every',
'filter',
'find',
'findIndex',
'findKey',
'findLast',
'findLastIndex',
'findLastKey',
'forEach',
'forEachRight',
'forIn',
'forInRight',
'forOwn',
'forOwnRight',
'groupBy',
'keyBy',
'map',
'mapKeys',
'mapValues',
'maxBy',
'minBy',
'omitBy',
'partition',
'pickBy',
'reject',
'some'
];
// Methods that operate on arrays only (excluded from object-iteration tests).
var arrayMethods = [
'findIndex',
'findLastIndex',
'maxBy',
'minBy'
];
// Methods that accept any array-like collection.
var collectionMethods = [
'_baseEach',
'countBy',
'every',
'filter',
'find',
'findLast',
'forEach',
'forEachRight',
'groupBy',
'keyBy',
'map',
'partition',
'reduce',
'reduceRight',
'reject',
'some'
];
// Methods that visit inherited properties (so the own-properties test skips them).
var forInMethods = [
'forIn',
'forInRight',
'omitBy',
'pickBy'
];
// Methods expected to return the collection they iterate.
var iterationMethods = [
'_baseEach',
'forEach',
'forEachRight',
'forIn',
'forInRight',
'forOwn',
'forOwnRight'
];
// Methods that iterate objects and therefore receive string keys.
var objectMethods = [
'findKey',
'findLastKey',
'forIn',
'forInRight',
'forOwn',
'forOwnRight',
'mapKeys',
'mapValues',
'omitBy',
'pickBy'
];
// Right-to-left variants: iteratee-argument expectations are reversed for these.
var rightMethods = [
'findLast',
'findLastIndex',
'findLastKey',
'forEachRight',
'forInRight',
'forOwnRight'
];
// Methods that return an unwrapped value when implicitly chained.
var unwrappedMethods = [
'each',
'eachRight',
'every',
'find',
'findIndex',
'findKey',
'findLast',
'findLastIndex',
'findLastKey',
'forEach',
'forEachRight',
'forIn',
'forInRight',
'forOwn',
'forOwnRight',
'max',
'maxBy',
'min',
'minBy',
'some'
];
lodashStable.each(methods, function(methodName) {
var array = [1, 2, 3],
func = _[methodName],
// `partition` and every `*By` method pass fewer iteratee arguments.
isBy = /(^partition|By)$/.test(methodName),
isFind = /^find/.test(methodName),
isOmitPick = /^(?:omit|pick)By$/.test(methodName),
isSome = methodName == 'some';
QUnit.test('`_.' + methodName + '` should provide correct iteratee arguments', function(assert) {
assert.expect(1);
if (func) {
var args,
expected = [1, 0, array];
func(array, function() {
// Capture only the first invocation's arguments.
args || (args = slice.call(arguments));
});
if (lodashStable.includes(rightMethods, methodName)) {
// Right variants start from the last element/index.
expected[0] = 3;
expected[1] = 2;
}
if (lodashStable.includes(objectMethods, methodName)) {
// Object methods pass keys as strings.
expected[1] += '';
}
if (isBy) {
// `*By` methods pass only the value; omitBy/pickBy also pass the key.
expected.length = isOmitPick ? 2 : 1;
}
assert.deepEqual(args, expected);
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.' + methodName + '` should treat sparse arrays as dense', function(assert) {
assert.expect(1);
if (func) {
// Hole at index 1 must still be visited with `undefined`.
var array = [1];
array[2] = 3;
var expected = lodashStable.includes(objectMethods, methodName)
? [[1, '0', array], [undefined, '1', array], [3, '2', array]]
: [[1, 0, array], [undefined, 1, array], [3, 2, array]];
if (isBy) {
expected = lodashStable.map(expected, function(args) {
return args.slice(0, isOmitPick ? 2 : 1);
});
}
else if (lodashStable.includes(objectMethods, methodName)) {
expected = lodashStable.map(expected, function(args) {
args[1] += '';
return args;
});
}
if (lodashStable.includes(rightMethods, methodName)) {
expected.reverse();
}
var argsList = [];
func(array, function() {
argsList.push(slice.call(arguments));
// Keep find/some iterating by not returning a truthy match.
return !(isFind || isSome);
});
assert.deepEqual(argsList, expected);
}
else {
skipAssert(assert);
}
});
});
lodashStable.each(lodashStable.difference(methods, objectMethods), function(methodName) {
var array = [1, 2, 3],
func = _[methodName],
isEvery = methodName == 'every';
// Custom expando property on an array must be ignored by array iteration.
array.a = 1;
QUnit.test('`_.' + methodName + '` should not iterate custom properties on arrays', function(assert) {
assert.expect(1);
if (func) {
var keys = [];
func(array, function(value, key) {
keys.push(key);
// `every` stops on a falsey return, so keep it truthy there.
return isEvery;
});
assert.notOk(lodashStable.includes(keys, 'a'));
}
else {
skipAssert(assert);
}
});
});
lodashStable.each(lodashStable.difference(methods, unwrappedMethods), function(methodName) {
var array = [1, 2, 3],
isBaseEach = methodName == '_baseEach';
QUnit.test('`_.' + methodName + '` should return a wrapped value when implicitly chaining', function(assert) {
assert.expect(1);
if (!(isBaseEach || isNpm)) {
var wrapped = _(array)[methodName](noop);
assert.ok(wrapped instanceof _);
}
else {
skipAssert(assert);
}
});
});
lodashStable.each(unwrappedMethods, function(methodName) {
var array = [1, 2, 3];
QUnit.test('`_.' + methodName + '` should return an unwrapped value when implicitly chaining', function(assert) {
assert.expect(1);
if (!isNpm) {
var actual = _(array)[methodName](noop);
assert.notOk(actual instanceof _);
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.' + methodName + '` should return a wrapped value when explicitly chaining', function(assert) {
assert.expect(2);
if (!isNpm) {
var wrapped = _(array).chain(),
actual = wrapped[methodName](noop);
assert.ok(actual instanceof _);
// Chaining must produce a new wrapper, not mutate the existing one.
assert.notStrictEqual(actual, wrapped);
}
else {
skipAssert(assert, 2);
}
});
});
lodashStable.each(lodashStable.difference(methods, arrayMethods, forInMethods), function(methodName) {
var func = _[methodName];
QUnit.test('`_.' + methodName + '` iterates over own string keyed properties of objects', function(assert) {
assert.expect(1);
function Foo() {
this.a = 1;
}
Foo.prototype.b = 2;
if (func) {
var values = [];
func(new Foo, function(value) { values.push(value); });
// Inherited `b` must not be visited by non-forIn methods.
assert.deepEqual(values, [1]);
}
else {
skipAssert(assert);
}
});
});
lodashStable.each(iterationMethods, function(methodName) {
var array = [1, 2, 3],
func = _[methodName];
QUnit.test('`_.' + methodName + '` should return the collection', function(assert) {
assert.expect(1);
if (func) {
assert.strictEqual(func(array, Boolean), array);
}
else {
skipAssert(assert);
}
});
});
lodashStable.each(collectionMethods, function(methodName) {
var func = _[methodName];
QUnit.test('`_.' + methodName + '` should use `isArrayLike` to determine whether a value is array-like', function(assert) {
assert.expect(3);
if (func) {
// Returns true when the iteratee fired, i.e. the value was iterated as
// an object (its invalid `length` did not make it array-like).
var isIteratedAsObject = function(object) {
var result = false;
func(object, function() { result = true; }, 0);
return result;
};
// All of these `length` values are invalid array lengths.
var values = [-1, '1', 1.1, Object(1), MAX_SAFE_INTEGER + 1],
expected = lodashStable.map(values, stubTrue);
var actual = lodashStable.map(values, function(length) {
return isIteratedAsObject({ 'length': length });
});
// A function with `length` 1 (its arity) must not be treated as array-like.
var Foo = function(a) {};
Foo.a = 1;
assert.deepEqual(actual, expected);
assert.ok(isIteratedAsObject(Foo));
// A genuine empty array-like iterates zero times.
assert.notOk(isIteratedAsObject({ 'length': 0 }));
}
else {
skipAssert(assert, 3);
}
});
});
lodashStable.each(methods, function(methodName) {
var func = _[methodName],
isFind = /^find/.test(methodName),
isSome = methodName == 'some',
isReduce = /^reduce/.test(methodName);
QUnit.test('`_.' + methodName + '` should ignore changes to `length`', function(assert) {
assert.expect(1);
if (func) {
var count = 0,
array = [1];
func(array, function() {
// Growing the array mid-iteration must not extend the iteration.
if (++count == 1) {
array.push(2);
}
return !(isFind || isSome);
}, isReduce ? array : null);
assert.strictEqual(count, 1);
}
else {
skipAssert(assert);
}
});
});
lodashStable.each(lodashStable.difference(lodashStable.union(methods, collectionMethods), arrayMethods), function(methodName) {
var func = _[methodName],
isFind = /^find/.test(methodName),
isSome = methodName == 'some',
isReduce = /^reduce/.test(methodName);
QUnit.test('`_.' + methodName + '` should ignore added `object` properties', function(assert) {
assert.expect(1);
if (func) {
var count = 0,
object = { 'a': 1 };
func(object, function() {
// Properties added during iteration must not be visited.
if (++count == 1) {
object.b = 2;
}
return !(isFind || isSome);
}, isReduce ? object : null);
assert.strictEqual(count, 1);
}
else {
skipAssert(assert);
}
});
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('object assignments');
// Shared contract tests for every object-assignment method. `isDefaults`
// matters because `defaults*` never overwrite existing destination keys.
lodashStable.each(['assign', 'assignIn', 'defaults', 'defaultsDeep', 'merge'], function(methodName) {
var func = _[methodName],
isAssign = methodName == 'assign',
isDefaults = /^defaults/.test(methodName);
QUnit.test('`_.' + methodName + '` should coerce primitives to objects', function(assert) {
assert.expect(1);
var expected = lodashStable.map(primitives, function(value) {
var object = Object(value);
object.a = 1;
return object;
});
var actual = lodashStable.map(primitives, function(value) {
return func(value, { 'a': 1 });
});
assert.deepEqual(actual, expected);
});
QUnit.test('`_.' + methodName + '` should assign own ' + (isAssign ? '' : 'and inherited ') + 'string keyed source properties', function(assert) {
assert.expect(1);
function Foo() {
this.a = 1;
}
Foo.prototype.b = 2;
// Only `assign` restricts itself to own source properties.
var expected = isAssign ? { 'a': 1 } : { 'a': 1, 'b': 2 };
assert.deepEqual(func({}, new Foo), expected);
});
QUnit.test('`_.' + methodName + '` should not skip a trailing function source', function(assert) {
assert.expect(1);
// A trailing function must be treated as a source, not a customizer.
function fn() {}
fn.b = 2;
assert.deepEqual(func({}, { 'a': 1 }, fn), { 'a': 1, 'b': 2 });
});
QUnit.test('`_.' + methodName + '` should not error on nullish sources', function(assert) {
assert.expect(1);
try {
assert.deepEqual(func({ 'a': 1 }, undefined, { 'b': 2 }, null), { 'a': 1, 'b': 2 });
} catch (e) {
assert.ok(false, e.message);
}
});
QUnit.test('`_.' + methodName + '` should create an object when `object` is nullish', function(assert) {
assert.expect(2);
var source = { 'a': 1 },
values = [null, undefined],
expected = lodashStable.map(values, stubTrue);
var actual = lodashStable.map(values, function(value) {
// Result must be a fresh object equal to, but not the same as, `source`.
var object = func(value, source);
return object !== source && lodashStable.isEqual(object, source);
});
assert.deepEqual(actual, expected);
actual = lodashStable.map(values, function(value) {
return lodashStable.isEqual(func(value), {});
});
assert.deepEqual(actual, expected);
});
QUnit.test('`_.' + methodName + '` should work as an iteratee for methods like `_.reduce`', function(assert) {
assert.expect(2);
var array = [{ 'a': 1 }, { 'b': 2 }, { 'c': 3 }],
// `defaults*` keep the accumulator's preexisting `a: 0`.
expected = { 'a': isDefaults ? 0 : 1, 'b': 2, 'c': 3 };
function fn() {};
fn.a = array[0];
fn.b = array[1];
fn.c = array[2];
assert.deepEqual(lodashStable.reduce(array, func, { 'a': 0 }), expected);
assert.deepEqual(lodashStable.reduce(fn, func, { 'a': 0 }), expected);
});
QUnit.test('`_.' + methodName + '` should not return the existing wrapped value when chaining', function(assert) {
assert.expect(1);
if (!isNpm) {
var wrapped = _({ 'a': 1 }),
actual = wrapped[methodName]({ 'b': 2 });
assert.notStrictEqual(actual, wrapped);
}
else {
skipAssert(assert);
}
});
});
lodashStable.each(['assign', 'assignIn', 'merge'], function(methodName) {
var func = _[methodName];
QUnit.test('`_.' + methodName + '` should not treat `object` as `source`', function(assert) {
assert.expect(1);
// The destination's inherited properties must not become own properties.
function Foo() {}
Foo.prototype.a = 1;
var actual = func(new Foo, { 'b': 2 });
assert.notOk(_.has(actual, 'a'));
});
});
lodashStable.each(['assign', 'assignIn', 'assignInWith', 'assignWith', 'defaults', 'defaultsDeep', 'merge', 'mergeWith'], function(methodName) {
var func = _[methodName];
QUnit.test('`_.' + methodName + '` should not assign values that are the same as their destinations', function(assert) {
assert.expect(4);
lodashStable.each(['a', ['a'], { 'a': 1 }, NaN], function(value) {
var object = {},
pass = true;
// The setter trips `pass` to false if an (unnecessary) assignment occurs.
defineProperty(object, 'a', {
'configurable': true,
'enumerable': true,
'get': lodashStable.constant(value),
'set': function() { pass = false; }
});
func(object, { 'a': value });
assert.ok(pass);
});
});
});
// `*With` variants: verify customizer invocation details.
lodashStable.each(['assignWith', 'assignInWith', 'mergeWith'], function(methodName) {
var func = _[methodName],
isMergeWith = methodName == 'mergeWith';
QUnit.test('`_.' + methodName + '` should provide correct `customizer` arguments', function(assert) {
assert.expect(3);
// Customizer receives (objValue, srcValue, key, object, source).
var args,
object = { 'a': 1 },
source = { 'a': 2 },
expected = lodashStable.map([1, 2, 'a', object, source], lodashStable.cloneDeep);
func(object, source, function() {
args || (args = lodashStable.map(slice.call(arguments, 0, 5), lodashStable.cloneDeep));
});
assert.deepEqual(args, expected, 'primitive values');
var argsList = [],
objectValue = [1, 2],
sourceValue = { 'b': 2 };
object = { 'a': objectValue };
source = { 'a': sourceValue };
expected = [lodashStable.map([objectValue, sourceValue, 'a', object, source], lodashStable.cloneDeep)];
if (isMergeWith) {
// `mergeWith` recurses into nested values, producing a second invocation.
expected.push(lodashStable.map([undefined, 2, 'b', objectValue, sourceValue], lodashStable.cloneDeep));
}
func(object, source, function() {
argsList.push(lodashStable.map(slice.call(arguments, 0, 5), lodashStable.cloneDeep));
});
assert.deepEqual(argsList, expected, 'object values');
args = undefined;
object = { 'a': 1 };
source = { 'b': 2 };
expected = lodashStable.map([undefined, 2, 'b', object, source], lodashStable.cloneDeep);
func(object, source, function() {
args || (args = lodashStable.map(slice.call(arguments, 0, 5), lodashStable.cloneDeep));
});
assert.deepEqual(args, expected, 'undefined properties');
});
QUnit.test('`_.' + methodName + '` should not treat the second argument as a `customizer` callback', function(assert) {
assert.expect(2);
// A function in source position contributes its own properties instead.
function callback() {}
callback.b = 2;
var actual = func({ 'a': 1 }, callback);
assert.deepEqual(actual, { 'a': 1, 'b': 2 });
actual = func({ 'a': 1 }, callback, { 'c': 3 });
assert.deepEqual(actual, { 'a': 1, 'b': 2, 'c': 3 });
});
});
/*--------------------------------------------------------------------------*/
QUnit.module('exit early');
lodashStable.each(['_baseEach', 'forEach', 'forEachRight', 'forIn', 'forInRight', 'forOwn', 'forOwnRight', 'transform'], function(methodName) {
  var method = _[methodName];
  // Returning `false` from the iteratee must stop iteration after the first
  // visited element. For `_.transform` the first iteratee argument is the
  // accumulator (an array here), so the element arrives as the second argument.
  QUnit.test('`_.' + methodName + '` can exit early when iterating arrays', function(assert) {
    assert.expect(1);
    if (!method) {
      skipAssert(assert);
      return;
    }
    var seen = [];
    method([1, 2, 3], function(value, other) {
      seen.push(lodashStable.isArray(value) ? other : value);
      return false;
    });
    // Right-to-left variants start at the last element.
    assert.deepEqual(seen, [lodashStable.endsWith(methodName, 'Right') ? 3 : 1]);
  });
  QUnit.test('`_.' + methodName + '` can exit early when iterating objects', function(assert) {
    assert.expect(1);
    if (!method) {
      skipAssert(assert);
      return;
    }
    var seen = [];
    method({ 'a': 1, 'b': 2, 'c': 3 }, function(value, other) {
      seen.push(lodashStable.isArray(value) ? other : value);
      return false;
    });
    assert.strictEqual(seen.length, 1);
  });
});
/*--------------------------------------------------------------------------*/
QUnit.module('`__proto__` property bugs');
// Prototype-pollution regression tests: "__proto__" must behave as an
// ordinary key and merges must never reach builtin prototypes. Several tests
// write to and then delete properties on shared prototype fixtures
// (`objectProto`, `funcProto`), so statement order here is significant.
(function() {
QUnit.test('should work with the "__proto__" key in internal data objects', function(assert) {
assert.expect(4);
var stringLiteral = '__proto__',
stringObject = Object(stringLiteral),
expected = [stringLiteral, stringObject];
// Large input forces the methods onto their hash-based fast paths.
var largeArray = lodashStable.times(LARGE_ARRAY_SIZE, function(count) {
return isEven(count) ? stringLiteral : stringObject;
});
assert.deepEqual(_.difference(largeArray, largeArray), []);
assert.deepEqual(_.intersection(largeArray, largeArray), expected);
assert.deepEqual(_.uniq(largeArray), expected);
assert.deepEqual(_.without.apply(_, [largeArray].concat(largeArray)), []);
});
QUnit.test('should treat "__proto__" as a regular key in assignments', function(assert) {
assert.expect(2);
var methods = [
'assign',
'assignIn',
'defaults',
'defaultsDeep',
'merge'
];
// A null-prototype source can carry "__proto__" as an own key.
var source = create(null);
source.__proto__ = [];
var expected = lodashStable.map(methods, stubFalse);
var actual = lodashStable.map(methods, function(methodName) {
// If "__proto__" were assigned as a prototype, the result would be an Array.
var result = _[methodName]({}, source);
return result instanceof Array;
});
assert.deepEqual(actual, expected);
actual = _.groupBy([{ 'a': '__proto__' }], 'a');
assert.notOk(actual instanceof Array);
});
QUnit.test('should not merge "__proto__" properties', function(assert) {
assert.expect(1);
if (JSON) {
// JSON.parse yields an own "__proto__" key, unlike an object literal.
_.merge({}, JSON.parse('{"__proto__":{"a":1}}'));
var actual = 'a' in objectProto;
// Clean up in case the merge did pollute Object.prototype.
delete objectProto.a;
assert.notOk(actual);
} else {
skipAssert(assert);
}
});
QUnit.test('should not indirectly merge builtin prototype properties', function(assert) {
assert.expect(2);
// Reaching prototypes via `constructor.prototype` chains must also be blocked.
_.merge({}, { 'toString': { 'constructor': { 'prototype': { 'a': 1 } } } });
var actual = 'a' in funcProto;
delete funcProto.a;
assert.notOk(actual);
_.merge({}, { 'constructor': { 'prototype': { 'a': 1 } } });
actual = 'a' in objectProto;
delete objectProto.a;
assert.notOk(actual);
});
QUnit.test('should not indirectly merge `Object` properties', function(assert) {
assert.expect(1);
// Merging into `constructor` must not add statics onto `Object` itself.
_.merge({}, { 'constructor': { 'a': 1 } });
var actual = 'a' in Object;
delete Object.a;
assert.notOk(actual);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.fromPairs');
(function() {
  QUnit.test('should accept a two dimensional array', function(assert) {
    assert.expect(1);
    var pairs = [['a', 1], ['b', 2]],
        expected = { 'a': 1, 'b': 2 };
    assert.deepEqual(_.fromPairs(pairs), expected);
  });
  QUnit.test('should accept a falsey `array`', function(assert) {
    assert.expect(1);
    // Every falsey input should yield an empty object without throwing.
    var expected = lodashStable.map(falsey, stubObject);
    var actual = lodashStable.map(falsey, function(value, index) {
      try {
        // Index 0 exercises the zero-argument call.
        return index ? _.fromPairs(value) : _.fromPairs();
      } catch (e) {}
    });
    assert.deepEqual(actual, expected);
  });
  QUnit.test('should not support deep paths', function(assert) {
    assert.expect(1);
    // A dotted key must be stored literally, not expanded into nesting.
    assert.deepEqual(_.fromPairs([['a.b', 1]]), { 'a.b': 1 });
  });
  QUnit.test('should support consuming the return value of `_.toPairs`', function(assert) {
    assert.expect(1);
    // fromPairs(toPairs(x)) round-trips a plain object.
    var object = { 'a.b': 1 };
    assert.deepEqual(_.fromPairs(_.toPairs(object)), object);
  });
  QUnit.test('should work in a lazy sequence', function(assert) {
    assert.expect(1);
    if (!isNpm) {
      var pairs = lodashStable.times(LARGE_ARRAY_SIZE, function(index) {
        return ['key' + index, index];
      });
      // The lazy chain must match the equivalent eager computation.
      var lazy = _(pairs).fromPairs().map(square).filter(isEven).take().value(),
          eager = _.take(_.filter(_.map(_.fromPairs(pairs), square), isEven));
      assert.deepEqual(lazy, eager);
    }
    else {
      skipAssert(assert);
    }
  });
})();
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.functions');
(function() {
  QUnit.test('should return the function names of an object', function(assert) {
    assert.expect(1);
    // Only keys whose values are functions should be reported.
    var source = { 'a': 'a', 'b': identity, 'c': /x/, 'd': noop };
    assert.deepEqual(_.functions(source).sort(), ['b', 'd']);
  });
  QUnit.test('should not include inherited functions', function(assert) {
    assert.expect(1);
    function Foo() {
      this.a = identity;
      this.b = 'b';
    }
    // `c` lives on the prototype and must be excluded.
    Foo.prototype.c = noop;
    assert.deepEqual(_.functions(new Foo), ['a']);
  });
})();
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.groupBy');
(function() {
var array = [6.1, 4.2, 6.3];
QUnit.test('should transform keys by `iteratee`', function(assert) {
assert.expect(1);
var actual = _.groupBy(array, Math.floor);
assert.deepEqual(actual, { '4': [4.2], '6': [6.1, 6.3] });
});
QUnit.test('should use `_.identity` when `iteratee` is nullish', function(assert) {
assert.expect(1);
// `values` starts with a hole: index 0 exercises the one-argument call.
var array = [6, 4, 6],
values = [, null, undefined],
expected = lodashStable.map(values, lodashStable.constant({ '4': [4], '6': [6, 6] }));
var actual = lodashStable.map(values, function(value, index) {
return index ? _.groupBy(array, value) : _.groupBy(array);
});
assert.deepEqual(actual, expected);
});
QUnit.test('should work with `_.property` shorthands', function(assert) {
assert.expect(1);
// A string iteratee groups by that property of each element.
var actual = _.groupBy(['one', 'two', 'three'], 'length');
assert.deepEqual(actual, { '3': ['one', 'two'], '5': ['three'] });
});
QUnit.test('should only add values to own, not inherited, properties', function(assert) {
assert.expect(2);
// Keys colliding with Object.prototype members must become own properties.
var actual = _.groupBy(array, function(n) {
return Math.floor(n) > 4 ? 'hasOwnProperty' : 'constructor';
});
assert.deepEqual(actual.constructor, [4.2]);
assert.deepEqual(actual.hasOwnProperty, [6.1, 6.3]);
});
QUnit.test('should work with a number for `iteratee`', function(assert) {
assert.expect(2);
// A numeric iteratee groups by that index of each element.
var array = [
[1, 'a'],
[2, 'a'],
[2, 'b']
];
assert.deepEqual(_.groupBy(array, 0), { '1': [[1, 'a']], '2': [[2, 'a'], [2, 'b']] });
assert.deepEqual(_.groupBy(array, 1), { 'a': [[1, 'a'], [2, 'a']], 'b': [[2, 'b']] });
});
QUnit.test('should work with an object for `collection`', function(assert) {
assert.expect(1);
var actual = _.groupBy({ 'a': 6.1, 'b': 4.2, 'c': 6.3 }, Math.floor);
assert.deepEqual(actual, { '4': [4.2], '6': [6.1, 6.3] });
});
QUnit.test('should work in a lazy sequence', function(assert) {
assert.expect(1);
if (!isNpm) {
// Input with duplicates so grouping produces multi-element buckets.
var array = lodashStable.range(LARGE_ARRAY_SIZE).concat(
lodashStable.range(Math.floor(LARGE_ARRAY_SIZE / 2), LARGE_ARRAY_SIZE),
lodashStable.range(Math.floor(LARGE_ARRAY_SIZE / 1.5), LARGE_ARRAY_SIZE)
);
// NOTE: `iteratee` mutates each bucket in place, so evaluation order
// between the lazy and eager pipelines must match exactly.
var iteratee = function(value) { value.push(value[0]); return value; },
predicate = function(value) { return isEven(value[0]); },
actual = _(array).groupBy().map(iteratee).filter(predicate).take().value();
assert.deepEqual(actual, _.take(_.filter(lodashStable.map(_.groupBy(array), iteratee), predicate)));
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.gt');
(function() {
  QUnit.test('should return `true` if `value` > `other`', function(assert) {
    assert.expect(2);
    // Works for numbers and for lexicographic string comparison alike.
    assert.strictEqual(_.gt(3, 1), true);
    assert.strictEqual(_.gt('def', 'abc'), true);
  });
  QUnit.test('should return `false` if `value` is <= `other`', function(assert) {
    assert.expect(4);
    // Covers both the strictly-less and the equal cases.
    assert.strictEqual(_.gt(1, 3), false);
    assert.strictEqual(_.gt(3, 3), false);
    assert.strictEqual(_.gt('abc', 'def'), false);
    assert.strictEqual(_.gt('def', 'def'), false);
  });
})();
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.gte');
(function() {
  QUnit.test('should return `true` if `value` >= `other`', function(assert) {
    assert.expect(4);
    // Covers both the strictly-greater and the equal cases, for numbers
    // and for lexicographic string comparison.
    assert.strictEqual(_.gte(3, 1), true);
    assert.strictEqual(_.gte(3, 3), true);
    assert.strictEqual(_.gte('def', 'abc'), true);
    assert.strictEqual(_.gte('def', 'def'), true);
  });
  QUnit.test('should return `false` if `value` is less than `other`', function(assert) {
    assert.expect(2);
    assert.strictEqual(_.gte(1, 3), false);
    assert.strictEqual(_.gte('abc', 'def'), false);
  });
})();
/*--------------------------------------------------------------------------*/
QUnit.module('has methods');
// Shared contract tests for `_.has` (own properties only) and `_.hasIn`
// (own + inherited). Sparse fixtures are built once per method and mutated
// with `delete`, so they must not be reused across assertions that refill them.
lodashStable.each(['has', 'hasIn'], function(methodName) {
var func = _[methodName],
isHas = methodName == 'has',
sparseArgs = toArgs([1]),
sparseArray = Array(1),
sparseString = Object('a');
// Punch holes so index 0 exists in `length` terms but is not an own key.
delete sparseArgs[0];
delete sparseString[0];
QUnit.test('`_.' + methodName + '` should check for own properties', function(assert) {
assert.expect(2);
var object = { 'a': 1 };
// Both the string and single-element-array forms of the path.
lodashStable.each(['a', ['a']], function(path) {
assert.strictEqual(func(object, path), true);
});
});
QUnit.test('`_.' + methodName + '` should not use the `hasOwnProperty` method of `object`', function(assert) {
assert.expect(1);
// A nulled-out `hasOwnProperty` on the object must not break the check.
var object = { 'hasOwnProperty': null, 'a': 1 };
assert.strictEqual(func(object, 'a'), true);
});
QUnit.test('`_.' + methodName + '` should support deep paths', function(assert) {
assert.expect(4);
var object = { 'a': { 'b': 2 } };
lodashStable.each(['a.b', ['a', 'b']], function(path) {
assert.strictEqual(func(object, path), true);
});
lodashStable.each(['a.a', ['a', 'a']], function(path) {
assert.strictEqual(func(object, path), false);
});
});
QUnit.test('`_.' + methodName + '` should coerce `path` to a string', function(assert) {
assert.expect(2);
// Non-string paths are stringified: null -> 'null', {} -> '[object Object]', etc.
function fn() {}
fn.toString = lodashStable.constant('fn');
var object = { 'null': 1 , 'undefined': 2, 'fn': 3, '[object Object]': 4 },
paths = [null, undefined, fn, {}],
expected = lodashStable.map(paths, stubTrue);
lodashStable.times(2, function(index) {
var actual = lodashStable.map(paths, function(path) {
return func(object, index ? [path] : path);
});
assert.deepEqual(actual, expected);
});
});
QUnit.test('`_.' + methodName + '` should work with `arguments` objects', function(assert) {
assert.expect(1);
assert.strictEqual(func(args, 1), true);
});
QUnit.test('`_.' + methodName + '` should work with a non-string `path`', function(assert) {
assert.expect(2);
var array = [1, 2, 3];
lodashStable.each([1, [1]], function(path) {
assert.strictEqual(func(array, path), true);
});
});
QUnit.test('`_.' + methodName + '` should preserve the sign of `0`', function(assert) {
assert.expect(1);
// -0 and 0 stringify to distinct keys '-0' and '0'.
var object = { '-0': 'a', '0': 'b' },
props = [-0, Object(-0), 0, Object(0)],
expected = lodashStable.map(props, stubTrue);
var actual = lodashStable.map(props, function(key) {
return func(object, key);
});
assert.deepEqual(actual, expected);
});
QUnit.test('`_.' + methodName + '` should work with a symbol `path`', function(assert) {
assert.expect(2);
function Foo() {}
if (Symbol) {
Foo.prototype[symbol] = 1;
// A non-enumerable symbol property must still be detected.
var symbol2 = Symbol('b');
defineProperty(Foo.prototype, symbol2, {
'configurable': true,
'enumerable': false,
'writable': true,
'value': 2
});
// For `has` the symbols must be own, so test the prototype itself.
var object = isHas ? Foo.prototype : new Foo;
assert.strictEqual(func(object, symbol), true);
assert.strictEqual(func(object, symbol2), true);
}
else {
skipAssert(assert, 2);
}
});
QUnit.test('`_.' + methodName + '` should check for a key over a path', function(assert) {
assert.expect(2);
// An existing literal key 'a.b' wins over path interpretation.
var object = { 'a.b': 1 };
lodashStable.each(['a.b', ['a.b']], function(path) {
assert.strictEqual(func(object, path), true);
});
});
QUnit.test('`_.' + methodName + '` should return `true` for indexes of sparse values', function(assert) {
assert.expect(1);
var values = [sparseArgs, sparseArray, sparseString],
expected = lodashStable.map(values, stubTrue);
var actual = lodashStable.map(values, function(value) {
return func(value, 0);
});
assert.deepEqual(actual, expected);
});
QUnit.test('`_.' + methodName + '` should return `true` for indexes of sparse values with deep paths', function(assert) {
assert.expect(1);
var values = [sparseArgs, sparseArray, sparseString],
expected = lodashStable.map(values, lodashStable.constant([true, true]));
var actual = lodashStable.map(values, function(value) {
return lodashStable.map(['a[0]', ['a', '0']], function(path) {
return func({ 'a': value }, path);
});
});
assert.deepEqual(actual, expected);
});
QUnit.test('`_.' + methodName + '` should return `' + (isHas ? 'false' : 'true') + '` for inherited properties', function(assert) {
assert.expect(2);
function Foo() {}
Foo.prototype.a = 1;
// This is the behavioral split between `has` and `hasIn`.
lodashStable.each(['a', ['a']], function(path) {
assert.strictEqual(func(new Foo, path), !isHas);
});
});
QUnit.test('`_.' + methodName + '` should return `' + (isHas ? 'false' : 'true') + '` for nested inherited properties', function(assert) {
assert.expect(2);
function Foo() {}
Foo.prototype.a = { 'b': 1 };
lodashStable.each(['a.b', ['a', 'b']], function(path) {
assert.strictEqual(func(new Foo, path), !isHas);
});
});
QUnit.test('`_.' + methodName + '` should return `false` when `object` is nullish', function(assert) {
assert.expect(2);
var values = [null, undefined],
expected = lodashStable.map(values, stubFalse);
lodashStable.each(['constructor', ['constructor']], function(path) {
var actual = lodashStable.map(values, function(value) {
return func(value, path);
});
assert.deepEqual(actual, expected);
});
});
QUnit.test('`_.' + methodName + '` should return `false` for deep paths when `object` is nullish', function(assert) {
assert.expect(2);
var values = [null, undefined],
expected = lodashStable.map(values, stubFalse);
lodashStable.each(['constructor.prototype.valueOf', ['constructor', 'prototype', 'valueOf']], function(path) {
var actual = lodashStable.map(values, function(value) {
return func(value, path);
});
assert.deepEqual(actual, expected);
});
});
QUnit.test('`_.' + methodName + '` should return `false` for nullish values of nested objects', function(assert) {
assert.expect(2);
// `values` starts with a hole: index 0 exercises the missing-key case.
var values = [, null, undefined],
expected = lodashStable.map(values, stubFalse);
lodashStable.each(['a.b', ['a', 'b']], function(path) {
var actual = lodashStable.map(values, function(value, index) {
var object = index ? { 'a': value } : {};
return func(object, path);
});
assert.deepEqual(actual, expected);
});
});
QUnit.test('`_.' + methodName + '` should return `false` over sparse values of deep paths', function(assert) {
assert.expect(1);
// Continuing a path through a hole must fail even though the index "exists".
var values = [sparseArgs, sparseArray, sparseString],
expected = lodashStable.map(values, lodashStable.constant([false, false]));
var actual = lodashStable.map(values, function(value) {
return lodashStable.map(['a[0].b', ['a', '0', 'b']], function(path) {
return func({ 'a': value }, path);
});
});
assert.deepEqual(actual, expected);
});
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.head');
(function() {
var array = [1, 2, 3, 4];
QUnit.test('should return the first element', function(assert) {
assert.expect(1);
assert.strictEqual(_.head(array), 1);
});
QUnit.test('should return `undefined` when querying empty arrays', function(assert) {
assert.expect(1);
// Plant a value on Array.prototype[0] to prove `head` does not read
// through the prototype chain; restore the prototype afterwards.
arrayProto[0] = 1;
assert.strictEqual(_.head([]), undefined);
arrayProto.length = 0;
});
QUnit.test('should work as an iteratee for methods like `_.map`', function(assert) {
assert.expect(1);
var array = [[1, 2, 3], [4, 5, 6], [7, 8, 9]],
actual = lodashStable.map(array, _.head);
assert.deepEqual(actual, [1, 4, 7]);
});
QUnit.test('should be aliased', function(assert) {
assert.expect(1);
// `_.first` must be the same function object as `_.head`.
assert.strictEqual(_.first, _.head);
});
QUnit.test('should return an unwrapped value when implicitly chaining', function(assert) {
assert.expect(2);
if (!isNpm) {
var wrapped = _(array);
assert.strictEqual(wrapped.head(), 1);
assert.strictEqual(wrapped.first(), 1);
}
else {
skipAssert(assert, 2);
}
});
QUnit.test('should return a wrapped value when explicitly chaining', function(assert) {
assert.expect(2);
if (!isNpm) {
var wrapped = _(array).chain();
assert.ok(wrapped.head() instanceof _);
assert.ok(wrapped.first() instanceof _);
}
else {
skipAssert(assert, 2);
}
});
QUnit.test('should not execute immediately when explicitly chaining', function(assert) {
assert.expect(2);
if (!isNpm) {
// In an explicit chain the result is deferred: the wrapper still holds
// the original array rather than the computed head.
var wrapped = _(array).chain();
assert.strictEqual(wrapped.head().__wrapped__, array);
assert.strictEqual(wrapped.first().__wrapped__, array);
}
else {
skipAssert(assert, 2);
}
});
QUnit.test('should work in a lazy sequence', function(assert) {
assert.expect(4);
if (!isNpm) {
// Large arrays take the lazy path; small ones stay eager. Both must
// agree with the equivalent eager computation.
var largeArray = lodashStable.range(LARGE_ARRAY_SIZE),
smallArray = array;
lodashStable.each(['head', 'first'], function(methodName) {
lodashStable.times(2, function(index) {
var array = index ? largeArray : smallArray,
actual = _(array).filter(isEven)[methodName]();
assert.strictEqual(actual, _[methodName](_.filter(array, isEven)));
});
});
}
else {
skipAssert(assert, 4);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.identity');
(function() {
  QUnit.test('should return the first argument given', function(assert) {
    assert.expect(1);
    // Must return the exact same reference, not a copy.
    var object = { 'name': 'fred' };
    assert.strictEqual(_.identity(object), object);
  });
})();
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.includes');
(function() {
// NOTE: this IIFE is invoked with `(1, 2, 3, 4)` at the bottom, so the
// `arguments` object referenced below is a live fixture holding 1-4.
lodashStable.each({
'an `arguments` object': arguments,
'an array': [1, 2, 3, 4],
'an object': { 'a': 1, 'b': 2, 'c': 3, 'd': 4 },
'a string': '1234'
},
function(collection, key) {
QUnit.test('should work with ' + key + ' and return `true` for matched values', function(assert) {
assert.expect(1);
assert.strictEqual(_.includes(collection, 3), true);
});
QUnit.test('should work with ' + key + ' and return `false` for unmatched values', function(assert) {
assert.expect(1);
assert.strictEqual(_.includes(collection, 5), false);
});
// A fractional `fromIndex` of 1.2 is floored to 1, where value 2 is found.
QUnit.test('should work with ' + key + ' and floor `position` values', function(assert) {
assert.expect(1);
assert.strictEqual(_.includes(collection, 2, 1.2), true);
});
QUnit.test('should work with ' + key + ' and return an unwrapped value implicitly when chaining', function(assert) {
assert.expect(1);
if (!isNpm) {
assert.strictEqual(_(collection).includes(3), true);
}
else {
skipAssert(assert);
}
});
QUnit.test('should work with ' + key + ' and return a wrapped value when explicitly chaining', function(assert) {
assert.expect(1);
if (!isNpm) {
assert.ok(_(collection).chain().includes(3) instanceof _);
}
else {
skipAssert(assert);
}
});
});
// String collections support substring search, for both primitive strings
// and String objects.
lodashStable.each({
'literal': 'abc',
'object': Object('abc')
},
function(collection, key) {
QUnit.test('should work with a string ' + key + ' for `collection`', function(assert) {
assert.expect(2);
assert.strictEqual(_.includes(collection, 'bc'), true);
assert.strictEqual(_.includes(collection, 'd'), false);
});
});
QUnit.test('should return `false` for empty collections', function(assert) {
assert.expect(1);
var expected = lodashStable.map(empties, stubFalse);
var actual = lodashStable.map(empties, function(value) {
try {
return _.includes(value);
} catch (e) {}
});
assert.deepEqual(actual, expected);
});
// An out-of-bounds `fromIndex` matches nothing, except that the empty
// string is still "found" when `fromIndex` equals the string's length.
QUnit.test('should work with a string and a `fromIndex` >= `length`', function(assert) {
assert.expect(1);
var string = '1234',
length = string.length,
indexes = [4, 6, Math.pow(2, 32), Infinity];
var expected = lodashStable.map(indexes, function(index) {
return [false, false, index == length];
});
var actual = lodashStable.map(indexes, function(fromIndex) {
return [
_.includes(string, 1, fromIndex),
_.includes(string, undefined, fromIndex),
_.includes(string, '', fromIndex)
];
});
assert.deepEqual(actual, expected);
});
// SameValueZero comparison semantics: `NaN` matches `NaN`, and `-0`
// matches `0` in either direction.
QUnit.test('should match `NaN`', function(assert) {
assert.expect(1);
assert.strictEqual(_.includes([1, NaN, 3], NaN), true);
});
QUnit.test('should match `-0` as `0`', function(assert) {
assert.expect(2);
assert.strictEqual(_.includes([-0], 0), true);
assert.strictEqual(_.includes([0], -0), true);
});
QUnit.test('should work as an iteratee for methods like `_.every`', function(assert) {
assert.expect(1);
var array = [2, 3, 1],
values = [1, 2, 3];
assert.ok(lodashStable.every(values, lodashStable.partial(_.includes, array)));
});
}(1, 2, 3, 4));
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.initial');

(function() {
  // Shared fixture: `_.initial` should return all but the last element.
  var array = [1, 2, 3];

  QUnit.test('should accept a falsey `array`', function(assert) {
    assert.expect(1);

    var expected = lodashStable.map(falsey, stubArray);

    var actual = lodashStable.map(falsey, function(array, index) {
      try {
        // Index 0 exercises the zero-argument call; the rest pass the falsey value.
        return index ? _.initial(array) : _.initial();
      } catch (e) {}
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should exclude last element', function(assert) {
    assert.expect(1);
    assert.deepEqual(_.initial(array), [1, 2]);
  });

  // FIX: description previously read "should return an empty when querying
  // empty arrays" — corrected to "an empty array".
  QUnit.test('should return an empty array when querying empty arrays', function(assert) {
    assert.expect(1);
    assert.deepEqual(_.initial([]), []);
  });

  QUnit.test('should work as an iteratee for methods like `_.map`', function(assert) {
    assert.expect(1);

    var array = [[1, 2, 3], [4, 5, 6], [7, 8, 9]],
        actual = lodashStable.map(array, _.initial);

    assert.deepEqual(actual, [[1, 2], [4, 5], [7, 8]]);
  });

  QUnit.test('should work in a lazy sequence', function(assert) {
    assert.expect(4);

    if (!isNpm) {
      var array = lodashStable.range(LARGE_ARRAY_SIZE),
          values = [];

      // `initial` before `filter`: the filter callback must only see the
      // initial slice of the array.
      var actual = _(array).initial().filter(function(value) {
        values.push(value);
        return false;
      })
      .value();

      assert.deepEqual(actual, []);
      assert.deepEqual(values, _.initial(array));

      values = [];

      // `filter` before `initial`: the filter callback sees every element.
      actual = _(array).filter(function(value) {
        values.push(value);
        return isEven(value);
      })
      .initial()
      .value();

      assert.deepEqual(actual, _.initial(lodashStable.filter(array, isEven)));
      assert.deepEqual(values, array);
    }
    else {
      skipAssert(assert, 4);
    }
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.inRange');
(function() {
// With a single bound, `_.inRange(n, end)` checks `0 <= n < end`.
QUnit.test('should work with an `end`', function(assert) {
assert.expect(3);
assert.strictEqual(_.inRange(3, 5), true);
assert.strictEqual(_.inRange(5, 5), false);
assert.strictEqual(_.inRange(6, 5), false);
});
// The range is inclusive of `start` and exclusive of `end`.
QUnit.test('should work with a `start` and `end`', function(assert) {
assert.expect(4);
assert.strictEqual(_.inRange(1, 1, 5), true);
assert.strictEqual(_.inRange(3, 1, 5), true);
assert.strictEqual(_.inRange(0, 1, 5), false);
assert.strictEqual(_.inRange(5, 1, 5), false);
});
QUnit.test('should treat falsey `start` as `0`', function(assert) {
assert.expect(13);
lodashStable.each(falsey, function(value, index) {
if (index) {
assert.strictEqual(_.inRange(0, value), false);
assert.strictEqual(_.inRange(0, value, 1), true);
} else {
// Index 0 exercises the zero-argument call.
assert.strictEqual(_.inRange(0), false);
}
});
});
QUnit.test('should swap `start` and `end` when `start` > `end`', function(assert) {
assert.expect(2);
assert.strictEqual(_.inRange(2, 5, 1), true);
assert.strictEqual(_.inRange(-3, -2, -6), true);
});
QUnit.test('should work with a floating point `n` value', function(assert) {
assert.expect(4);
assert.strictEqual(_.inRange(0.5, 5), true);
assert.strictEqual(_.inRange(1.2, 1, 5), true);
assert.strictEqual(_.inRange(5.2, 5), false);
assert.strictEqual(_.inRange(0.5, 1, 5), false);
});
// String and `NaN` bounds are coerced to finite numbers before comparison.
QUnit.test('should coerce arguments to finite numbers', function(assert) {
assert.expect(1);
var actual = [
_.inRange(0, '1'),
_.inRange(0, '0', 1),
_.inRange(0, 0, '1'),
_.inRange(0, NaN, 1),
_.inRange(-1, -1, NaN)
];
assert.deepEqual(actual, lodashStable.map(actual, stubTrue));
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('intersection methods');
// Behavior shared by `_.intersection`, `_.intersectionBy`, and
// `_.intersectionWith` when called without an iteratee/comparator.
lodashStable.each(['intersection', 'intersectionBy', 'intersectionWith'], function(methodName) {
var func = _[methodName];
QUnit.test('`_.' + methodName + '` should return the intersection of two arrays', function(assert) {
assert.expect(1);
var actual = func([2, 1], [2, 3]);
assert.deepEqual(actual, [2]);
});
QUnit.test('`_.' + methodName + '` should return the intersection of multiple arrays', function(assert) {
assert.expect(1);
var actual = func([2, 1, 2, 3], [3, 4], [3, 2]);
assert.deepEqual(actual, [3]);
});
// Result order follows the first array, with duplicates removed.
QUnit.test('`_.' + methodName + '` should return an array of unique values', function(assert) {
assert.expect(1);
var actual = func([1, 1, 3, 2, 2], [5, 2, 2, 1, 4], [2, 1, 1]);
assert.deepEqual(actual, [1, 2]);
});
QUnit.test('`_.' + methodName + '` should work with a single array', function(assert) {
assert.expect(1);
var actual = func([1, 1, 3, 2, 2]);
assert.deepEqual(actual, [1, 3, 2]);
});
QUnit.test('`_.' + methodName + '` should work with `arguments` objects', function(assert) {
assert.expect(2);
var array = [0, 1, null, 3],
expected = [1, 3];
assert.deepEqual(func(array, args), expected);
assert.deepEqual(func(args, array), expected);
});
// SameValueZero semantics: `-0` equals `0`, and `NaN` matches `NaN`.
QUnit.test('`_.' + methodName + '` should treat `-0` as `0`', function(assert) {
assert.expect(1);
var values = [-0, 0],
expected = lodashStable.map(values, lodashStable.constant(['0']));
var actual = lodashStable.map(values, function(value) {
return lodashStable.map(func(values, [value]), lodashStable.toString);
});
assert.deepEqual(actual, expected);
});
QUnit.test('`_.' + methodName + '` should match `NaN`', function(assert) {
assert.expect(1);
var actual = func([1, NaN, 3], [NaN, 5, NaN]);
assert.deepEqual(actual, [NaN]);
});
// LARGE_ARRAY_SIZE inputs exercise the cache-backed (Set/hash) fast path.
QUnit.test('`_.' + methodName + '` should work with large arrays of `-0` as `0`', function(assert) {
assert.expect(1);
var values = [-0, 0],
expected = lodashStable.map(values, lodashStable.constant(['0']));
var actual = lodashStable.map(values, function(value) {
var largeArray = lodashStable.times(LARGE_ARRAY_SIZE, lodashStable.constant(value));
return lodashStable.map(func(values, largeArray), lodashStable.toString);
});
assert.deepEqual(actual, expected);
});
QUnit.test('`_.' + methodName + '` should work with large arrays of `NaN`', function(assert) {
assert.expect(1);
var largeArray = lodashStable.times(LARGE_ARRAY_SIZE, stubNaN);
assert.deepEqual(func([1, NaN, 3], largeArray), [NaN]);
});
QUnit.test('`_.' + methodName + '` should work with large arrays of objects', function(assert) {
assert.expect(2);
var object = {},
largeArray = lodashStable.times(LARGE_ARRAY_SIZE, lodashStable.constant(object));
assert.deepEqual(func([object], largeArray), [object]);
assert.deepEqual(func(lodashStable.range(LARGE_ARRAY_SIZE), [1]), [1]);
});
// Non-array, non-arguments inputs are ignored, yielding an empty result.
QUnit.test('`_.' + methodName + '` should treat values that are not arrays or `arguments` objects as empty', function(assert) {
assert.expect(3);
var array = [0, 1, null, 3];
assert.deepEqual(func(array, 3, { '0': 1 }, null), []);
assert.deepEqual(func(null, array, null, [2, 3]), []);
assert.deepEqual(func(array, null, args, null), []);
});
QUnit.test('`_.' + methodName + '` should return a wrapped value when chaining', function(assert) {
assert.expect(2);
if (!isNpm) {
var wrapped = _([1, 3, 2])[methodName]([5, 2, 1, 4]);
assert.ok(wrapped instanceof _);
assert.deepEqual(wrapped.value(), [1, 2]);
}
else {
skipAssert(assert, 2);
}
});
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.intersectionBy');

(function() {
  // `_.intersectionBy` compares elements by the result of `iteratee`,
  // which may be a function or a property-name shorthand.
  QUnit.test('should accept an `iteratee`', function(assert) {
    assert.expect(2);

    var result = _.intersectionBy([2.1, 1.2], [2.3, 3.4], Math.floor);
    assert.deepEqual(result, [2.1]);

    result = _.intersectionBy([{ 'x': 1 }], [{ 'x': 2 }, { 'x': 1 }], 'x');
    assert.deepEqual(result, [{ 'x': 1 }]);
  });

  // The iteratee receives a single value argument; capture the first call.
  QUnit.test('should provide correct `iteratee` arguments', function(assert) {
    assert.expect(1);

    var captured;
    _.intersectionBy([2.1, 1.2], [2.3, 3.4], function() {
      if (!captured) {
        captured = slice.call(arguments);
      }
    });

    assert.deepEqual(captured, [2.3]);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.intersectionWith');

(function() {
  // `_.intersectionWith` uses the supplied `comparator` to decide equality;
  // the result keeps elements (by reference) from the first array.
  QUnit.test('should work with a `comparator`', function(assert) {
    assert.expect(1);

    var firstArray = [{ 'x': 1, 'y': 2 }, { 'x': 2, 'y': 1 }];
    var secondArray = [{ 'x': 1, 'y': 1 }, { 'x': 1, 'y': 2 }];
    var result = _.intersectionWith(firstArray, secondArray, lodashStable.isEqual);

    assert.deepEqual(result, [firstArray[0]]);
  });

  // The `-0` from the first array must survive into the result; both a
  // small and a LARGE_ARRAY_SIZE second array are exercised.
  QUnit.test('should preserve the sign of `0`', function(assert) {
    assert.expect(1);

    var source = [-0];
    var bigArray = lodashStable.times(LARGE_ARRAY_SIZE, stubZero);
    var otherArrays = [[0], bigArray];
    var expected = lodashStable.map(otherArrays, lodashStable.constant(['-0']));

    var actual = lodashStable.map(otherArrays, function(other) {
      var intersected = _.intersectionWith(source, other, lodashStable.eq);
      return lodashStable.map(intersected, lodashStable.toString);
    });

    assert.deepEqual(actual, expected);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.invert');
(function() {
QUnit.test('should invert an object', function(assert) {
assert.expect(2);
var object = { 'a': 1, 'b': 2 },
actual = _.invert(object);
assert.deepEqual(actual, { '1': 'a', '2': 'b' });
// Inverting twice round-trips, with values stringified by the first pass.
assert.deepEqual(_.invert(actual), { 'a': '1', 'b': '2' });
});
// Values that collide with `Object.prototype` members must become plain
// own keys of the result, not trigger prototype lookups.
QUnit.test('should work with values that shadow keys on `Object.prototype`', function(assert) {
assert.expect(1);
var object = { 'a': 'hasOwnProperty', 'b': 'constructor' };
assert.deepEqual(_.invert(object), { 'hasOwnProperty': 'a', 'constructor': 'b' });
});
// A `length` property must not make the object be treated as array-like.
QUnit.test('should work with an object that has a `length` property', function(assert) {
assert.expect(1);
var object = { '0': 'a', '1': 'b', 'length': 2 };
assert.deepEqual(_.invert(object), { 'a': '0', 'b': '1', '2': 'length' });
});
QUnit.test('should return a wrapped value when chaining', function(assert) {
assert.expect(2);
if (!isNpm) {
var object = { 'a': 1, 'b': 2 },
wrapped = _(object).invert();
assert.ok(wrapped instanceof _);
assert.deepEqual(wrapped.value(), { '1': 'a', '2': 'b' });
}
else {
skipAssert(assert, 2);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.invertBy');
(function() {
// Fixture with a duplicate value (1) so inversion groups keys per value.
var object = { 'a': 1, 'b': 2, 'c': 1 };
QUnit.test('should transform keys by `iteratee`', function(assert) {
assert.expect(1);
var expected = { 'group1': ['a', 'c'], 'group2': ['b'] };
var actual = _.invertBy(object, function(value) {
return 'group' + value;
});
assert.deepEqual(actual, expected);
});
QUnit.test('should use `_.identity` when `iteratee` is nullish', function(assert) {
assert.expect(1);
// The leading hole in the sparse array exercises the no-iteratee call.
var values = [, null, undefined],
expected = lodashStable.map(values, lodashStable.constant({ '1': ['a', 'c'], '2': ['b'] }));
var actual = lodashStable.map(values, function(value, index) {
return index ? _.invertBy(object, value) : _.invertBy(object);
});
assert.deepEqual(actual, expected);
});
QUnit.test('should only add multiple values to own, not inherited, properties', function(assert) {
assert.expect(1);
var object = { 'a': 'hasOwnProperty', 'b': 'constructor' },
expected = { 'hasOwnProperty': ['a'], 'constructor': ['b'] };
assert.ok(lodashStable.isEqual(_.invertBy(object), expected));
});
QUnit.test('should return a wrapped value when chaining', function(assert) {
assert.expect(2);
if (!isNpm) {
var wrapped = _(object).invertBy();
assert.ok(wrapped instanceof _);
assert.deepEqual(wrapped.value(), { '1': ['a', 'c'], '2': ['b'] });
}
else {
skipAssert(assert, 2);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.invoke');
(function() {
QUnit.test('should invoke a method on `object`', function(assert) {
assert.expect(1);
var object = { 'a': lodashStable.constant('A') },
actual = _.invoke(object, 'a');
assert.strictEqual(actual, 'A');
});
QUnit.test('should support invoking with arguments', function(assert) {
assert.expect(1);
var object = { 'a': function(a, b) { return [a, b]; } },
actual = _.invoke(object, 'a', 1, 2);
assert.deepEqual(actual, [1, 2]);
});
// Nullish targets should yield `undefined`, not throw.
QUnit.test('should not error on nullish elements', function(assert) {
assert.expect(1);
var values = [null, undefined],
expected = lodashStable.map(values, noop);
var actual = lodashStable.map(values, function(value) {
try {
return _.invoke(value, 'a.b', 1, 2);
} catch (e) {}
});
assert.deepEqual(actual, expected);
});
// `-0` and `0` (primitive or boxed) must resolve to distinct keys.
QUnit.test('should preserve the sign of `0`', function(assert) {
assert.expect(1);
var object = { '-0': stubA, '0': stubB },
props = [-0, Object(-0), 0, Object(0)];
var actual = lodashStable.map(props, function(key) {
return _.invoke(object, key);
});
assert.deepEqual(actual, ['a', 'a', 'b', 'b']);
});
// Paths may be given as a dotted string or an array of keys.
QUnit.test('should support deep paths', function(assert) {
assert.expect(2);
var object = { 'a': { 'b': function(a, b) { return [a, b]; } } };
lodashStable.each(['a.b', ['a', 'b']], function(path) {
var actual = _.invoke(object, path, 1, 2);
assert.deepEqual(actual, [1, 2]);
});
});
// `this` inside the invoked method must be its immediate parent object.
QUnit.test('should invoke deep property methods with the correct `this` binding', function(assert) {
assert.expect(2);
var object = { 'a': { 'b': function() { return this.c; }, 'c': 1 } };
lodashStable.each(['a.b', ['a', 'b']], function(path) {
assert.deepEqual(_.invoke(object, path), 1);
});
});
QUnit.test('should return an unwrapped value when implicitly chaining', function(assert) {
assert.expect(1);
if (!isNpm) {
var object = { 'a': stubOne };
assert.strictEqual(_(object).invoke('a'), 1);
}
else {
skipAssert(assert);
}
});
QUnit.test('should return a wrapped value when explicitly chaining', function(assert) {
assert.expect(1);
if (!isNpm) {
var object = { 'a': stubOne };
assert.ok(_(object).chain().invoke('a') instanceof _);
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.invokeMap');

(function() {
  // FIX: description previously read "should invoke a methods on each
  // element of `collection`" — corrected grammar.
  QUnit.test('should invoke methods on each element of `collection`', function(assert) {
    assert.expect(1);

    var array = ['a', 'b', 'c'],
        actual = _.invokeMap(array, 'toUpperCase');

    assert.deepEqual(actual, ['A', 'B', 'C']);
  });

  QUnit.test('should support invoking with arguments', function(assert) {
    assert.expect(1);

    var array = [function() { return slice.call(arguments); }],
        actual = _.invokeMap(array, 'call', null, 'a', 'b', 'c');

    assert.deepEqual(actual, [['a', 'b', 'c']]);
  });

  // A function `methodName` is called with each element as `this`.
  QUnit.test('should work with a function for `methodName`', function(assert) {
    assert.expect(1);

    var array = ['a', 'b', 'c'];

    var actual = _.invokeMap(array, function(left, right) {
      return left + this.toUpperCase() + right;
    }, '(', ')');

    assert.deepEqual(actual, ['(A)', '(B)', '(C)']);
  });

  QUnit.test('should work with an object for `collection`', function(assert) {
    assert.expect(1);

    var object = { 'a': 1, 'b': 2, 'c': 3 },
        actual = _.invokeMap(object, 'toFixed', 1);

    assert.deepEqual(actual, ['1.0', '2.0', '3.0']);
  });

  QUnit.test('should treat number values for `collection` as empty', function(assert) {
    assert.expect(1);
    assert.deepEqual(_.invokeMap(1), []);
  });

  // Nullish elements map to `undefined` rather than throwing.
  QUnit.test('should not error on nullish elements', function(assert) {
    assert.expect(1);

    var array = ['a', null, undefined, 'd'];

    try {
      var actual = _.invokeMap(array, 'toUpperCase');
    } catch (e) {}

    assert.deepEqual(actual, ['A', undefined, undefined, 'D']);
  });

  QUnit.test('should not error on elements with missing properties', function(assert) {
    assert.expect(1);

    var objects = lodashStable.map([null, undefined, stubOne], function(value) {
      return { 'a': value };
    });

    var expected = lodashStable.map(objects, function(object) {
      return object.a ? object.a() : undefined;
    });

    try {
      var actual = _.invokeMap(objects, 'a');
    } catch (e) {}

    assert.deepEqual(actual, expected);
  });

  // `this` inside the invoked method must be its immediate parent object.
  QUnit.test('should invoke deep property methods with the correct `this` binding', function(assert) {
    assert.expect(2);

    var object = { 'a': { 'b': function() { return this.c; }, 'c': 1 } };

    lodashStable.each(['a.b', ['a', 'b']], function(path) {
      assert.deepEqual(_.invokeMap([object], path), [1]);
    });
  });

  QUnit.test('should return a wrapped value when chaining', function(assert) {
    assert.expect(4);

    if (!isNpm) {
      var array = ['a', 'b', 'c'],
          wrapped = _(array),
          actual = wrapped.invokeMap('toUpperCase');

      assert.ok(actual instanceof _);
      assert.deepEqual(actual.valueOf(), ['A', 'B', 'C']);

      actual = wrapped.invokeMap(function(left, right) {
        return left + this.toUpperCase() + right;
      }, '(', ')');

      assert.ok(actual instanceof _);
      assert.deepEqual(actual.valueOf(), ['(A)', '(B)', '(C)']);
    }
    else {
      skipAssert(assert, 4);
    }
  });

  // Lazy evaluation: with `take(1)` only the first element's method runs.
  QUnit.test('should support shortcut fusion', function(assert) {
    assert.expect(2);

    if (!isNpm) {
      var count = 0,
          method = function() { count++; return this.index; };

      var array = lodashStable.times(LARGE_ARRAY_SIZE, function(index) {
        return { 'index': index, 'method': method };
      });

      var actual = _(array).invokeMap('method').take(1).value();

      assert.strictEqual(count, 1);
      assert.deepEqual(actual, [0]);
    }
    else {
      skipAssert(assert, 2);
    }
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.isArguments');
(function() {
// `args` and `strictArgs` are arguments-object fixtures created elsewhere
// in this file (sloppy- and strict-mode, respectively).
QUnit.test('should return `true` for `arguments` objects', function(assert) {
assert.expect(2);
assert.strictEqual(_.isArguments(args), true);
assert.strictEqual(_.isArguments(strictArgs), true);
});
QUnit.test('should return `false` for non `arguments` objects', function(assert) {
assert.expect(12);
var expected = lodashStable.map(falsey, stubFalse);
var actual = lodashStable.map(falsey, function(value, index) {
return index ? _.isArguments(value) : _.isArguments();
});
assert.deepEqual(actual, expected);
assert.strictEqual(_.isArguments([1, 2, 3]), false);
assert.strictEqual(_.isArguments(true), false);
assert.strictEqual(_.isArguments(new Date), false);
assert.strictEqual(_.isArguments(new Error), false);
assert.strictEqual(_.isArguments(_), false);
assert.strictEqual(_.isArguments(slice), false);
// A plain object mimicking an arguments shape must not pass.
assert.strictEqual(_.isArguments({ '0': 1, 'callee': noop, 'length': 1 }), false);
assert.strictEqual(_.isArguments(1), false);
assert.strictEqual(_.isArguments(/x/), false);
assert.strictEqual(_.isArguments('a'), false);
assert.strictEqual(_.isArguments(symbol), false);
});
// Cross-realm values (e.g. from an iframe) have different constructors.
QUnit.test('should work with an `arguments` object from another realm', function(assert) {
assert.expect(1);
if (realm.arguments) {
assert.strictEqual(_.isArguments(realm.arguments), true);
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.isArray');
(function() {
QUnit.test('should return `true` for arrays', function(assert) {
assert.expect(1);
assert.strictEqual(_.isArray([1, 2, 3]), true);
});
QUnit.test('should return `false` for non-arrays', function(assert) {
assert.expect(12);
var expected = lodashStable.map(falsey, stubFalse);
var actual = lodashStable.map(falsey, function(value, index) {
return index ? _.isArray(value) : _.isArray();
});
assert.deepEqual(actual, expected);
assert.strictEqual(_.isArray(args), false);
assert.strictEqual(_.isArray(true), false);
assert.strictEqual(_.isArray(new Date), false);
assert.strictEqual(_.isArray(new Error), false);
assert.strictEqual(_.isArray(_), false);
assert.strictEqual(_.isArray(slice), false);
// Array-like plain objects must not pass.
assert.strictEqual(_.isArray({ '0': 1, 'length': 1 }), false);
assert.strictEqual(_.isArray(1), false);
assert.strictEqual(_.isArray(/x/), false);
assert.strictEqual(_.isArray('a'), false);
assert.strictEqual(_.isArray(symbol), false);
});
// Cross-realm arrays have a different Array constructor but must pass.
QUnit.test('should work with an array from another realm', function(assert) {
assert.expect(1);
if (realm.array) {
assert.strictEqual(_.isArray(realm.array), true);
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.isArrayBuffer');
(function() {
// Guarded: `ArrayBuffer` may be undefined in older environments.
QUnit.test('should return `true` for array buffers', function(assert) {
assert.expect(1);
if (ArrayBuffer) {
assert.strictEqual(_.isArrayBuffer(arrayBuffer), true);
}
else {
skipAssert(assert);
}
});
QUnit.test('should return `false` for non array buffers', function(assert) {
assert.expect(13);
var expected = lodashStable.map(falsey, stubFalse);
var actual = lodashStable.map(falsey, function(value, index) {
return index ? _.isArrayBuffer(value) : _.isArrayBuffer();
});
assert.deepEqual(actual, expected);
assert.strictEqual(_.isArrayBuffer(args), false);
assert.strictEqual(_.isArrayBuffer([1]), false);
assert.strictEqual(_.isArrayBuffer(true), false);
assert.strictEqual(_.isArrayBuffer(new Date), false);
assert.strictEqual(_.isArrayBuffer(new Error), false);
assert.strictEqual(_.isArrayBuffer(_), false);
assert.strictEqual(_.isArrayBuffer(slice), false);
assert.strictEqual(_.isArrayBuffer({ 'a': 1 }), false);
assert.strictEqual(_.isArrayBuffer(1), false);
assert.strictEqual(_.isArrayBuffer(/x/), false);
assert.strictEqual(_.isArrayBuffer('a'), false);
assert.strictEqual(_.isArrayBuffer(symbol), false);
});
// Cross-realm buffers have a different ArrayBuffer constructor.
QUnit.test('should work with array buffers from another realm', function(assert) {
assert.expect(1);
if (realm.arrayBuffer) {
assert.strictEqual(_.isArrayBuffer(realm.arrayBuffer), true);
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.isArrayLike');
(function() {
// Array-like = has a valid integer `length` and is not a function.
QUnit.test('should return `true` for array-like values', function(assert) {
assert.expect(1);
var values = [args, [1, 2, 3], { '0': 'a', 'length': 1 }, 'a'],
expected = lodashStable.map(values, stubTrue),
actual = lodashStable.map(values, _.isArrayLike);
assert.deepEqual(actual, expected);
});
QUnit.test('should return `false` for non-arrays', function(assert) {
assert.expect(12);
// The empty string is the one falsey value that IS array-like.
var expected = lodashStable.map(falsey, function(value) {
return value === '';
});
var actual = lodashStable.map(falsey, function(value, index) {
return index ? _.isArrayLike(value) : _.isArrayLike();
});
assert.deepEqual(actual, expected);
assert.strictEqual(_.isArrayLike(true), false);
assert.strictEqual(_.isArrayLike(new Date), false);
assert.strictEqual(_.isArrayLike(new Error), false);
assert.strictEqual(_.isArrayLike(_), false);
assert.strictEqual(_.isArrayLike(asyncFunc), false);
assert.strictEqual(_.isArrayLike(genFunc), false);
assert.strictEqual(_.isArrayLike(slice), false);
assert.strictEqual(_.isArrayLike({ 'a': 1 }), false);
assert.strictEqual(_.isArrayLike(1), false);
assert.strictEqual(_.isArrayLike(/x/), false);
assert.strictEqual(_.isArrayLike(symbol), false);
});
QUnit.test('should work with an array from another realm', function(assert) {
assert.expect(1);
if (realm.object) {
var values = [realm.arguments, realm.array, realm.string],
expected = lodashStable.map(values, stubTrue),
actual = lodashStable.map(values, _.isArrayLike);
assert.deepEqual(actual, expected);
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.isBoolean');
(function() {
// Both primitive booleans and boxed Boolean objects must pass.
QUnit.test('should return `true` for booleans', function(assert) {
assert.expect(4);
assert.strictEqual(_.isBoolean(true), true);
assert.strictEqual(_.isBoolean(false), true);
assert.strictEqual(_.isBoolean(Object(true)), true);
assert.strictEqual(_.isBoolean(Object(false)), true);
});
QUnit.test('should return `false` for non-booleans', function(assert) {
assert.expect(12);
// `false` is the only falsey value that IS a boolean.
var expected = lodashStable.map(falsey, function(value) {
return value === false;
});
var actual = lodashStable.map(falsey, function(value, index) {
return index ? _.isBoolean(value) : _.isBoolean();
});
assert.deepEqual(actual, expected);
assert.strictEqual(_.isBoolean(args), false);
assert.strictEqual(_.isBoolean([1, 2, 3]), false);
assert.strictEqual(_.isBoolean(new Date), false);
assert.strictEqual(_.isBoolean(new Error), false);
assert.strictEqual(_.isBoolean(_), false);
assert.strictEqual(_.isBoolean(slice), false);
assert.strictEqual(_.isBoolean({ 'a': 1 }), false);
assert.strictEqual(_.isBoolean(1), false);
assert.strictEqual(_.isBoolean(/x/), false);
assert.strictEqual(_.isBoolean('a'), false);
assert.strictEqual(_.isBoolean(symbol), false);
});
QUnit.test('should work with a boolean from another realm', function(assert) {
assert.expect(1);
if (realm.boolean) {
assert.strictEqual(_.isBoolean(realm.boolean), true);
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.isBuffer');
(function() {
// Guarded: `Buffer` only exists under Node.
// NOTE(review): `new Buffer(n)` is deprecated in modern Node
// (`Buffer.alloc(n)` is preferred); left as-is since the assertion only
// exercises type detection — confirm against supported Node versions.
QUnit.test('should return `true` for buffers', function(assert) {
assert.expect(1);
if (Buffer) {
assert.strictEqual(_.isBuffer(new Buffer(2)), true);
}
else {
skipAssert(assert);
}
});
QUnit.test('should return `false` for non-buffers', function(assert) {
assert.expect(13);
var expected = lodashStable.map(falsey, stubFalse);
var actual = lodashStable.map(falsey, function(value, index) {
return index ? _.isBuffer(value) : _.isBuffer();
});
assert.deepEqual(actual, expected);
assert.strictEqual(_.isBuffer(args), false);
assert.strictEqual(_.isBuffer([1]), false);
assert.strictEqual(_.isBuffer(true), false);
assert.strictEqual(_.isBuffer(new Date), false);
assert.strictEqual(_.isBuffer(new Error), false);
assert.strictEqual(_.isBuffer(_), false);
assert.strictEqual(_.isBuffer(slice), false);
assert.strictEqual(_.isBuffer({ 'a': 1 }), false);
assert.strictEqual(_.isBuffer(1), false);
assert.strictEqual(_.isBuffer(/x/), false);
assert.strictEqual(_.isBuffer('a'), false);
assert.strictEqual(_.isBuffer(symbol), false);
});
// `lodashBizarro` is a copy of lodash loaded in a context without `Buffer`.
QUnit.test('should return `false` if `Buffer` is not defined', function(assert) {
assert.expect(1);
if (!isStrict && Buffer && lodashBizarro) {
assert.strictEqual(lodashBizarro.isBuffer(new Buffer(2)), false);
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.isDate');
(function() {
QUnit.test('should return `true` for dates', function(assert) {
assert.expect(1);
assert.strictEqual(_.isDate(new Date), true);
});
QUnit.test('should return `false` for non-dates', function(assert) {
assert.expect(12);
var expected = lodashStable.map(falsey, stubFalse);
var actual = lodashStable.map(falsey, function(value, index) {
return index ? _.isDate(value) : _.isDate();
});
assert.deepEqual(actual, expected);
assert.strictEqual(_.isDate(args), false);
assert.strictEqual(_.isDate([1, 2, 3]), false);
assert.strictEqual(_.isDate(true), false);
assert.strictEqual(_.isDate(new Error), false);
assert.strictEqual(_.isDate(_), false);
assert.strictEqual(_.isDate(slice), false);
assert.strictEqual(_.isDate({ 'a': 1 }), false);
assert.strictEqual(_.isDate(1), false);
assert.strictEqual(_.isDate(/x/), false);
assert.strictEqual(_.isDate('a'), false);
assert.strictEqual(_.isDate(symbol), false);
});
// Cross-realm dates have a different Date constructor but must pass.
QUnit.test('should work with a date object from another realm', function(assert) {
assert.expect(1);
if (realm.date) {
assert.strictEqual(_.isDate(realm.date), true);
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.isElement');
(function() {
// Guarded: `document`/`body` only exist in browser-like environments.
QUnit.test('should return `true` for elements', function(assert) {
assert.expect(1);
if (document) {
assert.strictEqual(_.isElement(body), true);
}
else {
skipAssert(assert);
}
});
// A custom (non-plain) object with `nodeType: 1` is accepted as an element.
QUnit.test('should return `true` for non-plain objects', function(assert) {
assert.expect(1);
function Foo() {
this.nodeType = 1;
}
assert.strictEqual(_.isElement(new Foo), true);
});
QUnit.test('should return `false` for non DOM elements', function(assert) {
assert.expect(13);
var expected = lodashStable.map(falsey, stubFalse);
var actual = lodashStable.map(falsey, function(value, index) {
return index ? _.isElement(value) : _.isElement();
});
assert.deepEqual(actual, expected);
assert.strictEqual(_.isElement(args), false);
assert.strictEqual(_.isElement([1, 2, 3]), false);
assert.strictEqual(_.isElement(true), false);
assert.strictEqual(_.isElement(new Date), false);
assert.strictEqual(_.isElement(new Error), false);
assert.strictEqual(_.isElement(_), false);
assert.strictEqual(_.isElement(slice), false);
assert.strictEqual(_.isElement({ 'a': 1 }), false);
assert.strictEqual(_.isElement(1), false);
assert.strictEqual(_.isElement(/x/), false);
assert.strictEqual(_.isElement('a'), false);
assert.strictEqual(_.isElement(symbol), false);
});
// Plain objects must not pass, whatever their `nodeType` value looks like.
QUnit.test('should return `false` for plain objects', function(assert) {
assert.expect(6);
assert.strictEqual(_.isElement({ 'nodeType': 1 }), false);
assert.strictEqual(_.isElement({ 'nodeType': Object(1) }), false);
assert.strictEqual(_.isElement({ 'nodeType': true }), false);
assert.strictEqual(_.isElement({ 'nodeType': [1] }), false);
assert.strictEqual(_.isElement({ 'nodeType': '1' }), false);
assert.strictEqual(_.isElement({ 'nodeType': '001' }), false);
});
QUnit.test('should work with a DOM element from another realm', function(assert) {
assert.expect(1);
if (realm.element) {
assert.strictEqual(_.isElement(realm.element), true);
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.isEmpty');

(function() {
  QUnit.test('should return `true` for empty values', function(assert) {
    assert.expect(10);

    var expected = lodashStable.map(empties, stubTrue),
        actual = lodashStable.map(empties, _.isEmpty);

    assert.deepEqual(actual, expected);

    // Primitives and non-collection objects have no entries to count,
    // so they are all treated as empty.
    assert.strictEqual(_.isEmpty(true), true);
    assert.strictEqual(_.isEmpty(slice), true);
    assert.strictEqual(_.isEmpty(1), true);
    assert.strictEqual(_.isEmpty(NaN), true);
    assert.strictEqual(_.isEmpty(/x/), true);
    assert.strictEqual(_.isEmpty(symbol), true);
    assert.strictEqual(_.isEmpty(), true);

    if (Buffer) {
      // Buffers are array-like; emptiness follows their byte length.
      assert.strictEqual(_.isEmpty(new Buffer(0)), true);
      assert.strictEqual(_.isEmpty(new Buffer(1)), false);
    }
    else {
      skipAssert(assert, 2);
    }
  });

  QUnit.test('should return `false` for non-empty values', function(assert) {
    assert.expect(3);

    assert.strictEqual(_.isEmpty([0]), false);
    assert.strictEqual(_.isEmpty({ 'a': 0 }), false);
    assert.strictEqual(_.isEmpty('a'), false);
  });

  QUnit.test('should work with an object that has a `length` property', function(assert) {
    assert.expect(1);

    // A `length` own property alone does not make a plain object
    // array-like; it still has one own key, so it is non-empty.
    assert.strictEqual(_.isEmpty({ 'length': 0 }), false);
  });

  QUnit.test('should work with `arguments` objects', function(assert) {
    assert.expect(1);
    assert.strictEqual(_.isEmpty(args), false);
  });

  // Fix: test description typo "prototytpe" corrected to "prototype".
  QUnit.test('should work with prototype objects', function(assert) {
    assert.expect(2);

    function Foo() {}
    Foo.prototype = { 'constructor': Foo };

    // A prototype object containing only its `constructor` back-reference
    // counts as empty.
    assert.strictEqual(_.isEmpty(Foo.prototype), true);

    Foo.prototype.a = 1;
    assert.strictEqual(_.isEmpty(Foo.prototype), false);
  });

  QUnit.test('should work with jQuery/MooTools DOM query collections', function(assert) {
    assert.expect(1);

    // Mimics a DOM wrapper: array-like via `length` + `splice`.
    function Foo(elements) {
      push.apply(this, elements);
    }
    Foo.prototype = { 'length': 0, 'splice': arrayProto.splice };

    assert.strictEqual(_.isEmpty(new Foo([])), true);
  });

  QUnit.test('should work with maps', function(assert) {
    assert.expect(4);

    if (Map) {
      // Checked for both the local and the cross-realm Map.
      lodashStable.each([new Map, realm.map], function(map) {
        assert.strictEqual(_.isEmpty(map), true);
        map.set('a', 1);
        assert.strictEqual(_.isEmpty(map), false);
        map.clear();
      });
    }
    else {
      skipAssert(assert, 4);
    }
  });

  QUnit.test('should work with sets', function(assert) {
    assert.expect(4);

    if (Set) {
      // Checked for both the local and the cross-realm Set.
      lodashStable.each([new Set, realm.set], function(set) {
        assert.strictEqual(_.isEmpty(set), true);
        set.add(1);
        assert.strictEqual(_.isEmpty(set), false);
        set.clear();
      });
    }
    else {
      skipAssert(assert, 4);
    }
  });

  QUnit.test('should not treat objects with negative lengths as array-like', function(assert) {
    assert.expect(1);

    function Foo() {}
    Foo.prototype.length = -1;

    assert.strictEqual(_.isEmpty(new Foo), true);
  });

  QUnit.test('should not treat objects with lengths larger than `MAX_SAFE_INTEGER` as array-like', function(assert) {
    assert.expect(1);

    function Foo() {}
    Foo.prototype.length = MAX_SAFE_INTEGER + 1;

    assert.strictEqual(_.isEmpty(new Foo), true);
  });

  QUnit.test('should not treat objects with non-number lengths as array-like', function(assert) {
    assert.expect(1);
    assert.strictEqual(_.isEmpty({ 'length': '0' }), false);
  });

  QUnit.test('should return an unwrapped value when implicitly chaining', function(assert) {
    assert.expect(1);

    if (!isNpm) {
      assert.strictEqual(_({}).isEmpty(), true);
    }
    else {
      skipAssert(assert);
    }
  });

  QUnit.test('should return a wrapped value when explicitly chaining', function(assert) {
    assert.expect(1);

    if (!isNpm) {
      assert.ok(_({}).chain().isEmpty() instanceof _);
    }
    else {
      skipAssert(assert);
    }
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.isEqual');

(function() {
  // When `Symbol` is unavailable the booleans below act as distinct
  // stand-in values so the comparison tables still exercise both the
  // equal and not-equal branches.
  var symbol1 = Symbol ? Symbol('a') : true,
      symbol2 = Symbol ? Symbol('b') : false;

  QUnit.test('should compare primitives', function(assert) {
    assert.expect(1);

    // Each entry is [value, other, expected result]. Boxed primitives
    // (`Object(x)`) are expected to compare equal to their unboxed forms.
    var pairs = [
      [1, 1, true], [1, Object(1), true], [1, '1', false], [1, 2, false],
      [-0, -0, true], [0, 0, true], [0, Object(0), true], [Object(0), Object(0), true], [-0, 0, true], [0, '0', false], [0, null, false],
      [NaN, NaN, true], [NaN, Object(NaN), true], [Object(NaN), Object(NaN), true], [NaN, 'a', false], [NaN, Infinity, false],
      ['a', 'a', true], ['a', Object('a'), true], [Object('a'), Object('a'), true], ['a', 'b', false], ['a', ['a'], false],
      [true, true, true], [true, Object(true), true], [Object(true), Object(true), true], [true, 1, false], [true, 'a', false],
      [false, false, true], [false, Object(false), true], [Object(false), Object(false), true], [false, 0, false], [false, '', false],
      [symbol1, symbol1, true], [symbol1, Object(symbol1), true], [Object(symbol1), Object(symbol1), true], [symbol1, symbol2, false],
      [null, null, true], [null, undefined, false], [null, {}, false], [null, '', false],
      [undefined, undefined, true], [undefined, null, false], [undefined, '', false]
    ];

    var expected = lodashStable.map(pairs, function(pair) {
      return pair[2];
    });

    var actual = lodashStable.map(pairs, function(pair) {
      return _.isEqual(pair[0], pair[1]);
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should compare arrays', function(assert) {
    assert.expect(6);

    var array1 = [true, null, 1, 'a', undefined],
        array2 = [true, null, 1, 'a', undefined];

    assert.strictEqual(_.isEqual(array1, array2), true);

    array1 = [[1, 2, 3], new Date(2012, 4, 23), /x/, { 'e': 1 }];
    array2 = [[1, 2, 3], new Date(2012, 4, 23), /x/, { 'e': 1 }];

    assert.strictEqual(_.isEqual(array1, array2), true);

    // A hole (index 1 of `array1`) compares equal to an explicit
    // `undefined` element at the same index.
    array1 = [1];
    array1[2] = 3;

    array2 = [1];
    array2[1] = undefined;
    array2[2] = 3;

    assert.strictEqual(_.isEqual(array1, array2), true);

    // Boxed and unboxed primitives are interchangeable at any depth.
    array1 = [Object(1), false, Object('a'), /x/, new Date(2012, 4, 23), ['a', 'b', [Object('c')]], { 'a': 1 }];
    array2 = [1, Object(false), 'a', /x/, new Date(2012, 4, 23), ['a', Object('b'), ['c']], { 'a': 1 }];

    assert.strictEqual(_.isEqual(array1, array2), true);

    array1 = [1, 2, 3];
    array2 = [3, 2, 1];

    assert.strictEqual(_.isEqual(array1, array2), false);

    array1 = [1, 2];
    array2 = [1, 2, 3];

    assert.strictEqual(_.isEqual(array1, array2), false);
  });

  QUnit.test('should treat arrays with identical values but different non-index properties as equal', function(assert) {
    assert.expect(3);

    var array1 = [1, 2, 3],
        array2 = [1, 2, 3];

    // Shadowing inherited array methods with own properties must not
    // affect index-based equality.
    array1.every = array1.filter = array1.forEach =
    array1.indexOf = array1.lastIndexOf = array1.map =
    array1.some = array1.reduce = array1.reduceRight = null;

    array2.concat = array2.join = array2.pop =
    array2.reverse = array2.shift = array2.slice =
    array2.sort = array2.splice = array2.unshift = null;

    assert.strictEqual(_.isEqual(array1, array2), true);

    array1 = [1, 2, 3];
    array1.a = 1;

    array2 = [1, 2, 3];
    array2.b = 1;

    assert.strictEqual(_.isEqual(array1, array2), true);

    // Regexp match arrays carry extra properties (e.g. `index`, `input`)
    // that are likewise ignored.
    array1 = /c/.exec('abcde');
    array2 = ['c'];

    assert.strictEqual(_.isEqual(array1, array2), true);
  });

  QUnit.test('should compare sparse arrays', function(assert) {
    assert.expect(3);

    var array = Array(1);

    assert.strictEqual(_.isEqual(array, Array(1)), true);
    assert.strictEqual(_.isEqual(array, [undefined]), true);
    assert.strictEqual(_.isEqual(array, Array(2)), false);
  });

  QUnit.test('should compare plain objects', function(assert) {
    assert.expect(5);

    var object1 = { 'a': true, 'b': null, 'c': 1, 'd': 'a', 'e': undefined },
        object2 = { 'a': true, 'b': null, 'c': 1, 'd': 'a', 'e': undefined };

    assert.strictEqual(_.isEqual(object1, object2), true);

    object1 = { 'a': [1, 2, 3], 'b': new Date(2012, 4, 23), 'c': /x/, 'd': { 'e': 1 } };
    object2 = { 'a': [1, 2, 3], 'b': new Date(2012, 4, 23), 'c': /x/, 'd': { 'e': 1 } };

    assert.strictEqual(_.isEqual(object1, object2), true);

    object1 = { 'a': 1, 'b': 2, 'c': 3 };
    object2 = { 'a': 3, 'b': 2, 'c': 1 };

    assert.strictEqual(_.isEqual(object1, object2), false);

    object1 = { 'a': 1, 'b': 2, 'c': 3 };
    object2 = { 'd': 1, 'e': 2, 'f': 3 };

    assert.strictEqual(_.isEqual(object1, object2), false);

    object1 = { 'a': 1, 'b': 2 };
    object2 = { 'a': 1, 'b': 2, 'c': 3 };

    assert.strictEqual(_.isEqual(object1, object2), false);
  });

  QUnit.test('should compare objects regardless of key order', function(assert) {
    assert.expect(1);

    var object1 = { 'a': 1, 'b': 2, 'c': 3 },
        object2 = { 'c': 3, 'a': 1, 'b': 2 };

    assert.strictEqual(_.isEqual(object1, object2), true);
  });

  QUnit.test('should compare nested objects', function(assert) {
    assert.expect(1);

    // Mirrors of each other with boxed/unboxed primitives swapped at
    // several depths; still expected to compare equal.
    var object1 = {
      'a': [1, 2, 3],
      'b': true,
      'c': Object(1),
      'd': 'a',
      'e': {
        'f': ['a', Object('b'), 'c'],
        'g': Object(false),
        'h': new Date(2012, 4, 23),
        'i': noop,
        'j': 'a'
      }
    };

    var object2 = {
      'a': [1, Object(2), 3],
      'b': Object(true),
      'c': 1,
      'd': Object('a'),
      'e': {
        'f': ['a', 'b', 'c'],
        'g': false,
        'h': new Date(2012, 4, 23),
        'i': noop,
        'j': 'a'
      }
    };

    assert.strictEqual(_.isEqual(object1, object2), true);
  });

  QUnit.test('should compare object instances', function(assert) {
    assert.expect(4);

    function Foo() {
      this.a = 1;
    }
    Foo.prototype.a = 1;

    function Bar() {
      this.a = 1;
    }
    Bar.prototype.a = 2;

    // Same constructor and own values: equal. Different constructors,
    // or a plain object vs. an instance: not equal.
    assert.strictEqual(_.isEqual(new Foo, new Foo), true);
    assert.strictEqual(_.isEqual(new Foo, new Bar), false);
    assert.strictEqual(_.isEqual({ 'a': 1 }, new Foo), false);
    assert.strictEqual(_.isEqual({ 'a': 2 }, new Bar), false);
  });

  QUnit.test('should compare objects with constructor properties', function(assert) {
    assert.expect(5);

    // Own `constructor` keys are compared like any other property.
    assert.strictEqual(_.isEqual({ 'constructor': 1 }, { 'constructor': 1 }), true);
    assert.strictEqual(_.isEqual({ 'constructor': 1 }, { 'constructor': '1' }), false);
    assert.strictEqual(_.isEqual({ 'constructor': [1] }, { 'constructor': [1] }), true);
    assert.strictEqual(_.isEqual({ 'constructor': [1] }, { 'constructor': ['1'] }), false);
    assert.strictEqual(_.isEqual({ 'constructor': Object }, {}), false);
  });

  QUnit.test('should compare arrays with circular references', function(assert) {
    assert.expect(6);

    var array1 = [],
        array2 = [];

    // Structurally-identical self-references are treated as equal.
    array1.push(array1);
    array2.push(array2);

    assert.strictEqual(_.isEqual(array1, array2), true);

    array1.push('b');
    array2.push('b');

    assert.strictEqual(_.isEqual(array1, array2), true);

    array1.push('c');
    array2.push('d');

    assert.strictEqual(_.isEqual(array1, array2), false);

    // A self-reference is not equal to a plain copy of the outer array.
    array1 = ['a', 'b', 'c'];
    array1[1] = array1;
    array2 = ['a', ['a', 'b', 'c'], 'c'];

    assert.strictEqual(_.isEqual(array1, array2), false);

    // Cycles of differing depth are not equal (checked both ways).
    array1 = [[[]]];
    array1[0][0][0] = array1;
    array2 = [];
    array2[0] = array2;

    assert.strictEqual(_.isEqual(array1, array2), false);
    assert.strictEqual(_.isEqual(array2, array1), false);
  });

  QUnit.test('should have transitive equivalence for circular references of arrays', function(assert) {
    assert.expect(3);

    var array1 = [],
        array2 = [array1],
        array3 = [array2];

    array1[0] = array1;

    // a == b and b == c implies a == c, even through a cycle.
    assert.strictEqual(_.isEqual(array1, array2), true);
    assert.strictEqual(_.isEqual(array2, array3), true);
    assert.strictEqual(_.isEqual(array1, array3), true);
  });

  QUnit.test('should compare objects with circular references', function(assert) {
    assert.expect(6);

    var object1 = {},
        object2 = {};

    object1.a = object1;
    object2.a = object2;

    assert.strictEqual(_.isEqual(object1, object2), true);

    object1.b = 0;
    object2.b = Object(0);

    assert.strictEqual(_.isEqual(object1, object2), true);

    object1.c = Object(1);
    object2.c = Object(2);

    assert.strictEqual(_.isEqual(object1, object2), false);

    // A self-reference is not equal to a plain copy of the outer object.
    object1 = { 'a': 1, 'b': 2, 'c': 3 };
    object1.b = object1;
    object2 = { 'a': 1, 'b': { 'a': 1, 'b': 2, 'c': 3 }, 'c': 3 };

    assert.strictEqual(_.isEqual(object1, object2), false);

    // Cycles of differing depth are not equal (checked both ways).
    object1 = {self: {self: {self: {}}}};
    object1.self.self.self = object1;
    object2 = {self: {}};
    object2.self = object2;

    assert.strictEqual(_.isEqual(object1, object2), false);
    assert.strictEqual(_.isEqual(object2, object1), false);
  });

  QUnit.test('should have transitive equivalence for circular references of objects', function(assert) {
    assert.expect(3);

    var object1 = {},
        object2 = { 'a': object1 },
        object3 = { 'a': object2 };

    object1.a = object1;

    assert.strictEqual(_.isEqual(object1, object2), true);
    assert.strictEqual(_.isEqual(object2, object3), true);
    assert.strictEqual(_.isEqual(object1, object3), true);
  });

  QUnit.test('should compare objects with multiple circular references', function(assert) {
    assert.expect(3);

    var array1 = [{}],
        array2 = [{}];

    // Each array references itself both via its element's `a` property
    // and as its own second element.
    (array1[0].a = array1).push(array1);
    (array2[0].a = array2).push(array2);

    assert.strictEqual(_.isEqual(array1, array2), true);

    array1[0].b = 0;
    array2[0].b = Object(0);

    assert.strictEqual(_.isEqual(array1, array2), true);

    array1[0].c = Object(1);
    array2[0].c = Object(2);

    assert.strictEqual(_.isEqual(array1, array2), false);
  });

  QUnit.test('should compare objects with complex circular references', function(assert) {
    assert.expect(1);

    var object1 = {
      'foo': { 'b': { 'c': { 'd': {} } } },
      'bar': { 'a': 2 }
    };

    var object2 = {
      'foo': { 'b': { 'c': { 'd': {} } } },
      'bar': { 'a': 2 }
    };

    // Cross-links: the deep `d` points back to the root, and `bar.b`
    // aliases `foo.b` in both structures.
    object1.foo.b.c.d = object1;
    object1.bar.b = object1.foo.b;

    object2.foo.b.c.d = object2;
    object2.bar.b = object2.foo.b;

    assert.strictEqual(_.isEqual(object1, object2), true);
  });

  QUnit.test('should compare objects with shared property values', function(assert) {
    assert.expect(1);

    var object1 = {
      'a': [1, 2]
    };

    var object2 = {
      'a': [1, 2],
      'b': [1, 2]
    };

    // `object1.b` aliases `object1.a`; aliasing vs. separate-but-equal
    // arrays should not matter.
    object1.b = object1.a;

    assert.strictEqual(_.isEqual(object1, object2), true);
  });

  QUnit.test('should treat objects created by `Object.create(null)` like plain objects', function(assert) {
    assert.expect(2);

    function Foo() {
      this.a = 1;
    }
    Foo.prototype.constructor = null;

    // Prototype-less objects compare like plain objects; a `null`
    // `constructor` on a real instance does not make it plain.
    var object1 = create(null);
    object1.a = 1;

    var object2 = { 'a': 1 };

    assert.strictEqual(_.isEqual(object1, object2), true);
    assert.strictEqual(_.isEqual(new Foo, object2), false);
  });

  QUnit.test('should avoid common type coercions', function(assert) {
    assert.expect(9);

    // None of these pairs may be equal via implicit `==`-style coercion.
    assert.strictEqual(_.isEqual(true, Object(false)), false);
    assert.strictEqual(_.isEqual(Object(false), Object(0)), false);
    assert.strictEqual(_.isEqual(false, Object('')), false);
    assert.strictEqual(_.isEqual(Object(36), Object('36')), false);
    assert.strictEqual(_.isEqual(0, ''), false);
    assert.strictEqual(_.isEqual(1, true), false);
    assert.strictEqual(_.isEqual(1337756400000, new Date(2012, 4, 23)), false);
    assert.strictEqual(_.isEqual('36', 36), false);
    assert.strictEqual(_.isEqual(36, '36'), false);
  });

  QUnit.test('should compare `arguments` objects', function(assert) {
    assert.expect(2);

    var args1 = (function() { return arguments; }()),
        args2 = (function() { return arguments; }()),
        args3 = (function() { return arguments; }(1, 2));

    assert.strictEqual(_.isEqual(args1, args2), true);
    assert.strictEqual(_.isEqual(args1, args3), false);
  });

  QUnit.test('should treat `arguments` objects like `Object` objects', function(assert) {
    assert.expect(4);

    var object = { '0': 1, '1': 2, '2': 3 };

    function Foo() {}
    Foo.prototype = object;

    // `arguments` vs. plain object with the same keys: equal either way.
    // `arguments` vs. a non-plain instance: not equal either way.
    assert.strictEqual(_.isEqual(args, object), true);
    assert.strictEqual(_.isEqual(object, args), true);

    assert.strictEqual(_.isEqual(args, new Foo), false);
    assert.strictEqual(_.isEqual(new Foo, args), false);
  });

  QUnit.test('should compare array buffers', function(assert) {
    assert.expect(2);

    if (ArrayBuffer) {
      // Same underlying byte (0xFF) written via a signed view; buffers
      // compare by content, not by the view that produced them.
      var buffer = new Int8Array([-1]).buffer;

      assert.strictEqual(_.isEqual(buffer, new Uint8Array([255]).buffer), true);
      assert.strictEqual(_.isEqual(buffer, new ArrayBuffer(1)), false);
    }
    else {
      skipAssert(assert, 2);
    }
  });

  QUnit.test('should compare array views', function(assert) {
    assert.expect(2);

    // Run once against the local realm and once against the foreign one.
    lodashStable.times(2, function(index) {
      var ns = index ? realm : root;

      // For each typed-array kind, build: two same-kind views over one
      // buffer (equal), a different-kind view (not equal), and a
      // different-kind view over a larger buffer (not equal). Plain
      // fallback constructors are used when the kind is unsupported.
      var pairs = lodashStable.map(arrayViews, function(type, viewIndex) {
        var otherType = arrayViews[(viewIndex + 1) % arrayViews.length],
            CtorA = ns[type] || function(n) { this.n = n; },
            CtorB = ns[otherType] || function(n) { this.n = n; },
            bufferA = ns[type] ? new ns.ArrayBuffer(8) : 8,
            bufferB = ns[otherType] ? new ns.ArrayBuffer(8) : 8,
            bufferC = ns[otherType] ? new ns.ArrayBuffer(16) : 16;

        return [new CtorA(bufferA), new CtorA(bufferA), new CtorB(bufferB), new CtorB(bufferC)];
      });

      var expected = lodashStable.map(pairs, lodashStable.constant([true, false, false]));

      var actual = lodashStable.map(pairs, function(pair) {
        return [_.isEqual(pair[0], pair[1]), _.isEqual(pair[0], pair[2]), _.isEqual(pair[2], pair[3])];
      });

      assert.deepEqual(actual, expected);
    });
  });

  QUnit.test('should compare buffers', function(assert) {
    assert.expect(3);

    if (Buffer) {
      var buffer = new Buffer([1]);

      assert.strictEqual(_.isEqual(buffer, new Buffer([1])), true);
      assert.strictEqual(_.isEqual(buffer, new Buffer([2])), false);
      assert.strictEqual(_.isEqual(buffer, new Uint8Array([1])), false);
    }
    else {
      skipAssert(assert, 3);
    }
  });

  QUnit.test('should compare date objects', function(assert) {
    assert.expect(4);

    var date = new Date(2012, 4, 23);

    assert.strictEqual(_.isEqual(date, new Date(2012, 4, 23)), true);
    // Two invalid dates (both `NaN` timestamps) compare equal.
    assert.strictEqual(_.isEqual(new Date('a'), new Date('b')), true);
    assert.strictEqual(_.isEqual(date, new Date(2013, 3, 25)), false);
    // A duck-typed `getTime` impostor is not a Date.
    assert.strictEqual(_.isEqual(date, { 'getTime': lodashStable.constant(+date) }), false);
  });

  QUnit.test('should compare error objects', function(assert) {
    assert.expect(1);

    // For each error type: same type + same message is equal; a
    // different type or a different message is not.
    var pairs = lodashStable.map([
      'Error',
      'EvalError',
      'RangeError',
      'ReferenceError',
      'SyntaxError',
      'TypeError',
      'URIError'
    ], function(type, index, errorTypes) {
      var otherType = errorTypes[++index % errorTypes.length],
          CtorA = root[type],
          CtorB = root[otherType];

      return [new CtorA('a'), new CtorA('a'), new CtorB('a'), new CtorB('b')];
    });

    var expected = lodashStable.map(pairs, lodashStable.constant([true, false, false]));

    var actual = lodashStable.map(pairs, function(pair) {
      return [_.isEqual(pair[0], pair[1]), _.isEqual(pair[0], pair[2]), _.isEqual(pair[2], pair[3])];
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should compare functions', function(assert) {
    assert.expect(2);

    // Functions are compared by reference, never by source.
    function a() { return 1 + 2; }
    function b() { return 1 + 2; }

    assert.strictEqual(_.isEqual(a, a), true);
    assert.strictEqual(_.isEqual(a, b), false);
  });

  QUnit.test('should compare maps', function(assert) {
    assert.expect(8);

    if (Map) {
      lodashStable.each([[map, new Map], [map, realm.map]], function(maps) {
        var map1 = maps[0],
            map2 = maps[1];

        map1.set('a', 1);
        map2.set('b', 2);
        assert.strictEqual(_.isEqual(map1, map2), false);

        map1.set('b', 2);
        map2.set('a', 1);
        assert.strictEqual(_.isEqual(map1, map2), true);

        // Insertion order must not matter.
        map1.delete('a');
        map1.set('a', 1);
        assert.strictEqual(_.isEqual(map1, map2), true);

        map2.delete('a');
        assert.strictEqual(_.isEqual(map1, map2), false);

        map1.clear();
        map2.clear();
      });
    }
    else {
      skipAssert(assert, 8);
    }
  });

  QUnit.test('should compare maps with circular references', function(assert) {
    assert.expect(2);

    if (Map) {
      var map1 = new Map,
          map2 = new Map;

      map1.set('a', map1);
      map2.set('a', map2);
      assert.strictEqual(_.isEqual(map1, map2), true);

      map1.set('b', 1);
      map2.set('b', 2);
      assert.strictEqual(_.isEqual(map1, map2), false);
    }
    else {
      skipAssert(assert, 2);
    }
  });

  QUnit.test('should compare promises by reference', function(assert) {
    assert.expect(4);

    if (promise) {
      // Promises are opaque: only identity makes them equal.
      lodashStable.each([[promise, Promise.resolve(1)], [promise, realm.promise]], function(promises) {
        var promise1 = promises[0],
            promise2 = promises[1];

        assert.strictEqual(_.isEqual(promise1, promise2), false);
        assert.strictEqual(_.isEqual(promise1, promise1), true);
      });
    }
    else {
      skipAssert(assert, 4);
    }
  });

  QUnit.test('should compare regexes', function(assert) {
    assert.expect(5);

    // Flag order is irrelevant; flag set and source are not.
    assert.strictEqual(_.isEqual(/x/gim, /x/gim), true);
    assert.strictEqual(_.isEqual(/x/gim, /x/mgi), true);
    assert.strictEqual(_.isEqual(/x/gi, /x/g), false);
    assert.strictEqual(_.isEqual(/x/, /y/), false);
    // A regex-shaped plain object is not a regex.
    assert.strictEqual(_.isEqual(/x/g, { 'global': true, 'ignoreCase': false, 'multiline': false, 'source': 'x' }), false);
  });

  QUnit.test('should compare sets', function(assert) {
    assert.expect(8);

    if (Set) {
      lodashStable.each([[set, new Set], [set, realm.set]], function(sets) {
        var set1 = sets[0],
            set2 = sets[1];

        set1.add(1);
        set2.add(2);
        assert.strictEqual(_.isEqual(set1, set2), false);

        set1.add(2);
        set2.add(1);
        assert.strictEqual(_.isEqual(set1, set2), true);

        // Insertion order must not matter.
        set1.delete(1);
        set1.add(1);
        assert.strictEqual(_.isEqual(set1, set2), true);

        set2.delete(1);
        assert.strictEqual(_.isEqual(set1, set2), false);

        set1.clear();
        set2.clear();
      });
    }
    else {
      skipAssert(assert, 8);
    }
  });

  QUnit.test('should compare sets with circular references', function(assert) {
    assert.expect(2);

    if (Set) {
      var set1 = new Set,
          set2 = new Set;

      set1.add(set1);
      set2.add(set2);
      assert.strictEqual(_.isEqual(set1, set2), true);

      set1.add(1);
      set2.add(2);
      assert.strictEqual(_.isEqual(set1, set2), false);
    }
    else {
      skipAssert(assert, 2);
    }
  });

  QUnit.test('should compare symbol properties', function(assert) {
    assert.expect(3);

    if (Symbol) {
      var object1 = { 'a': 1 },
          object2 = { 'a': 1 };

      object1[symbol1] = { 'a': { 'b': 2 } };
      object2[symbol1] = { 'a': { 'b': 2 } };

      // Non-enumerable symbol keys are ignored by the comparison.
      defineProperty(object2, symbol2, {
        'configurable': true,
        'enumerable': false,
        'writable': true,
        'value': 2
      });

      assert.strictEqual(_.isEqual(object1, object2), true);

      object2[symbol1] = { 'a': 1 };
      assert.strictEqual(_.isEqual(object1, object2), false);

      // A different symbol key with an equal value is still unequal.
      delete object2[symbol1];
      object2[Symbol('a')] = { 'a': { 'b': 2 } };
      assert.strictEqual(_.isEqual(object1, object2), false);
    }
    else {
      skipAssert(assert, 3);
    }
  });

  QUnit.test('should compare wrapped values', function(assert) {
    assert.expect(32);

    var stamp = +new Date;

    // Each triple is [value, equal-partner, unequal-partner].
    var values = [
      [[1, 2], [1, 2], [1, 2, 3]],
      [true, true, false],
      [new Date(stamp), new Date(stamp), new Date(stamp - 100)],
      [{ 'a': 1, 'b': 2 }, { 'a': 1, 'b': 2 }, { 'a': 1, 'b': 1 }],
      [1, 1, 2],
      [NaN, NaN, Infinity],
      [/x/, /x/, /x/i],
      ['a', 'a', 'A']
    ];

    lodashStable.each(values, function(vals) {
      if (!isNpm) {
        // Wrappers compare by their wrapped values.
        var wrapped1 = _(vals[0]),
            wrapped2 = _(vals[1]),
            actual = wrapped1.isEqual(wrapped2);

        assert.strictEqual(actual, true);
        assert.strictEqual(_.isEqual(_(actual), _(true)), true);

        wrapped1 = _(vals[0]);
        wrapped2 = _(vals[2]);

        actual = wrapped1.isEqual(wrapped2);
        assert.strictEqual(actual, false);
        assert.strictEqual(_.isEqual(_(actual), _(false)), true);
      }
      else {
        skipAssert(assert, 4);
      }
    });
  });

  QUnit.test('should compare wrapped and non-wrapped values', function(assert) {
    assert.expect(4);

    if (!isNpm) {
      var object1 = _({ 'a': 1, 'b': 2 }),
          object2 = { 'a': 1, 'b': 2 };

      assert.strictEqual(object1.isEqual(object2), true);
      assert.strictEqual(_.isEqual(object1, object2), true);

      object1 = _({ 'a': 1, 'b': 2 });
      object2 = { 'a': 1, 'b': 1 };

      assert.strictEqual(object1.isEqual(object2), false);
      assert.strictEqual(_.isEqual(object1, object2), false);
    }
    else {
      skipAssert(assert, 4);
    }
  });

  QUnit.test('should work as an iteratee for `_.every`', function(assert) {
    assert.expect(1);

    var actual = lodashStable.every([1, 1, 1], lodashStable.partial(_.isEqual, 1));

    assert.ok(actual);
  });

  QUnit.test('should not error on DOM elements', function(assert) {
    assert.expect(1);

    if (document) {
      var element1 = document.createElement('div'),
          element2 = element1.cloneNode(true);

      // Host objects must be handled without throwing; a clone is a
      // distinct element, hence not equal.
      try {
        assert.strictEqual(_.isEqual(element1, element2), false);
      } catch (e) {
        assert.ok(false, e.message);
      }
    }
    else {
      skipAssert(assert);
    }
  });

  QUnit.test('should return `true` for like-objects from different documents', function(assert) {
    assert.expect(4);

    if (realm.object) {
      assert.strictEqual(_.isEqual([1], realm.array), true);
      assert.strictEqual(_.isEqual([2], realm.array), false);
      assert.strictEqual(_.isEqual({ 'a': 1 }, realm.object), true);
      assert.strictEqual(_.isEqual({ 'a': 2 }, realm.object), false);
    }
    else {
      skipAssert(assert, 4);
    }
  });

  QUnit.test('should return `false` for objects with custom `toString` methods', function(assert) {
    assert.expect(1);

    // The object's `toString` returns the compared primitive; coercion
    // through it must never make the pair equal.
    var primitive,
        object = { 'toString': function() { return primitive; } },
        values = [true, null, 1, 'a', undefined],
        expected = lodashStable.map(values, stubFalse);

    var actual = lodashStable.map(values, function(value) {
      primitive = value;
      return _.isEqual(object, value);
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should return an unwrapped value when implicitly chaining', function(assert) {
    assert.expect(1);

    if (!isNpm) {
      assert.strictEqual(_('a').isEqual('a'), true);
    }
    else {
      skipAssert(assert);
    }
  });

  QUnit.test('should return a wrapped value when explicitly chaining', function(assert) {
    assert.expect(1);

    if (!isNpm) {
      assert.ok(_('a').chain().isEqual('a') instanceof _);
    }
    else {
      skipAssert(assert);
    }
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.isEqualWith');

(function() {
  QUnit.test('should provide correct `customizer` arguments', function(assert) {
    assert.expect(1);

    var argsList = [],
        object1 = { 'a': [1, 2], 'b': null },
        object2 = { 'a': [1, 2], 'b': null };

    // Make the objects mutually circular so the customizer also sees a
    // comparison resolved through the traversal stack.
    object1.b = object2;
    object2.b = object1;

    var expected = [
      [object1, object2],
      [object1.a, object2.a, 'a', object1, object2],
      [object1.a[0], object2.a[0], 0, object1.a, object2.a],
      [object1.a[1], object2.a[1], 1, object1.a, object2.a],
      [object1.b, object2.b, 'b', object1.b, object2.b]
    ];

    // Fix: the customizer previously declared an unused parameter named
    // `assert`, shadowing the outer QUnit `assert`; it is dropped here.
    _.isEqualWith(object1, object2, function() {
      // Strip the internal trailing argument (present when more than two
      // arguments are passed) before recording the call.
      var length = arguments.length,
          args = slice.call(arguments, 0, length - (length > 2 ? 1 : 0));

      argsList.push(args);
    });

    assert.deepEqual(argsList, expected);
  });

  QUnit.test('should handle comparisons when `customizer` returns `undefined`', function(assert) {
    assert.expect(3);

    // An `undefined` customizer result defers to default equality.
    assert.strictEqual(_.isEqualWith('a', 'a', noop), true);
    assert.strictEqual(_.isEqualWith(['a'], ['a'], noop), true);
    assert.strictEqual(_.isEqualWith({ '0': 'a' }, { '0': 'a' }, noop), true);
  });

  QUnit.test('should not handle comparisons when `customizer` returns `true`', function(assert) {
    assert.expect(3);

    var customizer = function(value) {
      return _.isString(value) || undefined;
    };

    // The customizer's verdict overrides default equality for strings.
    assert.strictEqual(_.isEqualWith('a', 'b', customizer), true);
    assert.strictEqual(_.isEqualWith(['a'], ['b'], customizer), true);
    assert.strictEqual(_.isEqualWith({ '0': 'a' }, { '0': 'b' }, customizer), true);
  });

  QUnit.test('should not handle comparisons when `customizer` returns `false`', function(assert) {
    assert.expect(3);

    var customizer = function(value) {
      return _.isString(value) ? false : undefined;
    };

    assert.strictEqual(_.isEqualWith('a', 'a', customizer), false);
    assert.strictEqual(_.isEqualWith(['a'], ['a'], customizer), false);
    assert.strictEqual(_.isEqualWith({ '0': 'a' }, { '0': 'a' }, customizer), false);
  });

  QUnit.test('should return a boolean value even when `customizer` does not', function(assert) {
    assert.expect(2);

    // Truthy non-boolean customizer results coerce to `true`.
    var actual = _.isEqualWith('a', 'b', stubC);
    assert.strictEqual(actual, true);

    // Falsey (but defined) customizer results coerce to `false`.
    var values = _.without(falsey, undefined),
        expected = lodashStable.map(values, stubFalse);

    actual = [];
    lodashStable.each(values, function(value) {
      actual.push(_.isEqualWith('a', 'a', lodashStable.constant(value)));
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should ensure `customizer` is a function', function(assert) {
    assert.expect(1);

    // An array in the customizer slot must not be invoked; the
    // comparison falls back to base equality.
    var array = [1, 2, 3],
        eq = _.partial(_.isEqualWith, array),
        actual = lodashStable.map([array, [1, 0, 3]], eq);

    assert.deepEqual(actual, [true, false]);
  });

  QUnit.test('should call `customizer` for values maps and sets', function(assert) {
    assert.expect(2);

    var value = { 'a': { 'b': 2 } };

    if (Map) {
      var map1 = new Map;
      map1.set('a', value);

      var map2 = new Map;
      map2.set('a', value);
    }
    if (Set) {
      var set1 = new Set;
      set1.add(value);

      var set2 = new Set;
      set2.add(value);
    }
    lodashStable.each([[map1, map2], [set1, set2]], function(pair, index) {
      if (pair[0]) {
        var argsList = [],
            array = lodashStable.toArray(pair[0]);

        var expected = [
          [pair[0], pair[1]],
          [array[0], array[0], 0, array, array],
          [array[0][0], array[0][0], 0, array[0], array[0]],
          [array[0][1], array[0][1], 1, array[0], array[0]]
        ];

        // For the Set case only the first two argument lists are expected.
        if (index) {
          expected.length = 2;
        }
        _.isEqualWith(pair[0], pair[1], function() {
          var length = arguments.length,
              args = slice.call(arguments, 0, length - (length > 2 ? 1 : 0));

          argsList.push(args);
        });

        assert.deepEqual(argsList, expected, index ? 'Set' : 'Map');
      }
      else {
        skipAssert(assert);
      }
    });
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.isError');

(function() {
  QUnit.test('should return `true` for error objects', function(assert) {
    assert.expect(1);

    // Every built-in error fixture should be recognized.
    var results = lodashStable.map(errors, function(error) {
      return _.isError(error) === true;
    });

    assert.deepEqual(results, lodashStable.map(errors, stubTrue));
  });

  QUnit.test('should return `true` for subclassed values', function(assert) {
    assert.expect(1);
    assert.strictEqual(_.isError(new CustomError('x')), true);
  });

  QUnit.test('should return `false` for non error objects', function(assert) {
    assert.expect(12);

    var expected = lodashStable.map(falsey, stubFalse);

    // Falsey values; index 0 exercises the no-argument call.
    var results = lodashStable.map(falsey, function(value, index) {
      return index ? _.isError(value) : _.isError();
    });

    assert.deepEqual(results, expected);

    // A spread of common non-error types, asserted one at a time.
    lodashStable.each(
      [args, [1, 2, 3], true, new Date, _, slice, { 'a': 1 }, 1, /x/, 'a', symbol],
      function(value) {
        assert.strictEqual(_.isError(value), false);
      }
    );
  });

  QUnit.test('should return `false` for plain objects', function(assert) {
    assert.expect(1);

    // Duck-typed error shapes don't count.
    assert.strictEqual(_.isError({ 'name': 'Error', 'message': '' }), false);
  });

  QUnit.test('should work with an error object from another realm', function(assert) {
    assert.expect(1);

    // Skip when no cross-realm fixtures are available.
    if (!realm.errors) {
      skipAssert(assert);
      return;
    }
    var results = lodashStable.map(realm.errors, function(error) {
      return _.isError(error) === true;
    });

    assert.deepEqual(results, lodashStable.map(realm.errors, stubTrue));
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.isFinite');

(function() {
  // Helper: assert that `_.isFinite` yields the same boolean for every
  // value in `values`.
  function expectAll(assert, values, result) {
    var expected = lodashStable.map(values, result ? stubTrue : stubFalse);
    assert.deepEqual(lodashStable.map(values, _.isFinite), expected);
  }

  QUnit.test('should return `true` for finite values', function(assert) {
    assert.expect(1);
    expectAll(assert, [0, 1, 3.14, -1], true);
  });

  QUnit.test('should return `false` for non-finite values', function(assert) {
    assert.expect(1);

    // Boxed numbers are rejected along with the non-finite specials.
    expectAll(assert, [NaN, Infinity, -Infinity, Object(1)], false);
  });

  QUnit.test('should return `false` for non-numeric values', function(assert) {
    assert.expect(10);

    expectAll(assert, [undefined, [], true, '', ' ', '2px'], false);

    // A spread of common non-numeric types, asserted one at a time.
    lodashStable.each(
      [args, [1, 2, 3], true, new Date, new Error, { 'a': 1 }, /x/, 'a', symbol],
      function(value) {
        assert.strictEqual(_.isFinite(value), false);
      }
    );
  });

  QUnit.test('should return `false` for numeric string values', function(assert) {
    assert.expect(1);

    // Unlike the global `isFinite`, numeric strings are not coerced.
    expectAll(assert, ['2', '0', '08'], false);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.isFunction');

(function() {
  QUnit.test('should return `true` for functions', function(assert) {
    assert.expect(2);

    assert.strictEqual(_.isFunction(_), true);
    assert.strictEqual(_.isFunction(slice), true);
  });

  QUnit.test('should return `true` for async functions', function(assert) {
    assert.expect(1);

    // `asyncFunc` is only a real async function on engines that support
    // the syntax; both sides of the comparison degrade together.
    assert.strictEqual(_.isFunction(asyncFunc), typeof asyncFunc == 'function');
  });

  QUnit.test('should return `true` for generator functions', function(assert) {
    assert.expect(1);

    // Same engine-dependent pattern as the async-function test above.
    assert.strictEqual(_.isFunction(genFunc), typeof genFunc == 'function');
  });

  QUnit.test('should return `true` for the `Proxy` constructor', function(assert) {
    assert.expect(1);

    if (Proxy) {
      assert.strictEqual(_.isFunction(Proxy), true);
    }
    else {
      skipAssert(assert);
    }
  });

  QUnit.test('should return `true` for array view constructors', function(assert) {
    assert.expect(1);

    // Expectations are derived from each constructor's `toString` tag so
    // the test adapts to engines lacking some typed-array kinds.
    var expected = lodashStable.map(arrayViews, function(type) {
      return objToString.call(root[type]) == funcTag;
    });

    var actual = lodashStable.map(arrayViews, function(type) {
      return _.isFunction(root[type]);
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should return `false` for non-functions', function(assert) {
    assert.expect(12);

    var expected = lodashStable.map(falsey, stubFalse);

    // Falsey values; index 0 exercises the no-argument call.
    var actual = lodashStable.map(falsey, function(value, index) {
      return index ? _.isFunction(value) : _.isFunction();
    });

    assert.deepEqual(actual, expected);

    assert.strictEqual(_.isFunction(args), false);
    assert.strictEqual(_.isFunction([1, 2, 3]), false);
    assert.strictEqual(_.isFunction(true), false);
    assert.strictEqual(_.isFunction(new Date), false);
    assert.strictEqual(_.isFunction(new Error), false);
    assert.strictEqual(_.isFunction({ 'a': 1 }), false);
    assert.strictEqual(_.isFunction(1), false);
    assert.strictEqual(_.isFunction(/x/), false);
    assert.strictEqual(_.isFunction('a'), false);
    assert.strictEqual(_.isFunction(symbol), false);

    if (document) {
      // Host collections (e.g. legacy NodeLists that are callable in
      // some browsers) must not be treated as functions.
      assert.strictEqual(_.isFunction(document.getElementsByTagName('body')), false);
    }
    else {
      skipAssert(assert);
    }
  });

  QUnit.test('should work with a function from another realm', function(assert) {
    assert.expect(1);

    if (realm.function) {
      assert.strictEqual(_.isFunction(realm.function), true);
    }
    else {
      skipAssert(assert);
    }
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('isInteger methods');
// Shared test body for `_.isInteger` and `_.isSafeInteger`. Each title is
// prefixed with the method name so the two loop iterations do not register
// duplicate, ambiguous test names (the 2nd and 3rd titles previously
// omitted the prefix that the 1st already had).
lodashStable.each(['isInteger', 'isSafeInteger'], function(methodName) {
  var func = _[methodName],
      isSafe = methodName == 'isSafeInteger';

  QUnit.test('`_.' + methodName + '` should return `true` for integer values', function(assert) {
    assert.expect(2);

    var values = [-1, 0, 1],
        expected = lodashStable.map(values, stubTrue);

    var actual = lodashStable.map(values, function(value) {
      return func(value);
    });

    assert.deepEqual(actual, expected);

    // `MAX_INTEGER` is an integer but exceeds the safe-integer range, so only
    // `_.isInteger` accepts it.
    assert.strictEqual(func(MAX_INTEGER), !isSafe);
  });

  QUnit.test('`_.' + methodName + '` should return `false` for non-integer number values', function(assert) {
    assert.expect(1);

    var values = [NaN, Infinity, -Infinity, Object(1), 3.14],
        expected = lodashStable.map(values, stubFalse);

    var actual = lodashStable.map(values, function(value) {
      return func(value);
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('`_.' + methodName + '` should return `false` for non-numeric values', function(assert) {
    assert.expect(10);

    // Of the falsey values only `0` is an integer.
    var expected = lodashStable.map(falsey, function(value) {
      return value === 0;
    });

    var actual = lodashStable.map(falsey, function(value, index) {
      return index ? func(value) : func();
    });

    assert.deepEqual(actual, expected);

    assert.strictEqual(func(args), false);
    assert.strictEqual(func([1, 2, 3]), false);
    assert.strictEqual(func(true), false);
    assert.strictEqual(func(new Date), false);
    assert.strictEqual(func(new Error), false);
    assert.strictEqual(func({ 'a': 1 }), false);
    assert.strictEqual(func(/x/), false);
    assert.strictEqual(func('a'), false);
    assert.strictEqual(func(symbol), false);
  });
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.isLength');

(function() {
  QUnit.test('should return `true` for lengths', function(assert) {
    assert.expect(1);

    // Valid lengths: non-negative integers up to `MAX_SAFE_INTEGER`.
    var lengths = [0, 3, MAX_SAFE_INTEGER];

    assert.deepEqual(
      lodashStable.map(lengths, _.isLength),
      lodashStable.map(lengths, stubTrue)
    );
  });

  QUnit.test('should return `false` for non-lengths', function(assert) {
    assert.expect(1);

    // Negative, non-numeric, fractional, and out-of-range values all fail.
    var nonLengths = [-1, '1', 1.1, MAX_SAFE_INTEGER + 1];

    assert.deepEqual(
      lodashStable.map(nonLengths, _.isLength),
      lodashStable.map(nonLengths, stubFalse)
    );
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.isMap');
(function() {
QUnit.test('should return `true` for maps', function(assert) {
assert.expect(1);
if (Map) {
assert.strictEqual(_.isMap(map), true);
}
else {
skipAssert(assert);
}
});
QUnit.test('should return `false` for non-maps', function(assert) {
assert.expect(14);
var expected = lodashStable.map(falsey, stubFalse);
var actual = lodashStable.map(falsey, function(value, index) {
return index ? _.isMap(value) : _.isMap();
});
assert.deepEqual(actual, expected);
assert.strictEqual(_.isMap(args), false);
assert.strictEqual(_.isMap([1, 2, 3]), false);
assert.strictEqual(_.isMap(true), false);
assert.strictEqual(_.isMap(new Date), false);
assert.strictEqual(_.isMap(new Error), false);
assert.strictEqual(_.isMap(_), false);
assert.strictEqual(_.isMap(slice), false);
assert.strictEqual(_.isMap({ 'a': 1 }), false);
assert.strictEqual(_.isMap(1), false);
assert.strictEqual(_.isMap(/x/), false);
assert.strictEqual(_.isMap('a'), false);
assert.strictEqual(_.isMap(symbol), false);
assert.strictEqual(_.isMap(weakMap), false);
});
QUnit.test('should work for objects with a non-function `constructor` (test in IE 11)', function(assert) {
assert.expect(1);
var values = [false, true],
expected = lodashStable.map(values, stubFalse);
var actual = lodashStable.map(values, function(value) {
return _.isMap({ 'constructor': value });
});
assert.deepEqual(actual, expected);
});
QUnit.test('should work with maps from another realm', function(assert) {
assert.expect(1);
if (realm.map) {
assert.strictEqual(_.isMap(realm.map), true);
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.isMatchWith');
(function() {
QUnit.test('should provide correct `customizer` arguments', function(assert) {
  assert.expect(1);

  var argsList = [],
      object1 = { 'a': [1, 2], 'b': null },
      object2 = { 'a': [1, 2], 'b': null };

  // Cross-link the objects so the walk is circular; the customizer should
  // still be invoked exactly once per compared pair.
  object1.b = object2;
  object2.b = object1;

  var expected = [
    [object1.a, object2.a, 'a', object1, object2],
    [object1.a[0], object2.a[0], 0, object1.a, object2.a],
    [object1.a[1], object2.a[1], 1, object1.a, object2.a],
    [object1.b, object2.b, 'b', object1, object2],
    [object1.b.a, object2.b.a, 'a', object1.b, object2.b],
    [object1.b.a[0], object2.b.a[0], 0, object1.b.a, object2.b.a],
    [object1.b.a[1], object2.b.a[1], 1, object1.b.a, object2.b.a],
    [object1.b.b, object2.b.b, 'b', object1.b, object2.b]
  ];

  // The callback previously named its first parameter `assert`, shadowing
  // QUnit's `assert` and misdescribing the customizer's arguments
  // (objValue, srcValue, key, object, source, stack). It only reads
  // `arguments`, so leave the parameter list empty; the trailing `stack`
  // argument is sliced off before recording.
  _.isMatchWith(object1, object2, function() {
    argsList.push(slice.call(arguments, 0, -1));
  });

  assert.deepEqual(argsList, expected);
});
QUnit.test('should handle comparisons when `customizer` returns `undefined`', function(assert) {
assert.expect(1);
assert.strictEqual(_.isMatchWith({ 'a': 1 }, { 'a': 1 }, noop), true);
});
QUnit.test('should not handle comparisons when `customizer` returns `true`', function(assert) {
assert.expect(2);
var customizer = function(value) {
return _.isString(value) || undefined;
};
assert.strictEqual(_.isMatchWith(['a'], ['b'], customizer), true);
assert.strictEqual(_.isMatchWith({ '0': 'a' }, { '0': 'b' }, customizer), true);
});
QUnit.test('should not handle comparisons when `customizer` returns `false`', function(assert) {
assert.expect(2);
var customizer = function(value) {
return _.isString(value) ? false : undefined;
};
assert.strictEqual(_.isMatchWith(['a'], ['a'], customizer), false);
assert.strictEqual(_.isMatchWith({ '0': 'a' }, { '0': 'a' }, customizer), false);
});
QUnit.test('should return a boolean value even when `customizer` does not', function(assert) {
assert.expect(2);
var object = { 'a': 1 },
actual = _.isMatchWith(object, { 'a': 1 }, stubA);
assert.strictEqual(actual, true);
var expected = lodashStable.map(falsey, stubFalse);
actual = [];
lodashStable.each(falsey, function(value) {
actual.push(_.isMatchWith(object, { 'a': 2 }, lodashStable.constant(value)));
});
assert.deepEqual(actual, expected);
});
QUnit.test('should provide `stack` to `customizer`', function(assert) {
assert.expect(1);
var actual;
_.isMatchWith({ 'a': 1 }, { 'a': 1 }, function() {
actual = _.last(arguments);
});
assert.ok(isNpm
? actual.constructor.name == 'Stack'
: actual instanceof mapCaches.Stack
);
});
QUnit.test('should ensure `customizer` is a function', function(assert) {
assert.expect(1);
var object = { 'a': 1 },
matches = _.partial(_.isMatchWith, object),
actual = lodashStable.map([object, { 'a': 2 }], matches);
assert.deepEqual(actual, [true, false]);
});
QUnit.test('should call `customizer` for values maps and sets', function(assert) {
assert.expect(2);
var value = { 'a': { 'b': 2 } };
if (Map) {
var map1 = new Map;
map1.set('a', value);
var map2 = new Map;
map2.set('a', value);
}
if (Set) {
var set1 = new Set;
set1.add(value);
var set2 = new Set;
set2.add(value);
}
lodashStable.each([[map1, map2], [set1, set2]], function(pair, index) {
if (pair[0]) {
var argsList = [],
array = lodashStable.toArray(pair[0]),
object1 = { 'a': pair[0] },
object2 = { 'a': pair[1] };
var expected = [
[pair[0], pair[1], 'a', object1, object2],
[array[0], array[0], 0, array, array],
[array[0][0], array[0][0], 0, array[0], array[0]],
[array[0][1], array[0][1], 1, array[0], array[0]]
];
if (index) {
expected.length = 2;
}
_.isMatchWith({ 'a': pair[0] }, { 'a': pair[1] }, function() {
argsList.push(slice.call(arguments, 0, -1));
});
assert.deepEqual(argsList, expected, index ? 'Set' : 'Map');
}
else {
skipAssert(assert);
}
});
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.isNaN');
(function() {
QUnit.test('should return `true` for NaNs', function(assert) {
assert.expect(2);
// Both the primitive and its boxed `Number` wrapper count as NaN
// (unlike the global `isNaN`, which coerces anything non-numeric).
assert.strictEqual(_.isNaN(NaN), true);
assert.strictEqual(_.isNaN(Object(NaN)), true);
});
QUnit.test('should return `false` for non-NaNs', function(assert) {
assert.expect(14);
// `value !== value` is true only for NaN, so it marks which falsey
// entries (just NaN itself) are expected to pass.
var expected = lodashStable.map(falsey, function(value) {
return value !== value;
});
var actual = lodashStable.map(falsey, function(value, index) {
return index ? _.isNaN(value) : _.isNaN();
});
assert.deepEqual(actual, expected);
// One representative value per non-NaN type, including a boxed number.
assert.strictEqual(_.isNaN(args), false);
assert.strictEqual(_.isNaN([1, 2, 3]), false);
assert.strictEqual(_.isNaN(true), false);
assert.strictEqual(_.isNaN(new Date), false);
assert.strictEqual(_.isNaN(new Error), false);
assert.strictEqual(_.isNaN(_), false);
assert.strictEqual(_.isNaN(slice), false);
assert.strictEqual(_.isNaN({ 'a': 1 }), false);
assert.strictEqual(_.isNaN(1), false);
assert.strictEqual(_.isNaN(Object(1)), false);
assert.strictEqual(_.isNaN(/x/), false);
assert.strictEqual(_.isNaN('a'), false);
assert.strictEqual(_.isNaN(symbol), false);
});
QUnit.test('should work with `NaN` from another realm', function(assert) {
assert.expect(1);
// Cross-realm values (e.g. from an iframe) have foreign prototypes;
// detection must not rely on same-realm identity.
if (realm.object) {
assert.strictEqual(_.isNaN(realm.nan), true);
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.isNative');
(function() {
QUnit.test('should return `true` for native methods', function(assert) {
assert.expect(1);
var values = [Array, body && body.cloneNode, create, root.encodeURI, Promise, slice, Uint8Array],
expected = lodashStable.map(values, Boolean),
actual = lodashStable.map(values, _.isNative);
assert.deepEqual(actual, expected);
});
QUnit.test('should return `false` for non-native methods', function(assert) {
assert.expect(12);
var expected = lodashStable.map(falsey, stubFalse);
var actual = lodashStable.map(falsey, function(value, index) {
return index ? _.isNative(value) : _.isNative();
});
assert.deepEqual(actual, expected);
assert.strictEqual(_.isNative(args), false);
assert.strictEqual(_.isNative([1, 2, 3]), false);
assert.strictEqual(_.isNative(true), false);
assert.strictEqual(_.isNative(new Date), false);
assert.strictEqual(_.isNative(new Error), false);
assert.strictEqual(_.isNative(_), false);
assert.strictEqual(_.isNative({ 'a': 1 }), false);
assert.strictEqual(_.isNative(1), false);
assert.strictEqual(_.isNative(/x/), false);
assert.strictEqual(_.isNative('a'), false);
assert.strictEqual(_.isNative(symbol), false);
});
QUnit.test('should work with native functions from another realm', function(assert) {
assert.expect(2);
if (realm.element) {
assert.strictEqual(_.isNative(realm.element.cloneNode), true);
}
else {
skipAssert(assert);
}
if (realm.object) {
assert.strictEqual(_.isNative(realm.object.valueOf), true);
}
else {
skipAssert(assert);
}
});
QUnit.test('should throw an error if core-js is detected', function(assert) {
assert.expect(1);
if (!isModularize) {
var lodash = _.runInContext({
'__core-js_shared__': {}
});
assert.raises(function() { lodash.isNative(noop); });
}
else {
skipAssert(assert);
}
});
QUnit.test('should detect methods masquerading as native (test in Node.js)', function(assert) {
assert.expect(2);
if (!amd && _._baseEach) {
var path = require('path'),
basePath = path.dirname(filePath),
uid = 'e0gvgyrad1jor',
coreKey = '__core-js_shared__',
fakeSrcKey = 'Symbol(src)_1.' + uid;
root[coreKey] = { 'keys': { 'IE_PROTO': 'Symbol(IE_PROTO)_3.' + uid } };
emptyObject(require.cache);
var baseIsNative = interopRequire(path.join(basePath, '_baseIsNative'));
assert.strictEqual(baseIsNative(slice), true);
slice[fakeSrcKey] = slice + '';
assert.strictEqual(baseIsNative(slice), false);
delete slice[fakeSrcKey];
delete root[coreKey];
}
else {
skipAssert(assert, 2);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.isNil');
(function() {
QUnit.test('should return `true` for nullish values', function(assert) {
assert.expect(3);
assert.strictEqual(_.isNil(null), true);
assert.strictEqual(_.isNil(), true);
assert.strictEqual(_.isNil(undefined), true);
});
QUnit.test('should return `false` for non-nullish values', function(assert) {
assert.expect(13);
var expected = lodashStable.map(falsey, function(value) {
return value == null;
});
var actual = lodashStable.map(falsey, function(value, index) {
return index ? _.isNil(value) : _.isNil();
});
assert.deepEqual(actual, expected);
assert.strictEqual(_.isNil(args), false);
assert.strictEqual(_.isNil([1, 2, 3]), false);
assert.strictEqual(_.isNil(true), false);
assert.strictEqual(_.isNil(new Date), false);
assert.strictEqual(_.isNil(new Error), false);
assert.strictEqual(_.isNil(_), false);
assert.strictEqual(_.isNil(slice), false);
assert.strictEqual(_.isNil({ 'a': 1 }), false);
assert.strictEqual(_.isNil(1), false);
assert.strictEqual(_.isNil(/x/), false);
assert.strictEqual(_.isNil('a'), false);
if (Symbol) {
assert.strictEqual(_.isNil(symbol), false);
}
else {
skipAssert(assert);
}
});
QUnit.test('should work with nils from another realm', function(assert) {
assert.expect(2);
if (realm.object) {
assert.strictEqual(_.isNil(realm.null), true);
assert.strictEqual(_.isNil(realm.undefined), true);
}
else {
skipAssert(assert, 2);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.isNull');

(function() {
  QUnit.test('should return `true` for `null` values', function(assert) {
    assert.expect(1);

    assert.strictEqual(_.isNull(null), true);
  });

  QUnit.test('should return `false` for non `null` values', function(assert) {
    assert.expect(13);

    // Of the falsey values only `null` itself should be accepted.
    var expected = lodashStable.map(falsey, function(value) {
      return value === null;
    });

    var actual = lodashStable.map(falsey, function(value, index) {
      return index ? _.isNull(value) : _.isNull();
    });

    assert.deepEqual(actual, expected);

    // One representative non-null value per type; each must be rejected.
    var others = [args, [1, 2, 3], true, new Date, new Error, _, slice, { 'a': 1 }, 1, /x/, 'a', symbol];

    lodashStable.each(others, function(value) {
      assert.strictEqual(_.isNull(value), false);
    });
  });

  QUnit.test('should work with nulls from another realm', function(assert) {
    assert.expect(1);

    if (realm.object) {
      assert.strictEqual(_.isNull(realm.null), true);
    }
    else {
      skipAssert(assert);
    }
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.isNumber');
(function() {
QUnit.test('should return `true` for numbers', function(assert) {
assert.expect(3);
assert.strictEqual(_.isNumber(0), true);
assert.strictEqual(_.isNumber(Object(0)), true);
assert.strictEqual(_.isNumber(NaN), true);
});
QUnit.test('should return `false` for non-numbers', function(assert) {
assert.expect(12);
var expected = lodashStable.map(falsey, function(value) {
return typeof value == 'number';
});
var actual = lodashStable.map(falsey, function(value, index) {
return index ? _.isNumber(value) : _.isNumber();
});
assert.deepEqual(actual, expected);
assert.strictEqual(_.isNumber(args), false);
assert.strictEqual(_.isNumber([1, 2, 3]), false);
assert.strictEqual(_.isNumber(true), false);
assert.strictEqual(_.isNumber(new Date), false);
assert.strictEqual(_.isNumber(new Error), false);
assert.strictEqual(_.isNumber(_), false);
assert.strictEqual(_.isNumber(slice), false);
assert.strictEqual(_.isNumber({ 'a': 1 }), false);
assert.strictEqual(_.isNumber(/x/), false);
assert.strictEqual(_.isNumber('a'), false);
assert.strictEqual(_.isNumber(symbol), false);
});
QUnit.test('should work with numbers from another realm', function(assert) {
assert.expect(1);
if (realm.number) {
assert.strictEqual(_.isNumber(realm.number), true);
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.isObject');
(function() {
QUnit.test('should return `true` for objects', function(assert) {
assert.expect(13);
assert.strictEqual(_.isObject(args), true);
assert.strictEqual(_.isObject([1, 2, 3]), true);
assert.strictEqual(_.isObject(Object(false)), true);
assert.strictEqual(_.isObject(new Date), true);
assert.strictEqual(_.isObject(new Error), true);
assert.strictEqual(_.isObject(_), true);
assert.strictEqual(_.isObject(slice), true);
assert.strictEqual(_.isObject({ 'a': 1 }), true);
assert.strictEqual(_.isObject(Object(0)), true);
assert.strictEqual(_.isObject(/x/), true);
assert.strictEqual(_.isObject(Object('a')), true);
if (document) {
assert.strictEqual(_.isObject(body), true);
}
else {
skipAssert(assert);
}
if (Symbol) {
assert.strictEqual(_.isObject(Object(symbol)), true);
}
else {
skipAssert(assert);
}
});
QUnit.test('should return `false` for non-objects', function(assert) {
assert.expect(1);
var values = falsey.concat(true, 1, 'a', symbol),
expected = lodashStable.map(values, stubFalse);
var actual = lodashStable.map(values, function(value, index) {
return index ? _.isObject(value) : _.isObject();
});
assert.deepEqual(actual, expected);
});
QUnit.test('should work with objects from another realm', function(assert) {
assert.expect(8);
if (realm.element) {
assert.strictEqual(_.isObject(realm.element), true);
}
else {
skipAssert(assert);
}
if (realm.object) {
assert.strictEqual(_.isObject(realm.boolean), true);
assert.strictEqual(_.isObject(realm.date), true);
assert.strictEqual(_.isObject(realm.function), true);
assert.strictEqual(_.isObject(realm.number), true);
assert.strictEqual(_.isObject(realm.object), true);
assert.strictEqual(_.isObject(realm.regexp), true);
assert.strictEqual(_.isObject(realm.string), true);
}
else {
skipAssert(assert, 7);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.isObjectLike');
(function() {
QUnit.test('should return `true` for objects', function(assert) {
assert.expect(9);
assert.strictEqual(_.isObjectLike(args), true);
assert.strictEqual(_.isObjectLike([1, 2, 3]), true);
assert.strictEqual(_.isObjectLike(Object(false)), true);
assert.strictEqual(_.isObjectLike(new Date), true);
assert.strictEqual(_.isObjectLike(new Error), true);
assert.strictEqual(_.isObjectLike({ 'a': 1 }), true);
assert.strictEqual(_.isObjectLike(Object(0)), true);
assert.strictEqual(_.isObjectLike(/x/), true);
assert.strictEqual(_.isObjectLike(Object('a')), true);
});
QUnit.test('should return `false` for non-objects', function(assert) {
assert.expect(1);
var values = falsey.concat(true, _, slice, 1, 'a', symbol),
expected = lodashStable.map(values, stubFalse);
var actual = lodashStable.map(values, function(value, index) {
return index ? _.isObjectLike(value) : _.isObjectLike();
});
assert.deepEqual(actual, expected);
});
QUnit.test('should work with objects from another realm', function(assert) {
assert.expect(6);
if (realm.object) {
assert.strictEqual(_.isObjectLike(realm.boolean), true);
assert.strictEqual(_.isObjectLike(realm.date), true);
assert.strictEqual(_.isObjectLike(realm.number), true);
assert.strictEqual(_.isObjectLike(realm.object), true);
assert.strictEqual(_.isObjectLike(realm.regexp), true);
assert.strictEqual(_.isObjectLike(realm.string), true);
}
else {
skipAssert(assert, 6);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.isPlainObject');
(function() {
var element = document && document.createElement('div');
QUnit.test('should detect plain objects', function(assert) {
  assert.expect(5);

  function Foo(a) {
    // Previously ignored its parameter (`this.a = 1`). Store the argument so
    // the signature is honest; the only call site passes `1`, so the
    // assertions below are unaffected.
    this.a = a;
  }

  assert.strictEqual(_.isPlainObject({}), true);
  assert.strictEqual(_.isPlainObject({ 'a': 1 }), true);
  // An own `constructor` property does not disqualify a plain object.
  assert.strictEqual(_.isPlainObject({ 'constructor': Foo }), true);
  assert.strictEqual(_.isPlainObject([1, 2, 3]), false);
  // Instances of custom constructors are not plain.
  assert.strictEqual(_.isPlainObject(new Foo(1)), false);
});
QUnit.test('should return `true` for objects with a `[[Prototype]]` of `null`', function(assert) {
assert.expect(2);
var object = create(null);
assert.strictEqual(_.isPlainObject(object), true);
object.constructor = objectProto.constructor;
assert.strictEqual(_.isPlainObject(object), true);
});
QUnit.test('should return `true` for objects with a `valueOf` property', function(assert) {
assert.expect(1);
assert.strictEqual(_.isPlainObject({ 'valueOf': 0 }), true);
});
QUnit.test('should return `true` for objects with a writable `Symbol.toStringTag` property', function(assert) {
assert.expect(1);
if (Symbol && Symbol.toStringTag) {
var object = {};
object[Symbol.toStringTag] = 'X';
assert.deepEqual(_.isPlainObject(object), true);
}
else {
skipAssert(assert);
}
});
QUnit.test('should return `false` for objects with a custom `[[Prototype]]`', function(assert) {
assert.expect(1);
var object = create({ 'a': 1 });
assert.strictEqual(_.isPlainObject(object), false);
});
QUnit.test('should return `false` for DOM elements', function(assert) {
assert.expect(1);
if (element) {
assert.strictEqual(_.isPlainObject(element), false);
} else {
skipAssert(assert);
}
});
QUnit.test('should return `false` for non-Object objects', function(assert) {
assert.expect(3);
assert.strictEqual(_.isPlainObject(arguments), false);
assert.strictEqual(_.isPlainObject(Error), false);
assert.strictEqual(_.isPlainObject(Math), false);
});
QUnit.test('should return `false` for non-objects', function(assert) {
assert.expect(4);
var expected = lodashStable.map(falsey, stubFalse);
var actual = lodashStable.map(falsey, function(value, index) {
return index ? _.isPlainObject(value) : _.isPlainObject();
});
assert.deepEqual(actual, expected);
assert.strictEqual(_.isPlainObject(true), false);
assert.strictEqual(_.isPlainObject('a'), false);
assert.strictEqual(_.isPlainObject(symbol), false);
});
QUnit.test('should return `false` for objects with a read-only `Symbol.toStringTag` property', function(assert) {
assert.expect(1);
if (Symbol && Symbol.toStringTag) {
var object = {};
defineProperty(object, Symbol.toStringTag, {
'configurable': true,
'enumerable': false,
'writable': false,
'value': 'X'
});
assert.deepEqual(_.isPlainObject(object), false);
}
else {
skipAssert(assert);
}
});
QUnit.test('should not mutate `value`', function(assert) {
assert.expect(2);
if (Symbol && Symbol.toStringTag) {
var proto = {};
proto[Symbol.toStringTag] = undefined;
var object = create(proto);
assert.strictEqual(_.isPlainObject(object), false);
assert.notOk(lodashStable.has(object, Symbol.toStringTag));
}
else {
skipAssert(assert, 2);
}
});
QUnit.test('should work with objects from another realm', function(assert) {
assert.expect(1);
if (realm.object) {
assert.strictEqual(_.isPlainObject(realm.object), true);
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.isRegExp');
(function() {
QUnit.test('should return `true` for regexes', function(assert) {
assert.expect(2);
assert.strictEqual(_.isRegExp(/x/), true);
assert.strictEqual(_.isRegExp(RegExp('x')), true);
});
QUnit.test('should return `false` for non-regexes', function(assert) {
assert.expect(12);
var expected = lodashStable.map(falsey, stubFalse);
var actual = lodashStable.map(falsey, function(value, index) {
return index ? _.isRegExp(value) : _.isRegExp();
});
assert.deepEqual(actual, expected);
assert.strictEqual(_.isRegExp(args), false);
assert.strictEqual(_.isRegExp([1, 2, 3]), false);
assert.strictEqual(_.isRegExp(true), false);
assert.strictEqual(_.isRegExp(new Date), false);
assert.strictEqual(_.isRegExp(new Error), false);
assert.strictEqual(_.isRegExp(_), false);
assert.strictEqual(_.isRegExp(slice), false);
assert.strictEqual(_.isRegExp({ 'a': 1 }), false);
assert.strictEqual(_.isRegExp(1), false);
assert.strictEqual(_.isRegExp('a'), false);
assert.strictEqual(_.isRegExp(symbol), false);
});
QUnit.test('should work with regexes from another realm', function(assert) {
assert.expect(1);
if (realm.regexp) {
assert.strictEqual(_.isRegExp(realm.regexp), true);
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.isSet');
(function() {
QUnit.test('should return `true` for sets', function(assert) {
assert.expect(1);
if (Set) {
assert.strictEqual(_.isSet(set), true);
}
else {
skipAssert(assert);
}
});
QUnit.test('should return `false` for non-sets', function(assert) {
assert.expect(14);
var expected = lodashStable.map(falsey, stubFalse);
var actual = lodashStable.map(falsey, function(value, index) {
return index ? _.isSet(value) : _.isSet();
});
assert.deepEqual(actual, expected);
assert.strictEqual(_.isSet(args), false);
assert.strictEqual(_.isSet([1, 2, 3]), false);
assert.strictEqual(_.isSet(true), false);
assert.strictEqual(_.isSet(new Date), false);
assert.strictEqual(_.isSet(new Error), false);
assert.strictEqual(_.isSet(_), false);
assert.strictEqual(_.isSet(slice), false);
assert.strictEqual(_.isSet({ 'a': 1 }), false);
assert.strictEqual(_.isSet(1), false);
assert.strictEqual(_.isSet(/x/), false);
assert.strictEqual(_.isSet('a'), false);
assert.strictEqual(_.isSet(symbol), false);
assert.strictEqual(_.isSet(weakSet), false);
});
QUnit.test('should work for objects with a non-function `constructor` (test in IE 11)', function(assert) {
assert.expect(1);
var values = [false, true],
expected = lodashStable.map(values, stubFalse);
var actual = lodashStable.map(values, function(value) {
return _.isSet({ 'constructor': value });
});
assert.deepEqual(actual, expected);
});
// Title previously said "weak sets" — copied from the `isWeakSet` tests —
// but this asserts on a plain `Set` (`realm.set`).
QUnit.test('should work with sets from another realm', function(assert) {
  assert.expect(1);

  if (realm.set) {
    assert.strictEqual(_.isSet(realm.set), true);
  }
  else {
    skipAssert(assert);
  }
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.isString');
(function() {
QUnit.test('should return `true` for strings', function(assert) {
assert.expect(2);
assert.strictEqual(_.isString('a'), true);
assert.strictEqual(_.isString(Object('a')), true);
});
QUnit.test('should return `false` for non-strings', function(assert) {
assert.expect(12);
var expected = lodashStable.map(falsey, function(value) {
return value === '';
});
var actual = lodashStable.map(falsey, function(value, index) {
return index ? _.isString(value) : _.isString();
});
assert.deepEqual(actual, expected);
assert.strictEqual(_.isString(args), false);
assert.strictEqual(_.isString([1, 2, 3]), false);
assert.strictEqual(_.isString(true), false);
assert.strictEqual(_.isString(new Date), false);
assert.strictEqual(_.isString(new Error), false);
assert.strictEqual(_.isString(_), false);
assert.strictEqual(_.isString(slice), false);
assert.strictEqual(_.isString({ '0': 1, 'length': 1 }), false);
assert.strictEqual(_.isString(1), false);
assert.strictEqual(_.isString(/x/), false);
assert.strictEqual(_.isString(symbol), false);
});
QUnit.test('should work with strings from another realm', function(assert) {
assert.expect(1);
if (realm.string) {
assert.strictEqual(_.isString(realm.string), true);
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.isSymbol');
(function() {
QUnit.test('should return `true` for symbols', function(assert) {
assert.expect(2);
if (Symbol) {
assert.strictEqual(_.isSymbol(symbol), true);
assert.strictEqual(_.isSymbol(Object(symbol)), true);
}
else {
skipAssert(assert, 2);
}
});
QUnit.test('should return `false` for non-symbols', function(assert) {
assert.expect(12);
var expected = lodashStable.map(falsey, stubFalse);
var actual = lodashStable.map(falsey, function(value, index) {
return index ? _.isSymbol(value) : _.isSymbol();
});
assert.deepEqual(actual, expected);
assert.strictEqual(_.isSymbol(args), false);
assert.strictEqual(_.isSymbol([1, 2, 3]), false);
assert.strictEqual(_.isSymbol(true), false);
assert.strictEqual(_.isSymbol(new Date), false);
assert.strictEqual(_.isSymbol(new Error), false);
assert.strictEqual(_.isSymbol(_), false);
assert.strictEqual(_.isSymbol(slice), false);
assert.strictEqual(_.isSymbol({ '0': 1, 'length': 1 }), false);
assert.strictEqual(_.isSymbol(1), false);
assert.strictEqual(_.isSymbol(/x/), false);
assert.strictEqual(_.isSymbol('a'), false);
});
QUnit.test('should work with symbols from another realm', function(assert) {
assert.expect(1);
if (Symbol && realm.symbol) {
assert.strictEqual(_.isSymbol(realm.symbol), true);
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.isTypedArray');
(function() {
QUnit.test('should return `true` for typed arrays', function(assert) {
assert.expect(1);
var expected = lodashStable.map(typedArrays, function(type) {
return type in root;
});
var actual = lodashStable.map(typedArrays, function(type) {
var Ctor = root[type];
return Ctor ? _.isTypedArray(new Ctor(new ArrayBuffer(8))) : false;
});
assert.deepEqual(actual, expected);
});
QUnit.test('should return `false` for non typed arrays', function(assert) {
assert.expect(13);
var expected = lodashStable.map(falsey, stubFalse);
var actual = lodashStable.map(falsey, function(value, index) {
return index ? _.isTypedArray(value) : _.isTypedArray();
});
assert.deepEqual(actual, expected);
assert.strictEqual(_.isTypedArray(args), false);
assert.strictEqual(_.isTypedArray([1, 2, 3]), false);
assert.strictEqual(_.isTypedArray(true), false);
assert.strictEqual(_.isTypedArray(new Date), false);
assert.strictEqual(_.isTypedArray(new Error), false);
assert.strictEqual(_.isTypedArray(_), false);
assert.strictEqual(_.isTypedArray(slice), false);
assert.strictEqual(_.isTypedArray({ 'a': 1 }), false);
assert.strictEqual(_.isTypedArray(1), false);
assert.strictEqual(_.isTypedArray(/x/), false);
assert.strictEqual(_.isTypedArray('a'), false);
assert.strictEqual(_.isTypedArray(symbol), false);
});
QUnit.test('should work with typed arrays from another realm', function(assert) {
assert.expect(1);
if (realm.object) {
var props = lodashStable.invokeMap(typedArrays, 'toLowerCase');
var expected = lodashStable.map(props, function(key) {
return realm[key] !== undefined;
});
var actual = lodashStable.map(props, function(key) {
var value = realm[key];
return value ? _.isTypedArray(value) : false;
});
assert.deepEqual(actual, expected);
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.isUndefined');
(function() {
QUnit.test('should return `true` for `undefined` values', function(assert) {
assert.expect(2);
// Both a missing argument and an explicit `undefined` qualify.
assert.strictEqual(_.isUndefined(), true);
assert.strictEqual(_.isUndefined(undefined), true);
});
QUnit.test('should return `false` for non `undefined` values', function(assert) {
assert.expect(13);
// Of the falsey values only `undefined` itself should be accepted.
var expected = lodashStable.map(falsey, function(value) {
return value === undefined;
});
var actual = lodashStable.map(falsey, function(value, index) {
return index ? _.isUndefined(value) : _.isUndefined();
});
assert.deepEqual(actual, expected);
// One representative defined value per type; each must be rejected.
assert.strictEqual(_.isUndefined(args), false);
assert.strictEqual(_.isUndefined([1, 2, 3]), false);
assert.strictEqual(_.isUndefined(true), false);
assert.strictEqual(_.isUndefined(new Date), false);
assert.strictEqual(_.isUndefined(new Error), false);
assert.strictEqual(_.isUndefined(_), false);
assert.strictEqual(_.isUndefined(slice), false);
assert.strictEqual(_.isUndefined({ 'a': 1 }), false);
assert.strictEqual(_.isUndefined(1), false);
assert.strictEqual(_.isUndefined(/x/), false);
assert.strictEqual(_.isUndefined('a'), false);
// Symbols only exist in environments that support them.
if (Symbol) {
assert.strictEqual(_.isUndefined(symbol), false);
}
else {
skipAssert(assert);
}
});
QUnit.test('should work with `undefined` from another realm', function(assert) {
assert.expect(1);
// Cross-realm `undefined` is the same primitive value, so this must hold
// regardless of which realm produced it.
if (realm.object) {
assert.strictEqual(_.isUndefined(realm.undefined), true);
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.isWeakMap');
(function() {
QUnit.test('should return `true` for weak maps', function(assert) {
assert.expect(1);
if (!WeakMap) {
skipAssert(assert);
return;
}
assert.strictEqual(_.isWeakMap(weakMap), true);
});
QUnit.test('should return `false` for non weak maps', function(assert) {
assert.expect(14);
var expected = lodashStable.map(falsey, stubFalse);
var actual = lodashStable.map(falsey, function(value, index) {
return index ? _.isWeakMap(value) : _.isWeakMap();
});
assert.deepEqual(actual, expected);
// Table-driven sweep over representative non-WeakMap values.
var others = [args, [1, 2, 3], true, new Date, new Error, _, slice, { 'a': 1 }, map, 1, /x/, 'a', symbol];
lodashStable.each(others, function(value) {
assert.strictEqual(_.isWeakMap(value), false);
});
});
QUnit.test('should work for objects with a non-function `constructor` (test in IE 11)', function(assert) {
assert.expect(1);
var inputs = [false, true];
var actual = lodashStable.map(inputs, function(ctorValue) {
return _.isWeakMap({ 'constructor': ctorValue });
});
assert.deepEqual(actual, lodashStable.map(inputs, stubFalse));
});
QUnit.test('should work with weak maps from another realm', function(assert) {
assert.expect(1);
if (!realm.weakMap) {
skipAssert(assert);
return;
}
assert.strictEqual(_.isWeakMap(realm.weakMap), true);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.isWeakSet');
(function() {
QUnit.test('should return `true` for weak sets', function(assert) {
assert.expect(1);
if (!WeakSet) {
skipAssert(assert);
return;
}
assert.strictEqual(_.isWeakSet(weakSet), true);
});
QUnit.test('should return `false` for non weak sets', function(assert) {
assert.expect(14);
var expected = lodashStable.map(falsey, stubFalse);
var actual = lodashStable.map(falsey, function(value, index) {
return index ? _.isWeakSet(value) : _.isWeakSet();
});
assert.deepEqual(actual, expected);
// Table-driven sweep over representative non-WeakSet values.
var others = [args, [1, 2, 3], true, new Date, new Error, _, slice, { 'a': 1 }, 1, /x/, 'a', set, symbol];
lodashStable.each(others, function(value) {
assert.strictEqual(_.isWeakSet(value), false);
});
});
QUnit.test('should work with weak sets from another realm', function(assert) {
assert.expect(1);
if (!realm.weakSet) {
skipAssert(assert);
return;
}
assert.strictEqual(_.isWeakSet(realm.weakSet), true);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('isType checks');
// Cross-cutting tests applied to a list of `is*` type-check methods.
(function() {
QUnit.test('should return `false` for subclassed values', function(assert) {
assert.expect(7);
var funcs = [
'isArray', 'isBoolean', 'isDate', 'isFunction',
'isNumber', 'isRegExp', 'isString'
];
lodashStable.each(funcs, function(methodName) {
// Build a subclass whose prototype is the corresponding builtin's
// prototype (e.g. 'isArray' -> root['Array'].prototype).
function Foo() {}
Foo.prototype = root[methodName.slice(2)].prototype;
var object = new Foo;
// Only assert when the environment reports the plain object tag for
// the subclassed instance; otherwise skip.
if (objToString.call(object) == objectTag) {
assert.strictEqual(_[methodName](object), false, '`_.' + methodName + '` returns `false`');
}
else {
skipAssert(assert);
}
});
});
QUnit.test('should not error on host objects (test in IE)', function(assert) {
assert.expect(26);
var funcs = [
'isArguments', 'isArray', 'isArrayBuffer', 'isArrayLike', 'isBoolean',
'isBuffer', 'isDate', 'isElement', 'isError', 'isFinite', 'isFunction',
'isInteger', 'isMap', 'isNaN', 'isNil', 'isNull', 'isNumber', 'isObject',
'isObjectLike', 'isRegExp', 'isSet', 'isSafeInteger', 'isString',
'isUndefined', 'isWeakMap', 'isWeakSet'
];
lodashStable.each(funcs, function(methodName) {
// `xml` is a host object fixture; the assertion is simply that the
// call completes without throwing.
if (xml) {
_[methodName](xml);
assert.ok(true, '`_.' + methodName + '` should not error');
}
else {
skipAssert(assert);
}
});
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.iteratee');
// Tests for `_.iteratee`, which turns shorthand values (functions, objects,
// arrays, property paths, nullish) into callback functions.
(function() {
QUnit.test('should provide arguments to `func`', function(assert) {
assert.expect(1);
var fn = function() { return slice.call(arguments); },
iteratee = _.iteratee(fn),
actual = iteratee('a', 'b', 'c', 'd', 'e', 'f');
assert.deepEqual(actual, ['a', 'b', 'c', 'd', 'e', 'f']);
});
QUnit.test('should return `_.identity` when `func` is nullish', function(assert) {
assert.expect(1);
var object = {},
values = [, null, undefined],
expected = lodashStable.map(values, lodashStable.constant([!isNpm && _.identity, object]));
var actual = lodashStable.map(values, function(value, index) {
// Index 0 is the array hole: call with no argument at all.
var identity = index ? _.iteratee(value) : _.iteratee();
return [!isNpm && identity, identity(object)];
});
assert.deepEqual(actual, expected);
});
QUnit.test('should return an iteratee created by `_.matches` when `func` is an object', function(assert) {
assert.expect(2);
var matches = _.iteratee({ 'a': 1, 'b': 2 });
assert.strictEqual(matches({ 'a': 1, 'b': 2, 'c': 3 }), true);
assert.strictEqual(matches({ 'b': 2 }), false);
});
QUnit.test('should not change `_.matches` behavior if `source` is modified', function(assert) {
assert.expect(9);
var sources = [
{ 'a': { 'b': 2, 'c': 3 } },
{ 'a': 1, 'b': 2 },
{ 'a': 1 }
];
lodashStable.each(sources, function(source, index) {
// Snapshot the source, build the matcher, then mutate the source and
// verify the matcher still uses the original values.
var object = lodashStable.cloneDeep(source),
matches = _.iteratee(source);
assert.strictEqual(matches(object), true);
if (index) {
source.a = 2;
source.b = 1;
source.c = 3;
} else {
source.a.b = 1;
source.a.c = 2;
source.a.d = 3;
}
assert.strictEqual(matches(object), true);
assert.strictEqual(matches(source), false);
});
});
QUnit.test('should return an iteratee created by `_.matchesProperty` when `func` is an array', function(assert) {
assert.expect(3);
var array = ['a', undefined],
matches = _.iteratee([0, 'a']);
assert.strictEqual(matches(array), true);
matches = _.iteratee(['0', 'a']);
assert.strictEqual(matches(array), true);
matches = _.iteratee([1, undefined]);
assert.strictEqual(matches(array), true);
});
QUnit.test('should support deep paths for `_.matchesProperty` shorthands', function(assert) {
assert.expect(1);
var object = { 'a': { 'b': { 'c': 1, 'd': 2 } } },
matches = _.iteratee(['a.b', { 'c': 1 }]);
assert.strictEqual(matches(object), true);
});
QUnit.test('should not change `_.matchesProperty` behavior if `source` is modified', function(assert) {
assert.expect(9);
var sources = [
{ 'a': { 'b': 2, 'c': 3 } },
{ 'a': 1, 'b': 2 },
{ 'a': 1 }
];
lodashStable.each(sources, function(source, index) {
var object = { 'a': lodashStable.cloneDeep(source) },
matches = _.iteratee(['a', source]);
assert.strictEqual(matches(object), true);
if (index) {
source.a = 2;
source.b = 1;
source.c = 3;
} else {
source.a.b = 1;
source.a.c = 2;
source.a.d = 3;
}
assert.strictEqual(matches(object), true);
assert.strictEqual(matches({ 'a': source }), false);
});
});
QUnit.test('should return an iteratee created by `_.property` when `func` is a number or string', function(assert) {
assert.expect(2);
var array = ['a'],
prop = _.iteratee(0);
assert.strictEqual(prop(array), 'a');
prop = _.iteratee('0');
assert.strictEqual(prop(array), 'a');
});
QUnit.test('should support deep paths for `_.property` shorthands', function(assert) {
assert.expect(1);
var object = { 'a': { 'b': 2 } },
prop = _.iteratee('a.b');
assert.strictEqual(prop(object), 2);
});
QUnit.test('should work with functions created by `_.partial` and `_.partialRight`', function(assert) {
assert.expect(2);
var fn = function() {
var result = [this.a];
push.apply(result, arguments);
return result;
};
var expected = [1, 2, 3],
object = { 'a': 1 , 'iteratee': _.iteratee(_.partial(fn, 2)) };
assert.deepEqual(object.iteratee(3), expected);
object.iteratee = _.iteratee(_.partialRight(fn, 3));
assert.deepEqual(object.iteratee(2), expected);
});
QUnit.test('should use internal `iteratee` if external is unavailable', function(assert) {
assert.expect(1);
// Temporarily delete the exported `_.iteratee` and restore it afterwards.
var iteratee = _.iteratee;
delete _.iteratee;
assert.deepEqual(_.map([{ 'a': 1 }], 'a'), [1]);
_.iteratee = iteratee;
});
QUnit.test('should work as an iteratee for methods like `_.map`', function(assert) {
assert.expect(1);
var fn = function() { return this instanceof Number; },
array = [fn, fn, fn],
iteratees = lodashStable.map(array, _.iteratee),
expected = lodashStable.map(array, stubFalse);
var actual = lodashStable.map(iteratees, function(iteratee) {
return iteratee();
});
assert.deepEqual(actual, expected);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('custom `_.iteratee` methods');
// Each test below temporarily replaces the exported `_.iteratee` with a
// shorthand-producing stub (`getPropA`, `getPropB`, `getLength`, `getSum`),
// calls the method under test without an explicit iteratee, then restores the
// saved original. All tests are skipped when `isModularize` is set.
(function() {
var array = ['one', 'two', 'three'],
getPropA = _.partial(_.property, 'a'),
getPropB = _.partial(_.property, 'b'),
getLength = _.partial(_.property, 'length'),
iteratee = _.iteratee;
// Factory for a reducer-style iteratee: accumulates the `a` property.
var getSum = function() {
return function(result, object) {
return result + object.a;
};
};
var objects = [
{ 'a': 0, 'b': 0 },
{ 'a': 1, 'b': 0 },
{ 'a': 1, 'b': 1 }
];
QUnit.test('`_.countBy` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
_.iteratee = getLength;
assert.deepEqual(_.countBy(array), { '3': 2, '5': 1 });
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.differenceBy` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
_.iteratee = getPropA;
assert.deepEqual(_.differenceBy(objects, [objects[1]]), [objects[0]]);
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.dropRightWhile` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
_.iteratee = getPropB;
assert.deepEqual(_.dropRightWhile(objects), objects.slice(0, 2));
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.dropWhile` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
_.iteratee = getPropB;
// `reverse` mutates `objects`; the second `reverse()` restores order.
assert.deepEqual(_.dropWhile(objects.reverse()).reverse(), objects.reverse().slice(0, 2));
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.every` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
_.iteratee = getPropA;
assert.strictEqual(_.every(objects.slice(1)), true);
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.filter` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
var objects = [{ 'a': 0 }, { 'a': 1 }];
_.iteratee = getPropA;
assert.deepEqual(_.filter(objects), [objects[1]]);
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.find` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
_.iteratee = getPropA;
assert.strictEqual(_.find(objects), objects[1]);
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.findIndex` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
_.iteratee = getPropA;
assert.strictEqual(_.findIndex(objects), 1);
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.findLast` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
_.iteratee = getPropA;
assert.strictEqual(_.findLast(objects), objects[2]);
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.findLastIndex` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
_.iteratee = getPropA;
assert.strictEqual(_.findLastIndex(objects), 2);
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.findKey` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
_.iteratee = getPropB;
assert.strictEqual(_.findKey(objects), '2');
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.findLastKey` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
_.iteratee = getPropB;
assert.strictEqual(_.findLastKey(objects), '2');
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.groupBy` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
_.iteratee = getLength;
assert.deepEqual(_.groupBy(array), { '3': ['one', 'two'], '5': ['three'] });
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.intersectionBy` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
_.iteratee = getPropA;
assert.deepEqual(_.intersectionBy(objects, [objects[2]]), [objects[1]]);
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.keyBy` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
_.iteratee = getLength;
assert.deepEqual(_.keyBy(array), { '3': 'two', '5': 'three' });
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.map` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
_.iteratee = getPropA;
assert.deepEqual(_.map(objects), [0, 1, 1]);
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.mapKeys` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
_.iteratee = getPropB;
assert.deepEqual(_.mapKeys({ 'a': { 'b': 2 } }), { '2': { 'b': 2 } });
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.mapValues` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
_.iteratee = getPropB;
assert.deepEqual(_.mapValues({ 'a': { 'b': 2 } }), { 'a': 2 });
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.maxBy` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
_.iteratee = getPropB;
assert.deepEqual(_.maxBy(objects), objects[2]);
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.meanBy` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
_.iteratee = getPropA;
assert.strictEqual(_.meanBy(objects), 2 / 3);
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.minBy` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
_.iteratee = getPropB;
assert.deepEqual(_.minBy(objects), objects[0]);
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.partition` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
var objects = [{ 'a': 1 }, { 'a': 1 }, { 'b': 2 }];
_.iteratee = getPropA;
assert.deepEqual(_.partition(objects), [objects.slice(0, 2), objects.slice(2)]);
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.pullAllBy` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
_.iteratee = getPropA;
assert.deepEqual(_.pullAllBy(objects.slice(), [{ 'a': 1, 'b': 0 }]), [objects[0]]);
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.reduce` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
_.iteratee = getSum;
assert.strictEqual(_.reduce(objects, undefined, 0), 2);
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.reduceRight` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
_.iteratee = getSum;
assert.strictEqual(_.reduceRight(objects, undefined, 0), 2);
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.reject` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
var objects = [{ 'a': 0 }, { 'a': 1 }];
_.iteratee = getPropA;
assert.deepEqual(_.reject(objects), [objects[0]]);
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.remove` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
var objects = [{ 'a': 0 }, { 'a': 1 }];
_.iteratee = getPropA;
_.remove(objects);
assert.deepEqual(objects, [{ 'a': 0 }]);
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.some` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
_.iteratee = getPropB;
assert.strictEqual(_.some(objects), true);
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.sortBy` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
_.iteratee = getPropA;
assert.deepEqual(_.sortBy(objects.slice().reverse()), [objects[0], objects[2], objects[1]]);
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.sortedIndexBy` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
var objects = [{ 'a': 30 }, { 'a': 50 }];
_.iteratee = getPropA;
assert.strictEqual(_.sortedIndexBy(objects, { 'a': 40 }), 1);
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.sortedLastIndexBy` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
var objects = [{ 'a': 30 }, { 'a': 50 }];
_.iteratee = getPropA;
assert.strictEqual(_.sortedLastIndexBy(objects, { 'a': 40 }), 1);
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.sumBy` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
_.iteratee = getPropB;
assert.strictEqual(_.sumBy(objects), 1);
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.takeRightWhile` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
_.iteratee = getPropB;
assert.deepEqual(_.takeRightWhile(objects), objects.slice(2));
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.takeWhile` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
_.iteratee = getPropB;
// Double `reverse()` leaves `objects` in its original order afterwards.
assert.deepEqual(_.takeWhile(objects.reverse()), objects.reverse().slice(2));
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.transform` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
_.iteratee = function() {
return function(result, object) {
result.sum += object.a;
};
};
assert.deepEqual(_.transform(objects, undefined, { 'sum': 0 }), { 'sum': 2 });
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.uniqBy` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
_.iteratee = getPropB;
assert.deepEqual(_.uniqBy(objects), [objects[0], objects[2]]);
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.unionBy` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
_.iteratee = getPropB;
assert.deepEqual(_.unionBy(objects.slice(0, 1), [objects[2]]), [objects[0], objects[2]]);
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.xorBy` should use `_.iteratee` internally', function(assert) {
assert.expect(1);
if (!isModularize) {
_.iteratee = getPropA;
assert.deepEqual(_.xorBy(objects, objects.slice(1)), [objects[0]]);
_.iteratee = iteratee;
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.join');
(function() {
var array = ['a', 'b', 'c'];
QUnit.test('should return join all array elements into a string', function(assert) {
assert.expect(1);
assert.strictEqual(_.join(array, '~'), 'a~b~c');
});
QUnit.test('should return an unwrapped value when implicitly chaining', function(assert) {
assert.expect(2);
if (!isNpm) {
var wrapped = _(array);
assert.strictEqual(wrapped.join('~'), 'a~b~c');
assert.strictEqual(wrapped.value(), array);
}
else {
skipAssert(assert, 2);
}
});
QUnit.test('should return a wrapped value when explicitly chaining', function(assert) {
assert.expect(1);
if (!isNpm) {
assert.ok(_(array).chain().join('~') instanceof _);
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.keyBy');
// Tests for `_.keyBy`: keys each element of a collection by the iteratee's
// result, with later elements overwriting earlier ones on key collisions.
(function() {
var array = [
{ 'dir': 'left', 'code': 97 },
{ 'dir': 'right', 'code': 100 }
];
QUnit.test('should transform keys by `iteratee`', function(assert) {
assert.expect(1);
var expected = { 'a': { 'dir': 'left', 'code': 97 }, 'd': { 'dir': 'right', 'code': 100 } };
var actual = _.keyBy(array, function(object) {
return String.fromCharCode(object.code);
});
assert.deepEqual(actual, expected);
});
QUnit.test('should use `_.identity` when `iteratee` is nullish', function(assert) {
assert.expect(1);
var array = [4, 6, 6],
values = [, null, undefined],
expected = lodashStable.map(values, lodashStable.constant({ '4': 4, '6': 6 }));
var actual = lodashStable.map(values, function(value, index) {
// Index 0 is the array hole: call with no iteratee argument at all.
return index ? _.keyBy(array, value) : _.keyBy(array);
});
assert.deepEqual(actual, expected);
});
QUnit.test('should work with `_.property` shorthands', function(assert) {
assert.expect(1);
var expected = { 'left': { 'dir': 'left', 'code': 97 }, 'right': { 'dir': 'right', 'code': 100 } },
actual = _.keyBy(array, 'dir');
assert.deepEqual(actual, expected);
});
QUnit.test('should only add values to own, not inherited, properties', function(assert) {
assert.expect(2);
// Keys that collide with `Object.prototype` members must become own
// properties of the result.
var actual = _.keyBy([6.1, 4.2, 6.3], function(n) {
return Math.floor(n) > 4 ? 'hasOwnProperty' : 'constructor';
});
assert.deepEqual(actual.constructor, 4.2);
assert.deepEqual(actual.hasOwnProperty, 6.3);
});
QUnit.test('should work with a number for `iteratee`', function(assert) {
assert.expect(2);
var array = [
[1, 'a'],
[2, 'a'],
[2, 'b']
];
assert.deepEqual(_.keyBy(array, 0), { '1': [1, 'a'], '2': [2, 'b'] });
assert.deepEqual(_.keyBy(array, 1), { 'a': [2, 'a'], 'b': [2, 'b'] });
});
QUnit.test('should work with an object for `collection`', function(assert) {
assert.expect(1);
var actual = _.keyBy({ 'a': 6.1, 'b': 4.2, 'c': 6.3 }, Math.floor);
assert.deepEqual(actual, { '4': 4.2, '6': 6.3 });
});
QUnit.test('should work in a lazy sequence', function(assert) {
assert.expect(1);
if (!isNpm) {
// Large input (with duplicate ranges) so the lazy path is exercised;
// the lazy chain must match the eagerly-computed equivalent.
var array = lodashStable.range(LARGE_ARRAY_SIZE).concat(
lodashStable.range(Math.floor(LARGE_ARRAY_SIZE / 2), LARGE_ARRAY_SIZE),
lodashStable.range(Math.floor(LARGE_ARRAY_SIZE / 1.5), LARGE_ARRAY_SIZE)
);
var actual = _(array).keyBy().map(square).filter(isEven).take().value();
assert.deepEqual(actual, _.take(_.filter(_.map(_.keyBy(array), square), isEven)));
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('keys methods');
lodashStable.each(['keys', 'keysIn'], function(methodName) {
var func = _[methodName],
isKeys = methodName == 'keys';
QUnit.test('`_.' + methodName + '` should return the string keyed property names of `object`', function(assert) {
assert.expect(1);
var actual = func({ 'a': 1, 'b': 1 }).sort();
assert.deepEqual(actual, ['a', 'b']);
});
QUnit.test('`_.' + methodName + '` should ' + (isKeys ? 'not ' : '') + 'include inherited string keyed properties', function(assert) {
assert.expect(1);
function Foo() {
this.a = 1;
}
Foo.prototype.b = 2;
var expected = isKeys ? ['a'] : ['a', 'b'],
actual = func(new Foo).sort();
assert.deepEqual(actual, expected);
});
QUnit.test('`_.' + methodName + '` should treat sparse arrays as dense', function(assert) {
assert.expect(1);
var array = [1];
array[2] = 3;
var actual = func(array).sort();
assert.deepEqual(actual, ['0', '1', '2']);
});
QUnit.test('`_.' + methodName + '` should return keys for custom properties on arrays', function(assert) {
assert.expect(1);
var array = [1];
array.a = 1;
var actual = func(array).sort();
assert.deepEqual(actual, ['0', 'a']);
});
QUnit.test('`_.' + methodName + '` should ' + (isKeys ? 'not ' : '') + 'include inherited string keyed properties of arrays', function(assert) {
assert.expect(1);
arrayProto.a = 1;
var expected = isKeys ? ['0'] : ['0', 'a'],
actual = func([1]).sort();
assert.deepEqual(actual, expected);
delete arrayProto.a;
});
QUnit.test('`_.' + methodName + '` should work with `arguments` objects', function(assert) {
assert.expect(1);
var values = [args, strictArgs],
expected = lodashStable.map(values, lodashStable.constant(['0', '1', '2']));
var actual = lodashStable.map(values, function(value) {
return func(value).sort();
});
assert.deepEqual(actual, expected);
});
QUnit.test('`_.' + methodName + '` should return keys for custom properties on `arguments` objects', function(assert) {
assert.expect(1);
var values = [args, strictArgs],
expected = lodashStable.map(values, lodashStable.constant(['0', '1', '2', 'a']));
var actual = lodashStable.map(values, function(value) {
value.a = 1;
var result = func(value).sort();
delete value.a;
return result;
});
assert.deepEqual(actual, expected);
});
QUnit.test('`_.' + methodName + '` should ' + (isKeys ? 'not ' : '') + 'include inherited string keyed properties of `arguments` objects', function(assert) {
assert.expect(1);
var values = [args, strictArgs],
expected = lodashStable.map(values, lodashStable.constant(isKeys ? ['0', '1', '2'] : ['0', '1', '2', 'a']));
var actual = lodashStable.map(values, function(value) {
objectProto.a = 1;
var result = func(value).sort();
delete objectProto.a;
return result;
});
assert.deepEqual(actual, expected);
});
QUnit.test('`_.' + methodName + '` should work with string objects', function(assert) {
assert.expect(1);
var actual = func(Object('abc')).sort();
assert.deepEqual(actual, ['0', '1', '2']);
});
QUnit.test('`_.' + methodName + '` should return keys for custom properties on string objects', function(assert) {
assert.expect(1);
var object = Object('a');
object.a = 1;
var actual = func(object).sort();
assert.deepEqual(actual, ['0', 'a']);
});
QUnit.test('`_.' + methodName + '` should ' + (isKeys ? 'not ' : '') + 'include inherited string keyed properties of string objects', function(assert) {
assert.expect(1);
stringProto.a = 1;
var expected = isKeys ? ['0'] : ['0', 'a'],
actual = func(Object('a')).sort();
assert.deepEqual(actual, expected);
delete stringProto.a;
});
QUnit.test('`_.' + methodName + '` should work with array-like objects', function(assert) {
assert.expect(1);
var object = { '0': 'a', 'length': 1 },
actual = func(object).sort();
assert.deepEqual(actual, ['0', 'length']);
});
QUnit.test('`_.' + methodName + '` should coerce primitives to objects (test in IE 9)', function(assert) {
assert.expect(2);
var expected = lodashStable.map(primitives, function(value) {
return typeof value == 'string' ? ['0'] : [];
});
var actual = lodashStable.map(primitives, func);
assert.deepEqual(actual, expected);
// IE 9 doesn't box numbers in for-in loops.
numberProto.a = 1;
assert.deepEqual(func(0), isKeys ? [] : ['a']);
delete numberProto.a;
});
QUnit.test('`_.' + methodName + '` skips the `constructor` property on prototype objects', function(assert) {
assert.expect(3);
function Foo() {}
Foo.prototype.a = 1;
var expected = ['a'];
assert.deepEqual(func(Foo.prototype), expected);
Foo.prototype = { 'constructor': Foo, 'a': 1 };
assert.deepEqual(func(Foo.prototype), expected);
var Fake = { 'prototype': {} };
Fake.prototype.constructor = Fake;
assert.deepEqual(func(Fake.prototype), ['constructor']);
});
QUnit.test('`_.' + methodName + '` should return an empty array when `object` is nullish', function(assert) {
var values = [, null, undefined],
expected = lodashStable.map(values, stubArray);
var actual = lodashStable.map(values, function(value, index) {
objectProto.a = 1;
var result = index ? func(value) : func();
delete objectProto.a;
return result;
});
assert.deepEqual(actual, expected);
});
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.last');
// Tests for `_.last`, including wrapper/chaining and lazy-sequence behavior.
(function() {
var array = [1, 2, 3, 4];
QUnit.test('should return the last element', function(assert) {
assert.expect(1);
assert.strictEqual(_.last(array), 4);
});
QUnit.test('should return `undefined` when querying empty arrays', function(assert) {
assert.expect(1);
// A '-1' property must not be mistaken for a last element.
var array = [];
array['-1'] = 1;
assert.strictEqual(_.last([]), undefined);
});
QUnit.test('should work as an iteratee for methods like `_.map`', function(assert) {
assert.expect(1);
var array = [[1, 2, 3], [4, 5, 6], [7, 8, 9]],
actual = lodashStable.map(array, _.last);
assert.deepEqual(actual, [3, 6, 9]);
});
QUnit.test('should return an unwrapped value when implicitly chaining', function(assert) {
assert.expect(1);
if (!isNpm) {
assert.strictEqual(_(array).last(), 4);
}
else {
skipAssert(assert);
}
});
QUnit.test('should return a wrapped value when explicitly chaining', function(assert) {
assert.expect(1);
if (!isNpm) {
assert.ok(_(array).chain().last() instanceof _);
}
else {
skipAssert(assert);
}
});
QUnit.test('should not execute immediately when explicitly chaining', function(assert) {
assert.expect(1);
if (!isNpm) {
// The deferred wrapper should still hold the untouched source array.
var wrapped = _(array).chain().last();
assert.strictEqual(wrapped.__wrapped__, array);
}
else {
skipAssert(assert);
}
});
QUnit.test('should work in a lazy sequence', function(assert) {
assert.expect(2);
if (!isNpm) {
// Check both a small array (eager path) and a large one (lazy path)
// against the eagerly-computed equivalent.
var largeArray = lodashStable.range(LARGE_ARRAY_SIZE),
smallArray = array;
lodashStable.times(2, function(index) {
var array = index ? largeArray : smallArray,
wrapped = _(array).filter(isEven);
assert.strictEqual(wrapped.last(), _.last(_.filter(array, isEven)));
});
}
else {
skipAssert(assert, 2);
}
});
}());
/*--------------------------------------------------------------------------*/
// Tests for `_.lowerCase`, which converts a string to space-separated lower case words.
QUnit.module('lodash.lowerCase');
(function() {
  QUnit.test('should lowercase as space-separated words', function(assert) {
    assert.expect(3);
    // Hyphen-, camel-, and snake-cased inputs all normalize to the same words.
    lodashStable.each(['--Foo-Bar--', 'fooBar', '__FOO_BAR__'], function(string) {
      assert.strictEqual(_.lowerCase(string), 'foo bar');
    });
  });
}());
/*--------------------------------------------------------------------------*/
// Tests for `_.lowerFirst`, which lowercases only the first character of a string.
QUnit.module('lodash.lowerFirst');
(function() {
  QUnit.test('should lowercase only the first character', function(assert) {
    assert.expect(3);
    // [input, expected] pairs; characters after the first are left untouched.
    var pairs = [['fred', 'fred'], ['Fred', 'fred'], ['FRED', 'fRED']];
    lodashStable.each(pairs, function(pair) {
      assert.strictEqual(_.lowerFirst(pair[0]), pair[1]);
    });
  });
}());
/*--------------------------------------------------------------------------*/
// Tests for `_.lt`, which checks whether `value` is less than `other`.
QUnit.module('lodash.lt');
(function() {
  QUnit.test('should return `true` if `value` is less than `other`', function(assert) {
    assert.expect(2);
    // Both numeric and string comparisons are supported.
    lodashStable.each([[1, 3], ['abc', 'def']], function(pair) {
      assert.strictEqual(_.lt(pair[0], pair[1]), true);
    });
  });
  QUnit.test('should return `false` if `value` >= `other`', function(assert) {
    assert.expect(4);
    // Strictly-less semantics: equal operands must also yield `false`.
    lodashStable.each([[3, 1], [3, 3], ['def', 'abc'], ['def', 'def']], function(pair) {
      assert.strictEqual(_.lt(pair[0], pair[1]), false);
    });
  });
}());
/*--------------------------------------------------------------------------*/
// Tests for `_.lte`, which checks whether `value` is less than or equal to `other`.
QUnit.module('lodash.lte');
(function() {
QUnit.test('should return `true` if `value` is <= `other`', function(assert) {
assert.expect(4);
assert.strictEqual(_.lte(1, 3), true);
assert.strictEqual(_.lte(3, 3), true);
assert.strictEqual(_.lte('abc', 'def'), true);
assert.strictEqual(_.lte('def', 'def'), true);
});
QUnit.test('should return `false` if `value` > `other`', function(assert) {
assert.expect(2);
// Bug fix: these assertions previously called `_.lt`, so the `false` cases
// of `_.lte` were never actually exercised.
assert.strictEqual(_.lte(3, 1), false);
assert.strictEqual(_.lte('def', 'abc'), false);
});
}());
/*--------------------------------------------------------------------------*/
// Shared tests for `_.findLastIndex` and `_.lastIndexOf`, which search an
// array from right to left.
QUnit.module('lodash.findLastIndex and lodash.lastIndexOf');
lodashStable.each(['findLastIndex', 'lastIndexOf'], function(methodName) {
var array = [1, 2, 3, 1, 2, 3],
func = _[methodName],
// `findLastIndex` takes a predicate while `lastIndexOf` takes a value, so
// `resolve` wraps the search value in an equality predicate for the former.
resolve = methodName == 'findLastIndex' ? lodashStable.curry(lodashStable.eq) : identity;
QUnit.test('`_.' + methodName + '` should return the index of the last matched value', function(assert) {
assert.expect(1);
assert.strictEqual(func(array, resolve(3)), 5);
});
QUnit.test('`_.' + methodName + '` should work with a positive `fromIndex`', function(assert) {
assert.expect(1);
assert.strictEqual(func(array, resolve(1), 2), 0);
});
QUnit.test('`_.' + methodName + '` should work with a `fromIndex` >= `length`', function(assert) {
assert.expect(1);
// Out-of-range start positions clamp to `length - 1`, so `1` is still found
// at index 3 while unmatched values yield -1.
var values = [6, 8, Math.pow(2, 32), Infinity],
expected = lodashStable.map(values, lodashStable.constant([-1, 3, -1]));
var actual = lodashStable.map(values, function(fromIndex) {
return [
func(array, resolve(undefined), fromIndex),
func(array, resolve(1), fromIndex),
func(array, resolve(''), fromIndex)
];
});
assert.deepEqual(actual, expected);
});
QUnit.test('`_.' + methodName + '` should work with a negative `fromIndex`', function(assert) {
assert.expect(1);
assert.strictEqual(func(array, resolve(2), -3), 1);
});
QUnit.test('`_.' + methodName + '` should work with a negative `fromIndex` <= `-length`', function(assert) {
assert.expect(1);
// Start positions at or below `-length` clamp to index 0.
var values = [-6, -8, -Infinity],
expected = lodashStable.map(values, stubZero);
var actual = lodashStable.map(values, function(fromIndex) {
return func(array, resolve(1), fromIndex);
});
assert.deepEqual(actual, expected);
});
QUnit.test('`_.' + methodName + '` should treat falsey `fromIndex` values correctly', function(assert) {
assert.expect(1);
// `undefined` means "no fromIndex" (search from the end, finding 3 at index
// 5); every other falsey value coerces to index 0, where 3 is not found.
var expected = lodashStable.map(falsey, function(value) {
return value === undefined ? 5 : -1;
});
var actual = lodashStable.map(falsey, function(fromIndex) {
return func(array, resolve(3), fromIndex);
});
assert.deepEqual(actual, expected);
});
QUnit.test('`_.' + methodName + '` should coerce `fromIndex` to an integer', function(assert) {
assert.expect(1);
assert.strictEqual(func(array, resolve(2), 4.2), 4);
});
});
/*--------------------------------------------------------------------------*/
// Shared tests for the whole index-of family. `isIndexOf` marks the
// left-to-right searchers and `isSorted` the binary-search variants.
QUnit.module('indexOf methods');
lodashStable.each(['indexOf', 'lastIndexOf', 'sortedIndexOf', 'sortedLastIndexOf'], function(methodName) {
var func = _[methodName],
isIndexOf = !/last/i.test(methodName),
isSorted = /^sorted/.test(methodName);
QUnit.test('`_.' + methodName + '` should accept a falsey `array`', function(assert) {
assert.expect(1);
var expected = lodashStable.map(falsey, lodashStable.constant(-1));
var actual = lodashStable.map(falsey, function(array, index) {
try {
return index ? func(array) : func();
} catch (e) {}
});
assert.deepEqual(actual, expected);
});
QUnit.test('`_.' + methodName + '` should return `-1` for an unmatched value', function(assert) {
assert.expect(5);
var array = [1, 2, 3],
empty = [];
assert.strictEqual(func(array, 4), -1);
assert.strictEqual(func(array, 4, true), -1);
assert.strictEqual(func(array, undefined, true), -1);
assert.strictEqual(func(empty, undefined), -1);
assert.strictEqual(func(empty, undefined, true), -1);
});
QUnit.test('`_.' + methodName + '` should not match values on empty arrays', function(assert) {
assert.expect(2);
// The '-1' index is bait: an empty array must never match it.
var array = [];
array[-1] = 0;
assert.strictEqual(func(array, undefined), -1);
assert.strictEqual(func(array, 0, true), -1);
});
QUnit.test('`_.' + methodName + '` should match `NaN`', function(assert) {
assert.expect(3);
// Unlike `Array#indexOf`, the lodash methods use SameValueZero and can
// therefore locate `NaN`.
var array = isSorted
? [1, 2, NaN, NaN]
: [1, NaN, 3, NaN, 5, NaN];
if (isSorted) {
assert.strictEqual(func(array, NaN, true), isIndexOf ? 2 : 3);
skipAssert(assert, 2);
}
else {
assert.strictEqual(func(array, NaN), isIndexOf ? 1 : 5);
assert.strictEqual(func(array, NaN, 2), isIndexOf ? 3 : 1);
assert.strictEqual(func(array, NaN, -2), isIndexOf ? 5 : 3);
}
});
QUnit.test('`_.' + methodName + '` should match `-0` as `0`', function(assert) {
assert.expect(2);
assert.strictEqual(func([-0], 0), 0);
assert.strictEqual(func([0], -0), 0);
});
});
/*--------------------------------------------------------------------------*/
// Tests for `_.map`, which maps each element of a collection through an iteratee.
QUnit.module('lodash.map');
(function() {
var array = [1, 2];
QUnit.test('should map values in `collection` to a new array', function(assert) {
assert.expect(2);
var object = { 'a': 1, 'b': 2 },
expected = ['1', '2'];
assert.deepEqual(_.map(array, String), expected);
assert.deepEqual(_.map(object, String), expected);
});
QUnit.test('should work with `_.property` shorthands', function(assert) {
assert.expect(1);
var objects = [{ 'a': 'x' }, { 'a': 'y' }];
assert.deepEqual(_.map(objects, 'a'), ['x', 'y']);
});
QUnit.test('should iterate over own string keyed properties of objects', function(assert) {
assert.expect(1);
// Inherited (`prototype`) properties must be ignored.
function Foo() {
this.a = 1;
}
Foo.prototype.b = 2;
var actual = _.map(new Foo, identity);
assert.deepEqual(actual, [1]);
});
QUnit.test('should use `_.identity` when `iteratee` is nullish', function(assert) {
assert.expect(2);
// Index 0 of `values` is an array hole: calling with no iteratee at all.
var object = { 'a': 1, 'b': 2 },
values = [, null, undefined],
expected = lodashStable.map(values, lodashStable.constant([1, 2]));
lodashStable.each([array, object], function(collection) {
var actual = lodashStable.map(values, function(value, index) {
return index ? _.map(collection, value) : _.map(collection);
});
assert.deepEqual(actual, expected);
});
});
QUnit.test('should accept a falsey `collection`', function(assert) {
assert.expect(1);
var expected = lodashStable.map(falsey, stubArray);
var actual = lodashStable.map(falsey, function(collection, index) {
try {
return index ? _.map(collection) : _.map();
} catch (e) {}
});
assert.deepEqual(actual, expected);
});
QUnit.test('should treat number values for `collection` as empty', function(assert) {
assert.expect(1);
assert.deepEqual(_.map(1), []);
});
QUnit.test('should treat a nodelist as an array-like object', function(assert) {
assert.expect(1);
// Only runs in environments with a DOM (`document` is set by the harness).
if (document) {
var actual = _.map(document.getElementsByTagName('body'), function(element) {
return element.nodeName.toLowerCase();
});
assert.deepEqual(actual, ['body']);
}
else {
skipAssert(assert);
}
});
QUnit.test('should work with objects with non-number length properties', function(assert) {
assert.expect(1);
// A non-numeric `length` means the object is not array-like, so `length`
// is treated as an ordinary value to map over.
var value = { 'value': 'x' },
object = { 'length': { 'value': 'x' } };
assert.deepEqual(_.map(object, identity), [value]);
});
QUnit.test('should return a wrapped value when chaining', function(assert) {
assert.expect(1);
if (!isNpm) {
assert.ok(_(array).map(noop) instanceof _);
}
else {
skipAssert(assert);
}
});
QUnit.test('should provide correct `predicate` arguments in a lazy sequence', function(assert) {
assert.expect(5);
if (!isNpm) {
// In a lazy sequence the number of arguments passed to the iteratee
// depends on the iteratee's declared arity; `args` captures the first call.
var args,
array = lodashStable.range(LARGE_ARRAY_SIZE + 1),
expected = [1, 0, _.map(array.slice(1), square)];
_(array).slice(1).map(function(value, index, array) {
args || (args = slice.call(arguments));
}).value();
assert.deepEqual(args, [1, 0, array.slice(1)]);
args = undefined;
_(array).slice(1).map(square).map(function(value, index, array) {
args || (args = slice.call(arguments));
}).value();
assert.deepEqual(args, expected);
args = undefined;
_(array).slice(1).map(square).map(function(value, index) {
args || (args = slice.call(arguments));
}).value();
assert.deepEqual(args, expected);
args = undefined;
// A unary iteratee only receives the value on the lazy path.
_(array).slice(1).map(square).map(function(value) {
args || (args = slice.call(arguments));
}).value();
assert.deepEqual(args, [1]);
args = undefined;
_(array).slice(1).map(square).map(function() {
args || (args = slice.call(arguments));
}).value();
assert.deepEqual(args, expected);
}
else {
skipAssert(assert, 5);
}
});
}());
/*--------------------------------------------------------------------------*/
// Tests for `_.mapKeys`, which maps an object's keys through an iteratee
// while keeping its values.
QUnit.module('lodash.mapKeys');
(function() {
  var array = [1, 2],
      object = { 'a': 1, 'b': 2 };

  QUnit.test('should map keys in `object` to a new object', function(assert) {
    assert.expect(1);
    assert.deepEqual(_.mapKeys(object, String), { '1': 1, '2': 2 });
  });

  QUnit.test('should treat arrays like objects', function(assert) {
    assert.expect(1);
    assert.deepEqual(_.mapKeys(array, String), { '1': 1, '2': 2 });
  });

  QUnit.test('should work with `_.property` shorthands', function(assert) {
    assert.expect(1);
    assert.deepEqual(_.mapKeys({ 'a': { 'b': 'c' } }, 'b'), { 'c': { 'b': 'c' } });
  });

  QUnit.test('should use `_.identity` when `iteratee` is nullish', function(assert) {
    assert.expect(1);
    // Index 0 of `values` is an array hole: calling with no iteratee at all.
    var values = [, null, undefined],
        expected = lodashStable.map(values, lodashStable.constant({ '1': 1, '2': 2 }));
    var actual = lodashStable.map(values, function(value, index) {
      return index ? _.mapKeys(object, value) : _.mapKeys(object);
    });
    assert.deepEqual(actual, expected);
  });
}());
/*--------------------------------------------------------------------------*/
// Tests for `_.mapValues`, which maps an object's values through an iteratee
// while keeping its keys.
QUnit.module('lodash.mapValues');
(function() {
  var array = [1, 2],
      object = { 'a': 1, 'b': 2 };

  QUnit.test('should map values in `object` to a new object', function(assert) {
    assert.expect(1);
    assert.deepEqual(_.mapValues(object, String), { 'a': '1', 'b': '2' });
  });

  QUnit.test('should treat arrays like objects', function(assert) {
    assert.expect(1);
    assert.deepEqual(_.mapValues(array, String), { '0': '1', '1': '2' });
  });

  QUnit.test('should work with `_.property` shorthands', function(assert) {
    assert.expect(1);
    assert.deepEqual(_.mapValues({ 'a': { 'b': 2 } }, 'b'), { 'a': 2 });
  });

  QUnit.test('should use `_.identity` when `iteratee` is nullish', function(assert) {
    assert.expect(1);
    // Index 0 of `values` is an array hole: calling with no iteratee at all.
    // The result must equal the source object without being the same reference.
    var values = [, null, undefined],
        expected = lodashStable.map(values, lodashStable.constant([true, false]));
    var actual = lodashStable.map(values, function(value, index) {
      var result = index ? _.mapValues(object, value) : _.mapValues(object);
      return [lodashStable.isEqual(result, object), result === object];
    });
    assert.deepEqual(actual, expected);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.mapKeys and lodash.mapValues');
lodashStable.each(['mapKeys', 'mapValues'], function(methodName) {
var func = _[methodName],
object = { 'a': 1, 'b': 2 };
QUnit.test('`_.' + methodName + '` should iterate over own string keyed properties of objects', function(assert) {
assert.expect(1);
function Foo() {
this.a = 'a';
}
Foo.prototype.b = 'b';
var actual = func(new Foo, function(value, key) { return key; });
assert.deepEqual(actual, { 'a': 'a' });
});
QUnit.test('`_.' + methodName + '` should accept a falsey `object`', function(assert) {
assert.expect(1);
var expected = lodashStable.map(falsey, stubObject);
var actual = lodashStable.map(falsey, function(object, index) {
try {
return index ? func(object) : func();
} catch (e) {}
});
assert.deepEqual(actual, expected);
});
QUnit.test('`_.' + methodName + '` should return a wrapped value when chaining', function(assert) {
assert.expect(1);
if (!isNpm) {
assert.ok(_(object)[methodName](noop) instanceof _);
}
else {
skipAssert(assert);
}
});
});
// Tests for `_.matches` specifically: the created predicate must snapshot
// (clone) `source`, so later mutations of `source` cannot change its behavior.
QUnit.module('lodash.matches');
(function() {
QUnit.test('should not change behavior if `source` is modified', function(assert) {
assert.expect(9);
var sources = [
{ 'a': { 'b': 2, 'c': 3 } },
{ 'a': 1, 'b': 2 },
{ 'a': 1 }
];
lodashStable.each(sources, function(source, index) {
var object = lodashStable.cloneDeep(source),
par = _.matches(source);
assert.strictEqual(par(object), true);
// Mutate the source after the predicate is created: nested props for the
// first source (index 0), top-level props for the others.
if (index) {
source.a = 2;
source.b = 1;
source.c = 3;
} else {
source.a.b = 1;
source.a.c = 2;
source.a.d = 3;
}
// The predicate still matches the pre-mutation clone, and the mutated
// source itself no longer matches the snapshot.
assert.strictEqual(par(object), true);
assert.strictEqual(par(source), false);
});
});
}());
/*--------------------------------------------------------------------------*/
// Shared tests for `_.matches` and `_.isMatch`. The local `matches` helper
// normalizes both into "source -> predicate(object)" form so one suite can
// exercise either method.
QUnit.module('matches methods');
lodashStable.each(['matches', 'isMatch'], function(methodName) {
var isMatches = methodName == 'matches';
function matches(source) {
return isMatches ? _.matches(source) : function(object) {
return _.isMatch(object, source);
};
}
QUnit.test('`_.' + methodName + '` should perform a deep comparison between `source` and `object`', function(assert) {
assert.expect(5);
var object = { 'a': 1, 'b': 2, 'c': 3 },
par = matches({ 'a': 1 });
assert.strictEqual(par(object), true);
par = matches({ 'b': 1 });
assert.strictEqual(par(object), false);
par = matches({ 'a': 1, 'c': 3 });
assert.strictEqual(par(object), true);
par = matches({ 'c': 3, 'd': 4 });
assert.strictEqual(par(object), false);
object = { 'a': { 'b': { 'c': 1, 'd': 2 }, 'e': 3 }, 'f': 4 };
par = matches({ 'a': { 'b': { 'c': 1 } } });
assert.strictEqual(par(object), true);
});
QUnit.test('`_.' + methodName + '` should match inherited string keyed `object` properties', function(assert) {
assert.expect(1);
function Foo() {
this.a = 1;
}
Foo.prototype.b = 2;
var object = { 'a': new Foo },
par = matches({ 'a': { 'b': 2 } });
assert.strictEqual(par(object), true);
});
QUnit.test('`_.' + methodName + '` should not match by inherited `source` properties', function(assert) {
assert.expect(1);
// Only the source's OWN properties (`a`) take part in the comparison, so
// both objects match even though only one has `b`.
function Foo() {
this.a = 1;
}
Foo.prototype.b = 2;
var objects = [{ 'a': 1 }, { 'a': 1, 'b': 2 }],
source = new Foo,
actual = lodashStable.map(objects, matches(source)),
expected = lodashStable.map(objects, stubTrue);
assert.deepEqual(actual, expected);
});
QUnit.test('`_.' + methodName + '` should compare a variety of `source` property values', function(assert) {
assert.expect(2);
// `object2` holds loosely-equal (`==`) counterparts; matching must be strict.
var object1 = { 'a': false, 'b': true, 'c': '3', 'd': 4, 'e': [5], 'f': { 'g': 6 } },
object2 = { 'a': 0, 'b': 1, 'c': 3, 'd': '4', 'e': ['5'], 'f': { 'g': '6' } },
par = matches(object1);
assert.strictEqual(par(object1), true);
assert.strictEqual(par(object2), false);
});
QUnit.test('`_.' + methodName + '` should match `-0` as `0`', function(assert) {
assert.expect(2);
var object1 = { 'a': -0 },
object2 = { 'a': 0 },
par = matches(object1);
assert.strictEqual(par(object2), true);
par = matches(object2);
assert.strictEqual(par(object1), true);
});
QUnit.test('`_.' + methodName + '` should compare functions by reference', function(assert) {
assert.expect(3);
var object1 = { 'a': lodashStable.noop },
object2 = { 'a': noop },
object3 = { 'a': {} },
par = matches(object1);
assert.strictEqual(par(object1), true);
assert.strictEqual(par(object2), false);
assert.strictEqual(par(object3), false);
});
QUnit.test('`_.' + methodName + '` should work with a function for `object`', function(assert) {
assert.expect(1);
function Foo() {}
Foo.a = { 'b': 2, 'c': 3 };
var par = matches({ 'a': { 'b': 2 } });
assert.strictEqual(par(Foo), true);
});
QUnit.test('`_.' + methodName + '` should work with a function for `source`', function(assert) {
assert.expect(1);
function Foo() {}
Foo.a = 1;
Foo.b = function() {};
Foo.c = 3;
var objects = [{ 'a': 1 }, { 'a': 1, 'b': Foo.b, 'c': 3 }],
actual = lodashStable.map(objects, matches(Foo));
assert.deepEqual(actual, [false, true]);
});
QUnit.test('`_.' + methodName + '` should work with a non-plain `object`', function(assert) {
assert.expect(1);
function Foo(object) { lodashStable.assign(this, object); }
var object = new Foo({ 'a': new Foo({ 'b': 2, 'c': 3 }) }),
par = matches({ 'a': { 'b': 2 } });
assert.strictEqual(par(object), true);
});
QUnit.test('`_.' + methodName + '` should partial match arrays', function(assert) {
assert.expect(3);
// Array matching is a subset check on values, not a positional comparison.
var objects = [{ 'a': ['b'] }, { 'a': ['c', 'd'] }],
actual = lodashStable.filter(objects, matches({ 'a': ['d'] }));
assert.deepEqual(actual, [objects[1]]);
actual = lodashStable.filter(objects, matches({ 'a': ['b', 'd'] }));
assert.deepEqual(actual, []);
actual = lodashStable.filter(objects, matches({ 'a': ['d', 'b'] }));
assert.deepEqual(actual, []);
});
QUnit.test('`_.' + methodName + '` should partial match arrays with duplicate values', function(assert) {
assert.expect(1);
var objects = [{ 'a': [1, 2] }, { 'a': [2, 2] }],
actual = lodashStable.filter(objects, matches({ 'a': [2, 2] }));
assert.deepEqual(actual, [objects[1]]);
});
// Consistency fix: this title previously lacked the '`_.' + methodName + '`'
// prefix used by every sibling, so the identical name registered twice
// (once per iterated method).
QUnit.test('`_.' + methodName + '` should partial match arrays of objects', function(assert) {
assert.expect(1);
var objects = [
{ 'a': [{ 'b': 1, 'c': 2 }, { 'b': 4, 'c': 5, 'd': 6 }] },
{ 'a': [{ 'b': 1, 'c': 2 }, { 'b': 4, 'c': 6, 'd': 7 }] }
];
var actual = lodashStable.filter(objects, matches({ 'a': [{ 'b': 1 }, { 'b': 4, 'c': 5 }] }));
assert.deepEqual(actual, [objects[0]]);
});
QUnit.test('`_.' + methodName + '` should partial match maps', function(assert) {
assert.expect(3);
if (Map) {
var objects = [{ 'a': new Map }, { 'a': new Map }];
objects[0].a.set('a', 1);
objects[1].a.set('a', 1);
objects[1].a.set('b', 2);
var map = new Map;
map.set('b', 2);
var actual = lodashStable.filter(objects, matches({ 'a': map }));
assert.deepEqual(actual, [objects[1]]);
// An empty source map matches every map.
map.delete('b');
actual = lodashStable.filter(objects, matches({ 'a': map }));
assert.deepEqual(actual, objects);
// An entry absent from both object maps matches neither.
map.set('c', 3);
actual = lodashStable.filter(objects, matches({ 'a': map }));
assert.deepEqual(actual, []);
}
else {
skipAssert(assert, 3);
}
});
QUnit.test('`_.' + methodName + '` should partial match sets', function(assert) {
assert.expect(3);
if (Set) {
var objects = [{ 'a': new Set }, { 'a': new Set }];
objects[0].a.add(1);
objects[1].a.add(1);
objects[1].a.add(2);
var set = new Set;
set.add(2);
var actual = lodashStable.filter(objects, matches({ 'a': set }));
assert.deepEqual(actual, [objects[1]]);
// An empty source set matches every set.
set.delete(2);
actual = lodashStable.filter(objects, matches({ 'a': set }));
assert.deepEqual(actual, objects);
// A value absent from both object sets matches neither.
set.add(3);
actual = lodashStable.filter(objects, matches({ 'a': set }));
assert.deepEqual(actual, []);
}
else {
skipAssert(assert, 3);
}
});
QUnit.test('`_.' + methodName + '` should match `undefined` values', function(assert) {
assert.expect(3);
// Matching `undefined` requires the key to actually be present.
var objects = [{ 'a': 1 }, { 'a': 1, 'b': 1 }, { 'a': 1, 'b': undefined }],
actual = lodashStable.map(objects, matches({ 'b': undefined })),
expected = [false, false, true];
assert.deepEqual(actual, expected);
actual = lodashStable.map(objects, matches({ 'a': 1, 'b': undefined }));
assert.deepEqual(actual, expected);
objects = [{ 'a': { 'b': 2 } }, { 'a': { 'b': 2, 'c': 3 } }, { 'a': { 'b': 2, 'c': undefined } }];
actual = lodashStable.map(objects, matches({ 'a': { 'c': undefined } }));
assert.deepEqual(actual, expected);
});
QUnit.test('`_.' + methodName + '` should match `undefined` values on primitives', function(assert) {
assert.expect(3);
// Temporarily extend `Number.prototype` so primitives expose the matched
// keys; cleaned up at the end of the test.
numberProto.a = 1;
numberProto.b = undefined;
try {
var par = matches({ 'b': undefined });
assert.strictEqual(par(1), true);
} catch (e) {
assert.ok(false, e.message);
}
try {
par = matches({ 'a': 1, 'b': undefined });
assert.strictEqual(par(1), true);
} catch (e) {
assert.ok(false, e.message);
}
numberProto.a = { 'b': 1, 'c': undefined };
try {
par = matches({ 'a': { 'c': undefined } });
assert.strictEqual(par(1), true);
} catch (e) {
assert.ok(false, e.message);
}
delete numberProto.a;
delete numberProto.b;
});
QUnit.test('`_.' + methodName + '` should return `false` when `object` is nullish', function(assert) {
assert.expect(1);
var values = [, null, undefined],
expected = lodashStable.map(values, stubFalse),
par = matches({ 'a': 1 });
var actual = lodashStable.map(values, function(value, index) {
try {
return index ? par(value) : par();
} catch (e) {}
});
assert.deepEqual(actual, expected);
});
QUnit.test('`_.' + methodName + '` should return `true` when comparing an empty `source`', function(assert) {
assert.expect(1);
var object = { 'a': 1 },
expected = lodashStable.map(empties, stubTrue);
var actual = lodashStable.map(empties, function(value) {
var par = matches(value);
return par(object);
});
assert.deepEqual(actual, expected);
});
QUnit.test('`_.' + methodName + '` should return `true` when comparing an empty `source` to a nullish `object`', function(assert) {
assert.expect(1);
var values = [, null, undefined],
expected = lodashStable.map(values, stubTrue),
par = matches({});
var actual = lodashStable.map(values, function(value, index) {
try {
return index ? par(value) : par();
} catch (e) {}
});
assert.deepEqual(actual, expected);
});
QUnit.test('`_.' + methodName + '` should return `true` when comparing a `source` of empty arrays and objects', function(assert) {
assert.expect(1);
var objects = [{ 'a': [1], 'b': { 'c': 1 } }, { 'a': [2, 3], 'b': { 'd': 2 } }],
actual = lodashStable.filter(objects, matches({ 'a': [], 'b': {} }));
assert.deepEqual(actual, objects);
});
});
/*--------------------------------------------------------------------------*/
// Tests for `_.matchesProperty`, which creates a predicate checking whether
// the value at `path` of an object deep-matches `srcValue`.
QUnit.module('lodash.matchesProperty');
(function() {
QUnit.test('should create a function that performs a deep comparison between a property value and `srcValue`', function(assert) {
assert.expect(6);
var object = { 'a': 1, 'b': 2, 'c': 3 },
matches = _.matchesProperty('a', 1);
// The created predicate is unary.
assert.strictEqual(matches.length, 1);
assert.strictEqual(matches(object), true);
matches = _.matchesProperty('b', 3);
assert.strictEqual(matches(object), false);
matches = _.matchesProperty('a', { 'a': 1, 'c': 3 });
assert.strictEqual(matches({ 'a': object }), true);
matches = _.matchesProperty('a', { 'c': 3, 'd': 4 });
assert.strictEqual(matches(object), false);
object = { 'a': { 'b': { 'c': 1, 'd': 2 }, 'e': 3 }, 'f': 4 };
matches = _.matchesProperty('a', { 'b': { 'c': 1 } });
assert.strictEqual(matches(object), true);
});
QUnit.test('should support deep paths', function(assert) {
assert.expect(2);
// Both the string ('a.b') and array (['a', 'b']) path forms are accepted.
var object = { 'a': { 'b': 2 } };
lodashStable.each(['a.b', ['a', 'b']], function(path) {
var matches = _.matchesProperty(path, 2);
assert.strictEqual(matches(object), true);
});
});
QUnit.test('should work with a non-string `path`', function(assert) {
assert.expect(2);
var array = [1, 2, 3];
lodashStable.each([1, [1]], function(path) {
var matches = _.matchesProperty(path, 2);
assert.strictEqual(matches(array), true);
});
});
QUnit.test('should preserve the sign of `0`', function(assert) {
assert.expect(1);
// As a path key, `-0` must address the '-0' property, distinct from '0'.
var object1 = { '-0': 'a' },
object2 = { '0': 'b' },
pairs = [[object1, object2], [object1, object2], [object2, object1], [object2, object1]],
props = [-0, Object(-0), 0, Object(0)],
values = ['a', 'a', 'b', 'b'],
expected = lodashStable.map(props, lodashStable.constant([true, false]));
var actual = lodashStable.map(props, function(key, index) {
var matches = _.matchesProperty(key, values[index]),
pair = pairs[index];
return [matches(pair[0]), matches(pair[1])];
});
assert.deepEqual(actual, expected);
});
QUnit.test('should coerce `path` to a string', function(assert) {
assert.expect(2);
// Non-string path parts are stringified ('null', 'undefined', 'fn',
// '[object Object]') when looked up as keys.
function fn() {}
fn.toString = lodashStable.constant('fn');
var object = { 'null': 1, 'undefined': 2, 'fn': 3, '[object Object]': 4 },
paths = [null, undefined, fn, {}],
expected = lodashStable.map(paths, stubTrue);
lodashStable.times(2, function(index) {
var actual = lodashStable.map(paths, function(path) {
var matches = _.matchesProperty(index ? [path] : path, object[path]);
return matches(object);
});
assert.deepEqual(actual, expected);
});
});
QUnit.test('should match a key over a path', function(assert) {
assert.expect(2);
// A literal 'a.b' own key wins over interpreting the string as a deep path.
var object = { 'a.b': 1, 'a': { 'b': 2 } };
lodashStable.each(['a.b', ['a.b']], function(path) {
var matches = _.matchesProperty(path, 1);
assert.strictEqual(matches(object), true);
});
});
QUnit.test('should return `false` when `object` is nullish', function(assert) {
assert.expect(2);
var values = [, null, undefined],
expected = lodashStable.map(values, stubFalse);
lodashStable.each(['constructor', ['constructor']], function(path) {
var matches = _.matchesProperty(path, 1);
var actual = lodashStable.map(values, function(value, index) {
try {
return index ? matches(value) : matches();
} catch (e) {}
});
assert.deepEqual(actual, expected);
});
});
QUnit.test('should return `false` for deep paths when `object` is nullish', function(assert) {
assert.expect(2);
var values = [, null, undefined],
expected = lodashStable.map(values, stubFalse);
lodashStable.each(['constructor.prototype.valueOf', ['constructor', 'prototype', 'valueOf']], function(path) {
var matches = _.matchesProperty(path, 1);
var actual = lodashStable.map(values, function(value, index) {
try {
return index ? matches(value) : matches();
} catch (e) {}
});
assert.deepEqual(actual, expected);
});
});
QUnit.test('should return `false` if parts of `path` are missing', function(assert) {
assert.expect(4);
var object = {};
lodashStable.each(['a', 'a[1].b.c', ['a'], ['a', '1', 'b', 'c']], function(path) {
var matches = _.matchesProperty(path, 1);
assert.strictEqual(matches(object), false);
});
});
QUnit.test('should match inherited string keyed `srcValue` properties', function(assert) {
assert.expect(2);
// Inherited properties of the OBJECT's value are compared...
function Foo() {}
Foo.prototype.b = 2;
var object = { 'a': new Foo };
lodashStable.each(['a', ['a']], function(path) {
var matches = _.matchesProperty(path, { 'b': 2 });
assert.strictEqual(matches(object), true);
});
});
QUnit.test('should not match by inherited `srcValue` properties', function(assert) {
assert.expect(2);
// ...but inherited properties of `srcValue` itself are ignored, so both
// objects match on the own property `a` alone.
function Foo() {
this.a = 1;
}
Foo.prototype.b = 2;
var objects = [{ 'a': { 'a': 1 } }, { 'a': { 'a': 1, 'b': 2 } }],
expected = lodashStable.map(objects, stubTrue);
lodashStable.each(['a', ['a']], function(path) {
assert.deepEqual(lodashStable.map(objects, _.matchesProperty(path, new Foo)), expected);
});
});
QUnit.test('should compare a variety of values', function(assert) {
assert.expect(2);
// `object2` holds loosely-equal (`==`) counterparts; matching must be strict.
var object1 = { 'a': false, 'b': true, 'c': '3', 'd': 4, 'e': [5], 'f': { 'g': 6 } },
object2 = { 'a': 0, 'b': 1, 'c': 3, 'd': '4', 'e': ['5'], 'f': { 'g': '6' } },
matches = _.matchesProperty('a', object1);
assert.strictEqual(matches({ 'a': object1 }), true);
assert.strictEqual(matches({ 'a': object2 }), false);
});
QUnit.test('should match `-0` as `0`', function(assert) {
assert.expect(2);
// As a VALUE (unlike as a path key), `-0` and `0` compare equal.
var matches = _.matchesProperty('a', -0);
assert.strictEqual(matches({ 'a': 0 }), true);
matches = _.matchesProperty('a', 0);
assert.strictEqual(matches({ 'a': -0 }), true);
});
QUnit.test('should compare functions by reference', function(assert) {
assert.expect(3);
var object1 = { 'a': lodashStable.noop },
object2 = { 'a': noop },
object3 = { 'a': {} },
matches = _.matchesProperty('a', object1);
assert.strictEqual(matches({ 'a': object1 }), true);
assert.strictEqual(matches({ 'a': object2 }), false);
assert.strictEqual(matches({ 'a': object3 }), false);
});
QUnit.test('should work with a function for `srcValue`', function(assert) {
assert.expect(1);
function Foo() {}
Foo.a = 1;
Foo.b = function() {};
Foo.c = 3;
var objects = [{ 'a': { 'a': 1 } }, { 'a': { 'a': 1, 'b': Foo.b, 'c': 3 } }],
actual = lodashStable.map(objects, _.matchesProperty('a', Foo));
assert.deepEqual(actual, [false, true]);
});
QUnit.test('should work with a non-plain `srcValue`', function(assert) {
assert.expect(1);
function Foo(object) { lodashStable.assign(this, object); }
var object = new Foo({ 'a': new Foo({ 'b': 1, 'c': 2 }) }),
matches = _.matchesProperty('a', { 'b': 1 });
assert.strictEqual(matches(object), true);
});
QUnit.test('should partial match arrays', function(assert) {
assert.expect(3);
// Array matching is a subset check on values, not a positional comparison.
var objects = [{ 'a': ['b'] }, { 'a': ['c', 'd'] }],
actual = lodashStable.filter(objects, _.matchesProperty('a', ['d']));
assert.deepEqual(actual, [objects[1]]);
actual = lodashStable.filter(objects, _.matchesProperty('a', ['b', 'd']));
assert.deepEqual(actual, []);
actual = lodashStable.filter(objects, _.matchesProperty('a', ['d', 'b']));
assert.deepEqual(actual, []);
});
QUnit.test('should partial match arrays with duplicate values', function(assert) {
assert.expect(1);
var objects = [{ 'a': [1, 2] }, { 'a': [2, 2] }],
actual = lodashStable.filter(objects, _.matchesProperty('a', [2, 2]));
assert.deepEqual(actual, [objects[1]]);
});
QUnit.test('should partial match arrays of objects', function(assert) {
assert.expect(1);
var objects = [
{ 'a': [{ 'a': 1, 'b': 2 }, { 'a': 4, 'b': 5, 'c': 6 }] },
{ 'a': [{ 'a': 1, 'b': 2 }, { 'a': 4, 'b': 6, 'c': 7 }] }
];
var actual = lodashStable.filter(objects, _.matchesProperty('a', [{ 'a': 1 }, { 'a': 4, 'b': 5 }]));
assert.deepEqual(actual, [objects[0]]);
});
QUnit.test('should partial match maps', function(assert) {
assert.expect(3);
if (Map) {
var objects = [{ 'a': new Map }, { 'a': new Map }];
objects[0].a.set('a', 1);
objects[1].a.set('a', 1);
objects[1].a.set('b', 2);
var map = new Map;
map.set('b', 2);
var actual = lodashStable.filter(objects, _.matchesProperty('a', map));
assert.deepEqual(actual, [objects[1]]);
// An empty source map matches every map.
map.delete('b');
actual = lodashStable.filter(objects, _.matchesProperty('a', map));
assert.deepEqual(actual, objects);
// An entry absent from both object maps matches neither.
map.set('c', 3);
actual = lodashStable.filter(objects, _.matchesProperty('a', map));
assert.deepEqual(actual, []);
}
else {
skipAssert(assert, 3);
}
});
QUnit.test('should partial match sets', function(assert) {
assert.expect(3);
if (Set) {
var objects = [{ 'a': new Set }, { 'a': new Set }];
objects[0].a.add(1);
objects[1].a.add(1);
objects[1].a.add(2);
var set = new Set;
set.add(2);
var actual = lodashStable.filter(objects, _.matchesProperty('a', set));
assert.deepEqual(actual, [objects[1]]);
// An empty source set matches every set.
set.delete(2);
actual = lodashStable.filter(objects, _.matchesProperty('a', set));
assert.deepEqual(actual, objects);
// A value absent from both object sets matches neither.
set.add(3);
actual = lodashStable.filter(objects, _.matchesProperty('a', set));
assert.deepEqual(actual, []);
}
else {
skipAssert(assert, 3);
}
});
QUnit.test('should match `undefined` values', function(assert) {
assert.expect(2);
// Matching `undefined` requires the key to actually be present.
var objects = [{ 'a': 1 }, { 'a': 1, 'b': 1 }, { 'a': 1, 'b': undefined }],
actual = lodashStable.map(objects, _.matchesProperty('b', undefined)),
expected = [false, false, true];
assert.deepEqual(actual, expected);
objects = [{ 'a': { 'a': 1 } }, { 'a': { 'a': 1, 'b': 1 } }, { 'a': { 'a': 1, 'b': undefined } }];
actual = lodashStable.map(objects, _.matchesProperty('a', { 'b': undefined }));
assert.deepEqual(actual, expected);
});
QUnit.test('should match `undefined` values of nested objects', function(assert) {
assert.expect(4);
var object = { 'a': { 'b': undefined } };
lodashStable.each(['a.b', ['a', 'b']], function(path) {
var matches = _.matchesProperty(path, undefined);
assert.strictEqual(matches(object), true);
});
// A missing nested key must not be treated as a present `undefined`.
lodashStable.each(['a.a', ['a', 'a']], function(path) {
var matches = _.matchesProperty(path, undefined);
assert.strictEqual(matches(object), false);
});
});
QUnit.test('should match `undefined` values on primitives', function(assert) {
assert.expect(2);
// Temporarily extend `Number.prototype` so primitives expose the matched
// keys; cleaned up at the end of the test.
numberProto.a = 1;
numberProto.b = undefined;
try {
var matches = _.matchesProperty('b', undefined);
assert.strictEqual(matches(1), true);
} catch (e) {
assert.ok(false, e.message);
}
numberProto.a = { 'b': 1, 'c': undefined };
try {
matches = _.matchesProperty('a', { 'c': undefined });
assert.strictEqual(matches(1), true);
} catch (e) {
assert.ok(false, e.message);
}
delete numberProto.a;
delete numberProto.b;
});
QUnit.test('should return `true` when comparing a `srcValue` of empty arrays and objects', function(assert) {
assert.expect(1);
var objects = [{ 'a': [1], 'b': { 'c': 1 } }, { 'a': [2, 3], 'b': { 'd': 2 } }],
matches = _.matchesProperty('a', { 'a': [], 'b': {} });
var actual = lodashStable.filter(objects, function(object) {
return matches({ 'a': object });
});
assert.deepEqual(actual, objects);
});
QUnit.test('should not change behavior if `srcValue` is modified', function(assert) {
assert.expect(9);
// Like `_.matches`, the predicate snapshots `srcValue`: mutations after
// creation (nested for index 0, top-level otherwise) must not affect it.
lodashStable.each([{ 'a': { 'b': 2, 'c': 3 } }, { 'a': 1, 'b': 2 }, { 'a': 1 }], function(source, index) {
var object = lodashStable.cloneDeep(source),
matches = _.matchesProperty('a', source);
assert.strictEqual(matches({ 'a': object }), true);
if (index) {
source.a = 2;
source.b = 1;
source.c = 3;
} else {
source.a.b = 1;
source.a.c = 2;
source.a.d = 3;
}
assert.strictEqual(matches({ 'a': object }), true);
assert.strictEqual(matches({ 'a': source }), false);
});
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.max');
(function() {
// `_.max` should pick the largest element of an array.
QUnit.test('should return the largest value from a collection', function(assert) {
assert.expect(1);
var result = _.max([1, 2, 3]);
assert.strictEqual(result, 3);
});
// Every falsey input, and the empty array, should yield `undefined`.
QUnit.test('should return `undefined` for empty collections', function(assert) {
assert.expect(1);
var inputs = falsey.concat([[]]);
var expected = lodashStable.map(inputs, noop);
var actual = lodashStable.map(inputs, function(input, index) {
try {
return index ? _.max(input) : _.max();
} catch (e) {}
});
assert.deepEqual(actual, expected);
});
// Comparison uses `>` semantics, so it works for strings too.
QUnit.test('should work with non-numeric collection values', function(assert) {
assert.expect(1);
assert.strictEqual(_.max(['a', 'b']), 'b');
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.mean');
(function() {
// The arithmetic mean of [4, 2, 8, 6] is 20 / 4 = 5.
QUnit.test('should return the mean of an array of numbers', function(assert) {
assert.expect(1);
assert.strictEqual(_.mean([4, 2, 8, 6]), 5);
});
// Empty values produce `NaN` (a 0 / 0 average).
QUnit.test('should return `NaN` when passing empty `array` values', function(assert) {
assert.expect(1);
var expected = lodashStable.map(empties, stubNaN);
var actual = lodashStable.map(empties, _.mean);
assert.deepEqual(actual, expected);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.meanBy');
(function() {
var objects = [{ 'a': 2 }, { 'a': 3 }, { 'a': 1 }];
// The iteratee projects each element before averaging.
QUnit.test('should work with an `iteratee`', function(assert) {
assert.expect(1);
var result = _.meanBy(objects, function(item) {
return item.a;
});
assert.deepEqual(result, 2);
});
// The iteratee receives exactly one argument: the element itself.
QUnit.test('should provide correct `iteratee` arguments', function(assert) {
assert.expect(1);
var args;
_.meanBy(objects, function() {
if (!args) {
args = slice.call(arguments);
}
});
assert.deepEqual(args, [{ 'a': 2 }]);
});
// Index and property-name shorthands stand in for an iteratee function.
QUnit.test('should work with `_.property` shorthands', function(assert) {
assert.expect(2);
assert.strictEqual(_.meanBy([[2], [3], [1]], 0), 2);
assert.strictEqual(_.meanBy(objects, 'a'), 2);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.memoize');
(function() {
// Minimal Map-like cache used to exercise the pluggable `_.memoize.Cache`.
function CustomCache() {
this.clear();
}
CustomCache.prototype = {
'clear': function() {
this.__data__ = [];
return this;
},
'get': function(key) {
var entry = lodashStable.find(this.__data__, ['key', key]);
return entry && entry.value;
},
'has': function(key) {
return lodashStable.some(this.__data__, ['key', key]);
},
'set': function(key, value) {
this.__data__.push({ 'key': key, 'value': value });
return this;
}
};
// Cache variant whose mutators return NEW instances instead of mutating in
// place, verifying memoize adopts the cache returned by `set`.
function ImmutableCache() {
this.__data__ = [];
}
ImmutableCache.prototype = lodashStable.create(CustomCache.prototype, {
'constructor': ImmutableCache,
'clear': function() {
return new ImmutableCache;
},
'set': function(key, value) {
var result = new ImmutableCache;
result.__data__ = this.__data__.concat({ 'key': key, 'value': value });
return result;
}
});
QUnit.test('should memoize results based on the first argument given', function(assert) {
assert.expect(2);
var memoized = _.memoize(function(a, b, c) {
return a + b + c;
});
assert.strictEqual(memoized(1, 2, 3), 6);
// Same first argument, so the cached result (6) is returned, not 9.
assert.strictEqual(memoized(1, 3, 5), 6);
});
QUnit.test('should support a `resolver`', function(assert) {
assert.expect(2);
var fn = function(a, b, c) { return a + b + c; },
memoized = _.memoize(fn, fn);
assert.strictEqual(memoized(1, 2, 3), 6);
// The resolver keys on the sum of all arguments, so a new key is computed.
assert.strictEqual(memoized(1, 3, 5), 9);
});
QUnit.test('should use `this` binding of function for `resolver`', function(assert) {
assert.expect(2);
var fn = function(a, b, c) { return a + this.b + this.c; },
memoized = _.memoize(fn, fn);
var object = { 'memoized': memoized, 'b': 2, 'c': 3 };
assert.strictEqual(object.memoized(1), 6);
// `this` is read at call time, so changing it changes the cache key too.
object.b = 3;
object.c = 5;
assert.strictEqual(object.memoized(1), 9);
});
QUnit.test('should throw a TypeError if `resolve` is truthy and not a function', function(assert) {
assert.expect(1);
assert.raises(function() { _.memoize(noop, true); }, TypeError);
});
QUnit.test('should not error if `resolver` is nullish', function(assert) {
assert.expect(1);
// `values[0]` is a hole: exercises the zero-argument call path.
var values = [, null, undefined],
expected = lodashStable.map(values, stubTrue);
var actual = lodashStable.map(values, function(resolver, index) {
try {
return _.isFunction(index ? _.memoize(noop, resolver) : _.memoize(noop));
} catch (e) {}
});
assert.deepEqual(actual, expected);
});
// Keys that collide with `Object.prototype` members must not be shadowed by
// inherited properties of the cache's backing store.
QUnit.test('should check cache for own properties', function(assert) {
assert.expect(1);
var props = [
'constructor',
'hasOwnProperty',
'isPrototypeOf',
'propertyIsEnumerable',
'toLocaleString',
'toString',
'valueOf'
];
var memoized = _.memoize(identity);
var actual = lodashStable.map(props, function(value) {
return memoized(value);
});
assert.deepEqual(actual, props);
});
QUnit.test('should cache the `__proto__` key', function(assert) {
assert.expect(8);
var array = [],
key = '__proto__';
lodashStable.times(2, function(index) {
var count = 0,
resolver = index ? identity : undefined;
var memoized = _.memoize(function() {
count++;
return array;
}, resolver);
var cache = memoized.cache;
memoized(key);
memoized(key);
// The underlying function ran once; the second call hit the cache.
assert.strictEqual(count, 1);
assert.strictEqual(cache.get(key), array);
// Storing '__proto__' must not have degraded the store to an Array.
assert.notOk(cache.__data__ instanceof Array);
assert.strictEqual(cache.delete(key), true);
});
});
QUnit.test('should allow `_.memoize.Cache` to be customized', function(assert) {
assert.expect(4);
var oldCache = _.memoize.Cache;
_.memoize.Cache = CustomCache;
var memoized = _.memoize(function(object) {
return object.id;
});
var cache = memoized.cache,
key1 = { 'id': 'a' },
key2 = { 'id': 'b' };
assert.strictEqual(memoized(key1), 'a');
assert.strictEqual(cache.has(key1), true);
assert.strictEqual(memoized(key2), 'b');
assert.strictEqual(cache.has(key2), true);
// Restore the default cache for later tests.
_.memoize.Cache = oldCache;
});
// Fixed test title: was 'should works with an immutable `_.memoize.Cache` '
// (ungrammatical, with a trailing space).
QUnit.test('should work with an immutable `_.memoize.Cache`', function(assert) {
assert.expect(2);
var oldCache = _.memoize.Cache;
_.memoize.Cache = ImmutableCache;
var memoized = _.memoize(function(object) {
return object.id;
});
var key1 = { 'id': 'a' },
key2 = { 'id': 'b' };
memoized(key1);
memoized(key2);
// `memoized.cache` must reflect the latest instance returned by `set`.
var cache = memoized.cache;
assert.strictEqual(cache.has(key1), true);
assert.strictEqual(cache.has(key2), true);
_.memoize.Cache = oldCache;
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('memoizeCapped');
(function() {
var func = _._memoizeCapped;
// The capped memoizer must flush its cache once it holds MAX_MEMOIZE_SIZE
// entries, then start over with the key that overflowed it.
QUnit.test('should enforce a max cache size of `MAX_MEMOIZE_SIZE`', function(assert) {
assert.expect(2);
if (!func) {
// Internal helper not exposed by this build; skip both assertions.
skipAssert(assert, 2);
return;
}
var memoized = func(identity);
var cache = memoized.cache;
lodashStable.times(MAX_MEMOIZE_SIZE, memoized);
assert.strictEqual(cache.size, MAX_MEMOIZE_SIZE);
memoized(MAX_MEMOIZE_SIZE);
assert.strictEqual(cache.size, 1);
});
}());
/*--------------------------------------------------------------------------*/
// Tests for `_.merge`: recursive assignment of own and inherited enumerable
// source properties, with special handling for arrays, typed arrays, buffers,
// arguments objects, and circular references.
QUnit.module('lodash.merge');
(function() {
// Parallel arrays of objects merge element-wise.
QUnit.test('should merge `source` into `object`', function(assert) {
assert.expect(1);
var names = {
'characters': [
{ 'name': 'barney' },
{ 'name': 'fred' }
]
};
var ages = {
'characters': [
{ 'age': 36 },
{ 'age': 40 }
]
};
var heights = {
'characters': [
{ 'height': '5\'4"' },
{ 'height': '5\'5"' }
]
};
var expected = {
'characters': [
{ 'name': 'barney', 'age': 36, 'height': '5\'4"' },
{ 'name': 'fred', 'age': 40, 'height': '5\'5"' }
]
};
assert.deepEqual(_.merge(names, ages, heights), expected);
});
// Circular sources must not cause infinite recursion, and the cycle must be
// reproduced in the result.
QUnit.test('should merge sources containing circular references', function(assert) {
assert.expect(2);
var object = {
'foo': { 'a': 1 },
'bar': { 'a': 2 }
};
var source = {
'foo': { 'b': { 'c': { 'd': {} } } },
'bar': {}
};
source.foo.b.c.d = source;
source.bar.b = source.foo.b;
var actual = _.merge(object, source);
// Shared subtrees are cloned per destination slot, not aliased.
assert.notStrictEqual(actual.bar.b, actual.foo.b);
assert.strictEqual(actual.foo.b.c.d, actual.foo.b.c.d.foo.b.c.d);
});
QUnit.test('should work with four arguments', function(assert) {
assert.expect(1);
var expected = { 'a': 4 },
actual = _.merge({ 'a': 1 }, { 'a': 2 }, { 'a': 3 }, expected);
assert.deepEqual(actual, expected);
});
// A function destination receives source properties directly.
QUnit.test('should merge onto function `object` values', function(assert) {
assert.expect(2);
function Foo() {}
var source = { 'a': 1 },
actual = _.merge(Foo, source);
assert.strictEqual(actual, Foo);
assert.strictEqual(Foo.a, 1);
});
QUnit.test('should merge first source object properties to function', function(assert) {
assert.expect(1);
var fn = function() {},
object = { 'prop': {} },
actual = _.merge({ 'prop': fn }, object);
assert.deepEqual(actual, object);
});
QUnit.test('should merge first and second source object properties to function', function(assert) {
assert.expect(1);
var fn = function() {},
object = { 'prop': {} },
actual = _.merge({ 'prop': fn }, { 'prop': fn }, object);
assert.deepEqual(actual, object);
});
// Function SOURCE values are replaced wholesale, never merged into.
QUnit.test('should not merge onto function values of sources', function(assert) {
assert.expect(3);
var source1 = { 'a': function() {} },
source2 = { 'a': { 'b': 2 } },
expected = { 'a': { 'b': 2 } },
actual = _.merge({}, source1, source2);
assert.deepEqual(actual, expected);
assert.notOk('b' in source1.a);
actual = _.merge(source1, source2);
assert.deepEqual(actual, expected);
});
QUnit.test('should merge onto non-plain `object` values', function(assert) {
assert.expect(2);
function Foo() {}
var object = new Foo,
actual = _.merge(object, { 'a': 1 });
assert.strictEqual(actual, object);
assert.strictEqual(object.a, 1);
});
// Holes in sparse array sources become explicit `undefined` entries.
QUnit.test('should treat sparse array sources as dense', function(assert) {
assert.expect(2);
var array = [1];
array[2] = 3;
var actual = _.merge([], array),
expected = array.slice();
expected[1] = undefined;
assert.ok('1' in actual);
assert.deepEqual(actual, expected);
});
// `arguments` objects merge like plain objects and are never propagated
// as `arguments` into the result (or back into the original `args`).
QUnit.test('should merge `arguments` objects', function(assert) {
assert.expect(7);
var object1 = { 'value': args },
object2 = { 'value': { '3': 4 } },
expected = { '0': 1, '1': 2, '2': 3, '3': 4 },
actual = _.merge(object1, object2);
assert.notOk('3' in args);
assert.notOk(_.isArguments(actual.value));
assert.deepEqual(actual.value, expected);
object1.value = args;
actual = _.merge(object2, object1);
assert.notOk(_.isArguments(actual.value));
assert.deepEqual(actual.value, expected);
expected = { '0': 1, '1': 2, '2': 3 };
actual = _.merge({}, object1);
assert.notOk(_.isArguments(actual.value));
assert.deepEqual(actual.value, expected);
});
// Typed arrays merge index-wise in both directions (typed destination with
// array source, and array destination with typed source).
QUnit.test('should merge typed arrays', function(assert) {
assert.expect(4);
var array1 = [0],
array2 = [0, 0],
array3 = [0, 0, 0, 0],
array4 = [0, 0, 0, 0, 0, 0, 0, 0];
// One expected backing array per typed-array kind, matched by element size.
var arrays = [array2, array1, array4, array3, array2, array4, array4, array3, array2],
buffer = ArrayBuffer && new ArrayBuffer(8);
var expected = lodashStable.map(typedArrays, function(type, index) {
var array = arrays[index].slice();
array[0] = 1;
return root[type] ? { 'value': array } : false;
});
var actual = lodashStable.map(typedArrays, function(type) {
var Ctor = root[type];
return Ctor ? _.merge({ 'value': new Ctor(buffer) }, { 'value': [1] }) : false;
});
assert.ok(lodashStable.isArray(actual));
assert.deepEqual(actual, expected);
expected = lodashStable.map(typedArrays, function(type, index) {
var array = arrays[index].slice();
array.push(1);
return root[type] ? { 'value': array } : false;
});
actual = lodashStable.map(typedArrays, function(type, index) {
var Ctor = root[type],
array = lodashStable.range(arrays[index].length);
array.push(1);
return Ctor ? _.merge({ 'value': array }, { 'value': new Ctor(buffer) }) : false;
});
assert.ok(lodashStable.isArray(actual));
assert.deepEqual(actual, expected);
});
// `null` is a real value and overwrites; only `undefined` is skipped.
QUnit.test('should assign `null` values', function(assert) {
assert.expect(1);
var actual = _.merge({ 'a': 1 }, { 'a': null });
assert.strictEqual(actual.a, null);
});
QUnit.test('should assign non array/buffer/typed-array/plain-object source values directly', function(assert) {
assert.expect(1);
function Foo() {}
var values = [new Foo, new Boolean, new Date, Foo, new Number, new String, new RegExp],
expected = lodashStable.map(values, stubTrue);
var actual = lodashStable.map(values, function(value) {
var object = _.merge({}, { 'a': value, 'b': { 'c': value } });
return object.a === value && object.b.c === value;
});
assert.deepEqual(actual, expected);
});
// Buffers are cloned (equal contents, distinct identity).
QUnit.test('should clone buffer source values', function(assert) {
assert.expect(3);
if (Buffer) {
var buffer = new Buffer([1]),
actual = _.merge({}, { 'value': buffer }).value;
assert.ok(lodashStable.isBuffer(actual));
assert.strictEqual(actual[0], buffer[0]);
assert.notStrictEqual(actual, buffer);
}
else {
skipAssert(assert, 3);
}
});
// Container source values are deep-cloned: neither the container nor its
// nested values may be shared with the source.
QUnit.test('should deep clone array/typed-array/plain-object source values', function(assert) {
assert.expect(1);
var typedArray = Uint8Array
? new Uint8Array([1])
: { 'buffer': [1] };
var props = ['0', 'buffer', 'a'],
values = [[{ 'a': 1 }], typedArray, { 'a': [1] }],
expected = lodashStable.map(values, stubTrue);
var actual = lodashStable.map(values, function(value, index) {
var key = props[index],
object = _.merge({}, { 'value': value }),
subValue = value[key],
newValue = object.value,
newSubValue = newValue[key];
return (
newValue !== value &&
newSubValue !== subValue &&
lodashStable.isEqual(newValue, value)
);
});
assert.deepEqual(actual, expected);
});
// Merging must never mutate the source objects themselves.
QUnit.test('should not augment source objects', function(assert) {
assert.expect(6);
var source1 = { 'a': [{ 'a': 1 }] },
source2 = { 'a': [{ 'b': 2 }] },
actual = _.merge({}, source1, source2);
assert.deepEqual(source1.a, [{ 'a': 1 }]);
assert.deepEqual(source2.a, [{ 'b': 2 }]);
assert.deepEqual(actual.a, [{ 'a': 1, 'b': 2 }]);
var source1 = { 'a': [[1, 2, 3]] },
source2 = { 'a': [[3, 4]] },
actual = _.merge({}, source1, source2);
assert.deepEqual(source1.a, [[1, 2, 3]]);
assert.deepEqual(source2.a, [[3, 4]]);
// Index-wise overwrite: [3, 4] onto [1, 2, 3] leaves the trailing 3.
assert.deepEqual(actual.a, [[3, 4, 3]]);
});
// Non-plain destinations keep their prototype when plain objects merge in.
QUnit.test('should merge plain objects onto non-plain objects', function(assert) {
assert.expect(4);
function Foo(object) {
lodashStable.assign(this, object);
}
var object = { 'a': 1 },
actual = _.merge(new Foo, object);
assert.ok(actual instanceof Foo);
assert.deepEqual(actual, new Foo(object));
actual = _.merge([new Foo], [object]);
assert.ok(actual[0] instanceof Foo);
assert.deepEqual(actual, [new Foo(object)]);
});
QUnit.test('should not overwrite existing values with `undefined` values of object sources', function(assert) {
assert.expect(1);
var actual = _.merge({ 'a': 1 }, { 'a': undefined, 'b': undefined });
assert.deepEqual(actual, { 'a': 1, 'b': undefined });
});
QUnit.test('should not overwrite existing values with `undefined` values of array sources', function(assert) {
assert.expect(2);
var array = [1];
array[2] = 3;
var actual = _.merge([4, 5, 6], array),
expected = [1, 5, 3];
assert.deepEqual(actual, expected);
// Both a hole and an explicit `undefined` are skipped.
array = [1, , 3];
array[1] = undefined;
actual = _.merge([4, 5, 6], array);
assert.deepEqual(actual, expected);
});
// Self-merge is a no-op: the accessor spy must never fire.
QUnit.test('should skip merging when `object` and `source` are the same value', function(assert) {
assert.expect(1);
var object = {},
pass = true;
defineProperty(object, 'a', {
'configurable': true,
'enumerable': true,
'get': function() { pass = false; },
'set': function() { pass = false; }
});
_.merge(object, object);
assert.ok(pass);
});
// Array-like destinations are converted to arrays when the source slot is
// an array; non-index keys ('b', 'length') are dropped.
QUnit.test('should convert values to arrays when merging arrays of `source`', function(assert) {
assert.expect(2);
var object = { 'a': { '1': 'y', 'b': 'z', 'length': 2 } },
actual = _.merge(object, { 'a': ['x'] });
assert.deepEqual(actual, { 'a': ['x', 'y'] });
actual = _.merge({ 'a': {} }, { 'a': [] });
assert.deepEqual(actual, { 'a': [] });
});
// Strings are array-like but must be replaced, not merged character-wise.
QUnit.test('should not convert strings to arrays when merging arrays of `source`', function(assert) {
assert.expect(1);
var object = { 'a': 'abcde' },
actual = _.merge(object, { 'a': ['x', 'y', 'z'] });
assert.deepEqual(actual, { 'a': ['x', 'y', 'z'] });
});
// Host objects (DOM nodes) are assigned by reference without recursion.
QUnit.test('should not error on DOM elements', function(assert) {
assert.expect(1);
var object1 = { 'el': document && document.createElement('div') },
object2 = { 'el': document && document.createElement('div') },
pairs = [[{}, object1], [object1, object2]],
expected = lodashStable.map(pairs, stubTrue);
var actual = lodashStable.map(pairs, function(pair) {
try {
return _.merge(pair[0], pair[1]).el === pair[1].el;
} catch (e) {}
});
assert.deepEqual(actual, expected);
});
}());
/*--------------------------------------------------------------------------*/
// Tests for `_.mergeWith`: like `_.merge` but consults a `customizer` first,
// falling back to default merging when the customizer returns `undefined`.
QUnit.module('lodash.mergeWith');
(function() {
QUnit.test('should handle merging when `customizer` returns `undefined`', function(assert) {
assert.expect(2);
var actual = _.mergeWith({ 'a': { 'b': [1, 1] } }, { 'a': { 'b': [0] } }, noop);
assert.deepEqual(actual, { 'a': { 'b': [0, 1] } });
// `identity` returns the destination value (first arg) for index 0, which
// is `undefined`, so default merging assigns the source's `undefined`.
actual = _.mergeWith([], [undefined], identity);
assert.deepEqual(actual, [undefined]);
});
// Default merging (customizer returned `undefined`) must still clone sources.
QUnit.test('should clone sources when `customizer` returns `undefined`', function(assert) {
assert.expect(1);
var source1 = { 'a': { 'b': { 'c': 1 } } },
source2 = { 'a': { 'b': { 'd': 2 } } };
_.mergeWith({}, source1, source2, noop);
assert.deepEqual(source1.a.b, { 'c': 1 });
});
// A non-`undefined` customizer result is used verbatim.
QUnit.test('should defer to `customizer` for non `undefined` results', function(assert) {
assert.expect(1);
var actual = _.mergeWith({ 'a': { 'b': [0, 1] } }, { 'a': { 'b': [2] } }, function(a, b) {
return lodashStable.isArray(a) ? a.concat(b) : undefined;
});
assert.deepEqual(actual, { 'a': { 'b': [0, 1, 2] } });
});
// The customizer's last argument is the internal `Stack` used for cycle
// detection; it is passed once per visited property.
QUnit.test('should provide `stack` to `customizer`', function(assert) {
assert.expect(4);
var actual = [];
_.mergeWith({}, { 'z': 1, 'a': { 'b': 2 } }, function() {
actual.push(_.last(arguments));
});
assert.strictEqual(actual.length, 3);
_.each(actual, function(a) {
assert.ok(isNpm
? a.constructor.name == 'Stack'
: a instanceof mapCaches.Stack
);
});
});
// A primitive destination value is replaced by a clone of the object source.
QUnit.test('should overwrite primitives with source object clones', function(assert) {
assert.expect(1);
var actual = _.mergeWith({ 'a': 0 }, { 'a': { 'b': ['c'] } }, function(a, b) {
return lodashStable.isArray(a) ? a.concat(b) : undefined;
});
assert.deepEqual(actual, { 'a': { 'b': ['c'] } });
});
// Sibling properties sharing one source array must each see a fresh stack
// entry, so 'a' (customized concat) and 'b' (default clone) differ.
QUnit.test('should pop the stack of sources for each sibling property', function(assert) {
assert.expect(1);
var array = ['b', 'c'],
object = { 'a': ['a'] },
source = { 'a': array, 'b': array };
var actual = _.mergeWith(object, source, function(a, b) {
return lodashStable.isArray(a) ? a.concat(b) : undefined;
});
assert.deepEqual(actual, { 'a': ['a', 'b', 'c'], 'b': ['b', 'c'] });
});
}());
/*--------------------------------------------------------------------------*/
// Tests for `_.method`: creates a function that invokes the method at `path`
// of a given object, optionally with partially applied arguments.
QUnit.module('lodash.method');
(function() {
QUnit.test('should create a function that calls a method of a given object', function(assert) {
assert.expect(4);
var object = { 'a': stubOne };
// Both string and array path forms are exercised throughout this module.
lodashStable.each(['a', ['a']], function(path) {
var method = _.method(path);
assert.strictEqual(method.length, 1);
assert.strictEqual(method(object), 1);
});
});
QUnit.test('should work with deep property values', function(assert) {
assert.expect(2);
var object = { 'a': { 'b': stubTwo } };
lodashStable.each(['a.b', ['a', 'b']], function(path) {
var method = _.method(path);
assert.strictEqual(method(object), 2);
});
});
// Numeric paths index into arrays; `_.constant` makes array[1] return 1.
QUnit.test('should work with a non-string `path`', function(assert) {
assert.expect(2);
var array = lodashStable.times(3, _.constant);
lodashStable.each([1, [1]], function(path) {
var method = _.method(path);
assert.strictEqual(method(array), 1);
});
});
// Non-string path segments are stringified (null -> 'null', fn -> 'fn', ...).
QUnit.test('should coerce `path` to a string', function(assert) {
assert.expect(2);
function fn() {}
fn.toString = lodashStable.constant('fn');
var expected = [1, 2, 3, 4],
object = { 'null': stubOne, 'undefined': stubTwo, 'fn': stubThree, '[object Object]': stubFour },
paths = [null, undefined, fn, {}];
lodashStable.times(2, function(index) {
var actual = lodashStable.map(paths, function(path) {
var method = _.method(index ? [path] : path);
return method(object);
});
assert.deepEqual(actual, expected);
});
});
QUnit.test('should work with inherited property values', function(assert) {
assert.expect(2);
function Foo() {}
Foo.prototype.a = stubOne;
lodashStable.each(['a', ['a']], function(path) {
var method = _.method(path);
assert.strictEqual(method(new Foo), 1);
});
});
// When a literal key 'a.b' exists, it wins over the deep path a -> b.
QUnit.test('should use a key over a path', function(assert) {
assert.expect(2);
var object = { 'a.b': stubOne, 'a': { 'b': stubTwo } };
lodashStable.each(['a.b', ['a.b']], function(path) {
var method = _.method(path);
assert.strictEqual(method(object), 1);
});
});
QUnit.test('should return `undefined` when `object` is nullish', function(assert) {
assert.expect(2);
// `values[0]` is a hole: exercises the zero-argument call.
var values = [, null, undefined],
expected = lodashStable.map(values, noop);
lodashStable.each(['constructor', ['constructor']], function(path) {
var method = _.method(path);
var actual = lodashStable.map(values, function(value, index) {
return index ? method(value) : method();
});
assert.deepEqual(actual, expected);
});
});
QUnit.test('should return `undefined` for deep paths when `object` is nullish', function(assert) {
assert.expect(2);
var values = [, null, undefined],
expected = lodashStable.map(values, noop);
lodashStable.each(['constructor.prototype.valueOf', ['constructor', 'prototype', 'valueOf']], function(path) {
var method = _.method(path);
var actual = lodashStable.map(values, function(value, index) {
return index ? method(value) : method();
});
assert.deepEqual(actual, expected);
});
});
QUnit.test('should return `undefined` if parts of `path` are missing', function(assert) {
assert.expect(4);
var object = {};
lodashStable.each(['a', 'a[1].b.c', ['a'], ['a', '1', 'b', 'c']], function(path) {
var method = _.method(path);
assert.strictEqual(method(object), undefined);
});
});
// Extra arguments to `_.method` are partially applied to the invoked method.
QUnit.test('should apply partial arguments to function', function(assert) {
assert.expect(2);
var object = {
'fn': function() {
return slice.call(arguments);
}
};
lodashStable.each(['fn', ['fn']], function(path) {
var method = _.method(path, 1, 2, 3);
assert.deepEqual(method(object), [1, 2, 3]);
});
});
// `this` inside the invoked method must be its immediate parent object.
QUnit.test('should invoke deep property methods with the correct `this` binding', function(assert) {
assert.expect(2);
var object = { 'a': { 'b': function() { return this.c; }, 'c': 1 } };
lodashStable.each(['a.b', ['a', 'b']], function(path) {
var method = _.method(path);
assert.strictEqual(method(object), 1);
});
});
}());
/*--------------------------------------------------------------------------*/
// Tests for `_.methodOf`: the inverse of `_.method` — the object is bound up
// front and the returned function takes the `path`.
QUnit.module('lodash.methodOf');
(function() {
QUnit.test('should create a function that calls a method of a given key', function(assert) {
assert.expect(4);
var object = { 'a': stubOne };
// Both string and array path forms are exercised throughout this module.
lodashStable.each(['a', ['a']], function(path) {
var methodOf = _.methodOf(object);
assert.strictEqual(methodOf.length, 1);
assert.strictEqual(methodOf(path), 1);
});
});
QUnit.test('should work with deep property values', function(assert) {
assert.expect(2);
var object = { 'a': { 'b': stubTwo } };
lodashStable.each(['a.b', ['a', 'b']], function(path) {
var methodOf = _.methodOf(object);
assert.strictEqual(methodOf(path), 2);
});
});
// Numeric paths index into arrays; `_.constant` makes array[1] return 1.
QUnit.test('should work with a non-string `path`', function(assert) {
assert.expect(2);
var array = lodashStable.times(3, _.constant);
lodashStable.each([1, [1]], function(path) {
var methodOf = _.methodOf(array);
assert.strictEqual(methodOf(path), 1);
});
});
// Non-string path segments are stringified (null -> 'null', fn -> 'fn', ...).
QUnit.test('should coerce `path` to a string', function(assert) {
assert.expect(2);
function fn() {}
fn.toString = lodashStable.constant('fn');
var expected = [1, 2, 3, 4],
object = { 'null': stubOne, 'undefined': stubTwo, 'fn': stubThree, '[object Object]': stubFour },
paths = [null, undefined, fn, {}];
lodashStable.times(2, function(index) {
var actual = lodashStable.map(paths, function(path) {
var methodOf = _.methodOf(object);
return methodOf(index ? [path] : path);
});
assert.deepEqual(actual, expected);
});
});
QUnit.test('should work with inherited property values', function(assert) {
assert.expect(2);
function Foo() {}
Foo.prototype.a = stubOne;
lodashStable.each(['a', ['a']], function(path) {
var methodOf = _.methodOf(new Foo);
assert.strictEqual(methodOf(path), 1);
});
});
// When a literal key 'a.b' exists, it wins over the deep path a -> b.
QUnit.test('should use a key over a path', function(assert) {
assert.expect(2);
var object = { 'a.b': stubOne, 'a': { 'b': stubTwo } };
lodashStable.each(['a.b', ['a.b']], function(path) {
var methodOf = _.methodOf(object);
assert.strictEqual(methodOf(path), 1);
});
});
QUnit.test('should return `undefined` when `object` is nullish', function(assert) {
assert.expect(2);
// `values[0]` is a hole: exercises the zero-argument `_.methodOf()` call.
var values = [, null, undefined],
expected = lodashStable.map(values, noop);
lodashStable.each(['constructor', ['constructor']], function(path) {
var actual = lodashStable.map(values, function(value, index) {
var methodOf = index ? _.methodOf() : _.methodOf(value);
return methodOf(path);
});
assert.deepEqual(actual, expected);
});
});
QUnit.test('should return `undefined` for deep paths when `object` is nullish', function(assert) {
assert.expect(2);
var values = [, null, undefined],
expected = lodashStable.map(values, noop);
lodashStable.each(['constructor.prototype.valueOf', ['constructor', 'prototype', 'valueOf']], function(path) {
var actual = lodashStable.map(values, function(value, index) {
var methodOf = index ? _.methodOf() : _.methodOf(value);
return methodOf(path);
});
assert.deepEqual(actual, expected);
});
});
QUnit.test('should return `undefined` if parts of `path` are missing', function(assert) {
assert.expect(4);
var object = {},
methodOf = _.methodOf(object);
lodashStable.each(['a', 'a[1].b.c', ['a'], ['a', '1', 'b', 'c']], function(path) {
assert.strictEqual(methodOf(path), undefined);
});
});
// Extra arguments to `_.methodOf` are partially applied to the invoked method.
QUnit.test('should apply partial arguments to function', function(assert) {
assert.expect(2);
var object = {
'fn': function() {
return slice.call(arguments);
}
};
var methodOf = _.methodOf(object, 1, 2, 3);
lodashStable.each(['fn', ['fn']], function(path) {
assert.deepEqual(methodOf(path), [1, 2, 3]);
});
});
// `this` inside the invoked method must be its immediate parent object.
QUnit.test('should invoke deep property methods with the correct `this` binding', function(assert) {
assert.expect(2);
var object = { 'a': { 'b': function() { return this.c; }, 'c': 1 } },
methodOf = _.methodOf(object);
lodashStable.each(['a.b', ['a', 'b']], function(path) {
assert.strictEqual(methodOf(path), 1);
});
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.min');
(function() {
// `_.min` should pick the smallest element of an array.
QUnit.test('should return the smallest value from a collection', function(assert) {
assert.expect(1);
var result = _.min([1, 2, 3]);
assert.strictEqual(result, 1);
});
// Every falsey input, and the empty array, should yield `undefined`.
QUnit.test('should return `undefined` for empty collections', function(assert) {
assert.expect(1);
var inputs = falsey.concat([[]]);
var expected = lodashStable.map(inputs, noop);
var actual = lodashStable.map(inputs, function(input, index) {
try {
return index ? _.min(input) : _.min();
} catch (e) {}
});
assert.deepEqual(actual, expected);
});
// Comparison uses `<` semantics, so it works for strings too.
QUnit.test('should work with non-numeric collection values', function(assert) {
assert.expect(1);
assert.strictEqual(_.min(['a', 'b']), 'a');
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('extremum methods');
// Shared behavior of all four extremum helpers.
lodashStable.each(['max', 'maxBy', 'min', 'minBy'], function(methodName) {
var func = _[methodName];
var isMax = /^max/.test(methodName);
// Dates compare by timestamp, so the extremum is the newest/oldest date.
QUnit.test('`_.' + methodName + '` should work with Date objects', function(assert) {
assert.expect(1);
var curr = new Date;
var past = new Date(0);
assert.strictEqual(func([curr, past]), isMax ? curr : past);
});
// Half a million elements guards against apply/recursion limits.
QUnit.test('`_.' + methodName + '` should work with extremely large arrays', function(assert) {
assert.expect(1);
var array = lodashStable.range(0, 5e5);
assert.strictEqual(func(array), isMax ? 499999 : 0);
});
// A single-element chain should unwrap straight to that element.
QUnit.test('`_.' + methodName + '` should work when chaining on an array with only one value', function(assert) {
assert.expect(1);
if (!isNpm) {
var actual = _([40])[methodName]();
assert.strictEqual(actual, 40);
}
else {
skipAssert(assert);
}
});
});
// Behavior specific to the iteratee-taking variants `_.maxBy` / `_.minBy`.
lodashStable.each(['maxBy', 'minBy'], function(methodName) {
var array = [1, 2, 3],
func = _[methodName],
isMax = methodName == 'maxBy';
// The iteratee projects values before comparison, so negating the input
// inverts which element is reported as the extremum.
QUnit.test('`_.' + methodName + '` should work with an `iteratee`', function(assert) {
assert.expect(1);
var actual = func(array, function(n) {
return -n;
});
assert.strictEqual(actual, isMax ? 1 : 3);
});
// Fixed: title now carries the '`_.' + methodName + '`' prefix like every
// other test generated by these loops, so the two methods no longer emit
// duplicate, ambiguous test names.
QUnit.test('`_.' + methodName + '` should work with `_.property` shorthands', function(assert) {
assert.expect(2);
var objects = [{ 'a': 2 }, { 'a': 3 }, { 'a': 1 }],
actual = func(objects, 'a');
assert.deepEqual(actual, objects[isMax ? 1 : 2]);
var arrays = [[2], [3], [1]];
actual = func(arrays, 0);
assert.deepEqual(actual, arrays[isMax ? 1 : 2]);
});
// With tied projected values the FIRST element must win.
QUnit.test('`_.' + methodName + '` should work when `iteratee` returns +/-Infinity', function(assert) {
assert.expect(1);
var value = isMax ? -Infinity : Infinity,
object = { 'a': value };
var actual = func([object, { 'a': value }], function(object) {
return object.a;
});
assert.strictEqual(actual, object);
});
});
/*--------------------------------------------------------------------------*/
// Tests for `_.mixin`: adding `source` methods to lodash (or an arbitrary
// object/function) and wiring them into the chaining prototype.
QUnit.module('lodash.mixin');
(function() {
// Remove the mixed-in test methods from `wrapper` and its prototype so each
// test starts from a clean slate.
function reset(wrapper) {
delete wrapper.a;
delete wrapper.prototype.a;
delete wrapper.b;
delete wrapper.prototype.b;
}
// Minimal lodash-like wrapper constructor used to test mixing into a
// third-party function with its own chaining protocol.
function Wrapper(value) {
if (!(this instanceof Wrapper)) {
return new Wrapper(value);
}
if (_.has(value, '__wrapped__')) {
var actions = slice.call(value.__actions__),
chain = value.__chain__;
value = value.__wrapped__;
}
this.__wrapped__ = value;
this.__actions__ = actions || [];
this.__chain__ = chain || false;
}
Wrapper.prototype.value = function() {
return getUnwrappedValue(this);
};
// `source.a` is a chainable function; `source.b` is a non-function and must
// never be mixed in.
var array = ['a'],
source = { 'a': function(array) { return array[0]; }, 'b': 'B' };
QUnit.test('should mixin `source` methods into lodash', function(assert) {
assert.expect(4);
if (!isNpm) {
_.mixin(source);
assert.strictEqual(_.a(array), 'a');
assert.strictEqual(_(array).a().value(), 'a');
assert.notOk('b' in _);
assert.notOk('b' in _.prototype);
reset(_);
}
else {
skipAssert(assert, 4);
}
});
// The prototype method captures the function at mixin time, so replacing
// `_.a` afterwards affects only the static call.
QUnit.test('should mixin chaining methods by reference', function(assert) {
assert.expect(2);
if (!isNpm) {
_.mixin(source);
_.a = stubB;
assert.strictEqual(_.a(array), 'b');
assert.strictEqual(_(array).a().value(), 'a');
reset(_);
}
else {
skipAssert(assert, 2);
}
});
// Called as a method of another object, mixin targets that object — not `_`.
QUnit.test('should use a default `object` of `this`', function(assert) {
assert.expect(3);
var object = lodashStable.create(_);
object.mixin(source);
assert.strictEqual(object.a(array), 'a');
assert.notOk('a' in _);
assert.notOk('a' in _.prototype);
reset(_);
});
QUnit.test('should accept an `object`', function(assert) {
assert.expect(1);
var object = {};
_.mixin(object, source);
assert.strictEqual(object.a(array), 'a');
});
// A function target gains both static and chainable prototype methods.
QUnit.test('should accept a function `object`', function(assert) {
assert.expect(2);
_.mixin(Wrapper, source);
var wrapped = Wrapper(array),
actual = wrapped.a();
assert.strictEqual(actual.value(), 'a');
assert.ok(actual instanceof Wrapper);
reset(Wrapper);
});
QUnit.test('should return `object`', function(assert) {
assert.expect(3);
var object = {};
assert.strictEqual(_.mixin(object, source), object);
assert.strictEqual(_.mixin(Wrapper, source), Wrapper);
// With no arguments, lodash itself is returned.
assert.strictEqual(_.mixin(), _);
reset(Wrapper);
});
// Only OWN enumerable function properties of `source` are mixed in.
QUnit.test('should not assign inherited `source` methods', function(assert) {
assert.expect(1);
function Foo() {}
Foo.prototype.a = noop;
var object = {};
assert.strictEqual(_.mixin(object, new Foo), object);
});
// `options.chain` toggles whether mixed-in prototype methods return a
// wrapper (chain: true) or the unwrapped result (chain: false).
QUnit.test('should accept an `options`', function(assert) {
assert.expect(8);
function message(func, chain) {
return (func === _ ? 'lodash' : 'given') + ' function should ' + (chain ? '' : 'not ') + 'chain';
}
lodashStable.each([_, Wrapper], function(func) {
lodashStable.each([{ 'chain': false }, { 'chain': true }], function(options) {
if (!isNpm) {
if (func === _) {
_.mixin(source, options);
} else {
_.mixin(func, source, options);
}
var wrapped = func(array),
actual = wrapped.a();
if (options.chain) {
assert.strictEqual(actual.value(), 'a', message(func, true));
assert.ok(actual instanceof func, message(func, true));
} else {
assert.strictEqual(actual, 'a', message(func, false));
assert.notOk(actual instanceof func, message(func, false));
}
reset(func);
}
else {
skipAssert(assert, 2);
}
});
});
});
QUnit.test('should not extend lodash when an `object` is given with an empty `options` object', function(assert) {
assert.expect(1);
_.mixin({ 'a': noop }, {});
assert.notOk('a' in _);
reset(_);
});
QUnit.test('should not error for non-object `options` values', function(assert) {
assert.expect(2);
var pass = true;
try {
_.mixin({}, source, 1);
} catch (e) {
pass = false;
}
assert.ok(pass);
pass = true;
try {
_.mixin(source, 1);
} catch (e) {
pass = false;
}
assert.ok(pass);
reset(_);
});
QUnit.test('should not return the existing wrapped value when chaining', function(assert) {
assert.expect(2);
lodashStable.each([_, Wrapper], function(func) {
if (!isNpm) {
if (func === _) {
var wrapped = _(source),
actual = wrapped.mixin();
assert.strictEqual(actual.value(), _);
}
else {
wrapped = _(func);
actual = wrapped.mixin(source);
assert.notStrictEqual(actual, wrapped);
}
reset(func);
}
else {
skipAssert(assert);
}
});
});
QUnit.test('should produce methods that work in a lazy sequence', function(assert) {
assert.expect(1);
if (!isNpm) {
_.mixin({ 'a': _.countBy, 'b': _.filter });
var array = lodashStable.range(LARGE_ARRAY_SIZE),
actual = _(array).a().map(square).b(isEven).take().value();
assert.deepEqual(actual, _.take(_.b(_.map(_.a(array), square), isEven)));
reset(_);
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.multiply');
// Tests for `_.multiply`.
(function() {
  // Product of two operands across the sign combinations.
  QUnit.test('should multiply two numbers', function(assert) {
    assert.expect(3);
    var cases = [[6, 4, 24], [-6, 4, -24], [-6, -4, 24]];
    lodashStable.each(cases, function(triple) {
      assert.strictEqual(_.multiply(triple[0], triple[1]), triple[2]);
    });
  });
  // Non-number operands are coerced; uncoercible input yields `NaN`.
  QUnit.test('should coerce arguments to numbers', function(assert) {
    assert.expect(2);
    assert.strictEqual(_.multiply('6', '4'), 24);
    assert.deepEqual(_.multiply('x', 'y'), NaN);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.orderBy');
// Tests for `_.orderBy`, the multi-key sort that accepts per-key orders.
(function() {
var objects = [
{ 'a': 'x', 'b': 3 },
{ 'a': 'y', 'b': 4 },
{ 'a': 'x', 'b': 1 },
{ 'a': 'y', 'b': 2 }
];
// Fixture with nested properties for deep-path ordering.
var nestedObj = [
{ id: '4', address: { zipCode: 4, streetName: 'Beta' } },
{ id: '3', address: { zipCode: 3, streetName: 'Alpha' } },
{ id: '1', address: { zipCode: 1, streetName: 'Alpha' } },
{ id: '2', address: { zipCode: 2, streetName: 'Alpha' } },
{ id: '5', address: { zipCode: 4, streetName: 'Alpha' } },
];
QUnit.test('should sort by a single property by a specified order', function(assert) {
assert.expect(1);
var actual = _.orderBy(objects, 'a', 'desc');
assert.deepEqual(actual, [objects[1], objects[3], objects[0], objects[2]]);
});
// Paths may be arrays of keys (['address', 'zipCode']) or dotted strings
// ('address.streetName'); both must resolve nested properties.
QUnit.test('should sort by nested key in array format', function(assert) {
assert.expect(1);
var actual = _.orderBy(
nestedObj,
[['address', 'zipCode'], ['address.streetName']],
['asc', 'desc']
);
assert.deepEqual(actual, [nestedObj[2], nestedObj[3], nestedObj[1], nestedObj[0], nestedObj[4]]);
});
QUnit.test('should sort by multiple properties by specified orders', function(assert) {
assert.expect(1);
var actual = _.orderBy(objects, ['a', 'b'], ['desc', 'asc']);
assert.deepEqual(actual, [objects[3], objects[1], objects[2], objects[0]]);
});
// A missing or falsey order entry defaults that key to ascending.
QUnit.test('should sort by a property in ascending order when its order is not specified', function(assert) {
assert.expect(2);
var expected = [objects[2], objects[0], objects[3], objects[1]],
actual = _.orderBy(objects, ['a', 'b']);
assert.deepEqual(actual, expected);
expected = lodashStable.map(falsey, lodashStable.constant([objects[3], objects[1], objects[2], objects[0]]));
actual = lodashStable.map(falsey, function(order, index) {
return _.orderBy(objects, ['a', 'b'], index ? ['desc', order] : ['desc']);
});
assert.deepEqual(actual, expected);
});
// Boxed strings (`Object('desc')`) should behave like primitive orders.
QUnit.test('should work with `orders` specified as string objects', function(assert) {
assert.expect(1);
var actual = _.orderBy(objects, ['a'], [Object('desc')]);
assert.deepEqual(actual, [objects[1], objects[3], objects[0], objects[2]]);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.overArgs');
// Tests for `_.overArgs`, which wraps a function so each incoming argument
// is first passed through a corresponding transform.
(function() {
// Echoes its arguments so transformed values can be inspected directly.
function fn() {
return slice.call(arguments);
}
QUnit.test('should transform each argument', function(assert) {
assert.expect(1);
var over = _.overArgs(fn, doubled, square);
assert.deepEqual(over(5, 10), [10, 100]);
});
// Nullish transforms fall back to `_.identity`.
QUnit.test('should use `_.identity` when a predicate is nullish', function(assert) {
assert.expect(1);
var over = _.overArgs(fn, undefined, null);
assert.deepEqual(over('a', 'b'), ['a', 'b']);
});
// String transforms are treated as `_.property` iteratee shorthands.
QUnit.test('should work with `_.property` shorthands', function(assert) {
assert.expect(1);
var over = _.overArgs(fn, 'b', 'a');
assert.deepEqual(over({ 'b': 2 }, { 'a': 1 }), [2, 1]);
});
// Object transforms are treated as `_.matches` shorthands.
QUnit.test('should work with `_.matches` shorthands', function(assert) {
assert.expect(1);
var over = _.overArgs(fn, { 'b': 1 }, { 'a': 1 });
assert.deepEqual(over({ 'b': 2 }, { 'a': 1 }), [false, true]);
});
// [path, value] pair transforms are `_.matchesProperty` shorthands.
QUnit.test('should work with `_.matchesProperty` shorthands', function(assert) {
assert.expect(1);
var over = _.overArgs(fn, [['b', 1], ['a', 1]]);
assert.deepEqual(over({ 'b': 2 }, { 'a': 1 }), [false, true]);
});
// A flat ['a', 1] array is two property shorthands; a nested [['a', 1]]
// array is one matchesProperty shorthand.
QUnit.test('should differentiate between `_.property` and `_.matchesProperty` shorthands', function(assert) {
assert.expect(2);
var over = _.overArgs(fn, ['a', 1]);
assert.deepEqual(over({ 'a': 1 }, { '1': 2 }), [1, 2]);
over = _.overArgs(fn, [['a', 1]]);
assert.deepEqual(over({ 'a': 1 }), [true]);
});
QUnit.test('should flatten `transforms`', function(assert) {
assert.expect(1);
var over = _.overArgs(fn, [doubled, square], String);
assert.deepEqual(over(5, 10, 15), [10, 100, '15']);
});
// Extra arguments beyond the transform list pass through untouched.
QUnit.test('should not transform any argument greater than the number of transforms', function(assert) {
assert.expect(1);
var over = _.overArgs(fn, doubled, square);
assert.deepEqual(over(5, 10, 18), [10, 100, 18]);
});
QUnit.test('should not transform any arguments if no transforms are given', function(assert) {
assert.expect(1);
var over = _.overArgs(fn);
assert.deepEqual(over(5, 10, 18), [5, 10, 18]);
});
// Surplus transforms must not pad the call with `undefined` arguments.
QUnit.test('should not pass `undefined` if there are more transforms than arguments', function(assert) {
assert.expect(1);
var over = _.overArgs(fn, doubled, identity);
assert.deepEqual(over(5), [10]);
});
// Each transform receives exactly its one positional argument.
QUnit.test('should provide the correct argument to each transform', function(assert) {
assert.expect(1);
var argsList = [],
transform = function() { argsList.push(slice.call(arguments)); },
over = _.overArgs(noop, transform, transform, transform);
over('a', 'b');
assert.deepEqual(argsList, [['a'], ['b']]);
});
// Transforms run with the same `this` as the wrapped function's call site.
QUnit.test('should use `this` binding of function for `transforms`', function(assert) {
assert.expect(1);
var over = _.overArgs(function(x) {
return this[x];
}, function(x) {
return this === x;
});
var object = { 'over': over, 'true': 1 };
assert.strictEqual(object.over(object), 1);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.negate');
// Tests for `_.negate`, which wraps a predicate and inverts its result.
(function() {
  // Fix: the original block registered this exact test twice verbatim; the
  // redundant second registration added no coverage and has been removed.
  QUnit.test('should create a function that negates the result of `func`', function(assert) {
    assert.expect(2);
    var negate = _.negate(isEven);
    assert.strictEqual(negate(1), true);
    assert.strictEqual(negate(2), false);
  });
  // The wrapper must forward however many arguments it receives.
  QUnit.test('should create a function that accepts multiple arguments', function(assert) {
    assert.expect(1);
    var argCount,
        count = 5,
        negate = _.negate(function() { argCount = arguments.length; }),
        expected = lodashStable.times(count, stubTrue);
    var actual = lodashStable.times(count, function(index) {
      switch (index) {
        case 0: negate(); break;
        case 1: negate(1); break;
        case 2: negate(1, 2); break;
        case 3: negate(1, 2, 3); break;
        case 4: negate(1, 2, 3, 4);
      }
      return argCount == index;
    });
    assert.deepEqual(actual, expected);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.noConflict');
// Tests for `_.noConflict`, which restores the previous global `_` binding
// and returns the lodash function.
(function() {
QUnit.test('should return the `lodash` function', function(assert) {
assert.expect(2);
if (!isModularize) {
assert.strictEqual(_.noConflict(), oldDash);
assert.notStrictEqual(root._, oldDash);
// Restore the global so later tests see the expected `_`.
root._ = oldDash;
}
else {
skipAssert(assert, 2);
}
});
// If something else already owns `root._`, `noConflict` must leave it alone.
QUnit.test('should restore `_` only if `lodash` is the current `_` value', function(assert) {
assert.expect(2);
if (!isModularize) {
var object = root._ = {};
assert.strictEqual(_.noConflict(), oldDash);
assert.strictEqual(root._, object);
root._ = oldDash;
}
else {
skipAssert(assert, 2);
}
});
// Re-runs the lodash source in a fresh `vm` context whose global `_` is a
// sentinel object, then verifies `noConflict` hands the sentinel back.
QUnit.test('should work with a `root` of `this`', function(assert) {
assert.expect(2);
if (!coverage && !document && !isModularize && realm.object) {
var fs = require('fs'),
vm = require('vm'),
expected = {},
context = vm.createContext({ '_': expected, 'console': console }),
source = fs.readFileSync(filePath, 'utf8');
vm.runInContext(source + '\nthis.lodash = this._.noConflict()', context);
assert.strictEqual(context._, expected);
assert.ok(context.lodash);
}
else {
skipAssert(assert, 2);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.now');
// Tests for `_.now`, the millisecond timestamp helper.
(function() {
QUnit.test('should return the number of milliseconds that have elapsed since the Unix epoch', function(assert) {
assert.expect(2);
// Async test: the second assertion runs after a 32ms timeout to confirm
// the clock actually advances.
var done = assert.async();
var stamp = +new Date,
actual = _.now();
assert.ok(actual >= stamp);
setTimeout(function() {
assert.ok(_.now() > actual);
done();
}, 32);
});
// `_.now` should delegate to `Date.now`, so stubbing it is observable.
QUnit.test('should work with mocked `Date.now`', function(assert) {
assert.expect(1);
var now = Date.now;
Date.now = stubA;
var actual = _.now();
// Restore the real clock before asserting.
Date.now = now;
assert.strictEqual(actual, 'a');
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.nth');
// Tests for `_.nth`, which reads the element at index `n` (negative `n`
// counts back from the end).
(function() {
  var array = ['a', 'b', 'c', 'd'];
  // Non-negative indexes address elements from the front.
  QUnit.test('should get the nth element of `array`', function(assert) {
    assert.expect(1);
    var result = lodashStable.map(array, function(value, position) {
      return _.nth(array, position);
    });
    assert.deepEqual(result, array);
  });
  // Negative indexes address elements from the end.
  QUnit.test('should work with a negative `n`', function(assert) {
    assert.expect(1);
    var offsets = lodashStable.range(1, array.length + 1);
    var result = lodashStable.map(offsets, function(offset) {
      return _.nth(array, -offset);
    });
    assert.deepEqual(result, ['d', 'c', 'b', 'a']);
  });
  // Falsey and fractional `n` values coerce to integers (default index 0).
  QUnit.test('should coerce `n` to an integer', function(assert) {
    assert.expect(2);
    var inputs = falsey,
        wanted = lodashStable.map(inputs, stubA);
    var result = lodashStable.map(inputs, function(n) {
      return n ? _.nth(array, n) : _.nth(array);
    });
    assert.deepEqual(result, wanted);
    inputs = ['1', 1.6];
    wanted = lodashStable.map(inputs, stubB);
    result = lodashStable.map(inputs, function(n) {
      return _.nth(array, n);
    });
    assert.deepEqual(result, wanted);
  });
  // Nullish or empty collections have no nth element.
  QUnit.test('should return `undefined` for empty arrays', function(assert) {
    assert.expect(1);
    var inputs = [null, undefined, []],
        wanted = lodashStable.map(inputs, noop);
    var result = lodashStable.map(inputs, function(candidate) {
      return _.nth(candidate, 1);
    });
    assert.deepEqual(result, wanted);
  });
  // Out-of-range indexes are rejected even when the array has a `-1` key.
  QUnit.test('should return `undefined` for non-indexes', function(assert) {
    assert.expect(1);
    var sample = [1, 2],
        inputs = [Infinity, sample.length],
        wanted = lodashStable.map(inputs, noop);
    sample[-1] = 3;
    var result = lodashStable.map(inputs, function(n) {
      return _.nth(sample, n);
    });
    assert.deepEqual(result, wanted);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.nthArg');
// Tests for `_.nthArg`, which builds a function returning its nth argument.
(function() {
var args = ['a', 'b', 'c', 'd'];
QUnit.test('should create a function that returns its nth argument', function(assert) {
assert.expect(1);
var actual = lodashStable.map(args, function(value, index) {
var func = _.nthArg(index);
return func.apply(undefined, args);
});
assert.deepEqual(actual, args);
});
// Negative `n` counts back from the last argument.
QUnit.test('should work with a negative `n`', function(assert) {
assert.expect(1);
var actual = lodashStable.map(lodashStable.range(1, args.length + 1), function(n) {
var func = _.nthArg(-n);
return func.apply(undefined, args);
});
assert.deepEqual(actual, ['d', 'c', 'b', 'a']);
});
// Falsey `n` defaults to 0; fractional/string `n` coerce to integers.
QUnit.test('should coerce `n` to an integer', function(assert) {
assert.expect(2);
var values = falsey,
expected = lodashStable.map(values, stubA);
var actual = lodashStable.map(values, function(n) {
var func = n ? _.nthArg(n) : _.nthArg();
return func.apply(undefined, args);
});
assert.deepEqual(actual, expected);
values = ['1', 1.6];
expected = lodashStable.map(values, stubB);
actual = lodashStable.map(values, function(n) {
var func = _.nthArg(n);
return func.apply(undefined, args);
});
assert.deepEqual(actual, expected);
});
// Calling with no arguments at all yields `undefined`.
QUnit.test('should return `undefined` for empty arrays', function(assert) {
assert.expect(1);
var func = _.nthArg(1);
assert.strictEqual(func(), undefined);
});
// Indexes at or beyond the argument count yield `undefined`.
QUnit.test('should return `undefined` for non-indexes', function(assert) {
assert.expect(1);
var values = [Infinity, args.length],
expected = lodashStable.map(values, noop);
var actual = lodashStable.map(values, function(n) {
var func = _.nthArg(n);
return func.apply(undefined, args);
});
assert.deepEqual(actual, expected);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.omit');
// Tests for `_.omit`, which copies an object minus the given paths.
(function() {
// `toArgs` builds a real `arguments` object for the arguments-as-paths test.
var args = toArgs(['a', 'c']),
object = { 'a': 1, 'b': 2, 'c': 3, 'd': 4 },
nested = { 'a': 1, 'b': { 'c': 2, 'd': 3 } };
QUnit.test('should flatten `paths`', function(assert) {
assert.expect(2);
assert.deepEqual(_.omit(object, 'a', 'c'), { 'b': 2, 'd': 4 });
assert.deepEqual(_.omit(object, ['a', 'd'], 'c'), { 'b': 2 });
});
QUnit.test('should support deep paths', function(assert) {
assert.expect(1);
assert.deepEqual(_.omit(nested, 'b.c'), { 'a': 1, 'b': { 'd': 3} });
});
// A nested array path like [['a.b']] refers to the literal key 'a.b'.
QUnit.test('should support path arrays', function(assert) {
assert.expect(1);
var object = { 'a.b': 1, 'a': { 'b': 2 } },
actual = _.omit(object, [['a.b']]);
assert.deepEqual(actual, { 'a': { 'b': 2 } });
});
// When both the literal key 'a.b' and the deep path a.b exist, the literal
// key wins.
QUnit.test('should omit a key over a path', function(assert) {
assert.expect(2);
var object = { 'a.b': 1, 'a': { 'b': 2 } };
lodashStable.each(['a.b', ['a.b']], function(path) {
assert.deepEqual(_.omit(object, path), { 'a': { 'b': 2 } });
});
});
QUnit.test('should coerce `paths` to strings', function(assert) {
assert.expect(1);
assert.deepEqual(_.omit({ '0': 'a' }, 0), {});
});
// Temporarily pollutes `Object.prototype` to prove nullish inputs yield a
// clean empty object rather than picking up inherited keys.
QUnit.test('should return an empty object when `object` is nullish', function(assert) {
assert.expect(2);
lodashStable.each([null, undefined], function(value) {
objectProto.a = 1;
var actual = _.omit(value, 'valueOf');
delete objectProto.a;
assert.deepEqual(actual, {});
});
});
// Primitives are boxed, so inherited enumerable prototype keys are visible.
QUnit.test('should work with a primitive `object`', function(assert) {
assert.expect(1);
stringProto.a = 1;
stringProto.b = 2;
assert.deepEqual(_.omit('', 'b'), { 'a': 1 });
delete stringProto.a;
delete stringProto.b;
});
QUnit.test('should work with `arguments` object `paths`', function(assert) {
assert.expect(1);
assert.deepEqual(_.omit(object, args), { 'b': 2, 'd': 4 });
});
QUnit.test('should not mutate `object`', function(assert) {
assert.expect(4);
lodashStable.each(['a', ['a'], 'a.b', ['a.b']], function(path) {
var object = { 'a': { 'b': 2 } };
_.omit(object, path);
assert.deepEqual(object, { 'a': { 'b': 2 } });
});
});
// Prevent regression for https://github.com/lodash/lodash/security/advisories/GHSA-xxjr-mmjv-4gpg
// Prototype-pollution guard: omitting `__proto__`/`constructor` paths must
// never delete properties off `Object.prototype`.
QUnit.test('Security: _.omit should not allow modifying prototype or constructor properties', function(assert) {
assert.expect(3);
var testObj1 = {};
assert.strictEqual(typeof testObj1.toString, 'function', 'Object.toString should work before omit');
_.omit({}, ['__proto__.toString']);
_.omit({}, ['constructor.prototype.toString']);
var testObj2 = {};
assert.strictEqual(typeof testObj2.toString, 'function', 'Object.toString should still work after omit');
assert.strictEqual(Object.prototype.toString.call({}), '[object Object]', 'Object.toString should behave as expected');
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.omitBy');
// Tests for `_.omitBy`, which drops entries whose values pass the predicate.
(function() {
  QUnit.test('should work with a predicate argument', function(assert) {
    assert.expect(1);
    var source = { 'a': 1, 'b': 2, 'c': 3, 'd': 4 };
    // Entries whose values fail the predicate are the ones that survive.
    var kept = _.omitBy(source, function(value) {
      return value != 2 && value != 4;
    });
    assert.deepEqual(kept, { 'b': 2, 'd': 4 });
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('omit methods');
// Shared suite run against both `_.omit` and `_.omitBy`. `resolve` adapts
// the second argument: for `omit` it passes the paths through unchanged;
// for `omitBy` it builds an equivalent value-matching predicate.
lodashStable.each(['omit', 'omitBy'], function(methodName) {
var expected = { 'b': 2, 'd': 4 },
func = _[methodName],
object = { 'a': 1, 'b': 2, 'c': 3, 'd': 4 },
resolve = lodashStable.nthArg(1);
if (methodName == 'omitBy') {
resolve = function(object, props) {
props = lodashStable.castArray(props);
return function(value) {
return lodashStable.some(props, function(key) {
// Symbols must not be stringified or the lookup would miss.
key = lodashStable.isSymbol(key) ? key : lodashStable.toString(key);
return object[key] === value;
});
};
};
}
QUnit.test('`_.' + methodName + '` should create an object with omitted string keyed properties', function(assert) {
assert.expect(2);
assert.deepEqual(func(object, resolve(object, 'a')), { 'b': 2, 'c': 3, 'd': 4 });
assert.deepEqual(func(object, resolve(object, ['a', 'c'])), expected);
});
// Inherited enumerable properties are copied into the result.
QUnit.test('`_.' + methodName + '` should include inherited string keyed properties', function(assert) {
assert.expect(1);
function Foo() {}
Foo.prototype = object;
assert.deepEqual(func(new Foo, resolve(object, ['a', 'c'])), expected);
});
// `-0` and `0` are distinct keys and must not be conflated.
QUnit.test('`_.' + methodName + '` should preserve the sign of `0`', function(assert) {
assert.expect(1);
var object = { '-0': 'a', '0': 'b' },
props = [-0, Object(-0), 0, Object(0)],
expected = [{ '0': 'b' }, { '0': 'b' }, { '-0': 'a' }, { '-0': 'a' }];
var actual = lodashStable.map(props, function(key) {
return func(object, resolve(object, key));
});
assert.deepEqual(actual, expected);
});
// Own and inherited enumerable symbols are copied; non-enumerable symbols
// (symbol3) are not.
QUnit.test('`_.' + methodName + '` should include symbols', function(assert) {
assert.expect(3);
function Foo() {
this.a = 0;
this[symbol] = 1;
}
if (Symbol) {
var symbol2 = Symbol('b');
Foo.prototype[symbol2] = 2;
var symbol3 = Symbol('c');
defineProperty(Foo.prototype, symbol3, {
'configurable': true,
'enumerable': false,
'writable': true,
'value': 3
});
var foo = new Foo,
actual = func(foo, resolve(foo, 'a'));
assert.strictEqual(actual[symbol], 1);
assert.strictEqual(actual[symbol2], 2);
assert.notOk(symbol3 in actual);
}
else {
skipAssert(assert, 3);
}
});
// Symbols can themselves be omitted, whether own or inherited.
QUnit.test('`_.' + methodName + '` should create an object with omitted symbols', function(assert) {
assert.expect(8);
function Foo() {
this.a = 0;
this[symbol] = 1;
}
if (Symbol) {
var symbol2 = Symbol('b');
Foo.prototype[symbol2] = 2;
var symbol3 = Symbol('c');
defineProperty(Foo.prototype, symbol3, {
'configurable': true,
'enumerable': false,
'writable': true,
'value': 3
});
var foo = new Foo,
actual = func(foo, resolve(foo, symbol));
assert.strictEqual(actual.a, 0);
assert.notOk(symbol in actual);
assert.strictEqual(actual[symbol2], 2);
assert.notOk(symbol3 in actual);
actual = func(foo, resolve(foo, symbol2));
assert.strictEqual(actual.a, 0);
assert.strictEqual(actual[symbol], 1);
assert.notOk(symbol2 in actual);
assert.notOk(symbol3 in actual);
}
else {
skipAssert(assert, 8);
}
});
// Arrays are treated as plain objects keyed by index.
QUnit.test('`_.' + methodName + '` should work with an array `object`', function(assert) {
assert.expect(1);
var array = [1, 2, 3];
assert.deepEqual(func(array, resolve(array, ['0', '2'])), { '1': 2 });
});
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.once');
// Tests for `_.once`, which limits a function to a single invocation and
// replays the first result thereafter.
(function() {
  QUnit.test('should invoke `func` once', function(assert) {
    assert.expect(2);
    var calls = 0;
    var once = _.once(function() {
      calls += 1;
      return calls;
    });
    once();
    assert.strictEqual(once(), 1);
    assert.strictEqual(calls, 1);
  });
  // A re-entrant call from inside `func` must not run the body again.
  QUnit.test('should ignore recursive calls', function(assert) {
    assert.expect(2);
    var calls = 0;
    var once = _.once(function() {
      once();
      calls += 1;
      return calls;
    });
    assert.strictEqual(once(), 1);
    assert.strictEqual(calls, 1);
  });
  // A first call that throws still counts as the one invocation, so later
  // calls must not throw again.
  QUnit.test('should not throw more than once', function(assert) {
    assert.expect(2);
    var once = _.once(function() {
      throw new Error;
    });
    assert.raises(once);
    once();
    assert.ok(true);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.over');
// Tests for `_.over`, which builds a function invoking all iteratees with
// the received arguments and returning their results as an array.
(function() {
QUnit.test('should create a function that invokes `iteratees`', function(assert) {
assert.expect(1);
var over = _.over(Math.max, Math.min);
assert.deepEqual(over(1, 2, 3, 4), [4, 1]);
});
// Nullish iteratees fall back to `_.identity`.
QUnit.test('should use `_.identity` when a predicate is nullish', function(assert) {
assert.expect(1);
var over = _.over(undefined, null);
assert.deepEqual(over('a', 'b', 'c'), ['a', 'a']);
});
// String iteratees act as `_.property` shorthands.
QUnit.test('should work with `_.property` shorthands', function(assert) {
assert.expect(1);
var over = _.over('b', 'a');
assert.deepEqual(over({ 'a': 1, 'b': 2 }), [2, 1]);
});
// Object iteratees act as `_.matches` shorthands.
QUnit.test('should work with `_.matches` shorthands', function(assert) {
assert.expect(1);
var over = _.over({ 'b': 1 }, { 'a': 1 });
assert.deepEqual(over({ 'a': 1, 'b': 2 }), [false, true]);
});
// [path, value] pairs act as `_.matchesProperty` shorthands.
QUnit.test('should work with `_.matchesProperty` shorthands', function(assert) {
assert.expect(2);
var over = _.over([['b', 2], ['a', 2]]);
assert.deepEqual(over({ 'a': 1, 'b': 2 }), [true, false]);
assert.deepEqual(over({ 'a': 2, 'b': 1 }), [false, true]);
});
// Flat ['a', 1] means two property shorthands; nested [['a', 1]] means one
// matchesProperty shorthand.
QUnit.test('should differentiate between `_.property` and `_.matchesProperty` shorthands', function(assert) {
assert.expect(4);
var over = _.over(['a', 1]);
assert.deepEqual(over({ 'a': 1, '1': 2 }), [1, 2]);
assert.deepEqual(over({ 'a': 2, '1': 1 }), [2, 1]);
over = _.over([['a', 1]]);
assert.deepEqual(over({ 'a': 1 }), [true]);
assert.deepEqual(over({ 'a': 2 }), [false]);
});
// Every iteratee receives all of the wrapper's arguments.
QUnit.test('should provide arguments to predicates', function(assert) {
assert.expect(1);
var over = _.over(function() {
return slice.call(arguments);
});
assert.deepEqual(over('a', 'b', 'c'), [['a', 'b', 'c']]);
});
// Iteratees run with the wrapper call's `this` binding.
QUnit.test('should use `this` binding of function for `iteratees`', function(assert) {
assert.expect(1);
var over = _.over(function() { return this.b; }, function() { return this.a; }),
object = { 'over': over, 'a': 1, 'b': 2 };
assert.deepEqual(object.over(), [2, 1]);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.overEvery');
// Tests for `_.overEvery`, which builds a predicate returning `true` only
// when all of its component predicates return truthy.
(function() {
QUnit.test('should create a function that returns `true` if all predicates return truthy', function(assert) {
assert.expect(1);
var over = _.overEvery(stubTrue, stubOne, stubA);
assert.strictEqual(over(), true);
});
// Evaluation short-circuits: the third predicate never runs.
QUnit.test('should return `false` as soon as a predicate returns falsey', function(assert) {
assert.expect(2);
var count = 0,
countFalse = function() { count++; return false; },
countTrue = function() { count++; return true; },
over = _.overEvery(countTrue, countFalse, countTrue);
assert.strictEqual(over(), false);
assert.strictEqual(count, 2);
});
// Nullish predicates fall back to `_.identity`.
QUnit.test('should use `_.identity` when a predicate is nullish', function(assert) {
assert.expect(2);
var over = _.overEvery(undefined, null);
assert.strictEqual(over(true), true);
assert.strictEqual(over(false), false);
});
// String predicates act as `_.property` shorthands.
QUnit.test('should work with `_.property` shorthands', function(assert) {
assert.expect(2);
var over = _.overEvery('b', 'a');
assert.strictEqual(over({ 'a': 1, 'b': 1 }), true);
assert.strictEqual(over({ 'a': 0, 'b': 1 }), false);
});
// Object predicates act as `_.matches` shorthands.
QUnit.test('should work with `_.matches` shorthands', function(assert) {
assert.expect(2);
var over = _.overEvery({ 'b': 2 }, { 'a': 1 });
assert.strictEqual(over({ 'a': 1, 'b': 2 }), true);
assert.strictEqual(over({ 'a': 0, 'b': 2 }), false);
});
// [path, value] pairs act as `_.matchesProperty` shorthands.
QUnit.test('should work with `_.matchesProperty` shorthands', function(assert) {
assert.expect(2);
var over = _.overEvery([['b', 2], ['a', 1]]);
assert.strictEqual(over({ 'a': 1, 'b': 2 }), true);
assert.strictEqual(over({ 'a': 0, 'b': 2 }), false);
});
// Flat ['a', 1] means two property shorthands; nested [['a', 1]] means one
// matchesProperty shorthand.
QUnit.test('should differentiate between `_.property` and `_.matchesProperty` shorthands', function(assert) {
assert.expect(5);
var over = _.overEvery(['a', 1]);
assert.strictEqual(over({ 'a': 1, '1': 1 }), true);
assert.strictEqual(over({ 'a': 1, '1': 0 }), false);
assert.strictEqual(over({ 'a': 0, '1': 1 }), false);
over = _.overEvery([['a', 1]]);
assert.strictEqual(over({ 'a': 1 }), true);
assert.strictEqual(over({ 'a': 2 }), false);
});
QUnit.test('should flatten `predicates`', function(assert) {
assert.expect(1);
var over = _.overEvery(stubTrue, [stubFalse]);
assert.strictEqual(over(), false);
});
// Every predicate receives all of the wrapper's arguments.
QUnit.test('should provide arguments to predicates', function(assert) {
assert.expect(1);
var args;
var over = _.overEvery(function() {
args = slice.call(arguments);
});
over('a', 'b', 'c');
assert.deepEqual(args, ['a', 'b', 'c']);
});
// Predicates run with the wrapper call's `this` binding.
QUnit.test('should use `this` binding of function for `predicates`', function(assert) {
assert.expect(2);
var over = _.overEvery(function() { return this.b; }, function() { return this.a; }),
object = { 'over': over, 'a': 1, 'b': 2 };
assert.strictEqual(object.over(), true);
object.a = 0;
assert.strictEqual(object.over(), false);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.overSome');
// Tests for `_.overSome`, which builds a predicate returning `true` when
// any of its component predicates returns truthy.
(function() {
QUnit.test('should create a function that returns `true` if any predicates return truthy', function(assert) {
assert.expect(2);
var over = _.overSome(stubFalse, stubOne, stubString);
assert.strictEqual(over(), true);
over = _.overSome(stubNull, stubA, stubZero);
assert.strictEqual(over(), true);
});
// Evaluation short-circuits: the predicate after the first truthy one
// never runs.
QUnit.test('should return `true` as soon as `predicate` returns truthy', function(assert) {
assert.expect(2);
var count = 0,
countFalse = function() { count++; return false; },
countTrue = function() { count++; return true; },
over = _.overSome(countFalse, countTrue, countFalse);
assert.strictEqual(over(), true);
assert.strictEqual(count, 2);
});
QUnit.test('should return `false` if all predicates return falsey', function(assert) {
assert.expect(2);
var over = _.overSome(stubFalse, stubFalse, stubFalse);
assert.strictEqual(over(), false);
over = _.overSome(stubNull, stubZero, stubString);
assert.strictEqual(over(), false);
});
// Nullish predicates fall back to `_.identity`.
QUnit.test('should use `_.identity` when a predicate is nullish', function(assert) {
assert.expect(2);
var over = _.overSome(undefined, null);
assert.strictEqual(over(true), true);
assert.strictEqual(over(false), false);
});
// String predicates act as `_.property` shorthands.
QUnit.test('should work with `_.property` shorthands', function(assert) {
assert.expect(2);
var over = _.overSome('b', 'a');
assert.strictEqual(over({ 'a': 1, 'b': 0 }), true);
assert.strictEqual(over({ 'a': 0, 'b': 0 }), false);
});
// Object predicates act as `_.matches` shorthands.
QUnit.test('should work with `_.matches` shorthands', function(assert) {
assert.expect(2);
var over = _.overSome({ 'b': 2 }, { 'a': 1 });
assert.strictEqual(over({ 'a': 0, 'b': 2 }), true);
assert.strictEqual(over({ 'a': 0, 'b': 0 }), false);
});
// [path, value] pairs act as `_.matchesProperty` shorthands.
QUnit.test('should work with `_.matchesProperty` shorthands', function(assert) {
assert.expect(2);
var over = _.overSome([['b', 2], ['a', 1]]);
assert.strictEqual(over({ 'a': 0, 'b': 2 }), true);
assert.strictEqual(over({ 'a': 0, 'b': 0 }), false);
});
// Flat ['a', 1] means two property shorthands; nested [['a', 1]] means one
// matchesProperty shorthand.
QUnit.test('should differentiate between `_.property` and `_.matchesProperty` shorthands', function(assert) {
assert.expect(5);
var over = _.overSome(['a', 1]);
assert.strictEqual(over({ 'a': 0, '1': 0 }), false);
assert.strictEqual(over({ 'a': 1, '1': 0 }), true);
assert.strictEqual(over({ 'a': 0, '1': 1 }), true);
over = _.overSome([['a', 1]]);
assert.strictEqual(over({ 'a': 1 }), true);
assert.strictEqual(over({ 'a': 2 }), false);
});
QUnit.test('should flatten `predicates`', function(assert) {
assert.expect(1);
var over = _.overSome(stubFalse, [stubTrue]);
assert.strictEqual(over(), true);
});
// Every predicate receives all of the wrapper's arguments.
QUnit.test('should provide arguments to predicates', function(assert) {
assert.expect(1);
var args;
var over = _.overSome(function() {
args = slice.call(arguments);
});
over('a', 'b', 'c');
assert.deepEqual(args, ['a', 'b', 'c']);
});
// Predicates run with the wrapper call's `this` binding.
QUnit.test('should use `this` binding of function for `predicates`', function(assert) {
assert.expect(2);
var over = _.overSome(function() { return this.b; }, function() { return this.a; }),
object = { 'over': over, 'a': 1, 'b': 2 };
assert.strictEqual(object.over(), true);
object.a = object.b = 0;
assert.strictEqual(object.over(), false);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.pad');
// Tests for `_.pad`, which pads both sides of a string to a target length.
(function() {
var string = 'abc';
QUnit.test('should pad a string to a given length', function(assert) {
assert.expect(1);
// `[, undefined]` is deliberately sparse: index 0 is a hole (argument
// omitted) and index 1 is an explicit `undefined` third argument.
var values = [, undefined],
expected = lodashStable.map(values, lodashStable.constant(' abc '));
var actual = lodashStable.map(values, function(value, index) {
return index ? _.pad(string, 6, value) : _.pad(string, 6);
});
assert.deepEqual(actual, expected);
});
// Pad characters repeat and are truncated so the result is exactly `length`.
QUnit.test('should truncate pad characters to fit the pad length', function(assert) {
assert.expect(2);
assert.strictEqual(_.pad(string, 8), ' abc ');
assert.strictEqual(_.pad(string, 8, '_-'), '_-abc_-_');
});
// Boxed strings and `toString`-able objects are coerced to strings.
QUnit.test('should coerce `string` to a string', function(assert) {
assert.expect(1);
var values = [Object(string), { 'toString': lodashStable.constant(string) }],
expected = lodashStable.map(values, stubTrue);
var actual = lodashStable.map(values, function(value) {
return _.pad(value, 6) === ' abc ';
});
assert.deepEqual(actual, expected);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.padEnd');
// Tests for `_.padEnd`, which pads the right side of a string.
(function() {
var string = 'abc';
QUnit.test('should pad a string to a given length', function(assert) {
assert.expect(1);
// `[, undefined]` is deliberately sparse: index 0 is a hole (argument
// omitted) and index 1 is an explicit `undefined` third argument.
var values = [, undefined],
expected = lodashStable.map(values, lodashStable.constant('abc '));
var actual = lodashStable.map(values, function(value, index) {
return index ? _.padEnd(string, 6, value) : _.padEnd(string, 6);
});
assert.deepEqual(actual, expected);
});
// Pad characters repeat and are truncated so the result is exactly `length`.
QUnit.test('should truncate pad characters to fit the pad length', function(assert) {
assert.expect(1);
assert.strictEqual(_.padEnd(string, 6, '_-'), 'abc_-_');
});
// Boxed strings and `toString`-able objects are coerced to strings.
QUnit.test('should coerce `string` to a string', function(assert) {
assert.expect(1);
var values = [Object(string), { 'toString': lodashStable.constant(string) }],
expected = lodashStable.map(values, stubTrue);
var actual = lodashStable.map(values, function(value) {
return _.padEnd(value, 6) === 'abc ';
});
assert.deepEqual(actual, expected);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.padStart');

(function() {
  var str = 'abc';

  QUnit.test('should pad a string to a given length', function(assert) {
    assert.expect(1);

    // A hole and an explicit `undefined` for `chars` both mean "pad with spaces".
    var charsArgs = [, undefined],
        expected = lodashStable.map(charsArgs, lodashStable.constant('   abc'));

    var actual = lodashStable.map(charsArgs, function(chars, index) {
      return index ? _.padStart(str, 6, chars) : _.padStart(str, 6);
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should truncate pad characters to fit the pad length', function(assert) {
    assert.expect(1);

    assert.strictEqual(_.padStart(str, 6, '_-'), '_-_abc');
  });

  QUnit.test('should coerce `string` to a string', function(assert) {
    assert.expect(1);

    var inputs = [Object(str), { 'toString': lodashStable.constant(str) }],
        expected = lodashStable.map(inputs, stubTrue);

    var actual = lodashStable.map(inputs, function(value) {
      return _.padStart(value, 6) === '   abc';
    });

    assert.deepEqual(actual, expected);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('pad methods');

// Shared behavior of `_.pad`, `_.padStart`, and `_.padEnd`.
lodashStable.each(['pad', 'padStart', 'padEnd'], function(methodName) {
  var func = _[methodName],
      isPad = methodName == 'pad',
      isStart = methodName == 'padStart',
      string = 'abc';

  QUnit.test('`_.' + methodName + '` should not pad if string is >= `length`', function(assert) {
    assert.expect(2);

    assert.strictEqual(func(string, 2), string);
    assert.strictEqual(func(string, 3), string);
  });

  QUnit.test('`_.' + methodName + '` should treat negative `length` as `0`', function(assert) {
    assert.expect(2);

    lodashStable.each([0, -2], function(length) {
      assert.strictEqual(func(string, length), string);
    });
  });

  QUnit.test('`_.' + methodName + '` should coerce `length` to a number', function(assert) {
    assert.expect(2);

    lodashStable.each(['', '4'], function(length) {
      // `''` coerces to `0` (no padding); `'4'` coerces to `4` (one pad char).
      var actual = length ? (isStart ? ' abc' : 'abc ') : string;
      assert.strictEqual(func(string, length), actual);
    });
  });

  QUnit.test('`_.' + methodName + '` should treat nullish values as empty strings', function(assert) {
    assert.expect(6);

    lodashStable.each([undefined, '_-'], function(chars) {
      // `_.pad` splits the '_-' padding one char per side, hence '__'.
      var expected = chars ? (isPad ? '__' : chars) : '  ';
      assert.strictEqual(func(null, 2, chars), expected);
      assert.strictEqual(func(undefined, 2, chars), expected);
      assert.strictEqual(func('', 2, chars), expected);
    });
  });

  QUnit.test('`_.' + methodName + '` should return `string` when `chars` coerces to an empty string', function(assert) {
    assert.expect(1);

    var values = ['', Object('')],
        expected = lodashStable.map(values, lodashStable.constant(string));

    // Fixed: use `func` so each method under test is exercised; the
    // original called `_.pad` for all three `methodName` iterations.
    var actual = lodashStable.map(values, function(value) {
      return func(string, 6, value);
    });

    assert.deepEqual(actual, expected);
  });
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.parseInt');

// Tests for `_.parseInt`, which avoids legacy-octal parsing of leading
// zeros while still auto-detecting hexadecimal prefixes.
(function() {
  QUnit.test('should accept a `radix`', function(assert) {
    assert.expect(1);

    // The string '10' parsed in radix `r` equals `r`, for every radix 2..36.
    var expected = lodashStable.range(2, 37);

    var actual = lodashStable.map(expected, function(radix) {
      return _.parseInt('10', radix);
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should use a radix of `10`, for non-hexadecimals, if `radix` is `undefined` or `0`', function(assert) {
    assert.expect(4);

    assert.strictEqual(_.parseInt('10'), 10);
    assert.strictEqual(_.parseInt('10', 0), 10);
    assert.strictEqual(_.parseInt('10', 10), 10);
    assert.strictEqual(_.parseInt('10', undefined), 10);
  });

  QUnit.test('should use a radix of `16`, for hexadecimals, if `radix` is `undefined` or `0`', function(assert) {
    assert.expect(8);

    // Both lowercase and uppercase hex prefixes must be detected.
    lodashStable.each(['0x20', '0X20'], function(string) {
      assert.strictEqual(_.parseInt(string), 32);
      assert.strictEqual(_.parseInt(string, 0), 32);
      assert.strictEqual(_.parseInt(string, 16), 32);
      assert.strictEqual(_.parseInt(string, undefined), 32);
    });
  });

  QUnit.test('should use a radix of `10` for string with leading zeros', function(assert) {
    assert.expect(2);

    // Unlike legacy `parseInt` implementations, '08' must not parse as octal.
    assert.strictEqual(_.parseInt('08'), 8);
    assert.strictEqual(_.parseInt('08', 10), 8);
  });

  QUnit.test('should parse strings with leading whitespace', function(assert) {
    assert.expect(2);

    var expected = [8, 8, 10, 10, 32, 32, 32, 32];

    // Run once against `_` and once against `lodashBizarro` (a copy loaded
    // in a mocked environment elsewhere in the suite), skipping when the
    // latter is unavailable.
    lodashStable.times(2, function(index) {
      var actual = [],
          func = (index ? (lodashBizarro || {}) : _).parseInt;

      if (func) {
        lodashStable.times(2, function(otherIndex) {
          var string = otherIndex ? '10' : '08';
          actual.push(
            func(whitespace + string, 10),
            func(whitespace + string)
          );
        });

        lodashStable.each(['0x20', '0X20'], function(string) {
          actual.push(
            func(whitespace + string),
            func(whitespace + string, 16)
          );
        });

        assert.deepEqual(actual, expected);
      }
      else {
        skipAssert(assert);
      }
    });
  });

  QUnit.test('should coerce `radix` to a number', function(assert) {
    assert.expect(2);

    // `object` coerces to `0`, which triggers radix auto-detection.
    var object = { 'valueOf': stubZero };
    assert.strictEqual(_.parseInt('08', object), 8);
    assert.strictEqual(_.parseInt('0x20', object), 32);
  });

  QUnit.test('should work as an iteratee for methods like `_.map`', function(assert) {
    assert.expect(2);

    // `_.parseInt` must ignore the extra index/collection args `map` passes,
    // so the index is not mistaken for a radix.
    var strings = lodashStable.map(['6', '08', '10'], Object),
        actual = lodashStable.map(strings, _.parseInt);

    assert.deepEqual(actual, [6, 8, 10]);

    actual = lodashStable.map('123', _.parseInt);
    assert.deepEqual(actual, [1, 2, 3]);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('partial methods');

// Shared tests for `_.partial` and `_.partialRight`. `isPartial` determines
// whether partially applied arguments are prepended or appended, and `ph`
// is the placeholder value attached to the method itself.
lodashStable.each(['partial', 'partialRight'], function(methodName) {
  var func = _[methodName],
      isPartial = methodName == 'partial',
      ph = func.placeholder;

  QUnit.test('`_.' + methodName + '` partially applies arguments', function(assert) {
    assert.expect(1);

    var par = func(identity, 'a');
    assert.strictEqual(par(), 'a');
  });

  QUnit.test('`_.' + methodName + '` creates a function that can be invoked with additional arguments', function(assert) {
    assert.expect(1);

    // `partial` prepends the bound args; `partialRight` appends them.
    var fn = function(a, b) { return [a, b]; },
        par = func(fn, 'a'),
        expected = isPartial ? ['a', 'b'] : ['b', 'a'];

    assert.deepEqual(par('b'), expected);
  });

  QUnit.test('`_.' + methodName + '` works when there are no partially applied arguments and the created function is invoked without additional arguments', function(assert) {
    assert.expect(1);

    var fn = function() { return arguments.length; },
        par = func(fn);

    assert.strictEqual(par(), 0);
  });

  QUnit.test('`_.' + methodName + '` works when there are no partially applied arguments and the created function is invoked with additional arguments', function(assert) {
    assert.expect(1);

    var par = func(identity);
    assert.strictEqual(par('a'), 'a');
  });

  QUnit.test('`_.' + methodName + '` should support placeholders', function(assert) {
    assert.expect(4);

    var fn = function() { return slice.call(arguments); },
        par = func(fn, ph, 'b', ph);

    // Call-time args fill placeholder slots in order; missing ones stay undefined.
    assert.deepEqual(par('a', 'c'), ['a', 'b', 'c']);
    assert.deepEqual(par('a'), ['a', 'b', undefined]);
    assert.deepEqual(par(), [undefined, 'b', undefined]);

    if (isPartial) {
      assert.deepEqual(par('a', 'c', 'd'), ['a', 'b', 'c', 'd']);
    } else {
      par = func(fn, ph, 'c', ph);
      assert.deepEqual(par('a', 'b', 'd'), ['a', 'b', 'c', 'd']);
    }
  });

  QUnit.test('`_.' + methodName + '` should use `_.placeholder` when set', function(assert) {
    assert.expect(1);

    // When a global `_.placeholder` is set it overrides the per-method
    // placeholder, so the method's own `ph` is passed through as a plain
    // value. Not applicable to modularized builds.
    if (!isModularize) {
      var _ph = _.placeholder = {},
          fn = function() { return slice.call(arguments); },
          par = func(fn, _ph, 'b', ph),
          expected = isPartial ? ['a', 'b', ph, 'c'] : ['a', 'c', 'b', ph];

      assert.deepEqual(par('a', 'c'), expected);
      delete _.placeholder;
    }
    else {
      skipAssert(assert);
    }
  });

  QUnit.test('`_.' + methodName + '` creates a function with a `length` of `0`', function(assert) {
    assert.expect(1);

    var fn = function(a, b, c) {},
        par = func(fn, 'a');

    assert.strictEqual(par.length, 0);
  });

  QUnit.test('`_.' + methodName + '` should ensure `new par` is an instance of `func`', function(assert) {
    assert.expect(2);

    function Foo(value) {
      return value && object;
    }

    var object = {},
        par = func(Foo);

    assert.ok(new par instanceof Foo);
    // A constructor that returns an object overrides the `new` result.
    assert.strictEqual(new par(true), object);
  });

  QUnit.test('`_.' + methodName + '` should clone metadata for created functions', function(assert) {
    assert.expect(3);

    function greet(greeting, name) {
      return greeting + ' ' + name;
    }

    var par1 = func(greet, 'hi'),
        par2 = func(par1, 'barney'),
        par3 = func(par1, 'pebbles');

    assert.strictEqual(par1('fred'), isPartial ? 'hi fred' : 'fred hi');
    // Re-wrapping `par1` twice must not let the wrappers share/clobber metadata.
    assert.strictEqual(par2(), isPartial ? 'hi barney' : 'barney hi');
    assert.strictEqual(par3(), isPartial ? 'hi pebbles' : 'pebbles hi');
  });

  QUnit.test('`_.' + methodName + '` should work with curried functions', function(assert) {
    assert.expect(2);

    var fn = function(a, b, c) { return a + b + c; },
        curried = _.curry(func(fn, 1), 2);

    assert.strictEqual(curried(2, 3), 6);
    assert.strictEqual(curried(2)(3), 6);
  });

  QUnit.test('should work with placeholders and curried functions', function(assert) {
    assert.expect(1);

    var fn = function() { return slice.call(arguments); },
        curried = _.curry(fn),
        par = func(curried, ph, 'b', ph, 'd');

    assert.deepEqual(par('a', 'c'), ['a', 'b', 'c', 'd']);
  });
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.partialRight');

(function() {
  QUnit.test('should work as a deep `_.defaults`', function(assert) {
    assert.expect(1);

    var target = { 'a': { 'b': 2 } },
        fallback = { 'a': { 'b': 3, 'c': 3 } },
        wanted = { 'a': { 'b': 2, 'c': 3 } };

    // Partially apply a recursing customizer on the right to build a
    // `defaultsDeep` out of `_.mergeWith`.
    var defaultsDeep = _.partialRight(_.mergeWith, function deep(value, other) {
      return lodashStable.isObject(value) ? _.mergeWith(value, other, deep) : value;
    });

    assert.deepEqual(defaultsDeep(target, fallback), wanted);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('methods using `createWrapper`');

// Integration tests for the shared wrapper machinery behind `_.bind`,
// `_.bindKey`, `_.partial`, and `_.partialRight`: composing wrappers,
// mixing placeholders across layers, and the repeated-wrapping "hot" path.
(function() {
  function fn() {
    return slice.call(arguments);
  }

  // Each method carries its own placeholder value.
  var ph1 = _.bind.placeholder,
      ph2 = _.bindKey.placeholder,
      ph3 = _.partial.placeholder,
      ph4 = _.partialRight.placeholder;

  QUnit.test('should work with combinations of partial functions', function(assert) {
    assert.expect(1);

    var a = _.partial(fn),
        b = _.partialRight(a, 3),
        c = _.partial(b, 1);

    assert.deepEqual(c(2), [1, 2, 3]);
  });

  QUnit.test('should work with combinations of bound and partial functions', function(assert) {
    assert.expect(3);

    var fn = function() {
      var result = [this.a];
      push.apply(result, arguments);
      return result;
    };

    var expected = [1, 2, 3, 4],
        object = { 'a': 1, 'fn': fn };

    // bindKey -> partialRight -> partial
    var a = _.bindKey(object, 'fn'),
        b = _.partialRight(a, 4),
        c = _.partial(b, 2);

    assert.deepEqual(c(3), expected);

    // bind -> partialRight -> partial
    a = _.bind(fn, object);
    b = _.partialRight(a, 4);
    c = _.partial(b, 2);

    assert.deepEqual(c(3), expected);

    // partial -> bind -> partialRight
    a = _.partial(fn, 2);
    b = _.bind(a, object);
    c = _.partialRight(b, 4);

    assert.deepEqual(c(3), expected);
  });

  QUnit.test('should ensure `new combo` is an instance of `func`', function(assert) {
    assert.expect(2);

    function Foo(a, b, c) {
      return b === 0 && object;
    }

    var combo = _.partial(_.partialRight(Foo, 3), 1),
        object = {};

    assert.ok(new combo(2) instanceof Foo);
    // When the constructor returns an object, that object wins over `this`.
    assert.strictEqual(new combo(0), object);
  });

  QUnit.test('should work with combinations of functions with placeholders', function(assert) {
    assert.expect(3);

    var expected = [1, 2, 3, 4, 5, 6],
        object = { 'fn': fn };

    // Placeholders at each layer are filled by the args of the next layer
    // or the final call, interleaving into 1..6.
    var a = _.bindKey(object, 'fn', ph2, 2),
        b = _.partialRight(a, ph4, 6),
        c = _.partial(b, 1, ph3, 4);

    assert.deepEqual(c(3, 5), expected);

    a = _.bind(fn, object, ph1, 2);
    b = _.partialRight(a, ph4, 6);
    c = _.partial(b, 1, ph3, 4);

    assert.deepEqual(c(3, 5), expected);

    a = _.partial(fn, ph3, 2);
    b = _.bind(a, object, 1, ph1, 4);
    c = _.partialRight(b, ph4, 6);

    assert.deepEqual(c(3, 5), expected);
  });

  QUnit.test('should work with combinations of functions with overlapping placeholders', function(assert) {
    assert.expect(3);

    var expected = [1, 2, 3, 4],
        object = { 'fn': fn };

    var a = _.bindKey(object, 'fn', ph2, 2),
        b = _.partialRight(a, ph4, 4),
        c = _.partial(b, ph3, 3);

    assert.deepEqual(c(1), expected);

    a = _.bind(fn, object, ph1, 2);
    b = _.partialRight(a, ph4, 4);
    c = _.partial(b, ph3, 3);

    assert.deepEqual(c(1), expected);

    a = _.partial(fn, ph3, 2);
    b = _.bind(a, object, ph1, 3);
    c = _.partialRight(b, ph4, 4);

    assert.deepEqual(c(1), expected);
  });

  QUnit.test('should work with recursively bound functions', function(assert) {
    assert.expect(1);

    var fn = function() {
      return this.a;
    };

    // Rebinding a bound function must not change its original `this`.
    var a = _.bind(fn, { 'a': 1 }),
        b = _.bind(a, { 'a': 2 }),
        c = _.bind(b, { 'a': 3 });

    assert.strictEqual(c(), 1);
  });

  QUnit.test('should work when hot', function(assert) {
    assert.expect(12);

    // Wrap repeatedly, HOT_COUNT times in a row — presumably enough to
    // trigger the wrapper's internal fast path — and check only the last
    // result; results must be identical to the cold path.
    lodashStable.times(2, function(index) {
      var fn = function() {
        var result = [this];
        push.apply(result, arguments);
        return result;
      };

      var object = {},
          bound1 = index ? _.bind(fn, object, 1) : _.bind(fn, object),
          expected = [object, 1, 2, 3];

      // Re-wrap a single bound function many times.
      var actual = _.last(lodashStable.times(HOT_COUNT, function() {
        var bound2 = index ? _.bind(bound1, null, 2) : _.bind(bound1);
        return index ? bound2(3) : bound2(1, 2, 3);
      }));

      assert.deepEqual(actual, expected);

      // Create both wrapper layers fresh each iteration.
      actual = _.last(lodashStable.times(HOT_COUNT, function() {
        var bound1 = index ? _.bind(fn, object, 1) : _.bind(fn, object),
            bound2 = index ? _.bind(bound1, null, 2) : _.bind(bound1);

        return index ? bound2(3) : bound2(1, 2, 3);
      }));

      assert.deepEqual(actual, expected);
    });

    lodashStable.each(['curry', 'curryRight'], function(methodName, index) {
      var fn = function(a, b, c) { return [a, b, c]; },
          curried = _[methodName](fn),
          expected = index ? [3, 2, 1] : [1, 2, 3];

      var actual = _.last(lodashStable.times(HOT_COUNT, function() {
        return curried(1)(2)(3);
      }));

      assert.deepEqual(actual, expected);

      actual = _.last(lodashStable.times(HOT_COUNT, function() {
        var curried = _[methodName](fn);
        return curried(1)(2)(3);
      }));

      assert.deepEqual(actual, expected);
    });

    lodashStable.each(['partial', 'partialRight'], function(methodName, index) {
      var func = _[methodName],
          fn = function() { return slice.call(arguments); },
          par1 = func(fn, 1),
          expected = index ? [3, 2, 1] : [1, 2, 3];

      var actual = _.last(lodashStable.times(HOT_COUNT, function() {
        var par2 = func(par1, 2);
        return par2(3);
      }));

      assert.deepEqual(actual, expected);

      actual = _.last(lodashStable.times(HOT_COUNT, function() {
        var par1 = func(fn, 1),
            par2 = func(par1, 2);

        return par2(3);
      }));

      assert.deepEqual(actual, expected);
    });
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.partition');

(function() {
  var source = [1, 0, 1];

  QUnit.test('should split elements into two groups by `predicate`', function(assert) {
    assert.expect(3);

    assert.deepEqual(_.partition([], identity), [[], []]);
    assert.deepEqual(_.partition(source, stubTrue), [source, []]);
    assert.deepEqual(_.partition(source, stubFalse), [[], source]);
  });

  QUnit.test('should use `_.identity` when `predicate` is nullish', function(assert) {
    assert.expect(1);

    // A hole, `null`, and `undefined` all fall back to `_.identity`.
    var preds = [, null, undefined],
        expected = lodashStable.map(preds, lodashStable.constant([[1, 1], [0]]));

    var actual = lodashStable.map(preds, function(predicate, index) {
      return index ? _.partition(source, predicate) : _.partition(source);
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should work with `_.property` shorthands', function(assert) {
    assert.expect(1);

    var objects = [{ 'a': 1 }, { 'a': 1 }, { 'b': 2 }],
        result = _.partition(objects, 'a');

    assert.deepEqual(result, [objects.slice(0, 2), objects.slice(2)]);
  });

  QUnit.test('should work with a number for `predicate`', function(assert) {
    assert.expect(2);

    // A numeric predicate selects the element at that index of each entry.
    var pairs = [
      [1, 0],
      [0, 1],
      [1, 0]
    ];

    assert.deepEqual(_.partition(pairs, 0), [[pairs[0], pairs[2]], [pairs[1]]]);
    assert.deepEqual(_.partition(pairs, 1), [[pairs[1]], [pairs[0], pairs[2]]]);
  });

  QUnit.test('should work with an object for `collection`', function(assert) {
    assert.expect(1);

    var result = _.partition({ 'a': 1.1, 'b': 0.2, 'c': 1.3 }, Math.floor);
    assert.deepEqual(result, [[1.1, 1.3], [0.2]]);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.pick');

(function() {
  // `args` is an `arguments` object of paths, built by the suite's `toArgs` helper.
  var args = toArgs(['a', 'c']),
      object = { 'a': 1, 'b': 2, 'c': 3, 'd': 4 },
      nested = { 'a': 1, 'b': { 'c': 2, 'd': 3 } };

  QUnit.test('should flatten `paths`', function(assert) {
    assert.expect(2);

    // Paths may be given as rest args, arrays, or a mix of both.
    assert.deepEqual(_.pick(object, 'a', 'c'), { 'a': 1, 'c': 3 });
    assert.deepEqual(_.pick(object, ['a', 'd'], 'c'), { 'a': 1, 'c': 3, 'd': 4 });
  });

  QUnit.test('should support deep paths', function(assert) {
    assert.expect(1);

    assert.deepEqual(_.pick(nested, 'b.c'), { 'b': { 'c': 2 } });
  });

  QUnit.test('should support path arrays', function(assert) {
    assert.expect(1);

    // A nested array path refers to the literal 'a.b' key, not a deep path.
    var object = { 'a.b': 1, 'a': { 'b': 2 } },
        actual = _.pick(object, [['a.b']]);

    assert.deepEqual(actual, { 'a.b': 1 });
  });

  QUnit.test('should pick a key over a path', function(assert) {
    assert.expect(2);

    var object = { 'a.b': 1, 'a': { 'b': 2 } };

    lodashStable.each(['a.b', ['a.b']], function(path) {
      assert.deepEqual(_.pick(object, path), { 'a.b': 1 });
    });
  });

  QUnit.test('should coerce `paths` to strings', function(assert) {
    assert.expect(1);

    assert.deepEqual(_.pick({ '0': 'a', '1': 'b' }, 0), { '0': 'a' });
  });

  QUnit.test('should return an empty object when `object` is nullish', function(assert) {
    assert.expect(2);

    lodashStable.each([null, undefined], function(value) {
      assert.deepEqual(_.pick(value, 'valueOf'), {});
    });
  });

  QUnit.test('should work with a primitive `object`', function(assert) {
    assert.expect(1);

    // Primitives are promoted so inherited members can be picked.
    assert.deepEqual(_.pick('', 'slice'), { 'slice': ''.slice });
  });

  QUnit.test('should work with `arguments` object `paths`', function(assert) {
    assert.expect(1);

    assert.deepEqual(_.pick(object, args), { 'a': 1, 'c': 3 });
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.pickBy');

(function() {
  QUnit.test('should work with a predicate argument', function(assert) {
    assert.expect(1);

    var source = { 'a': 1, 'b': 2, 'c': 3, 'd': 4 };

    var picked = _.pickBy(source, function(value) {
      return value == 1 || value == 3;
    });

    assert.deepEqual(picked, { 'a': 1, 'c': 3 });
  });

  QUnit.test('should not treat keys with dots as deep paths', function(assert) {
    assert.expect(1);

    // `pickBy` iterates own keys literally; 'a.b.c' is one key, not a path.
    var source = { 'a.b.c': 1 },
        picked = _.pickBy(source, stubTrue);

    assert.deepEqual(picked, { 'a.b.c': 1 });
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('pick methods');

// Shared tests for `_.pick` and `_.pickBy`. `resolve` adapts the second
// argument: `_.pick` takes paths directly, while for `_.pickBy` the paths
// are converted into an equivalent value-matching predicate.
lodashStable.each(['pick', 'pickBy'], function(methodName) {
  var expected = { 'a': 1, 'c': 3 },
      func = _[methodName],
      isPick = methodName == 'pick',
      object = { 'a': 1, 'b': 2, 'c': 3, 'd': 4 },
      resolve = lodashStable.nthArg(1);

  if (methodName == 'pickBy') {
    resolve = function(object, props) {
      props = lodashStable.castArray(props);
      return function(value) {
        // Keep a value if any of the requested keys maps to it on `object`.
        return lodashStable.some(props, function(key) {
          key = lodashStable.isSymbol(key) ? key : lodashStable.toString(key);
          return object[key] === value;
        });
      };
    };
  }

  QUnit.test('`_.' + methodName + '` should create an object of picked string keyed properties', function(assert) {
    assert.expect(2);

    assert.deepEqual(func(object, resolve(object, 'a')), { 'a': 1 });
    assert.deepEqual(func(object, resolve(object, ['a', 'c'])), expected);
  });

  QUnit.test('`_.' + methodName + '` should pick inherited string keyed properties', function(assert) {
    assert.expect(1);

    function Foo() {}
    Foo.prototype = object;

    var foo = new Foo;
    assert.deepEqual(func(foo, resolve(foo, ['a', 'c'])), expected);
  });

  QUnit.test('`_.' + methodName + '` should preserve the sign of `0`', function(assert) {
    assert.expect(1);

    // `-0` and `0` (primitive or boxed) must resolve to distinct keys.
    var object = { '-0': 'a', '0': 'b' },
        props = [-0, Object(-0), 0, Object(0)],
        expected = [{ '-0': 'a' }, { '-0': 'a' }, { '0': 'b' }, { '0': 'b' }];

    var actual = lodashStable.map(props, function(key) {
      return func(object, resolve(object, key));
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('`_.' + methodName + '` should pick symbols', function(assert) {
    assert.expect(3);

    function Foo() {
      this[symbol] = 1;
    }

    if (Symbol) {
      var symbol2 = Symbol('b');
      Foo.prototype[symbol2] = 2;

      // A non-enumerable symbol: `_.pick` still grabs it by explicit path,
      // but `_.pickBy` only visits enumerable properties, so it's excluded.
      var symbol3 = Symbol('c');
      defineProperty(Foo.prototype, symbol3, {
        'configurable': true,
        'enumerable': false,
        'writable': true,
        'value': 3
      });

      var foo = new Foo,
          actual = func(foo, resolve(foo, [symbol, symbol2, symbol3]));

      assert.strictEqual(actual[symbol], 1);
      assert.strictEqual(actual[symbol2], 2);

      if (isPick) {
        assert.strictEqual(actual[symbol3], 3);
      } else {
        assert.notOk(symbol3 in actual);
      }
    }
    else {
      skipAssert(assert, 3);
    }
  });

  QUnit.test('`_.' + methodName + '` should work with an array `object`', function(assert) {
    assert.expect(1);

    var array = [1, 2, 3];
    assert.deepEqual(func(array, resolve(array, '1')), { '1': 2 });
  });
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.property');

(function() {
  QUnit.test('should create a function that plucks a property value of a given object', function(assert) {
    assert.expect(4);

    var object = { 'a': 1 };

    // String and single-element-array paths behave identically.
    lodashStable.each(['a', ['a']], function(path) {
      var prop = _.property(path);
      assert.strictEqual(prop.length, 1);
      assert.strictEqual(prop(object), 1);
    });
  });

  QUnit.test('should pluck deep property values', function(assert) {
    assert.expect(2);

    var object = { 'a': { 'b': 2 } };

    lodashStable.each(['a.b', ['a', 'b']], function(path) {
      var prop = _.property(path);
      assert.strictEqual(prop(object), 2);
    });
  });

  QUnit.test('should pluck inherited property values', function(assert) {
    assert.expect(2);

    function Foo() {}
    Foo.prototype.a = 1;

    lodashStable.each(['a', ['a']], function(path) {
      var prop = _.property(path);
      assert.strictEqual(prop(new Foo), 1);
    });
  });

  QUnit.test('should work with a non-string `path`', function(assert) {
    assert.expect(2);

    var array = [1, 2, 3];

    lodashStable.each([1, [1]], function(path) {
      var prop = _.property(path);
      assert.strictEqual(prop(array), 2);
    });
  });

  QUnit.test('should preserve the sign of `0`', function(assert) {
    assert.expect(1);

    // `-0` and `0` (primitive or boxed) must resolve to distinct keys.
    var object = { '-0': 'a', '0': 'b' },
        props = [-0, Object(-0), 0, Object(0)];

    var actual = lodashStable.map(props, function(key) {
      var prop = _.property(key);
      return prop(object);
    });

    assert.deepEqual(actual, ['a', 'a', 'b', 'b']);
  });

  QUnit.test('should coerce `path` to a string', function(assert) {
    assert.expect(2);

    function fn() {}
    fn.toString = lodashStable.constant('fn');

    // Each non-string path stringifies to the matching key of `object`.
    var expected = [1, 2, 3, 4],
        object = { 'null': 1, 'undefined': 2, 'fn': 3, '[object Object]': 4 },
        paths = [null, undefined, fn, {}];

    // Round 2 wraps each path in an array; results must be identical.
    lodashStable.times(2, function(index) {
      var actual = lodashStable.map(paths, function(path) {
        var prop = _.property(index ? [path] : path);
        return prop(object);
      });

      assert.deepEqual(actual, expected);
    });
  });

  QUnit.test('should pluck a key over a path', function(assert) {
    assert.expect(2);

    // A literal 'a.b' key wins over the deep path a -> b.
    var object = { 'a.b': 1, 'a': { 'b': 2 } };

    lodashStable.each(['a.b', ['a.b']], function(path) {
      var prop = _.property(path);
      assert.strictEqual(prop(object), 1);
    });
  });

  QUnit.test('should return `undefined` when `object` is nullish', function(assert) {
    assert.expect(2);

    var values = [, null, undefined],
        expected = lodashStable.map(values, noop);

    lodashStable.each(['constructor', ['constructor']], function(path) {
      var prop = _.property(path);

      var actual = lodashStable.map(values, function(value, index) {
        return index ? prop(value) : prop();
      });

      assert.deepEqual(actual, expected);
    });
  });

  QUnit.test('should return `undefined` for deep paths when `object` is nullish', function(assert) {
    assert.expect(2);

    var values = [, null, undefined],
        expected = lodashStable.map(values, noop);

    lodashStable.each(['constructor.prototype.valueOf', ['constructor', 'prototype', 'valueOf']], function(path) {
      var prop = _.property(path);

      var actual = lodashStable.map(values, function(value, index) {
        return index ? prop(value) : prop();
      });

      assert.deepEqual(actual, expected);
    });
  });

  QUnit.test('should return `undefined` if parts of `path` are missing', function(assert) {
    assert.expect(4);

    var object = {};

    lodashStable.each(['a', 'a[1].b.c', ['a'], ['a', '1', 'b', 'c']], function(path) {
      var prop = _.property(path);
      assert.strictEqual(prop(object), undefined);
    });
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.propertyOf');

// `_.propertyOf` mirrors `_.property`: the object is fixed up front and the
// returned function takes the path.
(function() {
  QUnit.test('should create a function that plucks a property value of a given key', function(assert) {
    assert.expect(3);

    var object = { 'a': 1 },
        propOf = _.propertyOf(object);

    assert.strictEqual(propOf.length, 1);

    lodashStable.each(['a', ['a']], function(path) {
      assert.strictEqual(propOf(path), 1);
    });
  });

  QUnit.test('should pluck deep property values', function(assert) {
    assert.expect(2);

    var object = { 'a': { 'b': 2 } },
        propOf = _.propertyOf(object);

    lodashStable.each(['a.b', ['a', 'b']], function(path) {
      assert.strictEqual(propOf(path), 2);
    });
  });

  QUnit.test('should pluck inherited property values', function(assert) {
    assert.expect(2);

    function Foo() {
      this.a = 1;
    }
    Foo.prototype.b = 2;

    var propOf = _.propertyOf(new Foo);

    lodashStable.each(['b', ['b']], function(path) {
      assert.strictEqual(propOf(path), 2);
    });
  });

  QUnit.test('should work with a non-string `path`', function(assert) {
    assert.expect(2);

    var array = [1, 2, 3],
        propOf = _.propertyOf(array);

    lodashStable.each([1, [1]], function(path) {
      assert.strictEqual(propOf(path), 2);
    });
  });

  QUnit.test('should preserve the sign of `0`', function(assert) {
    assert.expect(1);

    // `-0` and `0` (primitive or boxed) must resolve to distinct keys.
    var object = { '-0': 'a', '0': 'b' },
        props = [-0, Object(-0), 0, Object(0)];

    var actual = lodashStable.map(props, function(key) {
      var propOf = _.propertyOf(object);
      return propOf(key);
    });

    assert.deepEqual(actual, ['a', 'a', 'b', 'b']);
  });

  QUnit.test('should coerce `path` to a string', function(assert) {
    assert.expect(2);

    function fn() {}
    fn.toString = lodashStable.constant('fn');

    // Each non-string path stringifies to the matching key of `object`.
    var expected = [1, 2, 3, 4],
        object = { 'null': 1, 'undefined': 2, 'fn': 3, '[object Object]': 4 },
        paths = [null, undefined, fn, {}];

    // Round 2 wraps each path in an array; results must be identical.
    lodashStable.times(2, function(index) {
      var actual = lodashStable.map(paths, function(path) {
        var propOf = _.propertyOf(object);
        return propOf(index ? [path] : path);
      });

      assert.deepEqual(actual, expected);
    });
  });

  QUnit.test('should pluck a key over a path', function(assert) {
    assert.expect(2);

    // A literal 'a.b' key wins over the deep path a -> b.
    var object = { 'a.b': 1, 'a': { 'b': 2 } },
        propOf = _.propertyOf(object);

    lodashStable.each(['a.b', ['a.b']], function(path) {
      assert.strictEqual(propOf(path), 1);
    });
  });

  QUnit.test('should return `undefined` when `object` is nullish', function(assert) {
    assert.expect(2);

    var values = [, null, undefined],
        expected = lodashStable.map(values, noop);

    lodashStable.each(['constructor', ['constructor']], function(path) {
      var actual = lodashStable.map(values, function(value, index) {
        var propOf = index ? _.propertyOf(value) : _.propertyOf();
        return propOf(path);
      });

      assert.deepEqual(actual, expected);
    });
  });

  QUnit.test('should return `undefined` for deep paths when `object` is nullish', function(assert) {
    assert.expect(2);

    var values = [, null, undefined],
        expected = lodashStable.map(values, noop);

    lodashStable.each(['constructor.prototype.valueOf', ['constructor', 'prototype', 'valueOf']], function(path) {
      var actual = lodashStable.map(values, function(value, index) {
        var propOf = index ? _.propertyOf(value) : _.propertyOf();
        return propOf(path);
      });

      assert.deepEqual(actual, expected);
    });
  });

  QUnit.test('should return `undefined` if parts of `path` are missing', function(assert) {
    assert.expect(4);

    var propOf = _.propertyOf({});

    lodashStable.each(['a', 'a[1].b.c', ['a'], ['a', '1', 'b', 'c']], function(path) {
      assert.strictEqual(propOf(path), undefined);
    });
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.pullAll');

(function() {
  QUnit.test('should work with the same value for `array` and `values`', function(assert) {
    assert.expect(1);

    // Pulling an array from itself must empty it rather than misbehave
    // while mutating during iteration.
    var items = [{ 'a': 1 }, { 'b': 2 }],
        result = _.pullAll(items, items);

    assert.deepEqual(result, []);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.pullAllBy');

(function() {
  QUnit.test('should accept an `iteratee`', function(assert) {
    assert.expect(1);

    var items = [{ 'x': 1 }, { 'x': 2 }, { 'x': 3 }, { 'x': 1 }];

    var result = _.pullAllBy(items, [{ 'x': 1 }, { 'x': 3 }], function(item) {
      return item.x;
    });

    assert.deepEqual(result, [{ 'x': 2 }]);
  });

  QUnit.test('should provide correct `iteratee` arguments', function(assert) {
    assert.expect(1);

    var args,
        items = [{ 'x': 1 }, { 'x': 2 }, { 'x': 3 }, { 'x': 1 }];

    _.pullAllBy(items, [{ 'x': 1 }, { 'x': 3 }], function() {
      // Record only the first invocation's arguments.
      if (!args) {
        args = slice.call(arguments);
      }
    });

    assert.deepEqual(args, [{ 'x': 1 }]);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.pullAllWith');

(function() {
  QUnit.test('should work with a `comparator`', function(assert) {
    assert.expect(1);

    var items = [{ 'x': 1, 'y': 1 }, { 'x': 2, 'y': 2 }, { 'x': 3, 'y': 3 }],
        survivors = [items[0], items[2]];

    // Deep equality removes the middle element even though it is a
    // different object reference.
    var result = _.pullAllWith(items, [{ 'x': 2, 'y': 2 }], lodashStable.isEqual);

    assert.deepEqual(result, survivors);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('pull methods');

// Shared tests for `_.pull`, `_.pullAll`, and `_.pullAllWith`. The local
// `pull` shim normalizes the calling convention: `_.pull` takes values as
// rest arguments while the others take an array of values.
lodashStable.each(['pull', 'pullAll', 'pullAllWith'], function(methodName) {
  var func = _[methodName],
      isPull = methodName == 'pull';

  function pull(array, values) {
    return isPull
      ? func.apply(undefined, [array].concat(values))
      : func(array, values);
  }

  QUnit.test('`_.' + methodName + '` should modify and return the array', function(assert) {
    assert.expect(2);

    var array = [1, 2, 3],
        actual = pull(array, [1, 3]);

    // Mutates in place and returns the same reference.
    assert.strictEqual(actual, array);
    assert.deepEqual(array, [2]);
  });

  QUnit.test('`_.' + methodName + '` should preserve holes in arrays', function(assert) {
    assert.expect(2);

    var array = [1, 2, 3, 4];
    delete array[1];
    delete array[3];

    pull(array, [1]);
    // The shifted holes remain holes rather than becoming `undefined` slots.
    assert.notOk('0' in array);
    assert.notOk('2' in array);
  });

  QUnit.test('`_.' + methodName + '` should treat holes as `undefined`', function(assert) {
    assert.expect(1);

    var array = [1, 2, 3];
    delete array[1];

    pull(array, [undefined]);
    assert.deepEqual(array, [1, 3]);
  });

  QUnit.test('`_.' + methodName + '` should match `NaN`', function(assert) {
    assert.expect(1);

    // SameValueZero comparison: `NaN` matches `NaN`.
    var array = [1, NaN, 3, NaN];

    pull(array, [NaN]);
    assert.deepEqual(array, [1, 3]);
  });
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.pullAt');

(function() {
  QUnit.test('should modify the array and return removed elements', function(assert) {
    assert.expect(2);

    var array = [1, 2, 3],
        actual = _.pullAt(array, [0, 1]);

    assert.deepEqual(array, [3]);
    assert.deepEqual(actual, [1, 2]);
  });

  QUnit.test('should work with unsorted indexes', function(assert) {
    assert.expect(2);

    var array = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
        actual = _.pullAt(array, [1, 3, 11, 7, 5, 9]);

    assert.deepEqual(array, [1, 3, 5, 7, 9, 11]);
    // Removed values come back in the order the indexes were given.
    assert.deepEqual(actual, [2, 4, 12, 8, 6, 10]);
  });

  QUnit.test('should work with repeated indexes', function(assert) {
    assert.expect(2);

    var array = [1, 2, 3, 4],
        actual = _.pullAt(array, [0, 2, 0, 1, 0, 2]);

    assert.deepEqual(array, [4]);
    assert.deepEqual(actual, [1, 3, 1, 2, 1, 3]);
  });

  QUnit.test('should use `undefined` for nonexistent indexes', function(assert) {
    assert.expect(2);

    var array = ['a', 'b', 'c'],
        actual = _.pullAt(array, [2, 4, 0]);

    assert.deepEqual(array, ['b']);
    assert.deepEqual(actual, ['c', undefined, 'a']);
  });

  QUnit.test('should flatten `indexes`', function(assert) {
    assert.expect(4);

    // Indexes may be given as rest args, arrays, or a mix of both.
    var array = ['a', 'b', 'c'];
    assert.deepEqual(_.pullAt(array, 2, 0), ['c', 'a']);
    assert.deepEqual(array, ['b']);

    array = ['a', 'b', 'c', 'd'];
    assert.deepEqual(_.pullAt(array, [3, 0], 2), ['d', 'a', 'c']);
    assert.deepEqual(array, ['b']);
  });

  QUnit.test('should return an empty array when no indexes are given', function(assert) {
    assert.expect(4);

    var array = ['a', 'b', 'c'],
        actual = _.pullAt(array);

    assert.deepEqual(array, ['a', 'b', 'c']);
    assert.deepEqual(actual, []);

    actual = _.pullAt(array, [], []);

    assert.deepEqual(array, ['a', 'b', 'c']);
    assert.deepEqual(actual, []);
  });

  QUnit.test('should work with non-index paths', function(assert) {
    assert.expect(2);

    // Build an array whose only populated keys are non-index values
    // (`empties` is the suite's list of empty-ish values, minus 0 and
    // arrays, plus -1 and 1.1).
    var values = lodashStable.reject(empties, function(value) {
      return (value === 0) || lodashStable.isArray(value);
    }).concat(-1, 1.1);

    var array = lodashStable.transform(values, function(result, value) {
      result[value] = 1;
    }, []);

    var expected = lodashStable.map(values, stubOne),
        actual = _.pullAt(array, values);

    assert.deepEqual(actual, expected);

    // After pulling, the keys are gone, so `_.at` now yields `undefined`s.
    expected = lodashStable.map(values, noop);
    actual = lodashStable.at(array, values);

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should preserve the sign of `0`', function(assert) {
    assert.expect(1);

    // `-0` and `0` (primitive or boxed) must resolve to distinct keys.
    var props = [-0, Object(-0), 0, Object(0)];

    var actual = lodashStable.map(props, function(key) {
      var array = [-1];
      array['-0'] = -2;
      return _.pullAt(array, key);
    });

    assert.deepEqual(actual, [[-2], [-2], [-1], [-1]]);
  });

  QUnit.test('should support deep paths', function(assert) {
    assert.expect(3);

    var array = [];
    array.a = { 'b': 2 };

    var actual = _.pullAt(array, 'a.b');

    assert.deepEqual(actual, [2]);
    assert.deepEqual(array.a, {});

    // Pulling through a now-missing branch may throw in some environments;
    // either way the result should be `[undefined]`.
    try {
      actual = _.pullAt(array, 'a.b.c');
    } catch (e) {}

    assert.deepEqual(actual, [undefined]);
  });

  QUnit.test('should work with a falsey `array` when keys are given', function(assert) {
    assert.expect(1);

    // `falsey` is the suite's list of falsey values; each pull should yield
    // four `undefined`s without throwing.
    var values = falsey.slice(),
        expected = lodashStable.map(values, lodashStable.constant(Array(4)));

    var actual = lodashStable.map(values, function(array) {
      try {
        return _.pullAt(array, 0, 1, 'pop', 'push');
      } catch (e) {}
    });

    assert.deepEqual(actual, expected);
  });
}());
/*--------------------------------------------------------------------------*/
// Tests for `_.random`: produces a pseudo-random number between bounds,
// integer by default, floating-point when requested or implied by the bounds.
QUnit.module('lodash.random');

(function() {
  // Shared scratch array; 1000 samples make the probabilistic assertions
  // below overwhelmingly likely to see every reachable value.
  var array = Array(1000);

  QUnit.test('should return `0` or `1` when no arguments are given', function(assert) {
    assert.expect(1);

    var actual = lodashStable.uniq(lodashStable.map(array, function() {
      return _.random();
    })).sort();

    assert.deepEqual(actual, [0, 1]);
  });

  QUnit.test('should support a `min` and `max`', function(assert) {
    assert.expect(1);

    var min = 5,
        max = 10;

    assert.ok(lodashStable.some(array, function() {
      var result = _.random(min, max);
      return result >= min && result <= max;
    }));
  });

  QUnit.test('should support not providing a `max`', function(assert) {
    assert.expect(1);

    // A single argument is treated as `max` with an implicit `min` of 0.
    var min = 0,
        max = 5;

    assert.ok(lodashStable.some(array, function() {
      var result = _.random(max);
      return result >= min && result <= max;
    }));
  });

  QUnit.test('should swap `min` and `max` when `min` > `max`', function(assert) {
    assert.expect(1);

    var min = 4,
        max = 2,
        expected = [2, 3, 4];

    var actual = lodashStable.uniq(lodashStable.map(array, function() {
      return _.random(min, max);
    })).sort();

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should support large integer values', function(assert) {
    assert.expect(2);

    // Beyond the 32-bit range, where naive bit-twiddling would overflow.
    var min = Math.pow(2, 31),
        max = Math.pow(2, 62);

    assert.ok(lodashStable.every(array, function() {
      var result = _.random(min, max);
      return result >= min && result <= max;
    }));

    assert.ok(lodashStable.some(array, function() {
      return _.random(MAX_INTEGER);
    }));
  });

  QUnit.test('should coerce arguments to finite numbers', function(assert) {
    assert.expect(1);

    var actual = [
      _.random(NaN, NaN),
      _.random('1', '1'),
      _.random(Infinity, Infinity)
    ];

    assert.deepEqual(actual, [0, 1, MAX_INTEGER]);
  });

  QUnit.test('should support floats', function(assert) {
    assert.expect(2);

    // Float bounds imply a float result even without the `floating` flag.
    var min = 1.5,
        max = 1.6,
        actual = _.random(min, max);

    assert.ok(actual % 1);
    assert.ok(actual >= min && actual <= max);
  });

  QUnit.test('should support providing a `floating`', function(assert) {
    assert.expect(3);

    var actual = _.random(true);
    assert.ok(actual % 1 && actual >= 0 && actual <= 1);

    actual = _.random(2, true);
    assert.ok(actual % 1 && actual >= 0 && actual <= 2);

    actual = _.random(2, 4, true);
    assert.ok(actual % 1 && actual >= 2 && actual <= 4);
  });

  QUnit.test('should work as an iteratee for methods like `_.map`', function(assert) {
    assert.expect(1);

    // When used as an iteratee, the element is the upper bound and the
    // index/collection extras must be ignored (no accidental `floating`).
    var array = [1, 2, 3],
        expected = lodashStable.map(array, stubTrue),
        randoms = lodashStable.map(array, _.random);

    var actual = lodashStable.map(randoms, function(result, index) {
      return result >= 0 && result <= array[index] && (result % 1) == 0;
    });

    assert.deepEqual(actual, expected);
  });
}());
/*--------------------------------------------------------------------------*/
// Shared tests for `_.range` and `_.rangeRight`. `rangeRight` yields the
// same sequence reversed, so `resolve` normalizes the expected arrays.
QUnit.module('range methods');

lodashStable.each(['range', 'rangeRight'], function(methodName) {
  var func = _[methodName],
      isRange = methodName == 'range';

  // Reverse the expected sequence when testing `rangeRight`.
  function resolve(range) {
    return isRange ? range : range.reverse();
  }

  QUnit.test('`_.' + methodName + '` should infer the sign of `step` when only `end` is given', function(assert) {
    assert.expect(2);

    assert.deepEqual(func(4), resolve([0, 1, 2, 3]));
    assert.deepEqual(func(-4), resolve([0, -1, -2, -3]));
  });

  QUnit.test('`_.' + methodName + '` should infer the sign of `step` when only `start` and `end` are given', function(assert) {
    assert.expect(2);

    assert.deepEqual(func(1, 5), resolve([1, 2, 3, 4]));
    assert.deepEqual(func(5, 1), resolve([5, 4, 3, 2]));
  });

  QUnit.test('`_.' + methodName + '` should work with a `start`, `end`, and `step`', function(assert) {
    assert.expect(3);

    assert.deepEqual(func(0, -4, -1), resolve([0, -1, -2, -3]));
    assert.deepEqual(func(5, 1, -1), resolve([5, 4, 3, 2]));
    assert.deepEqual(func(0, 20, 5), resolve([0, 5, 10, 15]));
  });

  QUnit.test('`_.' + methodName + '` should support a `step` of `0`', function(assert) {
    assert.expect(1);

    // With step 0 the start value is repeated `end - start` times.
    assert.deepEqual(func(1, 4, 0), [1, 1, 1]);
  });

  QUnit.test('`_.' + methodName + '` should work with a `step` larger than `end`', function(assert) {
    assert.expect(1);

    assert.deepEqual(func(1, 5, 20), [1]);
  });

  QUnit.test('`_.' + methodName + '` should work with a negative `step`', function(assert) {
    assert.expect(2);

    assert.deepEqual(func(0, -4, -1), resolve([0, -1, -2, -3]));
    assert.deepEqual(func(21, 10, -3), resolve([21, 18, 15, 12]));
  });

  QUnit.test('`_.' + methodName + '` should support `start` of `-0`', function(assert) {
    assert.expect(1);

    // 1 / -0 === -Infinity distinguishes -0 from +0.
    var actual = func(-0, 1);
    assert.strictEqual(1 / actual[0], -Infinity);
  });

  QUnit.test('`_.' + methodName + '` should treat falsey `start` as `0`', function(assert) {
    assert.expect(13);

    lodashStable.each(falsey, function(value, index) {
      if (index) {
        assert.deepEqual(func(value), []);
        assert.deepEqual(func(value, 1), [0]);
      } else {
        // Index 0 of `falsey` is the hole (`undefined`): call with no args.
        assert.deepEqual(func(), []);
      }
    });
  });

  QUnit.test('`_.' + methodName + '` should coerce arguments to finite numbers', function(assert) {
    assert.expect(1);

    var actual = [
      func('1'),
      func('0', 1),
      func(0, 1, '1'),
      func(NaN),
      func(NaN, NaN)
    ];

    assert.deepEqual(actual, [[0], [0], [0], [], []]);
  });

  QUnit.test('`_.' + methodName + '` should work as an iteratee for methods like `_.map`', function(assert) {
    assert.expect(2);

    var array = [1, 2, 3],
        object = { 'a': 1, 'b': 2, 'c': 3 },
        expected = lodashStable.map([[0], [0, 1], [0, 1, 2]], resolve);

    lodashStable.each([array, object], function(collection) {
      var actual = lodashStable.map(collection, func);
      assert.deepEqual(actual, expected);
    });
  });
});
/*--------------------------------------------------------------------------*/
// Tests for `_.rearg`: creates a function that invokes `func` with
// arguments rearranged according to the given indexes.
QUnit.module('lodash.rearg');

(function() {
  // Identity over `arguments`, so the received order is observable.
  function fn() {
    return slice.call(arguments);
  }

  QUnit.test('should reorder arguments provided to `func`', function(assert) {
    assert.expect(1);

    var rearged = _.rearg(fn, [2, 0, 1]);
    assert.deepEqual(rearged('b', 'c', 'a'), ['a', 'b', 'c']);
  });

  QUnit.test('should work with repeated indexes', function(assert) {
    assert.expect(1);

    var rearged = _.rearg(fn, [1, 1, 1]);
    assert.deepEqual(rearged('c', 'a', 'b'), ['a', 'a', 'a']);
  });

  QUnit.test('should use `undefined` for nonexistent indexes', function(assert) {
    assert.expect(1);

    var rearged = _.rearg(fn, [1, 4]);
    assert.deepEqual(rearged('b', 'a', 'c'), ['a', undefined, 'c']);
  });

  QUnit.test('should use `undefined` for non-index values', function(assert) {
    assert.expect(1);

    // Non-index values (objects, -1, 1.1, non-zero empties) can't address
    // an argument position, so the resulting slot is `undefined`.
    var values = lodashStable.reject(empties, function(value) {
      return (value === 0) || lodashStable.isArray(value);
    }).concat(-1, 1.1);

    var expected = lodashStable.map(values, lodashStable.constant([undefined, 'b', 'c']));

    var actual = lodashStable.map(values, function(value) {
      var rearged = _.rearg(fn, [value]);
      return rearged('a', 'b', 'c');
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should not rearrange arguments when no indexes are given', function(assert) {
    assert.expect(2);

    var rearged = _.rearg(fn);
    assert.deepEqual(rearged('a', 'b', 'c'), ['a', 'b', 'c']);

    rearged = _.rearg(fn, [], []);
    assert.deepEqual(rearged('a', 'b', 'c'), ['a', 'b', 'c']);
  });

  QUnit.test('should accept multiple index arguments', function(assert) {
    assert.expect(1);

    var rearged = _.rearg(fn, 2, 0, 1);
    assert.deepEqual(rearged('b', 'c', 'a'), ['a', 'b', 'c']);
  });

  QUnit.test('should accept multiple arrays of indexes', function(assert) {
    assert.expect(1);

    var rearged = _.rearg(fn, [2], [0, 1]);
    assert.deepEqual(rearged('b', 'c', 'a'), ['a', 'b', 'c']);
  });

  QUnit.test('should work with fewer indexes than arguments', function(assert) {
    assert.expect(1);

    // Unmapped trailing arguments pass through in their original positions.
    var rearged = _.rearg(fn, [1, 0]);
    assert.deepEqual(rearged('b', 'a', 'c'), ['a', 'b', 'c']);
  });

  QUnit.test('should work on functions that have been rearged', function(assert) {
    assert.expect(1);

    var rearged1 = _.rearg(fn, 2, 1, 0),
        rearged2 = _.rearg(rearged1, 1, 0, 2);

    assert.deepEqual(rearged2('b', 'c', 'a'), ['a', 'b', 'c']);
  });
}());
/*--------------------------------------------------------------------------*/
// Tests for `_.reduce` specifics (shared behavior is covered under
// "reduce methods" below).
QUnit.module('lodash.reduce');

(function() {
  var array = [1, 2, 3];

  QUnit.test('should use the first element of a collection as the default `accumulator`', function(assert) {
    assert.expect(1);

    assert.strictEqual(_.reduce(array), 1);
  });

  QUnit.test('should provide correct `iteratee` arguments when iterating an array', function(assert) {
    assert.expect(2);

    var args;

    // With an explicit accumulator the iteratee starts at index 0.
    _.reduce(array, function() {
      args || (args = slice.call(arguments));
    }, 0);

    assert.deepEqual(args, [0, 1, 0, array]);

    args = undefined;

    // Without one, element 0 seeds the accumulator and iteration starts at 1.
    _.reduce(array, function() {
      args || (args = slice.call(arguments));
    });

    assert.deepEqual(args, [1, 2, 1, array]);
  });

  QUnit.test('should provide correct `iteratee` arguments when iterating an object', function(assert) {
    assert.expect(2);

    var args,
        object = { 'a': 1, 'b': 2 },
        firstKey = _.head(_.keys(object));

    // Object key order is engine-dependent; branch on the observed order.
    var expected = firstKey == 'a'
      ? [0, 1, 'a', object]
      : [0, 2, 'b', object];

    _.reduce(object, function() {
      args || (args = slice.call(arguments));
    }, 0);

    assert.deepEqual(args, expected);

    args = undefined;
    expected = firstKey == 'a'
      ? [1, 2, 'b', object]
      : [2, 1, 'a', object];

    _.reduce(object, function() {
      args || (args = slice.call(arguments));
    });

    assert.deepEqual(args, expected);
  });
}());
/*--------------------------------------------------------------------------*/
// Tests for `_.reduceRight` specifics (mirror of the `_.reduce` tests,
// iterating from the end of the collection).
QUnit.module('lodash.reduceRight');

(function() {
  var array = [1, 2, 3];

  QUnit.test('should use the last element of a collection as the default `accumulator`', function(assert) {
    assert.expect(1);

    assert.strictEqual(_.reduceRight(array), 3);
  });

  QUnit.test('should provide correct `iteratee` arguments when iterating an array', function(assert) {
    assert.expect(2);

    var args;

    _.reduceRight(array, function() {
      args || (args = slice.call(arguments));
    }, 0);

    assert.deepEqual(args, [0, 3, 2, array]);

    args = undefined;

    // Without an accumulator the last element seeds it; iteration starts
    // at the second-to-last index.
    _.reduceRight(array, function() {
      args || (args = slice.call(arguments));
    });

    assert.deepEqual(args, [3, 2, 1, array]);
  });

  QUnit.test('should provide correct `iteratee` arguments when iterating an object', function(assert) {
    assert.expect(2);

    // Object key order is engine-dependent; branch on the observed order.
    var args,
        object = { 'a': 1, 'b': 2 },
        isFIFO = lodashStable.keys(object)[0] == 'a';

    var expected = isFIFO
      ? [0, 2, 'b', object]
      : [0, 1, 'a', object];

    _.reduceRight(object, function() {
      args || (args = slice.call(arguments));
    }, 0);

    assert.deepEqual(args, expected);

    args = undefined;
    expected = isFIFO
      ? [2, 1, 'a', object]
      : [1, 2, 'b', object];

    _.reduceRight(object, function() {
      args || (args = slice.call(arguments));
    });

    assert.deepEqual(args, expected);
  });
}());
/*--------------------------------------------------------------------------*/
// Behavior shared by `_.reduce` and `_.reduceRight`.
QUnit.module('reduce methods');

lodashStable.each(['reduce', 'reduceRight'], function(methodName) {
  var func = _[methodName],
      array = [1, 2, 3],
      isReduce = methodName == 'reduce';

  QUnit.test('`_.' + methodName + '` should reduce a collection to a single value', function(assert) {
    assert.expect(1);

    var actual = func(['a', 'b', 'c'], function(accumulator, value) {
      return accumulator + value;
    }, '');

    // String concatenation makes iteration direction observable.
    assert.strictEqual(actual, isReduce ? 'abc' : 'cba');
  });

  QUnit.test('`_.' + methodName + '` should support empty collections without an initial `accumulator` value', function(assert) {
    assert.expect(1);

    var actual = [],
        expected = lodashStable.map(empties, noop);

    lodashStable.each(empties, function(value) {
      try {
        actual.push(func(value, noop));
      } catch (e) {}
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('`_.' + methodName + '` should support empty collections with an initial `accumulator` value', function(assert) {
    assert.expect(1);

    var expected = lodashStable.map(empties, lodashStable.constant('x'));

    var actual = lodashStable.map(empties, function(value) {
      try {
        return func(value, noop, 'x');
      } catch (e) {}
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('`_.' + methodName + '` should handle an initial `accumulator` value of `undefined`', function(assert) {
    assert.expect(1);

    // An explicit `undefined` accumulator must not be confused with an
    // omitted one.
    var actual = func([], noop, undefined);
    assert.strictEqual(actual, undefined);
  });

  QUnit.test('`_.' + methodName + '` should return `undefined` for empty collections when no `accumulator` is given (test in IE > 9 and modern browsers)', function(assert) {
    assert.expect(2);

    // An empty array whose prototype holds an own-looking '0' key must
    // still be treated as empty.
    var array = [],
        object = { '0': 1, 'length': 0 };

    if ('__proto__' in array) {
      array.__proto__ = object;
      assert.strictEqual(func(array, noop), undefined);
    }
    else {
      skipAssert(assert);
    }
    assert.strictEqual(func(object, noop), undefined);
  });

  QUnit.test('`_.' + methodName + '` should return an unwrapped value when implicitly chaining', function(assert) {
    assert.expect(1);

    if (!isNpm) {
      assert.strictEqual(_(array)[methodName](add), 6);
    }
    else {
      skipAssert(assert);
    }
  });

  QUnit.test('`_.' + methodName + '` should return a wrapped value when explicitly chaining', function(assert) {
    assert.expect(1);

    if (!isNpm) {
      assert.ok(_(array).chain()[methodName](add) instanceof _);
    }
    else {
      skipAssert(assert);
    }
  });
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.reject');

(function() {
  QUnit.test('should return elements the `predicate` returns falsey for', function(assert) {
    assert.expect(1);

    // `_.reject` is the complement of `_.filter`: keep the odd values.
    var values = [1, 2, 3];
    assert.deepEqual(_.reject(values, isEven), [1, 3]);
  });
}());
/*--------------------------------------------------------------------------*/
// Behavior shared by `_.filter` and `_.reject`, including their lazy
// (chained) implementations.
QUnit.module('filter methods');

lodashStable.each(['filter', 'reject'], function(methodName) {
  var array = [1, 2, 3, 4],
      func = _[methodName],
      isFilter = methodName == 'filter',
      objects = [{ 'a': 0 }, { 'a': 1 }];

  QUnit.test('`_.' + methodName + '` should not modify the resulting value from within `predicate`', function(assert) {
    assert.expect(1);

    // The result must snapshot the element before the predicate mutates it.
    var actual = func([0], function(value, index, array) {
      array[index] = 1;
      return isFilter;
    });

    assert.deepEqual(actual, [0]);
  });

  QUnit.test('`_.' + methodName + '` should work with `_.property` shorthands', function(assert) {
    assert.expect(1);

    assert.deepEqual(func(objects, 'a'), [objects[isFilter ? 1 : 0]]);
  });

  QUnit.test('`_.' + methodName + '` should work with `_.matches` shorthands', function(assert) {
    assert.expect(1);

    assert.deepEqual(func(objects, objects[1]), [objects[isFilter ? 1 : 0]]);
  });

  QUnit.test('`_.' + methodName + '` should not modify wrapped values', function(assert) {
    assert.expect(2);

    if (!isNpm) {
      // Reusing one wrapper for two different predicates must not let the
      // first result leak into the second.
      var wrapped = _(array);

      var actual = wrapped[methodName](function(n) {
        return n < 3;
      });

      assert.deepEqual(actual.value(), isFilter ? [1, 2] : [3, 4]);

      actual = wrapped[methodName](function(n) {
        return n > 2;
      });

      assert.deepEqual(actual.value(), isFilter ? [3, 4] : [1, 2]);
    }
    else {
      skipAssert(assert, 2);
    }
  });

  QUnit.test('`_.' + methodName + '` should work in a lazy sequence', function(assert) {
    assert.expect(2);

    if (!isNpm) {
      // LARGE_ARRAY_SIZE triggers lodash's lazy evaluation path; the lazy
      // result must match the eager (lodashStable) equivalent.
      var array = lodashStable.range(LARGE_ARRAY_SIZE + 1),
          predicate = function(value) { return isFilter ? isEven(value) : !isEven(value); };

      var object = lodashStable.zipObject(lodashStable.times(LARGE_ARRAY_SIZE, function(index) {
        return ['key' + index, index];
      }));

      var actual = _(array).slice(1).map(square)[methodName](predicate).value();
      assert.deepEqual(actual, _[methodName](lodashStable.map(array.slice(1), square), predicate));

      actual = _(object).mapValues(square)[methodName](predicate).value();
      assert.deepEqual(actual, _[methodName](lodashStable.mapValues(object, square), predicate));
    }
    else {
      skipAssert(assert, 2);
    }
  });

  QUnit.test('`_.' + methodName + '` should provide correct `predicate` arguments in a lazy sequence', function(assert) {
    assert.expect(5);

    if (!isNpm) {
      // The lazy engine passes fewer arguments when the predicate declares
      // fewer parameters (an optimization); verify each arity.
      var args,
          array = lodashStable.range(LARGE_ARRAY_SIZE + 1),
          expected = [1, 0, lodashStable.map(array.slice(1), square)];

      _(array).slice(1)[methodName](function(value, index, array) {
        args || (args = slice.call(arguments));
      }).value();

      assert.deepEqual(args, [1, 0, array.slice(1)]);

      args = undefined;
      _(array).slice(1).map(square)[methodName](function(value, index, array) {
        args || (args = slice.call(arguments));
      }).value();

      assert.deepEqual(args, expected);

      args = undefined;
      _(array).slice(1).map(square)[methodName](function(value, index) {
        args || (args = slice.call(arguments));
      }).value();

      assert.deepEqual(args, expected);

      args = undefined;
      _(array).slice(1).map(square)[methodName](function(value) {
        args || (args = slice.call(arguments));
      }).value();

      // A unary predicate only ever sees the value.
      assert.deepEqual(args, [1]);

      args = undefined;
      _(array).slice(1).map(square)[methodName](function() {
        args || (args = slice.call(arguments));
      }).value();

      assert.deepEqual(args, expected);
    }
    else {
      skipAssert(assert, 5);
    }
  });
});
/*--------------------------------------------------------------------------*/
// Tests for `_.remove`: removes elements the predicate returns truthy for,
// mutating the array and returning the removed elements.
QUnit.module('lodash.remove');

(function() {
  QUnit.test('should modify the array and return removed elements', function(assert) {
    assert.expect(2);

    var array = [1, 2, 3, 4],
        actual = _.remove(array, isEven);

    assert.deepEqual(array, [1, 3]);
    assert.deepEqual(actual, [2, 4]);
  });

  QUnit.test('should provide correct `predicate` arguments', function(assert) {
    assert.expect(1);

    var argsList = [],
        array = [1, 2, 3],
        clone = array.slice();

    _.remove(array, function(n, index) {
      var args = slice.call(arguments);
      // Snapshot the array argument: removal mutates it afterwards.
      args[2] = args[2].slice();
      argsList.push(args);
      return isEven(index);
    });

    assert.deepEqual(argsList, [[1, 0, clone], [2, 1, clone], [3, 2, clone]]);
  });

  QUnit.test('should work with `_.matches` shorthands', function(assert) {
    assert.expect(1);

    var objects = [{ 'a': 0, 'b': 1 }, { 'a': 1, 'b': 2 }];
    _.remove(objects, { 'a': 1 });
    assert.deepEqual(objects, [{ 'a': 0, 'b': 1 }]);
  });

  QUnit.test('should work with `_.matchesProperty` shorthands', function(assert) {
    assert.expect(1);

    var objects = [{ 'a': 0, 'b': 1 }, { 'a': 1, 'b': 2 }];
    _.remove(objects, ['a', 1]);
    assert.deepEqual(objects, [{ 'a': 0, 'b': 1 }]);
  });

  QUnit.test('should work with `_.property` shorthands', function(assert) {
    assert.expect(1);

    var objects = [{ 'a': 0 }, { 'a': 1 }];
    _.remove(objects, 'a');
    assert.deepEqual(objects, [{ 'a': 0 }]);
  });

  QUnit.test('should preserve holes in arrays', function(assert) {
    assert.expect(2);

    var array = [1, 2, 3, 4];
    delete array[1];
    delete array[3];

    _.remove(array, function(n) {
      return n === 1;
    });

    // The surviving holes must remain holes, not become `undefined` slots.
    assert.notOk('0' in array);
    assert.notOk('2' in array);
  });

  QUnit.test('should treat holes as `undefined`', function(assert) {
    assert.expect(1);

    var array = [1, 2, 3];
    delete array[1];

    _.remove(array, function(n) {
      return n == null;
    });

    assert.deepEqual(array, [1, 3]);
  });

  QUnit.test('should not mutate the array until all elements to remove are determined', function(assert) {
    assert.expect(1);

    // If removal happened mid-iteration, the shifting indexes would change
    // which elements the predicate sees.
    var array = [1, 2, 3];

    _.remove(array, function(n, index) {
      return isEven(index);
    });

    assert.deepEqual(array, [2]);
  });
}());
/*--------------------------------------------------------------------------*/
// Tests for `_.repeat`: repeats a string `n` times.
QUnit.module('lodash.repeat');

(function() {
  var string = 'abc';

  QUnit.test('should repeat a string `n` times', function(assert) {
    assert.expect(2);

    assert.strictEqual(_.repeat('*', 3), '***');
    assert.strictEqual(_.repeat(string, 2), 'abcabc');
  });

  QUnit.test('should treat falsey `n` values, except `undefined`, as `0`', function(assert) {
    assert.expect(1);

    // Omitted/`undefined` `n` defaults to 1; other falsey values mean 0.
    var expected = lodashStable.map(falsey, function(value) {
      return value === undefined ? string : '';
    });

    var actual = lodashStable.map(falsey, function(n, index) {
      return index ? _.repeat(string, n) : _.repeat(string);
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should return an empty string if `n` is <= `0`', function(assert) {
    assert.expect(2);

    assert.strictEqual(_.repeat(string, 0), '');
    assert.strictEqual(_.repeat(string, -2), '');
  });

  QUnit.test('should coerce `n` to an integer', function(assert) {
    assert.expect(3);

    assert.strictEqual(_.repeat(string, '2'), 'abcabc');
    assert.strictEqual(_.repeat(string, 2.6), 'abcabc');
    assert.strictEqual(_.repeat('*', { 'valueOf': stubThree }), '***');
  });

  QUnit.test('should coerce `string` to a string', function(assert) {
    assert.expect(2);

    assert.strictEqual(_.repeat(Object(string), 2), 'abcabc');
    assert.strictEqual(_.repeat({ 'toString': lodashStable.constant('*') }, 3), '***');
  });

  QUnit.test('should work as an iteratee for methods like `_.map`', function(assert) {
    assert.expect(1);

    // As an iteratee the index/collection extras must be ignored, so each
    // string is repeated the default (one) time.
    var actual = lodashStable.map(['a', 'b', 'c'], _.repeat);
    assert.deepEqual(actual, ['a', 'b', 'c']);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.replace');

(function() {
  QUnit.test('should replace the matched pattern', function(assert) {
    assert.expect(2);

    var subject = 'abcde';

    // String pattern: replaces the match; global regexp: replaces all matches.
    assert.strictEqual(_.replace(subject, 'de', '123'), 'abc123');
    assert.strictEqual(_.replace(subject, /[bd]/g, '-'), 'a-c-e');
  });
}());
/*--------------------------------------------------------------------------*/
// Tests for `_.result` specifics: like `_.get`, but function values are
// invoked (with their parent object as `this`) and their result returned.
QUnit.module('lodash.result');

(function() {
  var object = { 'a': 1, 'b': stubB };

  QUnit.test('should invoke function values', function(assert) {
    assert.expect(1);

    assert.strictEqual(_.result(object, 'b'), 'b');
  });

  QUnit.test('should invoke default function values', function(assert) {
    assert.expect(1);

    // A function passed as the default is invoked too.
    var actual = _.result(object, 'c', object.b);
    assert.strictEqual(actual, 'b');
  });

  QUnit.test('should invoke nested function values', function(assert) {
    assert.expect(2);

    var value = { 'a': lodashStable.constant({ 'b': stubB }) };

    lodashStable.each(['a.b', ['a', 'b']], function(path) {
      assert.strictEqual(_.result(value, path), 'b');
    });
  });

  QUnit.test('should invoke deep property methods with the correct `this` binding', function(assert) {
    assert.expect(2);

    // `this` inside the invoked method must be its immediate parent object.
    var value = { 'a': { 'b': function() { return this.c; }, 'c': 1 } };

    lodashStable.each(['a.b', ['a', 'b']], function(path) {
      assert.strictEqual(_.result(value, path), 1);
    });
  });
}());
/*--------------------------------------------------------------------------*/
// Path-resolution behavior shared by `_.get` and `_.result`.
QUnit.module('lodash.get and lodash.result');

lodashStable.each(['get', 'result'], function(methodName) {
  var func = _[methodName];

  QUnit.test('`_.' + methodName + '` should get string keyed property values', function(assert) {
    assert.expect(2);

    var object = { 'a': 1 };

    lodashStable.each(['a', ['a']], function(path) {
      assert.strictEqual(func(object, path), 1);
    });
  });

  QUnit.test('`_.' + methodName + '` should preserve the sign of `0`', function(assert) {
    assert.expect(1);

    // -0 and 0 stringify to distinct keys '-0' and '0'.
    var object = { '-0': 'a', '0': 'b' },
        props = [-0, Object(-0), 0, Object(0)];

    var actual = lodashStable.map(props, function(key) {
      return func(object, key);
    });

    assert.deepEqual(actual, ['a', 'a', 'b', 'b']);
  });

  QUnit.test('`_.' + methodName + '` should get symbol keyed property values', function(assert) {
    assert.expect(1);

    if (Symbol) {
      var object = {};
      object[symbol] = 1;

      assert.strictEqual(func(object, symbol), 1);
    }
    else {
      skipAssert(assert);
    }
  });

  QUnit.test('`_.' + methodName + '` should get deep property values', function(assert) {
    assert.expect(2);

    var object = { 'a': { 'b': 2 } };

    lodashStable.each(['a.b', ['a', 'b']], function(path) {
      assert.strictEqual(func(object, path), 2);
    });
  });

  QUnit.test('`_.' + methodName + '` should get a key over a path', function(assert) {
    assert.expect(2);

    // A direct own key named 'a.b' wins over interpreting 'a.b' as a path.
    var object = { 'a.b': 1, 'a': { 'b': 2 } };

    lodashStable.each(['a.b', ['a.b']], function(path) {
      assert.strictEqual(func(object, path), 1);
    });
  });

  QUnit.test('`_.' + methodName + '` should not coerce array paths to strings', function(assert) {
    assert.expect(1);

    var object = { 'a,b,c': 3, 'a': { 'b': { 'c': 4 } } };
    assert.strictEqual(func(object, ['a', 'b', 'c']), 4);
  });

  QUnit.test('`_.' + methodName + '` should not ignore empty brackets', function(assert) {
    assert.expect(1);

    // 'a[]' addresses the empty-string key of `object.a`.
    var object = { 'a': { '': 1 } };
    assert.strictEqual(func(object, 'a[]'), 1);
  });

  QUnit.test('`_.' + methodName + '` should handle empty paths', function(assert) {
    assert.expect(4);

    lodashStable.each([['', ''], [[], ['']]], function(pair) {
      assert.strictEqual(func({}, pair[0]), undefined);
      assert.strictEqual(func({ '': 3 }, pair[1]), 3);
    });
  });

  QUnit.test('`_.' + methodName + '` should handle complex paths', function(assert) {
    assert.expect(2);

    var object = { 'a': { '-1.23': { '["b"]': { 'c': { "['d']": { '\ne\n': { 'f': { 'g': 8 } } } } } } } };

    // String form exercises every quoting/escaping rule of the path parser;
    // the array form is the equivalent pre-split path.
    var paths = [
      'a[-1.23]["[\\"b\\"]"].c[\'[\\\'d\\\']\'][\ne\n][f].g',
      ['a', '-1.23', '["b"]', 'c', "['d']", '\ne\n', 'f', 'g']
    ];

    lodashStable.each(paths, function(path) {
      assert.strictEqual(func(object, path), 8);
    });
  });

  QUnit.test('`_.' + methodName + '` should return `undefined` when `object` is nullish', function(assert) {
    assert.expect(4);

    lodashStable.each(['constructor', ['constructor']], function(path) {
      assert.strictEqual(func(null, path), undefined);
      assert.strictEqual(func(undefined, path), undefined);
    });
  });

  QUnit.test('`_.' + methodName + '` should return `undefined` for deep paths when `object` is nullish', function(assert) {
    assert.expect(2);

    var values = [null, undefined],
        expected = lodashStable.map(values, noop),
        paths = ['constructor.prototype.valueOf', ['constructor', 'prototype', 'valueOf']];

    lodashStable.each(paths, function(path) {
      var actual = lodashStable.map(values, function(value) {
        return func(value, path);
      });

      assert.deepEqual(actual, expected);
    });
  });

  QUnit.test('`_.' + methodName + '` should return `undefined` if parts of `path` are missing', function(assert) {
    assert.expect(2);

    // Sparse element at index 0; `null` at index 1 has no 'b'.
    var object = { 'a': [, null] };

    lodashStable.each(['a[1].b.c', ['a', '1', 'b', 'c']], function(path) {
      assert.strictEqual(func(object, path), undefined);
    });
  });

  QUnit.test('`_.' + methodName + '` should be able to return `null` values', function(assert) {
    assert.expect(2);

    var object = { 'a': { 'b': null } };

    lodashStable.each(['a.b', ['a', 'b']], function(path) {
      assert.strictEqual(func(object, path), null);
    });
  });

  QUnit.test('`_.' + methodName + '` should follow `path` over non-plain objects', function(assert) {
    assert.expect(2);

    // Primitives are boxed, so prototype properties are reachable.
    var paths = ['a.b', ['a', 'b']];

    lodashStable.each(paths, function(path) {
      numberProto.a = { 'b': 2 };
      assert.strictEqual(func(0, path), 2);
      delete numberProto.a;
    });
  });

  QUnit.test('`_.' + methodName + '` should return the default value for `undefined` values', function(assert) {
    assert.expect(2);

    var object = { 'a': {} },
        values = empties.concat(true, new Date, 1, /x/, 'a'),
        expected = lodashStable.map(values, function(value) { return [value, value]; });

    lodashStable.each(['a.b', ['a', 'b']], function(path) {
      var actual = lodashStable.map(values, function(value) {
        return [func(object, path, value), func(null, path, value)];
      });

      assert.deepEqual(actual, expected);
    });
  });

  QUnit.test('`_.' + methodName + '` should return the default value when `path` is empty', function(assert) {
    assert.expect(1);

    assert.strictEqual(func({}, [], 'a'), 'a');
  });
});
/*--------------------------------------------------------------------------*/
// Tests for `_.rest`: creates a function that gathers arguments from
// `start` onward into an array passed as the final parameter.
QUnit.module('lodash.rest');

(function() {
  // Identity over `arguments`, so the received shape is observable.
  function fn(a, b, c) {
    return slice.call(arguments);
  }

  QUnit.test('should apply a rest parameter to `func`', function(assert) {
    assert.expect(1);

    // Default `start` is `func.length - 1` (here 2).
    var rest = _.rest(fn);
    assert.deepEqual(rest(1, 2, 3, 4), [1, 2, [3, 4]]);
  });

  QUnit.test('should work with `start`', function(assert) {
    assert.expect(1);

    var rest = _.rest(fn, 1);
    assert.deepEqual(rest(1, 2, 3, 4), [1, [2, 3, 4]]);
  });

  QUnit.test('should treat `start` as `0` for `NaN` or negative values', function(assert) {
    assert.expect(1);

    var values = [-1, NaN, 'a'],
        expected = lodashStable.map(values, lodashStable.constant([[1, 2, 3, 4]]));

    var actual = lodashStable.map(values, function(value) {
      var rest = _.rest(fn, value);
      return rest(1, 2, 3, 4);
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should coerce `start` to an integer', function(assert) {
    assert.expect(1);

    var rest = _.rest(fn, 1.6);
    assert.deepEqual(rest(1, 2, 3), [1, [2, 3]]);
  });

  QUnit.test('should use an empty array when `start` is not reached', function(assert) {
    assert.expect(1);

    // Missing middle parameters stay `undefined`; the rest array is empty.
    var rest = _.rest(fn);
    assert.deepEqual(rest(1), [1, undefined, []]);
  });

  QUnit.test('should work on functions with more than three parameters', function(assert) {
    assert.expect(1);

    var rest = _.rest(function(a, b, c, d) {
      return slice.call(arguments);
    });

    assert.deepEqual(rest(1, 2, 3, 4, 5), [1, 2, 3, [4, 5]]);
  });
}());
/*--------------------------------------------------------------------------*/
// Tests for `_.reverse`, including its behavior inside lazy and hybrid
// chained sequences. `largeArray` triggers lodash's lazy evaluation path.
QUnit.module('lodash.reverse');

(function() {
  var largeArray = lodashStable.range(LARGE_ARRAY_SIZE).concat(null),
      smallArray = [0, 1, 2, null];

  QUnit.test('should reverse `array`', function(assert) {
    assert.expect(2);

    var array = [1, 2, 3],
        actual = _.reverse(array);

    // Mutates in place and returns the same array reference.
    assert.strictEqual(actual, array);
    assert.deepEqual(array, [3, 2, 1]);
  });

  QUnit.test('should return the wrapped reversed `array`', function(assert) {
    assert.expect(6);

    if (!isNpm) {
      lodashStable.times(2, function(index) {
        var array = (index ? largeArray : smallArray).slice(),
            clone = array.slice(),
            wrapped = _(array).reverse(),
            actual = wrapped.value();

        assert.ok(wrapped instanceof _);
        assert.strictEqual(actual, array);
        assert.deepEqual(actual, clone.slice().reverse());
      });
    }
    else {
      skipAssert(assert, 6);
    }
  });

  QUnit.test('should work in a lazy sequence', function(assert) {
    assert.expect(4);

    if (!isNpm) {
      lodashStable.times(2, function(index) {
        var array = (index ? largeArray : smallArray).slice(),
            expected = array.slice(),
            actual = _(array).slice(1).reverse().value();

        assert.deepEqual(actual, expected.slice(1).reverse());
        // After `slice`, reversing must not mutate the source array.
        assert.deepEqual(array, expected);
      });
    }
    else {
      skipAssert(assert, 4);
    }
  });

  QUnit.test('should be lazy when in a lazy sequence', function(assert) {
    assert.expect(3);

    if (!isNpm) {
      // `spy` throws if stringified; `last()` after `reverse()` should only
      // evaluate the needed element, never reaching `spy`.
      var spy = {
        'toString': function() {
          throw new Error('spy was revealed');
        }
      };

      var array = largeArray.concat(spy),
          expected = array.slice();

      try {
        var wrapped = _(array).slice(1).map(String).reverse(),
            actual = wrapped.last();
      } catch (e) {}

      assert.ok(wrapped instanceof _);
      assert.strictEqual(actual, '1');
      assert.deepEqual(array, expected);
    }
    else {
      skipAssert(assert, 3);
    }
  });

  QUnit.test('should work in a hybrid sequence', function(assert) {
    assert.expect(8);

    if (!isNpm) {
      // Mixes lazy methods (map/filter) with eager array mutators
      // (thru/pull/push) in one chain.
      lodashStable.times(2, function(index) {
        var clone = (index ? largeArray : smallArray).slice();

        lodashStable.each(['map', 'filter'], function(methodName) {
          var array = clone.slice(),
              expected = clone.slice(1, -1).reverse(),
              actual = _(array)[methodName](identity).thru(_.compact).reverse().value();

          assert.deepEqual(actual, expected);

          array = clone.slice();
          actual = _(array).thru(_.compact)[methodName](identity).pull(1).push(3).reverse().value();

          assert.deepEqual(actual, [3].concat(expected.slice(0, -1)));
        });
      });
    }
    else {
      skipAssert(assert, 8);
    }
  });

  QUnit.test('should track the `__chain__` value of a wrapper', function(assert) {
    assert.expect(6);

    if (!isNpm) {
      lodashStable.times(2, function(index) {
        var array = (index ? largeArray : smallArray).slice(),
            expected = array.slice().reverse(),
            wrapped = _(array).chain().reverse().head();

        // Explicit chaining keeps the result wrapped until `.value()`.
        assert.ok(wrapped instanceof _);
        assert.strictEqual(wrapped.value(), _.head(expected));
        assert.deepEqual(array, expected);
      });
    }
    else {
      skipAssert(assert, 6);
    }
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('round methods');

// Shared tests for `_.ceil`, `_.floor`, and `_.round`, which differ only in
// rounding direction (`isCeil`/`isFloor` select the expected results).
lodashStable.each(['ceil', 'floor', 'round'], function(methodName) {
  var func = _[methodName],
      isCeil = methodName == 'ceil',
      isFloor = methodName == 'floor';

  QUnit.test('`_.' + methodName + '` should return a rounded number without a precision', function(assert) {
    assert.expect(1);

    var actual = func(4.006);
    assert.strictEqual(actual, isCeil ? 5 : 4);
  });

  QUnit.test('`_.' + methodName + '` should work with a precision of `0`', function(assert) {
    assert.expect(1);

    var actual = func(4.006, 0);
    assert.strictEqual(actual, isCeil ? 5 : 4);
  });

  QUnit.test('`_.' + methodName + '` should work with a positive precision', function(assert) {
    assert.expect(2);

    var actual = func(4.016, 2);
    assert.strictEqual(actual, isFloor ? 4.01 : 4.02);

    // Values already within the precision are returned unchanged.
    actual = func(4.1, 2);
    assert.strictEqual(actual, 4.1);
  });

  QUnit.test('`_.' + methodName + '` should work with a negative precision', function(assert) {
    assert.expect(1);

    var actual = func(4160, -2);
    assert.strictEqual(actual, isFloor ? 4100 : 4200);
  });

  QUnit.test('`_.' + methodName + '` should coerce `precision` to an integer', function(assert) {
    assert.expect(3);

    // `NaN` precision coerces to `0`.
    var actual = func(4.006, NaN);
    assert.strictEqual(actual, isCeil ? 5 : 4);

    var expected = isFloor ? 4.01 : 4.02;

    // Fractional and numeric-string precisions are converted to integers.
    actual = func(4.016, 2.6);
    assert.strictEqual(actual, expected);

    actual = func(4.016, '+2');
    assert.strictEqual(actual, expected);
  });

  QUnit.test('`_.' + methodName + '` should work with exponential notation and `precision`', function(assert) {
    assert.expect(3);

    var actual = func(5e1, 2);
    assert.deepEqual(actual, 50);

    // Malformed exponential strings coerce to `NaN`.
    actual = func('5e', 1);
    assert.deepEqual(actual, NaN);

    actual = func('5e1e1', 1);
    assert.deepEqual(actual, NaN);
  });

  QUnit.test('`_.' + methodName + '` should preserve the sign of `0`', function(assert) {
    assert.expect(1);

    var values = [[0], [-0], ['0'], ['-0'], [0, 1], [-0, 1], ['0', 1], ['-0', 1]],
        expected = [Infinity, -Infinity, Infinity, -Infinity, Infinity, -Infinity, Infinity, -Infinity];

    // `1 / -0` is `-Infinity`, which distinguishes `-0` from `0`.
    var actual = lodashStable.map(values, function(args) {
      return 1 / func.apply(undefined, args);
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('`_.' + methodName + '` should not return `NaN` for large `precision` values', function(assert) {
    assert.expect(1);

    // Fixed: this used to call `_.round` directly, so `_.ceil`/`_.floor`
    // were never exercised despite the parameterized test title.
    var results = [
      func(10.0000001, 1000),
      func(MAX_SAFE_INTEGER, 293)
    ];

    var expected = lodashStable.map(results, stubFalse),
        actual = lodashStable.map(results, lodashStable.isNaN);

    assert.deepEqual(actual, expected);
  });

  // NOTE: the three tests below originally padded assertions 5-6 with no-op
  // ternaries (`isFloor ? Infinity : Infinity`); the dead branches are
  // removed while the assertion counts and calls are preserved.
  QUnit.test('`_.' + methodName + '` should return `Infinity` given `Infinity` regardless of `precision`', function(assert) {
    assert.expect(6);

    var actual = func(Infinity);
    assert.strictEqual(actual, Infinity);

    actual = func(Infinity, 0);
    assert.strictEqual(actual, Infinity);

    actual = func(Infinity, 2);
    assert.strictEqual(actual, Infinity);

    actual = func(Infinity, -2);
    assert.strictEqual(actual, Infinity);

    actual = func(Infinity, 2);
    assert.strictEqual(actual, Infinity);

    actual = func(Infinity, 2);
    assert.strictEqual(actual, Infinity);
  });

  QUnit.test('`_.' + methodName + '` should return `-Infinity` given `-Infinity` regardless of `precision`', function(assert) {
    assert.expect(6);

    var actual = func(-Infinity);
    assert.strictEqual(actual, -Infinity);

    actual = func(-Infinity, 0);
    assert.strictEqual(actual, -Infinity);

    actual = func(-Infinity, 2);
    assert.strictEqual(actual, -Infinity);

    actual = func(-Infinity, -2);
    assert.strictEqual(actual, -Infinity);

    actual = func(-Infinity, 2);
    assert.strictEqual(actual, -Infinity);

    actual = func(-Infinity, 2);
    assert.strictEqual(actual, -Infinity);
  });

  QUnit.test('`_.' + methodName + '` should return `NaN` given `NaN` regardless of `precision`', function(assert) {
    assert.expect(6);

    var actual = func(NaN);
    assert.deepEqual(actual, NaN);

    actual = func(NaN, 0);
    assert.deepEqual(actual, NaN);

    actual = func(NaN, 2);
    assert.deepEqual(actual, NaN);

    actual = func(NaN, -2);
    assert.deepEqual(actual, NaN);

    actual = func(NaN, 2);
    assert.deepEqual(actual, NaN);

    actual = func(NaN, 2);
    assert.deepEqual(actual, NaN);
  });
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.runInContext');

(function() {
  // `runInContext` should fill in any `context` pieces the caller omits.
  QUnit.test('should not require a fully populated `context` object', function(assert) {
    assert.expect(1);

    if (isModularize) {
      skipAssert(assert);
      return;
    }
    // Supply a synchronous `setTimeout` so `delay` fires immediately.
    var invoked = false,
        lodash = _.runInContext({
          'setTimeout': function(func) { func(); }
        });

    lodash.delay(function() { invoked = true; }, 32);
    assert.ok(invoked);
  });

  QUnit.test('should use a zeroed `_.uniqueId` counter', function(assert) {
    assert.expect(3);

    if (isModularize) {
      skipAssert(assert, 3);
      return;
    }
    // Advance the global counter, then build a fresh context whose
    // counter must restart from zero.
    lodashStable.times(2, _.uniqueId);

    var previous = Number(_.uniqueId()),
        fresh = _.runInContext();

    assert.ok(_.uniqueId() > previous);

    var freshId = fresh.uniqueId();
    assert.strictEqual(freshId, '1');
    assert.ok(freshId < previous);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.sample');

(function() {
  var nums = [1, 2, 3];

  // A sampled element always comes from the collection itself.
  QUnit.test('should return a random element', function(assert) {
    assert.expect(1);

    assert.ok(lodashStable.includes(nums, _.sample(nums)));
  });

  // Sampling anything empty yields `undefined`, never a throw.
  QUnit.test('should return `undefined` when sampling empty collections', function(assert) {
    assert.expect(1);

    var expected = lodashStable.map(empties, noop);

    var actual = lodashStable.transform(empties, function(result, value) {
      try {
        result.push(_.sample(value));
      } catch (e) {}
    });

    assert.deepEqual(actual, expected);
  });

  // Objects are sampled over their values.
  QUnit.test('should sample an object', function(assert) {
    assert.expect(1);

    var object = { 'a': 1, 'b': 2, 'c': 3 };
    assert.ok(lodashStable.includes(nums, _.sample(object)));
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.sampleSize');

(function() {
  var array = [1, 2, 3];

  QUnit.test('should return an array of random elements', function(assert) {
    assert.expect(2);

    var actual = _.sampleSize(array, 2);

    assert.strictEqual(actual.length, 2);
    // Every sampled element must come from `array`.
    assert.deepEqual(lodashStable.difference(actual, array), []);
  });

  QUnit.test('should contain elements of the collection', function(assert) {
    assert.expect(1);

    var actual = _.sampleSize(array, array.length).sort();

    assert.deepEqual(actual, array);
  });

  QUnit.test('should treat falsey `size` values, except `undefined`, as `0`', function(assert) {
    assert.expect(1);

    // An omitted/`undefined` size yields one element; other falsey sizes
    // yield an empty result.
    var expected = lodashStable.map(falsey, function(value) {
      return value === undefined ? ['a'] : [];
    });

    var actual = lodashStable.map(falsey, function(size, index) {
      // The zeroth iteration omits `size` entirely.
      return index ? _.sampleSize(['a'], size) : _.sampleSize(['a']);
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should return an empty array when `n` < `1` or `NaN`', function(assert) {
    assert.expect(3);

    lodashStable.each([0, -1, -Infinity], function(n) {
      assert.deepEqual(_.sampleSize(array, n), []);
    });
  });

  QUnit.test('should return all elements when `n` >= `length`', function(assert) {
    assert.expect(4);

    lodashStable.each([3, 4, Math.pow(2, 32), Infinity], function(n) {
      var actual = _.sampleSize(array, n).sort();
      assert.deepEqual(actual, array);
    });
  });

  QUnit.test('should coerce `n` to an integer', function(assert) {
    assert.expect(1);

    var actual = _.sampleSize(array, 1.6);
    assert.strictEqual(actual.length, 1);
  });

  QUnit.test('should return an empty array for empty collections', function(assert) {
    assert.expect(1);

    var expected = lodashStable.map(empties, stubArray);

    var actual = lodashStable.transform(empties, function(result, value) {
      try {
        result.push(_.sampleSize(value, 1));
      } catch (e) {}
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should sample an object', function(assert) {
    assert.expect(2);

    var object = { 'a': 1, 'b': 2, 'c': 3 },
        actual = _.sampleSize(object, 2);

    assert.strictEqual(actual.length, 2);
    // Objects are sampled over their values.
    assert.deepEqual(lodashStable.difference(actual, lodashStable.values(object)), []);
  });

  QUnit.test('should work as an iteratee for methods like `_.map`', function(assert) {
    assert.expect(1);

    // The extra index/collection arguments from `map` must not be
    // mistaken for `n`.
    var actual = lodashStable.map([['a']], _.sampleSize);
    assert.deepEqual(actual, [['a']]);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.setWith');

(function() {
  // The customizer decides what intermediate objects get created.
  QUnit.test('should work with a `customizer` callback', function(assert) {
    assert.expect(1);

    var customizer = function(value) {
      return lodashStable.isObject(value) ? undefined : {};
    };
    var actual = _.setWith({ '0': {} }, '[0][1][2]', 3, customizer);

    assert.deepEqual(actual, { '0': { '1': { '2': 3 } } });
  });

  // An `undefined` customizer result falls back to default path creation.
  QUnit.test('should work with a `customizer` that returns `undefined`', function(assert) {
    assert.expect(1);

    assert.deepEqual(_.setWith({}, 'a[0].b.c', 4, noop), { 'a': [{ 'b': { 'c': 4 } }] });
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('set methods');

// Shared tests for `_.set`, `_.setWith`, `_.update`, and `_.updateWith`.
// The update methods take an updater function where the set methods take a
// plain value, so `updater` adapts to the method under test.
lodashStable.each(['update', 'updateWith', 'set', 'setWith'], function(methodName) {
  var func = _[methodName],
      isUpdate = /^update/.test(methodName);

  var oldValue = 1,
      value = 2,
      updater = isUpdate ? lodashStable.constant(value) : value;

  QUnit.test('`_.' + methodName + '` should set property values', function(assert) {
    assert.expect(4);

    lodashStable.each(['a', ['a']], function(path) {
      var object = { 'a': oldValue },
          actual = func(object, path, updater);

      assert.strictEqual(actual, object);
      assert.strictEqual(object.a, value);
    });
  });

  QUnit.test('`_.' + methodName + '` should preserve the sign of `0`', function(assert) {
    assert.expect(1);

    var props = [-0, Object(-0), 0, Object(0)],
        expected = lodashStable.map(props, lodashStable.constant(value));

    // `-0` keys must hit the `'-0'` slot, not `'0'`.
    var actual = lodashStable.map(props, function(key) {
      var object = { '-0': 'a', '0': 'b' };
      func(object, key, updater);
      return object[lodashStable.toString(key)];
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('`_.' + methodName + '` should set symbol keyed property values', function(assert) {
    assert.expect(2);

    if (Symbol) {
      // Fixed: this test previously invoked `_.unset` (copied from the
      // unset suite) instead of the method under test; it now exercises
      // `func` with a symbol key.
      var object = {},
          actual = func(object, symbol, updater);

      assert.strictEqual(actual, object);
      assert.strictEqual(object[symbol], value);
    }
    else {
      skipAssert(assert, 2);
    }
  });

  QUnit.test('`_.' + methodName + '` should set deep property values', function(assert) {
    assert.expect(4);

    lodashStable.each(['a.b', ['a', 'b']], function(path) {
      var object = { 'a': { 'b': oldValue } },
          actual = func(object, path, updater);

      assert.strictEqual(actual, object);
      assert.strictEqual(object.a.b, value);
    });
  });

  QUnit.test('`_.' + methodName + '` should set a key over a path', function(assert) {
    assert.expect(4);

    // An own `'a.b'` key wins over interpreting the string as a deep path.
    lodashStable.each(['a.b', ['a.b']], function(path) {
      var object = { 'a.b': oldValue },
          actual = func(object, path, updater);

      assert.strictEqual(actual, object);
      assert.deepEqual(object, { 'a.b': value });
    });
  });

  QUnit.test('`_.' + methodName + '` should not coerce array paths to strings', function(assert) {
    assert.expect(1);

    var object = { 'a,b,c': 1, 'a': { 'b': { 'c': 1 } } };
    func(object, ['a', 'b', 'c'], updater);
    assert.strictEqual(object.a.b.c, value);
  });

  QUnit.test('`_.' + methodName + '` should not ignore empty brackets', function(assert) {
    assert.expect(1);

    // `a[]` addresses the empty-string key under `a`.
    var object = {};
    func(object, 'a[]', updater);
    assert.deepEqual(object, { 'a': { '': value } });
  });

  QUnit.test('`_.' + methodName + '` should handle empty paths', function(assert) {
    assert.expect(4);

    // An empty string path writes the `''` key; an empty array path is a
    // no-op.
    lodashStable.each([['', ''], [[], ['']]], function(pair, index) {
      var object = {};

      func(object, pair[0], updater);
      assert.deepEqual(object, index ? {} : { '': value });

      func(object, pair[1], updater);
      assert.deepEqual(object, { '': value });
    });
  });

  QUnit.test('`_.' + methodName + '` should handle complex paths', function(assert) {
    assert.expect(2);

    // Fixed: the first nested key was `'1.23'`, which never matched the
    // `'-1.23'` segment both paths resolve to; the fixture now mirrors the
    // parsed keys exactly.
    var object = { 'a': { '-1.23': { '["b"]': { 'c': { "['d']": { '\ne\n': { 'f': { 'g': oldValue } } } } } } } };

    var paths = [
      'a[-1.23]["[\\"b\\"]"].c[\'[\\\'d\\\']\'][\ne\n][f].g',
      ['a', '-1.23', '["b"]', 'c', "['d']", '\ne\n', 'f', 'g']
    ];

    lodashStable.each(paths, function(path) {
      func(object, path, updater);
      assert.strictEqual(object.a[-1.23]['["b"]'].c["['d']"]['\ne\n'].f.g, value);
      // Reset for the next path form.
      object.a[-1.23]['["b"]'].c["['d']"]['\ne\n'].f.g = oldValue;
    });
  });

  QUnit.test('`_.' + methodName + '` should create parts of `path` that are missing', function(assert) {
    assert.expect(6);

    var object = {};

    lodashStable.each(['a[1].b.c', ['a', '1', 'b', 'c']], function(path) {
      var actual = func(object, path, updater);

      assert.strictEqual(actual, object);
      assert.deepEqual(actual, { 'a': [undefined, { 'b': { 'c': value } }] });
      // Index `1` is created without padding index `0`.
      assert.notOk('0' in object.a);

      delete object.a;
    });
  });

  QUnit.test('`_.' + methodName + '` should not error when `object` is nullish', function(assert) {
    assert.expect(1);

    // Nullish objects are returned untouched rather than throwing.
    var values = [null, undefined],
        expected = [[null, null], [undefined, undefined]];

    var actual = lodashStable.map(values, function(value) {
      try {
        return [func(value, 'a.b', updater), func(value, ['a', 'b'], updater)];
      } catch (e) {
        return e.message;
      }
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('`_.' + methodName + '` should overwrite primitives in the path', function(assert) {
    assert.expect(2);

    lodashStable.each(['a.b', ['a', 'b']], function(path) {
      var object = { 'a': '' };

      func(object, path, updater);
      assert.deepEqual(object, { 'a': { 'b': 2 } });
    });
  });

  QUnit.test('`_.' + methodName + '` should not create an array for missing non-index property names that start with numbers', function(assert) {
    assert.expect(1);

    var object = {};

    func(object, ['1a', '2b', '3c'], updater);
    assert.deepEqual(object, { '1a': { '2b': { '3c': value } } });
  });

  QUnit.test('`_.' + methodName + '` should not assign values that are the same as their destinations', function(assert) {
    assert.expect(4);

    lodashStable.each(['a', ['a'], { 'a': 1 }, NaN], function(value) {
      var object = {},
          pass = true,
          updater = isUpdate ? lodashStable.constant(value) : value;

      // A setter that flips `pass` detects any unnecessary assignment.
      defineProperty(object, 'a', {
        'configurable': true,
        'enumerable': true,
        'get': lodashStable.constant(value),
        'set': function() { pass = false; }
      });

      func(object, 'a', updater);
      assert.ok(pass);
    });
  });
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.shuffle');

(function() {
  var array = [1, 2, 3],
      object = { 'a': 1, 'b': 2, 'c': 3 };

  // Shuffling never mutates; it always allocates a new array.
  QUnit.test('should return a new array', function(assert) {
    assert.expect(1);

    assert.notStrictEqual(_.shuffle(array), array);
  });

  QUnit.test('should contain the same elements after a collection is shuffled', function(assert) {
    assert.expect(2);

    assert.deepEqual(_.shuffle(array).sort(), array);
    // Objects are shuffled over their values.
    assert.deepEqual(_.shuffle(object).sort(), array);
  });

  // Over many runs a two-element shuffle must produce both orderings.
  QUnit.test('should shuffle small collections', function(assert) {
    assert.expect(1);

    var runs = lodashStable.times(1000, function() {
      return _.shuffle([1, 2]);
    });

    assert.deepEqual(lodashStable.sortBy(lodashStable.uniqBy(runs, String), '0'), [[1, 2], [2, 1]]);
  });

  QUnit.test('should treat number values for `collection` as empty', function(assert) {
    assert.expect(1);

    assert.deepEqual(_.shuffle(1), []);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.size');

(function() {
  var array = [1, 2, 3];

  QUnit.test('should return the number of own enumerable string keyed properties of an object', function(assert) {
    assert.expect(1);

    assert.strictEqual(_.size({ 'one': 1, 'two': 2, 'three': 3 }), 3);
  });

  QUnit.test('should return the length of an array', function(assert) {
    assert.expect(1);

    assert.strictEqual(_.size(array), 3);
  });

  QUnit.test('should accept a falsey `object`', function(assert) {
    assert.expect(1);

    var expected = lodashStable.map(falsey, stubZero);

    var actual = lodashStable.map(falsey, function(object, index) {
      try {
        // The zeroth iteration calls `_.size()` with no arguments at all.
        return index ? _.size(object) : _.size();
      } catch (e) {}
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should work with `arguments` objects', function(assert) {
    assert.expect(1);

    assert.strictEqual(_.size(args), 3);
  });

  QUnit.test('should work with jQuery/MooTools DOM query collections', function(assert) {
    assert.expect(1);

    // `Foo` mimics an array-like DOM collection: numeric keys plus a
    // `length` and a `splice` borrowed from `Array.prototype`.
    function Foo(elements) {
      push.apply(this, elements);
    }
    Foo.prototype = { 'length': 0, 'splice': arrayProto.splice };

    assert.strictEqual(_.size(new Foo(array)), 3);
  });

  QUnit.test('should work with maps', function(assert) {
    assert.expect(2);

    if (Map) {
      // `realm.map` covers a map constructed in another realm.
      lodashStable.each([new Map, realm.map], function(map) {
        map.set('a', 1);
        map.set('b', 2);
        assert.strictEqual(_.size(map), 2);
        map.clear();
      });
    }
    else {
      skipAssert(assert, 2);
    }
  });

  QUnit.test('should work with sets', function(assert) {
    assert.expect(2);

    if (Set) {
      lodashStable.each([new Set, realm.set], function(set) {
        set.add(1);
        set.add(2);
        assert.strictEqual(_.size(set), 2);
        set.clear();
      });
    }
    else {
      skipAssert(assert, 2);
    }
  });

  // Objects with an invalid `length` (negative, beyond MAX_SAFE_INTEGER,
  // or non-numeric) are not array-like, so their keys are counted instead.
  QUnit.test('should not treat objects with negative lengths as array-like', function(assert) {
    assert.expect(1);

    assert.strictEqual(_.size({ 'length': -1 }), 1);
  });

  QUnit.test('should not treat objects with lengths larger than `MAX_SAFE_INTEGER` as array-like', function(assert) {
    assert.expect(1);

    assert.strictEqual(_.size({ 'length': MAX_SAFE_INTEGER + 1 }), 1);
  });

  QUnit.test('should not treat objects with non-number lengths as array-like', function(assert) {
    assert.expect(1);

    assert.strictEqual(_.size({ 'length': '0' }), 1);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.slice');

// `_.slice` mirrors `Array#slice` semantics (including negative and
// out-of-range indexes) but also participates in lazy sequences.
(function() {
  var array = [1, 2, 3];

  QUnit.test('should use a default `start` of `0` and a default `end` of `length`', function(assert) {
    assert.expect(2);

    var actual = _.slice(array);

    assert.deepEqual(actual, array);
    // A shallow copy is returned, never the original reference.
    assert.notStrictEqual(actual, array);
  });

  QUnit.test('should work with a positive `start`', function(assert) {
    assert.expect(2);

    assert.deepEqual(_.slice(array, 1), [2, 3]);
    assert.deepEqual(_.slice(array, 1, 3), [2, 3]);
  });

  QUnit.test('should work with a `start` >= `length`', function(assert) {
    assert.expect(4);

    lodashStable.each([3, 4, Math.pow(2, 32), Infinity], function(start) {
      assert.deepEqual(_.slice(array, start), []);
    });
  });

  QUnit.test('should treat falsey `start` values as `0`', function(assert) {
    assert.expect(1);

    var expected = lodashStable.map(falsey, lodashStable.constant(array));

    var actual = lodashStable.map(falsey, function(start) {
      return _.slice(array, start);
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should work with a negative `start`', function(assert) {
    assert.expect(1);

    assert.deepEqual(_.slice(array, -1), [3]);
  });

  QUnit.test('should work with a negative `start` <= negative `length`', function(assert) {
    assert.expect(3);

    lodashStable.each([-3, -4, -Infinity], function(start) {
      assert.deepEqual(_.slice(array, start), array);
    });
  });

  QUnit.test('should work with `start` >= `end`', function(assert) {
    assert.expect(2);

    lodashStable.each([2, 3], function(start) {
      assert.deepEqual(_.slice(array, start, 2), []);
    });
  });

  QUnit.test('should work with a positive `end`', function(assert) {
    assert.expect(1);

    assert.deepEqual(_.slice(array, 0, 1), [1]);
  });

  QUnit.test('should work with a `end` >= `length`', function(assert) {
    assert.expect(4);

    lodashStable.each([3, 4, Math.pow(2, 32), Infinity], function(end) {
      assert.deepEqual(_.slice(array, 0, end), array);
    });
  });

  QUnit.test('should treat falsey `end` values, except `undefined`, as `0`', function(assert) {
    assert.expect(1);

    var expected = lodashStable.map(falsey, function(value) {
      return value === undefined ? array : [];
    });

    var actual = lodashStable.map(falsey, function(end, index) {
      // The zeroth iteration omits `end` entirely.
      return index ? _.slice(array, 0, end) : _.slice(array, 0);
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should work with a negative `end`', function(assert) {
    assert.expect(1);

    assert.deepEqual(_.slice(array, 0, -1), [1, 2]);
  });

  QUnit.test('should work with a negative `end` <= negative `length`', function(assert) {
    assert.expect(3);

    lodashStable.each([-3, -4, -Infinity], function(end) {
      assert.deepEqual(_.slice(array, 0, end), []);
    });
  });

  QUnit.test('should coerce `start` and `end` to integers', function(assert) {
    assert.expect(1);

    var positions = [[0.1, 1.6], ['0', 1], [0, '1'], ['1'], [NaN, 1], [1, NaN]];

    var actual = lodashStable.map(positions, function(pos) {
      return _.slice.apply(_, [array].concat(pos));
    });

    assert.deepEqual(actual, [[1], [1], [1], [2, 3], [1], []]);
  });

  QUnit.test('should work as an iteratee for methods like `_.map`', function(assert) {
    assert.expect(2);

    // The extra index/collection arguments from `map` must not be
    // mistaken for `start`/`end`.
    var array = [[1], [2, 3]],
        actual = lodashStable.map(array, _.slice);

    assert.deepEqual(actual, array);
    assert.notStrictEqual(actual, array);
  });

  QUnit.test('should work in a lazy sequence', function(assert) {
    assert.expect(38);

    if (!isNpm) {
      var array = lodashStable.range(1, LARGE_ARRAY_SIZE + 1),
          length = array.length,
          wrapped = _(array);

      // Each lazily sliced chain must match native `Array#slice` for the
      // same arguments, across both `map` and `filter` pipelines.
      lodashStable.each(['map', 'filter'], function(methodName) {
        assert.deepEqual(wrapped[methodName]().slice(0, -1).value(), array.slice(0, -1));
        assert.deepEqual(wrapped[methodName]().slice(1).value(), array.slice(1));
        assert.deepEqual(wrapped[methodName]().slice(1, 3).value(), array.slice(1, 3));
        assert.deepEqual(wrapped[methodName]().slice(-1).value(), array.slice(-1));
        assert.deepEqual(wrapped[methodName]().slice(length).value(), array.slice(length));
        assert.deepEqual(wrapped[methodName]().slice(3, 2).value(), array.slice(3, 2));
        assert.deepEqual(wrapped[methodName]().slice(0, -length).value(), array.slice(0, -length));
        assert.deepEqual(wrapped[methodName]().slice(0, null).value(), array.slice(0, null));
        assert.deepEqual(wrapped[methodName]().slice(0, length).value(), array.slice(0, length));
        assert.deepEqual(wrapped[methodName]().slice(-length).value(), array.slice(-length));
        assert.deepEqual(wrapped[methodName]().slice(null).value(), array.slice(null));
        assert.deepEqual(wrapped[methodName]().slice(0, 1).value(), array.slice(0, 1));
        assert.deepEqual(wrapped[methodName]().slice(NaN, '1').value(), array.slice(NaN, '1'));
        assert.deepEqual(wrapped[methodName]().slice(0.1, 1.1).value(), array.slice(0.1, 1.1));
        assert.deepEqual(wrapped[methodName]().slice('0', 1).value(), array.slice('0', 1));
        assert.deepEqual(wrapped[methodName]().slice(0, '1').value(), array.slice(0, '1'));
        assert.deepEqual(wrapped[methodName]().slice('1').value(), array.slice('1'));
        assert.deepEqual(wrapped[methodName]().slice(NaN, 1).value(), array.slice(NaN, 1));
        assert.deepEqual(wrapped[methodName]().slice(1, NaN).value(), array.slice(1, NaN));
      });
    }
    else {
      skipAssert(assert, 38);
    }
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.some');

(function() {
  QUnit.test('should return `true` if `predicate` returns truthy for any element', function(assert) {
    assert.expect(2);

    assert.strictEqual(_.some([false, 1, ''], identity), true);
    assert.strictEqual(_.some([null, 'a', 0], identity), true);
  });

  QUnit.test('should return `false` for empty collections', function(assert) {
    assert.expect(1);

    var expected = lodashStable.map(empties, stubFalse);

    var actual = lodashStable.map(empties, function(value) {
      try {
        return _.some(value, identity);
      } catch (e) {}
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should return `true` as soon as `predicate` returns truthy', function(assert) {
    assert.expect(2);

    var count = 0;

    assert.strictEqual(_.some([null, true, null], function(value) {
      count++;
      return value;
    }), true);

    // Iteration short-circuits: the trailing `null` is never visited.
    assert.strictEqual(count, 2);
  });

  QUnit.test('should return `false` if `predicate` returns falsey for all elements', function(assert) {
    assert.expect(2);

    assert.strictEqual(_.some([false, false, false], identity), false);
    assert.strictEqual(_.some([null, 0, ''], identity), false);
  });

  QUnit.test('should use `_.identity` when `predicate` is nullish', function(assert) {
    assert.expect(2);

    // `[, null, undefined]` covers a hole, `null`, and `undefined`;
    // the zeroth iteration omits the predicate entirely.
    var values = [, null, undefined],
        expected = lodashStable.map(values, stubFalse);

    var actual = lodashStable.map(values, function(value, index) {
      var array = [0, 0];
      return index ? _.some(array, value) : _.some(array);
    });

    assert.deepEqual(actual, expected);

    expected = lodashStable.map(values, stubTrue);
    actual = lodashStable.map(values, function(value, index) {
      var array = [0, 1];
      return index ? _.some(array, value) : _.some(array);
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should work with `_.property` shorthands', function(assert) {
    assert.expect(2);

    var objects = [{ 'a': 0, 'b': 0 }, { 'a': 0, 'b': 1 }];
    assert.strictEqual(_.some(objects, 'a'), false);
    assert.strictEqual(_.some(objects, 'b'), true);
  });

  QUnit.test('should work with `_.matches` shorthands', function(assert) {
    assert.expect(2);

    var objects = [{ 'a': 0, 'b': 0 }, { 'a': 1, 'b': 1}];
    assert.strictEqual(_.some(objects, { 'a': 0 }), true);
    assert.strictEqual(_.some(objects, { 'b': 2 }), false);
  });

  QUnit.test('should work as an iteratee for methods like `_.map`', function(assert) {
    assert.expect(1);

    var actual = lodashStable.map([[1]], _.some);
    assert.deepEqual(actual, [true]);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.sortBy');

(function() {
  var objects = [
    { 'a': 'x', 'b': 3 },
    { 'a': 'y', 'b': 4 },
    { 'a': 'x', 'b': 1 },
    { 'a': 'y', 'b': 2 }
  ];

  QUnit.test('should sort in ascending order by `iteratee`', function(assert) {
    assert.expect(1);

    var actual = lodashStable.map(_.sortBy(objects, function(object) {
      return object.b;
    }), 'b');

    assert.deepEqual(actual, [1, 2, 3, 4]);
  });

  QUnit.test('should use `_.identity` when `iteratee` is nullish', function(assert) {
    assert.expect(1);

    // `[, null, undefined]` covers a hole, `null`, and `undefined`;
    // the zeroth iteration omits the iteratee entirely.
    var array = [3, 2, 1],
        values = [, null, undefined],
        expected = lodashStable.map(values, lodashStable.constant([1, 2, 3]));

    var actual = lodashStable.map(values, function(value, index) {
      return index ? _.sortBy(array, value) : _.sortBy(array);
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should work with `_.property` shorthands', function(assert) {
    assert.expect(1);

    // `undefined` entries sort to the end.
    var actual = lodashStable.map(_.sortBy(objects.concat(undefined), 'b'), 'b');
    assert.deepEqual(actual, [1, 2, 3, 4, undefined]);
  });

  QUnit.test('should work with an object for `collection`', function(assert) {
    assert.expect(1);

    // sin(3) < sin(1) < sin(2), so values sort to [3, 1, 2].
    var actual = _.sortBy({ 'a': 1, 'b': 2, 'c': 3 }, Math.sin);
    assert.deepEqual(actual, [3, 1, 2]);
  });

  QUnit.test('should move `NaN`, nullish, and symbol values to the end', function(assert) {
    assert.expect(2);

    // Expected tail order: symbols, then `null`s, `undefined`s, `NaN`s.
    var symbol1 = Symbol ? Symbol('a') : null,
        symbol2 = Symbol ? Symbol('b') : null,
        array = [NaN, undefined, null, 4, symbol1, null, 1, symbol2, undefined, 3, NaN, 2],
        expected = [1, 2, 3, 4, symbol1, symbol2, null, null, undefined, undefined, NaN, NaN];

    assert.deepEqual(_.sortBy(array), expected);

    array = [NaN, undefined, symbol1, null, 'd', null, 'a', symbol2, undefined, 'c', NaN, 'b'];
    expected = ['a', 'b', 'c', 'd', symbol1, symbol2, null, null, undefined, undefined, NaN, NaN];

    assert.deepEqual(_.sortBy(array), expected);
  });

  QUnit.test('should treat number values for `collection` as empty', function(assert) {
    assert.expect(1);

    assert.deepEqual(_.sortBy(1), []);
  });

  QUnit.test('should coerce arrays returned from `iteratee`', function(assert) {
    assert.expect(1);

    // The custom `toString` makes each array criterion compare by `a` only,
    // so ties on `a` keep their original relative order.
    var actual = _.sortBy(objects, function(object) {
      var result = [object.a, object.b];
      result.toString = function() { return String(this[0]); };
      return result;
    });

    assert.deepEqual(actual, [objects[0], objects[2], objects[1], objects[3]]);
  });

  QUnit.test('should work as an iteratee for methods like `_.map`', function(assert) {
    assert.expect(1);

    var actual = lodashStable.map([[2, 1, 3], [3, 2, 1]], _.sortBy);
    assert.deepEqual(actual, [[1, 2, 3], [1, 2, 3]]);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('sortBy methods');

// Shared tests for `_.orderBy` and `_.sortBy`.
lodashStable.each(['orderBy', 'sortBy'], function(methodName) {
  var func = _[methodName];

  // Fixture for the stability check: `b` distinguishes otherwise-equal
  // pairs so any reordering of ties is detectable.
  function Pair(a, b, c) {
    this.a = a;
    this.b = b;
    this.c = c;
  }

  var objects = [
    { 'a': 'x', 'b': 3 },
    { 'a': 'y', 'b': 4 },
    { 'a': 'x', 'b': 1 },
    { 'a': 'y', 'b': 2 }
  ];

  var stableArray = [
    new Pair(1, 1, 1), new Pair(1, 2, 1),
    new Pair(1, 1, 1), new Pair(1, 2, 1),
    new Pair(1, 3, 1), new Pair(1, 4, 1),
    new Pair(1, 5, 1), new Pair(1, 6, 1),
    new Pair(2, 1, 2), new Pair(2, 2, 2),
    new Pair(2, 3, 2), new Pair(2, 4, 2),
    new Pair(2, 5, 2), new Pair(2, 6, 2),
    new Pair(undefined, 1, 1), new Pair(undefined, 2, 1),
    new Pair(undefined, 3, 1), new Pair(undefined, 4, 1),
    new Pair(undefined, 5, 1), new Pair(undefined, 6, 1)
  ];

  var stableObject = lodashStable.zipObject('abcdefghijklmnopqrst'.split(''), stableArray);

  QUnit.test('`_.' + methodName + '` should sort multiple properties in ascending order', function(assert) {
    assert.expect(1);

    var actual = func(objects, ['a', 'b']);
    assert.deepEqual(actual, [objects[2], objects[0], objects[3], objects[1]]);
  });

  QUnit.test('`_.' + methodName + '` should support iteratees', function(assert) {
    assert.expect(1);

    // Property shorthands and iteratee functions may be mixed.
    var actual = func(objects, ['a', function(object) { return object.b; }]);
    assert.deepEqual(actual, [objects[2], objects[0], objects[3], objects[1]]);
  });

  QUnit.test('`_.' + methodName + '` should perform a stable sort (test in IE > 8 and V8)', function(assert) {
    assert.expect(2);

    // The fixture is already ordered by (`a`, `c`), so a stable sort must
    // return it unchanged, preserving each tie group's `b` sequence.
    lodashStable.each([stableArray, stableObject], function(value, index) {
      var actual = func(value, ['a', 'c']);
      assert.deepEqual(actual, stableArray, index ? 'object' : 'array');
    });
  });

  QUnit.test('`_.' + methodName + '` should not error on nullish elements', function(assert) {
    assert.expect(1);

    try {
      var actual = func(objects.concat(null, undefined), ['a', 'b']);
    } catch (e) {}

    assert.deepEqual(actual, [objects[2], objects[0], objects[3], objects[1], null, undefined]);
  });

  QUnit.test('`_.' + methodName + '` should work as an iteratee for methods like `_.reduce`', function(assert) {
    assert.expect(3);

    var objects = [
      { 'a': 'x', '0': 3 },
      { 'a': 'y', '0': 4 },
      { 'a': 'x', '0': 1 },
      { 'a': 'y', '0': 2 }
    ];

    // The `partialRight` variant appends a bogus trailing argument, which
    // must be ignored when `func` is invoked as a reduce iteratee.
    var funcs = [func, lodashStable.partialRight(func, 'bogus')];

    lodashStable.each(['a', 0, [0]], function(props, index) {
      var expected = lodashStable.map(funcs, lodashStable.constant(
        index
          ? [objects[2], objects[3], objects[0], objects[1]]
          : [objects[0], objects[2], objects[1], objects[3]]
      ));

      var actual = lodashStable.map(funcs, function(func) {
        return lodashStable.reduce([props], func, objects);
      });

      assert.deepEqual(actual, expected);
    });
  });
});
/*--------------------------------------------------------------------------*/
QUnit.module('sortedIndex methods');
lodashStable.each(['sortedIndex', 'sortedLastIndex'], function(methodName) {
    // Both methods binary-search for the index at which `value` should be
    // inserted to keep the array sorted; `sortedIndex` returns the lowest
    // such index, `sortedLastIndex` the highest.
    var func = _[methodName],
        isSortedIndex = methodName == 'sortedIndex';
    QUnit.test('`_.' + methodName + '` should return the insert index', function(assert) {
        assert.expect(1);
        var array = [30, 50],
            values = [30, 40, 50],
            // Equal values insert before (sortedIndex) or after (sortedLastIndex).
            expected = isSortedIndex ? [0, 1, 1] : [1, 1, 2];
        var actual = lodashStable.map(values, function(value) {
            return func(array, value);
        });
        assert.deepEqual(actual, expected);
    });
    QUnit.test('`_.' + methodName + '` should work with an array of strings', function(assert) {
        assert.expect(1);
        var array = ['a', 'c'],
            values = ['a', 'b', 'c'],
            expected = isSortedIndex ? [0, 1, 1] : [1, 1, 2];
        var actual = lodashStable.map(values, function(value) {
            return func(array, value);
        });
        assert.deepEqual(actual, expected);
    });
    QUnit.test('`_.' + methodName + '` should accept a nullish `array` and a `value`', function(assert) {
        assert.expect(1);
        var values = [null, undefined],
            // A nullish array is treated as empty, so every insert index is `0`.
            expected = lodashStable.map(values, lodashStable.constant([0, 0, 0]));
        var actual = lodashStable.map(values, function(array) {
            return [func(array, 1), func(array, undefined), func(array, NaN)];
        });
        assert.deepEqual(actual, expected);
    });
    QUnit.test('`_.' + methodName + '` should align with `_.sortBy`', function(assert) {
        assert.expect(12);
        // `expected` spells out the ordering both methods must agree on with
        // `_.sortBy`: values, then `null`, `undefined`, and `NaN` at the end.
        var symbol1 = Symbol ? Symbol('a') : null,
            symbol2 = Symbol ? Symbol('b') : null,
            symbol3 = Symbol ? Symbol('c') : null,
            expected = [1, '2', {}, symbol1, symbol2, null, undefined, NaN, NaN];
        lodashStable.each([
            [NaN, symbol1, null, 1, '2', {}, symbol2, NaN, undefined],
            ['2', null, 1, symbol1, NaN, {}, NaN, symbol2, undefined]
        ], function(array) {
            assert.deepEqual(_.sortBy(array), expected);
            assert.strictEqual(func(expected, 3), 2);
            // When `Symbol` is unsupported, the placeholder `null`s shift the
            // expected indexes — hence the nested ternaries.
            assert.strictEqual(func(expected, symbol3), isSortedIndex ? 3 : (Symbol ? 5 : 6));
            assert.strictEqual(func(expected, null), isSortedIndex ? (Symbol ? 5 : 3) : 6);
            assert.strictEqual(func(expected, undefined), isSortedIndex ? 6 : 7);
            assert.strictEqual(func(expected, NaN), isSortedIndex ? 7 : 9);
        });
    });
    QUnit.test('`_.' + methodName + '` should align with `_.sortBy` for nulls', function(assert) {
        assert.expect(3);
        var array = [null, null];
        // Nulls compare after numbers and strings, so those insert at `0`.
        assert.strictEqual(func(array, null), isSortedIndex ? 0 : 2);
        assert.strictEqual(func(array, 1), 0);
        assert.strictEqual(func(array, 'a'), 0);
    });
    QUnit.test('`_.' + methodName + '` should align with `_.sortBy` for symbols', function(assert) {
        assert.expect(3);
        var symbol1 = Symbol ? Symbol('a') : null,
            symbol2 = Symbol ? Symbol('b') : null,
            symbol3 = Symbol ? Symbol('c') : null,
            array = [symbol1, symbol2];
        assert.strictEqual(func(array, symbol3), isSortedIndex ? 0 : 2);
        assert.strictEqual(func(array, 1), 0);
        assert.strictEqual(func(array, 'a'), 0);
    });
});
/*--------------------------------------------------------------------------*/
QUnit.module('sortedIndexBy methods');
lodashStable.each(['sortedIndexBy', 'sortedLastIndexBy'], function(methodName) {
    // Iteratee-accepting variants of the sortedIndex methods.
    var func = _[methodName],
        isSortedIndexBy = methodName == 'sortedIndexBy';
    QUnit.test('`_.' + methodName + '` should provide correct `iteratee` arguments', function(assert) {
        assert.expect(1);
        var args;
        // Fix: the iteratee parameter was named `assert`, shadowing the QUnit
        // `assert` object of the enclosing test; it is unused, so call it `value`.
        func([30, 50], 40, function(value) {
            args || (args = slice.call(arguments));
        });
        // The iteratee receives only the value being evaluated.
        assert.deepEqual(args, [40]);
    });
    QUnit.test('`_.' + methodName + '` should work with `_.property` shorthands', function(assert) {
        assert.expect(1);
        var objects = [{ 'x': 30 }, { 'x': 50 }],
            actual = func(objects, { 'x': 40 }, 'x');
        assert.strictEqual(actual, 1);
    });
    QUnit.test('`_.' + methodName + '` should avoid calling iteratee when length is 0', function(assert) {
        // Fix: declare the assertion count, as every sibling test does.
        assert.expect(1);
        var objects = [],
            // The iteratee throws, so this test fails if it is ever invoked.
            iteratee = function() {
                throw new Error;
            },
            actual = func(objects, { 'x': 50 }, iteratee);
        assert.strictEqual(actual, 0);
    });
    QUnit.test('`_.' + methodName + '` should support arrays larger than `MAX_ARRAY_LENGTH / 2`', function(assert) {
        assert.expect(12);
        lodashStable.each([Math.ceil(MAX_ARRAY_LENGTH / 2), MAX_ARRAY_LENGTH], function(length) {
            var array = [],
                values = [MAX_ARRAY_LENGTH, NaN, undefined];
            array.length = length;
            lodashStable.each(values, function(value) {
                var steps = 0;
                var actual = func(array, value, function(value) {
                    steps++;
                    return value;
                });
                var expected = (isSortedIndexBy ? !lodashStable.isNaN(value) : lodashStable.isFinite(value))
                    ? 0
                    : Math.min(length, MAX_ARRAY_INDEX);
                // A binary search over an array this large takes 32-33 probes.
                assert.ok(steps == 32 || steps == 33);
                assert.strictEqual(actual, expected);
            });
        });
    });
});
/*--------------------------------------------------------------------------*/
QUnit.module('sortedIndexOf methods');
lodashStable.each(['sortedIndexOf', 'sortedLastIndexOf'], function(methodName) {
    // Each method locates `value` in a sorted array via binary search;
    // `sortedIndexOf` yields the first match, `sortedLastIndexOf` the last.
    var isSortedIndexOf = methodName == 'sortedIndexOf';
    var func = _[methodName];
    QUnit.test('`_.' + methodName + '` should perform a binary search', function(assert) {
        assert.expect(1);
        var sorted = [4, 4, 5, 5, 6, 6],
            expected = isSortedIndexOf ? 2 : 3;
        assert.deepEqual(func(sorted, 5), expected);
    });
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.sortedUniq');
(function() {
    QUnit.test('should return unique values of a sorted array', function(assert) {
        assert.expect(3);
        // Inputs with no, some, and many adjacent duplicates all dedupe
        // to the same result.
        var arrays = [[1, 2, 3], [1, 1, 2, 2, 3], [1, 2, 3, 3, 3, 3, 3]];
        lodashStable.each(arrays, function(array) {
            assert.deepEqual(_.sortedUniq(array), [1, 2, 3]);
        });
    });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.split');
(function() {
    QUnit.test('should split a string by `separator`', function(assert) {
        assert.expect(3);
        var string = 'abcde';
        assert.deepEqual(_.split(string, 'c'), ['ab', 'de']);
        assert.deepEqual(_.split(string, /[bd]/), ['a', 'c', 'e']);
        // The third argument limits the number of returned segments.
        assert.deepEqual(_.split(string, '', 2), ['a', 'b']);
    });
    QUnit.test('should return an array containing an empty string for empty values', function(assert) {
        assert.expect(1);
        // The leading hole in `values` lines up with index 0, which drives
        // the zero-argument `_.split()` call below.
        var values = [, null, undefined, ''],
            expected = lodashStable.map(values, lodashStable.constant(['']));
        var actual = lodashStable.map(values, function(value, index) {
            return index ? _.split(value) : _.split();
        });
        assert.deepEqual(actual, expected);
    });
    QUnit.test('should work as an iteratee for methods like `_.map`', function(assert) {
        assert.expect(1);
        // `_.map` passes extra arguments (index, collection) that `_.split`
        // must not mistake for `separator` and `limit`.
        var strings = ['abc', 'def', 'ghi'],
            actual = lodashStable.map(strings, _.split);
        assert.deepEqual(actual, [['abc'], ['def'], ['ghi']]);
    });
    QUnit.test('should allow mixed string and array prototype methods', function(assert) {
        assert.expect(1);
        if (!isNpm) {
            // `split` returns an array, whose `join` is then chained.
            var wrapped = _('abc');
            assert.strictEqual(wrapped.split('b').join(','), 'a,c');
        }
        else {
            skipAssert(assert);
        }
    });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.spread');
(function() {
    // Returns its received arguments so tests can inspect what was spread.
    function fn(a, b, c) {
        return slice.call(arguments);
    }
    QUnit.test('should spread arguments to `func`', function(assert) {
        assert.expect(2);
        var spread = _.spread(fn),
            expected = [1, 2];
        assert.deepEqual(spread([1, 2]), expected);
        // Arguments after the spread array are ignored.
        assert.deepEqual(spread([1, 2], 3), expected);
    });
    QUnit.test('should accept a falsey `array`', function(assert) {
        assert.expect(1);
        var spread = _.spread(stubTrue),
            expected = lodashStable.map(falsey, stubTrue);
        var actual = lodashStable.map(falsey, function(array, index) {
            try {
                return index ? spread(array) : spread();
            } catch (e) {}
        });
        assert.deepEqual(actual, expected);
    });
    QUnit.test('should work with `start`', function(assert) {
        assert.expect(2);
        // With `start` of 1, arguments before index 1 pass through as-is and
        // the array at index 1 is spread.
        var spread = _.spread(fn, 1),
            expected = [1, 2, 3];
        assert.deepEqual(spread(1, [2, 3]), expected);
        assert.deepEqual(spread(1, [2, 3], 4), expected);
    });
    QUnit.test('should treat `start` as `0` for negative or `NaN` values', function(assert) {
        assert.expect(1);
        var values = [-1, NaN, 'a'],
            expected = lodashStable.map(values, lodashStable.constant([1, 2]));
        var actual = lodashStable.map(values, function(value) {
            var spread = _.spread(fn, value);
            return spread([1, 2]);
        });
        assert.deepEqual(actual, expected);
    });
    QUnit.test('should coerce `start` to an integer', function(assert) {
        assert.expect(2);
        // `1.6` truncates to `1`.
        var spread = _.spread(fn, 1.6),
            expected = [1, 2, 3];
        assert.deepEqual(spread(1, [2, 3]), expected);
        assert.deepEqual(spread(1, [2, 3], 4), expected);
    });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.startCase');
(function() {
    QUnit.test('should uppercase only the first character of each word', function(assert) {
        assert.expect(3);
        // Words already in all-caps stay intact; only each word's first
        // character is forced to uppercase.
        lodashStable.each([
            ['--foo-bar--', 'Foo Bar'],
            ['fooBar', 'Foo Bar'],
            ['__FOO_BAR__', 'FOO BAR']
        ], function(pair) {
            assert.strictEqual(_.startCase(pair[0]), pair[1]);
        });
    });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.startsWith');
(function() {
    var string = 'abc';
    QUnit.test('should return `true` if a string starts with `target`', function(assert) {
        assert.expect(1);
        assert.strictEqual(_.startsWith(string, 'a'), true);
    });
    QUnit.test('should return `false` if a string does not start with `target`', function(assert) {
        assert.expect(1);
        assert.strictEqual(_.startsWith(string, 'b'), false);
    });
    QUnit.test('should work with a `position`', function(assert) {
        assert.expect(1);
        // `position` shifts where the match is anchored.
        assert.strictEqual(_.startsWith(string, 'b', 1), true);
    });
    QUnit.test('should work with `position` >= `length`', function(assert) {
        assert.expect(4);
        lodashStable.each([3, 5, MAX_SAFE_INTEGER, Infinity], function(position) {
            assert.strictEqual(_.startsWith(string, 'a', position), false);
        });
    });
    QUnit.test('should treat falsey `position` values as `0`', function(assert) {
        assert.expect(1);
        var expected = lodashStable.map(falsey, stubTrue);
        var actual = lodashStable.map(falsey, function(position) {
            return _.startsWith(string, 'a', position);
        });
        assert.deepEqual(actual, expected);
    });
    QUnit.test('should treat a negative `position` as `0`', function(assert) {
        assert.expect(6);
        lodashStable.each([-1, -3, -Infinity], function(position) {
            assert.strictEqual(_.startsWith(string, 'a', position), true);
            assert.strictEqual(_.startsWith(string, 'b', position), false);
        });
    });
    QUnit.test('should coerce `position` to an integer', function(assert) {
        assert.expect(1);
        // `1.2` truncates to `1`, anchoring the match at 'b'.
        assert.strictEqual(_.startsWith(string, 'bc', 1.2), true);
    });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.startsWith and lodash.endsWith');
lodashStable.each(['startsWith', 'endsWith'], function(methodName) {
    // Coercion behavior shared by both methods; `chr` is the character each
    // method is expected to find at its end of the string.
    var func = _[methodName],
        isStartsWith = methodName == 'startsWith';
    var string = 'abc',
        chr = isStartsWith ? 'a' : 'c';
    QUnit.test('`_.' + methodName + '` should coerce `string` to a string', function(assert) {
        assert.expect(2);
        assert.strictEqual(func(Object(string), chr), true);
        assert.strictEqual(func({ 'toString': lodashStable.constant(string) }, chr), true);
    });
    QUnit.test('`_.' + methodName + '` should coerce `target` to a string', function(assert) {
        assert.expect(2);
        assert.strictEqual(func(string, Object(chr)), true);
        assert.strictEqual(func(string, { 'toString': lodashStable.constant(chr) }), true);
    });
    QUnit.test('`_.' + methodName + '` should coerce `position` to a number', function(assert) {
        assert.expect(2);
        var position = isStartsWith ? 1 : 2;
        assert.strictEqual(func(string, 'b', Object(position)), true);
        assert.strictEqual(func(string, 'b', { 'toString': lodashStable.constant(String(position)) }), true);
    });
    // Fix: prefix the title with the method name like the sibling tests;
    // without it this loop registered two tests with the same name.
    QUnit.test('`_.' + methodName + '` should return `true` when `target` is an empty string regardless of `position`', function(assert) {
        assert.expect(1);
        var positions = [-Infinity, NaN, -3, -1, 0, 1, 2, 3, 5, MAX_SAFE_INTEGER, Infinity];
        assert.ok(lodashStable.every(positions, function(position) {
            return func(string, '', position);
        }));
    });
});
/*--------------------------------------------------------------------------*/
QUnit.module('stub methods');
lodashStable.each(['noop', 'stubTrue', 'stubFalse', 'stubArray', 'stubObject', 'stubString'], function(methodName) {
    var func = _[methodName];
    // Maps each stub to [its constant return value, the title fragment].
    var pair = ({
        'stubArray': [[], 'an empty array'],
        'stubFalse': [false, '`false`'],
        'stubObject': [{}, 'an empty object'],
        'stubString': ['', 'an empty string'],
        'stubTrue': [true, '`true`'],
        'noop': [undefined, '`undefined`']
    })[methodName];
    // `Array(2)` prepends two sparse slots; their indexes (0 and 1) trigger
    // the zero-argument and `this`-bound calls in the map callback below.
    var values = Array(2).concat(empties, true, 1, 'a'),
        expected = lodashStable.map(values, lodashStable.constant(pair[0]));
    QUnit.test('`_.' + methodName + '` should return ' + pair[1], function(assert) {
        assert.expect(1);
        var actual = lodashStable.map(values, function(value, index) {
            if (index < 2) {
                return index ? func.call({}) : func();
            }
            return func(value);
        });
        assert.deepEqual(actual, expected);
    });
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.subtract');
(function() {
    QUnit.test('should subtract two numbers', function(assert) {
        assert.expect(3);
        // Sign handling: positive minus positive, negative minus positive,
        // and negative minus negative.
        lodashStable.each([
            [6, 4, 2],
            [-6, 4, -10],
            [-6, -4, -2]
        ], function(triple) {
            assert.strictEqual(_.subtract(triple[0], triple[1]), triple[2]);
        });
    });
    QUnit.test('should coerce arguments to numbers', function(assert) {
        assert.expect(2);
        // Numeric strings subtract like numbers; non-numeric strings coerce
        // to `NaN` (compared via `deepEqual` since `NaN !== NaN`).
        assert.strictEqual(_.subtract('6', '4'), 2);
        assert.deepEqual(_.subtract('x', 'y'), NaN);
    });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('math operator methods');
lodashStable.each(['add', 'divide', 'multiply', 'subtract'], function(methodName) {
    var func = _[methodName],
        // add/subtract default to `0`; divide/multiply default to `1`.
        isAddSub = methodName == 'add' || methodName == 'subtract';
    QUnit.test('`_.' + methodName + '` should return `' + (isAddSub ? 0 : 1) + '` when no arguments are given', function(assert) {
        assert.expect(1);
        assert.strictEqual(func(), isAddSub ? 0 : 1);
    });
    QUnit.test('`_.' + methodName + '` should work with only one defined argument', function(assert) {
        assert.expect(3);
        // A lone defined operand is returned unmodified.
        assert.strictEqual(func(6), 6);
        assert.strictEqual(func(6, undefined), 6);
        assert.strictEqual(func(undefined, 4), 4);
    });
    QUnit.test('`_.' + methodName + '` should preserve the sign of `0`', function(assert) {
        assert.expect(2);
        var values = [0, '0', -0, '-0'],
            // `1 / result` distinguishes `0` (Infinity) from `-0` (-Infinity).
            expected = [[0, Infinity], ['0', Infinity], [-0, -Infinity], ['-0', -Infinity]];
        lodashStable.times(2, function(index) {
            var actual = lodashStable.map(values, function(value) {
                // First pass: value as sole argument; second: as second argument.
                var result = index ? func(undefined, value) : func(value);
                return [result, 1 / result];
            });
            assert.deepEqual(actual, expected);
        });
    });
    QUnit.test('`_.' + methodName + '` should convert objects to `NaN`', function(assert) {
        assert.expect(2);
        assert.deepEqual(func(0, {}), NaN);
        assert.deepEqual(func({}, 0), NaN);
    });
    QUnit.test('`_.' + methodName + '` should convert symbols to `NaN`', function(assert) {
        assert.expect(2);
        if (Symbol) {
            assert.deepEqual(func(0, symbol), NaN);
            assert.deepEqual(func(symbol, 0), NaN);
        }
        else {
            skipAssert(assert, 2);
        }
    });
    QUnit.test('`_.' + methodName + '` should return an unwrapped value when implicitly chaining', function(assert) {
        assert.expect(1);
        if (!isNpm) {
            var actual = _(1)[methodName](2);
            assert.notOk(actual instanceof _);
        }
        else {
            skipAssert(assert);
        }
    });
    QUnit.test('`_.' + methodName + '` should return a wrapped value when explicitly chaining', function(assert) {
        assert.expect(1);
        if (!isNpm) {
            var actual = _(1).chain()[methodName](2);
            assert.ok(actual instanceof _);
        }
        else {
            skipAssert(assert);
        }
    });
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.sumBy');
(function() {
    var array = [6, 4, 2],
        objects = [{ 'a': 2 }, { 'a': 3 }, { 'a': 1 }];
    QUnit.test('should work with an `iteratee`', function(assert) {
        assert.expect(1);
        var actual = _.sumBy(objects, function(object) {
            return object.a;
        });
        assert.deepEqual(actual, 6);
    });
    QUnit.test('should provide correct `iteratee` arguments', function(assert) {
        assert.expect(1);
        var args;
        _.sumBy(array, function() {
            args || (args = slice.call(arguments));
        });
        // The iteratee receives only the current value.
        assert.deepEqual(args, [6]);
    });
    QUnit.test('should work with `_.property` shorthands', function(assert) {
        assert.expect(2);
        var arrays = [[2], [3], [1]];
        // Both numeric-index and string-key shorthands are supported.
        assert.strictEqual(_.sumBy(arrays, 0), 6);
        assert.strictEqual(_.sumBy(objects, 'a'), 6);
    });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('sum methods');
lodashStable.each(['sum', 'sumBy'], function(methodName) {
    // Behavior shared by `_.sum` and `_.sumBy` (no iteratee supplied).
    var array = [6, 4, 2],
        func = _[methodName];
    QUnit.test('`_.' + methodName + '` should return the sum of an array of numbers', function(assert) {
        assert.expect(1);
        assert.strictEqual(func(array), 12);
    });
    QUnit.test('`_.' + methodName + '` should return `0` when passing empty `array` values', function(assert) {
        assert.expect(1);
        var expected = lodashStable.map(empties, stubZero);
        var actual = lodashStable.map(empties, function(value) {
            return func(value);
        });
        assert.deepEqual(actual, expected);
    });
    QUnit.test('`_.' + methodName + '` should skip `undefined` values', function(assert) {
        assert.expect(1);
        assert.strictEqual(func([1, undefined]), 1);
    });
    QUnit.test('`_.' + methodName + '` should not skip `NaN` values', function(assert) {
        assert.expect(1);
        // `deepEqual` is used because `NaN !== NaN`.
        assert.deepEqual(func([1, NaN]), NaN);
    });
    QUnit.test('`_.' + methodName + '` should not coerce values to numbers', function(assert) {
        assert.expect(1);
        // Strings are concatenated rather than coerced and added.
        assert.strictEqual(func(['1', '2']), '12');
    });
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.tail');
(function() {
    var array = [1, 2, 3];
    QUnit.test('should accept a falsey `array`', function(assert) {
        assert.expect(1);
        var expected = lodashStable.map(falsey, stubArray);
        var actual = lodashStable.map(falsey, function(array, index) {
            try {
                // Index 0 exercises the zero-argument call.
                return index ? _.tail(array) : _.tail();
            } catch (e) {}
        });
        assert.deepEqual(actual, expected);
    });
    QUnit.test('should exclude the first element', function(assert) {
        assert.expect(1);
        assert.deepEqual(_.tail(array), [2, 3]);
    });
    QUnit.test('should return an empty when querying empty arrays', function(assert) {
        assert.expect(1);
        assert.deepEqual(_.tail([]), []);
    });
    QUnit.test('should work as an iteratee for methods like `_.map`', function(assert) {
        assert.expect(1);
        var array = [[1, 2, 3], [4, 5, 6], [7, 8, 9]],
            actual = lodashStable.map(array, _.tail);
        assert.deepEqual(actual, [[2, 3], [5, 6], [8, 9]]);
    });
    QUnit.test('should work in a lazy sequence', function(assert) {
        assert.expect(4);
        if (!isNpm) {
            var array = lodashStable.range(LARGE_ARRAY_SIZE),
                values = [];
            // `values` records which elements each lazy pipeline visited.
            var actual = _(array).tail().filter(function(value) {
                values.push(value);
                return false;
            })
            .value();
            assert.deepEqual(actual, []);
            assert.deepEqual(values, array.slice(1));
            values = [];
            actual = _(array).filter(function(value) {
                values.push(value);
                return isEven(value);
            })
            .tail()
            .value();
            assert.deepEqual(actual, _.tail(_.filter(array, isEven)));
            assert.deepEqual(values, array);
        }
        else {
            skipAssert(assert, 4);
        }
    });
    QUnit.test('should not execute subsequent iteratees on an empty array in a lazy sequence', function(assert) {
        assert.expect(4);
        if (!isNpm) {
            var array = lodashStable.range(LARGE_ARRAY_SIZE),
                // `pass` flips to false if the `map` iteratee ever runs.
                iteratee = function() { pass = false; },
                pass = true,
                actual = _(array).slice(0, 1).tail().map(iteratee).value();
            assert.ok(pass);
            assert.deepEqual(actual, []);
            pass = true;
            actual = _(array).filter().slice(0, 1).tail().map(iteratee).value();
            assert.ok(pass);
            assert.deepEqual(actual, []);
        }
        else {
            skipAssert(assert, 4);
        }
    });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.take');
(function() {
    var array = [1, 2, 3];
    QUnit.test('should take the first two elements', function(assert) {
        assert.expect(1);
        assert.deepEqual(_.take(array, 2), [1, 2]);
    });
    QUnit.test('should treat falsey `n` values, except `undefined`, as `0`', function(assert) {
        assert.expect(1);
        // `undefined` falls back to the default `n` of `1`.
        var expected = lodashStable.map(falsey, function(value) {
            return value === undefined ? [1] : [];
        });
        var actual = lodashStable.map(falsey, function(n) {
            return _.take(array, n);
        });
        assert.deepEqual(actual, expected);
    });
    QUnit.test('should return an empty array when `n` < `1`', function(assert) {
        assert.expect(3);
        lodashStable.each([0, -1, -Infinity], function(n) {
            assert.deepEqual(_.take(array, n), []);
        });
    });
    QUnit.test('should return all elements when `n` >= `length`', function(assert) {
        assert.expect(4);
        lodashStable.each([3, 4, Math.pow(2, 32), Infinity], function(n) {
            assert.deepEqual(_.take(array, n), array);
        });
    });
    QUnit.test('should work as an iteratee for methods like `_.map`', function(assert) {
        assert.expect(1);
        // Extra `_.map` arguments must not be mistaken for `n`.
        var array = [[1, 2, 3], [4, 5, 6], [7, 8, 9]],
            actual = lodashStable.map(array, _.take);
        assert.deepEqual(actual, [[1], [4], [7]]);
    });
    QUnit.test('should work in a lazy sequence', function(assert) {
        assert.expect(6);
        if (!isNpm) {
            // `values` records which elements the lazy pipeline visited; each
            // lazy chain must match its eager `_.take`/`_.takeRight` spelling.
            var array = lodashStable.range(1, LARGE_ARRAY_SIZE + 1),
                predicate = function(value) { values.push(value); return isEven(value); },
                values = [],
                actual = _(array).take(2).take().value();
            assert.deepEqual(actual, _.take(_.take(array, 2)));
            actual = _(array).filter(predicate).take(2).take().value();
            assert.deepEqual(values, [1, 2]);
            assert.deepEqual(actual, _.take(_.take(_.filter(array, predicate), 2)));
            actual = _(array).take(6).takeRight(4).take(2).takeRight().value();
            assert.deepEqual(actual, _.takeRight(_.take(_.takeRight(_.take(array, 6), 4), 2)));
            values = [];
            actual = _(array).take(array.length - 1).filter(predicate).take(6).takeRight(4).take(2).takeRight().value();
            assert.deepEqual(values, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]);
            assert.deepEqual(actual, _.takeRight(_.take(_.takeRight(_.take(_.filter(_.take(array, array.length - 1), predicate), 6), 4), 2)));
        }
        else {
            skipAssert(assert, 6);
        }
    });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.takeRight');
(function() {
    var array = [1, 2, 3];
    QUnit.test('should take the last two elements', function(assert) {
        assert.expect(1);
        assert.deepEqual(_.takeRight(array, 2), [2, 3]);
    });
    QUnit.test('should treat falsey `n` values, except `undefined`, as `0`', function(assert) {
        assert.expect(1);
        // `undefined` falls back to the default `n` of `1`.
        var expected = lodashStable.map(falsey, function(value) {
            return value === undefined ? [3] : [];
        });
        var actual = lodashStable.map(falsey, function(n) {
            return _.takeRight(array, n);
        });
        assert.deepEqual(actual, expected);
    });
    QUnit.test('should return an empty array when `n` < `1`', function(assert) {
        assert.expect(3);
        lodashStable.each([0, -1, -Infinity], function(n) {
            assert.deepEqual(_.takeRight(array, n), []);
        });
    });
    QUnit.test('should return all elements when `n` >= `length`', function(assert) {
        assert.expect(4);
        lodashStable.each([3, 4, Math.pow(2, 32), Infinity], function(n) {
            assert.deepEqual(_.takeRight(array, n), array);
        });
    });
    QUnit.test('should work as an iteratee for methods like `_.map`', function(assert) {
        assert.expect(1);
        // Extra `_.map` arguments must not be mistaken for `n`.
        var array = [[1, 2, 3], [4, 5, 6], [7, 8, 9]],
            actual = lodashStable.map(array, _.takeRight);
        assert.deepEqual(actual, [[3], [6], [9]]);
    });
    QUnit.test('should work in a lazy sequence', function(assert) {
        assert.expect(6);
        if (!isNpm) {
            var array = lodashStable.range(LARGE_ARRAY_SIZE),
                predicate = function(value) { values.push(value); return isEven(value); },
                values = [],
                actual = _(array).takeRight(2).takeRight().value();
            // Fix: mirror the lazy chain exactly, as the analogous `_.take`
            // test does; the expected value previously omitted the `2`
            // argument (the result coincided, but the intent was obscured).
            assert.deepEqual(actual, _.takeRight(_.takeRight(array, 2)));
            // `takeRight` must inspect the whole input, so `values` sees it all.
            actual = _(array).filter(predicate).takeRight(2).takeRight().value();
            assert.deepEqual(values, array);
            assert.deepEqual(actual, _.takeRight(_.takeRight(_.filter(array, predicate), 2)));
            actual = _(array).takeRight(6).take(4).takeRight(2).take().value();
            assert.deepEqual(actual, _.take(_.takeRight(_.take(_.takeRight(array, 6), 4), 2)));
            values = [];
            actual = _(array).filter(predicate).takeRight(6).take(4).takeRight(2).take().value();
            assert.deepEqual(values, array);
            assert.deepEqual(actual, _.take(_.takeRight(_.take(_.takeRight(_.filter(array, predicate), 6), 4), 2)));
        }
        else {
            skipAssert(assert, 6);
        }
    });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.takeRightWhile');
(function() {
    var array = [1, 2, 3, 4];
    var objects = [
        { 'a': 0, 'b': 0 },
        { 'a': 1, 'b': 1 },
        { 'a': 2, 'b': 2 }
    ];
    QUnit.test('should take elements while `predicate` returns truthy', function(assert) {
        assert.expect(1);
        var actual = _.takeRightWhile(array, function(n) {
            return n > 2;
        });
        assert.deepEqual(actual, [3, 4]);
    });
    QUnit.test('should provide correct `predicate` arguments', function(assert) {
        assert.expect(1);
        var args;
        _.takeRightWhile(array, function() {
            args = slice.call(arguments);
        });
        // Iteration starts from the right: value 4 at index 3.
        assert.deepEqual(args, [4, 3, array]);
    });
    QUnit.test('should work with `_.matches` shorthands', function(assert) {
        assert.expect(1);
        assert.deepEqual(_.takeRightWhile(objects, { 'b': 2 }), objects.slice(2));
    });
    QUnit.test('should work with `_.matchesProperty` shorthands', function(assert) {
        assert.expect(1);
        assert.deepEqual(_.takeRightWhile(objects, ['b', 2]), objects.slice(2));
    });
    QUnit.test('should work with `_.property` shorthands', function(assert) {
        assert.expect(1);
        assert.deepEqual(_.takeRightWhile(objects, 'b'), objects.slice(1));
    });
    QUnit.test('should work in a lazy sequence', function(assert) {
        assert.expect(3);
        if (!isNpm) {
            var array = lodashStable.range(LARGE_ARRAY_SIZE),
                predicate = function(n) { return n > 2; },
                expected = _.takeRightWhile(array, predicate),
                wrapped = _(array).takeRightWhile(predicate);
            assert.deepEqual(wrapped.value(), expected);
            assert.deepEqual(wrapped.reverse().value(), expected.slice().reverse());
            assert.strictEqual(wrapped.last(), _.last(expected));
        }
        else {
            skipAssert(assert, 3);
        }
    });
    QUnit.test('should provide correct `predicate` arguments in a lazy sequence', function(assert) {
        assert.expect(5);
        if (!isNpm) {
            var args,
                array = lodashStable.range(LARGE_ARRAY_SIZE + 1);
            var expected = [
                square(LARGE_ARRAY_SIZE),
                LARGE_ARRAY_SIZE - 1,
                lodashStable.map(array.slice(1), square)
            ];
            _(array).slice(1).takeRightWhile(function(value, index, array) {
                args = slice.call(arguments);
            }).value();
            assert.deepEqual(args, [LARGE_ARRAY_SIZE, LARGE_ARRAY_SIZE - 1, array.slice(1)]);
            _(array).slice(1).map(square).takeRightWhile(function(value, index, array) {
                args = slice.call(arguments);
            }).value();
            assert.deepEqual(args, expected);
            _(array).slice(1).map(square).takeRightWhile(function(value, index) {
                args = slice.call(arguments);
            }).value();
            assert.deepEqual(args, expected);
            // A unary predicate only receives the value in a lazy sequence.
            _(array).slice(1).map(square).takeRightWhile(function(index) {
                args = slice.call(arguments);
            }).value();
            assert.deepEqual(args, [square(LARGE_ARRAY_SIZE)]);
            _(array).slice(1).map(square).takeRightWhile(function() {
                args = slice.call(arguments);
            }).value();
            assert.deepEqual(args, expected);
        }
        else {
            skipAssert(assert, 5);
        }
    });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.takeWhile');
(function() {
    var array = [1, 2, 3, 4];
    var objects = [
        { 'a': 2, 'b': 2 },
        { 'a': 1, 'b': 1 },
        { 'a': 0, 'b': 0 }
    ];
    QUnit.test('should take elements while `predicate` returns truthy', function(assert) {
        assert.expect(1);
        var actual = _.takeWhile(array, function(n) {
            return n < 3;
        });
        assert.deepEqual(actual, [1, 2]);
    });
    QUnit.test('should provide correct `predicate` arguments', function(assert) {
        assert.expect(1);
        var args;
        _.takeWhile(array, function() {
            args = slice.call(arguments);
        });
        // The predicate receives (value, index, array).
        assert.deepEqual(args, [1, 0, array]);
    });
    QUnit.test('should work with `_.matches` shorthands', function(assert) {
        assert.expect(1);
        assert.deepEqual(_.takeWhile(objects, { 'b': 2 }), objects.slice(0, 1));
    });
    QUnit.test('should work with `_.matchesProperty` shorthands', function(assert) {
        assert.expect(1);
        assert.deepEqual(_.takeWhile(objects, ['b', 2]), objects.slice(0, 1));
    });
    QUnit.test('should work with `_.property` shorthands', function(assert) {
        assert.expect(1);
        assert.deepEqual(_.takeWhile(objects, 'b'), objects.slice(0, 2));
    });
    QUnit.test('should work in a lazy sequence', function(assert) {
        assert.expect(3);
        if (!isNpm) {
            var array = lodashStable.range(LARGE_ARRAY_SIZE),
                predicate = function(n) { return n < 3; },
                expected = _.takeWhile(array, predicate),
                wrapped = _(array).takeWhile(predicate);
            assert.deepEqual(wrapped.value(), expected);
            assert.deepEqual(wrapped.reverse().value(), expected.slice().reverse());
            assert.strictEqual(wrapped.last(), _.last(expected));
        }
        else {
            skipAssert(assert, 3);
        }
    });
    QUnit.test('should work in a lazy sequence with `take`', function(assert) {
        assert.expect(1);
        if (!isNpm) {
            var array = lodashStable.range(LARGE_ARRAY_SIZE);
            var actual = _(array)
                .takeWhile(function(n) { return n < 4; })
                .take(2)
                .takeWhile(function(n) { return n == 0; })
                .value();
            assert.deepEqual(actual, [0]);
        }
        else {
            skipAssert(assert);
        }
    });
    QUnit.test('should provide correct `predicate` arguments in a lazy sequence', function(assert) {
        assert.expect(5);
        if (!isNpm) {
            var args,
                array = lodashStable.range(LARGE_ARRAY_SIZE + 1),
                expected = [1, 0, lodashStable.map(array.slice(1), square)];
            _(array).slice(1).takeWhile(function(value, index, array) {
                args = slice.call(arguments);
            }).value();
            assert.deepEqual(args, [1, 0, array.slice(1)]);
            _(array).slice(1).map(square).takeWhile(function(value, index, array) {
                args = slice.call(arguments);
            }).value();
            assert.deepEqual(args, expected);
            _(array).slice(1).map(square).takeWhile(function(value, index) {
                args = slice.call(arguments);
            }).value();
            assert.deepEqual(args, expected);
            // A unary predicate only receives the value in a lazy sequence.
            _(array).slice(1).map(square).takeWhile(function(value) {
                args = slice.call(arguments);
            }).value();
            assert.deepEqual(args, [1]);
            _(array).slice(1).map(square).takeWhile(function() {
                args = slice.call(arguments);
            }).value();
            assert.deepEqual(args, expected);
        }
        else {
            skipAssert(assert, 5);
        }
    });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.tap');
(function() {
    QUnit.test('should intercept and return the given value', function(assert) {
        assert.expect(2);
        if (!isNpm) {
            var intercepted,
                array = [1, 2, 3];
            var actual = _.tap(array, function(value) {
                intercepted = value;
            });
            // `tap` returns its first argument unchanged.
            assert.strictEqual(actual, array);
            assert.strictEqual(intercepted, array);
        }
        else {
            skipAssert(assert, 2);
        }
    });
    QUnit.test('should intercept unwrapped values and return wrapped values when chaining', function(assert) {
        assert.expect(2);
        if (!isNpm) {
            var intercepted,
                array = [1, 2, 3];
            var wrapped = _(array).tap(function(value) {
                intercepted = value;
                value.pop();
            });
            assert.ok(wrapped instanceof _);
            // The interceptor has run (on the raw array) once the chain is evaluated.
            wrapped.value();
            assert.strictEqual(intercepted, array);
        }
        else {
            skipAssert(assert, 2);
        }
    });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.template');
(function() {
QUnit.test('should escape values in "escape" delimiters', function(assert) {
assert.expect(1);
var strings = ['<p><%- value %></p>', '<p><%-value%></p>', '<p><%-\nvalue\n%></p>'],
expected = lodashStable.map(strings, lodashStable.constant('<p>&<>"'/</p>')),
data = { 'value': '&<>"\'/' };
var actual = lodashStable.map(strings, function(string) {
return _.template(string)(data);
});
assert.deepEqual(actual, expected);
});
// Remainder of the `_.template` suite: "escape"/"evaluate"/"interpolate"
// delimiters, ES6 `${}` delimiters, the `imports`/`variable`/`sourceURL`
// options, custom delimiter settings, and compiled-source exposure.
QUnit.test('should not reference `_.escape` when "escape" delimiters are not used', function(assert) {
assert.expect(1);
// `__e` is the internal alias the compiled template binds to `_.escape`;
// it should be absent when no "<%- %>" delimiter appears in the text.
var compiled = _.template('<%= typeof __e %>');
assert.strictEqual(compiled({}), 'undefined');
});
QUnit.test('should evaluate JavaScript in "evaluate" delimiters', function(assert) {
assert.expect(1);
var compiled = _.template(
'<ul><%\
for (var key in collection) {\
%><li><%= collection[key] %></li><%\
} %></ul>'
);
var data = { 'collection': { 'a': 'A', 'b': 'B' } },
actual = compiled(data);
assert.strictEqual(actual, '<ul><li>A</li><li>B</li></ul>');
});
QUnit.test('should support "evaluate" delimiters with single line comments (test production builds)', function(assert) {
assert.expect(1);
var compiled = _.template('<% // A code comment. %><% if (value) { %>yap<% } else { %>nope<% } %>'),
data = { 'value': true };
assert.strictEqual(compiled(data), 'yap');
});
QUnit.test('should support referencing variables declared in "evaluate" delimiters from other delimiters', function(assert) {
assert.expect(1);
var compiled = _.template('<% var b = a; %><%= b.value %>'),
data = { 'a': { 'value': 1 } };
assert.strictEqual(compiled(data), '1');
});
QUnit.test('should interpolate data properties in "interpolate" delimiters', function(assert) {
assert.expect(1);
// Whitespace/newlines inside the delimiter should not affect matching.
var strings = ['<%= a %>BC', '<%=a%>BC', '<%=\na\n%>BC'],
expected = lodashStable.map(strings, lodashStable.constant('ABC')),
data = { 'a': 'A' };
var actual = lodashStable.map(strings, function(string) {
return _.template(string)(data);
});
assert.deepEqual(actual, expected);
});
QUnit.test('should support "interpolate" delimiters with escaped values', function(assert) {
assert.expect(1);
var compiled = _.template('<%= a ? "a=\\"A\\"" : "" %>'),
data = { 'a': true };
assert.strictEqual(compiled(data), 'a="A"');
});
QUnit.test('should support "interpolate" delimiters containing ternary operators', function(assert) {
assert.expect(1);
var compiled = _.template('<%= value ? value : "b" %>'),
data = { 'value': 'a' };
assert.strictEqual(compiled(data), 'a');
});
QUnit.test('should support "interpolate" delimiters containing global values', function(assert) {
assert.expect(1);
var compiled = _.template('<%= typeof Math.abs %>');
try {
var actual = compiled();
} catch (e) {}
assert.strictEqual(actual, 'function');
});
QUnit.test('should support complex "interpolate" delimiters', function(assert) {
assert.expect(22);
// One entry per JavaScript operator/expression form that must survive
// compilation inside an "interpolate" delimiter.
lodashStable.forOwn({
'<%= a + b %>': '3',
'<%= b - a %>': '1',
'<%= a = b %>': '2',
'<%= !a %>': 'false',
'<%= ~a %>': '-2',
'<%= a * b %>': '2',
'<%= a / b %>': '0.5',
'<%= a % b %>': '1',
'<%= a >> b %>': '0',
'<%= a << b %>': '4',
'<%= a & b %>': '0',
'<%= a ^ b %>': '3',
'<%= a | b %>': '3',
'<%= {}.toString.call(0) %>': numberTag,
'<%= a.toFixed(2) %>': '1.00',
'<%= obj["a"] %>': '1',
'<%= delete a %>': 'true',
'<%= "a" in obj %>': 'true',
'<%= obj instanceof Object %>': 'true',
'<%= new Boolean %>': 'false',
'<%= typeof a %>': 'number',
'<%= void a %>': ''
},
function(value, key) {
var compiled = _.template(key),
data = { 'a': 1, 'b': 2 };
assert.strictEqual(compiled(data), value, key);
});
});
QUnit.test('should support ES6 template delimiters', function(assert) {
assert.expect(2);
var data = { 'value': 2 };
assert.strictEqual(_.template('1${value}3')(data), '123');
// A "\\}" escape ends the expression text, not the delimiter.
assert.strictEqual(_.template('${"{" + value + "\\}"}')(data), '{2}');
});
QUnit.test('should support the "imports" option', function(assert) {
assert.expect(1);
var compiled = _.template('<%= a %>', { 'imports': { 'a': 1 } });
assert.strictEqual(compiled({}), '1');
});
QUnit.test('should support the "variable" options', function(assert) {
assert.expect(1);
// With a "variable" name the data object is referenced explicitly and no
// `with` statement is generated.
var compiled = _.template(
'<% _.each( data.a, function( value ) { %>' +
'<%= value.valueOf() %>' +
'<% }) %>', { 'variable': 'data' }
);
var data = { 'a': [1, 2, 3] };
try {
assert.strictEqual(compiled(data), '123');
} catch (e) {
assert.ok(false, e.message);
}
});
QUnit.test('should forbid code injection through the "variable" options', function(assert) {
assert.expect(1);
// A malicious "variable" value must not be spliced verbatim into the
// compiled function source.
assert.raises(function() {
_.template('', { 'variable': '){console.log(process.env)}; with(obj' });
});
});
QUnit.test('should support custom delimiters', function(assert) {
assert.expect(2);
// Runs twice: once passing `settings` directly, once mutating the global
// `_.templateSettings` (restored afterwards from `settingsClone`).
lodashStable.times(2, function(index) {
var settingsClone = lodashStable.clone(_.templateSettings);
var settings = lodashStable.assign(index ? _.templateSettings : {}, {
'escape': /\{\{-([\s\S]+?)\}\}/g,
'evaluate': /\{\{([\s\S]+?)\}\}/g,
'interpolate': /\{\{=([\s\S]+?)\}\}/g
});
var expected = '<ul><li>0: a & A</li><li>1: b & B</li></ul>',
compiled = _.template('<ul>{{ _.each(collection, function(value, index) {}}<li>{{= index }}: {{- value }}</li>{{}); }}</ul>', index ? null : settings),
data = { 'collection': ['a & A', 'b & B'] };
assert.strictEqual(compiled(data), expected);
lodashStable.assign(_.templateSettings, settingsClone);
});
});
QUnit.test('should support custom delimiters containing special characters', function(assert) {
assert.expect(2);
lodashStable.times(2, function(index) {
var settingsClone = lodashStable.clone(_.templateSettings);
var settings = lodashStable.assign(index ? _.templateSettings : {}, {
'escape': /<\?-([\s\S]+?)\?>/g,
'evaluate': /<\?([\s\S]+?)\?>/g,
'interpolate': /<\?=([\s\S]+?)\?>/g
});
var expected = '<ul><li>0: a & A</li><li>1: b & B</li></ul>',
compiled = _.template('<ul><? _.each(collection, function(value, index) { ?><li><?= index ?>: <?- value ?></li><? }); ?></ul>', index ? null : settings),
data = { 'collection': ['a & A', 'b & B'] };
assert.strictEqual(compiled(data), expected);
lodashStable.assign(_.templateSettings, settingsClone);
});
});
QUnit.test('should use a `with` statement by default', function(assert) {
assert.expect(1);
// Outer `index` resolves through `with(data)`; the `_.each` callback's
// `index` parameter shadows it inside the loop — hence '1' + 'b' + '012'.
var compiled = _.template('<%= index %><%= collection[index] %><% _.each(collection, function(value, index) { %><%= index %><% }); %>'),
actual = compiled({ 'index': 1, 'collection': ['a', 'b', 'c'] });
assert.strictEqual(actual, '1b012');
});
QUnit.test('should use `_.templateSettings.imports._.templateSettings`', function(assert) {
assert.expect(1);
var lodash = _.templateSettings.imports._,
settingsClone = lodashStable.clone(lodash.templateSettings);
lodash.templateSettings = lodashStable.assign(lodash.templateSettings, {
'interpolate': /\{\{=([\s\S]+?)\}\}/g
});
var compiled = _.template('{{= a }}');
assert.strictEqual(compiled({ 'a': 1 }), '1');
// Restore whatever state the imported lodash had before the test.
if (settingsClone) {
lodashStable.assign(lodash.templateSettings, settingsClone);
} else {
delete lodash.templateSettings;
}
});
QUnit.test('should fallback to `_.templateSettings`', function(assert) {
assert.expect(1);
var lodash = _.templateSettings.imports._,
delimiter = _.templateSettings.interpolate;
// Replace the imported `_` with a stub lacking `templateSettings` so the
// top-level `_.templateSettings` must be used instead.
_.templateSettings.imports._ = { 'escape': lodashStable.escape };
_.templateSettings.interpolate = /\{\{=([\s\S]+?)\}\}/g;
var compiled = _.template('{{= a }}');
assert.strictEqual(compiled({ 'a': 1 }), '1');
_.templateSettings.imports._ = lodash;
_.templateSettings.interpolate = delimiter;
});
QUnit.test('should ignore `null` delimiters', function(assert) {
assert.expect(3);
var delimiter = {
'escape': /\{\{-([\s\S]+?)\}\}/g,
'evaluate': /\{\{([\s\S]+?)\}\}/g,
'interpolate': /\{\{=([\s\S]+?)\}\}/g
};
// With two delimiter kinds nulled out, their default "<% %>" forms still
// apply while the single overridden kind uses the "{{ }}" form.
lodashStable.forOwn({
'escape': '{{- a }}',
'evaluate': '{{ print(a) }}',
'interpolate': '{{= a }}'
},
function(value, key) {
var settings = { 'escape': null, 'evaluate': null, 'interpolate': null };
settings[key] = delimiter[key];
var expected = '1 <%- a %> <% print(a) %> <%= a %>',
compiled = _.template(value + ' <%- a %> <% print(a) %> <%= a %>', settings),
data = { 'a': 1 };
assert.strictEqual(compiled(data), expected);
});
});
QUnit.test('should work without delimiters', function(assert) {
assert.expect(1);
var expected = 'abc';
assert.strictEqual(_.template(expected)({}), expected);
});
QUnit.test('should work with `this` references', function(assert) {
assert.expect(2);
var compiled = _.template('a<%= this.String("b") %>c');
assert.strictEqual(compiled(), 'abc');
var object = { 'b': 'B' };
object.compiled = _.template('A<%= this.b %>C', { 'variable': 'obj' });
assert.strictEqual(object.compiled(), 'ABC');
});
QUnit.test('should work with backslashes', function(assert) {
assert.expect(1);
var compiled = _.template('<%= a %> \\b'),
data = { 'a': 'A' };
assert.strictEqual(compiled(data), 'A \\b');
});
QUnit.test('should work with escaped characters in string literals', function(assert) {
assert.expect(2);
var compiled = _.template('<% print("\'\\n\\r\\t\\u2028\\u2029\\\\") %>');
assert.strictEqual(compiled(), "'\n\r\t\u2028\u2029\\");
var data = { 'a': 'A' };
compiled = _.template('\'\n\r\t<%= a %>\u2028\u2029\\"');
assert.strictEqual(compiled(data), '\'\n\r\tA\u2028\u2029\\"');
});
QUnit.test('should handle \\u2028 & \\u2029 characters', function(assert) {
assert.expect(1);
// U+2028/U+2029 are line terminators in JS source and must be escaped by
// the compiler or the generated function would be a SyntaxError.
var compiled = _.template('\u2028<%= "\\u2028\\u2029" %>\u2029');
assert.strictEqual(compiled(), '\u2028\u2028\u2029\u2029');
});
QUnit.test('should work with statements containing quotes', function(assert) {
assert.expect(1);
var compiled = _.template("<%\
if (a == 'A' || a == \"a\") {\
%>'a',\"A\"<%\
} %>"
);
var data = { 'a': 'A' };
assert.strictEqual(compiled(data), "'a',\"A\"");
});
QUnit.test('should work with templates containing newlines and comments', function(assert) {
assert.expect(1);
var compiled = _.template('<%\n\
// A code comment.\n\
if (value) { value += 3; }\n\
%><p><%= value %></p>'
);
assert.strictEqual(compiled({ 'value': 3 }), '<p>6</p>');
});
QUnit.test('should tokenize delimiters', function(assert) {
assert.expect(1);
var compiled = _.template('<span class="icon-<%= type %>2"></span>'),
data = { 'type': 1 };
assert.strictEqual(compiled(data), '<span class="icon-12"></span>');
});
QUnit.test('should evaluate delimiters once', function(assert) {
assert.expect(1);
var actual = [],
compiled = _.template('<%= func("a") %><%- func("b") %><% func("c") %>'),
data = { 'func': function(value) { actual.push(value); } };
compiled(data);
assert.deepEqual(actual, ['a', 'b', 'c']);
});
QUnit.test('should match delimiters before escaping text', function(assert) {
assert.expect(1);
var compiled = _.template('<<\n a \n>>', { 'evaluate': /<<(.*?)>>/g });
assert.strictEqual(compiled(), '<<\n a \n>>');
});
QUnit.test('should resolve nullish values to an empty string', function(assert) {
assert.expect(3);
var compiled = _.template('<%= a %><%- a %>'),
data = { 'a': null };
assert.strictEqual(compiled(data), '');
data = { 'a': undefined };
assert.strictEqual(compiled(data), '');
data = { 'a': {} };
compiled = _.template('<%= a.b %><%- a.b %>');
assert.strictEqual(compiled(data), '');
});
QUnit.test('should return an empty string for empty values', function(assert) {
assert.expect(1);
// The leading hole in `values` exercises the zero-argument call path.
var values = [, null, undefined, ''],
expected = lodashStable.map(values, stubString),
data = { 'a': 1 };
var actual = lodashStable.map(values, function(value, index) {
var compiled = index ? _.template(value) : _.template();
return compiled(data);
});
assert.deepEqual(actual, expected);
});
QUnit.test('should parse delimiters without newlines', function(assert) {
assert.expect(1);
// "." does not match "\n", so the custom delimiter never matches and the
// text passes through unchanged.
var expected = '<<\nprint("<p>" + (value ? "yes" : "no") + "</p>")\n>>',
compiled = _.template(expected, { 'evaluate': /<<(.+?)>>/g }),
data = { 'value': true };
assert.strictEqual(compiled(data), expected);
});
QUnit.test('should support recursive calls', function(assert) {
assert.expect(1);
var compiled = _.template('<%= a %><% a = _.template(c)(obj) %><%= a %>'),
data = { 'a': 'A', 'b': 'B', 'c': '<%= b %>' };
assert.strictEqual(compiled(data), 'AB');
});
QUnit.test('should coerce `text` to a string', function(assert) {
assert.expect(1);
var object = { 'toString': lodashStable.constant('<%= a %>') },
data = { 'a': 1 };
assert.strictEqual(_.template(object)(data), '1');
});
QUnit.test('should not modify the `options` object', function(assert) {
assert.expect(1);
var options = {};
_.template('', options);
assert.deepEqual(options, {});
});
QUnit.test('should not modify `_.templateSettings` when `options` are given', function(assert) {
assert.expect(2);
var data = { 'a': 1 };
assert.notOk('a' in _.templateSettings);
_.template('', {}, data);
assert.notOk('a' in _.templateSettings);
delete _.templateSettings.a;
});
QUnit.test('should not error for non-object `data` and `options` values', function(assert) {
assert.expect(2);
_.template('')(1);
assert.ok(true, '`data` value');
_.template('', 1)(1);
assert.ok(true, '`options` value');
});
QUnit.test('should expose the source on compiled templates', function(assert) {
assert.expect(1);
// `__p` is the output-accumulator variable in generated template source.
var compiled = _.template('x'),
values = [String(compiled), compiled.source],
expected = lodashStable.map(values, stubTrue);
var actual = lodashStable.map(values, function(value) {
return lodashStable.includes(value, '__p');
});
assert.deepEqual(actual, expected);
});
QUnit.test('should expose the source on SyntaxErrors', function(assert) {
assert.expect(1);
try {
_.template('<% if x %>');
} catch (e) {
var source = e.source;
}
assert.ok(lodashStable.includes(source, '__p'));
});
QUnit.test('should not include sourceURLs in the source', function(assert) {
assert.expect(1);
var options = { 'sourceURL': '/a/b/c' },
compiled = _.template('x', options),
values = [compiled.source, undefined];
try {
_.template('<% if x %>', options);
} catch (e) {
values[1] = e.source;
}
var expected = lodashStable.map(values, stubFalse);
var actual = lodashStable.map(values, function(value) {
return lodashStable.includes(value, 'sourceURL');
});
assert.deepEqual(actual, expected);
});
QUnit.test('should not let a sourceURL inject code', function(assert) {
assert.expect(1);
// Line terminators inside the sourceURL would otherwise break out of the
// "//# sourceURL=" comment and execute the trailing text.
var actual,
expected = 'no error';
try {
actual = _.template(expected, {'sourceURL': '\u2028\u2029\n!this would err if it was executed!'})();
} catch (e) {}
assert.equal(actual, expected);
});
QUnit.test('should work as an iteratee for methods like `_.map`', function(assert) {
assert.expect(1);
var array = ['<%= a %>', '<%- b %>', '<% print(c) %>'],
compiles = lodashStable.map(array, _.template),
data = { 'a': 'one', 'b': '"two"', 'c': 'three' };
var actual = lodashStable.map(compiles, function(compiled) {
return compiled(data);
});
assert.deepEqual(actual, ['one', '"two"', 'three']);
});
}());
/*--------------------------------------------------------------------------*/
// Tests for `_.truncate`: default length, the `length`/`omission`/`separator`
// options, and coercion of `length`, `omission`, and `string`.
QUnit.module('lodash.truncate');
(function() {
var string = 'hi-diddly-ho there, neighborino';
QUnit.test('should use a default `length` of `30`', function(assert) {
assert.expect(1);
assert.strictEqual(_.truncate(string), 'hi-diddly-ho there, neighbo...');
});
QUnit.test('should not truncate if `string` is <= `length`', function(assert) {
assert.expect(2);
assert.strictEqual(_.truncate(string, { 'length': string.length }), string);
assert.strictEqual(_.truncate(string, { 'length': string.length + 2 }), string);
});
QUnit.test('should truncate string the given length', function(assert) {
assert.expect(1);
assert.strictEqual(_.truncate(string, { 'length': 24 }), 'hi-diddly-ho there, n...');
});
QUnit.test('should support a `omission` option', function(assert) {
assert.expect(1);
assert.strictEqual(_.truncate(string, { 'omission': ' [...]' }), 'hi-diddly-ho there, neig [...]');
});
QUnit.test('should coerce nullish `omission` values to strings', function(assert) {
assert.expect(2);
// Nullish omissions are stringified ("null"/"undefined"), not defaulted.
assert.strictEqual(_.truncate(string, { 'omission': null }), 'hi-diddly-ho there, neighbnull');
assert.strictEqual(_.truncate(string, { 'omission': undefined }), 'hi-diddly-ho there, nundefined');
});
QUnit.test('should support a `length` option', function(assert) {
assert.expect(1);
assert.strictEqual(_.truncate(string, { 'length': 4 }), 'h...');
});
QUnit.test('should support a `separator` option', function(assert) {
assert.expect(3);
// Separator may be a string, a regexp, or a global regexp; truncation
// snaps back to the last separator before the cut point.
assert.strictEqual(_.truncate(string, { 'length': 24, 'separator': ' ' }), 'hi-diddly-ho there,...');
assert.strictEqual(_.truncate(string, { 'length': 24, 'separator': /,? +/ }), 'hi-diddly-ho there...');
assert.strictEqual(_.truncate(string, { 'length': 24, 'separator': /,? +/g }), 'hi-diddly-ho there...');
});
QUnit.test('should treat negative `length` as `0`', function(assert) {
assert.expect(2);
lodashStable.each([0, -2], function(length) {
assert.strictEqual(_.truncate(string, { 'length': length }), '...');
});
});
QUnit.test('should coerce `length` to an integer', function(assert) {
assert.expect(4);
// '' and NaN coerce to 0; 4.6 and '4' coerce to 4 ('h...').
lodashStable.each(['', NaN, 4.6, '4'], function(length, index) {
var actual = index > 1 ? 'h...' : '...';
assert.strictEqual(_.truncate(string, { 'length': { 'valueOf': lodashStable.constant(length) } }), actual);
});
});
QUnit.test('should coerce `string` to a string', function(assert) {
assert.expect(2);
assert.strictEqual(_.truncate(Object(string), { 'length': 4 }), 'h...');
assert.strictEqual(_.truncate({ 'toString': lodashStable.constant(string) }, { 'length': 5 }), 'hi...');
});
QUnit.test('should work as an iteratee for methods like `_.map`', function(assert) {
assert.expect(1);
var actual = lodashStable.map([string, string, string], _.truncate),
truncated = 'hi-diddly-ho there, neighbo...';
assert.deepEqual(actual, [truncated, truncated, truncated]);
});
}());
/*--------------------------------------------------------------------------*/
// Tests for `_.throttle`. These are timing-sensitive: each wait window and
// setTimeout delay below is chosen relative to the throttle wait, so the
// exact millisecond constants matter.
QUnit.module('lodash.throttle');
(function() {
QUnit.test('should throttle a function', function(assert) {
assert.expect(2);
var done = assert.async();
var callCount = 0,
throttled = _.throttle(function() { callCount++; }, 32);
throttled();
throttled();
throttled();
// Leading call fires immediately; the trailing call arrives within 64ms.
var lastCount = callCount;
assert.ok(callCount);
setTimeout(function() {
assert.ok(callCount > lastCount);
done();
}, 64);
});
QUnit.test('subsequent calls should return the result of the first call', function(assert) {
assert.expect(5);
var done = assert.async();
var throttled = _.throttle(identity, 32),
results = [throttled('a'), throttled('b')];
assert.deepEqual(results, ['a', 'a']);
setTimeout(function() {
// After the wait elapses, a new invocation result replaces 'a', but
// within the new window the cached result is returned again.
var results = [throttled('c'), throttled('d')];
assert.notEqual(results[0], 'a');
assert.notStrictEqual(results[0], undefined);
assert.notEqual(results[1], 'd');
assert.notStrictEqual(results[1], undefined);
done();
}, 64);
});
QUnit.test('should clear timeout when `func` is called', function(assert) {
assert.expect(1);
var done = assert.async();
// Requires `runInContext` to stub `Date.now`, so skip in modularized builds.
if (!isModularize) {
var callCount = 0,
dateCount = 0;
// The 5th `Date.now` read returns Infinity to force the pending-timer
// bookkeeping path.
var lodash = _.runInContext({
'Date': {
'now': function() {
return ++dateCount == 5 ? Infinity : +new Date;
}
}
});
var throttled = lodash.throttle(function() { callCount++; }, 32);
throttled();
throttled();
setTimeout(function() {
assert.strictEqual(callCount, 2);
done();
}, 64);
}
else {
skipAssert(assert);
done();
}
});
QUnit.test('should not trigger a trailing call when invoked once', function(assert) {
assert.expect(2);
var done = assert.async();
var callCount = 0,
throttled = _.throttle(function() { callCount++; }, 32);
throttled();
assert.strictEqual(callCount, 1);
setTimeout(function() {
assert.strictEqual(callCount, 1);
done();
}, 64);
});
lodashStable.times(2, function(index) {
QUnit.test('should trigger a call when invoked repeatedly' + (index ? ' and `leading` is `false`' : ''), function(assert) {
assert.expect(1);
var done = assert.async();
// Busy-loop invocations for `limit` ms; slower CI/phantom runs get a
// longer window so at least two throttle windows elapse.
var callCount = 0,
limit = (argv || isPhantom) ? 1000 : 320,
options = index ? { 'leading': false } : {},
throttled = _.throttle(function() { callCount++; }, 32, options);
var start = +new Date;
while ((new Date - start) < limit) {
throttled();
}
var actual = callCount > 1;
setTimeout(function() {
assert.ok(actual);
done();
}, 1);
});
});
QUnit.test('should trigger a second throttled call as soon as possible', function(assert) {
assert.expect(3);
var done = assert.async();
var callCount = 0;
var throttled = _.throttle(function() {
callCount++;
}, 128, { 'leading': false });
throttled();
setTimeout(function() {
assert.strictEqual(callCount, 1);
throttled();
}, 192);
setTimeout(function() {
// Second trailing call must not have fired before its window closes.
assert.strictEqual(callCount, 1);
}, 254);
setTimeout(function() {
assert.strictEqual(callCount, 2);
done();
}, 384);
});
QUnit.test('should apply default options', function(assert) {
assert.expect(2);
var done = assert.async();
var callCount = 0,
throttled = _.throttle(function() { callCount++; }, 32, {});
throttled();
throttled();
assert.strictEqual(callCount, 1);
setTimeout(function() {
assert.strictEqual(callCount, 2);
done();
}, 128);
});
QUnit.test('should support a `leading` option', function(assert) {
assert.expect(2);
var withLeading = _.throttle(identity, 32, { 'leading': true });
assert.strictEqual(withLeading('a'), 'a');
var withoutLeading = _.throttle(identity, 32, { 'leading': false });
assert.strictEqual(withoutLeading('a'), undefined);
});
QUnit.test('should support a `trailing` option', function(assert) {
assert.expect(6);
var done = assert.async();
var withCount = 0,
withoutCount = 0;
var withTrailing = _.throttle(function(value) {
withCount++;
return value;
}, 64, { 'trailing': true });
var withoutTrailing = _.throttle(function(value) {
withoutCount++;
return value;
}, 64, { 'trailing': false });
// Both cache the leading result within the window; only the trailing:true
// variant fires again after the window.
assert.strictEqual(withTrailing('a'), 'a');
assert.strictEqual(withTrailing('b'), 'a');
assert.strictEqual(withoutTrailing('a'), 'a');
assert.strictEqual(withoutTrailing('b'), 'a');
setTimeout(function() {
assert.strictEqual(withCount, 2);
assert.strictEqual(withoutCount, 1);
done();
}, 256);
});
QUnit.test('should not update `lastCalled`, at the end of the timeout, when `trailing` is `false`', function(assert) {
assert.expect(1);
var done = assert.async();
var callCount = 0;
var throttled = _.throttle(function() {
callCount++;
}, 64, { 'trailing': false });
throttled();
throttled();
setTimeout(function() {
throttled();
throttled();
}, 96);
setTimeout(function() {
assert.ok(callCount > 1);
done();
}, 192);
});
QUnit.test('should work with a system time of `0`', function(assert) {
assert.expect(3);
var done = assert.async();
if (!isModularize) {
var callCount = 0,
dateCount = 0;
// First three `Date.now` reads return 0 (falsy) to verify the epoch
// origin is not treated as "never called".
var lodash = _.runInContext({
'Date': {
'now': function() {
return ++dateCount < 4 ? 0 : +new Date;
}
}
});
var throttled = lodash.throttle(function(value) {
callCount++;
return value;
}, 32);
var results = [throttled('a'), throttled('b'), throttled('c')];
assert.deepEqual(results, ['a', 'a', 'a']);
assert.strictEqual(callCount, 1);
setTimeout(function() {
assert.strictEqual(callCount, 2);
done();
}, 64);
}
else {
skipAssert(assert, 3);
done();
}
});
}());
/*--------------------------------------------------------------------------*/
// Behavior shared by `_.debounce` and `_.throttle`: option coercion, default
// wait, `this` binding, recursion, clock skew, and `cancel`/`flush`.
QUnit.module('lodash.debounce and lodash.throttle');
lodashStable.each(['debounce', 'throttle'], function(methodName) {
var func = _[methodName],
isDebounce = methodName == 'debounce';
QUnit.test('`_.' + methodName + '` should not error for non-object `options` values', function(assert) {
assert.expect(1);
func(noop, 32, 1);
assert.ok(true);
});
QUnit.test('`_.' + methodName + '` should use a default `wait` of `0`', function(assert) {
assert.expect(1);
var done = assert.async();
var callCount = 0,
funced = func(function() { callCount++; });
funced();
setTimeout(function() {
funced();
// Debounce collapses both calls into one trailing invocation;
// throttle's leading edge fires for each.
assert.strictEqual(callCount, isDebounce ? 1 : 2);
done();
}, 32);
});
QUnit.test('`_.' + methodName + '` should invoke `func` with the correct `this` binding', function(assert) {
assert.expect(1);
var done = assert.async();
var actual = [],
object = { 'funced': func(function() { actual.push(this); }, 32) },
expected = lodashStable.times(isDebounce ? 1 : 2, lodashStable.constant(object));
object.funced();
if (!isDebounce) {
object.funced();
}
setTimeout(function() {
assert.deepEqual(actual, expected);
done();
}, 64);
});
QUnit.test('`_.' + methodName + '` supports recursive calls', function(assert) {
assert.expect(2);
var done = assert.async();
// Each invocation dequeues the next (this, arg) pair and re-invokes the
// wrapper from inside `func`, which must not deadlock or drop calls.
var actual = [],
args = lodashStable.map(['a', 'b', 'c'], function(chr) { return [{}, chr]; }),
expected = args.slice(),
queue = args.slice();
var funced = func(function() {
var current = [this];
push.apply(current, arguments);
actual.push(current);
var next = queue.shift();
if (next) {
funced.call(next[0], next[1]);
}
}, 32);
var next = queue.shift();
funced.call(next[0], next[1]);
assert.deepEqual(actual, expected.slice(0, isDebounce ? 0 : 1));
setTimeout(function() {
assert.deepEqual(actual, expected.slice(0, actual.length));
done();
}, 256);
});
QUnit.test('`_.' + methodName + '` should work if the system time is set backwards', function(assert) {
assert.expect(1);
var done = assert.async();
if (!isModularize) {
var callCount = 0,
dateCount = 0;
// The 4th `Date.now` read jumps into the past to simulate a clock
// adjustment mid-wait.
var lodash = _.runInContext({
'Date': {
'now': function() {
return ++dateCount == 4
? +new Date(2012, 3, 23, 23, 27, 18)
: +new Date;
}
}
});
var funced = lodash[methodName](function() {
callCount++;
}, 32);
funced();
setTimeout(function() {
funced();
assert.strictEqual(callCount, isDebounce ? 1 : 2);
done();
}, 64);
}
else {
skipAssert(assert);
done();
}
});
QUnit.test('`_.' + methodName + '` should support cancelling delayed calls', function(assert) {
assert.expect(1);
var done = assert.async();
var callCount = 0;
var funced = func(function() {
callCount++;
}, 32, { 'leading': false });
funced();
funced.cancel();
setTimeout(function() {
assert.strictEqual(callCount, 0);
done();
}, 64);
});
QUnit.test('`_.' + methodName + '` should reset `lastCalled` after cancelling', function(assert) {
assert.expect(3);
var done = assert.async();
var callCount = 0;
var funced = func(function() {
return ++callCount;
}, 32, { 'leading': true });
assert.strictEqual(funced(), 1);
funced.cancel();
// After cancel the next call is treated as a fresh leading edge.
assert.strictEqual(funced(), 2);
funced();
setTimeout(function() {
assert.strictEqual(callCount, 3);
done();
}, 64);
});
QUnit.test('`_.' + methodName + '` should support flushing delayed calls', function(assert) {
assert.expect(2);
var done = assert.async();
var callCount = 0;
var funced = func(function() {
return ++callCount;
}, 32, { 'leading': false });
funced();
// `flush` invokes the pending trailing call immediately and returns its result.
assert.strictEqual(funced.flush(), 1);
setTimeout(function() {
assert.strictEqual(callCount, 1);
done();
}, 64);
});
QUnit.test('`_.' + methodName + '` should noop `cancel` and `flush` when nothing is queued', function(assert) {
assert.expect(2);
var done = assert.async();
var callCount = 0,
funced = func(function() { callCount++; }, 32);
funced.cancel();
assert.strictEqual(funced.flush(), undefined);
setTimeout(function() {
assert.strictEqual(callCount, 0);
done();
}, 64);
});
});
/*--------------------------------------------------------------------------*/
// Tests for `_.times`: `n` coercion, iteratee arguments/defaulting, and
// implicit/explicit chaining.
QUnit.module('lodash.times');
(function() {
QUnit.test('should coerce non-finite `n` values to `0`', function(assert) {
assert.expect(3);
lodashStable.each([-Infinity, NaN, Infinity], function(n) {
assert.deepEqual(_.times(n), []);
});
});
QUnit.test('should coerce `n` to an integer', function(assert) {
assert.expect(1);
var actual = _.times(2.6, _.identity);
assert.deepEqual(actual, [0, 1]);
});
QUnit.test('should provide correct `iteratee` arguments', function(assert) {
assert.expect(1);
// The iteratee should receive only the index, not (index, n) etc.
var args;
_.times(1, function(assert) {
args || (args = slice.call(arguments));
});
assert.deepEqual(args, [0]);
});
QUnit.test('should use `_.identity` when `iteratee` is nullish', function(assert) {
assert.expect(1);
// Leading hole in `values` exercises the single-argument call form.
var values = [, null, undefined],
expected = lodashStable.map(values, lodashStable.constant([0, 1, 2]));
var actual = lodashStable.map(values, function(value, index) {
return index ? _.times(3, value) : _.times(3);
});
assert.deepEqual(actual, expected);
});
QUnit.test('should return an array of the results of each `iteratee` execution', function(assert) {
assert.expect(1);
assert.deepEqual(_.times(3, doubled), [0, 2, 4]);
});
QUnit.test('should return an empty array for falsey and negative `n` values', function(assert) {
assert.expect(1);
var values = falsey.concat(-1, -Infinity),
expected = lodashStable.map(values, stubArray);
var actual = lodashStable.map(values, function(value, index) {
return index ? _.times(value) : _.times();
});
assert.deepEqual(actual, expected);
});
QUnit.test('should return an unwrapped value when implicitly chaining', function(assert) {
assert.expect(1);
if (!isNpm) {
assert.deepEqual(_(3).times(), [0, 1, 2]);
}
else {
skipAssert(assert);
}
});
QUnit.test('should return a wrapped value when explicitly chaining', function(assert) {
assert.expect(1);
if (!isNpm) {
assert.ok(_(3).chain().times() instanceof _);
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
// Tests for `_.toArray`: conversion of objects, iterables, maps, strings,
// and participation in lazy chain sequences.
QUnit.module('lodash.toArray');
(function() {
QUnit.test('should convert objects to arrays', function(assert) {
assert.expect(1);
assert.deepEqual(_.toArray({ 'a': 1, 'b': 2 }), [1, 2]);
});
QUnit.test('should convert iterables to arrays', function(assert) {
assert.expect(1);
if (Symbol && Symbol.iterator) {
// An array-like given Array.prototype's iterator should be consumed
// via the iterator protocol.
var object = { '0': 'a', 'length': 1 };
object[Symbol.iterator] = arrayProto[Symbol.iterator];
assert.deepEqual(_.toArray(object), ['a']);
}
else {
skipAssert(assert);
}
});
QUnit.test('should convert maps to arrays', function(assert) {
assert.expect(1);
if (Map) {
var map = new Map;
map.set('a', 1);
map.set('b', 2);
assert.deepEqual(_.toArray(map), [['a', 1], ['b', 2]]);
}
else {
skipAssert(assert);
}
});
QUnit.test('should convert strings to arrays', function(assert) {
assert.expect(3);
assert.deepEqual(_.toArray(''), []);
assert.deepEqual(_.toArray('ab'), ['a', 'b']);
assert.deepEqual(_.toArray(Object('ab')), ['a', 'b']);
});
QUnit.test('should work in a lazy sequence', function(assert) {
assert.expect(2);
if (!isNpm) {
// LARGE_ARRAY_SIZE triggers the lazy-evaluation code path.
var array = lodashStable.range(LARGE_ARRAY_SIZE + 1);
var object = lodashStable.zipObject(lodashStable.times(LARGE_ARRAY_SIZE, function(index) {
return ['key' + index, index];
}));
var actual = _(array).slice(1).map(String).toArray().value();
assert.deepEqual(actual, lodashStable.map(array.slice(1), String));
actual = _(object).toArray().slice(1).map(String).value();
assert.deepEqual(actual, _.map(_.toArray(object).slice(1), String));
}
else {
skipAssert(assert, 2);
}
});
}());
/*--------------------------------------------------------------------------*/
// Tests for `_.toLower`: the whole string is lowercased in place; word
// delimiters ('-', '_') pass through untouched, unlike `_.lowerCase`.
QUnit.module('lodash.toLower');
(function() {
QUnit.test('should convert whole string to lower case', function(assert) {
assert.expect(3);
var pairs = [
['--Foo-Bar--', '--foo-bar--'],
['fooBar', 'foobar'],
['__FOO_BAR__', '__foo_bar__']
];
lodashStable.each(pairs, function(pair) {
assert.deepEqual(_.toLower(pair[0]), pair[1]);
});
});
}());
/*--------------------------------------------------------------------------*/
// Tests for `_.toUpper`: the whole string is uppercased in place; word
// delimiters ('-', '_') pass through untouched, unlike `_.upperCase`.
QUnit.module('lodash.toUpper');
(function() {
QUnit.test('should convert whole string to upper case', function(assert) {
assert.expect(3);
var pairs = [
['--Foo-Bar', '--FOO-BAR'],
['fooBar', 'FOOBAR'],
['__FOO_BAR__', '__FOO_BAR__']
];
lodashStable.each(pairs, function(pair) {
assert.deepEqual(_.toUpper(pair[0]), pair[1]);
});
});
}());
/*--------------------------------------------------------------------------*/
// Behavior shared by `_.slice` and `_.toArray`: dense output, array-like
// handling, shallow cloning, and NodeList support.
QUnit.module('lodash.slice and lodash.toArray');
lodashStable.each(['slice', 'toArray'], function(methodName) {
var array = [1, 2, 3],
func = _[methodName];
QUnit.test('`_.' + methodName + '` should return a dense array', function(assert) {
assert.expect(3);
// Sparse input: indexes 0 and 2 are holes; the result must own them.
var sparse = Array(3);
sparse[1] = 2;
var actual = func(sparse);
assert.ok('0' in actual);
assert.ok('2' in actual);
assert.deepEqual(actual, sparse);
});
QUnit.test('`_.' + methodName + '` should treat array-like objects like arrays', function(assert) {
assert.expect(2);
var object = { '0': 'a', 'length': 1 };
assert.deepEqual(func(object), ['a']);
assert.deepEqual(func(args), array);
});
QUnit.test('`_.' + methodName + '` should return a shallow clone of arrays', function(assert) {
assert.expect(2);
var actual = func(array);
assert.deepEqual(actual, array);
assert.notStrictEqual(actual, array);
});
QUnit.test('`_.' + methodName + '` should work with a node list for `collection`', function(assert) {
assert.expect(1);
if (document) {
// Some environments throw when slicing host objects; tolerate that.
try {
var actual = func(document.getElementsByTagName('body'));
} catch (e) {}
assert.deepEqual(actual, [body]);
}
else {
skipAssert(assert);
}
});
});
/*--------------------------------------------------------------------------*/
// Behavior shared by `_.toInteger` and `_.toSafeInteger`: truncation toward
// zero, NaN/undefined to 0, and clamping of infinities.
QUnit.module('toInteger methods');
lodashStable.each(['toInteger', 'toSafeInteger'], function(methodName) {
var func = _[methodName],
isSafe = methodName == 'toSafeInteger';
QUnit.test('`_.' + methodName + '` should convert values to integers', function(assert) {
assert.expect(6);
assert.strictEqual(func(-5.6), -5);
assert.strictEqual(func('5.6'), 5);
assert.strictEqual(func(), 0);
assert.strictEqual(func(NaN), 0);
// toSafeInteger clamps to 2^53-1; toInteger clamps to Number.MAX_VALUE.
var expected = isSafe ? MAX_SAFE_INTEGER : MAX_INTEGER;
assert.strictEqual(func(Infinity), expected);
assert.strictEqual(func(-Infinity), -expected);
});
QUnit.test('`_.' + methodName + '` should support `value` of `-0`', function(assert) {
assert.expect(1);
// 1 / -0 === -Infinity distinguishes -0 from +0.
assert.strictEqual(1 / func(-0), -Infinity);
});
});
/*--------------------------------------------------------------------------*/
// Tests for `_.toLength`: clamp to [0, MAX_ARRAY_LENGTH], truncate fractions,
// and normalize -0 to +0.
QUnit.module('lodash.toLength');
(function() {
QUnit.test('should return a valid length', function(assert) {
assert.expect(4);
assert.strictEqual(_.toLength(-1), 0);
assert.strictEqual(_.toLength('1'), 1);
assert.strictEqual(_.toLength(1.1), 1);
assert.strictEqual(_.toLength(MAX_INTEGER), MAX_ARRAY_LENGTH);
});
QUnit.test('should return `value` if a valid length', function(assert) {
assert.expect(3);
assert.strictEqual(_.toLength(0), 0);
assert.strictEqual(_.toLength(3), 3);
assert.strictEqual(_.toLength(MAX_ARRAY_LENGTH), MAX_ARRAY_LENGTH);
});
QUnit.test('should convert `-0` to `0`', function(assert) {
assert.expect(1);
// 1 / +0 === Infinity proves the sign of zero was dropped.
assert.strictEqual(1 / _.toLength(-0), Infinity);
});
}());
/*--------------------------------------------------------------------------*/
// Behavior shared by the number coercion methods (except `toLength`): the
// sign of zero must survive coercion from primitives, strings, and wrappers.
QUnit.module('number coercion methods');
lodashStable.each(['toFinite', 'toInteger', 'toNumber', 'toSafeInteger'], function(methodName) {
var func = _[methodName];
QUnit.test('`_.' + methodName + '` should preserve the sign of `0`', function(assert) {
assert.expect(2);
var values = [0, '0', -0, '-0'],
expected = [[0, Infinity], [0, Infinity], [-0, -Infinity], [-0, -Infinity]];
// Pass 0: primitives; pass 1: Object-wrapped versions of the same values.
lodashStable.times(2, function(index) {
var others = lodashStable.map(values, index ? Object : identity);
var actual = lodashStable.map(others, function(value) {
var result = func(value);
// 1 / result exposes the sign of a zero result.
return [result, 1 / result];
});
assert.deepEqual(actual, expected);
});
});
});
// Shared conversion tests for every number coercion method. Fix: the ReDoS
// test name had a stray backtick ('`_.`' instead of '`_.'), producing a
// malformed test title like "`_.`toNumber` should prevent ReDoS".
lodashStable.each(['toFinite', 'toInteger', 'toLength', 'toNumber', 'toSafeInteger'], function(methodName) {
  var func = _[methodName],
      isToFinite = methodName == 'toFinite',
      isToLength = methodName == 'toLength',
      isToNumber = methodName == 'toNumber',
      isToSafeInteger = methodName == 'toSafeInteger';

  // String decorators used to vary sign and surrounding whitespace.
  function negative(string) {
    return '-' + string;
  }

  function pad(string) {
    return whitespace + string + whitespace;
  }

  function positive(string) {
    return '+' + string;
  }

  QUnit.test('`_.' + methodName + '` should pass thru primitive number values', function(assert) {
    assert.expect(1);

    var values = [0, 1, NaN];

    // Only `toNumber` lets `NaN` through; the others coerce it to `0`.
    var expected = lodashStable.map(values, function(value) {
      return (!isToNumber && value !== value) ? 0 : value;
    });

    var actual = lodashStable.map(values, func);

    assert.deepEqual(actual, expected);
  });

  QUnit.test('`_.' + methodName + '` should convert number primitives and objects to numbers', function(assert) {
    assert.expect(1);

    var values = [2, 1.2, MAX_SAFE_INTEGER, MAX_INTEGER, Infinity, NaN];

    var expected = lodashStable.map(values, function(value) {
      if (!isToNumber) {
        // Integer methods truncate fractions; `toFinite` keeps them.
        if (!isToFinite && value == 1.2) {
          value = 1;
        }
        else if (value == Infinity) {
          value = MAX_INTEGER;
        }
        else if (value !== value) {
          value = 0;
        }
        if (isToLength || isToSafeInteger) {
          value = Math.min(value, isToLength ? MAX_ARRAY_LENGTH : MAX_SAFE_INTEGER);
        }
      }
      // `toLength` clamps negative results to `0`.
      var neg = isToLength ? 0 : -value;
      return [value, value, neg, neg];
    });

    var actual = lodashStable.map(values, function(value) {
      return [func(value), func(Object(value)), func(-value), func(Object(-value))];
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('`_.' + methodName + '` should convert string primitives and objects to numbers', function(assert) {
    assert.expect(1);

    var transforms = [identity, pad, positive, negative];

    var values = [
      '10', '1.234567890', (MAX_SAFE_INTEGER + ''),
      '1e+308', '1e308', '1E+308', '1E308',
      '5e-324', '5E-324',
      'Infinity', 'NaN'
    ];

    var expected = lodashStable.map(values, function(value) {
      var n = +value;
      if (!isToNumber) {
        if (!isToFinite && n == 1.234567890) {
          n = 1;
        }
        else if (n == Infinity) {
          n = MAX_INTEGER;
        }
        else if ((!isToFinite && n == Number.MIN_VALUE) || n !== n) {
          n = 0;
        }
        if (isToLength || isToSafeInteger) {
          n = Math.min(n, isToLength ? MAX_ARRAY_LENGTH : MAX_SAFE_INTEGER);
        }
      }
      // 8 results per value: 4 transforms x (primitive, Object-wrapped);
      // only the `negative` transform flips the sign.
      var neg = isToLength ? 0 : -n;
      return [n, n, n, n, n, n, neg, neg];
    });

    var actual = lodashStable.map(values, function(value) {
      return lodashStable.flatMap(transforms, function(mod) {
        return [func(mod(value)), func(Object(mod(value)))];
      });
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('`_.' + methodName + '` should convert binary/octal strings to numbers', function(assert) {
    assert.expect(1);

    var numbers = [42, 5349, 1715004],
        transforms = [identity, pad],
        values = ['0b101010', '0o12345', '0x1a2b3c'];

    // 8 results per value: 2 transforms x 2 wrappers x lower/upper case.
    var expected = lodashStable.map(numbers, function(n) {
      return lodashStable.times(8, lodashStable.constant(n));
    });

    var actual = lodashStable.map(values, function(value) {
      var upper = value.toUpperCase();
      return lodashStable.flatMap(transforms, function(mod) {
        return [func(mod(value)), func(Object(mod(value))), func(mod(upper)), func(Object(mod(upper)))];
      });
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('`_.' + methodName + '` should convert invalid binary/octal strings to `' + (isToNumber ? 'NaN' : '0') + '`', function(assert) {
    assert.expect(1);

    var transforms = [identity, pad, positive, negative],
        values = ['0b', '0o', '0x', '0b1010102', '0o123458', '0x1a2b3x'];

    var expected = lodashStable.map(values, function(n) {
      return lodashStable.times(8, lodashStable.constant(isToNumber ? NaN : 0));
    });

    var actual = lodashStable.map(values, function(value) {
      return lodashStable.flatMap(transforms, function(mod) {
        return [func(mod(value)), func(Object(mod(value)))];
      });
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('`_.' + methodName + '` should convert symbols to `' + (isToNumber ? 'NaN' : '0') + '`', function(assert) {
    assert.expect(1);

    if (Symbol) {
      var object1 = Object(symbol),
          object2 = Object(symbol),
          values = [symbol, object1, object2],
          expected = lodashStable.map(values, lodashStable.constant(isToNumber ? NaN : 0));

      // Exercise the `toString` fallback path on the second wrapper.
      object2.valueOf = undefined;

      var actual = lodashStable.map(values, func);

      assert.deepEqual(actual, expected);
    }
    else {
      skipAssert(assert);
    }
  });

  QUnit.test('`_.' + methodName + '` should convert empty values to `0` or `NaN`', function(assert) {
    assert.expect(1);

    var values = falsey.concat(whitespace);

    var expected = lodashStable.map(values, function(value) {
      return (isToNumber && value !== whitespace) ? Number(value) : 0;
    });

    // Index 0 is a hole in `falsey`, so call with no argument there.
    var actual = lodashStable.map(values, function(value, index) {
      return index ? func(value) : func();
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('`_.' + methodName + '` should coerce objects to numbers', function(assert) {
    assert.expect(1);

    var values = [
      {},
      [],
      [1],
      [1, 2],
      { 'valueOf': '1.1' },
      { 'valueOf': '1.1', 'toString': lodashStable.constant('2.2') },
      { 'valueOf': lodashStable.constant('1.1'), 'toString': '2.2' },
      { 'valueOf': lodashStable.constant('1.1'), 'toString': lodashStable.constant('2.2') },
      { 'valueOf': lodashStable.constant('-0x1a2b3c') },
      { 'toString': lodashStable.constant('-0x1a2b3c') },
      { 'valueOf': lodashStable.constant('0o12345') },
      { 'toString': lodashStable.constant('0o12345') },
      { 'valueOf': lodashStable.constant('0b101010') },
      { 'toString': lodashStable.constant('0b101010') }
    ];

    var expected = [
      NaN, 0, 1, NaN,
      NaN, 2.2, 1.1, 1.1,
      NaN, NaN,
      5349, 5349,
      42, 42
    ];

    // `toFinite` and the integer methods coerce NaN-producing inputs to 0
    // and truncate fractions where applicable.
    if (isToFinite) {
      expected = [
        0, 0, 1, 0,
        0, 2.2, 1.1, 1.1,
        0, 0,
        5349, 5349,
        42, 42
      ];
    }
    else if (!isToNumber) {
      expected = [
        0, 0, 1, 0,
        0, 2, 1, 1,
        0, 0,
        5349, 5349,
        42, 42
      ];
    }

    var actual = lodashStable.map(values, func);

    assert.deepEqual(actual, expected);
  });

  QUnit.test('`_.' + methodName + '` should prevent ReDoS', function(assert) {
    assert.expect(2);

    // A long run of interior whitespace is the classic catastrophic
    // backtracking input for a naive trim regex.
    var largeStrLen = 50000,
        largeStr = '1' + lodashStable.repeat(' ', largeStrLen) + '1',
        maxMs = 1000,
        startTime = lodashStable.now();

    assert.deepEqual(_[methodName](largeStr), methodName == 'toNumber' ? NaN : 0);

    var endTime = lodashStable.now(),
        timeSpent = endTime - startTime;

    assert.ok(timeSpent < maxMs, 'operation took ' + timeSpent + 'ms');
  });
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.toPairs');

(function() {
  QUnit.test('should be aliased', function(assert) {
    assert.expect(1);
    // `_.entries` is documented as an alias of `_.toPairs`.
    assert.strictEqual(_.entries, _.toPairs);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.toPairsIn');

(function() {
  QUnit.test('should be aliased', function(assert) {
    assert.expect(1);
    // `_.entriesIn` is documented as an alias of `_.toPairsIn`.
    assert.strictEqual(_.entriesIn, _.toPairsIn);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('toPairs methods');

// Shared suite for `_.toPairs` and `_.toPairsIn`; only the latter includes
// inherited enumerable string-keyed properties.
lodashStable.each(['toPairs', 'toPairsIn'], function(methodName) {
  var func = _[methodName],
      isToPairs = methodName == 'toPairs';

  QUnit.test('`_.' + methodName + '` should create an array of string keyed-value pairs', function(assert) {
    assert.expect(1);

    // Sort pairs by key (index 0) so the assertion is independent of
    // key-iteration order.
    var object = { 'a': 1, 'b': 2 },
        actual = lodashStable.sortBy(func(object), 0);

    assert.deepEqual(actual, [['a', 1], ['b', 2]]);
  });

  QUnit.test('`_.' + methodName + '` should ' + (isToPairs ? 'not ' : '') + 'include inherited string keyed property values', function(assert) {
    assert.expect(1);

    function Foo() {
      this.a = 1;
    }
    Foo.prototype.b = 2;

    // `toPairs` sees only the own property; `toPairsIn` also sees `b`.
    var expected = isToPairs ? [['a', 1]] : [['a', 1], ['b', 2]],
        actual = lodashStable.sortBy(func(new Foo), 0);

    assert.deepEqual(actual, expected);
  });

  QUnit.test('`_.' + methodName + '` should convert objects with a `length` property', function(assert) {
    assert.expect(1);

    // `length` is treated as an ordinary key, not as an array-like marker.
    var object = { '0': 'a', '1': 'b', 'length': 2 },
        actual = lodashStable.sortBy(func(object), 0);

    assert.deepEqual(actual, [['0', 'a'], ['1', 'b'], ['length', 2]]);
  });

  QUnit.test('`_.' + methodName + '` should convert maps', function(assert) {
    assert.expect(1);

    // `Map` may be unavailable in older environments; skip instead of failing.
    if (Map) {
      var map = new Map;
      map.set('a', 1);
      map.set('b', 2);
      assert.deepEqual(func(map), [['a', 1], ['b', 2]]);
    }
    else {
      skipAssert(assert);
    }
  });

  QUnit.test('`_.' + methodName + '` should convert sets', function(assert) {
    assert.expect(1);

    if (Set) {
      var set = new Set;
      set.add(1);
      set.add(2);
      // Set entries are `[value, value]` pairs.
      assert.deepEqual(func(set), [[1, 1], [2, 2]]);
    }
    else {
      skipAssert(assert);
    }
  });

  QUnit.test('`_.' + methodName + '` should convert strings', function(assert) {
    assert.expect(2);

    // Covers both primitive and Object-wrapped strings.
    lodashStable.each(['xo', Object('xo')], function(string) {
      var actual = lodashStable.sortBy(func(string), 0);
      assert.deepEqual(actual, [['0', 'x'], ['1', 'o']]);
    });
  });
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.toPath');

(function() {
  QUnit.test('should convert a string to a path', function(assert) {
    assert.expect(2);

    assert.deepEqual(_.toPath('a.b.c'), ['a', 'b', 'c']);
    // Bracket indexes become plain string keys.
    assert.deepEqual(_.toPath('a[0].b.c'), ['a', '0', 'b', 'c']);
  });

  QUnit.test('should coerce array elements to strings', function(assert) {
    assert.expect(4);

    var array = ['a', 'b', 'c'];

    // Covers both primitive elements and `Object('a')`-style wrappers.
    lodashStable.each([array, lodashStable.map(array, Object)], function(value) {
      var actual = _.toPath(value);
      assert.deepEqual(actual, array);
      // A fresh array is returned even when the input already is one.
      assert.notStrictEqual(actual, array);
    });
  });

  QUnit.test('should return new path array', function(assert) {
    assert.expect(1);
    assert.notStrictEqual(_.toPath('a.b.c'), _.toPath('a.b.c'));
  });

  QUnit.test('should not coerce symbols to strings', function(assert) {
    assert.expect(4);

    if (Symbol) {
      var object = Object(symbol);
      // Symbols must survive as symbols whether bare, wrapped, or in arrays.
      lodashStable.each([symbol, object, [symbol], [object]], function(value) {
        var actual = _.toPath(value);
        assert.ok(lodashStable.isSymbol(actual[0]));
      });
    }
    else {
      skipAssert(assert, 4);
    }
  });

  QUnit.test('should handle complex paths', function(assert) {
    assert.expect(1);

    // Mixes negative decimals, quoted keys with escaped quotes, and newlines.
    var actual = _.toPath('a[-1.23]["[\\"b\\"]"].c[\'[\\\'d\\\']\'][\ne\n][f].g');
    assert.deepEqual(actual, ['a', '-1.23', '["b"]', 'c', "['d']", '\ne\n', 'f', 'g']);
  });

  QUnit.test('should handle consecutive empty brackets and dots', function(assert) {
    assert.expect(12);

    // Empty brackets and bare dots both produce empty-string path segments.
    var expected = ['', 'a'];
    assert.deepEqual(_.toPath('.a'), expected);
    assert.deepEqual(_.toPath('[].a'), expected);

    expected = ['', '', 'a'];
    assert.deepEqual(_.toPath('..a'), expected);
    assert.deepEqual(_.toPath('[][].a'), expected);

    expected = ['a', '', 'b'];
    assert.deepEqual(_.toPath('a..b'), expected);
    assert.deepEqual(_.toPath('a[].b'), expected);

    expected = ['a', '', '', 'b'];
    assert.deepEqual(_.toPath('a...b'), expected);
    assert.deepEqual(_.toPath('a[][].b'), expected);

    expected = ['a', ''];
    assert.deepEqual(_.toPath('a.'), expected);
    assert.deepEqual(_.toPath('a[]'), expected);

    expected = ['a', '', ''];
    assert.deepEqual(_.toPath('a..'), expected);
    assert.deepEqual(_.toPath('a[][]'), expected);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.toPlainObject');

(function() {
  QUnit.test('should flatten inherited string keyed properties', function(assert) {
    assert.expect(1);

    function Foo() {
      this.b = 2;
    }
    Foo.prototype.c = 3;

    // The inherited `c` becomes an own property of the plain-object result.
    var actual = lodashStable.assign({ 'a': 1 }, _.toPlainObject(new Foo));
    assert.deepEqual(actual, { 'a': 1, 'b': 2, 'c': 3 });
  });

  QUnit.test('should convert `arguments` objects to plain objects', function(assert) {
    assert.expect(1);
    assert.deepEqual(_.toPlainObject(args), { '0': 1, '1': 2, '2': 3 });
  });

  QUnit.test('should convert arrays to plain objects', function(assert) {
    assert.expect(1);
    assert.deepEqual(_.toPlainObject(['a', 'b', 'c']), { '0': 'a', '1': 'b', '2': 'c' });
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.toString');

(function() {
  QUnit.test('should treat nullish values as empty strings', function(assert) {
    assert.expect(1);

    // A hole, `null`, and `undefined` should all coerce to ''.
    var values = [, null, undefined],
        expected = lodashStable.map(values, stubString);

    // Index 0 is the hole, so call with no argument there.
    var actual = lodashStable.map(values, function(value, index) {
      return index ? _.toString(value) : _.toString();
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should preserve the sign of `0`', function(assert) {
    assert.expect(1);

    // Plain `String(-0)` would yield '0'; lodash keeps the '-'.
    var values = [-0, Object(-0), 0, Object(0)],
        expected = ['-0', '-0', '0', '0'],
        actual = lodashStable.map(values, _.toString);

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should preserve the sign of `0` in an array', function(assert) {
    assert.expect(1);
    var values = [-0, Object(-0), 0, Object(0)];
    assert.deepEqual(_.toString(values), '-0,-0,0,0');
  });

  QUnit.test('should not error on symbols', function(assert) {
    assert.expect(1);

    // Plain string concatenation with a symbol throws; `_.toString` must not.
    if (Symbol) {
      try {
        assert.strictEqual(_.toString(symbol), 'Symbol(a)');
      } catch (e) {
        assert.ok(false, e.message);
      }
    }
    else {
      skipAssert(assert);
    }
  });

  QUnit.test('should not error on an array of symbols', function(assert) {
    assert.expect(1);

    if (Symbol) {
      try {
        assert.strictEqual(_.toString([symbol]), 'Symbol(a)');
      } catch (e) {
        assert.ok(false, e.message);
      }
    }
    else {
      skipAssert(assert);
    }
  });

  QUnit.test('should return the `toString` result of the wrapped value', function(assert) {
    assert.expect(1);

    // Wrapper test; not applicable to the npm module layout.
    if (!isNpm) {
      var wrapped = _([1, 2, 3]);
      assert.strictEqual(wrapped.toString(), '1,2,3');
    }
    else {
      skipAssert(assert);
    }
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.transform');

(function() {
  // Fixture constructor with own enumerable properties `a`, `b`, `c`.
  function Foo() {
    this.a = 1;
    this.b = 2;
    this.c = 3;
  }

  QUnit.test('should create an object with the same `[[Prototype]]` as `object` when `accumulator` is nullish', function(assert) {
    assert.expect(4);

    // A hole, `null`, and `undefined` all count as "nullish" accumulators.
    var accumulators = [, null, undefined],
        object = new Foo,
        expected = lodashStable.map(accumulators, stubTrue);

    var iteratee = function(result, value, key) {
      result[key] = square(value);
    };

    // Index 0 is the hole, so omit the accumulator argument there.
    var mapper = function(accumulator, index) {
      return index ? _.transform(object, iteratee, accumulator) : _.transform(object, iteratee);
    };

    var results = lodashStable.map(accumulators, mapper);

    var actual = lodashStable.map(results, function(result) {
      return result instanceof Foo;
    });

    assert.deepEqual(actual, expected);

    expected = lodashStable.map(accumulators, lodashStable.constant({ 'a': 1, 'b': 4, 'c': 9 }));
    actual = lodashStable.map(results, lodashStable.toPlainObject);

    assert.deepEqual(actual, expected);

    // `mapper` closes over `object`, so reassigning it re-targets the calls.
    object = { 'a': 1, 'b': 2, 'c': 3 };
    actual = lodashStable.map(accumulators, mapper);

    assert.deepEqual(actual, expected);

    object = [1, 2, 3];
    expected = lodashStable.map(accumulators, lodashStable.constant([1, 4, 9]));
    actual = lodashStable.map(accumulators, mapper);

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should create regular arrays from typed arrays', function(assert) {
    assert.expect(1);

    var expected = lodashStable.map(typedArrays, stubTrue);

    var actual = lodashStable.map(typedArrays, function(type) {
      // Some typed-array constructors may be absent; fall back to [].
      var Ctor = root[type],
          array = Ctor ? new Ctor(new ArrayBuffer(24)) : [];

      return lodashStable.isArray(_.transform(array, noop));
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should support an `accumulator` value', function(assert) {
    assert.expect(6);

    var values = [new Foo, [1, 2, 3], { 'a': 1, 'b': 2, 'c': 3 }],
        expected = lodashStable.map(values, lodashStable.constant([1, 4, 9]));

    var actual = lodashStable.map(values, function(value) {
      return _.transform(value, function(result, value) {
        result.push(square(value));
      }, []);
    });

    assert.deepEqual(actual, expected);

    var object = { 'a': 1, 'b': 4, 'c': 9 },
        expected = [object, { '0': 1, '1': 4, '2': 9 }, object];

    actual = lodashStable.map(values, function(value) {
      return _.transform(value, function(result, value, key) {
        result[key] = square(value);
      }, {});
    });

    assert.deepEqual(actual, expected);

    lodashStable.each([[], {}], function(accumulator) {
      var actual = lodashStable.map(values, function(value) {
        return _.transform(value, noop, accumulator);
      });

      // The provided accumulator must be returned as-is.
      assert.ok(lodashStable.every(actual, function(result) {
        return result === accumulator;
      }));

      assert.strictEqual(_.transform(null, null, accumulator), accumulator);
    });
  });

  QUnit.test('should treat sparse arrays as dense', function(assert) {
    assert.expect(1);

    // The hole in `Array(1)` is visited and reads as `undefined`.
    var actual = _.transform(Array(1), function(result, value, index) {
      result[index] = String(value);
    });

    assert.deepEqual(actual, ['undefined']);
  });

  QUnit.test('should work without an `iteratee`', function(assert) {
    assert.expect(1);
    assert.ok(_.transform(new Foo) instanceof Foo);
  });

  QUnit.test('should ensure `object` is an object before using its `[[Prototype]]`', function(assert) {
    assert.expect(2);

    var Ctors = [Boolean, Boolean, Number, Number, Number, String, String],
        values = [false, true, 0, 1, NaN, '', 'a'],
        expected = lodashStable.map(values, stubObject);

    var results = lodashStable.map(values, function(value) {
      return _.transform(value);
    });

    assert.deepEqual(results, expected);

    // Primitives must produce plain objects, not boxed wrapper instances.
    expected = lodashStable.map(values, stubFalse);

    var actual = lodashStable.map(results, function(value, index) {
      return value instanceof Ctors[index];
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should ensure `object` constructor is a function before using its `[[Prototype]]`', function(assert) {
    assert.expect(1);

    // Temporarily break the constructor reference, then restore it.
    Foo.prototype.constructor = null;
    assert.notOk(_.transform(new Foo) instanceof Foo);
    Foo.prototype.constructor = Foo;
  });

  QUnit.test('should create an empty object when given a falsey `object`', function(assert) {
    assert.expect(1);

    var expected = lodashStable.map(falsey, stubObject);

    var actual = lodashStable.map(falsey, function(object, index) {
      return index ? _.transform(object) : _.transform();
    });

    assert.deepEqual(actual, expected);
  });

  lodashStable.each({
    'array': [1, 2, 3],
    'object': { 'a': 1, 'b': 2, 'c': 3 }
  },
  function(object, key) {
    QUnit.test('should provide correct `iteratee` arguments when transforming an ' + key, function(assert) {
      assert.expect(2);

      // Capture only the first invocation's arguments.
      var args;

      _.transform(object, function() {
        args || (args = slice.call(arguments));
      });

      // The accumulator is a fresh container, not the input itself.
      var first = args[0];
      if (key == 'array') {
        assert.ok(first !== object && lodashStable.isArray(first));
        assert.deepEqual(args, [first, 1, 0, object]);
      } else {
        assert.ok(first !== object && lodashStable.isPlainObject(first));
        assert.deepEqual(args, [first, 1, 'a', object]);
      }
    });
  });

  QUnit.test('should create an object from the same realm as `object`', function(assert) {
    assert.expect(1);

    var objects = lodashStable.filter(realm, function(value) {
      return lodashStable.isObject(value) && !lodashStable.isElement(value);
    });

    var expected = lodashStable.map(objects, stubTrue);

    var actual = lodashStable.map(objects, function(object) {
      var Ctor = object.constructor,
          result = _.transform(object);

      if (result === object) {
        return false;
      }
      if (lodashStable.isTypedArray(object)) {
        return result instanceof Array;
      }
      // Accept either a Ctor instance or a Ctor whose instances don't
      // satisfy `instanceof` at all (cross-realm constructors).
      return result instanceof Ctor || !(new Ctor instanceof Ctor);
    });

    assert.deepEqual(actual, expected);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('trim methods');

// Shared tests for `_.trim`, `_.trimStart`, and `_.trimEnd`. Fix: the ReDoS
// test name had a stray backtick ('`_.`' instead of '`_.'), producing a
// malformed test title.
lodashStable.each(['trim', 'trimStart', 'trimEnd'], function(methodName, index) {
  var func = _[methodName],
      parts = [];

  // index: 0 = trim (both ends), 1 = trimStart (leading), 2 = trimEnd (trailing).
  if (index != 2) {
    parts.push('leading');
  }
  if (index != 1) {
    parts.push('trailing');
  }
  parts = parts.join(' and ');

  QUnit.test('`_.' + methodName + '` should remove ' + parts + ' whitespace', function(assert) {
    assert.expect(1);

    // `trimEnd` keeps the leading run; `trimStart` keeps the trailing run.
    var string = whitespace + 'a b c' + whitespace,
        expected = (index == 2 ? whitespace : '') + 'a b c' + (index == 1 ? whitespace : '');

    assert.strictEqual(func(string), expected);
  });

  QUnit.test('`_.' + methodName + '` should coerce `string` to a string', function(assert) {
    assert.expect(1);

    var object = { 'toString': lodashStable.constant(whitespace + 'a b c' + whitespace) },
        expected = (index == 2 ? whitespace : '') + 'a b c' + (index == 1 ? whitespace : '');

    assert.strictEqual(func(object), expected);
  });

  QUnit.test('`_.' + methodName + '` should remove ' + parts + ' `chars`', function(assert) {
    assert.expect(1);

    var string = '-_-a-b-c-_-',
        expected = (index == 2 ? '-_-' : '') + 'a-b-c' + (index == 1 ? '-_-' : '');

    assert.strictEqual(func(string, '_-'), expected);
  });

  QUnit.test('`_.' + methodName + '` should coerce `chars` to a string', function(assert) {
    assert.expect(1);

    var object = { 'toString': lodashStable.constant('_-') },
        string = '-_-a-b-c-_-',
        expected = (index == 2 ? '-_-' : '') + 'a-b-c' + (index == 1 ? '-_-' : '');

    assert.strictEqual(func(string, object), expected);
  });

  QUnit.test('`_.' + methodName + '` should return an empty string for empty values and `chars`', function(assert) {
    assert.expect(6);

    lodashStable.each([null, '_-'], function(chars) {
      assert.strictEqual(func(null, chars), '');
      assert.strictEqual(func(undefined, chars), '');
      assert.strictEqual(func('', chars), '');
    });
  });

  QUnit.test('`_.' + methodName + '` should work with `undefined` or empty string values for `chars`', function(assert) {
    assert.expect(2);

    var string = whitespace + 'a b c' + whitespace,
        expected = (index == 2 ? whitespace : '') + 'a b c' + (index == 1 ? whitespace : '');

    // `undefined` means "trim whitespace"; '' means "trim nothing".
    assert.strictEqual(func(string, undefined), expected);
    assert.strictEqual(func(string, ''), string);
  });

  QUnit.test('`_.' + methodName + '` should prevent ReDoS', function(assert) {
    assert.expect(2);

    // A long interior whitespace run is the classic catastrophic
    // backtracking input for a naive trim regex.
    var largeStrLen = 50000,
        largeStr = 'A' + lodashStable.repeat(' ', largeStrLen) + 'A',
        maxMs = 1000,
        startTime = lodashStable.now();

    assert.strictEqual(_[methodName](largeStr), largeStr);

    var endTime = lodashStable.now(),
        timeSpent = endTime - startTime;

    assert.ok(timeSpent < maxMs, 'operation took ' + timeSpent + 'ms');
  });

  QUnit.test('`_.' + methodName + '` should work as an iteratee for methods like `_.map`', function(assert) {
    assert.expect(1);

    var string = Object(whitespace + 'a b c' + whitespace),
        trimmed = (index == 2 ? whitespace : '') + 'a b c' + (index == 1 ? whitespace : ''),
        actual = lodashStable.map([string, string, string], func);

    assert.deepEqual(actual, [trimmed, trimmed, trimmed]);
  });

  QUnit.test('`_.' + methodName + '` should return an unwrapped value when implicitly chaining', function(assert) {
    assert.expect(1);

    if (!isNpm) {
      var string = whitespace + 'a b c' + whitespace,
          expected = (index == 2 ? whitespace : '') + 'a b c' + (index == 1 ? whitespace : '');

      assert.strictEqual(_(string)[methodName](), expected);
    }
    else {
      skipAssert(assert);
    }
  });

  QUnit.test('`_.' + methodName + '` should return a wrapped value when explicitly chaining', function(assert) {
    assert.expect(1);

    if (!isNpm) {
      var string = whitespace + 'a b c' + whitespace;
      assert.ok(_(string).chain()[methodName]() instanceof _);
    }
    else {
      skipAssert(assert);
    }
  });
});
/*--------------------------------------------------------------------------*/
QUnit.module('uncommon symbols');

(function() {
  // Unicode fixtures: surrogate-pair emoji, regional-indicator flags,
  // variation selectors, ZWJ combinations, keycaps, and enclosing marks.
  var flag = '\ud83c\uddfa\ud83c\uddf8',
      heart = '\u2764' + emojiVar,
      hearts = '\ud83d\udc95',
      comboGlyph = '\ud83d\udc68\u200d' + heart + '\u200d\ud83d\udc8B\u200d\ud83d\udc68',
      hashKeycap = '#' + emojiVar + '\u20e3',
      leafs = '\ud83c\udf42',
      mic = '\ud83c\udf99',
      noMic = mic + '\u20e0',
      raisedHand = '\u270B' + emojiVar,
      rocket = '\ud83d\ude80',
      thumbsUp = '\ud83d\udc4d';

  QUnit.test('should account for astral symbols', function(assert) {
    assert.expect(34);

    var allHearts = _.repeat(hearts, 10),
        chars = hearts + comboGlyph,
        string = 'A ' + leafs + ', ' + comboGlyph + ', and ' + rocket,
        trimChars = comboGlyph + hearts,
        trimString = trimChars + string + trimChars;

    assert.strictEqual(_.camelCase(hearts + ' the ' + leafs), hearts + 'The' + leafs);
    assert.strictEqual(_.camelCase(string), 'a' + leafs + comboGlyph + 'And' + rocket);
    assert.strictEqual(_.capitalize(rocket), rocket);

    // Each astral glyph must count as one character for padding/size.
    assert.strictEqual(_.pad(string, 16), ' ' + string + ' ');
    assert.strictEqual(_.padStart(string, 16), ' ' + string);
    assert.strictEqual(_.padEnd(string, 16), string + ' ');

    assert.strictEqual(_.pad(string, 16, chars), hearts + string + chars);
    assert.strictEqual(_.padStart(string, 16, chars), chars + hearts + string);
    assert.strictEqual(_.padEnd(string, 16, chars), string + chars + hearts);

    assert.strictEqual(_.size(string), 13);
    assert.deepEqual(_.split(string, ' '), ['A', leafs + ',', comboGlyph + ',', 'and', rocket]);
    assert.deepEqual(_.split(string, ' ', 3), ['A', leafs + ',', comboGlyph + ',']);
    assert.deepEqual(_.split(string, undefined), [string]);
    assert.deepEqual(_.split(string, undefined, -1), [string]);
    assert.deepEqual(_.split(string, undefined, 0), []);

    var expected = ['A', ' ', leafs, ',', ' ', comboGlyph, ',', ' ', 'a', 'n', 'd', ' ', rocket];

    // Splitting on '' must not cut surrogate pairs or ZWJ sequences apart.
    assert.deepEqual(_.split(string, ''), expected);
    assert.deepEqual(_.split(string, '', 6), expected.slice(0, 6));

    assert.deepEqual(_.toArray(string), expected);
    assert.strictEqual(_.trim(trimString, chars), string);
    assert.strictEqual(_.trimStart(trimString, chars), string + trimChars);
    assert.strictEqual(_.trimEnd(trimString, chars), trimChars + string);
    assert.strictEqual(_.truncate(string, { 'length': 13 }), string);
    assert.strictEqual(_.truncate(string, { 'length': 6 }), 'A ' + leafs + '...');

    assert.deepEqual(_.words(string), ['A', leafs, comboGlyph, 'and', rocket]);
    assert.deepEqual(_.toArray(hashKeycap), [hashKeycap]);
    assert.deepEqual(_.toArray(noMic), [noMic]);

    // Run once with a string separator and once with a RegExp separator.
    lodashStable.times(2, function(index) {
      var separator = index ? RegExp(hearts) : hearts,
          options = { 'length': 4, 'separator': separator },
          actual = _.truncate(string, options);

      assert.strictEqual(actual, 'A...');
      assert.strictEqual(actual.length, 4);

      actual = _.truncate(allHearts, options);

      assert.strictEqual(actual, hearts + '...');
      assert.strictEqual(actual.length, 5);
    });
  });

  QUnit.test('should account for combining diacritical marks', function(assert) {
    assert.expect(1);

    // 'o' + combining mark should be treated as one user-perceived character.
    var values = lodashStable.map(comboMarks, function(mark) {
      return 'o' + mark;
    });

    var expected = lodashStable.map(values, function(value) {
      return [1, [value], [value]];
    });

    var actual = lodashStable.map(values, function(value) {
      return [_.size(value), _.toArray(value), _.words(value)];
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should account for fitzpatrick modifiers', function(assert) {
    assert.expect(1);

    // Emoji + skin-tone modifier should count as a single symbol.
    var values = lodashStable.map(fitzModifiers, function(modifier) {
      return thumbsUp + modifier;
    });

    var expected = lodashStable.map(values, function(value) {
      return [1, [value], [value]];
    });

    var actual = lodashStable.map(values, function(value) {
      return [_.size(value), _.toArray(value), _.words(value)];
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should account for regional symbols', function(assert) {
    assert.expect(6);

    // A flag is a pair of regional indicators; separated they count alone.
    var pair = flag.match(/\ud83c[\udde6-\uddff]/g),
        regionals = pair.join(' ');

    assert.strictEqual(_.size(flag), 1);
    assert.strictEqual(_.size(regionals), 3);
    assert.deepEqual(_.toArray(flag), [flag]);
    assert.deepEqual(_.toArray(regionals), [pair[0], ' ', pair[1]]);
    assert.deepEqual(_.words(flag), [flag]);
    assert.deepEqual(_.words(regionals), [pair[0], pair[1]]);
  });

  QUnit.test('should account for variation selectors', function(assert) {
    assert.expect(3);

    assert.strictEqual(_.size(heart), 1);
    assert.deepEqual(_.toArray(heart), [heart]);
    assert.deepEqual(_.words(heart), [heart]);
  });

  QUnit.test('should account for variation selectors with fitzpatrick modifiers', function(assert) {
    assert.expect(1);

    var values = lodashStable.map(fitzModifiers, function(modifier) {
      return raisedHand + modifier;
    });

    var expected = lodashStable.map(values, function(value) {
      return [1, [value], [value]];
    });

    var actual = lodashStable.map(values, function(value) {
      return [_.size(value), _.toArray(value), _.words(value)];
    });

    assert.deepEqual(actual, expected);
  });

  QUnit.test('should match lone surrogates', function(assert) {
    assert.expect(3);

    // Splitting a surrogate pair yields two lone surrogates.
    var pair = hearts.split(''),
        surrogates = pair[0] + ' ' + pair[1];

    assert.strictEqual(_.size(surrogates), 3);
    assert.deepEqual(_.toArray(surrogates), [pair[0], ' ', pair[1]]);
    assert.deepEqual(_.words(surrogates), []);
  });

  // NOTE(review): the trailing space in this test name is preserved from the
  // original source.
  QUnit.test('should match side by side fitzpatrick modifiers separately ', function(assert) {
    assert.expect(1);

    var string = fitzModifiers[0] + fitzModifiers[0];
    assert.deepEqual(_.toArray(string), [fitzModifiers[0], fitzModifiers[0]]);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.unary');

(function() {
  // Reflects whatever arguments it actually receives.
  function fn() {
    return slice.call(arguments);
  }

  QUnit.test('should cap the number of arguments provided to `func`', function(assert) {
    assert.expect(1);
    // Without the cap, `parseInt` would receive the map index as its radix.
    assert.deepEqual(lodashStable.map(['6', '8', '10'], _.unary(parseInt)), [6, 8, 10]);
  });

  QUnit.test('should not force a minimum argument count', function(assert) {
    assert.expect(1);
    assert.deepEqual(_.unary(fn)(), []);
  });

  QUnit.test('should use `this` binding of function', function(assert) {
    assert.expect(1);

    var object = {};
    object.capped = _.unary(function(a, b) { return this; });

    assert.strictEqual(object.capped(), object);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.unescape');

(function() {
  // `escaped` is the HTML-entity-encoded form of `unescaped`. The previous
  // source had this literal entity-decoded (which also left a stray unescaped
  // quote — a syntax error); the entity string is restored here.
  var escaped = '&amp;&lt;&gt;&quot;&#39;/',
      unescaped = '&<>"\'/';

  // Double both strings to verify repeated entities are all handled.
  escaped += escaped;
  unescaped += unescaped;

  QUnit.test('should unescape entities in order', function(assert) {
    assert.expect(1);
    assert.strictEqual(_.unescape('&lt;'), '<');
  });

  QUnit.test('should unescape the proper entities', function(assert) {
    assert.expect(1);
    assert.strictEqual(_.unescape(escaped), unescaped);
  });

  QUnit.test('should handle strings with nothing to unescape', function(assert) {
    assert.expect(1);
    assert.strictEqual(_.unescape('abc'), 'abc');
  });

  QUnit.test('should unescape the same characters escaped by `_.escape`', function(assert) {
    assert.expect(1);
    assert.strictEqual(_.unescape(_.escape(unescaped)), unescaped);
  });

  // Characters lodash deliberately leaves untouched.
  lodashStable.each(['`', '/'], function(entity) {
    QUnit.test('should not unescape the "' + entity + '" entity', function(assert) {
      assert.expect(1);
      assert.strictEqual(_.unescape(entity), entity);
    });
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('union methods');

// Baseline behavior shared by `_.union`, `_.unionBy`, and `_.unionWith`.
lodashStable.each(['union', 'unionBy', 'unionWith'], function(methodName) {
  var func = _[methodName];

  QUnit.test('`_.' + methodName + '` should return the union of two arrays', function(assert) {
    assert.expect(1);
    assert.deepEqual(func([2], [1, 2]), [2, 1]);
  });

  QUnit.test('`_.' + methodName + '` should return the union of multiple arrays', function(assert) {
    assert.expect(1);
    assert.deepEqual(func([2], [1, 2], [2, 3]), [2, 1, 3]);
  });

  QUnit.test('`_.' + methodName + '` should not flatten nested arrays', function(assert) {
    assert.expect(1);
    assert.deepEqual(func([1, 3, 2], [1, [5]], [2, [4]]), [1, 3, 2, [5], [4]]);
  });

  QUnit.test('`_.' + methodName + '` should ignore values that are not arrays or `arguments` objects', function(assert) {
    assert.expect(3);

    var array = [0];

    // Primitives, plain array-likes, and nullish arguments are all skipped.
    assert.deepEqual(func(array, 3, { '0': 1 }, null), array);
    assert.deepEqual(func(null, array, null, [2, 1]), [0, 2, 1]);
    assert.deepEqual(func(array, null, args, null), [0, 1, 2, 3]);
  });
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.unionBy');

(function() {
  QUnit.test('should accept an `iteratee`', function(assert) {
    assert.expect(2);

    // Both a function iteratee and a property-shorthand iteratee.
    assert.deepEqual(_.unionBy([2.1], [1.2, 2.3], Math.floor), [2.1, 1.2]);
    assert.deepEqual(_.unionBy([{ 'x': 1 }], [{ 'x': 2 }, { 'x': 1 }], 'x'), [{ 'x': 1 }, { 'x': 2 }]);
  });

  QUnit.test('should provide correct `iteratee` arguments', function(assert) {
    assert.expect(1);

    var args;

    // Capture only the first invocation's arguments.
    _.unionBy([2.1], [1.2, 2.3], function() {
      args || (args = slice.call(arguments));
    });

    assert.deepEqual(args, [2.1]);
  });

  QUnit.test('should output values from the first possible array', function(assert) {
    assert.expect(1);

    var actual = _.unionBy([{ 'x': 1, 'y': 1 }], [{ 'x': 1, 'y': 2 }], 'x');
    assert.deepEqual(actual, [{ 'x': 1, 'y': 1 }]);
  });
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.unionWith');
// `_.unionWith`-specific behavior: a comparator decides equality,
// and values come from the earliest array that contains a match.
(function() {
QUnit.test('should work with a `comparator`', function(assert) {
assert.expect(1);
var objects = [{ 'x': 1, 'y': 2 }, { 'x': 2, 'y': 1 }],
others = [{ 'x': 1, 'y': 1 }, { 'x': 1, 'y': 2 }],
actual = _.unionWith(objects, others, lodashStable.isEqual);
// others[1] deep-equals objects[0], so only others[0] is added.
assert.deepEqual(actual, [objects[0], objects[1], others[0]]);
});
QUnit.test('should output values from the first possible array', function(assert) {
assert.expect(1);
var objects = [{ 'x': 1, 'y': 1 }],
others = [{ 'x': 1, 'y': 2 }];
var actual = _.unionWith(objects, others, function(a, b) {
return a.x == b.x;
});
assert.deepEqual(actual, [{ 'x': 1, 'y': 1 }]);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('uniq methods');
// Shared tests for all uniq variants. The `sorted*` variants require their
// input to already be sorted, so the object fixture is pre-sorted for them
// and the unsorted-array test is skipped.
lodashStable.each(['uniq', 'uniqBy', 'uniqWith', 'sortedUniq', 'sortedUniqBy'], function(methodName) {
var func = _[methodName],
isSorted = /^sorted/.test(methodName),
objects = [{ 'a': 2 }, { 'a': 3 }, { 'a': 1 }, { 'a': 2 }, { 'a': 3 }, { 'a': 1 }];
if (isSorted) {
objects = _.sortBy(objects, 'a');
}
else {
QUnit.test('`_.' + methodName + '` should return unique values of an unsorted array', function(assert) {
assert.expect(1);
var array = [2, 1, 2];
assert.deepEqual(func(array), [2, 1]);
});
}
QUnit.test('`_.' + methodName + '` should return unique values of a sorted array', function(assert) {
assert.expect(1);
var array = [1, 2, 2];
assert.deepEqual(func(array), [1, 2]);
});
QUnit.test('`_.' + methodName + '` should treat object instances as unique', function(assert) {
assert.expect(1);
// Identity comparison: distinct objects with equal contents are all kept.
assert.deepEqual(func(objects), objects);
});
QUnit.test('`_.' + methodName + '` should treat `-0` as `0`', function(assert) {
assert.expect(1);
// SameValueZero semantics: -0 and 0 are duplicates of each other.
var actual = lodashStable.map(func([-0, 0]), lodashStable.toString);
assert.deepEqual(actual, ['0']);
});
QUnit.test('`_.' + methodName + '` should match `NaN`', function(assert) {
assert.expect(1);
// Unlike `===`, SameValueZero treats NaN as equal to NaN.
assert.deepEqual(func([NaN, NaN]), [NaN]);
});
// The large-array tests below presumably exercise an internal cache-based
// fast path used above a size threshold — confirm against the implementation.
QUnit.test('`_.' + methodName + '` should work with large arrays', function(assert) {
assert.expect(1);
var largeArray = [],
expected = [0, {}, 'a'],
count = Math.ceil(LARGE_ARRAY_SIZE / expected.length);
lodashStable.each(expected, function(value) {
lodashStable.times(count, function() {
largeArray.push(value);
});
});
assert.deepEqual(func(largeArray), expected);
});
QUnit.test('`_.' + methodName + '` should work with large arrays of `-0` as `0`', function(assert) {
assert.expect(1);
var largeArray = lodashStable.times(LARGE_ARRAY_SIZE, function(index) {
return isEven(index) ? -0 : 0;
});
var actual = lodashStable.map(func(largeArray), lodashStable.toString);
assert.deepEqual(actual, ['0']);
});
QUnit.test('`_.' + methodName + '` should work with large arrays of boolean, `NaN`, and nullish values', function(assert) {
assert.expect(1);
var largeArray = [],
expected = [null, undefined, false, true, NaN],
count = Math.ceil(LARGE_ARRAY_SIZE / expected.length);
lodashStable.each(expected, function(value) {
lodashStable.times(count, function() {
largeArray.push(value);
});
});
assert.deepEqual(func(largeArray), expected);
});
QUnit.test('`_.' + methodName + '` should work with large arrays of symbols', function(assert) {
assert.expect(1);
if (Symbol) {
var largeArray = lodashStable.times(LARGE_ARRAY_SIZE, Symbol);
assert.deepEqual(func(largeArray), largeArray);
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.' + methodName + '` should work with large arrays of well-known symbols', function(assert) {
assert.expect(1);
// See http://www.ecma-international.org/ecma-262/6.0/#sec-well-known-symbols.
if (Symbol) {
var expected = [
Symbol.hasInstance, Symbol.isConcatSpreadable, Symbol.iterator,
Symbol.match, Symbol.replace, Symbol.search, Symbol.species,
Symbol.split, Symbol.toPrimitive, Symbol.toStringTag, Symbol.unscopables
];
var largeArray = [],
count = Math.ceil(LARGE_ARRAY_SIZE / expected.length);
// Substitute `{}` for well-known symbols absent in this environment.
expected = lodashStable.map(expected, function(symbol) {
return symbol || {};
});
lodashStable.each(expected, function(value) {
lodashStable.times(count, function() {
largeArray.push(value);
});
});
assert.deepEqual(func(largeArray), expected);
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.' + methodName + '` should distinguish between numbers and numeric strings', function(assert) {
assert.expect(1);
var largeArray = [],
expected = ['2', 2, Object('2'), Object(2)],
count = Math.ceil(LARGE_ARRAY_SIZE / expected.length);
lodashStable.each(expected, function(value) {
lodashStable.times(count, function() {
largeArray.push(value);
});
});
assert.deepEqual(func(largeArray), expected);
});
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.uniq');
(function() {
QUnit.test('should perform an unsorted uniq when used as an iteratee for methods like `_.map`', function(assert) {
assert.expect(1);
// `map` passes (value, index, array); `uniq` must ignore the extra args.
var array = [[2, 1, 2], [1, 2, 1]],
actual = lodashStable.map(array, lodashStable.uniq);
assert.deepEqual(actual, [[2, 1], [1, 2]]);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('uniqBy methods');
// Shared tests for the iteratee-driven uniq variants. `sortedUniqBy`
// requires sorted input, so its fixture is pre-sorted.
lodashStable.each(['uniqBy', 'sortedUniqBy'], function(methodName) {
var func = _[methodName],
isSorted = methodName == 'sortedUniqBy',
objects = [{ 'a': 2 }, { 'a': 3 }, { 'a': 1 }, { 'a': 2 }, { 'a': 3 }, { 'a': 1 }];
if (isSorted) {
objects = _.sortBy(objects, 'a');
}
QUnit.test('`_.' + methodName + '` should work with an `iteratee`', function(assert) {
assert.expect(1);
var expected = isSorted ? [{ 'a': 1 }, { 'a': 2 }, { 'a': 3 }] : objects.slice(0, 3);
var actual = func(objects, function(object) {
return object.a;
});
assert.deepEqual(actual, expected);
});
// FIX: title now carries the `_.<methodName>` prefix like every other test
// in this loop, so failure reports identify which method failed.
QUnit.test('`_.' + methodName + '` should work with large arrays', function(assert) {
assert.expect(2);
var largeArray = lodashStable.times(LARGE_ARRAY_SIZE, function() {
return [1, 2];
});
var actual = func(largeArray, String);
// The first occurrence (same reference) is the one kept.
assert.strictEqual(actual[0], largeArray[0]);
assert.deepEqual(actual, [[1, 2]]);
});
QUnit.test('`_.' + methodName + '` should provide correct `iteratee` arguments', function(assert) {
assert.expect(1);
var args;
func(objects, function() {
args || (args = slice.call(arguments));
});
assert.deepEqual(args, [objects[0]]);
});
QUnit.test('`_.' + methodName + '` should work with `_.property` shorthands', function(assert) {
assert.expect(2);
var expected = isSorted ? [{ 'a': 1 }, { 'a': 2 }, { 'a': 3 }] : objects.slice(0, 3),
actual = func(objects, 'a');
assert.deepEqual(actual, expected);
var arrays = [[2], [3], [1], [2], [3], [1]];
if (isSorted) {
arrays = lodashStable.sortBy(arrays, 0);
}
expected = isSorted ? [[1], [2], [3]] : arrays.slice(0, 3);
actual = func(arrays, 0);
assert.deepEqual(actual, expected);
});
// Non-function iteratees of several types all resolve to a property lookup.
lodashStable.each({
'an array': [0, 'a'],
'an object': { '0': 'a' },
'a number': 0,
'a string': '0'
},
function(iteratee, key) {
QUnit.test('`_.' + methodName + '` should work with ' + key + ' for `iteratee`', function(assert) {
assert.expect(1);
var actual = func([['a'], ['a'], ['b']], iteratee);
assert.deepEqual(actual, [['a'], ['b']]);
});
});
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.uniqWith');
(function() {
QUnit.test('should work with a `comparator`', function(assert) {
assert.expect(1);
var objects = [{ 'x': 1, 'y': 2 }, { 'x': 2, 'y': 1 }, { 'x': 1, 'y': 2 }],
actual = _.uniqWith(objects, lodashStable.isEqual);
assert.deepEqual(actual, [objects[0], objects[1]]);
});
QUnit.test('should preserve the sign of `0`', function(assert) {
assert.expect(1);
// With `_.eq` (SameValueZero) as comparator, -0 and 0 are duplicates,
// but the FIRST encountered value (-0 here) keeps its sign.
var largeArray = lodashStable.times(LARGE_ARRAY_SIZE, function(index) {
return isEven(index) ? -0 : 0;
});
var arrays = [[-0, 0], largeArray],
expected = lodashStable.map(arrays, lodashStable.constant(['-0']));
var actual = lodashStable.map(arrays, function(array) {
return lodashStable.map(_.uniqWith(array, lodashStable.eq), lodashStable.toString);
});
assert.deepEqual(actual, expected);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.uniqueId');
(function() {
QUnit.test('should generate unique ids', function(assert) {
assert.expect(1);
// FIX: the `times` callback previously declared a parameter named `assert`,
// shadowing QUnit's `assert` with the iteration index. The parameter was
// unused, so dropping it preserves behavior and removes the misleading name.
var actual = lodashStable.times(1000, function() {
return _.uniqueId();
});
// All 1000 generated ids must be distinct.
assert.strictEqual(lodashStable.uniq(actual).length, actual.length);
});
QUnit.test('should return a string value when not providing a `prefix`', function(assert) {
assert.expect(1);
assert.strictEqual(typeof _.uniqueId(), 'string');
});
QUnit.test('should coerce the prefix argument to a string', function(assert) {
assert.expect(1);
// Numeric prefixes are stringified and prepended to the counter.
var actual = [_.uniqueId(3), _.uniqueId(2), _.uniqueId(1)];
assert.ok(/3\d+,2\d+,1\d+/.test(actual));
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.unset');
// `_.unset` removes the property at `path` and returns whether the delete
// succeeded (or the path did not exist).
(function() {
QUnit.test('should unset property values', function(assert) {
assert.expect(4);
// Both string and array paths are supported.
lodashStable.each(['a', ['a']], function(path) {
var object = { 'a': 1, 'c': 2 };
assert.strictEqual(_.unset(object, path), true);
assert.deepEqual(object, { 'c': 2 });
});
});
QUnit.test('should preserve the sign of `0`', function(assert) {
assert.expect(1);
// -0 keys must resolve to the '-0' property, not '0'.
var props = [-0, Object(-0), 0, Object(0)],
expected = lodashStable.map(props, lodashStable.constant([true, false]));
var actual = lodashStable.map(props, function(key) {
var object = { '-0': 'a', '0': 'b' };
return [_.unset(object, key), lodashStable.toString(key) in object];
});
assert.deepEqual(actual, expected);
});
QUnit.test('should unset symbol keyed property values', function(assert) {
assert.expect(2);
if (Symbol) {
var object = {};
object[symbol] = 1;
assert.strictEqual(_.unset(object, symbol), true);
assert.notOk(symbol in object);
}
else {
skipAssert(assert, 2);
}
});
QUnit.test('should unset deep property values', function(assert) {
assert.expect(4);
lodashStable.each(['a.b', ['a', 'b']], function(path) {
var object = { 'a': { 'b': null } };
assert.strictEqual(_.unset(object, path), true);
assert.deepEqual(object, { 'a': {} });
});
});
QUnit.test('should handle complex paths', function(assert) {
assert.expect(4);
// Paths with quoted brackets, escapes, negative/decimal keys, and
// embedded newlines must parse the same as the equivalent array path.
var paths = [
'a[-1.23]["[\\"b\\"]"].c[\'[\\\'d\\\']\'][\ne\n][f].g',
['a', '-1.23', '["b"]', 'c', "['d']", '\ne\n', 'f', 'g']
];
lodashStable.each(paths, function(path) {
var object = { 'a': { '-1.23': { '["b"]': { 'c': { "['d']": { '\ne\n': { 'f': { 'g': 8 } } } } } } } };
assert.strictEqual(_.unset(object, path), true);
assert.notOk('g' in object.a[-1.23]['["b"]'].c["['d']"]['\ne\n'].f);
});
});
QUnit.test('should return `true` for nonexistent paths', function(assert) {
assert.expect(5);
// Deleting a missing path is a successful no-op.
var object = { 'a': { 'b': { 'c': null } } };
lodashStable.each(['z', 'a.z', 'a.b.z', 'a.b.c.z'], function(path) {
assert.strictEqual(_.unset(object, path), true);
});
assert.deepEqual(object, { 'a': { 'b': { 'c': null } } });
});
QUnit.test('should not error when `object` is nullish', function(assert) {
assert.expect(1);
var values = [null, undefined],
expected = [[true, true], [true, true]];
var actual = lodashStable.map(values, function(value) {
try {
return [_.unset(value, 'a.b'), _.unset(value, ['a', 'b'])];
} catch (e) {
return e.message;
}
});
assert.deepEqual(actual, expected);
});
QUnit.test('should follow `path` over non-plain objects', function(assert) {
assert.expect(8);
// Paths may traverse prototypes of primitives (Number.prototype,
// String.prototype methods); cleanup restores the prototypes afterwards.
var object = { 'a': '' },
paths = ['constructor.prototype.a', ['constructor', 'prototype', 'a']];
lodashStable.each(paths, function(path) {
numberProto.a = 1;
var actual = _.unset(0, path);
assert.strictEqual(actual, true);
assert.notOk('a' in numberProto);
delete numberProto.a;
});
lodashStable.each(['a.replace.b', ['a', 'replace', 'b']], function(path) {
stringProto.replace.b = 1;
var actual = _.unset(object, path);
assert.strictEqual(actual, true);
assert.notOk('a' in stringProto.replace);
delete stringProto.replace.b;
});
});
QUnit.test('should return `false` for non-configurable properties', function(assert) {
assert.expect(1);
// `delete` on a non-configurable property only fails silently outside
// strict mode, hence the `!isStrict` guard.
var object = {};
if (!isStrict) {
defineProperty(object, 'a', {
'configurable': false,
'enumerable': true,
'writable': true,
'value': 1,
});
assert.strictEqual(_.unset(object, 'a'), false);
}
else {
skipAssert(assert);
}
});
// Prevent regression for https://github.com/lodash/lodash/security/advisories/GHSA-xxjr-mmjv-4gpg
QUnit.test('Security: _.unset should not allow modifying prototype or constructor properties', function(assert) {
assert.expect(3);
var testStr1 = 'ABC';
assert.strictEqual(typeof testStr1.toLowerCase, 'function', 'String.toLowerCase should exist before unset');
_.unset({ foo: 'bar' }, 'foo.__proto__.toLowerCase');
_.unset({ foo: 'bar' }, 'foo.constructor.prototype.toLowerCase');
var testStr2 = 'ABC';
assert.strictEqual(typeof testStr2.toLowerCase, 'function', 'String.toLowerCase should still exist after unset');
assert.strictEqual(testStr2.toLowerCase(), 'abc', 'String.toLowerCase should work as expected');
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.unzipWith');
(function() {
QUnit.test('should unzip arrays combining regrouped elements with `iteratee`', function(assert) {
assert.expect(1);
var array = [[1, 4], [2, 5], [3, 6]];
// Regrouped columns [1,2,3] and [4,5,6] are each reduced by the iteratee.
var actual = _.unzipWith(array, function(a, b, c) {
return a + b + c;
});
assert.deepEqual(actual, [6, 15]);
});
QUnit.test('should provide correct `iteratee` arguments', function(assert) {
assert.expect(1);
var args;
_.unzipWith([[1, 3, 5], [2, 4, 6]], function() {
args || (args = slice.call(arguments));
});
assert.deepEqual(args, [1, 2]);
});
QUnit.test('should perform a basic unzip when `iteratee` is nullish', function(assert) {
assert.expect(1);
var array = [[1, 3], [2, 4]],
// `[, null, undefined]` is deliberately sparse: index 0 tests calling
// with the iteratee argument omitted entirely.
values = [, null, undefined],
expected = lodashStable.map(values, lodashStable.constant(_.unzip(array)));
var actual = lodashStable.map(values, function(value, index) {
return index ? _.unzipWith(array, value) : _.unzipWith(array);
});
assert.deepEqual(actual, expected);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.updateWith');
(function() {
QUnit.test('should work with a `customizer` callback', function(assert) {
assert.expect(1);
// The customizer chooses what to create for missing path segments;
// returning `{}` here forces plain objects instead of arrays.
var actual = _.updateWith({ '0': {} }, '[0][1][2]', stubThree, function(value) {
return lodashStable.isObject(value) ? undefined : {};
});
assert.deepEqual(actual, { '0': { '1': { '2': 3 } } });
});
QUnit.test('should work with a `customizer` that returns `undefined`', function(assert) {
assert.expect(1);
// `undefined` from the customizer falls back to default path creation.
var actual = _.updateWith({}, 'a[0].b.c', stubFour, noop);
assert.deepEqual(actual, { 'a': [{ 'b': { 'c': 4 } }] });
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('update methods');
// Shared tests for `_.update` and `_.updateWith`: the updater receives the
// current value at `path` and its return value replaces it.
lodashStable.each(['update', 'updateWith'], function(methodName) {
var func = _[methodName],
oldValue = 1;
QUnit.test('`_.' + methodName + '` should invoke `updater` with the value on `path` of `object`', function(assert) {
assert.expect(4);
var object = { 'a': [{ 'b': { 'c': oldValue } }] },
expected = oldValue + 1;
lodashStable.each(['a[0].b.c', ['a', '0', 'b', 'c']], function(path) {
func(object, path, function(n) {
assert.strictEqual(n, oldValue);
return ++n;
});
assert.strictEqual(object.a[0].b.c, expected);
// Reset so the array-path iteration sees the same starting state.
object.a[0].b.c = oldValue;
});
});
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.upperCase');
(function() {
QUnit.test('should uppercase as space-separated words', function(assert) {
assert.expect(3);
// Delimiters (dashes, underscores, camelCase humps) become single spaces.
assert.strictEqual(_.upperCase('--foo-bar--'), 'FOO BAR');
assert.strictEqual(_.upperCase('fooBar'), 'FOO BAR');
assert.strictEqual(_.upperCase('__foo_bar__'), 'FOO BAR');
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.upperFirst');
(function() {
QUnit.test('should uppercase only the first character', function(assert) {
assert.expect(3);
// The remainder of the string is left untouched.
assert.strictEqual(_.upperFirst('fred'), 'Fred');
assert.strictEqual(_.upperFirst('Fred'), 'Fred');
assert.strictEqual(_.upperFirst('FRED'), 'FRED');
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('values methods');
// Shared tests for `_.values` (own keys only) and `_.valuesIn`
// (own + inherited keys).
lodashStable.each(['values', 'valuesIn'], function(methodName) {
var func = _[methodName],
isValues = methodName == 'values';
QUnit.test('`_.' + methodName + '` should get string keyed values of `object`', function(assert) {
assert.expect(1);
var object = { 'a': 1, 'b': 2 },
actual = func(object).sort();
assert.deepEqual(actual, [1, 2]);
});
QUnit.test('`_.' + methodName + '` should work with an object that has a `length` property', function(assert) {
assert.expect(1);
// The `length` key must be treated as a normal property, not as making
// the object array-like. (Default .sort() is lexicographic: 2 < 'a'.)
var object = { '0': 'a', '1': 'b', 'length': 2 },
actual = func(object).sort();
assert.deepEqual(actual, [2, 'a', 'b']);
});
QUnit.test('`_.' + methodName + '` should ' + (isValues ? 'not ' : '') + 'include inherited string keyed property values', function(assert) {
assert.expect(1);
function Foo() {
this.a = 1;
}
Foo.prototype.b = 2;
var expected = isValues ? [1] : [1, 2],
actual = func(new Foo).sort();
assert.deepEqual(actual, expected);
});
QUnit.test('`_.' + methodName + '` should work with `arguments` objects', function(assert) {
assert.expect(1);
// `args`/`strictArgs` are presumably file-level `arguments` fixtures
// holding (1, 2, 3) — confirm at file top.
var values = [args, strictArgs],
expected = lodashStable.map(values, lodashStable.constant([1, 2, 3]));
var actual = lodashStable.map(values, function(value) {
return func(value).sort();
});
assert.deepEqual(actual, expected);
});
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.without');
(function() {
QUnit.test('should return the difference of values', function(assert) {
assert.expect(1);
var actual = _.without([2, 1, 2, 3], 1, 2);
assert.deepEqual(actual, [3]);
});
QUnit.test('should use strict equality to determine the values to reject', function(assert) {
assert.expect(2);
// A structurally-equal object literal must NOT match; only the same
// reference is rejected.
var object1 = { 'a': 1 },
object2 = { 'b': 2 },
array = [object1, object2];
assert.deepEqual(_.without(array, { 'a': 1 }), array);
assert.deepEqual(_.without(array, object1), [object2]);
});
QUnit.test('should remove all occurrences of each value from an array', function(assert) {
assert.expect(1);
var array = [1, 2, 3, 1, 2, 3];
assert.deepEqual(_.without(array, 1, 2), [3, 3]);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.words');
// `_.words` splits a string into an array of its words; these tests pin
// down the Unicode, compound-word, and regression behavior of the splitter.
(function() {
QUnit.test('should match words containing Latin Unicode letters', function(assert) {
assert.expect(1);
// `burredLetters` is presumably a file-level fixture of accented Latin
// letters — confirm at file top.
var expected = lodashStable.map(burredLetters, function(letter) {
return [letter];
});
var actual = lodashStable.map(burredLetters, function(letter) {
return _.words(letter);
});
assert.deepEqual(actual, expected);
});
QUnit.test('should support a `pattern`', function(assert) {
assert.expect(2);
// Without the `g` flag only the first match is returned.
assert.deepEqual(_.words('abcd', /ab|cd/g), ['ab', 'cd']);
assert.deepEqual(_.words('abcd', 'ab|cd'), ['ab']);
});
QUnit.test('should work with compound words', function(assert) {
assert.expect(12);
// Boundaries: digit<->letter transitions, lower->Upper humps, and
// UPPER runs followed by a capitalized word (e.g. 'XMLHttp').
assert.deepEqual(_.words('12ft'), ['12', 'ft']);
assert.deepEqual(_.words('aeiouAreVowels'), ['aeiou', 'Are', 'Vowels']);
assert.deepEqual(_.words('enable 6h format'), ['enable', '6', 'h', 'format']);
assert.deepEqual(_.words('enable 24H format'), ['enable', '24', 'H', 'format']);
assert.deepEqual(_.words('isISO8601'), ['is', 'ISO', '8601']);
assert.deepEqual(_.words('LETTERSAeiouAreVowels'), ['LETTERS', 'Aeiou', 'Are', 'Vowels']);
assert.deepEqual(_.words('tooLegit2Quit'), ['too', 'Legit', '2', 'Quit']);
assert.deepEqual(_.words('walk500Miles'), ['walk', '500', 'Miles']);
assert.deepEqual(_.words('xhr2Request'), ['xhr', '2', 'Request']);
assert.deepEqual(_.words('XMLHttp'), ['XML', 'Http']);
assert.deepEqual(_.words('XmlHTTP'), ['Xml', 'HTTP']);
assert.deepEqual(_.words('XmlHttp'), ['Xml', 'Http']);
});
QUnit.test('should work with compound words containing diacritical marks', function(assert) {
assert.expect(3);
assert.deepEqual(_.words('LETTERSÆiouAreVowels'), ['LETTERS', 'Æiou', 'Are', 'Vowels']);
assert.deepEqual(_.words('æiouAreVowels'), ['æiou', 'Are', 'Vowels']);
assert.deepEqual(_.words('æiou2Consonants'), ['æiou', '2', 'Consonants']);
});
QUnit.test('should not treat contractions as separate words', function(assert) {
assert.expect(4);
// Check both straight (') and curly (\u2019) apostrophes, in both
// lowercase and uppercase forms.
var postfixes = ['d', 'll', 'm', 're', 's', 't', 've'];
lodashStable.each(["'", '\u2019'], function(apos) {
lodashStable.times(2, function(index) {
var actual = lodashStable.map(postfixes, function(postfix) {
var string = 'a b' + apos + postfix + ' c';
return _.words(string[index ? 'toUpperCase' : 'toLowerCase']());
});
var expected = lodashStable.map(postfixes, function(postfix) {
var words = ['a', 'b' + apos + postfix, 'c'];
return lodashStable.map(words, function(word) {
return word[index ? 'toUpperCase' : 'toLowerCase']();
});
});
assert.deepEqual(actual, expected);
});
});
});
QUnit.test('should not treat ordinal numbers as separate words', function(assert) {
assert.expect(2);
var ordinals = ['1st', '2nd', '3rd', '4th'];
lodashStable.times(2, function(index) {
var expected = lodashStable.map(ordinals, function(ordinal) {
return [ordinal[index ? 'toUpperCase' : 'toLowerCase']()];
});
var actual = lodashStable.map(expected, function(words) {
return _.words(words[0]);
});
assert.deepEqual(actual, expected);
});
});
QUnit.test('should not treat mathematical operators as words', function(assert) {
assert.expect(1);
// not-sign, plus-minus, multiplication, division signs.
var operators = ['\xac', '\xb1', '\xd7', '\xf7'],
expected = lodashStable.map(operators, stubArray),
actual = lodashStable.map(operators, _.words);
assert.deepEqual(actual, expected);
});
QUnit.test('should not treat punctuation as words', function(assert) {
assert.expect(1);
// Unicode dashes and ellipsis-style punctuation.
var marks = [
'\u2012', '\u2013', '\u2014', '\u2015',
'\u2024', '\u2025', '\u2026',
'\u205d', '\u205e'
];
var expected = lodashStable.map(marks, stubArray),
actual = lodashStable.map(marks, _.words);
assert.deepEqual(actual, expected);
});
QUnit.test('should work as an iteratee for methods like `_.map`', function(assert) {
assert.expect(1);
var strings = lodashStable.map(['a', 'b', 'c'], Object),
actual = lodashStable.map(strings, _.words);
assert.deepEqual(actual, [['a'], ['b'], ['c']]);
});
QUnit.test('should prevent ReDoS', function(assert) {
assert.expect(2);
// Regression guard: a pathological 50k-char word must split correctly
// and in well under a second (catastrophic backtracking would blow past).
var largeWordLen = 50000,
largeWord = _.repeat('A', largeWordLen),
maxMs = 1000,
startTime = lodashStable.now();
assert.deepEqual(_.words(largeWord + 'ÆiouAreVowels'), [largeWord, 'Æiou', 'Are', 'Vowels']);
var endTime = lodashStable.now(),
timeSpent = endTime - startTime;
assert.ok(timeSpent < maxMs, 'operation took ' + timeSpent + 'ms');
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.wrap');
(function() {
QUnit.test('should create a wrapped function', function(assert) {
assert.expect(1);
var p = _.wrap(lodashStable.escape, function(func, text) {
return '<p>' + func(text) + '</p>';
});
assert.strictEqual(p('fred, barney, & pebbles'), '<p>fred, barney, & pebbles</p>');
});
QUnit.test('should provide correct `wrapper` arguments', function(assert) {
assert.expect(1);
var args;
var wrapped = _.wrap(noop, function() {
args || (args = slice.call(arguments));
});
wrapped(1, 2, 3);
assert.deepEqual(args, [noop, 1, 2, 3]);
});
QUnit.test('should use `_.identity` when `wrapper` is nullish', function(assert) {
assert.expect(1);
var values = [, null, undefined],
expected = lodashStable.map(values, stubA);
var actual = lodashStable.map(values, function(value, index) {
var wrapped = index ? _.wrap('a', value) : _.wrap('a');
return wrapped('b', 'c');
});
assert.deepEqual(actual, expected);
});
QUnit.test('should use `this` binding of function', function(assert) {
assert.expect(1);
var p = _.wrap(lodashStable.escape, function(func) {
return '<p>' + func(this.text) + '</p>';
});
var object = { 'p': p, 'text': 'fred, barney, & pebbles' };
assert.strictEqual(object.p(), '<p>fred, barney, & pebbles</p>');
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('xor methods');
// Shared tests for `_.xor`, `_.xorBy`, and `_.xorWith`: symmetric
// difference (values present in an odd subset of the given arrays).
lodashStable.each(['xor', 'xorBy', 'xorWith'], function(methodName) {
var func = _[methodName];
QUnit.test('`_.' + methodName + '` should return the symmetric difference of two arrays', function(assert) {
assert.expect(1);
var actual = func([2, 1], [2, 3]);
assert.deepEqual(actual, [1, 3]);
});
QUnit.test('`_.' + methodName + '` should return the symmetric difference of multiple arrays', function(assert) {
assert.expect(2);
// xor is applied left-to-right across the argument list.
var actual = func([2, 1], [2, 3], [3, 4]);
assert.deepEqual(actual, [1, 4]);
actual = func([1, 2], [2, 1], [1, 2]);
assert.deepEqual(actual, []);
});
QUnit.test('`_.' + methodName + '` should return an empty array when comparing the same array', function(assert) {
assert.expect(1);
var array = [1],
actual = func(array, array, array);
assert.deepEqual(actual, []);
});
QUnit.test('`_.' + methodName + '` should return an array of unique values', function(assert) {
assert.expect(2);
var actual = func([1, 1, 2, 5], [2, 2, 3, 5], [3, 4, 5, 5]);
assert.deepEqual(actual, [1, 4]);
actual = func([1, 1]);
assert.deepEqual(actual, [1]);
});
QUnit.test('`_.' + methodName + '` should return a new array when a single array is given', function(assert) {
assert.expect(1);
var array = [1];
assert.notStrictEqual(func(array), array);
});
QUnit.test('`_.' + methodName + '` should ignore individual secondary arguments', function(assert) {
assert.expect(1);
var array = [0];
assert.deepEqual(func(array, 3, null, { '0': 1 }), array);
});
QUnit.test('`_.' + methodName + '` should ignore values that are not arrays or `arguments` objects', function(assert) {
assert.expect(3);
// `args` is presumably a file-level `arguments` fixture of [1, 2, 3].
var array = [1, 2];
assert.deepEqual(func(array, 3, { '0': 1 }, null), array);
assert.deepEqual(func(null, array, null, [2, 3]), [1, 3]);
assert.deepEqual(func(array, null, args, null), [3]);
});
QUnit.test('`_.' + methodName + '` should return a wrapped value when chaining', function(assert) {
assert.expect(1);
if (!isNpm) {
var wrapped = _([1, 2, 3])[methodName]([5, 2, 1, 4]);
assert.ok(wrapped instanceof _);
}
else {
skipAssert(assert);
}
});
QUnit.test('`_.' + methodName + '` should work when in a lazy sequence before `head` or `last`', function(assert) {
assert.expect(1);
// Large input forces the lazy-evaluation path of the chained sequence.
if (!isNpm) {
var array = lodashStable.range(LARGE_ARRAY_SIZE + 1),
wrapped = _(array).slice(1)[methodName]([LARGE_ARRAY_SIZE, LARGE_ARRAY_SIZE + 1]);
var actual = lodashStable.map(['head', 'last'], function(methodName) {
return wrapped[methodName]();
});
assert.deepEqual(actual, [1, LARGE_ARRAY_SIZE + 1]);
}
else {
skipAssert(assert);
}
});
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.xorBy');
(function() {
QUnit.test('should accept an `iteratee`', function(assert) {
assert.expect(2);
var actual = _.xorBy([2.1, 1.2], [2.3, 3.4], Math.floor);
assert.deepEqual(actual, [1.2, 3.4]);
// String shorthand is treated as a `_.property` iteratee.
actual = _.xorBy([{ 'x': 1 }], [{ 'x': 2 }, { 'x': 1 }], 'x');
assert.deepEqual(actual, [{ 'x': 2 }]);
});
QUnit.test('should provide correct `iteratee` arguments', function(assert) {
assert.expect(1);
var args;
_.xorBy([2.1, 1.2], [2.3, 3.4], function() {
args || (args = slice.call(arguments));
});
// The first value mapped comes from the second array.
assert.deepEqual(args, [2.3]);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.xorWith');
(function() {
QUnit.test('should work with a `comparator`', function(assert) {
assert.expect(1);
var objects = [{ 'x': 1, 'y': 2 }, { 'x': 2, 'y': 1 }],
others = [{ 'x': 1, 'y': 1 }, { 'x': 1, 'y': 2 }],
actual = _.xorWith(objects, others, lodashStable.isEqual);
// objects[0] deep-equals others[1], so both drop out of the result.
assert.deepEqual(actual, [objects[1], others[0]]);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('zipObject methods');
// Shared tests for `_.zipObject` (flat keys) and `_.zipObjectDeep`
// (keys interpreted as property paths), plus prototype-pollution guards.
lodashStable.each(['zipObject', 'zipObjectDeep'], function(methodName) {
var func = _[methodName],
object = { 'barney': 36, 'fred': 40 },
isDeep = methodName == 'zipObjectDeep';
QUnit.test('`_.' + methodName + '` should zip together key/value arrays into an object', function(assert) {
assert.expect(1);
var actual = func(['barney', 'fred'], [36, 40]);
assert.deepEqual(actual, object);
});
QUnit.test('`_.' + methodName + '` should ignore extra `values`', function(assert) {
assert.expect(1);
assert.deepEqual(func(['a'], [1, 2]), { 'a': 1 });
});
QUnit.test('`_.' + methodName + '` should assign `undefined` values for extra `keys`', function(assert) {
assert.expect(1);
assert.deepEqual(func(['a', 'b'], [1]), { 'a': 1, 'b': undefined });
});
QUnit.test('`_.' + methodName + '` should ' + (isDeep ? '' : 'not ') + 'support deep paths', function(assert) {
assert.expect(2);
lodashStable.each(['a.b.c', ['a', 'b', 'c']], function(path, index) {
// Shallow variant stringifies the key: array paths join with commas.
var expected = isDeep ? ({ 'a': { 'b': { 'c': 1 } } }) : (index ? { 'a,b,c': 1 } : { 'a.b.c': 1 });
assert.deepEqual(func([path], [1]), expected);
});
});
QUnit.test('`_.' + methodName + '` should work in a lazy sequence', function(assert) {
assert.expect(1);
if (!isNpm) {
var values = lodashStable.range(LARGE_ARRAY_SIZE),
props = lodashStable.map(values, function(value) { return 'key' + value; }),
actual = _(props)[methodName](values).map(square).filter(isEven).take().value();
assert.deepEqual(actual, _.take(_.filter(_.map(func(props, values), square), isEven)));
}
else {
skipAssert(assert);
}
});
});
// zipObjectDeep prototype pollution
// Regression guards: paths containing __proto__/constructor/prototype
// segments must not write through to Object.prototype or globals.
['__proto__', 'constructor', 'prototype'].forEach(function(keyToTest) {
QUnit.test('zipObjectDeep is not setting ' + keyToTest + ' on global', function(assert) {
assert.expect(1);
_.zipObjectDeep([keyToTest + '.a'], ['newValue']);
// Can't access plain `a` as it's not defined and test fails
assert.notEqual(root.a, 'newValue');
});
QUnit.test('zipObjectDeep is not overwriting ' + keyToTest + ' on vars', function(assert) {
assert.expect(3);
const b = 'oldValue';
_.zipObjectDeep([keyToTest + '.b'], ['newValue']);
assert.equal(b, 'oldValue');
assert.notEqual(root.b, 'newValue');
// ensure nothing was created
assert.notOk(root.b);
});
QUnit.test('zipObjectDeep is not overwriting global.' + keyToTest, function(assert) {
assert.expect(2);
_.zipObjectDeep([root + '.' + keyToTest + '.c'], ['newValue']);
assert.notEqual(root.c, 'newValue');
// ensure nothing was created
assert.notOk(root.c);
});
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.zipWith');
(function() {
QUnit.test('should zip arrays combining grouped elements with `iteratee`', function(assert) {
assert.expect(2);
var array1 = [1, 2, 3],
array2 = [4, 5, 6],
array3 = [7, 8, 9];
var actual = _.zipWith(array1, array2, array3, function(a, b, c) {
return a + b + c;
});
assert.deepEqual(actual, [12, 15, 18]);
// FIX: `actual` was redeclared with a second `var` in the same scope —
// a redundant redeclaration (and a lint/strict-mode error under
// `no-redeclare`); reassign instead. Behavior is unchanged.
actual = _.zipWith(array1, [], function(a, b) {
// Shorter arrays yield `undefined` slots; coerce them to 0.
return a + (b || 0);
});
assert.deepEqual(actual, [1, 2, 3]);
});
QUnit.test('should provide correct `iteratee` arguments', function(assert) {
assert.expect(1);
var args;
_.zipWith([1, 2], [3, 4], [5, 6], function() {
args || (args = slice.call(arguments));
});
assert.deepEqual(args, [1, 3, 5]);
});
QUnit.test('should perform a basic zip when `iteratee` is nullish', function(assert) {
assert.expect(1);
var array1 = [1, 2],
array2 = [3, 4],
// Sparse slot at index 0 tests omitting the iteratee argument entirely.
values = [, null, undefined],
expected = lodashStable.map(values, lodashStable.constant(_.zip(array1, array2)));
var actual = lodashStable.map(values, function(value, index) {
return index ? _.zipWith(array1, array2, value) : _.zipWith(array1, array2);
});
assert.deepEqual(actual, expected);
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash.unzip and lodash.zip');
// zip and unzip are inverses of each other, so both are driven by the same
// fixture pairs [input, expected] run in each direction.
lodashStable.each(['unzip', 'zip'], function(methodName, index) {
var func = _[methodName];
// Normalize the calling convention: `zip` is variadic, `unzip` takes one
// array of arrays — after this bind, `func(pair)` works for both.
func = lodashStable.bind(index ? func.apply : func.call, func, null);
var object = {
'an empty array': [
[],
[]
],
'0-tuples': [
[[], []],
[]
],
'2-tuples': [
[['barney', 'fred'], [36, 40]],
[['barney', 36], ['fred', 40]]
],
'3-tuples': [
[['barney', 'fred'], [36, 40], [false, true]],
[['barney', 36, false], ['fred', 40, true]]
]
};
lodashStable.forOwn(object, function(pair, key) {
QUnit.test('`_.' + methodName + '` should work with ' + key, function(assert) {
assert.expect(2);
var actual = func(pair[0]);
assert.deepEqual(actual, pair[1]);
// Round-trip: applying the method again restores the input.
assert.deepEqual(func(actual), actual.length ? pair[0] : []);
});
});
QUnit.test('`_.' + methodName + '` should work with tuples of different lengths', function(assert) {
assert.expect(4);
// Shorter tuples are padded with real `undefined` elements (the `'0' in`
// / `'2' in` checks assert the slots exist rather than being holes).
var pair = [
[['barney', 36], ['fred', 40, false]],
[['barney', 'fred'], [36, 40], [undefined, false]]
];
var actual = func(pair[0]);
assert.ok('0' in actual[2]);
assert.deepEqual(actual, pair[1]);
actual = func(actual);
assert.ok('2' in actual[0]);
assert.deepEqual(actual, [['barney', 36, undefined], ['fred', 40, false]]);
});
QUnit.test('`_.' + methodName + '` should treat falsey values as empty arrays', function(assert) {
assert.expect(1);
var expected = lodashStable.map(falsey, stubArray);
var actual = lodashStable.map(falsey, function(value) {
return func([value, value, value]);
});
assert.deepEqual(actual, expected);
});
QUnit.test('`_.' + methodName + '` should ignore values that are not arrays or `arguments` objects', function(assert) {
assert.expect(1);
var array = [[1, 2], [3, 4], null, undefined, { '0': 1 }];
assert.deepEqual(func(array), [[1, 3], [2, 4]]);
});
QUnit.test('`_.' + methodName + '` should support consuming its return value', function(assert) {
assert.expect(1);
// Double round-trip must be a fixed point.
var expected = [['barney', 'fred'], [36, 40]];
assert.deepEqual(func(func(func(func(expected)))), expected);
});
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash(...).commit');
(function() {
QUnit.test('should execute the chained sequence and returns the wrapped result', function(assert) {
assert.expect(4);
if (!isNpm) {
var array = [1],
wrapped = _(array).push(2).push(3);
assert.deepEqual(array, [1]);
var otherWrapper = wrapped.commit();
assert.ok(otherWrapper instanceof _);
assert.deepEqual(otherWrapper.value(), [1, 2, 3]);
assert.deepEqual(wrapped.value(), [1, 2, 3, 2, 3]);
}
else {
skipAssert(assert, 4);
}
});
QUnit.test('should track the `__chain__` value of a wrapper', function(assert) {
assert.expect(2);
if (!isNpm) {
var wrapped = _([1]).chain().commit().head();
assert.ok(wrapped instanceof _);
assert.strictEqual(wrapped.value(), 1);
}
else {
skipAssert(assert, 2);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash(...).next');
lodashStable.each([false, true], function(implicit) {
function chain(value) {
return implicit ? _(value) : _.chain(value);
}
var chainType = 'in an ' + (implicit ? 'implicit' : 'explict') + ' chain';
QUnit.test('should follow the iterator protocol ' + chainType, function(assert) {
assert.expect(3);
if (!isNpm) {
var wrapped = chain([1, 2]);
assert.deepEqual(wrapped.next(), { 'done': false, 'value': 1 });
assert.deepEqual(wrapped.next(), { 'done': false, 'value': 2 });
assert.deepEqual(wrapped.next(), { 'done': true, 'value': undefined });
}
else {
skipAssert(assert, 3);
}
});
QUnit.test('should act as an iterable ' + chainType, function(assert) {
assert.expect(2);
if (!isNpm && Symbol && Symbol.iterator) {
var array = [1, 2],
wrapped = chain(array);
assert.strictEqual(wrapped[Symbol.iterator](), wrapped);
assert.deepEqual(lodashStable.toArray(wrapped), array);
}
else {
skipAssert(assert, 2);
}
});
QUnit.test('should use `_.toArray` to generate the iterable result ' + chainType, function(assert) {
assert.expect(3);
if (!isNpm && Array.from) {
var hearts = '\ud83d\udc95',
values = [[1], { 'a': 1 }, hearts];
lodashStable.each(values, function(value) {
var wrapped = chain(value);
assert.deepEqual(Array.from(wrapped), _.toArray(value));
});
}
else {
skipAssert(assert, 3);
}
});
QUnit.test('should reset the iterator correctly ' + chainType, function(assert) {
assert.expect(4);
if (!isNpm && Symbol && Symbol.iterator) {
var array = [1, 2],
wrapped = chain(array);
assert.deepEqual(lodashStable.toArray(wrapped), array);
assert.deepEqual(lodashStable.toArray(wrapped), [], 'produces an empty array for exhausted iterator');
var other = wrapped.filter();
assert.deepEqual(lodashStable.toArray(other), array, 'reset for new chain segments');
assert.deepEqual(lodashStable.toArray(wrapped), [], 'iterator is still exhausted');
}
else {
skipAssert(assert, 4);
}
});
QUnit.test('should work in a lazy sequence ' + chainType, function(assert) {
assert.expect(3);
if (!isNpm && Symbol && Symbol.iterator) {
var array = lodashStable.range(LARGE_ARRAY_SIZE),
predicate = function(value) { values.push(value); return isEven(value); },
values = [],
wrapped = chain(array);
assert.deepEqual(lodashStable.toArray(wrapped), array);
wrapped = wrapped.filter(predicate);
assert.deepEqual(lodashStable.toArray(wrapped), _.filter(array, isEven), 'reset for new lazy chain segments');
assert.deepEqual(values, array, 'memoizes iterator values');
}
else {
skipAssert(assert, 3);
}
});
});
/*--------------------------------------------------------------------------*/
QUnit.module('lodash(...).plant');
(function() {
QUnit.test('should clone the chained sequence planting `value` as the wrapped value', function(assert) {
assert.expect(2);
if (!isNpm) {
var array1 = [5, null, 3, null, 1],
array2 = [10, null, 8, null, 6],
wrapped1 = _(array1).thru(_.compact).map(square).takeRight(2).sort(),
wrapped2 = wrapped1.plant(array2);
assert.deepEqual(wrapped2.value(), [36, 64]);
assert.deepEqual(wrapped1.value(), [1, 9]);
}
else {
skipAssert(assert, 2);
}
});
QUnit.test('should clone `chainAll` settings', function(assert) {
assert.expect(1);
if (!isNpm) {
var array1 = [2, 4],
array2 = [6, 8],
wrapped1 = _(array1).chain().map(square),
wrapped2 = wrapped1.plant(array2);
assert.deepEqual(wrapped2.head().value(), 36);
}
else {
skipAssert(assert);
}
});
QUnit.test('should reset iterator data on cloned sequences', function(assert) {
assert.expect(3);
if (!isNpm && Symbol && Symbol.iterator) {
var array1 = [2, 4],
array2 = [6, 8],
wrapped1 = _(array1).map(square);
assert.deepEqual(lodashStable.toArray(wrapped1), [4, 16]);
assert.deepEqual(lodashStable.toArray(wrapped1), []);
var wrapped2 = wrapped1.plant(array2);
assert.deepEqual(lodashStable.toArray(wrapped2), [36, 64]);
}
else {
skipAssert(assert, 3);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash(...).pop');
(function() {
QUnit.test('should remove elements from the end of `array`', function(assert) {
assert.expect(5);
if (!isNpm) {
var array = [1, 2],
wrapped = _(array);
assert.strictEqual(wrapped.pop(), 2);
assert.deepEqual(wrapped.value(), [1]);
assert.strictEqual(wrapped.pop(), 1);
var actual = wrapped.value();
assert.strictEqual(actual, array);
assert.deepEqual(actual, []);
}
else {
skipAssert(assert, 5);
}
});
QUnit.test('should accept falsey arguments', function(assert) {
assert.expect(1);
if (!isNpm) {
var expected = lodashStable.map(falsey, stubTrue);
var actual = lodashStable.map(falsey, function(value, index) {
try {
var result = index ? _(value).pop() : _().pop();
return result === undefined;
} catch (e) {}
});
assert.deepEqual(actual, expected);
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash(...).push');
(function() {
QUnit.test('should append elements to `array`', function(assert) {
assert.expect(2);
if (!isNpm) {
var array = [1],
wrapped = _(array).push(2, 3),
actual = wrapped.value();
assert.strictEqual(actual, array);
assert.deepEqual(actual, [1, 2, 3]);
}
else {
skipAssert(assert, 2);
}
});
QUnit.test('should accept falsey arguments', function(assert) {
assert.expect(1);
if (!isNpm) {
var expected = lodashStable.map(falsey, stubTrue);
var actual = lodashStable.map(falsey, function(value, index) {
try {
var result = index ? _(value).push(1).value() : _().push(1).value();
return lodashStable.eq(result, value);
} catch (e) {}
});
assert.deepEqual(actual, expected);
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash(...).shift');
(function() {
QUnit.test('should remove elements from the front of `array`', function(assert) {
assert.expect(5);
if (!isNpm) {
var array = [1, 2],
wrapped = _(array);
assert.strictEqual(wrapped.shift(), 1);
assert.deepEqual(wrapped.value(), [2]);
assert.strictEqual(wrapped.shift(), 2);
var actual = wrapped.value();
assert.strictEqual(actual, array);
assert.deepEqual(actual, []);
}
else {
skipAssert(assert, 5);
}
});
QUnit.test('should accept falsey arguments', function(assert) {
assert.expect(1);
if (!isNpm) {
var expected = lodashStable.map(falsey, stubTrue);
var actual = lodashStable.map(falsey, function(value, index) {
try {
var result = index ? _(value).shift() : _().shift();
return result === undefined;
} catch (e) {}
});
assert.deepEqual(actual, expected);
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash(...).sort');
(function() {
QUnit.test('should return the wrapped sorted `array`', function(assert) {
assert.expect(2);
if (!isNpm) {
var array = [3, 1, 2],
wrapped = _(array).sort(),
actual = wrapped.value();
assert.strictEqual(actual, array);
assert.deepEqual(actual, [1, 2, 3]);
}
else {
skipAssert(assert, 2);
}
});
QUnit.test('should accept falsey arguments', function(assert) {
assert.expect(1);
if (!isNpm) {
var expected = lodashStable.map(falsey, stubTrue);
var actual = lodashStable.map(falsey, function(value, index) {
try {
var result = index ? _(value).sort().value() : _().sort().value();
return lodashStable.eq(result, value);
} catch (e) {}
});
assert.deepEqual(actual, expected);
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash(...).splice');
(function() {
QUnit.test('should support removing and inserting elements', function(assert) {
assert.expect(5);
if (!isNpm) {
var array = [1, 2],
wrapped = _(array);
assert.deepEqual(wrapped.splice(1, 1, 3).value(), [2]);
assert.deepEqual(wrapped.value(), [1, 3]);
assert.deepEqual(wrapped.splice(0, 2).value(), [1, 3]);
var actual = wrapped.value();
assert.strictEqual(actual, array);
assert.deepEqual(actual, []);
}
else {
skipAssert(assert, 5);
}
});
QUnit.test('should accept falsey arguments', function(assert) {
assert.expect(1);
if (!isNpm) {
var expected = lodashStable.map(falsey, stubTrue);
var actual = lodashStable.map(falsey, function(value, index) {
try {
var result = index ? _(value).splice(0, 1).value() : _().splice(0, 1).value();
return lodashStable.isEqual(result, []);
} catch (e) {}
});
assert.deepEqual(actual, expected);
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash(...).unshift');
(function() {
QUnit.test('should prepend elements to `array`', function(assert) {
assert.expect(2);
if (!isNpm) {
var array = [3],
wrapped = _(array).unshift(1, 2),
actual = wrapped.value();
assert.strictEqual(actual, array);
assert.deepEqual(actual, [1, 2, 3]);
}
else {
skipAssert(assert, 2);
}
});
QUnit.test('should accept falsey arguments', function(assert) {
assert.expect(1);
if (!isNpm) {
var expected = lodashStable.map(falsey, stubTrue);
var actual = lodashStable.map(falsey, function(value, index) {
try {
var result = index ? _(value).unshift(1).value() : _().unshift(1).value();
return lodashStable.eq(result, value);
} catch (e) {}
});
assert.deepEqual(actual, expected);
}
else {
skipAssert(assert);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash(...).value');
(function() {
QUnit.test('should execute the chained sequence and extract the unwrapped value', function(assert) {
assert.expect(4);
if (!isNpm) {
var array = [1],
wrapped = _(array).push(2).push(3);
assert.deepEqual(array, [1]);
assert.deepEqual(wrapped.value(), [1, 2, 3]);
assert.deepEqual(wrapped.value(), [1, 2, 3, 2, 3]);
assert.deepEqual(array, [1, 2, 3, 2, 3]);
}
else {
skipAssert(assert, 4);
}
});
QUnit.test('should return the `valueOf` result of the wrapped value', function(assert) {
assert.expect(1);
if (!isNpm) {
var wrapped = _(123);
assert.strictEqual(Number(wrapped), 123);
}
else {
skipAssert(assert);
}
});
QUnit.test('should stringify the wrapped value when used by `JSON.stringify`', function(assert) {
assert.expect(1);
if (!isNpm && JSON) {
var wrapped = _([1, 2, 3]);
assert.strictEqual(JSON.stringify(wrapped), '[1,2,3]');
}
else {
skipAssert(assert);
}
});
QUnit.test('should be aliased', function(assert) {
assert.expect(2);
if (!isNpm) {
var expected = _.prototype.value;
assert.strictEqual(_.prototype.toJSON, expected);
assert.strictEqual(_.prototype.valueOf, expected);
}
else {
skipAssert(assert, 2);
}
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash(...) methods that return the wrapped modified array');
(function() {
var funcs = [
'push',
'reverse',
'sort',
'unshift'
];
lodashStable.each(funcs, function(methodName) {
QUnit.test('`_(...).' + methodName + '` should return a new wrapper', function(assert) {
assert.expect(2);
if (!isNpm) {
var array = [1, 2, 3],
wrapped = _(array),
actual = wrapped[methodName]();
assert.ok(actual instanceof _);
assert.notStrictEqual(actual, wrapped);
}
else {
skipAssert(assert, 2);
}
});
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash(...) methods that return new wrapped values');
(function() {
var funcs = [
'castArray',
'concat',
'difference',
'differenceBy',
'differenceWith',
'intersection',
'intersectionBy',
'intersectionWith',
'pull',
'pullAll',
'pullAt',
'sampleSize',
'shuffle',
'slice',
'splice',
'split',
'toArray',
'union',
'unionBy',
'unionWith',
'uniq',
'uniqBy',
'uniqWith',
'words',
'xor',
'xorBy',
'xorWith'
];
lodashStable.each(funcs, function(methodName) {
QUnit.test('`_(...).' + methodName + '` should return a new wrapped value', function(assert) {
assert.expect(2);
if (!isNpm) {
var value = methodName == 'split' ? 'abc' : [1, 2, 3],
wrapped = _(value),
actual = wrapped[methodName]();
assert.ok(actual instanceof _);
assert.notStrictEqual(actual, wrapped);
}
else {
skipAssert(assert, 2);
}
});
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash(...) methods that return unwrapped values');
(function() {
var funcs = [
'add',
'camelCase',
'capitalize',
'ceil',
'clone',
'deburr',
'defaultTo',
'divide',
'endsWith',
'escape',
'escapeRegExp',
'every',
'find',
'floor',
'has',
'hasIn',
'head',
'includes',
'isArguments',
'isArray',
'isArrayBuffer',
'isArrayLike',
'isBoolean',
'isBuffer',
'isDate',
'isElement',
'isEmpty',
'isEqual',
'isError',
'isFinite',
'isFunction',
'isInteger',
'isMap',
'isNaN',
'isNative',
'isNil',
'isNull',
'isNumber',
'isObject',
'isObjectLike',
'isPlainObject',
'isRegExp',
'isSafeInteger',
'isSet',
'isString',
'isUndefined',
'isWeakMap',
'isWeakSet',
'join',
'kebabCase',
'last',
'lowerCase',
'lowerFirst',
'max',
'maxBy',
'min',
'minBy',
'multiply',
'nth',
'pad',
'padEnd',
'padStart',
'parseInt',
'pop',
'random',
'reduce',
'reduceRight',
'repeat',
'replace',
'round',
'sample',
'shift',
'size',
'snakeCase',
'some',
'startCase',
'startsWith',
'subtract',
'sum',
'toFinite',
'toInteger',
'toLower',
'toNumber',
'toSafeInteger',
'toString',
'toUpper',
'trim',
'trimEnd',
'trimStart',
'truncate',
'unescape',
'upperCase',
'upperFirst'
];
lodashStable.each(funcs, function(methodName) {
QUnit.test('`_(...).' + methodName + '` should return an unwrapped value when implicitly chaining', function(assert) {
assert.expect(1);
if (!isNpm) {
var actual = _()[methodName]();
assert.notOk(actual instanceof _);
}
else {
skipAssert(assert);
}
});
QUnit.test('`_(...).' + methodName + '` should return a wrapped value when explicitly chaining', function(assert) {
assert.expect(1);
if (!isNpm) {
var actual = _().chain()[methodName]();
assert.ok(actual instanceof _);
}
else {
skipAssert(assert);
}
});
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('"Arrays" category methods');
(function() {
var args = toArgs([1, null, [3], null, 5]),
sortedArgs = toArgs([1, [3], 5, null, null]),
array = [1, 2, 3, 4, 5, 6];
QUnit.test('should work with `arguments` objects', function(assert) {
assert.expect(30);
function message(methodName) {
return '`_.' + methodName + '` should work with `arguments` objects';
}
assert.deepEqual(_.difference(args, [null]), [1, [3], 5], message('difference'));
assert.deepEqual(_.difference(array, args), [2, 3, 4, 6], '_.difference should work with `arguments` objects as secondary arguments');
assert.deepEqual(_.union(args, [null, 6]), [1, null, [3], 5, 6], message('union'));
assert.deepEqual(_.union(array, args), array.concat([null, [3]]), '_.union should work with `arguments` objects as secondary arguments');
assert.deepEqual(_.compact(args), [1, [3], 5], message('compact'));
assert.deepEqual(_.drop(args, 3), [null, 5], message('drop'));
assert.deepEqual(_.dropRight(args, 3), [1, null], message('dropRight'));
assert.deepEqual(_.dropRightWhile(args,identity), [1, null, [3], null], message('dropRightWhile'));
assert.deepEqual(_.dropWhile(args,identity), [null, [3], null, 5], message('dropWhile'));
assert.deepEqual(_.findIndex(args, identity), 0, message('findIndex'));
assert.deepEqual(_.findLastIndex(args, identity), 4, message('findLastIndex'));
assert.deepEqual(_.flatten(args), [1, null, 3, null, 5], message('flatten'));
assert.deepEqual(_.head(args), 1, message('head'));
assert.deepEqual(_.indexOf(args, 5), 4, message('indexOf'));
assert.deepEqual(_.initial(args), [1, null, [3], null], message('initial'));
assert.deepEqual(_.intersection(args, [1]), [1], message('intersection'));
assert.deepEqual(_.last(args), 5, message('last'));
assert.deepEqual(_.lastIndexOf(args, 1), 0, message('lastIndexOf'));
assert.deepEqual(_.sortedIndex(sortedArgs, 6), 3, message('sortedIndex'));
assert.deepEqual(_.sortedIndexOf(sortedArgs, 5), 2, message('sortedIndexOf'));
assert.deepEqual(_.sortedLastIndex(sortedArgs, 5), 3, message('sortedLastIndex'));
assert.deepEqual(_.sortedLastIndexOf(sortedArgs, 1), 0, message('sortedLastIndexOf'));
assert.deepEqual(_.tail(args, 4), [null, [3], null, 5], message('tail'));
assert.deepEqual(_.take(args, 2), [1, null], message('take'));
assert.deepEqual(_.takeRight(args, 1), [5], message('takeRight'));
assert.deepEqual(_.takeRightWhile(args, identity), [5], message('takeRightWhile'));
assert.deepEqual(_.takeWhile(args, identity), [1], message('takeWhile'));
assert.deepEqual(_.uniq(args), [1, null, [3], 5], message('uniq'));
assert.deepEqual(_.without(args, null), [1, [3], 5], message('without'));
assert.deepEqual(_.zip(args, args), [[1, 1], [null, null], [[3], [3]], [null, null], [5, 5]], message('zip'));
});
QUnit.test('should accept falsey primary arguments', function(assert) {
assert.expect(4);
function message(methodName) {
return '`_.' + methodName + '` should accept falsey primary arguments';
}
assert.deepEqual(_.difference(null, array), [], message('difference'));
assert.deepEqual(_.intersection(null, array), [], message('intersection'));
assert.deepEqual(_.union(null, array), array, message('union'));
assert.deepEqual(_.xor(null, array), array, message('xor'));
});
QUnit.test('should accept falsey secondary arguments', function(assert) {
assert.expect(3);
function message(methodName) {
return '`_.' + methodName + '` should accept falsey secondary arguments';
}
assert.deepEqual(_.difference(array, null), array, message('difference'));
assert.deepEqual(_.intersection(array, null), [], message('intersection'));
assert.deepEqual(_.union(array, null), array, message('union'));
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('"Strings" category methods');
(function() {
var stringMethods = [
'camelCase',
'capitalize',
'escape',
'kebabCase',
'lowerCase',
'lowerFirst',
'pad',
'padEnd',
'padStart',
'repeat',
'snakeCase',
'toLower',
'toUpper',
'trim',
'trimEnd',
'trimStart',
'truncate',
'unescape',
'upperCase',
'upperFirst'
];
lodashStable.each(stringMethods, function(methodName) {
var func = _[methodName];
QUnit.test('`_.' + methodName + '` should return an empty string for empty values', function(assert) {
assert.expect(1);
var values = [, null, undefined, ''],
expected = lodashStable.map(values, stubString);
var actual = lodashStable.map(values, function(value, index) {
return index ? func(value) : func();
});
assert.deepEqual(actual, expected);
});
});
}());
/*--------------------------------------------------------------------------*/
QUnit.module('lodash methods');
(function() {
var allMethods = lodashStable.reject(_.functions(_).sort(), function(methodName) {
return lodashStable.startsWith(methodName, '_');
});
var checkFuncs = [
'after',
'ary',
'before',
'bind',
'curry',
'curryRight',
'debounce',
'defer',
'delay',
'flip',
'flow',
'flowRight',
'memoize',
'negate',
'once',
'partial',
'partialRight',
'rearg',
'rest',
'spread',
'throttle',
'unary'
];
var noBinding = [
'flip',
'memoize',
'negate',
'once',
'overArgs',
'partial',
'partialRight',
'rearg',
'rest',
'spread'
];
var rejectFalsey = [
'tap',
'thru'
].concat(checkFuncs);
var returnArrays = [
'at',
'chunk',
'compact',
'difference',
'drop',
'filter',
'flatten',
'functions',
'initial',
'intersection',
'invokeMap',
'keys',
'map',
'orderBy',
'pull',
'pullAll',
'pullAt',
'range',
'rangeRight',
'reject',
'remove',
'shuffle',
'sortBy',
'tail',
'take',
'times',
'toArray',
'toPairs',
'toPairsIn',
'union',
'uniq',
'values',
'without',
'xor',
'zip'
];
var acceptFalsey = lodashStable.difference(allMethods, rejectFalsey);
QUnit.test('should accept falsey arguments', function(assert) {
assert.expect(316);
var arrays = lodashStable.map(falsey, stubArray);
lodashStable.each(acceptFalsey, function(methodName) {
var expected = arrays,
func = _[methodName];
var actual = lodashStable.map(falsey, function(value, index) {
return index ? func(value) : func();
});
if (methodName == 'noConflict') {
root._ = oldDash;
}
else if (methodName == 'pull' || methodName == 'pullAll') {
expected = falsey;
}
if (lodashStable.includes(returnArrays, methodName) && methodName != 'sample') {
assert.deepEqual(actual, expected, '_.' + methodName + ' returns an array');
}
assert.ok(true, '`_.' + methodName + '` accepts falsey arguments');
});
// Skip tests for missing methods of modularized builds.
lodashStable.each(['chain', 'noConflict', 'runInContext'], function(methodName) {
if (!_[methodName]) {
skipAssert(assert);
}
});
});
QUnit.test('should return an array', function(assert) {
assert.expect(70);
var array = [1, 2, 3];
lodashStable.each(returnArrays, function(methodName) {
var actual,
func = _[methodName];
switch (methodName) {
case 'invokeMap':
actual = func(array, 'toFixed');
break;
case 'sample':
actual = func(array, 1);
break;
default:
actual = func(array);
}
assert.ok(lodashStable.isArray(actual), '_.' + methodName + ' returns an array');
var isPull = methodName == 'pull' || methodName == 'pullAll';
assert.strictEqual(actual === array, isPull, '_.' + methodName + ' should ' + (isPull ? '' : 'not ') + 'return the given array');
});
});
QUnit.test('should throw an error for falsey arguments', function(assert) {
assert.expect(24);
lodashStable.each(rejectFalsey, function(methodName) {
var expected = lodashStable.map(falsey, stubTrue),
func = _[methodName];
var actual = lodashStable.map(falsey, function(value, index) {
var pass = !index && /^(?:backflow|compose|cond|flow(Right)?|over(?:Every|Some)?)$/.test(methodName);
try {
index ? func(value) : func();
} catch (e) {
pass = !pass && (e instanceof TypeError) &&
(!lodashStable.includes(checkFuncs, methodName) || (e.message == FUNC_ERROR_TEXT));
}
return pass;
});
assert.deepEqual(actual, expected, '`_.' + methodName + '` rejects falsey arguments');
});
});
QUnit.test('should use `this` binding of function', function(assert) {
assert.expect(30);
lodashStable.each(noBinding, function(methodName) {
var fn = function() { return this.a; },
func = _[methodName],
isNegate = methodName == 'negate',
object = { 'a': 1 },
expected = isNegate ? false : 1;
var wrapper = func(_.bind(fn, object));
assert.strictEqual(wrapper(), expected, '`_.' + methodName + '` can consume a bound function');
wrapper = _.bind(func(fn), object);
assert.strictEqual(wrapper(), expected, '`_.' + methodName + '` can be bound');
object.wrapper = func(fn);
assert.strictEqual(object.wrapper(), expected, '`_.' + methodName + '` uses the `this` of its parent object');
});
});
QUnit.test('should not contain minified method names (test production builds)', function(assert) {
assert.expect(1);
var shortNames = ['_', 'at', 'eq', 'gt', 'lt'];
assert.ok(lodashStable.every(_.functions(_), function(methodName) {
return methodName.length > 2 || lodashStable.includes(shortNames, methodName);
}));
});
}());
/*--------------------------------------------------------------------------*/
QUnit.config.asyncRetries = 10;
QUnit.config.hidepassed = true;
if (!document) {
QUnit.config.noglobals = true;
QUnit.load();
QUnit.start();
}
}.call(this));
|
javascript
|
github
|
https://github.com/lodash/lodash
|
test/test.js
|
# SPDX-License-Identifier: (GPL-2.0+ OR MIT)
%YAML 1.2
---
$id: http://devicetree.org/schemas/clock/rockchip,rk3228-cru.yaml#
$schema: http://devicetree.org/meta-schemas/core.yaml#
title: Rockchip RK3228 Clock and Reset Unit (CRU)
maintainers:
- Elaine Zhang <zhangqing@rock-chips.com>
- Heiko Stuebner <heiko@sntech.de>
description: |
The RK3228 clock controller generates and supplies clocks to various
controllers within the SoC and also implements a reset controller for SoC
peripherals.
Each clock is assigned an identifier and client nodes can use this identifier
to specify the clock which they consume. All available clocks are defined as
preprocessor macros in the dt-bindings/clock/rk3228-cru.h headers and can be
used in device tree sources. Similar macros exist for the reset sources in
these files.
There are several clocks that are generated outside the SoC. It is expected
that they are defined using standard clock bindings with following
clock-output-names:
- "xin24m" - crystal input - required
- "ext_i2s" - external I2S clock - optional
- "ext_gmac" - external GMAC clock - optional
- "ext_hsadc" - external HSADC clock - optional
- "phy_50m_out" - output clock of the pll in the mac phy
properties:
compatible:
enum:
- rockchip,rk3228-cru
reg:
maxItems: 1
"#clock-cells":
const: 1
"#reset-cells":
const: 1
clocks:
maxItems: 1
clock-names:
const: xin24m
rockchip,grf:
$ref: /schemas/types.yaml#/definitions/phandle
description:
Phandle to the syscon managing the "general register files" (GRF),
if missing pll rates are not changeable, due to the missing pll
lock status.
required:
- compatible
- reg
- "#clock-cells"
- "#reset-cells"
additionalProperties: false
examples:
- |
cru: clock-controller@20000000 {
compatible = "rockchip,rk3228-cru";
reg = <0x20000000 0x1000>;
rockchip,grf = <&grf>;
#clock-cells = <1>;
#reset-cells = <1>;
};
|
unknown
|
github
|
https://github.com/torvalds/linux
|
Documentation/devicetree/bindings/clock/rockchip,rk3228-cru.yaml
|
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.dev/license
*/
import {Component, inject} from '@angular/core';
import {Router, RouterOutlet} from '@angular/router';
/**
 * Root shell of the demo application.
 *
 * Exposes the injected {@link Router} on a public `router` field for use by
 * the external template, and renders routed content via `RouterOutlet`.
 */
@Component({
  selector: 'app-root',
  templateUrl: './app.component.html',
  styleUrls: ['./app.component.scss'],
  imports: [RouterOutlet],
})
export class AppComponent {
  // Constructor parameter property — equivalent to `readonly router = inject(Router)`,
  // just expressed through classic constructor injection.
  constructor(readonly router: Router) {}
}
/**
 * Deliberately empty component with a blank template.
 *
 * Exists only as a fixture: it is rendered inside `OtherAppComponent`'s
 * `@defer` block so Angular DevTools' traversal logic can be exercised
 * when multiple applications are present on one page.
 */
@Component({
  selector: 'empty-component',
  template: ``,
})
export class EmptyComponent {
  // This component is just for demonstration purposes.
  // used to test Angular DevTools traversal logic when multiple applications are present.
}
/**
 * Secondary demo application root.
 *
 * Its template defers loading of `EmptyComponent`, showing a placeholder
 * for a minimum of 2 seconds first — a fixture for testing Angular
 * DevTools traversal across multiple bootstrapped applications.
 */
@Component({
  selector: 'other-app',
  template: `
    @defer {
      <empty-component />
    } @placeholder (minimum 2s) {
      <b>Stuff will be loaded here</b>
    }
  `,
  imports: [EmptyComponent],
})
export class OtherAppComponent {
  // This component is just for demonstration purposes.
  // used to test Angular DevTools traversal logic when multiple applications are present.
}
|
typescript
|
github
|
https://github.com/angular/angular
|
devtools/src/app/app.component.ts
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
RandomExtract.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import random
from processing.core.GeoAlgorithm import GeoAlgorithm
from processing.core.GeoAlgorithmExecutionException import GeoAlgorithmExecutionException
from processing.core.parameters import ParameterSelection
from processing.core.parameters import ParameterVector
from processing.core.parameters import ParameterNumber
from processing.core.outputs import OutputVector
from processing.tools import dataobjects, vector
class RandomExtract(GeoAlgorithm):
    """Randomly extract a subset of features from a vector layer.

    The subset size is given either as an absolute number of features or
    as a percentage of the layer's feature count (chosen via METHOD), and
    the sampled features are written to a new vector output.
    """

    INPUT = 'INPUT'
    OUTPUT = 'OUTPUT'
    METHOD = 'METHOD'
    NUMBER = 'NUMBER'

    METHODS = ['Number of selected features',
               'Percentage of selected features']

    def defineCharacteristics(self):
        """Declare the algorithm's name, group, parameters and output."""
        self.name = 'Random extract'
        self.group = 'Vector selection tools'

        self.addParameter(ParameterVector(self.INPUT,
            self.tr('Input layer'), [ParameterVector.VECTOR_TYPE_ANY]))
        self.addParameter(ParameterSelection(self.METHOD,
            self.tr('Method'), self.METHODS, 0))
        self.addParameter(ParameterNumber(self.NUMBER,
            self.tr('Number/percentage of selected features'), 0, None, 10))

        self.addOutput(OutputVector(self.OUTPUT, self.tr('Extracted (random)')))

    def processAlgorithm(self, progress):
        """Sample the requested features and write them to the output.

        Raises GeoAlgorithmExecutionException when the requested count
        exceeds the layer's feature count (METHOD 0) or the percentage
        exceeds 100 (METHOD 1). Progress is reported via *progress*.
        """
        filename = self.getParameterValue(self.INPUT)
        layer = dataobjects.getObjectFromUri(filename)
        method = self.getParameterValue(self.METHOD)

        features = vector.features(layer)
        featureCount = len(features)
        value = int(self.getParameterValue(self.NUMBER))

        if method == 0:
            if value > featureCount:
                raise GeoAlgorithmExecutionException(
                    self.tr('Selected number is greater than feature count. '
                            'Choose a lower value and try again.'))
        else:
            if value > 100:
                raise GeoAlgorithmExecutionException(
                    self.tr("Percentage can't be greater than 100. Set a "
                            "different value and try again."))
            # Convert the percentage into an absolute feature count.
            value = int(round(value / 100.0, 4) * featureCount)

        # Use a set for O(1) membership tests in the loop below — the
        # original list made the write loop O(n * k). `range` (a list on
        # Python 2) preserves the sampling behaviour of `xrange` while
        # also being valid on Python 3.
        selran = set(random.sample(range(featureCount), value))

        writer = self.getOutputFromName(self.OUTPUT).getVectorWriter(
            layer.pendingFields().toList(), layer.wkbType(), layer.crs())
        for (i, feat) in enumerate(features):
            if i in selran:
                writer.addFeature(feat)
            progress.setPercentage(100 * i / float(featureCount))
        del writer  # flush and finalize the output file
|
unknown
|
codeparrot/codeparrot-clean
| ||
"""
Support code for building Python extensions on Windows.
# NT stuff
# 1. Make sure libpython<version>.a exists for gcc. If not, build it.
# 2. Force windows to use gcc (we're struggling with MSVC and g77 support)
# 3. Force windows to use g77
"""
from __future__ import division, absolute_import, print_function
import os
import sys
import subprocess
import re
# Overwrite certain distutils.ccompiler functions:
import numpy.distutils.ccompiler
if sys.version_info[0] < 3:
from . import log
else:
from numpy.distutils import log
# NT stuff
# 1. Make sure libpython<version>.a exists for gcc. If not, build it.
# 2. Force windows to use gcc (we're struggling with MSVC and g77 support)
# --> this is done in numpy/distutils/ccompiler.py
# 3. Force windows to use g77
import distutils.cygwinccompiler
from distutils.version import StrictVersion
from numpy.distutils.ccompiler import gen_preprocess_options, gen_lib_options
from distutils.unixccompiler import UnixCCompiler
from distutils.msvccompiler import get_build_version as get_build_msvc_version
from distutils.errors import (DistutilsExecError, CompileError,
UnknownFileError)
from numpy.distutils.misc_util import (msvc_runtime_library,
get_build_architecture)
# Useful to generate table of symbols from a dll
# _START matches the objdump header line that precedes the export table.
_START = re.compile(r'\[Ordinal/Name Pointer\] Table')
# NOTE(review): the first group is '([\s*[0-9]*)' -- a character class of
# whitespace, '*', '[' and digits; it looks like it was meant to be
# r'\s*([0-9]*)'.  Kept as-is; generate_def strips the match before int().
_TABLE = re.compile(r'^\s+\[([\s*[0-9]*)\] ([a-zA-Z0-9_]*)')
# the same as cygwin plus some additional parameters
# the same as cygwin plus some additional parameters
class Mingw32CCompiler(distutils.cygwinccompiler.CygwinCCompiler):
    """ A modified MingW32 compiler compatible with an MSVC built Python.
    """

    compiler_type = 'mingw32'

    def __init__ (self,
                  verbose=0,
                  dry_run=0,
                  force=0):
        # Configure gcc/g++ command lines depending on the detected gcc
        # version and target architecture (32- vs 64-bit).

        distutils.cygwinccompiler.CygwinCCompiler.__init__ (self, verbose,
                                                            dry_run, force)

        # we need to support 3.2 which doesn't match the standard
        # get_versions methods regex
        if self.gcc_version is None:
            import re
            p = subprocess.Popen(['gcc', '-dumpversion'], shell=True,
                                 stdout=subprocess.PIPE)
            out_string = p.stdout.read()
            p.stdout.close()
            # NOTE(review): on Python 3 out_string is bytes while the pattern
            # is str, so this branch would raise TypeError -- presumably it is
            # only exercised on Python 2.  Confirm before relying on it.
            result = re.search('(\d+\.\d+)', out_string)
            if result:
                self.gcc_version = StrictVersion(result.group(1))

        # A real mingw32 doesn't need to specify a different entry point,
        # but cygwin 2.91.57 in no-cygwin-mode needs it.
        if self.gcc_version <= "2.91.57":
            entry_point = '--entry _DllMain@12'
        else:
            entry_point = ''

        if self.linker_dll == 'dllwrap':
            # Commented out '--driver-name g++' part that fixes weird
            #   g++.exe: g++: No such file or directory
            # error (mingw 1.0 in Enthon24 tree, gcc-3.4.5).
            # If the --driver-name part is required for some environment
            # then make the inclusion of this part specific to that
            # environment.
            self.linker = 'dllwrap' #  --driver-name g++'
        elif self.linker_dll == 'gcc':
            self.linker = 'g++'

        # **changes: eric jones 4/11/01
        # 1. Check for import library on Windows.  Build if it doesn't exist.

        build_import_library()

        # Check for custom msvc runtime library on Windows. Build if it doesn't exist.
        msvcr_success = build_msvcr_library()
        msvcr_dbg_success = build_msvcr_library(debug=True)
        if msvcr_success or msvcr_dbg_success:
            # add preprocessor statement for using customized msvcr lib
            self.define_macro('NPY_MINGW_USE_CUSTOM_MSVCR')

        # Define the MSVC version as hint for MinGW
        msvcr_version = '0x%03i0' % int(msvc_runtime_library().lstrip('msvcr'))
        self.define_macro('__MSVCRT_VERSION__', msvcr_version)

        # MS_WIN64 should be defined when building for amd64 on windows,
        # but python headers define it only for MS compilers, which has all
        # kind of bad consequences, like using Py_ModuleInit4 instead of
        # Py_ModuleInit4_64, etc... So we add it here
        if get_build_architecture() == 'AMD64':
            if self.gcc_version < "4.0":
                self.set_executables(
                    compiler='gcc -g -DDEBUG -DMS_WIN64 -mno-cygwin -O0 -Wall',
                    compiler_so='gcc -g -DDEBUG -DMS_WIN64 -mno-cygwin -O0'
                                ' -Wall -Wstrict-prototypes',
                    linker_exe='gcc -g -mno-cygwin',
                    linker_so='gcc -g -mno-cygwin -shared')
            else:
                # gcc-4 series releases do not support -mno-cygwin option
                self.set_executables(
                    compiler='gcc -g -DDEBUG -DMS_WIN64 -O0 -Wall',
                    compiler_so='gcc -g -DDEBUG -DMS_WIN64 -O0 -Wall -Wstrict-prototypes',
                    linker_exe='gcc -g',
                    linker_so='gcc -g -shared')
        else:
            if self.gcc_version <= "3.0.0":
                self.set_executables(
                    compiler='gcc -mno-cygwin -O2 -w',
                    compiler_so='gcc -mno-cygwin -mdll -O2 -w'
                                ' -Wstrict-prototypes',
                    linker_exe='g++ -mno-cygwin',
                    linker_so='%s -mno-cygwin -mdll -static %s' %
                              (self.linker, entry_point))
            elif self.gcc_version < "4.0":
                self.set_executables(
                    compiler='gcc -mno-cygwin -O2 -Wall',
                    compiler_so='gcc -mno-cygwin -O2 -Wall'
                                ' -Wstrict-prototypes',
                    linker_exe='g++ -mno-cygwin',
                    linker_so='g++ -mno-cygwin -shared')
            else:
                # gcc-4 series releases do not support -mno-cygwin option
                self.set_executables(compiler='gcc -O2 -Wall',
                                     compiler_so='gcc -O2 -Wall -Wstrict-prototypes',
                                     linker_exe='g++ ',
                                     linker_so='g++ -shared')
        # added for python2.3 support
        # we can't pass it through set_executables because pre 2.2 would fail
        self.compiler_cxx = ['g++']

        # Maybe we should also append -mthreads, but then the finished dlls
        # need another dll (mingwm10.dll see Mingw32 docs) (-mthreads: Support
        # thread-safe exception handling on `Mingw32')

        # no additional libraries needed
        #self.dll_libraries=[]
        return

    # __init__ ()

    def link(self,
             target_desc,
             objects,
             output_filename,
             output_dir,
             libraries,
             library_dirs,
             runtime_library_dirs,
             export_symbols = None,
             debug=0,
             extra_preargs=None,
             extra_postargs=None,
             build_temp=None,
             target_lang=None):
        # Delegates to CygwinCCompiler.link (old gcc) or UnixCCompiler.link,
        # after ensuring the matching MSVC runtime library is linked in.

        # Include the appropiate MSVC runtime library if Python was built
        # with MSVC >= 7.0 (MinGW standard is msvcrt)
        runtime_library = msvc_runtime_library()
        if runtime_library:
            if not libraries:
                libraries = []
            libraries.append(runtime_library)
        args = (self,
                target_desc,
                objects,
                output_filename,
                output_dir,
                libraries,
                library_dirs,
                runtime_library_dirs,
                None, #export_symbols, we do this in our def-file
                debug,
                extra_preargs,
                extra_postargs,
                build_temp,
                target_lang)
        if self.gcc_version < "3.0.0":
            func = distutils.cygwinccompiler.CygwinCCompiler.link
        else:
            func = UnixCCompiler.link
        # Slice args to the arity of the chosen link(), since the two
        # base-class signatures differ.
        func(*args[:func.__code__.co_argcount])
        return

    def object_filenames (self,
                          source_filenames,
                          strip_dir=0,
                          output_dir=''):
        # Map source paths to object-file paths under output_dir; .rc/.res
        # resources are kept with their extension so they compile separately.
        if output_dir is None: output_dir = ''
        obj_names = []
        for src_name in source_filenames:
            # use normcase to make sure '.rc' is really '.rc' and not '.RC'
            (base, ext) = os.path.splitext (os.path.normcase(src_name))

            # added these lines to strip off windows drive letters
            # without it, .o files are placed next to .c files
            # instead of the build directory
            drv, base = os.path.splitdrive(base)
            if drv:
                base = base[1:]

            if ext not in (self.src_extensions + ['.rc', '.res']):
                raise UnknownFileError(
                      "unknown file type '%s' (from '%s')" % \
                      (ext, src_name))
            if strip_dir:
                base = os.path.basename (base)
            if ext == '.res' or ext == '.rc':
                # these need to be compiled to object files
                obj_names.append (os.path.join (output_dir,
                                                base + ext + self.obj_extension))
            else:
                obj_names.append (os.path.join (output_dir,
                                                base + self.obj_extension))
        return obj_names

    # object_filenames ()
def find_python_dll():
    """Locate pythonXY.dll for the running interpreter.

    Searches sys.prefix, sys.prefix/lib and %SYSTEMROOT%/system32 and
    returns the first match; raises ValueError if none is found.
    """
    major, minor = (int(v) for v in sys.version_info[:2])
    dllname = 'python%d%d.dll' % (major, minor)
    print("Looking for %s" % dllname)

    # We can't do much here:
    # - find it in python main dir
    # - in system32,
    # - ortherwise (Sxs), I don't know how to get it.
    candidates = [sys.prefix, os.path.join(sys.prefix, 'lib')]
    sysroot = os.environ.get('SYSTEMROOT')
    if sysroot is not None:
        candidates.append(os.path.join(sysroot, 'system32'))

    for directory in candidates:
        candidate = os.path.join(directory, dllname)
        if os.path.exists(candidate):
            return candidate

    raise ValueError("%s not found in %s" % (dllname, candidates))
def dump_table(dll):
    """Return the raw `objdump.exe -p` report for *dll* as a list of bytes lines."""
    proc = subprocess.Popen(["objdump.exe", "-p", dll], stdout=subprocess.PIPE)
    return proc.stdout.readlines()
def generate_def(dll, dfile):
    """Given a dll file location, get all its exported symbols and dump them
    into the given def file.

    The .def file will be overwritten.  Raises ValueError if objdump's
    output contains no '[Ordinal/Name Pointer] Table' header.
    """
    dump = dump_table(dll)
    # Locate the export table header; for/else raises when it is absent.
    for i in range(len(dump)):
        if _START.match(dump[i].decode()):
            break
    else:
        raise ValueError("Symbol table not found")

    # Collect (ordinal, name) pairs until the first non-matching line.
    syms = []
    for j in range(i + 1, len(dump)):
        m = _TABLE.match(dump[j].decode())
        if m:
            syms.append((int(m.group(1).strip()), m.group(2)))
        else:
            break

    if len(syms) == 0:
        log.warn('No symbols found in %s' % dll)

    # FIX: use a context manager so the handle is closed even if a write
    # fails (the original leaked the file object on error).
    with open(dfile, 'w') as d:
        d.write('LIBRARY %s\n' % os.path.basename(dll))
        d.write(';CODE          PRELOAD MOVEABLE DISCARDABLE\n'.replace('          ', ' '))
        d.write(';DATA          PRELOAD SINGLE\n'.replace('          ', ' '))
        d.write('\nEXPORTS\n')
        for s in syms:
            #d.write('@%d    %s\n' % (s[0], s[1]))
            d.write('%s\n' % s[1])
def find_dll(dll_name):
    """Locate *dll_name* on a Windows system.

    Search order: %WINDIR%/winsxs (matching the build architecture in the
    path), then sys.prefix and every directory on %PATH%.  Returns the
    absolute path of the first match, or None.
    """

    # Maps numpy's architecture names to the arch token used in WinSxS paths.
    arch = {'AMD64' : 'amd64',
            'Intel' : 'x86'}[get_build_architecture()]

    def _find_dll_in_winsxs(dll_name):
        # Walk through the WinSxS directory to find the dll.
        winsxs_path = os.path.join(os.environ['WINDIR'], 'winsxs')
        if not os.path.exists(winsxs_path):
            return None
        for root, dirs, files in os.walk(winsxs_path):
            if dll_name in files and arch in root:
                return os.path.join(root, dll_name)
        return None

    def _find_dll_in_path(dll_name):
        # First, look in the Python directory, then scan PATH for
        # the given dll name.
        for path in [sys.prefix] + os.environ['PATH'].split(';'):
            filepath = os.path.join(path, dll_name)
            if os.path.exists(filepath):
                return os.path.abspath(filepath)

    # Prefer WinSxS; fall back to PATH (which returns None implicitly on miss).
    return _find_dll_in_winsxs(dll_name) or _find_dll_in_path(dll_name)
def build_msvcr_library(debug=False):
    """Build a MinGW import library (lib<msvcrXX>.a) for the MSVC runtime.

    Returns True when the library exists or was built, False when the
    build is skipped (non-Windows, old runtime, dll missing) or failed.
    """
    if os.name != 'nt':
        return False

    runtime = msvc_runtime_library()

    # Skip using a custom library for versions < MSVC 8.0
    if int(runtime.lstrip('msvcr')) < 80:
        log.debug('Skip building msvcr library:'
                  ' custom functionality not present')
        return False

    if debug:
        runtime = runtime + 'd'

    libs_dir = os.path.join(sys.prefix, 'libs')

    # Skip if custom library already exists
    out_file = os.path.join(libs_dir, "lib%s.a" % runtime)
    if os.path.isfile(out_file):
        log.debug('Skip building msvcr library: "%s" exists' %
                  (out_file,))
        return True

    # Find the msvcr dll
    dll_name = runtime + '.dll'
    dll_file = find_dll(dll_name)
    if not dll_file:
        log.warn('Cannot build msvcr library: "%s" not found' %
                 dll_name)
        return False

    def_file = os.path.join(libs_dir, "lib%s.def" % runtime)
    log.info('Building msvcr library: "%s" (from %s)' \
             % (out_file, dll_file))

    # Generate a symbol definition file from the msvcr dll
    generate_def(dll_file, def_file)

    # Create a custom mingw library for the given symbol definitions
    retcode = subprocess.call(['dlltool', '-d', def_file, '-l', out_file])

    # Clean up symbol definitions
    os.remove(def_file)

    return (not retcode)
def build_import_library():
    """Build libpythonXY.a for the current build architecture (Windows only)."""
    if os.name != 'nt':
        return

    arch = get_build_architecture()
    if arch == 'AMD64':
        return _build_import_library_amd64()
    if arch == 'Intel':
        return _build_import_library_x86()
    raise ValueError("Unhandled arch %s" % arch)
def _build_import_library_amd64():
    """Generate libpythonXY.a from pythonXY.dll for 64-bit MinGW builds.

    No-op when the import library already exists.
    """
    dll_file = find_python_dll()

    out_name = "libpython%d%d.a" % tuple(sys.version_info[:2])
    out_file = os.path.join(sys.prefix, 'libs', out_name)
    if os.path.isfile(out_file):
        log.debug('Skip building import library: "%s" exists' %
                  (out_file))
        return

    def_name = "python%d%d.def" % tuple(sys.version_info[:2])
    def_file = os.path.join(sys.prefix, 'libs', def_name)

    log.info('Building import library (arch=AMD64): "%s" (from %s)' %
             (out_file, dll_file))

    generate_def(dll_file, def_file)

    cmd = ['dlltool', '-d', def_file, '-l', out_file]
    # FIX: the original used subprocess.Popen(cmd) without waiting, so the
    # import library could still be missing (or half-written) when the linker
    # ran, and dlltool failures were silently ignored.  check_call waits for
    # completion and raises CalledProcessError on failure.
    subprocess.check_call(cmd)
def _build_import_library_x86():
    """ Build the import libraries for Mingw32-gcc on Windows

    Converts pythonXY.lib (MSVC import library) into libpythonXY.a via
    lib2def + dlltool.  No-op if the .lib is missing or the .a already
    exists; dlltool failures only produce a warning.
    """
    lib_name = "python%d%d.lib" % tuple(sys.version_info[:2])
    lib_file = os.path.join(sys.prefix, 'libs', lib_name)
    out_name = "libpython%d%d.a" % tuple(sys.version_info[:2])
    out_file = os.path.join(sys.prefix, 'libs', out_name)
    if not os.path.isfile(lib_file):
        log.warn('Cannot build import library: "%s" not found' % (lib_file))
        return
    if os.path.isfile(out_file):
        log.debug('Skip building import library: "%s" exists' % (out_file))
        return
    log.info('Building import library (ARCH=x86): "%s"' % (out_file))

    from numpy.distutils import lib2def

    def_name = "python%d%d.def" % tuple(sys.version_info[:2])
    def_file = os.path.join(sys.prefix, 'libs', def_name)
    # Extract the export list from the MSVC .lib with nm and write the .def.
    nm_cmd = '%s %s' % (lib2def.DEFAULT_NM, lib_file)
    nm_output = lib2def.getnm(nm_cmd)
    dlist, flist = lib2def.parse_nm(nm_output)
    lib2def.output_def(dlist, flist, lib2def.DEF_HEADER, open(def_file, 'w'))

    dll_name = "python%d%d.dll" % tuple(sys.version_info[:2])
    args = (dll_name, def_file, out_file)
    cmd = 'dlltool --dllname %s --def %s --output-lib %s' % args
    status = os.system(cmd)
    # for now, fail silently
    if status:
        log.warn('Failed to build import library for gcc. Linking will fail.')
    return
#=====================================
# Dealing with Visual Studio MANIFESTS
#=====================================
# Functions to deal with visual studio manifests. Manifest are a mechanism to
# enforce strong DLL versioning on windows, and has nothing to do with
# distutils MANIFEST. manifests are XML files with version info, and used by
# the OS loader; they are necessary when linking against a DLL not in the
# system path; in particular, official python 2.6 binary is built against the
# MS runtime 9 (the one from VS 2008), which is not available on most windows
# systems; python 2.6 installer does install it in the Win SxS (Side by side)
# directory, but this requires the manifest for this to work. This is a big
# mess, thanks MS for a wonderful system.
# XXX: ideally, we should use exactly the same version as used by python. I
# submitted a patch to get this version, but it was only included for python
# 2.6.1 and above. So for versions below, we use a "best guess".
# Map of MSVC runtime major+minor ('80', '90', ...) to the full assembly
# version string used inside manifests.  Populated only on win32.
_MSVCRVER_TO_FULLVER = {}
if sys.platform == 'win32':
    try:
        import msvcrt
        # I took one version in my SxS directory: no idea if it is the good
        # one, and we can't retrieve it from python
        _MSVCRVER_TO_FULLVER['80'] = "8.0.50727.42"
        _MSVCRVER_TO_FULLVER['90'] = "9.0.21022.8"
        # Value from msvcrt.CRT_ASSEMBLY_VERSION under Python 3.3.0
        # on Windows XP:
        _MSVCRVER_TO_FULLVER['100'] = "10.0.30319.460"
        if hasattr(msvcrt, "CRT_ASSEMBLY_VERSION"):
            # Prefer the exact version the interpreter was built against.
            major, minor, rest = msvcrt.CRT_ASSEMBLY_VERSION.split(".", 2)
            _MSVCRVER_TO_FULLVER[major + minor] = msvcrt.CRT_ASSEMBLY_VERSION
            del major, minor, rest
    except ImportError:
        # If we are here, means python was not built with MSVC. Not sure what
        # to do in that case: manifest building will fail, but it should not be
        # used in that case anyway
        log.warn('Cannot import msvcrt: using manifest will not be possible')
def msvc_manifest_xml(maj, min):
    """Given a major and minor version of the MSVCR, returns the
    corresponding XML file.

    Raises ValueError when the (maj, min) pair has no known full assembly
    version in _MSVCRVER_TO_FULLVER.
    """
    try:
        fullver = _MSVCRVER_TO_FULLVER[str(maj * 10 + min)]
    except KeyError:
        raise ValueError("Version %d,%d of MSVCRT not supported yet" %
                         (maj, min))
    # Don't be fooled, it looks like an XML, but it is not. In particular, it
    # should not have any space before starting, and its size should be
    # divisible by 4, most likely for alignement constraints when the xml is
    # embedded in the binary...
    # This template was copied directly from the python 2.6 binary (using
    # strings.exe from mingw on python.exe).
    template = """\
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel level="asInvoker" uiAccess="false"></requestedExecutionLevel>
</requestedPrivileges>
</security>
</trustInfo>
<dependency>
<dependentAssembly>
<assemblyIdentity type="win32" name="Microsoft.VC%(maj)d%(min)d.CRT" version="%(fullver)s" processorArchitecture="*" publicKeyToken="1fc8b3b9a1e18e3b"></assemblyIdentity>
</dependentAssembly>
</dependency>
</assembly>"""

    return template % {'fullver': fullver, 'maj': maj, 'min': min}
def manifest_rc(name, type='dll'):
    """Return the rc file used to generate the res file which will be embedded
    as manifest for given manifest file name, of given type ('dll' or
    'exe').

    Parameters
    ----------
    name : str
        name of the manifest file to embed
    type : str {'dll', 'exe'}
        type of the binary which will embed the manifest

    Raises
    ------
    ValueError
        for any *type* other than 'dll' or 'exe'.
    """
    # RT_MANIFEST resource id: 2 for a dll, 1 for an exe.
    rc_ids = {'dll': 2, 'exe': 1}
    if type not in rc_ids:
        raise ValueError("Type %s not supported" % type)
    return """\
#include "winuser.h"
%d RT_MANIFEST %s""" % (rc_ids[type], name)
def check_embedded_msvcr_match_linked(msver):
    """msver is the ms runtime version used for the MANIFEST.

    Raises ValueError when the major version of the linked msvcr differs
    from the version about to be embedded; no-op when no msvcr is linked.
    """
    # check msvcr major version are the same for linking and embedding
    msvcv = msvc_runtime_library()
    if not msvcv:
        return
    assert msvcv.startswith("msvcr"), msvcv
    # Dealing with something like "mscvr90" or "mscvr100", the last
    # last digit is the minor release, want int("9") or int("10"):
    major = int(msvcv[5:-1])
    if major != int(msver):
        raise ValueError(
              "Discrepancy between linked msvcr " \
              "(%d) and the one about to be embedded " \
              "(%d)" % (int(msver), major))
def configtest_name(config):
    """Base name (no extension) of the config-test source file."""
    source = config._gen_temp_sourcefile("yo", [], "c")
    return os.path.splitext(os.path.basename(source))[0]
def manifest_name(config):
    """Manifest file name for the config-test executable: <name><exe-ext>.manifest."""
    # Get configest name (including suffix)
    return configtest_name(config) + config.compiler.exe_extension + ".manifest"
def rc_name(config):
    """Resource (.rc) file name for the config test."""
    # Get configtest name (including suffix)
    return configtest_name(config) + ".rc"
def generate_manifest(config):
    """Write the MSVC manifest file for the config test.

    No-op unless Python was built with MSVC.  For MSVC >= 8 first checks
    that the embedded runtime version matches the linked one.  The written
    manifest is registered in config.temp_files for later cleanup.
    """
    msver = get_build_msvc_version()
    if msver is not None:
        if msver >= 8:
            check_embedded_msvcr_match_linked(msver)
        ma = int(msver)
        mi = int((msver - ma) * 10)
        # Write the manifest file
        manxml = msvc_manifest_xml(ma, mi)
        # Register for cleanup before writing so a failed write still gets
        # cleaned up.
        config.temp_files.append(manifest_name(config))
        # FIX: context manager closes the handle even if the write raises
        # (the original leaked the file object on error).
        with open(manifest_name(config), "w") as man:
            man.write(manxml)
|
unknown
|
codeparrot/codeparrot-clean
| ||
/*
* Copyright (c) 2017 Mockito contributors
* This program is made available under the terms of the MIT License.
*/
package org.mockito.quality;
import org.mockito.MockitoSession;
import org.mockito.exceptions.misusing.PotentialStubbingProblem;
import org.mockito.exceptions.misusing.UnnecessaryStubbingException;
import org.mockito.internal.junit.JUnitRule;
import org.mockito.junit.MockitoJUnit;
import org.mockito.junit.MockitoJUnitRunner;
import org.mockito.junit.MockitoRule;
/**
* Configures the "strictness" of Mockito, affecting the behavior of stubbings and verification.
* "Strict stubbing" is a new feature in Mockito 2 that drives cleaner tests and better productivity.
* The easiest way to leverage it is via Mockito's JUnit support ({@link MockitoJUnit}) or Mockito Session ({@link MockitoSession}).
* <p>
* How strictness influences the behavior of the test?
* <ol>
* <li>{@link Strictness#STRICT_STUBS} - ensures clean tests, reduces test code duplication, improves debuggability.
* Best combination of flexibility and productivity. Highly recommended.
* Planned as default for Mockito v4.
* Enable it via {@link MockitoRule}, {@link MockitoJUnitRunner} or {@link MockitoSession}.
* See {@link #STRICT_STUBS} for the details.</li>
* <li>{@link Strictness#LENIENT} - no added behavior.
* The default of Mockito 1.x.
* Recommended only if you cannot use {@link #STRICT_STUBS}</li>
* <li>{@link Strictness#WARN} - cleaner tests but only if you read the console output.
* Reports console warnings about unused stubs
* and stubbing argument mismatch (see {@link org.mockito.quality.MockitoHint}).
* The default behavior of Mockito 2.x when {@link JUnitRule} or {@link MockitoJUnitRunner} are used.
* Recommended if you cannot use {@link #STRICT_STUBS}.
* Introduced originally with Mockito 2 because console warnings was the only compatible way of adding such feature.</li>
* </ol>
*
* @since 2.3.0
*/
public enum Strictness {

    // Constants are declared from least strict to most strict behavior.

    /**
     * No extra strictness. Mockito 1.x behavior.
     * Recommended only if you cannot use {@link #STRICT_STUBS}.
     * <p>
     * For more information see {@link Strictness}.
     *
     * @since 2.3.0
     */
    LENIENT,

    /**
     * Helps keeping tests clean and improves debuggability only if you read the console output.
     * Extra warnings emitted to the console, see {@link MockitoHint}.
     * Default Mockito 2.x behavior.
     * Recommended only if you cannot use {@link #STRICT_STUBS} because console output is ignored most of the time.
     * <p>
     * For more information see {@link Strictness}.
     *
     * @since 2.3.0
     */
    WARN,

    /**
     * Ensures clean tests, reduces test code duplication, improves debuggability.
     * Offers best combination of flexibility and productivity.
     * Highly recommended.
     * Enable it via our JUnit support ({@link MockitoJUnit}) or {@link MockitoSession}.
     * <p>
     * Adds following behavior:
     *  <ul>
     *      <li>Improved productivity: the test fails early when code under test invokes
     *          stubbed method with different arguments (see {@link PotentialStubbingProblem}).</li>
     *      <li>Cleaner tests without unnecessary stubbings:
     *          the test fails when unused stubs are present (see {@link UnnecessaryStubbingException}).</li>
     *      <li>Cleaner, more DRY tests ("Don't Repeat Yourself"):
     *          If you use {@link org.mockito.Mockito#verifyNoMoreInteractions(Object...)}
     *          you no longer need to explicitly verify stubbed invocations.
     *          They are automatically verified for you.</li>
     *  </ul>
     *
     * For more information see {@link Strictness}.
     *
     * @since 2.3.0
     */
    STRICT_STUBS;
}
|
java
|
github
|
https://github.com/mockito/mockito
|
mockito-core/src/main/java/org/mockito/quality/Strictness.java
|
from testfixtures import log_capture
from testsuite.base_fs import BaseFilesystem
from testsuite import config
from core.sessions import SessionURL
from core import modules
import utils
from core import messages
import subprocess
import os
class FindPerms(BaseFilesystem):
    """Integration tests for the 'file_find' module permission filters.

    setUp builds a folder tree with four files whose modes spell out
    execute / write / write+execute / read, then the tests exercise the
    -executable/-writable/-readable flags of file_find with both the
    default (php) and the 'sh_find' vectors.

    NOTE(review): uses assertItemsEqual, i.e. a Python 2 era unittest
    API (renamed assertCountEqual in Python 3) -- confirm the target
    runtime before porting.
    """

    def setUp(self):
        # Fresh volatile session against the remote target.
        self.session = SessionURL(
            self.url,
            self.password,
            volatile = True
        )

        modules.load_modules(self.session)

        # Create the folder tree
        self.folders_abs, self.folders_rel = self.populate_folders()
        self.files_abs, self.files_rel = self.populate_files(
            self.folders_abs,
            [ 'executable', 'writable', 'write-executable', 'readable' ]
        )

        # Change mode of the first file to ---x--x--x 0111 execute
        self.check_call(
            config.cmd_env_chmod_s_s % ('0111', self.files_abs[0]),
            shell=True)

        # Change mode of the second file to --w--w--w- 0222 write
        self.check_call(
            config.cmd_env_chmod_s_s % ('0222', self.files_abs[1]),
            shell=True)

        # Change mode of the third file to --wx-wx-wx 0333 write & execute
        self.check_call(
            config.cmd_env_chmod_s_s % ('0333', self.files_abs[2]),
            shell=True)

        # Change mode of the forth file to -r--r--r-- 0444 read
        self.check_call(
            config.cmd_env_chmod_s_s % ('0444', self.files_abs[3]),
            shell=True)

        # Change mode of the first folder to -rwxrwxrwx 0777 read, write, & execute
        self.check_call(
            config.cmd_env_chmod_s_s % ('0777', self.folders_abs[1]),
            shell=True)

        self.run_argv = modules.loaded['file_find'].run_argv

    def tearDown(self):

        # Reset recursively all the permissions to 0777
        self.check_call(
            config.cmd_env_chmod_s_s % ('-R 0777', self.folders_abs[0]),
            shell=True)

        # Delete deepest-first: each iteration removes the most recently
        # created file (files_abs.pop()) and then the now-empty folder.
        for folder in reversed(self.folders_abs):

            self.check_call(
                config.cmd_env_remove_s % (self.files_abs.pop()),
                shell=True)

            self.check_call(
                config.cmd_env_rmdir_s % (folder),
                shell=True)

    def test_file_find_php(self):
        # Exercise the default (php) find vector.

        # find first writable starting from folder[0]
        self.assertItemsEqual(self.run_argv([ '-writable', '-quit', self.folders_rel[0] ]), [ self.folders_rel[1] ])

        # find first writable file from folder[0]
        self.assertItemsEqual(self.run_argv([ '-writable', '-quit', self.folders_rel[0], '-ftype', 'f' ]), [ self.files_rel[1] ])

        # find all executable starting from folder[0]
        self.assertItemsEqual(self.run_argv([ '-executable', self.folders_rel[0] ]), self.folders_rel + [ self.files_rel[2], self.files_rel[0] ] )

        # find all executable starting from folder[0] that matches the regexp 'te-ex' -> folder[2]
        self.assertItemsEqual(self.run_argv([ '-executable', self.folders_rel[0], 'te-ex' ]), [ self.files_rel[2] ])

        # find all starting from folder[0] that matches the regexp 'TE-EX' -> folder[2]
        self.assertItemsEqual(self.run_argv([ self.folders_rel[0], 'TE-EX' ]), [ self.files_rel[2] ])

        # find all starting from folder[0] that matches the regexp 'TE-EX' and case sensitive -> []
        self.assertItemsEqual(self.run_argv([ '-case', self.folders_rel[0], 'TE-EX' ]), [ '' ])

        # find all readable starting from folder[0]
        self.assertItemsEqual(self.run_argv([ '-readable', self.folders_rel[0] ]), self.folders_rel + [ self.files_rel[3] ] )

        # find all readable starting from folder[0] with a wrong regex -> none
        self.assertItemsEqual(self.run_argv([ '-readable', self.folders_rel[0], 'bogus' ]), [ '' ] )

        # find readable starting from folder[0] with no recursion
        self.assertItemsEqual(self.run_argv([ '-readable', '-no-recursion', self.folders_rel[0] ]), self.folders_rel[:2] )

        # test bogus path
        self.assertEqual(self.run_argv([ '-readable', 'bogus' ]), [''] )

    def test_file_find_sh(self):
        # Same scenarios as test_file_find_php but forcing the 'sh_find' vector.

        # find first writable starting from folder[0]
        self.assertItemsEqual(self.run_argv([ '-writable', '-vector', 'sh_find', '-quit', self.folders_rel[0] ]), [ self.folders_rel[1] ])

        # find all executable starting from folder[0]
        self.assertItemsEqual(self.run_argv([ '-executable', '-vector', 'sh_find', self.folders_rel[0] ]), self.folders_rel + [ self.files_rel[2], self.files_rel[0] ] )

        # find all executable starting from folder[0] that matches the regexp '-' -> folder[2]
        self.assertItemsEqual(self.run_argv([ '-executable', '-vector', 'sh_find', self.folders_rel[0], 'te-ex' ]), [ self.files_rel[2] ])

        # find all starting from folder[0] that matches the regexp 'TE-EX' -> folder[2]
        self.assertItemsEqual(self.run_argv([ '-vector', 'sh_find', self.folders_rel[0], 'TE-EX' ]), [ self.files_rel[2] ])

        # find all starting from folder[0] that matches the regexp 'TE-EX' and case sensitive -> []
        self.assertItemsEqual(self.run_argv([ '-case', '-vector', 'sh_find', self.folders_rel[0], 'TE-EX' ]), [ '' ])

        # find all readable starting from folder[0]
        self.assertItemsEqual(self.run_argv([ '-readable', '-vector', 'sh_find', self.folders_rel[0] ]), self.folders_rel + [ self.files_rel[3] ] )

        # find all readable starting from folder[0] with a wrong regex -> none
        self.assertItemsEqual(self.run_argv([ '-readable', self.folders_rel[0], 'bogus' ]), [ '' ] )

        # find readable starting from folder[0] with no recursion
        self.assertItemsEqual(self.run_argv([ '-readable', '-vector', 'sh_find', '-no-recursion', self.folders_rel[0] ]), self.folders_rel[:2] )

        # test bogus path
        self.assertEqual(self.run_argv([ '-readable', '-vector', 'sh_find', 'bogus' ]), [''] )
|
unknown
|
codeparrot/codeparrot-clean
| ||
# Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
import six
from six.moves.urllib import parse as urlparse
from sahara import conductor as c
from sahara import context
from sahara.plugins import base as plugin_base
from sahara.plugins import provisioning
from sahara.service import quotas
from sahara.utils import general as g
from sahara.utils.notification import sender
from sahara.utils.openstack import base as b
from sahara.utils.openstack import nova
# Module-level handles; OPS is injected at service startup via
# setup_service_api before any cluster operation below is called.
conductor = c.API
CONF = cfg.CONF
LOG = logging.getLogger(__name__)

OPS = None


def setup_service_api(ops):
    """Install the asynchronous ops backend used by the operations below."""
    global OPS
    OPS = ops
# Cluster ops

def get_clusters(**kwargs):
    """List clusters visible in the current context, filtered by **kwargs."""
    ctx = context.ctx()
    return conductor.cluster_get_all(ctx, **kwargs)


def get_cluster(id, show_progress=False):
    """Fetch one cluster by id, optionally including provisioning progress."""
    ctx = context.ctx()
    return conductor.cluster_get(ctx, id, show_progress)
def scale_cluster(id, data):
    """Validate and launch a scaling operation on an existing cluster.

    *data* may contain 'resize_node_groups' (existing groups with new
    counts) and 'add_node_groups' (brand new groups).  On validation
    failure the cluster is rolled back to Active with the error recorded
    and the exception re-raised.  Actual provisioning happens
    asynchronously via OPS.
    """
    context.set_current_cluster_id(id)
    ctx = context.ctx()

    cluster = conductor.cluster_get(ctx, id)
    plugin = plugin_base.PLUGINS.get_plugin(cluster.plugin_name)
    existing_node_groups = data.get('resize_node_groups', [])
    additional_node_groups = data.get('add_node_groups', [])

    # the next map is the main object we will work with
    # to_be_enlarged : {node_group_id: desired_amount_of_instances}
    to_be_enlarged = {}
    for ng in existing_node_groups:
        ng_id = g.find(cluster.node_groups, name=ng['name'])['id']
        to_be_enlarged.update({ng_id: ng['count']})

    additional = construct_ngs_for_scaling(cluster, additional_node_groups)
    # Re-fetch: construct_ngs_for_scaling persisted new node groups.
    cluster = conductor.cluster_get(ctx, cluster)
    _add_ports_for_auto_sg(ctx, cluster, plugin)

    try:
        cluster = g.change_cluster_status(cluster, "Validating")
        quotas.check_scaling(cluster, to_be_enlarged, additional)
        plugin.validate_scaling(cluster, to_be_enlarged, additional)
    except Exception as e:
        with excutils.save_and_reraise_exception():
            # Roll back: drop the empty node groups added above and restore
            # Active status, recording the validation error.
            g.clean_cluster_from_empty_ng(cluster)
            g.change_cluster_status(cluster, "Active", six.text_type(e))

    # If we are here validation is successful.
    # So let's update to_be_enlarged map:
    to_be_enlarged.update(additional)

    # Node groups that are not being scaled keep their current count.
    for node_group in cluster.node_groups:
        if node_group.id not in to_be_enlarged:
            to_be_enlarged[node_group.id] = node_group.count

    OPS.provision_scaled_cluster(id, to_be_enlarged)
    return cluster
def create_cluster(values):
    """Persist a new cluster, validate it and kick off async provisioning.

    On validation failure the cluster is moved to Error with the message
    recorded and the exception re-raised.  Returns the cluster record;
    provisioning continues asynchronously via OPS.
    """
    ctx = context.ctx()
    cluster = conductor.cluster_create(ctx, values)
    context.set_current_cluster_id(cluster.id)
    sender.notify(ctx, cluster.id, cluster.name, "New",
                  "create")
    plugin = plugin_base.PLUGINS.get_plugin(cluster.plugin_name)
    _add_ports_for_auto_sg(ctx, cluster, plugin)

    # validating cluster
    try:
        cluster = g.change_cluster_status(cluster, "Validating")
        quotas.check_cluster(cluster)
        plugin.validate(cluster)
    except Exception as e:
        with excutils.save_and_reraise_exception():
            # Record the failure on the cluster before propagating.
            g.change_cluster_status(cluster, "Error",
                                    six.text_type(e))

    OPS.provision_cluster(cluster.id)

    return cluster
def _add_ports_for_auto_sg(ctx, cluster, plugin):
    """Record plugin-required open ports on node groups using auto security groups."""
    auto_groups = (ng for ng in cluster.node_groups if ng.auto_security_group)
    for node_group in auto_groups:
        update = {'open_ports': plugin.get_open_ports(node_group)}
        conductor.node_group_update(ctx, node_group, update)
def terminate_cluster(id):
    """Mark the cluster as Deleting, start async teardown and notify."""
    context.set_current_cluster_id(id)
    cluster = g.change_cluster_status(id, "Deleting")

    OPS.terminate_cluster(id)

    ctx = context.ctx()
    sender.notify(ctx, cluster.id, cluster.name, cluster.status, "delete")
# ClusterTemplate ops

def get_cluster_templates(**kwargs):
    """List cluster templates, filtered by **kwargs."""
    ctx = context.ctx()
    return conductor.cluster_template_get_all(ctx, **kwargs)


def get_cluster_template(id):
    """Fetch one cluster template by id."""
    ctx = context.ctx()
    return conductor.cluster_template_get(ctx, id)


def create_cluster_template(values):
    """Persist a new cluster template."""
    ctx = context.ctx()
    return conductor.cluster_template_create(ctx, values)


def terminate_cluster_template(id):
    """Delete a cluster template."""
    ctx = context.ctx()
    return conductor.cluster_template_destroy(ctx, id)


def update_cluster_template(id, values):
    """Update an existing cluster template."""
    ctx = context.ctx()
    return conductor.cluster_template_update(ctx, id, values)


# NodeGroupTemplate ops

def get_node_group_templates(**kwargs):
    """List node group templates, filtered by **kwargs."""
    ctx = context.ctx()
    return conductor.node_group_template_get_all(ctx, **kwargs)


def get_node_group_template(id):
    """Fetch one node group template by id."""
    ctx = context.ctx()
    return conductor.node_group_template_get(ctx, id)


def create_node_group_template(values):
    """Persist a new node group template."""
    ctx = context.ctx()
    return conductor.node_group_template_create(ctx, values)


def terminate_node_group_template(id):
    """Delete a node group template."""
    ctx = context.ctx()
    return conductor.node_group_template_destroy(ctx, id)


def update_node_group_template(id, values):
    """Update an existing node group template."""
    ctx = context.ctx()
    return conductor.node_group_template_update(ctx, id, values)
# Plugins ops

def get_plugins():
    """Return every registered provisioning plugin."""
    plugins = plugin_base.PLUGINS.get_plugins(
        base=provisioning.ProvisioningPluginBase)
    return plugins
def get_plugin(plugin_name, version=None):
    """Return a plugin resource, optionally enriched with version details.

    Returns None when the plugin is unknown, or when *version* is given
    but not supported by the plugin.
    """
    plugin = plugin_base.PLUGINS.get_plugin(plugin_name)
    # FIX: the original fell through to `return res` with `res` unbound when
    # the plugin lookup failed, raising UnboundLocalError instead of
    # returning None.
    if not plugin:
        return None

    res = plugin.as_resource()
    if version:
        if version not in plugin.get_versions():
            return None
        configs = plugin.get_configs(version)
        res._info['configs'] = [c.dict for c in configs]
        processes = plugin.get_node_processes(version)
        res._info['node_processes'] = processes
        required_image_tags = plugin.get_required_image_tags(version)
        res._info['required_image_tags'] = required_image_tags
    return res
def convert_to_cluster_template(plugin_name, version, template_name,
                                config_file):
    """Convert a plugin-specific config file into a stored cluster template."""
    plugin = plugin_base.PLUGINS.get_plugin(plugin_name)
    name = urlparse.unquote(template_name)
    return plugin.convert(config_file, plugin_name, version, name,
                          conductor.cluster_template_create)
def construct_ngs_for_scaling(cluster, additional_node_groups):
    """Persist new node groups with count 0; return {ng_id: requested_count}.

    The real instance counts are applied later during provisioning; the
    input dicts are mutated (their 'count' is reset to 0) before storage.
    """
    ctx = context.ctx()
    additional = {}
    for group in additional_node_groups:
        requested = group['count']
        # Store the group empty; instances are added during provisioning.
        group['count'] = 0
        group_id = conductor.node_group_add(ctx, cluster, group)
        additional[group_id] = requested
    return additional
# Image Registry

def get_images(name, tags):
    """List registered images filtered by name and tags."""
    client = nova.client()
    return b.execute_with_retries(client.images.list_registered, name, tags)


def get_image(**kwargs):
    """Fetch an image by id (sole 'id' kwarg) or by find() filters."""
    client = nova.client()
    if list(kwargs) == ['id']:
        return b.execute_with_retries(client.images.get, kwargs['id'])
    return b.execute_with_retries(client.images.find, **kwargs)


def get_registered_image(id):
    """Fetch a registered image by id."""
    client = nova.client()
    return b.execute_with_retries(client.images.get_registered_image, id)


def register_image(image_id, username, description=None):
    """Register an image with a username/description; return the fresh record."""
    client = nova.client()
    b.execute_with_retries(
        client.images.set_description, image_id, username, description)
    return b.execute_with_retries(client.images.get, image_id)


def unregister_image(image_id):
    """Drop the registration info from an image; return the fresh record."""
    client = nova.client()
    b.execute_with_retries(client.images.unset_description, image_id)
    return b.execute_with_retries(client.images.get, image_id)


def add_image_tags(image_id, tags):
    """Add tags to an image; return the fresh record."""
    client = nova.client()
    b.execute_with_retries(client.images.tag, image_id, tags)
    return b.execute_with_retries(client.images.get, image_id)


def remove_image_tags(image_id, tags):
    """Remove tags from an image; return the fresh record."""
    client = nova.client()
    b.execute_with_retries(client.images.untag, image_id, tags)
    return b.execute_with_retries(client.images.get, image_id)
|
unknown
|
codeparrot/codeparrot-clean
| ||
#!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Usage: <win-path-to-pdb.pdb>
This tool will take a PDB on the command line, extract the source files that
were used in building the PDB, query the source server for which repository
and revision these files are at, and then finally write this information back
into the PDB in a format that the debugging tools understand. This allows for
automatic source debugging, as all of the information is contained in the PDB,
and the debugger can go out and fetch the source files.
You most likely want to run these immediately after a build, since the source
input files need to match the generated PDB, and we want the correct
revision information for the exact files that were used for the build.
The following files from a windbg + source server installation are expected
to reside in the same directory as this python script:
dbghelp.dll
pdbstr.exe
srctool.exe
NOTE: Expected to run under a native win32 python, NOT cygwin. All paths are
dealt with as win32 paths, since we have to interact with the Microsoft tools.
"""
import os
import optparse
import sys
import tempfile
import time
import subprocess
import win32api
from collections import namedtuple
# This serves two purposes. First, it acts as a whitelist, and only files
# from repositories listed here will be source indexed. Second, it allows us
# to map from one URL to another, so we can map to external source servers. It
# also indicates if the source for this project will be retrieved in a base64
# encoded format.
# TODO(sebmarchand): Initialize this variable in the main function and pass it
# to the sub functions instead of having a global variable.
#
# Value schema per entry:
#   'url':    fetch template with {file_path} and {revision} placeholders.
#   'base64': True if the server returns base64-encoded file contents.
# (FillRepositoriesMap below also inserts entries whose value is None,
# marking public URLs that need no further remapping.)
REPO_MAP = {
    'http://src.chromium.org/svn': {
        'url': 'https://src.chromium.org/chrome/'
            '{file_path}?revision={revision}',
        'base64': False
    },
    'https://src.chromium.org/svn': {
        'url': 'https://src.chromium.org/chrome/'
            '{file_path}?revision={revision}',
        'base64': False
    }
}
PROJECT_GROUPS = [
# Googlecode SVN projects
{
'projects': [
'angleproject',
'google-breakpad',
'google-cache-invalidation-api',
'google-url',
'googletest',
'leveldb',
'libphonenumber',
'libyuv',
'open-vcdiff',
'ots',
'sawbuck',
'sfntly',
'smhasher',
'v8',
'v8-i18n',
'webrtc',
],
'public_url': 'https://%s.googlecode.com/svn-history/' \
'r{revision}/{file_path}',
'svn_urls': [
'svn://svn-mirror.golo.chromium.org/%s',
'http://src.chromium.org/%s',
'https://src.chromium.org/%s',
'http://%s.googlecode.com/svn',
'https://%s.googlecode.com/svn',
],
},
# Googlecode Git projects
{
'projects': [
'syzygy',
],
'public_url': 'https://%s.googlecode.com/git-history/' \
'{revision}/{file_path}',
'svn_urls': [
'https://code.google.com/p/%s/',
],
},
# Chrome projects
{
'projects': [
'blink',
'chrome',
'multivm',
'native_client',
],
'public_url': 'https://src.chromium.org/%s/' \
'{file_path}?revision={revision}',
'svn_urls': [
'svn://chrome-svn/%s',
'svn://chrome-svn.corp.google.com/%s',
'svn://svn-mirror.golo.chromium.org/%s',
'svn://svn.chromium.org/%s',
],
},
]
# A named tuple used to store the information about a repository.
#
# It contains the following members:
# - repo: The URL of the repository.
# - rev: The revision (or hash) of the current checkout.
# - files: The list of files coming from this repository (the comment
#     previously said "file_list"; the actual field name is "files").
# - root_path: The root path of this checkout.
# - path_prefix: A prefix to apply to the filename of the files coming from
#     this repository (may be None).
RevisionInfo = namedtuple('RevisionInfo',
    ['repo', 'rev', 'files', 'root_path', 'path_prefix'])
def GetCasedFilePath(filename):
  """Return the correctly cased path for a given filename.

  Round-trips through the Win32 short-path form, which makes Windows report
  the on-disk casing of every path component.  Python 2 only (`unicode`).
  """
  return win32api.GetLongPathName(win32api.GetShortPathName(unicode(filename)))
def FillRepositoriesMap():
  """Populate REPO_MAP with every whitelisted project.

  For each project in PROJECT_GROUPS, every SVN/Git checkout URL is mapped
  to the project's public source URL, and the public URL itself is mapped
  to None (meaning: already public, no remapping needed).
  """
  for group in PROJECT_GROUPS:
    public_url = group['public_url']
    for name in group['projects']:
      for checkout_url in group['svn_urls']:
        REPO_MAP[checkout_url % name] = {
            'url': public_url % name,
            'base64': False
        }
      REPO_MAP[public_url % name] = None

# Populate the map once at import time, as before.
FillRepositoriesMap()
def FindFile(filename):
  """Return the full windows path to a file in the same dir as this code."""
  here = os.path.dirname(os.path.join(os.curdir, __file__))
  return os.path.abspath(os.path.join(here, filename))
def RunCommand(*cmd, **kwargs):
  """Runs a command.

  Returns what have been printed to stdout by this command (with the
  trailing newline stripped), or None on failure when raise_on_failure is
  False.

  kwargs:
    raise_on_failure: Indicates if an exception should be raised on failure, if
        set to false then the function will return None.
  Any other kwargs are forwarded to subprocess.Popen; pipes are used for
  stdin/stdout/stderr by default and output is decoded as text
  (universal_newlines).
  """
  kwargs.setdefault('stdin', subprocess.PIPE)
  kwargs.setdefault('stdout', subprocess.PIPE)
  kwargs.setdefault('stderr', subprocess.PIPE)
  kwargs.setdefault('universal_newlines', True)
  raise_on_failure = kwargs.pop('raise_on_failure', True)
  proc = subprocess.Popen(cmd, **kwargs)
  # communicate() waits for the process to exit and drains both pipes.
  ret, err = proc.communicate()
  if proc.returncode != 0:
    if raise_on_failure:
      # Python 2 print statement: this script targets a native win32 python 2.
      print 'Error: %s' % err
      raise subprocess.CalledProcessError(proc.returncode, cmd)
    return
  ret = (ret or '').rstrip('\n')
  return ret
def ExtractSourceFiles(pdb_filename):
  """Return the set of lower-cased source paths referenced by the PDB.

  Runs srctool.exe -r and raises if it produced no output or reported an
  error on its first line.
  """
  output = RunCommand(FindFile('srctool.exe'), '-r', pdb_filename)
  if not output or output.startswith("srctool: "):
    raise Exception("srctool failed: " + output)
  return set(line.lower() for line in output.split('\n') if line)
def ReadSourceStream(pdb_filename):
  """Return the raw 'srcsrv' stream stored in the PDB (empty if absent).

  pdbstr.exe exits with -1 when the stream does not exist, which is not an
  error for our purposes.
  """
  args = [FindFile('pdbstr.exe'), '-r', '-s:srcsrv', '-p:%s' % pdb_filename]
  proc = subprocess.Popen(args,
                          stdout=subprocess.PIPE, stderr=subprocess.PIPE)
  data, _ = proc.communicate()
  ok = proc.returncode in (0, -1) and not data.startswith("pdbstr: ")
  if not ok:
    raise Exception("pdbstr failed: " + data)
  return data
def WriteSourceStream(pdb_filename, data):
  """Write `data` as the 'srcsrv' stream of the PDB.

  pdbstr.exe only reads from a file, so the stream is staged in a temp
  file which is always removed afterwards.

  Raises:
    Exception: if pdbstr reports an error (exit codes 0 and -1 are both
        treated as success, matching ReadSourceStream).
  """
  # Write out the data to a temporary filename that we can pass to pdbstr.
  (fd, fname) = tempfile.mkstemp()
  try:
    f = os.fdopen(fd, "wb")
    f.write(data)
    f.close()
    srctool = subprocess.Popen([FindFile('pdbstr.exe'),
                                '-w', '-s:srcsrv',
                                '-i:%s' % fname,
                                '-p:%s' % pdb_filename],
                               stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, _ = srctool.communicate()
    if ((srctool.returncode != 0 and srctool.returncode != -1) or
        out.startswith("pdbstr: ")):
      raise Exception("pdbstr failed: " + out)
  finally:
    # Bug fix: previously the temp file leaked whenever pdbstr failed.
    os.unlink(fname)
def GetSVNRepoInfo(local_path):
  """Calls svn info to extract the SVN information about a path.

  Returns:
    A dict of the "Key: value" fields printed by `svn info`, or None when
    the path is not part of an SVN checkout.
  """
  # We call svn.bat to make sure and get the depot tools SVN and not cygwin.
  info = RunCommand('svn.bat', 'info', local_path, raise_on_failure=False)
  if not info:
    return
  # Split each line on the FIRST ': ' only.  The previous maxsplit of 2
  # could produce 3 fields for a value that itself contains ': ', which
  # makes dict() raise; maxsplit=1 always yields (key, value) pairs.
  return dict(line.split(': ', 1) for line in info.split('\n') if line)
def ExtractSVNInfo(local_filename):
  """Checks if a file is coming from a svn repository and if so returns some
  information about it.

  Args:
    local_filename: The name of the file that we want to check.

  Returns:
    None if the file doesn't come from a svn repository, otherwise it returns a
    RevisionInfo tuple.
  """
  # Try to get the svn information about this file.
  vals = GetSVNRepoInfo(local_filename)
  if not vals:
    return
  repo = vals['Repository Root']
  if not vals['URL'].startswith(repo):
    raise Exception("URL is not inside of the repository root?!?")
  rev = vals['Revision']
  svn_local_root = os.path.split(local_filename)[0]
  # We need to look at the SVN URL of the current path to handle the case when
  # we do a partial SVN checkout inside another checkout of the same repository.
  # This happens in Chromium where we do some checkout of
  # '/trunk/deps/third_party' in 'src/third_party'.
  svn_root_url = os.path.dirname(vals['URL'])
  # Don't try to list all the files from this repository as this seem to slow
  # down the indexing, instead index one file at a time.
  file_list = [local_filename.replace(svn_local_root, '').lstrip(os.path.sep)]
  # path_prefix is the checkout's directory relative to the repository root
  # (the URL with the repository root stripped off).
  return RevisionInfo(repo=repo, rev=rev, files=file_list,
      root_path=svn_local_root, path_prefix=svn_root_url.replace(repo, ''))
def ExtractGitInfo(local_filename):
  """Checks if a file is coming from a git repository and if so returns some
  information about it.

  Args:
    local_filename: The name of the file that we want to check.

  Returns:
    None if the file doesn't come from a git repository, otherwise it returns a
    RevisionInfo tuple.
  """
  # Starts by checking if this file is coming from a git repository. For that
  # we'll start by calling 'git info' on this file; for this to work we need to
  # make sure that the current working directory is correctly cased. It turns
  # out that even on Windows the casing of the path passed in the |cwd| argument
  # of subprocess.Popen matters and if it's not correctly cased then 'git info'
  # will return None even if the file is coming from a git repository. This
  # is not the case if we're just interested in checking if the path containing
  # |local_filename| is coming from a git repository, in this case the casing
  # doesn't matter.
  local_filename = GetCasedFilePath(local_filename)
  local_file_basename = os.path.basename(local_filename)
  local_file_dir = os.path.dirname(local_filename)
  # 'git log' on the file only succeeds when the file is tracked by git.
  file_info = RunCommand('git.bat', 'log', '-n', '1', local_file_basename,
                         cwd=local_file_dir, raise_on_failure=False)
  if not file_info:
    return
  # Get the revision of the master branch.
  rev = RunCommand('git.bat', 'rev-parse', 'HEAD', cwd=local_file_dir)
  # Get the url of the remote repository.
  repo = RunCommand('git.bat', 'config', '--get', 'remote.origin.url',
                    cwd=local_file_dir)
  # If the repository point to a local directory then we need to run this
  # command one more time from this directory to get the repository url.
  if os.path.isdir(repo):
    repo = RunCommand('git.bat', 'config', '--get', 'remote.origin.url',
                      cwd=repo)
  # Don't use the authenticated path.
  repo = repo.replace('googlesource.com/a/', 'googlesource.com/')
  # Get the relative file path for this file in the git repository
  # (backslash-normalized for Windows path comparison below).
  git_path = RunCommand('git.bat', 'ls-tree', '--full-name', '--name-only',
      'HEAD', local_file_basename, cwd=local_file_dir).replace('/','\\')
  if not git_path:
    return
  # Checkout root = absolute filename minus its repo-relative suffix.
  git_root_path = local_filename.replace(git_path, '')
  if repo not in REPO_MAP:
    # Automatically adds the project coming from a git GoogleCode repository to
    # the repository map. The files from these repositories are accessible via
    # gitiles in a base64 encoded format.
    if 'chromium.googlesource.com' in repo:
      REPO_MAP[repo] = {
          'url': '%s/+/{revision}/{file_path}?format=TEXT' % repo,
          'base64': True
      }
  # Get the list of files coming from this repository.
  git_file_list = RunCommand('git.bat', 'ls-tree', '--full-name', '--name-only',
      'HEAD', '-r', cwd=git_root_path)
  file_list = [x for x in git_file_list.splitlines() if len(x) != 0]
  return RevisionInfo(repo=repo, rev=rev, files=file_list,
      root_path=git_root_path, path_prefix=None)
def IndexFilesFromRepo(local_filename, file_list, output_lines):
  """Checks if a given file is a part of a revision control repository (svn or
  git) and index all the files from this repository if it's the case.

  Args:
    local_filename: The filename of the current file.
    file_list: The list of files that should be indexed.
    output_lines: The source indexing lines that will be appended to the PDB.

  Returns the number of indexed files.
  """
  indexed_files = 0
  # Try to extract the revision info for the current file.
  info = ExtractGitInfo(local_filename)
  if not info:
    info = ExtractSVNInfo(local_filename)
  # NOTE(review): if the file is in neither a git nor an svn checkout, |info|
  # is None and the attribute accesses below raise AttributeError; the caller
  # appears to rely on blacklisting such directories beforehand — confirm.
  repo = info.repo
  rev = info.rev
  files = info.files
  root_path = info.root_path.lower()
  # Checks if we should index this file and if the source that we'll retrieve
  # will be base64 encoded.
  should_index = False
  base_64 = False
  if repo in REPO_MAP:
    should_index = True
    base_64 = REPO_MAP[repo].get('base64')
  else:
    repo = None
  # Iterates over the files from this repo and index them if needed.
  for file_iter in files:
    current_filename = file_iter.lower()
    full_file_path = os.path.normpath(os.path.join(root_path, current_filename))
    # Checks if the file is in the list of files to be indexed.
    if full_file_path in file_list:
      if should_index:
        source_url = ''
        current_file = file_iter
        # Prefix the filename with the prefix for this repository if needed.
        if info.path_prefix:
          current_file = os.path.join(info.path_prefix, current_file)
        source_url = REPO_MAP[repo].get('url').format(revision=rev,
            file_path=os.path.normpath(current_file).replace('\\', '/'))
        # Fields: local path * repo path * revision * URL * optional decoder.
        output_lines.append('%s*%s*%s*%s*%s' % (full_file_path, current_file,
            rev, source_url, 'base64.b64decode' if base_64 else ''))
        indexed_files += 1
      file_list.remove(full_file_path)
  # The input file should have been removed from the list of files to index.
  if indexed_files and local_filename in file_list:
    print '%s shouldn\'t be in the list of files to index anymore.' % \
        local_filename
    # TODO(sebmarchand): Turn this into an exception once I've confirmed that
    # this doesn't happen on the official builder.
    file_list.remove(local_filename)
  return indexed_files
def DirectoryIsUnderPublicVersionControl(local_dir):
  """Return True if local_dir belongs to a Git or SVN checkout."""
  # Git first: 'git config' only succeeds inside a work tree.
  if RunCommand('git.bat', 'config', '--get', 'remote.origin.url',
                cwd=local_dir, raise_on_failure=False):
    return True
  # Otherwise fall back to 'svn info'.
  return bool(GetSVNRepoInfo(local_dir))
def UpdatePDB(pdb_filename, verbose=True, build_dir=None, toolchain_dir=None):
"""Update a pdb file with source information."""
dir_blacklist = { }
if build_dir:
# Blacklisting the build directory allows skipping the generated files, for
# Chromium this makes the indexing ~10x faster.
build_dir = (os.path.normpath(build_dir)).lower()
for directory, _, _ in os.walk(build_dir):
dir_blacklist[directory.lower()] = True
dir_blacklist[build_dir.lower()] = True
if toolchain_dir:
# Blacklisting the directories from the toolchain as we don't have revision
# info for them.
toolchain_dir = (os.path.normpath(toolchain_dir)).lower()
for directory, _, _ in os.walk(build_dir):
dir_blacklist[directory.lower()] = True
dir_blacklist[toolchain_dir.lower()] = True
# Writes the header of the source index stream.
#
# Here's the description of the variables used in the SRC_* macros (those
# variables have to be defined for every source file that we want to index):
# var1: The file path.
# var2: The name of the file without its path.
# var3: The revision or the hash of this file's repository.
# var4: The URL to this file.
# var5: (optional) The python method to call to decode this file, e.g. for
# a base64 encoded file this value should be 'base64.b64decode'.
lines = [
'SRCSRV: ini ------------------------------------------------',
'VERSION=1',
'INDEXVERSION=2',
'VERCTRL=Subversion',
'DATETIME=%s' % time.asctime(),
'SRCSRV: variables ------------------------------------------',
'SRC_EXTRACT_TARGET_DIR=%targ%\%fnbksl%(%var2%)\%var3%',
'SRC_EXTRACT_TARGET=%SRC_EXTRACT_TARGET_DIR%\%fnfile%(%var1%)',
'SRC_EXTRACT_CMD=cmd /c "mkdir "%SRC_EXTRACT_TARGET_DIR%" & python -c '
'"import urllib2, base64;'
'url = \\\"%var4%\\\";'
'u = urllib2.urlopen(url);'
'print %var5%(u.read());" > "%SRC_EXTRACT_TARGET%""',
'SRCSRVTRG=%SRC_EXTRACT_TARGET%',
'SRCSRVCMD=%SRC_EXTRACT_CMD%',
'SRCSRV: source files ---------------------------------------',
]
if ReadSourceStream(pdb_filename):
raise Exception("PDB already has source indexing information!")
filelist = ExtractSourceFiles(pdb_filename)
number_of_files = len(filelist)
indexed_files_total = 0
while filelist:
filename = next(iter(filelist))
filedir = os.path.dirname(filename)
if verbose:
print "[%d / %d] Processing: %s" % (number_of_files - len(filelist),
number_of_files, filename)
# This directory is blacklisted, either because it's not part of a
# repository, or from one we're not interested in indexing.
if dir_blacklist.get(filedir, False):
if verbose:
print " skipping, directory is blacklisted."
filelist.remove(filename)
continue
# Skip the files that don't exist on the current machine.
if not os.path.exists(filename):
filelist.remove(filename)
continue
# Try to index the current file and all the ones coming from the same
# repository.
indexed_files = IndexFilesFromRepo(filename, filelist, lines)
if not indexed_files:
if not DirectoryIsUnderPublicVersionControl(filedir):
dir_blacklist[filedir] = True
if verbose:
print "Adding %s to the blacklist." % filedir
filelist.remove(filename)
continue
indexed_files_total += indexed_files
if verbose:
print " %d files have been indexed." % indexed_files
lines.append('SRCSRV: end ------------------------------------------------')
WriteSourceStream(pdb_filename, '\r\n'.join(lines))
if verbose:
print "%d / %d files have been indexed." % (indexed_files_total,
number_of_files)
def main():
  """Command-line entry point: source-index every PDB given on the command
  line."""
  parser = optparse.OptionParser()
  parser.add_option('-v', '--verbose', action='store_true', default=False)
  parser.add_option('--build-dir', help='The original build directory, if set '
      'all the files present in this directory (or one of its subdirectories) '
      'will be skipped.')
  parser.add_option('--toolchain-dir', help='The directory containing the '
      'toolchain that has been used for this build. If set all the files '
      'present in this directory (or one of its subdirectories) will be '
      'skipped.')
  options, args = parser.parse_args()
  if not args:
    parser.error('Specify a pdb')
  for pdb in args:
    # Bug fix: --toolchain-dir was parsed but never forwarded, so the
    # toolchain blacklist in UpdatePDB was silently ignored.
    UpdatePDB(pdb, options.verbose, options.build_dir, options.toolchain_dir)
  return 0

if __name__ == '__main__':
  sys.exit(main())
|
unknown
|
codeparrot/codeparrot-clean
| ||
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# ===========================================================================
# Copyright (c) 2007-2011 Barend Gehrels, Amsterdam, the Netherlands.
# Copyright (c) 2008-2011 Bruno Lalande, Paris, France.
# Copyright (c) 2009-2011 Mateusz Loskot (mateusz@loskot.net), London, UK
#
# Use, modification and distribution is subject to the Boost Software License,
# Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
# ============================================================================
import os, sys
# Run from the directory containing this script so the relative paths below
# (doxy/, generated/, src/) resolve correctly.
# NOTE(review): if sys.argv[0] has no directory part, dirname() yields '' and
# os.chdir('') raises — presumably the script is always invoked by path.
os.chdir(os.path.dirname(sys.argv[0]))

# The doxygen binary can be overridden through the DOXYGEN env var.
if 'DOXYGEN' in os.environ:
    doxygen_cmd = os.environ['DOXYGEN']
else:
    doxygen_cmd = 'doxygen'

# Likewise for the doxygen_xml2qbk converter.
if 'DOXYGEN_XML2QBK' in os.environ:
    doxygen_xml2qbk_cmd = os.environ['DOXYGEN_XML2QBK']
else:
    doxygen_xml2qbk_cmd = 'doxygen_xml2qbk'

# Command template with two %s placeholders: the doxygen XML basename and
# the output .qbk basename (filled in by the helpers below).
cmd = doxygen_xml2qbk_cmd
cmd = cmd + " --xml doxy/doxygen_output/xml/%s.xml"
cmd = cmd + " --start_include boost/geometry/"
cmd = cmd + " --convenience_header_path ../../../boost/geometry/"
cmd = cmd + " --convenience_headers geometry.hpp,geometries/geometries.hpp,multi/multi.hpp"
cmd = cmd + " --skip_namespace boost::geometry::"
cmd = cmd + " --copyright src/copyright_block.qbk"
cmd = cmd + " > generated/%s.qbk"
def call_doxygen():
    """Run doxygen inside the doxy/ subdirectory to produce the XML output."""
    os.chdir("doxy");
    os.system(doxygen_cmd)
    os.chdir("..")

def group_to_quickbook(section):
    """Convert a doxygen group page to generated/<section>.qbk."""
    os.system(cmd % ("group__" + section.replace("_", "__"), section))

def model_to_quickbook(section):
    """Convert a boost::geometry::model class page to quickbook."""
    os.system(cmd % ("classboost_1_1geometry_1_1model_1_1" + section.replace("_", "__"), section))

def model_to_quickbook2(classname, section):
    """Like model_to_quickbook, but takes the already-mangled class name."""
    os.system(cmd % ("classboost_1_1geometry_1_1model_1_1" + classname, section))

def struct_to_quickbook(section):
    """Convert a boost::geometry struct page to quickbook."""
    os.system(cmd % ("structboost_1_1geometry_1_1" + section.replace("_", "__"), section))

def class_to_quickbook(section):
    """Convert a boost::geometry class page to quickbook."""
    os.system(cmd % ("classboost_1_1geometry_1_1" + section.replace("_", "__"), section))

def strategy_to_quickbook(section):
    """Convert a strategy page; `section` is "<namespace>::<strategy>"."""
    p = section.find("::")
    ns = section[:p]
    strategy = section[p+2:]
    os.system(cmd % ("classboost_1_1geometry_1_1strategy_1_1"
        + ns.replace("_", "__") + "_1_1" + strategy.replace("_", "__"),
        ns + "_" + strategy))

def cs_to_quickbook(section):
    """Convert a coordinate-system (cs::) struct page to quickbook."""
    os.system(cmd % ("structboost_1_1geometry_1_1cs_1_1" + section.replace("_", "__"), section))
# Generate the doxygen XML, then convert every documented entity below to
# quickbook, one .qbk file per entry.
call_doxygen()

# Doxygen group pages (free functions / algorithms).
algorithms = ["append", "assign", "make", "clear"
    , "area", "buffer", "centroid", "convert", "correct"
    , "convex_hull", "difference", "disjoint", "distance"
    , "envelope", "equals", "expand", "for_each", "intersection", "intersects"
    , "length", "num_geometries", "num_interior_rings", "num_points"
    , "overlaps", "perimeter", "reverse", "simplify", "sym_difference"
    , "transform", "union", "unique", "within"]

access_functions = ["get", "set", "exterior_ring", "interior_rings"
    , "num_points", "num_interior_rings", "num_geometries"]

coordinate_systems = ["cartesian", "geographic", "polar", "spherical", "spherical_equatorial"]

# Core metafunctions / tags (doxygen struct pages).
core = ["closure", "coordinate_system", "coordinate_type", "cs_tag"
    , "dimension", "exception", "interior_type"
    , "degree", "radian"
    , "is_radian", "point_order"
    , "point_type", "ring_type", "tag", "tag_cast" ]

exceptions = ["exception", "centroid_exception"];

iterators = ["circular_iterator", "closing_iterator"
    , "ever_circling_iterator"]

models = ["point", "linestring", "box"
    , "polygon", "segment", "ring"
    , "multi_linestring", "multi_point", "multi_polygon", "referring_segment"]

# Strategies, given as "<namespace>::<strategy>".
strategies = ["distance::pythagoras", "distance::haversine"
    , "distance::cross_track", "distance::projected_point"
    , "within::winding", "within::franklin", "within::crossings_multiply"
    , "area::surveyor", "area::huiller"
    , "centroid::bashein_detmer", "centroid::average"
    , "convex_hull::graham_andrew"
    , "simplify::douglas_peucker"
    , "side::side_by_triangle", "side::side_by_cross_track", "side::spherical_side_formula"
    , "transform::inverse_transformer", "transform::map_transformer"
    , "transform::rotate_transformer", "transform::scale_transformer"
    , "transform::translate_transformer", "transform::ublas_transformer"
    ]

views = ["box_view", "segment_view"
    , "closeable_view", "reversible_view", "identity_view"]

for i in algorithms:
    group_to_quickbook(i)

for i in access_functions:
    group_to_quickbook(i)

for i in coordinate_systems:
    cs_to_quickbook(i)

for i in core:
    struct_to_quickbook(i)

for i in exceptions:
    class_to_quickbook(i)

for i in iterators:
    struct_to_quickbook(i)

for i in models:
    model_to_quickbook(i)

for i in strategies:
    strategy_to_quickbook(i)

for i in views:
    struct_to_quickbook(i)

# Entities that need an explicit mangled name or one-off group conversion.
model_to_quickbook2("d2_1_1point__xy", "point_xy")

group_to_quickbook("arithmetic")
group_to_quickbook("register")
group_to_quickbook("enum")

# Finally build the documentation with bjam.
os.system("bjam")
|
unknown
|
codeparrot/codeparrot-clean
| ||
# Copyright (C) 2003-2007 Robey Pointer <robeypointer@gmail.com>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
"""
Resource manager.
"""
import weakref
class ResourceManager(object):
    """Close registered resources when their owning objects are collected.

    A safer alternative to Python's ``__del__``: objects caught in
    reference cycles may never have ``__del__`` run, but a weakref
    callback still fires once the object is finally collected.  Each
    registered resource has its ``close()`` method called exactly once,
    even if several objects register it (the last registration wins).
    """

    def __init__(self):
        # Maps id(resource) -> weakref to the owning object.  Holding the
        # weakref here keeps it alive long enough for its callback to run.
        self._table = {}

    def register(self, obj, resource):
        """Arrange for ``resource.close()`` to run when ``obj`` is collected.

        Exceptions raised by ``close()`` are swallowed.

        :param object obj: the object whose lifetime is tracked
        :param object resource:
            the resource to close when the object is collected
        """
        def _on_collect(ref):
            try:
                resource.close()
            except:
                pass
            del self._table[id(resource)]

        self._table[id(resource)] = weakref.ref(obj, _on_collect)
# Module-level singleton: the class name is deliberately shadowed by one
# shared instance, so callers use `ResourceManager.register(...)` directly.
ResourceManager = ResourceManager()
|
unknown
|
codeparrot/codeparrot-clean
| ||
# This module defines many standard named colors as (r, g, b) tuples whose
# components lie in the range [0.0, 1.0]. The values are intended to match
# exactly the colors defined in colors.tcl.
# Whites
antique_white = (0.9804, 0.9216, 0.8431)
azure = (0.9412, 1.0000, 1.0000)
bisque = (1.0000, 0.8941, 0.7686)
blanched_almond = (1.0000, 0.9216, 0.8039)
cornsilk = (1.0000, 0.9725, 0.8627)
eggshell = (0.9900, 0.9000, 0.7900)
floral_white = (1.0000, 0.9804, 0.9412)
gainsboro = (0.8627, 0.8627, 0.8627)
ghost_white = (0.9725, 0.9725, 1.0000)
honeydew = (0.9412, 1.0000, 0.9412)
ivory = (1.0000, 1.0000, 0.9412)
lavender = (0.9020, 0.9020, 0.9804)
lavender_blush = (1.0000, 0.9412, 0.9608)
lemon_chiffon = (1.0000, 0.9804, 0.8039)
linen = (0.9804, 0.9412, 0.9020)
mint_cream = (0.9608, 1.0000, 0.9804)
misty_rose = (1.0000, 0.8941, 0.8824)
moccasin = (1.0000, 0.8941, 0.7098)
navajo_white = (1.0000, 0.8706, 0.6784)
old_lace = (0.9922, 0.9608, 0.9020)
papaya_whip = (1.0000, 0.9373, 0.8353)
peach_puff = (1.0000, 0.8549, 0.7255)
seashell = (1.0000, 0.9608, 0.9333)
snow = (1.0000, 0.9804, 0.9804)
thistle = (0.8471, 0.7490, 0.8471)
titanium_white = (0.9900, 1.0000, 0.9400)
wheat = (0.9608, 0.8706, 0.7020)
white = (1.0000, 1.0000, 1.0000)
white_smoke = (0.9608, 0.9608, 0.9608)
zinc_white = (0.9900, 0.9700, 1.0000)
# Greys
cold_grey = (0.5000, 0.5400, 0.5300)
dim_grey = (0.4118, 0.4118, 0.4118)
grey = (0.7529, 0.7529, 0.7529)
light_grey = (0.8275, 0.8275, 0.8275)
slate_grey = (0.4392, 0.5020, 0.5647)
slate_grey_dark = (0.1843, 0.3098, 0.3098)
slate_grey_light = (0.4667, 0.5333, 0.6000)
warm_grey = (0.5000, 0.5000, 0.4100)
# Blacks
black = (0.0000, 0.0000, 0.0000)
ivory_black = (0.1600, 0.1400, 0.1300)
lamp_black = (0.1800, 0.2800, 0.2300)
# Reds
alizarin_crimson = (0.8900, 0.1500, 0.2100)
brick = (0.6100, 0.4000, 0.1200)
cadmium_red_deep = (0.8900, 0.0900, 0.0500)
coral = (1.0000, 0.4980, 0.3137)
coral_light = (0.9412, 0.5020, 0.5020)
deep_pink = (1.0000, 0.0784, 0.5765)
english_red = (0.8300, 0.2400, 0.1000)
firebrick = (0.6980, 0.1333, 0.1333)
geranium_lake = (0.8900, 0.0700, 0.1900)
hot_pink = (1.0000, 0.4118, 0.7059)
indian_red = (0.6900, 0.0900, 0.1200)
light_salmon = (1.0000, 0.6275, 0.4784)
madder_lake_deep = (0.8900, 0.1800, 0.1900)
maroon = (0.6902, 0.1882, 0.3765)
pink = (1.0000, 0.7529, 0.7961)
pink_light = (1.0000, 0.7137, 0.7569)
raspberry = (0.5300, 0.1500, 0.3400)
red = (1.0000, 0.0000, 0.0000)
rose_madder = (0.8900, 0.2100, 0.2200)
salmon = (0.9804, 0.5020, 0.4471)
tomato = (1.0000, 0.3882, 0.2784)
venetian_red = (0.8300, 0.1000, 0.1200)
# Browns
beige = (0.6400, 0.5800, 0.5000)
brown = (0.5000, 0.1647, 0.1647)
brown_madder = (0.8600, 0.1600, 0.1600)
brown_ochre = (0.5300, 0.2600, 0.1200)
burlywood = (0.8706, 0.7216, 0.5294)
burnt_sienna = (0.5400, 0.2100, 0.0600)
burnt_umber = (0.5400, 0.2000, 0.1400)
chocolate = (0.8235, 0.4118, 0.1176)
deep_ochre = (0.4500, 0.2400, 0.1000)
flesh = (1.0000, 0.4900, 0.2500)
flesh_ochre = (1.0000, 0.3400, 0.1300)
gold_ochre = (0.7800, 0.4700, 0.1500)
greenish_umber = (1.0000, 0.2400, 0.0500)
khaki = (0.9412, 0.9020, 0.5490)
khaki_dark = (0.7412, 0.7176, 0.4196)
light_beige = (0.9608, 0.9608, 0.8627)
peru = (0.8039, 0.5216, 0.2471)
rosy_brown = (0.7373, 0.5608, 0.5608)
raw_sienna = (0.7800, 0.3800, 0.0800)
raw_umber = (0.4500, 0.2900, 0.0700)
sepia = (0.3700, 0.1500, 0.0700)
sienna = (0.6275, 0.3216, 0.1765)
saddle_brown = (0.5451, 0.2706, 0.0745)
sandy_brown = (0.9569, 0.6431, 0.3765)
tan = (0.8235, 0.7059, 0.5490)
van_dyke_brown = (0.3700, 0.1500, 0.0200)
# Oranges
cadmium_orange = (1.0000, 0.3800, 0.0100)
cadmium_red_light = (1.0000, 0.0100, 0.0500)
carrot = (0.9300, 0.5700, 0.1300)
dark_orange = (1.0000, 0.5490, 0.0000)
mars_orange = (0.5900, 0.2700, 0.0800)
mars_yellow = (0.8900, 0.4400, 0.1000)
orange = (1.0000, 0.5000, 0.0000)
orange_red = (1.0000, 0.2706, 0.0000)
yellow_ochre = (0.8900, 0.5100, 0.0900)
# Yellows
aureoline_yellow = (1.0000, 0.6600, 0.1400)
banana = (0.8900, 0.8100, 0.3400)
cadmium_lemon = (1.0000, 0.8900, 0.0100)
cadmium_yellow = (1.0000, 0.6000, 0.0700)
cadmium_yellow_light = (1.0000, 0.6900, 0.0600)
gold = (1.0000, 0.8431, 0.0000)
goldenrod = (0.8549, 0.6471, 0.1255)
goldenrod_dark = (0.7216, 0.5255, 0.0431)
goldenrod_light = (0.9804, 0.9804, 0.8235)
goldenrod_pale = (0.9333, 0.9098, 0.6667)
light_goldenrod = (0.9333, 0.8667, 0.5098)
melon = (0.8900, 0.6600, 0.4100)
naples_yellow_deep = (1.0000, 0.6600, 0.0700)
yellow = (1.0000, 1.0000, 0.0000)
yellow_light = (1.0000, 1.0000, 0.8784)
# Greens
chartreuse = (0.4980, 1.0000, 0.0000)
chrome_oxide_green = (0.4000, 0.5000, 0.0800)
cinnabar_green = (0.3800, 0.7000, 0.1600)
cobalt_green = (0.2400, 0.5700, 0.2500)
emerald_green = (0.0000, 0.7900, 0.3400)
forest_green = (0.1333, 0.5451, 0.1333)
green = (0.0000, 1.0000, 0.0000)
green_dark = (0.0000, 0.3922, 0.0000)
green_pale = (0.5961, 0.9843, 0.5961)
green_yellow = (0.6784, 1.0000, 0.1843)
lawn_green = (0.4863, 0.9882, 0.0000)
lime_green = (0.1961, 0.8039, 0.1961)
mint = (0.7400, 0.9900, 0.7900)
olive = (0.2300, 0.3700, 0.1700)
olive_drab = (0.4196, 0.5569, 0.1373)
olive_green_dark = (0.3333, 0.4196, 0.1843)
permanent_green = (0.0400, 0.7900, 0.1700)
sap_green = (0.1900, 0.5000, 0.0800)
sea_green = (0.1804, 0.5451, 0.3412)
sea_green_dark = (0.5608, 0.7373, 0.5608)
sea_green_medium = (0.2353, 0.7020, 0.4431)
sea_green_light = (0.1255, 0.6980, 0.6667)
spring_green = (0.0000, 1.0000, 0.4980)
spring_green_medium = (0.0000, 0.9804, 0.6039)
terre_verte = (0.2200, 0.3700, 0.0600)
viridian_light = (0.4300, 1.0000, 0.4400)
yellow_green = (0.6039, 0.8039, 0.1961)
# Cyans
aquamarine = (0.4980, 1.0000, 0.8314)
aquamarine_medium = (0.4000, 0.8039, 0.6667)
cyan = (0.0000, 1.0000, 1.0000)
cyan_white = (0.8784, 1.0000, 1.0000)
turquoise = (0.2510, 0.8784, 0.8157)
turquoise_dark = (0.0000, 0.8078, 0.8196)
turquoise_medium = (0.2824, 0.8196, 0.8000)
turquoise_pale = (0.6863, 0.9333, 0.9333)
# Blues
alice_blue = (0.9412, 0.9725, 1.0000)
blue = (0.0000, 0.0000, 1.0000)
blue_light = (0.6784, 0.8471, 0.9020)
blue_medium = (0.0000, 0.0000, 0.8039)
cadet = (0.3725, 0.6196, 0.6275)
cobalt = (0.2400, 0.3500, 0.6700)
cornflower = (0.3922, 0.5843, 0.9294)
cerulean = (0.0200, 0.7200, 0.8000)
dodger_blue = (0.1176, 0.5647, 1.0000)
indigo = (0.0300, 0.1800, 0.3300)
manganese_blue = (0.0100, 0.6600, 0.6200)
midnight_blue = (0.0980, 0.0980, 0.4392)
navy = (0.0000, 0.0000, 0.5020)
peacock = (0.2000, 0.6300, 0.7900)
powder_blue = (0.6902, 0.8784, 0.9020)
royal_blue = (0.2549, 0.4118, 0.8824)
slate_blue = (0.4157, 0.3529, 0.8039)
slate_blue_dark = (0.2824, 0.2392, 0.5451)
slate_blue_light = (0.5176, 0.4392, 1.0000)
slate_blue_medium = (0.4824, 0.4078, 0.9333)
sky_blue = (0.5294, 0.8078, 0.9216)
sky_blue_deep = (0.0000, 0.7490, 1.0000)
sky_blue_light = (0.5294, 0.8078, 0.9804)
steel_blue = (0.2745, 0.5098, 0.7059)
steel_blue_light = (0.6902, 0.7686, 0.8706)
turquoise_blue = (0.0000, 0.7800, 0.5500)
ultramarine = (0.0700, 0.0400, 0.5600)
# Magentas
blue_violet = (0.5412, 0.1686, 0.8863)
cobalt_violet_deep = (0.5700, 0.1300, 0.6200)
magenta = (1.0000, 0.0000, 1.0000)
orchid = (0.8549, 0.4392, 0.8392)
orchid_dark = (0.6000, 0.1961, 0.8000)
orchid_medium = (0.7294, 0.3333, 0.8275)
permanent_red_violet = (0.8600, 0.1500, 0.2700)
plum = (0.8667, 0.6275, 0.8667)
purple = (0.6275, 0.1255, 0.9412)
purple_medium = (0.5765, 0.4392, 0.8588)
ultramarine_violet = (0.3600, 0.1400, 0.4300)
violet = (0.5600, 0.3700, 0.6000)
violet_dark = (0.5804, 0.0000, 0.8275)
violet_red = (0.8157, 0.1255, 0.5647)
violet_red_medium = (0.7804, 0.0824, 0.5216)
violet_red_pale = (0.8588, 0.4392, 0.5765)
|
unknown
|
codeparrot/codeparrot-clean
| ||
# Rekall Memory Forensics
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
"""
The Rekall Entity Layer.
"""
__author__ = "Adam Sindelar <adamsh@google.com>"
import copy
import itertools
import logging
import traceback
from rekall.entities import collector as entity_collector
from rekall.entities import component as entity_component
from rekall.entities import entity as entity_module
from rekall.entities import definitions
from rekall.entities import identity as entity_id
from rekall.entities import lookup_table as entity_lookup
from efilter import expression
from efilter import query as entity_query
from efilter.engines import matcher as query_matcher
class IngestionPipeline(object):
    """Keeps track of new and updated entities during collection.

    Entities produced by collectors are queued per-query so that
    higher-order collectors can consume them on the next spin of the
    collection loop (see EntityManager.collect_for).
    """
    # True until at least one queue receives an entity.
    empty = True
    def __init__(self, queries):
        self.queues = {}
        self.matchers = {}
        self.outcomes = {}
        for query in queries:
            self.queues[query] = []
            self.matchers[query] = query_matcher.ObjectMatcher(query)
    def seed(self, query, entities):
        """Set up the queue for query with entities."""
        self.queues[query] = list(entities)
        if self.queues[query]:
            self.empty = False
    def find(self, query):
        """Return entities available to satisfy the query.

        If query is a dict, returns a dict of the same keys with each value
        replaced by the matching queue (mirrors EntityManager.find).
        """
        if isinstance(query, dict):
            results = {}
            for key, value in query.iteritems():
                results[key] = self.find(value)
            return results
        return self.queues[query]
    def fill(self, ingest, collector, wanted_matcher=None, wanted_handler=None):
        """Fills appropriate queues with entities from ingest.

        Arguments:
            ingest: An iterable of (entity, effect) pairs, where effect is a
                member of entity_collector.EffectEnum:
                Duplicate: the entity added no new information.
                Merged: the entity changed existing data by merging.
                Added: a new entity was created.
            collector: The collector object from which ingest was collected.
            wanted_matcher: Optional matcher; entities it accepts are passed
                to wanted_handler as soon as they are seen (streaming).
            wanted_handler: Optional callable invoked with each entity that
                wanted_matcher accepts.

        Returns:
            Dict keyed by EffectEnum members (Duplicate, Merged, Added,
            Enqueued) with counts of each outcome; Enqueued counts entities
            requeued for ingestion by other collectors.
        """
        counts = {entity_collector.EffectEnum.Duplicate: 0,
                  entity_collector.EffectEnum.Merged: 0,
                  entity_collector.EffectEnum.Added: 0,
                  entity_collector.EffectEnum.Enqueued: 0}
        for entity, effect in ingest:
            counts[effect] += 1
            if effect == entity_collector.EffectEnum.Duplicate:
                continue
            if wanted_handler and wanted_matcher.run(entity):
                wanted_handler(entity)
            # Enqueue for every query the entity satisfies; an entity can be
            # enqueued under multiple queries.
            for query in self.queries:
                if self.matchers[query].run(entity):
                    self.queues[query].append(entity)
                    counts[entity_collector.EffectEnum.Enqueued] += 1
                    self.empty = False
        if any(counts.itervalues()):
            logging.debug(
                "%s results: %d new, %d updated, %d requeued, %d duplicates.",
                collector.name,
                counts[entity_collector.EffectEnum.Added],
                counts[entity_collector.EffectEnum.Merged],
                counts[entity_collector.EffectEnum.Enqueued],
                counts[entity_collector.EffectEnum.Duplicate])
        self.outcomes[collector] = counts
        return counts
    def flush(self):
        """Empty every queue and mark the pipeline as empty."""
        queries = self.queues.keys()
        for query in queries:
            self.queues[query] = []
        self.empty = True
    def __getitem__(self, key):
        return self.queues[key]
    @property
    def queries(self):
        """The set of queries this pipeline has queues for."""
        return set(self.queues.keys())
class EntityManager(object):
    """Database of entities."""
    # Names of collectors that have produced all they're going to produce.
    # (Class-level placeholder; replaced with a set per instance in reset().)
    finished_collectors = None
    # Dict of entities keyed by their identity.
    # (Class-level placeholder; replaced with a dict per instance in reset().)
    entities = None
    def __init__(self, session):
        """Create an entity database bound to a Rekall session."""
        self.session = session
        self.reset()
    def reset(self):
        """Drop all entities, collector state and caches; rebuild indices."""
        self.entities = {}
        self._collectors = {}
        self.finished_collectors = set()
        self._cached_query_analyses = {}
        self._cached_matchers = {}
        # Lookup table on component name is such a common use case that we
        # always have it on. This actually speeds up searches by attribute that
        # don't have a specific lookup table too.
        def _component_indexer(entity):
            # Yield the name of every component the entity actually carries.
            for component in entity_component.Component.classes.keys():
                if getattr(entity.components, component):
                    yield component
        def _collector_indexer(entity):
            # Yield the name of every collector that contributed to the entity.
            for collector_name in entity.components.Entity.collectors:
                yield collector_name
        self.lookup_tables = {
            "components": entity_lookup.EntityLookupTable(
                key_name="components",
                key_func=_component_indexer,
                entity_manager=self),
            "collectors": entity_lookup.EntityLookupTable(
                key_name="collectors",
                key_func=_collector_indexer,
                entity_manager=self)}
    @property
    def collectors(self):
        """Dict of active collectors, refreshed on every access."""
        self.update_collectors()
        return self._collectors
    def update_collectors(self):
        """Refresh the list of active collectors. Do a diff if possible."""
        for key, cls in entity_collector.EntityCollector.classes.iteritems():
            if key in self._collectors:
                # Already instantiated - drop it if it went inactive.
                if cls.is_active(self.session):
                    continue
                else:
                    del self._collectors[key]
            else:
                # Not yet instantiated - add it if it is active for this
                # session.
                if cls.is_active(self.session):
                    self._collectors[key] = cls(entity_manager=self)
    @property
    def identity_prefix(self):
        """Returns the prefix for all identities on this machine.

        Currently this just returns "LOCALHOST", but in the future this will
        be a way of semi-uniquelly identifying the image/machine of origin.
        """
        # TODO: Implement proper machine identification.
        return "LOCALHOST"
    def identify(self, identity_dict):
        """Generate the appropriate type of identity based on identity dict.

        Arguments:
            identity_dict: a dictionary of attribute names (format being the
                usual "Component/member") and expected values. A key may also
                be a tuple of attribute names, paired with a tuple of values.

        Returns:
            An instance of Identity initialized with the identity dict and this
            manager's global prefix.
        """
        # Cast values to their correct types.
        cast_dict = {}
        for key, val in identity_dict.iteritems():
            if isinstance(key, tuple):
                # Compound key: coerce each member against its own attribute.
                cast_vals = []
                for idx, attr in enumerate(key):
                    attribute = entity_module.Entity.reflect_attribute(attr)
                    cast_vals.append(attribute.typedesc.coerce(val[idx]))
                cast_val = tuple(cast_vals)
            else:
                attribute = entity_module.Entity.reflect_attribute(key)
                cast_val = attribute.typedesc.coerce(val)
            cast_dict[key] = cast_val
        return entity_id.Identity.from_dict(global_prefix=self.identity_prefix,
                                            identity_dict=cast_dict)
    def identify_no_cast(self, identity_dict):
        """Like identify, but use the dict's values as-is (no type coercion)."""
        return entity_id.Identity.from_dict(global_prefix=self.identity_prefix,
                                            identity_dict=identity_dict)
    # pylint: disable=protected-access
    def register_components(self, identity, components, source_collector):
        """Find or create an entity for identity and add components to it.

        Arguments:
            identity: What the components are about. Should be a subclass of
                Identity. As a special case, we also accept BaseObjects.
            components: An iterable of components about the identity.
            source_collector: Anything that responds to __unicode__ or __name__
                and describes the source of this information (usually the
                string name of the collector function).

        Returns:
            Tuple of entity and the effect of the new information.
            The effect can be one of:
            EffectEnum.Duplicate: No new information learned.
            EffectEnum.Merged: As result of this call, data in one or more
                entities was updated and entities may have merged.
            EffectEnum.Added: A new entity was added.
        """
        kwargs = {}
        for component in components:
            kwargs[component.component_name] = component
        # The Entity component records the identity and which collectors
        # contributed to the entity.
        kwargs["Entity"] = definitions.Entity(
            identity=identity,
            collectors=frozenset((source_collector,)))
        entity = entity_module.Entity(
            entity_manager=self,
            components=entity_component.CONTAINER_PROTOTYPE._replace(**kwargs))
        indices = set(entity.indices)
        existing_entities = list(self.find_by_identity(identity))
        effect = entity_collector.EffectEnum.Added
        if existing_entities:
            if (len(existing_entities) == 1 and
                    existing_entities[0].issuperset(entity)):
                # No new data, but let's give the collector credit for finding
                # what we already knew.
                entity_comp = existing_entities[0].components.Entity
                entity_comp._mutate(
                    member="collectors",
                    value=entity_comp.collectors.union([source_collector]))
                return (existing_entities[0],
                        entity_collector.EffectEnum.Duplicate)
            for existing_entity in existing_entities:
                # Entities exist for this already, but are not equivalent to
                # the entity we found. Merge everything.
                effect = entity_collector.EffectEnum.Merged
                entity.update(existing_entity)
                indices.update(existing_entity.indices)
        # Overwrite all old indices with reference to the new entity.
        for index in indices:
            self.entities[index] = entity
        for lookup_table in self.lookup_tables.itervalues():
            lookup_table.update_index((entity,))
        return entity, effect
    def add_attribute_lookup(self, key):
        """Adds a fast-lookup index for the component/attribute key path.

        This also causes the newly-created lookup table to rebuild its index.
        Depending on how many entities already exist, building the index could
        take a couple of seconds.
        """
        # Don't add the same one twice.
        if self.lookup_tables.get(key, None):
            return
        attribute = entity_module.Entity.reflect_attribute(key)
        if not isinstance(attribute, entity_component.Field):
            logging.info(
                ("Can't create a lookup for %s, because it's not a simple "
                 "field."), attribute)
            return
        logging.debug("Creating a lookup table for %s", key)
        # Keys follow the "Component/member" format.
        component, _ = key.split("/")
        lookup_table = entity_lookup.AttributeLookupTable(
            attribute=key,
            entity_manager=self)
        # Only use the entities that actually have the component to build the
        # index.
        lookup_table.update_index(
            self.find(expression.ComponentLiteral(component), complete=False,
                      keep_cache=True))
        self.lookup_tables[key] = lookup_table
    def find_by_identity(self, identity, complete=False):
        """Return the entities that match the identity.

        The number of entities returned is almost always one or zero. The
        single exception to that rule is when the identity parameter is both:
        (a) an alternate identity and (b) not yet present in this entity
        manager. In that case, multiple entities may match.

        Arguments:
            identity: The identity to search for.
            complete: Should collectors be run to ensure complete results?
        """
        if complete:
            self.collect_for(identity.as_query())
        results = set()
        for index in identity.indices:
            entity = self.entities.get(index, None)
            if entity:
                results.add(entity)
        if complete:
            # Run higher-order collectors over each hit so results are as
            # enriched as possible.
            results = [self.parse(entity) for entity in results]
        return list(results)
    def find_by_component(self, component, complete=True):
        """Finds all entities that have the component.

        Arguments:
            component: Name of the component to look up.
            complete: If True, will attempt to collect the component.
        """
        query = entity_query.Query(expression.ComponentLiteral(component))
        if complete:
            self.collect_for(query)
        return list(self.lookup_tables["components"].lookup(component))
    def find_by_collector(self, collector):
        """Find all entities touched by the collector (matched by name)."""
        return list(self.lookup_tables["collectors"].lookup(str(collector)))
def matcher_for(self, query):
"""Returns a query matcher for the query (cached)."""
matcher = self._cached_matchers.setdefault(
query, query_matcher.ObjectMatcher(query))
return matcher
    def parsers_for(self, entity):
        """Finds collectors that can parse this entity.

        Yields: tuples of:
        - collector instance
        - name of the keyword argument on the collect method under which
          the entity should be passed to the collector.
        """
        for collector in self.collectors.itervalues():
            # Only collectors with exactly one collect query are considered
            # parsers here - presumably because a single entity can only fill
            # a single input slot. TODO(review): confirm rationale.
            if len(collector.collect_queries) != 1:
                continue
            for query_name, query in collector.collect_queries.iteritems():
                matcher = self.matcher_for(query)
                if matcher.run(entity):
                    yield collector, query_name
    def parse(self, entity):
        """Parses the entity using available higher-order collectors.

        Feeds the entity through every collector that can parse it (see
        parsers_for) and returns the possibly-enriched result.
        """
        result = entity
        for collector, collect_kwarg in self.parsers_for(entity):
            collector_input = {collect_kwarg: [result]}
            for parsed, effect in self.collect(collector=collector,
                                               collector_input=collector_input,
                                               hint=None):
                if effect != entity_collector.EffectEnum.Duplicate:
                    logging.debug(
                        "Collector %s produced a hit in parser mode.",
                        collector.name)
                    result = parsed
        return result
    def analyze(self, wanted):
        """Finds collectors and indexing suggestions for the query.

        Returns a dict of:
        - collectors: list of collectors to run
        - lookups: list of attributes to consider indexing for
        - dependencies: list of SimpleDependency instances to include
        - exclusions: list of SimpleDependency instances to exclude
        - guaranteed_components: components every result will carry
        - possible_components: components results may carry
        """
        if not isinstance(wanted, entity_query.Query):
            wanted = entity_query.Query(wanted)
        # We cache by the source and not the query because we want to reanalyze
        # queries that are logically equivalent, but expressed differently, in
        # order to have the right cursor positions stored for highlighting in
        # GUI.
        cache_key = wanted.source
        analysis = self._cached_query_analyses.get(cache_key, None)
        if analysis:
            # We want to make a copy exactly one level deep.
            analysis_copy = {}
            for key, value in analysis.iteritems():
                analysis_copy[key] = copy.copy(value)
            return analysis_copy
        analyzer = wanted.run_engine("slashy_analyzer")
        include = set()
        for dependency in analyzer.include:
            # Skip over general dependencies on Entity.
            if dependency.component == "Entity" and not dependency.attribute:
                continue
            include.add(dependency)
        exclude = analyzer.exclude
        suggested_indices = analyzer.latest_indices
        # A collector is a match if any of its promises match any of the
        # dependencies of the query.
        matched_collectors = []
        for collector in self.collectors.itervalues():
            for promise, dependency in itertools.product(
                    collector.promises, include):
                if dependency.match(promise):
                    matched_collectors.append(collector)
                    break
        # A collector is yielded unless each one of its promises matches
        # an exclusion from dependencies.
        collectors = set()
        for collector in matched_collectors:
            for promise, exclusion in itertools.product(
                    collector.promises, exclude):
                if not exclusion.match(promise):
                    collectors.add(collector)
                    break
            else:
                # No exclusions.
                collectors.add(collector)
        # A component is guaranteed if any dependency lists it. It is likely
        # if collectors we depend on output it (though not guaranteed).
        guaranteed_components = set(analyzer.expected_components)
        possible_components = set()
        for dependency in include:
            component = dependency.component
            if component in guaranteed_components:
                continue
            possible_components.add(dependency.component)
        for collector in collectors:
            for promise in collector.promises:
                component = promise.component
                if component in guaranteed_components:
                    continue
                possible_components.add(component)
        analysis = dict(collectors=list(collectors),
                        lookups=suggested_indices,
                        dependencies=include,
                        exclusions=exclude,
                        guaranteed_components=guaranteed_components,
                        possible_components=possible_components)
        self._cached_query_analyses[cache_key] = analysis
        return analysis
    def find(self, query, complete=True, validate=True, query_params=None,
             retry_on_error=False, keep_cache=False, syntax="slashy"):
        """Runs the query and returns entities that match.

        Arguments:
            query: Either an instance of the query AST, a query string, or a
                dictionary of queries. If a dict is given, a new dict will
                be returned with the same keys and values replaced with
                results.
            query_params: If query accepts parameters (it's a template), you
                may pass them here.
            complete: If True, will trigger collectors as necessary, to ensure
                completeness of results.
            validate: Will cause the query to be validated first (mostly for
                type errors).
            syntax: The query syntax used when parsing a string query.

        Arguments (live analysis only):
            retry_on_error: Should query be retried on failed collection?
                This argument is only respected on live systems.
            keep_cache: Should we reuse cached data from previous searches?
                This will greatly speed up analysis, but may lead to
                outdated or inconsistent results on running systems.
                This argument is only respected on live systems.
        """
        if not self.session.volatile:
            # Image analysis - data cannot change under us, so caching is
            # always safe and retries are pointless.
            keep_cache = True
            retry_on_error = False
        if not keep_cache:
            if not complete:
                raise ValueError(
                    "keep_cache and complete cannot both be False.")
            self.reset()
        if isinstance(query, dict):
            results = {}
            for query_name, expr in query.iteritems():
                results[query_name] = self.find(expr, complete=complete,
                                                validate=validate,
                                                query_params=query_params,
                                                retry_on_error=retry_on_error,
                                                keep_cache=keep_cache)
            return results
        if not isinstance(query, entity_query.Query):
            query = entity_query.Query(query, params=query_params,
                                       syntax=syntax)
        if validate:
            query.run_engine("validator")
        if complete:
            try:
                self.collect_for(query)
            except (entity_id.IdentityError, TypeError) as e:
                if retry_on_error:
                    logging.error(
                        "Collect failed for query %r. It would appear Rekall "
                        "is running on live memory - it is possible contents "
                        "of memory changed between reads, causing a collector "
                        "to fail. Going to try again. Original error: %r\n%s",
                        query, e, traceback.format_exc())
                    self.reset()
                    # Retry exactly once (retry_on_error=False below).
                    return self.find(query=query, complete=True,
                                     validate=validate,
                                     query_params=query_params,
                                     keep_cache=keep_cache,
                                     retry_on_error=False)
                else:
                    # We're not retrying and/or running on an image. Just
                    # rethrow the exception here.
                    raise
        # Try to satisfy the query using available lookup tables.
        search = entity_lookup.EntityQuerySearch(query)
        return search.search(self.entities, self.lookup_tables)
    def stream(self, query, handler, query_params=None):
        """Run the query and pass each matching entity to handler exactly once.

        Entities are streamed to handler as collection produces them; a final
        pass over cached results catches anything the stream missed.
        Duplicates are suppressed via an identity set.
        """
        query = entity_query.Query(query, params=query_params)
        seen = set()
        def _deduplicator(entity):
            if entity in seen:
                return
            seen.add(entity)
            handler(entity)
        self.collect_for(query, result_stream_handler=_deduplicator)
        for entity in self.find(query, complete=False, keep_cache=True):
            _deduplicator(entity)
def find_first(self, query, complete=True, validate=True,
query_params=None):
"""Like find, but returns just the first result."""
for entity in self.find(query, complete, validate, query_params):
return entity
# pylint: disable=protected-access
def collect_for(self, wanted, use_hint=False, result_stream_handler=None):
"""Will find and run the appropriate collectors to satisfy the query.
If use_hint is set to True, 'wanted' will be passed on as hint to
the collectors. This may result in faster collection, but may result
in collectors having to run repeatedly.
"""
# Planning stage.
if callable(result_stream_handler):
wanted_matcher = query_matcher.ObjectMatcher(wanted)
else:
wanted_matcher = None
self.update_collectors()
# to_process is used as a FIFO queue below.
analysis = self.analyze(wanted)
to_process = analysis["collectors"][:]
suggested_indices = analysis["lookups"]
# Create indices as suggested by the analyzer.
for attribute in suggested_indices:
self.add_attribute_lookup(attribute)
collectors_seen = set(self.finished_collectors)
# Collectors with an ingest query are de-facto parsers for things
# produced by collectors with no ingest query. They may run repeatedly
# as required.
repeated = list()
# Collectors with no dependencies (my favorite).
simple = list()
# Queries that collectors depend on.
queries = set()
# Build up a list of collectors to run, based on dependencies.
while to_process:
collector = to_process.pop(0)
if collector.name in collectors_seen:
continue
collectors_seen.add(collector.name)
if collector.collect_queries:
logging.debug("Collector %s deferred until stage 2.",
collector.name)
repeated.append(collector)
queries |= set(collector.collect_queries.itervalues())
# Discard the indexing suggestions for ingestion queries
# because they don't represent normal usage.
additional = set()
for query in collector.collect_queries.itervalues():
additional |= set(self.analyze(query)["collectors"])
for dependency in additional:
logging.debug("Collector %s depends on collector %s.",
collector.name, dependency.name)
if dependency.name not in collectors_seen:
to_process.append(dependency)
else:
logging.debug("%s will run in stage 1.",
collector.name)
simple.append(collector)
if not collectors_seen.difference(self.finished_collectors):
# Looks like we're already populated - no need to do anything.
return
logging.info(
"Will now run %d first-order collectors and %d collectors with "
"dependencies to satisfy query %s.",
len(simple), len(repeated), wanted)
# Execution stage 1: no dependencies.
for collector in simple:
effects = {entity_collector.EffectEnum.Duplicate: 0,
entity_collector.EffectEnum.Merged: 0,
entity_collector.EffectEnum.Added: 0}
if use_hint or collector.enforce_hint:
hint = wanted
else:
hint = None
self.finished_collectors.add(collector.name)
for entity, effect in self.collect(collector, hint=hint):
if result_stream_handler and wanted_matcher.run(entity):
result_stream_handler(entity)
effects[effect] += 1
logging.debug(
"%s produced %d new entities, %d updated and %d duplicates",
collector.name,
effects[entity_collector.EffectEnum.Added],
effects[entity_collector.EffectEnum.Merged],
effects[entity_collector.EffectEnum.Duplicate])
if not repeated:
# No higher-order collectors scheduled. We're done.
return
# Seeding stage for higher-order collectors.
in_pipeline = IngestionPipeline(queries=queries)
out_pipeline = IngestionPipeline(queries=queries)
for query in queries:
results = self.find(query, complete=False, keep_cache=True)
in_pipeline.seed(query, results)
if results:
logging.debug("Pipeline seeded with %d entities matching '%s'",
len(results), query)
# Execution stage 2: collectors with dependencies.
# Collectors should run in FIFO order:
repeated.reverse()
repeat_counter = 0
# This will spin until none of the remaining collectors want to run.
while not in_pipeline.empty:
# Collectors will read from the in_pipeline and fill the
# out_pipeline. At the end of each spin the pipelines swap and
# the new out_pipeline is flushed.
for collector in repeated:
# If the collector wants complete input, we pull it from the
# database. If it just wants one entity at a time, we can use
# the ingestion pipeline. The semantics of both find methods
# are identical.
if collector.complete_input:
collector_input = self.find(collector.collect_queries,
complete=False,
keep_cache=True)
else:
collector_input = in_pipeline.find(
collector.collect_queries)
# The collector requests its prefilter to be called.
if collector.filter_input:
collector_input_filtered = {}
for key, val in collector_input.iteritems():
collector_input_filtered[key] = collector.input_filter(
hint=hint, entities=val)
collector_input = collector_input_filtered
# The collector requests that we always pass the query hint.
if use_hint or collector.enforce_hint:
hint = wanted
else:
hint = None
try:
# Feed output back into the pipeline.
results = self.collect(collector=collector,
collector_input=collector_input,
hint=hint)
out_pipeline.fill(collector=collector,
ingest=results,
wanted_handler=result_stream_handler,
wanted_matcher=wanted_matcher)
except entity_id.IdentityError as e:
logging.error(
"Collector %r has encountered invalid or inconsistent "
"data and could not recover. Details available with "
"debug logging." % collector)
logging.debug(
"Collector %r (hint %r) raised %r\n%s.\n"
"Collector input was %r."
% (collector, e, traceback.format_exc(),
collector_input, hint))
# Check for endless loops.
if in_pipeline.outcomes == out_pipeline.outcomes:
repeat_counter += 1
if repeat_counter < 5:
logging.warning(
"Detected a loop in collection run (%d cycles)." %
repeat_counter)
else:
logging.warning(
"Maximum number of cycles in collection run exceeded. "
"Terminating collection.")
break
else:
repeat_counter = 0
# Swap & flush, rinse & repeat.
in_pipeline, out_pipeline = out_pipeline, in_pipeline
out_pipeline.flush()
for collector in repeated:
if not use_hint and not collector.enforce_hint:
self.finished_collectors.add(collector.name)
    def collect(self, collector, hint, collector_input=None):
        """Runs the collector, registers output and yields any new entities.

        Arguments:
            collector: The collector instance to run.
            hint: Optional query hint passed through to the collector.
            collector_input: Optional dict of keyword arguments for the
                collector's collect method.

        Yields:
            Tuples of (entity, effect) - see register_components for the
            possible effects.
        """
        if collector_input is None:
            collector_input = {}
        result_counter = 0
        if self.session:
            self.session.report_progress(
                "Collecting %(collector)s %(spinner)s",
                collector=collector.name)
        for results in collector.collect(hint=hint, **collector_input):
            if not isinstance(results, list):
                # Just one component yielded.
                results = [results]
            # First result is either the first component or an identity.
            first_result = results[0]
            if isinstance(first_result, entity_id.Identity):
                # If the collector gave as an identity then use that.
                identity = first_result
                results.pop(0)
            else:
                # If collector didn't give us an identity then we build
                # one from the first component's first field. This is
                # a good heuristic for about 90% of the time.
                first_field = first_result.component_fields[0].name
                attribute = "%s/%s" % (type(first_result).__name__,
                                       first_field)
                try:
                    identity = self.identify({attribute: first_result[0]})
                except entity_id.IdentityError:
                    logging.warning(
                        ("Invalid identity %r inferred from output of %r. "
                         "Entity skipped. Full results: %r"),
                        {attribute: first_result[0]},
                        collector,
                        results)
                    continue
            try:
                entity, effect = self.register_components(
                    identity=identity,
                    components=results,
                    source_collector=collector.name)
            except entity_id.IdentityError as e:
                logging.warning(
                    ("Invalid identity %r inferred from output of %r. "
                     "Entity skipped. Full results: %r. "
                     "Original error: %s"),
                    identity, collector, results, e)
                continue
            result_counter += 1
            # Update the spinner periodically so long runs show progress.
            if result_counter % 100 == 0 and self.session:
                self.session.report_progress(
                    "Collecting %(collector)s %(spinner)s (%(count)d results)",
                    collector=collector.name,
                    count=result_counter)
            yield entity, effect
|
unknown
|
codeparrot/codeparrot-clean
| ||
package overlay
import (
"strconv"
"github.com/moby/moby/v2/daemon/libnetwork/osl/kernel"
)
// ovConfig lists the sysctl settings applied for overlay networking:
// the ARP neighbor-table garbage-collection thresholds are raised.
// checkHigher ensures an already-higher system value is left untouched.
var ovConfig = map[string]*kernel.OSValue{
	"net.ipv4.neigh.default.gc_thresh1": {Value: "8192", CheckFn: checkHigher},
	"net.ipv4.neigh.default.gc_thresh2": {Value: "49152", CheckFn: checkHigher},
	"net.ipv4.neigh.default.gc_thresh3": {Value: "65536", CheckFn: checkHigher},
}
// checkHigher reports whether val1 is numerically lower than val2, i.e.
// whether the proposed value val2 is strictly higher than the current
// value val1. Values are parsed as base-10 32-bit integers; parse errors
// are ignored and the value is treated as 0 (matching original behavior).
func checkHigher(val1, val2 string) bool {
	current, _ := strconv.ParseInt(val1, 10, 32)
	proposed, _ := strconv.ParseInt(val2, 10, 32)
	return current < proposed
}
// applyOStweaks pushes the overlay sysctl configuration (ovConfig) to the
// kernel via the libnetwork kernel helper.
func applyOStweaks() {
	kernel.ApplyOSTweaks(ovConfig)
}
|
go
|
github
|
https://github.com/moby/moby
|
daemon/libnetwork/drivers/overlay/ostweaks_linux.go
|
# frozen_string_literal: true
require "helper"
require "jobs/hello_job"
require "jobs/logging_job"
require "jobs/nested_job"
class ActiveJobTestCaseTest < ActiveJob::TestCase
  # This tests that this job class doesn't get its adapter set.
  # That's the correct behavior since we don't want to break
  # the `class_attribute` inheritance.
  class TestClassAttributeInheritanceJob < ActiveJob::Base
    def self.queue_adapter=(*)
      raise "Attempting to break `class_attribute` inheritance, bad!"
    end
  end

  # The test case should mix ActiveJob::TestHelper into itself.
  def test_include_helper
    assert_includes self.class.ancestors, ActiveJob::TestHelper
  end

  def test_set_test_adapter
    # The queue adapter the job uses depends on the Active Job config.
    # See https://github.com/rails/rails/pull/48585 for logic.
    # Map the configured adapter name to the adapter class we expect
    # `queue_adapter` to return.
    expected = case ActiveJob::Base.queue_adapter_name.to_sym
               when :test
                 ActiveJob::QueueAdapters::TestAdapter
               when :inline
                 ActiveJob::QueueAdapters::InlineAdapter
               when :async
                 ActiveJob::QueueAdapters::AsyncAdapter
               when :backburner
                 ActiveJob::QueueAdapters::BackburnerAdapter
               when :delayed_job
                 ActiveJob::QueueAdapters::DelayedJobAdapter
               when :queue_classic
                 ActiveJob::QueueAdapters::QueueClassicAdapter
               when :resque
                 ActiveJob::QueueAdapters::ResqueAdapter
               when :sneakers
                 ActiveJob::QueueAdapters::SneakersAdapter
               else
                 raise NotImplementedError.new
               end
    assert_kind_of expected, queue_adapter
  end

  # A fresh TestAdapter should not perform enqueued jobs unless asked to.
  def test_does_not_perform_enqueued_jobs_by_default
    assert_nil ActiveJob::QueueAdapters::TestAdapter.new.perform_enqueued_jobs
  end
end
|
ruby
|
github
|
https://github.com/rails/rails
|
activejob/test/cases/test_case_test.rb
|
/*
* Copyright 2012-present the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.test.json;
import java.io.BufferedReader;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.StringReader;
import java.lang.reflect.Field;
import org.jspecify.annotations.Nullable;
import org.springframework.beans.factory.ObjectFactory;
import org.springframework.core.ResolvableType;
import org.springframework.core.io.ByteArrayResource;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.FileSystemResource;
import org.springframework.core.io.InputStreamResource;
import org.springframework.core.io.Resource;
import org.springframework.util.Assert;
import org.springframework.util.ReflectionUtils;
/**
* Base class for AssertJ based JSON marshal testers. Exposes specific Asserts following a
* {@code read}, {@code write} or {@code parse} of JSON content. Typically used in
* combination with an AssertJ {@link org.assertj.core.api.Assertions#assertThat(Object)
* assertThat} call. For example:
*
* <pre class="code">
* public class ExampleObjectJsonTests {
*
* private AbstractJsonTester<ExampleObject> json = //...
*
* @Test
* public void testWriteJson() {
* ExampleObject object = //...
* assertThat(json.write(object)).isEqualToJson("expected.json");
* assertThat(json.read("expected.json")).isEqualTo(object);
* }
*
* }
* </pre>
*
* For a complete list of supported assertions see {@link JsonContentAssert} and
* {@link ObjectContentAssert}.
* <p>
* To use this library JSONAssert must be on the test classpath.
*
* @param <T> the type under test
* @author Phillip Webb
* @since 1.4.0
* @see JsonContentAssert
* @see ObjectContentAssert
*/
public abstract class AbstractJsonMarshalTester<T> {

	// Both fields remain null until initialize() runs (directly or via the
	// initializing constructor); the *NotNull accessors fail fast otherwise.
	private @Nullable Class<?> resourceLoadClass;

	private @Nullable ResolvableType type;

	/**
	 * Create a new uninitialized {@link AbstractJsonMarshalTester} instance.
	 */
	protected AbstractJsonMarshalTester() {
	}

	/**
	 * Create a new {@link AbstractJsonMarshalTester} instance.
	 * @param resourceLoadClass the source class used when loading relative classpath
	 * resources
	 * @param type the type under test
	 */
	public AbstractJsonMarshalTester(Class<?> resourceLoadClass, ResolvableType type) {
		Assert.notNull(resourceLoadClass, "'resourceLoadClass' must not be null");
		Assert.notNull(type, "'type' must not be null");
		initialize(resourceLoadClass, type);
	}

	/**
	 * Initialize the marshal tester for use.
	 * @param resourceLoadClass the source class used when loading relative classpath
	 * resources
	 * @param type the type under test
	 */
	protected final void initialize(Class<?> resourceLoadClass, ResolvableType type) {
		// First initialization wins; later calls are silently ignored.
		if (this.resourceLoadClass == null && this.type == null) {
			this.resourceLoadClass = resourceLoadClass;
			this.type = type;
		}
	}
	/**
	 * Return the type under test.
	 * @return the type under test
	 */
	protected final @Nullable ResolvableType getType() {
		return this.type;
	}

	// Fail-fast variant of getType() for internal use once initialization is
	// mandatory.
	private ResolvableType getTypeNotNull() {
		ResolvableType type = getType();
		Assert.state(type != null, "Instance has not been initialized");
		return type;
	}

	/**
	 * Return class used to load relative resources.
	 * @return the resource load class
	 */
	protected final @Nullable Class<?> getResourceLoadClass() {
		return this.resourceLoadClass;
	}

	// Fail-fast variant of getResourceLoadClass() for internal use.
	private Class<?> getResourceLoadClassNotNull() {
		Class<?> resourceLoadClass = getResourceLoadClass();
		Assert.state(resourceLoadClass != null, "Instance has not been initialized");
		return resourceLoadClass;
	}
	/**
	 * Return {@link JsonContent} from writing the specific value.
	 * @param value the value to write
	 * @return the {@link JsonContent}
	 * @throws IOException on write error
	 */
	public JsonContent<T> write(T value) throws IOException {
		// NOTE(review): verify() is defined outside this excerpt; presumably
		// it asserts the tester has been initialized - confirm.
		verify();
		Assert.notNull(value, "'value' must not be null");
		String json = writeObject(value, getTypeNotNull());
		return getJsonContent(json);
	}

	/**
	 * Factory method used to get a {@link JsonContent} instance from a source JSON
	 * string.
	 * @param json the source JSON
	 * @return a new {@link JsonContent} instance
	 * @since 2.1.5
	 */
	protected JsonContent<T> getJsonContent(String json) {
		return new JsonContent<>(getResourceLoadClassNotNull(), getType(), json);
	}
/**
* Return the object created from parsing the specific JSON bytes.
* @param jsonBytes the source JSON bytes
* @return the resulting object
* @throws IOException on parse error
*/
public T parseObject(byte[] jsonBytes) throws IOException {
verify();
return parse(jsonBytes).getObject();
}
/**
 * Parse the given JSON bytes and return {@link ObjectContent} wrapping the
 * result.
 * @param jsonBytes the source JSON bytes
 * @return the {@link ObjectContent}
 * @throws IOException on parse error
 */
public ObjectContent<T> parse(byte[] jsonBytes) throws IOException {
	verify();
	Assert.notNull(jsonBytes, "'jsonBytes' must not be null");
	ByteArrayResource source = new ByteArrayResource(jsonBytes);
	return read(source);
}
/**
 * Parse the given JSON string and return the resulting object.
 * @param jsonString the source JSON string
 * @return the parsed object
 * @throws IOException on parse error
 */
public T parseObject(String jsonString) throws IOException {
	verify();
	ObjectContent<T> content = parse(jsonString);
	return content.getObject();
}
/**
 * Parse the given JSON string and return {@link ObjectContent} wrapping the
 * result.
 * @param jsonString the source JSON string
 * @return the {@link ObjectContent}
 * @throws IOException on parse error
 */
public ObjectContent<T> parse(String jsonString) throws IOException {
	verify();
	Assert.notNull(jsonString, "'jsonString' must not be null");
	StringReader source = new StringReader(jsonString);
	return read(source);
}
/**
 * Read the specified classpath resource and return the resulting object.
 * @param resourcePath the source resource path. May be a full path or a path relative
 * to the {@code resourceLoadClass} passed to the constructor
 * @return the parsed object
 * @throws IOException on read error
 */
public T readObject(String resourcePath) throws IOException {
	verify();
	ObjectContent<T> content = read(resourcePath);
	return content.getObject();
}
/**
 * Return {@link ObjectContent} from reading from the specified classpath resource.
 * @param resourcePath the source resource path. May be a full path or a path relative
 * to the {@code resourceLoadClass} passed to the constructor
 * @return the {@link ObjectContent}
 * @throws IOException on read error
 */
public ObjectContent<T> read(String resourcePath) throws IOException {
	verify();
	Assert.notNull(resourcePath, "'resourcePath' must not be null");
	// Consistency fix: use the null-checked accessor (as getJsonContent does)
	// rather than the @Nullable field directly; verify() guarantees it is set,
	// but the accessor makes that explicit to readers and nullness tooling.
	return read(new ClassPathResource(resourcePath, getResourceLoadClassNotNull()));
}
/**
 * Read the specified file and return the resulting object.
 * @param file the source file
 * @return the parsed object
 * @throws IOException on read error
 */
public T readObject(File file) throws IOException {
	verify();
	ObjectContent<T> content = read(file);
	return content.getObject();
}
/**
 * Read the specified file and return {@link ObjectContent} wrapping the
 * result.
 * @param file the source file
 * @return the {@link ObjectContent}
 * @throws IOException on read error
 */
public ObjectContent<T> read(File file) throws IOException {
	verify();
	Assert.notNull(file, "'file' must not be null");
	FileSystemResource source = new FileSystemResource(file);
	return read(source);
}
/**
 * Read the specified input stream and return the resulting object.
 * @param inputStream the source input stream
 * @return the parsed object
 * @throws IOException on read error
 */
public T readObject(InputStream inputStream) throws IOException {
	verify();
	ObjectContent<T> content = read(inputStream);
	return content.getObject();
}
/**
 * Read the specified input stream and return {@link ObjectContent} wrapping
 * the result.
 * @param inputStream the source input stream
 * @return the {@link ObjectContent}
 * @throws IOException on read error
 */
public ObjectContent<T> read(InputStream inputStream) throws IOException {
	verify();
	Assert.notNull(inputStream, "'inputStream' must not be null");
	InputStreamResource source = new InputStreamResource(inputStream);
	return read(source);
}
/**
 * Read the specified resource and return the resulting object.
 * @param resource the source resource
 * @return the parsed object
 * @throws IOException on read error
 */
public T readObject(Resource resource) throws IOException {
	verify();
	ObjectContent<T> content = read(resource);
	return content.getObject();
}
/**
 * Return {@link ObjectContent} from reading from the specified resource.
 * @param resource the source resource
 * @return the {@link ObjectContent}
 * @throws IOException on read error
 */
public ObjectContent<T> read(Resource resource) throws IOException {
	verify();
	Assert.notNull(resource, "'resource' must not be null");
	InputStream inputStream = resource.getInputStream();
	// Close in a finally block: the original leaked the stream whenever
	// readObject threw (close was only reached on the success path).
	try {
		T object = readObject(inputStream, getTypeNotNull());
		return new ObjectContent<>(this.type, object);
	}
	finally {
		closeQuietly(inputStream);
	}
}
/**
 * Read the specified reader and return the resulting object.
 * @param reader the source reader
 * @return the parsed object
 * @throws IOException on read error
 */
public T readObject(Reader reader) throws IOException {
	verify();
	ObjectContent<T> content = read(reader);
	return content.getObject();
}
/**
 * Return {@link ObjectContent} from reading from the specified reader.
 * @param reader the source reader
 * @return the {@link ObjectContent}
 * @throws IOException on read error
 */
public ObjectContent<T> read(Reader reader) throws IOException {
	verify();
	Assert.notNull(reader, "'reader' must not be null");
	// Close in a finally block: the original leaked the reader whenever
	// readObject threw (close was only reached on the success path).
	try {
		T object = readObject(reader, getTypeNotNull());
		return new ObjectContent<>(this.type, object);
	}
	finally {
		closeQuietly(reader);
	}
}
/**
 * Close the given resource, swallowing any {@link IOException} so that
 * cleanup never masks a more interesting failure from the caller.
 * @param closeable the resource to close
 */
private void closeQuietly(Closeable closeable) {
	try {
		closeable.close();
	}
	catch (IOException ex) {
		// Ignore
	}
}
/**
 * Assert that {@code initialize} has been called before any read/write
 * operation is attempted.
 */
private void verify() {
	Assert.state(this.resourceLoadClass != null, "Uninitialized JsonMarshalTester (ResourceLoadClass is null)");
	Assert.state(this.type != null, "Uninitialized JsonMarshalTester (Type is null)");
}
/**
 * Write the specified object to a JSON string. Implemented by subclasses to
 * delegate to the underlying marshaller (e.g. Jackson, Gson, JSON-B).
 * @param value the source value (never {@code null})
 * @param type the resulting type (never {@code null})
 * @return the JSON string
 * @throws IOException on write error
 */
protected abstract String writeObject(T value, ResolvableType type) throws IOException;
/**
 * Read from the specified input stream to create an object of the specified type. The
 * default implementation delegates to {@link #readObject(Reader, ResolvableType)}.
 * @param inputStream the source input stream (never {@code null})
 * @param type the resulting type (never {@code null})
 * @return the resulting object
 * @throws IOException on read error
 */
protected T readObject(InputStream inputStream, ResolvableType type) throws IOException {
	// NOTE(review): InputStreamReader with no explicit Charset decodes using
	// the platform default encoding — confirm UTF-8 input is always intended.
	BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
	return readObject(reader, type);
}
/**
 * Read from the specified reader to create an object of the specified type.
 * Implemented by subclasses to delegate to the underlying marshaller.
 * @param reader the source reader (never {@code null})
 * @param type the resulting type (never {@code null})
 * @return the resulting object
 * @throws IOException on read error
 */
protected abstract T readObject(Reader reader, ResolvableType type) throws IOException;
/**
 * Utility class used to support field initialization. Used by subclasses to support
 * {@code initFields}.
 *
 * @param <M> the marshaller type
 */
protected abstract static class FieldInitializer<M> {

	// Concrete tester type to look for when scanning test-instance fields.
	private final Class<?> testerClass;

	@SuppressWarnings("rawtypes")
	protected FieldInitializer(Class<? extends AbstractJsonMarshalTester> testerClass) {
		Assert.notNull(testerClass, "'testerClass' must not be null");
		this.testerClass = testerClass;
	}

	/**
	 * Initialize all tester fields of the given test instance with the
	 * supplied marshaller.
	 * @param testInstance the test instance to scan
	 * @param marshaller the marshaller to use
	 */
	public void initFields(Object testInstance, M marshaller) {
		Assert.notNull(testInstance, "'testInstance' must not be null");
		Assert.notNull(marshaller, "'marshaller' must not be null");
		initFields(testInstance, () -> marshaller);
	}

	/**
	 * Initialize all tester fields of the given test instance with a
	 * lazily-obtained marshaller.
	 * @param testInstance the test instance to scan
	 * @param marshaller factory for the marshaller
	 */
	public void initFields(Object testInstance, final ObjectFactory<M> marshaller) {
		Assert.notNull(testInstance, "'testInstance' must not be null");
		Assert.notNull(marshaller, "'marshaller' must not be null");
		ReflectionUtils.doWithFields(testInstance.getClass(),
				(field) -> doWithField(field, testInstance, marshaller));
	}

	// Only fields assignable to the tester type are considered, and only
	// when the field has not already been set by the test itself.
	protected void doWithField(Field field, Object test, ObjectFactory<M> marshaller) {
		if (this.testerClass.isAssignableFrom(field.getType())) {
			ReflectionUtils.makeAccessible(field);
			Object existingValue = ReflectionUtils.getField(field, test);
			if (existingValue == null) {
				setupField(field, test, marshaller);
			}
		}
	}

	// The tester's generic (e.g. JacksonTester<MyType> -> MyType) becomes the
	// type under test for the created tester.
	private void setupField(Field field, Object test, ObjectFactory<M> marshaller) {
		ResolvableType type = ResolvableType.forField(field).getGeneric();
		ReflectionUtils.setField(field, test, createTester(test.getClass(), type, marshaller.getObject()));
	}

	/**
	 * Create a tester instance for the given field type.
	 * @param resourceLoadClass the class used to load relative resources
	 * @param type the type under test
	 * @param marshaller the marshaller to use
	 * @return a new tester
	 */
	protected abstract AbstractJsonMarshalTester<Object> createTester(Class<?> resourceLoadClass,
			ResolvableType type, M marshaller);

}
}
|
java
|
github
|
https://github.com/spring-projects/spring-boot
|
core/spring-boot-test/src/main/java/org/springframework/boot/test/json/AbstractJsonMarshalTester.java
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utility functions to perform Xcode-style build steps.
These functions are executed via gyp-mac-tool when using the Makefile generator.
"""
import fcntl
import fnmatch
import glob
import json
import os
import plistlib
import re
import shutil
import string
import subprocess
import sys
import tempfile
def main(args):
  """Dispatch |args| to a MacTool command and exit with its status code,
  if the command returned one."""
  status = MacTool().Dispatch(args)
  if status is not None:
    sys.exit(status)
class MacTool(object):
"""This class performs all the Mac tooling steps. The methods can either be
executed directly, or dispatched from an argument list."""
def Dispatch(self, args):
  """Dispatches a string command to a method.

  args[0] is a hyphenated tool name (e.g. 'copy-info-plist'); the remaining
  args are forwarded to the matching Exec* method's parameters.
  """
  if len(args) < 1:
    raise Exception("Not enough arguments")
  # 'copy-info-plist' -> 'ExecCopyInfoPlist', etc.
  method = "Exec%s" % self._CommandifyName(args[0])
  return getattr(self, method)(*args[1:])
def _CommandifyName(self, name_string):
  """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
  # title() capitalizes the start of each alphabetic run, so each
  # hyphen-separated word is capitalized before the hyphens are dropped.
  return name_string.title().replace('-', '')
def ExecCopyBundleResource(self, source, dest):
  """Copies a resource file to the bundle/Resources directory, performing any
  necessary compilation on each resource."""
  # Dispatch on the (lowercased) file extension; directories are copied
  # wholesale, .xib/.storyboard are compiled, .strings are re-encoded.
  extension = os.path.splitext(source)[1].lower()
  if os.path.isdir(source):
    # Copy tree.
    # TODO(thakis): This copies file attributes like mtime, while the
    # single-file branch below doesn't. This should probably be changed to
    # be consistent with the single-file branch.
    if os.path.exists(dest):
      shutil.rmtree(dest)
    shutil.copytree(source, dest)
  elif extension == '.xib':
    return self._CopyXIBFile(source, dest)
  elif extension == '.storyboard':
    return self._CopyXIBFile(source, dest)
  elif extension == '.strings':
    self._CopyStringsFile(source, dest)
  else:
    shutil.copy(source, dest)
def _CopyXIBFile(self, source, dest):
  """Compiles a XIB file with ibtool into a binary plist in the bundle."""
  # ibtool sometimes crashes with relative paths. See crbug.com/314728.
  base = os.path.dirname(os.path.realpath(__file__))
  # NOTE(review): os.path.relpath(path) is truthy for virtually every path,
  # so both branches below almost always run; the intent looks like
  # `if not os.path.isabs(...)`. Harmless for absolute inputs because
  # os.path.join discards `base` when the second argument is absolute.
  if os.path.relpath(source):
    source = os.path.join(base, source)
  if os.path.relpath(dest):
    dest = os.path.join(base, dest)
  args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
      '--output-format', 'human-readable-text', '--compile', dest, source]
  ibtool_section_re = re.compile(r'/\*.*\*/')
  ibtool_re = re.compile(r'.*note:.*is clipping its content')
  ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
  current_section_header = None
  # Filter noisy "is clipping its content" notes; only print a section
  # header if something in that section survives the filter.
  for line in ibtoolout.stdout:
    if ibtool_section_re.match(line):
      current_section_header = line
    elif not ibtool_re.match(line):
      if current_section_header:
        sys.stdout.write(current_section_header)
        current_section_header = None
      sys.stdout.write(line)
  return ibtoolout.returncode
def _CopyStringsFile(self, source, dest):
  """Copies a .strings file using iconv to reconvert the input into UTF-16."""
  input_code = self._DetectInputEncoding(source) or "UTF-8"
  # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
  # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
  # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
  # semicolon in dictionary.
  # on invalid files. Do the same kind of validation.
  # Local import: CoreFoundation is a PyObjC binding, presumably available
  # only on macOS system Python — not importable at module load time
  # elsewhere. TODO confirm.
  import CoreFoundation
  s = open(source, 'rb').read()
  d = CoreFoundation.CFDataCreate(None, s, len(s))
  _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
  if error:
    # Invalid plist: silently skip the copy, mirroring Xcode's behavior.
    return
  fp = open(dest, 'wb')
  fp.write(s.decode(input_code).encode('UTF-16'))
  fp.close()
def _DetectInputEncoding(self, file_name):
  """Reads the first few bytes from file_name and tries to guess the text
  encoding by looking for a BOM. Returns None as a guess if it can't detect
  one."""
  fp = open(file_name, 'rb')
  try:
    header = fp.read(3)
  except IOError:
    # Bug fix: the original `except e:` referenced an undefined name, so a
    # failed read raised NameError instead of being handled here.
    fp.close()
    return None
  fp.close()
  # UTF-16 BE / UTF-16 LE / UTF-8 byte-order marks.
  if header.startswith("\xFE\xFF"):
    return "UTF-16"
  elif header.startswith("\xFF\xFE"):
    return "UTF-16"
  elif header.startswith("\xEF\xBB\xBF"):
    return "UTF-8"
  else:
    return None
def ExecCopyInfoPlist(self, source, dest, *keys):
  """Copies the |source| Info.plist to the destination directory |dest|,
  expanding ${VAR}-style environment references and injecting any extra
  key/value pairs supplied as a JSON string in keys[0]."""
  # Read the source Info.plist into memory.
  fd = open(source, 'r')
  lines = fd.read()
  fd.close()
  # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
  # NOTE(review): readPlistFromString / writePlistToString and the
  # dict.items() + list concatenation below are Python-2-only APIs.
  plist = plistlib.readPlistFromString(lines)
  if keys:
    plist = dict(plist.items() + json.loads(keys[0]).items())
  lines = plistlib.writePlistToString(plist)
  # Go through all the environment variables and replace them as variables in
  # the file.
  IDENT_RE = re.compile('[/\s]')
  for key in os.environ:
    if key.startswith('_'):
      continue
    evar = '${%s}' % key
    evalue = os.environ[key]
    lines = string.replace(lines, evar, evalue)
    # Xcode supports various suffices on environment variables, which are
    # all undocumented. :rfc1034identifier is used in the standard project
    # template these days, and :identifier was used earlier. They are used to
    # convert non-url characters into things that look like valid urls --
    # except that the replacement character for :identifier, '_' isn't valid
    # in a URL either -- oops, hence :rfc1034identifier was born.
    evar = '${%s:identifier}' % key
    evalue = IDENT_RE.sub('_', os.environ[key])
    lines = string.replace(lines, evar, evalue)
    evar = '${%s:rfc1034identifier}' % key
    evalue = IDENT_RE.sub('-', os.environ[key])
    lines = string.replace(lines, evar, evalue)
  # Remove any keys with values that haven't been replaced.
  # NOTE(review): when an unreplaced value is on line 0, lines[i - 1] wraps
  # around and blanks the LAST line — confirm this case cannot occur for
  # well-formed plists (the <string> value is never the first line).
  lines = lines.split('\n')
  for i in range(len(lines)):
    if lines[i].strip().startswith("<string>${"):
      lines[i] = None
      lines[i - 1] = None
  lines = '\n'.join(filter(lambda x: x is not None, lines))
  # Write out the file with variables replaced.
  fd = open(dest, 'w')
  fd.write(lines)
  fd.close()
  # Now write out PkgInfo file now that the Info.plist file has been
  # "compiled".
  self._WritePkgInfo(dest)
def _WritePkgInfo(self, info_plist):
  """This writes the PkgInfo file from the data stored in Info.plist."""
  plist = plistlib.readPlist(info_plist)
  if not plist:
    return
  # Only create PkgInfo for executable types.
  # NOTE(review): plist['CFBundlePackageType'] raises KeyError if the key is
  # absent — presumably every Info.plist reaching here defines it; confirm.
  package_type = plist['CFBundlePackageType']
  if package_type != 'APPL':
    return
  # The format of PkgInfo is eight characters, representing the bundle type
  # and bundle signature, each four characters. If that is missing, four
  # '?' characters are used instead.
  signature_code = plist.get('CFBundleSignature', '????')
  if len(signature_code) != 4:  # Wrong length resets everything, too.
    signature_code = '?' * 4
  dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
  fp = open(dest, 'w')
  fp.write('%s%s' % (package_type, signature_code))
  fp.close()
def ExecFlock(self, lockfile, *cmd_list):
  """Emulates the most basic behavior of Linux's flock(1): take an exclusive
  lock on |lockfile|, then run |cmd_list| and return its exit status."""
  # Rely on exception handling to report errors.
  # The fd is deliberately leaked so the lock is held for the lifetime of
  # the child process and released when this process exits.
  fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
  fcntl.flock(fd, fcntl.LOCK_EX)
  return subprocess.call(cmd_list)
def ExecFilterLibtool(self, *cmd_list):
  """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
  symbols'."""
  libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
  libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE)
  _, err = libtoolout.communicate()
  # Echo every stderr line except the harmless "no symbols" noise.
  # (Python 2 print-to-file syntax; this module targets Python 2.)
  for line in err.splitlines():
    if not libtool_re.match(line):
      print >>sys.stderr, line
  return libtoolout.returncode
def ExecPackageFramework(self, framework, version):
  """Takes a path to Something.framework and the Current version of that and
  sets up all the symlinks."""
  # Find the name of the binary based on the part before the ".framework".
  binary = os.path.basename(framework).split('.')[0]
  CURRENT = 'Current'
  RESOURCES = 'Resources'
  VERSIONS = 'Versions'
  if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
    # Binary-less frameworks don't seem to contain symlinks (see e.g.
    # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
    return
  # Move into the framework directory to set the symlinks correctly.
  # chdir makes the created symlink targets relative to the framework root.
  pwd = os.getcwd()
  os.chdir(framework)
  # Set up the Current version.
  self._Relink(version, os.path.join(VERSIONS, CURRENT))
  # Set up the root symlinks.
  self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
  self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
  # Back to where we were before!
  os.chdir(pwd)
def _Relink(self, dest, link):
  """Creates a symlink to |dest| named |link|. If |link| already exists,
  it is overwritten."""
  # lexists (not exists) so a dangling symlink is also replaced.
  if os.path.lexists(link):
    os.remove(link)
  os.symlink(dest, link)
def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
  """Code sign a bundle.

  This function tries to code sign an iOS bundle, following the same
  algorithm as Xcode:
    1. copy ResourceRules.plist from the user or the SDK into the bundle,
    2. pick the provisioning profile that best match the bundle identifier,
       and copy it into the bundle as embedded.mobileprovision,
    3. copy Entitlements.plist from user or SDK next to the bundle,
    4. code sign the bundle.
  """
  resource_rules_path = self._InstallResourceRules(resource_rules)
  substitutions, overrides = self._InstallProvisioningProfile(
      provisioning, self._GetCFBundleIdentifier())
  entitlements_path = self._InstallEntitlements(
      entitlements, substitutions, overrides)
  # Paths below come from Xcode-provided environment variables; this step
  # must run inside an Xcode build to have them defined.
  subprocess.check_call([
      'codesign', '--force', '--sign', key, '--resource-rules',
      resource_rules_path, '--entitlements', entitlements_path,
      os.path.join(
          os.environ['TARGET_BUILD_DIR'],
          os.environ['FULL_PRODUCT_NAME'])])
def _InstallResourceRules(self, resource_rules):
  """Installs ResourceRules.plist from user or SDK into the bundle.

  Args:
    resource_rules: string, optional, path to the ResourceRules.plist file
      to use, default to "${SDKROOT}/ResourceRules.plist"

  Returns:
    Path to the copy of ResourceRules.plist into the bundle.
  """
  source_path = resource_rules
  target_path = os.path.join(
      os.environ['BUILT_PRODUCTS_DIR'],
      os.environ['CONTENTS_FOLDER_PATH'],
      'ResourceRules.plist')
  # Fall back to the SDK-provided rules when the caller passed none.
  if not source_path:
    source_path = os.path.join(
        os.environ['SDKROOT'], 'ResourceRules.plist')
  shutil.copy2(source_path, target_path)
  return target_path
def _InstallProvisioningProfile(self, profile, bundle_identifier):
  """Installs embedded.mobileprovision into the bundle.

  Args:
    profile: string, optional, short name of the .mobileprovision file
      to use, if empty or the file is missing, the best file installed
      will be used
    bundle_identifier: string, value of CFBundleIdentifier from Info.plist

  Returns:
    A tuple containing two dictionary: variables substitutions and values
    to overrides when generating the entitlements file.
  """
  source_path, provisioning_data, team_id = self._FindProvisioningProfile(
      profile, bundle_identifier)
  target_path = os.path.join(
      os.environ['BUILT_PRODUCTS_DIR'],
      os.environ['CONTENTS_FOLDER_PATH'],
      'embedded.mobileprovision')
  shutil.copy2(source_path, target_path)
  # AppIdentifierPrefix conventionally ends with a '.', hence the append.
  substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
  return substitutions, provisioning_data['Entitlements']
def _FindProvisioningProfile(self, profile, bundle_identifier):
  """Finds the .mobileprovision file to use for signing the bundle.

  Checks all the installed provisioning profiles (or if the user specified
  the PROVISIONING_PROFILE variable, only consult it) and select the most
  specific that correspond to the bundle identifier.

  Args:
    profile: string, optional, short name of the .mobileprovision file
      to use, if empty or the file is missing, the best file installed
      will be used
    bundle_identifier: string, value of CFBundleIdentifier from Info.plist

  Returns:
    A tuple of the path to the selected provisioning profile, the data of
    the embedded plist in the provisioning profile and the team identifier
    to use for code signing.

  Raises:
    SystemExit: if no .mobileprovision can be used to sign the bundle.
  """
  profiles_dir = os.path.join(
      os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
  if not os.path.isdir(profiles_dir):
    print >>sys.stderr, (
        'cannot find mobile provisioning for %s' % bundle_identifier)
    sys.exit(1)
  provisioning_profiles = None
  if profile:
    profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
    if os.path.exists(profile_path):
      provisioning_profiles = [profile_path]
  # Named profile missing (or none given): consider everything installed.
  if not provisioning_profiles:
    provisioning_profiles = glob.glob(
        os.path.join(profiles_dir, '*.mobileprovision'))
  valid_provisioning_profiles = {}
  for profile_path in provisioning_profiles:
    profile_data = self._LoadProvisioningProfile(profile_path)
    app_id_pattern = profile_data.get(
        'Entitlements', {}).get('application-identifier', '')
    # A profile is valid if its (possibly wildcarded) application-identifier
    # matches "<team>.<bundle_identifier>" for one of its teams.
    for team_identifier in profile_data.get('TeamIdentifier', []):
      app_id = '%s.%s' % (team_identifier, bundle_identifier)
      if fnmatch.fnmatch(app_id, app_id_pattern):
        valid_provisioning_profiles[app_id_pattern] = (
            profile_path, profile_data, team_identifier)
  if not valid_provisioning_profiles:
    print >>sys.stderr, (
        'cannot find mobile provisioning for %s' % bundle_identifier)
    sys.exit(1)
  # If the user has multiple provisioning profiles installed that can be
  # used for ${bundle_identifier}, pick the most specific one (ie. the
  # provisioning profile whose pattern is the longest).
  selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
  return valid_provisioning_profiles[selected_key]
def _LoadProvisioningProfile(self, profile_path):
  """Extracts the plist embedded in a provisioning profile.

  Args:
    profile_path: string, path to the .mobileprovision file

  Returns:
    Content of the plist embedded in the provisioning profile as a dictionary.
  """
  # .mobileprovision files are CMS-signed; `security cms -D` strips the
  # signature and writes the embedded plist to the temp file.
  with tempfile.NamedTemporaryFile() as temp:
    subprocess.check_call([
        'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
    return self._LoadPlistMaybeBinary(temp.name)
def _LoadPlistMaybeBinary(self, plist_path):
  """Loads into a memory a plist possibly encoded in binary format.

  This is a wrapper around plistlib.readPlist that tries to convert the
  plist to the XML format if it can't be parsed (assuming that it is in
  the binary format).

  Args:
    plist_path: string, path to a plist file, in XML or binary format

  Returns:
    Content of the plist as a dictionary.
  """
  try:
    # First, try to read the file using plistlib that only supports XML,
    # and if an exception is raised, convert a temporary copy to XML and
    # load that copy.
    return plistlib.readPlist(plist_path)
  except Exception:
    # Fix: a bare `except:` also swallowed KeyboardInterrupt/SystemExit;
    # only genuine parse/read errors should trigger the binary fallback.
    pass
  with tempfile.NamedTemporaryFile() as temp:
    shutil.copy2(plist_path, temp.name)
    # `plutil` rewrites the copy in XML so plistlib can parse it.
    subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
    return plistlib.readPlist(temp.name)
def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
  """Builds the variable-substitution mapping used when generating
  Entitlements.plist.

  Args:
    bundle_identifier: string, value of CFBundleIdentifier from Info.plist
    app_identifier_prefix: string, value for AppIdentifierPrefix

  Returns:
    Dictionary of substitutions to apply when generating Entitlements.plist.
  """
  substitutions = {}
  substitutions['CFBundleIdentifier'] = bundle_identifier
  substitutions['AppIdentifierPrefix'] = app_identifier_prefix
  return substitutions
def _GetCFBundleIdentifier(self):
  """Extracts CFBundleIdentifier value from Info.plist in the bundle.

  Returns:
    Value of CFBundleIdentifier in the Info.plist located in the bundle.
  """
  # TARGET_BUILD_DIR / INFOPLIST_PATH are provided by the Xcode build env.
  info_plist_path = os.path.join(
      os.environ['TARGET_BUILD_DIR'],
      os.environ['INFOPLIST_PATH'])
  info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
  return info_plist_data['CFBundleIdentifier']
def _InstallEntitlements(self, entitlements, substitutions, overrides):
  """Generates and install the ${BundleName}.xcent entitlements file.

  Expands variables "$(variable)" pattern in the source entitlements file,
  add extra entitlements defined in the .mobileprovision file and the copy
  the generated plist to "${BundlePath}.xcent".

  Args:
    entitlements: string, optional, path to the Entitlements.plist template
      to use, defaults to "${SDKROOT}/Entitlements.plist"
    substitutions: dictionary, variable substitutions
    overrides: dictionary, values to add to the entitlements

  Returns:
    Path to the generated entitlements file.
  """
  source_path = entitlements
  target_path = os.path.join(
      os.environ['BUILT_PRODUCTS_DIR'],
      os.environ['PRODUCT_NAME'] + '.xcent')
  if not source_path:
    # Fall back to the SDK-provided template.
    source_path = os.path.join(
        os.environ['SDKROOT'],
        'Entitlements.plist')
  shutil.copy2(source_path, target_path)
  data = self._LoadPlistMaybeBinary(target_path)
  data = self._ExpandVariables(data, substitutions)
  if overrides:
    # Template entries win: overrides only fill in keys that are absent.
    for key in overrides:
      if key not in data:
        data[key] = overrides[key]
  plistlib.writePlist(data, target_path)
  return target_path
def _ExpandVariables(self, data, substitutions):
  """Expands variables "$(variable)" in data.

  Recurses through lists and dictionaries; strings have every
  "$(key)" occurrence replaced, other values are returned unchanged.

  Args:
    data: object, can be either string, list or dictionary
    substitutions: dictionary, variable substitutions to perform

  Returns:
    Copy of data where each references to "$(variable)" has been replaced
    by the corresponding value found in substitutions, or left intact if
    the key was not found.
  """
  if isinstance(data, str):
    # Consistency fix: use .items() — identical iteration behavior on
    # Python 2.7 (which this file targets; see the dict comprehension
    # below) and also valid on Python 3, unlike iteritems().
    for key, value in substitutions.items():
      data = data.replace('$(%s)' % key, value)
    return data
  if isinstance(data, list):
    return [self._ExpandVariables(v, substitutions) for v in data]
  if isinstance(data, dict):
    return {k: self._ExpandVariables(data[k], substitutions) for k in data}
  return data
if __name__ == '__main__':
  # main() exits itself for non-None command results; a fall-through
  # (None) propagates here as exit status 0.
  sys.exit(main(sys.argv[1:]))
|
unknown
|
codeparrot/codeparrot-clean
| ||
/*
* Copyright (C) 2013 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.common.collect.testing.google;
import static com.google.common.collect.Sets.newHashSet;
import static com.google.common.collect.testing.Helpers.mapEntry;
import static com.google.common.collect.testing.features.CollectionSize.SEVERAL;
import static com.google.common.collect.testing.features.MapFeature.SUPPORTS_REMOVE;
import static java.util.Collections.singleton;
import static java.util.Collections.singletonMap;
import com.google.common.annotations.GwtCompatible;
import com.google.common.collect.SetMultimap;
import com.google.common.collect.testing.features.CollectionSize;
import com.google.common.collect.testing.features.MapFeature;
import com.google.common.testing.EqualsTester;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.jspecify.annotations.NullMarked;
import org.jspecify.annotations.Nullable;
import org.junit.Ignore;
/**
* Testers for {@link SetMultimap#asMap}.
*
* @author Louis Wasserman
* @param <K> The key type of the tested multimap.
* @param <V> The value type of the tested multimap.
*/
@GwtCompatible
@Ignore("test runners must not instantiate and run this directly, only via suites we build")
// @Ignore affects the Android test runner, which respects JUnit 4 annotations on JUnit 3 tests.
@SuppressWarnings("JUnit4ClassUsedInJUnit3")
@NullMarked
public class SetMultimapAsMapTester<K extends @Nullable Object, V extends @Nullable Object>
    extends AbstractMultimapTester<K, V, SetMultimap<K, V>> {

  // asMap() of a SetMultimap must expose each value collection as a Set.
  public void testAsMapValuesImplementSet() {
    for (Collection<V> valueCollection : multimap().asMap().values()) {
      assertTrue(valueCollection instanceof Set);
    }
  }

  // asMap().get(key) must likewise return a Set for every present key.
  public void testAsMapGetImplementsSet() {
    for (K key : multimap().keySet()) {
      assertTrue(multimap().asMap().get(key) instanceof Set);
    }
  }

  // asMap().remove(key) must return the removed values as a Set.
  @MapFeature.Require(SUPPORTS_REMOVE)
  public void testAsMapRemoveImplementsSet() {
    List<K> keys = new ArrayList<>(multimap().keySet());
    for (K key : keys) {
      // Reset before each removal so every key is still present when removed.
      resetCollection();
      assertTrue(multimap().asMap().remove(key) instanceof Set);
    }
  }

  // asMap() must compare equal to a plain Map<K, Set<V>> with the same entries.
  @CollectionSize.Require(SEVERAL)
  public void testEquals() {
    resetContainer(mapEntry(k0(), v0()), mapEntry(k1(), v0()), mapEntry(k0(), v3()));
    Map<K, Collection<V>> expected = new HashMap<>();
    expected.put(k0(), newHashSet(v0(), v3()));
    expected.put(k1(), newHashSet(v0()));
    new EqualsTester().addEqualityGroup(expected, multimap().asMap()).testEquals();
  }

  // entrySet() equality against a hand-built set of (key, Set<V>) entries.
  @CollectionSize.Require(SEVERAL)
  public void testEntrySetEquals() {
    resetContainer(mapEntry(k0(), v0()), mapEntry(k1(), v0()), mapEntry(k0(), v3()));
    Set<Entry<K, Collection<V>>> expected = new HashSet<>();
    expected.add(mapEntry(k0(), (Collection<V>) newHashSet(v0(), v3())));
    expected.add(mapEntry(k1(), (Collection<V>) newHashSet(v0())));
    new EqualsTester().addEqualityGroup(expected, multimap().asMap().entrySet()).testEquals();
  }

  @CollectionSize.Require(SEVERAL)
  @MapFeature.Require(SUPPORTS_REMOVE)
  /*
   * SetMultimap.asMap essentially returns a Map<K, Set<V>>; we just can't declare it that way.
   * Thus, calls like asMap().values().remove(someSet) are safe because they are comparing a set to
   * a collection of other sets.
   */
  @SuppressWarnings("CollectionUndefinedEquality")
  public void testValuesRemove() {
    resetContainer(mapEntry(k0(), v0()), mapEntry(k1(), v0()), mapEntry(k0(), v3()));
    // Removing the k1 entry (the singleton {v0}) leaves only k0's two values.
    assertTrue(multimap().asMap().values().remove(singleton(v0())));
    assertEquals(2, multimap().size());
    assertEquals(singletonMap(k0(), newHashSet(v0(), v3())), multimap().asMap());
  }
}
|
java
|
github
|
https://github.com/google/guava
|
android/guava-testlib/src/com/google/common/collect/testing/google/SetMultimapAsMapTester.java
|
"""
Management command `manage_group` is used to idempotently create Django groups
and set their permissions by name.
"""
from django.apps import apps
from django.contrib.auth.models import Group, Permission
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from django.utils.translation import gettext as _
class Command(BaseCommand):
    """Idempotently create (or remove) a Django group and sync its permissions."""
    # pylint: disable=missing-docstring
    help = 'Creates the specified group, if it does not exist, and sets its permissions.'

    def add_arguments(self, parser):
        parser.add_argument('group_name')
        parser.add_argument('--remove', dest='is_remove', action='store_true')
        parser.add_argument('-p', '--permissions', nargs='*', default=[])

    def _handle_remove(self, group_name):
        """Delete the named group if it exists; log the outcome either way."""
        try:
            Group.objects.get(name=group_name).delete()  # pylint: disable=no-member
            self.stderr.write(_('Removed group: "{}"').format(group_name))
        except Group.DoesNotExist:
            self.stderr.write(_('Did not find a group with name "{}" - skipping.').format(group_name))

    @transaction.atomic
    def handle(self, group_name, is_remove, permissions=None, *args, **options):
        """Create/update (or remove) the group and reconcile its permissions."""
        if is_remove:
            self._handle_remove(group_name)
            return

        old_permissions = set()
        group, created = Group.objects.get_or_create(name=group_name)  # pylint: disable=no-member
        if created:
            try:
                # Needed for sqlite backend (i.e. in tests) because
                # name.max_length won't be enforced by the db.
                # See also http://www.sqlite.org/faq.html#q9
                group.full_clean()
            except ValidationError as exc:
                # give a more helpful error, keeping the original as the cause
                raise CommandError(
                    _(
                        'Invalid group name: "{group_name}". {messages}'
                    ).format(
                        group_name=group_name,
                        messages=exc.messages[0]
                    )
                ) from exc
            self.stderr.write(_('Created new group: "{}"').format(group_name))
        else:
            self.stderr.write(_('Found existing group: "{}"').format(group_name))
            old_permissions = set(group.permissions.all())

        new_permissions = self._resolve_permissions(permissions or set())
        add_permissions = new_permissions - old_permissions
        remove_permissions = old_permissions - new_permissions
        self.stderr.write(
            _(
                'Adding {codenames} permissions to group "{group}"'
            ).format(
                codenames=[ap.name for ap in add_permissions],
                group=group.name
            )
        )
        self.stderr.write(
            _(
                'Removing {codenames} permissions from group "{group}"'
            ).format(
                codenames=[rp.codename for rp in remove_permissions],
                group=group.name
            )
        )
        # Bug fix: direct assignment to a many-to-many manager
        # (`group.permissions = new_permissions`) raises TypeError on
        # Django >= 1.10; .set() replaces the related set correctly.
        group.permissions.set(new_permissions)
        group.save()

    def _resolve_permissions(self, permissions):
        """Map 'app_label:model_name:codename' strings to Permission objects.

        Raises CommandError with a helpful message for any malformed or
        unknown specifier.
        """
        new_permissions = set()
        for permission in permissions:
            try:
                app_label, model_name, codename = permission.split(':')
            except ValueError as exc:
                # give a more helpful error
                raise CommandError(_(
                    'Invalid permission option: "{}". Please specify permissions '
                    'using the format: app_label:model_name:permission_codename.'
                ).format(permission)) from exc
            # this will raise a LookupError if it fails.
            try:
                model_class = apps.get_model(app_label, model_name)
            except LookupError as exc:
                raise CommandError(str(exc)) from exc
            content_type = ContentType.objects.get_for_model(model_class)
            try:
                new_permission = Permission.objects.get(  # pylint: disable=no-member
                    content_type=content_type,
                    codename=codename,
                )
            except Permission.DoesNotExist as exc:
                # give a more helpful error
                raise CommandError(
                    _(
                        'Invalid permission codename: "{codename}". No such permission exists '
                        'for the model {module}.{model_name}.'
                    ).format(
                        codename=codename,
                        module=model_class.__module__,
                        model_name=model_class.__name__,
                    )
                ) from exc
            new_permissions.add(new_permission)
        return new_permissions
|
unknown
|
codeparrot/codeparrot-clean
| ||
# epydoc
#
# Copyright (C) 2005 Edward Loper
# Author: Edward Loper <edloper@loper.org>
# URL: <http://epydoc.sf.net>
#
# $Id: __init__.py 1691 2008-01-30 17:11:09Z edloper $
"""
Automatic Python reference documentation generator. Epydoc processes
Python modules and docstrings to generate formatted API documentation,
in the form of HTML pages. Epydoc can be used via a command-line
interface (`epydoc.cli`) and a graphical interface (`epydoc.gui`).
Both interfaces let the user specify a set of modules or other objects
to document, and produce API documentation using the following steps:
1. Extract basic information about the specified objects, and objects
that are related to them (such as the values defined by a module).
This can be done via introspection, parsing, or both:
* *Introspection* imports the objects, and examines them directly
using Python's introspection mechanisms.
* *Parsing* reads the Python source files that define the objects,
and extracts information from those files.
2. Combine and process that information.
* **Merging**: Merge the information obtained from introspection &
parsing each object into a single structure.
* **Linking**: Replace any \"pointers\" that were created for
imported variables with the documentation that they point to.
* **Naming**: Assign unique *canonical names* to each of the
specified objects, and any related objects.
* **Docstrings**: Parse the docstrings of each of the specified
objects.
* **Inheritance**: Add variables to classes for any values that
they inherit from their base classes.
3. Generate output. Output can be generated in a variety of formats:
* An HTML webpage.
* A LaTeX document (which can be rendered as a PDF file)
* A plaintext description.
.. digraph:: Overview of epydoc's architecture
:caption: The boxes represent steps in epydoc's processing chain.
Arrows are annotated with the data classes used to
communicate between steps. The lines along the right
side mark what portions of the processing chain are
initiated by build_doc_index() and cli(). Click on
any item to see its documentation.
/*
Python module or value * *
/ \ | |
V V | |
introspect_docs() parse_docs() | |
\ / | |
V V | |
merge_docs() | |
| build_doc_index() cli()
V | |
link_imports() | |
| | |
V | |
assign_canonical_names() | |
| | |
V | |
parse_docstrings() | |
| | |
V | |
inherit_docs() * |
/ | \ |
V V V |
HTMLWriter LaTeXWriter PlaintextWriter *
*/
ranksep = 0.1;
node [shape="box", height="0", width="0"]
{ /* Task nodes */
node [fontcolor=\"#000060\"]
introspect [label="Introspect value:\\nintrospect_docs()",
href="<docintrospecter.introspect_docs>"]
parse [label="Parse source code:\\nparse_docs()",
href="<docparser.parse_docs>"]
merge [label="Merge introspected & parsed docs:\\nmerge_docs()",
href="<docbuilder.merge_docs>", width="2.5"]
link [label="Link imports:\\nlink_imports()",
href="<docbuilder.link_imports>", width="2.5"]
name [label="Assign names:\\nassign_canonical_names()",
href="<docbuilder.assign_canonical_names>", width="2.5"]
docstrings [label="Parse docstrings:\\nparse_docstring()",
href="<docstringparser.parse_docstring>", width="2.5"]
inheritance [label="Inherit docs from bases:\\ninherit_docs()",
href="<docbuilder.inherit_docs>", width="2.5"]
write_html [label="Write HTML output:\\nHTMLWriter",
href="<docwriter.html>"]
write_latex [label="Write LaTeX output:\\nLaTeXWriter",
href="<docwriter.latex>"]
write_text [label="Write text output:\\nPlaintextWriter",
href="<docwriter.plaintext>"]
}
{ /* Input & Output nodes */
node [fontcolor=\"#602000\", shape="plaintext"]
input [label="Python module or value"]
output [label="DocIndex", href="<apidoc.DocIndex>"]
}
{ /* Graph edges */
edge [fontcolor=\"#602000\"]
input -> introspect
introspect -> merge [label="APIDoc", href="<apidoc.APIDoc>"]
input -> parse
parse -> merge [label="APIDoc", href="<apidoc.APIDoc>"]
merge -> link [label=" DocIndex", href="<apidoc.DocIndex>"]
link -> name [label=" DocIndex", href="<apidoc.DocIndex>"]
name -> docstrings [label=" DocIndex", href="<apidoc.DocIndex>"]
docstrings -> inheritance [label=" DocIndex", href="<apidoc.DocIndex>"]
inheritance -> output
output -> write_html
output -> write_latex
output -> write_text
}
{ /* Task collections */
node [shape="circle",label="",width=.1,height=.1]
edge [fontcolor="black", dir="none", fontcolor=\"#000060\"]
l3 -> l4 [label=" epydoc.\\l docbuilder.\\l build_doc_index()",
href="<docbuilder.build_doc_index>"]
l1 -> l2 [label=" epydoc.\\l cli()", href="<cli>"]
}
{ rank=same; l1 l3 input }
{ rank=same; l2 write_html }
{ rank=same; l4 output }
Package Organization
====================
The epydoc package contains the following subpackages and modules:
.. packagetree::
:style: UML
The user interfaces are provided by the `gui` and `cli` modules.
The `apidoc` module defines the basic data types used to record
information about Python objects. The programmatic interface to
epydoc is provided by `docbuilder`. Docstring markup parsing is
handled by the `markup` package, and output generation is handled by
the `docwriter` package. See the submodule list for more
information about the submodules and subpackages.
:group User Interface: gui, cli
:group Basic Data Types: apidoc
:group Documentation Generation: docbuilder, docintrospecter, docparser
:group Docstring Processing: docstringparser, markup
:group Output Generation: docwriter
:group Completeness Checking: checker
:group Miscellaneous: log, util, test, compat
:author: `Edward Loper <edloper@gradient.cis.upenn.edu>`__
:requires: Python 2.3+
:version: 3.0.1
:see: `The epydoc webpage <http://epydoc.sourceforge.net>`__
:see: `The epytext markup language
manual <http://epydoc.sourceforge.net/epytext.html>`__
:todo: Create a better default top_page than trees.html.
:todo: Fix trees.html to work when documenting non-top-level
modules/packages
:todo: Implement @include
:todo: Optimize epytext
:todo: More doctests
:todo: When introspecting, limit how much introspection you do (eg,
don't construct docs for imported modules' vars if it's
not necessary)
:bug: UserDict.* is interpreted as imported .. why??
:license: IBM Open Source License
:copyright: |copy| 2006 Edward Loper
:newfield contributor: Contributor, Contributors (Alphabetical Order)
:contributor: `Glyph Lefkowitz <mailto:glyph@twistedmatrix.com>`__
:contributor: `Edward Loper <mailto:edloper@gradient.cis.upenn.edu>`__
:contributor: `Bruce Mitchener <mailto:bruce@cubik.org>`__
:contributor: `Jeff O'Halloran <mailto:jeff@ohalloran.ca>`__
:contributor: `Simon Pamies <mailto:spamies@bipbap.de>`__
:contributor: `Christian Reis <mailto:kiko@async.com.br>`__
:contributor: `Daniele Varrazzo <mailto:daniele.varrazzo@gmail.com>`__
.. |copy| unicode:: 0xA9 .. copyright sign
"""
# Module-level metadata for epydoc itself. The bare strings following each
# assignment are epydoc "variable docstrings" consumed by its own doc tool.
__docformat__ = 'restructuredtext en'
__version__ = '3.0.1'
"""The version of epydoc"""
__author__ = 'Edward Loper <edloper@gradient.cis.upenn.edu>'
"""The primary author of epydoc"""
__url__ = 'http://epydoc.sourceforge.net'
"""The URL for epydoc's homepage"""
__license__ = 'IBM Open Source License'
"""The license governing the use and distribution of epydoc"""
# [xx] this should probably be a private variable:
DEBUG = False
"""True if debugging is turned on."""
# Changes needed for docs:
#   - document the method for deciding what's public/private
#   - epytext: fields are defined slightly differently (@group)
#   - new fields
#   - document __extra_epydoc_fields__ and @newfield
#   - Add a faq?
#   - @type a,b,c: ...
#   - new command line option: --command-line-order
|
unknown
|
codeparrot/codeparrot-clean
| ||
"""
==============================================
Lasso model selection via information criteria
==============================================
This example reproduces the example of Fig. 2 of [ZHT2007]_. A
:class:`~sklearn.linear_model.LassoLarsIC` estimator is fit on a
diabetes dataset and the AIC and the BIC criteria are used to select
the best model.
.. note::
It is important to note that the optimization to find `alpha` with
:class:`~sklearn.linear_model.LassoLarsIC` relies on the AIC or BIC
criteria that are computed in-sample, thus on the training set directly.
This approach differs from the cross-validation procedure. For a comparison
of the two approaches, you can refer to the following example:
:ref:`sphx_glr_auto_examples_linear_model_plot_lasso_model_selection.py`.
.. rubric:: References
.. [ZHT2007] :arxiv:`Zou, Hui, Trevor Hastie, and Robert Tibshirani.
"On the degrees of freedom of the lasso."
The Annals of Statistics 35.5 (2007): 2173-2192.
<0712.0881>`
"""
# Authors: The scikit-learn developers
# SPDX-License-Identifier: BSD-3-Clause
# %%
# We will use the diabetes dataset.
from sklearn.datasets import load_diabetes
# Load the diabetes regression dataset as pandas objects.
X, y = load_diabetes(return_X_y=True, as_frame=True)
n_samples = X.shape[0]
X.head()
# %%
# Scikit-learn provides an estimator called
# :class:`~sklearn.linear_model.LassoLarsIC` that uses either Akaike's
# information criterion (AIC) or the Bayesian information criterion (BIC) to
# select the best model. Before fitting
# this model, we will scale the dataset.
#
# In the following, we are going to fit two models to compare the values
# reported by AIC and BIC.
from sklearn.linear_model import LassoLarsIC
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler
# Scaling + LassoLarsIC in one pipeline; fitted first with the AIC criterion.
lasso_lars_ic = make_pipeline(StandardScaler(), LassoLarsIC(criterion="aic")).fit(X, y)
# %%
# To be in line with the definition in [ZHT2007]_, we need to rescale the
# AIC and the BIC. Indeed, Zou et al. are ignoring some constant terms
# compared to the original definition of AIC derived from the maximum
# log-likelihood of a linear model. You can refer to
# :ref:`mathematical detail section for the User Guide <lasso_lars_ic>`.
def zou_et_al_criterion_rescaling(criterion, n_samples, noise_variance):
    """Rescale an information criterion to match the definition used by Zou et al.

    Subtracts the constant terms (Gaussian log-likelihood normalization and the
    sample count) that scikit-learn includes but Zou et al. omit.
    """
    log_term = np.log(2 * np.pi * noise_variance)
    return criterion - n_samples * log_term - n_samples
# %%
import numpy as np
# lasso_lars_ic[-1] is the fitted LassoLarsIC step of the pipeline.
aic_criterion = zou_et_al_criterion_rescaling(
    lasso_lars_ic[-1].criterion_,
    n_samples,
    lasso_lars_ic[-1].noise_variance_,
)
# Index along the regularization path where the selected alpha lives.
index_alpha_path_aic = np.flatnonzero(
    lasso_lars_ic[-1].alphas_ == lasso_lars_ic[-1].alpha_
)[0]
# %%
# Refit the same pipeline with the BIC criterion instead of AIC.
lasso_lars_ic.set_params(lassolarsic__criterion="bic").fit(X, y)
bic_criterion = zou_et_al_criterion_rescaling(
    lasso_lars_ic[-1].criterion_,
    n_samples,
    lasso_lars_ic[-1].noise_variance_,
)
index_alpha_path_bic = np.flatnonzero(
    lasso_lars_ic[-1].alphas_ == lasso_lars_ic[-1].alpha_
)[0]
# %%
# Now that we collected the AIC and BIC, we can as well check that the minima
# of both criteria happen at the same alpha. Then, we can simplify the
# following plot.
index_alpha_path_aic == index_alpha_path_bic
# %%
# Finally, we can plot the AIC and BIC criterion and the subsequent selected
# regularization parameter.
import matplotlib.pyplot as plt
plt.plot(aic_criterion, color="tab:blue", marker="o", label="AIC criterion")
plt.plot(bic_criterion, color="tab:orange", marker="o", label="BIC criterion")
# Mark the selected alpha with a vertical line spanning the AIC range.
plt.vlines(
    index_alpha_path_bic,
    aic_criterion.min(),
    aic_criterion.max(),
    color="black",
    linestyle="--",
    label="Selected alpha",
)
plt.legend()
plt.ylabel("Information criterion")
plt.xlabel("Lasso model sequence")
_ = plt.title("Lasso model selection via AIC and BIC")
|
python
|
github
|
https://github.com/scikit-learn/scikit-learn
|
examples/linear_model/plot_lasso_lars_ic.py
|
"""
Copyright (C) 2004-2015 Pivotal Software, Inc. All rights reserved.
This program and the accompanying materials are made available under
the terms of the under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from datetime import datetime
import os
import tinctest
from tinctest.lib import local_path
from mpp.models import SQLPerformanceTestCase
import unittest2 as unittest
# we're testing SQLTestCase as it pertains to tinc.py (and only tinc.py)
# as such, any attempts by raw unit2 to discover and load MockSQLTestCase must be averted
@unittest.skip('mock')
# NOTE: the class and method docstrings below are parsed by the tinc framework
# as test metadata (@description, @tags, @repetitions, @baseline, @threshold);
# they are functional, not ordinary documentation — do not reword them.
class MockSQLPerformanceTestCase(SQLPerformanceTestCase):
    """
    @description test case with metadata
    @created 2012-07-05 12:00:00
    @modified 2012-07-05 12:00:02
    @tags orca hashagg
    @repetitions 10
    """
    # No fixture setup/teardown is needed for this mock.
    def setUp(self):
        pass
    def test_00000(self):
        """
        @baseline 4.2.2.0
        @threshold 10
        """
        pass
    def tearDown(self):
        pass
class SQLPerformanceTestCaseTests(unittest.TestCase):
    """Unit tests exercising SQLPerformanceTestCase via the mock class above."""
    def test_run_sql_test_success(self):
        # NOTE(review): 'test_query02' is not defined on the mock class above —
        # presumably SQLPerformanceTestCase synthesizes test methods from .sql
        # files next to this module; likewise the author/tags/repetitions
        # asserted below differ from the class docstring, presumably coming
        # from per-query metadata. Verify against the tinc framework.
        test_case = MockSQLPerformanceTestCase('test_query02')
        # As explained above, we want MockSQLTestCase to run if and only if
        # it's being invoked by our unit tests. So, it's skipped if discovered
        # directly by unit2. Here, bearing in mind that SQLTestCaseTests is itself
        # triggered by unit2, we override MockSQLTestCase's skip decorator to allow
        # this explicit construction of MockSQLTestCase to proceed.
        test_case.__class__.__unittest_skip__ = False
        self.assertEqual(test_case.name, "MockSQLPerformanceTestCase.test_query02")
        self.assertEqual(test_case.author, 'kumara64')
        self.assertEqual(test_case.description, 'test sql test case')
        self.assertEqual(test_case.created_datetime, datetime.strptime('2012-07-05 12:00:00', '%Y-%m-%d %H:%M:%S'))
        self.assertEqual(test_case.modified_datetime, datetime.strptime('2012-07-08 12:00:02', '%Y-%m-%d %H:%M:%S'))
        self.assertEqual(test_case.tags, set(['orca', 'hashagg', 'executor']))
        self.assertEqual(test_case.repetitions, 3)
        self.assertEqual(test_case.threshold, 10)
        # Run the single test case and expect one clean pass.
        test_result = unittest.TestResult()
        test_case.run(test_result)
        self.assertEqual(test_result.testsRun, 1)
        self.assertEqual(len(test_result.errors), 0)
        self.assertEqual(len(test_result.skipped), 0)
        self.assertEqual(len(test_result.failures), 0)
    def test_run_all_perf_tests(self):
        """Loading the mock class should discover three tests; all must pass."""
        test_loader = tinctest.TINCTestLoader()
        test_suite = test_loader.loadTestsFromTestCase(MockSQLPerformanceTestCase)
        # As explained above, we want MockSQLTestCase to run if and only if
        # it's being invoked by our unit tests. So, it's skipped if discovered
        # directly by unit2. Here, bearing in mind that SQLTestCaseTests is itself
        # triggered by unit2, we override MockSQLTestCase's skip decorator to allow
        # this explicit construction of MockSQLTestCase to proceed.
        for test_case in test_suite._tests:
            test_case.__class__.__unittest_skip__ = False
        test_result = unittest.TestResult()
        test_suite.run(test_result)
        self.assertEqual(test_result.testsRun, 3)
        self.assertEqual(len(test_result.errors), 0)
        self.assertEqual(len(test_result.skipped), 0)
        self.assertEqual(len(test_result.failures), 0)
|
unknown
|
codeparrot/codeparrot-clean
| ||
use std::cell::RefCell;
use std::collections::hash_map::Entry;
use rustc_data_structures::fx::{FxHashMap, FxIndexSet};
use rustc_middle::mir::{Body, MirDumper, MirPhase, RuntimePhase};
use rustc_middle::ty::TyCtxt;
use rustc_session::Session;
use tracing::trace;
use crate::lint::lint_body;
use crate::{errors, validate};
thread_local! {
    /// Maps MIR pass names to a snake case form to match profiling naming style.
    /// Populated lazily by `to_profiler_name`.
    static PASS_TO_PROFILER_NAMES: RefCell<FxHashMap<&'static str, &'static str>> = {
        RefCell::new(FxHashMap::default())
    };
}
/// Converts a MIR pass name into a snake case form to match the profiling naming style.
///
/// Results are memoized per thread in `PASS_TO_PROFILER_NAMES`: the first
/// request for a name builds and leaks the snake-case string once, so a
/// `&'static str` can be cached and handed out on every later call.
fn to_profiler_name(type_name: &'static str) -> &'static str {
    PASS_TO_PROFILER_NAMES.with(|names| match names.borrow_mut().entry(type_name) {
        Entry::Occupied(e) => *e.get(),
        Entry::Vacant(e) => {
            // CamelCase -> snake_case: prefix each uppercase letter with '_'
            // (lowercased), and normalize '-' to '_'.
            let snake_case: String = type_name
                .chars()
                .flat_map(|c| {
                    if c.is_ascii_uppercase() {
                        vec!['_', c.to_ascii_lowercase()]
                    } else if c == '-' {
                        vec!['_']
                    } else {
                        vec![c]
                    }
                })
                .collect();
            // Leaking is deliberate: the set of pass names is small and the
            // returned reference must be 'static (see the signature).
            let result = &*String::leak(format!("mir_pass{}", snake_case));
            e.insert(result);
            result
        }
    })
}
// A function that simplifies a pass's type_name. E.g. `Baz`, `Baz<'_>`,
// `foo::bar::Baz`, and `foo::bar::Baz<'a, 'b>` all become `Baz`.
//
// It's `const` for perf reasons: it's called a lot, and doing the string
// operations at runtime causes a non-trivial slowdown. If
// `split_once`/`rsplit_once` become `const` its body could be simplified to
// this:
// ```ignore (fragment)
// let name = if let Some((_, tail)) = name.rsplit_once(':') { tail } else { name };
// let name = if let Some((head, _)) = name.split_once('<') { head } else { name };
// name
// ```
/// Strip any module path prefix (up to the last `:`) and any generic suffix
/// (from the first `<`) off a type name, e.g. `foo::bar::Baz<'a>` -> `Baz`.
const fn simplify_pass_type_name(name: &'static str) -> &'static str {
    // FIXME(const-hack) Simplify the implementation once more `str` methods get const-stable.
    let raw = name.as_bytes();
    // `start` ends up just past the last ':' (or 0 if there is none).
    let mut start = raw.len();
    while start > 0 && raw[start - 1] != b':' {
        start -= 1;
    }
    // `end` stops at the first '<' after `start` (or the end of the string).
    let mut end = start;
    while end < raw.len() && raw[end] != b'<' {
        end += 1;
    }
    let (head, _) = raw.split_at(end);
    let (_, simplified) = head.split_at(start);
    match std::str::from_utf8(simplified) {
        Ok(s) => s,
        Err(_) => panic!(),
    }
}
/// A streamlined trait that you can implement to create a pass; the
/// pass will be named after the type, and it will consist of a main
/// loop that goes over each available MIR and applies `run_pass`.
pub(super) trait MirPass<'tcx> {
    /// The pass's name: the implementing type's name with module path and
    /// generics stripped, computed at compile time.
    fn name(&self) -> &'static str {
        const { simplify_pass_type_name(std::any::type_name::<Self>()) }
    }
    /// Snake-case variant of `name` used for self-profiling activity labels.
    fn profiler_name(&self) -> &'static str {
        to_profiler_name(self.name())
    }
    /// Returns `true` if this pass is enabled with the current combination of compiler flags.
    fn is_enabled(&self, _sess: &Session) -> bool {
        true
    }
    /// Returns `true` if this pass can be overridden by `-Zenable-mir-passes`. This should be
    /// true for basically every pass other than those that are necessary for correctness.
    fn can_be_overridden(&self) -> bool {
        true
    }
    /// Transform `body` in place. The only required method.
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>);
    /// Whether before/after MIR dumps should be emitted for this pass.
    fn is_mir_dump_enabled(&self) -> bool {
        true
    }
    /// Returns `true` if this pass must be run (i.e. it is required for soundness).
    /// For passes which are strictly optimizations, this should return `false`.
    /// If this is `false`, `#[optimize(none)]` will disable the pass.
    fn is_required(&self) -> bool;
}
/// Just like `MirPass`, except it cannot mutate `Body`, and MIR dumping is
/// disabled (via the `Lint` adapter).
pub(super) trait MirLint<'tcx> {
    /// The lint's name; same derivation as `MirPass::name`.
    fn name(&self) -> &'static str {
        const { simplify_pass_type_name(std::any::type_name::<Self>()) }
    }
    /// Returns `true` if this lint is enabled with the current compiler flags.
    fn is_enabled(&self, _sess: &Session) -> bool {
        true
    }
    /// Inspect `body` (read-only) and report any findings.
    fn run_lint(&self, tcx: TyCtxt<'tcx>, body: &Body<'tcx>);
}
/// An adapter for `MirLint`s that implements `MirPass`.
#[derive(Debug, Clone)]
pub(super) struct Lint<T>(pub T);
impl<'tcx, T> MirPass<'tcx> for Lint<T>
where
    T: MirLint<'tcx>,
{
    fn name(&self) -> &'static str {
        self.0.name()
    }
    fn is_enabled(&self, sess: &Session) -> bool {
        self.0.is_enabled(sess)
    }
    // Delegates to the read-only lint; `body` is never mutated.
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        self.0.run_lint(tcx, body)
    }
    // Lints don't change the MIR, so before/after dumps would be identical.
    fn is_mir_dump_enabled(&self) -> bool {
        false
    }
    fn is_required(&self) -> bool {
        true
    }
}
/// Adapter that gates a pass behind a minimum `mir-opt-level` (field 0).
pub(super) struct WithMinOptLevel<T>(pub T, pub T);
impl<'tcx, T> MirPass<'tcx> for WithMinOptLevel<T>
where
    T: MirPass<'tcx>,
{
    fn name(&self) -> &'static str {
        self.1.name()
    }
    // Enabled only at or above the stored opt level.
    fn is_enabled(&self, sess: &Session) -> bool {
        sess.mir_opt_level() >= self.0 as usize
    }
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        self.1.run_pass(tcx, body)
    }
    fn is_required(&self) -> bool {
        self.1.is_required()
    }
}
/// Whether to allow non-[required] optimizations
///
/// [required]: MirPass::is_required
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub(crate) enum Optimizations {
    /// Skip passes that are purely optimizations (`is_required() == false`).
    Suppressed,
    /// Run all enabled passes.
    Allowed,
}
/// Run the sequence of passes without validating the MIR after each pass. The MIR is still
/// validated at the end.
pub(super) fn run_passes_no_validate<'tcx>(
    tcx: TyCtxt<'tcx>,
    body: &mut Body<'tcx>,
    passes: &[&dyn MirPass<'tcx>],
    phase_change: Option<MirPhase>,
) {
    // `validate_each: false`; optimizations are always allowed here.
    run_passes_inner(tcx, body, passes, phase_change, false, Optimizations::Allowed);
}
/// Run `passes` on `body`, validating after each pass when `-Zvalidate-mir` is set.
/// The optional `phase_change` is applied after executing all the passes, if present.
pub(super) fn run_passes<'tcx>(
    tcx: TyCtxt<'tcx>,
    body: &mut Body<'tcx>,
    passes: &[&dyn MirPass<'tcx>],
    phase_change: Option<MirPhase>,
    optimizations: Optimizations,
) {
    run_passes_inner(tcx, body, passes, phase_change, true, optimizations);
}
/// Decide whether `pass` should run, combining the pass's own `is_enabled`,
/// any `-Zmir-enable-passes` override, and optimization suppression.
pub(super) fn should_run_pass<'tcx, P>(
    tcx: TyCtxt<'tcx>,
    pass: &P,
    optimizations: Optimizations,
) -> bool
where
    P: MirPass<'tcx> + ?Sized,
{
    let name = pass.name();
    // Non-overridable passes consult only their own enablement logic.
    if !pass.can_be_overridden() {
        return pass.is_enabled(tcx.sess);
    }
    let overridden_passes = &tcx.sess.opts.unstable_opts.mir_enable_passes;
    // Search in reverse so the *last* matching -Zmir-enable-passes entry wins.
    let overridden =
        overridden_passes.iter().rev().find(|(s, _)| s == &*name).map(|(_name, polarity)| {
            trace!(
                pass = %name,
                "{} as requested by flag",
                if *polarity { "Running" } else { "Not running" },
            );
            *polarity
        });
    // A non-required pass is suppressed when optimizations are off.
    let suppressed = !pass.is_required() && matches!(optimizations, Optimizations::Suppressed);
    overridden.unwrap_or_else(|| !suppressed && pass.is_enabled(tcx.sess))
}
/// Shared implementation behind `run_passes` / `run_passes_no_validate`:
/// warns about unknown pass names, runs each enabled pass (with optional
/// before/after dumps, profiling, validation, and linting), then applies the
/// optional phase change.
fn run_passes_inner<'tcx>(
    tcx: TyCtxt<'tcx>,
    body: &mut Body<'tcx>,
    passes: &[&dyn MirPass<'tcx>],
    phase_change: Option<MirPhase>,
    validate_each: bool,
    optimizations: Optimizations,
) {
    let overridden_passes = &tcx.sess.opts.unstable_opts.mir_enable_passes;
    trace!(?overridden_passes);
    // Warn about -Zmir-enable-passes entries that name no known pass.
    let named_passes: FxIndexSet<_> =
        overridden_passes.iter().map(|(name, _)| name.as_str()).collect();
    for &name in named_passes.difference(&*crate::PASS_NAMES) {
        tcx.dcx().emit_warn(errors::UnknownPassName { name });
    }
    // Verify that no passes are missing from the `declare_passes` invocation
    #[cfg(debug_assertions)]
    {
        let used_passes: FxIndexSet<_> = passes.iter().map(|p| p.name()).collect();
        let undeclared = used_passes.difference(&*crate::PASS_NAMES).collect::<Vec<_>>();
        if let Some((name, rest)) = undeclared.split_first() {
            let mut err =
                tcx.dcx().struct_bug(format!("pass `{name}` is not declared in `PASS_NAMES`"));
            for name in rest {
                err.note(format!("pass `{name}` is also not declared in `PASS_NAMES`"));
            }
            err.emit();
        }
    }
    // Only build the profiling argument string when the profiler is on.
    let prof_arg = tcx.sess.prof.enabled().then(|| format!("{:?}", body.source.def_id()));
    if !body.should_skip() {
        let validate = validate_each & tcx.sess.opts.unstable_opts.validate_mir;
        let lint = tcx.sess.opts.unstable_opts.lint_mir;
        for pass in passes {
            let pass_name = pass.name();
            if !should_run_pass(tcx, *pass, optimizations) {
                continue;
            };
            // Dump the MIR before the pass runs (when dumping applies).
            let dumper = if pass.is_mir_dump_enabled()
                && let Some(dumper) = MirDumper::new(tcx, pass_name, body)
            {
                Some(dumper.set_show_pass_num().set_disambiguator(&"before"))
            } else {
                None
            };
            if let Some(dumper) = dumper.as_ref() {
                dumper.dump_mir(body);
            }
            // Run the pass, wrapped in a profiling activity when enabled.
            if let Some(prof_arg) = &prof_arg {
                tcx.sess
                    .prof
                    .generic_activity_with_arg(pass.profiler_name(), &**prof_arg)
                    .run(|| pass.run_pass(tcx, body));
            } else {
                pass.run_pass(tcx, body);
            }
            if let Some(dumper) = dumper {
                dumper.set_disambiguator(&"after").dump_mir(body);
            }
            if validate {
                validate_body(tcx, body, format!("after pass {pass_name}"));
            }
            if lint {
                lint_body(tcx, body, format!("after pass {pass_name}"));
            }
            body.pass_count += 1;
        }
    }
    if let Some(new_phase) = phase_change {
        // Phases must strictly advance.
        if body.phase >= new_phase {
            panic!("Invalid MIR phase transition from {:?} to {:?}", body.phase, new_phase);
        }
        body.phase = new_phase;
        body.pass_count = 0;
        dump_mir_for_phase_change(tcx, body);
        // The final (Optimized) phase is always validated, even without -Zvalidate-mir.
        let validate =
            (validate_each & tcx.sess.opts.unstable_opts.validate_mir & !body.should_skip())
                || new_phase == MirPhase::Runtime(RuntimePhase::Optimized);
        let lint = tcx.sess.opts.unstable_opts.lint_mir & !body.should_skip();
        if validate {
            validate_body(tcx, body, format!("after phase change to {}", new_phase.name()));
        }
        if lint {
            lint_body(tcx, body, format!("after phase change to {}", new_phase.name()));
        }
        body.pass_count = 1;
    }
}
/// Run the MIR validator over `body`, tagging any failures with `when`.
pub(super) fn validate_body<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>, when: String) {
    validate::Validator { when }.run_pass(tcx, body);
}
/// Dump the MIR right after a phase change; must be called before any pass of
/// the new phase has run (hence the `pass_count == 0` assertion).
pub(super) fn dump_mir_for_phase_change<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>) {
    assert_eq!(body.pass_count, 0);
    if let Some(dumper) = MirDumper::new(tcx, body.phase.name(), body) {
        dumper.set_show_pass_num().set_disambiguator(&"after").dump_mir(body)
    }
}
|
rust
|
github
|
https://github.com/rust-lang/rust
|
compiler/rustc_mir_transform/src/pass_manager.rs
|
import pytest
from rest_framework.reverse import reverse
from osmaxx.conversion import status
# List endpoints that require authentication; used to parametrize access tests.
authenticated_access_urls = [
    reverse('clipping_area-list'),
    reverse('conversion_job-list'),
    reverse('conversion_parametrization-list'),
]
@pytest.fixture(params=authenticated_access_urls)
def access_url(request):
    """Yield each authenticated API list URL in turn."""
    return request.param
def test_access_for_unauthorized_user_denied(client, access_url):
    """An anonymous client must be rejected with HTTP 403 on every API list URL."""
    assert client.get(access_url).status_code == 403
@pytest.mark.django_db()
def test_access_for_authenticated_client_allowed(authenticated_client, access_url):
    """An authenticated client gets HTTP 200 on every API list URL."""
    response = authenticated_client.get(access_url)
    assert response.status_code == 200
def test_access_for_admin_user_allowed(admin_client, access_url):
    """An admin client gets HTTP 200 on every API list URL."""
    response = admin_client.get(access_url)
    assert response.status_code == 200
@pytest.mark.django_db()
def test_conversion_parametrization_creation_success(authenticated_api_client, conversion_parametrization_data):
    """POSTing valid parametrization data while authenticated returns 201."""
    response = authenticated_api_client.post(reverse('conversion_parametrization-list'), conversion_parametrization_data, format='json')
    assert response.status_code == 201
@pytest.mark.django_db()
def test_conversion_parametrization_creation_fails(api_client, conversion_parametrization_data):
    """POSTing parametrization data unauthenticated is rejected with 403."""
    response = api_client.post(reverse('conversion_parametrization-list'), conversion_parametrization_data, format='json')
    assert response.status_code == 403
@pytest.mark.django_db()
def test_conversion_parametrization_detail_access_success(authenticated_api_client, conversion_parametrization, persisted_valid_clipping_area):
    """The detail endpoint returns the parametrization's serialized fields."""
    response = authenticated_api_client.get(reverse('conversion_parametrization-detail', kwargs={'pk': conversion_parametrization.id}))
    assert response.status_code == 200
    data = response.json()
    assert data['id'] == conversion_parametrization.id
    assert data['out_format'] == conversion_parametrization.out_format
    assert data['out_srs'] == conversion_parametrization.out_srs
    assert data['clipping_area'] == persisted_valid_clipping_area.id
@pytest.mark.django_db()
def test_conversion_parametrization_detail_access_fails(api_client, conversion_parametrization):
    """The detail endpoint rejects unauthenticated access with 403."""
    response = api_client.get(reverse('conversion_parametrization-detail', kwargs={'pk': conversion_parametrization.id}))
    assert response.status_code == 403
@pytest.mark.django_db()
def test_conversion_job_creation_success(authenticated_api_client, conversion_job_data, mocker):
    """Creating a job returns 201, echoes the input, and kicks off conversion."""
    # Patch out the actual conversion so the test only covers the API layer.
    start_conversion_mock = mocker.patch('osmaxx.conversion.models.Job.start_conversion')
    response = authenticated_api_client.post(reverse('conversion_job-list'), conversion_job_data, format='json')
    data = response.json()
    assert response.status_code == 201
    assert data['callback_url'] == conversion_job_data['callback_url']
    assert data['parametrization'] == conversion_job_data['parametrization']
    assert start_conversion_mock.call_count == 1
@pytest.mark.django_db()
def test_conversion_job_creation_fails(api_client, conversion_job_data):
    """Creating a job unauthenticated is rejected with 403."""
    response = api_client.post(reverse('conversion_job-list'), conversion_job_data, format='json')
    assert response.status_code == 403
@pytest.mark.django_db()
def test_conversion_job_detail_access_success(authenticated_api_client, conversion_job, conversion_parametrization):
    """The job detail endpoint exposes id, callback, parametrization, status."""
    response = authenticated_api_client.get(reverse('conversion_job-detail', kwargs={'pk': conversion_job.id}))
    assert response.status_code == 200
    data = response.json()
    assert data['id'] == conversion_job.id
    assert data['callback_url'] == conversion_job.callback_url
    assert data['parametrization'] == conversion_parametrization.id
    assert data['status'] == status.RECEIVED
    # No conversion has run yet, so there is no result file.
    assert data['resulting_file_path'] is None
@pytest.mark.django_db()
def test_conversion_job_detail_access_fails_with_anonymous_user(api_client, conversion_job):
    """The job detail endpoint rejects anonymous access with 403."""
    response = api_client.get(reverse('conversion_job-detail', kwargs={'pk': conversion_job.id}))
    assert response.status_code == 403
@pytest.mark.django_db()
def test_conversion_job_absolute_url_resolves_correct(conversion_job, server_url):
    """get_absolute_url() must equal the server URL joined with the detail route."""
    detail_path = reverse('conversion_job-detail', kwargs={'pk': conversion_job.id})
    expected = server_url + detail_path
    assert conversion_job.get_absolute_url() == expected
@pytest.mark.django_db()
def test_conversion_job_creation_enqueues(authenticated_api_client, conversion_job_data, rq_mock_return, mocker):
    """Creating a job must enqueue exactly one RQ conversion task."""
    conversion_start_start_format_extraction_mock = mocker.patch('osmaxx.conversion.converters.converter.rq_enqueue_with_settings', return_value=rq_mock_return())
    authenticated_api_client.post(reverse('conversion_job-list'), conversion_job_data, format='json')
    assert conversion_start_start_format_extraction_mock.call_count == 1
|
unknown
|
codeparrot/codeparrot-clean
| ||
{{ formset.management_form }}{% for form in formset %}{{ form.as_p() }}{% endfor %}
|
html
|
github
|
https://github.com/django/django
|
django/forms/jinja2/django/forms/formsets/p.html
|
/**
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import * as t from '@babel/types';
import {
CompilerError,
CompilerErrorDetail,
ErrorCategory,
} from '../CompilerError';
import {computeUnconditionalBlocks} from '../HIR/ComputeUnconditionalBlocks';
import {isHookName} from '../HIR/Environment';
import {
HIRFunction,
IdentifierId,
Place,
SourceLocation,
getHookKind,
} from '../HIR/HIR';
import {
eachInstructionLValue,
eachInstructionOperand,
eachTerminalOperand,
} from '../HIR/visitors';
import {assertExhaustive} from '../Utils/utils';
import {Result} from '../Utils/Result';
/**
 * Represents the possible kinds of value which may be stored at a given Place during
 * abstract interpretation. The kinds form a lattice, with earlier items taking
 * precedence over later items (see joinKinds()).
 *
 * Precedence (highest first): Error, KnownHook, PotentialHook, Global, Local.
 */
enum Kind {
  // A potential/known hook which was already used in an invalid way
  Error = 'Error',
  /*
   * A known hook. Sources include:
   * - LoadGlobal instructions whose type was inferred as a hook
   * - PropertyLoad, ComputedLoad, and Destructuring instructions
   *   where the object is a KnownHook
   * - PropertyLoad, ComputedLoad, and Destructuring instructions
   *   where the object is a Global and the property name is hook-like
   */
  KnownHook = 'KnownHook',
  /*
   * A potential hook. Sources include:
   * - LValues (other than LoadGlobal) where the name is hook-like
   * - PropertyLoad, ComputedLoad, and Destructuring instructions
   *   where the object is a potential hook or the property name
   *   is hook-like
   */
  PotentialHook = 'PotentialHook',
  // LoadGlobal values whose type was not inferred as a hook
  Global = 'Global',
  // All other values, ie local variables
  Local = 'Local',
}
function joinKinds(a: Kind, b: Kind): Kind {
  /*
   * Lattice join: return whichever operand sits higher in the precedence
   * order Error > KnownHook > PotentialHook > Global > Local.
   */
  const precedence: Array<Kind> = [
    Kind.Error,
    Kind.KnownHook,
    Kind.PotentialHook,
    Kind.Global,
  ];
  for (const kind of precedence) {
    if (a === kind || b === kind) {
      return kind;
    }
  }
  return Kind.Local;
}
/*
* Validates that the function honors the [Rules of Hooks](https://react.dev/warnings/invalid-hook-call-warning)
* rule that hooks may only be called and not otherwise referenced as first-class values.
*
* Specifically this pass implements the following rules:
* - Known hooks may only be called unconditionally, and cannot be used as first-class values.
* See the note for Kind.KnownHook for sources of known hooks
* - Potential hooks may be referenced as first-class values, with the exception that they
* may not appear as the callee of a conditional call.
* See the note for Kind.PotentialHook for sources of potential hooks
*/
export function validateHooksUsage(
  fn: HIRFunction,
): Result<void, CompilerError> {
  const unconditionalBlocks = computeUnconditionalBlocks(fn);
  const errors = new CompilerError();
  // Deduplicates diagnostics per concrete source location; flushed into
  // `errors` at the end. Symbol (synthetic) locations bypass the map.
  const errorsByPlace = new Map<t.SourceLocation, CompilerErrorDetail>();
  function recordError(
    loc: SourceLocation,
    errorDetail: CompilerErrorDetail,
  ): void {
    if (typeof loc === 'symbol') {
      errors.pushErrorDetail(errorDetail);
    } else {
      errorsByPlace.set(loc, errorDetail);
    }
  }
  function recordConditionalHookError(place: Place): void {
    // Once a particular hook has a conditional call error, don't report any further issues for this hook
    setKind(place, Kind.Error);
    const reason =
      'Hooks must always be called in a consistent order, and may not be called conditionally. See the Rules of Hooks (https://react.dev/warnings/invalid-hook-call-warning)';
    const previousError =
      typeof place.loc !== 'symbol' ? errorsByPlace.get(place.loc) : undefined;
    /*
     * In some circumstances such as optional calls, we may first encounter a "hook may not be referenced as normal values" error.
     * If that same place is also used as a conditional call, upgrade the error to a conditonal hook error
     */
    if (previousError === undefined || previousError.reason !== reason) {
      recordError(
        place.loc,
        new CompilerErrorDetail({
          category: ErrorCategory.Hooks,
          description: null,
          reason,
          loc: place.loc,
          suggestions: null,
        }),
      );
    }
  }
  function recordInvalidHookUsageError(place: Place): void {
    const previousError =
      typeof place.loc !== 'symbol' ? errorsByPlace.get(place.loc) : undefined;
    if (previousError === undefined) {
      recordError(
        place.loc,
        new CompilerErrorDetail({
          category: ErrorCategory.Hooks,
          description: null,
          reason:
            'Hooks may not be referenced as normal values, they must be called. See https://react.dev/reference/rules/react-calls-components-and-hooks#never-pass-around-hooks-as-regular-values',
          loc: place.loc,
          suggestions: null,
        }),
      );
    }
  }
  function recordDynamicHookUsageError(place: Place): void {
    const previousError =
      typeof place.loc !== 'symbol' ? errorsByPlace.get(place.loc) : undefined;
    if (previousError === undefined) {
      recordError(
        place.loc,
        new CompilerErrorDetail({
          category: ErrorCategory.Hooks,
          description: null,
          reason:
            'Hooks must be the same function on every render, but this value may change over time to a different function. See https://react.dev/reference/rules/react-calls-components-and-hooks#dont-dynamically-use-hooks',
          loc: place.loc,
          suggestions: null,
        }),
      );
    }
  }
  // Abstract state: the inferred Kind for each SSA identifier.
  const valueKinds = new Map<IdentifierId, Kind>();
  function getKindForPlace(place: Place): Kind {
    const knownKind = valueKinds.get(place.identifier.id);
    if (
      place.identifier.name !== null &&
      isHookName(place.identifier.name.value)
    ) {
      return joinKinds(knownKind ?? Kind.Local, Kind.PotentialHook);
    } else {
      return knownKind ?? Kind.Local;
    }
  }
  function visitPlace(place: Place): void {
    const kind = valueKinds.get(place.identifier.id);
    if (kind === Kind.KnownHook) {
      recordInvalidHookUsageError(place);
    }
  }
  function setKind(place: Place, kind: Kind): void {
    valueKinds.set(place.identifier.id, kind);
  }
  // Seed kinds for the function's parameters (hook-named params become
  // PotentialHook via getKindForPlace).
  for (const param of fn.params) {
    const place = param.kind === 'Identifier' ? param : param.place;
    const kind = getKindForPlace(place);
    setKind(place, kind);
  }
  // Single forward pass over the blocks; phis join predecessor kinds.
  for (const [, block] of fn.body.blocks) {
    for (const phi of block.phis) {
      let kind: Kind =
        phi.place.identifier.name !== null &&
        isHookName(phi.place.identifier.name.value)
          ? Kind.PotentialHook
          : Kind.Local;
      for (const [, operand] of phi.operands) {
        const operandKind = valueKinds.get(operand.identifier.id);
        /*
         * NOTE: we currently skip operands whose value is unknown
         * (which can only occur for functions with loops), we may
         * cause us to miss invalid code in some cases. We should
         * expand this to a fixpoint iteration in a follow-up.
         */
        if (operandKind !== undefined) {
          kind = joinKinds(kind, operandKind);
        }
      }
      valueKinds.set(phi.place.identifier.id, kind);
    }
    for (const instr of block.instructions) {
      switch (instr.value.kind) {
        case 'LoadGlobal': {
          /*
           * Globals are the one source of known hooks: they are either
           * directly a hook, or infer a Global kind from which knownhooks
           * can be derived later via property access (PropertyLoad etc)
           */
          if (getHookKind(fn.env, instr.lvalue.identifier) != null) {
            setKind(instr.lvalue, Kind.KnownHook);
          } else {
            setKind(instr.lvalue, Kind.Global);
          }
          break;
        }
        case 'LoadContext':
        case 'LoadLocal': {
          // Loads propagate the source's kind to the lvalue.
          visitPlace(instr.value.place);
          const kind = getKindForPlace(instr.value.place);
          setKind(instr.lvalue, kind);
          break;
        }
        case 'StoreLocal':
        case 'StoreContext': {
          visitPlace(instr.value.value);
          const kind = joinKinds(
            getKindForPlace(instr.value.value),
            getKindForPlace(instr.value.lvalue.place),
          );
          setKind(instr.value.lvalue.place, kind);
          setKind(instr.lvalue, kind);
          break;
        }
        case 'ComputedLoad': {
          visitPlace(instr.value.object);
          const kind = getKindForPlace(instr.value.object);
          setKind(instr.lvalue, joinKinds(getKindForPlace(instr.lvalue), kind));
          break;
        }
        case 'PropertyLoad': {
          const objectKind = getKindForPlace(instr.value.object);
          const isHookProperty =
            typeof instr.value.property === 'string' &&
            isHookName(instr.value.property);
          let kind: Kind;
          switch (objectKind) {
            case Kind.Error: {
              kind = Kind.Error;
              break;
            }
            case Kind.KnownHook: {
              /**
               * const useFoo;
               * function Component() {
               *   let x = useFoo.useBar; // useFoo is KnownHook, any property from it inherits KnownHook
               * }
               */
              kind = isHookProperty ? Kind.KnownHook : Kind.Local;
              break;
            }
            case Kind.PotentialHook: {
              /**
               * function Component(props) {
               *   let useFoo;
               *   let x = useFoo.useBar; // useFoo is PotentialHook, any property from it inherits PotentialHook
               * }
               */
              kind = Kind.PotentialHook;
              break;
            }
            case Kind.Global: {
              /**
               * function Component() {
               *   let x = React.useState; // hook-named property of global is knownhook
               *   let y = React.foo; // else inherit Global
               * }
               */
              kind = isHookProperty ? Kind.KnownHook : Kind.Global;
              break;
            }
            case Kind.Local: {
              /**
               * function Component() {
               *   let o = createObject();
               *   let x = o.useState; // hook-named property of local is potentialhook
               *   let y = o.foo; // else inherit local
               * }
               */
              kind = isHookProperty ? Kind.PotentialHook : Kind.Local;
              break;
            }
            default: {
              assertExhaustive(objectKind, `Unexpected kind \`${objectKind}\``);
            }
          }
          setKind(instr.lvalue, kind);
          break;
        }
        case 'CallExpression': {
          const calleeKind = getKindForPlace(instr.value.callee);
          const isHookCallee =
            calleeKind === Kind.KnownHook || calleeKind === Kind.PotentialHook;
          if (isHookCallee && !unconditionalBlocks.has(block.id)) {
            recordConditionalHookError(instr.value.callee);
          } else if (calleeKind === Kind.PotentialHook) {
            recordDynamicHookUsageError(instr.value.callee);
          }
          /**
           * We intentionally skip the callee because it's validated above
           */
          for (const operand of eachInstructionOperand(instr)) {
            if (operand === instr.value.callee) {
              continue;
            }
            visitPlace(operand);
          }
          break;
        }
        case 'MethodCall': {
          const calleeKind = getKindForPlace(instr.value.property);
          const isHookCallee =
            calleeKind === Kind.KnownHook || calleeKind === Kind.PotentialHook;
          if (isHookCallee && !unconditionalBlocks.has(block.id)) {
            recordConditionalHookError(instr.value.property);
          } else if (calleeKind === Kind.PotentialHook) {
            recordDynamicHookUsageError(instr.value.property);
          }
          /*
           * We intentionally skip the property because it's validated above
           */
          for (const operand of eachInstructionOperand(instr)) {
            if (operand === instr.value.property) {
              continue;
            }
            visitPlace(operand);
          }
          break;
        }
        case 'Destructure': {
          visitPlace(instr.value.value);
          const objectKind = getKindForPlace(instr.value.value);
          // Same kind-propagation rules as PropertyLoad, applied per lvalue.
          for (const lvalue of eachInstructionLValue(instr)) {
            const isHookProperty =
              lvalue.identifier.name !== null &&
              isHookName(lvalue.identifier.name.value);
            let kind: Kind;
            switch (objectKind) {
              case Kind.Error: {
                kind = Kind.Error;
                break;
              }
              case Kind.KnownHook: {
                kind = Kind.KnownHook;
                break;
              }
              case Kind.PotentialHook: {
                kind = Kind.PotentialHook;
                break;
              }
              case Kind.Global: {
                kind = isHookProperty ? Kind.KnownHook : Kind.Global;
                break;
              }
              case Kind.Local: {
                kind = isHookProperty ? Kind.PotentialHook : Kind.Local;
                break;
              }
              default: {
                assertExhaustive(
                  objectKind,
                  `Unexpected kind \`${objectKind}\``,
                );
              }
            }
            setKind(lvalue, kind);
          }
          break;
        }
        case 'ObjectMethod':
        case 'FunctionExpression': {
          // Nested functions get the stricter "no hook calls at all" check.
          visitFunctionExpression(errors, instr.value.loweredFunc.func);
          break;
        }
        default: {
          /*
           * Else check usages of operands, but do *not* flow properties
           * from operands into the lvalues. For example, `let x = identity(y)`
           * does not infer `x` as a potential hook even if `y` is a potential hook.
           */
          for (const operand of eachInstructionOperand(instr)) {
            visitPlace(operand);
          }
          for (const lvalue of eachInstructionLValue(instr)) {
            const kind = getKindForPlace(lvalue);
            setKind(lvalue, kind);
          }
        }
      }
    }
    for (const operand of eachTerminalOperand(block.terminal)) {
      visitPlace(operand);
    }
  }
  // Flush the per-location (deduplicated) errors in insertion order.
  for (const [, error] of errorsByPlace) {
    errors.pushErrorDetail(error);
  }
  return errors.asResult();
}
/*
 * Recursively checks a function expression (and any function expressions
 * nested within it) and reports an error for every hook call found: hooks
 * may only be called at the top level of a component or custom hook.
 */
function visitFunctionExpression(errors: CompilerError, fn: HIRFunction): void {
  for (const [, block] of fn.body.blocks) {
    for (const instruction of block.instructions) {
      const {value} = instruction;
      if (value.kind === 'ObjectMethod' || value.kind === 'FunctionExpression') {
        // Recurse into nested function expressions.
        visitFunctionExpression(errors, value.loweredFunc.func);
        continue;
      }
      if (value.kind !== 'MethodCall' && value.kind !== 'CallExpression') {
        continue;
      }
      const callee =
        value.kind === 'CallExpression' ? value.callee : value.property;
      const hookKind = getHookKind(fn.env, callee.identifier);
      if (hookKind == null) {
        continue;
      }
      errors.pushErrorDetail(
        new CompilerErrorDetail({
          category: ErrorCategory.Hooks,
          reason:
            'Hooks must be called at the top level in the body of a function component or custom hook, and may not be called within function expressions. See the Rules of Hooks (https://react.dev/warnings/invalid-hook-call-warning)',
          loc: callee.loc,
          description: `Cannot call ${hookKind === 'Custom' ? 'hook' : hookKind} within a function expression`,
          suggestions: null,
        }),
      );
    }
  }
}
|
typescript
|
github
|
https://github.com/facebook/react
|
compiler/packages/babel-plugin-react-compiler/src/Validation/ValidateHooksUsage.ts
|
# Copyright 2010-2012 RethinkDB, all rights reserved.
import os
import socket
import time
import socket
# A lightweight Python stand-in for a discriminated union: the Refspec base
# class below carries no behavior of its own and mainly documents that
# Tag and Branch are the two alternative cases.
class RunError(Exception):
    """Raised when a remote command, VM startup, or smoke test fails."""

    def __init__(self, message):
        # Fixes: the original parameter shadowed the builtin `str` and never
        # called Exception.__init__, leaving `e.args` empty. The legacy
        # `.str` attribute name is preserved for existing readers.
        Exception.__init__(self, message)
        self.str = message

    def __str__(self):
        return repr(self.str)
def ensure_socket(host, port):
    """Connect to (host, port) over TCP, retrying for up to 5 minutes.

    Returns the connected socket on success; raises RunError if no
    connection could be made before the deadline.
    """
    deadline = time.time() + 5 * 60  # give up after five minutes
    while time.time() < deadline:
        try:
            return socket.create_connection((host, port))
        except socket.error:
            # Only swallow socket-level failures; the original bare
            # `except:` also hid KeyboardInterrupt and programming errors.
            time.sleep(20)
    raise RunError("Failed to create a connection.")
class Refspec():
    """Base marker for git reference specifiers (see Tag and Branch)."""
    pass


class Tag(Refspec):
    """A git tag reference; `val` holds the tag name."""

    def __init__(self, tagname):
        self.val = tagname


class Branch(Refspec):
    """A git branch reference; `val` holds the branch name."""

    def __init__(self, branchname):
        self.val = branchname
def remove_local(string):
    """Return *string* with one trailing '.local' suffix removed, if present.

    Uses str.endswith instead of the original manual slice-and-compare.
    """
    suffix = '.local'
    if string.endswith(suffix):
        return string[:-len(suffix)]
    return string
def rpm_install(path):
    """Shell command that installs the RPM package at *path*."""
    return "rpm -i {0}".format(path)


def rpm_get_binary(path):
    """Shell command that lists the /usr/bin entries inside an RPM."""
    return "rpm -qpil {0} | grep /usr/bin".format(path)


def rpm_uninstall(cmd_name):
    """Shell command that removes the RPM owning the binary *cmd_name*."""
    return "which {0} | xargs readlink -f | xargs rpm -qf | xargs rpm -e".format(cmd_name)


def deb_install(path):
    """Shell command that installs the Debian package at *path*."""
    return "dpkg -i {0}".format(path)


def deb_get_binary(path):
    """Shell command that extracts the rethinkdb binary path from a .deb."""
    return "dpkg -c {0} | grep /usr/bin/rethinkdb-.* | sed 's/^.*\(\/usr.*\)$/\\1/'".format(path)


def deb_uninstall(cmd_name):
    """Shell command that removes the Debian package owning *cmd_name*."""
    return "which {0} | xargs readlink -f | xargs dpkg -S | sed 's/^\(.*\):.*$/\\1/' | xargs dpkg -r".format(cmd_name)
class VM():
    """Handle to a VirtualBox guest controlled over ssh.

    The guest is (optionally) started by running VBoxManage over ssh on the
    VirtualBox host, then driven via ssh commands executed as either the
    regular user or root. NOTE: this is Python 2 code (print statements).
    """
    def __init__(self, uuid, hostname, username = 'rethinkdb', rootname = 'root', vbox_username = 'rethinkdb', vbox_hostname = 'deadshot', startup = True):
        self.uuid = uuid
        self.hostname = hostname
        self.username = username
        self.rootname = rootname
        self.vbox_username = vbox_username
        self.vbox_hostname = vbox_hostname
        if (startup):
            os.system("ssh %s@%s VBoxManage startvm %s --type headless" % (self.vbox_username, self.vbox_hostname, self.uuid))
            start_time = time.time()
            # Poll with a trivial ssh command until the guest accepts logins.
            while (self.command("true") != 0) and time.time() - start_time < 5 * 60: # give up after some number of seconds
                time.sleep(3)
            if self.command("true") != 0:
                raise RunError("Failed to connect to Virtual Machine %s." % uuid)
    def __del__(self):
        # Power the guest off when this handle is garbage collected.
        os.system("ssh %s@%s VBoxManage controlvm %s poweroff" % (self.vbox_username, self.vbox_hostname, self.uuid))
    def command(self, cmd_str, root = False, bg = False):
        """Run cmd_str in the guest via ssh; returns the os.system status.

        root selects the root login; bg appends '&' both remotely and
        locally so neither side blocks.
        """
        # NOTE(review): the local variable shadows the builtin `str`.
        str = "ssh -o ConnectTimeout=1000 %s@%s \"%s\"" % ((self.rootname if root else self.username), self.hostname, (cmd_str + ("&" if bg else ""))) + ("&" if bg else "")
        print str
        return os.system(str)
    # send a file into the tmp directory of the vm
    def copy_to_tmp(self, path):
        str = "scp %s %s@%s:/tmp/" % (path, self.username, self.hostname)
        assert(os.system(str) == 0)
    def popen(self, cmd_str, mode):
        """Open a pipe (as for os.popen) to cmd_str running in the guest."""
        #print cmd_str
        return os.popen("ssh %s@%s \"%s\"" % (self.username, self.hostname, cmd_str), mode)
class target():
    """Describes one build platform: the VM that builds it and the
    platform-specific packaging commands (install/uninstall/locate binary).

    NOTE: this is Python 2 code (print statements).
    """
    def __init__(self, build_uuid, build_hostname, username, build_cl, res_ext, install_cl_f, uninstall_cl_f, get_binary_f, vbox_username, vbox_hostname):
        self.build_uuid = build_uuid
        self.build_hostname = build_hostname
        self.username = username
        self.build_cl = build_cl
        self.res_ext = res_ext
        self.install_cl_f = install_cl_f # path -> install cmd
        self.uninstall_cl_f = uninstall_cl_f
        self.get_binary_f = get_binary_f
        self.vbox_username = vbox_username # username and hostname for running VirtualBox through ssh
        self.vbox_hostname = vbox_hostname
    def start_vm(self):
        """Boot the build VM and return a handle to it."""
        return VM(self.build_uuid, self.build_hostname, self.username, vbox_username=self.vbox_username, vbox_hostname=self.vbox_hostname) # startup = True
    def get_vm(self):
        """Return a handle to the (already running) build VM without booting it."""
        return VM(self.build_uuid, self.build_hostname, self.username, vbox_username=self.vbox_username, vbox_hostname=self.vbox_hostname, startup=False)
    def interact(self, short_name):
        """Boot the VM and keep it alive for manual ssh access until interrupted."""
        build_vm = self.start_vm()
        print "%s is now accessible via ssh at %s@%s" % (short_name, self.username, self.build_hostname)
        print "Leave this process running in the background and when you're done interrupt it to clean up the virtual machine."
        while True:
            time.sleep(1)
    def run(self, refspec, short_name):
        """Check out *refspec* in the VM, build, package, smoke-test, and
        copy the resulting packages into Built_Packages/<short_name>/.
        """
        def purge_installed_packages():
            # Uninstall every rethinkdb binary currently present in the guest.
            old_binaries_raw = build_vm.popen("ls /usr/bin/rethinkdb*", "r").readlines()
            old_binaries = map(lambda x: x.strip('\n'), old_binaries_raw)
            print "Binaries scheduled for removal: ", old_binaries
            for old_binary in old_binaries:
                build_vm.command(self.uninstall_cl_f(old_binary), True)
        if (not os.path.exists("Built_Packages")):
            os.mkdir("Built_Packages")
        build_vm = self.start_vm()
        def run_checked(cmd, root = False, bg = False):
            # Run a guest command and raise RunError on nonzero exit.
            res = build_vm.command(cmd, root, bg)
            if res != 0:
                raise RunError(cmd + " returned on %d exit." % res)
        def run_unchecked(cmd, root = False, bg = False):
            # Run a guest command; the exit status is deliberately ignored.
            res = build_vm.command(cmd, root, bg)
        if isinstance(refspec, Tag):
            run_checked("cd rethinkdb && git fetch && git fetch origin tag %s && git checkout -f %s" % (refspec.val, refspec.val))
        elif isinstance(refspec, Branch):
            run_checked("cd rethinkdb && git fetch && git checkout -f %s && git pull" % refspec.val)
        else:
            raise RunError("Invalid refspec type, must be branch or tag.")
        run_checked("cd rethinkdb/src &&" + self.build_cl)
        dir = build_vm.popen("pwd", 'r').readline().strip('\n')
        # Locate the freshly built package files by extension.
        p = build_vm.popen("find rethinkdb/build/packages -regex .*\\\\\\\\.%s" % self.res_ext, 'r')
        raw = p.readlines()
        res_paths = map((lambda x: os.path.join(dir, x.strip('\n'))), raw)
        print res_paths
        dest = os.path.abspath("Built_Packages")
        for path in res_paths:
            purge_installed_packages()
            if (not os.path.exists(os.path.join(dest, short_name))):
                os.mkdir(os.path.join(dest, short_name))
            # install antiquated packages here
            # if os.path.exists('old_versions'):
            #     for old_version in os.listdir('old_versions'):
            #         pkg = os.listdir(os.path.join('old_versions', old_version, short_name))[0]
            #         build_vm.copy_to_tmp(os.path.join('old_versions', old_version, short_name, pkg))
            #         run_checked(self.install_cl_f(os.path.join('/tmp', pkg)), True)
            #         print "Installed: ", old_version
            # install current versions
            target_binary_name = build_vm.popen(self.get_binary_f(path), "r").readlines()[0].strip('\n')
            print "Target binary name: ", target_binary_name
            run_checked(self.install_cl_f(path), True)
            # run smoke test
            run_unchecked("rm -r test_data")
            run_checked("rethinkdb --cluster-port 11211 --directory test_data", bg = True)
            print "Starting tests..."
            s = ensure_socket(build_vm.hostname, 11213)
            from smoke_install_test import test_against
            if (not test_against(build_vm.hostname, 11213)):
                raise RunError("Tests failed")
            s.send("rethinkdb shutdown\r\n")
            # Copy the package back to the host machine.
            scp_string = "scp %s@%s:%s %s" % (self.username, self.build_hostname, path, os.path.join(dest, short_name))
            print scp_string
            os.system(scp_string)
            # the code below is not updated
            # find legacy binaries
            # leg_binaries_raw = build_vm.popen("ls /usr/bin/rethinkdb*", "r").readlines()
            # leg_binaries = map(lambda x: x.strip('\n'), leg_binaries_raw)
            # leg_binaries.remove('/usr/bin/rethinkdb') #remove the symbolic link
            # leg_binaries.remove(target_binary_name)
            # for leg_binary in leg_binaries:
            #     print "Testing migration %s --> %s..." % (leg_binary, target_binary_name)
            #     file_name = leg_binary.replace('/', '_').replace('-','_').replace('.', '_')
            #     # create the old data
            #     run_unchecked("rm %s_1 %s_2" % (file_name, file_name))
            #     run_checked("%s -p 11211 -f %s_1 -f %s_2" % (leg_binary, file_name, file_name), bg = True)
            #     s = ensure_socket(build_vm.hostname, 11211)
            #     from smoke_install_test import throw_migration_data
            #     throw_migration_data(build_vm.hostname, 11211)
            #     s.send("rethinkdb shutdown\r\n")
            #     # run migration
            #     run_unchecked("rm %s_mig_1 %s_mig_2 %s_intermediate" % ((file_name, ) * 3))
            #     run_checked("%s migrate --in -f %s_1 -f %s_2 --out -f %s_mig_1 -f %s_mig_2 --intermediate %s_intermediate" % ((target_binary_name,) + ((file_name,) * 5)))
            #     # check to see if the data is there
            #     run_checked("%s -p 11211 -f %s_mig_1 -f %s_mig_2" % (target_binary_name, file_name, file_name), bg = True)
            #     s = ensure_socket(build_vm.hostname, 11211)
            #     from smoke_install_test import check_migration_data
            #     check_migration_data(build_vm.hostname, 11211)
            #     s.send("rethinkdb shutdown\r\n")
            #     print "Done"
            purge_installed_packages()
    # clean_up only shuts the machine down: dropping the VM handle triggers
    # VM.__del__, which powers the guest off.
    def clean_up(self):
        """Shut down the build VM by letting its handle go out of scope."""
        # NOTE(review): `get_vm()` is unqualified and undefined here — this
        # looks like it should be `self.get_vm()`; confirm before relying on it.
        build_vm = get_vm()
        return # this calls the build_vms __del__ method which shutsdown the machine
def build(targets):
    """Run the build for every target in *targets*.

    Fixes two defects in the original:
    - `map(lambda x: x.run(), targets)` discarded its result; under
      Python 3 `map` is lazy, so no target would ever be built. A plain
      loop runs each build eagerly.
    - `os.mkdir("Built_Packages")` raised if the directory already existed
      (target.run() guards the same mkdir with an exists check).

    NOTE(review): target.run() takes (refspec, short_name) arguments in the
    visible class definition, yet is invoked here with none — presumably a
    different/legacy signature was intended; confirm against callers.
    """
    if not os.path.exists("Built_Packages"):
        os.mkdir("Built_Packages")
    for target in targets:
        target.run()
|
unknown
|
codeparrot/codeparrot-clean
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.