"""Regression tests for specific TileDB-Py / libtiledb bug fixes.

NOTE(review): this file was recovered from a whitespace-mangled paste in
which all statements were collapsed onto a few physical lines, and two
spans were truncated outright. The truncated seams are reconstructed
below on a best-effort basis and are marked with NOTE(review) comments —
confirm them against the upstream TileDB-Py test suite.
"""

import concurrent
import concurrent.futures
import json
import os
import subprocess
import sys

import numpy as np
import pytest
from numpy.testing import assert_array_equal

import tiledb

from .common import DiskTestCase, has_pandas, has_pyarrow

pd = pytest.importorskip("pandas")
tm = pd._testing


class FixesTest(DiskTestCase):
    def test_sc50378_overflowerror_python_int_too_large_to_convert_to_c_long(self):
        # Regression: writing coordinates at the top of the uint64 domain
        # previously raised OverflowError converting the Python int to C long.
        uri = self.path(
            "test_sc50378_overflowerror_python_int_too_large_to_convert_to_c_long"
        )

        MAX_UINT64 = np.iinfo(np.uint64).max

        dim = tiledb.Dim(
            name="id",
            domain=(0, MAX_UINT64 - 1),
            dtype=np.dtype(np.uint64),
        )
        dom = tiledb.Domain(dim)
        text_attr = tiledb.Attr(name="text", dtype=np.dtype("U1"), var=True)
        attrs = [text_attr]
        schema = tiledb.ArraySchema(
            domain=dom,
            sparse=True,
            allows_duplicates=False,
            attrs=attrs,
        )
        tiledb.Array.create(uri, schema)

        with tiledb.open(uri, "w") as A:
            external_ids = np.array(
                [0, 100, MAX_UINT64 - 1], dtype=np.dtype(np.uint64)
            )
            # NOTE(review): the source paste is truncated here (the dtype
            # string literal was cut). Reconstructed as a var-length unicode
            # write — confirm against upstream.
            data = {"text": np.array(["foo", "bar", "baz"], dtype="<U3")}
            A[external_ids] = data

    def test_stats_dump_reader_loop_num(self):
        # NOTE(review): reconstructed seam. The original paste lost the start
        # of this test (only its trailing assertions survived). The intent —
        # grounded in the surviving tail — is: perform a read with stats
        # enabled and assert the reader loop counter key appears in the stats
        # dump, under the key path used by the running libtiledb version.
        uri = self.path("test_stats_dump_reader_loop_num")
        tiledb.from_numpy(uri, np.arange(10))
        tiledb.stats_enable()
        with tiledb.open(uri) as A:
            A[:]
        stats_dump_str = tiledb.stats_dump(print_out=False)
        if tiledb.libtiledb.version() >= (2, 27):
            assert """"Context.Query.Reader.loop_num": 1""" in stats_dump_str
        else:
            assert (
                """"Context.StorageManager.Query.Reader.loop_num": 1"""
                in stats_dump_str
            )
        tiledb.stats_disable()

    def test_sc58286_fix_stats_dump_return_value_broken(self):
        uri = self.path("test_sc58286_fix_stats_dump_return_value_broken")
        dim1 = tiledb.Dim(name="d1", dtype="int64", domain=(1, 3))
        # NOTE(review): the source paste is truncated from the attribute dtype
        # onward; the remainder of this test is reconstructed from its name
        # (stats_dump should return a usable string) — confirm upstream.
        att = tiledb.Attr(name="a1", dtype="float64")
        schema = tiledb.ArraySchema(domain=tiledb.Domain(dim1), attrs=(att,))
        tiledb.Array.create(uri, schema)
        tiledb.stats_enable()
        with tiledb.open(uri) as A:
            A[:]
        assert isinstance(tiledb.stats_dump(print_out=False), str)
        tiledb.stats_disable()

    def test_aws_region_default_config(self):
        # NOTE(review): reconstructed seam. Only the trailing assertions of
        # this test survived the paste; the helper below is a best-effort
        # reconstruction. It reads a TileDB config key in a subprocess so the
        # AWS environment variables can be controlled per call.
        def get_config_with_env(env, key):
            cmd = f"import tiledb; print(tiledb.Config()[{key!r}])"
            proc = subprocess.run(
                [sys.executable, "-c", cmd],
                env={**os.environ, **env},
                capture_output=True,
                check=True,
                text=True,
            )
            return proc.stdout.strip()

        # libtiledb >= 2.27 no longer defaults vfs.s3.region to us-east-1.
        if tiledb.libtiledb.version() >= (2, 27, 0):
            assert get_config_with_env({}, "vfs.s3.region") == ""
        else:
            assert get_config_with_env({}, "vfs.s3.region") == "us-east-1"

        assert get_config_with_env({"AWS_DEFAULT_REGION": ""}, "vfs.s3.region") == ""
        assert get_config_with_env({"AWS_REGION": ""}, "vfs.s3.region") == ""

    @pytest.mark.skipif(not has_pandas(), reason="pandas>=1.0,<3.0 not installed")
    @pytest.mark.parametrize("is_sparse", [True, False])
    def test_sc1430_nonexisting_timestamp(self, is_sparse):
        # Opening an array at a timestamp before any write must yield an
        # empty (sparse) or fill-value (dense) result, not an error.
        path = self.path("nonexisting_timestamp")
        if is_sparse:
            tiledb.from_pandas(
                path, pd.DataFrame({"a": np.random.rand(4)}), sparse=True
            )
            with tiledb.open(path, timestamp=1) as A:
                assert pd.DataFrame.equals(
                    A.df[:]["a"], pd.Series([], dtype=np.float64)
                )
        else:
            with tiledb.from_numpy(path, np.random.rand(4)) as A:
                pass
            with tiledb.open(path, timestamp=1) as A:
                assert_array_equal(A[:], np.ones(4) * np.nan)

    def test_sc27374_hilbert_default_tile_order(self):
        import os
        import shutil

        import tiledb

        uri = "repro"

        if os.path.exists(uri):
            shutil.rmtree(uri)

        dom = tiledb.Domain(
            tiledb.Dim(
                name="var_id",
                domain=(None, None),
                dtype="ascii",
                filters=[tiledb.ZstdFilter(level=1)],
            ),
        )
        attrs = []
        sch = tiledb.ArraySchema(
            domain=dom,
            attrs=attrs,
            sparse=True,
            allows_duplicates=False,
            offsets_filters=[
                tiledb.DoubleDeltaFilter(),
                tiledb.BitWidthReductionFilter(),
                tiledb.ZstdFilter(),
            ],
            capacity=1000,
            cell_order="hilbert",
            tile_order=None,  # <-------------------- note
        )
        tiledb.Array.create(uri, sch)

        with tiledb.open(uri) as A:
            assert A.schema.cell_order == "hilbert"
            assert A.schema.tile_order is None

    def test_sc43221(self):
        # GroupMeta object did not have a representation test; repr failed
        # due to non-existent attribute access in check.
        tiledb.Group.create("mem://tmp1")
        a = tiledb.Group("mem://tmp1")
        repr(a.meta)

    def test_sc56611(self):
        # test from_numpy with sparse argument set to True
        uri = self.path("test_sc56611")
        data = np.random.rand(10, 10)
        with pytest.raises(tiledb.cc.TileDBError) as exc_info:
            tiledb.from_numpy(uri, data, sparse=True)
        assert str(exc_info.value) == "from_numpy only supports dense arrays"


class SOMA919Test(DiskTestCase):
    """
    ORIGINAL CONTEXT:
    https://github.com/single-cell-data/TileDB-SOMA/issues/919
    https://gist.github.com/atolopko-czi/26683305258a9f77a57ccc364916338f

    We've distilled @atolopko-czi's gist example using the TileDB-Py API
    directly.
    """

    def run_test(self, use_timestamps):
        import tempfile

        import numpy as np

        import tiledb

        root_uri = tempfile.mkdtemp()

        if use_timestamps:
            group_ctx100 = tiledb.Ctx(
                {
                    "sm.group.timestamp_start": 100,
                    "sm.group.timestamp_end": 100,
                }
            )
            timestamp = 100
        else:
            group_ctx100 = tiledb.Ctx()
            timestamp = None

        # create the group and add a dummy subgroup "causes_bug"
        tiledb.Group.create(root_uri, ctx=group_ctx100)
        with tiledb.Group(root_uri, "w", ctx=group_ctx100) as expt:
            tiledb.Group.create(root_uri + "/causes_bug", ctx=group_ctx100)
            expt.add(name="causes_bug", uri=root_uri + "/causes_bug")

        # add an array to the group (in a separate write operation)
        with tiledb.Group(root_uri, mode="w", ctx=group_ctx100) as expt:
            df_path = os.path.join(root_uri, "df")
            tiledb.from_numpy(df_path, np.ones((100, 100)), timestamp=timestamp)
            expt.add(name="df", uri=df_path)

        # check our view of the group at current time;
        # (previously, "df" is sometimes missing (non-deterministic)
        with tiledb.Group(root_uri) as expt:
            assert "df" in expt

    # IMPORTANT: commenting out either line 29 or 32 (individually) makes df
    # always visible. That is, to invite the bug we must BOTH add the
    # causes_bug sibling element AND then reopen the group write handle to
    # add df. The separate reopen (line 32) simulates
    # tiledbsoma.tdb_handles.Wrapper._flush_hack().

    @pytest.mark.skipif(
        tiledb.libtiledb.version() < (2, 15, 0),
        reason="SOMA919 fix implemented in libtiledb 2.15",
    )
    @pytest.mark.parametrize("use_timestamps", [True, False])
    def test_soma919(self, use_timestamps):
        # The underlying bug was non-deterministic; run many iterations and
        # fail only if any iteration loses sight of "df".
        N = 100
        fails = 0
        for i in range(N):
            try:
                self.run_test(use_timestamps)
            except AssertionError:
                fails += 1
        if fails > 0:
            pytest.fail(f"SOMA919 test, failure rate {100*fails/N}%")