From 87e91013048459fc285b515bf5de4c348a6f9964 Mon Sep 17 00:00:00 2001 From: Paris Morgan Date: Wed, 10 Jan 2024 16:58:18 +0100 Subject: [PATCH 01/27] script to generate and test CI job --- .github/workflows/build_wheels_testing.yml | 44 +++++++ .gitignore | 3 + apis/python/src/tiledb/vector_search/utils.py | 26 ++++- apis/python/test/common.py | 1 + .../test/test_backwards_compatability.py | 110 ++++++++++++++++++ apis/python/test/test_ingestion.py | 67 ----------- apis/python/test/test_utils.py | 30 +++++ backwards-compatability-data/README.md | 10 ++ backwards-compatability-data/generate_data.py | 50 ++++++++ .../siftmicro_base.fvecs | Bin 0 -> 51600 bytes 10 files changed, 273 insertions(+), 68 deletions(-) create mode 100644 .github/workflows/build_wheels_testing.yml create mode 100644 apis/python/test/test_backwards_compatability.py create mode 100644 apis/python/test/test_utils.py create mode 100644 backwards-compatability-data/README.md create mode 100644 backwards-compatability-data/generate_data.py create mode 100644 backwards-compatability-data/siftmicro_base.fvecs diff --git a/.github/workflows/build_wheels_testing.yml b/.github/workflows/build_wheels_testing.yml new file mode 100644 index 000000000..63eb06446 --- /dev/null +++ b/.github/workflows/build_wheels_testing.yml @@ -0,0 +1,44 @@ +name: Test Build wheels + +on: + push: + branches: [main] + +jobs: + generate_backwards_compatability_data: + name: Generate Backwards Compatibility Data + runs-on: ubuntu-22.04 + steps: + - name: Checkout code + uses: actions/checkout@v3 + + - name: Determine Release Tag + id: get_release_tag + run: | + release_tag=$(git describe --tags --abbrev=0) + echo "::set-output name=release_tag::$release_tag" + + - name: Build Indexes + run: backwards-compatibility-data/generate_data.py ${{ steps.get_release_tag.outputs.release_tag }} + + - name: Commit and Push to Main Branch + run: | + git status + git branch + # git checkout main + # git add backwards-compatibility-data/ + 
# git commit -m "Add backward compatibility data for release ${{ steps.get_release_tag.outputs.release_tag }}" + # git push + + build_wheels: + name: Build wheels on ${{ matrix.os }} + needs: generate_backwards_compatability_data + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-22.04] + + steps: + - name: Running! + run: | + Running build_wheels \ No newline at end of file diff --git a/.gitignore b/.gitignore index a0783cad5..17938da3b 100644 --- a/.gitignore +++ b/.gitignore @@ -100,3 +100,6 @@ docs/.quarto documentation/reference/* documentation/reference + +# MacOS +*.DS_Store \ No newline at end of file diff --git a/apis/python/src/tiledb/vector_search/utils.py b/apis/python/src/tiledb/vector_search/utils.py index 420b9d9ae..501d21308 100644 --- a/apis/python/src/tiledb/vector_search/utils.py +++ b/apis/python/src/tiledb/vector_search/utils.py @@ -17,7 +17,7 @@ def _load_vecs_t(uri, dtype, ctx_or_config=None): elem_nbytes = int(4 + ndim * dtype.itemsize) if raw.size % elem_nbytes != 0: raise ValueError( - f"Mismatched dims to bytes in file {uri}: {raw.size}, elem_nbytes" + f"Mismatched dims to bytes in file {uri}: raw.size: {raw.size}, elem_nbytes: {elem_nbytes}" ) # take a view on the whole array as # (ndim, sizeof(t)*ndim), and return the actual elements @@ -40,3 +40,27 @@ def load_fvecs(uri, ctx_or_config=None): def load_bvecs(uri, ctx_or_config=None): return _load_vecs_t(uri, np.uint8, ctx_or_config) + + +def _write_vecs_t(uri, data, dtype, ctx_or_config=None): + with tiledb.scope_ctx(ctx_or_config) as ctx: + dtype = np.dtype(dtype) + vfs = tiledb.VFS(ctx.config()) + ndim = data.shape[1] # Get the number of dimensions from the input data + + buffer = io.BytesIO() + + for vector in data: + buffer.write(np.array([ndim], dtype=np.int32).tobytes()) + buffer.write(vector.tobytes()) + + with vfs.open(uri, "wb") as f: + f.write(buffer.getvalue()) + + +def write_ivecs(uri, data, ctx_or_config=None): + _write_vecs_t(uri, data, np.int32, ctx_or_config) 
+ + +def write_fvecs(uri, data, ctx_or_config=None): + _write_vecs_t(uri, data, np.float32, ctx_or_config) \ No newline at end of file diff --git a/apis/python/test/common.py b/apis/python/test/common.py index 763e375d1..a1964fc1a 100644 --- a/apis/python/test/common.py +++ b/apis/python/test/common.py @@ -6,6 +6,7 @@ import tiledb +MAX_UINT64 = np.iinfo(np.dtype("uint64")).max def xbin_mmap(fname, dtype): n, d = map(int, np.fromfile(fname, dtype="uint32", count=2)) diff --git a/apis/python/test/test_backwards_compatability.py b/apis/python/test/test_backwards_compatability.py new file mode 100644 index 000000000..1488bb7f3 --- /dev/null +++ b/apis/python/test/test_backwards_compatability.py @@ -0,0 +1,110 @@ +import numpy as np +from common import * +import pytest + +from tiledb.vector_search.flat_index import FlatIndex +from tiledb.vector_search.ingestion import ingest +from tiledb.vector_search.ivf_flat_index import IVFFlatIndex +from tiledb.vector_search.utils import load_fvecs + +MINIMUM_ACCURACY = 0.85 + +def test_create_and_query_indices_with_old_storage_versions(tmp_path): + ''' + Tests that the current code can create indices using older storage version formats and then + query them. + ''' + dataset_dir = os.path.join(tmp_path, "dataset") + k = 10 + size = 1000 + partitions = 10 + dimensions = 128 + nqueries = 100 + data = create_random_dataset_u8(nb=size, d=dimensions, nq=nqueries, k=k, path=dataset_dir) + source_uri = os.path.join(dataset_dir, "data.u8bin") + + dtype = np.uint8 + queries = get_queries(dataset_dir, dtype=dtype) + gt_i, _ = get_groundtruth(dataset_dir, k) + + indexes = ["FLAT", "IVF_FLAT"] + index_classes = [FlatIndex, IVFFlatIndex] + index_files = [tiledb.vector_search.flat_index, tiledb.vector_search.ivf_flat_index] + for index_type, index_class, index_file in zip(indexes, index_classes, index_files): + # First we test with an invalid storage version. 
+ with pytest.raises(ValueError) as error: + index_uri = os.path.join(tmp_path, f"array_{index_type}_invalid") + ingest( + index_type=index_type, + index_uri=index_uri, + source_uri=source_uri, + partitions=partitions, + storage_version="Foo" + ) + assert "Invalid storage version" in str(error.value) + + with pytest.raises(ValueError) as error: + index_file.create(uri=index_uri, dimensions=3, vector_type=np.dtype(dtype), storage_version="Foo") + assert "Invalid storage version" in str(error.value) + + # Then we test with valid storage versions. + for storage_version, _ in tiledb.vector_search.storage_formats.items(): + index_uri = os.path.join(tmp_path, f"array_{index_type}_{storage_version}") + index = ingest( + index_type=index_type, + index_uri=index_uri, + source_uri=source_uri, + partitions=partitions, + storage_version=storage_version + ) + _, result = index.query(queries, k=k) + assert accuracy(result, gt_i) >= MINIMUM_ACCURACY + + update_ids_offset = MAX_UINT64 - size + updated_ids = {} + for i in range(10): + index.delete(external_id=i) + index.update(vector=data[i].astype(dtype), external_id=i + update_ids_offset) + updated_ids[i] = i + update_ids_offset + + _, result = index.query(queries, k=k) + assert accuracy(result, gt_i, updated_ids=updated_ids) >= MINIMUM_ACCURACY + + index = index.consolidate_updates(retrain_index=True, partitions=20) + _, result = index.query(queries, k=k) + assert accuracy(result, gt_i, updated_ids=updated_ids) >= MINIMUM_ACCURACY + + index_ram = index_class(uri=index_uri) + _, result = index_ram.query(queries, k=k) + assert accuracy(result, gt_i) > MINIMUM_ACCURACY + +def test_query_old_indices(): + ''' + Tests that current code can query indices which were written to disk by old code. 
+ ''' + backwards_compatability_path = os.path.join(os.path.dirname(__file__), '..', '..', '..', 'backwards-compatability-data') + datasets_path = os.path.join(backwards_compatability_path, 'data') + base = load_fvecs(os.path.join(backwards_compatability_path, 'siftmicro_base.fvecs')) + query_indices = [0, 3, 4, 8, 10, 19, 28, 31, 39, 40, 41, 47, 49, 50, 56, 64, 68, 70, 71, 79, 82, 89, 90, 94] + queries = base[query_indices] + + for directory_name in os.listdir(datasets_path): + version_path = os.path.join(datasets_path, directory_name) + if not os.path.isdir(version_path): + continue + + for index_name in os.listdir(version_path): + index_uri = os.path.join(version_path, index_name) + if not os.path.isdir(index_uri): + continue + + if "ivf_flat" in index_name: + index = IVFFlatIndex(uri=index_uri) + elif "flat" in index_name: + index = FlatIndex(uri=index_uri) + else: + assert False, f"Unknown index name: {index_name}" + + result_d, result_i = index.query(queries, k=1) + assert query_indices == result_i.flatten().tolist() + assert result_d.flatten().tolist() == [0 for _ in range(len(query_indices))] \ No newline at end of file diff --git a/apis/python/test/test_ingestion.py b/apis/python/test/test_ingestion.py index 05107b447..1471fc3e0 100644 --- a/apis/python/test/test_ingestion.py +++ b/apis/python/test/test_ingestion.py @@ -12,8 +12,6 @@ from tiledb.vector_search.utils import load_fvecs MINIMUM_ACCURACY = 0.85 -MAX_UINT64 = np.iinfo(np.dtype("uint64")).max - def query_and_check_equals(index, queries, expected_result_d, expected_result_i): result_d, result_i = index.query(queries, k=1) check_equals(result_d=result_d, result_i=result_i, expected_result_d=expected_result_d, expected_result_i=expected_result_i) @@ -676,71 +674,6 @@ def test_ivf_flat_ingestion_with_additions_and_timetravel(tmp_path): assert 0.45 < accuracy(result, gt_i) < 0.55 -def test_storage_versions(tmp_path): - dataset_dir = os.path.join(tmp_path, "dataset") - k = 10 - size = 1000 - partitions 
= 10 - dimensions = 128 - nqueries = 100 - data = create_random_dataset_u8(nb=size, d=dimensions, nq=nqueries, k=k, path=dataset_dir) - source_uri = os.path.join(dataset_dir, "data.u8bin") - - dtype = np.uint8 - queries = get_queries(dataset_dir, dtype=dtype) - gt_i, _ = get_groundtruth(dataset_dir, k) - - indexes = ["FLAT", "IVF_FLAT"] - index_classes = [FlatIndex, IVFFlatIndex] - index_files = [tiledb.vector_search.flat_index, tiledb.vector_search.ivf_flat_index] - for index_type, index_class, index_file in zip(indexes, index_classes, index_files): - # First we test with an invalid storage version. - with pytest.raises(ValueError) as error: - index_uri = os.path.join(tmp_path, f"array_{index_type}_invalid") - ingest( - index_type=index_type, - index_uri=index_uri, - source_uri=source_uri, - partitions=partitions, - storage_version="Foo" - ) - assert "Invalid storage version" in str(error.value) - - with pytest.raises(ValueError) as error: - index_file.create(uri=index_uri, dimensions=3, vector_type=np.dtype(dtype), storage_version="Foo") - assert "Invalid storage version" in str(error.value) - - # Then we test with valid storage versions. 
- for storage_version, _ in tiledb.vector_search.storage_formats.items(): - index_uri = os.path.join(tmp_path, f"array_{index_type}_{storage_version}") - index = ingest( - index_type=index_type, - index_uri=index_uri, - source_uri=source_uri, - partitions=partitions, - storage_version=storage_version - ) - _, result = index.query(queries, k=k) - assert accuracy(result, gt_i) >= MINIMUM_ACCURACY - - update_ids_offset = MAX_UINT64 - size - updated_ids = {} - for i in range(10): - index.delete(external_id=i) - index.update(vector=data[i].astype(dtype), external_id=i + update_ids_offset) - updated_ids[i] = i + update_ids_offset - - _, result = index.query(queries, k=k) - assert accuracy(result, gt_i, updated_ids=updated_ids) >= MINIMUM_ACCURACY - - index = index.consolidate_updates(retrain_index=True, partitions=20) - _, result = index.query(queries, k=k) - assert accuracy(result, gt_i, updated_ids=updated_ids) >= MINIMUM_ACCURACY - - index_ram = index_class(uri=index_uri) - _, result = index_ram.query(queries, k=k) - assert accuracy(result, gt_i) > MINIMUM_ACCURACY - def test_copy_centroids_uri(tmp_path): dataset_dir = os.path.join(tmp_path, "dataset") os.mkdir(dataset_dir) diff --git a/apis/python/test/test_utils.py b/apis/python/test/test_utils.py new file mode 100644 index 000000000..f45302ad3 --- /dev/null +++ b/apis/python/test/test_utils.py @@ -0,0 +1,30 @@ +import os +import numpy as np +from tiledb.vector_search.utils import load_fvecs, load_ivecs, write_fvecs, write_ivecs + +def test_cloud_flat(tmp_path): + fvecs_uri = "test/data/siftsmall/siftsmall_base.fvecs" + ivecs_uri = "test/data/siftsmall/siftsmall_groundtruth.ivecs" + + fvecs = load_fvecs(fvecs_uri) + assert fvecs.shape == (10000, 128) + assert not np.any(np.isnan(fvecs)) + + ivecs = load_ivecs(ivecs_uri) + assert ivecs.shape == (100, 100) + assert not np.any(np.isnan(ivecs)) + + fvecs_uri = os.path.join(tmp_path, "fvecs") + ivecs_uri = os.path.join(tmp_path, "ivecs") + + write_fvecs(fvecs_uri, 
fvecs[:10]) + write_ivecs(ivecs_uri, ivecs[:10]) + + new_fvecs = load_fvecs(fvecs_uri) + assert new_fvecs.shape == (10, 128) + assert not np.any(np.isnan(fvecs)) + + new_ivecs = load_ivecs(ivecs_uri) + assert new_ivecs.shape == (10, 100) + assert not np.any(np.isnan(ivecs)) + diff --git a/backwards-compatability-data/README.md b/backwards-compatability-data/README.md new file mode 100644 index 000000000..6d3425167 --- /dev/null +++ b/backwards-compatability-data/README.md @@ -0,0 +1,10 @@ +### What +This folder contains test indices built using different versions of TileDB-Vector-Search. It is used to test the ability of the latest version of TileDB-Vector-Search to load and query arrays built by previous versions. + +### Usage +To generate new data, run: +- `python generate_data.py x.x.x` +This will create a new folder in the `data` directory with the version. This folder will contain the arrays built by the current version of TileDB-Vector-Search. + +To run a backwards compability test, run: +- `cd ~/repo/TileDB-Vector-Search && pytest apis/python/test/test_backwards_compatability.py -s` \ No newline at end of file diff --git a/backwards-compatability-data/generate_data.py b/backwards-compatability-data/generate_data.py new file mode 100644 index 000000000..bacce996f --- /dev/null +++ b/backwards-compatability-data/generate_data.py @@ -0,0 +1,50 @@ +import os +import shutil +import numpy as np + +from tiledb.vector_search.ingestion import ingest +from tiledb.vector_search.utils import load_fvecs, write_fvecs + +def create_sift_micro(): + ''' + Here we create a smaller version of the SIFT 10K dataset (http://corpus-texmex.irisa.fr). You + don't need to run this again. We only write the base data used to create an index. To query you + should just select vectors from this to query against the index with. 
+ ''' + base_uri = "../apis/python/test/data/siftsmall/siftsmall_base.fvecs" + write_fvecs("./siftmicro_base.fvecs", load_fvecs(base_uri)[:100]) + +def generate_release_data(version): + # Create the new release directory. + release_dir = f"./data/{version}" + shutil.rmtree(release_dir, ignore_errors=True) + os.makedirs(release_dir, exist_ok=True) + + # Get the data we'll use to generate the index. + base_uri = "./siftmicro_base.fvecs" + base = load_fvecs(base_uri) + indices = [0, 3, 4, 8, 10, 19, 28, 31, 39, 40, 41, 47, 49, 50, 56, 64, 68, 70, 71, 79, 82, 89, 90, 94] + queries = base[indices] + + # Generate each index and query to make sure it works before we write it. + index_types = ["FLAT", "IVF_FLAT"] + data_types = ["float32", "uint8"] + for index_type in index_types: + for data_type in data_types: + index = ingest( + index_type=index_type, + index_uri=f"{release_dir}/{index_type.lower()}_{data_type}", + input_vectors=base.astype(data_type), + ) + + result_d, result_i = index.query(queries, k=1) + assert indices == result_i.flatten().tolist() + assert result_d.flatten().tolist() == [0 for _ in range(len(indices))] + +if __name__ == "__main__": + import argparse + p = argparse.ArgumentParser() + p.add_argument("version", help="The name of the of the TileDB-Vector-Search version which we are creating indices for.") + args = p.parse_args() + print("[generate_data.py] Building indexes for version:", args.version) + generate_release_data(args.version) \ No newline at end of file diff --git a/backwards-compatability-data/siftmicro_base.fvecs b/backwards-compatability-data/siftmicro_base.fvecs new file mode 100644 index 0000000000000000000000000000000000000000..76de6946252225bbeadd06795583a566ffc17f92 GIT binary patch literal 51600 zcmZ|2KZsn}wg0={ed0a6-%pQMfdU13P=Nvk3Mx>bK=50?ehL&Qs6fF43Mx>bKtTl) zD45_N#LyUFieb1kW-uEeL}Lb141*D7Fo$6n8Z(&27}J=+90oHq#+b&Ki}-Q5ywB&H 
zy<0~|<;DH>+G~B+T6^t3=Tx;M&uvnb=6|MiX)mQ5c+hXKS6~hn;4^#=fc8!FE1t``8EI80b@)|B=!bdkwF@i@~>Htp5=IeeetL_kxC)7S5QC zv9Qhy_&)%9*nl(m7EYNlHb!k7dF>`ry1-c9X^Gdbs9o2x@Yajyss4w1@f!RYB>WjP zz&W(8eu5gI&eEV|=%Xe?8@hyMq4}ID<2PR$|odw>j6~ z&xM?X&vS6jzUKdEdfI%?UoL|n*7=u=7oY*|lV_3P6m^UL7$_$t#+-gYq^)M$G)$k6X=4wf$jR8+r9QaI8PEqJfj>#{6~!M0q@QYptz1GIT`KRJ@+$x zG30tCuM78k#&v%({06`C+e?SNM6ZBr`wf1%jK7MUzTbgg1J|>_*AQbbYHQfv6R=^N z@SO|x0{>Ic;qUuLSJ8HEpTn8@kO{d`jEO;<{8(-aPQ~;vytw7o~e001J(&(TnF_3b1$VYfp(Rav7FMDn0`Gm z!@FN<{bSDB_Ubxb1NX*p0xLLq@8D_Sg8VA>t8M%ZVjhD7kVF-@Hio%7w7oup zcVEN{#?EWbK3Flf2Yc1;{7ZBqx6eU4pXV$W*ymE>y=T@s!!|ZWi9jD?--YvhmvSSC zAQazAxDC3)ZoxZ(o#lvee)$aSO^Hb9C-~*ulQprG>jU_4g3tXtEO($^SIIZVJ!rrg zu-6pq2l@qY&!3_%gARWH*YOW*;d}{SQ_!N71^%DH{{f^ZKq;^BT?4hxUdM1&&Q)uF z&p6{xp!V#3wZ_63XD^L>iAhDYZ^8c!sAu+B#Jme2;p?J$2YnsCd>+O33~}cC9@HK( zzP>N?8vh@`W|FtYcJIU)W9>s@$gxfXf^ik^pnKoxGpz{fvD9{+HkoXfKHkbd^i^o#PGuY#kAm9q!*Tz9yP;r}zB&obhnBH9_VfyPUe@y*y22e5-H>a{#~?cwsP(Vmv$uq=O2^ne@mb%%1$LE-#cyBz zKE%K;i8}=!;{Ob|H$7hxolkBKGF(TVdvg%pBl*;!99@*LDTlxs)}!!&c`Q*5kf9 zRw9OXh4UofTrED=V%{_0Jn~C)g6n|w<-Y?hFxQv{PC0{*80PV=Er9pcGfMcaA8O)^ z^-R4lE5^ewUbJBF^D^Ym*W=CJs`MSBh{z78xHyKgh_ z7|vcB{7dXAE5^_9-vb?768NlL#plK|JOZw@!_HCJL0`cC@8Ai@@YZnN8W+TO;9C8? z0PYg8_GpYfHRuX+ufcy0?gHy65krh~+%Ei@vHqT;?-h2&-k{C(&bQd2Dfl`573i=N z$VIp4E%ATc8|HTY8(_R2m$8*&;^f60pdHauechI^=i&Kzx8=nenB#r6?h<jj&ahD{v2D*`fIQNN*lxc z_UydYcMpxu6I|? z`fE90z5a{~cNg7~<6ZFCUBX{LoA)!&z?r`fGUEXH9ILI{0mrRA*4&}3;a%{1c3`KH z-x$MqgZ~wn7yq1b7>8KHdob&LP4Ba>HMa9LQRXjuJj1sD*8B@NW1NVwj-30i?b_vC zi*}&R>jm4MSD^P{Ut(;3#<~{wCt+K+iuF&x4{+B03|PO30mk3Pm$0q%HRJDq>pjK} zAlCrp5Zmuj3ETopd0=h(?_yX}qo1+xF8dz30@jS3Cm3G?=Jofp#9FHZ7x35p`jl~! zN6dYS?OxvjO$>8dqpyp7K#cRa4;lM1oM;(~$Be%MnHc@vF?(~)z_$QFocU*r?PG@S z_Z7eEIR?(xVjq;>4c*u3b5E@!-}k}z4nEIm2AnIywIB(^IiBFZ58O*@xt1n+x{ozR zboj&>V|zCCd-5D7!MKI5J!fps^|#;>u!j~tm6&zO+oF%b*YMWc2f4&;@SlMNXz*Q! 
zTY;;<~D%zYgBJWQ~&oZTE12ZGXqWdt==jpdsfowC@e~#WUN0 zhakai@CDkjXn;K!`|;k}b-?x#`0f2u;GVZJ#Ameoru_ka=id^WusxTIuJQoxB{=`R zz_^k0XCFhJXWzivr~7)0Hvby1mkj2>_5JVsF0y2-U(7(Jj<5%NcI{2^C8QtYb1fNv zfA;!Wi*;SAb;YX^W1eD+d!ND65@!uXuCEon*`NPwV*YLo;=AIP{Sse)hWK4yj5zO| zb8qoawj&q-igxFG*rV%U=o&iz}0HRBb?a7*+8B*s-5?2E)1e#dUHQX7nhc0^h{73D1E(gYI zt3kWxuI#Xnu3rS0fgHmf_>6ho=N4NzuNA{tHt-$e>E7SLzokyiGgn#Qt9w)T?qgz) z@Q?5IA$9{c;1zt1KE40pT>9-xOyz9EZ5g|7Dc(k%V0(7;Y%613=W)+}$(+eNL| zdWPS3iO-$S`yBfi1aVcoyY+4_;T9l8Wrww`@U`VT;g<1DazY>IjP3cMe#iI?yq{bB zdZ6vaxCX9@F>{atKUl*>85{3ibN$xVpFslVxo&GcWgK9}ILkAZ_Z{Rr!CvgQ?#Tsw z4RHZn!!6+29|6U?OLcw~xhkA72pTT!$s@ybd{c@gDgGJ1(%E%-C=ZqBLwj=KJFR*Zd5Y>WpN%^wRs#eDZ<3G5+bCv>P0`)IKD;oYAF0NTYJU0EpbL0)8XszS@U~vhV9<1L5LD>Y>WMb80#H?D&|({VmR}M`iOsu z{vEK^SUi_6@ms5lcl+AFc@O05rC$p%2^?epXW=fP^*PT9D8{Wv&Z8QjJ89kDF&&NX9vtLRM3qK|KqV$Btt_3gn}V-iqj>`&qU0*)C+ z;X3rH*zTG9y5u(KW#O!8&%Vo@qcxv#D*SWC*Fo>cox&f(-X0NS-4o!wx3>sL^ajlE zCG6u8_XYMwR(~t8QUHMG>o;~D|^%aGvg&=YkKC+pYXT9HF*9lFh>|$&&Zy%?fV+= z*|grrAd@ew>wRc|<414@z?^+-;{ ze~fO?RqX$3(3F2m7I1wmaUJ7D$!i!}PyWK*a2D>Hd_98>&U1TOc+TVjm6hkqYfH)GGx3GKLJ{1x1{paXL-mKC?O!17I-#xkrmhfHlRR3d!-GYzdoWF7p7@KE3V_at@uLU#wzXS5l z*%ZJ1dXKgx#|iAEg+IgR8ohHDN=(8|a7yjNu{mA5z5l*@?gD)oe8gDahWr+Lfwm|2 z;016``~8MXAi$a!dvy=?(JOF=u{AP${r7~Q;qyFd597Ubd;;g5dT&nAu4ybmt)Jm{ zFMJoogtx{5zjvlz|2UcPAMra!L!Au9^J_b&=k5Cb00Li?Gv;crt!>#&Eb zXlvX<_xl1L*meCG+r7RhKan5Xu6ZI75ww`@^rVZm9 zJ$3$Lj_(B6m+$8Xz`MA_*AUwoFR*>bzW`T20^Xq(%s>^N{}h82=)m@zz6A%2bKw$t zi+u(>&*#8gWA7zd{1Z`k|-@pZWV?Jm=20jxjZOL>6QfajLLc?~DQZ++3i1vQdTV%Sgn ztueo1Y|XF^yuB`fqVE)}f&2K=^7jSKy){N4&hI{XPvx|&sV@fsz~|06XXaoBIOp-X zXRh72em%u!4>$0A3hYC>A-A6!-GKwR4BYDlyj=d$s}E3*!-EoWnCTe_8xJ$&Vv!dGjN1 z2qoU$D{HLapAz>9G>lt#W0WR_{_ev0T;~aYioFlKTW^51`rgSi)@Rf{TI{+e%Jysc zNx1{&t-)<#eV1FKuF*AZ@y*cz8tNppd-s=u&?l4_&aQG>VvQfqZLP=TddBu<@0mR3 z?11Mxes0cli3#um-um|Snt1E1&||TVF^@oI>^>M{t%e%znK5f{2wHpzd`+Br37;}A zx{J5(74Op-etp)f`_|%%5})Ag?G&tu{|3}`$|(^;{H*5Sv&I!--=KwdACKPPe*wIw zOYDaH4jdbU?=|=UjK$jK9e_3C9Iyhh_xkS#dOM&%3qIT%-nEYPWpbb6bFW>O=Vl)Z 
zbOW4!gT96SxWt5h1$~6im>lQ|hKeKfo|AcV^acJrSKKHu=l6mTdC&0> z&X|lo=VSOxvR=d3HCR*8{*9DA`x2k^E}@rl z`0TC93;bJPe*tG5pKZrukvGqGX}=GU=WAko2mj~5Ge7soFy}FO=6E+x(HVc0$M`&> zz$faT6}u0$PK)3E6I!vBXV{=OjzNvBy!HyebzD;$LrfHHoMUr6r+&Z5sp48Pd;)hT zyz~0^vkUxfi6L^37{068*9v|g*hgEC)x`9IHm(EK`-a%M4)?5%gR$q>fR3EY=#x>=4Lu+0kW`*wH7dL)eD$meo zQn7b=;oe>aT?}WYx-PN3HT#??ty#Z9@jgt#kQvT){$2)&?|5Zg1lQGIRy7A&#%^4R^=7f!%?oU|~FdpRG0H1ZIo_ zzKVNkAN9|OTOHX=hIiketlNX3OC&g^L_#MHE_=rdor(y;l5Ze7-wJ~YcMw!Tq5=n$c!(b zdp}eabDh(;UxUuPBICNwXWd!(vtsMCpb?|>^r^v5Am^XuobHJ6cZm+a&tkt8YBVKJ zdmkvZhbZwmhBbVP?X$BjaeZ+4Vazi)@4IJrW(>Y6)(`9k{0F=-e*+1-qGKGyW%L4V zpW01{ncfL9d+GOr*cK$`DK%zj;a-@Vq7Uz%ckh!v_x})n3ld{vl{SWVgMC9AQ7fe+qo&RujI%{yTpA@V)c_=%`gCGqz^a>!x?#Bc`6u5^jcVUGdm) zvE4i0VPi4Y7@rBpTXLq7@p(7f7|z_`}!xCigPXS%a8l5 zaXE%R1CUSUJ7IDT{Bv}N9!rD$h}agL!BkGj`A>M)l48mZ=h?uyPt6YAjKFV<&-!n{ zRBqzS#02`;gm=H3-{))rg1E8#9PT1`0Xq1pbTPcAyld9)&vO*yej0NN&UwT;0;g|7 z?ceX_=0M~v%x2>w-navuTD(OU0FxdZR@ zRf1;ZBoOgVR&RWU-?^>r_ol_9_P_Btd5V7@cy4(|4e!lMxLx8i@&q`?dwrAee^UQT zxHr^r?D>u5Z-w)@^lt5{;q&M_WS3aecW4B;#-PXIyVkiI{KmSLv2^&I^AU0D31^&p zejCm@yY}DrK_V}xGiRL9V>y6p^`X6E+GDY%J=y!N`f}Ic>c1&%Vt1T11Fj=}R~>oR z=N=^L?UL8JR>!*>ebnN!&(-e>$7+<$CoM&18Phz|UayvD_s84V-t-J^p{#gZ@1;n!?{ze3!ci{|9`YX-0yEpB<#yS6katHjbeYB%Sis5tKbLtq6C5U$q?t=f99;Wkb;Qt-&bKswu z#w0&!gUj&GK$^@y#}+ zcenA&2l$M!cYZ7vif@VE`EOvqCo%k)-phZ%C3v47^;Ax%+v0D)SHw)`H%H!k?ED9l z{t}Ym-6!{WmzeuSxdXFiM~_n z{B;MvDL#9!*QvOFO$_&}*})q>$NwktXJES{#&y;Hw!}=uXI0*N9WiB_-m&X!B@U_W zJ82j1JMJFvIeP|DnDJJ8{x5Mqea9@pt{OhW&*4(M&8wpZ|0m!s@NB1I?XTfB_Q=@y zv3!ede@}tW@va)z@E?F(@&^A)x#0iM)BgX4?+tTjc;|Po#?s&qe9o=jB|f9xn`!Mt zuIJjJ9p55GUB!57Hu})cKby?a*E9x%aXZAG>wlAK#vI=N?Wm4>=KIKA6zBYqn3Z-6 z@0b?bxCXlezE_X2m%v=>egyh{;N=pwb9ryp*ctx}-J%m{fOqB zt$=5}#Mi_CdmP93F3iAq*W=kQKrr41O2n|118m<#-rXw4_(TI-(<$0H%$cFBy8-h0 zs$|BVpZw?GW03Img0|=D`j9q;XZ1^LdkX9-Yi#?nXFvbi5tkTee9nCcHXug)Z8?zj zjr@);;eS&;3%2Ar&kcMZfM7f?weB8yYw6Z5)%}Zjmd$ zE5^D1JMSX*+B#Elom=2}EhqEWSo0d-ns?Q>NUrm=#7w2b=lssGEB6h#TVQ{)9rNG7 
zuheyTUhhc^=j1#;*}?1o6MpqnmiYf0{(I=fj`?S$US^KW_^8D6dyTKk2EX;260g6W z#~RMN`)}}F)QmxgHk{`lJLa$du6UYPt9T~fxv8`+d#th269U_Ke);aJ%FP&NV7c^y-}--Wi`?_tp8w z&+>U7c|L>k-ho}RC4ZMVXMp~k?S*G>Dj8l_-*?b1IVAT7xICVRoGR<`9%za$(9^jx zzR#$UN(`fS#CL@Mc52NW5${mF;X6>y9^CV(oae;w=k4{m7{_@J+-u*9zSGBYhVA?b zJ?-5%*Z%_ado;FqIs5@H;`cppykicZlSlBzy)VuCuIh>5%v?iaJe4>2{oeEe$UAuR z{JVAcw%IxN@C@7AuCwzzGO+($ytPHg+-mz7%N=t66Ud#QW5OG2?R~giVxI3n=kCmn zccZp&m%tLvSnaVyV8PwO{}`;SF_ANTA5HRV$SIs>uiPYNx(97Q2x6SSw=u`k#PAuB z(YwU?H|0BFUyHcuJm%^1*|6R&-X8q3u8YJ3F;h8$zY6D>w2Y_H=LB9kuxpM3{h%{$ zB25hQ&&+2$6?^+}59(j=hj2<4L!Eq+|Fka2?LyqNifr7u1)(hpsZ7&odv}H6}&zqchkgfxiRR z_p@S`Ea=O-;T=+(>z(;Ve`j<|Y<=E5(p9kZBb%npTyC$&$E%8hA23&#r3_JwJ{t_g3;j{Y%I3mv6 zzCK!UKelkrQ)SE8^&FyaqpjCPWrrG%@LdB-;C=~fxb{9C?Y#P2ONX9=04KmbaJ&ZY zZ3 zNZ4bs-+$o$9yE-rST|x=n|tMa#<}LPyup7NpX>FxZ(_{8J;QqTr^L;Ga~%L{`e!3& zjNNOwOby5Gf%+=i`o@Z}e1d%fXP)=>15o#Lj?bQY&Cs>F=l0OVP}_ReHfM$QY~81h zvG2lu&lo4-UAy~a&d0zQYi@w~$7p-iZ{J6tC2uUoe@cw~26zL;qVEy>8hin?>w45V zhWX{}_0W8LRT{?5u>!Zt_kj*y!d?P<+ydus^#kkL_uoK&hHzuaF`SjW``gbs4q$A( zImn;`K~9xgS3bos@0xMTIM6>*)92ba_sux{0e^sQ&4$QEvCo*w??>S@r<_Y~tAZ=j7)idm! 
z{(zr>kKo!8d(H*>fZsW;0i~Y3cgnm|Ab*0+G0fYb9k(Fx-`X>-Ny^+fV0=XHI{zVe zo7e_zKMAckr}I6=w`*-(3~Rq+F5?VZ5T%|y>a)JRJ^)+BW0CiaKZmPl`+{-8FB&y` zho0`eTnlnROWbsC5>oF#+G9MKUv7@y{`TSQ)wPB-(SO0e1y$rW*!RFSFugm8yz$xD zld?p+_ggRnZQumG#&+InuM2OEec5*v`!z1WWBl?9b4Sv}FvnB4pM&v#heX?bPQ+9R z#=`l0=lvBV>?(2%=)j#lyug^V$-D_Z3;e%;ZzlW-`w(ov8eAmCv3E>Z^AoUT+)$^k z)pJ>tSU~l&@`!XgIuiyTz660sG zJ+}A*ykYFS!=8TxeeIGXe+KXQd<-stjNQnO#C1J~TcDlmR6UZ2;m&y9{EV!<8&Z38 zei7s@C;7(Ne+TMk!9!xt1^+Q{{yA`+?uYl+y?q7_@K;%vvG0}t1=PF5-o7s}Q{KGg zsOAl9&!UZCZuy^rLt@73wT`jwk#``272}q?89LBHTX;6FfahZ0fv*GJf9FW}%sT@u ze#QCZ9?Hj%qc0d+!(8LXeaZP}sDA>kV*@`Hd$ixX;39C3e+`_!i}#=18Gh$#-~#=a z*v?qc>ZhNvc!yl4eV;H+Vk$B0$0z0cKwyi8*lXk^&{SWk@BUj;ZZ)ZCkIq}?e5WVs zB%p6WO#fVKZ1;jfY%gC&0Xu3&smdC8f0rELuY3;bfP}9B7szXXK6`G_>k@CyDP!lk&3KER0b^S1%Rp&jcsC~WRIK@g zIPZ&b=R%ziir+cT#W8;G)`y^;{|di-WnjF$rzlzF-xb^2G3fWLaL!+G2xrZIg0ALL z!f~GiZ;kKZ`+4CKw$G?@>+AOmU27(CjWO>HacAhzSEL`~J1)GnjkDgl#IUXwtzW*@ zT4D!&=X0JGcou75FEi{a=49-#uX2~*&gYHl9nRACitpPNe}GO+oipHjz`WIHp2XO@ zejUt!`H39ozT>#q`szLf<2ioUqrU?gtg&1C*4ZVs?>=uwz=k|gzYjE}zCHGP7Q;Lj ziRt$Xy$|fYg{$ItVJ>hT-jx@`SbLXP`!hJ}^z)RU6YRNlkQLfH)5UNmuF>DKw%`r^ z4G1{%JGApQC3egB&%_zuFixY`RC1ca&*h68@&%F5ixx8mp z$nmb}7hMc{*;EaG5C3CeZJ*11Fc#;?#M{4T**Y!~F>ucR1Go>yds+MQ4t@_h;@z{U z=+F4?m(PM*CHA})d=$73p2=(QYvS$#?{W*T%zb~>E5`2YBW+*|X%g1-T#;(0sgZ8+nmVqNc5iucdL z+Vz?Go|tXPuj}r}5vx($nz7GT#@`F$mhp0L__LP{wn)Wq{RV!QoDn1M8mi==RqUp4 z3A@S)-#+o?c;9E>Tw|=)Az`3SCSJ7U>&*2sADs2q^Ui-Lk)t~UY&j&!x zImXh$e}LcTBpBEKt(kV)+e0t?da-K{3;P7td;;!(+D~I1at!aEO?J*HMv`N=JFaI5 zya&$F;xi&9(LU1L1IrTH-+(AI6V(Z7c`&b#kBywi_F47i5xL;diV(Z}c+FeekU zOFV1&81XG*@6eUPP4m7i{)^~2aZ|AeYtF#}{sc5Z6v*l#?uY9;lxG|PArRyKu!jcK z7x-FW-TUA>;9i(p#d zfc+ed{=UGo^WNSCopB@4@4r)UK6~!hRrx6U1ZVHYS!*nw`DL(%&mhH^9rAp~3)j|` zIgGd0Q+z)M9b@-PzN3b?hyMz2kBxWj_E05=d4G; zt0HfWU*fw2z6QpO#WmQ&eYjg-hHWiH-C(<}47TO(3zo$F6j*aA=KO%)eLExPyGcyM zQ1=>spZ{*cTkkf$kHDJz+KYb2b9}DpOL)&`EO+pEzTN@zy-V&_#E^SR{1@2!XzP5+ z_|M=m=+A}lY%KPo#B|u^*{}VyU;}PoC%Ccn=ZN3=J7VPfJw%^idoShOb49L;;W^U( 
zTQCPc$1VOU&gXmP810%fSYyX1#x+&D!>?WUPW=t>_dwup;VW0?cmVeTEa8v9Sd7gv ztp8U1b3$W?w>SA|y#~MYb>I@1@@sha;afQ84eF1@+!cJrU$H3m9KD5iPwU)4+_^A5 zgMSJ3!6|WL@%=Kx*AeIZ#`=9Lu$3GG`r3@$bDyDY$w}znVB3dcT&>r|uqW1R8IPqg z9=}{Jx$?nSpSTG&ppD_ZE1wyAH^%$eV2`CEW_L78@~B3p#u{6l>wlMwajIZAu7}ybJDS3WdL;V&-xGG|UGi*i_+IZl-X&MzBVbBT zyJD>Ey3{Qgi@iF(e2Vvb|8Q^M<)#w&+$SZ-KViHs_TU|UI>%G|=ktt7#-GDIEPNO5 zk`1b)Q-hSSHPmRESPw@G9;rwUTnuzoMLp~-1rd(eWuGjB} z^gVU(zu0^KeQ$VQ!+DwC@6@}bz6UnMtxJzh44)zTGUEjN%>4j39!u@hcar%nyy!rH zIX=g!*eyDP{(TO&EIE0P7+^hPd}nNd?`_ZFk8uA4-+(%gv5IHqcZLNB&|({N2)ZzU z{{tB~pW`Qc!}r9Nv330H_MH2g_>8gcUHr}~t^<40Z+)Ms%GEq;tbjdNuBDdisr@oj zpD*vb!JG?d^A=zRQh*}XlFv~+XvRzYXZZaNI<9S$u`~S6aRZzYYkZX!{x$Z2zVe+g z;|px}T+WyVRLS`KjP=f%Cno|V@bTWTNB!q2W2K4V3>xA)#`d{j>{=Cl*1ANz&(RCe zbNJ82F)_|t;b%v`r}(Y?09y&h-jy2X-kIBkokl-700*mTXSwL z){yb_b+HeDT%!-U%=ixQj=7gB(9ac+z~}WN5R9$WpdFtOXUzn6tgaHnXT3dV#(fXy zDnYFGAn;jFZJjNc$pPncpVy#;7pcIQdI#)Tv5xzjf%T1bT^q(9qh0S3xb`ZU808e7 zyuV-UloF_XEaScPiG-#NLE^ zO`S6kpv89;pKA@qRaV#!@q1quAVp<{K3nW#V%#@-tK$5(Mt1e9o!e*hkh~1m>VN=_ zXK?l@?9Ez_fPU>NE%C1T@4!26ZWVLJcJ((g)SJTxa7zerD zq5dq;8{i$V5BL3uv3I5=u8R9T1Fm}sT#LRGLp^Kzyfyj|b8lk@T6w|P80WGl=eDo0 z9Kb&+zZ<$YEBLiBj29)|+$M&1%Psr?TuX;9m6#4)=XeGG32;rVF`xskVGA(9Ww{^#EC=T-U|u*A0oo1*^=|103$b;i(Zpj+Tx-N0{-Tn6@FzUy{B9)knM##G*z z1#oV02v*n$1h|iV85naN{R9~I6|msE`UAEfool2dq#2%NYv|qob~L%Ijte5Uj%5ds~lm!BJQMcE&3*QqfMN( zUSk_K0|{)vKA3|8IPa%+Lwx03Lje8tzfE_GxKnJ`p?H3xi($Ppc;|CHE42A@U@iOW zzy&zpO|D&kCbzGn1_|F5B={qAr?1F=;4{C80p_=01uo;4{{ftV1AN{mpKogh+PVRL z!`Quj06quKbtp$&;kX0V{0wBq4fvYz2A#Bt%LUr|a9@Bq;|~84y~4H!`w|^K*JO>? z;G^O{MtdKuBk$hqqZi>+V!XevwBYyPl030h ztm(aZhTp!|z#8`0fEiqzc;j(8E_rmRqcpj zEox7=l5xM@;;Q~cy!{@d$9zMM=jz=#gR9cvzlP7AuY(Tm0IYy>Z^3D? 
z6S2eoG($Rm@0&4G>ErM}g8!R!V~Ehd#_t?6a0Xt(H)v&!?$4`m)?YG?7~&gr0vUL( z?}Aoel^8hJA^!tVdr$lQfUi9y#;#Ldxc+clnoufR31#Akm>#PH5;(1G7OXKX9*r3!|Mc`xC9 z2b$faj@Q*(F4ySMhy$4rW~ zN#GwzdTGOZKjIe&|ua^^qW8~9A_E;-X5 z!}nS1c;@d3{DQcCojZ8_KKE*S9E-Mo&wIKy|vK2;CGIxIH$Qn+0}!0 zT)v4ChaQXf<_BVYU)DNf-f=|vv$iRLZT>ZKGchfgiayu41lHfRzd_8K!e{hU zd!VYg<~FdvoS4q>L~;!MIsd_q`tsgA&#GfQ70>S5@*X&0ya8*_5*uJFKTzWlID>zQ zo}S4HzxV7mIG*y=R%|4sw`)32H)cyC_a^2Q6u3vZ5sNr`Txo05ox8MTf=Rp6bz<1yg zI2U5BV;=(d;+hV{I&wco?+yV1j z{2g$e^OC1e`CxC@r)ulVG30y*=ietK?5TA4{QcQJTgHmxEtr8L;GQ|BJ!Qt7T0|e- z|1d`0^_a5)O3ibQQ+(RuTw?eh^9p_il!mw{`CSaXsJ$cm_}9SmJ~Ix#GH2Z4bDkT( zxF&k4kJ0b%!G8lCT$U4zt!Iukls1O7yZ+bs9|HL&po3fC4=`TGM{v#|#(wKHE1zlbeJ}X7pw9CGeuf?9BgQPSe~RC^%}*gSh`e{&JnPIs1CBvN zjY2biOpNc2Q@AzI*9+Pl_qb(z!uSmc#){f=bl(%>45~Q4_pbhTO65Dy3TpUWz;Tro zzGwLU3appmtf7{BfX_4d2JP>x=Yr4mnPc5cXxBX!>v&(TfPiz2ju$0viGG7E4n^@> z!}vS++}|1S`@V7az(sHj@4QRK9ooIR1`ZgzUk#W8-`6Py>|;N~ZnP2eteMgJ0^O=X z%lHSlgihEEu>QB;7(4;nl1D@@Gi+n_foD6%cFrpHb_0I|9^iV7)poC(t91-s;qQB6 zoC_NGHU9fx3*7Sz*MfjM0%yQD{q^6LCwcI2Z#Xae)NYF3+}m)zle{1D2~M%EE%vj* zFVU_k2P9~Oy(8v5hc{-y*t!wJUG4V)yTumn!y|A9wBQfG_pS4Ik5(W7_spK`FM+0@ z#?9e_dd4^CfcGx@clrG~Nc)m;181LVd$Udg2f+A#K3r?KEpU(Zi<+0nmA6m*zXJNK z_n2{JtQ=#1jvdjPhR!+djr;+y-1? z+~Ds8?N~(l_k{9EAN8Yf87=y{duSy9^*VG$+ndjQi*E1*I%Cg^o$y_+HnnH?UH>`H z_ynwhu|oS4w2T9&eV!6pS;4tS#yQ_pIDaRy?ls2!c`{cOKmW{0h5Nt!U1G$i7|2M#X7ieSq^_G~KwPKjNMX!i) zEmh=v7Jd$%gACW=Q~GsaTT{G*Pa*SG7Vux;-=dAb%lMFSm1cAY?!nE0^XDk_32Yb# zn8BTaPl5JStSj#vyTqKI{0shX`0X_j(*f^*d#-r?-+@}&J+{X_9^U)l*mr^NbYbi@ z(8kcewH)8X_c@qjXKa1Kc)2Paz8NurZqV{)Xr+na3^TU#dNwWE{T0`Y1@6-dzxU;_ zG2{i9V;gThd%ggk0@pPbb2G4y`|v5=vd(ji|Au)W7%P1b@pixJUM2XzU&X$+U}b*! 
zPWTJP{l3?ru?`G)dv&fQm;s*^du-(q*ZS}H7Qo)TyT&T^mSfm|>#X!+N5K#HtYzF( z=5Y7GXTZ2Vm)HRIu>qMn-gWtXYhTquinJ~K4&eyDqS7p zpMeb4AXPuZmT{vE?Df^&+xq?8#l8-_KOX|mu*#D0kNE6u4ZL6a6m$K|-C)lOZ>(@0 zC1Q9FXSDHO!ij{fv@txJ^HSpt+WGan9{aLhV?D3{&XvJK{2P$uiMM`>zxL4JzX}4M z5;43tKE~%W+JVEu^#P+F*4h$hKaQtz8P0jEGnEMXD14oJ95W|p4*y$_%X`4Jq*{x7 z=j`WY?A>-RoXa&GqkSffH)e%atn2^ZAY*gk24?u_8G8547h^fZKV9d77}xJRz`EYO z{+^WFCWgH|!=8a9Fzya<_U7Jp#Ek2j@9%1-U=0FZ@l1A!HBU<2Cf<$cuwRkqoSCun zgfWhRe&M>uVq9Bl`hHQm=+%2ajFoe5r=s7!-Irdl$KpCY<96b&`>~~_>!VwLygf^N zHKr%Z4}G4ipBY2mm>u42^3UKmecgBYE{0xw#*Leb-O-s}?;U;!H^dHo$^~4D{~zFhn2enYLeBBt@HzJqowSu0a0_1ua(yIIEL@=jU}}4*O-gMEsO)!yN&I8;tepSA->L$8SC?W#y-cw zGn2Q5l4JNDtiNIGJWI#Kw%D%Ec<+CMe=Gt20Dl$vOV|&A{mCmihILxY^F4!`jcQrj z{W`|CLI?3xGGpgo!OhJ9bzR0>8U0zYwsM7d`*IHR@+7B=;d@n!|1NRfH8IDo;@G>f z!2cXP1@2Lb5`nglKrcsm+Fua+ci{PSHIA|I+Be|Ds_?Gc_!G2}W3)!bKKJs*S8*J0 z${g-Bc8;O0v9G~3U=MvB+?=tTXt2+P@lT6i?tr+ltg)}+_ncP1`n68j{5yOJ zY#5*C!zJtnSi@faK+HGb{0xb4ynu7=uRt!oP!}Y`{ck}KV-GD{0HXY{Q&zvypvwc-@9WStNf>XwSM7P9Osdc^Qoqlk>CQM`H+zZ!yf_9E^j5)pskHH++ z$5`&+v*(65xmvHo_Wtzig6~BwVwmHYu{ERE&fDQTxA8Br*PvmXf%P0K#>rRvmwUr= z<-WV7E$~hRaY|sPa5P94!*i~#Yu*rJzM{{$_suKf{C+0&5^wXgTkPv_?t8;nJBcFo z&xnrUKOxQ@C|3F7FNB?|fgr!9D|9umlO1b01D^UE_^w$xmR$*f`@o zuQ}Rt?s>*1jJ?yZ8Tb1@j#_k#-H#s&tn(>j{pZ4XhR@Fo9{~A%>^hgeD%Lvz)|><9O;OoFzrmM_uSGu~?gG%KEu2f+ zJ$(YMfPO#lHNbmqov+b1!MVV-z`5NI>(1?AB-eU11)H!Je3^g-8pNWi}1h)6>GCp%Jf!CwFAjW#W1LWN=>pC`9 zi5P0{W6S$}@>5`)7Op=};JH@?p&l0inHyN9IgtiZRS0c(Y zjlBS`;11CG8?bQA3MtugSuul#T=&SeiNigTn|eE$F2-NhN^TCyN+m$=>zPQI!0 zi~q}|r%sa?_ws}qU)MSlo{T={ zG5qviNqO&Xz~z{{!z2;++C6Gxn19zEG;nij)O|AVJ%M-rX%E^9>gO2dZ_#6Eu Date: Wed, 10 Jan 2024 16:59:21 +0100 Subject: [PATCH 02/27] disable other CI jobs --- .github/workflows/build_wheels.yml | 108 ++++++++++----------- .github/workflows/build_wheels_testing.yml | 2 + .github/workflows/ci.yml | 44 ++++----- .github/workflows/ci_python.yml | 80 +++++++-------- .github/workflows/quarto-render.yml | 100 +++++++++---------- 5 files changed, 168 insertions(+), 
166 deletions(-) diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index f3bb76f7e..9762570ae 100644 --- a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -1,61 +1,61 @@ -name: Build wheels +# name: Build wheels -on: - push: - branches: - - release-* - - '*wheel*' # must quote since "*" is a YAML reserved character; we want a string - tags: - - '*' - pull_request: - branches: - - '*wheel*' # must quote since "*" is a YAML reserved character; we want a string +# on: +# push: +# branches: +# - release-* +# - '*wheel*' # must quote since "*" is a YAML reserved character; we want a string +# tags: +# - '*' +# pull_request: +# branches: +# - '*wheel*' # must quote since "*" is a YAML reserved character; we want a string -jobs: - build_wheels: - name: Build wheels on ${{ matrix.os }} - runs-on: ${{ matrix.os }} - strategy: - matrix: - os: [ubuntu-22.04, macos-12, windows-2022] - # `windows-2022, ` blocked by https://github.com/pybind/pybind11/issues/3445#issuecomment-1525500927 +# jobs: +# build_wheels: +# name: Build wheels on ${{ matrix.os }} +# runs-on: ${{ matrix.os }} +# strategy: +# matrix: +# os: [ubuntu-22.04, macos-12, windows-2022] +# # `windows-2022, ` blocked by https://github.com/pybind/pybind11/issues/3445#issuecomment-1525500927 - steps: - - uses: actions/checkout@v3 +# steps: +# - uses: actions/checkout@v3 - - name: 'Brew setup on macOS' # x-ref c8e49ba8f8b9ce - if: ${{ startsWith(matrix.os, 'macos-') == true }} - run: | - set -e pipefail - brew install automake pkg-config ninja +# - name: 'Brew setup on macOS' # x-ref c8e49ba8f8b9ce +# if: ${{ startsWith(matrix.os, 'macos-') == true }} +# run: | +# set -e pipefail +# brew install automake pkg-config ninja - - name: Build wheels - uses: pypa/cibuildwheel@v2.14.1 - env: - CIBW_MANYLINUX_X86_64_IMAGE: "manylinux_2_28" - CIBW_SKIP: "*-win32 cp27-* cp35-* cp36-* cp37-* pp* *_i686 *musllinux*" - CIBW_ARCHS_MACOS: "x86_64 arm64" - 
CIBW_BUILD_VERBOSITY: 3 - MACOSX_DEPLOYMENT_TARGET: "12.0" - with: - package-dir: "apis/python" - output-dir: wheelhouse - config-file: "{package}/pyproject.toml" +# - name: Build wheels +# uses: pypa/cibuildwheel@v2.14.1 +# env: +# CIBW_MANYLINUX_X86_64_IMAGE: "manylinux_2_28" +# CIBW_SKIP: "*-win32 cp27-* cp35-* cp36-* cp37-* pp* *_i686 *musllinux*" +# CIBW_ARCHS_MACOS: "x86_64 arm64" +# CIBW_BUILD_VERBOSITY: 3 +# MACOSX_DEPLOYMENT_TARGET: "12.0" +# with: +# package-dir: "apis/python" +# output-dir: wheelhouse +# config-file: "{package}/pyproject.toml" - - uses: actions/upload-artifact@v3 - with: - path: ./wheelhouse/*.whl +# - uses: actions/upload-artifact@v3 +# with: +# path: ./wheelhouse/*.whl -# TODO: Needs support for pulling in the root directory -# build_sdist: -# name: Build source distribution -# runs-on: ubuntu-latest -# steps: -# - uses: actions/checkout@v3 -# -# - name: Build sdist -# run: pipx run build --sdist -# -# - uses: actions/upload-artifact@v3 -# with: -# path: dist/*.tar.gz +# # TODO: Needs support for pulling in the root directory +# # build_sdist: +# # name: Build source distribution +# # runs-on: ubuntu-latest +# # steps: +# # - uses: actions/checkout@v3 +# # +# # - name: Build sdist +# # run: pipx run build --sdist +# # +# # - uses: actions/upload-artifact@v3 +# # with: +# # path: dist/*.tar.gz diff --git a/.github/workflows/build_wheels_testing.yml b/.github/workflows/build_wheels_testing.yml index 63eb06446..65d546109 100644 --- a/.github/workflows/build_wheels_testing.yml +++ b/.github/workflows/build_wheels_testing.yml @@ -3,6 +3,8 @@ name: Test Build wheels on: push: branches: [main] + pull_request: + branches: [main] jobs: generate_backwards_compatability_data: diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e3026bc0f..911141f10 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,24 +1,24 @@ -name: CI +# name: CI -on: - push: - branches: [main] - pull_request: - branches: [main] +# on: 
+# push: +# branches: [main] +# pull_request: +# branches: [main] -jobs: - Run-Tests: - strategy: - matrix: - os: [ubuntu-latest] - runs-on: ${{ matrix.os }} - steps: - - name: Install OpenBLAS - run: sudo apt install libopenblas-dev - - uses: actions/checkout@v3 - - name: Configure CMake - run: cmake -S ./src -B ./src/build -DCMAKE_BUILD_TYPE=Debug -DTILEDB_VS_ENABLE_BLAS=ON - - name: Build - run: cmake --build ./src/build -j3 - - name: Run Tests - run: cmake --build ./src/build --target check-ci +# jobs: +# Run-Tests: +# strategy: +# matrix: +# os: [ubuntu-latest] +# runs-on: ${{ matrix.os }} +# steps: +# - name: Install OpenBLAS +# run: sudo apt install libopenblas-dev +# - uses: actions/checkout@v3 +# - name: Configure CMake +# run: cmake -S ./src -B ./src/build -DCMAKE_BUILD_TYPE=Debug -DTILEDB_VS_ENABLE_BLAS=ON +# - name: Build +# run: cmake --build ./src/build -j3 +# - name: Run Tests +# run: cmake --build ./src/build --target check-ci diff --git a/.github/workflows/ci_python.yml b/.github/workflows/ci_python.yml index e66d35aea..0afdbbd89 100644 --- a/.github/workflows/ci_python.yml +++ b/.github/workflows/ci_python.yml @@ -1,42 +1,42 @@ -name: Python API CI +# name: Python API CI -on: - push: - branches: [main] - pull_request: - branches: [main] +# on: +# push: +# branches: [main] +# pull_request: +# branches: [main] -jobs: - run-tests: - strategy: - matrix: - os: [ubuntu-latest] - python-version: ["3.9"] - runs-on: ${{ matrix.os }} - steps: - - name: Install OpenBLAS - run: sudo apt install libopenblas-dev - - uses: actions/checkout@v3 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} - - name: Print Python version - run: | - which python - which pip - python --version - - name: Build and test python - run: | - cd apis/python - pip install .[test] - pytest - # TODO: fix editable on linux - #pip uninstall -y tiledb.vector_search - #pip install -e . 
- #pytest - pip install -r test/ipynb/requirements.txt - pytest --nbmake test/ipynb - env: - TILEDB_REST_TOKEN: ${{ secrets.TILEDB_CLOUD_HELPER_VAR }} - shell: bash -el {0} +# jobs: +# run-tests: +# strategy: +# matrix: +# os: [ubuntu-latest] +# python-version: ["3.9"] +# runs-on: ${{ matrix.os }} +# steps: +# - name: Install OpenBLAS +# run: sudo apt install libopenblas-dev +# - uses: actions/checkout@v3 +# - name: Set up Python ${{ matrix.python-version }} +# uses: actions/setup-python@v2 +# with: +# python-version: ${{ matrix.python-version }} +# - name: Print Python version +# run: | +# which python +# which pip +# python --version +# - name: Build and test python +# run: | +# cd apis/python +# pip install .[test] +# pytest +# # TODO: fix editable on linux +# #pip uninstall -y tiledb.vector_search +# #pip install -e . +# #pytest +# pip install -r test/ipynb/requirements.txt +# pytest --nbmake test/ipynb +# env: +# TILEDB_REST_TOKEN: ${{ secrets.TILEDB_CLOUD_HELPER_VAR }} +# shell: bash -el {0} diff --git a/.github/workflows/quarto-render.yml b/.github/workflows/quarto-render.yml index df7a2e405..aaf0435c9 100644 --- a/.github/workflows/quarto-render.yml +++ b/.github/workflows/quarto-render.yml @@ -1,55 +1,55 @@ -# Cloned from https://github.com/TileDB-Inc/tiledb-quarto-template +# # Cloned from https://github.com/TileDB-Inc/tiledb-quarto-template -name: Render and deploy Quarto files -on: - push: - pull_request: +# name: Render and deploy Quarto files +# on: +# push: +# pull_request: -jobs: - quarto-render-and-deploy: - strategy: - matrix: - os: [ubuntu-latest] - python-version: ["3.9"] - runs-on: ${{ matrix.os }} - steps: - - name: Install OpenBLAS - run: sudo apt install libopenblas-dev - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} - - name: Print Python version - run: | - which python - which pip - python --version - - name: Install pybind11 - run: pip install 
"pybind11[global]" "pydantic<2" - - uses: actions/checkout@v3 - - name: Install - run: cd apis/python && pip install . +# jobs: +# quarto-render-and-deploy: +# strategy: +# matrix: +# os: [ubuntu-latest] +# python-version: ["3.9"] +# runs-on: ${{ matrix.os }} +# steps: +# - name: Install OpenBLAS +# run: sudo apt install libopenblas-dev +# - name: Set up Python ${{ matrix.python-version }} +# uses: actions/setup-python@v2 +# with: +# python-version: ${{ matrix.python-version }} +# - name: Print Python version +# run: | +# which python +# which pip +# python --version +# - name: Install pybind11 +# run: pip install "pybind11[global]" "pydantic<2" +# - uses: actions/checkout@v3 +# - name: Install +# run: cd apis/python && pip install . - - name: "Install Quarto" - uses: quarto-dev/quarto-actions/setup@v2 +# - name: "Install Quarto" +# uses: quarto-dev/quarto-actions/setup@v2 - - name: "Quarto render" - shell: bash - run: | - pip install quartodoc PyYAML click "griffe<0.33" # incompatible - # create a symlink to the tiledbvcf python package, so it doesn't have to be installed - #ln -s apis/python/src/tiledb/vector_search - quartodoc build - quarto render --fail-if-warnings - # https://github.com/quarto-dev/quarto-cli/issues/493 +# - name: "Quarto render" +# shell: bash +# run: | +# pip install quartodoc PyYAML click "griffe<0.33" # incompatible +# # create a symlink to the tiledbvcf python package, so it doesn't have to be installed +# #ln -s apis/python/src/tiledb/vector_search +# quartodoc build +# quarto render --fail-if-warnings +# # https://github.com/quarto-dev/quarto-cli/issues/493 - - name: "Deploy to gh-pages" - uses: peaceiris/actions-gh-pages@v3 - # Change to the name of your repo's primary branch name: - if: github.ref == 'refs/heads/main' - with: - # This is GitHub Actions magic; no secrets for us to manage; and this works first-time - # without any extra configs other than visiting Settings -> Pages in your GitHub repo. 
- github_token: ${{ secrets.GITHUB_TOKEN }} - publish_dir: docs - destination_dir: docs +# - name: "Deploy to gh-pages" +# uses: peaceiris/actions-gh-pages@v3 +# # Change to the name of your repo's primary branch name: +# if: github.ref == 'refs/heads/main' +# with: +# # This is GitHub Actions magic; no secrets for us to manage; and this works first-time +# # without any extra configs other than visiting Settings -> Pages in your GitHub repo. +# github_token: ${{ secrets.GITHUB_TOKEN }} +# publish_dir: docs +# destination_dir: docs From 6e02d48c89769dc1c193c240915474bd1f47439c Mon Sep 17 00:00:00 2001 From: Paris Morgan Date: Wed, 10 Jan 2024 17:01:34 +0100 Subject: [PATCH 03/27] ci testing --- .github/workflows/build_wheels_testing.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/build_wheels_testing.yml b/.github/workflows/build_wheels_testing.yml index 65d546109..5df174385 100644 --- a/.github/workflows/build_wheels_testing.yml +++ b/.github/workflows/build_wheels_testing.yml @@ -17,6 +17,8 @@ jobs: - name: Determine Release Tag id: get_release_tag run: | + git status + git branch release_tag=$(git describe --tags --abbrev=0) echo "::set-output name=release_tag::$release_tag" From 5cba6d6010fc9800bb8db8dfec143d679a57bae1 Mon Sep 17 00:00:00 2001 From: Paris Morgan Date: Wed, 10 Jan 2024 17:03:59 +0100 Subject: [PATCH 04/27] ci testing --- .github/workflows/build_wheels_testing.yml | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build_wheels_testing.yml b/.github/workflows/build_wheels_testing.yml index 5df174385..4470957fb 100644 --- a/.github/workflows/build_wheels_testing.yml +++ b/.github/workflows/build_wheels_testing.yml @@ -17,6 +17,9 @@ jobs: - name: Determine Release Tag id: get_release_tag run: | + git status + git branch + git checkout main git status git branch release_tag=$(git describe --tags --abbrev=0) @@ -29,10 +32,6 @@ jobs: run: | git status git branch - # git checkout main - # 
git add backwards-compatibility-data/ - # git commit -m "Add backward compatibility data for release ${{ steps.get_release_tag.outputs.release_tag }}" - # git push build_wheels: name: Build wheels on ${{ matrix.os }} From aaaab7c5624e94259c9a6121142f2ae0ad6d5a65 Mon Sep 17 00:00:00 2001 From: Paris Morgan Date: Wed, 10 Jan 2024 17:06:28 +0100 Subject: [PATCH 05/27] ci testing --- .github/workflows/build_wheels_testing.yml | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/.github/workflows/build_wheels_testing.yml b/.github/workflows/build_wheels_testing.yml index 4470957fb..a120b2994 100644 --- a/.github/workflows/build_wheels_testing.yml +++ b/.github/workflows/build_wheels_testing.yml @@ -17,11 +17,12 @@ jobs: - name: Determine Release Tag id: get_release_tag run: | - git status - git branch - git checkout main - git status - git branch + echo "git status" && git status + echo "git branch" && git branch + echo "git fetch origin" && git fetch origin + echo "git checkout main" && git checkout main + echo "git status" && git status + echo "git branch" && git branch release_tag=$(git describe --tags --abbrev=0) echo "::set-output name=release_tag::$release_tag" From 223c0f0c3bd1214dc47b04ac1918e6674952dd4d Mon Sep 17 00:00:00 2001 From: Paris Morgan Date: Wed, 10 Jan 2024 17:10:52 +0100 Subject: [PATCH 06/27] ci testing --- .github/workflows/build_wheels_testing.yml | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build_wheels_testing.yml b/.github/workflows/build_wheels_testing.yml index a120b2994..172432811 100644 --- a/.github/workflows/build_wheels_testing.yml +++ b/.github/workflows/build_wheels_testing.yml @@ -9,7 +9,7 @@ on: jobs: generate_backwards_compatability_data: name: Generate Backwards Compatibility Data - runs-on: ubuntu-22.04 + runs-on: ubuntu-latest steps: - name: Checkout code uses: actions/checkout@v3 @@ -26,8 +26,23 @@ jobs: release_tag=$(git describe --tags 
--abbrev=0) echo "::set-output name=release_tag::$release_tag" + - name: Print Python version + run: | + which python + which pip + python --version + + - name: Build python + run: | + cd apis/python + pip install . + - name: Build Indexes - run: backwards-compatibility-data/generate_data.py ${{ steps.get_release_tag.outputs.release_tag }} + run: | + echo "pwd" && pwd + echo "ls" && ls + echo ${{ steps.get_release_tag.outputs.release_tag }} + python backwards-compatability-data/generate_data.py ${{ steps.get_release_tag.outputs.release_tag }} - name: Commit and Push to Main Branch run: | From f723dbd5f56d3f15037f1d69295f81e543fb3ccf Mon Sep 17 00:00:00 2001 From: Paris Morgan Date: Wed, 10 Jan 2024 17:14:48 +0100 Subject: [PATCH 07/27] ci testing --- .github/workflows/build_wheels_testing.yml | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/.github/workflows/build_wheels_testing.yml b/.github/workflows/build_wheels_testing.yml index 172432811..c5de8f508 100644 --- a/.github/workflows/build_wheels_testing.yml +++ b/.github/workflows/build_wheels_testing.yml @@ -10,6 +10,7 @@ jobs: generate_backwards_compatability_data: name: Generate Backwards Compatibility Data runs-on: ubuntu-latest + python-version: 3.9 steps: - name: Checkout code uses: actions/checkout@v3 @@ -17,13 +18,10 @@ jobs: - name: Determine Release Tag id: get_release_tag run: | - echo "git status" && git status - echo "git branch" && git branch - echo "git fetch origin" && git fetch origin - echo "git checkout main" && git checkout main - echo "git status" && git status - echo "git branch" && git branch - release_tag=$(git describe --tags --abbrev=0) + # TODO: test this on a branch with a real tag + # release_tag=$(git describe --tags --abbrev=0) + release_tag="0.0.0" + echo "release_tag" && echo $release_tag echo "::set-output name=release_tag::$release_tag" - name: Print Python version @@ -46,8 +44,12 @@ jobs: - name: Commit and Push to Main Branch run: | - git 
status - git branch + echo "git status" && git status + echo "git branch" && git branch + echo "git fetch origin main" && git fetch origin main + echo "git checkout main" && git checkout main + echo "git status" && git status + echo "git branch" && git branch build_wheels: name: Build wheels on ${{ matrix.os }} From 41178d6b0b9b3c1ce2744997d7fad42d00bfdb89 Mon Sep 17 00:00:00 2001 From: Paris Morgan Date: Wed, 10 Jan 2024 17:15:49 +0100 Subject: [PATCH 08/27] ci testing --- .github/workflows/build_wheels_testing.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/build_wheels_testing.yml b/.github/workflows/build_wheels_testing.yml index c5de8f508..8be007a52 100644 --- a/.github/workflows/build_wheels_testing.yml +++ b/.github/workflows/build_wheels_testing.yml @@ -10,7 +10,6 @@ jobs: generate_backwards_compatability_data: name: Generate Backwards Compatibility Data runs-on: ubuntu-latest - python-version: 3.9 steps: - name: Checkout code uses: actions/checkout@v3 From 0f4ac1b189362276776c8f9f86015c5fc51bcd2a Mon Sep 17 00:00:00 2001 From: Paris Morgan Date: Wed, 10 Jan 2024 17:18:46 +0100 Subject: [PATCH 09/27] ci testing --- backwards-compatability-data/generate_data.py | 1 - 1 file changed, 1 deletion(-) diff --git a/backwards-compatability-data/generate_data.py b/backwards-compatability-data/generate_data.py index bacce996f..56085c04a 100644 --- a/backwards-compatability-data/generate_data.py +++ b/backwards-compatability-data/generate_data.py @@ -1,6 +1,5 @@ import os import shutil -import numpy as np from tiledb.vector_search.ingestion import ingest from tiledb.vector_search.utils import load_fvecs, write_fvecs From e624de44a0aad7e942bc3a8432d8dbf6ac4a34d9 Mon Sep 17 00:00:00 2001 From: Paris Morgan Date: Wed, 10 Jan 2024 17:22:52 +0100 Subject: [PATCH 10/27] ci testing --- .github/workflows/build_wheels_testing.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/build_wheels_testing.yml 
b/.github/workflows/build_wheels_testing.yml index 8be007a52..2b705b302 100644 --- a/.github/workflows/build_wheels_testing.yml +++ b/.github/workflows/build_wheels_testing.yml @@ -34,6 +34,10 @@ jobs: cd apis/python pip install . + - name: List Installed Python Packages + run: | + pip freeze + - name: Build Indexes run: | echo "pwd" && pwd From 6940ea782d50ac7944ff29bb8b8caf55ddee042f Mon Sep 17 00:00:00 2001 From: Paris Morgan Date: Wed, 10 Jan 2024 17:25:32 +0100 Subject: [PATCH 11/27] ci testing --- .github/workflows/build_wheels_testing.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build_wheels_testing.yml b/.github/workflows/build_wheels_testing.yml index 2b705b302..bddf71299 100644 --- a/.github/workflows/build_wheels_testing.yml +++ b/.github/workflows/build_wheels_testing.yml @@ -32,7 +32,7 @@ jobs: - name: Build python run: | cd apis/python - pip install . + pip install .[test] - name: List Installed Python Packages run: | From ff3bde7f01db95a42ee3dd34a203c77a43700f66 Mon Sep 17 00:00:00 2001 From: Paris Morgan Date: Wed, 10 Jan 2024 17:28:14 +0100 Subject: [PATCH 12/27] ci testing --- .github/workflows/build_wheels_testing.yml | 6 +- .github/workflows/ci_python.yml | 83 +++++++++++----------- 2 files changed, 48 insertions(+), 41 deletions(-) diff --git a/.github/workflows/build_wheels_testing.yml b/.github/workflows/build_wheels_testing.yml index bddf71299..3ec8d6822 100644 --- a/.github/workflows/build_wheels_testing.yml +++ b/.github/workflows/build_wheels_testing.yml @@ -32,11 +32,15 @@ jobs: - name: Build python run: | cd apis/python - pip install .[test] + pip install . 
- name: List Installed Python Packages run: | pip freeze + + - name: List Installed Python Packages 2 + run: | + cd apis/python && pip freeze - name: Build Indexes run: | diff --git a/.github/workflows/ci_python.yml b/.github/workflows/ci_python.yml index 0afdbbd89..91b516559 100644 --- a/.github/workflows/ci_python.yml +++ b/.github/workflows/ci_python.yml @@ -1,42 +1,45 @@ -# name: Python API CI +name: Python API CI -# on: -# push: -# branches: [main] -# pull_request: -# branches: [main] +on: + push: + branches: [main] + pull_request: + branches: [main] -# jobs: -# run-tests: -# strategy: -# matrix: -# os: [ubuntu-latest] -# python-version: ["3.9"] -# runs-on: ${{ matrix.os }} -# steps: -# - name: Install OpenBLAS -# run: sudo apt install libopenblas-dev -# - uses: actions/checkout@v3 -# - name: Set up Python ${{ matrix.python-version }} -# uses: actions/setup-python@v2 -# with: -# python-version: ${{ matrix.python-version }} -# - name: Print Python version -# run: | -# which python -# which pip -# python --version -# - name: Build and test python -# run: | -# cd apis/python -# pip install .[test] -# pytest -# # TODO: fix editable on linux -# #pip uninstall -y tiledb.vector_search -# #pip install -e . 
-# #pytest -# pip install -r test/ipynb/requirements.txt -# pytest --nbmake test/ipynb -# env: -# TILEDB_REST_TOKEN: ${{ secrets.TILEDB_CLOUD_HELPER_VAR }} -# shell: bash -el {0} +jobs: + run-tests: + strategy: + matrix: + os: [ubuntu-latest] + python-version: ["3.9"] + runs-on: ${{ matrix.os }} + steps: + - name: Install OpenBLAS + run: sudo apt install libopenblas-dev + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - name: Print Python version + run: | + which python + which pip + python --version + - name: Build and test python + run: | + cd apis/python + pip install .[test] + pytest + # TODO: fix editable on linux + #pip uninstall -y tiledb.vector_search + #pip install -e . + #pytest + pip install -r test/ipynb/requirements.txt + pytest --nbmake test/ipynb + env: + TILEDB_REST_TOKEN: ${{ secrets.TILEDB_CLOUD_HELPER_VAR }} + shell: bash -el {0} + - name: List Installed Python Packages + run: | + pip freeze From 191021a6c62b8c5af9caca243896042bf0a7e58a Mon Sep 17 00:00:00 2001 From: Paris Morgan Date: Wed, 10 Jan 2024 17:40:29 +0100 Subject: [PATCH 13/27] ci testing --- .../test/test_backwards_compatability.py | 52 +++++++++---------- 1 file changed, 26 insertions(+), 26 deletions(-) diff --git a/apis/python/test/test_backwards_compatability.py b/apis/python/test/test_backwards_compatability.py index 1488bb7f3..70bb8352d 100644 --- a/apis/python/test/test_backwards_compatability.py +++ b/apis/python/test/test_backwards_compatability.py @@ -78,33 +78,33 @@ def test_create_and_query_indices_with_old_storage_versions(tmp_path): _, result = index_ram.query(queries, k=k) assert accuracy(result, gt_i) > MINIMUM_ACCURACY -def test_query_old_indices(): - ''' - Tests that current code can query indices which were written to disk by old code. 
- ''' - backwards_compatability_path = os.path.join(os.path.dirname(__file__), '..', '..', '..', 'backwards-compatability-data') - datasets_path = os.path.join(backwards_compatability_path, 'data') - base = load_fvecs(os.path.join(backwards_compatability_path, 'siftmicro_base.fvecs')) - query_indices = [0, 3, 4, 8, 10, 19, 28, 31, 39, 40, 41, 47, 49, 50, 56, 64, 68, 70, 71, 79, 82, 89, 90, 94] - queries = base[query_indices] +# def test_query_old_indices(): +# ''' +# Tests that current code can query indices which were written to disk by old code. +# ''' +# backwards_compatability_path = os.path.join(os.path.dirname(__file__), '..', '..', '..', 'backwards-compatability-data') +# datasets_path = os.path.join(backwards_compatability_path, 'data') +# base = load_fvecs(os.path.join(backwards_compatability_path, 'siftmicro_base.fvecs')) +# query_indices = [0, 3, 4, 8, 10, 19, 28, 31, 39, 40, 41, 47, 49, 50, 56, 64, 68, 70, 71, 79, 82, 89, 90, 94] +# queries = base[query_indices] - for directory_name in os.listdir(datasets_path): - version_path = os.path.join(datasets_path, directory_name) - if not os.path.isdir(version_path): - continue +# for directory_name in os.listdir(datasets_path): +# version_path = os.path.join(datasets_path, directory_name) +# if not os.path.isdir(version_path): +# continue - for index_name in os.listdir(version_path): - index_uri = os.path.join(version_path, index_name) - if not os.path.isdir(index_uri): - continue +# for index_name in os.listdir(version_path): +# index_uri = os.path.join(version_path, index_name) +# if not os.path.isdir(index_uri): +# continue - if "ivf_flat" in index_name: - index = IVFFlatIndex(uri=index_uri) - elif "flat" in index_name: - index = FlatIndex(uri=index_uri) - else: - assert False, f"Unknown index name: {index_name}" +# if "ivf_flat" in index_name: +# index = IVFFlatIndex(uri=index_uri) +# elif "flat" in index_name: +# index = FlatIndex(uri=index_uri) +# else: +# assert False, f"Unknown index name: 
{index_name}" - result_d, result_i = index.query(queries, k=1) - assert query_indices == result_i.flatten().tolist() - assert result_d.flatten().tolist() == [0 for _ in range(len(query_indices))] \ No newline at end of file +# result_d, result_i = index.query(queries, k=1) +# assert query_indices == result_i.flatten().tolist() +# assert result_d.flatten().tolist() == [0 for _ in range(len(query_indices))] \ No newline at end of file From c2a1b329f4594fc37c75ce5ca995d9d46ae7d116 Mon Sep 17 00:00:00 2001 From: Paris Morgan Date: Wed, 10 Jan 2024 20:25:46 +0100 Subject: [PATCH 14/27] ci testing --- .github/workflows/build_wheels_testing.yml | 34 ++++++++++------------ 1 file changed, 15 insertions(+), 19 deletions(-) diff --git a/.github/workflows/build_wheels_testing.yml b/.github/workflows/build_wheels_testing.yml index 3ec8d6822..d4b45f24e 100644 --- a/.github/workflows/build_wheels_testing.yml +++ b/.github/workflows/build_wheels_testing.yml @@ -14,14 +14,10 @@ jobs: - name: Checkout code uses: actions/checkout@v3 - - name: Determine Release Tag - id: get_release_tag - run: | - # TODO: test this on a branch with a real tag - # release_tag=$(git describe --tags --abbrev=0) - release_tag="0.0.0" - echo "release_tag" && echo $release_tag - echo "::set-output name=release_tag::$release_tag" + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: "3.9" - name: Print Python version run: | @@ -29,23 +25,23 @@ jobs: which pip python --version - - name: Build python - run: | - cd apis/python - pip install . 
- - - name: List Installed Python Packages - run: | - pip freeze - - - name: List Installed Python Packages 2 + - name: Determine Release Tag + id: get_release_tag run: | - cd apis/python && pip freeze + # TODO: test this on a branch with a real tag + # release_tag=$(git describe --tags --abbrev=0) + release_tag="0.0.0" + echo "release_tag" && echo $release_tag + echo "::set-output name=release_tag::$release_tag" - name: Build Indexes run: | echo "pwd" && pwd echo "ls" && ls + cd apis/python && pip install . + echo "pwd" && pwd + echo "ls" && ls + echo "pip freeze" && pip freeze echo ${{ steps.get_release_tag.outputs.release_tag }} python backwards-compatability-data/generate_data.py ${{ steps.get_release_tag.outputs.release_tag }} From 525cb9e4fa707685da3b3df2b279f8c2114c0ee9 Mon Sep 17 00:00:00 2001 From: Paris Morgan Date: Thu, 11 Jan 2024 11:04:43 +0100 Subject: [PATCH 15/27] ci testing --- .github/workflows/build_wheels_testing.yml | 19 ++++++++++--------- backwards-compatability-data/generate_data.py | 4 +++- 2 files changed, 13 insertions(+), 10 deletions(-) diff --git a/.github/workflows/build_wheels_testing.yml b/.github/workflows/build_wheels_testing.yml index d4b45f24e..016ce0d96 100644 --- a/.github/workflows/build_wheels_testing.yml +++ b/.github/workflows/build_wheels_testing.yml @@ -25,25 +25,26 @@ jobs: which pip python --version - - name: Determine Release Tag - id: get_release_tag + - name: Build Indexes run: | # TODO: test this on a branch with a real tag # release_tag=$(git describe --tags --abbrev=0) release_tag="0.0.0" echo "release_tag" && echo $release_tag - echo "::set-output name=release_tag::$release_tag" - - - name: Build Indexes - run: | echo "pwd" && pwd echo "ls" && ls - cd apis/python && pip install . + cd apis/python && pip install . & cd ../.. 
echo "pwd" && pwd echo "ls" && ls echo "pip freeze" && pip freeze - echo ${{ steps.get_release_tag.outputs.release_tag }} - python backwards-compatability-data/generate_data.py ${{ steps.get_release_tag.outputs.release_tag }} + echo "pwd" && pwd + echo "ls" && ls + python backwards-compatability-data/generate_data.py $release_tag + echo "pwd" && pwd + echo "ls" && ls + echo "ls backwards-compatability-data" && ls backwards-compatability-data + echo "ls backwards-compatability-data/data" && ls backwards-compatability-data/data + echo "ls backwards-compatability-data/data/0.0.0" && ls backwards-compatability-data/data/0.0.0 - name: Commit and Push to Main Branch run: | diff --git a/backwards-compatability-data/generate_data.py b/backwards-compatability-data/generate_data.py index 56085c04a..59e90bcf6 100644 --- a/backwards-compatability-data/generate_data.py +++ b/backwards-compatability-data/generate_data.py @@ -30,9 +30,11 @@ def generate_release_data(version): data_types = ["float32", "uint8"] for index_type in index_types: for data_type in data_types: + index_uri = f"{release_dir}/{index_type.lower()}_{data_type}" + print(f"Creating index at {index_uri}") index = ingest( index_type=index_type, - index_uri=f"{release_dir}/{index_type.lower()}_{data_type}", + index_uri=index_uri, input_vectors=base.astype(data_type), ) From b03d525ab1b107e98c5ebc24effff34794902713 Mon Sep 17 00:00:00 2001 From: Paris Morgan Date: Thu, 11 Jan 2024 11:13:22 +0100 Subject: [PATCH 16/27] ci testing --- .github/workflows/build_wheels_testing.yml | 2 +- .github/workflows/ci_python.yml | 86 +++++++++++----------- 2 files changed, 44 insertions(+), 44 deletions(-) diff --git a/.github/workflows/build_wheels_testing.yml b/.github/workflows/build_wheels_testing.yml index 016ce0d96..4b93132cd 100644 --- a/.github/workflows/build_wheels_testing.yml +++ b/.github/workflows/build_wheels_testing.yml @@ -33,7 +33,7 @@ jobs: echo "release_tag" && echo $release_tag echo "pwd" && pwd echo "ls" && ls 
- cd apis/python && pip install . & cd ../.. + cd apis/python && pip install . && cd ../.. echo "pwd" && pwd echo "ls" && ls echo "pip freeze" && pip freeze diff --git a/.github/workflows/ci_python.yml b/.github/workflows/ci_python.yml index 91b516559..ce28e886a 100644 --- a/.github/workflows/ci_python.yml +++ b/.github/workflows/ci_python.yml @@ -1,45 +1,45 @@ -name: Python API CI +# name: Python API CI -on: - push: - branches: [main] - pull_request: - branches: [main] +# on: +# push: +# branches: [main] +# pull_request: +# branches: [main] -jobs: - run-tests: - strategy: - matrix: - os: [ubuntu-latest] - python-version: ["3.9"] - runs-on: ${{ matrix.os }} - steps: - - name: Install OpenBLAS - run: sudo apt install libopenblas-dev - - uses: actions/checkout@v3 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} - - name: Print Python version - run: | - which python - which pip - python --version - - name: Build and test python - run: | - cd apis/python - pip install .[test] - pytest - # TODO: fix editable on linux - #pip uninstall -y tiledb.vector_search - #pip install -e . 
- #pytest - pip install -r test/ipynb/requirements.txt - pytest --nbmake test/ipynb - env: - TILEDB_REST_TOKEN: ${{ secrets.TILEDB_CLOUD_HELPER_VAR }} - shell: bash -el {0} - - name: List Installed Python Packages - run: | - pip freeze +# jobs: +# run-tests: +# strategy: +# matrix: +# os: [ubuntu-latest] +# python-version: ["3.9"] +# runs-on: ${{ matrix.os }} +# steps: +# - name: Install OpenBLAS +# run: sudo apt install libopenblas-dev +# - uses: actions/checkout@v3 +# - name: Set up Python ${{ matrix.python-version }} +# uses: actions/setup-python@v2 +# with: +# python-version: ${{ matrix.python-version }} +# - name: Print Python version +# run: | +# which python +# which pip +# python --version +# - name: Build and test python +# run: | +# cd apis/python +# pip install .[test] +# pytest +# # TODO: fix editable on linux +# #pip uninstall -y tiledb.vector_search +# #pip install -e . +# #pytest +# pip install -r test/ipynb/requirements.txt +# pytest --nbmake test/ipynb +# env: +# TILEDB_REST_TOKEN: ${{ secrets.TILEDB_CLOUD_HELPER_VAR }} +# shell: bash -el {0} +# - name: List Installed Python Packages +# run: | +# pip freeze From 48b48b8ebc9f92f0d48ccd0802c6975f6548619a Mon Sep 17 00:00:00 2001 From: Paris Morgan Date: Thu, 11 Jan 2024 11:35:28 +0100 Subject: [PATCH 17/27] ci testing --- backwards-compatability-data/generate_data.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/backwards-compatability-data/generate_data.py b/backwards-compatability-data/generate_data.py index 59e90bcf6..40a130f8f 100644 --- a/backwards-compatability-data/generate_data.py +++ b/backwards-compatability-data/generate_data.py @@ -10,20 +10,26 @@ def create_sift_micro(): don't need to run this again. We only write the base data used to create an index. To query you should just select vectors from this to query against the index with. 
''' + script_dir = os.path.dirname(os.path.abspath(__file__)) base_uri = "../apis/python/test/data/siftsmall/siftsmall_base.fvecs" - write_fvecs("./siftmicro_base.fvecs", load_fvecs(base_uri)[:100]) + write_fvecs(os.path.join(script_dir, "siftmicro_base.fvecs"), load_fvecs(base_uri)[:100]) def generate_release_data(version): + script_dir = os.path.dirname(os.path.abspath(__file__)) + # Create the new release directory. - release_dir = f"./data/{version}" + release_dir = os.path.join(script_dir, "data", version) + print(f"release_dir {release_dir}") shutil.rmtree(release_dir, ignore_errors=True) os.makedirs(release_dir, exist_ok=True) # Get the data we'll use to generate the index. - base_uri = "./siftmicro_base.fvecs" + base_uri = os.path.join(script_dir, "siftmicro_base.fvecs") + print(f"base_uri {base_uri}") base = load_fvecs(base_uri) indices = [0, 3, 4, 8, 10, 19, 28, 31, 39, 40, 41, 47, 49, 50, 56, 64, 68, 70, 71, 79, 82, 89, 90, 94] queries = base[indices] + print(f"queries {queries}") # Generate each index and query to make sure it works before we write it. index_types = ["FLAT", "IVF_FLAT"] From a615991d209a05599f9b8944843d4fff2aa138c6 Mon Sep 17 00:00:00 2001 From: Paris Morgan Date: Thu, 11 Jan 2024 11:53:52 +0100 Subject: [PATCH 18/27] ci testing --- .github/workflows/build_wheels_testing.yml | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/.github/workflows/build_wheels_testing.yml b/.github/workflows/build_wheels_testing.yml index 4b93132cd..eab416390 100644 --- a/.github/workflows/build_wheels_testing.yml +++ b/.github/workflows/build_wheels_testing.yml @@ -33,27 +33,33 @@ jobs: echo "release_tag" && echo $release_tag echo "pwd" && pwd echo "ls" && ls + echo "" cd apis/python && pip install . && cd ../.. 
+ echo "" echo "pwd" && pwd echo "ls" && ls echo "pip freeze" && pip freeze echo "pwd" && pwd echo "ls" && ls - python backwards-compatability-data/generate_data.py $release_tag + echo "" + echo "python backwards-compatability-data/generate_data.py $release_tag" && python backwards-compatability-data/generate_data.py $release_tag + echo "" echo "pwd" && pwd echo "ls" && ls echo "ls backwards-compatability-data" && ls backwards-compatability-data echo "ls backwards-compatability-data/data" && ls backwards-compatability-data/data echo "ls backwards-compatability-data/data/0.0.0" && ls backwards-compatability-data/data/0.0.0 - - - name: Commit and Push to Main Branch - run: | + echo "" echo "git status" && git status echo "git branch" && git branch - echo "git fetch origin main" && git fetch origin main - echo "git checkout main" && git checkout main + echo "git fetch origin jparismorgan/temp-testing" && git fetch origin jparismorgan/temp-testing + echo "git checkout jparismorgan/temp-testing" && git checkout jparismorgan/temp-testing echo "git status" && git status echo "git branch" && git branch + echo "git add backwards-compatability-data/data/" && git add backwards-compatability-data/data/ + echo "git commit -m '[automated] Update backwards-compatability-data for release $release_tag'" && git commit -m "[automated] Update backwards-compatability-data for release $release_tag" + echo "git push origin jparismorgan/temp-testing" && git push origin jparismorgan/temp-testing + echo "git status" && git status build_wheels: name: Build wheels on ${{ matrix.os }} @@ -66,4 +72,4 @@ jobs: steps: - name: Running! run: | - Running build_wheels \ No newline at end of file + "Running!" 
\ No newline at end of file From 0760ae4f36898de17017bcbe7556728ae79a3795 Mon Sep 17 00:00:00 2001 From: Paris Morgan Date: Thu, 11 Jan 2024 12:48:43 +0100 Subject: [PATCH 19/27] ci testing --- .github/workflows/build_wheels_testing.yml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/.github/workflows/build_wheels_testing.yml b/.github/workflows/build_wheels_testing.yml index eab416390..5009df7e0 100644 --- a/.github/workflows/build_wheels_testing.yml +++ b/.github/workflows/build_wheels_testing.yml @@ -13,6 +13,12 @@ jobs: steps: - name: Checkout code uses: actions/checkout@v3 + + # Based on https://github.com/TileDB-Inc/conda-forge-nightly-controller/blob/51519a0f8340b32cf737fcb59b76c6a91c42dc47/.github/workflows/activity.yml#L19C10-L19C10 + - name: Setup git + run: | + git config user.name "GitHub Actions" + git config user.email "runneradmin@users.noreply.github.com" - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v2 @@ -27,6 +33,8 @@ jobs: - name: Build Indexes run: | + echo "$GITHUB_ACTOR" + echo $GITHUB_ACTOR # TODO: test this on a branch with a real tag # release_tag=$(git describe --tags --abbrev=0) release_tag="0.0.0" From ac630eee3234a8ec5d6e055930d2c5d522c5aa9d Mon Sep 17 00:00:00 2001 From: Paris Morgan Date: Thu, 11 Jan 2024 13:02:58 +0100 Subject: [PATCH 20/27] fix create_sift_micro and try on new branch --- .github/workflows/build_wheels_testing.yml | 6 +++--- backwards-compatability-data/generate_data.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build_wheels_testing.yml b/.github/workflows/build_wheels_testing.yml index 5009df7e0..6bc68e806 100644 --- a/.github/workflows/build_wheels_testing.yml +++ b/.github/workflows/build_wheels_testing.yml @@ -60,13 +60,13 @@ jobs: echo "" echo "git status" && git status echo "git branch" && git branch - echo "git fetch origin jparismorgan/temp-testing" && git fetch origin jparismorgan/temp-testing - echo "git checkout 
jparismorgan/temp-testing" && git checkout jparismorgan/temp-testing + echo "git fetch origin jparismorgan/temp-testing-2" && git fetch origin jparismorgan/temp-testing-2 + echo "git checkout jparismorgan/temp-testing-2" && git checkout jparismorgan/temp-testing-2 echo "git status" && git status echo "git branch" && git branch echo "git add backwards-compatability-data/data/" && git add backwards-compatability-data/data/ echo "git commit -m '[automated] Update backwards-compatability-data for release $release_tag'" && git commit -m "[automated] Update backwards-compatability-data for release $release_tag" - echo "git push origin jparismorgan/temp-testing" && git push origin jparismorgan/temp-testing + echo "git push origin jparismorgan/temp-testing-2" && git push origin jparismorgan/temp-testing-2 echo "git status" && git status build_wheels: diff --git a/backwards-compatability-data/generate_data.py b/backwards-compatability-data/generate_data.py index 40a130f8f..cfea2166a 100644 --- a/backwards-compatability-data/generate_data.py +++ b/backwards-compatability-data/generate_data.py @@ -11,7 +11,7 @@ def create_sift_micro(): should just select vectors from this to query against the index with. 
''' script_dir = os.path.dirname(os.path.abspath(__file__)) - base_uri = "../apis/python/test/data/siftsmall/siftsmall_base.fvecs" + base_uri = os.path.join(script_dir, "..", "apis", "python", "test", "data", "siftsmall", "siftsmall_base.fvecs") write_fvecs(os.path.join(script_dir, "siftmicro_base.fvecs"), load_fvecs(base_uri)[:100]) def generate_release_data(version): From 204c410bf9e4c37b64b007a6e04cef039165e0c5 Mon Sep 17 00:00:00 2001 From: Paris Morgan Date: Wed, 17 Jan 2024 16:19:58 +0100 Subject: [PATCH 21/27] disable two other CI jobs during testing --- .github/workflows/check-formatting.yml | 50 +++++++++++++------------- .github/workflows/ci.yml | 44 +++++++++++------------ 2 files changed, 47 insertions(+), 47 deletions(-) diff --git a/.github/workflows/check-formatting.yml b/.github/workflows/check-formatting.yml index c1f79cc7f..ecbd14bb4 100644 --- a/.github/workflows/check-formatting.yml +++ b/.github/workflows/check-formatting.yml @@ -1,27 +1,27 @@ -name: Check formatting +# name: Check formatting -on: - push: - branches: - - main - paths-ignore: - - '_quarto.yml' - - 'quarto-materials/*' - - '**/.md' - pull_request: - branches: - - main - paths-ignore: - - '_quarto.yml' - - 'quarto-materials/*' - - '**/.md' +# on: +# push: +# branches: +# - main +# paths-ignore: +# - '_quarto.yml' +# - 'quarto-materials/*' +# - '**/.md' +# pull_request: +# branches: +# - main +# paths-ignore: +# - '_quarto.yml' +# - 'quarto-materials/*' +# - '**/.md' -jobs: - check-formatting: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - name: 'Check clang-format' - run: | - source $GITHUB_WORKSPACE/scripts/ci/check_clang_format.sh - shell: bash \ No newline at end of file +# jobs: +# check-formatting: +# runs-on: ubuntu-latest +# steps: +# - uses: actions/checkout@v3 +# - name: 'Check clang-format' +# run: | +# source $GITHUB_WORKSPACE/scripts/ci/check_clang_format.sh +# shell: bash \ No newline at end of file diff --git a/.github/workflows/ci.yml 
b/.github/workflows/ci.yml index d413ff42c..3383d3d71 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,24 +1,24 @@ -name: CI +# name: CI -on: - push: - branches: [main] - pull_request: - branches: [main] +# on: +# push: +# branches: [main] +# pull_request: +# branches: [main] -jobs: - run-tests: - strategy: - matrix: - os: [ubuntu-latest] - runs-on: ${{ matrix.os }} - steps: - - name: Install OpenBLAS - run: sudo apt install libopenblas-dev - - uses: actions/checkout@v3 - - name: Configure CMake - run: cmake -S ./src -B ./src/build -DCMAKE_BUILD_TYPE=Debug -DTILEDB_VS_ENABLE_BLAS=ON - - name: Build - run: cmake --build ./src/build -j3 - - name: Run Tests - run: cmake --build ./src/build --target check-ci +# jobs: +# run-tests: +# strategy: +# matrix: +# os: [ubuntu-latest] +# runs-on: ${{ matrix.os }} +# steps: +# - name: Install OpenBLAS +# run: sudo apt install libopenblas-dev +# - uses: actions/checkout@v3 +# - name: Configure CMake +# run: cmake -S ./src -B ./src/build -DCMAKE_BUILD_TYPE=Debug -DTILEDB_VS_ENABLE_BLAS=ON +# - name: Build +# run: cmake --build ./src/build -j3 +# - name: Run Tests +# run: cmake --build ./src/build --target check-ci From 898e87ebaf6a5758aca3d07345328b21c1f1242e Mon Sep 17 00:00:00 2001 From: Paris Morgan Date: Wed, 17 Jan 2024 16:26:07 +0100 Subject: [PATCH 22/27] test with updated release tag --- .github/workflows/build_wheels_testing.yml | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/.github/workflows/build_wheels_testing.yml b/.github/workflows/build_wheels_testing.yml index 6bc68e806..615c1dc43 100644 --- a/.github/workflows/build_wheels_testing.yml +++ b/.github/workflows/build_wheels_testing.yml @@ -35,14 +35,18 @@ jobs: run: | echo "$GITHUB_ACTOR" echo $GITHUB_ACTOR + # TODO: test this on a branch with a real tag # release_tag=$(git describe --tags --abbrev=0) - release_tag="0.0.0" + release_tag="0.0.1" echo "release_tag" && echo $release_tag + echo "pwd" && pwd echo 
"ls" && ls echo "" + cd apis/python && pip install . && cd ../.. + echo "" echo "pwd" && pwd echo "ls" && ls @@ -50,7 +54,9 @@ jobs: echo "pwd" && pwd echo "ls" && ls echo "" + echo "python backwards-compatability-data/generate_data.py $release_tag" && python backwards-compatability-data/generate_data.py $release_tag + echo "" echo "pwd" && pwd echo "ls" && ls @@ -60,13 +66,18 @@ jobs: echo "" echo "git status" && git status echo "git branch" && git branch + echo "git fetch origin jparismorgan/temp-testing-2" && git fetch origin jparismorgan/temp-testing-2 + echo "git checkout jparismorgan/temp-testing-2" && git checkout jparismorgan/temp-testing-2 + echo "git status" && git status echo "git branch" && git branch + echo "git add backwards-compatability-data/data/" && git add backwards-compatability-data/data/ echo "git commit -m '[automated] Update backwards-compatability-data for release $release_tag'" && git commit -m "[automated] Update backwards-compatability-data for release $release_tag" echo "git push origin jparismorgan/temp-testing-2" && git push origin jparismorgan/temp-testing-2 + echo "git status" && git status build_wheels: From d1f6bbe3d75a17a541d970892dff2a5b5a84dc65 Mon Sep 17 00:00:00 2001 From: Paris Morgan Date: Wed, 17 Jan 2024 16:30:43 +0100 Subject: [PATCH 23/27] fix broken log line --- .github/workflows/build_wheels_testing.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/build_wheels_testing.yml b/.github/workflows/build_wheels_testing.yml index 615c1dc43..dc42a5545 100644 --- a/.github/workflows/build_wheels_testing.yml +++ b/.github/workflows/build_wheels_testing.yml @@ -62,7 +62,6 @@ jobs: echo "ls" && ls echo "ls backwards-compatability-data" && ls backwards-compatability-data echo "ls backwards-compatability-data/data" && ls backwards-compatability-data/data - echo "ls backwards-compatability-data/data/0.0.0" && ls backwards-compatability-data/data/0.0.0 echo "" echo "git status" && git status echo "git branch" && 
git branch From 79b4b59b5f2e8a0726cbaa4dce432bd39b5a8aee Mon Sep 17 00:00:00 2001 From: Paris Morgan Date: Fri, 19 Jan 2024 12:16:33 +0100 Subject: [PATCH 24/27] add back commented out ci files, fix typo --- .github/workflows/build-wheels.yml | 108 +++++++++--------- .github/workflows/build_wheels_testing.yml | 14 +-- .github/workflows/check-formatting.yml | 50 ++++---- .github/workflows/ci-python.yml | 86 +++++++------- .github/workflows/ci.yml | 44 +++---- .github/workflows/quarto-render.yml | 100 ++++++++-------- .../test/test_backwards_compatability.py | 6 +- .../README.md | 2 +- .../generate_data.py | 0 .../siftmicro_base.fvecs | Bin 10 files changed, 205 insertions(+), 205 deletions(-) rename {backwards-compatability-data => backwards-compatibility-data}/README.md (94%) rename {backwards-compatability-data => backwards-compatibility-data}/generate_data.py (100%) rename {backwards-compatability-data => backwards-compatibility-data}/siftmicro_base.fvecs (100%) diff --git a/.github/workflows/build-wheels.yml b/.github/workflows/build-wheels.yml index 9762570ae..f3bb76f7e 100644 --- a/.github/workflows/build-wheels.yml +++ b/.github/workflows/build-wheels.yml @@ -1,61 +1,61 @@ -# name: Build wheels +name: Build wheels -# on: -# push: -# branches: -# - release-* -# - '*wheel*' # must quote since "*" is a YAML reserved character; we want a string -# tags: -# - '*' -# pull_request: -# branches: -# - '*wheel*' # must quote since "*" is a YAML reserved character; we want a string +on: + push: + branches: + - release-* + - '*wheel*' # must quote since "*" is a YAML reserved character; we want a string + tags: + - '*' + pull_request: + branches: + - '*wheel*' # must quote since "*" is a YAML reserved character; we want a string -# jobs: -# build_wheels: -# name: Build wheels on ${{ matrix.os }} -# runs-on: ${{ matrix.os }} -# strategy: -# matrix: -# os: [ubuntu-22.04, macos-12, windows-2022] -# # `windows-2022, ` blocked by 
https://github.com/pybind/pybind11/issues/3445#issuecomment-1525500927 +jobs: + build_wheels: + name: Build wheels on ${{ matrix.os }} + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-22.04, macos-12, windows-2022] + # `windows-2022, ` blocked by https://github.com/pybind/pybind11/issues/3445#issuecomment-1525500927 -# steps: -# - uses: actions/checkout@v3 + steps: + - uses: actions/checkout@v3 -# - name: 'Brew setup on macOS' # x-ref c8e49ba8f8b9ce -# if: ${{ startsWith(matrix.os, 'macos-') == true }} -# run: | -# set -e pipefail -# brew install automake pkg-config ninja + - name: 'Brew setup on macOS' # x-ref c8e49ba8f8b9ce + if: ${{ startsWith(matrix.os, 'macos-') == true }} + run: | + set -e pipefail + brew install automake pkg-config ninja -# - name: Build wheels -# uses: pypa/cibuildwheel@v2.14.1 -# env: -# CIBW_MANYLINUX_X86_64_IMAGE: "manylinux_2_28" -# CIBW_SKIP: "*-win32 cp27-* cp35-* cp36-* cp37-* pp* *_i686 *musllinux*" -# CIBW_ARCHS_MACOS: "x86_64 arm64" -# CIBW_BUILD_VERBOSITY: 3 -# MACOSX_DEPLOYMENT_TARGET: "12.0" -# with: -# package-dir: "apis/python" -# output-dir: wheelhouse -# config-file: "{package}/pyproject.toml" + - name: Build wheels + uses: pypa/cibuildwheel@v2.14.1 + env: + CIBW_MANYLINUX_X86_64_IMAGE: "manylinux_2_28" + CIBW_SKIP: "*-win32 cp27-* cp35-* cp36-* cp37-* pp* *_i686 *musllinux*" + CIBW_ARCHS_MACOS: "x86_64 arm64" + CIBW_BUILD_VERBOSITY: 3 + MACOSX_DEPLOYMENT_TARGET: "12.0" + with: + package-dir: "apis/python" + output-dir: wheelhouse + config-file: "{package}/pyproject.toml" -# - uses: actions/upload-artifact@v3 -# with: -# path: ./wheelhouse/*.whl + - uses: actions/upload-artifact@v3 + with: + path: ./wheelhouse/*.whl -# # TODO: Needs support for pulling in the root directory -# # build_sdist: -# # name: Build source distribution -# # runs-on: ubuntu-latest -# # steps: -# # - uses: actions/checkout@v3 -# # -# # - name: Build sdist -# # run: pipx run build --sdist -# # -# # - uses: actions/upload-artifact@v3 -# 
# with: -# # path: dist/*.tar.gz +# TODO: Needs support for pulling in the root directory +# build_sdist: +# name: Build source distribution +# runs-on: ubuntu-latest +# steps: +# - uses: actions/checkout@v3 +# +# - name: Build sdist +# run: pipx run build --sdist +# +# - uses: actions/upload-artifact@v3 +# with: +# path: dist/*.tar.gz diff --git a/.github/workflows/build_wheels_testing.yml b/.github/workflows/build_wheels_testing.yml index dc42a5545..160fce1d2 100644 --- a/.github/workflows/build_wheels_testing.yml +++ b/.github/workflows/build_wheels_testing.yml @@ -7,7 +7,7 @@ on: branches: [main] jobs: - generate_backwards_compatability_data: + generate_backwards_compatibility_data: name: Generate Backwards Compatibility Data runs-on: ubuntu-latest steps: @@ -55,13 +55,13 @@ jobs: echo "ls" && ls echo "" - echo "python backwards-compatability-data/generate_data.py $release_tag" && python backwards-compatability-data/generate_data.py $release_tag + echo "python backwards-compatibility-data/generate_data.py $release_tag" && python backwards-compatibility-data/generate_data.py $release_tag echo "" echo "pwd" && pwd echo "ls" && ls - echo "ls backwards-compatability-data" && ls backwards-compatability-data - echo "ls backwards-compatability-data/data" && ls backwards-compatability-data/data + echo "ls backwards-compatibility-data" && ls backwards-compatibility-data + echo "ls backwards-compatibility-data/data" && ls backwards-compatibility-data/data echo "" echo "git status" && git status echo "git branch" && git branch @@ -73,15 +73,15 @@ jobs: echo "git status" && git status echo "git branch" && git branch - echo "git add backwards-compatability-data/data/" && git add backwards-compatability-data/data/ - echo "git commit -m '[automated] Update backwards-compatability-data for release $release_tag'" && git commit -m "[automated] Update backwards-compatability-data for release $release_tag" + echo "git add backwards-compatibility-data/data/" && git add 
backwards-compatibility-data/data/ + echo "git commit -m '[automated] Update backwards-compatibility-data for release $release_tag'" && git commit -m "[automated] Update backwards-compatibility-data for release $release_tag" echo "git push origin jparismorgan/temp-testing-2" && git push origin jparismorgan/temp-testing-2 echo "git status" && git status build_wheels: name: Build wheels on ${{ matrix.os }} - needs: generate_backwards_compatability_data + needs: generate_backwards_compatibility_data runs-on: ${{ matrix.os }} strategy: matrix: diff --git a/.github/workflows/check-formatting.yml b/.github/workflows/check-formatting.yml index ecbd14bb4..c1f79cc7f 100644 --- a/.github/workflows/check-formatting.yml +++ b/.github/workflows/check-formatting.yml @@ -1,27 +1,27 @@ -# name: Check formatting +name: Check formatting -# on: -# push: -# branches: -# - main -# paths-ignore: -# - '_quarto.yml' -# - 'quarto-materials/*' -# - '**/.md' -# pull_request: -# branches: -# - main -# paths-ignore: -# - '_quarto.yml' -# - 'quarto-materials/*' -# - '**/.md' +on: + push: + branches: + - main + paths-ignore: + - '_quarto.yml' + - 'quarto-materials/*' + - '**/.md' + pull_request: + branches: + - main + paths-ignore: + - '_quarto.yml' + - 'quarto-materials/*' + - '**/.md' -# jobs: -# check-formatting: -# runs-on: ubuntu-latest -# steps: -# - uses: actions/checkout@v3 -# - name: 'Check clang-format' -# run: | -# source $GITHUB_WORKSPACE/scripts/ci/check_clang_format.sh -# shell: bash \ No newline at end of file +jobs: + check-formatting: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: 'Check clang-format' + run: | + source $GITHUB_WORKSPACE/scripts/ci/check_clang_format.sh + shell: bash \ No newline at end of file diff --git a/.github/workflows/ci-python.yml b/.github/workflows/ci-python.yml index ce28e886a..91b516559 100644 --- a/.github/workflows/ci-python.yml +++ b/.github/workflows/ci-python.yml @@ -1,45 +1,45 @@ -# name: Python API CI +name: Python 
API CI -# on: -# push: -# branches: [main] -# pull_request: -# branches: [main] +on: + push: + branches: [main] + pull_request: + branches: [main] -# jobs: -# run-tests: -# strategy: -# matrix: -# os: [ubuntu-latest] -# python-version: ["3.9"] -# runs-on: ${{ matrix.os }} -# steps: -# - name: Install OpenBLAS -# run: sudo apt install libopenblas-dev -# - uses: actions/checkout@v3 -# - name: Set up Python ${{ matrix.python-version }} -# uses: actions/setup-python@v2 -# with: -# python-version: ${{ matrix.python-version }} -# - name: Print Python version -# run: | -# which python -# which pip -# python --version -# - name: Build and test python -# run: | -# cd apis/python -# pip install .[test] -# pytest -# # TODO: fix editable on linux -# #pip uninstall -y tiledb.vector_search -# #pip install -e . -# #pytest -# pip install -r test/ipynb/requirements.txt -# pytest --nbmake test/ipynb -# env: -# TILEDB_REST_TOKEN: ${{ secrets.TILEDB_CLOUD_HELPER_VAR }} -# shell: bash -el {0} -# - name: List Installed Python Packages -# run: | -# pip freeze +jobs: + run-tests: + strategy: + matrix: + os: [ubuntu-latest] + python-version: ["3.9"] + runs-on: ${{ matrix.os }} + steps: + - name: Install OpenBLAS + run: sudo apt install libopenblas-dev + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - name: Print Python version + run: | + which python + which pip + python --version + - name: Build and test python + run: | + cd apis/python + pip install .[test] + pytest + # TODO: fix editable on linux + #pip uninstall -y tiledb.vector_search + #pip install -e . 
+ #pytest + pip install -r test/ipynb/requirements.txt + pytest --nbmake test/ipynb + env: + TILEDB_REST_TOKEN: ${{ secrets.TILEDB_CLOUD_HELPER_VAR }} + shell: bash -el {0} + - name: List Installed Python Packages + run: | + pip freeze diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3383d3d71..d413ff42c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,24 +1,24 @@ -# name: CI +name: CI -# on: -# push: -# branches: [main] -# pull_request: -# branches: [main] +on: + push: + branches: [main] + pull_request: + branches: [main] -# jobs: -# run-tests: -# strategy: -# matrix: -# os: [ubuntu-latest] -# runs-on: ${{ matrix.os }} -# steps: -# - name: Install OpenBLAS -# run: sudo apt install libopenblas-dev -# - uses: actions/checkout@v3 -# - name: Configure CMake -# run: cmake -S ./src -B ./src/build -DCMAKE_BUILD_TYPE=Debug -DTILEDB_VS_ENABLE_BLAS=ON -# - name: Build -# run: cmake --build ./src/build -j3 -# - name: Run Tests -# run: cmake --build ./src/build --target check-ci +jobs: + run-tests: + strategy: + matrix: + os: [ubuntu-latest] + runs-on: ${{ matrix.os }} + steps: + - name: Install OpenBLAS + run: sudo apt install libopenblas-dev + - uses: actions/checkout@v3 + - name: Configure CMake + run: cmake -S ./src -B ./src/build -DCMAKE_BUILD_TYPE=Debug -DTILEDB_VS_ENABLE_BLAS=ON + - name: Build + run: cmake --build ./src/build -j3 + - name: Run Tests + run: cmake --build ./src/build --target check-ci diff --git a/.github/workflows/quarto-render.yml b/.github/workflows/quarto-render.yml index aaf0435c9..df7a2e405 100644 --- a/.github/workflows/quarto-render.yml +++ b/.github/workflows/quarto-render.yml @@ -1,55 +1,55 @@ -# # Cloned from https://github.com/TileDB-Inc/tiledb-quarto-template +# Cloned from https://github.com/TileDB-Inc/tiledb-quarto-template -# name: Render and deploy Quarto files -# on: -# push: -# pull_request: +name: Render and deploy Quarto files +on: + push: + pull_request: -# jobs: -# 
quarto-render-and-deploy: -# strategy: -# matrix: -# os: [ubuntu-latest] -# python-version: ["3.9"] -# runs-on: ${{ matrix.os }} -# steps: -# - name: Install OpenBLAS -# run: sudo apt install libopenblas-dev -# - name: Set up Python ${{ matrix.python-version }} -# uses: actions/setup-python@v2 -# with: -# python-version: ${{ matrix.python-version }} -# - name: Print Python version -# run: | -# which python -# which pip -# python --version -# - name: Install pybind11 -# run: pip install "pybind11[global]" "pydantic<2" -# - uses: actions/checkout@v3 -# - name: Install -# run: cd apis/python && pip install . +jobs: + quarto-render-and-deploy: + strategy: + matrix: + os: [ubuntu-latest] + python-version: ["3.9"] + runs-on: ${{ matrix.os }} + steps: + - name: Install OpenBLAS + run: sudo apt install libopenblas-dev + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - name: Print Python version + run: | + which python + which pip + python --version + - name: Install pybind11 + run: pip install "pybind11[global]" "pydantic<2" + - uses: actions/checkout@v3 + - name: Install + run: cd apis/python && pip install . 
-# - name: "Install Quarto" -# uses: quarto-dev/quarto-actions/setup@v2 + - name: "Install Quarto" + uses: quarto-dev/quarto-actions/setup@v2 -# - name: "Quarto render" -# shell: bash -# run: | -# pip install quartodoc PyYAML click "griffe<0.33" # incompatible -# # create a symlink to the tiledbvcf python package, so it doesn't have to be installed -# #ln -s apis/python/src/tiledb/vector_search -# quartodoc build -# quarto render --fail-if-warnings -# # https://github.com/quarto-dev/quarto-cli/issues/493 + - name: "Quarto render" + shell: bash + run: | + pip install quartodoc PyYAML click "griffe<0.33" # incompatible + # create a symlink to the tiledbvcf python package, so it doesn't have to be installed + #ln -s apis/python/src/tiledb/vector_search + quartodoc build + quarto render --fail-if-warnings + # https://github.com/quarto-dev/quarto-cli/issues/493 -# - name: "Deploy to gh-pages" -# uses: peaceiris/actions-gh-pages@v3 -# # Change to the name of your repo's primary branch name: -# if: github.ref == 'refs/heads/main' -# with: -# # This is GitHub Actions magic; no secrets for us to manage; and this works first-time -# # without any extra configs other than visiting Settings -> Pages in your GitHub repo. -# github_token: ${{ secrets.GITHUB_TOKEN }} -# publish_dir: docs -# destination_dir: docs + - name: "Deploy to gh-pages" + uses: peaceiris/actions-gh-pages@v3 + # Change to the name of your repo's primary branch name: + if: github.ref == 'refs/heads/main' + with: + # This is GitHub Actions magic; no secrets for us to manage; and this works first-time + # without any extra configs other than visiting Settings -> Pages in your GitHub repo. 
+ github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: docs + destination_dir: docs diff --git a/apis/python/test/test_backwards_compatability.py b/apis/python/test/test_backwards_compatability.py index 70bb8352d..7f94a9ddf 100644 --- a/apis/python/test/test_backwards_compatability.py +++ b/apis/python/test/test_backwards_compatability.py @@ -82,9 +82,9 @@ def test_create_and_query_indices_with_old_storage_versions(tmp_path): # ''' # Tests that current code can query indices which were written to disk by old code. # ''' -# backwards_compatability_path = os.path.join(os.path.dirname(__file__), '..', '..', '..', 'backwards-compatability-data') -# datasets_path = os.path.join(backwards_compatability_path, 'data') -# base = load_fvecs(os.path.join(backwards_compatability_path, 'siftmicro_base.fvecs')) +# backwards_compatibility_path = os.path.join(os.path.dirname(__file__), '..', '..', '..', 'backwards-compatibility-data') +# datasets_path = os.path.join(backwards_compatibility_path, 'data') +# base = load_fvecs(os.path.join(backwards_compatibility_path, 'siftmicro_base.fvecs')) # query_indices = [0, 3, 4, 8, 10, 19, 28, 31, 39, 40, 41, 47, 49, 50, 56, 64, 68, 70, 71, 79, 82, 89, 90, 94] # queries = base[query_indices] diff --git a/backwards-compatability-data/README.md b/backwards-compatibility-data/README.md similarity index 94% rename from backwards-compatability-data/README.md rename to backwards-compatibility-data/README.md index 6d3425167..8b5a609dd 100644 --- a/backwards-compatability-data/README.md +++ b/backwards-compatibility-data/README.md @@ -7,4 +7,4 @@ To generate new data, run: This will create a new folder in the `data` directory with the version. This folder will contain the arrays built by the current version of TileDB-Vector-Search. 
To run a backwards compability test, run: -- `cd ~/repo/TileDB-Vector-Search && pytest apis/python/test/test_backwards_compatability.py -s` \ No newline at end of file +- `cd ~/repo/TileDB-Vector-Search && pytest apis/python/test/test_backwards_compatibility.py -s` \ No newline at end of file diff --git a/backwards-compatability-data/generate_data.py b/backwards-compatibility-data/generate_data.py similarity index 100% rename from backwards-compatability-data/generate_data.py rename to backwards-compatibility-data/generate_data.py diff --git a/backwards-compatability-data/siftmicro_base.fvecs b/backwards-compatibility-data/siftmicro_base.fvecs similarity index 100% rename from backwards-compatability-data/siftmicro_base.fvecs rename to backwards-compatibility-data/siftmicro_base.fvecs From 1e83943400c380a42a805423b856e0d96b964c2b Mon Sep 17 00:00:00 2001 From: Paris Morgan Date: Fri, 19 Jan 2024 12:26:07 +0100 Subject: [PATCH 25/27] update build-wheels.yml --- .github/workflows/build-wheels.yml | 45 +++++++++++ .github/workflows/build_wheels_testing.yml | 93 ---------------------- 2 files changed, 45 insertions(+), 93 deletions(-) delete mode 100644 .github/workflows/build_wheels_testing.yml diff --git a/.github/workflows/build-wheels.yml b/.github/workflows/build-wheels.yml index f3bb76f7e..1e1029eda 100644 --- a/.github/workflows/build-wheels.yml +++ b/.github/workflows/build-wheels.yml @@ -12,8 +12,53 @@ on: - '*wheel*' # must quote since "*" is a YAML reserved character; we want a string jobs: + generate_backwards_compatibility_data: + name: Generate Backwards Compatibility Data + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v3 + + # Based on https://github.com/TileDB-Inc/conda-forge-nightly-controller/blob/51519a0f8340b32cf737fcb59b76c6a91c42dc47/.github/workflows/activity.yml#L19C10-L19C10 + - name: Setup git + run: | + git config user.name "GitHub Actions" + git config user.email "runneradmin@users.noreply.github.com" + + 
- name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: "3.9" + + - name: Print Python version + run: | + which python + which pip + python --version + + - name: Build Indexes + run: | + # Get the release tag. + release_tag=$(git describe --tags --abbrev=0) + echo $release_tag + + # Install dependencies. + cd apis/python && pip install . && cd ../.. + + # Generate data. + python backwards-compatibility-data/generate_data.py $release_tag + + # Push this data to main. + git fetch origin main + git checkout main + git add backwards-compatibility-data/data/ + git commit -m "[automated] Update backwards-compatibility-data for release $release_tag" + git push origin main + build_wheels: name: Build wheels on ${{ matrix.os }} + # TODO(paris): Add this back once generate_backwards_compatibility_data is confirmed to work. + # needs: generate_backwards_compatibility_data runs-on: ${{ matrix.os }} strategy: matrix: diff --git a/.github/workflows/build_wheels_testing.yml b/.github/workflows/build_wheels_testing.yml deleted file mode 100644 index 160fce1d2..000000000 --- a/.github/workflows/build_wheels_testing.yml +++ /dev/null @@ -1,93 +0,0 @@ -name: Test Build wheels - -on: - push: - branches: [main] - pull_request: - branches: [main] - -jobs: - generate_backwards_compatibility_data: - name: Generate Backwards Compatibility Data - runs-on: ubuntu-latest - steps: - - name: Checkout code - uses: actions/checkout@v3 - - # Based on https://github.com/TileDB-Inc/conda-forge-nightly-controller/blob/51519a0f8340b32cf737fcb59b76c6a91c42dc47/.github/workflows/activity.yml#L19C10-L19C10 - - name: Setup git - run: | - git config user.name "GitHub Actions" - git config user.email "runneradmin@users.noreply.github.com" - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 - with: - python-version: "3.9" - - - name: Print Python version - run: | - which python - which pip - python --version - - - name: 
Build Indexes - run: | - echo "$GITHUB_ACTOR" - echo $GITHUB_ACTOR - - # TODO: test this on a branch with a real tag - # release_tag=$(git describe --tags --abbrev=0) - release_tag="0.0.1" - echo "release_tag" && echo $release_tag - - echo "pwd" && pwd - echo "ls" && ls - echo "" - - cd apis/python && pip install . && cd ../.. - - echo "" - echo "pwd" && pwd - echo "ls" && ls - echo "pip freeze" && pip freeze - echo "pwd" && pwd - echo "ls" && ls - echo "" - - echo "python backwards-compatibility-data/generate_data.py $release_tag" && python backwards-compatibility-data/generate_data.py $release_tag - - echo "" - echo "pwd" && pwd - echo "ls" && ls - echo "ls backwards-compatibility-data" && ls backwards-compatibility-data - echo "ls backwards-compatibility-data/data" && ls backwards-compatibility-data/data - echo "" - echo "git status" && git status - echo "git branch" && git branch - - echo "git fetch origin jparismorgan/temp-testing-2" && git fetch origin jparismorgan/temp-testing-2 - - echo "git checkout jparismorgan/temp-testing-2" && git checkout jparismorgan/temp-testing-2 - - echo "git status" && git status - echo "git branch" && git branch - - echo "git add backwards-compatibility-data/data/" && git add backwards-compatibility-data/data/ - echo "git commit -m '[automated] Update backwards-compatibility-data for release $release_tag'" && git commit -m "[automated] Update backwards-compatibility-data for release $release_tag" - echo "git push origin jparismorgan/temp-testing-2" && git push origin jparismorgan/temp-testing-2 - - echo "git status" && git status - - build_wheels: - name: Build wheels on ${{ matrix.os }} - needs: generate_backwards_compatibility_data - runs-on: ${{ matrix.os }} - strategy: - matrix: - os: [ubuntu-22.04] - - steps: - - name: Running! - run: | - "Running!" 
\ No newline at end of file From 25ef67da2a7a55fc69477dc022decd71fce03817 Mon Sep 17 00:00:00 2001 From: Paris Morgan Date: Fri, 19 Jan 2024 12:46:37 +0100 Subject: [PATCH 26/27] cleanup code --- .github/workflows/build-wheels.yml | 1 + .github/workflows/ci-python.yml | 3 -- apis/python/test/common.py | 2 - ...ity.py => test_backwards_compatibility.py} | 52 +++++++++---------- apis/python/test/test_ingestion.py | 2 + backwards-compatibility-data/README.md | 17 ++++-- 6 files changed, 42 insertions(+), 35 deletions(-) rename apis/python/test/{test_backwards_compatability.py => test_backwards_compatibility.py} (69%) diff --git a/.github/workflows/build-wheels.yml b/.github/workflows/build-wheels.yml index 1e1029eda..e83e99a02 100644 --- a/.github/workflows/build-wheels.yml +++ b/.github/workflows/build-wheels.yml @@ -1,6 +1,7 @@ name: Build wheels on: + workflow_dispatch: push: branches: - release-* diff --git a/.github/workflows/ci-python.yml b/.github/workflows/ci-python.yml index 91b516559..e66d35aea 100644 --- a/.github/workflows/ci-python.yml +++ b/.github/workflows/ci-python.yml @@ -40,6 +40,3 @@ jobs: env: TILEDB_REST_TOKEN: ${{ secrets.TILEDB_CLOUD_HELPER_VAR }} shell: bash -el {0} - - name: List Installed Python Packages - run: | - pip freeze diff --git a/apis/python/test/common.py b/apis/python/test/common.py index 310f30ce4..96f19093d 100644 --- a/apis/python/test/common.py +++ b/apis/python/test/common.py @@ -7,8 +7,6 @@ import tiledb from tiledb.vector_search.storage_formats import storage_formats, STORAGE_VERSION -MAX_UINT64 = np.iinfo(np.dtype("uint64")).max - def xbin_mmap(fname, dtype): n, d = map(int, np.fromfile(fname, dtype="uint32", count=2)) assert os.stat(fname).st_size == 8 + n * d * np.dtype(dtype).itemsize diff --git a/apis/python/test/test_backwards_compatability.py b/apis/python/test/test_backwards_compatibility.py similarity index 69% rename from apis/python/test/test_backwards_compatability.py rename to 
apis/python/test/test_backwards_compatibility.py index 7f94a9ddf..8f8839feb 100644 --- a/apis/python/test/test_backwards_compatability.py +++ b/apis/python/test/test_backwards_compatibility.py @@ -78,33 +78,33 @@ def test_create_and_query_indices_with_old_storage_versions(tmp_path): _, result = index_ram.query(queries, k=k) assert accuracy(result, gt_i) > MINIMUM_ACCURACY -# def test_query_old_indices(): -# ''' -# Tests that current code can query indices which were written to disk by old code. -# ''' -# backwards_compatibility_path = os.path.join(os.path.dirname(__file__), '..', '..', '..', 'backwards-compatibility-data') -# datasets_path = os.path.join(backwards_compatibility_path, 'data') -# base = load_fvecs(os.path.join(backwards_compatibility_path, 'siftmicro_base.fvecs')) -# query_indices = [0, 3, 4, 8, 10, 19, 28, 31, 39, 40, 41, 47, 49, 50, 56, 64, 68, 70, 71, 79, 82, 89, 90, 94] -# queries = base[query_indices] +def test_query_old_indices(): + ''' + Tests that current code can query indices which were written to disk by old code. 
+ ''' + backwards_compatibility_path = os.path.join(os.path.dirname(__file__), '..', '..', '..', 'backwards-compatibility-data') + datasets_path = os.path.join(backwards_compatibility_path, 'data') + base = load_fvecs(os.path.join(backwards_compatibility_path, 'siftmicro_base.fvecs')) + query_indices = [0, 3, 4, 8, 10, 19, 28, 31, 39, 40, 41, 47, 49, 50, 56, 64, 68, 70, 71, 79, 82, 89, 90, 94] + queries = base[query_indices] -# for directory_name in os.listdir(datasets_path): -# version_path = os.path.join(datasets_path, directory_name) -# if not os.path.isdir(version_path): -# continue + for directory_name in os.listdir(datasets_path): + version_path = os.path.join(datasets_path, directory_name) + if not os.path.isdir(version_path): + continue -# for index_name in os.listdir(version_path): -# index_uri = os.path.join(version_path, index_name) -# if not os.path.isdir(index_uri): -# continue + for index_name in os.listdir(version_path): + index_uri = os.path.join(version_path, index_name) + if not os.path.isdir(index_uri): + continue -# if "ivf_flat" in index_name: -# index = IVFFlatIndex(uri=index_uri) -# elif "flat" in index_name: -# index = FlatIndex(uri=index_uri) -# else: -# assert False, f"Unknown index name: {index_name}" + if "ivf_flat" in index_name: + index = IVFFlatIndex(uri=index_uri) + elif "flat" in index_name: + index = FlatIndex(uri=index_uri) + else: + assert False, f"Unknown index name: {index_name}" -# result_d, result_i = index.query(queries, k=1) -# assert query_indices == result_i.flatten().tolist() -# assert result_d.flatten().tolist() == [0 for _ in range(len(query_indices))] \ No newline at end of file + result_d, result_i = index.query(queries, k=1) + assert query_indices == result_i.flatten().tolist() + assert result_d.flatten().tolist() == [0 for _ in range(len(query_indices))] \ No newline at end of file diff --git a/apis/python/test/test_ingestion.py b/apis/python/test/test_ingestion.py index 184cb7fbe..b23be0df9 100644 --- 
a/apis/python/test/test_ingestion.py +++ b/apis/python/test/test_ingestion.py @@ -12,6 +12,8 @@ from tiledb.vector_search.utils import load_fvecs MINIMUM_ACCURACY = 0.85 +MAX_UINT64 = np.iinfo(np.dtype("uint64")).max + def query_and_check_equals(index, queries, expected_result_d, expected_result_i): result_d, result_i = index.query(queries, k=1) check_equals(result_d=result_d, result_i=result_i, expected_result_d=expected_result_d, expected_result_i=expected_result_i) diff --git a/backwards-compatibility-data/README.md b/backwards-compatibility-data/README.md index 8b5a609dd..f88072abb 100644 --- a/backwards-compatibility-data/README.md +++ b/backwards-compatibility-data/README.md @@ -3,8 +3,17 @@ This folder contains test indices built using different versions of TileDB-Vecto ### Usage To generate new data, run: -- `python generate_data.py x.x.x` -This will create a new folder in the `data` directory with the version. This folder will contain the arrays built by the current version of TileDB-Vector-Search. +```bash +cd apis/python +pip install . +cd ../.. +python generate_data.py my_version +``` +This will build new indexes and save them to `backwards-compatibility-data/data/my_version`. 
-To run a backwards compability test, run: -- `cd ~/repo/TileDB-Vector-Search && pytest apis/python/test/test_backwards_compatibility.py -s` \ No newline at end of file +To run the backwards compatibility test: +```bash +cd apis/python +pip install ".[test]" +pytest test/test_backwards_compatibility.py -s +``` \ No newline at end of file From e3fcc5a11d152f43acfe85d3c51ca6231d8b3902 Mon Sep 17 00:00:00 2001 From: Paris Morgan Date: Fri, 19 Jan 2024 12:48:15 +0100 Subject: [PATCH 27/27] cleanup code --- .../test/test_backwards_compatibility.py | 72 ------------------- 1 file changed, 72 deletions(-) diff --git a/apis/python/test/test_backwards_compatibility.py b/apis/python/test/test_backwards_compatibility.py index 8f8839feb..967a01793 100644 --- a/apis/python/test/test_backwards_compatibility.py +++ b/apis/python/test/test_backwards_compatibility.py @@ -1,83 +1,11 @@ -import numpy as np from common import * -import pytest from tiledb.vector_search.flat_index import FlatIndex -from tiledb.vector_search.ingestion import ingest from tiledb.vector_search.ivf_flat_index import IVFFlatIndex from tiledb.vector_search.utils import load_fvecs MINIMUM_ACCURACY = 0.85 -def test_create_and_query_indices_with_old_storage_versions(tmp_path): - ''' - Tests that the current code can create indices using older storage version formats and then - query them.
- ''' - dataset_dir = os.path.join(tmp_path, "dataset") - k = 10 - size = 1000 - partitions = 10 - dimensions = 128 - nqueries = 100 - data = create_random_dataset_u8(nb=size, d=dimensions, nq=nqueries, k=k, path=dataset_dir) - source_uri = os.path.join(dataset_dir, "data.u8bin") - - dtype = np.uint8 - queries = get_queries(dataset_dir, dtype=dtype) - gt_i, _ = get_groundtruth(dataset_dir, k) - - indexes = ["FLAT", "IVF_FLAT"] - index_classes = [FlatIndex, IVFFlatIndex] - index_files = [tiledb.vector_search.flat_index, tiledb.vector_search.ivf_flat_index] - for index_type, index_class, index_file in zip(indexes, index_classes, index_files): - # First we test with an invalid storage version. - with pytest.raises(ValueError) as error: - index_uri = os.path.join(tmp_path, f"array_{index_type}_invalid") - ingest( - index_type=index_type, - index_uri=index_uri, - source_uri=source_uri, - partitions=partitions, - storage_version="Foo" - ) - assert "Invalid storage version" in str(error.value) - - with pytest.raises(ValueError) as error: - index_file.create(uri=index_uri, dimensions=3, vector_type=np.dtype(dtype), storage_version="Foo") - assert "Invalid storage version" in str(error.value) - - # Then we test with valid storage versions. 
- for storage_version, _ in tiledb.vector_search.storage_formats.items(): - index_uri = os.path.join(tmp_path, f"array_{index_type}_{storage_version}") - index = ingest( - index_type=index_type, - index_uri=index_uri, - source_uri=source_uri, - partitions=partitions, - storage_version=storage_version - ) - _, result = index.query(queries, k=k) - assert accuracy(result, gt_i) >= MINIMUM_ACCURACY - - update_ids_offset = MAX_UINT64 - size - updated_ids = {} - for i in range(10): - index.delete(external_id=i) - index.update(vector=data[i].astype(dtype), external_id=i + update_ids_offset) - updated_ids[i] = i + update_ids_offset - - _, result = index.query(queries, k=k) - assert accuracy(result, gt_i, updated_ids=updated_ids) >= MINIMUM_ACCURACY - - index = index.consolidate_updates(retrain_index=True, partitions=20) - _, result = index.query(queries, k=k) - assert accuracy(result, gt_i, updated_ids=updated_ids) >= MINIMUM_ACCURACY - - index_ram = index_class(uri=index_uri) - _, result = index_ram.query(queries, k=k) - assert accuracy(result, gt_i) > MINIMUM_ACCURACY - def test_query_old_indices(): ''' Tests that current code can query indices which were written to disk by old code.