
add gt urls' #879

Workflow file for this run

# Contributed by @GuilhemN in https://github.com/erikbern/ann-benchmarks/pull/233
name: Billion-Scale ANN Benchmarks, NeurIPS 2023
on: [push, pull_request]

jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      # One CI job per (algorithm, dataset, track) combination.
      matrix:
        include:
          - algorithm: faiss
            dataset: random-filter-s
            track: filter
          - algorithm: wm_filter
            dataset: random-filter-s
            track: filter
          - algorithm: dhq
            dataset: random-filter-s
            track: filter
          - algorithm: hwtl_sdu_anns_filter
            dataset: random-filter-s
            track: filter
          - algorithm: parlayivf
            dataset: random-filter-s
            track: filter
          - algorithm: linscan
            dataset: sparse-small
            track: sparse
          - algorithm: nle
            dataset: sparse-small
            track: sparse
          - algorithm: diskann
            dataset: random-xs
            track: streaming
          - algorithm: diskann
            dataset: random-xs
            track: ood
          - algorithm: cufe
            dataset: random-s
            track: filter
          - algorithm: cufe
            dataset: sparse-small
            track: sparse
          - algorithm: vamana
            dataset: random-xs
            track: ood
          - algorithm: cufe
            dataset: random-xs
            track: ood
          - algorithm: fdufilterdiskann
            dataset: random-filter-s
            track: filter
          - algorithm: faissplus
            dataset: random-filter-s
            track: filter
          - algorithm: shnsw
            dataset: sparse-small
            track: sparse
          - algorithm: sustech-ood
            dataset: random-xs
            track: ood
          - algorithm: epsearch
            dataset: random-xs
            track: ood
          - algorithm: mysteryann
            dataset: random-xs
            track: ood
          - algorithm: hwtl_sdu_anns_stream
            dataset: random-xs
            track: streaming
          - algorithm: cufe
            dataset: random-xs
            track: streaming
          - algorithm: sustech-whu
            dataset: sparse-small
            track: sparse
          - algorithm: pyanns
            dataset: sparse-small
            track: sparse
          - algorithm: pyanns
            dataset: random-filter-s
            track: filter
          - algorithm: mysteryann-dif
            dataset: random-xs
            track: ood
          - algorithm: pyanns
            dataset: random-xs
            track: streaming
          - algorithm: ngt
            dataset: random-xs
            track: ood
          - algorithm: pyanns
            dataset: random-xs
            track: ood
          - algorithm: puck
            dataset: random-xs
            track: streaming
          - algorithm: puck
            dataset: random-filter-s
            track: filter
          - algorithm: puck
            dataset: random-xs
            track: ood
          - algorithm: pinecone
            dataset: random-filter-s
            track: filter
          - algorithm: zilliz
            dataset: random-xs
            track: ood
          - algorithm: zilliz
            dataset: sparse-small
            track: sparse
          - algorithm: zilliz
            dataset: random-filter-s
            track: filter
      # Keep the remaining combinations running even if one job fails.
      fail-fast: false
    steps:
      - uses: actions/checkout@v2 # Pull the repository
      - name: Set up Python 3.10
        uses: actions/setup-python@v2
        with:
          python-version: '3.10'
      - name: Install dependencies
        run: |
          pip install -r requirements_py3.10.txt
          python install.py --neurips23track $TRACK --algorithm $ALGO
        env:
          ALGO: ${{ matrix.algorithm }}
          TRACK: ${{ matrix.track }}
      - name: Run the benchmark
        run: |
          python create_dataset.py --dataset $DATASET
          python run.py --algorithm $ALGO --max-n-algorithms 2 --neurips23track $TRACK --dataset $DATASET --timeout 600
          sudo chmod -R 777 results/
          # Plots are only generated for the non-streaming tracks.
          if [ "$TRACK" != "streaming" ]
          then
            python plot.py --dataset $DATASET --neurips23track $TRACK --output plot.png
          fi
          python data_export.py --output test.csv
        env:
          ALGO: ${{ matrix.algorithm }}
          DATASET: ${{ matrix.dataset }}
          TRACK: ${{ matrix.track }}
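
The same commands can be run outside CI when debugging a single matrix entry. A minimal sketch for the faiss / random-filter-s / filter combination, assuming a Python 3.10 environment at the repository root and whatever container tooling install.py and run.py expect to find locally:

# Local sketch of one matrix entry: algorithm=faiss, dataset=random-filter-s, track=filter
pip install -r requirements_py3.10.txt
python install.py --neurips23track filter --algorithm faiss
python create_dataset.py --dataset random-filter-s
python run.py --algorithm faiss --max-n-algorithms 2 --neurips23track filter --dataset random-filter-s --timeout 600
python plot.py --dataset random-filter-s --neurips23track filter --output plot.png   # skipped in CI for the streaming track
python data_export.py --output test.csv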