Skip to content

Commit

Permalink
Merge branch 'cvg:master' into master
Browse files Browse the repository at this point in the history
  • Loading branch information
pablovela5620 authored Sep 12, 2024
2 parents 9ffee6e + b21ff20 commit b7a24d2
Show file tree
Hide file tree
Showing 62 changed files with 3,108 additions and 2,528 deletions.
4 changes: 4 additions & 0 deletions .flake8
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
[flake8]
max-line-length = 88
extend-ignore = E203
exclude = .git,__pycache__,build,.venv/
24 changes: 24 additions & 0 deletions .github/workflows/code-quality.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
name: Format and Lint Checks
on:
push:
branches:
- master
paths:
- '*.py'
pull_request:
types: [ assigned, opened, synchronize, reopened ]
jobs:
check:
name: Format and Lint Checks
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: '3.10'
cache: 'pip'
- run: python -m pip install --upgrade pip
- run: python -m pip install black==23.12.1 flake8 isort
- run: python -m flake8 hloc
- run: python -m isort hloc *.ipynb --check-only --diff
- run: python -m black hloc *.ipynb --check --diff
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,7 +1,10 @@
build
.venv
__pycache__
*.pyc
*.egg-info
.ipynb_checkpoints
outputs/
datasets/*
!datasets/sacre_coeur/
datasets/sacre_coeur/query
2 changes: 2 additions & 0 deletions .isort.cfg
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
[settings]
profile=black
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -95,7 +95,7 @@ We show in [`pipeline_SfM.ipynb`](https://nbviewer.jupyter.org/github/cvg/Hierar

- Supported local feature extractors: [SuperPoint](https://arxiv.org/abs/1712.07629), [DISK](https://arxiv.org/abs/2006.13566), [D2-Net](https://arxiv.org/abs/1905.03561), [SIFT](https://www.cs.ubc.ca/~lowe/papers/ijcv04.pdf), and [R2D2](https://arxiv.org/abs/1906.06195).
- Supported feature matchers: [SuperGlue](https://arxiv.org/abs/1911.11763), its faster follow-up [LightGlue](https://github.com/cvg/LightGlue), and nearest neighbor search with ratio test, distance test, and/or mutual check. hloc also supports dense matching with [LoFTR](https://github.com/zju3dv/LoFTR).
- Supported image retrieval: [NetVLAD](https://arxiv.org/abs/1511.07247), [AP-GeM/DIR](https://github.com/naver/deep-image-retrieval), [OpenIBL](https://github.com/yxgeee/OpenIBL), and [CosPlace](https://github.com/gmberton/CosPlace).
- Supported image retrieval: [NetVLAD](https://arxiv.org/abs/1511.07247), [AP-GeM/DIR](https://github.com/naver/deep-image-retrieval), [OpenIBL](https://github.com/yxgeee/OpenIBL), [CosPlace](https://github.com/gmberton/CosPlace) and [EigenPlaces](https://github.com/gmberton/EigenPlaces).

Using NetVLAD for retrieval, we obtain the following best results:

Expand Down
69 changes: 45 additions & 24 deletions demo.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -18,11 +18,18 @@
"%load_ext autoreload\n",
"%autoreload 2\n",
"import tqdm, tqdm.notebook\n",
"\n",
"tqdm.tqdm = tqdm.notebook.tqdm # notebook-friendly progress bars\n",
"from pathlib import Path\n",
"import numpy as np\n",
"\n",
"from hloc import extract_features, match_features, reconstruction, visualization, pairs_from_exhaustive\n",
"from hloc import (\n",
" extract_features,\n",
" match_features,\n",
" reconstruction,\n",
" visualization,\n",
" pairs_from_exhaustive,\n",
")\n",
"from hloc.visualization import plot_images, read_image\n",
"from hloc.utils import viz_3d"
]
Expand All @@ -43,17 +50,17 @@
"metadata": {},
"outputs": [],
"source": [
"images = Path('datasets/sacre_coeur')\n",
"outputs = Path('outputs/demo/')\n",
"images = Path(\"datasets/sacre_coeur\")\n",
"outputs = Path(\"outputs/demo/\")\n",
"!rm -rf $outputs\n",
"sfm_pairs = outputs / 'pairs-sfm.txt'\n",
"loc_pairs = outputs / 'pairs-loc.txt'\n",
"sfm_dir = outputs / 'sfm'\n",
"features = outputs / 'features.h5'\n",
"matches = outputs / 'matches.h5'\n",
"sfm_pairs = outputs / \"pairs-sfm.txt\"\n",
"loc_pairs = outputs / \"pairs-loc.txt\"\n",
"sfm_dir = outputs / \"sfm\"\n",
"features = outputs / \"features.h5\"\n",
"matches = outputs / \"matches.h5\"\n",
"\n",
"feature_conf = extract_features.confs['disk']\n",
"matcher_conf = match_features.confs['disk+lightglue']"
"feature_conf = extract_features.confs[\"disk\"]\n",
"matcher_conf = match_features.confs[\"disk+lightglue\"]"
]
},
{
Expand Down Expand Up @@ -90,7 +97,7 @@
}
],
"source": [
"references = [p.relative_to(images).as_posix() for p in (images / 'mapping/').iterdir()]\n",
"references = [p.relative_to(images).as_posix() for p in (images / \"mapping/\").iterdir()]\n",
"print(len(references), \"mapping images\")\n",
"plot_images([read_image(images / r) for r in references], dpi=25)"
]
Expand Down Expand Up @@ -142,7 +149,9 @@
}
],
"source": [
"extract_features.main(feature_conf, images, image_list=references, feature_path=features)\n",
"extract_features.main(\n",
" feature_conf, images, image_list=references, feature_path=features\n",
")\n",
"pairs_from_exhaustive.main(sfm_pairs, image_list=references)\n",
"match_features.main(matcher_conf, sfm_pairs, features=features, matches=matches);"
]
Expand All @@ -162,9 +171,13 @@
"metadata": {},
"outputs": [],
"source": [
"model = reconstruction.main(sfm_dir, images, sfm_pairs, features, matches, image_list=references)\n",
"model = reconstruction.main(\n",
" sfm_dir, images, sfm_pairs, features, matches, image_list=references\n",
")\n",
"fig = viz_3d.init_figure()\n",
"viz_3d.plot_reconstruction(fig, model, color='rgba(255,0,0,0.5)', name=\"mapping\", points_rgb=True)\n",
"viz_3d.plot_reconstruction(\n",
" fig, model, color=\"rgba(255,0,0,0.5)\", name=\"mapping\", points_rgb=True\n",
")\n",
"fig.show()"
]
},
Expand Down Expand Up @@ -204,7 +217,7 @@
}
],
"source": [
"visualization.visualize_sfm_2d(model, images, color_by='visibility', n=2)"
"visualization.visualize_sfm_2d(model, images, color_by=\"visibility\", n=2)"
]
},
{
Expand Down Expand Up @@ -238,7 +251,7 @@
"# try other queries by uncommenting their url\n",
"# url = \"https://upload.wikimedia.org/wikipedia/commons/5/59/Basilique_du_Sacr%C3%A9-C%C5%93ur_%285430392880%29.jpg\"\n",
"# url = \"https://upload.wikimedia.org/wikipedia/commons/8/8e/Sacr%C3%A9_C%C5%93ur_at_night%21_%285865355326%29.jpg\"\n",
"query = 'query/night.jpg'\n",
"query = \"query/night.jpg\"\n",
"!mkdir -p $images/query && wget $url -O $images/$query -q\n",
"plot_images([read_image(images / query)], dpi=75)"
]
Expand Down Expand Up @@ -290,9 +303,13 @@
}
],
"source": [
"extract_features.main(feature_conf, images, image_list=[query], feature_path=features, overwrite=True)\n",
"extract_features.main(\n",
" feature_conf, images, image_list=[query], feature_path=features, overwrite=True\n",
")\n",
"pairs_from_exhaustive.main(loc_pairs, image_list=[query], ref_list=references)\n",
"match_features.main(matcher_conf, loc_pairs, features=features, matches=matches, overwrite=True);"
"match_features.main(\n",
" matcher_conf, loc_pairs, features=features, matches=matches, overwrite=True\n",
");"
]
},
{
Expand Down Expand Up @@ -344,8 +361,8 @@
"camera = pycolmap.infer_camera_from_image(images / query)\n",
"ref_ids = [model.find_image_with_name(r).image_id for r in references]\n",
"conf = {\n",
" 'estimation': {'ransac': {'max_error': 12}},\n",
" 'refinement': {'refine_focal_length': True, 'refine_extra_params': True},\n",
" \"estimation\": {\"ransac\": {\"max_error\": 12}},\n",
" \"refinement\": {\"refine_focal_length\": True, \"refine_extra_params\": True},\n",
"}\n",
"localizer = QueryLocalizer(model, conf)\n",
"ret, log = pose_from_cluster(localizer, query, camera, ref_ids, features, matches)\n",
Expand All @@ -365,14 +382,18 @@
{
"cell_type": "code",
"execution_count": null,
"id": "71ab5306",
"id": "603c5533-f7b5-4e2c-ae62-de047abce7cc",
"metadata": {},
"outputs": [],
"source": [
"pose = pycolmap.Image(tvec=ret['tvec'], qvec=ret['qvec'])\n",
"viz_3d.plot_camera_colmap(fig, pose, camera, color='rgba(0,255,0,0.5)', name=query, fill=True)\n",
"pose = pycolmap.Image(cam_from_world=ret[\"cam_from_world\"])\n",
"viz_3d.plot_camera_colmap(\n",
" fig, pose, camera, color=\"rgba(0,255,0,0.5)\", name=query, fill=True\n",
")\n",
"# visualize 2D-3D correspondences\n",
"inl_3d = np.array([model.points3D[pid].xyz for pid in np.array(log['points3D_ids'])[ret['inliers']]])\n",
"inl_3d = np.array(\n",
" [model.points3D[pid].xyz for pid in np.array(log[\"points3D_ids\"])[ret[\"inliers\"]]]\n",
")\n",
"viz_3d.plot_points(fig, inl_3d, color=\"lime\", ps=1, name=query)\n",
"fig.show()"
]
Expand Down
25 changes: 14 additions & 11 deletions hloc/__init__.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,12 @@
import logging

from packaging import version

__version__ = '1.5'
__version__ = "1.5"

formatter = logging.Formatter(
fmt='[%(asctime)s %(name)s %(levelname)s] %(message)s',
datefmt='%Y/%m/%d %H:%M:%S')
fmt="[%(asctime)s %(name)s %(levelname)s] %(message)s", datefmt="%Y/%m/%d %H:%M:%S"
)
handler = logging.StreamHandler()
handler.setFormatter(formatter)
handler.setLevel(logging.INFO)
Expand All @@ -18,16 +19,18 @@
try:
import pycolmap
except ImportError:
logger.warning('pycolmap is not installed, some features may not work.')
logger.warning("pycolmap is not installed, some features may not work.")
else:
min_version = version.parse('0.3.0')
max_version = version.parse('0.4.0')
min_version = version.parse("0.6.0")
found_version = pycolmap.__version__
if found_version != 'dev':
if found_version != "dev":
version = version.parse(found_version)
if version < min_version or version > max_version:
s = f'pycolmap>={min_version},<={max_version}'
if version < min_version:
s = f"pycolmap>={min_version}"
logger.warning(
'hloc now requires %s but found pycolmap==%s, '
"hloc requires %s but found pycolmap==%s, "
'please upgrade with `pip install --upgrade "%s"`',
s, found_version, s)
s,
found_version,
s,
)
Loading

0 comments on commit b7a24d2

Please sign in to comment.