Allow SuperPoint weights to be loaded from local cache
Alexandre Duverger committed Mar 14, 2024
1 parent dc01ec8 commit 83bcc4d
Showing 1 changed file with 6 additions and 1 deletion.
7 changes: 6 additions & 1 deletion gluefactory/models/extractors/superpoint_open.py
@@ -7,6 +7,7 @@
"""

from collections import OrderedDict
from pathlib import Path
from types import SimpleNamespace

import torch
@@ -85,6 +86,7 @@ class SuperPoint(BaseModel):
"descriptor_dim": 256,
"channels": [64, 64, 128, 128, 256],
"dense_outputs": None,
"weights": None, # local path of pretrained weights
}

checkpoint_url = "https://github.com/rpautrat/SuperPoint/raw/master/weights/superpoint_v6_from_tf.pth" # noqa: E501
@@ -112,7 +114,10 @@ def _init(self, conf):
             VGGBlock(c, self.conf.descriptor_dim, 1, relu=False),
         )
 
-        state_dict = torch.hub.load_state_dict_from_url(self.checkpoint_url)
+        if conf.weights is not None and Path(conf.weights).exists():
+            state_dict = torch.load(conf.weights, map_location="cpu")
+        else:
+            state_dict = torch.hub.load_state_dict_from_url(self.checkpoint_url)
         self.load_state_dict(state_dict)
 
     def _forward(self, data):
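For reference, a minimal usage sketch of the new option (not part of the commit). It assumes gluefactory's BaseModel merges a partial config dict into default_conf and that the forward pass takes a dict with an "image" tensor, as elsewhere in gluefactory; the checkpoint path shown is hypothetical:

import torch

from gluefactory.models.extractors.superpoint_open import SuperPoint

# Point "weights" at a locally cached checkpoint to skip the download;
# if the path does not exist (or "weights" is left as None), the model
# falls back to torch.hub.load_state_dict_from_url(checkpoint_url) as before.
extractor = SuperPoint({"weights": "/path/to/superpoint_v6_from_tf.pth"}).eval()

image = torch.rand(1, 1, 480, 640)  # dummy grayscale batch (B, C, H, W)
with torch.no_grad():
    pred = extractor({"image": image})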
