Skip to content

Commit

Permalink
Add environment token support for Hugging Face models
Browse files Browse the repository at this point in the history
- Use `HF_TOKEN` environment variable as fallback for Hugging Face authentication
- Simplify token handling in `HuggingfaceDetectionModel.load_model()`
- Ensure consistent token usage across model and processor loading
  • Loading branch information
fcakyon committed Mar 8, 2025
1 parent e983fff commit 5424a8b
Showing 1 changed file with 5 additions and 3 deletions.
8 changes: 5 additions & 3 deletions sahi/models/huggingface.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
# Code written by Fatih C Akyon and Devrim Cavusoglu, 2022.

import logging
import os
from typing import Any, Dict, List, Optional, Tuple, Union

import numpy as np
Expand Down Expand Up @@ -69,18 +70,19 @@ def num_categories(self) -> int:
def load_model(self):
    """Load the Hugging Face object-detection model and its processor.

    Authentication: the ``HF_TOKEN`` environment variable takes
    precedence; when it is unset, the token supplied at construction
    time (``self._token``) is used. The same resolved token is passed
    to both the model and the processor loads so authentication is
    consistent across the two calls.

    Side effects: delegates the loaded objects to ``self.set_model``.
    """
    from transformers import AutoModelForObjectDetection, AutoProcessor

    # Resolve once so model and processor always use the same credential.
    hf_token = os.getenv("HF_TOKEN", self._token)
    model = AutoModelForObjectDetection.from_pretrained(self.model_path, token=hf_token)
    if self.image_size is not None:
        # rt_detr_v2 processors expect an explicit height/width size dict;
        # other architectures use the shortest/longest-edge convention.
        if model.base_model_prefix == "rt_detr_v2":
            size = {"height": self.image_size, "width": self.image_size}
        else:
            size = {"shortest_edge": self.image_size, "longest_edge": None}
        # use_fast=True raises error: AttributeError: 'SizeDict' object has no attribute 'keys'
        processor = AutoProcessor.from_pretrained(
            self.model_path, size=size, do_resize=True, use_fast=False, token=hf_token
        )
    else:
        processor = AutoProcessor.from_pretrained(self.model_path, use_fast=False, token=hf_token)
    self.set_model(model, processor)

def set_model(self, model: Any, processor: Any = None):
Expand Down

0 comments on commit 5424a8b

Please sign in to comment.