#!/usr/bin/env python3
"""Test HF token with modern InferenceClient approach."""
import os

# python-dotenv is optional here: os.getenv works without it, so a missing
# package should not crash this diagnostic script (was a hard import).
try:
    from dotenv import load_dotenv

    # Load environment variables from a .env file, if present.
    load_dotenv()
except ImportError:
    pass

# Model used by both inference probes below.
MODEL = "cardiffnlp/twitter-roberta-base-sentiment-latest"


def _try_inference_client(token):
    """Probe the token via huggingface_hub.InferenceClient (recommended path).

    Prints the outcome; returns True on success, False on any error.
    """
    print("\nšŸ”„ Testing with InferenceClient (recommended)...")
    try:
        from huggingface_hub import InferenceClient

        client = InferenceClient(token=token)
        result = client.text_classification(
            "I love this product!",
            model=MODEL,
        )
        print("āœ… Success with InferenceClient!")
        print("Result:", result)
        return True
    except Exception as e:
        # Broad catch is deliberate: this is a best-effort diagnostic probe.
        print("āŒ InferenceClient error:", str(e))
        return False


def _try_direct_api(token):
    """Probe the token via a raw POST to the serverless inference endpoint.

    Prints the outcome; returns True on HTTP 200, False otherwise.
    """
    print("\nšŸ”„ Testing with direct API call...")
    try:
        import requests

        headers = {"Authorization": f"Bearer {token}"}
        payload = {"inputs": "I love this product!"}
        response = requests.post(
            f"https://api-inference.huggingface.co/models/{MODEL}",
            headers=headers,
            json=payload,
            timeout=30,
        )
        print(f"Status code: {response.status_code}")
        if response.status_code == 200:
            print("āœ… Success with direct API!")
            print("Result:", response.json())
            return True
        print("āŒ API error:", response.text)
    except Exception as e:
        print("āŒ Direct API error:", str(e))
    return False


def _validate_token(token):
    """Check whether the token itself is valid via the whoami endpoint.

    Prints the outcome; returns True when the API accepts the token.
    """
    print("\nšŸ”„ Testing token validation...")
    try:
        import requests

        headers = {"Authorization": f"Bearer {token}"}
        # Fixes: use the current /api/whoami-v2 endpoint (plain /api/whoami
        # is legacy) and add a timeout — the original GET could hang forever.
        response = requests.get(
            "https://huggingface.co/api/whoami-v2",
            headers=headers,
            timeout=30,
        )
        if response.status_code == 200:
            print("āœ… Token is valid!")
            print("User info:", response.json())
            return True
        print("āŒ Token validation failed:", response.text)
    except Exception as e:
        print("āŒ Token validation error:", str(e))
    return False


def test_hf_token():
    """Test the HF_TOKEN from the environment against Hugging Face APIs.

    Tries, in order: InferenceClient, the direct serverless API, and plain
    token validation. Returns True as soon as any probe succeeds, otherwise
    False (including when no token is configured — was None, now a
    consistent bool; both are falsy so truthiness-based callers still work).
    """
    token = os.getenv("HF_TOKEN")
    print(f"Token loaded: {token[:20]}..." if token else "No token found")

    if not token:
        print("āŒ No HF_TOKEN found in environment")
        return False

    if _try_inference_client(token):
        return True
    if _try_direct_api(token):
        return True
    # Fix: the original fell through to False even when validation succeeded.
    return _validate_token(token)


if __name__ == "__main__":
    test_hf_token()