A tool to analyze .safetensors LoRA files.
#!/usr/bin/env python3
#
# Author: FNGarvin
# License: MIT
#
# Description: A tool to analyze .safetensors LoRA files. It analyzes
#              both metadata (if available) and tensor keys in an attempt
#              to infer the base model, resolution, and trigger words,
#              highlighting any contradictions.

import sys
import json
import re
from safetensors import safe_open
from typing import Dict, Set

# --- Analysis Constants ---
KEYWORD_PRIORITY_LIST = ['flux', 'wan', 'sdxl', 'qwen', 'pony', 'kontext',
                         'noob', 'illustrious', '1.5']
KEYWORD_MAP = {'1.5': 'SD 1.5'}
HASH_TO_MODEL = {
    "cc6cb27103417325ff94f52b7a5d2dde45a7515b25c255d8e396c90014281516": "SD 1.5",
    "299e4cd0fba7ba6ddf3eff4db07105a698363edf75cd67e8438e1d6847fa8b3f": "WAN",
    "67ab2fd8ec439a89b3fedb15cc65f54336af163c7eb5e4f2acc98f090a29b0b3": "Pony Diffusion",
    "8e4694c65fb951ca7a6fe05883524f0ec452a405f624ff85aaa1affd15cde0c4": "Pony Diffusion",
    "b23192c74094223fd694c73df8477b080c9ca241b88b415bd4fdb301badc1132": "FLUX",
}
COMMON_TAGS_TO_IGNORE = {
    'man', 'woman', 'girl', 'boy', '1man', '1woman', '1girl', '1boy',
    'solo', 'simple background', 'monochrome', 'greyscale'
}


def classify_by_keys(keys: Set[str]) -> str:
    """Analyzes a set of tensor keys to find architectural fingerprints."""
    if not keys:
        return "Unknown"
    if any(k.startswith('diffusion_model.') for k in keys):
        return "WAN"
    if any(k.startswith('lora_unet_double_blocks_') or k.startswith('lora_unet_single_blocks_') for k in keys):
        return "FLUX"
    if any(k.startswith('lora_te1_') or k.startswith('lora_te2_') for k in keys):
        return "SDXL"
    if any(k.startswith('lora_te_') for k in keys):
        return "SD 1.5"
    if any('down_blocks' in k or 'mid_block' in k for k in keys):
        return "SDXL"
    if any('input_blocks' in k or 'middle_block' in k for k in keys):
        return "SD 1.5"
    return "Unknown"

def analyze_file(metadata: Dict, keys: Set[str]) -> str:
    """Analyzes metadata and keys to infer model type, resolution, and triggers."""
    analysis_lines = []

    # --- 1. Perform Metadata Analysis ---
    metadata_model = "Unknown"
    finetune_info = ""
    for key in ["name", "ss_output_name"]:
        name_val = metadata.get(key, "").lower()
        if name_val:
            for keyword in KEYWORD_PRIORITY_LIST:
                if keyword in name_val:
                    metadata_model = KEYWORD_MAP.get(keyword, keyword.upper())
                    break
        if metadata_model != "Unknown": break
    if metadata_model == "Unknown":
        for key in ["ss_new_sd_model_hash", "sshs_model_hash"]:
            model_hash = metadata.get(key)
            if model_hash in HASH_TO_MODEL:
                metadata_model = HASH_TO_MODEL[model_hash]
                break
    if metadata_model == "Unknown":
        model_spec_arch = metadata.get("modelspec.architecture", "").lower()
        base_model_version = metadata.get("ss_base_model_version", "").lower()
        model_name = metadata.get("ss_sd_model_name", "").lower()
        title = metadata.get("modelspec.title", "").lower()
        if "flux" in model_spec_arch or "flux" in base_model_version or "flux" in title:
            metadata_model = "FLUX"
        elif "wan" in base_model_version or "wan" in title:
            metadata_model = "WAN"
        elif "pony" in model_name or "pony" in title:
            metadata_model = "Pony Diffusion"
        elif "sd_xl" in model_name or "sdxl" in base_model_version or "stable-diffusion-xl" in model_spec_arch:
            metadata_model = "SDXL"
            if model_name and not model_name.startswith('sd_xl_'):
                finetune_info = model_name
        elif "v1-5" in model_name or "sd15" in model_name or "sd_1.5" in base_model_version:
            metadata_model = "SD 1.5"
    # --- 2. Perform Tensor Key Analysis ---
    keys_model = classify_by_keys(keys)

    # --- 3. Compare Results and Format Output ---
    model_string = ""
    if metadata_model != "Unknown" and keys_model != "Unknown" and metadata_model != keys_model:
        model_string = (f"• Base Model: CONTRADICTION DETECTED\n"
                        f" - Metadata suggests: {metadata_model}\n"
                        f" - Tensor Keys suggest: {keys_model}")
    elif metadata_model != "Unknown" and keys_model != "Unknown":
        model_string = f"• Base Model: {metadata_model} (Confirmed by Metadata and Keys)"
    elif metadata_model != "Unknown":
        model_string = f"• Base Model: {metadata_model} (from Metadata)"
    elif keys_model != "Unknown":
        model_string = f"• Base Model: {keys_model} (from Tensor Keys)"
    else:
        model_string = "• Base Model: Unknown"
    if finetune_info:
        model_string += f" (Finetune: {finetune_info})"
    analysis_lines.append(model_string)

    # --- 4. Infer Resolution ---
    resolution_str = metadata.get("modelspec.resolution") or metadata.get("ss_resolution")
    if resolution_str:
        try:
            dims = [int(d) for d in re.findall(r'\d+', resolution_str)]
            if len(dims) == 2:
                width, height = dims
                megapixels = (width * height) / 1_000_000
                analysis_lines.append(f"• Trained Resolution: {width}x{height} ({megapixels:.2f} MP)")
        except (ValueError, TypeError): pass

    # --- 5. Infer Trigger Words ---
    triggers = []
    if "ss_activation_tags" in metadata:
        triggers = [tag.strip() for tag in metadata["ss_activation_tags"].split(',')]
    elif "ss_tag_frequency" in metadata:
        try:
            freq_data = metadata["ss_tag_frequency"]
            if isinstance(freq_data, str): freq_data = json.loads(freq_data)
            all_tags = {}
            for folder_data in freq_data.values():
                for tag, count in folder_data.items():
                    all_tags[tag] = all_tags.get(tag, 0) + count
            if all_tags:
                max_freq = max(all_tags.values())
                candidate_triggers = [tag for tag, count in all_tags.items() if count == max_freq]
                triggers = [tag for tag in candidate_triggers if tag not in COMMON_TAGS_TO_IGNORE]
        except Exception: pass
    if triggers:
        analysis_lines.append(f"• Potential Trigger Words: {', '.join(triggers)}")
    else:
        analysis_lines.append("• No clear trigger words found in metadata.")

    return "\n".join(analysis_lines)

def main():
    if len(sys.argv) != 2:
        script_name = sys.argv[0]
        print(f"Usage: python {script_name} <path_to_safetensors_file>", file=sys.stderr)
        sys.exit(1)

    filepath = sys.argv[1]
    try:
        metadata = {}
        keys = set()
        with safe_open(filepath, framework='pt') as f:
            metadata = f.metadata() or {}
            keys = set(f.keys())

        if metadata:
            pretty_metadata = json.dumps(metadata, indent=4, sort_keys=True)
            print(pretty_metadata)
        else:
            print("No metadata found in this file.")

        print("\n--- Analysis ---")
        analysis_report = analyze_file(metadata, keys)
        print(analysis_report)
    except FileNotFoundError:
        print(f"Error: File not found at '{filepath}'", file=sys.stderr)
        sys.exit(1)
    except Exception as e:
        print(f"An error occurred while reading the file: {e}", file=sys.stderr)
        sys.exit(1)


if __name__ == "__main__":
    main()

# EOF of query-lora-metadata.py
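
The same two safetensors calls the script relies on (safe_open(...).metadata() and .keys()) can be reused to batch-scan a directory of LoRA files. The sketch below is a minimal, hypothetical extension, not part of the gist: it assumes it is appended to the script above so that sys, safe_open, and analyze_file are already in scope, and the "./loras" folder name is only an example.

from pathlib import Path

def scan_folder(folder: str) -> None:
    """Print the metadata/key analysis for every .safetensors file in a folder."""
    for path in sorted(Path(folder).glob("*.safetensors")):
        try:
            with safe_open(str(path), framework='pt') as f:
                metadata = f.metadata() or {}
                keys = set(f.keys())
        except Exception as e:
            print(f"Skipping {path.name}: {e}", file=sys.stderr)
            continue
        print(f"=== {path.name} ===")
        print(analyze_file(metadata, keys))  # analyze_file as defined above
        print()

# Example use (hypothetical folder name):
# scan_folder("./loras")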