This script helped me optimize VS Code's file watcher usage. It assumes Linux (it reads /proc and the inotify limits), but it can probably be adapted to other operating systems. See also https://github.com/microsoft/vscode/issues/142763
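For reference, the recommendations the script prints map onto the files.watcherExclude setting in a workspace's .vscode/settings.json. A minimal sketch of what that can look like (the directory names below are only examples; use whatever the report flags as heavy and excludable):

  {
    "files.watcherExclude": {
      "**/node_modules/**": true,
      "**/tmp/**": true,
      "**/dist/**": true
    }
  }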
#!/usr/bin/env ruby
require 'json'
require 'set' # Set is used in analyze_workspaces (not autoloaded on older Rubies)
class VSCodeAnalyzer
  EXCLUDABLE = %w[node_modules vendor tmp build dist target .cache .crystal coverage log logs .next out .turbo]
  HEAVY_DIR_THRESHOLD = { files: 100, percentage: 5.0 }
  SEARCH_PATHS = %w[~/projects/%s ~/%s ./%s]
  VSCODE_COMMANDS = %w[code-insiders code]
  STORAGE_PATHS = [
    '~/.config/Code - Insiders/User/globalStorage/storage.json',
    '~/.config/Code/User/globalStorage/storage.json',
    '~/.var/app/com.visualstudio.code/config/Code/User/globalStorage/storage.json'
  ]
  def initialize
    @vscode = find_vscode
    @folders = load_folders
  end
  def run
    return warn "VS Code not found" unless @vscode
    summary
  end
  private
  def find_vscode = VSCODE_COMMANDS.find { |cmd| system("command -v #{cmd} >/dev/null 2>&1") }
  def load_folders
    STORAGE_PATHS.each do |path|
      path = File.expand_path path
      next unless File.exist? path
      folders = parse_storage path
      return folders if folders.any?
    end
    {}
  end
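  # Map folder basenames to absolute paths using VS Code's backupWorkspaces entries in storage.json.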
  def parse_storage(path)
    data = JSON.parse(File.read(path))
    folders = data.dig('backupWorkspaces', 'folders') || []
    folders.each_with_object({}) do |f, h|
      uri = f['folderUri']
      next unless uri&.start_with? 'file://'
      path = uri.sub('file://', '')
      h[File.basename(path)] = path
    end
  rescue
    {}
  end
  def summary
    header "VS Code File Watcher Analysis", @vscode
    workspaces = parse_status
    return unless workspaces
    system_info
    analyze_workspaces workspaces
  end
  def header(title, subtitle = nil)
    puts "\n#{'#' * 80}"
    puts " #{title} - #{Time.now.strftime('%Y-%m-%d %H:%M:%S')}"
    puts " #{subtitle}" if subtitle
    puts "#{'=' * 80}\n"
  end
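  # Report system-wide inotify usage against the kernel limits from /proc/sys/fs/inotify.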
  def system_info
    max_watches = read_sys('max_user_watches')
    max_instances = read_sys('max_user_instances')
    instances = count_inotify_fds
    pct = max_instances > 0 ? (instances * 100.0 / max_instances).round(1) : 0 # read_sys can return 0
    puts "SYSTEM STATUS"
    puts "-" * 80
    puts " Inotify instances: #{instances} / #{max_instances} (#{pct}%)"
    puts " Max watches per instance: #{max_watches}"
    show_top_processes(inotify_users) if instances > 0
    puts ""
    puts " #{status_for(pct)}"
    puts ""
  end
  def read_sys(param)
    File.read("/proc/sys/fs/inotify/#{param}").to_i rescue 0
  end
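  # Count inotify file descriptors across /proc/*/fd (only FDs readable by the current user are visible).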
  def count_inotify_fds
    Dir.glob('/proc/*/fd/*').count { |fd| (File.readlink(fd) rescue '').include?('inotify') }
  end
  def status_for(pct)
    case pct
    when 80.. then "[!] WARNING: High inotify usage!"
    when 50.. then "[*] NOTICE: Moderate inotify usage"
    else "[+] GOOD: Low inotify usage"
    end
  end
  def show_top_processes(processes)
    return if processes.empty?
    puts "\n Top inotify users:"
    processes.first(5).each do |p|
      pids = p[:pids].sort.join(', ')
      label = p[:processes] > 1 ? "(#{p[:processes]} processes: #{pids})" : "(#{pids})"
      puts " #{p[:count].to_s.rjust(3)} instances - #{p[:name]} #{label}"
    end
  end
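  # Tally inotify instances per PID (skipping this script) and group them by process name.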
  def inotify_users
    counts = Hash.new(0)
    names = {}
    my_pid = Process.pid.to_s
    Dir.glob('/proc/*/fd/*').each do |fd|
      next unless File.symlink?(fd)
      link = File.readlink(fd) rescue next
      next unless link.include?('inotify')
      pid = fd.split('/')[2]
      next if pid == my_pid
      counts[pid] += 1
      names[pid] ||= process_name(pid)
    end
    group_by_name counts, names
  end
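  # Best-effort process name: prefer /proc/<pid>/exe, then cmdline, then comm; the /app/ match covers sandboxed installs such as Flatpak.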
  def process_name(pid)
    exe = File.readlink("/proc/#{pid}/exe") rescue nil
    if exe
      name = File.basename(exe)
      return $1 if exe.include?('/app/') && exe =~ %r{/app/([^/]+)}
      return name
    end
    cmdline = File.read("/proc/#{pid}/cmdline").gsub("\0", ' ').strip rescue ''
    # yes it's hacky but this works for me ;)
    return 'joplin' if cmdline.include?('joplin')
    return 'code-insiders' if cmdline.include?('code-insiders')
    cmd = cmdline.split.first
    cmd ? File.basename(cmd) : (File.read("/proc/#{pid}/comm").strip rescue 'unknown')
  end
  def group_by_name(counts, names)
    grouped = Hash.new { |h, k| h[k] = { count: 0, pids: [] } }
    counts.each do |pid, count|
      name = names[pid] || 'unknown'
      grouped[name][:count] += count
      grouped[name][:pids] << pid
    end
    grouped.map { |name, data|
      { name: name, count: data[:count], processes: data[:pids].size, pids: data[:pids] }
    }.sort_by { |p| -p[:count] }
  end
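  # Print one report per unique folder set, merging windows that share the same folders.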
  def analyze_workspaces(workspaces)
    seen = Set.new
    workspaces.each do |ws|
      key = ws[:folders].map { |f| f[:name] }.sort.join('|')
      next if seen.include?(key)
      seen << key
      names = workspaces.select { |w| w[:folders].map { |f| f[:name] }.sort.join('|') == key }.map { |w| w[:name] }
      puts "WORKSPACE: #{names.join(', ')}"
      puts "-" * 80
      ws[:folders].each { |f| check_folder(f) }
      puts ""
    end
  end
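  # For a single workspace folder: locate it on disk, read its .vscode/settings.json and compare watched vs. excluded file counts.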
  def check_folder(folder)
    path = find_path(folder[:name])
    unless path
      puts " #{folder[:name]} (#{folder[:count]} files)"
      puts " [!] Path not found\n\n"
      return
    end
    settings = File.join path, '.vscode', 'settings.json'
    exclusions = load_json settings, 'files.watcherExclude'
    global = load_json settings, 'files.exclude'
    watched, excluded, total = file_counts(path, exclusions)
    watch_pct = pct watched, total
    exclude_pct = pct excluded, total
    puts " #{folder[:name]} (#{path})\n\n"
    show_stats watched, excluded, total, watch_pct, exclude_pct, exclusions
    show_exclusions exclusions, global
    show_heavy_dirs path, total, exclusions
    puts ""
  end
  def show_stats(watched, excluded, total, watch_pct, exclude_pct, exclusions)
    if exclusions.any?
      icon = exclude_pct > 80 ? "[+]" : exclude_pct > 50 ? "[*]" : "[!]"
      puts " #{icon} Watching: #{watched.to_s.rjust(6)} files (#{watch_pct}%)"
      puts " Excluded: #{excluded.to_s.rjust(6)} files (#{exclude_pct}%)"
      puts " Total: #{total.to_s.rjust(6)} files"
    elsif total > 1000
      puts " [!] NO EXCLUSIONS - Watching all #{total} files!"
    else
      puts " Watching: #{total} files"
    end
  end
  def show_exclusions(exclusions, global)
    all = (exclusions.keys + global.keys).uniq.sort
    return if all.empty?
    puts "\n Configured exclusions (#{all.size} patterns):"
    all.first(5).each do |pattern|
      source = exclusions.key?(pattern) ? "watcherExclude" : "exclude"
      puts " - #{pattern} (#{source})"
    end
    puts " ... and #{all.size - 5} more" if all.size > 5
  end
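  # List the largest top-level directories and flag the ones that look excludable.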
  def show_heavy_dirs(path, total, exclusions)
    dirs = heavy_dirs(path)
    significant = dirs.select { |_, count| count >= HEAVY_DIR_THRESHOLD[:files] && pct(count, total) >= HEAVY_DIR_THRESHOLD[:percentage] }
    return if significant.empty?
    puts "\n Heavy directories:"
    significant.first(5).each do |name, count|
      excluded = excluded?(name, exclusions)
      excludable = EXCLUDABLE.include?(name)
      p = pct count, total
      icon = excluded ? "[+]" : (excludable ? "[!]" : " ")
      suffix = excluded ? "- excluded" : (excludable ? "- should exclude!" : "")
      puts " #{icon} #{name.ljust(25)} #{count.to_s.rjust(6)} files (#{p}%) #{suffix}"
    end
    recommend_exclusions significant, total, exclusions
  end
  def recommend_exclusions(dirs, total, exclusions)
    candidates = dirs.select { |name, _| EXCLUDABLE.include?(name) && !excluded?(name, exclusions) }
    return if candidates.empty?
    count = candidates.sum { |_, c| c }
    p = pct count, total
    puts "\n RECOMMENDATION: Add #{candidates.size} exclusion(s) to save #{count} files (#{p}%)"
    candidates.first(3).each do |name, _|
      puts " Add to .vscode/settings.json: \"**/#{name}/**\": true"
    end
  end
  def pct(part, whole)
    whole > 0 ? (part * 100.0 / whole).round(1) : 0
  end
  def load_json(file, key)
    return {} unless File.exist? file
    JSON.parse(File.read(file))[key] || {}
  rescue
    {}
  end
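  # Approximate watched vs. total file counts by translating watcherExclude globs into find's ! -path filters.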
  def file_counts(path, exclusions)
    excludes = exclusions.select { |_, v| v }.keys
      .map { |p| p.gsub('**/', '*/').gsub('/**', '/*') }
      .map { |p| "! -path '#{p}'" }
      .join(' ')
    watched = `find '#{path}' -type f #{excludes} ! -path '*/.git/*' 2>/dev/null | wc -l`.to_i
    total = `find '#{path}' -type f 2>/dev/null | wc -l`.to_i
    [watched, total - watched, total]
  rescue
    [0, 0, 0]
  end
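  # File counts per top-level directory (including dot directories), sorted descending.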
  def heavy_dirs(path)
    Dir.glob("#{path}/*/", File::FNM_DOTMATCH).each_with_object({}) do |dir, h|
      name = File.basename dir
      next if %w[. ..].include?(name)
      count = `find "#{dir}" -type f 2>/dev/null | wc -l`.to_i
      h[name] = count if count > 0
    end.sort_by { |_, count| -count }
  end
  def excluded?(dir, exclusions)
    exclusions.keys.any? { |p| p.include?("/#{dir}/") || p.include?("#{dir}/**") }
  end
  def find_path(name)
    return @folders[name] if @folders[name] && Dir.exist?(@folders[name])
    SEARCH_PATHS.map { |p| File.expand_path(p % name) }.find { |p| Dir.exist?(p) }
  end
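  # Parse the "Workspace Stats" section of the --status output into window/folder structures.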
  def parse_status
    output = `#{@vscode} --status 2>/dev/null`
    return nil if output.empty?
    workspaces = []
    current = nil
    folders = []
    in_stats = false
    output.each_line do |line|
      in_stats = true if line.include?('Workspace Stats:')
      next unless in_stats
      if line =~ /Window \((.*?) - (\S+) - Visual Studio Code/
        current[:folders] = folders if current && folders.any?
        folders = []
        current = { name: $2, file: $1, folders: [] }
        workspaces << current
      elsif line =~ /^\s*\|\s*Folder \(([^)]+)\): (\d+) files/
        folders << { name: $1, count: $2.to_i }
      end
    end
    current[:folders] = folders if current && folders.any? # attach the folders collected for the last window
    workspaces
  end
end
VSCodeAnalyzer.new.run
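Note: the script expects a Linux-style /proc filesystem, a running VS Code (code or code-insiders on the PATH, since the workspace data comes from the --status output), and Ruby 3.0 or newer (find_vscode uses an endless method definition).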