Skip to content

Instantly share code, notes, and snippets.

@unphased
Created April 12, 2025 15:24
Show Gist options
  • Save unphased/d2efaf917afbb7994561156389743a3b to your computer and use it in GitHub Desktop.
bat lf previewer including scroll functionality (snapshot)
#!/bin/bash
# lf Previewer Script with Caching for Performance
#
# Design Goals:
# 1. Fast Scrolling: Avoid re-running expensive generation (like bat) on every scroll.
# 2. Caching: Generate full preview content once per file version, store it.
# 3. Cache Invalidation: Use file modification time (mtime) in cache keys to automatically
#    handle file changes.
# 4. Large File Handling: For text-based files, only process the head and tail lines for faster generation.
# 5. File Type Handling: Support various types (text, json, binary, compressed).
# 6. Modularity: Separate caching logic from file-specific generation logic for clarity
#    and maintainability.
# 7. Robustness: Handle temporary files safely and attempt atomic cache updates.

# --- Configuration ---
CACHE_DIR="$HOME/.cache/lf-previewer"
MAX_XXD_BYTES=2048           # Limit for binary (hex dump) previews
PREVIEW_CHUNK_BYTES=131072   # 128KB threshold. Files larger than this (in bytes) trigger head/tail preview.
PREVIEW_HEADTAIL_BYTES=65536 # 64KB for head and 64KB for tail when truncating.
unset COLORTERM              # Ensure consistent terminal behavior for bat/jq

# --- Logging Setup ---
LOGFILE="$CACHE_DIR/previewer.log"
# BUGFIX: the cache/log directory may not exist on first run; it must be
# created BEFORE the exec redirection below, or the redirection itself fails.
mkdir -p "$CACHE_DIR"
exec 2>> "$LOGFILE" # Redirect stderr to the log file (append)

# --- Dependency Check ---
for cmd in realpath stat md5sum file bat tail head printf mktemp mv rm xxd brotli zstd; do
  if ! command -v "$cmd" >/dev/null; then
    echo "Error: Required command '$cmd' not found in PATH."
    exit 1
  fi
done

# Scroll offset supplied by lf via the lf_user_preview_offset env var; line 1 by default.
preview_offset=${lf_user_preview_offset:-1}

# --- Helper Functions ---
#######################################
# Generate the full (un-scrolled) preview content for one file.
# Arguments:
#   $1 - input file path
#   $2 - temporary output file path (content written here)
# Returns:
#   0 on success; 1 if the file type could not be determined.
#######################################
generate_preview_content() {
  local input_f="$1"
  local output_tmp_f="$2"
  local file_t file_size

  # Determine MIME type; an empty result means 'file' itself failed.
  file_t=$(file -b --mime-type "$input_f" 2>/dev/null)
  if [[ -z "$file_t" ]]; then
    echo "Error determining file type for: $input_f" > "$output_tmp_f"
    return 1
  fi

  # File size, portable across GNU and BSD/macOS stat. Computed once here
  # instead of being duplicated in the JSON and plain-text branches below.
  if [[ "$(uname)" == "Darwin" || "$(uname)" == *BSD* ]]; then
    file_size=$(stat -f %z "$input_f")
  else
    file_size=$(stat -c %s "$input_f")
  fi

  # --- File Type Specific Generation Logic ---
  if [[ "$input_f" =~ \.json$ ]]; then
    if [[ $file_size -gt $PREVIEW_CHUNK_BYTES ]]; then
      # Large JSON: preview head + tail only.
      local truncated_json_tmp
      truncated_json_tmp=$(mktemp) # Use system tmp
      trap 'rm -f "$truncated_json_tmp"' RETURN # Ensure cleanup
      head -c "$PREVIEW_HEADTAIL_BYTES" "$input_f" > "$truncated_json_tmp"
      printf "\n\n[... JSON File truncated (%s bytes total) ...]\n\n" "$file_size" >> "$truncated_json_tmp"
      tail -c "$PREVIEW_HEADTAIL_BYTES" "$input_f" >> "$truncated_json_tmp"
      # jq will normally reject the truncated (now-invalid) JSON, in which
      # case we fall back to bat's syntax highlighting.
      if command -v jq >/dev/null && jq --color-output . "$truncated_json_tmp" > "$output_tmp_f" 2>/dev/null; then
        : # jq succeeded on truncated content
      else
        bat --language=json --style=changes,numbers --color=always "$truncated_json_tmp" > "$output_tmp_f"
      fi
    else
      # Small enough to pretty-print in full; jq first, bat as fallback.
      if command -v jq >/dev/null && jq --color-output . "$input_f" > "$output_tmp_f" 2>/dev/null; then
        : # jq succeeded
      else
        bat --language=json --style=changes,numbers --color=always "$input_f" > "$output_tmp_f"
      fi
    fi
  elif [[ $file_t == "application/octet-stream" && "$input_f" =~ \.br$ ]]; then
    # Brotli files report a generic MIME type; detect by extension.
    handle_compressed "brotli" "$input_f" "$output_tmp_f"
  elif [[ $file_t == application/zstd ]]; then
    handle_compressed "zstd" "$input_f" "$output_tmp_f"
  elif [[ $file_t == application/octet-stream ]]; then
    # Generic binary: hex dump of the first MAX_XXD_BYTES bytes.
    xxd -l "$MAX_XXD_BYTES" "$input_f" > "$output_tmp_f"
  else
    # Default (text): bat, with head/tail truncation for large files.
    if [[ $file_size -gt $PREVIEW_CHUNK_BYTES ]]; then
      local truncated_text_tmp
      truncated_text_tmp=$(mktemp) # Use system tmp
      trap 'rm -f "$truncated_text_tmp"' RETURN # Ensure cleanup
      head -c "$PREVIEW_HEADTAIL_BYTES" "$input_f" > "$truncated_text_tmp"
      printf "\n\n[... File truncated (%s bytes total) ...]\n\n" "$file_size" >> "$truncated_text_tmp"
      tail -c "$PREVIEW_HEADTAIL_BYTES" "$input_f" >> "$truncated_text_tmp"
      bat --style=changes,numbers --color=always "$truncated_text_tmp" > "$output_tmp_f"
    else
      # File is small enough, process directly.
      bat --style=changes,numbers --color=always "$input_f" > "$output_tmp_f"
    fi
  fi

  # Guard against tools that fail silently and leave an empty output file.
  if [[ ! -s "$output_tmp_f" ]]; then
    echo "[Preview generation produced no output for $input_f]" > "$output_tmp_f"
  fi
  return 0
}
#######################################
# Decompress a file and (recursively) preview its contents.
# Arguments:
#   $1 - compression type ("brotli" or "zstd")
#   $2 - input (compressed) file path
#   $3 - final output temp file path
# Returns:
#   0 on success, 1 on unsupported type or decompression failure.
#######################################
handle_compressed() {
  local type="$1"
  local input_comp_f="$2"
  local output_final_tmp_f="$3"
  local decomp_tmp err_msg
  local -a decomp_cmd

  # BUGFIX: plain mktemp — 'mktemp --suffix' is GNU-only and breaks on
  # macOS/BSD, which this script otherwise explicitly supports.
  decomp_tmp=$(mktemp) || {
    echo "Error creating temp file for decompression" > "$output_final_tmp_f"
    return 1
  }
  # Ensure the decompressed temp file is cleaned up when we return.
  trap 'rm -f "$decomp_tmp"' RETURN

  # Build the command as an argv array — no eval, no quoting pitfalls.
  case "$type" in
    brotli)
      decomp_cmd=(brotli -d "$input_comp_f" -o "$decomp_tmp" -f)
      err_msg="Error decompressing Brotli file"
      ;;
    zstd)
      decomp_cmd=(zstd -qd "$input_comp_f" -o "$decomp_tmp" -f)
      err_msg="Error decompressing Zstd file"
      ;;
    *)
      echo "Unsupported compression type: $type" > "$output_final_tmp_f"
      return 1
      ;;
  esac

  # Attempt decompression.
  if "${decomp_cmd[@]}"; then
    # Recurse on the decompressed payload so its own type handling applies.
    # offset=1 so the *full* content is generated for the cache.
    lf_user_preview_offset=1 "$0" "$decomp_tmp" > "$output_final_tmp_f"
    return 0
  else
    echo "$err_msg: $input_comp_f" > "$output_final_tmp_f"
    return 1
  fi
}
# --- Main Script Logic ---

# Ensure cache directory exists
mkdir -p "$CACHE_DIR" || { echo "Error: Cannot create cache dir $CACHE_DIR"; exit 1; }

# Input validation
input_file="$1"
if [[ -z "$input_file" ]]; then
  echo "Usage: $0 <file_path>"
  exit 1
fi

# Resolve to an absolute path; exit cleanly for directories / non-existent
# paths (lf may probe the previewer before deciding a target isn't previewable).
abs_input_file=$(realpath "$input_file" 2>/dev/null)
if [[ -z "$abs_input_file" || ! -e "$abs_input_file" ]]; then
  exit 0 # Exit cleanly, lf will handle it
fi

# Modification time (seconds since epoch), portable across GNU and BSD/macOS stat.
if [[ "$(uname)" == "Darwin" || "$(uname)" == *BSD* ]]; then
  mtime=$(stat -f %m "$abs_input_file") # macOS/BSD stat
else
  mtime=$(stat -c %Y "$abs_input_file") # Linux/GNU stat
fi

# Cache key: path hash + mtime, so edits invalidate the cache automatically.
# NOTE(review): entries for old mtimes are never pruned; the cache grows
# until cleaned manually.
file_hash=$(printf '%s' "$abs_input_file" | md5sum | cut -d' ' -f1)
cache_file="$CACHE_DIR/${file_hash}_${mtime}.preview"

# Read scroll offset (defaults to 1)
preview_offset=${lf_user_preview_offset:-1}

# --- Cache Check ---
if [[ -f "$cache_file" ]]; then
  # Cache Hit: serve the requested window straight from the cached file.
  tail -n "+$preview_offset" "$cache_file"
  exit 1 # Non-zero exit tells lf the output is dynamic (re-run on scroll)
fi

# --- Cache Miss: Generate and Cache Preview ---
# BUGFIX: diagnostics must go to stderr (the log); written to stdout they
# would be spliced into the preview shown in lf's pane.
echo "Generating preview cache for $input_file..." >&2

# Temporary file for the intermediate full preview content; cleaned up on
# exit/interrupt unless successfully moved into the cache.
preview_content_tmp=$(mktemp "$CACHE_DIR/preview_tmp.XXXXXX")
trap 'rm -f "$preview_content_tmp"' EXIT SIGINT SIGTERM

if generate_preview_content "$abs_input_file" "$preview_content_tmp"; then
  # Generation succeeded: atomically publish into the cache.
  mv "$preview_content_tmp" "$cache_file" || {
    echo "Error moving temp file to cache: $cache_file" >&2
    rm -f "$preview_content_tmp"
    echo "[Error caching preview]"
    exit 1 # Still exit 1 for lf scrolling mechanism
  }
  trap - EXIT SIGINT SIGTERM # File moved; disarm cleanup trap
else
  # Generation failed: cache the error message so we don't retry every redraw.
  echo "[Preview generation failed for $input_file]" >&2
  mv "$preview_content_tmp" "$cache_file" || {
    echo "Error moving error temp file to cache: $cache_file" >&2
    rm -f "$preview_content_tmp"
    echo "[Error caching preview failure message]"
    exit 1
  }
  trap - EXIT SIGINT SIGTERM # File moved; disarm cleanup trap
fi

# --- Display Initial View from Newly Created Cache ---
tail -n "+$preview_offset" "$cache_file"
exit 1 # Essential for lf scrolling mechanism
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment