module LLM::Cache

Defined in:

llm/cache.cr

Class Method Summary

  • .cache_dir : String
  • .clear : Int32
  • .delete(key : String) : Bool
  • .disable : Nil
  • .disabled_by_env? : Bool
  • .enable : Nil
  • .enabled? : Bool
  • .ensure_dir : Nil
  • .fetch(key : String) : String | Nil
  • .key(provider : String, model : String, kind : String, format : String, payload : String) : String
  • .path_for(key : String) : String
  • .purge_older_than(days : Int32) : Int32
  • .stats : Hash(String, Int64)
  • .store(key : String, content : String) : Bool

Class Method Detail

def self.cache_dir : String #

Returns the path of the cache directory.

[View source]
def self.clear : Int32 #

Clear all cache entries. Returns the number of deleted files.
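
For example (assuming the shard that defines LLM::Cache has been required):

  removed = LLM::Cache.clear
  puts "Removed #{removed} cache file(s)"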


[View source]
def self.delete(key : String) : Bool #

Remove a cached entry by key. Returns true if a file was removed.
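
A small sketch of invalidating a single entry; the key inputs below are illustrative values only:

  key = LLM::Cache.key("openai", "gpt-4o", "FILTER", "json", "src/main.cr, src/cli.cr")
  if LLM::Cache.delete(key)
    puts "entry removed"
  else
    puts "nothing cached under that key"
  end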


[View source]
def self.disable : Nil #

Disable the cache.

[View source]
def self.disabled_by_env? : Bool #

Returns true if caching has been disabled via an environment variable.

[View source]
def self.enable : Nil #

Enable the cache.

[View source]
def self.enabled? : Bool #

Returns true if the cache is enabled.

[View source]
def self.ensure_dir : Nil #

Ensure the cache directory exists.


[View source]
def self.fetch(key : String) : String | Nil #

Fetch cached content by key. Returns nil if no entry is present.
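
A minimal sketch of a cache lookup; the file read here only stands in for real payload content:

  key = LLM::Cache.key("ollama", "llama3", "ANALYZE", "json", File.read("src/app.cr"))
  if cached = LLM::Cache.fetch(key)
    puts "cache hit: #{cached.bytesize} bytes"
  else
    puts "cache miss"
  end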


[View source]
def self.key(provider : String, model : String, kind : String, format : String, payload : String) : String #

Build a deterministic cache key from the following inputs:

  • provider: "openai", "ollama", url, etc.
  • model: "gpt-4o", "llama3", etc.
  • kind: logical operation, e.g. "FILTER", "ANALYZE", "BUNDLE_ANALYZE"
  • format: response_format string (e.g., "json" or JSON schema string)
  • payload: variable content (file list, source code, bundle, etc.)

Returns a hex-encoded SHA256 digest.
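
A minimal sketch of building a key; because the digest is deterministic, identical inputs produce the same key across runs (the payload below is illustrative):

  payload = File.read("shard.yml") # any variable content
  key = LLM::Cache.key("openai", "gpt-4o", "ANALYZE", "json", payload)
  key.size # => 64 (a hex-encoded SHA256 digest is 64 characters)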


[View source]
def self.path_for(key : String) : String #

Get the file system path for a given key.
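
For example (assuming, as seems likely, that the returned path lives under the cache directory):

  key  = LLM::Cache.key("openai", "gpt-4o", "FILTER", "json", "spec/, src/")
  path = LLM::Cache.path_for(key)
  puts path # presumably a file under LLM::Cache.cache_dir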


[View source]
def self.purge_older_than(days : Int32) : Int32 #

Purge entries older than the specified number of days. Returns the number of deleted files.
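
For example, keeping only the last 30 days of entries:

  deleted = LLM::Cache.purge_older_than(30)
  puts "purged #{deleted} entries older than 30 days"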


[View source]
def self.stats : Hash(String, Int64) #

Returns simple statistics for the cache directory:

  • "entries": number of files
  • "bytes": total size in bytes

[View source]
def self.store(key : String, content : String) : Bool #

Store content for a key. Returns true on success.
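
A typical read-through pattern combining .key, .fetch, and .store; call_llm and the file path are placeholders, not part of this module:

  # Stand-in for the real model call; not provided by LLM::Cache.
  def call_llm(payload : String) : String
    %({"result": "stub"})
  end

  payload = File.read("shard.yml") # illustrative variable content
  key = LLM::Cache.key("openai", "gpt-4o", "BUNDLE_ANALYZE", "json", payload)
  response = LLM::Cache.fetch(key)
  if response.nil?
    response = call_llm(payload)
    LLM::Cache.store(key, response) # true on success
  end
  puts response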


[View source]