class OpenRouter::CompletionRequest

Defined in:

openrouter/types/completion_request.cr

Constructors

Instance Method Summary

Constructor Detail

def self.new(messages : Array(Message), model : String | Nil = nil, tools : Array(Tool) = [] of Tool) #

[View source]
def self.new(prompt : String, model : String | Nil = nil, tools : Array(Tool) = [] of Tool) #

[View source]

Instance Method Detail

def add_tool(tool : Tool) #

[View source]
def frequency_penalty : Float32 | Nil #

Range: (-2, 2)


[View source]
def frequency_penalty=(frequency_penalty : Float32 | Nil) #

Range: (-2, 2)


[View source]
def logit_bias_key : Float32 | Nil #

[View source]
def logit_bias_key=(logit_bias_key : Float32 | Nil) #

[View source]
def logit_bias_value : Float32 | Nil #

[View source]
def logit_bias_value=(logit_bias_value : Float32 | Nil) #

[View source]
def max_tokens : Int32 | Nil #

See LLM Parameters (openrouter.ai/docs/parameters)


[View source]
def max_tokens=(max_tokens : Int32 | Nil) #

See LLM Parameters (openrouter.ai/docs/parameters)


[View source]
def messages : Array(Message) | Nil #

The chat message history


[View source]
def messages=(messages : Array(Message) | Nil) #

The chat message history


[View source]
def min_p : Float32 | Nil #

Range: [0, 1]


[View source]
def min_p=(min_p : Float32 | Nil) #

Range: [0, 1]


[View source]
def model : String | Nil #

The model to use.

Browse models at openrouter.ai/models, or use the get_models method of the OpenRouter::Client


[View source]
def model=(model : String | Nil) #

The model to use.

Browse models at openrouter.ai/models, or use the get_models method of the OpenRouter::Client


[View source]
def models : Array(String) | Nil #

For models and route, see the "Model Routing" section at openrouter.ai/docs/model-routing


[View source]
def models=(models : Array(String) | Nil) #

For models and route, see the "Model Routing" section at openrouter.ai/docs/model-routing


[View source]
def presence_penalty : Float32 | Nil #

Range: (-2, 2)


[View source]
def presence_penalty=(presence_penalty : Float32 | Nil) #

Range: (-2, 2)


[View source]
def prompt : String | Nil #

A simple text prompt


[View source]
def prompt=(prompt : String | Nil) #

A simple text prompt


[View source]
def provider : String | Nil #

See "Provider Routing" section: openrouter.ai/docs/provider-routing


[View source]
def provider=(provider : String | Nil) #

See "Provider Routing" section: openrouter.ai/docs/provider-routing


[View source]
def repetition_penalty : Float32 | Nil #

Range: (0, 2)


[View source]
def repetition_penalty=(repetition_penalty : Float32 | Nil) #

Range: (0, 2)


[View source]
def route : String | Nil #

[View source]
def route=(route : String | Nil) #

[View source]
def seed : Int32 | Nil #

Advanced optional parameters


[View source]
def seed=(seed : Int32 | Nil) #

Advanced optional parameters


[View source]
def stop : String | Array(String) | Nil #

The stop tokens.


[View source]
def stop=(stop : String | Array(String) | Nil) #

The stop tokens.


[View source]
def stream : Bool #

Whether to stream the response.


[View source]
def stream=(stream : Bool) #

Whether to stream the response.


[View source]
def temperature : Float32 | Nil #

[View source]
def temperature=(temperature : Float32 | Nil) #

[View source]
def to_json(io : IO) #

[View source]
def to_json(json : JSON::Builder) #

[View source]
def tools : Array(Tool) #

Tool calling. Will be passed down as-is for providers implementing OpenAI's interface. For providers with custom interfaces, we transform and map the properties. Otherwise, we transform the tools into a YAML template. The model responds with an assistant message. See models supporting tool calling: openrouter.ai/models?supported_parameters=tools


[View source]
def tools=(tools : Array(Tool)) #

Tool calling. Will be passed down as-is for providers implementing OpenAI's interface. For providers with custom interfaces, we transform and map the properties. Otherwise, we transform the tools into a YAML template. The model responds with an assistant message. See models supporting tool calling: openrouter.ai/models?supported_parameters=tools


[View source]
def top_a : Float32 | Nil #

Range: [0, 1]


[View source]
def top_a=(top_a : Float32 | Nil) #

Range: [0, 1]


[View source]
def top_k : Float32 | Nil #

Range: (1, Infinity). Not available for OpenAI models.


[View source]
def top_k=(top_k : Float32 | Nil) #

Range: (1, Infinity). Not available for OpenAI models.


[View source]
def top_logprobs : Int32 | Nil #

[View source]
def top_logprobs=(top_logprobs : Int32 | Nil) #

[View source]
def top_p : Float32 | Nil #

Range: (0, 1)


[View source]
def top_p=(top_p : Float32 | Nil) #

Range: (0, 1)


[View source]
def transforms : Array(String) | Nil #

Reduce latency by providing the model with a predicted output (see https://platform.openai.com/docs/guides/latency-optimization#use-predicted-outputs), e.g. `prediction?: { type: 'content'; content: string; }`.

OpenRouter-only parameters

provider?: ProviderPreferences; // See "Prompt Transforms" section at openrouter.ai/docs/transforms


[View source]
def transforms=(transforms : Array(String) | Nil) #

Reduce latency by providing the model with a predicted output (see https://platform.openai.com/docs/guides/latency-optimization#use-predicted-outputs), e.g. `prediction?: { type: 'content'; content: string; }`.

OpenRouter-only parameters

provider?: ProviderPreferences; // See "Prompt Transforms" section at openrouter.ai/docs/transforms


[View source]