mirror of
https://github.com/we-promise/sure
synced 2026-04-25 17:15:07 +02:00
* Wire conversation history through OpenAI responses API * Fix RuboCop hash brace spacing in assistant tests * Pipelock ignores * Batch fixes --------- Co-authored-by: sokiee <sokysrm@gmail.com>
100 lines
4.6 KiB
Plaintext
<%# Self-hosting settings: OpenAI / LLM provider configuration form. %>
<%# Each field is disabled when its corresponding ENV var is present, and the %>
<%# header text switches to an "already configured via environment" message, %>
<%# so environment configuration appears to take precedence over DB settings. %>
<div class="space-y-4">
  <div>
    <h2 class="font-medium mb-1"><%= t(".title") %></h2>
    <% if ENV["OPENAI_ACCESS_TOKEN"].present? %>
      <p class="text-sm text-secondary"><%= t(".env_configured_message") %></p>
    <% else %>
      <p class="text-secondary text-sm mb-4"><%= t(".description") %></p>
    <% end %>
  </div>

  <%# PATCHes Setting changes to settings_hosting_path. The auto-submit-form %>
  <%# Stimulus controller submits the form when a field fires its "blur" event %>
  <%# (see the trigger-event value below); each input opts in via its data target. %>
  <%= styled_form_with model: Setting.new,
        url: settings_hosting_path,
        method: :patch,
        class: "space-y-4",
        data: {
          controller: "auto-submit-form",
          "auto-submit-form-trigger-event-value": "blur"
        } do |form| %>

    <%# When a token is already stored, render a "********" mask instead of the %>
    <%# real secret so it is never exposed in the page source. %>
    <%= form.password_field :openai_access_token,
          label: t(".access_token_label"),
          placeholder: t(".access_token_placeholder"),
          value: (Setting.openai_access_token.present? ? "********" : nil),
          autocomplete: "off",
          autocapitalize: "none",
          spellcheck: "false",
          inputmode: "text",
          disabled: ENV["OPENAI_ACCESS_TOKEN"].present?,
          data: { "auto-submit-form-target": "auto" } %>

    <%# Optional custom API endpoint (e.g. for OpenAI-compatible providers). %>
    <%= form.text_field :openai_uri_base,
          label: t(".uri_base_label"),
          placeholder: t(".uri_base_placeholder"),
          value: Setting.openai_uri_base,
          autocomplete: "off",
          autocapitalize: "none",
          spellcheck: "false",
          inputmode: "url",
          disabled: ENV["OPENAI_URI_BASE"].present?,
          data: { "auto-submit-form-target": "auto" } %>

    <%= form.text_field :openai_model,
          label: t(".model_label"),
          placeholder: t(".model_placeholder"),
          value: Setting.openai_model,
          autocomplete: "off",
          autocapitalize: "none",
          spellcheck: "false",
          inputmode: "text",
          disabled: ENV["OPENAI_MODEL"].present?,
          data: { "auto-submit-form-target": "auto" } %>

    <%# JSON response-format strategy; "" = auto. NOTE(review): this field is %>
    <%# disabled by LLM_JSON_MODE rather than an OPENAI_*-named variable like %>
    <%# the fields above — confirm the env var name is intentional. %>
    <%= form.select :openai_json_mode,
          options_for_select(
            [
              [t(".json_mode_auto"), ""],
              [t(".json_mode_strict"), "strict"],
              [t(".json_mode_none"), "none"],
              [t(".json_mode_json_object"), "json_object"]
            ],
            Setting.openai_json_mode
          ),
          { label: t(".json_mode_label") },
          { disabled: ENV["LLM_JSON_MODE"].present?,
            data: { "auto-submit-form-target": "auto" } } %>
    <p class="text-xs text-secondary mt-1"><%= t(".json_mode_help") %></p>

    <%# Token/item budget controls for LLM calls; each mirrors an LLM_* env var. %>
    <div class="pt-4 border-t border-secondary">
      <h3 class="font-medium mb-1"><%= t(".budget_heading") %></h3>
      <p class="text-xs text-secondary mb-3"><%= t(".budget_description") %></p>

      <%= form.number_field :llm_context_window,
            label: t(".context_window_label"),
            placeholder: "2048",
            value: Setting.llm_context_window,
            min: 256,
            disabled: ENV["LLM_CONTEXT_WINDOW"].present?,
            data: { "auto-submit-form-target": "auto" } %>
      <p class="text-xs text-secondary mt-1 mb-3"><%= t(".context_window_help") %></p>

      <%= form.number_field :llm_max_response_tokens,
            label: t(".max_response_tokens_label"),
            placeholder: "512",
            value: Setting.llm_max_response_tokens,
            min: 64,
            disabled: ENV["LLM_MAX_RESPONSE_TOKENS"].present?,
            data: { "auto-submit-form-target": "auto" } %>
      <p class="text-xs text-secondary mt-1 mb-3"><%= t(".max_response_tokens_help") %></p>

      <%= form.number_field :llm_max_items_per_call,
            label: t(".max_items_per_call_label"),
            placeholder: "25",
            value: Setting.llm_max_items_per_call,
            min: 1,
            disabled: ENV["LLM_MAX_ITEMS_PER_CALL"].present?,
            data: { "auto-submit-form-target": "auto" } %>
      <p class="text-xs text-secondary mt-1"><%= t(".max_items_per_call_help") %></p>
    </div>
  <% end %>
</div>
|