pyrit.prompt_target.OllamaChatTarget#

class OllamaChatTarget(*, endpoint: str = None, model_name: str = None, chat_message_normalizer: ~pyrit.chat_message_normalizer.chat_message_normalizer.ChatMessageNormalizer = <pyrit.chat_message_normalizer.chat_message_nop.ChatMessageNop object>, max_requests_per_minute: int | None = None, **httpx_client_kwargs: ~typing.Any | None)[source]#

Bases: PromptChatTarget

__init__(*, endpoint: str = None, model_name: str = None, chat_message_normalizer: ~pyrit.chat_message_normalizer.chat_message_normalizer.ChatMessageNormalizer = <pyrit.chat_message_normalizer.chat_message_nop.ChatMessageNop object>, max_requests_per_minute: int | None = None, **httpx_client_kwargs: ~typing.Any | None) None[source]#

Methods

__init__(*[, endpoint, model_name, ...])

dispose_db_engine()

Disposes of the DuckDB database engine to release database connections and resources.

get_identifier()

is_json_response_supported()

Indicates that this target supports JSON response format.

is_response_format_json(request_piece)

Checks if the response format is JSON and ensures the target supports it.

send_prompt_async(**kwargs)

Asynchronously sends a normalized prompt to the prompt target.

set_system_prompt(*, system_prompt, ...[, ...])

Sets the system prompt for the prompt target.

Attributes

ENDPOINT_URI_ENVIRONMENT_VARIABLE = 'OLLAMA_ENDPOINT'#
MODEL_NAME_ENVIRONMENT_VARIABLE = 'OLLAMA_MODEL_NAME'#
is_json_response_supported() bool[source]#

Indicates that this target supports JSON response format.

async send_prompt_async(**kwargs)#

Asynchronously sends a normalized prompt to the prompt target.

supported_converters: list#