Package Methods (0.7.0)

Summary of entries of Methods for langchain-google-cloud-sql-pg.

langchain_google_cloud_sql_pg.chat_message_history._aget_messages

_aget_messages(
    engine: langchain_google_cloud_sql_pg.engine.PostgresEngine,
    session_id: str,
    table_name: str,
) -> typing.List[langchain_core.messages.base.BaseMessage]

Retrieve the messages from PostgreSQL.

See more: langchain_google_cloud_sql_pg.chat_message_history._aget_messages

langchain_google_cloud_sql_pg.engine._get_iam_principal_email

_get_iam_principal_email(credentials: google.auth.credentials.Credentials) -> str

Get the email address associated with the currently authenticated IAM principal.

See more: langchain_google_cloud_sql_pg.engine._get_iam_principal_email

langchain_google_cloud_sql_pg.loader._parse_doc_from_row

_parse_doc_from_row(content_columns: typing.Iterable[str], metadata_columns: typing.Iterable[str], row: dict, metadata_json_column: typing.Optional[str] = 'langchain_metadata', formatter: typing.Callable = 

Parse a row into a document.

See more: langchain_google_cloud_sql_pg.loader._parse_doc_from_row

langchain_google_cloud_sql_pg.loader._parse_row_from_doc

_parse_row_from_doc(
    doc: langchain_core.documents.base.Document,
    column_names: typing.Iterable[str],
    content_column: str = "page_content",
    metadata_json_column: typing.Optional[str] = "langchain_metadata",
) -> typing.Dict

Parse document into a dictionary of rows.

See more: langchain_google_cloud_sql_pg.loader._parse_row_from_doc

langchain_google_cloud_sql_pg.loader.csv_formatter

csv_formatter(row, content_columns) -> str

CSV document formatter.

See more: langchain_google_cloud_sql_pg.loader.csv_formatter

langchain_google_cloud_sql_pg.loader.json_formatter

json_formatter(row, content_columns) -> str

JSON document formatter.

See more: langchain_google_cloud_sql_pg.loader.json_formatter

langchain_google_cloud_sql_pg.loader.text_formatter

text_formatter(row, content_columns) -> str

Text document formatter.

See more: langchain_google_cloud_sql_pg.loader.text_formatter

langchain_google_cloud_sql_pg.loader.yaml_formatter

yaml_formatter(row, content_columns) -> str

YAML document formatter.

See more: langchain_google_cloud_sql_pg.loader.yaml_formatter
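
All four formatters share the signature formatter(row, content_columns) -> str and are used by PostgresLoader to turn selected columns into a Document's page content. A minimal sketch of calling two of them directly; the example row and column names are made up for illustration, and the exact separators are formatter-specific:

from langchain_google_cloud_sql_pg.loader import csv_formatter, text_formatter

row = {"title": "Intro to pgvector", "body": "Store embeddings in PostgreSQL."}

# Each formatter concatenates the listed columns into one string;
# csv_formatter uses a CSV-style separator, text_formatter a plain-text one.
print(text_formatter(row, ["title", "body"]))
print(csv_formatter(row, ["title", "body"]))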

langchain_google_cloud_sql_pg.vectorstore.cosine_similarity

cosine_similarity(
    X: typing.Union[
        typing.List[typing.List[float]], typing.List[numpy.ndarray], numpy.ndarray
    ],
    Y: typing.Union[
        typing.List[typing.List[float]], typing.List[numpy.ndarray], numpy.ndarray
    ],
) -> numpy.ndarray

Row-wise cosine similarity between two equal-width matrices.

See more: langchain_google_cloud_sql_pg.vectorstore.cosine_similarity
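
Because the similarity is computed row-wise, an X of shape (m, d) and a Y of shape (n, d) yield an (m, n) matrix. A minimal sketch:

import numpy as np

from langchain_google_cloud_sql_pg.vectorstore import cosine_similarity

X = np.array([[1.0, 0.0], [0.0, 1.0]])
Y = np.array([[1.0, 0.0], [1.0, 1.0]])

# similarities[i, j] is the cosine similarity between X[i] and Y[j].
similarities = cosine_similarity(X, Y)
print(similarities)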

langchain_google_cloud_sql_pg.vectorstore.maximal_marginal_relevance

maximal_marginal_relevance(
    query_embedding: numpy.ndarray,
    embedding_list: list,
    lambda_mult: float = 0.5,
    k: int = 4,
) -> typing.List[int]

Calculate maximal marginal relevance.

See more: langchain_google_cloud_sql_pg.vectorstore.maximal_marginal_relevance
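
The function balances similarity to the query against diversity among the already-selected results (lambda_mult near 1 favors similarity, near 0 favors diversity) and returns indices into embedding_list. A toy sketch with made-up vectors:

import numpy as np

from langchain_google_cloud_sql_pg.vectorstore import maximal_marginal_relevance

query_embedding = np.array([1.0, 0.0])
embedding_list = [
    np.array([0.9, 0.1]),    # close to the query
    np.array([0.89, 0.11]),  # near-duplicate of the first candidate
    np.array([0.1, 0.9]),    # far from the query but adds diversity
]

# Returns the indices of the k selected embeddings; lowering lambda_mult
# pushes the selection toward the more diverse third candidate.
print(maximal_marginal_relevance(query_embedding, embedding_list, lambda_mult=0.5, k=2))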

langchain_google_cloud_sql_pg.chat_message_history.PostgresChatMessageHistory

PostgresChatMessageHistory(
    key,
    engine: langchain_google_cloud_sql_pg.engine.PostgresEngine,
    session_id: str,
    table_name: str,
    messages: typing.List[langchain_core.messages.base.BaseMessage],
)

langchain_google_cloud_sql_pg.chat_message_history.PostgresChatMessageHistory.aadd_message

aadd_message(message: langchain_core.messages.base.BaseMessage) -> None

langchain_google_cloud_sql_pg.chat_message_history.PostgresChatMessageHistory.aadd_messages

aadd_messages(
    messages: typing.Sequence[langchain_core.messages.base.BaseMessage],
) -> None

Append a list of messages to the record in PostgreSQL.

See more: langchain_google_cloud_sql_pg.chat_message_history.PostgresChatMessageHistory.aadd_messages

langchain_google_cloud_sql_pg.chat_message_history.PostgresChatMessageHistory.aclear

aclear() -> None

langchain_google_cloud_sql_pg.chat_message_history.PostgresChatMessageHistory.add_message

add_message(message: langchain_core.messages.base.BaseMessage) -> None

langchain_google_cloud_sql_pg.chat_message_history.PostgresChatMessageHistory.add_messages

add_messages(
    messages: typing.Sequence[langchain_core.messages.base.BaseMessage],
) -> None

Append a list of messages to the record in PostgreSQL.

See more: langchain_google_cloud_sql_pg.chat_message_history.PostgresChatMessageHistory.add_messages

langchain_google_cloud_sql_pg.chat_message_history.PostgresChatMessageHistory.async_messages

async_messages() -> None

langchain_google_cloud_sql_pg.chat_message_history.PostgresChatMessageHistory.clear

clear() -> None

langchain_google_cloud_sql_pg.chat_message_history.PostgresChatMessageHistory.create

create(
    engine: langchain_google_cloud_sql_pg.engine.PostgresEngine,
    session_id: str,
    table_name: str,
)

Create a new PostgresChatMessageHistory instance.

See more: langchain_google_cloud_sql_pg.chat_message_history.PostgresChatMessageHistory.create

langchain_google_cloud_sql_pg.chat_message_history.PostgresChatMessageHistory.create_sync

create_sync(
    engine: langchain_google_cloud_sql_pg.engine.PostgresEngine,
    session_id: str,
    table_name: str,
)

Create a new PostgresChatMessageHistory instance.

See more: langchain_google_cloud_sql_pg.chat_message_history.PostgresChatMessageHistory.create_sync
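
A minimal end-to-end sketch of the synchronous path; the project, instance, session, and table names below are placeholders, and the backing table is created up front with init_chat_history_table:

from langchain_core.messages import AIMessage, HumanMessage

from langchain_google_cloud_sql_pg.chat_message_history import PostgresChatMessageHistory
from langchain_google_cloud_sql_pg.engine import PostgresEngine

engine = PostgresEngine.from_instance(
    project_id="my-project",    # placeholder values throughout
    region="us-central1",
    instance="my-instance",
    database="my-database",
)
engine.init_chat_history_table(table_name="chat_history")

history = PostgresChatMessageHistory.create_sync(
    engine=engine, session_id="session-123", table_name="chat_history"
)
history.add_message(HumanMessage(content="Hi, what is pgvector?"))
history.add_message(AIMessage(content="A PostgreSQL extension for vector search."))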

langchain_google_cloud_sql_pg.chat_message_history.PostgresChatMessageHistory.sync_messages

sync_messages() -> None

langchain_google_cloud_sql_pg.engine.Column.__post_init__

__post_init__()

Check if initialization parameters are valid.

See more: langchain_google_cloud_sql_pg.engine.Column.__post_init__

langchain_google_cloud_sql_pg.engine.PostgresEngine

PostgresEngine(
    key: object,
    engine: sqlalchemy.ext.asyncio.engine.AsyncEngine,
    loop: typing.Optional[asyncio.events.AbstractEventLoop],
    thread: typing.Optional[threading.Thread],
)

PostgresEngine constructor.

See more: langchain_google_cloud_sql_pg.engine.PostgresEngine

langchain_google_cloud_sql_pg.engine.PostgresEngine._aexecute

_aexecute(query: str, params: typing.Optional[dict] = None)

langchain_google_cloud_sql_pg.engine.PostgresEngine._aexecute_outside_tx

_aexecute_outside_tx(query: str)

langchain_google_cloud_sql_pg.engine.PostgresEngine._afetch

_afetch(query: str, params: typing.Optional[dict] = None)

Fetch results from a SQL query.

See more: langchain_google_cloud_sql_pg.engine.PostgresEngine._afetch

langchain_google_cloud_sql_pg.engine.PostgresEngine._aload_table_schema

_aload_table_schema(table_name: str) -> sqlalchemy.sql.schema.Table

Load the table schema from an existing table in the PostgreSQL database.

See more: langchain_google_cloud_sql_pg.engine.PostgresEngine._aload_table_schema

langchain_google_cloud_sql_pg.engine.PostgresEngine._create

_create(
    project_id: str,
    region: str,
    instance: str,
    database: str,
    ip_type: typing.Union[str, google.cloud.sql.connector.enums.IPTypes],
    user: typing.Optional[str] = None,
    password: typing.Optional[str] = None,
    loop: typing.Optional[asyncio.events.AbstractEventLoop] = None,
    thread: typing.Optional[threading.Thread] = None,
    quota_project: typing.Optional[str] = None,
    iam_account_email: typing.Optional[str] = None,
) -> langchain_google_cloud_sql_pg.engine.PostgresEngine

Create a PostgresEngine instance.

See more: langchain_google_cloud_sql_pg.engine.PostgresEngine._create

langchain_google_cloud_sql_pg.engine.PostgresEngine._execute

_execute(query: str, params: typing.Optional[dict] = None)

langchain_google_cloud_sql_pg.engine.PostgresEngine._fetch

_fetch(query: str, params: typing.Optional[dict] = None)

Fetch results from a SQL query.

See more: langchain_google_cloud_sql_pg.engine.PostgresEngine._fetch

langchain_google_cloud_sql_pg.engine.PostgresEngine._run_as_sync

_run_as_sync(
    coro: typing.Awaitable[langchain_google_cloud_sql_pg.engine.T],
) -> langchain_google_cloud_sql_pg.engine.T

Run an async coroutine synchronously.

See more: langchain_google_cloud_sql_pg.engine.PostgresEngine._run_as_sync

langchain_google_cloud_sql_pg.engine.PostgresEngine.afrom_instance

afrom_instance(
    project_id: str,
    region: str,
    instance: str,
    database: str,
    user: typing.Optional[str] = None,
    password: typing.Optional[str] = None,
    ip_type: typing.Union[
        str, google.cloud.sql.connector.enums.IPTypes
    ] = IPTypes.PUBLIC,
    quota_project: typing.Optional[str] = None,
    iam_account_email: typing.Optional[str] = None,
) -> langchain_google_cloud_sql_pg.engine.PostgresEngine

Create a PostgresEngine from a Postgres instance.

See more: langchain_google_cloud_sql_pg.engine.PostgresEngine.afrom_instance

langchain_google_cloud_sql_pg.engine.PostgresEngine.ainit_chat_history_table

ainit_chat_history_table(table_name) -> None

Create a Cloud SQL table to store chat history.

See more: langchain_google_cloud_sql_pg.engine.PostgresEngine.ainit_chat_history_table

langchain_google_cloud_sql_pg.engine.PostgresEngine.ainit_document_table

ainit_document_table(
    table_name: str,
    content_column: str = "page_content",
    metadata_columns: typing.List[langchain_google_cloud_sql_pg.engine.Column] = [],
    metadata_json_column: str = "langchain_metadata",
    store_metadata: bool = True,
) -> None

Create a table for saving langchain documents.

See more: langchain_google_cloud_sql_pg.engine.PostgresEngine.ainit_document_table

langchain_google_cloud_sql_pg.engine.PostgresEngine.ainit_vectorstore_table

ainit_vectorstore_table(
    table_name: str,
    vector_size: int,
    content_column: str = "content",
    embedding_column: str = "embedding",
    metadata_columns: typing.List[langchain_google_cloud_sql_pg.engine.Column] = [],
    metadata_json_column: str = "langchain_metadata",
    id_column: str = "langchain_id",
    overwrite_existing: bool = False,
    store_metadata: bool = True,
) -> None

Create a table for saving vectors to be used with PostgresVectorStore.

See more: langchain_google_cloud_sql_pg.engine.PostgresEngine.ainit_vectorstore_table

langchain_google_cloud_sql_pg.engine.PostgresEngine.from_engine

from_engine(
    engine: sqlalchemy.ext.asyncio.engine.AsyncEngine,
) -> langchain_google_cloud_sql_pg.engine.PostgresEngine

Create a PostgresEngine instance from an AsyncEngine.

See more: langchain_google_cloud_sql_pg.engine.PostgresEngine.from_engine

langchain_google_cloud_sql_pg.engine.PostgresEngine.from_instance

from_instance(
    project_id: str,
    region: str,
    instance: str,
    database: str,
    user: typing.Optional[str] = None,
    password: typing.Optional[str] = None,
    ip_type: typing.Union[
        str, google.cloud.sql.connector.enums.IPTypes
    ] = IPTypes.PUBLIC,
    quota_project: typing.Optional[str] = None,
    iam_account_email: typing.Optional[str] = None,
) -> langchain_google_cloud_sql_pg.engine.PostgresEngine

Create a PostgresEngine from a Postgres instance.

See more: langchain_google_cloud_sql_pg.engine.PostgresEngine.from_instance
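
A minimal sketch of creating an engine with the synchronous factory; every identifier below is a placeholder. The async counterpart afrom_instance takes the same arguments and is awaited inside an event loop.

from langchain_google_cloud_sql_pg.engine import PostgresEngine

# Built-in database authentication; user and password are optional and,
# when omitted, the engine is intended to fall back to IAM authentication.
engine = PostgresEngine.from_instance(
    project_id="my-project",    # placeholder GCP project
    region="us-central1",       # placeholder region
    instance="my-instance",     # placeholder Cloud SQL instance name
    database="my-database",     # placeholder database name
    user="my-user",             # placeholder database user
    password="my-password",     # placeholder password
)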

langchain_google_cloud_sql_pg.engine.PostgresEngine.init_chat_history_table

init_chat_history_table(table_name) -> None

Create a Cloud SQL table to store chat history.

See more: langchain_google_cloud_sql_pg.engine.PostgresEngine.init_chat_history_table

langchain_google_cloud_sql_pg.engine.PostgresEngine.init_document_table

init_document_table(
    table_name: str,
    content_column: str = "page_content",
    metadata_columns: typing.List[langchain_google_cloud_sql_pg.engine.Column] = [],
    metadata_json_column: str = "langchain_metadata",
    store_metadata: bool = True,
) -> None

Create a table for saving langchain documents.

See more: langchain_google_cloud_sql_pg.engine.PostgresEngine.init_document_table

langchain_google_cloud_sql_pg.engine.PostgresEngine.init_vectorstore_table

init_vectorstore_table(
    table_name: str,
    vector_size: int,
    content_column: str = "content",
    embedding_column: str = "embedding",
    metadata_columns: typing.List[langchain_google_cloud_sql_pg.engine.Column] = [],
    metadata_json_column: str = "langchain_metadata",
    id_column: str = "langchain_id",
    overwrite_existing: bool = False,
    store_metadata: bool = True,
) -> None

Create a table for saving vectors to be used with PostgresVectorStore.

See more: langchain_google_cloud_sql_pg.engine.PostgresEngine.init_vectorstore_table
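
A minimal sketch of preparing a vector table; the table name, vector size, and metadata column are placeholders, and the positional Column("name", "TYPE") usage of the helper dataclass from langchain_google_cloud_sql_pg.engine is an assumption for illustration:

from langchain_google_cloud_sql_pg.engine import Column, PostgresEngine

engine = PostgresEngine.from_instance(
    project_id="my-project", region="us-central1",
    instance="my-instance", database="my-database",
)

# vector_size must match the dimensionality of the embedding model
# that will populate the table (768 is a placeholder).
engine.init_vectorstore_table(
    table_name="my_vectors",
    vector_size=768,
    metadata_columns=[Column("source", "TEXT")],  # optional typed metadata column
    overwrite_existing=False,
)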

langchain_google_cloud_sql_pg.indexes.BaseIndex.index_options

index_options() -> str

Set index query options for vector store initialization.

See more: langchain_google_cloud_sql_pg.indexes.BaseIndex.index_options

langchain_google_cloud_sql_pg.indexes.DistanceStrategy._generate_next_value_

_generate_next_value_(start, count, last_values)

Generate the next value when not given.

See more: langchain_google_cloud_sql_pg.indexes.DistanceStrategy._generate_next_value_

langchain_google_cloud_sql_pg.indexes.HNSWIndex.index_options

index_options() -> str

Set index query options for vector store initialization.

See more: langchain_google_cloud_sql_pg.indexes.HNSWIndex.index_options
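
index_options renders the per-index parameters into the option string used when the index is created. A small sketch that assumes both index classes can be constructed with their default field values:

from langchain_google_cloud_sql_pg.indexes import HNSWIndex, IVFFlatIndex

# With no arguments the library defaults are used
# (e.g. m / ef_construction for HNSW, lists for IVFFlat).
print(HNSWIndex().index_options())
print(IVFFlatIndex().index_options())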

langchain_google_cloud_sql_pg.indexes.HNSWQueryOptions.to_string

to_string()

Convert index attributes to string.

See more: langchain_google_cloud_sql_pg.indexes.HNSWQueryOptions.to_string

langchain_google_cloud_sql_pg.indexes.IVFFlatIndex.index_options

index_options() -> str

Set index query options for vector store initialization.

See more: langchain_google_cloud_sql_pg.indexes.IVFFlatIndex.index_options

langchain_google_cloud_sql_pg.indexes.IVFFlatQueryOptions.to_string

to_string()

Convert index attributes to string.

See more: langchain_google_cloud_sql_pg.indexes.IVFFlatQueryOptions.to_string

langchain_google_cloud_sql_pg.indexes.QueryOptions.to_string

to_string() -> str

Convert index attributes to string.

See more: langchain_google_cloud_sql_pg.indexes.QueryOptions.to_string

langchain_google_cloud_sql_pg.loader.PostgresDocumentSaver

PostgresDocumentSaver(
    key,
    engine: langchain_google_cloud_sql_pg.engine.PostgresEngine,
    table_name: str,
    content_column: str,
    metadata_columns: typing.List[str] = [],
    metadata_json_column: typing.Optional[str] = None,
)

PostgresDocumentSaver constructor.

See more: langchain_google_cloud_sql_pg.loader.PostgresDocumentSaver

langchain_google_cloud_sql_pg.loader.PostgresDocumentSaver._aload_table_schema

_aload_table_schema() -> sqlalchemy.sql.schema.Table

Load the table schema from an existing table in the PostgreSQL database.

See more: langchain_google_cloud_sql_pg.loader.PostgresDocumentSaver._aload_table_schema

langchain_google_cloud_sql_pg.loader.PostgresDocumentSaver.aadd_documents

aadd_documents(docs: typing.List[langchain_core.documents.base.Document]) -> None

Save documents in the DocumentSaver table.

See more: langchain_google_cloud_sql_pg.loader.PostgresDocumentSaver.aadd_documents

langchain_google_cloud_sql_pg.loader.PostgresDocumentSaver.add_documents

add_documents(docs: typing.List[langchain_core.documents.base.Document]) -> None

Save documents in the DocumentSaver table.

See more: langchain_google_cloud_sql_pg.loader.PostgresDocumentSaver.add_documents

langchain_google_cloud_sql_pg.loader.PostgresDocumentSaver.adelete

adelete(docs: typing.List[langchain_core.documents.base.Document]) -> None

Delete all instances of a document from the DocumentSaver table by matching the entire Document object.

See more: langchain_google_cloud_sql_pg.loader.PostgresDocumentSaver.adelete

langchain_google_cloud_sql_pg.loader.PostgresDocumentSaver.create

create(
    engine: langchain_google_cloud_sql_pg.engine.PostgresEngine,
    table_name: str,
    content_column: str = "page_content",
    metadata_columns: typing.List[str] = [],
    metadata_json_column: typing.Optional[str] = "langchain_metadata",
)

Create a PostgresDocumentSaver instance.

See more: langchain_google_cloud_sql_pg.loader.PostgresDocumentSaver.create

langchain_google_cloud_sql_pg.loader.PostgresDocumentSaver.create_sync

create_sync(
    engine: langchain_google_cloud_sql_pg.engine.PostgresEngine,
    table_name: str,
    content_column: str = "page_content",
    metadata_columns: typing.List[str] = [],
    metadata_json_column: str = "langchain_metadata",
)

Create a PostgresDocumentSaver instance.

See more: langchain_google_cloud_sql_pg.loader.PostgresDocumentSaver.create_sync
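
A minimal sketch of saving documents synchronously; the project, instance, and table names are placeholders, and the document table is created first with init_document_table:

from langchain_core.documents import Document

from langchain_google_cloud_sql_pg.engine import PostgresEngine
from langchain_google_cloud_sql_pg.loader import PostgresDocumentSaver

engine = PostgresEngine.from_instance(
    project_id="my-project", region="us-central1",
    instance="my-instance", database="my-database",
)
engine.init_document_table(table_name="my_documents")

saver = PostgresDocumentSaver.create_sync(engine=engine, table_name="my_documents")
saver.add_documents(
    [Document(page_content="Cloud SQL supports pgvector.", metadata={"source": "notes"})]
)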

langchain_google_cloud_sql_pg.loader.PostgresDocumentSaver.delete

delete(docs: typing.List[langchain_core.documents.base.Document]) -> None

Delete all instances of a document from the DocumentSaver table by matching the entire Document object.

See more: langchain_google_cloud_sql_pg.loader.PostgresDocumentSaver.delete

langchain_google_cloud_sql_pg.loader.PostgresLoader

PostgresLoader(
    key,
    engine: langchain_google_cloud_sql_pg.engine.PostgresEngine,
    query: str,
    content_columns: typing.List[str],
    metadata_columns: typing.List[str],
    formatter: typing.Callable,
    metadata_json_column: typing.Optional[str] = None,
)

PostgresLoader constructor.

See more: langchain_google_cloud_sql_pg.loader.PostgresLoader

langchain_google_cloud_sql_pg.loader.PostgresLoader._collect_async_items

_collect_async_items(docs_generator)

Exhaust the document generator into a list.

See more: langchain_google_cloud_sql_pg.loader.PostgresLoader._collect_async_items

langchain_google_cloud_sql_pg.loader.PostgresLoader.alazy_load

alazy_load() -> typing.AsyncIterator[langchain_core.documents.base.Document]

Load PostgreSQL data into Document objects lazily.

See more: langchain_google_cloud_sql_pg.loader.PostgresLoader.alazy_load

langchain_google_cloud_sql_pg.loader.PostgresLoader.aload

aload() -> typing.List[langchain_core.documents.base.Document]

Load PostgreSQL data into Document objects.

See more: langchain_google_cloud_sql_pg.loader.PostgresLoader.aload

langchain_google_cloud_sql_pg.loader.PostgresLoader.create

create(
    engine: langchain_google_cloud_sql_pg.engine.PostgresEngine,
    query: typing.Optional[str] = None,
    table_name: typing.Optional[str] = None,
    content_columns: typing.Optional[typing.List[str]] = None,
    metadata_columns: typing.Optional[typing.List[str]] = None,
    metadata_json_column: typing.Optional[str] = None,
    format: typing.Optional[str] = None,
    formatter: typing.Optional[typing.Callable] = None,
)

Create a new PostgresLoader instance.

See more: langchain_google_cloud_sql_pg.loader.PostgresLoader.create

langchain_google_cloud_sql_pg.loader.PostgresLoader.create_sync

create_sync(
    engine: langchain_google_cloud_sql_pg.engine.PostgresEngine,
    query: typing.Optional[str] = None,
    table_name: typing.Optional[str] = None,
    content_columns: typing.Optional[typing.List[str]] = None,
    metadata_columns: typing.Optional[typing.List[str]] = None,
    metadata_json_column: typing.Optional[str] = None,
    format: typing.Optional[str] = None,
    formatter: typing.Optional[typing.Callable] = None,
)

Create a new PostgresLoader instance.

See more: langchain_google_cloud_sql_pg.loader.PostgresLoader.create_sync
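
A minimal sketch of loading rows as documents with the synchronous factory; either table_name or a full SQL query may be supplied, and the names below are placeholders:

from langchain_google_cloud_sql_pg.engine import PostgresEngine
from langchain_google_cloud_sql_pg.loader import PostgresLoader

engine = PostgresEngine.from_instance(
    project_id="my-project", region="us-central1",
    instance="my-instance", database="my-database",
)

# content_columns selects which columns become page_content;
# remaining columns can be captured as metadata.
loader = PostgresLoader.create_sync(
    engine=engine,
    table_name="my_documents",
    content_columns=["page_content"],
)
docs = loader.load()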

langchain_google_cloud_sql_pg.loader.PostgresLoader.lazy_load

lazy_load() -> typing.Iterator[langchain_core.documents.base.Document]

Load PostgreSQL data into Document objects lazily.

See more: langchain_google_cloud_sql_pg.loader.PostgresLoader.lazy_load

langchain_google_cloud_sql_pg.loader.PostgresLoader.load

load() -> typing.List[langchain_core.documents.base.Document]

Load PostgreSQL data into Document objects.

See more: langchain_google_cloud_sql_pg.loader.PostgresLoader.load

langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore

PostgresVectorStore(
    key,
    engine: langchain_google_cloud_sql_pg.engine.PostgresEngine,
    embedding_service: langchain_core.embeddings.embeddings.Embeddings,
    table_name: str,
    content_column: str = "content",
    embedding_column: str = "embedding",
    metadata_columns: typing.List[str] = [],
    id_column: str = "langchain_id",
    metadata_json_column: typing.Optional[str] = "langchain_metadata",
    distance_strategy: langchain_google_cloud_sql_pg.indexes.DistanceStrategy = DistanceStrategy.COSINE_DISTANCE,
    k: int = 4,
    fetch_k: int = 20,
    lambda_mult: float = 0.5,
    index_query_options: typing.Optional[
        langchain_google_cloud_sql_pg.indexes.QueryOptions
    ] = None,
)

PostgresVectorStore constructor.

See more: langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore

langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.__query_collection

__query_collection(
    embedding: typing.List[float],
    k: typing.Optional[int] = None,
    filter: typing.Optional[str] = None,
    **kwargs: typing.Any
) -> typing.List[typing.Any]

Perform similarity search query on the vector store table.

See more: langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.__query_collection

langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore._aadd_embeddings

_aadd_embeddings(
    texts: typing.Iterable[str],
    embeddings: typing.List[typing.List[float]],
    metadatas: typing.Optional[typing.List[dict]] = None,
    ids: typing.Optional[typing.List[str]] = None,
    **kwargs: typing.Any
) -> typing.List[str]

langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore._select_relevance_score_fn

_select_relevance_score_fn() -> typing.Callable[[float], float]

Select a relevance function based on distance strategy.

See more: langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore._select_relevance_score_fn

langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.aadd_documents

aadd_documents(
    documents: typing.List[langchain_core.documents.base.Document],
    ids: typing.Optional[typing.List[str]] = None,
    **kwargs: typing.Any
) -> typing.List[str]

langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.aadd_texts

aadd_texts(
    texts: typing.Iterable[str],
    metadatas: typing.Optional[typing.List[dict]] = None,
    ids: typing.Optional[typing.List[str]] = None,
    **kwargs: typing.Any
) -> typing.List[str]

langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.aapply_vector_index

aapply_vector_index(
    index: langchain_google_cloud_sql_pg.indexes.BaseIndex,
    name: typing.Optional[str] = None,
    concurrently: bool = False,
) -> None

Create an index on the vector store table.

See more: langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.aapply_vector_index
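
Index creation is exposed here as an async method, so a typical pattern is a small coroutine that receives an existing PostgresVectorStore; the index name and the default-constructed HNSWIndex are assumptions for illustration:

import asyncio

from langchain_google_cloud_sql_pg.indexes import HNSWIndex
from langchain_google_cloud_sql_pg.vectorstore import PostgresVectorStore


async def build_index(store: PostgresVectorStore) -> None:
    # Create an HNSW index with the library defaults; concurrently=True is
    # intended to build the index without locking the table.
    await store.aapply_vector_index(HNSWIndex(), name="my_hnsw_index", concurrently=True)


# asyncio.run(build_index(store))  # store: a PostgresVectorStore created elsewhere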

langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.add_documents

add_documents(
    documents: typing.List[langchain_core.documents.base.Document],
    ids: typing.Optional[typing.List[str]] = None,
    **kwargs: typing.Any
) -> typing.List[str]

langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.add_texts

add_texts(
    texts: typing.Iterable[str],
    metadatas: typing.Optional[typing.List[dict]] = None,
    ids: typing.Optional[typing.List[str]] = None,
    **kwargs: typing.Any
) -> typing.List[str]

langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.adelete

adelete(
    ids: typing.Optional[typing.List[str]] = None, **kwargs: typing.Any
) -> typing.Optional[bool]

langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.adrop_vector_index

adrop_vector_index(index_name: str = "langchainvectorindex") -> None

langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.afrom_documents

afrom_documents(
    documents: typing.List[langchain_core.documents.base.Document],
    embedding: langchain_core.embeddings.embeddings.Embeddings,
    engine: langchain_google_cloud_sql_pg.engine.PostgresEngine,
    table_name: str,
    ids: typing.Optional[typing.List[str]] = None,
    content_column: str = "content",
    embedding_column: str = "embedding",
    metadata_columns: typing.List[str] = [],
    ignore_metadata_columns: typing.Optional[typing.List[str]] = None,
    id_column: str = "langchain_id",
    metadata_json_column: str = "langchain_metadata",
    **kwargs: typing.Any
) -> langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore

Create a PostgresVectorStore instance from documents.

See more: langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.afrom_documents

langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.afrom_texts

afrom_texts(
    texts: typing.List[str],
    embedding: langchain_core.embeddings.embeddings.Embeddings,
    engine: langchain_google_cloud_sql_pg.engine.PostgresEngine,
    table_name: str,
    metadatas: typing.Optional[typing.List[dict]] = None,
    ids: typing.Optional[typing.List[str]] = None,
    content_column: str = "content",
    embedding_column: str = "embedding",
    metadata_columns: typing.List[str] = [],
    ignore_metadata_columns: typing.Optional[typing.List[str]] = None,
    id_column: str = "langchain_id",
    metadata_json_column: str = "langchain_metadata",
    **kwargs: typing.Any
) -> langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore

Create a PostgresVectorStore instance from texts.

See more: langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.afrom_texts

langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.amax_marginal_relevance_search

amax_marginal_relevance_search(
    query: str,
    k: typing.Optional[int] = None,
    fetch_k: typing.Optional[int] = None,
    lambda_mult: typing.Optional[float] = None,
    filter: typing.Optional[str] = None,
    **kwargs: typing.Any
) -> typing.List[langchain_core.documents.base.Document]

Return docs selected using the maximal marginal relevance.

See more: langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.amax_marginal_relevance_search

langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.amax_marginal_relevance_search_by_vector

amax_marginal_relevance_search_by_vector(
    embedding: typing.List[float],
    k: typing.Optional[int] = None,
    fetch_k: typing.Optional[int] = None,
    lambda_mult: typing.Optional[float] = None,
    filter: typing.Optional[str] = None,
    **kwargs: typing.Any
) -> typing.List[langchain_core.documents.base.Document]

Return docs selected using the maximal marginal relevance.

See more: langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.amax_marginal_relevance_search_by_vector

langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.amax_marginal_relevance_search_with_score_by_vector

amax_marginal_relevance_search_with_score_by_vector(
    embedding: typing.List[float],
    k: typing.Optional[int] = None,
    fetch_k: typing.Optional[int] = None,
    lambda_mult: typing.Optional[float] = None,
    filter: typing.Optional[str] = None,
    **kwargs: typing.Any
) -> typing.List[typing.Tuple[langchain_core.documents.base.Document, float]]

Return docs and distance scores selected using the maximal marginal relevance.

See more: langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.amax_marginal_relevance_search_with_score_by_vector

langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.areindex

areindex(index_name: str = "langchainvectorindex") -> None

langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.asimilarity_search

asimilarity_search(
    query: str,
    k: typing.Optional[int] = None,
    filter: typing.Optional[str] = None,
    **kwargs: typing.Any
) -> typing.List[langchain_core.documents.base.Document]

Return docs selected by similarity search on query.

See more: langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.asimilarity_search

langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.asimilarity_search_by_vector

asimilarity_search_by_vector(
    embedding: typing.List[float],
    k: typing.Optional[int] = None,
    filter: typing.Optional[str] = None,
    **kwargs: typing.Any
) -> typing.List[langchain_core.documents.base.Document]

Return docs selected by vector similarity search.

See more: langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.asimilarity_search_by_vector

langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.asimilarity_search_with_score

asimilarity_search_with_score(
    query: str,
    k: typing.Optional[int] = None,
    filter: typing.Optional[str] = None,
    **kwargs: typing.Any
) -> typing.List[typing.Tuple[langchain_core.documents.base.Document, float]]

Return docs and distance scores selected by similarity search on query.

See more: langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.asimilarity_search_with_score

langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.asimilarity_search_with_score_by_vector

asimilarity_search_with_score_by_vector(
    embedding: typing.List[float],
    k: typing.Optional[int] = None,
    filter: typing.Optional[str] = None,
    **kwargs: typing.Any
) -> typing.List[typing.Tuple[langchain_core.documents.base.Document, float]]

Return docs and distance scores selected by vector similarity search.

See more: langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.asimilarity_search_with_score_by_vector

langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.create

create(
    engine: langchain_google_cloud_sql_pg.engine.PostgresEngine,
    embedding_service: langchain_core.embeddings.embeddings.Embeddings,
    table_name: str,
    content_column: str = "content",
    embedding_column: str = "embedding",
    metadata_columns: typing.List[str] = [],
    ignore_metadata_columns: typing.Optional[typing.List[str]] = None,
    id_column: str = "langchain_id",
    metadata_json_column: typing.Optional[str] = "langchain_metadata",
    distance_strategy: langchain_google_cloud_sql_pg.indexes.DistanceStrategy = DistanceStrategy.COSINE_DISTANCE,
    k: int = 4,
    fetch_k: int = 20,
    lambda_mult: float = 0.5,
    index_query_options: typing.Optional[
        langchain_google_cloud_sql_pg.indexes.QueryOptions
    ] = None,
)

Create a new PostgresVectorStore instance.

See more: langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.create

langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.create_sync

create_sync(
    engine: langchain_google_cloud_sql_pg.engine.PostgresEngine,
    embedding_service: langchain_core.embeddings.embeddings.Embeddings,
    table_name: str,
    content_column: str = "content",
    embedding_column: str = "embedding",
    metadata_columns: typing.List[str] = [],
    ignore_metadata_columns: typing.Optional[typing.List[str]] = None,
    id_column: str = "langchain_id",
    metadata_json_column: str = "langchain_metadata",
    distance_strategy: langchain_google_cloud_sql_pg.indexes.DistanceStrategy = DistanceStrategy.COSINE_DISTANCE,
    k: int = 4,
    fetch_k: int = 20,
    lambda_mult: float = 0.5,
    index_query_options: typing.Optional[
        langchain_google_cloud_sql_pg.indexes.QueryOptions
    ] = None,
)

Create a new PostgresVectorStore instance.

See more: langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.create_sync
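
A minimal end-to-end sketch of the synchronous path: initialize the table, build the store, add texts, and query it. The fake embedding model, table name, and vector size are placeholders; substitute a real Embeddings implementation in practice:

from langchain_core.embeddings.fake import FakeEmbeddings

from langchain_google_cloud_sql_pg.engine import PostgresEngine
from langchain_google_cloud_sql_pg.vectorstore import PostgresVectorStore

engine = PostgresEngine.from_instance(
    project_id="my-project", region="us-central1",
    instance="my-instance", database="my-database",
)
engine.init_vectorstore_table(table_name="my_vectors", vector_size=768)

store = PostgresVectorStore.create_sync(
    engine=engine,
    embedding_service=FakeEmbeddings(size=768),  # stand-in for a real embedding model
    table_name="my_vectors",
)
store.add_texts(
    ["pgvector stores embeddings in PostgreSQL.", "Cloud SQL is a managed database."],
    metadatas=[{"source": "notes"}, {"source": "notes"}],
)
docs = store.similarity_search("Where are embeddings stored?", k=1)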

langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.delete

delete(
    ids: typing.Optional[typing.List[str]] = None, **kwargs: typing.Any
) -> typing.Optional[bool]

langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.from_documents

from_documents(
    documents: typing.List[langchain_core.documents.base.Document],
    embedding: langchain_core.embeddings.embeddings.Embeddings,
    engine: langchain_google_cloud_sql_pg.engine.PostgresEngine,
    table_name: str,
    ids: typing.Optional[typing.List[str]] = None,
    content_column: str = "content",
    embedding_column: str = "embedding",
    metadata_columns: typing.List[str] = [],
    ignore_metadata_columns: typing.Optional[typing.List[str]] = None,
    id_column: str = "langchain_id",
    metadata_json_column: str = "langchain_metadata",
    **kwargs: typing.Any
) -> langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore

Create a PostgresVectorStore instance from documents.

See more: langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.from_documents

langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.from_texts

from_texts(
    texts: typing.List[str],
    embedding: langchain_core.embeddings.embeddings.Embeddings,
    engine: langchain_google_cloud_sql_pg.engine.PostgresEngine,
    table_name: str,
    metadatas: typing.Optional[typing.List[dict]] = None,
    ids: typing.Optional[typing.List[str]] = None,
    content_column: str = "content",
    embedding_column: str = "embedding",
    metadata_columns: typing.List[str] = [],
    ignore_metadata_columns: typing.Optional[typing.List[str]] = None,
    id_column: str = "langchain_id",
    metadata_json_column: str = "langchain_metadata",
    **kwargs: typing.Any
)

Create a PostgresVectorStore instance from texts.

See more: langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.from_texts

langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.is_valid_index

is_valid_index(index_name: str = "langchainvectorindex") -> bool

langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.max_marginal_relevance_search

max_marginal_relevance_search(
    query: str,
    k: typing.Optional[int] = None,
    fetch_k: typing.Optional[int] = None,
    lambda_mult: typing.Optional[float] = None,
    filter: typing.Optional[str] = None,
    **kwargs: typing.Any
) -> typing.List[langchain_core.documents.base.Document]

Return docs selected using the maximal marginal relevance.

See more: langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.max_marginal_relevance_search

langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.max_marginal_relevance_search_by_vector

max_marginal_relevance_search_by_vector(
    embedding: typing.List[float],
    k: typing.Optional[int] = None,
    fetch_k: typing.Optional[int] = None,
    lambda_mult: typing.Optional[float] = None,
    filter: typing.Optional[str] = None,
    **kwargs: typing.Any
) -> typing.List[langchain_core.documents.base.Document]

Return docs selected using the maximal marginal relevance.

See more: langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.max_marginal_relevance_search_by_vector

langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.max_marginal_relevance_search_with_score_by_vector

max_marginal_relevance_search_with_score_by_vector(
    embedding: typing.List[float],
    k: typing.Optional[int] = None,
    fetch_k: typing.Optional[int] = None,
    lambda_mult: typing.Optional[float] = None,
    filter: typing.Optional[str] = None,
    **kwargs: typing.Any
) -> typing.List[typing.Tuple[langchain_core.documents.base.Document, float]]

Return docs and distance scores selected using the maximal marginal relevance.

See more: langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.max_marginal_relevance_search_with_score_by_vector

langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.similarity_search

similarity_search(
    query: str,
    k: typing.Optional[int] = None,
    filter: typing.Optional[str] = None,
    **kwargs: typing.Any
) -> typing.List[langchain_core.documents.base.Document]

Return docs selected by similarity search on query.

See more: langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.similarity_search
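
A short sketch assuming the store from the create_sync example above; the filter argument is a string and is assumed here to be a SQL-style predicate over a metadata column created at table setup (the column name is hypothetical):

# store: the PostgresVectorStore from the create_sync sketch above.
docs = store.similarity_search(
    "How do I connect to Cloud SQL?",
    k=5,                        # number of documents to return
    filter="source = 'notes'",  # hypothetical predicate over a metadata column
)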

langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.similarity_search_by_vector

similarity_search_by_vector(
    embedding: typing.List[float],
    k: typing.Optional[int] = None,
    filter: typing.Optional[str] = None,
    **kwargs: typing.Any
) -> typing.List[langchain_core.documents.base.Document]

Return docs selected by vector similarity search.

See more: langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.similarity_search_by_vector

langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.similarity_search_with_score

similarity_search_with_score(
    query: str,
    k: typing.Optional[int] = None,
    filter: typing.Optional[str] = None,
    **kwargs: typing.Any
) -> typing.List[typing.Tuple[langchain_core.documents.base.Document, float]]

Return docs and distance scores selected by similarity search on query.

See more: langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.similarity_search_with_score

langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.similarity_search_with_score_by_vector

similarity_search_with_score_by_vector(
    embedding: typing.List[float],
    k: typing.Optional[int] = None,
    filter: typing.Optional[str] = None,
    **kwargs: typing.Any
) -> typing.List[typing.Tuple[langchain_core.documents.base.Document, float]]

Return docs and distance scores selected by similarity search on vector.

See more: langchain_google_cloud_sql_pg.vectorstore.PostgresVectorStore.similarity_search_with_score_by_vector