refactor: excel parse

This commit is contained in:
Blizzard
2026-04-16 10:01:11 +08:00
parent 680ecc320f
commit f62f95ec02
7941 changed files with 2899112 additions and 0 deletions
@@ -0,0 +1,17 @@
import inspect
from pydantic import BaseModel
from qdrant_client._pydantic_compat import update_forward_refs
from qdrant_client.http.api_client import ( # noqa F401
ApiClient as ApiClient,
AsyncApiClient as AsyncApiClient,
AsyncApis as AsyncApis,
SyncApis as SyncApis,
)
from qdrant_client.http.models import models as models # noqa F401
# Resolve forward references on every pydantic model class generated into
# qdrant_client.http.models.models, so self-referencing schemas validate.
for _name, _cls in inspect.getmembers(models, inspect.isclass):
    if _cls.__module__ != "qdrant_client.http.models.models":
        continue
    if issubclass(_cls, BaseModel):
        update_forward_refs(_cls)
@@ -0,0 +1,171 @@
# flake8: noqa E501
from typing import TYPE_CHECKING, Any, Dict, Set, TypeVar, Union
from pydantic import BaseModel
from pydantic.main import BaseModel
from pydantic.version import VERSION as PYDANTIC_VERSION
from qdrant_client.http.models import *
from qdrant_client.http.models import models as m
# True when pydantic 2.x is installed; to_json() below picks the matching
# serialization API based on this flag.
PYDANTIC_V2 = PYDANTIC_VERSION.startswith("2.")
# Generic type variable bound to pydantic models (generated-code parity).
Model = TypeVar("Model", bound="BaseModel")
# Shapes accepted by pydantic's include/exclude arguments.
SetIntStr = Set[Union[int, str]]
DictIntStrAny = Dict[Union[int, str], Any]
# Placeholder emitted by the code generator; not used in this module.
file = None
def to_json(model: BaseModel, *args: Any, **kwargs: Any) -> str:
    """Serialize a pydantic model to JSON, dispatching on the pydantic major version."""
    if not PYDANTIC_V2:
        return model.json(*args, **kwargs)
    return model.model_dump_json(*args, **kwargs)
def jsonable_encoder(
    obj: Any,
    include: Union[SetIntStr, DictIntStrAny] = None,
    exclude=None,
    by_alias: bool = True,
    skip_defaults: bool = None,
    exclude_unset: bool = True,
    exclude_none: bool = True,
):
    """Prepare *obj* for use as an HTTP request body.

    Pydantic models (anything exposing ``json``/``model_dump_json``) are
    serialized via :func:`to_json`; every other value is returned untouched.
    """
    looks_like_model = hasattr(obj, "model_dump_json") or hasattr(obj, "json")
    if not looks_like_model:
        return obj
    return to_json(
        obj,
        include=include,
        exclude=exclude,
        by_alias=by_alias,
        exclude_unset=bool(skip_defaults or exclude_unset),
        exclude_none=exclude_none,
    )
if TYPE_CHECKING:
from qdrant_client.http.api_client import ApiClient
class _AliasesApi:
    """Request builders for the alias endpoints, shared by sync and async clients."""

    def __init__(self, api_client: "Union[ApiClient, AsyncApiClient]"):
        # Transport (sync or async) used to issue the assembled requests.
        self.api_client = api_client

    def _build_for_get_collection_aliases(
        self,
        collection_name: str,
    ):
        """Assemble GET /collections/{collection_name}/aliases (aliases of one collection)."""
        return self.api_client.request(
            type_=m.InlineResponse2008,
            method="GET",
            url="/collections/{collection_name}/aliases",
            headers=None,  # no extra headers needed
            path_params={"collection_name": str(collection_name)},
        )

    def _build_for_get_collections_aliases(
        self,
    ):
        """Assemble GET /aliases (every alias in the storage)."""
        return self.api_client.request(
            type_=m.InlineResponse2008,
            method="GET",
            url="/aliases",
            headers=None,  # no extra headers needed
        )

    def _build_for_update_aliases(
        self,
        timeout: int = None,
        change_aliases_operation: m.ChangeAliasesOperation = None,
    ):
        """Assemble POST /collections/aliases (apply a batch of alias changes)."""
        query: dict = {}
        if timeout is not None:
            query["timeout"] = str(timeout)
        return self.api_client.request(
            type_=m.InlineResponse200,
            method="POST",
            url="/collections/aliases",
            headers={"Content-Type": "application/json"},
            params=query,
            content=jsonable_encoder(change_aliases_operation),
        )
class AsyncAliasesApi(_AliasesApi):
    """Async facade over the alias request builders."""

    async def get_collection_aliases(self, collection_name: str) -> m.InlineResponse2008:
        """Get list of all aliases for a collection."""
        return await self._build_for_get_collection_aliases(collection_name=collection_name)

    async def get_collections_aliases(self) -> m.InlineResponse2008:
        """Get list of all existing collections aliases."""
        return await self._build_for_get_collections_aliases()

    async def update_aliases(
        self,
        timeout: int = None,
        change_aliases_operation: m.ChangeAliasesOperation = None,
    ) -> m.InlineResponse200:
        """Apply a batch of alias change operations."""
        return await self._build_for_update_aliases(
            timeout=timeout, change_aliases_operation=change_aliases_operation
        )
class SyncAliasesApi(_AliasesApi):
    """Blocking facade over the alias request builders."""

    def get_collection_aliases(self, collection_name: str) -> m.InlineResponse2008:
        """Get list of all aliases for a collection."""
        return self._build_for_get_collection_aliases(collection_name=collection_name)

    def get_collections_aliases(self) -> m.InlineResponse2008:
        """Get list of all existing collections aliases."""
        return self._build_for_get_collections_aliases()

    def update_aliases(
        self,
        timeout: int = None,
        change_aliases_operation: m.ChangeAliasesOperation = None,
    ) -> m.InlineResponse200:
        """Apply a batch of alias change operations."""
        return self._build_for_update_aliases(
            timeout=timeout, change_aliases_operation=change_aliases_operation
        )
@@ -0,0 +1,116 @@
# flake8: noqa E501
from typing import TYPE_CHECKING, Any, Dict, Set, TypeVar, Union
from pydantic import BaseModel
from pydantic.main import BaseModel
from pydantic.version import VERSION as PYDANTIC_VERSION
from qdrant_client.http.models import *
# True when pydantic 2.x is installed; to_json() below picks the matching
# serialization API based on this flag.
PYDANTIC_V2 = PYDANTIC_VERSION.startswith("2.")
# Generic type variable bound to pydantic models (generated-code parity).
Model = TypeVar("Model", bound="BaseModel")
# Shapes accepted by pydantic's include/exclude arguments.
SetIntStr = Set[Union[int, str]]
DictIntStrAny = Dict[Union[int, str], Any]
# Placeholder emitted by the code generator; not used in this module.
file = None
def to_json(model: BaseModel, *args: Any, **kwargs: Any) -> str:
    """Serialize a pydantic model to JSON, dispatching on the pydantic major version."""
    if not PYDANTIC_V2:
        return model.json(*args, **kwargs)
    return model.model_dump_json(*args, **kwargs)
def jsonable_encoder(
    obj: Any,
    include: Union[SetIntStr, DictIntStrAny] = None,
    exclude=None,
    by_alias: bool = True,
    skip_defaults: bool = None,
    exclude_unset: bool = True,
    exclude_none: bool = True,
):
    """Prepare *obj* for use as an HTTP request body.

    Pydantic models (anything exposing ``json``/``model_dump_json``) are
    serialized via :func:`to_json`; every other value is returned untouched.
    """
    looks_like_model = hasattr(obj, "model_dump_json") or hasattr(obj, "json")
    if not looks_like_model:
        return obj
    return to_json(
        obj,
        include=include,
        exclude=exclude,
        by_alias=by_alias,
        exclude_unset=bool(skip_defaults or exclude_unset),
        exclude_none=exclude_none,
    )
if TYPE_CHECKING:
from qdrant_client.http.api_client import ApiClient
class _BetaApi:
def __init__(self, api_client: "Union[ApiClient, AsyncApiClient]"):
self.api_client = api_client
def _build_for_clear_issues(
self,
):
"""
Removes all issues reported so far
"""
headers = {}
return self.api_client.request(
type_=bool,
method="DELETE",
url="/issues",
headers=headers if headers else None,
)
def _build_for_get_issues(
self,
):
"""
Get a report of performance issues and configuration suggestions
"""
headers = {}
return self.api_client.request(
type_=object,
method="GET",
url="/issues",
headers=headers if headers else None,
)
class AsyncBetaApi(_BetaApi):
    """Async facade over the beta "issues" request builders."""

    async def clear_issues(self) -> bool:
        """Removes all issues reported so far."""
        return await self._build_for_clear_issues()

    async def get_issues(self) -> object:
        """Get a report of performance issues and configuration suggestions."""
        return await self._build_for_get_issues()
class SyncBetaApi(_BetaApi):
    """Blocking facade over the beta "issues" request builders."""

    def clear_issues(self) -> bool:
        """Removes all issues reported so far."""
        return self._build_for_clear_issues()

    def get_issues(self) -> object:
        """Get a report of performance issues and configuration suggestions."""
        return self._build_for_get_issues()
@@ -0,0 +1,345 @@
# flake8: noqa E501
from typing import TYPE_CHECKING, Any, Dict, Set, TypeVar, Union
from pydantic import BaseModel
from pydantic.main import BaseModel
from pydantic.version import VERSION as PYDANTIC_VERSION
from qdrant_client.http.models import *
from qdrant_client.http.models import models as m
# True when pydantic 2.x is installed; to_json() below picks the matching
# serialization API based on this flag.
PYDANTIC_V2 = PYDANTIC_VERSION.startswith("2.")
# Generic type variable bound to pydantic models (generated-code parity).
Model = TypeVar("Model", bound="BaseModel")
# Shapes accepted by pydantic's include/exclude arguments.
SetIntStr = Set[Union[int, str]]
DictIntStrAny = Dict[Union[int, str], Any]
# Placeholder emitted by the code generator; not used in this module.
file = None
def to_json(model: BaseModel, *args: Any, **kwargs: Any) -> str:
    """Serialize a pydantic model to JSON, dispatching on the pydantic major version."""
    if not PYDANTIC_V2:
        return model.json(*args, **kwargs)
    return model.model_dump_json(*args, **kwargs)
def jsonable_encoder(
    obj: Any,
    include: Union[SetIntStr, DictIntStrAny] = None,
    exclude=None,
    by_alias: bool = True,
    skip_defaults: bool = None,
    exclude_unset: bool = True,
    exclude_none: bool = True,
):
    """Prepare *obj* for use as an HTTP request body.

    Pydantic models (anything exposing ``json``/``model_dump_json``) are
    serialized via :func:`to_json`; every other value is returned untouched.
    """
    looks_like_model = hasattr(obj, "model_dump_json") or hasattr(obj, "json")
    if not looks_like_model:
        return obj
    return to_json(
        obj,
        include=include,
        exclude=exclude,
        by_alias=by_alias,
        exclude_unset=bool(skip_defaults or exclude_unset),
        exclude_none=exclude_none,
    )
if TYPE_CHECKING:
from qdrant_client.http.api_client import ApiClient
class _CollectionsApi:
    """Request builders for the collection endpoints, shared by sync and async clients."""

    def __init__(self, api_client: "Union[ApiClient, AsyncApiClient]"):
        # Transport (sync or async) used to issue the assembled requests.
        self.api_client = api_client

    def _build_for_collection_exists(
        self,
        collection_name: str,
    ):
        """Assemble GET /collections/{collection_name}/exists ("true"/"false" probe)."""
        return self.api_client.request(
            type_=m.InlineResponse2006,
            method="GET",
            url="/collections/{collection_name}/exists",
            headers=None,  # no extra headers needed
            path_params={"collection_name": str(collection_name)},
        )

    def _build_for_create_collection(
        self,
        collection_name: str,
        timeout: int = None,
        create_collection: m.CreateCollection = None,
    ):
        """Assemble PUT /collections/{collection_name} (create a new collection)."""
        query: dict = {}
        if timeout is not None:
            query["timeout"] = str(timeout)
        return self.api_client.request(
            type_=m.InlineResponse200,
            method="PUT",
            url="/collections/{collection_name}",
            headers={"Content-Type": "application/json"},
            path_params={"collection_name": str(collection_name)},
            params=query,
            content=jsonable_encoder(create_collection),
        )

    def _build_for_delete_collection(
        self,
        collection_name: str,
        timeout: int = None,
    ):
        """Assemble DELETE /collections/{collection_name} (drop collection and its data)."""
        query: dict = {}
        if timeout is not None:
            query["timeout"] = str(timeout)
        return self.api_client.request(
            type_=m.InlineResponse200,
            method="DELETE",
            url="/collections/{collection_name}",
            headers=None,  # no extra headers needed
            path_params={"collection_name": str(collection_name)},
            params=query,
        )

    def _build_for_get_collection(
        self,
        collection_name: str,
    ):
        """Assemble GET /collections/{collection_name} (detailed collection info)."""
        return self.api_client.request(
            type_=m.InlineResponse2004,
            method="GET",
            url="/collections/{collection_name}",
            headers=None,  # no extra headers needed
            path_params={"collection_name": str(collection_name)},
        )

    def _build_for_get_collections(
        self,
    ):
        """Assemble GET /collections (names of all existing collections)."""
        return self.api_client.request(
            type_=m.InlineResponse2003,
            method="GET",
            url="/collections",
            headers=None,  # no extra headers needed
        )

    def _build_for_update_collection(
        self,
        collection_name: str,
        timeout: int = None,
        update_collection: m.UpdateCollection = None,
    ):
        """Assemble PATCH /collections/{collection_name} (update collection parameters)."""
        query: dict = {}
        if timeout is not None:
            query["timeout"] = str(timeout)
        return self.api_client.request(
            type_=m.InlineResponse200,
            method="PATCH",
            url="/collections/{collection_name}",
            headers={"Content-Type": "application/json"},
            path_params={"collection_name": str(collection_name)},
            params=query,
            content=jsonable_encoder(update_collection),
        )
class AsyncCollectionsApi(_CollectionsApi):
    """Async facade over the collection request builders."""

    async def collection_exists(self, collection_name: str) -> m.InlineResponse2006:
        """Return whether the given collection name exists ("true"/"false")."""
        return await self._build_for_collection_exists(collection_name=collection_name)

    async def create_collection(
        self,
        collection_name: str,
        timeout: int = None,
        create_collection: m.CreateCollection = None,
    ) -> m.InlineResponse200:
        """Create new collection with given parameters."""
        return await self._build_for_create_collection(
            collection_name=collection_name,
            timeout=timeout,
            create_collection=create_collection,
        )

    async def delete_collection(
        self, collection_name: str, timeout: int = None
    ) -> m.InlineResponse200:
        """Drop collection and all associated data."""
        return await self._build_for_delete_collection(
            collection_name=collection_name, timeout=timeout
        )

    async def get_collection(self, collection_name: str) -> m.InlineResponse2004:
        """Get detailed information about specified existing collection."""
        return await self._build_for_get_collection(collection_name=collection_name)

    async def get_collections(self) -> m.InlineResponse2003:
        """Get list name of all existing collections."""
        return await self._build_for_get_collections()

    async def update_collection(
        self,
        collection_name: str,
        timeout: int = None,
        update_collection: m.UpdateCollection = None,
    ) -> m.InlineResponse200:
        """Update parameters of the existing collection."""
        return await self._build_for_update_collection(
            collection_name=collection_name,
            timeout=timeout,
            update_collection=update_collection,
        )
class SyncCollectionsApi(_CollectionsApi):
    """Blocking facade over the collection request builders."""

    def collection_exists(self, collection_name: str) -> m.InlineResponse2006:
        """Return whether the given collection name exists ("true"/"false")."""
        return self._build_for_collection_exists(collection_name=collection_name)

    def create_collection(
        self,
        collection_name: str,
        timeout: int = None,
        create_collection: m.CreateCollection = None,
    ) -> m.InlineResponse200:
        """Create new collection with given parameters."""
        return self._build_for_create_collection(
            collection_name=collection_name,
            timeout=timeout,
            create_collection=create_collection,
        )

    def delete_collection(
        self, collection_name: str, timeout: int = None
    ) -> m.InlineResponse200:
        """Drop collection and all associated data."""
        return self._build_for_delete_collection(
            collection_name=collection_name, timeout=timeout
        )

    def get_collection(self, collection_name: str) -> m.InlineResponse2004:
        """Get detailed information about specified existing collection."""
        return self._build_for_get_collection(collection_name=collection_name)

    def get_collections(self) -> m.InlineResponse2003:
        """Get list name of all existing collections."""
        return self._build_for_get_collections()

    def update_collection(
        self,
        collection_name: str,
        timeout: int = None,
        update_collection: m.UpdateCollection = None,
    ) -> m.InlineResponse200:
        """Update parameters of the existing collection."""
        return self._build_for_update_collection(
            collection_name=collection_name,
            timeout=timeout,
            update_collection=update_collection,
        )
@@ -0,0 +1,365 @@
# flake8: noqa E501
from typing import TYPE_CHECKING, Any, Dict, Set, TypeVar, Union
from pydantic import BaseModel
from pydantic.main import BaseModel
from pydantic.version import VERSION as PYDANTIC_VERSION
from qdrant_client.http.models import *
from qdrant_client.http.models import models as m
# True when pydantic 2.x is installed; to_json() below picks the matching
# serialization API based on this flag.
PYDANTIC_V2 = PYDANTIC_VERSION.startswith("2.")
# Generic type variable bound to pydantic models (generated-code parity).
Model = TypeVar("Model", bound="BaseModel")
# Shapes accepted by pydantic's include/exclude arguments.
SetIntStr = Set[Union[int, str]]
DictIntStrAny = Dict[Union[int, str], Any]
# Placeholder emitted by the code generator; not used in this module.
file = None
def to_json(model: BaseModel, *args: Any, **kwargs: Any) -> str:
    """Serialize a pydantic model to JSON, dispatching on the pydantic major version."""
    if not PYDANTIC_V2:
        return model.json(*args, **kwargs)
    return model.model_dump_json(*args, **kwargs)
def jsonable_encoder(
    obj: Any,
    include: Union[SetIntStr, DictIntStrAny] = None,
    exclude=None,
    by_alias: bool = True,
    skip_defaults: bool = None,
    exclude_unset: bool = True,
    exclude_none: bool = True,
):
    """Prepare *obj* for use as an HTTP request body.

    Pydantic models (anything exposing ``json``/``model_dump_json``) are
    serialized via :func:`to_json`; every other value is returned untouched.
    """
    looks_like_model = hasattr(obj, "model_dump_json") or hasattr(obj, "json")
    if not looks_like_model:
        return obj
    return to_json(
        obj,
        include=include,
        exclude=exclude,
        by_alias=by_alias,
        exclude_unset=bool(skip_defaults or exclude_unset),
        exclude_none=exclude_none,
    )
if TYPE_CHECKING:
from qdrant_client.http.api_client import ApiClient
class _DistributedApi:
    """Request builders for the cluster/distributed endpoints, shared by sync and async clients."""

    def __init__(self, api_client: "Union[ApiClient, AsyncApiClient]"):
        # Transport (sync or async) used to issue the assembled requests.
        self.api_client = api_client

    def _build_for_cluster_status(
        self,
    ):
        """Assemble GET /cluster (current state and composition of the cluster)."""
        return self.api_client.request(
            type_=m.InlineResponse2002,
            method="GET",
            url="/cluster",
            headers=None,  # no extra headers needed
        )

    def _build_for_collection_cluster_info(
        self,
        collection_name: str,
    ):
        """Assemble GET /collections/{collection_name}/cluster (per-collection cluster info)."""
        return self.api_client.request(
            type_=m.InlineResponse2007,
            method="GET",
            url="/collections/{collection_name}/cluster",
            headers=None,  # no extra headers needed
            path_params={"collection_name": str(collection_name)},
        )

    def _build_for_create_shard_key(
        self,
        collection_name: str,
        timeout: int = None,
        create_sharding_key: m.CreateShardingKey = None,
    ):
        """Assemble PUT /collections/{collection_name}/shards (create a sharding key)."""
        query: dict = {}
        if timeout is not None:
            query["timeout"] = str(timeout)
        return self.api_client.request(
            type_=m.InlineResponse200,
            method="PUT",
            url="/collections/{collection_name}/shards",
            headers={"Content-Type": "application/json"},
            path_params={"collection_name": str(collection_name)},
            params=query,
            content=jsonable_encoder(create_sharding_key),
        )

    def _build_for_delete_shard_key(
        self,
        collection_name: str,
        timeout: int = None,
        drop_sharding_key: m.DropShardingKey = None,
    ):
        """Assemble POST /collections/{collection_name}/shards/delete (drop a sharding key)."""
        query: dict = {}
        if timeout is not None:
            query["timeout"] = str(timeout)
        return self.api_client.request(
            type_=m.InlineResponse200,
            method="POST",
            url="/collections/{collection_name}/shards/delete",
            headers={"Content-Type": "application/json"},
            path_params={"collection_name": str(collection_name)},
            params=query,
            content=jsonable_encoder(drop_sharding_key),
        )

    def _build_for_recover_current_peer(
        self,
    ):
        """Assemble POST /cluster/recover for the current peer."""
        return self.api_client.request(
            type_=m.InlineResponse200,
            method="POST",
            url="/cluster/recover",
            headers=None,  # no extra headers needed
        )

    def _build_for_remove_peer(
        self,
        peer_id: int,
        timeout: int = None,
        force: bool = None,
    ):
        """Assemble DELETE /cluster/peer/{peer_id}; errors if the peer still holds shards."""
        query: dict = {}
        if timeout is not None:
            query["timeout"] = str(timeout)
        if force is not None:
            query["force"] = str(force).lower()
        return self.api_client.request(
            type_=m.InlineResponse200,
            method="DELETE",
            url="/cluster/peer/{peer_id}",
            headers=None,  # no extra headers needed
            path_params={"peer_id": str(peer_id)},
            params=query,
        )

    def _build_for_update_collection_cluster(
        self,
        collection_name: str,
        timeout: int = None,
        cluster_operations: m.ClusterOperations = None,
    ):
        """Assemble POST /collections/{collection_name}/cluster (apply cluster operations)."""
        query: dict = {}
        if timeout is not None:
            query["timeout"] = str(timeout)
        return self.api_client.request(
            type_=m.InlineResponse200,
            method="POST",
            url="/collections/{collection_name}/cluster",
            headers={"Content-Type": "application/json"},
            path_params={"collection_name": str(collection_name)},
            params=query,
            content=jsonable_encoder(cluster_operations),
        )
class AsyncDistributedApi(_DistributedApi):
    """Async facade over the cluster/distributed request builders."""

    async def cluster_status(self) -> m.InlineResponse2002:
        """Get information about the current state and composition of the cluster."""
        return await self._build_for_cluster_status()

    async def collection_cluster_info(self, collection_name: str) -> m.InlineResponse2007:
        """Get cluster information for a collection."""
        return await self._build_for_collection_cluster_info(collection_name=collection_name)

    async def create_shard_key(
        self,
        collection_name: str,
        timeout: int = None,
        create_sharding_key: m.CreateShardingKey = None,
    ) -> m.InlineResponse200:
        """PUT a new sharding key for the collection."""
        return await self._build_for_create_shard_key(
            collection_name=collection_name,
            timeout=timeout,
            create_sharding_key=create_sharding_key,
        )

    async def delete_shard_key(
        self,
        collection_name: str,
        timeout: int = None,
        drop_sharding_key: m.DropShardingKey = None,
    ) -> m.InlineResponse200:
        """Drop a sharding key from the collection."""
        return await self._build_for_delete_shard_key(
            collection_name=collection_name,
            timeout=timeout,
            drop_sharding_key=drop_sharding_key,
        )

    async def recover_current_peer(self) -> m.InlineResponse200:
        """Issue POST /cluster/recover for the current peer."""
        return await self._build_for_recover_current_peer()

    async def remove_peer(
        self,
        peer_id: int,
        timeout: int = None,
        force: bool = None,
    ) -> m.InlineResponse200:
        """Tries to remove peer from the cluster. Will return an error if peer has shards on it."""
        return await self._build_for_remove_peer(peer_id=peer_id, timeout=timeout, force=force)

    async def update_collection_cluster(
        self,
        collection_name: str,
        timeout: int = None,
        cluster_operations: m.ClusterOperations = None,
    ) -> m.InlineResponse200:
        """Apply cluster operations to the collection."""
        return await self._build_for_update_collection_cluster(
            collection_name=collection_name,
            timeout=timeout,
            cluster_operations=cluster_operations,
        )
class SyncDistributedApi(_DistributedApi):
    """Blocking facade over the cluster/distributed request builders."""

    def cluster_status(self) -> m.InlineResponse2002:
        """Get information about the current state and composition of the cluster."""
        return self._build_for_cluster_status()

    def collection_cluster_info(self, collection_name: str) -> m.InlineResponse2007:
        """Get cluster information for a collection."""
        return self._build_for_collection_cluster_info(collection_name=collection_name)

    def create_shard_key(
        self,
        collection_name: str,
        timeout: int = None,
        create_sharding_key: m.CreateShardingKey = None,
    ) -> m.InlineResponse200:
        """PUT a new sharding key for the collection."""
        return self._build_for_create_shard_key(
            collection_name=collection_name,
            timeout=timeout,
            create_sharding_key=create_sharding_key,
        )

    def delete_shard_key(
        self,
        collection_name: str,
        timeout: int = None,
        drop_sharding_key: m.DropShardingKey = None,
    ) -> m.InlineResponse200:
        """Drop a sharding key from the collection."""
        return self._build_for_delete_shard_key(
            collection_name=collection_name,
            timeout=timeout,
            drop_sharding_key=drop_sharding_key,
        )

    def recover_current_peer(self) -> m.InlineResponse200:
        """Issue POST /cluster/recover for the current peer."""
        return self._build_for_recover_current_peer()

    def remove_peer(
        self,
        peer_id: int,
        timeout: int = None,
        force: bool = None,
    ) -> m.InlineResponse200:
        """Tries to remove peer from the cluster. Will return an error if peer has shards on it."""
        return self._build_for_remove_peer(peer_id=peer_id, timeout=timeout, force=force)

    def update_collection_cluster(
        self,
        collection_name: str,
        timeout: int = None,
        cluster_operations: m.ClusterOperations = None,
    ) -> m.InlineResponse200:
        """Apply cluster operations to the collection."""
        return self._build_for_update_collection_cluster(
            collection_name=collection_name,
            timeout=timeout,
            cluster_operations=cluster_operations,
        )
@@ -0,0 +1,190 @@
# flake8: noqa E501
from typing import TYPE_CHECKING, Any, Dict, Set, TypeVar, Union
from pydantic import BaseModel
from pydantic.main import BaseModel
from pydantic.version import VERSION as PYDANTIC_VERSION
from qdrant_client.http.models import *
from qdrant_client.http.models import models as m
# True when pydantic 2.x is installed; to_json() below picks the matching
# serialization API based on this flag.
PYDANTIC_V2 = PYDANTIC_VERSION.startswith("2.")
# Generic type variable bound to pydantic models (generated-code parity).
Model = TypeVar("Model", bound="BaseModel")
# Shapes accepted by pydantic's include/exclude arguments.
SetIntStr = Set[Union[int, str]]
DictIntStrAny = Dict[Union[int, str], Any]
# Placeholder emitted by the code generator; not used in this module.
file = None
def to_json(model: BaseModel, *args: Any, **kwargs: Any) -> str:
    """Serialize a pydantic model to JSON, dispatching on the pydantic major version."""
    if not PYDANTIC_V2:
        return model.json(*args, **kwargs)
    return model.model_dump_json(*args, **kwargs)
def jsonable_encoder(
    obj: Any,
    include: Union[SetIntStr, DictIntStrAny] = None,
    exclude=None,
    by_alias: bool = True,
    skip_defaults: bool = None,
    exclude_unset: bool = True,
    exclude_none: bool = True,
):
    """Prepare *obj* for use as an HTTP request body.

    Pydantic models (anything exposing ``json``/``model_dump_json``) are
    serialized via :func:`to_json`; every other value is returned untouched.
    """
    looks_like_model = hasattr(obj, "model_dump_json") or hasattr(obj, "json")
    if not looks_like_model:
        return obj
    return to_json(
        obj,
        include=include,
        exclude=exclude,
        by_alias=by_alias,
        exclude_unset=bool(skip_defaults or exclude_unset),
        exclude_none=exclude_none,
    )
if TYPE_CHECKING:
from qdrant_client.http.api_client import ApiClient
class _IndexesApi:
    """Request builders for the payload-index endpoints, shared by sync and async clients."""

    def __init__(self, api_client: "Union[ApiClient, AsyncApiClient]"):
        # Transport (sync or async) used to issue the assembled requests.
        self.api_client = api_client

    def _build_for_create_field_index(
        self,
        collection_name: str,
        wait: bool = None,
        ordering: WriteOrdering = None,
        create_field_index: m.CreateFieldIndex = None,
    ):
        """Assemble PUT /collections/{collection_name}/index (create a field index)."""
        query: dict = {}
        if wait is not None:
            query["wait"] = str(wait).lower()
        if ordering is not None:
            query["ordering"] = str(ordering)
        return self.api_client.request(
            type_=m.InlineResponse2005,
            method="PUT",
            url="/collections/{collection_name}/index",
            headers={"Content-Type": "application/json"},
            path_params={"collection_name": str(collection_name)},
            params=query,
            content=jsonable_encoder(create_field_index),
        )

    def _build_for_delete_field_index(
        self,
        collection_name: str,
        field_name: str,
        wait: bool = None,
        ordering: WriteOrdering = None,
    ):
        """Assemble DELETE /collections/{collection_name}/index/{field_name}."""
        query: dict = {}
        if wait is not None:
            query["wait"] = str(wait).lower()
        if ordering is not None:
            query["ordering"] = str(ordering)
        return self.api_client.request(
            type_=m.InlineResponse2005,
            method="DELETE",
            url="/collections/{collection_name}/index/{field_name}",
            headers=None,  # no extra headers needed
            path_params={
                "collection_name": str(collection_name),
                "field_name": str(field_name),
            },
            params=query,
        )
class AsyncIndexesApi(_IndexesApi):
    """Async facade over the payload-index request builders."""

    async def create_field_index(
        self,
        collection_name: str,
        wait: bool = None,
        ordering: WriteOrdering = None,
        create_field_index: m.CreateFieldIndex = None,
    ) -> m.InlineResponse2005:
        """Create index for field in collection."""
        return await self._build_for_create_field_index(
            collection_name=collection_name,
            wait=wait,
            ordering=ordering,
            create_field_index=create_field_index,
        )

    async def delete_field_index(
        self,
        collection_name: str,
        field_name: str,
        wait: bool = None,
        ordering: WriteOrdering = None,
    ) -> m.InlineResponse2005:
        """Delete field index for collection."""
        return await self._build_for_delete_field_index(
            collection_name=collection_name,
            field_name=field_name,
            wait=wait,
            ordering=ordering,
        )
class SyncIndexesApi(_IndexesApi):
    """Blocking facade over the payload-index request builders."""

    def create_field_index(
        self,
        collection_name: str,
        wait: bool = None,
        ordering: WriteOrdering = None,
        create_field_index: m.CreateFieldIndex = None,
    ) -> m.InlineResponse2005:
        """Create index for field in collection."""
        return self._build_for_create_field_index(
            collection_name=collection_name,
            wait=wait,
            ordering=ordering,
            create_field_index=create_field_index,
        )

    def delete_field_index(
        self,
        collection_name: str,
        field_name: str,
        wait: bool = None,
        ordering: WriteOrdering = None,
    ) -> m.InlineResponse2005:
        """Delete field index for collection."""
        return self._build_for_delete_field_index(
            collection_name=collection_name,
            field_name=field_name,
            wait=wait,
            ordering=ordering,
        )
@@ -0,0 +1,999 @@
# flake8: noqa E501
from typing import TYPE_CHECKING, Any, Dict, Set, TypeVar, Union
from pydantic import BaseModel
from pydantic.main import BaseModel
from pydantic.version import VERSION as PYDANTIC_VERSION
from qdrant_client.http.models import *
from qdrant_client.http.models import models as m
# True when pydantic 2.x is installed; to_json() below picks the matching
# serialization API based on this flag.
PYDANTIC_V2 = PYDANTIC_VERSION.startswith("2.")
# Generic type variable bound to pydantic models (generated-code parity).
Model = TypeVar("Model", bound="BaseModel")
# Shapes accepted by pydantic's include/exclude arguments.
SetIntStr = Set[Union[int, str]]
DictIntStrAny = Dict[Union[int, str], Any]
# Placeholder emitted by the code generator; not used in this module.
file = None
def to_json(model: BaseModel, *args: Any, **kwargs: Any) -> str:
    """Serialize a pydantic model to JSON, dispatching on the pydantic major version."""
    if not PYDANTIC_V2:
        return model.json(*args, **kwargs)
    return model.model_dump_json(*args, **kwargs)
def jsonable_encoder(
    obj: Any,
    include: Union[SetIntStr, DictIntStrAny] = None,
    exclude=None,
    by_alias: bool = True,
    skip_defaults: bool = None,
    exclude_unset: bool = True,
    exclude_none: bool = True,
):
    """Prepare *obj* for use as an HTTP request body.

    Pydantic models (anything exposing ``json``/``model_dump_json``) are
    serialized via :func:`to_json`; every other value is returned untouched.
    """
    looks_like_model = hasattr(obj, "model_dump_json") or hasattr(obj, "json")
    if not looks_like_model:
        return obj
    return to_json(
        obj,
        include=include,
        exclude=exclude,
        by_alias=by_alias,
        exclude_unset=bool(skip_defaults or exclude_unset),
        exclude_none=exclude_none,
    )
if TYPE_CHECKING:
from qdrant_client.http.api_client import ApiClient
class _PointsApi:
    def __init__(self, api_client: "Union[ApiClient, AsyncApiClient]"):
        # Transport (sync or async) used to issue the assembled requests.
        self.api_client = api_client
def _build_for_batch_update(
self,
collection_name: str,
wait: bool = None,
ordering: WriteOrdering = None,
update_operations: m.UpdateOperations = None,
):
"""
Apply a series of update operations for points, vectors and payloads
"""
path_params = {
"collection_name": str(collection_name),
}
query_params = {}
if wait is not None:
query_params["wait"] = str(wait).lower()
if ordering is not None:
query_params["ordering"] = str(ordering)
headers = {}
body = jsonable_encoder(update_operations)
if "Content-Type" not in headers:
headers["Content-Type"] = "application/json"
return self.api_client.request(
type_=m.InlineResponse20014,
method="POST",
url="/collections/{collection_name}/points/batch",
headers=headers if headers else None,
path_params=path_params,
params=query_params,
content=body,
)
def _build_for_clear_payload(
self,
collection_name: str,
wait: bool = None,
ordering: WriteOrdering = None,
points_selector: m.PointsSelector = None,
):
"""
Remove all payload for specified points
"""
path_params = {
"collection_name": str(collection_name),
}
query_params = {}
if wait is not None:
query_params["wait"] = str(wait).lower()
if ordering is not None:
query_params["ordering"] = str(ordering)
headers = {}
body = jsonable_encoder(points_selector)
if "Content-Type" not in headers:
headers["Content-Type"] = "application/json"
return self.api_client.request(
type_=m.InlineResponse2005,
method="POST",
url="/collections/{collection_name}/points/payload/clear",
headers=headers if headers else None,
path_params=path_params,
params=query_params,
content=body,
)
def _build_for_count_points(
self,
collection_name: str,
consistency: m.ReadConsistency = None,
timeout: int = None,
count_request: m.CountRequest = None,
):
"""
Count points which matches given filtering condition
"""
path_params = {
"collection_name": str(collection_name),
}
query_params = {}
if consistency is not None:
query_params["consistency"] = str(consistency)
if timeout is not None:
query_params["timeout"] = str(timeout)
headers = {}
body = jsonable_encoder(count_request)
if "Content-Type" not in headers:
headers["Content-Type"] = "application/json"
return self.api_client.request(
type_=m.InlineResponse20019,
method="POST",
url="/collections/{collection_name}/points/count",
headers=headers if headers else None,
path_params=path_params,
params=query_params,
content=body,
)
def _build_for_delete_payload(
self,
collection_name: str,
wait: bool = None,
ordering: WriteOrdering = None,
delete_payload: m.DeletePayload = None,
):
"""
Delete specified key payload for points
"""
path_params = {
"collection_name": str(collection_name),
}
query_params = {}
if wait is not None:
query_params["wait"] = str(wait).lower()
if ordering is not None:
query_params["ordering"] = str(ordering)
headers = {}
body = jsonable_encoder(delete_payload)
if "Content-Type" not in headers:
headers["Content-Type"] = "application/json"
return self.api_client.request(
type_=m.InlineResponse2005,
method="POST",
url="/collections/{collection_name}/points/payload/delete",
headers=headers if headers else None,
path_params=path_params,
params=query_params,
content=body,
)
def _build_for_delete_points(
self,
collection_name: str,
wait: bool = None,
ordering: WriteOrdering = None,
points_selector: m.PointsSelector = None,
):
"""
Delete points
"""
path_params = {
"collection_name": str(collection_name),
}
query_params = {}
if wait is not None:
query_params["wait"] = str(wait).lower()
if ordering is not None:
query_params["ordering"] = str(ordering)
headers = {}
body = jsonable_encoder(points_selector)
if "Content-Type" not in headers:
headers["Content-Type"] = "application/json"
return self.api_client.request(
type_=m.InlineResponse2005,
method="POST",
url="/collections/{collection_name}/points/delete",
headers=headers if headers else None,
path_params=path_params,
params=query_params,
content=body,
)
def _build_for_delete_vectors(
self,
collection_name: str,
wait: bool = None,
ordering: WriteOrdering = None,
delete_vectors: m.DeleteVectors = None,
):
"""
Delete named vectors from the given points.
"""
path_params = {
"collection_name": str(collection_name),
}
query_params = {}
if wait is not None:
query_params["wait"] = str(wait).lower()
if ordering is not None:
query_params["ordering"] = str(ordering)
headers = {}
body = jsonable_encoder(delete_vectors)
if "Content-Type" not in headers:
headers["Content-Type"] = "application/json"
return self.api_client.request(
type_=m.InlineResponse2005,
method="POST",
url="/collections/{collection_name}/points/vectors/delete",
headers=headers if headers else None,
path_params=path_params,
params=query_params,
content=body,
)
def _build_for_facet(
self,
collection_name: str,
consistency: m.ReadConsistency = None,
timeout: int = None,
facet_request: m.FacetRequest = None,
):
"""
Count points that satisfy the given filter for each unique value of a payload key.
"""
path_params = {
"collection_name": str(collection_name),
}
query_params = {}
if consistency is not None:
query_params["consistency"] = str(consistency)
if timeout is not None:
query_params["timeout"] = str(timeout)
headers = {}
body = jsonable_encoder(facet_request)
if "Content-Type" not in headers:
headers["Content-Type"] = "application/json"
return self.api_client.request(
type_=m.InlineResponse20020,
method="POST",
url="/collections/{collection_name}/facet",
headers=headers if headers else None,
path_params=path_params,
params=query_params,
content=body,
)
def _build_for_get_point(
self,
collection_name: str,
id: m.ExtendedPointId,
consistency: m.ReadConsistency = None,
):
"""
Retrieve full information of single point by id
"""
path_params = {
"collection_name": str(collection_name),
"id": str(id),
}
query_params = {}
if consistency is not None:
query_params["consistency"] = str(consistency)
headers = {}
return self.api_client.request(
type_=m.InlineResponse20012,
method="GET",
url="/collections/{collection_name}/points/{id}",
headers=headers if headers else None,
path_params=path_params,
params=query_params,
)
def _build_for_get_points(
self,
collection_name: str,
consistency: m.ReadConsistency = None,
timeout: int = None,
point_request: m.PointRequest = None,
):
"""
Retrieve multiple points by specified IDs
"""
path_params = {
"collection_name": str(collection_name),
}
query_params = {}
if consistency is not None:
query_params["consistency"] = str(consistency)
if timeout is not None:
query_params["timeout"] = str(timeout)
headers = {}
body = jsonable_encoder(point_request)
if "Content-Type" not in headers:
headers["Content-Type"] = "application/json"
return self.api_client.request(
type_=m.InlineResponse20013,
method="POST",
url="/collections/{collection_name}/points",
headers=headers if headers else None,
path_params=path_params,
params=query_params,
content=body,
)
def _build_for_overwrite_payload(
self,
collection_name: str,
wait: bool = None,
ordering: WriteOrdering = None,
set_payload: m.SetPayload = None,
):
"""
Replace full payload of points with new one
"""
path_params = {
"collection_name": str(collection_name),
}
query_params = {}
if wait is not None:
query_params["wait"] = str(wait).lower()
if ordering is not None:
query_params["ordering"] = str(ordering)
headers = {}
body = jsonable_encoder(set_payload)
if "Content-Type" not in headers:
headers["Content-Type"] = "application/json"
return self.api_client.request(
type_=m.InlineResponse2005,
method="PUT",
url="/collections/{collection_name}/points/payload",
headers=headers if headers else None,
path_params=path_params,
params=query_params,
content=body,
)
def _build_for_scroll_points(
self,
collection_name: str,
consistency: m.ReadConsistency = None,
timeout: int = None,
scroll_request: m.ScrollRequest = None,
):
"""
Scroll request - paginate over all points which matches given filtering condition
"""
path_params = {
"collection_name": str(collection_name),
}
query_params = {}
if consistency is not None:
query_params["consistency"] = str(consistency)
if timeout is not None:
query_params["timeout"] = str(timeout)
headers = {}
body = jsonable_encoder(scroll_request)
if "Content-Type" not in headers:
headers["Content-Type"] = "application/json"
return self.api_client.request(
type_=m.InlineResponse20015,
method="POST",
url="/collections/{collection_name}/points/scroll",
headers=headers if headers else None,
path_params=path_params,
params=query_params,
content=body,
)
def _build_for_set_payload(
self,
collection_name: str,
wait: bool = None,
ordering: WriteOrdering = None,
set_payload: m.SetPayload = None,
):
"""
Set payload values for points
"""
path_params = {
"collection_name": str(collection_name),
}
query_params = {}
if wait is not None:
query_params["wait"] = str(wait).lower()
if ordering is not None:
query_params["ordering"] = str(ordering)
headers = {}
body = jsonable_encoder(set_payload)
if "Content-Type" not in headers:
headers["Content-Type"] = "application/json"
return self.api_client.request(
type_=m.InlineResponse2005,
method="POST",
url="/collections/{collection_name}/points/payload",
headers=headers if headers else None,
path_params=path_params,
params=query_params,
content=body,
)
def _build_for_update_vectors(
self,
collection_name: str,
wait: bool = None,
ordering: WriteOrdering = None,
update_vectors: m.UpdateVectors = None,
):
"""
Update specified named vectors on points, keep unspecified vectors intact.
"""
path_params = {
"collection_name": str(collection_name),
}
query_params = {}
if wait is not None:
query_params["wait"] = str(wait).lower()
if ordering is not None:
query_params["ordering"] = str(ordering)
headers = {}
body = jsonable_encoder(update_vectors)
if "Content-Type" not in headers:
headers["Content-Type"] = "application/json"
return self.api_client.request(
type_=m.InlineResponse2005,
method="PUT",
url="/collections/{collection_name}/points/vectors",
headers=headers if headers else None,
path_params=path_params,
params=query_params,
content=body,
)
def _build_for_upsert_points(
self,
collection_name: str,
wait: bool = None,
ordering: WriteOrdering = None,
point_insert_operations: m.PointInsertOperations = None,
):
"""
Perform insert + updates on points. If point with given ID already exists - it will be overwritten.
"""
path_params = {
"collection_name": str(collection_name),
}
query_params = {}
if wait is not None:
query_params["wait"] = str(wait).lower()
if ordering is not None:
query_params["ordering"] = str(ordering)
headers = {}
body = jsonable_encoder(point_insert_operations)
if "Content-Type" not in headers:
headers["Content-Type"] = "application/json"
return self.api_client.request(
type_=m.InlineResponse2005,
method="PUT",
url="/collections/{collection_name}/points",
headers=headers if headers else None,
path_params=path_params,
params=query_params,
content=body,
)
class AsyncPointsApi(_PointsApi):
    """Async surface of the points endpoints; each method awaits the shared request builder."""

    async def batch_update(
        self,
        collection_name: str,
        wait: bool = None,
        ordering: WriteOrdering = None,
        update_operations: m.UpdateOperations = None,
    ) -> m.InlineResponse20014:
        """Apply a series of update operations for points, vectors and payloads."""
        return await self._build_for_batch_update(
            collection_name=collection_name, wait=wait, ordering=ordering, update_operations=update_operations
        )

    async def clear_payload(
        self,
        collection_name: str,
        wait: bool = None,
        ordering: WriteOrdering = None,
        points_selector: m.PointsSelector = None,
    ) -> m.InlineResponse2005:
        """Remove all payload for specified points."""
        return await self._build_for_clear_payload(
            collection_name=collection_name, wait=wait, ordering=ordering, points_selector=points_selector
        )

    async def count_points(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        count_request: m.CountRequest = None,
    ) -> m.InlineResponse20019:
        """Count points which matches given filtering condition."""
        return await self._build_for_count_points(
            collection_name=collection_name, consistency=consistency, timeout=timeout, count_request=count_request
        )

    async def delete_payload(
        self,
        collection_name: str,
        wait: bool = None,
        ordering: WriteOrdering = None,
        delete_payload: m.DeletePayload = None,
    ) -> m.InlineResponse2005:
        """Delete specified key payload for points."""
        return await self._build_for_delete_payload(
            collection_name=collection_name, wait=wait, ordering=ordering, delete_payload=delete_payload
        )

    async def delete_points(
        self,
        collection_name: str,
        wait: bool = None,
        ordering: WriteOrdering = None,
        points_selector: m.PointsSelector = None,
    ) -> m.InlineResponse2005:
        """Delete points."""
        return await self._build_for_delete_points(
            collection_name=collection_name, wait=wait, ordering=ordering, points_selector=points_selector
        )

    async def delete_vectors(
        self,
        collection_name: str,
        wait: bool = None,
        ordering: WriteOrdering = None,
        delete_vectors: m.DeleteVectors = None,
    ) -> m.InlineResponse2005:
        """Delete named vectors from the given points."""
        return await self._build_for_delete_vectors(
            collection_name=collection_name, wait=wait, ordering=ordering, delete_vectors=delete_vectors
        )

    async def facet(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        facet_request: m.FacetRequest = None,
    ) -> m.InlineResponse20020:
        """Count points that satisfy the given filter for each unique value of a payload key."""
        return await self._build_for_facet(
            collection_name=collection_name, consistency=consistency, timeout=timeout, facet_request=facet_request
        )

    async def get_point(
        self,
        collection_name: str,
        id: m.ExtendedPointId,
        consistency: m.ReadConsistency = None,
    ) -> m.InlineResponse20012:
        """Retrieve full information of single point by id."""
        return await self._build_for_get_point(
            collection_name=collection_name, id=id, consistency=consistency
        )

    async def get_points(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        point_request: m.PointRequest = None,
    ) -> m.InlineResponse20013:
        """Retrieve multiple points by specified IDs."""
        return await self._build_for_get_points(
            collection_name=collection_name, consistency=consistency, timeout=timeout, point_request=point_request
        )

    async def overwrite_payload(
        self,
        collection_name: str,
        wait: bool = None,
        ordering: WriteOrdering = None,
        set_payload: m.SetPayload = None,
    ) -> m.InlineResponse2005:
        """Replace full payload of points with new one."""
        return await self._build_for_overwrite_payload(
            collection_name=collection_name, wait=wait, ordering=ordering, set_payload=set_payload
        )

    async def scroll_points(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        scroll_request: m.ScrollRequest = None,
    ) -> m.InlineResponse20015:
        """Scroll request - paginate over all points which matches given filtering condition."""
        return await self._build_for_scroll_points(
            collection_name=collection_name, consistency=consistency, timeout=timeout, scroll_request=scroll_request
        )

    async def set_payload(
        self,
        collection_name: str,
        wait: bool = None,
        ordering: WriteOrdering = None,
        set_payload: m.SetPayload = None,
    ) -> m.InlineResponse2005:
        """Set payload values for points."""
        return await self._build_for_set_payload(
            collection_name=collection_name, wait=wait, ordering=ordering, set_payload=set_payload
        )

    async def update_vectors(
        self,
        collection_name: str,
        wait: bool = None,
        ordering: WriteOrdering = None,
        update_vectors: m.UpdateVectors = None,
    ) -> m.InlineResponse2005:
        """Update specified named vectors on points, keep unspecified vectors intact."""
        return await self._build_for_update_vectors(
            collection_name=collection_name, wait=wait, ordering=ordering, update_vectors=update_vectors
        )

    async def upsert_points(
        self,
        collection_name: str,
        wait: bool = None,
        ordering: WriteOrdering = None,
        point_insert_operations: m.PointInsertOperations = None,
    ) -> m.InlineResponse2005:
        """Perform insert + updates on points. If point with given ID already exists - it will be overwritten."""
        return await self._build_for_upsert_points(
            collection_name=collection_name, wait=wait, ordering=ordering, point_insert_operations=point_insert_operations
        )
class SyncPointsApi(_PointsApi):
    """Blocking surface of the points endpoints; each method returns the shared builder's result."""

    def batch_update(
        self,
        collection_name: str,
        wait: bool = None,
        ordering: WriteOrdering = None,
        update_operations: m.UpdateOperations = None,
    ) -> m.InlineResponse20014:
        """Apply a series of update operations for points, vectors and payloads."""
        return self._build_for_batch_update(
            collection_name=collection_name, wait=wait, ordering=ordering, update_operations=update_operations
        )

    def clear_payload(
        self,
        collection_name: str,
        wait: bool = None,
        ordering: WriteOrdering = None,
        points_selector: m.PointsSelector = None,
    ) -> m.InlineResponse2005:
        """Remove all payload for specified points."""
        return self._build_for_clear_payload(
            collection_name=collection_name, wait=wait, ordering=ordering, points_selector=points_selector
        )

    def count_points(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        count_request: m.CountRequest = None,
    ) -> m.InlineResponse20019:
        """Count points which matches given filtering condition."""
        return self._build_for_count_points(
            collection_name=collection_name, consistency=consistency, timeout=timeout, count_request=count_request
        )

    def delete_payload(
        self,
        collection_name: str,
        wait: bool = None,
        ordering: WriteOrdering = None,
        delete_payload: m.DeletePayload = None,
    ) -> m.InlineResponse2005:
        """Delete specified key payload for points."""
        return self._build_for_delete_payload(
            collection_name=collection_name, wait=wait, ordering=ordering, delete_payload=delete_payload
        )

    def delete_points(
        self,
        collection_name: str,
        wait: bool = None,
        ordering: WriteOrdering = None,
        points_selector: m.PointsSelector = None,
    ) -> m.InlineResponse2005:
        """Delete points."""
        return self._build_for_delete_points(
            collection_name=collection_name, wait=wait, ordering=ordering, points_selector=points_selector
        )

    def delete_vectors(
        self,
        collection_name: str,
        wait: bool = None,
        ordering: WriteOrdering = None,
        delete_vectors: m.DeleteVectors = None,
    ) -> m.InlineResponse2005:
        """Delete named vectors from the given points."""
        return self._build_for_delete_vectors(
            collection_name=collection_name, wait=wait, ordering=ordering, delete_vectors=delete_vectors
        )

    def facet(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        facet_request: m.FacetRequest = None,
    ) -> m.InlineResponse20020:
        """Count points that satisfy the given filter for each unique value of a payload key."""
        return self._build_for_facet(
            collection_name=collection_name, consistency=consistency, timeout=timeout, facet_request=facet_request
        )

    def get_point(
        self,
        collection_name: str,
        id: m.ExtendedPointId,
        consistency: m.ReadConsistency = None,
    ) -> m.InlineResponse20012:
        """Retrieve full information of single point by id."""
        return self._build_for_get_point(
            collection_name=collection_name, id=id, consistency=consistency
        )

    def get_points(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        point_request: m.PointRequest = None,
    ) -> m.InlineResponse20013:
        """Retrieve multiple points by specified IDs."""
        return self._build_for_get_points(
            collection_name=collection_name, consistency=consistency, timeout=timeout, point_request=point_request
        )

    def overwrite_payload(
        self,
        collection_name: str,
        wait: bool = None,
        ordering: WriteOrdering = None,
        set_payload: m.SetPayload = None,
    ) -> m.InlineResponse2005:
        """Replace full payload of points with new one."""
        return self._build_for_overwrite_payload(
            collection_name=collection_name, wait=wait, ordering=ordering, set_payload=set_payload
        )

    def scroll_points(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        scroll_request: m.ScrollRequest = None,
    ) -> m.InlineResponse20015:
        """Scroll request - paginate over all points which matches given filtering condition."""
        return self._build_for_scroll_points(
            collection_name=collection_name, consistency=consistency, timeout=timeout, scroll_request=scroll_request
        )

    def set_payload(
        self,
        collection_name: str,
        wait: bool = None,
        ordering: WriteOrdering = None,
        set_payload: m.SetPayload = None,
    ) -> m.InlineResponse2005:
        """Set payload values for points."""
        return self._build_for_set_payload(
            collection_name=collection_name, wait=wait, ordering=ordering, set_payload=set_payload
        )

    def update_vectors(
        self,
        collection_name: str,
        wait: bool = None,
        ordering: WriteOrdering = None,
        update_vectors: m.UpdateVectors = None,
    ) -> m.InlineResponse2005:
        """Update specified named vectors on points, keep unspecified vectors intact."""
        return self._build_for_update_vectors(
            collection_name=collection_name, wait=wait, ordering=ordering, update_vectors=update_vectors
        )

    def upsert_points(
        self,
        collection_name: str,
        wait: bool = None,
        ordering: WriteOrdering = None,
        point_insert_operations: m.PointInsertOperations = None,
    ) -> m.InlineResponse2005:
        """Perform insert + updates on points. If point with given ID already exists - it will be overwritten."""
        return self._build_for_upsert_points(
            collection_name=collection_name, wait=wait, ordering=ordering, point_insert_operations=point_insert_operations
        )
@@ -0,0 +1,941 @@
# flake8: noqa E501
from typing import TYPE_CHECKING, Any, Dict, Set, TypeVar, Union
from pydantic import BaseModel
from pydantic.main import BaseModel
from pydantic.version import VERSION as PYDANTIC_VERSION
from qdrant_client.http.models import *
from qdrant_client.http.models import models as m
# True when pydantic 2.x is installed; selects between the v1 (.json) and v2
# (.model_dump_json) serialization APIs in to_json below.
PYDANTIC_V2 = PYDANTIC_VERSION.startswith("2.")
Model = TypeVar("Model", bound="BaseModel")
# Shorthand aliases for the shapes pydantic accepts for include/exclude arguments.
SetIntStr = Set[Union[int, str]]
DictIntStrAny = Dict[Union[int, str], Any]
# NOTE(review): generated placeholder; appears unused in this module — confirm before removing.
file = None
def to_json(model: BaseModel, *args: Any, **kwargs: Any) -> str:
    """Serialize a pydantic model to a JSON string under either pydantic major version."""
    dump = model.model_dump_json if PYDANTIC_V2 else model.json
    return dump(*args, **kwargs)
def jsonable_encoder(
    obj: Any,
    include: Union[SetIntStr, DictIntStrAny] = None,
    exclude=None,
    by_alias: bool = True,
    skip_defaults: bool = None,
    exclude_unset: bool = True,
    exclude_none: bool = True,
):
    """Encode a request body: pydantic models become JSON strings, anything else passes through unchanged."""
    # Anything without a pydantic serialization method is returned as-is.
    if not (hasattr(obj, "json") or hasattr(obj, "model_dump_json")):
        return obj
    return to_json(
        obj,
        include=include,
        exclude=exclude,
        by_alias=by_alias,
        # skip_defaults is the legacy spelling of exclude_unset; either flag enables it.
        exclude_unset=bool(exclude_unset or skip_defaults),
        exclude_none=exclude_none,
    )
if TYPE_CHECKING:
from qdrant_client.http.api_client import ApiClient
class _SearchApi:
    """Request builders for the search / query / recommend / discover endpoints.

    Every endpoint in this API follows the same shape: POST a JSON payload to a
    URL templated on ``collection_name``, forwarding the optional read
    parameters ``consistency`` and ``timeout`` as query-string values only when
    they were supplied. That shared shape lives in :meth:`_build_search_post`;
    the public builders just pin the URL, response type and payload.
    """

    def __init__(self, api_client: "Union[ApiClient, AsyncApiClient]"):
        self.api_client = api_client

    def _build_search_post(self, type_, url, collection_name, consistency, timeout, payload):
        """Build a POST request for a search-family endpoint.

        :param type_: response model class used to deserialize the reply
        :param url: endpoint URL template containing ``{collection_name}``
        :param collection_name: target collection (substituted into the URL)
        :param consistency: optional read consistency, stringified when set
        :param timeout: optional per-request timeout, stringified when set
        :param payload: request model serialized as the JSON body (may be None)
        """
        query_params = {}
        if consistency is not None:
            query_params["consistency"] = str(consistency)
        if timeout is not None:
            query_params["timeout"] = str(timeout)
        return self.api_client.request(
            type_=type_,
            method="POST",
            url=url,
            headers={"Content-Type": "application/json"},
            path_params={"collection_name": str(collection_name)},
            params=query_params,
            content=jsonable_encoder(payload),
        )

    def _build_for_discover_batch_points(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        discover_request_batch: m.DiscoverRequestBatch = None,
    ):
        """
        Look for points based on target and/or positive and negative example pairs, in batch.
        """
        return self._build_search_post(
            m.InlineResponse20017,
            "/collections/{collection_name}/points/discover/batch",
            collection_name,
            consistency,
            timeout,
            discover_request_batch,
        )

    def _build_for_discover_points(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        discover_request: m.DiscoverRequest = None,
    ):
        """
        Use context and a target to find the most similar points to the target, constrained by the context. When using only the context (without a target), a special search - called context search - is performed where pairs of points are used to generate a loss that guides the search towards the zone where most positive examples overlap. This means that the score minimizes the scenario of finding a point closer to a negative than to a positive part of a pair. Since the score of a context relates to loss, the maximum score a point can get is 0.0, and it becomes normal that many points can have a score of 0.0. When using target (with or without context), the score behaves a little different: The integer part of the score represents the rank with respect to the context, while the decimal part of the score relates to the distance to the target. The context part of the score for each pair is calculated +1 if the point is closer to a positive than to a negative part of a pair, and -1 otherwise.
        """
        return self._build_search_post(
            m.InlineResponse20016,
            "/collections/{collection_name}/points/discover",
            collection_name,
            consistency,
            timeout,
            discover_request,
        )

    def _build_for_query_batch_points(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        query_request_batch: m.QueryRequestBatch = None,
    ):
        """
        Universally query points in batch. This endpoint covers all capabilities of search, recommend, discover, filters. But also enables hybrid and multi-stage queries.
        """
        return self._build_search_post(
            m.InlineResponse20022,
            "/collections/{collection_name}/points/query/batch",
            collection_name,
            consistency,
            timeout,
            query_request_batch,
        )

    def _build_for_query_points(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        query_request: m.QueryRequest = None,
    ):
        """
        Universally query points. This endpoint covers all capabilities of search, recommend, discover, filters. But also enables hybrid and multi-stage queries.
        """
        return self._build_search_post(
            m.InlineResponse20021,
            "/collections/{collection_name}/points/query",
            collection_name,
            consistency,
            timeout,
            query_request,
        )

    def _build_for_query_points_groups(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        query_groups_request: m.QueryGroupsRequest = None,
    ):
        """
        Universally query points, grouped by a given payload field
        """
        return self._build_search_post(
            m.InlineResponse20018,
            "/collections/{collection_name}/points/query/groups",
            collection_name,
            consistency,
            timeout,
            query_groups_request,
        )

    def _build_for_recommend_batch_points(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        recommend_request_batch: m.RecommendRequestBatch = None,
    ):
        """
        Look for the points which are closer to stored positive examples and at the same time further to negative examples.
        """
        return self._build_search_post(
            m.InlineResponse20017,
            "/collections/{collection_name}/points/recommend/batch",
            collection_name,
            consistency,
            timeout,
            recommend_request_batch,
        )

    def _build_for_recommend_point_groups(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        recommend_groups_request: m.RecommendGroupsRequest = None,
    ):
        """
        Look for the points which are closer to stored positive examples and at the same time further to negative examples, grouped by a given payload field.
        """
        return self._build_search_post(
            m.InlineResponse20018,
            "/collections/{collection_name}/points/recommend/groups",
            collection_name,
            consistency,
            timeout,
            recommend_groups_request,
        )

    def _build_for_recommend_points(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        recommend_request: m.RecommendRequest = None,
    ):
        """
        Look for the points which are closer to stored positive examples and at the same time further to negative examples.
        """
        return self._build_search_post(
            m.InlineResponse20016,
            "/collections/{collection_name}/points/recommend",
            collection_name,
            consistency,
            timeout,
            recommend_request,
        )

    def _build_for_search_batch_points(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        search_request_batch: m.SearchRequestBatch = None,
    ):
        """
        Retrieve by batch the closest points based on vector similarity and given filtering conditions
        """
        return self._build_search_post(
            m.InlineResponse20017,
            "/collections/{collection_name}/points/search/batch",
            collection_name,
            consistency,
            timeout,
            search_request_batch,
        )

    def _build_for_search_matrix_offsets(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        search_matrix_request: m.SearchMatrixRequest = None,
    ):
        """
        Compute distance matrix for sampled points with an offset based output format
        """
        return self._build_search_post(
            m.InlineResponse20024,
            "/collections/{collection_name}/points/search/matrix/offsets",
            collection_name,
            consistency,
            timeout,
            search_matrix_request,
        )

    def _build_for_search_matrix_pairs(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        search_matrix_request: m.SearchMatrixRequest = None,
    ):
        """
        Compute distance matrix for sampled points with a pair based output format
        """
        return self._build_search_post(
            m.InlineResponse20023,
            "/collections/{collection_name}/points/search/matrix/pairs",
            collection_name,
            consistency,
            timeout,
            search_matrix_request,
        )

    def _build_for_search_point_groups(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        search_groups_request: m.SearchGroupsRequest = None,
    ):
        """
        Retrieve closest points based on vector similarity and given filtering conditions, grouped by a given payload field
        """
        return self._build_search_post(
            m.InlineResponse20018,
            "/collections/{collection_name}/points/search/groups",
            collection_name,
            consistency,
            timeout,
            search_groups_request,
        )

    def _build_for_search_points(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        search_request: m.SearchRequest = None,
    ):
        """
        Retrieve closest points based on vector similarity and given filtering conditions
        """
        return self._build_search_post(
            m.InlineResponse20016,
            "/collections/{collection_name}/points/search",
            collection_name,
            consistency,
            timeout,
            search_request,
        )
class AsyncSearchApi(_SearchApi):
    """Asynchronous facade over the search-family endpoints.

    Each coroutine forwards its arguments verbatim to the matching
    ``_build_for_*`` request builder inherited from ``_SearchApi`` and
    awaits the HTTP call issued by the underlying async API client.
    """
    async def discover_batch_points(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        discover_request_batch: m.DiscoverRequestBatch = None,
    ) -> m.InlineResponse20017:
        """
        Look for points based on target and/or positive and negative example pairs, in batch.
        """
        return await self._build_for_discover_batch_points(
            collection_name=collection_name,
            consistency=consistency,
            timeout=timeout,
            discover_request_batch=discover_request_batch,
        )
    async def discover_points(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        discover_request: m.DiscoverRequest = None,
    ) -> m.InlineResponse20016:
        """
        Use context and a target to find the most similar points to the target, constrained by the context. When using only the context (without a target), a special search - called context search - is performed where pairs of points are used to generate a loss that guides the search towards the zone where most positive examples overlap. This means that the score minimizes the scenario of finding a point closer to a negative than to a positive part of a pair. Since the score of a context relates to loss, the maximum score a point can get is 0.0, and it becomes normal that many points can have a score of 0.0. When using target (with or without context), the score behaves a little different: The integer part of the score represents the rank with respect to the context, while the decimal part of the score relates to the distance to the target. The context part of the score for each pair is calculated +1 if the point is closer to a positive than to a negative part of a pair, and -1 otherwise.
        """
        return await self._build_for_discover_points(
            collection_name=collection_name,
            consistency=consistency,
            timeout=timeout,
            discover_request=discover_request,
        )
    async def query_batch_points(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        query_request_batch: m.QueryRequestBatch = None,
    ) -> m.InlineResponse20022:
        """
        Universally query points in batch. This endpoint covers all capabilities of search, recommend, discover, filters. But also enables hybrid and multi-stage queries.
        """
        return await self._build_for_query_batch_points(
            collection_name=collection_name,
            consistency=consistency,
            timeout=timeout,
            query_request_batch=query_request_batch,
        )
    async def query_points(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        query_request: m.QueryRequest = None,
    ) -> m.InlineResponse20021:
        """
        Universally query points. This endpoint covers all capabilities of search, recommend, discover, filters. But also enables hybrid and multi-stage queries.
        """
        return await self._build_for_query_points(
            collection_name=collection_name,
            consistency=consistency,
            timeout=timeout,
            query_request=query_request,
        )
    async def query_points_groups(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        query_groups_request: m.QueryGroupsRequest = None,
    ) -> m.InlineResponse20018:
        """
        Universally query points, grouped by a given payload field
        """
        return await self._build_for_query_points_groups(
            collection_name=collection_name,
            consistency=consistency,
            timeout=timeout,
            query_groups_request=query_groups_request,
        )
    async def recommend_batch_points(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        recommend_request_batch: m.RecommendRequestBatch = None,
    ) -> m.InlineResponse20017:
        """
        Look for the points which are closer to stored positive examples and at the same time further to negative examples.
        """
        return await self._build_for_recommend_batch_points(
            collection_name=collection_name,
            consistency=consistency,
            timeout=timeout,
            recommend_request_batch=recommend_request_batch,
        )
    async def recommend_point_groups(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        recommend_groups_request: m.RecommendGroupsRequest = None,
    ) -> m.InlineResponse20018:
        """
        Look for the points which are closer to stored positive examples and at the same time further to negative examples, grouped by a given payload field.
        """
        return await self._build_for_recommend_point_groups(
            collection_name=collection_name,
            consistency=consistency,
            timeout=timeout,
            recommend_groups_request=recommend_groups_request,
        )
    async def recommend_points(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        recommend_request: m.RecommendRequest = None,
    ) -> m.InlineResponse20016:
        """
        Look for the points which are closer to stored positive examples and at the same time further to negative examples.
        """
        return await self._build_for_recommend_points(
            collection_name=collection_name,
            consistency=consistency,
            timeout=timeout,
            recommend_request=recommend_request,
        )
    async def search_batch_points(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        search_request_batch: m.SearchRequestBatch = None,
    ) -> m.InlineResponse20017:
        """
        Retrieve by batch the closest points based on vector similarity and given filtering conditions
        """
        return await self._build_for_search_batch_points(
            collection_name=collection_name,
            consistency=consistency,
            timeout=timeout,
            search_request_batch=search_request_batch,
        )
    async def search_matrix_offsets(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        search_matrix_request: m.SearchMatrixRequest = None,
    ) -> m.InlineResponse20024:
        """
        Compute distance matrix for sampled points with an offset based output format
        """
        return await self._build_for_search_matrix_offsets(
            collection_name=collection_name,
            consistency=consistency,
            timeout=timeout,
            search_matrix_request=search_matrix_request,
        )
    async def search_matrix_pairs(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        search_matrix_request: m.SearchMatrixRequest = None,
    ) -> m.InlineResponse20023:
        """
        Compute distance matrix for sampled points with a pair based output format
        """
        return await self._build_for_search_matrix_pairs(
            collection_name=collection_name,
            consistency=consistency,
            timeout=timeout,
            search_matrix_request=search_matrix_request,
        )
    async def search_point_groups(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        search_groups_request: m.SearchGroupsRequest = None,
    ) -> m.InlineResponse20018:
        """
        Retrieve closest points based on vector similarity and given filtering conditions, grouped by a given payload field
        """
        return await self._build_for_search_point_groups(
            collection_name=collection_name,
            consistency=consistency,
            timeout=timeout,
            search_groups_request=search_groups_request,
        )
    async def search_points(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        search_request: m.SearchRequest = None,
    ) -> m.InlineResponse20016:
        """
        Retrieve closest points based on vector similarity and given filtering conditions
        """
        return await self._build_for_search_points(
            collection_name=collection_name,
            consistency=consistency,
            timeout=timeout,
            search_request=search_request,
        )
class SyncSearchApi(_SearchApi):
    """Blocking facade over the search-family endpoints.

    Each method forwards its arguments verbatim to the matching
    ``_build_for_*`` request builder inherited from ``_SearchApi`` and
    returns the result of the synchronous HTTP call.
    """
    def discover_batch_points(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        discover_request_batch: m.DiscoverRequestBatch = None,
    ) -> m.InlineResponse20017:
        """
        Look for points based on target and/or positive and negative example pairs, in batch.
        """
        return self._build_for_discover_batch_points(
            collection_name=collection_name,
            consistency=consistency,
            timeout=timeout,
            discover_request_batch=discover_request_batch,
        )
    def discover_points(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        discover_request: m.DiscoverRequest = None,
    ) -> m.InlineResponse20016:
        """
        Use context and a target to find the most similar points to the target, constrained by the context. When using only the context (without a target), a special search - called context search - is performed where pairs of points are used to generate a loss that guides the search towards the zone where most positive examples overlap. This means that the score minimizes the scenario of finding a point closer to a negative than to a positive part of a pair. Since the score of a context relates to loss, the maximum score a point can get is 0.0, and it becomes normal that many points can have a score of 0.0. When using target (with or without context), the score behaves a little different: The integer part of the score represents the rank with respect to the context, while the decimal part of the score relates to the distance to the target. The context part of the score for each pair is calculated +1 if the point is closer to a positive than to a negative part of a pair, and -1 otherwise.
        """
        return self._build_for_discover_points(
            collection_name=collection_name,
            consistency=consistency,
            timeout=timeout,
            discover_request=discover_request,
        )
    def query_batch_points(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        query_request_batch: m.QueryRequestBatch = None,
    ) -> m.InlineResponse20022:
        """
        Universally query points in batch. This endpoint covers all capabilities of search, recommend, discover, filters. But also enables hybrid and multi-stage queries.
        """
        return self._build_for_query_batch_points(
            collection_name=collection_name,
            consistency=consistency,
            timeout=timeout,
            query_request_batch=query_request_batch,
        )
    def query_points(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        query_request: m.QueryRequest = None,
    ) -> m.InlineResponse20021:
        """
        Universally query points. This endpoint covers all capabilities of search, recommend, discover, filters. But also enables hybrid and multi-stage queries.
        """
        return self._build_for_query_points(
            collection_name=collection_name,
            consistency=consistency,
            timeout=timeout,
            query_request=query_request,
        )
    def query_points_groups(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        query_groups_request: m.QueryGroupsRequest = None,
    ) -> m.InlineResponse20018:
        """
        Universally query points, grouped by a given payload field
        """
        return self._build_for_query_points_groups(
            collection_name=collection_name,
            consistency=consistency,
            timeout=timeout,
            query_groups_request=query_groups_request,
        )
    def recommend_batch_points(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        recommend_request_batch: m.RecommendRequestBatch = None,
    ) -> m.InlineResponse20017:
        """
        Look for the points which are closer to stored positive examples and at the same time further to negative examples.
        """
        return self._build_for_recommend_batch_points(
            collection_name=collection_name,
            consistency=consistency,
            timeout=timeout,
            recommend_request_batch=recommend_request_batch,
        )
    def recommend_point_groups(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        recommend_groups_request: m.RecommendGroupsRequest = None,
    ) -> m.InlineResponse20018:
        """
        Look for the points which are closer to stored positive examples and at the same time further to negative examples, grouped by a given payload field.
        """
        return self._build_for_recommend_point_groups(
            collection_name=collection_name,
            consistency=consistency,
            timeout=timeout,
            recommend_groups_request=recommend_groups_request,
        )
    def recommend_points(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        recommend_request: m.RecommendRequest = None,
    ) -> m.InlineResponse20016:
        """
        Look for the points which are closer to stored positive examples and at the same time further to negative examples.
        """
        return self._build_for_recommend_points(
            collection_name=collection_name,
            consistency=consistency,
            timeout=timeout,
            recommend_request=recommend_request,
        )
    def search_batch_points(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        search_request_batch: m.SearchRequestBatch = None,
    ) -> m.InlineResponse20017:
        """
        Retrieve by batch the closest points based on vector similarity and given filtering conditions
        """
        return self._build_for_search_batch_points(
            collection_name=collection_name,
            consistency=consistency,
            timeout=timeout,
            search_request_batch=search_request_batch,
        )
    def search_matrix_offsets(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        search_matrix_request: m.SearchMatrixRequest = None,
    ) -> m.InlineResponse20024:
        """
        Compute distance matrix for sampled points with an offset based output format
        """
        return self._build_for_search_matrix_offsets(
            collection_name=collection_name,
            consistency=consistency,
            timeout=timeout,
            search_matrix_request=search_matrix_request,
        )
    def search_matrix_pairs(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        search_matrix_request: m.SearchMatrixRequest = None,
    ) -> m.InlineResponse20023:
        """
        Compute distance matrix for sampled points with a pair based output format
        """
        return self._build_for_search_matrix_pairs(
            collection_name=collection_name,
            consistency=consistency,
            timeout=timeout,
            search_matrix_request=search_matrix_request,
        )
    def search_point_groups(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        search_groups_request: m.SearchGroupsRequest = None,
    ) -> m.InlineResponse20018:
        """
        Retrieve closest points based on vector similarity and given filtering conditions, grouped by a given payload field
        """
        return self._build_for_search_point_groups(
            collection_name=collection_name,
            consistency=consistency,
            timeout=timeout,
            search_groups_request=search_groups_request,
        )
    def search_points(
        self,
        collection_name: str,
        consistency: m.ReadConsistency = None,
        timeout: int = None,
        search_request: m.SearchRequest = None,
    ) -> m.InlineResponse20016:
        """
        Retrieve closest points based on vector similarity and given filtering conditions
        """
        return self._build_for_search_points(
            collection_name=collection_name,
            consistency=consistency,
            timeout=timeout,
            search_request=search_request,
        )
@@ -0,0 +1,268 @@
# flake8: noqa E501
from typing import TYPE_CHECKING, Any, Dict, Set, TypeVar, Union
from pydantic import BaseModel
from pydantic.main import BaseModel
from pydantic.version import VERSION as PYDANTIC_VERSION
from qdrant_client.http.models import *
from qdrant_client.http.models import models as m
# True when pydantic 2.x is installed; selects between the v1/v2 dump APIs in to_json().
PYDANTIC_V2 = PYDANTIC_VERSION.startswith("2.")
# Generic type variable for pydantic models (string bound avoids import-order issues).
Model = TypeVar("Model", bound="BaseModel")
# Field-selection shapes accepted by pydantic's include/exclude arguments.
SetIntStr = Set[Union[int, str]]
DictIntStrAny = Dict[Union[int, str], Any]
# Sentinel placeholder emitted by the code generator; presumably used as a
# ``type_`` marker for raw-file responses elsewhere in the client — TODO confirm.
file = None
def to_json(model: BaseModel, *args: Any, **kwargs: Any) -> str:
    """Serialize *model* to a JSON string.

    Dispatches to ``model_dump_json`` on pydantic v2 and to the legacy
    ``json`` method on pydantic v1, forwarding all arguments unchanged.
    """
    dump = model.model_dump_json if PYDANTIC_V2 else model.json
    return dump(*args, **kwargs)
def jsonable_encoder(
    obj: Any,
    include: Union[SetIntStr, DictIntStrAny] = None,
    exclude=None,
    by_alias: bool = True,
    skip_defaults: bool = None,
    exclude_unset: bool = True,
    exclude_none: bool = True,
):
    """Encode *obj* for use as an HTTP request body.

    Anything exposing a ``json`` or ``model_dump_json`` method (i.e. a
    pydantic model) is serialized via :func:`to_json`; every other value
    is returned untouched.  ``skip_defaults`` is the deprecated alias
    that gets folded into ``exclude_unset``.
    """
    looks_like_model = hasattr(obj, "json") or hasattr(obj, "model_dump_json")
    if not looks_like_model:
        return obj
    return to_json(
        obj,
        include=include,
        exclude=exclude,
        by_alias=by_alias,
        exclude_unset=bool(exclude_unset or skip_defaults),
        exclude_none=exclude_none,
    )
if TYPE_CHECKING:
from qdrant_client.http.api_client import ApiClient
class _ServiceApi:
def __init__(self, api_client: "Union[ApiClient, AsyncApiClient]"):
self.api_client = api_client
def _build_for_healthz(
self,
):
"""
An endpoint for health checking used in Kubernetes.
"""
headers = {}
return self.api_client.request(
type_=str,
method="GET",
url="/healthz",
headers=headers if headers else None,
)
def _build_for_livez(
self,
):
"""
An endpoint for health checking used in Kubernetes.
"""
headers = {}
return self.api_client.request(
type_=str,
method="GET",
url="/livez",
headers=headers if headers else None,
)
def _build_for_metrics(
self,
anonymize: bool = None,
):
"""
Collect metrics data including app info, collections info, cluster info and statistics
"""
query_params = {}
if anonymize is not None:
query_params["anonymize"] = str(anonymize).lower()
headers = {}
return self.api_client.request(
type_=str,
method="GET",
url="/metrics",
headers=headers if headers else None,
params=query_params,
)
def _build_for_readyz(
self,
):
"""
An endpoint for health checking used in Kubernetes.
"""
headers = {}
return self.api_client.request(
type_=str,
method="GET",
url="/readyz",
headers=headers if headers else None,
)
def _build_for_root(
self,
):
"""
Returns information about the running Qdrant instance like version and commit id
"""
headers = {}
return self.api_client.request(
type_=m.VersionInfo,
method="GET",
url="/",
headers=headers if headers else None,
)
def _build_for_telemetry(
self,
anonymize: bool = None,
details_level: int = None,
):
"""
Collect telemetry data including app info, system info, collections info, cluster info, configs and statistics
"""
query_params = {}
if anonymize is not None:
query_params["anonymize"] = str(anonymize).lower()
if details_level is not None:
query_params["details_level"] = str(details_level)
headers = {}
return self.api_client.request(
type_=m.InlineResponse2001,
method="GET",
url="/telemetry",
headers=headers if headers else None,
params=query_params,
)
class AsyncServiceApi(_ServiceApi):
    """Asynchronous facade over the service endpoints.

    Each coroutine delegates to the matching ``_build_for_*`` builder
    inherited from ``_ServiceApi`` and awaits the resulting HTTP call.
    """
    async def healthz(
        self,
    ) -> str:
        """
        An endpoint for health checking used in Kubernetes.
        """
        return await self._build_for_healthz()
    async def livez(
        self,
    ) -> str:
        """
        An endpoint for health checking used in Kubernetes.
        """
        return await self._build_for_livez()
    async def metrics(
        self,
        anonymize: bool = None,
    ) -> str:
        """
        Collect metrics data including app info, collections info, cluster info and statistics
        """
        return await self._build_for_metrics(
            anonymize=anonymize,
        )
    async def readyz(
        self,
    ) -> str:
        """
        An endpoint for health checking used in Kubernetes.
        """
        return await self._build_for_readyz()
    async def root(
        self,
    ) -> m.VersionInfo:
        """
        Returns information about the running Qdrant instance like version and commit id
        """
        return await self._build_for_root()
    async def telemetry(
        self,
        anonymize: bool = None,
        details_level: int = None,
    ) -> m.InlineResponse2001:
        """
        Collect telemetry data including app info, system info, collections info, cluster info, configs and statistics
        """
        return await self._build_for_telemetry(
            anonymize=anonymize,
            details_level=details_level,
        )
class SyncServiceApi(_ServiceApi):
    """Blocking facade over the service endpoints.

    Each method delegates to the matching ``_build_for_*`` builder
    inherited from ``_ServiceApi`` and returns the synchronous result.
    """
    def healthz(
        self,
    ) -> str:
        """
        An endpoint for health checking used in Kubernetes.
        """
        return self._build_for_healthz()
    def livez(
        self,
    ) -> str:
        """
        An endpoint for health checking used in Kubernetes.
        """
        return self._build_for_livez()
    def metrics(
        self,
        anonymize: bool = None,
    ) -> str:
        """
        Collect metrics data including app info, collections info, cluster info and statistics
        """
        return self._build_for_metrics(
            anonymize=anonymize,
        )
    def readyz(
        self,
    ) -> str:
        """
        An endpoint for health checking used in Kubernetes.
        """
        return self._build_for_readyz()
    def root(
        self,
    ) -> m.VersionInfo:
        """
        Returns information about the running Qdrant instance like version and commit id
        """
        return self._build_for_root()
    def telemetry(
        self,
        anonymize: bool = None,
        details_level: int = None,
    ) -> m.InlineResponse2001:
        """
        Collect telemetry data including app info, system info, collections info, cluster info, configs and statistics
        """
        return self._build_for_telemetry(
            anonymize=anonymize,
            details_level=details_level,
        )
@@ -0,0 +1,937 @@
# flake8: noqa E501
from typing import IO, TYPE_CHECKING, Any, Dict, Set, TypeVar, Union
from pydantic import BaseModel
from pydantic.main import BaseModel
from pydantic.version import VERSION as PYDANTIC_VERSION
from qdrant_client.http.models import *
from qdrant_client.http.models import models as m
# True when pydantic 2.x is installed; selects between the v1/v2 dump APIs in to_json().
PYDANTIC_V2 = PYDANTIC_VERSION.startswith("2.")
# Generic type variable for pydantic models (string bound avoids import-order issues).
Model = TypeVar("Model", bound="BaseModel")
# Field-selection shapes accepted by pydantic's include/exclude arguments.
SetIntStr = Set[Union[int, str]]
DictIntStrAny = Dict[Union[int, str], Any]
# Sentinel passed as ``type_`` to the API client for endpoints that return a
# raw file body (snapshot downloads below) rather than a parsed model.
file = None
def to_json(model: BaseModel, *args: Any, **kwargs: Any) -> str:
    """Serialize *model* to JSON, forwarding all arguments.

    Uses ``model_dump_json`` on pydantic v2 and the legacy ``json``
    method on pydantic v1.
    """
    serializer = model.model_dump_json if PYDANTIC_V2 else model.json
    return serializer(*args, **kwargs)
def jsonable_encoder(
    obj: Any,
    include: Union[SetIntStr, DictIntStrAny] = None,
    exclude=None,
    by_alias: bool = True,
    skip_defaults: bool = None,
    exclude_unset: bool = True,
    exclude_none: bool = True,
):
    """Encode *obj* for use as an HTTP request body.

    Pydantic models (anything exposing ``json``/``model_dump_json``) are
    serialized via :func:`to_json`; other values pass through unchanged.
    The deprecated ``skip_defaults`` flag is merged into ``exclude_unset``.
    """
    serializable = hasattr(obj, "json") or hasattr(obj, "model_dump_json")
    if not serializable:
        return obj
    return to_json(
        obj,
        include=include,
        exclude=exclude,
        by_alias=by_alias,
        exclude_unset=bool(exclude_unset or skip_defaults),
        exclude_none=exclude_none,
    )
if TYPE_CHECKING:
from qdrant_client.http.api_client import ApiClient
class _SnapshotsApi:
    """Request builders for the snapshot endpoints (create, list, download,
    delete, and recover snapshots at storage, collection, and shard scope).

    Shared by the sync and async facades; each ``_build_for_*`` method only
    assembles path/query parameters, headers, and body, and hands them to
    the API client's ``request`` method.  Download endpoints pass the
    module-level ``file`` sentinel as ``type_`` to request a raw file body.
    """
    def __init__(self, api_client: "Union[ApiClient, AsyncApiClient]"):
        self.api_client = api_client
    def _build_for_create_full_snapshot(
        self,
        wait: bool = None,
    ):
        """
        Create new snapshot of the whole storage
        """
        query_params = {}
        if wait is not None:
            query_params["wait"] = str(wait).lower()
        headers = {}
        return self.api_client.request(
            type_=m.InlineResponse20011,
            method="POST",
            url="/snapshots",
            headers=headers if headers else None,
            params=query_params,
        )
    def _build_for_create_shard_snapshot(
        self,
        collection_name: str,
        shard_id: int,
        wait: bool = None,
    ):
        """
        Create new snapshot of a shard for a collection
        """
        path_params = {
            "collection_name": str(collection_name),
            "shard_id": str(shard_id),
        }
        query_params = {}
        if wait is not None:
            query_params["wait"] = str(wait).lower()
        headers = {}
        return self.api_client.request(
            type_=m.InlineResponse20011,
            method="POST",
            url="/collections/{collection_name}/shards/{shard_id}/snapshots",
            headers=headers if headers else None,
            path_params=path_params,
            params=query_params,
        )
    def _build_for_create_snapshot(
        self,
        collection_name: str,
        wait: bool = None,
    ):
        """
        Create new snapshot for a collection
        """
        path_params = {
            "collection_name": str(collection_name),
        }
        query_params = {}
        if wait is not None:
            query_params["wait"] = str(wait).lower()
        headers = {}
        return self.api_client.request(
            type_=m.InlineResponse20011,
            method="POST",
            url="/collections/{collection_name}/snapshots",
            headers=headers if headers else None,
            path_params=path_params,
            params=query_params,
        )
    def _build_for_delete_full_snapshot(
        self,
        snapshot_name: str,
        wait: bool = None,
    ):
        """
        Delete snapshot of the whole storage
        """
        path_params = {
            "snapshot_name": str(snapshot_name),
        }
        query_params = {}
        if wait is not None:
            query_params["wait"] = str(wait).lower()
        headers = {}
        return self.api_client.request(
            type_=m.InlineResponse2009,
            method="DELETE",
            url="/snapshots/{snapshot_name}",
            headers=headers if headers else None,
            path_params=path_params,
            params=query_params,
        )
    def _build_for_delete_shard_snapshot(
        self,
        collection_name: str,
        shard_id: int,
        snapshot_name: str,
        wait: bool = None,
    ):
        """
        Delete snapshot of a shard for a collection
        """
        path_params = {
            "collection_name": str(collection_name),
            "shard_id": str(shard_id),
            "snapshot_name": str(snapshot_name),
        }
        query_params = {}
        if wait is not None:
            query_params["wait"] = str(wait).lower()
        headers = {}
        return self.api_client.request(
            type_=m.InlineResponse2009,
            method="DELETE",
            url="/collections/{collection_name}/shards/{shard_id}/snapshots/{snapshot_name}",
            headers=headers if headers else None,
            path_params=path_params,
            params=query_params,
        )
    def _build_for_delete_snapshot(
        self,
        collection_name: str,
        snapshot_name: str,
        wait: bool = None,
    ):
        """
        Delete snapshot for a collection
        """
        path_params = {
            "collection_name": str(collection_name),
            "snapshot_name": str(snapshot_name),
        }
        query_params = {}
        if wait is not None:
            query_params["wait"] = str(wait).lower()
        headers = {}
        return self.api_client.request(
            type_=m.InlineResponse2009,
            method="DELETE",
            url="/collections/{collection_name}/snapshots/{snapshot_name}",
            headers=headers if headers else None,
            path_params=path_params,
            params=query_params,
        )
    def _build_for_get_full_snapshot(
        self,
        snapshot_name: str,
    ):
        """
        Download specified snapshot of the whole storage as a file
        """
        path_params = {
            "snapshot_name": str(snapshot_name),
        }
        headers = {}
        # ``file`` sentinel: the response body is returned as a raw file.
        return self.api_client.request(
            type_=file,
            method="GET",
            url="/snapshots/{snapshot_name}",
            headers=headers if headers else None,
            path_params=path_params,
        )
    def _build_for_get_shard_snapshot(
        self,
        collection_name: str,
        shard_id: int,
        snapshot_name: str,
    ):
        """
        Download specified snapshot of a shard from a collection as a file
        """
        path_params = {
            "collection_name": str(collection_name),
            "shard_id": str(shard_id),
            "snapshot_name": str(snapshot_name),
        }
        headers = {}
        return self.api_client.request(
            type_=file,
            method="GET",
            url="/collections/{collection_name}/shards/{shard_id}/snapshots/{snapshot_name}",
            headers=headers if headers else None,
            path_params=path_params,
        )
    def _build_for_get_snapshot(
        self,
        collection_name: str,
        snapshot_name: str,
    ):
        """
        Download specified snapshot from a collection as a file
        """
        path_params = {
            "collection_name": str(collection_name),
            "snapshot_name": str(snapshot_name),
        }
        headers = {}
        return self.api_client.request(
            type_=file,
            method="GET",
            url="/collections/{collection_name}/snapshots/{snapshot_name}",
            headers=headers if headers else None,
            path_params=path_params,
        )
    def _build_for_list_full_snapshots(
        self,
    ):
        """
        Get list of snapshots of the whole storage
        """
        headers = {}
        return self.api_client.request(
            type_=m.InlineResponse20010,
            method="GET",
            url="/snapshots",
            headers=headers if headers else None,
        )
    def _build_for_list_shard_snapshots(
        self,
        collection_name: str,
        shard_id: int,
    ):
        """
        Get list of snapshots for a shard of a collection
        """
        path_params = {
            "collection_name": str(collection_name),
            "shard_id": str(shard_id),
        }
        headers = {}
        return self.api_client.request(
            type_=m.InlineResponse20010,
            method="GET",
            url="/collections/{collection_name}/shards/{shard_id}/snapshots",
            headers=headers if headers else None,
            path_params=path_params,
        )
    def _build_for_list_snapshots(
        self,
        collection_name: str,
    ):
        """
        Get list of snapshots for a collection
        """
        path_params = {
            "collection_name": str(collection_name),
        }
        headers = {}
        return self.api_client.request(
            type_=m.InlineResponse20010,
            method="GET",
            url="/collections/{collection_name}/snapshots",
            headers=headers if headers else None,
            path_params=path_params,
        )
    def _build_for_recover_from_snapshot(
        self,
        collection_name: str,
        wait: bool = None,
        snapshot_recover: m.SnapshotRecover = None,
    ):
        """
        Recover local collection data from a snapshot. This will overwrite any data, stored on this node, for the collection. If collection does not exist - it will be created.
        """
        path_params = {
            "collection_name": str(collection_name),
        }
        query_params = {}
        if wait is not None:
            query_params["wait"] = str(wait).lower()
        headers = {}
        body = jsonable_encoder(snapshot_recover)
        if "Content-Type" not in headers:
            headers["Content-Type"] = "application/json"
        return self.api_client.request(
            type_=m.InlineResponse2009,
            method="PUT",
            url="/collections/{collection_name}/snapshots/recover",
            headers=headers if headers else None,
            path_params=path_params,
            params=query_params,
            content=body,
        )
    def _build_for_recover_from_uploaded_snapshot(
        self,
        collection_name: str,
        wait: bool = None,
        priority: SnapshotPriority = None,
        checksum: str = None,
        snapshot: IO[Any] = None,
    ):
        """
        Recover local collection data from an uploaded snapshot. This will overwrite any data, stored on this node, for the collection. If collection does not exist - it will be created.
        """
        path_params = {
            "collection_name": str(collection_name),
        }
        query_params = {}
        if wait is not None:
            query_params["wait"] = str(wait).lower()
        if priority is not None:
            query_params["priority"] = str(priority)
        if checksum is not None:
            query_params["checksum"] = str(checksum)
        headers = {}
        # The snapshot is sent as a multipart upload, not a JSON body.
        files: Dict[str, IO[Any]] = {}  # noqa F841
        data: Dict[str, Any] = {}  # noqa F841
        if snapshot is not None:
            files["snapshot"] = snapshot
        return self.api_client.request(
            type_=m.InlineResponse2009,
            method="POST",
            url="/collections/{collection_name}/snapshots/upload",
            headers=headers if headers else None,
            path_params=path_params,
            params=query_params,
            data=data,
            files=files,
        )
    def _build_for_recover_shard_from_snapshot(
        self,
        collection_name: str,
        shard_id: int,
        wait: bool = None,
        shard_snapshot_recover: m.ShardSnapshotRecover = None,
    ):
        """
        Recover shard of a local collection data from a snapshot. This will overwrite any data, stored in this shard, for the collection.
        """
        path_params = {
            "collection_name": str(collection_name),
            "shard_id": str(shard_id),
        }
        query_params = {}
        if wait is not None:
            query_params["wait"] = str(wait).lower()
        headers = {}
        body = jsonable_encoder(shard_snapshot_recover)
        if "Content-Type" not in headers:
            headers["Content-Type"] = "application/json"
        return self.api_client.request(
            type_=m.InlineResponse2009,
            method="PUT",
            url="/collections/{collection_name}/shards/{shard_id}/snapshots/recover",
            headers=headers if headers else None,
            path_params=path_params,
            params=query_params,
            content=body,
        )
    def _build_for_recover_shard_from_uploaded_snapshot(
        self,
        collection_name: str,
        shard_id: int,
        wait: bool = None,
        priority: SnapshotPriority = None,
        checksum: str = None,
        snapshot: IO[Any] = None,
    ):
        """
        Recover shard of a local collection from an uploaded snapshot. This will overwrite any data, stored on this node, for the collection shard.
        """
        path_params = {
            "collection_name": str(collection_name),
            "shard_id": str(shard_id),
        }
        query_params = {}
        if wait is not None:
            query_params["wait"] = str(wait).lower()
        if priority is not None:
            query_params["priority"] = str(priority)
        if checksum is not None:
            query_params["checksum"] = str(checksum)
        headers = {}
        files: Dict[str, IO[Any]] = {}  # noqa F841
        data: Dict[str, Any] = {}  # noqa F841
        if snapshot is not None:
            files["snapshot"] = snapshot
        return self.api_client.request(
            type_=m.InlineResponse2009,
            method="POST",
            url="/collections/{collection_name}/shards/{shard_id}/snapshots/upload",
            headers=headers if headers else None,
            path_params=path_params,
            params=query_params,
            data=data,
            files=files,
        )
class AsyncSnapshotsApi(_SnapshotsApi):
    """Asynchronous facade over the snapshot endpoints.

    Each coroutine forwards its arguments verbatim to the matching
    ``_build_for_*`` request builder inherited from ``_SnapshotsApi``
    and awaits the resulting HTTP call.
    """
    async def create_full_snapshot(
        self,
        wait: bool = None,
    ) -> m.InlineResponse20011:
        """
        Create new snapshot of the whole storage
        """
        return await self._build_for_create_full_snapshot(
            wait=wait,
        )
    async def create_shard_snapshot(
        self,
        collection_name: str,
        shard_id: int,
        wait: bool = None,
    ) -> m.InlineResponse20011:
        """
        Create new snapshot of a shard for a collection
        """
        return await self._build_for_create_shard_snapshot(
            collection_name=collection_name,
            shard_id=shard_id,
            wait=wait,
        )
    async def create_snapshot(
        self,
        collection_name: str,
        wait: bool = None,
    ) -> m.InlineResponse20011:
        """
        Create new snapshot for a collection
        """
        return await self._build_for_create_snapshot(
            collection_name=collection_name,
            wait=wait,
        )
    async def delete_full_snapshot(
        self,
        snapshot_name: str,
        wait: bool = None,
    ) -> m.InlineResponse2009:
        """
        Delete snapshot of the whole storage
        """
        return await self._build_for_delete_full_snapshot(
            snapshot_name=snapshot_name,
            wait=wait,
        )
    async def delete_shard_snapshot(
        self,
        collection_name: str,
        shard_id: int,
        snapshot_name: str,
        wait: bool = None,
    ) -> m.InlineResponse2009:
        """
        Delete snapshot of a shard for a collection
        """
        return await self._build_for_delete_shard_snapshot(
            collection_name=collection_name,
            shard_id=shard_id,
            snapshot_name=snapshot_name,
            wait=wait,
        )
    async def delete_snapshot(
        self,
        collection_name: str,
        snapshot_name: str,
        wait: bool = None,
    ) -> m.InlineResponse2009:
        """
        Delete snapshot for a collection
        """
        return await self._build_for_delete_snapshot(
            collection_name=collection_name,
            snapshot_name=snapshot_name,
            wait=wait,
        )
    async def get_full_snapshot(
        self,
        snapshot_name: str,
    ) -> file:
        """
        Download specified snapshot of the whole storage as a file
        """
        return await self._build_for_get_full_snapshot(
            snapshot_name=snapshot_name,
        )
    async def get_shard_snapshot(
        self,
        collection_name: str,
        shard_id: int,
        snapshot_name: str,
    ) -> file:
        """
        Download specified snapshot of a shard from a collection as a file
        """
        return await self._build_for_get_shard_snapshot(
            collection_name=collection_name,
            shard_id=shard_id,
            snapshot_name=snapshot_name,
        )
    async def get_snapshot(
        self,
        collection_name: str,
        snapshot_name: str,
    ) -> file:
        """
        Download specified snapshot from a collection as a file
        """
        return await self._build_for_get_snapshot(
            collection_name=collection_name,
            snapshot_name=snapshot_name,
        )
    async def list_full_snapshots(
        self,
    ) -> m.InlineResponse20010:
        """
        Get list of snapshots of the whole storage
        """
        return await self._build_for_list_full_snapshots()
    async def list_shard_snapshots(
        self,
        collection_name: str,
        shard_id: int,
    ) -> m.InlineResponse20010:
        """
        Get list of snapshots for a shard of a collection
        """
        return await self._build_for_list_shard_snapshots(
            collection_name=collection_name,
            shard_id=shard_id,
        )
    async def list_snapshots(
        self,
        collection_name: str,
    ) -> m.InlineResponse20010:
        """
        Get list of snapshots for a collection
        """
        return await self._build_for_list_snapshots(
            collection_name=collection_name,
        )
    async def recover_from_snapshot(
        self,
        collection_name: str,
        wait: bool = None,
        snapshot_recover: m.SnapshotRecover = None,
    ) -> m.InlineResponse2009:
        """
        Recover local collection data from a snapshot. This will overwrite any data, stored on this node, for the collection. If collection does not exist - it will be created.
        """
        return await self._build_for_recover_from_snapshot(
            collection_name=collection_name,
            wait=wait,
            snapshot_recover=snapshot_recover,
        )
    async def recover_from_uploaded_snapshot(
        self,
        collection_name: str,
        wait: bool = None,
        priority: SnapshotPriority = None,
        checksum: str = None,
        snapshot: IO[Any] = None,
    ) -> m.InlineResponse2009:
        """
        Recover local collection data from an uploaded snapshot. This will overwrite any data, stored on this node, for the collection. If collection does not exist - it will be created.
        """
        return await self._build_for_recover_from_uploaded_snapshot(
            collection_name=collection_name,
            wait=wait,
            priority=priority,
            checksum=checksum,
            snapshot=snapshot,
        )
    async def recover_shard_from_snapshot(
        self,
        collection_name: str,
        shard_id: int,
        wait: bool = None,
        shard_snapshot_recover: m.ShardSnapshotRecover = None,
    ) -> m.InlineResponse2009:
        """
        Recover shard of a local collection data from a snapshot. This will overwrite any data, stored in this shard, for the collection.
        """
        return await self._build_for_recover_shard_from_snapshot(
            collection_name=collection_name,
            shard_id=shard_id,
            wait=wait,
            shard_snapshot_recover=shard_snapshot_recover,
        )
    async def recover_shard_from_uploaded_snapshot(
        self,
        collection_name: str,
        shard_id: int,
        wait: bool = None,
        priority: SnapshotPriority = None,
        checksum: str = None,
        snapshot: IO[Any] = None,
    ) -> m.InlineResponse2009:
        """
        Recover shard of a local collection from an uploaded snapshot. This will overwrite any data, stored on this node, for the collection shard.
        """
        return await self._build_for_recover_shard_from_uploaded_snapshot(
            collection_name=collection_name,
            shard_id=shard_id,
            wait=wait,
            priority=priority,
            checksum=checksum,
            snapshot=snapshot,
        )
class SyncSnapshotsApi(_SnapshotsApi):
    """Blocking variant of the snapshots API.

    Every method forwards its arguments verbatim to the corresponding
    ``_build_for_*`` request builder inherited from ``_SnapshotsApi``.
    """

    def create_full_snapshot(self, wait: bool = None) -> m.InlineResponse20011:
        """Take a new snapshot of the whole storage."""
        return self._build_for_create_full_snapshot(wait=wait)

    def create_shard_snapshot(self, collection_name: str, shard_id: int, wait: bool = None) -> m.InlineResponse20011:
        """Take a new snapshot of a single shard of a collection."""
        return self._build_for_create_shard_snapshot(
            collection_name=collection_name, shard_id=shard_id, wait=wait
        )

    def create_snapshot(self, collection_name: str, wait: bool = None) -> m.InlineResponse20011:
        """Take a new snapshot of a collection."""
        return self._build_for_create_snapshot(collection_name=collection_name, wait=wait)

    def delete_full_snapshot(self, snapshot_name: str, wait: bool = None) -> m.InlineResponse2009:
        """Remove a snapshot of the whole storage."""
        return self._build_for_delete_full_snapshot(snapshot_name=snapshot_name, wait=wait)

    def delete_shard_snapshot(
        self, collection_name: str, shard_id: int, snapshot_name: str, wait: bool = None
    ) -> m.InlineResponse2009:
        """Remove a snapshot of a single shard of a collection."""
        return self._build_for_delete_shard_snapshot(
            collection_name=collection_name, shard_id=shard_id,
            snapshot_name=snapshot_name, wait=wait,
        )

    def delete_snapshot(self, collection_name: str, snapshot_name: str, wait: bool = None) -> m.InlineResponse2009:
        """Remove a snapshot of a collection."""
        return self._build_for_delete_snapshot(
            collection_name=collection_name, snapshot_name=snapshot_name, wait=wait
        )

    def get_full_snapshot(self, snapshot_name: str) -> file:
        """Download the named whole-storage snapshot as a file."""
        return self._build_for_get_full_snapshot(snapshot_name=snapshot_name)

    def get_shard_snapshot(self, collection_name: str, shard_id: int, snapshot_name: str) -> file:
        """Download the named snapshot of one shard of a collection as a file."""
        return self._build_for_get_shard_snapshot(
            collection_name=collection_name, shard_id=shard_id, snapshot_name=snapshot_name
        )

    def get_snapshot(self, collection_name: str, snapshot_name: str) -> file:
        """Download the named snapshot of a collection as a file."""
        return self._build_for_get_snapshot(
            collection_name=collection_name, snapshot_name=snapshot_name
        )

    def list_full_snapshots(self) -> m.InlineResponse20010:
        """Return the list of snapshots taken of the whole storage."""
        return self._build_for_list_full_snapshots()

    def list_shard_snapshots(self, collection_name: str, shard_id: int) -> m.InlineResponse20010:
        """Return the list of snapshots that exist for one shard of a collection."""
        return self._build_for_list_shard_snapshots(
            collection_name=collection_name, shard_id=shard_id
        )

    def list_snapshots(self, collection_name: str) -> m.InlineResponse20010:
        """Return the list of snapshots that exist for a collection."""
        return self._build_for_list_snapshots(collection_name=collection_name)

    def recover_from_snapshot(
        self, collection_name: str, wait: bool = None, snapshot_recover: m.SnapshotRecover = None
    ) -> m.InlineResponse2009:
        """Restore collection data on this node from a snapshot.

        Existing local data for the collection is overwritten; the collection
        is created if it does not exist yet.
        """
        return self._build_for_recover_from_snapshot(
            collection_name=collection_name, wait=wait, snapshot_recover=snapshot_recover
        )

    def recover_from_uploaded_snapshot(
        self,
        collection_name: str,
        wait: bool = None,
        priority: SnapshotPriority = None,
        checksum: str = None,
        snapshot: IO[Any] = None,
    ) -> m.InlineResponse2009:
        """Restore collection data on this node from an uploaded snapshot file.

        Existing local data for the collection is overwritten; the collection
        is created if it does not exist yet.
        """
        return self._build_for_recover_from_uploaded_snapshot(
            collection_name=collection_name, wait=wait, priority=priority,
            checksum=checksum, snapshot=snapshot,
        )

    def recover_shard_from_snapshot(
        self,
        collection_name: str,
        shard_id: int,
        wait: bool = None,
        shard_snapshot_recover: m.ShardSnapshotRecover = None,
    ) -> m.InlineResponse2009:
        """Restore one shard of a local collection from a snapshot, overwriting the shard's stored data."""
        return self._build_for_recover_shard_from_snapshot(
            collection_name=collection_name, shard_id=shard_id,
            wait=wait, shard_snapshot_recover=shard_snapshot_recover,
        )

    def recover_shard_from_uploaded_snapshot(
        self,
        collection_name: str,
        shard_id: int,
        wait: bool = None,
        priority: SnapshotPriority = None,
        checksum: str = None,
        snapshot: IO[Any] = None,
    ) -> m.InlineResponse2009:
        """Restore one shard of a local collection from an uploaded snapshot file, overwriting the shard's stored data."""
        return self._build_for_recover_shard_from_uploaded_snapshot(
            collection_name=collection_name, shard_id=shard_id, wait=wait,
            priority=priority, checksum=checksum, snapshot=snapshot,
        )
@@ -0,0 +1,263 @@
from asyncio import get_event_loop
from functools import lru_cache
from typing import Any, Awaitable, Callable, Dict, Generic, Type, TypeVar, overload
from urllib.parse import urljoin
from httpx import AsyncClient, Client, Request, Response
from pydantic import ValidationError
from qdrant_client.common.client_exceptions import ResourceExhaustedResponse
from qdrant_client.http.api.aliases_api import AsyncAliasesApi, SyncAliasesApi
from qdrant_client.http.api.beta_api import AsyncBetaApi, SyncBetaApi
from qdrant_client.http.api.collections_api import AsyncCollectionsApi, SyncCollectionsApi
from qdrant_client.http.api.distributed_api import AsyncDistributedApi, SyncDistributedApi
from qdrant_client.http.api.indexes_api import AsyncIndexesApi, SyncIndexesApi
from qdrant_client.http.api.points_api import AsyncPointsApi, SyncPointsApi
from qdrant_client.http.api.search_api import AsyncSearchApi, SyncSearchApi
from qdrant_client.http.api.service_api import AsyncServiceApi, SyncServiceApi
from qdrant_client.http.api.snapshots_api import AsyncSnapshotsApi, SyncSnapshotsApi
from qdrant_client.http.exceptions import ResponseHandlingException, UnexpectedResponse
# Type variables binding the generated API bundles (SyncApis / AsyncApis)
# to their concrete transport client class.
ClientT = TypeVar("ClientT", bound="ApiClient")
AsyncClientT = TypeVar("AsyncClientT", bound="AsyncApiClient")
class AsyncApis(Generic[AsyncClientT]):
    """Aggregates one shared `AsyncApiClient` plus every generated async API facade."""

    def __init__(self, host: str, **kwargs: Any):
        # A single transport client is shared by all facades; kwargs go to httpx.
        client = AsyncApiClient(host, **kwargs)
        self.client = client
        self.aliases_api = AsyncAliasesApi(client)
        self.beta_api = AsyncBetaApi(client)
        self.collections_api = AsyncCollectionsApi(client)
        self.distributed_api = AsyncDistributedApi(client)
        self.indexes_api = AsyncIndexesApi(client)
        self.points_api = AsyncPointsApi(client)
        self.search_api = AsyncSearchApi(client)
        self.service_api = AsyncServiceApi(client)
        self.snapshots_api = AsyncSnapshotsApi(client)

    async def aclose(self) -> None:
        """Shut down the shared HTTP client and its connection pool."""
        await self.client.aclose()
class SyncApis(Generic[ClientT]):
    """Aggregates one shared `ApiClient` plus every generated sync API facade."""

    def __init__(self, host: str, **kwargs: Any):
        # A single transport client is shared by all facades; kwargs go to httpx.
        client = ApiClient(host, **kwargs)
        self.client = client
        self.aliases_api = SyncAliasesApi(client)
        self.beta_api = SyncBetaApi(client)
        self.collections_api = SyncCollectionsApi(client)
        self.distributed_api = SyncDistributedApi(client)
        self.indexes_api = SyncIndexesApi(client)
        self.points_api = SyncPointsApi(client)
        self.search_api = SyncSearchApi(client)
        self.service_api = SyncServiceApi(client)
        self.snapshots_api = SyncSnapshotsApi(client)

    def close(self) -> None:
        """Shut down the shared HTTP client and its connection pool."""
        self.client.close()
T = TypeVar("T")  # parsed response model type returned by request()/send()
Send = Callable[[Request], Response]  # terminal sender in the sync middleware chain
SendAsync = Callable[[Request], Awaitable[Response]]  # terminal sender in the async middleware chain
# A middleware receives the request plus the next sender and returns the response.
MiddlewareT = Callable[[Request, Send], Response]
AsyncMiddlewareT = Callable[[Request, SendAsync], Awaitable[Response]]
class ApiClient:
    """Synchronous HTTP transport shared by the generated *Sync* API classes.

    Wraps an `httpx.Client`, applies a composable middleware chain to every
    outgoing request, and parses successful responses into the requested
    model type.
    """

    def __init__(self, host: str, **kwargs: Any) -> None:
        # Base URL of the service; extra kwargs are forwarded to httpx.Client.
        self.host = host
        self.middleware: MiddlewareT = BaseMiddleware()
        self._client = Client(**kwargs)

    @overload
    def request(self, *, type_: Type[T], method: str, url: str, path_params: Dict[str, Any] = None, **kwargs: Any) -> T:
        ...

    @overload  # noqa F811
    def request(self, *, type_: None, method: str, url: str, path_params: Dict[str, Any] = None, **kwargs: Any) -> None:
        ...

    def request(  # noqa F811
        self, *, type_: Any, method: str, url: str, path_params: Dict[str, Any] = None, **kwargs: Any
    ) -> Any:
        """Format `url` with `path_params`, send the request, and parse the body as `type_`."""
        if path_params is None:
            path_params = {}
        host = self.host if self.host.endswith("/") else self.host + "/"
        url = url[1:] if url.startswith("/") else url
        # in order to do a correct join, url join requires base_url to end with /, and url to not start with /,
        # since url is treated as an absolute path and might truncate prefix in base_url
        url = urljoin(host, url.format(**path_params))
        # Mirror a per-request "timeout" query parameter onto the httpx timeout.
        if "params" in kwargs and "timeout" in kwargs["params"]:
            kwargs["timeout"] = int(kwargs["params"]["timeout"])
        request = self._client.build_request(method, url, **kwargs)
        return self.send(request, type_)

    @overload
    def request_sync(self, *, type_: Type[T], **kwargs: Any) -> T:
        ...

    @overload  # noqa F811
    def request_sync(self, *, type_: None, **kwargs: Any) -> None:
        ...

    def request_sync(self, *, type_: Any, **kwargs: Any) -> Any:  # noqa F811
        """
        This method is not used by the generated apis, but is included for convenience
        """
        # Fix: unlike AsyncApiClient.request, `self.request` here is a plain
        # synchronous method, so wrapping its return value in
        # `get_event_loop().run_until_complete(...)` raised a TypeError
        # ("An asyncio.Future, a coroutine or an awaitable is required").
        # Call it directly instead.
        return self.request(type_=type_, **kwargs)

    def send(self, request: Request, type_: Type[T]) -> T:
        """Run the middleware chain and convert the response to `type_`.

        Raises:
            ResourceExhaustedResponse: on HTTP 429 carrying a Retry-After header.
            ResponseHandlingException: when a 2xx payload fails model validation.
            UnexpectedResponse: for any other status code.
        """
        response = self.middleware(request, self.send_inner)
        if response.status_code == 429:
            retry_after_s = response.headers.get("Retry-After", None)
            try:
                resp = response.json()
                message = resp["status"]["error"] if resp["status"] and resp["status"]["error"] else ""
            except Exception:
                message = ""
            # A 429 without Retry-After falls through to UnexpectedResponse below.
            if retry_after_s:
                raise ResourceExhaustedResponse(message, retry_after_s)
        if response.status_code in [200, 201, 202]:
            try:
                return parse_as_type(response.json(), type_)
            except ValidationError as e:
                raise ResponseHandlingException(e)
        raise UnexpectedResponse.for_response(response)

    def send_inner(self, request: Request) -> Response:
        """Terminal sender: dispatch via httpx, wrapping transport errors."""
        try:
            response = self._client.send(request)
        except Exception as e:
            raise ResponseHandlingException(e)
        return response

    def close(self) -> None:
        """Close the underlying httpx client and its connection pool."""
        self._client.close()

    def add_middleware(self, middleware: MiddlewareT) -> None:
        """Prepend `middleware`; the most recently added middleware runs outermost."""
        current_middleware = self.middleware

        def new_middleware(request: Request, call_next: Send) -> Response:
            def inner_send(request: Request) -> Response:
                return current_middleware(request, call_next)

            return middleware(request, inner_send)

        self.middleware = new_middleware
class AsyncApiClient:
    """Asynchronous HTTP transport shared by the generated *Async* API classes.

    Wraps an `httpx.AsyncClient`, applies a composable middleware chain to
    every outgoing request, and parses successful responses into the
    requested model type.
    """

    def __init__(self, host: str = None, **kwargs: Any) -> None:
        # NOTE(review): the `host=None` default looks unusable — `request`
        # calls `self.host.endswith(...)`, which raises on None. Presumably a
        # real host string is always supplied; confirm against callers.
        self.host = host
        self.middleware: AsyncMiddlewareT = BaseAsyncMiddleware()
        # Extra kwargs are forwarded verbatim to httpx.AsyncClient.
        self._async_client = AsyncClient(**kwargs)
    @overload
    async def request(
        self, *, type_: Type[T], method: str, url: str, path_params: Dict[str, Any] = None, **kwargs: Any
    ) -> T:
        ...
    @overload  # noqa F811
    async def request(
        self, *, type_: None, method: str, url: str, path_params: Dict[str, Any] = None, **kwargs: Any
    ) -> None:
        ...
    async def request(  # noqa F811
        self, *, type_: Any, method: str, url: str, path_params: Dict[str, Any] = None, **kwargs: Any
    ) -> Any:
        """Format `url` with `path_params`, send the request, and parse the body as `type_`."""
        if path_params is None:
            path_params = {}
        host = self.host if self.host.endswith("/") else self.host + "/"
        url = url[1:] if url.startswith("/") else url
        # in order to do a correct join, url join requires base_url to end with /, and url to not start with /,
        # since url is treated as an absolute path and might truncate prefix in base_url
        url = urljoin(host, url.format(**path_params))
        request = self._async_client.build_request(method, url, **kwargs)
        return await self.send(request, type_)
    @overload
    def request_sync(self, *, type_: Type[T], **kwargs: Any) -> T:
        ...
    @overload  # noqa F811
    def request_sync(self, *, type_: None, **kwargs: Any) -> None:
        ...
    def request_sync(self, *, type_: Any, **kwargs: Any) -> Any:  # noqa F811
        """
        This method is not used by the generated apis, but is included for convenience
        """
        # Drives the async `request` coroutine to completion on the current event loop.
        return get_event_loop().run_until_complete(self.request(type_=type_, **kwargs))
    async def send(self, request: Request, type_: Type[T]) -> T:
        """Run the middleware chain and convert the response to `type_`.

        Raises ResourceExhaustedResponse for a 429 with a Retry-After header,
        ResponseHandlingException when a 2xx payload fails validation, and
        UnexpectedResponse for every other status code.
        """
        response = await self.middleware(request, self.send_inner)
        if response.status_code == 429:
            retry_after_s = response.headers.get("Retry-After", None)
            try:
                resp = response.json()
                message = resp["status"]["error"] if resp["status"] and resp["status"]["error"] else ""
            except Exception:
                message = ""
            # A 429 without Retry-After falls through to UnexpectedResponse below.
            if retry_after_s:
                raise ResourceExhaustedResponse(message, retry_after_s)
        if response.status_code in [200, 201, 202]:
            try:
                return parse_as_type(response.json(), type_)
            except ValidationError as e:
                raise ResponseHandlingException(e)
        raise UnexpectedResponse.for_response(response)
    async def send_inner(self, request: Request) -> Response:
        """Terminal sender: dispatch via httpx, wrapping transport errors."""
        try:
            response = await self._async_client.send(request)
        except Exception as e:
            raise ResponseHandlingException(e)
        return response
    async def aclose(self) -> None:
        """Close the underlying httpx client and its connection pool."""
        await self._async_client.aclose()
    def add_middleware(self, middleware: AsyncMiddlewareT) -> None:
        """Prepend `middleware`; the most recently added middleware runs outermost."""
        current_middleware = self.middleware
        async def new_middleware(request: Request, call_next: SendAsync) -> Response:
            async def inner_send(request: Request) -> Response:
                return await current_middleware(request, call_next)
            return await middleware(request, inner_send)
        self.middleware = new_middleware
class BaseAsyncMiddleware:
    """Identity middleware: terminates the async chain by calling the next sender."""

    async def __call__(self, request: Request, call_next: SendAsync) -> Response:
        return await call_next(request)
class BaseMiddleware:
    """Identity middleware: terminates the sync chain by calling the next sender."""

    def __call__(self, request: Request, call_next: Send) -> Response:
        return call_next(request)
@lru_cache(maxsize=None)
def _get_parsing_type(type_: Any, source: str) -> Any:
    """Build (and memoize) a one-field pydantic model used to coerce raw JSON
    into `type_`; `source` only makes the generated model's name readable."""
    from pydantic.main import create_model

    name = getattr(type_, "__name__", str(type_))
    return create_model(f"ParsingModel[{name}] (for {source})", obj=(type_, ...))
def parse_as_type(obj: Any, type_: Type[T]) -> T:
    """Validate/coerce `obj` into `type_` via a cached pydantic wrapper model."""
    wrapper = _get_parsing_type(type_, source=parse_as_type.__name__)
    return wrapper(obj=obj).obj
@@ -0,0 +1,5 @@
from typing import Union

# This is a dirty hack - the proper way is to upgrade the OpenAPI generator to at least 5.0.
# But that upgrade would also require updating all of the templates. Maybe some other day.
AnyOfstringinteger = Union[str, int]  # value the spec declares as "anyOf: [string, integer]"
@@ -0,0 +1,46 @@
import json
from typing import Any, Dict, Optional
from httpx import Headers, Response
MAX_CONTENT = 200  # longest raw-content preview (in bytes) embedded in UnexpectedResponse.__str__
class ApiException(Exception):
    """Base class for all exceptions raised by the generated HTTP client."""
class UnexpectedResponse(ApiException):
    """Raised when the server answers with a status code the client does not
    treat as success (anything outside 200/201/202)."""

    def __init__(self, status_code: Optional[int], reason_phrase: str, content: bytes, headers: Headers) -> None:
        self.status_code = status_code
        self.reason_phrase = reason_phrase
        self.content = content
        self.headers = headers

    @staticmethod
    def for_response(response: Response) -> "ApiException":
        """Build the exception straight from an httpx response object."""
        return UnexpectedResponse(
            status_code=response.status_code,
            reason_phrase=response.reason_phrase,
            content=response.content,
            headers=response.headers,
        )

    def __str__(self) -> str:
        """Human-readable summary: status line plus a truncated content preview."""
        code = f"{self.status_code}" if self.status_code is not None else ""
        if self.reason_phrase == "" and self.status_code is not None:
            reason = "(Unrecognized Status Code)"
        else:
            reason = f"({self.reason_phrase})"
        status = f"{code} {reason}".strip()
        if len(self.content) <= MAX_CONTENT:
            preview = self.content
        else:
            preview = self.content[: MAX_CONTENT - 3] + b" ..."
        return f"Unexpected Response: {status}\nRaw response content:\n{preview!r}"

    def structured(self) -> Dict[str, Any]:
        """Decode the raw response body as JSON."""
        return json.loads(self.content)
class ResponseHandlingException(ApiException):
    """Wraps a lower-level transport or parsing error so callers can catch a
    single client-side exception type."""

    def __init__(self, source: Exception):
        # the original exception raised while sending the request or parsing the response
        self.source = source
@@ -0,0 +1 @@
from .models import *
File diff suppressed because it is too large Load Diff