diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 07c32d0b87..365116192b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -193,7 +193,7 @@ jobs: fail-fast: false matrix: python-version: [3.8] - db_test_folder: [base_classes, elastic, epsilla, hnswlib, qdrant, weaviate, redis, milvus] + db_test_folder: [base_classes, elastic, epsilla, hnswlib, qdrant, weaviate, redis, milvus, opensearch] pydantic-version: ["pydantic-v2", "pydantic-v1"] steps: - uses: actions/checkout@v2.5.0 diff --git a/docarray/index/__init__.py b/docarray/index/__init__.py index aa20ff5db8..2e7491fa9d 100644 --- a/docarray/index/__init__.py +++ b/docarray/index/__init__.py @@ -16,6 +16,7 @@ from docarray.index.backends.mongodb_atlas import ( # noqa: F401 MongoDBAtlasDocumentIndex, ) + from docarray.index.backends.opensearchv2 import OpenSearchDocIndex # noqa: F401 from docarray.index.backends.qdrant import QdrantDocumentIndex # noqa: F401 from docarray.index.backends.redis import RedisDocumentIndex # noqa: F401 from docarray.index.backends.weaviate import WeaviateDocumentIndex # noqa: F401 @@ -30,6 +31,7 @@ 'RedisDocumentIndex', 'MilvusDocumentIndex', 'MongoDBAtlasDocumentIndex', + 'OpenSearchDocIndex', ] @@ -41,6 +43,9 @@ def __getattr__(name: str): elif name == 'ElasticDocIndex': import_library('elasticsearch', raise_error=True) import docarray.index.backends.elastic as lib + elif name == 'OpenSearchDocIndex': + import_library('elasticsearch', raise_error=True) + import docarray.index.backends.opensearchv2 as lib elif name == 'ElasticV7DocIndex': import_library('elasticsearch', raise_error=True) import docarray.index.backends.elasticv7 as lib diff --git a/docarray/index/backends/opensearchv2.py b/docarray/index/backends/opensearchv2.py new file mode 100644 index 0000000000..0755c5bbf0 --- /dev/null +++ b/docarray/index/backends/opensearchv2.py @@ -0,0 +1,727 @@ +# mypy: ignore-errors +import warnings +from collections import defaultdict +from dataclasses import dataclass, field +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Generator, + Generic, + Iterable, + List, + Mapping, + Optional, + Sequence, + Tuple, + Type, + TypeVar, + Union, + cast, +) + +import numpy as np +from pydantic import parse_obj_as + +import docarray.typing +from docarray import BaseDoc +from docarray.array.any_array import AnyDocArray +from docarray.index.abstract import BaseDocIndex, _ColumnInfo, _raise_not_composable +from docarray.typing import AnyTensor +from docarray.typing.tensor.abstract_tensor import AbstractTensor +from docarray.typing.tensor.ndarray import NdArray +from docarray.utils._internal._typing import safe_issubclass +from docarray.utils._internal.misc import import_library +from docarray.utils.find import _FindResult, _FindResultBatched + +TSchema = TypeVar('TSchema', bound=BaseDoc) +T = TypeVar('T', bound='OpenSearchDocIndex') + +OPENSEARCH_PY_VEC_TYPES: List[Any] = [list, tuple, np.ndarray, AbstractTensor] + +if TYPE_CHECKING: + import tensorflow as tf # type: ignore + import torch + from opensearchpy import OpenSearch + from opensearchpy.helpers import parallel_bulk +else: + from opensearchpy import OpenSearch + from opensearchpy.helpers import parallel_bulk + + torch = import_library('torch', raise_error=False) + tf = import_library('tensorflow', raise_error=False) + +if torch is not None: + OPENSEARCH_PY_VEC_TYPES.append(torch.Tensor) + +if tf is not None: + from docarray.typing import TensorFlowTensor + + OPENSEARCH_PY_VEC_TYPES.append(tf.Tensor) + 
OPENSEARCH_PY_VEC_TYPES.append(TensorFlowTensor) + + +class OpenSearchDocIndex(BaseDocIndex, Generic[TSchema]): + _index_vector_params: Optional[Tuple[str]] = ('dimension', 'space_type') + _index_vector_options: Optional[Tuple[str]] = ('m', 'ef_construction') + + def __init__(self, db_config=None, **kwargs): + """Initialize OpenSearchDocIndex""" + super().__init__(db_config=db_config, **kwargs) + self._db_config = cast(OpenSearchDocIndex.DBConfig, self._db_config) + + self._logger.debug('OpenSearch index is being initialized') + + # OpenSearch client creation + self._client = OpenSearch( + hosts=self._db_config.hosts, + auth=self._db_config.auth, + **self._db_config.es_config, + ) + + self._logger.debug('OpenSearch client has been created') + + # OpenSearch index setup + mappings: Dict[str, Any] = { + 'dynamic': True, + '_source': {'enabled': 'true'}, + 'properties': {}, + } + mappings.update(self._db_config.index_mappings) + + self._logger.debug('Mappings have been updated with db_config.index_mappings') + + for col_name, col in self._column_infos.items(): + + if safe_issubclass(col.docarray_type, AnyDocArray): + continue + if col.db_type == 'knn_vector' and ( + not col.n_dim and col.config['dimension'] < 0 + ): + self._logger.info( + f'Not indexing column {col_name}, the dimensionality is not specified' + ) + continue + + mappings['properties'][col_name] = self._create_index_mapping(col) + self._logger.debug(f'Index mapping created for column {col_name}') + + if self._client.indices.exists(index=self.index_name): + self._client_put_mapping(mappings) + self._logger.debug(f'Put mapping for index {self.index_name}') + else: + self._client_create( + {'settings': {'index': {'knn': True}}, 'mappings': mappings} + ) + self._logger.debug(f'Created new index {self.index_name} with mappings') + + if len(self._db_config.index_settings): + self._client_create( + { + 'settings': { + 'index': {'knn': True}, + **self._db_config.index_settings, + }, + 'mappings': mappings, + } + ) + self._logger.debug('Updated index settings') + + self._refresh(self.index_name) + self._logger.debug(f'Refreshed index {self.index_name}') + + @property + def index_name(self): + default_index_name = ( + self._schema.__name__.lower() if self._schema is not None else None + ) + if default_index_name is None: + err_msg = ( + 'An OpenSearchDocIndex must be typed with a Document type.To do so, use the syntax: ' + 'OpenSearchDocIndex[DocumentType] ' + ) + + self._logger.error(err_msg) + raise ValueError(err_msg) + index_name = self._db_config.index_name or default_index_name + return index_name + + ############################################### + # Inner classes for query builder and configs # + ############################################### + class QueryBuilder(BaseDocIndex.QueryBuilder): + def __init__(self, outer_instance, **kwargs): + super().__init__() + self._outer_instance = outer_instance + self._query: Dict[str, Any] = { + 'query': defaultdict(lambda: defaultdict(list)) + } + + def build(self, *args, **kwargs) -> Any: + """Build the OpenSearch query object.""" + self._outer_instance._logger.debug('Building the OpenSearch query object') + + if len(self._query['query']) == 0: + del self._query['query'] + return self._query + + def find( + self, + query: Union[AnyTensor, BaseDoc], + search_field: str = 'embedding', + limit: int = 10, + ): + """ + Find k-nearest neighbors of the query. + + :param query: query vector for KNN/ANN search. Has single axis. 
+ :param search_field: name of the field to search on + :param limit: maximum number of documents to return per query + :return: self + """ + self._outer_instance._logger.debug('Executing find query') + + self._outer_instance._validate_search_field(search_field) + if isinstance(query, BaseDoc): + query_vec = BaseDocIndex._get_values_by_column([query], search_field)[0] + else: + query_vec = query + query_vec_np = BaseDocIndex._to_numpy(self._outer_instance, query_vec) + self._query['query']['knn'] = self._outer_instance._form_search_body( + query_vec_np, + search_field, + limit, + )['query']['knn'] + self._query['size'] = limit + + return self + + # filter accepts Leaf/Compound query clauses + # https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl.html + def filter(self, query: Dict[str, Any], limit: int = 10): + """Find documents in the index based on a filter query + + :param query: the query to execute + :param limit: maximum number of documents to return + :return: self + """ + self._outer_instance._logger.debug('Executing filter query') + + self._query['size'] = limit + if self._query['query'].get('knn'): + search_field = next(iter(self._query['query']['knn'])) + if self._query['query']['knn'][search_field].get('filter') is not None: + self._query['query']['knn'][search_field]['filter']['bool'][ + 'must' + ].append(query) + else: + self._query['query']['knn'][search_field]['filter'] = {} + self._query['query']['knn'][search_field]['filter']['bool'] = { + 'must': [] + } + self._query['query']['knn'][search_field]['filter']['bool'][ + 'must' + ].append(query) + else: + self._query['query']['bool']['filter'].append(query) + return self + + def text_search(self, query: str, search_field: str = 'text', limit: int = 10): + """Find documents in the index based on a text search query + + :param query: The text to search for + :param search_field: name of the field to search on + :param limit: maximum number of documents to find + :return: self + """ + self._outer_instance._logger.debug('Executing text search query') + + self._outer_instance._validate_search_field(search_field) + self._query['size'] = limit + if self._query['query'].get('knn'): + vector_field = next(iter(self._query['query']['knn'])) + if self._query['query']['knn'][vector_field].get('filter') is not None: + self._query['query']['knn'][vector_field]['filter']['bool'][ + 'must' + ].append({'term': {search_field: query}}) + else: + self._query['query']['knn'][vector_field]['filter'] = {} + self._query['query']['knn'][vector_field]['filter']['bool'] = { + 'must': [] + } + self._query['query']['knn'][vector_field]['filter']['bool'][ + 'must' + ].append({'term': {search_field: query}}) + else: + self._query['query']['bool']['must'].append( + {'match': {search_field: query}} + ) + return self + + find_batched = _raise_not_composable('find_batched') + filter_batched = _raise_not_composable('filter_batched') + text_search_batched = _raise_not_composable('text_search_batched') + + def build_query(self, **kwargs) -> QueryBuilder: + """ + Build a query for OpenSearchDocIndex. 
+ :param kwargs: parameters to forward to QueryBuilder initialization + :return: QueryBuilder object + """ + return self.QueryBuilder(self, **kwargs) + + @dataclass + class DBConfig(BaseDocIndex.DBConfig): + """Dataclass that contains all "static" configurations of OpenSearchDocIndex.""" + + hosts: Union[ + List[Union[str, Mapping[str, Union[str, int]]]], str + ] = 'http://localhost:9200/' + auth: Dict[str, str] = field(default_factory=dict) + index_name: Optional[str] = None + es_config: Dict[str, Any] = field(default_factory=dict) + index_settings: Dict[str, Any] = field(default_factory=dict) + index_mappings: Dict[str, Any] = field(default_factory=dict) + default_column_config: Dict[Any, Dict[str, Any]] = field(default_factory=dict) + + def __post_init__(self): + self.default_column_config = { + 'binary': {}, + 'boolean': {}, + 'keyword': {}, + 'long': {}, + 'integer': {}, + 'short': {}, + 'byte': {}, + 'double': {}, + 'float': {}, + 'half_float': {}, + 'scaled_float': {}, + 'unsigned_long': {}, + 'dates': {}, + 'alias': {}, + 'object': {}, + 'flattened': {}, + 'nested': {}, + 'join': {}, + 'integer_range': {}, + 'float_range': {}, + 'long_range': {}, + 'double_range': {}, + 'date_range': {}, + 'ip_range': {}, + 'ip': {}, + 'version': {}, + 'histogram': {}, + 'text': {}, + 'annotated_text': {}, + 'completion': {}, + 'search_as_you_type': {}, + 'token_count': {}, + 'sparse_vector': {}, + 'rank_feature': {}, + 'rank_features': {}, + 'geo_point': {}, + 'geo_shape': {}, + 'point': {}, + 'shape': {}, + 'percolator': {}, + # `None` is not a Type, but we allow it here anyway + None: {}, # type: ignore + } + self.default_column_config['knn_vector'] = self.knn_vector_config() + + def knn_vector_config(self): + """Get the dense vector config.""" + + config = { + 'dimension': -1, + 'space_type': 'cosinesimil', # 'l2, 'inner_product', 'cosinesimil' + 'm': 16, + 'ef_construction': 100, + # 'num_candidates': 10000, + } + + return config + + @dataclass + class RuntimeConfig(BaseDocIndex.RuntimeConfig): + """Dataclass that contains all "dynamic" configurations of OpenSearchDocIndex.""" + + chunk_size: int = 500 + + ############################################### + # Implementation of abstract methods # + ############################################### + + def python_type_to_db_type(self, python_type: Type) -> Any: + """Map python type to database type. + Takes any python type and returns the corresponding database column type. + + :param python_type: a python type. + :return: the corresponding database column type, + or None if ``python_type`` is not supported. 
+ """ + self._logger.debug(f'Mapping Python type {python_type} to database type') + + for allowed_type in OPENSEARCH_PY_VEC_TYPES: + if safe_issubclass(python_type, allowed_type): + self._logger.info( + f'Mapped Python type {python_type} to database type "knn_vector"' + ) + return 'knn_vector' + + opensearch_py_types = { + docarray.typing.ID: 'keyword', + docarray.typing.AnyUrl: 'keyword', + bool: 'boolean', + int: 'integer', + float: 'float', + str: 'text', + bytes: 'binary', + dict: 'object', + } + + for type in opensearch_py_types.keys(): + if safe_issubclass(python_type, type): + self._logger.info( + f'Mapped Python type {python_type} to database type "{opensearch_py_types[type]}"' + ) + return opensearch_py_types[type] + + err_msg = f'Unsupported column type for {type(self)}: {python_type}' + self._logger.error(err_msg) + raise ValueError(err_msg) + + def _index( + self, + column_to_data: Mapping[str, Generator[Any, None, None]], + refresh: bool = True, + chunk_size: Optional[int] = None, + ): + self._index_subindex(column_to_data) + + data = self._transpose_col_value_dict(column_to_data) + requests = [] + + for row in data: + request = { + '_index': self.index_name, + '_id': row['id'], + } + for col_name, col in self._column_infos.items(): + if safe_issubclass(col.docarray_type, AnyDocArray): + continue + if col.db_type == 'knn_vector' and np.all(row[col_name] == 0): + row[col_name] = row[col_name] + 1.0e-9 + if row[col_name] is None: + continue + request[col_name] = row[col_name] + requests.append(request) + + _, warning_info = self._send_requests(requests, chunk_size) + for info in warning_info: + warnings.warn(str(info)) + self._logger.warning('Warning: %s', str(info)) + + if refresh: + self._logger.debug('Refreshing the index') + self._refresh(self.index_name) + + def num_docs(self) -> int: + """ + Get the number of documents. + """ + self._logger.debug('Getting the number of documents in the index') + return self._client.count(index=self.index_name)['count'] + + def _del_items( + self, + doc_ids: Sequence[str], + chunk_size: Optional[int] = None, + ): + requests = [] + for _id in doc_ids: + requests.append( + {'_op_type': 'delete', '_index': self.index_name, '_id': _id} + ) + + _, warning_info = self._send_requests(requests, chunk_size) + + # raise warning if some ids are not found + if warning_info: + ids = [info['delete']['_id'] for info in warning_info] + warnings.warn(f'No document with id {ids} found') + + self._refresh(self.index_name) + + def _get_items(self, doc_ids: Sequence[str]) -> Sequence[Dict[str, Any]]: + accumulated_docs = [] + accumulated_docs_id_not_found = [] + es_rows = self._client_mget(doc_ids)['docs'] + + for row in es_rows: + if row['found']: + doc_dict = row['_source'] + accumulated_docs.append(doc_dict) + else: + accumulated_docs_id_not_found.append(row['_id']) + + # raise warning if some ids are not found + if accumulated_docs_id_not_found: + warnings.warn(f'No document with id {accumulated_docs_id_not_found} found') + + return accumulated_docs + + def execute_query(self, query: Dict[str, Any], *args, **kwargs) -> Any: + """ + Execute a query on the OpenSearchDocIndex. + + Can take two kinds of inputs: + + 1. A native query of the underlying database. This is meant as a passthrough so that you + can enjoy any functionality that is not available through the Document index API. + 2. The output of this Document index' `QueryBuilder.build()` method. 
+ + :param query: the query to execute + :param args: positional arguments to pass to the query + :param kwargs: keyword arguments to pass to the query + :return: the result of the query + """ + self._logger.debug(f'Executing query: {query}') + + if args or kwargs: + err_msg = ( + f'args and kwargs not supported for `execute_query` on {type(self)}' + ) + self._logger.error(err_msg) + raise ValueError(err_msg) + + resp = self._client.search(index=self.index_name, body=query) + docs, scores = self._format_response(resp) + + return _FindResult(documents=docs, scores=parse_obj_as(NdArray, scores)) + + def _find( + self, query: np.ndarray, limit: int, search_field: str = '' + ) -> _FindResult: + body = self._form_search_body(query, search_field, limit) + + resp = self._client_search(**body) + + docs, scores = self._format_response(resp) + + return _FindResult(documents=docs, scores=parse_obj_as(NdArray, scores)) + + def _find_batched( + self, + queries: np.ndarray, + limit: int, + search_field: str = '', + ) -> _FindResultBatched: + request = [] + for query in queries: + head = {'index': self.index_name} + body = self._form_search_body(query, search_field, limit) + request.extend([head, body]) + + responses = self._client_msearch(request) + + das, scores = zip( + *[self._format_response(resp) for resp in responses['responses']] + ) + return _FindResultBatched(documents=list(das), scores=scores) + + def _filter( + self, + filter_query: Dict[str, Any], + limit: int, + ) -> List[Dict]: + resp = self._client_search(query=filter_query, size=limit) + + docs, _ = self._format_response(resp) + + return docs + + def _filter_batched( + self, + filter_queries: Any, + limit: int, + ) -> List[List[Dict]]: + request = [] + for query in filter_queries: + head = {'index': self.index_name} + body = {'query': query, 'size': limit} + request.extend([head, body]) + + responses = self._client_msearch(request) + das, _ = zip(*[self._format_response(resp) for resp in responses['responses']]) + + return list(das) + + def _text_search( + self, + query: str, + limit: int, + search_field: str = '', + ) -> _FindResult: + body = self._form_text_search_body(query, limit, search_field) + resp = self._client_search(**body) + + docs, scores = self._format_response(resp) + + return _FindResult(documents=docs, scores=np.array(scores)) # type: ignore + + def _text_search_batched( + self, + queries: Sequence[str], + limit: int, + search_field: str = '', + ) -> _FindResultBatched: + request = [] + for query in queries: + head = {'index': self.index_name} + body = self._form_text_search_body(query, limit, search_field) + request.extend([head, body]) + + responses = self._client_msearch(request) + das, scores = zip( + *[self._format_response(resp) for resp in responses['responses']] + ) + return _FindResultBatched(documents=list(das), scores=scores) + + def _filter_by_parent_id(self, id: str) -> List[str]: + resp = self._client_search( + query={'term': {'parent_id': id}}, fields=['id'], _source=False + ) + ids = [hit['fields']['id'][0] for hit in resp['hits']['hits']] + return ids + + ############################################### + # Helpers # + ############################################### + + @classmethod + def _create_index_mapping(cls, col: '_ColumnInfo') -> Dict[str, Any]: + """Create a new HNSW index for a column, and initialize it.""" + + index = {'type': col.config['type'] if 'type' in col.config else col.db_type} + + if col.db_type == 'knn_vector': + if cls._index_vector_params is not None: + for k in 
cls._index_vector_params: + index[k] = col.config[k] + if col.n_dim: + index['dimension'] = col.n_dim + if cls._index_vector_options is not None: + index['method'] = { + 'parameters': {k: col.config[k] for k in cls._index_vector_options} + } + index['method']['name'] = 'hnsw' + return index + + def _send_requests( + self, + request: Iterable[Dict[str, Any]], + chunk_size: Optional[int] = None, + **kwargs, + ) -> Tuple[List[Dict], List[Any]]: + """Send bulk request to OpenSearch and gather the successful info""" + + accumulated_info = [] + warning_info = [] + for success, info in parallel_bulk( + self._client, + request, + raise_on_error=False, + raise_on_exception=False, + chunk_size=chunk_size if chunk_size else self._runtime_config.chunk_size, # type: ignore + **kwargs, + ): + if not success: + warning_info.append(info) + else: + accumulated_info.append(info) + + return accumulated_info, warning_info + + def _form_search_body( + self, + query: np.ndarray, + search_field: str = '', + limit: int = 10, + ) -> Dict[str, Any]: + body = { + 'size': limit, + 'query': { + 'knn': { + search_field: { + 'vector': query, + 'k': limit, + } + }, + }, + } + return body + + def _form_text_search_body( + self, query: str, limit: int, search_field: str = '' + ) -> Dict[str, Any]: + body = { + 'size': limit, + 'query': { + 'bool': { + 'must': [{'match': {search_field: query}}], + } + }, + } + return body + + def _format_response(self, response: Any) -> Tuple[List[Dict], List[Any]]: + docs = [] + scores = [] + for result in response['hits']['hits']: + if not isinstance(result, dict): + result = result.to_dict() + + if result.get('_source', None): + doc_dict = result['_source'] + else: + doc_dict = result['fields'] + doc_dict['id'] = result['_id'] + docs.append(doc_dict) + scores.append(result['_score']) + + return docs, [parse_obj_as(NdArray, np.array(s)) for s in scores] + + def _refresh(self, index_name: str): + self._client.indices.refresh(index=index_name) + + def _doc_exists(self, doc_id: str) -> bool: + if len(doc_id) == 0: + return False + ret = self._client_mget([doc_id]) + return ret["docs"][0]["found"] + + ############################################### + # API Wrappers # + ############################################### + + def _client_put_mapping(self, mappings: Dict[str, Any]): + self._client.indices.put_mapping(index=self.index_name, body=mappings) + + def _client_create(self, settings: Dict[str, Any]): + self._client.indices.create(index=self.index_name, body=settings) + + def _client_put_settings(self, settings: Dict[str, Any]): + self._client.indices.put_settings(index=self.index_name, settings=settings) + + def _client_mget(self, ids: Sequence[str]): + body_ids = [{'_index': self.index_name, '_id': _id} for _id in ids] + return self._client.mget(index=self.index_name, body={'docs': body_ids}) + + def _client_search(self, **kwargs): + return self._client.search(index=self.index_name, body=kwargs) + + def _client_msearch(self, request: List[Dict[str, Any]]): + return self._client.msearch(index=self.index_name, body=request) diff --git a/poetry.lock b/poetry.lock index 4e185af157..ce74ddab73 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.0.1 and should not be changed by hand. [[package]] name = "aiofiles" @@ -6,6 +6,7 @@ version = "22.1.0" description = "File support for asyncio." 
optional = false python-versions = ">=3.7,<4.0" +groups = ["dev"] files = [ {file = "aiofiles-22.1.0-py3-none-any.whl", hash = "sha256:1142fa8e80dbae46bb6339573ad4c8c0841358f79c6eb50a493dceca14621bad"}, {file = "aiofiles-22.1.0.tar.gz", hash = "sha256:9107f1ca0b2a5553987a94a3c9959fe5b491fdf731389aa5b7b1bd0733e32de6"}, @@ -17,6 +18,8 @@ version = "3.8.4" description = "Async http client/server framework (asyncio)" optional = true python-versions = ">=3.6" +groups = ["main"] +markers = "extra == \"jac\"" files = [ {file = "aiohttp-3.8.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5ce45967538fb747370308d3145aa68a074bdecb4f3a300869590f725ced69c1"}, {file = "aiohttp-3.8.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b744c33b6f14ca26b7544e8d8aadff6b765a80ad6164fb1a430bbadd593dfb1a"}, @@ -125,6 +128,8 @@ version = "1.3.1" description = "aiosignal: a list of registered asynchronous callbacks" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"jac\"" files = [ {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, @@ -139,6 +144,7 @@ version = "0.19.0" description = "asyncio bridge to the standard sqlite3 module" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "aiosqlite-0.19.0-py3-none-any.whl", hash = "sha256:edba222e03453e094a3ce605db1b970c4b3376264e56f32e2a4959f948d66a96"}, {file = "aiosqlite-0.19.0.tar.gz", hash = "sha256:95ee77b91c8d2808bd08a59fbebf66270e9090c3d92ffbf260dc0db0b979577d"}, @@ -154,6 +160,7 @@ version = "3.6.2" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.6.2" +groups = ["main", "dev"] files = [ {file = "anyio-3.6.2-py3-none-any.whl", hash = "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3"}, {file = "anyio-3.6.2.tar.gz", hash = "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421"}, @@ -174,6 +181,8 @@ version = "0.1.3" description = "Disable App Nap on macOS >= 10.9" optional = false python-versions = "*" +groups = ["dev"] +markers = "sys_platform == \"darwin\" or platform_system == \"Darwin\"" files = [ {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, @@ -185,6 +194,7 @@ version = "21.3.0" description = "The secure Argon2 password hashing algorithm." 
optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "argon2-cffi-21.3.0.tar.gz", hash = "sha256:d384164d944190a7dd7ef22c6aa3ff197da12962bd04b17f64d4e93d934dba5b"}, {file = "argon2_cffi-21.3.0-py3-none-any.whl", hash = "sha256:8c976986f2c5c0e5000919e6de187906cfd81fb1c72bf9d88c01177e77da7f80"}, @@ -204,6 +214,7 @@ version = "21.2.0" description = "Low-level CFFI bindings for Argon2" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, @@ -241,6 +252,7 @@ version = "1.3.0" description = "Better dates & times for Python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80"}, {file = "arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85"}, @@ -260,6 +272,8 @@ version = "4.0.2" description = "Timeout context manager for asyncio programs" optional = true python-versions = ">=3.6" +groups = ["main"] +markers = "extra == \"redis\" and python_full_version <= \"3.11.2\" or extra == \"jac\"" files = [ {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, @@ -271,10 +285,12 @@ version = "22.1.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.5" +groups = ["main", "dev"] files = [ {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, ] +markers = {main = "extra == \"mesh\" or extra == \"full\" or extra == \"jac\""} [package.extras] dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] @@ -288,6 +304,8 @@ version = "1.3.1" description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients." optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"weaviate\"" files = [ {file = "Authlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:d35800b973099bbadc49b42b256ecb80041ad56b7fe1216a362c7943c088f377"}, {file = "authlib-1.3.1.tar.gz", hash = "sha256:7ae843f03c06c5c0debd63c9db91f9fda64fa62a42a77419fa15fbb7e7a58917"}, @@ -302,6 +320,8 @@ version = "10.0.0" description = "Pythonic bindings for FFmpeg's libraries." 
optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"video\" or extra == \"full\"" files = [ {file = "av-10.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d19bb54197155d045a2b683d993026d4bcb06e31c2acad0327e3e8711571899c"}, {file = "av-10.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7dba96a85cd37315529998e6dbbe3fa05c2344eb19a431dc24996be030a904ee"}, @@ -355,6 +375,7 @@ version = "2.11.0" description = "Internationalization utilities" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "Babel-2.11.0-py3-none-any.whl", hash = "sha256:1ad3eca1c885218f6dce2ab67291178944f810a10a9b5f3cb8382a5a232b64fe"}, {file = "Babel-2.11.0.tar.gz", hash = "sha256:5ef4b3226b0180dedded4229651c8b0e1a3a6a2837d45a073272f313e4cf97f6"}, @@ -369,6 +390,7 @@ version = "0.2.0" description = "Specifications for callback functions passed in to an API" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, @@ -380,6 +402,8 @@ version = "2.2.1" description = "Function decoration for backoff and retry" optional = true python-versions = ">=3.7,<4.0" +groups = ["main"] +markers = "extra == \"epsilla\"" files = [ {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, @@ -391,6 +415,7 @@ version = "4.11.1" description = "Screen-scraping library" optional = false python-versions = ">=3.6.0" +groups = ["dev"] files = [ {file = "beautifulsoup4-4.11.1-py3-none-any.whl", hash = "sha256:58d5c3d29f5a36ffeb94f02f0d786cd53014cf9b3b3951d42e0080d8a9498d30"}, {file = "beautifulsoup4-4.11.1.tar.gz", hash = "sha256:ad9aa55b65ef2808eb405f46cf74df7fcb7044d5cbc26487f96eb2ef2e436693"}, @@ -409,6 +434,7 @@ version = "22.10.0" description = "The uncompromising code formatter." optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "black-22.10.0-1fixedarch-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa"}, {file = "black-22.10.0-1fixedarch-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef"}, @@ -453,6 +479,7 @@ version = "1.13.0" description = "Run Black on Python code blocks in documentation files." optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "blacken_docs-1.13.0-py3-none-any.whl", hash = "sha256:455388df506fca04742f36a4a3475630eb7f141cb98acc6070d3c24bcf69cdda"}, {file = "blacken_docs-1.13.0.tar.gz", hash = "sha256:2babba84a42fb31a1d393dcf5a9a66d9b0657bdc320aec69d9f96301501dba35"}, @@ -467,6 +494,7 @@ version = "5.0.1" description = "An easy safelist-based HTML-sanitizing tool." 
optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "bleach-5.0.1-py3-none-any.whl", hash = "sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a"}, {file = "bleach-5.0.1.tar.gz", hash = "sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c"}, @@ -486,6 +514,8 @@ version = "1.26.95" description = "The AWS SDK for Python" optional = true python-versions = ">= 3.7" +groups = ["main"] +markers = "extra == \"aws\"" files = [ {file = "boto3-1.26.95-py3-none-any.whl", hash = "sha256:2f07523d45da7a970d18037676e1fb78401ce3f44f4cc26a6a991be8c519b62b"}, {file = "boto3-1.26.95.tar.gz", hash = "sha256:945d32fa9bbbb5fc775378bc2c19278797b54ad57e24bd4dbd46ab27f0938152"}, @@ -505,6 +535,8 @@ version = "1.29.95" description = "Low-level, data-driven core of boto 3." optional = true python-versions = ">= 3.7" +groups = ["main"] +markers = "extra == \"aws\"" files = [ {file = "botocore-1.29.95-py3-none-any.whl", hash = "sha256:5f5f1c8125f8c331f561ca0a7a892f709df206714c306c48fd907eee469926cb"}, {file = "botocore-1.29.95.tar.gz", hash = "sha256:e9ffd4a2fc415c313eda03713c212e3121084fdcb21e20aac5b15924a4ae5a9d"}, @@ -524,6 +556,7 @@ version = "2.3.post1" description = "Bash style brace expander." optional = false python-versions = ">=3.7" +groups = ["docs"] files = [ {file = "bracex-2.3.post1-py3-none-any.whl", hash = "sha256:351b7f20d56fb9ea91f9b9e9e7664db466eb234188c175fd943f8f755c807e73"}, {file = "bracex-2.3.post1.tar.gz", hash = "sha256:e7b23fc8b2cd06d3dec0692baabecb249dda94e06a617901ff03a6c56fd71693"}, @@ -535,6 +568,7 @@ version = "2024.7.4" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" +groups = ["main", "dev", "docs"] files = [ {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, @@ -546,6 +580,7 @@ version = "1.15.1" description = "Foreign Function Interface for Python calling C code." optional = false python-versions = "*" +groups = ["main", "dev"] files = [ {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, @@ -612,6 +647,7 @@ files = [ {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, ] +markers = {main = "platform_python_implementation != \"PyPy\" and extra == \"weaviate\""} [package.dependencies] pycparser = "*" @@ -622,6 +658,7 @@ version = "3.3.1" description = "Validate configuration and produce human readable error messages." 
optional = false python-versions = ">=3.6.1" +groups = ["dev"] files = [ {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"}, {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, @@ -633,6 +670,8 @@ version = "5.1.0" description = "Universal encoding detector for Python 3" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"mesh\" or extra == \"full\"" files = [ {file = "chardet-5.1.0-py3-none-any.whl", hash = "sha256:362777fb014af596ad31334fde1e8c327dfdb076e1960d1694662d46a6917ab9"}, {file = "chardet-5.1.0.tar.gz", hash = "sha256:0d62712b956bc154f85fb0a266e2a3c5913c2967e00348701b32411d6def31e5"}, @@ -644,6 +683,7 @@ version = "2.0.12" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.5.0" +groups = ["main", "dev", "docs"] files = [ {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, @@ -658,6 +698,7 @@ version = "8.1.3" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" +groups = ["dev", "docs"] files = [ {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, @@ -672,10 +713,12 @@ version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev", "docs"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +markers = {main = "extra == \"mesh\" and sys_platform == \"win32\" or extra == \"weaviate\" and platform_system == \"Windows\" or extra == \"full\" and sys_platform == \"win32\"", dev = "platform_system == \"Windows\" or sys_platform == \"win32\""} [[package]] name = "colorlog" @@ -683,6 +726,8 @@ version = "6.7.0" description = "Add colours to the output of Python's logging module." 
optional = true python-versions = ">=3.6" +groups = ["main"] +markers = "extra == \"mesh\" or extra == \"full\"" files = [ {file = "colorlog-6.7.0-py2.py3-none-any.whl", hash = "sha256:0d33ca236784a1ba3ff9c532d4964126d8a2c44f1f0cb1d2b0728196f512f662"}, {file = "colorlog-6.7.0.tar.gz", hash = "sha256:bd94bd21c1e13fac7bd3153f4bc3a7dc0eb0974b8bc2fdf1a989e474f6e582e5"}, @@ -700,6 +745,7 @@ version = "0.9.1" description = "Python parser for the CommonMark Markdown spec" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "commonmark-0.9.1-py2.py3-none-any.whl", hash = "sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9"}, {file = "commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"}, @@ -714,6 +760,7 @@ version = "6.2" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "coverage-6.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6dbc1536e105adda7a6312c778f15aaabe583b0e9a0b0a324990334fd458c94b"}, {file = "coverage-6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:174cf9b4bef0db2e8244f82059a5a72bd47e1d40e71c68ab055425172b16b7d0"}, @@ -776,6 +823,7 @@ version = "42.0.4" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "cryptography-42.0.4-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:ffc73996c4fca3d2b6c1c8c12bfd3ad00def8621da24f547626bf06441400449"}, {file = "cryptography-42.0.4-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:db4b65b02f59035037fde0998974d84244a64c3265bdef32a827ab9b63d61b18"}, @@ -810,6 +858,7 @@ files = [ {file = "cryptography-42.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f47be41843200f7faec0683ad751e5ef11b9a56a220d57f300376cd8aba81660"}, {file = "cryptography-42.0.4.tar.gz", hash = "sha256:831a4b37accef30cccd34fcb916a5d7b5be3cbbe27268a02832c3e450aea39cb"}, ] +markers = {main = "extra == \"weaviate\""} [package.dependencies] cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} @@ -830,6 +879,7 @@ version = "1.6.3" description = "An implementation of the Debug Adapter Protocol for Python" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "debugpy-1.6.3-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:c4b2bd5c245eeb49824bf7e539f95fb17f9a756186e51c3e513e32999d8846f3"}, {file = "debugpy-1.6.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b8deaeb779699350deeed835322730a3efec170b88927debc9ba07a1a38e2585"}, @@ -857,10 +907,12 @@ version = "5.1.1" description = "Decorators for Humans" optional = false python-versions = ">=3.5" +groups = ["main", "dev"] files = [ {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, ] +markers = {main = "extra == \"weaviate\""} [[package]] name = "defusedxml" @@ -868,6 +920,7 @@ version = "0.7.1" description = "XML bomb protection for Python stdlib modules" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["dev"] files = [ {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = 
"sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, @@ -879,6 +932,7 @@ version = "0.3.6" description = "Distribution utilities" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"}, {file = "distlib-0.3.6.tar.gz", hash = "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"}, @@ -890,6 +944,8 @@ version = "2.6.1" description = "DNS toolkit" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"mongo\"" files = [ {file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"}, {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"}, @@ -910,6 +966,8 @@ version = "6.0.1" description = "A Python library for the Docker Engine API." optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"jac\"" files = [ {file = "docker-6.0.1-py3-none-any.whl", hash = "sha256:dbcb3bd2fa80dca0788ed908218bf43972772009b881ed1e20dfc29a65e49782"}, {file = "docker-6.0.1.tar.gz", hash = "sha256:896c4282e5c7af5c45e8b683b0b0c33932974fe6e50fc6906a0a83616ab3da97"}, @@ -931,6 +989,8 @@ version = "0.18.0" description = "ECDSA cryptographic signature library (pure python)" optional = true python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +groups = ["main"] +markers = "extra == \"jac\"" files = [ {file = "ecdsa-0.18.0-py2.py3-none-any.whl", hash = "sha256:80600258e7ed2f16b9aa1d7c295bd70194109ad5a30fdee0eaeefef1d4c559dd"}, {file = "ecdsa-0.18.0.tar.gz", hash = "sha256:190348041559e21b22a1d65cee485282ca11a6f81d503fddb84d5017e9ed1e49"}, @@ -949,6 +1009,8 @@ version = "8.4.0" description = "Transport classes and utilities shared among Python Elastic client libraries" optional = true python-versions = ">=3.6" +groups = ["main"] +markers = "extra == \"elasticsearch\"" files = [ {file = "elastic-transport-8.4.0.tar.gz", hash = "sha256:b9ad708ceb7fcdbc6b30a96f886609a109f042c0b9d9f2e44403b3133ba7ff10"}, {file = "elastic_transport-8.4.0-py3-none-any.whl", hash = "sha256:19db271ab79c9f70f8c43f8f5b5111408781a6176b54ab2e54d713b6d9ceb815"}, @@ -967,6 +1029,8 @@ version = "7.10.1" description = "Python client for Elasticsearch" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4" +groups = ["main"] +markers = "extra == \"elasticsearch\"" files = [ {file = "elasticsearch-7.10.1-py2.py3-none-any.whl", hash = "sha256:4ebd34fd223b31c99d9f3b6b6236d3ac18b3046191a37231e8235b06ae7db955"}, {file = "elasticsearch-7.10.1.tar.gz", hash = "sha256:a725dd923d349ca0652cf95d6ce23d952e2153740cf4ab6daf4a2d804feeed48"}, @@ -988,6 +1052,7 @@ version = "0.4" description = "Discover and load entry points from installed packages." 
optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "entrypoints-0.4-py3-none-any.whl", hash = "sha256:f174b5ff827504fd3cd97cc3f8649f3693f51538c7e4bdf3ef002c8429d42f9f"}, {file = "entrypoints-0.4.tar.gz", hash = "sha256:b706eddaa9218a19ebcd67b56818f05bb27589b1ca9e8d797b74affad4ccacd4"}, @@ -999,6 +1064,8 @@ version = "9.5.0" description = "simplified environment variable parsing" optional = true python-versions = ">=3.6" +groups = ["main"] +markers = "extra == \"milvus\"" files = [ {file = "environs-9.5.0-py2.py3-none-any.whl", hash = "sha256:1e549569a3de49c05f856f40bce86979e7d5ffbbc4398e7f338574c220189124"}, {file = "environs-9.5.0.tar.gz", hash = "sha256:a76307b36fbe856bdca7ee9161e6c466fd7fcffc297109a118c59b54e27e30c9"}, @@ -1014,12 +1081,25 @@ django = ["dj-database-url", "dj-email-url", "django-cache-url"] lint = ["flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)"] tests = ["dj-database-url", "dj-email-url", "django-cache-url", "pytest"] +[[package]] +name = "events" +version = "0.5" +description = "Bringing the elegance of C# EventHandler to Python" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "Events-0.5-py3-none-any.whl", hash = "sha256:a7286af378ba3e46640ac9825156c93bdba7502174dd696090fdfcd4d80a1abd"}, +] + [[package]] name = "exceptiongroup" version = "1.1.0" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" +groups = ["dev"] +markers = "python_version < \"3.11\"" files = [ {file = "exceptiongroup-1.1.0-py3-none-any.whl", hash = "sha256:327cbda3da756e2de031a3107b81ab7b3770a602c4d16ca618298c526f4bec1e"}, {file = "exceptiongroup-1.1.0.tar.gz", hash = "sha256:bcb67d800a4497e1b404c2dd44fca47d3b7a5e5433dbab67f96c1a685cdfdf23"}, @@ -1034,6 +1114,8 @@ version = "0.100.0" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"web\"" files = [ {file = "fastapi-0.100.0-py3-none-any.whl", hash = "sha256:271662daf986da8fa98dc2b7c7f61c4abdfdccfb4786d79ed8b2878f172c6d5f"}, {file = "fastapi-0.100.0.tar.gz", hash = "sha256:acb5f941ea8215663283c10018323ba7ea737c571b67fc7e88e9469c7eb1d12e"}, @@ -1053,6 +1135,7 @@ version = "2.16.2" description = "Fastest Python implementation of JSON schema" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "fastjsonschema-2.16.2-py3-none-any.whl", hash = "sha256:21f918e8d9a1a4ba9c22e09574ba72267a6762d47822db9add95f6454e51cc1c"}, {file = "fastjsonschema-2.16.2.tar.gz", hash = "sha256:01e366f25d9047816fe3d288cbfc3e10541daf0af2044763f3d0ade42476da18"}, @@ -1067,10 +1150,12 @@ version = "3.8.0" description = "A platform independent file lock." 
optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"}, {file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"}, ] +markers = {main = "extra == \"torch\" or extra == \"jac\""} [package.extras] docs = ["furo (>=2022.6.21)", "sphinx (>=5.1.1)", "sphinx-autodoc-typehints (>=1.19.1)"] @@ -1082,6 +1167,7 @@ version = "1.5.1" description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" optional = false python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" +groups = ["dev"] files = [ {file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"}, {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, @@ -1093,6 +1179,8 @@ version = "1.3.3" description = "A list-like structure which implements collections.abc.MutableSequence" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"jac\"" files = [ {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff8bf625fe85e119553b5383ba0fb6aa3d0ec2ae980295aaefa552374926b3f4"}, {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dfbac4c2dfcc082fcf8d942d1e49b6aa0766c19d3358bd86e2000bf0fa4a9cf0"}, @@ -1176,6 +1264,7 @@ version = "2.1.0" description = "Copy your docs directly to the gh-pages branch." optional = false python-versions = "*" +groups = ["docs"] files = [ {file = "ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343"}, {file = "ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619"}, @@ -1193,6 +1282,7 @@ version = "0.36.2" description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." 
optional = false python-versions = ">=3.8" +groups = ["docs"] files = [ {file = "griffe-0.36.2-py3-none-any.whl", hash = "sha256:ba71895a3f5f606b18dcd950e8a1f8e7332a37f90f24caeb002546593f2e0eee"}, {file = "griffe-0.36.2.tar.gz", hash = "sha256:333ade7932bb9096781d83092602625dfbfe220e87a039d2801259a1bd41d1c2"}, @@ -1207,6 +1297,8 @@ version = "1.53.0" description = "HTTP/2-based RPC framework" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"qdrant\" and python_version < \"3.12\" or extra == \"milvus\"" files = [ {file = "grpcio-1.53.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:752d2949b40e12e6ad3ed8cc552a65b54d226504f6b1fb67cab2ccee502cc06f"}, {file = "grpcio-1.53.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:8a48fd3a7222be226bb86b7b413ad248f17f3101a524018cdc4562eeae1eb2a3"}, @@ -1264,6 +1356,8 @@ version = "1.53.0" description = "Protobuf code generator for gRPC" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"qdrant\" and python_version < \"3.12\"" files = [ {file = "grpcio-tools-1.53.0.tar.gz", hash = "sha256:925efff2d63ca3266f93c924ffeba5d496f16a8ccbe125fa0d18acf47cc5fa88"}, {file = "grpcio_tools-1.53.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:41b859cf943256debba1e7b921e3689c89f95495b65f7ad226c4f0e38edf8ee4"}, @@ -1323,6 +1417,7 @@ version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, @@ -1334,6 +1429,8 @@ version = "4.1.0" description = "HTTP/2 State-Machine based protocol implementation" optional = true python-versions = ">=3.6.1" +groups = ["main"] +markers = "extra == \"qdrant\" and python_version < \"3.12\"" files = [ {file = "h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d"}, {file = "h2-4.1.0.tar.gz", hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb"}, @@ -1349,6 +1446,8 @@ version = "0.7.0" description = "hnswlib" optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"hnswlib\"" files = [ {file = "hnswlib-0.7.0.tar.gz", hash = "sha256:bc459668e7e44bb7454b256b90c98c5af750653919d9a91698dafcf416cf64c4"}, ] @@ -1362,6 +1461,8 @@ version = "4.0.0" description = "Pure-Python HPACK header compression" optional = true python-versions = ">=3.6.1" +groups = ["main"] +markers = "extra == \"qdrant\" and python_version < \"3.12\"" files = [ {file = "hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c"}, {file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"}, @@ -1373,6 +1474,7 @@ version = "0.16.1" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "httpcore-0.16.1-py3-none-any.whl", hash = "sha256:8d393db683cc8e35cc6ecb02577c5e1abfedde52b38316d038932a84b4875ecb"}, {file = "httpcore-0.16.1.tar.gz", hash = "sha256:3d3143ff5e1656a5740ea2f0c167e8e9d48c5a9bbd7f00ad1f8cff5711b08543"}, @@ -1394,6 +1496,7 @@ version = "0.23.1" description = "The next generation HTTP client." 
optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "httpx-0.23.1-py3-none-any.whl", hash = "sha256:0b9b1f0ee18b9978d637b0776bfd7f54e2ca278e063e3586d8f01cda89e042a8"}, {file = "httpx-0.23.1.tar.gz", hash = "sha256:202ae15319be24efe9a8bd4ed4360e68fde7b38bcc2ce87088d416f026667d19"}, @@ -1418,6 +1521,8 @@ version = "6.0.1" description = "HTTP/2 framing layer for Python" optional = true python-versions = ">=3.6.1" +groups = ["main"] +markers = "extra == \"qdrant\" and python_version < \"3.12\"" files = [ {file = "hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15"}, {file = "hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"}, @@ -1429,6 +1534,7 @@ version = "2.5.8" description = "File identification library for Python" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "identify-2.5.8-py2.py3-none-any.whl", hash = "sha256:48b7925fe122720088aeb7a6c34f17b27e706b72c61070f27fe3789094233440"}, {file = "identify-2.5.8.tar.gz", hash = "sha256:7a214a10313b9489a0d61467db2856ae8d0b8306fc923e03a9effa53d8aedc58"}, @@ -1443,6 +1549,7 @@ version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" +groups = ["main", "dev", "docs"] files = [ {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, @@ -1454,10 +1561,12 @@ version = "5.0.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.7" +groups = ["main", "dev", "docs"] files = [ {file = "importlib_metadata-5.0.0-py3-none-any.whl", hash = "sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43"}, {file = "importlib_metadata-5.0.0.tar.gz", hash = "sha256:da31db32b304314d044d3c12c79bd59e307889b287ad12ff387b3500835fc2ab"}, ] +markers = {main = "(extra == \"jax\" or extra == \"jac\" or extra == \"full\") and (python_version < \"3.10\" or extra == \"jac\")", dev = "python_version < \"3.10\"", docs = "python_version < \"3.10\""} [package.dependencies] zipp = ">=0.5" @@ -1473,10 +1582,12 @@ version = "5.10.0" description = "Read resources from Python packages" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "importlib_resources-5.10.0-py3-none-any.whl", hash = "sha256:ee17ec648f85480d523596ce49eae8ead87d5631ae1551f913c0100b5edd3437"}, {file = "importlib_resources-5.10.0.tar.gz", hash = "sha256:c01b1b94210d9849f286b86bb51bcea7cd56dde0600d8db721d7b81330711668"}, ] +markers = {main = "python_version < \"3.9\" and (extra == \"mesh\" or extra == \"full\")", dev = "python_version < \"3.9\""} [package.dependencies] zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} @@ -1491,6 +1602,7 @@ version = "1.1.1" description = "iniconfig: brain-dead simple config-ini parsing" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, @@ -1502,6 +1614,7 @@ version = "6.16.2" description = "IPython Kernel for Jupyter" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = 
"ipykernel-6.16.2-py3-none-any.whl", hash = "sha256:67daf93e5b52456cd8eea87a8b59405d2bb80ae411864a1ea206c3631d8179af"}, {file = "ipykernel-6.16.2.tar.gz", hash = "sha256:463f3d87a92e99969b1605cb7a5b4d7b36b7145a0e72d06e65918a6ddefbe630"}, @@ -1530,6 +1643,7 @@ version = "7.34.0" description = "IPython: Productive Interactive Computing" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "ipython-7.34.0-py3-none-any.whl", hash = "sha256:c175d2440a1caff76116eb719d40538fbb316e214eda85c5515c303aacbfb23e"}, {file = "ipython-7.34.0.tar.gz", hash = "sha256:af3bdb46aa292bce5615b1b2ebc76c2080c5f77f54bda2ec72461317273e7cd6"}, @@ -1566,6 +1680,7 @@ version = "0.2.0" description = "Vestigial utilities from IPython" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, @@ -1577,6 +1692,7 @@ version = "20.11.0" description = "Operations with ISO 8601 durations" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042"}, {file = "isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9"}, @@ -1591,6 +1707,7 @@ version = "5.11.5" description = "A Python utility / library to sort Python imports." optional = false python-versions = ">=3.7.0" +groups = ["dev"] files = [ {file = "isort-5.11.5-py3-none-any.whl", hash = "sha256:ba1d72fb2595a01c7895a5128f9585a5cc4b6d395f1c8d514989b9a7eb2a8746"}, {file = "isort-5.11.5.tar.gz", hash = "sha256:6be1f76a507cb2ecf16c7cf14a37e41609ca082330be4e3436a18ef74add55db"}, @@ -1608,6 +1725,8 @@ version = "0.4.13" description = "Differentiate, compile, and transform Numpy code." optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"jax\" or extra == \"full\"" files = [ {file = "jax-0.4.13.tar.gz", hash = "sha256:03bfe6749dfe647f16f15f6616638adae6c4a7ca7167c75c21961ecfd3a3baaa"}, ] @@ -1638,6 +1757,7 @@ version = "0.18.1" description = "An autocompletion tool for Python that can be used for text editors." optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "jedi-0.18.1-py2.py3-none-any.whl", hash = "sha256:637c9635fcf47945ceb91cd7f320234a7be540ded6f3e99a50cb6febdfd1ba8d"}, {file = "jedi-0.18.1.tar.gz", hash = "sha256:74137626a64a99c8eb6ae5832d99b3bdd7d29a3850fe2aa80a4126b2a7d949ab"}, @@ -1656,6 +1776,8 @@ version = "0.34.0" description = "SDK for Hubble API at Jina AI." optional = true python-versions = ">=3.7.0" +groups = ["main"] +markers = "extra == \"jac\"" files = [ {file = "jina-hubble-sdk-0.34.0.tar.gz", hash = "sha256:d52e1c3b90262a779dd6be66c687d5d824698cc0e17f79e9f6b94d03e86b10c1"}, {file = "jina_hubble_sdk-0.34.0-py3-none-any.whl", hash = "sha256:d52b5d0756b710192453858a6d9056280616b9c1ab35fbf061fe6ee66cdf77a5"}, @@ -1681,10 +1803,12 @@ version = "3.1.2" description = "A very fast and expressive template engine." 
optional = false python-versions = ">=3.7" +groups = ["main", "dev", "docs"] files = [ {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, ] +markers = {main = "extra == \"torch\""} [package.dependencies] MarkupSafe = ">=2.0" @@ -1698,6 +1822,8 @@ version = "1.0.1" description = "JSON Matching Expressions" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"aws\"" files = [ {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, @@ -1709,6 +1835,7 @@ version = "0.9.10" description = "A Python implementation of the JSON5 data format." optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "json5-0.9.10-py2.py3-none-any.whl", hash = "sha256:993189671e7412e9cdd8be8dc61cf402e8e579b35f1d1bb20ae6b09baa78bbce"}, {file = "json5-0.9.10.tar.gz", hash = "sha256:ad9f048c5b5a4c3802524474ce40a622fae789860a86f10cc4f7e5f9cf9b46ab"}, @@ -1723,6 +1850,7 @@ version = "2.4" description = "Identify specific nodes in a JSON document (RFC 6901)" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +groups = ["dev"] files = [ {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, @@ -1734,10 +1862,12 @@ version = "4.17.3" description = "An implementation of JSON Schema validation for Python" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "jsonschema-4.17.3-py3-none-any.whl", hash = "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6"}, {file = "jsonschema-4.17.3.tar.gz", hash = "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d"}, ] +markers = {main = "extra == \"mesh\" or extra == \"full\""} [package.dependencies] attrs = ">=17.4.0" @@ -1763,6 +1893,7 @@ version = "7.4.6" description = "Jupyter protocol implementation and client libraries" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "jupyter_client-7.4.6-py3-none-any.whl", hash = "sha256:540b6a5c9c2dc481c5dd54fd5acb260f03dfaaa7c5325b2ffb1f676710f8c7c4"}, {file = "jupyter_client-7.4.6.tar.gz", hash = "sha256:f7f9a9dc3a0ecd223ed6a5a00cf4140a5c252ec72e52d6de370748ed0aa083dd"}, @@ -1787,6 +1918,7 @@ version = "4.12.0" description = "Jupyter core package. A base package on which Jupyter projects rely." 
optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "jupyter_core-4.12.0-py3-none-any.whl", hash = "sha256:a54672c539333258495579f6964144924e0aa7b07f7069947bef76d7ea5cb4c1"}, {file = "jupyter_core-4.12.0.tar.gz", hash = "sha256:87f39d7642412ae8a52291cc68e71ac01dfa2c735df2701f8108251d51b4f460"}, @@ -1805,6 +1937,7 @@ version = "0.6.3" description = "Jupyter Event System library" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "jupyter_events-0.6.3-py3-none-any.whl", hash = "sha256:57a2749f87ba387cd1bfd9b22a0875b889237dbf2edc2121ebb22bde47036c17"}, {file = "jupyter_events-0.6.3.tar.gz", hash = "sha256:9a6e9995f75d1b7146b436ea24d696ce3a35bfa8bfe45e0c33c334c79464d0b3"}, @@ -1829,6 +1962,7 @@ version = "1.23.2" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "jupyter_server-1.23.2-py3-none-any.whl", hash = "sha256:c01d0e84c22a14dd6b0e7d8ce4105b08a3426b46582668e28046a64c07311a4f"}, {file = "jupyter_server-1.23.2.tar.gz", hash = "sha256:69cb954ef02c0ba1837787e34e4a1240c93c8eb590662fae1840778861957660"}, @@ -1861,6 +1995,7 @@ version = "0.9.1" description = "Jupyter Server extension providing an implementation of the File ID service." optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "jupyter_server_fileid-0.9.1-py3-none-any.whl", hash = "sha256:76dd05a45b78c7ec0cba0be98ece289984c6bcfc1ca2da216d42930e506a4d68"}, {file = "jupyter_server_fileid-0.9.1.tar.gz", hash = "sha256:7486bca3acf9bbaab7ce5127f9f64d2df58f5d2de377609fb833291a7217a6a2"}, @@ -1880,6 +2015,7 @@ version = "0.8.0" description = "A Jupyter Server Extension Providing Y Documents." optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "jupyter_server_ydoc-0.8.0-py3-none-any.whl", hash = "sha256:969a3a1a77ed4e99487d60a74048dc9fa7d3b0dcd32e60885d835bbf7ba7be11"}, {file = "jupyter_server_ydoc-0.8.0.tar.gz", hash = "sha256:a6fe125091792d16c962cc3720c950c2b87fcc8c3ecf0c54c84e9a20b814526c"}, @@ -1899,6 +2035,7 @@ version = "0.2.5" description = "Document structures for collaborative editing using Ypy" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "jupyter_ydoc-0.2.5-py3-none-any.whl", hash = "sha256:5759170f112c70320a84217dd98d287699076ae65a7f88d458d57940a9f2b882"}, {file = "jupyter_ydoc-0.2.5.tar.gz", hash = "sha256:5a02ca7449f0d875f73e8cb8efdf695dddef15a8e71378b1f4eda6b7c90f5382"}, @@ -1918,6 +2055,7 @@ version = "3.6.7" description = "JupyterLab computational environment" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "jupyterlab-3.6.7-py3-none-any.whl", hash = "sha256:d92d57d402f53922bca5090654843aa08e511290dff29fdb0809eafbbeb6df98"}, {file = "jupyterlab-3.6.7.tar.gz", hash = "sha256:2fadeaec161b0d1aec19f17721d8b803aef1d267f89c8b636b703be14f435c8f"}, @@ -1947,6 +2085,7 @@ version = "0.2.2" description = "Pygments theme using JupyterLab CSS variables" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "jupyterlab_pygments-0.2.2-py2.py3-none-any.whl", hash = "sha256:2405800db07c9f770863bcf8049a529c3dd4d3e28536638bd7c1c01d2748309f"}, {file = "jupyterlab_pygments-0.2.2.tar.gz", hash = "sha256:7405d7fde60819d905a9fa8ce89e4cd830e318cdad22a0030f7a901da705585d"}, @@ -1958,6 +2097,7 @@ version = "2.24.0" description = "A set of server components for JupyterLab and JupyterLab like applications." 
optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "jupyterlab_server-2.24.0-py3-none-any.whl", hash = "sha256:5f077e142bb8dc9b843d960f940c513581bceca3793a0d80f9c67d9522c4e876"}, {file = "jupyterlab_server-2.24.0.tar.gz", hash = "sha256:4e6f99e0a5579bbbc32e449c4dbb039561d4f1a7827d5733273ed56738f21f07"}, @@ -1984,6 +2124,7 @@ version = "4.9.2" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" +groups = ["main", "docs"] files = [ {file = "lxml-4.9.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:76cf573e5a365e790396a5cc2b909812633409306c6531a6877c59061e42c4f2"}, {file = "lxml-4.9.2-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b1f42b6921d0e81b1bcb5e395bc091a70f41c4d4e55ba99c6da2b31626c44892"}, @@ -2063,6 +2204,7 @@ files = [ {file = "lxml-4.9.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7b515674acfdcadb0eb5d00d8a709868173acece5cb0be3dd165950cbfdf5409"}, {file = "lxml-4.9.2.tar.gz", hash = "sha256:2455cfaeb7ac70338b3257f41e21f0724f4b5b0c0e7702da67ee6c3640835b67"}, ] +markers = {main = "extra == \"mesh\" or extra == \"full\""} [package.extras] cssselect = ["cssselect (>=0.7)"] @@ -2076,6 +2218,8 @@ version = "4.3.2" description = "LZ4 Bindings for Python" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"proto\" or extra == \"full\"" files = [ {file = "lz4-4.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1c4c100d99eed7c08d4e8852dd11e7d1ec47a3340f49e3a96f8dfbba17ffb300"}, {file = "lz4-4.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:edd8987d8415b5dad25e797043936d91535017237f72fa456601be1479386c92"}, @@ -2125,6 +2269,8 @@ version = "1.0.1" description = "Python bindings for the mapbox earcut C++ polygon triangulation library." optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"mesh\" or extra == \"full\"" files = [ {file = "mapbox_earcut-1.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:60f8299b724b5ad1f171c2666a12591845536b0e9318ddc9649f75805096686c"}, {file = "mapbox_earcut-1.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4af0911ed9d1920c36c54b500ea69fbcc948f409c66f632c75b15fee04c7544e"}, @@ -2207,6 +2353,7 @@ version = "3.3.7" description = "Python implementation of Markdown." optional = false python-versions = ">=3.6" +groups = ["docs"] files = [ {file = "Markdown-3.3.7-py3-none-any.whl", hash = "sha256:f5da449a6e1c989a4cea2631aa8ee67caa5a2ef855d551c88f9e309f4634c621"}, {file = "Markdown-3.3.7.tar.gz", hash = "sha256:cbb516f16218e643d8e0a95b309f77eb118cb138d39a4f27851e6a63581db874"}, @@ -2224,6 +2371,7 @@ version = "2.1.1" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.7" +groups = ["main", "dev", "docs"] files = [ {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, @@ -2266,6 +2414,7 @@ files = [ {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, ] +markers = {main = "extra == \"torch\""} [[package]] name = "marshmallow" @@ -2273,6 +2422,8 @@ version = "3.19.0" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"milvus\"" files = [ {file = "marshmallow-3.19.0-py3-none-any.whl", hash = "sha256:93f0958568da045b0021ec6aeb7ac37c81bfcccbb9a0e7ed8559885070b3a19b"}, {file = "marshmallow-3.19.0.tar.gz", hash = "sha256:90032c0fd650ce94b6ec6dc8dfeb0e3ff50c144586462c389b81a07205bedb78"}, @@ -2293,6 +2444,7 @@ version = "0.1.6" description = "Inline Matplotlib backend for Jupyter" optional = false python-versions = ">=3.5" +groups = ["dev"] files = [ {file = "matplotlib-inline-0.1.6.tar.gz", hash = "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"}, {file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"}, @@ -2307,6 +2459,7 @@ version = "1.3.4" description = "A deep merge function for 🐍." optional = false python-versions = ">=3.6" +groups = ["docs"] files = [ {file = "mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307"}, {file = "mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8"}, @@ -2318,6 +2471,7 @@ version = "2.0.4" description = "A sane Markdown parser with useful plugins and renderers" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "mistune-2.0.4-py2.py3-none-any.whl", hash = "sha256:182cc5ee6f8ed1b807de6b7bb50155df7b66495412836b9a74c8fbdfc75fe36d"}, {file = "mistune-2.0.4.tar.gz", hash = "sha256:9ee0a66053e2267aba772c71e06891fa8f1af6d4b01d5e84e267b4570d4d9808"}, @@ -2329,6 +2483,7 @@ version = "1.4.2" description = "Project documentation with Markdown." optional = false python-versions = ">=3.7" +groups = ["docs"] files = [ {file = "mkdocs-1.4.2-py3-none-any.whl", hash = "sha256:c8856a832c1e56702577023cd64cc5f84948280c1c0fcc6af4cd39006ea6aa8c"}, {file = "mkdocs-1.4.2.tar.gz", hash = "sha256:8947af423a6d0facf41ea1195b8e1e8c85ad94ac95ae307fe11232e0424b11c5"}, @@ -2357,6 +2512,7 @@ version = "0.4.1" description = "Automatically link across pages in MkDocs." 
optional = false python-versions = ">=3.7" +groups = ["docs"] files = [ {file = "mkdocs-autorefs-0.4.1.tar.gz", hash = "sha256:70748a7bd025f9ecd6d6feeba8ba63f8e891a1af55f48e366d6d6e78493aba84"}, {file = "mkdocs_autorefs-0.4.1-py3-none-any.whl", hash = "sha256:a2248a9501b29dc0cc8ba4c09f4f47ff121945f6ce33d760f145d6f89d313f5b"}, @@ -2372,6 +2528,7 @@ version = "2.8.0" description = "An MkDocs plugin that simplifies configuring page titles and their order" optional = false python-versions = ">=3.6.2" +groups = ["docs"] files = [ {file = "mkdocs-awesome-pages-plugin-2.8.0.tar.gz", hash = "sha256:af7e327e14b2eea3b2735c37428e33a528ecd2d9ae2296dc0f1632f0f3bc28f7"}, {file = "mkdocs_awesome_pages_plugin-2.8.0-py3-none-any.whl", hash = "sha256:6b21ad4f41aecbe89e3a9a51f8837892cc7ce8ca0f9f4e0a355d56159ace3d68"}, @@ -2388,6 +2545,7 @@ version = "9.1.3" description = "Documentation that simply works" optional = false python-versions = ">=3.7" +groups = ["docs"] files = [ {file = "mkdocs_material-9.1.3-py3-none-any.whl", hash = "sha256:a8d14d03569008afb0f5a5785c253249b5ff038e3a5509f96a393b8596bf5062"}, {file = "mkdocs_material-9.1.3.tar.gz", hash = "sha256:0be1b5d76c00efc9b2ecbd2d71014be950351e710f5947f276264878afc82ca0"}, @@ -2410,6 +2568,7 @@ version = "1.1.1" description = "Extension pack for Python Markdown and MkDocs Material." optional = false python-versions = ">=3.7" +groups = ["docs"] files = [ {file = "mkdocs_material_extensions-1.1.1-py3-none-any.whl", hash = "sha256:e41d9f38e4798b6617ad98ca8f7f1157b1e4385ac1459ca1e4ea219b556df945"}, {file = "mkdocs_material_extensions-1.1.1.tar.gz", hash = "sha256:9c003da71e2cc2493d910237448c672e00cefc800d3d6ae93d2fc69979e3bd93"}, @@ -2421,6 +2580,7 @@ version = "1.5.0" description = "" optional = false python-versions = ">=3.6" +groups = ["docs"] files = [ {file = "mkdocs-video-1.5.0.tar.gz", hash = "sha256:0defc018f4b7927f8afffc4d8e039c84dfba636dffc5e25e2bfa8d6350bc8eca"}, {file = "mkdocs_video-1.5.0-py3-none-any.whl", hash = "sha256:b35613d4dacbac2dfa94d8c2600383cda14ad99a1fa1542b5fc4e9c6d19e9fe1"}, @@ -2436,6 +2596,7 @@ version = "0.23.0" description = "Automatic documentation from sources, for MkDocs." optional = false python-versions = ">=3.8" +groups = ["docs"] files = [ {file = "mkdocstrings-0.23.0-py3-none-any.whl", hash = "sha256:051fa4014dfcd9ed90254ae91de2dbb4f24e166347dae7be9a997fe16316c65e"}, {file = "mkdocstrings-0.23.0.tar.gz", hash = "sha256:d9c6a37ffbe7c14a7a54ef1258c70b8d394e6a33a1c80832bce40b9567138d1c"}, @@ -2463,6 +2624,7 @@ version = "1.7.0" description = "A Python handler for mkdocstrings." 
optional = false python-versions = ">=3.8" +groups = ["docs"] files = [ {file = "mkdocstrings_python-1.7.0-py3-none-any.whl", hash = "sha256:85c5f009a5a0ebb6076b7818c82a2bb0eebd0b54662628fa8b25ee14a6207951"}, {file = "mkdocstrings_python-1.7.0.tar.gz", hash = "sha256:5dac2712bd38a3ff0812b8650a68b232601d1474091b380a8b5bc102c8c0d80a"}, @@ -2478,6 +2640,7 @@ version = "0.2.0" description = "" optional = false python-versions = "*" +groups = ["docs"] files = [ {file = "mktestdocs-0.2.0-py2.py3-none-any.whl", hash = "sha256:0ce2ba702dfe7f2a516878fd1787d4c1f95e4a088248893bb5788ac037010559"}, {file = "mktestdocs-0.2.0.tar.gz", hash = "sha256:a6b401c63ac02ab683443e0fcb27c58fc8c2264cf4e9e93835741d234f917267"}, @@ -2492,6 +2655,8 @@ version = "0.2.0" description = "" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"jax\" or extra == \"full\"" files = [ {file = "ml_dtypes-0.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:df6a76e1c8adf484feb138ed323f9f40a7b6c21788f120f7c78bec20ac37ee81"}, {file = "ml_dtypes-0.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc29a0524ef5e23a7fbb8d881bdecabeb3fc1d19d9db61785d077a86cb94fab2"}, @@ -2528,6 +2693,8 @@ version = "1.6" description = "An implementation of time.monotonic() for Python 2 & < 3.3" optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"epsilla\"" files = [ {file = "monotonic-1.6-py2.py3-none-any.whl", hash = "sha256:68687e19a14f11f26d140dd5c86f3dba4bf5df58003000ed467e0e2a69bca96c"}, {file = "monotonic-1.6.tar.gz", hash = "sha256:3a55207bcfed53ddd5c5bae174524062935efed17792e9de2ad0205ce9ad63f7"}, @@ -2539,6 +2706,8 @@ version = "1.3.0" description = "Python library for arbitrary-precision floating-point arithmetic" optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"mesh\" or extra == \"full\" or extra == \"torch\"" files = [ {file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"}, {file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"}, @@ -2556,6 +2725,8 @@ version = "6.0.4" description = "multidict implementation" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"jac\"" files = [ {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, @@ -2639,6 +2810,7 @@ version = "1.0.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "mypy-1.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0626db16705ab9f7fa6c249c017c887baf20738ce7f9129da162bb3075fc1af"}, {file = "mypy-1.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1ace23f6bb4aec4604b86c4843276e8fa548d667dbbd0cb83a3ae14b18b2db6c"}, @@ -2685,6 +2857,7 @@ version = "0.4.3" description = "Experimental type system extensions for programs checked with the mypy typechecker." 
optional = false python-versions = "*" +groups = ["main", "dev"] files = [ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, @@ -2696,6 +2869,7 @@ version = "8.3.1" description = "Simple yet flexible natural sorting in Python." optional = false python-versions = ">=3.7" +groups = ["docs"] files = [ {file = "natsort-8.3.1-py3-none-any.whl", hash = "sha256:d583bc9050dd10538de36297c960b93f873f0cd01671a3c50df5bd86dd391dcb"}, {file = "natsort-8.3.1.tar.gz", hash = "sha256:517595492dde570a4fd6b6a76f644440c1ba51e2338c8a671d7f0475fda8f9fd"}, @@ -2711,6 +2885,7 @@ version = "0.4.8" description = "A web-based notebook environment for interactive computing" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "nbclassic-0.4.8-py3-none-any.whl", hash = "sha256:cbf05df5842b420d5cece0143462380ea9d308ff57c2dc0eb4d6e035b18fbfb3"}, {file = "nbclassic-0.4.8.tar.gz", hash = "sha256:c74d8a500f8e058d46b576a41e5bc640711e1032cf7541dde5f73ea49497e283"}, @@ -2746,6 +2921,7 @@ version = "0.7.0" description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." optional = false python-versions = ">=3.7.0" +groups = ["dev"] files = [ {file = "nbclient-0.7.0-py3-none-any.whl", hash = "sha256:434c91385cf3e53084185334d675a0d33c615108b391e260915d1aa8e86661b8"}, {file = "nbclient-0.7.0.tar.gz", hash = "sha256:a1d844efd6da9bc39d2209bf996dbd8e07bf0f36b796edfabaa8f8a9ab77c3aa"}, @@ -2767,6 +2943,7 @@ version = "7.2.5" description = "Converting Jupyter Notebooks" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "nbconvert-7.2.5-py3-none-any.whl", hash = "sha256:3e90e108bb5637b5b8a1422af1156af1368b39dd25369ff7faa7dfdcdef18f81"}, {file = "nbconvert-7.2.5.tar.gz", hash = "sha256:8fdc44fd7d9424db7fdc6e1e834a02f6b8620ffb653767388be2f9eb16f84184"}, @@ -2805,6 +2982,7 @@ version = "5.7.0" description = "The Jupyter Notebook format" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "nbformat-5.7.0-py3-none-any.whl", hash = "sha256:1b05ec2c552c2f1adc745f4eddce1eac8ca9ffd59bb9fd859e827eaa031319f9"}, {file = "nbformat-5.7.0.tar.gz", hash = "sha256:1d4760c15c1a04269ef5caf375be8b98dd2f696e5eb9e603ec2bf091f9b0d3f3"}, @@ -2825,6 +3003,7 @@ version = "1.5.6" description = "Patch asyncio to allow nested event loops" optional = false python-versions = ">=3.5" +groups = ["dev"] files = [ {file = "nest_asyncio-1.5.6-py3-none-any.whl", hash = "sha256:b9a953fb40dceaa587d109609098db21900182b16440652454a146cffb06e8b8"}, {file = "nest_asyncio-1.5.6.tar.gz", hash = "sha256:d267cc1ff794403f7df692964d1d2a3fa9418ffea2a3f6859a439ff482fef290"}, @@ -2836,6 +3015,8 @@ version = "2.6.3" description = "Python package for creating and manipulating graphs and networks" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"mesh\" or extra == \"full\" or extra == \"torch\"" files = [ {file = "networkx-2.6.3-py3-none-any.whl", hash = "sha256:80b6b89c77d1dfb64a4c7854981b60aeea6360ac02c6d4e4913319e0a313abef"}, {file = "networkx-2.6.3.tar.gz", hash = "sha256:c0946ed31d71f1b732b5aaa6da5a0388a345019af232ce2f49c766e2d6795c51"}, @@ -2854,6 +3035,7 @@ version = "1.7.0" description = "Node.js virtual environment builder" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" 
+groups = ["dev"] files = [ {file = "nodeenv-1.7.0-py2.py3-none-any.whl", hash = "sha256:27083a7b96a25f2f5e1d8cb4b6317ee8aeda3bdd121394e5ac54e498028a042e"}, {file = "nodeenv-1.7.0.tar.gz", hash = "sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b"}, @@ -2868,6 +3050,7 @@ version = "6.5.2" description = "A web-based notebook environment for interactive computing" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "notebook-6.5.2-py3-none-any.whl", hash = "sha256:e04f9018ceb86e4fa841e92ea8fb214f8d23c1cedfde530cc96f92446924f0e4"}, {file = "notebook-6.5.2.tar.gz", hash = "sha256:c1897e5317e225fc78b45549a6ab4b668e4c996fd03a04e938fe5e7af2bfffd0"}, @@ -2902,6 +3085,7 @@ version = "0.2.2" description = "A shim layer for notebook traits and config" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "notebook_shim-0.2.2-py3-none-any.whl", hash = "sha256:9c6c30f74c4fbea6fce55c1be58e7fd0409b1c681b075dcedceb005db5026949"}, {file = "notebook_shim-0.2.2.tar.gz", hash = "sha256:090e0baf9a5582ff59b607af523ca2db68ff216da0c69956b62cab2ef4fc9c3f"}, @@ -2919,6 +3103,7 @@ version = "1.24.4" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "numpy-1.24.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0bfb52d2169d58c1cdb8cc1f16989101639b34c7d3ce60ed70b19c63eba0b64"}, {file = "numpy-1.24.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ed094d4f0c177b1b8e7aa9cba7d6ceed51c0e569a5318ac0ca9a090680a6a1b1"}, @@ -2950,12 +3135,43 @@ files = [ {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, ] +[[package]] +name = "opensearch-py" +version = "2.6.0" +description = "Python client for OpenSearch" +optional = false +python-versions = "<4,>=3.8" +groups = ["main"] +files = [ + {file = "opensearch_py-2.6.0-py2.py3-none-any.whl", hash = "sha256:b6e78b685dd4e9c016d7a4299cf1de69e299c88322e3f81c716e6e23fe5683c1"}, + {file = "opensearch_py-2.6.0.tar.gz", hash = "sha256:0b7c27e8ed84c03c99558406927b6161f186a72502ca6d0325413d8e5523ba96"}, +] + +[package.dependencies] +certifi = ">=2022.12.07" +Events = "*" +python-dateutil = "*" +requests = ">=2.4.0,<3.0.0" +six = "*" +urllib3 = [ + {version = ">=1.26.18,<1.27", markers = "python_version < \"3.10\""}, + {version = ">=1.26.18,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""}, +] + +[package.extras] +async = ["aiohttp (>=3.9.4,<4)"] +develop = ["black (>=24.3.0)", "botocore", "coverage (<8.0.0)", "jinja2", "mock", "myst-parser", "pytest (>=3.0.0)", "pytest-cov", "pytest-mock (<4.0.0)", "pytz", "pyyaml", "requests (>=2.0.0,<3.0.0)", "sphinx", "sphinx-copybutton", "sphinx-rtd-theme"] +docs = ["aiohttp (>=3.9.4,<4)", "myst-parser", "sphinx", "sphinx-copybutton", "sphinx-rtd-theme"] +kerberos = ["requests-kerberos"] + [[package]] name = "opt-einsum" version = "3.3.0" description = "Optimizing numpys einsum function" optional = true python-versions = ">=3.5" +groups = ["main"] +markers = "extra == \"jax\" or extra == \"full\"" files = [ {file = "opt_einsum-3.3.0-py3-none-any.whl", hash = "sha256:2455e59e3947d3c275477df7f5205b30635e266fe6dc300e3d9f9646bfcea147"}, {file = "opt_einsum-3.3.0.tar.gz", hash = "sha256:59f6475f77bbc37dcf7cd748519c0ec60722e91e63ca114e68821c0c54a46549"}, @@ -2974,6 +3190,7 @@ version = "3.9.15" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" optional = false 
python-versions = ">=3.8" +groups = ["main"] files = [ {file = "orjson-3.9.15-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:d61f7ce4727a9fa7680cd6f3986b0e2c732639f46a5e0156e550e35258aa313a"}, {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4feeb41882e8aa17634b589533baafdceb387e01e117b1ec65534ec724023d04"}, @@ -3033,10 +3250,12 @@ version = "21.3" description = "Core utilities for Python packages" optional = false python-versions = ">=3.6" +groups = ["main", "dev", "docs"] files = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, ] +markers = {main = "extra == \"jac\" or extra == \"milvus\""} [package.dependencies] pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" @@ -3047,6 +3266,8 @@ version = "2.0.3" description = "Powerful data structures for data analysis, time series, and statistics" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"pandas\" or extra == \"full\" or extra == \"milvus\"" files = [ {file = "pandas-2.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e4c7c9f27a4185304c7caf96dc7d91bc60bc162221152de697c98eb0b2648dd8"}, {file = "pandas-2.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f167beed68918d62bffb6ec64f2e1d8a7d297a038f86d4aed056b9493fca407f"}, @@ -3114,6 +3335,7 @@ version = "1.5.0" description = "Utilities for writing pandoc filters in python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["dev"] files = [ {file = "pandocfilters-1.5.0-py2.py3-none-any.whl", hash = "sha256:33aae3f25fd1a026079f5d27bdd52496f0e0803b3469282162bafdcbdf6ef14f"}, {file = "pandocfilters-1.5.0.tar.gz", hash = "sha256:0b679503337d233b4339a817bfc8c50064e2eff681314376a47cb582305a7a38"}, @@ -3125,6 +3347,7 @@ version = "0.8.3" description = "A Python Parser" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, @@ -3140,10 +3363,12 @@ version = "0.10.2" description = "Utility library for gitignore style pattern matching of file paths." optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "pathspec-0.10.2-py3-none-any.whl", hash = "sha256:88c2606f2c1e818b978540f73ecc908e13999c6c3a383daf3705652ae79807a5"}, {file = "pathspec-0.10.2.tar.gz", hash = "sha256:8f6bf73e5758fd365ef5d58ce09ac7c27d2833a8d7da51712eac6e27e35141b0"}, ] +markers = {main = "extra == \"jac\""} [[package]] name = "pexpect" @@ -3151,6 +3376,8 @@ version = "4.8.0" description = "Pexpect allows easy control of interactive console applications." 
optional = false python-versions = "*" +groups = ["dev"] +markers = "sys_platform != \"win32\"" files = [ {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, @@ -3165,6 +3392,7 @@ version = "0.7.5" description = "Tiny 'shelve'-like database with concurrency support" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, @@ -3176,6 +3404,8 @@ version = "10.0.1" description = "Python Imaging Library (Fork)" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"image\" or extra == \"full\" or extra == \"mesh\"" files = [ {file = "Pillow-10.0.1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:8f06be50669087250f319b706decf69ca71fdecd829091a37cc89398ca4dc17a"}, {file = "Pillow-10.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:50bd5f1ebafe9362ad622072a1d2f5850ecfa44303531ff14353a4059113b12d"}, @@ -3243,10 +3473,12 @@ version = "1.3.10" description = "Resolve a name to an object." optional = false python-versions = ">=3.6" +groups = ["main", "dev"] files = [ {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, ] +markers = {main = "python_version < \"3.9\" and (extra == \"mesh\" or extra == \"full\")", dev = "python_version < \"3.9\""} [[package]] name = "platformdirs" @@ -3254,6 +3486,7 @@ version = "2.5.4" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "platformdirs-2.5.4-py3-none-any.whl", hash = "sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10"}, {file = "platformdirs-2.5.4.tar.gz", hash = "sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7"}, @@ -3269,6 +3502,7 @@ version = "0.13.1" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["dev"] files = [ {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, @@ -3283,6 +3517,8 @@ version = "2.7.0" description = "Wraps the portalocker recipe for easy usage" optional = true python-versions = ">=3.5" +groups = ["main"] +markers = "extra == \"qdrant\" and python_version < \"3.12\"" files = [ {file = "portalocker-2.7.0-py2.py3-none-any.whl", hash = "sha256:a07c5b4f3985c3cf4798369631fb7011adb498e2a46d8440efc75a8f29a0f983"}, {file = "portalocker-2.7.0.tar.gz", hash = "sha256:032e81d534a88ec1736d03f780ba073f047a06c478b06e2937486f334e955c51"}, @@ -3302,6 +3538,8 @@ version = "3.0.2" description = "Integrate PostHog into any python application." 
optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"epsilla\"" files = [ {file = "posthog-3.0.2-py2.py3-none-any.whl", hash = "sha256:a8c0af6f2401fbe50f90e68c4143d0824b54e872de036b1c2f23b5abb39d88ce"}, {file = "posthog-3.0.2.tar.gz", hash = "sha256:701fba6e446a4de687c6e861b587e7b7741955ad624bf34fe013c06a0fec6fb3"}, @@ -3325,6 +3563,7 @@ version = "2.20.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "pre_commit-2.20.0-py2.py3-none-any.whl", hash = "sha256:51a5ba7c480ae8072ecdb6933df22d2f812dc897d5fe848778116129a681aac7"}, {file = "pre_commit-2.20.0.tar.gz", hash = "sha256:a978dac7bc9ec0bcee55c18a277d553b0f419d259dadb4b9418ff2d00eb43959"}, @@ -3344,6 +3583,7 @@ version = "0.15.0" description = "Python client for the Prometheus monitoring system." optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "prometheus_client-0.15.0-py3-none-any.whl", hash = "sha256:db7c05cbd13a0f79975592d112320f2605a325969b270a94b71dcabc47b931d2"}, {file = "prometheus_client-0.15.0.tar.gz", hash = "sha256:be26aa452490cfcf6da953f9436e95a9f2b4d578ca80094b4458930e5f584ab1"}, @@ -3358,6 +3598,7 @@ version = "3.0.32" description = "Library for building powerful interactive command lines in Python" optional = false python-versions = ">=3.6.2" +groups = ["dev"] files = [ {file = "prompt_toolkit-3.0.32-py3-none-any.whl", hash = "sha256:24becda58d49ceac4dc26232eb179ef2b21f133fecda7eed6018d341766ed76e"}, {file = "prompt_toolkit-3.0.32.tar.gz", hash = "sha256:e7f2129cba4ff3b3656bbdda0e74ee00d2f874a8bcdb9dd16f5fec7b3e173cae"}, @@ -3372,6 +3613,8 @@ version = "4.21.9" description = "" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"proto\" or extra == \"hnswlib\" or extra == \"full\" or extra == \"qdrant\" and python_version < \"3.12\" or extra == \"milvus\"" files = [ {file = "protobuf-4.21.9-cp310-abi3-win32.whl", hash = "sha256:6e0be9f09bf9b6cf497b27425487706fa48c6d1632ddd94dab1a5fe11a422392"}, {file = "protobuf-4.21.9-cp310-abi3-win_amd64.whl", hash = "sha256:a7d0ea43949d45b836234f4ebb5ba0b22e7432d065394b532cdca8f98415e3cf"}, @@ -3395,6 +3638,7 @@ version = "5.9.4" description = "Cross-platform lib for process and system monitoring in Python." 
optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["dev"] files = [ {file = "psutil-5.9.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c1ca331af862803a42677c120aff8a814a804e09832f166f226bfd22b56feee8"}, {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:68908971daf802203f3d37e78d3f8831b6d1014864d7a85937941bb35f09aefe"}, @@ -3421,6 +3665,8 @@ version = "0.7.0" description = "Run a subprocess in a pseudo terminal" optional = false python-versions = "*" +groups = ["dev"] +markers = "sys_platform != \"win32\" or os_name != \"nt\"" files = [ {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, @@ -3432,6 +3678,8 @@ version = "1.11.0" description = "library with cross-python path, ini-parsing, io, code, log facilities" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["dev"] +markers = "implementation_name == \"pypy\"" files = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, @@ -3443,6 +3691,8 @@ version = "0.4.8" description = "ASN.1 types and codecs" optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"jac\"" files = [ {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"}, {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"}, @@ -3454,6 +3704,8 @@ version = "0.7.2" description = "python library for reading and writing collada documents" optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"mesh\" or extra == \"full\"" files = [ {file = "pycollada-0.7.2.tar.gz", hash = "sha256:70a2630ed499bdab718c0e61a3e6ae3698130d7e4654e89cdecde51bfdaea56f"}, ] @@ -3471,10 +3723,12 @@ version = "2.21" description = "C parser in Python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main", "dev"] files = [ {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] +markers = {main = "platform_python_implementation != \"PyPy\" and extra == \"weaviate\""} [[package]] name = "pydantic" @@ -3482,6 +3736,7 @@ version = "1.10.13" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "pydantic-1.10.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737"}, {file = "pydantic-1.10.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01"}, @@ -3534,6 +3789,8 @@ version = "0.25.1" description = "Manipulate audio with an simple and easy high level interface" optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"audio\" or extra == \"full\"" files = [ {file = "pydub-0.25.1-py2.py3-none-any.whl", hash = 
"sha256:65617e33033874b59d87db603aa1ed450633288aefead953b30bded59cb599a6"}, {file = "pydub-0.25.1.tar.gz", hash = "sha256:980a33ce9949cab2a569606b65674d748ecbca4f0796887fd6f46173a7b0d30f"}, @@ -3545,6 +3802,8 @@ version = "0.2.3" description = "Epsilla Python SDK" optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"epsilla\"" files = [ {file = "pyepsilla-0.2.3-py3-none-any.whl", hash = "sha256:05bf5f95dc1bd0dfdacac84b844d1505d8aeac442e0c0eadc834ce3ab75ab845"}, {file = "pyepsilla-0.2.3.tar.gz", hash = "sha256:ce302ad965d428dbb22acb574f51046bfa8456204ead7f874ebd63bb5bc820a0"}, @@ -3561,6 +3820,7 @@ version = "2.14.0" description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.6" +groups = ["main", "dev", "docs"] files = [ {file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"}, {file = "Pygments-2.14.0.tar.gz", hash = "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"}, @@ -3575,6 +3835,7 @@ version = "9.10" description = "Extension pack for Python Markdown." optional = false python-versions = ">=3.7" +groups = ["docs"] files = [ {file = "pymdown_extensions-9.10-py3-none-any.whl", hash = "sha256:31eaa76ce6f96aabfcea98787c2fff2c5c0611b20a53a94213970cfbf05f02b8"}, {file = "pymdown_extensions-9.10.tar.gz", hash = "sha256:562c38eee4ce3f101ce631b804bfc2177a8a76c7e4dc908871fb6741a90257a7"}, @@ -3590,6 +3851,8 @@ version = "2.2.13" description = "Python Sdk for Milvus" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"milvus\"" files = [ {file = "pymilvus-2.2.13-py3-none-any.whl", hash = "sha256:ac991863bd63e860c1210d096695297175c6ed09f4de762cf42394cb5aecd1f6"}, {file = "pymilvus-2.2.13.tar.gz", hash = "sha256:72da36cb5f4f84d7a8307202fcaa9a7fc4497d28d2d2235045ba93a430691ef1"}, @@ -3609,6 +3872,8 @@ version = "4.6.2" description = "Python driver for MongoDB " optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"mongo\"" files = [ {file = "pymongo-4.6.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7640d176ee5b0afec76a1bda3684995cb731b2af7fcfd7c7ef8dc271c5d689af"}, {file = "pymongo-4.6.2-cp310-cp310-manylinux1_i686.whl", hash = "sha256:4e2129ec8f72806751b621470ac5d26aaa18fae4194796621508fa0e6068278a"}, @@ -3712,10 +3977,12 @@ version = "3.0.9" description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = false python-versions = ">=3.6.8" +groups = ["main", "dev", "docs"] files = [ {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, ] +markers = {main = "extra == \"jac\" or extra == \"milvus\""} [package.extras] diagrams = ["jinja2", "railroad-diagrams"] @@ -3726,6 +3993,7 @@ version = "0.19.2" description = "Persistent/Functional/Immutable data structures" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "pyrsistent-0.19.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d6982b5a0237e1b7d876b60265564648a69b14017f3b5f908c5be2de3f9abb7a"}, {file = "pyrsistent-0.19.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:187d5730b0507d9285a96fca9716310d572e5464cadd19f22b63a6976254d77a"}, @@ -3750,6 +4018,7 @@ files = [ {file = 
"pyrsistent-0.19.2-py3-none-any.whl", hash = "sha256:ea6b79a02a28550c98b6ca9c35b9f492beaa54d7c5c9e9949555893c8a9234d0"}, {file = "pyrsistent-0.19.2.tar.gz", hash = "sha256:bfa0351be89c9fcbcb8c9879b826f4353be10f58f8a677efab0c017bf7137ec2"}, ] +markers = {main = "extra == \"mesh\" or extra == \"full\""} [[package]] name = "pytest" @@ -3757,6 +4026,7 @@ version = "7.2.1" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "pytest-7.2.1-py3-none-any.whl", hash = "sha256:c7c6ca206e93355074ae32f7403e8ea12163b1163c976fee7d4d84027c162be5"}, {file = "pytest-7.2.1.tar.gz", hash = "sha256:d45e0952f3727241918b8fd0f376f5ff6b301cc0777c6f9a556935c92d8a7d42"}, @@ -3780,6 +4050,7 @@ version = "0.20.2" description = "Pytest support for asyncio" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "pytest-asyncio-0.20.2.tar.gz", hash = "sha256:32a87a9836298a881c0ec637ebcc952cfe23a56436bdc0d09d1511941dd8a812"}, {file = "pytest_asyncio-0.20.2-py3-none-any.whl", hash = "sha256:07e0abf9e6e6b95894a39f688a4a875d63c2128f76c02d03d16ccbc35bcc0f8a"}, @@ -3797,6 +4068,7 @@ version = "3.0.0" description = "Pytest plugin for measuring coverage." optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"}, {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, @@ -3815,6 +4087,7 @@ version = "2.8.2" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main", "dev", "docs"] files = [ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, @@ -3829,6 +4102,8 @@ version = "1.0.0" description = "Read key-value pairs from a .env file and set them as environment variables" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"milvus\"" files = [ {file = "python-dotenv-1.0.0.tar.gz", hash = "sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba"}, {file = "python_dotenv-1.0.0-py3-none-any.whl", hash = "sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a"}, @@ -3843,6 +4118,8 @@ version = "3.3.0" description = "JOSE implementation in Python" optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"jac\"" files = [ {file = "python-jose-3.3.0.tar.gz", hash = "sha256:55779b5e6ad599c6336191246e95eb2293a9ddebd555f796a65f838f07e5d78a"}, {file = "python_jose-3.3.0-py2.py3-none-any.whl", hash = "sha256:9b1376b023f8b298536eedd47ae1089bcdb848f1535ab30555cd92002d78923a"}, @@ -3864,6 +4141,7 @@ version = "2.0.7" description = "A python library adding a json log formatter" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "python-json-logger-2.0.7.tar.gz", hash = "sha256:23e7ec02d34237c5aa1e29a070193a4ea87583bb4e7f8fd06d3de8264c4b2e1c"}, {file = "python_json_logger-2.0.7-py3-none-any.whl", hash = "sha256:f380b826a991ebbe3de4d897aeec42760035ac760345e57b812938dc8b35e2bd"}, @@ -3875,10 +4153,12 @@ version = "2022.6" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" +groups = 
["main", "dev"] files = [ {file = "pytz-2022.6-py2.py3-none-any.whl", hash = "sha256:222439474e9c98fced559f1709d89e6c9cbf8d79c794ff3eb9f8800064291427"}, {file = "pytz-2022.6.tar.gz", hash = "sha256:e89512406b793ca39f5971bc999cc538ce125c0e51c27941bef4568b460095e2"}, ] +markers = {main = "extra == \"pandas\" or extra == \"full\" or extra == \"milvus\""} [[package]] name = "pywin32" @@ -3886,6 +4166,7 @@ version = "305" description = "Python for Window Extensions" optional = false python-versions = "*" +groups = ["main", "dev"] files = [ {file = "pywin32-305-cp310-cp310-win32.whl", hash = "sha256:421f6cd86e84bbb696d54563c48014b12a23ef95a14e0bdba526be756d89f116"}, {file = "pywin32-305-cp310-cp310-win_amd64.whl", hash = "sha256:73e819c6bed89f44ff1d690498c0a811948f73777e5f97c494c152b850fad478"}, @@ -3902,6 +4183,7 @@ files = [ {file = "pywin32-305-cp39-cp39-win32.whl", hash = "sha256:9d968c677ac4d5cbdaa62fd3014ab241718e619d8e36ef8e11fb930515a1e918"}, {file = "pywin32-305-cp39-cp39-win_amd64.whl", hash = "sha256:50768c6b7c3f0b38b7fb14dd4104da93ebced5f1a50dc0e834594bff6fbe1271"}, ] +markers = {main = "extra == \"jac\" and sys_platform == \"win32\" or extra == \"qdrant\" and platform_system == \"Windows\" and python_version < \"3.12\"", dev = "platform_python_implementation != \"PyPy\" and sys_platform == \"win32\""} [[package]] name = "pywinpty" @@ -3909,6 +4191,8 @@ version = "2.0.9" description = "Pseudo terminal support for Windows from Python." optional = false python-versions = ">=3.7" +groups = ["dev"] +markers = "os_name == \"nt\"" files = [ {file = "pywinpty-2.0.9-cp310-none-win_amd64.whl", hash = "sha256:30a7b371446a694a6ce5ef906d70ac04e569de5308c42a2bdc9c3bc9275ec51f"}, {file = "pywinpty-2.0.9-cp311-none-win_amd64.whl", hash = "sha256:d78ef6f4bd7a6c6f94dc1a39ba8fb028540cc39f5cb593e756506db17843125f"}, @@ -3924,6 +4208,7 @@ version = "6.0" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.6" +groups = ["main", "dev", "docs"] files = [ {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, @@ -3966,6 +4251,7 @@ files = [ {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, ] +markers = {main = "extra == \"jac\""} [[package]] name = "pyyaml-env-tag" @@ -3973,6 +4259,7 @@ version = "0.1" description = "A custom YAML tag for referencing environment variables in YAML files. 
" optional = false python-versions = ">=3.6" +groups = ["docs"] files = [ {file = "pyyaml_env_tag-0.1-py3-none-any.whl", hash = "sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069"}, {file = "pyyaml_env_tag-0.1.tar.gz", hash = "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb"}, @@ -3987,6 +4274,7 @@ version = "24.0.1" description = "Python bindings for 0MQ" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "pyzmq-24.0.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:28b119ba97129d3001673a697b7cce47fe6de1f7255d104c2f01108a5179a066"}, {file = "pyzmq-24.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bcbebd369493d68162cddb74a9c1fcebd139dfbb7ddb23d8f8e43e6c87bac3a6"}, @@ -4074,6 +4362,8 @@ version = "1.9.0" description = "Client library for the Qdrant vector search engine" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"qdrant\" and python_version < \"3.12\"" files = [ {file = "qdrant_client-1.9.0-py3-none-any.whl", hash = "sha256:ee02893eab1f642481b1ac1e38eb68ec30bab0f673bef7cc05c19fa5d2cbf43e"}, {file = "qdrant_client-1.9.0.tar.gz", hash = "sha256:7b1792f616651a6f0a76312f945c13d088e9451726795b82ce0350f7df3b7981"}, @@ -4097,6 +4387,8 @@ version = "4.6.0" description = "Python client for Redis database and key-value store" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"redis\"" files = [ {file = "redis-4.6.0-py3-none-any.whl", hash = "sha256:e2b03db868160ee4591de3cb90d40ebb50a90dd302138775937f6a42b7ed183c"}, {file = "redis-4.6.0.tar.gz", hash = "sha256:585dc516b9eb042a619ef0a39c3d7d55fe81bdb4df09a52c9cdde0d07bf1aa7d"}, @@ -4115,6 +4407,7 @@ version = "2022.10.31" description = "Alternative regular expression module, to replace re." optional = false python-versions = ">=3.6" +groups = ["docs"] files = [ {file = "regex-2022.10.31-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a8ff454ef0bb061e37df03557afda9d785c905dab15584860f982e88be73015f"}, {file = "regex-2022.10.31-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1eba476b1b242620c266edf6325b443a2e22b633217a9835a52d8da2b5c051f9"}, @@ -4212,6 +4505,7 @@ version = "2.28.2" description = "Python HTTP for Humans." 
optional = false python-versions = ">=3.7, <4" +groups = ["main", "dev", "docs"] files = [ {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, @@ -4233,6 +4527,7 @@ version = "0.1.4" description = "A pure python RFC3339 validator" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["dev"] files = [ {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, @@ -4247,6 +4542,7 @@ version = "1.5.0" description = "Validating URI References per RFC 3986" optional = false python-versions = "*" +groups = ["main", "dev"] files = [ {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, @@ -4264,6 +4560,7 @@ version = "0.1.1" description = "Pure python rfc3986 validator" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["dev"] files = [ {file = "rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9"}, {file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"}, @@ -4275,6 +4572,7 @@ version = "13.1.0" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.7.0" +groups = ["main"] files = [ {file = "rich-13.1.0-py3-none-any.whl", hash = "sha256:f846bff22a43e8508aebf3f0f2410ce1c6f4cde429098bd58d91fde038c57299"}, {file = "rich-13.1.0.tar.gz", hash = "sha256:81c73a30b144bbcdedc13f4ea0b6ffd7fdc3b0d3cc259a9402309c8e4aee1964"}, @@ -4294,6 +4592,8 @@ version = "4.9" description = "Pure-Python RSA implementation" optional = true python-versions = ">=3.6,<4" +groups = ["main"] +markers = "extra == \"jac\"" files = [ {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, @@ -4308,6 +4608,8 @@ version = "1.0.1" description = "R-Tree spatial index for Python GIS" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"mesh\" or extra == \"full\"" files = [ {file = "Rtree-1.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9855b8f11cdad99c56eb361b7b632a4fbd3d8cbe3f2081426b445f0cfb7fdca9"}, {file = "Rtree-1.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:18ce7e4d04b85c48f2d364835620b3b20e38e199639746e7b12f07a2303e18ff"}, @@ -4362,6 +4664,7 @@ version = "0.0.243" description = "An extremely fast Python linter, written in Rust." 
optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "ruff-0.0.243-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:242571d79d3d7a1e441e88b0cf2814b24bfc4e3a073e5d82df81aa52ad829e4c"}, {file = "ruff-0.0.243-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:4fd0ef0dddd7ccce6457cca556baf51504c11f7deaaa5944a47c5e0c6c3b1425"}, @@ -4387,6 +4690,8 @@ version = "0.6.0" description = "An Amazon S3 Transfer Manager" optional = true python-versions = ">= 3.7" +groups = ["main"] +markers = "extra == \"aws\"" files = [ {file = "s3transfer-0.6.0-py3-none-any.whl", hash = "sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd"}, {file = "s3transfer-0.6.0.tar.gz", hash = "sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"}, @@ -4404,6 +4709,8 @@ version = "1.9.3" description = "Fundamental algorithms for scientific computing in Python" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"mesh\" or extra == \"full\" or extra == \"jax\"" files = [ {file = "scipy-1.9.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1884b66a54887e21addf9c16fb588720a8309a57b2e258ae1c7986d4444d3bc0"}, {file = "scipy-1.9.3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:83b89e9586c62e787f5012e8475fbb12185bafb996a03257e9675cd73d3736dd"}, @@ -4442,6 +4749,7 @@ version = "1.8.0" description = "Send file to trash natively under Mac OS X, Windows and Linux." optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "Send2Trash-1.8.0-py3-none-any.whl", hash = "sha256:f20eaadfdb517eaca5ce077640cb261c7d2698385a6a0f072a4a5447fd49fa08"}, {file = "Send2Trash-1.8.0.tar.gz", hash = "sha256:d2c24762fd3759860a0aff155e45871447ea58d2be6bdd39b5c8f966a0c99c2d"}, @@ -4458,6 +4766,8 @@ version = "1.38.0" description = "Python client for Sentry (https://sentry.io)" optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"epsilla\"" files = [ {file = "sentry-sdk-1.38.0.tar.gz", hash = "sha256:8feab81de6bbf64f53279b085bd3820e3e737403b0a0d9317f73a2c3374ae359"}, {file = "sentry_sdk-1.38.0-py2.py3-none-any.whl", hash = "sha256:0017fa73b8ae2d4e57fd2522ee3df30453715b29d2692142793ec5d5f90b94a6"}, @@ -4503,10 +4813,12 @@ version = "70.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "setuptools-70.0.0-py3-none-any.whl", hash = "sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4"}, {file = "setuptools-70.0.0.tar.gz", hash = "sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0"}, ] +markers = {main = "extra == \"mesh\" or extra == \"full\" or extra == \"qdrant\" and python_version < \"3.12\""} [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] @@ -4518,6 +4830,8 @@ version = "2.0.1" description = "Manipulation and analysis of geometric objects" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"mesh\" or extra == \"full\"" files = [ {file = "shapely-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b06d031bc64149e340448fea25eee01360a58936c89985cf584134171e05863f"}, {file = 
"shapely-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9a6ac34c16f4d5d3c174c76c9d7614ec8fe735f8f82b6cc97a46b54f386a86bf"}, @@ -4572,6 +4886,7 @@ version = "1.16.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +groups = ["main", "dev", "docs"] files = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, @@ -4583,6 +4898,8 @@ version = "6.3.0" description = "Utils for streaming large files (S3, HDFS, GCS, Azure Blob Storage, gzip, bz2...)" optional = true python-versions = ">=3.6,<4.0" +groups = ["main"] +markers = "extra == \"aws\"" files = [ {file = "smart_open-6.3.0-py3-none-any.whl", hash = "sha256:b4c9ae193ad6d3e7add50944b86afa0d150bd821ab8ec21edb26d9a06b66f6a8"}, {file = "smart_open-6.3.0.tar.gz", hash = "sha256:d5238825fe9a9340645fac3d75b287c08fbb99fb2b422477de781c9f5f09e019"}, @@ -4607,6 +4924,7 @@ version = "1.3.0" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, @@ -4618,6 +4936,7 @@ version = "2.3.2.post1" description = "A modern CSS selector implementation for Beautiful Soup." optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "soupsieve-2.3.2.post1-py3-none-any.whl", hash = "sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759"}, {file = "soupsieve-2.3.2.post1.tar.gz", hash = "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"}, @@ -4629,6 +4948,8 @@ version = "0.27.0" description = "The little ASGI library that shines." optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"web\"" files = [ {file = "starlette-0.27.0-py3-none-any.whl", hash = "sha256:918416370e846586541235ccd38a474c08b80443ed31c578a418e2209b3eef91"}, {file = "starlette-0.27.0.tar.gz", hash = "sha256:6a6b0d042acb8d469a01eba54e9cda6cbd24ac602c4cd016723117d6a7e73b75"}, @@ -4647,6 +4968,8 @@ version = "6.2" description = "SVG path objects and parser" optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"mesh\" or extra == \"full\"" files = [ {file = "svg.path-6.2-py2.py3-none-any.whl", hash = "sha256:c3b12e6d372771b466078837252eb13b655ea2658437c426cc67fc6262433dc8"}, {file = "svg.path-6.2.tar.gz", hash = "sha256:1a2159f9db898df93c4637cfd3ccaf7da1fd073f59fa9a5950c73e46d4aa1aca"}, @@ -4661,6 +4984,8 @@ version = "1.10.1" description = "Computer algebra system (CAS) in Python" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"mesh\" or extra == \"full\" or extra == \"torch\"" files = [ {file = "sympy-1.10.1-py3-none-any.whl", hash = "sha256:df75d738930f6fe9ebe7034e59d56698f29e85f443f743e51e47df0caccc2130"}, {file = "sympy-1.10.1.tar.gz", hash = "sha256:5939eeffdf9e152172601463626c022a2c27e75cf6278de8d401d50c9d58787b"}, @@ -4675,6 +5000,7 @@ version = "0.17.0" description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." 
optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "terminado-0.17.0-py3-none-any.whl", hash = "sha256:bf6fe52accd06d0661d7611cc73202121ec6ee51e46d8185d489ac074ca457c2"}, {file = "terminado-0.17.0.tar.gz", hash = "sha256:520feaa3aeab8ad64a69ca779be54be9234edb2d0d6567e76c93c2c9a4e6e43f"}, @@ -4695,6 +5021,7 @@ version = "1.2.1" description = "A tiny CSS parser" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "tinycss2-1.2.1-py3-none-any.whl", hash = "sha256:2b80a96d41e7c3914b8cda8bc7f705a4d9c49275616e886103dd839dfc847847"}, {file = "tinycss2-1.2.1.tar.gz", hash = "sha256:8cff3a8f066c2ec677c06dbc7b45619804a6938478d9d73c284b29d14ecb0627"}, @@ -4713,6 +5040,7 @@ version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +groups = ["dev"] files = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, @@ -4724,6 +5052,7 @@ version = "2.0.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, @@ -4735,6 +5064,8 @@ version = "2.0.1" description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" optional = true python-versions = ">=3.8.0" +groups = ["main"] +markers = "extra == \"torch\"" files = [ {file = "torch-2.0.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:8ced00b3ba471856b993822508f77c98f48a458623596a4c43136158781e306a"}, {file = "torch-2.0.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:359bfaad94d1cda02ab775dc1cc386d585712329bb47b8741607ef6ef4950747"}, @@ -4774,6 +5105,7 @@ version = "6.4.1" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8"}, {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14"}, @@ -4794,6 +5126,8 @@ version = "4.65.0" description = "Fast, Extensible Progress Meter" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"weaviate\"" files = [ {file = "tqdm-4.65.0-py3-none-any.whl", hash = "sha256:c4f53a17fe37e132815abceec022631be8ffe1b9381c2e6e30aa70edc99e9671"}, {file = "tqdm-4.65.0.tar.gz", hash = "sha256:1871fb68a86b8fb3b59ca4cdd3dcccbc7e6d613eeed31f4c332531977b89beb5"}, @@ -4814,6 +5148,7 @@ version = "5.5.0" description = "" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "traitlets-5.5.0-py3-none-any.whl", hash = "sha256:1201b2c9f76097195989cdf7f65db9897593b0dfd69e4ac96016661bb6f0d30f"}, {file = "traitlets-5.5.0.tar.gz", hash = "sha256:b122f9ff2f2f6c1709dab289a05555be011c87828e911c0cf4074b85cb780a79"}, @@ -4829,6 +5164,8 @@ version = "3.21.2" description = "Import, export, process, analyze and view triangular meshes." 
optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"mesh\" or extra == \"full\"" files = [ {file = "trimesh-3.21.2-py3-none-any.whl", hash = "sha256:8a575f2ee4dc77fc680aa1545c3503619027c75d09bcc4878d29d5f32e0476ee"}, {file = "trimesh-3.21.2.tar.gz", hash = "sha256:5513c4fb540b286cb95bdf626b906e3cdb661ff7a85ee940a52f27f1275049a4"}, @@ -4864,6 +5201,8 @@ version = "9.3.0.1" description = "Typing stubs for Pillow" optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"image\" or extra == \"full\"" files = [ {file = "types-Pillow-9.3.0.1.tar.gz", hash = "sha256:f3b7cada3fa496c78d75253c6b1f07a843d625f42e5639b320a72acaff6f7cfb"}, {file = "types_Pillow-9.3.0.1-py3-none-any.whl", hash = "sha256:79837755fe9659f29efd1016e9903ac4a500e0c73260483f07296bd6ca47668b"}, @@ -4875,6 +5214,7 @@ version = "3.20.4.5" description = "Typing stubs for protobuf" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "types-protobuf-3.20.4.5.tar.gz", hash = "sha256:e9b45008d106e1d10cc77a29d2d344b85c0f01e2e643aaccf32f69e9e81b0cdd"}, {file = "types_protobuf-3.20.4.5-py3-none-any.whl", hash = "sha256:97af5ce70d890fdb94cb0c906f5a6624ca2fef58bc04e27990a25509e992a950"}, @@ -4886,6 +5226,7 @@ version = "23.2.0.1" description = "Typing stubs for pyOpenSSL" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "types-pyOpenSSL-23.2.0.1.tar.gz", hash = "sha256:beeb5d22704c625a1e4b6dc756355c5b4af0b980138b702a9d9f932acf020903"}, {file = "types_pyOpenSSL-23.2.0.1-py3-none-any.whl", hash = "sha256:0568553f104466f1b8e0db3360fbe6770137d02e21a1a45c209bf2b1b03d90d4"}, @@ -4900,6 +5241,7 @@ version = "2.8.19.20240106" description = "Typing stubs for python-dateutil" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types-python-dateutil-2.8.19.20240106.tar.gz", hash = "sha256:1f8db221c3b98e6ca02ea83a58371b22c374f42ae5bbdf186db9c9a76581459f"}, {file = "types_python_dateutil-2.8.19.20240106-py3-none-any.whl", hash = "sha256:efbbdc54590d0f16152fa103c9879c7d4a00e82078f6e2cf01769042165acaa2"}, @@ -4911,6 +5253,7 @@ version = "4.6.0.0" description = "Typing stubs for redis" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "types-redis-4.6.0.0.tar.gz", hash = "sha256:4ad588026d89ba72eae29b6276448ea117d77e5e4df258c0429d274da652ef9c"}, {file = "types_redis-4.6.0.0-py3-none-any.whl", hash = "sha256:528038f32a0a2642e00d9c80dd95879a348ced6071bb747c746c0cb1ad06426c"}, @@ -4926,6 +5269,7 @@ version = "2.28.11.7" description = "Typing stubs for requests" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "types-requests-2.28.11.7.tar.gz", hash = "sha256:0ae38633734990d019b80f5463dfa164ebd3581998ac8435f526da6fe4d598c3"}, {file = "types_requests-2.28.11.7-py3-none-any.whl", hash = "sha256:b6a2fca8109f4fdba33052f11ed86102bddb2338519e1827387137fefc66a98b"}, @@ -4940,6 +5284,7 @@ version = "1.26.25.4" description = "Typing stubs for urllib3" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "types-urllib3-1.26.25.4.tar.gz", hash = "sha256:eec5556428eec862b1ac578fb69aab3877995a99ffec9e5a12cf7fbd0cc9daee"}, {file = "types_urllib3-1.26.25.4-py3-none-any.whl", hash = "sha256:ed6b9e8a8be488796f72306889a06a3fc3cb1aa99af02ab8afb50144d7317e49"}, @@ -4951,10 +5296,12 @@ version = "4.7.1" description = "Backported and Experimental Type Hints for Python 3.7+" optional = false python-versions = ">=3.7" +groups = ["main", "dev", "docs"] files = [ {file = 
"typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, ] +markers = {docs = "python_version < \"3.10\""} [[package]] name = "typing-inspect" @@ -4962,6 +5309,7 @@ version = "0.8.0" description = "Runtime inspection utilities for typing module." optional = false python-versions = "*" +groups = ["main"] files = [ {file = "typing_inspect-0.8.0-py3-none-any.whl", hash = "sha256:5fbf9c1e65d4fa01e701fe12a5bca6c6e08a4ffd5bc60bfac028253a447c5188"}, {file = "typing_inspect-0.8.0.tar.gz", hash = "sha256:8b1ff0c400943b6145df8119c41c244ca8207f1f10c9c057aeed1560e4806e3d"}, @@ -4977,6 +5325,8 @@ version = "2023.3" description = "Provider of IANA time zone data" optional = true python-versions = ">=2" +groups = ["main"] +markers = "extra == \"pandas\" or extra == \"full\" or extra == \"milvus\"" files = [ {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, @@ -4988,6 +5338,8 @@ version = "5.8.0" description = "Ultra fast JSON encoder and decoder for Python" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"milvus\"" files = [ {file = "ujson-5.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f4511560d75b15ecb367eef561554959b9d49b6ec3b8d5634212f9fed74a6df1"}, {file = "ujson-5.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9399eaa5d1931a0ead49dce3ffacbea63f3177978588b956036bfe53cdf6af75"}, @@ -5058,6 +5410,7 @@ version = "1.3.0" description = "RFC 6570 URI Template Processor" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "uri-template-1.3.0.tar.gz", hash = "sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7"}, {file = "uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363"}, @@ -5072,6 +5425,7 @@ version = "1.26.19" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +groups = ["main", "dev", "docs"] files = [ {file = "urllib3-1.26.19-py2.py3-none-any.whl", hash = "sha256:37a0344459b199fce0e80b0d3569837ec6b6937435c5244e7fd73fa6006830f3"}, {file = "urllib3-1.26.19.tar.gz", hash = "sha256:3e3d753a8618b86d7de333b4223005f68720bcd6a7d2bcb9fbd2229ec7c1e429"}, @@ -5088,6 +5442,7 @@ version = "0.19.0" description = "The lightning-fast ASGI server." optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "uvicorn-0.19.0-py3-none-any.whl", hash = "sha256:cc277f7e73435748e69e075a721841f7c4a95dba06d12a72fe9874acced16f6f"}, {file = "uvicorn-0.19.0.tar.gz", hash = "sha256:cf538f3018536edb1f4a826311137ab4944ed741d52aeb98846f52215de57f25"}, @@ -5106,6 +5461,8 @@ version = "0.20.0" description = "Python Data Validation for Humans™." 
optional = true python-versions = ">=3.4" +groups = ["main"] +markers = "extra == \"weaviate\"" files = [ {file = "validators-0.20.0.tar.gz", hash = "sha256:24148ce4e64100a2d5e267233e23e7afeb55316b47d30faae7eb6e7292bc226a"}, ] @@ -5122,6 +5479,7 @@ version = "20.16.7" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "virtualenv-20.16.7-py3-none-any.whl", hash = "sha256:efd66b00386fdb7dbe4822d172303f40cd05e50e01740b19ea42425cbe653e29"}, {file = "virtualenv-20.16.7.tar.gz", hash = "sha256:8691e3ff9387f743e00f6bb20f70121f5e4f596cae754531f2b3b3a1b1ac696e"}, @@ -5142,6 +5500,7 @@ version = "2.3.1" description = "Filesystem events monitoring" optional = false python-versions = ">=3.6" +groups = ["docs"] files = [ {file = "watchdog-2.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d1f1200d4ec53b88bf04ab636f9133cb703eb19768a39351cee649de21a33697"}, {file = "watchdog-2.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:564e7739abd4bd348aeafbf71cc006b6c0ccda3160c7053c4a53b67d14091d42"}, @@ -5182,6 +5541,7 @@ version = "8.4.1" description = "Wildcard/glob file name matcher." optional = false python-versions = ">=3.7" +groups = ["docs"] files = [ {file = "wcmatch-8.4.1-py3-none-any.whl", hash = "sha256:3476cd107aba7b25ba1d59406938a47dc7eec6cfd0ad09ff77193f21a964dee7"}, {file = "wcmatch-8.4.1.tar.gz", hash = "sha256:b1f042a899ea4c458b7321da1b5e3331e3e0ec781583434de1301946ceadb943"}, @@ -5196,6 +5556,7 @@ version = "0.2.5" description = "Measures the displayed width of unicode strings in a terminal" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, @@ -5207,6 +5568,8 @@ version = "3.17.1" description = "A python native weaviate client" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"weaviate\"" files = [ {file = "weaviate-client-3.17.1.tar.gz", hash = "sha256:04277030396a0e63e73b994a185c705f07f948254d27c0a3774c60b4795c37ab"}, {file = "weaviate_client-3.17.1-py3-none-any.whl", hash = "sha256:0c86f4d5fcb155efd0888515c8caa20364241c0df01dead361ce0c023dbc5da9"}, @@ -5227,6 +5590,7 @@ version = "1.13" description = "A library for working with the color formats defined by HTML and CSS." 
optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "webcolors-1.13-py3-none-any.whl", hash = "sha256:29bc7e8752c0a1bd4a1f03c14d6e6a72e93d82193738fa860cbff59d0fcc11bf"}, {file = "webcolors-1.13.tar.gz", hash = "sha256:c225b674c83fa923be93d235330ce0300373d02885cef23238813b0d5668304a"}, @@ -5242,6 +5606,7 @@ version = "0.5.1" description = "Character encoding aliases for legacy web content" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, @@ -5253,10 +5618,12 @@ version = "1.4.2" description = "WebSocket client for Python with low level API options" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "websocket-client-1.4.2.tar.gz", hash = "sha256:d6e8f90ca8e2dd4e8027c4561adeb9456b54044312dba655e7cae652ceb9ae59"}, {file = "websocket_client-1.4.2-py3-none-any.whl", hash = "sha256:d6b06432f184438d99ac1f456eaf22fe1ade524c3dd16e661142dc54e9cba574"}, ] +markers = {main = "extra == \"jac\""} [package.extras] docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"] @@ -5269,6 +5636,8 @@ version = "3.2.0" description = "Python binding for xxHash" optional = true python-versions = ">=3.6" +groups = ["main"] +markers = "extra == \"mesh\" or extra == \"full\"" files = [ {file = "xxhash-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:af44b9e59c4b2926a4e3c7f9d29949ff42fcea28637ff6b8182e654461932be8"}, {file = "xxhash-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1bdd57973e2b802ef32553d7bebf9402dac1557874dbe5c908b499ea917662cd"}, @@ -5376,6 +5745,7 @@ version = "0.6.2" description = "Python bindings for the Y-CRDT built from yrs (Rust)" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "y_py-0.6.2-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:c26bada6cd109095139237a46f50fc4308f861f0d304bc9e70acbc6c4503d158"}, {file = "y_py-0.6.2-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:bae1b1ad8d2b8cf938a60313f8f7461de609621c5dcae491b6e54975f76f83c5"}, @@ -5459,6 +5829,8 @@ version = "1.8.2" description = "Yet another URL library" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"jac\"" files = [ {file = "yarl-1.8.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:bb81f753c815f6b8e2ddd2eef3c855cf7da193b82396ac013c661aaa6cc6b0a5"}, {file = "yarl-1.8.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:47d49ac96156f0928f002e2424299b2c91d9db73e08c4cd6742923a086f1c863"}, @@ -5546,6 +5918,7 @@ version = "0.8.4" description = "WebSocket connector for Ypy" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "ypy_websocket-0.8.4-py3-none-any.whl", hash = "sha256:b1ba0dfcc9762f0ca168d2378062d3ca1299d39076b0f145d961359121042be5"}, {file = "ypy_websocket-0.8.4.tar.gz", hash = "sha256:43a001473f5c8abcf182f603049cf305cbc855ad8deaa9dfa0f3b5a7cea9d0ff"}, @@ -5565,10 +5938,12 @@ version = "3.19.1" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" +groups = ["main", "dev", "docs"] files = [ {file = "zipp-3.19.1-py3-none-any.whl", hash = "sha256:2828e64edb5386ea6a52e7ba7cdb17bb30a73a858f5eb6eb93d8d36f5ea26091"}, {file = "zipp-3.19.1.tar.gz", hash = 
"sha256:35427f6d5594f4acf82d25541438348c26736fa9b3afa2754bcd63cdb99d8e8f"}, ] +markers = {main = "extra == \"full\" and python_version < \"3.10\" or extra == \"jac\" or extra == \"jax\" and python_version < \"3.10\" or python_version < \"3.9\" and extra == \"mesh\"", dev = "python_version < \"3.10\"", docs = "python_version < \"3.10\""} [package.extras] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] @@ -5597,6 +5972,6 @@ weaviate = ["weaviate-client"] web = ["fastapi"] [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = ">=3.8,<4.0" -content-hash = "afd26d2453ce8edd6f5021193af4bfd2a449de2719e5fe67bcaea2fbcc98d055" +content-hash = "54f80037bc31c4c0bd45031ae108cde04520dfe02233c62113c337c668ac62be" diff --git a/pyproject.toml b/pyproject.toml index efbfcb4fbb..5c28a39a0a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -63,6 +63,7 @@ redis = {version = "^4.6.0", optional = true} jax = {version = ">=0.4.10", optional = true} pyepsilla = {version = ">=0.2.3", optional = true} pymongo = {version = ">=4.6.2", optional = true} +opensearch-py = {version = "2.6.0", optional = true} [tool.poetry.extras] proto = ["protobuf", "lz4"] @@ -167,4 +168,5 @@ markers = [ "elasticv8: marks test that run with ElasticSearch v8", "jac: need to have access to jac cloud", "atlas: mark tests using MongoDB Atlas", + "opensearchv2: mark tests using OpenSearchV2", ] diff --git a/tests/index/opensearch/docker-compose.yml b/tests/index/opensearch/docker-compose.yml new file mode 100644 index 0000000000..aa941a284a --- /dev/null +++ b/tests/index/opensearch/docker-compose.yml @@ -0,0 +1,13 @@ +version: "3.3" +services: + opensearch: + image: opensearchproject/opensearch:latest + environment: + # - xpack.security.enabled=false + - plugins.security.ssl.http.enabled=false + - discovery.type=single-node + - ES_JAVA_OPTS=-Xmx1024m + - plugins.security.disabled=true + - OPENSEARCH_INITIAL_ADMIN_PASSWORD=vV911:PknTd@ + ports: + - "9200:9200" diff --git a/tests/index/opensearch/fixture.py b/tests/index/opensearch/fixture.py new file mode 100644 index 0000000000..16c989287f --- /dev/null +++ b/tests/index/opensearch/fixture.py @@ -0,0 +1,107 @@ +# Licensed to the LF AI & Data foundation under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import os +import time +import uuid + +import numpy as np +import pytest +from pydantic import Field + +from docarray import BaseDoc +from docarray.documents import ImageDoc +from docarray.typing import NdArray + +pytestmark = [pytest.mark.slow, pytest.mark.index] + +cur_dir = os.path.dirname(os.path.abspath(__file__)) +compose_yml = os.path.abspath(os.path.join(cur_dir, "docker-compose.yml")) + +print("FILE", compose_yml) + + +@pytest.fixture(scope="module", autouse=True) +def start_storage(): + os.system(f"docker compose -f {compose_yml} up -d --remove-orphans") + _wait_for_os() + + yield + os.system(f"docker compose -f {compose_yml} down --remove-orphans") + + +def _wait_for_os(): + from opensearchpy import OpenSearch + + es = OpenSearch(hosts="http://localhost:9200/", auth=("admin", "vV911:PknTd@")) + while not es.ping(): + time.sleep(0.5) + + +class SimpleDoc(BaseDoc): + tens: NdArray[10] = Field(dimension=1000) + + +class FlatDoc(BaseDoc): + tens_one: NdArray = Field(dimension=10) + tens_two: NdArray = Field(dimension=50) + + +class NestedDoc(BaseDoc): + d: SimpleDoc + + +class DeepNestedDoc(BaseDoc): + d: NestedDoc + + +class MyImageDoc(ImageDoc): + embedding: NdArray = Field(dimension=128) + + +@pytest.fixture(scope="function") +def ten_simple_docs(): + return [SimpleDoc(tens=np.random.randn(10)) for _ in range(10)] + + +@pytest.fixture(scope="function") +def ten_flat_docs(): + return [ + FlatDoc(tens_one=np.random.randn(10), tens_two=np.random.randn(50)) + for _ in range(10) + ] + + +@pytest.fixture(scope="function") +def ten_nested_docs(): + return [NestedDoc(d=SimpleDoc(tens=np.random.randn(10))) for _ in range(10)] + + +@pytest.fixture(scope="function") +def ten_deep_nested_docs(): + return [ + DeepNestedDoc(d=NestedDoc(d=SimpleDoc(tens=np.random.randn(10)))) + for _ in range(10) + ] + + +@pytest.fixture(scope="function") +def tmp_index_name(): + return uuid.uuid4().hex + + +@pytest.fixture(scope="function") +def auth(): + return {"admin": "vV911:PknTd@"} diff --git a/tests/index/opensearch/test_column_config_os.py b/tests/index/opensearch/test_column_config_os.py new file mode 100644 index 0000000000..563352141f --- /dev/null +++ b/tests/index/opensearch/test_column_config_os.py @@ -0,0 +1,149 @@ +import pytest +from pydantic import Field + +from docarray import BaseDoc +from docarray.index import OpenSearchDocIndex +from tests.index.opensearch.fixture import ( # noqa: F401 + auth, + start_storage, + tmp_index_name, +) + +pytestmark = [pytest.mark.slow, pytest.mark.index, pytest.mark.opensearchv2] + + +def test_column_config(tmp_index_name, auth): # noqa: F811 + class MyDoc(BaseDoc): + text: str + color: str = Field(col_type='keyword') + + index = OpenSearchDocIndex[MyDoc](index_name=tmp_index_name, auth=auth) + index_docs = [ + MyDoc(id='0', text='hello world', color='red'), + MyDoc(id='1', text='never gonna give you up', color='blue'), + MyDoc(id='2', text='we are the world', color='green'), + ] + index.index(index_docs) + + query = 'world' + docs, _ = index.text_search(query, search_field='text') + assert [doc.id for doc in docs] == ['0', '2'] + + filter_query = {'terms': {'color': ['red', 'blue']}} + docs = index.filter(filter_query) + assert [doc.id for doc in docs] == ['0', '1'] + + +def test_field_object(tmp_index_name, auth): # noqa: F811 + class MyDoc(BaseDoc): + manager: dict = Field( + properties={ + 'age': {'type': 'integer'}, + 'name': { + 'properties': { + 'first': {'type': 'keyword'}, + 'last': {'type': 'keyword'}, + } + }, + } + ) + + index = 
OpenSearchDocIndex[MyDoc](index_name=tmp_index_name, auth=auth) + doc = [ + MyDoc(manager={'age': 25, 'name': {'first': 'Rachel', 'last': 'Green'}}), + MyDoc(manager={'age': 30, 'name': {'first': 'Monica', 'last': 'Geller'}}), + MyDoc(manager={'age': 35, 'name': {'first': 'Phoebe', 'last': 'Buffay'}}), + ] + index.index(doc) + id_ = doc[0].id + assert index[id_].id == id_ + assert index[id_].manager == doc[0].manager + + filter_query = {'range': {'manager.age': {'gte': 30}}} + docs = index.filter(filter_query) + assert [doc.id for doc in docs] == [doc[1].id, doc[2].id] + + +def test_field_geo_point(tmp_index_name, auth): # noqa: F811 + class MyDoc(BaseDoc): + location: dict = Field(col_type='geo_point') + + index = OpenSearchDocIndex[MyDoc](index_name=tmp_index_name, auth=auth) + doc = [ + MyDoc(location={'lat': 40.12, 'lon': -72.34}), + MyDoc(location={'lat': 41.12, 'lon': -73.34}), + MyDoc(location={'lat': 42.12, 'lon': -74.34}), + ] + index.index(doc) + + query = { + 'query': { + 'geo_bounding_box': { + 'location': { + 'top_left': {'lat': 42, 'lon': -74}, + 'bottom_right': {'lat': 40, 'lon': -72}, + } + } + }, + } + + docs, _ = index.execute_query(query) + assert [doc['id'] for doc in docs] == [doc[0].id, doc[1].id] + + +def test_field_range(tmp_index_name, auth): # noqa: F811 + class MyDoc(BaseDoc): + expected_attendees: dict = Field(col_type='integer_range') + time_frame: dict = Field(col_type='date_range', format='yyyy-MM-dd') + + index = OpenSearchDocIndex[MyDoc](index_name=tmp_index_name, auth=auth) + doc = [ + MyDoc( + expected_attendees={'gte': 10, 'lt': 20}, + time_frame={'gte': '2023-01-01', 'lt': '2023-02-01'}, + ), + MyDoc( + expected_attendees={'gte': 20, 'lt': 30}, + time_frame={'gte': '2023-02-01', 'lt': '2023-03-01'}, + ), + MyDoc( + expected_attendees={'gte': 30, 'lt': 40}, + time_frame={'gte': '2023-03-01', 'lt': '2023-04-01'}, + ), + ] + index.index(doc) + + query = { + 'query': { + 'bool': { + 'should': [ + {'term': {'expected_attendees': {'value': 15}}}, + { + 'range': { + 'time_frame': { + 'gte': '2023-02-05', + 'lt': '2023-02-10', + 'relation': 'contains', + } + } + }, + ] + } + }, + } + docs, _ = index.execute_query(query) + assert [doc['id'] for doc in docs] == [doc[0].id, doc[1].id] + + +def test_index_name(): + class TextDoc(BaseDoc): + text: str = Field() + + class StringDoc(BaseDoc): + text: str = Field(col_type='text') + + index = OpenSearchDocIndex[TextDoc]() + assert index.index_name == TextDoc.__name__.lower() + + index = OpenSearchDocIndex[StringDoc]() + assert index.index_name == StringDoc.__name__.lower() diff --git a/tests/index/opensearch/test_find_os.py b/tests/index/opensearch/test_find_os.py new file mode 100644 index 0000000000..92f64afeea --- /dev/null +++ b/tests/index/opensearch/test_find_os.py @@ -0,0 +1,377 @@ +import numpy as np +import pytest +import torch +from pydantic import Field + +from docarray import BaseDoc +from docarray.index import OpenSearchDocIndex +from docarray.typing import NdArray, TorchTensor +from tests.index.opensearch.fixture import ( # noqa: F401 + FlatDoc, + SimpleDoc, + auth, + start_storage, + tmp_index_name, +) + +pytestmark = [pytest.mark.slow, pytest.mark.index, pytest.mark.opensearchv2] + + +@pytest.mark.parametrize('similarity', ['cosinesimil', 'l2', 'inner_product']) +def test_find_simple_schema(similarity, tmp_index_name, auth): # noqa: F811 + class SimpleSchema(BaseDoc): + tens: NdArray[10] = Field(similarity=similarity) + + index = OpenSearchDocIndex[SimpleSchema](index_name=tmp_index_name, auth=auth) + + 
index_docs = [] + for _ in range(10): + vec = np.random.rand(10) + if similarity == 'inner_product': + vec = vec / np.linalg.norm(vec) + index_docs.append(SimpleDoc(tens=vec)) + index.index(index_docs) + + query = index_docs[-1] + docs, scores = index.find(query, search_field='tens', limit=5) + + assert len(docs) == 5 + assert len(scores) == 5 + assert docs[0].id == index_docs[-1].id + assert np.allclose(docs[0].tens, index_docs[-1].tens) + + +@pytest.mark.parametrize('similarity', ['cosinesimil', 'l2', 'inner_product']) +def test_find_flat_schema(similarity, tmp_index_name, auth): # noqa: F811 + class FlatSchema(BaseDoc): + tens_one: NdArray = Field(dimension=10, similarity=similarity) + tens_two: NdArray = Field(dimension=50, similarity=similarity) + + index = OpenSearchDocIndex[FlatSchema](index_name=tmp_index_name, auth=auth) + + index_docs = [] + for _ in range(10): + vec_one = np.random.rand(10) + vec_two = np.random.rand(50) + if similarity == 'inner_product': + vec_one = vec_one / np.linalg.norm(vec_one) + vec_two = vec_two / np.linalg.norm(vec_two) + index_docs.append(FlatDoc(tens_one=vec_one, tens_two=vec_two)) + + index.index(index_docs) + + query = index_docs[-1] + + # find on tens_one + docs, scores = index.find(query, search_field='tens_one', limit=5) + assert len(docs) == 5 + assert len(scores) == 5 + assert docs[0].id == index_docs[-1].id + assert np.allclose(docs[0].tens_one, index_docs[-1].tens_one) + assert np.allclose(docs[0].tens_two, index_docs[-1].tens_two) + + # find on tens_two + docs, scores = index.find(query, search_field='tens_two', limit=5) + assert len(docs) == 5 + assert len(scores) == 5 + assert docs[0].id == index_docs[-1].id + assert np.allclose(docs[0].tens_one, index_docs[-1].tens_one) + assert np.allclose(docs[0].tens_two, index_docs[-1].tens_two) + + +@pytest.mark.parametrize('similarity', ['cosinesimil', 'l2', 'inner_product']) +def test_find_nested_schema(similarity, tmp_index_name, auth): # noqa: F811 + class SimpleDoc(BaseDoc): + tens: NdArray[10] = Field(similarity=similarity) + + class NestedDoc(BaseDoc): + d: SimpleDoc + tens: NdArray[10] = Field(similarity=similarity) + + class DeepNestedDoc(BaseDoc): + d: NestedDoc + tens: NdArray = Field(similarity=similarity, dimension=10) + + index = OpenSearchDocIndex[DeepNestedDoc](index_name=tmp_index_name, auth=auth) + + index_docs = [] + for _ in range(10): + vec_simple = np.random.rand(10) + vec_nested = np.random.rand(10) + vec_deep = np.random.rand(10) + if similarity == 'dot_product': + vec_simple = vec_simple / np.linalg.norm(vec_simple) + vec_nested = vec_nested / np.linalg.norm(vec_nested) + vec_deep = vec_deep / np.linalg.norm(vec_deep) + index_docs.append( + DeepNestedDoc( + d=NestedDoc(d=SimpleDoc(tens=vec_simple), tens=vec_nested), + tens=vec_deep, + ) + ) + + index.index(index_docs) + + query = index_docs[-1] + + # find on root level + docs, scores = index.find(query, search_field='tens', limit=5) + assert len(docs) == 5 + assert len(scores) == 5 + assert docs[0].id == index_docs[-1].id + assert np.allclose(docs[0].tens, index_docs[-1].tens) + + # find on first nesting level + docs, scores = index.find(query, search_field='d__tens', limit=5) + assert len(docs) == 5 + assert len(scores) == 5 + assert docs[0].id == index_docs[-1].id + assert np.allclose(docs[0].d.tens, index_docs[-1].d.tens) + + # find on second nesting level + docs, scores = index.find(query, search_field='d__d__tens', limit=5) + assert len(docs) == 5 + assert len(scores) == 5 + assert docs[0].id == index_docs[-1].id + 
assert np.allclose(docs[0].d.d.tens, index_docs[-1].d.d.tens) + + +def test_find_torch(): + class TorchDoc(BaseDoc): + tens: TorchTensor[10] + + index = OpenSearchDocIndex[TorchDoc]() + + # A dense_vector field stores dense vectors of float values. + index_docs = [ + TorchDoc(tens=np.random.rand(10).astype(dtype=np.float32)) for _ in range(10) + ] + index.index(index_docs) + + for doc in index_docs: + assert isinstance(doc.tens, TorchTensor) + + query = index_docs[-1] + docs, scores = index.find(query, search_field='tens', limit=5) + + assert len(docs) == 5 + assert len(scores) == 5 + for doc in docs: + assert isinstance(doc.tens, TorchTensor) + + assert docs[0].id == index_docs[-1].id + assert torch.allclose(docs[0].tens, index_docs[-1].tens) + + +def test_find_tensorflow(): + from docarray.typing import TensorFlowTensor + + class TfDoc(BaseDoc): + tens: TensorFlowTensor[10] + + index = OpenSearchDocIndex[TfDoc]() + + index_docs = [ + TfDoc(tens=np.random.rand(10).astype(dtype=np.float32)) for _ in range(10) + ] + index.index(index_docs) + + for doc in index_docs: + assert isinstance(doc.tens, TensorFlowTensor) + + query = index_docs[-1] + docs, scores = index.find(query, search_field='tens', limit=5) + + assert len(docs) == 5 + assert len(scores) == 5 + for doc in docs: + assert isinstance(doc.tens, TensorFlowTensor) + + assert docs[0].id == index_docs[-1].id + assert np.allclose( + docs[0].tens.unwrap().numpy(), index_docs[-1].tens.unwrap().numpy() + ) + + +def test_find_batched(tmp_index_name, auth): # noqa: F811 + index = OpenSearchDocIndex[SimpleDoc](index_name=tmp_index_name, auth=auth) + + index_docs = [SimpleDoc(tens=np.random.rand(10)) for _ in range(10)] + index.index(index_docs) + + queries = index_docs[-2:] + docs_batched, scores_batched = index.find_batched( + queries, search_field='tens', limit=5 + ) + + for docs, scores, query in zip(docs_batched, scores_batched, queries): + assert len(docs) == 5 + assert len(scores) == 5 + assert docs[0].id == query.id + assert np.allclose(docs[0].tens, query.tens) + + +def test_filter(): + class MyDoc(BaseDoc): + A: bool + B: int + C: float + + index = OpenSearchDocIndex[MyDoc]() + + index_docs = [MyDoc(id=f'{i}', A=(i % 2 == 0), B=i, C=i + 0.5) for i in range(10)] + index.index(index_docs) + + filter_query = {'term': {'A': True}} + docs = index.filter(filter_query) + assert len(docs) > 0 + for doc in docs: + assert doc.A + + filter_query = { + 'bool': { + 'filter': [ + {'terms': {'B': [3, 4, 7, 8]}}, + {'range': {'C': {'gte': 3, 'lte': 5}}}, + ] + } + } + docs = index.filter(filter_query) + assert [doc.id for doc in docs] == ['3', '4'] + + +def test_text_search(): + class MyDoc(BaseDoc): + text: str + + index = OpenSearchDocIndex[MyDoc]() + index_docs = [ + MyDoc(text='hello world'), + MyDoc(text='never gonna give you up'), + MyDoc(text='we are the world'), + ] + index.index(index_docs) + + query = 'world' + docs, scores = index.text_search(query, search_field='text') + + assert len(docs) == 2 + assert len(scores) == 2 + assert docs[0].text.index(query) >= 0 + assert docs[1].text.index(query) >= 0 + + queries = ['world', 'never'] + docs, scores = index.text_search_batched(queries, search_field='text') + for query, da, score in zip(queries, docs, scores): + assert len(da) > 0 + assert len(score) > 0 + for doc in da: + assert doc.text.index(query) >= 0 + + +def test_query_builder(): + class MyDoc(BaseDoc): + tens: NdArray[10] = Field(similarity='l2') + num: int + text: str + + index = OpenSearchDocIndex[MyDoc]() + index_docs = [ + MyDoc( + 
id=f'{i}', tens=np.ones(10) * i, num=int(i / 2), text=f'text {int(i / 2)}' + ) + for i in range(10) + ] + index.index(index_docs) + + # build_query + q = index.build_query() + assert isinstance(q, index.QueryBuilder) + + # filter + q = index.build_query().filter({'term': {'num': 0}}).build() + docs, _ = index.execute_query(q) + assert [doc['id'] for doc in docs] == ['0', '1'] + + # find + q = index.build_query().find(index_docs[-1], search_field='tens', limit=3).build() + docs, _ = index.execute_query(q) + assert [doc['id'] for doc in docs] == ['7', '8', '9'] + + # text_search + q = index.build_query().text_search('0', search_field='text').build() + docs, _ = index.execute_query(q) + assert [doc['id'] for doc in docs] == ['0', '1'] + + # combination + q = ( + index.build_query() + .find(index_docs[-1], search_field='tens') + .filter({'range': {'num': {'lte': 3}}}) + .text_search('0', search_field='text') + .build() + ) + docs, _ = index.execute_query(q) + assert [doc['id'] for doc in docs] == ['1', '0'] + + # direct + query = { + 'query': { + 'knn': { + 'tens': { + 'vector': [ + 9.0, + 9.0, + 9.0, + 9.0, + 9.0, + 9.0, + 9.0, + 9.0, + 9.0, + 9.0, + ], + 'k': 10, + 'filter': { + 'bool': { + 'must': [ + {'range': {'num': {'gte': 2}}}, + {'range': {'num': {'lte': 3}}}, + ] + } + }, + }, + } + } + } + + docs, _ = index.execute_query(query) + assert [doc['id'] for doc in docs] == ['4', '5', '6', '7'] + + +def test_index_name(): + class MyDoc(BaseDoc): + expected_attendees: dict = Field(col_type='integer_range') + time_frame: dict = Field(col_type='date_range', format='yyyy-MM-dd') + + index = OpenSearchDocIndex[MyDoc]() + assert index.index_name == MyDoc.__name__.lower() + + +def test_contain(): + class SimpleSchema(BaseDoc): + tens: NdArray[10] + + index = OpenSearchDocIndex[SimpleSchema]() + index_docs = [SimpleDoc(tens=np.zeros(10)) for _ in range(10)] + + assert (index_docs[0] in index) is False + + index.index(index_docs) + + for doc in index_docs: + assert (doc in index) is True + + index_docs_new = [SimpleDoc(tens=np.zeros(10)) for _ in range(10)] + for doc in index_docs_new: + assert (doc in index) is False diff --git a/tests/index/opensearch/test_index_get_del_os.py b/tests/index/opensearch/test_index_get_del_os.py new file mode 100644 index 0000000000..e1c94adc67 --- /dev/null +++ b/tests/index/opensearch/test_index_get_del_os.py @@ -0,0 +1,283 @@ +from typing import Union + +import numpy as np +import pytest + +from docarray import BaseDoc, DocList +from docarray.documents import TextDoc +from docarray.index import OpenSearchDocIndex +from docarray.typing import NdArray +from tests.index.opensearch.fixture import ( # noqa: F401 + DeepNestedDoc, + FlatDoc, + MyImageDoc, + NestedDoc, + SimpleDoc, + auth, + start_storage, + ten_deep_nested_docs, + ten_flat_docs, + ten_nested_docs, + ten_simple_docs, + tmp_index_name, +) + +pytestmark = [pytest.mark.slow, pytest.mark.index, pytest.mark.opensearchv2] + + +@pytest.mark.parametrize('use_docarray', [True, False]) +def test_index_simple_schema( + ten_simple_docs, use_docarray, tmp_index_name, auth # noqa: F811 +): + index = OpenSearchDocIndex[SimpleDoc](index_name=tmp_index_name, auth=auth) + if use_docarray: + ten_simple_docs = DocList[SimpleDoc](ten_simple_docs) + + index.index(ten_simple_docs) + assert index.num_docs() == 10 + + +@pytest.mark.parametrize('use_docarray', [True, False]) +def test_index_flat_schema( + ten_flat_docs, use_docarray, tmp_index_name, auth +): # noqa: F811 + index = 
OpenSearchDocIndex[FlatDoc](index_name=tmp_index_name, auth=auth) + if use_docarray: + ten_flat_docs = DocList[FlatDoc](ten_flat_docs) + + index.index(ten_flat_docs) + assert index.num_docs() == 10 + + +@pytest.mark.parametrize('use_docarray', [True, False]) +def test_index_nested_schema( + ten_nested_docs, use_docarray, tmp_index_name, auth # noqa: F811 +): + index = OpenSearchDocIndex[NestedDoc](index_name=tmp_index_name, auth=auth) + if use_docarray: + ten_nested_docs = DocList[NestedDoc](ten_nested_docs) + + index.index(ten_nested_docs) + assert index.num_docs() == 10 + + +@pytest.mark.parametrize('use_docarray', [True, False]) +def test_index_deep_nested_schema( + ten_deep_nested_docs, use_docarray, tmp_index_name, auth # noqa: F811 +): + index = OpenSearchDocIndex[DeepNestedDoc](index_name=tmp_index_name, auth=auth) + if use_docarray: + ten_deep_nested_docs = DocList[DeepNestedDoc](ten_deep_nested_docs) + + index.index(ten_deep_nested_docs) + assert index.num_docs() == 10 + + +def test_get_single(ten_simple_docs, ten_flat_docs, ten_nested_docs): # noqa: F811 + # simple + index = OpenSearchDocIndex[SimpleDoc]() + index.index(ten_simple_docs) + + assert index.num_docs() == 10 + for d in ten_simple_docs: + id_ = d.id + assert index[id_].id == id_ + assert np.all(index[id_].tens == d.tens) + index._client.indices.delete(index='simpledoc', ignore_unavailable=True) + + # flat + index = OpenSearchDocIndex[FlatDoc]() + index.index(ten_flat_docs) + + assert index.num_docs() == 10 + for d in ten_flat_docs: + id_ = d.id + assert index[id_].id == id_ + assert np.all(index[id_].tens_one == d.tens_one) + assert np.all(index[id_].tens_two == d.tens_two) + index._client.indices.delete(index='flatdoc', ignore_unavailable=True) + + # nested + index = OpenSearchDocIndex[NestedDoc]() + index.index(ten_nested_docs) + + assert index.num_docs() == 10 + for d in ten_nested_docs: + id_ = d.id + assert index[id_].id == id_ + assert index[id_].d.id == d.d.id + assert np.all(index[id_].d.tens == d.d.tens) + index._client.indices.delete(index='nesteddoc', ignore_unavailable=True) + + +def test_get_multiple(ten_simple_docs, ten_flat_docs, ten_nested_docs): # noqa: F811 + docs_to_get_idx = [0, 2, 4, 6, 8] + + # simple + index = OpenSearchDocIndex[SimpleDoc]() + index.index(ten_simple_docs) + + assert index.num_docs() == 10 + docs_to_get = [ten_simple_docs[i] for i in docs_to_get_idx] + ids_to_get = [d.id for d in docs_to_get] + retrieved_docs = index[ids_to_get] + for id_, d_in, d_out in zip(ids_to_get, docs_to_get, retrieved_docs): + assert d_out.id == id_ + assert np.all(d_out.tens == d_in.tens) + + # flat + index = OpenSearchDocIndex[FlatDoc]() + index.index(ten_flat_docs) + + assert index.num_docs() == 10 + docs_to_get = [ten_flat_docs[i] for i in docs_to_get_idx] + ids_to_get = [d.id for d in docs_to_get] + retrieved_docs = index[ids_to_get] + for id_, d_in, d_out in zip(ids_to_get, docs_to_get, retrieved_docs): + assert d_out.id == id_ + assert np.all(d_out.tens_one == d_in.tens_one) + assert np.all(d_out.tens_two == d_in.tens_two) + + # nested + index = OpenSearchDocIndex[NestedDoc]() + index.index(ten_nested_docs) + + assert index.num_docs() == 10 + docs_to_get = [ten_nested_docs[i] for i in docs_to_get_idx] + ids_to_get = [d.id for d in docs_to_get] + retrieved_docs = index[ids_to_get] + for id_, d_in, d_out in zip(ids_to_get, docs_to_get, retrieved_docs): + assert d_out.id == id_ + assert d_out.d.id == d_in.d.id + assert np.all(d_out.d.tens == d_in.d.tens) + + +def test_get_key_error(ten_simple_docs, 
tmp_index_name, auth): # noqa: F811 + index = OpenSearchDocIndex[SimpleDoc](index_name=tmp_index_name, auth=auth) + index.index(ten_simple_docs) + + with pytest.raises(KeyError): + index['not_a_real_id'] + + +def test_persisting(ten_simple_docs, tmp_index_name, auth): # noqa: F811 + index = OpenSearchDocIndex[SimpleDoc](index_name=tmp_index_name, auth=auth) + index.index(ten_simple_docs) + + index2 = OpenSearchDocIndex[SimpleDoc](index_name=tmp_index_name, auth=auth) + assert index2.num_docs() == 10 + + +def test_del_single(ten_simple_docs, tmp_index_name, auth): # noqa: F811 + index = OpenSearchDocIndex[SimpleDoc](index_name=tmp_index_name, auth=auth) + index.index(ten_simple_docs) + # delete once + assert index.num_docs() == 10 + del index[ten_simple_docs[0].id] + assert index.num_docs() == 9 + for i, d in enumerate(ten_simple_docs): + id_ = d.id + if i == 0: # deleted + with pytest.raises(KeyError): + index[id_] + else: + assert index[id_].id == id_ + assert np.all(index[id_].tens == d.tens) + # delete again + del index[ten_simple_docs[3].id] + assert index.num_docs() == 8 + for i, d in enumerate(ten_simple_docs): + id_ = d.id + if i in (0, 3): # deleted + with pytest.raises(KeyError): + index[id_] + else: + assert index[id_].id == id_ + assert np.all(index[id_].tens == d.tens) + + +def test_del_multiple(ten_simple_docs, tmp_index_name, auth): # noqa: F811 + docs_to_del_idx = [0, 2, 4, 6, 8] + + index = OpenSearchDocIndex[SimpleDoc](index_name=tmp_index_name, auth=auth) + index.index(ten_simple_docs) + + assert index.num_docs() == 10 + docs_to_del = [ten_simple_docs[i] for i in docs_to_del_idx] + ids_to_del = [d.id for d in docs_to_del] + del index[ids_to_del] + for i, doc in enumerate(ten_simple_docs): + if i in docs_to_del_idx: + with pytest.raises(KeyError): + index[doc.id] + else: + assert index[doc.id].id == doc.id + assert np.all(index[doc.id].tens == doc.tens) + + +def test_del_key_error(ten_simple_docs, tmp_index_name, auth): # noqa: F811 + index = OpenSearchDocIndex[SimpleDoc](index_name=tmp_index_name, auth=auth) + index.index(ten_simple_docs) + + with pytest.warns(UserWarning): + del index['not_a_real_id'] + + +def test_num_docs(ten_simple_docs, tmp_index_name): # noqa: F811 + index = OpenSearchDocIndex[SimpleDoc](index_name=tmp_index_name, auth=auth) + index.index(ten_simple_docs) + + assert index.num_docs() == 10 + + del index[ten_simple_docs[0].id] + assert index.num_docs() == 9 + + del index[ten_simple_docs[3].id, ten_simple_docs[5].id] + assert index.num_docs() == 7 + + more_docs = [SimpleDoc(tens=np.random.rand(10)) for _ in range(5)] + index.index(more_docs) + assert index.num_docs() == 12 + + del index[more_docs[2].id, ten_simple_docs[7].id] + assert index.num_docs() == 10 + + +def test_index_union_doc(): # noqa: F811 + class MyDoc(BaseDoc): + tensor: Union[NdArray, str] + + class MySchema(BaseDoc): + tensor: NdArray[128] + + index = OpenSearchDocIndex[MySchema]() + doc = [MyDoc(tensor=np.random.randn(128))] + index.index(doc) + + id_ = doc[0].id + assert index[id_].id == id_ + assert np.all(index[id_].tensor == doc[0].tensor) + + +def test_index_multi_modal_doc(): + class MyMultiModalDoc(BaseDoc): + image: MyImageDoc + text: TextDoc + + index = OpenSearchDocIndex[MyMultiModalDoc]() + + doc = [ + MyMultiModalDoc( + image=MyImageDoc(embedding=np.random.randn(128)), text=TextDoc(text='hello') + ) + ] + index.index(doc) + + id_ = doc[0].id + assert index[id_].id == id_ + assert np.all(index[id_].image.embedding == doc[0].image.embedding) + assert index[id_].text.text == 
doc[0].text.text + + query = doc[0] + docs, _ = index.find(query, limit=10, search_field='image__embedding') + assert len(docs) > 0 diff --git a/tests/index/opensearch/test_subindex_os.py b/tests/index/opensearch/test_subindex_os.py new file mode 100644 index 0000000000..d6f2031652 --- /dev/null +++ b/tests/index/opensearch/test_subindex_os.py @@ -0,0 +1,195 @@ +import numpy as np +import pytest +from pydantic import Field + +from docarray import BaseDoc, DocList +from docarray.index import OpenSearchDocIndex +from docarray.typing import NdArray +from tests.index.opensearch.fixture import auth, start_storage # noqa: F401 + +pytestmark = [pytest.mark.slow, pytest.mark.index, pytest.mark.opensearchv2] + + +class SimpleDoc(BaseDoc): + simple_tens: NdArray[10] = Field(similarity='l2_norm') + simple_text: str + + +class ListDoc(BaseDoc): + docs: DocList[SimpleDoc] + simple_doc: SimpleDoc + list_tens: NdArray[20] = Field(similarity='l2_norm') + + +class MyDoc(BaseDoc): + docs: DocList[SimpleDoc] + list_docs: DocList[ListDoc] + my_tens: NdArray[30] = Field(similarity='l2_norm') + + +@pytest.fixture +def index(): + index = OpenSearchDocIndex[MyDoc](index_name='idx', auth=auth) + my_docs = [ + MyDoc( + id=f'{i}', + docs=DocList[SimpleDoc]( + [ + SimpleDoc( + id=f'docs-{i}-{j}', + simple_tens=np.ones(10), + simple_text=f'hello {j}', + ) + for j in range(5) + ] + ), + list_docs=DocList[ListDoc]( + [ + ListDoc( + id=f'list_docs-{i}-{j}', + docs=DocList[SimpleDoc]( + [ + SimpleDoc( + id=f'list_docs-docs-{i}-{j}-{k}', + simple_tens=np.ones(10), + simple_text=f'hello {k}', + ) + for k in range(5) + ] + ), + simple_doc=SimpleDoc( + id=f'list_docs-simple_doc-{i}-{j}', + simple_tens=np.ones(10), + simple_text=f'hello {j}', + ), + list_tens=np.ones(20), + ) + for j in range(5) + ] + ), + my_tens=np.ones((30,)) * (i + 1), + ) + for i in range(5) + ] + + index.index(my_docs) + return index + + +def test_subindex_init(index): + assert isinstance(index._subindices['docs'], OpenSearchDocIndex) + assert isinstance(index._subindices['list_docs'], OpenSearchDocIndex) + assert isinstance( + index._subindices['list_docs']._subindices['docs'], OpenSearchDocIndex + ) + + +def test_subindex_index(index): + assert index.num_docs() == 5 + assert index._subindices['docs'].num_docs() == 25 + assert index._subindices['list_docs'].num_docs() == 25 + assert index._subindices['list_docs']._subindices['docs'].num_docs() == 125 + + +def test_subindex_get(index): + doc = index['1'] + assert type(doc) == MyDoc + assert doc.id == '1' + + assert len(doc.docs) == 5 + assert type(doc.docs[0]) == SimpleDoc + assert doc.docs[0].id == 'docs-1-0' + assert np.allclose(doc.docs[0].simple_tens, np.ones(10)) + + assert len(doc.list_docs) == 5 + assert type(doc.list_docs[0]) == ListDoc + assert doc.list_docs[0].id == 'list_docs-1-0' + assert len(doc.list_docs[0].docs) == 5 + assert type(doc.list_docs[0].docs[0]) == SimpleDoc + assert doc.list_docs[0].docs[0].id == 'list_docs-docs-1-0-0' + assert np.allclose(doc.list_docs[0].docs[0].simple_tens, np.ones(10)) + assert doc.list_docs[0].docs[0].simple_text == 'hello 0' + assert type(doc.list_docs[0].simple_doc) == SimpleDoc + assert doc.list_docs[0].simple_doc.id == 'list_docs-simple_doc-1-0' + assert np.allclose(doc.list_docs[0].simple_doc.simple_tens, np.ones(10)) + assert doc.list_docs[0].simple_doc.simple_text == 'hello 0' + assert np.allclose(doc.list_docs[0].list_tens, np.ones(20)) + + assert np.allclose(doc.my_tens, np.ones(30) * 2) + + +def test_find_subindex(index): + # root level + query = 
np.ones((30,)) + with pytest.raises(ValueError): + _, _ = index.find_subindex(query, subindex='', search_field='my_tens', limit=5) + + # sub level + query = np.ones((10,)) + root_docs, docs, scores = index.find_subindex( + query, subindex='docs', search_field='simple_tens', limit=5 + ) + assert type(root_docs[0]) == MyDoc + assert type(docs[0]) == SimpleDoc + for root_doc, doc, score in zip(root_docs, docs, scores): + assert np.allclose(doc.simple_tens, np.ones(10)) + assert root_doc.id == f'{doc.id.split("-")[1]}' + assert score == 1.0 + + # sub sub level + query = np.ones((10,)) + root_docs, docs, scores = index.find_subindex( + query, subindex='list_docs__docs', search_field='simple_tens', limit=5 + ) + assert len(docs) == 5 + assert type(root_docs[0]) == MyDoc + assert type(docs[0]) == SimpleDoc + for root_doc, doc, score in zip(root_docs, docs, scores): + assert np.allclose(doc.simple_tens, np.ones(10)) + assert root_doc.id == f'{doc.id.split("-")[2]}' + assert score == 1.0 + + +def test_subindex_filter(index): + query = {'match': {'simple_doc__simple_text': 'hello 0'}} + docs = index.filter_subindex(query, subindex='list_docs', limit=5) + assert len(docs) == 5 + assert type(docs[0]) == ListDoc + for doc in docs: + assert doc.id.split('-')[-1] == '0' + + query = {'match': {'simple_text': 'hello 0'}} + docs = index.filter_subindex(query, subindex='list_docs__docs', limit=5) + assert len(docs) == 5 + assert type(docs[0]) == SimpleDoc + for doc in docs: + assert doc.id.split('-')[-1] == '0' + + +def test_subindex_contain(index, auth): + # Checks for individual simple_docs within list_docs + for i in range(4): + doc = index[f'{i + 1}'] + for simple_doc in doc.list_docs: + assert index.subindex_contains(simple_doc) is True + for nested_doc in simple_doc.docs: + assert index.subindex_contains(nested_doc) is True + + invalid_doc = SimpleDoc( + id='non_existent', + simple_tens=np.zeros(10), + simple_text='invalid', + ) + assert index.subindex_contains(invalid_doc) is False + + # Checks for an empty doc + empty_doc = SimpleDoc( + id='', + simple_tens=np.zeros(10), + simple_text='', + ) + assert index.subindex_contains(empty_doc) is False + + # Empty index + empty_index = OpenSearchDocIndex[MyDoc](auth=auth) + assert (empty_doc in empty_index) is False
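
For reviewers, below is a minimal usage sketch of the new OpenSearchDocIndex, distilled from the tests in this diff. The schema, index name, credentials, numeric field, and similarity value are illustrative assumptions, not part of the change itself; they mirror the keyword arguments and query shapes exercised above.

    import numpy as np
    from pydantic import Field

    from docarray import BaseDoc
    from docarray.index import OpenSearchDocIndex
    from docarray.typing import NdArray


    class MyDoc(BaseDoc):
        text: str
        year: int
        # 'cosinesimil' is one of the space types the find tests parametrize over
        embedding: NdArray[128] = Field(similarity='cosinesimil')


    # index_name and auth follow the test fixtures; the credentials are placeholders
    index = OpenSearchDocIndex[MyDoc](index_name='mydoc', auth={'admin': '<password>'})

    docs = [
        MyDoc(text=f'hello {i}', year=2020 + i, embedding=np.random.rand(128))
        for i in range(10)
    ]
    index.index(docs)

    # vector search against the knn_vector field
    matches, scores = index.find(docs[0], search_field='embedding', limit=3)

    # full-text search and a raw OpenSearch filter query
    hits, _ = index.text_search('hello', search_field='text')
    filtered = index.filter({'range': {'year': {'gte': 2025}}})

As the query-builder tests above show, find, filter, and text_search clauses can also be combined through index.build_query() and executed with index.execute_query(), matching the pattern of the existing Elasticsearch backend.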