
Commit

more review feedback
miguelgrinberg committed Mar 28, 2024
1 parent 2468e8a · commit d06d804
Showing 15 changed files with 179 additions and 175 deletions.
12 changes: 7 additions & 5 deletions elasticsearch_dsl/_async/index.py
@@ -56,18 +56,20 @@ async def save(self, using=None):
 
 
 class AsyncIndex(IndexBase):
+    def __init__(self, name, using="default"):
+        """
+        :arg name: name of the index
+        :arg using: connection alias to use, defaults to ``'default'``
+        """
+        super().__init__(name, AsyncMapping, using=using)
+
     def _get_connection(self, using=None):
         if self._name is None:
             raise ValueError("You cannot perform API calls on the default index.")
         return get_connection(using or self._using)
 
     connection = property(_get_connection)
 
-    def get_or_create_mapping(self):
-        if self._mapping is None:
-            self._mapping = AsyncMapping()
-        return self._mapping
-
     def as_template(self, template_name, pattern=None, order=None):
         # TODO: should we allow pattern to be a top-level arg?
         # or maybe have an IndexPattern that allows for it and have
12 changes: 7 additions & 5 deletions elasticsearch_dsl/_sync/index.py
@@ -54,18 +54,20 @@ def save(self, using=None):
 
 
 class Index(IndexBase):
+    def __init__(self, name, using="default"):
+        """
+        :arg name: name of the index
+        :arg using: connection alias to use, defaults to ``'default'``
+        """
+        super().__init__(name, Mapping, using=using)
+
     def _get_connection(self, using=None):
         if self._name is None:
             raise ValueError("You cannot perform API calls on the default index.")
         return get_connection(using or self._using)
 
     connection = property(_get_connection)
 
-    def get_or_create_mapping(self):
-        if self._mapping is None:
-            self._mapping = Mapping()
-        return self._mapping
-
     def as_template(self, template_name, pattern=None, order=None):
         # TODO: should we allow pattern to be a top-level arg?
         # or maybe have an IndexPattern that allows for it and have
8 changes: 7 additions & 1 deletion elasticsearch_dsl/index_base.py
@@ -20,7 +20,7 @@
 
 
 class IndexBase:
-    def __init__(self, name, using="default"):
+    def __init__(self, name, mapping_class, using="default"):
         """
         :arg name: name of the index
         :arg using: connection alias to use, defaults to ``'default'``
@@ -31,6 +31,7 @@ def __init__(self, name, using="default"):
         self._settings = {}
         self._aliases = {}
         self._analysis = {}
+        self._mapping_class = mapping_class
         self._mapping = None
 
     def resolve_nested(self, field_path):
@@ -51,6 +52,11 @@ def resolve_field(self, field_path):
             return self._mapping.resolve_field(field_path)
         return None
 
+    def get_or_create_mapping(self):
+        if self._mapping is None:
+            self._mapping = self._mapping_class()
+        return self._mapping
+
     def mapping(self, mapping):
         """
         Associate a mapping (an instance of
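The net effect of this change is that lazy mapping creation now lives in IndexBase: each concrete index class passes its mapping class to the base constructor, and the shared get_or_create_mapping() instantiates whichever class was injected. The following is a minimal sketch of that pattern, not part of the diff; it assumes Index, AsyncIndex and Keyword are importable from the package top level as in the async-enabled releases, and the index name "blog" is illustrative only.

from elasticsearch_dsl import AsyncIndex, Index, Keyword

# Index passes Mapping to IndexBase, AsyncIndex passes AsyncMapping,
# so the same base-class helper yields the right mapping type for each.
sync_index = Index("blog")        # hypothetical index name
async_index = AsyncIndex("blog")  # hypothetical index name

# get_or_create_mapping() lazily creates the injected mapping class on
# first use and reuses it afterwards; no API calls are made here.
sync_index.get_or_create_mapping().field("title", "text")
async_index.get_or_create_mapping().field("tags", Keyword())

print(type(sync_index.get_or_create_mapping()).__name__)   # Mapping
print(type(async_index.get_or_create_mapping()).__name__)  # AsyncMapping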
2 changes: 1 addition & 1 deletion tests/_async/test_document.py
@@ -588,7 +588,7 @@ async def test_update_no_fields():
         await md.update()
 
 
-async def test_search_with_custom_alias_and_index(async_mock_client):
+def test_search_with_custom_alias_and_index():
     search_object = MyDoc.search(
         using="staging", index=["custom_index1", "custom_index2"]
     )
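The async_mock_client fixture is dropped here because calling .search() on a document class only builds a Search object; nothing is sent to the cluster until the search is executed, so the test can be synchronous. A rough sketch under that assumption, with BlogPost as a hypothetical stand-in for the test's MyDoc class and AsyncDocument assumed importable from the package top level:

from elasticsearch_dsl import AsyncDocument, Text

class BlogPost(AsyncDocument):  # hypothetical stand-in for MyDoc
    title = Text()

# Building the search object performs no I/O, so no client is required.
s = BlogPost.search(using="staging", index=["custom_index1", "custom_index2"])
assert s._using == "staging"
assert s._index == ["custom_index1", "custom_index2"]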
21 changes: 10 additions & 11 deletions tests/_async/test_mapping.py
@@ -17,12 +17,11 @@
 
 import json
 
-from elasticsearch_dsl import Keyword, Nested, Text, analysis
-from elasticsearch_dsl._async import mapping
+from elasticsearch_dsl import AsyncMapping, Keyword, Nested, Text, analysis
 
 
 def test_mapping_can_has_fields():
-    m = mapping.AsyncMapping()
+    m = AsyncMapping()
     m.field("name", "text").field("tags", "keyword")
 
     assert {
@@ -31,14 +30,14 @@ def test_mapping_can_has_fields():
 
 
 def test_mapping_update_is_recursive():
-    m1 = mapping.AsyncMapping()
+    m1 = AsyncMapping()
     m1.field("title", "text")
     m1.field("author", "object")
     m1.field("author", "object", properties={"name": {"type": "text"}})
     m1.meta("_all", enabled=False)
     m1.meta("dynamic", False)
 
-    m2 = mapping.AsyncMapping()
+    m2 = AsyncMapping()
     m2.field("published_from", "date")
     m2.field("author", "object", properties={"email": {"type": "text"}})
     m2.field("title", "text")
@@ -64,7 +63,7 @@ def test_mapping_update_is_recursive():
 
 
 def test_properties_can_iterate_over_all_the_fields():
-    m = mapping.AsyncMapping()
+    m = AsyncMapping()
     m.field("f1", "text", test_attr="f1", fields={"f2": Keyword(test_attr="f2")})
     m.field("f3", Nested(test_attr="f3", properties={"f4": Text(test_attr="f4")}))
 
@@ -101,7 +100,7 @@ def test_mapping_can_collect_all_analyzers_and_normalizers():
     )
     n3 = analysis.normalizer("unknown_custom")
 
-    m = mapping.AsyncMapping()
+    m = AsyncMapping()
     m.field(
         "title",
         "text",
@@ -160,7 +159,7 @@ def test_mapping_can_collect_multiple_analyzers():
         tokenizer=analysis.tokenizer("trigram", "nGram", min_gram=3, max_gram=3),
         filter=[analysis.token_filter("my_filter2", "stop", stopwords=["c", "d"])],
     )
-    m = mapping.AsyncMapping()
+    m = AsyncMapping()
     m.field("title", "text", analyzer=a1, search_analyzer=a2)
     m.field(
         "text",
@@ -194,7 +193,7 @@ def test_mapping_can_collect_multiple_analyzers():
 
 def test_even_non_custom_analyzers_can_have_params():
     a1 = analysis.analyzer("whitespace", type="pattern", pattern=r"\\s+")
-    m = mapping.AsyncMapping()
+    m = AsyncMapping()
     m.field("title", "text", analyzer=a1)
 
     assert {
@@ -203,14 +202,14 @@ def test_even_non_custom_analyzers_can_have_params():
 
 
 def test_resolve_field_can_resolve_multifields():
-    m = mapping.AsyncMapping()
+    m = AsyncMapping()
     m.field("title", "text", fields={"keyword": Keyword()})
 
     assert isinstance(m.resolve_field("title.keyword"), Keyword)
 
 
 def test_resolve_nested():
-    m = mapping.AsyncMapping()
+    m = AsyncMapping()
     m.field("n1", "nested", properties={"n2": Nested(properties={"k1": Keyword()})})
     m.field("k2", "keyword")
 
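The only substantive change in this file is the import style: AsyncMapping is now taken from the package's public namespace rather than from the private elasticsearch_dsl._async.mapping module. A minimal sketch of the updated usage, mirroring the structure asserted in test_mapping_can_has_fields() (the truncated "assert {" in the diff above), with the field names taken from that test:

from elasticsearch_dsl import AsyncMapping

m = AsyncMapping()
m.field("name", "text").field("tags", "keyword")

# Same shape as the dict the test compares against m.to_dict().
assert m.to_dict() == {
    "properties": {
        "name": {"type": "text"},
        "tags": {"type": "keyword"},
    }
}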
(Diffs for the remaining 10 changed files are not shown here.)

