-from typing import Optional, Union, Set, Dict, List, Tuple
+from typing import Dict, List, Optional, Set, Tuple, Union

 from redisvl.query.filter import FilterExpression
 from redisvl.utils.token_escaper import TokenEscaper
@@ -8,9 +8,7 @@
 nltk_stopwords = lazy_import("nltk.corpus.stopwords")


-def _parse_text_weights(
-    weights: Optional[Dict[str, float]]
-) -> Dict[str, float]:
+def _parse_text_weights(weights: Optional[Dict[str, float]]) -> Dict[str, float]:
     parsed_weights: Dict[str, float] = {}
     if not weights:
         return parsed_weights
@@ -20,10 +18,7 @@ def _parse_text_weights(
             raise ValueError(
                 f"Only individual words may be weighted. Got {{{word}:{weight}}}"
             )
-        if (
-            not (isinstance(weight, float) or isinstance(weight, int))
-            or weight < 0.0
-        ):
+        if not (isinstance(weight, float) or isinstance(weight, int)) or weight < 0.0:
             raise ValueError(
                 f"Weights must be positive number. Got {{{word}:{weight}}}"
             )
@@ -32,8 +27,7 @@ def _parse_text_weights(


 class FullTextQueryHelper:
-    """Convert raw user queries into Redis full-text queries - tokenizes, escapes, and filters stopwords from the query.
-    """
+    """Convert raw user queries into Redis full-text queries - tokenizes, escapes, and filters stopwords from the query."""

     def __init__(
         self,
@@ -77,7 +71,9 @@ def build_query_string(

         return query

-    def _get_stopwords(self, stopwords: Optional[Union[str, Set[str]]] = "english") -> Set[str]:
+    def _get_stopwords(
+        self, stopwords: Optional[Union[str, Set[str]]] = "english"
+    ) -> Set[str]:
         """Get the stopwords to use in the query.

         Args:
@@ -106,7 +102,7 @@ def _get_stopwords(self, stopwords: Optional[Union[str, Set[str]]] = "english")
             except Exception as e:
                 raise ValueError(f"Error trying to load {stopwords} from nltk. {e}")
         elif isinstance(stopwords, (Set, List, Tuple)) and all(  # type: ignore
-            isinstance(word, str) for word in stopwords
+            isinstance(word, str) for word in stopwords
         ):
             return set(stopwords)
         else:
@@ -151,4 +147,3 @@ def _tokenize_and_escape_query(self, user_query: str) -> str:
         if not token_list:
             raise ValueError("text string cannot be empty after removing stopwords")
         return " | ".join(token_list)
-
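For orientation, a minimal usage sketch of the _parse_text_weights helper touched above, inferred only from the hunks in this diff; the redisvl.query.query import path is an assumption, and the exact contents of the returned mapping are not shown in the changed lines:

    # Hypothetical sketch -- the module path below is assumed, not confirmed by this diff.
    from redisvl.query.query import _parse_text_weights

    # Single-word keys with non-negative numeric weights pass the validation shown above.
    weights = _parse_text_weights({"redis": 2.0, "search": 1})

    # Per the hunks above, these calls would raise ValueError:
    #   _parse_text_weights({"vector search": 2.0})  # only individual words may be weighted
    #   _parse_text_weights({"redis": -1.0})         # weights must be a positive number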