diff options
| author | Markus Heiser <markus.heiser@darmarIT.de> | 2021-11-27 00:28:22 +0100 |
|---|---|---|
| committer | GitHub <noreply@github.com> | 2021-11-27 00:28:22 +0100 |
| commit | 27c1b9f6608035cf42aa6216cbd646d55487ad5f (patch) | |
| tree | 2b32edda04819ab4e7484437842257f6668ef2fe | |
| parent | ab5097d7085f772d7ab92b257403a0fe00426114 (diff) | |
| parent | 0d41f26a20fdaf6d3e6ddd1aa6e3d2360297da8e (diff) | |
Merge pull request #551 from dalf/fix-results-lock
[fix] searx.results: fix pylint issue "useless-with-lock"
| -rw-r--r-- | searx/results.py | 31 |
1 file changed, 17 insertions, 14 deletions
diff --git a/searx/results.py b/searx/results.py
index 2e81f5dc4..10a26aa3f 100644
--- a/searx/results.py
+++ b/searx/results.py
@@ -145,7 +145,8 @@ class ResultContainer:
     """docstring for ResultContainer"""

     __slots__ = '_merged_results', 'infoboxes', 'suggestions', 'answers', 'corrections', '_number_of_results',\
-        '_closed', 'paging', 'unresponsive_engines', 'timings', 'redirect_url', 'engine_data', 'on_result'
+        '_closed', 'paging', 'unresponsive_engines', 'timings', 'redirect_url', 'engine_data', 'on_result',\
+        '_lock'

     def __init__(self):
         super().__init__()
@@ -162,6 +163,7 @@ class ResultContainer:
         self.timings = []
         self.redirect_url = None
         self.on_result = lambda _: True
+        self._lock = RLock()

     def extend(self, engine_name, results):
         if self._closed:
@@ -216,10 +218,11 @@ class ResultContainer:
             infobox['engines'] = set([infobox['engine']])

         if infobox_id is not None:
             parsed_url_infobox_id = urlparse(infobox_id)
-            for existingIndex in self.infoboxes:
-                if compare_urls(urlparse(existingIndex.get('id', '')), parsed_url_infobox_id):
-                    merge_two_infoboxes(existingIndex, infobox)
-                    add_infobox = False
+            with self._lock:
+                for existingIndex in self.infoboxes:
+                    if compare_urls(urlparse(existingIndex.get('id', '')), parsed_url_infobox_id):
+                        merge_two_infoboxes(existingIndex, infobox)
+                        add_infobox = False

         if add_infobox:
             self.infoboxes.append(infobox)
@@ -262,14 +265,14 @@ class ResultContainer:

     def __merge_url_result(self, result, position):
         result['engines'] = set([result['engine']])
-        duplicated = self.__find_duplicated_http_result(result)
-        if duplicated:
-            self.__merge_duplicated_http_result(duplicated, result, position)
-            return
-
-        # if there is no duplicate found, append result
-        result['positions'] = [position]
-        with RLock():
+        with self._lock:
+            duplicated = self.__find_duplicated_http_result(result)
+            if duplicated:
+                self.__merge_duplicated_http_result(duplicated, result, position)
+                return
+
+            # if there is no duplicate found, append result
+            result['positions'] = [position]
             self._merged_results.append(result)

     def __find_duplicated_http_result(self, result):
@@ -314,7 +317,7 @@ class ResultContainer:
     def __merge_result_no_url(self, result, position):
         result['engines'] = set([result['engine']])
         result['positions'] = [position]
-        with RLock():
+        with self._lock:
             self._merged_results.append(result)

     def close(self):