[mod] timeouts in the log are readable (the timeouts are compared to the start_time of the request).

Alexandre Flament 2016-12-30 17:37:46 +01:00
parent 3a8ab5880a
commit 28d51fd063
1 changed file with 11 additions and 13 deletions
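In short: send_http_request used to measure the elapsed time against request_params['started'], a timestamp stored on each request dictionary, and search_multiple_requests was handed an already reduced timeout. After this commit the search's start_time is passed explicitly down the call chain, so the duration that triggers a Timeout (and shows up in the log) is measured from the start of the whole request. A rough, self-contained sketch of the new timing logic follows; send_http_request_sketch, do_request and SketchTimeout are invented for illustration, while the 0.2 s overhead and the time_before/time_after variables come from the diff below.

    from time import time

    TIMEOUT_OVERHEAD = 0.2  # seconds, same value as in the diff

    class SketchTimeout(Exception):
        """Stand-in for the Timeout exception raised in searx."""

    def send_http_request_sketch(do_request, start_time, timeout_limit):
        # do_request is a placeholder for the real HTTP call
        time_before_request = time()
        response = do_request()
        time_after_request = time()

        # the duration is now measured against the start of the whole
        # search request, not against a per-request 'started' timestamp
        search_duration = time_after_request - start_time
        if search_duration > timeout_limit + TIMEOUT_OVERHEAD:
            raise SketchTimeout(response)

        # page_load_time stats only cover the HTTP request itself
        page_load_time = time_after_request - time_before_request
        return response, page_load_time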


@@ -37,7 +37,7 @@ logger = logger.getChild('search')
 number_of_searches = 0
 
 
-def send_http_request(engine, request_params, timeout_limit):
+def send_http_request(engine, request_params, start_time, timeout_limit):
     # for page_load_time stats
     time_before_request = time()
 
@@ -62,7+62,8 @@ def send_http_request(engine, request_params, timeout_limit):
 
     # is there a timeout (no parsing in this case)
     timeout_overhead = 0.2 # seconds
-    search_duration = time() - request_params['started']
+    time_after_request = time()
+    search_duration = time_after_request - start_time
     if search_duration > timeout_limit + timeout_overhead:
         raise Timeout(response=response)
 
@@ -72,14 +73,14 @@ def send_http_request(engine, request_params, timeout_limit):
     engine.suspend_end_time = 0
     # update stats with current page-load-time
     # only the HTTP request
-    engine.stats['page_load_time'] += time() - time_before_request
+    engine.stats['page_load_time'] += time_after_request - time_before_request
     engine.stats['page_load_count'] += 1
 
     # everything is ok : return the response
     return response
 
 
-def search_one_request(engine, query, request_params, timeout_limit):
+def search_one_request(engine, query, request_params, start_time, timeout_limit):
     # update request parameters dependent on
     # search-engine (contained in engines folder)
     engine.request(query, request_params)
@@ -92,20 +93,19 @@ def search_one_request(engine, query, request_params, timeout_limit):
         return []
 
     # send request
-    response = send_http_request(engine, request_params, timeout_limit)
+    response = send_http_request(engine, request_params, start_time, timeout_limit)
 
     # parse the response
     response.search_params = request_params
     return engine.response(response)
 
 
-def search_one_request_safe(engine_name, query, request_params, result_container, timeout_limit):
-    start_time = time()
+def search_one_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit):
     engine = engines[engine_name]
 
     try:
         # send requests and parse the results
-        search_results = search_one_request(engine, query, request_params, timeout_limit)
+        search_results = search_one_request(engine, query, request_params, start_time, timeout_limit)
 
         # add results
         result_container.extend(engine_name, search_results)
 
@@ -149,14 +149,13 @@ def search_one_request_safe(engine_name, query, request_params, result_container
         return False
 
 
-def search_multiple_requests(requests, result_container, timeout_limit):
-    start_time = time()
+def search_multiple_requests(requests, result_container, start_time, timeout_limit):
     search_id = uuid4().__str__()
 
     for engine_name, query, request_params in requests:
         th = threading.Thread(
             target=search_one_request_safe,
-            args=(engine_name, query, request_params, result_container, timeout_limit),
+            args=(engine_name, query, request_params, result_container, start_time, timeout_limit),
             name=search_id,
         )
         th._engine_name = engine_name
@@ -366,7 +365,6 @@ class Search(object):
            request_params = default_request_params()
            request_params['headers']['User-Agent'] = user_agent
            request_params['category'] = selected_engine['category']
-           request_params['started'] = start_time
            request_params['pageno'] = search_query.pageno
 
            if hasattr(engine, 'language') and engine.language:
@@ -386,7 +384,7 @@ class Search(object):
 
        if requests:
            # send all search-request
-           search_multiple_requests(requests, self.result_container, timeout_limit - (time() - start_time))
+           search_multiple_requests(requests, self.result_container, start_time, timeout_limit)
            start_new_thread(gc.collect, tuple())
 
        # return results, suggestions, answers and infoboxes
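
The last hunk is the counterpart of the signature changes above: instead of passing the remaining time (timeout_limit - (time() - start_time)) into search_multiple_requests, Search.search() now forwards start_time together with the full timeout_limit, and the callees derive the elapsed or remaining time themselves. A small sketch of that equivalence; the helper name remaining_time is invented here:

    from time import time

    def remaining_time(start_time, timeout_limit):
        # what was computed once at the call site can now be derived anywhere
        # in the chain, because start_time travels with every call
        return max(0.0, timeout_limit - (time() - start_time))

    # before: search_multiple_requests(requests, result_container, timeout_limit - (time() - start_time))
    # after:  search_multiple_requests(requests, result_container, start_time, timeout_limit)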