# searx/engines/github.py

"""
Github (It)
@website https://github.com/
@provide-api yes (https://developer.github.com/v3/)
@using-api yes
@results JSON
@stable yes (using api)
@parse url, title, content
"""
from json import loads

from searx.url_utils import urlencode

# engine-dependent configuration
categories = ['it']

# base search URL; {query} is filled with the urlencoded user query.
# Results are ordered by star count, most-starred first.
search_url = 'https://api.github.com/search/repositories?sort=stars&order=desc&{query}'  # noqa

# opt in to the text-match media type of the GitHub search preview API
accept_header = 'application/vnd.github.preview.text-match+json'
# do search-request
def request(query, params):
    """Fill *params* with the URL and headers for a GitHub repo search.

    :param query: raw user query string
    :param params: request-parameter dict supplied by searx (mutated in place)
    :returns: the same *params* dict, with ``url`` and the Accept header set
    """
    encoded = urlencode({'q': query})
    params['url'] = search_url.format(query=encoded)
    params['headers']['Accept'] = accept_header
    return params
# get response from search-request
def response(resp):
    """Parse the GitHub repository-search JSON payload into searx results.

    :param resp: HTTP response object whose ``text`` holds the JSON body
    :returns: list of result dicts with ``url``, ``title`` and ``content``
    """
    results = []

    search_res = loads(resp.text)

    # check if items are received; error payloads (e.g. rate limiting)
    # carry a 'message' key instead of 'items'
    if 'items' not in search_res:
        return []

    # parse results
    for res in search_res['items']:
        title = res['name']
        url = res['html_url']

        # 'description' may be null or missing entirely; use .get so an
        # absent key cannot raise KeyError, and cap snippet length at 500
        content = (res.get('description') or '')[:500]

        # append result
        results.append({'url': url,
                        'title': title,
                        'content': content})

    # return results
    return results