
Merge pull request #1018 from misnyo/generalfile

[mod]generalfile engine removed
Adam Tauber 7 years ago
commit 234366b900
1 changed file with 0 additions and 62 deletions

searx/engines/generalfile.py  +0 -62

@@ -1,62 +0,0 @@
-"""
- General Files (Files)
-
- @website     http://www.general-files.org
- @provide-api no (nothing found)
-
- @using-api   no (because nothing found)
- @results     HTML (using search portal)
- @stable      no (HTML can change)
- @parse       url, title, content
-
- @todo        detect torrents?
-"""
-
-from lxml import html
-
-# engine dependent config
-categories = ['files']
-paging = True
-
-# search-url
-base_url = 'http://www.general-file.com'
-search_url = base_url + '/files-{letter}/{query}/{pageno}'
-
-# specific xpath variables
-result_xpath = '//table[@class="block-file"]'
-title_xpath = './/h2/a//text()'
-url_xpath = './/h2/a/@href'
-content_xpath = './/p//text()'
-
-
-# do search-request
-def request(query, params):
-
-    params['url'] = search_url.format(query=query,
-                                      letter=query[0],
-                                      pageno=params['pageno'])
-
-    return params
-
-
-# get response from search-request
-def response(resp):
-    results = []
-
-    dom = html.fromstring(resp.text)
-
-    # parse results
-    for result in dom.xpath(result_xpath):
-        url = result.xpath(url_xpath)[0]
-
-        # skip fast download links
-        if not url.startswith('/'):
-            continue
-
-        # append result
-        results.append({'url': base_url + url,
-                        'title': ''.join(result.xpath(title_xpath)),
-                        'content': ''.join(result.xpath(content_xpath))})
-
-    # return results
-    return results
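
Note: the removed module followed the usual searx engine contract visible in the diff above: module-level config (categories, paging, base_url, search_url and xpath selectors), a request(query, params) hook that only fills in params['url'], and a response(resp) hook that turns the HTML answer into a list of {'url', 'title', 'content'} dicts. A minimal sketch of that contract is below; the host and xpath selectors are placeholders for illustration, not part of this commit.

from lxml import html

categories = ['files']
paging = True

base_url = 'http://example.org'                      # placeholder host
search_url = base_url + '/search/{query}/{pageno}'   # placeholder URL scheme


def request(query, params):
    # searx calls this first: build the page URL; searx performs the HTTP request
    params['url'] = search_url.format(query=query, pageno=params['pageno'])
    return params


def response(resp):
    # searx calls this with the HTTP response: parse the HTML, return result dicts
    results = []
    dom = html.fromstring(resp.text)
    for result in dom.xpath('//div[@class="result"]'):   # placeholder selector
        results.append({'url': result.xpath('.//a/@href')[0],
                        'title': ''.join(result.xpath('.//h2//text()')),
                        'content': ''.join(result.xpath('.//p//text()'))})
    return results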