#!/usr/bin/env python
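"""Screenshot every URL in a phishing feed.

Fetches a newline-separated feed of phishing links and POSTs each URL to a
local `puppet` screenshot service, which is expected to save a PNG of the
page at the requested path under ``defaults.FISH_PATH``.
"""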
import concurrent.futures

import requests
from progress.bar import ChargingBar

from common import defaults, mkdir

# Default number of concurrent screenshot jobs.
PARALLEL = 20

def get(payload):
    # POST one screenshot job to the puppet service; the service is expected
    # to fetch payload['url'] and write a PNG at payload['path'].
    return requests.post('http://puppet:8000/screenshot',
                         json=payload)

def slug(url):
    """Turn a URL into a filesystem-safe file name."""
    url = url.replace('http://', '').replace('https://', '')
    for char in '/&=?':
        url = url.replace(char, '_')
    return url

def download_all(feed, n_workers=PARALLEL, dest=defaults.FISH_PATH):
    mkdir.make_dirs([dest])
    res = requests.get(feed)
    # Drop blank lines so we don't submit empty URLs to the service.
    urls = [u for u in res.text.split('\n') if u.strip()]
    with concurrent.futures.ThreadPoolExecutor(max_workers=n_workers) as executor:
        futures = {executor.submit(get, {
            'url': u,
            'path': f'{dest}/{slug(u)}.png',
        }): u for u in urls}
        print(f'will get {len(futures)} domains')
        bar = ChargingBar('Processing', max=len(futures), suffix='%(index)d/%(max)d')
        for f in concurrent.futures.as_completed(futures):
            url = futures[f]
            try:
                ret = f.result()
            except Exception as e:
                print(f'{url} generated an exception: {e}')
            else:
                print(ret)
            bar.next()
        bar.finish()

if __name__ == '__main__':
    import argparse

    print("☠ getting extra backgrounds from OpenFish")
    parser = argparse.ArgumentParser(description='screenshot the openfish open list')
    parser.add_argument('--parallel', metavar='parallel', type=int,
                        default=PARALLEL,
                        help='number of concurrent jobs')
    parser.add_argument('--feed', metavar='feed', type=str,
                        default='https://raw.githubusercontent.com/mitchellkrogza/Phishing.Database/master/phishing-links-ACTIVE-TODAY.txt',
                        help='''
                        phishing feed to use; we recommend:
                          - https://github.com/mitchellkrogza/Phishing.Database/blob/master/phishing-links-ACTIVE-TODAY.txt
                          - https://openphish.com/feed.txt
                        ''')
    args = parser.parse_args()
    download_all(args.feed, n_workers=args.parallel)
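
# Example invocation (the script file name below is assumed, not part of this repo):
#   python screenshot_feed.py --parallel 50 --feed https://openphish.com/feed.txt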