Python Forum
How to rebuild this code?
#1
Hi all, I'm trying to build a website traffic bot for my website. It is based on another YouTube views bot, and now I want to rebuild it.

from multiprocessing import Pool, cpu_count, freeze_support
from selenium import webdriver
from selenium.webdriver.common.by import By
from urllib.request import Request, urlopen
from bs4 import BeautifulSoup
from fake_useragent import UserAgent
from time import sleep
import os
import random
 
 
def get_proxies(ua):
    # Scrape the free proxy list from sslproxies.org
    proxies = []
    proxies_req = Request('https://www.sslproxies.org/')
    proxies_req.add_header('User-Agent', ua.random)
    proxies_doc = urlopen(proxies_req).read().decode('utf8')

    soup = BeautifulSoup(proxies_doc, 'html.parser')
    proxies_table = soup.find(id='proxylisttable')

    # Save each ip/port pair from the table rows
    for row in proxies_table.tbody.find_all('tr'):
        cells = row.find_all('td')
        proxies.append({'ip': cells[0].string,
                        'port': cells[1].string})
    return proxies
 
def random_proxy(proxies):
    return random.choice(proxies)
 
def search_string_to_query(search_string):
    # Turn "some search terms" into "some+search+terms" for the URL
    return '+'.join(search_string.split(' '))
 
def search_and_click(ua, sleep_time, search_string, proxy, proxies, sleep_after):

    options = webdriver.ChromeOptions()
    options.add_argument('--proxy-server=%s:%s' % (proxy['ip'], proxy['port']))
    options.add_argument('user-agent=%s' % ua.random)

    driver = webdriver.Chrome(options=options)
    query = search_string_to_query(search_string)

    try:
        # Open the YouTube search results for the query and click the first video
        driver.get('https://www.youtube.com/results?search_query=' + query)
        section_list = driver.find_element(By.CLASS_NAME, 'section-list')
        link = section_list.find_element(By.CLASS_NAME, 'yt-uix-tile-link')
        link.click()

        # "Watch" the video for the requested time, then close the browser
        sleep(sleep_time)
        driver.quit()

        if sleep_after is not None:
            sleep(sleep_after)

    except Exception:
        # The proxy (or the page) failed: close the browser and retry with another proxy
        driver.quit()
        proxy = random.choice(proxies)
        search_and_click(ua, sleep_time, search_string, proxy, proxies, sleep_after)
         
def parse_line(line):
    # Return everything after the '=' in a "key = value" config line
    delim_loc = line.find('=')
    return line[delim_loc + 1:].strip()
 
def read_config(config_string):
    try:
        search_string = parse_line(config_string[0])
        min_watch = int(parse_line(config_string[1]))
        max_watch = int(parse_line(config_string[2]))
        # 'None' means no pause between views; anything else must be an int
        sleep_after = parse_line(config_string[3])
        sleep_after = None if sleep_after == 'None' else int(sleep_after)
        views = int(parse_line(config_string[4]))
        multicore = parse_line(config_string[5]) == 'True'
        return search_string, sleep_after, min_watch, max_watch, views, multicore
    except Exception:
        # Malformed config: rewrite the defaults and signal the failure
        write_defaults()
        return 'Bad File', 'RIP', 'Bad File', 'RIP', 'Bad File', 'RIP'
     
def write_defaults():
    with open('config.txt', 'w') as config:
        config.write('search_string = Your Search Here\n')
        config.write('min_watch = 10\n')
        config.write('max_watch = 45\n')
        config.write('sleep_after = 15\n')
        config.write('views = 100\n')
        config.write('multicore = False')
 
# Only create a default config if one does not exist yet; writing it
# unconditionally would wipe the saved settings on every run
if not os.path.exists('config.txt'):
    write_defaults()
 
if __name__ == "__main__":
    freeze_support()
    with open('config.txt', 'r') as config:
        config_values = config.readlines()

    search_string, sleep_after, min_watch, max_watch, views, multicore = read_config(config_values)
    if min_watch == 'Bad File':
        print('config.txt was malformed; defaults were rewritten. Edit it and run again.')
    elif multicore:
        threads = max(1, int(cpu_count() * 0.75))
        pool = Pool(threads)
        ua = UserAgent()
        proxies = get_proxies(ua)
        for i in range(views):
            sleep_time = random.randint(min_watch, max_watch)
            proxy = random_proxy(proxies)
            pool.apply_async(search_and_click,
                             args=(ua, sleep_time, search_string, proxy, proxies, sleep_after))
        pool.close()
        pool.join()
    else:
        ua = UserAgent()
        proxies = get_proxies(ua)
        for i in range(views):
            sleep_time = random.randint(min_watch, max_watch)
            proxy = random_proxy(proxies)
            search_and_click(ua, sleep_time, search_string, proxy, proxies, sleep_after)
I have some questions:
1. How can I make the code automatically get working proxies from an active proxy site?
2. How do I change it to open my website URL instead of a YouTube video?
3. And how do I run it?

Please help me. I'm a noob to Python.
#2
What have you tried and where are you stuck? If you haven't done much programming before, perhaps the project is a bit advanced for right now?
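That said, a few pointers on your three questions.

For (1), the posted code already scrapes proxies from sslproxies.org; the real problem is that many free proxies are dead. Here is a minimal sketch of filtering the scraped list down to proxies that actually answer, assuming the {'ip': ..., 'port': ...} dicts returned by get_proxies() above; the test URL and the timeout are arbitrary choices.

from urllib.request import ProxyHandler, build_opener

def filter_live_proxies(proxies, test_url='https://httpbin.org/ip', timeout=5):
    live = []
    for proxy in proxies:
        address = '%s:%s' % (proxy['ip'], proxy['port'])
        opener = build_opener(ProxyHandler({'http': address, 'https': address}))
        try:
            opener.open(test_url, timeout=timeout)  # a successful fetch means the proxy works
            live.append(proxy)
        except Exception:
            pass  # dead or too slow, skip it
    return live

You would call it once after get_proxies() and pick random proxies from the filtered list.

For (2), you would replace the whole YouTube search-and-click routine with a plain page load. A rough sketch to drop into the script in place of search_and_click, where target_url is a hypothetical config value you would add in place of search_string:

def visit_site(ua, sleep_time, target_url, proxy, proxies, sleep_after):
    # (uses the webdriver, sleep and random imports already at the top of the script)
    options = webdriver.ChromeOptions()
    options.add_argument('--proxy-server=%s:%s' % (proxy['ip'], proxy['port']))
    options.add_argument('user-agent=%s' % ua.random)
    driver = webdriver.Chrome(options=options)
    try:
        driver.get(target_url)  # just load the page; no searching or clicking
        sleep(sleep_time)       # "view" the page for a while
        driver.quit()
    except Exception:
        # the proxy probably failed, so close up and retry with another one
        driver.quit()
        visit_site(ua, sleep_time, target_url, random.choice(proxies),
                   proxies, sleep_after)
        return
    if sleep_after is not None:
        sleep(sleep_after)

For (3): save the script to a file, install the dependencies with pip install selenium beautifulsoup4 fake-useragent, put a chromedriver that matches your Chrome version on your PATH, and run python yourscript.py from a terminal. The script creates config.txt on the first run; edit that file and run it again.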