#!/usr/bin/env python
"""Scan hosts on port 8080 and record their HTML page titles.

Reads one IP address per line from ``ip_scanate.txt``, issues an HTTP GET to
``http://<ip>:8080`` (1-second timeout), extracts the page ``<title>`` with
BeautifulSoup, and appends ``"<title> <ip>"`` lines to ``hosts_final2.txt``.
"""

import os
import time
import random
import asyncio
import concurrent.futures
from concurrent.futures import (
    Future,
    ProcessPoolExecutor,
    ThreadPoolExecutor,
    as_completed,
)
from threading import Thread

import requests
from bs4 import BeautifulSoup


class unu:
    """Scanner that probes each listed host and logs its page title."""

    def get_title(self):
        """Probe every host in ``ip_scanate.txt``; append results to ``hosts_final2.txt``.

        Hosts that fail to respond (timeout, connection refused, …) are
        reported and skipped; pages without a ``<title>`` element are
        reported as "empty source".
        """
        with open("ip_scanate.txt") as f:
            for line in f:
                host = line.rstrip('\r\n')
                try:
                    # NOTE(review): verify=False disables TLS certificate
                    # validation — presumably intentional for a scanner, but
                    # confirm; it is also moot for plain-http URLs.
                    rechi = requests.get("http://" + host + ":8080",
                                         verify=False, timeout=1)
                    print("Connection Succesful! " + host)
                    print(rechi.status_code)
                    print(" ")
                    con = BeautifulSoup(rechi.content, 'html.parser')
                    title = con.title
                except requests.RequestException:
                    # Was a bare `except:` which also hid programming errors
                    # (e.g. NameError); narrowed to network/HTTP failures.
                    print("TIMED OUT " + host)
                    print(" ")
                    continue
                try:
                    print(title)
                    # `with` closes the handle; the original opened the file
                    # once per host in append mode and never closed any of them.
                    with open('hosts_final2.txt', 'a') as g:
                        g.write(title.string + " " + host + "\n")
                    print("")
                except AttributeError:
                    # Page had no <title> element (title is None).
                    print("empty source")


def multiprocessing_funct():
    """Run the scan in a worker process and propagate any worker exception."""
    obj = unu()
    # Context manager guarantees the pool is shut down; the original created
    # the executor, submitted one task, and leaked both.
    with concurrent.futures.ProcessPoolExecutor(10) as executor:
        future = executor.submit(obj.get_title)
        # .result() blocks until done and re-raises exceptions that the
        # original silently discarded (the future was never inspected).
        future.result()


if __name__ == "__main__":
    # The guard is mandatory with ProcessPoolExecutor: child processes
    # re-import this module, and the original unguarded call would spawn
    # workers recursively on spawn-based platforms (Windows/macOS).
    multiprocessing_funct()
var
This content, along with any associated source code and files, is licensed under The Code Project Open License (CPOL)