User Controls
I want to make a cript that gathers proxies, selects one at random and...
-
2016-08-04 at 8:58 PM UTCFeeds it to another script. That script will then connect to a remote host and perform some nefarious things. So far i have this. But i am not sure if i should make it into a module or just write the program as a single script. Also i am not sure how to feed the randomly selected proxies to the other script so any help would be appreciated. this is what i have so far.
from gevent import monkey
monkey.patch_all()
import requests
import ast
import copy
import gevent
import sys, re, time, os
import socket
from BeautifulSoup import BeautifulSoup
class find_http_proxy():
''' Will only gather L1 (elite anonymity) proxies
which should not give out your IP or advertise
that you are using a proxy at all '''
def __init__(self, args):
self.proxy_list = []
self.headers = {'User-Agent':'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.154 Safari/537.36'}
self.show_num = args.show
self.show_all = args.all
self.quiet = args.quiet
self.errors = []
self.print_counter = 0
self.externalip = self.external_ip()
def external_ip(self):
req = requests.get('http://myip.dnsdynamic.org/', headers=self.headers)
ip = req.text
return ip
def run(self):
''' Gets raw high anonymity (L1) proxy data then calls make_proxy_list()
Currently parses data from gatherproxy.com and letushide.com '''
print '
[*] Your accurate external IP: %s' % self.externalip
gatherproxy_list = self.gatherproxy_req()
print '
[*] gatherproxy.com: %s proxies' % str(len(gatherproxy_list))
# checkerproxy_list = self.checkerproxy_req()
# print '
[*] checkerproxy.net: %s proxies' % str(len(checkerproxy_list))
#self.proxy_list.append(letushide_list)
self.proxy_list.append(gatherproxy_list)
#self.proxy_list.append(checkerproxy_list)
# Flatten list of lists (1 master list containing 1 list of ips per proxy website)
self.proxy_list = [ips for proxy_site in self.proxy_list for ips in proxy_site]
self.proxy_list = list(set(self.proxy_list)) # Remove duplicates
self.proxy_checker()
def checkerproxy_req(self):
''' Make the request to checkerproxy and create a master list from that site '''
cp_ips = []
try:
url = 'http://checkerproxy.net/all_proxy'
r = requests.get(url, headers=self.headers)
html = r.text
except Exception:
print '[!] Failed to get reply from %s' % url
checkerproxy_list = []
return checkerproxy_list
checkerproxy_list = self.parse_checkerproxy(html)
return checkerproxy_list
def parse_checkerproxy(self, html):
''' Only get elite proxies from checkerproxy '''
ips = []
soup = BeautifulSoup(html)
for tr in soup.findAll('tr'):
if len(tr) == 19:
ip_found = False
elite = False
ip_port = None
tds = tr.findAll('td')
for td in tds:
if ':' in td.text:
ip_found = True
ip_port_re = re.match('(\d{1,3}\.){3}\d{1,3}:\d{1,5}', td.text)
if ip_port_re:
ip_port = ip_port_re.group()
if not ip_port:
ip_found = False
if 'Elite' in td.text:
elite = True
if ip_found == True and elite == True:
ips.append(str(ip_port))
break
return ips
def letushide_req(self):
''' Make the request to the proxy site and create a master list from that site '''
letushide_ips = []
for i in xrange(1,25):
try:
url = 'http://letushide.com/enhancement/http,hap,all/%s/list_of_free_HTTP_High_Anonymity_proxy_servers' % str(i)
r = requests.get(url, headers=self.headers)
html = r.text
ips = self.parse_letushide(html)
# Check html for a link to the next page
if '/enhancement/http,hap,all/%s/list_of_free_HTTP_High_Anonymity_proxy_servers' % str(i+1) in html:
pass
else:
letushide_ips.append(ips)
break
letushide_ips.append(ips)
except:
print '[!] Failed to get a reply from %s' % url
break
# Flatten list of lists (1 list containing 1 list of ips for each page)
letushide_list = [item for sublist in letushide_ips for item in sublist]
return letushide_list
def parse_letushide(self, html):
''' Parse out list of IP:port strings from the html '''
# \d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3} - matches IP addresses
# </a></td><td> - is in between the IP and the port
# .*?< - match all text (.) for as many characters as possible (*) but don't be greedy (?) and stop at the next greater than (<)
raw_ips = re.findall('\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}</a></td><td>.*?<', html)
ips = []
for ip in raw_ips:
ip = ip.replace('</a></td><td>', ':')
ip = ip.strip('<')
ips.append(ip)
return ips
def gatherproxy_req(self):
url = 'http://gatherproxy.com/proxylist/anonymity/?t=Elite'
lines = []
for pagenum in xrange(1,10):
try:
data = 'Type=elite&PageIdx={}&Uptime=0'.format(str(pagenum))
headers = copy.copy(self.headers)
headers['Content-Type'] = 'application/x-www-form-urlencoded'
r = requests.post(url, headers=headers, data=data)
lines += r.text.splitlines()
except Exception as e:
print str(e)
print '[!] Failed: %s' % url
gatherproxy_list = []
return gatherproxy_list
gatherproxy_list = self.parse_gp(lines)
return gatherproxy_list
def parse_gp(self, lines):
''' Parse the raw scraped data '''
gatherproxy_list = []
ip = ''
get_port = False
for l in lines:
# Check if an IP was found on the line prior
if get_port == True:
get_port = False
# GP obsfuscates the port with hex
hex_port = l.split("'")[1]
port = str(int(hex_port, 16))
ip_port = '{}:{}'.format(ip, port)
# Reset IP to nothing
ip = ''
gatherproxy_list.append(ip_port)
# Search for the IP
ip_re = re.search( r'[0-9]+(?:\.[0-9]+){3}', l)
if ip_re:
ip = ip_re.group()
get_port = True
# int('hexstring', 16) converts to decimal. GP uses this for obfuscation
#proxy = '%s:%s' % (l["PROXY_IP"], str(int(l["PROXY_PORT"], 16)))
#gatherproxy_list.append(proxy)
#ctry = l["PROXY_COUNTRY"]
return gatherproxy_list
maybe i can also do it with tor or something and i am overthinking it i am open to suggestions. -
2016-08-05 at 2:22 AM UTCWhat is a cript
-
2016-08-05 at 2:23 AM UTC
-
2016-08-05 at 3:45 AM UTC99 cripts and bloods!!
-
2016-08-05 at 1:33 PM UTC
also I'm writing a proper response not just shitposting
Well ok then. -
2016-08-05 at 1:41 PM UTC
What is a cript
A typo. -
2016-08-05 at 2:08 PM UTCYou're supposed to be oh-so-knowledgeable, but all you really do is ask stuff every five minutes. Can't you figure this stuff out on your own, or do you just feel the overwhelming need to show off, seven times a week?
-
2016-08-05 at 2:22 PM UTC
You're supposed to be oh-so-knowledgeable, but all you really do is ask stuff every five minutes. Can't you figure this stuff out on your own, or do you just feel the overwhelming need to show off, seven times a week?
You're just pissed you can't read a single line of code i posted. I never pretended to be an expert like you and learning new things is something you do if you want to stay ahead. Also, there's a significant difference between the threads i post and " LOL HOW I HACK FACEBOOK" threads. This alone should tell you something about my skill level. Smart people ask questions, idiots just pretend they know things. Also, every five minutes, lol nigger this is the only programming related question i posted that's on the first page of T&T. Also, unlike Lanny and Aldra, i don't have an official education in computer science. I'm an engineer, all compsci things i know i taught myself.
Also, if you are oh-so-knowledgeable, you'd have posted the solution already instead of acting like a little kid stomping your feet and making barely coherent accusations. -
2016-08-05 at 2:24 PM UTC
You're just pissed you can't read a single line of code i posted. I never pretended to be an expert like you and learning new things is something you do if you want to stay ahead. Also, there's a significant difference between the threads i post and " LOL HOW I HACK FACEBOOK" threads. This alone should tell you something about my skill level. Smart people ask questions, idiots just pretend they know things. Also, every five minutes, lol nigger this is the only programming related question i posted that's on the first page of T&T. Also, unlike Lanny and Aldra, i don't have an official education in computer science. I'm an engineer, all compsci things i know i taught myself.
Also, if you are oh-so-knowledgeable, you'd have posted the solution already instead of acting like a little kid stomping your feet and making barely coherent accusations.
C'mon, now. -
2016-08-05 at 2:41 PM UTC
C'mon, now.
Yeah i'd say that too if i knew Sophie was right. -
2016-08-05 at 5:23 PM UTCI think he's trying to troll you sophie. What language is that BTW? I'm curious.
-
2016-08-05 at 5:26 PM UTC
I think he's trying to troll you sophie. What language is that BTW? I'm curious.
Python 2.7. And yeah he's probably trolling i just can't stand spectral. -
2016-08-05 at 6:32 PM UTC
… i just can't stand spectral.
But are you ready to kill yourself yet? -
2016-08-05 at 7:14 PM UTC
But are you ready to kill yourself yet?
What does that have to do with anything? And why would i want to do that, i rather enjoy being alive, besides i can't call you out on your bullshit when i'm dead. -
2016-08-06 at 2:36 AM UTCI think you'd want something like a new class NefariousThings, initialization should take a proxy object and prep the
bullconnection - would be a socket or something. Then a function which actually executes whatever it is you want to execute and forwards it through your proxy. Not sure how to make that really flexible, as in how to execute an sh script and have everything be routed through the proxy. You could of course change the routing on the os or use a virtual machine, but that seems like overkill. You already made get_proxy a class so regardless of how you handle the next step you might as well have it utilize the proxy object. -
2016-08-06 at 3:02 AM UTCYou just utilize an existing proxy network, such as the 4EverProxy network, as an example.
-
2016-08-06 at 5:44 AM UTC
I think you'd want something like a new class NefariousThings, initialization should take a proxy object and prep the
bullconnection - would be a socket or something. Then a function which actually executes whatever it is you want to execute and forwards it through your proxy. Not sure how to make that really flexible, as in how to execute an sh script and have everything be routed through the proxy. You could of course change the routing on the os or use a virtual machine, but that seems like overkill. You already made get_proxy a class so regardless of how you handle the next step you might as well have it utilize the proxy object.
Good point, i actually want a new proxy selected at random each time the script is run, maybe i could chain a couple at random as well but that will require some additional coding. And nah, i won't be changing the routing on the OS that's a bit too much as you say. -
2016-08-06 at 2:27 PM UTC
Good point, i actually want a new proxy selected at random each time the script is run, maybe i could chain a couple at random as well but that will require some additional coding. And nah, i won't be changing the routing on the OS that's a bit too much as you say.
You basically have everything already there, just make a getter method to actually pick the random one and ideally make sure that it's unique. I'd bet there's a python library that can do the heavy lifting for chaining them.
So how do you intend to make calls through the proxy? Did you already have something in mind or are you yet to do that? Curious how this would work in python. -
2016-08-06 at 6:36 PM UTC
You basically have everything already there, just make a getter method to actually pick the random one and ideally make sure that it's unique. I'd bet there's a python library that can do the heavy lifting for chaining them.
So how do you intend to make calls through the proxy? Did you already have something in mind or are you yet to do that? Curious how this would work in python.
The proxies are saved to a list i can pick one quite easily at random. There's a packet injecting and generating module for python but these are http proxies so i can route http traffic through them. I was just making this to learn more about networking in python but i'm thinking of making a server and client module, have the server module interpret http traffic in order to make system calls on the target machine or something. In any case i will let you know what i go with and how i go about it if you're interested. -
2016-08-07 at 12:40 AM UTCWhat's all this for, anyways? To conveniently serve naked pics of 4 year olds in such a way that none of you child molesters can be traced?