
Initial Commit. I didn't write this.

master
anon 2 years ago
commit 4b1f66519b
1 changed file with 322 additions and 0 deletions

trexmodkill.py  +322 −0

@@ -0,0 +1,322 @@
import requests
import time
import traceback
import poplib
from email import parser

#
# TODO:
#
# - Fallback site for domain bans x
# - Create new account on ban x
# - Set ratio of sent/received impressions using stats and REEEEEEEEEEEDACTED (other shit to link up to it that's REEEEEEEEEEEDACTED AF)
# - Set accounts under trexurls
# - Create function to check for error messages in text, code it in for neatness
#



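# Account details used for login and re-registration (redacted in this dump).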
firstname = "REEEEEEEEEEEDACTED"
lastname = "REEEEEEEEEEEDACTED"
email = "REEEEEEEEEEEDACTED@REEEEEEEEEEEDACTED.su"
username = "REEEEEEEEEEEDACTED"
password = "REEEEEEEEEEEDACTED"

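# Endpoint that freshly created account IDs get reported back to (see the
# confirmation-mail loop below).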
trexchange = "http://REEEEEEEEEEEDACTED/trexupdate.php"

surfi = 0
addurli = 0
addurl = ['http://REEEEEEEEEEEDACTED', 'http://REEEEEEEEEEEDACTED']
adddesc = 'Description here for your URLS'

sleeptimer = 25 * 60

trexurls = ["http://yibbida.com", "http://autosurfmax.com"]
trexids = {}

boxsep = "-"*99

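# One shared session so login cookies carry across every request below.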
s = requests.Session()


# Turing #7:
# m459h

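# Clamp inputvar to the inclusive range [minv, maxv].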
def clip(minv, maxv, inputvar):
    if inputvar < minv:
        inputvar = minv

    if inputvar > maxv:
        inputvar = maxv

    return inputvar

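# Main loop: for each exchange, log in (re-registering through the signup
# confirmation e-mail if the account no longer works), re-save any deleted
# site entries, try to add a new site, then surf until sleeptimer runs out.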
while True:
    print boxsep

    for i in range(len(trexurls)):


        try:
            pageresponse = False
            lastpageresponse = True

            print "TREX Site:\t\t" + trexurls[i]
            print boxsep

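            # Hard-coded answers to each exchange's "Turing" (captcha) prompt.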
            # ASMAX mod
            if "autosurfmax" in trexurls[i]:
                turingid = 25
                turingcode = "kqubv8g"
            elif "yibbida" in trexurls[i]:
                turingid = 99
                turingcode = "2zaky"

            #
            # Login to site
            #

            loggedin = False

            while loggedin == False:
                print "Logging you in..."

                payload = {'caller': 'index.php', 'tcode': '', 'uid': email, 'pwd': password, 'ckeep': 0}

                headers = {
                    'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:57.0) Gecko/20100101 Firefox/57.0',
                    'Referer': trexurls[i] + "/member.php"
                }

                pagedata = s.post(trexurls[i] + "/login.php", data = payload, headers = headers)

                if "welcome" in pagedata.text.lower():
                    loggedin = True

                # Check response
                if loggedin != True:
                    try:
                        try:
                            # If there is an error
                            pageresponse = pagedata.text.split('<h3 class="badnote">')[1].split('</h3>')[0]
                            print pageresponse
                        except:
                            pageresponse = ""
                        if "unknown page" not in pageresponse:
                            print pageresponse
                        # Re-create the account and fuck them
                        payload = {'ref': '666', 'FirstName': firstname, 'LastName': lastname, 'Handle': username, 'email': email, 'Password': password, 'Password2': password, 'turing': turingcode, 'turidx': turingid, 'signup': 'Sign+me+up'}

                        headers = {
                            'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:57.0) Gecko/20100101 Firefox/57.0',
                            'Referer': trexurls[i] + "/Signup/join.php"
                        }

                        pagedata = s.post(trexurls[i] + "/Signup/join.php", data = payload, headers = headers)

                        try:
                            pageresponse = pagedata.text.split('<h3 class="badnote">')[1].split('</h3>')[0]
                            print pageresponse
                        except:
                            print "Created a new account."

                        try:
                            M = poplib.POP3_SSL('pop.riseup.net', 995)
                            M.user('REEEEEEEEEEEDACTED')
                            M.pass_('REEEEEEEEEEEDACTED')

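                            # Scan roughly the 20 newest messages for the signup confirmation link.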
                            numMessages = len(M.list()[1])
                            for q in range(clip(1, numMessages, numMessages - 20), numMessages):
                                for j in M.retr(q+1)[1]:
                                    #print str(q) + "\t" + j
                                    if j.startswith(trexurls[i] + "/Signup/confirm.php?"):
                                        try:
                                            newuid = j.split("UID=")[1].split("&")[0]
                                            print "New ID: " + newuid
                                            newCC = j.split("CC=")[1]
                                            print "New CC: " + newCC

                                            requests.get(trexchange + "?trexurl=" + trexurls[i] + "&trexuid=" + newuid)
                                        except:
                                            pass
                                        confirmurl = j
                                        print confirmurl
                                        print s.get(confirmurl + "&turing=" + turingcode + "&turidx=" + str(turingid)).text
                                        print "\n"
                            M.quit()
                        except Exception:
                            print "Waiting out error."
                            print traceback.format_exc()

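                        # step=2 apparently primes the surf counter for the fresh account before its ID is stored.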
                        print s.get(trexurls[i] + "/Surf/trextop.php?usr=" + newuid + "&man=0&step=2").text
                        trexids[i] = newuid


                        print boxsep
                    except Exception:
                        print traceback.format_exc()
                else:
                    info = s.get(trexurls[i] + "/members/mygeneral.php").text
                    trexids[i] = info.split('<td width="70%" colspan="3">')[1].split('</td>')[0]


            #
            # Check for deleted sites
            #

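            # Re-save every site listed on mngsites.php; rows without "OK!" have
            # apparently been removed on the exchange's side.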
            try:
                pagedata = s.get(trexurls[i] + "/members/mngsites.php", headers = headers)

                pagesitesfirst = pagedata.text.split('<a href="siteinfoedit.php?t=1&sid=')

                for site in range(1, len(pagesitesfirst)):
                    sitedata = pagesitesfirst[site].split('">')

                    siteid = sitedata[0]
                    print "Site ID:\t" + siteid

                    if "OK!" in pagesitesfirst[site].split("</tr>")[0]:
                        sitedeleted = "Alive"
                    else:
                        sitedeleted = "Deleted"

                    payload = {'Approved': 2, 'sid': siteid, 'SiteorBanner': 0, 'OnExchange': 0, 'GraphicURL': '', 'TipText': '', 'UserStatus': 2, 'UserDesc': adddesc, 'ManualOnly': 0, 'UseDraw': 1, 'DrawPerHour': 9001, 'URL': addurl[addurli], 'Exclusive': 0, 'B1': 'Save+these+changes'}

                    headers = {
                        'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:57.0) Gecko/20100101 Firefox/57.0',
                        'Referer': trexurls[i] + "/siteinfoedit.php?t=1&sid=" + siteid
                    }
                    # ref siteinfoedit.php?t=1&sid= + pageresponse

                    pagedata = s.post(trexurls[i] + "/members/siteinfochange.php?t=1", data = payload, headers = headers)
                    print "Status:\t\t" + sitedeleted
                    print boxsep
            except Exception:
                print traceback.format_exc()


            #
            # Add new site
            #
            payload = {'SiteorBanner': 0, 'UserStatus': 0, 'ManualOnly': 0, 'UseDraw': 1, 'DrawPerHour': 9001, 'TokensAssigned': 9001, 'UserDesc': adddesc, 'URL': addurl[addurli], 'B1': 'Add+this+site!'}

            # ASMAX mod
            if "autosurfmax" in trexurls[i]:
                payload = {'SiteorBanner': 0, 'UserStatus': 0, 'ManualOnly': 0, 'UseDraw': 1, 'DrawPerHour': 9001, 'TokensAssigned': 9001, 'paidsurf': 0, 'Cat1': 0, 'Cat2': 0, 'UserDesc': adddesc, 'URL': addurl[addurli], 'B1': 'Add+this+site!'}

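            # Keep re-posting the add-site form until the exchange either errors out
            # or returns the same response twice; a numeric pageresponse is a freshly
            # assigned site ID.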
            headers = {
                'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:57.0) Gecko/20100101 Firefox/57.0',
                'Referer': trexurls[i] + "/members/mngsites.php"
            }

            while (pageresponse == False or pageresponse.isdigit()) and pageresponse != lastpageresponse:
                lastpageresponse = pageresponse

                pagedata = s.post(trexurls[i] + '/members/siteinfochange.php?t=1', data=payload, headers = headers)
                # /ASMAX mod


                # Check response
                try:
                    # If there is an error
                    pageresponse = pagedata.text.split('<h3 class="badnote">')[1].split('</h3>')[0]
                    print pageresponse

                    if "Disallowed" in pageresponse:
                        addurli += 1
                except:
                    #
                    # Approve site
                    #
                    try:
                        sitenew = "Approving"
                        pageresponse = pagedata.text.split('Return to <a href="siteinfoedit.php?t=1&sid=')[1].split('">"Editing this entry"</a>.')[0]

                        print "Site ID:\t" + pageresponse
                        print "Status:\t\t" + sitenew

                        payload = {'Approved': 1, 'sid': pageresponse, 'SiteorBanner': 0, 'OnExchange': 0, 'GraphicURL': '', 'TipText': '', 'UserStatus': 0, 'UserDesc': adddesc, 'ManualOnly': 0, 'URL': addurl[addurli], 'Exclusive': 0, 'B1': 'Add+this+site!'}

                        headers = {
                            'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:57.0) Gecko/20100101 Firefox/57.0',
                            'Referer': trexurls[i] + "/siteinfoedit.php?t=1&sid=" + pageresponse
                        }
                        # ref siteinfoedit.php?t=1&sid= + pageresponse

                        pagedata = s.post(trexurls[i] + "/members/siteinfochange.php?t=1", data = payload, headers = headers)
                    except:
                        pageresponse = "Unable to add another site."
                        print "Unhandled response."
                print boxsep
        except Exception:
            print "Waiting out error."
            print traceback.format_exc()
            print boxsep
            time.sleep(30)

print "Checked at " + time.strftime('%l:%M%p %z on %b %d, %Y')
print boxsep

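    # Surf phase: poll each exchange's surfbar endpoints until sleeptimer
    # elapses, presumably so the accounts keep earning impression credits.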
    endtime = time.time()
    # Clean this shit up, fucking plz
    while time.time() < endtime + sleeptimer:
        for i in range(len(trexurls)):
            if surfi == 0:
                headers = {
                    'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:57.0) Gecko/20100101 Firefox/57.0',
                    'Referer': trexurls[i] + "/member.php"
                }
                pagedata = s.get(trexurls[i] + "/Surf/Regular.php?ID=" + trexids[i], headers=headers).text
                try:
                    # If there is an error
                    print pagedata.split('<h3 class="badnote">')[1].split('</h3>')[0]
                except:
                    pass
                headers = {
                    'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:57.0) Gecko/20100101 Firefox/57.0',
                    'Referer': trexurls[i] + "/Surf/Regular.php?ID=" + trexids[i]
                }
                pagedata = s.get(trexurls[i] + "/Surf/trexfoot0.php?ID=" + trexids[i] + "&start=1", headers=headers).text
                try:
                    # If there is an error
                    print pagedata.split('<h3 class="badnote">')[1].split('</h3>')[0]
                except:
                    pass
            else:
                headers = {
                    'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:57.0) Gecko/20100101 Firefox/57.0',
                    'Referer': trexurls[i] + "/Surf/trexfoot0.php?ID=" + trexids[i] + "&cat=0"
                }
                pagedata = s.get(trexurls[i] + "/Surf/trextop.php?usr=" + trexids[i] + "&man=0&step=0", headers=headers).text
                try:
                    # If there is an error
                    print pagedata.split('<h3 class="badnote">')[1].split('</h3>')[0]
                except:
                    pass
        time.sleep(30)
        for i in range(len(trexurls)):
            pagedata = s.get(trexurls[i] + "/Surf/trexfoot0.php?ID=" + trexids[i] + "&cat=0", headers=headers).text
            try:
                # If there is an error
                pagedata = pagedata.split('<h3 class="badnote">')[1].split('</h3>')[0]
            except:
                pass
            headers = {
                'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:57.0) Gecko/20100101 Firefox/57.0',
                'Referer': trexurls[i] + "/Surf/trextop.php?usr=" + trexids[i] + "&man=0&step=0"
            }
            pagedata = s.get(trexurls[i] + "/Surf/trextop.php?usr=" + trexids[i] + "&man=0&step=4", headers=headers).text
            try:
                # If there is an error
                print pagedata.split('<h3 class="badnote">')[1].split('</h3>')[0]
            except:
                pass
        time.sleep(10)
        surfi += 1
