Report is now in .json format
This commit is contained in:
parent 1f0d8c3032
commit c509637865
4 changed files with 34 additions and 27 deletions
@@ -3,21 +3,22 @@ const http = require('http')
const fs = require('fs')

async function main_loop() {
    json_object = []
    for (let i = 0; i < times; i++) {
        const url = await url_generator(domains, mode, log)
        try {
            const response = await fetch(url)
            console.log(`${url} exists!`)
            fs.appendFile("JS_report.txt", "\n" + url + " | STATUS_CODE: " + String(response.statusCode), function(err) {if (err) throw err})
            json_object.push(`{"website_url":"${url}","response_type":"SUCCESS","response_code":"${String(response.statusCode)}","response_details":"${String(response.statusMessage)}"}`)
        }
        catch(e) {
            if (e.errno != 'ENOTFOUND') {
                console.log(`${url} exists!`)
                fs.appendFile("JS_report.txt", "\n" + url + " | ERROR_CODE: " + e.errno, function(err) {if (err) throw err})
                json_object.push(`{"website_url":"${url}","response_type":"ERROR","response_code":"${String(e.errno)}","response_details":"${String(e.syscall)}"}`)
            }
        }
    }
    fs.appendFile("JS_report.txt", "\n---\n", function(err) {if (err) throw err})
    fs.appendFile(report_file, '[' + String(json_object) + ']', function(err) {if (err) throw err})
    console.log('\nFinished at ' + String(new Date().getHours()) + 'h' + String(new Date().getMinutes()) + 'm')
}

@@ -35,42 +36,40 @@ function url_generator(domains, mode, log) {

function fetch(url, options = {}) {
    return new Promise((resolve, reject) => {
        if (!url) return reject(new Error('URL was not provided'))
        if (!url) return reject(new Error('URL was not provided')) //Cannot happen, line may end up getting removed

        const { body, method = 'GET', ...restOptions } = options
        const {body, method = 'GET', ...restOptions} = options
        const client = url.startsWith('https') ? https : http
        const request = client.request(url, { method, ...restOptions }, (res) => {
            let chunks = ''
        const request = client.request(url, {method, ...restOptions}, (res) => {
            res.setEncoding('utf8')
            let chunks = ''
            res.on('data', (chunk) => {
                chunks += chunk
            })
            res.on('end', () => {
                resolve({ statusCode: res.statusCode, body: chunks })
                resolve({statusCode: res.statusCode, statusMessage: res.statusMessage})
            })
        })
        request.on('error', (err) => {
            reject(err)
        })
        if (body) {
            request.setHeader('Content-Length', body.length)
            request.write(body)
        }
        request.end()
    })
}

const times = process.argv.indexOf('-t') > -1 ? Math.round(Number(process.argv[process.argv.indexOf('-t') + 1])) : 3000
if (isNaN(times)) return console.error("-t argument expected a number!")
const domains = process.argv.indexOf('-d') > -1 ? process.argv[process.argv.indexOf('-d') + 1].split(',') : ['.com', '.net', '.edu', '.gov', '.cn', '.org']
const domains = process.argv.indexOf('-d') > -1 ? process.argv[process.argv.indexOf('-d') + 1].split(',') : ['.co', '.com', '.net', '.edu', '.gov', '.cn', '.org', '.cc']
const mode = process.argv.indexOf('-m') > -1 ? process.argv[process.argv.indexOf('-m') + 1].split(',') : ['http']
const log = process.argv.indexOf('-l') > -1

const report_file = "JS_report_" + String(new Date().getUTCDate()) + String(new Date().getHours()) + String(new Date().getMinutes()) + ".json"

process.stdout.write(`\nI am going to look for images through ${times} random URLs with the following domains: `)
console.log(domains)
process.stdout.write("These URLs will use the following protocols: ")
console.log(mode)
console.log('Started at ' + String(new Date().getHours()) + 'h' + String(new Date().getMinutes()) + 'm\n')

fs.appendFile("JS_report.txt", "---", function(err) {if (err) throw err})
fs.open(report_file, "w", function(err) {if (err) throw err})
main_loop()
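
For reference, the entries above are accumulated as pre-formatted JSON strings and wrapped with `'[' + String(json_object) + ']'` when the report is written. Below is a minimal sketch of the same report assembled from plain objects with `JSON.stringify` instead; this is an alternative, not what the commit ships, and the helper names and the `reportFile` parameter are illustrative only.

```js
const fs = require('fs')

// Sketch only: the same four report fields as in the diff above,
// collected as plain objects and serialized in one step.
const entries = []

function recordSuccess(url, response) {
    entries.push({
        website_url: url,
        response_type: 'SUCCESS',
        response_code: String(response.statusCode),
        response_details: String(response.statusMessage)
    })
}

function recordError(url, e) {
    entries.push({
        website_url: url,
        response_type: 'ERROR',
        response_code: String(e.errno),
        response_details: String(e.syscall)
    })
}

// reportFile would be the same timestamped "JS_report_<day><hour><minute>.json" path.
function writeReport(reportFile) {
    fs.writeFile(reportFile, JSON.stringify(entries), (err) => { if (err) throw err })
}
```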
@@ -4,19 +4,20 @@ import datetime
import urllib.request

def main_loop():
    json_object = []
    for i in range(times):
        url = url_generator(domains, log)
        try:
            response = urllib.request.urlopen(url)
            print(url + " exists!")
            f.write("\n" + url + " | STATUS_CODE: " + str(response.getcode()))
            json_object.append('{"website_url":"' + url + '","response_type":"SUCCESS","response_code":"' + str(response.getcode()) + '","response_details":"Server seems to be ' + str(response.info()["Server"]) + '"}')
        except Exception as e:
            if "[Errno 11001]" in str(e): continue
            if "[" in str(e) and "]" in str(e): e = str(e)[str(e).index("[") + 1:str(e).index("]")] + ": " + str(e)[str(e).index("]") + 2:][:-1]
            print(url + " exists!")
            f.write("\n" + url + " | ERROR_CODE: " + str(e))
            err_code = str(e)[str(e).index("[") + 1:str(e).index("]")] if "[" in str(e) and "]" in str(e) else "NONE FOUND"
            json_object.append('{"website_url":"' + url + '","response_type":"ERROR","response_code":"' + err_code + '","response_details":"' + str(e).replace("\\", "") + '"}')

    f.write("\n---\n")
    f.write(str(json_object).replace("'", ""))
    f.close()
    print("Finished at " + str(datetime.datetime.now().time())[0:5].replace(":", "h") + "m")

@@ -35,7 +36,7 @@ try:
except:
    print("-t argument expected a number!")
    sys.exit()
domains = sys.argv[sys.argv.index('-d') + 1] if '-d' in sys.argv else ['.com', '.net', '.edu', '.gov', '.cn', '.org']
domains = sys.argv[sys.argv.index('-d') + 1] if '-d' in sys.argv else ['.co', '.com', '.net', '.edu', '.gov', '.cn', '.org', '.cc']
mode = sys.argv[sys.argv.index('-m') + 1].split(",") if '-m' in sys.argv else ['http']
log = '-l' in sys.argv

@@ -43,6 +44,5 @@ print("\nI am going to look for images through " + str(times) + " random URLs wi
print("These URLs use the following protocols: " + str(mode))
print("Started at " + str(datetime.datetime.now().time())[0:5].replace(":", "h") + "m")

f = open("PY_report.txt", "a+")
f.write("---")
f = open("PY_report_" + str(datetime.datetime.now().strftime("%d%H%M")) + ".json", "a+")
main_loop()

@@ -1,6 +1,6 @@
# Website-Finder

Website-Finder is a collection of light scripts, written in various programming languages with no need for external libraries, that find websites of all sorts for you and compile reports on them as automatically generated text files.
Website-Finder is a collection of light scripts, written in various programming languages with no need for external libraries, that find websites of all sorts for you and compile reports on them as automatically generated json files.

## REQUIREMENTS

@@ -53,3 +53,7 @@ A: As far as I am aware, nope! However, the reports are generated differently de

Q: Why does the "-m" argument default to "http" rather than "https"?
A: "http" requests receive more status codes and fewer error codes than "https" requests. I suspect it's because some websites don't support "https" very well, even in the current year.

## TO DO

Second-level domains
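
Because the reports are now plain JSON arrays, they can be read back with any JSON parser. A short consumer sketch, assuming a hypothetical report name (actual files are timestamped, e.g. `JS_report_<day><hour><minute>.json`) and the four fields used throughout this commit:

```js
const fs = require('fs')

// Hypothetical file name; real reports are timestamped by the scripts.
const report = JSON.parse(fs.readFileSync('JS_report_152230.json', 'utf8'))

for (const entry of report) {
    console.log(`${entry.website_url} -> ${entry.response_type} (${entry.response_code}): ${entry.response_details}`)
}
```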
@@ -1,21 +1,23 @@
require 'net/http'
require 'json'

def main_loop
  json_object = []
  TIMES.times do
    url = url_generator(DOMAINS, MODE)
    puts(url) if LOG
    begin
      response = Net::HTTP.get_response(URI(url))
      puts("#{url} exists!")
      File.open("RB_report.txt", 'a+') {|f| f.write("\n#{url} | STATUS_CODE: #{response.code} | DETAILS: #{response.message}")}
      json_object << Hash["website_url" => url, "response_type" => "SUCCESS", "response_code" => response.code, "response_details" => response.message]
    rescue Exception => e # Unlike JS/PY, the number of existing websites that raise exceptions is small
      if e.class != SocketError
        puts("#{url} exists!")
        File.open("RB_report.txt", 'a+') {|f| f.write("\n#{url} | ERROR_CODE: #{e.class.to_s} | DETAILS: #{e.to_s}")}
        json_object << Hash["website_url" => url, "response_type" => "ERROR", "response_code" => e.class.to_s, "response_details" => e.to_s]
      end
    end
  end
  File.open("RB_report.txt", 'a+') {|f| f.write("\n---\n")}
  File.open(REPORT_FILE, 'a+') {|f| f << json_object.to_json} if json_object.any?
  puts("Finished at #{Time.new.hour}h#{Time.new.min}m\n")
end

@@ -27,13 +29,15 @@ def url_generator(domains, mode)
end

TIMES = ARGV.include?('-t') ? ARGV[ARGV.index("-t") + 1].to_i : 3000
DOMAINS = ARGV.include?('-d') ? ARGV[ARGV.index("-d") + 1].split(",") : ['.com', '.net', '.edu', '.gov', '.cn', '.org']
DOMAINS = ARGV.include?('-d') ? ARGV[ARGV.index("-d") + 1].split(",") : ['.co', '.com', '.net', '.edu', '.gov', '.cn', '.org', '.cc']
MODE = ARGV.include?('-m') ? ARGV[ARGV.index("-m") + 1].split(",") : ['http']
LOG = ARGV.index("-l").class == Integer

REPORT_FILE = "RB_report_#{Time.new.day}#{Time.new.hour}#{Time.new.min}.json"

puts("\nI am going to look for images through #{TIMES} random URLs with the following domains: #{DOMAINS}")
puts("These URLs will use the following protocols: #{MODE}")
puts("Started at #{Time.new.hour}h#{Time.new.min}m\n")

File.open("RB_report.txt", 'a+') {|f| f.write("---")}
File.open(REPORT_FILE, 'a+')
main_loop