__main__.py 1.8 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758
  1. #!/usr/bin/env python
  2. import sys
  3. from optparse import OptionParser
  4. from .__version__ import __version__ as VERSION
  5. from .webtech import WebTech
  6. def split_on_comma(option, opt_str, value, parser):
  7. setattr(parser.values, option.dest, value.split(','))
  8. def main():
  9. """
  10. Main function when running from command line.
  11. """
  12. parser = OptionParser(prog="webtech", version="%prog {}".format(VERSION))
  13. parser.add_option(
  14. "-u", "--urls",
  15. help="url(s) to scan", type="string", action="callback", callback=split_on_comma)
  16. parser.add_option(
  17. "--urls-file", "--ul",
  18. help="url(s) list file to scan", type="string")
  19. parser.add_option(
  20. "--user-agent", "--ua",
  21. help="use this user agent")
  22. parser.add_option(
  23. "--random-user-agent", "--rua", action="store_true",
  24. help="use a random user agent", default=False)
  25. parser.add_option(
  26. "--database-file", "--db",
  27. help="custom database file")
  28. parser.add_option(
  29. "--json", "--oj", action="store_true",
  30. help="output json-encoded report", default=False)
  31. parser.add_option(
  32. "--grep", "--og", action="store_true",
  33. help="output grepable report", default=False)
  34. parser.add_option(
  35. "--update-db", "--udb", action="store_true",
  36. help="force update of remote db files", default=False)
  37. parser.add_option(
  38. "--timeout", type="float", help="maximum timeout for scrape requests", default=10)
  39. (options, _args) = parser.parse_args(sys.argv)
  40. options = vars(options)
  41. if options.get('urls') is None and options.get('urls_file') is None and options.get('update_db') is None:
  42. print("No URL(s) given!")
  43. parser.print_help()
  44. exit()
  45. wt = WebTech(options)
  46. wt.start()
# Invoke the CLI entry point when executed directly (e.g. `python -m webtech`).
if __name__ == "__main__":
    main()