A semantic search engine for source code https://bitshift.benkurtovic.com/
Non puoi selezionare più di 25 argomenti Gli argomenti devono iniziare con una lettera o un numero, possono includere trattini ('-') e possono essere lunghi fino a 35 caratteri.
 
 
 
 
 
 

99 righe
2.8 KiB

  1. """
  2. :synopsis: Parent crawler module, which supervises all crawlers.
  3. Contains functions for initializing all subsidiary, threaded crawlers.
  4. """
  5. import logging
  6. import logging.handlers
  7. import os
  8. import Queue
  9. import sys
  10. import time
  11. from threading import Event
  12. from .crawler import GitHubCrawler, BitbucketCrawler
  13. from .indexer import GitIndexer, GitRepository
  14. from ..parser import start_parse_servers
  15. __all__ = ["crawl"]
  16. MAX_URL_QUEUE_SIZE = 5e3
  17. def crawl():
  18. """
  19. Initialize all crawlers (and indexers).
  20. Start the:
  21. 1. GitHub crawler, :class:`crawler.GitHubCrawler`.
  22. 2. Bitbucket crawler, :class:`crawler.BitbucketCrawler`.
  23. 3. Git indexer, :class:`bitshift.crawler.indexer.GitIndexer`.
  24. """
  25. _configure_logging()
  26. parse_servers = start_parse_servers()
  27. time.sleep(5)
  28. repo_clone_queue = Queue.Queue(maxsize=MAX_URL_QUEUE_SIZE)
  29. run_event = Event()
  30. run_event.set()
  31. threads = [GitIndexer(repo_clone_queue, run_event)]
  32. if sys.argv[1:]:
  33. names = sys.argv[1:]
  34. ranks = GitHubCrawler.get_ranks(names)
  35. for name in names:
  36. repo = GitRepository("https://github.com/" + name, name, "GitHub",
  37. ranks[name])
  38. repo_clone_queue.put(repo)
  39. else:
  40. threads += [GitHubCrawler(repo_clone_queue, run_event),
  41. BitbucketCrawler(repo_clone_queue, run_event)]
  42. for thread in threads:
  43. thread.start()
  44. try:
  45. while 1:
  46. time.sleep(0.1)
  47. except KeyboardInterrupt:
  48. run_event.clear()
  49. with repo_clone_queue.mutex:
  50. repo_clone_queue.queue.clear()
  51. for thread in threads:
  52. thread.join()
  53. for server in parse_servers:
  54. server.kill()
  55. def _configure_logging():
  56. # This isn't ideal, since it means the bitshift python package must be kept
  57. # inside the app, but it works for now:
  58. root = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
  59. log_dir = os.path.join(root, "logs")
  60. if not os.path.exists(log_dir):
  61. os.mkdir(log_dir)
  62. logging.getLogger("requests").setLevel(logging.WARNING)
  63. logging.getLogger("urllib3").setLevel(logging.WARNING)
  64. formatter = logging.Formatter(
  65. fmt=("%(asctime)s %(levelname)s %(name)s:%(funcName)s"
  66. " %(message)s"), datefmt="%y-%m-%d %H:%M:%S")
  67. file_handler = logging.handlers.TimedRotatingFileHandler(
  68. "%s/%s" % (log_dir, "app.log"), when="H", interval=1,
  69. backupCount=20)
  70. stream_handler = logging.StreamHandler()
  71. file_handler.setFormatter(formatter)
  72. stream_handler.setFormatter(formatter)
  73. root_logger = logging.getLogger()
  74. root_logger.handlers = []
  75. root_logger.addHandler(file_handler)
  76. root_logger.addHandler(stream_handler)
  77. root_logger.setLevel(logging.NOTSET)
# Script entry point: start the crawl loop when run directly.
if __name__ == "__main__":
    crawl()