main.py

#!/usr/bin/env python3
#
# EliasDB - Data mining collector example
#
# Copyright 2020 Matthias Ladkau. All rights reserved.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
import schedule
import time
import requests
import json

ELIASDB_URL = "eliasdb1:9090"

# Suppress warnings about the EliasDB server's self-signed certificate.
requests.packages.urllib3.disable_warnings()


def job():
    url = "https://devt.de"

    try:
        now = int(time.time())

        print("Running request for %s - timestamp: %s (%s)" %
              (url, now, time.strftime("%d-%m-%Y %H:%M:%S", time.gmtime(now))))

        # Measure the response time of a simple GET request.
        r = requests.get(url)
        res_time = r.elapsed

        print("  %s -> %s" % (url, res_time))

        result = {
            "key": str(now),
            "kind": "PingResult",
            "url": url,
            "success": True,
            "result": str(res_time),
        }

    except Exception as e:
        print("Error: %s" % e)

        result = {
            "key": str(now),
            "kind": "PingResult",
            "url": url,
            "success": False,
            "result": str(e),
        }

    try:
        # Store the result as a graph node in EliasDB's main partition.
        r = requests.post('https://%s/db/v1/graph/main/n' % ELIASDB_URL,
                          json.dumps([result]), verify=False)

        if r.status_code != 200:
            print("Could not store result: %s" % r.text)

    except Exception as e:
        print("Error storing result: %s" % e)


# Run the collection job every 5 seconds.
schedule.every(5).seconds.do(job)

while True:
    schedule.run_pending()
    time.sleep(1)
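
For reference, the stored PingResult nodes can later be read back out of EliasDB. The minimal sketch below assumes EliasDB's EQL query REST endpoint at /db/v1/query/<partition> and a JSON result containing a "rows" list, as described in the EliasDB documentation; the host and query string are taken from the collector above and may need adjusting to your deployment.

# Minimal sketch: fetch the collected PingResult nodes back from EliasDB.
# Assumption: the EQL query endpoint is /db/v1/query/<partition> and the
# response JSON carries the matched nodes in a "rows" list.
import requests

ELIASDB_URL = "eliasdb1:9090"

requests.packages.urllib3.disable_warnings()

r = requests.get("https://%s/db/v1/query/main" % ELIASDB_URL,
                 params={"q": "get PingResult"}, verify=False)

if r.status_code == 200:
    for row in r.json().get("rows", []):
        print(row)
else:
    print("Query failed: %s" % r.text)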