Guest User

Untitled

a guest
Feb 20th, 2018
210
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 3.41 KB | None | 0 0
import sys
import pickle
from threading import Thread
import paramiko # conda install -c anaconda paramiko
import os
from os.path import join
from time import sleep

# Python 2/3 compatibility shim: the thread-safe queue module was renamed
# from ``Queue`` (py2) to ``queue`` (py3); import whichever this
# interpreter provides under the single name ``Queue``.
IS_PY2 = sys.version_info < (3, 0)
if IS_PY2:
    from Queue import Queue
else:
    from queue import Queue
  14. # Paratera servers in China
  15. # :119.90.38.51 #Beijing Innovation Technology Co., Ltd. for business network communications
  16. # :119.90.38.50 #Beijing Innovation Technology Co., Ltd. for business network communications
  17. # :139.196.168.185 #Aliyun Computing Co., LTD
  18. # :120.26.142.238 #Aliyun Computing Co., LTD
  19. # :120.76.127.166 #Aliyun Computing Co., LTD
  20. HOSTs = ['119.90.38.51','119.90.38.50','139.196.168.185','120.26.142.238','120.76.127.166']
  21. HOST = HOSTs[2] # The remote host, choose one
  22. PORT = 2222 # The same port as used by the server
  23. ssh_trans = paramiko.transport.Transport((HOST, PORT))
  24. print("SSH transport object is created")
  25.  
  26. keyfile = r"pp569.id_rsa"
  27. private_key = paramiko.RSAKey.from_private_key_file(keyfile)
  28. print("private key object is read")
  29.  
  30. username = r"pp569"
  31. sleep(5)
  32. ssh_trans.connect(username=username, hostkey = None, password = None, pkey = private_key)
  33. print("SSH transport object has connected to server")
  34.  
  35. target_root="h:\guo" # local target folder
  36. failed_files = []
  37.  
  38. class Worker(Thread):
  39. """ Thread executing tasks from a given tasks queue """
  40. def __init__(self, tasks):
  41. Thread.__init__(self)
  42. ## get sftp_client object
  43. self.sftp_client = paramiko.sftp_client.SFTPClient.from_transport(ssh_trans)
  44. print("created a sftp client")
  45. ##
  46. self.tasks = tasks
  47. self.daemon = True
  48. self.start()
  49.  
  50. def run(self):
  51. while True:
  52. filename = self.tasks.get()
  53. try:
  54. # skip first part of the linux style filename
  55. self.sftp_client.get(filename, join(target_root,filename[19:]).replace('/','\\')) # unix to dos file separator
  56. except Exception as e:
  57. # An exception happened in this thread
  58. print(e)
  59. failed_files.append(filename)
  60. finally:
  61. # Mark this task as done, whether an exception happened or not
  62. # one fail, all fail
  63. self.tasks.task_done()
  64.  
  65. class ThreadPool:
  66. """ Pool of threads consuming tasks from a queue """
  67. def __init__(self, num_threads):
  68. self.tasks = Queue(num_threads)
  69. for _ in range(num_threads):
  70. Worker(self.tasks)
  71.  
  72. def add_task(self, filename):
  73. """ Add a task to the queue """
  74. self.tasks.put(filename)
  75.  
  76. def map(self, filenames):
  77. """ Add a list of tasks to the queue """
  78. for filename in filenames:
  79. self.add_task(filenames)
  80.  
  81. def wait_completion(self):
  82. """ Wait for completion of all the tasks in the queue """
  83. self.tasks.join()
  84.  
  85.  
  86. if __name__ == "__main__":
  87. from random import randrange
  88. from time import sleep
  89.  
  90. # Get file list
  91. with open("downlist.pkl",'rb') as infile:
  92. F = pickle.load(infile)
  93.  
  94. # Instantiate a thread pool with 20 worker threads
  95. pool = ThreadPool(9)
  96.  
  97. # Add the jobs in bulk to the thread pool. Alternatively you could use
  98. # `pool.add_task` to add single jobs. The code will block here, which
  99. # makes it possible to cancel the thread pool with an exception when
  100. # the currently running batch of workers is finished.
  101. pool.map(F)
  102. pool.wait_completion()
Add Comment
Please, Sign In to add comment