Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- import sys
- import pickle
- from threading import Thread
- import paramiko # conda install -c anaconda paramiko
- import os
- from os.path import join
- from time import sleep
- IS_PY2 = sys.version_info < (3, 0)
- if IS_PY2:
- from Queue import Queue
- else:
- from queue import Queue
# Paratera servers in China
#  119.90.38.51    - Beijing Innovation Technology Co., Ltd. (business network)
#  119.90.38.50    - Beijing Innovation Technology Co., Ltd. (business network)
#  139.196.168.185 - Aliyun Computing Co., LTD
#  120.26.142.238  - Aliyun Computing Co., LTD
#  120.76.127.166  - Aliyun Computing Co., LTD
HOSTs = ['119.90.38.51', '119.90.38.50', '139.196.168.185',
         '120.26.142.238', '120.76.127.166']
HOST = HOSTs[2]  # The remote host, choose one
PORT = 2222      # The same port as used by the server

# One SSH transport is shared by all worker threads; each Worker opens its
# own SFTP channel over this single transport.
ssh_trans = paramiko.transport.Transport((HOST, PORT))
print("SSH transport object is created")

keyfile = r"pp569.id_rsa"
private_key = paramiko.RSAKey.from_private_key_file(keyfile)
print("private key object is read")

username = r"pp569"
sleep(5)  # brief pause before authenticating on the fresh transport
ssh_trans.connect(username=username, hostkey=None, password=None, pkey=private_key)
print("SSH transport object has connected to server")

# Local target folder (DOS-style path).  Raw string: the original "h:\guo"
# relied on \g being an invalid escape sequence that Python happens to leave
# alone -- that is deprecated and fragile, so keep the backslash explicit.
target_root = r"h:\guo"
failed_files = []  # remote filenames whose download raised; appended by workers
class Worker(Thread):
    """Daemon thread that downloads files pulled from a shared task queue."""

    def __init__(self, tasks):
        Thread.__init__(self)
        # Each worker gets a private SFTP channel over the shared transport.
        self.sftp_client = paramiko.sftp_client.SFTPClient.from_transport(ssh_trans)
        print("created a sftp client")
        self.tasks = tasks
        self.daemon = True
        self.start()

    def run(self):
        while True:
            filename = self.tasks.get()
            try:
                # Drop the first 19 chars of the remote (unix-style) path,
                # then switch separators for the local DOS-style destination.
                destination = join(target_root, filename[19:]).replace('/', '\\')
                self.sftp_client.get(filename, destination)
            except Exception as exc:
                # Remember the failure but keep this worker alive for the
                # remaining tasks.
                print(exc)
                failed_files.append(filename)
            finally:
                # Always mark the task done -- success or not -- so that
                # Queue.join() in wait_completion() can return.
                self.tasks.task_done()
class ThreadPool:
    """Pool of threads consuming download tasks from a bounded queue."""

    def __init__(self, num_threads):
        # Bounded queue: add_task() blocks once num_threads tasks are
        # pending, throttling the producer to roughly the pool's pace.
        self.tasks = Queue(num_threads)
        for _ in range(num_threads):
            Worker(self.tasks)

    def add_task(self, filename):
        """Add a single download task (a remote filename) to the queue."""
        self.tasks.put(filename)

    def map(self, filenames):
        """Queue each filename in *filenames* for download.

        Bug fix: the original enqueued the entire list once per element
        (``add_task(filenames)``), so workers received lists instead of
        individual filenames and every download failed.
        """
        for filename in filenames:
            self.add_task(filename)

    def wait_completion(self):
        """Block until every queued task has been marked done."""
        self.tasks.join()
if __name__ == "__main__":
    # Load the list of remote filenames to download.
    # NOTE(review): pickle.load executes arbitrary code if the file is
    # untrusted -- only use a downlist.pkl you generated yourself.
    with open("downlist.pkl", 'rb') as infile:
        F = pickle.load(infile)

    # Instantiate a thread pool with 9 worker threads and feed it the whole
    # file list.  map() blocks when the bounded queue fills up, so submission
    # is throttled to the workers' download pace.
    pool = ThreadPool(9)
    pool.map(F)
    pool.wait_completion()

    # Report anything the workers could not download (collected in the
    # module-level failed_files list by Worker.run).
    if failed_files:
        print("failed to download %d file(s):" % len(failed_files))
        for name in failed_files:
            print(name)
Add Comment
Please, Sign In to add comment