yanivk1984
10/4/2019 - 4:40 PM

template for a new cmd project

from functions import *  # local helper module: csv_to_json, login_to_back_box, create_device and show_example come from here

import threading
from queue import Queue
import getpass
import argparse
import logging
import sys
from tqdm import tqdm
import time

# argument parsing
parser = argparse.ArgumentParser(description="Add devices to Backbox from a CSV file")
parser.add_argument("-a", "--address", metavar="Backbox_IP", required=True, help="Backbox IP address")
parser.add_argument("-u", "--user", metavar="User_Name", required=True, help="Backbox user name")
parser.add_argument("-f", "--file", metavar="CSV_File", required=True, help="CSV file to load devices from")
parser.add_argument("-d", "--debug_level", type=int, choices=[10, 20, 30, 40, 50], metavar="debug_level", help=argparse.SUPPRESS, default=20)  # argparse with specific choices (standard logging levels)
parser.add_argument("-w", "--workers", type=int, metavar="workers", help=argparse.SUPPRESS, default=5)  # suppress the option from the help menu
parser.add_argument("-p", "--progress_bar", action="store_true", help="Add progress bar (will show printouts only if an error/warning occurred)")  # store_true instead of type=bool: bool("False") is True, so any value passed would have enabled the flag
parser.add_argument("-e", "--show_example", metavar="CSV_example", help="Show CSV file example", type=show_example, action="store")  # hack: argparse calls show_example on the supplied value while parsing; note -a/-u/-f are still required
args = parser.parse_args()
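# a sample invocation (the script file name here is an assumption, not part of the template):
#   python add_devices.py -a 192.168.1.10 -u admin -f devices.csv -p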
file = args.file
user = args.user
address = args.address
debug_level = args.debug_level
workers = args.workers
progress_bar = args.progress_bar
password = getpass.getpass("Please enter password: ")

if progress_bar:  # start the progress bar only if the flag was provided
    progress = tqdm()
    debug_level = 30  # when the progress bar is selected, log only warnings and errors so printouts do not interrupt the bar

# debug configuration
logging.basicConfig(stream=sys.stdout, level=debug_level, format='%(asctime)s - MODULE: %(name)s, THREAD: "%(threadName)s" - %(levelname)s - %(message)s')  # log to stdout
logger = logging.getLogger()
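# with the format above, a record comes out roughly like this (illustrative, not captured output):
#   2019-10-04 16:40:00,123 - MODULE: root, THREAD: "fw-core-01" - INFO - device created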

devices = csv_to_json(file)  # convert the CSV file into a list of device records
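# csv_to_json is defined in the local functions module; a minimal sketch of what it
# might look like (an assumption - the real implementation is not shown in this post):
#
#     import csv
#     def csv_to_json(path):
#         with open(path, newline="") as f:
#             return list(csv.DictReader(f))  # one dict per device row, keyed by the header line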
cookies = login_to_back_box(address=address, user=user, password=password)  # get the login cookies

start = time.time()
total_jobs_run = 0  # total number of jobs completed so far
total_devices = len(devices)  # total number of jobs we need to run
print_lock = threading.Lock()
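# print_lock serializes access to the progress bar and the shared counter below:
# concurrent tqdm updates garble the bar, and += on a shared int is not atomic across threads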

# worker that contains the function we want to run in a thread
def worker_thread(device_, cookies_, address_, q_):
    global total_jobs_run  # shared counter of completed jobs, used to know when to stop adding jobs to the queue
    global total_devices

    job = q_.get()  # block until a job is available in the queue
    logger.debug(f"Number of jobs in the queue before running task: {q_.qsize()}")
    create_device(device_, cookies_, address_)
    with print_lock:  # += on the shared counter is not atomic, so guard it with the lock
        total_jobs_run += 1  # increment the number of jobs we have already run

    if progress_bar:  # use the progress bar only if it was requested via argparse
        global progress
        progress.total = total_devices
        with print_lock:
            progress.update(1)  # advance the bar by the one job this thread just finished (update() adds, it does not set)
            progress.refresh()

    logger.debug(f"TOTAL RUN: {total_jobs_run}")
    logger.debug(f"TOTAL DEVICES: {total_devices}")
    q_.put(1)  # put a job back in the queue so another waiting thread can start
    q_.task_done()  # mark this job as done so that queue.join() knows when all jobs are finished

    # every put() above increments the queue's unfinished-task counter and every task_done()
    # decrements it, so the counter stays stuck at `workers`; once the last device has been
    # processed, drain the counter manually so q.join() can unblock
    if total_devices == total_jobs_run:
        logger.debug(f"TOTAL DEVICES: {total_devices}, TOTAL JOBS RUN: {total_jobs_run}, WORKERS: {workers}, QSIZE: {q_.qsize()}")
        if workers == 1 and q_.qsize() == 1:  # a single worker needs exactly one extra task_done()
            try:  # if the counter somehow reached zero already, do not show an exception
                q_.task_done()
            except ValueError:
                return
            logger.debug(f"QSIZE WHEN DONE: {q_.qsize()}")
        elif workers == q_.qsize():  # with several workers the counter equals the worker count, since every completed job re-queued one more
            while True:  # task_done() does not remove items from the queue, so loop until it raises
                try:
                    q_.task_done()
                except ValueError:  # raised once the counter hits zero - all jobs accounted for
                    return


q = Queue()

# create one thread per device to add it to Backbox; each thread blocks on q_.get() until a job arrives
for device in devices:
    t = threading.Thread(target=worker_thread, args=(device, cookies, address, q))
    t.name = device['deviceName']
    t.daemon = False  # non-daemon (the default): the interpreter waits for these threads to finish instead of killing them when the main thread exits
    t.start()

# kick off execution by queueing `workers` jobs, so `workers` threads start running;
# every worker adds one more job to the queue on completion, keeping the concurrency level constant
for job in range(workers):
    q.put(job)

q.join()  # blocking call: waits until task_done() has been called for every job that was put in the queue; the manual draining in worker_thread is what lets this return
logger.info(f"Job took: {time.time() - start:.2f} seconds")
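# for comparison, the same "at most `workers` concurrent jobs" behavior could be had with a
# thread pool - a minimal sketch of an alternative, not what this template uses:
#
#     from concurrent.futures import ThreadPoolExecutor
#     with ThreadPoolExecutor(max_workers=workers) as pool:
#         for device in devices:
#             pool.submit(create_device, device, cookies, address)
#     # the with-block waits for every submitted job, replacing the queue bookkeeping above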