#! /usr/bin/python
import subprocess
import datetime
import os
import multiprocessing
# --- Test configuration --------------------------------------------------
s3curl = '/home/kerem/ecs/s3curl.pl'  # path to the s3curl helper script
s3_id = 'ecs'  # credential profile name passed to s3curl via --id
protocol = 'https://'
endpoint = 'object.ecstestdrive.com'  # S3-compatible ECS Test Drive endpoint
bucketName = 'bucket4'  # target bucket for the uploads
filename = '/home/kerem/random_data/3mb_file'  # local file that gets uploaded
filename_s = '3mb_file'  # object-name stem; workers append _<num> to it
# ppid = os.getpid()  # NOTE: leftover from an earlier revision; unused now
def worker(num):
    """Upload `filename` to the bucket as `<filename_s>_<num>` via s3curl.

    Appends the upload's start/end timestamps, elapsed time, and the
    subprocess's stdout/stderr to a per-worker log file `write_log.<num>`.

    num -- integer suffix so each worker process writes a distinct
           object name and log file (e.g. 3mb_file_0, 3mb_file_1, ...).
    """
    cmdString = '%s --id=%s --put=%s -- %s%s.%s/%s_%s' % (s3curl, s3_id,
        filename, protocol, bucketName, endpoint, filename_s, num)
    args = cmdString.split()
    start_dt = datetime.datetime.now()
    # universal_newlines=True makes stdout/stderr come back as str, not
    # bytes, so the text-mode logfile.write() below works on Python 3 too.
    sub_proc = subprocess.Popen(args, stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE, shell=False,
                                universal_newlines=True)
    # communicate() drains both pipes concurrently and waits for the child
    # to exit. Reading stdout.read() then stderr.read() sequentially (the
    # previous approach) can deadlock if the child fills the stderr pipe
    # buffer while we are still blocked on stdout.
    out, err = sub_proc.communicate()
    # Take the end timestamp as soon as the transfer finishes, before any
    # log writing, so `delta` measures only the upload itself.
    stop_dt = datetime.datetime.now()
    delta = stop_dt - start_dt
    logfile_name = 'write_log.%s' % str(num)
    with open(logfile_name, 'a') as logfile:
        logfile.write('start time : %s\n' % start_dt)
        logfile.write('------\n')
        logfile.write(out)
        logfile.write(err)
        logfile.write('------\n')
        logfile.write('end time : %s\n' % stop_dt)
        logfile.write('\n')
        logfile.write('delta time : %s\n' % delta)
if __name__ == '__main__':
    # Fan out 3 concurrent upload workers, one object/log file per index.
    jobs = []
    for i in range(3):
        p = multiprocessing.Process(target=worker, args=(i,))
        jobs.append(p)
        p.start()
    # Explicitly wait for every upload to finish; previously the children
    # were never join()ed and the parent relied on implicit cleanup at exit.
    for p in jobs:
        p.join()