-
Notifications
You must be signed in to change notification settings - Fork 1
/
File-Delte-Monitoring.py
188 lines (175 loc) · 8.72 KB
/
File-Delte-Monitoring.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
'''
@author: vladwa
'''
import logging
import sys
import threading
import traceback
from Queue import Queue
from threading import Thread
from time import sleep
from ConfigProcessor import ConfigProcessor
from FileDeleteJob import FileDeleteJob
from SSH import SSH
import time
# Work queue shared between the main thread and all worker threads; each
# item put on it is a one-element list wrapping a job object.
jobs_queue = Queue()
# Append-only bookkeeping of the ids of every enqueued job.
jobs_ids_list = []
from datetime import datetime
from pytz import timezone
import pytz
# Root logger: timestamp/thread-id/level per record, written to a local file
# next to the script.
logging.basicConfig(
    format='%(asctime)s:%(thread)d:%(levelname)s:%(message)s',
    level=logging.INFO, filename="File-Delete-Monitoring.log"
)
def get_epoch_time():
    """Return the current Unix timestamp truncated to whole seconds."""
    now = time.time()
    return int(now)
def get_pst_time():
    """Return the current US/Pacific time formatted as MM_DD_YYYY_HH_MM_SS_TZ.

    Used to build unique, sortable tar-archive file names.
    """
    utc_now = datetime.now(tz=pytz.utc)
    pacific_now = utc_now.astimezone(timezone('US/Pacific'))
    return pacific_now.strftime('%m_%d_%Y_%H_%M_%S_%Z')
def get_job_details(job):
    """Build a loggable, comma-separated summary of a job's attributes.

    Every attribute in the job's ``__dict__`` is rendered as "name: value, ",
    except attributes whose name starts with "ssh_password", which are
    skipped so credentials never reach the logs.

    :param job: any object whose ``__dict__`` holds the job configuration
    :return: single string of "attr: value, " pairs (trailing ", " kept for
             backward compatibility with existing log formats)
    """
    parts = []
    for attr, value in job.__dict__.items():
        if attr.startswith("ssh_password"):
            continue  # never log credentials
        # BUG FIX: the original used str(value or ""), which rendered
        # meaningful falsy values (0, False) as empty strings; only None
        # is blanked now.
        rendered = "" if value is None else str(value)
        parts.append("%s: %s, " % (attr, rendered))
    job_details = "".join(parts)
    # Replaces the Python-2-only `print` statement with a debug log record.
    logging.debug("job_details: %s", job_details)
    return job_details
def check_list_empty(inputList):
    """Return True when *inputList* is empty/falsy, else False.

    Called on the output of the remote tar command: empty output means the
    target file did not exist, so no tar archive was created.

    :param inputList: sequence to test; None counts as empty
    :return: True if empty, False otherwise
    """
    if not inputList:
        # BUG FIX: in the original both logging.info calls sat *after* a
        # return statement and were unreachable; log before returning.
        logging.info("The File to be zipped does not exist , hence not creating tar file!!")
        return True
    return False
def write_log_info(status, outPutMessage):
    """Write one INFO record describing the outcome of a tar run.

    NOTE(review): callers pass check_list_empty()'s result as *status*,
    where True means the command output was empty -- confirm the
    success/error wording here is not inverted.
    """
    template = (
        "Tar file generated successfully : %s"
        if status == True
        else "Error while generating Tar File: %s"
    )
    logging.info(template % (outPutMessage))
def worker():
    """Thread target: pull one job off jobs_queue and run it forever.

    Each queue item is a one-element list containing a job object.  When
    the job is enabled the worker loops indefinitely: on every pass, for
    each configured machine/directory pair it runs a tar-and-remove
    command (tar enabled) or a plain delete (tar disabled) over SSH, then
    sleeps for the job's interval.  Disabled jobs are logged and dropped.
    """
    while True:
        try:
            data = jobs_queue.get()
            job = data[0]
            ssh = SSH()
            interval = job.interval
            enabled = job.enabled
            ssh_machine = job.ssh_machine.split(',')
            ssh_username = job.ssh_username
            ssh_password = job.ssh_password
            directory = job.directory.split(',')
            sudo = job.sudo
            tar = job.tar
            tar_location = job.tar_location
            # BUG FIX: the original referenced a module-level `jobid`
            # leaked from the __main__ for-loop (always the *last* job's
            # id, or a NameError otherwise); use this job's own id.
            jobid = job.jobid
            status, output, error = None, None, None
            epochTime = get_epoch_time()
            if enabled:
                job_details = get_job_details(job)
                logging.info("Job[%s]: Starting job... interval=%s, directory=%s, sudo=%s, tar=%s, tarLocation=%s" % (
                    jobid, interval, directory, sudo, tar, tar_location))
                while True:
                    for ip in ssh_machine:
                        for file in directory:
                            # BUG FIX: this line was commented out in the
                            # original, leaving `command` an undefined name
                            # in both non-tar branches below (the failure
                            # was masked by the broad except).
                            command = "/bin/rm -rf %s" % (file)
                            tarFileName = file.split('/')[-1]
                            # Parent directory of the target; archives are
                            # written alongside the file being removed.
                            tarLocation = file.rsplit('/', 1)[:1][0]
                            pstTime = get_pst_time()
                            # [ -f ... ] guard: only tar when the file exists.
                            tarCommand = "[ -f %s ] && cd %s && tar -cvzf %s_%s.tar.gz %s --remove-files" % (file, tarLocation, tarFileName, pstTime, tarFileName)
                            logging.info("Tar command executed is: %s" % tarCommand)
                            logging.info("Tar files are stored in the location: %s" % tarLocation)
                            status, output, error = None, None, None
                            try:
                                # The four original branches differed only in
                                # (sudo?) runner and (tar?) command; collapse.
                                cmd = tarCommand if tar else command
                                if sudo:
                                    status, output = ssh.run_sudo_command(ssh_username=ssh_username, ssh_password=ssh_password,
                                                                          ssh_machine=ip, command=cmd, jobid=jobid)
                                else:
                                    status, output = ssh.run_command(ssh_username=ssh_username, ssh_password=ssh_password,
                                                                     ssh_machine=ip, command=cmd, jobid=jobid,
                                                                     job_details=job_details)
                                logging.info("Tar Command Executed is : %s , output of tar command : %s" % (tarCommand, output))
                                listStatus = check_list_empty(output)
                                write_log_info(listStatus, output)
                            except ValueError:
                                logging.error("Job[%s]: Could not run command:%s" % (jobid, sys.exc_info()[0]))
                                logging.error(traceback.format_exc())
                                error = str(traceback.format_exc())
                                status = False
                    logging.info("Job[%s]: Sleeping for %s secs..." % (jobid, interval))
                    sleep(interval)
            else:
                logging.info("Job[%s]: Is set to disable state." % jobid)
        except Exception:
            # Top-level thread boundary: log the full traceback and keep
            # the worker alive (narrowed from the original bare except).
            logging.error(traceback.format_exc())
        finally:
            jobs_queue.task_done()
if __name__ == "__main__":
    # Load job definitions from the config file, build one FileDeleteJob
    # per (machine, directory) pair, then fan them out to daemon worker
    # threads via the shared jobs_queue.
    cp = ConfigProcessor()
    config, sections = cp.get_sections("fileDeleteSetting.cfg")
    jobs = []
    for section in sections:
        env = cp.get_config_section_map(config, section)
        enabled = int(env["enabled"])
        if enabled:
            interval = int(env["interval"])
            ssh_username = env["ssh_username"]
            ssh_password = env["ssh_password"]
            ssh_machine = env["ssh_machine"].split(",")
            environment = env["env"]
            directorys = env["directory"].split(",")
            sudo = int(env["sudo"])
            tar = env["tar"]
            tar_location = env["tar_location"]
            for ssh_machines in ssh_machine:
                for directory in directorys:
                    jobid = "%s-%s" % (environment, ssh_machines)
                    job = FileDeleteJob(jobid=jobid, interval=interval,
                                        enabled=enabled, ssh_username=ssh_username, ssh_password=ssh_password, ssh_machine=ssh_machines,
                                        environment=environment, directory=directory, sudo=sudo, tar=tar, tar_location=tar_location)
                    jobs.append(job)
                    for attr, value in job.__dict__.items():
                        # SECURITY FIX: skip the password attribute so the
                        # credential is not written to the log file
                        # (matches the filtering in get_job_details).
                        if attr.startswith("ssh_password"):
                            continue
                        logging.info("%s: %s" % (str(attr or ""), str(value or "")))
                    logging.info("Added job(%s) to queue [%s(environment) ==> %s(ssh_machine)]" % (jobid, environment, ssh_machines))
    logging.info("No of jobs in queue: %s" % len(jobs))
    # One daemon thread per enabled job; daemons die with the main thread.
    for job in jobs:
        if job.enabled:
            job_worker = Thread(target=worker, args=())
            job_worker.setDaemon(True)
            job_worker.setName(job.jobid + job.directory)
            job_worker.start()
    for job in jobs:
        if job.enabled:
            # BUG FIX: the original '"Job[%s]: Enabled" % job.jobid+job.directory'
            # formatted only the jobid and then concatenated the directory onto
            # the end of the message; format the combined id instead.
            logging.info("Job[%s]: Enabled" % (job.jobid + job.directory))
            jobs_ids_list.append(job.jobid)
            jobs_queue.put([job])
            sleep(1)
    # Block until every queued job has been marked done by a worker.
    jobs_queue.join()