# NOTE: lines 146-327 of the original file are not present in this chunk
# (only their line numbers survived extraction).
class NebulaEventHandler(PatternMatchingEventHandler):
    """
    Handle file-system events for scenario deployment scripts.

    Watches a directory for creation and deletion of ``*.sh`` / ``*.ps1``
    scripts.  On creation, the ``participant*.json`` configs next to the
    script are re-mapped onto currently-free ports and the script is run;
    on deletion, the processes recorded in ``current_scenario_pids.txt``
    are forcibly killed.

    Duplicate events are suppressed two ways: a per-path debounce window
    (``timeout_ns``) and a set of paths currently in flight
    (``processing_files``).  Both structures are guarded by ``self.lock``
    because watchdog may dispatch events from its observer thread.
    """

    patterns = ["*.sh", "*.ps1"]

    def __init__(self):
        super(NebulaEventHandler, self).__init__()
        # path -> last handled timestamp, in nanoseconds.
        self.last_processed = {}
        # 5-second debounce window.  Kept as an int because it is compared
        # against time.time_ns() (int); the previous `5 * 1e9` was a float.
        self.timeout_ns = 5 * 1_000_000_000
        # Paths currently being handled, to reject concurrent duplicates.
        self.processing_files = set()
        self.lock = threading.Lock()

    def _should_process_event(self, src_path: str) -> bool:
        """Return False if *src_path* was already handled inside the debounce window."""
        current_time_ns = time.time_ns()
        logging.info("Current time (ns): %s", current_time_ns)
        with self.lock:
            last_time = self.last_processed.get(src_path)
            if last_time is not None:
                logging.info("Last processed time for %s: %s", src_path, last_time)
                if current_time_ns - last_time < self.timeout_ns:
                    return False
            # Record the handling time while still holding the lock.
            self.last_processed[src_path] = current_time_ns
        return True

    def _is_being_processed(self, src_path: str) -> bool:
        """Atomically mark *src_path* as in flight; return True if it already was."""
        with self.lock:
            if src_path in self.processing_files:
                logging.info("Skipping %s as it is already being processed.", src_path)
                return True
            self.processing_files.add(src_path)
            return False

    def _processing_done(self, src_path: str):
        """Clear the in-flight mark set by :meth:`_is_being_processed`."""
        with self.lock:
            # discard() is a no-op when the path is absent.
            self.processing_files.discard(src_path)

    def verify_nodes_ports(self, src_path):
        """
        Re-map the port of every ``participant*.json`` config that sits in the
        same directory as *src_path* to a currently-free port, rewriting each
        node's ``neighbors`` string to match.

        Best-effort: any error is logged and swallowed so the watcher thread
        keeps running.
        """
        import re  # local import: the top of this file is outside this chunk

        # os.path.join(os.path.dirname(parent), os.path.basename(parent)) is
        # an identity round-trip, so use the script's directory directly.
        scenario_path = os.path.dirname(src_path)
        try:
            # First pass: assign a fresh free port to every current port.
            port_mapping = {}
            new_port_start = 50001
            for filename in os.listdir(scenario_path):
                if filename.endswith(".json") and filename.startswith("participant"):
                    file_path = os.path.join(scenario_path, filename)
                    with open(file_path) as json_file:
                        node = json.load(json_file)
                    current_port = node["network_args"]["port"]
                    port_mapping[current_port] = SocketUtils.find_free_port(start_port=new_port_start)
                    new_port_start = port_mapping[current_port] + 1

            # Second pass: apply the mapping to each config and its neighbors.
            for filename in os.listdir(scenario_path):
                if filename.endswith(".json") and filename.startswith("participant"):
                    file_path = os.path.join(scenario_path, filename)
                    with open(file_path) as json_file:
                        node = json.load(json_file)
                    current_port = node["network_args"]["port"]
                    node["network_args"]["port"] = port_mapping[current_port]
                    # Remap every ":<port>" occurrence in a single regex pass.
                    # Sequential str.replace() could remap a port twice (when a
                    # newly assigned port equals another node's old port) and
                    # could corrupt ports sharing a prefix (":5000" matching
                    # inside ":50001").
                    node["network_args"]["neighbors"] = re.sub(
                        r":(\d+)",
                        lambda m: f":{port_mapping.get(int(m.group(1)), int(m.group(1)))}",
                        node["network_args"]["neighbors"],
                    )
                    with open(file_path, "w") as f:
                        json.dump(node, f, indent=4)
        except Exception as e:
            # Was print(); use logging like the rest of the class.
            logging.exception(f"Error processing JSON files: {e}")

    def on_created(self, event):
        """
        Handles the event when a file is created.
        """
        if event.is_directory:
            return
        src_path = event.src_path
        if not self._should_process_event(src_path):
            return
        if self._is_being_processed(src_path):
            return
        logging.info("File created: %s", src_path)
        try:
            self.verify_nodes_ports(src_path)
            self.run_script(src_path)
        finally:
            # Always release the in-flight mark, even if the script fails.
            self._processing_done(src_path)

    def on_deleted(self, event):
        """
        Handles the event when a file is deleted.
        """
        if event.is_directory:
            return
        src_path = event.src_path
        if not self._should_process_event(src_path):
            return
        if self._is_being_processed(src_path):
            return
        logging.info("File deleted: %s", src_path)
        directory_script = os.path.dirname(src_path)
        pids_file = os.path.join(directory_script, "current_scenario_pids.txt")
        logging.info(f"Killing processes from {pids_file}")
        try:
            self.kill_script_processes(pids_file)
            os.remove(pids_file)
        except FileNotFoundError:
            logging.warning(f"{pids_file} not found.")
        except Exception as e:
            logging.exception(f"Error while killing processes: {e}")
        finally:
            self._processing_done(src_path)

    def run_script(self, script):
        """
        Execute *script*: ``.sh`` synchronously via bash (output logged),
        ``.ps1`` fire-and-forget via powershell (output discarded).
        Errors are logged, never raised.
        """
        try:
            logging.info("Running script: %s", script)
            if script.endswith(".sh"):
                result = subprocess.run(["bash", script], capture_output=True, text=True)
                logging.info(f"Script output:\n{result.stdout}")
                if result.stderr:
                    logging.error(f"Script error:\n{result.stderr}")
            elif script.endswith(".ps1"):
                # DEVNULL rather than PIPE: nothing ever reads these pipes, so
                # PIPE could deadlock the child once the OS buffer fills up.
                subprocess.Popen(
                    ["powershell", "-ExecutionPolicy", "Bypass", "-File", script],
                    stdout=subprocess.DEVNULL,
                    stderr=subprocess.DEVNULL,
                )
            else:
                logging.error("Unsupported script format.")
                return
        except Exception as e:
            logging.exception(f"Error while running script: {e}")

    def kill_script_processes(self, pids_file):
        """
        Forcibly kill every PID listed (one per line) in *pids_file*, killing
        each process's children first so they are not orphaned.  All errors,
        including a missing PID file, are logged and swallowed.
        """
        try:
            with open(pids_file) as f:
                pids = f.readlines()
            for pid in pids:
                try:
                    pid = int(pid.strip())
                    if not psutil.pid_exists(pid):
                        logging.warning(f"PID {pid} does not exist.")
                        continue
                    process = psutil.Process(pid)
                    children = process.children(recursive=True)
                    logging.info(f"Forcibly killing process {pid} and {len(children)} child processes...")
                    for child in children:
                        try:
                            logging.info(f"Forcibly killing child process {child.pid}")
                            child.kill()
                        except psutil.NoSuchProcess:
                            logging.warning(f"Child process {child.pid} already terminated.")
                        except Exception as e:
                            logging.exception(f"Error while forcibly killing child process {child.pid}: {e}")
                    try:
                        logging.info(f"Forcibly killing main process {pid}")
                        process.kill()
                    except psutil.NoSuchProcess:
                        logging.warning(f"Process {pid} already terminated.")
                    except Exception as e:
                        logging.exception(f"Error while forcibly killing main process {pid}: {e}")
                except ValueError:
                    logging.exception(f"Invalid PID value in file: {pid}")
                except Exception as e:
                    logging.exception(f"Error while forcibly killing process {pid}: {e}")
        except FileNotFoundError:
            logging.exception(f"PID file not found: {pids_file}")
        except Exception as e:
            logging.exception(f"Error while reading PIDs from file: {e}")