-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathaionomadlog.py
More file actions
375 lines (293 loc) · 10.6 KB
/
aionomadlog.py
File metadata and controls
375 lines (293 loc) · 10.6 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
#!/usr/bin/env python3
# https://stackoverflow.com/a/37420223
# a dirty hack to make KeyboardInterrupt work again
import signal
# Restore the default SIGINT handler so Ctrl+C kills the process immediately
# instead of raising KeyboardInterrupt inside the asyncio event loop.
signal.signal(signal.SIGINT, signal.SIG_DFL)
import asyncio
import base64
import fnmatch
import json
import random
import sys
from typing import Dict, List, Union
from uuid import uuid4
# Third-party dependencies: HTTP client, async sqlite driver, CLI framework.
import aiohttp
import aiosqlite
import click
class TaskList:
    """ORM-like task store backed by a single sqlite ``task`` table.

    Must be instantiated as ``await TaskList.init(...)`` because opening the
    database connection requires a running event loop.
    """

    # Column name -> SQLite column type for the ``task`` table.  The dict
    # order also defines the column order used by INSERT statements.
    FIELDS = {
        "uuid": "TEXT",
        "alloc_id": "TEXT",
        "state": "TEXT",
        "name": "TEXT",
        "type": "TEXT",
        "offset": "TEXT",
        "color": "TEXT",
    }

    def __init__(self, mask: str, db: str):
        self.mask = mask
        # NOTE: ``init`` replaces this string with a live aiosqlite
        # connection; the raw value is only kept until then.
        self.db = db

    @classmethod
    async def init(cls, mask: str, db: str):
        """The only way to properly instantiate the class.

        Keyword Arguments:
            mask {str} -- glob-like task name pattern
            db {str} -- sqlite3-compatible database name

        Returns:
            TaskList -- class instance
        """
        # Use cls(...) rather than TaskList(...) so subclasses work too.
        self = cls(mask, db)
        self.db = await aiosqlite.connect(db)
        # Row objects behave like dicts, so rows can be splatted as **kwargs.
        self.db.row_factory = aiosqlite.Row
        fields_str = ", ".join(f"{k} {v}" for k, v in cls.FIELDS.items())
        await self.db.execute(f"CREATE TABLE IF NOT EXISTS task({fields_str})")
        return self

    async def refresh_all(self, tasks: List[Dict]) -> None:
        """Fully refresh the db: drop all rows and insert the new tasks.

        Arguments:
            tasks {List[Dict]} -- list of task dicts keyed by FIELDS names
        """
        await self.db.execute("DELETE FROM task")
        columns = ", ".join(self.FIELDS)
        placeholders = ", ".join(f":{k}" for k in self.FIELDS)
        # Fixed: the original f-string concatenation produced
        # "...)VALUES(..." with no separating space, and a leftover
        # debug print(tasks) followed the insert.
        await self.db.executemany(
            f"INSERT INTO task ({columns}) VALUES ({placeholders})",
            tasks,
        )

    async def load(self) -> List[aiosqlite.Row]:
        """Load a list of all tasks.

        Returns:
            List[aiosqlite.Row] -- tasks. Behaves like a List[Dict]
        """
        async with self.db.execute("SELECT * FROM task") as cursor:
            tasks = await cursor.fetchall()
        return tasks

    async def update_offset(self, uuid: str, offset: Union[str, int]) -> None:
        """Update a given task offset to eliminate old logs.

        Arguments:
            uuid {str} -- task uuid
            offset {Union[str, int]} -- new offset value
        """
        await self.db.execute("UPDATE task SET offset=? WHERE uuid=?", (offset, uuid))
class NomadLogger:
    """Async streamer of Nomad task logs to stdout.

    Intended to be used as an async context manager (``async with``) so the
    task database and the HTTP session are set up and torn down properly.
    """

    def __init__(
        self,
        host: str,
        port: int,
        mask: str,
        db: str,
        idle_time: Union[int, float],
        max_log_length: int,
    ):
        """Async log streamer.

        Arguments:
            host {str} -- Nomad host
            port {int} -- Nomad port
            mask {str} -- glob-like task name pattern

        Keyword Arguments:
            db {str} -- sqlite database name (default: {":memory:"})
            idle_time {Union[int, float]} -- time to idle after finishing a batch
            max_log_length {int} -- tails log output if it's longer than this value
        """
        self.host = host
        self.port = port
        self._session = aiohttp.ClientSession()
        self._base_url = f"http://{host}:{port}/v1"
        self._allocations_url = f"{self._base_url}/allocations"
        self._logs_url = f"{self._base_url}/client/fs/logs"
        self._idle_time = idle_time
        self._max_log_length = max_log_length
        self._db = db
        self.mask = mask

    async def __aenter__(self):
        """Async context manager entry point.

        Because you can't 'await' inside the non-async __init__ method.

        Returns:
            class instance
        """
        self.tasks = await TaskList.init(mask=self.mask, db=self._db)
        await self.refresh_sources()
        return self

    async def __aexit__(self, *args, **kwargs):
        """Async context manager exit point.

        Explicitly discard open handlers (just in case).
        """
        await self._session.close()

    async def _fetch_tasks(self) -> List[Dict]:
        """Request allocations and extract tasks matched with glob-like pattern.

        Returns:
            List[Dict] -- a list of tasks
        """
        # 'async with' releases the response connection back to the pool.
        # (A leftover debug block that dumped every response to
        # allocations.json via aiofiles was removed here.)
        async with self._session.get(self._allocations_url) as response:
            allocations = await response.json()
        tasks = []
        for allocation in allocations:
            for task_name, task_data in allocation["TaskStates"].items():
                # fnmatch allows matching Unix shell-style wildcards: * ? [seq] [!seq]
                if fnmatch.fnmatch(name=task_name, pat=self.mask):
                    # Pick a random bright ANSI foreground color (91-97)
                    # so each task's output is visually distinct.
                    color_code = random.choice(range(1, 8))
                    color = f"\u001b[9{color_code}m"
                    tasks.append(
                        {
                            "uuid": str(uuid4()),
                            "alloc_id": allocation["ID"],
                            "state": task_data["State"],
                            "name": task_name,
                            "type": "stderr",
                            "offset": 0,
                            "color": color,
                        }
                    )
        return tasks

    async def _fetch_log(
        self,
        uuid: str,
        alloc_id: str,
        state: str,
        name: str,
        type: str,
        offset: str,
        color: str,
        **kwargs,
    ) -> dict:
        """Send GET request, fix json response and collect data.

        Currently takes only the last chunk of the response to save time.

        Arguments:
            uuid {str} -- task uuid
            alloc_id {str} -- task alloc_id
            state {str} -- task state
            name {str} -- task name
            type {str} -- task type ("stdout"/"stderr")
            offset {str} -- task offset
            color {str} -- task color (ANSI escape prefix)

        Returns:
            dict -- a dict of values to be printed (empty if undecodable)
        """
        url = f"{self._logs_url}/{alloc_id}?task={name}&type={type}&offset={offset}"
        async with self._session.get(url) as response:
            raw_text = await response.text()
        # The endpoint may return several concatenated JSON frames;
        # scan backwards for the start of the last frame and keep only it.
        for i, char in enumerate(raw_text[::-1]):
            if char == "{":
                raw_text = raw_text[-i - 1 :]
                break
        try:
            log_json = json.loads(raw_text)
            data = log_json.get("Data", "")
            # Nomad ships the log payload base64-encoded.
            text = base64.b64decode(data).decode("utf-8").strip()
            filename = log_json.get("File", "Unknown file")
            offset = log_json.get("Offset", 0)
            if int(offset) > 0:
                # Persist the new offset so the next poll skips old logs.
                await self.tasks.update_offset(uuid, offset)
            log_data = {
                "alloc_id": alloc_id,
                "state": state,
                "text": text,
                "filename": filename,
                "offset": offset,
                "color": color,
            }
        except json.JSONDecodeError:
            log_data = {}
        return log_data

    async def refresh_sources(self) -> None:
        """Refresh allocations, extract and save new tasks."""
        filtered_tasks = await self._fetch_tasks()
        await self.tasks.refresh_all(filtered_tasks)

    async def print(self, log: dict) -> None:
        """Print [arguably] pretty logs.

        Arguments:
            log {dict} -- a dict with text, color, offset, and filename
        """
        if log.get("text"):
            color = log["color"]
            color_stop = "\u001b[0m"
            alloc_id = log["alloc_id"]
            state = log["state"].upper()
            filename = log["filename"]
            offset = log["offset"]
            delimiter = "=" * 5
            text = log["text"]
            text_length = len(text)
            if text_length > self._max_log_length:
                text = text[-self._max_log_length :]
                text = (
                    f"... Log output truncated because too long ({text_length} chars) ...\n"
                    f"{text}"
                )
            # Fixed: the header used to print the literal "(unknown)"
            # although the filename was already extracted above.
            print(
                f"{color}{state} {delimiter} {alloc_id} {delimiter} {filename} @offset {offset} {delimiter}\n"
                f"{text}{color_stop}"
            )

    async def stream_batch(self) -> None:
        """The main method of the logger.

        Load a list of tasks and run requests to them asynchronously;
        print the results as each request completes.
        """
        tasks = await self.tasks.load()
        aiotasks = []
        for task_data in tasks:
            coroutine = self._fetch_log(**task_data)
            aiotask = asyncio.create_task(coroutine)
            aiotasks.append(aiotask)
        for completed_task in asyncio.as_completed(aiotasks):
            completed_task_data = await completed_task
            await self.print(completed_task_data)

    async def idle(self) -> None:
        """Pause execution to prevent aggressive request spam."""
        await asyncio.sleep(self._idle_time)
async def main(host, port, mask, db, idle_time, max_log_length) -> None:
    """Stream matching Nomad task logs forever, idling between batches."""
    async with NomadLogger(host, port, mask, db, idle_time, max_log_length) as streamer:
        # Infinite poll loop; the process exits via Ctrl+C (SIGINT default).
        while True:
            await streamer.stream_batch()
            await streamer.idle()
@click.command()
@click.argument("mask")
@click.option("-h", "--host", required=True, help="Nomad host")
# type=click.INT added for consistency with the other typed options;
# the value is only interpolated into a URL, so callers are unaffected.
@click.option("-p", "--port", required=True, type=click.INT, help="Nomad port")
@click.option(
    "--db", default=":memory:", show_default="in-memory db", help="SQLite db path",
)
@click.option(
    "--idle",
    "--idle-time",
    "idle_time",
    default=1,
    show_default=True,
    type=click.FLOAT,
    help="Seconds to idle after each batch",
)
@click.option(
    "--len",
    "--max-log-length",
    "max_log_length",
    default=10000,
    show_default=True,
    type=click.INT,
    help="Max number of latest characters printed by any task",
)
def runlogger(host, port, mask, db, idle_time, max_log_length):
    """MASK\t\tGlob-like mask to filter tasks
    """
    # Streaming every task is expensive; ask for confirmation before
    # proceeding with an empty or catch-all mask.
    if not mask or mask == "*":
        prompt = input(
            "\n\t* WARNING *\n\n"
            "It is highly recommended to apply a mask.\n"
            # Fixed user-facing typo: "perfomance" -> "performance".
            "Streaming all tasks may lead to VERY poor performance.\n"
            "Proceed anyway?\tY/N\n"
        )
        if prompt.lower() not in ("y", "yes"):
            sys.exit("Exiting. Please, restart providing a mask")
    asyncio.run(main(host, port, mask, db, idle_time, max_log_length))
# Script entry point: click parses argv and invokes runlogger.
if __name__ == "__main__":
    runlogger()