from datetime import datetime, timedelta
from pprint import pprint

from celery import shared_task
from dateutil import tz
from django.contrib import messages
from django.http import JsonResponse, HttpResponse
from django.shortcuts import render, redirect
from ttp import ttp

from backend.mongodb import db
from exfo.lib import Exfo, Mikrotik

# Module-level API clients shared by all views.
exfo = Exfo("administrator", "exf0w0rxC@t4dm!n")
exfo.login()
mkt = Mikrotik()
def index(request):
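    """Dashboard view.

    Loads mascot documents from MongoDB, the EXFO API list and SLA summary,
    and MikroTik routes/addresses, then renders backend/index.html. Also runs
    a TTP parsing example that writes an Excel workbook under ./Output/.
    """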
collection = db['mascot']
mascot_details = collection.find({})
rapi = exfo.list_api()
sla = exfo.call_api("sla")
data_to_parse = """
interface Loopback0
description Router-id-loopback
ip address 192.168.0.113/24
!
interface Vlan778
description CPE_Acces_Vlan
ip address 2002::fd37/124
ip vrf CPE1
!
"""
ttp_template = """
interface {{ interface }}
ip address {{ ip }}/{{ mask }}
description {{ description }}
ip vrf {{ vrf }}
"""
    # Sample device config fed to the TTP Excel example below.
    excel_data = """
interface Loopback0
 description Router-id-loopback
 ip address 192.168.0.113/24
!
interface Loopback1
 description Router-id-loopback
 ip address 192.168.0.1/24
!
interface Vlan778
 ip address 2002::fd37/124
 ip vrf CPE1
!
interface Vlan779
 ip address 2002::bbcd/124
 ip vrf CPE2
!
"""
    # TTP template; the group names must match the "path" values used in
    # parser.result(..., table=[...]) below.
    template = """
<group name="loopbacks_new">
interface {{ interface | contains("Loop") }}
 ip address {{ ip }}/{{ mask }}
 description {{ description }}
 ip vrf {{ vrf }}
</group>

<group name="vlans">
interface {{ interface | contains("Vlan") }}
 ip address {{ ip }}/{{ mask }}
 description {{ description }}
 ip vrf {{ vrf }}
</group>
"""
    parser = ttp(data=excel_data, template=template)
    parser.parse()
    # build an Excel workbook from the parsed results and save it under ./Output/
parser.result(
format="excel",
filename="excel_out_test_excel_formatter_update.xlsx",
returner="file",
update=True,
url="./Output/",
table=[
{
"headers": ["interface", "ip", "mask", "vrf", "description"],
"path": "loopbacks_new",
"key": "interface",
"tab_name": "loopbacks_new",
},
{"path": "vlans"},
],
)
# create parser object and parse data using template:
# parser = ttp(data=data_to_parse, template=ttp_template)
# parser.parse()
# # print result in JSON format
# results = parser.result(format='xlsx')[0]
# pprint(results)
try:
mk_ips = mkt.call_remote("ip/route")
mk_address = mkt.call_remote("ip/address")
    except Exception:
mk_ips = []
mk_address = []
    return render(request, 'backend/index.html',
                  {'objs': mascot_details, 'output': rapi.json(), 'sla': sla.json(),
                   'mk_ips': mk_ips, 'mk_address': mk_address})
def remote(request):
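    """Forward the EXFO REST URI given in ?cmd= and render the raw response."""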
cmd = request.GET.get('cmd', None)
# exfo = Exfo("administrator", "exf0w0rxC@t4dm!n")
# exfo.login()
r = exfo.call_remote_api(cmd)
pprint(r.json())
# return JsonResponse(r.json())
return render(request, 'backend/remote_render.html', {'res': r})
def service_status(request):
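    """Render the service-status page for the URL-encoded EXFO URI in ?cmd=;
    ?section= selects which part of the page to display (default: all)."""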
cmd = request.GET.get('cmd', None)
import urllib.parse
section = request.GET.get('section', 'all')
# exfo = Exfo("administrator", "exf0w0rxC@t4dm!n")
# exfo.login()
    if cmd:
        cmd = urllib.parse.unquote(cmd)
    pprint(f"cmd = {cmd}")
r = exfo.call_remote_api(cmd)
pprint(r.json())
# return JsonResponse(r.json())
return render(request, 'backend/service_status.html', {'res': r, 'section': section})
def reports(request):
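    """SLA report page: for every SLA fetch its details, its service instances
    and the status of its most recent test, then render backend/reports2.html."""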
days = range(1,31)
sla = exfo.call_api("sla")
sla_json = sla.json()
sla_results = {}
test_status = {}
service_results = {}
    for r in sla_json['result']:
        sla_uri = r['ids']['sla_uri']
        sla_name = r['ids']['sla_name']
        t = exfo.call_remote_api(sla_uri).json()
        sla_results[sla_name] = t
        service_results[sla_name] = []
        for si in t['result']['service_instances']:
            service_results[sla_name].append(exfo.call_remote_api(si['service_uri']).json()['result'])
        try:
            t2 = exfo.call_remote_api(t['result']['tests'][-1]['test_status_uri']).json()
            test_status[sla_name] = t2
        except Exception:
            test_status[sla_name] = {}
    return render(request, 'backend/reports2.html', {'days': days, 'sla': sla_json, 'sla_results': sla_results,
                                                      'test_status': test_status, 'service_results': service_results})
def dump_api(request):
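    """Collect SLA, service-instance and test-status details from the EXFO API,
    store one document per SLA in the exfo_api collection and return the whole
    collection as JSON."""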
    from bson.json_util import dumps
col = db['exfo_api']
# col.delete_many({})
sla = exfo.call_api("sla")
sla_json = sla.json()
temp = sla_json['result']
for t in temp:
sla_uri = t['ids']['sla_uri']
sla_name = t['ids']['sla_name']
r = exfo.call_remote_api(sla_uri).json()
t['sla_uri_result'] = r['result']
service_result = []
for si in r['result']['service_instances']:
c = exfo.call_remote_api(si['service_uri']).json()['result']
service_result.append(c)
test_instance_class_result = []
test_status_result = []
for si in r['result']['tests']:
c = exfo.call_remote_api(si['test_instance_class_uri']).json()['result']
service = c['service']
target = None
if 'target' in c:
target = c['target']
test_instance_class_result.append(c)
c = exfo.call_remote_api(si['test_status_uri']).json()['result']
            test_status_result.append({
                'sla_name': sla_name,
                'test_instance_id': si['test_instance_id'],
                'service': service,
                'target': target,
                'type_type_name': si['test_type_name'],
                'test_instance_class_id': si['test_instance_class_id'],
                'status': c,
            })
t['test_instance_class_result'] = test_instance_class_result
t['test_status_result'] = test_status_result
t['created'] = datetime.utcnow()
col.insert_one(t)
# pprint(temp)
#col.insert_many(temp)
results = col.find({})
data = dumps(list(results), indent=4)
return HttpResponse(data, content_type='application/json')
@shared_task
def dump_api_task():
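    """Celery task variant of dump_api: collect SLA, service-instance and
    test-status details from the EXFO API into the exfo_api collection."""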
col = db['exfo_api']
# col.delete_many({})
sla = exfo.call_api("sla")
sla_json = sla.json()
temp = sla_json['result']
for t in temp:
sla_uri = t['ids']['sla_uri']
sla_name = t['ids']['sla_name']
r = exfo.call_remote_api(sla_uri).json()
t['sla_uri_result'] = r['result']
service_result = []
for si in r['result']['service_instances']:
c = exfo.call_remote_api(si['service_uri']).json()['result']
service_result.append(c)
test_instance_class_result = []
test_status_result = []
for si in r['result']['tests']:
c = exfo.call_remote_api(si['test_instance_class_uri']).json()['result']
service = c['service']
target = None
if 'target' in c:
target = c['target']
test_instance_class_result.append(c)
c = exfo.call_remote_api(si['test_status_uri']).json()['result']
            test_status_result.append({
                'sla_name': sla_name,
                'test_instance_id': si['test_instance_id'],
                'service': service,
                'target': target,
                'type_type_name': si['test_type_name'],
                'test_instance_class_id': si['test_instance_class_id'],
                'status': c,
            })
t['test_instance_class_result'] = test_instance_class_result
t['test_status_result'] = test_status_result
t['created'] = datetime.utcnow()
col.insert_one(t)
pprint("Dump API ... Finished")
def print_table(request):
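    """Render the static print-table page."""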
return render(request, 'backend/print_table.html')
def dump_fixed_results(request):
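    """Rebuild the fixed_results collection with the last day of EXFO fixed
    test results for every known test type, then redirect or return JSON."""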
    from bson.json_util import dumps
col = db['fixed_results']
col.delete_many({})
tt = exfo.call_remote_api("/API/REST/Test/v1/TypeByName?list_all=false&size=0")
pprint(tt)
tt_json = tt.json()
temp = tt_json['result']
for t in temp:
tn = t['ids']['test_type_name']
        r = exfo.call_remote_api(
            '/API/REST/Test/v1/FixedResults/' + tn
            + '?sections=all&size=0&sort=verifier_id asc,time_stamp desc'
            + '&time_range={"start":"1 days ago","end":"now"}'
        ).json()
#pprint(r)
try:
r0 = r['result']
if len(r0) > 0:
col.insert_many(r0)
except Exception as e:
pprint(e)
# pprint(temp)
#col.insert_many(temp)
    # results = col.find({})
    # data = dumps(list(results), indent=4)
    data = {'msg': 'done'}
redir = request.GET.get('redir', None)
if redir:
messages.success(request, 'Dump Fixed Results')
return redirect(redir)
# return HttpResponse(data, content_type='application/json')
return JsonResponse(data)
import humanize
def con_human(r):
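    """Return a humanized size for numeric result values above 1000;
    non-numeric values are returned unchanged."""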
    try:
        '''
        if 'time' in r['header'].lower():
            return r['results']
        '''
        x = r['results']
        n = int(x)
        if n > 1000:
            return humanize.naturalsize(n, gnu=True)
        return n
    except (TypeError, ValueError):
        return r['results']
def gen_report_notebook():
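    """Join EXFO fixed results with MikroTik ip_address samples and pivot them
    into a per-SLA report table. Returns (pivot_table, flat_dataframe)."""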
import pandas as pd
from_zone = tz.gettz('UTC')
to_zone = tz.gettz('Asia/Bangkok')
c = db['fixed_results'].find()
c2 = db['ip_address']
data = []
for i in c:
#pprint(i['header'])
res = dict(zip(i['header'], i['results']))
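        # time_stamp appears to be nanoseconds since the epoch; convert to
        # seconds before building a UTC datetime, then shift to Asia/Bangkok.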
utc = datetime.fromtimestamp(int(i['ids']['time_stamp'])/1000000000)
utc = utc.replace(tzinfo=from_zone)
yest = utc - timedelta(minutes=4)
dt = utc.astimezone(to_zone)
d = {'header': i['header'], 'results': i['results'], 'output': res, 'params': i['parameters'], 'ts': i['ids']['time_stamp'],
'dt': str(dt), 'test_type_name': i['ids']['fixed_results_url']}
d.update(i['names'])
if not d['sla_name']:
continue
sla_splt = d['sla_name'].split('-')
if len(sla_splt) > 1:
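            # Match pppoe-out interfaces whose names contain both the first and
            # last token of the SLA name (case-insensitive; pppoe-out5 is
            # excluded in the query below).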
reg_cond = f'(?=^pppoe-out)(?=.*{sla_splt[0]})(?=.*{sla_splt[-1]})'
pprint(reg_cond)
            temp_addr = list(
                c2.find({
                    'ts': {'$lte': utc, '$gte': yest},
                    'interface': {'$regex': reg_cond, '$not': {'$regex': '^pppoe-out5'}, '$options': 'i'},
                    'invalid': 'false',
                }).sort("ts", -1)
            )
            pprint("temp addr")
            pprint(d['sla_name'])
            # pprint(temp_addr)
target_address = None
if temp_addr:
target_address = temp_addr[0]
if target_address:
pprint("Found Target")
pprint(target_address)
d.update(target_address)
#res.update(i['ids'])
#res.update(i[])
#pprint(d)
data.append(d)
df0 = pd.DataFrame(data)
df0 = df0.explode(["header", "results"])
    df1 = df0[["header", "results", "dt", "sla_name", "test_display_name", "verifier_name", "network", "address", "interface"]].copy()
    df1 = df1.query('sla_name == sla_name')  # drop rows where sla_name is NaN
    #pprint(df1['sla_name'].unique())
    #df1['results_text'] = df1['results'].apply(con_human)
    df1['results_text'] = df1.apply(con_human, axis=1)
    df1['network_str'] = df1['network'] + " / " + df1['interface']
    table = df1.pivot(index=['sla_name', 'dt', 'verifier_name', 'network_str'],
                      columns=['test_display_name', 'header'],
                      values='results_text').sort_values(by=['sla_name', 'dt'], ascending=[True, False])
    table = table.dropna(how='all', axis=0)
return (table,df1)
def gen_report(request):
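    """Report page: build the pivot table from gen_report_notebook, apply the
    SLA/verifier filters from the query string and optionally export the
    filtered table to an Excel file under /media/."""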
pprint("report notebook ...")
if request.GET.get('clear', None):
return redirect("/backend/dump_fixed_results/?redir=/backend/gen_report/")
try:
table,df = gen_report_notebook()
except Exception as e:
messages.error(request, str(e))
return render(request, 'backend/gen_report.html', {'dump_again': True})
slas = list(df['sla_name'].unique())
vrs = list(df['verifier_name'].unique())
sla_name = request.GET.getlist('sla_name')
vr_name = request.GET.getlist('vr_name')
pprint("--- sla_name ---")
pprint(sla_name)
if len(sla_name) > 0:
sla_filter = ", ".join(f"'{w}'" for w in sla_name)
table = table.query(f"sla_name in ({sla_filter})")
if len(vr_name) > 0:
vr_filter = ", ".join(f"'{w}'" for w in vr_name)
table = table.query(f"verifier_name in ({vr_filter})")
table = table.dropna(how='all', axis=0)
table = table.dropna(how='all', axis=1)
    gen_excel = request.GET.get('genReport', None)
    report_link = None
    if gen_excel:
        import time
        ts = int(time.time())
        fn = f"report_{ts}.xlsx"
        table.to_excel(f'/code/media/{fn}')
        report_link = f'/media/{fn}'
    try:
        mk_ips = mkt.call_remote("ip/route")
        mk_address = mkt.call_remote("ip/address")
    except Exception:
        mk_ips = []
        mk_address = []
    return render(request, 'backend/gen_report.html', {
        'tbl': table.head(50).to_html(
            classes=["table", "table-striped", "table-bordered", "align-middle"],
            table_id="report_tbl"),
        'slas': slas, 'report_link': report_link, 'mk_ips': mk_ips, 'mk_address': mk_address,
        'vrs': vrs, 'sla_name': sla_name, 'vr_name': vr_name})
@shared_task
def mikro_dump_task():
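    """Celery task: snapshot the MikroTik ip/address table into the ip_address
    collection with a UTC timestamp."""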
    # look at pppoe interfaces only (exclude pppoe-out5)
col = db['ip_address']
mk_address = mkt.call_remote("ip/address")
for i in mk_address:
temp = {'ts': datetime.utcnow()}
temp = temp | i
col.insert_one(temp)
pprint("dump ip complete")