
fix redis

Tum, 8 months ago
commit ca78c91b48

+ 4 - 3
Dockerfile

 # syntax=docker/dockerfile:1
-FROM python:3 as base
+FROM python:3.11 as base
 ENV PYTHONDONTWRITEBYTECODE=1
 ENV PYTHONUNBUFFERED=1
 RUN apt-get update && apt-get install apt-transport-https
 RUN apt-get install -y libjpeg62 libjpeg62-turbo-dev  zlib1g-dev gettext entr poppler-utils gettext xfonts-thai vim

-RUN wget ftp://ftp.psu.ac.th/pub/thaifonts/sipa-fonts/*ttf -P /usr/share/fonts/truetype/thai
-COPY fonts/*ttf  /usr/share/fonts/truetype/thai/
+#RUN wget ftp://ftp.psu.ac.th/pub/thaifonts/sipa-fonts/*ttf -P /usr/share/fonts/truetype/thai
+#COPY fonts/*ttf  /usr/share/fonts/truetype/thai/

 RUN \
         echo "Installing Node and Yarn" && \
...
 FROM base as install_package
 WORKDIR /code
 COPY requirements.txt /code/
+RUN  python -m pip install --upgrade pip
 RUN  pip install -r requirements.txt
 COPY app /code/
 #RUN chmod a+x server-entrypoint.sh

BIN
app/Output/excel_out_test_excel_formatter_update.xlsx


+ 30 - 27
app/backend/views.py

 
     rapi = exfo.list_api()
     sla = exfo.call_api("sla")
-   
+
     data_to_parse = """
     interface Loopback0
      description Router-id-loopback
...
     # results = parser.result(format='xlsx')[0]
     # pprint(results)
     try:
-        mk_ips = mkt.call_remote("ip/route") 
-        mk_address = mkt.call_remote("ip/address") 
+        mk_ips = mkt.call_remote("ip/route")
+        mk_address = mkt.call_remote("ip/address")
     except:
         mk_ips = []
         mk_address = []
...
     return render(request, 'backend/print_table.html')


-    
+
 def dump_fixed_results(request):
     from bson.json_util import dumps
     from bson.json_util import loads
...
     from_zone = tz.gettz('UTC')
     to_zone = tz.gettz('Asia/Bangkok')
-    
+
     c = db['fixed_results'].find()
     c2  = db['ip_address']

...
         yest = utc - timedelta(minutes=4)
         dt = utc.astimezone(to_zone)

-        d = {'header': i['header'], 'results': i['results'], 'output': res, 'params': i['parameters'], 'ts': i['ids']['time_stamp'], 
+        d = {'header': i['header'], 'results': i['results'], 'output': res, 'params': i['parameters'], 'ts': i['ids']['time_stamp'],
              'dt': str(dt), 'test_type_name': i['ids']['fixed_results_url']}

-                
+

         d.update(i['names'])
         if not d['sla_name']:
-            continue 
+            continue

         sla_splt = d['sla_name'].split('-')
         if len(sla_splt) > 1:
...
                 pprint("Found Target")
                 pprint(target_address)
                 d.update(target_address)
- 
+
         #res.update(i['ids'])
         #res.update(i[])
         #pprint(d)
...
     df1 = df1.query('sla_name == sla_name')
     #pprint(df1['sla_name'].unique())
-    
+
     #df1['results_text'] = df1['results'].apply(con_human)
     df1['results_text'] = df1.apply(con_human, axis=1)
     try:
-        df1['network_str'] = df1['network'] + " / " + df1['interface'] 
+        df1['network_str'] = df1['network'] + " / " + df1['interface']
     except:
         df1['network_str'] = "N/A"

...
     pprint("report notebook ...")
     if request.GET.get('clear', None):
         return redirect("/backend/dump_fixed_results/?redir=/backend/gen_report/")
-   
+
     try:
         table,df = gen_report_notebook()
     except Exception as e:
         messages.error(request, str(e))
         return render(request, 'backend/gen_report.html', {'dump_again': True})
-    
+
     slas = list(df['sla_name'].unique())
     vrs = list(df['verifier_name'].unique())
     sla_name = request.GET.getlist('sla_name')
     vr_name = request.GET.getlist('vr_name')
-    
+
     pprint("--- sla_name ---")
     pprint(sla_name)
     if len(sla_name) > 0:
         sla_filter = ", ".join(f"'{w}'" for w in sla_name)
         table = table.query(f"sla_name in ({sla_filter})")
-    
+
     if len(vr_name) > 0:
         vr_filter = ", ".join(f"'{w}'" for w in vr_name)
         table = table.query(f"verifier_name in ({vr_filter})")
-    
+
     table = table.dropna(how='all', axis=0)
     table = table.dropna(how='all', axis=1)

     gen_report = request.GET.get('genReport', None)
-    
+
     report_link = None
     if gen_report:
         import time
...
         report_link = f'/media/{fn}'

     try:
-        mk_ips = mkt.call_remote("ip/route") 
-        mk_address = mkt.call_remote("ip/address") 
+        mk_ips = mkt.call_remote("ip/route")
+        mk_address = mkt.call_remote("ip/address")
     except:
         mk_ips = []
         mk_address = []
...
 def mikro_dump_task():
     #look at only pppoe only ( excloude pppoe5 )
     col  = db['ip_address']
-    mk_address = mkt.call_remote("ip/address") 
-    for i in mk_address:
-        temp = {'ts': datetime.utcnow()}
-        temp = temp | i
-        col.insert_one(temp)
-    pprint("dump ip complete")
+    try:
+        mk_address = mkt.call_remote("ip/address")
+        for i in mk_address:
+            temp = {'ts': datetime.utcnow()}
+            temp = temp | i
+            col.insert_one(temp)
+        pprint("dump ip complete")
+    except Exception as e:
+        pprint(f"error = {e}")

 @shared_task
 def gen_report_task():
...
 def list_reports(request):
     import os
     from django.conf import settings
-    
+
     mr = settings.MEDIA_ROOT
-    file_list =os.listdir(mr)  
+    file_list =os.listdir(mr)

     def get_creation_time(item):
         item_path = os.path.join(mr, item)
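
Note: the reworked mikro_dump_task builds each document with the dict-merge operator (temp | i), which requires Python 3.9 or newer; the python:3.11 base image pinned in the Dockerfile above covers that. A minimal illustration of the merge, with made-up field names standing in for the real Mikrotik payload:

# Hypothetical record shaped like one entry from mkt.call_remote("ip/address");
# the actual keys come from the Mikrotik API.
from datetime import datetime

record = {"address": "10.0.0.1/24", "interface": "pppoe-out1"}

# PEP 584 dict merge (Python 3.9+): right-hand keys win on conflict.
doc = {"ts": datetime.utcnow()} | record
print(doc)  # {'ts': datetime.datetime(...), 'address': '10.0.0.1/24', 'interface': 'pppoe-out1'}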

+ 4 - 6
docker-compose.prod.yml

   redis:
     restart: unless-stopped
     image: redis:7.0.5-alpine
-    expose:
-      - 6379
     ports:
       - "127.0.0.1:6379:6379"
   db:
...
     image: tum/network-report-image
     #command: python -X dev manage.py  runserver 0.0.0.0:8000
     #command: python manage.py  runserver 0.0.0.0:8000
-    command: gunicorn kacee.wsgi:application --bind 0.0.0.0:8000 --workers=4 --timeout 1000
+    command: gunicorn network_report.wsgi:application --bind 0.0.0.0:8000 --workers=4 --timeout 1000
     #entrypoint: ./server-entrypoint.sh
     volumes:
       - ./app:/code
...
       - POSTGRES_USER=postgres
       - POSTGRES_PASSWORD=postgres
       - PYTHONMALLOC=debug
-      - DJANGO_SETTINGS_MODULE=kacee.settings
+      - DJANGO_SETTINGS_MODULE=network_report.settings
       - MODE=${MODE}
     depends_on:
       - db
...
     image: rabbitmq:3-management-alpine
     container_name: 'rabbitmq3'
     ports:
-      - "5672:5672"
-      - "15672:15672"
+      - "127.0.0.1:5672:5672"
+      - "127.0.0.1:15672:15672"
     volumes:
         - ./rabbitmq/data/:/var/lib/rabbitmq/
         - ./rabbitmq/log/:/var/log/rabbitmq
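
Note: with Redis and RabbitMQ now published only on 127.0.0.1, other containers still reach them over the compose network by service/container name. A minimal sketch of what the matching broker and cache settings could look like in network_report.settings, assuming the names from this compose file (redis, rabbitmq3), default RabbitMQ credentials, and Django 4.x's built-in Redis cache backend; these exact values are an assumption, not taken from the repo:

# Hypothetical excerpt of network_report/settings.py -- hosts are compose
# service/container names, because containers talk over the compose network,
# not through the 127.0.0.1 host bindings.
CELERY_BROKER_URL = "amqp://guest:guest@rabbitmq3:5672//"   # default RabbitMQ credentials assumed
CELERY_RESULT_BACKEND = "redis://redis:6379/0"
CACHES = {
    "default": {
        "BACKEND": "django.core.cache.backends.redis.RedisCache",  # available since Django 4.0
        "LOCATION": "redis://redis:6379/1",
    }
}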

+ 2 - 2
docker-compose.yml

     restart: unless-stopped
     image: redis:7.0.5-alpine
     ports:
-      - "6379"
+      - "127.0.0.1:6379:6379"
   db:
     image: postgres:15.5-alpine3.19
     volumes:
       - ./data/db:/var/lib/postgresql/data
     ports:
-      - "5432"
+      - "127.0.0.1:5432:5432"
     environment:
       - POSTGRES_DB=postgres
       - POSTGRES_USER=postgres
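
Note: binding the dev Redis and Postgres ports to 127.0.0.1 keeps them off external interfaces while still letting host-side tooling connect via localhost. A quick connectivity check, assuming the redis and psycopg2 client packages are installed on the host and using the postgres/postgres credentials from this file:

import redis
import psycopg2

# Redis is published on 127.0.0.1:6379 only.
r = redis.Redis(host="127.0.0.1", port=6379)
print("redis ping:", r.ping())

# Postgres is published on 127.0.0.1:5432 with the credentials from the compose file.
conn = psycopg2.connect(host="127.0.0.1", port=5432,
                        dbname="postgres", user="postgres", password="postgres")
with conn.cursor() as cur:
    cur.execute("SELECT version();")
    print("postgres:", cur.fetchone()[0])
conn.close()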

+ 2 - 2
requirements.txt

 django-debug-permissions
 pdf2image
 django-resized
-django-allauth
+django-allauth==0.54
 django-mjml
 django-fullurl
 xhtml2pdf
...
 django-markdownx
 django-qr-code
 python-barcode
-gunicorn==21.2.0
+gunicorn
 django-autotranslate
 pymongo
 dnspython