linux_dependencies.py
import os
import traceback
import sys

print("before function process")

def process(version):
    print("inside fun process")
    currentDirectory = os.path.dirname(os.path.abspath(__file__))
    print(currentDirectory)
    try:
        from os.path import expanduser
        import platform
        import subprocess
        import sys
        import demoji
        try:
            print('Downloading NLTK additional packages...')
            import nltk
            nltk.download('punkt')
            nltk.download('wordnet')
            nltk.download('stopwords')
            nltk.download('averaged_perceptron_tagger')
        except Exception as e:
            print('NLTK Error: '+str(e))
            pass
        from appbe.dataPath import DATA_DIR
        import shutil
        import importlib
        license_path = DATA_DIR
        if os.path.isdir(license_path) == False:
            os.makedirs(license_path)
        import warnings
        warnings.filterwarnings("ignore")
        LicenseFolder = os.path.join(license_path,'License')
        if os.path.isdir(LicenseFolder) == False:
            os.makedirs(LicenseFolder)
        sqlite_path = os.path.join(license_path,'sqlite')
        if os.path.isdir(sqlite_path) == False:
            os.makedirs(sqlite_path)
        pretrainedModel_path = os.path.join(license_path,'PreTrainedModels')
        if os.path.isdir(pretrainedModel_path) == False:
            os.makedirs(pretrainedModel_path)
        config_path = os.path.join(license_path,'config')
        if os.path.isdir(config_path) == False:
            os.makedirs(config_path)
        target_path = os.path.join(license_path,'target')
        if os.path.isdir(target_path) == False:
            os.makedirs(target_path)
        data_path = os.path.join(license_path,'storage')
        if os.path.isdir(data_path) == False:
            os.makedirs(data_path)
        log_path = os.path.join(license_path,'logs')
        if os.path.isdir(log_path) == False:
            os.makedirs(log_path)
        configFolder = os.path.join(currentDirectory,'..','config')
        for file in os.listdir(configFolder):
            if file.endswith(".var"):
                os.remove(os.path.join(configFolder,file))
        versionfile = os.path.join(configFolder,str(version)+'.var')
        with open(versionfile, 'w') as fp:
            pass
        manage_path = os.path.join(currentDirectory,'..','aion.py')
        print('Setting up Django Environment for AION User Interface')
        proc = subprocess.Popen([sys.executable, manage_path, "-m","migrateappfe"],stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        (stdout, stderr) = proc.communicate()
        if proc.returncode != 0:
            err_string = stderr.decode('utf8')
            import re
            result = re.search("No module named '(.*)'", err_string)
            if 'ModuleNotFoundError' in err_string:
                print('\n"{}" module is missing. The dependencies of AION were not installed properly. Uninstall and reinstall AION'.format(result.group(1)))
            else:
                print('\nThe dependencies of AION were not installed properly. Uninstall and reinstall AION')
            raise Exception(err_string)
        else:
            print('AION User Interface successfully set')
        print('--------------AION Installed Successfully--------------')
    except Exception as e:
        print(e)
        f = open(os.path.join(currentDirectory, 'workspace_error_logs.txt'), "w")
        f.write(str(traceback.format_exc()))
        f.close()
        pass

if __name__ == "__main__":
    process(sys.argv[1])
dependencies.py
import os
import traceback

def process(version):
    currentDirectory = os.path.dirname(os.path.abspath(__file__))
    try:
        import win32com.client
        from os.path import expanduser
        import platform
        import subprocess
        import sys
        import demoji
        try:
            print('Downloading NLTK additional packages...')
            import nltk
            nltk.download('punkt')
            nltk.download('wordnet')
            nltk.download('stopwords')
            nltk.download('averaged_perceptron_tagger')
        except Exception as e:
            print('NLTK Error: '+str(e))
            pass
        from appbe.dataPath import DATA_DIR
        from win32com.shell import shell, shellcon
        import shutil
        import importlib
        license_path = DATA_DIR
        if os.path.isdir(license_path) == False:
            os.makedirs(license_path)
        import warnings
        warnings.filterwarnings("ignore")
        LicenseFolder = os.path.join(license_path,'License')
        if os.path.isdir(LicenseFolder) == False:
            os.makedirs(LicenseFolder)
        sqlite_path = os.path.join(license_path,'sqlite')
        if os.path.isdir(sqlite_path) == False:
            os.makedirs(sqlite_path)
        pretrainedModel_path = os.path.join(license_path,'PreTrainedModels')
        if os.path.isdir(pretrainedModel_path) == False:
            os.makedirs(pretrainedModel_path)
        config_path = os.path.join(license_path,'config')
        if os.path.isdir(config_path) == False:
            os.makedirs(config_path)
        target_path = os.path.join(license_path,'target')
        if os.path.isdir(target_path) == False:
            os.makedirs(target_path)
        data_path = os.path.join(license_path,'storage')
        if os.path.isdir(data_path) == False:
            os.makedirs(data_path)
        log_path = os.path.join(license_path,'logs')
        if os.path.isdir(log_path) == False:
            os.makedirs(log_path)
        configFolder = os.path.join(currentDirectory,'..','config')
        for file in os.listdir(configFolder):
            if file.endswith(".var"):
                os.remove(os.path.join(configFolder,file))
        versionfile = os.path.join(configFolder,str(version)+'.var')
        with open(versionfile, 'w') as fp:
            pass
        manage_path = os.path.join(currentDirectory,'..','aion.py')
        print('Setting up Django Environment for AION User Interface')
        proc = subprocess.Popen([sys.executable, manage_path, "-m","migrateappfe"],stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        (stdout, stderr) = proc.communicate()
        if proc.returncode != 0:
            err_string = stderr.decode('utf8')
            import re
            result = re.search("No module named '(.*)'", err_string)
            if 'ModuleNotFoundError' in err_string:
                print('\n"{}" module is missing. The dependencies of AION were not installed properly. Uninstall and reinstall AION'.format(result.group(1)))
            else:
                print('\nThe dependencies of AION were not installed properly. Uninstall and reinstall AION')
            raise Exception(err_string)
        else:
            print('AION User Interface successfully set')
        desktop = shell.SHGetFolderPath(0, shellcon.CSIDL_DESKTOP, 0, 0)
        #desktop = os.path.expanduser('~/Desktop')
        path = os.path.join(desktop, 'Explorer {0}.lnk'.format(version))
        target = os.path.normpath(os.path.join(currentDirectory,'..', 'sbin', 'AION_Explorer.bat'))
        icon = os.path.join(currentDirectory,'icons','aion.ico')
        shell = win32com.client.Dispatch("WScript.Shell")
        shortcut = shell.CreateShortCut(path)
        shortcut.Targetpath = '"'+target+'"'
        shortcut.WorkingDirectory = currentDirectory
        #shortcut.WorkingDirectory = os.path.dirname(__file__)
        shortcut.IconLocation = icon
        shortcut.WindowStyle = 1  # 7 - Minimized, 3 - Maximized, 1 - Normal
        shortcut.save()
        path = os.path.join(desktop, 'Shell {0}.lnk'.format(version))
        target = os.path.normpath(os.path.join(currentDirectory,'..','sbin', 'AION_Shell.bat'))
        icon = os.path.join(currentDirectory,'icons','aion_shell.ico')
        shell = win32com.client.Dispatch("WScript.Shell")
        shortcut = shell.CreateShortCut(path)
        shortcut.Targetpath = '"'+target+'"'
        shortcut.WorkingDirectory = currentDirectory
        #shortcut.WorkingDirectory = os.path.dirname(__file__)
        shortcut.IconLocation = icon
        shortcut.WindowStyle = 1  # 7 - Minimized, 3 - Maximized, 1 - Normal
        shortcut.save()
        print('--------------AION Installed Successfully--------------')
    except Exception as e:
        print(e)
        f = open(os.path.join(currentDirectory, 'workspace_error_logs.txt'), "w")
        f.write(str(traceback.format_exc()))
        f.close()
        pass
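Unlike the Linux variant above, this module has no __main__ entry point. A minimal sketch of how the Windows post-install step might be driven, assuming the module is importable as dependencies; the version label is a placeholder, not a value shipped with AION:

# Hypothetical invocation for illustration only.
import dependencies
# Creates the workspace folders and the "Explorer"/"Shell" desktop shortcuts for that version label.
dependencies.process('3.0')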
__init__.py
null
__init__.py
null
__init__.py
'''
*
* =============================================================================
* COPYRIGHT NOTICE
* =============================================================================
* @ Copyright HCL Technologies Ltd. 2021, 2022,2023
* Proprietary and confidential. All information contained herein is, and
* remains the property of HCL Technologies Limited. Copying or reproducing the
* contents of this file, via any medium is strictly prohibited unless prior
* written permission is obtained from HCL Technologies Limited.
*
'''
visualization.py
'''
*
* =============================================================================
* COPYRIGHT NOTICE
* =============================================================================
* @ Copyright HCL Technologies Ltd. 2021, 2022,2023
* Proprietary and confidential. All information contained herein is, and
* remains the property of HCL Technologies Limited. Copying or reproducing the
* contents of this file, via any medium is strictly prohibited unless prior
* written permission is obtained from HCL Technologies Limited.
*
'''
import warnings
import numpy as np
import pandas as pd
import sklearn.metrics as metrics
from collections import defaultdict
from sklearn.metrics import confusion_matrix
import re
import shutil
import scipy.stats as st
import json
import os,sys
import glob
import logging
from utils.file_ops import read_df_compressed

class Visualization():
    def __init__(self,usecasename,version,dataframe,visualizationJson,dateTimeColumn,deployPath,dataFolderLocation,numericContinuousFeatures,discreteFeatures,categoricalFeatures,modelFeatures,targetFeature,modeltype,original_data_file,profiled_data_file,trained_data_file,predicted_data_file,labelMaps,vectorizerFeatures,textFeatures,numericalFeatures,nonNumericFeatures,emptyFeatures,nrows,ncols,saved_model,scoreParam,learner_type,modelname,featureReduction,reduction_data_file):
        self.dataframe = dataframe
        self.displayjson = {}
        self.visualizationJson = visualizationJson
        self.dateTimeColumn = dateTimeColumn
        self.deployPath = deployPath
        #shutil.copy2(os.path.join(os.path.dirname(os.path.abspath(__file__)),'aion_portal.py'),self.deployPath)
        if learner_type == 'ML' and modelname != 'Neural Architecture Search':
            if(os.path.isfile(os.path.join(self.deployPath,'explainable_ai.py'))):
                os.remove(os.path.join(self.deployPath,'explainable_ai.py'))
            shutil.copy2(os.path.join(os.path.dirname(os.path.abspath(__file__)),'..','utilities','xai','explainable_ai.py'),self.deployPath)
            # os.rename(os.path.join(self.deployPath,'explainable_ai.py'),os.path.join(self.deployPath,'aion_xai.py'))
            try:
                os.rename(os.path.join(self.deployPath,'explainable_ai.py'),os.path.join(self.deployPath,'aion_xai.py'))
            except FileExistsError:
                os.remove(os.path.join(self.deployPath,'aion_xai.py'))
                os.rename(os.path.join(self.deployPath,'explainable_ai.py'),os.path.join(self.deployPath,'aion_xai.py'))
        elif learner_type == 'DL' or modelname == 'Neural Architecture Search':
            if(os.path.isfile(os.path.join(self.deployPath,'explainable_ai.py'))):
                os.remove(os.path.join(self.deployPath,'explainable_ai.py'))
            shutil.copy2(os.path.join(os.path.dirname(os.path.abspath(__file__)),'..','utilities','xai','explainabledl_ai.py'),self.deployPath)
            # os.rename(os.path.join(self.deployPath,'explainabledl_ai.py'),os.path.join(self.deployPath,'aion_xai.py'))
            try:
                os.rename(os.path.join(self.deployPath,'explainabledl_ai.py'),os.path.join(self.deployPath,'aion_xai.py'))
            except FileExistsError:
                os.remove(os.path.join(self.deployPath,'aion_xai.py'))
                os.rename(os.path.join(self.deployPath,'explainabledl_ai.py'),os.path.join(self.deployPath,'aion_xai.py'))
        self.jsondeployPath = deployPath
        #self.deployPath = self.deployPath+'visualization/'
        self.dataFolderLocation = dataFolderLocation
        self.vectorizerFeatures = vectorizerFeatures
        self.textFeatures = textFeatures
        self.emptyFeatures = emptyFeatures
        '''
        try:
            os.makedirs(self.deployPath)
        except OSError as e:
            print("\nFolder Already Exists")
        '''
        self.numericContinuousFeatures = numericContinuousFeatures
        self.discreteFeatures = discreteFeatures
        self.categoricalFeatures = categoricalFeatures
        self.modelFeatures = modelFeatures
        self.modeltype = modeltype
        self.targetFeature = targetFeature
        self.displayjson['usecasename'] = str(usecasename)
        self.displayjson['version'] = str(version)
        self.displayjson['problemType'] = str(self.modeltype)
        self.displayjson['targetFeature'] = self.targetFeature
        self.displayjson['numericalFeatures'] = numericalFeatures
        self.displayjson['nonNumericFeatures'] = nonNumericFeatures
        self.displayjson['modelFeatures'] = self.modelFeatures
        self.displayjson['textFeatures'] = self.textFeatures
        self.displayjson['emptyFeatures'] = self.emptyFeatures
        self.displayjson['modelname']= str(modelname)
        self.displayjson['preprocessedData'] = str(original_data_file)
        self.displayjson['nrows'] = str(nrows)
        self.displayjson['ncols'] = str(ncols)
        self.displayjson['saved_model'] = str(saved_model)
        self.displayjson['scoreParam'] = str(scoreParam)
        self.displayjson['labelMaps'] = eval(str(labelMaps))
        self.original_data_file = original_data_file
        self.displayjson['featureReduction'] = featureReduction
        if featureReduction == 'True':
            self.displayjson['reduction_data_file'] = reduction_data_file
        else:
            self.displayjson['reduction_data_file'] = ''
        self.pred_filename = predicted_data_file
        self.profiled_data_file = profiled_data_file
        self.displayjson['predictedData'] = predicted_data_file
        self.displayjson['postprocessedData'] = profiled_data_file
        #self.trained_data_file = trained_data_file
        #self.displayjson['trainingData'] = trained_data_file
        #self.displayjson['categorialFeatures']=categoricalFeatures
        #self.displayjson['discreteFeatures']=discreteFeatures
        #self.displayjson['continuousFeatures']=numericContinuousFeatures
        #y = json.dumps(self.displayjson)
        #print(y)
        self.labelMaps = labelMaps
        self.log = logging.getLogger('eion')

    def visualizationrecommandsystem(self):
        try:
            import tensorflow.keras.utils as kutils
            datasetid = self.visualizationJson['datasetid']
            self.log.info('\n================== Data Profiling Details==================')
            datacolumns=list(self.dataframe.columns)
            self.log.info('================== Data Profiling Details End ==================\n')
            self.log.info('================== Features Correlation Details ==================\n')
            self.log.info('\n================== Model Performance Analysis ==================')
            if os.path.exists(self.pred_filename):
                try:
                    status,df=read_df_compressed(self.pred_filename)
                    if self.modeltype == 'Classification' or self.modeltype == 'ImageClassification' or self.modeltype == 'anomaly_detection':
                        y_actual = df['actual'].values
                        y_predict = df['predict'].values
                        y_actual = kutils.to_categorical(y_actual)
                        y_predict = kutils.to_categorical(y_predict)
                        classes = df.actual.unique()
                        n_classes = y_actual.shape[1]
                        self.log.info('-------> ROC AUC CURVE')
                        roc_curve_dict = []
                        for i in classes:
                            try:
                                classname = i
                                if str(self.labelMaps) != '{}':
                                    inv_map = {v: k for k, v in self.labelMaps.items()}
                                    classname = inv_map[i]
                                fpr, tpr, threshold = metrics.roc_curve(y_actual[:,i],y_predict[:,i])
                                roc_auc = metrics.auc(fpr, tpr)
                                class_roc_auc_curve = {}
                                class_roc_auc_curve['class'] = str(classname)
                                fprstring = ','.join(str(v) for v in fpr)
                                tprstring = ','.join(str(v) for v in tpr)
                                class_roc_auc_curve['FP'] = str(fprstring)
                                class_roc_auc_curve['TP'] = str(tprstring)
                                roc_curve_dict.append(class_roc_auc_curve)
                                self.log.info('----------> Class: '+str(classname))
                                self.log.info('------------> ROC_AUC: '+str(roc_auc))
                                self.log.info('------------> False Positive Rate (x Points): '+str(fpr))
                                self.log.info('------------> True Positive Rate (y Points): '+str(tpr))
                            except:
                                pass
                        self.displayjson['ROC_AUC_CURVE'] = roc_curve_dict
                        self.log.info('-------> Precision Recall CURVE')
                        precision_recall_curve_dict = []
                        for i in range(n_classes):
                            try:
                                lr_precision, lr_recall, threshold = metrics.precision_recall_curve(y_actual[:,i],y_predict[:,i])
                                classname = i
                                if str(self.labelMaps) != '{}':
                                    inv_map = {v: k for k, v in self.labelMaps.items()}
                                    classname = inv_map[i]
                                roc_auc = metrics.auc(lr_recall,lr_precision)
                                class_precision_recall_curve = {}
                                class_precision_recall_curve['class'] = str(classname)
                                Precisionstring = ','.join(str(round(v,2)) for v in lr_precision)
                                Recallstring = ','.join(str(round(v,2)) for v in lr_recall)
                                class_precision_recall_curve['Precision'] = str(Precisionstring)
                                class_precision_recall_curve['Recall'] = str(Recallstring)
                                precision_recall_curve_dict.append(class_precision_recall_curve)
                                self.log.info('----------> Class: '+str(classname))
                                self.log.info('------------> ROC_AUC: '+str(roc_auc))
                                self.log.info('------------> Recall (x Points): '+str(lr_recall))
                                self.log.info('------------> Precision (y Points): '+str(lr_precision))
                            except:
                                pass
                        self.displayjson['PRECISION_RECALL_CURVE'] = precision_recall_curve_dict
                    status,predictdataFrame=read_df_compressed(self.displayjson['predictedData'])
                except Exception as e:
                    self.log.info('================== Error in Calculation ROC_AUC/Recall Precision Curve '+str(e))
            self.log.info('================== Model Performance Analysis End ==================\n')
            self.log.info('\n================== For Descriptive Analysis of Model Features ==================')
            outputfile = os.path.join(self.jsondeployPath,'etc','display.json')
            with open(outputfile, 'w') as fp:
                json.dump(self.displayjson, fp)
            self.log.info('================== For Descriptive Analysis of Model Features End ==================\n')
        except Exception as inst:
            self.log.info('Visualization Failed !....'+str(inst))
            exc_type, exc_obj, exc_tb = sys.exc_info()
            fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)
            self.log.info(str(exc_type)+' '+str(fname)+' '+str(exc_tb.tb_lineno))

    def drawlinechart(self,xcolumn,ycolumn,deploy_path,datasetid):
        title = 'aion_visualization_'+xcolumn+"_"+ycolumn+"_linechart"
        yaxisname = 'Average '+ycolumn
        datasetindex = datasetid
        visulizationjson = '[{"_id": "543234","_type": "visualization","_source": {"title": "'+title+'",'
        visulizationjson = visulizationjson+'"visState": "{\\"title\\":\\"'+title+'\\",'
        visulizationjson = visulizationjson+'\\"type\\":\\"line\\",\\"params\\":{\\"type\\":\\"line\\",\\"grid\\":{\\"categoryLines\\":false,\\"style\\":{\\"color\\":\\"#eee\\"}},\\"categoryAxes\\":[{\\"id\\":\\"CategoryAxis-1\\",\\"type\\":\\"category\\",\\"position\\":\\"bottom\\",\\"show\\":true,\\"style\\":{},\\"scale\\":{\\"type\\":\\"linear\\"},\\"labels\\":{\\"show\\":true,\\"truncate\\":100},\\"title\\":{}}],\\"valueAxes\\":[{\\"id\\":\\"ValueAxis-1\\",\\"name\\":\\"LeftAxis-1\\",\\"type\\":\\"value\\",\\"position\\":\\"left\\",\\"show\\":true,\\"style\\":{},\\"scale\\":{\\"type\\":\\"linear\\",\\"mode\\":\\"normal\\"},\\"labels\\":{\\"show\\":true,\\"rotate\\":0,\\"filter\\":false,\\"truncate\\":100},\\"title\\":'
        visulizationjson = visulizationjson+'{\\"text\\":\\"'+yaxisname+'\\"}}],\\"seriesParams\\":[{\\"show\\":\\"true\\",\\"type\\":\\"line\\",\\"mode\\":\\"normal\\",\\"data\\":'
        visulizationjson = visulizationjson+'{\\"label\\":\\"'+yaxisname+'\\",\\"id\\":\\"1\\"},\\"valueAxis\\":\\"ValueAxis-1\\",\\"drawLinesBetweenPoints\\":true,\\"showCircles\\":true}],\\"addTooltip\\":true,\\"addLegend\\":true,\\"legendPosition\\":\\"right\\",\\"times\\":[],\\"addTimeMarker\\":false},\\"aggs\\":[{\\"id\\":\\"1\\",\\"enabled\\":true,\\"type\\":\\"avg\\",\\"schema\\":\\"metric\\",\\"params\\":{\\"field\\":\\"'+str(ycolumn)+'\\"}},{\\"id\\":\\"2\\",\\"enabled\\":true,\\"type\\":\\"terms\\",\\"schema\\":\\"segment\\",\\"params\\":{\\"field\\":\\"'+xcolumn+'\\",\\"size\\":100,\\"order\\":\\"desc\\",\\"orderBy\\":\\"1\\",\\"otherBucket\\":false,\\"otherBucketLabel\\":\\"Other\\",\\"missingBucket\\":false,\\"missingBucketLabel\\":\\"Missing\\"}}]}","uiStateJSON": "{}", "description": "","version": 1,"kibanaSavedObjectMeta": {"searchSourceJSON": "{\\"index\\":\\"'+datasetindex+'\\",\\"query\\":{\\"query\\":\\"\\",\\"language\\":\\"lucene\\"},\\"filter\\":[]}"}},"_migrationVersion": {"visualization": "6.7.2"}}]'
        filename = deploy_path+title+'.json'
        f = open(filename, "w")
        f.write(str(visulizationjson))
        f.close()

    def drawbarchart(self,xcolumn,ycolumn,deploy_path,datasetid):
        title = 'aion_visualization_'+xcolumn+"_"+ycolumn+"_barchart"
        yaxisname = 'Average '+ycolumn
        datasetindex = datasetid
        visulizationjson = '[{"_id": "123456","_type": "visualization","_source": {"title":"'+title+'",'
        visulizationjson = visulizationjson+'"visState": "{\\"title\\":\\"'+title+'\\",'
        visulizationjson = visulizationjson+'\\"type\\":\\"histogram\\",\\"params\\":{\\"addLegend\\":true,\\"addTimeMarker\\":false,\\"addTooltip\\":true,\\"categoryAxes\\":[{\\"id\\":\\"CategoryAxis-1\\",\\"labels\\":{\\"show\\":true,\\"truncate\\":100},\\"position\\":\\"bottom\\",\\"scale\\":{\\"type\\":\\"linear\\"},\\"show\\":true,\\"style\\":{},\\"title\\":{},\\"type\\":\\"category\\"}],\\"grid\\":{\\"categoryLines\\":false,\\"style\\":{\\"color\\":\\"#eee\\"}},\\"legendPosition\\":\\"right\\",\\"seriesParams\\":[{\\"data\\":{\\"id\\":\\"1\\",'
        visulizationjson = visulizationjson+'\\"label\\":\\"'+yaxisname+'\\"},'
        visulizationjson = visulizationjson+'\\"drawLinesBetweenPoints\\":true,\\"mode\\":\\"stacked\\",\\"show\\":\\"true\\",\\"showCircles\\":true,\\"type\\":\\"histogram\\",\\"valueAxis\\":\\"ValueAxis-1\\"}],\\"times\\":[],\\"type\\":\\"histogram\\",\\"valueAxes\\":[{\\"id\\":\\"ValueAxis-1\\",\\"labels\\":{\\"filter\\":false,\\"rotate\\":0,\\"show\\":true,\\"truncate\\":100},\\"name\\":\\"LeftAxis-1\\",\\"position\\":\\"left\\",\\"scale\\":{\\"mode\\":\\"normal\\",\\"type\\":\\"linear\\"},\\"show\\":true,\\"style\\":{},\\"title\\":'
        visulizationjson = visulizationjson+'{\\"text\\":\\"'+yaxisname+'\\"},'
        visulizationjson = visulizationjson+'\\"type\\":\\"value\\"}]},\\"aggs\\":[{\\"id\\":\\"1\\",\\"enabled\\":true,\\"type\\":\\"avg\\",\\"schema\\":\\"metric\\",\\"params\\":{\\"field\\":\\"'+str(xcolumn)+'\\"}},{\\"id\\":\\"2\\",\\"enabled\\":true,\\"type\\":\\"terms\\",\\"schema\\":\\"segment\\",\\"params\\":{\\"field\\":\\"'+ycolumn+'\\",\\"size\\":100,\\"order\\":\\"asc\\",\\"orderBy\\":\\"1\\",\\"otherBucket\\":false,\\"otherBucketLabel\\":\\"Other\\",\\"missingBucket\\":false,\\"missingBucketLabel\\":\\"Missing\\"}}]}","uiStateJSON":"{}","description": "","version": 1,"kibanaSavedObjectMeta": {'
        visulizationjson = visulizationjson+'"searchSourceJSON": "{\\"index\\":\\"'+datasetindex+'\\",\\"query\\":{\\"language\\":\\"lucene\\",\\"query\\":\\"\\"},\\"filter\\":[]}"}},"_migrationVersion":{"visualization": "6.7.2"}}]'
        filename = deploy_path+title+'.json'
        f = open(filename, "w")
        f.write(str(visulizationjson))
        f.close()

    def drawpiechart(self,xcolumn,deploy_path,datasetid):
        title = 'aion_visualization_'+xcolumn+"_piechart"
        datasetindex = datasetid
        visulizationjson = '[{"_id": "123456","_type": "visualization","_source": {"title":"'+title+'",'
        visulizationjson = visulizationjson+'"visState": "{\\"title\\":\\"'+title+'\\",'
        visulizationjson = visulizationjson+'\\"type\\":\\"pie\\",\\"params\\":{\\"type\\":\\"pie\\",\\"addTooltip\\":true,\\"addLegend\\":true,\\"legendPosition\\":\\"right\\",\\"isDonut\\":true,\\"labels\\":{\\"show\\":false,\\"values\\":true,\\"last_level\\":true,\\"truncate\\":100}},\\"aggs\\":[{\\"id\\":\\"1\\",\\"enabled\\":true,\\"type\\":\\"count\\",\\"schema\\":\\"metric\\",\\"params\\":{}},{\\"id\\":\\"2\\",\\"enabled\\":true,\\"type\\":\\"terms\\",\\"schema\\":\\"segment\\",\\"params\\":{\\"field\\":\\"'+xcolumn+'\\",\\"size\\":100,\\"order\\":\\"asc\\",\\"orderBy\\":\\"1\\",\\"otherBucket\\":false,\\"otherBucketLabel\\":\\"Other\\",\\"missingBucket\\":false,\\"missingBucketLabel\\":\\"Missing\\"}}]}",'
        visulizationjson = visulizationjson+'"uiStateJSON": "{}","description": "","version": 1,"kibanaSavedObjectMeta": {"searchSourceJSON":"{\\"index\\":\\"'+datasetid+'\\",\\"query\\":{\\"query\\":\\"\\",\\"language\\":\\"lucene\\"},\\"filter\\":[]}"}},"_migrationVersion": {"visualization": "6.7.2"}}]'
        filename = deploy_path+title+'.json'
        f = open(filename, "w")
        f.write(str(visulizationjson))
        f.close()

    def get_confusion_matrix(self,df):
        setOfyTrue = set(df['actual'])
        unqClassLst = list(setOfyTrue)
        if(str(self.labelMaps) != '{}'):
            inv_mapping_dict = {v: k for k, v in self.labelMaps.items()}
            unqClassLst2 = (pd.Series(unqClassLst)).map(inv_mapping_dict)
            unqClassLst2 = list(unqClassLst2)
        else:
            unqClassLst2 = unqClassLst
        indexName = []
        columnName = []
        for item in unqClassLst2:
            indexName.append("act:"+str(item))
            columnName.append("pre:"+str(item))
        result = pd.DataFrame(confusion_matrix(df['actual'], df['predict'], labels = unqClassLst),index = indexName, columns = columnName)
        resultjson = result.to_json(orient='index')
        return(resultjson)

    def DistributionFinder(self,data):
        try:
            distributionName =""
            sse =0.0
            KStestStatic=0.0
            dataType=""
            if(data.dtype == "float64"):
                dataType ="Continuous"
            elif(data.dtype =="int" or data.dtype =="int64"):
                dataType="Discrete"
            if(dataType == "Discrete"):
                distributions= [st.bernoulli,st.binom,st.geom,st.nbinom,st.poisson]
                index, counts = np.unique(abs(data.astype(int)),return_counts=True)
                if(len(index)>=2):
                    best_sse = np.inf
                    y1=[]
                    total=sum(counts)
                    mean=float(sum(index*counts))/total
                    variance=float((sum(index**2*counts) -total*mean**2))/(total-1)
                    dispersion=mean/float(variance)
                    theta=1/float(dispersion)
                    r=mean*(float(theta)/1-theta)
                    for j in counts:
                        y1.append(float(j)/total)
                    pmf1=st.bernoulli.pmf(index,mean)
                    pmf2=st.binom.pmf(index,len(index),p=mean/len(index))
                    pmf3=st.geom.pmf(index,1/float(1+mean))
                    pmf4=st.nbinom.pmf(index,mean,r)
                    pmf5=st.poisson.pmf(index,mean)
                    sse1 = np.sum(np.power(y1 - pmf1, 2.0))
                    sse2 = np.sum(np.power(y1 - pmf2, 2.0))
                    sse3 = np.sum(np.power(y1 - pmf3, 2.0))
                    sse4 = np.sum(np.power(y1 - pmf4, 2.0))
                    sse5 = np.sum(np.power(y1- pmf5, 2.0))
                    sselist=[sse1,sse2,sse3,sse4,sse5]
                    for i in range(0,len(sselist)):
                        if best_sse > sselist[i] > 0:
                            best_distribution = distributions[i].name
                            best_sse = sselist[i]
                elif (len(index) == 1):
                    best_distribution = "Constant Data-No Distribution"
                    best_sse = 0.0
                distributionName =best_distribution
                sse=best_sse
            elif(dataType == "Continuous"):
                distributions = [st.uniform,st.expon,st.weibull_max,st.weibull_min,st.chi,st.norm,st.lognorm,st.t,st.gamma,st.beta]
                best_distribution = st.norm.name
                best_sse = np.inf
                datamin=data.min()
                datamax=data.max()
                nrange=datamax-datamin
                y, x = np.histogram(data.astype(float), bins='auto', density=True)
                x = (x + np.roll(x, -1))[:-1] / 2.0
                for distribution in distributions:
                    with warnings.catch_warnings():
                        warnings.filterwarnings('ignore')
                        params = distribution.fit(data.astype(float))
                        # Separate parts of parameters
                        arg = params[:-2]
                        loc = params[-2]
                        scale = params[-1]
                        # Calculate fitted PDF and error with fit in distribution
                        pdf = distribution.pdf(x, loc=loc, scale=scale, *arg)
                        sse = np.sum(np.power(y - pdf, 2.0))
                        if(best_sse >sse > 0):
                            best_distribution = distribution.name
                            best_sse = sse
                distributionName =best_distribution
                sse=best_sse
        except:
            response = str(sys.exc_info()[0])
            message='Job has Failed'+response
            print(message)
        return distributionName,sse
local_pipeline.py
import docker
import json
import logging

def read_json(file_path):
    data = None
    with open(file_path,'r') as f:
        data = json.load(f)
    return data

def run_pipeline(inputconfig):
    inputconfig = json.loads(inputconfig)
    logfilepath = inputconfig['logfilepath']
    logging.basicConfig(level=logging.INFO,filename =logfilepath)
    usecasename = inputconfig['usecase']
    logging.info("UseCaseName :"+str(usecasename))
    version = inputconfig['version']
    logging.info("version :"+str(version))
    config = inputconfig['dockerlist']
    persistancevolume = inputconfig['persistancevolume']
    logging.info("PersistanceVolume :"+str(persistancevolume))
    datasetpath = inputconfig['datasetpath']
    logging.info("DataSet Path :"+str(datasetpath))
    config = read_json(config)
    client = docker.from_env()
    inputconfig = {'modelName':usecasename,'modelVersion':str(version),'dataLocation':datasetpath}
    inputconfig = json.dumps(inputconfig)
    inputconfig = inputconfig.replace('"', '\\"')
    logging.info("===== Model Monitoring Container Start =====")
    outputStr = client.containers.run(config['ModelMonitoring'],'python code.py -i'+datasetpath,volumes=[persistancevolume+':/aion'])
    outputStr = outputStr.decode('utf-8')
    logging.info('ModelMonitoring: '+str(outputStr))
    print('ModelMonitoring: '+str(outputStr))
    logging.info("===== ModelMonitoring Stop =====")
    logging.info("===== Data Ingestion Container Start =====")
    outputStr = client.containers.run(config['DataIngestion'],'python code.py',volumes=[persistancevolume+':/aion'])
    outputStr = outputStr.decode('utf-8')
    logging.info('DataIngestion: '+str(outputStr))
    print('DataIngestion: '+str(outputStr))
    logging.info("===== Data Ingestion Container Stop =====")
    outputStr = outputStr.strip()
    decoded_data = json.loads(outputStr)
    status = decoded_data['Status']
    if status != 'Success':
        output = {'Status':'Error','Msg':'Data Ingestion Fails'}
    logging.info("===== Transformation Container Start =====")
    outputStr = client.containers.run(config['DataTransformation'],'python code.py',volumes=[persistancevolume+':/aion'])
    outputStr = outputStr.decode('utf-8')
    logging.info('Data Transformations: '+str(outputStr))
    print('Data Transformations: '+str(outputStr))
    logging.info("===== Transformation Container Done =====")
    outputStr = outputStr.strip()
    decoded_data = json.loads(outputStr)
    status = decoded_data['Status']
    if status != 'Success':
        output = {'Status':'Error','Msg':'Data Transformations Fails'}
    logging.info("===== Feature Engineering Container Start =====")
    outputStr = client.containers.run(config['FeatureEngineering'],'python code.py',volumes=[persistancevolume+':/aion'])
    outputStr = outputStr.decode('utf-8')
    logging.info('FeatureEngineering: '+str(outputStr))
    print('FeatureEngineering: '+str(outputStr))
    logging.info("===== Feature Engineering Container Done =====")
    outputStr = outputStr.strip()
    decoded_data = json.loads(outputStr)
    status = decoded_data['Status']
    modeltraining = config['ModelTraining']
    for mt in modeltraining:
        logging.info("===== Training Container Start =====")
        outputStr = client.containers.run(mt['Training'],'python code.py',volumes=[persistancevolume+':/aion'])
        outputStr = outputStr.decode('utf-8')
        logging.info('ModelTraining: '+str(outputStr))
        print('ModelTraining: '+str(outputStr))
        logging.info("===== Training Container Done =====")
        outputStr = outputStr.strip()
        try:
            decoded_data = json.loads(outputStr)
            status = decoded_data['Status']
        except Exception as inst:
            logging.info(inst)
    logging.info("===== Model Registry Start =====")
    outputStr = client.containers.run(config['ModelRegistry'],'python code.py',volumes=[persistancevolume+':/aion'])
    outputStr = outputStr.decode('utf-8')
    logging.info('ModelRegistry: '+str(outputStr))
    print('ModelRegistry: '+str(outputStr))
    logging.info("===== ModelRegistry Done =====")
    logging.info("===== ModelServing Start =====")
    outputStr = client.containers.run(config['ModelServing'],'python code.py',volumes=[persistancevolume+':/aion'])
    outputStr = outputStr.decode('utf-8')
    logging.info('Prediction: '+str(outputStr))
    print('Prediction: '+str(outputStr))
    logging.info("===== ModelServing Done =====")
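For reference, a minimal sketch of the JSON string run_pipeline() expects, inferred from the keys the function reads; every path and value below is a placeholder, not something shipped with AION.

# Hypothetical pipeline config; adjust paths to your environment.
import json
pipeline_config = {
    "usecase": "mydemousecase",
    "version": "1",
    "logfilepath": "/aion/logs/pipeline.log",
    "persistancevolume": "/aion/data",
    "datasetpath": "/aion/data/input.csv",
    # Path to the dockerlist.json produced by build_container.local_docker_build below.
    "dockerlist": "/aion/mlaac/dockerlist.json",
}
# run_pipeline takes a JSON string, not a dict:
# run_pipeline(json.dumps(pipeline_config))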
build_container.py
import os
import shutil
import sys
import subprocess
from os.path import expanduser
import platform
import json

def createDockerImage(model_name,model_version,module,folderpath):
    command = 'docker pull python:3.8-slim-buster'
    os.system(command)
    subprocess.check_call(["docker", "build", "-t",module+'_'+model_name.lower()+":"+model_version,"."], cwd=folderpath)

def local_docker_build(config):
    print(config)
    config = json.loads(config)
    model_name = config['usecase']
    model_version = config['version']
    mlaac__code_path = config['mlacPath']
    docker_images = {}
    docker_images['ModelMonitoring'] = 'modelmonitoring'+'_'+model_name.lower()+':'+model_version
    dataset_addr = os.path.join(mlaac__code_path,'ModelMonitoring')
    createDockerImage(model_name,model_version,'modelmonitoring',dataset_addr)
    docker_images['DataIngestion'] = 'dataingestion'+'_'+model_name.lower()+':'+model_version
    dataset_addr = os.path.join(mlaac__code_path,'DataIngestion')
    createDockerImage(model_name,model_version,'dataingestion',dataset_addr)
    transformer_addr = os.path.join(mlaac__code_path,'DataTransformation')
    docker_images['DataTransformation'] = 'datatransformation'+'_'+model_name.lower()+':'+model_version
    createDockerImage(model_name,model_version,'datatransformation',transformer_addr)
    featureengineering_addr = os.path.join(mlaac__code_path,'FeatureEngineering')
    docker_images['FeatureEngineering'] = 'featureengineering'+'_'+model_name.lower()+':'+model_version
    createDockerImage(model_name,model_version,'featureengineering',featureengineering_addr)
    from os import listdir
    arr = [filename for filename in os.listdir(mlaac__code_path) if filename.startswith("ModelTraining")]
    docker_training_images = []
    for x in arr:
        dockertraing={}
        dockertraing['Training'] = str(x).lower()+'_'+model_name.lower()+':'+model_version
        docker_training_images.append(dockertraing)
        training_addri = os.path.join(mlaac__code_path,x)
        createDockerImage(model_name,model_version,str(x).lower(),training_addri)
    docker_images['ModelTraining'] = docker_training_images
    docker_images['ModelRegistry'] = 'modelregistry'+'_'+model_name.lower()+':'+model_version
    deploy_addr = os.path.join(mlaac__code_path,'ModelRegistry')
    createDockerImage(model_name,model_version,'modelregistry',deploy_addr)
    docker_images['ModelServing'] = 'modelserving'+'_'+model_name.lower()+':'+model_version
    deploy_addr = os.path.join(mlaac__code_path,'ModelServing')
    createDockerImage(model_name,model_version,'modelserving',deploy_addr)
    outputjsonFile = os.path.join(mlaac__code_path,'dockerlist.json')
    with open(outputjsonFile, 'w') as f:
        json.dump(docker_images, f)
        f.close()
    output = {'Status':'Success','Msg':outputjsonFile}
    output = json.dumps(output)
    print("aion_build_container:",output)
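A minimal sketch of how local_docker_build might be invoked, based on the keys it reads ('usecase', 'version', 'mlacPath'); the use-case name and path are placeholders.

# Hypothetical build config; 'mlacPath' must point at the generated MLaC code folders
# (ModelMonitoring, DataIngestion, DataTransformation, ...), each containing a Dockerfile.
import json
build_config = {
    "usecase": "mydemousecase",
    "version": "1",
    "mlacPath": "/aion/target/mydemousecase_1/MLaC",
}
# local_docker_build(json.dumps(build_config))
# On success it writes <mlacPath>/dockerlist.json, which local_pipeline.run_pipeline consumes.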
git_upload.py
import os
import sys
import json
from pathlib import Path
import subprocess
import shutil
import argparse

def create_and_save_yaml(git_storage_path, container_label,usecasepath):
    file_name_prefix = 'gh-acr-'
    yaml_file = f"""\
name: gh-acr-{container_label}
on:
  push:
    branches: main
    paths: {container_label}/**
  workflow_dispatch:
jobs:
  gh-acr-build-push:
    runs-on: ubuntu-latest
    steps:
    - name: 'checkout action'
      uses: actions/checkout@main
    - name: 'azure login'
      uses: azure/login@v1
      with:
        creds: ${{{{ secrets.AZURE_CREDENTIALS }}}}
    - name: 'build and push image'
      uses: azure/docker-login@v1
      with:
        login-server: ${{{{ secrets.REGISTRY_LOGIN_SERVER }}}}
        username: ${{{{ secrets.REGISTRY_USERNAME }}}}
        password: ${{{{ secrets.REGISTRY_PASSWORD }}}}
    - run: |
        docker build ./{container_label}/ModelMonitoring -t ${{{{ secrets.REGISTRY_LOGIN_SERVER }}}}/modelmonitoring:{container_label}
        docker push ${{{{ secrets.REGISTRY_LOGIN_SERVER }}}}/modelmonitoring:{container_label}
        docker build ./{container_label}/DataIngestion -t ${{{{ secrets.REGISTRY_LOGIN_SERVER }}}}/dataingestion:{container_label}
        docker push ${{{{ secrets.REGISTRY_LOGIN_SERVER }}}}/dataingestion:{container_label}
        docker build ./{container_label}/DataTransformation -t ${{{{ secrets.REGISTRY_LOGIN_SERVER }}}}/datatransformation:{container_label}
        docker push ${{{{ secrets.REGISTRY_LOGIN_SERVER }}}}/datatransformation:{container_label}
        docker build ./{container_label}/FeatureEngineering -t ${{{{ secrets.REGISTRY_LOGIN_SERVER }}}}/featureengineering:{container_label}
        docker push ${{{{ secrets.REGISTRY_LOGIN_SERVER }}}}/featureengineering:{container_label}
        docker build ./{container_label}/ModelRegistry -t ${{{{ secrets.REGISTRY_LOGIN_SERVER }}}}/modelregistry:{container_label}
        docker push ${{{{ secrets.REGISTRY_LOGIN_SERVER }}}}/modelregistry:{container_label}
        docker build ./{container_label}/ModelServing -t ${{{{ secrets.REGISTRY_LOGIN_SERVER }}}}/modelserving:{container_label}
        docker push ${{{{ secrets.REGISTRY_LOGIN_SERVER }}}}/modelserving:{container_label}
"""
    arr = [filename for filename in os.listdir(usecasepath) if filename.startswith("ModelTraining")]
    for x in arr:
        yaml_file+='        docker build ./'+container_label+'/'+x+' -t ${{ secrets.REGISTRY_LOGIN_SERVER }}/'+x.lower()+':'+container_label
        yaml_file+='\n'
        yaml_file+='        docker push ${{ secrets.REGISTRY_LOGIN_SERVER }}/'+x.lower()+':'+container_label
        yaml_file+='\n'
    with open(Path(git_storage_path)/(file_name_prefix + container_label + '.yaml'), 'w') as f:
        f.write(yaml_file)

def run_cmd(cmd):
    try:
        subprocess.check_output(cmd, stderr=subprocess.PIPE)
    except subprocess.CalledProcessError as e:
        if e.stderr:
            if isinstance(e.stderr, bytes):
                err_msg = e.stderr.decode(sys.getfilesystemencoding())
            else:
                err_msg = e.stderr
        elif e.output:
            if isinstance(e.output, bytes):
                err_msg = e.output.decode(sys.getfilesystemencoding())
            else:
                err_msg = e.output
        else:
            err_msg = str(e)
        return False, err_msg
    return True, ""

def validate_config(config):
    non_null_keys = ['url','username', 'token', 'location', 'gitFolderLocation', 'email', 'modelName']
    missing_keys = [k for k in non_null_keys if k not in config.keys()]
    if missing_keys:
        raise ValueError(f"following fields are missing in config file: {missing_keys}")
    for k,v in config.items():
        if k in non_null_keys and not v:
            raise ValueError(f"Please provide value for '{k}' in config file.")

def upload(config):
    validate_config(config)
    url_type = config.get('url_type','https')
    if url_type == 'https':
        https_str = "https://"
        url = https_str + config['username'] + ":" + config['token'] + "@" + config['url'][len(https_str):]
    else:
        url = config['url']
    model_location = Path(config['location'])
    git_folder_location = Path(config['gitFolderLocation'])
    git_folder_location.mkdir(parents=True, exist_ok=True)
    (git_folder_location/'.github'/'workflows').mkdir(parents=True, exist_ok=True)
    if not model_location.exists():
        raise ValueError('Trained model data not found')
    os.chdir(str(git_folder_location))
    (git_folder_location/config['modelName']).mkdir(parents=True, exist_ok=True)
    shutil.copytree(model_location, git_folder_location/config['modelName'], dirs_exist_ok=True)
    create_and_save_yaml((git_folder_location/'.github'/'workflows'), config['modelName'],config['location'])
    if (Path(git_folder_location)/'.git').exists():
        first_upload = False
    else:
        first_upload = True
    if first_upload:
        cmd = ['git','init']
        status, msg = run_cmd(cmd)
        if not status:
            raise ValueError(msg)
        cmd = ['git','config','user.name',config['username']]
        status, msg = run_cmd(cmd)
        if not status:
            raise ValueError(msg)
        cmd = ['git','config','user.email',config['email']]
        status, msg = run_cmd(cmd)
        if not status:
            raise ValueError(msg)
    cmd = ['git','add', '-A']
    status, msg = run_cmd(cmd)
    if not status:
        raise ValueError(msg)
    cmd = ['git','commit','-m',f"commit {config['modelName']}"]
    status, msg = run_cmd(cmd)
    if not status:
        raise ValueError(msg)
    cmd = ['git','branch','-M','main']
    status, msg = run_cmd(cmd)
    if not status:
        raise ValueError(msg)
    if first_upload:
        cmd = ['git','remote','add','origin', url]
        status, msg = run_cmd(cmd)
        if not status:
            raise ValueError(msg)
        cmd = ['git','push','-f','-u','origin', 'main']
        status, msg = run_cmd(cmd)
        if not status:
            raise ValueError(msg)
    else:
        cmd = ['git','push']
        status, msg = run_cmd(cmd)
        if not status:
            raise ValueError(msg)
    return json.dumps({'Status':'SUCCESS'})

if __name__ == '__main__':
    try:
        if shutil.which('git') is None:
            raise ValueError("git is not installed on this system")
        parser = argparse.ArgumentParser()
        parser.add_argument('-c', '--config', help='Config file location or as a string')
        args = parser.parse_args()
        if Path(args.config).is_file() and Path(args.config).suffix == '.json':
            with open(args.config,'r') as f:
                config = json.load(f)
        else:
            config = json.loads(args.config)
        print(upload(config))
    except Exception as e:
        status = {'Status':'Failure','msg':str(e)}
        print(json.dumps(status))
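A sketch of the config this script validates and consumes; the keys follow validate_config() and upload(), while every value below is a placeholder.

# Hypothetical git_upload.py config; all values are illustrative only.
upload_config = {
    "url": "https://github.com/example-org/aion-mlac.git",
    "url_type": "https",                                # optional, defaults to 'https'
    "username": "example-user",
    "token": "<personal-access-token>",
    "email": "example-user@example.com",
    "modelName": "mydemousecase_1",                     # folder name and workflow label
    "location": "/aion/target/mydemousecase_1/MLaC",    # trained MLaC code to upload
    "gitFolderLocation": "/aion/git_staging",           # local staging directory for the repo
}
# Pass it either as a file:   python git_upload.py -c config.json
# or inline as a JSON string: python git_upload.py -c '{"url": "...", ...}'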
__init__.py
'''
*
* =============================================================================
* COPYRIGHT NOTICE
* =============================================================================
* @ Copyright HCL Technologies Ltd. 2021, 2022,2023
* Proprietary and confidential. All information contained herein is, and
* remains the property of HCL Technologies Limited. Copying or reproducing the
* contents of this file, via any medium is strictly prohibited unless prior
* written permission is obtained from HCL Technologies Limited.
*
'''
kafka_consumer.py
from kafka import KafkaConsumer
from json import loads
import pandas as pd
import json
import os,sys
import time
import multiprocessing
from os.path import expanduser
import platform
import datetime

modelDetails = {}

class Process(multiprocessing.Process):
    def __init__(self, modelSignature,jsonData,predictedData,modelpath):
        super(Process, self).__init__()
        self.config = jsonData
        self.modelSignature = modelSignature
        self.data = predictedData
        self.modelpath = modelpath

    def run(self):
        #data = pd.json_normalize(self.data)
        minotoringService = self.config['minotoringService']['url']
        trainingdatalocation = self.config['trainingDataLocation'][self.modelSignature]
        #filetimestamp = 'AION_'+str(int(time.time()))+'.csv'
        #data.to_csv(dataFile, index=False)
        inputFieldsJson = {"trainingDataLocation":trainingdatalocation,"currentDataLocation":self.data}
        inputFieldsJson = json.dumps(inputFieldsJson)
        ser_url = minotoringService+self.modelSignature+'/monitoring'
        driftTime = datetime.datetime.now()
        import requests
        try:
            response = requests.post(ser_url, data=inputFieldsJson,headers={"Content-Type":"application/json",})
            outputStr=response.content
            outputStr = outputStr.decode('utf-8')
            outputStr = outputStr.strip()
            decoded_data = json.loads(outputStr)
            print(decoded_data)
            status = decoded_data['status']
            msg = decoded_data['data']
        except Exception as inst:
            if 'Failed to establish a new connection' in str(inst):
                status = 'Fail'
                msg = 'AION Service needs to be started'
            else:
                status = 'Fail'
                msg = 'Error during Drift Analysis'
        statusFile = os.path.join(self.modelpath,self.modelSignature+'_status.csv')
        df = pd.DataFrame(columns = ['dateTime', 'status', 'msg'])
        df = df.append({'dateTime' : driftTime, 'status' : status, 'msg' : msg},ignore_index = True)
        print(df)
        if (os.path.exists(statusFile)):
            df.to_csv(statusFile, mode='a', header=False,index=False)
        else:
            df.to_csv(statusFile, header=True,index=False)

def launch_kafka_consumer():
    from appbe.dataPath import DATA_DIR
    configfile = os.path.join(os.path.dirname(__file__),'..','config','kafkaConfig.conf')
    with open(configfile,'r',encoding='utf-8') as f:
        jsonData = json.load(f)
        f.close()
    kafkaIP=jsonData['kafkaCluster']['ip']
    kafkaport = jsonData['kafkaCluster']['port']
    topic = jsonData['kafkaCluster']['topic']
    kafkaurl = kafkaIP+':'+kafkaport
    if jsonData['database']['csv'] == 'True':
        database = 'csv'
    elif jsonData['database']['mySql'] == 'True':
        database = 'mySql'
    else:
        database = 'csv'
    kafkaPath = os.path.join(DATA_DIR,'kafka')
    if not (os.path.exists(kafkaPath)):
        try:
            os.makedirs(kafkaPath)
        except OSError as e:
            pass
    consumer = KafkaConsumer(topic,bootstrap_servers=[kafkaurl],auto_offset_reset='earliest',enable_auto_commit=True,group_id='my-group',value_deserializer=lambda x: loads(x.decode('utf-8')))
    for message in consumer:
        message = message.value
        data = message['data']
        data = pd.json_normalize(data)
        modelname = message['usecasename']
        version = message['version']
        modelSignature = modelname+'_'+str(version)
        modelpath = os.path.join(kafkaPath,modelSignature)
        try:
            os.makedirs(modelpath)
        except OSError as e:
            pass
        secondsSinceEpoch = time.time()
        if modelSignature not in modelDetails:
            modelDetails[modelSignature] = {}
            modelDetails[modelSignature]['startTime'] = secondsSinceEpoch
        if database == 'csv':
            csvfile = os.path.join(modelpath,modelSignature+'.csv')
            if (os.path.exists(csvfile)):
                data.to_csv(csvfile, mode='a', header=False,index=False)
            else:
                data.to_csv(csvfile, header=True,index=False)
        modelTimeFrame = jsonData['timeFrame'][modelSignature]
        currentseconds = time.time()
        print(currentseconds - modelDetails[modelSignature]['startTime'])
        if (currentseconds - modelDetails[modelSignature]['startTime']) >= float(modelTimeFrame):
            csv_path = os.path.join(modelpath,modelSignature+'.csv')
            #predictedData = pd.read_csv(csv_path)
            ##predictedData = predictedData.to_json(orient="records")
            index = Process(modelSignature,jsonData,csv_path,modelpath)
            index.start()
            modelDetails[modelSignature]['startTime'] = secondsSinceEpoch
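To illustrate the message shape this consumer reads ('usecasename', 'version', 'data'), a minimal producer sketch; the topic name, broker address, and record values are placeholders, not AION defaults.

# Hypothetical producer for illustration; adjust the broker and topic to your kafkaConfig.conf.
from kafka import KafkaProducer
from json import dumps

producer = KafkaProducer(bootstrap_servers=['localhost:9092'],
                         value_serializer=lambda v: dumps(v).encode('utf-8'))
message = {
    "usecasename": "mydemousecase",
    "version": 1,
    "data": [{"feature_1": 0.42, "feature_2": "A"}],  # rows appended to the per-model drift CSV
}
producer.send('aion_topic', value=message)
producer.flush()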