Unnamed: 0
int64
0
10k
source
stringlengths
27
7.27k
target
stringlengths
54
7.29k
600
public org . apache . thrift . async . AsyncMethodCallback < java . lang . Boolean > getResultHandler ( final org . apache . thrift . server . AbstractNonblockingServer . AsyncFrameBuffer fb , final int seqid ) { final org . apache . thrift . AsyncProcessFunction fcall = this ; return new org . apache . thrift . async . AsyncMethodCallback < java . lang . Boolean > ( ) { public void onComplete ( java . lang . Boolean o ) { throwsError_result result = new throwsError_result ( ) ; result . success = o ; result . setSuccessIsSet ( true ) ; try { fcall . sendResponse ( fb , result , org . apache . thrift . protocol . TMessageType . REPLY , seqid ) ; } catch ( org . apache . thrift . transport . TTransportException e ) { _LOGGER . error ( "TTransportException writing to internal frame buffer" , e ) ; fb . close ( ) ; } catch ( java . lang . Exception e ) { onError ( e ) ; } } public void onError ( java . lang . Exception e ) { byte msgType = org . apache . thrift . protocol . TMessageType . REPLY ; org . apache . thrift . TSerializable msg ; throwsError_result result = new throwsError_result ( ) ; if ( e instanceof ThriftSecurityException ) { result . ex = ( ThriftSecurityException ) e ; result . setExIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . thrift . transport . TTransportException ) { _LOGGER . error ( "TTransportException inside handler" , e ) ; fb . close ( ) ; return ; } else if ( e instanceof org . apache . thrift . TApplicationException ) { _LOGGER . error ( "TApplicationException inside handler" , e ) ; msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = ( org . apache . thrift . TApplicationException ) e ; } else { _LOGGER . error ( "Exception inside handler" , e ) ; msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = new org . apache . thrift . TApplicationException ( org . apache . thrift . TApplicationException . INTERNAL_ERROR , e . getMessage ( ) ) ; } try { fcall . 
sendResponse ( fb , msg , msgType , seqid ) ; } catch ( java . lang . Exception ex ) { _LOGGER . error ( "Exception writing to internal frame buffer" , ex ) ; fb . close ( ) ; } } } ; }
public org . apache . thrift . async . AsyncMethodCallback < java . lang . Boolean > getResultHandler ( final org . apache . thrift . server . AbstractNonblockingServer . AsyncFrameBuffer fb , final int seqid ) { final org . apache . thrift . AsyncProcessFunction fcall = this ; return new org . apache . thrift . async . AsyncMethodCallback < java . lang . Boolean > ( ) { public void onComplete ( java . lang . Boolean o ) { throwsError_result result = new throwsError_result ( ) ; result . success = o ; result . setSuccessIsSet ( true ) ; try { fcall . sendResponse ( fb , result , org . apache . thrift . protocol . TMessageType . REPLY , seqid ) ; } catch ( org . apache . thrift . transport . TTransportException e ) { _LOGGER . error ( "TTransportException writing to internal frame buffer" , e ) ; fb . close ( ) ; } catch ( java . lang . Exception e ) { _LOGGER . error ( "Exception writing to internal frame buffer" , e ) ; onError ( e ) ; } } public void onError ( java . lang . Exception e ) { byte msgType = org . apache . thrift . protocol . TMessageType . REPLY ; org . apache . thrift . TSerializable msg ; throwsError_result result = new throwsError_result ( ) ; if ( e instanceof ThriftSecurityException ) { result . ex = ( ThriftSecurityException ) e ; result . setExIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . thrift . transport . TTransportException ) { _LOGGER . error ( "TTransportException inside handler" , e ) ; fb . close ( ) ; return ; } else if ( e instanceof org . apache . thrift . TApplicationException ) { _LOGGER . error ( "TApplicationException inside handler" , e ) ; msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = ( org . apache . thrift . TApplicationException ) e ; } else { _LOGGER . error ( "Exception inside handler" , e ) ; msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = new org . apache . thrift . TApplicationException ( org . apache . thrift . 
TApplicationException . INTERNAL_ERROR , e . getMessage ( ) ) ; } try { fcall . sendResponse ( fb , msg , msgType , seqid ) ; } catch ( java . lang . Exception ex ) { _LOGGER . error ( "Exception writing to internal frame buffer" , ex ) ; fb . close ( ) ; } } } ; }
601
public org . apache . thrift . async . AsyncMethodCallback < java . lang . Boolean > getResultHandler ( final org . apache . thrift . server . AbstractNonblockingServer . AsyncFrameBuffer fb , final int seqid ) { final org . apache . thrift . AsyncProcessFunction fcall = this ; return new org . apache . thrift . async . AsyncMethodCallback < java . lang . Boolean > ( ) { public void onComplete ( java . lang . Boolean o ) { throwsError_result result = new throwsError_result ( ) ; result . success = o ; result . setSuccessIsSet ( true ) ; try { fcall . sendResponse ( fb , result , org . apache . thrift . protocol . TMessageType . REPLY , seqid ) ; } catch ( org . apache . thrift . transport . TTransportException e ) { _LOGGER . error ( "TTransportException writing to internal frame buffer" , e ) ; fb . close ( ) ; } catch ( java . lang . Exception e ) { _LOGGER . error ( "Exception writing to internal frame buffer" , e ) ; onError ( e ) ; } } public void onError ( java . lang . Exception e ) { byte msgType = org . apache . thrift . protocol . TMessageType . REPLY ; org . apache . thrift . TSerializable msg ; throwsError_result result = new throwsError_result ( ) ; if ( e instanceof ThriftSecurityException ) { result . ex = ( ThriftSecurityException ) e ; result . setExIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . thrift . transport . TTransportException ) { fb . close ( ) ; return ; } else if ( e instanceof org . apache . thrift . TApplicationException ) { _LOGGER . error ( "TApplicationException inside handler" , e ) ; msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = ( org . apache . thrift . TApplicationException ) e ; } else { _LOGGER . error ( "Exception inside handler" , e ) ; msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = new org . apache . thrift . TApplicationException ( org . apache . thrift . TApplicationException . INTERNAL_ERROR , e . getMessage ( ) ) ; } try { fcall . 
sendResponse ( fb , msg , msgType , seqid ) ; } catch ( java . lang . Exception ex ) { _LOGGER . error ( "Exception writing to internal frame buffer" , ex ) ; fb . close ( ) ; } } } ; }
public org . apache . thrift . async . AsyncMethodCallback < java . lang . Boolean > getResultHandler ( final org . apache . thrift . server . AbstractNonblockingServer . AsyncFrameBuffer fb , final int seqid ) { final org . apache . thrift . AsyncProcessFunction fcall = this ; return new org . apache . thrift . async . AsyncMethodCallback < java . lang . Boolean > ( ) { public void onComplete ( java . lang . Boolean o ) { throwsError_result result = new throwsError_result ( ) ; result . success = o ; result . setSuccessIsSet ( true ) ; try { fcall . sendResponse ( fb , result , org . apache . thrift . protocol . TMessageType . REPLY , seqid ) ; } catch ( org . apache . thrift . transport . TTransportException e ) { _LOGGER . error ( "TTransportException writing to internal frame buffer" , e ) ; fb . close ( ) ; } catch ( java . lang . Exception e ) { _LOGGER . error ( "Exception writing to internal frame buffer" , e ) ; onError ( e ) ; } } public void onError ( java . lang . Exception e ) { byte msgType = org . apache . thrift . protocol . TMessageType . REPLY ; org . apache . thrift . TSerializable msg ; throwsError_result result = new throwsError_result ( ) ; if ( e instanceof ThriftSecurityException ) { result . ex = ( ThriftSecurityException ) e ; result . setExIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . thrift . transport . TTransportException ) { _LOGGER . error ( "TTransportException inside handler" , e ) ; fb . close ( ) ; return ; } else if ( e instanceof org . apache . thrift . TApplicationException ) { _LOGGER . error ( "TApplicationException inside handler" , e ) ; msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = ( org . apache . thrift . TApplicationException ) e ; } else { _LOGGER . error ( "Exception inside handler" , e ) ; msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = new org . apache . thrift . TApplicationException ( org . apache . thrift . 
TApplicationException . INTERNAL_ERROR , e . getMessage ( ) ) ; } try { fcall . sendResponse ( fb , msg , msgType , seqid ) ; } catch ( java . lang . Exception ex ) { _LOGGER . error ( "Exception writing to internal frame buffer" , ex ) ; fb . close ( ) ; } } } ; }
602
public org . apache . thrift . async . AsyncMethodCallback < java . lang . Boolean > getResultHandler ( final org . apache . thrift . server . AbstractNonblockingServer . AsyncFrameBuffer fb , final int seqid ) { final org . apache . thrift . AsyncProcessFunction fcall = this ; return new org . apache . thrift . async . AsyncMethodCallback < java . lang . Boolean > ( ) { public void onComplete ( java . lang . Boolean o ) { throwsError_result result = new throwsError_result ( ) ; result . success = o ; result . setSuccessIsSet ( true ) ; try { fcall . sendResponse ( fb , result , org . apache . thrift . protocol . TMessageType . REPLY , seqid ) ; } catch ( org . apache . thrift . transport . TTransportException e ) { _LOGGER . error ( "TTransportException writing to internal frame buffer" , e ) ; fb . close ( ) ; } catch ( java . lang . Exception e ) { _LOGGER . error ( "Exception writing to internal frame buffer" , e ) ; onError ( e ) ; } } public void onError ( java . lang . Exception e ) { byte msgType = org . apache . thrift . protocol . TMessageType . REPLY ; org . apache . thrift . TSerializable msg ; throwsError_result result = new throwsError_result ( ) ; if ( e instanceof ThriftSecurityException ) { result . ex = ( ThriftSecurityException ) e ; result . setExIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . thrift . transport . TTransportException ) { _LOGGER . error ( "TTransportException inside handler" , e ) ; fb . close ( ) ; return ; } else if ( e instanceof org . apache . thrift . TApplicationException ) { msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = ( org . apache . thrift . TApplicationException ) e ; } else { _LOGGER . error ( "Exception inside handler" , e ) ; msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = new org . apache . thrift . TApplicationException ( org . apache . thrift . TApplicationException . INTERNAL_ERROR , e . getMessage ( ) ) ; } try { fcall . 
sendResponse ( fb , msg , msgType , seqid ) ; } catch ( java . lang . Exception ex ) { _LOGGER . error ( "Exception writing to internal frame buffer" , ex ) ; fb . close ( ) ; } } } ; }
public org . apache . thrift . async . AsyncMethodCallback < java . lang . Boolean > getResultHandler ( final org . apache . thrift . server . AbstractNonblockingServer . AsyncFrameBuffer fb , final int seqid ) { final org . apache . thrift . AsyncProcessFunction fcall = this ; return new org . apache . thrift . async . AsyncMethodCallback < java . lang . Boolean > ( ) { public void onComplete ( java . lang . Boolean o ) { throwsError_result result = new throwsError_result ( ) ; result . success = o ; result . setSuccessIsSet ( true ) ; try { fcall . sendResponse ( fb , result , org . apache . thrift . protocol . TMessageType . REPLY , seqid ) ; } catch ( org . apache . thrift . transport . TTransportException e ) { _LOGGER . error ( "TTransportException writing to internal frame buffer" , e ) ; fb . close ( ) ; } catch ( java . lang . Exception e ) { _LOGGER . error ( "Exception writing to internal frame buffer" , e ) ; onError ( e ) ; } } public void onError ( java . lang . Exception e ) { byte msgType = org . apache . thrift . protocol . TMessageType . REPLY ; org . apache . thrift . TSerializable msg ; throwsError_result result = new throwsError_result ( ) ; if ( e instanceof ThriftSecurityException ) { result . ex = ( ThriftSecurityException ) e ; result . setExIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . thrift . transport . TTransportException ) { _LOGGER . error ( "TTransportException inside handler" , e ) ; fb . close ( ) ; return ; } else if ( e instanceof org . apache . thrift . TApplicationException ) { _LOGGER . error ( "TApplicationException inside handler" , e ) ; msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = ( org . apache . thrift . TApplicationException ) e ; } else { _LOGGER . error ( "Exception inside handler" , e ) ; msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = new org . apache . thrift . TApplicationException ( org . apache . thrift . 
TApplicationException . INTERNAL_ERROR , e . getMessage ( ) ) ; } try { fcall . sendResponse ( fb , msg , msgType , seqid ) ; } catch ( java . lang . Exception ex ) { _LOGGER . error ( "Exception writing to internal frame buffer" , ex ) ; fb . close ( ) ; } } } ; }
603
public org . apache . thrift . async . AsyncMethodCallback < java . lang . Boolean > getResultHandler ( final org . apache . thrift . server . AbstractNonblockingServer . AsyncFrameBuffer fb , final int seqid ) { final org . apache . thrift . AsyncProcessFunction fcall = this ; return new org . apache . thrift . async . AsyncMethodCallback < java . lang . Boolean > ( ) { public void onComplete ( java . lang . Boolean o ) { throwsError_result result = new throwsError_result ( ) ; result . success = o ; result . setSuccessIsSet ( true ) ; try { fcall . sendResponse ( fb , result , org . apache . thrift . protocol . TMessageType . REPLY , seqid ) ; } catch ( org . apache . thrift . transport . TTransportException e ) { _LOGGER . error ( "TTransportException writing to internal frame buffer" , e ) ; fb . close ( ) ; } catch ( java . lang . Exception e ) { _LOGGER . error ( "Exception writing to internal frame buffer" , e ) ; onError ( e ) ; } } public void onError ( java . lang . Exception e ) { byte msgType = org . apache . thrift . protocol . TMessageType . REPLY ; org . apache . thrift . TSerializable msg ; throwsError_result result = new throwsError_result ( ) ; if ( e instanceof ThriftSecurityException ) { result . ex = ( ThriftSecurityException ) e ; result . setExIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . thrift . transport . TTransportException ) { _LOGGER . error ( "TTransportException inside handler" , e ) ; fb . close ( ) ; return ; } else if ( e instanceof org . apache . thrift . TApplicationException ) { _LOGGER . error ( "TApplicationException inside handler" , e ) ; msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = ( org . apache . thrift . TApplicationException ) e ; } else { msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = new org . apache . thrift . TApplicationException ( org . apache . thrift . TApplicationException . INTERNAL_ERROR , e . 
getMessage ( ) ) ; } try { fcall . sendResponse ( fb , msg , msgType , seqid ) ; } catch ( java . lang . Exception ex ) { _LOGGER . error ( "Exception writing to internal frame buffer" , ex ) ; fb . close ( ) ; } } } ; }
public org . apache . thrift . async . AsyncMethodCallback < java . lang . Boolean > getResultHandler ( final org . apache . thrift . server . AbstractNonblockingServer . AsyncFrameBuffer fb , final int seqid ) { final org . apache . thrift . AsyncProcessFunction fcall = this ; return new org . apache . thrift . async . AsyncMethodCallback < java . lang . Boolean > ( ) { public void onComplete ( java . lang . Boolean o ) { throwsError_result result = new throwsError_result ( ) ; result . success = o ; result . setSuccessIsSet ( true ) ; try { fcall . sendResponse ( fb , result , org . apache . thrift . protocol . TMessageType . REPLY , seqid ) ; } catch ( org . apache . thrift . transport . TTransportException e ) { _LOGGER . error ( "TTransportException writing to internal frame buffer" , e ) ; fb . close ( ) ; } catch ( java . lang . Exception e ) { _LOGGER . error ( "Exception writing to internal frame buffer" , e ) ; onError ( e ) ; } } public void onError ( java . lang . Exception e ) { byte msgType = org . apache . thrift . protocol . TMessageType . REPLY ; org . apache . thrift . TSerializable msg ; throwsError_result result = new throwsError_result ( ) ; if ( e instanceof ThriftSecurityException ) { result . ex = ( ThriftSecurityException ) e ; result . setExIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . thrift . transport . TTransportException ) { _LOGGER . error ( "TTransportException inside handler" , e ) ; fb . close ( ) ; return ; } else if ( e instanceof org . apache . thrift . TApplicationException ) { _LOGGER . error ( "TApplicationException inside handler" , e ) ; msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = ( org . apache . thrift . TApplicationException ) e ; } else { _LOGGER . error ( "Exception inside handler" , e ) ; msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = new org . apache . thrift . TApplicationException ( org . apache . thrift . 
TApplicationException . INTERNAL_ERROR , e . getMessage ( ) ) ; } try { fcall . sendResponse ( fb , msg , msgType , seqid ) ; } catch ( java . lang . Exception ex ) { _LOGGER . error ( "Exception writing to internal frame buffer" , ex ) ; fb . close ( ) ; } } } ; }
604
public org . apache . thrift . async . AsyncMethodCallback < java . lang . Boolean > getResultHandler ( final org . apache . thrift . server . AbstractNonblockingServer . AsyncFrameBuffer fb , final int seqid ) { final org . apache . thrift . AsyncProcessFunction fcall = this ; return new org . apache . thrift . async . AsyncMethodCallback < java . lang . Boolean > ( ) { public void onComplete ( java . lang . Boolean o ) { throwsError_result result = new throwsError_result ( ) ; result . success = o ; result . setSuccessIsSet ( true ) ; try { fcall . sendResponse ( fb , result , org . apache . thrift . protocol . TMessageType . REPLY , seqid ) ; } catch ( org . apache . thrift . transport . TTransportException e ) { _LOGGER . error ( "TTransportException writing to internal frame buffer" , e ) ; fb . close ( ) ; } catch ( java . lang . Exception e ) { _LOGGER . error ( "Exception writing to internal frame buffer" , e ) ; onError ( e ) ; } } public void onError ( java . lang . Exception e ) { byte msgType = org . apache . thrift . protocol . TMessageType . REPLY ; org . apache . thrift . TSerializable msg ; throwsError_result result = new throwsError_result ( ) ; if ( e instanceof ThriftSecurityException ) { result . ex = ( ThriftSecurityException ) e ; result . setExIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . thrift . transport . TTransportException ) { _LOGGER . error ( "TTransportException inside handler" , e ) ; fb . close ( ) ; return ; } else if ( e instanceof org . apache . thrift . TApplicationException ) { _LOGGER . error ( "TApplicationException inside handler" , e ) ; msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = ( org . apache . thrift . TApplicationException ) e ; } else { _LOGGER . error ( "Exception inside handler" , e ) ; msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = new org . apache . thrift . TApplicationException ( org . apache . thrift . 
TApplicationException . INTERNAL_ERROR , e . getMessage ( ) ) ; } try { fcall . sendResponse ( fb , msg , msgType , seqid ) ; } catch ( java . lang . Exception ex ) { fb . close ( ) ; } } } ; }
public org . apache . thrift . async . AsyncMethodCallback < java . lang . Boolean > getResultHandler ( final org . apache . thrift . server . AbstractNonblockingServer . AsyncFrameBuffer fb , final int seqid ) { final org . apache . thrift . AsyncProcessFunction fcall = this ; return new org . apache . thrift . async . AsyncMethodCallback < java . lang . Boolean > ( ) { public void onComplete ( java . lang . Boolean o ) { throwsError_result result = new throwsError_result ( ) ; result . success = o ; result . setSuccessIsSet ( true ) ; try { fcall . sendResponse ( fb , result , org . apache . thrift . protocol . TMessageType . REPLY , seqid ) ; } catch ( org . apache . thrift . transport . TTransportException e ) { _LOGGER . error ( "TTransportException writing to internal frame buffer" , e ) ; fb . close ( ) ; } catch ( java . lang . Exception e ) { _LOGGER . error ( "Exception writing to internal frame buffer" , e ) ; onError ( e ) ; } } public void onError ( java . lang . Exception e ) { byte msgType = org . apache . thrift . protocol . TMessageType . REPLY ; org . apache . thrift . TSerializable msg ; throwsError_result result = new throwsError_result ( ) ; if ( e instanceof ThriftSecurityException ) { result . ex = ( ThriftSecurityException ) e ; result . setExIsSet ( true ) ; msg = result ; } else if ( e instanceof org . apache . thrift . transport . TTransportException ) { _LOGGER . error ( "TTransportException inside handler" , e ) ; fb . close ( ) ; return ; } else if ( e instanceof org . apache . thrift . TApplicationException ) { _LOGGER . error ( "TApplicationException inside handler" , e ) ; msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = ( org . apache . thrift . TApplicationException ) e ; } else { _LOGGER . error ( "Exception inside handler" , e ) ; msgType = org . apache . thrift . protocol . TMessageType . EXCEPTION ; msg = new org . apache . thrift . TApplicationException ( org . apache . thrift . 
TApplicationException . INTERNAL_ERROR , e . getMessage ( ) ) ; } try { fcall . sendResponse ( fb , msg , msgType , seqid ) ; } catch ( java . lang . Exception ex ) { _LOGGER . error ( "Exception writing to internal frame buffer" , ex ) ; fb . close ( ) ; } } } ; }
605
private final void putOnOffType ( Map < Channel , State > targetMap , Channel channel , long value ) { State val = ChannelUtil . mapValue ( channel , value ) ; targetMap . put ( channel , val ) ; }
private final void putOnOffType ( Map < Channel , State > targetMap , Channel channel , long value ) { State val = ChannelUtil . mapValue ( channel , value ) ; targetMap . put ( channel , val ) ; logger . debug ( "Channel {} transformed to OnOffType ({}) -> {}" , channel . getUID ( ) . getId ( ) , value , val ) ; }
606
private synchronized Boolean checkGcLoggingStatefulSets ( String namespaceName , String statefulSetName ) { Container container = kubeClient ( namespaceName ) . getStatefulSet ( statefulSetName ) . getSpec ( ) . getTemplate ( ) . getSpec ( ) . getContainers ( ) . get ( 0 ) ; LOGGER . info ( "Checking container with name: {}" , container . getName ( ) ) ; return checkEnvVarValue ( container ) ; }
private synchronized Boolean checkGcLoggingStatefulSets ( String namespaceName , String statefulSetName ) { LOGGER . info ( "Checking stateful set: {}" , statefulSetName ) ; Container container = kubeClient ( namespaceName ) . getStatefulSet ( statefulSetName ) . getSpec ( ) . getTemplate ( ) . getSpec ( ) . getContainers ( ) . get ( 0 ) ; LOGGER . info ( "Checking container with name: {}" , container . getName ( ) ) ; return checkEnvVarValue ( container ) ; }
607
private synchronized Boolean checkGcLoggingStatefulSets ( String namespaceName , String statefulSetName ) { LOGGER . info ( "Checking stateful set: {}" , statefulSetName ) ; Container container = kubeClient ( namespaceName ) . getStatefulSet ( statefulSetName ) . getSpec ( ) . getTemplate ( ) . getSpec ( ) . getContainers ( ) . get ( 0 ) ; return checkEnvVarValue ( container ) ; }
private synchronized Boolean checkGcLoggingStatefulSets ( String namespaceName , String statefulSetName ) { LOGGER . info ( "Checking stateful set: {}" , statefulSetName ) ; Container container = kubeClient ( namespaceName ) . getStatefulSet ( statefulSetName ) . getSpec ( ) . getTemplate ( ) . getSpec ( ) . getContainers ( ) . get ( 0 ) ; LOGGER . info ( "Checking container with name: {}" , container . getName ( ) ) ; return checkEnvVarValue ( container ) ; }
608
public void respond ( Attributes attributes ) { try { PageParameters parameters = attributes . getParameters ( ) ; int pageId = parameters . get ( "pageId" ) . toInt ( ) ; Page page = ( Page ) WebSession . get ( ) . getPageManager ( ) . getPage ( pageId ) ; AjaxRequestTarget newAjaxRequestTarget = ( ( WebApplication ) Application . get ( ) ) . newAjaxRequestTarget ( page ) ; RequestCycle . get ( ) . scheduleRequestHandlerAfterCurrent ( newAjaxRequestTarget ) ; page . send ( page , Broadcast . BREADTH , new JEEWebGlobalAjaxEvent ( newAjaxRequestTarget , parameters , RequestCycle . get ( ) . getRequest ( ) . getPostParameters ( ) ) ) ; } catch ( Exception e ) { } }
public void respond ( Attributes attributes ) { try { PageParameters parameters = attributes . getParameters ( ) ; int pageId = parameters . get ( "pageId" ) . toInt ( ) ; Page page = ( Page ) WebSession . get ( ) . getPageManager ( ) . getPage ( pageId ) ; AjaxRequestTarget newAjaxRequestTarget = ( ( WebApplication ) Application . get ( ) ) . newAjaxRequestTarget ( page ) ; RequestCycle . get ( ) . scheduleRequestHandlerAfterCurrent ( newAjaxRequestTarget ) ; page . send ( page , Broadcast . BREADTH , new JEEWebGlobalAjaxEvent ( newAjaxRequestTarget , parameters , RequestCycle . get ( ) . getRequest ( ) . getPostParameters ( ) ) ) ; } catch ( Exception e ) { LOGGER . error ( "Error while processing the ajax request" , e ) ; } }
609
public String getRequestURI ( ) { if ( LOG . isLoggable ( Level . FINE ) ) { } return requestHeaders . get ( WebSocketUtils . URI_KEY ) ; }
public String getRequestURI ( ) { if ( LOG . isLoggable ( Level . FINE ) ) { LOG . log ( Level . FINE , "getRequestURI " + requestHeaders . get ( WebSocketUtils . URI_KEY ) ) ; } return requestHeaders . get ( WebSocketUtils . URI_KEY ) ; }
610
@ Programmatic @ Override public List < Object > importData ( final Object previousRow ) { counter ++ ; if ( counter == 1 ) { System . out . println ( ) ; } Unit unit = unitRepository . findUnitByReference ( unitReference ) ; if ( unit == null ) { unit = unitRepository . findUnitByReference ( unitReference . replace ( " " , "+" ) ) ; } if ( unit == null ) { throw new IllegalArgumentException ( "Unknown unit: " . concat ( unitReference ) ) ; } LandRegister landRegister = null ; List < FixedAssetRegistration > farts = fixedAssetRegistrationRepository . findBySubject ( unit ) ; if ( ! farts . isEmpty ( ) ) { landRegister = ( LandRegister ) farts . get ( 0 ) ; } if ( landRegister == null ) { landRegister = ( LandRegister ) fixedAssetRegistrationTypeRepository . findByTitle ( "LandRegister" ) . create ( factoryService ) ; repositoryService . persist ( landRegister ) ; } landRegister . setSubject ( unit ) ; landRegister . setComuneAmministrativo ( comuneAmministrativo ) ; landRegister . setComuneCatastale ( comuneCatastale ) ; landRegister . setCodiceComuneCatastale ( codiceComuneCatastale ) ; landRegister . setRendita ( rendita == null ? null : rendita . setScale ( 2 , RoundingMode . HALF_EVEN ) ) ; landRegister . setFoglio ( foglio ) ; landRegister . setParticella ( particella ) ; landRegister . setSubalterno ( subalterno ) ; landRegister . setCategoria ( categoria ) ; landRegister . setClasse ( classe ) ; landRegister . setConsistenza ( consistenza ) ; System . out . print ( "." ) ; return null ; }
@ Programmatic @ Override public List < Object > importData ( final Object previousRow ) { counter ++ ; if ( counter == 1 ) { System . out . println ( ) ; LOG . info ( "importing" ) ; } Unit unit = unitRepository . findUnitByReference ( unitReference ) ; if ( unit == null ) { unit = unitRepository . findUnitByReference ( unitReference . replace ( " " , "+" ) ) ; } if ( unit == null ) { throw new IllegalArgumentException ( "Unknown unit: " . concat ( unitReference ) ) ; } LandRegister landRegister = null ; List < FixedAssetRegistration > farts = fixedAssetRegistrationRepository . findBySubject ( unit ) ; if ( ! farts . isEmpty ( ) ) { landRegister = ( LandRegister ) farts . get ( 0 ) ; } if ( landRegister == null ) { landRegister = ( LandRegister ) fixedAssetRegistrationTypeRepository . findByTitle ( "LandRegister" ) . create ( factoryService ) ; repositoryService . persist ( landRegister ) ; } landRegister . setSubject ( unit ) ; landRegister . setComuneAmministrativo ( comuneAmministrativo ) ; landRegister . setComuneCatastale ( comuneCatastale ) ; landRegister . setCodiceComuneCatastale ( codiceComuneCatastale ) ; landRegister . setRendita ( rendita == null ? null : rendita . setScale ( 2 , RoundingMode . HALF_EVEN ) ) ; landRegister . setFoglio ( foglio ) ; landRegister . setParticella ( particella ) ; landRegister . setSubalterno ( subalterno ) ; landRegister . setCategoria ( categoria ) ; landRegister . setClasse ( classe ) ; landRegister . setConsistenza ( consistenza ) ; System . out . print ( "." ) ; return null ; }
611
private String createCallbackUrl ( ) { if ( callbackUrl != null ) { return callbackUrl ; } else { final String ipAddress = networkAddressService . getPrimaryIpv4HostAddress ( ) ; if ( ipAddress == null ) { logger . warn ( "No network interface could be found." ) ; return null ; } logger . debug ( "The callback ip address obtained from the Network Address Service was:{}" , ipAddress ) ; return ipAddress ; } }
private String createCallbackUrl ( ) { if ( callbackUrl != null ) { logger . debug ( "The callback ip address from the OSGI is:{}" , callbackUrl ) ; return callbackUrl ; } else { final String ipAddress = networkAddressService . getPrimaryIpv4HostAddress ( ) ; if ( ipAddress == null ) { logger . warn ( "No network interface could be found." ) ; return null ; } logger . debug ( "The callback ip address obtained from the Network Address Service was:{}" , ipAddress ) ; return ipAddress ; } }
612
private String createCallbackUrl ( ) { if ( callbackUrl != null ) { logger . debug ( "The callback ip address from the OSGI is:{}" , callbackUrl ) ; return callbackUrl ; } else { final String ipAddress = networkAddressService . getPrimaryIpv4HostAddress ( ) ; if ( ipAddress == null ) { return null ; } logger . debug ( "The callback ip address obtained from the Network Address Service was:{}" , ipAddress ) ; return ipAddress ; } }
private String createCallbackUrl ( ) { if ( callbackUrl != null ) { logger . debug ( "The callback ip address from the OSGI is:{}" , callbackUrl ) ; return callbackUrl ; } else { final String ipAddress = networkAddressService . getPrimaryIpv4HostAddress ( ) ; if ( ipAddress == null ) { logger . warn ( "No network interface could be found." ) ; return null ; } logger . debug ( "The callback ip address obtained from the Network Address Service was:{}" , ipAddress ) ; return ipAddress ; } }
613
private String createCallbackUrl ( ) { if ( callbackUrl != null ) { logger . debug ( "The callback ip address from the OSGI is:{}" , callbackUrl ) ; return callbackUrl ; } else { final String ipAddress = networkAddressService . getPrimaryIpv4HostAddress ( ) ; if ( ipAddress == null ) { logger . warn ( "No network interface could be found." ) ; return null ; } return ipAddress ; } }
private String createCallbackUrl ( ) { if ( callbackUrl != null ) { logger . debug ( "The callback ip address from the OSGI is:{}" , callbackUrl ) ; return callbackUrl ; } else { final String ipAddress = networkAddressService . getPrimaryIpv4HostAddress ( ) ; if ( ipAddress == null ) { logger . warn ( "No network interface could be found." ) ; return null ; } logger . debug ( "The callback ip address obtained from the Network Address Service was:{}" , ipAddress ) ; return ipAddress ; } }
614
@ SuppressWarnings ( { "unchecked" } ) public Object invoke ( final RemoteInvocation invocation , final Object targetObject ) throws NoSuchMethodException , IllegalAccessException , InvocationTargetException { try { SecurityManager securityManager = this . securityManager != null ? this . securityManager : SecurityUtils . getSecurityManager ( ) ; Subject . Builder builder = new Subject . Builder ( securityManager ) ; String host = ( String ) invocation . getAttribute ( SecureRemoteInvocationFactory . HOST_KEY ) ; if ( host != null ) { builder . host ( host ) ; } Serializable sessionId = invocation . getAttribute ( SecureRemoteInvocationFactory . SESSION_ID_KEY ) ; if ( sessionId != null ) { builder . sessionId ( sessionId ) ; } else { if ( log . isTraceEnabled ( ) ) { } } Subject subject = builder . buildSubject ( ) ; return subject . execute ( new Callable ( ) { public Object call ( ) throws Exception { return SecureRemoteInvocationExecutor . super . invoke ( invocation , targetObject ) ; } } ) ; } catch ( ExecutionException e ) { Throwable cause = e . getCause ( ) ; if ( cause instanceof NoSuchMethodException ) { throw ( NoSuchMethodException ) cause ; } else if ( cause instanceof IllegalAccessException ) { throw ( IllegalAccessException ) cause ; } else if ( cause instanceof InvocationTargetException ) { throw ( InvocationTargetException ) cause ; } else { throw new InvocationTargetException ( cause ) ; } } catch ( Throwable t ) { throw new InvocationTargetException ( t ) ; } }
@ SuppressWarnings ( { "unchecked" } ) public Object invoke ( final RemoteInvocation invocation , final Object targetObject ) throws NoSuchMethodException , IllegalAccessException , InvocationTargetException { try { SecurityManager securityManager = this . securityManager != null ? this . securityManager : SecurityUtils . getSecurityManager ( ) ; Subject . Builder builder = new Subject . Builder ( securityManager ) ; String host = ( String ) invocation . getAttribute ( SecureRemoteInvocationFactory . HOST_KEY ) ; if ( host != null ) { builder . host ( host ) ; } Serializable sessionId = invocation . getAttribute ( SecureRemoteInvocationFactory . SESSION_ID_KEY ) ; if ( sessionId != null ) { builder . sessionId ( sessionId ) ; } else { if ( log . isTraceEnabled ( ) ) { log . trace ( "RemoteInvocation did not contain a Shiro Session id attribute under " + "key [" + SecureRemoteInvocationFactory . SESSION_ID_KEY + "]. A Subject based " + "on an existing Session will not be available during the method invocation." ) ; } } Subject subject = builder . buildSubject ( ) ; return subject . execute ( new Callable ( ) { public Object call ( ) throws Exception { return SecureRemoteInvocationExecutor . super . invoke ( invocation , targetObject ) ; } } ) ; } catch ( ExecutionException e ) { Throwable cause = e . getCause ( ) ; if ( cause instanceof NoSuchMethodException ) { throw ( NoSuchMethodException ) cause ; } else if ( cause instanceof IllegalAccessException ) { throw ( IllegalAccessException ) cause ; } else if ( cause instanceof InvocationTargetException ) { throw ( InvocationTargetException ) cause ; } else { throw new InvocationTargetException ( cause ) ; } } catch ( Throwable t ) { throw new InvocationTargetException ( t ) ; } }
615
private ZigBeeStatus setSupportedInputClusters ( Collection < Integer > supportedClusters ) { TelegesisSetInputClustersCommand inputClusters = new TelegesisSetInputClustersCommand ( ) ; inputClusters . setClusterList ( supportedClusters ) ; if ( frameHandler . sendRequest ( inputClusters ) == null ) { return ZigBeeStatus . BAD_RESPONSE ; } return ZigBeeStatus . SUCCESS ; }
private ZigBeeStatus setSupportedInputClusters ( Collection < Integer > supportedClusters ) { TelegesisSetInputClustersCommand inputClusters = new TelegesisSetInputClustersCommand ( ) ; inputClusters . setClusterList ( supportedClusters ) ; if ( frameHandler . sendRequest ( inputClusters ) == null ) { logger . debug ( "Error setting Telegesis input clusters" ) ; return ZigBeeStatus . BAD_RESPONSE ; } return ZigBeeStatus . SUCCESS ; }
616
private void prepareResults ( ) { paginatedList = ( EgovPaginatedList ) searchResult ; final List < Object [ ] > list = paginatedList . getList ( ) ; paginatedList . setList ( populateDishonorChequeBean ( list ) ) ; LOGGER . debug ( "Exiting from prepareResults" ) ; }
private void prepareResults ( ) { LOGGER . debug ( "Entering into prepareResults" ) ; paginatedList = ( EgovPaginatedList ) searchResult ; final List < Object [ ] > list = paginatedList . getList ( ) ; paginatedList . setList ( populateDishonorChequeBean ( list ) ) ; LOGGER . debug ( "Exiting from prepareResults" ) ; }
617
private void prepareResults ( ) { LOGGER . debug ( "Entering into prepareResults" ) ; paginatedList = ( EgovPaginatedList ) searchResult ; final List < Object [ ] > list = paginatedList . getList ( ) ; paginatedList . setList ( populateDishonorChequeBean ( list ) ) ; }
private void prepareResults ( ) { LOGGER . debug ( "Entering into prepareResults" ) ; paginatedList = ( EgovPaginatedList ) searchResult ; final List < Object [ ] > list = paginatedList . getList ( ) ; paginatedList . setList ( populateDishonorChequeBean ( list ) ) ; LOGGER . debug ( "Exiting from prepareResults" ) ; }
618
public static com . liferay . bookmarks . model . BookmarksEntry moveEntryToTrash ( HttpPrincipal httpPrincipal , long entryId ) throws com . liferay . portal . kernel . exception . PortalException { try { MethodKey methodKey = new MethodKey ( BookmarksEntryServiceUtil . class , "moveEntryToTrash" , _moveEntryToTrashParameterTypes16 ) ; MethodHandler methodHandler = new MethodHandler ( methodKey , entryId ) ; Object returnObj = null ; try { returnObj = TunnelUtil . invoke ( httpPrincipal , methodHandler ) ; } catch ( Exception exception ) { if ( exception instanceof com . liferay . portal . kernel . exception . PortalException ) { throw ( com . liferay . portal . kernel . exception . PortalException ) exception ; } throw new com . liferay . portal . kernel . exception . SystemException ( exception ) ; } return ( com . liferay . bookmarks . model . BookmarksEntry ) returnObj ; } catch ( com . liferay . portal . kernel . exception . SystemException systemException ) { throw systemException ; } }
public static com . liferay . bookmarks . model . BookmarksEntry moveEntryToTrash ( HttpPrincipal httpPrincipal , long entryId ) throws com . liferay . portal . kernel . exception . PortalException { try { MethodKey methodKey = new MethodKey ( BookmarksEntryServiceUtil . class , "moveEntryToTrash" , _moveEntryToTrashParameterTypes16 ) ; MethodHandler methodHandler = new MethodHandler ( methodKey , entryId ) ; Object returnObj = null ; try { returnObj = TunnelUtil . invoke ( httpPrincipal , methodHandler ) ; } catch ( Exception exception ) { if ( exception instanceof com . liferay . portal . kernel . exception . PortalException ) { throw ( com . liferay . portal . kernel . exception . PortalException ) exception ; } throw new com . liferay . portal . kernel . exception . SystemException ( exception ) ; } return ( com . liferay . bookmarks . model . BookmarksEntry ) returnObj ; } catch ( com . liferay . portal . kernel . exception . SystemException systemException ) { _log . error ( systemException , systemException ) ; throw systemException ; } }
619
public boolean isGatewayTokenCacheEnabled ( ) { try { APIManagerConfiguration config = getApiManagerConfiguration ( ) ; String cacheEnabled = config . getFirstProperty ( APIConstants . GATEWAY_TOKEN_CACHE_ENABLED ) ; return Boolean . parseBoolean ( cacheEnabled ) ; } catch ( Exception e ) { } return true ; }
public boolean isGatewayTokenCacheEnabled ( ) { try { APIManagerConfiguration config = getApiManagerConfiguration ( ) ; String cacheEnabled = config . getFirstProperty ( APIConstants . GATEWAY_TOKEN_CACHE_ENABLED ) ; return Boolean . parseBoolean ( cacheEnabled ) ; } catch ( Exception e ) { log . error ( "Did not found valid API Validation Information cache configuration. Use default configuration" + e ) ; } return true ; }
620
private static void assertValues ( RegressionEnvironment env , List values , String valueName ) { env . assertListener ( "s0" , listener -> { EventBean [ ] events = listener . getLastNewData ( ) ; assertEquals ( values . size ( ) , events . length ) ; for ( int i = 0 ; i < events . length ; i ++ ) { log . debug ( ".assertValuesMayConvert events[" + i + "]==" + events [ i ] . get ( valueName ) ) ; assertEquals ( values . get ( i ) , events [ i ] . get ( valueName ) ) ; } } ) ; }
private static void assertValues ( RegressionEnvironment env , List values , String valueName ) { env . assertListener ( "s0" , listener -> { EventBean [ ] events = listener . getLastNewData ( ) ; assertEquals ( values . size ( ) , events . length ) ; log . debug ( ".assertValuesMayConvert values: " + values ) ; for ( int i = 0 ; i < events . length ; i ++ ) { log . debug ( ".assertValuesMayConvert events[" + i + "]==" + events [ i ] . get ( valueName ) ) ; assertEquals ( values . get ( i ) , events [ i ] . get ( valueName ) ) ; } } ) ; }
621
private static void assertValues ( RegressionEnvironment env , List values , String valueName ) { env . assertListener ( "s0" , listener -> { EventBean [ ] events = listener . getLastNewData ( ) ; assertEquals ( values . size ( ) , events . length ) ; log . debug ( ".assertValuesMayConvert values: " + values ) ; for ( int i = 0 ; i < events . length ; i ++ ) { assertEquals ( values . get ( i ) , events [ i ] . get ( valueName ) ) ; } } ) ; }
private static void assertValues ( RegressionEnvironment env , List values , String valueName ) { env . assertListener ( "s0" , listener -> { EventBean [ ] events = listener . getLastNewData ( ) ; assertEquals ( values . size ( ) , events . length ) ; log . debug ( ".assertValuesMayConvert values: " + values ) ; for ( int i = 0 ; i < events . length ; i ++ ) { log . debug ( ".assertValuesMayConvert events[" + i + "]==" + events [ i ] . get ( valueName ) ) ; assertEquals ( values . get ( i ) , events [ i ] . get ( valueName ) ) ; } } ) ; }
622
protected static int determineConnectionTimeout ( Message message , HTTPClientPolicy csPolicy ) { long ctimeout = csPolicy . getConnectionTimeout ( ) ; if ( message . get ( Message . CONNECTION_TIMEOUT ) != null ) { Object obj = message . get ( Message . CONNECTION_TIMEOUT ) ; try { ctimeout = Long . parseLong ( obj . toString ( ) ) ; } catch ( NumberFormatException e ) { } } if ( ctimeout > Integer . MAX_VALUE ) { ctimeout = Integer . MAX_VALUE ; } return ( int ) ctimeout ; }
protected static int determineConnectionTimeout ( Message message , HTTPClientPolicy csPolicy ) { long ctimeout = csPolicy . getConnectionTimeout ( ) ; if ( message . get ( Message . CONNECTION_TIMEOUT ) != null ) { Object obj = message . get ( Message . CONNECTION_TIMEOUT ) ; try { ctimeout = Long . parseLong ( obj . toString ( ) ) ; } catch ( NumberFormatException e ) { LOG . log ( Level . WARNING , "INVALID_TIMEOUT_FORMAT" , new Object [ ] { Message . CONNECTION_TIMEOUT , obj . toString ( ) } ) ; } } if ( ctimeout > Integer . MAX_VALUE ) { ctimeout = Integer . MAX_VALUE ; } return ( int ) ctimeout ; }
623
public Object call ( Context context , List args ) throws FunctionCallException { assert args . size ( ) == 1 ; OLink olink = _oxpath . getLinks ( ) . get ( args . get ( 0 ) ) ; try { return _xpathEvalCtx . isLinkActive ( olink ) ? Boolean . TRUE : Boolean . FALSE ; } catch ( FaultException e ) { throw new WrappedFaultException . JaxenFunctionException ( e ) ; } }
public Object call ( Context context , List args ) throws FunctionCallException { assert args . size ( ) == 1 ; OLink olink = _oxpath . getLinks ( ) . get ( args . get ( 0 ) ) ; try { return _xpathEvalCtx . isLinkActive ( olink ) ? Boolean . TRUE : Boolean . FALSE ; } catch ( FaultException e ) { __log . error ( "bpws:getLinkStatus(" + args + ") threw FaultException" , e ) ; throw new WrappedFaultException . JaxenFunctionException ( e ) ; } }
624
private static ClassLoader createProgramClassLoader ( MapReduceContextConfig contextConfig ) { Location programLocation = Locations . toLocation ( new File ( contextConfig . getProgramJarName ( ) ) ) ; try { File unpackDir = DirUtils . createTempDir ( new File ( System . getProperty ( "user.dir" ) ) ) ; BundleJarUtil . prepareClassLoaderFolder ( programLocation , unpackDir ) ; return new ProgramClassLoader ( contextConfig . getCConf ( ) , unpackDir , FilterClassLoader . create ( contextConfig . getHConf ( ) . getClassLoader ( ) ) ) ; } catch ( IOException e ) { LOG . error ( "Failed to create ProgramClassLoader" , e ) ; throw Throwables . propagate ( e ) ; } }
private static ClassLoader createProgramClassLoader ( MapReduceContextConfig contextConfig ) { Location programLocation = Locations . toLocation ( new File ( contextConfig . getProgramJarName ( ) ) ) ; try { File unpackDir = DirUtils . createTempDir ( new File ( System . getProperty ( "user.dir" ) ) ) ; LOG . info ( "Create ProgramClassLoader from {}, expand to {}" , programLocation , unpackDir ) ; BundleJarUtil . prepareClassLoaderFolder ( programLocation , unpackDir ) ; return new ProgramClassLoader ( contextConfig . getCConf ( ) , unpackDir , FilterClassLoader . create ( contextConfig . getHConf ( ) . getClassLoader ( ) ) ) ; } catch ( IOException e ) { LOG . error ( "Failed to create ProgramClassLoader" , e ) ; throw Throwables . propagate ( e ) ; } }
625
private static ClassLoader createProgramClassLoader ( MapReduceContextConfig contextConfig ) { Location programLocation = Locations . toLocation ( new File ( contextConfig . getProgramJarName ( ) ) ) ; try { File unpackDir = DirUtils . createTempDir ( new File ( System . getProperty ( "user.dir" ) ) ) ; LOG . info ( "Create ProgramClassLoader from {}, expand to {}" , programLocation , unpackDir ) ; BundleJarUtil . prepareClassLoaderFolder ( programLocation , unpackDir ) ; return new ProgramClassLoader ( contextConfig . getCConf ( ) , unpackDir , FilterClassLoader . create ( contextConfig . getHConf ( ) . getClassLoader ( ) ) ) ; } catch ( IOException e ) { throw Throwables . propagate ( e ) ; } }
private static ClassLoader createProgramClassLoader ( MapReduceContextConfig contextConfig ) { Location programLocation = Locations . toLocation ( new File ( contextConfig . getProgramJarName ( ) ) ) ; try { File unpackDir = DirUtils . createTempDir ( new File ( System . getProperty ( "user.dir" ) ) ) ; LOG . info ( "Create ProgramClassLoader from {}, expand to {}" , programLocation , unpackDir ) ; BundleJarUtil . prepareClassLoaderFolder ( programLocation , unpackDir ) ; return new ProgramClassLoader ( contextConfig . getCConf ( ) , unpackDir , FilterClassLoader . create ( contextConfig . getHConf ( ) . getClassLoader ( ) ) ) ; } catch ( IOException e ) { LOG . error ( "Failed to create ProgramClassLoader" , e ) ; throw Throwables . propagate ( e ) ; } }
626
@ Overheaded @ Override public boolean isSpecificQueryToBeBypassed ( ) throws JargonException { if ( getIRODSServerProperties ( ) . isSupportsSpecificQuery ( ) ) { return false ; } else { return true ; } }
@ Overheaded @ Override public boolean isSpecificQueryToBeBypassed ( ) throws JargonException { if ( getIRODSServerProperties ( ) . isSupportsSpecificQuery ( ) ) { log . debug ( "by version number I know I support specific query" ) ; return false ; } else { return true ; } }
627
public static boolean loadRefPercentileData ( final String filename , final Map < SampleTraitType , Map < String , double [ ] > > refTraitPercentiles ) { try { final List < String > fileData = Files . readAllLines ( new File ( filename ) . toPath ( ) ) ; final String header = fileData . get ( 0 ) ; fileData . remove ( 0 ) ; for ( final String line : fileData ) { final String [ ] items = line . split ( DATA_DELIM , - 1 ) ; final String cancerType = items [ 0 ] ; final SampleTraitType traitType = SampleTraitType . valueOf ( items [ 1 ] ) ; double [ ] percentileData = new double [ PERCENTILE_COUNT ] ; int startIndex = 2 ; for ( int i = startIndex ; i < items . length ; ++ i ) { double value = Double . parseDouble ( items [ i ] ) ; percentileData [ i - startIndex ] = value ; } Map < String , double [ ] > traitData = refTraitPercentiles . get ( traitType ) ; if ( traitData == null ) { traitData = Maps . newHashMap ( ) ; refTraitPercentiles . put ( traitType , traitData ) ; } traitData . put ( cancerType , percentileData ) ; } } catch ( IOException e ) { return false ; } return true ; }
public static boolean loadRefPercentileData ( final String filename , final Map < SampleTraitType , Map < String , double [ ] > > refTraitPercentiles ) { try { final List < String > fileData = Files . readAllLines ( new File ( filename ) . toPath ( ) ) ; final String header = fileData . get ( 0 ) ; fileData . remove ( 0 ) ; for ( final String line : fileData ) { final String [ ] items = line . split ( DATA_DELIM , - 1 ) ; final String cancerType = items [ 0 ] ; final SampleTraitType traitType = SampleTraitType . valueOf ( items [ 1 ] ) ; double [ ] percentileData = new double [ PERCENTILE_COUNT ] ; int startIndex = 2 ; for ( int i = startIndex ; i < items . length ; ++ i ) { double value = Double . parseDouble ( items [ i ] ) ; percentileData [ i - startIndex ] = value ; } Map < String , double [ ] > traitData = refTraitPercentiles . get ( traitType ) ; if ( traitData == null ) { traitData = Maps . newHashMap ( ) ; refTraitPercentiles . put ( traitType , traitData ) ; } traitData . put ( cancerType , percentileData ) ; } } catch ( IOException e ) { CUP_LOGGER . error ( "failed to read sample traits perc data file({}): {}" , filename , e . toString ( ) ) ; return false ; } return true ; }
628
protected JsonObject apiPost ( String path , ImmutableMap < String , String > headers , ImmutableMap < String , String > requestData ) { try { String body = gson . toJson ( requestData ) ; String uri = Urls . mergePaths ( endpoint , path ) ; LOG . debug ( "Vault request - headers: {}" , headers . toString ( ) ) ; LOG . debug ( "Vault request - body: {}" , body ) ; HttpToolResponse response = HttpTool . httpPost ( httpClient , Urls . toUri ( uri ) , headers , body . getBytes ( CHARSET_NAME ) ) ; LOG . debug ( "Vault response - code: {} {}" , new Object [ ] { Integer . toString ( response . getResponseCode ( ) ) , response . getReasonPhrase ( ) } ) ; LOG . debug ( "Vault response - headers: {}" , response . getHeaderLists ( ) . toString ( ) ) ; String responseBody = new String ( response . getContent ( ) , CHARSET_NAME ) ; LOG . debug ( "Vault response - body: {}" , responseBody ) ; if ( HttpTool . isStatusCodeHealthy ( response . getResponseCode ( ) ) ) { return gson . fromJson ( responseBody , JsonObject . class ) ; } else { throw new IllegalStateException ( "HTTP request returned error" ) ; } } catch ( UnsupportedEncodingException e ) { throw Exceptions . propagate ( e ) ; } }
protected JsonObject apiPost ( String path , ImmutableMap < String , String > headers , ImmutableMap < String , String > requestData ) { try { String body = gson . toJson ( requestData ) ; String uri = Urls . mergePaths ( endpoint , path ) ; LOG . debug ( "Vault request - POST: {}" , uri ) ; LOG . debug ( "Vault request - headers: {}" , headers . toString ( ) ) ; LOG . debug ( "Vault request - body: {}" , body ) ; HttpToolResponse response = HttpTool . httpPost ( httpClient , Urls . toUri ( uri ) , headers , body . getBytes ( CHARSET_NAME ) ) ; LOG . debug ( "Vault response - code: {} {}" , new Object [ ] { Integer . toString ( response . getResponseCode ( ) ) , response . getReasonPhrase ( ) } ) ; LOG . debug ( "Vault response - headers: {}" , response . getHeaderLists ( ) . toString ( ) ) ; String responseBody = new String ( response . getContent ( ) , CHARSET_NAME ) ; LOG . debug ( "Vault response - body: {}" , responseBody ) ; if ( HttpTool . isStatusCodeHealthy ( response . getResponseCode ( ) ) ) { return gson . fromJson ( responseBody , JsonObject . class ) ; } else { throw new IllegalStateException ( "HTTP request returned error" ) ; } } catch ( UnsupportedEncodingException e ) { throw Exceptions . propagate ( e ) ; } }
629
protected JsonObject apiPost ( String path , ImmutableMap < String , String > headers , ImmutableMap < String , String > requestData ) { try { String body = gson . toJson ( requestData ) ; String uri = Urls . mergePaths ( endpoint , path ) ; LOG . debug ( "Vault request - POST: {}" , uri ) ; LOG . debug ( "Vault request - body: {}" , body ) ; HttpToolResponse response = HttpTool . httpPost ( httpClient , Urls . toUri ( uri ) , headers , body . getBytes ( CHARSET_NAME ) ) ; LOG . debug ( "Vault response - code: {} {}" , new Object [ ] { Integer . toString ( response . getResponseCode ( ) ) , response . getReasonPhrase ( ) } ) ; LOG . debug ( "Vault response - headers: {}" , response . getHeaderLists ( ) . toString ( ) ) ; String responseBody = new String ( response . getContent ( ) , CHARSET_NAME ) ; LOG . debug ( "Vault response - body: {}" , responseBody ) ; if ( HttpTool . isStatusCodeHealthy ( response . getResponseCode ( ) ) ) { return gson . fromJson ( responseBody , JsonObject . class ) ; } else { throw new IllegalStateException ( "HTTP request returned error" ) ; } } catch ( UnsupportedEncodingException e ) { throw Exceptions . propagate ( e ) ; } }
protected JsonObject apiPost ( String path , ImmutableMap < String , String > headers , ImmutableMap < String , String > requestData ) { try { String body = gson . toJson ( requestData ) ; String uri = Urls . mergePaths ( endpoint , path ) ; LOG . debug ( "Vault request - POST: {}" , uri ) ; LOG . debug ( "Vault request - headers: {}" , headers . toString ( ) ) ; LOG . debug ( "Vault request - body: {}" , body ) ; HttpToolResponse response = HttpTool . httpPost ( httpClient , Urls . toUri ( uri ) , headers , body . getBytes ( CHARSET_NAME ) ) ; LOG . debug ( "Vault response - code: {} {}" , new Object [ ] { Integer . toString ( response . getResponseCode ( ) ) , response . getReasonPhrase ( ) } ) ; LOG . debug ( "Vault response - headers: {}" , response . getHeaderLists ( ) . toString ( ) ) ; String responseBody = new String ( response . getContent ( ) , CHARSET_NAME ) ; LOG . debug ( "Vault response - body: {}" , responseBody ) ; if ( HttpTool . isStatusCodeHealthy ( response . getResponseCode ( ) ) ) { return gson . fromJson ( responseBody , JsonObject . class ) ; } else { throw new IllegalStateException ( "HTTP request returned error" ) ; } } catch ( UnsupportedEncodingException e ) { throw Exceptions . propagate ( e ) ; } }
630
protected JsonObject apiPost ( String path , ImmutableMap < String , String > headers , ImmutableMap < String , String > requestData ) { try { String body = gson . toJson ( requestData ) ; String uri = Urls . mergePaths ( endpoint , path ) ; LOG . debug ( "Vault request - POST: {}" , uri ) ; LOG . debug ( "Vault request - headers: {}" , headers . toString ( ) ) ; HttpToolResponse response = HttpTool . httpPost ( httpClient , Urls . toUri ( uri ) , headers , body . getBytes ( CHARSET_NAME ) ) ; LOG . debug ( "Vault response - code: {} {}" , new Object [ ] { Integer . toString ( response . getResponseCode ( ) ) , response . getReasonPhrase ( ) } ) ; LOG . debug ( "Vault response - headers: {}" , response . getHeaderLists ( ) . toString ( ) ) ; String responseBody = new String ( response . getContent ( ) , CHARSET_NAME ) ; LOG . debug ( "Vault response - body: {}" , responseBody ) ; if ( HttpTool . isStatusCodeHealthy ( response . getResponseCode ( ) ) ) { return gson . fromJson ( responseBody , JsonObject . class ) ; } else { throw new IllegalStateException ( "HTTP request returned error" ) ; } } catch ( UnsupportedEncodingException e ) { throw Exceptions . propagate ( e ) ; } }
protected JsonObject apiPost ( String path , ImmutableMap < String , String > headers , ImmutableMap < String , String > requestData ) { try { String body = gson . toJson ( requestData ) ; String uri = Urls . mergePaths ( endpoint , path ) ; LOG . debug ( "Vault request - POST: {}" , uri ) ; LOG . debug ( "Vault request - headers: {}" , headers . toString ( ) ) ; LOG . debug ( "Vault request - body: {}" , body ) ; HttpToolResponse response = HttpTool . httpPost ( httpClient , Urls . toUri ( uri ) , headers , body . getBytes ( CHARSET_NAME ) ) ; LOG . debug ( "Vault response - code: {} {}" , new Object [ ] { Integer . toString ( response . getResponseCode ( ) ) , response . getReasonPhrase ( ) } ) ; LOG . debug ( "Vault response - headers: {}" , response . getHeaderLists ( ) . toString ( ) ) ; String responseBody = new String ( response . getContent ( ) , CHARSET_NAME ) ; LOG . debug ( "Vault response - body: {}" , responseBody ) ; if ( HttpTool . isStatusCodeHealthy ( response . getResponseCode ( ) ) ) { return gson . fromJson ( responseBody , JsonObject . class ) ; } else { throw new IllegalStateException ( "HTTP request returned error" ) ; } } catch ( UnsupportedEncodingException e ) { throw Exceptions . propagate ( e ) ; } }
631
protected JsonObject apiPost ( String path , ImmutableMap < String , String > headers , ImmutableMap < String , String > requestData ) { try { String body = gson . toJson ( requestData ) ; String uri = Urls . mergePaths ( endpoint , path ) ; LOG . debug ( "Vault request - POST: {}" , uri ) ; LOG . debug ( "Vault request - headers: {}" , headers . toString ( ) ) ; LOG . debug ( "Vault request - body: {}" , body ) ; HttpToolResponse response = HttpTool . httpPost ( httpClient , Urls . toUri ( uri ) , headers , body . getBytes ( CHARSET_NAME ) ) ; LOG . debug ( "Vault response - headers: {}" , response . getHeaderLists ( ) . toString ( ) ) ; String responseBody = new String ( response . getContent ( ) , CHARSET_NAME ) ; LOG . debug ( "Vault response - body: {}" , responseBody ) ; if ( HttpTool . isStatusCodeHealthy ( response . getResponseCode ( ) ) ) { return gson . fromJson ( responseBody , JsonObject . class ) ; } else { throw new IllegalStateException ( "HTTP request returned error" ) ; } } catch ( UnsupportedEncodingException e ) { throw Exceptions . propagate ( e ) ; } }
protected JsonObject apiPost ( String path , ImmutableMap < String , String > headers , ImmutableMap < String , String > requestData ) { try { String body = gson . toJson ( requestData ) ; String uri = Urls . mergePaths ( endpoint , path ) ; LOG . debug ( "Vault request - POST: {}" , uri ) ; LOG . debug ( "Vault request - headers: {}" , headers . toString ( ) ) ; LOG . debug ( "Vault request - body: {}" , body ) ; HttpToolResponse response = HttpTool . httpPost ( httpClient , Urls . toUri ( uri ) , headers , body . getBytes ( CHARSET_NAME ) ) ; LOG . debug ( "Vault response - code: {} {}" , new Object [ ] { Integer . toString ( response . getResponseCode ( ) ) , response . getReasonPhrase ( ) } ) ; LOG . debug ( "Vault response - headers: {}" , response . getHeaderLists ( ) . toString ( ) ) ; String responseBody = new String ( response . getContent ( ) , CHARSET_NAME ) ; LOG . debug ( "Vault response - body: {}" , responseBody ) ; if ( HttpTool . isStatusCodeHealthy ( response . getResponseCode ( ) ) ) { return gson . fromJson ( responseBody , JsonObject . class ) ; } else { throw new IllegalStateException ( "HTTP request returned error" ) ; } } catch ( UnsupportedEncodingException e ) { throw Exceptions . propagate ( e ) ; } }
632
protected JsonObject apiPost ( String path , ImmutableMap < String , String > headers , ImmutableMap < String , String > requestData ) { try { String body = gson . toJson ( requestData ) ; String uri = Urls . mergePaths ( endpoint , path ) ; LOG . debug ( "Vault request - POST: {}" , uri ) ; LOG . debug ( "Vault request - headers: {}" , headers . toString ( ) ) ; LOG . debug ( "Vault request - body: {}" , body ) ; HttpToolResponse response = HttpTool . httpPost ( httpClient , Urls . toUri ( uri ) , headers , body . getBytes ( CHARSET_NAME ) ) ; LOG . debug ( "Vault response - code: {} {}" , new Object [ ] { Integer . toString ( response . getResponseCode ( ) ) , response . getReasonPhrase ( ) } ) ; String responseBody = new String ( response . getContent ( ) , CHARSET_NAME ) ; LOG . debug ( "Vault response - body: {}" , responseBody ) ; if ( HttpTool . isStatusCodeHealthy ( response . getResponseCode ( ) ) ) { return gson . fromJson ( responseBody , JsonObject . class ) ; } else { throw new IllegalStateException ( "HTTP request returned error" ) ; } } catch ( UnsupportedEncodingException e ) { throw Exceptions . propagate ( e ) ; } }
protected JsonObject apiPost ( String path , ImmutableMap < String , String > headers , ImmutableMap < String , String > requestData ) { try { String body = gson . toJson ( requestData ) ; String uri = Urls . mergePaths ( endpoint , path ) ; LOG . debug ( "Vault request - POST: {}" , uri ) ; LOG . debug ( "Vault request - headers: {}" , headers . toString ( ) ) ; LOG . debug ( "Vault request - body: {}" , body ) ; HttpToolResponse response = HttpTool . httpPost ( httpClient , Urls . toUri ( uri ) , headers , body . getBytes ( CHARSET_NAME ) ) ; LOG . debug ( "Vault response - code: {} {}" , new Object [ ] { Integer . toString ( response . getResponseCode ( ) ) , response . getReasonPhrase ( ) } ) ; LOG . debug ( "Vault response - headers: {}" , response . getHeaderLists ( ) . toString ( ) ) ; String responseBody = new String ( response . getContent ( ) , CHARSET_NAME ) ; LOG . debug ( "Vault response - body: {}" , responseBody ) ; if ( HttpTool . isStatusCodeHealthy ( response . getResponseCode ( ) ) ) { return gson . fromJson ( responseBody , JsonObject . class ) ; } else { throw new IllegalStateException ( "HTTP request returned error" ) ; } } catch ( UnsupportedEncodingException e ) { throw Exceptions . propagate ( e ) ; } }
633
protected JsonObject apiPost ( String path , ImmutableMap < String , String > headers , ImmutableMap < String , String > requestData ) { try { String body = gson . toJson ( requestData ) ; String uri = Urls . mergePaths ( endpoint , path ) ; LOG . debug ( "Vault request - POST: {}" , uri ) ; LOG . debug ( "Vault request - headers: {}" , headers . toString ( ) ) ; LOG . debug ( "Vault request - body: {}" , body ) ; HttpToolResponse response = HttpTool . httpPost ( httpClient , Urls . toUri ( uri ) , headers , body . getBytes ( CHARSET_NAME ) ) ; LOG . debug ( "Vault response - code: {} {}" , new Object [ ] { Integer . toString ( response . getResponseCode ( ) ) , response . getReasonPhrase ( ) } ) ; LOG . debug ( "Vault response - headers: {}" , response . getHeaderLists ( ) . toString ( ) ) ; String responseBody = new String ( response . getContent ( ) , CHARSET_NAME ) ; if ( HttpTool . isStatusCodeHealthy ( response . getResponseCode ( ) ) ) { return gson . fromJson ( responseBody , JsonObject . class ) ; } else { throw new IllegalStateException ( "HTTP request returned error" ) ; } } catch ( UnsupportedEncodingException e ) { throw Exceptions . propagate ( e ) ; } }
// POSTs requestData as JSON to the Vault endpoint at `path` and returns the parsed JSON response;
// throws IllegalStateException on an unhealthy HTTP status, propagates UnsupportedEncodingException.
// NOTE(review): headers may carry the Vault auth token -- confirm debug-logging them is acceptable.
protected JsonObject apiPost ( String path , ImmutableMap < String , String > headers , ImmutableMap < String , String > requestData ) { try { String body = gson . toJson ( requestData ) ; String uri = Urls . mergePaths ( endpoint , path ) ; LOG . debug ( "Vault request - POST: {}" , uri ) ; LOG . debug ( "Vault request - headers: {}" , headers . toString ( ) ) ; LOG . debug ( "Vault request - body: {}" , body ) ; HttpToolResponse response = HttpTool . httpPost ( httpClient , Urls . toUri ( uri ) , headers , body . getBytes ( CHARSET_NAME ) ) ; LOG . debug ( "Vault response - code: {} {}" , new Object [ ] { Integer . toString ( response . getResponseCode ( ) ) , response . getReasonPhrase ( ) } ) ; LOG . debug ( "Vault response - headers: {}" , response . getHeaderLists ( ) . toString ( ) ) ; String responseBody = new String ( response . getContent ( ) , CHARSET_NAME ) ; LOG . debug ( "Vault response - body: {}" , responseBody ) ; if ( HttpTool . isStatusCodeHealthy ( response . getResponseCode ( ) ) ) { return gson . fromJson ( responseBody , JsonObject . class ) ; } else { throw new IllegalStateException ( "HTTP request returned error" ) ; } } catch ( UnsupportedEncodingException e ) { throw Exceptions . propagate ( e ) ; } }
634
/**
 * Unmarshals a QueryDefinition from {@code payload}, builds the SQL query definition,
 * replaces it in the query service, and returns the converted definition.
 */
public QueryDefinition replaceQuery(String queryName, String payload, String marshallingType) {
    // Restored: sibling implementation traces the raw payload before unmarshalling.
    logger.debug("About to unmarshal queryDefinition from payload: '{}'", payload);
    QueryDefinition queryDefinition = marshallerHelper.unmarshal(payload, marshallingType, QueryDefinition.class);
    queryDefinition.setName(queryName);
    SqlQueryDefinition actualDefinition = build(context, queryDefinition);
    logger.debug("Built sql query definition for {} with content {}", queryName, actualDefinition);
    queryService.replaceQuery(actualDefinition);
    return convertQueryDefinition(actualDefinition);
}
// Unmarshals a QueryDefinition from payload, builds the SQL query definition,
// replaces it in the query service, and returns the converted definition.
public QueryDefinition replaceQuery ( String queryName , String payload , String marshallingType ) { logger . debug ( "About to unmarshal queryDefinition from payload: '{}'" , payload ) ; QueryDefinition queryDefinition = marshallerHelper . unmarshal ( payload , marshallingType , QueryDefinition . class ) ; queryDefinition . setName ( queryName ) ; SqlQueryDefinition actualDefinition = build ( context , queryDefinition ) ; logger . debug ( "Built sql query definition for {} with content {}" , queryName , actualDefinition ) ; queryService . replaceQuery ( actualDefinition ) ; return convertQueryDefinition ( actualDefinition ) ; }
635
/**
 * Unmarshals a QueryDefinition from {@code payload}, builds the SQL query definition,
 * replaces it in the query service, and returns the converted definition.
 */
public QueryDefinition replaceQuery(String queryName, String payload, String marshallingType) {
    logger.debug("About to unmarshal queryDefinition from payload: '{}'", payload);
    QueryDefinition queryDefinition = marshallerHelper.unmarshal(payload, marshallingType, QueryDefinition.class);
    queryDefinition.setName(queryName);
    SqlQueryDefinition actualDefinition = build(context, queryDefinition);
    // Restored: sibling implementation traces the built definition before replacing it.
    logger.debug("Built sql query definition for {} with content {}", queryName, actualDefinition);
    queryService.replaceQuery(actualDefinition);
    return convertQueryDefinition(actualDefinition);
}
// Unmarshals a QueryDefinition from payload, builds the SQL query definition,
// replaces it in the query service, and returns the converted definition.
public QueryDefinition replaceQuery ( String queryName , String payload , String marshallingType ) { logger . debug ( "About to unmarshal queryDefinition from payload: '{}'" , payload ) ; QueryDefinition queryDefinition = marshallerHelper . unmarshal ( payload , marshallingType , QueryDefinition . class ) ; queryDefinition . setName ( queryName ) ; SqlQueryDefinition actualDefinition = build ( context , queryDefinition ) ; logger . debug ( "Built sql query definition for {} with content {}" , queryName , actualDefinition ) ; queryService . replaceQuery ( actualDefinition ) ; return convertQueryDefinition ( actualDefinition ) ; }
636
public void error ( final String message , final Exception e ) { NewRelic . noticeError ( e ) ; }
// Logs the error message with stack trace and reports the exception to New Relic.
public void error ( final String message , final Exception e ) { logger . error ( message , e ) ; NewRelic . noticeError ( e ) ; }
637
/**
 * Sleeps for twice the given duration, announcing the wait on {@code logger};
 * a no-op for eternal or zero durations.
 */
public static void sleepDurationTwice(Logger logger, Duration duration) {
    if (duration.isEternal() || duration.isZero()) {
        return;
    }
    TimeUnit timeUnit = duration.getTimeUnit();
    long timeout = duration.getDurationAmount() * 2;
    // Fixed: the logger parameter was entirely unused; announce the sleep like the sibling copy.
    logger.info(String.format("Sleeping for %d %s...", timeout, timeUnit));
    sleepTimeUnit(timeUnit, timeout);
}
// Sleeps for twice the given duration, logging the wait; a no-op for eternal or zero durations.
public static void sleepDurationTwice ( Logger logger , Duration duration ) { if ( duration . isEternal ( ) || duration . isZero ( ) ) { return ; } TimeUnit timeUnit = duration . getTimeUnit ( ) ; long timeout = duration . getDurationAmount ( ) * 2 ; logger . info ( format ( "Sleeping for %d %s..." , timeout , timeUnit ) ) ; sleepTimeUnit ( timeUnit , timeout ) ; }
638
/**
 * Persists one pre-expanded ValueSet concept; {@code theSystem} may use the
 * {@code system|version} notation. Increments the ValueSet's total and logs progress
 * every 250 concepts.
 */
private TermValueSetConcept saveConcept(String theSystem, String theCode, String theDisplay, Long theSourceConceptPid, String theSourceConceptDirectParentPids) {
    ValidateUtil.isNotBlankOrThrowInvalidRequest(theSystem, "ValueSet contains a concept with no system value");
    ValidateUtil.isNotBlankOrThrowInvalidRequest(theCode, "ValueSet contains a concept with no code value");
    TermValueSetConcept concept = new TermValueSetConcept();
    concept.setValueSet(myTermValueSet);
    concept.setOrder(myConceptsSaved);
    // Split "system|version" when a version suffix is present.
    int versionIndex = theSystem.indexOf("|");
    if (versionIndex >= 0) {
        concept.setSystem(theSystem.substring(0, versionIndex));
        concept.setSystemVersion(theSystem.substring(versionIndex + 1));
    } else {
        concept.setSystem(theSystem);
    }
    concept.setCode(theCode);
    if (isNotBlank(theDisplay)) {
        concept.setDisplay(theDisplay);
    }
    concept.setSourceConceptPid(theSourceConceptPid);
    concept.setSourceConceptDirectParentPids(theSourceConceptDirectParentPids);
    myValueSetConceptDao.save(concept);
    myValueSetDao.save(myTermValueSet.incrementTotalConcepts());
    if (++myConceptsSaved % 250 == 0) {
        // Restored: progress block was empty; sibling implementation reports every 250 concepts.
        ourLog.info("Have pre-expanded {} concepts in ValueSet[{}]", myConceptsSaved, myTermValueSet.getUrl());
    }
    return concept;
}
// Persists one pre-expanded ValueSet concept; theSystem may use "system|version" notation.
// Increments the ValueSet total and logs progress every 250 concepts saved.
private TermValueSetConcept saveConcept ( String theSystem , String theCode , String theDisplay , Long theSourceConceptPid , String theSourceConceptDirectParentPids ) { ValidateUtil . isNotBlankOrThrowInvalidRequest ( theSystem , "ValueSet contains a concept with no system value" ) ; ValidateUtil . isNotBlankOrThrowInvalidRequest ( theCode , "ValueSet contains a concept with no code value" ) ; TermValueSetConcept concept = new TermValueSetConcept ( ) ; concept . setValueSet ( myTermValueSet ) ; concept . setOrder ( myConceptsSaved ) ; int versionIndex = theSystem . indexOf ( "|" ) ; if ( versionIndex >= 0 ) { concept . setSystem ( theSystem . substring ( 0 , versionIndex ) ) ; concept . setSystemVersion ( theSystem . substring ( versionIndex + 1 ) ) ; } else { concept . setSystem ( theSystem ) ; } concept . setCode ( theCode ) ; if ( isNotBlank ( theDisplay ) ) { concept . setDisplay ( theDisplay ) ; } concept . setSourceConceptPid ( theSourceConceptPid ) ; concept . setSourceConceptDirectParentPids ( theSourceConceptDirectParentPids ) ; myValueSetConceptDao . save ( concept ) ; myValueSetDao . save ( myTermValueSet . incrementTotalConcepts ( ) ) ; if ( ++ myConceptsSaved % 250 == 0 ) { ourLog . info ( "Have pre-expanded {} concepts in ValueSet[{}]" , myConceptsSaved , myTermValueSet . getUrl ( ) ) ; } return concept ; }
639
/** Sets the statistics reporter delay in milliseconds, tracing the new value at debug. */
public void setDelay(long delay) {
    if (log.isDebugEnabled()) {
        // Restored: the guard was empty, making the isDebugEnabled check dead code.
        log.debug("Mediation statistics reporter delay set to " + delay + " ms");
    }
    this.delay = delay;
}
// Sets the statistics reporter delay in milliseconds, tracing the new value at debug.
public void setDelay ( long delay ) { if ( log . isDebugEnabled ( ) ) { log . debug ( "Mediation statistics reporter delay set to " + delay + " ms" ) ; } this . delay = delay ; }
640
/**
 * Returns all protocol infos sorted ascending by protocol and version, after verifying
 * the organisation's GET_PROTOCOL_INFOS permission.
 */
public List<ProtocolInfo> getProtocolInfos(final String organisationIdentification) throws FunctionalException {
    // Restored: sibling implementation traces the calling organisation.
    LOGGER.debug("Retrieving all protocol infos on behalf of organisation: {}", organisationIdentification);
    final Organisation organisation = this.findOrganisation(organisationIdentification);
    this.isAllowed(organisation, PlatformFunction.GET_PROTOCOL_INFOS);
    return this.protocolRepository.findAll(Sort.by(Direction.ASC, "protocol", "protocolVersion"));
}
// Returns all protocol infos sorted ascending, after verifying the organisation's permission.
public List < ProtocolInfo > getProtocolInfos ( final String organisationIdentification ) throws FunctionalException { LOGGER . debug ( "Retrieving all protocol infos on behalf of organisation: {}" , organisationIdentification ) ; final Organisation organisation = this . findOrganisation ( organisationIdentification ) ; this . isAllowed ( organisation , PlatformFunction . GET_PROTOCOL_INFOS ) ; return this . protocolRepository . findAll ( Sort . by ( Direction . ASC , "protocol" , "protocolVersion" ) ) ; }
641
/**
 * Resolves the S3 key and multipart upload id for a DATASET blob component and returns a
 * presigned multipart URL.
 *
 * @throws ModelDBException NOT_FOUND when no S3 key resolves, INVALID_ARGUMENT for non-DATASET blobs
 */
private GetUrlForArtifact.Response getPresignedUrl(ArtifactStoreDAO artifactStoreDAO, Session session, GetUrlForBlobVersioned request, Blob blob) throws ModelDBException {
    Map<String, String> componentWithSHAMap = getDatasetComponentBlob(blob, request.getPathDatasetComponentBlobPath());
    if (blob.getContentCase().equals(Blob.ContentCase.DATASET)) {
        String internalPath = componentWithSHAMap.get("internalPath");
        String computeSha = componentWithSHAMap.get("computeSha");
        Map.Entry<String, String> s3KeyUploadId = getS3PathAndMultipartUploadId(session, computeSha, internalPath, blob.getDataset().getContentCase(), request.getPartNumber() > 0, key -> artifactStoreDAO.initializeMultipart(internalPath));
        String errorMessage = "S3Key not found";
        String s3Key = s3KeyUploadId.getKey();
        String uploadId = s3KeyUploadId.getValue();
        if (s3Key == null || s3Key.isEmpty()) {
            // Restored: errorMessage was built but never logged before the throw.
            LOGGER.warn(errorMessage);
            throw new ModelDBException(errorMessage, Status.Code.NOT_FOUND);
        }
        return artifactStoreDAO.getUrlForArtifactMultipart(s3Key, request.getMethod(), request.getPartNumber(), uploadId);
    } else {
        throw new ModelDBException("Invalid Blob type found", Status.Code.INVALID_ARGUMENT);
    }
}
// Resolves the S3 key/upload id for a DATASET blob component and returns a presigned multipart
// URL; warns and throws NOT_FOUND when the key is missing, INVALID_ARGUMENT for other blob types.
private GetUrlForArtifact . Response getPresignedUrl ( ArtifactStoreDAO artifactStoreDAO , Session session , GetUrlForBlobVersioned request , Blob blob ) throws ModelDBException { Map < String , String > componentWithSHAMap = getDatasetComponentBlob ( blob , request . getPathDatasetComponentBlobPath ( ) ) ; if ( blob . getContentCase ( ) . equals ( Blob . ContentCase . DATASET ) ) { String internalPath = componentWithSHAMap . get ( "internalPath" ) ; String computeSha = componentWithSHAMap . get ( "computeSha" ) ; Map . Entry < String , String > s3KeyUploadId = getS3PathAndMultipartUploadId ( session , computeSha , internalPath , blob . getDataset ( ) . getContentCase ( ) , request . getPartNumber ( ) > 0 , key -> artifactStoreDAO . initializeMultipart ( internalPath ) ) ; String errorMessage = "S3Key not found" ; String s3Key = s3KeyUploadId . getKey ( ) ; String uploadId = s3KeyUploadId . getValue ( ) ; if ( s3Key == null || s3Key . isEmpty ( ) ) { LOGGER . warn ( errorMessage ) ; throw new ModelDBException ( errorMessage , Status . Code . NOT_FOUND ) ; } return artifactStoreDAO . getUrlForArtifactMultipart ( s3Key , request . getMethod ( ) , request . getPartNumber ( ) , uploadId ) ; } else { throw new ModelDBException ( "Invalid Blob type found" , Status . Code . INVALID_ARGUMENT ) ; } }
642
/** Adds a new element to the current list attribute; returns SUCCESS or FAILURE. */
public String addListElement() {
    IApsEntity entity = this.getCurrentApsEntity();
    try {
        ListAttributeInterface currentAttribute = (ListAttributeInterface) entity.getAttribute(this.getAttributeName());
        if (currentAttribute instanceof MonoListAttribute) {
            ((MonoListAttribute) currentAttribute).addAttribute();
        } else if (currentAttribute instanceof ListAttribute) {
            ((ListAttribute) currentAttribute).addAttribute(this.getListLangCode());
        }
        // Restored: sibling implementation traces the element that was added.
        _logger.debug("Added element of type {} to the list {}", currentAttribute.getNestedAttributeTypeCode(), currentAttribute.getName());
    } catch (Throwable t) {
        _logger.error("error in addListElement", t);
        return FAILURE;
    }
    return SUCCESS;
}
// Adds a new element to the current list attribute (mono-list, or per-language for ListAttribute);
// returns SUCCESS, or FAILURE on any error.
public String addListElement ( ) { IApsEntity entity = this . getCurrentApsEntity ( ) ; try { ListAttributeInterface currentAttribute = ( ListAttributeInterface ) entity . getAttribute ( this . getAttributeName ( ) ) ; if ( currentAttribute instanceof MonoListAttribute ) { ( ( MonoListAttribute ) currentAttribute ) . addAttribute ( ) ; } else if ( currentAttribute instanceof ListAttribute ) { ( ( ListAttribute ) currentAttribute ) . addAttribute ( this . getListLangCode ( ) ) ; } _logger . debug ( "Added element of type {} to the list {}" , currentAttribute . getNestedAttributeTypeCode ( ) , currentAttribute . getName ( ) ) ; } catch ( Throwable t ) { _logger . error ( "error in addListElement" , t ) ; return FAILURE ; } return SUCCESS ; }
643
/** Adds a new element to the current list attribute; returns SUCCESS or FAILURE. */
public String addListElement() {
    IApsEntity entity = this.getCurrentApsEntity();
    try {
        ListAttributeInterface currentAttribute = (ListAttributeInterface) entity.getAttribute(this.getAttributeName());
        if (currentAttribute instanceof MonoListAttribute) {
            ((MonoListAttribute) currentAttribute).addAttribute();
        } else if (currentAttribute instanceof ListAttribute) {
            ((ListAttribute) currentAttribute).addAttribute(this.getListLangCode());
        }
        _logger.debug("Added element of type {} to the list {}", currentAttribute.getNestedAttributeTypeCode(), currentAttribute.getName());
    } catch (Throwable t) {
        // Restored: the failure was swallowed without a trace before returning FAILURE.
        _logger.error("error in addListElement", t);
        return FAILURE;
    }
    return SUCCESS;
}
// Adds a new element to the current list attribute (mono-list, or per-language for ListAttribute);
// returns SUCCESS, or FAILURE on any error.
public String addListElement ( ) { IApsEntity entity = this . getCurrentApsEntity ( ) ; try { ListAttributeInterface currentAttribute = ( ListAttributeInterface ) entity . getAttribute ( this . getAttributeName ( ) ) ; if ( currentAttribute instanceof MonoListAttribute ) { ( ( MonoListAttribute ) currentAttribute ) . addAttribute ( ) ; } else if ( currentAttribute instanceof ListAttribute ) { ( ( ListAttribute ) currentAttribute ) . addAttribute ( this . getListLangCode ( ) ) ; } _logger . debug ( "Added element of type {} to the list {}" , currentAttribute . getNestedAttributeTypeCode ( ) , currentAttribute . getName ( ) ) ; } catch ( Throwable t ) { _logger . error ( "error in addListElement" , t ) ; return FAILURE ; } return SUCCESS ; }
644
/**
 * Moves the files under {@code from} on {@code fromFs} to {@code to} on {@code toFs},
 * in parallel when threads > 1 and the file count reaches PARALLEL_MOVE_MIN.
 *
 * @throws IllegalArgumentException if any argument is null, or fromLocal is set for a non-local path
 */
private static void move(Counter counter, FileSystem fromFs, Path from, FileSystem toFs, Path to, boolean fromLocal, int threads) throws IOException, InterruptedException {
    if (counter == null) {
        throw new IllegalArgumentException("counter must not be null");
    }
    if (fromFs == null) {
        throw new IllegalArgumentException("fromFs must not be null");
    }
    if (from == null) {
        throw new IllegalArgumentException("from must not be null");
    }
    if (toFs == null) {
        throw new IllegalArgumentException("toFs must not be null");
    }
    if (to == null) {
        throw new IllegalArgumentException("to must not be null");
    }
    if (fromLocal && isLocalPath(from) == false) {
        throw new IllegalArgumentException("from must be on local file system");
    }
    if (LOG.isDebugEnabled()) {
        // Restored: the guard was empty; sibling implementations trace the start of the move.
        LOG.debug(MessageFormat.format("Start moving files (from={0}, to={1})", from, to));
    }
    Path source = fromFs.makeQualified(from);
    Path target = toFs.makeQualified(to);
    List<Path> list = createFileListRelative(counter, fromFs, source);
    if (list.isEmpty()) {
        return;
    }
    boolean parallel = threads > 1 && list.size() >= PARALLEL_MOVE_MIN;
    if (LOG.isDebugEnabled()) {
        LOG.debug(MessageFormat.format("Process moving files (from={0}, to={1}, count={2}, parallel={3})", from, to, list.size(), parallel ? threads : "N/A"));
    }
    if (parallel) {
        ExecutorService executor = Executors.newFixedThreadPool(Math.min(threads, list.size()), DAEMON_THREAD_FACTORY);
        try {
            moveParallel(counter, fromFs, toFs, source, target, list, fromLocal, executor);
        } finally {
            executor.shutdownNow();
        }
    } else {
        moveSerial(counter, fromFs, toFs, source, target, list, fromLocal);
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug(MessageFormat.format("Finish moving files (from={0}, to={1}, count={2})", from, to, list.size()));
    }
}
// Validates all arguments, then moves the files under `from` to `to`; runs in parallel when
// threads > 1 and the file count reaches PARALLEL_MOVE_MIN, tracing start/process/finish at debug.
private static void move ( Counter counter , FileSystem fromFs , Path from , FileSystem toFs , Path to , boolean fromLocal , int threads ) throws IOException , InterruptedException { if ( counter == null ) { throw new IllegalArgumentException ( "counter must not be null" ) ; } if ( fromFs == null ) { throw new IllegalArgumentException ( "fromFs must not be null" ) ; } if ( from == null ) { throw new IllegalArgumentException ( "from must not be null" ) ; } if ( toFs == null ) { throw new IllegalArgumentException ( "toFs must not be null" ) ; } if ( to == null ) { throw new IllegalArgumentException ( "to must not be null" ) ; } if ( fromLocal && isLocalPath ( from ) == false ) { throw new IllegalArgumentException ( "from must be on local file system" ) ; } if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( MessageFormat . format ( "Start moving files (from={0}, to={1})" , from , to ) ) ; } Path source = fromFs . makeQualified ( from ) ; Path target = toFs . makeQualified ( to ) ; List < Path > list = createFileListRelative ( counter , fromFs , source ) ; if ( list . isEmpty ( ) ) { return ; } boolean parallel = threads > 1 && list . size ( ) >= PARALLEL_MOVE_MIN ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( MessageFormat . format ( "Process moving files (from={0}, to={1}, count={2}, parallel={3})" , from , to , list . size ( ) , parallel ? threads : "N/A" ) ) ; } if ( parallel ) { ExecutorService executor = Executors . newFixedThreadPool ( Math . min ( threads , list . size ( ) ) , DAEMON_THREAD_FACTORY ) ; try { moveParallel ( counter , fromFs , toFs , source , target , list , fromLocal , executor ) ; } finally { executor . shutdownNow ( ) ; } } else { moveSerial ( counter , fromFs , toFs , source , target , list , fromLocal ) ; } if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( MessageFormat . format ( "Finish moving files (from={0}, to={1}, count={2})" , from , to , list . size ( ) ) ) ; } }
645
/**
 * Moves the files under {@code from} on {@code fromFs} to {@code to} on {@code toFs},
 * in parallel when threads > 1 and the file count reaches PARALLEL_MOVE_MIN.
 *
 * @throws IllegalArgumentException if any argument is null, or fromLocal is set for a non-local path
 */
private static void move(Counter counter, FileSystem fromFs, Path from, FileSystem toFs, Path to, boolean fromLocal, int threads) throws IOException, InterruptedException {
    if (counter == null) {
        throw new IllegalArgumentException("counter must not be null");
    }
    if (fromFs == null) {
        throw new IllegalArgumentException("fromFs must not be null");
    }
    if (from == null) {
        throw new IllegalArgumentException("from must not be null");
    }
    if (toFs == null) {
        throw new IllegalArgumentException("toFs must not be null");
    }
    if (to == null) {
        throw new IllegalArgumentException("to must not be null");
    }
    if (fromLocal && isLocalPath(from) == false) {
        throw new IllegalArgumentException("from must be on local file system");
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug(MessageFormat.format("Start moving files (from={0}, to={1})", from, to));
    }
    Path source = fromFs.makeQualified(from);
    Path target = toFs.makeQualified(to);
    List<Path> list = createFileListRelative(counter, fromFs, source);
    if (list.isEmpty()) {
        return;
    }
    boolean parallel = threads > 1 && list.size() >= PARALLEL_MOVE_MIN;
    if (LOG.isDebugEnabled()) {
        // Restored: the guard was empty; sibling implementations trace the chosen strategy.
        LOG.debug(MessageFormat.format("Process moving files (from={0}, to={1}, count={2}, parallel={3})", from, to, list.size(), parallel ? threads : "N/A"));
    }
    if (parallel) {
        ExecutorService executor = Executors.newFixedThreadPool(Math.min(threads, list.size()), DAEMON_THREAD_FACTORY);
        try {
            moveParallel(counter, fromFs, toFs, source, target, list, fromLocal, executor);
        } finally {
            executor.shutdownNow();
        }
    } else {
        moveSerial(counter, fromFs, toFs, source, target, list, fromLocal);
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug(MessageFormat.format("Finish moving files (from={0}, to={1}, count={2})", from, to, list.size()));
    }
}
// Validates all arguments, then moves the files under `from` to `to`; runs in parallel when
// threads > 1 and the file count reaches PARALLEL_MOVE_MIN, tracing start/process/finish at debug.
private static void move ( Counter counter , FileSystem fromFs , Path from , FileSystem toFs , Path to , boolean fromLocal , int threads ) throws IOException , InterruptedException { if ( counter == null ) { throw new IllegalArgumentException ( "counter must not be null" ) ; } if ( fromFs == null ) { throw new IllegalArgumentException ( "fromFs must not be null" ) ; } if ( from == null ) { throw new IllegalArgumentException ( "from must not be null" ) ; } if ( toFs == null ) { throw new IllegalArgumentException ( "toFs must not be null" ) ; } if ( to == null ) { throw new IllegalArgumentException ( "to must not be null" ) ; } if ( fromLocal && isLocalPath ( from ) == false ) { throw new IllegalArgumentException ( "from must be on local file system" ) ; } if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( MessageFormat . format ( "Start moving files (from={0}, to={1})" , from , to ) ) ; } Path source = fromFs . makeQualified ( from ) ; Path target = toFs . makeQualified ( to ) ; List < Path > list = createFileListRelative ( counter , fromFs , source ) ; if ( list . isEmpty ( ) ) { return ; } boolean parallel = threads > 1 && list . size ( ) >= PARALLEL_MOVE_MIN ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( MessageFormat . format ( "Process moving files (from={0}, to={1}, count={2}, parallel={3})" , from , to , list . size ( ) , parallel ? threads : "N/A" ) ) ; } if ( parallel ) { ExecutorService executor = Executors . newFixedThreadPool ( Math . min ( threads , list . size ( ) ) , DAEMON_THREAD_FACTORY ) ; try { moveParallel ( counter , fromFs , toFs , source , target , list , fromLocal , executor ) ; } finally { executor . shutdownNow ( ) ; } } else { moveSerial ( counter , fromFs , toFs , source , target , list , fromLocal ) ; } if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( MessageFormat . format ( "Finish moving files (from={0}, to={1}, count={2})" , from , to , list . size ( ) ) ) ; } }
646
/**
 * Moves the files under {@code from} on {@code fromFs} to {@code to} on {@code toFs},
 * in parallel when threads > 1 and the file count reaches PARALLEL_MOVE_MIN.
 *
 * @throws IllegalArgumentException if any argument is null, or fromLocal is set for a non-local path
 */
private static void move(Counter counter, FileSystem fromFs, Path from, FileSystem toFs, Path to, boolean fromLocal, int threads) throws IOException, InterruptedException {
    if (counter == null) {
        throw new IllegalArgumentException("counter must not be null");
    }
    if (fromFs == null) {
        throw new IllegalArgumentException("fromFs must not be null");
    }
    if (from == null) {
        throw new IllegalArgumentException("from must not be null");
    }
    if (toFs == null) {
        throw new IllegalArgumentException("toFs must not be null");
    }
    if (to == null) {
        throw new IllegalArgumentException("to must not be null");
    }
    if (fromLocal && isLocalPath(from) == false) {
        throw new IllegalArgumentException("from must be on local file system");
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug(MessageFormat.format("Start moving files (from={0}, to={1})", from, to));
    }
    Path source = fromFs.makeQualified(from);
    Path target = toFs.makeQualified(to);
    List<Path> list = createFileListRelative(counter, fromFs, source);
    if (list.isEmpty()) {
        return;
    }
    boolean parallel = threads > 1 && list.size() >= PARALLEL_MOVE_MIN;
    if (LOG.isDebugEnabled()) {
        LOG.debug(MessageFormat.format("Process moving files (from={0}, to={1}, count={2}, parallel={3})", from, to, list.size(), parallel ? threads : "N/A"));
    }
    if (parallel) {
        ExecutorService executor = Executors.newFixedThreadPool(Math.min(threads, list.size()), DAEMON_THREAD_FACTORY);
        try {
            moveParallel(counter, fromFs, toFs, source, target, list, fromLocal, executor);
        } finally {
            executor.shutdownNow();
        }
    } else {
        moveSerial(counter, fromFs, toFs, source, target, list, fromLocal);
    }
    if (LOG.isDebugEnabled()) {
        // Restored: the guard was empty; sibling implementations trace completion.
        LOG.debug(MessageFormat.format("Finish moving files (from={0}, to={1}, count={2})", from, to, list.size()));
    }
}
// Validates all arguments, then moves the files under `from` to `to`; runs in parallel when
// threads > 1 and the file count reaches PARALLEL_MOVE_MIN, tracing start/process/finish at debug.
private static void move ( Counter counter , FileSystem fromFs , Path from , FileSystem toFs , Path to , boolean fromLocal , int threads ) throws IOException , InterruptedException { if ( counter == null ) { throw new IllegalArgumentException ( "counter must not be null" ) ; } if ( fromFs == null ) { throw new IllegalArgumentException ( "fromFs must not be null" ) ; } if ( from == null ) { throw new IllegalArgumentException ( "from must not be null" ) ; } if ( toFs == null ) { throw new IllegalArgumentException ( "toFs must not be null" ) ; } if ( to == null ) { throw new IllegalArgumentException ( "to must not be null" ) ; } if ( fromLocal && isLocalPath ( from ) == false ) { throw new IllegalArgumentException ( "from must be on local file system" ) ; } if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( MessageFormat . format ( "Start moving files (from={0}, to={1})" , from , to ) ) ; } Path source = fromFs . makeQualified ( from ) ; Path target = toFs . makeQualified ( to ) ; List < Path > list = createFileListRelative ( counter , fromFs , source ) ; if ( list . isEmpty ( ) ) { return ; } boolean parallel = threads > 1 && list . size ( ) >= PARALLEL_MOVE_MIN ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( MessageFormat . format ( "Process moving files (from={0}, to={1}, count={2}, parallel={3})" , from , to , list . size ( ) , parallel ? threads : "N/A" ) ) ; } if ( parallel ) { ExecutorService executor = Executors . newFixedThreadPool ( Math . min ( threads , list . size ( ) ) , DAEMON_THREAD_FACTORY ) ; try { moveParallel ( counter , fromFs , toFs , source , target , list , fromLocal , executor ) ; } finally { executor . shutdownNow ( ) ; } } else { moveSerial ( counter , fromFs , toFs , source , target , list , fromLocal ) ; } if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( MessageFormat . format ( "Finish moving files (from={0}, to={1}, count={2})" , from , to , list . size ( ) ) ) ; } }
647
/**
 * Collects the indexed transfer for {@code path} from each store, logging and skipping
 * per-store failures; falls back to the delegate when content indexing is disabled.
 */
public List<Transfer> retrieveAll(final List<? extends ArtifactStore> stores, final String path, final EventMetadata eventMetadata) throws IndyWorkflowException {
    if (!indexCfg.isEnabled()) {
        return delegate.retrieveAll(stores, path, eventMetadata);
    }
    List<Transfer> results = new ArrayList<>();
    // Fixed: the population of `results` was missing entirely, so an empty list was always returned.
    stores.stream().map((store) -> {
        try {
            return retrieve(store, path, eventMetadata);
        } catch (IndyWorkflowException e) {
            logger.error(String.format("Failed to retrieve indexed content: %s:%s. Reason: %s", store.getKey(), path, e.getMessage()), e);
        }
        return null;
    }).filter(Objects::nonNull).forEachOrdered(results::add);
    return results;
}
// Collects the indexed transfer for `path` from each store in order, logging and skipping
// per-store failures; delegates directly when content indexing is disabled.
public List < Transfer > retrieveAll ( final List < ? extends ArtifactStore > stores , final String path , final EventMetadata eventMetadata ) throws IndyWorkflowException { if ( ! indexCfg . isEnabled ( ) ) { return delegate . retrieveAll ( stores , path , eventMetadata ) ; } List < Transfer > results = new ArrayList < > ( ) ; stores . stream ( ) . map ( ( store ) -> { try { return retrieve ( store , path , eventMetadata ) ; } catch ( IndyWorkflowException e ) { logger . error ( String . format ( "Failed to retrieve indexed content: %s:%s. Reason: %s" , store . getKey ( ) , path , e . getMessage ( ) ) , e ) ; } return null ; } ) . filter ( Objects :: nonNull ) . forEachOrdered ( results :: add ) ; return results ; }
648
/**
 * Collects the indexed transfer for {@code path} from each store, logging and skipping
 * per-store failures; falls back to the delegate when content indexing is disabled.
 */
public List<Transfer> retrieveAll(final List<? extends ArtifactStore> stores, final String path, final EventMetadata eventMetadata) throws IndyWorkflowException {
    if (!indexCfg.isEnabled()) {
        return delegate.retrieveAll(stores, path, eventMetadata);
    }
    List<Transfer> results = new ArrayList<>();
    stores.stream().map((store) -> {
        try {
            return retrieve(store, path, eventMetadata);
        } catch (IndyWorkflowException e) {
            // Restored: the per-store failure was swallowed without any trace.
            logger.error(String.format("Failed to retrieve indexed content: %s:%s. Reason: %s", store.getKey(), path, e.getMessage()), e);
        }
        return null;
    }).filter(Objects::nonNull).forEachOrdered(results::add);
    return results;
}
// Collects the indexed transfer for `path` from each store in order, logging and skipping
// per-store failures; delegates directly when content indexing is disabled.
public List < Transfer > retrieveAll ( final List < ? extends ArtifactStore > stores , final String path , final EventMetadata eventMetadata ) throws IndyWorkflowException { if ( ! indexCfg . isEnabled ( ) ) { return delegate . retrieveAll ( stores , path , eventMetadata ) ; } List < Transfer > results = new ArrayList < > ( ) ; stores . stream ( ) . map ( ( store ) -> { try { return retrieve ( store , path , eventMetadata ) ; } catch ( IndyWorkflowException e ) { logger . error ( String . format ( "Failed to retrieve indexed content: %s:%s. Reason: %s" , store . getKey ( ) , path , e . getMessage ( ) ) , e ) ; } return null ; } ) . filter ( Objects :: nonNull ) . forEachOrdered ( results :: add ) ; return results ; }
649
/** Deletes the given MbDelMassn via the current Hibernate session; rethrows failures after logging. */
public void delete(MbDelMassn persistentInstance) {
    // Restored: sibling implementations trace entry into the delete.
    log.debug("deleting MbDelMassn instance");
    try {
        sessionFactory.getCurrentSession().delete(persistentInstance);
        log.debug("delete successful");
    } catch (RuntimeException re) {
        log.error("delete failed", re);
        throw re;
    }
}
// Deletes the given MbDelMassn via the current Hibernate session, logging the outcome;
// rethrows any RuntimeException after logging it.
public void delete ( MbDelMassn persistentInstance ) { log . debug ( "deleting MbDelMassn instance" ) ; try { sessionFactory . getCurrentSession ( ) . delete ( persistentInstance ) ; log . debug ( "delete successful" ) ; } catch ( RuntimeException re ) { log . error ( "delete failed" , re ) ; throw re ; } }
650
/** Deletes the given MbDelMassn via the current Hibernate session; rethrows failures after logging. */
public void delete(MbDelMassn persistentInstance) {
    log.debug("deleting MbDelMassn instance");
    try {
        sessionFactory.getCurrentSession().delete(persistentInstance);
        // Restored: sibling implementations confirm the successful delete.
        log.debug("delete successful");
    } catch (RuntimeException re) {
        log.error("delete failed", re);
        throw re;
    }
}
// Deletes the given MbDelMassn via the current Hibernate session, logging the outcome;
// rethrows any RuntimeException after logging it.
public void delete ( MbDelMassn persistentInstance ) { log . debug ( "deleting MbDelMassn instance" ) ; try { sessionFactory . getCurrentSession ( ) . delete ( persistentInstance ) ; log . debug ( "delete successful" ) ; } catch ( RuntimeException re ) { log . error ( "delete failed" , re ) ; throw re ; } }
651
/** Deletes the given MbDelMassn via the current Hibernate session; rethrows failures after logging. */
public void delete(MbDelMassn persistentInstance) {
    log.debug("deleting MbDelMassn instance");
    try {
        sessionFactory.getCurrentSession().delete(persistentInstance);
        log.debug("delete successful");
    } catch (RuntimeException re) {
        // Restored: the failure was rethrown without any trace of its cause.
        log.error("delete failed", re);
        throw re;
    }
}
// Deletes the given MbDelMassn via the current Hibernate session, logging the outcome;
// rethrows any RuntimeException after logging it.
public void delete ( MbDelMassn persistentInstance ) { log . debug ( "deleting MbDelMassn instance" ) ; try { sessionFactory . getCurrentSession ( ) . delete ( persistentInstance ) ; log . debug ( "delete successful" ) ; } catch ( RuntimeException re ) { log . error ( "delete failed" , re ) ; throw re ; } }
652
/** Reads Dialoge/FONT_ZOOM from config; returns 1 when config is null, absent, or unparseable. */
public static double getFontZoomFactor(ConfigThingy config) {
    if (config == null) {
        // Restored: the null-config default path was silent; sibling implementation documents it.
        LOGGER.debug("Common: getFontZoomFactor(): ConfigThingy is NULL. Returning with default value 1 for zoom.");
        return 1;
    }
    Common.setLookAndFeelOnce();
    ConfigThingy zoom = config.query("Dialoge").query("FONT_ZOOM", 2);
    if (zoom.count() > 0) {
        try {
            return Double.parseDouble(zoom.getLastChild().toString());
        } catch (Exception x) {
            LOGGER.error("", x);
        }
    }
    return 1;
}
// Reads Dialoge/FONT_ZOOM from config; returns 1 when config is null, the value is absent,
// or parsing fails (logged).
public static double getFontZoomFactor ( ConfigThingy config ) { if ( config == null ) { LOGGER . debug ( "Common: getFontZoomFactor(): ConfigThingy is NULL. Returning with default value 1 for zoom." ) ; return 1 ; } Common . setLookAndFeelOnce ( ) ; ConfigThingy zoom = config . query ( "Dialoge" ) . query ( "FONT_ZOOM" , 2 ) ; if ( zoom . count ( ) > 0 ) { try { return Double . parseDouble ( zoom . getLastChild ( ) . toString ( ) ) ; } catch ( Exception x ) { LOGGER . error ( "" , x ) ; } } return 1 ; }
653
/** Reads Dialoge/FONT_ZOOM from config; returns 1 when config is null, absent, or unparseable. */
public static double getFontZoomFactor(ConfigThingy config) {
    if (config == null) {
        LOGGER.debug("Common: getFontZoomFactor(): ConfigThingy is NULL. Returning with default value 1 for zoom.");
        return 1;
    }
    Common.setLookAndFeelOnce();
    ConfigThingy zoom = config.query("Dialoge").query("FONT_ZOOM", 2);
    if (zoom.count() > 0) {
        try {
            return Double.parseDouble(zoom.getLastChild().toString());
        } catch (Exception x) {
            // Restored: the parse failure was swallowed; sibling implementation logs it before defaulting.
            LOGGER.error("", x);
        }
    }
    return 1;
}
// Reads Dialoge/FONT_ZOOM from config; returns 1 when config is null, the value is absent,
// or parsing fails (logged).
public static double getFontZoomFactor ( ConfigThingy config ) { if ( config == null ) { LOGGER . debug ( "Common: getFontZoomFactor(): ConfigThingy is NULL. Returning with default value 1 for zoom." ) ; return 1 ; } Common . setLookAndFeelOnce ( ) ; ConfigThingy zoom = config . query ( "Dialoge" ) . query ( "FONT_ZOOM" , 2 ) ; if ( zoom . count ( ) > 0 ) { try { return Double . parseDouble ( zoom . getLastChild ( ) . toString ( ) ) ; } catch ( Exception x ) { LOGGER . error ( "" , x ) ; } } return 1 ; }
654
/** Signals the event (by its simple class name) to each resolvable process instance. */
private void signalEventToProcesses(Event event, StatefulKnowledgeSession session, Set<Long> processIds) {
    for (Long pid : processIds) {
        ProcessInstance processInstance = session.getProcessInstance(pid);
        if (processInstance == null) {
            // Restored: the null branch was empty; an unresolvable id deserves a warning.
            LOGGER.warn("processInstance with ID {} not found, maybe it already terminated", pid);
        } else {
            processInstance.signalEvent(event.getClass().getSimpleName(), event);
        }
    }
}
// Signals the event (keyed by its simple class name) to each live process instance;
// warns when a process id no longer resolves to an instance.
private void signalEventToProcesses ( Event event , StatefulKnowledgeSession session , Set < Long > processIds ) { for ( Long pid : processIds ) { ProcessInstance processInstance = session . getProcessInstance ( pid ) ; if ( processInstance == null ) { LOGGER . warn ( "processInstance with ID {} not found, maybe it already terminated" , pid ) ; } else { processInstance . signalEvent ( event . getClass ( ) . getSimpleName ( ) , event ) ; } } }
655
/** Deletes a job's datastore subtree (known subnodes, root node, config child); wraps failures. */
public void deleteJob(String jobUUID) {
    try {
        String jobPath = getJobPath(jobUUID);
        for (String subnode : Arrays.asList(alertChildName, queryConfigChildName, taskChildName, tasksChildName, configChildName, brokerInfoChildName)) {
            spawnDataStore.delete(jobPath + subnode);
        }
        spawnDataStore.delete(jobPath);
        spawnDataStore.deleteChild(SPAWN_JOB_CONFIG_PATH, jobUUID);
    } catch (Exception e) {
        // Restored: the failure context was lost when wrapping; log before rethrowing.
        logger.warn("Failing to delete job, bailing", e);
        throw new RuntimeException(e);
    }
}
/**
 * Removes a job's sub-nodes, its main node, and its config-path child.
 * Failures are logged and rethrown unchecked.
 */
public void deleteJob(String jobUUID) {
  try {
    String path = getJobPath(jobUUID);
    for (String child : new String[] {
        alertChildName, queryConfigChildName, taskChildName,
        tasksChildName, configChildName, brokerInfoChildName}) {
      spawnDataStore.delete(path + child);
    }
    spawnDataStore.delete(path);
    spawnDataStore.deleteChild(SPAWN_JOB_CONFIG_PATH, jobUUID);
  } catch (Exception e) {
    logger.warn("Failing to delete job, bailing", e);
    throw new RuntimeException(e);
  }
}
656
/**
 * Downloads the schema zip from {@code zipLocation} into {@code destinationDir}
 * (under the schemas root), unless a previously downloaded copy already exists.
 *
 * @param zipLocation    URL of the zip to fetch
 * @param destinationDir sub-directory (relative to schemasRootDirectory) to cache into
 * @return the local zip file; on an I/O failure the (possibly empty) target file is returned
 * @throws SchemaProviderException if the cache directory cannot be created
 */
private File downloadZipIfNeeded(String zipLocation, String destinationDir) throws SchemaProviderException {
  HttpRequest httpRequest = HttpRequest.newBuilder().GET().uri(URI.create(zipLocation)).build();
  File schemasLocation = new File(schemasRootDirectory, destinationDir);
  File destinationFile = new File(schemasLocation, ZIP_FILE_NAME);
  if (destinationFile.exists()) {
    LOGGER.debug("Zip file will not be downloaded, already exists in temp directory");
    return destinationFile;
  }
  if (!schemasLocation.mkdirs() && !schemasLocation.exists()) {
    throw new SchemaProviderException("Unable to create schemaDirecory");
  }
  try {
    HttpResponse<Path> httpResponse =
        httpClient.send(httpRequest, BodyHandlers.ofFile(Paths.get(destinationFile.toURI())));
    destinationFile = httpResponse.body().toFile();
  } catch (IOException e) {
    // Fixed: log the actual request target (zipLocation, not the local directory)
    // and keep the exception for the stack trace instead of dropping it.
    LOGGER.info("There was some trouble sending a request to {}", zipLocation, e);
  } catch (InterruptedException e) {
    Thread.currentThread().interrupt();
    LOGGER.info("The thread was interrupted");
  }
  return destinationFile;
}
/**
 * Downloads the schema zip from {@code zipLocation} into {@code destinationDir}
 * (under the schemas root), unless a previously downloaded copy already exists.
 *
 * @param zipLocation    URL of the zip to fetch
 * @param destinationDir sub-directory (relative to schemasRootDirectory) to cache into
 * @return the local zip file; on an I/O failure the (possibly empty) target file is returned
 * @throws SchemaProviderException if the cache directory cannot be created
 */
private File downloadZipIfNeeded(String zipLocation, String destinationDir) throws SchemaProviderException {
  HttpRequest httpRequest = HttpRequest.newBuilder().GET().uri(URI.create(zipLocation)).build();
  File schemasLocation = new File(schemasRootDirectory, destinationDir);
  File destinationFile = new File(schemasLocation, ZIP_FILE_NAME);
  if (destinationFile.exists()) {
    LOGGER.debug("Zip file will not be downloaded, already exists in temp directory");
    return destinationFile;
  }
  if (!schemasLocation.mkdirs() && !schemasLocation.exists()) {
    throw new SchemaProviderException("Unable to create schemaDirecory");
  }
  try {
    HttpResponse<Path> httpResponse =
        httpClient.send(httpRequest, BodyHandlers.ofFile(Paths.get(destinationFile.toURI())));
    destinationFile = httpResponse.body().toFile();
  } catch (IOException e) {
    // Fixed: log the actual request target (zipLocation, not the local directory)
    // and keep the exception for the stack trace instead of dropping it.
    LOGGER.info("There was some trouble sending a request to {}", zipLocation, e);
  } catch (InterruptedException e) {
    Thread.currentThread().interrupt();
    LOGGER.info("The thread was interrupted");
  }
  return destinationFile;
}
657
/**
 * Downloads the schema zip from {@code zipLocation} into {@code destinationDir}
 * (under the schemas root), unless a previously downloaded copy already exists.
 *
 * @param zipLocation    URL of the zip to fetch
 * @param destinationDir sub-directory (relative to schemasRootDirectory) to cache into
 * @return the local zip file; on an I/O failure the (possibly empty) target file is returned
 * @throws SchemaProviderException if the cache directory cannot be created
 */
private File downloadZipIfNeeded(String zipLocation, String destinationDir) throws SchemaProviderException {
  HttpRequest httpRequest = HttpRequest.newBuilder().GET().uri(URI.create(zipLocation)).build();
  File schemasLocation = new File(schemasRootDirectory, destinationDir);
  File destinationFile = new File(schemasLocation, ZIP_FILE_NAME);
  if (destinationFile.exists()) {
    LOGGER.debug("Zip file will not be downloaded, already exists in temp directory");
    return destinationFile;
  }
  if (!schemasLocation.mkdirs() && !schemasLocation.exists()) {
    throw new SchemaProviderException("Unable to create schemaDirecory");
  }
  try {
    HttpResponse<Path> httpResponse =
        httpClient.send(httpRequest, BodyHandlers.ofFile(Paths.get(destinationFile.toURI())));
    destinationFile = httpResponse.body().toFile();
  } catch (IOException e) {
    // Fixed: the IOException was silently swallowed; log the request target
    // and keep the exception so a failed download is diagnosable.
    LOGGER.info("There was some trouble sending a request to {}", zipLocation, e);
  } catch (InterruptedException e) {
    Thread.currentThread().interrupt();
    LOGGER.info("The thread was interrupted");
  }
  return destinationFile;
}
/**
 * Downloads the schema zip from {@code zipLocation} into {@code destinationDir}
 * (under the schemas root), unless a previously downloaded copy already exists.
 *
 * @param zipLocation    URL of the zip to fetch
 * @param destinationDir sub-directory (relative to schemasRootDirectory) to cache into
 * @return the local zip file; on an I/O failure the (possibly empty) target file is returned
 * @throws SchemaProviderException if the cache directory cannot be created
 */
private File downloadZipIfNeeded(String zipLocation, String destinationDir) throws SchemaProviderException {
  HttpRequest httpRequest = HttpRequest.newBuilder().GET().uri(URI.create(zipLocation)).build();
  File schemasLocation = new File(schemasRootDirectory, destinationDir);
  File destinationFile = new File(schemasLocation, ZIP_FILE_NAME);
  if (destinationFile.exists()) {
    LOGGER.debug("Zip file will not be downloaded, already exists in temp directory");
    return destinationFile;
  }
  if (!schemasLocation.mkdirs() && !schemasLocation.exists()) {
    throw new SchemaProviderException("Unable to create schemaDirecory");
  }
  try {
    HttpResponse<Path> httpResponse =
        httpClient.send(httpRequest, BodyHandlers.ofFile(Paths.get(destinationFile.toURI())));
    destinationFile = httpResponse.body().toFile();
  } catch (IOException e) {
    // Fixed: log the actual request target (zipLocation, not the local directory)
    // and keep the exception for the stack trace instead of dropping it.
    LOGGER.info("There was some trouble sending a request to {}", zipLocation, e);
  } catch (InterruptedException e) {
    Thread.currentThread().interrupt();
    LOGGER.info("The thread was interrupted");
  }
  return destinationFile;
}
658
/**
 * Downloads the schema zip from {@code zipLocation} into {@code destinationDir}
 * (under the schemas root), unless a previously downloaded copy already exists.
 *
 * @param zipLocation    URL of the zip to fetch
 * @param destinationDir sub-directory (relative to schemasRootDirectory) to cache into
 * @return the local zip file; on an I/O failure the (possibly empty) target file is returned
 * @throws SchemaProviderException if the cache directory cannot be created
 */
private File downloadZipIfNeeded(String zipLocation, String destinationDir) throws SchemaProviderException {
  HttpRequest httpRequest = HttpRequest.newBuilder().GET().uri(URI.create(zipLocation)).build();
  File schemasLocation = new File(schemasRootDirectory, destinationDir);
  File destinationFile = new File(schemasLocation, ZIP_FILE_NAME);
  if (destinationFile.exists()) {
    LOGGER.debug("Zip file will not be downloaded, already exists in temp directory");
    return destinationFile;
  }
  if (!schemasLocation.mkdirs() && !schemasLocation.exists()) {
    throw new SchemaProviderException("Unable to create schemaDirecory");
  }
  try {
    HttpResponse<Path> httpResponse =
        httpClient.send(httpRequest, BodyHandlers.ofFile(Paths.get(destinationFile.toURI())));
    destinationFile = httpResponse.body().toFile();
  } catch (IOException e) {
    // Fixed: log the actual request target (zipLocation, not the local directory)
    // and keep the exception for the stack trace instead of dropping it.
    LOGGER.info("There was some trouble sending a request to {}", zipLocation, e);
  } catch (InterruptedException e) {
    // Restore the interrupt flag and record that the download was aborted.
    Thread.currentThread().interrupt();
    LOGGER.info("The thread was interrupted");
  }
  return destinationFile;
}
/**
 * Downloads the schema zip from {@code zipLocation} into {@code destinationDir}
 * (under the schemas root), unless a previously downloaded copy already exists.
 *
 * @param zipLocation    URL of the zip to fetch
 * @param destinationDir sub-directory (relative to schemasRootDirectory) to cache into
 * @return the local zip file; on an I/O failure the (possibly empty) target file is returned
 * @throws SchemaProviderException if the cache directory cannot be created
 */
private File downloadZipIfNeeded(String zipLocation, String destinationDir) throws SchemaProviderException {
  HttpRequest httpRequest = HttpRequest.newBuilder().GET().uri(URI.create(zipLocation)).build();
  File schemasLocation = new File(schemasRootDirectory, destinationDir);
  File destinationFile = new File(schemasLocation, ZIP_FILE_NAME);
  if (destinationFile.exists()) {
    LOGGER.debug("Zip file will not be downloaded, already exists in temp directory");
    return destinationFile;
  }
  if (!schemasLocation.mkdirs() && !schemasLocation.exists()) {
    throw new SchemaProviderException("Unable to create schemaDirecory");
  }
  try {
    HttpResponse<Path> httpResponse =
        httpClient.send(httpRequest, BodyHandlers.ofFile(Paths.get(destinationFile.toURI())));
    destinationFile = httpResponse.body().toFile();
  } catch (IOException e) {
    // Fixed: log the actual request target (zipLocation, not the local directory)
    // and keep the exception for the stack trace instead of dropping it.
    LOGGER.info("There was some trouble sending a request to {}", zipLocation, e);
  } catch (InterruptedException e) {
    Thread.currentThread().interrupt();
    LOGGER.info("The thread was interrupted");
  }
  return destinationFile;
}
659
/**
 * Authenticates the user against LDAP, retrying once with stored credentials
 * when {@code tryFirstPass} is set, then populates roles, principals, and
 * credentials on success.
 *
 * @param nameCb callback holding the user name
 * @param passCb callback holding the password
 * @return true on successful authentication
 * @throws LoginException if authentication fails or an LDAP error occurs
 */
protected boolean login(final NameCallback nameCb, final PasswordCallback passCb) throws LoginException {
  try {
    // First attempt: credentials from the callbacks (not the shared state).
    getCredentials(nameCb, passCb, false);
    authRequest.setUser(new User(nameCb.getName()));
    authRequest.setCredential(new Credential(passCb.getPassword()));
    AuthenticationResponse response = auth.authenticate(authRequest);
    LdapEntry entry = null;
    if (response.isSuccess()) {
      entry = response.getLdapEntry();
      if (entry != null) {
        roles.addAll(LdapRole.toRoles(entry));
        if (defaultRole != null && !defaultRole.isEmpty()) {
          roles.addAll(defaultRole);
        }
      }
      loginSuccess = true;
    } else {
      if (tryFirstPass) {
        // Retry with credentials from shared state (JAAS try_first_pass semantics).
        getCredentials(nameCb, passCb, true);
        response = auth.authenticate(authRequest);
        if (response.isSuccess()) {
          entry = response.getLdapEntry();
          if (entry != null) {
            roles.addAll(LdapRole.toRoles(entry));
          }
          // NOTE: on the retry path defaultRole is added even when entry is null,
          // unlike the first-attempt path above — preserved as-is.
          if (defaultRole != null && !defaultRole.isEmpty()) {
            roles.addAll(defaultRole);
          }
          loginSuccess = true;
        } else {
          loginSuccess = false;
        }
      } else {
        loginSuccess = false;
      }
    }
    if (!loginSuccess) {
      throw new LoginException("Authentication failed: " + response);
    } else {
      if (setLdapPrincipal) {
        principals.add(new LdapPrincipal(nameCb.getName(), entry));
      }
      final String loginDn = response.getResolvedDn();
      if (loginDn != null && setLdapDnPrincipal) {
        principals.add(new LdapDnPrincipal(loginDn, entry));
      }
      if (setLdapCredential) {
        credentials.add(new LdapCredential(passCb.getPassword()));
      }
      storeCredentials(nameCb, passCb, loginDn);
    }
  } catch (LdapException e) {
    logger.debug("Error occurred attempting authentication", e);
    loginSuccess = false;
    // NOTE(review): only the message survives; the LdapException cause is lost.
    throw new LoginException(e.getMessage());
  }
  return true;
}
/**
 * Authenticates the user against LDAP, retrying once with stored credentials
 * when {@code tryFirstPass} is set, then populates roles, principals, and
 * credentials on success. Failures are logged at debug level before throwing.
 *
 * @param nameCb callback holding the user name
 * @param passCb callback holding the password
 * @return true on successful authentication
 * @throws LoginException if authentication fails or an LDAP error occurs
 */
protected boolean login(final NameCallback nameCb, final PasswordCallback passCb) throws LoginException {
  try {
    // First attempt: credentials from the callbacks (not the shared state).
    getCredentials(nameCb, passCb, false);
    authRequest.setUser(new User(nameCb.getName()));
    authRequest.setCredential(new Credential(passCb.getPassword()));
    AuthenticationResponse response = auth.authenticate(authRequest);
    LdapEntry entry = null;
    if (response.isSuccess()) {
      entry = response.getLdapEntry();
      if (entry != null) {
        roles.addAll(LdapRole.toRoles(entry));
        if (defaultRole != null && !defaultRole.isEmpty()) {
          roles.addAll(defaultRole);
        }
      }
      loginSuccess = true;
    } else {
      if (tryFirstPass) {
        // Retry with credentials from shared state (JAAS try_first_pass semantics).
        getCredentials(nameCb, passCb, true);
        response = auth.authenticate(authRequest);
        if (response.isSuccess()) {
          entry = response.getLdapEntry();
          if (entry != null) {
            roles.addAll(LdapRole.toRoles(entry));
          }
          // NOTE: on the retry path defaultRole is added even when entry is null,
          // unlike the first-attempt path above — preserved as-is.
          if (defaultRole != null && !defaultRole.isEmpty()) {
            roles.addAll(defaultRole);
          }
          loginSuccess = true;
        } else {
          loginSuccess = false;
        }
      } else {
        loginSuccess = false;
      }
    }
    if (!loginSuccess) {
      logger.debug("Authentication failed: " + response);
      throw new LoginException("Authentication failed: " + response);
    } else {
      if (setLdapPrincipal) {
        principals.add(new LdapPrincipal(nameCb.getName(), entry));
      }
      final String loginDn = response.getResolvedDn();
      if (loginDn != null && setLdapDnPrincipal) {
        principals.add(new LdapDnPrincipal(loginDn, entry));
      }
      if (setLdapCredential) {
        credentials.add(new LdapCredential(passCb.getPassword()));
      }
      storeCredentials(nameCb, passCb, loginDn);
    }
  } catch (LdapException e) {
    logger.debug("Error occurred attempting authentication", e);
    loginSuccess = false;
    // NOTE(review): only the message survives; the LdapException cause is lost.
    throw new LoginException(e.getMessage());
  }
  return true;
}
660
/**
 * Authenticates the user against LDAP, retrying once with stored credentials
 * when {@code tryFirstPass} is set, then populates roles, principals, and
 * credentials on success.
 *
 * @param nameCb callback holding the user name
 * @param passCb callback holding the password
 * @return true on successful authentication
 * @throws LoginException if authentication fails or an LDAP error occurs
 */
protected boolean login(final NameCallback nameCb, final PasswordCallback passCb) throws LoginException {
  try {
    // First attempt: credentials from the callbacks (not the shared state).
    getCredentials(nameCb, passCb, false);
    authRequest.setUser(new User(nameCb.getName()));
    authRequest.setCredential(new Credential(passCb.getPassword()));
    AuthenticationResponse response = auth.authenticate(authRequest);
    LdapEntry entry = null;
    if (response.isSuccess()) {
      entry = response.getLdapEntry();
      if (entry != null) {
        roles.addAll(LdapRole.toRoles(entry));
        if (defaultRole != null && !defaultRole.isEmpty()) {
          roles.addAll(defaultRole);
        }
      }
      loginSuccess = true;
    } else {
      if (tryFirstPass) {
        // Retry with credentials from shared state (JAAS try_first_pass semantics).
        getCredentials(nameCb, passCb, true);
        response = auth.authenticate(authRequest);
        if (response.isSuccess()) {
          entry = response.getLdapEntry();
          if (entry != null) {
            roles.addAll(LdapRole.toRoles(entry));
          }
          // NOTE: on the retry path defaultRole is added even when entry is null,
          // unlike the first-attempt path above — preserved as-is.
          if (defaultRole != null && !defaultRole.isEmpty()) {
            roles.addAll(defaultRole);
          }
          loginSuccess = true;
        } else {
          loginSuccess = false;
        }
      } else {
        loginSuccess = false;
      }
    }
    if (!loginSuccess) {
      logger.debug("Authentication failed: " + response);
      throw new LoginException("Authentication failed: " + response);
    } else {
      if (setLdapPrincipal) {
        principals.add(new LdapPrincipal(nameCb.getName(), entry));
      }
      final String loginDn = response.getResolvedDn();
      if (loginDn != null && setLdapDnPrincipal) {
        principals.add(new LdapDnPrincipal(loginDn, entry));
      }
      if (setLdapCredential) {
        credentials.add(new LdapCredential(passCb.getPassword()));
      }
      storeCredentials(nameCb, passCb, loginDn);
    }
  } catch (LdapException e) {
    // NOTE(review): only the message survives; the LdapException cause is lost.
    loginSuccess = false;
    throw new LoginException(e.getMessage());
  }
  return true;
}
/**
 * Authenticates the user against LDAP, retrying once with stored credentials
 * when {@code tryFirstPass} is set, then populates roles, principals, and
 * credentials on success. Failures are logged at debug level before throwing.
 *
 * @param nameCb callback holding the user name
 * @param passCb callback holding the password
 * @return true on successful authentication
 * @throws LoginException if authentication fails or an LDAP error occurs
 */
protected boolean login(final NameCallback nameCb, final PasswordCallback passCb) throws LoginException {
  try {
    // First attempt: credentials from the callbacks (not the shared state).
    getCredentials(nameCb, passCb, false);
    authRequest.setUser(new User(nameCb.getName()));
    authRequest.setCredential(new Credential(passCb.getPassword()));
    AuthenticationResponse response = auth.authenticate(authRequest);
    LdapEntry entry = null;
    if (response.isSuccess()) {
      entry = response.getLdapEntry();
      if (entry != null) {
        roles.addAll(LdapRole.toRoles(entry));
        if (defaultRole != null && !defaultRole.isEmpty()) {
          roles.addAll(defaultRole);
        }
      }
      loginSuccess = true;
    } else {
      if (tryFirstPass) {
        // Retry with credentials from shared state (JAAS try_first_pass semantics).
        getCredentials(nameCb, passCb, true);
        response = auth.authenticate(authRequest);
        if (response.isSuccess()) {
          entry = response.getLdapEntry();
          if (entry != null) {
            roles.addAll(LdapRole.toRoles(entry));
          }
          // NOTE: on the retry path defaultRole is added even when entry is null,
          // unlike the first-attempt path above — preserved as-is.
          if (defaultRole != null && !defaultRole.isEmpty()) {
            roles.addAll(defaultRole);
          }
          loginSuccess = true;
        } else {
          loginSuccess = false;
        }
      } else {
        loginSuccess = false;
      }
    }
    if (!loginSuccess) {
      logger.debug("Authentication failed: " + response);
      throw new LoginException("Authentication failed: " + response);
    } else {
      if (setLdapPrincipal) {
        principals.add(new LdapPrincipal(nameCb.getName(), entry));
      }
      final String loginDn = response.getResolvedDn();
      if (loginDn != null && setLdapDnPrincipal) {
        principals.add(new LdapDnPrincipal(loginDn, entry));
      }
      if (setLdapCredential) {
        credentials.add(new LdapCredential(passCb.getPassword()));
      }
      storeCredentials(nameCb, passCb, loginDn);
    }
  } catch (LdapException e) {
    logger.debug("Error occurred attempting authentication", e);
    loginSuccess = false;
    // NOTE(review): only the message survives; the LdapException cause is lost.
    throw new LoginException(e.getMessage());
  }
  return true;
}
661
/**
 * Executes the bulk request synchronously with a 2-minute timeout, waiting
 * for the refresh to complete and for one active shard. Success or failure
 * is reported to the health checker.
 *
 * @param request the bulk request to execute
 */
public void synchronousBulk(BulkRequest request) {
  request.timeout(TimeValue.timeValueMinutes(2));
  request.setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL);
  request.waitForActiveShards(ActiveShardCount.ONE);
  try {
    // Removed unused locals (`size`, `responses`) that were never read.
    client.bulk(request, RequestOptions.DEFAULT);
    healthChecker.health();
  } catch (Throwable t) {
    // Deliberately broad: any failure, including Errors, marks the backend unhealthy.
    healthChecker.unHealth(t);
  }
}
/**
 * Runs the bulk request synchronously (2-minute timeout, WAIT_UNTIL refresh,
 * one active shard), logs the elapsed time, and reports the outcome to the
 * health checker.
 */
public void synchronousBulk(BulkRequest request) {
  request.timeout(TimeValue.timeValueMinutes(2));
  request.setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL);
  request.waitForActiveShards(ActiveShardCount.ONE);
  try {
    final int requestCount = request.requests().size();
    final BulkResponse bulkResponse = client.bulk(request, RequestOptions.DEFAULT);
    log.info("Synchronous bulk took time: {} millis, size: {}", bulkResponse.getTook().getMillis(), requestCount);
    healthChecker.health();
  } catch (Throwable t) {
    healthChecker.unHealth(t);
  }
}
662
/**
 * Starts a control-flow process for the context's control if no execution
 * for that control exists yet.
 *
 * @param context the control-flow context; returned unchanged
 * @return the same context instance
 */
private IsaControlFlowContext startProcessIfMissing(IsaControlFlowContext context) {
  Control control = context.getControl();
  String uuid = control.getUuid();
  List<ExecutionImpl> executionList = findExecutionForElement(IIsaControlFlowProcess.KEY, uuid);
  if (executionList == null || executionList.isEmpty()) {
    if (LOG.isDebugEnabled()) {
      // The guarded block was empty; emit the message it was evidently meant to log.
      LOG.debug("No process found for control, starting one: " + uuid);
    }
    startProcess(context);
  }
  return context;
}
/**
 * Ensures a control-flow process exists for the context's control, starting
 * one when no execution for the control's uuid can be found.
 */
private IsaControlFlowContext startProcessIfMissing(IsaControlFlowContext context) {
  Control control = context.getControl();
  String uuid = control.getUuid();
  List<ExecutionImpl> executions = findExecutionForElement(IIsaControlFlowProcess.KEY, uuid);
  boolean processMissing = executions == null || executions.isEmpty();
  if (processMissing) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("No process for control: " + uuid);
    }
    startProcess(context);
  }
  return context;
}
663
/**
 * Resolves the Job bean named by the descriptor.
 *
 * @throws JobNotFoundException if no bean for the job name exists
 */
public Job jobsFrom(JobDescriptor descriptor) throws JobNotFoundException {
  final Job job = context.getBean(descriptor.getJobName(), Job.class);
  if (job != null) {
    return job;
  }
  throw new JobNotFoundException(
      String.format("Could not find job implementation for job name %s", descriptor.getJobName()));
}
/**
 * Looks up the Job bean for the descriptor's job name, logging and throwing
 * when the bean cannot be found.
 */
public Job jobsFrom(JobDescriptor descriptor) throws JobNotFoundException {
  Job job = context.getBean(descriptor.getJobName(), Job.class);
  if (job != null) {
    return job;
  }
  String message = String.format("Could not find job implementation for job name %s", descriptor.getJobName());
  logger.error(message);
  throw new JobNotFoundException(message);
}
664
/**
 * Opens a stream for the given document URI, resolved against the base
 * resource URI. Returns null when the resource cannot be opened.
 */
private InputStream getResourceAsStream(URI docUri) {
  final URI resolvedURI = _baseResourceURI.resolve(docUri);
  try {
    return resolvedURI.toURL().openStream();
  } catch (Exception e) {
    // NOTE(review): failure is swallowed and null returned — presumably a
    // deliberate best-effort lookup; confirm before adding logging.
    return null;
  }
}
/**
 * Opens a stream for the document URI resolved against the base resource URI,
 * warning and returning null when the resource cannot be opened.
 */
private InputStream getResourceAsStream(URI docUri) {
  URI target = _baseResourceURI.resolve(docUri);
  try {
    return target.toURL().openStream();
  } catch (Exception e) {
    __log.warn("Couldn't load XSL resource " + docUri, e);
    return null;
  }
}
665
/**
 * Creates and persists a "SYSTEM2" workflow type, logging the currently
 * active workflow configurations and the saved type id. Persistence
 * failures are logged and rethrown.
 */
private void createWorkflowType() {
  InsightsWorkflowType workflowType = new InsightsWorkflowType();
  WorkflowDAL workflowDAL = new WorkflowDAL();
  workflowType.setWorkflowType("SYSTEM2");
  try {
    List<InsightsWorkflowConfiguration> worlflowList = workflowDAL.getAllActiveWorkflowConfiguration();
    // Previously fetched but never used; log it (matching this class's sibling
    // methods) so the fetch has an observable purpose.
    logger.debug("Object save worlflowList {} ", worlflowList);
    int workflowTypeId = workflowDAL.saveWorkflowType(workflowType);
    logger.debug("Object save {} ", workflowTypeId);
  } catch (Exception e) {
    logger.error(e);
    throw e;
  }
}
/**
 * Persists a "SYSTEM2" workflow type, logging the active workflow
 * configurations and the saved type id; failures are logged and rethrown.
 */
private void createWorkflowType() {
  WorkflowDAL dal = new WorkflowDAL();
  InsightsWorkflowType type = new InsightsWorkflowType();
  type.setWorkflowType("SYSTEM2");
  try {
    List<InsightsWorkflowConfiguration> activeConfigurations = dal.getAllActiveWorkflowConfiguration();
    logger.debug("Object save worlflowList {} ", activeConfigurations);
    int savedTypeId = dal.saveWorkflowType(type);
    logger.debug("Object save {} ", savedTypeId);
  } catch (Exception e) {
    logger.error(e);
    throw e;
  }
}
666
/**
 * Creates and persists a "SYSTEM2" workflow type, logging the currently
 * active workflow configurations and the saved type id. Persistence
 * failures are logged and rethrown.
 */
private void createWorkflowType() {
  InsightsWorkflowType workflowType = new InsightsWorkflowType();
  WorkflowDAL workflowDAL = new WorkflowDAL();
  workflowType.setWorkflowType("SYSTEM2");
  try {
    List<InsightsWorkflowConfiguration> worlflowList = workflowDAL.getAllActiveWorkflowConfiguration();
    logger.debug("Object save worlflowList {} ", worlflowList);
    int workflowTypeId = workflowDAL.saveWorkflowType(workflowType);
    // Previously assigned but never used; log it (matching this class's sibling
    // methods) so the save result is observable.
    logger.debug("Object save {} ", workflowTypeId);
  } catch (Exception e) {
    logger.error(e);
    throw e;
  }
}
/**
 * Persists a "SYSTEM2" workflow type, logging the active workflow
 * configurations and the saved type id; failures are logged and rethrown.
 */
private void createWorkflowType() {
  WorkflowDAL dal = new WorkflowDAL();
  InsightsWorkflowType type = new InsightsWorkflowType();
  type.setWorkflowType("SYSTEM2");
  try {
    List<InsightsWorkflowConfiguration> activeConfigurations = dal.getAllActiveWorkflowConfiguration();
    logger.debug("Object save worlflowList {} ", activeConfigurations);
    int savedTypeId = dal.saveWorkflowType(type);
    logger.debug("Object save {} ", savedTypeId);
  } catch (Exception e) {
    logger.error(e);
    throw e;
  }
}
667
/**
 * Creates and persists a "SYSTEM2" workflow type, logging the currently
 * active workflow configurations and the saved type id. Persistence
 * failures are logged and rethrown.
 */
private void createWorkflowType() {
  InsightsWorkflowType workflowType = new InsightsWorkflowType();
  WorkflowDAL workflowDAL = new WorkflowDAL();
  workflowType.setWorkflowType("SYSTEM2");
  try {
    List<InsightsWorkflowConfiguration> worlflowList = workflowDAL.getAllActiveWorkflowConfiguration();
    logger.debug("Object save worlflowList {} ", worlflowList);
    int workflowTypeId = workflowDAL.saveWorkflowType(workflowType);
    logger.debug("Object save {} ", workflowTypeId);
  } catch (Exception e) {
    // The catch previously rethrew without recording the failure; log it
    // (matching this class's sibling methods) before propagating.
    logger.error(e);
    throw e;
  }
}
/**
 * Persists a "SYSTEM2" workflow type, logging the active workflow
 * configurations and the saved type id; failures are logged and rethrown.
 */
private void createWorkflowType() {
  WorkflowDAL dal = new WorkflowDAL();
  InsightsWorkflowType type = new InsightsWorkflowType();
  type.setWorkflowType("SYSTEM2");
  try {
    List<InsightsWorkflowConfiguration> activeConfigurations = dal.getAllActiveWorkflowConfiguration();
    logger.debug("Object save worlflowList {} ", activeConfigurations);
    int savedTypeId = dal.saveWorkflowType(type);
    logger.debug("Object save {} ", savedTypeId);
  } catch (Exception e) {
    logger.error(e);
    throw e;
  }
}
668
/**
 * Builds a UsageDataFrame for the filter described by the command, either
 * from a "QueryAddress" property ("type:id") or from separate
 * "FilterType"/"FilterID" properties.
 *
 * @param c command carrying the filter properties
 * @return the populated data frame, or null when filter type/id are missing
 */
private UsageDataFrame extractData(Command c) {
  String type, id;
  if (c.getProperty("QueryAddress") != null) {
    // Compact "type:id" address form takes precedence over split properties.
    String[] searchParam = c.getProperty("QueryAddress").split(":");
    type = searchParam[0];
    id = searchParam[1];
  } else {
    type = c.getProperty("FilterType");
    id = c.getProperty("FilterID");
  }
  if (type != null && id != null) {
    Query q = null;
    if (type.startsWith("obj")) {
      // Object filter: bounded "powered" query; missing/CURRENT_DATE bounds
      // default to epoch (start) and now (stop).
      q = em.createNamedQuery("powered");
      String date = c.getProperty("startDate");
      if (date == null || date.isEmpty() || date.equals("CURRENT_DATE")) {
        q.setParameter("startDate", new Date(0));
      } else {
        q.setParameter("startDate", new Date(Long.parseLong(date)));
      }
      date = c.getProperty("stopDate");
      if (date == null || date.isEmpty() || date.equals("CURRENT_DATE")) {
        q.setParameter("stopDate", new Date());
      } else {
        q.setParameter("stopDate", new Date(Long.parseLong(date)));
      }
      q.setParameter("uuid", id.trim());
      q.setParameter("protocol", "%");
    } else if (type.equals("tag")) {
      // NOTE(review): result is computed but never used — branch looks unfinished.
      Collection<EnvObjectLogic> objs = getApi().things().findAll();
    } else if (type.startsWith("prot")) {
      // NOTE(review): result is computed but never used — branch looks unfinished.
      Collection<EnvObjectLogic> objs = getApi().things().findByProtocol(id);
    } else if (type.equals("room")) {
      // NOTE(review): intentionally empty? No handling for "room" filters.
    } else if (type.startsWith("env")) {
      // NOTE(review): result is computed but never used — branch looks unfinished.
      Collection<EnvObjectLogic> objs = getApi().things().findByEnvironment(id);
    }
    // NOTE(review): q is only assigned in the "obj" branch; for every other
    // filter type this dereferences null — confirm callers only pass "obj".
    UsageDataFrame df = new UsageDataFrame(UsageDataFrame.FULL_UPDATE, q.getResultList());
    return df;
  } else {
    LOG.warn("Harvester cannot extract data if it misses FilterType or FilterID properties");
    return null;
  }
}
/**
 * Builds a UsageDataFrame for the filter described by the command, either
 * from a "QueryAddress" property ("type:id") or from separate
 * "FilterType"/"FilterID" properties. The resulting frame is logged.
 *
 * @param c command carrying the filter properties
 * @return the populated data frame, or null when filter type/id are missing
 */
private UsageDataFrame extractData(Command c) {
  String type, id;
  if (c.getProperty("QueryAddress") != null) {
    // Compact "type:id" address form takes precedence over split properties.
    String[] searchParam = c.getProperty("QueryAddress").split(":");
    type = searchParam[0];
    id = searchParam[1];
  } else {
    type = c.getProperty("FilterType");
    id = c.getProperty("FilterID");
  }
  if (type != null && id != null) {
    Query q = null;
    if (type.startsWith("obj")) {
      // Object filter: bounded "powered" query; missing/CURRENT_DATE bounds
      // default to epoch (start) and now (stop).
      q = em.createNamedQuery("powered");
      String date = c.getProperty("startDate");
      if (date == null || date.isEmpty() || date.equals("CURRENT_DATE")) {
        q.setParameter("startDate", new Date(0));
      } else {
        q.setParameter("startDate", new Date(Long.parseLong(date)));
      }
      date = c.getProperty("stopDate");
      if (date == null || date.isEmpty() || date.equals("CURRENT_DATE")) {
        q.setParameter("stopDate", new Date());
      } else {
        q.setParameter("stopDate", new Date(Long.parseLong(date)));
      }
      q.setParameter("uuid", id.trim());
      q.setParameter("protocol", "%");
    } else if (type.equals("tag")) {
      // NOTE(review): result is computed but never used — branch looks unfinished.
      Collection<EnvObjectLogic> objs = getApi().things().findAll();
    } else if (type.startsWith("prot")) {
      // NOTE(review): result is computed but never used — branch looks unfinished.
      Collection<EnvObjectLogic> objs = getApi().things().findByProtocol(id);
    } else if (type.equals("room")) {
      // NOTE(review): intentionally empty? No handling for "room" filters.
    } else if (type.startsWith("env")) {
      // NOTE(review): result is computed but never used — branch looks unfinished.
      Collection<EnvObjectLogic> objs = getApi().things().findByEnvironment(id);
    }
    // NOTE(review): q is only assigned in the "obj" branch; for every other
    // filter type this dereferences null — confirm callers only pass "obj".
    UsageDataFrame df = new UsageDataFrame(UsageDataFrame.FULL_UPDATE, q.getResultList());
    LOG.info(df.toString());
    return df;
  } else {
    LOG.warn("Harvester cannot extract data if it misses FilterType or FilterID properties");
    return null;
  }
}
669
/**
 * Builds a UsageDataFrame for the filter described by the command, either
 * from a "QueryAddress" property ("type:id") or from separate
 * "FilterType"/"FilterID" properties. The resulting frame is logged.
 *
 * @param c command carrying the filter properties
 * @return the populated data frame, or null when filter type/id are missing
 */
private UsageDataFrame extractData(Command c) {
  String type, id;
  if (c.getProperty("QueryAddress") != null) {
    // Compact "type:id" address form takes precedence over split properties.
    String[] searchParam = c.getProperty("QueryAddress").split(":");
    type = searchParam[0];
    id = searchParam[1];
  } else {
    type = c.getProperty("FilterType");
    id = c.getProperty("FilterID");
  }
  if (type != null && id != null) {
    Query q = null;
    if (type.startsWith("obj")) {
      // Object filter: bounded "powered" query; missing/CURRENT_DATE bounds
      // default to epoch (start) and now (stop).
      q = em.createNamedQuery("powered");
      String date = c.getProperty("startDate");
      if (date == null || date.isEmpty() || date.equals("CURRENT_DATE")) {
        q.setParameter("startDate", new Date(0));
      } else {
        q.setParameter("startDate", new Date(Long.parseLong(date)));
      }
      date = c.getProperty("stopDate");
      if (date == null || date.isEmpty() || date.equals("CURRENT_DATE")) {
        q.setParameter("stopDate", new Date());
      } else {
        q.setParameter("stopDate", new Date(Long.parseLong(date)));
      }
      q.setParameter("uuid", id.trim());
      q.setParameter("protocol", "%");
    } else if (type.equals("tag")) {
      // NOTE(review): result is computed but never used — branch looks unfinished.
      Collection<EnvObjectLogic> objs = getApi().things().findAll();
    } else if (type.startsWith("prot")) {
      // NOTE(review): result is computed but never used — branch looks unfinished.
      Collection<EnvObjectLogic> objs = getApi().things().findByProtocol(id);
    } else if (type.equals("room")) {
      // NOTE(review): intentionally empty? No handling for "room" filters.
    } else if (type.startsWith("env")) {
      // NOTE(review): result is computed but never used — branch looks unfinished.
      Collection<EnvObjectLogic> objs = getApi().things().findByEnvironment(id);
    }
    // NOTE(review): q is only assigned in the "obj" branch; for every other
    // filter type this dereferences null — confirm callers only pass "obj".
    UsageDataFrame df = new UsageDataFrame(UsageDataFrame.FULL_UPDATE, q.getResultList());
    LOG.info(df.toString());
    return df;
  } else {
    // Missing filter type/id: silently abort (no warning in this variant).
    return null;
  }
}
/**
 * Builds a UsageDataFrame for the filter described by the command, either
 * from a "QueryAddress" property ("type:id") or from separate
 * "FilterType"/"FilterID" properties. The resulting frame is logged.
 *
 * @param c command carrying the filter properties
 * @return the populated data frame, or null when filter type/id are missing
 */
private UsageDataFrame extractData(Command c) {
  String type, id;
  if (c.getProperty("QueryAddress") != null) {
    // Compact "type:id" address form takes precedence over split properties.
    String[] searchParam = c.getProperty("QueryAddress").split(":");
    type = searchParam[0];
    id = searchParam[1];
  } else {
    type = c.getProperty("FilterType");
    id = c.getProperty("FilterID");
  }
  if (type != null && id != null) {
    Query q = null;
    if (type.startsWith("obj")) {
      // Object filter: bounded "powered" query; missing/CURRENT_DATE bounds
      // default to epoch (start) and now (stop).
      q = em.createNamedQuery("powered");
      String date = c.getProperty("startDate");
      if (date == null || date.isEmpty() || date.equals("CURRENT_DATE")) {
        q.setParameter("startDate", new Date(0));
      } else {
        q.setParameter("startDate", new Date(Long.parseLong(date)));
      }
      date = c.getProperty("stopDate");
      if (date == null || date.isEmpty() || date.equals("CURRENT_DATE")) {
        q.setParameter("stopDate", new Date());
      } else {
        q.setParameter("stopDate", new Date(Long.parseLong(date)));
      }
      q.setParameter("uuid", id.trim());
      q.setParameter("protocol", "%");
    } else if (type.equals("tag")) {
      // NOTE(review): result is computed but never used — branch looks unfinished.
      Collection<EnvObjectLogic> objs = getApi().things().findAll();
    } else if (type.startsWith("prot")) {
      // NOTE(review): result is computed but never used — branch looks unfinished.
      Collection<EnvObjectLogic> objs = getApi().things().findByProtocol(id);
    } else if (type.equals("room")) {
      // NOTE(review): intentionally empty? No handling for "room" filters.
    } else if (type.startsWith("env")) {
      // NOTE(review): result is computed but never used — branch looks unfinished.
      Collection<EnvObjectLogic> objs = getApi().things().findByEnvironment(id);
    }
    // NOTE(review): q is only assigned in the "obj" branch; for every other
    // filter type this dereferences null — confirm callers only pass "obj".
    UsageDataFrame df = new UsageDataFrame(UsageDataFrame.FULL_UPDATE, q.getResultList());
    LOG.info(df.toString());
    return df;
  } else {
    LOG.warn("Harvester cannot extract data if it misses FilterType or FilterID properties");
    return null;
  }
}
670
/**
 * Creates (or reuses) the outbox app directory and removes any stale
 * compile directories left from previous runs.
 *
 * @param dir    the app directory to create/clean
 * @param logger logger for non-fatal cleanup problems
 * @return an OutboxDir wrapping {@code dir}
 * @throws IOException if the directory cannot be created or listed
 */
static OutboxDir create(File dir, TreeLogger logger) throws IOException {
  if (!dir.isDirectory() && !dir.mkdir()) {
    throw new IOException("can't create app directory: " + dir);
  }
  File[] children = dir.listFiles();
  if (children == null) {
    throw new IOException("unable to list files in " + dir);
  }
  for (File candidate : children) {
    if (candidate.getName().startsWith(COMPILE_DIR_PREFIX)) {
      Util.recursiveDelete(candidate, false);
      if (candidate.exists()) {
        // Cleanup is best-effort; the empty branch here previously hid
        // leftover directories — surface them via the provided logger.
        logger.log(TreeLogger.Type.WARN, "unable to delete '" + candidate + "' (skipped)");
      }
    }
  }
  return new OutboxDir(dir);
}
/**
 * Creates (or reuses) the outbox app directory, best-effort-deleting stale
 * compile directories and warning about any that survive deletion.
 */
static OutboxDir create(File dir, TreeLogger logger) throws IOException {
  if (!dir.isDirectory() && !dir.mkdir()) {
    throw new IOException("can't create app directory: " + dir);
  }
  File[] entries = dir.listFiles();
  if (entries == null) {
    throw new IOException("unable to list files in " + dir);
  }
  for (File entry : entries) {
    if (!entry.getName().startsWith(COMPILE_DIR_PREFIX)) {
      continue;
    }
    Util.recursiveDelete(entry, false);
    if (entry.exists()) {
      logger.log(Type.WARN, "unable to delete '" + entry + "' (skipped)");
    }
  }
  return new OutboxDir(dir);
}
671
/**
 * Deletes the repository for the entity type when it exists and the caller
 * has DELETE_METADATA permission; otherwise the request is silently skipped.
 *
 * @param entityTypeId id of the entity type whose repository may be deleted
 */
private void tryDeleteRepository(String entityTypeId) {
  if (dataService.hasRepository(entityTypeId)
      && permissionService.hasPermission(new EntityTypeIdentity(entityTypeId), EntityTypePermission.DELETE_METADATA)) {
    runAsSystem(() -> deleteRepository(entityTypeId));
  }
  // The original empty else-branch is removed: a missing repository or a
  // missing permission simply means there is nothing to do.
}
/**
 * Deletes the repository for the entity type when it exists and the caller
 * holds DELETE_METADATA permission; otherwise logs that it cannot be deleted.
 */
private void tryDeleteRepository(String entityTypeId) {
  boolean deletable = dataService.hasRepository(entityTypeId)
      && permissionService.hasPermission(new EntityTypeIdentity(entityTypeId), EntityTypePermission.DELETE_METADATA);
  if (!deletable) {
    LOG.info("Unable to delete repository {}", entityTypeId);
    return;
  }
  runAsSystem(() -> deleteRepository(entityTypeId));
}
672
/**
 * Consumes one HTTP chunk of an Avro-encoded payload array, buffering bytes
 * across calls and dispatching complete payloads to the processor. The
 * mark/reset protocol lets a partially received payload be re-read on the
 * next chunk.
 *
 * @param request   incoming chunk (retained into the composite buffer)
 * @param responder used to report fatal decoding failures
 */
public void chunk(ByteBuf request, HttpResponder responder) {
  buffer.discardReadComponents();
  buffer.addComponent(true, request.retain());
  inputStream.setDelegate(new ByteBufInputStream(buffer));
  try {
    try {
      while (inputStream.available() > 0) {
        // items < 0 means we are between arrays; read the next array header.
        if (items < 0) {
          inputStream.mark(buffer.readableBytes());
          items = decoder.readArrayStart();
        }
        while (items > 0) {
          // Mark before each element so an incomplete element can be replayed.
          inputStream.mark(buffer.readableBytes());
          long len = decoder.readLong();
          try {
            if (inputStream.available() < len) {
              // Not enough bytes yet for this payload; wait for the next chunk.
              return;
            }
          } finally {
            // Rewind to the element start so readBytes sees the length again.
            inputStream.reset();
          }
          payload = decoder.readBytes(payload);
          payloads.add(Bytes.toBytes(payload));
          items--;
        }
        if (!payloads.isEmpty()) {
          try {
            payloadProcessor.process(payloads.iterator());
            payloads.clear();
          } catch (IOException e) {
            // NOTE(review): processor failure is swallowed; payloads stay queued,
            // presumably to be retried on a later chunk — confirm intent.
          }
        }
        inputStream.mark(buffer.readableBytes());
        items = decoder.arrayNext();
      }
    } catch (EOFException e) {
      // Ran off the end of the buffered bytes mid-value; rewind and wait for more.
      inputStream.reset();
    }
  } catch (IOException | BadRequestException | AccessException e) {
    responder.sendString(HttpResponseStatus.BAD_REQUEST, "Failed to process request due to exception " + e.getMessage());
    throw new RuntimeException(e);
  }
}
public void chunk ( ByteBuf request , HttpResponder responder ) { buffer . discardReadComponents ( ) ; buffer . addComponent ( true , request . retain ( ) ) ; inputStream . setDelegate ( new ByteBufInputStream ( buffer ) ) ; try { try { while ( inputStream . available ( ) > 0 ) { if ( items < 0 ) { inputStream . mark ( buffer . readableBytes ( ) ) ; items = decoder . readArrayStart ( ) ; } while ( items > 0 ) { inputStream . mark ( buffer . readableBytes ( ) ) ; long len = decoder . readLong ( ) ; try { if ( inputStream . available ( ) < len ) { return ; } } finally { inputStream . reset ( ) ; } payload = decoder . readBytes ( payload ) ; payloads . add ( Bytes . toBytes ( payload ) ) ; items -- ; } if ( ! payloads . isEmpty ( ) ) { try { payloadProcessor . process ( payloads . iterator ( ) ) ; payloads . clear ( ) ; } catch ( IOException e ) { LOG . debug ( "Failed to process payload for topic {}. Will be retried" , topicId , e ) ; } } inputStream . mark ( buffer . readableBytes ( ) ) ; items = decoder . arrayNext ( ) ; } } catch ( EOFException e ) { inputStream . reset ( ) ; } } catch ( IOException | BadRequestException | AccessException e ) { responder . sendString ( HttpResponseStatus . BAD_REQUEST , "Failed to process request due to exception " + e . getMessage ( ) ) ; throw new RuntimeException ( e ) ; } }
673
public GameChatResponse handle ( String command , OsuApiUser apiUser , UserData userData ) throws UserException , IOException , SQLException , InterruptedException { if ( getLevenshteinDistance ( command . toLowerCase ( ) . substring ( 0 , Math . min ( "complain" . length ( ) , command . length ( ) ) ) , "complain" ) <= 2 ) { Recommendation lastRecommendation = manager . getLastRecommendation ( apiUser . getUserId ( ) ) ; if ( lastRecommendation != null && lastRecommendation . beatmap != null ) { return new Success ( userData . getLanguage ( ) . complaint ( ) ) ; } } return null ; }
public GameChatResponse handle ( String command , OsuApiUser apiUser , UserData userData ) throws UserException , IOException , SQLException , InterruptedException { if ( getLevenshteinDistance ( command . toLowerCase ( ) . substring ( 0 , Math . min ( "complain" . length ( ) , command . length ( ) ) ) , "complain" ) <= 2 ) { Recommendation lastRecommendation = manager . getLastRecommendation ( apiUser . getUserId ( ) ) ; if ( lastRecommendation != null && lastRecommendation . beatmap != null ) { log . debug ( "COMPLAINT: " + lastRecommendation . beatmap . getBeatmap ( ) . getBeatmapId ( ) + " mods: " + lastRecommendation . bareRecommendation . getMods ( ) + ". Recommendation source: " + Arrays . asList ( ArrayUtils . toObject ( lastRecommendation . bareRecommendation . getCauses ( ) ) ) ) ; return new Success ( userData . getLanguage ( ) . complaint ( ) ) ; } } return null ; }
674
public ExpandoRow findByT_C ( long tableId , long classPK ) throws NoSuchRowException { ExpandoRow expandoRow = fetchByT_C ( tableId , classPK ) ; if ( expandoRow == null ) { StringBundler sb = new StringBundler ( 6 ) ; sb . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; sb . append ( "tableId=" ) ; sb . append ( tableId ) ; sb . append ( ", classPK=" ) ; sb . append ( classPK ) ; sb . append ( "}" ) ; if ( _log . isDebugEnabled ( ) ) { } throw new NoSuchRowException ( sb . toString ( ) ) ; } return expandoRow ; }
public ExpandoRow findByT_C ( long tableId , long classPK ) throws NoSuchRowException { ExpandoRow expandoRow = fetchByT_C ( tableId , classPK ) ; if ( expandoRow == null ) { StringBundler sb = new StringBundler ( 6 ) ; sb . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; sb . append ( "tableId=" ) ; sb . append ( tableId ) ; sb . append ( ", classPK=" ) ; sb . append ( classPK ) ; sb . append ( "}" ) ; if ( _log . isDebugEnabled ( ) ) { _log . debug ( sb . toString ( ) ) ; } throw new NoSuchRowException ( sb . toString ( ) ) ; } return expandoRow ; }
675
public static int getAvailablePort ( ) { try ( ServerSocket socket = new ServerSocket ( 0 ) ) { int port = socket . getLocalPort ( ) ; if ( log . isDebugEnabled ( ) ) { } return port ; } catch ( IOException e ) { throw new JRRuntimeException ( e ) ; } }
public static int getAvailablePort ( ) { try ( ServerSocket socket = new ServerSocket ( 0 ) ) { int port = socket . getLocalPort ( ) ; if ( log . isDebugEnabled ( ) ) { log . debug ( "found available port " + port ) ; } return port ; } catch ( IOException e ) { throw new JRRuntimeException ( e ) ; } }
676
private ConnectionContainer addConnection ( final NodeID source , final int sourcePort , final NodeID dest , final int destPort , final boolean currentlyLoadingFlow ) { assert source != null ; assert dest != null ; assert sourcePort >= 0 ; assert destPort >= 0 ; ConnectionContainer newConn = null ; ConnectionType newConnType = null ; NodeContainer sourceNC ; NodeContainer destNC ; try ( WorkflowLock lock = lock ( ) ) { if ( ! canAddConnection ( source , sourcePort , dest , destPort , true , currentlyLoadingFlow ) ) { throw new IllegalArgumentException ( "Cannot add connection!" ) ; } Set < ConnectionContainer > scc = m_workflow . getConnectionsByDest ( dest ) ; ConnectionContainer removeCCfirst = null ; for ( ConnectionContainer cc : scc ) { if ( cc . getDestPort ( ) == destPort ) { removeCCfirst = cc ; } } if ( removeCCfirst != null ) { removeConnection ( removeCCfirst ) ; } sourceNC = m_workflow . getNode ( source ) ; destNC = m_workflow . getNode ( dest ) ; boolean isFlowVariablePortConnection = false ; if ( ( sourceNC == null ) && ( destNC == null ) ) { newConnType = ConnectionType . WFMTHROUGH ; } else if ( sourceNC == null ) { newConnType = ConnectionType . WFMIN ; isFlowVariablePortConnection = destNC . getInPort ( destPort ) . getPortType ( ) . equals ( FlowVariablePortObject . TYPE ) ; } else if ( destNC == null ) { newConnType = ConnectionType . WFMOUT ; isFlowVariablePortConnection = sourceNC . getOutPort ( sourcePort ) . getPortType ( ) . equals ( FlowVariablePortObject . TYPE ) ; } else { newConnType = ConnectionType . STD ; isFlowVariablePortConnection = sourceNC . getOutPort ( sourcePort ) . getPortType ( ) . equals ( FlowVariablePortObject . TYPE ) ; } newConn = new ConnectionContainer ( source , sourcePort , dest , destPort , newConnType , isFlowVariablePortConnection ) ; addConnection ( newConn ) ; if ( ! currentlyLoadingFlow ) { if ( newConn . getType ( ) . isLeavingWorkflow ( ) ) { assert ! m_workflow . containsNodeKey ( dest ) ; getParent ( ) . 
configureNodeAndSuccessors ( dest , false ) ; lock . queueCheckForNodeStateChangeNotification ( true ) ; } else if ( destNC instanceof WorkflowManager ) { WorkflowManager destWFM = ( WorkflowManager ) destNC ; destWFM . configureNodesConnectedToPortInWFM ( Collections . singleton ( destPort ) ) ; Set < Integer > outPorts = destWFM . getWorkflow ( ) . connectedOutPorts ( destPort ) ; configureNodeAndPortSuccessors ( dest , outPorts , false , true , true ) ; } else { assert m_workflow . containsNodeKey ( dest ) ; resetAndConfigureNode ( dest ) ; } } } notifyWorkflowListeners ( new WorkflowEvent ( WorkflowEvent . Type . CONNECTION_ADDED , null , null , newConn ) ) ; return newConn ; }
private ConnectionContainer addConnection ( final NodeID source , final int sourcePort , final NodeID dest , final int destPort , final boolean currentlyLoadingFlow ) { assert source != null ; assert dest != null ; assert sourcePort >= 0 ; assert destPort >= 0 ; ConnectionContainer newConn = null ; ConnectionType newConnType = null ; NodeContainer sourceNC ; NodeContainer destNC ; try ( WorkflowLock lock = lock ( ) ) { if ( ! canAddConnection ( source , sourcePort , dest , destPort , true , currentlyLoadingFlow ) ) { throw new IllegalArgumentException ( "Cannot add connection!" ) ; } Set < ConnectionContainer > scc = m_workflow . getConnectionsByDest ( dest ) ; ConnectionContainer removeCCfirst = null ; for ( ConnectionContainer cc : scc ) { if ( cc . getDestPort ( ) == destPort ) { removeCCfirst = cc ; } } if ( removeCCfirst != null ) { removeConnection ( removeCCfirst ) ; } sourceNC = m_workflow . getNode ( source ) ; destNC = m_workflow . getNode ( dest ) ; boolean isFlowVariablePortConnection = false ; if ( ( sourceNC == null ) && ( destNC == null ) ) { newConnType = ConnectionType . WFMTHROUGH ; } else if ( sourceNC == null ) { newConnType = ConnectionType . WFMIN ; isFlowVariablePortConnection = destNC . getInPort ( destPort ) . getPortType ( ) . equals ( FlowVariablePortObject . TYPE ) ; } else if ( destNC == null ) { newConnType = ConnectionType . WFMOUT ; isFlowVariablePortConnection = sourceNC . getOutPort ( sourcePort ) . getPortType ( ) . equals ( FlowVariablePortObject . TYPE ) ; } else { newConnType = ConnectionType . STD ; isFlowVariablePortConnection = sourceNC . getOutPort ( sourcePort ) . getPortType ( ) . equals ( FlowVariablePortObject . TYPE ) ; } newConn = new ConnectionContainer ( source , sourcePort , dest , destPort , newConnType , isFlowVariablePortConnection ) ; addConnection ( newConn ) ; if ( ! currentlyLoadingFlow ) { if ( newConn . getType ( ) . isLeavingWorkflow ( ) ) { assert ! m_workflow . containsNodeKey ( dest ) ; getParent ( ) . 
configureNodeAndSuccessors ( dest , false ) ; lock . queueCheckForNodeStateChangeNotification ( true ) ; } else if ( destNC instanceof WorkflowManager ) { WorkflowManager destWFM = ( WorkflowManager ) destNC ; destWFM . configureNodesConnectedToPortInWFM ( Collections . singleton ( destPort ) ) ; Set < Integer > outPorts = destWFM . getWorkflow ( ) . connectedOutPorts ( destPort ) ; configureNodeAndPortSuccessors ( dest , outPorts , false , true , true ) ; } else { assert m_workflow . containsNodeKey ( dest ) ; resetAndConfigureNode ( dest ) ; } } } notifyWorkflowListeners ( new WorkflowEvent ( WorkflowEvent . Type . CONNECTION_ADDED , null , null , newConn ) ) ; LOGGER . debug ( "Added new connection from node " + source + "(" + sourcePort + ")" + " to node " + dest + "(" + destPort + ")" ) ; return newConn ; }
677
public void updateCheckpointJob ( String jobId , List < AbstractExecutable > subTasksForCheck ) { try { jobId = jobId . replaceAll ( "[./]" , "" ) ; final ExecutablePO job = executableDao . getJob ( jobId ) ; Preconditions . checkArgument ( job != null , "there is no related job for job id:" + jobId ) ; List < ExecutablePO > tasksForCheck = Lists . newArrayListWithExpectedSize ( subTasksForCheck . size ( ) ) ; for ( AbstractExecutable taskForCheck : subTasksForCheck ) { tasksForCheck . add ( parse ( taskForCheck ) ) ; } job . setTasksForCheck ( tasksForCheck ) ; executableDao . updateJob ( job ) ; } catch ( PersistentException e ) { throw new RuntimeException ( e ) ; } }
public void updateCheckpointJob ( String jobId , List < AbstractExecutable > subTasksForCheck ) { try { jobId = jobId . replaceAll ( "[./]" , "" ) ; final ExecutablePO job = executableDao . getJob ( jobId ) ; Preconditions . checkArgument ( job != null , "there is no related job for job id:" + jobId ) ; List < ExecutablePO > tasksForCheck = Lists . newArrayListWithExpectedSize ( subTasksForCheck . size ( ) ) ; for ( AbstractExecutable taskForCheck : subTasksForCheck ) { tasksForCheck . add ( parse ( taskForCheck ) ) ; } job . setTasksForCheck ( tasksForCheck ) ; executableDao . updateJob ( job ) ; } catch ( PersistentException e ) { logger . error ( "fail to update checkpoint job:" + jobId , e ) ; throw new RuntimeException ( e ) ; } }
678
private int processFileStatuses ( HdfsInfo info , FileStatus [ ] fileStatuses ) { final AtomicInteger totalMessageCount = new AtomicInteger ( ) ; List < HdfsInputStream > hdfsFiles = Arrays . stream ( fileStatuses ) . filter ( status -> normalFileIsDirectoryHasSuccessFile ( status , info ) ) . filter ( this :: hasMatchingOwner ) . limit ( endpointConfig . getMaxMessagesPerPoll ( ) ) . map ( this :: asHdfsFile ) . filter ( Objects :: nonNull ) . collect ( Collectors . toList ( ) ) ; for ( int i = 0 ; i < hdfsFiles . size ( ) ; i ++ ) { HdfsInputStream hdfsFile = hdfsFiles . get ( i ) ; try { int messageCount = processHdfsInputStream ( hdfsFile , totalMessageCount ) ; LOG . debug ( "Processed [{}] files out of [{}]." , i , hdfsFiles . size ( ) ) ; LOG . debug ( "File [{}] was split to [{}] messages." , i , messageCount ) ; } finally { IOHelper . close ( hdfsFile , "hdfs file" , LOG ) ; } } return totalMessageCount . get ( ) ; }
private int processFileStatuses ( HdfsInfo info , FileStatus [ ] fileStatuses ) { final AtomicInteger totalMessageCount = new AtomicInteger ( ) ; List < HdfsInputStream > hdfsFiles = Arrays . stream ( fileStatuses ) . filter ( status -> normalFileIsDirectoryHasSuccessFile ( status , info ) ) . filter ( this :: hasMatchingOwner ) . limit ( endpointConfig . getMaxMessagesPerPoll ( ) ) . map ( this :: asHdfsFile ) . filter ( Objects :: nonNull ) . collect ( Collectors . toList ( ) ) ; LOG . info ( "Processing [{}] valid files out of [{}] available." , hdfsFiles . size ( ) , fileStatuses . length ) ; for ( int i = 0 ; i < hdfsFiles . size ( ) ; i ++ ) { HdfsInputStream hdfsFile = hdfsFiles . get ( i ) ; try { int messageCount = processHdfsInputStream ( hdfsFile , totalMessageCount ) ; LOG . debug ( "Processed [{}] files out of [{}]." , i , hdfsFiles . size ( ) ) ; LOG . debug ( "File [{}] was split to [{}] messages." , i , messageCount ) ; } finally { IOHelper . close ( hdfsFile , "hdfs file" , LOG ) ; } } return totalMessageCount . get ( ) ; }
679
private int processFileStatuses ( HdfsInfo info , FileStatus [ ] fileStatuses ) { final AtomicInteger totalMessageCount = new AtomicInteger ( ) ; List < HdfsInputStream > hdfsFiles = Arrays . stream ( fileStatuses ) . filter ( status -> normalFileIsDirectoryHasSuccessFile ( status , info ) ) . filter ( this :: hasMatchingOwner ) . limit ( endpointConfig . getMaxMessagesPerPoll ( ) ) . map ( this :: asHdfsFile ) . filter ( Objects :: nonNull ) . collect ( Collectors . toList ( ) ) ; LOG . info ( "Processing [{}] valid files out of [{}] available." , hdfsFiles . size ( ) , fileStatuses . length ) ; for ( int i = 0 ; i < hdfsFiles . size ( ) ; i ++ ) { HdfsInputStream hdfsFile = hdfsFiles . get ( i ) ; try { int messageCount = processHdfsInputStream ( hdfsFile , totalMessageCount ) ; LOG . debug ( "File [{}] was split to [{}] messages." , i , messageCount ) ; } finally { IOHelper . close ( hdfsFile , "hdfs file" , LOG ) ; } } return totalMessageCount . get ( ) ; }
private int processFileStatuses ( HdfsInfo info , FileStatus [ ] fileStatuses ) { final AtomicInteger totalMessageCount = new AtomicInteger ( ) ; List < HdfsInputStream > hdfsFiles = Arrays . stream ( fileStatuses ) . filter ( status -> normalFileIsDirectoryHasSuccessFile ( status , info ) ) . filter ( this :: hasMatchingOwner ) . limit ( endpointConfig . getMaxMessagesPerPoll ( ) ) . map ( this :: asHdfsFile ) . filter ( Objects :: nonNull ) . collect ( Collectors . toList ( ) ) ; LOG . info ( "Processing [{}] valid files out of [{}] available." , hdfsFiles . size ( ) , fileStatuses . length ) ; for ( int i = 0 ; i < hdfsFiles . size ( ) ; i ++ ) { HdfsInputStream hdfsFile = hdfsFiles . get ( i ) ; try { int messageCount = processHdfsInputStream ( hdfsFile , totalMessageCount ) ; LOG . debug ( "Processed [{}] files out of [{}]." , i , hdfsFiles . size ( ) ) ; LOG . debug ( "File [{}] was split to [{}] messages." , i , messageCount ) ; } finally { IOHelper . close ( hdfsFile , "hdfs file" , LOG ) ; } } return totalMessageCount . get ( ) ; }
680
private int processFileStatuses ( HdfsInfo info , FileStatus [ ] fileStatuses ) { final AtomicInteger totalMessageCount = new AtomicInteger ( ) ; List < HdfsInputStream > hdfsFiles = Arrays . stream ( fileStatuses ) . filter ( status -> normalFileIsDirectoryHasSuccessFile ( status , info ) ) . filter ( this :: hasMatchingOwner ) . limit ( endpointConfig . getMaxMessagesPerPoll ( ) ) . map ( this :: asHdfsFile ) . filter ( Objects :: nonNull ) . collect ( Collectors . toList ( ) ) ; LOG . info ( "Processing [{}] valid files out of [{}] available." , hdfsFiles . size ( ) , fileStatuses . length ) ; for ( int i = 0 ; i < hdfsFiles . size ( ) ; i ++ ) { HdfsInputStream hdfsFile = hdfsFiles . get ( i ) ; try { int messageCount = processHdfsInputStream ( hdfsFile , totalMessageCount ) ; LOG . debug ( "Processed [{}] files out of [{}]." , i , hdfsFiles . size ( ) ) ; } finally { IOHelper . close ( hdfsFile , "hdfs file" , LOG ) ; } } return totalMessageCount . get ( ) ; }
private int processFileStatuses ( HdfsInfo info , FileStatus [ ] fileStatuses ) { final AtomicInteger totalMessageCount = new AtomicInteger ( ) ; List < HdfsInputStream > hdfsFiles = Arrays . stream ( fileStatuses ) . filter ( status -> normalFileIsDirectoryHasSuccessFile ( status , info ) ) . filter ( this :: hasMatchingOwner ) . limit ( endpointConfig . getMaxMessagesPerPoll ( ) ) . map ( this :: asHdfsFile ) . filter ( Objects :: nonNull ) . collect ( Collectors . toList ( ) ) ; LOG . info ( "Processing [{}] valid files out of [{}] available." , hdfsFiles . size ( ) , fileStatuses . length ) ; for ( int i = 0 ; i < hdfsFiles . size ( ) ; i ++ ) { HdfsInputStream hdfsFile = hdfsFiles . get ( i ) ; try { int messageCount = processHdfsInputStream ( hdfsFile , totalMessageCount ) ; LOG . debug ( "Processed [{}] files out of [{}]." , i , hdfsFiles . size ( ) ) ; LOG . debug ( "File [{}] was split to [{}] messages." , i , messageCount ) ; } finally { IOHelper . close ( hdfsFile , "hdfs file" , LOG ) ; } } return totalMessageCount . get ( ) ; }
681
public void onEvent ( Map < String , Object > heartbeat ) { String key = ( String ) heartbeat . get ( "name" ) ; services . put ( key , heartbeat ) ; }
public void onEvent ( Map < String , Object > heartbeat ) { String key = ( String ) heartbeat . get ( "name" ) ; LOG . debug ( ">>> event for: " + key + " details: " + heartbeat ) ; services . put ( key , heartbeat ) ; }
682
public Gadget execute ( final PrincipalActionContext inActionContext ) throws ExecutionException { SetGadgetStateRequest currentRequest = ( SetGadgetStateRequest ) inActionContext . getParams ( ) ; if ( logger . isDebugEnabled ( ) ) { } Gadget gadget = gadgetMapper . findById ( currentRequest . getGadgetId ( ) ) ; if ( null == gadget ) { throw new ExecutionException ( "Failed to set minimized state for gadget. Gadget id " + currentRequest . getGadgetId ( ) + " not found" ) ; } gadget . setMinimized ( currentRequest . isMinimized ( ) ) ; gadget . setMaximized ( currentRequest . isMaximized ( ) ) ; gadgetMapper . flush ( ) ; deleteKeysMapper . execute ( Collections . singleton ( CacheKeys . PERSON_PAGE_PROPERTIES_BY_ID + inActionContext . getPrincipal ( ) . getId ( ) ) ) ; return gadget ; }
public Gadget execute ( final PrincipalActionContext inActionContext ) throws ExecutionException { SetGadgetStateRequest currentRequest = ( SetGadgetStateRequest ) inActionContext . getParams ( ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( "Calling Minimize on Gadget: " + currentRequest . getGadgetId ( ) + ". Setting minimize to: " + currentRequest . isMinimized ( ) ) ; } Gadget gadget = gadgetMapper . findById ( currentRequest . getGadgetId ( ) ) ; if ( null == gadget ) { throw new ExecutionException ( "Failed to set minimized state for gadget. Gadget id " + currentRequest . getGadgetId ( ) + " not found" ) ; } gadget . setMinimized ( currentRequest . isMinimized ( ) ) ; gadget . setMaximized ( currentRequest . isMaximized ( ) ) ; gadgetMapper . flush ( ) ; deleteKeysMapper . execute ( Collections . singleton ( CacheKeys . PERSON_PAGE_PROPERTIES_BY_ID + inActionContext . getPrincipal ( ) . getId ( ) ) ) ; return gadget ; }
683
@ Path ( "/latest/final" ) @ GET public Response getLatestBundleFinal ( ) { String latestBundleId ; if ( isTdm ( ) ) { latestBundleId = _tdmBundleDeployer . getLatestBundleId ( ) ; } else { latestBundleId = _localBundleDeployer . getLatestBundleId ( ) ; } if ( latestBundleId == null ) { return Response . serverError ( ) . build ( ) ; } return _localBundleArchiver . getArchiveBundleById ( latestBundleId , "/final/" ) ; }
@ Path ( "/latest/final" ) @ GET public Response getLatestBundleFinal ( ) { String latestBundleId ; if ( isTdm ( ) ) { latestBundleId = _tdmBundleDeployer . getLatestBundleId ( ) ; } else { latestBundleId = _localBundleDeployer . getLatestBundleId ( ) ; } if ( latestBundleId == null ) { _log . error ( "no latest bundle found" ) ; return Response . serverError ( ) . build ( ) ; } return _localBundleArchiver . getArchiveBundleById ( latestBundleId , "/final/" ) ; }
684
public MessageMarshaller < T > getMarshaller ( ) throws IllegalAccessException , InstantiationException { if ( marshaller != null ) { if ( MessageMarshaller . class . isAssignableFrom ( marshaller ) ) { throw new IllegalArgumentException ( marshaller . getName ( ) + " does not inherit from MessageMarshaller" ) ; } return marshaller . newInstance ( ) ; } for ( Class inner : clazz . getClasses ( ) ) { if ( ! "Marshaller" . equals ( inner . getSimpleName ( ) ) ) { } else if ( ! MessageMarshaller . class . isAssignableFrom ( inner ) ) { log . trace ( inner . getName ( ) + " does not inherit from MessageMarshaller" ) ; } else if ( ! Modifier . isStatic ( inner . getModifiers ( ) ) ) { log . trace ( inner . getName ( ) + " is not static class" ) ; } else { return ( MessageMarshaller < T > ) inner . newInstance ( ) ; } } throw new IllegalStateException ( "No marshaller class" ) ; }
public MessageMarshaller < T > getMarshaller ( ) throws IllegalAccessException , InstantiationException { if ( marshaller != null ) { if ( MessageMarshaller . class . isAssignableFrom ( marshaller ) ) { throw new IllegalArgumentException ( marshaller . getName ( ) + " does not inherit from MessageMarshaller" ) ; } return marshaller . newInstance ( ) ; } for ( Class inner : clazz . getClasses ( ) ) { if ( ! "Marshaller" . equals ( inner . getSimpleName ( ) ) ) { log . trace ( inner . getName ( ) + " is not called Marshaller" ) ; } else if ( ! MessageMarshaller . class . isAssignableFrom ( inner ) ) { log . trace ( inner . getName ( ) + " does not inherit from MessageMarshaller" ) ; } else if ( ! Modifier . isStatic ( inner . getModifiers ( ) ) ) { log . trace ( inner . getName ( ) + " is not static class" ) ; } else { return ( MessageMarshaller < T > ) inner . newInstance ( ) ; } } throw new IllegalStateException ( "No marshaller class" ) ; }
685
public MessageMarshaller < T > getMarshaller ( ) throws IllegalAccessException , InstantiationException { if ( marshaller != null ) { if ( MessageMarshaller . class . isAssignableFrom ( marshaller ) ) { throw new IllegalArgumentException ( marshaller . getName ( ) + " does not inherit from MessageMarshaller" ) ; } return marshaller . newInstance ( ) ; } for ( Class inner : clazz . getClasses ( ) ) { if ( ! "Marshaller" . equals ( inner . getSimpleName ( ) ) ) { log . trace ( inner . getName ( ) + " is not called Marshaller" ) ; } else if ( ! MessageMarshaller . class . isAssignableFrom ( inner ) ) { } else if ( ! Modifier . isStatic ( inner . getModifiers ( ) ) ) { log . trace ( inner . getName ( ) + " is not static class" ) ; } else { return ( MessageMarshaller < T > ) inner . newInstance ( ) ; } } throw new IllegalStateException ( "No marshaller class" ) ; }
public MessageMarshaller < T > getMarshaller ( ) throws IllegalAccessException , InstantiationException { if ( marshaller != null ) { if ( MessageMarshaller . class . isAssignableFrom ( marshaller ) ) { throw new IllegalArgumentException ( marshaller . getName ( ) + " does not inherit from MessageMarshaller" ) ; } return marshaller . newInstance ( ) ; } for ( Class inner : clazz . getClasses ( ) ) { if ( ! "Marshaller" . equals ( inner . getSimpleName ( ) ) ) { log . trace ( inner . getName ( ) + " is not called Marshaller" ) ; } else if ( ! MessageMarshaller . class . isAssignableFrom ( inner ) ) { log . trace ( inner . getName ( ) + " does not inherit from MessageMarshaller" ) ; } else if ( ! Modifier . isStatic ( inner . getModifiers ( ) ) ) { log . trace ( inner . getName ( ) + " is not static class" ) ; } else { return ( MessageMarshaller < T > ) inner . newInstance ( ) ; } } throw new IllegalStateException ( "No marshaller class" ) ; }
686
public MessageMarshaller < T > getMarshaller ( ) throws IllegalAccessException , InstantiationException { if ( marshaller != null ) { if ( MessageMarshaller . class . isAssignableFrom ( marshaller ) ) { throw new IllegalArgumentException ( marshaller . getName ( ) + " does not inherit from MessageMarshaller" ) ; } return marshaller . newInstance ( ) ; } for ( Class inner : clazz . getClasses ( ) ) { if ( ! "Marshaller" . equals ( inner . getSimpleName ( ) ) ) { log . trace ( inner . getName ( ) + " is not called Marshaller" ) ; } else if ( ! MessageMarshaller . class . isAssignableFrom ( inner ) ) { log . trace ( inner . getName ( ) + " does not inherit from MessageMarshaller" ) ; } else if ( ! Modifier . isStatic ( inner . getModifiers ( ) ) ) { } else { return ( MessageMarshaller < T > ) inner . newInstance ( ) ; } } throw new IllegalStateException ( "No marshaller class" ) ; }
public MessageMarshaller < T > getMarshaller ( ) throws IllegalAccessException , InstantiationException { if ( marshaller != null ) { if ( MessageMarshaller . class . isAssignableFrom ( marshaller ) ) { throw new IllegalArgumentException ( marshaller . getName ( ) + " does not inherit from MessageMarshaller" ) ; } return marshaller . newInstance ( ) ; } for ( Class inner : clazz . getClasses ( ) ) { if ( ! "Marshaller" . equals ( inner . getSimpleName ( ) ) ) { log . trace ( inner . getName ( ) + " is not called Marshaller" ) ; } else if ( ! MessageMarshaller . class . isAssignableFrom ( inner ) ) { log . trace ( inner . getName ( ) + " does not inherit from MessageMarshaller" ) ; } else if ( ! Modifier . isStatic ( inner . getModifiers ( ) ) ) { log . trace ( inner . getName ( ) + " is not static class" ) ; } else { return ( MessageMarshaller < T > ) inner . newInstance ( ) ; } } throw new IllegalStateException ( "No marshaller class" ) ; }
687
public void unassign ( Collection < TabletLocationState > tablets , Map < TServerInstance , List < Path > > logsForDeadServers ) throws DistributedStoreException { if ( tablets . size ( ) != 1 ) throw new IllegalArgumentException ( "There is only one root tablet" ) ; TabletLocationState tls = tablets . iterator ( ) . next ( ) ; if ( tls . extent . compareTo ( RootTable . EXTENT ) != 0 ) throw new IllegalArgumentException ( "You can only store the root tablet location" ) ; TabletMutator tabletMutator = ample . mutateTablet ( tls . extent ) ; tabletMutator . deleteLocation ( tls . futureOrCurrent ( ) , LocationType . FUTURE ) ; tabletMutator . deleteLocation ( tls . futureOrCurrent ( ) , LocationType . CURRENT ) ; if ( logsForDeadServers != null ) { List < Path > logs = logsForDeadServers . get ( tls . futureOrCurrent ( ) ) ; if ( logs != null ) { for ( Path entry : logs ) { LogEntry logEntry = new LogEntry ( RootTable . EXTENT , System . currentTimeMillis ( ) , entry . toString ( ) ) ; tabletMutator . putWal ( logEntry ) ; } } } tabletMutator . mutate ( ) ; }
public void unassign ( Collection < TabletLocationState > tablets , Map < TServerInstance , List < Path > > logsForDeadServers ) throws DistributedStoreException { if ( tablets . size ( ) != 1 ) throw new IllegalArgumentException ( "There is only one root tablet" ) ; TabletLocationState tls = tablets . iterator ( ) . next ( ) ; if ( tls . extent . compareTo ( RootTable . EXTENT ) != 0 ) throw new IllegalArgumentException ( "You can only store the root tablet location" ) ; TabletMutator tabletMutator = ample . mutateTablet ( tls . extent ) ; tabletMutator . deleteLocation ( tls . futureOrCurrent ( ) , LocationType . FUTURE ) ; tabletMutator . deleteLocation ( tls . futureOrCurrent ( ) , LocationType . CURRENT ) ; if ( logsForDeadServers != null ) { List < Path > logs = logsForDeadServers . get ( tls . futureOrCurrent ( ) ) ; if ( logs != null ) { for ( Path entry : logs ) { LogEntry logEntry = new LogEntry ( RootTable . EXTENT , System . currentTimeMillis ( ) , entry . toString ( ) ) ; tabletMutator . putWal ( logEntry ) ; } } } tabletMutator . mutate ( ) ; log . debug ( "unassign root tablet location" ) ; }
688
private boolean removeFromLaunchedQueries ( final QueryContext finishedQuery ) { removalFromLaunchedQueriesLock . lock ( ) ; boolean modified = false ; try { modified = this . launchedQueries . remove ( finishedQuery ) ; } finally { removalFromLaunchedQueriesLock . unlock ( ) ; } log . debug ( "launchedQueries.remove(finishedQuery) has returned [{}] for finished query with query id:[{}]" , modified , finishedQuery . getQueryHandleString ( ) ) ; return modified ; }
private boolean removeFromLaunchedQueries ( final QueryContext finishedQuery ) { log . debug ( "Acquiring lock in removeFromLaunchedQueries" ) ; removalFromLaunchedQueriesLock . lock ( ) ; boolean modified = false ; try { modified = this . launchedQueries . remove ( finishedQuery ) ; } finally { removalFromLaunchedQueriesLock . unlock ( ) ; } log . debug ( "launchedQueries.remove(finishedQuery) has returned [{}] for finished query with query id:[{}]" , modified , finishedQuery . getQueryHandleString ( ) ) ; return modified ; }
689
private boolean removeFromLaunchedQueries ( final QueryContext finishedQuery ) { log . debug ( "Acquiring lock in removeFromLaunchedQueries" ) ; removalFromLaunchedQueriesLock . lock ( ) ; boolean modified = false ; try { modified = this . launchedQueries . remove ( finishedQuery ) ; } finally { removalFromLaunchedQueriesLock . unlock ( ) ; } return modified ; }
private boolean removeFromLaunchedQueries ( final QueryContext finishedQuery ) { log . debug ( "Acquiring lock in removeFromLaunchedQueries" ) ; removalFromLaunchedQueriesLock . lock ( ) ; boolean modified = false ; try { modified = this . launchedQueries . remove ( finishedQuery ) ; } finally { removalFromLaunchedQueriesLock . unlock ( ) ; } log . debug ( "launchedQueries.remove(finishedQuery) has returned [{}] for finished query with query id:[{}]" , modified , finishedQuery . getQueryHandleString ( ) ) ; return modified ; }
690
public void attachClean ( FilterResRol instance ) { try { sessionFactory . getCurrentSession ( ) . lock ( instance , LockMode . NONE ) ; log . debug ( "attach successful" ) ; } catch ( RuntimeException re ) { log . error ( "attach failed" , re ) ; throw re ; } }
public void attachClean ( FilterResRol instance ) { log . debug ( "attaching clean FilterResRol instance" ) ; try { sessionFactory . getCurrentSession ( ) . lock ( instance , LockMode . NONE ) ; log . debug ( "attach successful" ) ; } catch ( RuntimeException re ) { log . error ( "attach failed" , re ) ; throw re ; } }
691
public void attachClean ( FilterResRol instance ) { log . debug ( "attaching clean FilterResRol instance" ) ; try { sessionFactory . getCurrentSession ( ) . lock ( instance , LockMode . NONE ) ; } catch ( RuntimeException re ) { log . error ( "attach failed" , re ) ; throw re ; } }
public void attachClean ( FilterResRol instance ) { log . debug ( "attaching clean FilterResRol instance" ) ; try { sessionFactory . getCurrentSession ( ) . lock ( instance , LockMode . NONE ) ; log . debug ( "attach successful" ) ; } catch ( RuntimeException re ) { log . error ( "attach failed" , re ) ; throw re ; } }
692
public void attachClean ( FilterResRol instance ) { log . debug ( "attaching clean FilterResRol instance" ) ; try { sessionFactory . getCurrentSession ( ) . lock ( instance , LockMode . NONE ) ; log . debug ( "attach successful" ) ; } catch ( RuntimeException re ) { throw re ; } }
public void attachClean ( FilterResRol instance ) { log . debug ( "attaching clean FilterResRol instance" ) ; try { sessionFactory . getCurrentSession ( ) . lock ( instance , LockMode . NONE ) ; log . debug ( "attach successful" ) ; } catch ( RuntimeException re ) { log . error ( "attach failed" , re ) ; throw re ; } }
693
private void quietly ( Statement statement ) { try { statement . execute ( ) ; } catch ( Exception exception ) { } }
private void quietly ( Statement statement ) { try { statement . execute ( ) ; } catch ( Exception exception ) { LOGGER . log ( Level . FINE , "Ignored error." , exception ) ; } }
694
private static String generateDefaultTaskValue ( ) { final String jvmName = ManagementFactory . getRuntimeMXBean ( ) . getName ( ) ; if ( jvmName . indexOf ( '@' ) < 1 ) { String hostname = "localhost" ; try { hostname = InetAddress . getLocalHost ( ) . getHostName ( ) ; } catch ( UnknownHostException e ) { } return "java-" + new SecureRandom ( ) . nextInt ( ) + "@" + hostname ; } return "java-" + jvmName ; }
private static String generateDefaultTaskValue ( ) { final String jvmName = ManagementFactory . getRuntimeMXBean ( ) . getName ( ) ; if ( jvmName . indexOf ( '@' ) < 1 ) { String hostname = "localhost" ; try { hostname = InetAddress . getLocalHost ( ) . getHostName ( ) ; } catch ( UnknownHostException e ) { logger . log ( Level . INFO , "Unable to get the hostname." , e ) ; } return "java-" + new SecureRandom ( ) . nextInt ( ) + "@" + hostname ; } return "java-" + jvmName ; }
695
private StreamCode handleStreamRouteCreate ( TCommandTransfer request , StreamCreatePacket packet , ServerStreamChannel serverStreamChannel ) { byte [ ] payload = request . getPayload ( ) ; TBase < ? , ? > command = deserialize ( payload ) ; if ( command == null ) { return StreamCode . TYPE_UNKNOWN ; } TCommandTransferResponse response = streamRouteHandler . onRoute ( new StreamEvent ( request , serverStreamChannel , command ) ) ; TRouteResult routeResult = response . getRouteResult ( ) ; if ( routeResult != TRouteResult . OK ) { return convertToStreamCode ( routeResult ) ; } return StreamCode . OK ; }
private StreamCode handleStreamRouteCreate ( TCommandTransfer request , StreamCreatePacket packet , ServerStreamChannel serverStreamChannel ) { byte [ ] payload = request . getPayload ( ) ; TBase < ? , ? > command = deserialize ( payload ) ; if ( command == null ) { return StreamCode . TYPE_UNKNOWN ; } TCommandTransferResponse response = streamRouteHandler . onRoute ( new StreamEvent ( request , serverStreamChannel , command ) ) ; TRouteResult routeResult = response . getRouteResult ( ) ; if ( routeResult != TRouteResult . OK ) { logger . warn ( "handleStreamRouteCreate failed. command:{}, routeResult:{}" , command , routeResult ) ; return convertToStreamCode ( routeResult ) ; } return StreamCode . OK ; }
696
private void updateInspector ( CallInspector callInspector , ServiceTunnelResponse serviceRes ) { if ( callInspector != null ) { try { callInspector . update ( ) ; } catch ( RuntimeException e ) { } try { callInspector . close ( serviceRes ) ; } catch ( RuntimeException e ) { LOG . warn ( "Could not close service invocation on call inspector" , e ) ; } try { callInspector . getSessionInspector ( ) . update ( ) ; } catch ( RuntimeException e ) { LOG . warn ( "Could not update session inspector" , e ) ; } } }
private void updateInspector ( CallInspector callInspector , ServiceTunnelResponse serviceRes ) { if ( callInspector != null ) { try { callInspector . update ( ) ; } catch ( RuntimeException e ) { LOG . warn ( "Could not update call inspector" , e ) ; } try { callInspector . close ( serviceRes ) ; } catch ( RuntimeException e ) { LOG . warn ( "Could not close service invocation on call inspector" , e ) ; } try { callInspector . getSessionInspector ( ) . update ( ) ; } catch ( RuntimeException e ) { LOG . warn ( "Could not update session inspector" , e ) ; } } }
697
private void updateInspector ( CallInspector callInspector , ServiceTunnelResponse serviceRes ) { if ( callInspector != null ) { try { callInspector . update ( ) ; } catch ( RuntimeException e ) { LOG . warn ( "Could not update call inspector" , e ) ; } try { callInspector . close ( serviceRes ) ; } catch ( RuntimeException e ) { } try { callInspector . getSessionInspector ( ) . update ( ) ; } catch ( RuntimeException e ) { LOG . warn ( "Could not update session inspector" , e ) ; } } }
private void updateInspector ( CallInspector callInspector , ServiceTunnelResponse serviceRes ) { if ( callInspector != null ) { try { callInspector . update ( ) ; } catch ( RuntimeException e ) { LOG . warn ( "Could not update call inspector" , e ) ; } try { callInspector . close ( serviceRes ) ; } catch ( RuntimeException e ) { LOG . warn ( "Could not close service invocation on call inspector" , e ) ; } try { callInspector . getSessionInspector ( ) . update ( ) ; } catch ( RuntimeException e ) { LOG . warn ( "Could not update session inspector" , e ) ; } } }
698
private void updateInspector ( CallInspector callInspector , ServiceTunnelResponse serviceRes ) { if ( callInspector != null ) { try { callInspector . update ( ) ; } catch ( RuntimeException e ) { LOG . warn ( "Could not update call inspector" , e ) ; } try { callInspector . close ( serviceRes ) ; } catch ( RuntimeException e ) { LOG . warn ( "Could not close service invocation on call inspector" , e ) ; } try { callInspector . getSessionInspector ( ) . update ( ) ; } catch ( RuntimeException e ) { } } }
private void updateInspector ( CallInspector callInspector , ServiceTunnelResponse serviceRes ) { if ( callInspector != null ) { try { callInspector . update ( ) ; } catch ( RuntimeException e ) { LOG . warn ( "Could not update call inspector" , e ) ; } try { callInspector . close ( serviceRes ) ; } catch ( RuntimeException e ) { LOG . warn ( "Could not close service invocation on call inspector" , e ) ; } try { callInspector . getSessionInspector ( ) . update ( ) ; } catch ( RuntimeException e ) { LOG . warn ( "Could not update session inspector" , e ) ; } } }
699
public void handleWrite ( final Widget widget , final Object value ) { final WidgetRuntime < Widget > runtime = getRuntime ( widget ) ; if ( runtime == null ) else runtime . writePrimaryPV ( value ) ; }
public void handleWrite ( final Widget widget , final Object value ) { final WidgetRuntime < Widget > runtime = getRuntime ( widget ) ; if ( runtime == null ) logger . log ( Level . WARNING , "Widget " + widget + " has no runtime for writing " + value ) ; else runtime . writePrimaryPV ( value ) ; }