Dataset preview: 10,000 rows total; this excerpt covers rows 100 through 137. Columns: a row index (int64, ranging 0 to 10k); source, a string of 27 to 7.27k characters; target, a string of 54 to 7.29k characters. Each row is printed below as its index on one line, its source method on the next, and its target method on the last. Every pair contains the same space-tokenized Java method twice: the source has one or more logging statements missing (sometimes leaving an empty catch block or a dangling isDebugEnabled() guard), and the target is the same method with the logging call(s) present. Rows are reproduced verbatim, including typos inside string literals (for example "checing" in rows 111 to 113).
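To make the pair structure concrete, here is row 109 (the shortest pair below) reformatted as idiomatic Java. Only the two method bodies come from the data; the surrounding class, field, and SLF4J logger are a hypothetical wrapper added so the fragment compiles, and one copy is renamed so both methods can coexist.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical wrapper class; only the two method bodies are from row 109.
class SCrunProductIdentification {
    private static final Logger logger =
            LoggerFactory.getLogger(SCrunProductIdentification.class);

    private int reqIndexValue0;

    // source: the setter with its trace call missing (renamed here)
    public SCrunProductIdentification setProductIdSource(int actuatorId) {
        this.reqIndexValue0 = actuatorId;
        return this;
    }

    // target: the same setter, logging an entry trace first
    public SCrunProductIdentification setProductId(int actuatorId) {
        logger.trace("setProductId({}) called.", actuatorId);
        this.reqIndexValue0 = actuatorId;
        return this;
    }
}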
100
public Calendar getIntermediateCalendar ( Interval interval , InputStream in ) { try { CalendarBuilder builder = new CalendarBuilder ( new CalendarParserImpl ( ) ) ; Calendar calendar = builder . build ( in ) ; log . debug ( "calendar built" ) ; return calendar ; } catch ( IOException e ) { throw new CalendarException ( "caught IOException" , e ) ; } catch ( ParserException e ) { throw new CalendarException ( "caught ParserException" , e ) ; } }
public Calendar getIntermediateCalendar ( Interval interval , InputStream in ) { try { log . debug ( "begin getEvents" ) ; CalendarBuilder builder = new CalendarBuilder ( new CalendarParserImpl ( ) ) ; Calendar calendar = builder . build ( in ) ; log . debug ( "calendar built" ) ; return calendar ; } catch ( IOException e ) { throw new CalendarException ( "caught IOException" , e ) ; } catch ( ParserException e ) { throw new CalendarException ( "caught ParserException" , e ) ; } }
101
public Calendar getIntermediateCalendar ( Interval interval , InputStream in ) { try { log . debug ( "begin getEvents" ) ; CalendarBuilder builder = new CalendarBuilder ( new CalendarParserImpl ( ) ) ; Calendar calendar = builder . build ( in ) ; return calendar ; } catch ( IOException e ) { throw new CalendarException ( "caught IOException" , e ) ; } catch ( ParserException e ) { throw new CalendarException ( "caught ParserException" , e ) ; } }
public Calendar getIntermediateCalendar ( Interval interval , InputStream in ) { try { log . debug ( "begin getEvents" ) ; CalendarBuilder builder = new CalendarBuilder ( new CalendarParserImpl ( ) ) ; Calendar calendar = builder . build ( in ) ; log . debug ( "calendar built" ) ; return calendar ; } catch ( IOException e ) { throw new CalendarException ( "caught IOException" , e ) ; } catch ( ParserException e ) { throw new CalendarException ( "caught ParserException" , e ) ; } }
102
protected String getDeprecatedProperty ( ConfigBag conf , String key ) { if ( conf . containsKey ( key ) ) { return ( String ) conf . getStringKey ( key ) ; } else { return null ; } }
protected String getDeprecatedProperty ( ConfigBag conf , String key ) { if ( conf . containsKey ( key ) ) { LOG . warn ( "Jclouds using deprecated brooklyn-jclouds property " + key + ": " + Sanitizer . sanitize ( conf . getAllConfig ( ) ) ) ; return ( String ) conf . getStringKey ( key ) ; } else { return null ; } }
103
public boolean matches ( DeploymentInfo deploymentInfo ) { if ( deploymentInfo == null || ( pluginMatcher == null && methodMatcher == null ) ) { return true ; } if ( pluginMatcher != null && pluginMatcher . isApply ( ) ) { if ( VersionMatchResult . REJECTED . equals ( pluginMatcher . matches ( deploymentInfo ) ) ) { LOGGER . debug ( "Plugin matcher rejected" ) ; return false ; } } if ( methodMatcher != null && methodMatcher . isApply ( ) ) { if ( VersionMatchResult . REJECTED . equals ( methodMatcher . matches ( deploymentInfo ) ) ) { LOGGER . debug ( "Method matcher rejected" ) ; return false ; } } return true ; }
public boolean matches ( DeploymentInfo deploymentInfo ) { if ( deploymentInfo == null || ( pluginMatcher == null && methodMatcher == null ) ) { LOGGER . debug ( "No matchers, apply" ) ; return true ; } if ( pluginMatcher != null && pluginMatcher . isApply ( ) ) { if ( VersionMatchResult . REJECTED . equals ( pluginMatcher . matches ( deploymentInfo ) ) ) { LOGGER . debug ( "Plugin matcher rejected" ) ; return false ; } } if ( methodMatcher != null && methodMatcher . isApply ( ) ) { if ( VersionMatchResult . REJECTED . equals ( methodMatcher . matches ( deploymentInfo ) ) ) { LOGGER . debug ( "Method matcher rejected" ) ; return false ; } } return true ; }
104
public boolean matches ( DeploymentInfo deploymentInfo ) { if ( deploymentInfo == null || ( pluginMatcher == null && methodMatcher == null ) ) { LOGGER . debug ( "No matchers, apply" ) ; return true ; } if ( pluginMatcher != null && pluginMatcher . isApply ( ) ) { if ( VersionMatchResult . REJECTED . equals ( pluginMatcher . matches ( deploymentInfo ) ) ) { return false ; } } if ( methodMatcher != null && methodMatcher . isApply ( ) ) { if ( VersionMatchResult . REJECTED . equals ( methodMatcher . matches ( deploymentInfo ) ) ) { LOGGER . debug ( "Method matcher rejected" ) ; return false ; } } return true ; }
public boolean matches ( DeploymentInfo deploymentInfo ) { if ( deploymentInfo == null || ( pluginMatcher == null && methodMatcher == null ) ) { LOGGER . debug ( "No matchers, apply" ) ; return true ; } if ( pluginMatcher != null && pluginMatcher . isApply ( ) ) { if ( VersionMatchResult . REJECTED . equals ( pluginMatcher . matches ( deploymentInfo ) ) ) { LOGGER . debug ( "Plugin matcher rejected" ) ; return false ; } } if ( methodMatcher != null && methodMatcher . isApply ( ) ) { if ( VersionMatchResult . REJECTED . equals ( methodMatcher . matches ( deploymentInfo ) ) ) { LOGGER . debug ( "Method matcher rejected" ) ; return false ; } } return true ; }
105
public boolean matches ( DeploymentInfo deploymentInfo ) { if ( deploymentInfo == null || ( pluginMatcher == null && methodMatcher == null ) ) { LOGGER . debug ( "No matchers, apply" ) ; return true ; } if ( pluginMatcher != null && pluginMatcher . isApply ( ) ) { if ( VersionMatchResult . REJECTED . equals ( pluginMatcher . matches ( deploymentInfo ) ) ) { LOGGER . debug ( "Plugin matcher rejected" ) ; return false ; } } if ( methodMatcher != null && methodMatcher . isApply ( ) ) { if ( VersionMatchResult . REJECTED . equals ( methodMatcher . matches ( deploymentInfo ) ) ) { return false ; } } return true ; }
public boolean matches ( DeploymentInfo deploymentInfo ) { if ( deploymentInfo == null || ( pluginMatcher == null && methodMatcher == null ) ) { LOGGER . debug ( "No matchers, apply" ) ; return true ; } if ( pluginMatcher != null && pluginMatcher . isApply ( ) ) { if ( VersionMatchResult . REJECTED . equals ( pluginMatcher . matches ( deploymentInfo ) ) ) { LOGGER . debug ( "Plugin matcher rejected" ) ; return false ; } } if ( methodMatcher != null && methodMatcher . isApply ( ) ) { if ( VersionMatchResult . REJECTED . equals ( methodMatcher . matches ( deploymentInfo ) ) ) { LOGGER . debug ( "Method matcher rejected" ) ; return false ; } } return true ; }
106
private void sendRpcRequestToDevice ( ToDeviceRpcRequest msg ) { TopicPartitionInfo tpi = partitionService . resolve ( ServiceType . TB_CORE , msg . getTenantId ( ) , msg . getDeviceId ( ) ) ; ToDeviceRpcRequestActorMsg rpcMsg = new ToDeviceRpcRequestActorMsg ( serviceId , msg ) ; if ( tpi . isMyPartition ( ) ) { if ( tbCoreRpcService . isPresent ( ) ) { tbCoreRpcService . get ( ) . forwardRpcRequestToDeviceActor ( rpcMsg ) ; } else { log . warn ( "Failed to find tbCoreRpcService for local service. Possible duplication of serviceIds." ) ; } } else { log . trace ( "[{}] Forwarding msg {} to queue actor!" , msg . getDeviceId ( ) , msg ) ; clusterService . pushMsgToCore ( rpcMsg , null ) ; } }
private void sendRpcRequestToDevice ( ToDeviceRpcRequest msg ) { TopicPartitionInfo tpi = partitionService . resolve ( ServiceType . TB_CORE , msg . getTenantId ( ) , msg . getDeviceId ( ) ) ; ToDeviceRpcRequestActorMsg rpcMsg = new ToDeviceRpcRequestActorMsg ( serviceId , msg ) ; if ( tpi . isMyPartition ( ) ) { log . trace ( "[{}] Forwarding msg {} to device actor!" , msg . getDeviceId ( ) , msg ) ; if ( tbCoreRpcService . isPresent ( ) ) { tbCoreRpcService . get ( ) . forwardRpcRequestToDeviceActor ( rpcMsg ) ; } else { log . warn ( "Failed to find tbCoreRpcService for local service. Possible duplication of serviceIds." ) ; } } else { log . trace ( "[{}] Forwarding msg {} to queue actor!" , msg . getDeviceId ( ) , msg ) ; clusterService . pushMsgToCore ( rpcMsg , null ) ; } }
107
private void sendRpcRequestToDevice ( ToDeviceRpcRequest msg ) { TopicPartitionInfo tpi = partitionService . resolve ( ServiceType . TB_CORE , msg . getTenantId ( ) , msg . getDeviceId ( ) ) ; ToDeviceRpcRequestActorMsg rpcMsg = new ToDeviceRpcRequestActorMsg ( serviceId , msg ) ; if ( tpi . isMyPartition ( ) ) { log . trace ( "[{}] Forwarding msg {} to device actor!" , msg . getDeviceId ( ) , msg ) ; if ( tbCoreRpcService . isPresent ( ) ) { tbCoreRpcService . get ( ) . forwardRpcRequestToDeviceActor ( rpcMsg ) ; } else { } } else { log . trace ( "[{}] Forwarding msg {} to queue actor!" , msg . getDeviceId ( ) , msg ) ; clusterService . pushMsgToCore ( rpcMsg , null ) ; } }
private void sendRpcRequestToDevice ( ToDeviceRpcRequest msg ) { TopicPartitionInfo tpi = partitionService . resolve ( ServiceType . TB_CORE , msg . getTenantId ( ) , msg . getDeviceId ( ) ) ; ToDeviceRpcRequestActorMsg rpcMsg = new ToDeviceRpcRequestActorMsg ( serviceId , msg ) ; if ( tpi . isMyPartition ( ) ) { log . trace ( "[{}] Forwarding msg {} to device actor!" , msg . getDeviceId ( ) , msg ) ; if ( tbCoreRpcService . isPresent ( ) ) { tbCoreRpcService . get ( ) . forwardRpcRequestToDeviceActor ( rpcMsg ) ; } else { log . warn ( "Failed to find tbCoreRpcService for local service. Possible duplication of serviceIds." ) ; } } else { log . trace ( "[{}] Forwarding msg {} to queue actor!" , msg . getDeviceId ( ) , msg ) ; clusterService . pushMsgToCore ( rpcMsg , null ) ; } }
108
private void sendRpcRequestToDevice ( ToDeviceRpcRequest msg ) { TopicPartitionInfo tpi = partitionService . resolve ( ServiceType . TB_CORE , msg . getTenantId ( ) , msg . getDeviceId ( ) ) ; ToDeviceRpcRequestActorMsg rpcMsg = new ToDeviceRpcRequestActorMsg ( serviceId , msg ) ; if ( tpi . isMyPartition ( ) ) { log . trace ( "[{}] Forwarding msg {} to device actor!" , msg . getDeviceId ( ) , msg ) ; if ( tbCoreRpcService . isPresent ( ) ) { tbCoreRpcService . get ( ) . forwardRpcRequestToDeviceActor ( rpcMsg ) ; } else { log . warn ( "Failed to find tbCoreRpcService for local service. Possible duplication of serviceIds." ) ; } } else { clusterService . pushMsgToCore ( rpcMsg , null ) ; } }
private void sendRpcRequestToDevice ( ToDeviceRpcRequest msg ) { TopicPartitionInfo tpi = partitionService . resolve ( ServiceType . TB_CORE , msg . getTenantId ( ) , msg . getDeviceId ( ) ) ; ToDeviceRpcRequestActorMsg rpcMsg = new ToDeviceRpcRequestActorMsg ( serviceId , msg ) ; if ( tpi . isMyPartition ( ) ) { log . trace ( "[{}] Forwarding msg {} to device actor!" , msg . getDeviceId ( ) , msg ) ; if ( tbCoreRpcService . isPresent ( ) ) { tbCoreRpcService . get ( ) . forwardRpcRequestToDeviceActor ( rpcMsg ) ; } else { log . warn ( "Failed to find tbCoreRpcService for local service. Possible duplication of serviceIds." ) ; } } else { log . trace ( "[{}] Forwarding msg {} to queue actor!" , msg . getDeviceId ( ) , msg ) ; clusterService . pushMsgToCore ( rpcMsg , null ) ; } }
109
public SCrunProductIdentification setProductId ( int actuatorId ) { this . reqIndexValue0 = actuatorId ; return this ; }
public SCrunProductIdentification setProductId ( int actuatorId ) { logger . trace ( "setProductId({}) called." , actuatorId ) ; this . reqIndexValue0 = actuatorId ; return this ; }
110
private void fireOnShown ( ) { if ( ! Objects . isNull ( getOnShown ( ) ) ) { getOnShown ( ) . handle ( new Event ( this , this , Event . ANY ) ) ; } }
private void fireOnShown ( ) { if ( ! Objects . isNull ( getOnShown ( ) ) ) { LOGGER . trace ( "Firing onShown event - Dialog is initialized and being shown" ) ; getOnShown ( ) . handle ( new Event ( this , this , Event . ANY ) ) ; } }
111
@ Test public void test_collapseZeroGap ( ) { initRand ( ) ; int numMarkers = 20 ; Chromosome chr = genome . getChromosome ( "1" ) ; for ( int num = 1 ; num < 1000 ; num ++ ) { Tuple < Markers , Markers > tupleMarkers = createMarkers ( chr , numMarkers ) ; Markers markersOri = tupleMarkers . first ; Markers markersCollapsedOri = tupleMarkers . second ; String mStr = markers2string ( markersOri ) ; String mColOriStr = markers2string ( markersCollapsedOri ) ; if ( verbose ) Log . debug ( "Iteration : " + num + "\n\tMarkers : " + mStr + "\n\tMarkers collapsed : " + mColOriStr ) ; if ( ! mStr . equals ( mColOriStr ) ) { System . err . println ( "Markers : " ) ; for ( Marker m : markersOri ) System . err . println ( m ) ; System . err . println ( "Markers collapsed: " ) ; for ( Marker m : markersCollapsedOri ) System . err . println ( m ) ; throw new RuntimeException ( "Error creating markers! Markers and collapsed marker do not match!\n\t" + mStr + "\n\t" + mColOriStr ) ; } Map < Marker , Marker > collapse = MarkerUtil . collapseZeroGap ( markersOri ) ; HashSet < Marker > collapsed = new HashSet < Marker > ( ) ; collapsed . addAll ( collapse . values ( ) ) ; Markers markers = new Markers ( ) ; markers . addAll ( collapsed ) ; String mColStr = markers2string ( markers ) ; if ( ! mColStr . equals ( mStr ) ) { Log . debug ( "Error checing markers! Markers and collapsed marker do not match!\n\t" + mStr + "\n\t" + mColStr ) ; System . err . println ( "Markers : " ) ; for ( Marker m : markersOri ) System . err . println ( m ) ; System . err . println ( "Markers collapsed: " ) ; markers = new Markers ( ) ; markers . addAll ( collapse . keySet ( ) ) ; Markers keySorted = markers . sort ( false , false ) ; for ( Marker mkey : keySorted ) System . err . println ( mkey + "\t->\t" + collapse . get ( mkey ) ) ; throw new RuntimeException ( "Error checing markers! Markers and collapsed marker do not match!\n\t" + mStr + "\n\t" + mColStr ) ; } } }
@ Test public void test_collapseZeroGap ( ) { Log . debug ( "Test" ) ; initRand ( ) ; int numMarkers = 20 ; Chromosome chr = genome . getChromosome ( "1" ) ; for ( int num = 1 ; num < 1000 ; num ++ ) { Tuple < Markers , Markers > tupleMarkers = createMarkers ( chr , numMarkers ) ; Markers markersOri = tupleMarkers . first ; Markers markersCollapsedOri = tupleMarkers . second ; String mStr = markers2string ( markersOri ) ; String mColOriStr = markers2string ( markersCollapsedOri ) ; if ( verbose ) Log . debug ( "Iteration : " + num + "\n\tMarkers : " + mStr + "\n\tMarkers collapsed : " + mColOriStr ) ; if ( ! mStr . equals ( mColOriStr ) ) { System . err . println ( "Markers : " ) ; for ( Marker m : markersOri ) System . err . println ( m ) ; System . err . println ( "Markers collapsed: " ) ; for ( Marker m : markersCollapsedOri ) System . err . println ( m ) ; throw new RuntimeException ( "Error creating markers! Markers and collapsed marker do not match!\n\t" + mStr + "\n\t" + mColOriStr ) ; } Map < Marker , Marker > collapse = MarkerUtil . collapseZeroGap ( markersOri ) ; HashSet < Marker > collapsed = new HashSet < Marker > ( ) ; collapsed . addAll ( collapse . values ( ) ) ; Markers markers = new Markers ( ) ; markers . addAll ( collapsed ) ; String mColStr = markers2string ( markers ) ; if ( ! mColStr . equals ( mStr ) ) { Log . debug ( "Error checing markers! Markers and collapsed marker do not match!\n\t" + mStr + "\n\t" + mColStr ) ; System . err . println ( "Markers : " ) ; for ( Marker m : markersOri ) System . err . println ( m ) ; System . err . println ( "Markers collapsed: " ) ; markers = new Markers ( ) ; markers . addAll ( collapse . keySet ( ) ) ; Markers keySorted = markers . sort ( false , false ) ; for ( Marker mkey : keySorted ) System . err . println ( mkey + "\t->\t" + collapse . get ( mkey ) ) ; throw new RuntimeException ( "Error checing markers! Markers and collapsed marker do not match!\n\t" + mStr + "\n\t" + mColStr ) ; } } }
112
@ Test public void test_collapseZeroGap ( ) { Log . debug ( "Test" ) ; initRand ( ) ; int numMarkers = 20 ; Chromosome chr = genome . getChromosome ( "1" ) ; for ( int num = 1 ; num < 1000 ; num ++ ) { Tuple < Markers , Markers > tupleMarkers = createMarkers ( chr , numMarkers ) ; Markers markersOri = tupleMarkers . first ; Markers markersCollapsedOri = tupleMarkers . second ; String mStr = markers2string ( markersOri ) ; String mColOriStr = markers2string ( markersCollapsedOri ) ; if ( verbose ) if ( ! mStr . equals ( mColOriStr ) ) { System . err . println ( "Markers : " ) ; for ( Marker m : markersOri ) System . err . println ( m ) ; System . err . println ( "Markers collapsed: " ) ; for ( Marker m : markersCollapsedOri ) System . err . println ( m ) ; throw new RuntimeException ( "Error creating markers! Markers and collapsed marker do not match!\n\t" + mStr + "\n\t" + mColOriStr ) ; } Map < Marker , Marker > collapse = MarkerUtil . collapseZeroGap ( markersOri ) ; HashSet < Marker > collapsed = new HashSet < Marker > ( ) ; collapsed . addAll ( collapse . values ( ) ) ; Markers markers = new Markers ( ) ; markers . addAll ( collapsed ) ; String mColStr = markers2string ( markers ) ; if ( ! mColStr . equals ( mStr ) ) { Log . debug ( "Error checing markers! Markers and collapsed marker do not match!\n\t" + mStr + "\n\t" + mColStr ) ; System . err . println ( "Markers : " ) ; for ( Marker m : markersOri ) System . err . println ( m ) ; System . err . println ( "Markers collapsed: " ) ; markers = new Markers ( ) ; markers . addAll ( collapse . keySet ( ) ) ; Markers keySorted = markers . sort ( false , false ) ; for ( Marker mkey : keySorted ) System . err . println ( mkey + "\t->\t" + collapse . get ( mkey ) ) ; throw new RuntimeException ( "Error checing markers! Markers and collapsed marker do not match!\n\t" + mStr + "\n\t" + mColStr ) ; } } }
@ Test public void test_collapseZeroGap ( ) { Log . debug ( "Test" ) ; initRand ( ) ; int numMarkers = 20 ; Chromosome chr = genome . getChromosome ( "1" ) ; for ( int num = 1 ; num < 1000 ; num ++ ) { Tuple < Markers , Markers > tupleMarkers = createMarkers ( chr , numMarkers ) ; Markers markersOri = tupleMarkers . first ; Markers markersCollapsedOri = tupleMarkers . second ; String mStr = markers2string ( markersOri ) ; String mColOriStr = markers2string ( markersCollapsedOri ) ; if ( verbose ) Log . debug ( "Iteration : " + num + "\n\tMarkers : " + mStr + "\n\tMarkers collapsed : " + mColOriStr ) ; if ( ! mStr . equals ( mColOriStr ) ) { System . err . println ( "Markers : " ) ; for ( Marker m : markersOri ) System . err . println ( m ) ; System . err . println ( "Markers collapsed: " ) ; for ( Marker m : markersCollapsedOri ) System . err . println ( m ) ; throw new RuntimeException ( "Error creating markers! Markers and collapsed marker do not match!\n\t" + mStr + "\n\t" + mColOriStr ) ; } Map < Marker , Marker > collapse = MarkerUtil . collapseZeroGap ( markersOri ) ; HashSet < Marker > collapsed = new HashSet < Marker > ( ) ; collapsed . addAll ( collapse . values ( ) ) ; Markers markers = new Markers ( ) ; markers . addAll ( collapsed ) ; String mColStr = markers2string ( markers ) ; if ( ! mColStr . equals ( mStr ) ) { Log . debug ( "Error checing markers! Markers and collapsed marker do not match!\n\t" + mStr + "\n\t" + mColStr ) ; System . err . println ( "Markers : " ) ; for ( Marker m : markersOri ) System . err . println ( m ) ; System . err . println ( "Markers collapsed: " ) ; markers = new Markers ( ) ; markers . addAll ( collapse . keySet ( ) ) ; Markers keySorted = markers . sort ( false , false ) ; for ( Marker mkey : keySorted ) System . err . println ( mkey + "\t->\t" + collapse . get ( mkey ) ) ; throw new RuntimeException ( "Error checing markers! Markers and collapsed marker do not match!\n\t" + mStr + "\n\t" + mColStr ) ; } } }
113
@ Test public void test_collapseZeroGap ( ) { Log . debug ( "Test" ) ; initRand ( ) ; int numMarkers = 20 ; Chromosome chr = genome . getChromosome ( "1" ) ; for ( int num = 1 ; num < 1000 ; num ++ ) { Tuple < Markers , Markers > tupleMarkers = createMarkers ( chr , numMarkers ) ; Markers markersOri = tupleMarkers . first ; Markers markersCollapsedOri = tupleMarkers . second ; String mStr = markers2string ( markersOri ) ; String mColOriStr = markers2string ( markersCollapsedOri ) ; if ( verbose ) Log . debug ( "Iteration : " + num + "\n\tMarkers : " + mStr + "\n\tMarkers collapsed : " + mColOriStr ) ; if ( ! mStr . equals ( mColOriStr ) ) { System . err . println ( "Markers : " ) ; for ( Marker m : markersOri ) System . err . println ( m ) ; System . err . println ( "Markers collapsed: " ) ; for ( Marker m : markersCollapsedOri ) System . err . println ( m ) ; throw new RuntimeException ( "Error creating markers! Markers and collapsed marker do not match!\n\t" + mStr + "\n\t" + mColOriStr ) ; } Map < Marker , Marker > collapse = MarkerUtil . collapseZeroGap ( markersOri ) ; HashSet < Marker > collapsed = new HashSet < Marker > ( ) ; collapsed . addAll ( collapse . values ( ) ) ; Markers markers = new Markers ( ) ; markers . addAll ( collapsed ) ; String mColStr = markers2string ( markers ) ; if ( ! mColStr . equals ( mStr ) ) { System . err . println ( "Markers : " ) ; for ( Marker m : markersOri ) System . err . println ( m ) ; System . err . println ( "Markers collapsed: " ) ; markers = new Markers ( ) ; markers . addAll ( collapse . keySet ( ) ) ; Markers keySorted = markers . sort ( false , false ) ; for ( Marker mkey : keySorted ) System . err . println ( mkey + "\t->\t" + collapse . get ( mkey ) ) ; throw new RuntimeException ( "Error checing markers! Markers and collapsed marker do not match!\n\t" + mStr + "\n\t" + mColStr ) ; } } }
@ Test public void test_collapseZeroGap ( ) { Log . debug ( "Test" ) ; initRand ( ) ; int numMarkers = 20 ; Chromosome chr = genome . getChromosome ( "1" ) ; for ( int num = 1 ; num < 1000 ; num ++ ) { Tuple < Markers , Markers > tupleMarkers = createMarkers ( chr , numMarkers ) ; Markers markersOri = tupleMarkers . first ; Markers markersCollapsedOri = tupleMarkers . second ; String mStr = markers2string ( markersOri ) ; String mColOriStr = markers2string ( markersCollapsedOri ) ; if ( verbose ) Log . debug ( "Iteration : " + num + "\n\tMarkers : " + mStr + "\n\tMarkers collapsed : " + mColOriStr ) ; if ( ! mStr . equals ( mColOriStr ) ) { System . err . println ( "Markers : " ) ; for ( Marker m : markersOri ) System . err . println ( m ) ; System . err . println ( "Markers collapsed: " ) ; for ( Marker m : markersCollapsedOri ) System . err . println ( m ) ; throw new RuntimeException ( "Error creating markers! Markers and collapsed marker do not match!\n\t" + mStr + "\n\t" + mColOriStr ) ; } Map < Marker , Marker > collapse = MarkerUtil . collapseZeroGap ( markersOri ) ; HashSet < Marker > collapsed = new HashSet < Marker > ( ) ; collapsed . addAll ( collapse . values ( ) ) ; Markers markers = new Markers ( ) ; markers . addAll ( collapsed ) ; String mColStr = markers2string ( markers ) ; if ( ! mColStr . equals ( mStr ) ) { Log . debug ( "Error checing markers! Markers and collapsed marker do not match!\n\t" + mStr + "\n\t" + mColStr ) ; System . err . println ( "Markers : " ) ; for ( Marker m : markersOri ) System . err . println ( m ) ; System . err . println ( "Markers collapsed: " ) ; markers = new Markers ( ) ; markers . addAll ( collapse . keySet ( ) ) ; Markers keySorted = markers . sort ( false , false ) ; for ( Marker mkey : keySorted ) System . err . println ( mkey + "\t->\t" + collapse . get ( mkey ) ) ; throw new RuntimeException ( "Error checing markers! Markers and collapsed marker do not match!\n\t" + mStr + "\n\t" + mColStr ) ; } } }
114
public void destroy ( ) { try { queue . close ( ) ; } catch ( IOException e ) { } }
public void destroy ( ) { try { queue . close ( ) ; } catch ( IOException e ) { LOGGER . debug ( "error close queue" , e ) ; } }
115
@ Nullable @ Override public ComputeJobResultPolicy apply ( ) { try { ComputeJobResultPolicy plc = null ; try { plc = task . result ( jobRes , results ) ; if ( plc == FAILOVER && noFailover ) { IgniteException e = jobRes . getException ( ) ; if ( e != null ) throw e ; plc = WAIT ; } } finally { recordJobEvent ( EVT_JOB_RESULTED , jobRes . getJobContext ( ) . getJobId ( ) , jobRes . getNode ( ) , plc , "Job got resulted with: " + plc ) ; } if ( log . isDebugEnabled ( ) ) return plc ; } catch ( IgniteException e ) { if ( X . hasCause ( e , GridInternalException . class ) ) { if ( log . isDebugEnabled ( ) ) U . error ( log , "Failed to obtain remote job result policy for result from " + "ComputeTask.result(..) method (will fail the whole task): " + jobRes , e ) ; } else if ( X . hasCause ( e , ComputeJobFailoverException . class ) ) { IgniteCheckedException e0 = new IgniteCheckedException ( " Job was not failed over because " + "ComputeJobResultPolicy.FAILOVER was not returned from " + "ComputeTask.result(...) method for job result with ComputeJobFailoverException." , e ) ; finishTask ( null , e0 ) ; return null ; } else if ( X . hasCause ( e , GridServiceNotFoundException . class ) || X . hasCause ( e , ClusterTopologyCheckedException . class ) ) { LT . error ( log , e , "Failed to obtain remote job result policy for result from " + "ComputeTask.result(..) method (will fail the whole task): " + jobRes ) ; } else U . error ( log , "Failed to obtain remote job result policy for result from " + "ComputeTask.result(..) method (will fail the whole task): " + jobRes , e ) ; finishTask ( null , e ) ; return null ; } catch ( Throwable e ) { String errMsg = "Failed to obtain remote job result policy for result from" + "ComputeTask.result(..) method due to undeclared user exception " + "(will fail the whole task): " + jobRes ; U . error ( log , errMsg , e ) ; Throwable tmp = new ComputeUserUndeclaredException ( errMsg , e ) ; finishTask ( null , tmp ) ; if ( e instanceof Error ) throw e ; return null ; } }
@ Nullable @ Override public ComputeJobResultPolicy apply ( ) { try { ComputeJobResultPolicy plc = null ; try { plc = task . result ( jobRes , results ) ; if ( plc == FAILOVER && noFailover ) { IgniteException e = jobRes . getException ( ) ; if ( e != null ) throw e ; plc = WAIT ; } } finally { recordJobEvent ( EVT_JOB_RESULTED , jobRes . getJobContext ( ) . getJobId ( ) , jobRes . getNode ( ) , plc , "Job got resulted with: " + plc ) ; } if ( log . isDebugEnabled ( ) ) log . debug ( "Obtained job result policy [policy=" + plc + ", ses=" + ses + ']' ) ; return plc ; } catch ( IgniteException e ) { if ( X . hasCause ( e , GridInternalException . class ) ) { if ( log . isDebugEnabled ( ) ) U . error ( log , "Failed to obtain remote job result policy for result from " + "ComputeTask.result(..) method (will fail the whole task): " + jobRes , e ) ; } else if ( X . hasCause ( e , ComputeJobFailoverException . class ) ) { IgniteCheckedException e0 = new IgniteCheckedException ( " Job was not failed over because " + "ComputeJobResultPolicy.FAILOVER was not returned from " + "ComputeTask.result(...) method for job result with ComputeJobFailoverException." , e ) ; finishTask ( null , e0 ) ; return null ; } else if ( X . hasCause ( e , GridServiceNotFoundException . class ) || X . hasCause ( e , ClusterTopologyCheckedException . class ) ) { LT . error ( log , e , "Failed to obtain remote job result policy for result from " + "ComputeTask.result(..) method (will fail the whole task): " + jobRes ) ; } else U . error ( log , "Failed to obtain remote job result policy for result from " + "ComputeTask.result(..) method (will fail the whole task): " + jobRes , e ) ; finishTask ( null , e ) ; return null ; } catch ( Throwable e ) { String errMsg = "Failed to obtain remote job result policy for result from" + "ComputeTask.result(..) method due to undeclared user exception " + "(will fail the whole task): " + jobRes ; U . error ( log , errMsg , e ) ; Throwable tmp = new ComputeUserUndeclaredException ( errMsg , e ) ; finishTask ( null , tmp ) ; if ( e instanceof Error ) throw e ; return null ; } }
116
@ NotNull @ Override public Map < ? extends ComputeJob , ClusterNode > map ( List < ClusterNode > subgrid , String arg ) { assert ignite != null ; UUID locNodeId = ignite . configuration ( ) . getNodeId ( ) ; assert locNodeId != null ; if ( log . isInfoEnabled ( ) ) ClusterNode remoteNode = null ; for ( ClusterNode node : subgrid ) { if ( ! node . id ( ) . equals ( locNodeId ) ) remoteNode = node ; } return Collections . singletonMap ( new ComputeJobAdapter ( locNodeId ) { @ IgniteInstanceResource private Ignite ignite ; @ SuppressWarnings ( "NakedNotify" ) @ Override public Serializable execute ( ) { assert ignite != null ; UUID nodeId = ignite . configuration ( ) . getNodeId ( ) ; assert nodeId != null ; if ( ! nodeId . equals ( argument ( 0 ) ) ) { try { synchronized ( mux ) { mux . notifyAll ( ) ; } Thread . sleep ( Integer . MAX_VALUE ) ; } catch ( InterruptedException e ) { throw new ComputeExecutionRejectedException ( "Expected interruption during execution." , e ) ; } } else return "success" ; throw new ComputeExecutionRejectedException ( "Expected exception during execution." ) ; } } , remoteNode ) ; }
@ NotNull @ Override public Map < ? extends ComputeJob , ClusterNode > map ( List < ClusterNode > subgrid , String arg ) { assert ignite != null ; UUID locNodeId = ignite . configuration ( ) . getNodeId ( ) ; assert locNodeId != null ; if ( log . isInfoEnabled ( ) ) log . info ( "Mapping jobs [subgrid=" + subgrid + ", arg=" + arg + ']' ) ; ClusterNode remoteNode = null ; for ( ClusterNode node : subgrid ) { if ( ! node . id ( ) . equals ( locNodeId ) ) remoteNode = node ; } return Collections . singletonMap ( new ComputeJobAdapter ( locNodeId ) { @ IgniteInstanceResource private Ignite ignite ; @ SuppressWarnings ( "NakedNotify" ) @ Override public Serializable execute ( ) { assert ignite != null ; UUID nodeId = ignite . configuration ( ) . getNodeId ( ) ; assert nodeId != null ; if ( ! nodeId . equals ( argument ( 0 ) ) ) { try { synchronized ( mux ) { mux . notifyAll ( ) ; } Thread . sleep ( Integer . MAX_VALUE ) ; } catch ( InterruptedException e ) { throw new ComputeExecutionRejectedException ( "Expected interruption during execution." , e ) ; } } else return "success" ; throw new ComputeExecutionRejectedException ( "Expected exception during execution." ) ; } } , remoteNode ) ; }
117
private static boolean fromBytes ( byte [ ] bytes ) { if ( log . isDebugEnabled ( ) ) { } return bytes . length > 0 && bytes [ 0 ] == ( byte ) 1 ; }
private static boolean fromBytes ( byte [ ] bytes ) { if ( log . isDebugEnabled ( ) ) { log . debug ( "fromBytes(" + Arrays . toString ( bytes ) + ") and bytes.length > 0 && bytes[0] == (byte) 1 returning:" + ( bytes . length > 0 && bytes [ 0 ] == ( byte ) 1 ) ) ; } return bytes . length > 0 && bytes [ 0 ] == ( byte ) 1 ; }
118
@ SuppressWarnings ( "PMD.SimplifiedTernary" ) @ Override public boolean canImport ( TransferHandler . TransferSupport info ) { if ( ! info . isDataFlavorSupported ( TypeKeyEntry . ourDataFlavor ) ) { return false ; } Transferable tf = info . getTransferable ( ) ; try { TypeKeyEntry tke = ( TypeKeyEntry ) tf . getTransferData ( TypeKeyEntry . ourDataFlavor ) ; return myAllowOnlyWithSpecialKeys ? tke . getSpecialKeyType ( ) != null : true ; } catch ( UnsupportedFlavorException | IOException e ) { } return false ; }
@ SuppressWarnings ( "PMD.SimplifiedTernary" ) @ Override public boolean canImport ( TransferHandler . TransferSupport info ) { if ( ! info . isDataFlavorSupported ( TypeKeyEntry . ourDataFlavor ) ) { return false ; } Transferable tf = info . getTransferable ( ) ; try { TypeKeyEntry tke = ( TypeKeyEntry ) tf . getTransferData ( TypeKeyEntry . ourDataFlavor ) ; return myAllowOnlyWithSpecialKeys ? tke . getSpecialKeyType ( ) != null : true ; } catch ( UnsupportedFlavorException | IOException e ) { LOGGER . warn ( e ) ; } return false ; }
119
@ Test public void testGetSetLoggerLevel ( ) { assertNull ( this . loggerManager . getLoggerLevel ( getClass ( ) . getName ( ) ) ) ; LogQueue queue = new LogQueue ( ) ; this . loggerManager . pushLogListener ( new LogQueueListener ( "loglistenerid" , queue ) ) ; this . loggerManager . setLoggerLevel ( getClass ( ) . getName ( ) , LogLevel . WARN ) ; assertSame ( LogLevel . WARN , this . loggerManager . getLoggerLevel ( getClass ( ) . getName ( ) ) ) ; if ( queue . size ( ) > 0 ) { Assert . fail ( "Should have contained no message but got [" + queue . peek ( ) . getFormattedMessage ( ) + "] instead (last message, there might be more)" ) ; } assertEquals ( 0 , queue . size ( ) ) ; this . loggerManager . setLoggerLevel ( getClass ( ) . getName ( ) , LogLevel . DEBUG ) ; assertSame ( LogLevel . DEBUG , this . loggerManager . getLoggerLevel ( getClass ( ) . getName ( ) ) ) ; this . logger . debug ( "[test] debug message 2" ) ; assertEquals ( 1 , queue . size ( ) ) ; this . loggerManager . setLoggerLevel ( getClass ( ) . getName ( ) , null ) ; assertNull ( this . loggerManager . getLoggerLevel ( getClass ( ) . getName ( ) ) ) ; }
@ Test public void testGetSetLoggerLevel ( ) { assertNull ( this . loggerManager . getLoggerLevel ( getClass ( ) . getName ( ) ) ) ; LogQueue queue = new LogQueue ( ) ; this . loggerManager . pushLogListener ( new LogQueueListener ( "loglistenerid" , queue ) ) ; this . loggerManager . setLoggerLevel ( getClass ( ) . getName ( ) , LogLevel . WARN ) ; assertSame ( LogLevel . WARN , this . loggerManager . getLoggerLevel ( getClass ( ) . getName ( ) ) ) ; this . logger . debug ( "[test] debug message 1" ) ; if ( queue . size ( ) > 0 ) { Assert . fail ( "Should have contained no message but got [" + queue . peek ( ) . getFormattedMessage ( ) + "] instead (last message, there might be more)" ) ; } assertEquals ( 0 , queue . size ( ) ) ; this . loggerManager . setLoggerLevel ( getClass ( ) . getName ( ) , LogLevel . DEBUG ) ; assertSame ( LogLevel . DEBUG , this . loggerManager . getLoggerLevel ( getClass ( ) . getName ( ) ) ) ; this . logger . debug ( "[test] debug message 2" ) ; assertEquals ( 1 , queue . size ( ) ) ; this . loggerManager . setLoggerLevel ( getClass ( ) . getName ( ) , null ) ; assertNull ( this . loggerManager . getLoggerLevel ( getClass ( ) . getName ( ) ) ) ; }
120
@ Test public void testGetSetLoggerLevel ( ) { assertNull ( this . loggerManager . getLoggerLevel ( getClass ( ) . getName ( ) ) ) ; LogQueue queue = new LogQueue ( ) ; this . loggerManager . pushLogListener ( new LogQueueListener ( "loglistenerid" , queue ) ) ; this . loggerManager . setLoggerLevel ( getClass ( ) . getName ( ) , LogLevel . WARN ) ; assertSame ( LogLevel . WARN , this . loggerManager . getLoggerLevel ( getClass ( ) . getName ( ) ) ) ; this . logger . debug ( "[test] debug message 1" ) ; if ( queue . size ( ) > 0 ) { Assert . fail ( "Should have contained no message but got [" + queue . peek ( ) . getFormattedMessage ( ) + "] instead (last message, there might be more)" ) ; } assertEquals ( 0 , queue . size ( ) ) ; this . loggerManager . setLoggerLevel ( getClass ( ) . getName ( ) , LogLevel . DEBUG ) ; assertSame ( LogLevel . DEBUG , this . loggerManager . getLoggerLevel ( getClass ( ) . getName ( ) ) ) ; assertEquals ( 1 , queue . size ( ) ) ; this . loggerManager . setLoggerLevel ( getClass ( ) . getName ( ) , null ) ; assertNull ( this . loggerManager . getLoggerLevel ( getClass ( ) . getName ( ) ) ) ; }
@ Test public void testGetSetLoggerLevel ( ) { assertNull ( this . loggerManager . getLoggerLevel ( getClass ( ) . getName ( ) ) ) ; LogQueue queue = new LogQueue ( ) ; this . loggerManager . pushLogListener ( new LogQueueListener ( "loglistenerid" , queue ) ) ; this . loggerManager . setLoggerLevel ( getClass ( ) . getName ( ) , LogLevel . WARN ) ; assertSame ( LogLevel . WARN , this . loggerManager . getLoggerLevel ( getClass ( ) . getName ( ) ) ) ; this . logger . debug ( "[test] debug message 1" ) ; if ( queue . size ( ) > 0 ) { Assert . fail ( "Should have contained no message but got [" + queue . peek ( ) . getFormattedMessage ( ) + "] instead (last message, there might be more)" ) ; } assertEquals ( 0 , queue . size ( ) ) ; this . loggerManager . setLoggerLevel ( getClass ( ) . getName ( ) , LogLevel . DEBUG ) ; assertSame ( LogLevel . DEBUG , this . loggerManager . getLoggerLevel ( getClass ( ) . getName ( ) ) ) ; this . logger . debug ( "[test] debug message 2" ) ; assertEquals ( 1 , queue . size ( ) ) ; this . loggerManager . setLoggerLevel ( getClass ( ) . getName ( ) , null ) ; assertNull ( this . loggerManager . getLoggerLevel ( getClass ( ) . getName ( ) ) ) ; }
121
public Stream . Listener onNewStream ( Stream stream , HeadersFrame frame ) { if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( "SERVER2 received {}" , frame ) ; return new Stream . Listener . Adapter ( ) { @ Override public void onData ( Stream stream , DataFrame frame , Callback callback ) { if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( "SERVER2 received {}" , frame ) ; callback . succeeded ( ) ; MetaData . Response response = new MetaData . Response ( HttpVersion . HTTP_2 , HttpStatus . OK_200 , HttpFields . EMPTY ) ; Callback . Completable completable1 = new Callback . Completable ( ) ; HeadersFrame reply = new HeadersFrame ( stream . getId ( ) , response , null , false ) ; if ( LOGGER . isDebugEnabled ( ) ) stream . headers ( reply , completable1 ) ; completable1 . thenCompose ( ignored -> { Callback . Completable completable2 = new Callback . Completable ( ) ; DataFrame data = new DataFrame ( stream . getId ( ) , buffer1 . slice ( ) , false ) ; if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( "SERVER2 sending {}" , data ) ; stream . data ( data , completable2 ) ; return completable2 ; } ) . thenRun ( ( ) -> { MetaData trailer = new MetaData ( HttpVersion . HTTP_2 , HttpFields . EMPTY ) ; HeadersFrame end = new HeadersFrame ( stream . getId ( ) , trailer , null , true ) ; if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( "SERVER2 sending {}" , end ) ; stream . headers ( end , Callback . NOOP ) ; } ) ; } } ; }
public Stream . Listener onNewStream ( Stream stream , HeadersFrame frame ) { if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( "SERVER2 received {}" , frame ) ; return new Stream . Listener . Adapter ( ) { @ Override public void onData ( Stream stream , DataFrame frame , Callback callback ) { if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( "SERVER2 received {}" , frame ) ; callback . succeeded ( ) ; MetaData . Response response = new MetaData . Response ( HttpVersion . HTTP_2 , HttpStatus . OK_200 , HttpFields . EMPTY ) ; Callback . Completable completable1 = new Callback . Completable ( ) ; HeadersFrame reply = new HeadersFrame ( stream . getId ( ) , response , null , false ) ; if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( "SERVER2 sending {}" , reply ) ; stream . headers ( reply , completable1 ) ; completable1 . thenCompose ( ignored -> { Callback . Completable completable2 = new Callback . Completable ( ) ; DataFrame data = new DataFrame ( stream . getId ( ) , buffer1 . slice ( ) , false ) ; if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( "SERVER2 sending {}" , data ) ; stream . data ( data , completable2 ) ; return completable2 ; } ) . thenRun ( ( ) -> { MetaData trailer = new MetaData ( HttpVersion . HTTP_2 , HttpFields . EMPTY ) ; HeadersFrame end = new HeadersFrame ( stream . getId ( ) , trailer , null , true ) ; if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( "SERVER2 sending {}" , end ) ; stream . headers ( end , Callback . NOOP ) ; } ) ; } } ; }
122
public Stream . Listener onNewStream ( Stream stream , HeadersFrame frame ) { if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( "SERVER2 received {}" , frame ) ; return new Stream . Listener . Adapter ( ) { @ Override public void onData ( Stream stream , DataFrame frame , Callback callback ) { if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( "SERVER2 received {}" , frame ) ; callback . succeeded ( ) ; MetaData . Response response = new MetaData . Response ( HttpVersion . HTTP_2 , HttpStatus . OK_200 , HttpFields . EMPTY ) ; Callback . Completable completable1 = new Callback . Completable ( ) ; HeadersFrame reply = new HeadersFrame ( stream . getId ( ) , response , null , false ) ; if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( "SERVER2 sending {}" , reply ) ; stream . headers ( reply , completable1 ) ; completable1 . thenCompose ( ignored -> { Callback . Completable completable2 = new Callback . Completable ( ) ; DataFrame data = new DataFrame ( stream . getId ( ) , buffer1 . slice ( ) , false ) ; if ( LOGGER . isDebugEnabled ( ) ) stream . data ( data , completable2 ) ; return completable2 ; } ) . thenRun ( ( ) -> { MetaData trailer = new MetaData ( HttpVersion . HTTP_2 , HttpFields . EMPTY ) ; HeadersFrame end = new HeadersFrame ( stream . getId ( ) , trailer , null , true ) ; if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( "SERVER2 sending {}" , end ) ; stream . headers ( end , Callback . NOOP ) ; } ) ; } } ; }
public Stream . Listener onNewStream ( Stream stream , HeadersFrame frame ) { if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( "SERVER2 received {}" , frame ) ; return new Stream . Listener . Adapter ( ) { @ Override public void onData ( Stream stream , DataFrame frame , Callback callback ) { if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( "SERVER2 received {}" , frame ) ; callback . succeeded ( ) ; MetaData . Response response = new MetaData . Response ( HttpVersion . HTTP_2 , HttpStatus . OK_200 , HttpFields . EMPTY ) ; Callback . Completable completable1 = new Callback . Completable ( ) ; HeadersFrame reply = new HeadersFrame ( stream . getId ( ) , response , null , false ) ; if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( "SERVER2 sending {}" , reply ) ; stream . headers ( reply , completable1 ) ; completable1 . thenCompose ( ignored -> { Callback . Completable completable2 = new Callback . Completable ( ) ; DataFrame data = new DataFrame ( stream . getId ( ) , buffer1 . slice ( ) , false ) ; if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( "SERVER2 sending {}" , data ) ; stream . data ( data , completable2 ) ; return completable2 ; } ) . thenRun ( ( ) -> { MetaData trailer = new MetaData ( HttpVersion . HTTP_2 , HttpFields . EMPTY ) ; HeadersFrame end = new HeadersFrame ( stream . getId ( ) , trailer , null , true ) ; if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( "SERVER2 sending {}" , end ) ; stream . headers ( end , Callback . NOOP ) ; } ) ; } } ; }
123
public Stream . Listener onNewStream ( Stream stream , HeadersFrame frame ) { if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( "SERVER2 received {}" , frame ) ; return new Stream . Listener . Adapter ( ) { @ Override public void onData ( Stream stream , DataFrame frame , Callback callback ) { if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( "SERVER2 received {}" , frame ) ; callback . succeeded ( ) ; MetaData . Response response = new MetaData . Response ( HttpVersion . HTTP_2 , HttpStatus . OK_200 , HttpFields . EMPTY ) ; Callback . Completable completable1 = new Callback . Completable ( ) ; HeadersFrame reply = new HeadersFrame ( stream . getId ( ) , response , null , false ) ; if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( "SERVER2 sending {}" , reply ) ; stream . headers ( reply , completable1 ) ; completable1 . thenCompose ( ignored -> { Callback . Completable completable2 = new Callback . Completable ( ) ; DataFrame data = new DataFrame ( stream . getId ( ) , buffer1 . slice ( ) , false ) ; if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( "SERVER2 sending {}" , data ) ; stream . data ( data , completable2 ) ; return completable2 ; } ) . thenRun ( ( ) -> { MetaData trailer = new MetaData ( HttpVersion . HTTP_2 , HttpFields . EMPTY ) ; HeadersFrame end = new HeadersFrame ( stream . getId ( ) , trailer , null , true ) ; if ( LOGGER . isDebugEnabled ( ) ) stream . headers ( end , Callback . NOOP ) ; } ) ; } } ; }
public Stream . Listener onNewStream ( Stream stream , HeadersFrame frame ) { if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( "SERVER2 received {}" , frame ) ; return new Stream . Listener . Adapter ( ) { @ Override public void onData ( Stream stream , DataFrame frame , Callback callback ) { if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( "SERVER2 received {}" , frame ) ; callback . succeeded ( ) ; MetaData . Response response = new MetaData . Response ( HttpVersion . HTTP_2 , HttpStatus . OK_200 , HttpFields . EMPTY ) ; Callback . Completable completable1 = new Callback . Completable ( ) ; HeadersFrame reply = new HeadersFrame ( stream . getId ( ) , response , null , false ) ; if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( "SERVER2 sending {}" , reply ) ; stream . headers ( reply , completable1 ) ; completable1 . thenCompose ( ignored -> { Callback . Completable completable2 = new Callback . Completable ( ) ; DataFrame data = new DataFrame ( stream . getId ( ) , buffer1 . slice ( ) , false ) ; if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( "SERVER2 sending {}" , data ) ; stream . data ( data , completable2 ) ; return completable2 ; } ) . thenRun ( ( ) -> { MetaData trailer = new MetaData ( HttpVersion . HTTP_2 , HttpFields . EMPTY ) ; HeadersFrame end = new HeadersFrame ( stream . getId ( ) , trailer , null , true ) ; if ( LOGGER . isDebugEnabled ( ) ) LOGGER . debug ( "SERVER2 sending {}" , end ) ; stream . headers ( end , Callback . NOOP ) ; } ) ; } } ; }
124
public void run ( ) { try { for ( ; ; ) { Message message = consumer . receive ( ) ; if ( message == null ) { continue ; } Object object = null ; if ( ! serializerEnable ) { TextMessage textMessage = ( TextMessage ) message ; object = textMessage . getText ( ) ; } else { BytesMessage bytesMessage = ( BytesMessage ) message ; int dataLen = bytesMessage . getIntProperty ( "data-len" ) ; byte [ ] data = new byte [ dataLen ] ; if ( dataLen != bytesMessage . readBytes ( data ) ) { continue ; } object = getSerializer ( ) . deserialize ( data ) ; } if ( object != null ) { notifyListeners ( channel , object ) ; } } } catch ( Exception e ) { } }
public void run ( ) { try { for ( ; ; ) { Message message = consumer . receive ( ) ; if ( message == null ) { continue ; } Object object = null ; if ( ! serializerEnable ) { TextMessage textMessage = ( TextMessage ) message ; object = textMessage . getText ( ) ; } else { BytesMessage bytesMessage = ( BytesMessage ) message ; int dataLen = bytesMessage . getIntProperty ( "data-len" ) ; byte [ ] data = new byte [ dataLen ] ; if ( dataLen != bytesMessage . readBytes ( data ) ) { continue ; } object = getSerializer ( ) . deserialize ( data ) ; } if ( object != null ) { notifyListeners ( channel , object ) ; } } } catch ( Exception e ) { LOG . error ( e . toString ( ) , e ) ; } }
125
public void createDirectory ( Path dir , FileAttribute < ? > ... attrs ) throws IOException { SftpPath p = toSftpPath ( dir ) ; SftpFileSystem fs = p . getFileSystem ( ) ; if ( log . isDebugEnabled ( ) ) { } try ( SftpClient sftp = fs . getClient ( ) ) { try { sftp . mkdir ( dir . toString ( ) ) ; } catch ( SftpException e ) { int sftpStatus = e . getStatus ( ) ; if ( ( sftp . getVersion ( ) == SftpConstants . SFTP_V3 ) && ( sftpStatus == SftpConstants . SSH_FX_FAILURE ) ) { try { Attributes attributes = sftp . stat ( dir . toString ( ) ) ; if ( attributes != null ) { throw new FileAlreadyExistsException ( p . toString ( ) ) ; } } catch ( SshException e2 ) { e . addSuppressed ( e2 ) ; } } if ( sftpStatus == SftpConstants . SSH_FX_FILE_ALREADY_EXISTS ) { throw new FileAlreadyExistsException ( p . toString ( ) ) ; } throw e ; } for ( FileAttribute < ? > attr : attrs ) { setAttribute ( p , attr . name ( ) , attr . value ( ) ) ; } } }
public void createDirectory ( Path dir , FileAttribute < ? > ... attrs ) throws IOException { SftpPath p = toSftpPath ( dir ) ; SftpFileSystem fs = p . getFileSystem ( ) ; if ( log . isDebugEnabled ( ) ) { log . debug ( "createDirectory({}) {} ({})" , fs , dir , Arrays . asList ( attrs ) ) ; } try ( SftpClient sftp = fs . getClient ( ) ) { try { sftp . mkdir ( dir . toString ( ) ) ; } catch ( SftpException e ) { int sftpStatus = e . getStatus ( ) ; if ( ( sftp . getVersion ( ) == SftpConstants . SFTP_V3 ) && ( sftpStatus == SftpConstants . SSH_FX_FAILURE ) ) { try { Attributes attributes = sftp . stat ( dir . toString ( ) ) ; if ( attributes != null ) { throw new FileAlreadyExistsException ( p . toString ( ) ) ; } } catch ( SshException e2 ) { e . addSuppressed ( e2 ) ; } } if ( sftpStatus == SftpConstants . SSH_FX_FILE_ALREADY_EXISTS ) { throw new FileAlreadyExistsException ( p . toString ( ) ) ; } throw e ; } for ( FileAttribute < ? > attr : attrs ) { setAttribute ( p , attr . name ( ) , attr . value ( ) ) ; } } }
126
public void sendReport ( Report report ) { LOGGER . info ( report . getContent ( ) ) ; lastReport = report ; }
public void sendReport ( Report report ) { LOGGER . info ( "Subject: " + report . getTitle ( ) ) ; LOGGER . info ( report . getContent ( ) ) ; lastReport = report ; }
127
public void sendReport ( Report report ) { LOGGER . info ( "Subject: " + report . getTitle ( ) ) ; lastReport = report ; }
public void sendReport ( Report report ) { LOGGER . info ( "Subject: " + report . getTitle ( ) ) ; LOGGER . info ( report . getContent ( ) ) ; lastReport = report ; }
128
@ Bean public Function < String , String > reverse ( ) { return value -> { return new StringBuilder ( value ) . reverse ( ) . toString ( ) ; } ; }
@ Bean public Function < String , String > reverse ( ) { return value -> { logger . info ( "REVERSING: " + value ) ; return new StringBuilder ( value ) . reverse ( ) . toString ( ) ; } ; }
129
@ GET @ Path ( "/flushCachedCredentials" ) @ Produces ( { "application/xml" , "text/xml" , "application/json" , "text/yaml" , "text/x-yaml" , "application/x-yaml" , "application/x-protobuf" , "application/x-protostuff" } ) @ PermitAll public GenericResponse < String > flushCachedCredentials ( ) { GenericResponse < String > response = new GenericResponse < > ( ) ; Principal callerPrincipal = context . getCallerPrincipal ( ) ; if ( callerPrincipal instanceof DatawavePrincipal ) { DatawavePrincipal dp = ( DatawavePrincipal ) callerPrincipal ; response . setResult ( credentialsCache . evict ( dp . getUserDN ( ) . subjectDN ( ) ) ) ; } else { log . warn ( callerPrincipal + " is not a DatawavePrincipal. Cannot flush credentials." ) ; response . addMessage ( "Unable to determine calling user name. Values were not flushed!" ) ; throw new DatawaveWebApplicationException ( new IllegalStateException ( "Unable to flush credentials. Unknown principal type." ) , response ) ; } return response ; }
@ GET @ Path ( "/flushCachedCredentials" ) @ Produces ( { "application/xml" , "text/xml" , "application/json" , "text/yaml" , "text/x-yaml" , "application/x-yaml" , "application/x-protobuf" , "application/x-protostuff" } ) @ PermitAll public GenericResponse < String > flushCachedCredentials ( ) { GenericResponse < String > response = new GenericResponse < > ( ) ; Principal callerPrincipal = context . getCallerPrincipal ( ) ; log . info ( "Flushing credentials for " + callerPrincipal + " from the cache." ) ; if ( callerPrincipal instanceof DatawavePrincipal ) { DatawavePrincipal dp = ( DatawavePrincipal ) callerPrincipal ; response . setResult ( credentialsCache . evict ( dp . getUserDN ( ) . subjectDN ( ) ) ) ; } else { log . warn ( callerPrincipal + " is not a DatawavePrincipal. Cannot flush credentials." ) ; response . addMessage ( "Unable to determine calling user name. Values were not flushed!" ) ; throw new DatawaveWebApplicationException ( new IllegalStateException ( "Unable to flush credentials. Unknown principal type." ) , response ) ; } return response ; }
130
@ GET @ Path ( "/flushCachedCredentials" ) @ Produces ( { "application/xml" , "text/xml" , "application/json" , "text/yaml" , "text/x-yaml" , "application/x-yaml" , "application/x-protobuf" , "application/x-protostuff" } ) @ PermitAll public GenericResponse < String > flushCachedCredentials ( ) { GenericResponse < String > response = new GenericResponse < > ( ) ; Principal callerPrincipal = context . getCallerPrincipal ( ) ; log . info ( "Flushing credentials for " + callerPrincipal + " from the cache." ) ; if ( callerPrincipal instanceof DatawavePrincipal ) { DatawavePrincipal dp = ( DatawavePrincipal ) callerPrincipal ; response . setResult ( credentialsCache . evict ( dp . getUserDN ( ) . subjectDN ( ) ) ) ; } else { response . addMessage ( "Unable to determine calling user name. Values were not flushed!" ) ; throw new DatawaveWebApplicationException ( new IllegalStateException ( "Unable to flush credentials. Unknown principal type." ) , response ) ; } return response ; }
@ GET @ Path ( "/flushCachedCredentials" ) @ Produces ( { "application/xml" , "text/xml" , "application/json" , "text/yaml" , "text/x-yaml" , "application/x-yaml" , "application/x-protobuf" , "application/x-protostuff" } ) @ PermitAll public GenericResponse < String > flushCachedCredentials ( ) { GenericResponse < String > response = new GenericResponse < > ( ) ; Principal callerPrincipal = context . getCallerPrincipal ( ) ; log . info ( "Flushing credentials for " + callerPrincipal + " from the cache." ) ; if ( callerPrincipal instanceof DatawavePrincipal ) { DatawavePrincipal dp = ( DatawavePrincipal ) callerPrincipal ; response . setResult ( credentialsCache . evict ( dp . getUserDN ( ) . subjectDN ( ) ) ) ; } else { log . warn ( callerPrincipal + " is not a DatawavePrincipal. Cannot flush credentials." ) ; response . addMessage ( "Unable to determine calling user name. Values were not flushed!" ) ; throw new DatawaveWebApplicationException ( new IllegalStateException ( "Unable to flush credentials. Unknown principal type." ) , response ) ; } return response ; }
131
public boolean updateDeviceStatus ( Device device , boolean playing ) { if ( deviceName . equals ( device . getName ( ) ) ) { deviceId = device . getId ( ) == null ? "" : device . getId ( ) ; final boolean online = setOnlineStatus ( device . isRestricted ( ) ) ; updateChannelState ( CHANNEL_DEVICEID , new StringType ( deviceId ) ) ; updateChannelState ( CHANNEL_DEVICENAME , new StringType ( device . getName ( ) ) ) ; updateChannelState ( CHANNEL_DEVICETYPE , new StringType ( device . getType ( ) ) ) ; updateChannelState ( CHANNEL_DEVICEVOLUME , device . getVolumePercent ( ) == null ? UnDefType . UNDEF : new PercentType ( device . getVolumePercent ( ) ) ) ; active = device . isActive ( ) ; updateChannelState ( CHANNEL_DEVICEACTIVE , OnOffType . from ( active ) ) ; updateChannelState ( CHANNEL_DEVICEPLAYER , online && active && playing ? PlayPauseType . PLAY : PlayPauseType . PAUSE ) ; return true ; } else { return false ; } }
public boolean updateDeviceStatus ( Device device , boolean playing ) { if ( deviceName . equals ( device . getName ( ) ) ) { deviceId = device . getId ( ) == null ? "" : device . getId ( ) ; logger . debug ( "Updating status of Thing: {} Device [ {} {}, {} ]" , thing . getUID ( ) , deviceId , device . getName ( ) , device . getType ( ) ) ; final boolean online = setOnlineStatus ( device . isRestricted ( ) ) ; updateChannelState ( CHANNEL_DEVICEID , new StringType ( deviceId ) ) ; updateChannelState ( CHANNEL_DEVICENAME , new StringType ( device . getName ( ) ) ) ; updateChannelState ( CHANNEL_DEVICETYPE , new StringType ( device . getType ( ) ) ) ; updateChannelState ( CHANNEL_DEVICEVOLUME , device . getVolumePercent ( ) == null ? UnDefType . UNDEF : new PercentType ( device . getVolumePercent ( ) ) ) ; active = device . isActive ( ) ; updateChannelState ( CHANNEL_DEVICEACTIVE , OnOffType . from ( active ) ) ; updateChannelState ( CHANNEL_DEVICEPLAYER , online && active && playing ? PlayPauseType . PLAY : PlayPauseType . PAUSE ) ; return true ; } else { return false ; } }
132
@ EventHandler ( priority = EventPriority . LOWEST ) public void onJoin ( @ Nonnull PlayerJoinEvent event ) { if ( ! handler . hasLoggedIn ( event . getPlayer ( ) . getUniqueId ( ) ) ) { boolean login = handler . login ( event . getPlayer ( ) . getUniqueId ( ) ) ; if ( ! login || ! handler . hasLoggedIn ( event . getPlayer ( ) . getUniqueId ( ) ) ) { event . getPlayer ( ) . kickPlayer ( Lang . legacyColors ( Lang . string ( LangKey . DATA_NOT_LOADED ) ) ) ; return ; } } handler . join ( event . getPlayer ( ) ) ; }
@ EventHandler ( priority = EventPriority . LOWEST ) public void onJoin ( @ Nonnull PlayerJoinEvent event ) { if ( ! handler . hasLoggedIn ( event . getPlayer ( ) . getUniqueId ( ) ) ) { log . warning ( "Loading data for player " + event . getPlayer ( ) . getName ( ) + "(" + event . getPlayer ( ) . getUniqueId ( ) + ") sync!" ) ; boolean login = handler . login ( event . getPlayer ( ) . getUniqueId ( ) ) ; if ( ! login || ! handler . hasLoggedIn ( event . getPlayer ( ) . getUniqueId ( ) ) ) { event . getPlayer ( ) . kickPlayer ( Lang . legacyColors ( Lang . string ( LangKey . DATA_NOT_LOADED ) ) ) ; return ; } } handler . join ( event . getPlayer ( ) ) ; }
133
public boolean checkTimeseriesExists ( String path ) throws IoTDBConnectionException , StatementExecutionException { for ( int i = 0 ; i < RETRY ; i ++ ) { Session session = getSession ( ) ; try { boolean resp = session . checkTimeseriesExists ( path ) ; putBack ( session ) ; return resp ; } catch ( IoTDBConnectionException e ) { cleanSessionAndMayThrowConnectionException ( session , i , e ) ; } catch ( StatementExecutionException | RuntimeException e ) { putBack ( session ) ; throw e ; } } return false ; }
public boolean checkTimeseriesExists ( String path ) throws IoTDBConnectionException , StatementExecutionException { for ( int i = 0 ; i < RETRY ; i ++ ) { Session session = getSession ( ) ; try { boolean resp = session . checkTimeseriesExists ( path ) ; putBack ( session ) ; return resp ; } catch ( IoTDBConnectionException e ) { logger . warn ( "checkTimeseriesExists failed" , e ) ; cleanSessionAndMayThrowConnectionException ( session , i , e ) ; } catch ( StatementExecutionException | RuntimeException e ) { putBack ( session ) ; throw e ; } } return false ; }
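The pair above (record 133) follows a common retry shape: warn and retry on a transient connection failure, rethrow everything else after putting the pooled session back. A minimal sketch of that shape only — the RETRY bound, the Call interface, and the SLF4J facade are illustrative assumptions, not taken from the dataset:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class RetrySketch {
    private static final Logger logger = LoggerFactory.getLogger(RetrySketch.class);
    private static final int RETRY = 3; // illustrative attempt bound

    interface Call<T> { T run() throws Exception; }

    <T> T withRetry(Call<T> call) throws Exception {
        Exception last = null;
        for (int i = 0; i < RETRY; i++) {
            try {
                return call.run();
            } catch (java.net.ConnectException e) {
                // Transient failure: record the attempt and the cause, then retry.
                logger.warn("attempt {} failed", i, e);
                last = e;
            }
            // Any other exception escapes the catch and propagates immediately,
            // mirroring how the record rethrows StatementExecutionException
            // and RuntimeException after returning the session to the pool.
        }
        throw last;
    }
}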
134
private synchronized boolean enableExclusiveProcessing ( String controllerId , String instanceId , PropertyHandler paramHandler ) throws APPlatformException { return platformService . lockServiceInstance ( controllerId , instanceId , paramHandler . getTPAuthentication ( ) ) ; }
private synchronized boolean enableExclusiveProcessing ( String controllerId , String instanceId , PropertyHandler paramHandler ) throws APPlatformException { logger . debug ( "enableExclusiveProcessing('{}')" , instanceId ) ; return platformService . lockServiceInstance ( controllerId , instanceId , paramHandler . getTPAuthentication ( ) ) ; }
135
@ BeforeClass public void setUp ( ) throws Exception { basePath = TestsHelper . createBaseTempDir ( getClass ( ) , true ) ; boolean deploySolr = true ; boolean deployLdap = true ; boolean deployFolderMonitor = true ; boolean deployOrchestrator = true ; boolean deployPluginManager = true ; boolean deployDefaultResources = false ; RodaCoreFactory . instantiateTest ( deploySolr , deployLdap , deployFolderMonitor , deployOrchestrator , deployPluginManager , deployDefaultResources , false ) ; model = RodaCoreFactory . getModelService ( ) ; index = RodaCoreFactory . getIndexService ( ) ; URL corporaURL = FailureIngestPluginTest . class . getResource ( "/corpora" ) ; corporaPath = Paths . get ( corporaURL . toURI ( ) ) ; }
@ BeforeClass public void setUp ( ) throws Exception { basePath = TestsHelper . createBaseTempDir ( getClass ( ) , true ) ; boolean deploySolr = true ; boolean deployLdap = true ; boolean deployFolderMonitor = true ; boolean deployOrchestrator = true ; boolean deployPluginManager = true ; boolean deployDefaultResources = false ; RodaCoreFactory . instantiateTest ( deploySolr , deployLdap , deployFolderMonitor , deployOrchestrator , deployPluginManager , deployDefaultResources , false ) ; model = RodaCoreFactory . getModelService ( ) ; index = RodaCoreFactory . getIndexService ( ) ; URL corporaURL = FailureIngestPluginTest . class . getResource ( "/corpora" ) ; corporaPath = Paths . get ( corporaURL . toURI ( ) ) ; LOGGER . info ( "Running FailureIngestPlugin tests under storage {}" , basePath ) ; }
136
public void log ( Level level , Marker marker , Message msg , Throwable t ) { }
public void log ( Level level , Marker marker , Message msg , Throwable t ) { getLogger ( ) . log ( level , marker , msg , t ) ; }
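Record 136 fills an empty override by delegating to an underlying logger. Assuming the Log4j2 API that the Level/Marker/Message signature suggests (the dataset row itself only shows the call), a delegating wrapper looks like this; getLogger() stands in for whatever delegate the enclosing class exposes:

import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.Marker;
import org.apache.logging.log4j.message.Message;

public abstract class DelegatingLoggerSketch {
    // Sketch only: the real delegate is supplied by the enclosing class in the record.
    protected abstract Logger getLogger();

    public void log(Level level, Marker marker, Message msg, Throwable t) {
        // Forward unchanged so level filtering and layout stay with the delegate.
        getLogger().log(level, marker, msg, t);
    }
}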
137
public Task < ? > updateAsync ( ) { synchronized ( mutex ) { Task < ? > result = null ; if ( ! isActive ( ) ) { updateNeeded = true ; } else { updateNeeded = false ; LOG . info ( "Updating {}, server pool targets {}" , new Object [ ] { this , getAttribute ( SERVER_POOL_TARGETS ) } ) ; reconfigureService ( ) ; LOG . debug ( "Reloading {} in response to changes" , this ) ; invoke ( RELOAD ) ; } return result ; } }
public Task < ? > updateAsync ( ) { synchronized ( mutex ) { Task < ? > result = null ; if ( ! isActive ( ) ) { updateNeeded = true ; } else { updateNeeded = false ; LOG . debug ( "Updating {} in response to changes" , this ) ; LOG . info ( "Updating {}, server pool targets {}" , new Object [ ] { this , getAttribute ( SERVER_POOL_TARGETS ) } ) ; reconfigureService ( ) ; LOG . debug ( "Reloading {} in response to changes" , this ) ; invoke ( RELOAD ) ; } return result ; } }
138
public Task < ? > updateAsync ( ) { synchronized ( mutex ) { Task < ? > result = null ; if ( ! isActive ( ) ) { updateNeeded = true ; } else { updateNeeded = false ; LOG . debug ( "Updating {} in response to changes" , this ) ; reconfigureService ( ) ; LOG . debug ( "Reloading {} in response to changes" , this ) ; invoke ( RELOAD ) ; } return result ; } }
public Task < ? > updateAsync ( ) { synchronized ( mutex ) { Task < ? > result = null ; if ( ! isActive ( ) ) { updateNeeded = true ; } else { updateNeeded = false ; LOG . debug ( "Updating {} in response to changes" , this ) ; LOG . info ( "Updating {}, server pool targets {}" , new Object [ ] { this , getAttribute ( SERVER_POOL_TARGETS ) } ) ; reconfigureService ( ) ; LOG . debug ( "Reloading {} in response to changes" , this ) ; invoke ( RELOAD ) ; } return result ; } }
139
public Task < ? > updateAsync ( ) { synchronized ( mutex ) { Task < ? > result = null ; if ( ! isActive ( ) ) { updateNeeded = true ; } else { updateNeeded = false ; LOG . debug ( "Updating {} in response to changes" , this ) ; LOG . info ( "Updating {}, server pool targets {}" , new Object [ ] { this , getAttribute ( SERVER_POOL_TARGETS ) } ) ; reconfigureService ( ) ; invoke ( RELOAD ) ; } return result ; } }
public Task < ? > updateAsync ( ) { synchronized ( mutex ) { Task < ? > result = null ; if ( ! isActive ( ) ) { updateNeeded = true ; } else { updateNeeded = false ; LOG . debug ( "Updating {} in response to changes" , this ) ; LOG . info ( "Updating {}, server pool targets {}" , new Object [ ] { this , getAttribute ( SERVER_POOL_TARGETS ) } ) ; reconfigureService ( ) ; LOG . debug ( "Reloading {} in response to changes" , this ) ; invoke ( RELOAD ) ; } return result ; } }
140
protected void doJumpToPage ( int itemIndex ) { if ( startAfterValues == null && getPage ( ) > 0 ) { String jumpToItemSql = queryProvider . generateJumpToItemQuery ( itemIndex , getPageSize ( ) ) ; if ( logger . isDebugEnabled ( ) ) { } if ( this . queryProvider . isUsingNamedParameters ( ) ) { startAfterValues = namedParameterJdbcTemplate . queryForMap ( jumpToItemSql , getParameterMap ( parameterValues , null ) ) ; } else { startAfterValues = getJdbcTemplate ( ) . queryForMap ( jumpToItemSql , getParameterList ( parameterValues , null ) . toArray ( ) ) ; } } }
protected void doJumpToPage ( int itemIndex ) { if ( startAfterValues == null && getPage ( ) > 0 ) { String jumpToItemSql = queryProvider . generateJumpToItemQuery ( itemIndex , getPageSize ( ) ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( "SQL used for jumping: [" + jumpToItemSql + "]" ) ; } if ( this . queryProvider . isUsingNamedParameters ( ) ) { startAfterValues = namedParameterJdbcTemplate . queryForMap ( jumpToItemSql , getParameterMap ( parameterValues , null ) ) ; } else { startAfterValues = getJdbcTemplate ( ) . queryForMap ( jumpToItemSql , getParameterList ( parameterValues , null ) . toArray ( ) ) ; } } }
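Record 140 restores a debug statement inside an isDebugEnabled() guard. The guard matters here because the message is built by string concatenation; a short sketch of the trade-off, assuming an SLF4J logger (the rows themselves use whichever logging facade their project ships):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class GuardedLoggingSketch {
    private static final Logger logger = LoggerFactory.getLogger(GuardedLoggingSketch.class);

    void jump(String jumpToItemSql) {
        // Guarded concatenation: the String is only built when DEBUG is enabled.
        if (logger.isDebugEnabled()) {
            logger.debug("SQL used for jumping: [" + jumpToItemSql + "]");
        }
        // Parameterized form defers formatting until the level check passes,
        // so it usually needs no explicit guard.
        logger.debug("SQL used for jumping: [{}]", jumpToItemSql);
    }
}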
141
public void clear ( ) { for ( Bridge bridge : bridges . values ( ) ) { try { bridge . stop ( ) ; } catch ( Exception e ) { } } bridges . clear ( ) ; for ( ClusterConnection clusterConnection : clusterConnections . values ( ) ) { try { clusterConnection . stop ( ) ; } catch ( Exception e ) { ActiveMQServerLogger . LOGGER . failedToStopClusterConnection ( e ) ; } } clearClusterConnections ( ) ; }
public void clear ( ) { for ( Bridge bridge : bridges . values ( ) ) { try { bridge . stop ( ) ; } catch ( Exception e ) { ActiveMQServerLogger . LOGGER . warn ( e . getMessage ( ) , e ) ; } } bridges . clear ( ) ; for ( ClusterConnection clusterConnection : clusterConnections . values ( ) ) { try { clusterConnection . stop ( ) ; } catch ( Exception e ) { ActiveMQServerLogger . LOGGER . failedToStopClusterConnection ( e ) ; } } clearClusterConnections ( ) ; }
142
private void serializeBands ( final MeterBandHeaders meterBandHeaders , final ByteBuf outBuffer ) { if ( meterBandHeaders == null ) { return ; } for ( MeterBandHeader meterBandHeader : meterBandHeaders . nonnullMeterBandHeader ( ) . values ( ) ) { final BandType type = meterBandHeader . getBandType ( ) ; if ( type == null ) { continue ; } final var types = meterBandHeader . getMeterBandTypes ( ) ; if ( types == null ) { continue ; } final var flags = types . getFlags ( ) ; if ( flags != null ) { if ( flags . getOfpmbtDrop ( ) ) { final Drop band = ( Drop ) type ; outBuffer . writeShort ( MeterBandType . OFPMBTDROP . getIntValue ( ) ) ; outBuffer . writeShort ( LENGTH_OF_METER_BANDS ) ; outBuffer . writeInt ( band . getDropRate ( ) . intValue ( ) ) ; outBuffer . writeInt ( band . getDropBurstSize ( ) . intValue ( ) ) ; outBuffer . writeZero ( PADDING_IN_METER_BAND_DROP ) ; } else if ( flags . getOfpmbtDscpRemark ( ) ) { final DscpRemark band = ( DscpRemark ) type ; outBuffer . writeShort ( MeterBandType . OFPMBTDSCPREMARK . getIntValue ( ) ) ; outBuffer . writeShort ( LENGTH_OF_METER_BANDS ) ; outBuffer . writeInt ( band . getDscpRemarkRate ( ) . intValue ( ) ) ; outBuffer . writeInt ( band . getDscpRemarkBurstSize ( ) . intValue ( ) ) ; outBuffer . writeByte ( band . getPrecLevel ( ) . toJava ( ) ) ; outBuffer . writeZero ( PADDING_IN_METER_BAND_DSCP_REMARK ) ; } else if ( flags . getOfpmbtExperimenter ( ) ) { final Experimenter band = ( Experimenter ) type ; final ExperimenterIdSerializerKey < Experimenter > key = new ExperimenterIdSerializerKey < > ( EncodeConstants . OF13_VERSION_ID , band . getExperimenter ( ) . toJava ( ) , ( Class < Experimenter > ) type . implementedInterface ( ) ) ; final OFSerializer < Experimenter > serializer = registry . getSerializer ( key ) ; try { serializer . serialize ( band , outBuffer ) ; } catch ( final IllegalStateException e ) { } } } } }
private void serializeBands ( final MeterBandHeaders meterBandHeaders , final ByteBuf outBuffer ) { if ( meterBandHeaders == null ) { return ; } for ( MeterBandHeader meterBandHeader : meterBandHeaders . nonnullMeterBandHeader ( ) . values ( ) ) { final BandType type = meterBandHeader . getBandType ( ) ; if ( type == null ) { continue ; } final var types = meterBandHeader . getMeterBandTypes ( ) ; if ( types == null ) { continue ; } final var flags = types . getFlags ( ) ; if ( flags != null ) { if ( flags . getOfpmbtDrop ( ) ) { final Drop band = ( Drop ) type ; outBuffer . writeShort ( MeterBandType . OFPMBTDROP . getIntValue ( ) ) ; outBuffer . writeShort ( LENGTH_OF_METER_BANDS ) ; outBuffer . writeInt ( band . getDropRate ( ) . intValue ( ) ) ; outBuffer . writeInt ( band . getDropBurstSize ( ) . intValue ( ) ) ; outBuffer . writeZero ( PADDING_IN_METER_BAND_DROP ) ; } else if ( flags . getOfpmbtDscpRemark ( ) ) { final DscpRemark band = ( DscpRemark ) type ; outBuffer . writeShort ( MeterBandType . OFPMBTDSCPREMARK . getIntValue ( ) ) ; outBuffer . writeShort ( LENGTH_OF_METER_BANDS ) ; outBuffer . writeInt ( band . getDscpRemarkRate ( ) . intValue ( ) ) ; outBuffer . writeInt ( band . getDscpRemarkBurstSize ( ) . intValue ( ) ) ; outBuffer . writeByte ( band . getPrecLevel ( ) . toJava ( ) ) ; outBuffer . writeZero ( PADDING_IN_METER_BAND_DSCP_REMARK ) ; } else if ( flags . getOfpmbtExperimenter ( ) ) { final Experimenter band = ( Experimenter ) type ; final ExperimenterIdSerializerKey < Experimenter > key = new ExperimenterIdSerializerKey < > ( EncodeConstants . OF13_VERSION_ID , band . getExperimenter ( ) . toJava ( ) , ( Class < Experimenter > ) type . implementedInterface ( ) ) ; final OFSerializer < Experimenter > serializer = registry . getSerializer ( key ) ; try { serializer . serialize ( band , outBuffer ) ; } catch ( final IllegalStateException e ) { LOG . warn ( "Serializer for key: {} wasn't found" , key , e ) ; } } } } }
143
public Map < String , CQSMessage > getMessages ( String queueUrl , List < String > ids ) throws NoSuchAlgorithmException , IOException , JSONException , PersistenceException { Map < String , CQSMessage > messageMap = new HashMap < String , CQSMessage > ( ) ; if ( ids == null || ids . size ( ) == 0 ) { return messageMap ; } else if ( ids . size ( ) > 100 ) { return getMessagesBulk ( queueUrl , ids ) ; } for ( String id : ids ) { String [ ] idParts = id . split ( ":" ) ; if ( idParts . length != 3 ) { logger . error ( "event=get_messages error_code=invalid_message_id id=" + id ) ; throw new IllegalArgumentException ( "Invalid message id " + id ) ; } CmbComposite columnName = cassandraHandler . getCmbComposite ( Arrays . asList ( Long . parseLong ( idParts [ 1 ] ) , Long . parseLong ( idParts [ 2 ] ) ) ) ; CmbColumn < CmbComposite , String > column = cassandraHandler . readColumn ( AbstractDurablePersistence . CQS_KEYSPACE , COLUMN_FAMILY_PARTITIONED_QUEUE_MESSAGES , idParts [ 0 ] , columnName , CMB_SERIALIZER . STRING_SERIALIZER , CMB_SERIALIZER . COMPOSITE_SERIALIZER , CMB_SERIALIZER . STRING_SERIALIZER ) ; CQSMessage message = null ; if ( column != null ) { message = extractMessageFromJSON ( queueUrl , column ) ; } messageMap . put ( id , message ) ; } return messageMap ; }
public Map < String , CQSMessage > getMessages ( String queueUrl , List < String > ids ) throws NoSuchAlgorithmException , IOException , JSONException , PersistenceException { Map < String , CQSMessage > messageMap = new HashMap < String , CQSMessage > ( ) ; logger . debug ( "event=get_messages ids=" + ids ) ; if ( ids == null || ids . size ( ) == 0 ) { return messageMap ; } else if ( ids . size ( ) > 100 ) { return getMessagesBulk ( queueUrl , ids ) ; } for ( String id : ids ) { String [ ] idParts = id . split ( ":" ) ; if ( idParts . length != 3 ) { logger . error ( "event=get_messages error_code=invalid_message_id id=" + id ) ; throw new IllegalArgumentException ( "Invalid message id " + id ) ; } CmbComposite columnName = cassandraHandler . getCmbComposite ( Arrays . asList ( Long . parseLong ( idParts [ 1 ] ) , Long . parseLong ( idParts [ 2 ] ) ) ) ; CmbColumn < CmbComposite , String > column = cassandraHandler . readColumn ( AbstractDurablePersistence . CQS_KEYSPACE , COLUMN_FAMILY_PARTITIONED_QUEUE_MESSAGES , idParts [ 0 ] , columnName , CMB_SERIALIZER . STRING_SERIALIZER , CMB_SERIALIZER . COMPOSITE_SERIALIZER , CMB_SERIALIZER . STRING_SERIALIZER ) ; CQSMessage message = null ; if ( column != null ) { message = extractMessageFromJSON ( queueUrl , column ) ; } messageMap . put ( id , message ) ; } return messageMap ; }
144
public Map < String , CQSMessage > getMessages ( String queueUrl , List < String > ids ) throws NoSuchAlgorithmException , IOException , JSONException , PersistenceException { Map < String , CQSMessage > messageMap = new HashMap < String , CQSMessage > ( ) ; logger . debug ( "event=get_messages ids=" + ids ) ; if ( ids == null || ids . size ( ) == 0 ) { return messageMap ; } else if ( ids . size ( ) > 100 ) { return getMessagesBulk ( queueUrl , ids ) ; } for ( String id : ids ) { String [ ] idParts = id . split ( ":" ) ; if ( idParts . length != 3 ) { throw new IllegalArgumentException ( "Invalid message id " + id ) ; } CmbComposite columnName = cassandraHandler . getCmbComposite ( Arrays . asList ( Long . parseLong ( idParts [ 1 ] ) , Long . parseLong ( idParts [ 2 ] ) ) ) ; CmbColumn < CmbComposite , String > column = cassandraHandler . readColumn ( AbstractDurablePersistence . CQS_KEYSPACE , COLUMN_FAMILY_PARTITIONED_QUEUE_MESSAGES , idParts [ 0 ] , columnName , CMB_SERIALIZER . STRING_SERIALIZER , CMB_SERIALIZER . COMPOSITE_SERIALIZER , CMB_SERIALIZER . STRING_SERIALIZER ) ; CQSMessage message = null ; if ( column != null ) { message = extractMessageFromJSON ( queueUrl , column ) ; } messageMap . put ( id , message ) ; } return messageMap ; }
public Map < String , CQSMessage > getMessages ( String queueUrl , List < String > ids ) throws NoSuchAlgorithmException , IOException , JSONException , PersistenceException { Map < String , CQSMessage > messageMap = new HashMap < String , CQSMessage > ( ) ; logger . debug ( "event=get_messages ids=" + ids ) ; if ( ids == null || ids . size ( ) == 0 ) { return messageMap ; } else if ( ids . size ( ) > 100 ) { return getMessagesBulk ( queueUrl , ids ) ; } for ( String id : ids ) { String [ ] idParts = id . split ( ":" ) ; if ( idParts . length != 3 ) { logger . error ( "event=get_messages error_code=invalid_message_id id=" + id ) ; throw new IllegalArgumentException ( "Invalid message id " + id ) ; } CmbComposite columnName = cassandraHandler . getCmbComposite ( Arrays . asList ( Long . parseLong ( idParts [ 1 ] ) , Long . parseLong ( idParts [ 2 ] ) ) ) ; CmbColumn < CmbComposite , String > column = cassandraHandler . readColumn ( AbstractDurablePersistence . CQS_KEYSPACE , COLUMN_FAMILY_PARTITIONED_QUEUE_MESSAGES , idParts [ 0 ] , columnName , CMB_SERIALIZER . STRING_SERIALIZER , CMB_SERIALIZER . COMPOSITE_SERIALIZER , CMB_SERIALIZER . STRING_SERIALIZER ) ; CQSMessage message = null ; if ( column != null ) { message = extractMessageFromJSON ( queueUrl , column ) ; } messageMap . put ( id , message ) ; } return messageMap ; }
145
protected AuthenticationResult getAccessToken ( final AuthorizationCode authorizationCode , final String currentUri ) { final String authority = getAuthority ( ) + getTenant ( ) + "/" ; final String authCode = authorizationCode . getValue ( ) ; if ( logger . isDebugEnabled ( ) ) { } final ClientCredential credential = new ClientCredential ( getClientId ( ) , getClientSecret ( ) ) ; ExecutorService service = null ; try { service = Executors . newFixedThreadPool ( 1 ) ; final AuthenticationContext context = new AuthenticationContext ( authority , true , service ) ; final Future < AuthenticationResult > future = context . acquireTokenByAuthorizationCode ( authCode , new URI ( currentUri ) , credential , null ) ; final AuthenticationResult result = future . get ( acquisitionTimeout , TimeUnit . MILLISECONDS ) ; if ( result == null ) { throw new SsoLoginException ( "authentication result was null" ) ; } return result ; } catch ( final Exception e ) { throw new SsoLoginException ( "Failed to get a token." , e ) ; } finally { if ( service != null ) { service . shutdown ( ) ; } } }
protected AuthenticationResult getAccessToken ( final AuthorizationCode authorizationCode , final String currentUri ) { final String authority = getAuthority ( ) + getTenant ( ) + "/" ; final String authCode = authorizationCode . getValue ( ) ; if ( logger . isDebugEnabled ( ) ) { logger . debug ( "authCode: {}, authority: {}, uri: {}" , authCode , authority , currentUri ) ; } final ClientCredential credential = new ClientCredential ( getClientId ( ) , getClientSecret ( ) ) ; ExecutorService service = null ; try { service = Executors . newFixedThreadPool ( 1 ) ; final AuthenticationContext context = new AuthenticationContext ( authority , true , service ) ; final Future < AuthenticationResult > future = context . acquireTokenByAuthorizationCode ( authCode , new URI ( currentUri ) , credential , null ) ; final AuthenticationResult result = future . get ( acquisitionTimeout , TimeUnit . MILLISECONDS ) ; if ( result == null ) { throw new SsoLoginException ( "authentication result was null" ) ; } return result ; } catch ( final Exception e ) { throw new SsoLoginException ( "Failed to get a token." , e ) ; } finally { if ( service != null ) { service . shutdown ( ) ; } } }
146
@ Test public void testRead ( ) throws Exception { ExecutorService executeService = Executors . newFixedThreadPool ( 2 ) ; ReaderWriterLock lock = new ReaderWriterLock ( ) ; Reader reader1 = spy ( new Reader ( "Reader 1" , lock . readLock ( ) ) ) ; Reader reader2 = spy ( new Reader ( "Reader 2" , lock . readLock ( ) ) ) ; executeService . submit ( reader1 ) ; Thread . sleep ( 150 ) ; executeService . submit ( reader2 ) ; executeService . shutdown ( ) ; try { executeService . awaitTermination ( 10 , TimeUnit . SECONDS ) ; } catch ( InterruptedException e ) { } assertTrue ( appender . logContains ( "Reader 1 begin" ) ) ; assertTrue ( appender . logContains ( "Reader 2 begin" ) ) ; assertTrue ( appender . logContains ( "Reader 1 finish" ) ) ; assertTrue ( appender . logContains ( "Reader 2 finish" ) ) ; }
@ Test public void testRead ( ) throws Exception { ExecutorService executeService = Executors . newFixedThreadPool ( 2 ) ; ReaderWriterLock lock = new ReaderWriterLock ( ) ; Reader reader1 = spy ( new Reader ( "Reader 1" , lock . readLock ( ) ) ) ; Reader reader2 = spy ( new Reader ( "Reader 2" , lock . readLock ( ) ) ) ; executeService . submit ( reader1 ) ; Thread . sleep ( 150 ) ; executeService . submit ( reader2 ) ; executeService . shutdown ( ) ; try { executeService . awaitTermination ( 10 , TimeUnit . SECONDS ) ; } catch ( InterruptedException e ) { LOGGER . error ( "Error waiting for ExecutorService shutdown" , e ) ; } assertTrue ( appender . logContains ( "Reader 1 begin" ) ) ; assertTrue ( appender . logContains ( "Reader 2 begin" ) ) ; assertTrue ( appender . logContains ( "Reader 1 finish" ) ) ; assertTrue ( appender . logContains ( "Reader 2 finish" ) ) ; }
147
protected void stopServing ( ) throws Exception { if ( isServing ( ) ) { if ( ! mGrpcServer . shutdown ( ) ) { } } if ( mRPCExecutor != null ) { mRPCExecutor . shutdownNow ( ) ; try { mRPCExecutor . awaitTermination ( ServerConfiguration . getMs ( PropertyKey . NETWORK_CONNECTION_SERVER_SHUTDOWN_TIMEOUT ) , TimeUnit . MILLISECONDS ) ; } catch ( InterruptedException ie ) { Thread . currentThread ( ) . interrupt ( ) ; } } if ( mJvmPauseMonitor != null ) { mJvmPauseMonitor . stop ( ) ; } if ( mWebServer != null ) { mWebServer . stop ( ) ; mWebServer = null ; } MetricsSystem . stopSinks ( ) ; }
protected void stopServing ( ) throws Exception { if ( isServing ( ) ) { if ( ! mGrpcServer . shutdown ( ) ) { LOG . warn ( "Alluxio master RPC server shutdown timed out." ) ; } } if ( mRPCExecutor != null ) { mRPCExecutor . shutdownNow ( ) ; try { mRPCExecutor . awaitTermination ( ServerConfiguration . getMs ( PropertyKey . NETWORK_CONNECTION_SERVER_SHUTDOWN_TIMEOUT ) , TimeUnit . MILLISECONDS ) ; } catch ( InterruptedException ie ) { Thread . currentThread ( ) . interrupt ( ) ; } } if ( mJvmPauseMonitor != null ) { mJvmPauseMonitor . stop ( ) ; } if ( mWebServer != null ) { mWebServer . stop ( ) ; mWebServer = null ; } MetricsSystem . stopSinks ( ) ; }
148
public Properties getJndiEnv ( ) throws NamingException { Properties jndiEnv = new Properties ( ) ; if ( StringUtils . isNotEmpty ( getJndiProperties ( ) ) ) { URL url = ClassUtils . getResourceURL ( this , getJndiProperties ( ) ) ; if ( url == null ) { throw new NamingException ( "cannot find jndiProperties from [" + getJndiProperties ( ) + "]" ) ; } try { jndiEnv . load ( url . openStream ( ) ) ; } catch ( IOException e ) { throw new NamingException ( "cannot load jndiProperties [" + getJndiProperties ( ) + "] from url [" + url . toString ( ) + "]" ) ; } } if ( getInitialContextFactoryName ( ) != null ) jndiEnv . put ( Context . INITIAL_CONTEXT_FACTORY , getInitialContextFactoryName ( ) ) ; if ( getProviderURL ( ) != null ) jndiEnv . put ( Context . PROVIDER_URL , getProviderURL ( ) ) ; if ( getAuthentication ( ) != null ) jndiEnv . put ( Context . SECURITY_AUTHENTICATION , getAuthentication ( ) ) ; if ( getPrincipal ( ) != null || getCredentials ( ) != null || getJndiAuthAlias ( ) != null ) { CredentialFactory jndiCf = new CredentialFactory ( getJndiAuthAlias ( ) , getPrincipal ( ) , getCredentials ( ) ) ; if ( StringUtils . isNotEmpty ( jndiCf . getUsername ( ) ) ) jndiEnv . put ( Context . SECURITY_PRINCIPAL , jndiCf . getUsername ( ) ) ; if ( StringUtils . isNotEmpty ( jndiCf . getPassword ( ) ) ) jndiEnv . put ( Context . SECURITY_CREDENTIALS , jndiCf . getPassword ( ) ) ; } if ( getUrlPkgPrefixes ( ) != null ) jndiEnv . put ( Context . URL_PKG_PREFIXES , getUrlPkgPrefixes ( ) ) ; if ( getSecurityProtocol ( ) != null ) jndiEnv . put ( Context . SECURITY_PROTOCOL , getSecurityProtocol ( ) ) ; if ( log . isDebugEnabled ( ) ) { for ( Iterator it = jndiEnv . keySet ( ) . iterator ( ) ; it . hasNext ( ) ; ) { String key = ( String ) it . next ( ) ; String value = jndiEnv . getProperty ( key ) ; } } return jndiEnv ; }
public Properties getJndiEnv ( ) throws NamingException { Properties jndiEnv = new Properties ( ) ; if ( StringUtils . isNotEmpty ( getJndiProperties ( ) ) ) { URL url = ClassUtils . getResourceURL ( this , getJndiProperties ( ) ) ; if ( url == null ) { throw new NamingException ( "cannot find jndiProperties from [" + getJndiProperties ( ) + "]" ) ; } try { jndiEnv . load ( url . openStream ( ) ) ; } catch ( IOException e ) { throw new NamingException ( "cannot load jndiProperties [" + getJndiProperties ( ) + "] from url [" + url . toString ( ) + "]" ) ; } } if ( getInitialContextFactoryName ( ) != null ) jndiEnv . put ( Context . INITIAL_CONTEXT_FACTORY , getInitialContextFactoryName ( ) ) ; if ( getProviderURL ( ) != null ) jndiEnv . put ( Context . PROVIDER_URL , getProviderURL ( ) ) ; if ( getAuthentication ( ) != null ) jndiEnv . put ( Context . SECURITY_AUTHENTICATION , getAuthentication ( ) ) ; if ( getPrincipal ( ) != null || getCredentials ( ) != null || getJndiAuthAlias ( ) != null ) { CredentialFactory jndiCf = new CredentialFactory ( getJndiAuthAlias ( ) , getPrincipal ( ) , getCredentials ( ) ) ; if ( StringUtils . isNotEmpty ( jndiCf . getUsername ( ) ) ) jndiEnv . put ( Context . SECURITY_PRINCIPAL , jndiCf . getUsername ( ) ) ; if ( StringUtils . isNotEmpty ( jndiCf . getPassword ( ) ) ) jndiEnv . put ( Context . SECURITY_CREDENTIALS , jndiCf . getPassword ( ) ) ; } if ( getUrlPkgPrefixes ( ) != null ) jndiEnv . put ( Context . URL_PKG_PREFIXES , getUrlPkgPrefixes ( ) ) ; if ( getSecurityProtocol ( ) != null ) jndiEnv . put ( Context . SECURITY_PROTOCOL , getSecurityProtocol ( ) ) ; if ( log . isDebugEnabled ( ) ) { for ( Iterator it = jndiEnv . keySet ( ) . iterator ( ) ; it . hasNext ( ) ; ) { String key = ( String ) it . next ( ) ; String value = jndiEnv . getProperty ( key ) ; log . debug ( "jndiEnv [" + key + "] = [" + value + "]" ) ; } } return jndiEnv ; }
149
public ApiUser getApiUserByName ( String name ) { try { MapSqlParameterSource params = new MapSqlParameterSource ( ) ; params . addValue ( "name" , name ) ; List < ApiUser > apiUserList = baseDao . geoApiNamedJbdcTemaplate . query ( ApiUserQuery . GET_API_USER_BY_NAME . getSql ( baseDao . getPublicSchema ( ) ) , params , new ApiUserHandler ( ) ) ; if ( apiUserList != null ) { return apiUserList . get ( 0 ) ; } } catch ( Exception sqlEx ) { logger . error ( sqlEx . getMessage ( ) ) ; } return null ; }
public ApiUser getApiUserByName ( String name ) { try { MapSqlParameterSource params = new MapSqlParameterSource ( ) ; params . addValue ( "name" , name ) ; List < ApiUser > apiUserList = baseDao . geoApiNamedJbdcTemaplate . query ( ApiUserQuery . GET_API_USER_BY_NAME . getSql ( baseDao . getPublicSchema ( ) ) , params , new ApiUserHandler ( ) ) ; if ( apiUserList != null ) { return apiUserList . get ( 0 ) ; } } catch ( Exception sqlEx ) { logger . error ( "Failed to get ApiUser by name in ApiUserDAO!" ) ; logger . error ( sqlEx . getMessage ( ) ) ; } return null ; }
150
public ApiUser getApiUserByName ( String name ) { try { MapSqlParameterSource params = new MapSqlParameterSource ( ) ; params . addValue ( "name" , name ) ; List < ApiUser > apiUserList = baseDao . geoApiNamedJbdcTemaplate . query ( ApiUserQuery . GET_API_USER_BY_NAME . getSql ( baseDao . getPublicSchema ( ) ) , params , new ApiUserHandler ( ) ) ; if ( apiUserList != null ) { return apiUserList . get ( 0 ) ; } } catch ( Exception sqlEx ) { logger . error ( "Failed to get ApiUser by name in ApiUserDAO!" ) ; } return null ; }
public ApiUser getApiUserByName ( String name ) { try { MapSqlParameterSource params = new MapSqlParameterSource ( ) ; params . addValue ( "name" , name ) ; List < ApiUser > apiUserList = baseDao . geoApiNamedJbdcTemaplate . query ( ApiUserQuery . GET_API_USER_BY_NAME . getSql ( baseDao . getPublicSchema ( ) ) , params , new ApiUserHandler ( ) ) ; if ( apiUserList != null ) { return apiUserList . get ( 0 ) ; } } catch ( Exception sqlEx ) { logger . error ( "Failed to get ApiUser by name in ApiUserDAO!" ) ; logger . error ( sqlEx . getMessage ( ) ) ; } return null ; }
151
public void setVirtualVolumeVplexClusterName ( String virtualVolumeVplexClusterName ) { this . _virtualVolumeVplexClusterName = virtualVolumeVplexClusterName ; }
public void setVirtualVolumeVplexClusterName ( String virtualVolumeVplexClusterName ) { _logger . info ( "setting virtual volume VPLEX cluster name to " + virtualVolumeVplexClusterName ) ; this . _virtualVolumeVplexClusterName = virtualVolumeVplexClusterName ; }
152
public void removeConfig ( String configCode ) { try { this . getKieFormManager ( ) . deleteConfig ( configCode ) ; } catch ( Exception t ) { throw new RestServerError ( "error in delete configuration" , t ) ; } }
public void removeConfig ( String configCode ) { try { this . getKieFormManager ( ) . deleteConfig ( configCode ) ; } catch ( Exception t ) { logger . error ( "error in delete configuration" , t ) ; throw new RestServerError ( "error in delete configuration" , t ) ; } }
153
public void compact ( ) { final RocksDB db = getDb ( true ) ; if ( db == null ) { return ; } try { db . compactRange ( ) ; } catch ( final RocksDBException e ) { } }
public void compact ( ) { final RocksDB db = getDb ( true ) ; if ( db == null ) { return ; } try { db . compactRange ( ) ; } catch ( final RocksDBException e ) { LOGGER . warn ( "Unable to force compacting range" , e ) ; } }
154
public static com . liferay . commerce . bom . model . CommerceBOMFolderApplicationRelSoap [ ] getCommerceBOMFolderApplicationRelsByCommerceBOMFolderId ( long commerceBOMFolderId , int start , int end ) throws RemoteException { try { java . util . List < com . liferay . commerce . bom . model . CommerceBOMFolderApplicationRel > returnValue = CommerceBOMFolderApplicationRelServiceUtil . getCommerceBOMFolderApplicationRelsByCommerceBOMFolderId ( commerceBOMFolderId , start , end ) ; return com . liferay . commerce . bom . model . CommerceBOMFolderApplicationRelSoap . toSoapModels ( returnValue ) ; } catch ( Exception exception ) { throw new RemoteException ( exception . getMessage ( ) ) ; } }
public static com . liferay . commerce . bom . model . CommerceBOMFolderApplicationRelSoap [ ] getCommerceBOMFolderApplicationRelsByCommerceBOMFolderId ( long commerceBOMFolderId , int start , int end ) throws RemoteException { try { java . util . List < com . liferay . commerce . bom . model . CommerceBOMFolderApplicationRel > returnValue = CommerceBOMFolderApplicationRelServiceUtil . getCommerceBOMFolderApplicationRelsByCommerceBOMFolderId ( commerceBOMFolderId , start , end ) ; return com . liferay . commerce . bom . model . CommerceBOMFolderApplicationRelSoap . toSoapModels ( returnValue ) ; } catch ( Exception exception ) { _log . error ( exception , exception ) ; throw new RemoteException ( exception . getMessage ( ) ) ; } }
155
private void checkModules ( OldDBStructure dbStructure ) { String droppedModules = "" ; for ( String moduleName : dbStructure . modulesList ) if ( businessLogics . getSysModule ( moduleName ) == null ) { droppedModules += moduleName + ", " ; } if ( denyDropModules && ! droppedModules . isEmpty ( ) ) throw new RuntimeException ( "Dropped modules: " + droppedModules . substring ( 0 , droppedModules . length ( ) - 2 ) ) ; }
private void checkModules ( OldDBStructure dbStructure ) { String droppedModules = "" ; for ( String moduleName : dbStructure . modulesList ) if ( businessLogics . getSysModule ( moduleName ) == null ) { startLogger . info ( "Module " + moduleName + " has been dropped" ) ; droppedModules += moduleName + ", " ; } if ( denyDropModules && ! droppedModules . isEmpty ( ) ) throw new RuntimeException ( "Dropped modules: " + droppedModules . substring ( 0 , droppedModules . length ( ) - 2 ) ) ; }
156
public boolean disableIOService ( ) { boolean disabled = false ; if ( this . cc2650 ) { byte [ ] value = { 0x00 } ; try { this . gattResources . get ( IO ) . getGattService ( ) . findCharacteristic ( TiSensorTagGatt . UUID_IO_SENSOR_ENABLE ) . writeValue ( value ) ; disabled = true ; } catch ( KuraException e ) { } } else { logger . info ( IO_ERROR_MESSAGE ) ; } return disabled ; }
public boolean disableIOService ( ) { boolean disabled = false ; if ( this . cc2650 ) { byte [ ] value = { 0x00 } ; try { this . gattResources . get ( IO ) . getGattService ( ) . findCharacteristic ( TiSensorTagGatt . UUID_IO_SENSOR_ENABLE ) . writeValue ( value ) ; disabled = true ; } catch ( KuraException e ) { logger . error ( "IO Service enable failed" , e ) ; } } else { logger . info ( IO_ERROR_MESSAGE ) ; } return disabled ; }
157
public boolean disableIOService ( ) { boolean disabled = false ; if ( this . cc2650 ) { byte [ ] value = { 0x00 } ; try { this . gattResources . get ( IO ) . getGattService ( ) . findCharacteristic ( TiSensorTagGatt . UUID_IO_SENSOR_ENABLE ) . writeValue ( value ) ; disabled = true ; } catch ( KuraException e ) { logger . error ( "IO Service enable failed" , e ) ; } } else { } return disabled ; }
public boolean disableIOService ( ) { boolean disabled = false ; if ( this . cc2650 ) { byte [ ] value = { 0x00 } ; try { this . gattResources . get ( IO ) . getGattService ( ) . findCharacteristic ( TiSensorTagGatt . UUID_IO_SENSOR_ENABLE ) . writeValue ( value ) ; disabled = true ; } catch ( KuraException e ) { logger . error ( "IO Service enable failed" , e ) ; } } else { logger . info ( IO_ERROR_MESSAGE ) ; } return disabled ; }
158
public void findAndInit ( Object obj ) { super . findAndInit ( obj ) ; if ( obj instanceof DynamicPredictorHandlerCommon ) { ( ( DynamicPredictorHandlerCommon ) obj ) . addListener ( this ) ; } }
public void findAndInit ( Object obj ) { super . findAndInit ( obj ) ; if ( obj instanceof DynamicPredictorHandlerCommon ) { LOG . debug ( "Found dynamic predictor handler" ) ; ( ( DynamicPredictorHandlerCommon ) obj ) . addListener ( this ) ; } }
159
public void startStream ( ) { task = executor . submit ( new S3PersistReaderTask ( this ) ) ; }
public void startStream ( ) { LOGGER . debug ( "startStream" ) ; task = executor . submit ( new S3PersistReaderTask ( this ) ) ; }
160
public IngestStatus handleFile ( File product ) { if ( ! passesPreconditions ( product ) ) { LOG . log ( Level . WARNING , "Failed to pass preconditions for ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; return createIngestStatus ( product , IngestStatus . Result . PRECONDS_FAILED , "Failed to pass preconditions" ) ; } Metadata productMetadata = new Metadata ( ) ; productMetadata . addMetadata ( getGlobalMetadata ( ) ) ; try { productMetadata . replaceMetadata ( getMetadataForProduct ( product ) ) ; } catch ( Exception e ) { LOG . log ( Level . SEVERE , "Failed to get metadata for product : " + e . getMessage ( ) , e ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to get metadata for product : " + e . getMessage ( ) ) ; } try { product = renameProduct ( product , productMetadata ) ; } catch ( Exception e ) { LOG . log ( Level . SEVERE , "Failed to rename product : " + e . getMessage ( ) , e ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to rename product : " + e . getMessage ( ) ) ; } addKnownMetadata ( product , productMetadata ) ; if ( ! containsRequiredMetadata ( productMetadata ) ) { LOG . log ( Level . SEVERE , "Missing required metadata for product '" + product + "'" ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Missing required metadata" ) ; } if ( ! performPreIngestActions ( product , productMetadata ) ) { performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "PreIngest actions failed to complete" ) ; } if ( isSkipIngest ( ) ) { LOG . log ( Level . INFO , "Skipping ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; return createIngestStatus ( product , IngestStatus . Result . SKIPPED , "Crawler ingest turned OFF" ) ; } boolean ingestSuccess = ingest ( product , productMetadata ) ; if ( ingestSuccess ) { LOG . log ( Level . INFO , "Successful ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; performPostIngestOnSuccessActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . SUCCESS , "Ingest was successful" ) ; } else { LOG . log ( Level . WARNING , "Failed to ingest product: [" + product . getAbsolutePath ( ) + "]: performing postIngestFail actions" ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to ingest product" ) ; } }
public IngestStatus handleFile ( File product ) { LOG . log ( Level . INFO , "Handling file " + product ) ; if ( ! passesPreconditions ( product ) ) { LOG . log ( Level . WARNING , "Failed to pass preconditions for ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; return createIngestStatus ( product , IngestStatus . Result . PRECONDS_FAILED , "Failed to pass preconditions" ) ; } Metadata productMetadata = new Metadata ( ) ; productMetadata . addMetadata ( getGlobalMetadata ( ) ) ; try { productMetadata . replaceMetadata ( getMetadataForProduct ( product ) ) ; } catch ( Exception e ) { LOG . log ( Level . SEVERE , "Failed to get metadata for product : " + e . getMessage ( ) , e ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to get metadata for product : " + e . getMessage ( ) ) ; } try { product = renameProduct ( product , productMetadata ) ; } catch ( Exception e ) { LOG . log ( Level . SEVERE , "Failed to rename product : " + e . getMessage ( ) , e ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to rename product : " + e . getMessage ( ) ) ; } addKnownMetadata ( product , productMetadata ) ; if ( ! containsRequiredMetadata ( productMetadata ) ) { LOG . log ( Level . SEVERE , "Missing required metadata for product '" + product + "'" ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Missing required metadata" ) ; } if ( ! performPreIngestActions ( product , productMetadata ) ) { performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "PreIngest actions failed to complete" ) ; } if ( isSkipIngest ( ) ) { LOG . log ( Level . INFO , "Skipping ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; return createIngestStatus ( product , IngestStatus . Result . SKIPPED , "Crawler ingest turned OFF" ) ; } boolean ingestSuccess = ingest ( product , productMetadata ) ; if ( ingestSuccess ) { LOG . log ( Level . INFO , "Successful ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; performPostIngestOnSuccessActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . SUCCESS , "Ingest was successful" ) ; } else { LOG . log ( Level . WARNING , "Failed to ingest product: [" + product . getAbsolutePath ( ) + "]: performing postIngestFail actions" ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to ingest product" ) ; } }
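This record and the five that follow (indices 160–165) mask one java.util.logging call at a time out of the same handleFile method. One detail worth noting in the targets: passing the Throwable as the last argument of LOG.log records the full stack trace, while concatenating e.getMessage() into the message alone does not. A minimal standalone sketch of that distinction (class and method names are illustrative):

import java.util.logging.Level;
import java.util.logging.Logger;

public class JulSketch {
    private static final Logger LOG = Logger.getLogger(JulSketch.class.getName());

    void step(Runnable action) {
        try {
            action.run();
        } catch (RuntimeException e) {
            // Message plus Throwable: handlers print the exception and its trace,
            // not just whatever e.getMessage() happens to contain.
            LOG.log(Level.SEVERE, "Failed to get metadata for product : " + e.getMessage(), e);
        }
    }
}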
161
public IngestStatus handleFile ( File product ) { LOG . log ( Level . INFO , "Handling file " + product ) ; if ( ! passesPreconditions ( product ) ) { return createIngestStatus ( product , IngestStatus . Result . PRECONDS_FAILED , "Failed to pass preconditions" ) ; } Metadata productMetadata = new Metadata ( ) ; productMetadata . addMetadata ( getGlobalMetadata ( ) ) ; try { productMetadata . replaceMetadata ( getMetadataForProduct ( product ) ) ; } catch ( Exception e ) { LOG . log ( Level . SEVERE , "Failed to get metadata for product : " + e . getMessage ( ) , e ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to get metadata for product : " + e . getMessage ( ) ) ; } try { product = renameProduct ( product , productMetadata ) ; } catch ( Exception e ) { LOG . log ( Level . SEVERE , "Failed to rename product : " + e . getMessage ( ) , e ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to rename product : " + e . getMessage ( ) ) ; } addKnownMetadata ( product , productMetadata ) ; if ( ! containsRequiredMetadata ( productMetadata ) ) { LOG . log ( Level . SEVERE , "Missing required metadata for product '" + product + "'" ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Missing required metadata" ) ; } if ( ! performPreIngestActions ( product , productMetadata ) ) { performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "PreIngest actions failed to complete" ) ; } if ( isSkipIngest ( ) ) { LOG . log ( Level . INFO , "Skipping ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; return createIngestStatus ( product , IngestStatus . Result . SKIPPED , "Crawler ingest turned OFF" ) ; } boolean ingestSuccess = ingest ( product , productMetadata ) ; if ( ingestSuccess ) { LOG . log ( Level . INFO , "Successful ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; performPostIngestOnSuccessActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . SUCCESS , "Ingest was successful" ) ; } else { LOG . log ( Level . WARNING , "Failed to ingest product: [" + product . getAbsolutePath ( ) + "]: performing postIngestFail actions" ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to ingest product" ) ; } }
public IngestStatus handleFile ( File product ) { LOG . log ( Level . INFO , "Handling file " + product ) ; if ( ! passesPreconditions ( product ) ) { LOG . log ( Level . WARNING , "Failed to pass preconditions for ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; return createIngestStatus ( product , IngestStatus . Result . PRECONDS_FAILED , "Failed to pass preconditions" ) ; } Metadata productMetadata = new Metadata ( ) ; productMetadata . addMetadata ( getGlobalMetadata ( ) ) ; try { productMetadata . replaceMetadata ( getMetadataForProduct ( product ) ) ; } catch ( Exception e ) { LOG . log ( Level . SEVERE , "Failed to get metadata for product : " + e . getMessage ( ) , e ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to get metadata for product : " + e . getMessage ( ) ) ; } try { product = renameProduct ( product , productMetadata ) ; } catch ( Exception e ) { LOG . log ( Level . SEVERE , "Failed to rename product : " + e . getMessage ( ) , e ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to rename product : " + e . getMessage ( ) ) ; } addKnownMetadata ( product , productMetadata ) ; if ( ! containsRequiredMetadata ( productMetadata ) ) { LOG . log ( Level . SEVERE , "Missing required metadata for product '" + product + "'" ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Missing required metadata" ) ; } if ( ! performPreIngestActions ( product , productMetadata ) ) { performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "PreIngest actions failed to complete" ) ; } if ( isSkipIngest ( ) ) { LOG . log ( Level . INFO , "Skipping ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; return createIngestStatus ( product , IngestStatus . Result . SKIPPED , "Crawler ingest turned OFF" ) ; } boolean ingestSuccess = ingest ( product , productMetadata ) ; if ( ingestSuccess ) { LOG . log ( Level . INFO , "Successful ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; performPostIngestOnSuccessActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . SUCCESS , "Ingest was successful" ) ; } else { LOG . log ( Level . WARNING , "Failed to ingest product: [" + product . getAbsolutePath ( ) + "]: performing postIngestFail actions" ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to ingest product" ) ; } }
162
public IngestStatus handleFile ( File product ) { LOG . log ( Level . INFO , "Handling file " + product ) ; if ( ! passesPreconditions ( product ) ) { LOG . log ( Level . WARNING , "Failed to pass preconditions for ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; return createIngestStatus ( product , IngestStatus . Result . PRECONDS_FAILED , "Failed to pass preconditions" ) ; } Metadata productMetadata = new Metadata ( ) ; productMetadata . addMetadata ( getGlobalMetadata ( ) ) ; try { productMetadata . replaceMetadata ( getMetadataForProduct ( product ) ) ; } catch ( Exception e ) { performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to get metadata for product : " + e . getMessage ( ) ) ; } try { product = renameProduct ( product , productMetadata ) ; } catch ( Exception e ) { LOG . log ( Level . SEVERE , "Failed to rename product : " + e . getMessage ( ) , e ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to rename product : " + e . getMessage ( ) ) ; } addKnownMetadata ( product , productMetadata ) ; if ( ! containsRequiredMetadata ( productMetadata ) ) { LOG . log ( Level . SEVERE , "Missing required metadata for product '" + product + "'" ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Missing required metadata" ) ; } if ( ! performPreIngestActions ( product , productMetadata ) ) { performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "PreIngest actions failed to complete" ) ; } if ( isSkipIngest ( ) ) { LOG . log ( Level . INFO , "Skipping ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; return createIngestStatus ( product , IngestStatus . Result . SKIPPED , "Crawler ingest turned OFF" ) ; } boolean ingestSuccess = ingest ( product , productMetadata ) ; if ( ingestSuccess ) { LOG . log ( Level . INFO , "Successful ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; performPostIngestOnSuccessActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . SUCCESS , "Ingest was successful" ) ; } else { LOG . log ( Level . WARNING , "Failed to ingest product: [" + product . getAbsolutePath ( ) + "]: performing postIngestFail actions" ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to ingest product" ) ; } }
public IngestStatus handleFile ( File product ) { LOG . log ( Level . INFO , "Handling file " + product ) ; if ( ! passesPreconditions ( product ) ) { LOG . log ( Level . WARNING , "Failed to pass preconditions for ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; return createIngestStatus ( product , IngestStatus . Result . PRECONDS_FAILED , "Failed to pass preconditions" ) ; } Metadata productMetadata = new Metadata ( ) ; productMetadata . addMetadata ( getGlobalMetadata ( ) ) ; try { productMetadata . replaceMetadata ( getMetadataForProduct ( product ) ) ; } catch ( Exception e ) { LOG . log ( Level . SEVERE , "Failed to get metadata for product : " + e . getMessage ( ) , e ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to get metadata for product : " + e . getMessage ( ) ) ; } try { product = renameProduct ( product , productMetadata ) ; } catch ( Exception e ) { LOG . log ( Level . SEVERE , "Failed to rename product : " + e . getMessage ( ) , e ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to rename product : " + e . getMessage ( ) ) ; } addKnownMetadata ( product , productMetadata ) ; if ( ! containsRequiredMetadata ( productMetadata ) ) { LOG . log ( Level . SEVERE , "Missing required metadata for product '" + product + "'" ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Missing required metadata" ) ; } if ( ! performPreIngestActions ( product , productMetadata ) ) { performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "PreIngest actions failed to complete" ) ; } if ( isSkipIngest ( ) ) { LOG . log ( Level . INFO , "Skipping ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; return createIngestStatus ( product , IngestStatus . Result . SKIPPED , "Crawler ingest turned OFF" ) ; } boolean ingestSuccess = ingest ( product , productMetadata ) ; if ( ingestSuccess ) { LOG . log ( Level . INFO , "Successful ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; performPostIngestOnSuccessActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . SUCCESS , "Ingest was successful" ) ; } else { LOG . log ( Level . WARNING , "Failed to ingest product: [" + product . getAbsolutePath ( ) + "]: performing postIngestFail actions" ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to ingest product" ) ; } }
163
public IngestStatus handleFile ( File product ) { LOG . log ( Level . INFO , "Handling file " + product ) ; if ( ! passesPreconditions ( product ) ) { LOG . log ( Level . WARNING , "Failed to pass preconditions for ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; return createIngestStatus ( product , IngestStatus . Result . PRECONDS_FAILED , "Failed to pass preconditions" ) ; } Metadata productMetadata = new Metadata ( ) ; productMetadata . addMetadata ( getGlobalMetadata ( ) ) ; try { productMetadata . replaceMetadata ( getMetadataForProduct ( product ) ) ; } catch ( Exception e ) { LOG . log ( Level . SEVERE , "Failed to get metadata for product : " + e . getMessage ( ) , e ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to get metadata for product : " + e . getMessage ( ) ) ; } try { product = renameProduct ( product , productMetadata ) ; } catch ( Exception e ) { performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to rename product : " + e . getMessage ( ) ) ; } addKnownMetadata ( product , productMetadata ) ; if ( ! containsRequiredMetadata ( productMetadata ) ) { LOG . log ( Level . SEVERE , "Missing required metadata for product '" + product + "'" ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Missing required metadata" ) ; } if ( ! performPreIngestActions ( product , productMetadata ) ) { performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "PreIngest actions failed to complete" ) ; } if ( isSkipIngest ( ) ) { LOG . log ( Level . INFO , "Skipping ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; return createIngestStatus ( product , IngestStatus . Result . SKIPPED , "Crawler ingest turned OFF" ) ; } boolean ingestSuccess = ingest ( product , productMetadata ) ; if ( ingestSuccess ) { LOG . log ( Level . INFO , "Successful ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; performPostIngestOnSuccessActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . SUCCESS , "Ingest was successful" ) ; } else { LOG . log ( Level . WARNING , "Failed to ingest product: [" + product . getAbsolutePath ( ) + "]: performing postIngestFail actions" ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to ingest product" ) ; } }
public IngestStatus handleFile ( File product ) { LOG . log ( Level . INFO , "Handling file " + product ) ; if ( ! passesPreconditions ( product ) ) { LOG . log ( Level . WARNING , "Failed to pass preconditions for ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; return createIngestStatus ( product , IngestStatus . Result . PRECONDS_FAILED , "Failed to pass preconditions" ) ; } Metadata productMetadata = new Metadata ( ) ; productMetadata . addMetadata ( getGlobalMetadata ( ) ) ; try { productMetadata . replaceMetadata ( getMetadataForProduct ( product ) ) ; } catch ( Exception e ) { LOG . log ( Level . SEVERE , "Failed to get metadata for product : " + e . getMessage ( ) , e ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to get metadata for product : " + e . getMessage ( ) ) ; } try { product = renameProduct ( product , productMetadata ) ; } catch ( Exception e ) { LOG . log ( Level . SEVERE , "Failed to rename product : " + e . getMessage ( ) , e ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to rename product : " + e . getMessage ( ) ) ; } addKnownMetadata ( product , productMetadata ) ; if ( ! containsRequiredMetadata ( productMetadata ) ) { LOG . log ( Level . SEVERE , "Missing required metadata for product '" + product + "'" ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Missing required metadata" ) ; } if ( ! performPreIngestActions ( product , productMetadata ) ) { performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "PreIngest actions failed to complete" ) ; } if ( isSkipIngest ( ) ) { LOG . log ( Level . INFO , "Skipping ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; return createIngestStatus ( product , IngestStatus . Result . SKIPPED , "Crawler ingest turned OFF" ) ; } boolean ingestSuccess = ingest ( product , productMetadata ) ; if ( ingestSuccess ) { LOG . log ( Level . INFO , "Successful ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; performPostIngestOnSuccessActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . SUCCESS , "Ingest was successful" ) ; } else { LOG . log ( Level . WARNING , "Failed to ingest product: [" + product . getAbsolutePath ( ) + "]: performing postIngestFail actions" ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to ingest product" ) ; } }
164
public IngestStatus handleFile ( File product ) { LOG . log ( Level . INFO , "Handling file " + product ) ; if ( ! passesPreconditions ( product ) ) { LOG . log ( Level . WARNING , "Failed to pass preconditions for ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; return createIngestStatus ( product , IngestStatus . Result . PRECONDS_FAILED , "Failed to pass preconditions" ) ; } Metadata productMetadata = new Metadata ( ) ; productMetadata . addMetadata ( getGlobalMetadata ( ) ) ; try { productMetadata . replaceMetadata ( getMetadataForProduct ( product ) ) ; } catch ( Exception e ) { LOG . log ( Level . SEVERE , "Failed to get metadata for product : " + e . getMessage ( ) , e ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to get metadata for product : " + e . getMessage ( ) ) ; } try { product = renameProduct ( product , productMetadata ) ; } catch ( Exception e ) { LOG . log ( Level . SEVERE , "Failed to rename product : " + e . getMessage ( ) , e ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to rename product : " + e . getMessage ( ) ) ; } addKnownMetadata ( product , productMetadata ) ; if ( ! containsRequiredMetadata ( productMetadata ) ) { performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Missing required metadata" ) ; } if ( ! performPreIngestActions ( product , productMetadata ) ) { performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "PreIngest actions failed to complete" ) ; } if ( isSkipIngest ( ) ) { LOG . log ( Level . INFO , "Skipping ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; return createIngestStatus ( product , IngestStatus . Result . SKIPPED , "Crawler ingest turned OFF" ) ; } boolean ingestSuccess = ingest ( product , productMetadata ) ; if ( ingestSuccess ) { LOG . log ( Level . INFO , "Successful ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; performPostIngestOnSuccessActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . SUCCESS , "Ingest was successful" ) ; } else { LOG . log ( Level . WARNING , "Failed to ingest product: [" + product . getAbsolutePath ( ) + "]: performing postIngestFail actions" ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to ingest product" ) ; } }
public IngestStatus handleFile ( File product ) { LOG . log ( Level . INFO , "Handling file " + product ) ; if ( ! passesPreconditions ( product ) ) { LOG . log ( Level . WARNING , "Failed to pass preconditions for ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; return createIngestStatus ( product , IngestStatus . Result . PRECONDS_FAILED , "Failed to pass preconditions" ) ; } Metadata productMetadata = new Metadata ( ) ; productMetadata . addMetadata ( getGlobalMetadata ( ) ) ; try { productMetadata . replaceMetadata ( getMetadataForProduct ( product ) ) ; } catch ( Exception e ) { LOG . log ( Level . SEVERE , "Failed to get metadata for product : " + e . getMessage ( ) , e ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to get metadata for product : " + e . getMessage ( ) ) ; } try { product = renameProduct ( product , productMetadata ) ; } catch ( Exception e ) { LOG . log ( Level . SEVERE , "Failed to rename product : " + e . getMessage ( ) , e ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to rename product : " + e . getMessage ( ) ) ; } addKnownMetadata ( product , productMetadata ) ; if ( ! containsRequiredMetadata ( productMetadata ) ) { LOG . log ( Level . SEVERE , "Missing required metadata for product '" + product + "'" ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Missing required metadata" ) ; } if ( ! performPreIngestActions ( product , productMetadata ) ) { performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "PreIngest actions failed to complete" ) ; } if ( isSkipIngest ( ) ) { LOG . log ( Level . INFO , "Skipping ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; return createIngestStatus ( product , IngestStatus . Result . SKIPPED , "Crawler ingest turned OFF" ) ; } boolean ingestSuccess = ingest ( product , productMetadata ) ; if ( ingestSuccess ) { LOG . log ( Level . INFO , "Successful ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; performPostIngestOnSuccessActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . SUCCESS , "Ingest was successful" ) ; } else { LOG . log ( Level . WARNING , "Failed to ingest product: [" + product . getAbsolutePath ( ) + "]: performing postIngestFail actions" ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to ingest product" ) ; } }
165
public IngestStatus handleFile ( File product ) { LOG . log ( Level . INFO , "Handling file " + product ) ; if ( ! passesPreconditions ( product ) ) { LOG . log ( Level . WARNING , "Failed to pass preconditions for ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; return createIngestStatus ( product , IngestStatus . Result . PRECONDS_FAILED , "Failed to pass preconditions" ) ; } Metadata productMetadata = new Metadata ( ) ; productMetadata . addMetadata ( getGlobalMetadata ( ) ) ; try { productMetadata . replaceMetadata ( getMetadataForProduct ( product ) ) ; } catch ( Exception e ) { LOG . log ( Level . SEVERE , "Failed to get metadata for product : " + e . getMessage ( ) , e ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to get metadata for product : " + e . getMessage ( ) ) ; } try { product = renameProduct ( product , productMetadata ) ; } catch ( Exception e ) { LOG . log ( Level . SEVERE , "Failed to rename product : " + e . getMessage ( ) , e ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to rename product : " + e . getMessage ( ) ) ; } addKnownMetadata ( product , productMetadata ) ; if ( ! containsRequiredMetadata ( productMetadata ) ) { LOG . log ( Level . SEVERE , "Missing required metadata for product '" + product + "'" ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Missing required metadata" ) ; } if ( ! performPreIngestActions ( product , productMetadata ) ) { performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "PreIngest actions failed to complete" ) ; } if ( isSkipIngest ( ) ) { return createIngestStatus ( product , IngestStatus . Result . SKIPPED , "Crawler ingest turned OFF" ) ; } boolean ingestSuccess = ingest ( product , productMetadata ) ; if ( ingestSuccess ) { LOG . log ( Level . INFO , "Successful ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; performPostIngestOnSuccessActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . SUCCESS , "Ingest was successful" ) ; } else { LOG . log ( Level . WARNING , "Failed to ingest product: [" + product . getAbsolutePath ( ) + "]: performing postIngestFail actions" ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to ingest product" ) ; } }
public IngestStatus handleFile ( File product ) { LOG . log ( Level . INFO , "Handling file " + product ) ; if ( ! passesPreconditions ( product ) ) { LOG . log ( Level . WARNING , "Failed to pass preconditions for ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; return createIngestStatus ( product , IngestStatus . Result . PRECONDS_FAILED , "Failed to pass preconditions" ) ; } Metadata productMetadata = new Metadata ( ) ; productMetadata . addMetadata ( getGlobalMetadata ( ) ) ; try { productMetadata . replaceMetadata ( getMetadataForProduct ( product ) ) ; } catch ( Exception e ) { LOG . log ( Level . SEVERE , "Failed to get metadata for product : " + e . getMessage ( ) , e ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to get metadata for product : " + e . getMessage ( ) ) ; } try { product = renameProduct ( product , productMetadata ) ; } catch ( Exception e ) { LOG . log ( Level . SEVERE , "Failed to rename product : " + e . getMessage ( ) , e ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to rename product : " + e . getMessage ( ) ) ; } addKnownMetadata ( product , productMetadata ) ; if ( ! containsRequiredMetadata ( productMetadata ) ) { LOG . log ( Level . SEVERE , "Missing required metadata for product '" + product + "'" ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Missing required metadata" ) ; } if ( ! performPreIngestActions ( product , productMetadata ) ) { performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "PreIngest actions failed to complete" ) ; } if ( isSkipIngest ( ) ) { LOG . log ( Level . INFO , "Skipping ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; return createIngestStatus ( product , IngestStatus . Result . SKIPPED , "Crawler ingest turned OFF" ) ; } boolean ingestSuccess = ingest ( product , productMetadata ) ; if ( ingestSuccess ) { LOG . log ( Level . INFO , "Successful ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; performPostIngestOnSuccessActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . SUCCESS , "Ingest was successful" ) ; } else { LOG . log ( Level . WARNING , "Failed to ingest product: [" + product . getAbsolutePath ( ) + "]: performing postIngestFail actions" ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to ingest product" ) ; } }
166
public IngestStatus handleFile ( File product ) { LOG . log ( Level . INFO , "Handling file " + product ) ; if ( ! passesPreconditions ( product ) ) { LOG . log ( Level . WARNING , "Failed to pass preconditions for ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; return createIngestStatus ( product , IngestStatus . Result . PRECONDS_FAILED , "Failed to pass preconditions" ) ; } Metadata productMetadata = new Metadata ( ) ; productMetadata . addMetadata ( getGlobalMetadata ( ) ) ; try { productMetadata . replaceMetadata ( getMetadataForProduct ( product ) ) ; } catch ( Exception e ) { LOG . log ( Level . SEVERE , "Failed to get metadata for product : " + e . getMessage ( ) , e ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to get metadata for product : " + e . getMessage ( ) ) ; } try { product = renameProduct ( product , productMetadata ) ; } catch ( Exception e ) { LOG . log ( Level . SEVERE , "Failed to rename product : " + e . getMessage ( ) , e ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to rename product : " + e . getMessage ( ) ) ; } addKnownMetadata ( product , productMetadata ) ; if ( ! containsRequiredMetadata ( productMetadata ) ) { LOG . log ( Level . SEVERE , "Missing required metadata for product '" + product + "'" ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Missing required metadata" ) ; } if ( ! performPreIngestActions ( product , productMetadata ) ) { performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "PreIngest actions failed to complete" ) ; } if ( isSkipIngest ( ) ) { LOG . log ( Level . INFO , "Skipping ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; return createIngestStatus ( product , IngestStatus . Result . SKIPPED , "Crawler ingest turned OFF" ) ; } boolean ingestSuccess = ingest ( product , productMetadata ) ; if ( ingestSuccess ) { performPostIngestOnSuccessActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . SUCCESS , "Ingest was successful" ) ; } else { LOG . log ( Level . WARNING , "Failed to ingest product: [" + product . getAbsolutePath ( ) + "]: performing postIngestFail actions" ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to ingest product" ) ; } }
public IngestStatus handleFile ( File product ) { LOG . log ( Level . INFO , "Handling file " + product ) ; if ( ! passesPreconditions ( product ) ) { LOG . log ( Level . WARNING , "Failed to pass preconditions for ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; return createIngestStatus ( product , IngestStatus . Result . PRECONDS_FAILED , "Failed to pass preconditions" ) ; } Metadata productMetadata = new Metadata ( ) ; productMetadata . addMetadata ( getGlobalMetadata ( ) ) ; try { productMetadata . replaceMetadata ( getMetadataForProduct ( product ) ) ; } catch ( Exception e ) { LOG . log ( Level . SEVERE , "Failed to get metadata for product : " + e . getMessage ( ) , e ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to get metadata for product : " + e . getMessage ( ) ) ; } try { product = renameProduct ( product , productMetadata ) ; } catch ( Exception e ) { LOG . log ( Level . SEVERE , "Failed to rename product : " + e . getMessage ( ) , e ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to rename product : " + e . getMessage ( ) ) ; } addKnownMetadata ( product , productMetadata ) ; if ( ! containsRequiredMetadata ( productMetadata ) ) { LOG . log ( Level . SEVERE , "Missing required metadata for product '" + product + "'" ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Missing required metadata" ) ; } if ( ! performPreIngestActions ( product , productMetadata ) ) { performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "PreIngest actions failed to complete" ) ; } if ( isSkipIngest ( ) ) { LOG . log ( Level . INFO , "Skipping ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; return createIngestStatus ( product , IngestStatus . Result . SKIPPED , "Crawler ingest turned OFF" ) ; } boolean ingestSuccess = ingest ( product , productMetadata ) ; if ( ingestSuccess ) { LOG . log ( Level . INFO , "Successful ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; performPostIngestOnSuccessActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . SUCCESS , "Ingest was successful" ) ; } else { LOG . log ( Level . WARNING , "Failed to ingest product: [" + product . getAbsolutePath ( ) + "]: performing postIngestFail actions" ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to ingest product" ) ; } }
167
public IngestStatus handleFile ( File product ) { LOG . log ( Level . INFO , "Handling file " + product ) ; if ( ! passesPreconditions ( product ) ) { LOG . log ( Level . WARNING , "Failed to pass preconditions for ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; return createIngestStatus ( product , IngestStatus . Result . PRECONDS_FAILED , "Failed to pass preconditions" ) ; } Metadata productMetadata = new Metadata ( ) ; productMetadata . addMetadata ( getGlobalMetadata ( ) ) ; try { productMetadata . replaceMetadata ( getMetadataForProduct ( product ) ) ; } catch ( Exception e ) { LOG . log ( Level . SEVERE , "Failed to get metadata for product : " + e . getMessage ( ) , e ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to get metadata for product : " + e . getMessage ( ) ) ; } try { product = renameProduct ( product , productMetadata ) ; } catch ( Exception e ) { LOG . log ( Level . SEVERE , "Failed to rename product : " + e . getMessage ( ) , e ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to rename product : " + e . getMessage ( ) ) ; } addKnownMetadata ( product , productMetadata ) ; if ( ! containsRequiredMetadata ( productMetadata ) ) { LOG . log ( Level . SEVERE , "Missing required metadata for product '" + product + "'" ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Missing required metadata" ) ; } if ( ! performPreIngestActions ( product , productMetadata ) ) { performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "PreIngest actions failed to complete" ) ; } if ( isSkipIngest ( ) ) { LOG . log ( Level . INFO , "Skipping ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; return createIngestStatus ( product , IngestStatus . Result . SKIPPED , "Crawler ingest turned OFF" ) ; } boolean ingestSuccess = ingest ( product , productMetadata ) ; if ( ingestSuccess ) { LOG . log ( Level . INFO , "Successful ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; performPostIngestOnSuccessActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . SUCCESS , "Ingest was successful" ) ; } else { performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to ingest product" ) ; } }
public IngestStatus handleFile ( File product ) { LOG . log ( Level . INFO , "Handling file " + product ) ; if ( ! passesPreconditions ( product ) ) { LOG . log ( Level . WARNING , "Failed to pass preconditions for ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; return createIngestStatus ( product , IngestStatus . Result . PRECONDS_FAILED , "Failed to pass preconditions" ) ; } Metadata productMetadata = new Metadata ( ) ; productMetadata . addMetadata ( getGlobalMetadata ( ) ) ; try { productMetadata . replaceMetadata ( getMetadataForProduct ( product ) ) ; } catch ( Exception e ) { LOG . log ( Level . SEVERE , "Failed to get metadata for product : " + e . getMessage ( ) , e ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to get metadata for product : " + e . getMessage ( ) ) ; } try { product = renameProduct ( product , productMetadata ) ; } catch ( Exception e ) { LOG . log ( Level . SEVERE , "Failed to rename product : " + e . getMessage ( ) , e ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to rename product : " + e . getMessage ( ) ) ; } addKnownMetadata ( product , productMetadata ) ; if ( ! containsRequiredMetadata ( productMetadata ) ) { LOG . log ( Level . SEVERE , "Missing required metadata for product '" + product + "'" ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Missing required metadata" ) ; } if ( ! performPreIngestActions ( product , productMetadata ) ) { performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "PreIngest actions failed to complete" ) ; } if ( isSkipIngest ( ) ) { LOG . log ( Level . INFO , "Skipping ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; return createIngestStatus ( product , IngestStatus . Result . SKIPPED , "Crawler ingest turned OFF" ) ; } boolean ingestSuccess = ingest ( product , productMetadata ) ; if ( ingestSuccess ) { LOG . log ( Level . INFO , "Successful ingest of product: [" + product . getAbsolutePath ( ) + "]" ) ; performPostIngestOnSuccessActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . SUCCESS , "Ingest was successful" ) ; } else { LOG . log ( Level . WARNING , "Failed to ingest product: [" + product . getAbsolutePath ( ) + "]: performing postIngestFail actions" ) ; performPostIngestOnFailActions ( product , productMetadata ) ; return createIngestStatus ( product , IngestStatus . Result . FAILURE , "Failed to ingest product" ) ; } }
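Records 164 through 167 above all derive from the same crawler handleFile method, and in each pair the target restores exactly one java.util.logging call that the source drops. Reformatted here for readability (identifiers exactly as they appear in the records), the four restored statements appear to be:

    LOG.log(Level.SEVERE, "Missing required metadata for product '" + product + "'");            // record 164
    LOG.log(Level.INFO, "Skipping ingest of product: [" + product.getAbsolutePath() + "]");      // record 165
    LOG.log(Level.INFO, "Successful ingest of product: [" + product.getAbsolutePath() + "]");    // record 166
    LOG.log(Level.WARNING, "Failed to ingest product: [" + product.getAbsolutePath() + "]: performing postIngestFail actions");  // record 167

Each statement sits on a distinct control-flow branch, so every pair exercises a different exit path of the method.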
168
private void purgeExpiredDevices ( ) { Calendar deadline = Calendar . getInstance ( ) ; deadline . add ( Calendar . YEAR , ONE_YEAR_AGO ) ; DeviceDAO deviceDao = new DeviceDAO ( ) ; DeviceSurveyJobQueueDAO dsjqDao = new DeviceSurveyJobQueueDAO ( ) ; DeviceFileJobQueueDAO dfjqDao = new DeviceFileJobQueueDAO ( ) ; SurveyAssignmentDao saDao = new SurveyAssignmentDao ( ) ; List < Device > deviceList = deviceDao . listAllWithBeaconBefore ( deadline . getTime ( ) ) ; log . info ( "Found " + deviceList . size ( ) + " old Devices" ) ; for ( Device d : deviceList ) { long did = d . getKey ( ) . getId ( ) ; List < DeviceSurveyJobQueue > djql = dsjqDao . get ( d . getPhoneNumber ( ) , d . getEsn ( ) , d . getAndroidId ( ) ) ; if ( djql . size ( ) > 0 ) { log . fine ( "Deleting " + djql . size ( ) + " form assignments for device " + did ) ; dsjqDao . delete ( djql ) ; } List < DeviceFileJobQueue > dfql = dfjqDao . listByDeviceId ( did ) ; if ( dfql . size ( ) > 0 ) { log . fine ( "Deleting " + dfql . size ( ) + " file requests for device " + did ) ; dfjqDao . delete ( dfql ) ; } int affected = saDao . removeDevice ( did ) ; log . fine ( "Removed device " + did + " from " + affected + " assignments." ) ; } deviceDao . delete ( deviceList ) ; }
private void purgeExpiredDevices ( ) { Calendar deadline = Calendar . getInstance ( ) ; deadline . add ( Calendar . YEAR , ONE_YEAR_AGO ) ; log . info ( "Starting scan for Devices not seen since: " + deadline . getTime ( ) ) ; DeviceDAO deviceDao = new DeviceDAO ( ) ; DeviceSurveyJobQueueDAO dsjqDao = new DeviceSurveyJobQueueDAO ( ) ; DeviceFileJobQueueDAO dfjqDao = new DeviceFileJobQueueDAO ( ) ; SurveyAssignmentDao saDao = new SurveyAssignmentDao ( ) ; List < Device > deviceList = deviceDao . listAllWithBeaconBefore ( deadline . getTime ( ) ) ; log . info ( "Found " + deviceList . size ( ) + " old Devices" ) ; for ( Device d : deviceList ) { long did = d . getKey ( ) . getId ( ) ; List < DeviceSurveyJobQueue > djql = dsjqDao . get ( d . getPhoneNumber ( ) , d . getEsn ( ) , d . getAndroidId ( ) ) ; if ( djql . size ( ) > 0 ) { log . fine ( "Deleting " + djql . size ( ) + " form assignments for device " + did ) ; dsjqDao . delete ( djql ) ; } List < DeviceFileJobQueue > dfql = dfjqDao . listByDeviceId ( did ) ; if ( dfql . size ( ) > 0 ) { log . fine ( "Deleting " + dfql . size ( ) + " file requests for device " + did ) ; dfjqDao . delete ( dfql ) ; } int affected = saDao . removeDevice ( did ) ; log . fine ( "Removed device " + did + " from " + affected + " assignments." ) ; } deviceDao . delete ( deviceList ) ; }
169
private void purgeExpiredDevices ( ) { Calendar deadline = Calendar . getInstance ( ) ; deadline . add ( Calendar . YEAR , ONE_YEAR_AGO ) ; log . info ( "Starting scan for Devices not seen since: " + deadline . getTime ( ) ) ; DeviceDAO deviceDao = new DeviceDAO ( ) ; DeviceSurveyJobQueueDAO dsjqDao = new DeviceSurveyJobQueueDAO ( ) ; DeviceFileJobQueueDAO dfjqDao = new DeviceFileJobQueueDAO ( ) ; SurveyAssignmentDao saDao = new SurveyAssignmentDao ( ) ; List < Device > deviceList = deviceDao . listAllWithBeaconBefore ( deadline . getTime ( ) ) ; for ( Device d : deviceList ) { long did = d . getKey ( ) . getId ( ) ; List < DeviceSurveyJobQueue > djql = dsjqDao . get ( d . getPhoneNumber ( ) , d . getEsn ( ) , d . getAndroidId ( ) ) ; if ( djql . size ( ) > 0 ) { log . fine ( "Deleting " + djql . size ( ) + " form assignments for device " + did ) ; dsjqDao . delete ( djql ) ; } List < DeviceFileJobQueue > dfql = dfjqDao . listByDeviceId ( did ) ; if ( dfql . size ( ) > 0 ) { log . fine ( "Deleting " + dfql . size ( ) + " file requests for device " + did ) ; dfjqDao . delete ( dfql ) ; } int affected = saDao . removeDevice ( did ) ; log . fine ( "Removed device " + did + " from " + affected + " assignments." ) ; } deviceDao . delete ( deviceList ) ; }
private void purgeExpiredDevices ( ) { Calendar deadline = Calendar . getInstance ( ) ; deadline . add ( Calendar . YEAR , ONE_YEAR_AGO ) ; log . info ( "Starting scan for Devices not seen since: " + deadline . getTime ( ) ) ; DeviceDAO deviceDao = new DeviceDAO ( ) ; DeviceSurveyJobQueueDAO dsjqDao = new DeviceSurveyJobQueueDAO ( ) ; DeviceFileJobQueueDAO dfjqDao = new DeviceFileJobQueueDAO ( ) ; SurveyAssignmentDao saDao = new SurveyAssignmentDao ( ) ; List < Device > deviceList = deviceDao . listAllWithBeaconBefore ( deadline . getTime ( ) ) ; log . info ( "Found " + deviceList . size ( ) + " old Devices" ) ; for ( Device d : deviceList ) { long did = d . getKey ( ) . getId ( ) ; List < DeviceSurveyJobQueue > djql = dsjqDao . get ( d . getPhoneNumber ( ) , d . getEsn ( ) , d . getAndroidId ( ) ) ; if ( djql . size ( ) > 0 ) { log . fine ( "Deleting " + djql . size ( ) + " form assignments for device " + did ) ; dsjqDao . delete ( djql ) ; } List < DeviceFileJobQueue > dfql = dfjqDao . listByDeviceId ( did ) ; if ( dfql . size ( ) > 0 ) { log . fine ( "Deleting " + dfql . size ( ) + " file requests for device " + did ) ; dfjqDao . delete ( dfql ) ; } int affected = saDao . removeDevice ( did ) ; log . fine ( "Removed device " + did + " from " + affected + " assignments." ) ; } deviceDao . delete ( deviceList ) ; }
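Records 168 and 169 pair the same purgeExpiredDevices method with two different omissions; the statements the targets add back appear to be:

    log.info("Starting scan for Devices not seen since: " + deadline.getTime());   // record 168
    log.info("Found " + deviceList.size() + " old Devices");                       // record 169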
170
protected void checkCPAttachmentFileEntriesByExpirationDate ( ) throws PortalException { List < CPAttachmentFileEntry > cpAttachmentFileEntries = cpAttachmentFileEntryFinder . findByExpirationDate ( new Date ( ) , new QueryDefinition < > ( WorkflowConstants . STATUS_APPROVED ) ) ; if ( _log . isDebugEnabled ( ) ) { } if ( ( cpAttachmentFileEntries != null ) && ! cpAttachmentFileEntries . isEmpty ( ) ) { for ( CPAttachmentFileEntry cpAttachmentFileEntry : cpAttachmentFileEntries ) { long userId = PortalUtil . getValidUserId ( cpAttachmentFileEntry . getCompanyId ( ) , cpAttachmentFileEntry . getUserId ( ) ) ; ServiceContext serviceContext = new ServiceContext ( ) ; serviceContext . setCommand ( Constants . UPDATE ) ; serviceContext . setScopeGroupId ( cpAttachmentFileEntry . getGroupId ( ) ) ; cpAttachmentFileEntryLocalService . updateStatus ( userId , cpAttachmentFileEntry . getCPAttachmentFileEntryId ( ) , WorkflowConstants . STATUS_EXPIRED , serviceContext , new HashMap < String , Serializable > ( ) ) ; } } }
protected void checkCPAttachmentFileEntriesByExpirationDate ( ) throws PortalException { List < CPAttachmentFileEntry > cpAttachmentFileEntries = cpAttachmentFileEntryFinder . findByExpirationDate ( new Date ( ) , new QueryDefinition < > ( WorkflowConstants . STATUS_APPROVED ) ) ; if ( _log . isDebugEnabled ( ) ) { _log . debug ( "Expiring " + cpAttachmentFileEntries . size ( ) + " commerce product attachment file entries" ) ; } if ( ( cpAttachmentFileEntries != null ) && ! cpAttachmentFileEntries . isEmpty ( ) ) { for ( CPAttachmentFileEntry cpAttachmentFileEntry : cpAttachmentFileEntries ) { long userId = PortalUtil . getValidUserId ( cpAttachmentFileEntry . getCompanyId ( ) , cpAttachmentFileEntry . getUserId ( ) ) ; ServiceContext serviceContext = new ServiceContext ( ) ; serviceContext . setCommand ( Constants . UPDATE ) ; serviceContext . setScopeGroupId ( cpAttachmentFileEntry . getGroupId ( ) ) ; cpAttachmentFileEntryLocalService . updateStatus ( userId , cpAttachmentFileEntry . getCPAttachmentFileEntryId ( ) , WorkflowConstants . STATUS_EXPIRED , serviceContext , new HashMap < String , Serializable > ( ) ) ; } } }
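In record 170 the source keeps the _log.isDebugEnabled() guard but leaves its body empty; the target appears to fill it with the guarded call, reformatted here:

    if (_log.isDebugEnabled()) {
        _log.debug("Expiring " + cpAttachmentFileEntries.size() + " commerce product attachment file entries");
    }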
171
public static int getUserGroupsActivitiesCount ( HttpPrincipal httpPrincipal , long userId ) { try { MethodKey methodKey = new MethodKey ( SocialActivityServiceUtil . class , "getUserGroupsActivitiesCount" , _getUserGroupsActivitiesCountParameterTypes26 ) ; MethodHandler methodHandler = new MethodHandler ( methodKey , userId ) ; Object returnObj = null ; try { returnObj = TunnelUtil . invoke ( httpPrincipal , methodHandler ) ; } catch ( Exception exception ) { throw new com . liferay . portal . kernel . exception . SystemException ( exception ) ; } return ( ( Integer ) returnObj ) . intValue ( ) ; } catch ( com . liferay . portal . kernel . exception . SystemException systemException ) { throw systemException ; } }
public static int getUserGroupsActivitiesCount ( HttpPrincipal httpPrincipal , long userId ) { try { MethodKey methodKey = new MethodKey ( SocialActivityServiceUtil . class , "getUserGroupsActivitiesCount" , _getUserGroupsActivitiesCountParameterTypes26 ) ; MethodHandler methodHandler = new MethodHandler ( methodKey , userId ) ; Object returnObj = null ; try { returnObj = TunnelUtil . invoke ( httpPrincipal , methodHandler ) ; } catch ( Exception exception ) { throw new com . liferay . portal . kernel . exception . SystemException ( exception ) ; } return ( ( Integer ) returnObj ) . intValue ( ) ; } catch ( com . liferay . portal . kernel . exception . SystemException systemException ) { _log . error ( systemException , systemException ) ; throw systemException ; } }
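Record 171 illustrates the log-and-rethrow idiom: before the SystemException is propagated, the target appears to add

    _log.error(systemException, systemException);

so the failure is recorded even though the exception continues up the stack.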
172
private IoWriteFuture sendRandomLine ( ) throws IOException { randomizer . fill ( dataBuffer ) ; Encoder encoder = Base64 . getEncoder ( ) ; int len = encoder . encode ( dataBuffer , outputBuffer ) ; outputBuffer [ len ] = ( byte ) '\r' ; outputBuffer [ len + 1 ] = ( byte ) '\n' ; byte [ ] packet = Arrays . copyOf ( outputBuffer , len + 2 ) ; String line = new String ( packet , 0 , packet . length - 2 , StandardCharsets . US_ASCII ) ; IoSession networkSession = session . getIoSession ( ) ; IoWriteFuture future = networkSession . writeBuffer ( new ByteArrayBuffer ( packet ) ) ; long count = numSent . incrementAndGet ( ) ; return future ; }
private IoWriteFuture sendRandomLine ( ) throws IOException { randomizer . fill ( dataBuffer ) ; Encoder encoder = Base64 . getEncoder ( ) ; int len = encoder . encode ( dataBuffer , outputBuffer ) ; outputBuffer [ len ] = ( byte ) '\r' ; outputBuffer [ len + 1 ] = ( byte ) '\n' ; byte [ ] packet = Arrays . copyOf ( outputBuffer , len + 2 ) ; String line = new String ( packet , 0 , packet . length - 2 , StandardCharsets . US_ASCII ) ; IoSession networkSession = session . getIoSession ( ) ; IoWriteFuture future = networkSession . writeBuffer ( new ByteArrayBuffer ( packet ) ) ; long count = numSent . incrementAndGet ( ) ; log . info ( "sendRandomLine({}) sent line #{}: {}" , session , count , line ) ; return future ; }
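In record 172 the target appears to add a parameterized (SLF4J-style) statement, which also gives the otherwise unused count and line locals a purpose:

    log.info("sendRandomLine({}) sent line #{}: {}", session, count, line);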
173
public void send ( Map < MeterId , BaseMeter > meterMap , MeterService meterService ) { if ( status == GRPCChannelStatus . CONNECTED ) { StreamObserver < MeterData > reportStreamObserver = null ; final GRPCStreamServiceStatus status = new GRPCStreamServiceStatus ( false ) ; try { final StreamObserver < MeterData > reporter = reportStreamObserver ; transform ( meterMap , meterData -> reporter . onNext ( meterData ) ) ; } catch ( Throwable e ) { if ( ! ( e instanceof StatusRuntimeException ) ) { LOGGER . error ( e , "Report meters to backend fail." ) ; return ; } final StatusRuntimeException statusRuntimeException = ( StatusRuntimeException ) e ; if ( statusRuntimeException . getStatus ( ) . getCode ( ) == Status . Code . UNIMPLEMENTED ) { LOGGER . warn ( "Backend doesn't support meter, it will be disabled" ) ; meterService . shutdown ( ) ; } } finally { if ( reportStreamObserver != null ) { reportStreamObserver . onCompleted ( ) ; } status . wait4Finish ( ) ; } } }
public void send ( Map < MeterId , BaseMeter > meterMap , MeterService meterService ) { if ( status == GRPCChannelStatus . CONNECTED ) { StreamObserver < MeterData > reportStreamObserver = null ; final GRPCStreamServiceStatus status = new GRPCStreamServiceStatus ( false ) ; try { reportStreamObserver = meterReportServiceStub . withDeadlineAfter ( GRPC_UPSTREAM_TIMEOUT , TimeUnit . SECONDS ) . collect ( new StreamObserver < Commands > ( ) { @ Override public void onNext ( Commands commands ) { } @ Override public void onError ( Throwable throwable ) { status . finished ( ) ; if ( LOGGER . isErrorEnable ( ) ) { LOGGER . error ( throwable , "Send meters to collector fail with a grpc internal exception." ) ; } ServiceManager . INSTANCE . findService ( GRPCChannelManager . class ) . reportError ( throwable ) ; } @ Override public void onCompleted ( ) { status . finished ( ) ; } } ) ; final StreamObserver < MeterData > reporter = reportStreamObserver ; transform ( meterMap , meterData -> reporter . onNext ( meterData ) ) ; } catch ( Throwable e ) { if ( ! ( e instanceof StatusRuntimeException ) ) { LOGGER . error ( e , "Report meters to backend fail." ) ; return ; } final StatusRuntimeException statusRuntimeException = ( StatusRuntimeException ) e ; if ( statusRuntimeException . getStatus ( ) . getCode ( ) == Status . Code . UNIMPLEMENTED ) { LOGGER . warn ( "Backend doesn't support meter, it will be disabled" ) ; meterService . shutdown ( ) ; } } finally { if ( reportStreamObserver != null ) { reportStreamObserver . onCompleted ( ) ; } status . wait4Finish ( ) ; } } }
174
public void send ( Map < MeterId , BaseMeter > meterMap , MeterService meterService ) { if ( status == GRPCChannelStatus . CONNECTED ) { StreamObserver < MeterData > reportStreamObserver = null ; final GRPCStreamServiceStatus status = new GRPCStreamServiceStatus ( false ) ; try { reportStreamObserver = meterReportServiceStub . withDeadlineAfter ( GRPC_UPSTREAM_TIMEOUT , TimeUnit . SECONDS ) . collect ( new StreamObserver < Commands > ( ) { @ Override public void onNext ( Commands commands ) { } @ Override public void onError ( Throwable throwable ) { status . finished ( ) ; if ( LOGGER . isErrorEnable ( ) ) { } ServiceManager . INSTANCE . findService ( GRPCChannelManager . class ) . reportError ( throwable ) ; } @ Override public void onCompleted ( ) { status . finished ( ) ; } } ) ; final StreamObserver < MeterData > reporter = reportStreamObserver ; transform ( meterMap , meterData -> reporter . onNext ( meterData ) ) ; } catch ( Throwable e ) { if ( ! ( e instanceof StatusRuntimeException ) ) { LOGGER . error ( e , "Report meters to backend fail." ) ; return ; } final StatusRuntimeException statusRuntimeException = ( StatusRuntimeException ) e ; if ( statusRuntimeException . getStatus ( ) . getCode ( ) == Status . Code . UNIMPLEMENTED ) { LOGGER . warn ( "Backend doesn't support meter, it will be disabled" ) ; meterService . shutdown ( ) ; } } finally { if ( reportStreamObserver != null ) { reportStreamObserver . onCompleted ( ) ; } status . wait4Finish ( ) ; } } }
public void send ( Map < MeterId , BaseMeter > meterMap , MeterService meterService ) { if ( status == GRPCChannelStatus . CONNECTED ) { StreamObserver < MeterData > reportStreamObserver = null ; final GRPCStreamServiceStatus status = new GRPCStreamServiceStatus ( false ) ; try { reportStreamObserver = meterReportServiceStub . withDeadlineAfter ( GRPC_UPSTREAM_TIMEOUT , TimeUnit . SECONDS ) . collect ( new StreamObserver < Commands > ( ) { @ Override public void onNext ( Commands commands ) { } @ Override public void onError ( Throwable throwable ) { status . finished ( ) ; if ( LOGGER . isErrorEnable ( ) ) { LOGGER . error ( throwable , "Send meters to collector fail with a grpc internal exception." ) ; } ServiceManager . INSTANCE . findService ( GRPCChannelManager . class ) . reportError ( throwable ) ; } @ Override public void onCompleted ( ) { status . finished ( ) ; } } ) ; final StreamObserver < MeterData > reporter = reportStreamObserver ; transform ( meterMap , meterData -> reporter . onNext ( meterData ) ) ; } catch ( Throwable e ) { if ( ! ( e instanceof StatusRuntimeException ) ) { LOGGER . error ( e , "Report meters to backend fail." ) ; return ; } final StatusRuntimeException statusRuntimeException = ( StatusRuntimeException ) e ; if ( statusRuntimeException . getStatus ( ) . getCode ( ) == Status . Code . UNIMPLEMENTED ) { LOGGER . warn ( "Backend doesn't support meter, it will be disabled" ) ; meterService . shutdown ( ) ; } } finally { if ( reportStreamObserver != null ) { reportStreamObserver . onCompleted ( ) ; } status . wait4Finish ( ) ; } } }
175
public void send ( Map < MeterId , BaseMeter > meterMap , MeterService meterService ) { if ( status == GRPCChannelStatus . CONNECTED ) { StreamObserver < MeterData > reportStreamObserver = null ; final GRPCStreamServiceStatus status = new GRPCStreamServiceStatus ( false ) ; try { reportStreamObserver = meterReportServiceStub . withDeadlineAfter ( GRPC_UPSTREAM_TIMEOUT , TimeUnit . SECONDS ) . collect ( new StreamObserver < Commands > ( ) { @ Override public void onNext ( Commands commands ) { } @ Override public void onError ( Throwable throwable ) { status . finished ( ) ; if ( LOGGER . isErrorEnable ( ) ) { LOGGER . error ( throwable , "Send meters to collector fail with a grpc internal exception." ) ; } ServiceManager . INSTANCE . findService ( GRPCChannelManager . class ) . reportError ( throwable ) ; } @ Override public void onCompleted ( ) { status . finished ( ) ; } } ) ; final StreamObserver < MeterData > reporter = reportStreamObserver ; transform ( meterMap , meterData -> reporter . onNext ( meterData ) ) ; } catch ( Throwable e ) { if ( ! ( e instanceof StatusRuntimeException ) ) { return ; } final StatusRuntimeException statusRuntimeException = ( StatusRuntimeException ) e ; if ( statusRuntimeException . getStatus ( ) . getCode ( ) == Status . Code . UNIMPLEMENTED ) { LOGGER . warn ( "Backend doesn't support meter, it will be disabled" ) ; meterService . shutdown ( ) ; } } finally { if ( reportStreamObserver != null ) { reportStreamObserver . onCompleted ( ) ; } status . wait4Finish ( ) ; } } }
public void send ( Map < MeterId , BaseMeter > meterMap , MeterService meterService ) { if ( status == GRPCChannelStatus . CONNECTED ) { StreamObserver < MeterData > reportStreamObserver = null ; final GRPCStreamServiceStatus status = new GRPCStreamServiceStatus ( false ) ; try { reportStreamObserver = meterReportServiceStub . withDeadlineAfter ( GRPC_UPSTREAM_TIMEOUT , TimeUnit . SECONDS ) . collect ( new StreamObserver < Commands > ( ) { @ Override public void onNext ( Commands commands ) { } @ Override public void onError ( Throwable throwable ) { status . finished ( ) ; if ( LOGGER . isErrorEnable ( ) ) { LOGGER . error ( throwable , "Send meters to collector fail with a grpc internal exception." ) ; } ServiceManager . INSTANCE . findService ( GRPCChannelManager . class ) . reportError ( throwable ) ; } @ Override public void onCompleted ( ) { status . finished ( ) ; } } ) ; final StreamObserver < MeterData > reporter = reportStreamObserver ; transform ( meterMap , meterData -> reporter . onNext ( meterData ) ) ; } catch ( Throwable e ) { if ( ! ( e instanceof StatusRuntimeException ) ) { LOGGER . error ( e , "Report meters to backend fail." ) ; return ; } final StatusRuntimeException statusRuntimeException = ( StatusRuntimeException ) e ; if ( statusRuntimeException . getStatus ( ) . getCode ( ) == Status . Code . UNIMPLEMENTED ) { LOGGER . warn ( "Backend doesn't support meter, it will be disabled" ) ; meterService . shutdown ( ) ; } } finally { if ( reportStreamObserver != null ) { reportStreamObserver . onCompleted ( ) ; } status . wait4Finish ( ) ; } } }
176
public void send ( Map < MeterId , BaseMeter > meterMap , MeterService meterService ) { if ( status == GRPCChannelStatus . CONNECTED ) { StreamObserver < MeterData > reportStreamObserver = null ; final GRPCStreamServiceStatus status = new GRPCStreamServiceStatus ( false ) ; try { reportStreamObserver = meterReportServiceStub . withDeadlineAfter ( GRPC_UPSTREAM_TIMEOUT , TimeUnit . SECONDS ) . collect ( new StreamObserver < Commands > ( ) { @ Override public void onNext ( Commands commands ) { } @ Override public void onError ( Throwable throwable ) { status . finished ( ) ; if ( LOGGER . isErrorEnable ( ) ) { LOGGER . error ( throwable , "Send meters to collector fail with a grpc internal exception." ) ; } ServiceManager . INSTANCE . findService ( GRPCChannelManager . class ) . reportError ( throwable ) ; } @ Override public void onCompleted ( ) { status . finished ( ) ; } } ) ; final StreamObserver < MeterData > reporter = reportStreamObserver ; transform ( meterMap , meterData -> reporter . onNext ( meterData ) ) ; } catch ( Throwable e ) { if ( ! ( e instanceof StatusRuntimeException ) ) { LOGGER . error ( e , "Report meters to backend fail." ) ; return ; } final StatusRuntimeException statusRuntimeException = ( StatusRuntimeException ) e ; if ( statusRuntimeException . getStatus ( ) . getCode ( ) == Status . Code . UNIMPLEMENTED ) { meterService . shutdown ( ) ; } } finally { if ( reportStreamObserver != null ) { reportStreamObserver . onCompleted ( ) ; } status . wait4Finish ( ) ; } } }
public void send ( Map < MeterId , BaseMeter > meterMap , MeterService meterService ) { if ( status == GRPCChannelStatus . CONNECTED ) { StreamObserver < MeterData > reportStreamObserver = null ; final GRPCStreamServiceStatus status = new GRPCStreamServiceStatus ( false ) ; try { reportStreamObserver = meterReportServiceStub . withDeadlineAfter ( GRPC_UPSTREAM_TIMEOUT , TimeUnit . SECONDS ) . collect ( new StreamObserver < Commands > ( ) { @ Override public void onNext ( Commands commands ) { } @ Override public void onError ( Throwable throwable ) { status . finished ( ) ; if ( LOGGER . isErrorEnable ( ) ) { LOGGER . error ( throwable , "Send meters to collector fail with a grpc internal exception." ) ; } ServiceManager . INSTANCE . findService ( GRPCChannelManager . class ) . reportError ( throwable ) ; } @ Override public void onCompleted ( ) { status . finished ( ) ; } } ) ; final StreamObserver < MeterData > reporter = reportStreamObserver ; transform ( meterMap , meterData -> reporter . onNext ( meterData ) ) ; } catch ( Throwable e ) { if ( ! ( e instanceof StatusRuntimeException ) ) { LOGGER . error ( e , "Report meters to backend fail." ) ; return ; } final StatusRuntimeException statusRuntimeException = ( StatusRuntimeException ) e ; if ( statusRuntimeException . getStatus ( ) . getCode ( ) == Status . Code . UNIMPLEMENTED ) { LOGGER . warn ( "Backend doesn't support meter, it will be disabled" ) ; meterService . shutdown ( ) ; } } finally { if ( reportStreamObserver != null ) { reportStreamObserver . onCompleted ( ) ; } status . wait4Finish ( ) ; } } }
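Records 173 through 176 all come from the same gRPC meter-reporting send method. Record 173 is the outlier: its source omits the entire meterReportServiceStub...collect(...) registration of the StreamObserver (leaving reporter null), and the target reinstates that whole block, including the log call inside onError. Records 174 through 176 each differ by a single statement, which appear to be:

    LOGGER.error(throwable, "Send meters to collector fail with a grpc internal exception.");  // record 174, inside the isErrorEnable() guard
    LOGGER.error(e, "Report meters to backend fail.");                                         // record 175, in the catch block
    LOGGER.warn("Backend doesn't support meter, it will be disabled");                         // record 176, before meterService.shutdown()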
177
private void createEnclosureGroup ( ) { EnclosureGroup enclosureGroup = this . buildEnclosureGroup ( ) ; EnclosureGroup created = this . enclosureGroupClient . create ( enclosureGroup ) ; }
private void createEnclosureGroup ( ) { EnclosureGroup enclosureGroup = this . buildEnclosureGroup ( ) ; EnclosureGroup created = this . enclosureGroupClient . create ( enclosureGroup ) ; LOGGER . info ( "EnclosureGroup object returned to client : " + created . toJsonString ( ) ) ; }
178
public Object getNewValue ( final Object current ) { Object toReturn = current ; List < Long > list ; try { list = getListFromBytes ( current ) ; if ( list != null ) { list . removeAll ( inValues ) ; } toReturn = getBytesFromList ( list ) ; } catch ( IOException e ) { } return toReturn ; }
public Object getNewValue ( final Object current ) { Object toReturn = current ; List < Long > list ; try { list = getListFromBytes ( current ) ; if ( list != null ) { list . removeAll ( inValues ) ; } toReturn = getBytesFromList ( list ) ; } catch ( IOException e ) { log . error ( "Unable to retrieve key " + key + " from memcached. Not able to delete " + inValues + " from list." ) ; } return toReturn ; }
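Record 178 shows an otherwise silent catch block gaining visibility; the target appears to add, inside catch (IOException e):

    log.error("Unable to retrieve key " + key + " from memcached. Not able to delete " + inValues + " from list.");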
179
@ Bean public Jaxb2Marshaller configurationManagementMarshaller ( ) { final Jaxb2Marshaller marshaller = new Jaxb2Marshaller ( ) ; marshaller . setContextPath ( this . environment . getRequiredProperty ( PROPERTY_NAME_MARSHALLER_CONTEXT_PATH_CONFIGURATION_MANAGEMENT ) ) ; return marshaller ; }
@ Bean public Jaxb2Marshaller configurationManagementMarshaller ( ) { LOGGER . debug ( "Creating Configuration Management Marshaller Bean" ) ; final Jaxb2Marshaller marshaller = new Jaxb2Marshaller ( ) ; marshaller . setContextPath ( this . environment . getRequiredProperty ( PROPERTY_NAME_MARSHALLER_CONTEXT_PATH_CONFIGURATION_MANAGEMENT ) ) ; return marshaller ; }
180
public void deleteCSAR ( CsarId csarId ) throws SystemException , UserException { FileUtils . forceDelete ( basePath . resolve ( csarId . csarName ( ) ) ) ; LOGGER . info ( "Deleted CSAR \"{}\"..." , csarId . csarName ( ) ) ; }
public void deleteCSAR ( CsarId csarId ) throws SystemException , UserException { LOGGER . info ( "Deleting CSAR \"{}\"..." , csarId . csarName ( ) ) ; FileUtils . forceDelete ( basePath . resolve ( csarId . csarName ( ) ) ) ; LOGGER . info ( "Deleted CSAR \"{}\"..." , csarId . csarName ( ) ) ; }
181
public void deleteCSAR ( CsarId csarId ) throws SystemException , UserException { LOGGER . info ( "Deleting CSAR \"{}\"..." , csarId . csarName ( ) ) ; FileUtils . forceDelete ( basePath . resolve ( csarId . csarName ( ) ) ) ; }
public void deleteCSAR ( CsarId csarId ) throws SystemException , UserException { LOGGER . info ( "Deleting CSAR \"{}\"..." , csarId . csarName ( ) ) ; FileUtils . forceDelete ( basePath . resolve ( csarId . csarName ( ) ) ) ; LOGGER . info ( "Deleted CSAR \"{}\"..." , csarId . csarName ( ) ) ; }
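Records 180 and 181 bracket the destructive FileUtils.forceDelete call with before-and-after messages; the two statements the targets supply appear to be:

    LOGGER.info("Deleting CSAR \"{}\"...", csarId.csarName());   // record 180, before the delete
    LOGGER.info("Deleted CSAR \"{}\"...", csarId.csarName());    // record 181, after the delete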
182
public void testEnded ( String host ) { if ( workerThread == null ) { log . debug ( "End testEnded workerThread == null" ) ; return ; } workerHost = null ; workerThread . interrupt ( ) ; shutdownConnectors ( ) ; autoFileBaseName = null ; counter = 0 ; super . testEnded ( host ) ; log . debug ( "End testEnded" ) ; }
public void testEnded ( String host ) { log . debug ( "Start testEnded" ) ; if ( workerThread == null ) { log . debug ( "End testEnded workerThread == null" ) ; return ; } workerHost = null ; workerThread . interrupt ( ) ; shutdownConnectors ( ) ; autoFileBaseName = null ; counter = 0 ; super . testEnded ( host ) ; log . debug ( "End testEnded" ) ; }
183
public void testEnded ( String host ) { log . debug ( "Start testEnded" ) ; if ( workerThread == null ) { return ; } workerHost = null ; workerThread . interrupt ( ) ; shutdownConnectors ( ) ; autoFileBaseName = null ; counter = 0 ; super . testEnded ( host ) ; log . debug ( "End testEnded" ) ; }
public void testEnded ( String host ) { log . debug ( "Start testEnded" ) ; if ( workerThread == null ) { log . debug ( "End testEnded workerThread == null" ) ; return ; } workerHost = null ; workerThread . interrupt ( ) ; shutdownConnectors ( ) ; autoFileBaseName = null ; counter = 0 ; super . testEnded ( host ) ; log . debug ( "End testEnded" ) ; }
184
public void testEnded ( String host ) { log . debug ( "Start testEnded" ) ; if ( workerThread == null ) { log . debug ( "End testEnded workerThread == null" ) ; return ; } workerHost = null ; workerThread . interrupt ( ) ; shutdownConnectors ( ) ; autoFileBaseName = null ; counter = 0 ; super . testEnded ( host ) ; }
public void testEnded ( String host ) { log . debug ( "Start testEnded" ) ; if ( workerThread == null ) { log . debug ( "End testEnded workerThread == null" ) ; return ; } workerHost = null ; workerThread . interrupt ( ) ; shutdownConnectors ( ) ; autoFileBaseName = null ; counter = 0 ; super . testEnded ( host ) ; log . debug ( "End testEnded" ) ; }
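Records 182 through 184 trace the entry and both exits of testEnded; the three debug statements restored by the targets appear to be:

    log.debug("Start testEnded");                        // record 182, method entry
    log.debug("End testEnded workerThread == null");     // record 183, early return
    log.debug("End testEnded");                          // record 184, normal exit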
185
public boolean restoreVersion ( String path ) throws RemoteException , ResourceAdminServiceExceptionException { boolean status ; try { status = resourceAdminServiceStub . restoreVersion ( path ) ; } catch ( RemoteException e ) { throw new RemoteException ( "Restore version error : " , e ) ; } catch ( ResourceAdminServiceExceptionException e ) { log . error ( "Restore version error : " + e . getMessage ( ) ) ; throw new ResourceAdminServiceExceptionException ( "Restore version error : " , e ) ; } return status ; }
public boolean restoreVersion ( String path ) throws RemoteException , ResourceAdminServiceExceptionException { boolean status ; try { status = resourceAdminServiceStub . restoreVersion ( path ) ; } catch ( RemoteException e ) { log . error ( "No versions to restore : " + e . getMessage ( ) ) ; throw new RemoteException ( "Restore version error : " , e ) ; } catch ( ResourceAdminServiceExceptionException e ) { log . error ( "Restore version error : " + e . getMessage ( ) ) ; throw new ResourceAdminServiceExceptionException ( "Restore version error : " , e ) ; } return status ; }
186
public boolean restoreVersion ( String path ) throws RemoteException , ResourceAdminServiceExceptionException { boolean status ; try { status = resourceAdminServiceStub . restoreVersion ( path ) ; } catch ( RemoteException e ) { log . error ( "No versions to restore : " + e . getMessage ( ) ) ; throw new RemoteException ( "Restore version error : " , e ) ; } catch ( ResourceAdminServiceExceptionException e ) { throw new ResourceAdminServiceExceptionException ( "Restore version error : " , e ) ; } return status ; }
public boolean restoreVersion ( String path ) throws RemoteException , ResourceAdminServiceExceptionException { boolean status ; try { status = resourceAdminServiceStub . restoreVersion ( path ) ; } catch ( RemoteException e ) { log . error ( "No versions to restore : " + e . getMessage ( ) ) ; throw new RemoteException ( "Restore version error : " , e ) ; } catch ( ResourceAdminServiceExceptionException e ) { log . error ( "Restore version error : " + e . getMessage ( ) ) ; throw new ResourceAdminServiceExceptionException ( "Restore version error : " , e ) ; } return status ; }
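Records 185 and 186 each log one of restoreVersion's two failure modes before rethrowing; the added statements appear to be:

    log.error("No versions to restore : " + e.getMessage());   // record 185, RemoteException branch
    log.error("Restore version error : " + e.getMessage());    // record 186, ResourceAdminServiceExceptionException branch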
187
public static com . liferay . knowledge . base . model . KBArticleSoap [ ] getKBArticles ( long groupId , long parentResourcePrimKey , int status , int start , int end , com . liferay . portal . kernel . util . OrderByComparator < com . liferay . knowledge . base . model . KBArticle > orderByComparator ) throws RemoteException { try { java . util . List < com . liferay . knowledge . base . model . KBArticle > returnValue = KBArticleServiceUtil . getKBArticles ( groupId , parentResourcePrimKey , status , start , end , orderByComparator ) ; return com . liferay . knowledge . base . model . KBArticleSoap . toSoapModels ( returnValue ) ; } catch ( Exception exception ) { throw new RemoteException ( exception . getMessage ( ) ) ; } }
public static com . liferay . knowledge . base . model . KBArticleSoap [ ] getKBArticles ( long groupId , long parentResourcePrimKey , int status , int start , int end , com . liferay . portal . kernel . util . OrderByComparator < com . liferay . knowledge . base . model . KBArticle > orderByComparator ) throws RemoteException { try { java . util . List < com . liferay . knowledge . base . model . KBArticle > returnValue = KBArticleServiceUtil . getKBArticles ( groupId , parentResourcePrimKey , status , start , end , orderByComparator ) ; return com . liferay . knowledge . base . model . KBArticleSoap . toSoapModels ( returnValue ) ; } catch ( Exception exception ) { _log . error ( exception , exception ) ; throw new RemoteException ( exception . getMessage ( ) ) ; } }
188
private void checkFieldConstraints ( JsonDoc doc , List < FieldConstraint > constraints , Path currentValuePath , JsonNode currentValue ) { for ( FieldConstraint x : constraints ) { currentFieldConstraint = x ; String constraintType = currentFieldConstraint . getType ( ) ; Error . push ( constraintType ) ; try { FieldConstraintChecker checker = fRegistry . find ( constraintType ) ; if ( checker == null ) { throw Error . get ( CrudConstants . ERR_NO_CONSTRAINT ) ; } if ( checker instanceof FieldConstraintDocChecker ) { checkFieldContraints ( doc , ( FieldConstraintDocChecker ) checker ) ; } else if ( checker instanceof FieldConstraintValueChecker ) { checkValueContraints ( doc , ( FieldConstraintChecker ) checker , currentValuePath , currentValue ) ; } } catch ( Error e ) { throw e ; } catch ( Exception e ) { LOGGER . error ( e . getMessage ( ) , e ) ; throw Error . get ( CrudConstants . ERR_CRUD , e . getMessage ( ) ) ; } finally { Error . pop ( ) ; } } }
private void checkFieldConstraints ( JsonDoc doc , List < FieldConstraint > constraints , Path currentValuePath , JsonNode currentValue ) { for ( FieldConstraint x : constraints ) { currentFieldConstraint = x ; String constraintType = currentFieldConstraint . getType ( ) ; LOGGER . debug ( "checking constraint " + constraintType ) ; Error . push ( constraintType ) ; try { FieldConstraintChecker checker = fRegistry . find ( constraintType ) ; if ( checker == null ) { throw Error . get ( CrudConstants . ERR_NO_CONSTRAINT ) ; } if ( checker instanceof FieldConstraintDocChecker ) { checkFieldContraints ( doc , ( FieldConstraintDocChecker ) checker ) ; } else if ( checker instanceof FieldConstraintValueChecker ) { checkValueContraints ( doc , ( FieldConstraintChecker ) checker , currentValuePath , currentValue ) ; } } catch ( Error e ) { throw e ; } catch ( Exception e ) { LOGGER . error ( e . getMessage ( ) , e ) ; throw Error . get ( CrudConstants . ERR_CRUD , e . getMessage ( ) ) ; } finally { Error . pop ( ) ; } } }
189
private void checkFieldConstraints ( JsonDoc doc , List < FieldConstraint > constraints , Path currentValuePath , JsonNode currentValue ) { for ( FieldConstraint x : constraints ) { currentFieldConstraint = x ; String constraintType = currentFieldConstraint . getType ( ) ; LOGGER . debug ( "checking constraint " + constraintType ) ; Error . push ( constraintType ) ; try { FieldConstraintChecker checker = fRegistry . find ( constraintType ) ; if ( checker == null ) { throw Error . get ( CrudConstants . ERR_NO_CONSTRAINT ) ; } if ( checker instanceof FieldConstraintDocChecker ) { checkFieldContraints ( doc , ( FieldConstraintDocChecker ) checker ) ; } else if ( checker instanceof FieldConstraintValueChecker ) { checkValueContraints ( doc , ( FieldConstraintChecker ) checker , currentValuePath , currentValue ) ; } } catch ( Error e ) { throw e ; } catch ( Exception e ) { throw Error . get ( CrudConstants . ERR_CRUD , e . getMessage ( ) ) ; } finally { Error . pop ( ) ; } } }
private void checkFieldConstraints ( JsonDoc doc , List < FieldConstraint > constraints , Path currentValuePath , JsonNode currentValue ) { for ( FieldConstraint x : constraints ) { currentFieldConstraint = x ; String constraintType = currentFieldConstraint . getType ( ) ; LOGGER . debug ( "checking constraint " + constraintType ) ; Error . push ( constraintType ) ; try { FieldConstraintChecker checker = fRegistry . find ( constraintType ) ; if ( checker == null ) { throw Error . get ( CrudConstants . ERR_NO_CONSTRAINT ) ; } if ( checker instanceof FieldConstraintDocChecker ) { checkFieldContraints ( doc , ( FieldConstraintDocChecker ) checker ) ; } else if ( checker instanceof FieldConstraintValueChecker ) { checkValueContraints ( doc , ( FieldConstraintChecker ) checker , currentValuePath , currentValue ) ; } } catch ( Error e ) { throw e ; } catch ( Exception e ) { LOGGER . error ( e . getMessage ( ) , e ) ; throw Error . get ( CrudConstants . ERR_CRUD , e . getMessage ( ) ) ; } finally { Error . pop ( ) ; } } }
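Records 188 and 189 both work on checkFieldConstraints: one adds a per-iteration trace, the other logs the exception before it is wrapped into a new Error. The statements appear to be:

    LOGGER.debug("checking constraint " + constraintType);   // record 188
    LOGGER.error(e.getMessage(), e);                          // record 189, before Error.get(...)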
190
public void userEventTriggered ( @ Nullable ChannelHandlerContext ctx , @ Nullable Object evt ) throws Exception { if ( evt == null || ctx == null ) { return ; } if ( evt instanceof IdleStateEvent ) { IdleStateEvent e = ( IdleStateEvent ) evt ; if ( e . state ( ) == IdleState . WRITER_IDLE ) { ctx . close ( ) ; } } }
public void userEventTriggered ( @ Nullable ChannelHandlerContext ctx , @ Nullable Object evt ) throws Exception { if ( evt == null || ctx == null ) { return ; } if ( evt instanceof IdleStateEvent ) { IdleStateEvent e = ( IdleStateEvent ) evt ; if ( e . state ( ) == IdleState . WRITER_IDLE ) { logger . debug ( "Stream server is going to close an idle channel." ) ; ctx . close ( ) ; } } }
191
protected StateData removeStateFromSession ( final HttpSession session , final String state ) { @ SuppressWarnings ( "unchecked" ) final Map < String , StateData > states = ( Map < String , StateData > ) session . getAttribute ( STATES ) ; if ( states != null ) { final long now = System . currentTimeMillis ( ) ; states . entrySet ( ) . stream ( ) . filter ( e -> ( now - e . getValue ( ) . getExpiration ( ) ) / 1000L > getStateTtl ( ) ) . map ( Map . Entry :: getKey ) . collect ( Collectors . toList ( ) ) . forEach ( s -> { if ( logger . isDebugEnabled ( ) ) { } states . remove ( s ) ; } ) ; return states . remove ( state ) ; } return null ; }
protected StateData removeStateFromSession ( final HttpSession session , final String state ) { @ SuppressWarnings ( "unchecked" ) final Map < String , StateData > states = ( Map < String , StateData > ) session . getAttribute ( STATES ) ; if ( states != null ) { final long now = System . currentTimeMillis ( ) ; states . entrySet ( ) . stream ( ) . filter ( e -> ( now - e . getValue ( ) . getExpiration ( ) ) / 1000L > getStateTtl ( ) ) . map ( Map . Entry :: getKey ) . collect ( Collectors . toList ( ) ) . forEach ( s -> { if ( logger . isDebugEnabled ( ) ) { logger . debug ( "remove old state: {}" , s ) ; } states . remove ( s ) ; } ) ; return states . remove ( state ) ; } return null ; }
192
public boolean add ( T element ) throws LevelVersionError { try { if ( listOf . add ( element ) ) { if ( registerChild ( element ) ) { return true ; } listOf . remove ( listOf . size ( ) - 1 ) ; } return false ; } catch ( RuntimeException exc ) { if ( logger . isDebugEnabled ( ) ) { } listOf . remove ( listOf . size ( ) - 1 ) ; throw exc ; } }
public boolean add ( T element ) throws LevelVersionError { try { if ( listOf . add ( element ) ) { if ( registerChild ( element ) ) { return true ; } listOf . remove ( listOf . size ( ) - 1 ) ; } return false ; } catch ( RuntimeException exc ) { if ( logger . isDebugEnabled ( ) ) { logger . debug ( MessageFormat . format ( "Reverting change: removing element {0} from internal list" , element ) ) ; } listOf . remove ( listOf . size ( ) - 1 ) ; throw exc ; } }
193
private void updateConfiguration ( String pruningConf ) { LogPruneStrategy strategy = strategyFactory . strategyFromConfigValue ( fs , logFiles , logProvider , clock , pruningConf ) ; this . pruneStrategy = strategy ; }
private void updateConfiguration ( String pruningConf ) { LogPruneStrategy strategy = strategyFactory . strategyFromConfigValue ( fs , logFiles , logProvider , clock , pruningConf ) ; this . pruneStrategy = strategy ; log . info ( "Retention policy updated to '" + strategy + "', which will take effect next time a checkpoint completes." ) ; }
194
private static long getTimeFromSessionHistoryString ( String sessionHistoryString ) { Map < String , String > sessionHistoryMap = sessionHistoryStringToMap ( sessionHistoryString ) ; if ( ! sessionHistoryMap . containsKey ( ConfigProperty . TIME . name ( ) ) ) { return - 1 ; } try { return Long . parseLong ( sessionHistoryMap . get ( ConfigProperty . TIME . name ( ) ) ) ; } catch ( NumberFormatException e ) { return - 1 ; } }
private static long getTimeFromSessionHistoryString ( String sessionHistoryString ) { Map < String , String > sessionHistoryMap = sessionHistoryStringToMap ( sessionHistoryString ) ; if ( ! sessionHistoryMap . containsKey ( ConfigProperty . TIME . name ( ) ) ) { return - 1 ; } try { return Long . parseLong ( sessionHistoryMap . get ( ConfigProperty . TIME . name ( ) ) ) ; } catch ( NumberFormatException e ) { LOG . warn ( "Unable to parse TIME field to long: " + sessionHistoryMap . get ( ConfigProperty . TIME . name ( ) ) ) ; return - 1 ; } }
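Pair 194 depends on a sessionHistoryStringToMap helper that is not shown here. A plausible reconstruction, assuming a ';'-separated "KEY=value" encoding (the real format may differ):

import java.util.HashMap;
import java.util.Map;

// Hypothetical reconstruction of the helper used in pair 194, assuming the
// history string is a ';'-separated list of "KEY=value" tokens; malformed
// tokens are skipped rather than failing the whole parse.
final class SessionHistoryParser {
    static Map<String, String> sessionHistoryStringToMap(String sessionHistoryString) {
        Map<String, String> map = new HashMap<>();
        if (sessionHistoryString == null) {
            return map;
        }
        for (String token : sessionHistoryString.split(";")) {
            int idx = token.indexOf('=');
            if (idx > 0) {
                map.put(token.substring(0, idx), token.substring(idx + 1));
            }
        }
        return map;
    }
}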
195
@ Test public void testCreateProject3 ( ) { String projectName = metadata . commit ( ( ) -> { Project p3 = projProvider . ensureProject ( "Project3" ) ; p3 . setProjectName ( "ProjectName3" ) ; p3 . setContainerImage ( "kylo/nonExisentImage" ) ; p3 . getRoleMembership ( ProjectAccessControl . ROLE_EDITOR ) . ifPresent ( role -> role . addMember ( TEST_USER1 ) ) ; p3 . getRoleMembership ( ProjectAccessControl . ROLE_READER ) . ifPresent ( role -> role . addMember ( TEST_USER2 ) ) ; return p3 . getProjectName ( ) ; } , JcrMetadataAccess . SERVICE ) ; }
@ Test public void testCreateProject3 ( ) { logger . info ( "Running Test 'testCreateProject3'" ) ; String projectName = metadata . commit ( ( ) -> { Project p3 = projProvider . ensureProject ( "Project3" ) ; p3 . setProjectName ( "ProjectName3" ) ; p3 . setContainerImage ( "kylo/nonExisentImage" ) ; p3 . getRoleMembership ( ProjectAccessControl . ROLE_EDITOR ) . ifPresent ( role -> role . addMember ( TEST_USER1 ) ) ; p3 . getRoleMembership ( ProjectAccessControl . ROLE_READER ) . ifPresent ( role -> role . addMember ( TEST_USER2 ) ) ; return p3 . getProjectName ( ) ; } , JcrMetadataAccess . SERVICE ) ; }
196
public void deleteDiscountRuleBatch ( Long id , String callbackURL , Object object ) throws Exception { HttpInvoker . HttpResponse httpResponse = deleteDiscountRuleBatchHttpResponse ( id , callbackURL , object ) ; String content = httpResponse . getContent ( ) ; if ( ( httpResponse . getStatusCode ( ) / 100 ) != 2 ) { _logger . log ( Level . WARNING , "HTTP response message: " + httpResponse . getMessage ( ) ) ; _logger . log ( Level . WARNING , "HTTP response status code: " + httpResponse . getStatusCode ( ) ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } else { _logger . fine ( "HTTP response content: " + content ) ; _logger . fine ( "HTTP response message: " + httpResponse . getMessage ( ) ) ; _logger . fine ( "HTTP response status code: " + httpResponse . getStatusCode ( ) ) ; } }
public void deleteDiscountRuleBatch ( Long id , String callbackURL , Object object ) throws Exception { HttpInvoker . HttpResponse httpResponse = deleteDiscountRuleBatchHttpResponse ( id , callbackURL , object ) ; String content = httpResponse . getContent ( ) ; if ( ( httpResponse . getStatusCode ( ) / 100 ) != 2 ) { _logger . log ( Level . WARNING , "Unable to process HTTP response content: " + content ) ; _logger . log ( Level . WARNING , "HTTP response message: " + httpResponse . getMessage ( ) ) ; _logger . log ( Level . WARNING , "HTTP response status code: " + httpResponse . getStatusCode ( ) ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } else { _logger . fine ( "HTTP response content: " + content ) ; _logger . fine ( "HTTP response message: " + httpResponse . getMessage ( ) ) ; _logger . fine ( "HTTP response status code: " + httpResponse . getStatusCode ( ) ) ; } }
197
public void deleteDiscountRuleBatch ( Long id , String callbackURL , Object object ) throws Exception { HttpInvoker . HttpResponse httpResponse = deleteDiscountRuleBatchHttpResponse ( id , callbackURL , object ) ; String content = httpResponse . getContent ( ) ; if ( ( httpResponse . getStatusCode ( ) / 100 ) != 2 ) { _logger . log ( Level . WARNING , "Unable to process HTTP response content: " + content ) ; _logger . log ( Level . WARNING , "HTTP response status code: " + httpResponse . getStatusCode ( ) ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } else { _logger . fine ( "HTTP response content: " + content ) ; _logger . fine ( "HTTP response message: " + httpResponse . getMessage ( ) ) ; _logger . fine ( "HTTP response status code: " + httpResponse . getStatusCode ( ) ) ; } }
public void deleteDiscountRuleBatch ( Long id , String callbackURL , Object object ) throws Exception { HttpInvoker . HttpResponse httpResponse = deleteDiscountRuleBatchHttpResponse ( id , callbackURL , object ) ; String content = httpResponse . getContent ( ) ; if ( ( httpResponse . getStatusCode ( ) / 100 ) != 2 ) { _logger . log ( Level . WARNING , "Unable to process HTTP response content: " + content ) ; _logger . log ( Level . WARNING , "HTTP response message: " + httpResponse . getMessage ( ) ) ; _logger . log ( Level . WARNING , "HTTP response status code: " + httpResponse . getStatusCode ( ) ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } else { _logger . fine ( "HTTP response content: " + content ) ; _logger . fine ( "HTTP response message: " + httpResponse . getMessage ( ) ) ; _logger . fine ( "HTTP response status code: " + httpResponse . getStatusCode ( ) ) ; } }
198
public void deleteDiscountRuleBatch ( Long id , String callbackURL , Object object ) throws Exception { HttpInvoker . HttpResponse httpResponse = deleteDiscountRuleBatchHttpResponse ( id , callbackURL , object ) ; String content = httpResponse . getContent ( ) ; if ( ( httpResponse . getStatusCode ( ) / 100 ) != 2 ) { _logger . log ( Level . WARNING , "Unable to process HTTP response content: " + content ) ; _logger . log ( Level . WARNING , "HTTP response message: " + httpResponse . getMessage ( ) ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } else { _logger . fine ( "HTTP response content: " + content ) ; _logger . fine ( "HTTP response message: " + httpResponse . getMessage ( ) ) ; _logger . fine ( "HTTP response status code: " + httpResponse . getStatusCode ( ) ) ; } }
public void deleteDiscountRuleBatch ( Long id , String callbackURL , Object object ) throws Exception { HttpInvoker . HttpResponse httpResponse = deleteDiscountRuleBatchHttpResponse ( id , callbackURL , object ) ; String content = httpResponse . getContent ( ) ; if ( ( httpResponse . getStatusCode ( ) / 100 ) != 2 ) { _logger . log ( Level . WARNING , "Unable to process HTTP response content: " + content ) ; _logger . log ( Level . WARNING , "HTTP response message: " + httpResponse . getMessage ( ) ) ; _logger . log ( Level . WARNING , "HTTP response status code: " + httpResponse . getStatusCode ( ) ) ; throw new Problem . ProblemException ( Problem . toDTO ( content ) ) ; } else { _logger . fine ( "HTTP response content: " + content ) ; _logger . fine ( "HTTP response message: " + httpResponse . getMessage ( ) ) ; _logger . fine ( "HTTP response status code: " + httpResponse . getStatusCode ( ) ) ; } }
199
@ Transactional ( readOnly = false , propagation = Propagation . REQUIRED ) public Template getByUuid ( String uuid ) { try { Query query = entityManager . createNamedQuery ( Template . QUERY_FIND_BY_UUID ) ; query . setParameter ( "uuid" , uuid ) ; Template template = null ; template = ( Template ) query . getSingleResult ( ) ; return template ; } catch ( NoResultException e ) { return null ; } }
@ Transactional ( readOnly = false , propagation = Propagation . REQUIRED ) public Template getByUuid ( String uuid ) { try { Query query = entityManager . createNamedQuery ( Template . QUERY_FIND_BY_UUID ) ; query . setParameter ( "uuid" , uuid ) ; Template template = null ; template = ( Template ) query . getSingleResult ( ) ; return template ; } catch ( NoResultException e ) { logger . debug ( "No Result found: " + e ) ; return null ; } }
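Pair 199 resolves Template.QUERY_FIND_BY_UUID as a JPA named query; a sketch of how such a query would typically be declared on the entity (the constant value and JPQL below are assumptions, not taken from the real Template class):

import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.NamedQuery;

// Sketch only. getSingleResult() throws NoResultException when no row
// matches, which is why the caller above catches it and returns null.
@Entity
@NamedQuery(name = Template.QUERY_FIND_BY_UUID,
            query = "SELECT t FROM Template t WHERE t.uuid = :uuid")
public class Template {
    public static final String QUERY_FIND_BY_UUID = "Template.findByUuid";

    @Id
    private String uuid;
}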