Unnamed: 0
int64
0
10k
source
stringlengths
27
7.27k
target
stringlengths
54
7.29k
300
protected Entity newEntity ( EntityMementoManifest entityManifest ) { String entityId = entityManifest . getId ( ) ; CatalogItemIdAndSearchPath idPath = findCatalogItemIds ( classLoader , mementoManifest . getEntityIdToManifest ( ) , entityManifest ) ; String entityType = entityManifest . getType ( ) ; LoadedClass < ? extends Entity > loaded = load ( Entity . class , entityType , idPath . getCatalogItemId ( ) , idPath . getSearchPath ( ) , entityId ) ; Class < ? extends Entity > entityClazz = loaded . clazz ; Entity entity ; if ( InternalFactory . isNewStyle ( entityClazz ) ) { InternalEntityFactory entityFactory = managementContext . getEntityFactory ( ) ; entity = entityFactory . constructEntity ( entityClazz , Reflections . getAllInterfaces ( entityClazz ) , entityId ) ; } else { Map < Object , Object > flags = Maps . newLinkedHashMap ( ) ; flags . put ( "id" , entityId ) ; if ( AbstractApplication . class . isAssignableFrom ( entityClazz ) ) flags . put ( "mgmt" , managementContext ) ; entity = invokeConstructor ( null , entityClazz , new Object [ ] { flags } , new Object [ ] { flags , null } , new Object [ ] { null } , new Object [ 0 ] ) ; FlagUtils . setFieldsFromFlags ( ImmutableMap . of ( "id" , entityId ) , entity ) ; if ( entity instanceof AbstractApplication ) { FlagUtils . setFieldsFromFlags ( ImmutableMap . of ( "mgmt" , managementContext ) , entity ) ; } ( ( AbstractEntity ) entity ) . setManagementContext ( managementContext ) ; managementContext . prePreManage ( entity ) ; } setCatalogItemIds ( entity , loaded . catalogItemId , loaded . searchPath ) ; return entity ; }
protected Entity newEntity ( EntityMementoManifest entityManifest ) { String entityId = entityManifest . getId ( ) ; CatalogItemIdAndSearchPath idPath = findCatalogItemIds ( classLoader , mementoManifest . getEntityIdToManifest ( ) , entityManifest ) ; String entityType = entityManifest . getType ( ) ; LoadedClass < ? extends Entity > loaded = load ( Entity . class , entityType , idPath . getCatalogItemId ( ) , idPath . getSearchPath ( ) , entityId ) ; Class < ? extends Entity > entityClazz = loaded . clazz ; Entity entity ; if ( InternalFactory . isNewStyle ( entityClazz ) ) { InternalEntityFactory entityFactory = managementContext . getEntityFactory ( ) ; entity = entityFactory . constructEntity ( entityClazz , Reflections . getAllInterfaces ( entityClazz ) , entityId ) ; } else { LOG . warn ( "Deprecated rebind of entity without no-arg constructor; " + "this may not be supported in future versions: id=" + entityId + "; type=" + entityType ) ; Map < Object , Object > flags = Maps . newLinkedHashMap ( ) ; flags . put ( "id" , entityId ) ; if ( AbstractApplication . class . isAssignableFrom ( entityClazz ) ) flags . put ( "mgmt" , managementContext ) ; entity = invokeConstructor ( null , entityClazz , new Object [ ] { flags } , new Object [ ] { flags , null } , new Object [ ] { null } , new Object [ 0 ] ) ; FlagUtils . setFieldsFromFlags ( ImmutableMap . of ( "id" , entityId ) , entity ) ; if ( entity instanceof AbstractApplication ) { FlagUtils . setFieldsFromFlags ( ImmutableMap . of ( "mgmt" , managementContext ) , entity ) ; } ( ( AbstractEntity ) entity ) . setManagementContext ( managementContext ) ; managementContext . prePreManage ( entity ) ; } setCatalogItemIds ( entity , loaded . catalogItemId , loaded . searchPath ) ; return entity ; }
301
private double metersToDegrees ( double distance ) { double degrees = ( distance / CswConstants . EARTH_MEAN_RADIUS_METERS ) * CswConstants . RADIANS_TO_DEGREES ; return degrees ; }
private double metersToDegrees ( double distance ) { double degrees = ( distance / CswConstants . EARTH_MEAN_RADIUS_METERS ) * CswConstants . RADIANS_TO_DEGREES ; LOGGER . debug ( "{} meter(s) is approximately {} degree(s) of latitude." , distance , degrees ) ; return degrees ; }
302
public String getLocal_dir ( ) { try { setLocal_dir ( bundle . getString ( "local_dir" ) ) ; } catch ( MissingResourceException e ) { local_dir = "/tmp" ; } return local_dir ; }
public String getLocal_dir ( ) { try { setLocal_dir ( bundle . getString ( "local_dir" ) ) ; } catch ( MissingResourceException e ) { logger . info ( ChainICPBrasilMessagesBundle . getString ( "error.chain.ipcbrasil.config" , "local_dir" ) ) ; local_dir = "/tmp" ; } return local_dir ; }
303
private SocketChannel openSocketChannelToMITM ( ) throws IOException { InetSocketAddress target = new InetSocketAddress ( "localhost" , serverPort ) ; return SocketChannel . open ( target ) ; }
private SocketChannel openSocketChannelToMITM ( ) throws IOException { logger . debug ( "Open socket channel to MITM server, localhost:{}" , serverPort ) ; InetSocketAddress target = new InetSocketAddress ( "localhost" , serverPort ) ; return SocketChannel . open ( target ) ; }
304
public static int getCommerceShipmentItemsCount ( HttpPrincipal httpPrincipal , long commerceShipmentId ) throws com . liferay . portal . kernel . exception . PortalException { try { MethodKey methodKey = new MethodKey ( CommerceShipmentItemServiceUtil . class , "getCommerceShipmentItemsCount" , _getCommerceShipmentItemsCountParameterTypes8 ) ; MethodHandler methodHandler = new MethodHandler ( methodKey , commerceShipmentId ) ; Object returnObj = null ; try { returnObj = TunnelUtil . invoke ( httpPrincipal , methodHandler ) ; } catch ( Exception exception ) { if ( exception instanceof com . liferay . portal . kernel . exception . PortalException ) { throw ( com . liferay . portal . kernel . exception . PortalException ) exception ; } throw new com . liferay . portal . kernel . exception . SystemException ( exception ) ; } return ( ( Integer ) returnObj ) . intValue ( ) ; } catch ( com . liferay . portal . kernel . exception . SystemException systemException ) { throw systemException ; } }
public static int getCommerceShipmentItemsCount ( HttpPrincipal httpPrincipal , long commerceShipmentId ) throws com . liferay . portal . kernel . exception . PortalException { try { MethodKey methodKey = new MethodKey ( CommerceShipmentItemServiceUtil . class , "getCommerceShipmentItemsCount" , _getCommerceShipmentItemsCountParameterTypes8 ) ; MethodHandler methodHandler = new MethodHandler ( methodKey , commerceShipmentId ) ; Object returnObj = null ; try { returnObj = TunnelUtil . invoke ( httpPrincipal , methodHandler ) ; } catch ( Exception exception ) { if ( exception instanceof com . liferay . portal . kernel . exception . PortalException ) { throw ( com . liferay . portal . kernel . exception . PortalException ) exception ; } throw new com . liferay . portal . kernel . exception . SystemException ( exception ) ; } return ( ( Integer ) returnObj ) . intValue ( ) ; } catch ( com . liferay . portal . kernel . exception . SystemException systemException ) { _log . error ( systemException , systemException ) ; throw systemException ; } }
305
private boolean waitForUpgrade ( URI wsUri , Future < Session > response ) { try { response . get ( 10 , TimeUnit . SECONDS ) ; return true ; } catch ( Throwable t ) { return false ; } }
private boolean waitForUpgrade ( URI wsUri , Future < Session > response ) { try { response . get ( 10 , TimeUnit . SECONDS ) ; return true ; } catch ( Throwable t ) { LOG . warn ( "Unable to connect to: " + wsUri , t ) ; return false ; } }
306
@ Test public void testGetDriverContextStatic ( ) throws Exception { driver . setCustomDataFile ( new File ( "./src/test/resources/cfn_templates/static-cfn.template" ) ) ; PrivateEc2Template template = ( PrivateEc2Template ) driver . getCFNTemplatePerService ( "static" ) ; Assert . assertNotNull ( template ) ; Assert . assertNotNull ( template . getResources ( ) ) ; Assert . assertNotNull ( template . getEC2Instance ( ) . getProperties ( ) . getImageId ( ) ) ; }
@ Test public void testGetDriverContextStatic ( ) throws Exception { driver . setCustomDataFile ( new File ( "./src/test/resources/cfn_templates/static-cfn.template" ) ) ; PrivateEc2Template template = ( PrivateEc2Template ) driver . getCFNTemplatePerService ( "static" ) ; Assert . assertNotNull ( template ) ; logger . info ( template . toString ( ) ) ; Assert . assertNotNull ( template . getResources ( ) ) ; Assert . assertNotNull ( template . getEC2Instance ( ) . getProperties ( ) . getImageId ( ) ) ; }
307
public BillInfo getBillInfo ( BaseBillId billId ) throws BillNotFoundEx { if ( billId == null ) { throw new IllegalArgumentException ( "BillId cannot be null" ) ; } if ( billCache . get ( billId ) != null ) { return new BillInfo ( ( Bill ) billCache . get ( billId ) . getObjectValue ( ) ) ; } if ( billInfoCache . get ( billId ) != null ) { return ( BillInfo ) billInfoCache . get ( billId ) . getObjectValue ( ) ; } try { BillInfo billInfo = billDao . getBillInfo ( billId ) ; billInfoCache . put ( new Element ( billId , billInfo ) ) ; return billInfo ; } catch ( EmptyResultDataAccessException ex ) { throw new BillNotFoundEx ( billId , ex ) ; } }
public BillInfo getBillInfo ( BaseBillId billId ) throws BillNotFoundEx { logger . debug ( "Fetching bill info {}.." , billId ) ; if ( billId == null ) { throw new IllegalArgumentException ( "BillId cannot be null" ) ; } if ( billCache . get ( billId ) != null ) { return new BillInfo ( ( Bill ) billCache . get ( billId ) . getObjectValue ( ) ) ; } if ( billInfoCache . get ( billId ) != null ) { return ( BillInfo ) billInfoCache . get ( billId ) . getObjectValue ( ) ; } try { BillInfo billInfo = billDao . getBillInfo ( billId ) ; billInfoCache . put ( new Element ( billId , billInfo ) ) ; return billInfo ; } catch ( EmptyResultDataAccessException ex ) { throw new BillNotFoundEx ( billId , ex ) ; } }
308
static Pair < Session , String > getSession ( String location , CassandraDeepJobConfig conf , Boolean balanced ) { assert balanced != null ; synchronized ( CLIENTS_CACHE ) { final int port = conf . getCqlPort ( ) ; final String key = location + ":" + port + ":" + conf . getKeyspace ( ) + ":" + balanced ; if ( CLIENTS_CACHE . containsKey ( key ) ) { return Pair . create ( CLIENTS_CACHE . get ( key ) , location ) ; } if ( balanced && location . equals ( conf . getHost ( ) ) ) { CLIENTS_CACHE . put ( key , conf . getSession ( ) ) ; LOG . trace ( "Found cached session at level 2 for key {{}}" , key ) ; return Pair . create ( CLIENTS_CACHE . get ( key ) , location ) ; } try { LOG . debug ( "No cached session found for key {{}}" , key ) ; InetAddress locationInet = InetAddress . getByName ( location ) ; LoadBalancingPolicy loadBalancingPolicy = balanced ? Policies . defaultLoadBalancingPolicy ( ) : new LocalMachineLoadBalancingPolicy ( locationInet ) ; Cluster cluster = Cluster . builder ( ) . withPort ( port ) . addContactPoint ( location ) . withLoadBalancingPolicy ( loadBalancingPolicy ) . withProtocolVersion ( ProtocolVersion . V2 ) . withCredentials ( conf . getUsername ( ) , conf . getPassword ( ) ) . build ( ) ; Session session = cluster . connect ( quote ( conf . getKeyspace ( ) ) ) ; CLIENTS_CACHE . put ( key , session ) ; return Pair . create ( CLIENTS_CACHE . get ( key ) , location ) ; } catch ( Exception e ) { throw new DeepIOException ( "Failed to create authenticated client to {" + location + "}:{" + port + "}" , e ) ; } } }
static Pair < Session , String > getSession ( String location , CassandraDeepJobConfig conf , Boolean balanced ) { assert balanced != null ; synchronized ( CLIENTS_CACHE ) { final int port = conf . getCqlPort ( ) ; final String key = location + ":" + port + ":" + conf . getKeyspace ( ) + ":" + balanced ; if ( CLIENTS_CACHE . containsKey ( key ) ) { LOG . trace ( "Found cached session at level 1 for key {{}}" , key ) ; return Pair . create ( CLIENTS_CACHE . get ( key ) , location ) ; } if ( balanced && location . equals ( conf . getHost ( ) ) ) { CLIENTS_CACHE . put ( key , conf . getSession ( ) ) ; LOG . trace ( "Found cached session at level 2 for key {{}}" , key ) ; return Pair . create ( CLIENTS_CACHE . get ( key ) , location ) ; } try { LOG . debug ( "No cached session found for key {{}}" , key ) ; InetAddress locationInet = InetAddress . getByName ( location ) ; LoadBalancingPolicy loadBalancingPolicy = balanced ? Policies . defaultLoadBalancingPolicy ( ) : new LocalMachineLoadBalancingPolicy ( locationInet ) ; Cluster cluster = Cluster . builder ( ) . withPort ( port ) . addContactPoint ( location ) . withLoadBalancingPolicy ( loadBalancingPolicy ) . withProtocolVersion ( ProtocolVersion . V2 ) . withCredentials ( conf . getUsername ( ) , conf . getPassword ( ) ) . build ( ) ; Session session = cluster . connect ( quote ( conf . getKeyspace ( ) ) ) ; CLIENTS_CACHE . put ( key , session ) ; return Pair . create ( CLIENTS_CACHE . get ( key ) , location ) ; } catch ( Exception e ) { throw new DeepIOException ( "Failed to create authenticated client to {" + location + "}:{" + port + "}" , e ) ; } } }
309
static Pair < Session , String > getSession ( String location , CassandraDeepJobConfig conf , Boolean balanced ) { assert balanced != null ; synchronized ( CLIENTS_CACHE ) { final int port = conf . getCqlPort ( ) ; final String key = location + ":" + port + ":" + conf . getKeyspace ( ) + ":" + balanced ; if ( CLIENTS_CACHE . containsKey ( key ) ) { LOG . trace ( "Found cached session at level 1 for key {{}}" , key ) ; return Pair . create ( CLIENTS_CACHE . get ( key ) , location ) ; } if ( balanced && location . equals ( conf . getHost ( ) ) ) { CLIENTS_CACHE . put ( key , conf . getSession ( ) ) ; return Pair . create ( CLIENTS_CACHE . get ( key ) , location ) ; } try { LOG . debug ( "No cached session found for key {{}}" , key ) ; InetAddress locationInet = InetAddress . getByName ( location ) ; LoadBalancingPolicy loadBalancingPolicy = balanced ? Policies . defaultLoadBalancingPolicy ( ) : new LocalMachineLoadBalancingPolicy ( locationInet ) ; Cluster cluster = Cluster . builder ( ) . withPort ( port ) . addContactPoint ( location ) . withLoadBalancingPolicy ( loadBalancingPolicy ) . withProtocolVersion ( ProtocolVersion . V2 ) . withCredentials ( conf . getUsername ( ) , conf . getPassword ( ) ) . build ( ) ; Session session = cluster . connect ( quote ( conf . getKeyspace ( ) ) ) ; CLIENTS_CACHE . put ( key , session ) ; return Pair . create ( CLIENTS_CACHE . get ( key ) , location ) ; } catch ( Exception e ) { throw new DeepIOException ( "Failed to create authenticated client to {" + location + "}:{" + port + "}" , e ) ; } } }
static Pair < Session , String > getSession ( String location , CassandraDeepJobConfig conf , Boolean balanced ) { assert balanced != null ; synchronized ( CLIENTS_CACHE ) { final int port = conf . getCqlPort ( ) ; final String key = location + ":" + port + ":" + conf . getKeyspace ( ) + ":" + balanced ; if ( CLIENTS_CACHE . containsKey ( key ) ) { LOG . trace ( "Found cached session at level 1 for key {{}}" , key ) ; return Pair . create ( CLIENTS_CACHE . get ( key ) , location ) ; } if ( balanced && location . equals ( conf . getHost ( ) ) ) { CLIENTS_CACHE . put ( key , conf . getSession ( ) ) ; LOG . trace ( "Found cached session at level 2 for key {{}}" , key ) ; return Pair . create ( CLIENTS_CACHE . get ( key ) , location ) ; } try { LOG . debug ( "No cached session found for key {{}}" , key ) ; InetAddress locationInet = InetAddress . getByName ( location ) ; LoadBalancingPolicy loadBalancingPolicy = balanced ? Policies . defaultLoadBalancingPolicy ( ) : new LocalMachineLoadBalancingPolicy ( locationInet ) ; Cluster cluster = Cluster . builder ( ) . withPort ( port ) . addContactPoint ( location ) . withLoadBalancingPolicy ( loadBalancingPolicy ) . withProtocolVersion ( ProtocolVersion . V2 ) . withCredentials ( conf . getUsername ( ) , conf . getPassword ( ) ) . build ( ) ; Session session = cluster . connect ( quote ( conf . getKeyspace ( ) ) ) ; CLIENTS_CACHE . put ( key , session ) ; return Pair . create ( CLIENTS_CACHE . get ( key ) , location ) ; } catch ( Exception e ) { throw new DeepIOException ( "Failed to create authenticated client to {" + location + "}:{" + port + "}" , e ) ; } } }
310
static Pair < Session , String > getSession ( String location , CassandraDeepJobConfig conf , Boolean balanced ) { assert balanced != null ; synchronized ( CLIENTS_CACHE ) { final int port = conf . getCqlPort ( ) ; final String key = location + ":" + port + ":" + conf . getKeyspace ( ) + ":" + balanced ; if ( CLIENTS_CACHE . containsKey ( key ) ) { LOG . trace ( "Found cached session at level 1 for key {{}}" , key ) ; return Pair . create ( CLIENTS_CACHE . get ( key ) , location ) ; } if ( balanced && location . equals ( conf . getHost ( ) ) ) { CLIENTS_CACHE . put ( key , conf . getSession ( ) ) ; LOG . trace ( "Found cached session at level 2 for key {{}}" , key ) ; return Pair . create ( CLIENTS_CACHE . get ( key ) , location ) ; } try { InetAddress locationInet = InetAddress . getByName ( location ) ; LoadBalancingPolicy loadBalancingPolicy = balanced ? Policies . defaultLoadBalancingPolicy ( ) : new LocalMachineLoadBalancingPolicy ( locationInet ) ; Cluster cluster = Cluster . builder ( ) . withPort ( port ) . addContactPoint ( location ) . withLoadBalancingPolicy ( loadBalancingPolicy ) . withProtocolVersion ( ProtocolVersion . V2 ) . withCredentials ( conf . getUsername ( ) , conf . getPassword ( ) ) . build ( ) ; Session session = cluster . connect ( quote ( conf . getKeyspace ( ) ) ) ; CLIENTS_CACHE . put ( key , session ) ; return Pair . create ( CLIENTS_CACHE . get ( key ) , location ) ; } catch ( Exception e ) { throw new DeepIOException ( "Failed to create authenticated client to {" + location + "}:{" + port + "}" , e ) ; } } }
static Pair < Session , String > getSession ( String location , CassandraDeepJobConfig conf , Boolean balanced ) { assert balanced != null ; synchronized ( CLIENTS_CACHE ) { final int port = conf . getCqlPort ( ) ; final String key = location + ":" + port + ":" + conf . getKeyspace ( ) + ":" + balanced ; if ( CLIENTS_CACHE . containsKey ( key ) ) { LOG . trace ( "Found cached session at level 1 for key {{}}" , key ) ; return Pair . create ( CLIENTS_CACHE . get ( key ) , location ) ; } if ( balanced && location . equals ( conf . getHost ( ) ) ) { CLIENTS_CACHE . put ( key , conf . getSession ( ) ) ; LOG . trace ( "Found cached session at level 2 for key {{}}" , key ) ; return Pair . create ( CLIENTS_CACHE . get ( key ) , location ) ; } try { LOG . debug ( "No cached session found for key {{}}" , key ) ; InetAddress locationInet = InetAddress . getByName ( location ) ; LoadBalancingPolicy loadBalancingPolicy = balanced ? Policies . defaultLoadBalancingPolicy ( ) : new LocalMachineLoadBalancingPolicy ( locationInet ) ; Cluster cluster = Cluster . builder ( ) . withPort ( port ) . addContactPoint ( location ) . withLoadBalancingPolicy ( loadBalancingPolicy ) . withProtocolVersion ( ProtocolVersion . V2 ) . withCredentials ( conf . getUsername ( ) , conf . getPassword ( ) ) . build ( ) ; Session session = cluster . connect ( quote ( conf . getKeyspace ( ) ) ) ; CLIENTS_CACHE . put ( key , session ) ; return Pair . create ( CLIENTS_CACHE . get ( key ) , location ) ; } catch ( Exception e ) { throw new DeepIOException ( "Failed to create authenticated client to {" + location + "}:{" + port + "}" , e ) ; } } }
311
public void run ( ) { try { String dashName = dash . getNameOrEmpty ( ) ; ArrayList < DeviceFileLink > pinsCSVFilePath = new ArrayList < > ( ) ; for ( GraphDataStream graphDataStream : enhancedHistoryGraph . dataStreams ) { DataStream dataStream = graphDataStream . dataStream ; int deviceId = graphDataStream . getTargetId ( targetId ) ; if ( dataStream != null ) { try { int [ ] deviceIds = new int [ ] { deviceId } ; if ( deviceId >= DeviceSelector . DEVICE_SELECTOR_STARTING_ID ) { Widget deviceSelector = dash . getWidgetById ( deviceId ) ; if ( deviceSelector == null ) { deviceSelector = dash . getWidgetByIdInDeviceTilesOrThrow ( deviceId ) ; } if ( deviceSelector instanceof DeviceSelector ) { deviceIds = ( ( DeviceSelector ) deviceSelector ) . deviceIds ; } } Path path = reportingDao . csvGenerator . createCSV ( user , dash . id , deviceId , dataStream . pinType , dataStream . pin , deviceIds ) ; Device device = user . profile . getDeviceById ( dash , deviceId ) ; String name = ( device == null || device . name == null ) ? dashName : device . name ; pinsCSVFilePath . add ( new DeviceFileLink ( path , name , dataStream . pinType , dataStream . pin ) ) ; } catch ( Exception e ) { } } } if ( pinsCSVFilePath . size ( ) == 0 ) { ctx . writeAndFlush ( noData ( msgId ) , ctx . voidPromise ( ) ) ; } else { String title = "History graph data for project " + dashName ; String bodyWithLinks = DeviceFileLink . makeBody ( csvDownloadUrl , pinsCSVFilePath ) ; mailWrapper . sendHtml ( user . email , title , bodyWithLinks ) ; ctx . writeAndFlush ( ok ( msgId ) , ctx . voidPromise ( ) ) ; } } catch ( Exception e ) { log . error ( "Error making csv file for data export. Reason {}" , e . getMessage ( ) ) ; ctx . writeAndFlush ( notificationError ( msgId ) , ctx . voidPromise ( ) ) ; } }
public void run ( ) { try { String dashName = dash . getNameOrEmpty ( ) ; ArrayList < DeviceFileLink > pinsCSVFilePath = new ArrayList < > ( ) ; for ( GraphDataStream graphDataStream : enhancedHistoryGraph . dataStreams ) { DataStream dataStream = graphDataStream . dataStream ; int deviceId = graphDataStream . getTargetId ( targetId ) ; if ( dataStream != null ) { try { int [ ] deviceIds = new int [ ] { deviceId } ; if ( deviceId >= DeviceSelector . DEVICE_SELECTOR_STARTING_ID ) { Widget deviceSelector = dash . getWidgetById ( deviceId ) ; if ( deviceSelector == null ) { deviceSelector = dash . getWidgetByIdInDeviceTilesOrThrow ( deviceId ) ; } if ( deviceSelector instanceof DeviceSelector ) { deviceIds = ( ( DeviceSelector ) deviceSelector ) . deviceIds ; } } Path path = reportingDao . csvGenerator . createCSV ( user , dash . id , deviceId , dataStream . pinType , dataStream . pin , deviceIds ) ; Device device = user . profile . getDeviceById ( dash , deviceId ) ; String name = ( device == null || device . name == null ) ? dashName : device . name ; pinsCSVFilePath . add ( new DeviceFileLink ( path , name , dataStream . pinType , dataStream . pin ) ) ; } catch ( Exception e ) { log . debug ( "Error generating csv file." , e ) ; } } } if ( pinsCSVFilePath . size ( ) == 0 ) { ctx . writeAndFlush ( noData ( msgId ) , ctx . voidPromise ( ) ) ; } else { String title = "History graph data for project " + dashName ; String bodyWithLinks = DeviceFileLink . makeBody ( csvDownloadUrl , pinsCSVFilePath ) ; mailWrapper . sendHtml ( user . email , title , bodyWithLinks ) ; ctx . writeAndFlush ( ok ( msgId ) , ctx . voidPromise ( ) ) ; } } catch ( Exception e ) { log . error ( "Error making csv file for data export. Reason {}" , e . getMessage ( ) ) ; ctx . writeAndFlush ( notificationError ( msgId ) , ctx . voidPromise ( ) ) ; } }
312
public void run ( ) { try { String dashName = dash . getNameOrEmpty ( ) ; ArrayList < DeviceFileLink > pinsCSVFilePath = new ArrayList < > ( ) ; for ( GraphDataStream graphDataStream : enhancedHistoryGraph . dataStreams ) { DataStream dataStream = graphDataStream . dataStream ; int deviceId = graphDataStream . getTargetId ( targetId ) ; if ( dataStream != null ) { try { int [ ] deviceIds = new int [ ] { deviceId } ; if ( deviceId >= DeviceSelector . DEVICE_SELECTOR_STARTING_ID ) { Widget deviceSelector = dash . getWidgetById ( deviceId ) ; if ( deviceSelector == null ) { deviceSelector = dash . getWidgetByIdInDeviceTilesOrThrow ( deviceId ) ; } if ( deviceSelector instanceof DeviceSelector ) { deviceIds = ( ( DeviceSelector ) deviceSelector ) . deviceIds ; } } Path path = reportingDao . csvGenerator . createCSV ( user , dash . id , deviceId , dataStream . pinType , dataStream . pin , deviceIds ) ; Device device = user . profile . getDeviceById ( dash , deviceId ) ; String name = ( device == null || device . name == null ) ? dashName : device . name ; pinsCSVFilePath . add ( new DeviceFileLink ( path , name , dataStream . pinType , dataStream . pin ) ) ; } catch ( Exception e ) { log . debug ( "Error generating csv file." , e ) ; } } } if ( pinsCSVFilePath . size ( ) == 0 ) { ctx . writeAndFlush ( noData ( msgId ) , ctx . voidPromise ( ) ) ; } else { String title = "History graph data for project " + dashName ; String bodyWithLinks = DeviceFileLink . makeBody ( csvDownloadUrl , pinsCSVFilePath ) ; mailWrapper . sendHtml ( user . email , title , bodyWithLinks ) ; ctx . writeAndFlush ( ok ( msgId ) , ctx . voidPromise ( ) ) ; } } catch ( Exception e ) { ctx . writeAndFlush ( notificationError ( msgId ) , ctx . voidPromise ( ) ) ; } }
public void run ( ) { try { String dashName = dash . getNameOrEmpty ( ) ; ArrayList < DeviceFileLink > pinsCSVFilePath = new ArrayList < > ( ) ; for ( GraphDataStream graphDataStream : enhancedHistoryGraph . dataStreams ) { DataStream dataStream = graphDataStream . dataStream ; int deviceId = graphDataStream . getTargetId ( targetId ) ; if ( dataStream != null ) { try { int [ ] deviceIds = new int [ ] { deviceId } ; if ( deviceId >= DeviceSelector . DEVICE_SELECTOR_STARTING_ID ) { Widget deviceSelector = dash . getWidgetById ( deviceId ) ; if ( deviceSelector == null ) { deviceSelector = dash . getWidgetByIdInDeviceTilesOrThrow ( deviceId ) ; } if ( deviceSelector instanceof DeviceSelector ) { deviceIds = ( ( DeviceSelector ) deviceSelector ) . deviceIds ; } } Path path = reportingDao . csvGenerator . createCSV ( user , dash . id , deviceId , dataStream . pinType , dataStream . pin , deviceIds ) ; Device device = user . profile . getDeviceById ( dash , deviceId ) ; String name = ( device == null || device . name == null ) ? dashName : device . name ; pinsCSVFilePath . add ( new DeviceFileLink ( path , name , dataStream . pinType , dataStream . pin ) ) ; } catch ( Exception e ) { log . debug ( "Error generating csv file." , e ) ; } } } if ( pinsCSVFilePath . size ( ) == 0 ) { ctx . writeAndFlush ( noData ( msgId ) , ctx . voidPromise ( ) ) ; } else { String title = "History graph data for project " + dashName ; String bodyWithLinks = DeviceFileLink . makeBody ( csvDownloadUrl , pinsCSVFilePath ) ; mailWrapper . sendHtml ( user . email , title , bodyWithLinks ) ; ctx . writeAndFlush ( ok ( msgId ) , ctx . voidPromise ( ) ) ; } } catch ( Exception e ) { log . error ( "Error making csv file for data export. Reason {}" , e . getMessage ( ) ) ; ctx . writeAndFlush ( notificationError ( msgId ) , ctx . voidPromise ( ) ) ; } }
313
@ Test public void get ( ) { try { String applicationUrl = hadoopUtils . getApplicationUrl ( "application_1542010131334_0029" ) ; String responseContent ; KerberosHttpClient kerberosHttpClient = new KerberosHttpClient ( PropertyUtils . getString ( Constants . LOGIN_USER_KEY_TAB_USERNAME ) , PropertyUtils . getString ( Constants . LOGIN_USER_KEY_TAB_PATH ) , PropertyUtils . getString ( Constants . JAVA_SECURITY_KRB5_CONF_PATH ) , true ) ; responseContent = kerberosHttpClient . get ( applicationUrl , PropertyUtils . getString ( Constants . LOGIN_USER_KEY_TAB_USERNAME ) ) ; Assert . assertNull ( responseContent ) ; } catch ( Exception e ) { } }
@ Test public void get ( ) { try { String applicationUrl = hadoopUtils . getApplicationUrl ( "application_1542010131334_0029" ) ; String responseContent ; KerberosHttpClient kerberosHttpClient = new KerberosHttpClient ( PropertyUtils . getString ( Constants . LOGIN_USER_KEY_TAB_USERNAME ) , PropertyUtils . getString ( Constants . LOGIN_USER_KEY_TAB_PATH ) , PropertyUtils . getString ( Constants . JAVA_SECURITY_KRB5_CONF_PATH ) , true ) ; responseContent = kerberosHttpClient . get ( applicationUrl , PropertyUtils . getString ( Constants . LOGIN_USER_KEY_TAB_USERNAME ) ) ; Assert . assertNull ( responseContent ) ; } catch ( Exception e ) { logger . error ( e . getMessage ( ) , e ) ; } }
314
public void parseLineInternal ( String line ) { switch ( state ) { case PREAMBLE : parsePreamble ( line ) ; break ; case PARTCONTENT : parsePartContent ( line ) ; break ; case PARTDONE : parsePartDone ( line ) ; break ; case EPILOGUE : LOGGER . debug ( "{}Epilogue line: {}" , logIndent , line ) ; break ; default : LOGGER . warn ( "{}Uhandled state: {}." , logIndent , state ) ; break ; } }
public void parseLineInternal ( String line ) { LOGGER . trace ( "{}Read line: {}" , logIndent , line ) ; switch ( state ) { case PREAMBLE : parsePreamble ( line ) ; break ; case PARTCONTENT : parsePartContent ( line ) ; break ; case PARTDONE : parsePartDone ( line ) ; break ; case EPILOGUE : LOGGER . debug ( "{}Epilogue line: {}" , logIndent , line ) ; break ; default : LOGGER . warn ( "{}Uhandled state: {}." , logIndent , state ) ; break ; } }
315
public void parseLineInternal ( String line ) { LOGGER . trace ( "{}Read line: {}" , logIndent , line ) ; switch ( state ) { case PREAMBLE : parsePreamble ( line ) ; break ; case PARTCONTENT : parsePartContent ( line ) ; break ; case PARTDONE : parsePartDone ( line ) ; break ; case EPILOGUE : break ; default : LOGGER . warn ( "{}Uhandled state: {}." , logIndent , state ) ; break ; } }
public void parseLineInternal ( String line ) { LOGGER . trace ( "{}Read line: {}" , logIndent , line ) ; switch ( state ) { case PREAMBLE : parsePreamble ( line ) ; break ; case PARTCONTENT : parsePartContent ( line ) ; break ; case PARTDONE : parsePartDone ( line ) ; break ; case EPILOGUE : LOGGER . debug ( "{}Epilogue line: {}" , logIndent , line ) ; break ; default : LOGGER . warn ( "{}Uhandled state: {}." , logIndent , state ) ; break ; } }
316
public void parseLineInternal ( String line ) { LOGGER . trace ( "{}Read line: {}" , logIndent , line ) ; switch ( state ) { case PREAMBLE : parsePreamble ( line ) ; break ; case PARTCONTENT : parsePartContent ( line ) ; break ; case PARTDONE : parsePartDone ( line ) ; break ; case EPILOGUE : LOGGER . debug ( "{}Epilogue line: {}" , logIndent , line ) ; break ; default : break ; } }
public void parseLineInternal ( String line ) { LOGGER . trace ( "{}Read line: {}" , logIndent , line ) ; switch ( state ) { case PREAMBLE : parsePreamble ( line ) ; break ; case PARTCONTENT : parsePartContent ( line ) ; break ; case PARTDONE : parsePartDone ( line ) ; break ; case EPILOGUE : LOGGER . debug ( "{}Epilogue line: {}" , logIndent , line ) ; break ; default : LOGGER . warn ( "{}Uhandled state: {}." , logIndent , state ) ; break ; } }
317
public void write ( String string , int offset , int length ) { if ( _writer == null ) { _hasError = true ; } else { try { _writer . write ( string , offset , length ) ; } catch ( InterruptedIOException interruptedIOException ) { if ( _log . isDebugEnabled ( ) ) { } Thread currentThread = Thread . currentThread ( ) ; currentThread . interrupt ( ) ; } catch ( IOException ioException ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( ioException , ioException ) ; } _hasError = true ; } } }
public void write ( String string , int offset , int length ) { if ( _writer == null ) { _hasError = true ; } else { try { _writer . write ( string , offset , length ) ; } catch ( InterruptedIOException interruptedIOException ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( interruptedIOException , interruptedIOException ) ; } Thread currentThread = Thread . currentThread ( ) ; currentThread . interrupt ( ) ; } catch ( IOException ioException ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( ioException , ioException ) ; } _hasError = true ; } } }
318
public void write ( String string , int offset , int length ) { if ( _writer == null ) { _hasError = true ; } else { try { _writer . write ( string , offset , length ) ; } catch ( InterruptedIOException interruptedIOException ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( interruptedIOException , interruptedIOException ) ; } Thread currentThread = Thread . currentThread ( ) ; currentThread . interrupt ( ) ; } catch ( IOException ioException ) { if ( _log . isDebugEnabled ( ) ) { } _hasError = true ; } } }
public void write ( String string , int offset , int length ) { if ( _writer == null ) { _hasError = true ; } else { try { _writer . write ( string , offset , length ) ; } catch ( InterruptedIOException interruptedIOException ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( interruptedIOException , interruptedIOException ) ; } Thread currentThread = Thread . currentThread ( ) ; currentThread . interrupt ( ) ; } catch ( IOException ioException ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( ioException , ioException ) ; } _hasError = true ; } } }
319
private static boolean checkFilename ( String filename ) { if ( ! filename . endsWith ( ".xml" ) ) { return false ; } if ( ! new File ( filename ) . isFile ( ) ) { LOGGER . warn ( "{} ignored, is not a file." , filename ) ; return false ; } return true ; }
private static boolean checkFilename ( String filename ) { if ( ! filename . endsWith ( ".xml" ) ) { LOGGER . warn ( "{} ignored, does not end with *.xml" , filename ) ; return false ; } if ( ! new File ( filename ) . isFile ( ) ) { LOGGER . warn ( "{} ignored, is not a file." , filename ) ; return false ; } return true ; }
320
private static boolean checkFilename ( String filename ) { if ( ! filename . endsWith ( ".xml" ) ) { LOGGER . warn ( "{} ignored, does not end with *.xml" , filename ) ; return false ; } if ( ! new File ( filename ) . isFile ( ) ) { return false ; } return true ; }
private static boolean checkFilename ( String filename ) { if ( ! filename . endsWith ( ".xml" ) ) { LOGGER . warn ( "{} ignored, does not end with *.xml" , filename ) ; return false ; } if ( ! new File ( filename ) . isFile ( ) ) { LOGGER . warn ( "{} ignored, is not a file." , filename ) ; return false ; } return true ; }
321
private void createSubDirRecursively ( String parent , int depthIndex , int spanIndex ) throws Exception { if ( depthIndex < depth ) { String depthSubDir = makeDirWithGivenNumberOfFiles ( parent ) ; ++ depthIndex ; if ( LOG . isDebugEnabled ( ) ) { } if ( depthIndex < depth ) { createSubDirRecursively ( depthSubDir , depthIndex , spanIndex ) ; } } while ( spanIndex < span ) { String levelSubDir = makeDirWithGivenNumberOfFiles ( parent ) ; ++ spanIndex ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "SpanSubDir:{}, depthIndex:{}, spanIndex:{} +" , levelSubDir , depthIndex , spanIndex ) ; } if ( depthIndex < depth ) { createSubDirRecursively ( levelSubDir , depthIndex , 1 ) ; } } }
private void createSubDirRecursively ( String parent , int depthIndex , int spanIndex ) throws Exception { if ( depthIndex < depth ) { String depthSubDir = makeDirWithGivenNumberOfFiles ( parent ) ; ++ depthIndex ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "SubDir:{}, depthIndex:{} +" , depthSubDir , depthIndex ) ; } if ( depthIndex < depth ) { createSubDirRecursively ( depthSubDir , depthIndex , spanIndex ) ; } } while ( spanIndex < span ) { String levelSubDir = makeDirWithGivenNumberOfFiles ( parent ) ; ++ spanIndex ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "SpanSubDir:{}, depthIndex:{}, spanIndex:{} +" , levelSubDir , depthIndex , spanIndex ) ; } if ( depthIndex < depth ) { createSubDirRecursively ( levelSubDir , depthIndex , 1 ) ; } } }
322
private void createSubDirRecursively ( String parent , int depthIndex , int spanIndex ) throws Exception { if ( depthIndex < depth ) { String depthSubDir = makeDirWithGivenNumberOfFiles ( parent ) ; ++ depthIndex ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "SubDir:{}, depthIndex:{} +" , depthSubDir , depthIndex ) ; } if ( depthIndex < depth ) { createSubDirRecursively ( depthSubDir , depthIndex , spanIndex ) ; } } while ( spanIndex < span ) { String levelSubDir = makeDirWithGivenNumberOfFiles ( parent ) ; ++ spanIndex ; if ( LOG . isDebugEnabled ( ) ) { } if ( depthIndex < depth ) { createSubDirRecursively ( levelSubDir , depthIndex , 1 ) ; } } }
private void createSubDirRecursively ( String parent , int depthIndex , int spanIndex ) throws Exception { if ( depthIndex < depth ) { String depthSubDir = makeDirWithGivenNumberOfFiles ( parent ) ; ++ depthIndex ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "SubDir:{}, depthIndex:{} +" , depthSubDir , depthIndex ) ; } if ( depthIndex < depth ) { createSubDirRecursively ( depthSubDir , depthIndex , spanIndex ) ; } } while ( spanIndex < span ) { String levelSubDir = makeDirWithGivenNumberOfFiles ( parent ) ; ++ spanIndex ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "SpanSubDir:{}, depthIndex:{}, spanIndex:{} +" , levelSubDir , depthIndex , spanIndex ) ; } if ( depthIndex < depth ) { createSubDirRecursively ( levelSubDir , depthIndex , 1 ) ; } } }
323
public static void subscribeCategory ( long groupId , long categoryId ) throws RemoteException { try { MBCategoryServiceUtil . subscribeCategory ( groupId , categoryId ) ; } catch ( Exception exception ) { throw new RemoteException ( exception . getMessage ( ) ) ; } }
public static void subscribeCategory ( long groupId , long categoryId ) throws RemoteException { try { MBCategoryServiceUtil . subscribeCategory ( groupId , categoryId ) ; } catch ( Exception exception ) { _log . error ( exception , exception ) ; throw new RemoteException ( exception . getMessage ( ) ) ; } }
324
public void doFilter ( ServletRequest req , ServletResponse res , FilterChain chain ) throws IOException , ServletException { HttpServletRequest request = ( HttpServletRequest ) req ; HttpServletResponse response = ( HttpServletResponse ) res ; if ( request . getAttribute ( FILTER_APPLIED ) != null ) { chain . doFilter ( request , response ) ; return ; } request . setAttribute ( FILTER_APPLIED , Boolean . TRUE ) ; LOGGER . debug ( "applying Context-id-filter" ) ; HttpSession httpSession = request . getSession ( ) ; String contextBeforeChainExecution = ( String ) httpSession . getAttribute ( CONTEXT_ID_ATTRIBUTE_NAME ) ; try { if ( LOGGER . isDebugEnabled ( ) ) { String oldContext = ContextHolder . get ( ) . getCurrentContextId ( ) ; if ( oldContext == null || ! oldContext . equals ( contextBeforeChainExecution ) ) { LOGGER . debug ( "correcting threadlocal context-id from {} to {} in thread {}" , new Object [ ] { oldContext , contextBeforeChainExecution , Thread . currentThread ( ) . getId ( ) } ) ; } } ContextHolder . get ( ) . setCurrentContextId ( contextBeforeChainExecution ) ; chain . doFilter ( req , res ) ; } finally { String currentContextId = ContextHolder . get ( ) . getCurrentContextId ( ) ; LOGGER . debug ( "request done; storing threadlocal context to session ({})" , currentContextId ) ; String contextAfterChainExecution = currentContextId ; httpSession . setAttribute ( CONTEXT_ID_ATTRIBUTE_NAME , contextAfterChainExecution ) ; request . removeAttribute ( FILTER_APPLIED ) ; } }
public void doFilter ( ServletRequest req , ServletResponse res , FilterChain chain ) throws IOException , ServletException { HttpServletRequest request = ( HttpServletRequest ) req ; HttpServletResponse response = ( HttpServletResponse ) res ; if ( request . getAttribute ( FILTER_APPLIED ) != null ) { LOGGER . debug ( "filter was already applied, moving on to next Filter in chain" ) ; chain . doFilter ( request , response ) ; return ; } request . setAttribute ( FILTER_APPLIED , Boolean . TRUE ) ; LOGGER . debug ( "applying Context-id-filter" ) ; HttpSession httpSession = request . getSession ( ) ; String contextBeforeChainExecution = ( String ) httpSession . getAttribute ( CONTEXT_ID_ATTRIBUTE_NAME ) ; try { if ( LOGGER . isDebugEnabled ( ) ) { String oldContext = ContextHolder . get ( ) . getCurrentContextId ( ) ; if ( oldContext == null || ! oldContext . equals ( contextBeforeChainExecution ) ) { LOGGER . debug ( "correcting threadlocal context-id from {} to {} in thread {}" , new Object [ ] { oldContext , contextBeforeChainExecution , Thread . currentThread ( ) . getId ( ) } ) ; } } ContextHolder . get ( ) . setCurrentContextId ( contextBeforeChainExecution ) ; chain . doFilter ( req , res ) ; } finally { String currentContextId = ContextHolder . get ( ) . getCurrentContextId ( ) ; LOGGER . debug ( "request done; storing threadlocal context to session ({})" , currentContextId ) ; String contextAfterChainExecution = currentContextId ; httpSession . setAttribute ( CONTEXT_ID_ATTRIBUTE_NAME , contextAfterChainExecution ) ; request . removeAttribute ( FILTER_APPLIED ) ; } }
325
public void doFilter ( ServletRequest req , ServletResponse res , FilterChain chain ) throws IOException , ServletException { HttpServletRequest request = ( HttpServletRequest ) req ; HttpServletResponse response = ( HttpServletResponse ) res ; if ( request . getAttribute ( FILTER_APPLIED ) != null ) { LOGGER . debug ( "filter was already applied, moving on to next Filter in chain" ) ; chain . doFilter ( request , response ) ; return ; } request . setAttribute ( FILTER_APPLIED , Boolean . TRUE ) ; HttpSession httpSession = request . getSession ( ) ; String contextBeforeChainExecution = ( String ) httpSession . getAttribute ( CONTEXT_ID_ATTRIBUTE_NAME ) ; try { if ( LOGGER . isDebugEnabled ( ) ) { String oldContext = ContextHolder . get ( ) . getCurrentContextId ( ) ; if ( oldContext == null || ! oldContext . equals ( contextBeforeChainExecution ) ) { LOGGER . debug ( "correcting threadlocal context-id from {} to {} in thread {}" , new Object [ ] { oldContext , contextBeforeChainExecution , Thread . currentThread ( ) . getId ( ) } ) ; } } ContextHolder . get ( ) . setCurrentContextId ( contextBeforeChainExecution ) ; chain . doFilter ( req , res ) ; } finally { String currentContextId = ContextHolder . get ( ) . getCurrentContextId ( ) ; LOGGER . debug ( "request done; storing threadlocal context to session ({})" , currentContextId ) ; String contextAfterChainExecution = currentContextId ; httpSession . setAttribute ( CONTEXT_ID_ATTRIBUTE_NAME , contextAfterChainExecution ) ; request . removeAttribute ( FILTER_APPLIED ) ; } }
public void doFilter ( ServletRequest req , ServletResponse res , FilterChain chain ) throws IOException , ServletException { HttpServletRequest request = ( HttpServletRequest ) req ; HttpServletResponse response = ( HttpServletResponse ) res ; if ( request . getAttribute ( FILTER_APPLIED ) != null ) { LOGGER . debug ( "filter was already applied, moving on to next Filter in chain" ) ; chain . doFilter ( request , response ) ; return ; } request . setAttribute ( FILTER_APPLIED , Boolean . TRUE ) ; LOGGER . debug ( "applying Context-id-filter" ) ; HttpSession httpSession = request . getSession ( ) ; String contextBeforeChainExecution = ( String ) httpSession . getAttribute ( CONTEXT_ID_ATTRIBUTE_NAME ) ; try { if ( LOGGER . isDebugEnabled ( ) ) { String oldContext = ContextHolder . get ( ) . getCurrentContextId ( ) ; if ( oldContext == null || ! oldContext . equals ( contextBeforeChainExecution ) ) { LOGGER . debug ( "correcting threadlocal context-id from {} to {} in thread {}" , new Object [ ] { oldContext , contextBeforeChainExecution , Thread . currentThread ( ) . getId ( ) } ) ; } } ContextHolder . get ( ) . setCurrentContextId ( contextBeforeChainExecution ) ; chain . doFilter ( req , res ) ; } finally { String currentContextId = ContextHolder . get ( ) . getCurrentContextId ( ) ; LOGGER . debug ( "request done; storing threadlocal context to session ({})" , currentContextId ) ; String contextAfterChainExecution = currentContextId ; httpSession . setAttribute ( CONTEXT_ID_ATTRIBUTE_NAME , contextAfterChainExecution ) ; request . removeAttribute ( FILTER_APPLIED ) ; } }
326
public void doFilter ( ServletRequest req , ServletResponse res , FilterChain chain ) throws IOException , ServletException { HttpServletRequest request = ( HttpServletRequest ) req ; HttpServletResponse response = ( HttpServletResponse ) res ; if ( request . getAttribute ( FILTER_APPLIED ) != null ) { LOGGER . debug ( "filter was already applied, moving on to next Filter in chain" ) ; chain . doFilter ( request , response ) ; return ; } request . setAttribute ( FILTER_APPLIED , Boolean . TRUE ) ; LOGGER . debug ( "applying Context-id-filter" ) ; HttpSession httpSession = request . getSession ( ) ; String contextBeforeChainExecution = ( String ) httpSession . getAttribute ( CONTEXT_ID_ATTRIBUTE_NAME ) ; try { if ( LOGGER . isDebugEnabled ( ) ) { String oldContext = ContextHolder . get ( ) . getCurrentContextId ( ) ; if ( oldContext == null || ! oldContext . equals ( contextBeforeChainExecution ) ) { } } ContextHolder . get ( ) . setCurrentContextId ( contextBeforeChainExecution ) ; chain . doFilter ( req , res ) ; } finally { String currentContextId = ContextHolder . get ( ) . getCurrentContextId ( ) ; LOGGER . debug ( "request done; storing threadlocal context to session ({})" , currentContextId ) ; String contextAfterChainExecution = currentContextId ; httpSession . setAttribute ( CONTEXT_ID_ATTRIBUTE_NAME , contextAfterChainExecution ) ; request . removeAttribute ( FILTER_APPLIED ) ; } }
public void doFilter ( ServletRequest req , ServletResponse res , FilterChain chain ) throws IOException , ServletException { HttpServletRequest request = ( HttpServletRequest ) req ; HttpServletResponse response = ( HttpServletResponse ) res ; if ( request . getAttribute ( FILTER_APPLIED ) != null ) { LOGGER . debug ( "filter was already applied, moving on to next Filter in chain" ) ; chain . doFilter ( request , response ) ; return ; } request . setAttribute ( FILTER_APPLIED , Boolean . TRUE ) ; LOGGER . debug ( "applying Context-id-filter" ) ; HttpSession httpSession = request . getSession ( ) ; String contextBeforeChainExecution = ( String ) httpSession . getAttribute ( CONTEXT_ID_ATTRIBUTE_NAME ) ; try { if ( LOGGER . isDebugEnabled ( ) ) { String oldContext = ContextHolder . get ( ) . getCurrentContextId ( ) ; if ( oldContext == null || ! oldContext . equals ( contextBeforeChainExecution ) ) { LOGGER . debug ( "correcting threadlocal context-id from {} to {} in thread {}" , new Object [ ] { oldContext , contextBeforeChainExecution , Thread . currentThread ( ) . getId ( ) } ) ; } } ContextHolder . get ( ) . setCurrentContextId ( contextBeforeChainExecution ) ; chain . doFilter ( req , res ) ; } finally { String currentContextId = ContextHolder . get ( ) . getCurrentContextId ( ) ; LOGGER . debug ( "request done; storing threadlocal context to session ({})" , currentContextId ) ; String contextAfterChainExecution = currentContextId ; httpSession . setAttribute ( CONTEXT_ID_ATTRIBUTE_NAME , contextAfterChainExecution ) ; request . removeAttribute ( FILTER_APPLIED ) ; } }
327
public void doFilter ( ServletRequest req , ServletResponse res , FilterChain chain ) throws IOException , ServletException { HttpServletRequest request = ( HttpServletRequest ) req ; HttpServletResponse response = ( HttpServletResponse ) res ; if ( request . getAttribute ( FILTER_APPLIED ) != null ) { LOGGER . debug ( "filter was already applied, moving on to next Filter in chain" ) ; chain . doFilter ( request , response ) ; return ; } request . setAttribute ( FILTER_APPLIED , Boolean . TRUE ) ; LOGGER . debug ( "applying Context-id-filter" ) ; HttpSession httpSession = request . getSession ( ) ; String contextBeforeChainExecution = ( String ) httpSession . getAttribute ( CONTEXT_ID_ATTRIBUTE_NAME ) ; try { if ( LOGGER . isDebugEnabled ( ) ) { String oldContext = ContextHolder . get ( ) . getCurrentContextId ( ) ; if ( oldContext == null || ! oldContext . equals ( contextBeforeChainExecution ) ) { LOGGER . debug ( "correcting threadlocal context-id from {} to {} in thread {}" , new Object [ ] { oldContext , contextBeforeChainExecution , Thread . currentThread ( ) . getId ( ) } ) ; } } ContextHolder . get ( ) . setCurrentContextId ( contextBeforeChainExecution ) ; chain . doFilter ( req , res ) ; } finally { String currentContextId = ContextHolder . get ( ) . getCurrentContextId ( ) ; String contextAfterChainExecution = currentContextId ; httpSession . setAttribute ( CONTEXT_ID_ATTRIBUTE_NAME , contextAfterChainExecution ) ; request . removeAttribute ( FILTER_APPLIED ) ; } }
public void doFilter ( ServletRequest req , ServletResponse res , FilterChain chain ) throws IOException , ServletException { HttpServletRequest request = ( HttpServletRequest ) req ; HttpServletResponse response = ( HttpServletResponse ) res ; if ( request . getAttribute ( FILTER_APPLIED ) != null ) { LOGGER . debug ( "filter was already applied, moving on to next Filter in chain" ) ; chain . doFilter ( request , response ) ; return ; } request . setAttribute ( FILTER_APPLIED , Boolean . TRUE ) ; LOGGER . debug ( "applying Context-id-filter" ) ; HttpSession httpSession = request . getSession ( ) ; String contextBeforeChainExecution = ( String ) httpSession . getAttribute ( CONTEXT_ID_ATTRIBUTE_NAME ) ; try { if ( LOGGER . isDebugEnabled ( ) ) { String oldContext = ContextHolder . get ( ) . getCurrentContextId ( ) ; if ( oldContext == null || ! oldContext . equals ( contextBeforeChainExecution ) ) { LOGGER . debug ( "correcting threadlocal context-id from {} to {} in thread {}" , new Object [ ] { oldContext , contextBeforeChainExecution , Thread . currentThread ( ) . getId ( ) } ) ; } } ContextHolder . get ( ) . setCurrentContextId ( contextBeforeChainExecution ) ; chain . doFilter ( req , res ) ; } finally { String currentContextId = ContextHolder . get ( ) . getCurrentContextId ( ) ; LOGGER . debug ( "request done; storing threadlocal context to session ({})" , currentContextId ) ; String contextAfterChainExecution = currentContextId ; httpSession . setAttribute ( CONTEXT_ID_ATTRIBUTE_NAME , contextAfterChainExecution ) ; request . removeAttribute ( FILTER_APPLIED ) ; } }
328
public void setConnected ( IReplicationConnectionProxy proxy , T tag , String endPointLookupName , Uuid proxyId ) { boolean newTarget = _proxyId == null ? true : ! _proxyId . equals ( proxyId ) ; synchronized ( getStateLock ( ) ) { if ( _connectionState == ConnectionState . CONNECTED ) return ; _timeOfDisconnection = null ; _connectionProxy = proxy ; _tag = tag ; _endPointLookupName = endPointLookupName ; _proxyId = proxyId ; _connectionState = ConnectionState . CONNECTED ; if ( _specificLogger . isInfoEnabled ( ) ) addPendingEvent ( newTarget ? StateChangedEvent . CONNECTED_NEW : StateChangedEvent . CONNECTED_OLD ) ; } }
public void setConnected ( IReplicationConnectionProxy proxy , T tag , String endPointLookupName , Uuid proxyId ) { boolean newTarget = _proxyId == null ? true : ! _proxyId . equals ( proxyId ) ; synchronized ( getStateLock ( ) ) { if ( _connectionState == ConnectionState . CONNECTED ) return ; _timeOfDisconnection = null ; _connectionProxy = proxy ; _tag = tag ; _endPointLookupName = endPointLookupName ; _proxyId = proxyId ; _connectionState = ConnectionState . CONNECTED ; if ( _specificLogger . isInfoEnabled ( ) ) _specificLogger . info ( "Connection state updated to 'CONNECTED', Lookup name: " + endPointLookupName ) ; addPendingEvent ( newTarget ? StateChangedEvent . CONNECTED_NEW : StateChangedEvent . CONNECTED_OLD ) ; } }
329
private void scheduleStateExpiration ( final CommandQueue queue ) { if ( queue . stateExpirationJob != null ) { queue . stateExpirationJob . cancel ( false ) ; queue . stateExpirationJob = null ; } Runnable r = ( ) -> { log . info ( "Returning {} queue to default state {}" , queue . getName ( ) , queue . defaultState ) ; setQueueState ( queue . getName ( ) , queue . defaultState ) ; queue . stateExpirationJob = null ; } ; queue . stateExpirationRemainingS = queue . stateExpirationTimeS ; queue . stateExpirationJob = timer . schedule ( r , queue . stateExpirationTimeS , TimeUnit . SECONDS ) ; }
private void scheduleStateExpiration ( final CommandQueue queue ) { if ( queue . stateExpirationJob != null ) { log . debug ( "expiration job existing, removing..." ) ; queue . stateExpirationJob . cancel ( false ) ; queue . stateExpirationJob = null ; } Runnable r = ( ) -> { log . info ( "Returning {} queue to default state {}" , queue . getName ( ) , queue . defaultState ) ; setQueueState ( queue . getName ( ) , queue . defaultState ) ; queue . stateExpirationJob = null ; } ; queue . stateExpirationRemainingS = queue . stateExpirationTimeS ; queue . stateExpirationJob = timer . schedule ( r , queue . stateExpirationTimeS , TimeUnit . SECONDS ) ; }
330
private void scheduleStateExpiration ( final CommandQueue queue ) { if ( queue . stateExpirationJob != null ) { log . debug ( "expiration job existing, removing..." ) ; queue . stateExpirationJob . cancel ( false ) ; queue . stateExpirationJob = null ; } Runnable r = ( ) -> { setQueueState ( queue . getName ( ) , queue . defaultState ) ; queue . stateExpirationJob = null ; } ; queue . stateExpirationRemainingS = queue . stateExpirationTimeS ; queue . stateExpirationJob = timer . schedule ( r , queue . stateExpirationTimeS , TimeUnit . SECONDS ) ; }
private void scheduleStateExpiration ( final CommandQueue queue ) { if ( queue . stateExpirationJob != null ) { log . debug ( "expiration job existing, removing..." ) ; queue . stateExpirationJob . cancel ( false ) ; queue . stateExpirationJob = null ; } Runnable r = ( ) -> { log . info ( "Returning {} queue to default state {}" , queue . getName ( ) , queue . defaultState ) ; setQueueState ( queue . getName ( ) , queue . defaultState ) ; queue . stateExpirationJob = null ; } ; queue . stateExpirationRemainingS = queue . stateExpirationTimeS ; queue . stateExpirationJob = timer . schedule ( r , queue . stateExpirationTimeS , TimeUnit . SECONDS ) ; }
331
private void writeMutations ( final RowMutations rowMutation ) { try { synchronized ( duplicateRowTracker ) { mutator . mutate ( rowMutation . getMutations ( ) ) ; } } catch ( final IOException e ) { } }
private void writeMutations ( final RowMutations rowMutation ) { try { synchronized ( duplicateRowTracker ) { mutator . mutate ( rowMutation . getMutations ( ) ) ; } } catch ( final IOException e ) { LOGGER . error ( "Unable to write mutation." , e ) ; } }
332
public ViewMetadata parseView ( AdminRow viewRow , CqlIdentifier keyspaceId , Map < CqlIdentifier , UserDefinedType > userTypes ) { CqlIdentifier viewId = CqlIdentifier . fromInternal ( viewRow . getString ( "view_name" ) ) ; UUID uuid = viewRow . getUuid ( "id" ) ; CqlIdentifier baseTableId = CqlIdentifier . fromInternal ( viewRow . getString ( "base_table_name" ) ) ; boolean includesAllColumns = MoreObjects . firstNonNull ( viewRow . getBoolean ( "include_all_columns" ) , false ) ; String whereClause = viewRow . getString ( "where_clause" ) ; List < RawColumn > rawColumns = RawColumn . toRawColumns ( rows . columns ( ) . getOrDefault ( keyspaceId , ImmutableMultimap . of ( ) ) . get ( viewId ) ) ; if ( rawColumns . isEmpty ( ) ) { return null ; } Collections . sort ( rawColumns ) ; ImmutableMap . Builder < CqlIdentifier , ColumnMetadata > allColumnsBuilder = ImmutableMap . builder ( ) ; ImmutableList . Builder < ColumnMetadata > partitionKeyBuilder = ImmutableList . builder ( ) ; ImmutableMap . Builder < ColumnMetadata , ClusteringOrder > clusteringColumnsBuilder = ImmutableMap . builder ( ) ; for ( RawColumn raw : rawColumns ) { DataType dataType = rows . dataTypeParser ( ) . parse ( keyspaceId , raw . dataType , userTypes , context ) ; ColumnMetadata column = new DefaultColumnMetadata ( keyspaceId , viewId , raw . name , dataType , raw . kind . equals ( RawColumn . KIND_STATIC ) ) ; switch ( raw . kind ) { case RawColumn . KIND_PARTITION_KEY : partitionKeyBuilder . add ( column ) ; break ; case RawColumn . KIND_CLUSTERING_COLUMN : clusteringColumnsBuilder . put ( column , raw . reversed ? ClusteringOrder . DESC : ClusteringOrder . ASC ) ; break ; default : } allColumnsBuilder . put ( column . getName ( ) , column ) ; } Map < CqlIdentifier , Object > options ; try { options = parseOptions ( viewRow ) ; } catch ( Exception e ) { Loggers . warnWithException ( LOG , "[{}] Error while parsing options for {}.{}, getOptions() will be empty" , logPrefix , keyspaceId , viewId , e ) ; options = Collections . emptyMap ( ) ; } return new DefaultViewMetadata ( keyspaceId , viewId , baseTableId , includesAllColumns , whereClause , uuid , partitionKeyBuilder . build ( ) , clusteringColumnsBuilder . build ( ) , allColumnsBuilder . build ( ) , options ) ; }
public ViewMetadata parseView ( AdminRow viewRow , CqlIdentifier keyspaceId , Map < CqlIdentifier , UserDefinedType > userTypes ) { CqlIdentifier viewId = CqlIdentifier . fromInternal ( viewRow . getString ( "view_name" ) ) ; UUID uuid = viewRow . getUuid ( "id" ) ; CqlIdentifier baseTableId = CqlIdentifier . fromInternal ( viewRow . getString ( "base_table_name" ) ) ; boolean includesAllColumns = MoreObjects . firstNonNull ( viewRow . getBoolean ( "include_all_columns" ) , false ) ; String whereClause = viewRow . getString ( "where_clause" ) ; List < RawColumn > rawColumns = RawColumn . toRawColumns ( rows . columns ( ) . getOrDefault ( keyspaceId , ImmutableMultimap . of ( ) ) . get ( viewId ) ) ; if ( rawColumns . isEmpty ( ) ) { LOG . warn ( "[{}] Processing VIEW refresh for {}.{} but found no matching rows, skipping" , logPrefix , keyspaceId , viewId ) ; return null ; } Collections . sort ( rawColumns ) ; ImmutableMap . Builder < CqlIdentifier , ColumnMetadata > allColumnsBuilder = ImmutableMap . builder ( ) ; ImmutableList . Builder < ColumnMetadata > partitionKeyBuilder = ImmutableList . builder ( ) ; ImmutableMap . Builder < ColumnMetadata , ClusteringOrder > clusteringColumnsBuilder = ImmutableMap . builder ( ) ; for ( RawColumn raw : rawColumns ) { DataType dataType = rows . dataTypeParser ( ) . parse ( keyspaceId , raw . dataType , userTypes , context ) ; ColumnMetadata column = new DefaultColumnMetadata ( keyspaceId , viewId , raw . name , dataType , raw . kind . equals ( RawColumn . KIND_STATIC ) ) ; switch ( raw . kind ) { case RawColumn . KIND_PARTITION_KEY : partitionKeyBuilder . add ( column ) ; break ; case RawColumn . KIND_CLUSTERING_COLUMN : clusteringColumnsBuilder . put ( column , raw . reversed ? ClusteringOrder . DESC : ClusteringOrder . ASC ) ; break ; default : } allColumnsBuilder . put ( column . getName ( ) , column ) ; } Map < CqlIdentifier , Object > options ; try { options = parseOptions ( viewRow ) ; } catch ( Exception e ) { Loggers . warnWithException ( LOG , "[{}] Error while parsing options for {}.{}, getOptions() will be empty" , logPrefix , keyspaceId , viewId , e ) ; options = Collections . emptyMap ( ) ; } return new DefaultViewMetadata ( keyspaceId , viewId , baseTableId , includesAllColumns , whereClause , uuid , partitionKeyBuilder . build ( ) , clusteringColumnsBuilder . build ( ) , allColumnsBuilder . build ( ) , options ) ; }
333
public void executeUpdate ( Connection connection ) throws SQLException { int index = 0 ; StringBuilder sb = new StringBuilder ( ) ; for ( Row row : rows ) { try { for ( ; index < row . getArity ( ) ; index ++ ) { Object rowData = row . getField ( index ) ; sb . append ( rowData ) . append ( DEFAULT_FIELD_DELIM ) ; } String rowVal = sb . toString ( ) ; ByteArrayInputStream bi = new ByteArrayInputStream ( rowVal . getBytes ( StandardCharsets . UTF_8 ) ) ; copyManager . copyIn ( copySql , bi ) ; connection . commit ( ) ; } catch ( Exception e ) { connection . rollback ( ) ; connection . commit ( ) ; if ( metricOutputFormat . outDirtyRecords . getCount ( ) % DIRTYDATA_PRINT_FREQUENTY == 0 || LOG . isDebugEnabled ( ) ) { LOG . error ( "" , e ) ; } metricOutputFormat . outDirtyRecords . inc ( ) ; } } rows . clear ( ) ; }
public void executeUpdate ( Connection connection ) throws SQLException { int index = 0 ; StringBuilder sb = new StringBuilder ( ) ; for ( Row row : rows ) { try { for ( ; index < row . getArity ( ) ; index ++ ) { Object rowData = row . getField ( index ) ; sb . append ( rowData ) . append ( DEFAULT_FIELD_DELIM ) ; } String rowVal = sb . toString ( ) ; ByteArrayInputStream bi = new ByteArrayInputStream ( rowVal . getBytes ( StandardCharsets . UTF_8 ) ) ; copyManager . copyIn ( copySql , bi ) ; connection . commit ( ) ; } catch ( Exception e ) { connection . rollback ( ) ; connection . commit ( ) ; if ( metricOutputFormat . outDirtyRecords . getCount ( ) % DIRTYDATA_PRINT_FREQUENTY == 0 || LOG . isDebugEnabled ( ) ) { LOG . error ( "record insert failed ,this row is {}" , row . toString ( ) ) ; LOG . error ( "" , e ) ; } metricOutputFormat . outDirtyRecords . inc ( ) ; } } rows . clear ( ) ; }
334
public void executeUpdate ( Connection connection ) throws SQLException { int index = 0 ; StringBuilder sb = new StringBuilder ( ) ; for ( Row row : rows ) { try { for ( ; index < row . getArity ( ) ; index ++ ) { Object rowData = row . getField ( index ) ; sb . append ( rowData ) . append ( DEFAULT_FIELD_DELIM ) ; } String rowVal = sb . toString ( ) ; ByteArrayInputStream bi = new ByteArrayInputStream ( rowVal . getBytes ( StandardCharsets . UTF_8 ) ) ; copyManager . copyIn ( copySql , bi ) ; connection . commit ( ) ; } catch ( Exception e ) { connection . rollback ( ) ; connection . commit ( ) ; if ( metricOutputFormat . outDirtyRecords . getCount ( ) % DIRTYDATA_PRINT_FREQUENTY == 0 || LOG . isDebugEnabled ( ) ) { LOG . error ( "record insert failed ,this row is {}" , row . toString ( ) ) ; } metricOutputFormat . outDirtyRecords . inc ( ) ; } } rows . clear ( ) ; }
public void executeUpdate ( Connection connection ) throws SQLException { int index = 0 ; StringBuilder sb = new StringBuilder ( ) ; for ( Row row : rows ) { try { for ( ; index < row . getArity ( ) ; index ++ ) { Object rowData = row . getField ( index ) ; sb . append ( rowData ) . append ( DEFAULT_FIELD_DELIM ) ; } String rowVal = sb . toString ( ) ; ByteArrayInputStream bi = new ByteArrayInputStream ( rowVal . getBytes ( StandardCharsets . UTF_8 ) ) ; copyManager . copyIn ( copySql , bi ) ; connection . commit ( ) ; } catch ( Exception e ) { connection . rollback ( ) ; connection . commit ( ) ; if ( metricOutputFormat . outDirtyRecords . getCount ( ) % DIRTYDATA_PRINT_FREQUENTY == 0 || LOG . isDebugEnabled ( ) ) { LOG . error ( "record insert failed ,this row is {}" , row . toString ( ) ) ; LOG . error ( "" , e ) ; } metricOutputFormat . outDirtyRecords . inc ( ) ; } } rows . clear ( ) ; }
335
private void locateRecoveryRegistry ( ) { if ( registry == null ) { for ( String locatorClasse : RESOURCE_RECOVERY_CLASS_NAMES ) { try { ServiceLoader < ActiveMQRegistry > sl = ServiceLoader . load ( ActiveMQRegistry . class ) ; if ( sl . iterator ( ) . hasNext ( ) ) { registry = sl . iterator ( ) . next ( ) ; } } catch ( Throwable e ) { } if ( registry != null ) { break ; } } if ( registry != null ) { ActiveMQJMSBridgeLogger . LOGGER . debug ( "Recovery Registry located = " + registry ) ; } } }
private void locateRecoveryRegistry ( ) { if ( registry == null ) { for ( String locatorClasse : RESOURCE_RECOVERY_CLASS_NAMES ) { try { ServiceLoader < ActiveMQRegistry > sl = ServiceLoader . load ( ActiveMQRegistry . class ) ; if ( sl . iterator ( ) . hasNext ( ) ) { registry = sl . iterator ( ) . next ( ) ; } } catch ( Throwable e ) { ActiveMQJMSBridgeLogger . LOGGER . debug ( "unable to load recovery registry " + locatorClasse , e ) ; } if ( registry != null ) { break ; } } if ( registry != null ) { ActiveMQJMSBridgeLogger . LOGGER . debug ( "Recovery Registry located = " + registry ) ; } } }
336
private void locateRecoveryRegistry ( ) { if ( registry == null ) { for ( String locatorClasse : RESOURCE_RECOVERY_CLASS_NAMES ) { try { ServiceLoader < ActiveMQRegistry > sl = ServiceLoader . load ( ActiveMQRegistry . class ) ; if ( sl . iterator ( ) . hasNext ( ) ) { registry = sl . iterator ( ) . next ( ) ; } } catch ( Throwable e ) { ActiveMQJMSBridgeLogger . LOGGER . debug ( "unable to load recovery registry " + locatorClasse , e ) ; } if ( registry != null ) { break ; } } if ( registry != null ) { } } }
private void locateRecoveryRegistry ( ) { if ( registry == null ) { for ( String locatorClasse : RESOURCE_RECOVERY_CLASS_NAMES ) { try { ServiceLoader < ActiveMQRegistry > sl = ServiceLoader . load ( ActiveMQRegistry . class ) ; if ( sl . iterator ( ) . hasNext ( ) ) { registry = sl . iterator ( ) . next ( ) ; } } catch ( Throwable e ) { ActiveMQJMSBridgeLogger . LOGGER . debug ( "unable to load recovery registry " + locatorClasse , e ) ; } if ( registry != null ) { break ; } } if ( registry != null ) { ActiveMQJMSBridgeLogger . LOGGER . debug ( "Recovery Registry located = " + registry ) ; } } }
337
@ Test public void givenHttpCallWithoutHeaderCertificateThenRaizeShiroException ( ) { final SecureClientConfiguration configuration = changeConfigurationFile ( INGEST_EXTERNAL_CLIENT_CONF_NOKEY ) ; configuration . setServerPort ( vitamServerTestRunner . getBusinessPort ( ) ) ; final VitamClientFactory < DefaultClient > factory = new VitamClientFactory < DefaultClient > ( configuration , BASE_URI ) { @ Override public DefaultClient getClient ( ) { return new DefaultClient ( this ) ; } } ; try ( final DefaultClient client = factory . getClient ( ) ) { client . checkStatus ( ) ; fail ( "THIS SHOULD NOT RAIZED EXCEPTION" ) ; } catch ( final VitamException e ) { } finally { try { factory . shutdown ( ) ; } catch ( Exception e ) { SysErrLogger . FAKE_LOGGER . ignoreLog ( e ) ; } } }
@ Test public void givenHttpCallWithoutHeaderCertificateThenRaizeShiroException ( ) { final SecureClientConfiguration configuration = changeConfigurationFile ( INGEST_EXTERNAL_CLIENT_CONF_NOKEY ) ; configuration . setServerPort ( vitamServerTestRunner . getBusinessPort ( ) ) ; final VitamClientFactory < DefaultClient > factory = new VitamClientFactory < DefaultClient > ( configuration , BASE_URI ) { @ Override public DefaultClient getClient ( ) { return new DefaultClient ( this ) ; } } ; try ( final DefaultClient client = factory . getClient ( ) ) { client . checkStatus ( ) ; LOGGER . error ( "THIS SHOULD RAIZED AN EXCEPTION" ) ; fail ( "THIS SHOULD NOT RAIZED EXCEPTION" ) ; } catch ( final VitamException e ) { } finally { try { factory . shutdown ( ) ; } catch ( Exception e ) { SysErrLogger . FAKE_LOGGER . ignoreLog ( e ) ; } } }
338
private static File resetTempDirectory ( String currentDir ) throws IOException { File workDir ; workDir = new File ( currentDir , "work" ) ; if ( workDir . exists ( ) ) { try { FileUtils . cleanDirectory ( workDir ) ; } catch ( IllegalArgumentException e ) { } } return workDir ; }
private static File resetTempDirectory ( String currentDir ) throws IOException { File workDir ; workDir = new File ( currentDir , "work" ) ; if ( workDir . exists ( ) ) { try { FileUtils . cleanDirectory ( workDir ) ; } catch ( IllegalArgumentException e ) { LOG . info ( "Info: issue while deleting work directory, it was already deleted. Not a problem." ) ; } } return workDir ; }
339
private void callbackLog ( List < HandleCallbackParam > callbackParamList , String logContent ) { for ( HandleCallbackParam c : callbackParamList ) { String logFileName = JobFileAppender . makeLogFileName ( new Date ( c . getLogDateTim ( ) ) , c . getLogId ( ) ) ; JobFileAppender . contextHolder . set ( logFileName ) ; } }
private void callbackLog ( List < HandleCallbackParam > callbackParamList , String logContent ) { for ( HandleCallbackParam c : callbackParamList ) { String logFileName = JobFileAppender . makeLogFileName ( new Date ( c . getLogDateTim ( ) ) , c . getLogId ( ) ) ; JobFileAppender . contextHolder . set ( logFileName ) ; JobLogger . log ( logContent ) ; } }
340
public Pair < Integer , String > getDatabaseIdAndUri ( String databaseName ) throws UndefinedDatabaseException { String sql = "SELECT DB_ID, SPACE_URI from " + TB_DATABASES + " natural join " + TB_SPACES + " WHERE db_name = ?" ; if ( LOG . isDebugEnabled ( ) ) { } ResultSet res = null ; try ( PreparedStatement pstmt = getConnection ( ) . prepareStatement ( sql ) ) { pstmt . setString ( 1 , databaseName ) ; res = pstmt . executeQuery ( ) ; if ( ! res . next ( ) ) { throw new UndefinedDatabaseException ( databaseName ) ; } return new Pair < > ( res . getInt ( 1 ) , res . getString ( 2 ) + "/" + databaseName ) ; } catch ( SQLException e ) { throw new TajoInternalError ( e ) ; } finally { CatalogUtil . closeQuietly ( res ) ; } }
public Pair < Integer , String > getDatabaseIdAndUri ( String databaseName ) throws UndefinedDatabaseException { String sql = "SELECT DB_ID, SPACE_URI from " + TB_DATABASES + " natural join " + TB_SPACES + " WHERE db_name = ?" ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( sql ) ; } ResultSet res = null ; try ( PreparedStatement pstmt = getConnection ( ) . prepareStatement ( sql ) ) { pstmt . setString ( 1 , databaseName ) ; res = pstmt . executeQuery ( ) ; if ( ! res . next ( ) ) { throw new UndefinedDatabaseException ( databaseName ) ; } return new Pair < > ( res . getInt ( 1 ) , res . getString ( 2 ) + "/" + databaseName ) ; } catch ( SQLException e ) { throw new TajoInternalError ( e ) ; } finally { CatalogUtil . closeQuietly ( res ) ; } }
341
public int getPendingPublishMessagesNo ( String clientID ) { if ( ! sessions . containsKey ( clientID ) ) { return 0 ; } return sessions . get ( clientID ) . queue . size ( ) ; }
public int getPendingPublishMessagesNo ( String clientID ) { if ( ! sessions . containsKey ( clientID ) ) { LOG . error ( "Can't find the session for client <{}>" , clientID ) ; return 0 ; } return sessions . get ( clientID ) . queue . size ( ) ; }
342
public void setResizeDownIterationIncrement ( Integer val ) { if ( LOG . isInfoEnabled ( ) ) setOrDefault ( RESIZE_DOWN_ITERATION_INCREMENT , val ) ; }
public void setResizeDownIterationIncrement ( Integer val ) { if ( LOG . isInfoEnabled ( ) ) LOG . info ( "{} changing resizeDownIterationIncrement from {} to {}" , new Object [ ] { this , getResizeDownIterationIncrement ( ) , val } ) ; setOrDefault ( RESIZE_DOWN_ITERATION_INCREMENT , val ) ; }
343
private int getCurrValidWorkers ( Collection < ImmutableWorkerInfo > workers ) { final Predicate < ImmutableWorkerInfo > isValidWorker = ProvisioningUtil . createValidWorkerPredicate ( config ) ; final int currValidWorkers = Collections2 . filter ( workers , isValidWorker ) . size ( ) ; return currValidWorkers ; }
private int getCurrValidWorkers ( Collection < ImmutableWorkerInfo > workers ) { final Predicate < ImmutableWorkerInfo > isValidWorker = ProvisioningUtil . createValidWorkerPredicate ( config ) ; final int currValidWorkers = Collections2 . filter ( workers , isValidWorker ) . size ( ) ; log . debug ( "Current valid workers: %d" , currValidWorkers ) ; return currValidWorkers ; }
344
private void deleteNtFile ( SyncResult res , Entry e ) throws RepositoryException { String path = e . node . getPath ( ) ; e . node . remove ( ) ; res . addEntry ( path , e . getFsPath ( ) , SyncResult . Operation . DELETE_JCR ) ; }
private void deleteNtFile ( SyncResult res , Entry e ) throws RepositoryException { String path = e . node . getPath ( ) ; e . node . remove ( ) ; syncLog . log ( "D jcr:/%s" , path ) ; res . addEntry ( path , e . getFsPath ( ) , SyncResult . Operation . DELETE_JCR ) ; }
345
public boolean initSerial ( ) { if ( wiimote == null ) { log . error ( "please press the (1) & (2) buttons of the wii - and re-run program while lights are flashing" ) ; return false ; } if ( ! serialInitialized ) { try { Thread . sleep ( 500 ) ; } catch ( InterruptedException e ) { } wiimote . setLeds ( true , false , false , false ) ; wiimote . setLeds ( false , false , true , false ) ; wiimote . setLeds ( true , false , false , false ) ; wiimote . setLeds ( false , false , false , false ) ; wiimote . setLeds ( true , false , false , false ) ; wiimote . setLeds ( false , false , false , false ) ; wiimote . setLeds ( true , false , false , false ) ; wiimote . setLeds ( false , false , false , false ) ; wiimote . setLeds ( true , true , true , true ) ; wiimote . setLeds ( false , true , true , true ) ; serialInitialized = true ; } else { log . warn ( "wii serial already initialized" ) ; } return true ; }
public boolean initSerial ( ) { if ( wiimote == null ) { log . error ( "wii is not connected - can not initialize" ) ; log . error ( "please press the (1) & (2) buttons of the wii - and re-run program while lights are flashing" ) ; return false ; } if ( ! serialInitialized ) { try { Thread . sleep ( 500 ) ; } catch ( InterruptedException e ) { } wiimote . setLeds ( true , false , false , false ) ; wiimote . setLeds ( false , false , true , false ) ; wiimote . setLeds ( true , false , false , false ) ; wiimote . setLeds ( false , false , false , false ) ; wiimote . setLeds ( true , false , false , false ) ; wiimote . setLeds ( false , false , false , false ) ; wiimote . setLeds ( true , false , false , false ) ; wiimote . setLeds ( false , false , false , false ) ; wiimote . setLeds ( true , true , true , true ) ; wiimote . setLeds ( false , true , true , true ) ; serialInitialized = true ; } else { log . warn ( "wii serial already initialized" ) ; } return true ; }
346
public boolean initSerial ( ) { if ( wiimote == null ) { log . error ( "wii is not connected - can not initialize" ) ; return false ; } if ( ! serialInitialized ) { try { Thread . sleep ( 500 ) ; } catch ( InterruptedException e ) { } wiimote . setLeds ( true , false , false , false ) ; wiimote . setLeds ( false , false , true , false ) ; wiimote . setLeds ( true , false , false , false ) ; wiimote . setLeds ( false , false , false , false ) ; wiimote . setLeds ( true , false , false , false ) ; wiimote . setLeds ( false , false , false , false ) ; wiimote . setLeds ( true , false , false , false ) ; wiimote . setLeds ( false , false , false , false ) ; wiimote . setLeds ( true , true , true , true ) ; wiimote . setLeds ( false , true , true , true ) ; serialInitialized = true ; } else { log . warn ( "wii serial already initialized" ) ; } return true ; }
public boolean initSerial ( ) { if ( wiimote == null ) { log . error ( "wii is not connected - can not initialize" ) ; log . error ( "please press the (1) & (2) buttons of the wii - and re-run program while lights are flashing" ) ; return false ; } if ( ! serialInitialized ) { try { Thread . sleep ( 500 ) ; } catch ( InterruptedException e ) { } wiimote . setLeds ( true , false , false , false ) ; wiimote . setLeds ( false , false , true , false ) ; wiimote . setLeds ( true , false , false , false ) ; wiimote . setLeds ( false , false , false , false ) ; wiimote . setLeds ( true , false , false , false ) ; wiimote . setLeds ( false , false , false , false ) ; wiimote . setLeds ( true , false , false , false ) ; wiimote . setLeds ( false , false , false , false ) ; wiimote . setLeds ( true , true , true , true ) ; wiimote . setLeds ( false , true , true , true ) ; serialInitialized = true ; } else { log . warn ( "wii serial already initialized" ) ; } return true ; }
347
public boolean initSerial ( ) { if ( wiimote == null ) { log . error ( "wii is not connected - can not initialize" ) ; log . error ( "please press the (1) & (2) buttons of the wii - and re-run program while lights are flashing" ) ; return false ; } if ( ! serialInitialized ) { try { Thread . sleep ( 500 ) ; } catch ( InterruptedException e ) { } wiimote . setLeds ( true , false , false , false ) ; wiimote . setLeds ( false , false , true , false ) ; wiimote . setLeds ( true , false , false , false ) ; wiimote . setLeds ( false , false , false , false ) ; wiimote . setLeds ( true , false , false , false ) ; wiimote . setLeds ( false , false , false , false ) ; wiimote . setLeds ( true , false , false , false ) ; wiimote . setLeds ( false , false , false , false ) ; wiimote . setLeds ( true , true , true , true ) ; wiimote . setLeds ( false , true , true , true ) ; serialInitialized = true ; } else { } return true ; }
public boolean initSerial ( ) { if ( wiimote == null ) { log . error ( "wii is not connected - can not initialize" ) ; log . error ( "please press the (1) & (2) buttons of the wii - and re-run program while lights are flashing" ) ; return false ; } if ( ! serialInitialized ) { try { Thread . sleep ( 500 ) ; } catch ( InterruptedException e ) { } wiimote . setLeds ( true , false , false , false ) ; wiimote . setLeds ( false , false , true , false ) ; wiimote . setLeds ( true , false , false , false ) ; wiimote . setLeds ( false , false , false , false ) ; wiimote . setLeds ( true , false , false , false ) ; wiimote . setLeds ( false , false , false , false ) ; wiimote . setLeds ( true , false , false , false ) ; wiimote . setLeds ( false , false , false , false ) ; wiimote . setLeds ( true , true , true , true ) ; wiimote . setLeds ( false , true , true , true ) ; serialInitialized = true ; } else { log . warn ( "wii serial already initialized" ) ; } return true ; }
348
private void startBroker ( boolean deleteAllMessages ) throws Exception { if ( broker != null ) return ; broker = BrokerFactory . createBroker ( "broker:(vm://" + getName ( ) + ")" ) ; broker . setBrokerName ( getName ( ) ) ; broker . setAdvisorySupport ( false ) ; broker . setDeleteAllMessagesOnStartup ( deleteAllMessages ) ; File kahadbData = new File ( "activemq-data/" + getName ( ) + "-kahadb" ) ; if ( deleteAllMessages ) delete ( kahadbData ) ; broker . setPersistent ( true ) ; KahaDBPersistenceAdapter kahadb = new KahaDBPersistenceAdapter ( ) ; kahadb . setDirectory ( kahadbData ) ; kahadb . setJournalMaxFileLength ( 10 * 1024 ) ; kahadb . setCleanupInterval ( 5000 ) ; broker . setPersistenceAdapter ( kahadb ) ; broker . addConnector ( "tcp://localhost:61656" ) ; broker . getSystemUsage ( ) . getMemoryUsage ( ) . setLimit ( 256 * 1024 * 1024 ) ; broker . getSystemUsage ( ) . getTempUsage ( ) . setLimit ( 256 * 1024 * 1024 ) ; broker . getSystemUsage ( ) . getStoreUsage ( ) . setLimit ( 256 * 1024 * 1024 ) ; broker . start ( ) ; broker . waitUntilStarted ( ) ; LOG . info ( toString ( ) + " Broker started!!" ) ; }
private void startBroker ( boolean deleteAllMessages ) throws Exception { if ( broker != null ) return ; broker = BrokerFactory . createBroker ( "broker:(vm://" + getName ( ) + ")" ) ; broker . setBrokerName ( getName ( ) ) ; broker . setAdvisorySupport ( false ) ; broker . setDeleteAllMessagesOnStartup ( deleteAllMessages ) ; File kahadbData = new File ( "activemq-data/" + getName ( ) + "-kahadb" ) ; if ( deleteAllMessages ) delete ( kahadbData ) ; broker . setPersistent ( true ) ; KahaDBPersistenceAdapter kahadb = new KahaDBPersistenceAdapter ( ) ; kahadb . setDirectory ( kahadbData ) ; kahadb . setJournalMaxFileLength ( 10 * 1024 ) ; kahadb . setCleanupInterval ( 5000 ) ; broker . setPersistenceAdapter ( kahadb ) ; broker . addConnector ( "tcp://localhost:61656" ) ; broker . getSystemUsage ( ) . getMemoryUsage ( ) . setLimit ( 256 * 1024 * 1024 ) ; broker . getSystemUsage ( ) . getTempUsage ( ) . setLimit ( 256 * 1024 * 1024 ) ; broker . getSystemUsage ( ) . getStoreUsage ( ) . setLimit ( 256 * 1024 * 1024 ) ; LOG . info ( toString ( ) + "Starting Broker..." ) ; broker . start ( ) ; broker . waitUntilStarted ( ) ; LOG . info ( toString ( ) + " Broker started!!" ) ; }
349
private void startBroker ( boolean deleteAllMessages ) throws Exception { if ( broker != null ) return ; broker = BrokerFactory . createBroker ( "broker:(vm://" + getName ( ) + ")" ) ; broker . setBrokerName ( getName ( ) ) ; broker . setAdvisorySupport ( false ) ; broker . setDeleteAllMessagesOnStartup ( deleteAllMessages ) ; File kahadbData = new File ( "activemq-data/" + getName ( ) + "-kahadb" ) ; if ( deleteAllMessages ) delete ( kahadbData ) ; broker . setPersistent ( true ) ; KahaDBPersistenceAdapter kahadb = new KahaDBPersistenceAdapter ( ) ; kahadb . setDirectory ( kahadbData ) ; kahadb . setJournalMaxFileLength ( 10 * 1024 ) ; kahadb . setCleanupInterval ( 5000 ) ; broker . setPersistenceAdapter ( kahadb ) ; broker . addConnector ( "tcp://localhost:61656" ) ; broker . getSystemUsage ( ) . getMemoryUsage ( ) . setLimit ( 256 * 1024 * 1024 ) ; broker . getSystemUsage ( ) . getTempUsage ( ) . setLimit ( 256 * 1024 * 1024 ) ; broker . getSystemUsage ( ) . getStoreUsage ( ) . setLimit ( 256 * 1024 * 1024 ) ; LOG . info ( toString ( ) + "Starting Broker..." ) ; broker . start ( ) ; broker . waitUntilStarted ( ) ; }
private void startBroker ( boolean deleteAllMessages ) throws Exception { if ( broker != null ) return ; broker = BrokerFactory . createBroker ( "broker:(vm://" + getName ( ) + ")" ) ; broker . setBrokerName ( getName ( ) ) ; broker . setAdvisorySupport ( false ) ; broker . setDeleteAllMessagesOnStartup ( deleteAllMessages ) ; File kahadbData = new File ( "activemq-data/" + getName ( ) + "-kahadb" ) ; if ( deleteAllMessages ) delete ( kahadbData ) ; broker . setPersistent ( true ) ; KahaDBPersistenceAdapter kahadb = new KahaDBPersistenceAdapter ( ) ; kahadb . setDirectory ( kahadbData ) ; kahadb . setJournalMaxFileLength ( 10 * 1024 ) ; kahadb . setCleanupInterval ( 5000 ) ; broker . setPersistenceAdapter ( kahadb ) ; broker . addConnector ( "tcp://localhost:61656" ) ; broker . getSystemUsage ( ) . getMemoryUsage ( ) . setLimit ( 256 * 1024 * 1024 ) ; broker . getSystemUsage ( ) . getTempUsage ( ) . setLimit ( 256 * 1024 * 1024 ) ; broker . getSystemUsage ( ) . getStoreUsage ( ) . setLimit ( 256 * 1024 * 1024 ) ; LOG . info ( toString ( ) + "Starting Broker..." ) ; broker . start ( ) ; broker . waitUntilStarted ( ) ; LOG . info ( toString ( ) + " Broker started!!" ) ; }
350
private void handleAnyExecutionNotFinished ( Set < ExecutionState > notFinishedExecutionStates ) { final State oldState = state ; state = state . onAnyExecutionNotFinished ( notFinishedExecutionStates ) ; }
private void handleAnyExecutionNotFinished ( Set < ExecutionState > notFinishedExecutionStates ) { final State oldState = state ; state = state . onAnyExecutionNotFinished ( notFinishedExecutionStates ) ; log . warn ( "Stop-with-savepoint transitioned from {} to {} on execution termination handling for job {} with some executions being in an not-finished state: {}" , oldState . getName ( ) , state . getName ( ) , jobId , notFinishedExecutionStates ) ; }
351
protected File getExternalComponentsLocation ( ) { Bundle bundle = Activator . getBundle ( ) ; try { URL localURL = FileLocator . toFileURL ( FileLocator . find ( bundle , new Path ( "components" ) , null ) ) ; return new File ( localURL . getPath ( ) ) ; } catch ( Exception localException ) { localException . printStackTrace ( ) ; } return null ; }
protected File getExternalComponentsLocation ( ) { Bundle bundle = Activator . getBundle ( ) ; try { URL localURL = FileLocator . toFileURL ( FileLocator . find ( bundle , new Path ( "components" ) , null ) ) ; return new File ( localURL . getPath ( ) ) ; } catch ( Exception localException ) { logger . error ( localException ) ; localException . printStackTrace ( ) ; } return null ; }
352
public static void unregister ( DependencyProvider provider ) { providers . remove ( provider ) ; }
public static void unregister ( DependencyProvider provider ) { log . debug ( "Unregistering " + provider ) ; providers . remove ( provider ) ; }
353
static void mbtilesStore ( File rootDirectory , File seedFile , long [ ] [ ] tiles ) throws Exception { File databaseFile = new File ( rootDirectory , Utils . buildPath ( "grid" , "layer" , "image_png" , "mbtiles_perf_test.sqlite" ) ) ; if ( LOGGER . isInfoEnabled ( ) ) { } FileUtils . copyFile ( seedFile , databaseFile ) ; ExecutorService executor = Executors . newFixedThreadPool ( WORKERS ) ; long startTime = System . currentTimeMillis ( ) ; MbtilesInfo configuration = new MbtilesInfo ( ) ; configuration . setRootDirectory ( rootDirectory . getPath ( ) ) ; configuration . setTemplatePath ( Utils . buildPath ( "{grid}" , "{layer}" , "{format}" , "mbtiles_perf_test.sqlite" ) ) ; configuration . setUseCreateTime ( false ) ; SqliteConnectionManager connectionManager = new SqliteConnectionManager ( 10 , 2000 ) ; MbtilesBlobStore mbtilesBlobStore = new MbtilesBlobStore ( configuration , connectionManager ) ; for ( int i = 0 ; i < tiles . length ; i ++ ) { long [ ] tile = tiles [ i ] ; executor . submit ( ( ) -> { TileObject mbtile = TileObject . createQueryTileObject ( "layer" , tile , "grid" , "image/png" , null ) ; try { mbtilesBlobStore . get ( mbtile ) ; } catch ( Exception exception ) { throw Utils . exception ( exception , "Error retrieving tile '%s'." , mbtile ) ; } } ) ; if ( i != 0 && i % 10000 == 0 ) { if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( String . format ( "Submitted %d select tasks." , i ) ) ; } } } if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( String . format ( "Submitted %d select tasks." , TILES ) ) ; } executor . shutdown ( ) ; executor . awaitTermination ( 5 , TimeUnit . MINUTES ) ; long endTime = System . currentTimeMillis ( ) ; if ( LOGGER . isInfoEnabled ( ) ) { LOGGER . info ( String . format ( "Tiles mbtiles blobstore select time '%d'." , endTime - startTime ) ) ; } if ( LOGGER . isInfoEnabled ( ) ) { LOGGER . info ( String . format ( "Tiles mbtiles blobstore selected per second '%f'." , TILES / ( float ) ( endTime - startTime ) * 1000 ) ) ; } connectionManager . reapAllConnections ( ) ; connectionManager . stopPoolReaper ( ) ; FileUtils . deleteQuietly ( databaseFile ) ; }
static void mbtilesStore ( File rootDirectory , File seedFile , long [ ] [ ] tiles ) throws Exception { File databaseFile = new File ( rootDirectory , Utils . buildPath ( "grid" , "layer" , "image_png" , "mbtiles_perf_test.sqlite" ) ) ; if ( LOGGER . isInfoEnabled ( ) ) { LOGGER . info ( String . format ( "Start mbtiles select from file '%s'." , databaseFile ) ) ; } FileUtils . copyFile ( seedFile , databaseFile ) ; ExecutorService executor = Executors . newFixedThreadPool ( WORKERS ) ; long startTime = System . currentTimeMillis ( ) ; MbtilesInfo configuration = new MbtilesInfo ( ) ; configuration . setRootDirectory ( rootDirectory . getPath ( ) ) ; configuration . setTemplatePath ( Utils . buildPath ( "{grid}" , "{layer}" , "{format}" , "mbtiles_perf_test.sqlite" ) ) ; configuration . setUseCreateTime ( false ) ; SqliteConnectionManager connectionManager = new SqliteConnectionManager ( 10 , 2000 ) ; MbtilesBlobStore mbtilesBlobStore = new MbtilesBlobStore ( configuration , connectionManager ) ; for ( int i = 0 ; i < tiles . length ; i ++ ) { long [ ] tile = tiles [ i ] ; executor . submit ( ( ) -> { TileObject mbtile = TileObject . createQueryTileObject ( "layer" , tile , "grid" , "image/png" , null ) ; try { mbtilesBlobStore . get ( mbtile ) ; } catch ( Exception exception ) { throw Utils . exception ( exception , "Error retrieving tile '%s'." , mbtile ) ; } } ) ; if ( i != 0 && i % 10000 == 0 ) { if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( String . format ( "Submitted %d select tasks." , i ) ) ; } } } if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( String . format ( "Submitted %d select tasks." , TILES ) ) ; } executor . shutdown ( ) ; executor . awaitTermination ( 5 , TimeUnit . MINUTES ) ; long endTime = System . currentTimeMillis ( ) ; if ( LOGGER . isInfoEnabled ( ) ) { LOGGER . info ( String . format ( "Tiles mbtiles blobstore select time '%d'." , endTime - startTime ) ) ; } if ( LOGGER . isInfoEnabled ( ) ) { LOGGER . info ( String . format ( "Tiles mbtiles blobstore selected per second '%f'." , TILES / ( float ) ( endTime - startTime ) * 1000 ) ) ; } connectionManager . reapAllConnections ( ) ; connectionManager . stopPoolReaper ( ) ; FileUtils . deleteQuietly ( databaseFile ) ; }
354
static void mbtilesStore ( File rootDirectory , File seedFile , long [ ] [ ] tiles ) throws Exception { File databaseFile = new File ( rootDirectory , Utils . buildPath ( "grid" , "layer" , "image_png" , "mbtiles_perf_test.sqlite" ) ) ; if ( LOGGER . isInfoEnabled ( ) ) { LOGGER . info ( String . format ( "Start mbtiles select from file '%s'." , databaseFile ) ) ; } FileUtils . copyFile ( seedFile , databaseFile ) ; ExecutorService executor = Executors . newFixedThreadPool ( WORKERS ) ; long startTime = System . currentTimeMillis ( ) ; MbtilesInfo configuration = new MbtilesInfo ( ) ; configuration . setRootDirectory ( rootDirectory . getPath ( ) ) ; configuration . setTemplatePath ( Utils . buildPath ( "{grid}" , "{layer}" , "{format}" , "mbtiles_perf_test.sqlite" ) ) ; configuration . setUseCreateTime ( false ) ; SqliteConnectionManager connectionManager = new SqliteConnectionManager ( 10 , 2000 ) ; MbtilesBlobStore mbtilesBlobStore = new MbtilesBlobStore ( configuration , connectionManager ) ; for ( int i = 0 ; i < tiles . length ; i ++ ) { long [ ] tile = tiles [ i ] ; executor . submit ( ( ) -> { TileObject mbtile = TileObject . createQueryTileObject ( "layer" , tile , "grid" , "image/png" , null ) ; try { mbtilesBlobStore . get ( mbtile ) ; } catch ( Exception exception ) { throw Utils . exception ( exception , "Error retrieving tile '%s'." , mbtile ) ; } } ) ; if ( i != 0 && i % 10000 == 0 ) { if ( LOGGER . isDebugEnabled ( ) ) { } } } if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( String . format ( "Submitted %d select tasks." , TILES ) ) ; } executor . shutdown ( ) ; executor . awaitTermination ( 5 , TimeUnit . MINUTES ) ; long endTime = System . currentTimeMillis ( ) ; if ( LOGGER . isInfoEnabled ( ) ) { LOGGER . info ( String . format ( "Tiles mbtiles blobstore select time '%d'." , endTime - startTime ) ) ; } if ( LOGGER . isInfoEnabled ( ) ) { LOGGER . info ( String . format ( "Tiles mbtiles blobstore selected per second '%f'." , TILES / ( float ) ( endTime - startTime ) * 1000 ) ) ; } connectionManager . reapAllConnections ( ) ; connectionManager . stopPoolReaper ( ) ; FileUtils . deleteQuietly ( databaseFile ) ; }
static void mbtilesStore ( File rootDirectory , File seedFile , long [ ] [ ] tiles ) throws Exception { File databaseFile = new File ( rootDirectory , Utils . buildPath ( "grid" , "layer" , "image_png" , "mbtiles_perf_test.sqlite" ) ) ; if ( LOGGER . isInfoEnabled ( ) ) { LOGGER . info ( String . format ( "Start mbtiles select from file '%s'." , databaseFile ) ) ; } FileUtils . copyFile ( seedFile , databaseFile ) ; ExecutorService executor = Executors . newFixedThreadPool ( WORKERS ) ; long startTime = System . currentTimeMillis ( ) ; MbtilesInfo configuration = new MbtilesInfo ( ) ; configuration . setRootDirectory ( rootDirectory . getPath ( ) ) ; configuration . setTemplatePath ( Utils . buildPath ( "{grid}" , "{layer}" , "{format}" , "mbtiles_perf_test.sqlite" ) ) ; configuration . setUseCreateTime ( false ) ; SqliteConnectionManager connectionManager = new SqliteConnectionManager ( 10 , 2000 ) ; MbtilesBlobStore mbtilesBlobStore = new MbtilesBlobStore ( configuration , connectionManager ) ; for ( int i = 0 ; i < tiles . length ; i ++ ) { long [ ] tile = tiles [ i ] ; executor . submit ( ( ) -> { TileObject mbtile = TileObject . createQueryTileObject ( "layer" , tile , "grid" , "image/png" , null ) ; try { mbtilesBlobStore . get ( mbtile ) ; } catch ( Exception exception ) { throw Utils . exception ( exception , "Error retrieving tile '%s'." , mbtile ) ; } } ) ; if ( i != 0 && i % 10000 == 0 ) { if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( String . format ( "Submitted %d select tasks." , i ) ) ; } } } if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( String . format ( "Submitted %d select tasks." , TILES ) ) ; } executor . shutdown ( ) ; executor . awaitTermination ( 5 , TimeUnit . MINUTES ) ; long endTime = System . currentTimeMillis ( ) ; if ( LOGGER . isInfoEnabled ( ) ) { LOGGER . info ( String . format ( "Tiles mbtiles blobstore select time '%d'." , endTime - startTime ) ) ; } if ( LOGGER . isInfoEnabled ( ) ) { LOGGER . info ( String . format ( "Tiles mbtiles blobstore selected per second '%f'." , TILES / ( float ) ( endTime - startTime ) * 1000 ) ) ; } connectionManager . reapAllConnections ( ) ; connectionManager . stopPoolReaper ( ) ; FileUtils . deleteQuietly ( databaseFile ) ; }
355
static void mbtilesStore ( File rootDirectory , File seedFile , long [ ] [ ] tiles ) throws Exception { File databaseFile = new File ( rootDirectory , Utils . buildPath ( "grid" , "layer" , "image_png" , "mbtiles_perf_test.sqlite" ) ) ; if ( LOGGER . isInfoEnabled ( ) ) { LOGGER . info ( String . format ( "Start mbtiles select from file '%s'." , databaseFile ) ) ; } FileUtils . copyFile ( seedFile , databaseFile ) ; ExecutorService executor = Executors . newFixedThreadPool ( WORKERS ) ; long startTime = System . currentTimeMillis ( ) ; MbtilesInfo configuration = new MbtilesInfo ( ) ; configuration . setRootDirectory ( rootDirectory . getPath ( ) ) ; configuration . setTemplatePath ( Utils . buildPath ( "{grid}" , "{layer}" , "{format}" , "mbtiles_perf_test.sqlite" ) ) ; configuration . setUseCreateTime ( false ) ; SqliteConnectionManager connectionManager = new SqliteConnectionManager ( 10 , 2000 ) ; MbtilesBlobStore mbtilesBlobStore = new MbtilesBlobStore ( configuration , connectionManager ) ; for ( int i = 0 ; i < tiles . length ; i ++ ) { long [ ] tile = tiles [ i ] ; executor . submit ( ( ) -> { TileObject mbtile = TileObject . createQueryTileObject ( "layer" , tile , "grid" , "image/png" , null ) ; try { mbtilesBlobStore . get ( mbtile ) ; } catch ( Exception exception ) { throw Utils . exception ( exception , "Error retrieving tile '%s'." , mbtile ) ; } } ) ; if ( i != 0 && i % 10000 == 0 ) { if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( String . format ( "Submitted %d select tasks." , i ) ) ; } } } if ( LOGGER . isDebugEnabled ( ) ) { } executor . shutdown ( ) ; executor . awaitTermination ( 5 , TimeUnit . MINUTES ) ; long endTime = System . currentTimeMillis ( ) ; if ( LOGGER . isInfoEnabled ( ) ) { LOGGER . info ( String . format ( "Tiles mbtiles blobstore select time '%d'." , endTime - startTime ) ) ; } if ( LOGGER . isInfoEnabled ( ) ) { LOGGER . info ( String . format ( "Tiles mbtiles blobstore selected per second '%f'." , TILES / ( float ) ( endTime - startTime ) * 1000 ) ) ; } connectionManager . reapAllConnections ( ) ; connectionManager . stopPoolReaper ( ) ; FileUtils . deleteQuietly ( databaseFile ) ; }
static void mbtilesStore ( File rootDirectory , File seedFile , long [ ] [ ] tiles ) throws Exception { File databaseFile = new File ( rootDirectory , Utils . buildPath ( "grid" , "layer" , "image_png" , "mbtiles_perf_test.sqlite" ) ) ; if ( LOGGER . isInfoEnabled ( ) ) { LOGGER . info ( String . format ( "Start mbtiles select from file '%s'." , databaseFile ) ) ; } FileUtils . copyFile ( seedFile , databaseFile ) ; ExecutorService executor = Executors . newFixedThreadPool ( WORKERS ) ; long startTime = System . currentTimeMillis ( ) ; MbtilesInfo configuration = new MbtilesInfo ( ) ; configuration . setRootDirectory ( rootDirectory . getPath ( ) ) ; configuration . setTemplatePath ( Utils . buildPath ( "{grid}" , "{layer}" , "{format}" , "mbtiles_perf_test.sqlite" ) ) ; configuration . setUseCreateTime ( false ) ; SqliteConnectionManager connectionManager = new SqliteConnectionManager ( 10 , 2000 ) ; MbtilesBlobStore mbtilesBlobStore = new MbtilesBlobStore ( configuration , connectionManager ) ; for ( int i = 0 ; i < tiles . length ; i ++ ) { long [ ] tile = tiles [ i ] ; executor . submit ( ( ) -> { TileObject mbtile = TileObject . createQueryTileObject ( "layer" , tile , "grid" , "image/png" , null ) ; try { mbtilesBlobStore . get ( mbtile ) ; } catch ( Exception exception ) { throw Utils . exception ( exception , "Error retrieving tile '%s'." , mbtile ) ; } } ) ; if ( i != 0 && i % 10000 == 0 ) { if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( String . format ( "Submitted %d select tasks." , i ) ) ; } } } if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( String . format ( "Submitted %d select tasks." , TILES ) ) ; } executor . shutdown ( ) ; executor . awaitTermination ( 5 , TimeUnit . MINUTES ) ; long endTime = System . currentTimeMillis ( ) ; if ( LOGGER . isInfoEnabled ( ) ) { LOGGER . info ( String . format ( "Tiles mbtiles blobstore select time '%d'." , endTime - startTime ) ) ; } if ( LOGGER . isInfoEnabled ( ) ) { LOGGER . info ( String . format ( "Tiles mbtiles blobstore selected per second '%f'." , TILES / ( float ) ( endTime - startTime ) * 1000 ) ) ; } connectionManager . reapAllConnections ( ) ; connectionManager . stopPoolReaper ( ) ; FileUtils . deleteQuietly ( databaseFile ) ; }
356
static void mbtilesStore ( File rootDirectory , File seedFile , long [ ] [ ] tiles ) throws Exception { File databaseFile = new File ( rootDirectory , Utils . buildPath ( "grid" , "layer" , "image_png" , "mbtiles_perf_test.sqlite" ) ) ; if ( LOGGER . isInfoEnabled ( ) ) { LOGGER . info ( String . format ( "Start mbtiles select from file '%s'." , databaseFile ) ) ; } FileUtils . copyFile ( seedFile , databaseFile ) ; ExecutorService executor = Executors . newFixedThreadPool ( WORKERS ) ; long startTime = System . currentTimeMillis ( ) ; MbtilesInfo configuration = new MbtilesInfo ( ) ; configuration . setRootDirectory ( rootDirectory . getPath ( ) ) ; configuration . setTemplatePath ( Utils . buildPath ( "{grid}" , "{layer}" , "{format}" , "mbtiles_perf_test.sqlite" ) ) ; configuration . setUseCreateTime ( false ) ; SqliteConnectionManager connectionManager = new SqliteConnectionManager ( 10 , 2000 ) ; MbtilesBlobStore mbtilesBlobStore = new MbtilesBlobStore ( configuration , connectionManager ) ; for ( int i = 0 ; i < tiles . length ; i ++ ) { long [ ] tile = tiles [ i ] ; executor . submit ( ( ) -> { TileObject mbtile = TileObject . createQueryTileObject ( "layer" , tile , "grid" , "image/png" , null ) ; try { mbtilesBlobStore . get ( mbtile ) ; } catch ( Exception exception ) { throw Utils . exception ( exception , "Error retrieving tile '%s'." , mbtile ) ; } } ) ; if ( i != 0 && i % 10000 == 0 ) { if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( String . format ( "Submitted %d select tasks." , i ) ) ; } } } if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( String . format ( "Submitted %d select tasks." , TILES ) ) ; } executor . shutdown ( ) ; executor . awaitTermination ( 5 , TimeUnit . MINUTES ) ; long endTime = System . currentTimeMillis ( ) ; if ( LOGGER . isInfoEnabled ( ) ) { } if ( LOGGER . isInfoEnabled ( ) ) { LOGGER . info ( String . format ( "Tiles mbtiles blobstore selected per second '%f'." , TILES / ( float ) ( endTime - startTime ) * 1000 ) ) ; } connectionManager . reapAllConnections ( ) ; connectionManager . stopPoolReaper ( ) ; FileUtils . deleteQuietly ( databaseFile ) ; }
static void mbtilesStore ( File rootDirectory , File seedFile , long [ ] [ ] tiles ) throws Exception { File databaseFile = new File ( rootDirectory , Utils . buildPath ( "grid" , "layer" , "image_png" , "mbtiles_perf_test.sqlite" ) ) ; if ( LOGGER . isInfoEnabled ( ) ) { LOGGER . info ( String . format ( "Start mbtiles select from file '%s'." , databaseFile ) ) ; } FileUtils . copyFile ( seedFile , databaseFile ) ; ExecutorService executor = Executors . newFixedThreadPool ( WORKERS ) ; long startTime = System . currentTimeMillis ( ) ; MbtilesInfo configuration = new MbtilesInfo ( ) ; configuration . setRootDirectory ( rootDirectory . getPath ( ) ) ; configuration . setTemplatePath ( Utils . buildPath ( "{grid}" , "{layer}" , "{format}" , "mbtiles_perf_test.sqlite" ) ) ; configuration . setUseCreateTime ( false ) ; SqliteConnectionManager connectionManager = new SqliteConnectionManager ( 10 , 2000 ) ; MbtilesBlobStore mbtilesBlobStore = new MbtilesBlobStore ( configuration , connectionManager ) ; for ( int i = 0 ; i < tiles . length ; i ++ ) { long [ ] tile = tiles [ i ] ; executor . submit ( ( ) -> { TileObject mbtile = TileObject . createQueryTileObject ( "layer" , tile , "grid" , "image/png" , null ) ; try { mbtilesBlobStore . get ( mbtile ) ; } catch ( Exception exception ) { throw Utils . exception ( exception , "Error retrieving tile '%s'." , mbtile ) ; } } ) ; if ( i != 0 && i % 10000 == 0 ) { if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( String . format ( "Submitted %d select tasks." , i ) ) ; } } } if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( String . format ( "Submitted %d select tasks." , TILES ) ) ; } executor . shutdown ( ) ; executor . awaitTermination ( 5 , TimeUnit . MINUTES ) ; long endTime = System . currentTimeMillis ( ) ; if ( LOGGER . isInfoEnabled ( ) ) { LOGGER . info ( String . format ( "Tiles mbtiles blobstore select time '%d'." , endTime - startTime ) ) ; } if ( LOGGER . isInfoEnabled ( ) ) { LOGGER . info ( String . format ( "Tiles mbtiles blobstore selected per second '%f'." , TILES / ( float ) ( endTime - startTime ) * 1000 ) ) ; } connectionManager . reapAllConnections ( ) ; connectionManager . stopPoolReaper ( ) ; FileUtils . deleteQuietly ( databaseFile ) ; }
357
static void mbtilesStore ( File rootDirectory , File seedFile , long [ ] [ ] tiles ) throws Exception { File databaseFile = new File ( rootDirectory , Utils . buildPath ( "grid" , "layer" , "image_png" , "mbtiles_perf_test.sqlite" ) ) ; if ( LOGGER . isInfoEnabled ( ) ) { LOGGER . info ( String . format ( "Start mbtiles select from file '%s'." , databaseFile ) ) ; } FileUtils . copyFile ( seedFile , databaseFile ) ; ExecutorService executor = Executors . newFixedThreadPool ( WORKERS ) ; long startTime = System . currentTimeMillis ( ) ; MbtilesInfo configuration = new MbtilesInfo ( ) ; configuration . setRootDirectory ( rootDirectory . getPath ( ) ) ; configuration . setTemplatePath ( Utils . buildPath ( "{grid}" , "{layer}" , "{format}" , "mbtiles_perf_test.sqlite" ) ) ; configuration . setUseCreateTime ( false ) ; SqliteConnectionManager connectionManager = new SqliteConnectionManager ( 10 , 2000 ) ; MbtilesBlobStore mbtilesBlobStore = new MbtilesBlobStore ( configuration , connectionManager ) ; for ( int i = 0 ; i < tiles . length ; i ++ ) { long [ ] tile = tiles [ i ] ; executor . submit ( ( ) -> { TileObject mbtile = TileObject . createQueryTileObject ( "layer" , tile , "grid" , "image/png" , null ) ; try { mbtilesBlobStore . get ( mbtile ) ; } catch ( Exception exception ) { throw Utils . exception ( exception , "Error retrieving tile '%s'." , mbtile ) ; } } ) ; if ( i != 0 && i % 10000 == 0 ) { if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( String . format ( "Submitted %d select tasks." , i ) ) ; } } } if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( String . format ( "Submitted %d select tasks." , TILES ) ) ; } executor . shutdown ( ) ; executor . awaitTermination ( 5 , TimeUnit . MINUTES ) ; long endTime = System . currentTimeMillis ( ) ; if ( LOGGER . isInfoEnabled ( ) ) { LOGGER . info ( String . format ( "Tiles mbtiles blobstore select time '%d'." , endTime - startTime ) ) ; } if ( LOGGER . isInfoEnabled ( ) ) { } connectionManager . reapAllConnections ( ) ; connectionManager . stopPoolReaper ( ) ; FileUtils . deleteQuietly ( databaseFile ) ; }
static void mbtilesStore ( File rootDirectory , File seedFile , long [ ] [ ] tiles ) throws Exception { File databaseFile = new File ( rootDirectory , Utils . buildPath ( "grid" , "layer" , "image_png" , "mbtiles_perf_test.sqlite" ) ) ; if ( LOGGER . isInfoEnabled ( ) ) { LOGGER . info ( String . format ( "Start mbtiles select from file '%s'." , databaseFile ) ) ; } FileUtils . copyFile ( seedFile , databaseFile ) ; ExecutorService executor = Executors . newFixedThreadPool ( WORKERS ) ; long startTime = System . currentTimeMillis ( ) ; MbtilesInfo configuration = new MbtilesInfo ( ) ; configuration . setRootDirectory ( rootDirectory . getPath ( ) ) ; configuration . setTemplatePath ( Utils . buildPath ( "{grid}" , "{layer}" , "{format}" , "mbtiles_perf_test.sqlite" ) ) ; configuration . setUseCreateTime ( false ) ; SqliteConnectionManager connectionManager = new SqliteConnectionManager ( 10 , 2000 ) ; MbtilesBlobStore mbtilesBlobStore = new MbtilesBlobStore ( configuration , connectionManager ) ; for ( int i = 0 ; i < tiles . length ; i ++ ) { long [ ] tile = tiles [ i ] ; executor . submit ( ( ) -> { TileObject mbtile = TileObject . createQueryTileObject ( "layer" , tile , "grid" , "image/png" , null ) ; try { mbtilesBlobStore . get ( mbtile ) ; } catch ( Exception exception ) { throw Utils . exception ( exception , "Error retrieving tile '%s'." , mbtile ) ; } } ) ; if ( i != 0 && i % 10000 == 0 ) { if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( String . format ( "Submitted %d select tasks." , i ) ) ; } } } if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( String . format ( "Submitted %d select tasks." , TILES ) ) ; } executor . shutdown ( ) ; executor . awaitTermination ( 5 , TimeUnit . MINUTES ) ; long endTime = System . currentTimeMillis ( ) ; if ( LOGGER . isInfoEnabled ( ) ) { LOGGER . info ( String . format ( "Tiles mbtiles blobstore select time '%d'." , endTime - startTime ) ) ; } if ( LOGGER . isInfoEnabled ( ) ) { LOGGER . info ( String . format ( "Tiles mbtiles blobstore selected per second '%f'." , TILES / ( float ) ( endTime - startTime ) * 1000 ) ) ; } connectionManager . reapAllConnections ( ) ; connectionManager . stopPoolReaper ( ) ; FileUtils . deleteQuietly ( databaseFile ) ; }
358
protected void initRequestProcessorClass ( ModuleConfig config ) throws ServletException { String tilesProcessorClassname = TilesRequestProcessor . class . getName ( ) ; ControllerConfig ctrlConfig = config . getControllerConfig ( ) ; String configProcessorClassname = ctrlConfig . getProcessorClass ( ) ; Class configProcessorClass ; try { configProcessorClass = RequestUtils . applicationClass ( configProcessorClassname ) ; } catch ( ClassNotFoundException ex ) { throw new ServletException ( ex ) ; } if ( ComposableRequestProcessor . class . isAssignableFrom ( configProcessorClass ) ) { return ; } if ( configProcessorClassname . equals ( RequestProcessor . class . getName ( ) ) || configProcessorClassname . endsWith ( tilesProcessorClassname ) ) { ctrlConfig . setProcessorClass ( tilesProcessorClassname ) ; return ; } Class tilesProcessorClass = TilesRequestProcessor . class ; if ( ! tilesProcessorClass . isAssignableFrom ( configProcessorClass ) ) { String msg = "TilesPlugin : Specified RequestProcessor not compatible with TilesRequestProcessor" ; if ( log . isFatalEnabled ( ) ) { log . fatal ( msg ) ; } throw new ServletException ( msg ) ; } }
protected void initRequestProcessorClass ( ModuleConfig config ) throws ServletException { String tilesProcessorClassname = TilesRequestProcessor . class . getName ( ) ; ControllerConfig ctrlConfig = config . getControllerConfig ( ) ; String configProcessorClassname = ctrlConfig . getProcessorClass ( ) ; Class configProcessorClass ; try { configProcessorClass = RequestUtils . applicationClass ( configProcessorClassname ) ; } catch ( ClassNotFoundException ex ) { log . fatal ( "Can't set TilesRequestProcessor: bad class name '" + configProcessorClassname + "'." ) ; throw new ServletException ( ex ) ; } if ( ComposableRequestProcessor . class . isAssignableFrom ( configProcessorClass ) ) { return ; } if ( configProcessorClassname . equals ( RequestProcessor . class . getName ( ) ) || configProcessorClassname . endsWith ( tilesProcessorClassname ) ) { ctrlConfig . setProcessorClass ( tilesProcessorClassname ) ; return ; } Class tilesProcessorClass = TilesRequestProcessor . class ; if ( ! tilesProcessorClass . isAssignableFrom ( configProcessorClass ) ) { String msg = "TilesPlugin : Specified RequestProcessor not compatible with TilesRequestProcessor" ; if ( log . isFatalEnabled ( ) ) { log . fatal ( msg ) ; } throw new ServletException ( msg ) ; } }
359
protected void initRequestProcessorClass ( ModuleConfig config ) throws ServletException { String tilesProcessorClassname = TilesRequestProcessor . class . getName ( ) ; ControllerConfig ctrlConfig = config . getControllerConfig ( ) ; String configProcessorClassname = ctrlConfig . getProcessorClass ( ) ; Class configProcessorClass ; try { configProcessorClass = RequestUtils . applicationClass ( configProcessorClassname ) ; } catch ( ClassNotFoundException ex ) { log . fatal ( "Can't set TilesRequestProcessor: bad class name '" + configProcessorClassname + "'." ) ; throw new ServletException ( ex ) ; } if ( ComposableRequestProcessor . class . isAssignableFrom ( configProcessorClass ) ) { return ; } if ( configProcessorClassname . equals ( RequestProcessor . class . getName ( ) ) || configProcessorClassname . endsWith ( tilesProcessorClassname ) ) { ctrlConfig . setProcessorClass ( tilesProcessorClassname ) ; return ; } Class tilesProcessorClass = TilesRequestProcessor . class ; if ( ! tilesProcessorClass . isAssignableFrom ( configProcessorClass ) ) { String msg = "TilesPlugin : Specified RequestProcessor not compatible with TilesRequestProcessor" ; if ( log . isFatalEnabled ( ) ) { } throw new ServletException ( msg ) ; } }
protected void initRequestProcessorClass ( ModuleConfig config ) throws ServletException { String tilesProcessorClassname = TilesRequestProcessor . class . getName ( ) ; ControllerConfig ctrlConfig = config . getControllerConfig ( ) ; String configProcessorClassname = ctrlConfig . getProcessorClass ( ) ; Class configProcessorClass ; try { configProcessorClass = RequestUtils . applicationClass ( configProcessorClassname ) ; } catch ( ClassNotFoundException ex ) { log . fatal ( "Can't set TilesRequestProcessor: bad class name '" + configProcessorClassname + "'." ) ; throw new ServletException ( ex ) ; } if ( ComposableRequestProcessor . class . isAssignableFrom ( configProcessorClass ) ) { return ; } if ( configProcessorClassname . equals ( RequestProcessor . class . getName ( ) ) || configProcessorClassname . endsWith ( tilesProcessorClassname ) ) { ctrlConfig . setProcessorClass ( tilesProcessorClassname ) ; return ; } Class tilesProcessorClass = TilesRequestProcessor . class ; if ( ! tilesProcessorClass . isAssignableFrom ( configProcessorClass ) ) { String msg = "TilesPlugin : Specified RequestProcessor not compatible with TilesRequestProcessor" ; if ( log . isFatalEnabled ( ) ) { log . fatal ( msg ) ; } throw new ServletException ( msg ) ; } }
360
public IIdeaComment loadComment ( int id ) { IIdeaComment ideaComment = null ; Connection conn = null ; PreparedStatement stat = null ; ResultSet res = null ; try { conn = this . getConnection ( ) ; stat = conn . prepareStatement ( LOAD_COMMENT ) ; stat . setInt ( 1 , id ) ; res = stat . executeQuery ( ) ; if ( res . next ( ) ) { ideaComment = this . buildIdeaCommentFromRes ( res ) ; } } catch ( Throwable t ) { throw new RuntimeException ( "Error loading Comment" , t ) ; } finally { closeDaoResources ( res , stat , conn ) ; } return ideaComment ; }
public IIdeaComment loadComment ( int id ) { IIdeaComment ideaComment = null ; Connection conn = null ; PreparedStatement stat = null ; ResultSet res = null ; try { conn = this . getConnection ( ) ; stat = conn . prepareStatement ( LOAD_COMMENT ) ; stat . setInt ( 1 , id ) ; res = stat . executeQuery ( ) ; if ( res . next ( ) ) { ideaComment = this . buildIdeaCommentFromRes ( res ) ; } } catch ( Throwable t ) { _logger . error ( "Error loading Comment {}" , id , t ) ; throw new RuntimeException ( "Error loading Comment" , t ) ; } finally { closeDaoResources ( res , stat , conn ) ; } return ideaComment ; }
361
public boolean isDisabled ( Object object ) { User user = ( User ) object ; try { if ( isChecked ( user ) || ! UserGroupMembershipPolicyUtil . isMembershipAllowed ( user . getUserId ( ) , _userGroup . getUserGroupId ( ) ) ) { return true ; } } catch ( Exception exception ) { } return super . isDisabled ( object ) ; }
public boolean isDisabled ( Object object ) { User user = ( User ) object ; try { if ( isChecked ( user ) || ! UserGroupMembershipPolicyUtil . isMembershipAllowed ( user . getUserId ( ) , _userGroup . getUserGroupId ( ) ) ) { return true ; } } catch ( Exception exception ) { _log . error ( exception , exception ) ; } return super . isDisabled ( object ) ; }
362
@ Test public void testPerformance1 ( ) throws Exception { File file = Resources . asFile ( "/big/raw_sentences.txt" ) ; BasicLineIterator iterator = new BasicLineIterator ( file ) ; PrefetchingSentenceIterator fetcher = new PrefetchingSentenceIterator . Builder ( new BasicLineIterator ( file ) ) . setFetchSize ( 500000 ) . build ( ) ; long time01 = System . currentTimeMillis ( ) ; int cnt0 = 0 ; while ( iterator . hasNext ( ) ) { iterator . nextSentence ( ) ; cnt0 ++ ; } long time02 = System . currentTimeMillis ( ) ; long time11 = System . currentTimeMillis ( ) ; int cnt1 = 0 ; while ( fetcher . hasNext ( ) ) { fetcher . nextSentence ( ) ; cnt1 ++ ; } long time12 = System . currentTimeMillis ( ) ; log . info ( "Prefetched iterator: " + ( time12 - time11 ) ) ; long difference = ( time12 - time11 ) - ( time02 - time01 ) ; log . info ( "Difference: " + difference ) ; assertTrue ( difference < 150 ) ; }
@ Test public void testPerformance1 ( ) throws Exception { File file = Resources . asFile ( "/big/raw_sentences.txt" ) ; BasicLineIterator iterator = new BasicLineIterator ( file ) ; PrefetchingSentenceIterator fetcher = new PrefetchingSentenceIterator . Builder ( new BasicLineIterator ( file ) ) . setFetchSize ( 500000 ) . build ( ) ; long time01 = System . currentTimeMillis ( ) ; int cnt0 = 0 ; while ( iterator . hasNext ( ) ) { iterator . nextSentence ( ) ; cnt0 ++ ; } long time02 = System . currentTimeMillis ( ) ; long time11 = System . currentTimeMillis ( ) ; int cnt1 = 0 ; while ( fetcher . hasNext ( ) ) { fetcher . nextSentence ( ) ; cnt1 ++ ; } long time12 = System . currentTimeMillis ( ) ; log . info ( "Basic iterator: " + ( time02 - time01 ) ) ; log . info ( "Prefetched iterator: " + ( time12 - time11 ) ) ; long difference = ( time12 - time11 ) - ( time02 - time01 ) ; log . info ( "Difference: " + difference ) ; assertTrue ( difference < 150 ) ; }
363
@ Test public void testPerformance1 ( ) throws Exception { File file = Resources . asFile ( "/big/raw_sentences.txt" ) ; BasicLineIterator iterator = new BasicLineIterator ( file ) ; PrefetchingSentenceIterator fetcher = new PrefetchingSentenceIterator . Builder ( new BasicLineIterator ( file ) ) . setFetchSize ( 500000 ) . build ( ) ; long time01 = System . currentTimeMillis ( ) ; int cnt0 = 0 ; while ( iterator . hasNext ( ) ) { iterator . nextSentence ( ) ; cnt0 ++ ; } long time02 = System . currentTimeMillis ( ) ; long time11 = System . currentTimeMillis ( ) ; int cnt1 = 0 ; while ( fetcher . hasNext ( ) ) { fetcher . nextSentence ( ) ; cnt1 ++ ; } long time12 = System . currentTimeMillis ( ) ; log . info ( "Basic iterator: " + ( time02 - time01 ) ) ; long difference = ( time12 - time11 ) - ( time02 - time01 ) ; log . info ( "Difference: " + difference ) ; assertTrue ( difference < 150 ) ; }
@ Test public void testPerformance1 ( ) throws Exception { File file = Resources . asFile ( "/big/raw_sentences.txt" ) ; BasicLineIterator iterator = new BasicLineIterator ( file ) ; PrefetchingSentenceIterator fetcher = new PrefetchingSentenceIterator . Builder ( new BasicLineIterator ( file ) ) . setFetchSize ( 500000 ) . build ( ) ; long time01 = System . currentTimeMillis ( ) ; int cnt0 = 0 ; while ( iterator . hasNext ( ) ) { iterator . nextSentence ( ) ; cnt0 ++ ; } long time02 = System . currentTimeMillis ( ) ; long time11 = System . currentTimeMillis ( ) ; int cnt1 = 0 ; while ( fetcher . hasNext ( ) ) { fetcher . nextSentence ( ) ; cnt1 ++ ; } long time12 = System . currentTimeMillis ( ) ; log . info ( "Basic iterator: " + ( time02 - time01 ) ) ; log . info ( "Prefetched iterator: " + ( time12 - time11 ) ) ; long difference = ( time12 - time11 ) - ( time02 - time01 ) ; log . info ( "Difference: " + difference ) ; assertTrue ( difference < 150 ) ; }
364
@ Test public void testPerformance1 ( ) throws Exception { File file = Resources . asFile ( "/big/raw_sentences.txt" ) ; BasicLineIterator iterator = new BasicLineIterator ( file ) ; PrefetchingSentenceIterator fetcher = new PrefetchingSentenceIterator . Builder ( new BasicLineIterator ( file ) ) . setFetchSize ( 500000 ) . build ( ) ; long time01 = System . currentTimeMillis ( ) ; int cnt0 = 0 ; while ( iterator . hasNext ( ) ) { iterator . nextSentence ( ) ; cnt0 ++ ; } long time02 = System . currentTimeMillis ( ) ; long time11 = System . currentTimeMillis ( ) ; int cnt1 = 0 ; while ( fetcher . hasNext ( ) ) { fetcher . nextSentence ( ) ; cnt1 ++ ; } long time12 = System . currentTimeMillis ( ) ; log . info ( "Basic iterator: " + ( time02 - time01 ) ) ; log . info ( "Prefetched iterator: " + ( time12 - time11 ) ) ; long difference = ( time12 - time11 ) - ( time02 - time01 ) ; assertTrue ( difference < 150 ) ; }
@ Test public void testPerformance1 ( ) throws Exception { File file = Resources . asFile ( "/big/raw_sentences.txt" ) ; BasicLineIterator iterator = new BasicLineIterator ( file ) ; PrefetchingSentenceIterator fetcher = new PrefetchingSentenceIterator . Builder ( new BasicLineIterator ( file ) ) . setFetchSize ( 500000 ) . build ( ) ; long time01 = System . currentTimeMillis ( ) ; int cnt0 = 0 ; while ( iterator . hasNext ( ) ) { iterator . nextSentence ( ) ; cnt0 ++ ; } long time02 = System . currentTimeMillis ( ) ; long time11 = System . currentTimeMillis ( ) ; int cnt1 = 0 ; while ( fetcher . hasNext ( ) ) { fetcher . nextSentence ( ) ; cnt1 ++ ; } long time12 = System . currentTimeMillis ( ) ; log . info ( "Basic iterator: " + ( time02 - time01 ) ) ; log . info ( "Prefetched iterator: " + ( time12 - time11 ) ) ; long difference = ( time12 - time11 ) - ( time02 - time01 ) ; log . info ( "Difference: " + difference ) ; assertTrue ( difference < 150 ) ; }
365
public void process ( Exchange exchange ) throws Exception { Integer integer = duplicate . get ( exchange . getExchangeId ( ) ) ; if ( integer == null ) { duplicate . put ( exchange . getExchangeId ( ) , 1 ) ; } else { integer ++ ; duplicate . put ( exchange . getExchangeId ( ) , integer ) ; } Thread . sleep ( 20 ) ; }
public void process ( Exchange exchange ) throws Exception { Integer integer = duplicate . get ( exchange . getExchangeId ( ) ) ; if ( integer == null ) { duplicate . put ( exchange . getExchangeId ( ) , 1 ) ; } else { integer ++ ; duplicate . put ( exchange . getExchangeId ( ) , integer ) ; } log . info ( "Process called for-" + exchange . getExchangeId ( ) ) ; Thread . sleep ( 20 ) ; }
366
@ Test public void testDescribeFeatureType ( ) { String path = "wfs?request=DescribeFeatureType&typename=gsml:MappedFeature&version=1.1.0" ; String newline = System . getProperty ( "line.separator" ) ; Document doc = getAsDOM ( path ) ; assertEquals ( "xsd:schema" , doc . getDocumentElement ( ) . getNodeName ( ) ) ; assertXpathEvaluatesTo ( getNamespace ( "gsml" ) , "//@targetNamespace" , doc ) ; assertXpathCount ( 1 , "//xsd:include" , doc ) ; assertXpathCount ( 0 , "//xsd:import" , doc ) ; assertXpathEvaluatesTo ( Gsml30MockData . GSML_SCHEMA_LOCATION , "//xsd:include/@schemaLocation" , doc ) ; assertXpathCount ( 0 , "//xsd:complexType" , doc ) ; assertXpathCount ( 0 , "//xsd:element" , doc ) ; }
@ Test public void testDescribeFeatureType ( ) { String path = "wfs?request=DescribeFeatureType&typename=gsml:MappedFeature&version=1.1.0" ; String newline = System . getProperty ( "line.separator" ) ; Document doc = getAsDOM ( path ) ; LOGGER . info ( "Response for " + path + " :" + newline + prettyString ( doc ) ) ; assertEquals ( "xsd:schema" , doc . getDocumentElement ( ) . getNodeName ( ) ) ; assertXpathEvaluatesTo ( getNamespace ( "gsml" ) , "//@targetNamespace" , doc ) ; assertXpathCount ( 1 , "//xsd:include" , doc ) ; assertXpathCount ( 0 , "//xsd:import" , doc ) ; assertXpathEvaluatesTo ( Gsml30MockData . GSML_SCHEMA_LOCATION , "//xsd:include/@schemaLocation" , doc ) ; assertXpathCount ( 0 , "//xsd:complexType" , doc ) ; assertXpathCount ( 0 , "//xsd:element" , doc ) ; }
367
public SerDesInfo getSerDes ( Long serDesId ) { return atlasClient . getSerdesById ( serDesId ) . orElse ( null ) ; }
public SerDesInfo getSerDes ( Long serDesId ) { LOG . info ( "--------------- getSerDes {}" , serDesId ) ; return atlasClient . getSerdesById ( serDesId ) . orElse ( null ) ; }
368
public static int searchCount ( long companyId , long [ ] groupIds , long [ ] calendarResourceIds , String name , String description , boolean andOperator ) throws RemoteException { try { int returnValue = CalendarServiceUtil . searchCount ( companyId , groupIds , calendarResourceIds , name , description , andOperator ) ; return returnValue ; } catch ( Exception exception ) { throw new RemoteException ( exception . getMessage ( ) ) ; } }
public static int searchCount ( long companyId , long [ ] groupIds , long [ ] calendarResourceIds , String name , String description , boolean andOperator ) throws RemoteException { try { int returnValue = CalendarServiceUtil . searchCount ( companyId , groupIds , calendarResourceIds , name , description , andOperator ) ; return returnValue ; } catch ( Exception exception ) { _log . error ( exception , exception ) ; throw new RemoteException ( exception . getMessage ( ) ) ; } }
369
private ServiceNetwork createNetworkObject ( ) throws ElasticMachineProvisioningException { final String networkAsString = this . config . getNetworkAsString ( ) ; if ( StringUtils . isBlank ( networkAsString ) ) { return null ; } final ObjectMapper mapper = new ObjectMapper ( ) ; try { final ServiceNetwork network = mapper . readValue ( networkAsString , ServiceNetwork . class ) ; return network ; } catch ( final IOException e ) { throw new ElasticMachineProvisioningException ( "Failed to deserialize json string into service network description: " + e . getMessage ( ) , e ) ; } }
private ServiceNetwork createNetworkObject ( ) throws ElasticMachineProvisioningException { final String networkAsString = this . config . getNetworkAsString ( ) ; logger . info ( "Network string is: " + networkAsString ) ; if ( StringUtils . isBlank ( networkAsString ) ) { return null ; } final ObjectMapper mapper = new ObjectMapper ( ) ; try { final ServiceNetwork network = mapper . readValue ( networkAsString , ServiceNetwork . class ) ; return network ; } catch ( final IOException e ) { throw new ElasticMachineProvisioningException ( "Failed to deserialize json string into service network description: " + e . getMessage ( ) , e ) ; } }
370
private void appendUserRoleCodeSystem ( RequestType policyRequest , AssertionType assertion ) { SubjectType parent = getSubject ( policyRequest ) ; String attributeId = XacmlAttributeId . UserRoleCodeSystem ; String dataType = Constants . DataTypeString ; String attributeValue = extractUserRoleCodeSystem ( assertion ) ; AttributeHelper attrHelper = new AttributeHelper ( ) ; attrHelper . appendAttributeToParent ( parent , attributeId , dataType , attributeValue , appendAttributesIfNull ) ; LOG . debug ( "end appending UserRoleCodeSystem" ) ; }
private void appendUserRoleCodeSystem ( RequestType policyRequest , AssertionType assertion ) { LOG . debug ( "begin appending UserRoleCodeSystem" ) ; SubjectType parent = getSubject ( policyRequest ) ; String attributeId = XacmlAttributeId . UserRoleCodeSystem ; String dataType = Constants . DataTypeString ; String attributeValue = extractUserRoleCodeSystem ( assertion ) ; AttributeHelper attrHelper = new AttributeHelper ( ) ; attrHelper . appendAttributeToParent ( parent , attributeId , dataType , attributeValue , appendAttributesIfNull ) ; LOG . debug ( "end appending UserRoleCodeSystem" ) ; }
371
private void appendUserRoleCodeSystem ( RequestType policyRequest , AssertionType assertion ) { LOG . debug ( "begin appending UserRoleCodeSystem" ) ; SubjectType parent = getSubject ( policyRequest ) ; String attributeId = XacmlAttributeId . UserRoleCodeSystem ; String dataType = Constants . DataTypeString ; String attributeValue = extractUserRoleCodeSystem ( assertion ) ; AttributeHelper attrHelper = new AttributeHelper ( ) ; attrHelper . appendAttributeToParent ( parent , attributeId , dataType , attributeValue , appendAttributesIfNull ) ; }
private void appendUserRoleCodeSystem ( RequestType policyRequest , AssertionType assertion ) { LOG . debug ( "begin appending UserRoleCodeSystem" ) ; SubjectType parent = getSubject ( policyRequest ) ; String attributeId = XacmlAttributeId . UserRoleCodeSystem ; String dataType = Constants . DataTypeString ; String attributeValue = extractUserRoleCodeSystem ( assertion ) ; AttributeHelper attrHelper = new AttributeHelper ( ) ; attrHelper . appendAttributeToParent ( parent , attributeId , dataType , attributeValue , appendAttributesIfNull ) ; LOG . debug ( "end appending UserRoleCodeSystem" ) ; }
372
public void collectionItemRemoved ( final E item ) { if ( this . isLocked ( ) ) { return ; } for ( CollectionListener < E > listener : listeners ) { listener . collectionItemRemoved ( item ) ; } }
public void collectionItemRemoved ( final E item ) { if ( this . isLocked ( ) ) { log . debug ( "Do not notify changes of locked collection" ) ; return ; } for ( CollectionListener < E > listener : listeners ) { listener . collectionItemRemoved ( item ) ; } }
373
private static void writeOutputFile ( ) { FileWriter writer ; File file ; file = new File ( outputEducFileDir ) ; try { writer = new FileWriter ( file , true ) ; writer . write ( "id\tx\ty\teduc_kiga\teduc_primary\teduc_secondary\tmergedFacilityIds\n" ) ; for ( EducFacility educFacility : educListNewAreaForOutput ) { int isKiga = 0 ; int isPrimary = 0 ; int isSecondary = 0 ; String mergedFacilites = "" ; if ( educFacility . isEducKiga ( ) ) isKiga = 1 ; if ( educFacility . isEducPrimary ( ) ) isPrimary = 1 ; if ( educFacility . isEducSecondary ( ) ) isSecondary = 1 ; for ( Id < ActivityFacility > otherFac : educFacility . getContainedFacilities ( ) ) { mergedFacilites += otherFac . toString ( ) + ";" ; } writer . write ( educFacility . getId ( ) + "\t" + educFacility . getCoord ( ) . getX ( ) + "\t" + educFacility . getCoord ( ) . getY ( ) + "\t" + isKiga + "\t" + isPrimary + "\t" + isSecondary + "\t" + mergedFacilites + "\n" ) ; } writer . flush ( ) ; writer . close ( ) ; } catch ( IOException e ) { e . printStackTrace ( ) ; } }
private static void writeOutputFile ( ) { FileWriter writer ; File file ; file = new File ( outputEducFileDir ) ; try { writer = new FileWriter ( file , true ) ; writer . write ( "id\tx\ty\teduc_kiga\teduc_primary\teduc_secondary\tmergedFacilityIds\n" ) ; for ( EducFacility educFacility : educListNewAreaForOutput ) { int isKiga = 0 ; int isPrimary = 0 ; int isSecondary = 0 ; String mergedFacilites = "" ; if ( educFacility . isEducKiga ( ) ) isKiga = 1 ; if ( educFacility . isEducPrimary ( ) ) isPrimary = 1 ; if ( educFacility . isEducSecondary ( ) ) isSecondary = 1 ; for ( Id < ActivityFacility > otherFac : educFacility . getContainedFacilities ( ) ) { mergedFacilites += otherFac . toString ( ) + ";" ; } writer . write ( educFacility . getId ( ) + "\t" + educFacility . getCoord ( ) . getX ( ) + "\t" + educFacility . getCoord ( ) . getY ( ) + "\t" + isKiga + "\t" + isPrimary + "\t" + isSecondary + "\t" + mergedFacilites + "\n" ) ; } writer . flush ( ) ; writer . close ( ) ; } catch ( IOException e ) { e . printStackTrace ( ) ; } log . info ( "Wrote result file under " + outputEducFileDir ) ; }
374
public void onMessage ( UUID nodeId , Object msg , byte plc ) { MessageInfo info = rcvMsgMap . get ( nodeId ) ; if ( info == null ) { if ( log . isDebugEnabled ( ) ) return ; } int stealReqs0 ; synchronized ( info ) { JobStealingRequest req = ( JobStealingRequest ) msg ; stealReqs0 = stealReqs . addAndGet ( req . delta ( ) - info . jobsToSteal ( ) ) ; info . reset ( req . delta ( ) ) ; } if ( log . isDebugEnabled ( ) ) log . debug ( "Received steal request [nodeId=" + nodeId + ", msg=" + msg + ", stealReqs=" + stealReqs0 + ']' ) ; CollisionExternalListener tmp = extLsnr ; if ( tmp != null ) tmp . onExternalCollision ( ) ; }
public void onMessage ( UUID nodeId , Object msg , byte plc ) { MessageInfo info = rcvMsgMap . get ( nodeId ) ; if ( info == null ) { if ( log . isDebugEnabled ( ) ) log . debug ( "Ignoring message steal request as discovery event has not yet been received " + "for node: " + nodeId ) ; return ; } int stealReqs0 ; synchronized ( info ) { JobStealingRequest req = ( JobStealingRequest ) msg ; stealReqs0 = stealReqs . addAndGet ( req . delta ( ) - info . jobsToSteal ( ) ) ; info . reset ( req . delta ( ) ) ; } if ( log . isDebugEnabled ( ) ) log . debug ( "Received steal request [nodeId=" + nodeId + ", msg=" + msg + ", stealReqs=" + stealReqs0 + ']' ) ; CollisionExternalListener tmp = extLsnr ; if ( tmp != null ) tmp . onExternalCollision ( ) ; }
375
public void onMessage ( UUID nodeId , Object msg , byte plc ) { MessageInfo info = rcvMsgMap . get ( nodeId ) ; if ( info == null ) { if ( log . isDebugEnabled ( ) ) log . debug ( "Ignoring message steal request as discovery event has not yet been received " + "for node: " + nodeId ) ; return ; } int stealReqs0 ; synchronized ( info ) { JobStealingRequest req = ( JobStealingRequest ) msg ; stealReqs0 = stealReqs . addAndGet ( req . delta ( ) - info . jobsToSteal ( ) ) ; info . reset ( req . delta ( ) ) ; } if ( log . isDebugEnabled ( ) ) CollisionExternalListener tmp = extLsnr ; if ( tmp != null ) tmp . onExternalCollision ( ) ; }
public void onMessage ( UUID nodeId , Object msg , byte plc ) { MessageInfo info = rcvMsgMap . get ( nodeId ) ; if ( info == null ) { if ( log . isDebugEnabled ( ) ) log . debug ( "Ignoring message steal request as discovery event has not yet been received " + "for node: " + nodeId ) ; return ; } int stealReqs0 ; synchronized ( info ) { JobStealingRequest req = ( JobStealingRequest ) msg ; stealReqs0 = stealReqs . addAndGet ( req . delta ( ) - info . jobsToSteal ( ) ) ; info . reset ( req . delta ( ) ) ; } if ( log . isDebugEnabled ( ) ) log . debug ( "Received steal request [nodeId=" + nodeId + ", msg=" + msg + ", stealReqs=" + stealReqs0 + ']' ) ; CollisionExternalListener tmp = extLsnr ; if ( tmp != null ) tmp . onExternalCollision ( ) ; }
376
private void checkTransform ( Integer key ) throws Exception { IgniteCache < Integer , BinaryObject > c = keepBinaryCache ( ) ; try { c . invoke ( key , new EntryProcessor < Integer , BinaryObject , Void > ( ) { @ Override public Void process ( MutableEntry < Integer , BinaryObject > e , Object ... args ) { BinaryObject val = e . getValue ( ) ; assertNull ( "Unexpected value: " + val , val ) ; return null ; } } ) ; jcache ( 0 ) . put ( key , new TestObject ( 1 ) ) ; c . invoke ( key , new EntryProcessor < Integer , BinaryObject , Void > ( ) { @ Override public Void process ( MutableEntry < Integer , BinaryObject > e , Object ... args ) { BinaryObject val = e . getValue ( ) ; assertNotNull ( "Unexpected value: " + val , val ) ; assertEquals ( new Integer ( 1 ) , val . field ( "val" ) ) ; Ignite ignite = e . unwrap ( Ignite . class ) ; IgniteBinary binaries = ignite . binary ( ) ; BinaryObjectBuilder builder = binaries . builder ( val ) ; builder . setField ( "val" , 2 ) ; e . setValue ( builder . build ( ) ) ; return null ; } } ) ; BinaryObject obj = c . get ( key ) ; assertEquals ( new Integer ( 2 ) , obj . field ( "val" ) ) ; c . invoke ( key , new EntryProcessor < Integer , BinaryObject , Void > ( ) { @ Override public Void process ( MutableEntry < Integer , BinaryObject > e , Object ... args ) { BinaryObject val = e . getValue ( ) ; assertNotNull ( "Unexpected value: " + val , val ) ; assertEquals ( new Integer ( 2 ) , val . field ( "val" ) ) ; e . setValue ( val ) ; return null ; } } ) ; obj = c . get ( key ) ; assertEquals ( new Integer ( 2 ) , obj . field ( "val" ) ) ; c . invoke ( key , new EntryProcessor < Integer , BinaryObject , Void > ( ) { @ Override public Void process ( MutableEntry < Integer , BinaryObject > e , Object ... args ) { BinaryObject val = e . getValue ( ) ; assertNotNull ( "Unexpected value: " + val , val ) ; assertEquals ( new Integer ( 2 ) , val . field ( "val" ) ) ; e . remove ( ) ; return null ; } } ) ; assertNull ( c . get ( key ) ) ; } finally { c . remove ( key ) ; } }
private void checkTransform ( Integer key ) throws Exception { log . info ( "Transform: " + key ) ; IgniteCache < Integer , BinaryObject > c = keepBinaryCache ( ) ; try { c . invoke ( key , new EntryProcessor < Integer , BinaryObject , Void > ( ) { @ Override public Void process ( MutableEntry < Integer , BinaryObject > e , Object ... args ) { BinaryObject val = e . getValue ( ) ; assertNull ( "Unexpected value: " + val , val ) ; return null ; } } ) ; jcache ( 0 ) . put ( key , new TestObject ( 1 ) ) ; c . invoke ( key , new EntryProcessor < Integer , BinaryObject , Void > ( ) { @ Override public Void process ( MutableEntry < Integer , BinaryObject > e , Object ... args ) { BinaryObject val = e . getValue ( ) ; assertNotNull ( "Unexpected value: " + val , val ) ; assertEquals ( new Integer ( 1 ) , val . field ( "val" ) ) ; Ignite ignite = e . unwrap ( Ignite . class ) ; IgniteBinary binaries = ignite . binary ( ) ; BinaryObjectBuilder builder = binaries . builder ( val ) ; builder . setField ( "val" , 2 ) ; e . setValue ( builder . build ( ) ) ; return null ; } } ) ; BinaryObject obj = c . get ( key ) ; assertEquals ( new Integer ( 2 ) , obj . field ( "val" ) ) ; c . invoke ( key , new EntryProcessor < Integer , BinaryObject , Void > ( ) { @ Override public Void process ( MutableEntry < Integer , BinaryObject > e , Object ... args ) { BinaryObject val = e . getValue ( ) ; assertNotNull ( "Unexpected value: " + val , val ) ; assertEquals ( new Integer ( 2 ) , val . field ( "val" ) ) ; e . setValue ( val ) ; return null ; } } ) ; obj = c . get ( key ) ; assertEquals ( new Integer ( 2 ) , obj . field ( "val" ) ) ; c . invoke ( key , new EntryProcessor < Integer , BinaryObject , Void > ( ) { @ Override public Void process ( MutableEntry < Integer , BinaryObject > e , Object ... args ) { BinaryObject val = e . getValue ( ) ; assertNotNull ( "Unexpected value: " + val , val ) ; assertEquals ( new Integer ( 2 ) , val . field ( "val" ) ) ; e . remove ( ) ; return null ; } } ) ; assertNull ( c . get ( key ) ) ; } finally { c . remove ( key ) ; } }
377
public void removeRootControllerService ( final ControllerServiceNode service ) { final ControllerServiceNode existing = rootControllerServices . get ( requireNonNull ( service ) . getIdentifier ( ) ) ; if ( existing == null ) { throw new IllegalStateException ( service + " is not a member of this Process Group" ) ; } service . verifyCanDelete ( ) ; final ExtensionManager extensionManager = flowController . getExtensionManager ( ) ; final VariableRegistry variableRegistry = flowController . getVariableRegistry ( ) ; try ( final NarCloseable x = NarCloseable . withComponentNarLoader ( extensionManager , service . getControllerServiceImplementation ( ) . getClass ( ) , service . getIdentifier ( ) ) ) { final ConfigurationContext configurationContext = new StandardConfigurationContext ( service , flowController . getControllerServiceProvider ( ) , null , variableRegistry ) ; ReflectionUtils . quietlyInvokeMethodsWithAnnotation ( OnRemoved . class , service . getControllerServiceImplementation ( ) , configurationContext ) ; } for ( final Map . Entry < PropertyDescriptor , String > entry : service . getEffectivePropertyValues ( ) . entrySet ( ) ) { final PropertyDescriptor descriptor = entry . getKey ( ) ; if ( descriptor . getControllerServiceDefinition ( ) != null ) { final String value = entry . getValue ( ) == null ? descriptor . getDefaultValue ( ) : entry . getValue ( ) ; if ( value != null ) { final ControllerServiceNode referencedNode = getRootControllerService ( value ) ; if ( referencedNode != null ) { referencedNode . removeReference ( service , descriptor ) ; } } } } rootControllerServices . remove ( service . getIdentifier ( ) ) ; flowController . getStateManagerProvider ( ) . onComponentRemoved ( service . getIdentifier ( ) ) ; extensionManager . removeInstanceClassLoader ( service . getIdentifier ( ) ) ; }
public void removeRootControllerService ( final ControllerServiceNode service ) { final ControllerServiceNode existing = rootControllerServices . get ( requireNonNull ( service ) . getIdentifier ( ) ) ; if ( existing == null ) { throw new IllegalStateException ( service + " is not a member of this Process Group" ) ; } service . verifyCanDelete ( ) ; final ExtensionManager extensionManager = flowController . getExtensionManager ( ) ; final VariableRegistry variableRegistry = flowController . getVariableRegistry ( ) ; try ( final NarCloseable x = NarCloseable . withComponentNarLoader ( extensionManager , service . getControllerServiceImplementation ( ) . getClass ( ) , service . getIdentifier ( ) ) ) { final ConfigurationContext configurationContext = new StandardConfigurationContext ( service , flowController . getControllerServiceProvider ( ) , null , variableRegistry ) ; ReflectionUtils . quietlyInvokeMethodsWithAnnotation ( OnRemoved . class , service . getControllerServiceImplementation ( ) , configurationContext ) ; } for ( final Map . Entry < PropertyDescriptor , String > entry : service . getEffectivePropertyValues ( ) . entrySet ( ) ) { final PropertyDescriptor descriptor = entry . getKey ( ) ; if ( descriptor . getControllerServiceDefinition ( ) != null ) { final String value = entry . getValue ( ) == null ? descriptor . getDefaultValue ( ) : entry . getValue ( ) ; if ( value != null ) { final ControllerServiceNode referencedNode = getRootControllerService ( value ) ; if ( referencedNode != null ) { referencedNode . removeReference ( service , descriptor ) ; } } } } rootControllerServices . remove ( service . getIdentifier ( ) ) ; flowController . getStateManagerProvider ( ) . onComponentRemoved ( service . getIdentifier ( ) ) ; extensionManager . removeInstanceClassLoader ( service . getIdentifier ( ) ) ; logger . info ( "{} removed from Flow Controller" , service ) ; }
378
private Stream < ResultDocument > getResultList ( QueryExpression q , ExecutionContext ctx ) { Retrieve r = block . getStep ( Retrieve . class ) ; if ( r != null ) { r . setQuery ( q ) ; StepResult < ResultDocument > results = block . getResultStep ( ) . getResults ( ctx ) ; return results . stream ( ) ; } else { throw new IllegalStateException ( "Cannot find a Retrieve step in block" ) ; } }
private Stream < ResultDocument > getResultList ( QueryExpression q , ExecutionContext ctx ) { LOGGER . debug ( "getResultList q={} block={}" , q , block ) ; Retrieve r = block . getStep ( Retrieve . class ) ; if ( r != null ) { r . setQuery ( q ) ; StepResult < ResultDocument > results = block . getResultStep ( ) . getResults ( ctx ) ; return results . stream ( ) ; } else { throw new IllegalStateException ( "Cannot find a Retrieve step in block" ) ; } }
379
void returnError ( HttpServletResponse resp , @ Nullable String errorMessage ) { try { String message = errorMessage != null ? errorMessage : "null" ; resp . getWriter ( ) . write ( "<html>" + HtmlEscape . escapeHtml4 ( message ) + "<br><a href='" + servletUrl + "'>Try again</a></html>" ) ; } catch ( IOException e ) { } }
void returnError ( HttpServletResponse resp , @ Nullable String errorMessage ) { try { String message = errorMessage != null ? errorMessage : "null" ; resp . getWriter ( ) . write ( "<html>" + HtmlEscape . escapeHtml4 ( message ) + "<br><a href='" + servletUrl + "'>Try again</a></html>" ) ; } catch ( IOException e ) { logger . info ( "Returning error message failed" , e ) ; } }
380
public Connection getConnection ( ConnectionSpec properties ) throws ResourceException { return new MockConnection ( _listener ) ; }
public Connection getConnection ( ConnectionSpec properties ) throws ResourceException { _logger . debug ( "call getConnection(" + properties + ")" ) ; return new MockConnection ( _listener ) ; }
381
@ Test public void testMemberLeavesThatClusterListenerNotNotified ( ) { Cache < Object , String > cache0 = cache ( 0 , CACHE_NAME ) ; Cache < Object , String > cache1 = cache ( 1 , CACHE_NAME ) ; Cache < Object , String > cache2 = cache ( 2 , CACHE_NAME ) ; Object key = new MagicKey ( cache1 , cache2 ) ; cache1 . put ( key , "some-key" ) ; final ClusterListener clusterListener = listener ( ) ; cache0 . addListener ( clusterListener ) ; TestingUtil . killCacheManagers ( manager ( 1 ) ) ; cacheManagers . remove ( 1 ) ; log . info ( "Node 1 killed" ) ; TestingUtil . blockUntilViewsReceived ( 10000 , false , cacheManagers ) ; TestingUtil . waitForNoRebalance ( caches ( CACHE_NAME ) ) ; assertEquals ( clusterListener . hasIncludeState ( ) ? 1 : 0 , clusterListener . events . size ( ) ) ; }
@ Test public void testMemberLeavesThatClusterListenerNotNotified ( ) { Cache < Object , String > cache0 = cache ( 0 , CACHE_NAME ) ; Cache < Object , String > cache1 = cache ( 1 , CACHE_NAME ) ; Cache < Object , String > cache2 = cache ( 2 , CACHE_NAME ) ; Object key = new MagicKey ( cache1 , cache2 ) ; cache1 . put ( key , "some-key" ) ; final ClusterListener clusterListener = listener ( ) ; cache0 . addListener ( clusterListener ) ; log . info ( "Killing node 1 .." ) ; TestingUtil . killCacheManagers ( manager ( 1 ) ) ; cacheManagers . remove ( 1 ) ; log . info ( "Node 1 killed" ) ; TestingUtil . blockUntilViewsReceived ( 10000 , false , cacheManagers ) ; TestingUtil . waitForNoRebalance ( caches ( CACHE_NAME ) ) ; assertEquals ( clusterListener . hasIncludeState ( ) ? 1 : 0 , clusterListener . events . size ( ) ) ; }
382
@ Test public void testMemberLeavesThatClusterListenerNotNotified ( ) { Cache < Object , String > cache0 = cache ( 0 , CACHE_NAME ) ; Cache < Object , String > cache1 = cache ( 1 , CACHE_NAME ) ; Cache < Object , String > cache2 = cache ( 2 , CACHE_NAME ) ; Object key = new MagicKey ( cache1 , cache2 ) ; cache1 . put ( key , "some-key" ) ; final ClusterListener clusterListener = listener ( ) ; cache0 . addListener ( clusterListener ) ; log . info ( "Killing node 1 .." ) ; TestingUtil . killCacheManagers ( manager ( 1 ) ) ; cacheManagers . remove ( 1 ) ; TestingUtil . blockUntilViewsReceived ( 10000 , false , cacheManagers ) ; TestingUtil . waitForNoRebalance ( caches ( CACHE_NAME ) ) ; assertEquals ( clusterListener . hasIncludeState ( ) ? 1 : 0 , clusterListener . events . size ( ) ) ; }
@ Test public void testMemberLeavesThatClusterListenerNotNotified ( ) { Cache < Object , String > cache0 = cache ( 0 , CACHE_NAME ) ; Cache < Object , String > cache1 = cache ( 1 , CACHE_NAME ) ; Cache < Object , String > cache2 = cache ( 2 , CACHE_NAME ) ; Object key = new MagicKey ( cache1 , cache2 ) ; cache1 . put ( key , "some-key" ) ; final ClusterListener clusterListener = listener ( ) ; cache0 . addListener ( clusterListener ) ; log . info ( "Killing node 1 .." ) ; TestingUtil . killCacheManagers ( manager ( 1 ) ) ; cacheManagers . remove ( 1 ) ; log . info ( "Node 1 killed" ) ; TestingUtil . blockUntilViewsReceived ( 10000 , false , cacheManagers ) ; TestingUtil . waitForNoRebalance ( caches ( CACHE_NAME ) ) ; assertEquals ( clusterListener . hasIncludeState ( ) ? 1 : 0 , clusterListener . events . size ( ) ) ; }
383
private Arc createArc ( Node left , Node right , RelationshipMeta relationship ) { Arc arc = new Arc ( left , right , relationship , this ) ; arcs . add ( arc ) ; left . addArc ( arc ) ; right . addArc ( arc ) ; return arc ; }
private Arc createArc ( Node left , Node right , RelationshipMeta relationship ) { Arc arc = new Arc ( left , right , relationship , this ) ; arcs . add ( arc ) ; logger . trace ( "Created " + arc ) ; left . addArc ( arc ) ; right . addArc ( arc ) ; return arc ; }
384
private void updateBuildTaskStatus ( BuildTask task , BuildCoordinationStatus status , String statusDescription ) { BuildCoordinationStatus oldStatus = task . getStatus ( ) ; if ( status . isCompleted ( ) && ! oldStatus . isCompleted ( ) ) { markFinished ( task , status , statusDescription ) ; } else { task . setStatus ( status ) ; task . setStatusDescription ( statusDescription ) ; } Build build = buildMapper . fromBuildTask ( task ) ; BuildStatusChangedEvent buildStatusChanged = new DefaultBuildStatusChangedEvent ( build , BuildStatus . fromBuildCoordinationStatus ( oldStatus ) , BuildStatus . fromBuildCoordinationStatus ( status ) ) ; userLog . info ( "Build status updated to {}; previous: {}" , status , oldStatus ) ; BuildStatus oldBuildStatus = BuildStatus . fromBuildCoordinationStatus ( oldStatus ) ; BuildStatus newBuildStatus = BuildStatus . fromBuildCoordinationStatus ( status ) ; if ( ( oldBuildStatus != newBuildStatus ) && ! ( oldBuildStatus . isFinal ( ) && newBuildStatus . isFinal ( ) ) ) { buildStatusChangedEventNotifier . fire ( buildStatusChanged ) ; log . debug ( "Fired buildStatusChangedEventNotifier after task {} status update to {}." , task . getId ( ) , status ) ; } }
private void updateBuildTaskStatus ( BuildTask task , BuildCoordinationStatus status , String statusDescription ) { BuildCoordinationStatus oldStatus = task . getStatus ( ) ; if ( status . isCompleted ( ) && ! oldStatus . isCompleted ( ) ) { markFinished ( task , status , statusDescription ) ; } else { task . setStatus ( status ) ; task . setStatusDescription ( statusDescription ) ; } Build build = buildMapper . fromBuildTask ( task ) ; BuildStatusChangedEvent buildStatusChanged = new DefaultBuildStatusChangedEvent ( build , BuildStatus . fromBuildCoordinationStatus ( oldStatus ) , BuildStatus . fromBuildCoordinationStatus ( status ) ) ; log . debug ( "Updated build task {} status to {}; old coord status: {}, new coord status: {}" , task . getId ( ) , buildStatusChanged , oldStatus , status ) ; userLog . info ( "Build status updated to {}; previous: {}" , status , oldStatus ) ; BuildStatus oldBuildStatus = BuildStatus . fromBuildCoordinationStatus ( oldStatus ) ; BuildStatus newBuildStatus = BuildStatus . fromBuildCoordinationStatus ( status ) ; if ( ( oldBuildStatus != newBuildStatus ) && ! ( oldBuildStatus . isFinal ( ) && newBuildStatus . isFinal ( ) ) ) { buildStatusChangedEventNotifier . fire ( buildStatusChanged ) ; log . debug ( "Fired buildStatusChangedEventNotifier after task {} status update to {}." , task . getId ( ) , status ) ; } }
385
private void updateBuildTaskStatus ( BuildTask task , BuildCoordinationStatus status , String statusDescription ) { BuildCoordinationStatus oldStatus = task . getStatus ( ) ; if ( status . isCompleted ( ) && ! oldStatus . isCompleted ( ) ) { markFinished ( task , status , statusDescription ) ; } else { task . setStatus ( status ) ; task . setStatusDescription ( statusDescription ) ; } Build build = buildMapper . fromBuildTask ( task ) ; BuildStatusChangedEvent buildStatusChanged = new DefaultBuildStatusChangedEvent ( build , BuildStatus . fromBuildCoordinationStatus ( oldStatus ) , BuildStatus . fromBuildCoordinationStatus ( status ) ) ; log . debug ( "Updated build task {} status to {}; old coord status: {}, new coord status: {}" , task . getId ( ) , buildStatusChanged , oldStatus , status ) ; BuildStatus oldBuildStatus = BuildStatus . fromBuildCoordinationStatus ( oldStatus ) ; BuildStatus newBuildStatus = BuildStatus . fromBuildCoordinationStatus ( status ) ; if ( ( oldBuildStatus != newBuildStatus ) && ! ( oldBuildStatus . isFinal ( ) && newBuildStatus . isFinal ( ) ) ) { buildStatusChangedEventNotifier . fire ( buildStatusChanged ) ; log . debug ( "Fired buildStatusChangedEventNotifier after task {} status update to {}." , task . getId ( ) , status ) ; } }
private void updateBuildTaskStatus ( BuildTask task , BuildCoordinationStatus status , String statusDescription ) { BuildCoordinationStatus oldStatus = task . getStatus ( ) ; if ( status . isCompleted ( ) && ! oldStatus . isCompleted ( ) ) { markFinished ( task , status , statusDescription ) ; } else { task . setStatus ( status ) ; task . setStatusDescription ( statusDescription ) ; } Build build = buildMapper . fromBuildTask ( task ) ; BuildStatusChangedEvent buildStatusChanged = new DefaultBuildStatusChangedEvent ( build , BuildStatus . fromBuildCoordinationStatus ( oldStatus ) , BuildStatus . fromBuildCoordinationStatus ( status ) ) ; log . debug ( "Updated build task {} status to {}; old coord status: {}, new coord status: {}" , task . getId ( ) , buildStatusChanged , oldStatus , status ) ; userLog . info ( "Build status updated to {}; previous: {}" , status , oldStatus ) ; BuildStatus oldBuildStatus = BuildStatus . fromBuildCoordinationStatus ( oldStatus ) ; BuildStatus newBuildStatus = BuildStatus . fromBuildCoordinationStatus ( status ) ; if ( ( oldBuildStatus != newBuildStatus ) && ! ( oldBuildStatus . isFinal ( ) && newBuildStatus . isFinal ( ) ) ) { buildStatusChangedEventNotifier . fire ( buildStatusChanged ) ; log . debug ( "Fired buildStatusChangedEventNotifier after task {} status update to {}." , task . getId ( ) , status ) ; } }
386
private void updateBuildTaskStatus ( BuildTask task , BuildCoordinationStatus status , String statusDescription ) { BuildCoordinationStatus oldStatus = task . getStatus ( ) ; if ( status . isCompleted ( ) && ! oldStatus . isCompleted ( ) ) { markFinished ( task , status , statusDescription ) ; } else { task . setStatus ( status ) ; task . setStatusDescription ( statusDescription ) ; } Build build = buildMapper . fromBuildTask ( task ) ; BuildStatusChangedEvent buildStatusChanged = new DefaultBuildStatusChangedEvent ( build , BuildStatus . fromBuildCoordinationStatus ( oldStatus ) , BuildStatus . fromBuildCoordinationStatus ( status ) ) ; log . debug ( "Updated build task {} status to {}; old coord status: {}, new coord status: {}" , task . getId ( ) , buildStatusChanged , oldStatus , status ) ; userLog . info ( "Build status updated to {}; previous: {}" , status , oldStatus ) ; BuildStatus oldBuildStatus = BuildStatus . fromBuildCoordinationStatus ( oldStatus ) ; BuildStatus newBuildStatus = BuildStatus . fromBuildCoordinationStatus ( status ) ; if ( ( oldBuildStatus != newBuildStatus ) && ! ( oldBuildStatus . isFinal ( ) && newBuildStatus . isFinal ( ) ) ) { buildStatusChangedEventNotifier . fire ( buildStatusChanged ) ; } }
private void updateBuildTaskStatus ( BuildTask task , BuildCoordinationStatus status , String statusDescription ) { BuildCoordinationStatus oldStatus = task . getStatus ( ) ; if ( status . isCompleted ( ) && ! oldStatus . isCompleted ( ) ) { markFinished ( task , status , statusDescription ) ; } else { task . setStatus ( status ) ; task . setStatusDescription ( statusDescription ) ; } Build build = buildMapper . fromBuildTask ( task ) ; BuildStatusChangedEvent buildStatusChanged = new DefaultBuildStatusChangedEvent ( build , BuildStatus . fromBuildCoordinationStatus ( oldStatus ) , BuildStatus . fromBuildCoordinationStatus ( status ) ) ; log . debug ( "Updated build task {} status to {}; old coord status: {}, new coord status: {}" , task . getId ( ) , buildStatusChanged , oldStatus , status ) ; userLog . info ( "Build status updated to {}; previous: {}" , status , oldStatus ) ; BuildStatus oldBuildStatus = BuildStatus . fromBuildCoordinationStatus ( oldStatus ) ; BuildStatus newBuildStatus = BuildStatus . fromBuildCoordinationStatus ( status ) ; if ( ( oldBuildStatus != newBuildStatus ) && ! ( oldBuildStatus . isFinal ( ) && newBuildStatus . isFinal ( ) ) ) { buildStatusChangedEventNotifier . fire ( buildStatusChanged ) ; log . debug ( "Fired buildStatusChangedEventNotifier after task {} status update to {}." , task . getId ( ) , status ) ; } }
387
private void handleBrokerResource ( AbstractTableConfig tableConfig ) { try { String brokerTenant = ControllerTenantNameBuilder . getBrokerTenantNameForTenant ( tableConfig . getTenantConfig ( ) . getBroker ( ) ) ; if ( _helixAdmin . getInstancesInClusterWithTag ( _helixClusterName , brokerTenant ) . isEmpty ( ) ) { throw new RuntimeException ( "broker tenant : " + tableConfig . getTenantConfig ( ) . getBroker ( ) + " is not existed!" ) ; } final IdealState idealState = _helixAdmin . getResourceIdealState ( _helixClusterName , CommonConstants . Helix . BROKER_RESOURCE_INSTANCE ) ; String tableName = tableConfig . getTableName ( ) ; for ( String instanceName : _helixAdmin . getInstancesInClusterWithTag ( _helixClusterName , brokerTenant ) ) { idealState . setPartitionState ( tableName , instanceName , BrokerOnlineOfflineStateModel . ONLINE ) ; } if ( idealState != null ) { _helixAdmin . setResourceIdealState ( _helixClusterName , CommonConstants . Helix . BROKER_RESOURCE_INSTANCE , idealState ) ; } } catch ( final Exception e ) { LOGGER . warn ( "Caught exception while creating broker" , e ) ; } }
private void handleBrokerResource ( AbstractTableConfig tableConfig ) { try { String brokerTenant = ControllerTenantNameBuilder . getBrokerTenantNameForTenant ( tableConfig . getTenantConfig ( ) . getBroker ( ) ) ; if ( _helixAdmin . getInstancesInClusterWithTag ( _helixClusterName , brokerTenant ) . isEmpty ( ) ) { throw new RuntimeException ( "broker tenant : " + tableConfig . getTenantConfig ( ) . getBroker ( ) + " is not existed!" ) ; } LOGGER . info ( "Trying to update BrokerDataResource IdealState!" ) ; final IdealState idealState = _helixAdmin . getResourceIdealState ( _helixClusterName , CommonConstants . Helix . BROKER_RESOURCE_INSTANCE ) ; String tableName = tableConfig . getTableName ( ) ; for ( String instanceName : _helixAdmin . getInstancesInClusterWithTag ( _helixClusterName , brokerTenant ) ) { idealState . setPartitionState ( tableName , instanceName , BrokerOnlineOfflineStateModel . ONLINE ) ; } if ( idealState != null ) { _helixAdmin . setResourceIdealState ( _helixClusterName , CommonConstants . Helix . BROKER_RESOURCE_INSTANCE , idealState ) ; } } catch ( final Exception e ) { LOGGER . warn ( "Caught exception while creating broker" , e ) ; } }
388
private void handleBrokerResource ( AbstractTableConfig tableConfig ) { try { String brokerTenant = ControllerTenantNameBuilder . getBrokerTenantNameForTenant ( tableConfig . getTenantConfig ( ) . getBroker ( ) ) ; if ( _helixAdmin . getInstancesInClusterWithTag ( _helixClusterName , brokerTenant ) . isEmpty ( ) ) { throw new RuntimeException ( "broker tenant : " + tableConfig . getTenantConfig ( ) . getBroker ( ) + " is not existed!" ) ; } LOGGER . info ( "Trying to update BrokerDataResource IdealState!" ) ; final IdealState idealState = _helixAdmin . getResourceIdealState ( _helixClusterName , CommonConstants . Helix . BROKER_RESOURCE_INSTANCE ) ; String tableName = tableConfig . getTableName ( ) ; for ( String instanceName : _helixAdmin . getInstancesInClusterWithTag ( _helixClusterName , brokerTenant ) ) { idealState . setPartitionState ( tableName , instanceName , BrokerOnlineOfflineStateModel . ONLINE ) ; } if ( idealState != null ) { _helixAdmin . setResourceIdealState ( _helixClusterName , CommonConstants . Helix . BROKER_RESOURCE_INSTANCE , idealState ) ; } } catch ( final Exception e ) { } }
private void handleBrokerResource ( AbstractTableConfig tableConfig ) { try { String brokerTenant = ControllerTenantNameBuilder . getBrokerTenantNameForTenant ( tableConfig . getTenantConfig ( ) . getBroker ( ) ) ; if ( _helixAdmin . getInstancesInClusterWithTag ( _helixClusterName , brokerTenant ) . isEmpty ( ) ) { throw new RuntimeException ( "broker tenant : " + tableConfig . getTenantConfig ( ) . getBroker ( ) + " is not existed!" ) ; } LOGGER . info ( "Trying to update BrokerDataResource IdealState!" ) ; final IdealState idealState = _helixAdmin . getResourceIdealState ( _helixClusterName , CommonConstants . Helix . BROKER_RESOURCE_INSTANCE ) ; String tableName = tableConfig . getTableName ( ) ; for ( String instanceName : _helixAdmin . getInstancesInClusterWithTag ( _helixClusterName , brokerTenant ) ) { idealState . setPartitionState ( tableName , instanceName , BrokerOnlineOfflineStateModel . ONLINE ) ; } if ( idealState != null ) { _helixAdmin . setResourceIdealState ( _helixClusterName , CommonConstants . Helix . BROKER_RESOURCE_INSTANCE , idealState ) ; } } catch ( final Exception e ) { LOGGER . warn ( "Caught exception while creating broker" , e ) ; } }
389
public int execute ( StratosCommandContext context , String [ ] args , Option [ ] alreadyParsedOpts ) throws CommandException { if ( log . isDebugEnabled ( ) ) { } if ( args == null || args . length == 0 ) { RestCommandLineService . getInstance ( ) . listCartridgeGroups ( ) ; return CliConstants . COMMAND_SUCCESSFULL ; } else { context . getStratosApplication ( ) . printUsage ( getName ( ) ) ; return CliConstants . COMMAND_FAILED ; } }
public int execute ( StratosCommandContext context , String [ ] args , Option [ ] alreadyParsedOpts ) throws CommandException { if ( log . isDebugEnabled ( ) ) { log . debug ( "Executing {} command..." , getName ( ) ) ; } if ( args == null || args . length == 0 ) { RestCommandLineService . getInstance ( ) . listCartridgeGroups ( ) ; return CliConstants . COMMAND_SUCCESSFULL ; } else { context . getStratosApplication ( ) . printUsage ( getName ( ) ) ; return CliConstants . COMMAND_FAILED ; } }
390
public void configurePipeline ( PipelineConfigurer pipelineConfigurer ) { FailureCollector collector = pipelineConfigurer . getStageConfigurer ( ) . getFailureCollector ( ) ; config . validate ( collector ) ; collector . getOrThrowException ( ) ; if ( config . containsMacro ( NAME_FORMAT ) ) { for ( FileFormat f : FileFormat . values ( ) ) { try { pipelineConfigurer . usePlugin ( ValidatingInputFormat . PLUGIN_TYPE , f . name ( ) . toLowerCase ( ) , f . name ( ) . toLowerCase ( ) , config . getRawProperties ( ) ) ; } catch ( InvalidPluginConfigException e ) { } } return ; } String fileFormat = config . getFormatName ( ) ; Schema schema = config . getSchema ( ) ; PluginProperties . Builder builder = PluginProperties . builder ( ) ; builder . addAll ( config . getRawProperties ( ) . getProperties ( ) ) ; if ( shouldGetSchema ( ) ) { builder . add ( FILE_SYSTEM_PROPERTIES , GSON . toJson ( getFileSystemProperties ( null ) ) ) ; } ValidatingInputFormat validatingInputFormat = pipelineConfigurer . usePlugin ( ValidatingInputFormat . PLUGIN_TYPE , fileFormat , fileFormat , builder . build ( ) ) ; FormatContext context = new FormatContext ( collector , null ) ; validateInputFormatProvider ( context , fileFormat , validatingInputFormat ) ; if ( validatingInputFormat != null && shouldGetSchema ( ) ) { schema = validatingInputFormat . getSchema ( context ) ; } validatePathField ( collector , schema ) ; pipelineConfigurer . getStageConfigurer ( ) . setOutputSchema ( schema ) ; }
public void configurePipeline ( PipelineConfigurer pipelineConfigurer ) { FailureCollector collector = pipelineConfigurer . getStageConfigurer ( ) . getFailureCollector ( ) ; config . validate ( collector ) ; collector . getOrThrowException ( ) ; if ( config . containsMacro ( NAME_FORMAT ) ) { for ( FileFormat f : FileFormat . values ( ) ) { try { pipelineConfigurer . usePlugin ( ValidatingInputFormat . PLUGIN_TYPE , f . name ( ) . toLowerCase ( ) , f . name ( ) . toLowerCase ( ) , config . getRawProperties ( ) ) ; } catch ( InvalidPluginConfigException e ) { LOG . warn ( "Failed to register format '{}', which means it cannot be used when the pipeline is run. " + "Missing properties: {}, invalid properties: {}" , f . name ( ) , e . getMissingProperties ( ) , e . getInvalidProperties ( ) . stream ( ) . map ( InvalidPluginProperty :: getName ) . collect ( Collectors . toList ( ) ) ) ; } } return ; } String fileFormat = config . getFormatName ( ) ; Schema schema = config . getSchema ( ) ; PluginProperties . Builder builder = PluginProperties . builder ( ) ; builder . addAll ( config . getRawProperties ( ) . getProperties ( ) ) ; if ( shouldGetSchema ( ) ) { builder . add ( FILE_SYSTEM_PROPERTIES , GSON . toJson ( getFileSystemProperties ( null ) ) ) ; } ValidatingInputFormat validatingInputFormat = pipelineConfigurer . usePlugin ( ValidatingInputFormat . PLUGIN_TYPE , fileFormat , fileFormat , builder . build ( ) ) ; FormatContext context = new FormatContext ( collector , null ) ; validateInputFormatProvider ( context , fileFormat , validatingInputFormat ) ; if ( validatingInputFormat != null && shouldGetSchema ( ) ) { schema = validatingInputFormat . getSchema ( context ) ; } validatePathField ( collector , schema ) ; pipelineConfigurer . getStageConfigurer ( ) . setOutputSchema ( schema ) ; }
391
private boolean outsideDates ( CalEvent event , Date start , Date end ) { if ( start != null ) { if ( event . getStartDate ( ) . before ( start ) ) { return true ; } } if ( end != null ) { if ( event . getEndDate ( ) . after ( end ) ) { log . info ( " after end: " + event . getEndDate ( ) + " < " + end ) ; return true ; } } return false ; }
private boolean outsideDates ( CalEvent event , Date start , Date end ) { if ( start != null ) { if ( event . getStartDate ( ) . before ( start ) ) { log . info ( " before start: " + event . getStartDate ( ) + " < " + start ) ; return true ; } } if ( end != null ) { if ( event . getEndDate ( ) . after ( end ) ) { log . info ( " after end: " + event . getEndDate ( ) + " < " + end ) ; return true ; } } return false ; }
392
private boolean outsideDates ( CalEvent event , Date start , Date end ) { if ( start != null ) { if ( event . getStartDate ( ) . before ( start ) ) { log . info ( " before start: " + event . getStartDate ( ) + " < " + start ) ; return true ; } } if ( end != null ) { if ( event . getEndDate ( ) . after ( end ) ) { return true ; } } return false ; }
private boolean outsideDates ( CalEvent event , Date start , Date end ) { if ( start != null ) { if ( event . getStartDate ( ) . before ( start ) ) { log . info ( " before start: " + event . getStartDate ( ) + " < " + start ) ; return true ; } } if ( end != null ) { if ( event . getEndDate ( ) . after ( end ) ) { log . info ( " after end: " + event . getEndDate ( ) + " < " + end ) ; return true ; } } return false ; }
393
public void onStatusChanged ( boolean status ) { if ( status ) { initJob ( ) ; } else { updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . COMMUNICATION_ERROR , "Communication lost with " + thing . getLabel ( ) ) ; } }
public void onStatusChanged ( boolean status ) { logger . debug ( "UPnP device {} received status update {}" , thing . getLabel ( ) , status ) ; if ( status ) { initJob ( ) ; } else { updateStatus ( ThingStatus . OFFLINE , ThingStatusDetail . COMMUNICATION_ERROR , "Communication lost with " + thing . getLabel ( ) ) ; } }
394
public void warn ( String msg , Throwable t ) { warnMessages . add ( new LogMessage ( null , msg , t ) ) ; }
public void warn ( String msg , Throwable t ) { warnMessages . add ( new LogMessage ( null , msg , t ) ) ; logger . warn ( msg , t ) ; }
395
private PersistedCatalogState filterBundlesAndCatalogInPersistedState ( PersistedCatalogState persistedState , RebindLogger rebindLogger ) { CatalogUpgrades catalogUpgrades = CatalogUpgrades . getFromManagementContext ( managementContext ) ; if ( catalogUpgrades . isEmpty ( ) ) { return persistedState ; } else { } Map < VersionedName , InstallableManagedBundle > bundles = new LinkedHashMap < > ( ) ; for ( Map . Entry < VersionedName , InstallableManagedBundle > entry : persistedState . getBundles ( ) . entrySet ( ) ) { if ( catalogUpgrades . isBundleRemoved ( entry . getKey ( ) ) ) { rebindLogger . debug ( "Filtering out persisted bundle " + entry . getKey ( ) ) ; } else { bundles . put ( entry . getKey ( ) , entry . getValue ( ) ) ; } } List < CatalogItem < ? , ? > > legacyCatalogItems = new ArrayList < > ( ) ; for ( CatalogItem < ? , ? > legacyCatalogItem : persistedState . getLegacyCatalogItems ( ) ) { if ( catalogUpgrades . isLegacyItemRemoved ( legacyCatalogItem ) ) { rebindLogger . debug ( "Filtering out persisted legacy catalog item " + legacyCatalogItem . getId ( ) ) ; } else { legacyCatalogItems . add ( legacyCatalogItem ) ; } } return new PersistedCatalogState ( bundles , legacyCatalogItems ) ; }
private PersistedCatalogState filterBundlesAndCatalogInPersistedState ( PersistedCatalogState persistedState , RebindLogger rebindLogger ) { CatalogUpgrades catalogUpgrades = CatalogUpgrades . getFromManagementContext ( managementContext ) ; if ( catalogUpgrades . isEmpty ( ) ) { return persistedState ; } else { rebindLogger . info ( "Filtering out persisted catalog: removedBundles=" + catalogUpgrades . getRemovedBundles ( ) + "; removedLegacyItems=" + catalogUpgrades . getRemovedLegacyItems ( ) ) ; } Map < VersionedName , InstallableManagedBundle > bundles = new LinkedHashMap < > ( ) ; for ( Map . Entry < VersionedName , InstallableManagedBundle > entry : persistedState . getBundles ( ) . entrySet ( ) ) { if ( catalogUpgrades . isBundleRemoved ( entry . getKey ( ) ) ) { rebindLogger . debug ( "Filtering out persisted bundle " + entry . getKey ( ) ) ; } else { bundles . put ( entry . getKey ( ) , entry . getValue ( ) ) ; } } List < CatalogItem < ? , ? > > legacyCatalogItems = new ArrayList < > ( ) ; for ( CatalogItem < ? , ? > legacyCatalogItem : persistedState . getLegacyCatalogItems ( ) ) { if ( catalogUpgrades . isLegacyItemRemoved ( legacyCatalogItem ) ) { rebindLogger . debug ( "Filtering out persisted legacy catalog item " + legacyCatalogItem . getId ( ) ) ; } else { legacyCatalogItems . add ( legacyCatalogItem ) ; } } return new PersistedCatalogState ( bundles , legacyCatalogItems ) ; }
396
private PersistedCatalogState filterBundlesAndCatalogInPersistedState ( PersistedCatalogState persistedState , RebindLogger rebindLogger ) { CatalogUpgrades catalogUpgrades = CatalogUpgrades . getFromManagementContext ( managementContext ) ; if ( catalogUpgrades . isEmpty ( ) ) { return persistedState ; } else { rebindLogger . info ( "Filtering out persisted catalog: removedBundles=" + catalogUpgrades . getRemovedBundles ( ) + "; removedLegacyItems=" + catalogUpgrades . getRemovedLegacyItems ( ) ) ; } Map < VersionedName , InstallableManagedBundle > bundles = new LinkedHashMap < > ( ) ; for ( Map . Entry < VersionedName , InstallableManagedBundle > entry : persistedState . getBundles ( ) . entrySet ( ) ) { if ( catalogUpgrades . isBundleRemoved ( entry . getKey ( ) ) ) { } else { bundles . put ( entry . getKey ( ) , entry . getValue ( ) ) ; } } List < CatalogItem < ? , ? > > legacyCatalogItems = new ArrayList < > ( ) ; for ( CatalogItem < ? , ? > legacyCatalogItem : persistedState . getLegacyCatalogItems ( ) ) { if ( catalogUpgrades . isLegacyItemRemoved ( legacyCatalogItem ) ) { rebindLogger . debug ( "Filtering out persisted legacy catalog item " + legacyCatalogItem . getId ( ) ) ; } else { legacyCatalogItems . add ( legacyCatalogItem ) ; } } return new PersistedCatalogState ( bundles , legacyCatalogItems ) ; }
private PersistedCatalogState filterBundlesAndCatalogInPersistedState ( PersistedCatalogState persistedState , RebindLogger rebindLogger ) { CatalogUpgrades catalogUpgrades = CatalogUpgrades . getFromManagementContext ( managementContext ) ; if ( catalogUpgrades . isEmpty ( ) ) { return persistedState ; } else { rebindLogger . info ( "Filtering out persisted catalog: removedBundles=" + catalogUpgrades . getRemovedBundles ( ) + "; removedLegacyItems=" + catalogUpgrades . getRemovedLegacyItems ( ) ) ; } Map < VersionedName , InstallableManagedBundle > bundles = new LinkedHashMap < > ( ) ; for ( Map . Entry < VersionedName , InstallableManagedBundle > entry : persistedState . getBundles ( ) . entrySet ( ) ) { if ( catalogUpgrades . isBundleRemoved ( entry . getKey ( ) ) ) { rebindLogger . debug ( "Filtering out persisted bundle " + entry . getKey ( ) ) ; } else { bundles . put ( entry . getKey ( ) , entry . getValue ( ) ) ; } } List < CatalogItem < ? , ? > > legacyCatalogItems = new ArrayList < > ( ) ; for ( CatalogItem < ? , ? > legacyCatalogItem : persistedState . getLegacyCatalogItems ( ) ) { if ( catalogUpgrades . isLegacyItemRemoved ( legacyCatalogItem ) ) { rebindLogger . debug ( "Filtering out persisted legacy catalog item " + legacyCatalogItem . getId ( ) ) ; } else { legacyCatalogItems . add ( legacyCatalogItem ) ; } } return new PersistedCatalogState ( bundles , legacyCatalogItems ) ; }
397
private PersistedCatalogState filterBundlesAndCatalogInPersistedState ( PersistedCatalogState persistedState , RebindLogger rebindLogger ) { CatalogUpgrades catalogUpgrades = CatalogUpgrades . getFromManagementContext ( managementContext ) ; if ( catalogUpgrades . isEmpty ( ) ) { return persistedState ; } else { rebindLogger . info ( "Filtering out persisted catalog: removedBundles=" + catalogUpgrades . getRemovedBundles ( ) + "; removedLegacyItems=" + catalogUpgrades . getRemovedLegacyItems ( ) ) ; } Map < VersionedName , InstallableManagedBundle > bundles = new LinkedHashMap < > ( ) ; for ( Map . Entry < VersionedName , InstallableManagedBundle > entry : persistedState . getBundles ( ) . entrySet ( ) ) { if ( catalogUpgrades . isBundleRemoved ( entry . getKey ( ) ) ) { rebindLogger . debug ( "Filtering out persisted bundle " + entry . getKey ( ) ) ; } else { bundles . put ( entry . getKey ( ) , entry . getValue ( ) ) ; } } List < CatalogItem < ? , ? > > legacyCatalogItems = new ArrayList < > ( ) ; for ( CatalogItem < ? , ? > legacyCatalogItem : persistedState . getLegacyCatalogItems ( ) ) { if ( catalogUpgrades . isLegacyItemRemoved ( legacyCatalogItem ) ) { } else { legacyCatalogItems . add ( legacyCatalogItem ) ; } } return new PersistedCatalogState ( bundles , legacyCatalogItems ) ; }
private PersistedCatalogState filterBundlesAndCatalogInPersistedState ( PersistedCatalogState persistedState , RebindLogger rebindLogger ) { CatalogUpgrades catalogUpgrades = CatalogUpgrades . getFromManagementContext ( managementContext ) ; if ( catalogUpgrades . isEmpty ( ) ) { return persistedState ; } else { rebindLogger . info ( "Filtering out persisted catalog: removedBundles=" + catalogUpgrades . getRemovedBundles ( ) + "; removedLegacyItems=" + catalogUpgrades . getRemovedLegacyItems ( ) ) ; } Map < VersionedName , InstallableManagedBundle > bundles = new LinkedHashMap < > ( ) ; for ( Map . Entry < VersionedName , InstallableManagedBundle > entry : persistedState . getBundles ( ) . entrySet ( ) ) { if ( catalogUpgrades . isBundleRemoved ( entry . getKey ( ) ) ) { rebindLogger . debug ( "Filtering out persisted bundle " + entry . getKey ( ) ) ; } else { bundles . put ( entry . getKey ( ) , entry . getValue ( ) ) ; } } List < CatalogItem < ? , ? > > legacyCatalogItems = new ArrayList < > ( ) ; for ( CatalogItem < ? , ? > legacyCatalogItem : persistedState . getLegacyCatalogItems ( ) ) { if ( catalogUpgrades . isLegacyItemRemoved ( legacyCatalogItem ) ) { rebindLogger . debug ( "Filtering out persisted legacy catalog item " + legacyCatalogItem . getId ( ) ) ; } else { legacyCatalogItems . add ( legacyCatalogItem ) ; } } return new PersistedCatalogState ( bundles , legacyCatalogItems ) ; }
398
public static Map < String , String > getExternalResourceConfigurationKeys ( Configuration config , String suffix ) { final Set < String > resourceSet = getExternalResourceSet ( config ) ; final Map < String , String > configKeysToResourceNameMap = new HashMap < > ( ) ; if ( resourceSet . isEmpty ( ) ) { return Collections . emptyMap ( ) ; } final Map < String , String > externalResourceConfigs = new HashMap < > ( ) ; for ( String resourceName : resourceSet ) { final ConfigOption < String > configKeyOption = key ( ExternalResourceOptions . getSystemConfigKeyConfigOptionForResource ( resourceName , suffix ) ) . stringType ( ) . noDefaultValue ( ) ; final String configKey = config . get ( configKeyOption ) ; if ( StringUtils . isNullOrWhitespaceOnly ( configKey ) ) { LOG . warn ( "Could not find valid {} for {}. Will ignore that resource." , configKeyOption . key ( ) , resourceName ) ; } else { configKeysToResourceNameMap . compute ( configKey , ( ignored , previousResource ) -> { if ( previousResource != null ) { LOG . warn ( "Duplicate config key {} occurred for external resources, the one named {} will overwrite the value." , configKey , resourceName ) ; externalResourceConfigs . remove ( previousResource ) ; } return resourceName ; } ) ; externalResourceConfigs . put ( resourceName , configKey ) ; } } return externalResourceConfigs ; }
public static Map < String , String > getExternalResourceConfigurationKeys ( Configuration config , String suffix ) { final Set < String > resourceSet = getExternalResourceSet ( config ) ; final Map < String , String > configKeysToResourceNameMap = new HashMap < > ( ) ; LOG . info ( "Enabled external resources: {}" , resourceSet ) ; if ( resourceSet . isEmpty ( ) ) { return Collections . emptyMap ( ) ; } final Map < String , String > externalResourceConfigs = new HashMap < > ( ) ; for ( String resourceName : resourceSet ) { final ConfigOption < String > configKeyOption = key ( ExternalResourceOptions . getSystemConfigKeyConfigOptionForResource ( resourceName , suffix ) ) . stringType ( ) . noDefaultValue ( ) ; final String configKey = config . get ( configKeyOption ) ; if ( StringUtils . isNullOrWhitespaceOnly ( configKey ) ) { LOG . warn ( "Could not find valid {} for {}. Will ignore that resource." , configKeyOption . key ( ) , resourceName ) ; } else { configKeysToResourceNameMap . compute ( configKey , ( ignored , previousResource ) -> { if ( previousResource != null ) { LOG . warn ( "Duplicate config key {} occurred for external resources, the one named {} will overwrite the value." , configKey , resourceName ) ; externalResourceConfigs . remove ( previousResource ) ; } return resourceName ; } ) ; externalResourceConfigs . put ( resourceName , configKey ) ; } } return externalResourceConfigs ; }
399
public static Map < String , String > getExternalResourceConfigurationKeys ( Configuration config , String suffix ) { final Set < String > resourceSet = getExternalResourceSet ( config ) ; final Map < String , String > configKeysToResourceNameMap = new HashMap < > ( ) ; LOG . info ( "Enabled external resources: {}" , resourceSet ) ; if ( resourceSet . isEmpty ( ) ) { return Collections . emptyMap ( ) ; } final Map < String , String > externalResourceConfigs = new HashMap < > ( ) ; for ( String resourceName : resourceSet ) { final ConfigOption < String > configKeyOption = key ( ExternalResourceOptions . getSystemConfigKeyConfigOptionForResource ( resourceName , suffix ) ) . stringType ( ) . noDefaultValue ( ) ; final String configKey = config . get ( configKeyOption ) ; if ( StringUtils . isNullOrWhitespaceOnly ( configKey ) ) { } else { configKeysToResourceNameMap . compute ( configKey , ( ignored , previousResource ) -> { if ( previousResource != null ) { LOG . warn ( "Duplicate config key {} occurred for external resources, the one named {} will overwrite the value." , configKey , resourceName ) ; externalResourceConfigs . remove ( previousResource ) ; } return resourceName ; } ) ; externalResourceConfigs . put ( resourceName , configKey ) ; } } return externalResourceConfigs ; }
public static Map < String , String > getExternalResourceConfigurationKeys ( Configuration config , String suffix ) { final Set < String > resourceSet = getExternalResourceSet ( config ) ; final Map < String , String > configKeysToResourceNameMap = new HashMap < > ( ) ; LOG . info ( "Enabled external resources: {}" , resourceSet ) ; if ( resourceSet . isEmpty ( ) ) { return Collections . emptyMap ( ) ; } final Map < String , String > externalResourceConfigs = new HashMap < > ( ) ; for ( String resourceName : resourceSet ) { final ConfigOption < String > configKeyOption = key ( ExternalResourceOptions . getSystemConfigKeyConfigOptionForResource ( resourceName , suffix ) ) . stringType ( ) . noDefaultValue ( ) ; final String configKey = config . get ( configKeyOption ) ; if ( StringUtils . isNullOrWhitespaceOnly ( configKey ) ) { LOG . warn ( "Could not find valid {} for {}. Will ignore that resource." , configKeyOption . key ( ) , resourceName ) ; } else { configKeysToResourceNameMap . compute ( configKey , ( ignored , previousResource ) -> { if ( previousResource != null ) { LOG . warn ( "Duplicate config key {} occurred for external resources, the one named {} will overwrite the value." , configKey , resourceName ) ; externalResourceConfigs . remove ( previousResource ) ; } return resourceName ; } ) ; externalResourceConfigs . put ( resourceName , configKey ) ; } } return externalResourceConfigs ; }