signature (string, length 43–39.1k) | implementation (string, length 0–450k)
---|---
public class AfplibPackageImpl { /** * <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated */
public EClass getPTD1 ( ) { } } | if ( ptd1EClass == null ) { ptd1EClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( AfplibPackage . eNS_URI ) . getEClassifiers ( ) . get ( 318 ) ; } return ptd1EClass ; |
public class RawResponse { /** * Wrap the response input stream if it is compressed; return the input itself if no compression is used. */
private InputStream decompressBody ( ) { } } | if ( ! decompress ) { return body ; } // even when there is no body, some servers still set a content-encoding header,
// and wrapping an empty input stream in GZIPInputStream would throw an exception, so we should check for this
if ( method . equals ( Methods . HEAD ) || ( statusCode >= 100 && statusCode < 200 ) || statusCode == NOT_MODIFIED || statusCode == NO_CONTENT ) { return body ; } String contentEncoding = headers . getHeader ( NAME_CONTENT_ENCODING ) ; if ( contentEncoding == null ) { return body ; } // should we remove the content-encoding header here?
switch ( contentEncoding ) { case "gzip" : try { return new GZIPInputStream ( body ) ; } catch ( IOException e ) { Closeables . closeQuietly ( body ) ; throw new RequestsException ( e ) ; } case "deflate" : // Note: deflate implementations may or may not wrap the data in a zlib header, due to RFC ambiguity.
// here we handle deflate without a zlib header
return new InflaterInputStream ( body , new Inflater ( true ) ) ; case "identity" : case "compress" : // historic ; deprecated in most applications and replaced by gzip or deflate
default : return body ; } |
public class CmsVfsSitemapService { /** * Checks whether a resource is a default file of a folder . < p >
* @ param resource the resource to check
* @ return true if the resource is the default file of a folder
* @ throws CmsException if something goes wrong */
private boolean isDefaultFile ( CmsResource resource ) throws CmsException { } } | CmsObject cms = getCmsObject ( ) ; if ( resource . isFolder ( ) ) { return false ; } CmsResource parent = cms . readResource ( CmsResource . getParentFolder ( cms . getSitePath ( resource ) ) , CmsResourceFilter . ONLY_VISIBLE_NO_DELETED ) ; CmsResource defaultFile = cms . readDefaultFile ( parent , CmsResourceFilter . ONLY_VISIBLE_NO_DELETED ) ; return resource . equals ( defaultFile ) ; |
public class WSJdbcPreparedStatement { /** * Method getParameterMetaData .
* < p > Retrieves the number , types and properties of this PreparedStatement object ' s
* parameters . < / p >
* @ return a ParameterMetaData object that contains information about the number ,
* types and properties of this PreparedStatement object ' s parameters
* @ throws SQLException If a database access error occurs */
public ParameterMetaData getParameterMetaData ( ) throws SQLException { } } | final boolean isTraceOn = TraceComponent . isAnyTracingEnabled ( ) ; if ( isTraceOn && tc . isEntryEnabled ( ) ) Tr . entry ( this , tc , "getParameterMetaData" ) ; ParameterMetaData pmd = null ; try { pmd = pstmtImpl . getParameterMetaData ( ) ; } catch ( SQLException ex ) { FFDCFilter . processException ( ex , "com.ibm.ws.rsadapter.jdbc.WSJdbcPreparedStatement.getParameterMetaData" , "1442" , this ) ; if ( isTraceOn && tc . isEntryEnabled ( ) ) Tr . exit ( this , tc , "getParameterMetaData" , "Exception, details in FFDC" ) ; throw WSJdbcUtil . mapException ( this , ex ) ; } catch ( NullPointerException nullX ) { // No FFDC code needed ; we might be closed .
if ( isTraceOn && tc . isEntryEnabled ( ) ) Tr . exit ( this , tc , "getParameterMetaData" , "Exception, details in FFDC" ) ; throw runtimeXIfNotClosed ( nullX ) ; } if ( isTraceOn && tc . isEntryEnabled ( ) ) Tr . exit ( this , tc , "getParameterMetaData" , pmd ) ; return pmd ; |
public class CmsDynamicFunctionParser { /** * Converts a ( possibly null ) content value location to a string . < p >
* @ param cms the current CMS context
* @ param location the content value location
* @ param defaultValue the value to return if the location is null
* @ return the string value of the content value location */
protected String getStringValue ( CmsObject cms , I_CmsXmlContentValueLocation location , String defaultValue ) { } } | if ( location == null ) { return defaultValue ; } return location . asString ( cms ) ; |
public class ModelsImpl { /** * Create an entity role for an entity in the application .
* @ param appId The application ID .
* @ param versionId The version ID .
* @ param entityId The entity model ID .
* @ param createPatternAnyEntityRoleOptionalParameter the object representing the optional parameters to be set before calling this API
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < UUID > createPatternAnyEntityRoleAsync ( UUID appId , String versionId , UUID entityId , CreatePatternAnyEntityRoleOptionalParameter createPatternAnyEntityRoleOptionalParameter , final ServiceCallback < UUID > serviceCallback ) { } } | return ServiceFuture . fromResponse ( createPatternAnyEntityRoleWithServiceResponseAsync ( appId , versionId , entityId , createPatternAnyEntityRoleOptionalParameter ) , serviceCallback ) ; |
public class ImageAttribute { /** * The launch permissions .
* @ return The launch permissions . */
public java . util . List < LaunchPermission > getLaunchPermissions ( ) { } } | if ( launchPermissions == null ) { launchPermissions = new com . amazonaws . internal . SdkInternalList < LaunchPermission > ( ) ; } return launchPermissions ; |
public class AsyncBufferedInputStream { public void run ( ) { } } | try { final byte [ ] buffer = new byte [ 512 * 1024 ] ; while ( ! this . closed . get ( ) ) { int r = this . is . read ( buffer , 0 , buffer . length ) ; if ( r < 0 ) throw new EOFException ( ) ; int offset = 0 ; while ( r > 0 ) { final int w = write ( buffer , offset , r ) ; r -= w ; offset += w ; } } } catch ( IOException e ) { this . exception = e ; } catch ( Exception e ) { logger . error ( "failed to transfer data" , e ) ; } finally { if ( ! this . closed . get ( ) ) { try { close ( ) ; } catch ( IOException e ) { logger . error ( "failed to close is" , e ) ; } } } |
public class V1InstanceCreator { /** * Create a new retrospective with a name .
* @ param name The name of the retrospective .
* @ param project The project this retrospective belongs to .
* @ return A newly minted Retrospective that exists in the VersionOne
* system . */
public Retrospective retrospective ( String name , Project project ) { } } | return retrospective ( name , project , null ) ; |
public class StringUtf8Utils { /** * This method must produce the same result as JDK's String.getBytes. */
public static byte [ ] encodeUTF8 ( String str ) { } } | byte [ ] bytes = allocateReuseBytes ( str . length ( ) * MAX_BYTES_PER_CHAR ) ; int len = encodeUTF8 ( str , bytes ) ; return Arrays . copyOf ( bytes , len ) ; |
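A minimal standalone sketch (not part of the dataset row above) illustrating the contract stated in the javadoc: the hand-rolled encoder must agree with the JDK's String.getBytes under UTF-8. The 3-bytes-per-char worst-case constant is an assumption used only to show how the reusable buffer would be sized.

```java
import java.nio.charset.StandardCharsets;

public class Utf8ContractCheck {
    // Assumed worst case: a single Java char never needs more than 3 UTF-8 bytes
    // (supplementary code points are encoded from two chars into 4 bytes).
    static final int MAX_BYTES_PER_CHAR = 3;

    public static void main(String[] args) {
        String s = "héllo \u20AC";                           // ASCII, 2-byte and 3-byte characters
        byte[] viaJdk = s.getBytes(StandardCharsets.UTF_8);  // the reference result
        int worstCase = s.length() * MAX_BYTES_PER_CHAR;     // size a reusable buffer would get
        System.out.println(viaJdk.length + " bytes, worst-case buffer " + worstCase);
    }
}
```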
public class FindbugsPlugin { /** * Read saved bug collection and findbugs project from file . Will populate
* the bug collection and findbugs project session properties if successful .
* If there is no saved bug collection and project for the eclipse project ,
* then FileNotFoundException will be thrown .
* @ param project
* the eclipse project
* @ param monitor
* a progress monitor
* @ throws java . io . FileNotFoundException
* the saved bug collection doesn ' t exist
* @ throws IOException
* @ throws DocumentException
* @ throws CoreException */
private static void readBugCollectionAndProject ( IProject project , IProgressMonitor monitor ) throws IOException , DocumentException , CoreException { } } | SortedBugCollection bugCollection ; IPath bugCollectionPath = getBugCollectionFile ( project ) ; // Don ' t turn the path to an IFile because it isn ' t local to the
// project .
// see the javadoc for org . eclipse . core . runtime . Plugin
File bugCollectionFile = bugCollectionPath . toFile ( ) ; if ( ! bugCollectionFile . exists ( ) ) { // throw new
// FileNotFoundException ( bugCollectionFile . getLocation ( ) . toOSString ( ) ) ;
getDefault ( ) . logInfo ( "creating new bug collection: " + bugCollectionPath . toOSString ( ) ) ; createDefaultEmptyBugCollection ( project ) ; // since we no longer
// throw , have to do this
// here
return ; } UserPreferences prefs = getUserPreferences ( project ) ; bugCollection = new SortedBugCollection ( ) ; bugCollection . getProject ( ) . setGuiCallback ( new EclipseGuiCallback ( project ) ) ; bugCollection . readXML ( bugCollectionFile ) ; cacheBugCollectionAndProject ( project , bugCollection , bugCollection . getProject ( ) ) ; |
public class DistributedLoadCommand { /** * Loads a file or directory in Alluxio space , makes it resident in memory .
* @ param filePath The { @ link AlluxioURI } path to load into Alluxio memory
* @ throws AlluxioException when Alluxio exception occurs
* @ throws IOException when non - Alluxio exception occurs */
private void load ( AlluxioURI filePath , int replication ) throws AlluxioException , IOException , InterruptedException { } } | URIStatus status = mFileSystem . getStatus ( filePath ) ; if ( status . isFolder ( ) ) { List < URIStatus > statuses = mFileSystem . listStatus ( filePath ) ; for ( URIStatus uriStatus : statuses ) { AlluxioURI newPath = new AlluxioURI ( uriStatus . getPath ( ) ) ; load ( newPath , replication ) ; } } else { Thread thread = JobGrpcClientUtils . createProgressThread ( System . out ) ; thread . start ( ) ; try { JobGrpcClientUtils . run ( new LoadConfig ( filePath . getPath ( ) , replication ) , 3 , mFsContext . getPathConf ( filePath ) ) ; } finally { thread . interrupt ( ) ; } } System . out . println ( filePath + " loaded" ) ; |
public class CmsSecurityManager { /** * Writes a list of properties for a specified resource. <p>
* Code calling this method has to ensure that no two properties
* <code>a, b</code> with <code>a.equals(b)</code> are contained in the specified list,
* otherwise an exception is thrown. <p>
* @ param context the current request context
* @ param resource the resource to write the properties for
* @ param properties the list of properties to write
* @ throws CmsException if something goes wrong
* @ throws CmsSecurityException if the user has insufficient permission for the given resource ( { @ link CmsPermissionSet # ACCESS _ WRITE } required )
* @ see CmsObject # writePropertyObjects ( String , List )
* @ see org . opencms . file . types . I _ CmsResourceType # writePropertyObjects ( CmsObject , CmsSecurityManager , CmsResource , List ) */
public void writePropertyObjects ( CmsRequestContext context , CmsResource resource , List < CmsProperty > properties ) throws CmsException , CmsSecurityException { } } | CmsDbContext dbc = m_dbContextFactory . getDbContext ( context ) ; try { checkOfflineProject ( dbc ) ; checkPermissions ( dbc , resource , CmsPermissionSet . ACCESS_WRITE , true , CmsResourceFilter . IGNORE_EXPIRATION ) ; // write the properties
m_driverManager . writePropertyObjects ( dbc , resource , properties , true ) ; } catch ( Exception e ) { dbc . report ( null , Messages . get ( ) . container ( Messages . ERR_WRITE_PROPS_1 , context . getSitePath ( resource ) ) , e ) ; } finally { dbc . clear ( ) ; } |
public class Command { /** * Flattens ( serializes , dehydrates , etc . ) this instance to a binary representation .
* @ return a binary representation
* @ throws IOException */
public byte [ ] toBytes ( ) throws IOException { } } | ByteArrayOutputStream bout = new ByteArrayOutputStream ( ) ; ZipOutputStream stream = new ZipOutputStream ( bout ) ; ZipEntry ze = new ZipEntry ( "async_message" ) ; stream . putNextEntry ( ze ) ; stream . write ( toXml ( ) . getBytes ( ) ) ; stream . flush ( ) ; stream . close ( ) ; return bout . toByteArray ( ) ; |
public class DBInstance { /** * The AWS Identity and Access Management ( IAM ) roles associated with the DB instance .
* @ param associatedRoles
* The AWS Identity and Access Management ( IAM ) roles associated with the DB instance . */
public void setAssociatedRoles ( java . util . Collection < DBInstanceRole > associatedRoles ) { } } | if ( associatedRoles == null ) { this . associatedRoles = null ; return ; } this . associatedRoles = new com . amazonaws . internal . SdkInternalList < DBInstanceRole > ( associatedRoles ) ; |
public class StartTimerDecisionAttributesMarshaller { /** * Marshall the given parameter object . */
public void marshall ( StartTimerDecisionAttributes startTimerDecisionAttributes , ProtocolMarshaller protocolMarshaller ) { } } | if ( startTimerDecisionAttributes == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( startTimerDecisionAttributes . getTimerId ( ) , TIMERID_BINDING ) ; protocolMarshaller . marshall ( startTimerDecisionAttributes . getControl ( ) , CONTROL_BINDING ) ; protocolMarshaller . marshall ( startTimerDecisionAttributes . getStartToFireTimeout ( ) , STARTTOFIRETIMEOUT_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class LocaleData { /** * Returns LocaleDisplayPattern for this locale , e . g . , { 0 } ( { 1 } )
* @ return locale display pattern as a String . */
public String getLocaleDisplayPattern ( ) { } } | ICUResourceBundle locDispBundle = ( ICUResourceBundle ) langBundle . get ( LOCALE_DISPLAY_PATTERN ) ; String localeDisplayPattern = locDispBundle . getStringWithFallback ( PATTERN ) ; return localeDisplayPattern ; |
public class JobSchedulesImpl { /** * Checks whether the specified job schedule exists.
* @ param jobScheduleId The ID of the job schedule which you want to check .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the Boolean object */
public Observable < Boolean > existsAsync ( String jobScheduleId ) { } } | return existsWithServiceResponseAsync ( jobScheduleId ) . map ( new Func1 < ServiceResponseWithHeaders < Boolean , JobScheduleExistsHeaders > , Boolean > ( ) { @ Override public Boolean call ( ServiceResponseWithHeaders < Boolean , JobScheduleExistsHeaders > response ) { return response . body ( ) ; } } ) ; |
public class STCImpl { /** * <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated */
@ Override public void eSet ( int featureID , Object newValue ) { } } | switch ( featureID ) { case AfplibPackage . STC__FRGCOLOR : setFRGCOLOR ( ( Integer ) newValue ) ; return ; case AfplibPackage . STC__PRECSION : setPRECSION ( ( Integer ) newValue ) ; return ; } super . eSet ( featureID , newValue ) ; |
public class JFapByteBuffer { /** * Returns a dump of the specified number of bytes of the specified buffer .
* @ param buffer
* @ param bytesToDump
* @ return Returns a String containing a dump of the buffer . */
private static String getDumpBytes ( WsByteBuffer buffer , int bytesToDump , boolean rewind ) { } } | // Save the current position
int pos = buffer . position ( ) ; if ( rewind ) { buffer . rewind ( ) ; } byte [ ] data = null ; int start ; int count = bytesToDump ; if ( count > buffer . remaining ( ) || count == ENTIRE_BUFFER ) count = buffer . remaining ( ) ; if ( buffer . hasArray ( ) ) { data = buffer . array ( ) ; start = buffer . arrayOffset ( ) + buffer . position ( ) ; } else { data = new byte [ count ] ; buffer . get ( data ) ; start = 0 ; } String strData = "Dumping " + count + " bytes of buffer data:\r\n" ; if ( count > 0 ) strData += SibTr . formatBytes ( data , start , count ) ; // Return the position to where it should be
if ( rewind ) buffer . position ( pos ) ; return strData ; |
public class ReusableFutureLatch { /** * If the latch is released, completes the provided future without invoking the provided
* runnable. If the latch is not released, adds the provided future to the list to be
* notified and runs the provided runnable if one is not already running.
* If there are multiple calls to this method, only one caller's runnable will be invoked. If the
* runnable throws, the exception is propagated to the caller of this method, and later callers
* may have their runnable invoked (presuming that the latch is not released).
* @param willCallRelease A runnable that should result in {@link #release(Object)} being called.
* @param toNotify The future to notify once release is called. */
public void registerAndRunReleaser ( Runnable willCallRelease , CompletableFuture < T > toNotify ) { } } | boolean run = false ; boolean complete = false ; T result = null ; Throwable e = null ; synchronized ( lock ) { if ( released ) { complete = true ; result = this . result ; e = this . e ; } else { waitingFutures . add ( toNotify ) ; if ( runningThreadId == null ) { run = true ; runningThreadId = Thread . currentThread ( ) . getId ( ) ; } } } if ( run ) { log . debug ( "Running releaser now, runningThread:{}" , Thread . currentThread ( ) . getName ( ) ) ; boolean success = false ; try { willCallRelease . run ( ) ; success = true ; } finally { if ( ! success ) { synchronized ( lock ) { if ( runningThreadId != null && runningThreadId == Thread . currentThread ( ) . getId ( ) ) { runningThreadId = null ; } } } } } if ( complete ) { if ( e == null ) { toNotify . complete ( result ) ; } else { toNotify . completeExceptionally ( e ) ; } } |
public class Util { /** * Removes values from the array that meet the criteria for removal via the supplied
* { @ link Predicate } value */
@ SuppressWarnings ( "unchecked" ) public static < T > T [ ] removeValues ( T [ ] values , Predicate < T > shouldRemove , Class < T > type ) { } } | Collection < T > collection = new ArrayList < > ( values . length ) ; for ( T value : values ) { if ( shouldRemove . negate ( ) . test ( value ) ) { collection . add ( value ) ; } } T [ ] array = ( T [ ] ) Array . newInstance ( type , collection . size ( ) ) ; return collection . toArray ( array ) ; |
public class ObserverSpliterator { /** * Offers a new item. Returns true if the item was consumed,
* false if no other thread was waiting for the item.
* @ param t
* @ return */
public boolean offer ( T t ) { } } | try { return queue . offer ( t , offerTimeout , timeUnit ) ; } catch ( InterruptedException ex ) { throw new IllegalArgumentException ( ex ) ; } |
public class TreeEditDistance { /** * This method is a modified version of "tree_edit_graph(tree&, tree&, GRAPH<string,string>&)"
* from http://www.lsi.upc.es/~valiente/algorithm/combin.cpp. The modifications allow taking into account
* various semantic relations (other than equivalence) holding among tree nodes.
* @return true upon success, false otherwise */
private boolean calculateGraph ( ) { } } | // cache size of lists
int list1size = list1 . size ( ) ; int list2size = list2 . size ( ) ; // this stores the order number for each Node
HashMap < INode , Integer > orderNum1 = new HashMap < INode , Integer > ( ) ; HashMap < INode , Integer > orderNum2 = new HashMap < INode , Integer > ( ) ; // calculate preorder numeration and depth information for each node
preorderTreeDepth ( tree1 , orderNum1 , depth1 ) ; preorderTreeDepth ( tree2 , orderNum2 , depth2 ) ; // put all depth information into array ; ordering is by preorder
int [ ] d1 = new int [ list1size + 1 ] ; int [ ] d2 = new int [ list2size + 1 ] ; for ( INode a : list1 ) { d1 [ orderNum1 . get ( a ) ] = depth1 . get ( a ) ; } for ( INode a : list2 ) { d2 [ orderNum2 . get ( a ) ] = depth2 . get ( a ) ; } // clear graph
editDistanceGraph = new SimpleDirectedWeightedGraph ( ) ; // create vertexes for all tree1 / tree2 crossings
GraphVertexTuple [ ] [ ] vertexArray = new GraphVertexTuple [ list1size + 1 ] [ list2size + 1 ] ; for ( int i = 0 ; i <= list1size ; i ++ ) { for ( int j = 0 ; j <= list2size ; j ++ ) { GraphVertexTuple t = new GraphVertexTuple ( i , j ) ; vertexArray [ i ] [ j ] = t ; if ( ! editDistanceGraph . addVertex ( t ) ) return false ; } } // save eckpunkte
firstVertex = vertexArray [ 0 ] [ 0 ] ; lastVertex = vertexArray [ list1size ] [ list2size ] ; // delete edges at outer right
for ( int i = 0 ; i < list1size ; i ++ ) { Edge e = editDistanceGraph . addEdge ( vertexArray [ i ] [ list2size ] , vertexArray [ i + 1 ] [ list2size ] ) ; if ( e == null ) return false ; e . setWeight ( weightDelete ) ; } // insert edges at bottom
for ( int j = 0 ; j < list2size ; j ++ ) { Edge e = editDistanceGraph . addEdge ( vertexArray [ list1size ] [ j ] , vertexArray [ list1size ] [ j + 1 ] ) ; if ( e == null ) return false ; e . setWeight ( weightInsert ) ; } for ( int i = 0 ; i < list1size ; i ++ ) { double sourceNodeWeight = getNodeWeight ( list1 . get ( i ) ) ; for ( int j = 0 ; j < list2size ; j ++ ) { if ( d1 [ i + 1 ] >= d2 [ j + 1 ] ) { Edge e = editDistanceGraph . addEdge ( vertexArray [ i ] [ j ] , vertexArray [ i + 1 ] [ j ] ) ; if ( e == null ) return false ; e . setWeight ( sourceNodeWeight * weightDelete ) ; } if ( d1 [ i + 1 ] == d2 [ j + 1 ] ) { Edge e = editDistanceGraph . addEdge ( vertexArray [ i ] [ j ] , vertexArray [ i + 1 ] [ j + 1 ] ) ; if ( e == null ) return false ; if ( ( comparator . compare ( list1 . get ( i ) , list2 . get ( j ) ) == 1 ) || ( comparator . compare ( list1 . get ( i ) , list2 . get ( j ) ) == 2 ) ) { e . setWeight ( weightSubstitute ) ; } if ( comparator . compare ( list1 . get ( i ) , list2 . get ( j ) ) == 0 ) { e . setWeight ( weightSubstituteEqual ) ; } if ( comparator . compare ( list1 . get ( i ) , list2 . get ( j ) ) == 3 ) { e . setWeight ( Double . POSITIVE_INFINITY ) ; } if ( comparator . compare ( list1 . get ( i ) , list2 . get ( j ) ) == - 1 ) { e . setWeight ( sourceNodeWeight * weightSubstitute * 2 ) ; } } if ( d1 [ i + 1 ] <= d2 [ j + 1 ] ) { Edge e = editDistanceGraph . addEdge ( vertexArray [ i ] [ j ] , vertexArray [ i ] [ j + 1 ] ) ; if ( e == null ) return false ; e . setWeight ( weightInsert ) ; } } } return true ; |
public class CommerceWishListItemPersistenceImpl { /** * Removes the commerce wish list item with the primary key from the database . Also notifies the appropriate model listeners .
* @ param primaryKey the primary key of the commerce wish list item
* @ return the commerce wish list item that was removed
* @ throws NoSuchWishListItemException if a commerce wish list item with the primary key could not be found */
@ Override public CommerceWishListItem remove ( Serializable primaryKey ) throws NoSuchWishListItemException { } } | Session session = null ; try { session = openSession ( ) ; CommerceWishListItem commerceWishListItem = ( CommerceWishListItem ) session . get ( CommerceWishListItemImpl . class , primaryKey ) ; if ( commerceWishListItem == null ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey ) ; } throw new NoSuchWishListItemException ( _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey ) ; } return remove ( commerceWishListItem ) ; } catch ( NoSuchWishListItemException nsee ) { throw nsee ; } catch ( Exception e ) { throw processException ( e ) ; } finally { closeSession ( session ) ; } |
public class HtmlWriter { /** * Get the configuration string as a content , replacing spaces
* with non - breaking spaces .
* @ param key the key to look for in the configuration file
* @ return a content tree for the text */
public Content getNonBreakResource ( String key ) { } } | String text = configuration . getText ( key ) ; Content c = configuration . newContent ( ) ; int start = 0 ; int p ; while ( ( p = text . indexOf ( " " , start ) ) != - 1 ) { c . addContent ( text . substring ( start , p ) ) ; c . addContent ( RawHtml . nbsp ) ; start = p + 1 ; } c . addContent ( text . substring ( start ) ) ; return c ; |
public class RtfCtrlWordMap { /** * Get the HashMap object containing the control words .
* Initializes the instance if this is the first instantiation
* of RtfCtrlWords class .
* @ since 2.0.8 */
public RtfCtrlWordHandler getCtrlWordHandler ( String ctrlWord ) { } } | try { if ( ctrlWords . containsKey ( ctrlWord ) ) { // add 1 to known control words
return ( RtfCtrlWordHandler ) ctrlWords . get ( ctrlWord ) ; } else { // add 1 to unknown control words
return ( RtfCtrlWordHandler ) ctrlWords . get ( "unknown" ) ; } } catch ( SecurityException e ) { // TODO Auto - generated catch block
e . printStackTrace ( ) ; } catch ( IllegalArgumentException e ) { // TODO Auto - generated catch block
e . printStackTrace ( ) ; } return null ; |
public class DefaultTraceCollector { /** * This method processes the values associated with the start or end of a scoped
* activity .
* @ param trace The trace
* @ param node The node
* @ param direction The direction
* @ param headers The optional headers
* @ param values The values */
protected void processValues ( Trace trace , Node node , Direction direction , Map < String , ? > headers , Object [ ] values ) { } } | if ( node . interactionNode ( ) ) { Message m = null ; if ( direction == Direction . In ) { m = ( ( InteractionNode ) node ) . getIn ( ) ; if ( m == null ) { m = new Message ( ) ; ( ( InteractionNode ) node ) . setIn ( m ) ; } } else { m = ( ( InteractionNode ) node ) . getOut ( ) ; if ( m == null ) { m = new Message ( ) ; ( ( InteractionNode ) node ) . setOut ( m ) ; } } if ( headers != null && m . getHeaders ( ) . isEmpty ( ) ) { // TODO : Need to have config to determine whether headers should be logged
for ( Map . Entry < String , ? > stringEntry : headers . entrySet ( ) ) { String value = getHeaderValueText ( stringEntry . getValue ( ) ) ; if ( value != null ) { m . getHeaders ( ) . put ( stringEntry . getKey ( ) , value ) ; } } } } if ( processorManager != null ) { processorManager . process ( trace , node , direction , headers , values ) ; } |
public class XESLogParser { /** * Takes a String containing {@link DataUsage} identifiers separated by commas, removes every leading and trailing whitespace, and parses them into a {@link List}. <br>
* TODO move to TOVAL into enum {@link DataUsage}? */
private static List < DataUsage > parseDataUsageString ( String dataUsageString ) throws ParameterException { } } | List < String > dataUsageStrings = Arrays . asList ( dataUsageString . split ( "\\s*,\\s*" ) ) ; List < DataUsage > dataUsageList = new ArrayList < > ( dataUsageStrings . size ( ) ) ; for ( String d : dataUsageStrings ) { DataUsage dataUsage = DataUsage . parse ( d ) ; if ( ! dataUsageList . contains ( dataUsage ) ) dataUsageList . add ( dataUsage ) ; } return dataUsageList ; |
public class MemoryCacheUtils { /** * Generates key for memory cache for incoming image ( URI + size ) . < br / >
* Pattern for cache key - < b > [ imageUri ] _ [ width ] x [ height ] < / b > . */
public static String generateKey ( String imageUri , ImageSize targetSize ) { } } | return new StringBuilder ( imageUri ) . append ( URI_AND_SIZE_SEPARATOR ) . append ( targetSize . getWidth ( ) ) . append ( WIDTH_AND_HEIGHT_SEPARATOR ) . append ( targetSize . getHeight ( ) ) . toString ( ) ; |
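A self-contained sketch of the key format documented above; the separator constants ("_" and "x") are assumptions inferred from the documented pattern [imageUri]_[width]x[height], not taken from the library source.

```java
public class MemoryCacheKeySketch {
    // Assumed values, matching the documented pattern [imageUri]_[width]x[height]
    private static final String URI_AND_SIZE_SEPARATOR = "_";
    private static final String WIDTH_AND_HEIGHT_SEPARATOR = "x";

    static String generateKey(String imageUri, int width, int height) {
        return imageUri + URI_AND_SIZE_SEPARATOR + width + WIDTH_AND_HEIGHT_SEPARATOR + height;
    }

    public static void main(String[] args) {
        System.out.println(generateKey("http://example.com/a.png", 320, 240));
        // prints: http://example.com/a.png_320x240
    }
}
```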
public class OutHttpApp { /** * Returns the next byte buffer . */
@ Override public byte [ ] nextBuffer ( int offset ) throws IOException { } } | if ( offset < 0 || SIZE < offset ) { throw new IllegalStateException ( L . l ( "Invalid offset: " + offset ) ) ; } if ( _bufferCapacity <= SIZE || _bufferCapacity <= offset + _bufferSize ) { _offset = offset ; flushByteBuffer ( ) ; return buffer ( ) ; } else { _tBuf . length ( offset ) ; _bufferSize += offset ; TempBuffer tempBuf = TempBuffer . create ( ) ; _tBuf . next ( tempBuf ) ; _tBuf = tempBuf ; _buffer = _tBuf . buffer ( ) ; _offset = _startOffset ; return _buffer ; } |
public class ProgressFeedback { /** * A helper method that executes a task in a worker thread and displays feedback
* in a progress windows .
* @ param strNotice The text notice to display in the ProgressWindow .
* @ param task The task to execute in a separate ( worker ) thread . */
public static ProgressFeedback runWithProgress ( final String strNotice , final IRunnableWithProgress task ) { } } | return runWithProgress ( strNotice , task , false , false ) ; |
public class CleverTapAPI { /** * Returns the device push token or null
* @ param type com . clevertap . android . sdk . PushType ( FCM or GCM )
* @ return String device token or null
* NOTE : on initial install calling getDevicePushToken may return null , as the device token is
* not yet available
* Implement CleverTapAPI . DevicePushTokenRefreshListener to get a callback once the token is
* available */
@ SuppressWarnings ( "unused" ) public String getDevicePushToken ( final PushType type ) { } } | switch ( type ) { case GCM : return getCachedGCMToken ( ) ; case FCM : return getCachedFCMToken ( ) ; default : return null ; } |
public class TrackedActiveConnection { /** * Initializes this TrackedActiveConnection, copying the data associated
* with the given active connection record. At a minimum, the identifier
* of this active connection will be set, and the start date and the
* identifier of the associated connection will be copied. If requested,
* sensitive information like the associated username will be copied as
* well.
* @ param currentUser
* The user that created or retrieved this object .
* @ param activeConnectionRecord
* The active connection record to copy .
* @ param includeSensitiveInformation
* Whether sensitive data should be copied from the connection record
* as well . This includes the remote host , associated tunnel , and
* username . */
public void init ( ModeledAuthenticatedUser currentUser , ActiveConnectionRecord activeConnectionRecord , boolean includeSensitiveInformation ) { } } | super . init ( currentUser ) ; this . connectionRecord = activeConnectionRecord ; // Copy all non - sensitive data from given record
this . connection = activeConnectionRecord . getConnection ( ) ; this . sharingProfileIdentifier = activeConnectionRecord . getSharingProfileIdentifier ( ) ; this . identifier = activeConnectionRecord . getUUID ( ) . toString ( ) ; this . startDate = activeConnectionRecord . getStartDate ( ) ; // Include sensitive data , too , if requested
if ( includeSensitiveInformation ) { this . remoteHost = activeConnectionRecord . getRemoteHost ( ) ; this . tunnel = activeConnectionRecord . getTunnel ( ) ; this . username = activeConnectionRecord . getUsername ( ) ; } |
public class BoneCPConfig { /** * Queries taking longer than this limit to execute are logged .
* @ param queryExecuteTimeLimit the limit to set in milliseconds .
* @ param timeUnit */
public void setQueryExecuteTimeLimit ( long queryExecuteTimeLimit , TimeUnit timeUnit ) { } } | this . queryExecuteTimeLimitInMs = TimeUnit . MILLISECONDS . convert ( queryExecuteTimeLimit , timeUnit ) ; |
public class QueryParametersLazyList { /** * This is a reference implementation of a universal read function for scrollable ResultSets.
* It is currently unoptimized, due to possible compatibility issues.
* @ param row row which should be read
* @ return read value
* @ throws SQLException */
private QueryParameters readResultSetRow ( int row ) throws SQLException { } } | int currentRow = getCurrentResultSet ( ) . getRow ( ) ; QueryParameters result = null ; if ( currentRow == 0 ) { // before first or last
if ( getCurrentResultSet ( ) . isAfterLast ( ) == true ) { // positioning on last
getCurrentResultSet ( ) . last ( ) ; } else if ( getCurrentResultSet ( ) . isBeforeFirst ( ) == true ) { // position on first
getCurrentResultSet ( ) . first ( ) ; } else { throw new MjdbcRuntimeException ( "ResultSet need to be repositioned to get row different than zero" ) ; } currentRow = getCurrentResultSet ( ) . getRow ( ) ; } if ( currentRow == row ) { result = convertResultSetCurrentLine ( getCurrentResultSet ( ) ) ; } if ( useRelativePositioning == true ) { getCurrentResultSet ( ) . relative ( row - currentRow ) ; if ( getCurrentResultSet ( ) . getRow ( ) > 0 ) { result = convertResultSetCurrentLine ( getCurrentResultSet ( ) ) ; } } else if ( currentRow < row ) { while ( getCurrentResultSet ( ) . next ( ) == true ) { if ( getCurrentResultSet ( ) . getRow ( ) == row ) { result = convertResultSetCurrentLine ( getCurrentResultSet ( ) ) ; } } } else { while ( getCurrentResultSet ( ) . previous ( ) == true ) { if ( getCurrentResultSet ( ) . getRow ( ) == row ) { result = convertResultSetCurrentLine ( getCurrentResultSet ( ) ) ; } } } return result ; |
public class BlockAndLocation { /** * Implement write of Writable */
public void write ( DataOutput out ) throws IOException { } } | out . writeLong ( blockId ) ; out . writeLong ( blockGenStamp ) ; super . write ( out ) ; |
public class CmsImageFormatHandler { /** * Execute on width change . < p >
* @ param width the new width */
public void onWidthChange ( String width ) { } } | int value = CmsClientStringUtil . parseInt ( width ) ; if ( ( m_croppingParam . getTargetWidth ( ) == value ) || ( value == 0 ) ) { // the value has not changed , ignore ' 0'
return ; } m_croppingParam . setTargetWidth ( value ) ; if ( m_ratioLocked ) { m_croppingParam . setTargetHeight ( ( value * m_originalHeight ) / m_originalWidth ) ; m_formatForm . setHeightInput ( m_croppingParam . getTargetHeight ( ) ) ; } // in case the width and height parameter don ' t match the current format any longer , switch to user defined format
if ( ( ! m_currentFormat . isWidthEditable ( ) || ( m_ratioLocked && ! m_currentFormat . isHeightEditable ( ) ) ) && hasUserFormatRestriction ( ) ) { m_formatForm . setFormatSelectValue ( m_userFormatKey ) ; } else { fireValueChangedEvent ( ) ; } |
public class Levy { /** * Sets location of the Levy distribution .
* @ param location the new location */
public void setLocation ( double location ) { } } | if ( Double . isNaN ( location ) || Double . isInfinite ( location ) ) throw new ArithmeticException ( "location must be a real number" ) ; this . location = location ; |
public class BucketSnippets { /** * [ VARIABLE " my _ blob _ name " ] */
public Blob createBlobFromByteArray ( String blobName ) { } } | // [ START createBlobFromByteArray ]
Blob blob = bucket . create ( blobName , "Hello, World!" . getBytes ( UTF_8 ) ) ; // [ END createBlobFromByteArray ]
return blob ; |
public class Math { /** * Returns the minimum value of an array . */
public static int min ( int [ ] x ) { } } | int m = x [ 0 ] ; for ( int n : x ) { if ( n < m ) { m = n ; } } return m ; |
public class XMLParser { /** * / * ( non - Javadoc )
* @ see com . abubusoft . kripton . xml . XmlPullParser # getAttributeValue ( java . lang . String , java . lang . String ) */
@ Override public String getAttributeValue ( String namespace , String name ) { } } | for ( int i = ( attributeCount * 4 ) - 4 ; i >= 0 ; i -= 4 ) { if ( attributes [ i + 2 ] . equals ( name ) && ( namespace == null || attributes [ i ] . equals ( namespace ) ) ) { return attributes [ i + 3 ] ; } } return null ; |
public class BlobServicesInner { /** * Sets the properties of a storage account's Blob service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules.
* @ param resourceGroupName The name of the resource group within the user ' s subscription . The name is case insensitive .
* @ param accountName The name of the storage account within the specified resource group . Storage account names must be between 3 and 24 characters in length and use numbers and lower - case letters only .
* @param parameters The properties of a storage account's Blob service, including properties for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules.
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < BlobServicePropertiesInner > setServicePropertiesAsync ( String resourceGroupName , String accountName , BlobServicePropertiesInner parameters , final ServiceCallback < BlobServicePropertiesInner > serviceCallback ) { } } | return ServiceFuture . fromResponse ( setServicePropertiesWithServiceResponseAsync ( resourceGroupName , accountName , parameters ) , serviceCallback ) ; |
public class VecMathDenseDoubleMatrix2D { /** * non - singular matrices only */
public Matrix [ ] lu ( ) { } } | if ( isSquare ( ) ) { GMatrix m = ( GMatrix ) matrix . clone ( ) ; GMatrix lu = ( GMatrix ) matrix . clone ( ) ; GVector piv = new GVector ( matrix . getNumCol ( ) ) ; m . LUD ( lu , piv ) ; Matrix l = new VecMathDenseDoubleMatrix2D ( lu ) . tril ( Ret . NEW , 0 ) ; for ( int i = ( int ) l . getRowCount ( ) - 1 ; i != - 1 ; i -- ) { l . setAsDouble ( 1 , i , i ) ; } Matrix u = new VecMathDenseDoubleMatrix2D ( lu ) . triu ( Ret . NEW , 0 ) ; VecMathDenseDoubleMatrix2D p = new VecMathDenseDoubleMatrix2D ( MathUtil . longToInt ( getRowCount ( ) ) , MathUtil . longToInt ( getColumnCount ( ) ) ) ; for ( int i = piv . getSize ( ) - 1 ; i != - 1 ; i -- ) { p . setDouble ( 1 , i , ( int ) piv . getElement ( i ) ) ; } return new Matrix [ ] { l , u , p } ; } else { throw new RuntimeException ( "only allowed for square matrices" ) ; } |
public class AnimatedImageCompositor { /** * Renders the specified frame . Only should be called on the rendering thread .
* @ param frameNumber the frame to render
* @ param bitmap the bitmap to render into */
public void renderFrame ( int frameNumber , Bitmap bitmap ) { } } | Canvas canvas = new Canvas ( bitmap ) ; canvas . drawColor ( Color . TRANSPARENT , PorterDuff . Mode . SRC ) ; // If blending is required , prepare the canvas with the nearest cached frame .
int nextIndex ; if ( ! isKeyFrame ( frameNumber ) ) { // Blending is required . nextIndex points to the next index to render onto the canvas .
nextIndex = prepareCanvasWithClosestCachedFrame ( frameNumber - 1 , canvas ) ; } else { // Blending isn ' t required . Start at the frame we ' re trying to render .
nextIndex = frameNumber ; } // Iterate from nextIndex to the frame number just preceding the one we ' re trying to render
// and composite them in order according to the Disposal Method .
for ( int index = nextIndex ; index < frameNumber ; index ++ ) { AnimatedDrawableFrameInfo frameInfo = mAnimatedDrawableBackend . getFrameInfo ( index ) ; DisposalMethod disposalMethod = frameInfo . disposalMethod ; if ( disposalMethod == DisposalMethod . DISPOSE_TO_PREVIOUS ) { continue ; } if ( frameInfo . blendOperation == BlendOperation . NO_BLEND ) { disposeToBackground ( canvas , frameInfo ) ; } mAnimatedDrawableBackend . renderFrame ( index , canvas ) ; mCallback . onIntermediateResult ( index , bitmap ) ; if ( disposalMethod == DisposalMethod . DISPOSE_TO_BACKGROUND ) { disposeToBackground ( canvas , frameInfo ) ; } } AnimatedDrawableFrameInfo frameInfo = mAnimatedDrawableBackend . getFrameInfo ( frameNumber ) ; if ( frameInfo . blendOperation == BlendOperation . NO_BLEND ) { disposeToBackground ( canvas , frameInfo ) ; } // Finally , we render the current frame . We don ' t dispose it .
mAnimatedDrawableBackend . renderFrame ( frameNumber , canvas ) ; |
public class ObjectsImplementInterfaces { /** * when completely open */
private void checkObjectImplementsInterface ( GraphQLObjectType objectType , GraphQLInterfaceType interfaceType , SchemaValidationErrorCollector validationErrorCollector ) { } } | List < GraphQLFieldDefinition > fieldDefinitions = interfaceType . getFieldDefinitions ( ) ; for ( GraphQLFieldDefinition interfaceFieldDef : fieldDefinitions ) { GraphQLFieldDefinition objectFieldDef = objectType . getFieldDefinition ( interfaceFieldDef . getName ( ) ) ; if ( objectFieldDef == null ) { validationErrorCollector . addError ( error ( format ( "object type '%s' does not implement interface '%s' because field '%s' is missing" , objectType . getName ( ) , interfaceType . getName ( ) , interfaceFieldDef . getName ( ) ) ) ) ; } else { checkFieldTypeCompatibility ( objectType , interfaceType , validationErrorCollector , interfaceFieldDef , objectFieldDef ) ; } } |
public class LogSemiring { /** * ( non - Javadoc )
* @ see
* edu . cmu . sphinx . fst . weight . Semiring # times ( edu . cmu . sphinx . fst . weight . float ,
* edu . cmu . sphinx . fst . weight . float ) */
@ Override public double times ( double w1 , double w2 ) { } } | if ( ! isMember ( w1 ) || ! isMember ( w2 ) ) { return Double . NEGATIVE_INFINITY ; } return w1 + w2 ; |
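A small numeric check of what times does in a log semiring: adding weights corresponds to multiplying the underlying probabilities. The negative-log convention below is an assumption made for illustration; the sign convention of the actual semiring may differ.

```java
public class LogSemiringTimesDemo {
    public static void main(String[] args) {
        double p = 0.25, q = 0.5;
        double w1 = -Math.log(p), w2 = -Math.log(q);  // weights in -log space (assumed convention)
        double times = w1 + w2;                       // what times(w1, w2) computes above
        System.out.println(times);                    // ~2.0794
        System.out.println(-Math.log(p * q));         // same value: -log(pq) = -log p - log q
    }
}
```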
public class CmsDriverManager { /** * Collects the groups which constitute a given role . < p >
* @ param dbc the database context
* @ param roleGroupName the group related to the role
* @param directUsersOnly if true, only the group belonging to the entry itself will be included
* @ param accumulator a map for memoizing return values of recursive calls
* @ return the set of groups which constitute the role
* @ throws CmsException if something goes wrong */
public Set < CmsGroup > getRoleGroupsImpl ( CmsDbContext dbc , String roleGroupName , boolean directUsersOnly , Map < String , Set < CmsGroup > > accumulator ) throws CmsException { } } | Set < CmsGroup > result = new HashSet < CmsGroup > ( ) ; if ( accumulator . get ( roleGroupName ) != null ) { return accumulator . get ( roleGroupName ) ; } CmsGroup group = readGroup ( dbc , roleGroupName ) ; // check that the group really exists
if ( ( group == null ) || ( ! group . isRole ( ) ) ) { throw new CmsDbEntryNotFoundException ( Messages . get ( ) . container ( Messages . ERR_UNKNOWN_GROUP_1 , roleGroupName ) ) ; } result . add ( group ) ; if ( ! directUsersOnly ) { CmsRole role = CmsRole . valueOf ( group ) ; if ( role . getParentRole ( ) != null ) { try { String parentGroup = role . getParentRole ( ) . getGroupName ( ) ; // iterate the parent roles
result . addAll ( getRoleGroupsImpl ( dbc , parentGroup , directUsersOnly , accumulator ) ) ; } catch ( CmsDbEntryNotFoundException e ) { // ignore , this may happen while deleting an orgunit
if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( e . getLocalizedMessage ( ) , e ) ; } } } String parentOu = CmsOrganizationalUnit . getParentFqn ( group . getOuFqn ( ) ) ; if ( parentOu != null ) { // iterate the parent ou ' s
result . addAll ( getRoleGroupsImpl ( dbc , parentOu + group . getSimpleName ( ) , directUsersOnly , accumulator ) ) ; } } accumulator . put ( roleGroupName , result ) ; return result ; |
public class transformpolicylabel { /** * Use this API to fetch transformpolicylabel resource of given name . */
public static transformpolicylabel get ( nitro_service service , String labelname ) throws Exception { } } | transformpolicylabel obj = new transformpolicylabel ( ) ; obj . set_labelname ( labelname ) ; transformpolicylabel response = ( transformpolicylabel ) obj . get_resource ( service ) ; return response ; |
public class DefaultFastFileStorageClient { /** * Uploads a file.
* <pre>
* The following can be configured via the fastFile object:
* 1. the image group to upload to
* 2. the metadata set (metaDataSet) to upload
* </pre>
* @param fastFile
* @return */
@ Override public StorePath uploadFile ( FastFile fastFile ) { } } | Validate . notNull ( fastFile . getInputStream ( ) , "上传文件流不能为空" ) ; Validate . notBlank ( fastFile . getFileExtName ( ) , "文件扩展名不能为空" ) ; // get the storage node
StorageNode client = getStorageNode ( fastFile . getGroupName ( ) ) ; // upload the file and its metadata
return uploadFileAndMetaData ( client , fastFile . getInputStream ( ) , fastFile . getFileSize ( ) , fastFile . getFileExtName ( ) , fastFile . getMetaDataSet ( ) ) ; |
public class CPIRGImpl { /** * <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated */
@ Override public Object eGet ( int featureID , boolean resolve , boolean coreType ) { } } | switch ( featureID ) { case AfplibPackage . CPIRG__GCGID : return getGCGID ( ) ; case AfplibPackage . CPIRG__PRT_FLAGS : return getPrtFlags ( ) ; case AfplibPackage . CPIRG__CODE_POINT : return getCodePoint ( ) ; case AfplibPackage . CPIRG__COUNT : return getCount ( ) ; } return super . eGet ( featureID , resolve , coreType ) ; |
public class HashedArray { /** * For testing purposes ( only ) we can remove items from the HashedArray . This is not
* very efficient , but is only used for testcases so probably doesn ' t matter too much .
* This method has been made synchronized for thread - safety , just in case anyone ever uses it */
synchronized public Element remove ( long index ) { } } | Element oldValue = get ( index ) ; if ( oldValue != null ) { int bind = ( ( int ) index & Integer . MAX_VALUE ) % buckets . length ; Element [ ] bucket = buckets [ bind ] ; int count = counts [ bind ] ; int i = 0 ; while ( bucket [ i ] . getIndex ( ) != index ) i ++ ; System . arraycopy ( bucket , i + 1 , bucket , i , count - ( i + 1 ) ) ; counts [ bind ] = count - 1 ; totalSize -= 1 ; } return oldValue ; |
public class RuleElementImpl { /** * <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated */
public NotificationChain basicSetGuard ( Expression newGuard , NotificationChain msgs ) { } } | Expression oldGuard = guard ; guard = newGuard ; if ( eNotificationRequired ( ) ) { ENotificationImpl notification = new ENotificationImpl ( this , Notification . SET , SimpleAntlrPackage . RULE_ELEMENT__GUARD , oldGuard , newGuard ) ; if ( msgs == null ) msgs = notification ; else msgs . add ( notification ) ; } return msgs ; |
public class ResponseCardMarshaller { /** * Marshall the given parameter object . */
public void marshall ( ResponseCard responseCard , ProtocolMarshaller protocolMarshaller ) { } } | if ( responseCard == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( responseCard . getVersion ( ) , VERSION_BINDING ) ; protocolMarshaller . marshall ( responseCard . getContentType ( ) , CONTENTTYPE_BINDING ) ; protocolMarshaller . marshall ( responseCard . getGenericAttachments ( ) , GENERICATTACHMENTS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class OnDiskMatrix { /** * { @ inheritDoc } */
public void set ( int row , int col , double val ) { } } | int region = getMatrixRegion ( row , col ) ; int regionOffset = getRegionOffset ( row , col ) ; matrixRegions [ region ] . put ( regionOffset , val ) ; |
public class Date { /** * Returns the offset , measured in minutes , for the local time zone
* relative to UTC that is appropriate for the time represented by
* this < code > Date < / code > object .
* For example , in Massachusetts , five time zones west of Greenwich :
* < blockquote > < pre >
* new Date ( 96 , 1 , 14 ) . getTimezoneOffset ( ) returns 300 < / pre > < / blockquote >
* because on February 14 , 1996 , standard time ( Eastern Standard Time )
* is in use , which is offset five hours from UTC ; but :
* < blockquote > < pre >
* new Date ( 96 , 5 , 1 ) . getTimezoneOffset ( ) returns 240 < / pre > < / blockquote >
* because on June 1 , 1996 , daylight saving time ( Eastern Daylight Time )
* is in use , which is offset only four hours from UTC . < p >
* This method produces the same result as if it computed :
* < blockquote > < pre >
* ( this . getTime ( ) - UTC ( this . getYear ( ) ,
* this . getMonth ( ) ,
* this . getDate ( ) ,
* this . getHours ( ) ,
* this . getMinutes ( ) ,
* this . getSeconds ( ) ) ) / ( 60 * 1000)
* < / pre > < / blockquote >
* @ return the time - zone offset , in minutes , for the current time zone .
* @ see java . util . Calendar # ZONE _ OFFSET
* @ see java . util . Calendar # DST _ OFFSET
* @ see java . util . TimeZone # getDefault
* @ deprecated As of JDK version 1.1,
* replaced by < code > - ( Calendar . get ( Calendar . ZONE _ OFFSET ) +
* Calendar . get ( Calendar . DST _ OFFSET ) ) / ( 60 * 1000 ) < / code > . */
@ Deprecated public int getTimezoneOffset ( ) { } } | int zoneOffset ; if ( cdate == null ) { // Android - changed : Android specific time zone logic
GregorianCalendar cal = new GregorianCalendar ( fastTime ) ; zoneOffset = ( cal . get ( Calendar . ZONE_OFFSET ) + cal . get ( Calendar . DST_OFFSET ) ) ; } else { normalize ( ) ; zoneOffset = cdate . getZoneOffset ( ) ; } return - zoneOffset / 60000 ; // convert to minutes |
public class druidGParser { /** * druidG . g : 57:1 : grandDrop returns [ DropProgram program ] : ( s1 = dropStmnt ) ( WS ) ? ( ( OPT _ SEMI _ COLON ) ? | ( OPT _ AMPERSAND ) ? ) ; */
public final DropProgram grandDrop ( ) throws RecognitionException { } } | DropProgram program = null ; DropMeta s1 = null ; program = null ; try { // druidG . g : 59:2 : ( ( s1 = dropStmnt ) ( WS ) ? ( ( OPT _ SEMI _ COLON ) ? | ( OPT _ AMPERSAND ) ? ) )
// druidG . g : 59:4 : ( s1 = dropStmnt ) ( WS ) ? ( ( OPT _ SEMI _ COLON ) ? | ( OPT _ AMPERSAND ) ? )
{ // druidG . g : 59:4 : ( s1 = dropStmnt )
// druidG . g : 59:5 : s1 = dropStmnt
{ pushFollow ( FOLLOW_dropStmnt_in_grandDrop164 ) ; s1 = dropStmnt ( ) ; state . _fsp -- ; } program = new DropProgram ( ) ; program . addStmnt ( s1 ) ; // druidG . g : 60:4 : ( WS ) ?
int alt6 = 2 ; int LA6_0 = input . LA ( 1 ) ; if ( ( LA6_0 == WS ) ) { alt6 = 1 ; } switch ( alt6 ) { case 1 : // druidG . g : 60:4 : WS
{ match ( input , WS , FOLLOW_WS_in_grandDrop173 ) ; } break ; } // druidG . g : 60:8 : ( ( OPT _ SEMI _ COLON ) ? | ( OPT _ AMPERSAND ) ? )
int alt9 = 2 ; switch ( input . LA ( 1 ) ) { case OPT_SEMI_COLON : { alt9 = 1 ; } break ; case EOF : { alt9 = 1 ; } break ; case OPT_AMPERSAND : { alt9 = 2 ; } break ; default : NoViableAltException nvae = new NoViableAltException ( "" , 9 , 0 , input ) ; throw nvae ; } switch ( alt9 ) { case 1 : // druidG . g : 60:9 : ( OPT _ SEMI _ COLON ) ?
{ // druidG . g : 60:9 : ( OPT _ SEMI _ COLON ) ?
int alt7 = 2 ; int LA7_0 = input . LA ( 1 ) ; if ( ( LA7_0 == OPT_SEMI_COLON ) ) { alt7 = 1 ; } switch ( alt7 ) { case 1 : // druidG . g : 60:9 : OPT _ SEMI _ COLON
{ match ( input , OPT_SEMI_COLON , FOLLOW_OPT_SEMI_COLON_in_grandDrop177 ) ; } break ; } } break ; case 2 : // druidG . g : 60:27 : ( OPT _ AMPERSAND ) ?
{ // druidG . g : 60:27 : ( OPT _ AMPERSAND ) ?
int alt8 = 2 ; int LA8_0 = input . LA ( 1 ) ; if ( ( LA8_0 == OPT_AMPERSAND ) ) { alt8 = 1 ; } switch ( alt8 ) { case 1 : // druidG . g : 60:28 : OPT _ AMPERSAND
{ match ( input , OPT_AMPERSAND , FOLLOW_OPT_AMPERSAND_in_grandDrop183 ) ; program . waitForCompletion = false ; } break ; } } break ; } } } catch ( RecognitionException re ) { reportError ( re ) ; recover ( input , re ) ; } finally { // do for sure before leaving
} return program ; |
public class StandardMethodTargetRegistrar { /** * Tries to locate an availability indicator ( a no - arg method that returns
* { @ link Availability } ) for the given command method . The following are tried in order
* for method { @ literal m } :
* < ol >
* < li > If { @ literal m } bears the { @ literal @ } { @ link ShellMethodAvailability } annotation ,
* its value should be the method name to look up < / li >
* < li > a method named { @ literal " < m > Availability " } is looked up . < / li >
* < li > otherwise , if some method { @ literal ai } that returns { @ link Availability } and takes
* no argument exists , that is annotated with { @ literal @ } { @ link ShellMethodAvailability }
* and whose annotation value contains one of the { @ literal commandKeys } , then it is
* selected < / li >
* < / ol > */
private Supplier < Availability > findAvailabilityIndicator ( String [ ] commandKeys , Object bean , Method method ) { } } | ShellMethodAvailability explicit = method . getAnnotation ( ShellMethodAvailability . class ) ; final Method indicator ; if ( explicit != null ) { Assert . isTrue ( explicit . value ( ) . length == 1 , "When set on a @" + ShellMethod . class . getSimpleName ( ) + " method, the value of the @" + ShellMethodAvailability . class . getSimpleName ( ) + " should be a single element, the name of a method that returns " + Availability . class . getSimpleName ( ) + ". Found " + Arrays . asList ( explicit . value ( ) ) + " for " + method ) ; indicator = ReflectionUtils . findMethod ( bean . getClass ( ) , explicit . value ( ) [ 0 ] ) ; } // Try " < method > Availability "
else { Method implicit = ReflectionUtils . findMethod ( bean . getClass ( ) , method . getName ( ) + "Availability" ) ; if ( implicit != null ) { indicator = implicit ; } else { Map < Method , Collection < String > > candidates = new HashMap < > ( ) ; ReflectionUtils . doWithMethods ( bean . getClass ( ) , candidate -> { List < String > matchKeys = new ArrayList < > ( Arrays . asList ( candidate . getAnnotation ( ShellMethodAvailability . class ) . value ( ) ) ) ; if ( matchKeys . contains ( "*" ) ) { Assert . isTrue ( matchKeys . size ( ) == 1 , "When using '*' as a wildcard for " + ShellMethodAvailability . class . getSimpleName ( ) + ", this can be the only value. Found " + matchKeys + " on method " + candidate ) ; candidates . put ( candidate , matchKeys ) ; } else { matchKeys . retainAll ( Arrays . asList ( commandKeys ) ) ; if ( ! matchKeys . isEmpty ( ) ) { candidates . put ( candidate , matchKeys ) ; } } } , m -> m . getAnnotation ( ShellMethodAvailability . class ) != null && m . getAnnotation ( ShellMethod . class ) == null ) ; // Make sure wildcard approach has less precedence than explicit name
Set < Method > notUsingWildcard = candidates . entrySet ( ) . stream ( ) . filter ( e -> ! e . getValue ( ) . contains ( "*" ) ) . map ( Map . Entry :: getKey ) . collect ( Collectors . toSet ( ) ) ; Assert . isTrue ( notUsingWildcard . size ( ) <= 1 , "Found several @" + ShellMethodAvailability . class . getSimpleName ( ) + " annotated methods that could apply for " + method + ". Offending candidates are " + notUsingWildcard ) ; if ( notUsingWildcard . size ( ) == 1 ) { indicator = notUsingWildcard . iterator ( ) . next ( ) ; } // Wildcard was available
else if ( candidates . size ( ) == 1 ) { indicator = candidates . keySet ( ) . iterator ( ) . next ( ) ; } else { indicator = null ; } } } if ( indicator != null ) { Assert . isTrue ( indicator . getReturnType ( ) . equals ( Availability . class ) , "Method " + indicator + " should return " + Availability . class . getSimpleName ( ) ) ; Assert . isTrue ( indicator . getParameterCount ( ) == 0 , "Method " + indicator + " should be a no-arg method" ) ; ReflectionUtils . makeAccessible ( indicator ) ; return ( ) -> ( Availability ) ReflectionUtils . invokeMethod ( indicator , bean ) ; } else { return null ; } |
public class LongestAliphaticChainDescriptor { /** * Depth-first search on an acyclic graph. Since we have no cycles we
* don't need the visit flags and only need to know which atom we came from.
* @param adjlist adjacency list representation of the graph
* @ param v the current atom index
* @ param prev the previous atom index
* @ return the max length traversed */
private static int getMaxDepth ( int [ ] [ ] adjlist , int v , int prev ) { } } | int longest = 0 ; for ( int w : adjlist [ v ] ) { if ( w == prev ) continue ; // no cycles so don ' t need to check previous
int length = getMaxDepth ( adjlist , w , v ) ; if ( length > longest ) longest = length ; } return 1 + longest ; |
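A standalone sketch of the same depth-first traversal on a small hand-built adjacency list; the graph and names are made up for illustration.

```java
public class LongestChainDemo {
    // Same idea as getMaxDepth above: DFS on an acyclic adjacency list,
    // remembering only the previous vertex because there are no cycles.
    static int maxDepth(int[][] adjlist, int v, int prev) {
        int longest = 0;
        for (int w : adjlist[v]) {
            if (w == prev) continue;
            longest = Math.max(longest, maxDepth(adjlist, w, v));
        }
        return 1 + longest;
    }

    public static void main(String[] args) {
        // Tree: 0-1, 1-2, 2-3 and a side branch 1-4
        int[][] adjlist = { {1}, {0, 2, 4}, {1, 3}, {2}, {1} };
        System.out.println(maxDepth(adjlist, 0, -1)); // 4: the chain 0-1-2-3
    }
}
```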
public class RegularExpressionParser { /** * Reads a token from the remaining text and returns it.
* This is a default implementation that is overridable.
* In the default implementation, the starting and ending
* token characters are not escapable.
* If this implementation is overridden, a token MUST ALWAYS
* start with '<' or '[' and end with '>' or ']'.
* @ param remaining
* @ return */
public String readToken ( String remaining ) { } } | int start = 0 ; char c = remaining . charAt ( 0 ) ; int end ; if ( c == '<' ) { end = indexOfClose ( remaining , start , '<' , '>' ) ; } else if ( c == '[' ) { end = indexOfClose ( remaining , start , '[' , ']' ) ; } else { throw new IllegalStateException ( ) ; } // make sure we found the end
if ( end == - 1 ) { throw new TokenizationRegexException ( "bad token. Non-matching brackets (<> or []): " + start + ":\"" + remaining . substring ( start ) + "\"" ) ; } String token = remaining . substring ( start , end + 1 ) ; return token ; |
public class RepositoryApplicationConfiguration { /** * { @ link JpaTargetTagManagement } bean .
* @ return a new { @ link TargetTagManagement } */
@ Bean @ ConditionalOnMissingBean TargetTagManagement targetTagManagement ( final TargetTagRepository targetTagRepository , final TargetRepository targetRepository , final VirtualPropertyReplacer virtualPropertyReplacer , final JpaProperties properties ) { } } | return new JpaTargetTagManagement ( targetTagRepository , targetRepository , virtualPropertyReplacer , properties . getDatabase ( ) ) ; |
public class CommerceAccountUserRelPersistenceImpl { /** * Clears the cache for the commerce account user rel .
* The { @ link EntityCache } and { @ link FinderCache } are both cleared by this method . */
@ Override public void clearCache ( CommerceAccountUserRel commerceAccountUserRel ) { } } | entityCache . removeResult ( CommerceAccountUserRelModelImpl . ENTITY_CACHE_ENABLED , CommerceAccountUserRelImpl . class , commerceAccountUserRel . getPrimaryKey ( ) ) ; finderCache . clearCache ( FINDER_CLASS_NAME_LIST_WITH_PAGINATION ) ; finderCache . clearCache ( FINDER_CLASS_NAME_LIST_WITHOUT_PAGINATION ) ; |
public class MariaDbClob { /** * Convert character position into byte position in UTF8 byte array .
* @ param charPosition charPosition
* @ return byte position */
private int utf8Position ( int charPosition ) { } } | int pos = offset ; for ( int i = 0 ; i < charPosition ; i ++ ) { int byteValue = data [ pos ] & 0xff ; if ( byteValue < 0x80 ) { pos += 1 ; } else if ( byteValue < 0xC2 ) { throw new UncheckedIOException ( "invalid UTF8" , new CharacterCodingException ( ) ) ; } else if ( byteValue < 0xE0 ) { pos += 2 ; } else if ( byteValue < 0xF0 ) { pos += 3 ; } else if ( byteValue < 0xF8 ) { pos += 4 ; } else { throw new UncheckedIOException ( "invalid UTF8" , new CharacterCodingException ( ) ) ; } } return pos ; |
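A dependency-free sketch of the same character-to-byte mapping, using only the standard library; the lead-byte ranges mirror the branches above, but validity checks are omitted and the sample string is hypothetical.

import java.nio.charset.StandardCharsets;

public class Utf8PositionSketch {

    // Byte offset of the given character position inside a UTF-8 byte array (counting code points).
    static int utf8Position(byte[] data, int charPosition) {
        int pos = 0;
        for (int i = 0; i < charPosition; i++) {
            int b = data[pos] & 0xff;
            if (b < 0x80)      pos += 1;   // single-byte (ASCII)
            else if (b < 0xE0) pos += 2;   // 2-byte sequence
            else if (b < 0xF0) pos += 3;   // 3-byte sequence
            else               pos += 4;   // 4-byte sequence
        }
        return pos;
    }

    public static void main(String[] args) {
        byte[] utf8 = "aé漢a".getBytes(StandardCharsets.UTF_8); // 1 + 2 + 3 + 1 bytes
        System.out.println(utf8Position(utf8, 3));              // 6: byte offset of the trailing 'a'
    }
}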
public class FuncExtFunction { /** * Call the visitors for the function arguments . */
public void callArgVisitors ( XPathVisitor visitor ) { } } | for ( int i = 0 ; i < m_argVec . size ( ) ; i ++ ) { Expression exp = ( Expression ) m_argVec . elementAt ( i ) ; exp . callVisitors ( new ArgExtOwner ( exp ) , visitor ) ; } |
public class CmsXMLSearchConfigurationParser { /** * Returns the configured request parameter for the current query string , or the default parameter if the core is not specified .
* @ return The configured request parameter for the current query string , or the default parameter if the core is not specified . */
private String getQueryParam ( ) { } } | final String param = parseOptionalStringValue ( XML_ELEMENT_QUERYPARAM ) ; if ( param == null ) { return DEFAULT_QUERY_PARAM ; } else { return param ; } |
public class StringUtils { /** * Creates a random alphanumeric string of given length .
* @ param rnd The random number generator to use .
* @ param length The number of alphanumeric characters to generate .
* @ return the generated random alphanumeric string . */
public static String generateRandomAlphanumericString ( Random rnd , int length ) { } } | checkNotNull ( rnd ) ; checkArgument ( length >= 0 ) ; StringBuilder buffer = new StringBuilder ( length ) ; for ( int i = 0 ; i < length ; i ++ ) { buffer . append ( nextAlphanumericChar ( rnd ) ) ; } return buffer . toString ( ) ; |
public class KeyPadPanel { /** * Initialize layout . */
private void initializeLayout ( ) { } } | final GridBagLayout gbl = new GridBagLayout ( ) ; final GridBagConstraints gbc = new GridBagConstraints ( ) ; this . setLayout ( gbl ) ; LayoutExtensions . addComponent ( gbl , gbc , GridBagConstraints . CENTER , GridBagConstraints . BOTH , new Insets ( 2 , 2 , 2 , 2 ) , 0 , 0 , GridBagConstraints . REMAINDER , 1 , 0 , 0 , 1 , 1 , textAreaDisplay , this ) ; LayoutExtensions . addComponent ( gbl , gbc , GridBagConstraints . NORTHWEST , GridBagConstraints . BOTH , new Insets ( 2 , 2 , 2 , 2 ) , 0 , 1 , 1 , 1 , 100 , 100 , button1 , this ) ; LayoutExtensions . addComponent ( gbl , gbc , GridBagConstraints . NORTHWEST , GridBagConstraints . BOTH , new Insets ( 2 , 2 , 2 , 2 ) , 1 , 1 , 1 , 1 , 100 , 100 , button2 , this ) ; LayoutExtensions . addComponent ( gbl , gbc , GridBagConstraints . NORTHWEST , GridBagConstraints . BOTH , new Insets ( 2 , 2 , 2 , 2 ) , 2 , 1 , 1 , 1 , 100 , 100 , button3 , this ) ; LayoutExtensions . addComponent ( gbl , gbc , GridBagConstraints . NORTHWEST , GridBagConstraints . BOTH , new Insets ( 2 , 2 , 2 , 2 ) , 3 , 1 , 1 , 1 , 100 , 100 , buttonTable , this ) ; LayoutExtensions . addComponent ( gbl , gbc , GridBagConstraints . NORTHWEST , GridBagConstraints . BOTH , new Insets ( 2 , 2 , 2 , 2 ) , 0 , 2 , 1 , 1 , 100 , 100 , button4 , this ) ; LayoutExtensions . addComponent ( gbl , gbc , GridBagConstraints . NORTHWEST , GridBagConstraints . BOTH , new Insets ( 2 , 2 , 2 , 2 ) , 1 , 2 , 1 , 1 , 100 , 100 , button5 , this ) ; LayoutExtensions . addComponent ( gbl , gbc , GridBagConstraints . NORTHWEST , GridBagConstraints . BOTH , new Insets ( 2 , 2 , 2 , 2 ) , 2 , 2 , 1 , 1 , 100 , 100 , button6 , this ) ; LayoutExtensions . addComponent ( gbl , gbc , GridBagConstraints . NORTHWEST , GridBagConstraints . BOTH , new Insets ( 2 , 2 , 2 , 2 ) , 3 , 2 , 1 , 1 , 100 , 100 , buttonCancel , this ) ; LayoutExtensions . addComponent ( gbl , gbc , GridBagConstraints . NORTHWEST , GridBagConstraints . BOTH , new Insets ( 2 , 2 , 2 , 2 ) , 0 , 3 , 1 , 1 , 100 , 100 , button7 , this ) ; LayoutExtensions . addComponent ( gbl , gbc , GridBagConstraints . NORTHWEST , GridBagConstraints . BOTH , new Insets ( 2 , 2 , 2 , 2 ) , 1 , 3 , 1 , 1 , 100 , 100 , button8 , this ) ; LayoutExtensions . addComponent ( gbl , gbc , GridBagConstraints . NORTHWEST , GridBagConstraints . BOTH , new Insets ( 2 , 2 , 2 , 2 ) , 2 , 3 , 1 , 1 , 100 , 100 , button9 , this ) ; LayoutExtensions . addComponent ( gbl , gbc , GridBagConstraints . NORTHWEST , GridBagConstraints . BOTH , new Insets ( 2 , 2 , 2 , 2 ) , 3 , 3 , 1 , 1 , 100 , 100 , buttonStorno , this ) ; LayoutExtensions . addComponent ( gbl , gbc , GridBagConstraints . NORTHWEST , GridBagConstraints . BOTH , new Insets ( 2 , 2 , 2 , 2 ) , 0 , 4 , 1 , 1 , 100 , 100 , buttonPlus , this ) ; LayoutExtensions . addComponent ( gbl , gbc , GridBagConstraints . NORTHWEST , GridBagConstraints . BOTH , new Insets ( 2 , 2 , 2 , 2 ) , 1 , 4 , 1 , 1 , 100 , 100 , button0 , this ) ; LayoutExtensions . addComponent ( gbl , gbc , GridBagConstraints . NORTHWEST , GridBagConstraints . BOTH , new Insets ( 2 , 2 , 2 , 2 ) , 2 , 4 , 1 , 1 , 100 , 100 , buttonMinus , this ) ; LayoutExtensions . addComponent ( gbl , gbc , GridBagConstraints . NORTHWEST , GridBagConstraints . BOTH , new Insets ( 2 , 2 , 2 , 2 ) , 3 , 4 , 1 , 1 , 100 , 100 , buttonEnter , this ) ; |
public class AstyanaxBlockedDataReaderDAO { /** * Scans for rows within the specified range , exclusive on start and inclusive on end . */
private Iterator < Row < ByteBuffer , DeltaKey > > rowScan ( final DeltaPlacement placement , final ByteBufferRange rowRange , final ByteBufferRange columnRange , final LimitCounter limit , final ReadConsistency consistency ) { } } | return rowScan ( placement , placement . getBlockedDeltaColumnFamily ( ) , rowRange , columnRange , limit , consistency ) ; |
public class XmlEscape { /** * Perform a ( configurable ) XML 1.0 < strong > escape < / strong > operation on a < tt > String < / tt > input ,
* writing results to a < tt > Writer < / tt > .
* This method will perform an escape operation according to the specified
* { @ link org . unbescape . xml . XmlEscapeType } and { @ link org . unbescape . xml . XmlEscapeLevel }
* argument values .
* All other < tt > String < / tt > / < tt > Writer < / tt > - based < tt > escapeXml10 * ( . . . ) < / tt > methods call this one with preconfigured
* < tt > type < / tt > and < tt > level < / tt > values .
* This method is < strong > thread - safe < / strong > .
* @ param text the < tt > String < / tt > to be escaped .
* @ param writer the < tt > java . io . Writer < / tt > to which the escaped result will be written . Nothing will
* be written at all to this writer if input is < tt > null < / tt > .
* @ param type the type of escape operation to be performed , see { @ link org . unbescape . xml . XmlEscapeType } .
* @ param level the escape level to be applied , see { @ link org . unbescape . xml . XmlEscapeLevel } .
* @ throws IOException if an input / output exception occurs
* @ since 1.1.2 */
public static void escapeXml10 ( final String text , final Writer writer , final XmlEscapeType type , final XmlEscapeLevel level ) throws IOException { } } | escapeXml ( text , writer , XmlEscapeSymbols . XML10_SYMBOLS , type , level ) ; |
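A hedged usage sketch; escapeXml10(String, Writer, XmlEscapeType, XmlEscapeLevel) is the overload documented above, while the specific enum constants chosen here are assumptions to be checked against the unbescape version in use.

import java.io.StringWriter;
import org.unbescape.xml.XmlEscape;
import org.unbescape.xml.XmlEscapeLevel;
import org.unbescape.xml.XmlEscapeType;

public class XmlEscapeUsage {
    public static void main(String[] args) throws Exception {
        StringWriter out = new StringWriter();
        XmlEscape.escapeXml10(
                "if (a < b && c > d) { \"quote\" }",
                out,
                XmlEscapeType.CHARACTER_ENTITY_REFERENCES_DEFAULT_TO_HEXA,
                XmlEscapeLevel.LEVEL_2_ALL_NON_ASCII_PLUS_MARKUP_SIGNIFICANT);
        // Markup-significant characters are replaced by entities such as &lt;, &amp;, &gt;, &quot;
        System.out.println(out);
    }
}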
public class BidiWriter { /** * When we have OUTPUT _ REVERSE set on writeReordered ( ) , then we
* semantically write RTL runs in reverse and later reverse them again .
* Instead , we actually write them in forward order to begin with .
* However , if the RTL run was to be mirrored , we need to mirror here now
* since the implicit second reversal must not do it .
* It looks strange to do mirroring in LTR output , but it is only because
* we are writing RTL output in reverse . */
private static String doWriteForward ( String src , int options ) { } } | /* optimize for several combinations of options */
switch ( options & ( Bidi . REMOVE_BIDI_CONTROLS | Bidi . DO_MIRRORING ) ) { case 0 : { /* simply return the LTR run */
return src ; } case Bidi . DO_MIRRORING : { StringBuffer dest = new StringBuffer ( src . length ( ) ) ; /* do mirroring */
int i = 0 ; int c ; do { c = UTF16 . charAt ( src , i ) ; i += UTF16 . getCharCount ( c ) ; UTF16 . append ( dest , UCharacter . getMirror ( c ) ) ; } while ( i < src . length ( ) ) ; return dest . toString ( ) ; } case Bidi . REMOVE_BIDI_CONTROLS : { StringBuilder dest = new StringBuilder ( src . length ( ) ) ; /* copy the LTR run and remove any Bidi control characters */
int i = 0 ; char c ; do { c = src . charAt ( i ++ ) ; if ( ! Bidi . IsBidiControlChar ( c ) ) { dest . append ( c ) ; } } while ( i < src . length ( ) ) ; return dest . toString ( ) ; } default : { StringBuffer dest = new StringBuffer ( src . length ( ) ) ; /* remove Bidi control characters and do mirroring */
int i = 0 ; int c ; do { c = UTF16 . charAt ( src , i ) ; i += UTF16 . getCharCount ( c ) ; if ( ! Bidi . IsBidiControlChar ( c ) ) { UTF16 . append ( dest , UCharacter . getMirror ( c ) ) ; } } while ( i < src . length ( ) ) ; return dest . toString ( ) ; } } /* end of switch */ |
public class PatternFormatter { /** * Format the input parameters .
* @ see com . github . lisicnu . log4android . format . Formatter # format ( String , String , long ,
* com . github . lisicnu . log4android . Level , Object , Throwable ) */
public String format ( String clientID , String name , long time , Level level , Object message , Throwable t ) { } } | if ( ! patternParsed && pattern != null ) { parsePattern ( pattern ) ; } StringBuffer formattedStringBuffer = new StringBuffer ( 64 ) ; if ( commandArray != null ) { int length = commandArray . length ; for ( int index = 0 ; index < length ; index ++ ) { FormatCommandInterface currentConverter = commandArray [ index ] ; if ( currentConverter != null ) { formattedStringBuffer . append ( currentConverter . execute ( clientID , name , time , level , message , t ) ) ; } } } return formattedStringBuffer . toString ( ) ; |
public class SvdImplicitQrDecompose_Ultimate { /** * With the QR algorithm it is possible for the found singular values to be negative . This
* makes them all positive by multiplying them by a diagonal matrix whose diagonal entries are + 1 or - 1 . */ private void makeSingularPositive ( ) { } } | numSingular = qralg . getNumberOfSingularValues ( ) ; singularValues = qralg . getSingularValues ( ) ; for ( int i = 0 ; i < numSingular ; i ++ ) { double val = singularValues [ i ] ; if ( val < 0 ) { singularValues [ i ] = - val ; if ( computeU ) { // compute the results of multiplying it by an element of - 1 at this location in
private void makeSingularPositive ( ) { } } | numSingular = qralg . getNumberOfSingularValues ( ) ; singularValues = qralg . getSingularValues ( ) ; for ( int i = 0 ; i < numSingular ; i ++ ) { double val = singularValues [ i ] ; if ( val < 0 ) { singularValues [ i ] = - val ; if ( computeU ) { // compute the results of multiplying it by an element of - 1 at this location in
// a diagonal matrix .
int start = i * Ut . numCols ; int stop = start + Ut . numCols ; for ( int j = start ; j < stop ; j ++ ) { Ut . data [ j ] = - Ut . data [ j ] ; } } } } |
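As a side note on why this fix is valid: negating a singular value together with the corresponding row of U^T leaves the product U * Sigma * V^T unchanged, since the two sign flips cancel. A tiny self-contained check of that bookkeeping (hypothetical 2x2 data, plain arrays):

public class SingularSignFlipCheck {
    public static void main(String[] args) {
        double[] sigma = { 3.0, -2.0 };          // one "negative" singular value from the QR sweep
        double[][] ut  = { { 1.0, 0.0 },         // U^T stored row-major, as in the Ut.data loop above
                           { 0.6, 0.8 } };

        for (int i = 0; i < sigma.length; i++) {
            if (sigma[i] < 0) {
                sigma[i] = -sigma[i];            // make the singular value positive ...
                for (int j = 0; j < ut[i].length; j++) {
                    ut[i][j] = -ut[i][j];        // ... and compensate by negating row i of U^T
                }
            }
        }
        System.out.println(sigma[1] + " / " + ut[1][0] + ", " + ut[1][1]); // 2.0 / -0.6, -0.8
    }
}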
public class StackedEnsembleModel { /** * For StackedEnsemble we call score on all the base _ models and then combine the results
* with the metalearner to create the final predictions frame .
* @ see Model # predictScoreImpl ( Frame , Frame , String , Job , boolean , CFuncRef )
* @ param adaptFrm Already adapted frame
* @ param computeMetrics whether model metrics should be computed and stored for this scoring run
* @ return A Frame containing the prediction column , and class distribution */
@ Override protected Frame predictScoreImpl ( Frame fr , Frame adaptFrm , String destination_key , Job j , boolean computeMetrics , CFuncRef customMetricFunc ) { } } | Frame levelOneFrame = new Frame ( Key . < Frame > make ( "preds_levelone_" + this . _key . toString ( ) + fr . _key ) ) ; // TODO : don ' t score models that have 0 coefficients / aren ' t used by the metalearner .
// also we should be able to parallelize scoring of base models
for ( Key < Model > baseKey : this . _parms . _base_models ) { Model base = baseKey . get ( ) ; Frame basePreds = base . score ( fr , "preds_base_" + this . _key . toString ( ) + fr . _key , j , false ) ; StackedEnsemble . addModelPredictionsToLevelOneFrame ( base , basePreds , levelOneFrame ) ; DKV . remove ( basePreds . _key ) ; // Cleanup
Frame . deleteTempFrameAndItsNonSharedVecs ( basePreds , levelOneFrame ) ; } // Add response column to level one frame
levelOneFrame . add ( this . responseColumn , adaptFrm . vec ( this . responseColumn ) ) ; // TODO : what if we ' re running multiple in parallel and have a name collision ?
Log . info ( "Finished creating \"level one\" frame for scoring: " + levelOneFrame . toString ( ) ) ; // Score the dataset , building the class distribution & predictions
Model metalearner = this . _output . _metalearner ; Frame predictFr = metalearner . score ( levelOneFrame , destination_key , j , computeMetrics , CFuncRef . from ( _parms . _custom_metric_func ) ) ; if ( computeMetrics ) { // # score has just stored a ModelMetrics object for the ( metalearner , preds _ levelone ) Model / Frame pair .
// We need to be able to look it up by the ( this , fr ) pair .
// The ModelMetrics object for the metalearner will be removed when the metalearner is removed .
Key < ModelMetrics > [ ] mms = metalearner . _output . getModelMetrics ( ) ; ModelMetrics lastComputedMetric = mms [ mms . length - 1 ] . get ( ) ; ModelMetrics mmStackedEnsemble = lastComputedMetric . deepCloneWithDifferentModelAndFrame ( this , fr ) ; this . addModelMetrics ( mmStackedEnsemble ) ; } Frame . deleteTempFrameAndItsNonSharedVecs ( levelOneFrame , adaptFrm ) ; return predictFr ; |
public class AbstractResource { /** * Validates the owner name and returns the owner object .
* @ param req The HTTP request .
* @ param ownerName Name of the owner . It is optional .
* @ return The owner object
* @ throws WebApplicationException Throws exception if owner name does not exist . */
protected PrincipalUser validateAndGetOwner ( HttpServletRequest req , String ownerName ) { } } | PrincipalUser remoteUser = getRemoteUser ( req ) ; if ( ownerName == null || ownerName . isEmpty ( ) || ownerName . equalsIgnoreCase ( remoteUser . getUserName ( ) ) ) { // If ownerName is not present or if it is present and equal to remote username , then return remoteUser .
return remoteUser ; } else if ( remoteUser . isPrivileged ( ) ) { PrincipalUser owner ; owner = userService . findUserByUsername ( ownerName ) ; if ( owner == null ) { throw new WebApplicationException ( ownerName + ": User does not exist." , Status . NOT_FOUND ) ; } else { return owner ; } } throw new WebApplicationException ( Status . FORBIDDEN . getReasonPhrase ( ) , Status . FORBIDDEN ) ; |
public class DefaultMonetaryCurrenciesSingletonSpi { /** * Get the names of the currently loaded providers .
* @ return the names of the currently loaded providers , never null . */
@ Override public Set < String > getProviderNames ( ) { } } | Set < String > result = new HashSet < > ( ) ; for ( CurrencyProviderSpi spi : Bootstrap . getServices ( CurrencyProviderSpi . class ) ) { try { result . add ( spi . getProviderName ( ) ) ; } catch ( Exception e ) { Logger . getLogger ( DefaultMonetaryCurrenciesSingletonSpi . class . getName ( ) ) . log ( Level . SEVERE , "Error loading currency provider names for " + spi . getClass ( ) . getName ( ) , e ) ; } } return result ; |
public class SingleInputGate { /** * Creates an input gate and all of its input channels . */
public static SingleInputGate create ( String owningTaskName , JobID jobId , InputGateDeploymentDescriptor igdd , NetworkEnvironment networkEnvironment , TaskEventPublisher taskEventPublisher , TaskActions taskActions , InputChannelMetrics metrics , Counter numBytesInCounter ) { } } | final IntermediateDataSetID consumedResultId = checkNotNull ( igdd . getConsumedResultId ( ) ) ; final ResultPartitionType consumedPartitionType = checkNotNull ( igdd . getConsumedPartitionType ( ) ) ; final int consumedSubpartitionIndex = igdd . getConsumedSubpartitionIndex ( ) ; checkArgument ( consumedSubpartitionIndex >= 0 ) ; final InputChannelDeploymentDescriptor [ ] icdd = checkNotNull ( igdd . getInputChannelDeploymentDescriptors ( ) ) ; final NetworkEnvironmentConfiguration networkConfig = networkEnvironment . getConfiguration ( ) ; final SingleInputGate inputGate = new SingleInputGate ( owningTaskName , jobId , consumedResultId , consumedPartitionType , consumedSubpartitionIndex , icdd . length , taskActions , numBytesInCounter , networkConfig . isCreditBased ( ) ) ; // Create the input channels . There is one input channel for each consumed partition .
final InputChannel [ ] inputChannels = new InputChannel [ icdd . length ] ; int numLocalChannels = 0 ; int numRemoteChannels = 0 ; int numUnknownChannels = 0 ; for ( int i = 0 ; i < inputChannels . length ; i ++ ) { final ResultPartitionID partitionId = icdd [ i ] . getConsumedPartitionId ( ) ; final ResultPartitionLocation partitionLocation = icdd [ i ] . getConsumedPartitionLocation ( ) ; if ( partitionLocation . isLocal ( ) ) { inputChannels [ i ] = new LocalInputChannel ( inputGate , i , partitionId , networkEnvironment . getResultPartitionManager ( ) , taskEventPublisher , networkConfig . partitionRequestInitialBackoff ( ) , networkConfig . partitionRequestMaxBackoff ( ) , metrics ) ; numLocalChannels ++ ; } else if ( partitionLocation . isRemote ( ) ) { inputChannels [ i ] = new RemoteInputChannel ( inputGate , i , partitionId , partitionLocation . getConnectionId ( ) , networkEnvironment . getConnectionManager ( ) , networkConfig . partitionRequestInitialBackoff ( ) , networkConfig . partitionRequestMaxBackoff ( ) , metrics ) ; numRemoteChannels ++ ; } else if ( partitionLocation . isUnknown ( ) ) { inputChannels [ i ] = new UnknownInputChannel ( inputGate , i , partitionId , networkEnvironment . getResultPartitionManager ( ) , taskEventPublisher , networkEnvironment . getConnectionManager ( ) , networkConfig . partitionRequestInitialBackoff ( ) , networkConfig . partitionRequestMaxBackoff ( ) , metrics ) ; numUnknownChannels ++ ; } else { throw new IllegalStateException ( "Unexpected partition location." ) ; } inputGate . setInputChannel ( partitionId . getPartitionId ( ) , inputChannels [ i ] ) ; } LOG . debug ( "{}: Created {} input channels (local: {}, remote: {}, unknown: {})." , owningTaskName , inputChannels . length , numLocalChannels , numRemoteChannels , numUnknownChannels ) ; return inputGate ; |
public class RunnableUtils { /** * Safely sleeps for the given amount of milliseconds .
* If the { @ link Thread # currentThread ( ) current Thread } is { @ link Thread # isInterrupted ( ) interrupted } while
* { @ link Thread # sleep ( long , int ) sleeping } , then the { @ link Thread # currentThread ( ) current Thread } will continue
* to { @ link Thread # sleep ( long , int ) sleep } until the given number of milliseconds have expired and the interrupt bit
* will be set on return .
* @ param milliseconds the number of milliseconds that the { @ link Thread # currentThread ( ) current Thread } will sleep .
* @ return { @ literal true } if the { @ link Thread # currentThread ( ) current Thread } was able to
* { @ link Thread # sleep ( long , int ) } for the given number of milliseconds uninterrupted .
* @ see java . lang . Thread # sleep ( long , int ) */
private static boolean safeSleep ( long milliseconds ) { } } | boolean interrupted = false ; long timeout = ( System . currentTimeMillis ( ) + milliseconds ) ; while ( System . currentTimeMillis ( ) < timeout ) { try { Thread . sleep ( milliseconds ) ; } catch ( InterruptedException cause ) { interrupted = true ; } finally { // sleep only for the time remaining until the timeout
milliseconds = Math . max ( timeout - System . currentTimeMillis ( ) , 0 ) ; } } if ( interrupted ) { Thread . currentThread ( ) . interrupt ( ) ; } return ! Thread . currentThread ( ) . isInterrupted ( ) ;
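A hedged usage sketch of the contract described above (keep waiting through interrupts, restore the interrupt flag afterwards); safeSleep itself is private, so an equivalent standalone helper is used here purely for illustration.

public class SafeSleepUsage {
    public static void main(String[] args) throws Exception {
        Thread worker = new Thread(() -> {
            boolean uninterrupted = sleepUninterrupted(500);
            System.out.println("slept full 500 ms without interrupt: " + uninterrupted);
        });
        worker.start();
        Thread.sleep(100);
        worker.interrupt();   // the worker still waits until the full 500 ms have elapsed
        worker.join();
    }

    // Same contract as safeSleep: absorb interrupts until the deadline, then restore the flag.
    static boolean sleepUninterrupted(long millis) {
        long deadline = System.currentTimeMillis() + millis;
        boolean interrupted = false;
        long remaining = millis;
        while (remaining > 0) {
            try {
                Thread.sleep(remaining);
            } catch (InterruptedException e) {
                interrupted = true;
            }
            remaining = deadline - System.currentTimeMillis();
        }
        if (interrupted) {
            Thread.currentThread().interrupt();
        }
        return !interrupted;
    }
}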
public class InitiatorLeaderMonitor { /** * Start monitoring the leaders . This is a blocking operation .
* @ throws InterruptedException
* @ throws ExecutionException */
public void start ( ) throws InterruptedException , ExecutionException { } } | Future < ? > task = es . submit ( handlePartitionChange ) ; task . get ( ) ; |
public class SpotifyApi { /** * Check if a track is saved in the user ' s " Your Music " library .
* @ param ids The track IDs to check for in the user ' s Your Music library . Maximum : 50 IDs .
* @ return A builder object that can be used to check if a user has saved a track . */
public CheckUsersSavedTracksRequest . Builder checkUsersSavedTracks ( String ... ids ) { } } | return new CheckUsersSavedTracksRequest . Builder ( accessToken ) . setDefaults ( httpManager , scheme , host , port ) . ids ( concat ( ids , ',' ) ) ; |
public class Isomorphism { /** * { @ inheritDoc } */
@ Override public synchronized Double getEnergyScore ( int key ) { } } | return ( bEnergies != null && ! bEnergies . isEmpty ( ) ) ? bEnergies . get ( key ) : null ; |
public class ChronoDateImpl { /** * Returns a copy of this date with the specified number of days subtracted .
* This subtracts the specified period in days from the date .
* The default implementation uses { @ link # plusDays ( long ) } .
* This instance is immutable and unaffected by this method call .
* @ param daysToSubtract the days to subtract , may be negative
* @ return a date based on this one with the days subtracted , not null
* @ throws DateTimeException if the result exceeds the supported date range */
ChronoDateImpl < D > minusDays ( long daysToSubtract ) { } } | return ( daysToSubtract == Long . MIN_VALUE ? plusDays ( Long . MAX_VALUE ) . plusDays ( 1 ) : plusDays ( - daysToSubtract ) ) ; |
public class SibRaConnection { /** * Creates a unique identifier . Checks that the connection is valid and then
* delegates .
* @ return a unique identifier
* @ throws SIErrorException
* if the delegation fails
* @ throws SIResourceException
* if the delegation fails
* @ throws SIConnectionLostException
* if the delegation fails
* @ throws SIConnectionUnavailableException
* if the connection is not valid
* @ throws SIConnectionDroppedException
* if the delegation fails */
@ Override public byte [ ] createUniqueId ( ) throws SIConnectionDroppedException , SIConnectionUnavailableException , SIConnectionLostException , SIResourceException , SIErrorException { } } | checkValid ( ) ; return _delegateConnection . createUniqueId ( ) ; |
public class MediaIntents { /** * Open the media player to play the given media
* @ param file The file path of the media to play .
* @ param type The mime type
* @ return the intent */
public static Intent newPlayMediaFileIntent ( File file , String type ) { } } | return newPlayMediaIntent ( Uri . fromFile ( file ) , type ) ; |
public class InflowRecoveryImpl { /** * { @ inheritDoc } */
public boolean deactivate ( ) throws Exception { } } | if ( activated ) { recovery . shutdown ( ) ; registry . removeXAResourceRecovery ( recovery ) ; activated = false ; return true ; } return false ; |
public class LoganSquare { /** * Parse a map of objects from an InputStream .
* @ param is The inputStream , most likely from your networking library .
* @ param jsonObjectClass The @ JsonObject class to parse the InputStream into */
public static < E > Map < String , E > parseMap ( InputStream is , Class < E > jsonObjectClass ) throws IOException { } } | return mapperFor ( jsonObjectClass ) . parseMap ( is ) ; |
public class Net { /** * Unblock IPv6 source */
static void unblock6 ( FileDescriptor fd , byte [ ] group , int index , byte [ ] source ) throws IOException { } } | blockOrUnblock6 ( false , fd , group , index , source ) ; |
public class JsonModelDescriptorReader { /** * Finds an element in GSON ' s JSON document representation
* @ param jsonElement A ( potentially complex ) element to search in
* @ param jsonPath Path in the given JSON to the desired table . Levels are dot - separated .
* E . g . ' model . _ output . variable _ importances ' .
* @ return JsonElement , if found . Otherwise { @ link JsonNull } . */
private static JsonElement findInJson ( JsonElement jsonElement , String jsonPath ) { } } | final String [ ] route = JSON_PATH_PATTERN . split ( jsonPath ) ; JsonElement result = jsonElement ; for ( String key : route ) { key = key . trim ( ) ; if ( key . isEmpty ( ) ) continue ; if ( result == null ) { result = JsonNull . INSTANCE ; break ; } if ( result . isJsonObject ( ) ) { result = ( ( JsonObject ) result ) . get ( key ) ; } else if ( result . isJsonArray ( ) ) { int value = Integer . valueOf ( key ) - 1 ; result = ( ( JsonArray ) result ) . get ( value ) ; } else break ; } return result ; |
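A hedged usage sketch with Gson (JsonParser.parseString assumes Gson 2.8.6 or newer); the path walk below is a simplified re-implementation for JSON objects only, and note that array segments in the original appear to be 1-based, given the "- 1" adjustment above.

import com.google.gson.JsonElement;
import com.google.gson.JsonParser;

public class JsonPathSketch {

    // Minimal dotted-path walk for demonstration; arrays and null-handling are omitted.
    static JsonElement find(JsonElement root, String path) {
        JsonElement cur = root;
        for (String key : path.split("\\.")) {
            if (cur == null || !cur.isJsonObject()) break;
            cur = cur.getAsJsonObject().get(key);
        }
        return cur;
    }

    public static void main(String[] args) {
        JsonElement doc = JsonParser.parseString(
                "{\"model\":{\"_output\":{\"variable_importances\":[\"x1\",\"x2\"]}}}");
        System.out.println(find(doc, "model._output.variable_importances")); // ["x1","x2"]
    }
}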
public class IpcBuffer { /** * absolute get */
public byte getByte ( int index ) { } } | Segment segment = segmentContaining ( index ) ; return segment . buffer . get ( segment . relativize ( index ) ) ; |
public class ConfigEventDispatcher { /** * Dispatch ConfigurationEvent to the ConfigurationListeners .
* @ param pid
* - Service PID
* @ param factoryPid
* - factory PID
* @ param eventType
* - ConfigurationEvent type */
protected Future < ? > dispatch ( final int eventType , final String factoryPid , final String pid ) { } } | final ConfigurationEvent event = createConfigurationEvent ( eventType , factoryPid , pid ) ; if ( event == null ) return null ; final ServiceReference < ConfigurationListener > [ ] refs = st . getServiceReferences ( ) ; if ( refs == null ) return null ; final String qPid = ( factoryPid != null ) ? factoryPid : pid ; return caFactory . updateQueue . add ( qPid , new Runnable ( ) { @ Override @ FFDCIgnore ( Exception . class ) public void run ( ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) { Tr . event ( tc , "dispatch: sending configuration listener event for " + qPid ) ; } for ( ServiceReference < ConfigurationListener > sr : refs ) { if ( sr != null ) { ConfigurationListener cl = st . getService ( sr ) ; if ( cl != null && FrameworkState . isValid ( ) ) { try { cl . configurationEvent ( event ) ; } catch ( Exception e ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "dispatch(): Exception thrown while trying to dispatch ConfigurationEvent." , e ) ; } FFDCFilter . processException ( e , ME , "dispatch(): Exception thrown while trying to dispatch ConfigurationEvent." , new Object [ ] { pid , factoryPid , eventType , cl } ) ; } } } } } } ) ; |
public class DatabaseManager { /** * Registers a server as serving a given database . */
private static void registerServer ( Server server , Database db ) { } } | if ( ! serverMap . containsKey ( server ) ) { serverMap . put ( server , new HashSet ( ) ) ; } HashSet databases = ( HashSet ) serverMap . get ( server ) ; databases . add ( db ) ; |
public class ODMGBaseBeanImpl { /** * Store a collection of objects . */
public Collection storeObjects ( Collection objects ) { } } | try { /* One possibility of storing objects is to use the current transaction
associated with the container */
Transaction tx = odmg . currentTransaction ( ) ; for ( Iterator iterator = objects . iterator ( ) ; iterator . hasNext ( ) ; ) { tx . lock ( iterator . next ( ) , Transaction . WRITE ) ; } } catch ( LockNotGrantedException e ) { log . error ( "Failure while storing objects " + objects , e ) ; throw new EJBException ( "Failure while storing objects" , e ) ; } return objects ; |
public class SarlFieldBuilderImpl { /** * Add a modifier .
* @ param modifier the modifier to add . */
public void addModifier ( String modifier ) { } } | if ( ! Strings . isEmpty ( modifier ) ) { getSarlField ( ) . getModifiers ( ) . add ( modifier ) ; } |
public class Wrap { /** * wrap a single line
* @ param str the line to wrap
* @ param wrapTextLength the maximum number of characters per wrapped line
* @ return the wrapped line */
private static String wrapLine ( String str , int wrapTextLength ) { } } | int wtl = wrapTextLength ; if ( str . length ( ) <= wtl ) return str ; String sub = str . substring ( 0 , wtl ) ; String rest = str . substring ( wtl ) ; char firstR = rest . charAt ( 0 ) ; String ls = SystemUtil . getOSSpecificLineSeparator ( ) ; if ( firstR == ' ' || firstR == '\t' ) return sub + ls + wrapLine ( rest . length ( ) > 1 ? rest . substring ( 1 ) : "" , wrapTextLength ) ; int indexSpace = sub . lastIndexOf ( ' ' ) ; int indexTab = sub . lastIndexOf ( '\t' ) ; int index = indexSpace <= indexTab ? indexTab : indexSpace ; if ( index == - 1 ) return sub + ls + wrapLine ( rest , wrapTextLength ) ; return sub . substring ( 0 , index ) + ls + wrapLine ( sub . substring ( index + 1 ) + rest , wrapTextLength ) ; |
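A self-contained sketch of the same soft-wrap idea (break at the last space or tab inside the window, otherwise hard-break), using System.lineSeparator() in place of the library's SystemUtil helper; the sample text is arbitrary.

public class WrapSketch {

    static String wrap(String str, int width) {
        String ls = System.lineSeparator();
        if (str.length() <= width) return str;
        String head = str.substring(0, width);
        String rest = str.substring(width);
        int breakAt = Math.max(head.lastIndexOf(' '), head.lastIndexOf('\t'));
        if (breakAt == -1) {
            return head + ls + wrap(rest, width);                   // hard break: no whitespace found
        }
        return head.substring(0, breakAt) + ls
                + wrap(head.substring(breakAt + 1) + rest, width);  // re-wrap the carried-over tail
    }

    public static void main(String[] args) {
        System.out.println(wrap("the quick brown fox jumps over the lazy dog", 12));
        // the quick
        // brown fox
        // jumps over
        // the lazy dog
    }
}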
public class ConceptMention { /** * Getter for ref : gets the reference to the Concept . The super type TOP is used here to avoid recursive dependencies between type systems .
* @ generated
* @ return value of the feature */
public TOP getRef ( ) { } } | if ( ConceptMention_Type . featOkTst && ( ( ConceptMention_Type ) jcasType ) . casFeat_ref == null ) jcasType . jcas . throwFeatMissing ( "ref" , "de.julielab.jules.types.ConceptMention" ) ; return ( TOP ) ( jcasType . ll_cas . ll_getFSForRef ( jcasType . ll_cas . ll_getRefValue ( addr , ( ( ConceptMention_Type ) jcasType ) . casFeatCode_ref ) ) ) ; |
public class DecodingStateProtocolDecoder { /** * { @ inheritDoc } */
public void decode ( IoSession session , IoBuffer in , ProtocolDecoderOutput out ) throws Exception { } } | if ( this . session == null ) { this . session = session ; } else if ( this . session != session ) { throw new IllegalStateException ( getClass ( ) . getSimpleName ( ) + " is a stateful decoder. " + "You have to create one per session." ) ; } undecodedBuffers . offer ( in ) ; for ( ; ; ) { IoBuffer b = undecodedBuffers . peek ( ) ; if ( b == null ) { break ; } int oldRemaining = b . remaining ( ) ; state . decode ( b , out ) ; int newRemaining = b . remaining ( ) ; if ( newRemaining != 0 ) { if ( oldRemaining == newRemaining ) { throw new IllegalStateException ( DecodingState . class . getSimpleName ( ) + " must " + "consume at least one byte per decode()." ) ; } } else { undecodedBuffers . poll ( ) ; } } |
public class AbstractCollectionValidatorBuilder { /** * Creates a new { @ link ObjectValidatorBuilder builder } for the { @ link AbstractValidator validators } to invoke for
* each { @ link Collection # contains ( Object ) element contained } in the { @ link Collection } . < br / >
* Use { @ link # and ( ) } to return to this builder after the sub - builder is complete . < br / >
* A typical usage looks like this :
* < pre >
* [ . . . ] . with ( ( f , v ) - > f . create ( v ) ) . [ . . . ] . and ( ) . [ . . . ] . build ( )
* < / pre >
* @ param < SUB > the generic type of the returned sub - builder .
* @ param factory lambda function used to create the returned sub - builder by calling the according { @ code create }
* method on the supplied { @ link ObjectValidatorBuilderFactory } with the given dummy element .
* @ return the new sub - builder . */
public < SUB extends ObjectValidatorBuilder < E , ? extends SELF , ? > > SUB with ( BiFunction < ObjectValidatorBuilderFactory < SELF > , E , SUB > factory ) { } } | if ( this . subBuilder != null ) { throw new IllegalStateException ( "subBuilder already exists!" ) ; } SUB sub = factory . apply ( getSubFactory ( ) , null ) ; this . subBuilder = sub ; return sub ; |
public class CommerceDiscountRuleUtil { /** * Returns the last commerce discount rule in the ordered set where commerceDiscountId = & # 63 ; .
* @ param commerceDiscountId the commerce discount ID
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the last matching commerce discount rule
* @ throws NoSuchDiscountRuleException if a matching commerce discount rule could not be found */
public static CommerceDiscountRule findByCommerceDiscountId_Last ( long commerceDiscountId , OrderByComparator < CommerceDiscountRule > orderByComparator ) throws com . liferay . commerce . discount . exception . NoSuchDiscountRuleException { } } | return getPersistence ( ) . findByCommerceDiscountId_Last ( commerceDiscountId , orderByComparator ) ; |
public class ASTUtils { /** * Determines if the given ASTType inherits from or extends the given inheritable ASTType
* @ param astType target
* @ param inheritable inheritance target
* @ return true if the given astType target inherits from the inheritable type with the given rules . */
public boolean inherits ( ASTType astType , ASTType inheritable ) { } } | if ( astType == null ) { return false ; } if ( inheritable == null || inheritable . equals ( OBJECT_TYPE ) ) { return true ; } if ( astType . equals ( inheritable ) ) { return true ; } for ( ASTType typeInterfaces : astType . getInterfaces ( ) ) { if ( inherits ( typeInterfaces , inheritable ) ) { return true ; } } return inherits ( astType . getSuperClass ( ) , inheritable ) ; |
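The same walk expressed against java.lang.Class purely for illustration; the original operates on the ASTType abstraction, and the class names below are standard JDK types used as examples.

public class InheritanceSketch {

    // Mirrors the logic above: match on equality, any interface (recursively), or the superclass chain.
    static boolean inherits(Class<?> type, Class<?> inheritable) {
        if (type == null) return false;
        if (inheritable == null || inheritable.equals(Object.class)) return true;
        if (type.equals(inheritable)) return true;
        for (Class<?> iface : type.getInterfaces()) {
            if (inherits(iface, inheritable)) return true;
        }
        return inherits(type.getSuperclass(), inheritable);
    }

    public static void main(String[] args) {
        System.out.println(inherits(java.util.ArrayList.class, java.util.Collection.class)); // true
        System.out.println(inherits(String.class, java.util.Collection.class));              // false
    }
}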
public class ListFiles { /** * Lists the contents of the given { @ link File directory } displayed from the given { @ link String indent } .
* @ param directory { @ link File } referring to the directory for which the contents will be listed .
* @ param indent { @ link String } containing the indent prefix at which to begin rendering the view
* of the directory hierarchy / tree .
* @ throws IllegalArgumentException if the given { @ link File } is not a valid directory .
* @ see # validateDirectory ( File )
* @ see java . io . File */
public void listFiles ( File directory , String indent ) { } } | directory = validateDirectory ( directory ) ; indent = Optional . ofNullable ( indent ) . filter ( StringUtils :: hasText ) . orElse ( StringUtils . EMPTY_STRING ) ; printDirectoryName ( indent , directory ) ; String directoryContentIndent = buildDirectoryContentIndent ( indent ) ; stream ( sort ( nullSafeArray ( directory . listFiles ( ) , File . class ) ) ) . forEach ( file -> { if ( FileSystemUtils . isDirectory ( file ) ) { listFiles ( file , directoryContentIndent ) ; } else { printFileName ( directoryContentIndent , file ) ; } } ) ; |
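A dependency-free sketch of the same recursive listing using java.io only; the two-space indent growth and the "+"/"-" prefixes are assumptions, since the original delegates those details to helper methods not shown here.

import java.io.File;

public class ListFilesSketch {

    static void listFiles(File directory, String indent) {
        if (directory == null || !directory.isDirectory()) {
            throw new IllegalArgumentException(directory + " is not a valid directory");
        }
        System.out.println(indent + "+ " + directory.getName() + File.separator);
        File[] contents = directory.listFiles();
        if (contents == null) return;                  // e.g. permission denied
        for (File file : contents) {
            if (file.isDirectory()) {
                listFiles(file, indent + "  ");        // recurse one level deeper
            } else {
                System.out.println(indent + "  - " + file.getName());
            }
        }
    }

    public static void main(String[] args) {
        listFiles(new File(args.length > 0 ? args[0] : "."), "");
    }
}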
public class NettyOptions { /** * The number of event loop threads . */
public int threads ( ) { } } | int threads = reader . getInteger ( THREADS , DEFAULT_THREADS ) ; if ( threads == - 1 ) { return Runtime . getRuntime ( ) . availableProcessors ( ) ; } return threads ; |