defects4j_project | defects4j_bug_id | file_path | bug_start_line | bug_end_line | issue_title | issue_description | original_src | original_src_wo_comments | fixed_src | fixed_src_wo_comments
---|---|---|---|---|---|---|---|---|---|---
Math | 19 | src/main/java/org/apache/commons/math3/optimization/direct/CMAESOptimizer.java | 504 | 561 | Wide bounds to CMAESOptimizer result in NaN parameters passed to fitness function | If you give large values as lower/upper bounds (for example -Double.MAX_VALUE as a lower bound), the optimizer can call the fitness function with parameters set to NaN. My guess is this is due to FitnessFunction.encode/decode generating NaN when normalizing/denormalizing parameters. For example, if the difference between the lower and upper bound is greater than Double.MAX_VALUE, encode could divide infinity by infinity. | private void checkParameters() {
final double[] init = getStartPoint();
final double[] lB = getLowerBound();
final double[] uB = getUpperBound();
// Checks whether there is at least one finite bound value.
boolean hasFiniteBounds = false;
for (int i = 0; i < lB.length; i++) {
if (!Double.isInfinite(lB[i]) ||
!Double.isInfinite(uB[i])) {
hasFiniteBounds = true;
break;
}
}
// Checks whether there is at least one infinite bound value.
boolean hasInfiniteBounds = false;
if (hasFiniteBounds) {
for (int i = 0; i < lB.length; i++) {
if (Double.isInfinite(lB[i]) ||
Double.isInfinite(uB[i])) {
hasInfiniteBounds = true;
break;
}
}
if (hasInfiniteBounds) {
// If there is at least one finite bound, none can be infinite,
// because mixed cases are not supported by the current code.
throw new MathUnsupportedOperationException();
} else {
// Convert API to internal handling of boundaries.
boundaries = new double[2][];
boundaries[0] = lB;
boundaries[1] = uB;
// Abort early if the normalization will overflow (cf. "encode" method).
}
} else {
// Convert API to internal handling of boundaries.
boundaries = null;
}
if (inputSigma != null) {
if (inputSigma.length != init.length) {
throw new DimensionMismatchException(inputSigma.length, init.length);
}
for (int i = 0; i < init.length; i++) {
if (inputSigma[i] < 0) {
throw new NotPositiveException(inputSigma[i]);
}
if (boundaries != null) {
if (inputSigma[i] > boundaries[1][i] - boundaries[0][i]) {
throw new OutOfRangeException(inputSigma[i], 0, boundaries[1][i] - boundaries[0][i]);
}
}
}
}
} | private void checkParameters ( ) { final double [ ] init = getStartPoint ( ) ; final double [ ] lB = getLowerBound ( ) ; final double [ ] uB = getUpperBound ( ) ; boolean hasFiniteBounds = false ; for ( int i = 0 ; i < lB . length ; i ++ ) { if ( ! Double . isInfinite ( lB [ i ] ) || ! Double . isInfinite ( uB [ i ] ) ) { hasFiniteBounds = true ; break ; } } boolean hasInfiniteBounds = false ; if ( hasFiniteBounds ) { for ( int i = 0 ; i < lB . length ; i ++ ) { if ( Double . isInfinite ( lB [ i ] ) || Double . isInfinite ( uB [ i ] ) ) { hasInfiniteBounds = true ; break ; } } if ( hasInfiniteBounds ) { throw new MathUnsupportedOperationException ( ) ; } else { boundaries = new double [ 2 ] [ ] ; boundaries [ 0 ] = lB ; boundaries [ 1 ] = uB ; } } else { boundaries = null ; } if ( inputSigma != null ) { if ( inputSigma . length != init . length ) { throw new DimensionMismatchException ( inputSigma . length , init . length ) ; } for ( int i = 0 ; i < init . length ; i ++ ) { if ( inputSigma [ i ] < 0 ) { throw new NotPositiveException ( inputSigma [ i ] ) ; } if ( boundaries != null ) { if ( inputSigma [ i ] > boundaries [ 1 ] [ i ] - boundaries [ 0 ] [ i ] ) { throw new OutOfRangeException ( inputSigma [ i ] , 0 , boundaries [ 1 ] [ i ] - boundaries [ 0 ] [ i ] ) ; } } } } } | private void checkParameters() {
final double[] init = getStartPoint();
final double[] lB = getLowerBound();
final double[] uB = getUpperBound();
// Checks whether there is at least one finite bound value.
boolean hasFiniteBounds = false;
for (int i = 0; i < lB.length; i++) {
if (!Double.isInfinite(lB[i]) ||
!Double.isInfinite(uB[i])) {
hasFiniteBounds = true;
break;
}
}
// Checks whether there is at least one infinite bound value.
boolean hasInfiniteBounds = false;
if (hasFiniteBounds) {
for (int i = 0; i < lB.length; i++) {
if (Double.isInfinite(lB[i]) ||
Double.isInfinite(uB[i])) {
hasInfiniteBounds = true;
break;
}
}
if (hasInfiniteBounds) {
// If there is at least one finite bound, none can be infinite,
// because mixed cases are not supported by the current code.
throw new MathUnsupportedOperationException();
} else {
// Convert API to internal handling of boundaries.
boundaries = new double[2][];
boundaries[0] = lB;
boundaries[1] = uB;
// Abort early if the normalization will overflow (cf. "encode" method).
for (int i = 0; i < lB.length; i++) {
if (Double.isInfinite(boundaries[1][i] - boundaries[0][i])) {
final double max = Double.MAX_VALUE + boundaries[0][i];
final NumberIsTooLargeException e
= new NumberIsTooLargeException(boundaries[1][i],
max,
true);
e.getContext().addMessage(LocalizedFormats.OVERFLOW);
e.getContext().addMessage(LocalizedFormats.INDEX, i);
throw e;
}
}
}
} else {
// Convert API to internal handling of boundaries.
boundaries = null;
}
if (inputSigma != null) {
if (inputSigma.length != init.length) {
throw new DimensionMismatchException(inputSigma.length, init.length);
}
for (int i = 0; i < init.length; i++) {
if (inputSigma[i] < 0) {
throw new NotPositiveException(inputSigma[i]);
}
if (boundaries != null) {
if (inputSigma[i] > boundaries[1][i] - boundaries[0][i]) {
throw new OutOfRangeException(inputSigma[i], 0, boundaries[1][i] - boundaries[0][i]);
}
}
}
}
} | private void checkParameters ( ) { final double [ ] init = getStartPoint ( ) ; final double [ ] lB = getLowerBound ( ) ; final double [ ] uB = getUpperBound ( ) ; boolean hasFiniteBounds = false ; for ( int i = 0 ; i < lB . length ; i ++ ) { if ( ! Double . isInfinite ( lB [ i ] ) || ! Double . isInfinite ( uB [ i ] ) ) { hasFiniteBounds = true ; break ; } } boolean hasInfiniteBounds = false ; if ( hasFiniteBounds ) { for ( int i = 0 ; i < lB . length ; i ++ ) { if ( Double . isInfinite ( lB [ i ] ) || Double . isInfinite ( uB [ i ] ) ) { hasInfiniteBounds = true ; break ; } } if ( hasInfiniteBounds ) { throw new MathUnsupportedOperationException ( ) ; } else { boundaries = new double [ 2 ] [ ] ; boundaries [ 0 ] = lB ; boundaries [ 1 ] = uB ; for ( int i = 0 ; i < lB . length ; i ++ ) { if ( Double . isInfinite ( boundaries [ 1 ] [ i ] - boundaries [ 0 ] [ i ] ) ) { final double max = Double . MAX_VALUE + boundaries [ 0 ] [ i ] ; final NumberIsTooLargeException e = new NumberIsTooLargeException ( boundaries [ 1 ] [ i ] , max , true ) ; e . getContext ( ) . addMessage ( LocalizedFormats . OVERFLOW ) ; e . getContext ( ) . addMessage ( LocalizedFormats . INDEX , i ) ; throw e ; } } } } else { boundaries = null ; } if ( inputSigma != null ) { if ( inputSigma . length != init . length ) { throw new DimensionMismatchException ( inputSigma . length , init . length ) ; } for ( int i = 0 ; i < init . length ; i ++ ) { if ( inputSigma [ i ] < 0 ) { throw new NotPositiveException ( inputSigma [ i ] ) ; } if ( boundaries != null ) { if ( inputSigma [ i ] > boundaries [ 1 ] [ i ] - boundaries [ 0 ] [ i ] ) { throw new OutOfRangeException ( inputSigma [ i ] , 0 , boundaries [ 1 ] [ i ] - boundaries [ 0 ] [ i ] ) ; } } } } } |
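
The Math-19 fix above aborts early when a bound difference `uB[i] - lB[i]` overflows to infinity, because the optimizer's normalization would otherwise divide infinity by infinity. A minimal standalone sketch of that failure mode (plain Java, no Commons Math dependency; `encode` paraphrases the normalization described in the issue, not the library's exact code):

```java
public class BoundsOverflowDemo {

    // Paraphrase of the FitnessFunction normalization the issue describes:
    // map x from [lo, hi] onto [0, 1].
    static double encode(double x, double lo, double hi) {
        return (x - lo) / (hi - lo);
    }

    public static void main(String[] args) {
        double lo = -Double.MAX_VALUE;
        double hi = Double.MAX_VALUE;
        // The bound difference exceeds the double range and overflows:
        System.out.println(hi - lo);            // Infinity
        // Normalizing a point at the upper bound then divides Inf by Inf:
        System.out.println(encode(hi, lo, hi)); // NaN
        // The added Double.isInfinite(boundaries[1][i] - boundaries[0][i])
        // check in the fix rejects exactly this configuration up front.
    }
}
```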
Compress | 16 | src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java | 197 | 258 | Too relaxed tar detection in ArchiveStreamFactory | The relaxed tar detection logic added in COMPRESS-117 unfortunately matches also some non-tar files like a [test AIFF file|https://svn.apache.org/repos/asf/tika/trunk/tika-parsers/src/test/resources/test-documents/testAIFF.aif] that Apache Tika uses. It would be good to improve the detection heuristics to still match files like the one in COMPRESS-117 but avoid false positives like the AIFF file in Tika. | public ArchiveInputStream createArchiveInputStream(final InputStream in)
throws ArchiveException {
if (in == null) {
throw new IllegalArgumentException("Stream must not be null.");
}
if (!in.markSupported()) {
throw new IllegalArgumentException("Mark is not supported.");
}
final byte[] signature = new byte[12];
in.mark(signature.length);
try {
int signatureLength = in.read(signature);
in.reset();
if (ZipArchiveInputStream.matches(signature, signatureLength)) {
return new ZipArchiveInputStream(in);
} else if (JarArchiveInputStream.matches(signature, signatureLength)) {
return new JarArchiveInputStream(in);
} else if (ArArchiveInputStream.matches(signature, signatureLength)) {
return new ArArchiveInputStream(in);
} else if (CpioArchiveInputStream.matches(signature, signatureLength)) {
return new CpioArchiveInputStream(in);
}
// Dump needs a bigger buffer to check the signature;
final byte[] dumpsig = new byte[32];
in.mark(dumpsig.length);
signatureLength = in.read(dumpsig);
in.reset();
if (DumpArchiveInputStream.matches(dumpsig, signatureLength)) {
return new DumpArchiveInputStream(in);
}
// Tar needs an even bigger buffer to check the signature; read the first block
final byte[] tarheader = new byte[512];
in.mark(tarheader.length);
signatureLength = in.read(tarheader);
in.reset();
if (TarArchiveInputStream.matches(tarheader, signatureLength)) {
return new TarArchiveInputStream(in);
}
// COMPRESS-117 - improve auto-recognition
if (signatureLength >= 512) {
try {
TarArchiveInputStream tais = new TarArchiveInputStream(new ByteArrayInputStream(tarheader));
// COMPRESS-191 - verify the header checksum
tais.getNextEntry();
return new TarArchiveInputStream(in);
} catch (Exception e) { // NOPMD
// can generate IllegalArgumentException as well
// as IOException
// autodetection, simply not a TAR
// ignored
}
}
} catch (IOException e) {
throw new ArchiveException("Could not use reset and mark operations.", e);
}
throw new ArchiveException("No Archiver found for the stream signature");
} | public ArchiveInputStream createArchiveInputStream ( final InputStream in ) throws ArchiveException { if ( in == null ) { throw new IllegalArgumentException ( "Stream must not be null." ) ; } if ( ! in . markSupported ( ) ) { throw new IllegalArgumentException ( "Mark is not supported." ) ; } final byte [ ] signature = new byte [ 12 ] ; in . mark ( signature . length ) ; try { int signatureLength = in . read ( signature ) ; in . reset ( ) ; if ( ZipArchiveInputStream . matches ( signature , signatureLength ) ) { return new ZipArchiveInputStream ( in ) ; } else if ( JarArchiveInputStream . matches ( signature , signatureLength ) ) { return new JarArchiveInputStream ( in ) ; } else if ( ArArchiveInputStream . matches ( signature , signatureLength ) ) { return new ArArchiveInputStream ( in ) ; } else if ( CpioArchiveInputStream . matches ( signature , signatureLength ) ) { return new CpioArchiveInputStream ( in ) ; } final byte [ ] dumpsig = new byte [ 32 ] ; in . mark ( dumpsig . length ) ; signatureLength = in . read ( dumpsig ) ; in . reset ( ) ; if ( DumpArchiveInputStream . matches ( dumpsig , signatureLength ) ) { return new DumpArchiveInputStream ( in ) ; } final byte [ ] tarheader = new byte [ 512 ] ; in . mark ( tarheader . length ) ; signatureLength = in . read ( tarheader ) ; in . reset ( ) ; if ( TarArchiveInputStream . matches ( tarheader , signatureLength ) ) { return new TarArchiveInputStream ( in ) ; } if ( signatureLength >= 512 ) { try { TarArchiveInputStream tais = new TarArchiveInputStream ( new ByteArrayInputStream ( tarheader ) ) ; tais . getNextEntry ( ) ; return new TarArchiveInputStream ( in ) ; } catch ( Exception e ) { } } } catch ( IOException e ) { throw new ArchiveException ( "Could not use reset and mark operations." , e ) ; } throw new ArchiveException ( "No Archiver found for the stream signature" ) ; } | public ArchiveInputStream createArchiveInputStream(final InputStream in)
throws ArchiveException {
if (in == null) {
throw new IllegalArgumentException("Stream must not be null.");
}
if (!in.markSupported()) {
throw new IllegalArgumentException("Mark is not supported.");
}
final byte[] signature = new byte[12];
in.mark(signature.length);
try {
int signatureLength = in.read(signature);
in.reset();
if (ZipArchiveInputStream.matches(signature, signatureLength)) {
return new ZipArchiveInputStream(in);
} else if (JarArchiveInputStream.matches(signature, signatureLength)) {
return new JarArchiveInputStream(in);
} else if (ArArchiveInputStream.matches(signature, signatureLength)) {
return new ArArchiveInputStream(in);
} else if (CpioArchiveInputStream.matches(signature, signatureLength)) {
return new CpioArchiveInputStream(in);
}
// Dump needs a bigger buffer to check the signature;
final byte[] dumpsig = new byte[32];
in.mark(dumpsig.length);
signatureLength = in.read(dumpsig);
in.reset();
if (DumpArchiveInputStream.matches(dumpsig, signatureLength)) {
return new DumpArchiveInputStream(in);
}
// Tar needs an even bigger buffer to check the signature; read the first block
final byte[] tarheader = new byte[512];
in.mark(tarheader.length);
signatureLength = in.read(tarheader);
in.reset();
if (TarArchiveInputStream.matches(tarheader, signatureLength)) {
return new TarArchiveInputStream(in);
}
// COMPRESS-117 - improve auto-recognition
if (signatureLength >= 512) {
try {
TarArchiveInputStream tais = new TarArchiveInputStream(new ByteArrayInputStream(tarheader));
// COMPRESS-191 - verify the header checksum
if (tais.getNextTarEntry().isCheckSumOK()) {
return new TarArchiveInputStream(in);
}
} catch (Exception e) { // NOPMD
// can generate IllegalArgumentException as well
// as IOException
// autodetection, simply not a TAR
// ignored
}
}
} catch (IOException e) {
throw new ArchiveException("Could not use reset and mark operations.", e);
}
throw new ArchiveException("No Archiver found for the stream signature");
} | public ArchiveInputStream createArchiveInputStream ( final InputStream in ) throws ArchiveException { if ( in == null ) { throw new IllegalArgumentException ( "Stream must not be null." ) ; } if ( ! in . markSupported ( ) ) { throw new IllegalArgumentException ( "Mark is not supported." ) ; } final byte [ ] signature = new byte [ 12 ] ; in . mark ( signature . length ) ; try { int signatureLength = in . read ( signature ) ; in . reset ( ) ; if ( ZipArchiveInputStream . matches ( signature , signatureLength ) ) { return new ZipArchiveInputStream ( in ) ; } else if ( JarArchiveInputStream . matches ( signature , signatureLength ) ) { return new JarArchiveInputStream ( in ) ; } else if ( ArArchiveInputStream . matches ( signature , signatureLength ) ) { return new ArArchiveInputStream ( in ) ; } else if ( CpioArchiveInputStream . matches ( signature , signatureLength ) ) { return new CpioArchiveInputStream ( in ) ; } final byte [ ] dumpsig = new byte [ 32 ] ; in . mark ( dumpsig . length ) ; signatureLength = in . read ( dumpsig ) ; in . reset ( ) ; if ( DumpArchiveInputStream . matches ( dumpsig , signatureLength ) ) { return new DumpArchiveInputStream ( in ) ; } final byte [ ] tarheader = new byte [ 512 ] ; in . mark ( tarheader . length ) ; signatureLength = in . read ( tarheader ) ; in . reset ( ) ; if ( TarArchiveInputStream . matches ( tarheader , signatureLength ) ) { return new TarArchiveInputStream ( in ) ; } if ( signatureLength >= 512 ) { try { TarArchiveInputStream tais = new TarArchiveInputStream ( new ByteArrayInputStream ( tarheader ) ) ; if ( tais . getNextTarEntry ( ) . isCheckSumOK ( ) ) { return new TarArchiveInputStream ( in ) ; } } catch ( Exception e ) { } } } catch ( IOException e ) { throw new ArchiveException ( "Could not use reset and mark operations." , e ) ; } throw new ArchiveException ( "No Archiver found for the stream signature" ) ; } |
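
The Compress-16 fix narrows the COMPRESS-117 fallback: a stream is accepted as tar only when the first header block's checksum verifies via `isCheckSumOK()`. From the caller's side, the factory is used as in the sketch below (hedged: the file path is illustrative, and the `BufferedInputStream` wrapper supplies the mark/reset support that `createArchiveInputStream(InputStream)` requires):

```java
import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.InputStream;
import org.apache.commons.compress.archivers.ArchiveInputStream;
import org.apache.commons.compress.archivers.ArchiveStreamFactory;

public class AutoDetectDemo {
    public static void main(String[] args) throws Exception {
        // The factory throws IllegalArgumentException without mark support,
        // so a plain FileInputStream must be wrapped in BufferedInputStream.
        try (InputStream in = new BufferedInputStream(new FileInputStream(args[0]));
             ArchiveInputStream ais =
                     new ArchiveStreamFactory().createArchiveInputStream(in)) {
            System.out.println("Detected: " + ais.getClass().getSimpleName());
        }
    }
}
```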
Compress | 41 | src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java | 219 | 324 | ZipArchiveInputStream.getNextZipEntry() should differentiate between "invalid entry encountered" and "no more entries" | ZipArchiveInputStream.getNextZipEntry() currently returns null if an invalid entry is encountered. Thus, it's not possible to differentiate between "no more entries" and "invalid entry encountered" conditions.
Instead, it should throw an exception if an invalid entry is encountered.
I've created a test case and fix. I will submit a pull request shortly. | public ZipArchiveEntry getNextZipEntry() throws IOException {
boolean firstEntry = true;
if (closed || hitCentralDirectory) {
return null;
}
if (current != null) {
closeEntry();
firstEntry = false;
}
try {
if (firstEntry) {
// split archives have a special signature before the
// first local file header - look for it and fail with
// the appropriate error message if this is a split
// archive.
readFirstLocalFileHeader(LFH_BUF);
} else {
readFully(LFH_BUF);
}
} catch (final EOFException e) {
return null;
}
final ZipLong sig = new ZipLong(LFH_BUF);
if (sig.equals(ZipLong.CFH_SIG) || sig.equals(ZipLong.AED_SIG)) {
hitCentralDirectory = true;
skipRemainderOfArchive();
}
if (!sig.equals(ZipLong.LFH_SIG)) {
return null;
}
int off = WORD;
current = new CurrentEntry();
final int versionMadeBy = ZipShort.getValue(LFH_BUF, off);
off += SHORT;
current.entry.setPlatform((versionMadeBy >> ZipFile.BYTE_SHIFT) & ZipFile.NIBLET_MASK);
final GeneralPurposeBit gpFlag = GeneralPurposeBit.parse(LFH_BUF, off);
final boolean hasUTF8Flag = gpFlag.usesUTF8ForNames();
final ZipEncoding entryEncoding = hasUTF8Flag ? ZipEncodingHelper.UTF8_ZIP_ENCODING : zipEncoding;
current.hasDataDescriptor = gpFlag.usesDataDescriptor();
current.entry.setGeneralPurposeBit(gpFlag);
off += SHORT;
current.entry.setMethod(ZipShort.getValue(LFH_BUF, off));
off += SHORT;
final long time = ZipUtil.dosToJavaTime(ZipLong.getValue(LFH_BUF, off));
current.entry.setTime(time);
off += WORD;
ZipLong size = null, cSize = null;
if (!current.hasDataDescriptor) {
current.entry.setCrc(ZipLong.getValue(LFH_BUF, off));
off += WORD;
cSize = new ZipLong(LFH_BUF, off);
off += WORD;
size = new ZipLong(LFH_BUF, off);
off += WORD;
} else {
off += 3 * WORD;
}
final int fileNameLen = ZipShort.getValue(LFH_BUF, off);
off += SHORT;
final int extraLen = ZipShort.getValue(LFH_BUF, off);
off += SHORT;
final byte[] fileName = new byte[fileNameLen];
readFully(fileName);
current.entry.setName(entryEncoding.decode(fileName), fileName);
final byte[] extraData = new byte[extraLen];
readFully(extraData);
current.entry.setExtra(extraData);
if (!hasUTF8Flag && useUnicodeExtraFields) {
ZipUtil.setNameAndCommentFromExtraFields(current.entry, fileName, null);
}
processZip64Extra(size, cSize);
if (current.entry.getCompressedSize() != ArchiveEntry.SIZE_UNKNOWN) {
if (current.entry.getMethod() == ZipMethod.UNSHRINKING.getCode()) {
current.in = new UnshrinkingInputStream(new BoundedInputStream(in, current.entry.getCompressedSize()));
} else if (current.entry.getMethod() == ZipMethod.IMPLODING.getCode()) {
current.in = new ExplodingInputStream(
current.entry.getGeneralPurposeBit().getSlidingDictionarySize(),
current.entry.getGeneralPurposeBit().getNumberOfShannonFanoTrees(),
new BoundedInputStream(in, current.entry.getCompressedSize()));
} else if (current.entry.getMethod() == ZipMethod.BZIP2.getCode()) {
current.in = new BZip2CompressorInputStream(new BoundedInputStream(in, current.entry.getCompressedSize()));
}
}
entriesRead++;
return current.entry;
} | public ZipArchiveEntry getNextZipEntry ( ) throws IOException { boolean firstEntry = true ; if ( closed || hitCentralDirectory ) { return null ; } if ( current != null ) { closeEntry ( ) ; firstEntry = false ; } try { if ( firstEntry ) { readFirstLocalFileHeader ( LFH_BUF ) ; } else { readFully ( LFH_BUF ) ; } } catch ( final EOFException e ) { return null ; } final ZipLong sig = new ZipLong ( LFH_BUF ) ; if ( sig . equals ( ZipLong . CFH_SIG ) || sig . equals ( ZipLong . AED_SIG ) ) { hitCentralDirectory = true ; skipRemainderOfArchive ( ) ; } if ( ! sig . equals ( ZipLong . LFH_SIG ) ) { return null ; } int off = WORD ; current = new CurrentEntry ( ) ; final int versionMadeBy = ZipShort . getValue ( LFH_BUF , off ) ; off += SHORT ; current . entry . setPlatform ( ( versionMadeBy >> ZipFile . BYTE_SHIFT ) & ZipFile . NIBLET_MASK ) ; final GeneralPurposeBit gpFlag = GeneralPurposeBit . parse ( LFH_BUF , off ) ; final boolean hasUTF8Flag = gpFlag . usesUTF8ForNames ( ) ; final ZipEncoding entryEncoding = hasUTF8Flag ? ZipEncodingHelper . UTF8_ZIP_ENCODING : zipEncoding ; current . hasDataDescriptor = gpFlag . usesDataDescriptor ( ) ; current . entry . setGeneralPurposeBit ( gpFlag ) ; off += SHORT ; current . entry . setMethod ( ZipShort . getValue ( LFH_BUF , off ) ) ; off += SHORT ; final long time = ZipUtil . dosToJavaTime ( ZipLong . getValue ( LFH_BUF , off ) ) ; current . entry . setTime ( time ) ; off += WORD ; ZipLong size = null , cSize = null ; if ( ! current . hasDataDescriptor ) { current . entry . setCrc ( ZipLong . getValue ( LFH_BUF , off ) ) ; off += WORD ; cSize = new ZipLong ( LFH_BUF , off ) ; off += WORD ; size = new ZipLong ( LFH_BUF , off ) ; off += WORD ; } else { off += 3 * WORD ; } final int fileNameLen = ZipShort . getValue ( LFH_BUF , off ) ; off += SHORT ; final int extraLen = ZipShort . getValue ( LFH_BUF , off ) ; off += SHORT ; final byte [ ] fileName = new byte [ fileNameLen ] ; readFully ( fileName ) ; current . entry . setName ( entryEncoding . decode ( fileName ) , fileName ) ; final byte [ ] extraData = new byte [ extraLen ] ; readFully ( extraData ) ; current . entry . setExtra ( extraData ) ; if ( ! hasUTF8Flag && useUnicodeExtraFields ) { ZipUtil . setNameAndCommentFromExtraFields ( current . entry , fileName , null ) ; } processZip64Extra ( size , cSize ) ; if ( current . entry . getCompressedSize ( ) != ArchiveEntry . SIZE_UNKNOWN ) { if ( current . entry . getMethod ( ) == ZipMethod . UNSHRINKING . getCode ( ) ) { current . in = new UnshrinkingInputStream ( new BoundedInputStream ( in , current . entry . getCompressedSize ( ) ) ) ; } else if ( current . entry . getMethod ( ) == ZipMethod . IMPLODING . getCode ( ) ) { current . in = new ExplodingInputStream ( current . entry . getGeneralPurposeBit ( ) . getSlidingDictionarySize ( ) , current . entry . getGeneralPurposeBit ( ) . getNumberOfShannonFanoTrees ( ) , new BoundedInputStream ( in , current . entry . getCompressedSize ( ) ) ) ; } else if ( current . entry . getMethod ( ) == ZipMethod . BZIP2 . getCode ( ) ) { current . in = new BZip2CompressorInputStream ( new BoundedInputStream ( in , current . entry . getCompressedSize ( ) ) ) ; } } entriesRead ++ ; return current . entry ; } | public ZipArchiveEntry getNextZipEntry() throws IOException {
boolean firstEntry = true;
if (closed || hitCentralDirectory) {
return null;
}
if (current != null) {
closeEntry();
firstEntry = false;
}
try {
if (firstEntry) {
// split archives have a special signature before the
// first local file header - look for it and fail with
// the appropriate error message if this is a split
// archive.
readFirstLocalFileHeader(LFH_BUF);
} else {
readFully(LFH_BUF);
}
} catch (final EOFException e) {
return null;
}
final ZipLong sig = new ZipLong(LFH_BUF);
if (sig.equals(ZipLong.CFH_SIG) || sig.equals(ZipLong.AED_SIG)) {
hitCentralDirectory = true;
skipRemainderOfArchive();
return null;
}
if (!sig.equals(ZipLong.LFH_SIG)) {
throw new ZipException(String.format("Unexpected record signature: 0X%X", sig.getValue()));
}
int off = WORD;
current = new CurrentEntry();
final int versionMadeBy = ZipShort.getValue(LFH_BUF, off);
off += SHORT;
current.entry.setPlatform((versionMadeBy >> ZipFile.BYTE_SHIFT) & ZipFile.NIBLET_MASK);
final GeneralPurposeBit gpFlag = GeneralPurposeBit.parse(LFH_BUF, off);
final boolean hasUTF8Flag = gpFlag.usesUTF8ForNames();
final ZipEncoding entryEncoding = hasUTF8Flag ? ZipEncodingHelper.UTF8_ZIP_ENCODING : zipEncoding;
current.hasDataDescriptor = gpFlag.usesDataDescriptor();
current.entry.setGeneralPurposeBit(gpFlag);
off += SHORT;
current.entry.setMethod(ZipShort.getValue(LFH_BUF, off));
off += SHORT;
final long time = ZipUtil.dosToJavaTime(ZipLong.getValue(LFH_BUF, off));
current.entry.setTime(time);
off += WORD;
ZipLong size = null, cSize = null;
if (!current.hasDataDescriptor) {
current.entry.setCrc(ZipLong.getValue(LFH_BUF, off));
off += WORD;
cSize = new ZipLong(LFH_BUF, off);
off += WORD;
size = new ZipLong(LFH_BUF, off);
off += WORD;
} else {
off += 3 * WORD;
}
final int fileNameLen = ZipShort.getValue(LFH_BUF, off);
off += SHORT;
final int extraLen = ZipShort.getValue(LFH_BUF, off);
off += SHORT;
final byte[] fileName = new byte[fileNameLen];
readFully(fileName);
current.entry.setName(entryEncoding.decode(fileName), fileName);
final byte[] extraData = new byte[extraLen];
readFully(extraData);
current.entry.setExtra(extraData);
if (!hasUTF8Flag && useUnicodeExtraFields) {
ZipUtil.setNameAndCommentFromExtraFields(current.entry, fileName, null);
}
processZip64Extra(size, cSize);
if (current.entry.getCompressedSize() != ArchiveEntry.SIZE_UNKNOWN) {
if (current.entry.getMethod() == ZipMethod.UNSHRINKING.getCode()) {
current.in = new UnshrinkingInputStream(new BoundedInputStream(in, current.entry.getCompressedSize()));
} else if (current.entry.getMethod() == ZipMethod.IMPLODING.getCode()) {
current.in = new ExplodingInputStream(
current.entry.getGeneralPurposeBit().getSlidingDictionarySize(),
current.entry.getGeneralPurposeBit().getNumberOfShannonFanoTrees(),
new BoundedInputStream(in, current.entry.getCompressedSize()));
} else if (current.entry.getMethod() == ZipMethod.BZIP2.getCode()) {
current.in = new BZip2CompressorInputStream(new BoundedInputStream(in, current.entry.getCompressedSize()));
}
}
entriesRead++;
return current.entry;
} | public ZipArchiveEntry getNextZipEntry ( ) throws IOException { boolean firstEntry = true ; if ( closed || hitCentralDirectory ) { return null ; } if ( current != null ) { closeEntry ( ) ; firstEntry = false ; } try { if ( firstEntry ) { readFirstLocalFileHeader ( LFH_BUF ) ; } else { readFully ( LFH_BUF ) ; } } catch ( final EOFException e ) { return null ; } final ZipLong sig = new ZipLong ( LFH_BUF ) ; if ( sig . equals ( ZipLong . CFH_SIG ) || sig . equals ( ZipLong . AED_SIG ) ) { hitCentralDirectory = true ; skipRemainderOfArchive ( ) ; return null ; } if ( ! sig . equals ( ZipLong . LFH_SIG ) ) { throw new ZipException ( String . format ( "Unexpected record signature: 0X%X" , sig . getValue ( ) ) ) ; } int off = WORD ; current = new CurrentEntry ( ) ; final int versionMadeBy = ZipShort . getValue ( LFH_BUF , off ) ; off += SHORT ; current . entry . setPlatform ( ( versionMadeBy >> ZipFile . BYTE_SHIFT ) & ZipFile . NIBLET_MASK ) ; final GeneralPurposeBit gpFlag = GeneralPurposeBit . parse ( LFH_BUF , off ) ; final boolean hasUTF8Flag = gpFlag . usesUTF8ForNames ( ) ; final ZipEncoding entryEncoding = hasUTF8Flag ? ZipEncodingHelper . UTF8_ZIP_ENCODING : zipEncoding ; current . hasDataDescriptor = gpFlag . usesDataDescriptor ( ) ; current . entry . setGeneralPurposeBit ( gpFlag ) ; off += SHORT ; current . entry . setMethod ( ZipShort . getValue ( LFH_BUF , off ) ) ; off += SHORT ; final long time = ZipUtil . dosToJavaTime ( ZipLong . getValue ( LFH_BUF , off ) ) ; current . entry . setTime ( time ) ; off += WORD ; ZipLong size = null , cSize = null ; if ( ! current . hasDataDescriptor ) { current . entry . setCrc ( ZipLong . getValue ( LFH_BUF , off ) ) ; off += WORD ; cSize = new ZipLong ( LFH_BUF , off ) ; off += WORD ; size = new ZipLong ( LFH_BUF , off ) ; off += WORD ; } else { off += 3 * WORD ; } final int fileNameLen = ZipShort . getValue ( LFH_BUF , off ) ; off += SHORT ; final int extraLen = ZipShort . getValue ( LFH_BUF , off ) ; off += SHORT ; final byte [ ] fileName = new byte [ fileNameLen ] ; readFully ( fileName ) ; current . entry . setName ( entryEncoding . decode ( fileName ) , fileName ) ; final byte [ ] extraData = new byte [ extraLen ] ; readFully ( extraData ) ; current . entry . setExtra ( extraData ) ; if ( ! hasUTF8Flag && useUnicodeExtraFields ) { ZipUtil . setNameAndCommentFromExtraFields ( current . entry , fileName , null ) ; } processZip64Extra ( size , cSize ) ; if ( current . entry . getCompressedSize ( ) != ArchiveEntry . SIZE_UNKNOWN ) { if ( current . entry . getMethod ( ) == ZipMethod . UNSHRINKING . getCode ( ) ) { current . in = new UnshrinkingInputStream ( new BoundedInputStream ( in , current . entry . getCompressedSize ( ) ) ) ; } else if ( current . entry . getMethod ( ) == ZipMethod . IMPLODING . getCode ( ) ) { current . in = new ExplodingInputStream ( current . entry . getGeneralPurposeBit ( ) . getSlidingDictionarySize ( ) , current . entry . getGeneralPurposeBit ( ) . getNumberOfShannonFanoTrees ( ) , new BoundedInputStream ( in , current . entry . getCompressedSize ( ) ) ) ; } else if ( current . entry . getMethod ( ) == ZipMethod . BZIP2 . getCode ( ) ) { current . in = new BZip2CompressorInputStream ( new BoundedInputStream ( in , current . entry . getCompressedSize ( ) ) ) ; } } entriesRead ++ ; return current . entry ; } |
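
With the Compress-41 fix, `getNextZipEntry()` still returns `null` at a clean end of archive but raises a `ZipException` on an unexpected record signature, so callers can finally tell the two conditions apart. A minimal read loop under that contract (stream construction is illustrative):

```java
import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.util.zip.ZipException;
import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream;

public class ZipIterationDemo {
    public static void main(String[] args) throws Exception {
        try (ZipArchiveInputStream zin = new ZipArchiveInputStream(
                new BufferedInputStream(new FileInputStream(args[0])))) {
            ZipArchiveEntry entry;
            // null now reliably means "no more entries"...
            while ((entry = zin.getNextZipEntry()) != null) {
                System.out.println(entry.getName());
            }
        } catch (ZipException e) {
            // ...while an invalid entry is reported loudly instead of
            // silently ending the iteration.
            System.err.println("Corrupt archive: " + e.getMessage());
        }
    }
}
```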
JacksonDatabind | 93 | src/main/java/com/fasterxml/jackson/databind/jsontype/impl/SubTypeValidator.java | 67 | 99 | `NullPointerException` in `SubTypeValidator.validateSubType` when validating Spring interface | In jackson-databind-2.8.11 jackson-databind-2.9.3 and jackson-databind-2.9.4-SNAPSHOT `SubTypeValidator.validateSubType` fails with a `NullPointerException` if the `JavaType.getRawClass()` is an interface that starts with `org.springframework.` For example, the following will fail:
```java
package org.springframework.security.core;
import java.util.*;
public class Authentication {
private List<GrantedAuthority> authorities = new ArrayList<GrantedAuthority>();
public List<GrantedAuthority> getAuthorities() {
return this.authorities;
}
public void setAuthorities(List<GrantedAuthority> authorities) {
this.authorities = authorities;
}
}
```
```java
package org.springframework.security.core;
public interface GrantedAuthority {
String getAuthority();
}
```
```java
@Test
public void validateSubTypeFailsWithNPE() throws Exception {
ObjectMapper mapper = new ObjectMapper();
mapper.enableDefaultTyping(ObjectMapper.DefaultTyping.NON_FINAL, JsonTypeInfo.As.PROPERTY);
String json = "{\"@class\":\"org.springframework.security.core.Authentication\",\"authorities\":[\"java.util.ArrayList\",[]]}";
Authentication authentication = mapper.readValue(json, Authentication.class);
}
```
with the following stacktrace:
```
java.lang.NullPointerException
at com.fasterxml.jackson.databind.jsontype.impl.SubTypeValidator.validateSubType(SubTypeValidator.java:86)
at com.fasterxml.jackson.databind.deser.BeanDeserializerFactory._validateSubType(BeanDeserializerFactory.java:916)
at com.fasterxml.jackson.databind.deser.BeanDeserializerFactory.createBeanDeserializer(BeanDeserializerFactory.java:135)
at com.fasterxml.jackson.databind.deser.DeserializerCache._createDeserializer2(DeserializerCache.java:411)
at com.fasterxml.jackson.databind.deser.DeserializerCache._createDeserializer(DeserializerCache.java:349)
at com.fasterxml.jackson.databind.deser.DeserializerCache._createAndCache2(DeserializerCache.java:264)
at com.fasterxml.jackson.databind.deser.DeserializerCache._createAndCacheValueDeserializer(DeserializerCache.java:244)
at com.fasterxml.jackson.databind.deser.DeserializerCache.findValueDeserializer(DeserializerCache.java:142)
at com.fasterxml.jackson.databind.DeserializationContext.findContextualValueDeserializer(DeserializationContext.java:444)
at com.fasterxml.jackson.databind.deser.std.CollectionDeserializer.createContextual(CollectionDeserializer.java:183)
at com.fasterxml.jackson.databind.deser.std.CollectionDeserializer.createContextual(CollectionDeserializer.java:27)
at com.fasterxml.jackson.databind.DeserializationContext.handlePrimaryContextualization(DeserializationContext.java:651)
at com.fasterxml.jackson.databind.deser.BeanDeserializerBase.resolve(BeanDeserializerBase.java:471)
at com.fasterxml.jackson.databind.deser.DeserializerCache._createAndCache2(DeserializerCache.java:293)
at com.fasterxml.jackson.databind.deser.DeserializerCache._createAndCacheValueDeserializer(DeserializerCache.java:244)
at com.fasterxml.jackson.databind.deser.DeserializerCache.findValueDeserializer(DeserializerCache.java:142)
at com.fasterxml.jackson.databind.DeserializationContext.findRootValueDeserializer(DeserializationContext.java:477)
at com.fasterxml.jackson.databind.ObjectMapper._findRootDeserializer(ObjectMapper.java:4178)
at com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:3997)
at com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:2992)
```
In prior versions, the test works. | public void validateSubType(DeserializationContext ctxt, JavaType type) throws JsonMappingException
{
// There are certain nasty classes that could cause problems, mostly
// via default typing -- catch them here.
final Class<?> raw = type.getRawClass();
String full = raw.getName();
main_check:
do {
if (_cfgIllegalClassNames.contains(full)) {
break;
}
// 18-Dec-2017, tatu: As per [databind#1855], need bit more sophisticated handling
// for some Spring framework types
// 05-Jan-2017, tatu: ... also, only applies to classes, not interfaces
if (full.startsWith(PREFIX_STRING)) {
for (Class<?> cls = raw; cls != Object.class; cls = cls.getSuperclass()) {
String name = cls.getSimpleName();
// looking for "AbstractBeanFactoryPointcutAdvisor" but no point to allow any is there?
if ("AbstractPointcutAdvisor".equals(name)
// ditto for "FileSystemXmlApplicationContext": block all ApplicationContexts
|| "AbstractApplicationContext".equals(name)) {
break main_check;
}
}
}
return;
} while (false);
throw JsonMappingException.from(ctxt,
String.format("Illegal type (%s) to deserialize: prevented for security reasons", full));
} | public void validateSubType ( DeserializationContext ctxt , JavaType type ) throws JsonMappingException { final Class < ? > raw = type . getRawClass ( ) ; String full = raw . getName ( ) ; main_check : do { if ( _cfgIllegalClassNames . contains ( full ) ) { break ; } if ( full . startsWith ( PREFIX_STRING ) ) { for ( Class < ? > cls = raw ; cls != Object . class ; cls = cls . getSuperclass ( ) ) { String name = cls . getSimpleName ( ) ; if ( "AbstractPointcutAdvisor" . equals ( name ) || "AbstractApplicationContext" . equals ( name ) ) { break main_check ; } } } return ; } while ( false ) ; throw JsonMappingException . from ( ctxt , String . format ( "Illegal type (%s) to deserialize: prevented for security reasons" , full ) ) ; } | public void validateSubType(DeserializationContext ctxt, JavaType type) throws JsonMappingException
{
// There are certain nasty classes that could cause problems, mostly
// via default typing -- catch them here.
final Class<?> raw = type.getRawClass();
String full = raw.getName();
main_check:
do {
if (_cfgIllegalClassNames.contains(full)) {
break;
}
// 18-Dec-2017, tatu: As per [databind#1855], need bit more sophisticated handling
// for some Spring framework types
// 05-Jan-2017, tatu: ... also, only applies to classes, not interfaces
if (!raw.isInterface() && full.startsWith(PREFIX_STRING)) {
for (Class<?> cls = raw; (cls != null) && (cls != Object.class); cls = cls.getSuperclass()) {
String name = cls.getSimpleName();
// looking for "AbstractBeanFactoryPointcutAdvisor" but no point to allow any is there?
if ("AbstractPointcutAdvisor".equals(name)
// ditto for "FileSystemXmlApplicationContext": block all ApplicationContexts
|| "AbstractApplicationContext".equals(name)) {
break main_check;
}
}
}
return;
} while (false);
throw JsonMappingException.from(ctxt,
String.format("Illegal type (%s) to deserialize: prevented for security reasons", full));
} | public void validateSubType ( DeserializationContext ctxt , JavaType type ) throws JsonMappingException { final Class < ? > raw = type . getRawClass ( ) ; String full = raw . getName ( ) ; main_check : do { if ( _cfgIllegalClassNames . contains ( full ) ) { break ; } if ( ! raw . isInterface ( ) && full . startsWith ( PREFIX_STRING ) ) { for ( Class < ? > cls = raw ; ( cls != null ) && ( cls != Object . class ) ; cls = cls . getSuperclass ( ) ) { String name = cls . getSimpleName ( ) ; if ( "AbstractPointcutAdvisor" . equals ( name ) || "AbstractApplicationContext" . equals ( name ) ) { break main_check ; } } } return ; } while ( false ) ; throw JsonMappingException . from ( ctxt , String . format ( "Illegal type (%s) to deserialize: prevented for security reasons" , full ) ) ; } |
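
The JacksonDatabind-93 NPE stems from `Class.getSuperclass()` returning `null` for interfaces: the loop guard `cls != Object.class` never fires, so the walk steps onto `null` and dereferences it. The fix both skips interfaces and null-guards the loop. A self-contained sketch of the root cause (the nested interface is a stand-in for Spring's `GrantedAuthority`):

```java
public class SuperclassDemo {

    interface GrantedAuthorityLike {} // stand-in for the Spring interface

    public static void main(String[] args) {
        // For an interface, getSuperclass() is null and never Object.class,
        // so a loop guarded only by "cls != Object.class" walks off the end.
        Class<?> cls = GrantedAuthorityLike.class;
        System.out.println(cls.getSuperclass()); // prints: null

        // Null-guarded traversal, as in the fixed validateSubType:
        for (Class<?> c = cls; (c != null) && (c != Object.class); c = c.getSuperclass()) {
            System.out.println(c.getSimpleName());
        }
    }
}
```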
Math | 2 | src/main/java/org/apache/commons/math3/distribution/HypergeometricDistribution.java | 267 | 269 | HypergeometricDistribution.sample suffers from integer overflow | Hi, I have an application which broke when ported from commons math 2.2 to 3.2. It looks like the HypergeometricDistribution.sample() method doesn't work as well as it used to with large integer values -- the example code below should return a sample between 0 and 50, but usually returns -50.
{code}
import org.apache.commons.math3.distribution.HypergeometricDistribution;
public class Foo {
public static void main(String[] args) {
HypergeometricDistribution a = new HypergeometricDistribution(
43130568, 42976365, 50);
System.out.printf("%d %d%n", a.getSupportLowerBound(), a.getSupportUpperBound()); // Prints "0 50"
System.out.printf("%d%n",a.sample()); // Prints "-50"
}
}
{code}
In the debugger, I traced it as far as an integer overflow in HypergeometricDistribution.getNumericalMean() -- instead of doing
{code}
return (double) (getSampleSize() * getNumberOfSuccesses()) / (double) getPopulationSize();
{code}
it could do:
{code}
return getSampleSize() * ((double) getNumberOfSuccesses() / (double) getPopulationSize());
{code}
This seemed to fix it, based on a quick test. | public double getNumericalMean() {
return (double) (getSampleSize() * getNumberOfSuccesses()) / (double) getPopulationSize();
} | public double getNumericalMean ( ) { return ( double ) ( getSampleSize ( ) * getNumberOfSuccesses ( ) ) / ( double ) getPopulationSize ( ) ; } | public double getNumericalMean() {
return getSampleSize() * (getNumberOfSuccesses() / (double) getPopulationSize());
} | public double getNumericalMean ( ) { return getSampleSize ( ) * ( getNumberOfSuccesses ( ) / ( double ) getPopulationSize ( ) ) ; } |
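
The Math-2 bug is a plain `int` overflow: `getSampleSize() * getNumberOfSuccesses()` wraps before the `(double)` cast applies, and the fix divides first so every intermediate stays within `double` range. The arithmetic, reproduced in isolation with the numbers from the report:

```java
public class OverflowDemo {
    public static void main(String[] args) {
        int sampleSize = 50;          // values from the issue report
        int successes = 42976365;
        int population = 43130568;

        // int multiplication wraps around before the cast takes effect:
        double broken = (double) (sampleSize * successes) / (double) population;
        // dividing first keeps the intermediate result in double range:
        double fixed = sampleSize * ((double) successes / (double) population);

        System.out.println(broken); // about -49.76: the wrapped product
        System.out.println(fixed);  // about 49.82: the true mean
    }
}
```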
Math | 58 | src/main/java/org/apache/commons/math/optimization/fitting/GaussianFitter.java | 119 | 122 | GaussianFitter Unexpectedly Throws NotStrictlyPositiveException | Running the following:
double[] observations =
{
1.1143831578403364E-29,
4.95281403484594E-28,
1.1171347211930288E-26,
1.7044813962636277E-25,
1.9784716574832164E-24,
1.8630236407866774E-23,
1.4820532905097742E-22,
1.0241963854632831E-21,
6.275077366673128E-21,
3.461808994532493E-20,
1.7407124684715706E-19,
8.056687953553974E-19,
3.460193945992071E-18,
1.3883326374011525E-17,
5.233894983671116E-17,
1.8630791465263745E-16,
6.288759227922111E-16,
2.0204433920597856E-15,
6.198768938576155E-15,
1.821419346860626E-14,
5.139176445538471E-14,
1.3956427429045787E-13,
3.655705706448139E-13,
9.253753324779779E-13,
2.267636001476696E-12,
5.3880460095836855E-12,
1.2431632654852931E-11
};
GaussianFitter g =
new GaussianFitter(new LevenbergMarquardtOptimizer());
for (int index = 0; index < 27; index++)
{
g.addObservedPoint(index, observations[index]);
}
g.fit();
Results in:
org.apache.commons.math.exception.NotStrictlyPositiveException: -1.277 is smaller than, or equal to, the minimum (0)
at org.apache.commons.math.analysis.function.Gaussian$Parametric.validateParameters(Gaussian.java:184)
at org.apache.commons.math.analysis.function.Gaussian$Parametric.value(Gaussian.java:129)
I'm guessing the initial guess for sigma is off. | public double[] fit() {
final double[] guess = (new ParameterGuesser(getObservations())).guess();
return fit(new Gaussian.Parametric(), guess);
} | public double [ ] fit ( ) { final double [ ] guess = ( new ParameterGuesser ( getObservations ( ) ) ) . guess ( ) ; return fit ( new Gaussian . Parametric ( ) , guess ) ; } | public double[] fit() {
final double[] guess = (new ParameterGuesser(getObservations())).guess();
return fit(guess);
} | public double [ ] fit ( ) { final double [ ] guess = ( new ParameterGuesser ( getObservations ( ) ) ) . guess ( ) ; return fit ( guess ) ; } |
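
For Math-58, the `NotStrictlyPositiveException` comes from `Gaussian.Parametric` rejecting a non-positive sigma once the optimizer wanders outside the valid region; the fix routes through `fit(guess)` so the patched fitter supplies its own safeguarded model function rather than the raw parametric one. As a standalone illustration of the invariant being enforced (a sketch of the validation only, not the library's code):

```java
public class SigmaGuardDemo {

    // Gaussian with parameters {norm, mean, sigma}; sigma must be > 0.
    static double gaussian(double x, double norm, double mean, double sigma) {
        if (sigma <= 0) {
            // mirrors the NotStrictlyPositiveException message in the report
            throw new IllegalArgumentException(
                    sigma + " is smaller than, or equal to, the minimum (0)");
        }
        double d = (x - mean) / sigma;
        return norm * Math.exp(-0.5 * d * d);
    }

    public static void main(String[] args) {
        System.out.println(gaussian(1.0, 1.0, 0.0, 2.0));   // fine: sigma > 0
        System.out.println(gaussian(1.0, 1.0, 0.0, -1.277)); // throws, like the issue
    }
}
```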
JacksonDatabind | 85 | src/main/java/com/fasterxml/jackson/databind/ser/std/DateTimeSerializerBase.java | 48 | 95 | `DateTimeSerializerBase` ignores configured date format when creating contextual | `DateTimeSerializerBase#createContextual` creates a new serializer with `StdDateFormat.DATE_FORMAT_STR_ISO8601` format instead of re-using the actual format that may have been specified on the configuration. See the following code:
```
final String pattern = format.hasPattern()
? format.getPattern()
: StdDateFormat.DATE_FORMAT_STR_ISO8601;
```
Using the `@JsonFormat` annotation on a field will therefore reset the format to Jackson's default even if the annotation doesn't specify any custom format.
`DateBasedDeserializer#createContextual` behaves differently and tries to re-use the configured format:
```
DateFormat df = ctxt.getConfig().getDateFormat();
// one shortcut: with our custom format, can simplify handling a bit
if (df.getClass() == StdDateFormat.class) {
...
StdDateFormat std = (StdDateFormat) df;
std = std.withTimeZone(tz);
...
} else {
// otherwise need to clone, re-set timezone:
df = (DateFormat) df.clone();
df.setTimeZone(tz);
}
```
Shouldn't the serializer follow the same approach?
| @Override
public JsonSerializer<?> createContextual(SerializerProvider serializers,
BeanProperty property) throws JsonMappingException
{
if (property == null) {
return this;
}
JsonFormat.Value format = findFormatOverrides(serializers, property, handledType());
if (format == null) {
return this;
}
// Simple case first: serialize as numeric timestamp?
JsonFormat.Shape shape = format.getShape();
if (shape.isNumeric()) {
return withFormat(Boolean.TRUE, null);
}
// 08-Jun-2017, tatu: With [databind#1648], this gets bit tricky..
// First: custom pattern will override things
if ((shape == JsonFormat.Shape.STRING) || format.hasPattern()
|| format.hasLocale() || format.hasTimeZone()) {
TimeZone tz = format.getTimeZone();
final String pattern = format.hasPattern()
? format.getPattern()
: StdDateFormat.DATE_FORMAT_STR_ISO8601;
final Locale loc = format.hasLocale()
? format.getLocale()
: serializers.getLocale();
SimpleDateFormat df = new SimpleDateFormat(pattern, loc);
if (tz == null) {
tz = serializers.getTimeZone();
}
df.setTimeZone(tz);
return withFormat(Boolean.FALSE, df);
}
// Otherwise, need one of these changes:
// Jackson's own `StdDateFormat` is quite easy to deal with...
// 08-Jun-2017, tatu: Unfortunately there's no generally usable
// mechanism for changing `DateFormat` instances (or even clone()ing)
// So: require it be `SimpleDateFormat`; can't config other types
// serializers.reportBadDefinition(handledType(), String.format(
// Ugh. No way to change `Locale`, create copy; must re-crete completely:
return this;
} | @ Override public JsonSerializer < ? > createContextual ( SerializerProvider serializers , BeanProperty property ) throws JsonMappingException { if ( property == null ) { return this ; } JsonFormat . Value format = findFormatOverrides ( serializers , property , handledType ( ) ) ; if ( format == null ) { return this ; } JsonFormat . Shape shape = format . getShape ( ) ; if ( shape . isNumeric ( ) ) { return withFormat ( Boolean . TRUE , null ) ; } if ( ( shape == JsonFormat . Shape . STRING ) || format . hasPattern ( ) || format . hasLocale ( ) || format . hasTimeZone ( ) ) { TimeZone tz = format . getTimeZone ( ) ; final String pattern = format . hasPattern ( ) ? format . getPattern ( ) : StdDateFormat . DATE_FORMAT_STR_ISO8601 ; final Locale loc = format . hasLocale ( ) ? format . getLocale ( ) : serializers . getLocale ( ) ; SimpleDateFormat df = new SimpleDateFormat ( pattern , loc ) ; if ( tz == null ) { tz = serializers . getTimeZone ( ) ; } df . setTimeZone ( tz ) ; return withFormat ( Boolean . FALSE , df ) ; } return this ; } | @Override
public JsonSerializer<?> createContextual(SerializerProvider serializers,
BeanProperty property) throws JsonMappingException
{
if (property == null) {
return this;
}
JsonFormat.Value format = findFormatOverrides(serializers, property, handledType());
if (format == null) {
return this;
}
// Simple case first: serialize as numeric timestamp?
JsonFormat.Shape shape = format.getShape();
if (shape.isNumeric()) {
return withFormat(Boolean.TRUE, null);
}
// 08-Jun-2017, tatu: With [databind#1648], this gets bit tricky..
// First: custom pattern will override things
if (format.hasPattern()) {
final Locale loc = format.hasLocale()
? format.getLocale()
: serializers.getLocale();
SimpleDateFormat df = new SimpleDateFormat(format.getPattern(), loc);
TimeZone tz = format.hasTimeZone() ? format.getTimeZone()
: serializers.getTimeZone();
df.setTimeZone(tz);
return withFormat(Boolean.FALSE, df);
}
// Otherwise, need one of these changes:
final boolean hasLocale = format.hasLocale();
final boolean hasTZ = format.hasTimeZone();
final boolean asString = (shape == JsonFormat.Shape.STRING);
if (!hasLocale && !hasTZ && !asString) {
return this;
}
DateFormat df0 = serializers.getConfig().getDateFormat();
// Jackson's own `StdDateFormat` is quite easy to deal with...
if (df0 instanceof StdDateFormat) {
StdDateFormat std = (StdDateFormat) df0;
if (format.hasLocale()) {
std = std.withLocale(format.getLocale());
}
if (format.hasTimeZone()) {
std = std.withTimeZone(format.getTimeZone());
}
return withFormat(Boolean.FALSE, std);
}
// 08-Jun-2017, tatu: Unfortunately there's no generally usable
// mechanism for changing `DateFormat` instances (or even clone()ing)
// So: require it be `SimpleDateFormat`; can't config other types
if (!(df0 instanceof SimpleDateFormat)) {
// serializers.reportBadDefinition(handledType(), String.format(
serializers.reportMappingProblem(
"Configured `DateFormat` (%s) not a `SimpleDateFormat`; can not configure `Locale` or `TimeZone`",
df0.getClass().getName());
}
SimpleDateFormat df = (SimpleDateFormat) df0;
if (hasLocale) {
// Ugh. No way to change `Locale`, create copy; must re-crete completely:
df = new SimpleDateFormat(df.toPattern(), format.getLocale());
} else {
df = (SimpleDateFormat) df.clone();
}
TimeZone newTz = format.getTimeZone();
boolean changeTZ = (newTz != null) && !newTz.equals(df.getTimeZone());
if (changeTZ) {
df.setTimeZone(newTz);
}
return withFormat(Boolean.FALSE, df);
} | @ Override public JsonSerializer < ? > createContextual ( SerializerProvider serializers , BeanProperty property ) throws JsonMappingException { if ( property == null ) { return this ; } JsonFormat . Value format = findFormatOverrides ( serializers , property , handledType ( ) ) ; if ( format == null ) { return this ; } JsonFormat . Shape shape = format . getShape ( ) ; if ( shape . isNumeric ( ) ) { return withFormat ( Boolean . TRUE , null ) ; } if ( format . hasPattern ( ) ) { final Locale loc = format . hasLocale ( ) ? format . getLocale ( ) : serializers . getLocale ( ) ; SimpleDateFormat df = new SimpleDateFormat ( format . getPattern ( ) , loc ) ; TimeZone tz = format . hasTimeZone ( ) ? format . getTimeZone ( ) : serializers . getTimeZone ( ) ; df . setTimeZone ( tz ) ; return withFormat ( Boolean . FALSE , df ) ; } final boolean hasLocale = format . hasLocale ( ) ; final boolean hasTZ = format . hasTimeZone ( ) ; final boolean asString = ( shape == JsonFormat . Shape . STRING ) ; if ( ! hasLocale && ! hasTZ && ! asString ) { return this ; } DateFormat df0 = serializers . getConfig ( ) . getDateFormat ( ) ; if ( df0 instanceof StdDateFormat ) { StdDateFormat std = ( StdDateFormat ) df0 ; if ( format . hasLocale ( ) ) { std = std . withLocale ( format . getLocale ( ) ) ; } if ( format . hasTimeZone ( ) ) { std = std . withTimeZone ( format . getTimeZone ( ) ) ; } return withFormat ( Boolean . FALSE , std ) ; } if ( ! ( df0 instanceof SimpleDateFormat ) ) { serializers . reportMappingProblem ( "Configured `DateFormat` (%s) not a `SimpleDateFormat`; can not configure `Locale` or `TimeZone`" , df0 . getClass ( ) . getName ( ) ) ; } SimpleDateFormat df = ( SimpleDateFormat ) df0 ; if ( hasLocale ) { df = new SimpleDateFormat ( df . toPattern ( ) , format . getLocale ( ) ) ; } else { df = ( SimpleDateFormat ) df . clone ( ) ; } TimeZone newTz = format . getTimeZone ( ) ; boolean changeTZ = ( newTz != null ) && ! newTz . equals ( df . getTimeZone ( ) ) ; if ( changeTZ ) { df . setTimeZone ( newTz ) ; } return withFormat ( Boolean . FALSE , df ) ; } |
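
The practical effect of the JacksonDatabind-85 fix: a `@JsonFormat` annotation that carries only a locale or time zone no longer discards a globally configured date pattern in favor of the ISO-8601 default. A hedged usage sketch (the bean and pattern are illustrative; `setDateFormat` and `@JsonFormat` are standard Jackson APIs, and the commented outputs reflect the behavior described in the issue):

```java
import java.text.SimpleDateFormat;
import java.util.Date;
import com.fasterxml.jackson.annotation.JsonFormat;
import com.fasterxml.jackson.databind.ObjectMapper;

public class DateFormatDemo {

    public static class Event {
        // Overrides only the zone; the mapper-level pattern should survive.
        @JsonFormat(timezone = "UTC")
        public Date when = new Date(0L);
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        mapper.setDateFormat(new SimpleDateFormat("yyyy-MM-dd"));
        // Before the fix: the annotation reset the pattern to Jackson's
        // ISO-8601 default. After: prints {"when":"1970-01-01"}.
        System.out.println(mapper.writeValueAsString(new Event()));
    }
}
```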
JacksonDatabind | 1 | src/main/java/com/fasterxml/jackson/databind/ser/BeanPropertyWriter.java | 582 | 624 | NULL values are duplicated when serializing as array [via @JsonFormat(shape = JsonFormat.Shape.ARRAY)] | Example:
``` java
public class TestOuter {
@JsonFormat(shape = JsonFormat.Shape.ARRAY)
public ArrayList<TestInner> array;
public TestOuter() {
this.array = new ArrayList<TestInner>();
this.array.add(new TestInner(1, "one"));
this.array.add(new TestInner(0, null));
}
private class TestInner {
public int i;
public String mayBeNull;
public TestInner(int i, String s) {
this.i = i;
this.mayBeNull = s;
}
}
}
```
Serializing an instance of TestOuter will produce the following incorrect result (as of Jackson 2.2.1):
``` json
"array": [[1, "one"], [0, null, null]]
```
where the null value is duplicated. The expected result would be:
``` json
"array": [[1, "one"], [0, null]]
```
I tracked the issue down to:
``` java
package com.fasterxml.jackson.databind.ser;
// ...
public class BeanPropertyWriter {
// ...
public void serializeAsColumn(Object bean, JsonGenerator jgen, SerializerProvider prov)
throws Exception
{
Object value = get(bean);
if (value == null) { // nulls need specialized handling
if (_nullSerializer != null) {
_nullSerializer.serialize(null, jgen, prov);
} else { // can NOT suppress entries in tabular output
jgen.writeNull();
}
}
// otherwise find serializer to use
JsonSerializer<Object> ser = _serializer;
// ... ...
```
where I suspect there is a missing "return", to exit the function once handling of the null value in the dedicated branch is done.
As it is now, a null value is first serialized in the dedicated branch (jgen.writeNull()), and then execution continues on the "normal" (non-null) path and eventually the value is serialized once again.
| public void serializeAsColumn(Object bean, JsonGenerator jgen, SerializerProvider prov)
throws Exception
{
Object value = get(bean);
if (value == null) { // nulls need specialized handling
if (_nullSerializer != null) {
_nullSerializer.serialize(null, jgen, prov);
} else { // can NOT suppress entries in tabular output
jgen.writeNull();
}
}
// otherwise find serializer to use
JsonSerializer<Object> ser = _serializer;
if (ser == null) {
Class<?> cls = value.getClass();
PropertySerializerMap map = _dynamicSerializers;
ser = map.serializerFor(cls);
if (ser == null) {
ser = _findAndAddDynamic(map, cls, prov);
}
}
// and then see if we must suppress certain values (default, empty)
if (_suppressableValue != null) {
if (MARKER_FOR_EMPTY == _suppressableValue) {
if (ser.isEmpty(value)) { // can NOT suppress entries in tabular output
serializeAsPlaceholder(bean, jgen, prov);
return;
}
} else if (_suppressableValue.equals(value)) { // can NOT suppress entries in tabular output
serializeAsPlaceholder(bean, jgen, prov);
return;
}
}
// For non-nulls: simple check for direct cycles
if (value == bean) {
_handleSelfReference(bean, ser);
}
if (_typeSerializer == null) {
ser.serialize(value, jgen, prov);
} else {
ser.serializeWithType(value, jgen, prov, _typeSerializer);
}
} | public void serializeAsColumn ( Object bean , JsonGenerator jgen , SerializerProvider prov ) throws Exception { Object value = get ( bean ) ; if ( value == null ) { if ( _nullSerializer != null ) { _nullSerializer . serialize ( null , jgen , prov ) ; } else { jgen . writeNull ( ) ; } } JsonSerializer < Object > ser = _serializer ; if ( ser == null ) { Class < ? > cls = value . getClass ( ) ; PropertySerializerMap map = _dynamicSerializers ; ser = map . serializerFor ( cls ) ; if ( ser == null ) { ser = _findAndAddDynamic ( map , cls , prov ) ; } } if ( _suppressableValue != null ) { if ( MARKER_FOR_EMPTY == _suppressableValue ) { if ( ser . isEmpty ( value ) ) { serializeAsPlaceholder ( bean , jgen , prov ) ; return ; } } else if ( _suppressableValue . equals ( value ) ) { serializeAsPlaceholder ( bean , jgen , prov ) ; return ; } } if ( value == bean ) { _handleSelfReference ( bean , ser ) ; } if ( _typeSerializer == null ) { ser . serialize ( value , jgen , prov ) ; } else { ser . serializeWithType ( value , jgen , prov , _typeSerializer ) ; } } | public void serializeAsColumn(Object bean, JsonGenerator jgen, SerializerProvider prov)
throws Exception
{
Object value = get(bean);
if (value == null) { // nulls need specialized handling
if (_nullSerializer != null) {
_nullSerializer.serialize(null, jgen, prov);
} else { // can NOT suppress entries in tabular output
jgen.writeNull();
}
return;
}
// otherwise find serializer to use
JsonSerializer<Object> ser = _serializer;
if (ser == null) {
Class<?> cls = value.getClass();
PropertySerializerMap map = _dynamicSerializers;
ser = map.serializerFor(cls);
if (ser == null) {
ser = _findAndAddDynamic(map, cls, prov);
}
}
// and then see if we must suppress certain values (default, empty)
if (_suppressableValue != null) {
if (MARKER_FOR_EMPTY == _suppressableValue) {
if (ser.isEmpty(value)) { // can NOT suppress entries in tabular output
serializeAsPlaceholder(bean, jgen, prov);
return;
}
} else if (_suppressableValue.equals(value)) { // can NOT suppress entries in tabular output
serializeAsPlaceholder(bean, jgen, prov);
return;
}
}
// For non-nulls: simple check for direct cycles
if (value == bean) {
_handleSelfReference(bean, ser);
}
if (_typeSerializer == null) {
ser.serialize(value, jgen, prov);
} else {
ser.serializeWithType(value, jgen, prov, _typeSerializer);
}
} | public void serializeAsColumn ( Object bean , JsonGenerator jgen , SerializerProvider prov ) throws Exception { Object value = get ( bean ) ; if ( value == null ) { if ( _nullSerializer != null ) { _nullSerializer . serialize ( null , jgen , prov ) ; } else { jgen . writeNull ( ) ; } return ; } JsonSerializer < Object > ser = _serializer ; if ( ser == null ) { Class < ? > cls = value . getClass ( ) ; PropertySerializerMap map = _dynamicSerializers ; ser = map . serializerFor ( cls ) ; if ( ser == null ) { ser = _findAndAddDynamic ( map , cls , prov ) ; } } if ( _suppressableValue != null ) { if ( MARKER_FOR_EMPTY == _suppressableValue ) { if ( ser . isEmpty ( value ) ) { serializeAsPlaceholder ( bean , jgen , prov ) ; return ; } } else if ( _suppressableValue . equals ( value ) ) { serializeAsPlaceholder ( bean , jgen , prov ) ; return ; } } if ( value == bean ) { _handleSelfReference ( bean , ser ) ; } if ( _typeSerializer == null ) { ser . serialize ( value , jgen , prov ) ; } else { ser . serializeWithType ( value , jgen , prov , _typeSerializer ) ; } } |
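
The JacksonDatabind-1 patch is a textbook missing `return`: the null branch writes its output and must then leave the method, otherwise control falls through and the value is serialized a second time. The pattern, distilled into a standalone sketch (all names are illustrative):

```java
import java.util.ArrayList;
import java.util.List;

public class MissingReturnDemo {

    // Mimics serializeAsColumn: a dedicated null branch followed by the
    // "normal" write path. Without the return, nulls hit both paths.
    static void writeColumn(Object value, List<Object> out, boolean patched) {
        if (value == null) {
            out.add(null);      // specialized null handling
            if (patched) {
                return;         // the one-line fix: stop here for nulls
            }
        }
        out.add(value);         // non-null path; duplicates a fallen-through null
    }

    public static void main(String[] args) {
        List<Object> buggy = new ArrayList<>();
        List<Object> fixed = new ArrayList<>();
        writeColumn(null, buggy, false);
        writeColumn(null, fixed, true);
        System.out.println(buggy); // [null, null] -> the duplicated null
        System.out.println(fixed); // [null]
    }
}
```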
Math | 74 | src/main/java/org/apache/commons/math/ode/nonstiff/EmbeddedRungeKuttaIntegrator.java | 191 | 359 | Wrong parameter for first step size guess for Embedded Runge Kutta methods | In a space application using DOP853 I detected what seems to be a bad parameter in the call to the method initializeStep of class AdaptiveStepsizeIntegrator.
Here, DormandPrince853Integrator is a subclass of EmbeddedRungeKuttaIntegrator, which performs the call to initializeStep at the beginning of its method integrate(...)
The problem comes from the array "scale" that is used as a parameter in the call to initializeStep(..)
Following the theory described by Hairer in his book "Solving Ordinary Differential Equations 1: Nonstiff Problems", the scaling should be:
sc_i = Atol_i + |y0_i| * Rtol_i
whereas EmbeddedRungeKuttaIntegrator uses: sc_i = Atol_i
Note that the Gragg-Bulirsch-Stoer integrator uses the correct implementation "sc_i = Atol_i + |y0_i| * Rtol_i" when it performs the call to the same method initializeStep(..)
In the method initializeStep, the error leads to a wrong step size h used to perform an Euler step. Most of the time it is invisible to the user.
But in my space application the Euler step with this wrong step size h (much bigger than it should be) makes an exception occur (my satellite hits the ground...)
To fix the bug, one should use the same algorithm as in the rescale method in GraggBulirschStoerIntegrator
For example:
final double[] scale = new double[y0.length];
if (vecAbsoluteTolerance == null) {
for (int i = 0; i < scale.length; ++i) {
final double yi = Math.abs(y0[i]);
scale[i] = scalAbsoluteTolerance + scalRelativeTolerance * yi;
}
} else {
for (int i = 0; i < scale.length; ++i) {
final double yi = Math.abs(y0[i]);
scale[i] = vecAbsoluteTolerance[i] + vecRelativeTolerance[i] * yi;
}
}
hNew = initializeStep(equations, forward, getOrder(), scale,
stepStart, y, yDotK[0], yTmp, yDotK[1]);
Sorry for the length of this message, looking forward to hearing from you soon
Vincent Morand
| @Override
public double integrate(final FirstOrderDifferentialEquations equations,
final double t0, final double[] y0,
final double t, final double[] y)
throws DerivativeException, IntegratorException {
sanityChecks(equations, t0, y0, t, y);
setEquations(equations);
resetEvaluations();
final boolean forward = t > t0;
// create some internal working arrays
final int stages = c.length + 1;
if (y != y0) {
System.arraycopy(y0, 0, y, 0, y0.length);
}
final double[][] yDotK = new double[stages][y0.length];
final double[] yTmp = new double[y0.length];
// set up an interpolator sharing the integrator arrays
AbstractStepInterpolator interpolator;
if (requiresDenseOutput() || (! eventsHandlersManager.isEmpty())) {
final RungeKuttaStepInterpolator rki = (RungeKuttaStepInterpolator) prototype.copy();
rki.reinitialize(this, yTmp, yDotK, forward);
interpolator = rki;
} else {
interpolator = new DummyStepInterpolator(yTmp, forward);
}
interpolator.storeTime(t0);
// set up integration control objects
stepStart = t0;
double hNew = 0;
boolean firstTime = true;
for (StepHandler handler : stepHandlers) {
handler.reset();
}
CombinedEventsManager manager = addEndTimeChecker(t0, t, eventsHandlersManager);
boolean lastStep = false;
// main integration loop
while (!lastStep) {
interpolator.shift();
double error = 0;
for (boolean loop = true; loop;) {
if (firstTime || !fsal) {
// first stage
computeDerivatives(stepStart, y, yDotK[0]);
}
if (firstTime) {
final double[] scale;
if (vecAbsoluteTolerance == null) {
scale = new double[y0.length];
java.util.Arrays.fill(scale, scalAbsoluteTolerance);
} else {
scale = vecAbsoluteTolerance;
}
hNew = initializeStep(equations, forward, getOrder(), scale,
stepStart, y, yDotK[0], yTmp, yDotK[1]);
firstTime = false;
}
stepSize = hNew;
// next stages
for (int k = 1; k < stages; ++k) {
for (int j = 0; j < y0.length; ++j) {
double sum = a[k-1][0] * yDotK[0][j];
for (int l = 1; l < k; ++l) {
sum += a[k-1][l] * yDotK[l][j];
}
yTmp[j] = y[j] + stepSize * sum;
}
computeDerivatives(stepStart + c[k-1] * stepSize, yTmp, yDotK[k]);
}
// estimate the state at the end of the step
for (int j = 0; j < y0.length; ++j) {
double sum = b[0] * yDotK[0][j];
for (int l = 1; l < stages; ++l) {
sum += b[l] * yDotK[l][j];
}
yTmp[j] = y[j] + stepSize * sum;
}
// estimate the error at the end of the step
error = estimateError(yDotK, y, yTmp, stepSize);
if (error <= 1.0) {
// discrete events handling
interpolator.storeTime(stepStart + stepSize);
if (manager.evaluateStep(interpolator)) {
final double dt = manager.getEventTime() - stepStart;
if (Math.abs(dt) <= Math.ulp(stepStart)) {
// rejecting the step would lead to a too small next step, we accept it
loop = false;
} else {
// reject the step to match exactly the next switch time
hNew = dt;
}
} else {
// accept the step
loop = false;
}
} else {
// reject the step and attempt to reduce error by stepsize control
final double factor =
Math.min(maxGrowth,
Math.max(minReduction, safety * Math.pow(error, exp)));
hNew = filterStep(stepSize * factor, forward, false);
}
}
// the step has been accepted
final double nextStep = stepStart + stepSize;
System.arraycopy(yTmp, 0, y, 0, y0.length);
manager.stepAccepted(nextStep, y);
lastStep = manager.stop();
// provide the step data to the step handler
interpolator.storeTime(nextStep);
for (StepHandler handler : stepHandlers) {
handler.handleStep(interpolator, lastStep);
}
stepStart = nextStep;
if (fsal) {
// save the last evaluation for the next step
System.arraycopy(yDotK[stages - 1], 0, yDotK[0], 0, y0.length);
}
if (manager.reset(stepStart, y) && ! lastStep) {
// some event handler has triggered changes that
// invalidate the derivatives, we need to recompute them
computeDerivatives(stepStart, y, yDotK[0]);
}
if (! lastStep) {
// in some rare cases we may get here with stepSize = 0, for example
// when an event occurs at integration start, reducing the first step
// to zero; we have to reset the step to some safe non zero value
stepSize = filterStep(stepSize, forward, true);
// stepsize control for next step
final double factor = Math.min(maxGrowth,
Math.max(minReduction,
safety * Math.pow(error, exp)));
final double scaledH = stepSize * factor;
final double nextT = stepStart + scaledH;
final boolean nextIsLast = forward ? (nextT >= t) : (nextT <= t);
hNew = filterStep(scaledH, forward, nextIsLast);
}
}
final double stopTime = stepStart;
resetInternalState();
return stopTime;
} | @ Override public double integrate ( final FirstOrderDifferentialEquations equations , final double t0 , final double [ ] y0 , final double t , final double [ ] y ) throws DerivativeException , IntegratorException { sanityChecks ( equations , t0 , y0 , t , y ) ; setEquations ( equations ) ; resetEvaluations ( ) ; final boolean forward = t > t0 ; final int stages = c . length + 1 ; if ( y != y0 ) { System . arraycopy ( y0 , 0 , y , 0 , y0 . length ) ; } final double [ ] [ ] yDotK = new double [ stages ] [ y0 . length ] ; final double [ ] yTmp = new double [ y0 . length ] ; AbstractStepInterpolator interpolator ; if ( requiresDenseOutput ( ) || ( ! eventsHandlersManager . isEmpty ( ) ) ) { final RungeKuttaStepInterpolator rki = ( RungeKuttaStepInterpolator ) prototype . copy ( ) ; rki . reinitialize ( this , yTmp , yDotK , forward ) ; interpolator = rki ; } else { interpolator = new DummyStepInterpolator ( yTmp , forward ) ; } interpolator . storeTime ( t0 ) ; stepStart = t0 ; double hNew = 0 ; boolean firstTime = true ; for ( StepHandler handler : stepHandlers ) { handler . reset ( ) ; } CombinedEventsManager manager = addEndTimeChecker ( t0 , t , eventsHandlersManager ) ; boolean lastStep = false ; while ( ! lastStep ) { interpolator . shift ( ) ; double error = 0 ; for ( boolean loop = true ; loop ; ) { if ( firstTime || ! fsal ) { computeDerivatives ( stepStart , y , yDotK [ 0 ] ) ; } if ( firstTime ) { final double [ ] scale ; if ( vecAbsoluteTolerance == null ) { scale = new double [ y0 . length ] ; java . util . Arrays . fill ( scale , scalAbsoluteTolerance ) ; } else { scale = vecAbsoluteTolerance ; } hNew = initializeStep ( equations , forward , getOrder ( ) , scale , stepStart , y , yDotK [ 0 ] , yTmp , yDotK [ 1 ] ) ; firstTime = false ; } stepSize = hNew ; for ( int k = 1 ; k < stages ; ++ k ) { for ( int j = 0 ; j < y0 . length ; ++ j ) { double sum = a [ k - 1 ] [ 0 ] * yDotK [ 0 ] [ j ] ; for ( int l = 1 ; l < k ; ++ l ) { sum += a [ k - 1 ] [ l ] * yDotK [ l ] [ j ] ; } yTmp [ j ] = y [ j ] + stepSize * sum ; } computeDerivatives ( stepStart + c [ k - 1 ] * stepSize , yTmp , yDotK [ k ] ) ; } for ( int j = 0 ; j < y0 . length ; ++ j ) { double sum = b [ 0 ] * yDotK [ 0 ] [ j ] ; for ( int l = 1 ; l < stages ; ++ l ) { sum += b [ l ] * yDotK [ l ] [ j ] ; } yTmp [ j ] = y [ j ] + stepSize * sum ; } error = estimateError ( yDotK , y , yTmp , stepSize ) ; if ( error <= 1.0 ) { interpolator . storeTime ( stepStart + stepSize ) ; if ( manager . evaluateStep ( interpolator ) ) { final double dt = manager . getEventTime ( ) - stepStart ; if ( Math . abs ( dt ) <= Math . ulp ( stepStart ) ) { loop = false ; } else { hNew = dt ; } } else { loop = false ; } } else { final double factor = Math . min ( maxGrowth , Math . max ( minReduction , safety * Math . pow ( error , exp ) ) ) ; hNew = filterStep ( stepSize * factor , forward , false ) ; } } final double nextStep = stepStart + stepSize ; System . arraycopy ( yTmp , 0 , y , 0 , y0 . length ) ; manager . stepAccepted ( nextStep , y ) ; lastStep = manager . stop ( ) ; interpolator . storeTime ( nextStep ) ; for ( StepHandler handler : stepHandlers ) { handler . handleStep ( interpolator , lastStep ) ; } stepStart = nextStep ; if ( fsal ) { System . arraycopy ( yDotK [ stages - 1 ] , 0 , yDotK [ 0 ] , 0 , y0 . length ) ; } if ( manager . reset ( stepStart , y ) && ! lastStep ) { computeDerivatives ( stepStart , y , yDotK [ 0 ] ) ; } if ( ! lastStep ) { stepSize = filterStep ( stepSize , forward , true ) ; final double factor = Math . min ( maxGrowth , Math . max ( minReduction , safety * Math . pow ( error , exp ) ) ) ; final double scaledH = stepSize * factor ; final double nextT = stepStart + scaledH ; final boolean nextIsLast = forward ? ( nextT >= t ) : ( nextT <= t ) ; hNew = filterStep ( scaledH , forward , nextIsLast ) ; } } final double stopTime = stepStart ; resetInternalState ( ) ; return stopTime ; } | @Override
public double integrate(final FirstOrderDifferentialEquations equations,
final double t0, final double[] y0,
final double t, final double[] y)
throws DerivativeException, IntegratorException {
sanityChecks(equations, t0, y0, t, y);
setEquations(equations);
resetEvaluations();
final boolean forward = t > t0;
// create some internal working arrays
final int stages = c.length + 1;
if (y != y0) {
System.arraycopy(y0, 0, y, 0, y0.length);
}
final double[][] yDotK = new double[stages][y0.length];
final double[] yTmp = new double[y0.length];
// set up an interpolator sharing the integrator arrays
AbstractStepInterpolator interpolator;
if (requiresDenseOutput() || (! eventsHandlersManager.isEmpty())) {
final RungeKuttaStepInterpolator rki = (RungeKuttaStepInterpolator) prototype.copy();
rki.reinitialize(this, yTmp, yDotK, forward);
interpolator = rki;
} else {
interpolator = new DummyStepInterpolator(yTmp, forward);
}
interpolator.storeTime(t0);
// set up integration control objects
stepStart = t0;
double hNew = 0;
boolean firstTime = true;
for (StepHandler handler : stepHandlers) {
handler.reset();
}
CombinedEventsManager manager = addEndTimeChecker(t0, t, eventsHandlersManager);
boolean lastStep = false;
// main integration loop
while (!lastStep) {
interpolator.shift();
double error = 0;
for (boolean loop = true; loop;) {
if (firstTime || !fsal) {
// first stage
computeDerivatives(stepStart, y, yDotK[0]);
}
if (firstTime) {
final double[] scale = new double[y0.length];
if (vecAbsoluteTolerance == null) {
for (int i = 0; i < scale.length; ++i) {
scale[i] = scalAbsoluteTolerance + scalRelativeTolerance * Math.abs(y[i]);
}
} else {
for (int i = 0; i < scale.length; ++i) {
scale[i] = vecAbsoluteTolerance[i] + vecRelativeTolerance[i] * Math.abs(y[i]);
}
}
hNew = initializeStep(equations, forward, getOrder(), scale,
stepStart, y, yDotK[0], yTmp, yDotK[1]);
firstTime = false;
}
stepSize = hNew;
// next stages
for (int k = 1; k < stages; ++k) {
for (int j = 0; j < y0.length; ++j) {
double sum = a[k-1][0] * yDotK[0][j];
for (int l = 1; l < k; ++l) {
sum += a[k-1][l] * yDotK[l][j];
}
yTmp[j] = y[j] + stepSize * sum;
}
computeDerivatives(stepStart + c[k-1] * stepSize, yTmp, yDotK[k]);
}
// estimate the state at the end of the step
for (int j = 0; j < y0.length; ++j) {
double sum = b[0] * yDotK[0][j];
for (int l = 1; l < stages; ++l) {
sum += b[l] * yDotK[l][j];
}
yTmp[j] = y[j] + stepSize * sum;
}
// estimate the error at the end of the step
error = estimateError(yDotK, y, yTmp, stepSize);
if (error <= 1.0) {
// discrete events handling
interpolator.storeTime(stepStart + stepSize);
if (manager.evaluateStep(interpolator)) {
final double dt = manager.getEventTime() - stepStart;
if (Math.abs(dt) <= Math.ulp(stepStart)) {
// rejecting the step would lead to a too small next step, we accept it
loop = false;
} else {
// reject the step to match exactly the next switch time
hNew = dt;
}
} else {
// accept the step
loop = false;
}
} else {
// reject the step and attempt to reduce error by stepsize control
final double factor =
Math.min(maxGrowth,
Math.max(minReduction, safety * Math.pow(error, exp)));
hNew = filterStep(stepSize * factor, forward, false);
}
}
// the step has been accepted
final double nextStep = stepStart + stepSize;
System.arraycopy(yTmp, 0, y, 0, y0.length);
manager.stepAccepted(nextStep, y);
lastStep = manager.stop();
// provide the step data to the step handler
interpolator.storeTime(nextStep);
for (StepHandler handler : stepHandlers) {
handler.handleStep(interpolator, lastStep);
}
stepStart = nextStep;
if (fsal) {
// save the last evaluation for the next step
System.arraycopy(yDotK[stages - 1], 0, yDotK[0], 0, y0.length);
}
if (manager.reset(stepStart, y) && ! lastStep) {
// some event handler has triggered changes that
// invalidate the derivatives, we need to recompute them
computeDerivatives(stepStart, y, yDotK[0]);
}
if (! lastStep) {
// in some rare cases we may get here with stepSize = 0, for example
// when an event occurs at integration start, reducing the first step
// to zero; we have to reset the step to some safe non zero value
stepSize = filterStep(stepSize, forward, true);
// stepsize control for next step
final double factor = Math.min(maxGrowth,
Math.max(minReduction,
safety * Math.pow(error, exp)));
final double scaledH = stepSize * factor;
final double nextT = stepStart + scaledH;
final boolean nextIsLast = forward ? (nextT >= t) : (nextT <= t);
hNew = filterStep(scaledH, forward, nextIsLast);
}
}
final double stopTime = stepStart;
resetInternalState();
return stopTime;
} | @ Override public double integrate ( final FirstOrderDifferentialEquations equations , final double t0 , final double [ ] y0 , final double t , final double [ ] y ) throws DerivativeException , IntegratorException { sanityChecks ( equations , t0 , y0 , t , y ) ; setEquations ( equations ) ; resetEvaluations ( ) ; final boolean forward = t > t0 ; final int stages = c . length + 1 ; if ( y != y0 ) { System . arraycopy ( y0 , 0 , y , 0 , y0 . length ) ; } final double [ ] [ ] yDotK = new double [ stages ] [ y0 . length ] ; final double [ ] yTmp = new double [ y0 . length ] ; AbstractStepInterpolator interpolator ; if ( requiresDenseOutput ( ) || ( ! eventsHandlersManager . isEmpty ( ) ) ) { final RungeKuttaStepInterpolator rki = ( RungeKuttaStepInterpolator ) prototype . copy ( ) ; rki . reinitialize ( this , yTmp , yDotK , forward ) ; interpolator = rki ; } else { interpolator = new DummyStepInterpolator ( yTmp , forward ) ; } interpolator . storeTime ( t0 ) ; stepStart = t0 ; double hNew = 0 ; boolean firstTime = true ; for ( StepHandler handler : stepHandlers ) { handler . reset ( ) ; } CombinedEventsManager manager = addEndTimeChecker ( t0 , t , eventsHandlersManager ) ; boolean lastStep = false ; while ( ! lastStep ) { interpolator . shift ( ) ; double error = 0 ; for ( boolean loop = true ; loop ; ) { if ( firstTime || ! fsal ) { computeDerivatives ( stepStart , y , yDotK [ 0 ] ) ; } if ( firstTime ) { final double [ ] scale = new double [ y0 . length ] ; if ( vecAbsoluteTolerance == null ) { for ( int i = 0 ; i < scale . length ; ++ i ) { scale [ i ] = scalAbsoluteTolerance + scalRelativeTolerance * Math . abs ( y [ i ] ) ; } } else { for ( int i = 0 ; i < scale . length ; ++ i ) { scale [ i ] = vecAbsoluteTolerance [ i ] + vecRelativeTolerance [ i ] * Math . abs ( y [ i ] ) ; } } hNew = initializeStep ( equations , forward , getOrder ( ) , scale , stepStart , y , yDotK [ 0 ] , yTmp , yDotK [ 1 ] ) ; firstTime = false ; } stepSize = hNew ; for ( int k = 1 ; k < stages ; ++ k ) { for ( int j = 0 ; j < y0 . length ; ++ j ) { double sum = a [ k - 1 ] [ 0 ] * yDotK [ 0 ] [ j ] ; for ( int l = 1 ; l < k ; ++ l ) { sum += a [ k - 1 ] [ l ] * yDotK [ l ] [ j ] ; } yTmp [ j ] = y [ j ] + stepSize * sum ; } computeDerivatives ( stepStart + c [ k - 1 ] * stepSize , yTmp , yDotK [ k ] ) ; } for ( int j = 0 ; j < y0 . length ; ++ j ) { double sum = b [ 0 ] * yDotK [ 0 ] [ j ] ; for ( int l = 1 ; l < stages ; ++ l ) { sum += b [ l ] * yDotK [ l ] [ j ] ; } yTmp [ j ] = y [ j ] + stepSize * sum ; } error = estimateError ( yDotK , y , yTmp , stepSize ) ; if ( error <= 1.0 ) { interpolator . storeTime ( stepStart + stepSize ) ; if ( manager . evaluateStep ( interpolator ) ) { final double dt = manager . getEventTime ( ) - stepStart ; if ( Math . abs ( dt ) <= Math . ulp ( stepStart ) ) { loop = false ; } else { hNew = dt ; } } else { loop = false ; } } else { final double factor = Math . min ( maxGrowth , Math . max ( minReduction , safety * Math . pow ( error , exp ) ) ) ; hNew = filterStep ( stepSize * factor , forward , false ) ; } } final double nextStep = stepStart + stepSize ; System . arraycopy ( yTmp , 0 , y , 0 , y0 . length ) ; manager . stepAccepted ( nextStep , y ) ; lastStep = manager . stop ( ) ; interpolator . storeTime ( nextStep ) ; for ( StepHandler handler : stepHandlers ) { handler . handleStep ( interpolator , lastStep ) ; } stepStart = nextStep ; if ( fsal ) { System . arraycopy ( yDotK [ stages - 1 ] , 0 , yDotK [ 0 ] , 0 , y0 . length ) ; } if ( manager . reset ( stepStart , y ) && ! lastStep ) { computeDerivatives ( stepStart , y , yDotK [ 0 ] ) ; } if ( ! lastStep ) { stepSize = filterStep ( stepSize , forward , true ) ; final double factor = Math . min ( maxGrowth , Math . max ( minReduction , safety * Math . pow ( error , exp ) ) ) ; final double scaledH = stepSize * factor ; final double nextT = stepStart + scaledH ; final boolean nextIsLast = forward ? ( nextT >= t ) : ( nextT <= t ) ; hNew = filterStep ( scaledH , forward , nextIsLast ) ; } } final double stopTime = stepStart ; resetInternalState ( ) ; return stopTime ; } |
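The essence of the Math 74 fix above is the scaling formula sc_i = Atol_i + Rtol_i * |y_i|. A minimal standalone sketch of that computation follows; the method name errorScale is invented for this illustration, with parameter names mirroring the integrator fields.
```java
// Each component gets an absolute floor plus a term proportional to the
// state magnitude, so the first-step guess cannot blow up when |y| is large.
static double[] errorScale(double[] y,
                           double scalAtol, double scalRtol,
                           double[] vecAtol, double[] vecRtol) {
    final double[] scale = new double[y.length];
    for (int i = 0; i < y.length; ++i) {
        scale[i] = (vecAtol == null)
                 ? scalAtol + scalRtol * Math.abs(y[i])
                 : vecAtol[i] + vecRtol[i] * Math.abs(y[i]);
    }
    return scale;
}
```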
JacksonCore | 6 | src/main/java/com/fasterxml/jackson/core/JsonPointer.java | 185 | 206 | `JsonPointer` should not consider "00" to be a valid index | Although `00` can be parsed as `0` in some cases, it is not a valid JSON number, nor is it a legal numeric index for JSON Pointer. As such, the `JsonPointer` class should ensure it matches "00" only as a property name and never as an array index.
| private final static int _parseIndex(String str) {
final int len = str.length();
// [core#133]: beware of super long indexes; assume we never
// have arrays over 2 billion entries so ints are fine.
if (len == 0 || len > 10) {
return -1;
}
// [core#176]: no leading zeroes allowed
for (int i = 0; i < len; ++i) {
char c = str.charAt(i);
if (c > '9' || c < '0') {
return -1;
}
}
if (len == 10) {
long l = NumberInput.parseLong(str);
if (l > Integer.MAX_VALUE) {
return -1;
}
}
return NumberInput.parseInt(str);
} | private final static int _parseIndex ( String str ) { final int len = str . length ( ) ; if ( len == 0 || len > 10 ) { return - 1 ; } for ( int i = 0 ; i < len ; ++ i ) { char c = str . charAt ( i ) ; if ( c > '9' || c < '0' ) { return - 1 ; } } if ( len == 10 ) { long l = NumberInput . parseLong ( str ) ; if ( l > Integer . MAX_VALUE ) { return - 1 ; } } return NumberInput . parseInt ( str ) ; } | private final static int _parseIndex(String str) {
final int len = str.length();
// [core#133]: beware of super long indexes; assume we never
// have arrays over 2 billion entries so ints are fine.
if (len == 0 || len > 10) {
return -1;
}
// [core#176]: no leading zeroes allowed
char c = str.charAt(0);
if (c <= '0') {
return (len == 1 && c == '0') ? 0 : -1;
}
if (c > '9') {
return -1;
}
for (int i = 1; i < len; ++i) {
c = str.charAt(i);
if (c > '9' || c < '0') {
return -1;
}
}
if (len == 10) {
long l = NumberInput.parseLong(str);
if (l > Integer.MAX_VALUE) {
return -1;
}
}
return NumberInput.parseInt(str);
} | private final static int _parseIndex ( String str ) { final int len = str . length ( ) ; if ( len == 0 || len > 10 ) { return - 1 ; } char c = str . charAt ( 0 ) ; if ( c <= '0' ) { return ( len == 1 && c == '0' ) ? 0 : - 1 ; } if ( c > '9' ) { return - 1 ; } for ( int i = 1 ; i < len ; ++ i ) { c = str . charAt ( i ) ; if ( c > '9' || c < '0' ) { return - 1 ; } } if ( len == 10 ) { long l = NumberInput . parseLong ( str ) ; if ( l > Integer . MAX_VALUE ) { return - 1 ; } } return NumberInput . parseInt ( str ) ; } |
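The contract enforced by the fixed `_parseIndex` above can be spot-checked through the public `JsonPointer.compile` entry point. A minimal sketch, assuming a jackson-core build containing this fix is on the classpath and assertions are enabled with `-ea`:
```java
import com.fasterxml.jackson.core.JsonPointer;

public class LeadingZeroIndexCheck {
    public static void main(String[] args) {
        // "0" is a legal array index...
        assert JsonPointer.compile("/0").getMatchingIndex() == 0;
        // ...but a leading zero disqualifies the segment as an index,
        // so "00" matches only as the property name "00".
        assert JsonPointer.compile("/00").getMatchingIndex() == -1;
        assert "00".equals(JsonPointer.compile("/00").getMatchingProperty());
        assert JsonPointer.compile("/012").getMatchingIndex() == -1;
        System.out.println("leading-zero segments are names, not indexes");
    }
}
```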
JacksonDatabind | 46 | src/main/java/com/fasterxml/jackson/databind/type/ReferenceType.java | 150 | 158 | Incorrect signature for generic type via `JavaType.getGenericSignature` | (see https://github.com/FasterXML/jackson-modules-base/issues/8 for background)
It looks like generic signature generation is missing one closing `>` character to produce:
```
()Ljava/util/concurrent/atomic/AtomicReference<Ljava/lang/String;;
```
instead of expected
```
()Ljava/util/concurrent/atomic/AtomicReference<Ljava/lang/String;>;
```
that is, the closing '>' is missing.
| @Override
public StringBuilder getGenericSignature(StringBuilder sb)
{
_classSignature(_class, sb, false);
sb.append('<');
sb = _referencedType.getGenericSignature(sb);
sb.append(';');
return sb;
} | @ Override public StringBuilder getGenericSignature ( StringBuilder sb ) { _classSignature ( _class , sb , false ) ; sb . append ( '<' ) ; sb = _referencedType . getGenericSignature ( sb ) ; sb . append ( ';' ) ; return sb ; } | @Override
public StringBuilder getGenericSignature(StringBuilder sb)
{
_classSignature(_class, sb, false);
sb.append('<');
sb = _referencedType.getGenericSignature(sb);
sb.append(">;");
return sb;
} | @ Override public StringBuilder getGenericSignature ( StringBuilder sb ) { _classSignature ( _class , sb , false ) ; sb . append ( '<' ) ; sb = _referencedType . getGenericSignature ( sb ) ; sb . append ( ">;" ) ; return sb ; } |
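A standalone illustration of the one-character fix above: a generic signature must close its type-argument list with '>' before the trailing ';'. The helper name refSignature is invented for this sketch.
```java
// Builds a JVM generic signature fragment for a single-argument generic
// type. The old code appended only ';' and left the '<' unbalanced.
static String refSignature(String containerDesc, String argSignature) {
    return new StringBuilder()
            .append(containerDesc) // e.g. "Ljava/util/concurrent/atomic/AtomicReference"
            .append('<')
            .append(argSignature)  // e.g. "Ljava/lang/String;"
            .append(">;")
            .toString();
}
```
With the example inputs from the report, this yields the expected `Ljava/util/concurrent/atomic/AtomicReference<Ljava/lang/String;>;`.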
Math | 23 | src/main/java/org/apache/commons/math3/optimization/univariate/BrentOptimizer.java | 114 | 281 | "BrentOptimizer" not always reporting the best point | {{BrentOptimizer}} (package "o.a.c.m.optimization.univariate") does not check that the point it is going to return is indeed the best one it has encountered. Indeed, the last evaluated point might be slightly worse than the one before last. | @Override
protected UnivariatePointValuePair doOptimize() {
final boolean isMinim = getGoalType() == GoalType.MINIMIZE;
final double lo = getMin();
final double mid = getStartValue();
final double hi = getMax();
// Optional additional convergence criteria.
final ConvergenceChecker<UnivariatePointValuePair> checker
= getConvergenceChecker();
double a;
double b;
if (lo < hi) {
a = lo;
b = hi;
} else {
a = hi;
b = lo;
}
double x = mid;
double v = x;
double w = x;
double d = 0;
double e = 0;
double fx = computeObjectiveValue(x);
if (!isMinim) {
fx = -fx;
}
double fv = fx;
double fw = fx;
UnivariatePointValuePair previous = null;
UnivariatePointValuePair current
= new UnivariatePointValuePair(x, isMinim ? fx : -fx);
// Best point encountered so far (which is the initial guess).
int iter = 0;
while (true) {
final double m = 0.5 * (a + b);
final double tol1 = relativeThreshold * FastMath.abs(x) + absoluteThreshold;
final double tol2 = 2 * tol1;
// Default stopping criterion.
final boolean stop = FastMath.abs(x - m) <= tol2 - 0.5 * (b - a);
if (!stop) {
double p = 0;
double q = 0;
double r = 0;
double u = 0;
if (FastMath.abs(e) > tol1) { // Fit parabola.
r = (x - w) * (fx - fv);
q = (x - v) * (fx - fw);
p = (x - v) * q - (x - w) * r;
q = 2 * (q - r);
if (q > 0) {
p = -p;
} else {
q = -q;
}
r = e;
e = d;
if (p > q * (a - x) &&
p < q * (b - x) &&
FastMath.abs(p) < FastMath.abs(0.5 * q * r)) {
// Parabolic interpolation step.
d = p / q;
u = x + d;
// f must not be evaluated too close to a or b.
if (u - a < tol2 || b - u < tol2) {
if (x <= m) {
d = tol1;
} else {
d = -tol1;
}
}
} else {
// Golden section step.
if (x < m) {
e = b - x;
} else {
e = a - x;
}
d = GOLDEN_SECTION * e;
}
} else {
// Golden section step.
if (x < m) {
e = b - x;
} else {
e = a - x;
}
d = GOLDEN_SECTION * e;
}
// Update by at least "tol1".
if (FastMath.abs(d) < tol1) {
if (d >= 0) {
u = x + tol1;
} else {
u = x - tol1;
}
} else {
u = x + d;
}
double fu = computeObjectiveValue(u);
if (!isMinim) {
fu = -fu;
}
// User-defined convergence checker.
previous = current;
current = new UnivariatePointValuePair(u, isMinim ? fu : -fu);
if (checker != null) {
if (checker.converged(iter, previous, current)) {
return best(current, previous, isMinim);
}
}
// Update a, b, v, w and x.
if (fu <= fx) {
if (u < x) {
b = x;
} else {
a = x;
}
v = w;
fv = fw;
w = x;
fw = fx;
x = u;
fx = fu;
} else {
if (u < x) {
a = u;
} else {
b = u;
}
if (fu <= fw ||
Precision.equals(w, x)) {
v = w;
fv = fw;
w = u;
fw = fu;
} else if (fu <= fv ||
Precision.equals(v, x) ||
Precision.equals(v, w)) {
v = u;
fv = fu;
}
}
} else { // Default termination (Brent's criterion).
return
best(current,
previous,
isMinim);
}
++iter;
}
} | @ Override protected UnivariatePointValuePair doOptimize ( ) { final boolean isMinim = getGoalType ( ) == GoalType . MINIMIZE ; final double lo = getMin ( ) ; final double mid = getStartValue ( ) ; final double hi = getMax ( ) ; final ConvergenceChecker < UnivariatePointValuePair > checker = getConvergenceChecker ( ) ; double a ; double b ; if ( lo < hi ) { a = lo ; b = hi ; } else { a = hi ; b = lo ; } double x = mid ; double v = x ; double w = x ; double d = 0 ; double e = 0 ; double fx = computeObjectiveValue ( x ) ; if ( ! isMinim ) { fx = - fx ; } double fv = fx ; double fw = fx ; UnivariatePointValuePair previous = null ; UnivariatePointValuePair current = new UnivariatePointValuePair ( x , isMinim ? fx : - fx ) ; int iter = 0 ; while ( true ) { final double m = 0.5 * ( a + b ) ; final double tol1 = relativeThreshold * FastMath . abs ( x ) + absoluteThreshold ; final double tol2 = 2 * tol1 ; final boolean stop = FastMath . abs ( x - m ) <= tol2 - 0.5 * ( b - a ) ; if ( ! stop ) { double p = 0 ; double q = 0 ; double r = 0 ; double u = 0 ; if ( FastMath . abs ( e ) > tol1 ) { r = ( x - w ) * ( fx - fv ) ; q = ( x - v ) * ( fx - fw ) ; p = ( x - v ) * q - ( x - w ) * r ; q = 2 * ( q - r ) ; if ( q > 0 ) { p = - p ; } else { q = - q ; } r = e ; e = d ; if ( p > q * ( a - x ) && p < q * ( b - x ) && FastMath . abs ( p ) < FastMath . abs ( 0.5 * q * r ) ) { d = p / q ; u = x + d ; if ( u - a < tol2 || b - u < tol2 ) { if ( x <= m ) { d = tol1 ; } else { d = - tol1 ; } } } else { if ( x < m ) { e = b - x ; } else { e = a - x ; } d = GOLDEN_SECTION * e ; } } else { if ( x < m ) { e = b - x ; } else { e = a - x ; } d = GOLDEN_SECTION * e ; } if ( FastMath . abs ( d ) < tol1 ) { if ( d >= 0 ) { u = x + tol1 ; } else { u = x - tol1 ; } } else { u = x + d ; } double fu = computeObjectiveValue ( u ) ; if ( ! isMinim ) { fu = - fu ; } previous = current ; current = new UnivariatePointValuePair ( u , isMinim ? fu : - fu ) ; if ( checker != null ) { if ( checker . converged ( iter , previous , current ) ) { return best ( current , previous , isMinim ) ; } } if ( fu <= fx ) { if ( u < x ) { b = x ; } else { a = x ; } v = w ; fv = fw ; w = x ; fw = fx ; x = u ; fx = fu ; } else { if ( u < x ) { a = u ; } else { b = u ; } if ( fu <= fw || Precision . equals ( w , x ) ) { v = w ; fv = fw ; w = u ; fw = fu ; } else if ( fu <= fv || Precision . equals ( v , x ) || Precision . equals ( v , w ) ) { v = u ; fv = fu ; } } } else { return best ( current , previous , isMinim ) ; } ++ iter ; } } | @Override
protected UnivariatePointValuePair doOptimize() {
final boolean isMinim = getGoalType() == GoalType.MINIMIZE;
final double lo = getMin();
final double mid = getStartValue();
final double hi = getMax();
// Optional additional convergence criteria.
final ConvergenceChecker<UnivariatePointValuePair> checker
= getConvergenceChecker();
double a;
double b;
if (lo < hi) {
a = lo;
b = hi;
} else {
a = hi;
b = lo;
}
double x = mid;
double v = x;
double w = x;
double d = 0;
double e = 0;
double fx = computeObjectiveValue(x);
if (!isMinim) {
fx = -fx;
}
double fv = fx;
double fw = fx;
UnivariatePointValuePair previous = null;
UnivariatePointValuePair current
= new UnivariatePointValuePair(x, isMinim ? fx : -fx);
// Best point encountered so far (which is the initial guess).
UnivariatePointValuePair best = current;
int iter = 0;
while (true) {
final double m = 0.5 * (a + b);
final double tol1 = relativeThreshold * FastMath.abs(x) + absoluteThreshold;
final double tol2 = 2 * tol1;
// Default stopping criterion.
final boolean stop = FastMath.abs(x - m) <= tol2 - 0.5 * (b - a);
if (!stop) {
double p = 0;
double q = 0;
double r = 0;
double u = 0;
if (FastMath.abs(e) > tol1) { // Fit parabola.
r = (x - w) * (fx - fv);
q = (x - v) * (fx - fw);
p = (x - v) * q - (x - w) * r;
q = 2 * (q - r);
if (q > 0) {
p = -p;
} else {
q = -q;
}
r = e;
e = d;
if (p > q * (a - x) &&
p < q * (b - x) &&
FastMath.abs(p) < FastMath.abs(0.5 * q * r)) {
// Parabolic interpolation step.
d = p / q;
u = x + d;
// f must not be evaluated too close to a or b.
if (u - a < tol2 || b - u < tol2) {
if (x <= m) {
d = tol1;
} else {
d = -tol1;
}
}
} else {
// Golden section step.
if (x < m) {
e = b - x;
} else {
e = a - x;
}
d = GOLDEN_SECTION * e;
}
} else {
// Golden section step.
if (x < m) {
e = b - x;
} else {
e = a - x;
}
d = GOLDEN_SECTION * e;
}
// Update by at least "tol1".
if (FastMath.abs(d) < tol1) {
if (d >= 0) {
u = x + tol1;
} else {
u = x - tol1;
}
} else {
u = x + d;
}
double fu = computeObjectiveValue(u);
if (!isMinim) {
fu = -fu;
}
// User-defined convergence checker.
previous = current;
current = new UnivariatePointValuePair(u, isMinim ? fu : -fu);
best = best(best,
best(current,
previous,
isMinim),
isMinim);
if (checker != null) {
if (checker.converged(iter, previous, current)) {
return best;
}
}
// Update a, b, v, w and x.
if (fu <= fx) {
if (u < x) {
b = x;
} else {
a = x;
}
v = w;
fv = fw;
w = x;
fw = fx;
x = u;
fx = fu;
} else {
if (u < x) {
a = u;
} else {
b = u;
}
if (fu <= fw ||
Precision.equals(w, x)) {
v = w;
fv = fw;
w = u;
fw = fu;
} else if (fu <= fv ||
Precision.equals(v, x) ||
Precision.equals(v, w)) {
v = u;
fv = fu;
}
}
} else { // Default termination (Brent's criterion).
return best(best,
best(current,
previous,
isMinim),
isMinim);
}
++iter;
}
} | @ Override protected UnivariatePointValuePair doOptimize ( ) { final boolean isMinim = getGoalType ( ) == GoalType . MINIMIZE ; final double lo = getMin ( ) ; final double mid = getStartValue ( ) ; final double hi = getMax ( ) ; final ConvergenceChecker < UnivariatePointValuePair > checker = getConvergenceChecker ( ) ; double a ; double b ; if ( lo < hi ) { a = lo ; b = hi ; } else { a = hi ; b = lo ; } double x = mid ; double v = x ; double w = x ; double d = 0 ; double e = 0 ; double fx = computeObjectiveValue ( x ) ; if ( ! isMinim ) { fx = - fx ; } double fv = fx ; double fw = fx ; UnivariatePointValuePair previous = null ; UnivariatePointValuePair current = new UnivariatePointValuePair ( x , isMinim ? fx : - fx ) ; UnivariatePointValuePair best = current ; int iter = 0 ; while ( true ) { final double m = 0.5 * ( a + b ) ; final double tol1 = relativeThreshold * FastMath . abs ( x ) + absoluteThreshold ; final double tol2 = 2 * tol1 ; final boolean stop = FastMath . abs ( x - m ) <= tol2 - 0.5 * ( b - a ) ; if ( ! stop ) { double p = 0 ; double q = 0 ; double r = 0 ; double u = 0 ; if ( FastMath . abs ( e ) > tol1 ) { r = ( x - w ) * ( fx - fv ) ; q = ( x - v ) * ( fx - fw ) ; p = ( x - v ) * q - ( x - w ) * r ; q = 2 * ( q - r ) ; if ( q > 0 ) { p = - p ; } else { q = - q ; } r = e ; e = d ; if ( p > q * ( a - x ) && p < q * ( b - x ) && FastMath . abs ( p ) < FastMath . abs ( 0.5 * q * r ) ) { d = p / q ; u = x + d ; if ( u - a < tol2 || b - u < tol2 ) { if ( x <= m ) { d = tol1 ; } else { d = - tol1 ; } } } else { if ( x < m ) { e = b - x ; } else { e = a - x ; } d = GOLDEN_SECTION * e ; } } else { if ( x < m ) { e = b - x ; } else { e = a - x ; } d = GOLDEN_SECTION * e ; } if ( FastMath . abs ( d ) < tol1 ) { if ( d >= 0 ) { u = x + tol1 ; } else { u = x - tol1 ; } } else { u = x + d ; } double fu = computeObjectiveValue ( u ) ; if ( ! isMinim ) { fu = - fu ; } previous = current ; current = new UnivariatePointValuePair ( u , isMinim ? fu : - fu ) ; best = best ( best , best ( current , previous , isMinim ) , isMinim ) ; if ( checker != null ) { if ( checker . converged ( iter , previous , current ) ) { return best ; } } if ( fu <= fx ) { if ( u < x ) { b = x ; } else { a = x ; } v = w ; fv = fw ; w = x ; fw = fx ; x = u ; fx = fu ; } else { if ( u < x ) { a = u ; } else { b = u ; } if ( fu <= fw || Precision . equals ( w , x ) ) { v = w ; fv = fw ; w = u ; fw = fu ; } else if ( fu <= fv || Precision . equals ( v , x ) || Precision . equals ( v , w ) ) { v = u ; fv = fu ; } } } else { return best ( best , best ( current , previous , isMinim ) , isMinim ) ; } ++ iter ; } } |
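The core of the Math 23 fix above is carrying a running "best so far" alongside `previous` and `current`, because the last evaluated point can be slightly worse than an earlier one. A minimal sketch of that comparison, with points simplified to `double[] {x, f(x)}` pairs (the representation is invented here; Brent's actual `best` helper works on `UnivariatePointValuePair`):
```java
// Returns the better of two candidate points, tolerating nulls so the
// very first iteration can seed the incumbent. Never return the final
// iterate blindly; compare it against the incumbent first.
static double[] best(double[] a, double[] b, boolean isMinim) {
    if (a == null) return b;
    if (b == null) return a;
    if (isMinim) {
        return (a[1] <= b[1]) ? a : b;
    }
    return (a[1] >= b[1]) ? a : b;
}
```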
JacksonDatabind | 102 | src/main/java/com/fasterxml/jackson/databind/ser/std/DateTimeSerializerBase.java | 61 | 136 | Cannot set custom format for `SqlDateSerializer` globally | Version: 2.9.5
After https://github.com/FasterXML/jackson-databind/issues/219 was fixed, the default format for `java.sql.Date` serialization switched from string to numeric, following the default value of `WRITE_DATES_AS_TIMESTAMPS`.
In order to prevent breaks, I want `java.sql.Date` to serialize as a string, without changing behavior for `java.util.Date` (which has always serialized as a number by default).
According to https://github.com/FasterXML/jackson-databind/issues/219#issuecomment-370690333, I should be able to revert the behavior for `java.sql.Date` only with
```
final ObjectMapper mapper = new ObjectMapper();
mapper.configOverride(java.sql.Date.class).setFormat(JsonFormat.Value.forPattern("yyyy-MM-dd"));
```
This doesn't seem to do anything, though. Looking at the code, it appears to be because the custom format isn't actually added to `SqlDateSerializer` except in the `createContextual` method (https://github.com/FasterXML/jackson-databind/blob/master/src/main/java/com/fasterxml/jackson/databind/ser/std/DateTimeSerializerBase.java#L59).
For now, I've reverted this behavior with
```
mapper.registerModule(new SimpleModule() {
{
addSerializer(
java.sql.Date.class,
new SqlDateSerializer().withFormat(false, new SimpleDateFormat("yyyy-MM-dd"))
);
}
});
```
but it seems pretty hacky so I'd prefer the other method if possible.
| @Override
public JsonSerializer<?> createContextual(SerializerProvider serializers,
BeanProperty property) throws JsonMappingException
{
// Note! Should not skip if `property` null since that'd skip check
// for config overrides, in case of root value
if (property == null) {
return this;
}
JsonFormat.Value format = findFormatOverrides(serializers, property, handledType());
if (format == null) {
return this;
}
// Simple case first: serialize as numeric timestamp?
JsonFormat.Shape shape = format.getShape();
if (shape.isNumeric()) {
return withFormat(Boolean.TRUE, null);
}
// 08-Jun-2017, tatu: With [databind#1648], this gets bit tricky..
// First: custom pattern will override things
if (format.hasPattern()) {
final Locale loc = format.hasLocale()
? format.getLocale()
: serializers.getLocale();
SimpleDateFormat df = new SimpleDateFormat(format.getPattern(), loc);
TimeZone tz = format.hasTimeZone() ? format.getTimeZone()
: serializers.getTimeZone();
df.setTimeZone(tz);
return withFormat(Boolean.FALSE, df);
}
// Otherwise, need one of these changes:
final boolean hasLocale = format.hasLocale();
final boolean hasTZ = format.hasTimeZone();
final boolean asString = (shape == JsonFormat.Shape.STRING);
if (!hasLocale && !hasTZ && !asString) {
return this;
}
DateFormat df0 = serializers.getConfig().getDateFormat();
// Jackson's own `StdDateFormat` is quite easy to deal with...
if (df0 instanceof StdDateFormat) {
StdDateFormat std = (StdDateFormat) df0;
if (format.hasLocale()) {
std = std.withLocale(format.getLocale());
}
if (format.hasTimeZone()) {
std = std.withTimeZone(format.getTimeZone());
}
return withFormat(Boolean.FALSE, std);
}
// 08-Jun-2017, tatu: Unfortunately there's no generally usable
// mechanism for changing `DateFormat` instances (or even clone()ing)
// So: require it be `SimpleDateFormat`; can't config other types
if (!(df0 instanceof SimpleDateFormat)) {
serializers.reportBadDefinition(handledType(), String.format(
"Configured `DateFormat` (%s) not a `SimpleDateFormat`; cannot configure `Locale` or `TimeZone`",
df0.getClass().getName()));
}
SimpleDateFormat df = (SimpleDateFormat) df0;
if (hasLocale) {
// Ugh. No way to change `Locale`, create copy; must re-crete completely:
df = new SimpleDateFormat(df.toPattern(), format.getLocale());
} else {
df = (SimpleDateFormat) df.clone();
}
TimeZone newTz = format.getTimeZone();
boolean changeTZ = (newTz != null) && !newTz.equals(df.getTimeZone());
if (changeTZ) {
df.setTimeZone(newTz);
}
return withFormat(Boolean.FALSE, df);
} | @ Override public JsonSerializer < ? > createContextual ( SerializerProvider serializers , BeanProperty property ) throws JsonMappingException { if ( property == null ) { return this ; } JsonFormat . Value format = findFormatOverrides ( serializers , property , handledType ( ) ) ; if ( format == null ) { return this ; } JsonFormat . Shape shape = format . getShape ( ) ; if ( shape . isNumeric ( ) ) { return withFormat ( Boolean . TRUE , null ) ; } if ( format . hasPattern ( ) ) { final Locale loc = format . hasLocale ( ) ? format . getLocale ( ) : serializers . getLocale ( ) ; SimpleDateFormat df = new SimpleDateFormat ( format . getPattern ( ) , loc ) ; TimeZone tz = format . hasTimeZone ( ) ? format . getTimeZone ( ) : serializers . getTimeZone ( ) ; df . setTimeZone ( tz ) ; return withFormat ( Boolean . FALSE , df ) ; } final boolean hasLocale = format . hasLocale ( ) ; final boolean hasTZ = format . hasTimeZone ( ) ; final boolean asString = ( shape == JsonFormat . Shape . STRING ) ; if ( ! hasLocale && ! hasTZ && ! asString ) { return this ; } DateFormat df0 = serializers . getConfig ( ) . getDateFormat ( ) ; if ( df0 instanceof StdDateFormat ) { StdDateFormat std = ( StdDateFormat ) df0 ; if ( format . hasLocale ( ) ) { std = std . withLocale ( format . getLocale ( ) ) ; } if ( format . hasTimeZone ( ) ) { std = std . withTimeZone ( format . getTimeZone ( ) ) ; } return withFormat ( Boolean . FALSE , std ) ; } if ( ! ( df0 instanceof SimpleDateFormat ) ) { serializers . reportBadDefinition ( handledType ( ) , String . format ( "Configured `DateFormat` (%s) not a `SimpleDateFormat`; cannot configure `Locale` or `TimeZone`" , df0 . getClass ( ) . getName ( ) ) ) ; } SimpleDateFormat df = ( SimpleDateFormat ) df0 ; if ( hasLocale ) { df = new SimpleDateFormat ( df . toPattern ( ) , format . getLocale ( ) ) ; } else { df = ( SimpleDateFormat ) df . clone ( ) ; } TimeZone newTz = format . getTimeZone ( ) ; boolean changeTZ = ( newTz != null ) && ! newTz . equals ( df . getTimeZone ( ) ) ; if ( changeTZ ) { df . setTimeZone ( newTz ) ; } return withFormat ( Boolean . FALSE , df ) ; } | @Override
public JsonSerializer<?> createContextual(SerializerProvider serializers,
BeanProperty property) throws JsonMappingException
{
// Note! Should not skip if `property` null since that'd skip check
// for config overrides, in case of root value
JsonFormat.Value format = findFormatOverrides(serializers, property, handledType());
if (format == null) {
return this;
}
// Simple case first: serialize as numeric timestamp?
JsonFormat.Shape shape = format.getShape();
if (shape.isNumeric()) {
return withFormat(Boolean.TRUE, null);
}
// 08-Jun-2017, tatu: With [databind#1648], this gets bit tricky..
// First: custom pattern will override things
if (format.hasPattern()) {
final Locale loc = format.hasLocale()
? format.getLocale()
: serializers.getLocale();
SimpleDateFormat df = new SimpleDateFormat(format.getPattern(), loc);
TimeZone tz = format.hasTimeZone() ? format.getTimeZone()
: serializers.getTimeZone();
df.setTimeZone(tz);
return withFormat(Boolean.FALSE, df);
}
// Otherwise, need one of these changes:
final boolean hasLocale = format.hasLocale();
final boolean hasTZ = format.hasTimeZone();
final boolean asString = (shape == JsonFormat.Shape.STRING);
if (!hasLocale && !hasTZ && !asString) {
return this;
}
DateFormat df0 = serializers.getConfig().getDateFormat();
// Jackson's own `StdDateFormat` is quite easy to deal with...
if (df0 instanceof StdDateFormat) {
StdDateFormat std = (StdDateFormat) df0;
if (format.hasLocale()) {
std = std.withLocale(format.getLocale());
}
if (format.hasTimeZone()) {
std = std.withTimeZone(format.getTimeZone());
}
return withFormat(Boolean.FALSE, std);
}
// 08-Jun-2017, tatu: Unfortunately there's no generally usable
// mechanism for changing `DateFormat` instances (or even clone()ing)
// So: require it be `SimpleDateFormat`; can't config other types
if (!(df0 instanceof SimpleDateFormat)) {
serializers.reportBadDefinition(handledType(), String.format(
"Configured `DateFormat` (%s) not a `SimpleDateFormat`; cannot configure `Locale` or `TimeZone`",
df0.getClass().getName()));
}
SimpleDateFormat df = (SimpleDateFormat) df0;
if (hasLocale) {
// Ugh. No way to change `Locale`, create copy; must re-crete completely:
df = new SimpleDateFormat(df.toPattern(), format.getLocale());
} else {
df = (SimpleDateFormat) df.clone();
}
TimeZone newTz = format.getTimeZone();
boolean changeTZ = (newTz != null) && !newTz.equals(df.getTimeZone());
if (changeTZ) {
df.setTimeZone(newTz);
}
return withFormat(Boolean.FALSE, df);
} | @ Override public JsonSerializer < ? > createContextual ( SerializerProvider serializers , BeanProperty property ) throws JsonMappingException { JsonFormat . Value format = findFormatOverrides ( serializers , property , handledType ( ) ) ; if ( format == null ) { return this ; } JsonFormat . Shape shape = format . getShape ( ) ; if ( shape . isNumeric ( ) ) { return withFormat ( Boolean . TRUE , null ) ; } if ( format . hasPattern ( ) ) { final Locale loc = format . hasLocale ( ) ? format . getLocale ( ) : serializers . getLocale ( ) ; SimpleDateFormat df = new SimpleDateFormat ( format . getPattern ( ) , loc ) ; TimeZone tz = format . hasTimeZone ( ) ? format . getTimeZone ( ) : serializers . getTimeZone ( ) ; df . setTimeZone ( tz ) ; return withFormat ( Boolean . FALSE , df ) ; } final boolean hasLocale = format . hasLocale ( ) ; final boolean hasTZ = format . hasTimeZone ( ) ; final boolean asString = ( shape == JsonFormat . Shape . STRING ) ; if ( ! hasLocale && ! hasTZ && ! asString ) { return this ; } DateFormat df0 = serializers . getConfig ( ) . getDateFormat ( ) ; if ( df0 instanceof StdDateFormat ) { StdDateFormat std = ( StdDateFormat ) df0 ; if ( format . hasLocale ( ) ) { std = std . withLocale ( format . getLocale ( ) ) ; } if ( format . hasTimeZone ( ) ) { std = std . withTimeZone ( format . getTimeZone ( ) ) ; } return withFormat ( Boolean . FALSE , std ) ; } if ( ! ( df0 instanceof SimpleDateFormat ) ) { serializers . reportBadDefinition ( handledType ( ) , String . format ( "Configured `DateFormat` (%s) not a `SimpleDateFormat`; cannot configure `Locale` or `TimeZone`" , df0 . getClass ( ) . getName ( ) ) ) ; } SimpleDateFormat df = ( SimpleDateFormat ) df0 ; if ( hasLocale ) { df = new SimpleDateFormat ( df . toPattern ( ) , format . getLocale ( ) ) ; } else { df = ( SimpleDateFormat ) df . clone ( ) ; } TimeZone newTz = format . getTimeZone ( ) ; boolean changeTZ = ( newTz != null ) && ! newTz . equals ( df . getTimeZone ( ) ) ; if ( changeTZ ) { df . setTimeZone ( newTz ) ; } return withFormat ( Boolean . FALSE , df ) ; } |
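With the null-property bail-out removed in the fix above, the global override route from the report should take effect. A minimal sketch, assuming a jackson-databind build that contains this fix:
```java
import com.fasterxml.jackson.annotation.JsonFormat;
import com.fasterxml.jackson.databind.ObjectMapper;

public class SqlDateOverrideDemo {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // Per-type config override: only java.sql.Date switches to the
        // string shape; java.util.Date keeps its numeric default.
        mapper.configOverride(java.sql.Date.class)
              .setFormat(JsonFormat.Value.forPattern("yyyy-MM-dd"));
        String json = mapper.writeValueAsString(java.sql.Date.valueOf("2018-12-24"));
        System.out.println(json); // expected with the fix: "2018-12-24"
    }
}
```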
JacksonDatabind | 11 | src/main/java/com/fasterxml/jackson/databind/type/TypeFactory.java | 889 | 930 | Problem resolving locally declared generic type | (reported by Hal H)
A case like:
``` java
class Something {
public <T extends Ruleform> T getEntity()
public <T extends Ruleform> void setEntity(T entity)
}
```
appears to fail on deserialization.
| protected JavaType _fromVariable(TypeVariable<?> type, TypeBindings context)
{
final String name = type.getName();
// 19-Mar-2015: Without context, all we can check are bounds.
if (context == null) {
// And to prevent infinite loops, now need this:
return _unknownType();
} else {
// Ok: here's where context might come in handy!
/* 19-Mar-2015, tatu: As per [databind#609], may need to allow
* unresolved type variables to handle some cases where bounds
* are enough. Let's hope it does not hide real fail cases.
*/
JavaType actualType = context.findType(name);
if (actualType != null) {
return actualType;
}
}
/* 29-Jan-2010, tatu: We used to throw exception here, if type was
* bound: but the problem is that this can occur for generic "base"
* method, overridden by sub-class. If so, we will want to ignore
* current type (for method) since it will be masked.
*/
Type[] bounds = type.getBounds();
// With type variables we must use bound information.
// Theoretically this gets tricky, as there may be multiple
// bounds ("... extends A & B"); and optimally we might
// want to choose the best match. Also, bounds are optional;
// but here we are lucky in that implicit "Object" is
// added as bounds if so.
// Either way let's just use the first bound, for now, and
// worry about better match later on if there is need.
/* 29-Jan-2010, tatu: One more problem are recursive types
* (T extends Comparable<T>). Need to add "placeholder"
* for resolution to catch those.
*/
context._addPlaceholder(name);
return _constructType(bounds[0], context);
} | protected JavaType _fromVariable ( TypeVariable < ? > type , TypeBindings context ) { final String name = type . getName ( ) ; if ( context == null ) { return _unknownType ( ) ; } else { JavaType actualType = context . findType ( name ) ; if ( actualType != null ) { return actualType ; } } Type [ ] bounds = type . getBounds ( ) ; context . _addPlaceholder ( name ) ; return _constructType ( bounds [ 0 ] , context ) ; } | protected JavaType _fromVariable(TypeVariable<?> type, TypeBindings context)
{
final String name = type.getName();
// 19-Mar-2015: Without context, all we can check are bounds.
if (context == null) {
// And to prevent infinite loops, now need this:
context = new TypeBindings(this, (Class<?>) null);
} else {
// Ok: here's where context might come in handy!
/* 19-Mar-2015, tatu: As per [databind#609], may need to allow
* unresolved type variables to handle some cases where bounds
* are enough. Let's hope it does not hide real fail cases.
*/
JavaType actualType = context.findType(name, false);
if (actualType != null) {
return actualType;
}
}
/* 29-Jan-2010, tatu: We used to throw exception here, if type was
* bound: but the problem is that this can occur for generic "base"
* method, overridden by sub-class. If so, we will want to ignore
* current type (for method) since it will be masked.
*/
Type[] bounds = type.getBounds();
// With type variables we must use bound information.
// Theoretically this gets tricky, as there may be multiple
// bounds ("... extends A & B"); and optimally we might
// want to choose the best match. Also, bounds are optional;
// but here we are lucky in that implicit "Object" is
// added as bounds if so.
// Either way let's just use the first bound, for now, and
// worry about better match later on if there is need.
/* 29-Jan-2010, tatu: One more problem are recursive types
* (T extends Comparable<T>). Need to add "placeholder"
* for resolution to catch those.
*/
context._addPlaceholder(name);
return _constructType(bounds[0], context);
} | protected JavaType _fromVariable ( TypeVariable < ? > type , TypeBindings context ) { final String name = type . getName ( ) ; if ( context == null ) { context = new TypeBindings ( this , ( Class < ? > ) null ) ; } else { JavaType actualType = context . findType ( name , false ) ; if ( actualType != null ) { return actualType ; } } Type [ ] bounds = type . getBounds ( ) ; context . _addPlaceholder ( name ) ; return _constructType ( bounds [ 0 ] , context ) ; } |
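A minimal reproduction of the reported shape, with `Ruleform` reduced to a trivial stand-in for the reporter's domain type; before the fix above, resolving `T` during deserialization failed instead of falling back to its bound:
```java
import com.fasterxml.jackson.databind.ObjectMapper;

class Ruleform {
    public String name;
}

class Something {
    private Ruleform entity;

    @SuppressWarnings("unchecked")
    public <T extends Ruleform> T getEntity() { return (T) entity; }

    public <T extends Ruleform> void setEntity(T entity) { this.entity = entity; }
}

public class LocalTypeVarDemo {
    public static void main(String[] args) throws Exception {
        // With the fix, the type variable T resolves to its bound (Ruleform).
        Something s = new ObjectMapper()
                .readValue("{\"entity\":{\"name\":\"rule-1\"}}", Something.class);
        Ruleform r = s.getEntity();
        System.out.println(r.name); // rule-1
    }
}
```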
Cli | 4 | src/java/org/apache/commons/cli/Parser.java | 290 | 309 | PosixParser interprets "-target opt" as "-t arget opt" | This was posted on the Commons-Developer list and confirmed as a bug.
> Is this a bug? Or am I using this incorrectly?
> I have an option with short and long values. Given code that is
> essentially what is below, with a PosixParser I see results as
> follows:
>
> A command line with just "-t" prints out the results of the catch
> block
> (OK)
> A command line with just "-target" prints out the results of the catch
> block (OK)
> A command line with just "-t foobar.com" prints out "processing selected
> target: foobar.com" (OK)
> A command line with just "-target foobar.com" prints out "processing
> selected target: arget" (ERROR?)
>
> =============================================================================
> private static final String OPTION_TARGET = "t";
> private static final String OPTION_TARGET_LONG = "target";
> // ...
> Option generateTarget = new Option(OPTION_TARGET,
> OPTION_TARGET_LONG,
> true,
> "Generate files for the specified
> target machine");
> // ...
> try {
> parsedLine = parser.parse(cmdLineOpts, args);
> } catch (ParseException pe) {
> System.out.println("Invalid command: " + pe.getMessage() +
> "\n");
> HelpFormatter hf = new HelpFormatter();
> hf.printHelp(USAGE, cmdLineOpts);
> System.exit(-1);
> }
>
> if (parsedLine.hasOption(OPTION_TARGET)) {
> System.out.println("processing selected target: " +
> parsedLine.getOptionValue(OPTION_TARGET));
> }
It is a bug but it is due to well-defined behaviour (so that makes me feel a
little better about myself ;). To support *special* options
(well, I call them special anyway) like -Dsystem.property=value we need to be
able to examine the first character of an option. If the first character is
itself defined as an Option then the remainder of the token is used as the
value, e.g. 'D' is the token, it is an option so 'system.property=value' is the
argument value for that option. This is the behaviour that we are seeing for
your example.
't' is the token, it is an option, so 'arget' is the argument value.
I suppose a solution to this could be to have a way to specify properties for
parsers. In this case 'posix.special.option == true' for turning
on *special* options. I'll have a look into this and let you know.
Just to keep track of this and to get you used to how we operate, can you log a
bug in Bugzilla for this?
Thanks,
-John K | private void checkRequiredOptions()
throws MissingOptionException
{
// if there are required options that have not been
// processsed
if (requiredOptions.size() > 0)
{
Iterator iter = requiredOptions.iterator();
StringBuffer buff = new StringBuffer();
// loop through the required options
while (iter.hasNext())
{
buff.append(iter.next());
}
throw new MissingOptionException(buff.toString());
}
} | private void checkRequiredOptions ( ) throws MissingOptionException { if ( requiredOptions . size ( ) > 0 ) { Iterator iter = requiredOptions . iterator ( ) ; StringBuffer buff = new StringBuffer ( ) ; while ( iter . hasNext ( ) ) { buff . append ( iter . next ( ) ) ; } throw new MissingOptionException ( buff . toString ( ) ) ; } } | private void checkRequiredOptions()
throws MissingOptionException
{
// if there are required options that have not been
// processsed
if (requiredOptions.size() > 0)
{
Iterator iter = requiredOptions.iterator();
StringBuffer buff = new StringBuffer("Missing required option");
buff.append(requiredOptions.size() == 1 ? "" : "s");
buff.append(": ");
// loop through the required options
while (iter.hasNext())
{
buff.append(iter.next());
}
throw new MissingOptionException(buff.toString());
}
} | private void checkRequiredOptions ( ) throws MissingOptionException { if ( requiredOptions . size ( ) > 0 ) { Iterator iter = requiredOptions . iterator ( ) ; StringBuffer buff = new StringBuffer ( "Missing required option" ) ; buff . append ( requiredOptions . size ( ) == 1 ? "" : "s" ) ; buff . append ( ": " ) ; while ( iter . hasNext ( ) ) { buff . append ( iter . next ( ) ) ; } throw new MissingOptionException ( buff . toString ( ) ) ; } } |
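The misparse described in the report is easy to reproduce against the commons-cli 1.x API; a minimal sketch (the class name is invented):
```java
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;

public class PosixBurstDemo {
    public static void main(String[] args) throws ParseException {
        Options opts = new Options();
        opts.addOption(new Option("t", "target", true,
                "Generate files for the specified target machine"));
        // Burst logic: 't' is itself an Option, so on affected releases the
        // rest of "-target" is consumed as its value.
        CommandLine cl = new PosixParser()
                .parse(opts, new String[] { "-target", "foobar.com" });
        System.out.println(cl.getOptionValue("t")); // buggy: "arget"
    }
}
```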
Codec | 9 | src/java/org/apache/commons/codec/binary/Base64.java | 822 | 837 | Base64.encodeBase64(byte[] binaryData, boolean isChunked, boolean urlSafe, int maxResultSize) throws IAE for valid maxResultSize if isChunked is false | If isChunked is false, Base64.encodeBase64(byte[] binaryData, boolean isChunked, boolean urlSafe, int maxResultSize) throws IAE for valid maxResultSize.
Test case and fix will be applied shortly. | public static byte[] encodeBase64(byte[] binaryData, boolean isChunked, boolean urlSafe, int maxResultSize) {
if (binaryData == null || binaryData.length == 0) {
return binaryData;
}
long len = getEncodeLength(binaryData, MIME_CHUNK_SIZE, CHUNK_SEPARATOR);
if (len > maxResultSize) {
throw new IllegalArgumentException("Input array too big, the output array would be bigger (" +
len +
") than the specified maxium size of " +
maxResultSize);
}
Base64 b64 = isChunked ? new Base64(urlSafe) : new Base64(0, CHUNK_SEPARATOR, urlSafe);
return b64.encode(binaryData);
} | public static byte [ ] encodeBase64 ( byte [ ] binaryData , boolean isChunked , boolean urlSafe , int maxResultSize ) { if ( binaryData == null || binaryData . length == 0 ) { return binaryData ; } long len = getEncodeLength ( binaryData , MIME_CHUNK_SIZE , CHUNK_SEPARATOR ) ; if ( len > maxResultSize ) { throw new IllegalArgumentException ( "Input array too big, the output array would be bigger (" + len + ") than the specified maxium size of " + maxResultSize ) ; } Base64 b64 = isChunked ? new Base64 ( urlSafe ) : new Base64 ( 0 , CHUNK_SEPARATOR , urlSafe ) ; return b64 . encode ( binaryData ) ; } | public static byte[] encodeBase64(byte[] binaryData, boolean isChunked, boolean urlSafe, int maxResultSize) {
if (binaryData == null || binaryData.length == 0) {
return binaryData;
}
long len = getEncodeLength(binaryData, isChunked ? MIME_CHUNK_SIZE : 0, CHUNK_SEPARATOR);
if (len > maxResultSize) {
throw new IllegalArgumentException("Input array too big, the output array would be bigger (" +
len +
") than the specified maxium size of " +
maxResultSize);
}
Base64 b64 = isChunked ? new Base64(urlSafe) : new Base64(0, CHUNK_SEPARATOR, urlSafe);
return b64.encode(binaryData);
} | public static byte [ ] encodeBase64 ( byte [ ] binaryData , boolean isChunked , boolean urlSafe , int maxResultSize ) { if ( binaryData == null || binaryData . length == 0 ) { return binaryData ; } long len = getEncodeLength ( binaryData , isChunked ? MIME_CHUNK_SIZE : 0 , CHUNK_SEPARATOR ) ; if ( len > maxResultSize ) { throw new IllegalArgumentException ( "Input array too big, the output array would be bigger (" + len + ") than the specified maxium size of " + maxResultSize ) ; } Base64 b64 = isChunked ? new Base64 ( urlSafe ) : new Base64 ( 0 , CHUNK_SEPARATOR , urlSafe ) ; return b64 . encode ( binaryData ) ; } |
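The arithmetic behind the fix is worth spelling out. Below is a standalone approximation of the length computation; the method name is invented, and the real `getEncodeLength` also accounts for the configured chunk separator.
```java
// Unchunked Base64 emits 4 output chars per 3 input bytes. Chunked (MIME)
// output additionally inserts a CRLF separator per 76-char line, which is
// why computing the limit as if chunked overestimates plain output and
// made valid unchunked calls fail the maxResultSize check.
static long encodedLength(int inputBytes, boolean chunked) {
    long len = 4L * ((inputBytes + 2) / 3);
    if (chunked && len > 0) {
        long lines = (len + 75) / 76; // number of 76-char MIME lines
        len += lines * 2;             // CRLF per line
    }
    return len;
}
```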
JacksonCore | 26 | src/main/java/com/fasterxml/jackson/core/json/async/NonBlockingJsonParser.java | 87 | 112 | Non-blocking parser reports incorrect locations when fed with non-zero offset | When feeding a non-blocking parser, the input array offset leaks into the offsets reported by `getCurrentLocation()` and `getTokenLocation()`.
For example, feeding with an offset of 7 yields tokens whose reported locations are 7 greater than they should be. Likewise the current location reported by the parser is 7 greater than the correct location.
It's not possible for a user to work around this issue by subtracting 7 from the reported locations, because the token location may have been established by an earlier feeding with a different offset.
Jackson version: 2.9.8
Unit test:
```java
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.core.async.ByteArrayFeeder;
import org.junit.Test;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.junit.Assert.assertEquals;
public class FeedingOffsetTest {
@Test
public void inputOffsetShouldNotAffectLocations() throws Exception {
JsonFactory jsonFactory = new JsonFactory();
JsonParser parser = jsonFactory.createNonBlockingByteArrayParser();
ByteArrayFeeder feeder = (ByteArrayFeeder) parser.getNonBlockingInputFeeder();
byte[] input = "[[[".getBytes(UTF_8);
feeder.feedInput(input, 2, 3);
assertEquals(JsonToken.START_ARRAY, parser.nextToken());
assertEquals(1, parser.getCurrentLocation().getByteOffset()); // ACTUAL = 3
assertEquals(1, parser.getTokenLocation().getByteOffset()); // ACTUAL = 3
feeder.feedInput(input, 0, 1);
assertEquals(JsonToken.START_ARRAY, parser.nextToken());
assertEquals(2, parser.getCurrentLocation().getByteOffset());
assertEquals(2, parser.getTokenLocation().getByteOffset());
}
}
``` | @Override
public void feedInput(byte[] buf, int start, int end) throws IOException
{
// Must not have remaining input
if (_inputPtr < _inputEnd) {
_reportError("Still have %d undecoded bytes, should not call 'feedInput'", _inputEnd - _inputPtr);
}
if (end < start) {
_reportError("Input end (%d) may not be before start (%d)", end, start);
}
// and shouldn't have been marked as end-of-input
if (_endOfInput) {
_reportError("Already closed, can not feed more input");
}
// Time to update pointers first
_currInputProcessed += _origBufferLen;
// Also need to adjust row start, to work as if it extended into the past wrt new buffer
_currInputRowStart = start - (_inputEnd - _currInputRowStart);
// And then update buffer settings
_inputBuffer = buf;
_inputPtr = start;
_inputEnd = end;
_origBufferLen = end - start;
} | @ Override public void feedInput ( byte [ ] buf , int start , int end ) throws IOException { if ( _inputPtr < _inputEnd ) { _reportError ( "Still have %d undecoded bytes, should not call 'feedInput'" , _inputEnd - _inputPtr ) ; } if ( end < start ) { _reportError ( "Input end (%d) may not be before start (%d)" , end , start ) ; } if ( _endOfInput ) { _reportError ( "Already closed, can not feed more input" ) ; } _currInputProcessed += _origBufferLen ; _currInputRowStart = start - ( _inputEnd - _currInputRowStart ) ; _inputBuffer = buf ; _inputPtr = start ; _inputEnd = end ; _origBufferLen = end - start ; } | @Override
public void feedInput(byte[] buf, int start, int end) throws IOException
{
// Must not have remaining input
if (_inputPtr < _inputEnd) {
_reportError("Still have %d undecoded bytes, should not call 'feedInput'", _inputEnd - _inputPtr);
}
if (end < start) {
_reportError("Input end (%d) may not be before start (%d)", end, start);
}
// and shouldn't have been marked as end-of-input
if (_endOfInput) {
_reportError("Already closed, can not feed more input");
}
// Time to update pointers first
_currInputProcessed += _origBufferLen;
// Also need to adjust row start, to work as if it extended into the past wrt new buffer
_currInputRowStart = start - (_inputEnd - _currInputRowStart);
// And then update buffer settings
_currBufferStart = start;
_inputBuffer = buf;
_inputPtr = start;
_inputEnd = end;
_origBufferLen = end - start;
} | @ Override public void feedInput ( byte [ ] buf , int start , int end ) throws IOException { if ( _inputPtr < _inputEnd ) { _reportError ( "Still have %d undecoded bytes, should not call 'feedInput'" , _inputEnd - _inputPtr ) ; } if ( end < start ) { _reportError ( "Input end (%d) may not be before start (%d)" , end , start ) ; } if ( _endOfInput ) { _reportError ( "Already closed, can not feed more input" ) ; } _currInputProcessed += _origBufferLen ; _currInputRowStart = start - ( _inputEnd - _currInputRowStart ) ; _currBufferStart = start ; _inputBuffer = buf ; _inputPtr = start ; _inputEnd = end ; _origBufferLen = end - start ; } |
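The one-line fix records `_currBufferStart = start` so location arithmetic can subtract the feed offset. A hypothetical model of that arithmetic (variable names mirror the parser fields, but this is not the Jackson implementation):
```java
// Model of the offset bookkeeping: the absolute byte offset must be measured
// from where this feed started, not from index 0 of the caller's array.
public class FeedOffsetSketch {
    public static void main(String[] args) {
        long currInputProcessed = 0; // bytes consumed by earlier feeds
        int currBufferStart = 2;     // 'start' passed to feedInput (recorded by the fix)
        int inputPtr = 3;            // parser's index into the fed array after one '['

        long buggy = currInputProcessed + inputPtr;                     // 3
        long fixed = currInputProcessed + (inputPtr - currBufferStart); // 1
        System.out.println("buggy=" + buggy + " fixed=" + fixed);
        // Matches the unit test above: feedInput(input, 2, 3) should report
        // byte offset 1 after the first START_ARRAY token.
    }
}
```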
Mockito | 5 | src/org/mockito/internal/verification/VerificationOverTimeImpl.java | 75 | 99 | Mockito 1.10.x timeout verification needs JUnit classes (VerifyError, NoClassDefFoundError) | If JUnit is not on the classpath, Mockito is version 1.10.x (as of now, 1.10.1 up to 1.10.19), and the code uses the timeout verification, which is not supposed to be related to JUnit, then the JVM may fail with a `VerifyError` or a `NoClassDefFoundError`.
This issue has been reported on the [mailing list](https://groups.google.com/forum/#!topic/mockito/A6D7myKiD5k) and on [StackOverflow](http://stackoverflow.com/questions/27721621/java-lang-verifyerror-with-mockito-1-10-17)
A simple test like the following with **TestNG** (and no JUnit on the classpath, of course) exposes the issue:
```
import org.testng.annotations.Test;
import java.util.Observable;
import static org.mockito.Mockito.*;
public class VerifyErrorOnVerificationWithTimeoutTest {
@Test public void should_not_throw_VerifyError() {
verify(mock(Observable.class), timeout(500)).countObservers();
}
}
```
With TestNG 5.13.1, the stack trace is:
```
java.lang.VerifyError: (class: org/mockito/internal/verification/VerificationOverTimeImpl, method: verify signature: (Lorg/mockito/internal/verification/api/VerificationData;)V) Incompatible argument to function
at org.mockito.verification.Timeout.<init>(Timeout.java:32)
at org.mockito.verification.Timeout.<init>(Timeout.java:25)
at org.mockito.Mockito.timeout(Mockito.java:2103)
at com.example.UserServiceImplTest.test(UserServiceImplTest.java:26)
```
TestNG includes a dependency on JUnit 3.8.1, which provides `junit.framework.ComparisonFailure`, but the JVM cannot perform the linking at runtime (`VerifyError` extends `LinkageError`), probably because, from the JVM's point of view, there are incompatible changes in this class between versions 3.x and 4.x.
Note that Mockito is compiled against JUnit 4.x. This also reveals that Mockito is no longer compatible with JUnit 3.x.
With TestNG 6.8.13, the stack trace is:
```
java.lang.NoClassDefFoundError: junit/framework/ComparisonFailure
at java.lang.ClassLoader.defineClass1(Native Method)
at java.lang.ClassLoader.defineClassCond(ClassLoader.java:637)
at java.lang.ClassLoader.defineClass(ClassLoader.java:621)
at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:141)
at java.net.URLClassLoader.defineClass(URLClassLoader.java:283)
at java.net.URLClassLoader.access$000(URLClassLoader.java:58)
at java.net.URLClassLoader$1.run(URLClassLoader.java:197)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:190)
at java.lang.ClassLoader.loadClass(ClassLoader.java:306)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301)
at java.lang.ClassLoader.loadClass(ClassLoader.java:247)
at org.mockito.verification.Timeout.<init>(Timeout.java:32)
at org.mockito.verification.Timeout.<init>(Timeout.java:25)
at org.mockito.Mockito.timeout(Mockito.java:2103)
at com.example.UserServiceImplTest.test(UserServiceImplTest.java:26)
Caused by: java.lang.ClassNotFoundException: junit.framework.ComparisonFailure
at java.net.URLClassLoader$1.run(URLClassLoader.java:202)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:190)
at java.lang.ClassLoader.loadClass(ClassLoader.java:306)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301)
at java.lang.ClassLoader.loadClass(ClassLoader.java:247)
... 49 more
```
Indeed, JUnit is no longer a dependency of TestNG.
In this specific case the issue is that the `Timeout` class wraps a `VerificationOverTimeImpl` whose try/catch block references the exception `org.mockito.exceptions.verification.junit.ArgumentsAreDifferent`, which extends `junit.framework.ComparisonFailure`.
At this time it seems to be the only place where JUnit is needed; this affects the following public API:
``` java
Mockito.timeout(...)
Mockito.after(...)
```
| public void verify(VerificationData data) {
AssertionError error = null;
timer.start();
while (timer.isCounting()) {
try {
delegate.verify(data);
if (returnOnSuccess) {
return;
} else {
error = null;
}
} catch (MockitoAssertionError e) {
error = handleVerifyException(e);
}
catch (org.mockito.exceptions.verification.junit.ArgumentsAreDifferent e) {
error = handleVerifyException(e);
}
}
if (error != null) {
throw error;
}
} | public void verify ( VerificationData data ) { AssertionError error = null ; timer . start ( ) ; while ( timer . isCounting ( ) ) { try { delegate . verify ( data ) ; if ( returnOnSuccess ) { return ; } else { error = null ; } } catch ( MockitoAssertionError e ) { error = handleVerifyException ( e ) ; } catch ( org . mockito . exceptions . verification . junit . ArgumentsAreDifferent e ) { error = handleVerifyException ( e ) ; } } if ( error != null ) { throw error ; } } | public void verify(VerificationData data) {
AssertionError error = null;
timer.start();
while (timer.isCounting()) {
try {
delegate.verify(data);
if (returnOnSuccess) {
return;
} else {
error = null;
}
} catch (MockitoAssertionError e) {
error = handleVerifyException(e);
}
catch (AssertionError e) {
error = handleVerifyException(e);
}
}
if (error != null) {
throw error;
}
} | public void verify ( VerificationData data ) { AssertionError error = null ; timer . start ( ) ; while ( timer . isCounting ( ) ) { try { delegate . verify ( data ) ; if ( returnOnSuccess ) { return ; } else { error = null ; } } catch ( MockitoAssertionError e ) { error = handleVerifyException ( e ) ; } catch ( AssertionError e ) { error = handleVerifyException ( e ) ; } } if ( error != null ) { throw error ; } } |
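The patch replaces the JUnit-specific catch with a catch of the common supertype `AssertionError`. A self-contained analogy for why merely naming the narrow type in a catch clause forces class loading (standard JDK types stand in for the Mockito/JUnit ones):
```java
// Analogy: a catch clause names a type in the method's exception table, so
// verifying/linking the method requires resolving that type's hierarchy.
// If a supertype (here: JUnit's ComparisonFailure) is absent at runtime, the
// JVM fails with VerifyError/NoClassDefFoundError before the method runs.
// Catching the broad supertype needs no extra classes and still matches.
public class BroadCatchSketch {
    public static void main(String[] args) {
        try {
            throw new IllegalStateException("stand-in for ArgumentsAreDifferent");
        } catch (RuntimeException e) { // stand-in for catching AssertionError
            System.out.println("caught via supertype: " + e.getMessage());
        }
    }
}
```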
JacksonDatabind | 50 | src/main/java/com/fasterxml/jackson/databind/deser/BeanDeserializer.java | 376 | 474 | `@JsonIdentityInfo` deserialization fails with combination of forward references, `@JsonCreator` | As a follow-up to bug #1255, the patch I provided exposes related deserialization problems.
I have attached a small project ('jackson-test.zip') to demonstrate these issues. When run with both patches from #1255, the output is provided in the attached 'both.txt'. When run with just the first patch from #1255, the output is provided in the attached 'first.txt'.
Important points:
1. When the object expressed as an id is contained within a collection or map (List in this example), deserialization works correctly. When it is a field of an object, deserialization is broken.
2. This particular example doesn't have forward references, but it does have cycles. Nevertheless, I have seen situations where non-cyclical forward-references also do not deserialize properly, with the same caveat as in 1.
[jackson-test.zip](https://github.com/FasterXML/jackson-databind/files/301884/jackson-test.zip)
[both.txt](https://github.com/FasterXML/jackson-databind/files/301885/both.txt)
[first.txt](https://github.com/FasterXML/jackson-databind/files/301886/first.txt)
| @Override
@SuppressWarnings("resource")
protected Object _deserializeUsingPropertyBased(final JsonParser p, final DeserializationContext ctxt)
throws IOException
{
final PropertyBasedCreator creator = _propertyBasedCreator;
PropertyValueBuffer buffer = creator.startBuilding(p, ctxt, _objectIdReader);
TokenBuffer unknown = null;
JsonToken t = p.getCurrentToken();
for (; t == JsonToken.FIELD_NAME; t = p.nextToken()) {
String propName = p.getCurrentName();
p.nextToken(); // to point to value
// creator property?
SettableBeanProperty creatorProp = creator.findCreatorProperty(propName);
if (creatorProp != null) {
// Last creator property to set?
if (buffer.assignParameter(creatorProp,
_deserializeWithErrorWrapping(p, ctxt, creatorProp))) {
p.nextToken(); // to move to following FIELD_NAME/END_OBJECT
Object bean;
try {
bean = creator.build(ctxt, buffer);
} catch (Exception e) {
bean = wrapInstantiationProblem(e, ctxt);
}
if (bean == null) {
return ctxt.handleInstantiationProblem(handledType(), null,
_creatorReturnedNullException());
}
// [databind#631]: Assign current value, to be accessible by custom serializers
p.setCurrentValue(bean);
// polymorphic?
if (bean.getClass() != _beanType.getRawClass()) {
return handlePolymorphic(p, ctxt, bean, unknown);
}
if (unknown != null) { // nope, just extra unknown stuff...
bean = handleUnknownProperties(ctxt, bean, unknown);
}
// or just clean?
return deserialize(p, ctxt, bean);
}
continue;
}
// Object Id property?
if (buffer.readIdProperty(propName)) {
continue;
}
// regular property? needs buffering
SettableBeanProperty prop = _beanProperties.find(propName);
if (prop != null) {
buffer.bufferProperty(prop, _deserializeWithErrorWrapping(p, ctxt, prop));
// 14-Jun-2016, tatu: As per [databind#1261], looks like we need additional
// handling of forward references here. Not exactly sure why existing
// facilities did not cover, but this does appear to solve the problem
continue;
}
// Things marked as ignorable should not be passed to any setter
if (_ignorableProps != null && _ignorableProps.contains(propName)) {
handleIgnoredProperty(p, ctxt, handledType(), propName);
continue;
}
// "any property"?
if (_anySetter != null) {
try {
buffer.bufferAnyProperty(_anySetter, propName, _anySetter.deserialize(p, ctxt));
} catch (Exception e) {
wrapAndThrow(e, _beanType.getRawClass(), propName, ctxt);
}
continue;
}
// Ok then, let's collect the whole field; name and value
if (unknown == null) {
unknown = new TokenBuffer(p, ctxt);
}
unknown.writeFieldName(propName);
unknown.copyCurrentStructure(p);
}
// We hit END_OBJECT, so:
Object bean;
try {
bean = creator.build(ctxt, buffer);
} catch (Exception e) {
wrapInstantiationProblem(e, ctxt);
bean = null; // never gets here
}
if (unknown != null) {
// polymorphic?
if (bean.getClass() != _beanType.getRawClass()) {
return handlePolymorphic(null, ctxt, bean, unknown);
}
// no, just some extra unknown properties
return handleUnknownProperties(ctxt, bean, unknown);
}
return bean;
} | @ Override @ SuppressWarnings ( "resource" ) protected Object _deserializeUsingPropertyBased ( final JsonParser p , final DeserializationContext ctxt ) throws IOException { final PropertyBasedCreator creator = _propertyBasedCreator ; PropertyValueBuffer buffer = creator . startBuilding ( p , ctxt , _objectIdReader ) ; TokenBuffer unknown = null ; JsonToken t = p . getCurrentToken ( ) ; for ( ; t == JsonToken . FIELD_NAME ; t = p . nextToken ( ) ) { String propName = p . getCurrentName ( ) ; p . nextToken ( ) ; SettableBeanProperty creatorProp = creator . findCreatorProperty ( propName ) ; if ( creatorProp != null ) { if ( buffer . assignParameter ( creatorProp , _deserializeWithErrorWrapping ( p , ctxt , creatorProp ) ) ) { p . nextToken ( ) ; Object bean ; try { bean = creator . build ( ctxt , buffer ) ; } catch ( Exception e ) { bean = wrapInstantiationProblem ( e , ctxt ) ; } if ( bean == null ) { return ctxt . handleInstantiationProblem ( handledType ( ) , null , _creatorReturnedNullException ( ) ) ; } p . setCurrentValue ( bean ) ; if ( bean . getClass ( ) != _beanType . getRawClass ( ) ) { return handlePolymorphic ( p , ctxt , bean , unknown ) ; } if ( unknown != null ) { bean = handleUnknownProperties ( ctxt , bean , unknown ) ; } return deserialize ( p , ctxt , bean ) ; } continue ; } if ( buffer . readIdProperty ( propName ) ) { continue ; } SettableBeanProperty prop = _beanProperties . find ( propName ) ; if ( prop != null ) { buffer . bufferProperty ( prop , _deserializeWithErrorWrapping ( p , ctxt , prop ) ) ; continue ; } if ( _ignorableProps != null && _ignorableProps . contains ( propName ) ) { handleIgnoredProperty ( p , ctxt , handledType ( ) , propName ) ; continue ; } if ( _anySetter != null ) { try { buffer . bufferAnyProperty ( _anySetter , propName , _anySetter . deserialize ( p , ctxt ) ) ; } catch ( Exception e ) { wrapAndThrow ( e , _beanType . getRawClass ( ) , propName , ctxt ) ; } continue ; } if ( unknown == null ) { unknown = new TokenBuffer ( p , ctxt ) ; } unknown . writeFieldName ( propName ) ; unknown . copyCurrentStructure ( p ) ; } Object bean ; try { bean = creator . build ( ctxt , buffer ) ; } catch ( Exception e ) { wrapInstantiationProblem ( e , ctxt ) ; bean = null ; } if ( unknown != null ) { if ( bean . getClass ( ) != _beanType . getRawClass ( ) ) { return handlePolymorphic ( null , ctxt , bean , unknown ) ; } return handleUnknownProperties ( ctxt , bean , unknown ) ; } return bean ; } | @Override
@SuppressWarnings("resource")
protected Object _deserializeUsingPropertyBased(final JsonParser p, final DeserializationContext ctxt)
throws IOException
{
final PropertyBasedCreator creator = _propertyBasedCreator;
PropertyValueBuffer buffer = creator.startBuilding(p, ctxt, _objectIdReader);
TokenBuffer unknown = null;
JsonToken t = p.getCurrentToken();
List<BeanReferring> referrings = null;
for (; t == JsonToken.FIELD_NAME; t = p.nextToken()) {
String propName = p.getCurrentName();
p.nextToken(); // to point to value
// creator property?
SettableBeanProperty creatorProp = creator.findCreatorProperty(propName);
if (creatorProp != null) {
// Last creator property to set?
if (buffer.assignParameter(creatorProp,
_deserializeWithErrorWrapping(p, ctxt, creatorProp))) {
p.nextToken(); // to move to following FIELD_NAME/END_OBJECT
Object bean;
try {
bean = creator.build(ctxt, buffer);
} catch (Exception e) {
bean = wrapInstantiationProblem(e, ctxt);
}
if (bean == null) {
return ctxt.handleInstantiationProblem(handledType(), null,
_creatorReturnedNullException());
}
// [databind#631]: Assign current value, to be accessible by custom serializers
p.setCurrentValue(bean);
// polymorphic?
if (bean.getClass() != _beanType.getRawClass()) {
return handlePolymorphic(p, ctxt, bean, unknown);
}
if (unknown != null) { // nope, just extra unknown stuff...
bean = handleUnknownProperties(ctxt, bean, unknown);
}
// or just clean?
return deserialize(p, ctxt, bean);
}
continue;
}
// Object Id property?
if (buffer.readIdProperty(propName)) {
continue;
}
// regular property? needs buffering
SettableBeanProperty prop = _beanProperties.find(propName);
if (prop != null) {
try {
buffer.bufferProperty(prop, _deserializeWithErrorWrapping(p, ctxt, prop));
} catch (UnresolvedForwardReference reference) {
// 14-Jun-2016, tatu: As per [databind#1261], looks like we need additional
// handling of forward references here. Not exactly sure why existing
// facilities did not cover, but this does appear to solve the problem
BeanReferring referring = handleUnresolvedReference(p, prop, buffer, reference);
if (referrings == null) {
referrings = new ArrayList<BeanReferring>();
}
referrings.add(referring);
}
continue;
}
// Things marked as ignorable should not be passed to any setter
if (_ignorableProps != null && _ignorableProps.contains(propName)) {
handleIgnoredProperty(p, ctxt, handledType(), propName);
continue;
}
// "any property"?
if (_anySetter != null) {
try {
buffer.bufferAnyProperty(_anySetter, propName, _anySetter.deserialize(p, ctxt));
} catch (Exception e) {
wrapAndThrow(e, _beanType.getRawClass(), propName, ctxt);
}
continue;
}
// Ok then, let's collect the whole field; name and value
if (unknown == null) {
unknown = new TokenBuffer(p, ctxt);
}
unknown.writeFieldName(propName);
unknown.copyCurrentStructure(p);
}
// We hit END_OBJECT, so:
Object bean;
try {
bean = creator.build(ctxt, buffer);
} catch (Exception e) {
wrapInstantiationProblem(e, ctxt);
bean = null; // never gets here
}
if (referrings != null) {
for (BeanReferring referring : referrings) {
referring.setBean(bean);
}
}
if (unknown != null) {
// polymorphic?
if (bean.getClass() != _beanType.getRawClass()) {
return handlePolymorphic(null, ctxt, bean, unknown);
}
// no, just some extra unknown properties
return handleUnknownProperties(ctxt, bean, unknown);
}
return bean;
} | @ Override @ SuppressWarnings ( "resource" ) protected Object _deserializeUsingPropertyBased ( final JsonParser p , final DeserializationContext ctxt ) throws IOException { final PropertyBasedCreator creator = _propertyBasedCreator ; PropertyValueBuffer buffer = creator . startBuilding ( p , ctxt , _objectIdReader ) ; TokenBuffer unknown = null ; JsonToken t = p . getCurrentToken ( ) ; List < BeanReferring > referrings = null ; for ( ; t == JsonToken . FIELD_NAME ; t = p . nextToken ( ) ) { String propName = p . getCurrentName ( ) ; p . nextToken ( ) ; SettableBeanProperty creatorProp = creator . findCreatorProperty ( propName ) ; if ( creatorProp != null ) { if ( buffer . assignParameter ( creatorProp , _deserializeWithErrorWrapping ( p , ctxt , creatorProp ) ) ) { p . nextToken ( ) ; Object bean ; try { bean = creator . build ( ctxt , buffer ) ; } catch ( Exception e ) { bean = wrapInstantiationProblem ( e , ctxt ) ; } if ( bean == null ) { return ctxt . handleInstantiationProblem ( handledType ( ) , null , _creatorReturnedNullException ( ) ) ; } p . setCurrentValue ( bean ) ; if ( bean . getClass ( ) != _beanType . getRawClass ( ) ) { return handlePolymorphic ( p , ctxt , bean , unknown ) ; } if ( unknown != null ) { bean = handleUnknownProperties ( ctxt , bean , unknown ) ; } return deserialize ( p , ctxt , bean ) ; } continue ; } if ( buffer . readIdProperty ( propName ) ) { continue ; } SettableBeanProperty prop = _beanProperties . find ( propName ) ; if ( prop != null ) { try { buffer . bufferProperty ( prop , _deserializeWithErrorWrapping ( p , ctxt , prop ) ) ; } catch ( UnresolvedForwardReference reference ) { BeanReferring referring = handleUnresolvedReference ( p , prop , buffer , reference ) ; if ( referrings == null ) { referrings = new ArrayList < BeanReferring > ( ) ; } referrings . add ( referring ) ; } continue ; } if ( _ignorableProps != null && _ignorableProps . contains ( propName ) ) { handleIgnoredProperty ( p , ctxt , handledType ( ) , propName ) ; continue ; } if ( _anySetter != null ) { try { buffer . bufferAnyProperty ( _anySetter , propName , _anySetter . deserialize ( p , ctxt ) ) ; } catch ( Exception e ) { wrapAndThrow ( e , _beanType . getRawClass ( ) , propName , ctxt ) ; } continue ; } if ( unknown == null ) { unknown = new TokenBuffer ( p , ctxt ) ; } unknown . writeFieldName ( propName ) ; unknown . copyCurrentStructure ( p ) ; } Object bean ; try { bean = creator . build ( ctxt , buffer ) ; } catch ( Exception e ) { wrapInstantiationProblem ( e , ctxt ) ; bean = null ; } if ( referrings != null ) { for ( BeanReferring referring : referrings ) { referring . setBean ( bean ) ; } } if ( unknown != null ) { if ( bean . getClass ( ) != _beanType . getRawClass ( ) ) { return handlePolymorphic ( null , ctxt , bean , unknown ) ; } return handleUnknownProperties ( ctxt , bean , unknown ) ; } return bean ; } |
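The fix catches `UnresolvedForwardReference` while buffering properties and replays the deferred assignments once the bean exists. A minimal sketch of that defer-and-replay pattern (types simplified; `BeanReferring` is approximated by a callback):
```java
import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;

// Defer-and-replay: when a property value is only known by object id, record
// a callback instead of failing, then invoke all callbacks after creator.build.
public class DeferredReferenceSketch {
    public static void main(String[] args) {
        List<Consumer<Object>> referrings = new ArrayList<>();

        // During the field loop: a forward reference was detected.
        referrings.add(bean -> System.out.println("resolved reference on " + bean));

        // After the creator successfully builds the bean:
        Object bean = "bean#1";
        for (Consumer<Object> referring : referrings) {
            referring.accept(bean); // mirrors referring.setBean(bean) in the patch
        }
    }
}
```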
JacksonDatabind | 27 | src/main/java/com/fasterxml/jackson/databind/deser/BeanDeserializer.java | 773 | 857 | Problem deserializing External Type Id if type id comes before POJO | (note: seems to be similar or related to https://github.com/FasterXML/jackson-module-afterburner/issues/58)
With 2.6, it looks like the handling of External Type Id is broken in some rare (?) cases; existing unit tests did not catch this. At this point I am speculating that this is due to some refactoring, or to the change to use the more efficient `nextFieldName()` method.
| @SuppressWarnings("resource")
protected Object deserializeUsingPropertyBasedWithExternalTypeId(JsonParser p, DeserializationContext ctxt)
throws IOException
{
final ExternalTypeHandler ext = _externalTypeIdHandler.start();
final PropertyBasedCreator creator = _propertyBasedCreator;
PropertyValueBuffer buffer = creator.startBuilding(p, ctxt, _objectIdReader);
TokenBuffer tokens = new TokenBuffer(p);
tokens.writeStartObject();
JsonToken t = p.getCurrentToken();
for (; t == JsonToken.FIELD_NAME; t = p.nextToken()) {
String propName = p.getCurrentName();
p.nextToken(); // to point to value
// creator property?
SettableBeanProperty creatorProp = creator.findCreatorProperty(propName);
if (creatorProp != null) {
// first: let's check to see if this might be part of value with external type id:
// 11-Sep-2015, tatu: Important; do NOT pass buffer as last arg, but null,
// since it is not the bean
if (ext.handlePropertyValue(p, ctxt, propName, buffer)) {
;
} else {
// Last creator property to set?
if (buffer.assignParameter(creatorProp, _deserializeWithErrorWrapping(p, ctxt, creatorProp))) {
t = p.nextToken(); // to move to following FIELD_NAME/END_OBJECT
Object bean;
try {
bean = creator.build(ctxt, buffer);
} catch (Exception e) {
wrapAndThrow(e, _beanType.getRawClass(), propName, ctxt);
continue; // never gets here
}
// if so, need to copy all remaining tokens into buffer
while (t == JsonToken.FIELD_NAME) {
p.nextToken(); // to skip name
tokens.copyCurrentStructure(p);
t = p.nextToken();
}
if (bean.getClass() != _beanType.getRawClass()) {
// !!! 08-Jul-2011, tatu: Could theoretically support; but for now
// it's too complicated, so bail out
throw ctxt.mappingException("Can not create polymorphic instances with unwrapped values");
}
return ext.complete(p, ctxt, bean);
}
}
continue;
}
// Object Id property?
if (buffer.readIdProperty(propName)) {
continue;
}
// regular property? needs buffering
SettableBeanProperty prop = _beanProperties.find(propName);
if (prop != null) {
buffer.bufferProperty(prop, prop.deserialize(p, ctxt));
continue;
}
// external type id (or property that depends on it)?
if (ext.handlePropertyValue(p, ctxt, propName, null)) {
continue;
}
/* As per [JACKSON-313], things marked as ignorable should not be
* passed to any setter
*/
if (_ignorableProps != null && _ignorableProps.contains(propName)) {
handleIgnoredProperty(p, ctxt, handledType(), propName);
continue;
}
// "any property"?
if (_anySetter != null) {
buffer.bufferAnyProperty(_anySetter, propName, _anySetter.deserialize(p, ctxt));
}
}
// We hit END_OBJECT; resolve the pieces:
try {
return ext.complete(p, ctxt, buffer, creator);
} catch (Exception e) {
wrapInstantiationProblem(e, ctxt);
return null; // never gets here
}
} | @ SuppressWarnings ( "resource" ) protected Object deserializeUsingPropertyBasedWithExternalTypeId ( JsonParser p , DeserializationContext ctxt ) throws IOException { final ExternalTypeHandler ext = _externalTypeIdHandler . start ( ) ; final PropertyBasedCreator creator = _propertyBasedCreator ; PropertyValueBuffer buffer = creator . startBuilding ( p , ctxt , _objectIdReader ) ; TokenBuffer tokens = new TokenBuffer ( p ) ; tokens . writeStartObject ( ) ; JsonToken t = p . getCurrentToken ( ) ; for ( ; t == JsonToken . FIELD_NAME ; t = p . nextToken ( ) ) { String propName = p . getCurrentName ( ) ; p . nextToken ( ) ; SettableBeanProperty creatorProp = creator . findCreatorProperty ( propName ) ; if ( creatorProp != null ) { if ( ext . handlePropertyValue ( p , ctxt , propName , buffer ) ) { ; } else { if ( buffer . assignParameter ( creatorProp , _deserializeWithErrorWrapping ( p , ctxt , creatorProp ) ) ) { t = p . nextToken ( ) ; Object bean ; try { bean = creator . build ( ctxt , buffer ) ; } catch ( Exception e ) { wrapAndThrow ( e , _beanType . getRawClass ( ) , propName , ctxt ) ; continue ; } while ( t == JsonToken . FIELD_NAME ) { p . nextToken ( ) ; tokens . copyCurrentStructure ( p ) ; t = p . nextToken ( ) ; } if ( bean . getClass ( ) != _beanType . getRawClass ( ) ) { throw ctxt . mappingException ( "Can not create polymorphic instances with unwrapped values" ) ; } return ext . complete ( p , ctxt , bean ) ; } } continue ; } if ( buffer . readIdProperty ( propName ) ) { continue ; } SettableBeanProperty prop = _beanProperties . find ( propName ) ; if ( prop != null ) { buffer . bufferProperty ( prop , prop . deserialize ( p , ctxt ) ) ; continue ; } if ( ext . handlePropertyValue ( p , ctxt , propName , null ) ) { continue ; } if ( _ignorableProps != null && _ignorableProps . contains ( propName ) ) { handleIgnoredProperty ( p , ctxt , handledType ( ) , propName ) ; continue ; } if ( _anySetter != null ) { buffer . bufferAnyProperty ( _anySetter , propName , _anySetter . deserialize ( p , ctxt ) ) ; } } try { return ext . complete ( p , ctxt , buffer , creator ) ; } catch ( Exception e ) { wrapInstantiationProblem ( e , ctxt ) ; return null ; } } | @SuppressWarnings("resource")
protected Object deserializeUsingPropertyBasedWithExternalTypeId(JsonParser p, DeserializationContext ctxt)
throws IOException
{
final ExternalTypeHandler ext = _externalTypeIdHandler.start();
final PropertyBasedCreator creator = _propertyBasedCreator;
PropertyValueBuffer buffer = creator.startBuilding(p, ctxt, _objectIdReader);
TokenBuffer tokens = new TokenBuffer(p);
tokens.writeStartObject();
JsonToken t = p.getCurrentToken();
for (; t == JsonToken.FIELD_NAME; t = p.nextToken()) {
String propName = p.getCurrentName();
p.nextToken(); // to point to value
// creator property?
SettableBeanProperty creatorProp = creator.findCreatorProperty(propName);
if (creatorProp != null) {
// first: let's check to see if this might be part of value with external type id:
// 11-Sep-2015, tatu: Important; do NOT pass buffer as last arg, but null,
// since it is not the bean
if (ext.handlePropertyValue(p, ctxt, propName, null)) {
;
} else {
// Last creator property to set?
if (buffer.assignParameter(creatorProp, _deserializeWithErrorWrapping(p, ctxt, creatorProp))) {
t = p.nextToken(); // to move to following FIELD_NAME/END_OBJECT
Object bean;
try {
bean = creator.build(ctxt, buffer);
} catch (Exception e) {
wrapAndThrow(e, _beanType.getRawClass(), propName, ctxt);
continue; // never gets here
}
// if so, need to copy all remaining tokens into buffer
while (t == JsonToken.FIELD_NAME) {
p.nextToken(); // to skip name
tokens.copyCurrentStructure(p);
t = p.nextToken();
}
if (bean.getClass() != _beanType.getRawClass()) {
// !!! 08-Jul-2011, tatu: Could theoretically support; but for now
// it's too complicated, so bail out
throw ctxt.mappingException("Can not create polymorphic instances with unwrapped values");
}
return ext.complete(p, ctxt, bean);
}
}
continue;
}
// Object Id property?
if (buffer.readIdProperty(propName)) {
continue;
}
// regular property? needs buffering
SettableBeanProperty prop = _beanProperties.find(propName);
if (prop != null) {
buffer.bufferProperty(prop, prop.deserialize(p, ctxt));
continue;
}
// external type id (or property that depends on it)?
if (ext.handlePropertyValue(p, ctxt, propName, null)) {
continue;
}
/* As per [JACKSON-313], things marked as ignorable should not be
* passed to any setter
*/
if (_ignorableProps != null && _ignorableProps.contains(propName)) {
handleIgnoredProperty(p, ctxt, handledType(), propName);
continue;
}
// "any property"?
if (_anySetter != null) {
buffer.bufferAnyProperty(_anySetter, propName, _anySetter.deserialize(p, ctxt));
}
}
// We hit END_OBJECT; resolve the pieces:
try {
return ext.complete(p, ctxt, buffer, creator);
} catch (Exception e) {
wrapInstantiationProblem(e, ctxt);
return null; // never gets here
}
} | @ SuppressWarnings ( "resource" ) protected Object deserializeUsingPropertyBasedWithExternalTypeId ( JsonParser p , DeserializationContext ctxt ) throws IOException { final ExternalTypeHandler ext = _externalTypeIdHandler . start ( ) ; final PropertyBasedCreator creator = _propertyBasedCreator ; PropertyValueBuffer buffer = creator . startBuilding ( p , ctxt , _objectIdReader ) ; TokenBuffer tokens = new TokenBuffer ( p ) ; tokens . writeStartObject ( ) ; JsonToken t = p . getCurrentToken ( ) ; for ( ; t == JsonToken . FIELD_NAME ; t = p . nextToken ( ) ) { String propName = p . getCurrentName ( ) ; p . nextToken ( ) ; SettableBeanProperty creatorProp = creator . findCreatorProperty ( propName ) ; if ( creatorProp != null ) { if ( ext . handlePropertyValue ( p , ctxt , propName , null ) ) { ; } else { if ( buffer . assignParameter ( creatorProp , _deserializeWithErrorWrapping ( p , ctxt , creatorProp ) ) ) { t = p . nextToken ( ) ; Object bean ; try { bean = creator . build ( ctxt , buffer ) ; } catch ( Exception e ) { wrapAndThrow ( e , _beanType . getRawClass ( ) , propName , ctxt ) ; continue ; } while ( t == JsonToken . FIELD_NAME ) { p . nextToken ( ) ; tokens . copyCurrentStructure ( p ) ; t = p . nextToken ( ) ; } if ( bean . getClass ( ) != _beanType . getRawClass ( ) ) { throw ctxt . mappingException ( "Can not create polymorphic instances with unwrapped values" ) ; } return ext . complete ( p , ctxt , bean ) ; } } continue ; } if ( buffer . readIdProperty ( propName ) ) { continue ; } SettableBeanProperty prop = _beanProperties . find ( propName ) ; if ( prop != null ) { buffer . bufferProperty ( prop , prop . deserialize ( p , ctxt ) ) ; continue ; } if ( ext . handlePropertyValue ( p , ctxt , propName , null ) ) { continue ; } if ( _ignorableProps != null && _ignorableProps . contains ( propName ) ) { handleIgnoredProperty ( p , ctxt , handledType ( ) , propName ) ; continue ; } if ( _anySetter != null ) { buffer . bufferAnyProperty ( _anySetter , propName , _anySetter . deserialize ( p , ctxt ) ) ; } } try { return ext . complete ( p , ctxt , buffer , creator ) ; } catch ( Exception e ) { wrapInstantiationProblem ( e , ctxt ) ; return null ; } } |
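The fix is the single-argument change on the first `ext.handlePropertyValue(...)` call, passing `null` instead of `buffer` to match the second call site, so the handler does not treat the creator buffer as the target bean. A sketch of the kind of shape that exercises this path, with the external type id preceding the value (class and property names are hypothetical, not from the report):
```java
import com.fasterxml.jackson.annotation.*;

// Hypothetical shape: the external type id ("type") appears before the
// typed value ("animal") in the JSON, e.g.
// {"type":"dog","name":"Rex","animal":{"breed":"lab"}}.
class PetWrapper {
    public String name;

    @JsonTypeInfo(use = JsonTypeInfo.Id.NAME,
                  include = JsonTypeInfo.As.EXTERNAL_PROPERTY, property = "type")
    @JsonSubTypes({ @JsonSubTypes.Type(value = Dog.class, name = "dog") })
    public Animal animal;

    @JsonCreator
    public PetWrapper(@JsonProperty("name") String name) { this.name = name; }
}

abstract class Animal { }
class Dog extends Animal { public String breed; }
// usage (requires jackson-databind):
// PetWrapper w = new ObjectMapper().readValue(
//     "{\"type\":\"dog\",\"name\":\"Rex\",\"animal\":{\"breed\":\"lab\"}}",
//     PetWrapper.class);
```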
Cli | 14 | src/java/org/apache/commons/cli2/option/GroupImpl.java | 237 | 282 | adding a FileValidator results in ClassCastException in parser.parseAndHelp(args) | When I add a FileValidator.getExistingFileInstance() to an Argument, I get a ClassCastException when I parse args.
Below is a test case; invoke it with
java org.apache.commons.cli2.issues.CLI2Sample -classpath commons-cli-2.0-SNAPSHOT.jar --file-name path-to-an-existing-file
Run it and you get:
```
Exception in thread "main" java.lang.ClassCastException: java.io.File cannot be cast to java.lang.String
at org.apache.commons.cli2.validation.FileValidator.validate(FileValidator.java:122)
at org.apache.commons.cli2.option.ArgumentImpl.validate(ArgumentImpl.java:250)
at org.apache.commons.cli2.option.ParentImpl.validate(ParentImpl.java:123)
at org.apache.commons.cli2.option.DefaultOption.validate(DefaultOption.java:175)
at org.apache.commons.cli2.option.GroupImpl.validate(GroupImpl.java:264)
at org.apache.commons.cli2.commandline.Parser.parse(Parser.java:105)
at org.apache.commons.cli2.commandline.Parser.parseAndHelp(Parser.java:125)
at org.apache.commons.cli2.issues.CLI2Sample.main(CLI2Sample.java:38)
```
Comment out the withValidator call and it runs with no exception.
I also get a similar ClassCastException if I add a
.withValidator(NumberValidator.getIntegerInstance())
to another option/argument.
Here is the source:
```
package org.apache.commons.cli2.issues;
import java.io.File;
import org.apache.commons.cli2.CommandLine;
import org.apache.commons.cli2.Group;
import org.apache.commons.cli2.builder.ArgumentBuilder;
import org.apache.commons.cli2.builder.DefaultOptionBuilder;
import org.apache.commons.cli2.builder.GroupBuilder;
import org.apache.commons.cli2.commandline.Parser;
import org.apache.commons.cli2.option.DefaultOption;
import org.apache.commons.cli2.validation.FileValidator;
public class CLI2Sample
{
public static void main(String[] args)
{
final DefaultOptionBuilder obuilder = new DefaultOptionBuilder();
final ArgumentBuilder abuilder = new ArgumentBuilder();
final GroupBuilder gbuilder = new GroupBuilder();
DefaultOption fileNameOption = obuilder
.withShortName("f")
.withLongName("file-name")
.withRequired(true)
.withDescription("name of an existing file")
.withArgument(abuilder
.withName("file-name")
.withValidator(FileValidator.getExistingFileInstance())
.create())
.create();
Group options = gbuilder
.withName("options")
.withOption(fileNameOption)
.create();
Parser parser = new Parser();
parser.setHelpTrigger("--help");
parser.setGroup(options);
CommandLine cl = parser.parseAndHelp(args);
}
}
```
| public void validate(final WriteableCommandLine commandLine)
throws OptionException {
// number of options found
int present = 0;
// reference to first unexpected option
Option unexpected = null;
for (final Iterator i = options.iterator(); i.hasNext();) {
final Option option = (Option) i.next();
// needs validation?
boolean validate = option.isRequired() || option instanceof Group;
if (validate) {
option.validate(commandLine);
}
// if the child option is present then validate it
if (commandLine.hasOption(option)) {
if (++present > maximum) {
unexpected = option;
break;
}
option.validate(commandLine);
}
}
// too many options
if (unexpected != null) {
throw new OptionException(this, ResourceConstants.UNEXPECTED_TOKEN,
unexpected.getPreferredName());
}
// too few option
if (present < minimum) {
throw new OptionException(this, ResourceConstants.MISSING_OPTION);
}
// validate each anonymous argument
for (final Iterator i = anonymous.iterator(); i.hasNext();) {
final Option option = (Option) i.next();
option.validate(commandLine);
}
} | public void validate ( final WriteableCommandLine commandLine ) throws OptionException { int present = 0 ; Option unexpected = null ; for ( final Iterator i = options . iterator ( ) ; i . hasNext ( ) ; ) { final Option option = ( Option ) i . next ( ) ; boolean validate = option . isRequired ( ) || option instanceof Group ; if ( validate ) { option . validate ( commandLine ) ; } if ( commandLine . hasOption ( option ) ) { if ( ++ present > maximum ) { unexpected = option ; break ; } option . validate ( commandLine ) ; } } if ( unexpected != null ) { throw new OptionException ( this , ResourceConstants . UNEXPECTED_TOKEN , unexpected . getPreferredName ( ) ) ; } if ( present < minimum ) { throw new OptionException ( this , ResourceConstants . MISSING_OPTION ) ; } for ( final Iterator i = anonymous . iterator ( ) ; i . hasNext ( ) ; ) { final Option option = ( Option ) i . next ( ) ; option . validate ( commandLine ) ; } } | public void validate(final WriteableCommandLine commandLine)
throws OptionException {
// number of options found
int present = 0;
// reference to first unexpected option
Option unexpected = null;
for (final Iterator i = options.iterator(); i.hasNext();) {
final Option option = (Option) i.next();
// needs validation?
boolean validate = option.isRequired() || option instanceof Group;
// if the child option is present then validate it
if (commandLine.hasOption(option)) {
if (++present > maximum) {
unexpected = option;
break;
}
validate = true;
}
if (validate) {
option.validate(commandLine);
}
}
// too many options
if (unexpected != null) {
throw new OptionException(this, ResourceConstants.UNEXPECTED_TOKEN,
unexpected.getPreferredName());
}
// too few option
if (present < minimum) {
throw new OptionException(this, ResourceConstants.MISSING_OPTION);
}
// validate each anonymous argument
for (final Iterator i = anonymous.iterator(); i.hasNext();) {
final Option option = (Option) i.next();
option.validate(commandLine);
}
} | public void validate ( final WriteableCommandLine commandLine ) throws OptionException { int present = 0 ; Option unexpected = null ; for ( final Iterator i = options . iterator ( ) ; i . hasNext ( ) ; ) { final Option option = ( Option ) i . next ( ) ; boolean validate = option . isRequired ( ) || option instanceof Group ; if ( commandLine . hasOption ( option ) ) { if ( ++ present > maximum ) { unexpected = option ; break ; } validate = true ; } if ( validate ) { option . validate ( commandLine ) ; } } if ( unexpected != null ) { throw new OptionException ( this , ResourceConstants . UNEXPECTED_TOKEN , unexpected . getPreferredName ( ) ) ; } if ( present < minimum ) { throw new OptionException ( this , ResourceConstants . MISSING_OPTION ) ; } for ( final Iterator i = anonymous . iterator ( ) ; i . hasNext ( ) ; ) { final Option option = ( Option ) i . next ( ) ; option . validate ( commandLine ) ; } } |
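The fix moves the `option.validate(commandLine)` call after the presence check, so each option is validated at most once; the stack trace above suggests the earlier double validation is what handed an already-converted `java.io.File` to the String-expecting `FileValidator`. A minimal model of the corrected control flow (interfaces simplified, not the CLI2 API):
```java
import java.util.List;

// Simplified model: decide *whether* to validate first, then validate once.
public class SingleValidationSketch {
    interface Opt {
        boolean required();
        boolean present();
        void validate();
    }

    static void validateGroup(List<Opt> options) {
        for (Opt o : options) {
            boolean validate = o.required();   // required options must be checked
            if (o.present()) {
                validate = true;               // present options are checked too
            }
            if (validate) {
                o.validate();                  // exactly one validate() per option
            }
        }
    }

    public static void main(String[] args) {
        Opt fileName = new Opt() {
            public boolean required() { return true; }
            public boolean present() { return true; }
            public void validate() { System.out.println("validated once"); }
        };
        validateGroup(List.of(fileName));
    }
}
```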
Math | 15 | src/main/java/org/apache/commons/math3/util/FastMath.java | 1441 | 1599 | FastMath.pow deviates from Math.pow for negative, finite base values with an exponent 2^52 < y < 2^53 | As reported by Jeff Hain:
pow(double,double):
Math.pow(-1.0,5.000000000000001E15) = -1.0
FastMath.pow(-1.0,5.000000000000001E15) = 1.0
===> This is due to the code considering the power to be an even
integer whenever |y| >= 2^52, whereas that conclusion only holds
for |y| >= 2^53 (below 2^53, a double can still represent odd integers).
===> replace
"if (y >= TWO_POWER_52 || y <= -TWO_POWER_52)"
with
"if (y >= 2*TWO_POWER_52 || y <= -2*TWO_POWER_52)"
and that solves it. | public static double pow(double x, double y) {
final double lns[] = new double[2];
if (y == 0.0) {
return 1.0;
}
if (x != x) { // X is NaN
return x;
}
if (x == 0) {
long bits = Double.doubleToLongBits(x);
if ((bits & 0x8000000000000000L) != 0) {
// -zero
long yi = (long) y;
if (y < 0 && y == yi && (yi & 1) == 1) {
return Double.NEGATIVE_INFINITY;
}
if (y > 0 && y == yi && (yi & 1) == 1) {
return -0.0;
}
}
if (y < 0) {
return Double.POSITIVE_INFINITY;
}
if (y > 0) {
return 0.0;
}
return Double.NaN;
}
if (x == Double.POSITIVE_INFINITY) {
if (y != y) { // y is NaN
return y;
}
if (y < 0.0) {
return 0.0;
} else {
return Double.POSITIVE_INFINITY;
}
}
if (y == Double.POSITIVE_INFINITY) {
if (x * x == 1.0) {
return Double.NaN;
}
if (x * x > 1.0) {
return Double.POSITIVE_INFINITY;
} else {
return 0.0;
}
}
if (x == Double.NEGATIVE_INFINITY) {
if (y != y) { // y is NaN
return y;
}
if (y < 0) {
long yi = (long) y;
if (y == yi && (yi & 1) == 1) {
return -0.0;
}
return 0.0;
}
if (y > 0) {
long yi = (long) y;
if (y == yi && (yi & 1) == 1) {
return Double.NEGATIVE_INFINITY;
}
return Double.POSITIVE_INFINITY;
}
}
if (y == Double.NEGATIVE_INFINITY) {
if (x * x == 1.0) {
return Double.NaN;
}
if (x * x < 1.0) {
return Double.POSITIVE_INFINITY;
} else {
return 0.0;
}
}
/* Handle special case x<0 */
if (x < 0) {
// y is an even integer in this case
if (y >= TWO_POWER_52 || y <= -TWO_POWER_52) {
return pow(-x, y);
}
if (y == (long) y) {
// If y is an integer
return ((long)y & 1) == 0 ? pow(-x, y) : -pow(-x, y);
} else {
return Double.NaN;
}
}
/* Split y into ya and yb such that y = ya+yb */
double ya;
double yb;
if (y < 8e298 && y > -8e298) {
double tmp1 = y * HEX_40000000;
ya = y + tmp1 - tmp1;
yb = y - ya;
} else {
double tmp1 = y * 9.31322574615478515625E-10;
double tmp2 = tmp1 * 9.31322574615478515625E-10;
ya = (tmp1 + tmp2 - tmp1) * HEX_40000000 * HEX_40000000;
yb = y - ya;
}
/* Compute ln(x) */
final double lores = log(x, lns);
if (Double.isInfinite(lores)){ // don't allow this to be converted to NaN
return lores;
}
double lna = lns[0];
double lnb = lns[1];
/* resplit lns */
double tmp1 = lna * HEX_40000000;
double tmp2 = lna + tmp1 - tmp1;
lnb += lna - tmp2;
lna = tmp2;
// y*ln(x) = (aa+ab)
final double aa = lna * ya;
final double ab = lna * yb + lnb * ya + lnb * yb;
lna = aa+ab;
lnb = -(lna - aa - ab);
double z = 1.0 / 120.0;
z = z * lnb + (1.0 / 24.0);
z = z * lnb + (1.0 / 6.0);
z = z * lnb + 0.5;
z = z * lnb + 1.0;
z = z * lnb;
final double result = exp(lna, z, null);
//result = result + result * z;
return result;
} | public static double pow ( double x , double y ) { final double lns [ ] = new double [ 2 ] ; if ( y == 0.0 ) { return 1.0 ; } if ( x != x ) { return x ; } if ( x == 0 ) { long bits = Double . doubleToLongBits ( x ) ; if ( ( bits & 0x8000000000000000L ) != 0 ) { long yi = ( long ) y ; if ( y < 0 && y == yi && ( yi & 1 ) == 1 ) { return Double . NEGATIVE_INFINITY ; } if ( y > 0 && y == yi && ( yi & 1 ) == 1 ) { return - 0.0 ; } } if ( y < 0 ) { return Double . POSITIVE_INFINITY ; } if ( y > 0 ) { return 0.0 ; } return Double . NaN ; } if ( x == Double . POSITIVE_INFINITY ) { if ( y != y ) { return y ; } if ( y < 0.0 ) { return 0.0 ; } else { return Double . POSITIVE_INFINITY ; } } if ( y == Double . POSITIVE_INFINITY ) { if ( x * x == 1.0 ) { return Double . NaN ; } if ( x * x > 1.0 ) { return Double . POSITIVE_INFINITY ; } else { return 0.0 ; } } if ( x == Double . NEGATIVE_INFINITY ) { if ( y != y ) { return y ; } if ( y < 0 ) { long yi = ( long ) y ; if ( y == yi && ( yi & 1 ) == 1 ) { return - 0.0 ; } return 0.0 ; } if ( y > 0 ) { long yi = ( long ) y ; if ( y == yi && ( yi & 1 ) == 1 ) { return Double . NEGATIVE_INFINITY ; } return Double . POSITIVE_INFINITY ; } } if ( y == Double . NEGATIVE_INFINITY ) { if ( x * x == 1.0 ) { return Double . NaN ; } if ( x * x < 1.0 ) { return Double . POSITIVE_INFINITY ; } else { return 0.0 ; } } if ( x < 0 ) { if ( y >= TWO_POWER_52 || y <= - TWO_POWER_52 ) { return pow ( - x , y ) ; } if ( y == ( long ) y ) { return ( ( long ) y & 1 ) == 0 ? pow ( - x , y ) : - pow ( - x , y ) ; } else { return Double . NaN ; } } double ya ; double yb ; if ( y < 8e298 && y > - 8e298 ) { double tmp1 = y * HEX_40000000 ; ya = y + tmp1 - tmp1 ; yb = y - ya ; } else { double tmp1 = y * 9.31322574615478515625E-10 ; double tmp2 = tmp1 * 9.31322574615478515625E-10 ; ya = ( tmp1 + tmp2 - tmp1 ) * HEX_40000000 * HEX_40000000 ; yb = y - ya ; } final double lores = log ( x , lns ) ; if ( Double . isInfinite ( lores ) ) { return lores ; } double lna = lns [ 0 ] ; double lnb = lns [ 1 ] ; double tmp1 = lna * HEX_40000000 ; double tmp2 = lna + tmp1 - tmp1 ; lnb += lna - tmp2 ; lna = tmp2 ; final double aa = lna * ya ; final double ab = lna * yb + lnb * ya + lnb * yb ; lna = aa + ab ; lnb = - ( lna - aa - ab ) ; double z = 1.0 / 120.0 ; z = z * lnb + ( 1.0 / 24.0 ) ; z = z * lnb + ( 1.0 / 6.0 ) ; z = z * lnb + 0.5 ; z = z * lnb + 1.0 ; z = z * lnb ; final double result = exp ( lna , z , null ) ; return result ; } | public static double pow(double x, double y) {
final double lns[] = new double[2];
if (y == 0.0) {
return 1.0;
}
if (x != x) { // X is NaN
return x;
}
if (x == 0) {
long bits = Double.doubleToLongBits(x);
if ((bits & 0x8000000000000000L) != 0) {
// -zero
long yi = (long) y;
if (y < 0 && y == yi && (yi & 1) == 1) {
return Double.NEGATIVE_INFINITY;
}
if (y > 0 && y == yi && (yi & 1) == 1) {
return -0.0;
}
}
if (y < 0) {
return Double.POSITIVE_INFINITY;
}
if (y > 0) {
return 0.0;
}
return Double.NaN;
}
if (x == Double.POSITIVE_INFINITY) {
if (y != y) { // y is NaN
return y;
}
if (y < 0.0) {
return 0.0;
} else {
return Double.POSITIVE_INFINITY;
}
}
if (y == Double.POSITIVE_INFINITY) {
if (x * x == 1.0) {
return Double.NaN;
}
if (x * x > 1.0) {
return Double.POSITIVE_INFINITY;
} else {
return 0.0;
}
}
if (x == Double.NEGATIVE_INFINITY) {
if (y != y) { // y is NaN
return y;
}
if (y < 0) {
long yi = (long) y;
if (y == yi && (yi & 1) == 1) {
return -0.0;
}
return 0.0;
}
if (y > 0) {
long yi = (long) y;
if (y == yi && (yi & 1) == 1) {
return Double.NEGATIVE_INFINITY;
}
return Double.POSITIVE_INFINITY;
}
}
if (y == Double.NEGATIVE_INFINITY) {
if (x * x == 1.0) {
return Double.NaN;
}
if (x * x < 1.0) {
return Double.POSITIVE_INFINITY;
} else {
return 0.0;
}
}
/* Handle special case x<0 */
if (x < 0) {
// y is an even integer in this case
if (y >= TWO_POWER_53 || y <= -TWO_POWER_53) {
return pow(-x, y);
}
if (y == (long) y) {
// If y is an integer
return ((long)y & 1) == 0 ? pow(-x, y) : -pow(-x, y);
} else {
return Double.NaN;
}
}
/* Split y into ya and yb such that y = ya+yb */
double ya;
double yb;
if (y < 8e298 && y > -8e298) {
double tmp1 = y * HEX_40000000;
ya = y + tmp1 - tmp1;
yb = y - ya;
} else {
double tmp1 = y * 9.31322574615478515625E-10;
double tmp2 = tmp1 * 9.31322574615478515625E-10;
ya = (tmp1 + tmp2 - tmp1) * HEX_40000000 * HEX_40000000;
yb = y - ya;
}
/* Compute ln(x) */
final double lores = log(x, lns);
if (Double.isInfinite(lores)){ // don't allow this to be converted to NaN
return lores;
}
double lna = lns[0];
double lnb = lns[1];
/* resplit lns */
double tmp1 = lna * HEX_40000000;
double tmp2 = lna + tmp1 - tmp1;
lnb += lna - tmp2;
lna = tmp2;
// y*ln(x) = (aa+ab)
final double aa = lna * ya;
final double ab = lna * yb + lnb * ya + lnb * yb;
lna = aa+ab;
lnb = -(lna - aa - ab);
double z = 1.0 / 120.0;
z = z * lnb + (1.0 / 24.0);
z = z * lnb + (1.0 / 6.0);
z = z * lnb + 0.5;
z = z * lnb + 1.0;
z = z * lnb;
final double result = exp(lna, z, null);
//result = result + result * z;
return result;
} | public static double pow ( double x , double y ) { final double lns [ ] = new double [ 2 ] ; if ( y == 0.0 ) { return 1.0 ; } if ( x != x ) { return x ; } if ( x == 0 ) { long bits = Double . doubleToLongBits ( x ) ; if ( ( bits & 0x8000000000000000L ) != 0 ) { long yi = ( long ) y ; if ( y < 0 && y == yi && ( yi & 1 ) == 1 ) { return Double . NEGATIVE_INFINITY ; } if ( y > 0 && y == yi && ( yi & 1 ) == 1 ) { return - 0.0 ; } } if ( y < 0 ) { return Double . POSITIVE_INFINITY ; } if ( y > 0 ) { return 0.0 ; } return Double . NaN ; } if ( x == Double . POSITIVE_INFINITY ) { if ( y != y ) { return y ; } if ( y < 0.0 ) { return 0.0 ; } else { return Double . POSITIVE_INFINITY ; } } if ( y == Double . POSITIVE_INFINITY ) { if ( x * x == 1.0 ) { return Double . NaN ; } if ( x * x > 1.0 ) { return Double . POSITIVE_INFINITY ; } else { return 0.0 ; } } if ( x == Double . NEGATIVE_INFINITY ) { if ( y != y ) { return y ; } if ( y < 0 ) { long yi = ( long ) y ; if ( y == yi && ( yi & 1 ) == 1 ) { return - 0.0 ; } return 0.0 ; } if ( y > 0 ) { long yi = ( long ) y ; if ( y == yi && ( yi & 1 ) == 1 ) { return Double . NEGATIVE_INFINITY ; } return Double . POSITIVE_INFINITY ; } } if ( y == Double . NEGATIVE_INFINITY ) { if ( x * x == 1.0 ) { return Double . NaN ; } if ( x * x < 1.0 ) { return Double . POSITIVE_INFINITY ; } else { return 0.0 ; } } if ( x < 0 ) { if ( y >= TWO_POWER_53 || y <= - TWO_POWER_53 ) { return pow ( - x , y ) ; } if ( y == ( long ) y ) { return ( ( long ) y & 1 ) == 0 ? pow ( - x , y ) : - pow ( - x , y ) ; } else { return Double . NaN ; } } double ya ; double yb ; if ( y < 8e298 && y > - 8e298 ) { double tmp1 = y * HEX_40000000 ; ya = y + tmp1 - tmp1 ; yb = y - ya ; } else { double tmp1 = y * 9.31322574615478515625E-10 ; double tmp2 = tmp1 * 9.31322574615478515625E-10 ; ya = ( tmp1 + tmp2 - tmp1 ) * HEX_40000000 * HEX_40000000 ; yb = y - ya ; } final double lores = log ( x , lns ) ; if ( Double . isInfinite ( lores ) ) { return lores ; } double lna = lns [ 0 ] ; double lnb = lns [ 1 ] ; double tmp1 = lna * HEX_40000000 ; double tmp2 = lna + tmp1 - tmp1 ; lnb += lna - tmp2 ; lna = tmp2 ; final double aa = lna * ya ; final double ab = lna * yb + lnb * ya + lnb * yb ; lna = aa + ab ; lnb = - ( lna - aa - ab ) ; double z = 1.0 / 120.0 ; z = z * lnb + ( 1.0 / 24.0 ) ; z = z * lnb + ( 1.0 / 6.0 ) ; z = z * lnb + 0.5 ; z = z * lnb + 1.0 ; z = z * lnb ; final double result = exp ( lna , z , null ) ; return result ; } |
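The fix replaces the `TWO_POWER_52` threshold with `TWO_POWER_53`. Why the smaller threshold is wrong can be checked directly with `Math.ulp`:
```java
// Every double >= 2^53 is an even integer (its ulp is at least 2.0), but in
// [2^52, 2^53) the ulp is 1.0, so odd integer-valued doubles still exist
// there and the sign of pow(negative, y) still depends on y's parity.
public class PowParitySketch {
    public static void main(String[] args) {
        double y = 5.000000000000001E15;          // odd integer, 2^52 < y < 2^53
        System.out.println(y == (long) y);        // true: exactly an integer
        System.out.println(((long) y) & 1L);      // 1: odd -> pow(-1.0, y) must be -1.0
        System.out.println(Math.ulp(y));          // 1.0: odd values representable here
        System.out.println(Math.ulp(1.0E16));     // 2.0: above 2^53, integers are even
    }
}
```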
JacksonDatabind | 70 | src/main/java/com/fasterxml/jackson/databind/deser/impl/BeanPropertyMap.java | 426 | 453 | `ACCEPT_CASE_INSENSITIVE_PROPERTIES` fails with `@JsonUnwrapped` | (note: moved from https://github.com/FasterXML/jackson-dataformat-csv/issues/133)
When trying to deserialize type like:
```java
public class Person {
@JsonUnwrapped(prefix = "businessAddress.")
public Address businessAddress;
}
public class Address {
public String street;
public String addon;
public String zip = "";
public String town;
public String country;
}
```
with a case-insensitive mapper (`mapper.enable(MapperFeature.ACCEPT_CASE_INSENSITIVE_PROPERTIES);`) I get this exception:
```
java.util.NoSuchElementException: No entry 'businessAddress' found, can't remove
at com.fasterxml.jackson.databind.deser.impl.BeanPropertyMap.remove(BeanPropertyMap.java:447)
at com.fasterxml.jackson.databind.deser.BeanDeserializerBase.resolve(BeanDeserializerBase.java:534)
at com.fasterxml.jackson.databind.deser.DeserializerCache._createAndCache2(DeserializerCache.java:293)
...
```
| public void remove(SettableBeanProperty propToRm)
{
ArrayList<SettableBeanProperty> props = new ArrayList<SettableBeanProperty>(_size);
String key = getPropertyName(propToRm);
boolean found = false;
for (int i = 1, end = _hashArea.length; i < end; i += 2) {
SettableBeanProperty prop = (SettableBeanProperty) _hashArea[i];
if (prop == null) {
continue;
}
if (!found) {
// 09-Jan-2017, tatu: Important: must check name slot and NOT property name,
// as only former is lower-case in case-insensitive case
found = key.equals(prop.getName());
if (found) {
// need to leave a hole here
_propsInOrder[_findFromOrdered(prop)] = null;
continue;
}
}
props.add(prop);
}
if (!found) {
throw new NoSuchElementException("No entry '"+propToRm.getName()+"' found, can't remove");
}
init(props);
} | public void remove ( SettableBeanProperty propToRm ) { ArrayList < SettableBeanProperty > props = new ArrayList < SettableBeanProperty > ( _size ) ; String key = getPropertyName ( propToRm ) ; boolean found = false ; for ( int i = 1 , end = _hashArea . length ; i < end ; i += 2 ) { SettableBeanProperty prop = ( SettableBeanProperty ) _hashArea [ i ] ; if ( prop == null ) { continue ; } if ( ! found ) { found = key . equals ( prop . getName ( ) ) ; if ( found ) { _propsInOrder [ _findFromOrdered ( prop ) ] = null ; continue ; } } props . add ( prop ) ; } if ( ! found ) { throw new NoSuchElementException ( "No entry '" + propToRm . getName ( ) + "' found, can't remove" ) ; } init ( props ) ; } | public void remove(SettableBeanProperty propToRm)
{
ArrayList<SettableBeanProperty> props = new ArrayList<SettableBeanProperty>(_size);
String key = getPropertyName(propToRm);
boolean found = false;
for (int i = 1, end = _hashArea.length; i < end; i += 2) {
SettableBeanProperty prop = (SettableBeanProperty) _hashArea[i];
if (prop == null) {
continue;
}
if (!found) {
// 09-Jan-2017, tatu: Important: must check name slot and NOT property name,
// as only former is lower-case in case-insensitive case
found = key.equals(_hashArea[i-1]);
if (found) {
// need to leave a hole here
_propsInOrder[_findFromOrdered(prop)] = null;
continue;
}
}
props.add(prop);
}
if (!found) {
throw new NoSuchElementException("No entry '"+propToRm.getName()+"' found, can't remove");
}
init(props);
} | public void remove ( SettableBeanProperty propToRm ) { ArrayList < SettableBeanProperty > props = new ArrayList < SettableBeanProperty > ( _size ) ; String key = getPropertyName ( propToRm ) ; boolean found = false ; for ( int i = 1 , end = _hashArea . length ; i < end ; i += 2 ) { SettableBeanProperty prop = ( SettableBeanProperty ) _hashArea [ i ] ; if ( prop == null ) { continue ; } if ( ! found ) { found = key . equals ( _hashArea [ i - 1 ] ) ; if ( found ) { _propsInOrder [ _findFromOrdered ( prop ) ] = null ; continue ; } } props . add ( prop ) ; } if ( ! found ) { throw new NoSuchElementException ( "No entry '" + propToRm . getName ( ) + "' found, can't remove" ) ; } init ( props ) ; } |
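The fix compares the removal key against the lower-cased name slot (`_hashArea[i-1]`) instead of `prop.getName()`. A small model of why the old comparison could never match in case-insensitive mode (plain strings stand in for the hash-area slots):
```java
// In case-insensitive mode the map's key slots are lower-cased while the
// property keeps its original casing, so key.equals(prop.getName()) fails.
public class CaseInsensitiveRemoveSketch {
    public static void main(String[] args) {
        String propName = "businessAddress";          // prop.getName()
        String key = propName.toLowerCase();          // getPropertyName(propToRm), ci mode
        String nameSlot = propName.toLowerCase();     // what _hashArea[i-1] holds

        System.out.println(key.equals(propName));     // false -> NoSuchElementException
        System.out.println(key.equals(nameSlot));     // true  -> the fixed comparison
    }
}
```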
Math | 42 | src/main/java/org/apache/commons/math/optimization/linear/SimplexTableau.java | 396 | 425 | Negative value with restrictNonNegative | Problem: commons-math-2.2 SimplexSolver.
A variable with a 0 coefficient may be assigned a negative value despite the restrictToNonNegative flag in the call:
SimplexSolver.optimize(function, constraints, GoalType.MINIMIZE, true);
Function:
1 * x + 1 * y + 0
Constraints:
1 * x + 0 * y = 1
Result:
x = 1; y = -1;
Probably variables with 0 coefficients are omitted at some point of the computation, and because of that the restriction does not affect their values. | protected RealPointValuePair getSolution() {
int negativeVarColumn = columnLabels.indexOf(NEGATIVE_VAR_COLUMN_LABEL);
Integer negativeVarBasicRow = negativeVarColumn > 0 ? getBasicRow(negativeVarColumn) : null;
double mostNegative = negativeVarBasicRow == null ? 0 : getEntry(negativeVarBasicRow, getRhsOffset());
Set<Integer> basicRows = new HashSet<Integer>();
double[] coefficients = new double[getOriginalNumDecisionVariables()];
for (int i = 0; i < coefficients.length; i++) {
int colIndex = columnLabels.indexOf("x" + i);
if (colIndex < 0) {
coefficients[i] = 0;
continue;
}
Integer basicRow = getBasicRow(colIndex);
// if the basic row is found to be the objective function row
// set the coefficient to 0 -> this case handles unconstrained
// variables that are still part of the objective function
if (basicRows.contains(basicRow)) {
// if multiple variables can take a given value
// then we choose the first and set the rest equal to 0
coefficients[i] = 0 - (restrictToNonNegative ? 0 : mostNegative);
} else {
basicRows.add(basicRow);
coefficients[i] =
(basicRow == null ? 0 : getEntry(basicRow, getRhsOffset())) -
(restrictToNonNegative ? 0 : mostNegative);
}
}
return new RealPointValuePair(coefficients, f.getValue(coefficients));
} | protected RealPointValuePair getSolution ( ) { int negativeVarColumn = columnLabels . indexOf ( NEGATIVE_VAR_COLUMN_LABEL ) ; Integer negativeVarBasicRow = negativeVarColumn > 0 ? getBasicRow ( negativeVarColumn ) : null ; double mostNegative = negativeVarBasicRow == null ? 0 : getEntry ( negativeVarBasicRow , getRhsOffset ( ) ) ; Set < Integer > basicRows = new HashSet < Integer > ( ) ; double [ ] coefficients = new double [ getOriginalNumDecisionVariables ( ) ] ; for ( int i = 0 ; i < coefficients . length ; i ++ ) { int colIndex = columnLabels . indexOf ( "x" + i ) ; if ( colIndex < 0 ) { coefficients [ i ] = 0 ; continue ; } Integer basicRow = getBasicRow ( colIndex ) ; if ( basicRows . contains ( basicRow ) ) { coefficients [ i ] = 0 - ( restrictToNonNegative ? 0 : mostNegative ) ; } else { basicRows . add ( basicRow ) ; coefficients [ i ] = ( basicRow == null ? 0 : getEntry ( basicRow , getRhsOffset ( ) ) ) - ( restrictToNonNegative ? 0 : mostNegative ) ; } } return new RealPointValuePair ( coefficients , f . getValue ( coefficients ) ) ; } | protected RealPointValuePair getSolution() {
int negativeVarColumn = columnLabels.indexOf(NEGATIVE_VAR_COLUMN_LABEL);
Integer negativeVarBasicRow = negativeVarColumn > 0 ? getBasicRow(negativeVarColumn) : null;
double mostNegative = negativeVarBasicRow == null ? 0 : getEntry(negativeVarBasicRow, getRhsOffset());
Set<Integer> basicRows = new HashSet<Integer>();
double[] coefficients = new double[getOriginalNumDecisionVariables()];
for (int i = 0; i < coefficients.length; i++) {
int colIndex = columnLabels.indexOf("x" + i);
if (colIndex < 0) {
coefficients[i] = 0;
continue;
}
Integer basicRow = getBasicRow(colIndex);
if (basicRow != null && basicRow == 0) {
// if the basic row is found to be the objective function row
// set the coefficient to 0 -> this case handles unconstrained
// variables that are still part of the objective function
coefficients[i] = 0;
} else if (basicRows.contains(basicRow)) {
// if multiple variables can take a given value
// then we choose the first and set the rest equal to 0
coefficients[i] = 0 - (restrictToNonNegative ? 0 : mostNegative);
} else {
basicRows.add(basicRow);
coefficients[i] =
(basicRow == null ? 0 : getEntry(basicRow, getRhsOffset())) -
(restrictToNonNegative ? 0 : mostNegative);
}
}
return new RealPointValuePair(coefficients, f.getValue(coefficients));
} | protected RealPointValuePair getSolution ( ) { int negativeVarColumn = columnLabels . indexOf ( NEGATIVE_VAR_COLUMN_LABEL ) ; Integer negativeVarBasicRow = negativeVarColumn > 0 ? getBasicRow ( negativeVarColumn ) : null ; double mostNegative = negativeVarBasicRow == null ? 0 : getEntry ( negativeVarBasicRow , getRhsOffset ( ) ) ; Set < Integer > basicRows = new HashSet < Integer > ( ) ; double [ ] coefficients = new double [ getOriginalNumDecisionVariables ( ) ] ; for ( int i = 0 ; i < coefficients . length ; i ++ ) { int colIndex = columnLabels . indexOf ( "x" + i ) ; if ( colIndex < 0 ) { coefficients [ i ] = 0 ; continue ; } Integer basicRow = getBasicRow ( colIndex ) ; if ( basicRow != null && basicRow == 0 ) { coefficients [ i ] = 0 ; } else if ( basicRows . contains ( basicRow ) ) { coefficients [ i ] = 0 - ( restrictToNonNegative ? 0 : mostNegative ) ; } else { basicRows . add ( basicRow ) ; coefficients [ i ] = ( basicRow == null ? 0 : getEntry ( basicRow , getRhsOffset ( ) ) ) - ( restrictToNonNegative ? 0 : mostNegative ) ; } } return new RealPointValuePair ( coefficients , f . getValue ( coefficients ) ) ; } |
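Editor's note (not part of the dataset): the fix above treats a variable whose basic row is the objective-function row (row 0) as non-basic, so an unconstrained variable with a zero objective coefficient no longer inherits a value from that row. A minimal sketch of a program exercising that case, assuming the commons-math 2.x linear optimization API; the class name and the concrete numbers are made up for illustration.
import java.util.ArrayList;
import java.util.Collection;
import org.apache.commons.math.optimization.GoalType;
import org.apache.commons.math.optimization.RealPointValuePair;
import org.apache.commons.math.optimization.linear.LinearConstraint;
import org.apache.commons.math.optimization.linear.LinearObjectiveFunction;
import org.apache.commons.math.optimization.linear.Relationship;
import org.apache.commons.math.optimization.linear.SimplexSolver;
public class ZeroCoefficientSketch {
    public static void main(String[] args) throws Exception {
        // x0 has a zero objective coefficient; only x1 drives the objective.
        LinearObjectiveFunction f = new LinearObjectiveFunction(new double[] { 0, 1 }, 0);
        Collection<LinearConstraint> constraints = new ArrayList<LinearConstraint>();
        constraints.add(new LinearConstraint(new double[] { 1, 1 }, Relationship.LEQ, 10));
        // restrictToNonNegative = true: with the fix, x0 comes back >= 0.
        RealPointValuePair solution =
            new SimplexSolver().optimize(f, constraints, GoalType.MAXIMIZE, true);
        System.out.println("x0 = " + solution.getPoint()[0] + ", x1 = " + solution.getPoint()[1]);
    }
}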
JacksonDatabind | 66 | src/main/java/com/fasterxml/jackson/databind/deser/std/StdKeyDeserializer.java | 306 | 324 | Failure with custom Enum key deserializer, polymorphic types | Normally the `JsonParser` and the `DeserializationContext` are passed to a `Module`'s `JsonDeserializer`.
However, in the `MapDeserializer`, when deserializing a `Map` with an `Enum` key, the `KeyDeserializer` doesn't accept the `JsonParser` as an argument:
https://github.com/FasterXML/jackson-databind/blob/master/src/main/java/com/fasterxml/jackson/databind/deser/std/MapDeserializer.java#L453
Object key = keyDes.deserializeKey(keyStr, ctxt);
and the `StdKeyDeserializer.DelegatingKD` uses the context's parser
https://github.com/FasterXML/jackson-databind/blob/master/src/main/java/com/fasterxml/jackson/databind/deser/std/StdKeyDeserializer.java#L315
Object result = _delegate.deserialize(ctxt.getParser(), ctxt);
When the type info field is missing from the JSON, the `DeserializationContext`'s `JsonParser`'s token is `END_OBJECT` (presumably because it `nextToken`'d through the object looking for the type and whiffed).
This makes the module fail since the `JsonParser` in the `Module` is wrong, i.e. not the same as the `JsonParser` in the `MapDeserializer`.
Class:
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import java.util.Map;
import static com.fasterxml.jackson.annotation.JsonTypeInfo.Id.NAME;
@JsonTypeInfo(use = NAME, property = "@type", defaultImpl = SuperType.class)
public class SuperType {
private Map<SuperTypeEnum, String> someMap;
public Map<SuperTypeEnum, String> getSomeMap() {
return someMap;
}
public void setSomeMap(Map<SuperTypeEnum, String> someMap) {
this.someMap = someMap;
}
}
Enum:
public enum SuperTypeEnum {
FOO
}
Test:
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;
import org.junit.*;
import java.io.IOException;
import static org.junit.Assert.assertEquals;
public class TestDeserializeType {
@Test
public void testNoTypeShouldDeserialize() throws IOException {
String json = "{\"someMap\": {\"FOO\": \"bar\"}}";
ObjectMapper mapper = new ObjectMapper();
SuperType superType = mapper.readValue(json, SuperType.class);
assertEquals("Deserialized someMap.FOO should equal bar", "bar", superType.getSomeMap().get(SuperTypeEnum.FOO));
}
@Test
public void testNoTypeWithModuleShouldDeserialize() throws IOException {
String json = "{\"someMap\": {\"FOO\": \"bar\"}}";
ObjectMapper mapper = new ObjectMapper();
SimpleModule simpleModule = new SimpleModule();
simpleModule.addDeserializer(SuperTypeEnum.class, new JsonDeserializer<SuperTypeEnum>() {
@Override
public SuperTypeEnum deserialize(JsonParser jsonParser, DeserializationContext deserializationContext)
throws IOException {
return SuperTypeEnum.valueOf(jsonParser.getText());
}
});
mapper.registerModule(simpleModule);
SuperType superType = mapper.readValue(json, SuperType.class);
assertEquals("Deserialized someMap.FOO should equal bar", "bar", superType.getSomeMap().get(SuperTypeEnum.FOO));
}
}
| @SuppressWarnings("resource")
@Override
public final Object deserializeKey(String key, DeserializationContext ctxt)
throws IOException
{
if (key == null) { // is this even legal call?
return null;
}
try {
// Ugh... should not have to give parser which may or may not be correct one...
Object result = _delegate.deserialize(ctxt.getParser(), ctxt);
if (result != null) {
return result;
}
return ctxt.handleWeirdKey(_keyClass, key, "not a valid representation");
} catch (Exception re) {
return ctxt.handleWeirdKey(_keyClass, key, "not a valid representation: %s", re.getMessage());
}
} | @ SuppressWarnings ( "resource" ) @ Override public final Object deserializeKey ( String key , DeserializationContext ctxt ) throws IOException { if ( key == null ) { return null ; } try { Object result = _delegate . deserialize ( ctxt . getParser ( ) , ctxt ) ; if ( result != null ) { return result ; } return ctxt . handleWeirdKey ( _keyClass , key , "not a valid representation" ) ; } catch ( Exception re ) { return ctxt . handleWeirdKey ( _keyClass , key , "not a valid representation: %s" , re . getMessage ( ) ) ; } } | @SuppressWarnings("resource")
@Override
public final Object deserializeKey(String key, DeserializationContext ctxt)
throws IOException
{
if (key == null) { // is this even legal call?
return null;
}
TokenBuffer tb = new TokenBuffer(ctxt.getParser(), ctxt);
tb.writeString(key);
try {
// Ugh... should not have to give parser which may or may not be correct one...
JsonParser p = tb.asParser();
p.nextToken();
Object result = _delegate.deserialize(p, ctxt);
if (result != null) {
return result;
}
return ctxt.handleWeirdKey(_keyClass, key, "not a valid representation");
} catch (Exception re) {
return ctxt.handleWeirdKey(_keyClass, key, "not a valid representation: %s", re.getMessage());
}
} | @ SuppressWarnings ( "resource" ) @ Override public final Object deserializeKey ( String key , DeserializationContext ctxt ) throws IOException { if ( key == null ) { return null ; } TokenBuffer tb = new TokenBuffer ( ctxt . getParser ( ) , ctxt ) ; tb . writeString ( key ) ; try { JsonParser p = tb . asParser ( ) ; p . nextToken ( ) ; Object result = _delegate . deserialize ( p , ctxt ) ; if ( result != null ) { return result ; } return ctxt . handleWeirdKey ( _keyClass , key , "not a valid representation" ) ; } catch ( Exception re ) { return ctxt . handleWeirdKey ( _keyClass , key , "not a valid representation: %s" , re . getMessage ( ) ) ; } } |
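Editor's note (not part of the dataset): the fix replays the key through a TokenBuffer so the delegate deserializer receives a parser positioned on the key string, instead of the context's main parser whose current token may already be END_OBJECT. A minimal sketch of the replay idea; ctxt (the DeserializationContext) and delegate (the registered JsonDeserializer) are assumed to be in scope, and the key "FOO" is borrowed from the test above.
// Replay a map key as a standalone VALUE_STRING token, mirroring the fix.
TokenBuffer tb = new TokenBuffer(ctxt.getParser(), ctxt);
tb.writeString("FOO");                 // the raw key text
JsonParser p = tb.asParser();
p.nextToken();                         // position the parser on VALUE_STRING "FOO"
Object value = delegate.deserialize(p, ctxt);  // delegate now sees the right parser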
Codec | 10 | src/java/org/apache/commons/codec/language/Caverphone.java | 50 | 142 | Caverphone encodes names starting and ending with "mb" incorrectly. | Caverphone encodes names starting and ending with "mb" incorrectly.
According to the spec:
"If the name ends with mb make it m2".
This has been coded as:
"If the name _starts_ with mb make it m2". | public String caverphone(String txt) {
// NOTE: Version 1.0 of Caverphone is easily derivable from this code
// by commenting out the 2.0 lines and adding in the 1.0 lines
if( txt == null || txt.length() == 0 ) {
return "1111111111";
}
// 1. Convert to lowercase
txt = txt.toLowerCase(java.util.Locale.ENGLISH);
// 2. Remove anything not A-Z
txt = txt.replaceAll("[^a-z]", "");
// 2.5. Remove final e
txt = txt.replaceAll("e$", ""); // 2.0 only
// 3. Handle various start options
txt = txt.replaceAll("^cough", "cou2f");
txt = txt.replaceAll("^rough", "rou2f");
txt = txt.replaceAll("^tough", "tou2f");
txt = txt.replaceAll("^enough", "enou2f"); // 2.0 only
txt = txt.replaceAll("^trough", "trou2f"); // 2.0 only - note the spec says ^enough here again, c+p error I assume
txt = txt.replaceAll("^gn", "2n");
// End
txt = txt.replaceAll("^mb", "m2");
// 4. Handle replacements
txt = txt.replaceAll("cq", "2q");
txt = txt.replaceAll("ci", "si");
txt = txt.replaceAll("ce", "se");
txt = txt.replaceAll("cy", "sy");
txt = txt.replaceAll("tch", "2ch");
txt = txt.replaceAll("c", "k");
txt = txt.replaceAll("q", "k");
txt = txt.replaceAll("x", "k");
txt = txt.replaceAll("v", "f");
txt = txt.replaceAll("dg", "2g");
txt = txt.replaceAll("tio", "sio");
txt = txt.replaceAll("tia", "sia");
txt = txt.replaceAll("d", "t");
txt = txt.replaceAll("ph", "fh");
txt = txt.replaceAll("b", "p");
txt = txt.replaceAll("sh", "s2");
txt = txt.replaceAll("z", "s");
txt = txt.replaceAll("^[aeiou]", "A");
txt = txt.replaceAll("[aeiou]", "3");
txt = txt.replaceAll("j", "y"); // 2.0 only
txt = txt.replaceAll("^y3", "Y3"); // 2.0 only
txt = txt.replaceAll("^y", "A"); // 2.0 only
txt = txt.replaceAll("y", "3"); // 2.0 only
txt = txt.replaceAll("3gh3", "3kh3");
txt = txt.replaceAll("gh", "22");
txt = txt.replaceAll("g", "k");
txt = txt.replaceAll("s+", "S");
txt = txt.replaceAll("t+", "T");
txt = txt.replaceAll("p+", "P");
txt = txt.replaceAll("k+", "K");
txt = txt.replaceAll("f+", "F");
txt = txt.replaceAll("m+", "M");
txt = txt.replaceAll("n+", "N");
txt = txt.replaceAll("w3", "W3");
//txt = txt.replaceAll("wy", "Wy"); // 1.0 only
txt = txt.replaceAll("wh3", "Wh3");
txt = txt.replaceAll("w$", "3"); // 2.0 only
//txt = txt.replaceAll("why", "Why"); // 1.0 only
txt = txt.replaceAll("w", "2");
txt = txt.replaceAll("^h", "A");
txt = txt.replaceAll("h", "2");
txt = txt.replaceAll("r3", "R3");
txt = txt.replaceAll("r$", "3"); // 2.0 only
//txt = txt.replaceAll("ry", "Ry"); // 1.0 only
txt = txt.replaceAll("r", "2");
txt = txt.replaceAll("l3", "L3");
txt = txt.replaceAll("l$", "3"); // 2.0 only
//txt = txt.replaceAll("ly", "Ly"); // 1.0 only
txt = txt.replaceAll("l", "2");
//txt = txt.replaceAll("j", "y"); // 1.0 only
//txt = txt.replaceAll("y3", "Y3"); // 1.0 only
//txt = txt.replaceAll("y", "2"); // 1.0 only
// 5. Handle removals
txt = txt.replaceAll("2", "");
txt = txt.replaceAll("3$", "A"); // 2.0 only
txt = txt.replaceAll("3", "");
// 6. put ten 1s on the end
txt = txt + "111111" + "1111"; // 1.0 only has 6 1s
// 7. take the first six characters as the code
return txt.substring(0, 10); // 1.0 truncates to 6
} | public String caverphone ( String txt ) { if ( txt == null || txt . length ( ) == 0 ) { return "1111111111" ; } txt = txt . toLowerCase ( java . util . Locale . ENGLISH ) ; txt = txt . replaceAll ( "[^a-z]" , "" ) ; txt = txt . replaceAll ( "e$" , "" ) ; txt = txt . replaceAll ( "^cough" , "cou2f" ) ; txt = txt . replaceAll ( "^rough" , "rou2f" ) ; txt = txt . replaceAll ( "^tough" , "tou2f" ) ; txt = txt . replaceAll ( "^enough" , "enou2f" ) ; txt = txt . replaceAll ( "^trough" , "trou2f" ) ; txt = txt . replaceAll ( "^gn" , "2n" ) ; txt = txt . replaceAll ( "^mb" , "m2" ) ; txt = txt . replaceAll ( "cq" , "2q" ) ; txt = txt . replaceAll ( "ci" , "si" ) ; txt = txt . replaceAll ( "ce" , "se" ) ; txt = txt . replaceAll ( "cy" , "sy" ) ; txt = txt . replaceAll ( "tch" , "2ch" ) ; txt = txt . replaceAll ( "c" , "k" ) ; txt = txt . replaceAll ( "q" , "k" ) ; txt = txt . replaceAll ( "x" , "k" ) ; txt = txt . replaceAll ( "v" , "f" ) ; txt = txt . replaceAll ( "dg" , "2g" ) ; txt = txt . replaceAll ( "tio" , "sio" ) ; txt = txt . replaceAll ( "tia" , "sia" ) ; txt = txt . replaceAll ( "d" , "t" ) ; txt = txt . replaceAll ( "ph" , "fh" ) ; txt = txt . replaceAll ( "b" , "p" ) ; txt = txt . replaceAll ( "sh" , "s2" ) ; txt = txt . replaceAll ( "z" , "s" ) ; txt = txt . replaceAll ( "^[aeiou]" , "A" ) ; txt = txt . replaceAll ( "[aeiou]" , "3" ) ; txt = txt . replaceAll ( "j" , "y" ) ; txt = txt . replaceAll ( "^y3" , "Y3" ) ; txt = txt . replaceAll ( "^y" , "A" ) ; txt = txt . replaceAll ( "y" , "3" ) ; txt = txt . replaceAll ( "3gh3" , "3kh3" ) ; txt = txt . replaceAll ( "gh" , "22" ) ; txt = txt . replaceAll ( "g" , "k" ) ; txt = txt . replaceAll ( "s+" , "S" ) ; txt = txt . replaceAll ( "t+" , "T" ) ; txt = txt . replaceAll ( "p+" , "P" ) ; txt = txt . replaceAll ( "k+" , "K" ) ; txt = txt . replaceAll ( "f+" , "F" ) ; txt = txt . replaceAll ( "m+" , "M" ) ; txt = txt . replaceAll ( "n+" , "N" ) ; txt = txt . replaceAll ( "w3" , "W3" ) ; txt = txt . replaceAll ( "wh3" , "Wh3" ) ; txt = txt . replaceAll ( "w$" , "3" ) ; txt = txt . replaceAll ( "w" , "2" ) ; txt = txt . replaceAll ( "^h" , "A" ) ; txt = txt . replaceAll ( "h" , "2" ) ; txt = txt . replaceAll ( "r3" , "R3" ) ; txt = txt . replaceAll ( "r$" , "3" ) ; txt = txt . replaceAll ( "r" , "2" ) ; txt = txt . replaceAll ( "l3" , "L3" ) ; txt = txt . replaceAll ( "l$" , "3" ) ; txt = txt . replaceAll ( "l" , "2" ) ; txt = txt . replaceAll ( "2" , "" ) ; txt = txt . replaceAll ( "3$" , "A" ) ; txt = txt . replaceAll ( "3" , "" ) ; txt = txt + "111111" + "1111" ; return txt . substring ( 0 , 10 ) ; } | public String caverphone(String txt) {
// NOTE: Version 1.0 of Caverphone is easily derivable from this code
// by commenting out the 2.0 lines and adding in the 1.0 lines
if( txt == null || txt.length() == 0 ) {
return "1111111111";
}
// 1. Convert to lowercase
txt = txt.toLowerCase(java.util.Locale.ENGLISH);
// 2. Remove anything not A-Z
txt = txt.replaceAll("[^a-z]", "");
// 2.5. Remove final e
txt = txt.replaceAll("e$", ""); // 2.0 only
// 3. Handle various start options
txt = txt.replaceAll("^cough", "cou2f");
txt = txt.replaceAll("^rough", "rou2f");
txt = txt.replaceAll("^tough", "tou2f");
txt = txt.replaceAll("^enough", "enou2f"); // 2.0 only
txt = txt.replaceAll("^trough", "trou2f"); // 2.0 only - note the spec says ^enough here again, c+p error I assume
txt = txt.replaceAll("^gn", "2n");
// End
txt = txt.replaceAll("mb$", "m2");
// 4. Handle replacements
txt = txt.replaceAll("cq", "2q");
txt = txt.replaceAll("ci", "si");
txt = txt.replaceAll("ce", "se");
txt = txt.replaceAll("cy", "sy");
txt = txt.replaceAll("tch", "2ch");
txt = txt.replaceAll("c", "k");
txt = txt.replaceAll("q", "k");
txt = txt.replaceAll("x", "k");
txt = txt.replaceAll("v", "f");
txt = txt.replaceAll("dg", "2g");
txt = txt.replaceAll("tio", "sio");
txt = txt.replaceAll("tia", "sia");
txt = txt.replaceAll("d", "t");
txt = txt.replaceAll("ph", "fh");
txt = txt.replaceAll("b", "p");
txt = txt.replaceAll("sh", "s2");
txt = txt.replaceAll("z", "s");
txt = txt.replaceAll("^[aeiou]", "A");
txt = txt.replaceAll("[aeiou]", "3");
txt = txt.replaceAll("j", "y"); // 2.0 only
txt = txt.replaceAll("^y3", "Y3"); // 2.0 only
txt = txt.replaceAll("^y", "A"); // 2.0 only
txt = txt.replaceAll("y", "3"); // 2.0 only
txt = txt.replaceAll("3gh3", "3kh3");
txt = txt.replaceAll("gh", "22");
txt = txt.replaceAll("g", "k");
txt = txt.replaceAll("s+", "S");
txt = txt.replaceAll("t+", "T");
txt = txt.replaceAll("p+", "P");
txt = txt.replaceAll("k+", "K");
txt = txt.replaceAll("f+", "F");
txt = txt.replaceAll("m+", "M");
txt = txt.replaceAll("n+", "N");
txt = txt.replaceAll("w3", "W3");
//txt = txt.replaceAll("wy", "Wy"); // 1.0 only
txt = txt.replaceAll("wh3", "Wh3");
txt = txt.replaceAll("w$", "3"); // 2.0 only
//txt = txt.replaceAll("why", "Why"); // 1.0 only
txt = txt.replaceAll("w", "2");
txt = txt.replaceAll("^h", "A");
txt = txt.replaceAll("h", "2");
txt = txt.replaceAll("r3", "R3");
txt = txt.replaceAll("r$", "3"); // 2.0 only
//txt = txt.replaceAll("ry", "Ry"); // 1.0 only
txt = txt.replaceAll("r", "2");
txt = txt.replaceAll("l3", "L3");
txt = txt.replaceAll("l$", "3"); // 2.0 only
//txt = txt.replaceAll("ly", "Ly"); // 1.0 only
txt = txt.replaceAll("l", "2");
//txt = txt.replaceAll("j", "y"); // 1.0 only
//txt = txt.replaceAll("y3", "Y3"); // 1.0 only
//txt = txt.replaceAll("y", "2"); // 1.0 only
// 5. Handle removals
txt = txt.replaceAll("2", "");
txt = txt.replaceAll("3$", "A"); // 2.0 only
txt = txt.replaceAll("3", "");
// 6. put ten 1s on the end
txt = txt + "111111" + "1111"; // 1.0 only has 6 1s
// 7. take the first six characters as the code
return txt.substring(0, 10); // 1.0 truncates to 6
} | public String caverphone ( String txt ) { if ( txt == null || txt . length ( ) == 0 ) { return "1111111111" ; } txt = txt . toLowerCase ( java . util . Locale . ENGLISH ) ; txt = txt . replaceAll ( "[^a-z]" , "" ) ; txt = txt . replaceAll ( "e$" , "" ) ; txt = txt . replaceAll ( "^cough" , "cou2f" ) ; txt = txt . replaceAll ( "^rough" , "rou2f" ) ; txt = txt . replaceAll ( "^tough" , "tou2f" ) ; txt = txt . replaceAll ( "^enough" , "enou2f" ) ; txt = txt . replaceAll ( "^trough" , "trou2f" ) ; txt = txt . replaceAll ( "^gn" , "2n" ) ; txt = txt . replaceAll ( "mb$" , "m2" ) ; txt = txt . replaceAll ( "cq" , "2q" ) ; txt = txt . replaceAll ( "ci" , "si" ) ; txt = txt . replaceAll ( "ce" , "se" ) ; txt = txt . replaceAll ( "cy" , "sy" ) ; txt = txt . replaceAll ( "tch" , "2ch" ) ; txt = txt . replaceAll ( "c" , "k" ) ; txt = txt . replaceAll ( "q" , "k" ) ; txt = txt . replaceAll ( "x" , "k" ) ; txt = txt . replaceAll ( "v" , "f" ) ; txt = txt . replaceAll ( "dg" , "2g" ) ; txt = txt . replaceAll ( "tio" , "sio" ) ; txt = txt . replaceAll ( "tia" , "sia" ) ; txt = txt . replaceAll ( "d" , "t" ) ; txt = txt . replaceAll ( "ph" , "fh" ) ; txt = txt . replaceAll ( "b" , "p" ) ; txt = txt . replaceAll ( "sh" , "s2" ) ; txt = txt . replaceAll ( "z" , "s" ) ; txt = txt . replaceAll ( "^[aeiou]" , "A" ) ; txt = txt . replaceAll ( "[aeiou]" , "3" ) ; txt = txt . replaceAll ( "j" , "y" ) ; txt = txt . replaceAll ( "^y3" , "Y3" ) ; txt = txt . replaceAll ( "^y" , "A" ) ; txt = txt . replaceAll ( "y" , "3" ) ; txt = txt . replaceAll ( "3gh3" , "3kh3" ) ; txt = txt . replaceAll ( "gh" , "22" ) ; txt = txt . replaceAll ( "g" , "k" ) ; txt = txt . replaceAll ( "s+" , "S" ) ; txt = txt . replaceAll ( "t+" , "T" ) ; txt = txt . replaceAll ( "p+" , "P" ) ; txt = txt . replaceAll ( "k+" , "K" ) ; txt = txt . replaceAll ( "f+" , "F" ) ; txt = txt . replaceAll ( "m+" , "M" ) ; txt = txt . replaceAll ( "n+" , "N" ) ; txt = txt . replaceAll ( "w3" , "W3" ) ; txt = txt . replaceAll ( "wh3" , "Wh3" ) ; txt = txt . replaceAll ( "w$" , "3" ) ; txt = txt . replaceAll ( "w" , "2" ) ; txt = txt . replaceAll ( "^h" , "A" ) ; txt = txt . replaceAll ( "h" , "2" ) ; txt = txt . replaceAll ( "r3" , "R3" ) ; txt = txt . replaceAll ( "r$" , "3" ) ; txt = txt . replaceAll ( "r" , "2" ) ; txt = txt . replaceAll ( "l3" , "L3" ) ; txt = txt . replaceAll ( "l$" , "3" ) ; txt = txt . replaceAll ( "l" , "2" ) ; txt = txt . replaceAll ( "2" , "" ) ; txt = txt . replaceAll ( "3$" , "A" ) ; txt = txt . replaceAll ( "3" , "" ) ; txt = txt + "111111" + "1111" ; return txt . substring ( 0 , 10 ) ; } |
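Editor's note (not part of the dataset): the whole fix is one regex anchor, "^mb" (start of input) versus "mb$" (end of input), since the spec's rule applies to a trailing "mb". A two-line illustration:
String lamb = "lamb";
System.out.println(lamb.replaceAll("^mb", "m2"));  // "lamb" - the old pattern never matches a trailing mb
System.out.println(lamb.replaceAll("mb$", "m2"));  // "lam2" - the fixed pattern applies the spec's rule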
Math | 102 | src/java/org/apache/commons/math/stat/inference/ChiSquareTestImpl.java | 64 | 81 | chiSquare(double[] expected, long[] observed) is returning incorrect test statistic | ChiSquareTestImpl is returning an incorrect chi-squared value. An implicit assumption of public double chiSquare(double[] expected, long[] observed) is that the sums of expected and observed are equal. That is, in the code:
for (int i = 0; i < observed.length; i++) {
dev = ((double) observed[i] - expected[i]);
sumSq += dev * dev / expected[i];
}
this calculation is only correct if sum(observed) == sum(expected). When they are not equal, one must rescale the expected values by sum(observed) / sum(expected) so that they are.
Ironically, it is an example in the unit test ChiSquareTestTest that highlights the error:
long[] observed1 = { 500, 623, 72, 70, 31 };
double[] expected1 = { 485, 541, 82, 61, 37 };
assertEquals( "chi-square test statistic", 16.4131070362, testStatistic.chiSquare(expected1, observed1), 1E-10);
assertEquals("chi-square p-value", 0.002512096, testStatistic.chiSquareTest(expected1, observed1), 1E-9);
16.413 is not correct because the expected values do not make sense; they should be 521.19403 581.37313 88.11940 65.55224 39.76119, so that the sum of expected equals 1296, which is the sum of observed.
Here is some R code (r-project.org) which proves it:
> o1
[1] 500 623 72 70 31
> e1
[1] 485 541 82 61 37
> chisq.test(o1,p=e1,rescale.p=TRUE)
Chi-squared test for given probabilities
data: o1
X-squared = 9.0233, df = 4, p-value = 0.06052
> chisq.test(o1,p=e1,rescale.p=TRUE)$observed
[1] 500 623 72 70 31
> chisq.test(o1,p=e1,rescale.p=TRUE)$expected
[1] 521.19403 581.37313 88.11940 65.55224 39.76119
| public double chiSquare(double[] expected, long[] observed)
throws IllegalArgumentException {
if ((expected.length < 2) || (expected.length != observed.length)) {
throw new IllegalArgumentException(
"observed, expected array lengths incorrect");
}
if (!isPositive(expected) || !isNonNegative(observed)) {
throw new IllegalArgumentException(
"observed counts must be non-negative and expected counts must be postive");
}
double sumSq = 0.0d;
double dev = 0.0d;
for (int i = 0; i < observed.length; i++) {
dev = ((double) observed[i] - expected[i]);
sumSq += dev * dev / expected[i];
}
return sumSq;
} | public double chiSquare ( double [ ] expected , long [ ] observed ) throws IllegalArgumentException { if ( ( expected . length < 2 ) || ( expected . length != observed . length ) ) { throw new IllegalArgumentException ( "observed, expected array lengths incorrect" ) ; } if ( ! isPositive ( expected ) || ! isNonNegative ( observed ) ) { throw new IllegalArgumentException ( "observed counts must be non-negative and expected counts must be postive" ) ; } double sumSq = 0.0d ; double dev = 0.0d ; for ( int i = 0 ; i < observed . length ; i ++ ) { dev = ( ( double ) observed [ i ] - expected [ i ] ) ; sumSq += dev * dev / expected [ i ] ; } return sumSq ; } | public double chiSquare(double[] expected, long[] observed)
throws IllegalArgumentException {
if ((expected.length < 2) || (expected.length != observed.length)) {
throw new IllegalArgumentException(
"observed, expected array lengths incorrect");
}
if (!isPositive(expected) || !isNonNegative(observed)) {
throw new IllegalArgumentException(
"observed counts must be non-negative and expected counts must be postive");
}
double sumExpected = 0d;
double sumObserved = 0d;
for (int i = 0; i < observed.length; i++) {
sumExpected += expected[i];
sumObserved += observed[i];
}
double ratio = 1.0d;
boolean rescale = false;
if (Math.abs(sumExpected - sumObserved) > 10E-6) {
ratio = sumObserved / sumExpected;
rescale = true;
}
double sumSq = 0.0d;
double dev = 0.0d;
for (int i = 0; i < observed.length; i++) {
if (rescale) {
dev = ((double) observed[i] - ratio * expected[i]);
sumSq += dev * dev / (ratio * expected[i]);
} else {
dev = ((double) observed[i] - expected[i]);
sumSq += dev * dev / expected[i];
}
}
return sumSq;
} | public double chiSquare ( double [ ] expected , long [ ] observed ) throws IllegalArgumentException { if ( ( expected . length < 2 ) || ( expected . length != observed . length ) ) { throw new IllegalArgumentException ( "observed, expected array lengths incorrect" ) ; } if ( ! isPositive ( expected ) || ! isNonNegative ( observed ) ) { throw new IllegalArgumentException ( "observed counts must be non-negative and expected counts must be postive" ) ; } double sumExpected = 0d ; double sumObserved = 0d ; for ( int i = 0 ; i < observed . length ; i ++ ) { sumExpected += expected [ i ] ; sumObserved += observed [ i ] ; } double ratio = 1.0d ; boolean rescale = false ; if ( Math . abs ( sumExpected - sumObserved ) > 10E-6 ) { ratio = sumObserved / sumExpected ; rescale = true ; } double sumSq = 0.0d ; double dev = 0.0d ; for ( int i = 0 ; i < observed . length ; i ++ ) { if ( rescale ) { dev = ( ( double ) observed [ i ] - ratio * expected [ i ] ) ; sumSq += dev * dev / ( ratio * expected [ i ] ) ; } else { dev = ( ( double ) observed [ i ] - expected [ i ] ) ; sumSq += dev * dev / expected [ i ] ; } } return sumSq ; } |
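Editor's note (not part of the dataset): a standalone sketch of the rescaling performed by the fix, using the issue's own numbers; it prints a statistic of about 9.0233, matching the R output quoted in the report. The class name is made up.
public class ChiSquareRescaleSketch {
    public static void main(String[] args) {
        long[] observed = { 500, 623, 72, 70, 31 };      // sums to 1296
        double[] expected = { 485, 541, 82, 61, 37 };    // sums to 1206
        double sumObs = 0, sumExp = 0;
        for (int i = 0; i < observed.length; i++) {
            sumObs += observed[i];
            sumExp += expected[i];
        }
        double ratio = sumObs / sumExp;                  // ~1.0746; scales expected to 521.19, 581.37, ...
        double sumSq = 0;
        for (int i = 0; i < observed.length; i++) {
            double dev = observed[i] - ratio * expected[i];
            sumSq += dev * dev / (ratio * expected[i]);
        }
        System.out.println(sumSq);                       // ~9.0233
    }
}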
Math | 78 | src/main/java/org/apache/commons/math/ode/events/EventState.java | 167 | 263 | during ODE integration, the last event in a pair of very close events may not be detected | When an event follows a previous one very closely, it may be ignored. The occurrence of the bug depends on the side of the bracketing interval that was selected. For example, consider a switching function that is increasing around the first event near t = 90, reaches its maximum, and is decreasing around the second event near t = 135. If an integration step spans from 67.5 to 112.5, the switching function values at the start and end of the step will have opposite signs, so the first event will be detected. The solver will find the event really occurs at 90.0 and will therefore truncate the step at 90.0. The next step will start from where the first step ends, i.e. it will start at 90.0. Let's say this step spans from 90.0 to 153.0. The switching function switches once again in this step.
If the solver for the first event converged to a value slightly before 90.0 (say 89.9999999), then the switch will not be detected because g(89.9999999) and g(153.0) are both negative.
This bug was introduced as of r781157 (2009-06-02) when special handling of events very close to step start was added. | public boolean evaluateStep(final StepInterpolator interpolator)
throws DerivativeException, EventException, ConvergenceException {
try {
forward = interpolator.isForward();
final double t1 = interpolator.getCurrentTime();
final int n = Math.max(1, (int) Math.ceil(Math.abs(t1 - t0) / maxCheckInterval));
final double h = (t1 - t0) / n;
double ta = t0;
double ga = g0;
double tb = t0 + (interpolator.isForward() ? convergence : -convergence);
for (int i = 0; i < n; ++i) {
// evaluate handler value at the end of the substep
tb += h;
interpolator.setInterpolatedTime(tb);
final double gb = handler.g(tb, interpolator.getInterpolatedState());
// check events occurrence
if (g0Positive ^ (gb >= 0)) {
// there is a sign change: an event is expected during this step
// this is a corner case:
// - there was an event near ta,
// - there is another event between ta and tb
// - when ta was computed, convergence was reached on the "wrong side" of the interval
// this implies that the real sign of ga is the same as gb, so we need to slightly
// shift ta to make sure ga and gb get opposite signs and the solver won't complain
// about bracketing
// this should never happen
// variation direction, with respect to the integration direction
increasing = gb >= ga;
final UnivariateRealFunction f = new UnivariateRealFunction() {
public double value(final double t) throws FunctionEvaluationException {
try {
interpolator.setInterpolatedTime(t);
return handler.g(t, interpolator.getInterpolatedState());
} catch (DerivativeException e) {
throw new FunctionEvaluationException(e, t);
} catch (EventException e) {
throw new FunctionEvaluationException(e, t);
}
}
};
final BrentSolver solver = new BrentSolver();
solver.setAbsoluteAccuracy(convergence);
solver.setMaximalIterationCount(maxIterationCount);
final double root = (ta <= tb) ? solver.solve(f, ta, tb) : solver.solve(f, tb, ta);
if ((Math.abs(root - ta) <= convergence) &&
(Math.abs(root - previousEventTime) <= convergence)) {
// we have either found nothing or found (again ?) a past event, we simply ignore it
ta = tb;
ga = gb;
} else if (Double.isNaN(previousEventTime) ||
(Math.abs(previousEventTime - root) > convergence)) {
pendingEventTime = root;
if (pendingEvent && (Math.abs(t1 - pendingEventTime) <= convergence)) {
// we were already waiting for this event which was
// found during a previous call for a step that was
// rejected, this step must now be accepted since it
// properly ends exactly at the event occurrence
return false;
}
// either we were not waiting for the event or it has
// moved in such a way the step cannot be accepted
pendingEvent = true;
return true;
}
} else {
// no sign change: there is no event for now
ta = tb;
ga = gb;
}
}
// no event during the whole step
pendingEvent = false;
pendingEventTime = Double.NaN;
return false;
} catch (FunctionEvaluationException e) {
final Throwable cause = e.getCause();
if ((cause != null) && (cause instanceof DerivativeException)) {
throw (DerivativeException) cause;
} else if ((cause != null) && (cause instanceof EventException)) {
throw (EventException) cause;
}
throw new EventException(e);
}
} | public boolean evaluateStep ( final StepInterpolator interpolator ) throws DerivativeException , EventException , ConvergenceException { try { forward = interpolator . isForward ( ) ; final double t1 = interpolator . getCurrentTime ( ) ; final int n = Math . max ( 1 , ( int ) Math . ceil ( Math . abs ( t1 - t0 ) / maxCheckInterval ) ) ; final double h = ( t1 - t0 ) / n ; double ta = t0 ; double ga = g0 ; double tb = t0 + ( interpolator . isForward ( ) ? convergence : - convergence ) ; for ( int i = 0 ; i < n ; ++ i ) { tb += h ; interpolator . setInterpolatedTime ( tb ) ; final double gb = handler . g ( tb , interpolator . getInterpolatedState ( ) ) ; if ( g0Positive ^ ( gb >= 0 ) ) { increasing = gb >= ga ; final UnivariateRealFunction f = new UnivariateRealFunction ( ) { public double value ( final double t ) throws FunctionEvaluationException { try { interpolator . setInterpolatedTime ( t ) ; return handler . g ( t , interpolator . getInterpolatedState ( ) ) ; } catch ( DerivativeException e ) { throw new FunctionEvaluationException ( e , t ) ; } catch ( EventException e ) { throw new FunctionEvaluationException ( e , t ) ; } } } ; final BrentSolver solver = new BrentSolver ( ) ; solver . setAbsoluteAccuracy ( convergence ) ; solver . setMaximalIterationCount ( maxIterationCount ) ; final double root = ( ta <= tb ) ? solver . solve ( f , ta , tb ) : solver . solve ( f , tb , ta ) ; if ( ( Math . abs ( root - ta ) <= convergence ) && ( Math . abs ( root - previousEventTime ) <= convergence ) ) { ta = tb ; ga = gb ; } else if ( Double . isNaN ( previousEventTime ) || ( Math . abs ( previousEventTime - root ) > convergence ) ) { pendingEventTime = root ; if ( pendingEvent && ( Math . abs ( t1 - pendingEventTime ) <= convergence ) ) { return false ; } pendingEvent = true ; return true ; } } else { ta = tb ; ga = gb ; } } pendingEvent = false ; pendingEventTime = Double . NaN ; return false ; } catch ( FunctionEvaluationException e ) { final Throwable cause = e . getCause ( ) ; if ( ( cause != null ) && ( cause instanceof DerivativeException ) ) { throw ( DerivativeException ) cause ; } else if ( ( cause != null ) && ( cause instanceof EventException ) ) { throw ( EventException ) cause ; } throw new EventException ( e ) ; } } | public boolean evaluateStep(final StepInterpolator interpolator)
throws DerivativeException, EventException, ConvergenceException {
try {
forward = interpolator.isForward();
final double t1 = interpolator.getCurrentTime();
final int n = Math.max(1, (int) Math.ceil(Math.abs(t1 - t0) / maxCheckInterval));
final double h = (t1 - t0) / n;
double ta = t0;
double ga = g0;
double tb = t0 + (interpolator.isForward() ? convergence : -convergence);
for (int i = 0; i < n; ++i) {
// evaluate handler value at the end of the substep
tb += h;
interpolator.setInterpolatedTime(tb);
final double gb = handler.g(tb, interpolator.getInterpolatedState());
// check events occurrence
if (g0Positive ^ (gb >= 0)) {
// there is a sign change: an event is expected during this step
if (ga * gb > 0) {
// this is a corner case:
// - there was an event near ta,
// - there is another event between ta and tb
// - when ta was computed, convergence was reached on the "wrong side" of the interval
// this implies that the real sign of ga is the same as gb, so we need to slightly
// shift ta to make sure ga and gb get opposite signs and the solver won't complain
// about bracketing
final double epsilon = (forward ? 0.25 : -0.25) * convergence;
for (int k = 0; (k < 4) && (ga * gb > 0); ++k) {
ta += epsilon;
interpolator.setInterpolatedTime(ta);
ga = handler.g(ta, interpolator.getInterpolatedState());
}
if (ga * gb > 0) {
// this should never happen
throw MathRuntimeException.createInternalError(null);
}
}
// variation direction, with respect to the integration direction
increasing = gb >= ga;
final UnivariateRealFunction f = new UnivariateRealFunction() {
public double value(final double t) throws FunctionEvaluationException {
try {
interpolator.setInterpolatedTime(t);
return handler.g(t, interpolator.getInterpolatedState());
} catch (DerivativeException e) {
throw new FunctionEvaluationException(e, t);
} catch (EventException e) {
throw new FunctionEvaluationException(e, t);
}
}
};
final BrentSolver solver = new BrentSolver();
solver.setAbsoluteAccuracy(convergence);
solver.setMaximalIterationCount(maxIterationCount);
final double root = (ta <= tb) ? solver.solve(f, ta, tb) : solver.solve(f, tb, ta);
if ((Math.abs(root - ta) <= convergence) &&
(Math.abs(root - previousEventTime) <= convergence)) {
// we have either found nothing or found (again ?) a past event, we simply ignore it
ta = tb;
ga = gb;
} else if (Double.isNaN(previousEventTime) ||
(Math.abs(previousEventTime - root) > convergence)) {
pendingEventTime = root;
if (pendingEvent && (Math.abs(t1 - pendingEventTime) <= convergence)) {
// we were already waiting for this event which was
// found during a previous call for a step that was
// rejected, this step must now be accepted since it
// properly ends exactly at the event occurrence
return false;
}
// either we were not waiting for the event or it has
// moved in such a way the step cannot be accepted
pendingEvent = true;
return true;
}
} else {
// no sign change: there is no event for now
ta = tb;
ga = gb;
}
}
// no event during the whole step
pendingEvent = false;
pendingEventTime = Double.NaN;
return false;
} catch (FunctionEvaluationException e) {
final Throwable cause = e.getCause();
if ((cause != null) && (cause instanceof DerivativeException)) {
throw (DerivativeException) cause;
} else if ((cause != null) && (cause instanceof EventException)) {
throw (EventException) cause;
}
throw new EventException(e);
}
} | public boolean evaluateStep ( final StepInterpolator interpolator ) throws DerivativeException , EventException , ConvergenceException { try { forward = interpolator . isForward ( ) ; final double t1 = interpolator . getCurrentTime ( ) ; final int n = Math . max ( 1 , ( int ) Math . ceil ( Math . abs ( t1 - t0 ) / maxCheckInterval ) ) ; final double h = ( t1 - t0 ) / n ; double ta = t0 ; double ga = g0 ; double tb = t0 + ( interpolator . isForward ( ) ? convergence : - convergence ) ; for ( int i = 0 ; i < n ; ++ i ) { tb += h ; interpolator . setInterpolatedTime ( tb ) ; final double gb = handler . g ( tb , interpolator . getInterpolatedState ( ) ) ; if ( g0Positive ^ ( gb >= 0 ) ) { if ( ga * gb > 0 ) { final double epsilon = ( forward ? 0.25 : - 0.25 ) * convergence ; for ( int k = 0 ; ( k < 4 ) && ( ga * gb > 0 ) ; ++ k ) { ta += epsilon ; interpolator . setInterpolatedTime ( ta ) ; ga = handler . g ( ta , interpolator . getInterpolatedState ( ) ) ; } if ( ga * gb > 0 ) { throw MathRuntimeException . createInternalError ( null ) ; } } increasing = gb >= ga ; final UnivariateRealFunction f = new UnivariateRealFunction ( ) { public double value ( final double t ) throws FunctionEvaluationException { try { interpolator . setInterpolatedTime ( t ) ; return handler . g ( t , interpolator . getInterpolatedState ( ) ) ; } catch ( DerivativeException e ) { throw new FunctionEvaluationException ( e , t ) ; } catch ( EventException e ) { throw new FunctionEvaluationException ( e , t ) ; } } } ; final BrentSolver solver = new BrentSolver ( ) ; solver . setAbsoluteAccuracy ( convergence ) ; solver . setMaximalIterationCount ( maxIterationCount ) ; final double root = ( ta <= tb ) ? solver . solve ( f , ta , tb ) : solver . solve ( f , tb , ta ) ; if ( ( Math . abs ( root - ta ) <= convergence ) && ( Math . abs ( root - previousEventTime ) <= convergence ) ) { ta = tb ; ga = gb ; } else if ( Double . isNaN ( previousEventTime ) || ( Math . abs ( previousEventTime - root ) > convergence ) ) { pendingEventTime = root ; if ( pendingEvent && ( Math . abs ( t1 - pendingEventTime ) <= convergence ) ) { return false ; } pendingEvent = true ; return true ; } } else { ta = tb ; ga = gb ; } } pendingEvent = false ; pendingEventTime = Double . NaN ; return false ; } catch ( FunctionEvaluationException e ) { final Throwable cause = e . getCause ( ) ; if ( ( cause != null ) && ( cause instanceof DerivativeException ) ) { throw ( DerivativeException ) cause ; } else if ( ( cause != null ) && ( cause instanceof EventException ) ) { throw ( EventException ) cause ; } throw new EventException ( e ) ; } } |
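Editor's note (not part of the dataset): the heart of the fix is the bracketing repair loop. In isolation the idea is: when g(ta) and g(tb) have the same sign because the previous root-find converged on the "wrong side" of ta, shift ta by quarter-convergence steps, at most four times, until the signs differ. A sketch under the assumption that g(double) is the switching function and that ta, tb, forward and convergence hold the integrator's state:
double epsilon = (forward ? 0.25 : -0.25) * convergence;
double ga = g(ta);
double gb = g(tb);
for (int k = 0; k < 4 && ga * gb > 0; ++k) {
    ta += epsilon;   // move the left end past the previously found root
    ga = g(ta);
}
if (ga * gb > 0) {
    // still no sign change after shifting a full convergence interval; should never happen
    throw new IllegalStateException("unable to restore bracketing");
}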
Math | 97 | src/java/org/apache/commons/math/analysis/BrentSolver.java | 125 | 152 | BrentSolver throws IllegalArgumentException | I am getting this exception:
java.lang.IllegalArgumentException: Function values at endpoints do not have different signs. Endpoints: [-100000.0,1.7976931348623157E308] Values: [0.0,-101945.04630982173]
at org.apache.commons.math.analysis.BrentSolver.solve(BrentSolver.java:99)
at org.apache.commons.math.analysis.BrentSolver.solve(BrentSolver.java:62)
The exception should not be thrown with values [0.0,-101945.04630982173], because a function value of 0.0 means that endpoint is already a root rather than a bracketing failure.
According to Brent Worden, the algorithm should stop and return 0 as the root instead of throwing an exception.
The problem comes from this method:
public double solve(double min, double max) throws MaxIterationsExceededException,
FunctionEvaluationException {
clearResult();
verifyInterval(min, max);
double yMin = f.value(min);
double yMax = f.value(max);
// Verify bracketing
if (yMin * yMax >= 0) {
throw new IllegalArgumentException
("Function values at endpoints do not have different signs." +
" Endpoints: [" + min + "," + max + "]" +
" Values: [" + yMin + "," + yMax + "]");
}
// solve using only the first endpoint as initial guess
return solve(min, yMin, max, yMax, min, yMin);
}
One way to fix it would be to add this code after the assignment of yMin and yMax:
if (yMin == 0 || yMax == 0) {
return 0;
}
| public double solve(double min, double max) throws MaxIterationsExceededException,
FunctionEvaluationException {
clearResult();
verifyInterval(min, max);
double ret = Double.NaN;
double yMin = f.value(min);
double yMax = f.value(max);
// Verify bracketing
double sign = yMin * yMax;
if (sign >= 0) {
// check if either value is close to a zero
// neither value is close to zero and min and max do not bracket root.
throw new IllegalArgumentException
("Function values at endpoints do not have different signs." +
" Endpoints: [" + min + "," + max + "]" +
" Values: [" + yMin + "," + yMax + "]");
} else {
// solve using only the first endpoint as initial guess
ret = solve(min, yMin, max, yMax, min, yMin);
// either min or max is a root
}
return ret;
} | public double solve ( double min , double max ) throws MaxIterationsExceededException , FunctionEvaluationException { clearResult ( ) ; verifyInterval ( min , max ) ; double ret = Double . NaN ; double yMin = f . value ( min ) ; double yMax = f . value ( max ) ; double sign = yMin * yMax ; if ( sign >= 0 ) { throw new IllegalArgumentException ( "Function values at endpoints do not have different signs." + " Endpoints: [" + min + "," + max + "]" + " Values: [" + yMin + "," + yMax + "]" ) ; } else { ret = solve ( min , yMin , max , yMax , min , yMin ) ; } return ret ; } | public double solve(double min, double max) throws MaxIterationsExceededException,
FunctionEvaluationException {
clearResult();
verifyInterval(min, max);
double ret = Double.NaN;
double yMin = f.value(min);
double yMax = f.value(max);
// Verify bracketing
double sign = yMin * yMax;
if (sign > 0) {
// check if either value is close to a zero
if (Math.abs(yMin) <= functionValueAccuracy) {
setResult(min, 0);
ret = min;
} else if (Math.abs(yMax) <= functionValueAccuracy) {
setResult(max, 0);
ret = max;
} else {
// neither value is close to zero and min and max do not bracket root.
throw new IllegalArgumentException
("Function values at endpoints do not have different signs." +
" Endpoints: [" + min + "," + max + "]" +
" Values: [" + yMin + "," + yMax + "]");
}
} else if (sign < 0){
// solve using only the first endpoint as initial guess
ret = solve(min, yMin, max, yMax, min, yMin);
} else {
// either min or max is a root
if (yMin == 0.0) {
ret = min;
} else {
ret = max;
}
}
return ret;
} | public double solve ( double min , double max ) throws MaxIterationsExceededException , FunctionEvaluationException { clearResult ( ) ; verifyInterval ( min , max ) ; double ret = Double . NaN ; double yMin = f . value ( min ) ; double yMax = f . value ( max ) ; double sign = yMin * yMax ; if ( sign > 0 ) { if ( Math . abs ( yMin ) <= functionValueAccuracy ) { setResult ( min , 0 ) ; ret = min ; } else if ( Math . abs ( yMax ) <= functionValueAccuracy ) { setResult ( max , 0 ) ; ret = max ; } else { throw new IllegalArgumentException ( "Function values at endpoints do not have different signs." + " Endpoints: [" + min + "," + max + "]" + " Values: [" + yMin + "," + yMax + "]" ) ; } } else if ( sign < 0 ) { ret = solve ( min , yMin , max , yMax , min , yMin ) ; } else { if ( yMin == 0.0 ) { ret = min ; } else { ret = max ; } } return ret ; } |
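Editor's note (not part of the dataset): with the fix, an endpoint whose function value is exactly zero is returned as the root, and an endpoint within functionValueAccuracy of zero is accepted as well. A minimal sketch of the zero-endpoint case, assuming the commons-math 1.x API in which the function is passed to the solver's constructor; the function itself is made up, and the enclosing method must declare the solver's checked exceptions.
UnivariateRealFunction f = new UnivariateRealFunction() {
    public double value(double x) {
        return (x <= -100000.0) ? 0.0 : -(x + 100000.0);  // exactly zero at the left endpoint
    }
};
BrentSolver solver = new BrentSolver(f);
double root = solver.solve(-100000.0, Double.MAX_VALUE);  // returns -100000.0 instead of throwing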
Cli | 8 | src/java/org/apache/commons/cli/HelpFormatter.java | 792 | 823 | HelpFormatter wraps incorrectly on every line beyond the first | The method findWrapPos(...) in the HelpFormatter has a couple of bugs in the way that it deals with the "startPos" variable. This causes it to format every line beyond the first line "startPos" too many characters long, beyond the specified width.
To see this, create an option with a long description, and then use the help formatter to print it. The first line will be the correct length. The 2nd, 3rd, etc lines will all be too long.
I don't have a patch (sorry) - but here is a corrected version of the method.
I fixed it in two places - both were using "width + startPos" when they should have been using width.
{code}
protected int findWrapPos(String text, int width, int startPos)
{
int pos = -1;
// the line ends before the max wrap pos or a new line char found
if (((pos = text.indexOf('\n', startPos)) != -1 && pos <= width)
|| ((pos = text.indexOf('\t', startPos)) != -1 && pos <= width))
{
return pos+1;
}
else if ((width) >= text.length())
{
return -1;
}
// look for the last whitespace character before startPos+width
pos = width;
char c;
while ((pos >= startPos) && ((c = text.charAt(pos)) != ' ')
&& (c != '\n') && (c != '\r'))
{
--pos;
}
// if we found it - just return
if (pos > startPos)
{
return pos;
}
// must look for the first whitespace character after startPos
// + width
pos = startPos + width;
while ((pos <= text.length()) && ((c = text.charAt(pos)) != ' ')
&& (c != '\n') && (c != '\r'))
{
++pos;
}
return (pos == text.length()) ? (-1) : pos;
}
{code} | protected StringBuffer renderWrappedText(StringBuffer sb, int width,
int nextLineTabStop, String text)
{
int pos = findWrapPos(text, width, 0);
if (pos == -1)
{
sb.append(rtrim(text));
return sb;
}
sb.append(rtrim(text.substring(0, pos))).append(defaultNewLine);
// all following lines must be padded with nextLineTabStop space
// characters
final String padding = createPadding(nextLineTabStop);
while (true)
{
text = padding + text.substring(pos).trim();
pos = findWrapPos(text, width, nextLineTabStop);
if (pos == -1)
{
sb.append(text);
return sb;
}
sb.append(rtrim(text.substring(0, pos))).append(defaultNewLine);
}
} | protected StringBuffer renderWrappedText ( StringBuffer sb , int width , int nextLineTabStop , String text ) { int pos = findWrapPos ( text , width , 0 ) ; if ( pos == - 1 ) { sb . append ( rtrim ( text ) ) ; return sb ; } sb . append ( rtrim ( text . substring ( 0 , pos ) ) ) . append ( defaultNewLine ) ; final String padding = createPadding ( nextLineTabStop ) ; while ( true ) { text = padding + text . substring ( pos ) . trim ( ) ; pos = findWrapPos ( text , width , nextLineTabStop ) ; if ( pos == - 1 ) { sb . append ( text ) ; return sb ; } sb . append ( rtrim ( text . substring ( 0 , pos ) ) ) . append ( defaultNewLine ) ; } } | protected StringBuffer renderWrappedText(StringBuffer sb, int width,
int nextLineTabStop, String text)
{
int pos = findWrapPos(text, width, 0);
if (pos == -1)
{
sb.append(rtrim(text));
return sb;
}
sb.append(rtrim(text.substring(0, pos))).append(defaultNewLine);
// all following lines must be padded with nextLineTabStop space
// characters
final String padding = createPadding(nextLineTabStop);
while (true)
{
text = padding + text.substring(pos).trim();
pos = findWrapPos(text, width, 0);
if (pos == -1)
{
sb.append(text);
return sb;
}
sb.append(rtrim(text.substring(0, pos))).append(defaultNewLine);
}
} | protected StringBuffer renderWrappedText ( StringBuffer sb , int width , int nextLineTabStop , String text ) { int pos = findWrapPos ( text , width , 0 ) ; if ( pos == - 1 ) { sb . append ( rtrim ( text ) ) ; return sb ; } sb . append ( rtrim ( text . substring ( 0 , pos ) ) ) . append ( defaultNewLine ) ; final String padding = createPadding ( nextLineTabStop ) ; while ( true ) { text = padding + text . substring ( pos ) . trim ( ) ; pos = findWrapPos ( text , width , 0 ) ; if ( pos == - 1 ) { sb . append ( text ) ; return sb ; } sb . append ( rtrim ( text . substring ( 0 , pos ) ) ) . append ( defaultNewLine ) ; } } |
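Editor's note (not part of the dataset): the one-token fix passes 0 instead of nextLineTabStop as the start position for continuation lines; since each loop iteration rebuilds text with the padding already prepended, wrap positions must be measured from the start of the new string. A usage sketch that makes the symptom visible; the application and option names are made up.
Options options = new Options();
options.addOption("v", "verbose", false,
        "a deliberately long description that has to wrap over several lines "
        + "so that the width of the continuation lines can be inspected");
new HelpFormatter().printHelp("myapp", options);  // every wrapped line now fits the requested width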
Cli | 38 | src/main/java/org/apache/commons/cli/DefaultParser.java | 299 | 312 | Optional argument picking up next regular option as its argument | None | private boolean isShortOption(String token)
{
// short options (-S, -SV, -S=V, -SV1=V2, -S1S2)
if (!token.startsWith("-") || token.length() == 1)
{
return false;
}
// remove leading "-" and "=value"
int pos = token.indexOf("=");
String optName = pos == -1 ? token.substring(1) : token.substring(1, pos);
return options.hasShortOption(optName);
// check for several concatenated short options
} | private boolean isShortOption ( String token ) { if ( ! token . startsWith ( "-" ) || token . length ( ) == 1 ) { return false ; } int pos = token . indexOf ( "=" ) ; String optName = pos == - 1 ? token . substring ( 1 ) : token . substring ( 1 , pos ) ; return options . hasShortOption ( optName ) ; } | private boolean isShortOption(String token)
{
// short options (-S, -SV, -S=V, -SV1=V2, -S1S2)
if (!token.startsWith("-") || token.length() == 1)
{
return false;
}
// remove leading "-" and "=value"
int pos = token.indexOf("=");
String optName = pos == -1 ? token.substring(1) : token.substring(1, pos);
if (options.hasShortOption(optName))
{
return true;
}
// check for several concatenated short options
return optName.length() > 0 && options.hasShortOption(String.valueOf(optName.charAt(0)));
} | private boolean isShortOption ( String token ) { if ( ! token . startsWith ( "-" ) || token . length ( ) == 1 ) { return false ; } int pos = token . indexOf ( "=" ) ; String optName = pos == - 1 ? token . substring ( 1 ) : token . substring ( 1 , pos ) ; if ( options . hasShortOption ( optName ) ) { return true ; } return optName . length ( ) > 0 && options . hasShortOption ( String . valueOf ( optName . charAt ( 0 ) ) ) ; } |
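Editor's note (not part of the dataset): after the fix, a token such as -t1 is recognized as a short option with a concatenated value, because its first character after the dash is a registered option; it can therefore no longer be swallowed as the argument of a preceding option with an optional argument. A sketch assuming the commons-cli 1.3+ builder API; the option letters are made up and parse() throws ParseException.
Options options = new Options();
options.addOption(Option.builder("x").hasArg().optionalArg(true).build());
options.addOption(Option.builder("t").hasArg().build());
CommandLine cl = new DefaultParser().parse(options, new String[] { "-x", "-t1" });
System.out.println(cl.getOptionValue("t"));  // "1" - no longer consumed as the argument of -x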
Compress | 36 | src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java | 901 | 916 | Calling SevenZFile.read() on empty SevenZArchiveEntry throws IllegalStateException | I'm pretty sure COMPRESS-340 breaks reading empty archive entries. When getNextEntry() is called and that entry has no content, the code jumps into the first block at line 830 (SevenZFile.class), clearing the deferredBlockStreams. When entry.read(...) is called afterwards, an IllegalStateException ("No current 7z entry (call getNextEntry() first).") is thrown. IMHO, there should be another check for entry.getSize() == 0.
This worked correctly up until 1.10.
| private InputStream getCurrentStream() throws IOException {
if (deferredBlockStreams.isEmpty()) {
throw new IllegalStateException("No current 7z entry (call getNextEntry() first).");
}
while (deferredBlockStreams.size() > 1) {
// In solid compression mode we need to decompress all leading folder'
// streams to get access to an entry. We defer this until really needed
// so that entire blocks can be skipped without wasting time for decompression.
final InputStream stream = deferredBlockStreams.remove(0);
IOUtils.skip(stream, Long.MAX_VALUE);
stream.close();
}
return deferredBlockStreams.get(0);
} | private InputStream getCurrentStream ( ) throws IOException { if ( deferredBlockStreams . isEmpty ( ) ) { throw new IllegalStateException ( "No current 7z entry (call getNextEntry() first)." ) ; } while ( deferredBlockStreams . size ( ) > 1 ) { final InputStream stream = deferredBlockStreams . remove ( 0 ) ; IOUtils . skip ( stream , Long . MAX_VALUE ) ; stream . close ( ) ; } return deferredBlockStreams . get ( 0 ) ; } | private InputStream getCurrentStream() throws IOException {
if (archive.files[currentEntryIndex].getSize() == 0) {
return new ByteArrayInputStream(new byte[0]);
}
if (deferredBlockStreams.isEmpty()) {
throw new IllegalStateException("No current 7z entry (call getNextEntry() first).");
}
while (deferredBlockStreams.size() > 1) {
// In solid compression mode we need to decompress all leading folder'
// streams to get access to an entry. We defer this until really needed
// so that entire blocks can be skipped without wasting time for decompression.
final InputStream stream = deferredBlockStreams.remove(0);
IOUtils.skip(stream, Long.MAX_VALUE);
stream.close();
}
return deferredBlockStreams.get(0);
} | private InputStream getCurrentStream ( ) throws IOException { if ( archive . files [ currentEntryIndex ] . getSize ( ) == 0 ) { return new ByteArrayInputStream ( new byte [ 0 ] ) ; } if ( deferredBlockStreams . isEmpty ( ) ) { throw new IllegalStateException ( "No current 7z entry (call getNextEntry() first)." ) ; } while ( deferredBlockStreams . size ( ) > 1 ) { final InputStream stream = deferredBlockStreams . remove ( 0 ) ; IOUtils . skip ( stream , Long . MAX_VALUE ) ; stream . close ( ) ; } return deferredBlockStreams . get ( 0 ) ; } |
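Editor's note (not part of the dataset): with the fix, a zero-length entry is served from an empty ByteArrayInputStream, so reading it reports end of stream instead of throwing. A usage sketch, assuming Commons Compress 1.11 or later; the archive path is made up and the surrounding code must handle IOException.
SevenZFile sevenZ = new SevenZFile(new File("archive-with-empty-entry.7z"));
SevenZArchiveEntry entry;
while ((entry = sevenZ.getNextEntry()) != null) {
    byte[] buf = new byte[8192];
    int n = sevenZ.read(buf, 0, buf.length);  // -1 immediately for the empty entry
    System.out.println(entry.getName() + ": first read returned " + n);
}
sevenZ.close();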
Math | 39 | src/main/java/org/apache/commons/math/ode/nonstiff/EmbeddedRungeKuttaIntegrator.java | 190 | 328 | too large first step with embedded Runge-Kutta integrators (Dormand-Prince 8(5,3) ...) | Adaptive step size integrators compute the first step size by themselves if it is not provided.
For the embedded Runge-Kutta type, this step size is not checked against the integration range, so if the integration range is extremely short, the integrator may evaluate the function outside of the range (and in fact it afterward tries to go back, and fails to stop). Gragg-Bulirsch-Stoer integrators do not have this problem; the step size is checked and truncated if needed. | @Override
public void integrate(final ExpandableStatefulODE equations, final double t)
throws MathIllegalStateException, MathIllegalArgumentException {
sanityChecks(equations, t);
setEquations(equations);
final boolean forward = t > equations.getTime();
// create some internal working arrays
final double[] y0 = equations.getCompleteState();
final double[] y = y0.clone();
final int stages = c.length + 1;
final double[][] yDotK = new double[stages][y.length];
final double[] yTmp = y0.clone();
final double[] yDotTmp = new double[y.length];
// set up an interpolator sharing the integrator arrays
final RungeKuttaStepInterpolator interpolator = (RungeKuttaStepInterpolator) prototype.copy();
interpolator.reinitialize(this, yTmp, yDotK, forward,
equations.getPrimaryMapper(), equations.getSecondaryMappers());
interpolator.storeTime(equations.getTime());
// set up integration control objects
stepStart = equations.getTime();
double hNew = 0;
boolean firstTime = true;
initIntegration(equations.getTime(), y0, t);
// main integration loop
isLastStep = false;
do {
interpolator.shift();
// iterate over step size, ensuring local normalized error is smaller than 1
double error = 10;
while (error >= 1.0) {
if (firstTime || !fsal) {
// first stage
computeDerivatives(stepStart, y, yDotK[0]);
}
if (firstTime) {
final double[] scale = new double[mainSetDimension];
if (vecAbsoluteTolerance == null) {
for (int i = 0; i < scale.length; ++i) {
scale[i] = scalAbsoluteTolerance + scalRelativeTolerance * FastMath.abs(y[i]);
}
} else {
for (int i = 0; i < scale.length; ++i) {
scale[i] = vecAbsoluteTolerance[i] + vecRelativeTolerance[i] * FastMath.abs(y[i]);
}
}
hNew = initializeStep(forward, getOrder(), scale,
stepStart, y, yDotK[0], yTmp, yDotK[1]);
firstTime = false;
}
stepSize = hNew;
// next stages
for (int k = 1; k < stages; ++k) {
for (int j = 0; j < y0.length; ++j) {
double sum = a[k-1][0] * yDotK[0][j];
for (int l = 1; l < k; ++l) {
sum += a[k-1][l] * yDotK[l][j];
}
yTmp[j] = y[j] + stepSize * sum;
}
computeDerivatives(stepStart + c[k-1] * stepSize, yTmp, yDotK[k]);
}
// estimate the state at the end of the step
for (int j = 0; j < y0.length; ++j) {
double sum = b[0] * yDotK[0][j];
for (int l = 1; l < stages; ++l) {
sum += b[l] * yDotK[l][j];
}
yTmp[j] = y[j] + stepSize * sum;
}
// estimate the error at the end of the step
error = estimateError(yDotK, y, yTmp, stepSize);
if (error >= 1.0) {
// reject the step and attempt to reduce error by stepsize control
final double factor =
FastMath.min(maxGrowth,
FastMath.max(minReduction, safety * FastMath.pow(error, exp)));
hNew = filterStep(stepSize * factor, forward, false);
}
}
// local error is small enough: accept the step, trigger events and step handlers
interpolator.storeTime(stepStart + stepSize);
System.arraycopy(yTmp, 0, y, 0, y0.length);
System.arraycopy(yDotK[stages - 1], 0, yDotTmp, 0, y0.length);
stepStart = acceptStep(interpolator, y, yDotTmp, t);
System.arraycopy(y, 0, yTmp, 0, y.length);
if (!isLastStep) {
// prepare next step
interpolator.storeTime(stepStart);
if (fsal) {
// save the last evaluation for the next step
System.arraycopy(yDotTmp, 0, yDotK[0], 0, y0.length);
}
// stepsize control for next step
final double factor =
FastMath.min(maxGrowth, FastMath.max(minReduction, safety * FastMath.pow(error, exp)));
final double scaledH = stepSize * factor;
final double nextT = stepStart + scaledH;
final boolean nextIsLast = forward ? (nextT >= t) : (nextT <= t);
hNew = filterStep(scaledH, forward, nextIsLast);
final double filteredNextT = stepStart + hNew;
final boolean filteredNextIsLast = forward ? (filteredNextT >= t) : (filteredNextT <= t);
if (filteredNextIsLast) {
hNew = t - stepStart;
}
}
} while (!isLastStep);
// dispatch results
equations.setTime(stepStart);
equations.setCompleteState(y);
resetInternalState();
} | @ Override public void integrate ( final ExpandableStatefulODE equations , final double t ) throws MathIllegalStateException , MathIllegalArgumentException { sanityChecks ( equations , t ) ; setEquations ( equations ) ; final boolean forward = t > equations . getTime ( ) ; final double [ ] y0 = equations . getCompleteState ( ) ; final double [ ] y = y0 . clone ( ) ; final int stages = c . length + 1 ; final double [ ] [ ] yDotK = new double [ stages ] [ y . length ] ; final double [ ] yTmp = y0 . clone ( ) ; final double [ ] yDotTmp = new double [ y . length ] ; final RungeKuttaStepInterpolator interpolator = ( RungeKuttaStepInterpolator ) prototype . copy ( ) ; interpolator . reinitialize ( this , yTmp , yDotK , forward , equations . getPrimaryMapper ( ) , equations . getSecondaryMappers ( ) ) ; interpolator . storeTime ( equations . getTime ( ) ) ; stepStart = equations . getTime ( ) ; double hNew = 0 ; boolean firstTime = true ; initIntegration ( equations . getTime ( ) , y0 , t ) ; isLastStep = false ; do { interpolator . shift ( ) ; double error = 10 ; while ( error >= 1.0 ) { if ( firstTime || ! fsal ) { computeDerivatives ( stepStart , y , yDotK [ 0 ] ) ; } if ( firstTime ) { final double [ ] scale = new double [ mainSetDimension ] ; if ( vecAbsoluteTolerance == null ) { for ( int i = 0 ; i < scale . length ; ++ i ) { scale [ i ] = scalAbsoluteTolerance + scalRelativeTolerance * FastMath . abs ( y [ i ] ) ; } } else { for ( int i = 0 ; i < scale . length ; ++ i ) { scale [ i ] = vecAbsoluteTolerance [ i ] + vecRelativeTolerance [ i ] * FastMath . abs ( y [ i ] ) ; } } hNew = initializeStep ( forward , getOrder ( ) , scale , stepStart , y , yDotK [ 0 ] , yTmp , yDotK [ 1 ] ) ; firstTime = false ; } stepSize = hNew ; for ( int k = 1 ; k < stages ; ++ k ) { for ( int j = 0 ; j < y0 . length ; ++ j ) { double sum = a [ k - 1 ] [ 0 ] * yDotK [ 0 ] [ j ] ; for ( int l = 1 ; l < k ; ++ l ) { sum += a [ k - 1 ] [ l ] * yDotK [ l ] [ j ] ; } yTmp [ j ] = y [ j ] + stepSize * sum ; } computeDerivatives ( stepStart + c [ k - 1 ] * stepSize , yTmp , yDotK [ k ] ) ; } for ( int j = 0 ; j < y0 . length ; ++ j ) { double sum = b [ 0 ] * yDotK [ 0 ] [ j ] ; for ( int l = 1 ; l < stages ; ++ l ) { sum += b [ l ] * yDotK [ l ] [ j ] ; } yTmp [ j ] = y [ j ] + stepSize * sum ; } error = estimateError ( yDotK , y , yTmp , stepSize ) ; if ( error >= 1.0 ) { final double factor = FastMath . min ( maxGrowth , FastMath . max ( minReduction , safety * FastMath . pow ( error , exp ) ) ) ; hNew = filterStep ( stepSize * factor , forward , false ) ; } } interpolator . storeTime ( stepStart + stepSize ) ; System . arraycopy ( yTmp , 0 , y , 0 , y0 . length ) ; System . arraycopy ( yDotK [ stages - 1 ] , 0 , yDotTmp , 0 , y0 . length ) ; stepStart = acceptStep ( interpolator , y , yDotTmp , t ) ; System . arraycopy ( y , 0 , yTmp , 0 , y . length ) ; if ( ! isLastStep ) { interpolator . storeTime ( stepStart ) ; if ( fsal ) { System . arraycopy ( yDotTmp , 0 , yDotK [ 0 ] , 0 , y0 . length ) ; } final double factor = FastMath . min ( maxGrowth , FastMath . max ( minReduction , safety * FastMath . pow ( error , exp ) ) ) ; final double scaledH = stepSize * factor ; final double nextT = stepStart + scaledH ; final boolean nextIsLast = forward ? ( nextT >= t ) : ( nextT <= t ) ; hNew = filterStep ( scaledH , forward , nextIsLast ) ; final double filteredNextT = stepStart + hNew ; final boolean filteredNextIsLast = forward ? ( filteredNextT >= t ) : ( filteredNextT <= t ) ; if ( filteredNextIsLast ) { hNew = t - stepStart ; } } } while ( ! isLastStep ) ; equations . setTime ( stepStart ) ; equations . setCompleteState ( y ) ; resetInternalState ( ) ; } | @Override
public void integrate(final ExpandableStatefulODE equations, final double t)
throws MathIllegalStateException, MathIllegalArgumentException {
sanityChecks(equations, t);
setEquations(equations);
final boolean forward = t > equations.getTime();
// create some internal working arrays
final double[] y0 = equations.getCompleteState();
final double[] y = y0.clone();
final int stages = c.length + 1;
final double[][] yDotK = new double[stages][y.length];
final double[] yTmp = y0.clone();
final double[] yDotTmp = new double[y.length];
// set up an interpolator sharing the integrator arrays
final RungeKuttaStepInterpolator interpolator = (RungeKuttaStepInterpolator) prototype.copy();
interpolator.reinitialize(this, yTmp, yDotK, forward,
equations.getPrimaryMapper(), equations.getSecondaryMappers());
interpolator.storeTime(equations.getTime());
// set up integration control objects
stepStart = equations.getTime();
double hNew = 0;
boolean firstTime = true;
initIntegration(equations.getTime(), y0, t);
// main integration loop
isLastStep = false;
do {
interpolator.shift();
// iterate over step size, ensuring local normalized error is smaller than 1
double error = 10;
while (error >= 1.0) {
if (firstTime || !fsal) {
// first stage
computeDerivatives(stepStart, y, yDotK[0]);
}
if (firstTime) {
final double[] scale = new double[mainSetDimension];
if (vecAbsoluteTolerance == null) {
for (int i = 0; i < scale.length; ++i) {
scale[i] = scalAbsoluteTolerance + scalRelativeTolerance * FastMath.abs(y[i]);
}
} else {
for (int i = 0; i < scale.length; ++i) {
scale[i] = vecAbsoluteTolerance[i] + vecRelativeTolerance[i] * FastMath.abs(y[i]);
}
}
hNew = initializeStep(forward, getOrder(), scale,
stepStart, y, yDotK[0], yTmp, yDotK[1]);
firstTime = false;
}
stepSize = hNew;
if (forward) {
if (stepStart + stepSize >= t) {
stepSize = t - stepStart;
}
} else {
if (stepStart + stepSize <= t) {
stepSize = t - stepStart;
}
}
// next stages
for (int k = 1; k < stages; ++k) {
for (int j = 0; j < y0.length; ++j) {
double sum = a[k-1][0] * yDotK[0][j];
for (int l = 1; l < k; ++l) {
sum += a[k-1][l] * yDotK[l][j];
}
yTmp[j] = y[j] + stepSize * sum;
}
computeDerivatives(stepStart + c[k-1] * stepSize, yTmp, yDotK[k]);
}
// estimate the state at the end of the step
for (int j = 0; j < y0.length; ++j) {
double sum = b[0] * yDotK[0][j];
for (int l = 1; l < stages; ++l) {
sum += b[l] * yDotK[l][j];
}
yTmp[j] = y[j] + stepSize * sum;
}
// estimate the error at the end of the step
error = estimateError(yDotK, y, yTmp, stepSize);
if (error >= 1.0) {
// reject the step and attempt to reduce error by stepsize control
final double factor =
FastMath.min(maxGrowth,
FastMath.max(minReduction, safety * FastMath.pow(error, exp)));
hNew = filterStep(stepSize * factor, forward, false);
}
}
// local error is small enough: accept the step, trigger events and step handlers
interpolator.storeTime(stepStart + stepSize);
System.arraycopy(yTmp, 0, y, 0, y0.length);
System.arraycopy(yDotK[stages - 1], 0, yDotTmp, 0, y0.length);
stepStart = acceptStep(interpolator, y, yDotTmp, t);
System.arraycopy(y, 0, yTmp, 0, y.length);
if (!isLastStep) {
// prepare next step
interpolator.storeTime(stepStart);
if (fsal) {
// save the last evaluation for the next step
System.arraycopy(yDotTmp, 0, yDotK[0], 0, y0.length);
}
// stepsize control for next step
final double factor =
FastMath.min(maxGrowth, FastMath.max(minReduction, safety * FastMath.pow(error, exp)));
final double scaledH = stepSize * factor;
final double nextT = stepStart + scaledH;
final boolean nextIsLast = forward ? (nextT >= t) : (nextT <= t);
hNew = filterStep(scaledH, forward, nextIsLast);
final double filteredNextT = stepStart + hNew;
final boolean filteredNextIsLast = forward ? (filteredNextT >= t) : (filteredNextT <= t);
if (filteredNextIsLast) {
hNew = t - stepStart;
}
}
} while (!isLastStep);
// dispatch results
equations.setTime(stepStart);
equations.setCompleteState(y);
resetInternalState();
} | @ Override public void integrate ( final ExpandableStatefulODE equations , final double t ) throws MathIllegalStateException , MathIllegalArgumentException { sanityChecks ( equations , t ) ; setEquations ( equations ) ; final boolean forward = t > equations . getTime ( ) ; final double [ ] y0 = equations . getCompleteState ( ) ; final double [ ] y = y0 . clone ( ) ; final int stages = c . length + 1 ; final double [ ] [ ] yDotK = new double [ stages ] [ y . length ] ; final double [ ] yTmp = y0 . clone ( ) ; final double [ ] yDotTmp = new double [ y . length ] ; final RungeKuttaStepInterpolator interpolator = ( RungeKuttaStepInterpolator ) prototype . copy ( ) ; interpolator . reinitialize ( this , yTmp , yDotK , forward , equations . getPrimaryMapper ( ) , equations . getSecondaryMappers ( ) ) ; interpolator . storeTime ( equations . getTime ( ) ) ; stepStart = equations . getTime ( ) ; double hNew = 0 ; boolean firstTime = true ; initIntegration ( equations . getTime ( ) , y0 , t ) ; isLastStep = false ; do { interpolator . shift ( ) ; double error = 10 ; while ( error >= 1.0 ) { if ( firstTime || ! fsal ) { computeDerivatives ( stepStart , y , yDotK [ 0 ] ) ; } if ( firstTime ) { final double [ ] scale = new double [ mainSetDimension ] ; if ( vecAbsoluteTolerance == null ) { for ( int i = 0 ; i < scale . length ; ++ i ) { scale [ i ] = scalAbsoluteTolerance + scalRelativeTolerance * FastMath . abs ( y [ i ] ) ; } } else { for ( int i = 0 ; i < scale . length ; ++ i ) { scale [ i ] = vecAbsoluteTolerance [ i ] + vecRelativeTolerance [ i ] * FastMath . abs ( y [ i ] ) ; } } hNew = initializeStep ( forward , getOrder ( ) , scale , stepStart , y , yDotK [ 0 ] , yTmp , yDotK [ 1 ] ) ; firstTime = false ; } stepSize = hNew ; if ( forward ) { if ( stepStart + stepSize >= t ) { stepSize = t - stepStart ; } } else { if ( stepStart + stepSize <= t ) { stepSize = t - stepStart ; } } for ( int k = 1 ; k < stages ; ++ k ) { for ( int j = 0 ; j < y0 . length ; ++ j ) { double sum = a [ k - 1 ] [ 0 ] * yDotK [ 0 ] [ j ] ; for ( int l = 1 ; l < k ; ++ l ) { sum += a [ k - 1 ] [ l ] * yDotK [ l ] [ j ] ; } yTmp [ j ] = y [ j ] + stepSize * sum ; } computeDerivatives ( stepStart + c [ k - 1 ] * stepSize , yTmp , yDotK [ k ] ) ; } for ( int j = 0 ; j < y0 . length ; ++ j ) { double sum = b [ 0 ] * yDotK [ 0 ] [ j ] ; for ( int l = 1 ; l < stages ; ++ l ) { sum += b [ l ] * yDotK [ l ] [ j ] ; } yTmp [ j ] = y [ j ] + stepSize * sum ; } error = estimateError ( yDotK , y , yTmp , stepSize ) ; if ( error >= 1.0 ) { final double factor = FastMath . min ( maxGrowth , FastMath . max ( minReduction , safety * FastMath . pow ( error , exp ) ) ) ; hNew = filterStep ( stepSize * factor , forward , false ) ; } } interpolator . storeTime ( stepStart + stepSize ) ; System . arraycopy ( yTmp , 0 , y , 0 , y0 . length ) ; System . arraycopy ( yDotK [ stages - 1 ] , 0 , yDotTmp , 0 , y0 . length ) ; stepStart = acceptStep ( interpolator , y , yDotTmp , t ) ; System . arraycopy ( y , 0 , yTmp , 0 , y . length ) ; if ( ! isLastStep ) { interpolator . storeTime ( stepStart ) ; if ( fsal ) { System . arraycopy ( yDotTmp , 0 , yDotK [ 0 ] , 0 , y0 . length ) ; } final double factor = FastMath . min ( maxGrowth , FastMath . max ( minReduction , safety * FastMath . pow ( error , exp ) ) ) ; final double scaledH = stepSize * factor ; final double nextT = stepStart + scaledH ; final boolean nextIsLast = forward ? 
( nextT >= t ) : ( nextT <= t ) ; hNew = filterStep ( scaledH , forward , nextIsLast ) ; final double filteredNextT = stepStart + hNew ; final boolean filteredNextIsLast = forward ? ( filteredNextT >= t ) : ( filteredNextT <= t ) ; if ( filteredNextIsLast ) { hNew = t - stepStart ; } } } while ( ! isLastStep ) ; equations . setTime ( stepStart ) ; equations . setCompleteState ( y ) ; resetInternalState ( ) ; } |
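The fix in the row above adds a clamp that keeps the proposed step from overshooting the integration end in either direction. A minimal standalone sketch of that guard; the method name clipStep and the driver are ours, not part of commons-math:
{code}
public class StepClipSketch {

    /** Clamp a proposed step so that stepStart + step never passes t. */
    static double clipStep(double stepStart, double step, double t, boolean forward) {
        if (forward) {
            if (stepStart + step >= t) {
                return t - stepStart; // shorten the step to land exactly on t
            }
        } else {
            if (stepStart + step <= t) {
                return t - stepStart; // negative step shortened to land exactly on t
            }
        }
        return step;
    }

    public static void main(String[] args) {
        // forward: a 0.5 step from 9.8 toward t = 10.0 is clipped to 0.2
        System.out.println(clipStep(9.8, 0.5, 10.0, true));
        // backward: a -0.5 step from 0.3 toward t = 0.0 is clipped to -0.3
        System.out.println(clipStep(0.3, -0.5, 0.0, false));
    }
}
{code}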
Codec | 5 | src/java/org/apache/commons/codec/binary/Base64.java | 550 | 599 | Base64InputStream causes NullPointerException on some input | Certain (malformed?) input to {{Base64InputStream}} causes a {{NullPointerException}} in {{Base64.decode}}.
The exception occurs when {{Base64.decode}} is entered with the following conditions:
* {{buffer}} is {{null}}
* {{modulus}} is {{3}} from a previous entry.
* {{inAvail}} is {{-1}} because {{Base64InputStream.read}} reached EOF on line 150.
Under these conditions, {{Base64.decode}} reaches line 581 with {{buffer}} still {{null}} and throws a {{NullPointerException}}.
Here is some input data that will trigger it:
{noformat}
H4sIAAAAAAAAAFvzloG1uIhBKiuxLFGvODW5tCizpFIvODM9LzXFPykrNbmE8//eDC2bq/+ZGJij
GdiT8/NKUvNKShiYop2iGTiLgQoTS0qLUgsZ6hgYfRh4SjJSE3PS84GmZOSWMAj5gMzVz0nMS9cP
LinKzEu3rigoLQJpXvNZ/AcbR8gDJgaGigIGBqbLayAuMUxNKdVLTyxJTc7QS07WSyzKLC7JL8lJ
1StJLErMKynNSdTLyUxOzStO1fOB0AwQwMjEwOrJwJMbn+mSWFkclpiTmeID4joml2SWpYZk5qaW
MEj45Bel62flpyTqlwAF9F2A9oBkrMEqnYtSoXyob1hy4z1dShgEIL4oLcnM0Q8N9XQBqubKjYfa
DjTV1AfoZn2Im/WTk/XhbtaHu1kf6mZ9T5g2YED8BwKgj8WAbtIDuUkP5CY9mJt22FSkZEXf/QkK
oCIGeVRFSYlA/zsBCZjq//9/PvSP1VvMxMDkxcCe6ZuZk5NZ7MPAnemcUZSfl5+Tn15ZwiCF5n2E
nDUoDhjVfhrpNABdpI5qWTJYmZ5nsD9Cg0pwSWnSyhOCaYXmAerMoDgsxnAkzG1R+XmpYPXL9Bln
1RhJPQarL+dgYNM1MLUyMKioKAYFOCvIBb8vl8qCOFxA4/jAiRIU7HqgYN8zk/n7jNxWfbAXeXJS
E4tLgOnUKbOk2IuBOzcfzqso6M1QmrzKkedPzcYO3QZu129As4xITlZI6QqYFNhz44v9EkFpCGua
LmEQdkktS83JL8gF5g4FqBGlIJ+wAI1gKJtZEvTws/j3FluPu4lcr7ra9OfHKXIZNTa4FPd8n33J
QXPFLte9AZe5uBaJvGrKVl+rbrTaXDZO6NwU7gnHOVgzzsmnGX2Y5GDqrst8wcTear0Ab1yj6PrD
F977vL/5iUMg773My5qLLK8OVAu6Tz7Xcyjy9Uym02Z/+xY7m85nYo/t4E93FXFKOf9/a3X78neS
jE5Tu066K3Mdf17m66mbpXN9y34ZZ3ErRobfn+RfzVBIWj0vc82vY7YPvM5eLHHOulV77M6CoB4h
xb/FjHWHRR+ldb6QmSP1ROGwGs+nx2quwitN7+mIpsRFhU37JPRoZe2ZjiX/70j7CS1tz51YP/3W
/xfnV2i/4rAoYeAN9nA0NTQqBxYMQcGOAG5
{noformat}
Say this is read from a file with a {{byte[]}} of size {{1024}} using {{Base64InputStream.read(byte[])}}. In the first iteration, all {{1190}} bytes get read into {{buf}}; it then enters {{Base64.setInitialBuffer}}, assigns the {{byte[1024]}} to {{buffer}}, and does a round of decoding. When it then enters {{Base64.readResults}} on line {{162}} in {{Base64InputStream}}, it sets {{buffer}} to {{null}}, {{modulus}} still holds the left-over value {{3}}, and the NPE occurs on the next iteration.
{{Base64InputStream}} could avoid this by returning right away on EOF ({{-1}}), but I think the real fix needs to happen in {{Base64}}, since this same situation could be created by direct use. My guess is that either more needs to happen in the body of the {{if}} on line {{542}} (set {{modulus}} to {{0}}?) or the condition on line {{573}} is flawed and needs adjusting.
| void decode(byte[] in, int inPos, int inAvail) {
if (eof) {
return;
}
if (inAvail < 0) {
eof = true;
}
for (int i = 0; i < inAvail; i++) {
if (buffer == null || buffer.length - pos < decodeSize) {
resizeBuffer();
}
byte b = in[inPos++];
if (b == PAD) {
// We're done.
eof = true;
break;
} else {
if (b >= 0 && b < DECODE_TABLE.length) {
int result = DECODE_TABLE[b];
if (result >= 0) {
modulus = (++modulus) % 4;
x = (x << 6) + result;
if (modulus == 0) {
buffer[pos++] = (byte) ((x >> 16) & MASK_8BITS);
buffer[pos++] = (byte) ((x >> 8) & MASK_8BITS);
buffer[pos++] = (byte) (x & MASK_8BITS);
}
}
}
}
}
// Two forms of EOF as far as base64 decoder is concerned: actual
// EOF (-1) and first time '=' character is encountered in stream.
// This approach makes the '=' padding characters completely optional.
if (eof && modulus != 0) {
x = x << 6;
switch (modulus) {
case 2 :
x = x << 6;
buffer[pos++] = (byte) ((x >> 16) & MASK_8BITS);
break;
case 3 :
buffer[pos++] = (byte) ((x >> 16) & MASK_8BITS);
buffer[pos++] = (byte) ((x >> 8) & MASK_8BITS);
break;
}
}
} | void decode ( byte [ ] in , int inPos , int inAvail ) { if ( eof ) { return ; } if ( inAvail < 0 ) { eof = true ; } for ( int i = 0 ; i < inAvail ; i ++ ) { if ( buffer == null || buffer . length - pos < decodeSize ) { resizeBuffer ( ) ; } byte b = in [ inPos ++ ] ; if ( b == PAD ) { eof = true ; break ; } else { if ( b >= 0 && b < DECODE_TABLE . length ) { int result = DECODE_TABLE [ b ] ; if ( result >= 0 ) { modulus = ( ++ modulus ) % 4 ; x = ( x << 6 ) + result ; if ( modulus == 0 ) { buffer [ pos ++ ] = ( byte ) ( ( x >> 16 ) & MASK_8BITS ) ; buffer [ pos ++ ] = ( byte ) ( ( x >> 8 ) & MASK_8BITS ) ; buffer [ pos ++ ] = ( byte ) ( x & MASK_8BITS ) ; } } } } } if ( eof && modulus != 0 ) { x = x << 6 ; switch ( modulus ) { case 2 : x = x << 6 ; buffer [ pos ++ ] = ( byte ) ( ( x >> 16 ) & MASK_8BITS ) ; break ; case 3 : buffer [ pos ++ ] = ( byte ) ( ( x >> 16 ) & MASK_8BITS ) ; buffer [ pos ++ ] = ( byte ) ( ( x >> 8 ) & MASK_8BITS ) ; break ; } } } | void decode(byte[] in, int inPos, int inAvail) {
if (eof) {
return;
}
if (inAvail < 0) {
eof = true;
}
for (int i = 0; i < inAvail; i++) {
if (buffer == null || buffer.length - pos < decodeSize) {
resizeBuffer();
}
byte b = in[inPos++];
if (b == PAD) {
// We're done.
eof = true;
break;
} else {
if (b >= 0 && b < DECODE_TABLE.length) {
int result = DECODE_TABLE[b];
if (result >= 0) {
modulus = (++modulus) % 4;
x = (x << 6) + result;
if (modulus == 0) {
buffer[pos++] = (byte) ((x >> 16) & MASK_8BITS);
buffer[pos++] = (byte) ((x >> 8) & MASK_8BITS);
buffer[pos++] = (byte) (x & MASK_8BITS);
}
}
}
}
}
// Two forms of EOF as far as base64 decoder is concerned: actual
// EOF (-1) and first time '=' character is encountered in stream.
// This approach makes the '=' padding characters completely optional.
if (eof && modulus != 0) {
if (buffer == null || buffer.length - pos < decodeSize) {
resizeBuffer();
}
x = x << 6;
switch (modulus) {
case 2 :
x = x << 6;
buffer[pos++] = (byte) ((x >> 16) & MASK_8BITS);
break;
case 3 :
buffer[pos++] = (byte) ((x >> 16) & MASK_8BITS);
buffer[pos++] = (byte) ((x >> 8) & MASK_8BITS);
break;
}
}
} | void decode ( byte [ ] in , int inPos , int inAvail ) { if ( eof ) { return ; } if ( inAvail < 0 ) { eof = true ; } for ( int i = 0 ; i < inAvail ; i ++ ) { if ( buffer == null || buffer . length - pos < decodeSize ) { resizeBuffer ( ) ; } byte b = in [ inPos ++ ] ; if ( b == PAD ) { eof = true ; break ; } else { if ( b >= 0 && b < DECODE_TABLE . length ) { int result = DECODE_TABLE [ b ] ; if ( result >= 0 ) { modulus = ( ++ modulus ) % 4 ; x = ( x << 6 ) + result ; if ( modulus == 0 ) { buffer [ pos ++ ] = ( byte ) ( ( x >> 16 ) & MASK_8BITS ) ; buffer [ pos ++ ] = ( byte ) ( ( x >> 8 ) & MASK_8BITS ) ; buffer [ pos ++ ] = ( byte ) ( x & MASK_8BITS ) ; } } } } } if ( eof && modulus != 0 ) { if ( buffer == null || buffer . length - pos < decodeSize ) { resizeBuffer ( ) ; } x = x << 6 ; switch ( modulus ) { case 2 : x = x << 6 ; buffer [ pos ++ ] = ( byte ) ( ( x >> 16 ) & MASK_8BITS ) ; break ; case 3 : buffer [ pos ++ ] = ( byte ) ( ( x >> 16 ) & MASK_8BITS ) ; buffer [ pos ++ ] = ( byte ) ( ( x >> 8 ) & MASK_8BITS ) ; break ; } } } |
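The fix in the Codec-5 row is easy to miss in a diff: the capacity guard that already protects the main decode loop is duplicated in front of the EOF flush, because the flush can run while {{buffer}} is still {{null}} and {{modulus}} carries over from a previous call. A simplified, self-contained sketch of the pattern (field and method names are ours, not commons-codec's):
{code}
public class EnsureCapacitySketch {
    private byte[] buffer;                 // may be null between decode calls
    private int pos;
    private static final int DECODE_SIZE = 3;

    private void ensureCapacity() {
        if (buffer == null) {
            buffer = new byte[64];
            pos = 0;
        } else if (buffer.length - pos < DECODE_SIZE) {
            byte[] bigger = new byte[buffer.length * 2];
            System.arraycopy(buffer, 0, bigger, 0, buffer.length);
            buffer = bigger;
        }
    }

    /** EOF flush of pending bits; the guard must run before any write. */
    void flushAtEof(int modulus, int x) {
        if (modulus != 0) {
            ensureCapacity();              // the call the buggy version lacked
            x = x << 6;
            switch (modulus) {
                case 2:
                    x = x << 6;
                    buffer[pos++] = (byte) ((x >> 16) & 0xff);
                    break;
                case 3:
                    buffer[pos++] = (byte) ((x >> 16) & 0xff);
                    buffer[pos++] = (byte) ((x >> 8) & 0xff);
                    break;
            }
        }
    }

    public static void main(String[] args) {
        EnsureCapacitySketch s = new EnsureCapacitySketch();
        s.flushAtEof(3, 0x3FFFF);          // buffer starts null; no NPE with the guard
        System.out.println(s.pos);         // 2 bytes written
    }
}
{code}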
JxPath | 5 | src/java/org/apache/commons/jxpath/ri/model/NodePointer.java | 642 | 675 | Cannot compare pointers that do not belong to the same tree | For the XPath "$var | /MAIN/A" an exception is thrown:
org.apache.commons.jxpath.JXPathException: Cannot compare pointers that do not belong to the same tree: '$var' and ''
at org.apache.commons.jxpath.ri.model.NodePointer.compareNodePointers(NodePointer.java:665)
at org.apache.commons.jxpath.ri.model.NodePointer.compareNodePointers(NodePointer.java:649)
at org.apache.commons.jxpath.ri.model.NodePointer.compareNodePointers(NodePointer.java:649)
at org.apache.commons.jxpath.ri.model.NodePointer.compareTo(NodePointer.java:639)
at java.util.Arrays.mergeSort(Arrays.java:1152)
at java.util.Arrays.sort(Arrays.java:1079)
at java.util.Collections.sort(Collections.java:113)
at org.apache.commons.jxpath.ri.EvalContext.constructIterator(EvalContext.java:176)
at org.apache.commons.jxpath.ri.EvalContext.hasNext(EvalContext.java:100)
at org.apache.commons.jxpath.JXPathContext.selectNodes(JXPathContext.java:648)
at org.apache.commons.jxpath.ri.model.VariablePointerTestCase.testUnionOfVariableAndNode(VariablePointerTestCase.java:76) | private int compareNodePointers(
NodePointer p1,
int depth1,
NodePointer p2,
int depth2)
{
if (depth1 < depth2) {
int r = compareNodePointers(p1, depth1, p2.parent, depth2 - 1);
return r == 0 ? -1 : r;
}
if (depth1 > depth2) {
int r = compareNodePointers(p1.parent, depth1 - 1, p2, depth2);
return r == 0 ? 1 : r;
}
if (p1 == null && p2 == null) {
return 0;
}
if (p1 != null && p1.equals(p2)) {
return 0;
}
if (depth1 == 1) {
throw new JXPathException(
"Cannot compare pointers that do not belong to the same tree: '"
+ p1 + "' and '" + p2 + "'");
}
int r = compareNodePointers(p1.parent, depth1 - 1, p2.parent, depth2 - 1);
if (r != 0) {
return r;
}
return p1.parent.compareChildNodePointers(p1, p2);
} | private int compareNodePointers ( NodePointer p1 , int depth1 , NodePointer p2 , int depth2 ) { if ( depth1 < depth2 ) { int r = compareNodePointers ( p1 , depth1 , p2 . parent , depth2 - 1 ) ; return r == 0 ? - 1 : r ; } if ( depth1 > depth2 ) { int r = compareNodePointers ( p1 . parent , depth1 - 1 , p2 , depth2 ) ; return r == 0 ? 1 : r ; } if ( p1 == null && p2 == null ) { return 0 ; } if ( p1 != null && p1 . equals ( p2 ) ) { return 0 ; } if ( depth1 == 1 ) { throw new JXPathException ( "Cannot compare pointers that do not belong to the same tree: '" + p1 + "' and '" + p2 + "'" ) ; } int r = compareNodePointers ( p1 . parent , depth1 - 1 , p2 . parent , depth2 - 1 ) ; if ( r != 0 ) { return r ; } return p1 . parent . compareChildNodePointers ( p1 , p2 ) ; } | private int compareNodePointers(
NodePointer p1,
int depth1,
NodePointer p2,
int depth2)
{
if (depth1 < depth2) {
int r = compareNodePointers(p1, depth1, p2.parent, depth2 - 1);
return r == 0 ? -1 : r;
}
if (depth1 > depth2) {
int r = compareNodePointers(p1.parent, depth1 - 1, p2, depth2);
return r == 0 ? 1 : r;
}
if (p1 == null && p2 == null) {
return 0;
}
if (p1 != null && p1.equals(p2)) {
return 0;
}
if (depth1 == 1) {
return 0;
}
int r = compareNodePointers(p1.parent, depth1 - 1, p2.parent, depth2 - 1);
if (r != 0) {
return r;
}
return p1.parent.compareChildNodePointers(p1, p2);
} | private int compareNodePointers ( NodePointer p1 , int depth1 , NodePointer p2 , int depth2 ) { if ( depth1 < depth2 ) { int r = compareNodePointers ( p1 , depth1 , p2 . parent , depth2 - 1 ) ; return r == 0 ? - 1 : r ; } if ( depth1 > depth2 ) { int r = compareNodePointers ( p1 . parent , depth1 - 1 , p2 , depth2 ) ; return r == 0 ? 1 : r ; } if ( p1 == null && p2 == null ) { return 0 ; } if ( p1 != null && p1 . equals ( p2 ) ) { return 0 ; } if ( depth1 == 1 ) { return 0 ; } int r = compareNodePointers ( p1 . parent , depth1 - 1 , p2 . parent , depth2 - 1 ) ; if ( r != 0 ) { return r ; } return p1 . parent . compareChildNodePointers ( p1 , p2 ) ; } |
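The JxPath fix above replaces the exception at the recursion's base case with {{return 0}}: when two parent chains have been walked all the way up and the roots still differ, the pointers come from different trees, and treating them as equal lets the union's {{Collections.sort}} finish instead of aborting. A minimal sketch of the same recursion over a plain parent-linked node (the {{Node}} class and its fields are ours):
{code}
public class Node {
    final Node parent;
    final int order;                       // position among siblings
    Node(Node parent, int order) { this.parent = parent; this.order = order; }

    int depth() { int d = 1; for (Node n = parent; n != null; n = n.parent) d++; return d; }

    static int compare(Node a, int da, Node b, int db) {
        if (da < db) { int r = compare(a, da, b.parent, db - 1); return r == 0 ? -1 : r; }
        if (da > db) { int r = compare(a.parent, da - 1, b, db); return r == 0 ? 1 : r; }
        if (a == b) return 0;
        if (da == 1) return 0;             // different roots: unrelated trees compare equal
        int r = compare(a.parent, da - 1, b.parent, db - 1);
        return r != 0 ? r : Integer.compare(a.order, b.order);
    }

    public static void main(String[] args) {
        Node root1 = new Node(null, 0);
        Node root2 = new Node(null, 0);
        // roots of two unrelated trees now compare as equal instead of throwing
        System.out.println(compare(root1, root1.depth(), root2, root2.depth())); // 0
    }
}
{code}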
Chart | 17 | source/org/jfree/data/time/TimeSeries.java | 856 | 859 | cloning of TimeSeries | It's just a minor bug!
When I clone a TimeSeries which has no items, I get an IllegalArgumentException ("Requires start <= end").
But I don't think the user should be responsible for checking whether the TimeSeries has any items or not. | public Object clone() throws CloneNotSupportedException {
Object clone = createCopy(0, getItemCount() - 1);
return clone;
} | public Object clone ( ) throws CloneNotSupportedException { Object clone = createCopy ( 0 , getItemCount ( ) - 1 ) ; return clone ; } | public Object clone() throws CloneNotSupportedException {
TimeSeries clone = (TimeSeries) super.clone();
clone.data = (List) ObjectUtilities.deepClone(this.data);
return clone;
} | public Object clone ( ) throws CloneNotSupportedException { TimeSeries clone = ( TimeSeries ) super . clone ( ) ; clone . data = ( List ) ObjectUtilities . deepClone ( this . data ) ; return clone ; } |
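The rewritten {{clone()}} above avoids {{createCopy(0, getItemCount() - 1)}}, which for an empty series becomes {{createCopy(0, -1)}} and trips the "Requires start <= end" check. A self-contained sketch of the same clone strategy; the {{Series}} class is ours, and a fresh {{ArrayList}} stands in for JFreeChart's {{ObjectUtilities.deepClone}}:
{code}
import java.util.ArrayList;
import java.util.List;

public class Series implements Cloneable {
    private List<String> data = new ArrayList<>();

    public void add(String item) { data.add(item); }
    public int getItemCount() { return data.size(); }

    @Override
    public Series clone() throws CloneNotSupportedException {
        Series clone = (Series) super.clone();   // field-by-field copy, list still shared
        clone.data = new ArrayList<>(this.data); // give the clone its own item list
        return clone;
    }

    public static void main(String[] args) throws CloneNotSupportedException {
        Series empty = new Series();
        Series copy = empty.clone();             // no IllegalArgumentException for zero items
        System.out.println(copy.getItemCount()); // 0
    }
}
{code}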
Mockito | 9 | src/org/mockito/internal/stubbing/answers/CallsRealMethods.java | 35 | 37 | Problem spying on abstract classes | There's a problem with spying on abstract classes when the real implementation calls out to the abstract method. More details: #121
| public Object answer(InvocationOnMock invocation) throws Throwable {
return invocation.callRealMethod();
} | public Object answer ( InvocationOnMock invocation ) throws Throwable { return invocation . callRealMethod ( ) ; } | public Object answer(InvocationOnMock invocation) throws Throwable {
if (Modifier.isAbstract(invocation.getMethod().getModifiers())) {
return new GloballyConfiguredAnswer().answer(invocation);
}
return invocation.callRealMethod();
} | public Object answer ( InvocationOnMock invocation ) throws Throwable { if ( Modifier . isAbstract ( invocation . getMethod ( ) . getModifiers ( ) ) ) { return new GloballyConfiguredAnswer ( ) . answer ( invocation ) ; } return invocation . callRealMethod ( ) ; } |
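The Mockito-9 fix checks the invoked method's modifiers before delegating, because an abstract method has no real body to call. A minimal sketch of the guard using plain reflection; the null fallback stands in for Mockito's globally configured default answer:
{code}
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;

public class AbstractGuardSketch {

    static Object answer(Object target, Method method, Object[] args) throws Exception {
        if (Modifier.isAbstract(method.getModifiers())) {
            return null;                    // nothing to execute; use a default answer
        }
        return method.invoke(target, args); // safe: the method has a body
    }

    abstract static class Shape {
        abstract double area();
        String describe() { return "area=" + area(); }
    }

    public static void main(String[] args) throws Exception {
        Method area = Shape.class.getDeclaredMethod("area");
        Method describe = Shape.class.getDeclaredMethod("describe");
        System.out.println(Modifier.isAbstract(area.getModifiers()));     // true
        System.out.println(Modifier.isAbstract(describe.getModifiers())); // false
    }
}
{code}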
Mockito | 8 | src/org/mockito/internal/util/reflection/GenericMetadataSupport.java | 66 | 84 | 1.10 regression (StackOverflowError) with interface where generic type has itself as upper bound | Add this to `GenericMetadataSupportTest`:
``` java
interface GenericsSelfReference<T extends GenericsSelfReference<T>> {
T self();
}
@Test
public void typeVariable_of_self_type() {
GenericMetadataSupport genericMetadata = inferFrom(GenericsSelfReference.class).resolveGenericReturnType(firstNamedMethod("self", GenericsSelfReference.class));
assertThat(genericMetadata.rawType()).isEqualTo(GenericsSelfReference.class);
}
```
It fails on master and 1.10.8 with this:
```
java.lang.StackOverflowError
at sun.reflect.generics.reflectiveObjects.TypeVariableImpl.hashCode(TypeVariableImpl.java:201)
at java.util.HashMap.hash(HashMap.java:338)
at java.util.HashMap.get(HashMap.java:556)
at org.mockito.internal.util.reflection.GenericMetadataSupport.getActualTypeArgumentFor(GenericMetadataSupport.java:193)
at org.mockito.internal.util.reflection.GenericMetadataSupport.getActualTypeArgumentFor(GenericMetadataSupport.java:196)
at org.mockito.internal.util.reflection.GenericMetadataSupport.getActualTypeArgumentFor(GenericMetadataSupport.java:196)
```
It worked on 1.9.5. May be caused by the changes in ab9e9f3 (cc @bric3).
(Also note that while the above interface looks strange, it is commonly used for builder hierarchies, where base class methods want to return `this` with a more specific type.)
| protected void registerTypeVariablesOn(Type classType) {
if (!(classType instanceof ParameterizedType)) {
return;
}
ParameterizedType parameterizedType = (ParameterizedType) classType;
TypeVariable[] typeParameters = ((Class<?>) parameterizedType.getRawType()).getTypeParameters();
Type[] actualTypeArguments = parameterizedType.getActualTypeArguments();
for (int i = 0; i < actualTypeArguments.length; i++) {
TypeVariable typeParameter = typeParameters[i];
Type actualTypeArgument = actualTypeArguments[i];
if (actualTypeArgument instanceof WildcardType) {
contextualActualTypeParameters.put(typeParameter, boundsOf((WildcardType) actualTypeArgument));
} else {
contextualActualTypeParameters.put(typeParameter, actualTypeArgument);
}
// logger.log("For '" + parameterizedType + "' found type variable : { '" + typeParameter + "(" + System.identityHashCode(typeParameter) + ")" + "' : '" + actualTypeArgument + "(" + System.identityHashCode(typeParameter) + ")" + "' }");
}
} | protected void registerTypeVariablesOn ( Type classType ) { if ( ! ( classType instanceof ParameterizedType ) ) { return ; } ParameterizedType parameterizedType = ( ParameterizedType ) classType ; TypeVariable [ ] typeParameters = ( ( Class < ? > ) parameterizedType . getRawType ( ) ) . getTypeParameters ( ) ; Type [ ] actualTypeArguments = parameterizedType . getActualTypeArguments ( ) ; for ( int i = 0 ; i < actualTypeArguments . length ; i ++ ) { TypeVariable typeParameter = typeParameters [ i ] ; Type actualTypeArgument = actualTypeArguments [ i ] ; if ( actualTypeArgument instanceof WildcardType ) { contextualActualTypeParameters . put ( typeParameter , boundsOf ( ( WildcardType ) actualTypeArgument ) ) ; } else { contextualActualTypeParameters . put ( typeParameter , actualTypeArgument ) ; } } } | protected void registerTypeVariablesOn(Type classType) {
if (!(classType instanceof ParameterizedType)) {
return;
}
ParameterizedType parameterizedType = (ParameterizedType) classType;
TypeVariable[] typeParameters = ((Class<?>) parameterizedType.getRawType()).getTypeParameters();
Type[] actualTypeArguments = parameterizedType.getActualTypeArguments();
for (int i = 0; i < actualTypeArguments.length; i++) {
TypeVariable typeParameter = typeParameters[i];
Type actualTypeArgument = actualTypeArguments[i];
if (actualTypeArgument instanceof WildcardType) {
contextualActualTypeParameters.put(typeParameter, boundsOf((WildcardType) actualTypeArgument));
} else if (typeParameter != actualTypeArgument) {
contextualActualTypeParameters.put(typeParameter, actualTypeArgument);
}
// logger.log("For '" + parameterizedType + "' found type variable : { '" + typeParameter + "(" + System.identityHashCode(typeParameter) + ")" + "' : '" + actualTypeArgument + "(" + System.identityHashCode(typeParameter) + ")" + "' }");
}
} | protected void registerTypeVariablesOn ( Type classType ) { if ( ! ( classType instanceof ParameterizedType ) ) { return ; } ParameterizedType parameterizedType = ( ParameterizedType ) classType ; TypeVariable [ ] typeParameters = ( ( Class < ? > ) parameterizedType . getRawType ( ) ) . getTypeParameters ( ) ; Type [ ] actualTypeArguments = parameterizedType . getActualTypeArguments ( ) ; for ( int i = 0 ; i < actualTypeArguments . length ; i ++ ) { TypeVariable typeParameter = typeParameters [ i ] ; Type actualTypeArgument = actualTypeArguments [ i ] ; if ( actualTypeArgument instanceof WildcardType ) { contextualActualTypeParameters . put ( typeParameter , boundsOf ( ( WildcardType ) actualTypeArgument ) ) ; } else if ( typeParameter != actualTypeArgument ) { contextualActualTypeParameters . put ( typeParameter , actualTypeArgument ) ; } } } |
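The guard added above ({{typeParameter != actualTypeArgument}}) keeps the table from mapping a type variable to itself, which is exactly what {{T extends GenericsSelfReference<T>}} produces; a self-entry makes every later lookup chase its own tail. A small sketch of the failure and the guard, with String keys standing in for real {{TypeVariable}} instances:
{code}
import java.util.HashMap;
import java.util.Map;

public class SelfMappingSketch {
    static final Map<String, String> table = new HashMap<>();

    /** Follow mappings until a concrete value (or an unmapped variable) is reached. */
    static String resolve(String var) {
        String next = table.get(var);
        if (next == null) return var;      // unresolved variable: stop here
        if (next.equals(var)) return var;  // self-mapping guard: break the cycle
        return resolve(next);              // without the guard, T -> T recurses forever
    }

    public static void main(String[] args) {
        table.put("T", "T");               // what the unguarded registration recorded
        System.out.println(resolve("T"));  // prints T instead of StackOverflowError
    }
}
{code}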
Csv | 1 | src/main/java/org/apache/commons/csv/ExtendedBufferedReader.java | 55 | 63 | ExtendedBufferedReader does not handle EOL consistently | ExtendedBufferedReader checks for '\n' (LF) in the read() methods, incrementing the line counter when found.
However, the readLine() method calls BufferedReader.readLine() which treats CR, LF and CRLF equally (and drops them).
If the code is to be flexible in what it accepts, the class should also allow for CR alone as a line terminator.
It should work if the code increments the line counter for CR, and for LF if the previous character was not CR. | @Override
public int read() throws IOException {
int current = super.read();
if (current == '\n') {
lineCounter++;
}
lastChar = current;
return lastChar;
} | @ Override public int read ( ) throws IOException { int current = super . read ( ) ; if ( current == '\n' ) { lineCounter ++ ; } lastChar = current ; return lastChar ; } | @Override
public int read() throws IOException {
int current = super.read();
if (current == '\r' || (current == '\n' && lastChar != '\r')) {
lineCounter++;
}
lastChar = current;
return lastChar;
} | @ Override public int read ( ) throws IOException { int current = super . read ( ) ; if ( current == '\r' || ( current == '\n' && lastChar != '\r' ) ) { lineCounter ++ ; } lastChar = current ; return lastChar ; } |
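The predicate in the fixed {{read()}} counts a line on every CR, and on LF only when the previous character was not CR, so CRLF counts once while bare CR and bare LF count once each. A standalone sketch applying the same predicate to a string:
{code}
public class EolCountSketch {

    static int countLineBreaks(String s) {
        int count = 0;
        int last = -1;
        for (int i = 0; i < s.length(); i++) {
            int c = s.charAt(i);
            if (c == '\r' || (c == '\n' && last != '\r')) {
                count++;
            }
            last = c;
        }
        return count;
    }

    public static void main(String[] args) {
        System.out.println(countLineBreaks("a\r\nb"));   // 1: CRLF counted once
        System.out.println(countLineBreaks("a\rb"));     // 1: bare CR
        System.out.println(countLineBreaks("a\nb"));     // 1: bare LF
        System.out.println(countLineBreaks("a\r\n\nb")); // 2: CRLF then bare LF
    }
}
{code}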
Math | 80 | src/main/java/org/apache/commons/math/linear/EigenDecompositionImpl.java | 1132 | 1147 | wrong result in eigen decomposition | Some results computed by EigenDecompositionImpl are wrong. The following case, whose reference values were computed with the Fortran library LAPACK, fails with version 2.0:
{code}
public void testMathpbx02() {
double[] mainTridiagonal = {
7484.860960227216, 18405.28129035345, 13855.225609560746,
10016.708722343366, 559.8117399576674, 6750.190788301587,
71.21428769782159
};
double[] secondaryTridiagonal = {
-4175.088570476366,1975.7955858241994,5193.178422374075,
1995.286659169179,75.34535882933804,-234.0808002076056
};
// the reference values have been computed using routine DSTEMR
// from the fortran library LAPACK version 3.2.1
double[] refEigenValues = {
20654.744890306974412,16828.208208485466457,
6893.155912634994820,6757.083016675340332,
5887.799885688558788,64.309089923240379,
57.992628792736340
};
RealVector[] refEigenVectors = {
new ArrayRealVector(new double[] {-0.270356342026904, 0.852811091326997, 0.399639490702077, 0.198794657813990, 0.019739323307666, 0.000106983022327, -0.000001216636321}),
new ArrayRealVector(new double[] {0.179995273578326,-0.402807848153042,0.701870993525734,0.555058211014888,0.068079148898236,0.000509139115227,-0.000007112235617}),
new ArrayRealVector(new double[] {-0.399582721284727,-0.056629954519333,-0.514406488522827,0.711168164518580,0.225548081276367,0.125943999652923,-0.004321507456014}),
new ArrayRealVector(new double[] {0.058515721572821,0.010200130057739,0.063516274916536,-0.090696087449378,-0.017148420432597,0.991318870265707,-0.034707338554096}),
new ArrayRealVector(new double[] {0.855205995537564,0.327134656629775,-0.265382397060548,0.282690729026706,0.105736068025572,-0.009138126622039,0.000367751821196}),
new ArrayRealVector(new double[] {-0.002913069901144,-0.005177515777101,0.041906334478672,-0.109315918416258,0.436192305456741,0.026307315639535,0.891797507436344}),
new ArrayRealVector(new double[] {-0.005738311176435,-0.010207611670378,0.082662420517928,-0.215733886094368,0.861606487840411,-0.025478530652759,-0.451080697503958})
};
// the following line triggers the exception
EigenDecomposition decomposition =
new EigenDecompositionImpl(mainTridiagonal, secondaryTridiagonal, MathUtils.SAFE_MIN);
double[] eigenValues = decomposition.getRealEigenvalues();
for (int i = 0; i < refEigenValues.length; ++i) {
assertEquals(refEigenValues[i], eigenValues[i], 1.0e-3);
if (refEigenVectors[i].dotProduct(decomposition.getEigenvector(i)) < 0) {
assertEquals(0, refEigenVectors[i].add(decomposition.getEigenvector(i)).getNorm(), 1.0e-5);
} else {
assertEquals(0, refEigenVectors[i].subtract(decomposition.getEigenvector(i)).getNorm(), 1.0e-5);
}
}
}
{code} | private boolean flipIfWarranted(final int n, final int step) {
if (1.5 * work[pingPong] < work[4 * (n - 1) + pingPong]) {
// flip array
int j = 4 * n - 1;
for (int i = 0; i < j; i += 4) {
for (int k = 0; k < 4; k += step) {
final double tmp = work[i + k];
work[i + k] = work[j - k];
work[j - k] = tmp;
}
j -= 4;
}
return true;
}
return false;
} | private boolean flipIfWarranted ( final int n , final int step ) { if ( 1.5 * work [ pingPong ] < work [ 4 * ( n - 1 ) + pingPong ] ) { int j = 4 * n - 1 ; for ( int i = 0 ; i < j ; i += 4 ) { for ( int k = 0 ; k < 4 ; k += step ) { final double tmp = work [ i + k ] ; work [ i + k ] = work [ j - k ] ; work [ j - k ] = tmp ; } j -= 4 ; } return true ; } return false ; } | private boolean flipIfWarranted(final int n, final int step) {
if (1.5 * work[pingPong] < work[4 * (n - 1) + pingPong]) {
// flip array
int j = 4 * (n - 1);
for (int i = 0; i < j; i += 4) {
for (int k = 0; k < 4; k += step) {
final double tmp = work[i + k];
work[i + k] = work[j - k];
work[j - k] = tmp;
}
j -= 4;
}
return true;
}
return false;
} | private boolean flipIfWarranted ( final int n , final int step ) { if ( 1.5 * work [ pingPong ] < work [ 4 * ( n - 1 ) + pingPong ] ) { int j = 4 * ( n - 1 ) ; for ( int i = 0 ; i < j ; i += 4 ) { for ( int k = 0 ; k < 4 ; k += step ) { final double tmp = work [ i + k ] ; work [ i + k ] = work [ j - k ] ; work [ j - k ] = tmp ; } j -= 4 ; } return true ; } return false ; } |
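The whole Math-80 fix is the flip's starting index: with {{n}} four-element blocks stored contiguously, the last block starts at {{4 * (n - 1)}}, while {{4 * n - 1}} points at its last element, shifting every swapped pair by one slot. A tiny arithmetic check:
{code}
public class BlockIndexSketch {
    public static void main(String[] args) {
        int n = 7;                                              // seven 4-element blocks, indices 0..27
        System.out.println("buggy start:   " + (4 * n - 1));    // 27, last element of block 6
        System.out.println("correct start: " + (4 * (n - 1ID))); // see note below
    }
}
{code}
Correction to the sketch above: the second line should read {{System.out.println("correct start: " + (4 * (n - 1)));}}, printing 24, the first element of block 6. flipIfWarranted swaps work[i + k] with work[j - k]; seeding j at 27 instead of 24 misaligns every pair, silently corrupting the flipped array.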
Mockito | 27 | src/org/mockito/internal/util/MockUtil.java | 62 | 67 | MockUtil.resetMock() doesn't create InvocationNotifierHandler when recreating a filter | What steps will reproduce the problem? 1. create a mock with verbose logging; 2. reset the mock. What is the expected output? What do you see instead? Verbose logging doesn't work after resetting a mock. What version of the product are you using? On what operating system? 1.9.0-rc1. Patch attached. | public <T> void resetMock(T mock) {
MockHandlerInterface<T> oldMockHandler = getMockHandler(mock);
MockHandler<T> newMockHandler = new MockHandler<T>(oldMockHandler);
MethodInterceptorFilter newFilter = new MethodInterceptorFilter(newMockHandler, (MockSettingsImpl) org.mockito.Mockito.withSettings().defaultAnswer(org.mockito.Mockito.RETURNS_DEFAULTS));
((Factory) mock).setCallback(0, newFilter);
} | public < T > void resetMock ( T mock ) { MockHandlerInterface < T > oldMockHandler = getMockHandler ( mock ) ; MockHandler < T > newMockHandler = new MockHandler < T > ( oldMockHandler ) ; MethodInterceptorFilter newFilter = new MethodInterceptorFilter ( newMockHandler , ( MockSettingsImpl ) org . mockito . Mockito . withSettings ( ) . defaultAnswer ( org . mockito . Mockito . RETURNS_DEFAULTS ) ) ; ( ( Factory ) mock ) . setCallback ( 0 , newFilter ) ; } | public <T> void resetMock(T mock) {
MockHandlerInterface<T> oldMockHandler = getMockHandler(mock);
MethodInterceptorFilter newFilter = newMethodInterceptorFilter(oldMockHandler.getMockSettings());
((Factory) mock).setCallback(0, newFilter);
} | public < T > void resetMock ( T mock ) { MockHandlerInterface < T > oldMockHandler = getMockHandler ( mock ) ; MethodInterceptorFilter newFilter = newMethodInterceptorFilter ( oldMockHandler . getMockSettings ( ) ) ; ( ( Factory ) mock ) . setCallback ( 0 , newFilter ) ; } |
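The Mockito-27 fix rebuilds the interceptor from the old handler's own settings rather than from fresh defaults, so options such as verbose logging survive a reset. A generic sketch of that pattern; all class and method names here are ours, and Mockito's real types differ:
{code}
public class ResetSketch {
    static class Settings {
        final boolean verbose;
        Settings(boolean verbose) { this.verbose = verbose; }
    }

    static class Handler {
        private final Settings settings;
        Handler(Settings settings) { this.settings = settings; }
        Settings getSettings() { return settings; }
    }

    /** Reset by recreating the handler from the mock's original settings. */
    static Handler reset(Handler old) {
        return new Handler(old.getSettings()); // not new Handler(new Settings(false))
    }

    public static void main(String[] args) {
        Handler verbose = new Handler(new Settings(true));
        Handler afterReset = reset(verbose);
        System.out.println(afterReset.getSettings().verbose); // true: logging preserved
    }
}
{code}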
Math | 38 | src/main/java/org/apache/commons/math/optimization/direct/BOBYQAOptimizer.java | 1582 | 1755 | Errors in BOBYQAOptimizer when numberOfInterpolationPoints is greater than 2*dim+1 | I've been having trouble getting BOBYQA to minimize a function (actually a non-linear least squares fit), so as one change I increased the number of interpolation points. It seems that anything larger than 2*dim+1 causes an error (typically at
line 1662
interpolationPoints.setEntry(nfm, ipt, interpolationPoints.getEntry(ipt, ipt));
I'm guessing there is an off by one error in the translation from FORTRAN. Changing the BOBYQAOptimizerTest as follows (increasing number of interpolation points by one) will cause failures.
Bruce
Index: src/test/java/org/apache/commons/math/optimization/direct/BOBYQAOptimizerTest.java
===================================================================
--- src/test/java/org/apache/commons/math/optimization/direct/BOBYQAOptimizerTest.java (revision 1221065)
+++ src/test/java/org/apache/commons/math/optimization/direct/BOBYQAOptimizerTest.java (working copy)
@@ -258,7 +258,7 @@
// RealPointValuePair result = optim.optimize(100000, func, goal, startPoint);
final double[] lB = boundaries == null ? null : boundaries[0];
final double[] uB = boundaries == null ? null : boundaries[1];
- BOBYQAOptimizer optim = new BOBYQAOptimizer(2 * dim + 1);
+ BOBYQAOptimizer optim = new BOBYQAOptimizer(2 * dim + 2);
RealPointValuePair result = optim.optimize(maxEvaluations, func, goal, startPoint, lB, uB);
// System.out.println(func.getClass().getName() + " = "
// + optim.getEvaluations() + " f(");
| private void prelim(double[] lowerBound,
double[] upperBound) {
printMethod(); // XXX
final int n = currentBest.getDimension();
final int npt = numberOfInterpolationPoints;
final int ndim = bMatrix.getRowDimension();
final double rhosq = initialTrustRegionRadius * initialTrustRegionRadius;
final double recip = 1d / rhosq;
final int np = n + 1;
// Set XBASE to the initial vector of variables, and set the initial
// elements of XPT, BMAT, HQ, PQ and ZMAT to zero.
for (int j = 0; j < n; j++) {
originShift.setEntry(j, currentBest.getEntry(j));
for (int k = 0; k < npt; k++) {
interpolationPoints.setEntry(k, j, ZERO);
}
for (int i = 0; i < ndim; i++) {
bMatrix.setEntry(i, j, ZERO);
}
}
for (int i = 0, max = n * np / 2; i < max; i++) {
modelSecondDerivativesValues.setEntry(i, ZERO);
}
for (int k = 0; k < npt; k++) {
modelSecondDerivativesParameters.setEntry(k, ZERO);
for (int j = 0, max = npt - np; j < max; j++) {
zMatrix.setEntry(k, j, ZERO);
}
}
// Begin the initialization procedure. NF becomes one more than the number
// of function values so far. The coordinates of the displacement of the
// next initial interpolation point from XBASE are set in XPT(NF+1,.).
int ipt = 0;
int jpt = 0;
double fbeg = Double.NaN;
do {
final int nfm = getEvaluations();
final int nfx = nfm - n;
final int nfmm = nfm - 1;
final int nfxm = nfx - 1;
double stepa = 0;
double stepb = 0;
if (nfm <= 2 * n) {
if (nfm >= 1 &&
nfm <= n) {
stepa = initialTrustRegionRadius;
if (upperDifference.getEntry(nfmm) == ZERO) {
stepa = -stepa;
throw new PathIsExploredException(); // XXX
}
interpolationPoints.setEntry(nfm, nfmm, stepa);
} else if (nfm > n) {
stepa = interpolationPoints.getEntry(nfx, nfxm);
stepb = -initialTrustRegionRadius;
if (lowerDifference.getEntry(nfxm) == ZERO) {
stepb = Math.min(TWO * initialTrustRegionRadius, upperDifference.getEntry(nfxm));
throw new PathIsExploredException(); // XXX
}
if (upperDifference.getEntry(nfxm) == ZERO) {
stepb = Math.max(-TWO * initialTrustRegionRadius, lowerDifference.getEntry(nfxm));
throw new PathIsExploredException(); // XXX
}
interpolationPoints.setEntry(nfm, nfxm, stepb);
}
} else {
final int tmp1 = (nfm - np) / n;
jpt = nfm - tmp1 * n - n;
ipt = jpt + tmp1;
if (ipt > n) {
final int tmp2 = jpt;
jpt = ipt - n;
ipt = tmp2;
throw new PathIsExploredException(); // XXX
}
final int iptMinus1 = ipt;
final int jptMinus1 = jpt;
interpolationPoints.setEntry(nfm, iptMinus1, interpolationPoints.getEntry(ipt, iptMinus1));
interpolationPoints.setEntry(nfm, jptMinus1, interpolationPoints.getEntry(jpt, jptMinus1));
}
// Calculate the next value of F. The least function value so far and
// its index are required.
for (int j = 0; j < n; j++) {
currentBest.setEntry(j, Math.min(Math.max(lowerBound[j],
originShift.getEntry(j) + interpolationPoints.getEntry(nfm, j)),
upperBound[j]));
if (interpolationPoints.getEntry(nfm, j) == lowerDifference.getEntry(j)) {
currentBest.setEntry(j, lowerBound[j]);
}
if (interpolationPoints.getEntry(nfm, j) == upperDifference.getEntry(j)) {
currentBest.setEntry(j, upperBound[j]);
}
}
final double objectiveValue = computeObjectiveValue(currentBest.toArray());
final double f = isMinimize ? objectiveValue : -objectiveValue;
final int numEval = getEvaluations(); // nfm + 1
fAtInterpolationPoints.setEntry(nfm, f);
if (numEval == 1) {
fbeg = f;
trustRegionCenterInterpolationPointIndex = 0;
} else if (f < fAtInterpolationPoints.getEntry(trustRegionCenterInterpolationPointIndex)) {
trustRegionCenterInterpolationPointIndex = nfm;
}
// Set the nonzero initial elements of BMAT and the quadratic model in the
// cases when NF is at most 2*N+1. If NF exceeds N+1, then the positions
// of the NF-th and (NF-N)-th interpolation points may be switched, in
// order that the function value at the first of them contributes to the
// off-diagonal second derivative terms of the initial quadratic model.
if (numEval <= 2 * n + 1) {
if (numEval >= 2 &&
numEval <= n + 1) {
gradientAtTrustRegionCenter.setEntry(nfmm, (f - fbeg) / stepa);
if (npt < numEval + n) {
final double oneOverStepA = ONE / stepa;
bMatrix.setEntry(0, nfmm, -oneOverStepA);
bMatrix.setEntry(nfm, nfmm, oneOverStepA);
bMatrix.setEntry(npt + nfmm, nfmm, -HALF * rhosq);
throw new PathIsExploredException(); // XXX
}
} else if (numEval >= n + 2) {
final int ih = nfx * (nfx + 1) / 2 - 1;
final double tmp = (f - fbeg) / stepb;
final double diff = stepb - stepa;
modelSecondDerivativesValues.setEntry(ih, TWO * (tmp - gradientAtTrustRegionCenter.getEntry(nfxm)) / diff);
gradientAtTrustRegionCenter.setEntry(nfxm, (gradientAtTrustRegionCenter.getEntry(nfxm) * stepb - tmp * stepa) / diff);
if (stepa * stepb < ZERO) {
if (f < fAtInterpolationPoints.getEntry(nfm - n)) {
fAtInterpolationPoints.setEntry(nfm, fAtInterpolationPoints.getEntry(nfm - n));
fAtInterpolationPoints.setEntry(nfm - n, f);
if (trustRegionCenterInterpolationPointIndex == nfm) {
trustRegionCenterInterpolationPointIndex = nfm - n;
}
interpolationPoints.setEntry(nfm - n, nfxm, stepb);
interpolationPoints.setEntry(nfm, nfxm, stepa);
}
}
bMatrix.setEntry(0, nfxm, -(stepa + stepb) / (stepa * stepb));
bMatrix.setEntry(nfm, nfxm, -HALF / interpolationPoints.getEntry(nfm - n, nfxm));
bMatrix.setEntry(nfm - n, nfxm,
-bMatrix.getEntry(0, nfxm) - bMatrix.getEntry(nfm, nfxm));
zMatrix.setEntry(0, nfxm, Math.sqrt(TWO) / (stepa * stepb));
zMatrix.setEntry(nfm, nfxm, Math.sqrt(HALF) / rhosq);
// zMatrix.setEntry(nfm, nfxm, Math.sqrt(HALF) * recip); // XXX "testAckley" and "testDiffPow" fail.
zMatrix.setEntry(nfm - n, nfxm,
-zMatrix.getEntry(0, nfxm) - zMatrix.getEntry(nfm, nfxm));
}
// Set the off-diagonal second derivatives of the Lagrange functions and
// the initial quadratic model.
} else {
zMatrix.setEntry(0, nfxm, recip);
zMatrix.setEntry(nfm, nfxm, recip);
zMatrix.setEntry(ipt, nfxm, -recip);
zMatrix.setEntry(jpt, nfxm, -recip);
final int ih = ipt * (ipt - 1) / 2 + jpt - 1;
final double tmp = interpolationPoints.getEntry(nfm, ipt - 1) * interpolationPoints.getEntry(nfm, jpt - 1);
modelSecondDerivativesValues.setEntry(ih, (fbeg - fAtInterpolationPoints.getEntry(ipt) - fAtInterpolationPoints.getEntry(jpt) + f) / tmp);
throw new PathIsExploredException(); // XXX
}
} while (getEvaluations() < npt);
} | private void prelim ( double [ ] lowerBound , double [ ] upperBound ) { printMethod ( ) ; final int n = currentBest . getDimension ( ) ; final int npt = numberOfInterpolationPoints ; final int ndim = bMatrix . getRowDimension ( ) ; final double rhosq = initialTrustRegionRadius * initialTrustRegionRadius ; final double recip = 1d / rhosq ; final int np = n + 1 ; for ( int j = 0 ; j < n ; j ++ ) { originShift . setEntry ( j , currentBest . getEntry ( j ) ) ; for ( int k = 0 ; k < npt ; k ++ ) { interpolationPoints . setEntry ( k , j , ZERO ) ; } for ( int i = 0 ; i < ndim ; i ++ ) { bMatrix . setEntry ( i , j , ZERO ) ; } } for ( int i = 0 , max = n * np / 2 ; i < max ; i ++ ) { modelSecondDerivativesValues . setEntry ( i , ZERO ) ; } for ( int k = 0 ; k < npt ; k ++ ) { modelSecondDerivativesParameters . setEntry ( k , ZERO ) ; for ( int j = 0 , max = npt - np ; j < max ; j ++ ) { zMatrix . setEntry ( k , j , ZERO ) ; } } int ipt = 0 ; int jpt = 0 ; double fbeg = Double . NaN ; do { final int nfm = getEvaluations ( ) ; final int nfx = nfm - n ; final int nfmm = nfm - 1 ; final int nfxm = nfx - 1 ; double stepa = 0 ; double stepb = 0 ; if ( nfm <= 2 * n ) { if ( nfm >= 1 && nfm <= n ) { stepa = initialTrustRegionRadius ; if ( upperDifference . getEntry ( nfmm ) == ZERO ) { stepa = - stepa ; throw new PathIsExploredException ( ) ; } interpolationPoints . setEntry ( nfm , nfmm , stepa ) ; } else if ( nfm > n ) { stepa = interpolationPoints . getEntry ( nfx , nfxm ) ; stepb = - initialTrustRegionRadius ; if ( lowerDifference . getEntry ( nfxm ) == ZERO ) { stepb = Math . min ( TWO * initialTrustRegionRadius , upperDifference . getEntry ( nfxm ) ) ; throw new PathIsExploredException ( ) ; } if ( upperDifference . getEntry ( nfxm ) == ZERO ) { stepb = Math . max ( - TWO * initialTrustRegionRadius , lowerDifference . getEntry ( nfxm ) ) ; throw new PathIsExploredException ( ) ; } interpolationPoints . setEntry ( nfm , nfxm , stepb ) ; } } else { final int tmp1 = ( nfm - np ) / n ; jpt = nfm - tmp1 * n - n ; ipt = jpt + tmp1 ; if ( ipt > n ) { final int tmp2 = jpt ; jpt = ipt - n ; ipt = tmp2 ; throw new PathIsExploredException ( ) ; } final int iptMinus1 = ipt ; final int jptMinus1 = jpt ; interpolationPoints . setEntry ( nfm , iptMinus1 , interpolationPoints . getEntry ( ipt , iptMinus1 ) ) ; interpolationPoints . setEntry ( nfm , jptMinus1 , interpolationPoints . getEntry ( jpt , jptMinus1 ) ) ; } for ( int j = 0 ; j < n ; j ++ ) { currentBest . setEntry ( j , Math . min ( Math . max ( lowerBound [ j ] , originShift . getEntry ( j ) + interpolationPoints . getEntry ( nfm , j ) ) , upperBound [ j ] ) ) ; if ( interpolationPoints . getEntry ( nfm , j ) == lowerDifference . getEntry ( j ) ) { currentBest . setEntry ( j , lowerBound [ j ] ) ; } if ( interpolationPoints . getEntry ( nfm , j ) == upperDifference . getEntry ( j ) ) { currentBest . setEntry ( j , upperBound [ j ] ) ; } } final double objectiveValue = computeObjectiveValue ( currentBest . toArray ( ) ) ; final double f = isMinimize ? objectiveValue : - objectiveValue ; final int numEval = getEvaluations ( ) ; fAtInterpolationPoints . setEntry ( nfm , f ) ; if ( numEval == 1 ) { fbeg = f ; trustRegionCenterInterpolationPointIndex = 0 ; } else if ( f < fAtInterpolationPoints . getEntry ( trustRegionCenterInterpolationPointIndex ) ) { trustRegionCenterInterpolationPointIndex = nfm ; } if ( numEval <= 2 * n + 1 ) { if ( numEval >= 2 && numEval <= n + 1 ) { gradientAtTrustRegionCenter . 
setEntry ( nfmm , ( f - fbeg ) / stepa ) ; if ( npt < numEval + n ) { final double oneOverStepA = ONE / stepa ; bMatrix . setEntry ( 0 , nfmm , - oneOverStepA ) ; bMatrix . setEntry ( nfm , nfmm , oneOverStepA ) ; bMatrix . setEntry ( npt + nfmm , nfmm , - HALF * rhosq ) ; throw new PathIsExploredException ( ) ; } } else if ( numEval >= n + 2 ) { final int ih = nfx * ( nfx + 1 ) / 2 - 1 ; final double tmp = ( f - fbeg ) / stepb ; final double diff = stepb - stepa ; modelSecondDerivativesValues . setEntry ( ih , TWO * ( tmp - gradientAtTrustRegionCenter . getEntry ( nfxm ) ) / diff ) ; gradientAtTrustRegionCenter . setEntry ( nfxm , ( gradientAtTrustRegionCenter . getEntry ( nfxm ) * stepb - tmp * stepa ) / diff ) ; if ( stepa * stepb < ZERO ) { if ( f < fAtInterpolationPoints . getEntry ( nfm - n ) ) { fAtInterpolationPoints . setEntry ( nfm , fAtInterpolationPoints . getEntry ( nfm - n ) ) ; fAtInterpolationPoints . setEntry ( nfm - n , f ) ; if ( trustRegionCenterInterpolationPointIndex == nfm ) { trustRegionCenterInterpolationPointIndex = nfm - n ; } interpolationPoints . setEntry ( nfm - n , nfxm , stepb ) ; interpolationPoints . setEntry ( nfm , nfxm , stepa ) ; } } bMatrix . setEntry ( 0 , nfxm , - ( stepa + stepb ) / ( stepa * stepb ) ) ; bMatrix . setEntry ( nfm , nfxm , - HALF / interpolationPoints . getEntry ( nfm - n , nfxm ) ) ; bMatrix . setEntry ( nfm - n , nfxm , - bMatrix . getEntry ( 0 , nfxm ) - bMatrix . getEntry ( nfm , nfxm ) ) ; zMatrix . setEntry ( 0 , nfxm , Math . sqrt ( TWO ) / ( stepa * stepb ) ) ; zMatrix . setEntry ( nfm , nfxm , Math . sqrt ( HALF ) / rhosq ) ; zMatrix . setEntry ( nfm - n , nfxm , - zMatrix . getEntry ( 0 , nfxm ) - zMatrix . getEntry ( nfm , nfxm ) ) ; } } else { zMatrix . setEntry ( 0 , nfxm , recip ) ; zMatrix . setEntry ( nfm , nfxm , recip ) ; zMatrix . setEntry ( ipt , nfxm , - recip ) ; zMatrix . setEntry ( jpt , nfxm , - recip ) ; final int ih = ipt * ( ipt - 1 ) / 2 + jpt - 1 ; final double tmp = interpolationPoints . getEntry ( nfm , ipt - 1 ) * interpolationPoints . getEntry ( nfm , jpt - 1 ) ; modelSecondDerivativesValues . setEntry ( ih , ( fbeg - fAtInterpolationPoints . getEntry ( ipt ) - fAtInterpolationPoints . getEntry ( jpt ) + f ) / tmp ) ; throw new PathIsExploredException ( ) ; } } while ( getEvaluations ( ) < npt ) ; } | private void prelim(double[] lowerBound,
double[] upperBound) {
printMethod(); // XXX
final int n = currentBest.getDimension();
final int npt = numberOfInterpolationPoints;
final int ndim = bMatrix.getRowDimension();
final double rhosq = initialTrustRegionRadius * initialTrustRegionRadius;
final double recip = 1d / rhosq;
final int np = n + 1;
// Set XBASE to the initial vector of variables, and set the initial
// elements of XPT, BMAT, HQ, PQ and ZMAT to zero.
for (int j = 0; j < n; j++) {
originShift.setEntry(j, currentBest.getEntry(j));
for (int k = 0; k < npt; k++) {
interpolationPoints.setEntry(k, j, ZERO);
}
for (int i = 0; i < ndim; i++) {
bMatrix.setEntry(i, j, ZERO);
}
}
for (int i = 0, max = n * np / 2; i < max; i++) {
modelSecondDerivativesValues.setEntry(i, ZERO);
}
for (int k = 0; k < npt; k++) {
modelSecondDerivativesParameters.setEntry(k, ZERO);
for (int j = 0, max = npt - np; j < max; j++) {
zMatrix.setEntry(k, j, ZERO);
}
}
// Begin the initialization procedure. NF becomes one more than the number
// of function values so far. The coordinates of the displacement of the
// next initial interpolation point from XBASE are set in XPT(NF+1,.).
int ipt = 0;
int jpt = 0;
double fbeg = Double.NaN;
do {
final int nfm = getEvaluations();
final int nfx = nfm - n;
final int nfmm = nfm - 1;
final int nfxm = nfx - 1;
double stepa = 0;
double stepb = 0;
if (nfm <= 2 * n) {
if (nfm >= 1 &&
nfm <= n) {
stepa = initialTrustRegionRadius;
if (upperDifference.getEntry(nfmm) == ZERO) {
stepa = -stepa;
throw new PathIsExploredException(); // XXX
}
interpolationPoints.setEntry(nfm, nfmm, stepa);
} else if (nfm > n) {
stepa = interpolationPoints.getEntry(nfx, nfxm);
stepb = -initialTrustRegionRadius;
if (lowerDifference.getEntry(nfxm) == ZERO) {
stepb = Math.min(TWO * initialTrustRegionRadius, upperDifference.getEntry(nfxm));
throw new PathIsExploredException(); // XXX
}
if (upperDifference.getEntry(nfxm) == ZERO) {
stepb = Math.max(-TWO * initialTrustRegionRadius, lowerDifference.getEntry(nfxm));
throw new PathIsExploredException(); // XXX
}
interpolationPoints.setEntry(nfm, nfxm, stepb);
}
} else {
final int tmp1 = (nfm - np) / n;
jpt = nfm - tmp1 * n - n;
ipt = jpt + tmp1;
if (ipt > n) {
final int tmp2 = jpt;
jpt = ipt - n;
ipt = tmp2;
// throw new PathIsExploredException(); // XXX
}
final int iptMinus1 = ipt - 1;
final int jptMinus1 = jpt - 1;
interpolationPoints.setEntry(nfm, iptMinus1, interpolationPoints.getEntry(ipt, iptMinus1));
interpolationPoints.setEntry(nfm, jptMinus1, interpolationPoints.getEntry(jpt, jptMinus1));
}
// Calculate the next value of F. The least function value so far and
// its index are required.
for (int j = 0; j < n; j++) {
currentBest.setEntry(j, Math.min(Math.max(lowerBound[j],
originShift.getEntry(j) + interpolationPoints.getEntry(nfm, j)),
upperBound[j]));
if (interpolationPoints.getEntry(nfm, j) == lowerDifference.getEntry(j)) {
currentBest.setEntry(j, lowerBound[j]);
}
if (interpolationPoints.getEntry(nfm, j) == upperDifference.getEntry(j)) {
currentBest.setEntry(j, upperBound[j]);
}
}
final double objectiveValue = computeObjectiveValue(currentBest.toArray());
final double f = isMinimize ? objectiveValue : -objectiveValue;
final int numEval = getEvaluations(); // nfm + 1
fAtInterpolationPoints.setEntry(nfm, f);
if (numEval == 1) {
fbeg = f;
trustRegionCenterInterpolationPointIndex = 0;
} else if (f < fAtInterpolationPoints.getEntry(trustRegionCenterInterpolationPointIndex)) {
trustRegionCenterInterpolationPointIndex = nfm;
}
// Set the nonzero initial elements of BMAT and the quadratic model in the
// cases when NF is at most 2*N+1. If NF exceeds N+1, then the positions
// of the NF-th and (NF-N)-th interpolation points may be switched, in
// order that the function value at the first of them contributes to the
// off-diagonal second derivative terms of the initial quadratic model.
if (numEval <= 2 * n + 1) {
if (numEval >= 2 &&
numEval <= n + 1) {
gradientAtTrustRegionCenter.setEntry(nfmm, (f - fbeg) / stepa);
if (npt < numEval + n) {
final double oneOverStepA = ONE / stepa;
bMatrix.setEntry(0, nfmm, -oneOverStepA);
bMatrix.setEntry(nfm, nfmm, oneOverStepA);
bMatrix.setEntry(npt + nfmm, nfmm, -HALF * rhosq);
throw new PathIsExploredException(); // XXX
}
} else if (numEval >= n + 2) {
final int ih = nfx * (nfx + 1) / 2 - 1;
final double tmp = (f - fbeg) / stepb;
final double diff = stepb - stepa;
modelSecondDerivativesValues.setEntry(ih, TWO * (tmp - gradientAtTrustRegionCenter.getEntry(nfxm)) / diff);
gradientAtTrustRegionCenter.setEntry(nfxm, (gradientAtTrustRegionCenter.getEntry(nfxm) * stepb - tmp * stepa) / diff);
if (stepa * stepb < ZERO) {
if (f < fAtInterpolationPoints.getEntry(nfm - n)) {
fAtInterpolationPoints.setEntry(nfm, fAtInterpolationPoints.getEntry(nfm - n));
fAtInterpolationPoints.setEntry(nfm - n, f);
if (trustRegionCenterInterpolationPointIndex == nfm) {
trustRegionCenterInterpolationPointIndex = nfm - n;
}
interpolationPoints.setEntry(nfm - n, nfxm, stepb);
interpolationPoints.setEntry(nfm, nfxm, stepa);
}
}
bMatrix.setEntry(0, nfxm, -(stepa + stepb) / (stepa * stepb));
bMatrix.setEntry(nfm, nfxm, -HALF / interpolationPoints.getEntry(nfm - n, nfxm));
bMatrix.setEntry(nfm - n, nfxm,
-bMatrix.getEntry(0, nfxm) - bMatrix.getEntry(nfm, nfxm));
zMatrix.setEntry(0, nfxm, Math.sqrt(TWO) / (stepa * stepb));
zMatrix.setEntry(nfm, nfxm, Math.sqrt(HALF) / rhosq);
// zMatrix.setEntry(nfm, nfxm, Math.sqrt(HALF) * recip); // XXX "testAckley" and "testDiffPow" fail.
zMatrix.setEntry(nfm - n, nfxm,
-zMatrix.getEntry(0, nfxm) - zMatrix.getEntry(nfm, nfxm));
}
// Set the off-diagonal second derivatives of the Lagrange functions and
// the initial quadratic model.
} else {
zMatrix.setEntry(0, nfxm, recip);
zMatrix.setEntry(nfm, nfxm, recip);
zMatrix.setEntry(ipt, nfxm, -recip);
zMatrix.setEntry(jpt, nfxm, -recip);
final int ih = ipt * (ipt - 1) / 2 + jpt - 1;
final double tmp = interpolationPoints.getEntry(nfm, ipt - 1) * interpolationPoints.getEntry(nfm, jpt - 1);
modelSecondDerivativesValues.setEntry(ih, (fbeg - fAtInterpolationPoints.getEntry(ipt) - fAtInterpolationPoints.getEntry(jpt) + f) / tmp);
// throw new PathIsExploredException(); // XXX
}
} while (getEvaluations() < npt);
} | private void prelim ( double [ ] lowerBound , double [ ] upperBound ) { printMethod ( ) ; final int n = currentBest . getDimension ( ) ; final int npt = numberOfInterpolationPoints ; final int ndim = bMatrix . getRowDimension ( ) ; final double rhosq = initialTrustRegionRadius * initialTrustRegionRadius ; final double recip = 1d / rhosq ; final int np = n + 1 ; for ( int j = 0 ; j < n ; j ++ ) { originShift . setEntry ( j , currentBest . getEntry ( j ) ) ; for ( int k = 0 ; k < npt ; k ++ ) { interpolationPoints . setEntry ( k , j , ZERO ) ; } for ( int i = 0 ; i < ndim ; i ++ ) { bMatrix . setEntry ( i , j , ZERO ) ; } } for ( int i = 0 , max = n * np / 2 ; i < max ; i ++ ) { modelSecondDerivativesValues . setEntry ( i , ZERO ) ; } for ( int k = 0 ; k < npt ; k ++ ) { modelSecondDerivativesParameters . setEntry ( k , ZERO ) ; for ( int j = 0 , max = npt - np ; j < max ; j ++ ) { zMatrix . setEntry ( k , j , ZERO ) ; } } int ipt = 0 ; int jpt = 0 ; double fbeg = Double . NaN ; do { final int nfm = getEvaluations ( ) ; final int nfx = nfm - n ; final int nfmm = nfm - 1 ; final int nfxm = nfx - 1 ; double stepa = 0 ; double stepb = 0 ; if ( nfm <= 2 * n ) { if ( nfm >= 1 && nfm <= n ) { stepa = initialTrustRegionRadius ; if ( upperDifference . getEntry ( nfmm ) == ZERO ) { stepa = - stepa ; throw new PathIsExploredException ( ) ; } interpolationPoints . setEntry ( nfm , nfmm , stepa ) ; } else if ( nfm > n ) { stepa = interpolationPoints . getEntry ( nfx , nfxm ) ; stepb = - initialTrustRegionRadius ; if ( lowerDifference . getEntry ( nfxm ) == ZERO ) { stepb = Math . min ( TWO * initialTrustRegionRadius , upperDifference . getEntry ( nfxm ) ) ; throw new PathIsExploredException ( ) ; } if ( upperDifference . getEntry ( nfxm ) == ZERO ) { stepb = Math . max ( - TWO * initialTrustRegionRadius , lowerDifference . getEntry ( nfxm ) ) ; throw new PathIsExploredException ( ) ; } interpolationPoints . setEntry ( nfm , nfxm , stepb ) ; } } else { final int tmp1 = ( nfm - np ) / n ; jpt = nfm - tmp1 * n - n ; ipt = jpt + tmp1 ; if ( ipt > n ) { final int tmp2 = jpt ; jpt = ipt - n ; ipt = tmp2 ; } final int iptMinus1 = ipt - 1 ; final int jptMinus1 = jpt - 1 ; interpolationPoints . setEntry ( nfm , iptMinus1 , interpolationPoints . getEntry ( ipt , iptMinus1 ) ) ; interpolationPoints . setEntry ( nfm , jptMinus1 , interpolationPoints . getEntry ( jpt , jptMinus1 ) ) ; } for ( int j = 0 ; j < n ; j ++ ) { currentBest . setEntry ( j , Math . min ( Math . max ( lowerBound [ j ] , originShift . getEntry ( j ) + interpolationPoints . getEntry ( nfm , j ) ) , upperBound [ j ] ) ) ; if ( interpolationPoints . getEntry ( nfm , j ) == lowerDifference . getEntry ( j ) ) { currentBest . setEntry ( j , lowerBound [ j ] ) ; } if ( interpolationPoints . getEntry ( nfm , j ) == upperDifference . getEntry ( j ) ) { currentBest . setEntry ( j , upperBound [ j ] ) ; } } final double objectiveValue = computeObjectiveValue ( currentBest . toArray ( ) ) ; final double f = isMinimize ? objectiveValue : - objectiveValue ; final int numEval = getEvaluations ( ) ; fAtInterpolationPoints . setEntry ( nfm , f ) ; if ( numEval == 1 ) { fbeg = f ; trustRegionCenterInterpolationPointIndex = 0 ; } else if ( f < fAtInterpolationPoints . getEntry ( trustRegionCenterInterpolationPointIndex ) ) { trustRegionCenterInterpolationPointIndex = nfm ; } if ( numEval <= 2 * n + 1 ) { if ( numEval >= 2 && numEval <= n + 1 ) { gradientAtTrustRegionCenter . 
setEntry ( nfmm , ( f - fbeg ) / stepa ) ; if ( npt < numEval + n ) { final double oneOverStepA = ONE / stepa ; bMatrix . setEntry ( 0 , nfmm , - oneOverStepA ) ; bMatrix . setEntry ( nfm , nfmm , oneOverStepA ) ; bMatrix . setEntry ( npt + nfmm , nfmm , - HALF * rhosq ) ; throw new PathIsExploredException ( ) ; } } else if ( numEval >= n + 2 ) { final int ih = nfx * ( nfx + 1 ) / 2 - 1 ; final double tmp = ( f - fbeg ) / stepb ; final double diff = stepb - stepa ; modelSecondDerivativesValues . setEntry ( ih , TWO * ( tmp - gradientAtTrustRegionCenter . getEntry ( nfxm ) ) / diff ) ; gradientAtTrustRegionCenter . setEntry ( nfxm , ( gradientAtTrustRegionCenter . getEntry ( nfxm ) * stepb - tmp * stepa ) / diff ) ; if ( stepa * stepb < ZERO ) { if ( f < fAtInterpolationPoints . getEntry ( nfm - n ) ) { fAtInterpolationPoints . setEntry ( nfm , fAtInterpolationPoints . getEntry ( nfm - n ) ) ; fAtInterpolationPoints . setEntry ( nfm - n , f ) ; if ( trustRegionCenterInterpolationPointIndex == nfm ) { trustRegionCenterInterpolationPointIndex = nfm - n ; } interpolationPoints . setEntry ( nfm - n , nfxm , stepb ) ; interpolationPoints . setEntry ( nfm , nfxm , stepa ) ; } } bMatrix . setEntry ( 0 , nfxm , - ( stepa + stepb ) / ( stepa * stepb ) ) ; bMatrix . setEntry ( nfm , nfxm , - HALF / interpolationPoints . getEntry ( nfm - n , nfxm ) ) ; bMatrix . setEntry ( nfm - n , nfxm , - bMatrix . getEntry ( 0 , nfxm ) - bMatrix . getEntry ( nfm , nfxm ) ) ; zMatrix . setEntry ( 0 , nfxm , Math . sqrt ( TWO ) / ( stepa * stepb ) ) ; zMatrix . setEntry ( nfm , nfxm , Math . sqrt ( HALF ) / rhosq ) ; zMatrix . setEntry ( nfm - n , nfxm , - zMatrix . getEntry ( 0 , nfxm ) - zMatrix . getEntry ( nfm , nfxm ) ) ; } } else { zMatrix . setEntry ( 0 , nfxm , recip ) ; zMatrix . setEntry ( nfm , nfxm , recip ) ; zMatrix . setEntry ( ipt , nfxm , - recip ) ; zMatrix . setEntry ( jpt , nfxm , - recip ) ; final int ih = ipt * ( ipt - 1 ) / 2 + jpt - 1 ; final double tmp = interpolationPoints . getEntry ( nfm , ipt - 1 ) * interpolationPoints . getEntry ( nfm , jpt - 1 ) ; modelSecondDerivativesValues . setEntry ( ih , ( fbeg - fAtInterpolationPoints . getEntry ( ipt ) - fAtInterpolationPoints . getEntry ( jpt ) + f ) / tmp ) ; } } while ( getEvaluations ( ) < npt ) ; } |
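The Math-38 fix is a 1-based-to-0-based translation slip: the FORTRAN original indexes {{XPT(NF, IPT)}}, so the Java port must subtract one before indexing ({{iptMinus1 = ipt - 1}}); the buggy version kept the 1-based value, which overruns as soon as execution reaches the branch used when there are more than 2*dim+1 interpolation points. A tiny illustration of the boundary (the array and values are made up):
{code}
public class IndexTranslationSketch {
    public static void main(String[] args) {
        double[][] xpt = new double[5][5];
        int ipt = 5;                 // a 1-based FORTRAN index; valid positions are 1..5

        // FORTRAN:  XPT(NF, IPT) = ...
        // Java:     xpt[nf][ipt - 1] = ...   (subtract 1 once, at the boundary)
        xpt[0][ipt - 1] = 1.0;       // fine

        try {
            xpt[0][ipt] = 1.0;       // keeping the 1-based value overruns the array
        } catch (ArrayIndexOutOfBoundsException e) {
            System.out.println("out of bounds at " + ipt);
        }
    }
}
{code}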
Codec | 4 | src/java/org/apache/commons/codec/binary/Base64.java | 224 | 226 | new Base64().encode() appends a CRLF, and chunks results into 76-character lines | The instance encode() method (e.g. new Base64().encode()) appends a CRLF. In fact, it chunks the whole output into 76-character lines. Commons-Codec-1.3 did not do this. The static Base64.encodeBase64() method behaves the same in both 1.3 and 1.4, so this problem only affects the instance encode() method.
{code}
import org.apache.commons.codec.binary.*;
public class B64 {
public static void main(String[] args) throws Exception {
Base64 b64 = new Base64();
String s1 = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
String s2 = "aaaaaaaaaa";
String s3 = "a";
byte[] b1 = s1.getBytes("UTF-8");
byte[] b2 = s2.getBytes("UTF-8");
byte[] b3 = s3.getBytes("UTF-8");
byte[] result;
result = Base64.encodeBase64(b1);
System.out.println("[" + new String(result, "UTF-8") + "]");
result = b64.encode(b1);
System.out.println("[" + new String(result, "UTF-8") + "]");
result = Base64.encodeBase64(b2);
System.out.println("[" + new String(result, "UTF-8") + "]");
result = b64.encode(b2);
System.out.println("[" + new String(result, "UTF-8") + "]");
result = Base64.encodeBase64(b3);
System.out.println("[" + new String(result, "UTF-8") + "]");
result = b64.encode(b3);
System.out.println("[" + new String(result, "UTF-8") + "]");
}
}
{code}
Here's my output:
{noformat}
$ java -cp commons-codec-1.3.jar:. B64
[YWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYQ==]
[YWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYQ==]
[YWFhYWFhYWFhYQ==]
[YWFhYWFhYWFhYQ==]
[YQ==]
[YQ==]
$ java -cp commons-codec-1.4.jar:. B64
[YWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYQ==]
[YWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFh
YWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYQ==
]
[YWFhYWFhYWFhYQ==]
[YWFhYWFhYWFhYQ==
]
[YQ==]
[YQ==
]
{noformat}
| public Base64() {
this(false);
} | public Base64 ( ) { this ( false ) ; } | public Base64() {
this(0);
} | public Base64 ( ) { this ( 0 ) ; } |
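The fix makes the no-argument constructor delegate with a line length of 0, which disables chunking. A minimal sketch of the behavioural difference, assuming commons-codec 1.4's `Base64(int lineLength)` constructor:
```
import org.apache.commons.codec.binary.Base64;

public class ChunkingDemo {
    public static void main(String[] args) {
        byte[] input = new byte[100]; // encodes to well over 76 characters

        // Line length 0 (the patched default) disables chunking entirely.
        Base64 plain = new Base64(0);
        // Line length 76 reproduces the old instance behaviour: output split
        // into 76-character lines, each terminated by CRLF.
        Base64 chunked = new Base64(76);

        System.out.println(new String(plain.encode(input)).contains("\r\n"));   // false
        System.out.println(new String(chunked.encode(input)).contains("\r\n")); // true
    }
}
```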
Cli | 39 | src/main/java/org/apache/commons/cli/TypeHandler.java | 64 | 106 | Option parser type EXISTING_FILE_VALUE does not check that the file exists | When the user passes option type FileInputStream.class, I think the expected behavior is that the return value has the same type the user passed.
Options options = new Options();
options.addOption(Option.builder("f").hasArg().type(FileInputStream.class).build());
CommandLine cline = new DefaultParser().parse(options, args);
FileInputStream file = (FileInputStream) cline.getParsedOptionValue("f"); // it returns a "File" object, without checking that the file exists.
I attach a solution for it:
https://github.com/schaumb/commons-cli/commit/abfcc8211f529ab75f3b3edd4a827e484109eb0b
| public static Object createValue(final String str, final Class<?> clazz) throws ParseException
{
if (PatternOptionBuilder.STRING_VALUE == clazz)
{
return str;
}
else if (PatternOptionBuilder.OBJECT_VALUE == clazz)
{
return createObject(str);
}
else if (PatternOptionBuilder.NUMBER_VALUE == clazz)
{
return createNumber(str);
}
else if (PatternOptionBuilder.DATE_VALUE == clazz)
{
return createDate(str);
}
else if (PatternOptionBuilder.CLASS_VALUE == clazz)
{
return createClass(str);
}
else if (PatternOptionBuilder.FILE_VALUE == clazz)
{
return createFile(str);
}
else if (PatternOptionBuilder.EXISTING_FILE_VALUE == clazz)
{
return createFile(str);
}
else if (PatternOptionBuilder.FILES_VALUE == clazz)
{
return createFiles(str);
}
else if (PatternOptionBuilder.URL_VALUE == clazz)
{
return createURL(str);
}
else
{
return null;
}
} | public static Object createValue ( final String str , final Class < ? > clazz ) throws ParseException { if ( PatternOptionBuilder . STRING_VALUE == clazz ) { return str ; } else if ( PatternOptionBuilder . OBJECT_VALUE == clazz ) { return createObject ( str ) ; } else if ( PatternOptionBuilder . NUMBER_VALUE == clazz ) { return createNumber ( str ) ; } else if ( PatternOptionBuilder . DATE_VALUE == clazz ) { return createDate ( str ) ; } else if ( PatternOptionBuilder . CLASS_VALUE == clazz ) { return createClass ( str ) ; } else if ( PatternOptionBuilder . FILE_VALUE == clazz ) { return createFile ( str ) ; } else if ( PatternOptionBuilder . EXISTING_FILE_VALUE == clazz ) { return createFile ( str ) ; } else if ( PatternOptionBuilder . FILES_VALUE == clazz ) { return createFiles ( str ) ; } else if ( PatternOptionBuilder . URL_VALUE == clazz ) { return createURL ( str ) ; } else { return null ; } } | public static Object createValue(final String str, final Class<?> clazz) throws ParseException
{
if (PatternOptionBuilder.STRING_VALUE == clazz)
{
return str;
}
else if (PatternOptionBuilder.OBJECT_VALUE == clazz)
{
return createObject(str);
}
else if (PatternOptionBuilder.NUMBER_VALUE == clazz)
{
return createNumber(str);
}
else if (PatternOptionBuilder.DATE_VALUE == clazz)
{
return createDate(str);
}
else if (PatternOptionBuilder.CLASS_VALUE == clazz)
{
return createClass(str);
}
else if (PatternOptionBuilder.FILE_VALUE == clazz)
{
return createFile(str);
}
else if (PatternOptionBuilder.EXISTING_FILE_VALUE == clazz)
{
return openFile(str);
}
else if (PatternOptionBuilder.FILES_VALUE == clazz)
{
return createFiles(str);
}
else if (PatternOptionBuilder.URL_VALUE == clazz)
{
return createURL(str);
}
else
{
return null;
}
} | public static Object createValue ( final String str , final Class < ? > clazz ) throws ParseException { if ( PatternOptionBuilder . STRING_VALUE == clazz ) { return str ; } else if ( PatternOptionBuilder . OBJECT_VALUE == clazz ) { return createObject ( str ) ; } else if ( PatternOptionBuilder . NUMBER_VALUE == clazz ) { return createNumber ( str ) ; } else if ( PatternOptionBuilder . DATE_VALUE == clazz ) { return createDate ( str ) ; } else if ( PatternOptionBuilder . CLASS_VALUE == clazz ) { return createClass ( str ) ; } else if ( PatternOptionBuilder . FILE_VALUE == clazz ) { return createFile ( str ) ; } else if ( PatternOptionBuilder . EXISTING_FILE_VALUE == clazz ) { return openFile ( str ) ; } else if ( PatternOptionBuilder . FILES_VALUE == clazz ) { return createFiles ( str ) ; } else if ( PatternOptionBuilder . URL_VALUE == clazz ) { return createURL ( str ) ; } else { return null ; } } |
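The patched `EXISTING_FILE_VALUE` branch calls `openFile(str)` instead of `createFile(str)`. The linked commit adds such a helper to `TypeHandler`; a hedged sketch of what it might look like (the exception message wording is an assumption):
```
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import org.apache.commons.cli.ParseException;

public class TypeHandlerSketch {
    // Opening the stream both verifies that the file exists and returns the
    // FileInputStream that type(FileInputStream.class) callers expect.
    public static FileInputStream openFile(final String str) throws ParseException {
        try {
            return new FileInputStream(str);
        } catch (FileNotFoundException e) {
            throw new ParseException("Unable to find file: " + str); // wording is an assumption
        }
    }
}
```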
Compress | 37 | src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java | 452 | 502 | Parsing PAX headers fails with NegativeArraySizeException | The {{TarArchiveInputStream.parsePaxHeaders}} method fails with a {{NegativeArraySizeException}} when there is an empty line at the end of the headers.
The inner loop starts reading the length, but it gets a newline (10) and ends up subtracting '0' (48) from it; the result is a negative length that blows up an attempt to allocate the {{rest}} array.
I would suggest a check to see whether {{ch}} is less than '0', breaking the loop if it is.
I used {{npm pack aws-sdk@2.2.16}} to generate a tarball with this issue. | Map<String, String> parsePaxHeaders(final InputStream i)
throws IOException {
final Map<String, String> headers = new HashMap<String, String>(globalPaxHeaders);
// Format is "length keyword=value\n";
while(true){ // get length
int ch;
int len = 0;
int read = 0;
while((ch = i.read()) != -1) {
read++;
if (ch == ' '){
// Get keyword
final ByteArrayOutputStream coll = new ByteArrayOutputStream();
while((ch = i.read()) != -1) {
read++;
if (ch == '='){ // end of keyword
final String keyword = coll.toString(CharsetNames.UTF_8);
// Get rest of entry
final int restLen = len - read;
if (restLen == 1) { // only NL
headers.remove(keyword);
} else {
final byte[] rest = new byte[restLen];
final int got = IOUtils.readFully(i, rest);
if (got != restLen) {
throw new IOException("Failed to read "
+ "Paxheader. Expected "
+ restLen
+ " bytes, read "
+ got);
}
// Drop trailing NL
final String value = new String(rest, 0,
restLen - 1, CharsetNames.UTF_8);
headers.put(keyword, value);
}
break;
}
coll.write((byte) ch);
}
break; // Processed single header
}
len *= 10;
len += ch - '0';
}
if (ch == -1){ // EOF
break;
}
}
return headers;
} | Map < String , String > parsePaxHeaders ( final InputStream i ) throws IOException { final Map < String , String > headers = new HashMap < String , String > ( globalPaxHeaders ) ; while ( true ) { int ch ; int len = 0 ; int read = 0 ; while ( ( ch = i . read ( ) ) != - 1 ) { read ++ ; if ( ch == ' ' ) { final ByteArrayOutputStream coll = new ByteArrayOutputStream ( ) ; while ( ( ch = i . read ( ) ) != - 1 ) { read ++ ; if ( ch == '=' ) { final String keyword = coll . toString ( CharsetNames . UTF_8 ) ; final int restLen = len - read ; if ( restLen == 1 ) { headers . remove ( keyword ) ; } else { final byte [ ] rest = new byte [ restLen ] ; final int got = IOUtils . readFully ( i , rest ) ; if ( got != restLen ) { throw new IOException ( "Failed to read " + "Paxheader. Expected " + restLen + " bytes, read " + got ) ; } final String value = new String ( rest , 0 , restLen - 1 , CharsetNames . UTF_8 ) ; headers . put ( keyword , value ) ; } break ; } coll . write ( ( byte ) ch ) ; } break ; } len *= 10 ; len += ch - '0' ; } if ( ch == - 1 ) { break ; } } return headers ; } | Map<String, String> parsePaxHeaders(final InputStream i)
throws IOException {
final Map<String, String> headers = new HashMap<String, String>(globalPaxHeaders);
// Format is "length keyword=value\n";
while(true){ // get length
int ch;
int len = 0;
int read = 0;
while((ch = i.read()) != -1) {
read++;
if (ch == '\n') { // blank line in header
break;
} else if (ch == ' '){ // End of length string
// Get keyword
final ByteArrayOutputStream coll = new ByteArrayOutputStream();
while((ch = i.read()) != -1) {
read++;
if (ch == '='){ // end of keyword
final String keyword = coll.toString(CharsetNames.UTF_8);
// Get rest of entry
final int restLen = len - read;
if (restLen == 1) { // only NL
headers.remove(keyword);
} else {
final byte[] rest = new byte[restLen];
final int got = IOUtils.readFully(i, rest);
if (got != restLen) {
throw new IOException("Failed to read "
+ "Paxheader. Expected "
+ restLen
+ " bytes, read "
+ got);
}
// Drop trailing NL
final String value = new String(rest, 0,
restLen - 1, CharsetNames.UTF_8);
headers.put(keyword, value);
}
break;
}
coll.write((byte) ch);
}
break; // Processed single header
}
len *= 10;
len += ch - '0';
}
if (ch == -1){ // EOF
break;
}
}
return headers;
} | Map < String , String > parsePaxHeaders ( final InputStream i ) throws IOException { final Map < String , String > headers = new HashMap < String , String > ( globalPaxHeaders ) ; while ( true ) { int ch ; int len = 0 ; int read = 0 ; while ( ( ch = i . read ( ) ) != - 1 ) { read ++ ; if ( ch == '\n' ) { break ; } else if ( ch == ' ' ) { final ByteArrayOutputStream coll = new ByteArrayOutputStream ( ) ; while ( ( ch = i . read ( ) ) != - 1 ) { read ++ ; if ( ch == '=' ) { final String keyword = coll . toString ( CharsetNames . UTF_8 ) ; final int restLen = len - read ; if ( restLen == 1 ) { headers . remove ( keyword ) ; } else { final byte [ ] rest = new byte [ restLen ] ; final int got = IOUtils . readFully ( i , rest ) ; if ( got != restLen ) { throw new IOException ( "Failed to read " + "Paxheader. Expected " + restLen + " bytes, read " + got ) ; } final String value = new String ( rest , 0 , restLen - 1 , CharsetNames . UTF_8 ) ; headers . put ( keyword , value ) ; } break ; } coll . write ( ( byte ) ch ) ; } break ; } len *= 10 ; len += ch - '0' ; } if ( ch == - 1 ) { break ; } } return headers ; } |
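A worked illustration of the arithmetic from the report: the blank line delivers '\n' (10) to the length parser, which blindly subtracts '0' (48):
```
public class PaxLengthDemo {
    public static void main(String[] args) {
        int ch = '\n';           // blank line at the end of the PAX headers
        int len = 0;
        len *= 10;
        len += ch - '0';         // 10 - 48 = -38
        System.out.println(len); // negative "length"; new byte[len - read] then throws
    }
}
```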
Cli | 9 | src/java/org/apache/commons/cli/Parser.java | 303 | 324 | MissingOptionException.getMessage() changed from CLI 1.0 > 1.1 | The MissingOptionException.getMessage() string changed from CLI 1.0 > 1.1.
CLI 1.0 was poorly formatted but readable:
Missing required options: -format-source-properties
CLI 1.1 is almost unreadable:
Missing required options: formatsourceproperties
In CLI 1.0 Options.addOption(Option) prefixed the stored options with a "-" and in CLI 1.1 it doesn't.
I would suggest changing Parser.checkRequiredOptions() to add the options to the error message with a prefix of " -":
OLD:
// loop through the required options
while (iter.hasNext())
{
buff.append(iter.next());
}
NEW:
// loop through the required options
while (iter.hasNext())
{
buff.append(" -" + iter.next());
}
Resulting in:
Missing required options: -format -source -properties
| protected void checkRequiredOptions()
throws MissingOptionException
{
// if there are required options that have not been
// processed
if (getRequiredOptions().size() > 0)
{
Iterator iter = getRequiredOptions().iterator();
StringBuffer buff = new StringBuffer("Missing required option");
buff.append(getRequiredOptions().size() == 1 ? "" : "s");
buff.append(": ");
// loop through the required options
while (iter.hasNext())
{
buff.append(iter.next());
}
throw new MissingOptionException(buff.toString());
}
} | protected void checkRequiredOptions ( ) throws MissingOptionException { if ( getRequiredOptions ( ) . size ( ) > 0 ) { Iterator iter = getRequiredOptions ( ) . iterator ( ) ; StringBuffer buff = new StringBuffer ( "Missing required option" ) ; buff . append ( getRequiredOptions ( ) . size ( ) == 1 ? "" : "s" ) ; buff . append ( ": " ) ; while ( iter . hasNext ( ) ) { buff . append ( iter . next ( ) ) ; } throw new MissingOptionException ( buff . toString ( ) ) ; } } | protected void checkRequiredOptions()
throws MissingOptionException
{
// if there are required options that have not been
// processed
if (getRequiredOptions().size() > 0)
{
Iterator iter = getRequiredOptions().iterator();
StringBuffer buff = new StringBuffer("Missing required option");
buff.append(getRequiredOptions().size() == 1 ? "" : "s");
buff.append(": ");
// loop through the required options
while (iter.hasNext())
{
buff.append(iter.next());
buff.append(", ");
}
throw new MissingOptionException(buff.substring(0, buff.length() - 2));
}
} | protected void checkRequiredOptions ( ) throws MissingOptionException { if ( getRequiredOptions ( ) . size ( ) > 0 ) { Iterator iter = getRequiredOptions ( ) . iterator ( ) ; StringBuffer buff = new StringBuffer ( "Missing required option" ) ; buff . append ( getRequiredOptions ( ) . size ( ) == 1 ? "" : "s" ) ; buff . append ( ": " ) ; while ( iter . hasNext ( ) ) { buff . append ( iter . next ( ) ) ; buff . append ( ", " ) ; } throw new MissingOptionException ( buff . substring ( 0 , buff . length ( ) - 2 ) ) ; } } |
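The patched loop appends a ", " separator after every option name and trims the trailing separator with substring, so the message reads naturally for any number of options. A standalone rendering of that formatting step:
```
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

public class MissingOptionsMessageDemo {
    public static void main(String[] args) {
        List<String> required = Arrays.asList("format", "source", "properties");
        StringBuffer buff = new StringBuffer("Missing required options: ");
        for (Iterator<String> iter = required.iterator(); iter.hasNext();) {
            buff.append(iter.next());
            buff.append(", ");
        }
        // Drop the trailing ", ", exactly as the patched checkRequiredOptions() does.
        System.out.println(buff.substring(0, buff.length() - 2));
        // -> Missing required options: format, source, properties
    }
}
```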
Compress | 21 | src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZOutputFile.java | 634 | 649 | Writing 7z empty entries produces incorrect or corrupt archive | I couldn't find an exact rule that causes this incorrect behavior, but I tried to reduce it to some simple scenarios to reproduce it:
Input: A folder with certain files -> tried to archive it.
If the folder contains more than 7 files the incorrect behavior appears.
Scenario 1: 7 empty files
Result: The created archive contains a single folder entry with the name of the archive (regardless of what the file was named)
Scenario 2: 7 files, some empty, some with content
Result: The created archive contains a folder entry with the name of the archive and a number of file entries also with the name of the archive. The number of the entries is equal to the number of non-empty files.
Scenario 3: 8 empty files
Result: 7zip Manager cannot open archive and stops working.
Scenario 4.1: 8 files: some empty, some with content, last file (alphabetically) with content
Result: same behavior as described for Scenario 2.
Scenario 4.2: 8 files, some empty, some with content, last file empty
Result: archive is corrupt, the following message is received: "Cannot open file 'archivename.7z' as archive" (7Zip Manager does not crash). | private void writeBits(final DataOutput header, final BitSet bits, final int length) throws IOException {
int cache = 0;
int shift = 7;
for (int i = 0; i < length; i++) {
cache |= ((bits.get(i) ? 1 : 0) << shift);
--shift;
if (shift == 0) {
header.write(cache);
shift = 7;
cache = 0;
}
}
if (length > 0 && shift > 0) {
header.write(cache);
}
} | private void writeBits ( final DataOutput header , final BitSet bits , final int length ) throws IOException { int cache = 0 ; int shift = 7 ; for ( int i = 0 ; i < length ; i ++ ) { cache |= ( ( bits . get ( i ) ? 1 : 0 ) << shift ) ; -- shift ; if ( shift == 0 ) { header . write ( cache ) ; shift = 7 ; cache = 0 ; } } if ( length > 0 && shift > 0 ) { header . write ( cache ) ; } } | private void writeBits(final DataOutput header, final BitSet bits, final int length) throws IOException {
int cache = 0;
int shift = 7;
for (int i = 0; i < length; i++) {
cache |= ((bits.get(i) ? 1 : 0) << shift);
if (--shift < 0) {
header.write(cache);
shift = 7;
cache = 0;
}
}
if (shift != 7) {
header.write(cache);
}
} | private void writeBits ( final DataOutput header , final BitSet bits , final int length ) throws IOException { int cache = 0 ; int shift = 7 ; for ( int i = 0 ; i < length ; i ++ ) { cache |= ( ( bits . get ( i ) ? 1 : 0 ) << shift ) ; if ( -- shift < 0 ) { header . write ( cache ) ; shift = 7 ; cache = 0 ; } } if ( shift != 7 ) { header . write ( cache ) ; } } |
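The original loop flushed after only seven bits (`--shift; if (shift == 0)`) and its trailing-byte condition missed a full final byte, which is why archives break once an eighth empty-file flag appears. A self-contained rendering of the patched packing, exercised with the eight-flag case from Scenario 3:
```
import java.io.ByteArrayOutputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.BitSet;

public class WriteBitsDemo {
    // Patched packing: 8 bits per byte, most significant bit first.
    static void writeBits(DataOutput header, BitSet bits, int length) throws IOException {
        int cache = 0;
        int shift = 7;
        for (int i = 0; i < length; i++) {
            cache |= (bits.get(i) ? 1 : 0) << shift;
            if (--shift < 0) {  // flush only after a full 8 bits
                header.write(cache);
                shift = 7;
                cache = 0;
            }
        }
        if (shift != 7) {       // flush a trailing partial byte, if any
            header.write(cache);
        }
    }

    public static void main(String[] args) throws IOException {
        BitSet bits = new BitSet();
        bits.set(0, 8); // eight "empty file" flags, as in Scenario 3
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        writeBits(new DataOutputStream(out), bits, 8);
        System.out.println(out.size()); // 1 byte; the old code emitted 2 malformed bytes
    }
}
```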
Math | 79 | src/main/java/org/apache/commons/math/util/MathUtils.java | 1623 | 1630 | NPE in KMeansPlusPlusClusterer unittest | When running this unittest, I am facing this NPE:
java.lang.NullPointerException
at org.apache.commons.math.stat.clustering.KMeansPlusPlusClusterer.assignPointsToClusters(KMeansPlusPlusClusterer.java:91)
This is the unittest:
package org.fao.fisheries.chronicles.calcuation.cluster;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import org.apache.commons.math.stat.clustering.Cluster;
import org.apache.commons.math.stat.clustering.EuclideanIntegerPoint;
import org.apache.commons.math.stat.clustering.KMeansPlusPlusClusterer;
import org.fao.fisheries.chronicles.input.CsvImportProcess;
import org.fao.fisheries.chronicles.input.Top200Csv;
import org.junit.Test;
public class ClusterAnalysisTest {
@Test
public void testPerformClusterAnalysis2() {
KMeansPlusPlusClusterer<EuclideanIntegerPoint> transformer = new KMeansPlusPlusClusterer<EuclideanIntegerPoint>(
new Random(1746432956321l));
EuclideanIntegerPoint[] points = new EuclideanIntegerPoint[] {
new EuclideanIntegerPoint(new int[] { 1959, 325100 }),
new EuclideanIntegerPoint(new int[] { 1960, 373200 }), };
List<Cluster<EuclideanIntegerPoint>> clusters = transformer.cluster(Arrays.asList(points), 1, 1);
assertEquals(1, clusters.size());
}
}
| public static double distance(int[] p1, int[] p2) {
int sum = 0;
for (int i = 0; i < p1.length; i++) {
final int dp = p1[i] - p2[i];
sum += dp * dp;
}
return Math.sqrt(sum);
} | public static double distance ( int [ ] p1 , int [ ] p2 ) { int sum = 0 ; for ( int i = 0 ; i < p1 . length ; i ++ ) { final int dp = p1 [ i ] - p2 [ i ] ; sum += dp * dp ; } return Math . sqrt ( sum ) ; } | public static double distance(int[] p1, int[] p2) {
double sum = 0;
for (int i = 0; i < p1.length; i++) {
final double dp = p1[i] - p2[i];
sum += dp * dp;
}
return Math.sqrt(sum);
} | public static double distance ( int [ ] p1 , int [ ] p2 ) { double sum = 0 ; for ( int i = 0 ; i < p1 . length ; i ++ ) { final double dp = p1 [ i ] - p2 [ i ] ; sum += dp * dp ; } return Math . sqrt ( sum ) ; } |
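The root cause is integer overflow: with the year/value pairs from the test, the squared difference exceeds Integer.MAX_VALUE, the int sum goes negative, and Math.sqrt returns NaN; the NaN distances then presumably leave the nearest-cluster search in assignPointsToClusters without a winner, producing the NPE. A small demonstration of the overflow:
```
public class DistanceOverflowDemo {
    public static void main(String[] args) {
        int dp = 325100 - 373200;                  // -48100
        int intSquare = dp * dp;                   // 2313610000 overflows int
        double dblSquare = (double) dp * dp;       // what the patched code computes
        System.out.println(intSquare);             // -1981357296
        System.out.println(Math.sqrt(intSquare));  // NaN
        System.out.println(Math.sqrt(dblSquare));  // 48100.0
    }
}
```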
Math | 96 | src/java/org/apache/commons/math/complex/Complex.java | 245 | 267 | Result of multiplying and equals for complex numbers is wrong | Hi.
The bug relates to complex numbers.
The methods "multiply" and "equals" of the class Complex are involved.
mathematical background: (0,i) * (-1,0i) = (0,-i).
little java program + output that shows the bug:
-----------------------------------------------------------------------
{code}
import org.apache.commons.math.complex.*;
public class TestProg {
public static void main(String[] args) {
ComplexFormat f = new ComplexFormat();
Complex c1 = new Complex(0,1);
Complex c2 = new Complex(-1,0);
Complex res = c1.multiply(c2);
Complex comp = new Complex(0,-1);
System.out.println("res: "+f.format(res));
System.out.println("comp: "+f.format(comp));
System.out.println("res=comp: "+res.equals(comp));
}
}
{code}
-----------------------------------------------------------------------
res: -0 - 1i
comp: 0 - 1i
res=comp: false
-----------------------------------------------------------------------
I think the "equals" should return "true".
The problem could either be the "multiply" method that gives (-0,-1i) instead of (0,-1i),
or, if you think that's right, the equals method has to be modified.
Good Luck
Dieter | public boolean equals(Object other) {
boolean ret;
if (this == other) {
ret = true;
} else if (other == null) {
ret = false;
} else {
try {
Complex rhs = (Complex)other;
if (rhs.isNaN()) {
ret = this.isNaN();
} else {
ret = (Double.doubleToRawLongBits(real) == Double.doubleToRawLongBits(rhs.getReal())) && (Double.doubleToRawLongBits(imaginary) == Double.doubleToRawLongBits(rhs.getImaginary()));
}
} catch (ClassCastException ex) {
// ignore exception
ret = false;
}
}
return ret;
} | public boolean equals ( Object other ) { boolean ret ; if ( this == other ) { ret = true ; } else if ( other == null ) { ret = false ; } else { try { Complex rhs = ( Complex ) other ; if ( rhs . isNaN ( ) ) { ret = this . isNaN ( ) ; } else { ret = ( Double . doubleToRawLongBits ( real ) == Double . doubleToRawLongBits ( rhs . getReal ( ) ) ) && ( Double . doubleToRawLongBits ( imaginary ) == Double . doubleToRawLongBits ( rhs . getImaginary ( ) ) ) ; } } catch ( ClassCastException ex ) { ret = false ; } } return ret ; } | public boolean equals(Object other) {
boolean ret;
if (this == other) {
ret = true;
} else if (other == null) {
ret = false;
} else {
try {
Complex rhs = (Complex)other;
if (rhs.isNaN()) {
ret = this.isNaN();
} else {
ret = (real == rhs.real) && (imaginary == rhs.imaginary);
}
} catch (ClassCastException ex) {
// ignore exception
ret = false;
}
}
return ret;
} | public boolean equals ( Object other ) { boolean ret ; if ( this == other ) { ret = true ; } else if ( other == null ) { ret = false ; } else { try { Complex rhs = ( Complex ) other ; if ( rhs . isNaN ( ) ) { ret = this . isNaN ( ) ; } else { ret = ( real == rhs . real ) && ( imaginary == rhs . imaginary ) ; } } catch ( ClassCastException ex ) { ret = false ; } } return ret ; } |
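The fix replaces the raw-bits comparison with a primitive ==, which treats positive and negative zero as equal (while NaN is still routed through the isNaN branch). The distinction in isolation:
```
public class NegativeZeroDemo {
    public static void main(String[] args) {
        double plusZero = 0.0;
        double minusZero = -0.0; // what (0,1)*(-1,0) produces for the real part
        // The bit patterns differ, so the old comparison said "not equal"...
        System.out.println(Double.doubleToRawLongBits(plusZero)
                == Double.doubleToRawLongBits(minusZero)); // false
        // ...while the primitive comparison used by the fix says "equal".
        System.out.println(plusZero == minusZero);          // true
    }
}
```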
Mockito | 31 | src/org/mockito/internal/stubbing/defaultanswers/ReturnsSmartNulls.java | 59 | 61 | SmartNulls should identify the arguments that the stubbed method was invoked with | I've started mocking with RETURNS_SMART_NULLS. It is very useful and I like the fact that it can be configured with the @Mock annotation. I agree with the comment that it should be the default in Mockito 2.0. Returning null values from mocked methods encourages writing code that is designed to handle nulls, which IMO is a code smell. One thing that would be really nice is if the arguments of the stubbed method were retained and then printed when the smart null is used. The reported message is typically correct: "Because this method was not stubbed correctly: ", but this is generally because I've made some mistake in matching the parameters of the stubbed method. If I could see the arguments that were used in the invocation it would mean that I wouldn't need to debug into the method to check them. | private String formatMethodCall() {
return invocation.getMethod().getName() + "()";
} | private String formatMethodCall ( ) { return invocation . getMethod ( ) . getName ( ) + "()" ; } | private String formatMethodCall() {
String args = Arrays.toString(invocation.getArguments());
return invocation.getMethod().getName() + "(" + args.substring(1, args.length() - 1) + ")";
} | private String formatMethodCall ( ) { String args = Arrays . toString ( invocation . getArguments ( ) ) ; return invocation . getMethod ( ) . getName ( ) + "(" + args . substring ( 1 , args . length ( ) - 1 ) + ")" ; } |
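The fix reuses Arrays.toString and strips its surrounding brackets to splice the recorded arguments into the method-call text. The trick in isolation (the method name here is illustrative):
```
import java.util.Arrays;

public class FormatMethodCallDemo {
    public static void main(String[] argv) {
        Object[] args = {"foo", 42};
        String s = Arrays.toString(args); // "[foo, 42]"
        // Strip the brackets, as the patched formatMethodCall() does.
        System.out.println("doSomething(" + s.substring(1, s.length() - 1) + ")");
        // -> doSomething(foo, 42)
    }
}
```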
Math | 103 | src/java/org/apache/commons/math/distribution/NormalDistributionImpl.java | 108 | 111 | ConvergenceException in normal CDF | NormalDistributionImpl::cumulativeProbability(double x) throws ConvergenceException
if x deviates too much from the mean. For example, when x=+/-100, mean=0, sd=1.
Of course the value of the CDF is hard to evaluate in these cases,
but effectively it should be either zero or one. | public double cumulativeProbability(double x) throws MathException {
return 0.5 * (1.0 + Erf.erf((x - mean) /
(standardDeviation * Math.sqrt(2.0))));
} | public double cumulativeProbability ( double x ) throws MathException { return 0.5 * ( 1.0 + Erf . erf ( ( x - mean ) / ( standardDeviation * Math . sqrt ( 2.0 ) ) ) ) ; } | public double cumulativeProbability(double x) throws MathException {
try {
return 0.5 * (1.0 + Erf.erf((x - mean) /
(standardDeviation * Math.sqrt(2.0))));
} catch (MaxIterationsExceededException ex) {
if (x < (mean - 20 * standardDeviation)) { // JDK 1.5 blows at 38
return 0.0d;
} else if (x > (mean + 20 * standardDeviation)) {
return 1.0d;
} else {
throw ex;
}
}
} | public double cumulativeProbability ( double x ) throws MathException { try { return 0.5 * ( 1.0 + Erf . erf ( ( x - mean ) / ( standardDeviation * Math . sqrt ( 2.0 ) ) ) ) ; } catch ( MaxIterationsExceededException ex ) { if ( x < ( mean - 20 * standardDeviation ) ) { return 0.0d ; } else if ( x > ( mean + 20 * standardDeviation ) ) { return 1.0d ; } else { throw ex ; } } } |
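With the patch, arguments more than 20 standard deviations from the mean are clamped to 0 or 1 instead of letting the iteration-limit exception escape Erf.erf. A usage sketch against the commons-math API the report uses:
```
import org.apache.commons.math.MathException;
import org.apache.commons.math.distribution.NormalDistributionImpl;

public class TailProbabilityDemo {
    public static void main(String[] args) throws MathException {
        NormalDistributionImpl dist = new NormalDistributionImpl(0.0, 1.0);
        System.out.println(dist.cumulativeProbability(-100.0)); // 0.0, no ConvergenceException
        System.out.println(dist.cumulativeProbability(100.0));  // 1.0
    }
}
```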
JacksonCore | 11 | src/main/java/com/fasterxml/jackson/core/sym/ByteQuadsCanonicalizer.java | 874 | 886 | ArrayIndexOutOfBoundsException: 128 when repeatedly serializing to a byte array | ```
java.lang.ArrayIndexOutOfBoundsException: 128
at com.fasterxml.jackson.core.sym.ByteQuadsCanonicalizer.addName(ByteQuadsCanonicalizer.java:853)
at com.fasterxml.jackson.core.json.UTF8StreamJsonParser.addName(UTF8StreamJsonParser.java:2340)
at com.fasterxml.jackson.core.json.UTF8StreamJsonParser.findName(UTF8StreamJsonParser.java:2224)
at com.fasterxml.jackson.core.json.UTF8StreamJsonParser.parseLongName(UTF8StreamJsonParser.java:1831)
at com.fasterxml.jackson.core.json.UTF8StreamJsonParser.parseMediumName2(UTF8StreamJsonParser.java:1786)
at com.fasterxml.jackson.core.json.UTF8StreamJsonParser.parseMediumName(UTF8StreamJsonParser.java:1743)
at com.fasterxml.jackson.core.json.UTF8StreamJsonParser._parseName(UTF8StreamJsonParser.java:1678)
at com.fasterxml.jackson.core.json.UTF8StreamJsonParser.nextFieldName(UTF8StreamJsonParser.java:1007)
at com.fasterxml.jackson.databind.deser.std.MapDeserializer._readAndBindStringMap(MapDeserializer.java:471)
at com.fasterxml.jackson.databind.deser.std.MapDeserializer.deserialize(MapDeserializer.java:341)
at com.fasterxml.jackson.databind.deser.std.MapDeserializer.deserialize(MapDeserializer.java:26)
at com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:3702)
at com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:2824)
at com.kryptnostic.services.v1.SmokeTests.spamAddIndexPair(SmokeTests.java:605)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:497)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26)
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26)
at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
at org.eclipse.jdt.internal.junit4.runner.JUnit4TestReference.run(JUnit4TestReference.java:50)
at org.eclipse.jdt.internal.junit.runner.TestExecution.run(TestExecution.java:38)
at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.runTests(RemoteTestRunner.java:459)
at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.runTests(RemoteTestRunner.java:675)
at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.run(RemoteTestRunner.java:382)
at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.main(RemoteTestRunner.java:192)
```
Repro:
```
@Test
public void spamTest() {
ObjectMapper mapper = new ObjectMapper();
Map<ObjectUserKey, ServerIndexPair> ssip = Maps.newConcurrentMap();
for ( int i = 0; i < 10000; ++i ) {
byte[] indexPairBytes = new byte[ 2080 ];
new Random().nextBytes( indexPairBytes );
ServerIndexPair sip = new ServerIndexPair( indexPairBytes );
byte[] s = mapper.writeValueAsBytes( ImmutableMap.of( UUID
.randomUUID().toString(), sip ) );
Map<String, ServerIndexPair> metadata = mapper.readValue( s,
new TypeReference<Map<String, ServerIndexPair>>() {} );
for ( Entry<String, ServerIndexPair> metadataEntry : metadata.entrySet() ) {
ServerIndexPair indexPair = metadataEntry.getValue();
ssip.put( new ObjectUserKey( metadataEntry.getKey(), user ),
indexPair );
}
logger.error( "Iteration: {}", i );
}
}
```
```
public class ServerIndexPair {
public static final String INDEX_PAIR_FIELD = "indexPair";
private final byte[] indexPair;
@JsonCreator
public ServerIndexPair( @JsonProperty( INDEX_PAIR_FIELD ) byte[] indexPair ) {
Preconditions.checkState( indexPair.length == 2080, "Index pair must be 2080 bytes long." );
this.indexPair = indexPair;
}
@JsonProperty( INDEX_PAIR_FIELD )
public byte[] getIndexPair() {
return indexPair;
}
}
```
```
public class ObjectUserKey {
public static final String SEPARATOR = ":";
private final String objectId;
private final UUID userKey;
@JsonCreator
public ObjectUserKey(
@JsonProperty( Names.ID_FIELD ) String objectId,
@JsonProperty( Names.USER_FIELD ) UUID userKey ) {
super();
this.objectId = objectId;
this.userKey = userKey;
}
@JsonProperty( Names.ID_FIELD )
public String getObjectId() {
return objectId;
}
@JsonProperty( Names.USER_FIELD )
public UUID getUserKey() {
return userKey;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ( ( objectId == null ) ? 0 : objectId.hashCode() );
return result;
}
@Override
public boolean equals( Object obj ) {
if ( this == obj ) {
return true;
}
if ( obj == null ) {
return false;
}
if ( !( obj instanceof ObjectUserKey ) ) {
return false;
}
ObjectUserKey other = (ObjectUserKey) obj;
if ( objectId == null ) {
if ( other.objectId != null ) {
return false;
}
}
if ( userKey == null ) {
if ( other.userKey != null ) {
return false;
}
}
if ( !objectId.equals( other.objectId ) ) {
return false;
}
if ( !userKey.equals( other.userKey ) ) {
return false;
}
return true;
}
@Override
public String toString() {
return userKey + SEPARATOR + objectId;
}
public static ObjectUserKey fromString( String value ) {
int index = value.lastIndexOf( ObjectUserKey.SEPARATOR );
Preconditions.checkState( index > -1, "Separator character " + SEPARATOR
+ " should be present for ObjectUserKey" );
String userKeyString = value.substring( 0, index );
String objectIdString = value.substring( index + 1 );
UUID userKey = UUID.fromString( userKeyString );
return new ObjectUserKey( objectIdString, userKey );
}
public byte[] asBytes() {
return this.toString().getBytes();
}
}
```
| private void _verifySharing()
{
if (_hashShared) {
_hashArea = Arrays.copyOf(_hashArea, _hashArea.length);
_names = Arrays.copyOf(_names, _names.length);
_hashShared = false;
// 09-Sep-2015, tatu: As per [jackson-core#216], also need to ensure
// we rehash as needed, as need-rehash flag is not copied from parent
}
if (_needRehash) {
rehash();
}
} | private void _verifySharing ( ) { if ( _hashShared ) { _hashArea = Arrays . copyOf ( _hashArea , _hashArea . length ) ; _names = Arrays . copyOf ( _names , _names . length ) ; _hashShared = false ; } if ( _needRehash ) { rehash ( ) ; } } | private void _verifySharing()
{
if (_hashShared) {
_hashArea = Arrays.copyOf(_hashArea, _hashArea.length);
_names = Arrays.copyOf(_names, _names.length);
_hashShared = false;
// 09-Sep-2015, tatu: As per [jackson-core#216], also need to ensure
// we rehash as needed, as need-rehash flag is not copied from parent
_verifyNeedForRehash();
}
if (_needRehash) {
rehash();
}
} | private void _verifySharing ( ) { if ( _hashShared ) { _hashArea = Arrays . copyOf ( _hashArea , _hashArea . length ) ; _names = Arrays . copyOf ( _names , _names . length ) ; _hashShared = false ; _verifyNeedForRehash ( ) ; } if ( _needRehash ) { rehash ( ) ; } } |
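The one-line fix recomputes the derived `_needRehash` flag whenever a child symbol table un-shares its parent's arrays, since that flag is not copied from the parent. The same copy-on-write pitfall in miniature (names here are illustrative, not Jackson's):
```
import java.util.Arrays;

// Any state derived from the copied arrays must be recomputed after the
// copy; inheriting it implicitly is what overflowed Jackson's hash area.
class CowTable {
    private int[] slots;
    private int count;
    private boolean shared = true;  // slots still belong to the parent
    private boolean needGrow;

    CowTable(int[] parentSlots, int parentCount) {
        slots = parentSlots;
        count = parentCount;
    }

    void add(int value) {
        if (shared) {
            slots = Arrays.copyOf(slots, slots.length);
            shared = false;
            needGrow = count >= slots.length / 2; // the recomputation the patch adds
        }
        if (needGrow) {
            slots = Arrays.copyOf(slots, slots.length * 2); // analogue of rehash()
        }
        slots[count++] = value;
        needGrow = count >= slots.length / 2;     // normally maintained during adds
    }
}
```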
Math | 55 | src/main/java/org/apache/commons/math/geometry/Vector3D.java | 457 | 475 | Vector3D.crossProduct is sensitive to numerical cancellation | Cross product implementation uses the naive formulas (y1 z2 - y2 z1, ...). These formulas fail when vectors are almost colinear, like in the following example:
{code}
Vector3D v1 = new Vector3D(9070467121.0, 4535233560.0, 1);
Vector3D v2 = new Vector3D(9070467123.0, 4535233561.0, 1);
System.out.println(Vector3D.crossProduct(v1, v2));
{code}
The previous code displays { -1, 2, 0 } instead of the correct answer { -1, 2, 1 } | public static Vector3D crossProduct(final Vector3D v1, final Vector3D v2) {
// rescale both vectors without losing precision,
// to ensure their norm are the same order of magnitude
// we reduce cancellation errors by preconditioning,
// we replace v1 by v3 = v1 - rho v2 with rho chosen in order to compute
// v3 without loss of precision. See Kahan lecture
// "Computing Cross-Products and Rotations in 2- and 3-Dimensional Euclidean Spaces"
// available at http://www.cs.berkeley.edu/~wkahan/MathH110/Cross.pdf
// compute rho as an 8 bits approximation of v1.v2 / v2.v2
// compute cross product from v3 and v2 instead of v1 and v2
return new Vector3D(v1.y * v2.z - v1.z * v2.y, v1.z * v2.x - v1.x * v2.z, v1.x * v2.y - v1.y * v2.x);
} | public static Vector3D crossProduct ( final Vector3D v1 , final Vector3D v2 ) { return new Vector3D ( v1 . y * v2 . z - v1 . z * v2 . y , v1 . z * v2 . x - v1 . x * v2 . z , v1 . x * v2 . y - v1 . y * v2 . x ) ; } | public static Vector3D crossProduct(final Vector3D v1, final Vector3D v2) {
final double n1 = v1.getNormSq();
final double n2 = v2.getNormSq();
if ((n1 * n2) < MathUtils.SAFE_MIN) {
return ZERO;
}
// rescale both vectors without losing precision,
// to ensure their norm are the same order of magnitude
final int deltaExp = (FastMath.getExponent(n1) - FastMath.getExponent(n2)) / 4;
final double x1 = FastMath.scalb(v1.x, -deltaExp);
final double y1 = FastMath.scalb(v1.y, -deltaExp);
final double z1 = FastMath.scalb(v1.z, -deltaExp);
final double x2 = FastMath.scalb(v2.x, deltaExp);
final double y2 = FastMath.scalb(v2.y, deltaExp);
final double z2 = FastMath.scalb(v2.z, deltaExp);
// we reduce cancellation errors by preconditioning,
// we replace v1 by v3 = v1 - rho v2 with rho chosen in order to compute
// v3 without loss of precision. See Kahan lecture
// "Computing Cross-Products and Rotations in 2- and 3-Dimensional Euclidean Spaces"
// available at http://www.cs.berkeley.edu/~wkahan/MathH110/Cross.pdf
// compute rho as an 8 bits approximation of v1.v2 / v2.v2
final double ratio = (x1 * x2 + y1 * y2 + z1 * z2) / FastMath.scalb(n2, 2 * deltaExp);
final double rho = FastMath.rint(256 * ratio) / 256;
final double x3 = x1 - rho * x2;
final double y3 = y1 - rho * y2;
final double z3 = z1 - rho * z2;
// compute cross product from v3 and v2 instead of v1 and v2
return new Vector3D(y3 * z2 - z3 * y2, z3 * x2 - x3 * z2, x3 * y2 - y3 * x2);
} | public static Vector3D crossProduct ( final Vector3D v1 , final Vector3D v2 ) { final double n1 = v1 . getNormSq ( ) ; final double n2 = v2 . getNormSq ( ) ; if ( ( n1 * n2 ) < MathUtils . SAFE_MIN ) { return ZERO ; } final int deltaExp = ( FastMath . getExponent ( n1 ) - FastMath . getExponent ( n2 ) ) / 4 ; final double x1 = FastMath . scalb ( v1 . x , - deltaExp ) ; final double y1 = FastMath . scalb ( v1 . y , - deltaExp ) ; final double z1 = FastMath . scalb ( v1 . z , - deltaExp ) ; final double x2 = FastMath . scalb ( v2 . x , deltaExp ) ; final double y2 = FastMath . scalb ( v2 . y , deltaExp ) ; final double z2 = FastMath . scalb ( v2 . z , deltaExp ) ; final double ratio = ( x1 * x2 + y1 * y2 + z1 * z2 ) / FastMath . scalb ( n2 , 2 * deltaExp ) ; final double rho = FastMath . rint ( 256 * ratio ) / 256 ; final double x3 = x1 - rho * x2 ; final double y3 = y1 - rho * y2 ; final double z3 = z1 - rho * z2 ; return new Vector3D ( y3 * z2 - z3 * y2 , z3 * x2 - x3 * z2 , x3 * y2 - y3 * x2 ) ; } |
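A worked version of the reported failure, showing why preconditioning rescues the z component (here rho, the 8-bit approximation of v1.v2 / v2.v2, rounds to 1, and the two norms match, so the scaling step is a no-op):
```
public class CrossProductCancellationDemo {
    public static void main(String[] args) {
        double x1 = 9070467121.0, y1 = 4535233560.0;
        double x2 = 9070467123.0, y2 = 4535233561.0;

        // Naive z component: both products are ~4.1e19, where a double's
        // unit in the last place is 8192, so the true difference of 1 is
        // lost to rounding.
        System.out.println(x1 * y2 - y1 * x2); // 0.0, matching the report

        // Preconditioned: the components of v3 = v1 - rho * v2 are small
        // enough that every operation below is exact.
        double rho = 1.0;
        double x3 = x1 - rho * x2; // -2.0, exact
        double y3 = y1 - rho * y2; // -1.0, exact
        System.out.println(x3 * y2 - y3 * x2); // 1.0
    }
}
```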
JacksonDatabind | 67 | src/main/java/com/fasterxml/jackson/databind/deser/BasicDeserializerFactory.java | 1384 | 1416 | Map key deserializerModifiers ignored | We have a module that extends SimpleModule to allow us to accept enum names in lower case in a fairly generic manner.
Inside that module we add the `modifyKeyDeserializer` override.
The incoming class (using immutables) is mapped to a guava immutable map.
Walking through the code:
> com.fasterxml.jackson.datatype.guava.deser.ImmutableMapDeserializer.createContextual
> calls DeserializationContext.findKeyDeserializer
> calls DeserializerCache.findKeyDeserializer
> calls BasicDeserializerFactory.createKeyDeserializer
which has the code:
```java
// the only non-standard thing is this:
if (deser == null) {
if (type.isEnumType()) {
return _createEnumKeyDeserializer(ctxt, type);
}
deser = StdKeyDeserializers.findStringBasedKeyDeserializer(config, type);
}
```
Since the key is an enum type, it returns the value from `_createEnumKeyDeserializer`, which is the standard enum deserializer.
Below that block is the check for hasDeserializerModifiers, but since we have already returned, it is never reached, so we can't override the behaviour.
Module fragment:
```java
setDeserializerModifier(new BeanDeserializerModifier() {
@Override
@SuppressWarnings("unchecked")
public JsonDeserializer<Enum> modifyEnumDeserializer(
DeserializationConfig config,
final JavaType type,
BeanDescription beanDesc,
final JsonDeserializer<?> deserializer) {
return new JsonDeserializer<Enum>() {
@Override
public Enum deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException {
Class<? extends Enum> rawClass = (Class<Enum<?>>) type.getRawClass();
return Enum.valueOf(rawClass, jp.getValueAsString().toUpperCase());
}
};
}
@Override
public KeyDeserializer modifyKeyDeserializer(
DeserializationConfig config,
JavaType type,
KeyDeserializer deserializer) {
if (!type.isEnumType()) {
return super.modifyKeyDeserializer(config, type, deserializer);
}
return new KeyDeserializer() {
@Override
@SuppressWarnings("unchecked")
public Object deserializeKey(String key, DeserializationContext ctxt)
throws IOException, JsonProcessingException {
Class<? extends Enum> rawClass = (Class<Enum<?>>) type.getRawClass();
return Enum.valueOf(rawClass, key.toUpperCase());
}
};
}
});
```
I appreciate that the code around here is fairly complex.
Related issues (possibly):
https://github.com/FasterXML/jackson-databind/issues/749
https://github.com/FasterXML/jackson-databind/issues/1313 | @Override
public KeyDeserializer createKeyDeserializer(DeserializationContext ctxt,
JavaType type)
throws JsonMappingException
{
final DeserializationConfig config = ctxt.getConfig();
KeyDeserializer deser = null;
if (_factoryConfig.hasKeyDeserializers()) {
BeanDescription beanDesc = config.introspectClassAnnotations(type.getRawClass());
for (KeyDeserializers d : _factoryConfig.keyDeserializers()) {
deser = d.findKeyDeserializer(type, config, beanDesc);
if (deser != null) {
break;
}
}
}
// the only non-standard thing is this:
if (deser == null) {
if (type.isEnumType()) {
return _createEnumKeyDeserializer(ctxt, type);
}
deser = StdKeyDeserializers.findStringBasedKeyDeserializer(config, type);
}
// and then post-processing
if (deser != null) {
if (_factoryConfig.hasDeserializerModifiers()) {
for (BeanDeserializerModifier mod : _factoryConfig.deserializerModifiers()) {
deser = mod.modifyKeyDeserializer(config, type, deser);
}
}
}
return deser;
} | @ Override public KeyDeserializer createKeyDeserializer ( DeserializationContext ctxt , JavaType type ) throws JsonMappingException { final DeserializationConfig config = ctxt . getConfig ( ) ; KeyDeserializer deser = null ; if ( _factoryConfig . hasKeyDeserializers ( ) ) { BeanDescription beanDesc = config . introspectClassAnnotations ( type . getRawClass ( ) ) ; for ( KeyDeserializers d : _factoryConfig . keyDeserializers ( ) ) { deser = d . findKeyDeserializer ( type , config , beanDesc ) ; if ( deser != null ) { break ; } } } if ( deser == null ) { if ( type . isEnumType ( ) ) { return _createEnumKeyDeserializer ( ctxt , type ) ; } deser = StdKeyDeserializers . findStringBasedKeyDeserializer ( config , type ) ; } if ( deser != null ) { if ( _factoryConfig . hasDeserializerModifiers ( ) ) { for ( BeanDeserializerModifier mod : _factoryConfig . deserializerModifiers ( ) ) { deser = mod . modifyKeyDeserializer ( config , type , deser ) ; } } } return deser ; } | @Override
public KeyDeserializer createKeyDeserializer(DeserializationContext ctxt,
JavaType type)
throws JsonMappingException
{
final DeserializationConfig config = ctxt.getConfig();
KeyDeserializer deser = null;
if (_factoryConfig.hasKeyDeserializers()) {
BeanDescription beanDesc = config.introspectClassAnnotations(type.getRawClass());
for (KeyDeserializers d : _factoryConfig.keyDeserializers()) {
deser = d.findKeyDeserializer(type, config, beanDesc);
if (deser != null) {
break;
}
}
}
// the only non-standard thing is this:
if (deser == null) {
if (type.isEnumType()) {
deser = _createEnumKeyDeserializer(ctxt, type);
} else {
deser = StdKeyDeserializers.findStringBasedKeyDeserializer(config, type);
}
}
// and then post-processing
if (deser != null) {
if (_factoryConfig.hasDeserializerModifiers()) {
for (BeanDeserializerModifier mod : _factoryConfig.deserializerModifiers()) {
deser = mod.modifyKeyDeserializer(config, type, deser);
}
}
}
return deser;
} | @ Override public KeyDeserializer createKeyDeserializer ( DeserializationContext ctxt , JavaType type ) throws JsonMappingException { final DeserializationConfig config = ctxt . getConfig ( ) ; KeyDeserializer deser = null ; if ( _factoryConfig . hasKeyDeserializers ( ) ) { BeanDescription beanDesc = config . introspectClassAnnotations ( type . getRawClass ( ) ) ; for ( KeyDeserializers d : _factoryConfig . keyDeserializers ( ) ) { deser = d . findKeyDeserializer ( type , config , beanDesc ) ; if ( deser != null ) { break ; } } } if ( deser == null ) { if ( type . isEnumType ( ) ) { deser = _createEnumKeyDeserializer ( ctxt , type ) ; } else { deser = StdKeyDeserializers . findStringBasedKeyDeserializer ( config , type ) ; } } if ( deser != null ) { if ( _factoryConfig . hasDeserializerModifiers ( ) ) { for ( BeanDeserializerModifier mod : _factoryConfig . deserializerModifiers ( ) ) { deser = mod . modifyKeyDeserializer ( config , type , deser ) ; } } } return deser ; } |
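Because the fix assigns the enum key deserializer instead of returning early, the modifier loop now runs for enum keys too. A sketch of the reporter's use case working after the patch (types and JSON here are illustrative):
```
import java.util.Map;
import com.fasterxml.jackson.databind.DeserializationConfig;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.KeyDeserializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.deser.BeanDeserializerModifier;
import com.fasterxml.jackson.databind.module.SimpleModule;

public class LowerCaseEnumKeyDemo {
    enum Color { RED, GREEN }

    public static void main(String[] args) throws Exception {
        SimpleModule module = new SimpleModule();
        module.setDeserializerModifier(new BeanDeserializerModifier() {
            @Override
            public KeyDeserializer modifyKeyDeserializer(DeserializationConfig config,
                    final JavaType type, KeyDeserializer deserializer) {
                if (!type.isEnumType()) {
                    return deserializer;
                }
                return new KeyDeserializer() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public Object deserializeKey(String key, DeserializationContext ctxt) {
                        return Enum.valueOf((Class<Color>) type.getRawClass(), key.toUpperCase());
                    }
                };
            }
        });
        ObjectMapper mapper = new ObjectMapper().registerModule(module);
        Map<Color, Integer> map = mapper.readValue("{\"red\": 1}",
                mapper.getTypeFactory().constructMapType(Map.class, Color.class, Integer.class));
        System.out.println(map); // {RED=1} -- the modifier is no longer skipped
    }
}
```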
JacksonDatabind | 88 | src/main/java/com/fasterxml/jackson/databind/jsontype/impl/ClassNameIdResolver.java | 45 | 78 | Missing type checks when using polymorphic type ids | (report by Lukes Euler)
`JavaType` supports a limited amount of generic typing for textual representation, originally just to support the typing needed for `EnumMap` (I think). Based on some reports, it appears that some of the type compatibility checks are not performed in those cases; if so, they should be made, since there is potential for abuse.
The problem here is that although actual type assignment will fail later on, the ability to trigger some of the processing (instantiation of incompatible classes, perhaps assignment of properties) may itself be a vulnerability.
| protected JavaType _typeFromId(String id, DatabindContext ctxt) throws IOException
{
/* 30-Jan-2010, tatu: Most ids are basic class names; so let's first
* check if any generics info is added; and only then ask factory
* to do translation when necessary
*/
TypeFactory tf = ctxt.getTypeFactory();
if (id.indexOf('<') > 0) {
// note: may want to try combining with specialization (esp for EnumMap)?
// 17-Aug-2017, tatu: As per [databind#1735] need to ensure assignment
// compatibility -- needed later anyway, and not doing so may open
// security issues.
JavaType t = tf.constructFromCanonical(id);
// Probably cleaner to have a method in `TypeFactory` but can't add in patch
return t;
}
Class<?> cls;
try {
cls = tf.findClass(id);
} catch (ClassNotFoundException e) {
// 24-May-2016, tatu: Ok, this is pretty ugly, but we should always get
// DeserializationContext, just playing it safe
if (ctxt instanceof DeserializationContext) {
DeserializationContext dctxt = (DeserializationContext) ctxt;
// First: we may have problem handlers that can deal with it?
return dctxt.handleUnknownTypeId(_baseType, id, this, "no such class found");
}
// ... meaning that we really should never get here.
return null;
} catch (Exception e) {
throw new IllegalArgumentException("Invalid type id '"+id+"' (for id type 'Id.class'): "+e.getMessage(), e);
}
return tf.constructSpecializedType(_baseType, cls);
} | protected JavaType _typeFromId ( String id , DatabindContext ctxt ) throws IOException { TypeFactory tf = ctxt . getTypeFactory ( ) ; if ( id . indexOf ( '<' ) > 0 ) { JavaType t = tf . constructFromCanonical ( id ) ; return t ; } Class < ? > cls ; try { cls = tf . findClass ( id ) ; } catch ( ClassNotFoundException e ) { if ( ctxt instanceof DeserializationContext ) { DeserializationContext dctxt = ( DeserializationContext ) ctxt ; return dctxt . handleUnknownTypeId ( _baseType , id , this , "no such class found" ) ; } return null ; } catch ( Exception e ) { throw new IllegalArgumentException ( "Invalid type id '" + id + "' (for id type 'Id.class'): " + e . getMessage ( ) , e ) ; } return tf . constructSpecializedType ( _baseType , cls ) ; } | protected JavaType _typeFromId(String id, DatabindContext ctxt) throws IOException
{
/* 30-Jan-2010, tatu: Most ids are basic class names; so let's first
* check if any generics info is added; and only then ask factory
* to do translation when necessary
*/
TypeFactory tf = ctxt.getTypeFactory();
if (id.indexOf('<') > 0) {
// note: may want to try combining with specialization (esp for EnumMap)?
// 17-Aug-2017, tatu: As per [databind#1735] need to ensure assignment
// compatibility -- needed later anyway, and not doing so may open
// security issues.
JavaType t = tf.constructFromCanonical(id);
if (!t.isTypeOrSubTypeOf(_baseType.getRawClass())) {
// Probably cleaner to have a method in `TypeFactory` but can't add in patch
throw new IllegalArgumentException(String.format(
"Class %s not subtype of %s", t.getRawClass().getName(), _baseType));
}
return t;
}
Class<?> cls;
try {
cls = tf.findClass(id);
} catch (ClassNotFoundException e) {
// 24-May-2016, tatu: Ok, this is pretty ugly, but we should always get
// DeserializationContext, just playing it safe
if (ctxt instanceof DeserializationContext) {
DeserializationContext dctxt = (DeserializationContext) ctxt;
// First: we may have problem handlers that can deal with it?
return dctxt.handleUnknownTypeId(_baseType, id, this, "no such class found");
}
// ... meaning that we really should never get here.
return null;
} catch (Exception e) {
throw new IllegalArgumentException("Invalid type id '"+id+"' (for id type 'Id.class'): "+e.getMessage(), e);
}
return tf.constructSpecializedType(_baseType, cls);
} | protected JavaType _typeFromId ( String id , DatabindContext ctxt ) throws IOException { TypeFactory tf = ctxt . getTypeFactory ( ) ; if ( id . indexOf ( '<' ) > 0 ) { JavaType t = tf . constructFromCanonical ( id ) ; if ( ! t . isTypeOrSubTypeOf ( _baseType . getRawClass ( ) ) ) { throw new IllegalArgumentException ( String . format ( "Class %s not subtype of %s" , t . getRawClass ( ) . getName ( ) , _baseType ) ) ; } return t ; } Class < ? > cls ; try { cls = tf . findClass ( id ) ; } catch ( ClassNotFoundException e ) { if ( ctxt instanceof DeserializationContext ) { DeserializationContext dctxt = ( DeserializationContext ) ctxt ; return dctxt . handleUnknownTypeId ( _baseType , id , this , "no such class found" ) ; } return null ; } catch ( Exception e ) { throw new IllegalArgumentException ( "Invalid type id '" + id + "' (for id type 'Id.class'): " + e . getMessage ( ) , e ) ; } return tf . constructSpecializedType ( _baseType , cls ) ; } |
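The essence of the added guard, in isolation: a canonical type id with generics parses fine, but must still be assignment-compatible with the base type before it is returned:
```
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.type.TypeFactory;

public class SubtypeGuardDemo {
    public static void main(String[] args) {
        TypeFactory tf = TypeFactory.defaultInstance();
        // A generic type id that parses, but is unrelated to the base type.
        JavaType t = tf.constructFromCanonical(
                "java.util.HashMap<java.lang.String,java.lang.String>");
        Class<?> base = java.util.List.class; // stand-in for _baseType
        if (!t.isTypeOrSubTypeOf(base)) {     // the check the patch adds
            throw new IllegalArgumentException(String.format(
                    "Class %s not subtype of %s", t.getRawClass().getName(), base));
        }
    }
}
```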
Math | 43 | src/main/java/org/apache/commons/math/stat/descriptive/SummaryStatistics.java | 149 | 168 | Statistics.setVarianceImpl makes getStandardDeviation produce NaN | Invoking SummaryStatistics.setVarianceImpl(new Variance(true/false)) makes getStandardDeviation produce NaN. The code to reproduce it:
{code:java}
int[] scores = {1, 2, 3, 4};
SummaryStatistics stats = new SummaryStatistics();
stats.setVarianceImpl(new Variance(false)); //use "population variance"
for(int i : scores) {
stats.addValue(i);
}
double sd = stats.getStandardDeviation();
System.out.println(sd);
{code}
A workaround suggested by Mikkel is:
{code:java}
double sd = FastMath.sqrt(stats.getSecondMoment() / stats.getN());
{code} | public void addValue(double value) {
sumImpl.increment(value);
sumsqImpl.increment(value);
minImpl.increment(value);
maxImpl.increment(value);
sumLogImpl.increment(value);
secondMoment.increment(value);
// If mean, variance or geomean have been overridden,
// need to increment these
if (!(meanImpl instanceof Mean)) {
meanImpl.increment(value);
}
if (!(varianceImpl instanceof Variance)) {
varianceImpl.increment(value);
}
if (!(geoMeanImpl instanceof GeometricMean)) {
geoMeanImpl.increment(value);
}
n++;
} | public void addValue ( double value ) { sumImpl . increment ( value ) ; sumsqImpl . increment ( value ) ; minImpl . increment ( value ) ; maxImpl . increment ( value ) ; sumLogImpl . increment ( value ) ; secondMoment . increment ( value ) ; if ( ! ( meanImpl instanceof Mean ) ) { meanImpl . increment ( value ) ; } if ( ! ( varianceImpl instanceof Variance ) ) { varianceImpl . increment ( value ) ; } if ( ! ( geoMeanImpl instanceof GeometricMean ) ) { geoMeanImpl . increment ( value ) ; } n ++ ; } | public void addValue(double value) {
sumImpl.increment(value);
sumsqImpl.increment(value);
minImpl.increment(value);
maxImpl.increment(value);
sumLogImpl.increment(value);
secondMoment.increment(value);
// If mean, variance or geomean have been overridden,
// need to increment these
if (meanImpl != mean) {
meanImpl.increment(value);
}
if (varianceImpl != variance) {
varianceImpl.increment(value);
}
if (geoMeanImpl != geoMean) {
geoMeanImpl.increment(value);
}
n++;
} | public void addValue ( double value ) { sumImpl . increment ( value ) ; sumsqImpl . increment ( value ) ; minImpl . increment ( value ) ; maxImpl . increment ( value ) ; sumLogImpl . increment ( value ) ; secondMoment . increment ( value ) ; if ( meanImpl != mean ) { meanImpl . increment ( value ) ; } if ( varianceImpl != variance ) { varianceImpl . increment ( value ) ; } if ( geoMeanImpl != geoMean ) { geoMeanImpl . increment ( value ) ; } n ++ ; } |
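The old guard failed because any user-supplied Variance, including the population variance from the report, is still an instance of Variance, so the override was never incremented while getStandardDeviation() read from it. The fix compares against the default instance by identity instead. The failure mode in isolation:
```
import org.apache.commons.math.stat.descriptive.moment.Variance;

public class InstanceofGuardDemo {
    public static void main(String[] args) {
        Variance populationVariance = new Variance(false); // user-supplied override
        // The old condition skipped the increment for ANY Variance...
        System.out.println(!(populationVariance instanceof Variance)); // false -> never incremented
        // ...whereas an identity check only skips the one default instance
        // that is already updated through secondMoment.
        Variance defaultImpl = new Variance();
        System.out.println(populationVariance != defaultImpl); // true -> incremented
    }
}
```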
JacksonDatabind | 71 | src/main/java/com/fasterxml/jackson/databind/deser/std/StdKeyDeserializer.java | 70 | 116 | Missing `KeyDeserializer` for `CharSequence` | Looks like use of nominal Map key type of `CharSequence` does not work yet (as of 2.7.8 / 2.8.6).
This is something that is needed to work with certain frameworks, such as Avro's generated POJOs.
| public static StdKeyDeserializer forType(Class<?> raw)
{
int kind;
// first common types:
if (raw == String.class || raw == Object.class) {
return StringKD.forType(raw);
} else if (raw == UUID.class) {
kind = TYPE_UUID;
} else if (raw == Integer.class) {
kind = TYPE_INT;
} else if (raw == Long.class) {
kind = TYPE_LONG;
} else if (raw == Date.class) {
kind = TYPE_DATE;
} else if (raw == Calendar.class) {
kind = TYPE_CALENDAR;
// then less common ones...
} else if (raw == Boolean.class) {
kind = TYPE_BOOLEAN;
} else if (raw == Byte.class) {
kind = TYPE_BYTE;
} else if (raw == Character.class) {
kind = TYPE_CHAR;
} else if (raw == Short.class) {
kind = TYPE_SHORT;
} else if (raw == Float.class) {
kind = TYPE_FLOAT;
} else if (raw == Double.class) {
kind = TYPE_DOUBLE;
} else if (raw == URI.class) {
kind = TYPE_URI;
} else if (raw == URL.class) {
kind = TYPE_URL;
} else if (raw == Class.class) {
kind = TYPE_CLASS;
} else if (raw == Locale.class) {
FromStringDeserializer<?> deser = FromStringDeserializer.findDeserializer(Locale.class);
return new StdKeyDeserializer(TYPE_LOCALE, raw, deser);
} else if (raw == Currency.class) {
FromStringDeserializer<?> deser = FromStringDeserializer.findDeserializer(Currency.class);
return new StdKeyDeserializer(TYPE_CURRENCY, raw, deser);
} else {
return null;
}
return new StdKeyDeserializer(kind, raw);
} | public static StdKeyDeserializer forType ( Class < ? > raw ) { int kind ; if ( raw == String . class || raw == Object . class ) { return StringKD . forType ( raw ) ; } else if ( raw == UUID . class ) { kind = TYPE_UUID ; } else if ( raw == Integer . class ) { kind = TYPE_INT ; } else if ( raw == Long . class ) { kind = TYPE_LONG ; } else if ( raw == Date . class ) { kind = TYPE_DATE ; } else if ( raw == Calendar . class ) { kind = TYPE_CALENDAR ; } else if ( raw == Boolean . class ) { kind = TYPE_BOOLEAN ; } else if ( raw == Byte . class ) { kind = TYPE_BYTE ; } else if ( raw == Character . class ) { kind = TYPE_CHAR ; } else if ( raw == Short . class ) { kind = TYPE_SHORT ; } else if ( raw == Float . class ) { kind = TYPE_FLOAT ; } else if ( raw == Double . class ) { kind = TYPE_DOUBLE ; } else if ( raw == URI . class ) { kind = TYPE_URI ; } else if ( raw == URL . class ) { kind = TYPE_URL ; } else if ( raw == Class . class ) { kind = TYPE_CLASS ; } else if ( raw == Locale . class ) { FromStringDeserializer < ? > deser = FromStringDeserializer . findDeserializer ( Locale . class ) ; return new StdKeyDeserializer ( TYPE_LOCALE , raw , deser ) ; } else if ( raw == Currency . class ) { FromStringDeserializer < ? > deser = FromStringDeserializer . findDeserializer ( Currency . class ) ; return new StdKeyDeserializer ( TYPE_CURRENCY , raw , deser ) ; } else { return null ; } return new StdKeyDeserializer ( kind , raw ) ; } | public static StdKeyDeserializer forType(Class<?> raw)
{
int kind;
// first common types:
if (raw == String.class || raw == Object.class || raw == CharSequence.class) {
return StringKD.forType(raw);
} else if (raw == UUID.class) {
kind = TYPE_UUID;
} else if (raw == Integer.class) {
kind = TYPE_INT;
} else if (raw == Long.class) {
kind = TYPE_LONG;
} else if (raw == Date.class) {
kind = TYPE_DATE;
} else if (raw == Calendar.class) {
kind = TYPE_CALENDAR;
// then less common ones...
} else if (raw == Boolean.class) {
kind = TYPE_BOOLEAN;
} else if (raw == Byte.class) {
kind = TYPE_BYTE;
} else if (raw == Character.class) {
kind = TYPE_CHAR;
} else if (raw == Short.class) {
kind = TYPE_SHORT;
} else if (raw == Float.class) {
kind = TYPE_FLOAT;
} else if (raw == Double.class) {
kind = TYPE_DOUBLE;
} else if (raw == URI.class) {
kind = TYPE_URI;
} else if (raw == URL.class) {
kind = TYPE_URL;
} else if (raw == Class.class) {
kind = TYPE_CLASS;
} else if (raw == Locale.class) {
FromStringDeserializer<?> deser = FromStringDeserializer.findDeserializer(Locale.class);
return new StdKeyDeserializer(TYPE_LOCALE, raw, deser);
} else if (raw == Currency.class) {
FromStringDeserializer<?> deser = FromStringDeserializer.findDeserializer(Currency.class);
return new StdKeyDeserializer(TYPE_CURRENCY, raw, deser);
} else {
return null;
}
return new StdKeyDeserializer(kind, raw);
} | public static StdKeyDeserializer forType ( Class < ? > raw ) { int kind ; if ( raw == String . class || raw == Object . class || raw == CharSequence . class ) { return StringKD . forType ( raw ) ; } else if ( raw == UUID . class ) { kind = TYPE_UUID ; } else if ( raw == Integer . class ) { kind = TYPE_INT ; } else if ( raw == Long . class ) { kind = TYPE_LONG ; } else if ( raw == Date . class ) { kind = TYPE_DATE ; } else if ( raw == Calendar . class ) { kind = TYPE_CALENDAR ; } else if ( raw == Boolean . class ) { kind = TYPE_BOOLEAN ; } else if ( raw == Byte . class ) { kind = TYPE_BYTE ; } else if ( raw == Character . class ) { kind = TYPE_CHAR ; } else if ( raw == Short . class ) { kind = TYPE_SHORT ; } else if ( raw == Float . class ) { kind = TYPE_FLOAT ; } else if ( raw == Double . class ) { kind = TYPE_DOUBLE ; } else if ( raw == URI . class ) { kind = TYPE_URI ; } else if ( raw == URL . class ) { kind = TYPE_URL ; } else if ( raw == Class . class ) { kind = TYPE_CLASS ; } else if ( raw == Locale . class ) { FromStringDeserializer < ? > deser = FromStringDeserializer . findDeserializer ( Locale . class ) ; return new StdKeyDeserializer ( TYPE_LOCALE , raw , deser ) ; } else if ( raw == Currency . class ) { FromStringDeserializer < ? > deser = FromStringDeserializer . findDeserializer ( Currency . class ) ; return new StdKeyDeserializer ( TYPE_CURRENCY , raw , deser ) ; } else { return null ; } return new StdKeyDeserializer ( kind , raw ) ; } |
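With `CharSequence` added to the string-keyed fast path, a map keyed by `CharSequence` deserializes like one keyed by `String`. A usage sketch:
```
import java.util.Map;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;

public class CharSequenceKeyDemo {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // Failed before the fix: no KeyDeserializer was registered for CharSequence.
        Map<CharSequence, Integer> map = mapper.readValue("{\"a\": 1}",
                new TypeReference<Map<CharSequence, Integer>>() {});
        System.out.println(map.get("a")); // 1
    }
}
```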
Cli | 15 | src/java/org/apache/commons/cli2/commandline/WriteableCommandLineImpl.java | 111 | 130 | default arguments only work if no arguments are submitted | When using multiple arguments and defaults, the behaviour is counter-intuitive and will only pick up a default if no args are passed in.
For instance, in the code below I have set it up so 0, 1, or 2 args may be accepted, with defaults 100 and 1000.
I expect it to behave as follows.
1. For 2 args, 1 and 2, the values should be 1 and 2. This works as expected.
2. For 0 args passed in, the values should be 100 and 1000, picking up both of the defaults. This works as expected.
3. For 1 arg passed in, the values should be 1 and 1000, so the second argument picks up the second default value. The values become just 1, which is not as expected.
Currently, in the one-arg case it will only return 1 and ignore the defaults.
public void testSingleOptionSingleArgument() throws Exception {
String defaulValue1 = "100";
String defaultValue2 = "1000";
final DefaultOptionBuilder obuilder = new DefaultOptionBuilder();
final ArgumentBuilder abuilder = new ArgumentBuilder();
final GroupBuilder gbuilder = new GroupBuilder();
DefaultOption bOption = obuilder.withShortName("b")
.withLongName("b")
.withArgument(abuilder.withName("b")
.withMinimum(0)
.withMaximum(2)
.withDefault(defaulValue1)
.withDefault(defaultValue2)
.create())
.create();
Group options = gbuilder
.withName("options")
.withOption(bOption)
.create();
Parser parser = new Parser();
parser.setHelpTrigger("--help");
parser.setGroup(options);
String enteredValue1 = "1";
String[] args = new String[]{"-b", enteredValue1};
CommandLine cl = parser.parse(args);
CommandLine cmd = cl;
assertNotNull(cmd);
List b = cmd.getValues("-b");
assertEquals("[" + enteredValue1 + "]", b + "");
}
| public List getValues(final Option option,
List defaultValues) {
// initialize the return list
List valueList = (List) values.get(option);
// grab the correct default values
if ((valueList == null) || valueList.isEmpty()) {
valueList = defaultValues;
}
// augment the list with the default values
if ((valueList == null) || valueList.isEmpty()) {
valueList = (List) this.defaultValues.get(option);
}
// if there are more default values as specified, add them to
// the list.
// copy the list first
return valueList == null ? Collections.EMPTY_LIST : valueList;
} | public List getValues ( final Option option , List defaultValues ) { List valueList = ( List ) values . get ( option ) ; if ( ( valueList == null ) || valueList . isEmpty ( ) ) { valueList = defaultValues ; } if ( ( valueList == null ) || valueList . isEmpty ( ) ) { valueList = ( List ) this . defaultValues . get ( option ) ; } return valueList == null ? Collections . EMPTY_LIST : valueList ; } | public List getValues(final Option option,
List defaultValues) {
// initialize the return list
List valueList = (List) values.get(option);
// grab the correct default values
if (defaultValues == null || defaultValues.isEmpty()) {
defaultValues = (List) this.defaultValues.get(option);
}
// augment the list with the default values
if (defaultValues != null && !defaultValues.isEmpty()) {
if (valueList == null || valueList.isEmpty()) {
valueList = defaultValues;
} else {
// if there are more default values as specified, add them to
// the list.
if (defaultValues.size() > valueList.size()) {
// copy the list first
valueList = new ArrayList(valueList);
for (int i=valueList.size(); i<defaultValues.size(); i++) {
valueList.add(defaultValues.get(i));
}
}
}
}
return valueList == null ? Collections.EMPTY_LIST : valueList;
} | public List getValues ( final Option option , List defaultValues ) { List valueList = ( List ) values . get ( option ) ; if ( defaultValues == null || defaultValues . isEmpty ( ) ) { defaultValues = ( List ) this . defaultValues . get ( option ) ; } if ( defaultValues != null && ! defaultValues . isEmpty ( ) ) { if ( valueList == null || valueList . isEmpty ( ) ) { valueList = defaultValues ; } else { if ( defaultValues . size ( ) > valueList . size ( ) ) { valueList = new ArrayList ( valueList ) ; for ( int i = valueList . size ( ) ; i < defaultValues . size ( ) ; i ++ ) { valueList . add ( defaultValues . get ( i ) ) ; } } } } return valueList == null ? Collections . EMPTY_LIST : valueList ; } |
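The heart of the fix is the tail-padding step: parsed values keep their positions and only the missing trailing positions are filled from the defaults. A standalone sketch of just that logic (plain Java; `pad` is a hypothetical helper, not Commons CLI API):
```
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class DefaultPaddingDemo {
    // Fills positions beyond values.size() with the corresponding defaults.
    static List<String> pad(List<String> values, List<String> defaults) {
        List<String> result = new ArrayList<>(values);
        for (int i = result.size(); i < defaults.size(); i++) {
            result.add(defaults.get(i));
        }
        return result;
    }

    public static void main(String[] args) {
        List<String> defaults = Arrays.asList("100", "1000");
        System.out.println(pad(Arrays.asList("1"), defaults));      // [1, 1000]
        System.out.println(pad(Arrays.asList(), defaults));         // [100, 1000]
        System.out.println(pad(Arrays.asList("1", "2"), defaults)); // [1, 2]
    }
}
```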
JxPath | 8 | src/java/org/apache/commons/jxpath/ri/compiler/CoreOperationRelationalExpression.java | 56 | 78 | Comparing with NaN is incorrect | 'NaN' > 'NaN' is true, but should be FALSE | private boolean compute(Object left, Object right) {
left = reduce(left);
right = reduce(right);
if (left instanceof InitialContext) {
((InitialContext) left).reset();
}
if (right instanceof InitialContext) {
((InitialContext) right).reset();
}
if (left instanceof Iterator && right instanceof Iterator) {
return findMatch((Iterator) left, (Iterator) right);
}
if (left instanceof Iterator) {
return containsMatch((Iterator) left, right);
}
if (right instanceof Iterator) {
return containsMatch((Iterator) right, left);
}
double ld = InfoSetUtil.doubleValue(left);
double rd = InfoSetUtil.doubleValue(right);
return evaluateCompare(ld == rd ? 0 : ld < rd ? -1 : 1);
} | private boolean compute ( Object left , Object right ) { left = reduce ( left ) ; right = reduce ( right ) ; if ( left instanceof InitialContext ) { ( ( InitialContext ) left ) . reset ( ) ; } if ( right instanceof InitialContext ) { ( ( InitialContext ) right ) . reset ( ) ; } if ( left instanceof Iterator && right instanceof Iterator ) { return findMatch ( ( Iterator ) left , ( Iterator ) right ) ; } if ( left instanceof Iterator ) { return containsMatch ( ( Iterator ) left , right ) ; } if ( right instanceof Iterator ) { return containsMatch ( ( Iterator ) right , left ) ; } double ld = InfoSetUtil . doubleValue ( left ) ; double rd = InfoSetUtil . doubleValue ( right ) ; return evaluateCompare ( ld == rd ? 0 : ld < rd ? - 1 : 1 ) ; } | private boolean compute(Object left, Object right) {
left = reduce(left);
right = reduce(right);
if (left instanceof InitialContext) {
((InitialContext) left).reset();
}
if (right instanceof InitialContext) {
((InitialContext) right).reset();
}
if (left instanceof Iterator && right instanceof Iterator) {
return findMatch((Iterator) left, (Iterator) right);
}
if (left instanceof Iterator) {
return containsMatch((Iterator) left, right);
}
if (right instanceof Iterator) {
return containsMatch((Iterator) right, left);
}
double ld = InfoSetUtil.doubleValue(left);
if (Double.isNaN(ld)) {
return false;
}
double rd = InfoSetUtil.doubleValue(right);
if (Double.isNaN(rd)) {
return false;
}
return evaluateCompare(ld == rd ? 0 : ld < rd ? -1 : 1);
} | private boolean compute ( Object left , Object right ) { left = reduce ( left ) ; right = reduce ( right ) ; if ( left instanceof InitialContext ) { ( ( InitialContext ) left ) . reset ( ) ; } if ( right instanceof InitialContext ) { ( ( InitialContext ) right ) . reset ( ) ; } if ( left instanceof Iterator && right instanceof Iterator ) { return findMatch ( ( Iterator ) left , ( Iterator ) right ) ; } if ( left instanceof Iterator ) { return containsMatch ( ( Iterator ) left , right ) ; } if ( right instanceof Iterator ) { return containsMatch ( ( Iterator ) right , left ) ; } double ld = InfoSetUtil . doubleValue ( left ) ; if ( Double . isNaN ( ld ) ) { return false ; } double rd = InfoSetUtil . doubleValue ( right ) ; if ( Double . isNaN ( rd ) ) { return false ; } return evaluateCompare ( ld == rd ? 0 : ld < rd ? - 1 : 1 ) ; } |
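For reference, the fix simply restores IEEE 754 semantics: no ordered comparison involving NaN may be true. A quick self-contained check of those semantics in Java:
```
public class NaNCompareDemo {
    public static void main(String[] args) {
        double nan = Double.NaN;
        System.out.println(nan > nan);   // false
        System.out.println(nan < 1.0);   // false
        System.out.println(nan == nan);  // false
        // The buggy code mapped a NaN pair to compare result 1 ("greater"),
        // which is why 'NaN' > 'NaN' evaluated to true before the fix.
        System.out.println(Double.isNaN(nan)); // true: the check the fix adds
    }
}
```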
JacksonDatabind | 51 | src/main/java/com/fasterxml/jackson/databind/jsontype/impl/TypeDeserializerBase.java | 140 | 191 | Generic type returned from type id resolver seems to be ignored | https://github.com/benson-basis/jackson-custom-mess-tc
Here's the situation, with Jackson 2.7.4.
I have a TypeIdResolver that returns a JavaType for a generic type. However, something seems to be forgetting/erasing the generic, as it is failing to use the generic type param to understand the type of a field in the class.
All the information is in the test case, so I'm not putting any code to read here in the issue.
| protected final JsonDeserializer<Object> _findDeserializer(DeserializationContext ctxt,
String typeId) throws IOException
{
JsonDeserializer<Object> deser = _deserializers.get(typeId);
if (deser == null) {
/* As per [Databind#305], need to provide contextual info. But for
* backwards compatibility, let's start by only supporting this
* for base class, not via interface. Later on we can add this
* to the interface, assuming deprecation at base class helps.
*/
JavaType type = _idResolver.typeFromId(ctxt, typeId);
if (type == null) {
// As per [JACKSON-614], use the default impl if no type id available:
deser = _findDefaultImplDeserializer(ctxt);
if (deser == null) {
// 10-May-2016, tatu: We may get some help...
JavaType actual = _handleUnknownTypeId(ctxt, typeId, _idResolver, _baseType);
if (actual == null) { // what should this be taken to mean?
// TODO: try to figure out something better
return null;
}
// ... would this actually work?
deser = ctxt.findContextualValueDeserializer(actual, _property);
}
} else {
/* 16-Dec-2010, tatu: Since nominal type we get here has no (generic) type parameters,
* we actually now need to explicitly narrow from base type (which may have parameterization)
* using raw type.
*
* One complication, though; can not change 'type class' (simple type to container); otherwise
* we may try to narrow a SimpleType (Object.class) into MapType (Map.class), losing actual
* type in process (getting SimpleType of Map.class which will not work as expected)
*/
if ((_baseType != null)
&& _baseType.getClass() == type.getClass()) {
/* 09-Aug-2015, tatu: Not sure if the second part of the check makes sense;
* but it appears to check that JavaType impl class is the same which is
* important for some reason?
* Disabling the check will break 2 Enum-related tests.
*/
// 19-Jun-2016, tatu: As per [databind#1270] we may actually get full
// generic type with custom type resolvers. If so, should try to retain them.
// Whether this is sufficient to avoid problems remains to be seen, but for
// now it should improve things.
type = ctxt.getTypeFactory().constructSpecializedType(_baseType, type.getRawClass());
}
deser = ctxt.findContextualValueDeserializer(type, _property);
}
_deserializers.put(typeId, deser);
}
return deser;
} | protected final JsonDeserializer < Object > _findDeserializer ( DeserializationContext ctxt , String typeId ) throws IOException { JsonDeserializer < Object > deser = _deserializers . get ( typeId ) ; if ( deser == null ) { JavaType type = _idResolver . typeFromId ( ctxt , typeId ) ; if ( type == null ) { deser = _findDefaultImplDeserializer ( ctxt ) ; if ( deser == null ) { JavaType actual = _handleUnknownTypeId ( ctxt , typeId , _idResolver , _baseType ) ; if ( actual == null ) { return null ; } deser = ctxt . findContextualValueDeserializer ( actual , _property ) ; } } else { if ( ( _baseType != null ) && _baseType . getClass ( ) == type . getClass ( ) ) { type = ctxt . getTypeFactory ( ) . constructSpecializedType ( _baseType , type . getRawClass ( ) ) ; } deser = ctxt . findContextualValueDeserializer ( type , _property ) ; } _deserializers . put ( typeId , deser ) ; } return deser ; } | protected final JsonDeserializer<Object> _findDeserializer(DeserializationContext ctxt,
String typeId) throws IOException
{
JsonDeserializer<Object> deser = _deserializers.get(typeId);
if (deser == null) {
/* As per [Databind#305], need to provide contextual info. But for
* backwards compatibility, let's start by only supporting this
* for base class, not via interface. Later on we can add this
* to the interface, assuming deprecation at base class helps.
*/
JavaType type = _idResolver.typeFromId(ctxt, typeId);
if (type == null) {
// As per [JACKSON-614], use the default impl if no type id available:
deser = _findDefaultImplDeserializer(ctxt);
if (deser == null) {
// 10-May-2016, tatu: We may get some help...
JavaType actual = _handleUnknownTypeId(ctxt, typeId, _idResolver, _baseType);
if (actual == null) { // what should this be taken to mean?
// TODO: try to figure out something better
return null;
}
// ... would this actually work?
deser = ctxt.findContextualValueDeserializer(actual, _property);
}
} else {
/* 16-Dec-2010, tatu: Since nominal type we get here has no (generic) type parameters,
* we actually now need to explicitly narrow from base type (which may have parameterization)
* using raw type.
*
* One complication, though; can not change 'type class' (simple type to container); otherwise
* we may try to narrow a SimpleType (Object.class) into MapType (Map.class), losing actual
* type in process (getting SimpleType of Map.class which will not work as expected)
*/
if ((_baseType != null)
&& _baseType.getClass() == type.getClass()) {
/* 09-Aug-2015, tatu: Not sure if the second part of the check makes sense;
* but it appears to check that JavaType impl class is the same which is
* important for some reason?
* Disabling the check will break 2 Enum-related tests.
*/
// 19-Jun-2016, tatu: As per [databind#1270] we may actually get full
// generic type with custom type resolvers. If so, should try to retain them.
// Whether this is sufficient to avoid problems remains to be seen, but for
// now it should improve things.
if (!type.hasGenericTypes()) {
type = ctxt.getTypeFactory().constructSpecializedType(_baseType, type.getRawClass());
}
}
deser = ctxt.findContextualValueDeserializer(type, _property);
}
_deserializers.put(typeId, deser);
}
return deser;
} | protected final JsonDeserializer < Object > _findDeserializer ( DeserializationContext ctxt , String typeId ) throws IOException { JsonDeserializer < Object > deser = _deserializers . get ( typeId ) ; if ( deser == null ) { JavaType type = _idResolver . typeFromId ( ctxt , typeId ) ; if ( type == null ) { deser = _findDefaultImplDeserializer ( ctxt ) ; if ( deser == null ) { JavaType actual = _handleUnknownTypeId ( ctxt , typeId , _idResolver , _baseType ) ; if ( actual == null ) { return null ; } deser = ctxt . findContextualValueDeserializer ( actual , _property ) ; } } else { if ( ( _baseType != null ) && _baseType . getClass ( ) == type . getClass ( ) ) { if ( ! type . hasGenericTypes ( ) ) { type = ctxt . getTypeFactory ( ) . constructSpecializedType ( _baseType , type . getRawClass ( ) ) ; } } deser = ctxt . findContextualValueDeserializer ( type , _property ) ; } _deserializers . put ( typeId , deser ) ; } return deser ; } |
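The guard added in the fix hinges on JavaType.hasGenericTypes(): a type id that already carries its own parameterization is kept, while an unparameterized one is still re-specialized from the base type. A small sketch of that distinction using Jackson's TypeFactory (real API; the printed values are my expectation, not taken from the Jackson test suite):
```
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.type.TypeFactory;
import java.util.List;

public class HasGenericTypesDemo {
    public static void main(String[] args) {
        TypeFactory tf = TypeFactory.defaultInstance();
        JavaType plain = tf.constructType(String.class);
        JavaType parameterized = tf.constructCollectionType(List.class, String.class);
        System.out.println(plain.hasGenericTypes());         // false -> safe to re-specialize
        System.out.println(parameterized.hasGenericTypes()); // true  -> keep resolver's parameters
    }
}
```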
JxPath | 12 | src/java/org/apache/commons/jxpath/ri/model/dom/DOMNodePointer.java | 87 | 136 | Incomplete handling of undefined namespaces | Mcduffey, Joe <jdmcduf@nsa.gov>
Can someone tell me how to register namespaces so that attributes with namespaces do not cause the exception
org.apache.common.ri.model.dom.DOMNodePointer.createAttribute
unknown namespace prefix: xsi
For example the following
<ElementA A:myAttr="Mytype">
<B:ElementB>MY VALUE</B:ElementB>
</ElementA>
Would result in the following exception:
org.apache.common.ri.model.dom.DOMNodePointer.createAttribute
unknown namespace prefix: A
FYI: In this example there was a namespace declaration in the file and I also manually called the
registerNamespace(A,"/http...");
registerNamespace(B,"/http...");
There was no problem encountered for elements. Only attributes. Can someone help? Thanks. | public static boolean testNode(Node node, NodeTest test) {
if (test == null) {
return true;
}
if (test instanceof NodeNameTest) {
if (node.getNodeType() != Node.ELEMENT_NODE) {
return false;
}
NodeNameTest nodeNameTest = (NodeNameTest) test;
QName testName = nodeNameTest.getNodeName();
String namespaceURI = nodeNameTest.getNamespaceURI();
boolean wildcard = nodeNameTest.isWildcard();
String testPrefix = testName.getPrefix();
if (wildcard && testPrefix == null) {
return true;
}
if (wildcard
|| testName.getName()
.equals(DOMNodePointer.getLocalName(node))) {
String nodeNS = DOMNodePointer.getNamespaceURI(node);
return equalStrings(namespaceURI, nodeNS);
}
return false;
}
if (test instanceof NodeTypeTest) {
int nodeType = node.getNodeType();
switch (((NodeTypeTest) test).getNodeType()) {
case Compiler.NODE_TYPE_NODE :
return nodeType == Node.ELEMENT_NODE
|| nodeType == Node.DOCUMENT_NODE;
case Compiler.NODE_TYPE_TEXT :
return nodeType == Node.CDATA_SECTION_NODE
|| nodeType == Node.TEXT_NODE;
case Compiler.NODE_TYPE_COMMENT :
return nodeType == Node.COMMENT_NODE;
case Compiler.NODE_TYPE_PI :
return nodeType == Node.PROCESSING_INSTRUCTION_NODE;
}
return false;
}
if (test instanceof ProcessingInstructionTest) {
if (node.getNodeType() == Node.PROCESSING_INSTRUCTION_NODE) {
String testPI = ((ProcessingInstructionTest) test).getTarget();
String nodePI = ((ProcessingInstruction) node).getTarget();
return testPI.equals(nodePI);
}
}
return false;
} | public static boolean testNode ( Node node , NodeTest test ) { if ( test == null ) { return true ; } if ( test instanceof NodeNameTest ) { if ( node . getNodeType ( ) != Node . ELEMENT_NODE ) { return false ; } NodeNameTest nodeNameTest = ( NodeNameTest ) test ; QName testName = nodeNameTest . getNodeName ( ) ; String namespaceURI = nodeNameTest . getNamespaceURI ( ) ; boolean wildcard = nodeNameTest . isWildcard ( ) ; String testPrefix = testName . getPrefix ( ) ; if ( wildcard && testPrefix == null ) { return true ; } if ( wildcard || testName . getName ( ) . equals ( DOMNodePointer . getLocalName ( node ) ) ) { String nodeNS = DOMNodePointer . getNamespaceURI ( node ) ; return equalStrings ( namespaceURI , nodeNS ) ; } return false ; } if ( test instanceof NodeTypeTest ) { int nodeType = node . getNodeType ( ) ; switch ( ( ( NodeTypeTest ) test ) . getNodeType ( ) ) { case Compiler . NODE_TYPE_NODE : return nodeType == Node . ELEMENT_NODE || nodeType == Node . DOCUMENT_NODE ; case Compiler . NODE_TYPE_TEXT : return nodeType == Node . CDATA_SECTION_NODE || nodeType == Node . TEXT_NODE ; case Compiler . NODE_TYPE_COMMENT : return nodeType == Node . COMMENT_NODE ; case Compiler . NODE_TYPE_PI : return nodeType == Node . PROCESSING_INSTRUCTION_NODE ; } return false ; } if ( test instanceof ProcessingInstructionTest ) { if ( node . getNodeType ( ) == Node . PROCESSING_INSTRUCTION_NODE ) { String testPI = ( ( ProcessingInstructionTest ) test ) . getTarget ( ) ; String nodePI = ( ( ProcessingInstruction ) node ) . getTarget ( ) ; return testPI . equals ( nodePI ) ; } } return false ; } | public static boolean testNode(Node node, NodeTest test) {
if (test == null) {
return true;
}
if (test instanceof NodeNameTest) {
if (node.getNodeType() != Node.ELEMENT_NODE) {
return false;
}
NodeNameTest nodeNameTest = (NodeNameTest) test;
QName testName = nodeNameTest.getNodeName();
String namespaceURI = nodeNameTest.getNamespaceURI();
boolean wildcard = nodeNameTest.isWildcard();
String testPrefix = testName.getPrefix();
if (wildcard && testPrefix == null) {
return true;
}
if (wildcard
|| testName.getName()
.equals(DOMNodePointer.getLocalName(node))) {
String nodeNS = DOMNodePointer.getNamespaceURI(node);
return equalStrings(namespaceURI, nodeNS) || nodeNS == null
&& equalStrings(testPrefix, getPrefix(node));
}
return false;
}
if (test instanceof NodeTypeTest) {
int nodeType = node.getNodeType();
switch (((NodeTypeTest) test).getNodeType()) {
case Compiler.NODE_TYPE_NODE :
return nodeType == Node.ELEMENT_NODE
|| nodeType == Node.DOCUMENT_NODE;
case Compiler.NODE_TYPE_TEXT :
return nodeType == Node.CDATA_SECTION_NODE
|| nodeType == Node.TEXT_NODE;
case Compiler.NODE_TYPE_COMMENT :
return nodeType == Node.COMMENT_NODE;
case Compiler.NODE_TYPE_PI :
return nodeType == Node.PROCESSING_INSTRUCTION_NODE;
}
return false;
}
if (test instanceof ProcessingInstructionTest) {
if (node.getNodeType() == Node.PROCESSING_INSTRUCTION_NODE) {
String testPI = ((ProcessingInstructionTest) test).getTarget();
String nodePI = ((ProcessingInstruction) node).getTarget();
return testPI.equals(nodePI);
}
}
return false;
} | public static boolean testNode ( Node node , NodeTest test ) { if ( test == null ) { return true ; } if ( test instanceof NodeNameTest ) { if ( node . getNodeType ( ) != Node . ELEMENT_NODE ) { return false ; } NodeNameTest nodeNameTest = ( NodeNameTest ) test ; QName testName = nodeNameTest . getNodeName ( ) ; String namespaceURI = nodeNameTest . getNamespaceURI ( ) ; boolean wildcard = nodeNameTest . isWildcard ( ) ; String testPrefix = testName . getPrefix ( ) ; if ( wildcard && testPrefix == null ) { return true ; } if ( wildcard || testName . getName ( ) . equals ( DOMNodePointer . getLocalName ( node ) ) ) { String nodeNS = DOMNodePointer . getNamespaceURI ( node ) ; return equalStrings ( namespaceURI , nodeNS ) || nodeNS == null && equalStrings ( testPrefix , getPrefix ( node ) ) ; } return false ; } if ( test instanceof NodeTypeTest ) { int nodeType = node . getNodeType ( ) ; switch ( ( ( NodeTypeTest ) test ) . getNodeType ( ) ) { case Compiler . NODE_TYPE_NODE : return nodeType == Node . ELEMENT_NODE || nodeType == Node . DOCUMENT_NODE ; case Compiler . NODE_TYPE_TEXT : return nodeType == Node . CDATA_SECTION_NODE || nodeType == Node . TEXT_NODE ; case Compiler . NODE_TYPE_COMMENT : return nodeType == Node . COMMENT_NODE ; case Compiler . NODE_TYPE_PI : return nodeType == Node . PROCESSING_INSTRUCTION_NODE ; } return false ; } if ( test instanceof ProcessingInstructionTest ) { if ( node . getNodeType ( ) == Node . PROCESSING_INSTRUCTION_NODE ) { String testPI = ( ( ProcessingInstructionTest ) test ) . getTarget ( ) ; String nodePI = ( ( ProcessingInstruction ) node ) . getTarget ( ) ; return testPI . equals ( nodePI ) ; } } return false ; } |
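The one-line change above adds a prefix-based fallback for nodes whose namespace URI cannot be resolved. A standalone restatement of the fixed matching rule (plain Java; names and the null-equality convention are illustrative):
```
public class NamespaceMatchDemo {
    static boolean equalStrings(String a, String b) {
        return a == null ? b == null : a.equals(b);
    }

    // URIs must match; a node with no resolvable namespace URI may
    // still match on its literal prefix (the fallback the fix adds).
    static boolean matches(String testUri, String testPrefix,
                           String nodeUri, String nodePrefix) {
        return equalStrings(testUri, nodeUri)
                || (nodeUri == null && equalStrings(testPrefix, nodePrefix));
    }

    public static void main(String[] args) {
        // Attribute A:myAttr whose prefix 'A' was never declared in the document:
        System.out.println(matches("http://a.example", "A", null, "A")); // true after the fix
        System.out.println(matches("http://a.example", "A", null, "B")); // false
    }
}
```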
Math | 63 | src/main/java/org/apache/commons/math/util/MathUtils.java | 416 | 418 | NaN in "equals" methods | In "MathUtils", some "equals" methods will return true if both argument are NaN.
Unless I'm mistaken, this contradicts the IEEE standard.
If nobody objects, I'm going to make the changes.
| public static boolean equals(double x, double y) {
return (Double.isNaN(x) && Double.isNaN(y)) || x == y;
} | public static boolean equals ( double x , double y ) { return ( Double . isNaN ( x ) && Double . isNaN ( y ) ) || x == y ; } | public static boolean equals(double x, double y) {
return equals(x, y, 1);
} | public static boolean equals ( double x , double y ) { return equals ( x , y , 1 ) ; } |
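The fixed method delegates to an equals(double, double, int maxUlps) overload with maxUlps = 1. A sketch of what a check of that shape does, using Math.ulp as an approximation of the bit-level ULP distance the real implementation uses (semantics assumed, not copied from MathUtils):
```
public class UlpEqualsDemo {
    static boolean equals(double x, double y, int maxUlps) {
        if (Double.isNaN(x) || Double.isNaN(y)) {
            return false; // NaN never equals anything, matching IEEE 754
        }
        if (x == y) {
            return true;  // covers equal values, +0.0/-0.0, equal infinities
        }
        double scale = Math.max(Math.abs(x), Math.abs(y));
        return Math.abs(x - y) <= maxUlps * Math.ulp(scale);
    }

    public static void main(String[] args) {
        System.out.println(equals(Double.NaN, Double.NaN, 1));    // false (was true before)
        System.out.println(equals(1.0, 1.0 + Math.ulp(1.0), 1)); // true: one ULP apart
    }
}
```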
Cli | 35 | src/main/java/org/apache/commons/cli/Options.java | 233 | 250 | LongOpt falsely detected as ambiguous | Options options = new Options();
options.addOption(Option.builder().longOpt("importToOpen").hasArg().argName("FILE").build());
options.addOption(Option.builder("i").longOpt("import").hasArg().argName("FILE").build());
Parsing "--import=FILE" is not possible since 1.3 as it throws a AmbiguousOptionException stating that it cannot decide whether import is import or importToOpen. In 1.2 this is not an issue.
The root lies in the new DefaultParser which does a startsWith check internally.
| public List<String> getMatchingOptions(String opt)
{
opt = Util.stripLeadingHyphens(opt);
List<String> matchingOpts = new ArrayList<String>();
// for a perfect match return the single option only
for (String longOpt : longOpts.keySet())
{
if (longOpt.startsWith(opt))
{
matchingOpts.add(longOpt);
}
}
return matchingOpts;
} | public List < String > getMatchingOptions ( String opt ) { opt = Util . stripLeadingHyphens ( opt ) ; List < String > matchingOpts = new ArrayList < String > ( ) ; for ( String longOpt : longOpts . keySet ( ) ) { if ( longOpt . startsWith ( opt ) ) { matchingOpts . add ( longOpt ) ; } } return matchingOpts ; } | public List<String> getMatchingOptions(String opt)
{
opt = Util.stripLeadingHyphens(opt);
List<String> matchingOpts = new ArrayList<String>();
// for a perfect match return the single option only
if(longOpts.keySet().contains(opt)) {
return Collections.singletonList(opt);
}
for (String longOpt : longOpts.keySet())
{
if (longOpt.startsWith(opt))
{
matchingOpts.add(longOpt);
}
}
return matchingOpts;
} | public List < String > getMatchingOptions ( String opt ) { opt = Util . stripLeadingHyphens ( opt ) ; List < String > matchingOpts = new ArrayList < String > ( ) ; if ( longOpts . keySet ( ) . contains ( opt ) ) { return Collections . singletonList ( opt ) ; } for ( String longOpt : longOpts . keySet ( ) ) { if ( longOpt . startsWith ( opt ) ) { matchingOpts . add ( longOpt ) ; } } return matchingOpts ; } |
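To make the fix concrete: an exact long-option match now short-circuits before the prefix scan, so ambiguity is only reported for genuine prefixes. A self-contained imitation of the lookup (plain Java, not the Commons CLI classes):
```
import java.util.*;

public class MatchingOptionsDemo {
    static List<String> getMatchingOptions(String opt, Set<String> longOpts) {
        if (longOpts.contains(opt)) {
            return Collections.singletonList(opt); // perfect match wins
        }
        List<String> matches = new ArrayList<>();
        for (String longOpt : longOpts) {
            if (longOpt.startsWith(opt)) {
                matches.add(longOpt);
            }
        }
        return matches;
    }

    public static void main(String[] args) {
        Set<String> opts = new LinkedHashSet<>(Arrays.asList("import", "importToOpen"));
        System.out.println(getMatchingOptions("import", opts)); // [import]
        System.out.println(getMatchingOptions("imp", opts));    // [import, importToOpen]: ambiguous
    }
}
```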
Math | 34 | src/main/java/org/apache/commons/math3/genetics/ListPopulation.java | 208 | 210 | ListPopulation Iterator allows you to remove chromosomes from the population. | Calling the iterator method of ListPopulation returns an iterator of the protected modifiable list. Before returning the iterator we should wrap it in an unmodifiable list. | public Iterator<Chromosome> iterator() {
return chromosomes.iterator();
} | public Iterator < Chromosome > iterator ( ) { return chromosomes . iterator ( ) ; } | public Iterator<Chromosome> iterator() {
return getChromosomes().iterator();
} | public Iterator < Chromosome > iterator ( ) { return getChromosomes ( ) . iterator ( ) ; } |
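The fix assumes getChromosomes() hands out an unmodifiable view, so the iterator it produces rejects remove(). The general pattern, demonstrated with java.util collections:
```
import java.util.*;

public class IteratorLeakDemo {
    public static void main(String[] args) {
        List<String> internal = new ArrayList<>(Arrays.asList("a", "b"));
        // An iterator over the raw list permits structural modification:
        Iterator<String> raw = internal.iterator();
        raw.next();
        raw.remove();                 // silently shrinks the backing list
        System.out.println(internal); // [b]
        // An iterator over an unmodifiable view does not:
        Iterator<String> safe = Collections.unmodifiableList(internal).iterator();
        safe.next();
        try {
            safe.remove();
        } catch (UnsupportedOperationException expected) {
            System.out.println("remove() rejected; backing list intact");
        }
    }
}
```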
Cli | 23 | src/java/org/apache/commons/cli/HelpFormatter.java | 805 | 841 | infinite loop in the wrapping code of HelpFormatter | If there is not enough space to display a word on a single line, HelpFormatter goes into a infinite loops until the JVM crashes with an OutOfMemoryError.
Test case:
{code}
Options options = new Options();
options.addOption("h", "help", false, "This is a looooong description");
HelpFormatter formatter = new HelpFormatter();
formatter.setWidth(20);
formatter.printHelp("app", options); // hang & crash
{code}
A helpful exception indicating the insufficient width would be more appropriate than an OutOfMemoryError. | protected StringBuffer renderWrappedText(StringBuffer sb, int width,
int nextLineTabStop, String text)
{
int pos = findWrapPos(text, width, 0);
if (pos == -1)
{
sb.append(rtrim(text));
return sb;
}
sb.append(rtrim(text.substring(0, pos))).append(defaultNewLine);
// all following lines must be padded with nextLineTabStop space
// characters
final String padding = createPadding(nextLineTabStop);
while (true)
{
int lastPos = pos;
text = padding + text.substring(pos).trim();
pos = findWrapPos(text, width, 0);
if (pos == -1)
{
sb.append(text);
return sb;
} else
if (pos == lastPos)
{
throw new RuntimeException("Text too long for line - throwing exception to avoid infinite loop [CLI-162]: " + text);
}
sb.append(rtrim(text.substring(0, pos))).append(defaultNewLine);
}
} | protected StringBuffer renderWrappedText ( StringBuffer sb , int width , int nextLineTabStop , String text ) { int pos = findWrapPos ( text , width , 0 ) ; if ( pos == - 1 ) { sb . append ( rtrim ( text ) ) ; return sb ; } sb . append ( rtrim ( text . substring ( 0 , pos ) ) ) . append ( defaultNewLine ) ; final String padding = createPadding ( nextLineTabStop ) ; while ( true ) { int lastPos = pos ; text = padding + text . substring ( pos ) . trim ( ) ; pos = findWrapPos ( text , width , 0 ) ; if ( pos == - 1 ) { sb . append ( text ) ; return sb ; } else if ( pos == lastPos ) { throw new RuntimeException ( "Text too long for line - throwing exception to avoid infinite loop [CLI-162]: " + text ) ; } sb . append ( rtrim ( text . substring ( 0 , pos ) ) ) . append ( defaultNewLine ) ; } } | protected StringBuffer renderWrappedText(StringBuffer sb, int width,
int nextLineTabStop, String text)
{
int pos = findWrapPos(text, width, 0);
if (pos == -1)
{
sb.append(rtrim(text));
return sb;
}
sb.append(rtrim(text.substring(0, pos))).append(defaultNewLine);
// all following lines must be padded with nextLineTabStop space
// characters
final String padding = createPadding(nextLineTabStop);
while (true)
{
text = padding + text.substring(pos).trim();
pos = findWrapPos(text, width, 0);
if (pos == -1)
{
sb.append(text);
return sb;
}
if ( (text.length() > width) && (pos == nextLineTabStop - 1) ) {
sb.append(text);
return sb;
}
sb.append(rtrim(text.substring(0, pos))).append(defaultNewLine);
}
} | protected StringBuffer renderWrappedText ( StringBuffer sb , int width , int nextLineTabStop , String text ) { int pos = findWrapPos ( text , width , 0 ) ; if ( pos == - 1 ) { sb . append ( rtrim ( text ) ) ; return sb ; } sb . append ( rtrim ( text . substring ( 0 , pos ) ) ) . append ( defaultNewLine ) ; final String padding = createPadding ( nextLineTabStop ) ; while ( true ) { text = padding + text . substring ( pos ) . trim ( ) ; pos = findWrapPos ( text , width , 0 ) ; if ( pos == - 1 ) { sb . append ( text ) ; return sb ; } if ( ( text . length ( ) > width ) && ( pos == nextLineTabStop - 1 ) ) { sb . append ( text ) ; return sb ; } sb . append ( rtrim ( text . substring ( 0 , pos ) ) ) . append ( defaultNewLine ) ; } } |
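The termination rule in the fix is: if, after padding, the remaining text still exceeds the width and the wrap position lands on the last padding character (pos == nextLineTabStop - 1), no progress is possible, so the remainder is emitted unwrapped. A compact wrap loop with the same rule (plain Java 11+; findWrapPos is simplified, not the HelpFormatter original):
```
public class WrapLoopDemo {
    // Simplified stand-in for HelpFormatter.findWrapPos: last break point
    // at or before 'width', or -1 if the text already fits.
    static int findWrapPos(String text, int width) {
        if (text.length() <= width) {
            return -1;
        }
        int pos = text.lastIndexOf(' ', width);
        return pos <= 0 ? width : pos;
    }

    static String wrap(String text, int width, int tab) {
        StringBuilder sb = new StringBuilder();
        String padding = " ".repeat(tab);
        int pos = findWrapPos(text, width);
        while (pos != -1) {
            sb.append(text, 0, pos).append('\n');
            text = padding + text.substring(pos).trim();
            pos = findWrapPos(text, width);
            if (text.length() > width && pos == tab - 1) {
                break; // the word can never fit: stop instead of looping forever
            }
        }
        return sb.append(text).toString();
    }

    public static void main(String[] args) {
        System.out.println(wrap("This is a looooong description", 10, 4));
    }
}
```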
Cli | 5 | src/java/org/apache/commons/cli/Util.java | 34 | 46 | NullPointerException in Util.stripLeadingHyphens when passed a null argument | If you try to do a hasOption(null), you get an NPE:
java.lang.NullPointerException
at org.apache.commons.cli.Util.stripLeadingHyphens(Util.java:39)
at org.apache.commons.cli.CommandLine.resolveOption(CommandLine.java:166)
at org.apache.commons.cli.CommandLine.hasOption(CommandLine.java:68)
Either hasOption should reject the null argument, or the function should simply return false. I think the latter makes more sense, as this is how Java collections generally work. | static String stripLeadingHyphens(String str)
{
if (str.startsWith("--"))
{
return str.substring(2, str.length());
}
else if (str.startsWith("-"))
{
return str.substring(1, str.length());
}
return str;
} | static String stripLeadingHyphens ( String str ) { if ( str . startsWith ( "--" ) ) { return str . substring ( 2 , str . length ( ) ) ; } else if ( str . startsWith ( "-" ) ) { return str . substring ( 1 , str . length ( ) ) ; } return str ; } | static String stripLeadingHyphens(String str)
{
if (str == null) {
return null;
}
if (str.startsWith("--"))
{
return str.substring(2, str.length());
}
else if (str.startsWith("-"))
{
return str.substring(1, str.length());
}
return str;
} | static String stripLeadingHyphens ( String str ) { if ( str == null ) { return null ; } if ( str . startsWith ( "--" ) ) { return str . substring ( 2 , str . length ( ) ) ; } else if ( str . startsWith ( "-" ) ) { return str . substring ( 1 , str . length ( ) ) ; } return str ; } |
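Usage-wise, the fix means hasOption(null) now flows through cleanly and simply fails to match. A runnable copy of the guarded helper plus the two interesting calls (standalone; not wired into CommandLine):
```
public class StripLeadingHyphensDemo {
    static String stripLeadingHyphens(String str) {
        if (str == null) {
            return null; // the added guard: null in, null out, no NPE
        }
        if (str.startsWith("--")) {
            return str.substring(2);
        }
        if (str.startsWith("-")) {
            return str.substring(1);
        }
        return str;
    }

    public static void main(String[] args) {
        System.out.println(stripLeadingHyphens("--help")); // help
        System.out.println(stripLeadingHyphens(null));     // null
    }
}
```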
JacksonDatabind | 47 | src/main/java/com/fasterxml/jackson/databind/AnnotationIntrospector.java | 795 | 896 | `@JsonSerialize(as=superType)` behavior disallowed in 2.7.4 | #1178 fixed the problem with collections, but I'm seeing a problem with individual objects.
I'm getting:
```
com.fasterxml.jackson.databind.JsonMappingException: Failed to widen type [simple type, class org.pharmgkb.model.AccessionIdentifier] with annotation (value org.pharmgkb.model.BaseAccessionIdentifier), from 'getReference': Class org.pharmgkb.model.BaseAccessionIdentifier not a super-type of [simple type, class org.pharmgkb.model.AccessionIdentifier]
at com.fasterxml.jackson.databind.AnnotationIntrospector.refineSerializationType(AnnotationIntrospector.java:821)
at com.fasterxml.jackson.databind.introspect.AnnotationIntrospectorPair.refineSerializationType(AnnotationIntrospectorPair.java:488)
at com.fasterxml.jackson.databind.ser.PropertyBuilder.findSerializationType(PropertyBuilder.java:194)
at com.fasterxml.jackson.databind.ser.PropertyBuilder.buildWriter(PropertyBuilder.java:73)
at com.fasterxml.jackson.databind.ser.BeanSerializerFactory._constructWriter(BeanSerializerFactory.java:805)
at com.fasterxml.jackson.databind.ser.BeanSerializerFactory.findBeanProperties(BeanSerializerFactory.java:608)
at com.fasterxml.jackson.databind.ser.BeanSerializerFactory.constructBeanSerializer(BeanSerializerFactory.java:388)
at com.fasterxml.jackson.databind.ser.BeanSerializerFactory.findBeanSerializer(BeanSerializerFactory.java:271)
at com.fasterxml.jackson.databind.ser.BeanSerializerFactory._createSerializer2(BeanSerializerFactory.java:223)
at com.fasterxml.jackson.databind.ser.BeanSerializerFactory.createSerializer(BeanSerializerFactory.java:157)
at com.fasterxml.jackson.databind.SerializerProvider._createUntypedSerializer(SerializerProvider.java:1215)
at com.fasterxml.jackson.databind.SerializerProvider._createAndCacheUntypedSerializer(SerializerProvider.java:1167)
at com.fasterxml.jackson.databind.SerializerProvider.findValueSerializer(SerializerProvider.java:490)
at com.fasterxml.jackson.databind.SerializerProvider.findTypedValueSerializer(SerializerProvider.java:688)
at com.fasterxml.jackson.databind.ser.DefaultSerializerProvider.serializeValue(DefaultSerializerProvider.java:107)
at com.fasterxml.jackson.databind.ObjectWriter$Prefetch.serialize(ObjectWriter.java:1428)
at com.fasterxml.jackson.databind.ObjectWriter._configAndWriteValue(ObjectWriter.java:1129)
at com.fasterxml.jackson.databind.ObjectWriter.writeValueAsString(ObjectWriter.java:1001)
at org.pharmgkb.jackson.JacksonTest.testModelObjects(JacksonTest.java:48)
```
On something like:
```
public class Foo {
@JsonSerialize(as = BaseAccessionIdentifier.class)
@JsonDeserialize(as = BaseAccessionIdentifier.class)
public AccessionIdentifier getReference() {
}
}
```
```
public interface AccessionIdentifier {
}
```
```
public class BaseAccessionIdentifier implements AccessionIdentifier {
}
```
| public JavaType refineSerializationType(final MapperConfig<?> config,
final Annotated a, final JavaType baseType) throws JsonMappingException
{
JavaType type = baseType;
final TypeFactory tf = config.getTypeFactory();
// 10-Oct-2015, tatu: For 2.7, we'll need to delegate back to
// now-deprecated secondary methods; this because while
// direct sub-class not yet retrofitted may only override
// those methods. With 2.8 or later we may consider removal
// of these methods
// Ok: start by refining the main type itself; common to all types
Class<?> serClass = findSerializationType(a);
if (serClass != null) {
if (type.hasRawClass(serClass)) {
// 30-Nov-2015, tatu: As per [databind#1023], need to allow forcing of
// static typing this way
type = type.withStaticTyping();
} else {
try {
// 11-Oct-2015, tatu: For deser, we call `TypeFactory.constructSpecializedType()`,
// may be needed here too in future?
type = tf.constructGeneralizedType(type, serClass);
} catch (IllegalArgumentException iae) {
throw new JsonMappingException(null,
String.format("Failed to widen type %s with annotation (value %s), from '%s': %s",
type, serClass.getName(), a.getName(), iae.getMessage()),
iae);
}
}
}
// Then further processing for container types
// First, key type (for Maps, Map-like types):
if (type.isMapLikeType()) {
JavaType keyType = type.getKeyType();
Class<?> keyClass = findSerializationKeyType(a, keyType);
if (keyClass != null) {
if (keyType.hasRawClass(keyClass)) {
keyType = keyType.withStaticTyping();
} else {
Class<?> currRaw = keyType.getRawClass();
try {
// 19-May-2016, tatu: As per [databind#1231], [databind#1178] may need to actually
// specialize (narrow) type sometimes, even if more commonly opposite
// is needed.
if (keyClass.isAssignableFrom(currRaw)) { // common case
keyType = tf.constructGeneralizedType(keyType, keyClass);
} else if (currRaw.isAssignableFrom(keyClass)) { // specialization, ok as well
keyType = tf.constructSpecializedType(keyType, keyClass);
} else {
throw new JsonMappingException(null,
String.format("Can not refine serialization key type %s into %s; types not related",
keyType, keyClass.getName()));
}
} catch (IllegalArgumentException iae) {
throw new JsonMappingException(null,
String.format("Failed to widen key type of %s with concrete-type annotation (value %s), from '%s': %s",
type, keyClass.getName(), a.getName(), iae.getMessage()),
iae);
}
}
type = ((MapLikeType) type).withKeyType(keyType);
}
}
JavaType contentType = type.getContentType();
if (contentType != null) { // collection[like], map[like], array, reference
// And then value types for all containers:
Class<?> contentClass = findSerializationContentType(a, contentType);
if (contentClass != null) {
if (contentType.hasRawClass(contentClass)) {
contentType = contentType.withStaticTyping();
} else {
// 03-Apr-2016, tatu: As per [databind#1178], may need to actually
// specialize (narrow) type sometimes, even if more commonly opposite
// is needed.
Class<?> currRaw = contentType.getRawClass();
try {
if (contentClass.isAssignableFrom(currRaw)) { // common case
contentType = tf.constructGeneralizedType(contentType, contentClass);
} else if (currRaw.isAssignableFrom(contentClass)) { // specialization, ok as well
contentType = tf.constructSpecializedType(contentType, contentClass);
} else {
throw new JsonMappingException(null,
String.format("Can not refine serialization content type %s into %s; types not related",
contentType, contentClass.getName()));
}
} catch (IllegalArgumentException iae) { // shouldn't really happen
throw new JsonMappingException(null,
String.format("Internal error: failed to refine value type of %s with concrete-type annotation (value %s), from '%s': %s",
type, contentClass.getName(), a.getName(), iae.getMessage()),
iae);
}
}
type = type.withContentType(contentType);
}
}
return type;
} | public JavaType refineSerializationType ( final MapperConfig < ? > config , final Annotated a , final JavaType baseType ) throws JsonMappingException { JavaType type = baseType ; final TypeFactory tf = config . getTypeFactory ( ) ; Class < ? > serClass = findSerializationType ( a ) ; if ( serClass != null ) { if ( type . hasRawClass ( serClass ) ) { type = type . withStaticTyping ( ) ; } else { try { type = tf . constructGeneralizedType ( type , serClass ) ; } catch ( IllegalArgumentException iae ) { throw new JsonMappingException ( null , String . format ( "Failed to widen type %s with annotation (value %s), from '%s': %s" , type , serClass . getName ( ) , a . getName ( ) , iae . getMessage ( ) ) , iae ) ; } } } if ( type . isMapLikeType ( ) ) { JavaType keyType = type . getKeyType ( ) ; Class < ? > keyClass = findSerializationKeyType ( a , keyType ) ; if ( keyClass != null ) { if ( keyType . hasRawClass ( keyClass ) ) { keyType = keyType . withStaticTyping ( ) ; } else { Class < ? > currRaw = keyType . getRawClass ( ) ; try { if ( keyClass . isAssignableFrom ( currRaw ) ) { keyType = tf . constructGeneralizedType ( keyType , keyClass ) ; } else if ( currRaw . isAssignableFrom ( keyClass ) ) { keyType = tf . constructSpecializedType ( keyType , keyClass ) ; } else { throw new JsonMappingException ( null , String . format ( "Can not refine serialization key type %s into %s; types not related" , keyType , keyClass . getName ( ) ) ) ; } } catch ( IllegalArgumentException iae ) { throw new JsonMappingException ( null , String . format ( "Failed to widen key type of %s with concrete-type annotation (value %s), from '%s': %s" , type , keyClass . getName ( ) , a . getName ( ) , iae . getMessage ( ) ) , iae ) ; } } type = ( ( MapLikeType ) type ) . withKeyType ( keyType ) ; } } JavaType contentType = type . getContentType ( ) ; if ( contentType != null ) { Class < ? > contentClass = findSerializationContentType ( a , contentType ) ; if ( contentClass != null ) { if ( contentType . hasRawClass ( contentClass ) ) { contentType = contentType . withStaticTyping ( ) ; } else { Class < ? > currRaw = contentType . getRawClass ( ) ; try { if ( contentClass . isAssignableFrom ( currRaw ) ) { contentType = tf . constructGeneralizedType ( contentType , contentClass ) ; } else if ( currRaw . isAssignableFrom ( contentClass ) ) { contentType = tf . constructSpecializedType ( contentType , contentClass ) ; } else { throw new JsonMappingException ( null , String . format ( "Can not refine serialization content type %s into %s; types not related" , contentType , contentClass . getName ( ) ) ) ; } } catch ( IllegalArgumentException iae ) { throw new JsonMappingException ( null , String . format ( "Internal error: failed to refine value type of %s with concrete-type annotation (value %s), from '%s': %s" , type , contentClass . getName ( ) , a . getName ( ) , iae . getMessage ( ) ) , iae ) ; } } type = type . withContentType ( contentType ) ; } } return type ; } | public JavaType refineSerializationType(final MapperConfig<?> config,
final Annotated a, final JavaType baseType) throws JsonMappingException
{
JavaType type = baseType;
final TypeFactory tf = config.getTypeFactory();
// 10-Oct-2015, tatu: For 2.7, we'll need to delegate back to
// now-deprecated secondary methods; this because while
// direct sub-class not yet retrofitted may only override
// those methods. With 2.8 or later we may consider removal
// of these methods
// Ok: start by refining the main type itself; common to all types
Class<?> serClass = findSerializationType(a);
if (serClass != null) {
if (type.hasRawClass(serClass)) {
// 30-Nov-2015, tatu: As per [databind#1023], need to allow forcing of
// static typing this way
type = type.withStaticTyping();
} else {
Class<?> currRaw = type.getRawClass();
try {
// 11-Oct-2015, tatu: For deser, we call `TypeFactory.constructSpecializedType()`,
// may be needed here too in future?
if (serClass.isAssignableFrom(currRaw)) { // common case
type = tf.constructGeneralizedType(type, serClass);
} else if (currRaw.isAssignableFrom(serClass)) { // specialization, ok as well
type = tf.constructSpecializedType(type, serClass);
} else {
throw new JsonMappingException(null,
String.format("Can not refine serialization type %s into %s; types not related",
type, serClass.getName()));
}
} catch (IllegalArgumentException iae) {
throw new JsonMappingException(null,
String.format("Failed to widen type %s with annotation (value %s), from '%s': %s",
type, serClass.getName(), a.getName(), iae.getMessage()),
iae);
}
}
}
// Then further processing for container types
// First, key type (for Maps, Map-like types):
if (type.isMapLikeType()) {
JavaType keyType = type.getKeyType();
Class<?> keyClass = findSerializationKeyType(a, keyType);
if (keyClass != null) {
if (keyType.hasRawClass(keyClass)) {
keyType = keyType.withStaticTyping();
} else {
Class<?> currRaw = keyType.getRawClass();
try {
// 19-May-2016, tatu: As per [databind#1231], [databind#1178] may need to actually
// specialize (narrow) type sometimes, even if more commonly opposite
// is needed.
if (keyClass.isAssignableFrom(currRaw)) { // common case
keyType = tf.constructGeneralizedType(keyType, keyClass);
} else if (currRaw.isAssignableFrom(keyClass)) { // specialization, ok as well
keyType = tf.constructSpecializedType(keyType, keyClass);
} else {
throw new JsonMappingException(null,
String.format("Can not refine serialization key type %s into %s; types not related",
keyType, keyClass.getName()));
}
} catch (IllegalArgumentException iae) {
throw new JsonMappingException(null,
String.format("Failed to widen key type of %s with concrete-type annotation (value %s), from '%s': %s",
type, keyClass.getName(), a.getName(), iae.getMessage()),
iae);
}
}
type = ((MapLikeType) type).withKeyType(keyType);
}
}
JavaType contentType = type.getContentType();
if (contentType != null) { // collection[like], map[like], array, reference
// And then value types for all containers:
Class<?> contentClass = findSerializationContentType(a, contentType);
if (contentClass != null) {
if (contentType.hasRawClass(contentClass)) {
contentType = contentType.withStaticTyping();
} else {
// 03-Apr-2016, tatu: As per [databind#1178], may need to actually
// specialize (narrow) type sometimes, even if more commonly opposite
// is needed.
Class<?> currRaw = contentType.getRawClass();
try {
if (contentClass.isAssignableFrom(currRaw)) { // common case
contentType = tf.constructGeneralizedType(contentType, contentClass);
} else if (currRaw.isAssignableFrom(contentClass)) { // specialization, ok as well
contentType = tf.constructSpecializedType(contentType, contentClass);
} else {
throw new JsonMappingException(null,
String.format("Can not refine serialization content type %s into %s; types not related",
contentType, contentClass.getName()));
}
} catch (IllegalArgumentException iae) { // shouldn't really happen
throw new JsonMappingException(null,
String.format("Internal error: failed to refine value type of %s with concrete-type annotation (value %s), from '%s': %s",
type, contentClass.getName(), a.getName(), iae.getMessage()),
iae);
}
}
type = type.withContentType(contentType);
}
}
return type;
} | public JavaType refineSerializationType ( final MapperConfig < ? > config , final Annotated a , final JavaType baseType ) throws JsonMappingException { JavaType type = baseType ; final TypeFactory tf = config . getTypeFactory ( ) ; Class < ? > serClass = findSerializationType ( a ) ; if ( serClass != null ) { if ( type . hasRawClass ( serClass ) ) { type = type . withStaticTyping ( ) ; } else { Class < ? > currRaw = type . getRawClass ( ) ; try { if ( serClass . isAssignableFrom ( currRaw ) ) { type = tf . constructGeneralizedType ( type , serClass ) ; } else if ( currRaw . isAssignableFrom ( serClass ) ) { type = tf . constructSpecializedType ( type , serClass ) ; } else { throw new JsonMappingException ( null , String . format ( "Can not refine serialization type %s into %s; types not related" , type , serClass . getName ( ) ) ) ; } } catch ( IllegalArgumentException iae ) { throw new JsonMappingException ( null , String . format ( "Failed to widen type %s with annotation (value %s), from '%s': %s" , type , serClass . getName ( ) , a . getName ( ) , iae . getMessage ( ) ) , iae ) ; } } } if ( type . isMapLikeType ( ) ) { JavaType keyType = type . getKeyType ( ) ; Class < ? > keyClass = findSerializationKeyType ( a , keyType ) ; if ( keyClass != null ) { if ( keyType . hasRawClass ( keyClass ) ) { keyType = keyType . withStaticTyping ( ) ; } else { Class < ? > currRaw = keyType . getRawClass ( ) ; try { if ( keyClass . isAssignableFrom ( currRaw ) ) { keyType = tf . constructGeneralizedType ( keyType , keyClass ) ; } else if ( currRaw . isAssignableFrom ( keyClass ) ) { keyType = tf . constructSpecializedType ( keyType , keyClass ) ; } else { throw new JsonMappingException ( null , String . format ( "Can not refine serialization key type %s into %s; types not related" , keyType , keyClass . getName ( ) ) ) ; } } catch ( IllegalArgumentException iae ) { throw new JsonMappingException ( null , String . format ( "Failed to widen key type of %s with concrete-type annotation (value %s), from '%s': %s" , type , keyClass . getName ( ) , a . getName ( ) , iae . getMessage ( ) ) , iae ) ; } } type = ( ( MapLikeType ) type ) . withKeyType ( keyType ) ; } } JavaType contentType = type . getContentType ( ) ; if ( contentType != null ) { Class < ? > contentClass = findSerializationContentType ( a , contentType ) ; if ( contentClass != null ) { if ( contentType . hasRawClass ( contentClass ) ) { contentType = contentType . withStaticTyping ( ) ; } else { Class < ? > currRaw = contentType . getRawClass ( ) ; try { if ( contentClass . isAssignableFrom ( currRaw ) ) { contentType = tf . constructGeneralizedType ( contentType , contentClass ) ; } else if ( currRaw . isAssignableFrom ( contentClass ) ) { contentType = tf . constructSpecializedType ( contentType , contentClass ) ; } else { throw new JsonMappingException ( null , String . format ( "Can not refine serialization content type %s into %s; types not related" , contentType , contentClass . getName ( ) ) ) ; } } catch ( IllegalArgumentException iae ) { throw new JsonMappingException ( null , String . format ( "Internal error: failed to refine value type of %s with concrete-type annotation (value %s), from '%s': %s" , type , contentClass . getName ( ) , a . getName ( ) , iae . getMessage ( ) ) , iae ) ; } } type = type . withContentType ( contentType ) ; } } return type ; } |
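The decision the fix adds for the main type mirrors what the key and content branches already did: widen when the annotated class is a supertype, specialize when it is a subtype, reject otherwise. A plain-Java restatement using the classes from the report:
```
public class RefineDirectionDemo {
    interface AccessionIdentifier {}
    static class BaseAccessionIdentifier implements AccessionIdentifier {}

    static String refine(Class<?> declared, Class<?> annotated) {
        if (annotated.isAssignableFrom(declared)) {
            return "generalize to " + annotated.getSimpleName();
        }
        if (declared.isAssignableFrom(annotated)) {
            return "specialize to " + annotated.getSimpleName();
        }
        return "reject: types not related";
    }

    public static void main(String[] args) {
        // The failing case: interface declared, concrete subtype in @JsonSerialize(as=...).
        System.out.println(refine(AccessionIdentifier.class, BaseAccessionIdentifier.class));
        // -> specialize to BaseAccessionIdentifier (2.7.4 only ever tried to widen)
        System.out.println(refine(BaseAccessionIdentifier.class, AccessionIdentifier.class));
        // -> generalize to AccessionIdentifier
    }
}
```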
Math | 75 | src/main/java/org/apache/commons/math/stat/Frequency.java | 301 | 304 | In stat.Frequency, getPct(Object) uses getCumPct(Comparable) instead of getPct(Comparable) | Drop-in replacement of 1.2 with 2.0 is not possible because all getPct calls will be cumulative without a code change
Frequency.java
/**
* Returns the percentage of values that are equal to v
* @deprecated replaced by {@link #getPct(Comparable)} as of 2.0
*/
@Deprecated
public double getPct(Object v) {
return getCumPct((Comparable<?>) v);
} | @Deprecated
public double getPct(Object v) {
return getCumPct((Comparable<?>) v);
} | @ Deprecated public double getPct ( Object v ) { return getCumPct ( ( Comparable < ? > ) v ) ; } | @Deprecated
public double getPct(Object v) {
return getPct((Comparable<?>) v);
} | @ Deprecated public double getPct ( Object v ) { return getPct ( ( Comparable < ? > ) v ) ; } |
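The two statistics the typo conflated differ even on tiny samples. A self-contained comparison of "percentage equal to v" versus "cumulative percentage up to v" (plain Java; the Frequency class itself is not used):
```
import java.util.Arrays;

public class PctVsCumPctDemo {
    public static void main(String[] args) {
        int[] data = {1, 2, 2, 3};
        double n = data.length;
        long eq = Arrays.stream(data).filter(v -> v == 2).count();
        long le = Arrays.stream(data).filter(v -> v <= 2).count();
        System.out.println("getPct(2)    should be " + (eq / n)); // 0.5
        System.out.println("getCumPct(2) would be  " + (le / n)); // 0.75
    }
}
```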
JacksonCore | 7 | src/main/java/com/fasterxml/jackson/core/json/JsonWriteContext.java | 166 | 185 | Add a check so `JsonGenerator.writeString()` won't work if `writeFieldName()` expected. | Looks like calling `writeString()` (and perhaps other scalar write methods) results in writing invalid output, instead of throwing an exception. It should instead fail; in future we may want to consider allowing this as an alias, but at any rate it should not produce invalid output.
| public int writeValue() {
// Most likely, object:
if (_type == TYPE_OBJECT) {
_gotName = false;
++_index;
return STATUS_OK_AFTER_COLON;
}
// Ok, array?
if (_type == TYPE_ARRAY) {
int ix = _index;
++_index;
return (ix < 0) ? STATUS_OK_AS_IS : STATUS_OK_AFTER_COMMA;
}
// Nope, root context
// No commas within root context, but need space
++_index;
return (_index == 0) ? STATUS_OK_AS_IS : STATUS_OK_AFTER_SPACE;
} | public int writeValue ( ) { if ( _type == TYPE_OBJECT ) { _gotName = false ; ++ _index ; return STATUS_OK_AFTER_COLON ; } if ( _type == TYPE_ARRAY ) { int ix = _index ; ++ _index ; return ( ix < 0 ) ? STATUS_OK_AS_IS : STATUS_OK_AFTER_COMMA ; } ++ _index ; return ( _index == 0 ) ? STATUS_OK_AS_IS : STATUS_OK_AFTER_SPACE ; } | public int writeValue() {
// Most likely, object:
if (_type == TYPE_OBJECT) {
if (!_gotName) {
return STATUS_EXPECT_NAME;
}
_gotName = false;
++_index;
return STATUS_OK_AFTER_COLON;
}
// Ok, array?
if (_type == TYPE_ARRAY) {
int ix = _index;
++_index;
return (ix < 0) ? STATUS_OK_AS_IS : STATUS_OK_AFTER_COMMA;
}
// Nope, root context
// No commas within root context, but need space
++_index;
return (_index == 0) ? STATUS_OK_AS_IS : STATUS_OK_AFTER_SPACE;
} | public int writeValue ( ) { if ( _type == TYPE_OBJECT ) { if ( ! _gotName ) { return STATUS_EXPECT_NAME ; } _gotName = false ; ++ _index ; return STATUS_OK_AFTER_COLON ; } if ( _type == TYPE_ARRAY ) { int ix = _index ; ++ _index ; return ( ix < 0 ) ? STATUS_OK_AS_IS : STATUS_OK_AFTER_COMMA ; } ++ _index ; return ( _index == 0 ) ? STATUS_OK_AS_IS : STATUS_OK_AFTER_SPACE ; } |
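Behind the one-line guard is a tiny state machine: inside an object, a value is legal only after a field name. A stripped-down model of that protocol (constants and class are illustrative, not the JsonWriteContext API):
```
public class WriteProtocolDemo {
    static final int STATUS_EXPECT_NAME = -1;    // illustrative value
    static final int STATUS_OK_AFTER_COLON = 2;  // illustrative value

    private boolean gotName;

    int writeFieldName() {
        gotName = true;
        return 0;
    }

    int writeValue() {
        if (!gotName) {
            return STATUS_EXPECT_NAME; // generator should now throw, not emit output
        }
        gotName = false;
        return STATUS_OK_AFTER_COLON;
    }

    public static void main(String[] args) {
        WriteProtocolDemo ctx = new WriteProtocolDemo();
        System.out.println(ctx.writeValue() == STATUS_EXPECT_NAME);    // true: no name yet
        ctx.writeFieldName();
        System.out.println(ctx.writeValue() == STATUS_OK_AFTER_COLON); // true
    }
}
```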
Math | 3 | src/main/java/org/apache/commons/math3/util/MathArrays.java | 814 | 872 | ArrayIndexOutOfBoundsException in MathArrays.linearCombination | When MathArrays.linearCombination is passed arguments with length 1, it throws an ArrayIndexOutOfBoundsException. This is caused by this line:
double prodHighNext = prodHigh[1];
linearCombination should check the length of the arguments and fall back to simple multiplication if length == 1. | public static double linearCombination(final double[] a, final double[] b)
throws DimensionMismatchException {
final int len = a.length;
if (len != b.length) {
throw new DimensionMismatchException(len, b.length);
}
// Revert to scalar multiplication.
final double[] prodHigh = new double[len];
double prodLowSum = 0;
for (int i = 0; i < len; i++) {
final double ai = a[i];
final double ca = SPLIT_FACTOR * ai;
final double aHigh = ca - (ca - ai);
final double aLow = ai - aHigh;
final double bi = b[i];
final double cb = SPLIT_FACTOR * bi;
final double bHigh = cb - (cb - bi);
final double bLow = bi - bHigh;
prodHigh[i] = ai * bi;
final double prodLow = aLow * bLow - (((prodHigh[i] -
aHigh * bHigh) -
aLow * bHigh) -
aHigh * bLow);
prodLowSum += prodLow;
}
final double prodHighCur = prodHigh[0];
double prodHighNext = prodHigh[1];
double sHighPrev = prodHighCur + prodHighNext;
double sPrime = sHighPrev - prodHighNext;
double sLowSum = (prodHighNext - (sHighPrev - sPrime)) + (prodHighCur - sPrime);
final int lenMinusOne = len - 1;
for (int i = 1; i < lenMinusOne; i++) {
prodHighNext = prodHigh[i + 1];
final double sHighCur = sHighPrev + prodHighNext;
sPrime = sHighCur - prodHighNext;
sLowSum += (prodHighNext - (sHighCur - sPrime)) + (sHighPrev - sPrime);
sHighPrev = sHighCur;
}
double result = sHighPrev + (prodLowSum + sLowSum);
if (Double.isNaN(result)) {
// either we have split infinite numbers or some coefficients were NaNs,
// just rely on the naive implementation and let IEEE754 handle this
result = 0;
for (int i = 0; i < len; ++i) {
result += a[i] * b[i];
}
}
return result;
} | public static double linearCombination ( final double [ ] a , final double [ ] b ) throws DimensionMismatchException { final int len = a . length ; if ( len != b . length ) { throw new DimensionMismatchException ( len , b . length ) ; } final double [ ] prodHigh = new double [ len ] ; double prodLowSum = 0 ; for ( int i = 0 ; i < len ; i ++ ) { final double ai = a [ i ] ; final double ca = SPLIT_FACTOR * ai ; final double aHigh = ca - ( ca - ai ) ; final double aLow = ai - aHigh ; final double bi = b [ i ] ; final double cb = SPLIT_FACTOR * bi ; final double bHigh = cb - ( cb - bi ) ; final double bLow = bi - bHigh ; prodHigh [ i ] = ai * bi ; final double prodLow = aLow * bLow - ( ( ( prodHigh [ i ] - aHigh * bHigh ) - aLow * bHigh ) - aHigh * bLow ) ; prodLowSum += prodLow ; } final double prodHighCur = prodHigh [ 0 ] ; double prodHighNext = prodHigh [ 1 ] ; double sHighPrev = prodHighCur + prodHighNext ; double sPrime = sHighPrev - prodHighNext ; double sLowSum = ( prodHighNext - ( sHighPrev - sPrime ) ) + ( prodHighCur - sPrime ) ; final int lenMinusOne = len - 1 ; for ( int i = 1 ; i < lenMinusOne ; i ++ ) { prodHighNext = prodHigh [ i + 1 ] ; final double sHighCur = sHighPrev + prodHighNext ; sPrime = sHighCur - prodHighNext ; sLowSum += ( prodHighNext - ( sHighCur - sPrime ) ) + ( sHighPrev - sPrime ) ; sHighPrev = sHighCur ; } double result = sHighPrev + ( prodLowSum + sLowSum ) ; if ( Double . isNaN ( result ) ) { result = 0 ; for ( int i = 0 ; i < len ; ++ i ) { result += a [ i ] * b [ i ] ; } } return result ; } | public static double linearCombination(final double[] a, final double[] b)
throws DimensionMismatchException {
final int len = a.length;
if (len != b.length) {
throw new DimensionMismatchException(len, b.length);
}
if (len == 1) {
// Revert to scalar multiplication.
return a[0] * b[0];
}
final double[] prodHigh = new double[len];
double prodLowSum = 0;
for (int i = 0; i < len; i++) {
final double ai = a[i];
final double ca = SPLIT_FACTOR * ai;
final double aHigh = ca - (ca - ai);
final double aLow = ai - aHigh;
final double bi = b[i];
final double cb = SPLIT_FACTOR * bi;
final double bHigh = cb - (cb - bi);
final double bLow = bi - bHigh;
prodHigh[i] = ai * bi;
final double prodLow = aLow * bLow - (((prodHigh[i] -
aHigh * bHigh) -
aLow * bHigh) -
aHigh * bLow);
prodLowSum += prodLow;
}
final double prodHighCur = prodHigh[0];
double prodHighNext = prodHigh[1];
double sHighPrev = prodHighCur + prodHighNext;
double sPrime = sHighPrev - prodHighNext;
double sLowSum = (prodHighNext - (sHighPrev - sPrime)) + (prodHighCur - sPrime);
final int lenMinusOne = len - 1;
for (int i = 1; i < lenMinusOne; i++) {
prodHighNext = prodHigh[i + 1];
final double sHighCur = sHighPrev + prodHighNext;
sPrime = sHighCur - prodHighNext;
sLowSum += (prodHighNext - (sHighCur - sPrime)) + (sHighPrev - sPrime);
sHighPrev = sHighCur;
}
double result = sHighPrev + (prodLowSum + sLowSum);
if (Double.isNaN(result)) {
// either we have split infinite numbers or some coefficients were NaNs,
// just rely on the naive implementation and let IEEE754 handle this
result = 0;
for (int i = 0; i < len; ++i) {
result += a[i] * b[i];
}
}
return result;
} | public static double linearCombination ( final double [ ] a , final double [ ] b ) throws DimensionMismatchException { final int len = a . length ; if ( len != b . length ) { throw new DimensionMismatchException ( len , b . length ) ; } if ( len == 1 ) { return a [ 0 ] * b [ 0 ] ; } final double [ ] prodHigh = new double [ len ] ; double prodLowSum = 0 ; for ( int i = 0 ; i < len ; i ++ ) { final double ai = a [ i ] ; final double ca = SPLIT_FACTOR * ai ; final double aHigh = ca - ( ca - ai ) ; final double aLow = ai - aHigh ; final double bi = b [ i ] ; final double cb = SPLIT_FACTOR * bi ; final double bHigh = cb - ( cb - bi ) ; final double bLow = bi - bHigh ; prodHigh [ i ] = ai * bi ; final double prodLow = aLow * bLow - ( ( ( prodHigh [ i ] - aHigh * bHigh ) - aLow * bHigh ) - aHigh * bLow ) ; prodLowSum += prodLow ; } final double prodHighCur = prodHigh [ 0 ] ; double prodHighNext = prodHigh [ 1 ] ; double sHighPrev = prodHighCur + prodHighNext ; double sPrime = sHighPrev - prodHighNext ; double sLowSum = ( prodHighNext - ( sHighPrev - sPrime ) ) + ( prodHighCur - sPrime ) ; final int lenMinusOne = len - 1 ; for ( int i = 1 ; i < lenMinusOne ; i ++ ) { prodHighNext = prodHigh [ i + 1 ] ; final double sHighCur = sHighPrev + prodHighNext ; sPrime = sHighCur - prodHighNext ; sLowSum += ( prodHighNext - ( sHighCur - sPrime ) ) + ( sHighPrev - sPrime ) ; sHighPrev = sHighCur ; } double result = sHighPrev + ( prodLowSum + sLowSum ) ; if ( Double . isNaN ( result ) ) { result = 0 ; for ( int i = 0 ; i < len ; ++ i ) { result += a [ i ] * b [ i ] ; } } return result ; } |
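The guard can be exercised directly: with length-1 inputs the compensated-summation tail would read prodHigh[1]. A minimal harness around the guarded entry point (the len >= 2 path is stubbed with a naive dot product for brevity, not the compensated summation the real method uses):
```
public class LinearCombinationGuardDemo {
    static double linearCombination(double[] a, double[] b) {
        if (a.length != b.length) {
            throw new IllegalArgumentException("dimension mismatch");
        }
        if (a.length == 1) {
            return a[0] * b[0]; // the fix: no prodHigh[1] access for length 1
        }
        double result = 0; // stub for the compensated path
        for (int i = 0; i < a.length; i++) {
            result += a[i] * b[i];
        }
        return result;
    }

    public static void main(String[] args) {
        // Threw ArrayIndexOutOfBoundsException before the fix:
        System.out.println(linearCombination(new double[]{3.0}, new double[]{4.0})); // 12.0
    }
}
```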
Math | 59 | src/main/java/org/apache/commons/math/util/FastMath.java | 3481 | 3483 | FastMath.max(50.0f, -50.0f) => -50.0f; should be +50.0f | FastMath.max(50.0f, -50.0f) => -50.0f; should be +50.0f.
This is because the wrong variable is returned.
The bug was not detected by the test case "testMinMaxFloat()" because that has a bug too - it tests doubles, not floats. | public static float max(final float a, final float b) {
return (a <= b) ? b : (Float.isNaN(a + b) ? Float.NaN : b);
} | public static float max ( final float a , final float b ) { return ( a <= b ) ? b : ( Float . isNaN ( a + b ) ? Float . NaN : b ) ; } | public static float max(final float a, final float b) {
return (a <= b) ? b : (Float.isNaN(a + b) ? Float.NaN : a);
} | public static float max ( final float a , final float b ) { return ( a <= b ) ? b : ( Float . isNaN ( a + b ) ? Float . NaN : a ) ; } |
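The fix above is a single character: the non-NaN branch of the ternary returned `b` where it should return `a`. A plain-Java sketch contrasting the two variants (class and method names are illustrative):

```java
public class MaxBugDemo {
    // Buggy variant: returns b on the non-NaN branch even when a > b.
    static float buggyMax(float a, float b) {
        return (a <= b) ? b : (Float.isNaN(a + b) ? Float.NaN : b);
    }

    // Fixed variant, as in the patch.
    static float fixedMax(float a, float b) {
        return (a <= b) ? b : (Float.isNaN(a + b) ? Float.NaN : a);
    }

    public static void main(String[] args) {
        System.out.println(buggyMax(50.0f, -50.0f)); // -50.0 (wrong)
        System.out.println(fixedMax(50.0f, -50.0f)); //  50.0 (correct)
    }
}
```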
Compress | 40 | src/main/java/org/apache/commons/compress/utils/BitInputStream.java | 81 | 109 | Overflow in BitInputStream | In class BitInputStream.java (\src\main\java\org\apache\commons\compress\utils),
function:
public long readBits(final int count) throws IOException {
if (count < 0 || count > MAXIMUM_CACHE_SIZE) {
throw new IllegalArgumentException("count must not be negative or greater than " + MAXIMUM_CACHE_SIZE);
}
while (bitsCachedSize < count) {
final long nextByte = in.read();
if (nextByte < 0) {
return nextByte;
}
if (byteOrder == ByteOrder.LITTLE_ENDIAN) {
bitsCached |= (nextByte << bitsCachedSize);
} else {
bitsCached <<= 8;
bitsCached |= nextByte;
}
bitsCachedSize += 8;
}
final long bitsOut;
if (byteOrder == ByteOrder.LITTLE_ENDIAN) {
bitsOut = (bitsCached & MASKS[count]);
bitsCached >>>= count;
} else {
bitsOut = (bitsCached >> (bitsCachedSize - count)) & MASKS[count];
}
bitsCachedSize -= count;
return bitsOut;
}
I think here "bitsCached |= (nextByte << bitsCachedSize);" will overflow in some cases. for example, below is a test case:
public static void test() {
ByteArrayInputStream in = new ByteArrayInputStream(new byte[]{87, 45, 66, 15,
90, 29, 88, 61, 33, 74});
BitInputStream bin = new BitInputStream(in, ByteOrder.LITTLE_ENDIAN);
try {
long ret = bin.readBits(5);
ret = bin.readBits(63);
ret = bin.readBits(12);
} catch (Exception e) {
e.printStackTrace();
}
}
The overflow occurs in "bin.readBits(63);", which then produces a wrong result from "bin.readBits(12);".
| public long readBits(final int count) throws IOException {
if (count < 0 || count > MAXIMUM_CACHE_SIZE) {
throw new IllegalArgumentException("count must not be negative or greater than " + MAXIMUM_CACHE_SIZE);
}
while (bitsCachedSize < count) {
final long nextByte = in.read();
if (nextByte < 0) {
return nextByte;
}
if (byteOrder == ByteOrder.LITTLE_ENDIAN) {
bitsCached |= (nextByte << bitsCachedSize);
} else {
bitsCached <<= 8;
bitsCached |= nextByte;
}
bitsCachedSize += 8;
}
// bitsCachedSize >= 57 and left-shifting it 8 bits would cause an overflow
final long bitsOut;
if (byteOrder == ByteOrder.LITTLE_ENDIAN) {
bitsOut = (bitsCached & MASKS[count]);
bitsCached >>>= count;
} else {
bitsOut = (bitsCached >> (bitsCachedSize - count)) & MASKS[count];
}
bitsCachedSize -= count;
return bitsOut;
} | public long readBits ( final int count ) throws IOException { if ( count < 0 || count > MAXIMUM_CACHE_SIZE ) { throw new IllegalArgumentException ( "count must not be negative or greater than " + MAXIMUM_CACHE_SIZE ) ; } while ( bitsCachedSize < count ) { final long nextByte = in . read ( ) ; if ( nextByte < 0 ) { return nextByte ; } if ( byteOrder == ByteOrder . LITTLE_ENDIAN ) { bitsCached |= ( nextByte << bitsCachedSize ) ; } else { bitsCached <<= 8 ; bitsCached |= nextByte ; } bitsCachedSize += 8 ; } final long bitsOut ; if ( byteOrder == ByteOrder . LITTLE_ENDIAN ) { bitsOut = ( bitsCached & MASKS [ count ] ) ; bitsCached >>>= count ; } else { bitsOut = ( bitsCached >> ( bitsCachedSize - count ) ) & MASKS [ count ] ; } bitsCachedSize -= count ; return bitsOut ; } | public long readBits(final int count) throws IOException {
if (count < 0 || count > MAXIMUM_CACHE_SIZE) {
throw new IllegalArgumentException("count must not be negative or greater than " + MAXIMUM_CACHE_SIZE);
}
while (bitsCachedSize < count && bitsCachedSize < 57) {
final long nextByte = in.read();
if (nextByte < 0) {
return nextByte;
}
if (byteOrder == ByteOrder.LITTLE_ENDIAN) {
bitsCached |= (nextByte << bitsCachedSize);
} else {
bitsCached <<= 8;
bitsCached |= nextByte;
}
bitsCachedSize += 8;
}
int overflowBits = 0;
long overflow = 0l;
if (bitsCachedSize < count) {
// bitsCachedSize >= 57 and left-shifting it 8 bits would cause an overflow
int bitsToAddCount = count - bitsCachedSize;
overflowBits = 8 - bitsToAddCount;
final long nextByte = in.read();
if (nextByte < 0) {
return nextByte;
}
if (byteOrder == ByteOrder.LITTLE_ENDIAN) {
long bitsToAdd = nextByte & MASKS[bitsToAddCount];
bitsCached |= (bitsToAdd << bitsCachedSize);
overflow = (nextByte >>> bitsToAddCount) & MASKS[overflowBits];
} else {
bitsCached <<= bitsToAddCount;
long bitsToAdd = (nextByte >>> (overflowBits)) & MASKS[bitsToAddCount];
bitsCached |= bitsToAdd;
overflow = nextByte & MASKS[overflowBits];
}
bitsCachedSize = count;
}
final long bitsOut;
if (overflowBits == 0) {
if (byteOrder == ByteOrder.LITTLE_ENDIAN) {
bitsOut = (bitsCached & MASKS[count]);
bitsCached >>>= count;
} else {
bitsOut = (bitsCached >> (bitsCachedSize - count)) & MASKS[count];
}
bitsCachedSize -= count;
} else {
bitsOut = bitsCached & MASKS[count];
bitsCached = overflow;
bitsCachedSize = overflowBits;
}
return bitsOut;
} | public long readBits ( final int count ) throws IOException { if ( count < 0 || count > MAXIMUM_CACHE_SIZE ) { throw new IllegalArgumentException ( "count must not be negative or greater than " + MAXIMUM_CACHE_SIZE ) ; } while ( bitsCachedSize < count && bitsCachedSize < 57 ) { final long nextByte = in . read ( ) ; if ( nextByte < 0 ) { return nextByte ; } if ( byteOrder == ByteOrder . LITTLE_ENDIAN ) { bitsCached |= ( nextByte << bitsCachedSize ) ; } else { bitsCached <<= 8 ; bitsCached |= nextByte ; } bitsCachedSize += 8 ; } int overflowBits = 0 ; long overflow = 0l ; if ( bitsCachedSize < count ) { int bitsToAddCount = count - bitsCachedSize ; overflowBits = 8 - bitsToAddCount ; final long nextByte = in . read ( ) ; if ( nextByte < 0 ) { return nextByte ; } if ( byteOrder == ByteOrder . LITTLE_ENDIAN ) { long bitsToAdd = nextByte & MASKS [ bitsToAddCount ] ; bitsCached |= ( bitsToAdd << bitsCachedSize ) ; overflow = ( nextByte >>> bitsToAddCount ) & MASKS [ overflowBits ] ; } else { bitsCached <<= bitsToAddCount ; long bitsToAdd = ( nextByte >>> ( overflowBits ) ) & MASKS [ bitsToAddCount ] ; bitsCached |= bitsToAdd ; overflow = nextByte & MASKS [ overflowBits ] ; } bitsCachedSize = count ; } final long bitsOut ; if ( overflowBits == 0 ) { if ( byteOrder == ByteOrder . LITTLE_ENDIAN ) { bitsOut = ( bitsCached & MASKS [ count ] ) ; bitsCached >>>= count ; } else { bitsOut = ( bitsCached >> ( bitsCachedSize - count ) ) & MASKS [ count ] ; } bitsCachedSize -= count ; } else { bitsOut = bitsCached & MASKS [ count ] ; bitsCached = overflow ; bitsCachedSize = overflowBits ; } return bitsOut ; } |
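The key line of the fix is the `bitsCachedSize < 57` guard: once 57 or more bits are cached, `nextByte << bitsCachedSize` would push part of the fresh byte past bit 63 of the 64-bit cache and silently drop it. A self-contained sketch of that shift overflow, independent of the stream classes:

```java
public class ShiftOverflowDemo {
    public static void main(String[] args) {
        long nextByte = 0xFFL;   // a full byte just read from the stream
        int bitsCachedSize = 61; // the cache already holds 61 bits
        long shifted = nextByte << bitsCachedSize;
        // Only the lowest 3 bits of the byte land inside the long;
        // the top 5 bits are shifted out and lost.
        System.out.println(Long.toBinaryString(shifted));
        System.out.println(Long.bitCount(shifted)); // prints 3, not 8
    }
}
```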
Compress | 17 | src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java | 102 | 151 | Tar file for Android backup cannot be read | Attached tar file was generated by some kind of backup tool on Android. Normal tar utilities seem to handle it fine, but Commons Compress doesn't.
{noformat}
java.lang.IllegalArgumentException: Invalid byte 0 at offset 5 in '01750{NUL}{NUL}{NUL}' len=8
at org.apache.commons.compress.archivers.tar.TarUtils.parseOctal(TarUtils.java:99)
at org.apache.commons.compress.archivers.tar.TarArchiveEntry.parseTarHeader(TarArchiveEntry.java:788)
at org.apache.commons.compress.archivers.tar.TarArchiveEntry.<init>(TarArchiveEntry.java:308)
{noformat}
| public static long parseOctal(final byte[] buffer, final int offset, final int length) {
long result = 0;
int end = offset + length;
int start = offset;
if (length < 2){
throw new IllegalArgumentException("Length "+length+" must be at least 2");
}
if (buffer[start] == 0) {
return 0L;
}
// Skip leading spaces
while (start < end){
if (buffer[start] == ' '){
start++;
} else {
break;
}
}
// Must have trailing NUL or space
byte trailer;
trailer = buffer[end-1];
if (trailer == 0 || trailer == ' '){
end--;
} else {
throw new IllegalArgumentException(
exceptionMessage(buffer, offset, length, end-1, trailer));
}
// May have additional NULs or spaces
trailer = buffer[end - 1];
if (trailer == 0 || trailer == ' '){
end--;
}
for ( ;start < end; start++) {
final byte currentByte = buffer[start];
// CheckStyle:MagicNumber OFF
if (currentByte < '0' || currentByte > '7'){
throw new IllegalArgumentException(
exceptionMessage(buffer, offset, length, start, currentByte));
}
result = (result << 3) + (currentByte - '0'); // convert from ASCII
// CheckStyle:MagicNumber ON
}
return result;
} | public static long parseOctal ( final byte [ ] buffer , final int offset , final int length ) { long result = 0 ; int end = offset + length ; int start = offset ; if ( length < 2 ) { throw new IllegalArgumentException ( "Length " + length + " must be at least 2" ) ; } if ( buffer [ start ] == 0 ) { return 0L ; } while ( start < end ) { if ( buffer [ start ] == ' ' ) { start ++ ; } else { break ; } } byte trailer ; trailer = buffer [ end - 1 ] ; if ( trailer == 0 || trailer == ' ' ) { end -- ; } else { throw new IllegalArgumentException ( exceptionMessage ( buffer , offset , length , end - 1 , trailer ) ) ; } trailer = buffer [ end - 1 ] ; if ( trailer == 0 || trailer == ' ' ) { end -- ; } for ( ; start < end ; start ++ ) { final byte currentByte = buffer [ start ] ; if ( currentByte < '0' || currentByte > '7' ) { throw new IllegalArgumentException ( exceptionMessage ( buffer , offset , length , start , currentByte ) ) ; } result = ( result << 3 ) + ( currentByte - '0' ) ; } return result ; } | public static long parseOctal(final byte[] buffer, final int offset, final int length) {
long result = 0;
int end = offset + length;
int start = offset;
if (length < 2){
throw new IllegalArgumentException("Length "+length+" must be at least 2");
}
if (buffer[start] == 0) {
return 0L;
}
// Skip leading spaces
while (start < end){
if (buffer[start] == ' '){
start++;
} else {
break;
}
}
// Must have trailing NUL or space
byte trailer;
trailer = buffer[end-1];
if (trailer == 0 || trailer == ' '){
end--;
} else {
throw new IllegalArgumentException(
exceptionMessage(buffer, offset, length, end-1, trailer));
}
// May have additional NULs or spaces
trailer = buffer[end - 1];
while (start < end - 1 && (trailer == 0 || trailer == ' ')) {
end--;
trailer = buffer[end - 1];
}
for ( ;start < end; start++) {
final byte currentByte = buffer[start];
// CheckStyle:MagicNumber OFF
if (currentByte < '0' || currentByte > '7'){
throw new IllegalArgumentException(
exceptionMessage(buffer, offset, length, start, currentByte));
}
result = (result << 3) + (currentByte - '0'); // convert from ASCII
// CheckStyle:MagicNumber ON
}
return result;
} | public static long parseOctal ( final byte [ ] buffer , final int offset , final int length ) { long result = 0 ; int end = offset + length ; int start = offset ; if ( length < 2 ) { throw new IllegalArgumentException ( "Length " + length + " must be at least 2" ) ; } if ( buffer [ start ] == 0 ) { return 0L ; } while ( start < end ) { if ( buffer [ start ] == ' ' ) { start ++ ; } else { break ; } } byte trailer ; trailer = buffer [ end - 1 ] ; if ( trailer == 0 || trailer == ' ' ) { end -- ; } else { throw new IllegalArgumentException ( exceptionMessage ( buffer , offset , length , end - 1 , trailer ) ) ; } trailer = buffer [ end - 1 ] ; while ( start < end - 1 && ( trailer == 0 || trailer == ' ' ) ) { end -- ; trailer = buffer [ end - 1 ] ; } for ( ; start < end ; start ++ ) { final byte currentByte = buffer [ start ] ; if ( currentByte < '0' || currentByte > '7' ) { throw new IllegalArgumentException ( exceptionMessage ( buffer , offset , length , start , currentByte ) ) ; } result = ( result << 3 ) + ( currentByte - '0' ) ; } return result ; } |
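The patch replaces a single "one extra trailer" check with a loop, so a run of trailing NULs such as `01750{NUL}{NUL}{NUL}` is consumed before the digits are parsed. Below is a standalone sketch of the trimmed parse; this is a hypothetical helper written for illustration, not the Commons Compress API, and it drops the mandatory-trailer check the real method keeps.

```java
public class OctalTrimDemo {
    // Parses an octal field, skipping leading spaces and any run of
    // trailing NULs or spaces (as the fixed TarUtils.parseOctal does).
    static long parseOctal(byte[] buf, int offset, int length) {
        int start = offset, end = offset + length;
        while (start < end && buf[start] == ' ') start++;
        while (end > start + 1 && (buf[end - 1] == 0 || buf[end - 1] == ' ')) end--;
        long result = 0;
        for (int i = start; i < end; i++) {
            if (buf[i] < '0' || buf[i] > '7') {
                throw new IllegalArgumentException("bad byte at offset " + i);
            }
            result = (result << 3) + (buf[i] - '0');
        }
        return result;
    }

    public static void main(String[] args) {
        byte[] field = {'0', '1', '7', '5', '0', 0, 0, 0}; // "01750" + three NULs
        System.out.println(parseOctal(field, 0, field.length)); // 1000 (01750 octal)
    }
}
```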
Cli | 19 | src/java/org/apache/commons/cli/PosixParser.java | 227 | 239 | PosixParser ignores unrecognized tokens starting with '-' | PosixParser does not properly handle unrecognized tokens starting with '-' when stopAtNonOption is enabled; the token is simply ignored.
For example, if the option 'a' is defined, the following command line:
{code}-z -a foo{code}
is interpreted as:
{code}-a foo{code} | private void processOptionToken(String token, boolean stopAtNonOption)
{
if (options.hasOption(token))
{
currentOption = options.getOption(token);
tokens.add(token);
}
else if (stopAtNonOption)
{
eatTheRest = true;
tokens.add(token);
}
} | private void processOptionToken ( String token , boolean stopAtNonOption ) { if ( options . hasOption ( token ) ) { currentOption = options . getOption ( token ) ; tokens . add ( token ) ; } else if ( stopAtNonOption ) { eatTheRest = true ; tokens . add ( token ) ; } } | private void processOptionToken(String token, boolean stopAtNonOption)
{
if (options.hasOption(token))
{
currentOption = options.getOption(token);
}
else if (stopAtNonOption)
{
eatTheRest = true;
}
tokens.add(token);
} | private void processOptionToken ( String token , boolean stopAtNonOption ) { if ( options . hasOption ( token ) ) { currentOption = options . getOption ( token ) ; } else if ( stopAtNonOption ) { eatTheRest = true ; } tokens . add ( token ) ; } |
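The fix hoists `tokens.add(token)` out of both branches so an unrecognized token is kept rather than silently dropped. A usage sketch against the commons-cli 1.x API; the expected left-over arguments are inferred from the report, so treat the printed result as illustrative:

```java
import java.util.Arrays;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;

public class StopAtNonOptionDemo {
    public static void main(String[] args) throws ParseException {
        Options options = new Options();
        options.addOption("a", false, "option a");
        // stopAtNonOption = true; "-z" is not a known option.
        CommandLine cl = new PosixParser()
                .parse(options, new String[] {"-z", "-a", "foo"}, true);
        // After the fix, "-z" should survive into the left-over
        // arguments instead of being swallowed.
        System.out.println(Arrays.toString(cl.getArgs()));
    }
}
```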
Cli | 12 | src/java/org/apache/commons/cli/GnuParser.java | 52 | 110 | PosixParser interprets "-target opt" as "-t arget opt" | This was posted on the Commons-Developer list and confirmed as a bug.
> Is this a bug? Or am I using this incorrectly?
> I have an option with short and long values. Given code that is
> essentially what is below, with a PosixParser I see results as
> follows:
>
> A command line with just "-t" prints out the results of the catch
> block
> (OK)
> A command line with just "-target" prints out the results of the catch
> block (OK)
> A command line with just "-t foobar.com" prints out "processing selected
> target: foobar.com" (OK)
> A command line with just "-target foobar.com" prints out "processing
> selected target: arget" (ERROR?)
>
> ======================================================================
> ==
> =======================
> private static final String OPTION_TARGET = "t";
> private static final String OPTION_TARGET_LONG = "target";
> // ...
> Option generateTarget = new Option(OPTION_TARGET,
> OPTION_TARGET_LONG,
> true,
> "Generate files for the specified
> target machine");
> // ...
> try {
> parsedLine = parser.parse(cmdLineOpts, args);
> } catch (ParseException pe) {
> System.out.println("Invalid command: " + pe.getMessage() +
> "\n");
> HelpFormatter hf = new HelpFormatter();
> hf.printHelp(USAGE, cmdLineOpts);
> System.exit(-1);
> }
>
> if (parsedLine.hasOption(OPTION_TARGET)) {
> System.out.println("processing selected target: " +
> parsedLine.getOptionValue(OPTION_TARGET));
> }
It is a bug but it is due to well-defined behaviour (so that makes me feel a
little better about myself ;). To support *special* options
(well I call them special anyway) like -Dsystem.property=value we need to be
able to examine the first character of an option. If the first character is
itself defined as an Option then the remainder of the token is used as the
value, e.g. 'D' is the token, it is an option so 'system.property=value' is the
argument value for that option. This is the behaviour that we are seeing for
your example.
't' is the token, it is an options so 'arget' is the argument value.
I suppose a solution to this could be to have a way to specify properties for
parsers. In this case 'posix.special.option == true' for turning
on *special* options. I'll have a look into this and let you know.
Just to keep track of this and to get you used to how we operate, can you log a
bug in bugzilla for this.
Thanks,
-John K | protected String[] flatten(Options options, String[] arguments, boolean stopAtNonOption)
{
List tokens = new ArrayList();
boolean eatTheRest = false;
for (int i = 0; i < arguments.length; i++)
{
String arg = arguments[i];
if ("--".equals(arg))
{
eatTheRest = true;
tokens.add("--");
}
else if ("-".equals(arg))
{
tokens.add("-");
}
else if (arg.startsWith("-"))
{
String opt = Util.stripLeadingHyphens(arg);
if (options.hasOption(opt))
{
tokens.add(arg);
}
else
{
if (options.hasOption(arg.substring(0, 2)))
{
// the format is --foo=value or -foo=value
// the format is a special properties option (-Dproperty=value)
tokens.add(arg.substring(0, 2)); // -D
tokens.add(arg.substring(2)); // property=value
}
else
{
eatTheRest = stopAtNonOption;
tokens.add(arg);
}
}
}
else
{
tokens.add(arg);
}
if (eatTheRest)
{
for (i++; i < arguments.length; i++)
{
tokens.add(arguments[i]);
}
}
}
return (String[]) tokens.toArray(new String[tokens.size()]);
} | protected String [ ] flatten ( Options options , String [ ] arguments , boolean stopAtNonOption ) { List tokens = new ArrayList ( ) ; boolean eatTheRest = false ; for ( int i = 0 ; i < arguments . length ; i ++ ) { String arg = arguments [ i ] ; if ( "--" . equals ( arg ) ) { eatTheRest = true ; tokens . add ( "--" ) ; } else if ( "-" . equals ( arg ) ) { tokens . add ( "-" ) ; } else if ( arg . startsWith ( "-" ) ) { String opt = Util . stripLeadingHyphens ( arg ) ; if ( options . hasOption ( opt ) ) { tokens . add ( arg ) ; } else { if ( options . hasOption ( arg . substring ( 0 , 2 ) ) ) { tokens . add ( arg . substring ( 0 , 2 ) ) ; tokens . add ( arg . substring ( 2 ) ) ; } else { eatTheRest = stopAtNonOption ; tokens . add ( arg ) ; } } } else { tokens . add ( arg ) ; } if ( eatTheRest ) { for ( i ++ ; i < arguments . length ; i ++ ) { tokens . add ( arguments [ i ] ) ; } } } return ( String [ ] ) tokens . toArray ( new String [ tokens . size ( ) ] ) ; } | protected String[] flatten(Options options, String[] arguments, boolean stopAtNonOption)
{
List tokens = new ArrayList();
boolean eatTheRest = false;
for (int i = 0; i < arguments.length; i++)
{
String arg = arguments[i];
if ("--".equals(arg))
{
eatTheRest = true;
tokens.add("--");
}
else if ("-".equals(arg))
{
tokens.add("-");
}
else if (arg.startsWith("-"))
{
String opt = Util.stripLeadingHyphens(arg);
if (options.hasOption(opt))
{
tokens.add(arg);
}
else
{
if (opt.indexOf('=') != -1 && options.hasOption(opt.substring(0, opt.indexOf('='))))
{
// the format is --foo=value or -foo=value
tokens.add(arg.substring(0, arg.indexOf('='))); // --foo
tokens.add(arg.substring(arg.indexOf('=') + 1)); // value
}
else if (options.hasOption(arg.substring(0, 2)))
{
// the format is a special properties option (-Dproperty=value)
tokens.add(arg.substring(0, 2)); // -D
tokens.add(arg.substring(2)); // property=value
}
else
{
eatTheRest = stopAtNonOption;
tokens.add(arg);
}
}
}
else
{
tokens.add(arg);
}
if (eatTheRest)
{
for (i++; i < arguments.length; i++)
{
tokens.add(arguments[i]);
}
}
}
return (String[]) tokens.toArray(new String[tokens.size()]);
} | protected String [ ] flatten ( Options options , String [ ] arguments , boolean stopAtNonOption ) { List tokens = new ArrayList ( ) ; boolean eatTheRest = false ; for ( int i = 0 ; i < arguments . length ; i ++ ) { String arg = arguments [ i ] ; if ( "--" . equals ( arg ) ) { eatTheRest = true ; tokens . add ( "--" ) ; } else if ( "-" . equals ( arg ) ) { tokens . add ( "-" ) ; } else if ( arg . startsWith ( "-" ) ) { String opt = Util . stripLeadingHyphens ( arg ) ; if ( options . hasOption ( opt ) ) { tokens . add ( arg ) ; } else { if ( opt . indexOf ( '=' ) != - 1 && options . hasOption ( opt . substring ( 0 , opt . indexOf ( '=' ) ) ) ) { tokens . add ( arg . substring ( 0 , arg . indexOf ( '=' ) ) ) ; tokens . add ( arg . substring ( arg . indexOf ( '=' ) + 1 ) ) ; } else if ( options . hasOption ( arg . substring ( 0 , 2 ) ) ) { tokens . add ( arg . substring ( 0 , 2 ) ) ; tokens . add ( arg . substring ( 2 ) ) ; } else { eatTheRest = stopAtNonOption ; tokens . add ( arg ) ; } } } else { tokens . add ( arg ) ; } if ( eatTheRest ) { for ( i ++ ; i < arguments . length ; i ++ ) { tokens . add ( arguments [ i ] ) ; } } } return ( String [ ] ) tokens . toArray ( new String [ tokens . size ( ) ] ) ; } |
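The fix adds a branch that splits `--foo=value` (or `-foo=value`) on the first `=` when the part before it is a known option. A small usage sketch against the commons-cli 1.x API, assuming the `target` option from the report:

```java
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class LongOptionDemo {
    public static void main(String[] args) throws ParseException {
        Options options = new Options();
        options.addOption("t", "target", true, "target machine");
        CommandLine cl = new GnuParser()
                .parse(options, new String[] {"--target=foobar.com"});
        // The fixed flatten() splits "--target=foobar.com" into
        // "--target" and "foobar.com" before option processing.
        System.out.println(cl.getOptionValue("t")); // foobar.com
    }
}
```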
Math | 13 | src/main/java/org/apache/commons/math3/optimization/general/AbstractLeastSquaresOptimizer.java | 561 | 564 | new multivariate vector optimizers cannot be used with large number of weights | When using the Weight class to pass a large number of weights to multivariate vector optimizers, an n×n full matrix is created (and copied) when an n-element vector is used. This exhausts memory when n is large.
This happens for example when using curve fitters (even simple curve fitters like polynomial ones for low degree) with large number of points. I encountered this with curve fitting on 41200 points, which created a matrix with 1.7 billion elements. | private RealMatrix squareRoot(RealMatrix m) {
final EigenDecomposition dec = new EigenDecomposition(m);
return dec.getSquareRoot();
} | private RealMatrix squareRoot ( RealMatrix m ) { final EigenDecomposition dec = new EigenDecomposition ( m ) ; return dec . getSquareRoot ( ) ; } | private RealMatrix squareRoot(RealMatrix m) {
if (m instanceof DiagonalMatrix) {
final int dim = m.getRowDimension();
final RealMatrix sqrtM = new DiagonalMatrix(dim);
for (int i = 0; i < dim; i++) {
sqrtM.setEntry(i, i, FastMath.sqrt(m.getEntry(i, i)));
}
return sqrtM;
} else {
final EigenDecomposition dec = new EigenDecomposition(m);
return dec.getSquareRoot();
}
} | private RealMatrix squareRoot ( RealMatrix m ) { if ( m instanceof DiagonalMatrix ) { final int dim = m . getRowDimension ( ) ; final RealMatrix sqrtM = new DiagonalMatrix ( dim ) ; for ( int i = 0 ; i < dim ; i ++ ) { sqrtM . setEntry ( i , i , FastMath . sqrt ( m . getEntry ( i , i ) ) ) ; } return sqrtM ; } else { final EigenDecomposition dec = new EigenDecomposition ( m ) ; return dec . getSquareRoot ( ) ; } } |
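The special case matters because `EigenDecomposition` materializes and decomposes the full n×n weight matrix even when only its diagonal is populated, while the square root of a diagonal matrix is just the element-wise square root of the diagonal, O(n) in time and memory. A plain-Java sketch of that observation (names are illustrative):

```java
public class DiagonalSqrtDemo {
    // Square root of a diagonal matrix, stored as its diagonal only.
    static double[] sqrtDiagonal(double[] weights) {
        double[] sqrt = new double[weights.length];
        for (int i = 0; i < weights.length; i++) {
            sqrt[i] = Math.sqrt(weights[i]);
        }
        return sqrt;
    }

    public static void main(String[] args) {
        // 41200 weights, as in the reported curve-fitting case:
        // no 41200-by-41200 matrix is ever allocated.
        double[] weights = new double[41200];
        java.util.Arrays.fill(weights, 2.0);
        System.out.println(sqrtDiagonal(weights)[0]); // 1.4142135623730951
    }
}
```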
JacksonDatabind | 76 | src/main/java/com/fasterxml/jackson/databind/deser/BuilderBasedDeserializer.java | 565 | 637 | Missing properties when deserializing using a builder class with a non-default constructor and a mutator annotated with `@JsonUnwrapped` | When deserializing using a builder class with a non-default constructor and any number of mutator methods annotated with @JsonUnwrapped, the `BuilderBasedDeserializer::deserializeUsingPropertyBasedWithUnwrapped` method cuts short the process of adding SettableBeanProperties.
The logic dictates that once all properties necessary to construct the builder have been found, the builder is constructed using all known SettableBeanProperties that have been found up to that point in the tokenizing process.
Therefore, in the case that the builder has a single property required for construction, and that property is found anywhere other than at the end of the JSON content, any properties subsequent to the constructor property are not evaluated and are left with their default values.
Given the following classes:
```java
@JsonDeserialize(builder = Employee.Builder.class)
public class Employee {
private final long id;
private final Name name;
private final int age;
private Employee(Builder builder) {
id = builder.id;
name = builder.name;
age = builder.age;
}
public long getId() {
return id;
}
public Name getName() {
return name;
}
public int getAge() {
return age;
}
@JsonPOJOBuilder(withPrefix = "set")
public static class Builder {
private final long id;
private Name name;
private int age;
@JsonCreator
public Builder(@JsonProperty("emp_id") long id) {
this.id = id;
}
@JsonUnwrapped
public void setName(Name name) {
this.name = name;
}
@JsonProperty("emp_age")
public void setAge(int age) {
this.age = age;
}
public Employee build() {
return new Employee(this);
}
}
}
public class Name {
private final String first;
private final String last;
@JsonCreator
public Name(
@JsonProperty("emp_first_name") String first,
@JsonProperty("emp_last_name") String last
) {
this.first = first;
this.last = last;
}
public String getFirst() {
return first;
}
public String getLast() {
return last;
}
}
```
And given the following JSON string:
```json
{
"emp_age": 30,
"emp_id": 1234,
"emp_first_name": "John",
"emp_last_name": "Doe"
}
```
We will see the following output:
```java
Employee emp = new ObjectMapper().readValue(json, Employee.class);
System.out.println(emp.getAge()); // 30
System.out.println(emp.getId()); // 1234
System.out.println(emp.getName()); // null
```
However, if we place the `emp_id` property at the end of the JSON string, we would get the following output:
```java
Employee emp = new ObjectMapper().readValue(json, Employee.class);
System.out.println(emp.getAge()); // 30
System.out.println(emp.getId()); // 1234
System.out.println(emp.getName()); // Name Object
```
If we were to place `emp_age` and `emp_first_name` and `emp_last_name` all after the `emp_id` property in the JSON string, we would get the following output:
```java
Employee emp = new ObjectMapper().readValue(json, Employee.class);
System.out.println(emp.getAge()); // 0
System.out.println(emp.getId()); // 1234
System.out.println(emp.getName()); // null
``` | @SuppressWarnings("resource")
protected Object deserializeUsingPropertyBasedWithUnwrapped(JsonParser p,
DeserializationContext ctxt)
throws IOException, JsonProcessingException
{
final PropertyBasedCreator creator = _propertyBasedCreator;
PropertyValueBuffer buffer = creator.startBuilding(p, ctxt, _objectIdReader);
TokenBuffer tokens = new TokenBuffer(p, ctxt);
tokens.writeStartObject();
JsonToken t = p.getCurrentToken();
for (; t == JsonToken.FIELD_NAME; t = p.nextToken()) {
String propName = p.getCurrentName();
p.nextToken(); // to point to value
// creator property?
SettableBeanProperty creatorProp = creator.findCreatorProperty(propName);
if (creatorProp != null) {
if (buffer.assignParameter(creatorProp, creatorProp.deserialize(p, ctxt))) {
t = p.nextToken();
Object bean;
try {
bean = creator.build(ctxt, buffer);
} catch (Exception e) {
wrapAndThrow(e, _beanType.getRawClass(), propName, ctxt);
continue;
}
while (t == JsonToken.FIELD_NAME) {
p.nextToken();
tokens.copyCurrentStructure(p);
t = p.nextToken();
}
tokens.writeEndObject();
if (bean.getClass() != _beanType.getRawClass()) {
ctxt.reportMappingException("Can not create polymorphic instances with unwrapped values");
return null;
}
return _unwrappedPropertyHandler.processUnwrapped(p, ctxt, bean, tokens);
}
continue;
}
// Object Id property?
if (buffer.readIdProperty(propName)) {
continue;
}
// regular property? needs buffering
SettableBeanProperty prop = _beanProperties.find(propName);
if (prop != null) {
buffer.bufferProperty(prop, prop.deserialize(p, ctxt));
continue;
}
if (_ignorableProps != null && _ignorableProps.contains(propName)) {
handleIgnoredProperty(p, ctxt, handledType(), propName);
continue;
}
tokens.writeFieldName(propName);
tokens.copyCurrentStructure(p);
// "any property"?
if (_anySetter != null) {
buffer.bufferAnyProperty(_anySetter, propName, _anySetter.deserialize(p, ctxt));
}
}
// We hit END_OBJECT, so:
Object bean;
// !!! 15-Feb-2012, tatu: Need to modify creator to use Builder!
try {
bean = creator.build(ctxt, buffer);
} catch (Exception e) {
return wrapInstantiationProblem(e, ctxt);
}
return _unwrappedPropertyHandler.processUnwrapped(p, ctxt, bean, tokens);
} | @ SuppressWarnings ( "resource" ) protected Object deserializeUsingPropertyBasedWithUnwrapped ( JsonParser p , DeserializationContext ctxt ) throws IOException , JsonProcessingException { final PropertyBasedCreator creator = _propertyBasedCreator ; PropertyValueBuffer buffer = creator . startBuilding ( p , ctxt , _objectIdReader ) ; TokenBuffer tokens = new TokenBuffer ( p , ctxt ) ; tokens . writeStartObject ( ) ; JsonToken t = p . getCurrentToken ( ) ; for ( ; t == JsonToken . FIELD_NAME ; t = p . nextToken ( ) ) { String propName = p . getCurrentName ( ) ; p . nextToken ( ) ; SettableBeanProperty creatorProp = creator . findCreatorProperty ( propName ) ; if ( creatorProp != null ) { if ( buffer . assignParameter ( creatorProp , creatorProp . deserialize ( p , ctxt ) ) ) { t = p . nextToken ( ) ; Object bean ; try { bean = creator . build ( ctxt , buffer ) ; } catch ( Exception e ) { wrapAndThrow ( e , _beanType . getRawClass ( ) , propName , ctxt ) ; continue ; } while ( t == JsonToken . FIELD_NAME ) { p . nextToken ( ) ; tokens . copyCurrentStructure ( p ) ; t = p . nextToken ( ) ; } tokens . writeEndObject ( ) ; if ( bean . getClass ( ) != _beanType . getRawClass ( ) ) { ctxt . reportMappingException ( "Can not create polymorphic instances with unwrapped values" ) ; return null ; } return _unwrappedPropertyHandler . processUnwrapped ( p , ctxt , bean , tokens ) ; } continue ; } if ( buffer . readIdProperty ( propName ) ) { continue ; } SettableBeanProperty prop = _beanProperties . find ( propName ) ; if ( prop != null ) { buffer . bufferProperty ( prop , prop . deserialize ( p , ctxt ) ) ; continue ; } if ( _ignorableProps != null && _ignorableProps . contains ( propName ) ) { handleIgnoredProperty ( p , ctxt , handledType ( ) , propName ) ; continue ; } tokens . writeFieldName ( propName ) ; tokens . copyCurrentStructure ( p ) ; if ( _anySetter != null ) { buffer . bufferAnyProperty ( _anySetter , propName , _anySetter . deserialize ( p , ctxt ) ) ; } } Object bean ; try { bean = creator . build ( ctxt , buffer ) ; } catch ( Exception e ) { return wrapInstantiationProblem ( e , ctxt ) ; } return _unwrappedPropertyHandler . processUnwrapped ( p , ctxt , bean , tokens ) ; } | @SuppressWarnings("resource")
protected Object deserializeUsingPropertyBasedWithUnwrapped(JsonParser p,
DeserializationContext ctxt)
throws IOException, JsonProcessingException
{
final PropertyBasedCreator creator = _propertyBasedCreator;
PropertyValueBuffer buffer = creator.startBuilding(p, ctxt, _objectIdReader);
TokenBuffer tokens = new TokenBuffer(p, ctxt);
tokens.writeStartObject();
JsonToken t = p.getCurrentToken();
for (; t == JsonToken.FIELD_NAME; t = p.nextToken()) {
String propName = p.getCurrentName();
p.nextToken(); // to point to value
// creator property?
SettableBeanProperty creatorProp = creator.findCreatorProperty(propName);
if (creatorProp != null) {
buffer.assignParameter(creatorProp, creatorProp.deserialize(p, ctxt));
continue;
}
// Object Id property?
if (buffer.readIdProperty(propName)) {
continue;
}
// regular property? needs buffering
SettableBeanProperty prop = _beanProperties.find(propName);
if (prop != null) {
buffer.bufferProperty(prop, prop.deserialize(p, ctxt));
continue;
}
if (_ignorableProps != null && _ignorableProps.contains(propName)) {
handleIgnoredProperty(p, ctxt, handledType(), propName);
continue;
}
tokens.writeFieldName(propName);
tokens.copyCurrentStructure(p);
// "any property"?
if (_anySetter != null) {
buffer.bufferAnyProperty(_anySetter, propName, _anySetter.deserialize(p, ctxt));
}
}
// We hit END_OBJECT, so:
Object bean;
// !!! 15-Feb-2012, tatu: Need to modify creator to use Builder!
try {
bean = creator.build(ctxt, buffer);
} catch (Exception e) {
return wrapInstantiationProblem(e, ctxt);
}
return _unwrappedPropertyHandler.processUnwrapped(p, ctxt, bean, tokens);
} | @ SuppressWarnings ( "resource" ) protected Object deserializeUsingPropertyBasedWithUnwrapped ( JsonParser p , DeserializationContext ctxt ) throws IOException , JsonProcessingException { final PropertyBasedCreator creator = _propertyBasedCreator ; PropertyValueBuffer buffer = creator . startBuilding ( p , ctxt , _objectIdReader ) ; TokenBuffer tokens = new TokenBuffer ( p , ctxt ) ; tokens . writeStartObject ( ) ; JsonToken t = p . getCurrentToken ( ) ; for ( ; t == JsonToken . FIELD_NAME ; t = p . nextToken ( ) ) { String propName = p . getCurrentName ( ) ; p . nextToken ( ) ; SettableBeanProperty creatorProp = creator . findCreatorProperty ( propName ) ; if ( creatorProp != null ) { buffer . assignParameter ( creatorProp , creatorProp . deserialize ( p , ctxt ) ) ; continue ; } if ( buffer . readIdProperty ( propName ) ) { continue ; } SettableBeanProperty prop = _beanProperties . find ( propName ) ; if ( prop != null ) { buffer . bufferProperty ( prop , prop . deserialize ( p , ctxt ) ) ; continue ; } if ( _ignorableProps != null && _ignorableProps . contains ( propName ) ) { handleIgnoredProperty ( p , ctxt , handledType ( ) , propName ) ; continue ; } tokens . writeFieldName ( propName ) ; tokens . copyCurrentStructure ( p ) ; if ( _anySetter != null ) { buffer . bufferAnyProperty ( _anySetter , propName , _anySetter . deserialize ( p , ctxt ) ) ; } } Object bean ; try { bean = creator . build ( ctxt , buffer ) ; } catch ( Exception e ) { return wrapInstantiationProblem ( e , ctxt ) ; } return _unwrappedPropertyHandler . processUnwrapped ( p , ctxt , bean , tokens ) ; } |
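The essence of the fix is visible in the diff: the early `creator.build(...)` inside the field loop is gone, so creator properties are buffered like everything else and the builder is constructed only after END_OBJECT. A usage sketch assuming the `Employee` and `Name` classes from the report are on the classpath:

```java
import com.fasterxml.jackson.databind.ObjectMapper;

public class BuilderOrderDemo {
    public static void main(String[] args) throws Exception {
        // "emp_id" (the creator property) appears before the other fields,
        // the ordering that used to lose the unwrapped Name and the age.
        String json = "{\"emp_id\":1234,\"emp_age\":30,"
                + "\"emp_first_name\":\"John\",\"emp_last_name\":\"Doe\"}";
        Employee emp = new ObjectMapper().readValue(json, Employee.class);
        // After the fix every field is populated regardless of key order.
        System.out.println(emp.getId() + " / " + emp.getAge() + " / "
                + emp.getName().getFirst() + " " + emp.getName().getLast());
    }
}
```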
JacksonDatabind | 99 | src/main/java/com/fasterxml/jackson/databind/type/ReferenceType.java | 162 | 170 | Canonical string for reference type is built incorrectly | Canonical string for reference type is built incorrectly.
E.g.:
`new ReferenceType(new TypeFactory(new LRUMap<Object, JavaType>(0, 10000)).constructType(Object.class), new PlaceholderForType(0)).toCanonical()`
yields:
`java.lang.Object<$1`
while the expected value is:
`java.lang.Object<$1>` | @Override
protected String buildCanonicalName()
{
StringBuilder sb = new StringBuilder();
sb.append(_class.getName());
sb.append('<');
sb.append(_referencedType.toCanonical());
return sb.toString();
} | @ Override protected String buildCanonicalName ( ) { StringBuilder sb = new StringBuilder ( ) ; sb . append ( _class . getName ( ) ) ; sb . append ( '<' ) ; sb . append ( _referencedType . toCanonical ( ) ) ; return sb . toString ( ) ; } | @Override
protected String buildCanonicalName()
{
StringBuilder sb = new StringBuilder();
sb.append(_class.getName());
sb.append('<');
sb.append(_referencedType.toCanonical());
sb.append('>');
return sb.toString();
} | @ Override protected String buildCanonicalName ( ) { StringBuilder sb = new StringBuilder ( ) ; sb . append ( _class . getName ( ) ) ; sb . append ( '<' ) ; sb . append ( _referencedType . toCanonical ( ) ) ; sb . append ( '>' ) ; return sb . toString ( ) ; } |
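The fix simply appends the missing closing angle bracket. A small sketch against the Jackson `TypeFactory` API showing the canonical form; the exact output depends on the referenced type:

```java
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.type.TypeFactory;
import java.util.concurrent.atomic.AtomicReference;

public class CanonicalNameDemo {
    public static void main(String[] args) {
        TypeFactory tf = TypeFactory.defaultInstance();
        JavaType t = tf.constructReferenceType(AtomicReference.class,
                tf.constructType(String.class));
        // With the fix this prints a balanced generic signature:
        // java.util.concurrent.atomic.AtomicReference<java.lang.String>
        System.out.println(t.toCanonical());
    }
}
```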
Math | 44 | src/main/java/org/apache/commons/math/ode/AbstractIntegrator.java | 274 | 374 | Incomplete reinitialization with some events handling | I get a bug with event handling: I track 2 events that occur in the same step; when the first one is accepted, it resets the state, but the reinitialization is not complete and the second one becomes unable to find its way.
I can't give my context, which is rather large, but I tried a patch that works for me; unfortunately, it breaks the unit tests. | protected double acceptStep(final AbstractStepInterpolator interpolator,
final double[] y, final double[] yDot, final double tEnd)
throws MathIllegalStateException {
double previousT = interpolator.getGlobalPreviousTime();
final double currentT = interpolator.getGlobalCurrentTime();
resetOccurred = false;
// initialize the events states if needed
if (! statesInitialized) {
for (EventState state : eventsStates) {
state.reinitializeBegin(interpolator);
}
statesInitialized = true;
}
// search for next events that may occur during the step
final int orderingSign = interpolator.isForward() ? +1 : -1;
SortedSet<EventState> occuringEvents = new TreeSet<EventState>(new Comparator<EventState>() {
/** {@inheritDoc} */
public int compare(EventState es0, EventState es1) {
return orderingSign * Double.compare(es0.getEventTime(), es1.getEventTime());
}
});
for (final EventState state : eventsStates) {
if (state.evaluateStep(interpolator)) {
// the event occurs during the current step
occuringEvents.add(state);
}
}
while (!occuringEvents.isEmpty()) {
// handle the chronologically first event
final Iterator<EventState> iterator = occuringEvents.iterator();
final EventState currentEvent = iterator.next();
iterator.remove();
// restrict the interpolator to the first part of the step, up to the event
final double eventT = currentEvent.getEventTime();
interpolator.setSoftPreviousTime(previousT);
interpolator.setSoftCurrentTime(eventT);
// trigger the event
interpolator.setInterpolatedTime(eventT);
final double[] eventY = interpolator.getInterpolatedState();
currentEvent.stepAccepted(eventT, eventY);
isLastStep = currentEvent.stop();
// handle the first part of the step, up to the event
for (final StepHandler handler : stepHandlers) {
handler.handleStep(interpolator, isLastStep);
}
if (isLastStep) {
// the event asked to stop integration
System.arraycopy(eventY, 0, y, 0, y.length);
return eventT;
}
if (currentEvent.reset(eventT, eventY)) {
// some event handler has triggered changes that
// invalidate the derivatives, we need to recompute them
System.arraycopy(eventY, 0, y, 0, y.length);
computeDerivatives(eventT, y, yDot);
resetOccurred = true;
return eventT;
}
// prepare handling of the remaining part of the step
previousT = eventT;
interpolator.setSoftPreviousTime(eventT);
interpolator.setSoftCurrentTime(currentT);
// check if the same event occurs again in the remaining part of the step
if (currentEvent.evaluateStep(interpolator)) {
// the event occurs during the current step
occuringEvents.add(currentEvent);
}
}
interpolator.setInterpolatedTime(currentT);
final double[] currentY = interpolator.getInterpolatedState();
for (final EventState state : eventsStates) {
state.stepAccepted(currentT, currentY);
isLastStep = isLastStep || state.stop();
}
isLastStep = isLastStep || Precision.equals(currentT, tEnd, 1);
// handle the remaining part of the step, after all events if any
for (StepHandler handler : stepHandlers) {
handler.handleStep(interpolator, isLastStep);
}
return currentT;
} | protected double acceptStep ( final AbstractStepInterpolator interpolator , final double [ ] y , final double [ ] yDot , final double tEnd ) throws MathIllegalStateException { double previousT = interpolator . getGlobalPreviousTime ( ) ; final double currentT = interpolator . getGlobalCurrentTime ( ) ; resetOccurred = false ; if ( ! statesInitialized ) { for ( EventState state : eventsStates ) { state . reinitializeBegin ( interpolator ) ; } statesInitialized = true ; } final int orderingSign = interpolator . isForward ( ) ? + 1 : - 1 ; SortedSet < EventState > occuringEvents = new TreeSet < EventState > ( new Comparator < EventState > ( ) { public int compare ( EventState es0 , EventState es1 ) { return orderingSign * Double . compare ( es0 . getEventTime ( ) , es1 . getEventTime ( ) ) ; } } ) ; for ( final EventState state : eventsStates ) { if ( state . evaluateStep ( interpolator ) ) { occuringEvents . add ( state ) ; } } while ( ! occuringEvents . isEmpty ( ) ) { final Iterator < EventState > iterator = occuringEvents . iterator ( ) ; final EventState currentEvent = iterator . next ( ) ; iterator . remove ( ) ; final double eventT = currentEvent . getEventTime ( ) ; interpolator . setSoftPreviousTime ( previousT ) ; interpolator . setSoftCurrentTime ( eventT ) ; interpolator . setInterpolatedTime ( eventT ) ; final double [ ] eventY = interpolator . getInterpolatedState ( ) ; currentEvent . stepAccepted ( eventT , eventY ) ; isLastStep = currentEvent . stop ( ) ; for ( final StepHandler handler : stepHandlers ) { handler . handleStep ( interpolator , isLastStep ) ; } if ( isLastStep ) { System . arraycopy ( eventY , 0 , y , 0 , y . length ) ; return eventT ; } if ( currentEvent . reset ( eventT , eventY ) ) { System . arraycopy ( eventY , 0 , y , 0 , y . length ) ; computeDerivatives ( eventT , y , yDot ) ; resetOccurred = true ; return eventT ; } previousT = eventT ; interpolator . setSoftPreviousTime ( eventT ) ; interpolator . setSoftCurrentTime ( currentT ) ; if ( currentEvent . evaluateStep ( interpolator ) ) { occuringEvents . add ( currentEvent ) ; } } interpolator . setInterpolatedTime ( currentT ) ; final double [ ] currentY = interpolator . getInterpolatedState ( ) ; for ( final EventState state : eventsStates ) { state . stepAccepted ( currentT , currentY ) ; isLastStep = isLastStep || state . stop ( ) ; } isLastStep = isLastStep || Precision . equals ( currentT , tEnd , 1 ) ; for ( StepHandler handler : stepHandlers ) { handler . handleStep ( interpolator , isLastStep ) ; } return currentT ; } | protected double acceptStep(final AbstractStepInterpolator interpolator,
final double[] y, final double[] yDot, final double tEnd)
throws MathIllegalStateException {
double previousT = interpolator.getGlobalPreviousTime();
final double currentT = interpolator.getGlobalCurrentTime();
// initialize the events states if needed
if (! statesInitialized) {
for (EventState state : eventsStates) {
state.reinitializeBegin(interpolator);
}
statesInitialized = true;
}
// search for next events that may occur during the step
final int orderingSign = interpolator.isForward() ? +1 : -1;
SortedSet<EventState> occuringEvents = new TreeSet<EventState>(new Comparator<EventState>() {
/** {@inheritDoc} */
public int compare(EventState es0, EventState es1) {
return orderingSign * Double.compare(es0.getEventTime(), es1.getEventTime());
}
});
for (final EventState state : eventsStates) {
if (state.evaluateStep(interpolator)) {
// the event occurs during the current step
occuringEvents.add(state);
}
}
while (!occuringEvents.isEmpty()) {
// handle the chronologically first event
final Iterator<EventState> iterator = occuringEvents.iterator();
final EventState currentEvent = iterator.next();
iterator.remove();
// restrict the interpolator to the first part of the step, up to the event
final double eventT = currentEvent.getEventTime();
interpolator.setSoftPreviousTime(previousT);
interpolator.setSoftCurrentTime(eventT);
// trigger the event
interpolator.setInterpolatedTime(eventT);
final double[] eventY = interpolator.getInterpolatedState();
currentEvent.stepAccepted(eventT, eventY);
isLastStep = currentEvent.stop();
// handle the first part of the step, up to the event
for (final StepHandler handler : stepHandlers) {
handler.handleStep(interpolator, isLastStep);
}
if (isLastStep) {
// the event asked to stop integration
System.arraycopy(eventY, 0, y, 0, y.length);
for (final EventState remaining : occuringEvents) {
remaining.stepAccepted(eventT, eventY);
}
return eventT;
}
if (currentEvent.reset(eventT, eventY)) {
// some event handler has triggered changes that
// invalidate the derivatives, we need to recompute them
System.arraycopy(eventY, 0, y, 0, y.length);
computeDerivatives(eventT, y, yDot);
resetOccurred = true;
for (final EventState remaining : occuringEvents) {
remaining.stepAccepted(eventT, eventY);
}
return eventT;
}
// prepare handling of the remaining part of the step
previousT = eventT;
interpolator.setSoftPreviousTime(eventT);
interpolator.setSoftCurrentTime(currentT);
// check if the same event occurs again in the remaining part of the step
if (currentEvent.evaluateStep(interpolator)) {
// the event occurs during the current step
occuringEvents.add(currentEvent);
}
}
interpolator.setInterpolatedTime(currentT);
final double[] currentY = interpolator.getInterpolatedState();
for (final EventState state : eventsStates) {
state.stepAccepted(currentT, currentY);
isLastStep = isLastStep || state.stop();
}
isLastStep = isLastStep || Precision.equals(currentT, tEnd, 1);
// handle the remaining part of the step, after all events if any
for (StepHandler handler : stepHandlers) {
handler.handleStep(interpolator, isLastStep);
}
return currentT;
} | protected double acceptStep ( final AbstractStepInterpolator interpolator , final double [ ] y , final double [ ] yDot , final double tEnd ) throws MathIllegalStateException { double previousT = interpolator . getGlobalPreviousTime ( ) ; final double currentT = interpolator . getGlobalCurrentTime ( ) ; if ( ! statesInitialized ) { for ( EventState state : eventsStates ) { state . reinitializeBegin ( interpolator ) ; } statesInitialized = true ; } final int orderingSign = interpolator . isForward ( ) ? + 1 : - 1 ; SortedSet < EventState > occuringEvents = new TreeSet < EventState > ( new Comparator < EventState > ( ) { public int compare ( EventState es0 , EventState es1 ) { return orderingSign * Double . compare ( es0 . getEventTime ( ) , es1 . getEventTime ( ) ) ; } } ) ; for ( final EventState state : eventsStates ) { if ( state . evaluateStep ( interpolator ) ) { occuringEvents . add ( state ) ; } } while ( ! occuringEvents . isEmpty ( ) ) { final Iterator < EventState > iterator = occuringEvents . iterator ( ) ; final EventState currentEvent = iterator . next ( ) ; iterator . remove ( ) ; final double eventT = currentEvent . getEventTime ( ) ; interpolator . setSoftPreviousTime ( previousT ) ; interpolator . setSoftCurrentTime ( eventT ) ; interpolator . setInterpolatedTime ( eventT ) ; final double [ ] eventY = interpolator . getInterpolatedState ( ) ; currentEvent . stepAccepted ( eventT , eventY ) ; isLastStep = currentEvent . stop ( ) ; for ( final StepHandler handler : stepHandlers ) { handler . handleStep ( interpolator , isLastStep ) ; } if ( isLastStep ) { System . arraycopy ( eventY , 0 , y , 0 , y . length ) ; for ( final EventState remaining : occuringEvents ) { remaining . stepAccepted ( eventT , eventY ) ; } return eventT ; } if ( currentEvent . reset ( eventT , eventY ) ) { System . arraycopy ( eventY , 0 , y , 0 , y . length ) ; computeDerivatives ( eventT , y , yDot ) ; resetOccurred = true ; for ( final EventState remaining : occuringEvents ) { remaining . stepAccepted ( eventT , eventY ) ; } return eventT ; } previousT = eventT ; interpolator . setSoftPreviousTime ( eventT ) ; interpolator . setSoftCurrentTime ( currentT ) ; if ( currentEvent . evaluateStep ( interpolator ) ) { occuringEvents . add ( currentEvent ) ; } } interpolator . setInterpolatedTime ( currentT ) ; final double [ ] currentY = interpolator . getInterpolatedState ( ) ; for ( final EventState state : eventsStates ) { state . stepAccepted ( currentT , currentY ) ; isLastStep = isLastStep || state . stop ( ) ; } isLastStep = isLastStep || Precision . equals ( currentT , tEnd , 1 ) ; for ( StepHandler handler : stepHandlers ) { handler . handleStep ( interpolator , isLastStep ) ; } return currentT ; } |
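Two things are worth noting in the fixed method: co-occurring events are drained in chronological order via a `TreeSet` whose comparator flips with the integration direction, and (the actual fix) the remaining events are notified through `stepAccepted` before the early returns, completing their reinitialization. A minimal, dependency-free sketch of the ordering part, using a hypothetical `Event` class:

```java
import java.util.Comparator;
import java.util.TreeSet;

public class EventOrderingDemo {
    static class Event {
        final String name;
        final double time;
        Event(String name, double time) { this.name = name; this.time = time; }
    }

    public static void main(String[] args) {
        boolean forward = false; // integrating backward in time
        final int orderingSign = forward ? +1 : -1;
        TreeSet<Event> occurring = new TreeSet<Event>(new Comparator<Event>() {
            public int compare(Event e0, Event e1) {
                // "Chronologically first" means largest time when going backward.
                return orderingSign * Double.compare(e0.time, e1.time);
            }
        });
        occurring.add(new Event("A", 1.0));
        occurring.add(new Event("B", 2.5));
        System.out.println(occurring.first().name); // B is handled first
    }
}
```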
JacksonDatabind | 60 | src/main/java/com/fasterxml/jackson/databind/ser/std/JsonValueSerializer.java | 195 | 242 | Polymorphic type lost when using `@JsonValue` | When suppressing all getters but one with @JsonIgnore and choosing to use a byte array for serialization (marking its getter with @JsonValue), the typing of the object is changed to "[B", which is deserialized to a byte array. I would have expected verbose typing and usage of the constructor marked with @JsonCreator that accepts the byte array to construct the object on deserialization. The behavior is as expected when choosing more fields for serialization, which is redundant data in this case.
Running jackson-databind 2.7.4 on Java 1.8.0_91.
Configuration of the ObjectMapper:
```
private final ObjectMapper mapper;
public JsonFilter() {
this.mapper = new ObjectMapper();
mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
mapper.enableDefaultTyping();
}
```
Serialization: `mapper.writeValueAsString(message)`
Deserialization: `mapper.readValue(json, RemoteCall.class)`
Getter and field:
```
/** @serial */
private byte[] apdu;
@JsonValue
public byte[] getBytes() {
return apdu.clone();
}
```
Constructor:
```
@JsonCreator
public CommandAPDU(@JsonProperty(value = "bytes") byte[] apdu) {
this.apdu = apdu.clone();
parse();
LOG.v("com.ubitricity.devices.common.pal.CommandAPDU creator (1)");
}
```
Serializes to `"args":[["[B","AKQEAAnw8fLz9AAAAgA="],["net.sf.lipermi.call.RemoteInstance",{"instanceId":"b0e15098-f49e-4328-b072-fc5df42799bd","className":"com.ubitricity.devices.common.tasks.ResponseReceiver"}]]` where "args" is an Object array field of the enclosing object.
| @Override
public void serializeWithType(Object bean, JsonGenerator gen, SerializerProvider provider,
TypeSerializer typeSer0) throws IOException
{
// Regardless of other parts, first need to find value to serialize:
Object value = null;
try {
value = _accessorMethod.getValue(bean);
// and if we got null, can also just write it directly
if (value == null) {
provider.defaultSerializeNull(gen);
return;
}
JsonSerializer<Object> ser = _valueSerializer;
if (ser == null) { // no serializer yet? Need to fetch
// ser = provider.findTypedValueSerializer(value.getClass(), true, _property);
ser = provider.findValueSerializer(value.getClass(), _property);
} else {
/* 09-Dec-2010, tatu: To work around natural type's refusal to add type info, we do
* this (note: type is for the wrapper type, not enclosed value!)
*/
if (_forceTypeInformation) {
typeSer0.writeTypePrefixForScalar(bean, gen);
ser.serialize(value, gen, provider);
typeSer0.writeTypeSuffixForScalar(bean, gen);
return;
}
}
// 28-Sep-2016, tatu: As per [databind#1385], we do need to do some juggling
// to use different Object for type id (logical type) and actual serialization
// (delegat type).
ser.serializeWithType(value, gen, provider, typeSer0);
} catch (IOException ioe) {
throw ioe;
} catch (Exception e) {
Throwable t = e;
// Need to unwrap this specific type, to see infinite recursion...
while (t instanceof InvocationTargetException && t.getCause() != null) {
t = t.getCause();
}
// Errors shouldn't be wrapped (and often can't, as well)
if (t instanceof Error) {
throw (Error) t;
}
// let's try to indicate the path best we can...
throw JsonMappingException.wrapWithPath(t, bean, _accessorMethod.getName() + "()");
}
} | @ Override public void serializeWithType ( Object bean , JsonGenerator gen , SerializerProvider provider , TypeSerializer typeSer0 ) throws IOException { Object value = null ; try { value = _accessorMethod . getValue ( bean ) ; if ( value == null ) { provider . defaultSerializeNull ( gen ) ; return ; } JsonSerializer < Object > ser = _valueSerializer ; if ( ser == null ) { ser = provider . findValueSerializer ( value . getClass ( ) , _property ) ; } else { if ( _forceTypeInformation ) { typeSer0 . writeTypePrefixForScalar ( bean , gen ) ; ser . serialize ( value , gen , provider ) ; typeSer0 . writeTypeSuffixForScalar ( bean , gen ) ; return ; } } ser . serializeWithType ( value , gen , provider , typeSer0 ) ; } catch ( IOException ioe ) { throw ioe ; } catch ( Exception e ) { Throwable t = e ; while ( t instanceof InvocationTargetException && t . getCause ( ) != null ) { t = t . getCause ( ) ; } if ( t instanceof Error ) { throw ( Error ) t ; } throw JsonMappingException . wrapWithPath ( t , bean , _accessorMethod . getName ( ) + "()" ) ; } } | @Override
public void serializeWithType(Object bean, JsonGenerator gen, SerializerProvider provider,
TypeSerializer typeSer0) throws IOException
{
// Regardless of other parts, first need to find value to serialize:
Object value = null;
try {
value = _accessorMethod.getValue(bean);
// and if we got null, can also just write it directly
if (value == null) {
provider.defaultSerializeNull(gen);
return;
}
JsonSerializer<Object> ser = _valueSerializer;
if (ser == null) { // no serializer yet? Need to fetch
// ser = provider.findTypedValueSerializer(value.getClass(), true, _property);
ser = provider.findValueSerializer(value.getClass(), _property);
} else {
/* 09-Dec-2010, tatu: To work around natural type's refusal to add type info, we do
* this (note: type is for the wrapper type, not enclosed value!)
*/
if (_forceTypeInformation) {
typeSer0.writeTypePrefixForScalar(bean, gen);
ser.serialize(value, gen, provider);
typeSer0.writeTypeSuffixForScalar(bean, gen);
return;
}
}
// 28-Sep-2016, tatu: As per [databind#1385], we do need to do some juggling
// to use different Object for type id (logical type) and actual serialization
// (delegat type).
TypeSerializerRerouter rr = new TypeSerializerRerouter(typeSer0, bean);
ser.serializeWithType(value, gen, provider, rr);
} catch (IOException ioe) {
throw ioe;
} catch (Exception e) {
Throwable t = e;
// Need to unwrap this specific type, to see infinite recursion...
while (t instanceof InvocationTargetException && t.getCause() != null) {
t = t.getCause();
}
// Errors shouldn't be wrapped (and often can't, as well)
if (t instanceof Error) {
throw (Error) t;
}
// let's try to indicate the path best we can...
throw JsonMappingException.wrapWithPath(t, bean, _accessorMethod.getName() + "()");
}
} | @ Override public void serializeWithType ( Object bean , JsonGenerator gen , SerializerProvider provider , TypeSerializer typeSer0 ) throws IOException { Object value = null ; try { value = _accessorMethod . getValue ( bean ) ; if ( value == null ) { provider . defaultSerializeNull ( gen ) ; return ; } JsonSerializer < Object > ser = _valueSerializer ; if ( ser == null ) { ser = provider . findValueSerializer ( value . getClass ( ) , _property ) ; } else { if ( _forceTypeInformation ) { typeSer0 . writeTypePrefixForScalar ( bean , gen ) ; ser . serialize ( value , gen , provider ) ; typeSer0 . writeTypeSuffixForScalar ( bean , gen ) ; return ; } } TypeSerializerRerouter rr = new TypeSerializerRerouter ( typeSer0 , bean ) ; ser . serializeWithType ( value , gen , provider , rr ) ; } catch ( IOException ioe ) { throw ioe ; } catch ( Exception e ) { Throwable t = e ; while ( t instanceof InvocationTargetException && t . getCause ( ) != null ) { t = t . getCause ( ) ; } if ( t instanceof Error ) { throw ( Error ) t ; } throw JsonMappingException . wrapWithPath ( t , bean , _accessorMethod . getName ( ) + "()" ) ; } } |
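The one-line change routes the delegate serializer through a `TypeSerializerRerouter` wrapping the original bean, so the type id written is that of the wrapper class rather than the `@JsonValue` result (e.g. `[B` for a byte array). Below is a sketch modeled on the report's configuration; the exact wire format varies by Jackson version, so treat the printed JSON as illustrative:

```java
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonValue;
import com.fasterxml.jackson.databind.ObjectMapper;

public class JsonValueTypingDemo {
    static class Apdu {
        private final byte[] bytes;

        @JsonCreator
        public Apdu(@JsonProperty("bytes") byte[] bytes) { this.bytes = bytes.clone(); }

        @JsonValue
        public byte[] getBytes() { return bytes.clone(); }
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        mapper.enableDefaultTyping(); // as in the report's configuration
        String json = mapper.writeValueAsString(new Apdu(new byte[] {1, 2, 3}));
        // With the fix the type id should name Apdu (the wrapper), not "[B".
        System.out.println(json);
    }
}
```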
Math | 8 | src/main/java/org/apache/commons/math3/distribution/DiscreteDistribution.java | 181 | 195 | DiscreteDistribution.sample(int) may throw an exception if first element of singletons of sub-class type | Creating an array with {{Array.newInstance(singletons.get(0).getClass(), sampleSize)}} in DiscreteDistribution.sample(int) is risky. An exception will be thrown if:
* {{singletons.get(0)}} is of type T1, a sub-class of T, and
* {{DiscreteDistribution.sample()}} returns an object which is of type T, but not of type T1.
To reproduce:
{code}
List<Pair<Object,Double>> list = new ArrayList<Pair<Object, Double>>();
list.add(new Pair<Object, Double>(new Object() {}, new Double(0)));
list.add(new Pair<Object, Double>(new Object() {}, new Double(1)));
new DiscreteDistribution<Object>(list).sample(1);
{code}
Attaching a patch. | public T[] sample(int sampleSize) throws NotStrictlyPositiveException {
if (sampleSize <= 0) {
throw new NotStrictlyPositiveException(LocalizedFormats.NUMBER_OF_SAMPLES,
sampleSize);
}
final T[]out = (T[]) java.lang.reflect.Array.newInstance(singletons.get(0).getClass(), sampleSize);
for (int i = 0; i < sampleSize; i++) {
out[i] = sample();
}
return out;
} | public T [ ] sample ( int sampleSize ) throws NotStrictlyPositiveException { if ( sampleSize <= 0 ) { throw new NotStrictlyPositiveException ( LocalizedFormats . NUMBER_OF_SAMPLES , sampleSize ) ; } final T [ ] out = ( T [ ] ) java . lang . reflect . Array . newInstance ( singletons . get ( 0 ) . getClass ( ) , sampleSize ) ; for ( int i = 0 ; i < sampleSize ; i ++ ) { out [ i ] = sample ( ) ; } return out ; } | public Object[] sample(int sampleSize) throws NotStrictlyPositiveException {
if (sampleSize <= 0) {
throw new NotStrictlyPositiveException(LocalizedFormats.NUMBER_OF_SAMPLES,
sampleSize);
}
final Object[] out = new Object[sampleSize];
for (int i = 0; i < sampleSize; i++) {
out[i] = sample();
}
return out;
} | public Object [ ] sample ( int sampleSize ) throws NotStrictlyPositiveException { if ( sampleSize <= 0 ) { throw new NotStrictlyPositiveException ( LocalizedFormats . NUMBER_OF_SAMPLES , sampleSize ) ; } final Object [ ] out = new Object [ sampleSize ] ; for ( int i = 0 ; i < sampleSize ; i ++ ) { out [ i ] = sample ( ) ; } return out ; } |
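The underlying hazard is plain array covariance: `Array.newInstance(first.getClass(), n)` creates an array whose component type is the *runtime* class of the first sample, and storing any sibling subtype of T throws `ArrayStoreException`, which is why the fix falls back to `Object[]`. A self-contained demonstration:

```java
import java.lang.reflect.Array;

public class CovariantArrayDemo {
    public static void main(String[] args) {
        Object a = new Object() {}; // anonymous subclass #1
        Object b = new Object() {}; // anonymous subclass #2
        Object[] out = (Object[]) Array.newInstance(a.getClass(), 2);
        out[0] = a;
        out[1] = b; // throws ArrayStoreException: b is not of a's runtime type
    }
}
```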
Math | 52 | src/main/java/org/apache/commons/math/geometry/euclidean/threed/Rotation.java | 313 | 390 | numerical problems in rotation creation | building a rotation from the following vector pairs leads to NaN:
u1 = -4921140.837095533, -2.1512094250440013E7, -890093.279426377
u2 = -2.7238580938724895E9, -2.169664921341876E9, 6.749688708885301E10
v1 = 1, 0, 0
v2 = 0, 0, 1
The constructor first changes the (v1, v2) pair into (v1', v2') ensuring the following scalar products hold:
<v1'|v1'> == <u1|u1>
<v2'|v2'> == <u2|u2>
<u1 |u2> == <v1'|v2'>
Once the (v1', v2') pair has been computed, we compute the cross product:
k = (v1' - u1)^(v2' - u2)
and the scalar product:
c = <k | (u1^u2)>
By construction, c is positive or null and the quaternion axis we want to build is q = k/[2*sqrt(c)].
c should be null only if some of the vectors are aligned, and this is dealt with later in the algorithm.
However, there are numerical problems with the vectors above in the way these computations are done, as shown
by the following comparison of the result we get from our Java code against a manual computation with the
same formulas at enhanced precision:
commons math: k = 38514476.5, -84., -1168590144
high precision: k = 38514410.36093388..., -0.374075245201180409222711..., -1168590152.10599715208...
and it becomes worse when computing c because the vectors are almost orthogonal to each other, hence inducing additional cancellations. We get:
commons math c = -1.2397173627587605E20
high precision: c = 558382746168463196.7079627...
We have lost ALL significant digits in cancellations, and even the sign is wrong!
| public Rotation(Vector3D u1, Vector3D u2, Vector3D v1, Vector3D v2) {
// norms computation
double u1u1 = u1.getNormSq();
double u2u2 = u2.getNormSq();
double v1v1 = v1.getNormSq();
double v2v2 = v2.getNormSq();
if ((u1u1 == 0) || (u2u2 == 0) || (v1v1 == 0) || (v2v2 == 0)) {
throw MathRuntimeException.createIllegalArgumentException(LocalizedFormats.ZERO_NORM_FOR_ROTATION_DEFINING_VECTOR);
}
// normalize v1 in order to have (v1'|v1') = (u1|u1)
v1 = new Vector3D(FastMath.sqrt(u1u1 / v1v1), v1);
// adjust v2 in order to have (u1|u2) = (v1'|v2') and (v2'|v2') = (u2|u2)
double u1u2 = u1.dotProduct(u2);
double v1v2 = v1.dotProduct(v2);
double coeffU = u1u2 / u1u1;
double coeffV = v1v2 / u1u1;
double beta = FastMath.sqrt((u2u2 - u1u2 * coeffU) / (v2v2 - v1v2 * coeffV));
double alpha = coeffU - beta * coeffV;
v2 = new Vector3D(alpha, v1, beta, v2);
// preliminary computation
Vector3D uRef = u1;
Vector3D vRef = v1;
Vector3D v1Su1 = v1.subtract(u1);
Vector3D v2Su2 = v2.subtract(u2);
Vector3D k = v1Su1.crossProduct(v2Su2);
Vector3D u3 = u1.crossProduct(u2);
double c = k.dotProduct(u3);
if (c == 0) {
// the (q1, q2, q3) vector is close to the (u1, u2) plane
// we try other vectors
Vector3D v3 = Vector3D.crossProduct(v1, v2);
Vector3D v3Su3 = v3.subtract(u3);
k = v1Su1.crossProduct(v3Su3);
Vector3D u2Prime = u1.crossProduct(u3);
c = k.dotProduct(u2Prime);
if (c == 0) {
// the (q1, q2, q3) vector is also close to the (u1, u3) plane,
// it is almost aligned with u1: we try (u2, u3) and (v2, v3)
k = v2Su2.crossProduct(v3Su3);;
c = k.dotProduct(u2.crossProduct(u3));;
if (c == 0) {
// the (q1, q2, q3) vector is aligned with everything
// this is really the identity rotation
q0 = 1.0;
q1 = 0.0;
q2 = 0.0;
q3 = 0.0;
return;
}
// we will have to use u2 and v2 to compute the scalar part
uRef = u2;
vRef = v2;
}
}
// compute the vectorial part
c = FastMath.sqrt(c);
double inv = 1.0 / (c + c);
q1 = inv * k.getX();
q2 = inv * k.getY();
q3 = inv * k.getZ();
// compute the scalar part
k = new Vector3D(uRef.getY() * q3 - uRef.getZ() * q2,
uRef.getZ() * q1 - uRef.getX() * q3,
uRef.getX() * q2 - uRef.getY() * q1);
q0 = vRef.dotProduct(k) / (2 * k.getNormSq());
} | public Rotation ( Vector3D u1 , Vector3D u2 , Vector3D v1 , Vector3D v2 ) { double u1u1 = u1 . getNormSq ( ) ; double u2u2 = u2 . getNormSq ( ) ; double v1v1 = v1 . getNormSq ( ) ; double v2v2 = v2 . getNormSq ( ) ; if ( ( u1u1 == 0 ) || ( u2u2 == 0 ) || ( v1v1 == 0 ) || ( v2v2 == 0 ) ) { throw MathRuntimeException . createIllegalArgumentException ( LocalizedFormats . ZERO_NORM_FOR_ROTATION_DEFINING_VECTOR ) ; } v1 = new Vector3D ( FastMath . sqrt ( u1u1 / v1v1 ) , v1 ) ; double u1u2 = u1 . dotProduct ( u2 ) ; double v1v2 = v1 . dotProduct ( v2 ) ; double coeffU = u1u2 / u1u1 ; double coeffV = v1v2 / u1u1 ; double beta = FastMath . sqrt ( ( u2u2 - u1u2 * coeffU ) / ( v2v2 - v1v2 * coeffV ) ) ; double alpha = coeffU - beta * coeffV ; v2 = new Vector3D ( alpha , v1 , beta , v2 ) ; Vector3D uRef = u1 ; Vector3D vRef = v1 ; Vector3D v1Su1 = v1 . subtract ( u1 ) ; Vector3D v2Su2 = v2 . subtract ( u2 ) ; Vector3D k = v1Su1 . crossProduct ( v2Su2 ) ; Vector3D u3 = u1 . crossProduct ( u2 ) ; double c = k . dotProduct ( u3 ) ; if ( c == 0 ) { Vector3D v3 = Vector3D . crossProduct ( v1 , v2 ) ; Vector3D v3Su3 = v3 . subtract ( u3 ) ; k = v1Su1 . crossProduct ( v3Su3 ) ; Vector3D u2Prime = u1 . crossProduct ( u3 ) ; c = k . dotProduct ( u2Prime ) ; if ( c == 0 ) { k = v2Su2 . crossProduct ( v3Su3 ) ; ; c = k . dotProduct ( u2 . crossProduct ( u3 ) ) ; ; if ( c == 0 ) { q0 = 1.0 ; q1 = 0.0 ; q2 = 0.0 ; q3 = 0.0 ; return ; } uRef = u2 ; vRef = v2 ; } } c = FastMath . sqrt ( c ) ; double inv = 1.0 / ( c + c ) ; q1 = inv * k . getX ( ) ; q2 = inv * k . getY ( ) ; q3 = inv * k . getZ ( ) ; k = new Vector3D ( uRef . getY ( ) * q3 - uRef . getZ ( ) * q2 , uRef . getZ ( ) * q1 - uRef . getX ( ) * q3 , uRef . getX ( ) * q2 - uRef . getY ( ) * q1 ) ; q0 = vRef . dotProduct ( k ) / ( 2 * k . getNormSq ( ) ) ; } | public Rotation(Vector3D u1, Vector3D u2, Vector3D v1, Vector3D v2) {
// norms computation
double u1u1 = u1.getNormSq();
double u2u2 = u2.getNormSq();
double v1v1 = v1.getNormSq();
double v2v2 = v2.getNormSq();
if ((u1u1 == 0) || (u2u2 == 0) || (v1v1 == 0) || (v2v2 == 0)) {
throw MathRuntimeException.createIllegalArgumentException(LocalizedFormats.ZERO_NORM_FOR_ROTATION_DEFINING_VECTOR);
}
// normalize v1 in order to have (v1'|v1') = (u1|u1)
v1 = new Vector3D(FastMath.sqrt(u1u1 / v1v1), v1);
// adjust v2 in order to have (u1|u2) = (v1'|v2') and (v2'|v2') = (u2|u2)
double u1u2 = u1.dotProduct(u2);
double v1v2 = v1.dotProduct(v2);
double coeffU = u1u2 / u1u1;
double coeffV = v1v2 / u1u1;
double beta = FastMath.sqrt((u2u2 - u1u2 * coeffU) / (v2v2 - v1v2 * coeffV));
double alpha = coeffU - beta * coeffV;
v2 = new Vector3D(alpha, v1, beta, v2);
// preliminary computation
Vector3D uRef = u1;
Vector3D vRef = v1;
Vector3D v1Su1 = v1.subtract(u1);
Vector3D v2Su2 = v2.subtract(u2);
Vector3D k = v1Su1.crossProduct(v2Su2);
Vector3D u3 = u1.crossProduct(u2);
double c = k.dotProduct(u3);
final double inPlaneThreshold = 0.001;
if (c <= inPlaneThreshold * k.getNorm() * u3.getNorm()) {
// the (q1, q2, q3) vector is close to the (u1, u2) plane
// we try other vectors
Vector3D v3 = Vector3D.crossProduct(v1, v2);
Vector3D v3Su3 = v3.subtract(u3);
k = v1Su1.crossProduct(v3Su3);
Vector3D u2Prime = u1.crossProduct(u3);
c = k.dotProduct(u2Prime);
if (c <= inPlaneThreshold * k.getNorm() * u2Prime.getNorm()) {
// the (q1, q2, q3) vector is also close to the (u1, u3) plane,
// it is almost aligned with u1: we try (u2, u3) and (v2, v3)
k = v2Su2.crossProduct(v3Su3);;
c = k.dotProduct(u2.crossProduct(u3));;
if (c <= 0) {
// the (q1, q2, q3) vector is aligned with everything
// this is really the identity rotation
q0 = 1.0;
q1 = 0.0;
q2 = 0.0;
q3 = 0.0;
return;
}
// we will have to use u2 and v2 to compute the scalar part
uRef = u2;
vRef = v2;
}
}
// compute the vectorial part
c = FastMath.sqrt(c);
double inv = 1.0 / (c + c);
q1 = inv * k.getX();
q2 = inv * k.getY();
q3 = inv * k.getZ();
// compute the scalar part
k = new Vector3D(uRef.getY() * q3 - uRef.getZ() * q2,
uRef.getZ() * q1 - uRef.getX() * q3,
uRef.getX() * q2 - uRef.getY() * q1);
q0 = vRef.dotProduct(k) / (2 * k.getNormSq());
} | public Rotation ( Vector3D u1 , Vector3D u2 , Vector3D v1 , Vector3D v2 ) { double u1u1 = u1 . getNormSq ( ) ; double u2u2 = u2 . getNormSq ( ) ; double v1v1 = v1 . getNormSq ( ) ; double v2v2 = v2 . getNormSq ( ) ; if ( ( u1u1 == 0 ) || ( u2u2 == 0 ) || ( v1v1 == 0 ) || ( v2v2 == 0 ) ) { throw MathRuntimeException . createIllegalArgumentException ( LocalizedFormats . ZERO_NORM_FOR_ROTATION_DEFINING_VECTOR ) ; } v1 = new Vector3D ( FastMath . sqrt ( u1u1 / v1v1 ) , v1 ) ; double u1u2 = u1 . dotProduct ( u2 ) ; double v1v2 = v1 . dotProduct ( v2 ) ; double coeffU = u1u2 / u1u1 ; double coeffV = v1v2 / u1u1 ; double beta = FastMath . sqrt ( ( u2u2 - u1u2 * coeffU ) / ( v2v2 - v1v2 * coeffV ) ) ; double alpha = coeffU - beta * coeffV ; v2 = new Vector3D ( alpha , v1 , beta , v2 ) ; Vector3D uRef = u1 ; Vector3D vRef = v1 ; Vector3D v1Su1 = v1 . subtract ( u1 ) ; Vector3D v2Su2 = v2 . subtract ( u2 ) ; Vector3D k = v1Su1 . crossProduct ( v2Su2 ) ; Vector3D u3 = u1 . crossProduct ( u2 ) ; double c = k . dotProduct ( u3 ) ; final double inPlaneThreshold = 0.001 ; if ( c <= inPlaneThreshold * k . getNorm ( ) * u3 . getNorm ( ) ) { Vector3D v3 = Vector3D . crossProduct ( v1 , v2 ) ; Vector3D v3Su3 = v3 . subtract ( u3 ) ; k = v1Su1 . crossProduct ( v3Su3 ) ; Vector3D u2Prime = u1 . crossProduct ( u3 ) ; c = k . dotProduct ( u2Prime ) ; if ( c <= inPlaneThreshold * k . getNorm ( ) * u2Prime . getNorm ( ) ) { k = v2Su2 . crossProduct ( v3Su3 ) ; ; c = k . dotProduct ( u2 . crossProduct ( u3 ) ) ; ; if ( c <= 0 ) { q0 = 1.0 ; q1 = 0.0 ; q2 = 0.0 ; q3 = 0.0 ; return ; } uRef = u2 ; vRef = v2 ; } } c = FastMath . sqrt ( c ) ; double inv = 1.0 / ( c + c ) ; q1 = inv * k . getX ( ) ; q2 = inv * k . getY ( ) ; q3 = inv * k . getZ ( ) ; k = new Vector3D ( uRef . getY ( ) * q3 - uRef . getZ ( ) * q2 , uRef . getZ ( ) * q1 - uRef . getX ( ) * q3 , uRef . getX ( ) * q2 - uRef . getY ( ) * q1 ) ; q0 = vRef . dotProduct ( k ) / ( 2 * k . getNormSq ( ) ) ; } |
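As a hedged side illustration of the cancellation described in the report (not the Rotation code itself): subtracting nearly equal large doubles can wipe out every significant digit, which is why the fix replaces the exact `c == 0` test with a threshold relative to the operand norms.

```java
public class CancellationDemo {
    public static void main(String[] args) {
        // True difference is 0.25, but the spacing between adjacent doubles
        // near 1e16 is 2.0, so the 0.25 cannot be represented at all.
        double x = 1.0e16 + 0.25;   // rounds to 1.0e16
        double y = 1.0e16;
        System.out.println(x - y);  // prints 0.0: all significant digits lost
    }
}
```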
JacksonDatabind | 37 | src/main/java/com/fasterxml/jackson/databind/type/SimpleType.java | 119 | 137 | Field in base class is not recognized, when using `@JsonType.defaultImpl` | When deserializing JSON to Java POJOS, a field inherited from a base class is not recognized. Here is the stack:
```
com.fasterxml.jackson.databind.exc.UnrecognizedPropertyException: Unrecognized field "name" (class org.apache.calcite.model.JsonMapSchema), not marked as ignorable (2 known properties: "functions", "tables"])
at [Source: {
version: '1.0',
schemas: [
{
name: 'FoodMart',
tables: [
{
name: 'time_by_day',
columns: [
{
name: 'time_id'
}
]
},
{
name: 'sales_fact_1997',
columns: [
{
name: 'time_id'
}
]
}
]
}
]
}; line: 24, column: 7] (through reference chain: org.apache.calcite.model.JsonRoot["schemas"]->java.util.ArrayList[0]->org.apache.calcite.model.JsonMapSchema["name"])
at com.fasterxml.jackson.databind.exc.UnrecognizedPropertyException.from(UnrecognizedPropertyException.java:62)
at com.fasterxml.jackson.databind.DeserializationContext.reportUnknownProperty(DeserializationContext.java:855)
at com.fasterxml.jackson.databind.deser.std.StdDeserializer.handleUnknownProperty(StdDeserializer.java:1083)
at com.fasterxml.jackson.databind.deser.BeanDeserializerBase.handleUnknownProperty(BeanDeserializerBase.java:1389)
at com.fasterxml.jackson.databind.deser.BeanDeserializerBase.handleUnknownVanilla(BeanDeserializerBase.java:1367)
at com.fasterxml.jackson.databind.deser.BeanDeserializer.vanillaDeserialize(BeanDeserializer.java:266)
at com.fasterxml.jackson.databind.deser.BeanDeserializer._deserializeOther(BeanDeserializer.java:163)
at com.fasterxml.jackson.databind.deser.BeanDeserializer.deserialize(BeanDeserializer.java:135)
at com.fasterxml.jackson.databind.jsontype.impl.AsPropertyTypeDeserializer._deserializeTypedUsingDefaultImpl(AsPropertyTypeDeserializer.java:136)
at com.fasterxml.jackson.databind.jsontype.impl.AsPropertyTypeDeserializer.deserializeTypedFromObject(AsPropertyTypeDeserializer.java:99)
at com.fasterxml.jackson.databind.deser.AbstractDeserializer.deserializeWithType(AbstractDeserializer.java:142)
at com.fasterxml.jackson.databind.deser.std.CollectionDeserializer.deserialize(CollectionDeserializer.java:279)
at com.fasterxml.jackson.databind.deser.std.CollectionDeserializer.deserialize(CollectionDeserializer.java:249)
at com.fasterxml.jackson.databind.deser.std.CollectionDeserializer.deserialize(CollectionDeserializer.java:26)
at com.fasterxml.jackson.databind.deser.SettableBeanProperty.deserialize(SettableBeanProperty.java:490)
at com.fasterxml.jackson.databind.deser.impl.FieldProperty.deserializeAndSet(FieldProperty.java:101)
at com.fasterxml.jackson.databind.deser.BeanDeserializer.vanillaDeserialize(BeanDeserializer.java:260)
at com.fasterxml.jackson.databind.deser.BeanDeserializer.deserialize(BeanDeserializer.java:125)
at com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:3788)
at com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:2779)
at org.apache.calcite.test.ModelTest.testRead(ModelTest.java:58)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:483)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288)
at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268)
at org.junit.runners.ParentRunner.run(ParentRunner.java:363)
at org.junit.runner.JUnitCore.run(JUnitCore.java:137)
at com.intellij.junit4.JUnit4IdeaTestRunner.startRunnerWithArgs(JUnit4IdeaTestRunner.java:69)
at com.intellij.rt.execution.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:234)
at com.intellij.rt.execution.junit.JUnitStarter.main(JUnitStarter.java:74)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:483)
at com.intellij.rt.execution.application.AppMain.main(AppMain.java:144)
```
My `JsonMapSchema` class has a base class `JsonSchema` and it has a public field `name`. See https://github.com/apache/calcite/blob/master/core/src/test/java/org/apache/calcite/test/ModelTest.java.
I have an application that worked in 2.6.3 but fails in 2.7.0, so I suspect this is a regression.
| @Override
protected JavaType _narrow(Class<?> subclass)
{
if (_class == subclass) {
return this;
}
// Should we check that there is a sub-class relationship?
// 15-Jan-2016, tatu: Almost yes, but there are some complications with
// placeholder values, so no.
/*
if (!_class.isAssignableFrom(subclass)) {
throw new IllegalArgumentException("Class "+subclass.getName()+" not sub-type of "
+_class.getName());
}
*/
// 15-Jan-2015, tatu: Not correct; should really re-resolve...
return new SimpleType(subclass, _bindings, _superClass, _superInterfaces,
_valueHandler, _typeHandler, _asStatic);
} | @ Override protected JavaType _narrow ( Class < ? > subclass ) { if ( _class == subclass ) { return this ; } return new SimpleType ( subclass , _bindings , _superClass , _superInterfaces , _valueHandler , _typeHandler , _asStatic ) ; } | @Override
protected JavaType _narrow(Class<?> subclass)
{
if (_class == subclass) {
return this;
}
// Should we check that there is a sub-class relationship?
// 15-Jan-2016, tatu: Almost yes, but there are some complications with
// placeholder values, so no.
/*
if (!_class.isAssignableFrom(subclass)) {
throw new IllegalArgumentException("Class "+subclass.getName()+" not sub-type of "
+_class.getName());
}
*/
// 15-Jan-2015, tatu: Not correct; should really re-resolve...
return new SimpleType(subclass, _bindings, this, _superInterfaces,
_valueHandler, _typeHandler, _asStatic);
} | @ Override protected JavaType _narrow ( Class < ? > subclass ) { if ( _class == subclass ) { return this ; } return new SimpleType ( subclass , _bindings , this , _superInterfaces , _valueHandler , _typeHandler , _asStatic ) ; } |
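A hedged minimal reproduction of the regression (class names are illustrative, not from the report): with a `defaultImpl`, the narrowed subtype must keep the declared type as its supertype, which is what the one-argument change from `_superClass` to `this` restores; otherwise fields declared on the base class become invisible.

```java
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.databind.ObjectMapper;

public class NarrowDemo {
    @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type",
                  defaultImpl = MapSchema.class)
    public static abstract class Schema {
        public String name;              // field declared on the base class
    }

    public static class MapSchema extends Schema {
        public String[] tables;
    }

    public static void main(String[] args) throws Exception {
        // In 2.7.0 this failed with UnrecognizedPropertyException("name")
        // because _narrow() dropped the superclass information.
        Schema s = new ObjectMapper().readValue(
                "{\"name\":\"FoodMart\",\"tables\":[]}", Schema.class);
        System.out.println(s.name);      // FoodMart once fixed
    }
}
```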
Mockito | 36 | src/org/mockito/internal/invocation/Invocation.java | 201 | 203 | Inform the user immediately when she tries to 'callRealMethod()' on a mock of an interface | Inform the user immediately when she tries to 'callRealMethod()' on a mock of an interface: //fail fast when: when(mockOfAnInterface.doStuff()).thenCallRealMethod(); | public Object callRealMethod() throws Throwable {
return realMethod.invoke(mock, rawArguments);
} | public Object callRealMethod ( ) throws Throwable { return realMethod . invoke ( mock , rawArguments ) ; } | public Object callRealMethod() throws Throwable {
if (this.getMethod().getDeclaringClass().isInterface()) {
new Reporter().cannotCallRealMethodOnInterface();
}
return realMethod.invoke(mock, rawArguments);
} | public Object callRealMethod ( ) throws Throwable { if ( this . getMethod ( ) . getDeclaringClass ( ) . isInterface ( ) ) { new Reporter ( ) . cannotCallRealMethodOnInterface ( ) ; } return realMethod . invoke ( mock , rawArguments ) ; } |
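A hedged usage sketch of the fail-fast behavior this fix adds: once the check is in place, invoking a method stubbed with thenCallRealMethod() on an interface mock reports a descriptive error instead of failing obscurely.

```java
import static org.mockito.Mockito.*;

import java.util.List;

public class CallRealMethodDemo {
    @SuppressWarnings("unchecked")
    public static void main(String[] args) {
        List<String> mock = mock(List.class);
        when(mock.size()).thenCallRealMethod();
        // An interface has no real implementation to call; with the fix
        // this invocation fails fast with a clear MockitoException.
        mock.size();
    }
}
```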
Math | 91 | src/java/org/apache/commons/math/fraction/Fraction.java | 258 | 262 | Fraction.compareTo returns 0 for some different fractions | If two different fractions evaluate to the same double due to limited precision,
the compareTo method returns 0 as if they were identical.
{code}
// value is roughly PI - 3.07e-18
Fraction pi1 = new Fraction(1068966896, 340262731);
// value is roughly PI + 1.936e-17
Fraction pi2 = new Fraction( 411557987, 131002976);
System.out.println(pi1.doubleValue() - pi2.doubleValue()); // exactly 0.0 due to limited IEEE754 precision
System.out.println(pi1.compareTo(pi2)); // display 0 instead of a negative value
{code} | public int compareTo(Fraction object) {
double nOd = doubleValue();
double dOn = object.doubleValue();
return (nOd < dOn) ? -1 : ((nOd > dOn) ? +1 : 0);
} | public int compareTo ( Fraction object ) { double nOd = doubleValue ( ) ; double dOn = object . doubleValue ( ) ; return ( nOd < dOn ) ? - 1 : ( ( nOd > dOn ) ? + 1 : 0 ) ; } | public int compareTo(Fraction object) {
long nOd = ((long) numerator) * object.denominator;
long dOn = ((long) denominator) * object.numerator;
return (nOd < dOn) ? -1 : ((nOd > dOn) ? +1 : 0);
} | public int compareTo ( Fraction object ) { long nOd = ( ( long ) numerator ) * object . denominator ; long dOn = ( ( long ) denominator ) * object . numerator ; return ( nOd < dOn ) ? - 1 : ( ( nOd > dOn ) ? + 1 : 0 ) ; } |
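A hedged sketch of why the fixed comparison is exact: cross-multiplying two int-based fractions in 64-bit arithmetic can never overflow (|int * int| <= 2^62 fits in a long), so no rounding occurs. The helper below assumes positive denominators, as in Fraction's normalized form.

```java
public class FractionCompareDemo {
    static int compare(int n1, int d1, int n2, int d2) {
        long lhs = (long) n1 * d2;   // exact: the product fits in a long
        long rhs = (long) d1 * n2;
        return Long.compare(lhs, rhs);
    }

    public static void main(String[] args) {
        // The two PI approximations from the report are equal as doubles,
        // yet the exact comparison still orders them correctly.
        System.out.println(compare(1068966896, 340262731,
                                   411557987, 131002976)); // -1
    }
}
```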
Cli | 28 | src/java/org/apache/commons/cli/Parser.java | 252 | 296 | Default options may be partially processed | The Properties instance passed to the Parser.parse() method to initialize the default options may be partially processed. This happens when the properties contain an option that doesn't accept arguments and has a default value that doesn't evaluate to "true". When this case occurs, the processing of the properties is stopped and the remaining options are never handled.
This is caused by the break statement in Parser.processProperties(Properties); a continue statement should have been used instead.
The related test in ValueTest is also wrong; there are two assertions that need to be changed:
{code}
Options opts = new Options();
opts.addOption("a", false, "toggle -a");
opts.addOption("c", "c", false, "toggle -c");
opts.addOption(OptionBuilder.hasOptionalArg().create('e'));
properties = new Properties();
properties.setProperty( "a", "false" );
properties.setProperty( "c", "no" );
properties.setProperty( "e", "0" );
cmd = parser.parse(opts, null, properties);
assertTrue( !cmd.hasOption("a") );
assertTrue( !cmd.hasOption("c") );
assertTrue( !cmd.hasOption("e") ); // Wrong, this option accepts an argument and should receive the value "0"
{code}
and the second one:
{code}
properties = new Properties();
properties.setProperty( "a", "just a string" );
properties.setProperty( "e", "" );
cmd = parser.parse(opts, null, properties);
assertTrue( !cmd.hasOption("a") );
assertTrue( !cmd.hasOption("c") );
assertTrue( !cmd.hasOption("e") ); // Wrong, this option accepts an argument and should receive an empty string as value
{code}
| protected void processProperties(Properties properties)
{
if (properties == null)
{
return;
}
for (Enumeration e = properties.propertyNames(); e.hasMoreElements();)
{
String option = e.nextElement().toString();
if (!cmd.hasOption(option))
{
Option opt = getOptions().getOption(option);
// get the value from the properties instance
String value = properties.getProperty(option);
if (opt.hasArg())
{
if (opt.getValues() == null || opt.getValues().length == 0)
{
try
{
opt.addValueForProcessing(value);
}
catch (RuntimeException exp)
{
// if we cannot add the value don't worry about it
}
}
}
else if (!("yes".equalsIgnoreCase(value)
|| "true".equalsIgnoreCase(value)
|| "1".equalsIgnoreCase(value)))
{
// if the value is not yes, true or 1 then don't add the
// option to the CommandLine
break;
}
cmd.addOption(opt);
}
}
} | protected void processProperties ( Properties properties ) { if ( properties == null ) { return ; } for ( Enumeration e = properties . propertyNames ( ) ; e . hasMoreElements ( ) ; ) { String option = e . nextElement ( ) . toString ( ) ; if ( ! cmd . hasOption ( option ) ) { Option opt = getOptions ( ) . getOption ( option ) ; String value = properties . getProperty ( option ) ; if ( opt . hasArg ( ) ) { if ( opt . getValues ( ) == null || opt . getValues ( ) . length == 0 ) { try { opt . addValueForProcessing ( value ) ; } catch ( RuntimeException exp ) { } } } else if ( ! ( "yes" . equalsIgnoreCase ( value ) || "true" . equalsIgnoreCase ( value ) || "1" . equalsIgnoreCase ( value ) ) ) { break ; } cmd . addOption ( opt ) ; } } } | protected void processProperties(Properties properties)
{
if (properties == null)
{
return;
}
for (Enumeration e = properties.propertyNames(); e.hasMoreElements();)
{
String option = e.nextElement().toString();
if (!cmd.hasOption(option))
{
Option opt = getOptions().getOption(option);
// get the value from the properties instance
String value = properties.getProperty(option);
if (opt.hasArg())
{
if (opt.getValues() == null || opt.getValues().length == 0)
{
try
{
opt.addValueForProcessing(value);
}
catch (RuntimeException exp)
{
// if we cannot add the value don't worry about it
}
}
}
else if (!("yes".equalsIgnoreCase(value)
|| "true".equalsIgnoreCase(value)
|| "1".equalsIgnoreCase(value)))
{
// if the value is not yes, true or 1 then don't add the
// option to the CommandLine
continue;
}
cmd.addOption(opt);
}
}
} | protected void processProperties ( Properties properties ) { if ( properties == null ) { return ; } for ( Enumeration e = properties . propertyNames ( ) ; e . hasMoreElements ( ) ; ) { String option = e . nextElement ( ) . toString ( ) ; if ( ! cmd . hasOption ( option ) ) { Option opt = getOptions ( ) . getOption ( option ) ; String value = properties . getProperty ( option ) ; if ( opt . hasArg ( ) ) { if ( opt . getValues ( ) == null || opt . getValues ( ) . length == 0 ) { try { opt . addValueForProcessing ( value ) ; } catch ( RuntimeException exp ) { } } } else if ( ! ( "yes" . equalsIgnoreCase ( value ) || "true" . equalsIgnoreCase ( value ) || "1" . equalsIgnoreCase ( value ) ) ) { continue ; } cmd . addOption ( opt ) ; } } } |
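A hedged usage sketch of the corrected behavior against the Commons CLI 1.x API (note that Properties iteration order is unspecified, so with the old break the bug only appeared when the false boolean key happened to be processed first):

```java
import java.util.Properties;
import org.apache.commons.cli.*;

public class DefaultPropsDemo {
    public static void main(String[] args) throws ParseException {
        Options opts = new Options();
        opts.addOption("a", false, "toggle -a");
        opts.addOption(OptionBuilder.hasOptionalArg().create('e'));

        Properties props = new Properties();
        props.setProperty("a", "false"); // boolean option, skipped
        props.setProperty("e", "0");     // option with an argument

        CommandLine cmd = new PosixParser().parse(opts, new String[0], props);
        System.out.println(cmd.hasOption("a")); // false
        System.out.println(cmd.hasOption("e")); // true with 'continue'
    }
}
```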
Compress | 26 | src/main/java/org/apache/commons/compress/utils/IOUtils.java | 94 | 105 | IOUtils.skip does not work as advertised | I am trying to feed a TarInputStream from a CipherInputStream.
It does not work, because IOUtils.skip() does not adhere to the contract it claims in javadoc:
" * <p>This method will only skip less than the requested number of
* bytes if the end of the input stream has been reached.</p>"
However, it does:
long skipped = input.skip(numToSkip);
if (skipped == 0) {
break;
}
And the input stream javadoc says:
" * This may result from any of a number of conditions; reaching end of file
* before <code>n</code> bytes have been skipped is only one possibility."
In the case of CipherInputStream, it stops at the end of each byte buffer.
If you check the IOUtils from colleagues at commons-io, they have considered this case in IOUtils.skip() where they use a read to skip through the stream.
An optimized version could combine trying to skip, then read then trying to skip again. | public static long skip(InputStream input, long numToSkip) throws IOException {
long available = numToSkip;
while (numToSkip > 0) {
long skipped = input.skip(numToSkip);
if (skipped == 0) {
break;
}
numToSkip -= skipped;
}
return available - numToSkip;
} | public static long skip ( InputStream input , long numToSkip ) throws IOException { long available = numToSkip ; while ( numToSkip > 0 ) { long skipped = input . skip ( numToSkip ) ; if ( skipped == 0 ) { break ; } numToSkip -= skipped ; } return available - numToSkip ; } | public static long skip(InputStream input, long numToSkip) throws IOException {
long available = numToSkip;
while (numToSkip > 0) {
long skipped = input.skip(numToSkip);
if (skipped == 0) {
break;
}
numToSkip -= skipped;
}
if (numToSkip > 0) {
byte[] skipBuf = new byte[SKIP_BUF_SIZE];
while (numToSkip > 0) {
int read = readFully(input, skipBuf, 0,
(int) Math.min(numToSkip, SKIP_BUF_SIZE));
if (read < 1) {
break;
}
numToSkip -= read;
}
}
return available - numToSkip;
} | public static long skip ( InputStream input , long numToSkip ) throws IOException { long available = numToSkip ; while ( numToSkip > 0 ) { long skipped = input . skip ( numToSkip ) ; if ( skipped == 0 ) { break ; } numToSkip -= skipped ; } if ( numToSkip > 0 ) { byte [ ] skipBuf = new byte [ SKIP_BUF_SIZE ] ; while ( numToSkip > 0 ) { int read = readFully ( input , skipBuf , 0 , ( int ) Math . min ( numToSkip , SKIP_BUF_SIZE ) ) ; if ( read < 1 ) { break ; } numToSkip -= read ; } } return available - numToSkip ; } |
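A hedged standalone sketch (names are mine) of the skip-then-read fallback the fix adopts: since skip() may legally make no progress long before end of stream, a robust implementation falls back to bounded reads.

```java
import java.io.ByteArrayInputStream;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;

public class RobustSkipDemo {
    /** A stream whose skip() never advances, as some wrappers behave. */
    static class NoSkipStream extends FilterInputStream {
        NoSkipStream(InputStream in) { super(in); }
        @Override public long skip(long n) { return 0; }
    }

    static long robustSkip(InputStream in, long n) throws IOException {
        long remaining = n;
        byte[] buf = new byte[4096];
        while (remaining > 0) {
            long skipped = in.skip(remaining);
            if (skipped > 0) { remaining -= skipped; continue; }
            // skip() made no progress: fall back to a bounded read
            int read = in.read(buf, 0, (int) Math.min(remaining, buf.length));
            if (read < 0) break;  // genuine end of stream
            remaining -= read;
        }
        return n - remaining;
    }

    public static void main(String[] args) throws IOException {
        InputStream in = new NoSkipStream(new ByteArrayInputStream(new byte[100]));
        System.out.println(robustSkip(in, 60));  // 60, despite skip() returning 0
        System.out.println(in.read() != -1);     // true: 40 bytes remain
    }
}
```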
JacksonDatabind | 108 | src/main/java/com/fasterxml/jackson/databind/ObjectReader.java | 1166 | 1170 | Change of behavior (2.8 -> 2.9) with `ObjectMapper.readTree(input)` with no content | So, it looks like `readTree()` methods in `ObjectMapper`, `ObjectReader` that take input OTHER than `JsonParser`, and are given "empty input" (only white-space available before end), will
* Return `NullNode` (Jackson 2.x up to and including 2.8)
* Return `null` (Jackson 2.9)
Latter behavior is what `readTree(JsonParser)` has and will do; but this accidentally changed other methods due to refactoring that unified underlying call handling (and add checking for new `DeserializationFeature.FAIL_ON_TRAILING_TOKENS`).
Behavior for this edge case was not being tested, apparently.
Now: since behavior has been changed for all 2.9.x patch versions, I am not sure it should be changed for 2.9 branch. But it seems sub-optimal as behavior, and something to definitely change for 3.0... but probably also for 2.10.
There are multiple things we could do.
1. Change it back to 2.8, to return `NullNode`
2. Change to throw exception, as "not valid" use case
3. Change it to return `MissingNode`
4. Leave as-is, for rest of 2.x.
Although it might seem best to revert it to (1), that seems somewhat wrong, problematic, as it would now not be possible to distinguish between JSON `null` value and missing content.
And although (2) would probably make sense if designing the API from scratch, it is probably too intrusive.
So I think (3) is the best way: it avoids returning `null` or throwing Exception (both being likely to break 2.9 code), but still allows distinguishing between all possible input cases.
| @SuppressWarnings("unchecked")
@Override
public <T extends TreeNode> T readTree(JsonParser p) throws IOException {
return (T) _bindAsTree(p);
} | @ SuppressWarnings ( "unchecked" ) @ Override public < T extends TreeNode > T readTree ( JsonParser p ) throws IOException { return ( T ) _bindAsTree ( p ) ; } | @SuppressWarnings("unchecked")
@Override
public <T extends TreeNode> T readTree(JsonParser p) throws IOException {
return (T) _bindAsTreeOrNull(p);
} | @ SuppressWarnings ( "unchecked" ) @ Override public < T extends TreeNode > T readTree ( JsonParser p ) throws IOException { return ( T ) _bindAsTreeOrNull ( p ) ; } |
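A hedged usage sketch of resolution (3); the exact behavior is assumed from the discussion above and applies to 2.10+, where missing content and an explicit JSON null remain distinguishable:

```java
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class ReadTreeDemo {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        JsonNode missing = mapper.readTree("   ");   // whitespace only, no content
        JsonNode nil = mapper.readTree("null");      // explicit JSON null
        System.out.println(missing.isMissingNode()); // true
        System.out.println(nil.isNull());            // true
    }
}
```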
Compress | 9 | src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java | 266 | 331 | TarArchiveOutputStream.getBytesWritten() returns invalid value | It appears that TarArchiveOutputStream.getBytesWritten() returns zero or an invalid value when queried.
In the code sample below, it returns zero, even after an sizeable file was processed.
I've printed it twice, once before closing the output stream, and once after, just for the reference.
It is also demonstrable on multiple processed files.
Within the TarArchiveOutputStream.write() implementation, the call to count(numToWrite) is made after numToWrite has been depleted in the process of actual byte writing. When the call to count(numToWrite) is moved up, the values returned by TarArchiveOutputStream.getBytesWritten() become equal to the sum of the sizes of the processed files. This is much closer to the expected value ("Returns the current number of bytes written to this stream.") but still not correct, for that number should include the tar header sizes as well.
At any rate, please find the proposed patch below, merely moving count(numToWrite); up a few lines. This makes TarArchiveOutputStream.getBytesWritten() closer to true value.
Test code:
{code}
@Test
public void tartest() throws Exception {
FileOutputStream myOutputStream = new FileOutputStream("C:/temp/tartest.tar");
ArchiveOutputStream sTarOut = new ArchiveStreamFactory().createArchiveOutputStream(ArchiveStreamFactory.TAR, myOutputStream);
File sSource = new File("C:/share/od_l.txt");
TarArchiveEntry sEntry = new TarArchiveEntry(sSource);
sTarOut.putArchiveEntry(sEntry);
FileInputStream sInput = new FileInputStream(sSource);
byte[] cpRead = new byte[8192];
int iRead = 0;
while ((iRead = sInput.read(cpRead)) > 0) {
sTarOut.write(cpRead, 0, iRead);
}
sLog.info("Processed: "+sTarOut.getBytesWritten()+" bytes. File Len: "+sSource.length());
sInput.close();
sTarOut.closeArchiveEntry();
sTarOut.close();
sLog.info("Processed: "+sTarOut.getBytesWritten()+" bytes. File Len: "+sSource.length());
return;
}
{code}
Test Output:
{code}
Oct 21, 2011 9:09:28 AM com.cronsult.jndmpd.test.Backup tartest
INFO: Processed: 0 bytes. File Len: 186974208
Oct 21, 2011 9:09:28 AM com.cronsult.jndmpd.test.Backup tartest
INFO: Processed: 0 bytes. File Len: 186974208
{code}
Proposed Patch:
{code}
Index: src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java
===================================================================
--- src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java (revision 1187150)
+++ src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java (working copy)
@@ -276,6 +276,8 @@
// eliminate some of the buffer copying.
//
}
+
+ count(numToWrite);
if (assemLen > 0) {
if ((assemLen + numToWrite) >= recordBuf.length) {
@@ -325,7 +327,7 @@
wOffset += num;
}
- count(numToWrite);
+
}
/**
{code} | @Override
public void write(byte[] wBuf, int wOffset, int numToWrite) throws IOException {
if ((currBytes + numToWrite) > currSize) {
throw new IOException("request to write '" + numToWrite
+ "' bytes exceeds size in header of '"
+ currSize + "' bytes for entry '"
+ currName + "'");
//
// We have to deal with assembly!!!
// The programmer can be writing little 32 byte chunks for all
// we know, and we must assemble complete records for writing.
// REVIEW Maybe this should be in TarBuffer? Could that help to
// eliminate some of the buffer copying.
//
}
if (assemLen > 0) {
if ((assemLen + numToWrite) >= recordBuf.length) {
int aLen = recordBuf.length - assemLen;
System.arraycopy(assemBuf, 0, recordBuf, 0,
assemLen);
System.arraycopy(wBuf, wOffset, recordBuf,
assemLen, aLen);
buffer.writeRecord(recordBuf);
currBytes += recordBuf.length;
wOffset += aLen;
numToWrite -= aLen;
assemLen = 0;
} else {
System.arraycopy(wBuf, wOffset, assemBuf, assemLen,
numToWrite);
wOffset += numToWrite;
assemLen += numToWrite;
numToWrite = 0;
}
}
//
// When we get here we have EITHER:
// o An empty "assemble" buffer.
// o No bytes to write (numToWrite == 0)
//
while (numToWrite > 0) {
if (numToWrite < recordBuf.length) {
System.arraycopy(wBuf, wOffset, assemBuf, assemLen,
numToWrite);
assemLen += numToWrite;
break;
}
buffer.writeRecord(wBuf, wOffset);
int num = recordBuf.length;
currBytes += num;
numToWrite -= num;
wOffset += num;
}
count(numToWrite);
} | @ Override public void write ( byte [ ] wBuf , int wOffset , int numToWrite ) throws IOException { if ( ( currBytes + numToWrite ) > currSize ) { throw new IOException ( "request to write '" + numToWrite + "' bytes exceeds size in header of '" + currSize + "' bytes for entry '" + currName + "'" ) ; } if ( assemLen > 0 ) { if ( ( assemLen + numToWrite ) >= recordBuf . length ) { int aLen = recordBuf . length - assemLen ; System . arraycopy ( assemBuf , 0 , recordBuf , 0 , assemLen ) ; System . arraycopy ( wBuf , wOffset , recordBuf , assemLen , aLen ) ; buffer . writeRecord ( recordBuf ) ; currBytes += recordBuf . length ; wOffset += aLen ; numToWrite -= aLen ; assemLen = 0 ; } else { System . arraycopy ( wBuf , wOffset , assemBuf , assemLen , numToWrite ) ; wOffset += numToWrite ; assemLen += numToWrite ; numToWrite = 0 ; } } while ( numToWrite > 0 ) { if ( numToWrite < recordBuf . length ) { System . arraycopy ( wBuf , wOffset , assemBuf , assemLen , numToWrite ) ; assemLen += numToWrite ; break ; } buffer . writeRecord ( wBuf , wOffset ) ; int num = recordBuf . length ; currBytes += num ; numToWrite -= num ; wOffset += num ; } count ( numToWrite ) ; } | @Override
public void write(byte[] wBuf, int wOffset, int numToWrite) throws IOException {
if ((currBytes + numToWrite) > currSize) {
throw new IOException("request to write '" + numToWrite
+ "' bytes exceeds size in header of '"
+ currSize + "' bytes for entry '"
+ currName + "'");
//
// We have to deal with assembly!!!
// The programmer can be writing little 32 byte chunks for all
// we know, and we must assemble complete records for writing.
// REVIEW Maybe this should be in TarBuffer? Could that help to
// eliminate some of the buffer copying.
//
}
if (assemLen > 0) {
if ((assemLen + numToWrite) >= recordBuf.length) {
int aLen = recordBuf.length - assemLen;
System.arraycopy(assemBuf, 0, recordBuf, 0,
assemLen);
System.arraycopy(wBuf, wOffset, recordBuf,
assemLen, aLen);
buffer.writeRecord(recordBuf);
currBytes += recordBuf.length;
wOffset += aLen;
numToWrite -= aLen;
assemLen = 0;
} else {
System.arraycopy(wBuf, wOffset, assemBuf, assemLen,
numToWrite);
wOffset += numToWrite;
assemLen += numToWrite;
numToWrite = 0;
}
}
//
// When we get here we have EITHER:
// o An empty "assemble" buffer.
// o No bytes to write (numToWrite == 0)
//
while (numToWrite > 0) {
if (numToWrite < recordBuf.length) {
System.arraycopy(wBuf, wOffset, assemBuf, assemLen,
numToWrite);
assemLen += numToWrite;
break;
}
buffer.writeRecord(wBuf, wOffset);
int num = recordBuf.length;
currBytes += num;
numToWrite -= num;
wOffset += num;
}
} | @ Override public void write ( byte [ ] wBuf , int wOffset , int numToWrite ) throws IOException { if ( ( currBytes + numToWrite ) > currSize ) { throw new IOException ( "request to write '" + numToWrite + "' bytes exceeds size in header of '" + currSize + "' bytes for entry '" + currName + "'" ) ; } if ( assemLen > 0 ) { if ( ( assemLen + numToWrite ) >= recordBuf . length ) { int aLen = recordBuf . length - assemLen ; System . arraycopy ( assemBuf , 0 , recordBuf , 0 , assemLen ) ; System . arraycopy ( wBuf , wOffset , recordBuf , assemLen , aLen ) ; buffer . writeRecord ( recordBuf ) ; currBytes += recordBuf . length ; wOffset += aLen ; numToWrite -= aLen ; assemLen = 0 ; } else { System . arraycopy ( wBuf , wOffset , assemBuf , assemLen , numToWrite ) ; wOffset += numToWrite ; assemLen += numToWrite ; numToWrite = 0 ; } } while ( numToWrite > 0 ) { if ( numToWrite < recordBuf . length ) { System . arraycopy ( wBuf , wOffset , assemBuf , assemLen , numToWrite ) ; assemLen += numToWrite ; break ; } buffer . writeRecord ( wBuf , wOffset ) ; int num = recordBuf . length ; currBytes += num ; numToWrite -= num ; wOffset += num ; } } |
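A hedged standalone sketch (names are mine) of the underlying pitfall, independent of the tar format: counting a length variable after the write loop has decremented it to zero records nothing, so the byte count must be taken before the variable is consumed.

```java
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;

public class CountAfterDepleteDemo {
    static long bytesWritten = 0;

    static void write(OutputStream out, byte[] buf, int off, int len)
            throws IOException {
        int requested = len;        // snapshot before the loop consumes 'len'
        while (len > 0) {
            int chunk = Math.min(len, 512);
            out.write(buf, off, chunk);
            off += chunk;
            len -= chunk;           // 'len' is 0 once the loop ends
        }
        bytesWritten += requested;  // counting 'len' here would add 0
    }

    public static void main(String[] args) throws IOException {
        write(new ByteArrayOutputStream(), new byte[2000], 0, 2000);
        System.out.println(bytesWritten); // 2000
    }
}
```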
Compress | 30 | src/main/java/org/apache/commons/compress/compressors/bzip2/BZip2CompressorInputStream.java | 152 | 179 | BZip2CompressorInputStream return value wrong when told to read to a full buffer. | BZip2CompressorInputStream.read(buffer, offset, length) returns -1 when given an offset equal to the length of the buffer.
This indicates not that the buffer was full, but that the stream was finished.
It seems like a pretty stupid thing to do - but I'm getting this when trying to use Kryo serialization (which is probably a bug on their part, too), so it does occur and has negative effects.
Here's a JUnit test that shows the problem specifically:
{noformat}
@Test
public void testApacheCommonsBZipUncompression () throws Exception {
// Create a big random piece of data
byte[] rawData = new byte[1048576];
for (int i=0; i<rawData.length; ++i) {
rawData[i] = (byte) Math.floor(Math.random()*256);
}
// Compress it
ByteArrayOutputStream baos = new ByteArrayOutputStream();
BZip2CompressorOutputStream bzipOut = new BZip2CompressorOutputStream(baos);
bzipOut.write(rawData);
bzipOut.flush();
bzipOut.close();
baos.flush();
baos.close();
// Try to read it back in
ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
BZip2CompressorInputStream bzipIn = new BZip2CompressorInputStream(bais);
byte[] buffer = new byte[1024];
// Works fine
Assert.assertEquals(1024, bzipIn.read(buffer, 0, 1024));
// Fails, returns -1 (indicating the stream is complete rather than that the buffer
// was full)
Assert.assertEquals(0, bzipIn.read(buffer, 1024, 0));
// But if you change the above expected value to -1, the following line still works
Assert.assertEquals(1024, bzipIn.read(buffer, 0, 1024));
bzipIn.close();
}
{noformat}
| @Override
public int read(final byte[] dest, final int offs, final int len)
throws IOException {
if (offs < 0) {
throw new IndexOutOfBoundsException("offs(" + offs + ") < 0.");
}
if (len < 0) {
throw new IndexOutOfBoundsException("len(" + len + ") < 0.");
}
if (offs + len > dest.length) {
throw new IndexOutOfBoundsException("offs(" + offs + ") + len("
+ len + ") > dest.length(" + dest.length + ").");
}
if (this.in == null) {
throw new IOException("stream closed");
}
final int hi = offs + len;
int destOffs = offs;
int b;
while (destOffs < hi && ((b = read0()) >= 0)) {
dest[destOffs++] = (byte) b;
count(1);
}
int c = (destOffs == offs) ? -1 : (destOffs - offs);
return c;
} | @ Override public int read ( final byte [ ] dest , final int offs , final int len ) throws IOException { if ( offs < 0 ) { throw new IndexOutOfBoundsException ( "offs(" + offs + ") < 0." ) ; } if ( len < 0 ) { throw new IndexOutOfBoundsException ( "len(" + len + ") < 0." ) ; } if ( offs + len > dest . length ) { throw new IndexOutOfBoundsException ( "offs(" + offs + ") + len(" + len + ") > dest.length(" + dest . length + ")." ) ; } if ( this . in == null ) { throw new IOException ( "stream closed" ) ; } final int hi = offs + len ; int destOffs = offs ; int b ; while ( destOffs < hi && ( ( b = read0 ( ) ) >= 0 ) ) { dest [ destOffs ++ ] = ( byte ) b ; count ( 1 ) ; } int c = ( destOffs == offs ) ? - 1 : ( destOffs - offs ) ; return c ; } | @Override
public int read(final byte[] dest, final int offs, final int len)
throws IOException {
if (offs < 0) {
throw new IndexOutOfBoundsException("offs(" + offs + ") < 0.");
}
if (len < 0) {
throw new IndexOutOfBoundsException("len(" + len + ") < 0.");
}
if (offs + len > dest.length) {
throw new IndexOutOfBoundsException("offs(" + offs + ") + len("
+ len + ") > dest.length(" + dest.length + ").");
}
if (this.in == null) {
throw new IOException("stream closed");
}
if (len == 0) {
return 0;
}
final int hi = offs + len;
int destOffs = offs;
int b;
while (destOffs < hi && ((b = read0()) >= 0)) {
dest[destOffs++] = (byte) b;
count(1);
}
int c = (destOffs == offs) ? -1 : (destOffs - offs);
return c;
} | @ Override public int read ( final byte [ ] dest , final int offs , final int len ) throws IOException { if ( offs < 0 ) { throw new IndexOutOfBoundsException ( "offs(" + offs + ") < 0." ) ; } if ( len < 0 ) { throw new IndexOutOfBoundsException ( "len(" + len + ") < 0." ) ; } if ( offs + len > dest . length ) { throw new IndexOutOfBoundsException ( "offs(" + offs + ") + len(" + len + ") > dest.length(" + dest . length + ")." ) ; } if ( this . in == null ) { throw new IOException ( "stream closed" ) ; } if ( len == 0 ) { return 0 ; } final int hi = offs + len ; int destOffs = offs ; int b ; while ( destOffs < hi && ( ( b = read0 ( ) ) >= 0 ) ) { dest [ destOffs ++ ] = ( byte ) b ; count ( 1 ) ; } int c = ( destOffs == offs ) ? - 1 : ( destOffs - offs ) ; return c ; } |
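A hedged illustration of the InputStream contract the fix enforces, using a plain JDK stream: a zero-length read on a non-exhausted stream must return 0, while -1 is reserved for end of stream.

```java
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;

public class ZeroLenReadDemo {
    public static void main(String[] args) throws IOException {
        InputStream in = new ByteArrayInputStream(new byte[] {1, 2, 3});
        byte[] buf = new byte[8];
        System.out.println(in.read(buf, 0, 2)); // 2
        System.out.println(in.read(buf, 2, 0)); // 0: zero-length request,
                                                //    not end of stream
        System.out.println(in.read(buf, 2, 6)); // 1: the remaining byte
    }
}
```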
Codec | 3 | src/java/org/apache/commons/codec/language/DoubleMetaphone.java | 418 | 469 | Double Metaphone bugs in alternative encoding | The new test case (CODEC-83) has highlighted a number of issues with the "alternative" encoding in the Double Metaphone implementation
1) Bug in the handleG method when "G" is followed by "IER"
* The alternative encoding of "Angier" results in "ANKR" rather than "ANJR"
* The alternative encoding of "rogier" results in "RKR" rather than "RJR"
The problem is in the handleG() method and is caused by the wrong length (4 instead of 3) being used in the contains() method:
{code}
} else if (contains(value, index + 1, 4, "IER")) {
{code}
...this should be
{code}
} else if (contains(value, index + 1, 3, "IER")) {
{code}
2) Bug in the handleL method
* The alternative encoding of "cabrillo" results in "KPRL " rather than "KPR"
The problem is that the first thing this method does is append an "L" to both primary & alternative encoding. When the conditionL0() method returns true then the "L" should not be appended for the alternative encoding
{code}
result.append('L');
if (charAt(value, index + 1) == 'L') {
if (conditionL0(value, index)) {
result.appendAlternate(' ');
}
index += 2;
} else {
index++;
}
return index;
{code}
Suggest refactoring this to
{code}
if (charAt(value, index + 1) == 'L') {
if (conditionL0(value, index)) {
result.appendPrimary('L');
} else {
result.append('L');
}
index += 2;
} else {
result.append('L');
index++;
}
return index;
{code}
3) Bug in the conditionL0() method for words ending in "AS" and "OS"
* The alternative encoding of "gallegos" results in "KLKS" rather than "KKS"
The problem is caused by the wrong start position being used in the contains() method, which means it's not checking the last two characters of the word but the previous and current positions instead:
{code}
} else if ((contains(value, index - 1, 2, "AS", "OS") ||
{code}
...this should be
{code}
} else if ((contains(value, value.length() - 2, 2, "AS", "OS") ||
{code}
I'll attach a patch for review | private int handleG(String value,
DoubleMetaphoneResult result,
int index,
boolean slavoGermanic) {
if (charAt(value, index + 1) == 'H') {
index = handleGH(value, result, index);
} else if (charAt(value, index + 1) == 'N') {
if (index == 1 && isVowel(charAt(value, 0)) && !slavoGermanic) {
result.append("KN", "N");
} else if (!contains(value, index + 2, 2, "EY") &&
charAt(value, index + 1) != 'Y' && !slavoGermanic) {
result.append("N", "KN");
} else {
result.append("KN");
}
index = index + 2;
} else if (contains(value, index + 1, 2, "LI") && !slavoGermanic) {
result.append("KL", "L");
index += 2;
} else if (index == 0 && (charAt(value, index + 1) == 'Y' || contains(value, index + 1, 2, ES_EP_EB_EL_EY_IB_IL_IN_IE_EI_ER))) {
//-- -ges-, -gep-, -gel-, -gie- at beginning --//
result.append('K', 'J');
index += 2;
} else if ((contains(value, index + 1, 2, "ER") ||
charAt(value, index + 1) == 'Y') &&
!contains(value, 0, 6, "DANGER", "RANGER", "MANGER") &&
!contains(value, index - 1, 1, "E", "I") &&
!contains(value, index - 1, 3, "RGY", "OGY")) {
//-- -ger-, -gy- --//
result.append('K', 'J');
index += 2;
} else if (contains(value, index + 1, 1, "E", "I", "Y") ||
contains(value, index - 1, 4, "AGGI", "OGGI")) {
//-- Italian "biaggi" --//
if ((contains(value, 0 ,4, "VAN ", "VON ") || contains(value, 0, 3, "SCH")) || contains(value, index + 1, 2, "ET")) {
//-- obvious germanic --//
result.append('K');
} else if (contains(value, index + 1, 4, "IER")) {
result.append('J');
} else {
result.append('J', 'K');
}
index += 2;
} else if (charAt(value, index + 1) == 'G') {
index += 2;
result.append('K');
} else {
index++;
result.append('K');
}
return index;
} | private int handleG ( String value , DoubleMetaphoneResult result , int index , boolean slavoGermanic ) { if ( charAt ( value , index + 1 ) == 'H' ) { index = handleGH ( value , result , index ) ; } else if ( charAt ( value , index + 1 ) == 'N' ) { if ( index == 1 && isVowel ( charAt ( value , 0 ) ) && ! slavoGermanic ) { result . append ( "KN" , "N" ) ; } else if ( ! contains ( value , index + 2 , 2 , "EY" ) && charAt ( value , index + 1 ) != 'Y' && ! slavoGermanic ) { result . append ( "N" , "KN" ) ; } else { result . append ( "KN" ) ; } index = index + 2 ; } else if ( contains ( value , index + 1 , 2 , "LI" ) && ! slavoGermanic ) { result . append ( "KL" , "L" ) ; index += 2 ; } else if ( index == 0 && ( charAt ( value , index + 1 ) == 'Y' || contains ( value , index + 1 , 2 , ES_EP_EB_EL_EY_IB_IL_IN_IE_EI_ER ) ) ) { result . append ( 'K' , 'J' ) ; index += 2 ; } else if ( ( contains ( value , index + 1 , 2 , "ER" ) || charAt ( value , index + 1 ) == 'Y' ) && ! contains ( value , 0 , 6 , "DANGER" , "RANGER" , "MANGER" ) && ! contains ( value , index - 1 , 1 , "E" , "I" ) && ! contains ( value , index - 1 , 3 , "RGY" , "OGY" ) ) { result . append ( 'K' , 'J' ) ; index += 2 ; } else if ( contains ( value , index + 1 , 1 , "E" , "I" , "Y" ) || contains ( value , index - 1 , 4 , "AGGI" , "OGGI" ) ) { if ( ( contains ( value , 0 , 4 , "VAN " , "VON " ) || contains ( value , 0 , 3 , "SCH" ) ) || contains ( value , index + 1 , 2 , "ET" ) ) { result . append ( 'K' ) ; } else if ( contains ( value , index + 1 , 4 , "IER" ) ) { result . append ( 'J' ) ; } else { result . append ( 'J' , 'K' ) ; } index += 2 ; } else if ( charAt ( value , index + 1 ) == 'G' ) { index += 2 ; result . append ( 'K' ) ; } else { index ++ ; result . append ( 'K' ) ; } return index ; } | private int handleG(String value,
DoubleMetaphoneResult result,
int index,
boolean slavoGermanic) {
if (charAt(value, index + 1) == 'H') {
index = handleGH(value, result, index);
} else if (charAt(value, index + 1) == 'N') {
if (index == 1 && isVowel(charAt(value, 0)) && !slavoGermanic) {
result.append("KN", "N");
} else if (!contains(value, index + 2, 2, "EY") &&
charAt(value, index + 1) != 'Y' && !slavoGermanic) {
result.append("N", "KN");
} else {
result.append("KN");
}
index = index + 2;
} else if (contains(value, index + 1, 2, "LI") && !slavoGermanic) {
result.append("KL", "L");
index += 2;
} else if (index == 0 && (charAt(value, index + 1) == 'Y' || contains(value, index + 1, 2, ES_EP_EB_EL_EY_IB_IL_IN_IE_EI_ER))) {
//-- -ges-, -gep-, -gel-, -gie- at beginning --//
result.append('K', 'J');
index += 2;
} else if ((contains(value, index + 1, 2, "ER") ||
charAt(value, index + 1) == 'Y') &&
!contains(value, 0, 6, "DANGER", "RANGER", "MANGER") &&
!contains(value, index - 1, 1, "E", "I") &&
!contains(value, index - 1, 3, "RGY", "OGY")) {
//-- -ger-, -gy- --//
result.append('K', 'J');
index += 2;
} else if (contains(value, index + 1, 1, "E", "I", "Y") ||
contains(value, index - 1, 4, "AGGI", "OGGI")) {
//-- Italian "biaggi" --//
if ((contains(value, 0 ,4, "VAN ", "VON ") || contains(value, 0, 3, "SCH")) || contains(value, index + 1, 2, "ET")) {
//-- obvious germanic --//
result.append('K');
} else if (contains(value, index + 1, 3, "IER")) {
result.append('J');
} else {
result.append('J', 'K');
}
index += 2;
} else if (charAt(value, index + 1) == 'G') {
index += 2;
result.append('K');
} else {
index++;
result.append('K');
}
return index;
} | private int handleG ( String value , DoubleMetaphoneResult result , int index , boolean slavoGermanic ) { if ( charAt ( value , index + 1 ) == 'H' ) { index = handleGH ( value , result , index ) ; } else if ( charAt ( value , index + 1 ) == 'N' ) { if ( index == 1 && isVowel ( charAt ( value , 0 ) ) && ! slavoGermanic ) { result . append ( "KN" , "N" ) ; } else if ( ! contains ( value , index + 2 , 2 , "EY" ) && charAt ( value , index + 1 ) != 'Y' && ! slavoGermanic ) { result . append ( "N" , "KN" ) ; } else { result . append ( "KN" ) ; } index = index + 2 ; } else if ( contains ( value , index + 1 , 2 , "LI" ) && ! slavoGermanic ) { result . append ( "KL" , "L" ) ; index += 2 ; } else if ( index == 0 && ( charAt ( value , index + 1 ) == 'Y' || contains ( value , index + 1 , 2 , ES_EP_EB_EL_EY_IB_IL_IN_IE_EI_ER ) ) ) { result . append ( 'K' , 'J' ) ; index += 2 ; } else if ( ( contains ( value , index + 1 , 2 , "ER" ) || charAt ( value , index + 1 ) == 'Y' ) && ! contains ( value , 0 , 6 , "DANGER" , "RANGER" , "MANGER" ) && ! contains ( value , index - 1 , 1 , "E" , "I" ) && ! contains ( value , index - 1 , 3 , "RGY" , "OGY" ) ) { result . append ( 'K' , 'J' ) ; index += 2 ; } else if ( contains ( value , index + 1 , 1 , "E" , "I" , "Y" ) || contains ( value , index - 1 , 4 , "AGGI" , "OGGI" ) ) { if ( ( contains ( value , 0 , 4 , "VAN " , "VON " ) || contains ( value , 0 , 3 , "SCH" ) ) || contains ( value , index + 1 , 2 , "ET" ) ) { result . append ( 'K' ) ; } else if ( contains ( value , index + 1 , 3 , "IER" ) ) { result . append ( 'J' ) ; } else { result . append ( 'J' , 'K' ) ; } index += 2 ; } else if ( charAt ( value , index + 1 ) == 'G' ) { index += 2 ; result . append ( 'K' ) ; } else { index ++ ; result . append ( 'K' ) ; } return index ; } |
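A hedged usage sketch against the Commons Codec API; the expected alternate encoding is taken from the report above.

```java
import org.apache.commons.codec.language.DoubleMetaphone;

public class MetaphoneDemo {
    public static void main(String[] args) {
        DoubleMetaphone dm = new DoubleMetaphone();
        // The second argument requests the alternate encoding. With the
        // length fixed from 4 to 3, "Angier" encodes to ANJR, not ANKR.
        System.out.println(dm.doubleMetaphone("Angier", true));
    }
}
```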
Mockito | 20 | src/org/mockito/internal/creation/bytebuddy/ByteBuddyMockMaker.java | 24 | 53 | Allow convenient spying on abstract classes | I posted this in Google Code and was asked to submit it on GitHub.
Mockito is easy to use when the test needs to provide canned values for a certain method.
But it gets harder when a canned value isn't sufficient.
##### Example 1: Fake with trivial Logic
```
interface UserAccount {
List<String> getEmails();
void addEmail(String email);
// 12 other methods ...
}
```
When mocking such domain entity object, it's tedious to manually program getEmails()/addEmail() with when().thenReturn() and to make sure the two methods are logically consistent, that is, getEmails() returns all emails added.
##### Example 2: callback-style API
```
interface AccountService {
void getAccount(String id, AsyncCallback<UserAccount> callback);
}
```
Stubbing AccountService isn't easy. It'd require use of Answer, and the Answer API isn't statically type safe:
```
when(service.getAccount(eq(id), any(AsyncCallback.class)).thenAnswer(new Answer<Void>() {
AsyncCallback<UserAccount> callback = (AsyncCallback<UserAccount>) getArguments()[1];
...
});
```
##### Example 3: Uninteresting parameters
```
interface AccountRpcService {
FutureAccount getAccount(RpcContext context, String id);
}
```
None of the tests care about the context object. It's an uninteresting parameter imposed by the framework.
If AccountRpcService were directly mocked, all tests would have to use isA() to repetitively mention this uninteresting parameter, like this:
`when(service.getAccount(isA(RpcContext.class), eq("id")).thenReturn(...);`
And all other parameters are required to be wrapped in eq().
#### Proposal
I propose adding support for abstract classes to mockito to make it easier to deal with tests like above:
##### For example 1
```
abstract class FakeUserAccount implements UserAccount {
private final List<String> emails = new ArrayList<>();
@Override public void addEmail(String email) {
emails.add(email);
}
@Override List<String> getEmails() {
return ImmutableList.copyOf(emails);
}
}
@Fake private FakeUserAccount userAccount; // Mockito instantiates abstract class.
```
##### For example 2
```
abstract class MockAccountService implements AccountService {
@Override public void getAccount(String id, AsyncCallback<UserAccount> callback) {
callback.onSuccess(getAccount(id));
}
abstract UserAccount getAccount(String id);
}
@Fake private MockAccountService service;
...
when(service.getAccount("id")).thenReturn(account);
```
##### For example 3
```
abstract class MockAccountRpcService implements AccountRpcService {
@Override Future<Account> getAccount(RpcContext context, String id) {
checkNotNull(context); // Common sanity test. Don't have to repeat it in tests.
return getAccount(id);
}
abstract Future<Account> getAccount(String id);
}
@Fake private MockAccountRpcService service;
when(service.getAccount("id")).thenReturn(...);
```
My workplace internally implemented a default Answer to support abstract classes. We found that support for abstract classes helps us avoid overusing mocks when we should be using fakes, and in situations like the above we get cleaner test code.
But because it's not integrated in the core Mockito, there are gotchas with our implementation (like, you can't have private/final methods in your fake).
If the idea sounds okay to give a try, I'll volunteer to submit a patch.
Thanks!
| public <T> T createMock(MockCreationSettings<T> settings, MockHandler handler) {
if (settings.getSerializableMode() == SerializableMode.ACROSS_CLASSLOADERS) {
throw new MockitoException("Serialization across classloaders not yet supported with ByteBuddyMockMaker");
}
Class<? extends T> mockedProxyType = cachingMockBytecodeGenerator.get(
settings.getTypeToMock(),
settings.getExtraInterfaces()
);
T mockInstance = null;
try {
mockInstance = classInstantiator.instantiate(mockedProxyType);
MockMethodInterceptor.MockAccess mockAccess = (MockMethodInterceptor.MockAccess) mockInstance;
mockAccess.setMockitoInterceptor(new MockMethodInterceptor(asInternalMockHandler(handler), settings));
return ensureMockIsAssignableToMockedType(settings, mockInstance);
} catch (ClassCastException cce) {
throw new MockitoException(join(
"ClassCastException occurred while creating the mockito mock :",
" class to mock : " + describeClass(mockedProxyType),
" created class : " + describeClass(settings.getTypeToMock()),
" proxy instance class : " + describeClass(mockInstance),
" instance creation by : " + classInstantiator.getClass().getSimpleName(),
"",
"You might experience classloading issues, please ask the mockito mailing-list.",
""
),cce);
} catch (org.mockito.internal.creation.instance.InstantiationException e) {
throw new MockitoException("Unable to create mock instance of type '" + mockedProxyType.getSuperclass().getSimpleName() + "'", e);
}
} | public < T > T createMock ( MockCreationSettings < T > settings , MockHandler handler ) { if ( settings . getSerializableMode ( ) == SerializableMode . ACROSS_CLASSLOADERS ) { throw new MockitoException ( "Serialization across classloaders not yet supported with ByteBuddyMockMaker" ) ; } Class < ? extends T > mockedProxyType = cachingMockBytecodeGenerator . get ( settings . getTypeToMock ( ) , settings . getExtraInterfaces ( ) ) ; T mockInstance = null ; try { mockInstance = classInstantiator . instantiate ( mockedProxyType ) ; MockMethodInterceptor . MockAccess mockAccess = ( MockMethodInterceptor . MockAccess ) mockInstance ; mockAccess . setMockitoInterceptor ( new MockMethodInterceptor ( asInternalMockHandler ( handler ) , settings ) ) ; return ensureMockIsAssignableToMockedType ( settings , mockInstance ) ; } catch ( ClassCastException cce ) { throw new MockitoException ( join ( "ClassCastException occurred while creating the mockito mock :" , " class to mock : " + describeClass ( mockedProxyType ) , " created class : " + describeClass ( settings . getTypeToMock ( ) ) , " proxy instance class : " + describeClass ( mockInstance ) , " instance creation by : " + classInstantiator . getClass ( ) . getSimpleName ( ) , "" , "You might experience classloading issues, please ask the mockito mailing-list." , "" ) , cce ) ; } catch ( org . mockito . internal . creation . instance . InstantiationException e ) { throw new MockitoException ( "Unable to create mock instance of type '" + mockedProxyType . getSuperclass ( ) . getSimpleName ( ) + "'" , e ) ; } } | public <T> T createMock(MockCreationSettings<T> settings, MockHandler handler) {
if (settings.getSerializableMode() == SerializableMode.ACROSS_CLASSLOADERS) {
throw new MockitoException("Serialization across classloaders not yet supported with ByteBuddyMockMaker");
}
Class<? extends T> mockedProxyType = cachingMockBytecodeGenerator.get(
settings.getTypeToMock(),
settings.getExtraInterfaces()
);
Instantiator instantiator = new InstantiatorProvider().getInstantiator(settings);
T mockInstance = null;
try {
mockInstance = instantiator.newInstance(mockedProxyType);
MockMethodInterceptor.MockAccess mockAccess = (MockMethodInterceptor.MockAccess) mockInstance;
mockAccess.setMockitoInterceptor(new MockMethodInterceptor(asInternalMockHandler(handler), settings));
return ensureMockIsAssignableToMockedType(settings, mockInstance);
} catch (ClassCastException cce) {
throw new MockitoException(join(
"ClassCastException occurred while creating the mockito mock :",
" class to mock : " + describeClass(mockedProxyType),
" created class : " + describeClass(settings.getTypeToMock()),
" proxy instance class : " + describeClass(mockInstance),
" instance creation by : " + instantiator.getClass().getSimpleName(),
"",
"You might experience classloading issues, please ask the mockito mailing-list.",
""
),cce);
} catch (org.mockito.internal.creation.instance.InstantiationException e) {
throw new MockitoException("Unable to create mock instance of type '" + mockedProxyType.getSuperclass().getSimpleName() + "'", e);
}
} | public < T > T createMock ( MockCreationSettings < T > settings , MockHandler handler ) { if ( settings . getSerializableMode ( ) == SerializableMode . ACROSS_CLASSLOADERS ) { throw new MockitoException ( "Serialization across classloaders not yet supported with ByteBuddyMockMaker" ) ; } Class < ? extends T > mockedProxyType = cachingMockBytecodeGenerator . get ( settings . getTypeToMock ( ) , settings . getExtraInterfaces ( ) ) ; Instantiator instantiator = new InstantiatorProvider ( ) . getInstantiator ( settings ) ; T mockInstance = null ; try { mockInstance = instantiator . newInstance ( mockedProxyType ) ; MockMethodInterceptor . MockAccess mockAccess = ( MockMethodInterceptor . MockAccess ) mockInstance ; mockAccess . setMockitoInterceptor ( new MockMethodInterceptor ( asInternalMockHandler ( handler ) , settings ) ) ; return ensureMockIsAssignableToMockedType ( settings , mockInstance ) ; } catch ( ClassCastException cce ) { throw new MockitoException ( join ( "ClassCastException occurred while creating the mockito mock :" , " class to mock : " + describeClass ( mockedProxyType ) , " created class : " + describeClass ( settings . getTypeToMock ( ) ) , " proxy instance class : " + describeClass ( mockInstance ) , " instance creation by : " + instantiator . getClass ( ) . getSimpleName ( ) , "" , "You might experience classloading issues, please ask the mockito mailing-list." , "" ) , cce ) ; } catch ( org . mockito . internal . creation . instance . InstantiationException e ) { throw new MockitoException ( "Unable to create mock instance of type '" + mockedProxyType . getSuperclass ( ) . getSimpleName ( ) + "'" , e ) ; } } |
Chart | 11 | source/org/jfree/chart/util/ShapeUtilities.java | 264 | 296 | JCommon 1.0.12 ShapeUtilities.equal(path1,path2) | The comparison of two GeneralPath objects uses the same PathIterator for both objects. equal(GeneralPath path1, GeneralPath path2) will thus return true for any pair of non-null GeneralPath instances having the same windingRule. | public static boolean equal(GeneralPath p1, GeneralPath p2) {
if (p1 == null) {
return (p2 == null);
}
if (p2 == null) {
return false;
}
if (p1.getWindingRule() != p2.getWindingRule()) {
return false;
}
PathIterator iterator1 = p1.getPathIterator(null);
PathIterator iterator2 = p1.getPathIterator(null);
double[] d1 = new double[6];
double[] d2 = new double[6];
boolean done = iterator1.isDone() && iterator2.isDone();
while (!done) {
if (iterator1.isDone() != iterator2.isDone()) {
return false;
}
int seg1 = iterator1.currentSegment(d1);
int seg2 = iterator2.currentSegment(d2);
if (seg1 != seg2) {
return false;
}
if (!Arrays.equals(d1, d2)) {
return false;
}
iterator1.next();
iterator2.next();
done = iterator1.isDone() && iterator2.isDone();
}
return true;
} | public static boolean equal ( GeneralPath p1 , GeneralPath p2 ) { if ( p1 == null ) { return ( p2 == null ) ; } if ( p2 == null ) { return false ; } if ( p1 . getWindingRule ( ) != p2 . getWindingRule ( ) ) { return false ; } PathIterator iterator1 = p1 . getPathIterator ( null ) ; PathIterator iterator2 = p1 . getPathIterator ( null ) ; double [ ] d1 = new double [ 6 ] ; double [ ] d2 = new double [ 6 ] ; boolean done = iterator1 . isDone ( ) && iterator2 . isDone ( ) ; while ( ! done ) { if ( iterator1 . isDone ( ) != iterator2 . isDone ( ) ) { return false ; } int seg1 = iterator1 . currentSegment ( d1 ) ; int seg2 = iterator2 . currentSegment ( d2 ) ; if ( seg1 != seg2 ) { return false ; } if ( ! Arrays . equals ( d1 , d2 ) ) { return false ; } iterator1 . next ( ) ; iterator2 . next ( ) ; done = iterator1 . isDone ( ) && iterator2 . isDone ( ) ; } return true ; } | public static boolean equal(GeneralPath p1, GeneralPath p2) {
if (p1 == null) {
return (p2 == null);
}
if (p2 == null) {
return false;
}
if (p1.getWindingRule() != p2.getWindingRule()) {
return false;
}
PathIterator iterator1 = p1.getPathIterator(null);
PathIterator iterator2 = p2.getPathIterator(null);
double[] d1 = new double[6];
double[] d2 = new double[6];
boolean done = iterator1.isDone() && iterator2.isDone();
while (!done) {
if (iterator1.isDone() != iterator2.isDone()) {
return false;
}
int seg1 = iterator1.currentSegment(d1);
int seg2 = iterator2.currentSegment(d2);
if (seg1 != seg2) {
return false;
}
if (!Arrays.equals(d1, d2)) {
return false;
}
iterator1.next();
iterator2.next();
done = iterator1.isDone() && iterator2.isDone();
}
return true;
} | public static boolean equal ( GeneralPath p1 , GeneralPath p2 ) { if ( p1 == null ) { return ( p2 == null ) ; } if ( p2 == null ) { return false ; } if ( p1 . getWindingRule ( ) != p2 . getWindingRule ( ) ) { return false ; } PathIterator iterator1 = p1 . getPathIterator ( null ) ; PathIterator iterator2 = p2 . getPathIterator ( null ) ; double [ ] d1 = new double [ 6 ] ; double [ ] d2 = new double [ 6 ] ; boolean done = iterator1 . isDone ( ) && iterator2 . isDone ( ) ; while ( ! done ) { if ( iterator1 . isDone ( ) != iterator2 . isDone ( ) ) { return false ; } int seg1 = iterator1 . currentSegment ( d1 ) ; int seg2 = iterator2 . currentSegment ( d2 ) ; if ( seg1 != seg2 ) { return false ; } if ( ! Arrays . equals ( d1 , d2 ) ) { return false ; } iterator1 . next ( ) ; iterator2 . next ( ) ; done = iterator1 . isDone ( ) && iterator2 . isDone ( ) ; } return true ; } |
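The one-character fix above swaps `p1` for `p2` when obtaining the second iterator. A minimal regression-test sketch, assuming JUnit 4 and the JCommon `ShapeUtilities` on the classpath (test and method names here are illustrative):
```java
import static org.junit.Assert.assertFalse;

import java.awt.geom.GeneralPath;
import org.jfree.chart.util.ShapeUtilities;
import org.junit.Test;

public class ShapeUtilitiesEqualTest {

    // Two clearly different paths share the default winding rule; the buggy
    // version iterated p1 twice and therefore reported them as equal.
    @Test
    public void differentPathsAreNotEqual() {
        GeneralPath p1 = new GeneralPath();
        p1.moveTo(0f, 0f);
        p1.lineTo(1f, 1f);

        GeneralPath p2 = new GeneralPath();
        p2.moveTo(5f, 5f);
        p2.lineTo(6f, 6f);

        assertFalse(ShapeUtilities.equal(p1, p2));
    }
}
```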
Math | 87 | src/java/org/apache/commons/math/optimization/linear/SimplexTableau.java | 272 | 284 | Basic variable is not found correctly in simplex tableau | The last patch to SimplexTableau caused an automated test suite I'm running at work to go down a new code path and uncover what is hopefully the last bug remaining in the Simplex code.
SimplexTableau was assuming an entry in the tableau had to be nonzero to indicate a basic variable, which is incorrect - the entry should have a value equal to 1. | private Integer getBasicRow(final int col) {
Integer row = null;
for (int i = getNumObjectiveFunctions(); i < getHeight(); i++) {
if (!MathUtils.equals(getEntry(i, col), 0.0, epsilon)) {
if (row == null) {
row = i;
} else {
return null;
}
}
}
return row;
} | private Integer getBasicRow ( final int col ) { Integer row = null ; for ( int i = getNumObjectiveFunctions ( ) ; i < getHeight ( ) ; i ++ ) { if ( ! MathUtils . equals ( getEntry ( i , col ) , 0.0 , epsilon ) ) { if ( row == null ) { row = i ; } else { return null ; } } } return row ; } | private Integer getBasicRow(final int col) {
Integer row = null;
for (int i = getNumObjectiveFunctions(); i < getHeight(); i++) {
if (MathUtils.equals(getEntry(i, col), 1.0, epsilon) && (row == null)) {
row = i;
} else if (!MathUtils.equals(getEntry(i, col), 0.0, epsilon)) {
return null;
}
}
return row;
} | private Integer getBasicRow ( final int col ) { Integer row = null ; for ( int i = getNumObjectiveFunctions ( ) ; i < getHeight ( ) ; i ++ ) { if ( MathUtils . equals ( getEntry ( i , col ) , 1.0 , epsilon ) && ( row == null ) ) { row = i ; } else if ( ! MathUtils . equals ( getEntry ( i , col ) , 0.0 , epsilon ) ) { return null ; } } return row ; } |
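To make the "equal to 1" rule concrete: in a simplex tableau, the column of a basic variable is a unit vector, exactly one entry equal to 1 and zeros elsewhere, so a merely nonzero entry such as 2.0 must not be taken to mark a basic row. A standalone sketch of the fixed logic, detached from `SimplexTableau` for illustration:
```java
// Mirrors the fixed getBasicRow: return the row of the single 1.0 entry,
// or null when the column is not a unit vector (comparisons use epsilon).
static Integer basicRow(double[] column, double epsilon) {
    Integer row = null;
    for (int i = 0; i < column.length; i++) {
        if (Math.abs(column[i] - 1.0) <= epsilon && row == null) {
            row = i;
        } else if (Math.abs(column[i]) > epsilon) {
            return null; // a second nonzero entry, or a nonzero value other than 1
        }
    }
    return row;
}

// basicRow(new double[] {0, 1, 0}, 1e-6) -> 1    (unit column: basic)
// basicRow(new double[] {0, 2, 0}, 1e-6) -> null (nonzero but not 1)
// basicRow(new double[] {0, 1, 1}, 1e-6) -> null (two nonzero entries)
```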
Csv | 6 | src/main/java/org/apache/commons/csv/CSVRecord.java | 179 | 185 | CSVRecord.toMap() fails if row length shorter than header length | Similar to CSV-96, if .toMap() is called on a record that has fewer fields than we have header columns, we'll get an ArrayIndexOutOfBoundsException.
{code}
@Test
public void testToMapWhenHeaderTooLong() throws Exception {
final CSVParser parser = new CSVParser("a,b", CSVFormat.newBuilder().withHeader("A", "B", "C").build());
final CSVRecord record = parser.iterator().next();
record.toMap();
}
{code} | <M extends Map<String, String>> M putIn(final M map) {
for (final Entry<String, Integer> entry : mapping.entrySet()) {
final int col = entry.getValue().intValue();
map.put(entry.getKey(), values[col]);
}
return map;
} | < M extends Map < String , String > > M putIn ( final M map ) { for ( final Entry < String , Integer > entry : mapping . entrySet ( ) ) { final int col = entry . getValue ( ) . intValue ( ) ; map . put ( entry . getKey ( ) , values [ col ] ) ; } return map ; } | <M extends Map<String, String>> M putIn(final M map) {
for (final Entry<String, Integer> entry : mapping.entrySet()) {
final int col = entry.getValue().intValue();
if (col < values.length) {
map.put(entry.getKey(), values[col]);
}
}
return map;
} | < M extends Map < String , String > > M putIn ( final M map ) { for ( final Entry < String , Integer > entry : mapping . entrySet ( ) ) { final int col = entry . getValue ( ) . intValue ( ) ; if ( col < values . length ) { map . put ( entry . getKey ( ) , values [ col ] ) ; } } return map ; } |
Csv | 15 | src/main/java/org/apache/commons/csv/CSVFormat.java | 1151 | 1256 | Quote character behavior is not the same as Excel's when the first string contains CJK char(s) | When using CSVFormat.EXCEL to print a CSV file, the quoting behavior is not the same as Microsoft Excel's when the first string contains Chinese, Japanese or Korean (CJK) char(s).
e.g.
There are 3 fields in a record, each a Japanese char: "γ", "γ", "γ":
Microsoft Excel outputs:
γ,γ,γ
Apache Commons CSV outputs:
"γ",γ,γ
| private void printAndQuote(final Object object, final CharSequence value, final int offset, final int len,
final Appendable out, final boolean newRecord) throws IOException {
boolean quote = false;
int start = offset;
int pos = offset;
final int end = offset + len;
final char delimChar = getDelimiter();
final char quoteChar = getQuoteCharacter().charValue();
QuoteMode quoteModePolicy = getQuoteMode();
if (quoteModePolicy == null) {
quoteModePolicy = QuoteMode.MINIMAL;
}
switch (quoteModePolicy) {
case ALL:
case ALL_NON_NULL:
quote = true;
break;
case NON_NUMERIC:
quote = !(object instanceof Number);
break;
case NONE:
// Use the existing escaping code
printAndEscape(value, offset, len, out);
return;
case MINIMAL:
if (len <= 0) {
// always quote an empty token that is the first
// on the line, as it may be the only thing on the
// line. If it were not quoted in that case,
// an empty line has no tokens.
if (newRecord) {
quote = true;
}
} else {
char c = value.charAt(pos);
if (newRecord && (c < 0x20 || c > 0x21 && c < 0x23 || c > 0x2B && c < 0x2D || c > 0x7E)) {
quote = true;
} else if (c <= COMMENT) {
// Some other chars at the start of a value caused the parser to fail, so for now
// encapsulate if we start in anything less than '#'. We are being conservative
// by including the default comment char too.
quote = true;
} else {
while (pos < end) {
c = value.charAt(pos);
if (c == LF || c == CR || c == quoteChar || c == delimChar) {
quote = true;
break;
}
pos++;
}
if (!quote) {
pos = end - 1;
c = value.charAt(pos);
// Some other chars at the end caused the parser to fail, so for now
// encapsulate if we end in anything less than ' '
if (c <= SP) {
quote = true;
}
}
}
}
if (!quote) {
// no encapsulation needed - write out the original value
out.append(value, start, end);
return;
}
break;
default:
throw new IllegalStateException("Unexpected Quote value: " + quoteModePolicy);
}
if (!quote) {
// no encapsulation needed - write out the original value
out.append(value, start, end);
return;
}
// we hit something that needed encapsulation
out.append(quoteChar);
// Pick up where we left off: pos should be positioned on the first character that caused
// the need for encapsulation.
while (pos < end) {
final char c = value.charAt(pos);
if (c == quoteChar) {
// write out the chunk up until this point
// add 1 to the length to write out the encapsulator also
out.append(value, start, pos + 1);
// put the next starting position on the encapsulator so we will
// write it out again with the next string (effectively doubling it)
start = pos;
}
pos++;
}
// write the last segment
out.append(value, start, pos);
out.append(quoteChar);
} | private void printAndQuote ( final Object object , final CharSequence value , final int offset , final int len , final Appendable out , final boolean newRecord ) throws IOException { boolean quote = false ; int start = offset ; int pos = offset ; final int end = offset + len ; final char delimChar = getDelimiter ( ) ; final char quoteChar = getQuoteCharacter ( ) . charValue ( ) ; QuoteMode quoteModePolicy = getQuoteMode ( ) ; if ( quoteModePolicy == null ) { quoteModePolicy = QuoteMode . MINIMAL ; } switch ( quoteModePolicy ) { case ALL : case ALL_NON_NULL : quote = true ; break ; case NON_NUMERIC : quote = ! ( object instanceof Number ) ; break ; case NONE : printAndEscape ( value , offset , len , out ) ; return ; case MINIMAL : if ( len <= 0 ) { if ( newRecord ) { quote = true ; } } else { char c = value . charAt ( pos ) ; if ( newRecord && ( c < 0x20 || c > 0x21 && c < 0x23 || c > 0x2B && c < 0x2D || c > 0x7E ) ) { quote = true ; } else if ( c <= COMMENT ) { quote = true ; } else { while ( pos < end ) { c = value . charAt ( pos ) ; if ( c == LF || c == CR || c == quoteChar || c == delimChar ) { quote = true ; break ; } pos ++ ; } if ( ! quote ) { pos = end - 1 ; c = value . charAt ( pos ) ; if ( c <= SP ) { quote = true ; } } } } if ( ! quote ) { out . append ( value , start , end ) ; return ; } break ; default : throw new IllegalStateException ( "Unexpected Quote value: " + quoteModePolicy ) ; } if ( ! quote ) { out . append ( value , start , end ) ; return ; } out . append ( quoteChar ) ; while ( pos < end ) { final char c = value . charAt ( pos ) ; if ( c == quoteChar ) { out . append ( value , start , pos + 1 ) ; start = pos ; } pos ++ ; } out . append ( value , start , pos ) ; out . append ( quoteChar ) ; } | private void printAndQuote(final Object object, final CharSequence value, final int offset, final int len,
final Appendable out, final boolean newRecord) throws IOException {
boolean quote = false;
int start = offset;
int pos = offset;
final int end = offset + len;
final char delimChar = getDelimiter();
final char quoteChar = getQuoteCharacter().charValue();
QuoteMode quoteModePolicy = getQuoteMode();
if (quoteModePolicy == null) {
quoteModePolicy = QuoteMode.MINIMAL;
}
switch (quoteModePolicy) {
case ALL:
case ALL_NON_NULL:
quote = true;
break;
case NON_NUMERIC:
quote = !(object instanceof Number);
break;
case NONE:
// Use the existing escaping code
printAndEscape(value, offset, len, out);
return;
case MINIMAL:
if (len <= 0) {
// always quote an empty token that is the first
// on the line, as it may be the only thing on the
// line. If it were not quoted in that case,
// an empty line has no tokens.
if (newRecord) {
quote = true;
}
} else {
char c = value.charAt(pos);
if (c <= COMMENT) {
// Some other chars at the start of a value caused the parser to fail, so for now
// encapsulate if we start in anything less than '#'. We are being conservative
// by including the default comment char too.
quote = true;
} else {
while (pos < end) {
c = value.charAt(pos);
if (c == LF || c == CR || c == quoteChar || c == delimChar) {
quote = true;
break;
}
pos++;
}
if (!quote) {
pos = end - 1;
c = value.charAt(pos);
// Some other chars at the end caused the parser to fail, so for now
// encapsulate if we end in anything less than ' '
if (c <= SP) {
quote = true;
}
}
}
}
if (!quote) {
// no encapsulation needed - write out the original value
out.append(value, start, end);
return;
}
break;
default:
throw new IllegalStateException("Unexpected Quote value: " + quoteModePolicy);
}
if (!quote) {
// no encapsulation needed - write out the original value
out.append(value, start, end);
return;
}
// we hit something that needed encapsulation
out.append(quoteChar);
// Pick up where we left off: pos should be positioned on the first character that caused
// the need for encapsulation.
while (pos < end) {
final char c = value.charAt(pos);
if (c == quoteChar) {
// write out the chunk up until this point
// add 1 to the length to write out the encapsulator also
out.append(value, start, pos + 1);
// put the next starting position on the encapsulator so we will
// write it out again with the next string (effectively doubling it)
start = pos;
}
pos++;
}
// write the last segment
out.append(value, start, pos);
out.append(quoteChar);
} | private void printAndQuote ( final Object object , final CharSequence value , final int offset , final int len , final Appendable out , final boolean newRecord ) throws IOException { boolean quote = false ; int start = offset ; int pos = offset ; final int end = offset + len ; final char delimChar = getDelimiter ( ) ; final char quoteChar = getQuoteCharacter ( ) . charValue ( ) ; QuoteMode quoteModePolicy = getQuoteMode ( ) ; if ( quoteModePolicy == null ) { quoteModePolicy = QuoteMode . MINIMAL ; } switch ( quoteModePolicy ) { case ALL : case ALL_NON_NULL : quote = true ; break ; case NON_NUMERIC : quote = ! ( object instanceof Number ) ; break ; case NONE : printAndEscape ( value , offset , len , out ) ; return ; case MINIMAL : if ( len <= 0 ) { if ( newRecord ) { quote = true ; } } else { char c = value . charAt ( pos ) ; if ( c <= COMMENT ) { quote = true ; } else { while ( pos < end ) { c = value . charAt ( pos ) ; if ( c == LF || c == CR || c == quoteChar || c == delimChar ) { quote = true ; break ; } pos ++ ; } if ( ! quote ) { pos = end - 1 ; c = value . charAt ( pos ) ; if ( c <= SP ) { quote = true ; } } } } if ( ! quote ) { out . append ( value , start , end ) ; return ; } break ; default : throw new IllegalStateException ( "Unexpected Quote value: " + quoteModePolicy ) ; } if ( ! quote ) { out . append ( value , start , end ) ; return ; } out . append ( quoteChar ) ; while ( pos < end ) { final char c = value . charAt ( pos ) ; if ( c == quoteChar ) { out . append ( value , start , pos + 1 ) ; start = pos ; } pos ++ ; } out . append ( value , start , pos ) ; out . append ( quoteChar ) ; } |
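A quick way to observe the changed behavior through the public `CSVPrinter` API; the kana literals below stand in for the issue's example characters, which did not survive encoding:
```java
import java.io.StringWriter;

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;

public class ExcelQuotingDemo {
    public static void main(String[] args) throws Exception {
        StringWriter out = new StringWriter();
        try (CSVPrinter printer = new CSVPrinter(out, CSVFormat.EXCEL)) {
            // Before the fix, a record-initial character outside the ASCII
            // ranges checked by printAndQuote triggered quoting; Excel
            // itself leaves such values unquoted.
            printer.printRecord("\u3042", "\u3044", "\u3046");
        }
        // Fixed output matches Excel: あ,い,う  (no quotes on the first field)
        System.out.print(out);
    }
}
```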
Compress | 10 | src/main/java/org/apache/commons/compress/archivers/zip/ZipFile.java | 801 | 843 | Cannot Read Winzip Archives With Unicode Extra Fields | I have a zip file created with WinZip containing Unicode extra fields. Upon attempting to extract it with org.apache.commons.compress.archivers.zip.ZipFile, ZipFile.getInputStream() returns null for ZipArchiveEntries previously retrieved with ZipFile.getEntry() or even ZipFile.getEntries(). See UTF8ZipFilesTest.patch in the attachments for a test case exposing the bug. The original test case stopped short of trying to read the entries, that's why this wasn't flagged up before.
The problem lies in the fact that inside ZipFile.java entries are stored in a HashMap. However, at one point after populating the HashMap, the unicode extra fields are read, which leads to a change of the ZipArchiveEntry name, and therefore a change of its hash code. Because of this, subsequent gets on the HashMap fail to retrieve the original values.
ZipFile.patch contains an (admittedly simple-minded) fix for this problem by reconstructing the entries HashMap after the Unicode extra fields have been parsed. The purpose of this patch is mainly to show that the problem is indeed what I think, rather than providing a well-designed solution.
The patches have been tested against revision 1210416. | private void resolveLocalFileHeaderData(Map<ZipArchiveEntry, NameAndComment>
entriesWithoutUTF8Flag)
throws IOException {
// changing the name of a ZipArchiveEntry is going to change
// the hashcode - see COMPRESS-164
// Map needs to be reconstructed in order to keep central
// directory order
for (ZipArchiveEntry ze : entries.keySet()) {
OffsetEntry offsetEntry = entries.get(ze);
long offset = offsetEntry.headerOffset;
archive.seek(offset + LFH_OFFSET_FOR_FILENAME_LENGTH);
byte[] b = new byte[SHORT];
archive.readFully(b);
int fileNameLen = ZipShort.getValue(b);
archive.readFully(b);
int extraFieldLen = ZipShort.getValue(b);
int lenToSkip = fileNameLen;
while (lenToSkip > 0) {
int skipped = archive.skipBytes(lenToSkip);
if (skipped <= 0) {
throw new RuntimeException("failed to skip file name in"
+ " local file header");
}
lenToSkip -= skipped;
}
byte[] localExtraData = new byte[extraFieldLen];
archive.readFully(localExtraData);
ze.setExtra(localExtraData);
offsetEntry.dataOffset = offset + LFH_OFFSET_FOR_FILENAME_LENGTH
+ SHORT + SHORT + fileNameLen + extraFieldLen;
if (entriesWithoutUTF8Flag.containsKey(ze)) {
String orig = ze.getName();
NameAndComment nc = entriesWithoutUTF8Flag.get(ze);
ZipUtil.setNameAndCommentFromExtraFields(ze, nc.name,
nc.comment);
if (!orig.equals(ze.getName())) {
nameMap.remove(orig);
nameMap.put(ze.getName(), ze);
}
}
}
} | private void resolveLocalFileHeaderData ( Map < ZipArchiveEntry , NameAndComment > entriesWithoutUTF8Flag ) throws IOException { for ( ZipArchiveEntry ze : entries . keySet ( ) ) { OffsetEntry offsetEntry = entries . get ( ze ) ; long offset = offsetEntry . headerOffset ; archive . seek ( offset + LFH_OFFSET_FOR_FILENAME_LENGTH ) ; byte [ ] b = new byte [ SHORT ] ; archive . readFully ( b ) ; int fileNameLen = ZipShort . getValue ( b ) ; archive . readFully ( b ) ; int extraFieldLen = ZipShort . getValue ( b ) ; int lenToSkip = fileNameLen ; while ( lenToSkip > 0 ) { int skipped = archive . skipBytes ( lenToSkip ) ; if ( skipped <= 0 ) { throw new RuntimeException ( "failed to skip file name in" + " local file header" ) ; } lenToSkip -= skipped ; } byte [ ] localExtraData = new byte [ extraFieldLen ] ; archive . readFully ( localExtraData ) ; ze . setExtra ( localExtraData ) ; offsetEntry . dataOffset = offset + LFH_OFFSET_FOR_FILENAME_LENGTH + SHORT + SHORT + fileNameLen + extraFieldLen ; if ( entriesWithoutUTF8Flag . containsKey ( ze ) ) { String orig = ze . getName ( ) ; NameAndComment nc = entriesWithoutUTF8Flag . get ( ze ) ; ZipUtil . setNameAndCommentFromExtraFields ( ze , nc . name , nc . comment ) ; if ( ! orig . equals ( ze . getName ( ) ) ) { nameMap . remove ( orig ) ; nameMap . put ( ze . getName ( ) , ze ) ; } } } } | private void resolveLocalFileHeaderData(Map<ZipArchiveEntry, NameAndComment>
entriesWithoutUTF8Flag)
throws IOException {
// changing the name of a ZipArchiveEntry is going to change
// the hashcode - see COMPRESS-164
// Map needs to be reconstructed in order to keep central
// directory order
Map<ZipArchiveEntry, OffsetEntry> origMap =
new LinkedHashMap<ZipArchiveEntry, OffsetEntry>(entries);
entries.clear();
for (ZipArchiveEntry ze : origMap.keySet()) {
OffsetEntry offsetEntry = origMap.get(ze);
long offset = offsetEntry.headerOffset;
archive.seek(offset + LFH_OFFSET_FOR_FILENAME_LENGTH);
byte[] b = new byte[SHORT];
archive.readFully(b);
int fileNameLen = ZipShort.getValue(b);
archive.readFully(b);
int extraFieldLen = ZipShort.getValue(b);
int lenToSkip = fileNameLen;
while (lenToSkip > 0) {
int skipped = archive.skipBytes(lenToSkip);
if (skipped <= 0) {
throw new RuntimeException("failed to skip file name in"
+ " local file header");
}
lenToSkip -= skipped;
}
byte[] localExtraData = new byte[extraFieldLen];
archive.readFully(localExtraData);
ze.setExtra(localExtraData);
offsetEntry.dataOffset = offset + LFH_OFFSET_FOR_FILENAME_LENGTH
+ SHORT + SHORT + fileNameLen + extraFieldLen;
if (entriesWithoutUTF8Flag.containsKey(ze)) {
String orig = ze.getName();
NameAndComment nc = entriesWithoutUTF8Flag.get(ze);
ZipUtil.setNameAndCommentFromExtraFields(ze, nc.name,
nc.comment);
if (!orig.equals(ze.getName())) {
nameMap.remove(orig);
nameMap.put(ze.getName(), ze);
}
}
entries.put(ze, offsetEntry);
}
} | private void resolveLocalFileHeaderData ( Map < ZipArchiveEntry , NameAndComment > entriesWithoutUTF8Flag ) throws IOException { Map < ZipArchiveEntry , OffsetEntry > origMap = new LinkedHashMap < ZipArchiveEntry , OffsetEntry > ( entries ) ; entries . clear ( ) ; for ( ZipArchiveEntry ze : origMap . keySet ( ) ) { OffsetEntry offsetEntry = origMap . get ( ze ) ; long offset = offsetEntry . headerOffset ; archive . seek ( offset + LFH_OFFSET_FOR_FILENAME_LENGTH ) ; byte [ ] b = new byte [ SHORT ] ; archive . readFully ( b ) ; int fileNameLen = ZipShort . getValue ( b ) ; archive . readFully ( b ) ; int extraFieldLen = ZipShort . getValue ( b ) ; int lenToSkip = fileNameLen ; while ( lenToSkip > 0 ) { int skipped = archive . skipBytes ( lenToSkip ) ; if ( skipped <= 0 ) { throw new RuntimeException ( "failed to skip file name in" + " local file header" ) ; } lenToSkip -= skipped ; } byte [ ] localExtraData = new byte [ extraFieldLen ] ; archive . readFully ( localExtraData ) ; ze . setExtra ( localExtraData ) ; offsetEntry . dataOffset = offset + LFH_OFFSET_FOR_FILENAME_LENGTH + SHORT + SHORT + fileNameLen + extraFieldLen ; if ( entriesWithoutUTF8Flag . containsKey ( ze ) ) { String orig = ze . getName ( ) ; NameAndComment nc = entriesWithoutUTF8Flag . get ( ze ) ; ZipUtil . setNameAndCommentFromExtraFields ( ze , nc . name , nc . comment ) ; if ( ! orig . equals ( ze . getName ( ) ) ) { nameMap . remove ( orig ) ; nameMap . put ( ze . getName ( ) , ze ) ; } } entries . put ( ze , offsetEntry ) ; } } |
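The root cause is a general Java pitfall rather than anything ZIP-specific: mutating a field that feeds an object's `hashCode()` after the object has been stored as a `HashMap` key strands the entry in a stale bucket. A self-contained illustration (not Compress code):
```java
import java.util.HashMap;
import java.util.Map;

public class MutableKeyDemo {

    // A key whose hash code depends on a mutable field, like
    // ZipArchiveEntry's name before the fix.
    static class Key {
        String name;
        Key(String name) { this.name = name; }
        @Override public int hashCode() { return name.hashCode(); }
        @Override public boolean equals(Object o) {
            return o instanceof Key && ((Key) o).name.equals(name);
        }
    }

    public static void main(String[] args) {
        Map<Key, String> map = new HashMap<>();
        Key k = new Key("old");
        map.put(k, "value");

        k.name = "new";                  // hash code changes, bucket does not
        System.out.println(map.get(k));  // prints: null
    }
}
```
The fix sidesteps the pitfall by copying `entries` into a `LinkedHashMap` (preserving central-directory order) and re-inserting each entry only after its name may have changed.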
Math | 48 | src/main/java/org/apache/commons/math/analysis/solvers/BaseSecantSolver.java | 129 | 251 | "RegulaFalsiSolver" failure | The following unit test:
{code}
@Test
public void testBug() {
final UnivariateRealFunction f = new UnivariateRealFunction() {
@Override
public double value(double x) {
return Math.exp(x) - Math.pow(Math.PI, 3.0);
}
};
UnivariateRealSolver solver = new RegulaFalsiSolver();
double root = solver.solve(100, f, 1, 10);
}
{code}
fails with
{noformat}
illegal state: maximal count (100) exceeded: evaluations
{noformat}
Using "PegasusSolver", the answer is found after 17 evaluations.
| protected final double doSolve() {
// Get initial solution
double x0 = getMin();
double x1 = getMax();
double f0 = computeObjectiveValue(x0);
double f1 = computeObjectiveValue(x1);
// If one of the bounds is the exact root, return it. Since these are
// not under-approximations or over-approximations, we can return them
// regardless of the allowed solutions.
if (f0 == 0.0) {
return x0;
}
if (f1 == 0.0) {
return x1;
}
// Verify bracketing of initial solution.
verifyBracketing(x0, x1);
// Get accuracies.
final double ftol = getFunctionValueAccuracy();
final double atol = getAbsoluteAccuracy();
final double rtol = getRelativeAccuracy();
// Keep track of inverted intervals, meaning that the left bound is
// larger than the right bound.
boolean inverted = false;
// Keep finding better approximations.
while (true) {
// Calculate the next approximation.
final double x = x1 - ((f1 * (x1 - x0)) / (f1 - f0));
final double fx = computeObjectiveValue(x);
// If the new approximation is the exact root, return it. Since
// this is not an under-approximation or an over-approximation,
// we can return it regardless of the allowed solutions.
if (fx == 0.0) {
return x;
}
// Update the bounds with the new approximation.
if (f1 * fx < 0) {
// The value of x1 has switched to the other bound, thus inverting
// the interval.
x0 = x1;
f0 = f1;
inverted = !inverted;
} else {
switch (method) {
case ILLINOIS:
f0 *= 0.5;
break;
case PEGASUS:
f0 *= f1 / (f1 + fx);
break;
case REGULA_FALSI:
// Detect early that algorithm is stuck, instead of waiting
// for the maximum number of iterations to be exceeded.
break;
default:
// Should never happen.
throw new MathInternalError();
}
}
// Update from [x0, x1] to [x0, x].
x1 = x;
f1 = fx;
// If the function value of the last approximation is too small,
// given the function value accuracy, then we can't get closer to
// the root than we already are.
if (FastMath.abs(f1) <= ftol) {
switch (allowed) {
case ANY_SIDE:
return x1;
case LEFT_SIDE:
if (inverted) {
return x1;
}
break;
case RIGHT_SIDE:
if (!inverted) {
return x1;
}
break;
case BELOW_SIDE:
if (f1 <= 0) {
return x1;
}
break;
case ABOVE_SIDE:
if (f1 >= 0) {
return x1;
}
break;
default:
throw new MathInternalError();
}
}
// If the current interval is within the given accuracies, we
// are satisfied with the current approximation.
if (FastMath.abs(x1 - x0) < FastMath.max(rtol * FastMath.abs(x1),
atol)) {
switch (allowed) {
case ANY_SIDE:
return x1;
case LEFT_SIDE:
return inverted ? x1 : x0;
case RIGHT_SIDE:
return inverted ? x0 : x1;
case BELOW_SIDE:
return (f1 <= 0) ? x1 : x0;
case ABOVE_SIDE:
return (f1 >= 0) ? x1 : x0;
default:
throw new MathInternalError();
}
}
}
} | protected final double doSolve ( ) { double x0 = getMin ( ) ; double x1 = getMax ( ) ; double f0 = computeObjectiveValue ( x0 ) ; double f1 = computeObjectiveValue ( x1 ) ; if ( f0 == 0.0 ) { return x0 ; } if ( f1 == 0.0 ) { return x1 ; } verifyBracketing ( x0 , x1 ) ; final double ftol = getFunctionValueAccuracy ( ) ; final double atol = getAbsoluteAccuracy ( ) ; final double rtol = getRelativeAccuracy ( ) ; boolean inverted = false ; while ( true ) { final double x = x1 - ( ( f1 * ( x1 - x0 ) ) / ( f1 - f0 ) ) ; final double fx = computeObjectiveValue ( x ) ; if ( fx == 0.0 ) { return x ; } if ( f1 * fx < 0 ) { x0 = x1 ; f0 = f1 ; inverted = ! inverted ; } else { switch ( method ) { case ILLINOIS : f0 *= 0.5 ; break ; case PEGASUS : f0 *= f1 / ( f1 + fx ) ; break ; case REGULA_FALSI : break ; default : throw new MathInternalError ( ) ; } } x1 = x ; f1 = fx ; if ( FastMath . abs ( f1 ) <= ftol ) { switch ( allowed ) { case ANY_SIDE : return x1 ; case LEFT_SIDE : if ( inverted ) { return x1 ; } break ; case RIGHT_SIDE : if ( ! inverted ) { return x1 ; } break ; case BELOW_SIDE : if ( f1 <= 0 ) { return x1 ; } break ; case ABOVE_SIDE : if ( f1 >= 0 ) { return x1 ; } break ; default : throw new MathInternalError ( ) ; } } if ( FastMath . abs ( x1 - x0 ) < FastMath . max ( rtol * FastMath . abs ( x1 ) , atol ) ) { switch ( allowed ) { case ANY_SIDE : return x1 ; case LEFT_SIDE : return inverted ? x1 : x0 ; case RIGHT_SIDE : return inverted ? x0 : x1 ; case BELOW_SIDE : return ( f1 <= 0 ) ? x1 : x0 ; case ABOVE_SIDE : return ( f1 >= 0 ) ? x1 : x0 ; default : throw new MathInternalError ( ) ; } } } } | protected final double doSolve() {
// Get initial solution
double x0 = getMin();
double x1 = getMax();
double f0 = computeObjectiveValue(x0);
double f1 = computeObjectiveValue(x1);
// If one of the bounds is the exact root, return it. Since these are
// not under-approximations or over-approximations, we can return them
// regardless of the allowed solutions.
if (f0 == 0.0) {
return x0;
}
if (f1 == 0.0) {
return x1;
}
// Verify bracketing of initial solution.
verifyBracketing(x0, x1);
// Get accuracies.
final double ftol = getFunctionValueAccuracy();
final double atol = getAbsoluteAccuracy();
final double rtol = getRelativeAccuracy();
// Keep track of inverted intervals, meaning that the left bound is
// larger than the right bound.
boolean inverted = false;
// Keep finding better approximations.
while (true) {
// Calculate the next approximation.
final double x = x1 - ((f1 * (x1 - x0)) / (f1 - f0));
final double fx = computeObjectiveValue(x);
// If the new approximation is the exact root, return it. Since
// this is not an under-approximation or an over-approximation,
// we can return it regardless of the allowed solutions.
if (fx == 0.0) {
return x;
}
// Update the bounds with the new approximation.
if (f1 * fx < 0) {
// The value of x1 has switched to the other bound, thus inverting
// the interval.
x0 = x1;
f0 = f1;
inverted = !inverted;
} else {
switch (method) {
case ILLINOIS:
f0 *= 0.5;
break;
case PEGASUS:
f0 *= f1 / (f1 + fx);
break;
case REGULA_FALSI:
// Detect early that algorithm is stuck, instead of waiting
// for the maximum number of iterations to be exceeded.
if (x == x1) {
throw new ConvergenceException();
}
break;
default:
// Should never happen.
throw new MathInternalError();
}
}
// Update from [x0, x1] to [x0, x].
x1 = x;
f1 = fx;
// If the function value of the last approximation is too small,
// given the function value accuracy, then we can't get closer to
// the root than we already are.
if (FastMath.abs(f1) <= ftol) {
switch (allowed) {
case ANY_SIDE:
return x1;
case LEFT_SIDE:
if (inverted) {
return x1;
}
break;
case RIGHT_SIDE:
if (!inverted) {
return x1;
}
break;
case BELOW_SIDE:
if (f1 <= 0) {
return x1;
}
break;
case ABOVE_SIDE:
if (f1 >= 0) {
return x1;
}
break;
default:
throw new MathInternalError();
}
}
// If the current interval is within the given accuracies, we
// are satisfied with the current approximation.
if (FastMath.abs(x1 - x0) < FastMath.max(rtol * FastMath.abs(x1),
atol)) {
switch (allowed) {
case ANY_SIDE:
return x1;
case LEFT_SIDE:
return inverted ? x1 : x0;
case RIGHT_SIDE:
return inverted ? x0 : x1;
case BELOW_SIDE:
return (f1 <= 0) ? x1 : x0;
case ABOVE_SIDE:
return (f1 >= 0) ? x1 : x0;
default:
throw new MathInternalError();
}
}
}
} | protected final double doSolve ( ) { double x0 = getMin ( ) ; double x1 = getMax ( ) ; double f0 = computeObjectiveValue ( x0 ) ; double f1 = computeObjectiveValue ( x1 ) ; if ( f0 == 0.0 ) { return x0 ; } if ( f1 == 0.0 ) { return x1 ; } verifyBracketing ( x0 , x1 ) ; final double ftol = getFunctionValueAccuracy ( ) ; final double atol = getAbsoluteAccuracy ( ) ; final double rtol = getRelativeAccuracy ( ) ; boolean inverted = false ; while ( true ) { final double x = x1 - ( ( f1 * ( x1 - x0 ) ) / ( f1 - f0 ) ) ; final double fx = computeObjectiveValue ( x ) ; if ( fx == 0.0 ) { return x ; } if ( f1 * fx < 0 ) { x0 = x1 ; f0 = f1 ; inverted = ! inverted ; } else { switch ( method ) { case ILLINOIS : f0 *= 0.5 ; break ; case PEGASUS : f0 *= f1 / ( f1 + fx ) ; break ; case REGULA_FALSI : if ( x == x1 ) { throw new ConvergenceException ( ) ; } break ; default : throw new MathInternalError ( ) ; } } x1 = x ; f1 = fx ; if ( FastMath . abs ( f1 ) <= ftol ) { switch ( allowed ) { case ANY_SIDE : return x1 ; case LEFT_SIDE : if ( inverted ) { return x1 ; } break ; case RIGHT_SIDE : if ( ! inverted ) { return x1 ; } break ; case BELOW_SIDE : if ( f1 <= 0 ) { return x1 ; } break ; case ABOVE_SIDE : if ( f1 >= 0 ) { return x1 ; } break ; default : throw new MathInternalError ( ) ; } } if ( FastMath . abs ( x1 - x0 ) < FastMath . max ( rtol * FastMath . abs ( x1 ) , atol ) ) { switch ( allowed ) { case ANY_SIDE : return x1 ; case LEFT_SIDE : return inverted ? x1 : x0 ; case RIGHT_SIDE : return inverted ? x0 : x1 ; case BELOW_SIDE : return ( f1 <= 0 ) ? x1 : x0 ; case ABOVE_SIDE : return ( f1 >= 0 ) ? x1 : x0 ; default : throw new MathInternalError ( ) ; } } } } |
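The issue's unit test already pins down the failure; for intuition about why unmodified regula falsi crawls on this bracket, here is a plain-Java sketch, independent of Commons Math. The retained endpoint x0 = 10 carries a huge function value (about 2.2e4) that is never rescaled, so every secant step toward the root ln(π³) ≈ 3.434 stays tiny:
```java
public class RegulaFalsiStall {

    static double f(double x) { return Math.exp(x) - Math.pow(Math.PI, 3); }

    public static void main(String[] args) {
        double x0 = 1, x1 = 10;
        double f0 = f(x0), f1 = f(x1);
        for (int i = 0; i < 100; i++) {
            double x = x1 - f1 * (x1 - x0) / (f1 - f0);
            double fx = f(x);
            if (f1 * fx < 0) { // crossed the root: old x1 becomes the far end
                x0 = x1;
                f0 = f1;
            }                  // Illinois/Pegasus would rescale f0 here
            x1 = x;
            f1 = fx;
        }
        // After 100 evaluations x1 is still around 2, well short of 3.434.
        System.out.println("x1 after 100 iterations: " + x1);
    }
}
```
The fix does not change this convergence behavior; per its own comment, it detects exact stagnation (`x == x1`) and throws `ConvergenceException` early instead of waiting for the evaluation budget to run out.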
JacksonDatabind | 83 | src/main/java/com/fasterxml/jackson/databind/deser/std/FromStringDeserializer.java | 103 | 159 | `FromStringDeserializer` ignores registered `DeserializationProblemHandler` for `java.util.UUID` | Culprit appears to be [lines 155-161 of FromStringDeserializer](https://github.com/FasterXML/jackson-databind/blob/60ae6000d361f910ab0d7d269a5bac1fc66f4cd9/src/main/java/com/fasterxml/jackson/databind/deser/std/FromStringDeserializer.java#L155-L161):
```
// 05-May-2016, tatu: Unlike most usage, this seems legit, so...
JsonMappingException e = ctxt.weirdStringException(text, _valueClass, msg);
if (cause != null) {
e.initCause(cause);
}
throw e;
// nothing to do here, yet? We'll fail anyway
```
The above lines appear to show that the exception will be thrown regardless of any problem handling logic.
Test Case:
```
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.deser.DeserializationProblemHandler;
import org.junit.Test;
import java.io.IOException;
import java.util.UUID;
public class UUIDDeserializerTest {
@Test
public void itUsesDeserializationProblemHandlerProperly() throws IOException {
ObjectMapper mapper = new ObjectMapper().addHandler(new DeserializationProblemHandler() {
@Override
public Object handleWeirdStringValue(final DeserializationContext ctxt, final Class<?> targetType, final String valueToConvert, final String failureMsg) throws IOException {
return null;
}
});
mapper.readValue("{\"id\" : \"I am not a UUID\"}", IdBean.class);
}
public static class IdBean {
private UUID id;
public UUID getId() {
return id;
}
public void setId(final UUID id) {
this.id = id;
}
}
}
```
The handler handles the issue properly, but an exception is thrown anyway:
```
Can not deserialize value of type java.util.UUID from String "I am not a UUID": not a valid textual representation
at [Source: (String)"{"id" : "I am not a UUID"}"; line: 1, column: 9] (through reference chain: com.company.test.UUIDDeserializerTest$IdBean["id"])
com.fasterxml.jackson.databind.exc.InvalidFormatException: Can not deserialize value of type java.util.UUID from String "I am not a UUID": not a valid textual representation
at [Source: (String)"{"id" : "I am not a UUID"}"; line: 1, column: 9] (through reference chain: com.company.test.UUIDDeserializerTest$IdBean["id"])
at com.fasterxml.jackson.databind.exc.InvalidFormatException.from(InvalidFormatException.java:67)
at com.fasterxml.jackson.databind.DeserializationContext.weirdStringException(DeserializationContext.java:1504)
at com.fasterxml.jackson.databind.deser.std.FromStringDeserializer.deserialize(FromStringDeserializer.java:156)
at com.fasterxml.jackson.databind.deser.impl.MethodProperty.deserializeAndSet(MethodProperty.java:127)
at com.fasterxml.jackson.databind.deser.BeanDeserializer.vanillaDeserialize(BeanDeserializer.java:287)
at com.fasterxml.jackson.databind.deser.BeanDeserializer.deserialize(BeanDeserializer.java:151)
at com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:3999)
at com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:2980)
```
| @SuppressWarnings("unchecked")
@Override
public T deserialize(JsonParser p, DeserializationContext ctxt) throws IOException
{
// 22-Sep-2012, tatu: For 2.1, use this new method, may force coercion:
String text = p.getValueAsString();
if (text != null) { // has String representation
if (text.length() == 0 || (text = text.trim()).length() == 0) {
// 04-Feb-2013, tatu: Usually should become null; but not always
return _deserializeFromEmptyString();
}
Exception cause = null;
try {
// 19-May-2017, tatu: Used to require non-null result (assuming `null`
// indicated error; but that seems wrong. Should be able to return
// `null` as value.
if (_deserialize(text, ctxt) != null) {
return _deserialize(text, ctxt);
}
} catch (IllegalArgumentException iae) {
cause = iae;
} catch (MalformedURLException me) {
cause = me;
}
String msg = "not a valid textual representation";
if (cause != null) {
String m2 = cause.getMessage();
if (m2 != null) {
msg = msg + ", problem: "+m2;
}
}
// 05-May-2016, tatu: Unlike most usage, this seems legit, so...
JsonMappingException e = ctxt.weirdStringException(text, _valueClass, msg);
if (cause != null) {
e.initCause(cause);
}
throw e;
// nothing to do here, yet? We'll fail anyway
}
JsonToken t = p.getCurrentToken();
// [databind#381]
if (t == JsonToken.START_ARRAY) {
return _deserializeFromArray(p, ctxt);
}
if (t == JsonToken.VALUE_EMBEDDED_OBJECT) {
// Trivial cases; null to null, instance of type itself returned as is
Object ob = p.getEmbeddedObject();
if (ob == null) {
return null;
}
if (_valueClass.isAssignableFrom(ob.getClass())) {
return (T) ob;
}
return _deserializeEmbedded(ob, ctxt);
}
return (T) ctxt.handleUnexpectedToken(_valueClass, p);
} | @ SuppressWarnings ( "unchecked" ) @ Override public T deserialize ( JsonParser p , DeserializationContext ctxt ) throws IOException { String text = p . getValueAsString ( ) ; if ( text != null ) { if ( text . length ( ) == 0 || ( text = text . trim ( ) ) . length ( ) == 0 ) { return _deserializeFromEmptyString ( ) ; } Exception cause = null ; try { if ( _deserialize ( text , ctxt ) != null ) { return _deserialize ( text , ctxt ) ; } } catch ( IllegalArgumentException iae ) { cause = iae ; } catch ( MalformedURLException me ) { cause = me ; } String msg = "not a valid textual representation" ; if ( cause != null ) { String m2 = cause . getMessage ( ) ; if ( m2 != null ) { msg = msg + ", problem: " + m2 ; } } JsonMappingException e = ctxt . weirdStringException ( text , _valueClass , msg ) ; if ( cause != null ) { e . initCause ( cause ) ; } throw e ; } JsonToken t = p . getCurrentToken ( ) ; if ( t == JsonToken . START_ARRAY ) { return _deserializeFromArray ( p , ctxt ) ; } if ( t == JsonToken . VALUE_EMBEDDED_OBJECT ) { Object ob = p . getEmbeddedObject ( ) ; if ( ob == null ) { return null ; } if ( _valueClass . isAssignableFrom ( ob . getClass ( ) ) ) { return ( T ) ob ; } return _deserializeEmbedded ( ob , ctxt ) ; } return ( T ) ctxt . handleUnexpectedToken ( _valueClass , p ) ; } | @SuppressWarnings("unchecked")
@Override
public T deserialize(JsonParser p, DeserializationContext ctxt) throws IOException
{
// 22-Sep-2012, tatu: For 2.1, use this new method, may force coercion:
String text = p.getValueAsString();
if (text != null) { // has String representation
if (text.length() == 0 || (text = text.trim()).length() == 0) {
// 04-Feb-2013, tatu: Usually should become null; but not always
return _deserializeFromEmptyString();
}
Exception cause = null;
try {
// 19-May-2017, tatu: Used to require non-null result (assuming `null`
// indicated error; but that seems wrong. Should be able to return
// `null` as value.
return _deserialize(text, ctxt);
} catch (IllegalArgumentException iae) {
cause = iae;
} catch (MalformedURLException me) {
cause = me;
}
String msg = "not a valid textual representation";
if (cause != null) {
String m2 = cause.getMessage();
if (m2 != null) {
msg = msg + ", problem: "+m2;
}
}
// 05-May-2016, tatu: Unlike most usage, this seems legit, so...
JsonMappingException e = ctxt.weirdStringException(text, _valueClass, msg);
if (cause != null) {
e.initCause(cause);
}
throw e;
// nothing to do here, yet? We'll fail anyway
}
JsonToken t = p.getCurrentToken();
// [databind#381]
if (t == JsonToken.START_ARRAY) {
return _deserializeFromArray(p, ctxt);
}
if (t == JsonToken.VALUE_EMBEDDED_OBJECT) {
// Trivial cases; null to null, instance of type itself returned as is
Object ob = p.getEmbeddedObject();
if (ob == null) {
return null;
}
if (_valueClass.isAssignableFrom(ob.getClass())) {
return (T) ob;
}
return _deserializeEmbedded(ob, ctxt);
}
return (T) ctxt.handleUnexpectedToken(_valueClass, p);
} | @ SuppressWarnings ( "unchecked" ) @ Override public T deserialize ( JsonParser p , DeserializationContext ctxt ) throws IOException { String text = p . getValueAsString ( ) ; if ( text != null ) { if ( text . length ( ) == 0 || ( text = text . trim ( ) ) . length ( ) == 0 ) { return _deserializeFromEmptyString ( ) ; } Exception cause = null ; try { return _deserialize ( text , ctxt ) ; } catch ( IllegalArgumentException iae ) { cause = iae ; } catch ( MalformedURLException me ) { cause = me ; } String msg = "not a valid textual representation" ; if ( cause != null ) { String m2 = cause . getMessage ( ) ; if ( m2 != null ) { msg = msg + ", problem: " + m2 ; } } JsonMappingException e = ctxt . weirdStringException ( text , _valueClass , msg ) ; if ( cause != null ) { e . initCause ( cause ) ; } throw e ; } JsonToken t = p . getCurrentToken ( ) ; if ( t == JsonToken . START_ARRAY ) { return _deserializeFromArray ( p , ctxt ) ; } if ( t == JsonToken . VALUE_EMBEDDED_OBJECT ) { Object ob = p . getEmbeddedObject ( ) ; if ( ob == null ) { return null ; } if ( _valueClass . isAssignableFrom ( ob . getClass ( ) ) ) { return ( T ) ob ; } return _deserializeEmbedded ( ob , ctxt ) ; } return ( T ) ctxt . handleUnexpectedToken ( _valueClass , p ) ; } |
JacksonDatabind | 7 | src/main/java/com/fasterxml/jackson/databind/util/TokenBuffer.java | 403 | 411 | Possibly wrong `TokenBuffer` delegate deserialization using `@JsonCreator` | ``` java
class Value {
@JsonCreator
public static Value from(TokenBuffer buffer) {
...
}
```
Given the JSON string `{ "a":1, "b":null }`, it is expected that while deserializing via the delegate buffer,
the current token will be the start object `{`, and the rest of the tokens will be available in the buffer:
```
[START_OBJECT, FIELD_NAME, VALUE_NUMBER_INT, FIELD_NAME, VALUE_NULL, END_OBJECT]
```
But the buffer ends up starting with a field name and then contains a single attribute value
```
[FIELD_NAME, VALUE_NUMBER_INT]
```
It's due to how `TokenBuffer#copyCurrentStructure` works when the current token is a `FIELD_NAME` rather than `START_OBJECT`, because the parser has already been forced to advance to the next token [BeanDeserializer.java:120](https://github.com/FasterXML/jackson-databind/blob/2.4/src/main/java/com/fasterxml/jackson/databind/deser/BeanDeserializer.java#L120)
Hope this helps to nail it down. Is this intended behavior, or is it a regression/bug?
| public TokenBuffer deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException
{
copyCurrentStructure(jp);
/* 28-Oct-2014, tatu: As per #592, need to support a special case of starting from
* FIELD_NAME, which is taken to mean that we are missing START_OBJECT, but need
* to assume one did exist.
*/
return this;
} | public TokenBuffer deserialize ( JsonParser jp , DeserializationContext ctxt ) throws IOException { copyCurrentStructure ( jp ) ; return this ; } | public TokenBuffer deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException
{
if (jp.getCurrentTokenId() != JsonToken.FIELD_NAME.id()) {
copyCurrentStructure(jp);
return this;
}
/* 28-Oct-2014, tatu: As per #592, need to support a special case of starting from
* FIELD_NAME, which is taken to mean that we are missing START_OBJECT, but need
* to assume one did exist.
*/
JsonToken t;
writeStartObject();
do {
copyCurrentStructure(jp);
} while ((t = jp.nextToken()) == JsonToken.FIELD_NAME);
if (t != JsonToken.END_OBJECT) {
throw ctxt.mappingException("Expected END_OBJECT after copying contents of a JsonParser into TokenBuffer, got "+t);
}
writeEndObject();
return this;
} | public TokenBuffer deserialize ( JsonParser jp , DeserializationContext ctxt ) throws IOException { if ( jp . getCurrentTokenId ( ) != JsonToken . FIELD_NAME . id ( ) ) { copyCurrentStructure ( jp ) ; return this ; } JsonToken t ; writeStartObject ( ) ; do { copyCurrentStructure ( jp ) ; } while ( ( t = jp . nextToken ( ) ) == JsonToken . FIELD_NAME ) ; if ( t != JsonToken . END_OBJECT ) { throw ctxt . mappingException ( "Expected END_OBJECT after copying contents of a JsonParser into TokenBuffer, got " + t ) ; } writeEndObject ( ) ; return this ; } |
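To make the reporter's fragment runnable, here is a self-contained sketch (class and method names are illustrative) that replays whatever the delegate creator receives; with the fix in place, the replay shows the full token sequence quoted in the issue:
```java
import java.io.IOException;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.util.TokenBuffer;

public class TokenBufferDemo {

    static class Value {
        final String tokens;
        private Value(String tokens) { this.tokens = tokens; }

        // Delegate creator: Jackson buffers the incoming JSON into a
        // TokenBuffer and hands it over; we replay it and record the tokens.
        @JsonCreator
        static Value from(TokenBuffer buffer) throws IOException {
            StringBuilder sb = new StringBuilder();
            JsonParser p = buffer.asParser();
            for (JsonToken t = p.nextToken(); t != null; t = p.nextToken()) {
                sb.append(t).append(' ');
            }
            return new Value(sb.toString().trim());
        }
    }

    public static void main(String[] args) throws IOException {
        Value v = new ObjectMapper().readValue("{\"a\":1,\"b\":null}", Value.class);
        // Fixed: START_OBJECT FIELD_NAME VALUE_NUMBER_INT FIELD_NAME VALUE_NULL END_OBJECT
        System.out.println(v.tokens);
    }
}
```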