Remove unnecessary type arguments (Java 8)
git-svn-id: https://svn.apache.org/repos/asf/poi/trunk@1808516 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent d4070f05e7
commit 7937da6a10
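The entire patch applies one mechanical pattern: wherever a constructor call repeats type arguments that the compiler can already infer from the target type, the explicit arguments are replaced with the diamond operator available since Java 7. A minimal sketch of the before/after shape (the class and field names below are illustrative, not taken from the POI sources):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class DiamondExample {
    // Before: the type arguments are written out twice.
    private final Map<String, List<String>> verbose = new HashMap<String, List<String>>();

    // After: the diamond operator lets the compiler infer the same arguments
    // from the declared type, so they are stated only once.
    private final Map<String, List<String>> concise = new HashMap<>();

    public static void main(String[] args) {
        List<Integer> numbers = new ArrayList<>(); // inferred as ArrayList<Integer>
        numbers.add(42);
        System.out.println(numbers);
    }
}

The change is purely syntactic; the compiled bytecode is identical, which is why the diff below touches only constructor calls and leaves the declared field and variable types unchanged.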
@@ -433,7 +433,7 @@ public class CopyCompare
 /** Contains the directory paths that have already been created in the
 * output POI filesystem and maps them to their corresponding
 * {@link org.apache.poi.poifs.filesystem.DirectoryNode}s. */
-private final Map<String,DirectoryEntry> paths = new HashMap<String,DirectoryEntry>();
+private final Map<String,DirectoryEntry> paths = new HashMap<>();
@@ -323,7 +323,7 @@ public class WriteAuthorAndTitle
 /** Contains the directory paths that have already been created in the
 * output POI filesystem and maps them to their corresponding
 * {@link org.apache.poi.poifs.filesystem.DirectoryNode}s. */
-private final Map<String, DirectoryEntry> paths = new HashMap<String, DirectoryEntry>();
+private final Map<String, DirectoryEntry> paths = new HashMap<>();
@@ -76,7 +76,7 @@ public class XLS2CSVmra implements HSSFListener {
 /** So we known which sheet we're on */
 private int sheetIndex = -1;
 private BoundSheetRecord[] orderedBSRs;
-private List<BoundSheetRecord> boundSheetRecords = new ArrayList<BoundSheetRecord>();
+private List<BoundSheetRecord> boundSheetRecords = new ArrayList<>();
 // For handling formulas with string results
 private int nextRow;
@@ -81,7 +81,7 @@ public class InCellLists {
 // whose items are neither bulleted or numbered - into that cell.
 row = sheet.createRow(1);
 cell = row.createCell(0);
-ArrayList<String> listItems = new ArrayList<String>();
+ArrayList<String> listItems = new ArrayList<>();
 listItems.add("List Item One.");
 listItems.add("List Item Two.");
 listItems.add("List Item Three.");
@@ -125,8 +125,8 @@ public class InCellLists {
 // to preserve order.
 row = sheet.createRow(4);
 cell = row.createCell(0);
-ArrayList<MultiLevelListItem> multiLevelListItems = new ArrayList<MultiLevelListItem>();
-listItems = new ArrayList<String>();
+ArrayList<MultiLevelListItem> multiLevelListItems = new ArrayList<>();
+listItems = new ArrayList<>();
 listItems.add("ML List Item One - Sub Item One.");
 listItems.add("ML List Item One - Sub Item Two.");
 listItems.add("ML List Item One - Sub Item Three.");
@@ -137,7 +137,7 @@ public class InCellLists {
 // item
 multiLevelListItems.add(new MultiLevelListItem("List Item Two.", null));
 multiLevelListItems.add(new MultiLevelListItem("List Item Three.", null));
-listItems = new ArrayList<String>();
+listItems = new ArrayList<>();
 listItems.add("ML List Item Four - Sub Item One.");
 listItems.add("ML List Item Four - Sub Item Two.");
 listItems.add("ML List Item Four - Sub Item Three.");
@@ -89,7 +89,7 @@ public class PendingPaintings {
 * @param parent
 */
 public PendingPaintings(JComponent parent) {
-paintings = new ArrayList<Painting>();
+paintings = new ArrayList<>();
 parent.putClientProperty(PENDING_PAINTINGS, this);
 }
@@ -46,7 +46,7 @@ public class ExtendableTreeCellRenderer implements TreeCellRenderer
 public ExtendableTreeCellRenderer()
 {
-renderers = new HashMap<Class<?>,TreeCellRenderer>();
+renderers = new HashMap<>();
 register(Object.class, new DefaultTreeCellRenderer()
 {
 @Override
@@ -96,7 +96,7 @@ public class TreeReaderListener implements POIFSReaderListener
 {
 this.filename = filename;
 this.rootNode = rootNode;
-pathToNode = new HashMap<Object,MutableTreeNode>(15); // Should be a reasonable guess.
+pathToNode = new HashMap<>(15); // Should be a reasonable guess.
 }
@@ -209,7 +209,7 @@ public class BusinessPlan {
 * create a library of cell styles
 */
 private static Map<String, CellStyle> createStyles(Workbook wb){
-Map<String, CellStyle> styles = new HashMap<String, CellStyle>();
+Map<String, CellStyle> styles = new HashMap<>();
 DataFormat df = wb.createDataFormat();
 CellStyle style;
@@ -148,7 +148,7 @@ public class CalendarDemo {
 * cell styles used for formatting calendar sheets
 */
 private static Map<String, CellStyle> createStyles(Workbook wb){
-Map<String, CellStyle> styles = new HashMap<String, CellStyle>();
+Map<String, CellStyle> styles = new HashMap<>();
 short borderColor = IndexedColors.GREY_50_PERCENT.getIndex();
@@ -88,7 +88,7 @@ public class ExcelComparator {
 Cell cell;
 }
-List<String> listOfDifferences = new ArrayList<String>();
+List<String> listOfDifferences = new ArrayList<>();
 public static void main(String args[]) throws Exception {
 if (args.length != 2 || !(new File(args[0]).exists()) || !(new File(args[1]).exists())) {
@@ -150,7 +150,7 @@ public class LoanCalculator {
 * cell styles used for formatting calendar sheets
 */
 private static Map<String, CellStyle> createStyles(Workbook wb){
-Map<String, CellStyle> styles = new HashMap<String, CellStyle>();
+Map<String, CellStyle> styles = new HashMap<>();
 CellStyle style;
 Font titleFont = wb.createFont();
@@ -153,7 +153,7 @@ public class SSPerformanceTest {
 }
 static Map<String, CellStyle> createStyles(Workbook wb) {
-Map<String, CellStyle> styles = new HashMap<String, CellStyle>();
+Map<String, CellStyle> styles = new HashMap<>();
 CellStyle style;
 Font headerFont = wb.createFont();
@@ -163,7 +163,7 @@ public class TimesheetDemo {
 * Create a library of cell styles
 */
 private static Map<String, CellStyle> createStyles(Workbook wb){
-Map<String, CellStyle> styles = new HashMap<String, CellStyle>();
+Map<String, CellStyle> styles = new HashMap<>();
 CellStyle style;
 Font titleFont = wb.createFont();
 titleFont.setFontHeightInPoints((short)18);
@@ -409,7 +409,7 @@ public class ToCSV {
 Sheet sheet = null;
 Row row = null;
 int lastRowNum = 0;
-this.csvData = new ArrayList<ArrayList<String>>();
+this.csvData = new ArrayList<>();
 System.out.println("Converting files contents to CSV format.");
@@ -526,7 +526,7 @@ public class ToCSV {
 private void rowToCSV(Row row) {
 Cell cell = null;
 int lastCellNum = 0;
-ArrayList<String> csvLine = new ArrayList<String>();
+ArrayList<String> csvLine = new ArrayList<>();
 // Check to ensure that a row was recovered from the sheet as it is
 // possible that one or more rows between other populated rows could be
@@ -57,13 +57,13 @@ public class CheckFunctionsSupported {
 CheckFunctionsSupported check = new CheckFunctionsSupported(wb);
 // Fetch all the problems
-List<FormulaEvaluationProblems> problems = new ArrayList<CheckFunctionsSupported.FormulaEvaluationProblems>();
+List<FormulaEvaluationProblems> problems = new ArrayList<>();
 for (int sn=0; sn<wb.getNumberOfSheets(); sn++) {
 problems.add(check.getEvaluationProblems(sn));
 }
 // Produce an overall summary
-Set<String> unsupportedFunctions = new TreeSet<String>();
+Set<String> unsupportedFunctions = new TreeSet<>();
 for (FormulaEvaluationProblems p : problems) {
 unsupportedFunctions.addAll(p.unsupportedFunctions);
 }
@@ -121,8 +121,8 @@ public class CheckFunctionsSupported {
 return getEvaluationProblems(workbook.getSheetAt(sheetIndex));
 }
 public FormulaEvaluationProblems getEvaluationProblems(Sheet sheet) {
-Set<String> unsupportedFunctions = new HashSet<String>();
-Map<CellReference,Exception> unevaluatableCells = new HashMap<CellReference, Exception>();
+Set<String> unsupportedFunctions = new HashSet<>();
+Map<CellReference,Exception> unevaluatableCells = new HashMap<>();
 for (Row r : sheet) {
 for (Cell c : r) {
@@ -59,7 +59,7 @@ public class SettingExternalFunction {
 return ErrorEval.NA;
 }
 };
-_functionsByName = new HashMap<String, FreeRefFunction>();
+_functionsByName = new HashMap<>();
 _functionsByName.put("BDP", NA);
 _functionsByName.put("BDH", NA);
 _functionsByName.put("BDS", NA);
@@ -98,7 +98,7 @@ public class ToHtml {
 @SuppressWarnings({"unchecked"})
 private static <K, V> Map<K, V> mapFor(Object... mapping) {
-Map<K, V> map = new HashMap<K, V>();
+Map<K, V> map = new HashMap<>();
 for (int i = 0; i < mapping.length; i += 2) {
 map.put((K) mapping[i], (V) mapping[i + 1]);
 }
@@ -260,7 +260,7 @@ public class ToHtml {
 }
 // now add css for each used style
-Set<CellStyle> seen = new HashSet<CellStyle>();
+Set<CellStyle> seen = new HashSet<>();
 for (int i = 0; i < wb.getNumberOfSheets(); i++) {
 Sheet sheet = wb.getSheetAt(i);
 Iterator<Row> rows = sheet.rowIterator();
@@ -96,7 +96,7 @@ public class FromHowTo {
 private String lastContents;
 private boolean nextIsString;
 private boolean inlineStr;
-private final LruCache<Integer,String> lruCache = new LruCache<Integer,String>(50);
+private final LruCache<Integer,String> lruCache = new LruCache<>(50);
 private static class LruCache<A,B> extends LinkedHashMap<A, B> {
 private final int maxEntries;
@@ -131,7 +131,7 @@ public class AligningCells {
 // You can add multiple spans for one row
 Object span = start_column + ":" + end_column;
-List<Object> spanList = new ArrayList<Object>();
+List<Object> spanList = new ArrayList<>();
 spanList.add(span);
 //add spns to the row
@@ -116,7 +116,7 @@ public class BigGridDemo {
 * Create a library of cell styles.
 */
 private static Map<String, XSSFCellStyle> createStyles(XSSFWorkbook wb){
-Map<String, XSSFCellStyle> styles = new HashMap<String, XSSFCellStyle>();
+Map<String, XSSFCellStyle> styles = new HashMap<>();
 XSSFDataFormat fmt = wb.createDataFormat();
 XSSFCellStyle style1 = wb.createCellStyle();
@@ -135,7 +135,7 @@ public class CalendarDemo {
 * cell styles used for formatting calendar sheets
 */
 private static Map<String, XSSFCellStyle> createStyles(XSSFWorkbook wb){
-Map<String, XSSFCellStyle> styles = new HashMap<String, XSSFCellStyle>();
+Map<String, XSSFCellStyle> styles = new HashMap<>();
 XSSFCellStyle style;
 XSSFFont titleFont = wb.createFont();
@@ -47,8 +47,8 @@ public class ExcelAntTask extends Task {
 private LinkedList<ExcelAntUserDefinedFunction> functions ;
 public ExcelAntTask() {
-tests = new LinkedList<ExcelAntTest>() ;
-functions = new LinkedList<ExcelAntUserDefinedFunction>() ;
+tests = new LinkedList<>() ;
+functions = new LinkedList<>() ;
 }
 public void addPrecision( ExcelAntPrecision prec ) {
@@ -56,9 +56,9 @@ public class ExcelAntTest extends Task{
 public ExcelAntTest() {
-evaluators = new LinkedList<ExcelAntEvaluateCell>();
-failureMessages = new LinkedList<String>();
-testTasks = new LinkedList<Task>();
+evaluators = new LinkedList<>();
+failureMessages = new LinkedList<>();
+testTasks = new LinkedList<>();
 }
 public void setPrecision( double precision ) {
@@ -59,7 +59,7 @@ public class ExcelAntWorkbookUtil extends Typedef {
 private Workbook workbook;
-private final Map<String, FreeRefFunction> xlsMacroList = new HashMap<String, FreeRefFunction>();
+private final Map<String, FreeRefFunction> xlsMacroList = new HashMap<>();
 /**
 * Constructs an instance using a String that contains the fully qualified
@@ -215,7 +215,7 @@ public class ExcelAntWorkbookUtil extends Typedef {
 * @return
 */
 public List<String> getSheets() {
-ArrayList<String> sheets = new ArrayList<String>();
+ArrayList<String> sheets = new ArrayList<>();
 int sheetCount = workbook.getNumberOfSheets();
@@ -46,7 +46,7 @@ public final class ExcelAntWorkbookUtilFactory {
 */
 public static ExcelAntWorkbookUtil getInstance(String fileName) {
 if(workbookUtilMap == null) {
-workbookUtilMap = new HashMap<String, ExcelAntWorkbookUtil>();
+workbookUtilMap = new HashMap<>();
 }
 if(workbookUtilMap.containsKey(fileName)) {
@@ -97,7 +97,7 @@ public class TestAllFiles {
 // map file extensions to the actual mappers
-static final Map<String, FileHandler> HANDLERS = new HashMap<String, FileHandler>();
+static final Map<String, FileHandler> HANDLERS = new HashMap<>();
 static {
 // Excel
 HANDLERS.put(".xls", new HSSFFileHandler());
@@ -210,7 +210,7 @@ public class TestAllFiles {
 HANDLERS.put("spreadsheet/BigSSTRecordCR", new NullFileHandler());
 HANDLERS.put("spreadsheet/test_properties1", new NullFileHandler());
-Map<String,String> passmap = new HashMap<String,String>();
+Map<String,String> passmap = new HashMap<>();
 passmap.put("slideshow/Password_Protected-hello.ppt", "hello");
 passmap.put("slideshow/Password_Protected-56-hello.ppt", "hello");
 passmap.put("slideshow/Password_Protected-np-hello.ppt", "hello");
@@ -237,7 +237,7 @@ public class TestAllFiles {
 return Collections.unmodifiableSet(hashSet(a));
 }
 private static Set<String> hashSet(String... a) {
-return new HashSet<String>(Arrays.asList(a));
+return new HashSet<>(Arrays.asList(a));
 }
 // Old Word Documents where we can at least extract some text
@@ -345,7 +345,7 @@ public class TestAllFiles {
 System.out.println("Handling " + scanner.getIncludedFiles().length + " files");
-List<Object[]> files = new ArrayList<Object[]>();
+List<Object[]> files = new ArrayList<>();
 for(String file : scanner.getIncludedFiles()) {
 file = file.replace('\\', '/'); // ... failures/handlers lookup doesn't work on windows otherwise
 if (IGNORED.contains(file)) {
@@ -38,7 +38,7 @@ import org.apache.poi.util.IOUtils;
 import org.apache.xmlbeans.XmlException;
 public abstract class AbstractFileHandler implements FileHandler {
-public static final Set<String> EXPECTED_EXTRACTOR_FAILURES = new HashSet<String>();
+public static final Set<String> EXPECTED_EXTRACTOR_FAILURES = new HashSet<>();
 static {
 // password protected files without password
 // ... currently none ...
@@ -63,7 +63,7 @@ public class HPSFFileHandler extends POIFSFileHandler {
 private static final Set<String> unmodifiableHashSet(String... a) {
-return Collections.unmodifiableSet(new HashSet<String>(Arrays.asList(a)));
+return Collections.unmodifiableSet(new HashSet<>(Arrays.asList(a)));
 }
@@ -54,7 +54,7 @@ public class HSSFFileHandler extends SpreadsheetHandler {
 // TODO: still fails on some records... RecordsStresser.handleWorkbook(wb);
 }
-private static final Set<String> EXPECTED_ADDITIONAL_FAILURES = new HashSet<String>();
+private static final Set<String> EXPECTED_ADDITIONAL_FAILURES = new HashSet<>();
 static {
 // encrypted
 EXPECTED_ADDITIONAL_FAILURES.add("spreadsheet/35897-type4.xls");
@@ -157,7 +157,7 @@ public class XSSFFileHandler extends SpreadsheetHandler {
 }
 }
-private static final Set<String> EXPECTED_ADDITIONAL_FAILURES = new HashSet<String>();
+private static final Set<String> EXPECTED_ADDITIONAL_FAILURES = new HashSet<>();
 static {
 // expected sheet-id not found
 // EXPECTED_ADDITIONAL_FAILURES.add("spreadsheet/52348.xlsx");
@@ -70,7 +70,7 @@ public enum HyperlinkType {
 this.code = code;
 }
-private static final Map<Integer, HyperlinkType> map = new HashMap<Integer, HyperlinkType>();
+private static final Map<Integer, HyperlinkType> map = new HashMap<>();
 static {
 for (HyperlinkType type : values()) {
 map.put(type.getCode(), type);
@@ -66,7 +66,7 @@ public enum FontGroup {
 private static NavigableMap<Integer,Range> UCS_RANGES;
 static {
-UCS_RANGES = new TreeMap<Integer,Range>();
+UCS_RANGES = new TreeMap<>();
 UCS_RANGES.put(0x0000, new Range(0x007F, LATIN));
 UCS_RANGES.put(0x0080, new Range(0x00A6, LATIN));
 UCS_RANGES.put(0x00A9, new Range(0x00AF, LATIN));
@@ -111,7 +111,7 @@ public enum FontGroup {
 * @return the FontGroup
 */
 public static List<FontGroupRange> getFontGroupRanges(String runText) {
-List<FontGroupRange> ttrList = new ArrayList<FontGroupRange>();
+List<FontGroupRange> ttrList = new ArrayList<>();
 FontGroupRange ttrLast = null;
 final int rlen = (runText != null) ? runText.length() : 0;
 for(int cp, i = 0, charCount; i < rlen; i += charCount) {
@@ -30,7 +30,7 @@ import org.apache.poi.util.LittleEndian;
 */
 public abstract class AbstractEscherOptRecord extends EscherRecord
 {
-private List<EscherProperty> properties = new ArrayList<EscherProperty>();
+private List<EscherProperty> properties = new ArrayList<>();
 /**
 * Add a property to this record.
@@ -176,7 +176,7 @@ public abstract class AbstractEscherOptRecord extends EscherRecord
 @Override
 protected Object[][] getAttributeMap() {
-List<Object> attrList = new ArrayList<Object>(properties.size()*2+2);
+List<Object> attrList = new ArrayList<>(properties.size() * 2 + 2);
 attrList.add("properties");
 attrList.add(properties.size());
 for ( EscherProperty property : properties ) {
@@ -108,7 +108,7 @@ public class DefaultEscherRecordFactory implements EscherRecordFactory {
 * @return The map containing the id/constructor pairs.
 */
 protected static Map<Short, Constructor<? extends EscherRecord>> recordsToMap(Class<?>[] recClasses) {
-Map<Short, Constructor<? extends EscherRecord>> result = new HashMap<Short, Constructor<? extends EscherRecord>>();
+Map<Short, Constructor<? extends EscherRecord>> result = new HashMap<>();
 final Class<?>[] EMPTY_CLASS_ARRAY = new Class[0];
 for (Class<?> recClass : recClasses) {
@@ -68,7 +68,7 @@ public final class EscherContainerRecord extends EscherRecord implements Iterabl
 */
 private int _remainingLength;
-private final List<EscherRecord> _childRecords = new ArrayList<EscherRecord>();
+private final List<EscherRecord> _childRecords = new ArrayList<>();
 @Override
 public int fillFields(byte[] data, int pOffset, EscherRecordFactory recordFactory) {
@@ -148,7 +148,7 @@ public final class EscherContainerRecord extends EscherRecord implements Iterabl
 */
 @Override
 public List<EscherRecord> getChildRecords() {
-return new ArrayList<EscherRecord>(_childRecords);
+return new ArrayList<>(_childRecords);
 }
 /**
@@ -190,7 +190,7 @@ public final class EscherContainerRecord extends EscherRecord implements Iterabl
 * @return EscherContainer children
 */
 public List<EscherContainerRecord> getChildContainers() {
-List<EscherContainerRecord> containers = new ArrayList<EscherContainerRecord>();
+List<EscherContainerRecord> containers = new ArrayList<>();
 for (EscherRecord r : this) {
 if(r instanceof EscherContainerRecord) {
 containers.add((EscherContainerRecord) r);
@@ -284,7 +284,7 @@ public final class EscherContainerRecord extends EscherRecord implements Iterabl
 @Override
 protected Object[][] getAttributeMap() {
-List<Object> chList = new ArrayList<Object>(_childRecords.size()*2+2);
+List<Object> chList = new ArrayList<>(_childRecords.size() * 2 + 2);
 chList.add("children");
 chList.add(_childRecords.size());
 int count = 0;
@@ -39,7 +39,7 @@ public final class EscherDggRecord extends EscherRecord {
 // private int field_2_numIdClusters;
 private int field_3_numShapesSaved;
 private int field_4_drawingsSaved;
-private final List<FileIdCluster> field_5_fileIdClusters = new ArrayList<FileIdCluster>();
+private final List<FileIdCluster> field_5_fileIdClusters = new ArrayList<>();
 private int maxDgId;
 public static class FileIdCluster {
@@ -329,7 +329,7 @@ public final class EscherDggRecord extends EscherRecord {
 @Override
 protected Object[][] getAttributeMap() {
-List<Object> fldIds = new ArrayList<Object>();
+List<Object> fldIds = new ArrayList<>();
 fldIds.add("FileId Clusters");
 fldIds.add(field_5_fileIdClusters.size());
 for (FileIdCluster fic : field_5_fileIdClusters) {
@@ -334,7 +334,7 @@ public final class EscherProperties {
 private static final Map<Short, EscherPropertyMetaData> properties = initProps();
 private static Map<Short, EscherPropertyMetaData> initProps() {
-Map<Short, EscherPropertyMetaData> m = new HashMap<Short, EscherPropertyMetaData>();
+Map<Short, EscherPropertyMetaData> m = new HashMap<>();
 addProp(m, TRANSFORM__ROTATION, "transform.rotation");
 addProp(m, PROTECTION__LOCKROTATION, "protection.lockrotation");
 addProp(m, PROTECTION__LOCKASPECTRATIO, "protection.lockaspectratio");
@@ -35,7 +35,7 @@ public final class EscherPropertyFactory {
 * @return The new properties
 */
 public List<EscherProperty> createProperties(byte[] data, int offset, short numProperties) {
-List<EscherProperty> results = new ArrayList<EscherProperty>();
+List<EscherProperty> results = new ArrayList<>();
 int pos = offset;
@@ -135,7 +135,7 @@ public final class EscherTextboxRecord extends EscherRecord implements Cloneable
 @Override
 protected Object[][] getAttributeMap() {
 int numCh = getChildRecords().size();
-List<Object> chLst = new ArrayList<Object>(numCh*2+2);
+List<Object> chLst = new ArrayList<>(numCh * 2 + 2);
 chLst.add("children");
 chLst.add(numCh);
 for (EscherRecord er : getChildRecords()) {
@@ -35,7 +35,7 @@ public final class UnknownEscherRecord extends EscherRecord implements Cloneable
 private List<EscherRecord> _childRecords;
 public UnknownEscherRecord() {
-_childRecords = new ArrayList<EscherRecord>();
+_childRecords = new ArrayList<>();
 }
 @Override
@@ -139,7 +139,7 @@ public final class UnknownEscherRecord extends EscherRecord implements Cloneable
 @Override
 protected Object[][] getAttributeMap() {
 int numCh = getChildRecords().size();
-List<Object> chLst = new ArrayList<Object>(numCh*2+2);
+List<Object> chLst = new ArrayList<>(numCh * 2 + 2);
 chLst.add("children");
 chLst.add(numCh);
 for (EscherRecord er : _childRecords) {
@@ -214,9 +214,9 @@ public class OLE2ExtractorFactory {
 throws IOException
 {
 // All the embedded directories we spotted
-List<Entry> dirs = new ArrayList<Entry>();
+List<Entry> dirs = new ArrayList<>();
 // For anything else not directly held in as a POIFS directory
-List<InputStream> nonPOIFS = new ArrayList<InputStream>();
+List<InputStream> nonPOIFS = new ArrayList<>();
 // Find all the embedded directories
 DirectoryEntry root = ext.getRoot();
@@ -250,7 +250,7 @@ public class OLE2ExtractorFactory {
 return new POITextExtractor[0];
 }
-ArrayList<POITextExtractor> e = new ArrayList<POITextExtractor>();
+ArrayList<POITextExtractor> e = new ArrayList<>();
 for (Entry dir : dirs) {
 e.add(createExtractor(
 (DirectoryNode) dir
@@ -70,12 +70,12 @@ public class CustomProperties implements Map<String,Object> {
 /**
 * The custom properties
 */
-private final HashMap<Long,CustomProperty> props = new HashMap<Long,CustomProperty>();
+private final HashMap<Long,CustomProperty> props = new HashMap<>();
 /**
 * Maps property IDs to property names and vice versa.
 */
-private final TreeBidiMap<Long,String> dictionary = new TreeBidiMap<Long,String>();
+private final TreeBidiMap<Long,String> dictionary = new TreeBidiMap<>();
 /**
 * Tells whether this object is pure or not.
@@ -219,7 +219,7 @@ public class CustomProperties implements Map<String,Object> {
 * @return the list of properties
 */
 public List<CustomProperty> properties() {
-List<CustomProperty> list = new ArrayList<CustomProperty>(props.size());
+List<CustomProperty> list = new ArrayList<>(props.size());
 for (Long l : dictionary.keySet()) {
 list.add(props.get(l));
 }
@@ -231,7 +231,7 @@ public class CustomProperties implements Map<String,Object> {
 */
 @Override
 public Collection<Object> values() {
-List<Object> list = new ArrayList<Object>(props.size());
+List<Object> list = new ArrayList<>(props.size());
 for (Long l : dictionary.keySet()) {
 list.add(props.get(l).getValue());
 }
@@ -240,7 +240,7 @@ public class CustomProperties implements Map<String,Object> {
 @Override
 public Set<Entry<String, Object>> entrySet() {
-Map<String,Object> set = new LinkedHashMap<String,Object>(props.size());
+Map<String,Object> set = new LinkedHashMap<>(props.size());
 for (Entry<Long,String> se : dictionary.entrySet()) {
 set.put(se.getValue(), props.get(se.getKey()).getValue());
 }
@@ -825,7 +825,7 @@ public class DocumentSummaryInformation extends PropertySet {
 throw new HPSFRuntimeException("Illegal internal format of Document SummaryInformation stream: second section is missing.");
 }
-List<Section> l = new LinkedList<Section>(getSections());
+List<Section> l = new LinkedList<>(getSections());
 clearSections();
 int idx = 0;
 for (Section s : l) {
@@ -83,7 +83,7 @@ public class HPSFPropertiesOnlyDocument extends POIDocument {
 private void write(NPOIFSFileSystem fs) throws IOException {
 // For tracking what we've written out, so far
-List<String> excepts = new ArrayList<String>(2);
+List<String> excepts = new ArrayList<>(2);
 // Write out our HPFS properties, with any changes
 writeProperties(fs, excepts);
@@ -134,7 +134,7 @@ public class PropertySet {
 /**
 * The sections in this {@link PropertySet}.
 */
-private final List<Section> sections = new ArrayList<Section>();
+private final List<Section> sections = new ArrayList<>();
 /**
@@ -71,7 +71,7 @@ public class Section {
 /**
 * This section's properties.
 */
-private final Map<Long,Property> properties = new LinkedHashMap<Long,Property>();
+private final Map<Long,Property> properties = new LinkedHashMap<>();
 /**
 * This member is {@code true} if the last call to {@link
@@ -175,7 +175,7 @@ public class Section {
 * seconds pass reads the other properties.
 */
 /* Pass 1: Read the property list. */
-final TreeBidiMap<Long,Long> offset2Id = new TreeBidiMap<Long,Long>();
+final TreeBidiMap<Long,Long> offset2Id = new TreeBidiMap<>();
 for (int i = 0; i < propertyCount; i++) {
 /* Read the property ID. */
 long id = (int)leis.readUInt();
@@ -662,7 +662,7 @@ public class Section {
 /* Compare all properties except the dictionary (id 0) and
 * the codepage (id 1 / ignored) as they must be handled specially. */
-Set<Long> propIds = new HashSet<Long>(properties.keySet());
+Set<Long> propIds = new HashSet<>(properties.keySet());
 propIds.addAll(s.properties.keySet());
 propIds.remove(0L);
 propIds.remove(1L);
@@ -800,7 +800,7 @@ public class Section {
 */
 private boolean readDictionary(LittleEndianByteArrayInputStream leis, final int length, final int codepage)
 throws UnsupportedEncodingException {
-Map<Long,String> dic = new HashMap<Long,String>();
+Map<Long,String> dic = new HashMap<>();
 /*
 * Read the number of dictionary entries.
@@ -919,7 +919,7 @@ public class Section {
 public void setDictionary(final Map<Long,String> dictionary) throws IllegalPropertySetDataException {
 if (dictionary != null) {
 if (this.dictionary == null) {
-this.dictionary = new TreeMap<Long,String>();
+this.dictionary = new TreeMap<>();
 }
 this.dictionary.putAll(dictionary);
@@ -387,8 +387,8 @@ public class Variant
 /* Initialize the number-to-name and number-to-length map: */
 static {
-Map<Long,String> number2Name = new HashMap<Long,String>(NUMBER_TO_NAME_LIST.length, 1.0F);
-Map<Long,Integer> number2Len = new HashMap<Long,Integer>(NUMBER_TO_NAME_LIST.length, 1.0F);
+Map<Long,String> number2Name = new HashMap<>(NUMBER_TO_NAME_LIST.length, 1.0F);
+Map<Long,Integer> number2Len = new HashMap<>(NUMBER_TO_NAME_LIST.length, 1.0F);
 for (Object[] nn : NUMBER_TO_NAME_LIST) {
 number2Name.put((Long)nn[0], (String)nn[1]);
@@ -105,7 +105,7 @@ public class VariantSupport extends Variant {
 if (isLogUnsupportedTypes())
 {
 if (unsupportedMessage == null) {
-unsupportedMessage = new LinkedList<Long>();
+unsupportedMessage = new LinkedList<>();
 }
 Long vt = Long.valueOf(ex.getVariantType());
 if (!unsupportedMessage.contains(vt))
@@ -47,7 +47,7 @@ class Vector {
 //of allocating array of length "length".
 //If the length is corrupted and crazily big but < Integer.MAX_VALUE,
 //this will trigger a RuntimeException "Buffer overrun" in lei.checkPosition
-List<TypedPropertyValue> values = new ArrayList<TypedPropertyValue>();
+List<TypedPropertyValue> values = new ArrayList<>();
 int paddedType = (_type == Variant.VT_VARIANT) ? 0 : _type;
 for ( int i = 0; i < length; i++ ) {
 TypedPropertyValue value = new TypedPropertyValue(paddedType, null);
@@ -420,7 +420,7 @@ public class PropertyIDMap implements Map<Long,String> {
 * @param map The instance to be created is backed by this map.
 */
 private PropertyIDMap(Object[][] idValues) {
-Map<Long,String> m = new HashMap<Long,String>(idValues.length);
+Map<Long,String> m = new HashMap<>(idValues.length);
 for (Object[] idValue : idValues) {
 m.put((Long)idValue[0], (String)idValue[1]);
 }
@@ -45,7 +45,7 @@ public class SectionIDMap {
 * The default section ID map. It maps section format IDs to {@link PropertyIDMap PropertyIDMaps}
 */
 private static ThreadLocal<Map<ClassID,PropertyIDMap>> defaultMap =
-new ThreadLocal<Map<ClassID,PropertyIDMap>>();
+new ThreadLocal<>();
 /**
 * <p>The SummaryInformation's section's format ID.</p>
@@ -79,7 +79,7 @@ public class SectionIDMap {
 public static SectionIDMap getInstance() {
 Map<ClassID,PropertyIDMap> m = defaultMap.get();
 if (m == null) {
-m = new HashMap<ClassID,PropertyIDMap>();
+m = new HashMap<>();
 m.put(SUMMARY_INFORMATION_ID, PropertyIDMap.getSummaryInformationProperties());
 m.put(DOCUMENT_SUMMARY_INFORMATION_ID[0], PropertyIDMap.getDocumentSummaryInformationProperties());
 defaultMap.set(m);
@@ -57,7 +57,7 @@ public class BiffDrawingToXml {
 }
 private static List<Integer> getIndexesByName(String[] params, HSSFWorkbook workbook) {
-List<Integer> list = new ArrayList<Integer>();
+List<Integer> list = new ArrayList<>();
 int pos = getAttributeIndex(SHEET_NAME_PARAM, params);
 if (-1 != pos) {
 if (pos >= params.length) {
@@ -74,7 +74,7 @@ public class BiffDrawingToXml {
 }
 private static List<Integer> getIndexesByIdArray(String[] params) {
-List<Integer> list = new ArrayList<Integer>();
+List<Integer> list = new ArrayList<>();
 int pos = getAttributeIndex(SHEET_INDEXES_PARAM, params);
 if (-1 != pos) {
 if (pos >= params.length) {
@@ -90,7 +90,7 @@ public class BiffDrawingToXml {
 }
 private static List<Integer> getSheetsIndexes(String[] params, HSSFWorkbook workbook) {
-List<Integer> list = new ArrayList<Integer>();
+List<Integer> list = new ArrayList<>();
 list.addAll(getIndexesByIdArray(params));
 list.addAll(getIndexesByName(params, workbook));
 if (0 == list.size()) {
@@ -76,7 +76,7 @@ public final class BiffViewer {
 */
 public static Record[] createRecords(InputStream is, PrintWriter ps, BiffRecordListener recListener, boolean dumpInterpretedRecords)
 throws org.apache.poi.util.RecordFormatException {
-List<Record> temp = new ArrayList<Record>();
+List<Record> temp = new ArrayList<>();
 RecordInputStream recStream = new RecordInputStream(is);
 while (true) {
@@ -456,7 +456,7 @@ public final class BiffViewer {
 _hexDumpWriter = hexDumpWriter;
 _zeroAlignEachRecord = zeroAlignEachRecord;
 _noHeader = noHeader;
-_headers = new ArrayList<String>();
+_headers = new ArrayList<>();
 }
 @Override
@@ -480,7 +480,7 @@ public final class BiffViewer {
 }
 public List<String> getRecentHeaders() {
 List<String> result = _headers;
-_headers = new ArrayList<String>();
+_headers = new ArrayList<>();
 return result;
 }
 private static String formatRecordDetails(int globalOffset, int sid, int size, int recordCounter) {
@@ -65,7 +65,7 @@ public class EventWorkbookBuilder {
 */
 public static InternalWorkbook createStubWorkbook(ExternSheetRecord[] externs,
 BoundSheetRecord[] bounds, SSTRecord sst) {
-List<Record> wbRecords = new ArrayList<Record>();
+List<Record> wbRecords = new ArrayList<>();
 // Core Workbook records go first
 if(bounds != null) {
@@ -114,8 +114,8 @@ public class EventWorkbookBuilder {
 */
 public static class SheetRecordCollectingListener implements HSSFListener {
 private final HSSFListener childListener;
-private final List<BoundSheetRecord> boundSheetRecords = new ArrayList<BoundSheetRecord>();
-private final List<ExternSheetRecord> externSheetRecords = new ArrayList<ExternSheetRecord>();
+private final List<BoundSheetRecord> boundSheetRecords = new ArrayList<>();
+private final List<ExternSheetRecord> externSheetRecords = new ArrayList<>();
 private SSTRecord sstRecord;
 public SheetRecordCollectingListener(HSSFListener childListener) {
@@ -45,8 +45,8 @@ public class FormatTrackingHSSFListener implements HSSFListener {
 private final HSSFListener _childListener;
 private final HSSFDataFormatter _formatter;
 private final NumberFormat _defaultFormat;
-private final Map<Integer, FormatRecord> _customFormatRecords = new HashMap<Integer, FormatRecord>();
-private final List<ExtendedFormatRecord> _xfRecords = new ArrayList<ExtendedFormatRecord>();
+private final Map<Integer, FormatRecord> _customFormatRecords = new HashMap<>();
+private final List<ExtendedFormatRecord> _xfRecords = new ArrayList<>();
 /**
 * Creates a format tracking wrapper around the given listener, using
@@ -38,7 +38,7 @@ public class HSSFRequest {
 /** Creates a new instance of HSSFRequest */
 public HSSFRequest() {
-_records = new HashMap<Short, List<HSSFListener>>(50); // most folks won't listen for too many of these
+_records = new HashMap<>(50); // most folks won't listen for too many of these
 }
 /**
@@ -58,7 +58,7 @@ public class HSSFRequest {
 List<HSSFListener> list = _records.get(Short.valueOf(sid));
 if (list == null) {
-list = new ArrayList<HSSFListener>(1); // probably most people will use one listener
+list = new ArrayList<>(1); // probably most people will use one listener
 _records.put(Short.valueOf(sid), list);
 }
 list.add(lsnr);
@@ -171,7 +171,7 @@ public class EventBasedExcelExtractor extends POIOLE2TextExtractor implements or
 private int nextRow = -1;
 public TextListener() {
-sheetNames = new ArrayList<String>();
+sheetNames = new ArrayList<>();
 }
 public void processRecord(Record record) {
 String thisText = null;
@@ -30,7 +30,7 @@ import org.apache.poi.util.Removal;
 */
 public class DrawingManager2 {
 private final EscherDggRecord dgg;
-private final List<EscherDgRecord> drawingGroups = new ArrayList<EscherDgRecord>();
+private final List<EscherDgRecord> drawingGroups = new ArrayList<>();
 public DrawingManager2( EscherDggRecord dgg ) {
@@ -125,7 +125,7 @@ public final class InternalSheet {
 _mergedCellsTable = new MergedCellsTable();
 RowRecordsAggregate rra = null;
-List<RecordBase> records = new ArrayList<RecordBase>(128);
+List<RecordBase> records = new ArrayList<>(128);
 _records = records; // needed here due to calls to findFirstRecordLocBySid before we're done
 int dimsloc = -1;
@@ -378,7 +378,7 @@ public final class InternalSheet {
 * @return the cloned sheet
 */
 public InternalSheet cloneSheet() {
-List<Record> clonedRecords = new ArrayList<Record>(_records.size());
+List<Record> clonedRecords = new ArrayList<>(_records.size());
 for (int i = 0; i < _records.size(); i++) {
 RecordBase rb = _records.get(i);
 if (rb instanceof RecordAggregate) {
@@ -413,7 +413,7 @@ public final class InternalSheet {
 }
 private InternalSheet() {
 _mergedCellsTable = new MergedCellsTable();
-List<RecordBase> records = new ArrayList<RecordBase>(32);
+List<RecordBase> records = new ArrayList<>(32);
 if (log.check( POILogger.DEBUG ))
 log.log(POILogger.DEBUG, "Sheet createsheet from scratch called");
@@ -1662,7 +1662,7 @@ public final class InternalSheet {
 * @return never <code>null</code>, typically empty array
 */
 public NoteRecord[] getNoteRecords() {
-List<NoteRecord> temp = new ArrayList<NoteRecord>();
+List<NoteRecord> temp = new ArrayList<>();
 for(int i=_records.size()-1; i>=0; i--) {
 RecordBase rec = _records.get(i);
 if (rec instanceof NoteRecord) {
@@ -197,15 +197,15 @@ public final class InternalWorkbook {
 private InternalWorkbook() {
 records = new WorkbookRecordList();
-boundsheets = new ArrayList<BoundSheetRecord>();
-formats = new ArrayList<FormatRecord>();
-hyperlinks = new ArrayList<HyperlinkRecord>();
+boundsheets = new ArrayList<>();
+formats = new ArrayList<>();
+hyperlinks = new ArrayList<>();
 numxfs = 0;
 numfonts = 0;
 maxformatid = -1;
 uses1904datewindowing = false;
-escherBSERecords = new ArrayList<EscherBSERecord>();
-commentRecords = new LinkedHashMap<String, NameCommentRecord>();
+escherBSERecords = new ArrayList<>();
+commentRecords = new LinkedHashMap<>();
 }
 /**
@@ -223,7 +223,7 @@ public final class InternalWorkbook {
 public static InternalWorkbook createWorkbook(List<Record> recs) {
 LOG.log(DEBUG, "Workbook (readfile) created with reclen=", recs.size());
 InternalWorkbook retval = new InternalWorkbook();
-List<Record> records = new ArrayList<Record>(recs.size() / 3);
+List<Record> records = new ArrayList<>(recs.size() / 3);
 retval.records.setRecords(records);
 boolean eofPassed = false;
@@ -369,7 +369,7 @@ public final class InternalWorkbook {
 LOG.log( DEBUG, "creating new workbook from scratch" );
 InternalWorkbook retval = new InternalWorkbook();
-List<Record> records = new ArrayList<Record>( 30 );
+List<Record> records = new ArrayList<>(30);
 retval.records.setRecords(records);
 List<FormatRecord> formats = retval.formats;
@@ -96,7 +96,7 @@ final class LinkTable {
 public ExternalBookBlock(RecordStream rs) {
 _externalBookRecord = (SupBookRecord) rs.getNext();
-List<Object> temp = new ArrayList<Object>();
+List<Object> temp = new ArrayList<>();
 while(rs.peekNextClass() == ExternalNameRecord.class) {
 temp.add(rs.getNext());
 }
@@ -191,7 +191,7 @@ final class LinkTable {
 _workbookRecordList = workbookRecordList;
 RecordStream rs = new RecordStream(inputList, startIndex);
-List<ExternalBookBlock> temp = new ArrayList<ExternalBookBlock>();
+List<ExternalBookBlock> temp = new ArrayList<>();
 while(rs.peekNextClass() == SupBookRecord.class) {
 temp.add(new ExternalBookBlock(rs));
 }
@@ -212,7 +212,7 @@ final class LinkTable {
 _externSheetRecord = null;
 }
-_definedNames = new ArrayList<NameRecord>();
+_definedNames = new ArrayList<>();
 // collect zero or more DEFINEDNAMEs id=0x18,
 // with their comments if present
 while(true) {
@@ -235,7 +235,7 @@ final class LinkTable {
 }
 private static ExternSheetRecord readExtSheetRecord(RecordStream rs) {
-List<ExternSheetRecord> temp = new ArrayList<ExternSheetRecord>(2);
+List<ExternSheetRecord> temp = new ArrayList<>(2);
 while(rs.peekNextClass() == ExternSheetRecord.class) {
 temp.add((ExternSheetRecord) rs.getNext());
 }
@@ -258,7 +258,7 @@ final class LinkTable {
 public LinkTable(int numberOfSheets, WorkbookRecordList workbookRecordList) {
 _workbookRecordList = workbookRecordList;
-_definedNames = new ArrayList<NameRecord>();
+_definedNames = new ArrayList<>();
 _externalBookBlocks = new ExternalBookBlock[] {
 new ExternalBookBlock(numberOfSheets),
 };
@@ -47,12 +47,12 @@ public final class RowBlocksReader {
 * @param rs the record stream
 */
 public RowBlocksReader(RecordStream rs) {
-List<Record> plainRecords = new ArrayList<Record>();
-List<Record> shFrmRecords = new ArrayList<Record>();
-List<CellReference> firstCellRefs = new ArrayList<CellReference>();
-List<Record> arrayRecords = new ArrayList<Record>();
-List<Record> tableRecords = new ArrayList<Record>();
-List<Record> mergeCellRecords = new ArrayList<Record>();
+List<Record> plainRecords = new ArrayList<>();
+List<Record> shFrmRecords = new ArrayList<>();
+List<CellReference> firstCellRefs = new ArrayList<>();
+List<Record> arrayRecords = new ArrayList<>();
+List<Record> tableRecords = new ArrayList<>();
+List<Record> mergeCellRecords = new ArrayList<>();
 Record prevRec = null;
 while(!RecordOrderer.isEndOfRowBlock(rs.peekNextSid())) {
@@ -23,7 +23,7 @@ import java.util.List;
 import org.apache.poi.hssf.record.Record;
 public final class WorkbookRecordList {
-private List<Record> records = new ArrayList<Record>();
+private List<Record> records = new ArrayList<>();
 /** holds the position of the protect record */
 private int protpos;
@@ -47,12 +47,12 @@ public abstract class AbstractEscherHolderRecord extends Record implements Clone
 public AbstractEscherHolderRecord()
 {
-escherRecords = new ArrayList<EscherRecord>();
+escherRecords = new ArrayList<>();
 }
 public AbstractEscherHolderRecord(RecordInputStream in)
 {
-escherRecords = new ArrayList<EscherRecord>();
+escherRecords = new ArrayList<>();
 if (! DESERIALISE ) {
 rawDataContainer.concatenate(in.readRemainder());
 } else {
@@ -295,12 +295,12 @@ public final class EscherAggregate extends AbstractEscherHolderRecord {
 /**
 * Maps shape container objects to their {@link TextObjectRecord} or {@link ObjRecord}
 */
-private final Map<EscherRecord, Record> shapeToObj = new HashMap<EscherRecord, Record>();
+private final Map<EscherRecord, Record> shapeToObj = new HashMap<>();
 /**
 * list of "tail" records that need to be serialized after all drawing group records
 */
-private final Map<Integer, NoteRecord> tailRec = new LinkedHashMap<Integer, NoteRecord>();
+private final Map<Integer, NoteRecord> tailRec = new LinkedHashMap<>();
 /**
 * create new EscherAggregate
@@ -376,7 +376,7 @@ public final class EscherAggregate extends AbstractEscherHolderRecord {
 public static EscherAggregate createAggregate(List<RecordBase> records, int locFirstDrawingRecord) {
 // Keep track of any shape records created so we can match them back to the object id's.
 // Textbox objects are also treated as shape objects.
-final List<EscherRecord> shapeRecords = new ArrayList<EscherRecord>();
+final List<EscherRecord> shapeRecords = new ArrayList<>();
 EscherRecordFactory recordFactory = new DefaultEscherRecordFactory() {
 public EscherRecord createRecord(byte[] data, int offset) {
 EscherRecord r = super.createRecord(data, offset);
@@ -466,8 +466,8 @@ public final class EscherAggregate extends AbstractEscherHolderRecord {
 byte[] buffer = new byte[size];
 // Serialize escher records into one big data structure and keep note of ending offsets.
-final List <Integer>spEndingOffsets = new ArrayList<Integer>();
-final List <EscherRecord> shapes = new ArrayList<EscherRecord>();
+final List <Integer>spEndingOffsets = new ArrayList<>();
+final List <EscherRecord> shapes = new ArrayList<>();
 int pos = 0;
 for (Object record : records) {
 EscherRecord e = (EscherRecord) record;
@@ -594,7 +594,7 @@ public final class EscherAggregate extends AbstractEscherHolderRecord {
 List<EscherRecord> records = getEscherRecords();
 int rawEscherSize = getEscherRecordSize(records);
 byte[] buffer = new byte[rawEscherSize];
-final List<Integer> spEndingOffsets = new ArrayList<Integer>();
+final List<Integer> spEndingOffsets = new ArrayList<>();
 int pos = 0;
 for (EscherRecord e : records) {
 pos += e.serialize(pos, buffer, new EscherSerializationListener() {
@@ -93,7 +93,7 @@ public final class ExtSSTRecord extends ContinuableRecord {
 _stringsPerBucket = in.readShort();
 int nInfos = in.remaining() / InfoSubRecord.ENCODED_SIZE;
-ArrayList<InfoSubRecord> lst = new ArrayList<InfoSubRecord>(nInfos);
+ArrayList<InfoSubRecord> lst = new ArrayList<>(nInfos);
 while (in.available() > 0) {
 InfoSubRecord info = new InfoSubRecord(in);
@@ -87,11 +87,11 @@ public class ExternSheetRecord extends StandardRecord {
 public ExternSheetRecord() {
-_list = new ArrayList<RefSubRecord>();
+_list = new ArrayList<>();
 }
 public ExternSheetRecord(RecordInputStream in) {
-_list = new ArrayList<RefSubRecord>();
+_list = new ArrayList<>();
 int nItems = in.readShort();
@@ -51,7 +51,7 @@ public final class ObjRecord extends Record implements Cloneable {
 public ObjRecord() {
-subrecords = new ArrayList<SubRecord>(2);
+subrecords = new ArrayList<>(2);
 // TODO - ensure 2 sub-records (ftCmo 15h, and ftEnd 00h) are always created
 _uninterpretedData = null;
 }
@@ -84,7 +84,7 @@ public final class ObjRecord extends Record implements Cloneable {
 }
 */
-subrecords = new ArrayList<SubRecord>();
+subrecords = new ArrayList<>();
 ByteArrayInputStream bais = new ByteArrayInputStream(subRecordData);
 LittleEndianInputStream subRecStream = new LittleEndianInputStream(bais);
 CommonObjectDataSubRecord cmo = (CommonObjectDataSubRecord)SubRecord.createSubRecord(subRecStream, 0);
@@ -77,15 +77,15 @@ public abstract class PageBreakRecord extends StandardRecord {
 }
 protected PageBreakRecord() {
-_breaks = new ArrayList<Break>();
-_breakMap = new HashMap<Integer, Break>();
+_breaks = new ArrayList<>();
+_breakMap = new HashMap<>();
 }
 public PageBreakRecord(RecordInputStream in)
 {
 int nBreaks = in.readShort();
-_breaks = new ArrayList<Break>(nBreaks + 2);
-_breakMap = new HashMap<Integer, Break>();
+_breaks = new ArrayList<>(nBreaks + 2);
+_breakMap = new HashMap<>();
 for(int k = 0; k < nBreaks; k++) {
 Break br = new Break(in);
@@ -39,7 +39,7 @@ public final class PaletteRecord extends StandardRecord {
 public PaletteRecord() {
 PColor[] defaultPalette = createDefaultPalette();
-_colors = new ArrayList<PColor>(defaultPalette.length);
+_colors = new ArrayList<>(defaultPalette.length);
 for (PColor element : defaultPalette) {
 _colors.add(element);
 }
@@ -47,7 +47,7 @@ public final class PaletteRecord extends StandardRecord {
 public PaletteRecord(RecordInputStream in) {
 int field_1_numcolors = in.readShort();
-_colors = new ArrayList<PColor>(field_1_numcolors);
+_colors = new ArrayList<>(field_1_numcolors);
 for (int k = 0; k < field_1_numcolors; k++) {
 _colors.add(new PColor(in));
 }
@@ -425,8 +425,8 @@ public final class RecordFactory {
 * most of org.apache.poi.hssf.record.*
 */
 private static Map<Integer, I_RecordCreator> recordsToMap(Class<? extends Record> [] records) {
-Map<Integer, I_RecordCreator> result = new HashMap<Integer, I_RecordCreator>();
-Set<Class<?>> uniqueRecClasses = new HashSet<Class<?>>(records.length * 3 / 2);
+Map<Integer, I_RecordCreator> result = new HashMap<>();
+Set<Class<?>> uniqueRecClasses = new HashSet<>(records.length * 3 / 2);
 for (Class<? extends Record> recClass : records) {
 if(!Record.class.isAssignableFrom(recClass)) {
@@ -486,7 +486,7 @@ public final class RecordFactory {
 */
 public static List<Record> createRecords(InputStream in) throws org.apache.poi.util.RecordFormatException {
-List<Record> records = new ArrayList<Record>(NUM_RECORDS);
+List<Record> records = new ArrayList<>(NUM_RECORDS);
 RecordFactoryInputStream recStream = new RecordFactoryInputStream(in, true);
@@ -183,7 +183,7 @@ public final class RecordFactoryInputStream {
 */
 public RecordFactoryInputStream(InputStream in, boolean shouldIncludeContinueRecords) {
 RecordInputStream rs = new RecordInputStream(in);
-List<Record> records = new ArrayList<Record>();
+List<Record> records = new ArrayList<>();
 StreamEncryptionInfo sei = new StreamEncryptionInfo(rs, records);
 if (sei.hasEncryption()) {
 rs = sei.createDecryptingStream(in);
@@ -68,7 +68,7 @@ public final class SSTRecord extends ContinuableRecord {
 {
 field_1_num_strings = 0;
 field_2_num_unique_strings = 0;
-field_3_strings = new IntMapper<UnicodeString>();
+field_3_strings = new IntMapper<>();
 deserializer = new SSTDeserializer(field_3_strings);
 }
@@ -240,7 +240,7 @@ public final class SSTRecord extends ContinuableRecord {
 // we initialize our fields
 field_1_num_strings = in.readInt();
 field_2_num_unique_strings = in.readInt();
-field_3_strings = new IntMapper<UnicodeString>();
+field_3_strings = new IntMapper<>();
 deserializer = new SSTDeserializer(field_3_strings);
 // Bug 57456: some Excel Sheets send 0 as field=1, but have some random number in field_2,
@@ -72,7 +72,7 @@ public final class CFRecordsAggregate extends RecordAggregate {
 throw new RecordFormatException("Mismatch number of rules");
 }
 header = pHeader;
-rules = new ArrayList<CFRuleBase>(pRules.length);
+rules = new ArrayList<>(pRules.length);
 for (CFRuleBase pRule : pRules) {
 checkRuleType(pRule);
 rules.add(pRule);
@@ -221,7 +221,7 @@ public final class CFRecordsAggregate extends RecordAggregate {
 public boolean updateFormulasAfterCellShift(FormulaShifter shifter, int currentExternSheetIx) {
 CellRangeAddress[] cellRanges = header.getCellRanges();
 boolean changed = false;
-List<CellRangeAddress> temp = new ArrayList<CellRangeAddress>();
+List<CellRangeAddress> temp = new ArrayList<>();
 for (CellRangeAddress craOld : cellRanges) {
 CellRangeAddress craNew = shiftRange(shifter, craOld, currentExternSheetIx);
 if (craNew == null) {
@@ -38,7 +38,7 @@ public final class ChartSubstreamRecordAggregate extends RecordAggregate {
 public ChartSubstreamRecordAggregate(RecordStream rs) {
 _bofRec = (BOFRecord) rs.getNext();
-List<RecordBase> temp = new ArrayList<RecordBase>();
+List<RecordBase> temp = new ArrayList<>();
 while (rs.peekNextClass() != EOFRecord.class) {
 if (PageSettingsBlock.isComponentRecord(rs.peekNextSid())) {
 if (_psBlock != null) {
@@ -52,7 +52,7 @@ public final class ColumnInfoRecordsAggregate extends RecordAggregate implements
 * Creates an empty aggregate
 */
 public ColumnInfoRecordsAggregate() {
-records = new ArrayList<ColumnInfoRecord>();
+records = new ArrayList<>();
 }
 public ColumnInfoRecordsAggregate(RecordStream rs) {
 this();
@@ -37,12 +37,12 @@ public final class ConditionalFormattingTable extends RecordAggregate {
 * Creates an empty ConditionalFormattingTable
 */
 public ConditionalFormattingTable() {
-_cfHeaders = new ArrayList<CFRecordsAggregate>();
+_cfHeaders = new ArrayList<>();
 }
 public ConditionalFormattingTable(RecordStream rs) {
-List<CFRecordsAggregate> temp = new ArrayList<CFRecordsAggregate>();
+List<CFRecordsAggregate> temp = new ArrayList<>();
 while (rs.peekNextClass() == CFHeaderRecord.class ||
 rs.peekNextClass() == CFHeader12Record.class) {
 temp.add(CFRecordsAggregate.createCFAggregate(rs));
@@ -42,7 +42,7 @@ public final class CustomViewSettingsRecordAggregate extends RecordAggregate {
 if (_begin.getSid() != UserSViewBegin.sid) {
 throw new IllegalStateException("Bad begin record");
 }
-List<RecordBase> temp = new ArrayList<RecordBase>();
+List<RecordBase> temp = new ArrayList<>();
 while (rs.peekNextSid() != UserSViewEnd.sid) {
 if (PageSettingsBlock.isComponentRecord(rs.peekNextSid())) {
 if (_psBlock != null) {
@@ -39,7 +39,7 @@ public final class DataValidityTable extends RecordAggregate {
 public DataValidityTable(RecordStream rs) {
 _headerRec = (DVALRecord) rs.getNext();
-List<DVRecord> temp = new ArrayList<DVRecord>();
+List<DVRecord> temp = new ArrayList<>();
 while (rs.peekNextClass() == DVRecord.class) {
 temp.add((DVRecord) rs.getNext());
 }
@@ -48,7 +48,7 @@ public final class DataValidityTable extends RecordAggregate {
 public DataValidityTable() {
 _headerRec = new DVALRecord();
-_validationList = new ArrayList<DVRecord>();
+_validationList = new ArrayList<>();
 }
 public void visitContainedRecords(RecordVisitor rv) {
@ -38,7 +38,7 @@ public final class MergedCellsTable extends RecordAggregate {
* Creates an empty aggregate
*/
public MergedCellsTable() {
_mergedRegions = new ArrayList<CellRangeAddress>();
_mergedRegions = new ArrayList<>();
}

/**

@ -52,7 +52,7 @@ public final class PageSettingsBlock extends RecordAggregate {
public PLSAggregate(RecordStream rs) {
_pls = rs.getNext();
if (rs.peekNextSid()==ContinueRecord.sid) {
List<ContinueRecord> temp = new ArrayList<ContinueRecord>();
List<ContinueRecord> temp = new ArrayList<>();
while (rs.peekNextSid()==ContinueRecord.sid) {
temp.add((ContinueRecord)rs.getNext());
}
@ -93,11 +93,11 @@ public final class PageSettingsBlock extends RecordAggregate {
* The indicator of such records is a non-zero GUID,
* see {@link org.apache.poi.hssf.record.HeaderFooterRecord#getGuid()}
*/
private final List<HeaderFooterRecord> _sviewHeaderFooters = new ArrayList<HeaderFooterRecord>();
private final List<HeaderFooterRecord> _sviewHeaderFooters = new ArrayList<>();
private Record _printSize;

public PageSettingsBlock(RecordStream rs) {
_plsRecords = new ArrayList<PLSAggregate>();
_plsRecords = new ArrayList<>();
while(true) {
if (!readARecord(rs)) {
break;
@ -109,7 +109,7 @@ public final class PageSettingsBlock extends RecordAggregate {
* Creates a PageSettingsBlock with default settings
*/
public PageSettingsBlock() {
_plsRecords = new ArrayList<PLSAggregate>();
_plsRecords = new ArrayList<>();
_rowBreaksRecord = new HorizontalPageBreakRecord();
_columnBreaksRecord = new VerticalPageBreakRecord();
_header = new HeaderRecord("");
@ -482,7 +482,7 @@ public final class PageSettingsBlock extends RecordAggregate {
private static void shiftBreaks(PageBreakRecord breaks, int start, int stop, int count) {

Iterator<PageBreakRecord.Break> iterator = breaks.getBreaksIterator();
List<PageBreakRecord.Break> shiftedBreak = new ArrayList<PageBreakRecord.Break>();
List<PageBreakRecord.Break> shiftedBreak = new ArrayList<>();
while(iterator.hasNext())
{
PageBreakRecord.Break breakItem = iterator.next();
@ -672,9 +672,9 @@ public final class PageSettingsBlock extends RecordAggregate {
public void positionRecords(List<RecordBase> sheetRecords) {
// Take a copy to loop over, so we can update the real one
// without concurrency issues
List<HeaderFooterRecord> hfRecordsToIterate = new ArrayList<HeaderFooterRecord>(_sviewHeaderFooters);
List<HeaderFooterRecord> hfRecordsToIterate = new ArrayList<>(_sviewHeaderFooters);

final Map<String, HeaderFooterRecord> hfGuidMap = new HashMap<String, HeaderFooterRecord>();
final Map<String, HeaderFooterRecord> hfGuidMap = new HashMap<>();

for(final HeaderFooterRecord hf : hfRecordsToIterate) {
hfGuidMap.put(HexDump.toHex(hf.getGuid()), hf);

@ -53,9 +53,9 @@ public final class RowRecordsAggregate extends RecordAggregate {
if (svm == null) {
throw new IllegalArgumentException("SharedValueManager must be provided.");
}
_rowRecords = new TreeMap<Integer, RowRecord>();
_rowRecords = new TreeMap<>();
_valuesAgg = new ValueRecordsAggregate();
_unknownRecords = new ArrayList<Record>();
_unknownRecords = new ArrayList<>();
_sharedValueManager = svm;
}

@ -122,7 +122,7 @@ public final class SharedValueManager {
}
_arrayRecords = toList(arrayRecords);
_tableRecords = tableRecords;
Map<SharedFormulaRecord, SharedFormulaGroup> m = new HashMap<SharedFormulaRecord, SharedFormulaGroup>(nShF * 3 / 2);
Map<SharedFormulaRecord, SharedFormulaGroup> m = new HashMap<>(nShF * 3 / 2);
for (int i = 0; i < nShF; i++) {
SharedFormulaRecord sfr = sharedFormulaRecords[i];
m.put(sfr, new SharedFormulaGroup(sfr, firstCells[i]));
@ -134,7 +134,7 @@ public final class SharedValueManager {
* @return a modifiable list, independent of the supplied array
*/
private static <Z> List<Z> toList(Z[] zz) {
List<Z> result = new ArrayList<Z>(zz.length);
List<Z> result = new ArrayList<>(zz.length);
for (int i = 0; i < zz.length; i++) {
result.add(zz[i]);
}
@ -167,7 +167,7 @@ public final class SharedValueManager {

private SharedFormulaGroup findFormulaGroupForCell(final CellReference cellRef) {
if(null == _groupsCache) {
_groupsCache = new HashMap<Integer,SharedFormulaGroup>(_groupsBySharedFormulaRecord.size());
_groupsCache = new HashMap<>(_groupsBySharedFormulaRecord.size());
for(SharedFormulaGroup group: _groupsBySharedFormulaRecord.values()) {
_groupsCache.put(getKeyForCache(group._firstCell),group);
}

@ -464,7 +464,7 @@ public class UnicodeString implements Comparable<UnicodeString> {
field_3_string = (isCompressed) ? in.readCompressedUnicode(cc) : in.readUnicodeLEString(cc);

if (isRichText() && (runCount > 0)) {
field_4_format_runs = new ArrayList<FormatRun>(runCount);
field_4_format_runs = new ArrayList<>(runCount);
for (int i=0;i<runCount;i++) {
field_4_format_runs.add(new FormatRun(in));
}
@ -610,7 +610,7 @@ public class UnicodeString implements Comparable<UnicodeString> {
*/
public void addFormatRun(FormatRun r) {
if (field_4_format_runs == null) {
field_4_format_runs = new ArrayList<FormatRun>();
field_4_format_runs = new ArrayList<>();
}

int index = findFormatRunAt(r._character);
@ -812,7 +812,7 @@ public class UnicodeString implements Comparable<UnicodeString> {
str.field_2_optionflags = field_2_optionflags;
str.field_3_string = field_3_string;
if (field_4_format_runs != null) {
str.field_4_format_runs = new ArrayList<FormatRun>();
str.field_4_format_runs = new ArrayList<>();
for (FormatRun r : field_4_format_runs) {
str.field_4_format_runs.add(new FormatRun(r._character, r._fontIndex));
}

@ -24,7 +24,7 @@ public final class Biff8EncryptionKey {
* using a {@link ThreadLocal} in order to avoid further overloading the various public APIs
* (e.g. {@link HSSFWorkbook}) that need this functionality.
*/
private static final ThreadLocal<String> _userPasswordTLS = new ThreadLocal<String>();
private static final ThreadLocal<String> _userPasswordTLS = new ThreadLocal<>();

/**
* Sets the BIFF8 encryption/decryption password for the current thread.

@ -29,7 +29,7 @@ public class FontDetails
{
private String _fontName;
private int _height;
private final Map<Character, Integer> charWidths = new HashMap<Character, Integer>();
private final Map<Character, Integer> charWidths = new HashMap<>();

/**
* Construct the font details with the given name and height.

@ -116,8 +116,8 @@ public final class HSSFCellStyle implements CellStyle {
return Short.MIN_VALUE;
}
};
private static final ThreadLocal<List<FormatRecord>> lastFormats = new ThreadLocal<List<FormatRecord>>();
private static final ThreadLocal<String> getDataFormatStringCache = new ThreadLocal<String>();
private static final ThreadLocal<List<FormatRecord>> lastFormats = new ThreadLocal<>();
private static final ThreadLocal<String> getDataFormatStringCache = new ThreadLocal<>();

/**
* Get the contents of the format string, by looking up

@ -42,7 +42,7 @@ import org.apache.poi.ss.usermodel.DataFormat;
public final class HSSFDataFormat implements DataFormat {
private static final String[] _builtinFormats = BuiltinFormats.getAll();

private final Vector<String> _formats = new Vector<String>();
private final Vector<String> _formats = new Vector<>();
private final InternalWorkbook _workbook;
private boolean _movedBuiltins; // Flag to see if need to
// check the built in list

@ -137,7 +137,7 @@ public class HSSFOptimiser {
// the new locations of the fonts
// Remember that one underlying unicode string
// may be shared by multiple RichTextStrings!
HashSet<UnicodeString> doneUnicodeStrings = new HashSet<UnicodeString>();
HashSet<UnicodeString> doneUnicodeStrings = new HashSet<>();
for(int sheetNum=0; sheetNum<workbook.getNumberOfSheets(); sheetNum++) {
HSSFSheet s = workbook.getSheetAt(sheetNum);
for (Row row : s) {

@ -60,7 +60,7 @@ import org.apache.poi.util.StringUtil;
*/
public final class HSSFPatriarch implements HSSFShapeContainer, Drawing<HSSFShape> {
// private static POILogger log = POILogFactory.getLogger(HSSFPatriarch.class);
private final List<HSSFShape> _shapes = new ArrayList<HSSFShape>();
private final List<HSSFShape> _shapes = new ArrayList<>();

private final EscherSpgrRecord _spgrRecord;
private final EscherContainerRecord _mainSpgrContainer;
@ -122,7 +122,7 @@ public final class HSSFPatriarch implements HSSFShapeContainer, Drawing<HSSFShap
/**
* contains coordinates of comments we iterate over
*/
Set<String> coordinates = new HashSet<String>(tailRecords.size());
Set<String> coordinates = new HashSet<>(tailRecords.size());
for(NoteRecord rec : tailRecords.values()){
String noteRef = new CellReference(rec.getRow(),
rec.getColumn()).formatAsString(); // A1-style notation
@ -416,7 +416,7 @@ public final class HSSFPatriarch implements HSSFShapeContainer, Drawing<HSSFShap
*/
@Override
public void clear() {
ArrayList <HSSFShape> copy = new ArrayList<HSSFShape>(_shapes);
ArrayList <HSSFShape> copy = new ArrayList<>(_shapes);
for (HSSFShape shape: copy){
removeShape(shape);
}

@ -30,7 +30,7 @@ import java.util.Iterator;
* sheet.
*/
public class HSSFShapeGroup extends HSSFShape implements HSSFShapeContainer {
private final List<HSSFShape> shapes = new ArrayList<HSSFShape>();
private final List<HSSFShape> shapes = new ArrayList<>();
private EscherSpgrRecord _spgrRecord;

public HSSFShapeGroup(EscherContainerRecord spgrContainer, ObjRecord objRecord) {
@ -268,7 +268,7 @@ public class HSSFShapeGroup extends HSSFShape implements HSSFShapeContainer {
}

public void clear() {
ArrayList <HSSFShape> copy = new ArrayList<HSSFShape>(shapes);
ArrayList <HSSFShape> copy = new ArrayList<>(shapes);
for (HSSFShape shape: copy){
removeShape(shape);
}

@ -124,7 +124,7 @@ public final class HSSFSheet implements org.apache.poi.ss.usermodel.Sheet {
*/
protected HSSFSheet(HSSFWorkbook workbook) {
_sheet = InternalSheet.createSheet();
_rows = new TreeMap<Integer, HSSFRow>();
_rows = new TreeMap<>();
this._workbook = workbook;
this._book = workbook.getWorkbook();
}
@ -139,7 +139,7 @@ public final class HSSFSheet implements org.apache.poi.ss.usermodel.Sheet {
*/
protected HSSFSheet(HSSFWorkbook workbook, InternalSheet sheet) {
this._sheet = sheet;
_rows = new TreeMap<Integer, HSSFRow>();
_rows = new TreeMap<>();
this._workbook = workbook;
this._book = workbook.getWorkbook();
setPropertiesFromSheet(sheet);
@ -424,7 +424,7 @@ public final class HSSFSheet implements org.apache.poi.ss.usermodel.Sheet {
@Override
public List<HSSFDataValidation> getDataValidations() {
DataValidityTable dvt = _sheet.getOrCreateDataValidityTable();
final List<HSSFDataValidation> hssfValidations = new ArrayList<HSSFDataValidation>();
final List<HSSFDataValidation> hssfValidations = new ArrayList<>();
RecordVisitor visitor = new RecordVisitor() {
private HSSFEvaluationWorkbook book = HSSFEvaluationWorkbook.create(getWorkbook());

@ -908,7 +908,7 @@ public final class HSSFSheet implements org.apache.poi.ss.usermodel.Sheet {
*/
@Override
public void removeMergedRegions(Collection<Integer> indices) {
for (int i : (new TreeSet<Integer>(indices)).descendingSet()) {
for (int i : (new TreeSet<>(indices)).descendingSet()) {
_sheet.removeMergedRegion(i);
}
}
@ -936,7 +936,7 @@ public final class HSSFSheet implements org.apache.poi.ss.usermodel.Sheet {
*/
@Override
public List<CellRangeAddress> getMergedRegions() {
List<CellRangeAddress> addresses = new ArrayList<CellRangeAddress>();
List<CellRangeAddress> addresses = new ArrayList<>();
int count = _sheet.getNumMergedRegions();
for (int i=0; i < count; i++) {
addresses.add(_sheet.getMergedRegionAt(i));
@ -2223,7 +2223,7 @@ public final class HSSFSheet implements org.apache.poi.ss.usermodel.Sheet {
*/
@Override
public List<HSSFHyperlink> getHyperlinkList() {
final List<HSSFHyperlink> hyperlinkList = new ArrayList<HSSFHyperlink>();
final List<HSSFHyperlink> hyperlinkList = new ArrayList<>();
for (RecordBase rec : _sheet.getRecords()) {
if (rec instanceof HyperlinkRecord) {
HyperlinkRecord link = (HyperlinkRecord) rec;
@ -2290,7 +2290,7 @@ public final class HSSFSheet implements org.apache.poi.ss.usermodel.Sheet {
int lastColumn = range.getLastColumn();
int height = lastRow - firstRow + 1;
int width = lastColumn - firstColumn + 1;
List<HSSFCell> temp = new ArrayList<HSSFCell>(height * width);
List<HSSFCell> temp = new ArrayList<>(height * width);
for (int rowIn = firstRow; rowIn <= lastRow; rowIn++) {
for (int colIn = firstColumn; colIn <= lastColumn; colIn++) {
HSSFRow row = getRow(rowIn);
@ -2432,7 +2432,7 @@ public final class HSSFSheet implements org.apache.poi.ss.usermodel.Sheet {
patriarch = createDrawingPatriarch();
}

Map<CellAddress, HSSFComment> locations = new TreeMap<CellAddress, HSSFComment>();
Map<CellAddress, HSSFComment> locations = new TreeMap<>();
findCellCommentLocations(patriarch, locations);
return locations;
}
@ -2530,7 +2530,7 @@ public final class HSSFSheet implements org.apache.poi.ss.usermodel.Sheet {
NameRecord.BUILTIN_PRINT_TITLE, sheetIndex);
}

List<Ptg> ptgList = new ArrayList<Ptg>();
List<Ptg> ptgList = new ArrayList<>();
if (setBoth) {
final int exprsSize = 2 * 11 + 1; // 2 * Area3DPtg.SIZE + UnionPtg.SIZE
ptgList.add(new MemFuncPtg(exprsSize));

@ -217,8 +217,8 @@ public final class HSSFWorkbook extends POIDocument implements org.apache.poi.ss
private HSSFWorkbook(InternalWorkbook book) {
super((DirectoryNode)null);
workbook = book;
_sheets = new ArrayList<HSSFSheet>(INITIAL_CAPACITY);
names = new ArrayList<HSSFName>(INITIAL_CAPACITY);
_sheets = new ArrayList<>(INITIAL_CAPACITY);
names = new ArrayList<>(INITIAL_CAPACITY);
}

/**
@ -345,8 +345,8 @@ public final class HSSFWorkbook extends POIDocument implements org.apache.poi.ss
clearDirectory();
}

_sheets = new ArrayList<HSSFSheet>(INITIAL_CAPACITY);
names = new ArrayList<HSSFName>(INITIAL_CAPACITY);
_sheets = new ArrayList<>(INITIAL_CAPACITY);
names = new ArrayList<>(INITIAL_CAPACITY);

// Grab the data from the workbook stream, however
// it happens to be spelled.
@ -606,7 +606,7 @@ public final class HSSFWorkbook extends POIDocument implements org.apache.poi.ss
* @param indexes Array of sheets to select, the index is 0-based.
*/
public void setSelectedTabs(int[] indexes) {
Collection<Integer> list = new ArrayList<Integer>(indexes.length);
Collection<Integer> list = new ArrayList<>(indexes.length);
for (int index : indexes) {
list.add(index);
}
@ -626,7 +626,7 @@ public final class HSSFWorkbook extends POIDocument implements org.apache.poi.ss
validateSheetIndex(index);
}
// ignore duplicates
Set<Integer> set = new HashSet<Integer>(indexes);
Set<Integer> set = new HashSet<>(indexes);
int nSheets = _sheets.size();
for (int i=0; i<nSheets; i++) {
boolean bSelect = set.contains(i);
@ -643,7 +643,7 @@ public final class HSSFWorkbook extends POIDocument implements org.apache.poi.ss
* @return indices of selected sheets
*/
public Collection<Integer> getSelectedTabs() {
Collection<Integer> indexes = new ArrayList<Integer>();
Collection<Integer> indexes = new ArrayList<>();
int nSheets = _sheets.size();
for (int i=0; i<nSheets; i++) {
HSSFSheet sheet = getSheetAt(i);
@ -946,7 +946,7 @@ public final class HSSFWorkbook extends POIDocument implements org.apache.poi.ss
*/
@Override
public Iterator<Sheet> sheetIterator() {
return new SheetIterator<Sheet>();
return new SheetIterator<>();
}

/**
@ -1235,7 +1235,7 @@ public final class HSSFWorkbook extends POIDocument implements org.apache.poi.ss
@Override
public HSSFFont getFontAt(short idx) {
if(fonts == null) {
fonts = new HashMap<Short, HSSFFont>();
fonts = new HashMap<>();
}

// So we don't confuse users, give them back
@ -1260,7 +1260,7 @@ public final class HSSFWorkbook extends POIDocument implements org.apache.poi.ss
* and that's not something you should normally do
*/
protected void resetFontCache() {
fonts = new HashMap<Short, HSSFFont>();
fonts = new HashMap<>();
}

/**
@ -1401,7 +1401,7 @@ public final class HSSFWorkbook extends POIDocument implements org.apache.poi.ss
private void write(NPOIFSFileSystem fs) throws IOException {
// For tracking what we've written out, used if we're
// going to be preserving nodes
List<String> excepts = new ArrayList<String>(1);
List<String> excepts = new ArrayList<>(1);

// Write out the Workbook stream
fs.createDocument(new ByteArrayInputStream(getBytes()), "Workbook");
@ -1445,7 +1445,7 @@ public final class HSSFWorkbook extends POIDocument implements org.apache.poi.ss

public SheetRecordCollector() {
_totalSize = 0;
_list = new ArrayList<Record>(128);
_list = new ArrayList<>(128);
}
public int getTotalSize() {
return _totalSize;
@ -1597,7 +1597,7 @@ public final class HSSFWorkbook extends POIDocument implements org.apache.poi.ss

@Override
public List<HSSFName> getNames(String name) {
List<HSSFName> nameList = new ArrayList<HSSFName>();
List<HSSFName> nameList = new ArrayList<>();
for(HSSFName nr : names) {
if(nr.getNameName().equals(name)) {
nameList.add(nr);
@ -1970,7 +1970,7 @@ public final class HSSFWorkbook extends POIDocument implements org.apache.poi.ss
public List<HSSFPictureData> getAllPictures()
{
// The drawing group record always exists at the top level, so we won't need to do this recursively.
List<HSSFPictureData> pictures = new ArrayList<HSSFPictureData>();
List<HSSFPictureData> pictures = new ArrayList<>();
for (Record r : workbook.getRecords()) {
if (r instanceof AbstractEscherHolderRecord) {
((AbstractEscherHolderRecord) r).decode();
@ -2011,7 +2011,7 @@ public final class HSSFWorkbook extends POIDocument implements org.apache.poi.ss
}

protected static Map<String,ClassID> getOleMap() {
Map<String,ClassID> olemap = new HashMap<String,ClassID>();
Map<String,ClassID> olemap = new HashMap<>();
olemap.put("PowerPoint Document", ClassID.PPT_SHOW);
for (String str : WORKBOOK_DIR_ENTRY_NAMES) {
olemap.put(str, ClassID.XLS_WORKBOOK);
@ -2123,7 +2123,7 @@ public final class HSSFWorkbook extends POIDocument implements org.apache.poi.ss
*/
public List<HSSFObjectData> getAllEmbeddedObjects()
{
List<HSSFObjectData> objects = new ArrayList<HSSFObjectData>();
List<HSSFObjectData> objects = new ArrayList<>();
for (HSSFSheet sheet : _sheets)
{
getAllEmbeddedObjects(sheet, objects);

@ -41,7 +41,7 @@ final class StaticFontMetrics {
/** The font metrics property file we're using */
private static Properties fontMetricsProps;
/** Our cache of font details we've already looked up */
private static final Map<String, FontDetails> fontDetailsMap = new HashMap<String, FontDetails>();
private static final Map<String, FontDetails> fontDetailsMap = new HashMap<>();

private StaticFontMetrics() {}

@ -187,7 +187,7 @@ public class HSSFColor implements Color {

private static Map<Integer,HSSFColor> createColorsByIndexMap() {
Map<HSSFColorPredefined,HSSFColor> eList = mapEnumToColorClass();
Map<Integer,HSSFColor> result = new HashMap<Integer,HSSFColor>(eList.size() * 3 / 2);
Map<Integer,HSSFColor> result = new HashMap<>(eList.size() * 3 / 2);

for (Map.Entry<HSSFColorPredefined,HSSFColor> colorRef : eList.entrySet()) {
Integer index1 = (int)colorRef.getKey().getIndex();
@ -218,7 +218,7 @@ public class HSSFColor implements Color {

private static Map<String,HSSFColor> createColorsByHexStringMap() {
Map<HSSFColorPredefined,HSSFColor> eList = mapEnumToColorClass();
Map<String,HSSFColor> result = new HashMap<String,HSSFColor>(eList.size());
Map<String,HSSFColor> result = new HashMap<>(eList.size());

for (Map.Entry<HSSFColorPredefined,HSSFColor> colorRef : eList.entrySet()) {
String hexString = colorRef.getKey().getHexString();
@ -239,7 +239,7 @@ public class HSSFColor implements Color {
@Removal(version="3.18")
private static synchronized Map<HSSFColorPredefined,HSSFColor> mapEnumToColorClass() {
if (enumList == null) {
enumList = new EnumMap<HSSFColorPredefined,HSSFColor>(HSSFColorPredefined.class);
enumList = new EnumMap<>(HSSFColorPredefined.class);
// AUTOMATIC is not add to list
addHSSFColorPredefined(HSSFColorPredefined.BLACK);
addHSSFColorPredefined(HSSFColorPredefined.BROWN);

@ -26,7 +26,7 @@ import java.util.List;
* a large amount of time.
*/
public class LazilyConcatenatedByteArray {
private final List<byte[]> arrays = new ArrayList<byte[]>(1);
private final List<byte[]> arrays = new ArrayList<>(1);

/**
* Clears the array (sets the concatenated length back to zero.

@ -128,7 +128,7 @@ public class CryptoAPIEncryptor extends Encryptor implements Cloneable {
byte buf[] = new byte[8];

bos.write(buf, 0, 8); // skip header
List<StreamDescriptorEntry> descList = new ArrayList<StreamDescriptorEntry>();
List<StreamDescriptorEntry> descList = new ArrayList<>();

int block = 0;
for (Entry entry : entries.getRoot()) {

@ -55,7 +55,7 @@ public class POIFSViewEngine
final int indentLevel,
final String indentString)
{
List<String> objects = new ArrayList<String>();
List<String> objects = new ArrayList<>();

if (viewable instanceof POIFSViewable)
{

@ -57,9 +57,9 @@ class POIFSReaderRegistry

POIFSReaderRegistry()
{
omnivorousListeners = new HashSet<POIFSReaderListener>();
selectiveListeners = new HashMap<POIFSReaderListener, Set<DocumentDescriptor>>();
chosenDocumentDescriptors = new HashMap<DocumentDescriptor,Set<POIFSReaderListener>>();
omnivorousListeners = new HashSet<>();
selectiveListeners = new HashMap<>();
chosenDocumentDescriptors = new HashMap<>();
}

/**
@ -85,7 +85,7 @@ class POIFSReaderRegistry
{

// this listener has not registered before
descriptors = new HashSet<DocumentDescriptor>();
descriptors = new HashSet<>();
selectiveListeners.put(listener, descriptors);
}
DocumentDescriptor descriptor = new DocumentDescriptor(path,
@ -104,7 +104,7 @@ class POIFSReaderRegistry
{

// nobody was listening for this document before
listeners = new HashSet<POIFSReaderListener>();
listeners = new HashSet<>();
chosenDocumentDescriptors.put(descriptor, listeners);
}
listeners.add(listener);
@ -143,7 +143,7 @@ class POIFSReaderRegistry

Iterator<POIFSReaderListener> getListeners(final POIFSDocumentPath path, final String name)
{
Set<POIFSReaderListener> rval = new HashSet<POIFSReaderListener>(omnivorousListeners);
Set<POIFSReaderListener> rval = new HashSet<>(omnivorousListeners);
Set<POIFSReaderListener> selectiveListenersInner =
chosenDocumentDescriptors.get(new DocumentDescriptor(path, name));

@ -107,8 +107,8 @@ public class DirectoryNode
property.getName()
});
}
_byname = new HashMap<String, Entry>();
_entries = new ArrayList<Entry>();
_byname = new HashMap<>();
_entries = new ArrayList<>();
Iterator<Property> iter = property.getChildren();

while (iter.hasNext())
@ -588,7 +588,7 @@ public class DirectoryNode
*/
public Iterator<Object> getViewableIterator()
{
List<Object> components = new ArrayList<Object>();
List<Object> components = new ArrayList<>();

components.add(getProperty());
Iterator<Entry> iter = _entries.iterator();
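
Note, added for context and not part of the commit above: every hunk in this diff makes the same mechanical change, replacing an explicit generic type argument on the right-hand side of a new expression with the diamond operator <>, so the compiler infers the type from the declared variable or field. A minimal standalone sketch of the before/after pattern follows; the class and variable names are illustrative only and do not appear in Apache POI.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Hypothetical example class, not part of Apache POI.
public class DiamondOperatorExample {
    public static void main(String[] args) {
        // Old style: the element type is repeated on both sides of the assignment.
        List<String> before = new ArrayList<String>();

        // Diamond operator: the compiler infers <String> from the declared type.
        List<String> after = new ArrayList<>();

        // The same applies to maps, including sized constructors.
        Map<String, Integer> sized = new HashMap<>(16);

        before.add("explicit type argument");
        after.add("inferred type argument");
        sized.put("initialCapacity", 16);

        System.out.println(before);
        System.out.println(after);
        System.out.println(sized);
    }
}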
Some files were not shown because too many files have changed in this diff.