Exception in thread "main" org.apache.poi.util.RecordFormatException: Tried to allocate an array of length 167,757,507, but the maximum length for this record type is 100,000,000.
If the file is not corrupt and not large, please open an issue on bugzilla to request increasing the maximum allowable size for this record type.
You can set a higher override value with IOUtils.setByteArrayMaxOverride()
    at org.apache.poi.util.IOUtils.throwRFE(IOUtils.java:599)
    at org.apache.poi.util.IOUtils.checkLength(IOUtils.java:276)
    at org.apache.poi.util.IOUtils.toByteArray(IOUtils.java:230)
    at org.apache.poi.util.IOUtils.toByteArray(IOUtils.java:203)
    at org.apache.poi.openxml4j.util.ZipArchiveFakeEntry.<init>(ZipArchiveFakeEntry.java:82)
    at org.apache.poi.openxml4j.util.ZipInputStreamZipEntrySource.<init>(ZipInputStreamZipEntrySource.java:98)
    at org.apache.poi.openxml4j.opc.ZipPackage.<init>(ZipPackage.java:132)
    at org.apache.poi.openxml4j.opc.OPCPackage.open(OPCPackage.java:319)
    at org.apache.poi.xssf.usermodel.XSSFWorkbookFactory.create(XSSFWorkbookFactory.java:97)
    at org.apache.poi.xssf.usermodel.XSSFWorkbookFactory.create(XSSFWorkbookFactory.java:36)
    at org.apache.poi.ss.usermodel.WorkbookFactory.lambda$create$2(WorkbookFactory.java:224)
    at org.apache.poi.ss.usermodel.WorkbookFactory.wp(WorkbookFactory.java:329)
    at org.apache.poi.ss.usermodel.WorkbookFactory.create(WorkbookFactory.java:224)
    at org.apache.poi.ss.usermodel.WorkbookFactory.create(WorkbookFactory.java:185)
    at com.crealytics.spark.excel.v2.ExcelHelper.getWorkbook(ExcelHelper.scala:120)
    at com.crealytics.spark.excel.v2.ExcelHelper.getSheetData(ExcelHelper.scala:137)
    at com.crealytics.spark.excel.v2.ExcelHelper.parseSheetData(ExcelHelper.scala:160)
    at com.crealytics.spark.excel.v2.ExcelTable.infer(ExcelTable.scala:77)
    at com.crealytics.spark.excel.v2.ExcelTable.inferSchema(ExcelTable.scala:48)
    at org.apache.spark.sql.execution.datasources.v2.FileTable.$anonfun$dataSchema$4(FileTable.scala:70)
    at scala.Option.orElse(Option.scala:447)
    at org.apache.spark.sql.execution.datasources.v2.FileTable.dataSchema$lzycompute(FileTable.scala:70)
    at org.apache.spark.sql.execution.datasources.v2.FileTable.dataSchema(FileTable.scala:64)
    at org.apache.spark.sql.execution.datasources.v2.FileTable.schema$lzycompute(FileTable.scala:82)
    at org.apache.spark.sql.execution.datasources.v2.FileTable.schema(FileTable.scala:80)
    at org.apache.spark.sql.execution.datasources.v2.FileDataSourceV2.inferSchema(FileDataSourceV2.scala:94)
    at org.apache.spark.sql.execution.datasources.v2.FileDataSourceV2.inferSchema$(FileDataSourceV2.scala:92)
    at com.crealytics.spark.excel.v2.ExcelDataSource.inferSchema(ExcelDataSource.scala:27)
    at org.apache.spark.sql.execution.datasources.v2.DataSourceV2Utils$.getTableFromProvider(DataSourceV2Utils.scala:90)
    at org.apache.spark.sql.execution.datasources.v2.DataSourceV2Utils$.loadV2Source(DataSourceV2Utils.scala:140)
    at org.apache.spark.sql.DataFrameReader.$anonfun$load$1(DataFrameReader.scala:209)
    at scala.Option.flatMap(Option.scala:271)
    at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:207)
    at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:185)
    at com.delta.graph.scala.TestGenerateGraph$.main(TestGenerateGraph.scala:34)
    at com.delta.graph.scala.TestGenerateGraph.main(TestGenerateGraph.scala)

Solution: call IOUtils.setByteArrayMaxOverride(200000000) before the workbook is opened. This raises POI's maximum allowed byte-array allocation above the 167,757,507 bytes this file needs, so the RecordFormatException is no longer thrown.
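A minimal Scala sketch of where the override could go in a Spark driver like the TestGenerateGraph program in the stack trace. The app name, file path, and reader options are placeholders, and the format string "excel" assumes the spark-excel V2 data source (com.crealytics.spark.excel.v2.ExcelDataSource) shown above. The override is a static JVM-wide setting, so it must be applied in every JVM that actually opens the workbook; in this trace the failure happens during schema inference on the driver.

import org.apache.poi.util.IOUtils
import org.apache.spark.sql.SparkSession

object TestGenerateGraph {
  def main(args: Array[String]): Unit = {
    // Raise POI's byte-array allocation cap before any workbook is opened.
    // 200,000,000 bytes is above the 167,757,507 bytes the failing record requested.
    IOUtils.setByteArrayMaxOverride(200000000)

    val spark = SparkSession.builder()
      .appName("TestGenerateGraph") // placeholder app name
      .getOrCreate()

    // Placeholder path; "excel" selects the spark-excel V2 source from the trace.
    val df = spark.read
      .format("excel")
      .option("header", "true")
      .load("/path/to/large-file.xlsx")

    df.printSchema()
    df.show(10, truncate = false)

    spark.stop()
  }
}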
