Splitting and merging large files (size in GB) in Java

Suppose I am splitting a 2590400 KB (approx 2.5 GB) file into 30 parts.
This produces 30 files of 86347 KB each, which seems correct: 2590400 / 30 = 86346.67.
Now if I merge all 30 parts again, the result is a 3453873 KB file, when it should be 2590410 KB.
Can anyone help me understand where this difference comes from? I am using the code below to split and merge the files.
SplitFile.java
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.RandomAccessFile;
/**
* @author vishal.zanzrukia
*
*/
public class SplitFile {
public static final String INPUT_FILE = "D:\\me\\projects\\input\\file\\path.txt";
public static final int NUMBER_OF_OUTPUT_FILES = 30;
public static final String FILE_SUFFIX = ".txt";
/**
* split file
*
* @throws Exception
*/
static void splitFile() throws Exception{
File inputFile = new File(INPUT_FILE + "_Splits");
inputFile.mkdir();
RandomAccessFile raf = new RandomAccessFile(INPUT_FILE, "r");
long sourceSize = raf.length();
long bytesPerSplit = sourceSize / NUMBER_OF_OUTPUT_FILES;
long remainingBytes = sourceSize % NUMBER_OF_OUTPUT_FILES;
int maxReadBufferSize = 8 * 1024; // 8KB
for (int destIx = 1; destIx <= NUMBER_OF_OUTPUT_FILES; destIx++) {
BufferedOutputStream bw = new BufferedOutputStream(new FileOutputStream(INPUT_FILE + "_Splits\\split." + destIx + FILE_SUFFIX));
if (bytesPerSplit > maxReadBufferSize) {
long numReads = bytesPerSplit / maxReadBufferSize;
long numRemainingRead = bytesPerSplit % maxReadBufferSize;
for (int i = 0; i < numReads; i++) {
readWrite(raf, bw, maxReadBufferSize);
}
if (numRemainingRead > 0) {
readWrite(raf, bw, numRemainingRead);
}
} else {
readWrite(raf, bw, bytesPerSplit);
}
bw.close();
}
if (remainingBytes > 0) {
BufferedOutputStream bw = new BufferedOutputStream(new FileOutputStream("split." + NUMBER_OF_OUTPUT_FILES + 1));
readWrite(raf, bw, remainingBytes);
bw.close();
}
raf.close();
}
/**
* join file
*
* @throws Exception
*/
static void joinFiles() throws Exception{
int maxReadBufferSize = 8 * 1024;
BufferedOutputStream bw = new BufferedOutputStream(new FileOutputStream(INPUT_FILE + "_Splits\\fullJoin" + FILE_SUFFIX));
File inputFileDir = new File(INPUT_FILE + "_Splits");
RandomAccessFile raf = null;
if(inputFileDir.isDirectory()){
for(File file : inputFileDir.listFiles()){
raf = new RandomAccessFile(file, "r");
long numReads = raf.length() / maxReadBufferSize;
long numRemainingRead = raf.length() % maxReadBufferSize;
for (int i = 0; i < numReads; i++) {
readWrite(raf, bw, maxReadBufferSize);
}
if (numRemainingRead > 0) {
readWrite(raf, bw, numRemainingRead);
}
raf.close();
}
}
bw.close();
}
public static void mergeFiles() {
File[] files = new File[NUMBER_OF_OUTPUT_FILES];
for(int i=1;i<=NUMBER_OF_OUTPUT_FILES;i++){
files[i-1] = new File(INPUT_FILE + "_Splits\\split."+i+FILE_SUFFIX);
}
String mergedFilePath = INPUT_FILE + "_Splits\\fullJoin" + FILE_SUFFIX;
File mergedFile = new File(mergedFilePath);
mergeFiles(files, mergedFile);
}
public static void mergeFiles(File[] files, File mergedFile) {
FileWriter fstream = null;
BufferedWriter out = null;
try {
fstream = new FileWriter(mergedFile, true);
out = new BufferedWriter(fstream);
} catch (IOException e1) {
e1.printStackTrace();
}
for (File f : files) {
System.out.println("merging: " + f.getName());
FileInputStream fis;
try {
fis = new FileInputStream(f);
BufferedReader in = new BufferedReader(new InputStreamReader(fis));
String aLine;
while ((aLine = in.readLine()) != null) {
out.write(aLine);
out.newLine();
}
in.close();
} catch (IOException e) {
e.printStackTrace();
}
}
try {
out.close();
} catch (IOException e) {
e.printStackTrace();
}
}
public static void main(String[] args) throws Exception {
// splitFile();
mergeFiles();
}
static void readWrite(RandomAccessFile raf, BufferedOutputStream bw, long numBytes) throws IOException {
byte[] buf = new byte[(int) numBytes];
int val = raf.read(buf);
if (val != -1) {
bw.write(buf);
}
}
}

Use your joinFiles method: don't try to read a file line by line using a Reader if you want the output byte-for-byte identical to the input, because line endings may differ by platform.
Instead, read the parts as binary files using an InputStream or RandomAccessFile and write them using an OutputStream.
The only problem in your joinFiles method is that it uses File.listFiles(), which makes no guarantees about the order in which the files are returned (a sketch of how to impose an order yourself follows the code below).
I combined your mergeFiles() code with joinFiles() to make this work (remember to invoke joinFiles() instead of mergeFiles() from your main method):
static void joinFiles(File[] files) throws Exception {
int maxReadBufferSize = 8 * 1024;
BufferedOutputStream bw = new BufferedOutputStream(new FileOutputStream(INPUT_FILE + "_Splits\\fullJoin"
+ FILE_SUFFIX));
RandomAccessFile raf = null;
for (File file : files) {
raf = new RandomAccessFile(file, "r");
long numReads = raf.length() / maxReadBufferSize;
long numRemainingRead = raf.length() % maxReadBufferSize;
for (int i = 0; i < numReads; i++) {
readWrite(raf, bw, maxReadBufferSize);
}
if (numRemainingRead > 0) {
readWrite(raf, bw, numRemainingRead);
}
raf.close();
}
bw.close();
}
public static void joinFiles() throws Exception {
File[] files = new File[NUMBER_OF_OUTPUT_FILES];
for (int i = 1; i <= NUMBER_OF_OUTPUT_FILES; i++) {
files[i - 1] = new File(INPUT_FILE + "_Splits\\split." + i + FILE_SUFFIX);
}
joinFiles(files);
}
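If you'd rather keep File.listFiles(), you can sort the parts into numeric order explicitly. A minimal sketch, assuming Java 8+ and the split.N.txt naming used above (listPartsInOrder is a hypothetical helper, not part of the original code):
import java.io.File;
import java.util.Arrays;
import java.util.Comparator;

// Sorts the split.N.txt parts numerically, since File.listFiles()
// returns files in no guaranteed order.
static File[] listPartsInOrder(File splitsDir) {
    File[] parts = splitsDir.listFiles((dir, name) -> name.startsWith("split."));
    Arrays.sort(parts, Comparator.comparingInt(f -> {
        String name = f.getName();
        // extract N from "split.N.txt"
        return Integer.parseInt(name.substring("split.".length(), name.lastIndexOf('.')));
    }));
    return parts;
}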

The problem is in the very last method of your code:
static void readWrite(RandomAccessFile raf, BufferedOutputStream bw, long numBytes) throws IOException {
byte[] buf = new byte[(int) numBytes];
int val = raf.read(buf);
if (val != -1) {
bw.write(buf);
}
}
When you write, you write back numBytes of data, but the read function has usefully returned:
the total number of bytes read into the buffer, or -1 if there is no more data because the end of this file has been reached.
Therefore, your fix is to use a different write:
bw.write(buf, 0, val);
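Putting that together, the corrected helper (same signature as the original) looks like this:
static void readWrite(RandomAccessFile raf, BufferedOutputStream bw, long numBytes) throws IOException {
    byte[] buf = new byte[(int) numBytes];
    int val = raf.read(buf);
    if (val != -1) {
        // write only the bytes actually read, not the whole buffer
        bw.write(buf, 0, val);
    }
}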

Related

Breaking lines while chunking into multiple files

I am dividing my file into chunks, but the problem I am facing is this:
I have an .srt file, and while chunking it the split cuts characters mid-entry, i.e. the first .srt file ends with 00:26:20,230 --> and the next file continues with the next timestamp 00:27:40,343.
I need each chunk to end with a complete timestamp followed by the full subtitle sentence, i.e. if the split cuts a subtitle timestamp or dialogue in one file, that text should be appended to the beginning of the next file. Please suggest how I can achieve this.
I am trying it like below:
String FilePath = "/Users/meh/Desktop/escapeplan.srt";
FileInputStream fin = new FileInputStream(FilePath);
System.out.println("size: " +fin.getChannel().size());
long abc = 0l;
abc = (fin.getChannel().size())/3;
System.out.println("6: " +abc);
System.out.println("abc: " +abc);
//FilePath = args[1];
File filename = new File(FilePath);
long splitFileSize = 0,bytefileSize=0;
if (filename.exists()) {
try {
//bytefileSize = Long.parseLong(args[2]);
splitFileSize = abc;
Splitme spObj = new Splitme();
spObj.split(FilePath, (long) splitFileSize);
spObj = null;
} catch (Exception e) {
e.printStackTrace();
}
} else {
System.out.println("File Not Found....");
}
public void split(String FilePath, long splitlen) {
long leninfile = 0, leng = 0;
int count = 1, data;
try {
File filename = new File(FilePath);
InputStream infile = new BufferedInputStream(new FileInputStream(filename));
data = infile.read();
System.out.println("data");
System.out.println(data);
while (data != -1) {
filename = new File("/Users/meh/Documents/srt" + count + ".srt");
//RandomAccessFile outfile = new RandomAccessFile(filename, "rw");
OutputStream outfile = new BufferedOutputStream(new FileOutputStream(filename));
while (data != -1 && leng < splitlen) {
outfile.write(data);
leng++;
data = infile.read();
}
leninfile += leng;
leng = 0;
outfile.close();
changeTimeStamp(filename, count);
count++;
}
} catch (Exception e) {
e.printStackTrace();
}
}
I am trying to check whether the timestamp is in the correct format or not. Then I need to check that the next line is a dialogue and the line after that is an empty line; then the chunk can stop there, or else the leftover text from the previous chunk should be appended to the beginning of the next chunk file, so that it stays in the correct format.
I tried checking the format like this:
while ((strLine = br.readLine()) != null) {
String[] atoms = strLine.split(" --> ");
if (atoms.length == 1) {
out.write(strLine + "\n");
} else {
String startTS = atoms[0];
String endTS = atoms[1];
System.out.print("sri atmos start" + startTS);
System.out.print("sri atmos end" + endTS);
SimpleDateFormat sdf = new SimpleDateFormat("HH:mm:ss,SSS");
sdf.setLenient(false);
try
{
sdf.parse(startTS);
sdf.parse(endTS);
System.out.println("Valid time");
System.out.println("File path" + srcFileNm);
}
catch(Exception e) {
System.out.println("Invalid time");
System.out.println("Exception start" + startTS);
System.out.println("Exception end" + endTS);
}
}
}
Please help me understand how I can make this work.
I think you should change your approach and fully use basic I/O methods. I tried to encapsulate the logic in a small class that produces a triple with an id, the msecs line, and a list of subtitle lines (if I'm not wrong, you can have more than one line). I left the remainder handling external. Chunker is a class that reads a triple (class Three) from the file, so that you can manage it and write it somewhere.
This is just a "quick & dirty" idea that you can refine, but it should work.
package org.norsam.stackoverflow;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
public class Chunker
{
BufferedReader r;
int chunk = 0;
File dir;
public Chunker(File dir, String filename) throws IOException
{
File f = new File(dir, filename);
this.dir = dir;
this.r = new BufferedReader(new FileReader(f));
}
public Three readThree() throws IOException
{
// At end of file readLine() returns null, so this line (or the trim()
// in the loop below) throws; main() catches that to stop reading.
Integer id = Integer.parseInt(r.readLine());
String msecs = r.readLine();
String s = null;
List<String> srt = new ArrayList<>();
while (!(s = r.readLine().trim()).isEmpty()) {
srt.add(s);
}
return new Three(id, msecs, srt);
}
class Three
{
Integer id;
String msecs;
List<String> srts;
Three(Integer id, String msecs, List<String> srts)
{
this.id = id;
this.msecs = msecs;
this.srts = srts;
}
Three doSomething() {
// here you can do something with your data,
// e.g. split msecs on "-->" and check times
return this;
}
void write(BufferedWriter w) throws IOException
{
// BufferedWriter.write(int) would emit a single character,
// so the id is written as a string instead.
w.write(String.valueOf(id));
w.newLine();
w.write(msecs);
w.newLine();
for (String s : srts) {
w.write(s);
w.newLine();
}
w.newLine();
}
}
public static void main(String[] args) throws IOException
{
String baseDir = "/dir/where/resides/srt";
String filename = "filename.srt";
int elemPerChunk = 50;
int fileNum = 0;
File dir = new File(baseDir);
Chunker chunker = new Chunker(dir, filename);
boolean completed = false;
while (!completed) {
int srtCount = 0;
File f = new File(baseDir, "ch." + (fileNum++) + "." + filename);
BufferedWriter w = new BufferedWriter(new FileWriter(f));
try {
while (srtCount++ < elemPerChunk) {
chunker.readThree().doSomething().write(w);
}
} catch (NullPointerException | NumberFormatException e) {
// thrown by readThree() once the input is exhausted
completed = true;
}
w.close();
}
}
}

How to make reading a file faster

I'm going to move my data from MongoDB to Neo4j.
So I exported my MongoDB documents to .csv. As you can read here, I have a problem with the uniform array.
So I wrote a Java program to fix this problem.
Here is the .csv exported from MongoDB (note the difference in the uniform array):
_id,official_name,common_name,country,started_by.day,started_by.month,started_by.year,championship,stadium.name,stadium.capacity,palmares.first_prize,palmares.second_prize,palmares.third_prize,palmares.fourth_prize,average_age,squad_value,foreigners,uniform
0,yaDIXxLAOV,WWYWLqPcYM,QsVwiNmeGl,7,9,1479,oYKGgstIMv,qskcxizCkd,8560,10,25,9,29,16,58,6,"[""first_colour"",""second_colour"",""third_colour""]"
Here is how it must be to import in Neo4j:
_id,official_name,common_name,country,started_by.day,started_by.month,started_by.year,championship,stadium.name,stadium.capacity,palmares.first_prize,palmares.second_prize,palmares.third_prize,palmares.fourth_prize,average_age,squad_value,foreigners,uniform.0,uniform.1,uniform.2
0,yaDIXxLAOV,WWYWLqPcYM,QsVwiNmeGl,7,9,1479,oYKGgstIMv,qskcxizCkd,8560,10,25,9,29,16,58,6,first_colour,second_colour,third_colour
My code works, but I have to convert 500k lines of the .csv file and the program is far too slow (it was still running after 20 minutes :/):
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.PrintWriter;
public class ConvertireCSV {
public static void main(String[] args) throws IOException {
FileReader f;
f=new FileReader("output.csv");
BufferedReader b;
b=new BufferedReader(f);
String firstLine= b.readLine();
int uniform = firstLine.indexOf("uniform");
firstLine=firstLine.substring(0, uniform);
firstLine = firstLine + "uniform.0,uniform.1,uniform.2\n";
String line="";
String csv="";
while(true) {
line=b.readLine();
if(line==null)
break;
int u = line.indexOf("\"[");
line=line.substring(0, u);
line=line + "first_colour,second_colour,third_colour \n";
csv=csv+line;
}
File file = new File("outputForNeo4j.csv");
if(file.createNewFile()) {
PrintWriter pw = new PrintWriter(file);
pw.println(firstLine + csv);
System.out.println("New file \"outputForNeo4j.csv\" created.");
pw.flush();
pw.close();
}
}
}
How can I make it faster?
Okay, some basic ways to improve your code:
Make sure your variables have the minimal scope required. If you don't need line outside your loop, don't declare it outside your loop.
Concatenation of simple strings is in general slow. Use a StringBuilder to speed things up there.
Why are you buffering the whole string anyway? It seems like a waste of memory. Just open the output stream to your target file and write the lines to the new file as you process them.
Examples:
I don't think you need an example for the first point.
For the second, things could look like this:
...
StringBuilder csv = new StringBuilder();
while(true) {
...
csv.append(line);
}
...
if(file.createNewFile()) {
...
pw.println(firstLine + csv.toString());
...
}
For the third point the rewriting would be a little more extensive:
public static void main(String[] args) throws IOException {
FileReader f;
f=new FileReader("output.csv");
BufferedReader b;
b=new BufferedReader(f);
String firstLine= b.readLine();
int uniform = firstLine.indexOf("uniform");
firstLine=firstLine.substring(0, uniform);
firstLine = firstLine + "uniform.0,uniform.1,uniform.2\n";
File file = new File("outputForNeo4j.csv");
if(!file.createNewFile()) {
// all work would be for nothing! Bailing out.
return;
}
PrintWriter pw = new PrintWriter(file);
pw.print(firstLine);
while(true) {
String line=b.readLine();
if(line==null)
break;
int u = line.indexOf("\"[");
line=line.substring(0, u);
line=line + "first_colour,second_colour,third_colour \n";
pw.print(line);
}
System.out.println("New file \"outputForNeo4j.csv\" created.");
pw.flush();
pw.close();
b.close();
}
csv=csv+line;
String concatenation is an expensive operation. I would suggest using a BufferedWriter instead.
Something like this:
FileReader f;
f=new FileReader("output.csv");
BufferedReader b;
BufferedWriter out;
b=new BufferedReader(f);
try{
out = new BufferedWriter(new FileWriter("outputForNeo4j.csv"));
} catch(Exception e){
// cannot create the output file; bail out
return;
}
System.out.println("New file \"outputForNeo4j.csv\" created.");
String firstLine= b.readLine();
int uniform = firstLine.indexOf("uniform");
firstLine=firstLine.substring(0, uniform);
firstLine = firstLine + "uniform.0,uniform.1,uniform.2\n";
String line="";
String csv="";
out.write(firstLine);
while(true) {
line=b.readLine();
if(line==null)
break;
int u = line.indexOf("\"[");
line=line.substring(0, u);
line=line + "first_colour,second_colour,third_colour \n";
out.write(line);
}
out.flush();
out.close();
b.close();
Results:
test0 : Runs: 241 iterations, average millis = 246
test1 : Runs: 249 iterations, average millis = 118
test2 : Runs: 269 iterations, average millis = 5
test3 : Runs: 241 iterations, average millis = 2
import java.io.*;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Random;
public class Tester {
private static final String filePath = "c:\\bigFile.txt";
//private static final String filePath = "c:\\bigfileNewLine.txt";
private static final int numOfMethods = 4;
private static final int numOfIter = 1000;
public Tester() throws NoSuchMethodException {
System.out.println("Tester.Tester");
int[] milisArr = new int [numOfMethods];
int[] actualRun = new int [numOfMethods];
Random rnd = new Random(System.currentTimeMillis());
Long startMs = 0l, endMs = 0l;
Method[] method = new Method[numOfMethods];
for (int i = 0; i < numOfMethods; i++)
method[i] = this.getClass().getMethod("test" + i);
int testCount = 0;
while (testCount++ < numOfIter) {
int testMethod = rnd.nextInt(numOfMethods);
Method m = method[testMethod];
try {
System.gc();
startMs = System.currentTimeMillis();
String retval = (String) m.invoke(null);
endMs = System.currentTimeMillis();
} catch (IllegalAccessException e) {
e.printStackTrace();
} catch (InvocationTargetException e) {
e.printStackTrace();
}
milisArr[testMethod] += (endMs - startMs);
actualRun[testMethod]++;
System.out.println("Test name: " + m.getName() + " testCount=" + testCount + " Of " + numOfIter + " iteration, Total time :" + (endMs - startMs) / 1000.0 + " seconds");
}
System.out.println("Test Summery :");
for (int i = 0; i < numOfMethods; i++)
System.out.println("test" + i + " : Runs: " + actualRun[i] + " iterations ,avarage milis = " + milisArr[i]/numOfIter);
}
public static String test0() throws IOException {
InputStream file = getInputStream();
StringBuffer textBuffer = new StringBuffer();
int c;
while ((c = file.read()) != -1)
textBuffer.append((char) c);
file.close();
return textBuffer.toString();
}
public static String test1() throws IOException {
Reader reader = new FileReader(new File(filePath));
BufferedReader br = new BufferedReader(reader);
String line = br.readLine();
String result = line;
while (line != null) {
line = br.readLine();
if (line == null) {
} else {
result = result + "\n" + line;
}
}
br.close();
reader.close();
return result;
}
public static String test2() throws IOException {
byte[] buf = new byte[1024];
int l;
InputStream is = getInputStream();
StringBuffer tmpBuf = new StringBuffer();
while ((l = is.read(buf)) != -1) {
tmpBuf.append(new String(buf, 0, l));
}
is.close();
return tmpBuf.toString();
}
public static String test3() throws IOException {
File source = new File(filePath);
final DataInputStream dis = new DataInputStream(new BufferedInputStream(new FileInputStream(source)));
final byte[] buffer = new byte[(int) source.length()];
dis.readFully(buffer);
dis.close();
return new String(buffer, "UTF-8");
}
private static InputStream getInputStream() {
try {
return new FileInputStream(filePath);
} catch (FileNotFoundException e) {
e.printStackTrace();
return null;
}
}
public static void main(String[] args) {
try {
new Tester();
} catch (NoSuchMethodException e) {
e.printStackTrace();
}
}
}

Compare 2 large sorted CSV files by 2 columns in Java

I have 2 large (5 GB) CSV files, say FileA and FileB. I want to compare the 2 files on 2 of their columns in an efficient way. Each file has 63 columns. I want some files as output, as explained below:
FileA - FileB = File(Unmatched records in FileA)
FileB - FileA = File(Unmatched records in FileB)
FileA ∩ FileB = File(Matched records in both files)
I followed one way to solve this problem, but I don't know whether it is efficient or not. A code snippet would be very helpful.
EDIT: This is the process I am following. First, split both files into 75 chunks in 2 different threads.
public class SplitFiles implements Runnable {
private final String sourcePath;
private final String destPath;
SplitFiles(String sourcePath, String destPath) {
this.sourcePath = sourcePath;
this.destPath = destPath;
}
static void readWrite(RandomAccessFile raf, BufferedOutputStream bw,
long numBytes) throws IOException {
byte[] buf = new byte[(int) numBytes];
int val = raf.read(buf);
if (val != -1) {
// write only the bytes actually read
bw.write(buf, 0, val);
}
}
@Override
public void run() {
try(RandomAccessFile raf = new RandomAccessFile(sourcePath, "r")) {
long numSplits = 75; // from user input, extract it from args
long sourceSize = raf.length();
long bytesPerSplit = sourceSize / numSplits;
long remainingBytes = sourceSize % numSplits;
int maxReadBufferSize = 64 * 1024; // 64KB
for (int destIx = 1; destIx <= numSplits; destIx++) {
BufferedOutputStream bw = new BufferedOutputStream(
new FileOutputStream(destPath + destIx));
if (bytesPerSplit > maxReadBufferSize) {
long numReads = bytesPerSplit / maxReadBufferSize;
long numRemainingRead = bytesPerSplit % maxReadBufferSize;
for (int i = 0; i < numReads; i++) {
readWrite(raf, bw, maxReadBufferSize);
}
if (numRemainingRead > 0) {
readWrite(raf, bw, numRemainingRead);
}
} else {
readWrite(raf, bw, bytesPerSplit);
}
bw.close();
}
if (remainingBytes > 0) {
BufferedOutputStream bw = new BufferedOutputStream(
new FileOutputStream(destPath + (numSplits + 1)));
readWrite(raf, bw, remainingBytes);
bw.close();
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
Then read both files in 75 threads and compare them.
public class CompareFiles implements Runnable {
private static String SOURCE_A_FILE_NAME = "SplitA/split.";
private static String SOURCE_B_FILE_NAME = "SplitB/split.";
private final int countUntil;
CompareFiles(int countUntil) {
this.countUntil = countUntil;
}
@Override
public void run() {
try (BufferedReader inA = Files.newBufferedReader(
Paths.get(SOURCE_A_FILE_NAME + countUntil),
Charset.forName("UTF-8"))) {
for (String lineA; (lineA = inA.readLine()) != null;) {
try (BufferedReader inB = Files.newBufferedReader(
Paths.get(SOURCE_B_FILE_NAME + countUntil),
Charset.forName("UTF-8"))) {
String[] arrayA = lineA.split("\\|");
for (String lineB; (lineB = inB.readLine()) != null;) {
String[] arrayB = lineB.split("\\|");
if(arrayA[11].equals(arrayB[11]) && arrayA[13].equals(arrayB[13])){
writeMatchedRecords();
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
public synchronized void writeMatchedRecords(){
System.out.println("Write files for matched records");
}
}
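Since both files are sorted, a single-pass merge comparison avoids rescanning a chunk of FileB for every line of FileA. A minimal sketch, assuming pipe-delimited rows, both files sorted on the same key built from columns 11 and 13 (as in the code above), and no duplicate keys; file names are hypothetical:
import java.io.BufferedReader;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;

public class SortedMergeCompare {
    // Builds the comparison key from columns 11 and 13, matching the code above.
    static String key(String line) {
        String[] cols = line.split("\\|");
        return cols[11] + "|" + cols[13];
    }

    public static void main(String[] args) throws IOException {
        try (BufferedReader a = Files.newBufferedReader(Paths.get("FileA.csv"));
             BufferedReader b = Files.newBufferedReader(Paths.get("FileB.csv"))) {
            String lineA = a.readLine(), lineB = b.readLine();
            while (lineA != null && lineB != null) {
                int cmp = key(lineA).compareTo(key(lineB));
                if (cmp == 0) {
                    // matched record: belongs in the intersection file
                    lineA = a.readLine();
                    lineB = b.readLine();
                } else if (cmp < 0) {
                    // unmatched record in FileA
                    lineA = a.readLine();
                } else {
                    // unmatched record in FileB
                    lineB = b.readLine();
                }
            }
            // whatever remains in either file is unmatched
        }
    }
}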

Java - Read file and split into multiple files

I have a file which I would like to read in Java and split this file into n (user input) output files. Here is how I read the file:
int n = 4;
BufferedReader br = new BufferedReader(new FileReader("file.csv"));
try {
String line = br.readLine();
while (line != null) {
line = br.readLine();
}
} finally {
br.close();
}
How do I split the file - file.csv into n files?
Note: since the number of entries in the file is of the order of 100k, I can't store the file contents in an array and then split it and save it into multiple files.
Since one file can be very large, each split file could be large as well.
Example:
Source File Size: 5GB
Num Splits: 5: Destination
File Size: 1GB each (5 files)
There is no way to read this large split chunk in one go, even if we have such a memory. Basically for each split we can read a fix size byte-array which we know should be feasible in terms of performance as well memory.
NumSplits: 10 MaxReadBytes: 8KB
public static void main(String[] args) throws Exception
{
RandomAccessFile raf = new RandomAccessFile("test.csv", "r");
long numSplits = 10; //from user input, extract it from args
long sourceSize = raf.length();
long bytesPerSplit = sourceSize/numSplits ;
long remainingBytes = sourceSize % numSplits;
int maxReadBufferSize = 8 * 1024; //8KB
for(int destIx=1; destIx <= numSplits; destIx++) {
BufferedOutputStream bw = new BufferedOutputStream(new FileOutputStream("split."+destIx));
if(bytesPerSplit > maxReadBufferSize) {
long numReads = bytesPerSplit/maxReadBufferSize;
long numRemainingRead = bytesPerSplit % maxReadBufferSize;
for(int i=0; i<numReads; i++) {
readWrite(raf, bw, maxReadBufferSize);
}
if(numRemainingRead > 0) {
readWrite(raf, bw, numRemainingRead);
}
}else {
readWrite(raf, bw, bytesPerSplit);
}
bw.close();
}
if(remainingBytes > 0) {
BufferedOutputStream bw = new BufferedOutputStream(new FileOutputStream("split."+(numSplits+1)));
readWrite(raf, bw, remainingBytes);
bw.close();
}
raf.close();
}
static void readWrite(RandomAccessFile raf, BufferedOutputStream bw, long numBytes) throws IOException {
byte[] buf = new byte[(int) numBytes];
int val = raf.read(buf);
if(val != -1) {
// write only the bytes actually read (see the readWrite fix discussed above)
bw.write(buf, 0, val);
}
}
import java.io.*;
import java.util.Scanner;
public class split {
public static void main(String args[])
{
try{
// Reading file and getting no. of files to be generated
String inputfile = "C:/test.txt"; // Source File Name.
double nol = 2000.0; // No. of lines to be split and saved in each output file.
File file = new File(inputfile);
Scanner scanner = new Scanner(file);
int count = 0;
while (scanner.hasNextLine())
{
scanner.nextLine();
count++;
}
System.out.println("Lines in the file: " + count); // Displays no. of lines in the input file.
double temp = (count/nol);
int temp1=(int)temp;
int nof=0;
if(temp1==temp)
{
nof=temp1;
}
else
{
nof=temp1+1;
}
System.out.println("No. of files to be generated :"+nof); // Displays no. of files to be generated.
//---------------------------------------------------------------------------------------------------------
// Actual splitting of file into smaller files
FileInputStream fstream = new FileInputStream(inputfile);
DataInputStream in = new DataInputStream(fstream);
BufferedReader br = new BufferedReader(new InputStreamReader(in));
String strLine;
for (int j=1;j<=nof;j++)
{
FileWriter fstream1 = new FileWriter("C:/New Folder/File"+j+".txt"); // Destination File Location
BufferedWriter out = new BufferedWriter(fstream1);
for (int i=1;i<=nol;i++)
{
strLine = br.readLine();
if (strLine!= null)
{
out.write(strLine);
if(i!=nol)
{
out.newLine();
}
}
}
out.close();
}
in.close();
}catch (Exception e)
{
System.err.println("Error: " + e.getMessage());
}
}
}
Though it's an old question, for reference I am listing out the code which I used to split large files to any size; it works with any Java version above 1.4.
Sample split and join blocks are below:
public void join(String FilePath) {
long leninfile = 0, leng = 0;
int count = 1, data = 0;
try {
File filename = new File(FilePath);
//RandomAccessFile outfile = new RandomAccessFile(filename,"rw");
OutputStream outfile = new BufferedOutputStream(new FileOutputStream(filename));
while (true) {
filename = new File(FilePath + count + ".sp");
if (filename.exists()) {
//RandomAccessFile infile = new RandomAccessFile(filename,"r");
InputStream infile = new BufferedInputStream(new FileInputStream(filename));
data = infile.read();
while (data != -1) {
outfile.write(data);
data = infile.read();
}
leng++;
infile.close();
count++;
} else {
break;
}
}
outfile.close();
} catch (Exception e) {
e.printStackTrace();
}
}
public void split(String FilePath, long splitlen) {
long leninfile = 0, leng = 0;
int count = 1, data;
try {
File filename = new File(FilePath);
//RandomAccessFile infile = new RandomAccessFile(filename, "r");
InputStream infile = new BufferedInputStream(new FileInputStream(filename));
data = infile.read();
while (data != -1) {
filename = new File(FilePath + count + ".sp");
//RandomAccessFile outfile = new RandomAccessFile(filename, "rw");
OutputStream outfile = new BufferedOutputStream(new FileOutputStream(filename));
while (data != -1 && leng < splitlen) {
outfile.write(data);
leng++;
data = infile.read();
}
leninfile += leng;
leng = 0;
outfile.close();
count++;
}
} catch (Exception e) {
e.printStackTrace();
}
}
The complete Java code is available via the File Split in Java Program link.
A clean solution that is easy to edit.
Note that this solution involves loading the entire file into memory.
All lines of the file are stored in List<String> rowsOfFile;
edit maxSizeFile to choose the maximum size of a single split file.
public void splitFile(File fileToSplit) throws IOException {
long maxSizeFile = 10000000; // 10 MB
StringBuilder buffer = new StringBuilder((int) maxSizeFile);
int sizeOfRows = 0;
int recurrence = 0;
String fileName;
List<String> rowsOfFile;
rowsOfFile = Files.readAllLines(fileToSplit.toPath(), Charset.defaultCharset());
for (String row : rowsOfFile) {
buffer.append(row).append(System.lineSeparator());
sizeOfRows += row.getBytes(StandardCharsets.UTF_8).length;
if (sizeOfRows >= maxSizeFile) {
fileName = generateFileName(recurrence);
File newFile = new File(fileName);
try (PrintWriter writer = new PrintWriter(newFile)) {
writer.println(buffer.toString());
}
recurrence++;
sizeOfRows = 0;
buffer = new StringBuilder();
}
}
// last rows
if (sizeOfRows > 0) {
fileName = generateFileName(recurrence);
File newFile = new File(fileName);
try (PrintWriter writer = new PrintWriter(newFile)) {
writer.println(buffer.toString());
}
}
Files.delete(fileToSplit.toPath());
}
Method to generate the file name:
public String generateFileName(int numFile) {
String extension = ".txt";
return "myFile" + numFile + extension;
}
Have a counter to count the number of entries. Let's say one entry per line.
Step 1: initially, create a new subfile and set counter = 0.
Step 2: increment the counter as you read each entry from the source file into a buffer.
Step 3: when the counter reaches the limit of entries you want in each subfile, flush the buffer's contents to the subfile and close it.
Step 4: jump to step 1 until there is no more data to read from the source file; see the sketch below.
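Here is that sketch (all names hypothetical; one entry per line, entriesPerFile is the chosen limit):
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;

// Counter-based split: entriesPerFile lines go into each output file.
static void splitByEntries(String source, int entriesPerFile) throws IOException {
    try (BufferedReader in = new BufferedReader(new FileReader(source))) {
        String line = in.readLine();
        int part = 0;
        while (line != null) {
            // step 1: create a new subfile, counter starts at 0
            try (PrintWriter out = new PrintWriter(new FileWriter(source + ".part" + part++))) {
                int counter = 0;
                while (line != null && counter < entriesPerFile) {
                    out.println(line); // step 2: copy one entry, counting it
                    counter++;
                    line = in.readLine();
                }
            } // step 3: try-with-resources flushes and closes the subfile
        } // step 4: repeat until the source file is exhausted
    }
}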
There's no need to loop twice through the file. You could estimate the size of each chunk as the source file size divided by the number of chunks needed, then just stop filling each chunk with data once its size exceeds the estimate; see the sketch below.
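A rough sketch of this single-pass, size-based variant (names hypothetical; chunks are cut at line boundaries, so sizes are approximate):
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;

// Estimate bytes per chunk up front, then cut each chunk once its
// running size passes the estimate. Purely illustrative.
static void splitBySizeEstimate(File source, int numChunks) throws IOException {
    long bytesPerChunk = source.length() / numChunks;
    try (BufferedReader in = new BufferedReader(new FileReader(source))) {
        String line = in.readLine();
        int part = 0;
        while (line != null) {
            long written = 0;
            try (PrintWriter out = new PrintWriter(new FileWriter(source.getPath() + ".chunk" + part++))) {
                while (line != null && written < bytesPerChunk) {
                    out.println(line);
                    written += line.getBytes().length + 1; // +1 for the newline (approximate)
                    line = in.readLine();
                }
            }
        }
    }
}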
Here is one that worked for me; I used it to split a 10 GB file. It also enables you to add a header and a footer, which is very useful when splitting document-based formats such as XML and JSON, because you need to add the document wrapper in the new split files.
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
public class FileSpliter
{
public static void main(String[] args) throws IOException
{
splitTextFiles("D:\\xref.csx", 750000, "", "", null);
}
public static void splitTextFiles(String fileName, int maxRows, String header, String footer, String targetDir) throws IOException
{
File bigFile = new File(fileName);
int i = 1;
String ext = fileName.substring(fileName.lastIndexOf("."));
String fileNoExt = bigFile.getName().replace(ext, "");
File newDir = null;
if(targetDir != null)
{
newDir = new File(targetDir);
}
else
{
newDir = new File(bigFile.getParent() + "\\" + fileNoExt + "_split");
}
newDir.mkdirs();
try (BufferedReader reader = Files.newBufferedReader(Paths.get(fileName)))
{
String line = null;
int lineNum = 1;
Path splitFile = Paths.get(newDir.getPath() + "\\" + fileNoExt + "_" + String.format("%02d", i) + ext);
BufferedWriter writer = Files.newBufferedWriter(splitFile, StandardOpenOption.CREATE);
while ((line = reader.readLine()) != null)
{
if(lineNum == 1)
{
System.out.print("new file created '" + splitFile.toString());
if(header != null && header.length() > 0)
{
writer.append(header);
writer.newLine();
}
}
writer.append(line);
if (lineNum >= maxRows)
{
if(footer != null && footer.length() > 0)
{
writer.newLine();
writer.append(footer);
}
writer.close();
System.out.println(", " + lineNum + " lines written to file");
lineNum = 1;
i++;
splitFile = Paths.get(newDir.getPath() + "\\" + fileNoExt + "_" + String.format("%02d", i) + ext);
writer = Files.newBufferedWriter(splitFile, StandardOpenOption.CREATE);
}
else
{
writer.newLine();
lineNum++;
}
}
if(lineNum <= maxRows) // the reader ran out before this last file filled up; still append its footer
{
if(footer != null && footer.length() > 0)
{
writer.newLine();
lineNum++;
writer.append(footer);
}
}
writer.close();
System.out.println(", " + lineNum + " lines written to file");
}
System.out.println("file '" + bigFile.getName() + "' split into " + i + " files");
}
}
The code below splits a big file into smaller files with fewer lines each (inputFilePath, outputFolderPath, and linesPerSplit are assumed to be defined elsewhere).
long linesWritten = 0;
int count = 1;
try {
File inputFile = new File(inputFilePath);
InputStream inputFileStream = new BufferedInputStream(new FileInputStream(inputFile));
BufferedReader reader = new BufferedReader(new InputStreamReader(inputFileStream));
String line = reader.readLine();
String fileName = inputFile.getName();
String outfileName = outputFolderPath + "\\" + fileName;
while (line != null) {
File outFile = new File(outfileName + "_" + count + ".split");
Writer writer = new OutputStreamWriter(new FileOutputStream(outFile));
while (line != null && linesWritten < linesPerSplit) {
writer.write(line);
line = reader.readLine();
linesWritten++;
}
writer.close();
linesWritten = 0; // reset for the next file
count++; // next file count
}
reader.close();
} catch (Exception e) {
e.printStackTrace();
}
Split a file into multiple chunks (an in-memory operation); here I'm splitting any file into chunks of 500 KB (500000 bytes):
public static List<ByteArrayOutputStream> splitFile(File f) {
List<ByteArrayOutputStream> datalist = new ArrayList<>();
try {
int sizeOfFiles = 500000;
byte[] buffer = new byte[sizeOfFiles];
try (FileInputStream fis = new FileInputStream(f); BufferedInputStream bis = new BufferedInputStream(fis)) {
int bytesAmount = 0;
while ((bytesAmount = bis.read(buffer)) > 0) {
try (OutputStream out = new ByteArrayOutputStream()) {
out.write(buffer, 0, bytesAmount);
out.flush();
datalist.add((ByteArrayOutputStream) out);
}
}
}
} catch (Exception e) {
//get the error
}
return datalist;
}
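As a usage sketch (inside a method that declares throws IOException; the file name is hypothetical), the returned chunks could then be persisted like this:
// Write each in-memory chunk back out to disk (names are illustrative).
List<ByteArrayOutputStream> parts = splitFile(new File("data.bin"));
for (int i = 0; i < parts.size(); i++) {
    try (FileOutputStream fos = new FileOutputStream("data.bin.part" + i)) {
        parts.get(i).writeTo(fos); // writeTo copies the buffered bytes to the stream
    }
}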
I am a bit late to answer, but here's how I did it:
Approach:
First I determine how many bytes each of the individual files should contain, then I split the large file by bytes. Only one file chunk's worth of data is loaded into memory at a time.
Example: if a 5 GB file is split into 10 files, then only 500 MB worth of bytes are loaded into memory at a time, held in the buffer variable in the splitBySize method below.
Code Explanation:
The method splitFile first gets the number of bytes each of the individual file chunks should contain by calling the getSizeInBytes method, then it calls the splitBySize method, which splits the large file by size (i.e. maxChunkSize represents the number of bytes each file chunk will contain).
public static List<File> splitFile(File largeFile, int noOfFiles) throws IOException {
return splitBySize(largeFile, getSizeInBytes(largeFile.length(), noOfFiles));
}
public static List<File> splitBySize(File largeFile, int maxChunkSize) throws IOException {
List<File> list = new ArrayList<>();
int numberOfFiles = 0;
try (InputStream in = Files.newInputStream(largeFile.toPath())) {
final byte[] buffer = new byte[maxChunkSize];
int dataRead = in.read(buffer);
while (dataRead > -1) {
list.add(stageLocally(buffer, dataRead));
numberOfFiles++;
dataRead = in.read(buffer);
}
}
System.out.println("Number of files generated: " + numberOfFiles);
return list;
}
private static int getSizeInBytes(long totalBytes, int numberOfFiles) {
if (totalBytes % numberOfFiles != 0) {
totalBytes = ((totalBytes / numberOfFiles) + 1)*numberOfFiles;
}
long x = totalBytes / numberOfFiles;
if (x > Integer.MAX_VALUE){
throw new NumberFormatException("Byte chunk too large");
}
return (int) x;
}
Full Code:
public class StackOverflow {
private static final String INPUT_FILE_PATH = "/Users/malkesingh/Downloads/5MB.zip";
private static final String TEMP_DIRECTORY = "/Users/malkesingh/temp";
public static void main(String[] args) throws IOException {
File input = new File(INPUT_FILE_PATH);
File outPut = fileJoin2(splitFile(input, 5));
try (InputStream in = Files.newInputStream(input.toPath()); InputStream out = Files.newInputStream(outPut.toPath())) {
System.out.println(IOUtils.contentEquals(in, out)); // IOUtils is from Apache Commons IO
}
}
public static List<File> splitFile(File largeFile, int noOfFiles) throws IOException {
return splitBySize(largeFile, getSizeInBytes(largeFile.length(), noOfFiles));
}
public static List<File> splitBySize(File largeFile, int maxChunkSize) throws IOException {
List<File> list = new ArrayList<>();
int numberOfFiles = 0;
try (InputStream in = Files.newInputStream(largeFile.toPath())) {
final byte[] buffer = new byte[maxChunkSize];
int dataRead = in.read(buffer);
while (dataRead > -1) {
list.add(stageLocally(buffer, dataRead));
numberOfFiles++;
dataRead = in.read(buffer);
}
}
System.out.println("Number of files generated: " + numberOfFiles);
return list;
}
private static int getSizeInBytes(long totalBytes, int numberOfFiles) {
if (totalBytes % numberOfFiles != 0) {
totalBytes = ((totalBytes / numberOfFiles) + 1)*numberOfFiles;
}
long x = totalBytes / numberOfFiles;
if (x > Integer.MAX_VALUE){
throw new NumberFormatException("Byte chunk too large");
}
return (int) x;
}
private static File stageLocally(byte[] buffer, int length) throws IOException {
File outPutFile = File.createTempFile("temp-", "split", new File(TEMP_DIRECTORY));
try(FileOutputStream fos = new FileOutputStream(outPutFile)) {
fos.write(buffer, 0, length);
}
return outPutFile;
}
public static File fileJoin2(List<File> list) throws IOException {
File outPutFile = File.createTempFile("temp-", "unsplit", new File(TEMP_DIRECTORY));
FileOutputStream fos = new FileOutputStream(outPutFile);
for (File file : list) {
Files.copy(file.toPath(), fos);
}
fos.close();
return outPutFile;
}}
import java.util.*;
import java.io.*;
public class task13 {
public static void main(String[] args)throws IOException{
Scanner s =new Scanner(System.in);
System.out.print("Enter path:");
String a=s.next();
File f=new File(a+".txt");
Scanner st=new Scanner(f);
System.out.println(f.canRead()+"\n"+f.canWrite());
long l=f.length();
System.out.println("Length is:"+l);
System.out.print("Enter no.of partitions:");
int p=s.nextInt();
long x=l/p;
st.useDelimiter("\\Z");
String t=st.next();
int j=0;
System.out.println("Each File Length is:"+x);
for(int i=1;i<=p;i++){
File ft=new File(a+"-"+i+".txt");
ft.createNewFile();
int g=(j*(int)x);
int h=(j+1)*(int)x;
if(g<=l&&h<=l){
FileWriter fw=new FileWriter(a+"-"+i+".txt");
String v=t.substring(g,h);
fw.write(v);
j++;
fw.close();
}
}
}
}

How do you uncompress a split volume zip in Java?

I need to reassemble a 100-part zip file and extract the content. I tried simply concatenating the zip volumes together in an input stream but that does not work. Any suggestions would be appreciated.
Thanks.
Here is the code you can start from. It extracts the file entries from the multivolume zip archive:
package org.test.zip;
import java.io.BufferedOutputStream;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.SequenceInputStream;
import java.util.Arrays;
import java.util.Collections;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
public class Main {
public static void main(String[] args) throws IOException {
ZipInputStream is = new ZipInputStream(new SequenceInputStream(Collections.enumeration(
Arrays.asList(new FileInputStream("test.zip.001"), new FileInputStream("test.zip.002"), new FileInputStream("test.zip.003")))));
try {
for(ZipEntry entry = null; (entry = is.getNextEntry()) != null; ) {
OutputStream os = new BufferedOutputStream(new FileOutputStream(entry.getName()));
try {
final int bufferSize = 1024;
byte[] buffer = new byte[bufferSize];
for(int readBytes = -1; (readBytes = is.read(buffer, 0, bufferSize)) > -1; ) {
os.write(buffer, 0, readBytes);
}
os.flush();
} finally {
os.close();
}
}
} finally {
is.close();
}
}
}
Just a note to make it more dynamic -- 100% based on mijer's code above.
private void CombineFiles (String[] files) throws FileNotFoundException, IOException {
Vector<FileInputStream> v = new Vector<FileInputStream>(files.length);
for (int x = 0; x < files.length; x++)
v.add(new FileInputStream(inputDirectory + files[x]));
Enumeration<FileInputStream> e = v.elements();
SequenceInputStream sequenceInputStream = new SequenceInputStream(e);
ZipInputStream is = new ZipInputStream(sequenceInputStream);
try {
for (ZipEntry entry = null; (entry = is.getNextEntry()) != null;) {
OutputStream os = new BufferedOutputStream(new FileOutputStream(entry.getName()));
try {
final int bufferSize = 1024;
byte[] buffer = new byte[bufferSize];
for (int readBytes = -1; (readBytes = is.read(buffer, 0, bufferSize)) > -1;) {
os.write(buffer, 0, readBytes);
}
os.flush();
} finally {
os.close();
}
}
} finally {
is.close();
}
}
Just concatenating the segment data did not work for me. In this case the segments had been created with the Linux command-line zip (Info-ZIP version 3.0):
> zip -s 5m data.zip -r data/
Segment files named data.z01, data.z02, ..., data.zip were created.
The first segment data.z01 contained the spanning signature 0x08074b50, as described in the ZIP File Format Specification by PKWARE. The presence of these 4 bytes made Java's ZipInputStream ignore all entries in the archive. The central directory in the last segment also contained extra segment information compared to a non-split archive, but that did not cause ZipInputStream any problems.
All I had to do was skip the spanning signature. The following code will extract entries both from an archive that has been segmented with zip -s and from a zip file that has been split by the Linux split command, like this: split -d -b 5M data.zip data.zip.. The code is based on szhem's.
public class ZipCat {
private final static byte[] SPANNING_SIGNATURE = {0x50, 0x4b, 0x07, 0x08};
public static void main(String[] args) throws IOException {
List<InputStream> asList = new ArrayList<>();
byte[] buf4 = new byte[4];
PushbackInputStream pis = new PushbackInputStream(new FileInputStream(args[0]), buf4.length);
asList.add(pis);
if (pis.read(buf4) != buf4.length) {
throw new IOException(args[0] + " is too small for a zip file/segment");
}
if (!Arrays.equals(buf4, SPANNING_SIGNATURE)) {
pis.unread(buf4, 0, buf4.length);
}
for (int i = 1; i < args.length; i++) {
asList.add(new FileInputStream(args[i]));
}
try (ZipInputStream is = new ZipInputStream(new SequenceInputStream(Collections.enumeration(asList)))) {
for (ZipEntry entry = null; (entry = is.getNextEntry()) != null;) {
if (entry.isDirectory()) {
new File(entry.getName()).mkdirs();
} else {
try (OutputStream os = new BufferedOutputStream(new FileOutputStream(entry.getName()))) {
byte[] buffer = new byte[1024];
int count = -1;
while ((count = is.read(buffer)) != -1) {
os.write(buffer, 0, count);
}
}
}
}
}
}
}
