Save/Retrieve Image from Couchbase stored in chunks

I am new to Couchbase. I came across the post below about storing blobs:
https://blog.couchbase.com/storing-blobs-in-couchbase-for-content-management/

I have written the code below to store a large image file in chunks, then read the chunks back and write the same image to local disk.
But the entire image is not written back; only part of the image is reconstructed. Are the chunks not stored properly, or not read back properly? When I query the data in Couchbase it exists in the proper format. I am not able to figure out what the issue is. Any help on this?
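
For reference, MetadataDoc is a plain spring-data-couchbase entity. Here is a trimmed-down sketch of the fields the code relies on (annotations and most getters/setters omitted; getKeyFor is shown simplified, it just builds the key under which part i is stored):

import java.util.TreeMap;
import org.springframework.data.annotation.Id;
import org.springframework.data.couchbase.core.mapping.Document;

@Document
public class MetadataDoc {

    @Id
    private String key;                            // e.g. "ReactiveImageSplitTopo17"
    private Long id;                               // running counter value
    private Long count;                            // number of binary parts
    private Long length;                           // total image length in bytes
    private String mimetype;                       // image format name, e.g. "JPEG"
    private TreeMap<String, Integer> binaryParts;  // part key -> part length in bytes

    // simplified: builds the key under which part i is stored
    public String getKeyFor(Long part) {
        return key + "_" + part;
    }

    // getters and setters omitted
}

Code to save the image in chunks: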

@Override
public MetadataDoc saveImageInCouchbase(Bucket bucket) throws IOException {

    BufferedImage image = ImageIO.read(new File("/Users/path/Desktop/world.topo.jpg"));
    MetadataDoc metadata = new MetadataDoc();
    // determine the image format name so it can be stored in the metadata
    String format = null;
    String keyToken = "ReactiveImageSplitTopo";
    ImageInputStream input = ImageIO
            .createImageInputStream(new File("/Users/path/Desktop/world.topo.jpg"));
    //long size = input.length();

    Iterator<ImageReader> readers = ImageIO.getImageReaders(input);
    if (readers.hasNext()) {
        ImageReader reader = readers.next();
        reader.setInput(input);
        format = reader.getFormatName();
    }

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ImageIO.write(image, format, baos);
    baos.flush();
    byte[] bytes = baos.toByteArray();

    // try splitting data in metadata and binary document
    // count the number of parts
    long length = baos.size();
    System.out.println("Length :: " + length + " Format :: " + format);
    long BUFFER_SIZE = 1000000;
    long nbparts = length / BUFFER_SIZE;
    // the last part
    long finalSize = length - nbparts * BUFFER_SIZE;
    if (finalSize > 0)
        nbparts++;
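    // e.g. a 2,500,000 byte image with BUFFER_SIZE = 1,000,000 gives nbparts = 2
    // plus finalSize = 500,000 left over, so nbparts is bumped to 3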
    metadata.setCount(nbparts);
    metadata.setMimetype(format);
    metadata.setLength(length);

    long totalLength = 0;

    int read = 0;
    // buffer that holds one chunk of at most BUFFER_SIZE bytes
    byte[] byteArray = new byte[(int) BUFFER_SIZE];
    int offset = 0;
    String dataId = "";

    dataId = keyToken + imagecounterRepo.counter();
    TreeMap<String, Integer> strStrMap = new TreeMap<String, Integer>();
    for (int i = 0; i < nbparts; i++) {
        try {
            String keyWithId = metadata.getKeyFor((long) i);
            read = input.read(byteArray, 0, (int) BUFFER_SIZE);
            totalLength += read;
            offset += read;
            writeContentPart(keyWithId, byteArray, read, bucket);
            strStrMap.put(keyWithId, read);

            logger.info("i   " + i);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    metadata.setBinaryParts(strStrMap);
    metadata.setKey(keyToken + imagecounterRepo.counter());
    metadata.setId(imagecounterRepo.counter()); // just a counter 1,2,3

    // new logic to save metadata using spring-data-couchbase
     imageRepo.save(metadata);
    baos.close();
    logger.info("Image written");
    return metadata;
}

Code to write a binary content part:
static void writeContentPart(String partId, byte[] bytesArray, int length, Bucket bucket) {
    // copy the byte array into a Netty buffer and upsert it as a binary document
    // (note: copiedBuffer copies the whole array; the length parameter is not used)
    BinaryDocument bDoc = BinaryDocument.create(partId, Unpooled.copiedBuffer(bytesArray));
    bucket.upsert(bDoc);
}

Code to read the chunks and recreate the image:

@Override
    public void extractImageFromCouchbase(Bucket bucket) throws IOException {

        // reading the parts and joining them
        long BUFFER_SIZE = 1000000;
        // fetch the metadata document via the spring-data-couchbase repository
        Optional<MetadataDoc> docOptional = imageRepo.findById("ReactiveImageSplitTopo17");
        MetadataDoc doc = docOptional.get();
        Long nbparts = doc.getCount();
        Long length = doc.getLength();
        String mimeType = doc.getMimetype();

        if (nbparts == null || length == null || mimeType == null)
            throw new IOException("Document invalid");

        byte[] byteArray = new byte[(int) (BUFFER_SIZE * nbparts)];
        // for each part, read the content into the byteArray

        Integer partLength = null;
        int offset = 0;
        for (int i = 0; i < nbparts; i++) {

            TreeMap<String, Integer> strStrMap = doc.getBinaryParts();

            String keyWithId = doc.getKeyFor((long) i);
            partLength = strStrMap.get(keyWithId);
            if (partLength == null)
                throw new IOException("length of part " + i + " is mandatory");

            // fetch the binary documents for all part keys asynchronously and block for the list
            List<BinaryDocument> bDoc = Observable.from(strStrMap.keySet())
                    .flatMap(new Func1<String, Observable<BinaryDocument>>() {
                        @Override
                        public Observable<BinaryDocument> call(String t) {
                            return bucket.async().get(t, BinaryDocument.class);
                        }
                    }).toList().toBlocking().single();

            // BinaryDocument bDoc = bucket.get(keyWithId, BinaryDocument.class);
            ByteBuf part = null;
            for (BinaryDocument singleDocument : bDoc) {
                part = singleDocument.content();
                partLength = part.capacity();
                byte[] dst = new byte[partLength];
                part.readBytes(dst);
                // copy this part's bytes into the combined array
                for (int k = 0; k < partLength; k++) {
                    byteArray[k + offset] = dst[k];
                }
                offset += partLength;
                part.release();
            }
        }

        // convert to image
        InputStream in = new ByteArrayInputStream(byteArray);
        BufferedImage buffImage = ImageIO.read(in);
        ImageIO.write(buffImage, mimeType, new File("/Users/path/Downloads/LargeImage.jpg"));
    }
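
For completeness, this is roughly how I open the bucket and invoke the two methods above (the cluster address, bucket name and the imageService reference are placeholders for my actual Spring wiring):

import com.couchbase.client.java.Bucket;
import com.couchbase.client.java.Cluster;
import com.couchbase.client.java.CouchbaseCluster;

// placeholder wiring -- in the real application the bucket and service come from Spring configuration
Cluster cluster = CouchbaseCluster.create("localhost");
Bucket bucket = cluster.openBucket("imageBucket");

MetadataDoc metadata = imageService.saveImageInCouchbase(bucket); // store the chunks and the metadata
imageService.extractImageFromCouchbase(bucket);                   // read the chunks back and write the file

cluster.disconnect();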