Posting video returning "Segments do not add up to provided total file size"


#1

Hello there! I am making a bot that publishes multiple videos, located on my machine, on Twitter. Currently I store the media path and the authentication keys of my account in a database and pull them through the `data` variable seen at the beginning of the code below. The application is asynchronous and I use `yield` to sequence the function calls.
For media smaller than 5 MB the code runs perfectly. For media larger than 5 MB I follow the same logic, except that in APPEND I create a read stream for the file and split it into chunks of 1 * 1024 * 1024 bytes; the stream's 'data' event issues an APPEND for every chunk received until the entire file has been read. It seems to work, but I have tried modifying the code several times with .then or by restructuring the promises, and when I call fStream.pause() inside the stream's 'data' event, the stream only reads the first chunk and then returns the same error, "Segments do not add up to provided total file size". The code closely follows the one developed by ttezel in the twit package, in twit/tests/rest_chunked_upload.js ( https://github.com/ttezel/twit/blob/master/tests/rest_chunked_upload.js ). I have already verified that my videos are postable using the Python application developed by jcipriano and referenced in the documentation (https://github.com/twitterdev/large-video-upload-python/blob/master/async-upload.py).

const data = target.data;
            const T = new Twit({
                consumer_key: process.env.TWITTER_CONSUMER_KEY,
                consumer_secret: process.env.TWITTER_CONSUMER_SECRET,
                access_token: data.accessToken,
                access_token_secret: data.accessTokenSecret
            });
            
            const mediaSizeBytes = fs.statSync(data.mediaPath).size;
            const mediaData = fs.readFileSync(data.mediaPath, { encoding: 'base64' });
            // We need to do it this way because, by default, postMediaChunked does not
            // return a future if you don't pass the callback. This is not the case for
            // the other methods, though.

            if (mediaSizeBytes > 5000000) {
                console.log('Posting large media starts!');
                const mediaId = yield new Promise((resolve, reject) => {   
                    T.post('media/upload', {
                        'command': 'INIT',
                        'total_bytes': mediaSizeBytes,
                        'media_category': 'tweet_video',
                        'media_type': 'video/mp4'
                    }, (error, body, response) => {
                        if (error)
                            return reject(error);
                        return resolve(body.media_id_string);
                    });
                });

                console.log('INIT large midia ok! Media id:' + mediaId);

                var segmentIndex = 0;
                var maxSize = 0;
                var fStream = fs.createReadStream(data.mediaPath, { highWaterMark: 1 * 1024 * 1024 }); 

                fStream.on('data', function (chunk) {

                    console.log('Uploading chunk number', + segmentIndex);
                    console.log(chunk.length);                    

                    T.post('media/upload', {
                        'command': 'APPEND',
                        'media_id': mediaId,
                        'segment_index': segmentIndex,
                        'media_data': chunk.toString('base64'),
                    });
                    maxSize += chunk.length;
                    segmentIndex += 1;
                    console.log('Chunks Total Size: ' + maxSize + ';');
                    console.log('Media id:' + mediaId);
                });
                fStream.on('end', function () {
                    console.log('Stream Finished!');
                });
                console.log('APPEND chunks large midia ok!');
                fStream.on('error', function (err) {
                    console.log(err);
                });
                
                yield T.post('media/upload', {
                    'command': 'FINALIZE',
                    'media_id': mediaId,
                });

                yield T.post('media/metadata/create', { media_id: mediaId });
                console.log('Metadata large midia ok!');

                const tweet = (yield T.post('statuses/update', {
                    status: data.message || '',
                    media_ids: [mediaId]
                })).data;
                console.log('Large midia posted!');
          
            } else if (mediaSizeBytes < 5000000) {
                console.log('Posting small media starts!');
                const mediaId = yield new Promise((resolve, reject) => {
                    T.post('media/upload', {
                        'command': 'INIT',
                        'total_bytes': mediaSizeBytes,
                        'media_category': 'tweet_video',
                        'media_type': 'video/mp4'
                    }, (error, body, response) => {
                        if (error)
                            return reject(error);
                        return resolve(body.media_id_string);
                    });
                });
                console.log('INIT small midia ok!');

                yield T.post('media/upload', {
                    'command': 'APPEND',
                    'media_id': mediaId,
                    'segment_index': 0,
                    'media_data': mediaData,
                });
                console.log('APPEND small midia ok!');

                yield T.post('media/upload', {
                    'command': 'FINALIZE',
                    'media_id': mediaId,
                });
                console.log('FINALIZE small midia ok!');

                yield T.post('media/metadata/create', { media_id: mediaId });
                console.log('Metadata small midia ok!');

                const tweet = (yield T.post('statuses/update', {
                    status: data.message || '',
                    media_ids: [mediaId]
                })).data;
                console.log('Small midia posted!');
            };

Here is the output:

Posting large media starts!
INIT large midia ok! Media id:1087881346494709760
APPEND chunks large midia ok!
Uploading chunk number 0
1048576
Chunks Total Size: 1048576;
Media id:1087881346494709760
Uploading chunk number 1
1048576
Chunks Total Size: 2097152;
Media id:1087881346494709760
Uploading chunk number 2
1048576
Chunks Total Size: 3145728;
Media id:1087881346494709760
Uploading chunk number 3
1048576
Chunks Total Size: 4194304;
Media id:1087881346494709760
Uploading chunk number 4
1048576
Chunks Total Size: 5242880;
Media id:1087881346494709760
Uploading chunk number 5
1048576
Chunks Total Size: 6291456;
Media id:1087881346494709760
Uploading chunk number 6
379522
Chunks Total Size: 6670978;
Media id:1087881346494709760
Stream Finished!
Failed because of error: Error: Segments do not add up to provided total file size.
Error: Segments do not add up to provided total file size.
    at Object.exports.makeTwitError (/home/pipostefanelli/framefy/taskieBranch/world/applications/taskie/node_modules/twit/lib/helpers.js:74:13)
    at onRequestComplete (/home/pipostefanelli/framefy/taskieBranch/world/applications/taskie/node_modules/twit/lib/twitter.js:344:25)
    at Request.<anonymous> (/home/pipostefanelli/framefy/taskieBranch/world/applications/taskie/node_modules/twit/lib/twitter.js:364:7)
    at emitOne (events.js:121:20)
    at Request.emit (events.js:211:7)
    at Gunzip.<anonymous> (/home/pipostefanelli/framefy/taskieBranch/world/applications/taskie/node_modules/request/request.js:1083:12)
    at Object.onceWrapper (events.js:313:30)
    at emitNone (events.js:111:20)
    at Gunzip.emit (events.js:208:7)
    at endReadableNT (_stream_readable.js:1064:12)
    at _combinedTickCallback (internal/process/next_tick.js:139:11)
    at process._tickDomainCallback (internal/process/next_tick.js:219:9)
Done in 5.34s.