[SalesForce] System.LimitException: Too many queueable jobs added to the queue

I have a scheduler class which is scheduled to run every 5 min which calls a batch class. This batch class calls a method from another class which adds multiple jobs to a queue. When it's scheduled I am getting System.LimitException: Too many queueable jobs added to the queue: 2.
But if I don't schedule and run the code from Ananymous block or from VF button it works just fine. Below I am attaching the code sample for the same.

Scheduler Class:

global with sharing class getSocialPostsScheduleBatch implements Schedulable 
{
    global void execute(SchedulableContext sc) {
         ID BatchId = Database.executeBatch(new getPostsBatch(), 200);
    }
}

Batch class:

global class getPostsBatch implements Database.Batchable<sObject>, Database.AllowsCallouts,Database.Stateful{

global Database.QueryLocator start(Database.BatchableContext BC)
{
    String query='Select Id from User Limit 1';
    return Database.getQueryLocator(query);
}

global void execute(Database.BatchableContext BC, List<Object> scope)
{
   getSocialPosts.getPosts();

}   
global void finish(Database.BatchableContext BC)
{

}
}

Callout Class:

public class getSocialPosts{
    public static void getPosts() {
           String auth_token = oauthLogin();

           DateTime endTime = dateTime.now();
           String unixendTime = ''+endTime.getTime()/1000;
           System.debug('Unix time-stamp: '+unixendTime);

           DateTime startTime = dateTime.now().addHours(-24);
           String unixStartTime = ''+startTime.getTime()/1000;
           System.debug('Unix time-stamp: '+unixStartTime);

           SocialPostsCustomSetting__c customSet = SocialPostsCustomSetting__c.getInstance('Record');
           String custId=customSet.SocialPostId__c;
           System.debug('ID from Custom Settings='+custId);

           HttpRequest req = new HttpRequest(); 
           req.setMethod('GET');
           req.setEndpoint('https://api.socialstudio.radian6.com/v3/posts?topics=1135172&limit=5&sinceId='+custId+'&sortBy=publishedDate');
           req.setHeader('access_token',auth_token);
           Http http = new Http();
           HTTPResponse res = http.send(req);
           System.debug('BODY: '+res.getBody());
           System.debug('STATUS:'+res.getStatus());
           System.debug('STATUS_CODE:'+res.getStatusCode());
           //fromJSON jsonObj = fromJSON.parse(res.getBody());
           string jsonstr = res.getBody();
           system.debug('JSON Body='+jsonstr);
           if((jsonstr.containsIgnoreCase('Comment from')||jsonstr.containsIgnoreCase('Reply From'))&&jsonstr.containsIgnoreCase('Post from')){
               JsonParser5 jsonobj;
               //fromJSON jsonobj;
               try{
                      jsonobj = (JsonParser5)JSON.deserialize(res.getBody(),JsonParser5.class);
                      //jsonobj= fromJSON.parse(res.getBody());
                      System.debug('Parsed Data='+jsonobj);
                   }catch(Exception e){

                           system.debug('Exception in deserialize '+e.getMessage());

                   }
               system.debug('JSONOBJ inside if='+jsonobj );

               List<JsonParser5.data> jsonList =new List<JsonParser5.data>();
               if(jsonobj!=null){
                       if(jsonobj.data!=null){
                           jsonList = jsonobj.data;
                       }
               }
               system.debug('JSONLIST5='+jsonList );
               List<String> idSet = new List<String>();
               Map<String,Map<String,String>> socialIdUrlMap = new Map<String,Map<String,String>>();
               if(jsonList.size()>0){
                   for(JsonParser5.data obj : jsonList){

                       if(obj.title.containsIgnoreCase('Comment from')){
                           if(!obj.title.containsIgnoreCase('Comment From: Lopples')){

                               postToFacebook.postReplytoComment(obj.externalId);
                           }
                       }
                       else{
                           if(!obj.title.contains('Lopples')){
                               System.debug('Id='+obj.id);
                               idSet.add(obj.id);
                               Map<String,String> contentMap = new Map<String,String>();
                               contentMap.put(obj.entities.media[0].media_url,obj.content);
                               socialIdUrlMap.put(obj.externalId,contentMap );

                           }  
                       }
                   }
                   if(socialIdUrlMap.size()>0){
                       saveImages(socialIdUrlMap);
                   }
                   customSet.SocialPostId__c=idSet[0];
                   update customSet;
               }

           }
        }
    public static void saveImages(Map<String,Map<String,String>> socialIdUrlMap){
        List<ContentVersion> cvList = new List<ContentVersion>();
        Integer count=0;
        for (String socialId : socialIdUrlMap.keySet()){
            Map<String,String> data= new Map<String,String>();
            data = socialIdUrlMap.get(socialId);
            for(String url: data.keySet()){
                if(!url.contains('https://www.facebook.com')){
                count++;
                Http h = new Http();
                HttpRequest req = new HttpRequest();
                req.setMethod('GET');
                req.setEndpoint(url);
                if(url.contains('png')){
                    system.debug('Inside PNG'+url);
                    req.setHeader('Content-Type', 'image/png');
                }
                else if(url.contains('jpg')){
                    system.debug('Inside JPEG'+url);
                    req.setHeader('Content-Type', 'image/jpeg');
                }
                HttpResponse res = null;
                string responseValue = '';
                try{
                    res = h.send(req);
                }catch (System.CalloutException e){
                     System.debug('ERROR:' + e.getMessage());
                }
                blob image = res.getBodyAsBlob();
                ContentVersion cv = new ContentVersion();
                cv.VersionData = image;
                String finalurl = url.split('\\?').get(0);
                cv.PathOnClient = finalurl;
                cv.title = socialId;
                cv.Description = data.get(url);
                cvList.add(cv);
                }
            }
        }

        insert cvList;

            if(!cvList.isEmpty()){
                for(ContentVersion cv : cvList){
                    System.Queueable job = new DeferredHandler(cv.Id);
                    System.enqueueJob(job);
                }
            }
        }
    }

The last snippet where I am adding multiple jobs to the queue is causing the issue.
Please let me know if if I am doing wrong here and any workaround to fix the issue​

Best Answer

As you are chaining the jobs you have limit of 1 on child jobs. When chaining jobs, you can add only one job from an executing job with System.enqueueJob, which means that only one child job can exist for each parent queueable job. Starting multiple child jobs from the same queueable job isn’t supported.

See here under queuable Apex limit section: https://developer.salesforce.com/docs/atlas.en-us.apexcode.meta/apexcode/apex_queueing_jobs.htm