[Salesforce] Problem using AggregateResult in batch Apex using the Database.Batchable interface

I wrote a batch Apex class that implements the Database.Batchable interface. I am using an AggregateResult query in the execute method for SUM, COUNT, etc. Is there any chance of hitting a governor limit if my number of records is around 50,000 (half a lakh) and the batch size is 200? I have learned that an aggregate query counts against the governor limit based on the number of rows processed during the calculation, not the number of rows returned.
Please let me know if there is an alternative.
Thanks. Sample Code is added here –

  global class sample_BATCH implements Database.Batchable<sObject> {
global Database.QueryLocator start(Database.BatchableContext BC) 
{
  Set<Id> rtId = new Set<Id>();
  String rtName = 'xxx;
  for (RecordType rt : [select id, Name  from RecordType where name = : rtName])
  {
     rtId.add(rt.Id);
  }
 //master obj
  String query = 'SELECT Id, recordtypeid, Name, End_Date__c,start_Date__c FROM Survey__c WHERE recordtypeid IN : rtId ';
  return Database.getQueryLocator(query);  
}

global void execute(Database.BatchableContext BC, List<Survey__c> scope) 
{
  Decimal rs1 =0.0;                   
  Decimal rs2 = 0.0;
  Decimal rs3 = 0.0;
// detail obj
  List <Survey_Target__c> updSRTlst = new List <Survey_Target__c>();
// this query may have ~ 50k record per scope i.e for each master record id.
  Map<id,List<Decimal>> sTgtToSurvMap = new Map<id,List<Decimal>> ();
  AggregateResult[] sg= [SELECT Survey__c, SUM(r_1__c) Res1, SUM(r_2__c) Res2 ,COUNT(id) Res3 FROM Survey_Target__c  where Survey__c  in : scope 
                             AND Status__c = 'Submitted_a' GROUP BY Survey__c];   

  List <Decimal> r1List = new List <Decimal>();
  List <Decimal> r2List = new List <Decimal>();

  Decimal r1 = 0.0;
  Decimal r2 = 0.0;
  Decimal r3 = 0.0;
  for (Sobject sObj : sg)
  {
      r1 = (Decimal) (sObj.get('Res1'));    
      r2 = (Decimal) (sObj.get('Res2'));       
      r3 = (Decimal) (sObj.get('Res3'));                           
      List <Decimal> reslst = new List <Decimal>();
      if (reslst == null ||  reslst.isEmpty())
      {
         reslst.add(res2);                               
         reslst.add(r2);                          
         reslst.add(r3);
      }
      //stores master record is and cut off values from Agg Res query
      sTgtToSurvMap.put((Id)sObj.get('Survey__c'),reslst);
  }
 //my detail obj
  List <Survey_Target__c> stList= new List <Survey_Target__c>([select id, name,Survey__c r_1__c, r_2__c,End_Date__c,start_Date__c from Survey_Target__c 
                                                                      where Survey__c  in : scope and Status__c = 'Submitted_a' order by survey__c asc,r_1__c desc]);
  for (Survey_Target_vod__c stg : stList)
  {
    // final DML will be done here based on some logic on map.
  }

Best Answer

With 50 master records, each of which can have more than 50,000 child records, running the batchable over the child records (inverting the problem so the governor limit applies per small scope of children) may make sense. The code would look something like this:

public class MyBatchable implements Database.Batchable<SObject>, Database.Stateful {

    private Map<Id, Parent__c> parents = new Map<Id, Parent__c>();

    public Database.QueryLocator start(Database.BatchableContext context) {
        return Database.getQueryLocator([
                select Parent__c, Amount__c, ...
                from Child__c
                ]);
    }

    public void execute(Database.BatchableContext context, List<Child__c> scope) {
        for (Child__c c : scope) {
            Parent__c p = parents.get(c.Parent__c);
            if (p == null) {
                p = new Parent__c(Id = c.Parent__c);
                parents.put(c.ParentId, p);
            }
            p.Sum__c = add(p.Sum__c, c.Amount__c);
            ...
        }
    }

    public void finish(Database.BatchableContext context) {
        update parents.values();
    }

    private Decimal add(Decimal a, Decimal b) {
        if (a != null && b != null) return a + b;
        else if (a != null) return a;
        else if (b != null) return b;
        else return null;
    }
}

and then execute with the maximum batch size of 2,000.

Related Topic