I've tested array insert using oracle 9i and OraDirect.NET 3.5.
Data was selected from table which is made using varchar2, number and long data types. Data types of the target table were varchar2 and number for the test.
I encountered an out-of-memory exception and CPU usage was more than 90% when I array-inserted in 500-record chunks. So I reduced it to 100-record chunks; then there was no out-of-memory exception, but memory usage was still high.
Is there any way I can use less memory and CPU to process array insert?
Can I use an ArrayList instead of an array to process the array insert?
The following is a sample code I used for the test.
Code: Select all
/// <summary>
/// Performs an array (batch) INSERT of the given work items into the target table
/// using the provider's array binding (one round trip for all rows).
/// </summary>
/// <param name="workList">Rows to insert; each element is a WorkItem whose
/// fieldValueList supplies one string value per column, in paramList order.</param>
/// <param name="paramList">Parameter names, one per column of the target table.</param>
/// <param name="sTableName">Resource key used to look up both the comma-separated
/// column data types and the INSERT statement text.</param>
/// <param name="sError">Receives the error message when the insert fails.</param>
/// <returns>true on success; false when an exception occurred (sError is set).</returns>
private bool ExecuteArray(System.Collections.ArrayList workList, ArrayList paramList, string sTableName, ref string sError)
{
    int rowCount = workList.Count;
    int colCount = paramList.Count;

    // Array binding requires column-major data: one value array per parameter.
    string[][] arrays = new string[colCount][];
    for (int i = 0; i < colCount; i++)
    {
        arrays[i] = new string[rowCount];
    }

    // Transpose the row-oriented work list into the column-oriented arrays.
    for (int i = 0; i < rowCount; i++)
    {
        WorkItem workItem = (WorkItem)workList[i];
        for (int j = 0; j < workItem.fieldValueList.Count; j++)
        {
            arrays[j][i] = (string)workItem.fieldValueList[j];
        }
    }

    string dataTypes = m_resMngSQLDataType.GetString(sTableName);
    string[] dataTypeList = dataTypes.Split(',');
    string sSql = m_resMngSQLInsert.GetString(sTableName);

    try
    {
        using (OracleCommand cmd = new OracleCommand(sSql, this.m_trg_conn))
        {
            for (int i = 0; i < colCount; i++)
            {
                CoreLab.Oracle.OracleParameter param =
                    cmd.Parameters.Add((string)paramList[i], GetOracleDbType(dataTypeList[i]));
                param.Value = arrays[i];
            }

            // Executes the statement once per row of the bound arrays.
            cmd.ExecuteArray(rowCount);

            // Redundant under 'using' (Dispose releases the command), but kept
            // for parity with the original code.
            cmd.Parameters.Clear();
        }
        return true;
    }
    catch (Exception ex)
    {
        // The original computed a bool result and took a ref error string but
        // never populated either; honor the contract so callers can react
        // instead of receiving an unhandled provider exception.
        sError = ex.Message;
        return false;
    }
}