Personally, I've created Map(), Filter(), and Reduce() functions for problems such as this. Here is an example using the Map() function; on my system it is roughly 50% - 60% faster than creating a table to do the search.
Here is the map function (explicitly globalized):
::map = Function( {inputs /* list, function */ },
	/* uses a single underscore _ as the value wild-card and a double underscore __ as the key wild-card for associative arrays */
	{__i__, __result__, __list__, _, __, __keys__},
	__list__ = Eval( Arg( inputs, 1 ) );
	If( Is List( __list__ ),
		__result__ = {};
		Eval(
			Substitute(
				Expr(
					For( __i__ = 1, __i__ <= __N__, __i__++,
						_ = __list__[__i__];
						__result__[__i__] = __function__
					)
				),
				Expr( __N__ ), N Items( __list__ ),
				Expr( __function__ ), Arg( inputs, 2 )
			);
		);
	,
		Is Associative Array( __list__ ),
		__result__ = [=>];
		__keys__ = __list__ << Get Keys;
		Eval(
			Substitute(
				Expr(
					For( __i__ = 1, __i__ <= __N__, __i__++,
						__ = __keys__[__i__];
						_ = __list__[__];
						__result__[__] = __function__
					)
				),
				Expr( __N__ ), N Items( __keys__ ),
				Expr( __function__ ), Arg( inputs, 2 )
			)
		)
	);
	__result__
);
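Before the timing comparison, here is a quick usage sketch of my own (the values are just made up for illustration). The second element of the argument list is an unevaluated expression that gets applied to every item, with _ standing for the current value and __ for the current key when the input is an associative array:

// map over a list: square each element
squares = ::map( {{1, 2, 3, 4}, _ ^ 2} );
Show( squares ); // should show {1, 4, 9, 16}

// map over an associative array: keys are kept, values are transformed
aa = Associative Array( {"a", "b"}, {10, 20} );
doubled = ::map( {aa, _ * 2} );
Show( doubled ); // should show ["a" => 20, "b" => 40]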
And here is a comparison of using a table vs. the map function:
Names Default To Here( 1 );

// load a sample text file and split it into lines
filename = Convert File Path( "$SAMPLE_IMPORT_DATA/UN Malaria 2009.csv", absolute, windows );
result = Load Text File( filename );
Show( result );
file_text2 = Words( result, "\!N" );

N = 10000; // number of repetitions for the timing comparison
word = "malaria";

// approach 1: load the lines into a private table and use Get Rows Where
s = HP Time();
Summation( i = 1, N,
	dt0 = New Table( "file_dt", New Column( "file_content", character ), Private );
	dt0:file_content << set values( file_text2 );
	pass_qty_row_array 1 = dt0 << get rows where( Contains( :file_content, word ) );
	Close( dt0, No Save );
	0
);
Show( time 1 = (HP Time() - s) / 1000000 );

// approach 2: map Contains() over the list and take the nonzero positions
s = HP Time();
Summation( i = 1, N,
	pass_qty_row_array 2 = Loc( Matrix( ::map( {file_text2, Contains( _, word )} ) ) );
	0
);
Show( time 2 = (HP Time() - s) / 1000000 );

// both approaches should find the same rows
Show( All( pass_qty_row_array 1 == pass_qty_row_array 2 ) );

// relative cost of the map approach: 1 - (time 1 - time 2) / time 1 = time 2 / time 1
1 - (time 1 - time 2) / time 1
Aside from having to define the map function once, using it usually looks much cleaner in code than the alternatives.
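For instance, here is a hypothetical side-by-side of my own (reusing the file_text2 list from the script above) that computes the length of every line, first with an explicit loop and then with the map call:

// explicit loop
lengths1 = {};
For( i = 1, i <= N Items( file_text2 ), i++,
	Insert Into( lengths1, Length( file_text2[i] ) )
);

// same result with the map function
lengths2 = ::map( {file_text2, Length( _ )} );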
Jordan