Share your ideas for the JMP Scripting Unsession at Discovery Summit by September 17th. We hope to see you there!
Choose Language Hide Translation Bar
Level III

batch process for http request



I am trying to write a script that sends an HTTP request by putting together an endpoint.


Putting together the request itself is not an issue, but in some cases I need to request some info on more than 1,000 items (test_article) at once.

I'm limited by the maximum length of the HTTP request to about 1,000 at a time.


Unfortunately, I have not been able to figure out how to put together a loop that would send requests for batches of 1,000 test_article items and concatenate the results as they are received.


Any insight would be greatly appreciated

Level II

Re: batch process for http request

I had a question regarding background http requests that @Craige_Hales answered. 

Http Background Request

I then took his code and modified it to do something like what you are looking for.  In my case I was querying over different dates but you can modify the code to loop over whatever you want.  You will probably need to make some adjustments depending on your specific use case.

Hopefully this helps.




// Fan a set of HTTP requests out across one or more background JMP instances,
// then gather the per-request results into a single data table.
// Flow: (1) write one url/query-key file pair per request into $temp,
//       (2) launch worker JMP processes that consume those files and save each
//           response as a .jmp table, (3) wait for all requests to drain,
//       (4) concatenate the saved tables into "Final Table".
// NOTE(review): the forum paste had its newlines collapsed, which left live code
// trapped behind // comments; line structure is restored here.

url = "www....";

// Using this to generate different queries
query_keys = Associative Array();
query_keys["key1"] = "Bob"; // Or whatever your query parameters are

// I am looping over different dates
days_back = 20; // total span of days to query
day_inc = 2;    // width of each date window (one request per window)

start = Date Increment( Today(), "day", -days_back );
end = Today();

cur_start = Date Increment( Today(), "day", -day_inc );
cur_end = end;
index = 1;

// Here I am making files that contain the url and query key for each HTTP
// request. I will use these later. This is only really needed if you intend to
// parallelize the HTTP requests among multiple JMP instances.
// If you don't want to parallelize the queries you can skip the writing to disk
// and just make a list of your query keys and/or urls.
While( cur_end > start,
	// BUG FIX: the original assigned to an undefined "query_key" here while
	// saving "query_keys" below, so the dates never reached the saved JSON.
	query_keys["startdate"] = Char( Format( cur_start, "yyyy-mm-dd" ) );
	query_keys["enddate"] = Char( Format( cur_end, "yyyy-mm-dd" ) );
	Save Text File( "$temp/url_file" || Char( index ) || ".txt", url );
	Save Text File( "$temp/query_keys_file" || Char( index ) || ".txt", As JSON Expr( query_keys ) );
	index++;
	If( Date Increment( cur_start, "day", -day_inc ) > start,
		cur_start = Date Increment( cur_start, "day", -day_inc ),
		cur_start = start
	);
	cur_end = Date Increment( cur_end, "day", -day_inc );
);
index--;
nscripts = index; // number of request files written
njmp = 1; // You can parallelize this as much as you want by launching more JMP instances.

// a place to keep up with running instances
rp = [=> ];
rpIndex = 0;
For( i = 1, i <= njmp, i += 1,
	// build a custom JSL file for each JMP to execute; the "//!" header makes
	// the worker script run as soon as that JMP instance opens it
	workerFileName = Save Text File(
		"$temp\deleteMe_RunsWhenLaunchedBeCareful" || Char( i ) || ".jsl",
		"\[//!
Delete File( "$temp/sentinel.txt" );
For( i = 1, i <= ]\" || Char( nscripts ) || "\[, i++,
	If( Is File( "$temp/url_file" || Char( i ) || ".txt" ),
		url = Load Text File( "$temp/url_file" || Char( i ) || ".txt" );
		query_key = Parse JSON( Load Text File( "$temp/query_keys_file" || Char( i ) || ".txt" ) );
		Delete File( "$temp/url_file" || Char( i ) || ".txt" );
		Delete File( "$temp/query_keys_file" || Char( i ) || ".txt" );
		// Using Try so if one request fails I can keep moving through the requests
		Try(
			dig_http = New HTTP Request( URL( url ), Method( "Get" ), Query String( query_key ), Timeout( 60 * 60 * 2 ) ) << Send;
			// My data is coming in as csv so I convert that to jmp.
			file_name = "$temp/data" || Char( i ) || ".csv";
			Save Text File( file_name, dig_http );
			dt = Open( file_name );
			dt << Save( "$temp/data" || Char( i ) || ".jmp" );
			Delete File( "$temp/data" || Char( i ) || ".csv" );
		,
			// Dummy file if Try fails, so the gather loop still finds a table
			dt = New Table();
			dt << Save( "$temp/data" || Char( i ) || ".jmp" );
		);
	);
);
Exit();
]\"
	);
	// don't launch another copy until the previous copy is running.
	// JMP will become unhappy if the preferences file is busy in another
	// copy of JMP. The sentinel will be deleted when JMP starts.
	Save Text File( "$temp/sentinel.txt", "" );
	rp[rpIndex += 1] = Run Program( Executable( "jmp" ), Options( {workerFileName} ) );
	Write( "\!n", i, " started..." );
	// wait for the sentinel to vanish
	While( File Exists( "$temp/sentinel.txt" ),
		Wait( 1 ) // don't burn the CPU while waiting
	);
	Write( "ok" );
);

// many queries are probably still queued; wait for them all to finish
scripts_left = nscripts;
While( scripts_left,
	scripts_left = 0;
	For( i = 1, i <= nscripts, i++,
		If( Is File( "$temp/url_file" || Char( i ) || ".txt" ),
			scripts_left += 1 // count the ones still alive
		)
	);
	Write( "\!n", scripts_left, " http requests left" );
	Wait( 1 ); // don't burn the CPU while waiting
);

Write( "\!ngathering results..." );
dt = New Table( "Final Table" );
For( i = 1, i <= nscripts, i += 1,
	filename = "$temp/data" || Char( i ) || ".jmp";
	// append each worker result to the final table, then clean up the temp files
	tmp_dt = Open( filename );
	dt << Concatenate( tmp_dt, Append to First Table );
	Close( tmp_dt );
	Delete File( filename );
	Delete File( "$temp\deleteMe_RunsWhenLaunchedBeCareful" || Char( i ) || ".jsl" );
);
Write( "\!ndone" );



Level III

Re: batch process for http request

Thanks a lot! I'll try to adapt your approach to my issue.
Community Manager Community Manager

Re: batch process for http request

Here is an approach that I use. It uses a single instance of JMP. The code combines the HTTP results into a single JSON object which is later converted to a data table.


// Sequentially issue HTTP requests in a single JMP instance and accumulate the
// "records" arrays from each JSON response into one list, which can later be
// converted to a data table.
// Assumes url, request_headers, query, access_token, and number_of_iterations
// are defined by the caller.
// NOTE(review): the paste dropped closing parentheses and garbled "And(" into
// "AndMZ("; the balanced form is restored here.
list = {};
For( i = 1, i <= number_of_iterations, i++,
	request = HTTP Request(
		Url( url ),
		Method( "GET" ),
		Headers( request_headers ),
		QueryString( query ),
		Password( access_token ),
		Timeout( 360 )
	);
	data = request << Send;
	If( request << IsSuccess,
		response = Parse JSON( data );
		// only keep responses that actually contain a non-empty "records" array
		If( And(
			Is Associative Array( response ),
			Contains( response, "records" ),
			N Items( response["records"] ) > 0
		),
			list ||= response["records"]
		);
	);
	Wait( 0.25 ); // throttle so we don't hammer the service
);
Level III

Re: batch process for http request

I realized I've left out the scripts I was starting from in my initial inquiry. For the sake of clarity, here's the starting-point script I would like to find a way to break down into batches when the number of items exceeds 1000.



		Names Default To Here( 1 );

		// Build a comma-separated list of every test_article value, append it to a
		// REST endpoint, and turn the JSON response into a data table.
		// NOTE(review): the For/If and Combine Columns calls were unbalanced as
		// posted; closing parentheses are restored below.
		dt = Data Table( "test_article" ); // This is a summary data table that only contains 1 column of interest, 'test_article' - I want to retrieve some info for each of the unique items in that column
		// If the number of items exceeds 1000, I'll need to break the query down
		// into batches of 1000 or less to construct the HTTP request.
		// Right now, if the list is 1000 or less, I'm golden; if it's greater, the request will fail.
		dt:test_article << Set Data Type( "Numeric" );
		dt << Transpose( columns( :test_article ), Output Table( "Transpose of test_article" ) );
		dt = Data Table( "Transpose of test_article" );
		// select every "Row ..." column produced by the transpose...
		Col_List = dt << Get Column Names( "String" );
		For( i = 1, i <= N Items( Col_List ), i++,
			If( Contains( Col_List[i], "Row" ),
				Column( Col_List[i] ) << Set Selected( 1 )
			)
		);
		// ...and merge them into one comma-delimited 'test_article' cell
		dt << Combine Columns(
			delimiter( "," ),
			Selected Columns only( 1 ),
			Selected Columns are Indicator Columns( 0 ),
			Column Name( "test_article" )
		);
		nc = N Cols( Data Table( "Transpose of test_article" ) );
		Show( nc );
		rowNum = 1;
		col = Column( Data Table( "Transpose of test_article" ), "test_article" );
		tableVal = col[rowNum];

		// NOTE(review): "http:/" has a single slash - presumably "http://" in the real endpoint; verify
		adr0 = "http:/xyz:1234/oligos/more-oligo-info-intended?ids=";
		address = adr0 || tableVal;

		request = New HTTP Request( URL( address ), Method( "GET" ), Timeout( 240 ) );
		json1 = request << Send;
		dt = JSON To Data Table( json1 );
		Current Data Table() << Set Name( "test_article_info" );
		Data Table( "test_article" ) << Close Window;
		Data Table( "Transpose of test_article" ) << Close Window;
		// TODO: build a loop that sends one request per batch of <= 1000 ids and
		// progressively concatenates the resulting tables together


Level III

Re: batch process for http request

Hi Ryan,

Thanks for the answer. I'll see if I can adapt your suggestion to the problem I've listed below.


Article Labels