<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>topic Re: Is there any solution for this? in SAS Programming</title>
    <link>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145726#M29044</link>
    <description>&lt;HTML&gt;&lt;HEAD&gt;&lt;/HEAD&gt;&lt;BODY&gt;&lt;P&gt;Not much different to what you already have been provided with, but split each type of "KEY" out into a dataset, then set back together again (note depending on number of keys, this could create a lot of small datasets):&lt;/P&gt;&lt;P&gt;/* Some test data */&lt;/P&gt;&lt;P&gt;data have;&lt;/P&gt;&lt;P&gt;&amp;nbsp; attrib key format=$5.;&lt;/P&gt;&lt;P&gt;&amp;nbsp; array var{15} $20.;&lt;/P&gt;&lt;P&gt;&amp;nbsp; do i=1 to 1000; /*32498768;*/&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp; key=strip(put(mod(i,234),best.));&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp; output;&lt;/P&gt;&lt;P&gt;&amp;nbsp; end;&lt;/P&gt;&lt;P&gt;run;&lt;/P&gt;&lt;P&gt;/* Get list of unique KEY values */&lt;/P&gt;&lt;P&gt;proc sql;&lt;/P&gt;&lt;P&gt;&amp;nbsp; create table WORK.LOOP as&lt;/P&gt;&lt;P&gt;&amp;nbsp; select&amp;nbsp; distinct&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; KEY&lt;/P&gt;&lt;P&gt;&amp;nbsp; from&amp;nbsp;&amp;nbsp;&amp;nbsp; WORK.HAVE;&lt;/P&gt;&lt;P&gt;quit;&lt;/P&gt;&lt;P&gt;/* Generate code to split data into each key value */&lt;/P&gt;&lt;P&gt;data _null_;&lt;/P&gt;&lt;P&gt;&amp;nbsp; set loop;&lt;/P&gt;&lt;P&gt;&amp;nbsp; call execute('data work.srt'||strip(key)||'; set work.have (where=(key="'||strip(key)||'")); run;');&lt;/P&gt;&lt;P&gt;run;&lt;/P&gt;&lt;P&gt;/* Generate final datastep to append back together again */&lt;/P&gt;&lt;P&gt;data _null_;&lt;/P&gt;&lt;P&gt;&amp;nbsp; set loop end=last;&lt;/P&gt;&lt;P&gt;&amp;nbsp; if _n_=1 then call execute('data want; set ');&lt;/P&gt;&lt;P&gt;&amp;nbsp; call execute(' work.srt'||strip(key));&lt;/P&gt;&lt;P&gt;&amp;nbsp; if last then call execute(';run;');&lt;/P&gt;&lt;P&gt;run;&lt;/P&gt;&lt;/BODY&gt;&lt;/HTML&gt;</description>
    <pubDate>Wed, 17 Sep 2014 14:51:36 GMT</pubDate>
    <dc:creator>RW9</dc:creator>
    <dc:date>2014-09-17T14:51:36Z</dc:date>
    <item>
      <title>Is there any solution for this?</title>
      <link>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145708#M29026</link>
      <description>&lt;HTML&gt;&lt;HEAD&gt;&lt;/HEAD&gt;&lt;BODY&gt;&lt;P&gt;Hi all&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;I´m trying to improve my time processing and so I am development a hash object for a faster sort but an error about the amount of memory ocurrs and I not sure if there is any option to avoid it.&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Regards&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;data _null_;&lt;/P&gt;&lt;P&gt;if 0 then set a;&lt;/P&gt;&lt;P&gt;declare hash epi (dataset:"a", ordered:"y");&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; epi.definekey ("key1","key2");&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; epi.definedata (all:"yes");&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; epi.definedone ();&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; epi.output (dataset: "fastorder");&lt;/P&gt;&lt;P&gt;stop;&lt;/P&gt;&lt;P&gt;run;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;ERROR: Hash object added 7864304 items when memory failure occurred.&lt;/P&gt;&lt;P&gt;FATAL: Insufficient memory to execute DATA step program. 
Aborted during the EXECUTION phase.&lt;/P&gt;&lt;P&gt;ERROR: The SAS System stopped processing this step because of insufficient memory.&lt;/P&gt;&lt;P&gt;NOTE: DATA statement used (Total process time):&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; real time&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 9.06 seconds&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; user cpu time&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 7.78 seconds&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; system cpu time&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 1.20 seconds&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; Memory&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 1137687k&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; OS Memory&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 1217248k&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; Timestamp&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 15/09/2014&amp;nbsp; 15:48:46&lt;/P&gt;&lt;/BODY&gt;&lt;/HTML&gt;</description>
      <pubDate>Mon, 15 Sep 2014 14:01:15 GMT</pubDate>
      <guid>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145708#M29026</guid>
      <dc:creator>SergioSanchez</dc:creator>
      <dc:date>2014-09-15T14:01:15Z</dc:date>
    </item>
    <item>
      <title>Re: Is there any solution for this?</title>
      <link>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145709#M29027</link>
      <description>&lt;HTML&gt;&lt;HEAD&gt;&lt;/HEAD&gt;&lt;BODY&gt;&lt;P&gt;To the above problem increasing the memsize will help.&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;to check the current or default memsize, please run the below code&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;PROC OPTIONS GROUP= MEMORY ;&lt;/P&gt;&lt;P&gt;RUN;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;then increase the memsize by options memsize = X;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Thanks,&lt;/P&gt;&lt;P&gt;Jag&lt;/P&gt;&lt;/BODY&gt;&lt;/HTML&gt;</description>
      <pubDate>Mon, 15 Sep 2014 14:14:14 GMT</pubDate>
      <guid>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145709#M29027</guid>
      <dc:creator>Jagadishkatam</dc:creator>
      <dc:date>2014-09-15T14:14:14Z</dc:date>
    </item>
    <item>
      <title>Re: Is there any solution for this?</title>
      <link>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145710#M29028</link>
      <description>&lt;HTML&gt;&lt;HEAD&gt;&lt;/HEAD&gt;&lt;BODY&gt;&lt;P&gt;You also need sufficient physical RAM available.&lt;/P&gt;&lt;P&gt;It was interrupted at almost 8 million rows, how many more do you need?&lt;/P&gt;&lt;P&gt;Depending on the look-up logic (hit-rate, 1-M etc), hash tables aren't necessarily the most efficient solution.&lt;/P&gt;&lt;/BODY&gt;&lt;/HTML&gt;</description>
      <pubDate>Mon, 15 Sep 2014 14:24:59 GMT</pubDate>
      <guid>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145710#M29028</guid>
      <dc:creator>LinusH</dc:creator>
      <dc:date>2014-09-15T14:24:59Z</dc:date>
    </item>
    <item>
      <title>Re: Is there any solution for this?</title>
      <link>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145711#M29029</link>
      <description>&lt;HTML&gt;&lt;HEAD&gt;&lt;/HEAD&gt;&lt;BODY&gt;&lt;P&gt;Thanks guys.&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;I increase the amount of memory without any result. I need more physical memory installed in my pc, only&amp;nbsp; 4 gb is very short&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Only two images to show you the issue.&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Before (1.jpg) and after (2.jpg)&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Thanks for your help&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;P:S : A better way to sort a dataset of 40 millions rows instead of Proc report ?&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Regards&lt;/P&gt;&lt;BR /&gt;&lt;IMG src="https://communities.sas.com/t5/image/serverpage/image-id/10776i1D8CD4E8DF6BAC77/image-size/large?v=1.0&amp;amp;px=600" border="0" alt="2.jpg" title="2.jpg" /&gt;&lt;IMG src="https://communities.sas.com/t5/image/serverpage/image-id/10777i9C6DECB849F3238E/image-size/large?v=1.0&amp;amp;px=600" border="0" alt="1.jpg" title="1.jpg" /&gt;&lt;/BODY&gt;&lt;/HTML&gt;</description>
      <pubDate>Mon, 15 Sep 2014 15:03:01 GMT</pubDate>
      <guid>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145711#M29029</guid>
      <dc:creator>SergioSanchez</dc:creator>
      <dc:date>2014-09-15T15:03:01Z</dc:date>
    </item>
    <item>
      <title>Re: Is there any solution for this?</title>
      <link>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145712#M29030</link>
      <description>&lt;HTML&gt;&lt;HEAD&gt;&lt;/HEAD&gt;&lt;BODY&gt;&lt;P&gt;This doesn't sound like a job for a client installation?&lt;/P&gt;&lt;P&gt;Are you asking for a better option or not?&lt;/P&gt;&lt;P&gt;If so, you must share your whole program, including the look-up logic.&lt;/P&gt;&lt;/BODY&gt;&lt;/HTML&gt;</description>
      <pubDate>Mon, 15 Sep 2014 15:37:23 GMT</pubDate>
      <guid>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145712#M29030</guid>
      <dc:creator>LinusH</dc:creator>
      <dc:date>2014-09-15T15:37:23Z</dc:date>
    </item>
    <item>
      <title>Re: Is there any solution for this?</title>
      <link>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145713#M29031</link>
      <description>&lt;HTML&gt;&lt;HEAD&gt;&lt;/HEAD&gt;&lt;BODY&gt;&lt;P&gt;How fast is PROC SORT on your dataset? Have you tried different sorting options?&lt;/P&gt;&lt;P&gt;The default SORTSIZE is sometimes very small, and gives people the feeling, that hash object is significantly better for sorting.&lt;/P&gt;&lt;P&gt;&lt;A href="http://support.sas.com/documentation/cdl/en/hostwin/63285/HTML/default/viewer.htm#win-sysop-sortsize.htm" title="http://support.sas.com/documentation/cdl/en/hostwin/63285/HTML/default/viewer.htm#win-sysop-sortsize.htm"&gt;SAS(R) 9.2 Companion for Windows, Second Edition&lt;/A&gt;&lt;/P&gt;&lt;P&gt;try:&lt;/P&gt;&lt;P&gt;options sortsize=3G;&lt;/P&gt;&lt;P&gt;proc sort ...&lt;/P&gt;&lt;P&gt;But first increase your MEMSIZE to 3.5G. You can do this in a &lt;STRONG&gt;configuration file&lt;/STRONG&gt; or &lt;STRONG&gt;startup option&lt;/STRONG&gt;! (&lt;STRONG&gt;NOT&lt;/STRONG&gt;with &lt;EM&gt;options memsize=X;&lt;/EM&gt;&amp;nbsp; statement )&lt;/P&gt;&lt;P&gt;&lt;A href="http://support.sas.com/documentation/cdl/en/hostwin/63285/HTML/default/viewer.htm#win-sysop-memsize.htm" title="http://support.sas.com/documentation/cdl/en/hostwin/63285/HTML/default/viewer.htm#win-sysop-memsize.htm"&gt;SAS(R) 9.2 Companion for Windows, Second Edition&lt;/A&gt;&lt;/P&gt;&lt;/BODY&gt;&lt;/HTML&gt;</description>
      <pubDate>Mon, 15 Sep 2014 16:02:04 GMT</pubDate>
      <guid>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145713#M29031</guid>
      <dc:creator>gergely_batho</dc:creator>
      <dc:date>2014-09-15T16:02:04Z</dc:date>
    </item>
    <item>
      <title>Re: Is there any solution for this?</title>
      <link>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145714#M29032</link>
      <description>&lt;HTML&gt;&lt;HEAD&gt;&lt;/HEAD&gt;&lt;BODY&gt;&lt;P&gt;&lt;SPAN style="font-family: 'Helvetica Neue', Helvetica, Arial, 'Lucida Grande', sans-serif; background-color: #ffffff;"&gt;client installation?&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&lt;SPAN style="font-family: 'Helvetica Neue', Helvetica, Arial, 'Lucida Grande', sans-serif; background-color: #ffffff;"&gt;&lt;BR /&gt;&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&lt;SPAN style="font-family: 'Helvetica Neue', Helvetica, Arial, 'Lucida Grande', sans-serif; background-color: #ffffff;"&gt;Sorry Linush, I don't know what you mean with it.&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&lt;SPAN style="font-family: 'Helvetica Neue', Helvetica, Arial, 'Lucida Grande', sans-serif; background-color: #ffffff;"&gt;&lt;BR /&gt;&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&lt;SPAN style="font-family: 'Helvetica Neue', Helvetica, Arial, 'Lucida Grande', sans-serif; background-color: #ffffff;"&gt;One of the task of my job is to make sure that the data load from the source is the same that the data that is working with so we take the data set from the source, order it, take the data that is loaded in the system, order it and verify if the data are the same, wich data are in the source and not in the system and viceversa.&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;My code is something like this below&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;data a;&lt;/P&gt;&lt;P&gt;set b (where= vardate1&amp;lt;= date1 and vardate2&amp;lt;=date2 and vardate3&amp;lt;=date3 and vardate4&amp;lt;=date4);&lt;/P&gt;&lt;P&gt;run;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;data x;&lt;/P&gt;&lt;P&gt;set y (where= vardate1&amp;lt;= date1 and vardate2&amp;lt;=date2 and vardate3&amp;lt;=date3 and vardate4&amp;lt;=date4);&lt;/P&gt;&lt;P&gt;run;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;proc sort data = a;&lt;/P&gt;&lt;P&gt;order by key;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;proc sort data = x (rename=(var_f = key));&lt;/P&gt;&lt;P&gt;order by key;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;data h 
i j;&lt;/P&gt;&lt;P&gt;merge a (in=w) b(in=q);&lt;/P&gt;&lt;P&gt;if w then output h;&lt;/P&gt;&lt;P&gt;else if g then output i;&lt;/P&gt;&lt;P&gt;else output j;&lt;/P&gt;&lt;P&gt;run;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;The problem is that the proc sort takes tooooooooo much time, so I´m trying to replace this piece of the code.&lt;/P&gt;&lt;P&gt;A good alternative was to use a hash object but muy PC hasn't enough memory for perform it with success.&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Regards&lt;/P&gt;&lt;/BODY&gt;&lt;/HTML&gt;</description>
      <pubDate>Mon, 15 Sep 2014 16:09:55 GMT</pubDate>
      <guid>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145714#M29032</guid>
      <dc:creator>SergioSanchez</dc:creator>
      <dc:date>2014-09-15T16:09:55Z</dc:date>
    </item>
    <item>
      <title>Re: Is there any solution for this?</title>
      <link>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145715#M29033</link>
      <description>&lt;HTML&gt;&lt;HEAD&gt;&lt;/HEAD&gt;&lt;BODY&gt;&lt;P&gt;Sergio replacing a proc sort is a bad idea as that one is one of the most optimal in processing (includes multithreading).&lt;/P&gt;&lt;P&gt;The memory sizing gives me the feeling you are running a 32-bit OS on a 4Gb system. &lt;/P&gt;&lt;P&gt;Only 3Gb is left for your processes and 1Gb has got lost. (yep it is there on your screenshots) And you are having 4 cpu's, could be used all in parallel.&amp;nbsp; &lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;As you are using Windows desktop the SAS installation will give you as much is available for your processing.&lt;/P&gt;&lt;P&gt;Please verify those assumptions. To be corrected when I am wrong. Gergely did already the on sortsize setting. &lt;/P&gt;&lt;P&gt;You are having 40M records en just 8M where able to fit into memory. I assume the recordsize is about 250bytes the dataset size is about 5Gb.&lt;/P&gt;&lt;P&gt;I see 8 numerics for dates (total 64 bytes) there must be some 180 being elsewhere. You have an unique key (key but the input is not ordered that way.&lt;/P&gt;&lt;P&gt;&lt;BR /&gt;A question:&lt;/P&gt;&lt;P&gt;- Is the order of the key (or other indentifier) in both datasets ordered in the same way?&lt;/P&gt;&lt;P&gt;&amp;nbsp; Than you do not need to do the sorting the merge by is accepting a non sorted input. &lt;A href="http://support.sas.com/documentation/cdl/en/lestmtsref/63323/HTML/default/viewer.htm#p0yeyftk8ftuckn1o5qzy53284gz.htm" title="http://support.sas.com/documentation/cdl/en/lestmtsref/63323/HTML/default/viewer.htm#p0yeyftk8ftuckn1o5qzy53284gz.htm"&gt;SAS(R) 9.3 Statements: Reference&lt;/A&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp; I ma not seeing the by statement so it could be a process with some defined ordering values. 
&lt;A href="http://support.sas.com/documentation/cdl/en/lestmtsref/63323/HTML/default/viewer.htm#n1i8w2bwu1fn5kn1gpxj18xttbb0.htm" title="http://support.sas.com/documentation/cdl/en/lestmtsref/63323/HTML/default/viewer.htm#n1i8w2bwu1fn5kn1gpxj18xttbb0.htm"&gt;SAS(R) 9.3 Statements: Reference&lt;/A&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp; When the source is a RDBMS you can retrieve the data ordered.&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; &lt;/P&gt;&lt;P&gt;&amp;nbsp; Your program will become like"&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;data h i j;&lt;/P&gt;&lt;P&gt;merge&lt;/P&gt;&lt;P&gt;&amp;nbsp; b (in=w&amp;nbsp;&amp;nbsp; where=( vardate1&amp;lt;= date1 and vardate2&amp;lt;=date2 and vardate3&amp;lt;=date3 and vardate4&amp;lt;=date4) )&lt;/P&gt;&lt;P&gt;&amp;nbsp; y (in=q&amp;nbsp;&amp;nbsp;&amp;nbsp; where=( vardate1&amp;lt;= date1 and vardate2&amp;lt;=date2 and vardate3&amp;lt;=date3 and vardate4&amp;lt;=date4) )&amp;nbsp; ;&lt;/P&gt;&lt;P&gt;if w then output h;&lt;/P&gt;&lt;P&gt;else if g then output i;&lt;/P&gt;&lt;P&gt;else output j;&lt;/P&gt;&lt;P&gt;run;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;There are possible other approaches but the are requiring more knowledge on you data. Questions:&lt;/P&gt;&lt;P&gt;- What is the sizing of your datasets (observations variables recordsize)?&lt;/P&gt;&lt;P&gt;- What I needed for the selection and what data is propagated?&lt;/P&gt;&lt;P&gt;- Which orderings are present and which are important as result?&amp;nbsp;&amp;nbsp; &lt;/P&gt;&lt;P&gt;- Is the merge split as described your wanted process? &lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;An idea. Is one of the datasets just an indicator (the other key merging) in an other order of the same original dataset&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp; just keep that key and the original order present in the dataset and drop all other variables.&amp;nbsp;&amp;nbsp; &lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;&lt;BR /&gt; &lt;/P&gt;&lt;/BODY&gt;&lt;/HTML&gt;</description>
      <pubDate>Mon, 15 Sep 2014 17:30:01 GMT</pubDate>
      <guid>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145715#M29033</guid>
      <dc:creator>jakarman</dc:creator>
      <dc:date>2014-09-15T17:30:01Z</dc:date>
    </item>
    <item>
      <title>Re: Is there any solution for this?</title>
      <link>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145716#M29034</link>
      <description>&lt;HTML&gt;&lt;HEAD&gt;&lt;/HEAD&gt;&lt;BODY&gt;&lt;P&gt;I agree with Jaap, you're most unlikely to be able to improve on the speed of a Proc Sort by using a hash object.&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;One thing you can do is try using SAS MP Connect to run the two sorts in parallel and then do the merge. I've done similar things myself and it's usually much faster with large data sates, even on a Windows machine.&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;There's an excellent example of doing just that here &lt;A class="active_link" href="http://support.sas.com/documentation/cdl/en/connref/61908/HTML/default/viewer.htm#a001249955.htm" title="http://support.sas.com/documentation/cdl/en/connref/61908/HTML/default/viewer.htm#a001249955.htm"&gt;SAS/CONNECT(R) 9.2 User's Guide&lt;/A&gt;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Chris&lt;/P&gt;&lt;/BODY&gt;&lt;/HTML&gt;</description>
      <pubDate>Mon, 15 Sep 2014 21:27:19 GMT</pubDate>
      <guid>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145716#M29034</guid>
      <dc:creator>ChrisBrooks</dc:creator>
      <dc:date>2014-09-15T21:27:19Z</dc:date>
    </item>
    <item>
      <title>Re: Is there any solution for this?</title>
      <link>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145717#M29035</link>
      <description>&lt;HTML&gt;&lt;HEAD&gt;&lt;/HEAD&gt;&lt;BODY&gt;&lt;P&gt;I'm also wondering what your hash object actually looks like. That many records makes me think there may be a better way.&lt;/P&gt;&lt;/BODY&gt;&lt;/HTML&gt;</description>
      <pubDate>Mon, 15 Sep 2014 22:45:45 GMT</pubDate>
      <guid>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145717#M29035</guid>
      <dc:creator>ballardw</dc:creator>
      <dc:date>2014-09-15T22:45:45Z</dc:date>
    </item>
    <item>
      <title>Re: Is there any solution for this?</title>
      <link>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145718#M29036</link>
      <description>&lt;HTML&gt;&lt;HEAD&gt;&lt;/HEAD&gt;&lt;BODY&gt;&lt;P&gt;Good morning&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;I left here a proc sort, this proc sort is one of the bigger.&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;72&amp;nbsp;&amp;nbsp; proc sort data = dataset_1= dataset_2;&lt;/P&gt;&lt;P&gt;73&amp;nbsp;&amp;nbsp; by key;&lt;/P&gt;&lt;P&gt;74&amp;nbsp;&amp;nbsp; run;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;NOTA: Se han leído &lt;STRONG&gt;32498768&lt;/STRONG&gt; observaciones del conj. datos WORK.EC_DIM_PER.&lt;/P&gt;&lt;P&gt;NOTA: El conj. datos WORK.PERSORDENADO tiene 32498768 observaciones y 31 variables.&lt;/P&gt;&lt;P&gt;NOTA: PROCEDIMIENTO SORT utilizado (Tiempo de proceso total):&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; tiempo real&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; &lt;STRONG&gt; 45:25.58&lt;/STRONG&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; tiempo de cpu del usuario&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 42.85 segundos&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; tiempo de cpu del sistema&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 2:40.40&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; Memoria&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 131842k&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Jaap, I atttach you a proc sort screenshort, I hope it can resolved your questions.&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Ballardw, Hash hasn`t enough memory for run in my PC. It couldn't load the entire table in memory. 
Please see my post &lt;SPAN style="color: #000000; font-family: arial, sans-serif; background-color: #ffffff;"&gt;above&lt;/SPAN&gt;.&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Thanks all for your help&lt;/P&gt;&lt;BR /&gt;&lt;IMG src="https://communities.sas.com/t5/image/serverpage/image-id/11845i09EF4A000257B1E8/image-size/large?v=1.0&amp;amp;px=600" border="0" alt="Proc Contents.jpg" title="Proc Contents.jpg" /&gt;&lt;/BODY&gt;&lt;/HTML&gt;</description>
      <pubDate>Tue, 16 Sep 2014 07:54:39 GMT</pubDate>
      <guid>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145718#M29036</guid>
      <dc:creator>SergioSanchez</dc:creator>
      <dc:date>2014-09-16T07:54:39Z</dc:date>
    </item>
    <item>
      <title>Re: Is there any solution for this?</title>
      <link>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145719#M29037</link>
      <description>&lt;HTML&gt;&lt;HEAD&gt;&lt;/HEAD&gt;&lt;BODY&gt;&lt;P&gt;Sergio,&lt;BR /&gt;There is a lot to see with that info. It are not the answers to all of my questions.&lt;/P&gt;&lt;P&gt;It is definitely time for a upgrade SAS 9.1 Windows XP (32) is getting outdated.&lt;BR /&gt;The recordlength of 672&amp;nbsp; with 10 numerics (80byte) and 6 chars 40 (240+ 3 to 9) there must be more chars in that dataset.&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;In you sort processing 45m45 is the time you did need to wait before it was ready.&lt;/P&gt;&lt;P&gt;You SAS process did use 42s and the system used 2m:40.&amp;nbsp; It is a signal there is a lot time waiting for something.&lt;/P&gt;&lt;P&gt;That "waiting for something" must be I/O (writing/reading from disk) trying to focus on minimizing that.&lt;/P&gt;&lt;P&gt;Your file size must by about 22Gb with these numbers. reading 22Gb and writing that back is 44Gb data-transfer, not counted the intermediate utility files.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp; &lt;/P&gt;&lt;P&gt;As the hardware is fixed some tuning options. 
&lt;A href="http://support.sas.com/documentation/cdl/en/lrcon/67401/HTML/default/viewer.htm#n0a1u9b2buxl5yn1nv12rnoppiip.htm" title="http://support.sas.com/documentation/cdl/en/lrcon/67401/HTML/default/viewer.htm#n0a1u9b2buxl5yn1nv12rnoppiip.htm"&gt;SAS(R) 9.4 Language Reference: Concepts, Third Edition&lt;/A&gt;&lt;/P&gt;&lt;P&gt;- compress=binary&amp;nbsp;&amp;nbsp;&amp;nbsp; You are having many char variables it can gain for reading and writing the visible datasets&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; &lt;/P&gt;&lt;P&gt;- Bufsize=64k bufno=8 &lt;A href="http://support.sas.com/documentation/cdl/en/hostwin/67279/HTML/default/viewer.htm#p1pwg39c440bqtn1xgmw49lo7mix.htm" title="http://support.sas.com/documentation/cdl/en/hostwin/67279/HTML/default/viewer.htm#p1pwg39c440bqtn1xgmw49lo7mix.htm"&gt;SAS(R) 9.4 Companion for Windows, Third Edition&lt;/A&gt; as attempt to minimize IO.&lt;/P&gt;&lt;P&gt;&amp;nbsp; The Utilities files are getting options in 9.4 these are out of scope with this.&amp;nbsp; &lt;/P&gt;&lt;P&gt;&amp;nbsp; Chris proposal to spilt the dataset in eg 10 parts (2Gb) sorting them each and merging can help as avoiding a lot of those intermediate big files.&amp;nbsp;&amp;nbsp;&amp;nbsp; &lt;/P&gt;&lt;P&gt;&lt;BR /&gt;The sort memory usage is saying just 131842k being used. That one is the &lt;A href="http://support.sas.com/documentation/cdl/en/hostwin/67279/HTML/default/viewer.htm#p024jq3a5zotf3n19rt86580m0cv.htm" title="http://support.sas.com/documentation/cdl/en/hostwin/67279/HTML/default/viewer.htm#p024jq3a5zotf3n19rt86580m0cv.htm"&gt;SAS(R) 9.4 Companion for Windows, Third Edition&lt;/A&gt; sortsize was set with much smaller values in older releases (64Kb default 9.1). Please verify this one.&amp;nbsp;&amp;nbsp; &lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;There are possible other approaches but the are requiring more knowledge on you data. 
Questions:&lt;/P&gt;&lt;P&gt;- (answered)&lt;/P&gt;&lt;P&gt;- What is needed for the selection and what data is propagated?&lt;/P&gt;&lt;P&gt;- Which orderings are present and which are important as result?&amp;nbsp;&amp;nbsp; &lt;/P&gt;&lt;P&gt;- Is the merge split as described your wanted process? &lt;/P&gt;&lt;/BODY&gt;&lt;/HTML&gt;</description>
      <pubDate>Tue, 16 Sep 2014 10:35:59 GMT</pubDate>
      <guid>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145719#M29037</guid>
      <dc:creator>jakarman</dc:creator>
      <dc:date>2014-09-16T10:35:59Z</dc:date>
    </item>
    <item>
      <title>Re: Is there any solution for this?</title>
      <link>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145720#M29038</link>
      <description>&lt;HTML&gt;&lt;HEAD&gt;&lt;/HEAD&gt;&lt;BODY&gt;&lt;P&gt;Thanks Jaap for this "free of charge lessons" :smileylaugh:.&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;I`d try to increase the amount of memory in the buffer and total too.&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;As for your questions, I coul say&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P style="font-family: 'Helvetica Neue', Helvetica, Arial, 'Lucida Grande', sans-serif; background-color: #ffffff;"&gt;- The most of the datases I need the hole set, in case I only need subset it´s already implemented in the code&lt;/P&gt;&lt;P style="font-family: 'Helvetica Neue', Helvetica, Arial, 'Lucida Grande', sans-serif; background-color: #ffffff;"&gt;- Is some dataset there is no order at all, in other the order is not the one I need &lt;/P&gt;&lt;P style="font-family: 'Helvetica Neue', Helvetica, Arial, 'Lucida Grande', sans-serif; background-color: #ffffff;"&gt;- In fact it is not what I was thinking, but it works is doesn´t matter.&lt;/P&gt;&lt;P style="font-family: 'Helvetica Neue', Helvetica, Arial, 'Lucida Grande', sans-serif; background-color: #ffffff;"&gt;&lt;/P&gt;&lt;P style="font-family: 'Helvetica Neue', Helvetica, Arial, 'Lucida Grande', sans-serif; background-color: #ffffff;"&gt;What about multiprocessing in local, I'd try to explain myselft. Is posible to run the two short in parallel?. I read about this but I know sure if it could be posible running a local sesion and "emulate" a server on local machine.&lt;/P&gt;&lt;P style="font-family: 'Helvetica Neue', Helvetica, Arial, 'Lucida Grande', sans-serif; background-color: #ffffff;"&gt;&lt;/P&gt;&lt;P style="font-family: 'Helvetica Neue', Helvetica, Arial, 'Lucida Grande', sans-serif; background-color: #ffffff;"&gt;Thanks&lt;/P&gt;&lt;/BODY&gt;&lt;/HTML&gt;</description>
      <pubDate>Tue, 16 Sep 2014 13:07:03 GMT</pubDate>
      <guid>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145720#M29038</guid>
      <dc:creator>SergioSanchez</dc:creator>
      <dc:date>2014-09-16T13:07:03Z</dc:date>
    </item>
    <item>
      <title>Re: Is there any solution for this?</title>
      <link>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145721#M29039</link>
      <description>&lt;HTML&gt;&lt;HEAD&gt;&lt;/HEAD&gt;&lt;BODY&gt;&lt;P&gt;Sergio, I have to thank you keeping my brains sharp for hearing and trying to understand the issue and trying to find acceptable ways out.&lt;/P&gt;&lt;P&gt;The "proc sort" will use multi threading (using all 4 cores) as it reaches some point. It will split up your dataset in smaller pieces sort them and doe a merger when writing out the results. There is some point with data sizing where the efficiency of this process rapidly falls by resource consumption on the system. As you are&amp;nbsp; already short on that I would not advice to run those in parallel.&lt;BR /&gt;If the sorting of let us take a 2 Gb is acceptable fast (&amp;lt;1min) then sorting 10 of them in serial will take a factor 10 times this time.&lt;/P&gt;&lt;P&gt;Merging that 10 files together will be the last step. Do not expect miracles from this. Copying 2Gb is still a lot and will need the time for that.&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Running processes parallel can be done by batch scripting&amp;nbsp;&amp;nbsp; or using MP-connect part of SAS/connect.&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; &lt;/P&gt;&lt;/BODY&gt;&lt;/HTML&gt;</description>
      <pubDate>Tue, 16 Sep 2014 13:33:16 GMT</pubDate>
      <guid>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145721#M29039</guid>
      <dc:creator>jakarman</dc:creator>
      <dc:date>2014-09-16T13:33:16Z</dc:date>
    </item>
    <item>
      <title>Re: Is there any solution for this?</title>
      <link>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145722#M29040</link>
      <description>&lt;HTML&gt;&lt;HEAD&gt;&lt;/HEAD&gt;&lt;BODY&gt;&lt;P&gt;I don't think using Hash Table to sort would be faster than using proc sort . If the table is too big . Here is a proposal .&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;data F M;&lt;/P&gt;&lt;P&gt;set sashelp.class;&lt;/P&gt;&lt;P&gt;if sex='F' then output F;&lt;/P&gt;&lt;P&gt;else if sex='M' then output M;&lt;/P&gt;&lt;P&gt;run;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;proc append base=want data=F force;run;&lt;/P&gt;&lt;P&gt;proc append base=want data=M force;run;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Or simply make an index for it ,and you don't sort it again every time you need to BY statement .&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Xia Keshan&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Message was edited by: xia keshan&lt;/P&gt;&lt;/BODY&gt;&lt;/HTML&gt;</description>
      <pubDate>Wed, 17 Sep 2014 12:21:49 GMT</pubDate>
      <guid>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145722#M29040</guid>
      <dc:creator>Ksharp</dc:creator>
      <dc:date>2014-09-17T12:21:49Z</dc:date>
    </item>
    <item>
      <title>Re: Is there any solution for this?</title>
      <link>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145723#M29041</link>
      <description>&lt;HTML&gt;&lt;HEAD&gt;&lt;/HEAD&gt;&lt;BODY&gt;&lt;P&gt;HI Sergio,&lt;/P&gt;&lt;P&gt;&amp;nbsp; As stated, hash is not the answer here. I don't think MP will work either unless you have CONNECT on your installation, you can check that by running proc setinit. If you don't have CONNECT, you still may be able to marginally increase the speed of your sort by defragmenting your disk and/or breaking up the data into smaller chunks and sorting those.&amp;nbsp; Your sort needs about 3 times the size of the data to execute, and if you have a fragmented disk then it has to get the space from multiple spots.&amp;nbsp; If you split the data up into 40 different datasets of 1 million observations each, sort each of them by your keys, then set them together using the by variables, you will have a single sorted data set, and it may run faster than your sort of a single big dataset.&lt;/P&gt;&lt;/BODY&gt;&lt;/HTML&gt;</description>
      <pubDate>Wed, 17 Sep 2014 12:31:50 GMT</pubDate>
      <guid>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145723#M29041</guid>
      <dc:creator>AmySwinford</dc:creator>
      <dc:date>2014-09-17T12:31:50Z</dc:date>
    </item>
    <item>
      <title>Re: Is there any solution for this?</title>
      <link>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145724#M29042</link>
      <description>&lt;HTML&gt;&lt;HEAD&gt;&lt;/HEAD&gt;&lt;BODY&gt;&lt;P&gt;Thanks all for your comments&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;I have made one text splitting the dataset in 10 smaller datasets, the results given are below&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;options fullstimer;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;proc sort data = Dataset_A out = Dataset_B (compress=binary);&lt;/P&gt;&lt;P&gt;by KEY;&lt;/P&gt;&lt;P&gt;run;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;NOTE: There were 32498768 observations read from the data set Dataset_A.&lt;/P&gt;&lt;P&gt;NOTE: The data set WORK.PERSONSHORT has 32498768 observations and 31 variables.&lt;/P&gt;&lt;P&gt;NOTE: Compressing data set Dataset_B decreased size by 71.08 percent.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; Compressed is 391645 pages; un-compressed would require 1354116 pages.&lt;/P&gt;&lt;P&gt;NOTE: PROCEDURE SORT used (Total process time):&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; real time&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 44:04.28&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; user cpu time&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 1:54.30&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; system cpu time&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 1:13.66&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; Memory&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 66929k&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; OS 
Memory&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 100216k&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; Timestamp&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 16/09/2014&amp;nbsp; 17:22:45&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Now I splitt the dataset with a macro (thanks to Selvaratnam Sridharma for the tool &lt;img id="smileywink" class="emoticon emoticon-smileywink" src="https://communities.sas.com/i/smilies/16x16_smiley-wink.png" alt="Smiley Wink" title="Smiley Wink" /&gt;)&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;data _null_;&lt;/P&gt;&lt;P&gt;33&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; if 0 then set ec_dim_per nobs=count;&lt;/P&gt;&lt;P&gt;34&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; call symput('numobs',put(count,8.));&lt;/P&gt;&lt;P&gt;35&amp;nbsp;&amp;nbsp; run;&lt;/P&gt;&lt;P&gt;36&amp;nbsp;&amp;nbsp; %let n=%sysevalf(&amp;amp;numobs/&amp;amp;num,ceil);&lt;/P&gt;&lt;P&gt;37&amp;nbsp;&amp;nbsp; data %do J=1 %to &amp;amp;num ; DATASET_B_&amp;amp;J&amp;nbsp; %end; ;&lt;/P&gt;&lt;P&gt;38&amp;nbsp;&amp;nbsp; set DATASET_B;&lt;/P&gt;&lt;P&gt;39&amp;nbsp;&amp;nbsp; %do I=1 %to &amp;amp;num;&lt;/P&gt;&lt;P&gt;40&amp;nbsp;&amp;nbsp; if %eval(&amp;amp;n*(&amp;amp;i-1)) &amp;lt;_n_ &amp;lt;= %eval(&amp;amp;n*&amp;amp;I)&lt;/P&gt;&lt;P&gt;41&amp;nbsp;&amp;nbsp; then output DATASET_B_&amp;amp;I;&lt;/P&gt;&lt;P&gt;42&amp;nbsp;&amp;nbsp; %end;&lt;/P&gt;&lt;P&gt;43&amp;nbsp;&amp;nbsp; run;&lt;/P&gt;&lt;P&gt;44&amp;nbsp;&amp;nbsp; %mend split;&lt;/P&gt;&lt;P&gt;45&amp;nbsp;&amp;nbsp; %split (10)&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;NOTE: DATA STEP stopped due to looping.&lt;/P&gt;&lt;P&gt;NOTE: DATA statement used (Total process 
time):&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; real time&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 0.04 seconds&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; cpu time&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 0.00 seconds&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;NOTE: There were 32498768 observations read from the data set WORK.DATASET_B.&lt;/P&gt;&lt;P&gt;NOTE: The data set WORK.DATASET_B_1 has 3249877 observations and 31 variables.&lt;/P&gt;&lt;P&gt;NOTE: The data set WORK.DATASET_B_2 has 3249877 observations and 31 variables.&lt;/P&gt;&lt;P&gt;NOTE: The data set WORK.DATASET_B_3 has 3249877 observations and 31 variables.&lt;/P&gt;&lt;P&gt;NOTE: The data set WORK.DATASET_B_4 has 3249877 observations and 31 variables.&lt;/P&gt;&lt;P&gt;NOTE: The data set WORK.DATASET_B_5 has 3249877 observations and 31 variables.&lt;/P&gt;&lt;P&gt;NOTE: The data set WORK.DATASET_B_6 has 3249877 observations and 31 variables.&lt;/P&gt;&lt;P&gt;NOTE: The data set WORK.DATASET_B_7 has 3249877 observations and 31 variables.&lt;/P&gt;&lt;P&gt;NOTE: The data set WORK.DATASET_B_8 has 3249877 observations and 31 variables.&lt;/P&gt;&lt;P&gt;NOTE: The data set WORK.DATASET_B_9 has 3249877 observations and 31 variables.&lt;/P&gt;&lt;P&gt;NOTE: The data set WORK.DATASET_B_10 has 3249875 observations and 31 variables.&lt;/P&gt;&lt;P&gt;NOTE: DATA statement used (Total process time):&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; real time&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 15:13.22&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; cpu time&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 49.68 seconds&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;NOTE: This SAS 
session is using a registry in WORK.&amp;nbsp; All changes will be lost at the end of&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; this session.&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Another macro (this time made by me &lt;img id="smileywink" class="emoticon emoticon-smileywink" src="https://communities.sas.com/i/smilies/16x16_smiley-wink.png" alt="Smiley Wink" title="Smiley Wink" /&gt;)&amp;nbsp; to run all the proc sort throught the datasets&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;46&amp;nbsp;&amp;nbsp; %macro sort;&lt;/P&gt;&lt;P&gt;47&amp;nbsp;&amp;nbsp; %do i=1 %to 10;&lt;/P&gt;&lt;P&gt;48&amp;nbsp;&amp;nbsp; proc sort data = DATASET_B_&amp;amp;i;&lt;/P&gt;&lt;P&gt;49&amp;nbsp;&amp;nbsp; by KEY;&lt;/P&gt;&lt;P&gt;50&amp;nbsp;&amp;nbsp; run;&lt;/P&gt;&lt;P&gt;51&amp;nbsp;&amp;nbsp; %end;&lt;/P&gt;&lt;P&gt;52&amp;nbsp;&amp;nbsp; run;&lt;/P&gt;&lt;P&gt;53&amp;nbsp;&amp;nbsp; %mend sort;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;54&amp;nbsp;&amp;nbsp; %short&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;NOTE: There were 3249877 observations read from the data set WORK.DATASET_B_1.&lt;/P&gt;&lt;P&gt;NOTE: The data set WORK.DATASET_B_1 has 3249877 observations and 31 variables.&lt;/P&gt;&lt;P&gt;NOTE: PROCEDURE SORT used (Total process time):&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; real time&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 4:19.91&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; cpu time&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 17.59 seconds&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;NOTE: There were 3249877 observations read from the data set WORK.DATASET_B_2.&lt;/P&gt;&lt;P&gt;NOTE: The data set WORK.DATASET_B_2 has 3249877 observations and 31 variables.&lt;/P&gt;&lt;P&gt;NOTE: PROCEDURE SORT used (Total process 
time):&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; real time&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 5:08.46&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; cpu time&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 19.56 seconds&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;NOTE: There were 3249877 observations read from the data set WORK.DATASET_B_3.&lt;/P&gt;&lt;P&gt;NOTE: The data set WORK.DATASET_B_3 has 3249877 observations and 31 variables.&lt;/P&gt;&lt;P&gt;NOTE: PROCEDURE SORT used (Total process time):&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; real time&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 2:29.01&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; cpu time&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 19.51 seconds&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;NOTE: There were 3249877 observations read from the data set WORK.DATASET_B_4.&lt;/P&gt;&lt;P&gt;NOTE: The data set WORK.DATASET_B_4 has 3249877 observations and 31 variables.&lt;/P&gt;&lt;P&gt;NOTE: PROCEDURE SORT used (Total process time):&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; real time&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 4:03.43&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; cpu time&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 17.03 seconds&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;NOTE: There were 3249877 observations read from the data set WORK.DATASET_B_5.&lt;/P&gt;&lt;P&gt;NOTE: The data set WORK.DATASET_B_5 has 3249877 observations and 31 variables.&lt;/P&gt;&lt;P&gt;NOTE: PROCEDURE SORT used (Total 
process time):&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; real time&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 6:03.90&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; cpu time&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 18.95 seconds&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;NOTE: There were 3249877 observations read from the data set WORK.DATASET_B_6.&lt;/P&gt;&lt;P&gt;NOTE: The data set WORK.DATASET_B_6 has 3249877 observations and 31 variables.&lt;/P&gt;&lt;P&gt;NOTE: PROCEDURE SORT used (Total process time):&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; real time&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 6:44.73&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; cpu time&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 19.23 seconds&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;NOTE: There were 3249877 observations read from the data set WORK.DATASET_B_7.&lt;/P&gt;&lt;P&gt;NOTE: The data set WORK.DATASET_B_7 has 3249877 observations and 31 variables.&lt;/P&gt;&lt;P&gt;NOTE: PROCEDURE SORT used (Total process time):&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; real time&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 2:48.26&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; cpu time&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 18.72 seconds&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;NOTE: There were 3249877 observations read from the data set WORK.DATASET_B_8.&lt;/P&gt;&lt;P&gt;NOTE: The data set WORK.DATASET_B_8 has 3249877 observations and 31 variables.&lt;/P&gt;&lt;P&gt;NOTE: PROCEDURE SORT used 
(Total process time):&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; real time&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 3:36.37&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; cpu time&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 20.17 seconds&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;NOTE: There were 3249877 observations read from the data set WORK.DATASET_B_9.&lt;/P&gt;&lt;P&gt;NOTE: The data set WORK.DATASET_B_9 has 3249877 observations and 31 variables.&lt;/P&gt;&lt;P&gt;NOTE: PROCEDURE SORT used (Total process time):&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; real time&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 4:41.83&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; cpu time&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 19.00 seconds&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;NOTE: There were 3249875 observations read from the data set WORK.DATASET_B_10.&lt;/P&gt;&lt;P&gt;NOTE: The data set WORK.DATASET_B_10 has 3249875 observations and 31 variables.&lt;/P&gt;&lt;P&gt;NOTE: PROCEDURE SORT used (Total process time):&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; real time&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 3:17.87&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; cpu time&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; 19.48 seconds&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;the sum of the all sorts procedures are almost 41 min without counting the 15 min for the macro to work. 
And we still need a merge to build the dataset again (by the way, what do you think is the best way to merge all the datasets?)&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;As you can see, splitting into 10 datasets isn't faster than the original proc sort. If I am able to free space I'd try with perhaps 20 small datasets.&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;AmySwinford, I have SAS/ACCESS ready for use on my PC but I have never used it and, to be honest, I don't know where to start to make it work in local mode (server/client on the same machine).&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;&lt;SPAN style="font-family: 'Helvetica Neue', Helvetica, Arial, 'Lucida Grande', sans-serif; background-color: #ffffff;"&gt;I think defragmenting&lt;/SPAN&gt; the hard disk is a very good idea &lt;img id="smileywink" class="emoticon emoticon-smileywink" src="https://communities.sas.com/i/smilies/16x16_smiley-wink.png" alt="Smiley Wink" title="Smiley Wink" /&gt;, I'd try it if my user has admin privileges,...&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Regards&lt;/P&gt;&lt;/BODY&gt;&lt;/HTML&gt;</description>
      <pubDate>Wed, 17 Sep 2014 13:48:00 GMT</pubDate>
      <guid>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145724#M29042</guid>
      <dc:creator>SergioSanchez</dc:creator>
      <dc:date>2014-09-17T13:48:00Z</dc:date>
    </item>
    <item>
      <title>Re: Is there any solution for this?</title>
      <link>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145725#M29043</link>
      <description>&lt;HTML&gt;&lt;HEAD&gt;&lt;/HEAD&gt;&lt;BODY&gt;&lt;P&gt;Probably knowledge of maximum and minimum values for KEY1 and KEY2 might be useful for array solution.&lt;/P&gt;&lt;/BODY&gt;&lt;/HTML&gt;</description>
      <pubDate>Wed, 17 Sep 2014 13:50:47 GMT</pubDate>
      <guid>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145725#M29043</guid>
      <dc:creator>KachiM</dc:creator>
      <dc:date>2014-09-17T13:50:47Z</dc:date>
    </item>
    <item>
      <title>Re: Is there any solution for this?</title>
      <link>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145726#M29044</link>
      <description>&lt;HTML&gt;&lt;HEAD&gt;&lt;/HEAD&gt;&lt;BODY&gt;&lt;P&gt;Not much different to what you already have been provided with, but split each type of "KEY" out into a dataset, then set back together again (note depending on number of keys, this could create a lot of small datasets):&lt;/P&gt;&lt;P&gt;/* Some test data */&lt;/P&gt;&lt;P&gt;data have;&lt;/P&gt;&lt;P&gt;&amp;nbsp; attrib key format=$5.;&lt;/P&gt;&lt;P&gt;&amp;nbsp; array var{15} $20.;&lt;/P&gt;&lt;P&gt;&amp;nbsp; do i=1 to 1000; /*32498768;*/&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp; key=strip(put(mod(i,234),best.));&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp; output;&lt;/P&gt;&lt;P&gt;&amp;nbsp; end;&lt;/P&gt;&lt;P&gt;run;&lt;/P&gt;&lt;P&gt;/* Get list of unique KEY values */&lt;/P&gt;&lt;P&gt;proc sql;&lt;/P&gt;&lt;P&gt;&amp;nbsp; create table WORK.LOOP as&lt;/P&gt;&lt;P&gt;&amp;nbsp; select&amp;nbsp; distinct&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; KEY&lt;/P&gt;&lt;P&gt;&amp;nbsp; from&amp;nbsp;&amp;nbsp;&amp;nbsp; WORK.HAVE;&lt;/P&gt;&lt;P&gt;quit;&lt;/P&gt;&lt;P&gt;/* Generate code to split data into each key value */&lt;/P&gt;&lt;P&gt;data _null_;&lt;/P&gt;&lt;P&gt;&amp;nbsp; set loop;&lt;/P&gt;&lt;P&gt;&amp;nbsp; call execute('data work.srt'||strip(key)||'; set work.have (where=(key="'||strip(key)||'")); run;');&lt;/P&gt;&lt;P&gt;run;&lt;/P&gt;&lt;P&gt;/* Generate final datastep to append back together again */&lt;/P&gt;&lt;P&gt;data _null_;&lt;/P&gt;&lt;P&gt;&amp;nbsp; set loop end=last;&lt;/P&gt;&lt;P&gt;&amp;nbsp; if _n_=1 then call execute('data want; set ');&lt;/P&gt;&lt;P&gt;&amp;nbsp; call execute(' work.srt'||strip(key));&lt;/P&gt;&lt;P&gt;&amp;nbsp; if last then call execute(';run;');&lt;/P&gt;&lt;P&gt;run;&lt;/P&gt;&lt;/BODY&gt;&lt;/HTML&gt;</description>
      <pubDate>Wed, 17 Sep 2014 14:51:36 GMT</pubDate>
      <guid>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145726#M29044</guid>
      <dc:creator>RW9</dc:creator>
      <dc:date>2014-09-17T14:51:36Z</dc:date>
    </item>
    <item>
      <title>Re: Is there any solution for this?</title>
      <link>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145727#M29045</link>
      <description>&lt;HTML&gt;&lt;HEAD&gt;&lt;/HEAD&gt;&lt;BODY&gt;&lt;P&gt;If your Key1 and Key2 are numbers and that their minimum and maximum values are small enough(your system memory can hold) to use a temporary array, you may reduce the workload much.&lt;/P&gt;&lt;P&gt;But you need to find out the maximum occurrence of Key1 or Key2 whichever is maximum. The array will hold the Record ID(RID) of the observation for the Key1 and Key2.&lt;/P&gt;&lt;P&gt;You then use this RID in POINT = option of SET and get the sorted output.&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Here is an example using SASHELP.CLASS with AGE and HEIGHT. Though Height has a&amp;nbsp; decimal point, it does not matter to use it as array-index, the decimal is&lt;/P&gt;&lt;P&gt;ignored.&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;The same idea can be used with Hash Table too.&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;data class;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp; set sashelp.class;&lt;/P&gt;&lt;P&gt;run;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;data need;&lt;/P&gt;&lt;P&gt;array k[11:16, 51:72, 2] _temporary_;&lt;/P&gt;&lt;P&gt;if _n_ = 1 then do;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp; do _n_ = 1 by 1 until(eof);&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; set class(keep = age height) end = eof;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; i = 1;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; do while(k[age, height, i] &amp;gt; 0); i + 1; end;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; k[age, height, i] = _n_; &lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp; end;&lt;/P&gt;&lt;P&gt;end;&lt;/P&gt;&lt;P&gt;do i = lbound1(k) to hbound1(k);&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp; do j = lbound2(k) to hbound2(k);&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; m = 
1;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; do while(k[i, j, m] &amp;gt; 0 );&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; ptr =&amp;nbsp; k[i, j, m]; &lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; set class point = ptr; &lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; output; &lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; m + 1;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; if m &amp;gt; hbound3(k) then leave;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp; end;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp; end;&lt;/P&gt;&lt;P&gt;end;&lt;/P&gt;&lt;P&gt;stop;&lt;/P&gt;&lt;P&gt;drop i j m;&lt;/P&gt;&lt;P&gt;run;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;proc print data = need;&lt;/P&gt;&lt;P&gt;run;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Note the use of &lt;/P&gt;&lt;P&gt;&lt;SPAN style="font-size: 13.6000003814697px;"&gt;array k[11:16, 51:72, 2] _temporary_;&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&lt;SPAN style="font-size: 13.6000003814697px;"&gt;&lt;BR /&gt;&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&lt;SPAN style="font-size: 13.6000003814697px;"&gt;By the homework, 11:16 for Age, 51:72 for Height and the maximum occurrence of 2 are found. The homework is needed to minimize the array sizes. The RID is _N_;&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&lt;SPAN style="font-size: 13.6000003814697px;"&gt;&lt;BR /&gt;&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&lt;SPAN style="font-size: 13.6000003814697px;"&gt;&lt;BR /&gt;&lt;/SPAN&gt;&lt;/P&gt;&lt;/BODY&gt;&lt;/HTML&gt;</description>
      <pubDate>Wed, 17 Sep 2014 17:01:59 GMT</pubDate>
      <guid>https://communities.sas.com/t5/SAS-Programming/Is-there-any-solution-for-this/m-p/145727#M29045</guid>
      <dc:creator>KachiM</dc:creator>
      <dc:date>2014-09-17T17:01:59Z</dc:date>
    </item>
  </channel>
</rss>

