<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>topic Re: huge data file import in SAS Programming</title>
    <link>https://communities.sas.com/t5/SAS-Programming/huge-data-file-import/m-p/175199#M302060</link>
    <description>&lt;HTML&gt;&lt;HEAD&gt;&lt;/HEAD&gt;&lt;BODY&gt;&lt;P&gt;You can read it as long as you have enough disk space.&amp;nbsp; If it is just a text file then you are better off writing the code to read it yourself. Or take a small set of sample records and run it through PROC IMPORT and pull back the generated code and clean it up and point it to the large file.&lt;/P&gt;&lt;/BODY&gt;&lt;/HTML&gt;</description>
    <pubDate>Fri, 14 Feb 2014 22:45:01 GMT</pubDate>
    <dc:creator>Tom</dc:creator>
    <dc:date>2014-02-14T22:45:01Z</dc:date>
    <item>
      <title>huge data file import</title>
      <link>https://communities.sas.com/t5/SAS-Programming/huge-data-file-import/m-p/175197#M302058</link>
      <description>&lt;HTML&gt;&lt;HEAD&gt;&lt;/HEAD&gt;&lt;BODY&gt;&lt;P&gt;Hi, &lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;I have a huge .dat file of size 63G. Is it even possible to read into PC-SAS? My computer has 12G memory.&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Thanks.&lt;/P&gt;&lt;/BODY&gt;&lt;/HTML&gt;</description>
      <pubDate>Fri, 14 Feb 2014 20:08:10 GMT</pubDate>
      <guid>https://communities.sas.com/t5/SAS-Programming/huge-data-file-import/m-p/175197#M302058</guid>
      <dc:creator>swimmer</dc:creator>
      <dc:date>2014-02-14T20:08:10Z</dc:date>
    </item>
    <item>
      <title>Re: huge data file import</title>
      <link>https://communities.sas.com/t5/SAS-Programming/huge-data-file-import/m-p/175198#M302059</link>
      <description>&lt;HTML&gt;&lt;HEAD&gt;&lt;/HEAD&gt;&lt;BODY&gt;&lt;P&gt;There are people with SAS datasets in the terabyte range. Output disk space and run time will likely be the issue.&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;I would recommend using an OBS option to try importing a few hundred records to make sure the formats and variables look right before loading the whole thing. Nothing like waiting a few hours for something to finish importing only to discover a variable was imported with 6 characters instead of 12...&lt;/P&gt;&lt;/BODY&gt;&lt;/HTML&gt;</description>
      <pubDate>Fri, 14 Feb 2014 21:29:55 GMT</pubDate>
      <guid>https://communities.sas.com/t5/SAS-Programming/huge-data-file-import/m-p/175198#M302059</guid>
      <dc:creator>ballardw</dc:creator>
      <dc:date>2014-02-14T21:29:55Z</dc:date>
    </item>
    <item>
      <title>Re: huge data file import</title>
      <link>https://communities.sas.com/t5/SAS-Programming/huge-data-file-import/m-p/175199#M302060</link>
      <description>&lt;HTML&gt;&lt;HEAD&gt;&lt;/HEAD&gt;&lt;BODY&gt;&lt;P&gt;You can read it as long as you have enough disk space.&amp;nbsp; If it is just a text file then you are better off writing the code to read it yourself. Or take a small set of sample records and run it through PROC IMPORT and pull back the generated code and clean it up and point it to the large file.&lt;/P&gt;&lt;/BODY&gt;&lt;/HTML&gt;</description>
      <pubDate>Fri, 14 Feb 2014 22:45:01 GMT</pubDate>
      <guid>https://communities.sas.com/t5/SAS-Programming/huge-data-file-import/m-p/175199#M302060</guid>
      <dc:creator>Tom</dc:creator>
      <dc:date>2014-02-14T22:45:01Z</dc:date>
    </item>
    <item>
      <title>Re: huge data file import</title>
      <link>https://communities.sas.com/t5/SAS-Programming/huge-data-file-import/m-p/175200#M302061</link>
      <description>&lt;HTML&gt;&lt;HEAD&gt;&lt;/HEAD&gt;&lt;BODY&gt;&lt;P&gt;Thank you, Tom. It is a .dat file. I have the data library which tells the name, position, length and type of each variable.&lt;/P&gt;&lt;P&gt;I used data step, infile statement with obs=50, but it still took unbearably long time to get result.&lt;/P&gt;&lt;/BODY&gt;&lt;/HTML&gt;</description>
      <pubDate>Fri, 14 Feb 2014 23:08:32 GMT</pubDate>
      <guid>https://communities.sas.com/t5/SAS-Programming/huge-data-file-import/m-p/175200#M302061</guid>
      <dc:creator>swimmer</dc:creator>
      <dc:date>2014-02-14T23:08:32Z</dc:date>
    </item>
    <item>
      <title>Re: huge data file import</title>
      <link>https://communities.sas.com/t5/SAS-Programming/huge-data-file-import/m-p/175201#M302062</link>
      <description>&lt;HTML&gt;&lt;HEAD&gt;&lt;/HEAD&gt;&lt;BODY&gt;&lt;P&gt;OBS=50 on INFILE statement should cause it to stop pretty quickly.&lt;/P&gt;&lt;P&gt;You could try coding a STOP statement instead.&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;data small ;&lt;/P&gt;&lt;P&gt;&amp;nbsp; infile 'big.dat' lrecl=100000 obs=50;&lt;/P&gt;&lt;P&gt;....&lt;/P&gt;&lt;P&gt;run;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;or&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;data small ;&lt;/P&gt;&lt;P&gt;&amp;nbsp; if _n_ &amp;gt; 50 then stop;&lt;/P&gt;&lt;P&gt;&amp;nbsp; infile 'big.dat' lrecl=100000 ;&lt;/P&gt;&lt;P&gt;....&lt;/P&gt;&lt;P&gt;run;&lt;/P&gt;&lt;/BODY&gt;&lt;/HTML&gt;</description>
      <pubDate>Fri, 14 Feb 2014 23:27:48 GMT</pubDate>
      <guid>https://communities.sas.com/t5/SAS-Programming/huge-data-file-import/m-p/175201#M302062</guid>
      <dc:creator>Tom</dc:creator>
      <dc:date>2014-02-14T23:27:48Z</dc:date>
    </item>
  </channel>
</rss>

