<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>topic Re: Efficiently loading data from SAS work into an empty Teradata table in SAS Programming</title>
    <link>https://communities.sas.com/t5/SAS-Programming/Efficienly-loading-data-from-sas-work-into-an-empty-teradata/m-p/972654#M377549</link>
    <description>Did you try PROC APPEND?&lt;BR /&gt;&lt;PRE&gt;proc append base=spe.limits data=limits5 force;
run;&lt;/PRE&gt;</description>
    <pubDate>Fri, 15 Aug 2025 00:51:42 GMT</pubDate>
    <dc:creator>Ksharp</dc:creator>
    <dc:date>2025-08-15T00:51:42Z</dc:date>
    <item>
      <title>Efficiently loading data from SAS work into an empty Teradata table</title>
      <link>https://communities.sas.com/t5/SAS-Programming/Efficienly-loading-data-from-sas-work-into-an-empty-teradata/m-p/972606#M377534</link>
      <description>&lt;P&gt;Good day. I have a table called limits5 (5.6 million rows) in my SAS WORK library, but when I try to insert the data into my empty Teradata table with fastload=yes, duplicate records are omitted. I need help optimizing the code below so that all 5.6 million rows are inserted into my Teradata database, ideally with a fast method. Below is what I have tried.&lt;/P&gt;
&lt;PRE&gt;libname SPE teradata
    user="a"
    password="12"
    server="11"
    mode=teradata
    connection=global
    database=Payments_DB
    dbcommit=10000
    tpt=yes
    fastload=no;


proc sql;
connect to teradata (&amp;amp;td_connection_string);
execute (
    CREATE MULTISET TABLE Payments_DB.limits
    (
            accumulatedSpend        FLOAT,
            active                  FLOAT,
            amount                  FLOAT,
            blockingLimitRequired   FLOAT,
            consumerChannel         VARCHAR(20),
            consumerCorrelationId   VARCHAR(50),
            consumerUsername        VARCHAR(20),
            createdOn2              FLOAT,
            currencyCode            VARCHAR(10),
            definitionId            VARCHAR(50),
            expiryDateTime          VARCHAR(30),
            expiryDateTime2         FLOAT,
            forexBlockIndicator     VARCHAR(10),
            impactStatus            VARCHAR(10),
            limitId                 VARCHAR(50),
            limit_type              VARCHAR(10),
            remainingBalance        FLOAT,
            segmentation            VARCHAR(10),
            spendPercentage         FLOAT,
            thresholdPercentage     FLOAT,
            updatedOn2              FLOAT
    )
    PRIMARY INDEX (limitId)
) by teradata;

disconnect from teradata;
quit;


proc sql;
    insert into spe.limits
    select
        accumulatedSpend,
        active,
        amount,
        blockingLimitRequired,
        consumerChannel,
        consumerCorrelationId,
        consumerUsername,
        createdOn2,
        currencyCode,
        definitionId,
        expiryDateTime,
        expiryDateTime2,
        forexBlockIndicator,
        impactStatus,
        limitId,
        limit_type,
        remainingBalance,
        segmentation,
        spendPercentage,
        thresholdPercentage,
        updatedOn2
    from limits5;
quit;
&lt;/PRE&gt;</description>
      <pubDate>Thu, 14 Aug 2025 07:56:55 GMT</pubDate>
      <guid>https://communities.sas.com/t5/SAS-Programming/Efficienly-loading-data-from-sas-work-into-an-empty-teradata/m-p/972606#M377534</guid>
      <dc:creator>Solly7</dc:creator>
      <dc:date>2025-08-14T07:56:55Z</dc:date>
    </item>
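    <!-- Editor's sketch for the post above. Teradata FastLoad silently discards
         exact duplicate rows, even when the target is a MULTISET table, which
         matches the shortfall described. A minimal diagnostic, assuming
         WORK.limits5 as in the post, to count how many rows FastLoad would drop:

    proc sort data=limits5 out=_limits5_nodup noduprecs dupout=_limits5_dups;
        by _all_;                        /* sort on every column so exact */
    run;                                 /* duplicates become adjacent    */

    proc sql;                            /* row count of the dropped set  */
        select nobs format=comma12. label='exact duplicate rows'
        from dictionary.tables
        where libname='WORK' and memname='_LIMITS5_DUPS';
    quit;
    -->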
    <item>
      <title>Re: Efficiently loading data from SAS work into an empty Teradata table</title>
      <link>https://communities.sas.com/t5/SAS-Programming/Efficienly-loading-data-from-sas-work-into-an-empty-teradata/m-p/972653#M377548</link>
      <description>&lt;P&gt;Add INSERTBUFF=10000 to your LIBNAME statement - does that speed things up? Also, you need to explain what you mean by duplicate records: are entire rows duplicated, or are you referring to rows where only the primary key is duplicated? I would imagine that defining a unique primary key would only allow the first row with a repeated key to be inserted.&lt;/P&gt;</description>
      <pubDate>Fri, 15 Aug 2025 00:47:35 GMT</pubDate>
      <guid>https://communities.sas.com/t5/SAS-Programming/Efficienly-loading-data-from-sas-work-into-an-empty-teradata/m-p/972653#M377548</guid>
      <dc:creator>SASKiwi</dc:creator>
      <dc:date>2025-08-15T00:47:35Z</dc:date>
    </item>
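    <!-- A sketch of the suggestion above: the original LIBNAME with INSERTBUFF
         added. Placeholder credentials are copied from the post; whether
         INSERTBUFF helps depends on the SAS/ACCESS release in use, so treat
         this as something to benchmark rather than a guaranteed speed-up.

    libname spe teradata
        user="a" password="12" server="11"
        mode=teradata connection=global
        database=Payments_DB
        dbcommit=10000
        insertbuff=10000   /* batch rows per insert call, per the reply */
        tpt=yes
        fastload=no;
    -->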
    <item>
      <title>Re: Efficiently loading data from SAS work into an empty Teradata table</title>
      <link>https://communities.sas.com/t5/SAS-Programming/Efficienly-loading-data-from-sas-work-into-an-empty-teradata/m-p/972654#M377549</link>
      <description>Did you try PROC APPEND?&lt;BR /&gt;&lt;PRE&gt;proc append base=spe.limits data=limits5 force;
run;&lt;/PRE&gt;</description>
      <pubDate>Fri, 15 Aug 2025 00:51:42 GMT</pubDate>
      <guid>https://communities.sas.com/t5/SAS-Programming/Efficienly-loading-data-from-sas-work-into-an-empty-teradata/m-p/972654#M377549</guid>
      <dc:creator>Ksharp</dc:creator>
      <dc:date>2025-08-15T00:51:42Z</dc:date>
    </item>
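    <!-- Usage sketch for the reply above, combined with the bulk-load option
         from the original post. Assumption: FASTLOAD= is also accepted as a
         data set option on the target table, so bulk load can be enabled for
         this one step; note that FastLoad still discards exact duplicate rows,
         so this does not by itself fix the missing-duplicates problem.

    proc append base=spe.limits(fastload=yes tpt=yes) data=limits5 force;
    run;
    -->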
  </channel>
</rss>