Good day. I have a table in my SAS WORK library called LIMITS5 (5.6 million rows). When I try to insert its data into my empty Teradata table with FASTLOAD=YES, duplicate records are omitted. I need help optimizing the code below so that all 5.6 million rows are inserted into my Teradata database — and I still need the load to be fast. Below is what I have tried:
/* Teradata libref used to bulk-load WORK.LIMITS5 into Payments_DB.
   Why MULTILOAD instead of FASTLOAD: Teradata FastLoad (and the TPT
   Load driver invoked by TPT=YES FASTLOAD=YES) silently discards
   duplicate rows even when the target is a MULTISET table -- that is
   exactly why rows went missing. MULTILOAD=YES drives the TPT Update
   operator, which retains duplicate rows in a MULTISET table and is
   still a bulk (fast) load path suitable for 5.6M rows. */
libname SPE teradata
    user="a"
    password="12"       /* NOTE(review): do not hard-code credentials;
                           use AUTHDOMAIN= or an encoded pwfile instead */
    server="11"
    mode=teradata
    connection=global
    database=Payments_DB
    dbcommit=0          /* DBCOMMIT is ignored by the bulk loaders;
                           set 0 so SAS does not interleave commits --
                           checkpointing is controlled by ML_CHECKPOINT= */
    tpt=yes
    multiload=yes;      /* was FASTLOAD=NO; MULTILOAD keeps duplicates */
/* Create the empty target table on Teradata via explicit pass-through.
   NOTE(review): &td_connection_string is not defined anywhere in this
   program -- it must resolve to valid CONNECT TO TERADATA options
   (user=/password=/server=) before this step runs; verify it matches
   the LIBNAME credentials above. */
proc sql;
connect to teradata (&td_connection_string);
execute (
/* MULTISET is required so Teradata itself allows duplicate rows;
   the loader used on the SAS side must also permit them (see the
   LIBNAME options). PRIMARY INDEX(limitId) is non-unique, which is
   consistent with allowing duplicate limitId values. */
CREATE MULTISET TABLE Payments_DB.limits
(
/* NOTE(review): FLOAT columns for monetary/accumulator values
   (amount, accumulatedSpend, remainingBalance) risk rounding --
   DECIMAL(p,s) would be exact; confirm with downstream consumers
   before changing. The *2 columns appear to be SAS datetime
   numerics stored as FLOAT -- TODO confirm intended semantics. */
accumulatedSpend FLOAT,
active FLOAT,
amount FLOAT,
blockingLimitRequired FLOAT,
consumerChannel VARCHAR(20),
consumerCorrelationId VARCHAR(50),
consumerUsername VARCHAR(20),
createdOn2 FLOAT,
currencyCode VARCHAR(10),
definitionId VARCHAR(50),
expiryDateTime VARCHAR(30),
expiryDateTime2 FLOAT,
forexBlockIndicator VARCHAR(10),
impactStatus VARCHAR(10),
limitId VARCHAR(50),
limit_type VARCHAR(10),
remainingBalance FLOAT,
segmentation VARCHAR(10),
spendPercentage FLOAT,
thresholdPercentage FLOAT,
updatedOn2 FLOAT
)
PRIMARY INDEX (limitId)
) by teradata;
disconnect from teradata;
quit;
/* Load all rows from WORK.LIMITS5 into the Teradata table through the
   bulk-enabled SPE libref. An explicit target column list makes the
   insert robust against any column-order drift between the SAS table
   and the DDL above (positional inserts fail silently otherwise). */
proc sql;
insert into spe.limits
    (accumulatedSpend, active, amount, blockingLimitRequired,
     consumerChannel, consumerCorrelationId, consumerUsername,
     createdOn2, currencyCode, definitionId, expiryDateTime,
     expiryDateTime2, forexBlockIndicator, impactStatus, limitId,
     limit_type, remainingBalance, segmentation, spendPercentage,
     thresholdPercentage, updatedOn2)
select
    accumulatedSpend,
    active,
    amount,
    blockingLimitRequired,
    consumerChannel,
    consumerCorrelationId,
    consumerUsername,
    createdOn2,
    currencyCode,
    definitionId,
    expiryDateTime,
    expiryDateTime2,
    forexBlockIndicator,
    impactStatus,
    limitId,
    limit_type,
    remainingBalance,
    segmentation,
    spendPercentage,
    thresholdPercentage,
    updatedOn2
from limits5;
quit;
... View more