TPT Delimited Data Parsing error: Too many columns in row 1

Hello everyone,

 

I'm trying to load data through TPT. During execution I get this error:

W_0_o_CSV_job[1]: TPT19134 !ERROR! Fatal data error processing file 'C:\Users\291506\Desktop\testdico.csv'. Delimited Data Parsing error: Too many columns in row 1.
I tried to change the format of the import file, but nothing changed...

Here is my script:

DEFINE JOB CSV_job
(
	DEFINE OPERATOR W_1_o_CSV_job
	TYPE LOAD
	SCHEMA *
	ATTRIBUTES
	(
		VARCHAR UserName,
		VARCHAR UserPassword,
		VARCHAR LogTable,
		VARCHAR TargetTable,
		INTEGER BufferSize,
		INTEGER ErrorLimit,
		INTEGER MaxSessions,
		INTEGER MinSessions,
		INTEGER TenacityHours,
		INTEGER TenacitySleep,
		VARCHAR AccountID,
		VARCHAR DateForm,
		VARCHAR ErrorTable1,
		VARCHAR ErrorTable2,
		VARCHAR NotifyExit,
		VARCHAR NotifyExitIsDLL,
		VARCHAR NotifyLevel,
		VARCHAR NotifyMethod,
		VARCHAR NotifyString,
		VARCHAR PauseAcq,
		VARCHAR PrivateLogName,
		VARCHAR TdpId,
		VARCHAR TraceLevel,
		VARCHAR WorkingDatabase
	);

	DEFINE SCHEMA W_0_s_CSV_job
	(
		REGLE_OFS VARCHAR(50),
		OFS_FIELD_BC801 VARCHAR(50),
		ME131 VARCHAR(50),
		DATA_TYPE VARCHAR(50),
		DATA_TYPOLOGY VARCHAR(50),
		FU_REF VARCHAR(50),
		FU_FIELD VARCHAR(250),
		FU_FIELD_EN VARCHAR(250),
		CODE_RADAR VARCHAR(50),
		REGLE_RADAR VARCHAR(50),
		DEFINITION_RADAR VARCHAR(5000),
		MODIFICATION_REGLE_RADAR VARCHAR(50),
		ANACREDIT VARCHAR(50),
		ANACREDIT_CRITICAL_FLAG VARCHAR(50),
		NPE_FBE VARCHAR(50),
		CRE VARCHAR(50),
		IFRS9 VARCHAR(50),
		IFRS9_CRITICAL_FLAG VARCHAR(50),
		SHS_2018 VARCHAR(50),
		SHS_CRITICAL_FLAG VARCHAR(50),
		STRESS_TEST VARCHAR(50),
		STOP_1_BMRC VARCHAR(50),
		MANDATORY_OPTIMA VARCHAR(50),
		FRANCE_REPORTING_Data VARCHAR(50),
		DEFAULTING_RDI VARCHAR(50)
	);

	DEFINE OPERATOR W_0_o_CSV_job
	TYPE DATACONNECTOR PRODUCER
	SCHEMA W_0_s_CSV_job
	ATTRIBUTES
	(
		VARCHAR FileName,
		VARCHAR Format,
		VARCHAR OpenMode,
		INTEGER BlockSize,
		INTEGER BufferSize,
		INTEGER RetentionPeriod,
		INTEGER RowsPerInstance,
		INTEGER SecondarySpace,
		INTEGER UnitCount,
		INTEGER VigilElapsedTime,
		INTEGER VigilWaitTime,
		INTEGER VolumeCount,
		VARCHAR AccessModuleName,
		VARCHAR AccessModuleInitStr,
		VARCHAR DirectoryPath,
		VARCHAR ExpirationDate,
		VARCHAR IndicatorMode,
		VARCHAR PrimarySpace,
		VARCHAR PrivateLogName,
		VARCHAR RecordFormat,
		VARCHAR RecordLength,
		VARCHAR SpaceUnit,
		VARCHAR TextDelimiter,
		VARCHAR VigilNoticeFileName,
		VARCHAR VigilStartTime,
		VARCHAR VigilStopTime,
		VARCHAR VolSerNumber,
		VARCHAR UnitType
	);

	APPLY
		(
			'INSERT INTO DB_FTG_SRS_DATALAB.DICOGLOB_VCOBALT (REGLE_OFS,OFS_FIELD_BC801,ME131,DATA_TYPE,DATA_TYPOLOGY,FU_REF,FU_FIELD,FU_FIELD_EN,CODE_RADAR,REGLE_RADAR,DEFINITION_RADAR,MODIFICATION_REGLE_RADAR,ANACREDIT,ANACREDIT_CRITICAL_FLAG,NPE_FBE,CRE,IFRS9,IFRS9_CRITICAL_FLAG,SHS_2018,SHS_CRITICAL_FLAG,STRESS_TEST,STOP_1_BMRC,MANDATORY_OPTIMA,FRANCE_REPORTING_Data,DEFAULTING_RDI) VALUES (:REGLE_OFS,:OFS_FIELD_BC801,:ME131,:DATA_TYPE,:DATA_TYPOLOGY,:FU_REF,:FU_FIELD,:FU_FIELD_EN,:CODE_RADAR,:REGLE_RADAR,:DEFINITION_RADAR,:MODIFICATION_REGLE_RADAR,:ANACREDIT,:ANACREDIT_CRITICAL_FLAG,:NPE_FBE,:CRE,:IFRS9,:IFRS9_CRITICAL_FLAG,:SHS_2018,:SHS_CRITICAL_FLAG,:STRESS_TEST,:STOP_1_BMRC,:MANDATORY_OPTIMA,:FRANCE_REPORTING_Data,:DEFAULTING_RDI);'
		)
	TO OPERATOR
	(
		W_1_o_CSV_job[1]

		ATTRIBUTES
		(
			UserName = 'L2efef',
			UserPassword = '******',
			LogTable = 'DB_FTG_SRS_DATALAB.DICOGLOB_VCOBALT_log',
			TargetTable = 'DB_FTG_SRS_DATALAB.DICOGLOB_VCOBALT',
			TdpId = 'FTGPRDTD'
		)
	)
	SELECT * FROM OPERATOR
	(
		W_0_o_CSV_job[1]

		ATTRIBUTES
		(
			FileName = 'testdico.csv',
			Format = 'DELIMITED',
			OpenMode = 'Read',
			DirectoryPath = 'C:\Users\291506\Desktop',
			IndicatorMode = 'N',
			TextDelimiter = ';'
		)
	);
);

Thanks,

 

Best regards,

 

Kair0

Teradata Employee

Re: TPT Delimited Data Parsing error: Too many columns in row 1

Most likely causes:

  • The data contains more delimited fields per line than the 25 described by the schema
  • Some field contains the delimiter character within the data
  • The input records are terminated with LF only (Unix style) instead of CR+LF (Windows style)
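
To narrow down which of these applies, it can help to inspect the file outside of TPT. Below is a rough Python sketch (not part of TPT) that counts the ';'-separated fields on each line and reports the line-ending style; the file path and the 25-column count are taken from the script above, so adjust them if they differ.

# Rough diagnostic for the causes listed above; path, delimiter and
# expected column count come from the TPT script and may need adjusting.
PATH = r"C:\Users\291506\Desktop\testdico.csv"
EXPECTED_FIELDS = 25   # number of columns in schema W_0_s_CSV_job

with open(PATH, "rb") as f:
    raw = f.read()

# Cause 3: LF-only line endings instead of CR+LF.
crlf = raw.count(b"\r\n")
lf_only = raw.count(b"\n") - crlf
print(f"CR+LF line endings: {crlf}, LF-only line endings: {lf_only}")

# Causes 1 and 2: lines whose field count does not match the schema.
for lineno, line in enumerate(raw.splitlines(), start=1):
    fields = line.split(b";")
    if len(fields) != EXPECTED_FIELDS:
        print(f"line {lineno}: {len(fields)} fields (expected {EXPECTED_FIELDS})")

A line reported with too many fields either really has extra columns or contains a ';' inside one of its values; in the latter case the data needs to be cleaned, quoted, or exported with a different delimiter before TPT can parse it.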