Is there an existing issue for the same bug?
Branch Name
main
Commit ID
0bd860b
Other Environment Information
- Hardware parameters:
- OS type:
- Others:
Actual Behavior
Log (Loki query filtering for `stream closed` in namespace `mo-main-commit-0bd860b1c-20250818`): https://grafana.ci.matrixorigin.cn/explore?panes=%7B%22ZXL%22:%7B%22datasource%22:%22loki%22,%22queries%22:%5B%7B%22refId%22:%22A%22,%22expr%22:%22%7Bnamespace%3D%5C%22mo-main-commit-0bd860b1c-20250818%5C%22%7D%20%7C%3D%20%60stream%20closed%60%22,%22queryType%22:%22range%22,%22datasource%22:%7B%22type%22:%22loki%22,%22uid%22:%22loki%22%7D,%22editorMode%22:%22builder%22%7D%5D,%22range%22:%7B%22from%22:%221755583200000%22,%22to%22:%221755584580000%22%7D%7D%7D&schemaVersion=1&orgId=1
Expected Behavior
No response
Steps to Reproduce
CREATE TABLE `table_basic_for_load_100m` (
  `col1` tinyint DEFAULT NULL,
  `col2` smallint DEFAULT NULL,
  `col3` int DEFAULT NULL,
  `col4` bigint DEFAULT NULL,
  `col5` tinyint unsigned DEFAULT NULL,
  `col6` smallint unsigned DEFAULT NULL,
  `col7` int unsigned DEFAULT NULL,
  `col8` bigint unsigned DEFAULT NULL,
  `col9` float DEFAULT NULL,
  `col10` double DEFAULT NULL,
  `col11` varchar(255) DEFAULT NULL,
  `col12` date DEFAULT NULL,
  `col13` datetime DEFAULT NULL,
  `col14` timestamp NULL DEFAULT NULL,
  `col15` bool DEFAULT NULL,
  `col16` decimal(16,6) DEFAULT NULL,
  `col17` text DEFAULT NULL,
  `col18` json DEFAULT NULL,
  `col19` blob DEFAULT NULL,
  `col20` binary(255) DEFAULT NULL,
  `col21` varbinary(255) DEFAULT NULL,
  `col22` vecf32(3) DEFAULT NULL,
  `col23` vecf32(3) DEFAULT NULL,
  `col24` vecf64(3) DEFAULT NULL,
  `col25` vecf64(3) DEFAULT NULL
);

LOAD DATA URL s3option {
    "endpoint"='http://cos.ap-guangzhou.myqcloud.com',
    "access_key_id"='***',
    "secret_access_key"='***',
    "bucket"='mo-load-guangzhou-1308875761',
    "filepath"='mo-big-data/100000000_20_columns_load_data_new.csv'
}
INTO TABLE big_data_test.table_basic_for_load_100m
FIELDS TERMINATED BY '|' LINES TERMINATED BY '\n' PARALLEL 'true';

CREATE TABLE `insert_into_table_limit` (
  `col1` tinyint DEFAULT NULL,
  `col2` smallint DEFAULT NULL,
  `col3` int DEFAULT NULL,
  `col4` bigint DEFAULT NULL,
  `col5` tinyint unsigned DEFAULT NULL,
  `col6` smallint unsigned DEFAULT NULL,
  `col7` int unsigned DEFAULT NULL,
  `col8` bigint unsigned DEFAULT NULL,
  `col9` float DEFAULT NULL,
  `col10` double DEFAULT NULL,
  `col11` varchar(255) DEFAULT NULL,
  `col12` date DEFAULT NULL,
  `col13` datetime DEFAULT NULL,
  `col14` timestamp NULL DEFAULT NULL,
  `col15` bool DEFAULT NULL,
  `col16` decimal(16,6) DEFAULT NULL,
  `col17` text DEFAULT NULL,
  `col18` json DEFAULT NULL,
  `col19` blob DEFAULT NULL,
  `col20` binary(255) DEFAULT NULL,
  `col21` varbinary(255) DEFAULT NULL,
  `col22` vecf32(3) DEFAULT NULL,
  `col23` vecf32(3) DEFAULT NULL,
  `col24` vecf64(3) DEFAULT NULL,
  `col25` vecf64(3) DEFAULT NULL
);

INSERT INTO big_data_test.insert_into_table_limit
SELECT * FROM big_data_test.table_basic_for_load_100m
ORDER BY col4 LIMIT 5000000;
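For context on the scale involved, a minimal sanity check between the two steps (a sketch, not part of the original report; the expected counts are assumptions derived from the 100M-row source file and the LIMIT 5000000 clause):

-- hypothetical verification queries, assuming the load completes
SELECT COUNT(*) FROM big_data_test.table_basic_for_load_100m;  -- expected: 100000000 after the load
SELECT COUNT(*) FROM big_data_test.insert_into_table_limit;    -- expected: 5000000 if the insert succeeds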
Additional information
No response