
Commit 3ddaa40

test class modify
1 parent 4d9a910 commit 3ddaa40


3 files changed: 15 additions & 17 deletions


hadooptool/src/test/java/com/robin/test/TestCloudStorage.java

Lines changed: 10 additions & 13 deletions
@@ -43,23 +43,19 @@ public void testWriteToMinio(){
         builder.addColumn("amount",Const.META_TYPE_INTEGER,null);
         builder.addColumn("type",Const.META_TYPE_INTEGER,null);

-        builder.resourceCfg(ResourceConst.PARQUETFILEFORMAT,ResourceConst.PARQUETSUPPORTFORMAT.PROTOBUF.getValue()).fileFormat(Const.FILEFORMATSTR.ARROW.getValue())
-                .resPath("tmp/bigdata3.arrow")
+        builder.resourceCfg(ResourceConst.PARQUETFILEFORMAT,ResourceConst.PARQUETSUPPORTFORMAT.PROTOBUF.getValue()).fileFormat(Const.FILEFORMATSTR.CSV.getValue())
+                .resPath("/tmp/bigdata3.csv.gz").resourceCfg("protocol",Const.VFS_PROTOCOL.FTP.getValue())
+                .resourceCfg("hostName","127.0.0.1").resourceCfg("userName","test").resourceCfg("password","test").protocol(Const.VFS_PROTOCOL.FTP.getValue()).fsType(Const.FILESYSTEM.VFS.getValue())
                 //.resourceCfg(ResourceConst.USEASYNCUPLOAD,"true")
                 .resourceCfg(ResourceConst.DEFAULTCACHEOFFHEAPSIZEKEY,1000*1000*6);
         ResourceBundle bundle=ResourceBundle.getBundle("minio");
         DataCollectionMeta colmeta=builder.build();
-        MinioFileSystemAccessor.Builder builder1=new MinioFileSystemAccessor.Builder();
+        /*MinioFileSystemAccessor.Builder builder1=new MinioFileSystemAccessor.Builder();
         MinioFileSystemAccessor accessor=builder1.accessKey(bundle.getString("minio.accessKey")).secretKey(bundle.getString("minio.secretKey")).endpoint(bundle.getString("minio.endpoint"))
-                .bucket("test").build();
-        /*QiniuFileSystemAccessor.Builder builder1=new QiniuFileSystemAccessor.Builder();
-        ResourceBundle bundle=ResourceBundle.getBundle("qiniu");
+                .bucket("test").build();*/

-        builder1.domain(bundle.getString("domain")).region(Region.autoRegion()).bucket(bundle.getString("bucket"))
-                .accessKey(bundle.getString("accessKey")).urlPrefix(bundle.getString("urlPrefix")).secretKey(bundle.getString("secretKey"));
-        QiniuFileSystemAccessor accessor=builder1.build();*/

-        try (AbstractFileWriter jwriter = (AbstractFileWriter) TextFileWriterFactory.getWriterByType(colmeta, accessor)){
+        try (AbstractFileWriter jwriter = (AbstractFileWriter) TextFileWriterFactory.getWriterByType(colmeta)){
             System.out.println(new Date());
             jwriter.beginWrite();
             Map<String, Object> recMap = new HashMap<>();
@@ -69,7 +65,7 @@ public void testWriteToMinio(){
                 priceMap.put(i,i*10.0);
             }

-            for (int i = 0; i < 50000; i++) {
+            for (int i = 0; i < 10000; i++) {
                 recMap.put("id", Long.valueOf(i));
                 recMap.put("name", StringUtils.generateRandomChar(32));
                 recMap.put("description", StringUtils.generateRandomChar(32));
@@ -98,9 +94,10 @@ public void testReadFromMinio(){
         builder.addColumn("type",Const.META_TYPE_INTEGER,null);

         builder.resourceCfg(ResourceConst.PARQUETFILEFORMAT,ResourceConst.PARQUETSUPPORTFORMAT.AVRO.getValue())
-                .resourceCfg(ResourceConst.STORAGEFILTERSQL,"select name,sno,type,price*amount as totalFee from test where price*amount>500 and sno<7 and name like 'A%'")
+                //.resourceCfg(ResourceConst.STORAGEFILTERSQL,"select name,sno,type,price*amount as totalFee from test where price*amount>500 and sno<7 and name like 'A%'")
+                .resourceCfg(ResourceConst.STORAGEFILTERSQL,"select type,sno,sum(price*amount) as totalFee from test where price*amount>500 group by sno,type having sum(price*amount)>100000.0")
                 .fileFormat(Const.FILEFORMATSTR.CSV.getValue()).tableName("test")
-                .resPath("tmp/bigdata3.CSV.gz");
+                .resPath("tmp/bigdata3.csv.gz");
         ResourceBundle bundle=ResourceBundle.getBundle("minio");
         DataCollectionMeta colmeta=builder.build();
         MinioFileSystemAccessor.Builder builder1=new MinioFileSystemAccessor.Builder();
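Net effect of this change: the write path no longer hands a MinioFileSystemAccessor to the writer factory (that block is now commented out); the target, here a VFS/FTP location, is described entirely through the DataCollectionMeta resource configuration, and TextFileWriterFactory.getWriterByType takes only the metadata. A minimal sketch of the new write-side call pattern, condensed from the hunk above (how the builder instance is created is assumed from the surrounding test class and is not shown in this diff):

    // Sketch only: "builder" is the DataCollectionMeta builder already used in this test.
    builder.fileFormat(Const.FILEFORMATSTR.CSV.getValue())
            .resPath("/tmp/bigdata3.csv.gz")
            .resourceCfg("protocol", Const.VFS_PROTOCOL.FTP.getValue())
            .resourceCfg("hostName", "127.0.0.1")
            .resourceCfg("userName", "test")
            .resourceCfg("password", "test")
            .protocol(Const.VFS_PROTOCOL.FTP.getValue())
            .fsType(Const.FILESYSTEM.VFS.getValue());
    DataCollectionMeta colmeta = builder.build();
    // No accessor argument any more: the factory resolves the file system from colmeta.
    try (AbstractFileWriter jwriter = (AbstractFileWriter) TextFileWriterFactory.getWriterByType(colmeta)) {
        jwriter.beginWrite();
        // ... populate recMap and write rows as in the loop above
    }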

hadooptool/src/test/java/com/robin/test/TestProtobufWriter.java

Lines changed: 2 additions & 2 deletions
@@ -44,9 +44,9 @@ public void testInsert(){
         colmeta.setFileFormat(Const.FILESUFFIX_PROTOBUF);

         colmeta.setPath("d:/tmp/luoming/1.proto.gz");
-        AbstractFileSystemAccessor util= FileSystemAccessorFactory.getResourceAccessorByType(Const.FILESYSTEM.LOCAL.getValue());
+        AbstractFileSystemAccessor util= FileSystemAccessorFactory.getResourceAccessorByType(Const.FILESYSTEM.LOCAL.getValue(),colmeta);

-        final AbstractFileWriter jwriter= (AbstractFileWriter) TextFileWriterFactory.getOutputStreamByType(colmeta,util.getOutResourceByStream(colmeta,colmeta.getPath()));
+        final AbstractFileWriter jwriter= (AbstractFileWriter) TextFileWriterFactory.getOutputStreamByType(colmeta,util.getOutResourceByStream(colmeta.getPath()));
         jwriter.beginWrite();
         SimpleJdbcDao.executeOperationWithQuery(connection, "select id,cs_id,item_name,item_value from t_sys_code", null, false, new ResultSetOperationExtractor() {
             @Override
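The change in this test is a signature migration: FileSystemAccessorFactory.getResourceAccessorByType now also receives the DataCollectionMeta, while getOutResourceByStream drops its meta argument and takes only the path. A short sketch of the revised setup, assuming the colmeta prepared earlier in this test (protobuf format and gzip path, as above):

    // colmeta is the DataCollectionMeta configured above.
    AbstractFileSystemAccessor util =
            FileSystemAccessorFactory.getResourceAccessorByType(Const.FILESYSTEM.LOCAL.getValue(), colmeta);
    // Only the target path is passed now; the meta is supplied when the accessor is created.
    AbstractFileWriter jwriter = (AbstractFileWriter) TextFileWriterFactory.getOutputStreamByType(
            colmeta, util.getOutResourceByStream(colmeta.getPath()));
    jwriter.beginWrite();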

hadooptool/src/test/java/com/robin/test/TestResourceGen.java

Lines changed: 3 additions & 2 deletions
@@ -6,6 +6,7 @@
 import com.robin.core.base.datameta.DataBaseMetaFactory;
 import com.robin.core.base.datameta.DataBaseParam;
 import com.robin.core.base.util.Const;
+import com.robin.core.fileaccess.fs.FileSystemAccessorFactory;
 import com.robin.core.fileaccess.meta.DataCollectionMeta;
 import com.robin.core.fileaccess.writer.AbstractFileWriter;
 import com.robin.core.fileaccess.writer.TextFileWriterFactory;
@@ -51,15 +52,15 @@ public static void main(String[] args){
         List<Map<String, Object>> list=SimpleJdbcDao.queryString(conn, "select config_name as name,config_value as value from t_hadoop_cluster_config where cluster_id=4");
         conn=SimpleJdbcDao.getConnection(meta1);
         List<Map<String, Object>> resultlist=SimpleJdbcDao.queryString(conn, "select info_id,url,title,content from shw_internet_info_dtl");
-        HdfsFileSystemAccessor util=new HdfsFileSystemAccessor();
+        HdfsFileSystemAccessor util= (HdfsFileSystemAccessor) FileSystemAccessorFactory.getResourceAccessorByType("hdfs");
         Map<String, Object> hdfsparam=new HashMap<String, Object>();
         for (Map<String, Object> tmap:list) {
             hdfsparam.put(tmap.get("name").toString(), tmap.get("value"));
         }
         colmeta.setResourceCfgMap(hdfsparam);
         colmeta.setPath("/testdata/test1.gz");
         colmeta.setEncode("UTF-8");
-        pair=util.getOutResourceByWriter(colmeta, colmeta.getPath());
+        pair=util.getOutResourceByWriter(colmeta.getPath());
         colmeta.setFileFormat(Const.FILETYPE_JSON);
         AbstractFileWriter jwriter=(AbstractFileWriter) TextFileWriterFactory.getWriterByType(colmeta, pair.getKey());
         System.out.println(new Date());
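The same factory migration is applied here: HdfsFileSystemAccessor is no longer instantiated with new but obtained from FileSystemAccessorFactory by its type name, and getOutResourceByWriter now takes only the path. A brief sketch, assuming the colmeta, hdfsparam map, and pair variable set up earlier in this main method:

    // hdfsparam holds the cluster config rows loaded from t_hadoop_cluster_config above.
    colmeta.setResourceCfgMap(hdfsparam);
    colmeta.setPath("/testdata/test1.gz");
    HdfsFileSystemAccessor util =
            (HdfsFileSystemAccessor) FileSystemAccessorFactory.getResourceAccessorByType("hdfs");
    // The writer-side resource is now resolved from the path alone.
    pair = util.getOutResourceByWriter(colmeta.getPath());
    AbstractFileWriter jwriter = (AbstractFileWriter) TextFileWriterFactory.getWriterByType(colmeta, pair.getKey());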

0 commit comments
