springboot java 实现 从数据库获取数据 导出dbf 文件

773 阅读2分钟

目录

介绍

依赖

工具类

控制层


介绍

  • 实现java springboot 导出dbf通过如下步骤
  • 1.编写一个dbf工具类
  • 2.实现通过传入一个文件地址名字,加一个 list<hashmap>
  • 3.工具类会生成文件然后把list解析把数据生成dbf到文件中
  • 4.通过response 返回给页面客户端文件
  • 5.删除之前创建的本地dbf文件
  • ok

 

 

依赖

<dependency>
   <groupId>com.github.albfernandez</groupId>
   <artifactId>javadbf</artifactId>
   <version>1.9.4</version>
</dependency>

 

工具类

package com.superman.uitl;

import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

import com.linuxense.javadbf.DBFDataType;
import com.linuxense.javadbf.DBFException;
import com.linuxense.javadbf.DBFField;
import com.linuxense.javadbf.DBFWriter;

/**
 * DBF export utility.
 *
 * <p>Builds a DBF file from a list of row maps: the keys of the first map
 * define the column set (every column is typed CHARACTER, length 100) and
 * each map in the list becomes one record.
 *
 * @author yushen
 */
public class DBFUitl {

	public static void main(String args[]) throws DBFException, IOException {
		List<HashMap<String, Object>> listdata = new ArrayList<HashMap<String, Object>>();
		HashMap<String, Object> m = new HashMap<String, Object>();
		m.put("a", "1");
		m.put("b", "1");
		m.put("c", "1");
		HashMap<String, Object> m2 = new HashMap<String, Object>();
		m2.put("a", "1");
		m2.put("b", "1");
		m2.put("c", "1");
		listdata.add(m2);
		listdata.add(m);

		exportdbf("c:/temp/test01.dbf", listdata);
	}

	/**
	 * Writes the given rows to a DBF file.
	 *
	 * @param dbfname  path of the DBF file to create
	 * @param listdata rows to write; must contain at least one map, whose
	 *                 keys define the columns for every row
	 * @throws IOException              if the file cannot be written
	 * @throws IllegalArgumentException if listdata is null or empty
	 */
	public static void exportdbf(String dbfname, List<HashMap<String, Object>> listdata) throws IOException {
		if (listdata == null || listdata.isEmpty()) {
			throw new IllegalArgumentException("listdata must contain at least one row");
		}

		// Snapshot the key order ONCE so the column definitions and every
		// row's values use the same ordering. HashMap iteration order is
		// unspecified, so iterating each map separately (as the original
		// code did) could misalign columns and values.
		List<String> columns = new ArrayList<String>(listdata.get(0).keySet());

		DBFField[] fields = new DBFField[columns.size()];
		for (int i = 0; i < columns.size(); i++) {
			fields[i] = new DBFField();
			fields[i].setName(columns.get(i));
			fields[i].setType(DBFDataType.CHARACTER);
			fields[i].setLength(100);
		}

		// Close the stream even if addRecord throws; DBFWriter.close()
		// flushes the final record count into the DBF header (the legacy
		// write(fos) call is redundant with the stream constructor).
		FileOutputStream fos = new FileOutputStream(dbfname);
		try {
			DBFWriter writer = new DBFWriter(fos);
			writer.setFields(fields);
			for (HashMap<String, Object> row : listdata) {
				Object[] rowData = new Object[columns.size()];
				for (int i = 0; i < columns.size(); i++) {
					rowData[i] = row.get(columns.get(i));
				}
				writer.addRecord(rowData);
			}
			writer.close();
		} finally {
			fos.close();
		}
		System.out.println("dbf文件生成!");
	}
}

 

控制层

package com.superman.global.ctr;

import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.HashMap;
import java.util.List;

import javax.servlet.ServletContext;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletResponse;

import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.system.ApplicationHome;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RestController;

import com.superman.global.service.PosetgresqlService;
import com.superman.uitl.DBFUitl;

/**
 * Exports a database table as a downloadable DBF file.
 *
 * <p>Flow: query the table, write a temporary DBF file next to the jar,
 * stream it to the client, then delete the temporary file.
 *
 * @author yushen
 */
@RestController
public class ExportDbfFileCtr {

	// NOTE: original code used ExportDataCtr.class here, so log lines were
	// attributed to the wrong controller.
	private static final Logger logger = LoggerFactory.getLogger(ExportDbfFileCtr.class);

	// Only a plain identifier (optionally schema-qualified) is accepted as a
	// table name. The name is concatenated into SQL below, so a whitelist is
	// the only safe check — the previous "--" substring test did not prevent
	// injection.
	private static final java.util.regex.Pattern TABLE_NAME =
			java.util.regex.Pattern.compile("[A-Za-z0-9_]+(\\.[A-Za-z0-9_]+)?");

	@Value("${ds.datasource.linktype}")
	private String linktype;

	@Autowired
	private PosetgresqlService ps;

	/**
	 * Exports one page of a table as a DBF attachment.
	 *
	 * <p>GET /export/dbf/{tables}/{page}/{pageSize}, e.g.
	 * http://localhost:9997/export/dbf/cd/1/10000
	 *
	 * @param response   servlet response the file is streamed to
	 * @param tablesName table name (required; plain identifier only)
	 * @param page       1-based page number (required)
	 * @param pageSize   rows per page (required)
	 * @throws Exception if the query or the file I/O fails
	 */
	@GetMapping(value = "/export/dbf/{tables}/{page}/{pageSize}")
	public void getUserInfoEx(HttpServletResponse response, @PathVariable("tables") String tablesName,
			@PathVariable("page") int page, @PathVariable("pageSize") int pageSize) throws Exception {

		String tables = tablesName.replaceAll(" ", "");
		if (!TABLE_NAME.matcher(tables).matches() || page < 1 || pageSize < 1) {
			// Invalid table name or paging values: answer 400 instead of a
			// silent empty 200.
			response.sendError(HttpServletResponse.SC_BAD_REQUEST);
			return;
		}

		HashMap<String, Object> map0 = new HashMap<String, Object>();
		map0.put("sql", "select * from " + tables + " limit " + pageSize + " OFFSET " + (page - 1) * pageSize);

		List<HashMap<String, Object>> userList = ps.getTableData(map0);
		if (userList == null || userList.isEmpty()) {
			return;
		}

		// Temporary files live in a "zorefile" directory next to the jar.
		ApplicationHome home = new ApplicationHome(getClass());
		File jarFile = home.getSource();
		String filePath = jarFile.getParentFile().toString() + "/zorefile/";
		File fileDir = new File(filePath);
		if (!fileDir.exists()) {
			// mkdirs() (not mkdir()) also creates missing parents; the
			// writable flag can only be set once the directory exists.
			if (fileDir.mkdirs()) {
				fileDir.setWritable(true, false);
			} else {
				logger.info("could not create export directory {}", filePath);
			}
		}

		String fileName = tables + ".dbf";
		DBFUitl.exportdbf(filePath + fileName, userList);
		try {
			retrunfile(filePath, fileName, response);
		} finally {
			// Always remove the temporary file, even if streaming failed.
			delFile(filePath + fileName);
		}
	}

	/**
	 * Streams the given file to the HTTP response as an attachment.
	 *
	 * @param path     directory containing the file (trailing slash included)
	 * @param filename file name, also used as the download name
	 * @param response servlet response to write to
	 * @throws Exception if reading or writing fails
	 */
	protected void retrunfile(String path, String filename, HttpServletResponse response) throws Exception {
		response.setContentType("application/dbf");
		response.setHeader("Location", filename);
		response.setHeader("Content-Disposition", "attachment; filename=" + filename);
		// try-with-resources closes both streams even when the copy throws
		// (the original leaked them on any I/O error).
		try (InputStream inputStream = new FileInputStream(path + filename);
				OutputStream outputStream = response.getOutputStream()) {
			byte[] buffer = new byte[4096];
			int n;
			while ((n = inputStream.read(buffer)) != -1) {
				outputStream.write(buffer, 0, n);
			}
			outputStream.flush();
		}
	}

	/**
	 * Deletes a file, or a directory tree recursively.
	 *
	 * @param pathfile path to delete
	 * @return true if the path existed and was deleted
	 */
	static boolean delFile(String pathfile) {
		File file = new File(pathfile);
		if (!file.exists()) {
			return false;
		}
		if (file.isDirectory()) {
			File[] children = file.listFiles();
			if (children != null) {
				for (File f : children) {
					// The original left this recursion commented out, so
					// deleting a non-empty directory always failed.
					delFile(f.getPath());
				}
			}
		}
		return file.delete();
	}
}

 

 

 

 

 

ok

 

 

 

持续更新