SSM框架分页展示Hdfs文件列表,返回上一级,进入下一级,上传文件,删除文件,下载文件前后端代码
时间: 2023-06-14 14:02:34 浏览: 90
基于SSM框架,前后端分离的文件云平台项目+源代码+文档说明
1. 分页展示Hdfs文件列表前端代码:
```html
<!-- File list table -->
<div class="row">
  <div class="col-md-12">
    <table class="table table-striped table-bordered">
      <thead>
        <tr>
          <th>文件名</th>
          <th>修改时间</th>
          <th>文件大小</th>
          <th>操作</th>
        </tr>
      </thead>
      <tbody>
        <tr v-for="(item, index) in fileList" :key="index">
          <td>{{item.name}}</td>
          <td>{{item.time}}</td>
          <td>{{item.size}}</td>
          <td>
            <button class="btn btn-sm btn-info" @click="gotoSubDir(item)">进入</button>
            <button class="btn btn-sm btn-danger" @click="deleteFile(item)">删除</button>
          </td>
        </tr>
      </tbody>
    </table>
  </div>
</div>
<!-- Pagination -->
<div class="row">
  <div class="col-md-12">
    <nav aria-label="Page navigation">
      <ul class="pagination">
        <li class="page-item" :class="{disabled: currentPage === 1}">
          <!-- .prevent stops the href="#" default navigation (scroll-jump / hash in URL) -->
          <a class="page-link" href="#" @click.prevent="prevPage()">上一页</a>
        </li>
        <li class="page-item" v-for="page in pages" :key="page" :class="{active: currentPage === page}">
          <a class="page-link" href="#" @click.prevent="gotoPage(page)">{{page}}</a>
        </li>
        <li class="page-item" :class="{disabled: currentPage === pageCount}">
          <a class="page-link" href="#" @click.prevent="nextPage()">下一页</a>
        </li>
      </ul>
    </nav>
  </div>
</div>
```
2. 分页展示Hdfs文件列表后端代码:
```java
/**
 * Web endpoints for browsing and manipulating an HDFS directory tree:
 * paginated listing, directory navigation, upload, delete, and download.
 *
 * NOTE(review): navigation state (working directory) lives in the singleton
 * {@code HdfsService}, so it is shared by ALL users/sessions — confirm this
 * is acceptable for a multi-user deployment.
 */
@Controller
@RequestMapping("/hdfs")
public class HdfsController {

    @Autowired
    private HdfsService hdfsService;

    /**
     * Returns one page of the file list under {@code path}.
     *
     * @param request  carries the optional "path" parameter; blank/"/" means
     *                 the service's current working directory
     * @param pageNum  1-based page number (default 1)
     * @param pageSize entries per page (default 10)
     * @return the requested page of {@link HdfsFile} rows
     */
    @GetMapping("/list")
    @ResponseBody
    public PageInfo<HdfsFile> list(HttpServletRequest request,
                                   @RequestParam(defaultValue = "1") int pageNum,
                                   @RequestParam(defaultValue = "10") int pageSize) {
        String path = request.getParameter("path");
        return hdfsService.list(path, pageNum, pageSize);
    }

    /**
     * Descends into a sub-directory by updating the service working directory.
     *
     * @param request carries the "path" parameter (absolute or relative)
     * @return "success" on completion
     */
    @GetMapping("/subdir")
    @ResponseBody
    public String subdir(HttpServletRequest request) {
        String path = request.getParameter("path");
        hdfsService.setWorkingDirectory(path);
        return "success";
    }

    /**
     * Moves the working directory up one level.
     *
     * @return "success" on completion
     */
    @GetMapping("/parent")
    @ResponseBody
    public String parent() {
        hdfsService.setParentDirectory();
        return "success";
    }

    /**
     * Deletes the file or directory named by the "path" parameter.
     *
     * @param request carries the "path" parameter
     * @return "success" on completion
     */
    @PostMapping("/delete")
    @ResponseBody
    public String delete(HttpServletRequest request) {
        String path = request.getParameter("path");
        hdfsService.delete(path);
        return "success";
    }

    /**
     * Uploads a multipart file into the current working directory.
     *
     * @param file the uploaded part; its original client-side name is reused in HDFS
     * @return "success" on completion
     * @throws IOException if reading the upload stream fails
     */
    @PostMapping("/upload")
    @ResponseBody
    public String upload(@RequestParam("file") MultipartFile file) throws IOException {
        String filename = file.getOriginalFilename();
        InputStream inputStream = file.getInputStream();
        hdfsService.upload(filename, inputStream);
        return "success";
    }

    /**
     * Streams an HDFS file to the client as an attachment.
     *
     * @param request  carries the "path" parameter of the file to download
     * @param response receives the file bytes
     * @throws IOException if opening or copying the stream fails
     */
    @GetMapping("/download")
    public void download(HttpServletRequest request, HttpServletResponse response) throws IOException {
        String path = request.getParameter("path");
        String name = new File(path).getName();
        // RFC 6266 filename* form: a raw Chinese filename in the header is
        // mangled or rejected by many clients. '+' -> '%20' because
        // URLEncoder targets form-encoding, not percent-encoding.
        String encoded = java.net.URLEncoder.encode(name, "UTF-8").replace("+", "%20");
        response.setContentType("application/octet-stream");
        response.setHeader("Content-Disposition", "attachment; filename*=UTF-8''" + encoded);
        // try-with-resources: the original leaked the HDFS stream on every download.
        try (InputStream inputStream = hdfsService.download(path)) {
            IOUtils.copy(inputStream, response.getOutputStream());
        }
        response.flushBuffer();
    }
}
```
3. 分页展示Hdfs文件列表服务层代码:
```java
/**
 * HDFS-backed implementation of {@code HdfsService}.
 *
 * NOTE(review): {@code workingDirectory} is mutable singleton state shared by
 * all callers; confirm single-user semantics are intended.
 */
@Service
public class HdfsServiceImpl implements HdfsService {

    private FileSystem fileSystem;
    private Path workingDirectory;

    /**
     * Connects to HDFS as user "hadoop" and positions the working directory at root.
     *
     * @throws IOException if the connection fails or is interrupted
     */
    @Override
    public void init() throws IOException {
        Configuration configuration = new Configuration();
        try {
            // FileSystem.get(URI, Configuration, String) also throws
            // InterruptedException — the original neither caught nor declared it.
            fileSystem = FileSystem.get(URI.create("hdfs://localhost:9000"), configuration, "hadoop");
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // preserve interrupt status
            throw new IOException("Interrupted while connecting to HDFS", e);
        }
        setWorkingDirectory("/");
    }

    /**
     * Sets the working directory: absolute paths replace it, relative paths
     * are resolved against the current one.
     */
    @Override
    public void setWorkingDirectory(String path) {
        if (path.startsWith("/")) {
            workingDirectory = new Path(path);
        } else {
            workingDirectory = new Path(workingDirectory, path);
        }
    }

    /** Moves up one level; stays at root instead of going null (was an NPE source). */
    @Override
    public void setParentDirectory() {
        Path parent = workingDirectory.getParent();
        if (parent != null) { // getParent() is null at "/"
            workingDirectory = parent;
        }
    }

    /**
     * Lists the entries of {@code path} (or of the working directory when the
     * path is blank or "/") as one page of display rows.
     *
     * @throws RuntimeException wrapping any {@link IOException} (the original
     *         swallowed it and returned null, guaranteeing a downstream NPE)
     */
    @Override
    public PageInfo<HdfsFile> list(String path, int pageNum, int pageSize) {
        // Was misleadingly named "isDir": this flag means "fall back to the
        // working directory", not "is a directory".
        boolean useWorkingDir = StringUtils.isBlank(path) || path.equals("/");
        if (useWorkingDir) {
            path = workingDirectory.toString();
        }
        try {
            FileStatus[] fileStatuses = fileSystem.listStatus(new Path(path));
            List<HdfsFile> fileList = Arrays.stream(fileStatuses)
                    .map(fileStatus -> new HdfsFile(fileStatus.getPath().toString(),
                            fileStatus.getModificationTime(),
                            fileStatus.isDirectory() ? "-" : FormatUtils.formatFileSize(fileStatus.getLen())))
                    .collect(Collectors.toList());
            return new PageInfo<>(pageNum, pageSize, fileList);
        } catch (IOException e) {
            throw new RuntimeException("Failed to list HDFS path: " + path, e);
        }
    }

    /**
     * Recursively deletes {@code path}.
     *
     * @throws RuntimeException wrapping any {@link IOException}
     */
    @Override
    public void delete(String path) {
        try {
            fileSystem.delete(new Path(path), true);
        } catch (IOException e) {
            throw new RuntimeException("Failed to delete HDFS path: " + path, e);
        }
    }

    /**
     * Writes {@code inputStream} into the working directory under {@code filename}.
     * Both streams are closed even on failure (the original leaked them).
     *
     * @throws RuntimeException wrapping any {@link IOException}
     */
    @Override
    public void upload(String filename, InputStream inputStream) {
        Path path = new Path(workingDirectory, filename);
        try (InputStream in = inputStream;
             FSDataOutputStream outputStream = fileSystem.create(path)) {
            IOUtils.copy(in, outputStream);
        } catch (IOException e) {
            throw new RuntimeException("Failed to upload to HDFS: " + path, e);
        }
    }

    /**
     * Opens {@code path} for reading. The CALLER must close the returned stream.
     *
     * @throws RuntimeException wrapping any {@link IOException} (the original
     *         returned null, which NPE'd in the download endpoint)
     */
    @Override
    public InputStream download(String path) {
        try {
            return fileSystem.open(new Path(path));
        } catch (IOException e) {
            throw new RuntimeException("Failed to open HDFS path: " + path, e);
        }
    }
}
```
4. 分页展示Hdfs文件列表工具类代码:
```java
/** Static helpers for rendering byte counts for the file-list UI. */
public class FormatUtils {

    /** Non-instantiable utility class. */
    private FormatUtils() {
    }

    /**
     * Formats a byte count as a human-readable string with two decimals
     * and a B/KB/MB/GB suffix (1 KB = 1024 B).
     *
     * @param size byte count, assumed non-negative
     * @return e.g. "512.00B", "2.00KB", "1.50MB"
     */
    public static String formatFileSize(long size) {
        // "0.00" (not "#.00") so 0 renders as "0.00B" instead of ".00B";
        // ROOT-locale symbols pin '.' as the decimal separator regardless of
        // the server's default locale.
        java.text.DecimalFormat df = new java.text.DecimalFormat("0.00",
                java.text.DecimalFormatSymbols.getInstance(java.util.Locale.ROOT));
        if (size < 1024) {
            return df.format((double) size) + "B";
        } else if (size < 1048576) {
            return df.format((double) size / 1024) + "KB";
        } else if (size < 1073741824) {
            return df.format((double) size / 1048576) + "MB";
        } else {
            return df.format((double) size / 1073741824) + "GB";
        }
    }
}
```
5. 分页展示Hdfs文件列表实体类代码:
```java
/**
 * Serializable row object describing one HDFS entry for the front-end list
 * (name, pre-formatted modification time, human-readable size).
 *
 * NOTE(review): no explicit serialVersionUID is declared — consider adding one
 * since the class implements Serializable.
 */
public class HdfsFile implements Serializable {

    // Entry name; the service layer passes the FULL HDFS path string here,
    // not just the leaf name.
    private String name;
    // Modification time already formatted as "yyyy-MM-dd HH:mm:ss".
    private String time;
    // Human-readable size (e.g. "2.00KB"), or "-" for directories.
    private String size;

    /** No-arg constructor for serialization/bean frameworks. */
    public HdfsFile() {
    }

    /**
     * @param name display name (full HDFS path string as passed by the service)
     * @param time modification time in epoch milliseconds; formatted immediately
     *             via commons-lang DateFormatUtils
     * @param size pre-formatted size string ("-" for directories)
     */
    public HdfsFile(String name, long time, String size) {
        this.name = name;
        this.time = DateFormatUtils.format(time, "yyyy-MM-dd HH:mm:ss");
        this.size = size;
    }

    // getters/setters omitted
}
```
阅读全文