新增视频上传和媒体上传

This commit is contained in:
WenG 2021-06-09 23:35:50 +08:00
parent f091e820c4
commit fef2fba311
21 changed files with 3126 additions and 0 deletions

29
module-file-media/pom.xml Normal file
View File

@ -0,0 +1,29 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Maven module for media file handling (video/audio upload & processing); child of the wg-basic parent POM. -->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>wg-basic</artifactId>
        <groupId>ink.wgink</groupId>
        <version>1.0-SNAPSHOT</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>
    <artifactId>module-file-media</artifactId>
    <description>媒体文件</description>
    <dependencies>
        <!-- Shared project base classes (DefaultBaseController etc.). -->
        <dependency>
            <groupId>ink.wgink</groupId>
            <artifactId>common</artifactId>
            <version>1.0-SNAPSHOT</version>
        </dependency>
        <!-- Test-only dependency. -->
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.13.1</version>
            <scope>test</scope>
        </dependency>
    </dependencies>
</project>

View File

@ -0,0 +1,23 @@
package ink.wgink.module.file.media.controller.api.audio;
import ink.wgink.common.base.DefaultBaseController;
import ink.wgink.interfaces.consts.ISystemConstant;
import io.swagger.annotations.Api;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
/**
 * Audio REST controller, mounted under the project's API prefix at /audio.
 * <p>
 * Currently an empty shell: it only registers the swagger tag and the route
 * prefix; audio endpoints are still to be added.
 *
 * @ClassName: AudioController
 * @Description: audio
 * @Author: WangGeng
 * @Date: 2021/6/8 22:05
 * @Version: 1.0
 **/
@Api(tags = ISystemConstant.API_TAGS_SYSTEM_PREFIX + "音频接口")
@RestController
@RequestMapping(ISystemConstant.API_PREFIX + "/audio")
public class AudioController extends DefaultBaseController {
}

View File

@ -0,0 +1,57 @@
package ink.wgink.module.file.media.controller.api.video;
import ink.wgink.common.base.DefaultBaseController;
import ink.wgink.exceptions.PropertiesException;
import ink.wgink.interfaces.consts.ISystemConstant;
import ink.wgink.module.file.media.service.video.IVideoService;
import ink.wgink.pojo.result.ErrorResult;
import ink.wgink.pojo.result.SuccessResultData;
import ink.wgink.properties.media.MediaProperties;
import io.swagger.annotations.*;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;
import java.util.concurrent.Callable;
/**
 * Video REST controller, mounted under the project's API prefix at /video.
 * Exposes the video upload endpoint.
 *
 * @ClassName: VideoController
 * @Description: video
 * @Author: WangGeng
 * @Date: 2021/6/8 22:05
 * @Version: 1.0
 **/
@Api(tags = ISystemConstant.API_TAGS_SYSTEM_PREFIX + "视频接口")
@RestController
@RequestMapping(ISystemConstant.API_PREFIX + "/video")
public class VideoController extends DefaultBaseController {

    // Holds the configured upload directory and ffmpeg path.
    @Autowired
    private MediaProperties mediaProperties;
    @Autowired
    private IVideoService videoService;

    /**
     * Uploads a video file. Returns a {@link Callable} so Spring MVC processes
     * the upload asynchronously, releasing the servlet thread while the video
     * service does its work.
     *
     * @param video the multipart video file to store
     * @return async wrapper around the service's upload result (file id/path string)
     * @throws PropertiesException when the upload path or ffmpeg path is not configured
     */
    @ApiOperation(value = "上传视频", notes = "上传视频接口")
    @ApiImplicitParams({
            @ApiImplicitParam(name = "video", value = "文件video", paramType = "query")
    })
    @ApiResponses({@ApiResponse(code = 400, message = "请求失败", response = ErrorResult.class)})
    @PostMapping("upload")
    public Callable<SuccessResultData<String>> upload(@RequestParam("video") MultipartFile video) {
        // Both properties must be configured before any upload can be handled;
        // fail fast on the request thread, before going async.
        if (StringUtils.isBlank(mediaProperties.getUploadPath())) {
            throw new PropertiesException("未设置文件上传路径");
        }
        if (StringUtils.isBlank(mediaProperties.getFfmpegPath())) {
            throw new PropertiesException("未设置解码器路径");
        }
        return () -> new SuccessResultData<>(videoService.upload(video));
    }
}

View File

@ -0,0 +1,939 @@
package ink.wgink.module.file.media.manager;
import ink.wgink.module.file.media.manager.domain.ImageMetaInfo;
import ink.wgink.module.file.media.manager.domain.MusicMetaInfo;
import ink.wgink.module.file.media.manager.domain.VideoMetaInfo;
import ink.wgink.module.file.media.manager.domain.gif.AnimatedGifEncoder;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.imageio.ImageIO;
import java.awt.image.BufferedImage;
import java.io.*;
import java.sql.Time;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* 基于FFmpeg内核来编解码音视频信息
* 使用前需手动在运行环境中安装FFmpeg运行程序然后正确设置FFmpeg运行路径后MediaUtil.java才能正常调用到FFmpeg程序去处理音视频
* <p>
* version: 1.0
*/
public class MediaManager {
private static final Logger LOG = LoggerFactory.getLogger(MediaManager.class);
/**
* 可以处理的视频格式
*/
public final static String[] VIDEO_TYPE = {"MP4", "WMV"};
/**
* 可以处理的图片格式
*/
public final static String[] IMAGE_TYPE = {"JPG", "JPEG", "PNG", "GIF"};
/**
* 可以处理的音频格式
*/
public final static String[] AUDIO_TYPE = {"AAC"};
/**
* 视频帧抽取时的默认时间点第10s
* Time类构造参数的单位:ms
*/
private static final Time DEFAULT_TIME = new Time(0, 0, 10);
/**
* 视频帧抽取的默认宽度值单位px
*/
private static int DEFAULT_WIDTH = 320;
/**
* 视频帧抽取的默认时长单位s
*/
private static int DEFAULT_TIME_LENGTH = 10;
/**
* 抽取多张视频帧以合成gif动图时gif的播放速度
*/
private static int DEFAULT_GIF_PLAYTIME = 110;
/**
* FFmpeg程序执行路径
* 当前系统安装好ffmpeg程序并配置好相应的环境变量后值为ffmpeg可执行程序文件在实际系统中的绝对路径
*/
private static String FFMPEG_PATH = null;
/**
* 视频时长正则匹配式
* 用于解析视频及音频的时长等信息时使用
* <p>
* (.*?)表示匹配任何除\r\n之外的任何0或多个字符非贪婪模式
*/
private static String durationRegex = "Duration: (\\d*?):(\\d*?):(\\d*?)\\.(\\d*?), start: (.*?), bitrate: (\\d*) kb\\/s.*";
private static Pattern durationPattern;
/**
* 视频流信息正则匹配式
* 用于解析视频详细信息时使用
*/
private static String videoStreamRegex = "Stream #\\d:\\d[\\(]??\\S*[\\)]??: Video: (\\S*\\S$?)[^\\,]*, (.*?), (\\d*)x(\\d*)[^\\,]*, (\\d*) kb\\/s, (\\d*[\\.]??\\d*) fps";
private static Pattern videoStreamPattern;
/**
* 音频流信息正则匹配式
* 用于解析音频详细信息时使用
*/
private static String musicStreamRegex = "Stream #\\d:\\d[\\(]??\\S*[\\)]??: Audio: (\\S*\\S$?)(.*), (.*?) Hz, (.*?), (.*?), (\\d*) kb\\/s";
;
private static Pattern musicStreamPattern;
private static MediaManager MEDIA_MANAGER = MediaManagerBuilder.mediaManager;
/**
* 静态初始化时先加载好用于音视频解析的正则匹配式
*/
static {
durationPattern = Pattern.compile(durationRegex);
videoStreamPattern = Pattern.compile(videoStreamRegex);
musicStreamPattern = Pattern.compile(musicStreamRegex);
}
private MediaManager() {}
public static MediaManager getInstance() {
return MEDIA_MANAGER;
}
/**
 * Returns the ffmpeg executable path currently configured for this tool.
 *
 * @return absolute path of the ffmpeg binary, or null when it has not been set yet
 */
public String getFFmpegPath() {
    return FFMPEG_PATH;
}
/**
 * Sets the ffmpeg executable path used by this tool.
 *
 * @param ffmpeg_path absolute path of the ffmpeg binary in the host system
 * @return true if the path is non-blank and the file exists, false otherwise
 */
public boolean setFFmpegPath(String ffmpeg_path) {
    if (StringUtils.isBlank(ffmpeg_path)) {
        LOG.error("--- 设置ffmpeg执行路径失败因为传入的ffmpeg可执行程序路径为空 ---");
        return false;
    }
    File ffmpegFile = new File(ffmpeg_path);
    if (!ffmpegFile.exists()) {
        LOG.error("--- 设置ffmpeg执行路径失败因为传入的ffmpeg可执行程序路径下的ffmpeg文件不存在 ---");
        return false;
    }
    // NOTE(review): writes a static field from an instance method — the path is
    // shared by every user of the singleton.
    FFMPEG_PATH = ffmpeg_path;
    LOG.info("--- 设置ffmpeg执行路径成功 --- 当前ffmpeg可执行程序路径为 " + ffmpeg_path);
    return true;
}
/**
 * Checks whether this media tool can work: the configured ffmpeg binary must
 * exist and respond to "-version".
 *
 * @return true if ffmpeg is present and runnable, false otherwise
 */
public boolean isExecutable() {
    // BUGFIX: guard against an unset path — the original passed FFMPEG_PATH
    // straight into new File(...), throwing a NullPointerException when
    // setFFmpegPath() had never been called.
    if (StringUtils.isBlank(FFMPEG_PATH)) {
        LOG.error("--- 工作状态异常因为传入的ffmpeg可执行程序路径下的ffmpeg文件不存在 ---");
        return false;
    }
    File ffmpegFile = new File(FFMPEG_PATH);
    if (!ffmpegFile.exists()) {
        LOG.error("--- 工作状态异常因为传入的ffmpeg可执行程序路径下的ffmpeg文件不存在 ---");
        return false;
    }
    // "-version" is the cheapest command that proves the binary executes.
    List<String> cmds = new ArrayList<>(1);
    cmds.add("-version");
    String ffmpegVersionStr = executeCommand(cmds);
    if (StringUtils.isBlank(ffmpegVersionStr)) {
        LOG.error("--- 工作状态异常因为ffmpeg命令执行失败 ---");
        return false;
    }
    LOG.info("--- 工作状态正常 ---");
    return true;
}
/**
 * Executes an FFmpeg command.
 *
 * @param commonds the FFmpeg arguments (the ffmpeg binary path is prepended here)
 * @return everything FFmpeg printed on stdout/stderr, or null on failure
 */
public String executeCommand(List<String> commonds) {
    if (CollectionUtils.isEmpty(commonds)) {
        LOG.error("--- 指令执行失败因为要执行的FFmpeg指令为空 ---");
        return null;
    }
    LinkedList<String> ffmpegCmds = new LinkedList<>(commonds);
    ffmpegCmds.addFirst(FFMPEG_PATH); // prepend the ffmpeg binary path
    LOG.info("--- 待执行的FFmpeg指令为---" + ffmpegCmds);
    Runtime runtime = Runtime.getRuntime();
    Process ffmpeg = null;
    try {
        ProcessBuilder builder = new ProcessBuilder();
        builder.command(ffmpegCmds);
        ffmpeg = builder.start();
        LOG.info("--- 开始执行FFmpeg指令--- 执行线程名:" + builder.toString());
        // Drain stdout and stderr on background threads; without this the
        // process can deadlock once the pipe buffers fill up.
        PrintStream errorStream = new PrintStream(ffmpeg.getErrorStream());
        PrintStream inputStream = new PrintStream(ffmpeg.getInputStream());
        errorStream.start();
        inputStream.start();
        // Wait for the ffmpeg process to finish.
        ffmpeg.waitFor();
        // BUGFIX: also wait for both reader threads to finish before touching
        // their buffers — the original read the StringBuffers while the threads
        // could still be appending, racing on partially collected output.
        errorStream.join();
        inputStream.join();
        String result = errorStream.stringBuffer.append(inputStream.stringBuffer).toString();
        String cmdStr = Arrays.toString(ffmpegCmds.toArray()).replace(",", "");
        String resultStr = StringUtils.isBlank(result) ? "【异常】" : "正常";
        LOG.info("--- 已执行的FFmepg命令 ---" + cmdStr + " 已执行完毕,执行结果: " + resultStr);
        return result;
    } catch (Exception e) {
        LOG.error("--- FFmpeg命令执行出错 --- 出错信息: " + e.getMessage());
        // Restore the interrupt flag if waitFor()/join() was interrupted.
        if (e instanceof InterruptedException) {
            Thread.currentThread().interrupt();
        }
        return null;
    } finally {
        if (null != ffmpeg) {
            // NOTE(review): a new shutdown hook is registered on every call and
            // never removed, so hooks accumulate for the JVM's lifetime.
            ProcessKiller ffmpegKiller = new ProcessKiller(ffmpeg);
            runtime.addShutdownHook(ffmpegKiller);
        }
    }
}
/**
 * Converts a video using x264 encoding.
 * <p>
 * Note: when specifying the output resolution, width and height must both be given.
 *
 * @param fileInput  source video file
 * @param fileOutPut converted video output file
 * @param withAudio  true to keep the audio track, false to drop it
 * @param crf        quality factor 0-51 (smaller = better quality, bigger file);
 *                   see CrfValueEnum.code. NOTE(review): must be non-null —
 *                   crf.toString() below would NPE otherwise.
 * @param preset     x264 encoding-speed preset; see PresetVauleEnum.presetValue
 * @param width      output width in px; source width is kept when null
 * @param height     output height in px; source height is kept when null
 */
public void convertVideo(File fileInput, File fileOutPut, boolean withAudio, Integer crf, String preset, Integer width, Integer height) {
    if (null == fileInput || !fileInput.exists()) {
        throw new RuntimeException("源视频文件不存在,请检查源视频路径");
    }
    if (null == fileOutPut) {
        throw new RuntimeException("转换后的视频路径为空,请检查转换后的视频存放路径是否正确");
    }
    if (!fileOutPut.exists()) {
        try {
            fileOutPut.createNewFile();
        } catch (IOException e) {
            LOG.error("视频转换时新建输出文件失败");
        }
    }
    // Only containers this tool declares support for (MP4/WMV, by extension).
    String format = getFormat(fileInput);
    if (!isLegalFormat(format, VIDEO_TYPE)) {
        throw new RuntimeException("无法解析的视频格式:" + format);
    }
    List<String> commond = new ArrayList<String>();
    commond.add("-i");
    commond.add(fileInput.getAbsolutePath());
    if (!withAudio) { // whether to keep the audio track
        commond.add("-an"); // strip audio
    }
    if (null != width && width > 0 && null != height && height > 0) { // output resolution
        commond.add("-s");
        String resolution = width.toString() + "x" + height.toString();
        commond.add(resolution);
    }
    commond.add("-vcodec"); // output video encoder
    commond.add("libx264"); // use the x264 encoder
    commond.add("-preset"); // required alongside x264
    commond.add(preset); // encoding-speed preset
    commond.add("-crf"); // output quality
    commond.add(crf.toString()); // smaller value = higher quality
    commond.add("-y"); // overwrite existing output without prompting
    commond.add(fileOutPut.getAbsolutePath());
    executeCommand(commond);
}
/**
 * Extracts a video frame using all defaults: the frame at the 10th second,
 * 320px wide.
 * <p>
 * When the output path ends with .gif, roughly 10 seconds of frames starting at
 * the default time point are captured and combined into an animated gif.
 *
 * @param videoFile  source video file
 * @param fileOutPut output image file
 */
public void cutVideoFrame(File videoFile, File fileOutPut) {
    cutVideoFrame(videoFile, fileOutPut, DEFAULT_TIME);
}
/**
 * Extracts the frame at the given time point; the frame width defaults to
 * 320px and the height follows the source aspect ratio.
 * <p>
 * When the output path ends with .gif, roughly 10 seconds of frames starting at
 * {@code time} are captured and combined into an animated gif.
 *
 * @param videoFile  source video file
 * @param fileOutPut output image file
 * @param time       time point of the frame to extract
 */
public void cutVideoFrame(File videoFile, File fileOutPut, Time time) {
    cutVideoFrame(videoFile, fileOutPut, time, DEFAULT_WIDTH);
}
/**
 * Extracts the frame at the given time point with the given width; the height
 * is derived from the source aspect ratio so the image is not distorted.
 * <p>
 * When the output path ends with .gif, roughly 10 seconds of frames starting at
 * {@code time} are captured and combined into an animated gif.
 *
 * @param videoFile  source video file
 * @param fileOutPut output image file
 * @param time       time point of the frame to extract
 * @param width      frame width in px
 */
public void cutVideoFrame(File videoFile, File fileOutPut, Time time, int width) {
    if (null == videoFile || !videoFile.exists()) {
        throw new RuntimeException("源视频文件不存在,请检查源视频路径");
    }
    if (null == fileOutPut) {
        throw new RuntimeException("转换后的视频路径为空,请检查转换后的视频存放路径是否正确");
    }
    VideoMetaInfo info = getVideoMetaInfo(videoFile);
    if (null == info) {
        LOG.error("--- 未能解析源视频信息,视频帧抽取操作失败 --- 源视频: " + videoFile);
        return;
    }
    // Scale the height to keep the source aspect ratio.
    // NOTE(review): divides by info.getWidth() — ArithmeticException if the parsed width is 0.
    int height = width * info.getHeight() / info.getWidth();
    cutVideoFrame(videoFile, fileOutPut, time, width, height);
}
/**
 * Extracts frame(s) at the given time point with explicit width and height.
 * <p>
 * When the output path ends with .gif, roughly 10 seconds of frames starting at
 * {@code time} are captured and merged into one animated gif; otherwise a
 * single still image is produced.
 *
 * @param videoFile  source video file
 * @param fileOutPut output image file (JPG/JPEG/PNG/GIF)
 * @param time       time point of the frame(s) to extract
 * @param width      frame width in px
 * @param height     frame height in px
 */
public void cutVideoFrame(File videoFile, File fileOutPut, Time time, int width, int height) {
    if (null == videoFile || !videoFile.exists()) {
        throw new RuntimeException("源视频文件不存在,请检查源视频路径");
    }
    if (null == fileOutPut) {
        throw new RuntimeException("转换后的视频路径为空,请检查转换后的视频存放路径是否正确");
    }
    String format = getFormat(fileOutPut);
    if (!isLegalFormat(format, IMAGE_TYPE)) {
        throw new RuntimeException("无法生成指定格式的帧图片:" + format);
    }
    String fileOutPutPath = fileOutPut.getAbsolutePath();
    if (!"GIF".equals(StringUtils.upperCase(format))) {
        // Not a gif: extract a single still frame.
        cutVideoFrame(videoFile, fileOutPutPath, time, width, height, 1, false);
    } else {
        // Gif: extract a burst of stills, then merge them into one animation.
        String path = fileOutPut.getParent();
        String name = fileOutPut.getName();
        // Temp directory for the intermediate stills.
        // NOTE(review): name.indexOf(".") truncates at the FIRST dot, so "a.b.gif" becomes "a".
        String tempPath = path + File.separator + System.currentTimeMillis() + "_" + name.substring(0, name.indexOf("."));
        File file = new File(tempPath);
        if (!file.exists()) {
            file.mkdir();
        }
        try {
            cutVideoFrame(videoFile, tempPath, time, width, height, DEFAULT_TIME_LENGTH, true);
            // Build the gif from the generated stills.
            // NOTE(review): File.list() can return null (dir removed/unreadable) — would NPE here.
            String images[] = file.list();
            for (int i = 0; i < images.length; i++) {
                images[i] = tempPath + File.separator + images[i];
            }
            createGifImage(images, fileOutPut.getAbsolutePath(), DEFAULT_GIF_PLAYTIME);
        } catch (Exception e) {
            LOG.error("--- 截取视频帧操作出错 --- 错误信息:" + e.getMessage());
        } finally {
            // Clean up the intermediate stills and the temp directory.
            String images[] = file.list();
            for (int i = 0; i < images.length; i++) {
                File fileDelete = new File(tempPath + File.separator + images[i]);
                fileDelete.delete();
            }
            file.delete();
        }
    }
}
/**
 * Low-level frame extraction: time point, size, burst length and single/multi
 * frame mode.
 *
 * @param videoFile   source video
 * @param path        output path (an image file for a still; a directory for a burst)
 * @param time        start time of the extraction
 * @param width       frame width in px (must be > 20)
 * @param height      frame height in px (must be > 20)
 * @param timeLength  burst length in seconds, counted from {@code time}
 * @param isContinuty false = one still at {@code time}; true = a 3fps burst of
 *                    numbered stills for later gif assembly
 */
private void cutVideoFrame(File videoFile, String path, Time time, int width, int height, int timeLength, boolean isContinuty) {
    if (videoFile == null || !videoFile.exists()) {
        throw new RuntimeException("源视频文件不存在,源视频路径: ");
    }
    if (null == path) {
        throw new RuntimeException("转换后的文件路径为空,请检查转换后的文件存放路径是否正确");
    }
    VideoMetaInfo info = getVideoMetaInfo(videoFile);
    if (null == info) {
        throw new RuntimeException("未解析到视频信息");
    }
    // NOTE(review): apparent unit mismatch — Time.getTime() yields milliseconds,
    // timeLength is in seconds, and getDuration() is in milliseconds, so this
    // bounds check looks unreliable. TODO confirm the intended units.
    if (time.getTime() + timeLength > info.getDuration()) {
        throw new RuntimeException("开始截取视频帧的时间点不合法:" + time.toString() + ",因为截取时间点晚于视频的最后时间点");
    }
    if (width <= 20 || height <= 20) {
        throw new RuntimeException("截取的视频帧图片的宽度或高度不合法宽高值必须大于20");
    }
    try {
        List<String> commond = new ArrayList<String>();
        commond.add("-ss"); // seek to the start time
        commond.add(time.toString());
        if (isContinuty) {
            commond.add("-t"); // burst duration (seconds)
            commond.add(timeLength + "");
        } else {
            commond.add("-vframes"); // exactly one frame
            commond.add("1");
        }
        commond.add("-i");
        commond.add(videoFile.getAbsolutePath());
        commond.add("-an"); // drop audio
        commond.add("-f");
        commond.add("image2"); // image sequence muxer
        if (isContinuty) {
            commond.add("-r"); // 3 frames per second for the burst
            commond.add("3");
        }
        commond.add("-s");
        commond.add(width + "*" + height);
        if (isContinuty) {
            commond.add(path + File.separator + "foo-%03d.jpeg"); // numbered stills
        } else {
            commond.add(path);
        }
        executeCommand(commond);
    } catch (Exception e) {
        LOG.error("--- 视频帧抽取过程出错 --- 错误信息: " + e.getMessage());
    }
}
/**
 * Cuts a segment out of a video into a new file (stream copy, no re-encode).
 *
 * @param videoFile  source video file
 * @param outputFile output video file
 * @param startTime  start of the segment
 * @param timeLength segment length in seconds, counted from {@code startTime}
 */
public void cutVideo(File videoFile, File outputFile, Time startTime, int timeLength) {
    if (videoFile == null || !videoFile.exists()) {
        throw new RuntimeException("视频文件不存在:");
    }
    if (null == outputFile) {
        throw new RuntimeException("转换后的视频路径为空,请检查转换后的视频存放路径是否正确");
    }
    VideoMetaInfo info = getVideoMetaInfo(videoFile);
    if (null == info) {
        throw new RuntimeException("未解析到视频信息");
    }
    // NOTE(review): same unit mismatch as cutVideoFrame — getTime() is ms,
    // timeLength is s, getDuration() is ms. TODO confirm intended units.
    if (startTime.getTime() + timeLength > info.getDuration()) {
        throw new RuntimeException("截取时间不合法:" + startTime.toString() + ",因为截取时间大于视频的时长");
    }
    try {
        if (!outputFile.exists()) {
            outputFile.createNewFile();
        }
        List<String> commond = new ArrayList<String>();
        commond.add("-ss"); // seek to the segment start
        commond.add(startTime.toString());
        commond.add("-t"); // segment length in seconds
        commond.add("" + timeLength);
        commond.add("-i");
        commond.add(videoFile.getAbsolutePath());
        commond.add("-vcodec"); // copy both streams without re-encoding
        commond.add("copy");
        commond.add("-acodec");
        commond.add("copy");
        commond.add(outputFile.getAbsolutePath());
        executeCommand(commond);
    } catch (IOException e) {
        LOG.error("--- 视频截取过程出错 ---");
    }
}
/**
 * Extracts the audio track from a video via stream copy ("-acodec copy").
 * <p>
 * NOTE(review): the original comment claimed "MP3 only", but the code actually
 * requires the output file's extension to be in AUDIO_TYPE (AAC).
 *
 * @param videoFile source video file
 * @param audioFile audio output file (must have an AAC-type extension)
 */
public void getAudioFromVideo(File videoFile, File audioFile) {
    if (null == videoFile || !videoFile.exists()) {
        throw new RuntimeException("源视频文件不存在: ");
    }
    if (null == audioFile) {
        throw new RuntimeException("要提取的音频路径为空:");
    }
    String format = getFormat(audioFile);
    if (!isLegalFormat(format, AUDIO_TYPE)) {
        throw new RuntimeException("无法生成指定格式的音频:" + format + " 请检查要输出的音频文件是否是AAC类型");
    }
    try {
        if (!audioFile.exists()) {
            audioFile.createNewFile();
        }
        List<String> commond = new ArrayList<String>();
        commond.add("-i");
        commond.add(videoFile.getAbsolutePath());
        commond.add("-vn"); // no video: drop the video stream
        commond.add("-y"); // overwrite output without prompting
        commond.add("-acodec");
        commond.add("copy"); // copy audio as-is, no re-encode
        commond.add(audioFile.getAbsolutePath());
        executeCommand(commond);
    } catch (Exception e) {
        LOG.error("--- 抽取视频中的音频信息的过程出错 --- 错误信息: " + e.getMessage());
    }
}
/**
 * Parses basic video metadata from a file by scraping ffmpeg's "-i" output.
 * <p>
 * Typical ffmpeg output lines parsed here:
 * <pre>
 *   Duration: 00:00:30.04, start: 0.000000, bitrate: 19031 kb/s
 *   Stream #0:0(eng): Video: h264 (Main) (avc1 / 0x31637661), yuv420p(tv, bt709), 1920x1080, 18684 kb/s, 25 fps, ...
 *   Stream #0:1(eng): Audio: aac (LC) (mp4a / 0x6134706D), 48000 Hz, stereo, fltp, 317 kb/s (default)
 * </pre>
 *
 * @param videoFile source video file
 * @return the video metadata (duration in ms, size in bytes, bitrates in kb/s,
 *         resolution in px, fps, plus the embedded audio info), or null when
 *         the file is missing or parsing fails
 */
public VideoMetaInfo getVideoMetaInfo(File videoFile) {
    if (null == videoFile || !videoFile.exists()) {
        LOG.error("--- 解析视频信息失败,因为要解析的源视频文件不存在 ---");
        return null;
    }
    String parseResult = getMetaInfoFromFFmpeg(videoFile);
    // BUGFIX: executeCommand may return null/empty (ffmpeg failed); the
    // original passed that straight into Pattern.matcher -> NullPointerException.
    if (StringUtils.isBlank(parseResult)) {
        LOG.error("--- 解析视频参数信息出错! --- 错误信息: ffmpeg未返回任何解析结果");
        return null;
    }
    Matcher durationMacher = durationPattern.matcher(parseResult);
    Matcher videoStreamMacher = videoStreamPattern.matcher(parseResult);
    Matcher videoMusicStreamMacher = musicStreamPattern.matcher(parseResult);
    Long duration = 0L; // video duration, ms
    Integer videoBitrate = 0; // video bitrate, kb/s
    String videoFormat = getFormat(videoFile); // container format (by extension)
    Long videoSize = videoFile.length(); // file size, bytes
    String videoEncoder = ""; // video encoder name
    Integer videoHeight = 0; // height, px
    Integer videoWidth = 0; // width, px
    Float videoFramerate = 0F; // frame rate, fps
    String musicFormat = ""; // audio format
    Long samplerate = 0L; // audio sample rate, Hz
    Integer musicBitrate = 0; // audio bitrate, kb/s
    try {
        // Duration / overall bitrate line.
        if (durationMacher.find()) {
            long hours = (long) Integer.parseInt(durationMacher.group(1));
            long minutes = (long) Integer.parseInt(durationMacher.group(2));
            long seconds = (long) Integer.parseInt(durationMacher.group(3));
            long dec = (long) Integer.parseInt(durationMacher.group(4));
            // BUGFIX: ffmpeg prints two fractional digits (hundredths of a
            // second), so the fraction contributes dec * 10 ms — the original
            // used dec * 100 and overstated the sub-second part tenfold.
            duration = dec * 10L + seconds * 1000L + minutes * 60L * 1000L + hours * 60L * 60L * 1000L;
            videoBitrate = Integer.parseInt(durationMacher.group(6));
        }
        // Video stream line: encoder, resolution, bitrate, fps.
        if (videoStreamMacher.find()) {
            videoEncoder = videoStreamMacher.group(1);
            videoWidth = Integer.parseInt(videoStreamMacher.group(3));
            videoHeight = Integer.parseInt(videoStreamMacher.group(4));
            videoFramerate = Float.parseFloat(videoStreamMacher.group(6));
        }
        // Audio stream line embedded in the video.
        if (videoMusicStreamMacher.find()) {
            musicFormat = videoMusicStreamMacher.group(1); // audio format
            samplerate = Long.parseLong(videoMusicStreamMacher.group(3)); // sample rate
            musicBitrate = Integer.parseInt(videoMusicStreamMacher.group(6)); // bitrate
        }
    } catch (Exception e) {
        LOG.error("--- 解析视频参数信息出错! --- 错误信息: " + e.getMessage());
        return null;
    }
    // Package the embedded audio info.
    MusicMetaInfo musicMetaInfo = new MusicMetaInfo();
    musicMetaInfo.setFormat(musicFormat);
    musicMetaInfo.setDuration(duration);
    musicMetaInfo.setBitRate(musicBitrate);
    musicMetaInfo.setSampleRate(samplerate);
    // Package the video info.
    VideoMetaInfo videoMetaInfo = new VideoMetaInfo();
    videoMetaInfo.setFormat(videoFormat);
    videoMetaInfo.setSize(videoSize);
    videoMetaInfo.setBitRate(videoBitrate);
    videoMetaInfo.setDuration(duration);
    videoMetaInfo.setEncoder(videoEncoder);
    videoMetaInfo.setFrameRate(videoFramerate);
    videoMetaInfo.setHeight(videoHeight);
    videoMetaInfo.setWidth(videoWidth);
    videoMetaInfo.setMusicMetaInfo(musicMetaInfo);
    return videoMetaInfo;
}
/**
 * Reads basic video metadata from a stream by spooling it to a temp file and
 * delegating to {@link #getVideoMetaInfo(File)}.
 *
 * @param inputStream source video stream
 * @return the video metadata, or null on failure
 */
public VideoMetaInfo getVideoMetaInfo(InputStream inputStream) {
    File file = null;
    try {
        // createTempFile always creates the file, so no exists() check needed.
        file = File.createTempFile("tmp", null);
        FileUtils.copyInputStreamToFile(inputStream, file);
        return getVideoMetaInfo(file);
    } catch (Exception e) {
        LOG.error("--- 从流中获取视频基本信息出错 --- 错误信息: " + e.getMessage());
        return null;
    } finally {
        // BUGFIX: delete the temp file immediately on every path — the original
        // only scheduled deleteOnExit() on success, leaking the file both until
        // JVM exit and entirely when the copy/parse threw.
        if (null != file) {
            file.delete();
        }
    }
}
/**
 * Parses basic audio metadata from a file by scraping ffmpeg's "-i" output.
 *
 * @param musicFile audio file
 * @return the audio metadata (duration in ms, bitrate in kb/s, sample rate in
 *         Hz, format, size in bytes), or null when the file is missing or
 *         parsing fails
 */
public MusicMetaInfo getMusicMetaInfo(File musicFile) {
    if (null == musicFile || !musicFile.exists()) {
        LOG.error("--- 无法获取音频信息,因为要解析的音频文件为空 ---");
        return null;
    }
    String parseResult = getMetaInfoFromFFmpeg(musicFile);
    // BUGFIX: executeCommand may return null/empty (ffmpeg failed); the
    // original passed that straight into Pattern.matcher -> NullPointerException.
    if (StringUtils.isBlank(parseResult)) {
        LOG.error("--- 解析音频参数信息出错! --- 错误信息: ffmpeg未返回任何解析结果");
        return null;
    }
    Long duration = 0L; // audio duration, ms
    Integer musicBitrate = 0; // audio bitrate, kb/s
    Long samplerate = 0L; // sample rate, Hz
    String musicFormat = ""; // audio format
    Long musicSize = musicFile.length(); // file size, bytes
    Matcher durationMacher = durationPattern.matcher(parseResult);
    Matcher musicStreamMacher = musicStreamPattern.matcher(parseResult);
    try {
        // Duration / overall bitrate line.
        if (durationMacher.find()) {
            long hours = (long) Integer.parseInt(durationMacher.group(1));
            long minutes = (long) Integer.parseInt(durationMacher.group(2));
            long seconds = (long) Integer.parseInt(durationMacher.group(3));
            long dec = (long) Integer.parseInt(durationMacher.group(4));
            // BUGFIX: the fractional part is hundredths of a second, i.e.
            // dec * 10 ms — the original multiplied by 100.
            duration = dec * 10L + seconds * 1000L + minutes * 60L * 1000L + hours * 60L * 60L * 1000L;
            musicBitrate = Integer.parseInt(durationMacher.group(6));
        }
        // Audio stream line: format, sample rate, bitrate.
        if (musicStreamMacher.find()) {
            musicFormat = musicStreamMacher.group(1); // audio format
            samplerate = Long.parseLong(musicStreamMacher.group(3)); // sample rate
            musicBitrate = Integer.parseInt(musicStreamMacher.group(6)); // bitrate
        }
    } catch (Exception e) {
        LOG.error("--- 解析音频参数信息出错! --- 错误信息: " + e.getMessage());
        return null;
    }
    // Package the audio info.
    MusicMetaInfo musicMetaInfo = new MusicMetaInfo();
    musicMetaInfo.setFormat(musicFormat);
    musicMetaInfo.setDuration(duration);
    musicMetaInfo.setBitRate(musicBitrate);
    musicMetaInfo.setSampleRate(samplerate);
    musicMetaInfo.setSize(musicSize);
    return musicMetaInfo;
}
/**
 * Reads basic audio metadata from a stream by spooling it to a temp file and
 * delegating to {@link #getMusicMetaInfo(File)}.
 *
 * @param inputStream source audio stream
 * @return the audio metadata, or null on failure
 */
public MusicMetaInfo getMusicMetaInfo(InputStream inputStream) {
    File file = null;
    try {
        // createTempFile always creates the file, so no exists() check needed.
        file = File.createTempFile("tmp", null);
        FileUtils.copyInputStreamToFile(inputStream, file);
        return getMusicMetaInfo(file);
    } catch (Exception e) {
        LOG.error("--- 从流中获取音频基本信息出错 --- 错误信息: " + e.getMessage());
        return null;
    } finally {
        // BUGFIX: delete the temp file immediately on every path — the original
        // only scheduled deleteOnExit() on success and leaked on failure.
        if (null != file) {
            file.delete();
        }
    }
}
/**
 * Reads basic image metadata (width, height, size) from a stream.
 *
 * @param inputStream source image stream
 * @return the image metadata, or null when the stream cannot be decoded
 */
public ImageMetaInfo getImageInfo(InputStream inputStream) {
    try {
        // BUGFIX: the original called inputStream.available() AFTER ImageIO.read()
        // had consumed the stream, so the reported size was (close to) zero.
        // Buffer all bytes first: the byte count is the real size, and the
        // image is decoded from the in-memory copy.
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        byte[] chunk = new byte[8192];
        int n;
        while ((n = inputStream.read(chunk)) != -1) {
            buffer.write(chunk, 0, n);
        }
        byte[] bytes = buffer.toByteArray();
        BufferedImage image = ImageIO.read(new ByteArrayInputStream(bytes));
        ImageMetaInfo imageInfo = new ImageMetaInfo();
        imageInfo.setWidth(image.getWidth());
        imageInfo.setHeight(image.getHeight());
        imageInfo.setSize((long) bytes.length);
        return imageInfo;
    } catch (Exception e) {
        LOG.error("--- 获取图片的基本信息失败 --- 错误信息: " + e.getMessage());
        return null;
    }
}
/**
 * Reads basic image metadata (width, height, size, format) from a file.
 *
 * @param imageFile source image file
 * @return the image metadata, or null when the file is missing or undecodable
 */
public ImageMetaInfo getImageInfo(File imageFile) {
    if (null == imageFile || !imageFile.exists()) {
        return null;
    }
    try {
        BufferedImage decoded = ImageIO.read(imageFile);
        ImageMetaInfo metaInfo = new ImageMetaInfo();
        metaInfo.setWidth(decoded.getWidth());
        metaInfo.setHeight(decoded.getHeight());
        metaInfo.setSize(imageFile.length());
        metaInfo.setFormat(getFormat(imageFile));
        return metaInfo;
    } catch (Exception e) {
        LOG.error("--- 获取图片的基本信息失败 --- 错误信息: " + e.getMessage());
        return null;
    }
}
/**
 * Checks whether a file's extension matches one of the given format names.
 *
 * @param inputFile   file to check
 * @param givenFormat accepted format names, e.g. {"MP4", "AVI"}
 * @return true when the file exists and its extension is in the given list
 */
public boolean isGivenFormat(File inputFile, String[] givenFormat) {
    if (inputFile == null || !inputFile.exists()) {
        LOG.error("--- 无法检查文件类型是否满足要求,因为要检查的文件不存在 --- 源文件: " + inputFile);
        return false;
    }
    if (givenFormat == null || givenFormat.length == 0) {
        LOG.error("--- 无法检查文件类型是否满足要求,因为没有指定的文件类型 ---");
        return false;
    }
    return isLegalFormat(getFormat(inputFile), givenFormat);
}
/**
 * Runs ffmpeg's "-i" probe on a media file and returns its raw console output
 * for the metadata parsers to scrape.
 *
 * @param inputFile source media file
 * @return ffmpeg's combined output, or null when execution failed
 */
public String getMetaInfoFromFFmpeg(File inputFile) {
    if (inputFile == null || !inputFile.exists()) {
        throw new RuntimeException("源媒体文件不存在,源媒体文件路径: ");
    }
    List<String> probeArgs = new ArrayList<>(2);
    probeArgs.add("-i");
    probeArgs.add(inputFile.getAbsolutePath());
    return executeCommand(probeArgs);
}
/**
 * Case-insensitive membership test of a format name against a whitelist.
 *
 * @param format  format name to test (any case)
 * @param formats whitelist of upper-case format names
 * @return true when the upper-cased format appears in the whitelist
 */
private boolean isLegalFormat(String format, String formats[]) {
    String upper = StringUtils.upperCase(format);
    for (String candidate : formats) {
        if (candidate.equals(upper)) {
            return true;
        }
    }
    return false;
}
/**
 * Assembles a set of still images into a single looping gif.
 *
 * @param image      paths of the jpg frames, in playback order
 * @param outputPath path of the gif to produce
 * @param playTime   per-frame delay in ms (controls playback speed)
 */
private void createGifImage(String image[], String outputPath, int playTime) {
    if (null == outputPath) {
        throw new RuntimeException("转换后的GIF路径为空请检查转换后的GIF存放路径是否正确");
    }
    try {
        AnimatedGifEncoder encoder = new AnimatedGifEncoder();
        encoder.setRepeat(0); // loop forever
        encoder.start(outputPath);
        for (String framePath : image) {
            encoder.setDelay(playTime); // per-frame delay
            encoder.addFrame(ImageIO.read(new File(framePath)));
        }
        encoder.finish();
    } catch (Exception e) {
        LOG.error("--- 多张静态图转换成动态GIF图的过程出错 --- 错误信息: " + e.getMessage());
    }
}
/**
 * Returns the extension of a file (text after the final dot), used as its
 * format name throughout this class.
 *
 * @param file file to inspect
 * @return the extension without the dot, or "" when the name has no dot
 */
private String getFormat(File file) {
    String fileName = file.getName();
    // BUGFIX: the original used indexOf("."), so "archive.tar.mp4" yielded
    // "tar.mp4" and a dot-less name was returned whole; use the LAST dot and
    // treat "no dot" as "no recognizable format".
    int lastDot = fileName.lastIndexOf('.');
    if (lastDot < 0) {
        return "";
    }
    return fileName.substring(lastDot + 1);
}
/**
 * Shutdown-hook thread that destroys a still-running FFmpeg process before the
 * JVM exits, so no orphaned ffmpeg processes are left behind.
 */
private class ProcessKiller extends Thread {
    private final Process process;

    public ProcessKiller(Process process) {
        this.process = process;
    }

    @Override
    public void run() {
        process.destroy();
        LOG.info("--- 已销毁FFmpeg进程 --- 进程名: " + process.toString());
    }
}
/**
 * Drains one of the ffmpeg process's pipes (stdout or stderr) on a background
 * thread, logging each line and collecting the full text in {@link #stringBuffer}.
 * <p>
 * NOTE(review): this nested class shadows java.io.PrintStream; renaming it
 * would touch executeCommand, so it is only documented here.
 */
static class PrintStream extends Thread {
    InputStream inputStream = null;
    BufferedReader bufferedReader = null;
    // Accumulates every line read; StringBuffer because it is later read from
    // the thread that spawned this one.
    StringBuffer stringBuffer = new StringBuffer();

    public PrintStream(InputStream inputStream) {
        this.inputStream = inputStream;
    }

    @Override
    public void run() {
        try {
            if (null == inputStream) {
                LOG.error("--- 读取输出流出错!因为当前输出流为空!---");
                // BUGFIX: the original logged the problem but fell through and
                // dereferenced the null stream below, masking it with an NPE.
                return;
            }
            bufferedReader = new BufferedReader(new InputStreamReader(inputStream));
            String line = null;
            while ((line = bufferedReader.readLine()) != null) {
                LOG.info(line);
                stringBuffer.append(line);
            }
        } catch (Exception e) {
            LOG.error("--- 读取输入流出错了!--- 错误信息:" + e.getMessage());
        } finally {
            try {
                if (null != bufferedReader) {
                    bufferedReader.close();
                }
                if (null != inputStream) {
                    inputStream.close();
                }
            } catch (IOException e) {
                LOG.error("--- 调用PrintStream读取输出流后关闭流时出错---");
            }
        }
    }
}
/**
 * Holder for the {@link MediaManager} singleton instance.
 */
public static class MediaManagerBuilder {
    static MediaManager mediaManager = new MediaManager();
}
}

View File

@ -0,0 +1,43 @@
package ink.wgink.module.file.media.manager.domain;
/**
 * Basic metadata of an image: pixel dimensions, plus size/format inherited
 * from {@link MetaInfo}.
 */
public class ImageMetaInfo extends MetaInfo {

    // Image width in pixels.
    private Integer width;
    // Image height in pixels.
    private Integer height;

    public Integer getWidth() {
        return width;
    }

    public void setWidth(Integer width) {
        this.width = width;
    }

    public Integer getHeight() {
        return height;
    }

    public void setHeight(Integer height) {
        this.height = height;
    }

    /**
     * JSON-style rendering of the pixel dimensions.
     */
    @Override
    public String toString() {
        return "{\"width\":" + width
                + ",\"height\":" + height
                + '}';
    }
}

View File

@ -0,0 +1,44 @@
package ink.wgink.module.file.media.manager.domain;
/**
 * Base metadata shared by every media type (image, video, audio): storage
 * size and format name.
 */
public class MetaInfo {

    // Storage size in bytes; some systems may report 0.
    private Long size;
    // Container/encoding format name.
    private String format;

    public Long getSize() {
        return size;
    }

    public void setSize(Long size) {
        this.size = size;
    }

    /**
     * Returns the trimmed format name; never null.
     */
    public String getFormat() {
        if (format == null) {
            return "";
        }
        return format.trim();
    }

    public void setFormat(String format) {
        this.format = format;
    }

    /**
     * JSON-style rendering of size and raw (untrimmed) format.
     */
    @Override
    public String toString() {
        return "{\"size\":" + size
                + ",\"format\":\"" + format + '\"'
                + '}';
    }
}

View File

@ -0,0 +1,59 @@
package ink.wgink.module.file.media.manager.domain;
/**
 * Basic metadata of an audio track: duration, bitrate and sample rate, plus
 * size/format inherited from {@link MetaInfo}.
 */
public class MusicMetaInfo extends MetaInfo {

    // Duration in milliseconds.
    private Long duration;
    // Bitrate in kb/s (bits transferred per second).
    private Integer bitRate;
    // Sample rate in Hz (samples taken per second).
    private Long sampleRate;

    public Long getDuration() {
        return duration;
    }

    public void setDuration(Long duration) {
        this.duration = duration;
    }

    public Integer getBitRate() {
        return bitRate;
    }

    public void setBitRate(Integer bitRate) {
        this.bitRate = bitRate;
    }

    public Long getSampleRate() {
        return sampleRate;
    }

    public void setSampleRate(Long sampleRate) {
        this.sampleRate = sampleRate;
    }

    /**
     * JSON-style rendering of the audio fields.
     */
    @Override
    public String toString() {
        return "{\"duration\":" + duration
                + ",\"bitRate\":" + bitRate
                + ",\"sampleRate\":" + sampleRate
                + '}';
    }
}

View File

@ -0,0 +1,114 @@
package ink.wgink.module.file.media.manager.domain;
/**
 * Basic metadata of a video: dimensions, duration, bitrate, encoder, frame
 * rate and the embedded audio track's info, plus size/format inherited from
 * {@link MetaInfo}.
 */
public class VideoMetaInfo extends MetaInfo {

    // Frame width in pixels.
    private Integer width;
    // Frame height in pixels.
    private Integer height;
    // Duration in milliseconds.
    private Long duration;
    // Bitrate in kb/s.
    private Integer bitRate;
    // Name of the video encoder.
    private String encoder;
    // Frame rate in frames per second.
    private Float frameRate;
    // Metadata of the audio track embedded in the video.
    private MusicMetaInfo musicMetaInfo;

    public Integer getWidth() {
        return width;
    }

    public void setWidth(Integer width) {
        this.width = width;
    }

    public Integer getHeight() {
        return height;
    }

    public void setHeight(Integer height) {
        this.height = height;
    }

    public Long getDuration() {
        return duration;
    }

    public void setDuration(Long duration) {
        this.duration = duration;
    }

    public Integer getBitRate() {
        return bitRate;
    }

    public void setBitRate(Integer bitRate) {
        this.bitRate = bitRate;
    }

    /**
     * Returns the trimmed encoder name; never null.
     */
    public String getEncoder() {
        if (encoder == null) {
            return "";
        }
        return encoder.trim();
    }

    public void setEncoder(String encoder) {
        this.encoder = encoder;
    }

    public Float getFrameRate() {
        return frameRate;
    }

    public void setFrameRate(Float frameRate) {
        this.frameRate = frameRate;
    }

    public MusicMetaInfo getMusicMetaInfo() {
        return musicMetaInfo;
    }

    public void setMusicMetaInfo(MusicMetaInfo musicMetaInfo) {
        this.musicMetaInfo = musicMetaInfo;
    }

    /**
     * JSON-style rendering of all video fields (encoder is the raw, untrimmed value).
     */
    @Override
    public String toString() {
        return "{\"width\":" + width
                + ",\"height\":" + height
                + ",\"duration\":" + duration
                + ",\"bitRate\":" + bitRate
                + ",\"encoder\":\"" + encoder + '\"'
                + ",\"frameRate\":" + frameRate
                + ",\"musicMetaInfo\":" + musicMetaInfo
                + '}';
    }
}

View File

@ -0,0 +1,81 @@
package ink.wgink.module.file.media.manager.domain.enums;
import java.util.HashSet;
import java.util.Set;
/**
 * CRF (Constant Rate Factor) quality values to pass to the x264 encoder when
 * compressing video. In this table a lower code corresponds to higher quality
 * (23 = high, 28 = low).
 */
public enum CrfValueEnum {

    LOW_QUALITY("低质量", 28),
    MEDIUM_QUALITY("中等质量", 26),
    HIGH_QUALITY("高质量", 23);

    private String name;
    private Integer code;

    /** All declared codes, cached once for O(1) membership checks. */
    private static Set<Integer> TYPE_CODE_SET = new HashSet<>();

    static {
        // values() never returns null, so the original null guard was dead code.
        for (CrfValueEnum type : CrfValueEnum.values()) {
            TYPE_CODE_SET.add(type.getCode());
        }
    }

    CrfValueEnum(String typeName, Integer typeCode) {
        this.name = typeName;
        this.code = typeCode;
    }

    /**
     * Checks whether a code belongs to one of the declared constants.
     *
     * @param typeCode candidate CRF code (null yields false)
     * @return true when the code is declared
     */
    public static boolean isValid(Integer typeCode) {
        return TYPE_CODE_SET.contains(typeCode);
    }

    /**
     * Resolves a code to its enum constant.
     *
     * @param typeCode candidate CRF code
     * @return the matching constant, or null when the code is unknown
     */
    public static CrfValueEnum convertoEnum(Integer typeCode) {
        if (!isValid(typeCode)) {
            return null;
        }
        for (CrfValueEnum type : CrfValueEnum.values()) {
            if (typeCode.equals(type.getCode())) {
                return type;
            }
        }
        return null;
    }

    /**
     * @param typeCode candidate CRF code
     * @return true when this constant's code equals the argument
     */
    public boolean isEqual(Integer typeCode) {
        if (typeCode == null) {
            return false;
        }
        return this.getCode().equals(typeCode);
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Integer getCode() {
        return code;
    }

    public void setCode(Integer code) {
        this.code = code;
    }

    public static Set<Integer> getTypeCodeSet() {
        return TYPE_CODE_SET;
    }

    // NOTE(review): replacing the shared static set at runtime changes isValid()
    // for every caller; this setter is kept only for backward compatibility.
    public static void setTypeCodeSet(Set<Integer> typeCodeSet) {
        TYPE_CODE_SET = typeCodeSet;
    }
}

View File

@ -0,0 +1,83 @@
package ink.wgink.module.file.media.manager.domain.enums;
import java.util.HashSet;
import java.util.Set;
/**
 * x264 preset values to pass to the encoder when compressing video. Faster
 * presets compress quicker but yield a lower compression ratio.
 * <p>
 * NOTE(review): the type name misspells "Value" as "Vaule"; kept as-is because
 * renaming would break existing callers.
 */
public enum PresetVauleEnum {

    MAX_FAST_ZIP_SPEED("最快压缩速度,最低压缩率", "faster"),
    SECOND_FAST_ZIP_SPEED("第二快的压缩速度", "fast"),
    MEDIUM_ZIP_SPEED("中等压缩速度", "medium"),
    SLOW_ZIP_SPEED("低压缩速度", "slow"),
    SLOWER_ZIP_SPEED("最慢压缩速度", "slower");

    private String presetName;
    private String presetValue;

    /** All declared preset values, cached once for O(1) membership checks. */
    private static Set<String> TYPE_VALUE_SET = new HashSet<>();

    static {
        // values() never returns null, so the original null guard was dead code.
        for (PresetVauleEnum type : PresetVauleEnum.values()) {
            TYPE_VALUE_SET.add(type.getPresetValue());
        }
    }

    PresetVauleEnum(String presetName, String presetValue) {
        this.presetName = presetName;
        this.presetValue = presetValue;
    }

    /**
     * Checks whether a preset string belongs to one of the declared constants.
     *
     * @param typeValue candidate preset value (null yields false)
     * @return true when the value is declared
     */
    public static boolean isValid(String typeValue) {
        return TYPE_VALUE_SET.contains(typeValue);
    }

    /**
     * Resolves a preset string to its enum constant.
     *
     * @param typeValue candidate preset value
     * @return the matching constant, or null when the value is unknown
     */
    public static PresetVauleEnum convertoEnum(String typeValue) {
        if (!isValid(typeValue)) {
            return null;
        }
        for (PresetVauleEnum type : PresetVauleEnum.values()) {
            if (typeValue.equals(type.getPresetValue())) {
                return type;
            }
        }
        return null;
    }

    /**
     * @param typeValue candidate preset value
     * @return true when this constant's preset value equals the argument
     */
    public boolean isEqual(String typeValue) {
        if (typeValue == null) {
            return false;
        }
        return this.getPresetValue().equals(typeValue);
    }

    public String getPresetName() {
        return presetName;
    }

    public void setPresetName(String presetName) {
        this.presetName = presetName;
    }

    public String getPresetValue() {
        return presetValue;
    }

    public void setPresetValue(String presetValue) {
        this.presetValue = presetValue;
    }

    public static Set<String> getTypeValueSet() {
        return TYPE_VALUE_SET;
    }

    // NOTE(review): replacing the shared static set at runtime changes isValid()
    // for every caller; this setter is kept only for backward compatibility.
    public static void setTypeValueSet(Set<String> typeValueSet) {
        TYPE_VALUE_SET = typeValueSet;
    }
}

View File

@ -0,0 +1,449 @@
package ink.wgink.module.file.media.manager.domain.gif;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.awt.image.DataBufferByte;
import java.io.BufferedOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.math.BigDecimal;
/**
 * Encodes a sequence of {@link BufferedImage} frames into an animated GIF
 * (GIF89a) written to an OutputStream or a named file.
 * <p>
 * Typical usage: {@code start(...)}, optionally {@code setDelay(...)} /
 * {@code setRepeat(...)} / {@code setQuality(...)}, then {@code addFrame(...)}
 * per frame, and finally {@code finish()}.
 * <p>
 * NOTE(review): instances hold per-encode mutable state, so they do not look
 * safe to share between threads — confirm before concurrent use.
 */
public class AnimatedGifEncoder {
    protected int width; // image size
    protected int height;
    protected Color transparent = null; // transparent color if given
    protected int transIndex; // transparent index in color table
    protected int repeat = -1; // no repeat
    protected int delay = 0; // frame delay (hundredths)
    protected boolean started = false; // ready to output frames
    protected OutputStream out;
    protected BufferedImage image; // current frame
    protected byte[] pixels; // BGR byte array from frame
    protected byte[] indexedPixels; // converted frame indexed to palette
    protected int colorDepth; // number of bit planes
    protected byte[] colorTab; // RGB palette
    protected boolean[] usedEntry = new boolean[256]; // active palette entries
    protected int palSize = 7; // color table size (bits-1)
    protected int dispose = -1; // disposal code (-1 = use default)
    protected boolean closeStream = false; // close stream when finished
    protected boolean firstFrame = true;
    protected boolean sizeSet = false; // if false, get size from first frame
    protected int sample = 10; // default sample interval for quantizer
    protected FileOutputStream fileOutputStream;
    /**
     * Sets the delay time between each frame, or changes it for subsequent
     * frames (applies to last frame added).
     *
     * @param ms
     *            int delay time in milliseconds
     */
    public void setDelay(int ms) {
        // GIF stores delay in hundredths of a second; convert ms -> 1/100 s
        BigDecimal result = BigDecimal.valueOf(ms).divide(BigDecimal.valueOf(10.0f));
        delay = Math.round(result.floatValue());
    }
    /**
     * Sets the GIF frame disposal code for the last added frame and any
     * subsequent frames. Default is 0 if no transparent color has been set,
     * otherwise 2.
     *
     * @param code
     *            int disposal code.
     */
    public void setDispose(int code) {
        if (code >= 0) {
            dispose = code;
        }
    }
    /**
     * Sets the number of times the set of GIF frames should be played. Default
     * is 1; 0 means play indefinitely. Must be invoked before the first image
     * is added.
     *
     * @param iter
     *            int number of iterations.
     * @return
     */
    public void setRepeat(int iter) {
        if (iter >= 0) {
            repeat = iter;
        }
    }
    /**
     * Sets the transparent color for the last added frame and any subsequent
     * frames. Since all colors are subject to modification in the quantization
     * process, the color in the final palette for each frame closest to the
     * given color becomes the transparent color for that frame. May be set to
     * null to indicate no transparent color.
     *
     * @param c
     *            Color to be treated as transparent on display.
     */
    public void setTransparent(Color c) {
        transparent = c;
    }
    /**
     * Adds next GIF frame. The frame is not written immediately, but is
     * actually deferred until the next frame is received so that timing data
     * can be inserted. Invoking <code>finish()</code> flushes all frames. If
     * <code>setSize</code> was not invoked, the size of the first image is used
     * for all subsequent frames.
     *
     * @param im
     *            BufferedImage containing frame to write.
     * @return true if successful.
     */
    public boolean addFrame(BufferedImage im) {
        if ((im == null) || !started) {
            return false;
        }
        boolean ok = true;
        try {
            if (!sizeSet) {
                // use first frame's size
                setSize(im.getWidth(), im.getHeight());
            }
            image = im;
            getImagePixels(); // convert to correct format if necessary
            analyzePixels(); // build color table & map pixels
            if (firstFrame) {
                writeLSD(); // logical screen descriptior
                writePalette(); // global color table
                if (repeat >= 0) {
                    // use NS app extension to indicate reps
                    writeNetscapeExt();
                }
            }
            writeGraphicCtrlExt(); // write graphic control extension
            writeImageDesc(); // image descriptor
            if (!firstFrame) {
                writePalette(); // local color table
            }
            writePixels(); // encode and write pixel data
            firstFrame = false;
        } catch (IOException e) {
            ok = false;
        }
        return ok;
    }
    /**
     * Flushes any pending data and closes output file. If writing to an
     * OutputStream, the stream is not closed.
     *
     * @return true if successful; false when not started or on I/O failure
     */
    public boolean finish() {
        if (!started)
            return false;
        boolean ok = true;
        started = false;
        try {
            out.write(0x3b); // gif trailer
            out.flush();
            if (closeStream) {
                out.close();
            }
        } catch (IOException e) {
            ok = false;
        }
        // reset for subsequent use
        transIndex = 0;
        out = null;
        image = null;
        pixels = null;
        indexedPixels = null;
        colorTab = null;
        closeStream = false;
        firstFrame = true;
        return ok;
    }
    /**
     * Sets frame rate in frames per second. Equivalent to
     * <code>setDelay(1000/fps)</code>.
     *
     * @param fps
     *            float frame rate (frames per second)
     */
    public void setFrameRate(float fps) {
        if ((int)fps != 0) {
            // delay is stored in hundredths of a second: 100 / fps
            BigDecimal result = BigDecimal.valueOf(100f).divide(BigDecimal.valueOf(fps));
            delay = Math.round(result.floatValue());
        }
    }
    /**
     * Sets quality of color quantization (conversion of images to the maximum
     * 256 colors allowed by the GIF specification). Lower values (minimum = 1)
     * produce better colors, but slow processing significantly. 10 is the
     * default, and produces good color mapping at reasonable speeds. Values
     * greater than 20 do not yield significant improvements in speed.
     *
     * @param quality
     *            int greater than 0.
     * @return
     */
    public void setQuality(int quality) {
        if (quality < 1)
            quality = 1;
        sample = quality;
    }
    /**
     * Sets the GIF frame size. The default size is the size of the first frame
     * added if this method is not invoked.
     *
     * @param w
     *            int frame width.
     * @param h
     *            int frame width.
     */
    public void setSize(int w, int h) {
        if (started && !firstFrame)
            return;
        width = w;
        height = h;
        if (width < 1)
            width = 320;
        if (height < 1)
            height = 240;
        sizeSet = true;
    }
    /**
     * Initiates GIF file creation on the given stream. The stream is not closed
     * automatically.
     *
     * @param os
     *            OutputStream on which GIF images are written.
     * @return false if initial write failed.
     */
    public boolean start(OutputStream os) {
        if (os == null)
            return false;
        boolean ok = true;
        closeStream = false;
        out = os;
        try {
            writeString("GIF89a"); // header
        } catch (IOException e) {
            ok = false;
        }
        return started = ok;
    }
    /**
     * Initiates writing of a GIF file with the specified name.
     *
     * @param file
     *            String containing output file name.
     * @return false if open or initial write failed.
     */
    public boolean start(String file){
        boolean ok = true;
        try {
            fileOutputStream = new FileOutputStream(file);
            out = new BufferedOutputStream(fileOutputStream);
            ok = start(out);
            closeStream = true; // we opened the file, so finish() closes it
        } catch (IOException e) {
            ok = false;
        }
        return started = ok;
    }
    /**
     * Analyzes image colors and creates color map.
     */
    protected void analyzePixels() {
        int len = pixels.length;
        int nPix = len / 3; // 3 bytes (BGR) per pixel
        indexedPixels = new byte[nPix];
        NeuQuant nq = new NeuQuant(pixels, len, sample);
        // initialize quantizer
        colorTab = nq.process(); // create reduced palette
        // convert map from BGR to RGB
        for (int i = 0; i < colorTab.length; i += 3) {
            byte temp = colorTab[i];
            colorTab[i] = colorTab[i + 2];
            colorTab[i + 2] = temp;
            usedEntry[i / 3] = false;
        }
        // map image pixels to new palette
        int k = 0;
        for (int i = 0; i < nPix; i++) {
            int index = nq.map(pixels[k++] & 0xff, pixels[k++] & 0xff, pixels[k++] & 0xff);
            usedEntry[index] = true;
            indexedPixels[i] = (byte) index;
        }
        pixels = null;
        colorDepth = 8;
        palSize = 7;
        // get closest match to transparent color if specified
        if (transparent != null) {
            transIndex = findClosest(transparent);
        }
    }
    /**
     * Returns index of palette color closest to c
     * (minimum squared RGB distance over entries actually used by the frame).
     */
    protected int findClosest(Color c) {
        if (colorTab == null)
            return -1;
        int r = c.getRed();
        int g = c.getGreen();
        int b = c.getBlue();
        int minpos = 0;
        int dmin = 256 * 256 * 256;
        int len = colorTab.length;
        for (int i = 0; i < len;) {
            int dr = r - (colorTab[i++] & 0xff);
            int dg = g - (colorTab[i++] & 0xff);
            int db = b - (colorTab[i] & 0xff);
            int d = dr * dr + dg * dg + db * db;
            int index = i / 3;
            if (usedEntry[index] && (d < dmin)) {
                dmin = d;
                minpos = index;
            }
            i++;
        }
        return minpos;
    }
    /**
     * Extracts image pixels into byte array "pixels"
     */
    protected void getImagePixels() {
        int w = image.getWidth();
        int h = image.getHeight();
        int type = image.getType();
        if ((w != width) || (h != height) || (type != BufferedImage.TYPE_3BYTE_BGR)) {
            // create new image with right size/format
            BufferedImage temp = new BufferedImage(width, height, BufferedImage.TYPE_3BYTE_BGR);
            Graphics2D g = temp.createGraphics();
            g.drawImage(image, 0, 0, null);
            image = temp;
        }
        pixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    }
    /**
     * Writes Graphic Control Extension
     */
    protected void writeGraphicCtrlExt() throws IOException {
        out.write(0x21); // extension introducer
        out.write(0xf9); // GCE label
        out.write(4); // data block size
        int transp, disp;
        if (transparent == null) {
            transp = 0;
            disp = 0; // dispose = no action
        } else {
            transp = 1;
            disp = 2; // force clear if using transparent color
        }
        if (dispose >= 0) {
            disp = dispose & 7; // user override
        }
        disp <<= 2;
        // packed fields
        out.write(0 | disp | transp); // 8 transparency flag
        writeShort(delay); // delay x 1/100 sec
        out.write(transIndex); // transparent color index
        out.write(0); // block terminator
    }
    /**
     * Writes Image Descriptor
     */
    protected void writeImageDesc() throws IOException {
        out.write(0x2c); // image separator
        writeShort(0); // image position x,y = 0,0
        writeShort(0);
        writeShort(width); // image size
        writeShort(height);
        // packed fields
        if (firstFrame) {
            // no LCT - GCT is used for first (or only) frame
            out.write(0);
        } else {
            // specify normal LCT
            out.write(0x80 | // 1 local color table 1=yes
                    palSize); // 6-8 size of color table
        }
    }
    /**
     * Writes Logical Screen Descriptor
     */
    protected void writeLSD() throws IOException {
        // logical screen size
        writeShort(width);
        writeShort(height);
        // packed fields
        out.write((0x80 | // 1 : global color table flag = 1 (gct used)
                0x70 | // 2-4 : color resolution = 7
                0x00 | // 5 : gct sort flag = 0
                palSize)); // 6-8 : gct size
        out.write(0); // background color index
        out.write(0); // pixel aspect ratio - assume 1:1
    }
    /**
     * Writes Netscape application extension to define repeat count.
     */
    protected void writeNetscapeExt() throws IOException {
        out.write(0x21); // extension introducer
        out.write(0xff); // app extension label
        out.write(11); // block size
        writeString("NETSCAPE" + "2.0"); // app id + auth code
        out.write(3); // sub-block size
        out.write(1); // loop sub-block id
        writeShort(repeat); // loop count (extra iterations, 0=repeat forever)
        out.write(0); // block terminator
    }
    /**
     * Writes color table
     */
    protected void writePalette() throws IOException {
        out.write(colorTab, 0, colorTab.length);
        // pad the table to a full 256 RGB entries with zero bytes
        int n = (3 * 256) - colorTab.length;
        for (int i = 0; i < n; i++) {
            out.write(0);
        }
    }
    /**
     * Encodes and writes pixel data
     */
    protected void writePixels() throws IOException {
        LZWEncoder encoder = new LZWEncoder(width, height, indexedPixels, colorDepth);
        encoder.encode(out);
    }
    /**
     * Write 16-bit value to output stream, LSB first
     */
    protected void writeShort(int value) throws IOException {
        out.write(value & 0xff);
        out.write((value >> 8) & 0xff);
    }
    /**
     * Writes string to output stream
     */
    protected void writeString(String s) throws IOException {
        // GIF headers are ASCII; write the low byte of each char
        for (int i = 0; i < s.length(); i++) {
            out.write((byte) s.charAt(i));
        }
    }
}

View File

@ -0,0 +1,240 @@
package ink.wgink.module.file.media.manager.domain.gif;
import java.io.IOException;
import java.io.OutputStream;
/**
 * LZW compressor for GIF pixel data, used when composing multiple static
 * images into an animated GIF. Adapted from the classic GIFCOMPR.C
 * 'compress'-based implementation (see in-line attribution below).
 */
public class LZWEncoder {
    private static final int EOF = -1;
    private int imgW, imgH;
    private byte[] pixAry;
    private int initCodeSize;
    private int remaining;
    private int curPixel;
    // GIFCOMPR.C - GIF Image compression routines
    //
    // Lempel-Ziv compression based on 'compress'. GIF modifications by
    // David Rowley (mgardi@watdcsu.waterloo.edu)
    // General DEFINEs
    static final int BITS = 12;
    static final int HSIZE = 5003; // 80% occupancy
    // GIF Image compression - modified 'compress'
    //
    // Based on: compress.c - File compression ala IEEE Computer, June 1984.
    //
    // By Authors: Spencer W. Thomas (decvax!harpo!utah-cs!utah-gr!thomas)
    // Jim McKie (decvax!mcvax!jim)
    // Steve Davies (decvax!vax135!petsd!peora!srd)
    // Ken Turkowski (decvax!decwrl!turtlevax!ken)
    // James A. Woods (decvax!ihnp4!ames!jaw)
    // Joe Orost (decvax!vax135!petsd!joe)
    int n_bits; // number of bits/code
    int maxbits = BITS; // user settable max # bits/code
    int maxcode; // maximum code, given n_bits
    int maxmaxcode = 1 << BITS; // should NEVER generate this code
    int[] htab = new int[HSIZE];
    int[] codetab = new int[HSIZE];
    int hsize = HSIZE; // for dynamic table sizing
    int free_ent = 0; // first unused entry
    // block compression parameters -- after all codes are used up,
    // and compression rate changes, start over.
    boolean clear_flg = false;
    // Algorithm: use open addressing double hashing (no chaining) on the
    // prefix code / next character combination. We do a variant of Knuth's
    // algorithm D (vol. 3, sec. 6.4) along with G. Knott's relatively-prime
    // secondary probe. Here, the modular division first probe is gives way
    // to a faster exclusive-or manipulation. Also do block compression with
    // an adaptive reset, whereby the code table is cleared when the compression
    // ratio decreases, but after the table fills. The variable-length output
    // codes are re-sized at this point, and a special CLEAR code is generated
    // for the decompressor. Late addition: construct the table according to
    // file size for noticeable speed improvement on small files. Please direct
    // questions about this implementation to ames!jaw.
    int g_init_bits;
    int ClearCode;
    int EOFCode;
    // output
    //
    // Output the given code.
    // Inputs:
    // code: A n_bits-bit integer. If == -1, then EOF. This assumes
    // that n_bits =< wordsize - 1.
    // Outputs:
    // Outputs code to the file.
    // Assumptions:
    // Chars are 8 bits long.
    // Algorithm:
    // Maintain a BITS character long buffer (so that 8 codes will
    // fit in it exactly). Use the VAX insv instruction to insert each
    // code in turn. When the buffer fills up empty it and start over.
    int cur_accum = 0;
    int cur_bits = 0;
    int masks[] = { 0x0000, 0x0001, 0x0003, 0x0007, 0x000F, 0x001F, 0x003F, 0x007F, 0x00FF, 0x01FF, 0x03FF, 0x07FF,
            0x0FFF, 0x1FFF, 0x3FFF, 0x7FFF, 0xFFFF };
    // Number of characters so far in this 'packet'
    int a_count;
    // Define the storage for the packet accumulator
    byte[] accum = new byte[256];
    // ----------------------------------------------------------------------------
    /**
     * @param width image width in pixels
     * @param height image height in pixels
     * @param pixels palette-indexed pixel bytes, row-major
     * @param color_depth bits per pixel (minimum initial code size of 2)
     */
    LZWEncoder(int width, int height, byte[] pixels, int color_depth) {
        imgW = width;
        imgH = height;
        pixAry = pixels;
        initCodeSize = Math.max(2, color_depth);
    }
    // Add a character to the end of the current packet, and if it is 254
    // characters, flush the packet to disk.
    void char_out(byte c, OutputStream outs) throws IOException {
        accum[a_count++] = c;
        if (a_count >= 254)
            flush_char(outs);
    }
    // Clear out the hash table
    // table clear for block compress
    void cl_block(OutputStream outs) throws IOException {
        cl_hash(hsize);
        free_ent = ClearCode + 2;
        clear_flg = true;
        output(ClearCode, outs);
    }
    // reset code table
    void cl_hash(int hsize) {
        for (int i = 0; i < hsize; ++i)
            htab[i] = -1;
    }
    // Core LZW compression loop: reads pixels via nextPixel() and emits
    // variable-width codes to outs until EOF.
    void compress(int init_bits, OutputStream outs) throws IOException {
        int fcode;
        int i /* = 0 */;
        int c;
        int ent;
        int disp;
        int hsize_reg;
        int hshift;
        // Set up the globals: g_init_bits - initial number of bits
        g_init_bits = init_bits;
        // Set up the necessary values
        clear_flg = false;
        n_bits = g_init_bits;
        maxcode = MAXCODE(n_bits);
        ClearCode = 1 << (init_bits - 1);
        EOFCode = ClearCode + 1;
        free_ent = ClearCode + 2;
        a_count = 0; // clear packet
        ent = nextPixel();
        hshift = 0;
        for (fcode = hsize; fcode < 65536; fcode *= 2)
            ++hshift;
        hshift = 8 - hshift; // set hash code range bound
        hsize_reg = hsize;
        cl_hash(hsize_reg); // clear hash table
        output(ClearCode, outs);
        outer_loop: while ((c = nextPixel()) != EOF) {
            fcode = (c << maxbits) + ent;
            i = (c << hshift) ^ ent; // xor hashing
            if (htab[i] == fcode) {
                ent = codetab[i];
                continue;
            } else if (htab[i] >= 0) // non-empty slot
            {
                disp = hsize_reg - i; // secondary hash (after G. Knott)
                if (i == 0)
                    disp = 1;
                do {
                    if ((i -= disp) < 0)
                        i += hsize_reg;
                    if (htab[i] == fcode) {
                        ent = codetab[i];
                        continue outer_loop;
                    }
                } while (htab[i] >= 0);
            }
            output(ent, outs);
            ent = c;
            if (free_ent < maxmaxcode) {
                codetab[i] = free_ent++; // code -> hashtable
                htab[i] = fcode;
            } else
                cl_block(outs);
        }
        // Put out the final code.
        output(ent, outs);
        output(EOFCode, outs);
    }
    // ----------------------------------------------------------------------------
    // Writes the full GIF image-data sub-block stream for this image.
    void encode(OutputStream os) throws IOException {
        os.write(initCodeSize); // write "initial code size" byte
        remaining = imgW * imgH; // reset navigation variables
        curPixel = 0;
        compress(initCodeSize + 1, os); // compress and write the pixel data
        os.write(0); // write block terminator
    }
    // Flush the packet to disk, and reset the accumulator
    void flush_char(OutputStream outs) throws IOException {
        if (a_count > 0) {
            outs.write(a_count);
            outs.write(accum, 0, a_count);
            a_count = 0;
        }
    }
    final int MAXCODE(int n_bits) {
        return (1 << n_bits) - 1;
    }
    // ----------------------------------------------------------------------------
    // Return the next pixel from the image
    // ----------------------------------------------------------------------------
    private int nextPixel() {
        if (remaining == 0)
            return EOF;
        --remaining;
        byte pix = pixAry[curPixel++];
        return pix & 0xff;
    }
    // Packs a variable-width code into the bit accumulator and emits whole
    // bytes; also grows the code width when the table fills.
    void output(int code, OutputStream outs) throws IOException {
        cur_accum &= masks[cur_bits];
        if (cur_bits > 0)
            cur_accum |= (code << cur_bits);
        else
            cur_accum = code;
        cur_bits += n_bits;
        while (cur_bits >= 8) {
            char_out((byte) (cur_accum & 0xff), outs);
            cur_accum >>= 8;
            cur_bits -= 8;
        }
        // If the next entry is going to be too big for the code size,
        // then increase it, if possible.
        if (free_ent > maxcode || clear_flg) {
            if (clear_flg) {
                maxcode = MAXCODE(n_bits = g_init_bits);
                clear_flg = false;
            } else {
                ++n_bits;
                if (n_bits == maxbits)
                    maxcode = maxmaxcode;
                else
                    maxcode = MAXCODE(n_bits);
            }
        }
        if (code == EOFCode) {
            // At EOF, write the rest of the buffer.
            while (cur_bits > 0) {
                char_out((byte) (cur_accum & 0xff), outs);
                cur_accum >>= 8;
                cur_bits -= 8;
            }
            flush_char(outs);
        }
    }
}

View File

@ -0,0 +1,426 @@
package ink.wgink.module.file.media.manager.domain.gif;
/**
* 多张静态图片合成动态gif图工具类
*/
public class NeuQuant {
protected static final int netsize = 256; /* number of colours used */
/* four primes near 500 - assume no image has a length so large */
/* that it is divisible by all four primes */
protected static final int prime1 = 499;
protected static final int prime2 = 491;
protected static final int prime3 = 487;
protected static final int prime4 = 503;
protected static final int minpicturebytes = (3 * prime4);
/* minimum size for input image */
/*
* Program Skeleton ---------------- [select samplefac in range 1..30] [read
* image from input file] pic = (unsigned char*) malloc(3*width*height);
* initnet(pic,3*width*height,samplefac); learn(); unbiasnet(); [write
* output image header, using writecolourmap(f)] inxbuild(); write output
* image using inxsearch(b,g,r)
*/
/*
* Network Definitions -------------------
*/
protected static final int maxnetpos = (netsize - 1);
protected static final int netbiasshift = 4; /* bias for colour values */
protected static final int ncycles = 100; /* no. of learning cycles */
/* defs for freq and bias */
protected static final int intbiasshift = 16; /* bias for fractions */
protected static final int intbias = (((int) 1) << intbiasshift);
protected static final int gammashift = 10; /* gamma = 1024 */
protected static final int gamma = (((int) 1) << gammashift);
protected static final int betashift = 10;
protected static final int beta = (intbias >> betashift); /*
* beta = 1/1024
*/
protected static final int betagamma = (intbias << (gammashift - betashift));
/* defs for decreasing radius factor */
protected static final int initrad = (netsize >> 3); /*
* for 256 cols, radius
* starts
*/
protected static final int radiusbiasshift = 6; /*
* at 32.0 biased by 6 bits
*/
protected static final int radiusbias = (((int) 1) << radiusbiasshift);
protected static final int initradius = (initrad
* radiusbias); /* and decreases by a */
protected static final int radiusdec = 30; /* factor of 1/30 each cycle */
/* defs for decreasing alpha factor */
protected static final int alphabiasshift = 10; /* alpha starts at 1.0 */
protected static final int initalpha = (((int) 1) << alphabiasshift);
protected int alphadec; /* biased by 10 bits */
/* radbias and alpharadbias used for radpower calculation */
protected static final int radbiasshift = 8;
protected static final int radbias = (((int) 1) << radbiasshift);
protected static final int alpharadbshift = (alphabiasshift + radbiasshift);
protected static final int alpharadbias = (((int) 1) << alpharadbshift);
/*
* Types and Global Variables --------------------------
*/
protected byte[] thepicture; /* the input image itself */
protected int lengthcount; /* lengthcount = H*W*3 */
protected int samplefac; /* sampling factor 1..30 */
// typedef int pixel[4]; /* BGRc */
protected int[][] network; /* the network itself - [netsize][4] */
protected int[] netindex = new int[256];
/* for network lookup - really 256 */
protected int[] bias = new int[netsize];
/* bias and freq arrays for learning */
protected int[] freq = new int[netsize];
protected int[] radpower = new int[initrad];
/* radpower for precomputation */
/*
* Initialise network in range (0,0,0) to (255,255,255) and set parameters
* -----------------------------------------------------------------------
*/
public NeuQuant(byte[] thepic, int len, int sample) {
int i;
int[] p;
thepicture = thepic;
lengthcount = len;
samplefac = sample;
network = new int[netsize][];
for (i = 0; i < netsize; i++) {
network[i] = new int[4];
p = network[i];
p[0] = p[1] = p[2] = (i << (netbiasshift + 8)) / netsize;
freq[i] = intbias / netsize; /* 1/netsize */
bias[i] = 0;
}
}
public byte[] colorMap() {
byte[] map = new byte[3 * netsize];
int[] index = new int[netsize];
for (int i = 0; i < netsize; i++)
index[network[i][3]] = i;
int k = 0;
for (int i = 0; i < netsize; i++) {
int j = index[i];
map[k++] = (byte) (network[j][0]);
map[k++] = (byte) (network[j][1]);
map[k++] = (byte) (network[j][2]);
}
return map;
}
/*
* Insertion sort of network and building of netindex[0..255] (to do after
* unbias)
* -------------------------------------------------------------------------
* ------
*/
public void inxbuild() {
int i, j, smallpos, smallval;
int[] p;
int[] q;
int previouscol, startpos;
previouscol = 0;
startpos = 0;
for (i = 0; i < netsize; i++) {
p = network[i];
smallpos = i;
smallval = p[1]; /* index on g */
/* find smallest in i..netsize-1 */
for (j = i + 1; j < netsize; j++) {
q = network[j];
if (q[1] < smallval) { /* index on g */
smallpos = j;
smallval = q[1]; /* index on g */
}
}
q = network[smallpos];
/* swap p (i) and q (smallpos) entries */
if (i != smallpos) {
j = q[0];
q[0] = p[0];
p[0] = j;
j = q[1];
q[1] = p[1];
p[1] = j;
j = q[2];
q[2] = p[2];
p[2] = j;
j = q[3];
q[3] = p[3];
p[3] = j;
}
/* smallval entry is now in position i */
if (smallval != previouscol) {
netindex[previouscol] = (startpos + i) >> 1;
for (j = previouscol + 1; j < smallval; j++)
netindex[j] = i;
previouscol = smallval;
startpos = i;
}
}
netindex[previouscol] = (startpos + maxnetpos) >> 1;
for (j = previouscol + 1; j < 256; j++)
netindex[j] = maxnetpos; /* really 256 */
}
/*
* Main Learning Loop ------------------
*/
public void learn() {
int i, j, b, g, r;
int radius, rad, alpha, step, delta, samplepixels;
byte[] p;
int pix, lim;
if (lengthcount < minpicturebytes)
samplefac = 1;
alphadec = 30 + ((samplefac - 1) / 3);
p = thepicture;
pix = 0;
lim = lengthcount;
samplepixels = lengthcount / (3 * samplefac);
delta = samplepixels / ncycles;
alpha = initalpha;
radius = initradius;
rad = radius >> radiusbiasshift;
if (rad <= 1)
rad = 0;
for (i = 0; i < rad; i++)
radpower[i] = alpha * (((rad * rad - i * i) * radbias) / (rad * rad));
// fprintf(stderr,"beginning 1D learning: initial radius=%d/n", rad);
if (lengthcount < minpicturebytes)
step = 3;
else if ((lengthcount % prime1) != 0)
step = 3 * prime1;
else {
if ((lengthcount % prime2) != 0)
step = 3 * prime2;
else {
if ((lengthcount % prime3) != 0)
step = 3 * prime3;
else
step = 3 * prime4;
}
}
i = 0;
while (i < samplepixels) {
b = (p[pix + 0] & 0xff) << netbiasshift;
g = (p[pix + 1] & 0xff) << netbiasshift;
r = (p[pix + 2] & 0xff) << netbiasshift;
j = contest(b, g, r);
altersingle(alpha, j, b, g, r);
if (rad != 0)
alterneigh(rad, j, b, g, r); /* alter neighbours */
pix += step;
if (pix >= lim)
pix -= lengthcount;
i++;
if (delta == 0)
delta = 1;
if (i % delta == 0) {
alpha -= alpha / alphadec;
radius -= radius / radiusdec;
rad = radius >> radiusbiasshift;
if (rad <= 1)
rad = 0;
for (j = 0; j < rad; j++)
radpower[j] = alpha * (((rad * rad - j * j) * radbias) / (rad * rad));
}
}
// fprintf(stderr,"finished 1D learning: final alpha=%f
// !/n",((float)alpha)/initalpha);
}
/*
* Search for BGR values 0..255 (after net is unbiased) and return colour
* index
* -------------------------------------------------------------------------
* ---
*/
public int map(int b, int g, int r) {
int i, j, dist, a, bestd;
int[] p;
int best;
bestd = 1000; /* biggest possible dist is 256*3 */
best = -1;
i = netindex[g]; /* index on g */
j = i - 1; /* start at netindex[g] and work outwards */
while ((i < netsize) || (j >= 0)) {
if (i < netsize) {
p = network[i];
dist = p[1] - g; /* inx key */
if (dist >= bestd)
i = netsize; /* stop iter */
else {
i++;
if (dist < 0)
dist = -dist;
a = p[0] - b;
if (a < 0)
a = -a;
dist += a;
if (dist < bestd) {
a = p[2] - r;
if (a < 0)
a = -a;
dist += a;
if (dist < bestd) {
bestd = dist;
best = p[3];
}
}
}
}
if (j >= 0) {
p = network[j];
dist = g - p[1]; /* inx key - reverse dif */
if (dist >= bestd)
j = -1; /* stop iter */
else {
j--;
if (dist < 0)
dist = -dist;
a = p[0] - b;
if (a < 0)
a = -a;
dist += a;
if (dist < bestd) {
a = p[2] - r;
if (a < 0)
a = -a;
dist += a;
if (dist < bestd) {
bestd = dist;
best = p[3];
}
}
}
}
}
return (best);
}
public byte[] process() {
learn();
unbiasnet();
inxbuild();
return colorMap();
}
/*
* Unbias network to give byte values 0..255 and record position i to
* prepare for sort
* -------------------------------------------------------------------------
* ----------
*/
public void unbiasnet() {
int i, j;
for (i = 0; i < netsize; i++) {
network[i][0] >>= netbiasshift;
network[i][1] >>= netbiasshift;
network[i][2] >>= netbiasshift;
network[i][3] = i; /* record colour no */
}
}
/*
* Move adjacent neurons by precomputed alpha*(1-((i-j)^2/[r]^2)) in
* radpower[|i-j|]
* -------------------------------------------------------------------------
* --------
*/
protected void alterneigh(int rad, int i, int b, int g, int r) {
int j, k, lo, hi, a, m;
int[] p;
lo = i - rad;
if (lo < -1)
lo = -1;
hi = i + rad;
if (hi > netsize)
hi = netsize;
j = i + 1;
k = i - 1;
m = 1;
while ((j < hi) || (k > lo)) {
a = radpower[m++];
if (j < hi) {
p = network[j++];
try {
p[0] -= (a * (p[0] - b)) / alpharadbias;
p[1] -= (a * (p[1] - g)) / alpharadbias;
p[2] -= (a * (p[2] - r)) / alpharadbias;
} catch (Exception e) {
} // prevents 1.3 miscompilation
}
if (k > lo) {
p = network[k--];
try {
p[0] -= (a * (p[0] - b)) / alpharadbias;
p[1] -= (a * (p[1] - g)) / alpharadbias;
p[2] -= (a * (p[2] - r)) / alpharadbias;
} catch (Exception e) {
}
}
}
}
/*
 * Move neuron i towards biased (b,g,r) by factor alpha
 * ----------------------------------------------------
 */
protected void altersingle(int alpha, int i, int b, int g, int r) {
    // Pull the winning neuron a fraction (alpha / initalpha) of the way
    // towards the presented colour, one component at a time.
    int[] neuron = network[i];
    neuron[0] -= (alpha * (neuron[0] - b)) / initalpha;
    neuron[1] -= (alpha * (neuron[1] - g)) / initalpha;
    neuron[2] -= (alpha * (neuron[2] - r)) / initalpha;
}
/*
 * Search for biased BGR values ----------------------------
 */
protected int contest(int b, int g, int r) {
    /* finds closest neuron (min dist) and updates freq */
    /* finds best neuron (min dist-bias) and returns position */
    /*
     * for frequently chosen neurons, freq[i] is high and bias[i] is
     * negative
     */
    /* bias[i] = gamma*((1/netsize)-freq[i]) */
    int i, dist, a, biasdist, betafreq;
    int bestpos, bestbiaspos, bestd, bestbiasd;
    int[] n;
    // FIX(review): replaced the obscure ~(((int) 1) << 31) with the
    // equivalent Integer.MAX_VALUE for readability.
    bestd = Integer.MAX_VALUE;
    bestbiasd = bestd;
    bestpos = -1;
    bestbiaspos = bestpos;
    for (i = 0; i < netsize; i++) {
        n = network[i];
        // Manhattan distance between neuron i and the presented colour.
        dist = n[0] - b;
        if (dist < 0)
            dist = -dist;
        a = n[1] - g;
        if (a < 0)
            a = -a;
        dist += a;
        a = n[2] - r;
        if (a < 0)
            a = -a;
        dist += a;
        if (dist < bestd) {
            bestd = dist;
            bestpos = i;
        }
        // Bias-adjusted distance: frequently-won neurons are penalised so
        // rarely-chosen ones get a chance to win.
        biasdist = dist - ((bias[i]) >> (intbiasshift - netbiasshift));
        if (biasdist < bestbiasd) {
            bestbiasd = biasdist;
            bestbiaspos = i;
        }
        // Decay every neuron's frequency and raise its bias accordingly.
        betafreq = (freq[i] >> betashift);
        freq[i] -= betafreq;
        bias[i] += (betafreq << gammashift);
    }
    // Reward the unbiased winner: more frequent, less attractive bias.
    freq[bestpos] += beta;
    bias[bestpos] -= betagamma;
    return (bestbiaspos);
}
}

View File

@ -0,0 +1,87 @@
package ink.wgink.module.file.media.pojo.vos;
/**
 * When you feel like quitting. Think about why you started
 * (当你想要放弃的时候，想想当初你为何开始)
 *
 * @ClassName: MediaVO
 * @Description: Media file value object (媒体): common attributes shared by
 *               all uploaded media files. String getters never return null -
 *               they return a trimmed value or the empty string.
 * @Author: WangGeng
 * @Date: 2021/6/9 23:10
 * @Version: 1.0
 **/
public class MediaVO {

    // Original (display) file name.
    private String fileName;
    // Path relative to the upload root.
    private String filePath;
    // Absolute path on disk.
    private String fileFullPath;
    // URL the file is served from.
    private String fileUrl;
    // File type (lower-case extension).
    private String fileType;
    // Size in bytes.
    private Long fileSize;
    // Free-text summary/description.
    private String fileSummary;
    // Hex-encoded MD5 of the content.
    private String fileMd5;

    public String getFileName() {
        if (fileName == null) {
            return "";
        }
        return fileName.trim();
    }

    public void setFileName(String fileName) {
        this.fileName = fileName;
    }

    public String getFilePath() {
        if (filePath == null) {
            return "";
        }
        return filePath.trim();
    }

    public void setFilePath(String filePath) {
        this.filePath = filePath;
    }

    public String getFileFullPath() {
        if (fileFullPath == null) {
            return "";
        }
        return fileFullPath.trim();
    }

    public void setFileFullPath(String fileFullPath) {
        this.fileFullPath = fileFullPath;
    }

    public String getFileUrl() {
        if (fileUrl == null) {
            return "";
        }
        return fileUrl.trim();
    }

    public void setFileUrl(String fileUrl) {
        this.fileUrl = fileUrl;
    }

    public String getFileType() {
        if (fileType == null) {
            return "";
        }
        return fileType.trim();
    }

    public void setFileType(String fileType) {
        this.fileType = fileType;
    }

    public Long getFileSize() {
        return fileSize;
    }

    public void setFileSize(Long fileSize) {
        this.fileSize = fileSize;
    }

    public String getFileSummary() {
        if (fileSummary == null) {
            return "";
        }
        return fileSummary.trim();
    }

    public void setFileSummary(String fileSummary) {
        this.fileSummary = fileSummary;
    }

    public String getFileMd5() {
        if (fileMd5 == null) {
            return "";
        }
        return fileMd5.trim();
    }

    public void setFileMd5(String fileMd5) {
        this.fileMd5 = fileMd5;
    }
}

View File

@ -0,0 +1,107 @@
package ink.wgink.module.file.media.pojo.vos.video;
import ink.wgink.module.file.media.pojo.vos.MediaVO;
/**
 * When you feel like quitting. Think about why you started
 * (当你想要放弃的时候，想想当初你为何开始)
 *
 * @ClassName: VideoVO
 * @Description: Video file value object (视频): common media attributes plus
 *               video-stream metadata and the embedded audio track's
 *               metadata. String getters return a trimmed value, never null.
 * @Author: WangGeng
 * @Date: 2021/6/9 23:10
 * @Version: 1.0
 **/
public class VideoVO extends MediaVO {

    // Key frame image reference extracted from the video.
    private String keyframe;
    // Video duration (units set by the metadata extractor - TODO confirm ms vs s).
    private Long duration;
    // Frame width in pixels.
    private Integer width;
    // Frame height in pixels.
    private Integer height;
    // Video bit rate.
    private Integer bitRate;
    // Video encoder/codec name.
    private String encoder;
    // Frames per second.
    private Float frameRate;
    // Embedded audio track: duration, bit rate and sample rate.
    private Long audioDuration;
    private Integer audioBitRate;
    private Long audioSampleRate;

    public String getKeyframe() {
        if (keyframe == null) {
            return "";
        }
        return keyframe.trim();
    }

    public void setKeyframe(String keyframe) {
        this.keyframe = keyframe;
    }

    public Long getDuration() {
        return duration;
    }

    public void setDuration(Long duration) {
        this.duration = duration;
    }

    public Integer getWidth() {
        return width;
    }

    public void setWidth(Integer width) {
        this.width = width;
    }

    public Integer getHeight() {
        return height;
    }

    public void setHeight(Integer height) {
        this.height = height;
    }

    public Integer getBitRate() {
        return bitRate;
    }

    public void setBitRate(Integer bitRate) {
        this.bitRate = bitRate;
    }

    public String getEncoder() {
        if (encoder == null) {
            return "";
        }
        return encoder.trim();
    }

    public void setEncoder(String encoder) {
        this.encoder = encoder;
    }

    public Float getFrameRate() {
        return frameRate;
    }

    public void setFrameRate(Float frameRate) {
        this.frameRate = frameRate;
    }

    public Long getAudioDuration() {
        return audioDuration;
    }

    public void setAudioDuration(Long audioDuration) {
        this.audioDuration = audioDuration;
    }

    public Integer getAudioBitRate() {
        return audioBitRate;
    }

    public void setAudioBitRate(Integer audioBitRate) {
        this.audioBitRate = audioBitRate;
    }

    public Long getAudioSampleRate() {
        return audioSampleRate;
    }

    public void setAudioSampleRate(Long audioSampleRate) {
        this.audioSampleRate = audioSampleRate;
    }
}

View File

@ -0,0 +1,48 @@
package ink.wgink.module.file.media.service;
import org.springframework.web.multipart.MultipartFile;
/**
 * When you feel like quitting. Think about why you started
 * (当你想要放弃的时候，想想当初你为何开始)
 *
 * @ClassName: IMediaService
 * @Description: Media file service (媒体): common upload and file-type
 *               helpers shared by the concrete media services.
 * @Author: WangGeng
 * @Date: 2021/6/9 21:32
 * @Version: 1.0
 **/
public interface IMediaService {

    /**
     * Buffer size used when streaming uploads: 1 MiB (读取流大小 1M).
     */
    int INPUT_STREAM_SIZE = 1048576;

    /**
     * Uploads a single file (单文件上传) into the given directory under the
     * given name, computing its MD5 while streaming.
     *
     * @param multipartFile the uploaded file (上传文件)
     * @param uploadPath    target directory on disk (上传的文件路径)
     * @param uploadName    name to store the file under (上传的文件名称)
     * @return hex-encoded MD5 of the stored content (文件MD5)
     */
    String upload(MultipartFile multipartFile, String uploadPath, String uploadName);

    /**
     * Resolves the Content-Type for a file type.
     *
     * @param fileType file extension, e.g. "mp4"
     * @return the Content-Type for that extension
     */
    String getContentType(String fileType);

    /**
     * Extracts the file type (lower-case extension) from a file name.
     * NOTE(review): original description said "file name" (文件名称), but
     * the method returns the extension - corrected here.
     *
     * @param fileName full file name, e.g. "movie.mp4"
     * @return lower-case extension, or "" when it cannot be determined
     */
    String getFileType(String fileName);
}

View File

@ -0,0 +1,73 @@
package ink.wgink.module.file.media.service.impl;
import ink.wgink.common.base.DefaultBaseService;
import ink.wgink.exceptions.FileException;
import ink.wgink.module.file.media.service.IMediaService;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;
import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.security.MessageDigest;
/**
 * When you feel like quitting. Think about why you started
 * (当你想要放弃的时候，想想当初你为何开始)
 *
 * @ClassName: MediaServiceImpl
 * @Description: Media file service (媒体): streams uploads to disk and
 *               computes their MD5.
 * @Author: WangGeng
 * @Date: 2021/6/9 21:54
 * @Version: 1.0
 **/
@Service
public class MediaServiceImpl extends DefaultBaseService implements IMediaService {

    /** Nibble-to-hex lookup used to render the MD5 digest. */
    private static final char[] HEX_CODE = {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F'};

    /**
     * Streams the uploaded file to uploadPath/uploadName while computing its
     * MD5, creating the target directory if needed.
     *
     * @param multipartFile the uploaded file
     * @param uploadPath    target directory (created if absent)
     * @param uploadName    name to store the file under
     * @return upper-case hex MD5 of the stored content
     */
    @Override
    public String upload(MultipartFile multipartFile, String uploadPath, String uploadName) {
        String fileMd5;
        File uploadFolderFile = new File(uploadPath);
        if (!uploadFolderFile.exists()) {
            uploadFolderFile.mkdirs();
        }
        try (InputStream uploadFileInputStream = multipartFile.getInputStream();
             FileOutputStream uploadFileOutputStream = new FileOutputStream(uploadPath + File.separator + uploadName)) {
            MessageDigest messageDigest = MessageDigest.getInstance("MD5");
            byte[] buf = new byte[INPUT_STREAM_SIZE];
            // FIX: write/digest only the number of bytes actually read. The
            // previous loop always consumed buf.length bytes, so any partial
            // read (the normal case for the final chunk) corrupted both the
            // stored file and the MD5.
            for (int readCount; (readCount = uploadFileInputStream.read(buf)) > -1; ) {
                uploadFileOutputStream.write(buf, 0, readCount);
                messageDigest.update(buf, 0, readCount);
            }
            uploadFileOutputStream.flush();
            // 计算MD5: render the digest bytes as hex characters.
            byte[] data = messageDigest.digest();
            StringBuilder fileMd5SB = new StringBuilder(data.length * 2);
            for (byte b : data) {
                fileMd5SB.append(HEX_CODE[(b >> 4) & 0xF]);
                fileMd5SB.append(HEX_CODE[(b & 0xF)]);
            }
            fileMd5 = fileMd5SB.toString();
        } catch (Exception e) {
            LOG.error(e.getMessage(), e);
            // FIX: this service handles all media, not just video, so the
            // message no longer says "视频" (video).
            throw new FileException("文件上传失败");
        }
        return fileMd5;
    }

    /**
     * Resolves the Content-Type for a file type.
     * NOTE(review): stub - always returns null. TODO implement.
     */
    @Override
    public String getContentType(String fileType) {
        return null;
    }

    /**
     * Extracts the lower-case extension from a file name.
     *
     * @param fileName full file name, may be null
     * @return lower-case last dot-separated segment, or "" when absent
     */
    @Override
    public String getFileType(String fileName) {
        // FIX: String.split never returns null, so the old null check was
        // dead; and names[names.length - 1] threw
        // ArrayIndexOutOfBoundsException when the split produced an empty
        // array (e.g. a name consisting only of dots). Guard both cases.
        if (fileName == null) {
            return "";
        }
        String[] names = fileName.split("\\.");
        if (names.length > 0) {
            return names[names.length - 1].toLowerCase();
        }
        return "";
    }
}

View File

@ -0,0 +1,53 @@
package ink.wgink.module.file.media.service.video;
import ink.wgink.module.file.media.pojo.vos.video.VideoVO;
import org.springframework.web.multipart.MultipartFile;
/**
 * When you feel like quitting. Think about why you started
 * (当你想要放弃的时候，想想当初你为何开始)
 *
 * @ClassName: IVideoService
 * @Description: Video service (视频): saving video records and handling
 *               video uploads.
 * @Author: WangGeng
 * @Date: 2021/6/8 22:12
 * @Version: 1.0
 **/
public interface IVideoService {

    /**
     * Sub-directory (under the configured upload path) where video files
     * are stored.
     */
    String VIDEO_PATH = "videos";

    /**
     * Saves a video record (保存).
     *
     * @param videoVO video attributes to persist
     * @return identifier of the saved record
     */
    String save(VideoVO videoVO);

    /**
     * Saves a video record (保存) on behalf of a token-identified caller.
     *
     * @param token   caller token - presumably for auditing/attribution;
     *                TODO confirm its exact semantics
     * @param videoVO video attributes to persist
     * @return identifier of the saved record
     */
    String save(String token, VideoVO videoVO);

    /**
     * Uploads a video file (上传视频) and saves its record.
     *
     * @param video the uploaded video file
     * @return identifier of the saved record
     */
    String upload(MultipartFile video);

    /**
     * Uploads a video file (上传视频) on behalf of a token-identified caller
     * and saves its record.
     *
     * @param token caller token - presumably for auditing/attribution;
     *              TODO confirm its exact semantics
     * @param video the uploaded video file
     * @return identifier of the saved record
     */
    String upload(String token, MultipartFile video);
}

View File

@ -0,0 +1,93 @@
package ink.wgink.module.file.media.service.video.impl;
import ink.wgink.common.base.DefaultBaseService;
import ink.wgink.exceptions.FileException;
import ink.wgink.module.file.media.manager.MediaManager;
import ink.wgink.module.file.media.manager.domain.MusicMetaInfo;
import ink.wgink.module.file.media.manager.domain.VideoMetaInfo;
import ink.wgink.module.file.media.pojo.vos.video.VideoVO;
import ink.wgink.module.file.media.service.IMediaService;
import ink.wgink.module.file.media.service.video.IVideoService;
import ink.wgink.properties.media.MediaProperties;
import ink.wgink.util.UUIDUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;
import java.io.File;
/**
 * When you feel like quitting. Think about why you started
 * (当你想要放弃的时候，想想当初你为何开始)
 *
 * @ClassName: VideoServiceImpl
 * @Description: Video service (视频): stores the uploaded file via the media
 *               service and extracts video/audio metadata with ffmpeg.
 * @Author: WangGeng
 * @Date: 2021/6/8 22:12
 * @Version: 1.0
 **/
@Service
public class VideoServiceImpl extends DefaultBaseService implements IVideoService {

    @Autowired
    private IMediaService mediaService;
    @Autowired
    private MediaProperties mediaProperties;

    @Override
    public String save(VideoVO videoVO) {
        // NOTE(review): stub - persistence not implemented yet, so upload()
        // currently returns null. TODO implement.
        return null;
    }

    @Override
    public String save(String token, VideoVO videoVO) {
        // NOTE(review): stub - fileId is generated but never used or
        // returned; presumably meant to become the persisted record id.
        // TODO implement and confirm.
        String fileId = UUIDUtil.getUUID();
        return null;
    }

    @Override
    public String upload(MultipartFile video) {
        // Delegates to the token variant with no caller token.
        return upload(null, video);
    }

    @Override
    public String upload(String token, MultipartFile video) {
        // Physical storage directory: <uploadPath>/videos
        String filePath = mediaProperties.getUploadPath() + File.separator + VIDEO_PATH;
        // NOTE(review): the client-supplied original name is used verbatim as
        // the stored name - same-named uploads overwrite each other, and a
        // crafted name could escape the upload directory; a server-generated
        // name would be safer. getOriginalFilename() may also return null.
        // TODO confirm/fix.
        String fileName = video.getOriginalFilename();
        long fileSize = video.getSize();
        String fileType = mediaService.getFileType(fileName);
        // Streams the file to disk and returns its MD5.
        String fileMd5 = mediaService.upload(video, filePath, fileName);
        String fileFullPath = filePath + File.separator + fileName;
        // 构建视频内容: assemble the record from file facts...
        VideoVO videoVO = new VideoVO();
        videoVO.setFileName(fileName);
        videoVO.setFileFullPath(fileFullPath);
        videoVO.setFilePath(VIDEO_PATH + File.separator + fileName);
        videoVO.setFileSize(fileSize);
        videoVO.setFileType(fileType);
        videoVO.setFileMd5(fileMd5);
        // ...then enrich it with ffmpeg-extracted metadata. A null result
        // means the file was not a readable video; note the already-written
        // file is left on disk in that case.
        File uploadFile = new File(fileFullPath);
        VideoMetaInfo videoMetaInfo = MediaManager.getInstance().getVideoMetaInfo(uploadFile);
        if (videoMetaInfo == null) {
            throw new FileException("上传失败");
        }
        videoVO.setDuration(videoMetaInfo.getDuration());
        videoVO.setWidth(videoMetaInfo.getWidth());
        videoVO.setHeight(videoMetaInfo.getHeight());
        videoVO.setBitRate(videoMetaInfo.getBitRate());
        videoVO.setEncoder(videoMetaInfo.getEncoder());
        videoVO.setFrameRate(videoMetaInfo.getFrameRate());
        // Embedded audio track metadata, when present.
        MusicMetaInfo musicMetaInfo = videoMetaInfo.getMusicMetaInfo();
        if (musicMetaInfo != null) {
            videoVO.setAudioDuration(musicMetaInfo.getDuration());
            videoVO.setAudioBitRate(musicMetaInfo.getBitRate());
            videoVO.setAudioSampleRate(musicMetaInfo.getSampleRate());
        }
        return save(token, videoVO);
    }
}

View File

@ -0,0 +1,30 @@
package ink.wgink.module.file.media.startup;
import ink.wgink.module.file.media.manager.MediaManager;
import ink.wgink.properties.media.MediaProperties;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.ApplicationArguments;
import org.springframework.boot.ApplicationRunner;
import org.springframework.stereotype.Component;
/**
 * When you feel like quitting. Think about why you started
 * (当你想要放弃的时候，想想当初你为何开始)
 *
 * @ClassName: ModuleFileMediaStartUp
 * @Description: Module start-up hook (启动): pushes the configured ffmpeg
 *               path into the MediaManager singleton once the application
 *               has started. (Fixed @ClassName - it previously said
 *               MediaStartUp.)
 * @Author: WangGeng
 * @Date: 2021/6/8 21:56
 * @Version: 1.0
 **/
@Component
public class ModuleFileMediaStartUp implements ApplicationRunner {

    @Autowired
    private MediaProperties mediaProperties;

    @Override
    public void run(ApplicationArguments args) throws Exception {
        // Configure the ffmpeg executable location before any media
        // processing is attempted.
        MediaManager.getInstance().setFFmpegPath(mediaProperties.getFfmpegPath());
    }
}

View File

@ -0,0 +1,48 @@
import ink.wgink.module.file.media.manager.MediaManager;
import ink.wgink.module.file.media.manager.domain.VideoMetaInfo;
import org.junit.Test;
import java.io.BufferedReader;
import java.io.File;
import java.io.InputStreamReader;
/**
 * When you feel like quitting. Think about why you started
 * (当你想要放弃的时候，想想当初你为何开始)
 *
 * @ClassName: MediaTest
 * @Description: Ad-hoc manual tests. NOTE(review): both tests depend on
 *               absolute Windows paths and locally installed binaries
 *               (ffmpeg, node), so they only run on the author's machine
 *               and will fail on CI.
 * @Author: WangGeng
 * @Date: 2021/6/7 22:37
 * @Version: 1.0
 **/
public class MediaTest {

    // Reads video metadata from a local file via a locally installed ffmpeg.
    @Test
    public void t1() {
        File videoFile = new File("I:\\电视剧\\神探狄仁杰\\第一部\\神探狄仁杰-01.mp4");
        // File frameFile = new File("C:\\Users\\wenc0\\Desktop\\UploadFiles\\frame.gif");
        // MediaComponent.cutVideoFrame(videoFile, frameFile);
        MediaManager.getInstance().setFFmpegPath("D:\\ffmpeg-4.4-full_build\\ffmpeg-4.4-full_build\\bin\\ffmpeg.exe");
        VideoMetaInfo videoMetaInfo = MediaManager.getInstance().getVideoMetaInfo(videoFile);
        System.out.println(videoMetaInfo);
    }

    // Spawns "node -v" and echoes its output from a background thread.
    @Test
    public void t2() throws Exception {
        Process process = Runtime.getRuntime().exec("node -v");
        BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(process.getInputStream(), "GBK"));
        new Thread(() -> {
            try {
                for (String line; (line = bufferedReader.readLine()) != null; ) {
                    System.out.println(line);
                }
                bufferedReader.close();
            } catch (Exception e) {
                // NOTE(review): swallowed - tolerable in a scratch test, but
                // the reader leaks if readLine throws before close().
            }
        }).start();
        process.waitFor();
        process.destroy();
    }
}