+ * version: 1.0 + */ +public class MediaManager { + private static final Logger LOG = LoggerFactory.getLogger(MediaManager.class); + /** + * 可以处理的视频格式 + */ + public final static String[] VIDEO_TYPE = {"MP4", "WMV"}; + /** + * 可以处理的图片格式 + */ + public final static String[] IMAGE_TYPE = {"JPG", "JPEG", "PNG", "GIF"}; + /** + * 可以处理的音频格式 + */ + public final static String[] AUDIO_TYPE = {"AAC"}; + + /** + * 视频帧抽取时的默认时间点,第10s(秒) + * (Time类构造参数的单位:ms) + */ + private static final Time DEFAULT_TIME = new Time(0, 0, 10); + /** + * 视频帧抽取的默认宽度值,单位:px + */ + private static int DEFAULT_WIDTH = 320; + /** + * 视频帧抽取的默认时长,单位:s(秒) + */ + private static int DEFAULT_TIME_LENGTH = 10; + /** + * 抽取多张视频帧以合成gif动图时,gif的播放速度 + */ + private static int DEFAULT_GIF_PLAYTIME = 110; + /** + * FFmpeg程序执行路径 + * 当前系统安装好ffmpeg程序并配置好相应的环境变量后,值为ffmpeg可执行程序文件在实际系统中的绝对路径 + */ + private static String FFMPEG_PATH = null; + + /** + * 视频时长正则匹配式 + * 用于解析视频及音频的时长等信息时使用; + *
+ * (.*?)表示:匹配任何除\r\n之外的任何0或多个字符,非贪婪模式
+ */
+ private static String durationRegex = "Duration: (\\d*?):(\\d*?):(\\d*?)\\.(\\d*?), start: (.*?), bitrate: (\\d*) kb\\/s.*";
+ private static Pattern durationPattern;
+ /**
+ * 视频流信息正则匹配式
+ * 用于解析视频详细信息时使用;
+ */
+ private static String videoStreamRegex = "Stream #\\d:\\d[\\(]??\\S*[\\)]??: Video: (\\S*\\S$?)[^\\,]*, (.*?), (\\d*)x(\\d*)[^\\,]*, (\\d*) kb\\/s, (\\d*[\\.]??\\d*) fps";
+ private static Pattern videoStreamPattern;
+ /**
+ * 音频流信息正则匹配式
+ * 用于解析音频详细信息时使用;
+ */
+ private static String musicStreamRegex = "Stream #\\d:\\d[\\(]??\\S*[\\)]??: Audio: (\\S*\\S$?)(.*), (.*?) Hz, (.*?), (.*?), (\\d*) kb\\/s";
+ ;
+ private static Pattern musicStreamPattern;
+
+ private static MediaManager MEDIA_MANAGER = MediaManagerBuilder.mediaManager;
+
+ /**
+ * 静态初始化时先加载好用于音视频解析的正则匹配式
+ */
+ static {
+ durationPattern = Pattern.compile(durationRegex);
+ videoStreamPattern = Pattern.compile(videoStreamRegex);
+ musicStreamPattern = Pattern.compile(musicStreamRegex);
+ }
+
+ private MediaManager() {}
+
+ public static MediaManager getInstance() {
+ return MEDIA_MANAGER;
+ }
+
+ /**
+ * 获取当前多媒体处理工具内的ffmpeg的执行路径
+ *
+ * @return
+ */
+ public String getFFmpegPath() {
+ return FFMPEG_PATH;
+ }
+
+ /**
+ * 设置当前多媒体工具内的ffmpeg的执行路径
+ *
+ * @param ffmpeg_path ffmpeg可执行程序在实际系统中的绝对路径
+ * @return
+ */
+ public boolean setFFmpegPath(String ffmpeg_path) {
+ if (StringUtils.isBlank(ffmpeg_path)) {
+ LOG.error("--- 设置ffmpeg执行路径失败,因为传入的ffmpeg可执行程序路径为空! ---");
+ return false;
+ }
+ File ffmpegFile = new File(ffmpeg_path);
+ if (!ffmpegFile.exists()) {
+ LOG.error("--- 设置ffmpeg执行路径失败,因为传入的ffmpeg可执行程序路径下的ffmpeg文件不存在! ---");
+ return false;
+ }
+ FFMPEG_PATH = ffmpeg_path;
+ LOG.info("--- 设置ffmpeg执行路径成功 --- 当前ffmpeg可执行程序路径为: " + ffmpeg_path);
+ return true;
+ }
+
+ /**
+ * 测试当前多媒体工具是否可以正常工作
+ *
+ * @return
+ */
+ public boolean isExecutable() {
+ File ffmpegFile = new File(FFMPEG_PATH);
+ if (!ffmpegFile.exists()) {
+ LOG.error("--- 工作状态异常,因为传入的ffmpeg可执行程序路径下的ffmpeg文件不存在! ---");
+ return false;
+ }
+ List
+ * 注意指定视频分辨率时,宽度和高度必须同时有值;
+ *
+ * @param fileInput 源视频路径
+ * @param fileOutPut 转换后的视频输出路径
+ * @param withAudio 是否保留音频;true-保留,false-不保留
+ * @param crf 指定视频的质量系数(值越小,视频质量越高,体积越大;该系数取值为0-51,直接影响视频码率大小),取值参考:CrfValueEnum.code
+ * @param preset 指定视频的编码速率(速率越快压缩率越低),取值参考:PresetVauleEnum.presetValue
+ * @param width 视频宽度;为空则保持源视频宽度
+ * @param height 视频高度;为空则保持源视频高度
+ */
+ public void convertVideo(File fileInput, File fileOutPut, boolean withAudio, Integer crf, String preset, Integer width, Integer height) {
+ if (null == fileInput || !fileInput.exists()) {
+ throw new RuntimeException("源视频文件不存在,请检查源视频路径");
+ }
+ if (null == fileOutPut) {
+ throw new RuntimeException("转换后的视频路径为空,请检查转换后的视频存放路径是否正确");
+ }
+
+ if (!fileOutPut.exists()) {
+ try {
+ fileOutPut.createNewFile();
+ } catch (IOException e) {
+ LOG.error("视频转换时新建输出文件失败");
+ }
+ }
+
+ String format = getFormat(fileInput);
+ if (!isLegalFormat(format, VIDEO_TYPE)) {
+ throw new RuntimeException("无法解析的视频格式:" + format);
+ }
+
+ List
+ * 转换后的文件路径以.gif结尾时,默认截取从第10s开始,后10s以内的帧画面来生成gif
+ *
+ * @param videoFile 源视频路径
+ * @param fileOutPut 转换后的文件路径
+ */
+ public void cutVideoFrame(File videoFile, File fileOutPut) {
+ cutVideoFrame(videoFile, fileOutPut, DEFAULT_TIME);
+ }
+
+ /**
+ * 视频帧抽取(抽取指定时间点的帧画面)
+ * 抽取的视频帧图片宽度默认为320px
+ *
+ * 转换后的文件路径以.gif结尾时,默认截取从指定时间点开始,后10s以内的帧画面来生成gif
+ *
+ * @param videoFile 源视频路径
+ * @param fileOutPut 转换后的文件路径
+ * @param time 指定抽取视频帧的时间点(单位:s)
+ */
+ public void cutVideoFrame(File videoFile, File fileOutPut, Time time) {
+ cutVideoFrame(videoFile, fileOutPut, time, DEFAULT_WIDTH);
+ }
+
+ /**
+ * 视频帧抽取(抽取指定时间点、指定宽度值的帧画面)
+ * 只需指定视频帧的宽度,高度随宽度自动计算
+ *
+ * 转换后的文件路径以.gif结尾时,默认截取从指定时间点开始,后10s以内的帧画面来生成gif
+ *
+ * @param videoFile 源视频路径
+ * @param fileOutPut 转换后的文件路径
+ * @param time 指定要抽取第几秒的视频帧(单位:s)
+ * @param width 抽取的视频帧图片的宽度(单位:px)
+ */
+ public void cutVideoFrame(File videoFile, File fileOutPut, Time time, int width) {
+ if (null == videoFile || !videoFile.exists()) {
+ throw new RuntimeException("源视频文件不存在,请检查源视频路径");
+ }
+ if (null == fileOutPut) {
+ throw new RuntimeException("转换后的视频路径为空,请检查转换后的视频存放路径是否正确");
+ }
+ VideoMetaInfo info = getVideoMetaInfo(videoFile);
+ if (null == info) {
+ LOG.error("--- 未能解析源视频信息,视频帧抽取操作失败 --- 源视频: " + videoFile);
+ return;
+ }
+ int height = width * info.getHeight() / info.getWidth(); // 根据宽度计算适合的高度,防止画面变形
+ cutVideoFrame(videoFile, fileOutPut, time, width, height);
+ }
+
+ /**
+ * 视频帧抽取(抽取指定时间点、指定宽度值、指定高度值的帧画面)
+ *
+ * 转换后的文件路径以.gif结尾时,默认截取从指定时间点开始,后10s以内的帧画面来生成gif
+ *
+ * @param videoFile 源视频路径
+ * @param fileOutPut 转换后的文件路径
+ * @param time 指定要抽取第几秒的视频帧(单位:s)
+ * @param width 抽取的视频帧图片的宽度(单位:px)
+ * @param height 抽取的视频帧图片的高度(单位:px)
+ */
+ public void cutVideoFrame(File videoFile, File fileOutPut, Time time, int width, int height) {
+ if (null == videoFile || !videoFile.exists()) {
+ throw new RuntimeException("源视频文件不存在,请检查源视频路径");
+ }
+ if (null == fileOutPut) {
+ throw new RuntimeException("转换后的视频路径为空,请检查转换后的视频存放路径是否正确");
+ }
+ String format = getFormat(fileOutPut);
+ if (!isLegalFormat(format, IMAGE_TYPE)) {
+ throw new RuntimeException("无法生成指定格式的帧图片:" + format);
+ }
+ String fileOutPutPath = fileOutPut.getAbsolutePath();
+ if (!"GIF".equals(StringUtils.upperCase(format))) {
+ // 输出路径不是以.gif结尾,抽取并生成一张静态图
+ cutVideoFrame(videoFile, fileOutPutPath, time, width, height, 1, false);
+ } else {
+ // 抽取并生成一个gif(gif由10张静态图构成)
+ String path = fileOutPut.getParent();
+ String name = fileOutPut.getName();
+ // 创建临时文件存储多张静态图用于生成gif
+ String tempPath = path + File.separator + System.currentTimeMillis() + "_" + name.substring(0, name.indexOf("."));
+ File file = new File(tempPath);
+ if (!file.exists()) {
+ file.mkdir();
+ }
+ try {
+ cutVideoFrame(videoFile, tempPath, time, width, height, DEFAULT_TIME_LENGTH, true);
+ // 生成gif
+ String images[] = file.list();
+ for (int i = 0; i < images.length; i++) {
+ images[i] = tempPath + File.separator + images[i];
+ }
+ createGifImage(images, fileOutPut.getAbsolutePath(), DEFAULT_GIF_PLAYTIME);
+ } catch (Exception e) {
+ LOG.error("--- 截取视频帧操作出错 --- 错误信息:" + e.getMessage());
+ } finally {
+ // 删除用于生成gif的临时文件
+ String images[] = file.list();
+ for (int i = 0; i < images.length; i++) {
+ File fileDelete = new File(tempPath + File.separator + images[i]);
+ fileDelete.delete();
+ }
+ file.delete();
+ }
+ }
+ }
+
+ /**
+ * 视频帧抽取(抽取指定时间点、指定宽度值、指定高度值、指定时长、指定单张/多张的帧画面)
+ *
+ * @param videoFile 源视频
+ * @param path 转换后的文件输出路径
+ * @param time 开始截取视频帧的时间点(单位:s)
+ * @param width 截取的视频帧图片的宽度(单位:px)
+ * @param height 截取的视频帧图片的高度(单位:px,需要大于20)
+ * @param timeLength 截取的视频帧的时长(从time开始算,单位:s,需小于源视频的最大时长)
+ * @param isContinuty false - 静态图(只截取time时间点的那一帧图片),true - 动态图(截取从time时间点开始,timelength这段时间内的多张帧图)
+ */
+ private void cutVideoFrame(File videoFile, String path, Time time, int width, int height, int timeLength, boolean isContinuty) {
+ if (videoFile == null || !videoFile.exists()) {
+ throw new RuntimeException("源视频文件不存在,源视频路径: ");
+ }
+ if (null == path) {
+ throw new RuntimeException("转换后的文件路径为空,请检查转换后的文件存放路径是否正确");
+ }
+ VideoMetaInfo info = getVideoMetaInfo(videoFile);
+ if (null == info) {
+ throw new RuntimeException("未解析到视频信息");
+ }
+ if (time.getTime() + timeLength > info.getDuration()) {
+ throw new RuntimeException("开始截取视频帧的时间点不合法:" + time.toString() + ",因为截取时间点晚于视频的最后时间点");
+ }
+ if (width <= 20 || height <= 20) {
+ throw new RuntimeException("截取的视频帧图片的宽度或高度不合法,宽高值必须大于20");
+ }
+ try {
+ List
+ * 解析出的视频信息一般为以下格式:
+ * Input #0, mov,mp4,m4a,3gp,3g2,mj2, from '6.mp4':
+ * Duration: 00:00:30.04, start: 0.000000, bitrate: 19031 kb/s
+ * Stream #0:0(eng): Video: h264 (Main) (avc1 / 0x31637661), yuv420p(tv, bt709), 1920x1080, 18684 kb/s, 25 fps, 25 tbr, 25k tbn, 50 tbc (default)
+ * Stream #0:1(eng): Audio: aac (LC) (mp4a / 0x6134706D), 48000 Hz, stereo, fltp, 317 kb/s (default)
+ *
+ * 注解:
+ * Duration: 00:00:30.04【视频时长】, start: 0.000000【视频开始时间】, bitrate: 19031 kb/s【视频比特率/码率】
+ * Stream #0:0(eng): Video: h264【视频编码格式】 (Main) (avc1 / 0x31637661), yuv420p(tv, bt709), 1920x1080【视频分辨率,宽x高】, 18684【视频比特率】 kb/s, 25【视频帧率】 fps, 25 tbr, 25k tbn, 50 tbc (default)
+ * Stream #0:1(eng): Audio: aac【音频格式】 (LC) (mp4a / 0x6134706D), 48000【音频采样率】 Hz, stereo, fltp, 317【音频码率】 kb/s (default)
+ *
+ * @param videoFile 源视频路径
+ * @return 视频的基本信息,解码失败时返回null
+ */
+ public VideoMetaInfo getVideoMetaInfo(File videoFile) {
+ if (null == videoFile || !videoFile.exists()) {
+ LOG.error("--- 解析视频信息失败,因为要解析的源视频文件不存在 ---");
+ return null;
+ }
+
+ VideoMetaInfo videoInfo = new VideoMetaInfo();
+
+ String parseResult = getMetaInfoFromFFmpeg(videoFile);
+
+ Matcher durationMacher = durationPattern.matcher(parseResult);
+ Matcher videoStreamMacher = videoStreamPattern.matcher(parseResult);
+ Matcher videoMusicStreamMacher = musicStreamPattern.matcher(parseResult);
+
+ Long duration = 0L; // 视频时长
+ Integer videoBitrate = 0; // 视频码率
+ String videoFormat = getFormat(videoFile); // 视频格式
+ Long videoSize = videoFile.length(); // 视频大小
+
+ String videoEncoder = ""; // 视频编码器
+ Integer videoHeight = 0; // 视频高度
+ Integer videoWidth = 0; // 视频宽度
+ Float videoFramerate = 0F; // 视频帧率
+
+ String musicFormat = ""; // 音频格式
+ Long samplerate = 0L; // 音频采样率
+ Integer musicBitrate = 0; // 音频码率
+
+ try {
+ // 匹配视频播放时长等信息
+ if (durationMacher.find()) {
+ long hours = (long) Integer.parseInt(durationMacher.group(1));
+ long minutes = (long) Integer.parseInt(durationMacher.group(2));
+ long seconds = (long) Integer.parseInt(durationMacher.group(3));
+ long dec = (long) Integer.parseInt(durationMacher.group(4));
+ duration = dec * 100L + seconds * 1000L + minutes * 60L * 1000L + hours * 60L * 60L * 1000L;
+ //String startTime = durationMacher.group(5) + "ms";
+ videoBitrate = Integer.parseInt(durationMacher.group(6));
+ }
+ // 匹配视频分辨率等信息
+ if (videoStreamMacher.find()) {
+ videoEncoder = videoStreamMacher.group(1);
+ String s2 = videoStreamMacher.group(2);
+ videoWidth = Integer.parseInt(videoStreamMacher.group(3));
+ videoHeight = Integer.parseInt(videoStreamMacher.group(4));
+ String s5 = videoStreamMacher.group(5);
+ videoFramerate = Float.parseFloat(videoStreamMacher.group(6));
+ }
+ // 匹配视频中的音频信息
+ if (videoMusicStreamMacher.find()) {
+ musicFormat = videoMusicStreamMacher.group(1); // 提取音频格式
+ //String s2 = videoMusicStreamMacher.group(2);
+ samplerate = Long.parseLong(videoMusicStreamMacher.group(3)); // 提取采样率
+ //String s4 = videoMusicStreamMacher.group(4);
+ //String s5 = videoMusicStreamMacher.group(5);
+ musicBitrate = Integer.parseInt(videoMusicStreamMacher.group(6)); // 提取比特率
+ }
+ } catch (Exception e) {
+ LOG.error("--- 解析视频参数信息出错! --- 错误信息: " + e.getMessage());
+ return null;
+ }
+
+ // 封装视频中的音频信息
+ MusicMetaInfo musicMetaInfo = new MusicMetaInfo();
+ musicMetaInfo.setFormat(musicFormat);
+ musicMetaInfo.setDuration(duration);
+ musicMetaInfo.setBitRate(musicBitrate);
+ musicMetaInfo.setSampleRate(samplerate);
+ // 封装视频信息
+ VideoMetaInfo videoMetaInfo = new VideoMetaInfo();
+ videoMetaInfo.setFormat(videoFormat);
+ videoMetaInfo.setSize(videoSize);
+ videoMetaInfo.setBitRate(videoBitrate);
+ videoMetaInfo.setDuration(duration);
+ videoMetaInfo.setEncoder(videoEncoder);
+ videoMetaInfo.setFrameRate(videoFramerate);
+ videoMetaInfo.setHeight(videoHeight);
+ videoMetaInfo.setWidth(videoWidth);
+ videoMetaInfo.setMusicMetaInfo(musicMetaInfo);
+
+ return videoMetaInfo;
+ }
+
+ /**
+ * 获取视频的基本信息(从流中)
+ *
+ * @param inputStream 源视频流路径
+ * @return 视频的基本信息,解码失败时返回null
+ */
+ public VideoMetaInfo getVideoMetaInfo(InputStream inputStream) {
+ VideoMetaInfo videoInfo = new VideoMetaInfo();
+ try {
+ File file = File.createTempFile("tmp", null);
+ if (!file.exists()) {
+ return null;
+ }
+ FileUtils.copyInputStreamToFile(inputStream, file);
+ videoInfo = getVideoMetaInfo(file);
+ file.deleteOnExit();
+ return videoInfo;
+ } catch (Exception e) {
+ LOG.error("--- 从流中获取视频基本信息出错 --- 错误信息: " + e.getMessage());
+ return null;
+ }
+ }
+
+ /**
+ * 获取音频的基本信息(从文件中)
+ *
+ * @param musicFile 音频文件路径
+ * @return 音频的基本信息,解码失败时返回null
+ */
+ public MusicMetaInfo getMusicMetaInfo(File musicFile) {
+ if (null == musicFile || !musicFile.exists()) {
+ LOG.error("--- 无法获取音频信息,因为要解析的音频文件为空 ---");
+ return null;
+ }
+ // 获取音频信息字符串,方便后续解析
+ String parseResult = getMetaInfoFromFFmpeg(musicFile);
+
+ Long duration = 0L; // 音频时长
+ Integer musicBitrate = 0; // 音频码率
+ Long samplerate = 0L; // 音频采样率
+ String musicFormat = ""; // 音频格式
+ Long musicSize = musicFile.length(); // 音频大小
+
+ Matcher durationMacher = durationPattern.matcher(parseResult);
+ Matcher musicStreamMacher = musicStreamPattern.matcher(parseResult);
+
+ try {
+ // 匹配音频播放时长等信息
+ if (durationMacher.find()) {
+ long hours = (long) Integer.parseInt(durationMacher.group(1));
+ long minutes = (long) Integer.parseInt(durationMacher.group(2));
+ long seconds = (long) Integer.parseInt(durationMacher.group(3));
+ long dec = (long) Integer.parseInt(durationMacher.group(4));
+ duration = dec * 100L + seconds * 1000L + minutes * 60L * 1000L + hours * 60L * 60L * 1000L;
+ //String startTime = durationMacher.group(5) + "ms";
+ musicBitrate = Integer.parseInt(durationMacher.group(6));
+ }
+ // 匹配音频采样率等信息
+ if (musicStreamMacher.find()) {
+ musicFormat = musicStreamMacher.group(1); // 提取音频格式
+ //String s2 = videoMusicStreamMacher.group(2);
+ samplerate = Long.parseLong(musicStreamMacher.group(3)); // 提取采样率
+ //String s4 = videoMusicStreamMacher.group(4);
+ //String s5 = videoMusicStreamMacher.group(5);
+ musicBitrate = Integer.parseInt(musicStreamMacher.group(6)); // 提取比特率
+ }
+ } catch (Exception e) {
+ LOG.error("--- 解析音频参数信息出错! --- 错误信息: " + e.getMessage());
+ return null;
+ }
+
+ // 封装视频中的音频信息
+ MusicMetaInfo musicMetaInfo = new MusicMetaInfo();
+ musicMetaInfo.setFormat(musicFormat);
+ musicMetaInfo.setDuration(duration);
+ musicMetaInfo.setBitRate(musicBitrate);
+ musicMetaInfo.setSampleRate(samplerate);
+ musicMetaInfo.setSize(musicSize);
+ return musicMetaInfo;
+ }
+
+ /**
+ * 获取音频的基本信息(从流中)
+ *
+ * @param inputStream 源音乐流路径
+ * @return 音频基本信息,解码出错时返回null
+ */
+ public MusicMetaInfo getMusicMetaInfo(InputStream inputStream) {
+ MusicMetaInfo musicMetaInfo = new MusicMetaInfo();
+ try {
+ File file = File.createTempFile("tmp", null);
+ if (!file.exists()) {
+ return null;
+ }
+ FileUtils.copyInputStreamToFile(inputStream, file);
+ musicMetaInfo = getMusicMetaInfo(file);
+ file.deleteOnExit();
+ return musicMetaInfo;
+ } catch (Exception e) {
+ LOG.error("--- 从流中获取音频基本信息出错 --- 错误信息: " + e.getMessage());
+ return null;
+ }
+ }
+
+
+ /**
+ * 获取图片的基本信息(从流中)
+ *
+ * @param inputStream 源图片路径
+ * @return 图片的基本信息,获取信息失败时返回null
+ */
+ public ImageMetaInfo getImageInfo(InputStream inputStream) {
+ BufferedImage image = null;
+ ImageMetaInfo imageInfo = new ImageMetaInfo();
+ try {
+ image = ImageIO.read(inputStream);
+ imageInfo.setWidth(image.getWidth());
+ imageInfo.setHeight(image.getHeight());
+ imageInfo.setSize(Long.valueOf(String.valueOf(inputStream.available())));
+ return imageInfo;
+ } catch (Exception e) {
+ LOG.error("--- 获取图片的基本信息失败 --- 错误信息: " + e.getMessage());
+ return null;
+ }
+ }
+
+ /**
+ * 获取图片的基本信息 (从文件中)
+ *
+ * @param imageFile 源图片路径
+ * @return 图片的基本信息,获取信息失败时返回null
+ */
+ public ImageMetaInfo getImageInfo(File imageFile) {
+ BufferedImage image = null;
+ ImageMetaInfo imageInfo = new ImageMetaInfo();
+ try {
+ if (null == imageFile || !imageFile.exists()) {
+ return null;
+ }
+ image = ImageIO.read(imageFile);
+ imageInfo.setWidth(image.getWidth());
+ imageInfo.setHeight(image.getHeight());
+ imageInfo.setSize(imageFile.length());
+ imageInfo.setFormat(getFormat(imageFile));
+ return imageInfo;
+ } catch (Exception e) {
+ LOG.error("--- 获取图片的基本信息失败 --- 错误信息: " + e.getMessage());
+ return null;
+ }
+ }
+
+ /**
+ * 检查文件类型是否是给定的类型
+ *
+ * @param inputFile 源文件
+ * @param givenFormat 指定的文件类型;例如:{"MP4", "AVI"}
+ * @return
+ */
+ public boolean isGivenFormat(File inputFile, String[] givenFormat) {
+ if (null == inputFile || !inputFile.exists()) {
+ LOG.error("--- 无法检查文件类型是否满足要求,因为要检查的文件不存在 --- 源文件: " + inputFile);
+ return false;
+ }
+ if (null == givenFormat || givenFormat.length <= 0) {
+ LOG.error("--- 无法检查文件类型是否满足要求,因为没有指定的文件类型 ---");
+ return false;
+ }
+ String fomat = getFormat(inputFile);
+ return isLegalFormat(fomat, givenFormat);
+ }
+
+ /**
+ * 使用FFmpeg的"-i"命令来解析视频信息
+ *
+ * @param inputFile 源媒体文件
+ * @return 解析后的结果字符串,解析失败时为空
+ */
+ public String getMetaInfoFromFFmpeg(File inputFile) {
+ if (inputFile == null || !inputFile.exists()) {
+ throw new RuntimeException("源媒体文件不存在,源媒体文件路径: ");
+ }
+ Listfinish()
 * flushes all frames. If <code>setSize</code> was not invoked, the size of
 * the first image is used for all subsequent frames.
 *
 * @param im BufferedImage containing frame to write.
 * @return true if successful.
 */
    public boolean addFrame(BufferedImage im) {
        // reject frames before start() or after finish()
        if ((im == null) || !started) {
            return false;
        }
        boolean ok = true;
        try {
            if (!sizeSet) {
                // use first frame's size
                setSize(im.getWidth(), im.getHeight());
            }
            image = im;
            getImagePixels(); // convert to correct format if necessary
            analyzePixels(); // build color table & map pixels
            // GIF block order matters: the first frame carries the logical
            // screen descriptor, global color table and (optionally) the
            // Netscape looping extension; later frames carry local tables.
            if (firstFrame) {
                writeLSD(); // logical screen descriptor
                writePalette(); // global color table
                if (repeat >= 0) {
                    // use NS app extension to indicate reps
                    writeNetscapeExt();
                }
            }
            writeGraphicCtrlExt(); // write graphic control extension
            writeImageDesc(); // image descriptor
            if (!firstFrame) {
                writePalette(); // local color table
            }
            writePixels(); // encode and write pixel data
            firstFrame = false;
        } catch (IOException e) {
            ok = false;
        }
        return ok;
    }
+
    /**
     * Flushes any pending data and closes output file. If writing to an
     * OutputStream, the stream is not closed.
     *
     * @return true if the trailer was written and cleanup succeeded
     */
    public boolean finish() {
        if (!started)
            return false;
        boolean ok = true;
        started = false;
        try {
            out.write(0x3b); // gif trailer
            out.flush();
            if (closeStream) {
                out.close();
            }
        } catch (IOException e) {
            ok = false;
        }
        // reset for subsequent use
        transIndex = 0;
        out = null;
        image = null;
        pixels = null;
        indexedPixels = null;
        colorTab = null;
        closeStream = false;
        firstFrame = true;
        return ok;
    }
+
+ /**
+ * Sets frame rate in frames per second. Equivalent to
+ * setDelay(1000/fps)
.
+ *
+ * @param fps
+ * float frame rate (frames per second)
+ */
+ public void setFrameRate(float fps) {
+ if ((int)fps != 0) {
+ BigDecimal result = BigDecimal.valueOf(100f).divide(BigDecimal.valueOf(fps));
+ delay = Math.round(result.floatValue());
+ }
+ }
+
+ /**
+ * Sets quality of color quantization (conversion of images to the maximum
+ * 256 colors allowed by the GIF specification). Lower values (minimum = 1)
+ * produce better colors, but slow processing significantly. 10 is the
+ * default, and produces good color mapping at reasonable speeds. Values
+ * greater than 20 do not yield significant improvements in speed.
+ *
+ * @param quality
+ * int greater than 0.
+ * @return
+ */
+ public void setQuality(int quality) {
+ if (quality < 1)
+ quality = 1;
+ sample = quality;
+ }
+
+ /**
+ * Sets the GIF frame size. The default size is the size of the first frame
+ * added if this method is not invoked.
+ *
+ * @param w
+ * int frame width.
+ * @param h
+ * int frame width.
+ */
+ public void setSize(int w, int h) {
+ if (started && !firstFrame)
+ return;
+ width = w;
+ height = h;
+ if (width < 1)
+ width = 320;
+ if (height < 1)
+ height = 240;
+ sizeSet = true;
+ }
+
+ /**
+ * Initiates GIF file creation on the given stream. The stream is not closed
+ * automatically.
+ *
+ * @param os
+ * OutputStream on which GIF images are written.
+ * @return false if initial write failed.
+ */
+ public boolean start(OutputStream os) {
+ if (os == null)
+ return false;
+ boolean ok = true;
+ closeStream = false;
+ out = os;
+ try {
+ writeString("GIF89a"); // header
+ } catch (IOException e) {
+ ok = false;
+ }
+ return started = ok;
+ }
+
+ /**
+ * Initiates writing of a GIF file with the specified name.
+ *
+ * @param file
+ * String containing output file name.
+ * @return false if open or initial write failed.
+ */
+ public boolean start(String file){
+ boolean ok = true;
+ try {
+ fileOutputStream = new FileOutputStream(file);
+ out = new BufferedOutputStream(fileOutputStream);
+ ok = start(out);
+ closeStream = true;
+ } catch (IOException e) {
+ ok = false;
+ }
+ return started = ok;
+ }
+
    /**
     * Analyzes image colors and creates color map.
     */
    protected void analyzePixels() {
        int len = pixels.length;
        int nPix = len / 3; // pixels arrive as BGR triples
        indexedPixels = new byte[nPix];
        NeuQuant nq = new NeuQuant(pixels, len, sample);
        // initialize quantizer
        colorTab = nq.process(); // create reduced palette
        // convert map from BGR to RGB
        for (int i = 0; i < colorTab.length; i += 3) {
            byte temp = colorTab[i];
            colorTab[i] = colorTab[i + 2];
            colorTab[i + 2] = temp;
            usedEntry[i / 3] = false;
        }
        // map image pixels to new palette
        int k = 0;
        for (int i = 0; i < nPix; i++) {
            // NOTE: Java's left-to-right argument evaluation matters here —
            // the three k++ reads consume exactly one BGR triple per pixel
            int index = nq.map(pixels[k++] & 0xff, pixels[k++] & 0xff, pixels[k++] & 0xff);
            usedEntry[index] = true;
            indexedPixels[i] = (byte) index;
        }
        pixels = null; // release raw pixel buffer; only indexed data is needed now
        colorDepth = 8;
        palSize = 7;
        // get closest match to transparent color if specified
        if (transparent != null) {
            transIndex = findClosest(transparent);
        }
    }
+
    /**
     * Returns index of palette color closest to c
     * (squared Euclidean distance in RGB space, considering only palette
     * entries actually used by the current frame).
     */
    protected int findClosest(Color c) {
        if (colorTab == null)
            return -1;
        int r = c.getRed();
        int g = c.getGreen();
        int b = c.getBlue();
        int minpos = 0;
        int dmin = 256 * 256 * 256; // larger than any possible squared distance
        int len = colorTab.length;
        for (int i = 0; i < len;) {
            // i advances twice via the reads below, then once more at the
            // bottom of the loop — one palette entry (3 bytes) per iteration
            int dr = r - (colorTab[i++] & 0xff);
            int dg = g - (colorTab[i++] & 0xff);
            int db = b - (colorTab[i] & 0xff);
            int d = dr * dr + dg * dg + db * db;
            int index = i / 3;
            if (usedEntry[index] && (d < dmin)) {
                dmin = d;
                minpos = index;
            }
            i++;
        }
        return minpos;
    }
+
+ /**
+ * Extracts image pixels into byte array "pixels"
+ */
+ protected void getImagePixels() {
+ int w = image.getWidth();
+ int h = image.getHeight();
+ int type = image.getType();
+ if ((w != width) || (h != height) || (type != BufferedImage.TYPE_3BYTE_BGR)) {
+ // create new image with right size/format
+ BufferedImage temp = new BufferedImage(width, height, BufferedImage.TYPE_3BYTE_BGR);
+ Graphics2D g = temp.createGraphics();
+ g.drawImage(image, 0, 0, null);
+ image = temp;
+ }
+ pixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
+ }
+
    /**
     * Writes Graphic Control Extension
     * (per-frame delay, disposal method and transparency settings).
     */
    protected void writeGraphicCtrlExt() throws IOException {
        out.write(0x21); // extension introducer
        out.write(0xf9); // GCE label
        out.write(4); // data block size
        int transp, disp;
        if (transparent == null) {
            transp = 0;
            disp = 0; // dispose = no action
        } else {
            transp = 1;
            disp = 2; // force clear if using transparent color
        }
        if (dispose >= 0) {
            disp = dispose & 7; // user override
        }
        disp <<= 2; // disposal occupies bits 2-4 of the packed field
        // packed fields: reserved | disposal | user-input | transparency
        out.write(0 | disp | transp); // 8 transparency flag
        writeShort(delay); // delay x 1/100 sec
        out.write(transIndex); // transparent color index
        out.write(0); // block terminator
    }
+
    /**
     * Writes Image Descriptor
     * (frame position/size plus the local color table flag).
     */
    protected void writeImageDesc() throws IOException {
        out.write(0x2c); // image separator
        writeShort(0); // image position x,y = 0,0
        writeShort(0);
        writeShort(width); // image size
        writeShort(height);
        // packed fields
        if (firstFrame) {
            // no LCT - GCT is used for first (or only) frame
            out.write(0);
        } else {
            // specify normal LCT
            out.write(0x80 | // 1 local color table 1=yes
                    palSize); // 6-8 size of color table
        }
    }
+
    /**
     * Writes Logical Screen Descriptor
     * (overall canvas size and global color table parameters).
     */
    protected void writeLSD() throws IOException {
        // logical screen size
        writeShort(width);
        writeShort(height);
        // packed fields
        out.write((0x80 | // 1 : global color table flag = 1 (gct used)
                0x70 | // 2-4 : color resolution = 7
                0x00 | // 5 : gct sort flag = 0
                palSize)); // 6-8 : gct size
        out.write(0); // background color index
        out.write(0); // pixel aspect ratio - assume 1:1
    }
+
    /**
     * Writes Netscape application extension to define repeat count.
     */
    protected void writeNetscapeExt() throws IOException {
        out.write(0x21); // extension introducer
        out.write(0xff); // app extension label
        out.write(11); // block size
        writeString("NETSCAPE" + "2.0"); // app id + auth code
        out.write(3); // sub-block size
        out.write(1); // loop sub-block id
        writeShort(repeat); // loop count (extra iterations, 0=repeat forever)
        out.write(0); // block terminator
    }
+
+ /**
+ * Writes color table
+ */
+ protected void writePalette() throws IOException {
+ out.write(colorTab, 0, colorTab.length);
+ int n = (3 * 256) - colorTab.length;
+ for (int i = 0; i < n; i++) {
+ out.write(0);
+ }
+ }
+
+ /**
+ * Encodes and writes pixel data
+ */
+ protected void writePixels() throws IOException {
+ LZWEncoder encoder = new LZWEncoder(width, height, indexedPixels, colorDepth);
+ encoder.encode(out);
+ }
+
+ /**
+ * Write 16-bit value to output stream, LSB first
+ */
+ protected void writeShort(int value) throws IOException {
+ out.write(value & 0xff);
+ out.write((value >> 8) & 0xff);
+ }
+
+ /**
+ * Writes string to output stream
+ */
+ protected void writeString(String s) throws IOException {
+ for (int i = 0; i < s.length(); i++) {
+ out.write((byte) s.charAt(i));
+ }
+ }
+}
diff --git a/module-file-media/src/main/java/ink/wgink/module/file/media/manager/domain/gif/LZWEncoder.java b/module-file-media/src/main/java/ink/wgink/module/file/media/manager/domain/gif/LZWEncoder.java
new file mode 100644
index 00000000..f79668cd
--- /dev/null
+++ b/module-file-media/src/main/java/ink/wgink/module/file/media/manager/domain/gif/LZWEncoder.java
@@ -0,0 +1,240 @@
+package ink.wgink.module.file.media.manager.domain.gif;
+
+import java.io.IOException;
+import java.io.OutputStream;
+
/**
 * LZW encoder used when assembling multiple static frame images into an
 * animated gif. Implements the GIF flavor of Lempel-Ziv compression:
 * variable-width codes, CLEAR/EOF codes and 255-byte data sub-blocks.
 */
public class LZWEncoder {
    // sentinel returned by nextPixel() once every pixel has been consumed
    private static final int EOF = -1;
    private int imgW, imgH; // frame dimensions in pixels
    private byte[] pixAry; // color-indexed pixels, one byte each
    private int initCodeSize; // initial LZW code size byte written before the data
    private int remaining; // pixels still to be encoded
    private int curPixel; // read cursor into pixAry
    // GIFCOMPR.C - GIF Image compression routines
    //
    // Lempel-Ziv compression based on 'compress'. GIF modifications by
    // David Rowley (mgardi@watdcsu.waterloo.edu)
    // General DEFINEs
    static final int BITS = 12;
    static final int HSIZE = 5003; // 80% occupancy
    // GIF Image compression - modified 'compress'
    //
    // Based on: compress.c - File compression ala IEEE Computer, June 1984.
    //
    // By Authors: Spencer W. Thomas (decvax!harpo!utah-cs!utah-gr!thomas)
    // Jim McKie (decvax!mcvax!jim)
    // Steve Davies (decvax!vax135!petsd!peora!srd)
    // Ken Turkowski (decvax!decwrl!turtlevax!ken)
    // James A. Woods (decvax!ihnp4!ames!jaw)
    // Joe Orost (decvax!vax135!petsd!joe)
    int n_bits; // number of bits/code
    int maxbits = BITS; // user settable max # bits/code
    int maxcode; // maximum code, given n_bits
    int maxmaxcode = 1 << BITS; // should NEVER generate this code
    int[] htab = new int[HSIZE];
    int[] codetab = new int[HSIZE];
    int hsize = HSIZE; // for dynamic table sizing
    int free_ent = 0; // first unused entry
    // block compression parameters -- after all codes are used up,
    // and compression rate changes, start over.
    boolean clear_flg = false;
    // Algorithm: use open addressing double hashing (no chaining) on the
    // prefix code / next character combination. We do a variant of Knuth's
    // algorithm D (vol. 3, sec. 6.4) along with G. Knott's relatively-prime
    // secondary probe. Here, the modular division first probe is gives way
    // to a faster exclusive-or manipulation. Also do block compression with
    // an adaptive reset, whereby the code table is cleared when the compression
    // ratio decreases, but after the table fills. The variable-length output
    // codes are re-sized at this point, and a special CLEAR code is generated
    // for the decompressor. Late addition: construct the table according to
    // file size for noticeable speed improvement on small files. Please direct
    // questions about this implementation to ames!jaw.
    int g_init_bits;
    int ClearCode;
    int EOFCode;
    // output
    //
    // Output the given code.
    // Inputs:
    // code: A n_bits-bit integer. If == -1, then EOF. This assumes
    // that n_bits =< wordsize - 1.
    // Outputs:
    // Outputs code to the file.
    // Assumptions:
    // Chars are 8 bits long.
    // Algorithm:
    // Maintain a BITS character long buffer (so that 8 codes will
    // fit in it exactly). Use the VAX insv instruction to insert each
    // code in turn. When the buffer fills up empty it and start over.
    int cur_accum = 0;
    int cur_bits = 0;
    int masks[] = { 0x0000, 0x0001, 0x0003, 0x0007, 0x000F, 0x001F, 0x003F, 0x007F, 0x00FF, 0x01FF, 0x03FF, 0x07FF,
            0x0FFF, 0x1FFF, 0x3FFF, 0x7FFF, 0xFFFF };
    // Number of characters so far in this 'packet'
    int a_count;
    // Define the storage for the packet accumulator
    byte[] accum = new byte[256];

    // ----------------------------------------------------------------------------
    // width/height in pixels; pixels holds one palette index per pixel;
    // color_depth determines the initial code size (GIF minimum is 2)
    LZWEncoder(int width, int height, byte[] pixels, int color_depth) {
        imgW = width;
        imgH = height;
        pixAry = pixels;
        initCodeSize = Math.max(2, color_depth);
    }

    // Add a character to the end of the current packet, and if it is 254
    // characters, flush the packet to disk.
    void char_out(byte c, OutputStream outs) throws IOException {
        accum[a_count++] = c;
        if (a_count >= 254)
            flush_char(outs);
    }

    // Clear out the hash table
    // table clear for block compress
    void cl_block(OutputStream outs) throws IOException {
        cl_hash(hsize);
        free_ent = ClearCode + 2;
        clear_flg = true;
        output(ClearCode, outs);
    }

    // reset code table
    void cl_hash(int hsize) {
        for (int i = 0; i < hsize; ++i)
            htab[i] = -1;
    }

    // Core LZW loop: hashes (prefix code, next pixel) pairs, emitting a code
    // whenever a pair is not yet in the table and inserting it for later reuse.
    void compress(int init_bits, OutputStream outs) throws IOException {
        int fcode;
        int i /* = 0 */;
        int c;
        int ent;
        int disp;
        int hsize_reg;
        int hshift;
        // Set up the globals: g_init_bits - initial number of bits
        g_init_bits = init_bits;
        // Set up the necessary values
        clear_flg = false;
        n_bits = g_init_bits;
        maxcode = MAXCODE(n_bits);
        ClearCode = 1 << (init_bits - 1);
        EOFCode = ClearCode + 1;
        free_ent = ClearCode + 2;
        a_count = 0; // clear packet
        ent = nextPixel();
        hshift = 0;
        for (fcode = hsize; fcode < 65536; fcode *= 2)
            ++hshift;
        hshift = 8 - hshift; // set hash code range bound
        hsize_reg = hsize;
        cl_hash(hsize_reg); // clear hash table
        output(ClearCode, outs);
        outer_loop: while ((c = nextPixel()) != EOF) {
            fcode = (c << maxbits) + ent;
            i = (c << hshift) ^ ent; // xor hashing
            if (htab[i] == fcode) {
                ent = codetab[i];
                continue;
            } else if (htab[i] >= 0) // non-empty slot
            {
                disp = hsize_reg - i; // secondary hash (after G. Knott)
                if (i == 0)
                    disp = 1;
                do {
                    if ((i -= disp) < 0)
                        i += hsize_reg;
                    if (htab[i] == fcode) {
                        ent = codetab[i];
                        continue outer_loop;
                    }
                } while (htab[i] >= 0);
            }
            output(ent, outs);
            ent = c;
            if (free_ent < maxmaxcode) {
                codetab[i] = free_ent++; // code -> hashtable
                htab[i] = fcode;
            } else
                cl_block(outs);
        }
        // Put out the final code.
        output(ent, outs);
        output(EOFCode, outs);
    }

    // ----------------------------------------------------------------------------
    // Entry point: writes the initial code size, the compressed pixel data in
    // sub-blocks, and the zero block terminator.
    void encode(OutputStream os) throws IOException {
        os.write(initCodeSize); // write "initial code size" byte
        remaining = imgW * imgH; // reset navigation variables
        curPixel = 0;
        compress(initCodeSize + 1, os); // compress and write the pixel data
        os.write(0); // write block terminator
    }

    // Flush the packet to disk, and reset the accumulator
    void flush_char(OutputStream outs) throws IOException {
        if (a_count > 0) {
            outs.write(a_count); // sub-block length prefix
            outs.write(accum, 0, a_count);
            a_count = 0;
        }
    }

    // largest code representable in n_bits
    final int MAXCODE(int n_bits) {
        return (1 << n_bits) - 1;
    }

    // ----------------------------------------------------------------------------
    // Return the next pixel from the image
    // ----------------------------------------------------------------------------
    private int nextPixel() {
        if (remaining == 0)
            return EOF;
        --remaining;
        byte pix = pixAry[curPixel++];
        return pix & 0xff;
    }

    // Packs a variable-width code into the bit accumulator, emitting full
    // bytes as they become available; also grows the code width when the
    // table fills, and drains the accumulator at EOF.
    void output(int code, OutputStream outs) throws IOException {
        cur_accum &= masks[cur_bits];
        if (cur_bits > 0)
            cur_accum |= (code << cur_bits);
        else
            cur_accum = code;
        cur_bits += n_bits;
        while (cur_bits >= 8) {
            char_out((byte) (cur_accum & 0xff), outs);
            cur_accum >>= 8;
            cur_bits -= 8;
        }
        // If the next entry is going to be too big for the code size,
        // then increase it, if possible.
        if (free_ent > maxcode || clear_flg) {
            if (clear_flg) {
                maxcode = MAXCODE(n_bits = g_init_bits);
                clear_flg = false;
            } else {
                ++n_bits;
                if (n_bits == maxbits)
                    maxcode = maxmaxcode;
                else
                    maxcode = MAXCODE(n_bits);
            }
        }
        if (code == EOFCode) {
            // At EOF, write the rest of the buffer.
            while (cur_bits > 0) {
                char_out((byte) (cur_accum & 0xff), outs);
                cur_accum >>= 8;
                cur_bits -= 8;
            }
            flush_char(outs);
        }
    }
}
diff --git a/module-file-media/src/main/java/ink/wgink/module/file/media/manager/domain/gif/NeuQuant.java b/module-file-media/src/main/java/ink/wgink/module/file/media/manager/domain/gif/NeuQuant.java
new file mode 100644
index 00000000..f672dc58
--- /dev/null
+++ b/module-file-media/src/main/java/ink/wgink/module/file/media/manager/domain/gif/NeuQuant.java
@@ -0,0 +1,426 @@
+package ink.wgink.module.file.media.manager.domain.gif;
+
+/**
+ * Neural-network colour quantizer used when composing multiple static images
+ * into an animated GIF: it learns a 256-colour palette from raw BGR pixel
+ * bytes and maps arbitrary colours to palette indices.
+ * NOTE(review): this looks like Anthony Dekker's classic NeuQuant
+ * implementation — confirm provenance and licence before redistribution.
+ */
+public class NeuQuant {
+ protected static final int netsize = 256; /* number of colours used */
+ /* four primes near 500 - assume no image has a length so large */
+ /* that it is divisible by all four primes */
+ protected static final int prime1 = 499;
+ protected static final int prime2 = 491;
+ protected static final int prime3 = 487;
+ protected static final int prime4 = 503;
+ protected static final int minpicturebytes = (3 * prime4);
+ /* minimum size for input image */
+ /*
+ * Program Skeleton ---------------- [select samplefac in range 1..30] [read
+ * image from input file] pic = (unsigned char*) malloc(3*width*height);
+ * initnet(pic,3*width*height,samplefac); learn(); unbiasnet(); [write
+ * output image header, using writecolourmap(f)] inxbuild(); write output
+ * image using inxsearch(b,g,r)
+ */
+ /*
+ * Network Definitions -------------------
+ */
+ protected static final int maxnetpos = (netsize - 1);
+ protected static final int netbiasshift = 4; /* bias for colour values */
+ protected static final int ncycles = 100; /* no. of learning cycles */
+ /* defs for freq and bias */
+ protected static final int intbiasshift = 16; /* bias for fractions */
+ protected static final int intbias = (((int) 1) << intbiasshift);
+ protected static final int gammashift = 10; /* gamma = 1024 */
+ protected static final int gamma = (((int) 1) << gammashift);
+ protected static final int betashift = 10;
+ protected static final int beta = (intbias >> betashift); /*
+ * beta = 1/1024
+ */
+ protected static final int betagamma = (intbias << (gammashift - betashift));
+ /* defs for decreasing radius factor */
+ protected static final int initrad = (netsize >> 3); /*
+ * for 256 cols, radius
+ * starts
+ */
+ protected static final int radiusbiasshift = 6; /*
+ * at 32.0 biased by 6 bits
+ */
+ protected static final int radiusbias = (((int) 1) << radiusbiasshift);
+ protected static final int initradius = (initrad
+ * radiusbias); /* and decreases by a */
+ protected static final int radiusdec = 30; /* factor of 1/30 each cycle */
+ /* defs for decreasing alpha factor */
+ protected static final int alphabiasshift = 10; /* alpha starts at 1.0 */
+ protected static final int initalpha = (((int) 1) << alphabiasshift);
+ protected int alphadec; /* biased by 10 bits */
+ /* radbias and alpharadbias used for radpower calculation */
+ protected static final int radbiasshift = 8;
+ protected static final int radbias = (((int) 1) << radbiasshift);
+ protected static final int alpharadbshift = (alphabiasshift + radbiasshift);
+ protected static final int alpharadbias = (((int) 1) << alpharadbshift);
+ /*
+ * Types and Global Variables --------------------------
+ */
+ protected byte[] thepicture; /* the input image itself */
+ protected int lengthcount; /* lengthcount = H*W*3 */
+ protected int samplefac; /* sampling factor 1..30 */
+ // typedef int pixel[4]; /* BGRc */
+ protected int[][] network; /* the network itself - [netsize][4] */
+ protected int[] netindex = new int[256];
+ /* for network lookup - really 256 */
+ protected int[] bias = new int[netsize];
+ /* bias and freq arrays for learning */
+ protected int[] freq = new int[netsize];
+ protected int[] radpower = new int[initrad];
+
+ /* radpower for precomputation */
+ /*
+ * Initialise network in range (0,0,0) to (255,255,255) and set parameters
+ * -----------------------------------------------------------------------
+ */
+ // thepic: raw pixel bytes (3 bytes per pixel, BGR order per the learn()
+ // indexing); len: number of bytes (H*W*3); sample: sampling factor 1..30.
+ public NeuQuant(byte[] thepic, int len, int sample) {
+ int i;
+ int[] p;
+ thepicture = thepic;
+ lengthcount = len;
+ samplefac = sample;
+ network = new int[netsize][];
+ for (i = 0; i < netsize; i++) {
+ network[i] = new int[4];
+ p = network[i];
+ // spread initial neurons evenly along the grey diagonal (biased values)
+ p[0] = p[1] = p[2] = (i << (netbiasshift + 8)) / netsize;
+ freq[i] = intbias / netsize; /* 1/netsize */
+ bias[i] = 0;
+ }
+ }
+
+ // Returns the learned palette as netsize 3-byte entries, reordered so entry
+ // i corresponds to the colour whose recorded index (network[..][3]) is i.
+ public byte[] colorMap() {
+ byte[] map = new byte[3 * netsize];
+ int[] index = new int[netsize];
+ for (int i = 0; i < netsize; i++)
+ index[network[i][3]] = i;
+ int k = 0;
+ for (int i = 0; i < netsize; i++) {
+ int j = index[i];
+ map[k++] = (byte) (network[j][0]);
+ map[k++] = (byte) (network[j][1]);
+ map[k++] = (byte) (network[j][2]);
+ }
+ return map;
+ }
+
+ /*
+ * Insertion sort of network and building of netindex[0..255] (to do after
+ * unbias)
+ * -------------------------------------------------------------------------
+ * ------
+ */
+ public void inxbuild() {
+ int i, j, smallpos, smallval;
+ int[] p;
+ int[] q;
+ int previouscol, startpos;
+ previouscol = 0;
+ startpos = 0;
+ for (i = 0; i < netsize; i++) {
+ p = network[i];
+ smallpos = i;
+ smallval = p[1]; /* index on g */
+ /* find smallest in i..netsize-1 */
+ for (j = i + 1; j < netsize; j++) {
+ q = network[j];
+ if (q[1] < smallval) { /* index on g */
+ smallpos = j;
+ smallval = q[1]; /* index on g */
+ }
+ }
+ q = network[smallpos];
+ /* swap p (i) and q (smallpos) entries */
+ if (i != smallpos) {
+ j = q[0];
+ q[0] = p[0];
+ p[0] = j;
+ j = q[1];
+ q[1] = p[1];
+ p[1] = j;
+ j = q[2];
+ q[2] = p[2];
+ p[2] = j;
+ j = q[3];
+ q[3] = p[3];
+ p[3] = j;
+ }
+ /* smallval entry is now in position i */
+ if (smallval != previouscol) {
+ netindex[previouscol] = (startpos + i) >> 1;
+ for (j = previouscol + 1; j < smallval; j++)
+ netindex[j] = i;
+ previouscol = smallval;
+ startpos = i;
+ }
+ }
+ netindex[previouscol] = (startpos + maxnetpos) >> 1;
+ for (j = previouscol + 1; j < 256; j++)
+ netindex[j] = maxnetpos; /* really 256 */
+ }
+
+ /*
+ * Main Learning Loop ------------------
+ * Samples pixels with a prime-sized stride and moves the winning neuron
+ * (and its neighbours, while rad > 0) towards each sampled colour, with
+ * alpha and radius decaying every `delta` samples.
+ */
+ public void learn() {
+ int i, j, b, g, r;
+ int radius, rad, alpha, step, delta, samplepixels;
+ byte[] p;
+ int pix, lim;
+ if (lengthcount < minpicturebytes)
+ samplefac = 1;
+ alphadec = 30 + ((samplefac - 1) / 3);
+ p = thepicture;
+ pix = 0;
+ lim = lengthcount;
+ samplepixels = lengthcount / (3 * samplefac);
+ delta = samplepixels / ncycles;
+ alpha = initalpha;
+ radius = initradius;
+ rad = radius >> radiusbiasshift;
+ if (rad <= 1)
+ rad = 0;
+ for (i = 0; i < rad; i++)
+ radpower[i] = alpha * (((rad * rad - i * i) * radbias) / (rad * rad));
+ // fprintf(stderr,"beginning 1D learning: initial radius=%d/n", rad);
+ // choose a stride coprime with the pixel count so sampling covers the image
+ if (lengthcount < minpicturebytes)
+ step = 3;
+ else if ((lengthcount % prime1) != 0)
+ step = 3 * prime1;
+ else {
+ if ((lengthcount % prime2) != 0)
+ step = 3 * prime2;
+ else {
+ if ((lengthcount % prime3) != 0)
+ step = 3 * prime3;
+ else
+ step = 3 * prime4;
+ }
+ }
+ i = 0;
+ while (i < samplepixels) {
+ b = (p[pix + 0] & 0xff) << netbiasshift;
+ g = (p[pix + 1] & 0xff) << netbiasshift;
+ r = (p[pix + 2] & 0xff) << netbiasshift;
+ j = contest(b, g, r);
+ altersingle(alpha, j, b, g, r);
+ if (rad != 0)
+ alterneigh(rad, j, b, g, r); /* alter neighbours */
+ pix += step;
+ if (pix >= lim)
+ pix -= lengthcount;
+ i++;
+ if (delta == 0)
+ delta = 1;
+ if (i % delta == 0) {
+ alpha -= alpha / alphadec;
+ radius -= radius / radiusdec;
+ rad = radius >> radiusbiasshift;
+ if (rad <= 1)
+ rad = 0;
+ for (j = 0; j < rad; j++)
+ radpower[j] = alpha * (((rad * rad - j * j) * radbias) / (rad * rad));
+ }
+ }
+ // fprintf(stderr,"finished 1D learning: final alpha=%f
+ // !/n",((float)alpha)/initalpha);
+ }
+
+ /*
+ * Search for BGR values 0..255 (after net is unbiased) and return colour
+ * index
+ * -------------------------------------------------------------------------
+ * ---
+ * Walks outwards from netindex[g] in both directions over the g-sorted
+ * network, pruning once the green distance alone exceeds the best distance.
+ */
+ public int map(int b, int g, int r) {
+ int i, j, dist, a, bestd;
+ int[] p;
+ int best;
+ bestd = 1000; /* biggest possible dist is 256*3 */
+ best = -1;
+ i = netindex[g]; /* index on g */
+ j = i - 1; /* start at netindex[g] and work outwards */
+ while ((i < netsize) || (j >= 0)) {
+ if (i < netsize) {
+ p = network[i];
+ dist = p[1] - g; /* inx key */
+ if (dist >= bestd)
+ i = netsize; /* stop iter */
+ else {
+ i++;
+ if (dist < 0)
+ dist = -dist;
+ a = p[0] - b;
+ if (a < 0)
+ a = -a;
+ dist += a;
+ if (dist < bestd) {
+ a = p[2] - r;
+ if (a < 0)
+ a = -a;
+ dist += a;
+ if (dist < bestd) {
+ bestd = dist;
+ best = p[3];
+ }
+ }
+ }
+ }
+ if (j >= 0) {
+ p = network[j];
+ dist = g - p[1]; /* inx key - reverse dif */
+ if (dist >= bestd)
+ j = -1; /* stop iter */
+ else {
+ j--;
+ if (dist < 0)
+ dist = -dist;
+ a = p[0] - b;
+ if (a < 0)
+ a = -a;
+ dist += a;
+ if (dist < bestd) {
+ a = p[2] - r;
+ if (a < 0)
+ a = -a;
+ dist += a;
+ if (dist < bestd) {
+ bestd = dist;
+ best = p[3];
+ }
+ }
+ }
+ }
+ }
+ return (best);
+ }
+
+ // Full pipeline: learn the palette, unbias to byte range, build the green
+ // index, and return the colour map for the GIF palette.
+ public byte[] process() {
+ learn();
+ unbiasnet();
+ inxbuild();
+ return colorMap();
+ }
+
+ /*
+ * Unbias network to give byte values 0..255 and record position i to
+ * prepare for sort
+ * -------------------------------------------------------------------------
+ * ----------
+ */
+ public void unbiasnet() {
+ int i, j;
+ for (i = 0; i < netsize; i++) {
+ network[i][0] >>= netbiasshift;
+ network[i][1] >>= netbiasshift;
+ network[i][2] >>= netbiasshift;
+ network[i][3] = i; /* record colour no */
+ }
+ }
+
+ /*
+ * Move adjacent neurons by precomputed alpha*(1-((i-j)^2/[r]^2)) in
+ * radpower[|i-j|]
+ * -------------------------------------------------------------------------
+ * --------
+ */
+ protected void alterneigh(int rad, int i, int b, int g, int r) {
+ int j, k, lo, hi, a, m;
+ int[] p;
+ lo = i - rad;
+ if (lo < -1)
+ lo = -1;
+ hi = i + rad;
+ if (hi > netsize)
+ hi = netsize;
+ j = i + 1;
+ k = i - 1;
+ m = 1;
+ while ((j < hi) || (k > lo)) {
+ a = radpower[m++];
+ if (j < hi) {
+ p = network[j++];
+ // NOTE(review): empty catch is intentional in the original source
+ // ("prevents 1.3 miscompilation") — confirm it is still needed on
+ // modern JVMs before removing.
+ try {
+ p[0] -= (a * (p[0] - b)) / alpharadbias;
+ p[1] -= (a * (p[1] - g)) / alpharadbias;
+ p[2] -= (a * (p[2] - r)) / alpharadbias;
+ } catch (Exception e) {
+ } // prevents 1.3 miscompilation
+ }
+ if (k > lo) {
+ p = network[k--];
+ try {
+ p[0] -= (a * (p[0] - b)) / alpharadbias;
+ p[1] -= (a * (p[1] - g)) / alpharadbias;
+ p[2] -= (a * (p[2] - r)) / alpharadbias;
+ } catch (Exception e) {
+ }
+ }
+ }
+ }
+
+ /*
+ * Move neuron i towards biased (b,g,r) by factor alpha
+ * ----------------------------------------------------
+ */
+ protected void altersingle(int alpha, int i, int b, int g, int r) {
+ /* alter hit neuron */
+ int[] n = network[i];
+ n[0] -= (alpha * (n[0] - b)) / initalpha;
+ n[1] -= (alpha * (n[1] - g)) / initalpha;
+ n[2] -= (alpha * (n[2] - r)) / initalpha;
+ }
+
+ /*
+ * Search for biased BGR values ----------------------------
+ */
+ protected int contest(int b, int g, int r) {
+ /* finds closest neuron (min dist) and updates freq */
+ /* finds best neuron (min dist-bias) and returns position */
+ /*
+ * for frequently chosen neurons, freq[i] is high and bias[i] is
+ * negative
+ */
+ /* bias[i] = gamma*((1/netsize)-freq[i]) */
+ int i, dist, a, biasdist, betafreq;
+ int bestpos, bestbiaspos, bestd, bestbiasd;
+ int[] n;
+ bestd = ~(((int) 1) << 31); /* Integer.MAX_VALUE */
+ bestbiasd = bestd;
+ bestpos = -1;
+ bestbiaspos = bestpos;
+ for (i = 0; i < netsize; i++) {
+ n = network[i];
+ dist = n[0] - b;
+ if (dist < 0)
+ dist = -dist;
+ a = n[1] - g;
+ if (a < 0)
+ a = -a;
+ dist += a;
+ a = n[2] - r;
+ if (a < 0)
+ a = -a;
+ dist += a;
+ if (dist < bestd) {
+ bestd = dist;
+ bestpos = i;
+ }
+ biasdist = dist - ((bias[i]) >> (intbiasshift - netbiasshift));
+ if (biasdist < bestbiasd) {
+ bestbiasd = biasdist;
+ bestbiaspos = i;
+ }
+ betafreq = (freq[i] >> betashift);
+ freq[i] -= betafreq;
+ bias[i] += (betafreq << gammashift);
+ }
+ freq[bestpos] += beta;
+ bias[bestpos] -= betagamma;
+ return (bestbiaspos);
+ }
+}
diff --git a/module-file-media/src/main/java/ink/wgink/module/file/media/pojo/vos/MediaVO.java b/module-file-media/src/main/java/ink/wgink/module/file/media/pojo/vos/MediaVO.java
new file mode 100644
index 00000000..f944f753
--- /dev/null
+++ b/module-file-media/src/main/java/ink/wgink/module/file/media/pojo/vos/MediaVO.java
@@ -0,0 +1,87 @@
+package ink.wgink.module.file.media.pojo.vos;
+
+/**
+ * When you feel like quitting. Think about why you started
+ *
+ * @ClassName: MediaVO
+ * @Description: Media file value object: common metadata (name, paths, URL,
+ * type, size, summary, MD5) shared by concrete media types such as VideoVO.
+ * @Author: WangGeng
+ * @Date: 2021/6/9 23:10
+ * @Version: 1.0
+ **/
+public class MediaVO {
+
+ // String getters below never return null: a null field yields "" and
+ // non-null values are trimmed.
+ private String fileName;
+ private String filePath;
+ private String fileFullPath;
+ private String fileUrl;
+ private String fileType;
+ private Long fileSize;
+ private String fileSummary;
+ private String fileMd5;
+
+ public String getFileName() {
+ return fileName == null ? "" : fileName.trim();
+ }
+
+ public void setFileName(String fileName) {
+ this.fileName = fileName;
+ }
+
+ public String getFilePath() {
+ return filePath == null ? "" : filePath.trim();
+ }
+
+ public void setFilePath(String filePath) {
+ this.filePath = filePath;
+ }
+
+ public String getFileFullPath() {
+ return fileFullPath == null ? "" : fileFullPath.trim();
+ }
+
+ public void setFileFullPath(String fileFullPath) {
+ this.fileFullPath = fileFullPath;
+ }
+
+ public String getFileUrl() {
+ return fileUrl == null ? "" : fileUrl.trim();
+ }
+
+ public void setFileUrl(String fileUrl) {
+ this.fileUrl = fileUrl;
+ }
+
+ public String getFileType() {
+ return fileType == null ? "" : fileType.trim();
+ }
+
+ public void setFileType(String fileType) {
+ this.fileType = fileType;
+ }
+
+ // fileSize getter may return null (no defaulting, unlike the String getters)
+ public Long getFileSize() {
+ return fileSize;
+ }
+
+ public void setFileSize(Long fileSize) {
+ this.fileSize = fileSize;
+ }
+
+ public String getFileSummary() {
+ return fileSummary == null ? "" : fileSummary.trim();
+ }
+
+ public void setFileSummary(String fileSummary) {
+ this.fileSummary = fileSummary;
+ }
+
+ public String getFileMd5() {
+ return fileMd5 == null ? "" : fileMd5.trim();
+ }
+
+ public void setFileMd5(String fileMd5) {
+ this.fileMd5 = fileMd5;
+ }
+}
diff --git a/module-file-media/src/main/java/ink/wgink/module/file/media/pojo/vos/video/VideoVO.java b/module-file-media/src/main/java/ink/wgink/module/file/media/pojo/vos/video/VideoVO.java
new file mode 100644
index 00000000..dd742aab
--- /dev/null
+++ b/module-file-media/src/main/java/ink/wgink/module/file/media/pojo/vos/video/VideoVO.java
@@ -0,0 +1,107 @@
+package ink.wgink.module.file.media.pojo.vos.video;
+
+import ink.wgink.module.file.media.pojo.vos.MediaVO;
+
+/**
+ * When you feel like quitting. Think about why you started
+ *
+ * @ClassName: VideoVO
+ * @Description: Video value object: extends MediaVO with video stream
+ * metadata (keyframe, dimensions, bit/frame rate, encoder) and the embedded
+ * audio stream's metadata.
+ * @Author: WangGeng
+ * @Date: 2021/6/9 23:10
+ * @Version: 1.0
+ **/
+public class VideoVO extends MediaVO {
+
+ // Populated from VideoMetaInfo / MusicMetaInfo in VideoServiceImpl.upload.
+ // NOTE(review): units of duration/audioDuration (ms vs s) are not visible
+ // here — confirm against MediaManager's parsing.
+ private String keyframe;
+ private Long duration;
+ private Integer width;
+ private Integer height;
+ private Integer bitRate;
+ private String encoder;
+ private Float frameRate;
+ private Long audioDuration;
+ private Integer audioBitRate;
+ private Long audioSampleRate;
+
+ // Never null: a null keyframe yields "" and values are trimmed.
+ public String getKeyframe() {
+ return keyframe == null ? "" : keyframe.trim();
+ }
+
+ public void setKeyframe(String keyframe) {
+ this.keyframe = keyframe;
+ }
+
+ public Long getDuration() {
+ return duration;
+ }
+
+ public void setDuration(Long duration) {
+ this.duration = duration;
+ }
+
+ public Integer getWidth() {
+ return width;
+ }
+
+ public void setWidth(Integer width) {
+ this.width = width;
+ }
+
+ public Integer getHeight() {
+ return height;
+ }
+
+ public void setHeight(Integer height) {
+ this.height = height;
+ }
+
+ public Integer getBitRate() {
+ return bitRate;
+ }
+
+ public void setBitRate(Integer bitRate) {
+ this.bitRate = bitRate;
+ }
+
+ // Never null: a null encoder yields "" and values are trimmed.
+ public String getEncoder() {
+ return encoder == null ? "" : encoder.trim();
+ }
+
+ public void setEncoder(String encoder) {
+ this.encoder = encoder;
+ }
+
+ public Float getFrameRate() {
+ return frameRate;
+ }
+
+ public void setFrameRate(Float frameRate) {
+ this.frameRate = frameRate;
+ }
+
+ public Long getAudioDuration() {
+ return audioDuration;
+ }
+
+ public void setAudioDuration(Long audioDuration) {
+ this.audioDuration = audioDuration;
+ }
+
+ public Integer getAudioBitRate() {
+ return audioBitRate;
+ }
+
+ public void setAudioBitRate(Integer audioBitRate) {
+ this.audioBitRate = audioBitRate;
+ }
+
+ public Long getAudioSampleRate() {
+ return audioSampleRate;
+ }
+
+ public void setAudioSampleRate(Long audioSampleRate) {
+ this.audioSampleRate = audioSampleRate;
+ }
+}
diff --git a/module-file-media/src/main/java/ink/wgink/module/file/media/service/IMediaService.java b/module-file-media/src/main/java/ink/wgink/module/file/media/service/IMediaService.java
new file mode 100644
index 00000000..84069a70
--- /dev/null
+++ b/module-file-media/src/main/java/ink/wgink/module/file/media/service/IMediaService.java
@@ -0,0 +1,48 @@
+package ink.wgink.module.file.media.service;
+
+import org.springframework.web.multipart.MultipartFile;
+
+/**
+ * When you feel like quitting. Think about why you started
+ *
+ * @ClassName: IMediaService
+ * @Description: Media service: shared file upload and type helpers used by
+ * the concrete media services (e.g. video).
+ * @Author: WangGeng
+ * @Date: 2021/6/9 21:32
+ * @Version: 1.0
+ **/
+public interface IMediaService {
+
+ /**
+ * Stream read buffer size: 1 MB (bytes)
+ */
+ int INPUT_STREAM_SIZE = 1048576;
+
+ /**
+ * Single-file upload: stores the file under the given path/name and
+ * computes its MD5 while streaming.
+ *
+ * @param multipartFile uploaded file
+ * @param uploadPath target directory for the stored file
+ * @param uploadName target file name
+ * @return MD5 of the stored file (uppercase hex, per the default implementation)
+ */
+ String upload(MultipartFile multipartFile, String uploadPath, String uploadName);
+
+ /**
+ * Resolves the HTTP Content-Type for a file extension.
+ * NOTE(review): the current implementation is a stub returning null — confirm intended mapping.
+ *
+ * @param fileType file extension (e.g. "mp4")
+ * @return ContentType
+ */
+ String getContentType(String fileType);
+
+ /**
+ * Extracts the file type (extension) from a file name.
+ *
+ * @param fileName file name, e.g. "movie.mp4"
+ * @return lower-cased extension
+ */
+ String getFileType(String fileName);
+
+}
diff --git a/module-file-media/src/main/java/ink/wgink/module/file/media/service/impl/MediaServiceImpl.java b/module-file-media/src/main/java/ink/wgink/module/file/media/service/impl/MediaServiceImpl.java
new file mode 100644
index 00000000..95a28f5f
--- /dev/null
+++ b/module-file-media/src/main/java/ink/wgink/module/file/media/service/impl/MediaServiceImpl.java
@@ -0,0 +1,73 @@
+package ink.wgink.module.file.media.service.impl;
+
+import ink.wgink.common.base.DefaultBaseService;
+import ink.wgink.exceptions.FileException;
+import ink.wgink.module.file.media.service.IMediaService;
+import org.springframework.stereotype.Service;
+import org.springframework.web.multipart.MultipartFile;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.InputStream;
+import java.security.MessageDigest;
+
+/**
+ * When you feel like quitting. Think about why you started
+ *
+ * @ClassName: MediaServiceImpl
+ * @Description: Media service: streams uploads to disk while computing MD5,
+ * plus file-type helpers.
+ * @Author: WangGeng
+ * @Date: 2021/6/9 21:54
+ * @Version: 1.0
+ **/
+@Service
+public class MediaServiceImpl extends DefaultBaseService implements IMediaService {
+
+ /** Hex digits used to render the MD5 digest as an uppercase hex string. */
+ private static final char[] HEX_CODE = {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F'};
+
+ /**
+ * Streams the uploaded file to {@code uploadPath/uploadName}, computing its
+ * MD5 on the fly.
+ *
+ * @param multipartFile uploaded file
+ * @param uploadPath target directory (created if absent)
+ * @param uploadName target file name
+ * @return uppercase hex MD5 of the stored file
+ * @throws FileException if reading or writing fails
+ */
+ @Override
+ public String upload(MultipartFile multipartFile, String uploadPath, String uploadName) {
+ String fileMd5;
+ File uploadFolderFile = new File(uploadPath);
+ if (!uploadFolderFile.exists()) {
+ uploadFolderFile.mkdirs();
+ }
+
+ try (InputStream uploadFileInputStream = multipartFile.getInputStream();
+ FileOutputStream uploadFileOutputStream = new FileOutputStream(uploadPath + File.separator + uploadName)) {
+ MessageDigest messageDigest = MessageDigest.getInstance("MD5");
+ byte[] buf = new byte[INPUT_STREAM_SIZE];
+ // BUGFIX: only write/digest the bytes actually read. The previous code
+ // used buf.length on every iteration, so any short read (always the
+ // last one) wrote stale buffer bytes into the file and corrupted the MD5.
+ for (int readCount; (readCount = uploadFileInputStream.read(buf)) > -1; ) {
+ uploadFileOutputStream.write(buf, 0, readCount);
+ messageDigest.update(buf, 0, readCount);
+ }
+ uploadFileOutputStream.flush();
+ // Render the 16-byte digest as 32 uppercase hex characters
+ byte[] data = messageDigest.digest();
+ StringBuilder fileMd5SB = new StringBuilder(data.length * 2);
+ for (byte b : data) {
+ fileMd5SB.append(HEX_CODE[(b >> 4) & 0xF]);
+ fileMd5SB.append(HEX_CODE[(b & 0xF)]);
+ }
+ fileMd5 = fileMd5SB.toString();
+ } catch (Exception e) {
+ LOG.error(e.getMessage(), e);
+ throw new FileException("视频上传失败");
+ }
+ return fileMd5;
+ }
+
+ /**
+ * Not implemented yet.
+ *
+ * @param fileType file extension
+ * @return currently always null
+ */
+ @Override
+ public String getContentType(String fileType) {
+ return null;
+ }
+
+ /**
+ * Extracts the lower-cased extension (last dot-separated segment).
+ *
+ * @param fileName file name
+ * @return lower-cased last segment, or "" when split yields no segments
+ */
+ @Override
+ public String getFileType(String fileName) {
+ String[] names = fileName.split("\\.");
+ // BUGFIX: String.split never returns null, but it CAN return an empty
+ // array (e.g. a name consisting only of dots). The old null check was
+ // dead code and names[names.length - 1] could throw
+ // ArrayIndexOutOfBoundsException.
+ if (names.length > 0) {
+ return names[names.length - 1].toLowerCase();
+ }
+ return "";
+ }
+}
diff --git a/module-file-media/src/main/java/ink/wgink/module/file/media/service/video/IVideoService.java b/module-file-media/src/main/java/ink/wgink/module/file/media/service/video/IVideoService.java
new file mode 100644
index 00000000..ab48d9d9
--- /dev/null
+++ b/module-file-media/src/main/java/ink/wgink/module/file/media/service/video/IVideoService.java
@@ -0,0 +1,53 @@
+package ink.wgink.module.file.media.service.video;
+
+import ink.wgink.module.file.media.pojo.vos.video.VideoVO;
+import org.springframework.web.multipart.MultipartFile;
+
+/**
+ * When you feel like quitting. Think about why you started
+ *
+ * @ClassName: IVideoService
+ * @Description: Video service: upload handling and persistence of video
+ * metadata records.
+ * @Author: WangGeng
+ * @Date: 2021/6/8 22:12
+ * @Version: 1.0
+ **/
+public interface IVideoService {
+
+ /** Sub-directory (under the configured upload path) where videos are stored. */
+ String VIDEO_PATH = "videos";
+
+ /**
+ * Saves a video record.
+ *
+ * @param videoVO video metadata to persist
+ * @return identifier of the saved record (NOTE(review): current impl returns null — confirm contract)
+ */
+ String save(VideoVO videoVO);
+
+ /**
+ * Saves a video record on behalf of a token-identified caller.
+ *
+ * @param token caller token — presumably the current user's access token; confirm against callers
+ * @param videoVO video metadata to persist
+ * @return identifier of the saved record
+ */
+ String save(String token, VideoVO videoVO);
+
+ /**
+ * Uploads a video file, extracts its metadata and saves the record.
+ *
+ * @param video uploaded video file
+ * @return identifier of the saved record
+ */
+ String upload(MultipartFile video);
+
+ /**
+ * Uploads a video file on behalf of a token-identified caller.
+ *
+ * @param token caller token
+ * @param video uploaded video file
+ * @return identifier of the saved record
+ */
+ String upload(String token, MultipartFile video);
+}
diff --git a/module-file-media/src/main/java/ink/wgink/module/file/media/service/video/impl/VideoServiceImpl.java b/module-file-media/src/main/java/ink/wgink/module/file/media/service/video/impl/VideoServiceImpl.java
new file mode 100644
index 00000000..7f22cfe2
--- /dev/null
+++ b/module-file-media/src/main/java/ink/wgink/module/file/media/service/video/impl/VideoServiceImpl.java
@@ -0,0 +1,93 @@
+package ink.wgink.module.file.media.service.video.impl;
+
+import ink.wgink.common.base.DefaultBaseService;
+import ink.wgink.exceptions.FileException;
+import ink.wgink.module.file.media.manager.MediaManager;
+import ink.wgink.module.file.media.manager.domain.MusicMetaInfo;
+import ink.wgink.module.file.media.manager.domain.VideoMetaInfo;
+import ink.wgink.module.file.media.pojo.vos.video.VideoVO;
+import ink.wgink.module.file.media.service.IMediaService;
+import ink.wgink.module.file.media.service.video.IVideoService;
+import ink.wgink.properties.media.MediaProperties;
+import ink.wgink.util.UUIDUtil;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+import org.springframework.web.multipart.MultipartFile;
+
+import java.io.File;
+
+/**
+ * When you feel like quitting. Think about why you started
+ *
+ * @ClassName: VideoServiceImpl
+ * @Description: Video service: stores uploaded videos via IMediaService,
+ * extracts stream metadata through MediaManager (ffmpeg) and saves the record.
+ * @Author: WangGeng
+ * @Date: 2021/6/8 22:12
+ * @Version: 1.0
+ **/
+@Service
+public class VideoServiceImpl extends DefaultBaseService implements IVideoService {
+
+ @Autowired
+ private IMediaService mediaService;
+ @Autowired
+ private MediaProperties mediaProperties;
+
+ // NOTE(review): both save overloads are unimplemented stubs returning null,
+ // so upload() currently returns null as well — confirm persistence is
+ // completed in a follow-up change.
+ @Override
+ public String save(VideoVO videoVO) {
+ return null;
+ }
+
+ @Override
+ public String save(String token, VideoVO videoVO) {
+ // NOTE(review): fileId is generated but never used or returned
+ String fileId = UUIDUtil.getUUID();
+
+ return null;
+ }
+
+ // Token-less upload delegates to the token variant with a null token.
+ @Override
+ public String upload(MultipartFile video) {
+ return upload(null, video);
+ }
+
+ /**
+ * Stores the uploaded file under {uploadPath}/videos, computes its MD5,
+ * extracts video/audio metadata via ffmpeg, and saves the record.
+ * Throws FileException when metadata extraction fails.
+ * NOTE(review): the file is stored under its original client-supplied name —
+ * consider sanitising / renaming to avoid collisions and path traversal.
+ */
+ @Override
+ public String upload(String token, MultipartFile video) {
+ String filePath = mediaProperties.getUploadPath() + File.separator + VIDEO_PATH;
+ String fileName = video.getOriginalFilename();
+ long fileSize = video.getSize();
+ String fileType = mediaService.getFileType(fileName);
+ String fileMd5 = mediaService.upload(video, filePath, fileName);
+ String fileFullPath = filePath + File.separator + fileName;
+ // Build the video record from the upload results
+ VideoVO videoVO = new VideoVO();
+ videoVO.setFileName(fileName);
+ videoVO.setFileFullPath(fileFullPath);
+ videoVO.setFilePath(VIDEO_PATH + File.separator + fileName);
+ videoVO.setFileSize(fileSize);
+ videoVO.setFileType(fileType);
+ videoVO.setFileMd5(fileMd5);
+
+ // Probe the stored file for stream metadata; null means ffmpeg could not parse it
+ File uploadFile = new File(fileFullPath);
+ VideoMetaInfo videoMetaInfo = MediaManager.getInstance().getVideoMetaInfo(uploadFile);
+ if (videoMetaInfo == null) {
+ throw new FileException("上传失败");
+ }
+ videoVO.setDuration(videoMetaInfo.getDuration());
+ videoVO.setWidth(videoMetaInfo.getWidth());
+ videoVO.setHeight(videoMetaInfo.getHeight());
+ videoVO.setBitRate(videoMetaInfo.getBitRate());
+ videoVO.setEncoder(videoMetaInfo.getEncoder());
+ videoVO.setFrameRate(videoMetaInfo.getFrameRate());
+
+ // Audio stream is optional — copy its metadata only when present
+ MusicMetaInfo musicMetaInfo = videoMetaInfo.getMusicMetaInfo();
+ if (musicMetaInfo != null) {
+ videoVO.setAudioDuration(musicMetaInfo.getDuration());
+ videoVO.setAudioBitRate(musicMetaInfo.getBitRate());
+ videoVO.setAudioSampleRate(musicMetaInfo.getSampleRate());
+ }
+
+ return save(token, videoVO);
+ }
+
+}
diff --git a/module-file-media/src/main/java/ink/wgink/module/file/media/startup/ModuleFileMediaStartUp.java b/module-file-media/src/main/java/ink/wgink/module/file/media/startup/ModuleFileMediaStartUp.java
new file mode 100644
index 00000000..6f566e75
--- /dev/null
+++ b/module-file-media/src/main/java/ink/wgink/module/file/media/startup/ModuleFileMediaStartUp.java
@@ -0,0 +1,30 @@
+package ink.wgink.module.file.media.startup;
+
+import ink.wgink.module.file.media.manager.MediaManager;
+import ink.wgink.properties.media.MediaProperties;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.ApplicationArguments;
+import org.springframework.boot.ApplicationRunner;
+import org.springframework.stereotype.Component;
+
+/**
+ * When you feel like quitting. Think about why you started
+ *
+ * @ClassName: MediaStartUp
+ * @Description: Startup hook: pushes the configured ffmpeg path into the
+ * MediaManager singleton once the Spring context is ready.
+ * @Author: WangGeng
+ * @Date: 2021/6/8 21:56
+ * @Version: 1.0
+ **/
+@Component
+public class ModuleFileMediaStartUp implements ApplicationRunner {
+
+ @Autowired
+ private MediaProperties mediaProperties;
+
+ // Runs after application startup; MediaManager is unusable for metadata
+ // extraction until this sets the ffmpeg executable path.
+ @Override
+ public void run(ApplicationArguments args) throws Exception {
+ MediaManager.getInstance().setFFmpegPath(mediaProperties.getFfmpegPath());
+ }
+}
diff --git a/module-file-media/src/test/java/MediaTest.java b/module-file-media/src/test/java/MediaTest.java
new file mode 100644
index 00000000..9bbd9978
--- /dev/null
+++ b/module-file-media/src/test/java/MediaTest.java
@@ -0,0 +1,48 @@
+import ink.wgink.module.file.media.manager.MediaManager;
+import ink.wgink.module.file.media.manager.domain.VideoMetaInfo;
+import org.junit.Test;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.InputStreamReader;
+
+/**
+ * When you feel like quitting. Think about why you started
+ *
+ * @ClassName: MediaTest
+ * @Description: Manual/local smoke tests for MediaManager.
+ * NOTE(review): both tests depend on machine-specific absolute paths and
+ * locally installed tools (ffmpeg, node) — they will fail on CI; consider
+ * disabling or converting to integration tests.
+ * @Author: WangGeng
+ * @Date: 2021/6/7 22:37
+ * @Version: 1.0
+ **/
+public class MediaTest {
+
+ // Probes a local video file for metadata using a hardcoded ffmpeg path.
+ @Test
+ public void t1() {
+ File videoFile = new File("I:\\电视剧\\神探狄仁杰\\第一部\\神探狄仁杰-01.mp4");
+// File frameFile = new File("C:\\Users\\wenc0\\Desktop\\UploadFiles\\frame.gif");
+// MediaComponent.cutVideoFrame(videoFile, frameFile);
+ MediaManager.getInstance().setFFmpegPath("D:\\ffmpeg-4.4-full_build\\ffmpeg-4.4-full_build\\bin\\ffmpeg.exe");
+ VideoMetaInfo videoMetaInfo = MediaManager.getInstance().getVideoMetaInfo(videoFile);
+ System.out.println(videoMetaInfo);
+ }
+
+ // Spawns `node -v` and echoes its stdout; GBK decoding matches the Windows
+ // console code page — presumably Windows-only, confirm before reuse.
+ @Test
+ public void t2() throws Exception {
+ Process process = Runtime.getRuntime().exec("node -v");
+ BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(process.getInputStream(), "GBK"));
+ new Thread(() -> {
+ try {
+ for (String line; (line = bufferedReader.readLine()) != null; ) {
+ System.out.println(line);
+ }
+ bufferedReader.close();
+ // NOTE(review): reader errors are swallowed silently here
+ } catch (Exception e) {}
+ }).start();
+ process.waitFor();
+ process.destroy();
+ }
+
+
+}