commit 6542c68a59 (parent ae3d2f446a)
@@ -5,6 +5,7 @@ class MediaController < ApplicationController
  before_action :set_media_attachment
  before_action :verify_permitted_status!
  before_action :check_playable!, only: :player

  def show
    redirect_to @media_attachment.file.url(:original)
@@ -12,7 +13,7 @@ class MediaController < ApplicationController

  def player
    @body_classes = 'player'
    raise ActiveRecord::RecordNotFound unless @media_attachment.video? || @media_attachment.gifv?
    @autoplay = truthy_param?(:autoplay) || truthy_param?(:auto_play)
  end

  private
@@ -27,4 +28,12 @@ class MediaController < ApplicationController
    # Reraise in order to get a 404 instead of a 403 error code
    raise ActiveRecord::RecordNotFound
  end
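
  # Only video, gifv, and audio attachments are playable from the player action; anything else raises a 404.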
  def check_playable!
    raise ActiveRecord::RecordNotFound unless playable?
  end

  def playable?
    @media_attachment.video? || @media_attachment.gifv? || @media_attachment.audio?
  end
end
@@ -12,7 +12,7 @@ import AttachmentList from './attachment_list';
import Card from '../features/status/components/card';
import { injectIntl, FormattedMessage } from 'react-intl';
import ImmutablePureComponent from 'react-immutable-pure-component';
import { MediaGallery, Video } from '../features/ui/util/async-components';
import { MediaGallery, Video, Audio } from '../features/ui/util/async-components';
import { HotKeys } from 'react-hotkeys';
import classNames from 'classnames';

@@ -113,6 +113,10 @@ class Status extends ImmutablePureComponent {
    return <div className='media-spoiler-video' style={{ height: '110px' }} />;
  }

  renderLoadingAudioPlayer () {
    return <div className='media-spoiler-video' style={{ height: '61px' }} />;
  }

  handleOpenVideo = (media, startTime) => {
    this.props.onOpenVideo(media, startTime);
  }

@@ -232,6 +236,14 @@ class Status extends ImmutablePureComponent {
          media={status.get('media_attachments')}
        />
      );
    } else if (status.getIn(['media_attachments', 0, 'type']) === 'audio') {
      const audio = status.getIn(['media_attachments', 0]);

      media = (
        <Bundle fetchComponent={Audio} loading={this.renderLoadingAudioPlayer} >
          {Component => <Component src={audio.get('url')} duration={audio.getIn(['meta', 'original', 'duration'])} />}
        </Bundle>
      );
    } else if (status.getIn(['media_attachments', 0, 'type']) === 'video') {
      const video = status.getIn(['media_attachments', 0]);
@@ -5,6 +5,7 @@ import { IntlProvider, addLocaleData } from 'react-intl';
import { getLocale } from '../locales';
import MediaGallery from '../components/media_gallery';
import Video from '../features/video';
import Audio from '../features/audio';
import Card from '../features/status/components/card';
import ModalRoot from '../components/modal_root';
import MediaModal from '../features/ui/components/media_modal';
@@ -13,7 +14,7 @@ import { List as ImmutableList, fromJS } from 'immutable';
const { localeData, messages } = getLocale();
addLocaleData(localeData);

const MEDIA_COMPONENTS = { MediaGallery, Video, Card };
const MEDIA_COMPONENTS = { MediaGallery, Video, Audio, Card };

export default class MediaContainer extends PureComponent {
app/javascript/mastodon/features/audio/index.js (new file, 305 lines)
@@ -0,0 +1,305 @@
import React from 'react';
import PropTypes from 'prop-types';
import { defineMessages, injectIntl } from 'react-intl';
import { throttle } from 'lodash';
import classNames from 'classnames';

const messages = defineMessages({
  play: { id: 'video.play', defaultMessage: 'Play' },
  pause: { id: 'video.pause', defaultMessage: 'Pause' },
  mute: { id: 'video.mute', defaultMessage: 'Mute sound' },
  unmute: { id: 'video.unmute', defaultMessage: 'Unmute sound' },
});

const formatTime = secondsNum => {
  let hours = Math.floor(secondsNum / 3600);
  let minutes = Math.floor((secondsNum - (hours * 3600)) / 60);
  let seconds = Math.floor(secondsNum - (hours * 3600) - (minutes * 60));

  if (hours < 10) hours = '0' + hours;
  if (minutes < 10) minutes = '0' + minutes;
  if (seconds < 10) seconds = '0' + seconds;

  return (hours === '00' ? '' : `${hours}:`) + `${minutes}:${seconds}`;
};
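// e.g. formatTime(65) === '01:05', formatTime(3725) === '01:02:05'; hours are omitted when zero.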

export const findElementPosition = el => {
  let box;

  if (el.getBoundingClientRect && el.parentNode) {
    box = el.getBoundingClientRect();
  }

  if (!box) {
    return {
      left: 0,
      top: 0,
    };
  }

  const docEl = document.documentElement;
  const body = document.body;

  const clientLeft = docEl.clientLeft || body.clientLeft || 0;
  const scrollLeft = window.pageXOffset || body.scrollLeft;
  const left = (box.left + scrollLeft) - clientLeft;

  const clientTop = docEl.clientTop || body.clientTop || 0;
  const scrollTop = window.pageYOffset || body.scrollTop;
  const top = (box.top + scrollTop) - clientTop;

  return {
    left: Math.round(left),
    top: Math.round(top),
  };
};

export const getPointerPosition = (el, event) => {
  const position = {};
  const box = findElementPosition(el);
  const boxW = el.offsetWidth;
  const boxH = el.offsetHeight;
  const boxY = box.top;
  const boxX = box.left;

  let pageY = event.pageY;
  let pageX = event.pageX;

  if (event.changedTouches) {
    pageX = event.changedTouches[0].pageX;
    pageY = event.changedTouches[0].pageY;
  }

  position.y = Math.max(0, Math.min(1, (pageY - boxY) / boxH));
  position.x = Math.max(0, Math.min(1, (pageX - boxX) / boxW));

  return position;
};
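// getPointerPosition returns x and y as fractions in [0, 1] of the element's width and height.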

// hard coded in components.scss
// any way to get ::before values programatically?
const VOL_WIDTH = 50;
const VOL_OFFSET = 70;

export default @injectIntl
class Audio extends React.PureComponent {

  static propTypes = {
    src: PropTypes.string.isRequired,
    duration: PropTypes.number,
    intl: PropTypes.object.isRequired,
  };

  state = {
    currentTime: 0,
    duration: 0,
    volume: 0.5,
    paused: true,
    dragging: false,
    fullscreen: false,
    muted: false,
  };

  volHandleOffset = v => {
    const offset = v * VOL_WIDTH + VOL_OFFSET;
    return (offset > 110) ? 110 : offset;
  }
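  // volHandleOffset maps a volume in [0, 1] to the handle's pixel offset, capped at 110px.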

  componentDidMount () {
    this.setState({ duration: this.props.duration });
  }

  componentWillReceiveProps (nextProps) {
    this.setState({ duration: nextProps.duration });
  }

  setAudioRef = c => {
    this.audio = c;
  }

  setSeekRef = c => {
    this.seek = c;
  }

  setVolumeRef = c => {
    this.volume = c;
  }

  handleClickRoot = e => e.stopPropagation();

  handlePlay = () => {
    this.setState({ paused: false });
  }

  handlePause = () => {
    this.setState({ paused: true });
  }

  handleTimeUpdate = () => {
    this.setState({
      currentTime: Math.floor(this.audio.currentTime),
      duration: Math.floor(this.audio.duration),
    });
  }

  handleVolumeMouseDown = e => {
    document.addEventListener('mousemove', this.handleMouseVolSlide, true);
    document.addEventListener('mouseup', this.handleVolumeMouseUp, true);
    document.addEventListener('touchmove', this.handleMouseVolSlide, true);
    document.addEventListener('touchend', this.handleVolumeMouseUp, true);

    this.handleMouseVolSlide(e);

    e.preventDefault();
    e.stopPropagation();
  }

  handleVolumeMouseUp = () => {
    document.removeEventListener('mousemove', this.handleMouseVolSlide, true);
    document.removeEventListener('mouseup', this.handleVolumeMouseUp, true);
    document.removeEventListener('touchmove', this.handleMouseVolSlide, true);
    document.removeEventListener('touchend', this.handleVolumeMouseUp, true);
  }

  handleMouseVolSlide = throttle(e => {
    const rect = this.volume.getBoundingClientRect();
    const x = (e.clientX - rect.left) / VOL_WIDTH; // x position within the element.

    if(!isNaN(x)) {
      var slideamt = x;

      if(x > 1) {
        slideamt = 1;
      } else if(x < 0) {
        slideamt = 0;
      }

      this.audio.volume = slideamt;
      this.setState({ volume: slideamt });
    }
  }, 60);
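  // handleMouseVolSlide is throttled so the volume updates at most once every 60 ms while the slider is dragged.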

  handleMouseDown = e => {
    document.addEventListener('mousemove', this.handleMouseMove, true);
    document.addEventListener('mouseup', this.handleMouseUp, true);
    document.addEventListener('touchmove', this.handleMouseMove, true);
    document.addEventListener('touchend', this.handleMouseUp, true);

    this.setState({ dragging: true });
    this.audio.pause();
    this.handleMouseMove(e);

    e.preventDefault();
    e.stopPropagation();
  }

  handleMouseUp = () => {
    document.removeEventListener('mousemove', this.handleMouseMove, true);
    document.removeEventListener('mouseup', this.handleMouseUp, true);
    document.removeEventListener('touchmove', this.handleMouseMove, true);
    document.removeEventListener('touchend', this.handleMouseUp, true);

    this.setState({ dragging: false });
    this.audio.play();
  }

  handleMouseMove = throttle(e => {
    const { x } = getPointerPosition(this.seek, e);
    const currentTime = Math.floor(this.audio.duration * x);

    if (!isNaN(currentTime)) {
      this.audio.currentTime = currentTime;
      this.setState({ currentTime });
    }
  }, 60);
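  // handleMouseDown pauses the <audio> element while the seek bar is scrubbed; handleMouseUp resumes playback.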

  togglePlay = () => {
    if (this.state.paused) {
      this.audio.play();
    } else {
      this.audio.pause();
    }
  }

  toggleMute = () => {
    this.audio.muted = !this.audio.muted;
    this.setState({ muted: this.audio.muted });
  }

  handleProgress = () => {
    if (this.audio.buffered.length > 0) {
      this.setState({ buffer: this.audio.buffered.end(0) / this.audio.duration * 100 });
    }
  }
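  // buffer holds the buffered portion of the file as a percentage and drives the seek bar's buffer indicator in render().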

  render () {
    const { src, intl } = this.props;
    const { currentTime, duration, volume, buffer, dragging, paused, muted } = this.state;
    const progress = (currentTime / duration) * 100;
    const volumeWidth = (muted) ? 0 : volume * VOL_WIDTH;
    const volumeHandleLoc = (muted) ? this.volHandleOffset(0) : this.volHandleOffset(volume);

    return (
      <div
        role='menuitem'
        className='video-player inline audio-player'
        onClick={this.handleClickRoot}
        tabIndex={0}
      >
        <audio
          ref={this.setAudioRef}
          src={src}
          preload='none'
          loop
          role='button'
          tabIndex='0'
          volume={volume}
          onClick={this.togglePlay}
          onPlay={this.handlePlay}
          onPause={this.handlePause}
          onTimeUpdate={this.handleTimeUpdate}
          onProgress={this.handleProgress}
        />

        <div className='video-player__controls active'>
          <div className='video-player__seek' onMouseDown={this.handleMouseDown} ref={this.setSeekRef}>
            <div className='video-player__seek__buffer' style={{ width: `${buffer}%` }} />
            <div className='video-player__seek__progress' style={{ width: `${progress}%` }} />

            <span
              className={classNames('video-player__seek__handle', { active: dragging })}
              tabIndex='0'
              style={{ left: `${progress}%` }}
            />
          </div>

          <div className='video-player__buttons-bar'>
            <div className='video-player__buttons left'>
              <button type='button' aria-label={intl.formatMessage(paused ? messages.play : messages.pause)} onClick={this.togglePlay}><i className={classNames('fa fa-fw', { 'fa-play': paused, 'fa-pause': !paused })} /></button>
              <button type='button' aria-label={intl.formatMessage(muted ? messages.unmute : messages.mute)} onMouseEnter={this.volumeSlider} onMouseLeave={this.volumeSlider} onClick={this.toggleMute}><i className={classNames('fa fa-fw', { 'fa-volume-off': muted, 'fa-volume-up': !muted })} /></button>

              <div className='video-player__volume' onMouseDown={this.handleVolumeMouseDown} ref={this.setVolumeRef}>
                <div className='video-player__volume__current' style={{ width: `${volumeWidth}px` }} />

                <span
                  className={classNames('video-player__volume__handle')}
                  tabIndex='0'
                  style={{ left: `${volumeHandleLoc}px` }}
                />
              </div>
            </div>

            <div className='video-player__buttons right'>
              <span>
                <span className='video-player__time-current'>{formatTime(currentTime)}</span>
                <span className='video-player__time-sep'>/</span>
                <span className='video-player__time-total'>{formatTime(duration)}</span>
              </span>
            </div>
          </div>
        </div>
      </div>
    );
  }

}
@@ -11,6 +11,7 @@ import { FormattedDate, FormattedNumber } from 'react-intl';
import Card from './card';
import ImmutablePureComponent from 'react-immutable-pure-component';
import Video from '../../video';
import Audio from '../../audio';

export default class DetailedStatus extends ImmutablePureComponent {

@@ -53,6 +54,12 @@ export default class DetailedStatus extends ImmutablePureComponent {
    if (status.get('media_attachments').size > 0) {
      if (status.get('media_attachments').some(item => item.get('type') === 'unknown')) {
        media = <AttachmentList media={status.get('media_attachments')} />;
      } else if (status.getIn(['media_attachments', 0, 'type']) === 'audio') {
        const audio = status.getIn(['media_attachments', 0]);

        media = (
          <Audio src={audio.get('url')} duration={audio.getIn(['meta', 'original', 'duration'])} />
        );
      } else if (status.getIn(['media_attachments', 0, 'type']) === 'video') {
        const video = status.getIn(['media_attachments', 0]);
@@ -122,6 +122,10 @@ export function Video () {
  return import(/* webpackChunkName: "features/video" */'../../video');
}

export function Audio () {
  return import(/* webpackChunkName: "features/audio" */'../../audio');
}
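// webpack emits this as a separate "features/audio" chunk, loaded on demand through the Bundle wrapper in status.js.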

export function EmbedModal () {
  return import(/* webpackChunkName: "modals/embed_modal" */'../components/embed_modal');
}
@@ -505,6 +505,7 @@
.compose-form__upload-thumbnail {
  border-radius: 4px;
  background-color: $base-shadow-color;
  background-position: center;
  background-size: cover;
  background-repeat: no-repeat;
@@ -4947,6 +4948,36 @@ a.status-card.compact:hover {
  }
}

.audio-player {
  .video-player__controls {
    position: static;
    background: transparent;
    padding-top: 6px;
  }

  .video-player__buttons button {
    color: $darker-text-color;

    &:hover,
    &:focus,
    &:active {
      color: lighten($darker-text-color, 4%);
    }
  }

  .video-player__time-sep,
  .video-player__time-total,
  .video-player__time-current {
    color: $dark-text-color;
    line-height: 23px;
  }

  .video-player__seek::before,
  .video-player__volume::before {
    background: lighten($ui-base-color, 8%);
  }
}

.media-spoiler-video {
  background-size: cover;
  background-repeat: no-repeat;
@@ -22,14 +22,16 @@
class MediaAttachment < ApplicationRecord
  self.inheritance_column = nil

  enum type: [:image, :gifv, :video, :unknown]
  enum type: %i(image gifv video unknown audio)
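  # :audio is appended to the end of the enum, so the integer values stored for the existing types are unchanged.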

  IMAGE_FILE_EXTENSIONS = ['.jpg', '.jpeg', '.png', '.gif'].freeze
  VIDEO_FILE_EXTENSIONS = ['.webm', '.mp4', '.m4v', '.mov'].freeze
  IMAGE_FILE_EXTENSIONS = %w(.jpg .jpeg .png .gif).freeze
  VIDEO_FILE_EXTENSIONS = %w(.webm .mp4 .m4v .mov).freeze
  AUDIO_FILE_EXTENSIONS = %w(.ogg .oga .mp3).freeze

  IMAGE_MIME_TYPES = ['image/jpeg', 'image/png', 'image/gif'].freeze
  VIDEO_MIME_TYPES = ['video/webm', 'video/mp4', 'video/quicktime'].freeze
  VIDEO_CONVERTIBLE_MIME_TYPES = ['video/webm', 'video/quicktime'].freeze
  IMAGE_MIME_TYPES = %w(image/jpeg image/png image/gif).freeze
  VIDEO_MIME_TYPES = %w(video/webm video/mp4 video/quicktime).freeze
  VIDEO_CONVERTIBLE_MIME_TYPES = %w(video/webm video/quicktime).freeze
  AUDIO_MIME_TYPES = %w(audio/ogg video/ogg audio/mpeg).freeze

  IMAGE_STYLES = {
    original: {
@@ -73,6 +75,23 @@ class MediaAttachment < ApplicationRecord
    },
  }.freeze

  VIDEO_CONVERTED_STYLES = {
    small: VIDEO_STYLES[:small],
    original: VIDEO_FORMAT,
  }.freeze

  AUDIO_STYLES = {
    original: {
      format: 'ogg',
      convert_options: {},
    },
  }.freeze

  GIFV_STYLES = {
    small: IMAGE_STYLES[:small],
    original: VIDEO_FORMAT,
  }.freeze

  IMAGE_LIMIT = 8.megabytes
  VIDEO_LIMIT = 40.megabytes
@@ -84,7 +103,7 @@ class MediaAttachment < ApplicationRecord
                    processors: ->(f) { file_processors f },
                    convert_options: { all: '-quality 90 -strip' }

  validates_attachment_content_type :file, content_type: IMAGE_MIME_TYPES + VIDEO_MIME_TYPES
  validates_attachment_content_type :file, content_type: IMAGE_MIME_TYPES + VIDEO_MIME_TYPES + AUDIO_MIME_TYPES
  validates_attachment_size :file, less_than: IMAGE_LIMIT, unless: :video?
  validates_attachment_size :file, less_than: VIDEO_LIMIT, if: :video?
  remotable_attachment :file, VIDEO_LIMIT
@@ -142,26 +161,24 @@ class MediaAttachment < ApplicationRecord

  def file_styles(f)
    if f.instance.file_content_type == 'image/gif'
      {
        small: IMAGE_STYLES[:small],
        original: VIDEO_FORMAT,
      }
    elsif IMAGE_MIME_TYPES.include? f.instance.file_content_type
      GIFV_STYLES
    elsif IMAGE_MIME_TYPES.include?(f.instance.file_content_type)
      IMAGE_STYLES
    elsif VIDEO_CONVERTIBLE_MIME_TYPES.include?(f.instance.file_content_type)
      {
        small: VIDEO_STYLES[:small],
        original: VIDEO_FORMAT,
      }
    else
      VIDEO_CONVERTED_STYLES
    elsif VIDEO_MIME_TYPES.include?(f.instance.file_content_type)
      VIDEO_STYLES
    else
      AUDIO_STYLES
    end
  end

  def file_processors(f)
    if f.file_content_type == 'image/gif'
      [:gif_transcoder]
    elsif VIDEO_MIME_TYPES.include? f.file_content_type
    elsif AUDIO_MIME_TYPES.include?(f.file_content_type)
      [:transcoder]
    elsif VIDEO_MIME_TYPES.include?(f.file_content_type)
      [:video_transcoder]
    else
      [:lazy_thumbnail]
@@ -187,7 +204,13 @@ class MediaAttachment < ApplicationRecord
  end

  def set_type_and_extension
    self.type = VIDEO_MIME_TYPES.include?(file_content_type) ? :video : :image
    self.type = if VIDEO_MIME_TYPES.include?(file_content_type)
                  :video
                elsif AUDIO_MIME_TYPES.include?(file_content_type)
                  :audio
                else
                  :image
                end
  end

  def set_meta
@@ -230,7 +253,7 @@ class MediaAttachment < ApplicationRecord
      frame_rate: movie.frame_rate,
      duration: movie.duration,
      bitrate: movie.bitrate,
    }
    }.compact
  end

  def reset_parent_cache
@@ -55,7 +55,7 @@ class InitialStateSerializer < ActiveModel::Serializer
  end

  def media_attachments
    { accept_content_types: MediaAttachment::IMAGE_FILE_EXTENSIONS + MediaAttachment::VIDEO_FILE_EXTENSIONS + MediaAttachment::IMAGE_MIME_TYPES + MediaAttachment::VIDEO_MIME_TYPES }
    { accept_content_types: MediaAttachment::IMAGE_FILE_EXTENSIONS + MediaAttachment::VIDEO_FILE_EXTENSIONS + MediaAttachment::AUDIO_FILE_EXTENSIONS + MediaAttachment::IMAGE_MIME_TYPES + MediaAttachment::VIDEO_MIME_TYPES + MediaAttachment::AUDIO_MIME_TYPES }
  end

  private
@@ -61,7 +61,7 @@ class PostStatusService < BaseService

    media = MediaAttachment.where(status_id: nil).where(id: media_ids.take(4).map(&:to_i))

    raise Mastodon::ValidationError, I18n.t('media_attachments.validations.images_and_video') if media.size > 1 && media.find(&:video?)
    raise Mastodon::ValidationError, I18n.t('media_attachments.validations.images_and_video') if media.size > 1 && media.find { |x| x.audio? || x.video? }

    media
  end
@@ -1,2 +1,6 @@
- if @media_attachment.audio?
  %audio{ preload: 'auto', controls: 'controls' }
    %source{ src: @media_attachment.file.url(:original), type: @media_attachment.file_content_type }
- else
  %video{ poster: @media_attachment.file.url(:small), preload: 'auto', autoplay: 'autoplay', muted: 'muted', loop: 'loop', controls: 'controls', style: "width: #{@media_attachment.file.meta.dig('original', 'width')}px; height: #{@media_attachment.file.meta.dig('original', 'height')}px" }
    %source{ src: @media_attachment.file.url(:original), type: @media_attachment.file_content_type }
@@ -23,7 +23,10 @@
  .e-content{ lang: status.language, style: "display: #{!current_account&.user&.setting_expand_spoilers && status.spoiler_text? ? 'none' : 'block'}; direction: #{rtl_status?(status) ? 'rtl' : 'ltr'}" }= Formatter.instance.format(status, custom_emojify: true, autoplay: autoplay)

  - if !status.media_attachments.empty?
    - if status.media_attachments.first.video?
    - if status.media_attachments.first.audio?
      - audio = status.media_attachments.first
      = react_component :audio, src: audio.file.url(:original), duration: audio.file.meta.dig(:original, :duration)
    - elsif status.media_attachments.first.video?
      - video = status.media_attachments.first
      = react_component :video, src: video.file.url(:original), preview: video.file.url(:small), sensitive: !current_account&.user&.show_all_media? && status.sensitive? || current_account&.user&.hide_all_media?, width: 670, height: 380, detailed: true, inline: true, alt: video.description
    - else
@@ -25,6 +25,11 @@
    = opengraph 'og:video:height', media.file.meta.dig('original', 'height')
    = opengraph 'twitter:player:width', media.file.meta.dig('original', 'width')
    = opengraph 'twitter:player:height', media.file.meta.dig('original', 'height')
  - elsif media.audio?
    - player_card = true
    = opengraph 'og:audio', full_asset_url(media.file.url(:original))
    = opengraph 'og:audio:secure_url', full_asset_url(media.file.url(:original))
    = opengraph 'og:audio:type', media.file_content_type

- if player_card
  = opengraph 'twitter:card', 'player'
- else
@@ -27,7 +27,10 @@
  .e-content{ lang: status.language, style: "display: #{!current_account&.user&.setting_expand_spoilers && status.spoiler_text? ? 'none' : 'block'}; direction: #{rtl_status?(status) ? 'rtl' : 'ltr'}" }= Formatter.instance.format(status, custom_emojify: true, autoplay: autoplay)

  - unless status.media_attachments.empty?
    - if status.media_attachments.first.video?
    - if status.media_attachments.first.audio?
      - audio = status.media_attachments.first
      = react_component :audio, src: audio.file.url(:original), duration: audio.file.meta.dig(:original, :duration)
    - elsif status.media_attachments.first.video?
      - video = status.media_attachments.first
      = react_component :video, src: video.file.url(:original), preview: video.file.url(:small), sensitive: !current_account&.user&.show_all_media? && status.sensitive? || current_account&.user&.hide_all_media?, width: 610, height: 343, inline: true, alt: video.description
    - else