Commit 928f79d8 authored by linfeng

Submit code

parent 0d850247
@@ -4,63 +4,65 @@ package com.frame.base.url;
* The ARouter annotations require us to declare these route path identifiers
*/
public final class Constance {
public static final String ACTIVITY_URL_ORDERLIST ="/order/OrderListActivity";
public static final String ACTIVITY_URL_ORDERLIST = "/order/OrderListActivity";
public static final String ACTIVITY_URL_CHOOSEAVISITOR ="/traveler/ChooseAVisitorActivity";
public static final String ACTIVITY_URL_CHOOSEAVISITOR = "/traveler/ChooseAVisitorActivity";
public static final String ACTIVITY_URL_POPULARTOURLIST ="/other/PopularTourListActivity";
public static final String ACTIVITY_URL_POPULARTOURLIST = "/other/PopularTourListActivity";
public static final String ACTIVITY_URL_LOGINRV ="/ui/login/LoginRvActivity";
public static final String ACTIVITY_URL_LOGINRV = "/ui/login/LoginRvActivity";
public static final String ACTIVITY_URL_CITYLIST ="/main/home/CityListActivity";
public static final String ACTIVITY_URL_CITYLIST = "/main/home/CityListActivity";
public static final String ACTIVITY_URL_TRAVELDETAILS ="/tourism/other/TravelDetailsActivity";
public static final String ACTIVITY_URL_TRAVELDETAILS = "/tourism/other/TravelDetailsActivity";
public static final String ACTIVITY_URL_CAMPDETAIL ="/camp/camp/CampDetailActivity";
public static final String ACTIVITY_URL_CAMPDETAIL = "/camp/camp/CampDetailActivity";
public static final String ACTIVITY_URL_IDCARDCERTIFICATION ="/rvmine/personal/IDCardCertificationActivity";
public static final String ACTIVITY_URL_IDCARDCERTIFICATION = "/rvmine/personal/IDCardCertificationActivity";
public static final String ACTIVITY_URL_TRAVELSEARCH ="/rv/search/TravelSearchActivity";
public static final String ACTIVITY_URL_TRAVELSEARCH = "/rv/search/TravelSearchActivity";
public static final String ACTIVITY_URL_WEBVIEW ="/module/basic/WebActivity";
public static final String ACTIVITY_URL_WEBVIEW = "/module/basic/WebActivity";
public static final String ACTIVITY_URL_CARDETAILMAP ="/main/home/CarDetailMapActivity";
public static final String ACTIVITY_URL_CARDETAILMAP = "/main/home/CarDetailMapActivity";
public static final String ACTIVITY_URL_CALENDAR ="/plugin/calendar/CalendarActivity";
public static final String ACTIVITY_URL_CALENDAR = "/plugin/calendar/CalendarActivity";
public static final String ACTIVITY_URL_SHAREIMAGE="/main/home/ShareImageActivity";
public static final String ACTIVITY_URL_SHAREIMAGE = "/main/home/ShareImageActivity";
public static final String ACTIVITY_URL_PUBLISHINGPROBLEM="/discovery/other/PublishingProblemActivity";
public static final String ACTIVITY_URL_PUBLISHINGPROBLEM = "/discovery/other/PublishingProblemActivity";
public static final String ACTIVITY_URL_DETAILQUESTIONANDANSWER="/discovery/other/DetailQuestionAndAnswerActivity";
public static final String ACTIVITY_URL_DETAILQUESTIONANDANSWER = "/discovery/other/DetailQuestionAndAnswerActivity";
public static final String ACTIVITY_URL_POSTPAT="/discovery/other/PostPatActivity";
public static final String ACTIVITY_URL_POSTPAT = "/discovery/other/PostPatActivity";
public static final String ACTIVITY_URL_DETAILPAT="/discovery/other/DetailPatActivity";
public static final String ACTIVITY_URL_DETAILPAT = "/discovery/other/DetailPatActivity";
public static final String ACTIVITY_URL_SENDVIDEO="/discovery/other/SendVideoActivity";
public static final String ACTIVITY_URL_SENDVIDEO = "/discovery/other/SendVideoActivity";
public static final String ACTIVITY_URL_CAMERAVIEW="/discovery/other/CameraViewActivity";
public static final String ACTIVITY_URL_CAMERAVIEW = "/discovery/other/CameraViewActivity";
public static final String ACTIVITY_URL_MAPLOCATION="/discovery/other/MapLocationActivity";
public static final String ACTIVITY_URL_MAPLOCATION = "/discovery/other/MapLocationActivity";
public static final String ACTIVITY_URL_COUPONALL="/xxrv/coupon/CouponAllActivity";
public static final String ACTIVITY_URL_COUPONALL = "/xxrv/coupon/CouponAllActivity";
public static final String ACTIVITY_URL_CARCOUPON="/xxrv/coupon/CarCouponActivity";
public static final String ACTIVITY_URL_CARCOUPON = "/xxrv/coupon/CarCouponActivity";
public static final String ACTIVITY_URL_WALLET="/wallet/activity/WalletActivity";
public static final String ACTIVITY_URL_WALLET = "/wallet/activity/WalletActivity";
public static final String ACTIVITY_URL_CARPURCHASELIST="/carpurchase/activity/CarPurchaseListActivity";
public static final String ACTIVITY_URL_CARPURCHASELIST = "/carpurchase/activity/CarPurchaseListActivity";
public static final String ACTIVITY_URL_DETAILCARPURCHASE="/carpurchase/activity/DetailCarPurchaseActivity";
public static final String ACTIVITY_URL_DETAILCARPURCHASE = "/carpurchase/activity/DetailCarPurchaseActivity";
public static final String ACTIVITY_URL_SHOPSHOW="/main/home/ShopShowActivity";
public static final String ACTIVITY_URL_SHOPSHOW = "/main/home/ShopShowActivity";
public static final String ACTIVITY_URL_SELECTSHOP="/main/home/SelectShopActivity";
public static final String ACTIVITY_URL_SELECTSHOP = "/main/home/SelectShopActivity";
public static final String ACTIVITY_URL_PATGENERATEPOSTER="/discovery/other/PatGeneratePosterActivity";
public static final String ACTIVITY_URL_PATGENERATEPOSTER = "/discovery/other/PatGeneratePosterActivity";
public static final String ACTIVITY_URL_POSTERACTIVITY="/poster/plugin/PosterActivity";
public static final String ACTIVITY_URL_POSTERACTIVITY = "/poster/plugin/PosterActivity";
public static final String ACTIVITY_URL_CARPOSTERACTIVITY="/poster/plugin/CarPosterActivity";
public static final String ACTIVITY_URL_CARPOSTERACTIVITY = "/poster/plugin/CarPosterActivity";
public static final String ACTIVITY_URL_RECORDXACTIVITY = "/video/activity/RecordxActivity";
}
@@ -13,6 +13,10 @@ import android.widget.TextView;
*/
public class DisplayUtil {
// Maximum duration, in seconds, that the voice bubble width scales over (the app actually allows 60 s, but with 60 s here short clips would show almost no width difference)
public static final float voice_max_length = 30;
public static final int voice_view_max_width = 145;// dp
public static final int voice_view_min_width = 40;// dp
/**
* 将px值转换为dip或dp值,保证尺寸大小不变
*
@@ -107,4 +111,13 @@ public class DisplayUtil {
}
return px;
}
public static int getVoiceViewWidth(Context context, int seconds) {
if (seconds >= voice_max_length) {
return dip2px(context, voice_view_max_width);
}
final int dpLen = (int) ((seconds / voice_max_length) * (voice_view_max_width - voice_view_min_width)) + voice_view_min_width;
return dip2px(context, dpLen);
}
}
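The width mapping above is linear in the clip duration: 0 s gives voice_view_min_width (40 dp), and 30 s or more caps at voice_view_max_width (145 dp). A minimal standalone sketch of the resulting dp values (dip2px is left out, since it only multiplies by screen density):

public class VoiceWidthDemo {
    public static void main(String[] args) {
        // 0 s -> 40 dp; 15 s -> (15/30)*(145-40)+40 = 92 dp; >= 30 s -> 145 dp
        for (int seconds : new int[]{0, 15, 30, 60}) {
            int dp = seconds >= 30 ? 145 : (int) ((seconds / 30f) * (145 - 40)) + 40;
            System.out.println(seconds + " s -> " + dp + " dp");
        }
    }
}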
@@ -34,7 +34,7 @@ dependencies {
testImplementation 'junit:junit:4.12'
implementation 'com.android.support:appcompat-v7:28.0.0'
// Event bus
api "org.greenrobot:eventbus:$rootProject.ext.eventBusVersion"
api 'de.greenrobot:eventbus:2.4.0'
}
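Note that the two artifacts expose different subscriber APIs: org.greenrobot EventBus 3.x uses @Subscribe annotations, while de.greenrobot 2.4.0 discovers handlers by the onEvent* naming convention, which is why the import also swaps from org.greenrobot.eventbus.EventBus to de.greenrobot.event.EventBus later in this diff. A minimal 2.4-style subscriber sketch (the event class is hypothetical):

import de.greenrobot.event.EventBus;

public class PlaybackSubscriber {
    // Hypothetical event type, for illustration only.
    public static class PlaybackEvent { }

    public void register() { EventBus.getDefault().register(this); }
    public void unregister() { EventBus.getDefault().unregister(this); }

    // EventBus 2.x dispatches by method name: onEventMainThread runs on the main thread.
    public void onEventMainThread(PlaybackEvent event) {
        // handle the event here
    }
}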
@@ -48,7 +48,7 @@ public class ChatImageView extends ImageView {
private void initPaint() {
mBitPaint = new Paint();
mBitPaint.setAntiAlias(true); // enable anti-aliasing
mBitPaint.setColor(Color.parseColor("#EBEBEB")); // paint color
mBitPaint.setColor(Color.parseColor("#f2f2f2")); // paint color (previously #EBEBEB)
mBitPaint.setStrokeWidth(1f); // stroke width
mBitPaint.setStyle(Paint.Style.FILL); // fill style
triangleY = 30;
@@ -160,4 +160,10 @@ public class ChatImageView extends ImageView {
invalidate();
}
// public void setImageGifDrawable(pl.droidsonroids.gif.GifDrawable gifFromFile) {
// }
public void setImageGifDrawable(Drawable drawable) {
super.setImageDrawable(drawable);
}
}
@@ -30,7 +30,7 @@ public class JCMediaManager implements TextureView.SurfaceTextureListener, Media
public static String CURRENT_PLAYING_URL;
public static boolean CURRENT_PLING_LOOP;
public static Map<String, String> MAP_HEADER_DATA;
private static JCMediaManager JCMediaManager;
private static fm.jiecao.jcvideoplayer_lib.JCMediaManager JCMediaManager;
private static OnJcvdListener mJcvdListener;
public MediaPlayer mediaPlayer = new MediaPlayer();
public int currentVideoWidth = 0;
@@ -46,7 +46,7 @@ public class JCMediaManager implements TextureView.SurfaceTextureListener, Media
mainThreadHandler = new Handler();
}
public static JCMediaManager instance() {
public static fm.jiecao.jcvideoplayer_lib.JCMediaManager instance() {
if (JCMediaManager == null) {
JCMediaManager = new JCMediaManager();
}
@@ -57,6 +57,12 @@ public class JCMediaManager implements TextureView.SurfaceTextureListener, Media
mJcvdListener = listener;
}
public static void removeOnJcdvListener(OnJcvdListener listener) {
if (mJcvdListener == listener) {
mJcvdListener = null;
}
}
public Point getVideoSize() {
if (currentVideoWidth != 0 && currentVideoHeight != 0) {
return new Point(currentVideoWidth, currentVideoHeight);
@@ -70,7 +76,6 @@ public class JCMediaManager implements TextureView.SurfaceTextureListener, Media
Message msg = new Message();
msg.what = HANDLER_PREPARE;
mMediaHandler.sendMessage(msg);
Log.e("xuan", "jcm prepare: ");
}
@@ -85,7 +90,6 @@ public class JCMediaManager implements TextureView.SurfaceTextureListener, Media
public void recoverMediaPlayer() {
mediaPlayer.release();
onCompletion(mediaPlayer);
Log.e("xuan", "jcm recoverMediaPlayer: ");
}
@@ -241,14 +245,14 @@ public class JCMediaManager implements TextureView.SurfaceTextureListener, Media
Method method = clazz.getDeclaredMethod("setDataSource", String.class, Map.class);
method.invoke(mediaPlayer, CURRENT_PLAYING_URL, MAP_HEADER_DATA);
mediaPlayer.setLooping(false);
mediaPlayer.setOnPreparedListener(JCMediaManager.this);
mediaPlayer.setOnCompletionListener(JCMediaManager.this);
mediaPlayer.setOnBufferingUpdateListener(JCMediaManager.this);
mediaPlayer.setOnPreparedListener(fm.jiecao.jcvideoplayer_lib.JCMediaManager.this);
mediaPlayer.setOnCompletionListener(fm.jiecao.jcvideoplayer_lib.JCMediaManager.this);
mediaPlayer.setOnBufferingUpdateListener(fm.jiecao.jcvideoplayer_lib.JCMediaManager.this);
mediaPlayer.setScreenOnWhilePlaying(true);
mediaPlayer.setOnSeekCompleteListener(JCMediaManager.this);
mediaPlayer.setOnErrorListener(JCMediaManager.this);
mediaPlayer.setOnInfoListener(JCMediaManager.this);
mediaPlayer.setOnVideoSizeChangedListener(JCMediaManager.this);
mediaPlayer.setOnSeekCompleteListener(fm.jiecao.jcvideoplayer_lib.JCMediaManager.this);
mediaPlayer.setOnErrorListener(fm.jiecao.jcvideoplayer_lib.JCMediaManager.this);
mediaPlayer.setOnInfoListener(fm.jiecao.jcvideoplayer_lib.JCMediaManager.this);
mediaPlayer.setOnVideoSizeChangedListener(fm.jiecao.jcvideoplayer_lib.JCMediaManager.this);
mediaPlayer.prepareAsync();
if (savedSurfaceTexture != null) {
mediaPlayer.setSurface(new Surface(savedSurfaceTexture));
......
@@ -27,13 +27,13 @@ import android.widget.SeekBar;
import android.widget.TextView;
import android.widget.Toast;
import org.greenrobot.eventbus.EventBus;
import java.lang.reflect.Constructor;
import java.util.Map;
import java.util.Timer;
import java.util.TimerTask;
import de.greenrobot.event.EventBus;
/**
* Created by Nathen on 16/7/30.
*/
......
@@ -123,7 +123,6 @@ public class JCVideoViewbyXuan extends FrameLayout implements OnJcvdListener {
}
}
/**
* Pause playback
*/
@@ -148,6 +147,7 @@ public class JCVideoViewbyXuan extends FrameLayout implements OnJcvdListener {
Log.e("xuan", "stop: " + mCurrState);
mCurrState = JCVideoPlayer.CURRENT_STATE_NORMAL;
JCMediaManager.instance().releaseMediaPlayer();
JCMediaManager.removeOnJcdvListener(this);
// Added so memory does not keep climbing when a video plays in a loop.
Runtime.getRuntime().gc();
}
@@ -160,7 +160,6 @@ public class JCVideoViewbyXuan extends FrameLayout implements OnJcvdListener {
// Clear cached state
this.removeView(JCMediaManager.textureView);
mAudioManager = (AudioManager) getContext().getSystemService(Context.AUDIO_SERVICE);
mAudioManager.abandonAudioFocus(onAudioFocusChangeListener);
@@ -192,7 +191,6 @@ public class JCVideoViewbyXuan extends FrameLayout implements OnJcvdListener {
* Change the current playback position
*/
public void changeProgress(int pro) {
// Get the video duration
int duration = getDuration();
// Convert the duration into a time value
@@ -206,7 +204,6 @@ public class JCVideoViewbyXuan extends FrameLayout implements OnJcvdListener {
JCUtils.clearSavedProgress(getContext(), mCurrUrl);
}
public void seekTo(int msec) {
// Seek to a position, in milliseconds
JCMediaManager.instance().mediaPlayer.seekTo(msec);
@@ -217,7 +214,6 @@ public class JCVideoViewbyXuan extends FrameLayout implements OnJcvdListener {
*/
@Override
public void onPrepared() {
Log.e("xuan", "开始播放: " + mCurrUrl);
mCurrState = JCVideoPlayer.CURRENT_STATE_PLAYING;
if (mListener != null) {
@@ -228,8 +224,7 @@ public class JCVideoViewbyXuan extends FrameLayout implements OnJcvdListener {
// Callback invoked when playback finishes
@Override
public void onCompletion() {
Log.e("xuan", "播放完成: ");
Log.e("xuan", "播放完成: state = " + mCurrState);
mCurrState = JCVideoPlayer.CURRENT_STATE_NORMAL;
if (mListener != null) {
mListener.onCompletion();
@@ -256,7 +251,6 @@ public class JCVideoViewbyXuan extends FrameLayout implements OnJcvdListener {
}
@Override
public void onReset() {
@@ -294,5 +288,4 @@ public class JCVideoViewbyXuan extends FrameLayout implements OnJcvdListener {
return mCurrState == JCVideoPlayer.CURRENT_STATE_PLAYING;
}
}
@@ -108,7 +108,7 @@ public class JVCideoPlayerStandardSecond extends JCVideoPlayer {
ContentValues localContentValue = getVideoContentValues(file, System.currentTimeMillis());
Uri uri = localResolver.insert(MediaStore.Video.Media.EXTERNAL_CONTENT_URI, localContentValue);
Log.e("zx", "downloadUrl: " + uri);
Toast.makeText(mContext, "视频已保存", Toast.LENGTH_SHORT).show();
Toast.makeText(mContext, getContext().getString(R.string.video_saved_jv), Toast.LENGTH_SHORT).show();
if (mVideoPath.startsWith("http")) {
String path = "/storage/emulated/0/Android/data/com.sk.weichat/files/Movies" +
mVideoPath.substring(mVideoPath.length() - 6, mVideoPath.length() - 4) + ".mp4";
@@ -118,9 +118,9 @@ public class JVCideoPlayerStandardSecond extends JCVideoPlayer {
ContentResolver localContentResolver = mContext.getContentResolver();
ContentValues localContentValues = getVideoContentValues(f, System.currentTimeMillis());
localContentResolver.insert(MediaStore.Video.Media.EXTERNAL_CONTENT_URI, localContentValues);
Toast.makeText(mContext, "视频已保存", Toast.LENGTH_SHORT).show();
Toast.makeText(mContext, getContext().getString(R.string.video_saved_jv), Toast.LENGTH_SHORT).show();
} else
Toast.makeText(mContext, "视频已存在", Toast.LENGTH_SHORT).show();
Toast.makeText(mContext, getContext().getString(R.string.video_already_exists), Toast.LENGTH_SHORT).show();
}
savaVideoDialog.dismiss();
}
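getVideoContentValues is not part of this diff; a plausible sketch of what such a helper returns for the MediaStore inserts above (every column choice here is an assumption):

// Hypothetical reconstruction, for illustration only.
private ContentValues getVideoContentValues(File file, long timeMillis) {
    ContentValues values = new ContentValues();
    values.put(MediaStore.Video.Media.TITLE, file.getName());
    values.put(MediaStore.Video.Media.DISPLAY_NAME, file.getName());
    values.put(MediaStore.Video.Media.MIME_TYPE, "video/mp4");
    values.put(MediaStore.Video.Media.DATE_TAKEN, timeMillis);
    values.put(MediaStore.Video.Media.DATE_ADDED, timeMillis / 1000);
    values.put(MediaStore.Video.Media.DATA, file.getAbsolutePath());
    values.put(MediaStore.Video.Media.SIZE, file.length());
    return values;
}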
......
@@ -18,11 +18,11 @@ import android.widget.ProgressBar;
import android.widget.SeekBar;
import android.widget.TextView;
import org.greenrobot.eventbus.EventBus;
import java.util.Timer;
import java.util.TimerTask;
import de.greenrobot.event.EventBus;
/**
* Player for the chat screen
* Todo Modify by zq in 2018.12.3
......
@@ -34,7 +34,6 @@ public class downloadTask extends Thread {
int downloadSize = 0;// bytes downloaded so far, starts at 0
long downloadTime = System.currentTimeMillis();// download start time
try {
// TanX.Log("开始下载,URL:" + urlStr);
URL url = new URL(urlStr);
URLConnection conn = url.openConnection();
//Get the total size of the file being downloaded
@@ -54,11 +53,9 @@ }
}
boolean finished = false;
while (!finished) {
// TanX.Log("下载中");
if (System.currentTimeMillis() - downloadTime >= overTime) {
//The timeout limit has been reached
if (mDownloadedSize <= downloadSize) {
// TanX.Log("总下载大小:" + mDownloadedSize);
//10 seconds passed without downloading anything: time out
throw new Exception();
}
@@ -76,9 +73,7 @@ }
}
}
}
// TanX.Log("下载完成");
} catch (Exception e) {
// TanX.Log("下载异常");
e.printStackTrace();
}
}
......
@@ -4,4 +4,6 @@
<string name="tips_not_wifi_confirm">Continuar</string>
<string name="tips_not_wifi_cancel">Parar</string>
<string name="no_url">Sem Vídeo</string>
<string name="video_saved_jv">视频已保存</string>
<string name="video_already_exists">视频已存在</string>
</resources>
@@ -4,4 +4,6 @@
<string name="tips_not_wifi_confirm">繼續播放</string>
<string name="tips_not_wifi_cancel">停止播放</string>
<string name="no_url">播放地址無效</string>
<string name="video_saved_jv">视频已保存</string>
<string name="video_already_exists">视频已存在</string>
</resources>
@@ -4,4 +4,6 @@
<string name="tips_not_wifi_confirm">繼續播放</string>
<string name="tips_not_wifi_cancel">停止播放</string>
<string name="no_url">播放地址無效</string>
<string name="video_saved_jv">视频已保存</string>
<string name="video_already_exists">视频已存在</string>
</resources>
@@ -5,5 +5,7 @@
<string name="tips_not_wifi_cancel">Stop play</string>
<string name="no_url">No mUrl</string>
<string name="sava_video">保存视频</string>
<string name="video_saved_jv">视频已保存</string>
<string name="video_already_exists">视频已存在</string>
</resources>
@@ -129,8 +129,8 @@ public class DiscoveryFragment extends BaseFragment<DiscoveryPresenter> {
List<String> list = new ArrayList<>();
list.add(_mActivity.getString(R.string.discovery_recommend));
list.add(_mActivity.getString(R.string.discovery_pat));
// list.add(_mActivity.getString(R.string.discovery_short_video));
// list.add(_mActivity.getString(R.string.discovery_question_and_answer));
list.add(_mActivity.getString(R.string.discovery_short_video));
list.add(_mActivity.getString(R.string.discovery_question_and_answer));
menuAdapter.setNewData(list);
menuAdapter.setOnItemClickListener(new BaseQuickAdapter.OnItemClickListener() {
@Override
@@ -174,8 +174,8 @@ public class DiscoveryFragment extends BaseFragment<DiscoveryPresenter> {
List<BaseFragment> list = new ArrayList<>();
list.add(RecommendFragment.getInstance(TYPE_RECOMMEND));
list.add(PatFragment.getInstance(TYPE_PAT));
// list.add(ShortVideoFragment.getInstance(TYPE_SHORT_VIDEO));
// list.add(QuestionAndAnswerFragment.getInstance(TYPE_QUESTION_AND_ANSWER));
list.add(ShortVideoFragment.getInstance(TYPE_SHORT_VIDEO));
list.add(QuestionAndAnswerFragment.getInstance(TYPE_QUESTION_AND_ANSWER));
return list;
}
@@ -201,16 +201,16 @@ public class DiscoveryFragment extends BaseFragment<DiscoveryPresenter> {
int id = view.getId();
if (id == R.id.iv_discovery_content_add) {
//Add button
// showPopupWindow(ivDiscoveryContentAdd);
showPopupWindow(ivDiscoveryContentAdd);
//Check whether the user is already logged in
if (TextUtils.isEmpty(OkGoUtil.getToken())) {
ARouter.getInstance().build(Constance.ACTIVITY_URL_LOGINRV).navigation();
return;
}
ARouter.getInstance()
.build(Constance.ACTIVITY_URL_POSTPAT)
.navigation();
// if (TextUtils.isEmpty(OkGoUtil.getToken())) {
// ARouter.getInstance().build(Constance.ACTIVITY_URL_LOGINRV).navigation();
// return;
// }
//
// ARouter.getInstance()
// .build(Constance.ACTIVITY_URL_POSTPAT)
// .navigation();
}
}
@@ -249,9 +249,8 @@ public class DiscoveryFragment extends BaseFragment<DiscoveryPresenter> {
} else if (id == R.id.tv_short_video) {
//Short video
ARouter.getInstance()
.build(Constance.ACTIVITY_URL_SENDVIDEO)
.build(Constance.ACTIVITY_URL_RECORDXACTIVITY)
.navigation();
dismiss();
} else if (id == R.id.tv_ask_questions) {
......
@@ -21,12 +21,9 @@ import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.bumptech.glide.Glide;
import com.bumptech.glide.load.resource.bitmap.RoundedCorners;
import com.bumptech.glide.request.RequestOptions;
import com.chad.library.adapter.base.BaseQuickAdapter;
import com.dueeeke.videoplayer.player.VideoView;
import com.dueeeke.videoplayer.widget.RotateVideoView;
import com.frame.rv.config.RvFrameConfig;
import com.ruiwenliu.wrapper.base.BaseBean;
import com.ruiwenliu.wrapper.base.BaseStatusActivity;
@@ -52,7 +49,6 @@ import com.xxfc.discovery.bean.DiscoveryIMTokenBean;
import com.xxfc.discovery.bean.DiscoveryRecommendBean;
import com.xxfc.discovery.dialog.TrillCommentInputDialog;
import com.xxfc.discovery.presenter.DiscoveryPresenter;
import com.xxfc.discovery.widget.TikTokController;
import com.yuyife.okgo.OkGoUtil;
import java.util.ArrayList;
@@ -106,8 +102,7 @@ public class ShortVideoActivity extends BaseStatusActivity<DiscoveryPresenter> i
// RotateVideoView mVideoView;
private VideoView mVideoView;
private TikTokController mTikTokController;
private DiscoveryVideoCommentAdapter commentAdapter;
private DiscoveryRecommendBean.DataBeanX.DataBean dataBean;
@@ -143,11 +138,9 @@ public class ShortVideoActivity extends BaseStatusActivity<DiscoveryPresenter> i
showTitle(false);
dataBean = (DiscoveryRecommendBean.DataBeanX.DataBean) intent.getSerializableExtra("bean");
dataBeanList = new ArrayList();
mVideoView = new VideoView(this);
mVideoView.setLooping(true);
mTikTokController = new TikTokController(this);
mVideoView.setVideoController(mTikTokController);
commentAdapter = new DiscoveryVideoCommentAdapter();
rvVideoComment.setLayoutManager(new LinearLayoutManager(mActivity, LinearLayoutManager.VERTICAL, false));
@@ -201,41 +194,7 @@ public class ShortVideoActivity extends BaseStatusActivity<DiscoveryPresenter> i
}
}
startPlay();
}
private void startPlay() {
if (dataBean != null && dataBean.getBody() != null) {
if (dataBean.getBody().getVideos() != null && dataBean.getBody().getVideos().size() > 0) {
GlideOptions options = GlideOptions.placeholderOf(com.ruiwenliu.wrapper.R.drawable.glide_icon_placeholder).
error(com.ruiwenliu.wrapper.R.drawable.glide_icon_error);
TransformationUtil utils = new TransformationUtil(mTikTokController.getThumb());
GlideApp.with(mActivity)
.setDefaultRequestOptions(
new RequestOptions()
.frame(1000000)
.centerCrop()
)
.asBitmap()
.load(dataBean.getBody().getVideos().get(0).getOurl())
.apply(options)
.apply(RequestOptions.bitmapTransform(new RoundedCorners(12)).override(mTikTokController.getThumb().getWidth(), mTikTokController.getThumb().getHeight()))
.placeholder(android.R.color.white)
.into(utils);
}
ViewParent parent = mVideoView.getParent();
if (parent instanceof FrameLayout) {
((FrameLayout) parent).removeView(mVideoView);
}
container.addView(mVideoView);
if (dataBean.getBody().getVideos() != null && dataBean.getBody().getVideos().size() > 0) {
mVideoView.setUrl(dataBean.getBody().getVideos().get(0).getOurl());
mVideoView.setScreenScale(VideoView.SCREEN_SCALE_DEFAULT);
mVideoView.start();
}
}
}
@Override
@@ -336,30 +295,13 @@ public class ShortVideoActivity extends BaseStatusActivity<DiscoveryPresenter> i
}
@Override
public void onPause() {
super.onPause();
mVideoView.pause();
}
@Override
public void onDestroy() {
super.onDestroy();
mVideoView.release();
}
@OnClick({R2.id.iv_item_video_play, R2.id.iv_item_video_close, R2.id.v_comment_show_bg, R2.id.ll_item_comment
, R2.id.ll_item_video_islike, R2.id.ll_item_video_comment, R2.id.ll_item_video_share, R2.id.rl_item_video})
public void onViewClicked(View v) {
int id = v.getId();
if (id == R.id.rl_item_video || id == R.id.iv_item_video_play) {
if (mVideoView.isPlaying()) {
mVideoView.pause();
ivVideoPlay.animate().alpha(1.0f).start();
} else {
mVideoView.resume();
ivVideoPlay.animate().alpha(0f).start();
}
} else if (id == R.id.ll_item_video_islike) {
//Like
getImToken(4);
......
package com.xxfc.discovery.widget;
import android.content.Context;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.util.AttributeSet;
import android.widget.ImageView;
import com.dueeeke.videoplayer.controller.BaseVideoController;
import com.dueeeke.videoplayer.player.VideoView;
import com.dueeeke.videoplayer.util.L;
import com.ruiwenliu.wrapper.dialog.LoadingDialog;
import com.xxfc.discovery.R;
public class TikTokController extends BaseVideoController {
private ImageView thumb;
public TikTokController(@NonNull Context context) {
super(context);
}
public TikTokController(@NonNull Context context, @Nullable AttributeSet attrs) {
super(context, attrs);
}
public TikTokController(@NonNull Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
}
@Override
protected int getLayoutId() {
return R.layout.layout_tiktok_controller;
}
@Override
protected void initView() {
super.initView();
thumb = mControllerView.findViewById(R.id.iv_thumb);
}
@Override
public void setPlayState(int playState) {
super.setPlayState(playState);
switch (playState) {
case VideoView.STATE_IDLE:
L.e("STATE_IDLE");
thumb.setVisibility(VISIBLE);
break;
case VideoView.STATE_PLAYING:
L.e("STATE_PLAYING");
thumb.setVisibility(GONE);
break;
case VideoView.STATE_PREPARED:
L.e("STATE_PREPARED");
break;
}
}
public ImageView getThumb() {
return thumb;
}
}
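TikTokController is meant to be attached to a dkplayer-style VideoView, exactly as the code removed from ShortVideoActivity above did. A minimal wiring sketch (context, container, and url are assumed to exist):

VideoView videoView = new VideoView(context);
videoView.setLooping(true);
TikTokController controller = new TikTokController(context);
videoView.setVideoController(controller); // the thumbnail ImageView is exposed via controller.getThumb()
container.addView(videoView);
videoView.setUrl(url);
videoView.setScreenScale(VideoView.SCREEN_SCALE_DEFAULT);
videoView.start();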
apply plugin: 'com.android.library'
android {
compileSdkVersion rootProject.ext.compileSdkVersion
buildToolsVersion rootProject.ext.buildToolsVersion
defaultConfig {
minSdkVersion rootProject.ext.minSdkVersion
targetSdkVersion rootProject.ext.targetSdkVersion
versionCode 1
versionName "1.0"
flavorDimensions "default"
testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
javaCompileOptions {
annotationProcessorOptions {
arguments = [moduleName: project.getName()]
}
}
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
productFlavors {
//Development environment
dev {
}
sit {
}
pro {
}
}
}
dependencies {
api fileTree(include: ['*.jar'], dir: 'libs')
api 'com.android.support:appcompat-v7:25.3.1'
api 'com.android.support:recyclerview-v7:25.3.1'
api 'com.android.support.constraint:constraint-layout:1.0.2'
// api 'com.android.support:support-v4:25.3.1'
// api 'com.android.support:support-v13:25.3.1'
}
# Add project specific ProGuard rules here.
# By default, the flags in this file are appended to flags specified
# in C:\Users\Administrator\AppData\Local\Android\Sdk/tools/proguard/proguard-android.txt
# You can edit the include path and order by changing the proguardFiles
# directive in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# Add any project specific keep options here:
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.joe.camera2recorddemo">
<uses-feature
android:glEsVersion="0x00020000"
android:required="true"/>
<uses-permission android:name="android.permission.CAMERA"/>
<uses-permission android:name="android.permission.RECORD_AUDIO"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.WAKE_LOCK"/>
<!--<application-->
<!--android:allowBackup="true"-->
<!--android:icon="@mipmap/ic_launcher"-->
<!--android:label="@string/app_name"-->
<!--android:roundIcon="@mipmap/ic_launcher_round"-->
<!--android:supportsRtl="true"-->
<!--android:theme="@style/AppTheme">-->
<!--<activity-->
<!--android:name=".Activity.MP4Activity"-->
<!--android:theme="@style/Theme.AppCompat.NoActionBar">-->
<!--</activity>-->
<!--</application>-->
</manifest>
\ No newline at end of file
precision mediump float;
varying vec2 vTextureCo;
uniform sampler2D uTexture;
void main() {
gl_FragColor = texture2D( uTexture, vTextureCo);
}
\ No newline at end of file
attribute vec4 aVertexCo;
attribute vec2 aTextureCo;
uniform mat4 uVertexMatrix;
uniform mat4 uTextureMatrix;
varying vec2 vTextureCo;
void main(){
gl_Position = uVertexMatrix*aVertexCo;
vTextureCo = aTextureCo.xy;
}
\ No newline at end of file
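These shader pairs are compiled and linked at runtime; a minimal GLES20 sketch of that step for shader/base.vert and shader/base.frag (loadAsset is a hypothetical helper that reads an asset file into a String):

int vert = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
GLES20.glShaderSource(vert, loadAsset("shader/base.vert"));
GLES20.glCompileShader(vert);
int frag = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
GLES20.glShaderSource(frag, loadAsset("shader/base.frag"));
GLES20.glCompileShader(frag);
int program = GLES20.glCreateProgram();
GLES20.glAttachShader(program, vert);
GLES20.glAttachShader(program, frag);
GLES20.glLinkProgram(program);
GLES20.glUseProgram(program); // then bind aVertexCo/aTextureCo and upload the two matrices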
precision mediump float;
varying vec2 textureCoordinate;
uniform sampler2D vTexture;
void main() {
gl_FragColor = texture2D( vTexture, textureCoordinate );
}
\ No newline at end of file
attribute vec4 vPosition;
attribute vec2 vCoord;
uniform mat4 vMatrix;
varying vec2 textureCoordinate;
void main(){
gl_Position = vMatrix*vPosition;
textureCoordinate = vCoord;
}
\ No newline at end of file
precision highp float;
precision highp int;
uniform sampler2D uTexture;
uniform int uIternum;
uniform float uACoef; //smoothing amount coefficient
uniform float uMixCoef; //mixing coefficient
varying highp vec2 vTextureCo;
varying highp vec2 vBlurCoord1s[14];
const float distanceNormalizationFactor = 4.0;
const mat3 saturateMatrix = mat3(1.1102,-0.0598,-0.061,-0.0774,1.0826,-0.1186,-0.0228,-0.0228,1.1772);
void main() {
vec3 centralColor;
float central;
float gaussianWeightTotal;
float sum;
float sampleColor;
float distanceFromCentralColor;
float gaussianWeight;
central = texture2D( uTexture, vTextureCo ).g;
gaussianWeightTotal = 0.2;
sum = central * 0.2;
for (int i = 0; i < 6; i++) {
sampleColor = texture2D( uTexture, vBlurCoord1s[i] ).g;
distanceFromCentralColor = min( abs( central - sampleColor ) * distanceNormalizationFactor, 1.0 );
gaussianWeight = 0.05 * (1.0 - distanceFromCentralColor);
gaussianWeightTotal += gaussianWeight;
sum += sampleColor * gaussianWeight;
}
for (int i = 6; i < 14; i++) {
sampleColor = texture2D( uTexture, vBlurCoord1s[i] ).g;
distanceFromCentralColor = min( abs( central - sampleColor ) * distanceNormalizationFactor, 1.0 );
gaussianWeight = 0.1 * (1.0 - distanceFromCentralColor);
gaussianWeightTotal += gaussianWeight;
sum += sampleColor * gaussianWeight;
}
sum = sum / gaussianWeightTotal;
centralColor = texture2D( uTexture, vTextureCo ).rgb;
sampleColor = centralColor.g - sum + 0.5;
for (int i = 0; i < uIternum; ++i) {
if (sampleColor <= 0.5) {
sampleColor = sampleColor * sampleColor * 2.0;
}
else {
sampleColor = 1.0 - ((1.0 - sampleColor)*(1.0 - sampleColor) * 2.0);
}
}
float aa = 1.0 + pow( centralColor.g, 0.3 )*uACoef;
vec3 smoothColor = centralColor*aa - vec3( sampleColor )*(aa - 1.0);
smoothColor = clamp( smoothColor, vec3( 0.0 ), vec3( 1.0 ) );
smoothColor = mix( centralColor, smoothColor, pow( centralColor.g, 0.33 ) );
smoothColor = mix( centralColor, smoothColor, pow( centralColor.g, uMixCoef ) );
gl_FragColor = vec4( pow( smoothColor, vec3( 0.96 ) ), 1.0 );
vec3 satcolor = gl_FragColor.rgb * saturateMatrix;
gl_FragColor.rgb = mix( gl_FragColor.rgb, satcolor, 0.23 );
}
\ No newline at end of file
attribute vec4 aVertexCo;
attribute vec2 aTextureCo;
varying vec2 vTextureCo;
varying vec2 vBlurCoord1s[14];
uniform float uWidth;
uniform float uHeight;
uniform mat4 uVertexMatrix;
uniform mat4 uTextureMatrix;
void main()
{
gl_Position = uVertexMatrix*aVertexCo;
vTextureCo = (uTextureMatrix*vec4(aTextureCo,0,1)).xy;
highp float mul_x = 2.0 / uWidth;
highp float mul_y = 2.0 / uHeight;
vBlurCoord1s[0] = vTextureCo + vec2( 0.0 * mul_x, -10.0 * mul_y );
vBlurCoord1s[1] = vTextureCo + vec2( 8.0 * mul_x, -5.0 * mul_y );
vBlurCoord1s[2] = vTextureCo + vec2( 8.0 * mul_x, 5.0 * mul_y );
vBlurCoord1s[3] = vTextureCo + vec2( 0.0 * mul_x, 10.0 * mul_y );
vBlurCoord1s[4] = vTextureCo + vec2( -8.0 * mul_x, 5.0 * mul_y );
vBlurCoord1s[5] = vTextureCo + vec2( -8.0 * mul_x, -5.0 * mul_y );
mul_x = 1.2 / uWidth;
mul_y = 1.2 / uHeight;
vBlurCoord1s[6] = vTextureCo + vec2( 0.0 * mul_x, -6.0 * mul_y );
vBlurCoord1s[7] = vTextureCo + vec2( -4.0 * mul_x, -4.0 * mul_y );
vBlurCoord1s[8] = vTextureCo + vec2( -6.0 * mul_x, 0.0 * mul_y );
vBlurCoord1s[9] = vTextureCo + vec2( -4.0 * mul_x, 4.0 * mul_y );
vBlurCoord1s[10] = vTextureCo + vec2( 0.0 * mul_x, 6.0 * mul_y );
vBlurCoord1s[11] = vTextureCo + vec2( 4.0 * mul_x, 4.0 * mul_y );
vBlurCoord1s[12] = vTextureCo + vec2( 6.0 * mul_x, 0.0 * mul_y );
vBlurCoord1s[13] = vTextureCo + vec2( 4.0 * mul_x, -4.0 * mul_y );
}
\ No newline at end of file
precision highp float;
varying vec2 vTextureCo;
varying vec2 leftTextureCoordinate;
varying vec2 rightTextureCoordinate;
varying vec2 topTextureCoordinate;
varying vec2 topLeftTextureCoordinate;
varying vec2 topRightTextureCoordinate;
varying vec2 bottomTextureCoordinate;
varying vec2 bottomLeftTextureCoordinate;
varying vec2 bottomRightTextureCoordinate;
uniform sampler2D uTexture;
uniform int vChangeType;
uniform highp float intensity;
uniform float uWidth;
uniform float uHeight;
const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);//grayscale filter weights
const highp vec3 COOL = vec3(0.0, 0.0, 0.1);//cool tone
const highp vec3 WARM = vec3(0.1, 0.1, 0.0);//warm tone
const vec2 texSize = vec2(1920,1080);//emboss parameters
const lowp float intensityone = 1.0;
const lowp mat4 colorMatrix = mat4(0.3588, 0.7044, 0.1368, 0.0,0.2990, 0.5870, 0.1140, 0.0,0.2392, 0.4696, 0.0912, 0.0,0, 0, 0, 1.0);
const highp float threshold = 0.2;
const highp float quantizationLevels = 10.0;
const mediump mat3 convolutionMatrix = mat3(-1.0, 0.0, 1.0,-2.0, 0.0, 2.0,-1.0, 0.0, 1.0);
const float stepcv=1.;
const mat3 GX=mat3(-1.,0., +1., -2., 0., +2., -1., 0., +1.);
const mat3 GY=mat3(-1., -2., -1., 0., 0., 0., +1., +2., +1.);
float colorR(vec2 center,float shiftX,float shiftY){
return texture2D(uTexture,vec2(vTextureCo.x+shiftX/uWidth,vTextureCo.y+shiftY/uHeight)).r;
}
void main()
{
vec4 textureColor = texture2D(uTexture, vTextureCo);
if(vChangeType == 0){
gl_FragColor = textureColor;
}
else if(vChangeType == 1){
vec4 nColor=texture2D(uTexture,vTextureCo);
vec4 deltaColor=nColor+vec4(COOL,0.0);
gl_FragColor=deltaColor;
}
else if(vChangeType == 2){
vec4 nColor=texture2D(uTexture,vTextureCo);
vec4 deltaColor=nColor+vec4(WARM,0.0);
gl_FragColor=deltaColor;
}
else if(vChangeType == 3){
gl_FragColor=vec4(vec3(dot(texture2D( uTexture, vTextureCo).rgb,W)),1.0);
}
else if(vChangeType == 4){
vec2 tex = vTextureCo;
vec2 upLeftUV = vec2(tex.x - 1.0/texSize.x, tex.y - 1.0/texSize.y);
vec4 upLeftColor = texture2D(uTexture,upLeftUV);
vec4 delColor = textureColor - upLeftColor;
float h = 0.3*delColor.x + 0.59*delColor.y + 0.11*delColor.z;
vec4 bkColor = vec4(0.5, 0.5, 0.5, 1.0);
gl_FragColor = vec4(h,h,h,0.0) +bkColor;
}
else if(vChangeType == 5){
gl_FragColor = vec4((1.0 - textureColor.rgb), textureColor.w);
}
else if(vChangeType == 6){
lowp vec4 outputColor = textureColor * colorMatrix;
gl_FragColor = (intensityone * outputColor) + ((1.0 - intensityone) * textureColor);
}
else if(vChangeType == 7){
float bottomLeftIntensity = texture2D(uTexture, bottomLeftTextureCoordinate).r;
float topRightIntensity = texture2D(uTexture, topRightTextureCoordinate).r;
float topLeftIntensity = texture2D(uTexture, topLeftTextureCoordinate).r;
float bottomRightIntensity = texture2D(uTexture, bottomRightTextureCoordinate).r;
float leftIntensity = texture2D(uTexture, leftTextureCoordinate).r;
float rightIntensity = texture2D(uTexture, rightTextureCoordinate).r;
float bottomIntensity = texture2D(uTexture, bottomTextureCoordinate).r;
float topIntensity = texture2D(uTexture, topTextureCoordinate).r;
float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;
float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;
float mag = length(vec2(h, v));
vec3 posterizedImageColor = floor((textureColor.rgb * quantizationLevels) + 0.5) / quantizationLevels;
float thresholdTest = 1.0 - step(threshold, mag);
gl_FragColor = vec4(posterizedImageColor * thresholdTest, textureColor.a);
}
else if(vChangeType == 8){
mediump vec4 bottomColor = texture2D(uTexture, bottomTextureCoordinate);
mediump vec4 bottomLeftColor = texture2D(uTexture, bottomLeftTextureCoordinate);
mediump vec4 bottomRightColor = texture2D(uTexture, bottomRightTextureCoordinate);
mediump vec4 leftColor = texture2D(uTexture, leftTextureCoordinate);
mediump vec4 rightColor = texture2D(uTexture, rightTextureCoordinate);
mediump vec4 topColor = texture2D(uTexture, topTextureCoordinate);
mediump vec4 topRightColor = texture2D(uTexture, topRightTextureCoordinate);
mediump vec4 topLeftColor = texture2D(uTexture, topLeftTextureCoordinate);
mediump vec4 resultColor = topLeftColor * convolutionMatrix[0][0] + topColor * convolutionMatrix[0][1] + topRightColor * convolutionMatrix[0][2];
resultColor += leftColor * convolutionMatrix[1][0] + textureColor * convolutionMatrix[1][1] + rightColor * convolutionMatrix[1][2];
resultColor += bottomLeftColor * convolutionMatrix[2][0] + bottomColor * convolutionMatrix[2][1] + bottomRightColor * convolutionMatrix[2][2];
gl_FragColor = resultColor;
}
else if(vChangeType == 9){
vec2 center=vec2(vTextureCo.x*uWidth,vTextureCo.y*uHeight);
float leftTop=colorR(center,-stepcv,-stepcv);
float centerTop=colorR(center,0.,-stepcv);
float rightTop=colorR(center,stepcv,-stepcv);
float leftCenter=colorR(center,-stepcv,0.);
float rightCenter=colorR(center,stepcv,0.);
float leftBottom=colorR(center,-stepcv,stepcv);
float centerBottom=colorR(center,0.,stepcv);
float rightBottom=colorR(center,stepcv,stepcv);
mat3 d=mat3(colorR(center,-stepcv,-stepcv),colorR(center,0.,-stepcv),colorR(center,stepcv,-stepcv),
colorR(center,-stepcv,0.),colorR(center,0.,0.),colorR(center,stepcv,0.),
colorR(center,-stepcv,stepcv),colorR(center,0.,stepcv),colorR(center,stepcv,stepcv));
float x = d[0][0]*GX[0][0]+d[1][0]*GX[1][0]+d[2][0]*GX[2][0]+
d[0][1]*GX[0][1]+d[1][1]*GX[1][1]+d[2][1]*GX[2][1]+
d[0][2]*GX[0][2]+d[1][2]*GX[1][2]+d[2][2]*GX[2][2];
float y = d[0][0]*GY[0][0]+d[1][0]*GY[1][0]+d[2][0]*GY[2][0]+
d[0][1]*GY[0][1]+d[1][1]*GY[1][1]+d[2][1]*GY[2][1]+
d[0][2]*GY[0][2]+d[1][2]*GY[1][2]+d[2][2]*GY[2][2];
gl_FragColor=vec4(vec3(length(vec2(x,y))),1.);
}
else if(vChangeType == 10){
float bottomLeftIntensity = texture2D(uTexture, bottomLeftTextureCoordinate).r;
float topRightIntensity = texture2D(uTexture, topRightTextureCoordinate).r;
float topLeftIntensity = texture2D(uTexture, topLeftTextureCoordinate).r;
float bottomRightIntensity = texture2D(uTexture, bottomRightTextureCoordinate).r;
float leftIntensity = texture2D(uTexture, leftTextureCoordinate).r;
float rightIntensity = texture2D(uTexture, rightTextureCoordinate).r;
float bottomIntensity = texture2D(uTexture, bottomTextureCoordinate).r;
float topIntensity = texture2D(uTexture, topTextureCoordinate).r;
float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;
float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;
float mag = 1.0 - length(vec2(h, v));
gl_FragColor = vec4(vec3(mag), 1.0);
}
else{
gl_FragColor = textureColor;
}
}
\ No newline at end of file
attribute vec4 aVertexCo;
attribute vec4 aTextureCo;
uniform mat4 uVertexMatrix;
uniform mat4 uTextureMatrix;
uniform highp float texelWidth;
uniform highp float texelHeight;
varying vec2 vTextureCo;
varying vec2 leftTextureCoordinate;
varying vec2 rightTextureCoordinate;
varying vec2 topTextureCoordinate;
varying vec2 topLeftTextureCoordinate;
varying vec2 topRightTextureCoordinate;
varying vec2 bottomTextureCoordinate;
varying vec2 bottomLeftTextureCoordinate;
varying vec2 bottomRightTextureCoordinate;
void main()
{
gl_Position = uVertexMatrix*aVertexCo;
vTextureCo = aTextureCo.xy;
vec2 widthStep = vec2(texelWidth, 0.0);
vec2 heightStep = vec2(0.0, texelHeight);
vec2 widthHeightStep = vec2(texelWidth, texelHeight);
vec2 widthNegativeHeightStep = vec2(texelWidth, -texelHeight);
leftTextureCoordinate = aTextureCo.xy - widthStep;
rightTextureCoordinate = aTextureCo.xy + widthStep;
topTextureCoordinate = aTextureCo.xy - heightStep;
topLeftTextureCoordinate = aTextureCo.xy - widthHeightStep;
topRightTextureCoordinate = aTextureCo.xy + widthNegativeHeightStep;
bottomTextureCoordinate = aTextureCo.xy + heightStep;
bottomLeftTextureCoordinate = aTextureCo.xy - widthNegativeHeightStep;
bottomRightTextureCoordinate = aTextureCo.xy + widthHeightStep;
}
\ No newline at end of file
precision mediump float;
varying vec2 textureCoordinate;
uniform sampler2D vTexture;
void main() {
vec4 color=texture2D( vTexture, textureCoordinate);
float rgb=color.g;
vec4 c=vec4(rgb,rgb,rgb,color.a);
gl_FragColor = c;
}
\ No newline at end of file
precision mediump float;
varying vec2 vTextureCo;
uniform sampler2D uTexture;
uniform lowp float brightness;
void main() {
lowp vec4 textureColor = texture2D(uTexture, vTextureCo);
gl_FragColor = vec4((textureColor.rgb + vec3(brightness)), textureColor.w);
}
\ No newline at end of file
precision mediump float;
varying vec2 vTextureCo;
uniform sampler2D uTexture;
uniform lowp float stepcv;
void main() {
lowp vec4 textureColor = texture2D(uTexture, vTextureCo);
gl_FragColor = vec4(((textureColor.rgb - vec3(0.5)) * stepcv + vec3(0.5)), textureColor.w);
}
\ No newline at end of file
precision mediump float;
varying vec2 vTextureCo;
uniform sampler2D uTexture;
uniform lowp float saturation;
const mediump vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);
void main() {
lowp vec4 textureColor = texture2D(uTexture, vTextureCo);
lowp float luminance = dot(textureColor.rgb, luminanceWeighting);
lowp vec3 greyScaleColor = vec3(luminance);
gl_FragColor = vec4(mix(greyScaleColor, textureColor.rgb, saturation), textureColor.w);
}
\ No newline at end of file
precision mediump float;
varying vec2 vTextureCo;
uniform sampler2D uTexture;
uniform lowp vec2 vignetteCenter;
uniform lowp vec3 vignetteColor;
uniform highp float vignetteStart;
uniform highp float vignetteEnd;
void main() {
lowp vec3 rgb = texture2D(uTexture, vTextureCo).rgb;
lowp float d = distance(vTextureCo, vec2(0.5,0.5));
rgb *= (1.0 - smoothstep(vignetteStart, vignetteEnd, d));
gl_FragColor = vec4(vec3(rgb),1.0);
}
\ No newline at end of file
#extension GL_OES_EGL_image_external : require
precision mediump float;
varying vec2 vTextureCo;
uniform samplerExternalOES uTexture;
void main() {
gl_FragColor = texture2D( uTexture, vTextureCo);
}
\ No newline at end of file
attribute vec4 aVertexCo;
attribute vec2 aTextureCo;
uniform mat4 uVertexMatrix;
uniform mat4 uTextureMatrix;
varying vec2 vTextureCo;
void main(){
gl_Position = uVertexMatrix*aVertexCo;
vTextureCo = (uTextureMatrix*vec4(aTextureCo,0,1)).xy;
}
\ No newline at end of file
#extension GL_OES_EGL_image_external : require
precision mediump float;
varying vec2 textureCoordinate;
uniform samplerExternalOES vTexture;
void main() {
gl_FragColor = texture2D( vTexture, textureCoordinate );
}
\ No newline at end of file
attribute vec4 vPosition;
attribute vec2 vCoordinate;
uniform mat4 vMatrix;
varying vec2 aCoordinate;
void main(){
gl_Position=vMatrix*vPosition;
aCoordinate=vCoordinate;
}
\ No newline at end of file
#extension GL_OES_EGL_image_external : require
precision mediump float;
varying vec2 textureCoordinate;
uniform samplerExternalOES vTexture;
void main() {
gl_FragColor = texture2D( vTexture, textureCoordinate );
}
\ No newline at end of file
attribute vec4 vPosition;
attribute vec2 vCoord;
uniform mat4 vMatrix;
uniform mat4 vCoordMatrix;
varying vec2 textureCoordinate;
void main(){
gl_Position = vMatrix*vPosition;
textureCoordinate = (vCoordMatrix*vec4(vCoord,0,1)).xy;
}
\ No newline at end of file
precision mediump float;
varying vec2 aCoord;
uniform sampler2D vTexture;
uniform sampler2D vTextureAlpha;
void main() {
vec4 color=texture2D( vTexture, aCoord);
color.a=texture2D(vTextureAlpha,aCoord).r;
gl_FragColor = color;
}
\ No newline at end of file
attribute vec4 vPosition;
attribute vec2 vCoord;
varying vec2 aCoord;
uniform mat4 vMatrix;
void main(){
aCoord = vCoord;
gl_Position = vMatrix*vPosition;
}
\ No newline at end of file
package com.joe.camera2recorddemo.Entity;
public class FilterInfo {
private String name;
private int type;
private int rid;
public FilterInfo(int type, String name,int rid) {
this.name = name;
this.type = type;
this.rid= rid;
}
public String getName() {
return name;
}
public int getType() {
return type;
}
public int getRid() {
return rid;
}
}
package com.joe.camera2recorddemo.Entity;
public class SizeInfo {
private int mWidth;
private int mHeight;
public SizeInfo(int width, int height) {
mWidth = width;
mHeight = height;
}
public int getWidth() {
return mWidth;
}
public int getHeight() {
return mHeight;
}
public void setSize(int width, int height) {
this.mWidth = width;
this.mHeight = height;
}
@Override
public String toString() {
return "SizeInfo{" +
"mWidth=" + mWidth +
", mHeight=" + mHeight +
'}';
}
}
package com.joe.camera2recorddemo.MediaCodecUtil;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.util.Log;
/**
* Created by Yj on 2017/3/29.
*/
public class TrackUtils {
private static final String TAG = "TrackUtils";
/**
* Find the video track
* @param extractor
* @return
*/
public static int selectVideoTrack(MediaExtractor extractor) {
int numTracks = extractor.getTrackCount();
for (int i = 0; i < numTracks; i++) {
MediaFormat format = extractor.getTrackFormat(i);
String mime = format.getString(MediaFormat.KEY_MIME);
if (mime.startsWith("video/")) {
Log.d(TAG, "Extractor selected track " + i + " (" + mime + "): " + format);
return i;
}
}
return -1;
}
/**
* Find the audio track
* @param extractor
* @return
*/
public static int selectAudioTrack(MediaExtractor extractor) {
int numTracks = extractor.getTrackCount();
for (int i = 0; i < numTracks; i++) {
MediaFormat format = extractor.getTrackFormat(i);
String mime = format.getString(MediaFormat.KEY_MIME);
if (mime.startsWith("audio/")) {
Log.d(TAG, "Extractor selected track " + i + " (" + mime + "): " + format);
return i;
}
}
return -1;
}
}
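A minimal usage sketch for these helpers (the file path is a placeholder):

MediaExtractor extractor = new MediaExtractor();
try {
    extractor.setDataSource("/sdcard/sample.mp4"); // placeholder path
    int videoTrack = TrackUtils.selectVideoTrack(extractor);
    if (videoTrack >= 0) {
        extractor.selectTrack(videoTrack);
        MediaFormat format = extractor.getTrackFormat(videoTrack);
        // hand format to a MediaCodec decoder, as VideoDecode below does
    }
} catch (IOException e) {
    e.printStackTrace();
} finally {
    extractor.release();
}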
package com.joe.camera2recorddemo.MediaCodecUtil;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.util.Log;
import android.view.Surface;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
public class VideoDecode {
private static final String TAG = "VideoToFrames";
private static final long DEFAULT_TIMEOUT_US = 10000;
private final int decodeColorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible;
public int ImageWidth = 0;
public int ImageHeight = 0;
MediaExtractor extractor = null;
MediaCodec decoder = null;
MediaFormat mediaFormat;
private boolean isLoop = false;// whether to loop playback
private boolean isStop = false;// whether decoding is stopped
private String videoFilePath;
/**
* Initialize the decoder
* @param videoFilePath
*/
public void decodePrepare(String videoFilePath) {
this.videoFilePath = videoFilePath;
extractor = null;
decoder = null;
try {
File videoFile = new File(videoFilePath);
extractor = new MediaExtractor();
extractor.setDataSource(videoFile.toString());
int trackIndex = TrackUtils.selectVideoTrack(extractor);
if (trackIndex < 0) {
throw new RuntimeException("No video track found in " + videoFilePath);
}
extractor.selectTrack(trackIndex);
mediaFormat = extractor.getTrackFormat(trackIndex);
String mime = mediaFormat.getString(MediaFormat.KEY_MIME);
decoder = MediaCodec.createDecoderByType(mime);
if (isColorFormatSupported(decodeColorFormat, decoder.getCodecInfo().getCapabilitiesForType(mime))) {
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, decodeColorFormat);
Log.i(TAG, "set decode color format to type " + decodeColorFormat);
} else {
Log.i(TAG, "unable to set decode color format, color format type " + decodeColorFormat + " not supported");
}
//Strip the rotation metadata to prevent stretched output
mediaFormat.setInteger(MediaFormat.KEY_ROTATION,0);
//Decoder settings
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 2500000);
decoder.configure(mediaFormat,mSurface, null, 0);
decoder.start();
} catch (IOException ioe) {
throw new RuntimeException("failed init encoder", ioe);
}
}
public void close() {
try {
if (decoder != null) {
decoder.stop();
decoder.release();
}
if (extractor != null) {
extractor.release();
extractor = null;
}
}catch (IllegalStateException e){
e.printStackTrace();
}
}
/**
* Called externally to start decoding
*/
public void excuate() {
try {
decodeFramesToImage(decoder, extractor, mediaFormat);
} finally {
close();
if(isLoop && !isStop){
decodePrepare(videoFilePath);
excuate();
}
}
}
/**
* Set whether playback loops
* @param isLoop
*/
public void setLoop(boolean isLoop){
this.isLoop = isLoop;
}
/**
* Check whether the color format is supported
* @param colorFormat
* @param caps
* @return
*/
private boolean isColorFormatSupported(int colorFormat, MediaCodecInfo.CodecCapabilities caps) {
for (int c : caps.colorFormats) {
if (c == colorFormat) {
return true;
}
}
return false;
}
/**
* Start decoding
* @param decoder
* @param extractor
* @param mediaFormat
*/
public void decodeFramesToImage(MediaCodec decoder, MediaExtractor extractor, MediaFormat mediaFormat) {
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
boolean sawInputEOS = false;
boolean sawOutputEOS = false;
final int width = mediaFormat.getInteger(MediaFormat.KEY_WIDTH);
final int height = mediaFormat.getInteger(MediaFormat.KEY_HEIGHT);
ImageWidth = width;
ImageHeight = height;
long startMs = System.currentTimeMillis();
while (!sawOutputEOS && !isStop) {
if (!sawInputEOS) {
int inputBufferId = decoder.dequeueInputBuffer(DEFAULT_TIMEOUT_US);
if (inputBufferId >= 0) {
ByteBuffer inputBuffer = decoder.getInputBuffer(inputBufferId);
int sampleSize = extractor.readSampleData(inputBuffer, 0); //read a chunk of video data into inputBuffer; its size is sampleSize
if (sampleSize < 0) {
decoder.queueInputBuffer(inputBufferId, 0, 0, 0L, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
sawInputEOS = true;
} else {
long presentationTimeUs = extractor.getSampleTime();
Log.v(TAG, "presentationTimeUs:"+presentationTimeUs);
decoder.queueInputBuffer(inputBufferId, 0, sampleSize, presentationTimeUs, 0);
extractor.advance(); //move to the next sample in the video file
}
}
}
int outputBufferId = decoder.dequeueOutputBuffer(info, DEFAULT_TIMEOUT_US);
if (outputBufferId >= 0) {
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
sawOutputEOS = true;
}
boolean doRender = (info.size != 0);
if (doRender) {
sleepRender(info, startMs);//throttle decoding to the frame's presentation time
decoder.releaseOutputBuffer(outputBufferId, true);
}
}
}
}
public void stop(){
isStop = true;
}
public void start(){
isStop = false;
}
//====================== Output Surface setup ==============================
private Surface mSurface;
public void setSurface(Surface surface){
this.mSurface = surface;
}
/**
* Throttle decoding so frames are played at their presentation times
*/
private void sleepRender(MediaCodec.BufferInfo audioBufferInfo, long startMs) {
while (audioBufferInfo.presentationTimeUs / 1000 > System.currentTimeMillis() - startMs) {
try {
Thread.sleep(10);
} catch (InterruptedException e) {
e.printStackTrace();
break;
}
}
}
}
\ No newline at end of file
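Putting VideoDecode together, a typical call sequence looks like this (a sketch; the SurfaceTexture source and the file path are assumed):

final VideoDecode decode = new VideoDecode();
decode.setSurface(new Surface(surfaceTexture)); // must happen before decodePrepare()
decode.setLoop(true);
new Thread(new Runnable() {
    @Override
    public void run() {
        decode.decodePrepare("/sdcard/demo.mp4"); // placeholder path
        decode.excuate(); // blocks until end of stream or stop()
    }
}).start();
// later, e.g. from onDestroy():
decode.stop();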
package com.joe.camera2recorddemo.OpenGL;
import android.annotation.TargetApi;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLExt;
import android.opengl.EGLSurface;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.os.Build;
import android.util.Log;
import javax.microedition.khronos.opengles.GL10;
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
public class EGLHelper {
private EGLSurface mEGLSurface;
private EGLContext mEGLContext;
private EGLDisplay mEGLDisplay;
private EGLConfig mEGLConfig;
private EGLContext mShareEGLContext= EGL14.EGL_NO_CONTEXT;
private boolean isDebug=true;
private int mEglSurfaceType= EGL14.EGL_WINDOW_BIT;
private Object mSurface;
private Object mCopySurface;
/**
* @param type one of {@link EGL14#EGL_WINDOW_BIT}, {@link EGL14#EGL_PBUFFER_BIT}, {@link EGL14#EGL_PIXMAP_BIT}
*/
public void setEGLSurfaceType(int type){
this.mEglSurfaceType=type;
}
public void setSurface(Object surface){
this.mSurface=surface;
}
public void setCopySurface(Object surface){
this.mCopySurface=surface;
}
/**
* create the environment for OpenGLES
* @param eglWidth width
* @param eglHeight height
*/
public boolean createGLES(int eglWidth, int eglHeight){
int[] attributes = new int[] {
EGL14.EGL_SURFACE_TYPE, mEglSurfaceType, //render surface type
EGL14.EGL_RED_SIZE, 8, //red channel size in bits
EGL14.EGL_GREEN_SIZE, 8, //green channel size
EGL14.EGL_BLUE_SIZE, 8, //blue channel size
EGL14.EGL_ALPHA_SIZE, 8, //alpha size; these four together define the pixel format
EGL14.EGL_DEPTH_SIZE, 16, //depth (Z) buffer size
EGL14.EGL_RENDERABLE_TYPE, 4, //rendering API: 4 is the hardcoded value of EGL14.EGL_OPENGL_ES2_BIT
EGL14.EGL_NONE }; //the list always ends with EGL14.EGL_NONE
int glAttrs[] = {
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, //0x3098 is EGL14.EGL_CONTEXT_CLIENT_VERSION, which does not exist before Android 4.2
EGL14.EGL_NONE
};
int bufferAttrs[]={
EGL14.EGL_WIDTH,eglWidth,
EGL14.EGL_HEIGHT,eglHeight,
EGL14.EGL_NONE
};
//Step 2: get the Display
//the default display device, usually the device's main screen
mEGLDisplay= EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
//version numbers: [0] is the major version, [1] the minor version
int[] versions=new int[2];
EGL14.eglInitialize(mEGLDisplay,versions,0,versions,1);
log(EGL14.eglQueryString(mEGLDisplay, EGL14.EGL_VENDOR));
log(EGL14.eglQueryString(mEGLDisplay, EGL14.EGL_VERSION));
log(EGL14.eglQueryString(mEGLDisplay, EGL14.EGL_EXTENSIONS));
//Step 3: choose a config
//query the available EGL configurations
EGLConfig[] configs = new EGLConfig[1];
int[] configNum = new int[1];
EGL14.eglChooseConfig(mEGLDisplay, attributes,0, configs,0, 1, configNum,0);
if(configs[0]==null){
log("eglChooseConfig Error:"+ EGL14.eglGetError());
return false;
}
mEGLConfig = configs[0];
//Create the EGLContext
mEGLContext= EGL14.eglCreateContext(mEGLDisplay,mEGLConfig,mShareEGLContext, glAttrs,0);
if(mEGLContext== EGL14.EGL_NO_CONTEXT){
return false;
}
//Create the Surface used for rendering, based on the surface type
switch (mEglSurfaceType){
case EGL14.EGL_WINDOW_BIT:
mEGLSurface= EGL14.eglCreateWindowSurface(mEGLDisplay,mEGLConfig,mSurface,new int[]{EGL14.EGL_NONE},0);
break;
case EGL14.EGL_PIXMAP_BIT:
break;
case EGL14.EGL_PBUFFER_BIT:
mEGLSurface= EGL14.eglCreatePbufferSurface(mEGLDisplay,mEGLConfig,bufferAttrs,0);
break;
}
if(mEGLSurface== EGL14.EGL_NO_SURFACE){
log("eglCreateSurface Error:"+ EGL14.eglGetError());
return false;
}
if(!EGL14.eglMakeCurrent(mEGLDisplay,mEGLSurface,mEGLSurface,mEGLContext)){
log("eglMakeCurrent Error:"+ EGL14.eglQueryString(mEGLDisplay, EGL14.eglGetError()));
return false;
}
log("gl environment create success");
return true;
}
public EGLSurface createEGLWindowSurface(Object object){
return EGL14.eglCreateWindowSurface(mEGLDisplay,mEGLConfig,object,new int[]{EGL14.EGL_NONE},0);
}
public void setShareEGLContext(EGLContext context){
this.mShareEGLContext=context;
}
public EGLContext getEGLContext(){
return mEGLContext;
}
public boolean makeCurrent(){
return EGL14.eglMakeCurrent(mEGLDisplay,mEGLSurface,mEGLSurface,mEGLContext);
}
public boolean makeCurrent(EGLSurface surface){
return EGL14.eglMakeCurrent(mEGLDisplay,surface,surface,mEGLContext);
}
public boolean destroyGLES(){
EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
EGL14.eglDestroySurface(mEGLDisplay,mEGLSurface);
EGL14.eglDestroyContext(mEGLDisplay,mEGLContext);
EGL14.eglTerminate(mEGLDisplay);
log("gl destroy gles");
return true;
}
public void setPresentationTime(long time){
EGLExt.eglPresentationTimeANDROID(mEGLDisplay,mEGLSurface,time);
}
public void setPresentationTime(EGLSurface surface,long time){
EGLExt.eglPresentationTimeANDROID(mEGLDisplay,surface,time);
}
public boolean swapBuffers(){
return EGL14.eglSwapBuffers(mEGLDisplay,mEGLSurface);
}
public boolean swapBuffers(EGLSurface surface){
return EGL14.eglSwapBuffers(mEGLDisplay,surface);
}
//Create an OES texture for the video stream
public int createTextureID() {
int[] texture = new int[1];
GLES20.glGenTextures(1, texture, 0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texture[0]);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
return texture[0];
}
private void log(String log){
if(isDebug){
Log.e("EGLHelper",log);
}
}
}
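A minimal off-screen usage sketch of EGLHelper (the 720x1280 size is arbitrary):

EGLHelper helper = new EGLHelper();
helper.setEGLSurfaceType(EGL14.EGL_PBUFFER_BIT); // pbuffer instead of an on-screen window
if (helper.createGLES(720, 1280)) {
    int oesTex = helper.createTextureID(); // OES texture for camera/video input
    // ... issue GLES20 draw calls here ...
    helper.swapBuffers();
    helper.destroyGLES();
}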
package com.joe.camera2recorddemo.OpenGL.Filter;
import android.content.res.Resources;
public class BaseFilter extends Filter {
public BaseFilter(Resources resource) {
super(resource,"shader/base.vert","shader/base.frag");
}
public BaseFilter(String vert,String frag){
super(null,vert,frag);
}
public BaseFilter(){
super(null,"attribute vec4 aVertexCo;\n" +
"attribute vec2 aTextureCo;\n" +
"\n" +
"uniform mat4 uVertexMatrix;\n" +
"uniform mat4 uTextureMatrix;\n" +
"\n" +
"varying vec2 vTextureCo;\n" +
"\n" +
"void main(){\n" +
" gl_Position = uVertexMatrix*aVertexCo;\n" +
" vTextureCo = (uTextureMatrix*vec4(aTextureCo,0,1)).xy;\n" +
"}",
"precision mediump float;\n" +
"varying vec2 vTextureCo;\n" +
"uniform sampler2D uTexture;\n" +
"void main() {\n" +
" gl_FragColor = texture2D( uTexture, vTextureCo);\n" +
"}");
}
@Override
protected void onCreate() {
super.onCreate();
}
}
package com.joe.camera2recorddemo.OpenGL.Filter;
import android.content.res.Resources;
import android.opengl.GLES20;
/**
* Created by Yj on 2017/9/18.
*/
public class BeautyFilter extends Filter {
private int mGLaaCoef;
private int mGLmixCoef;
private int mGLiternum;
private float aaCoef;
private float mixCoef;
private int iternum;
public BeautyFilter(Resources resource) {
super(resource,"shader/beauty/beauty.vert", "shader/beauty/beauty.frag");
shaderNeedTextureSize(true);
setBeautyLevel(0);
}
@Override
protected void onCreate() {
super.onCreate();
mGLaaCoef=GLES20.glGetUniformLocation(mGLProgram,"uACoef");
mGLmixCoef=GLES20.glGetUniformLocation(mGLProgram,"uMixCoef");
mGLiternum=GLES20.glGetUniformLocation(mGLProgram,"uIternum");
}
public Filter setBeautyLevel(int level){
switch (level){
case 1:
setParams(1, 0.19f, 0.54f);
break;
case 2:
setParams(2, 0.29f, 0.54f);
break;
case 3:
setParams(3, 0.17f, 0.39f);
break;
case 4:
setParams(3, 0.25f, 0.54f);
break;
case 5:
setParams(4, 0.13f, 0.54f);
break;
case 6:
setParams(4, 0.19f, 0.69f);
break;
default:
setParams(0, 0f, 0f);
break;
}
return this;
}
private void setParams(int iternum, float aaCoef, float mixCoef){
this.iternum = iternum;
this.aaCoef = aaCoef;
this.mixCoef = mixCoef;
}
@Override
protected void onSetExpandData() {
super.onSetExpandData();
GLES20.glUniform1f(mGLaaCoef,aaCoef);
GLES20.glUniform1f(mGLmixCoef,mixCoef);
GLES20.glUniform1i(mGLiternum,iternum);
}
}
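A short usage sketch: the presets above map levels 1–6 to (iteration count, aaCoef, mixCoef) triples, and any other level disables smoothing. `context` is any Android Context the caller has in scope.

// Usage sketch: must run on the GL thread that owns the filter.
BeautyFilter beauty = new BeautyFilter(context.getResources());
beauty.setBeautyLevel(3); // presets 1..6; any other value switches the effect off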
/*
 *
 * ChooseFilter.java
 *
 * Created by Wuwang on 2016/10/17
 */
package com.joe.camera2recorddemo.OpenGL.Filter;
import android.content.res.Resources;
import android.opengl.GLES20;
/**
* Selectable filter collection: one shader program that switches effects by a type code.
*/
public class ChooseFilter extends Filter {
//Filter selection code (see FilterType)
private int hChangeType;
private int hFilterCode = 0;
//Extra parameters needed by the more complex filters
private int width;
private int height;
private boolean needGLWH = false;
private int mGLWidth;
private int mGLHeight;
private boolean needTexelWH = false;
private float mTexelWidth;
private float mTexelHeight;
private int mUniformTexelWidthLocation;
private int mUniformTexelHeightLocation;
public ChooseFilter(Resources resource) {
super(resource, "shader/choose/choose.vert", "shader/choose/choose.frag");
}
@Override
protected void onCreate() {
super.onCreate();
hChangeType = GLES20.glGetUniformLocation(mGLProgram, "vChangeType");
mGLWidth = GLES20.glGetUniformLocation(mGLProgram, "uWidth");
mGLHeight = GLES20.glGetUniformLocation(mGLProgram, "uHeight");
mUniformTexelWidthLocation = GLES20.glGetUniformLocation(mGLProgram, "texelWidth");
mUniformTexelHeightLocation = GLES20.glGetUniformLocation(mGLProgram, "texelHeight");
}
@Override
protected void onSizeChanged(int width, int height) {
super.onSizeChanged(width, height);
this.width = width;
this.height = height;
setTexelSize(5.0f);
}
@Override
protected void onSetExpandData() {
super.onSetExpandData();
GLES20.glUniform1i(hChangeType, hFilterCode);
if (needGLWH) {
GLES20.glUniform1f(mGLWidth, width);
GLES20.glUniform1f(mGLHeight, height);
}
if (needTexelWH) {
GLES20.glUniform1f(mUniformTexelWidthLocation, mTexelWidth);
GLES20.glUniform1f(mUniformTexelHeightLocation, mTexelHeight);
}
}
/**
* Set the filter type.
*
* @param code one of the {@link FilterType} constants
*/
public void setChangeType(int code) {
this.hFilterCode = code;
switch (code) {
case FilterType.TOON:
needTexelWH = true;
setTexelSize(4.2f);
break;
case FilterType.CONVOLUTION:
needTexelWH = true;
setTexelSize(1.3f);
break;
case FilterType.SOBEL:
needGLWH = true;
break;
case FilterType.SKETCH:
needTexelWH = true;
setTexelSize(3.0f);
break;
default:
needTexelWH = false;
needGLWH = false;
break;
}
}
private void setTexelSize(float size) {
mTexelWidth = size / width;
mTexelHeight = size / height;
}
/**
* Filter type codes.
*/
public static class FilterType {
public static final int NORMAL = 0;
public static final int COOL = 1;
public static final int WARM = 2;
public static final int GRAY = 3;
public static final int CAMEO = 4;
public static final int INVERT = 5;
public static final int SEPIA = 6;
public static final int TOON = 7;
public static final int CONVOLUTION = 8;
public static final int SOBEL = 9;
public static final int SKETCH = 10;
}
}
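A usage sketch for the selector above; note that TOON, CONVOLUTION and SKETCH also set a texel size, while SOBEL uploads the viewport size instead. `context` is caller-provided.

// Usage sketch ('context' is any Android Context in scope).
ChooseFilter choose = new ChooseFilter(context.getResources());
choose.setChangeType(ChooseFilter.FilterType.SEPIA); // pick any FilterType constant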
package com.joe.camera2recorddemo.OpenGL.Filter;
import android.content.res.Resources;
import com.joe.camera2recorddemo.OpenGL.TransUtil;
import com.joe.camera2recorddemo.OpenGL.Transformation;
import com.joe.camera2recorddemo.Utils.MatrixUtils;
/**
* Rotation / flip / crop filter.
* Created by Yj on 2017/10/31.
*/
public class DistortionFilter extends Filter {
//Transformation describing rotation, flip and crop
private Transformation mTransformation;
private float[] mTextureCo;
public DistortionFilter(Resources resource) {
super(resource, "shader/base.vert","shader/base.frag");
initTransformation();
}
/**
* Initialize the transformation state.
*/
private void initTransformation() {
mTextureCo = MatrixUtils.getOriginalTextureCo();
if (mTransformation == null) {
mTransformation = new Transformation();
}
}
/**
* Apply a transformation by remapping the texture coordinates.
*
* @param transformation the rotation/flip/crop description
*/
public void setTransformation(Transformation transformation){
mTransformation = transformation;
setTextureCo(TransUtil.getTransformationCo(mTextureCo, mTransformation));
}
}
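A usage sketch combining the filter with the Transformation class defined later in this commit; `context` is caller-provided.

// Usage sketch: rotate the frame 90° and mirror it horizontally.
Transformation t = new Transformation();
t.setRotation(90);
t.setFlip(Transformation.FLIP_HORIZONTAL);
DistortionFilter distortion = new DistortionFilter(context.getResources());
distortion.setTransformation(t); // remaps the texture coordinates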
package com.joe.camera2recorddemo.OpenGL.Filter;
import android.content.res.Resources;
/**
* Created by Administrator on 2017/11/16.
*/
public class DrawFilter extends Filter {
public DrawFilter(Resources resource) {
super(resource, "", "");
}
}
package com.joe.camera2recorddemo.OpenGL.Filter;
import android.content.res.Resources;
import android.opengl.GLES20;
import com.joe.camera2recorddemo.OpenGL.FrameBuffer;
import com.joe.camera2recorddemo.OpenGL.Renderer;
import com.joe.camera2recorddemo.Utils.GpuUtils;
import com.joe.camera2recorddemo.Utils.MatrixUtils;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
public abstract class Filter implements Renderer {
private float[] mVertexMatrix= MatrixUtils.getOriginalMatrix();
private float[] mTextureMatrix=MatrixUtils.getOriginalMatrix();
private float[] mVertexCo;
protected FloatBuffer mVertexBuffer;
protected FloatBuffer mTextureBuffer;
protected int mWidth;
protected int mHeight;
protected Resources mRes;
private String mVertex;
private String mFragment;
protected int mGLProgram;
protected int mGLVertexCo;
protected int mGLTextureCo;
protected int mGLVertexMatrix;
protected int mGLTextureMatrix;
protected int mGLTexture;
private int mGLWidth;
private int mGLHeight;
private boolean isUseSize=false;
private FrameBuffer mFrameTemp;
protected Filter(Resources resource,String vertex,String fragment){
this.mRes=resource;
this.mVertex=vertex;
this.mFragment=fragment;
mFrameTemp=new FrameBuffer();
initBuffer();
}
protected void initBuffer(){
ByteBuffer vertex=ByteBuffer.allocateDirect(32);
vertex.order(ByteOrder.nativeOrder());
mVertexBuffer=vertex.asFloatBuffer();
mVertexBuffer.put(MatrixUtils.getOriginalVertexCo());
mVertexBuffer.position(0);
ByteBuffer texture=ByteBuffer.allocateDirect(32);
texture.order(ByteOrder.nativeOrder());
mTextureBuffer=texture.asFloatBuffer();
mTextureBuffer.put(MatrixUtils.getOriginalTextureCo());
mTextureBuffer.position(0);
}
public void setVertexCo(float[] vertexCo){
mVertexCo = vertexCo;
mVertexBuffer.clear();
mVertexBuffer.put(vertexCo);
mVertexBuffer.position(0);
}
public void setTextureCo(float[] textureCo){
mTextureBuffer.clear();
mTextureBuffer.put(textureCo);
mTextureBuffer.position(0);
}
public void setVertexBuffer(FloatBuffer vertexBuffer){
this.mVertexBuffer=vertexBuffer;
}
public void setTextureBuffer(FloatBuffer textureBuffer){
this.mTextureBuffer=textureBuffer;
}
public void setVertexMatrix(float[] matrix){
this.mVertexMatrix=matrix;
}
public void setTextureMatrix(float[] matrix){
this.mTextureMatrix=matrix;
}
public float[] getVertexMatrix(){
return mVertexMatrix;
}
public float[] getTextureMatrix(){
return mTextureMatrix;
}
public float[] getVertexCo() { return mVertexCo;}
protected void shaderNeedTextureSize(boolean need){
this.isUseSize=need;
}
protected void onCreate(){
if(mRes!=null){
mGLProgram= GpuUtils.createGLProgramByAssetsFile(mRes,mVertex,mFragment);
}else{
mGLProgram= GpuUtils.createGLProgram(mVertex,mFragment);
}
mGLVertexCo=GLES20.glGetAttribLocation(mGLProgram,"aVertexCo");
mGLTextureCo=GLES20.glGetAttribLocation(mGLProgram,"aTextureCo");
mGLVertexMatrix=GLES20.glGetUniformLocation(mGLProgram,"uVertexMatrix");
mGLTextureMatrix=GLES20.glGetUniformLocation(mGLProgram,"uTextureMatrix");
mGLTexture=GLES20.glGetUniformLocation(mGLProgram,"uTexture");
if(isUseSize){
mGLWidth=GLES20.glGetUniformLocation(mGLProgram,"uWidth");
mGLHeight=GLES20.glGetUniformLocation(mGLProgram,"uHeight");
}
}
protected void onSizeChanged(int width,int height){
}
@Override
public final void create() {
if(mVertex!=null&&mFragment!=null){
onCreate();
}
}
@Override
public void sizeChanged(int width, int height) {
onSizeChanged(width, height);
this.mWidth=width;
this.mHeight=height;
mFrameTemp.destroyFrameBuffer();
}
@Override
public void draw(int texture) {
onClear();
onUseProgram();
onSetExpandData();
onBindTexture(texture);
onDraw();
}
public int drawToTexture(int texture){
mFrameTemp.bindFrameBuffer(mWidth,mHeight);
onClear();
onUseProgram();
MatrixUtils.flip(mVertexMatrix,false,true);
onSetExpandData();
MatrixUtils.flip(mVertexMatrix,false,true);
onBindTexture(texture);
onDraw();
mFrameTemp.unBindFrameBuffer();
return mFrameTemp.getCacheTextureId();
}
@Override
public void destroy() {
mFrameTemp.destroyFrameBuffer();
GLES20.glDeleteProgram(mGLProgram);
}
protected void onUseProgram(){
GLES20.glUseProgram(mGLProgram);
}
protected void onDraw(){
GLES20.glEnableVertexAttribArray(mGLVertexCo);
GLES20.glVertexAttribPointer(mGLVertexCo,2, GLES20.GL_FLOAT, false, 0,mVertexBuffer);
GLES20.glEnableVertexAttribArray(mGLTextureCo);
GLES20.glVertexAttribPointer(mGLTextureCo, 2, GLES20.GL_FLOAT, false, 0, mTextureBuffer);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP,0,4);
GLES20.glDisableVertexAttribArray(mGLVertexCo);
GLES20.glDisableVertexAttribArray(mGLTextureCo);
}
protected void onClear(){
GLES20.glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
}
/**
* Upload additional uniform data (matrices, optional viewport size).
*/
protected void onSetExpandData(){
GLES20.glUniformMatrix4fv(mGLVertexMatrix,1,false,mVertexMatrix,0);
GLES20.glUniformMatrix4fv(mGLTextureMatrix,1,false,mTextureMatrix,0);
if(isUseSize){
GLES20.glUniform1f(mGLWidth,mWidth);
GLES20.glUniform1f(mGLHeight,mHeight);
}
}
/**
* Bind the default 2D texture to unit 0.
*/
protected void onBindTexture(int textureId){
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D,textureId);
GLES20.glUniform1i(mGLTexture,0);
}
}
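The base class above defines the extension contract: subclasses cache uniform locations in onCreate() and upload per-frame values in onSetExpandData(). A minimal sketch of that contract, using a hypothetical TintFilter that is not part of this commit:

package com.joe.camera2recorddemo.OpenGL.Filter;

import android.opengl.GLES20;

// Hypothetical subclass illustrating the Filter extension points.
public class TintFilter extends Filter {
    private int mGLTint;
    private float[] tint = {1f, 0.9f, 0.9f}; // example per-channel multiplier

    public TintFilter() {
        super(null,
            "attribute vec4 aVertexCo;\n" +
            "attribute vec2 aTextureCo;\n" +
            "uniform mat4 uVertexMatrix;\n" +
            "uniform mat4 uTextureMatrix;\n" +
            "varying vec2 vTextureCo;\n" +
            "void main(){\n" +
            "  gl_Position = uVertexMatrix*aVertexCo;\n" +
            "  vTextureCo = (uTextureMatrix*vec4(aTextureCo,0,1)).xy;\n" +
            "}",
            "precision mediump float;\n" +
            "varying vec2 vTextureCo;\n" +
            "uniform sampler2D uTexture;\n" +
            "uniform vec3 uTint;\n" +
            "void main(){\n" +
            "  gl_FragColor = vec4(texture2D(uTexture, vTextureCo).rgb * uTint, 1.0);\n" +
            "}");
    }

    @Override
    protected void onCreate() {
        super.onCreate();
        // Cache the uniform location once the program is linked.
        mGLTint = GLES20.glGetUniformLocation(mGLProgram, "uTint");
    }

    @Override
    protected void onSetExpandData() {
        super.onSetExpandData();
        // Upload the per-frame tint value.
        GLES20.glUniform3fv(mGLTint, 1, tint, 0);
    }
}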
package com.joe.camera2recorddemo.OpenGL.Filter;
import android.content.res.Resources;
import java.util.Iterator;
import java.util.Vector;
/**
* Created by Yj on 2017/9/24.
*/
public class GroupFilter extends BaseFilter {
private Vector<Filter> mGroup;
private Vector<Filter> mTempGroup;
public GroupFilter(Resources resource) {
super(resource);
}
public GroupFilter(){
super();
}
@Override
protected void initBuffer() {
super.initBuffer();
mGroup=new Vector<>();
mTempGroup=new Vector<>();
}
public synchronized void addFilter(Filter filter){
mGroup.add(filter);
mTempGroup.add(filter);
}
public synchronized void addFilter(int index,Filter filter){
mGroup.add(index, filter);
mTempGroup.add(filter);
}
public synchronized Filter removeFilter(int index){
return mGroup.remove(index);
}
public synchronized boolean removeFilter(Filter filter){
return mGroup.remove(filter);
}
public synchronized Filter element(int index){
return mGroup.elementAt(index);
}
public synchronized Iterator<Filter> iterator(){
return mGroup.iterator();
}
public synchronized boolean isEmpty(){
return mGroup.isEmpty();
}
@Override
protected synchronized void onCreate() {
super.onCreate();
for (Filter filter : mGroup) {
filter.create();
}
mTempGroup.clear();
}
private void tempFilterInit(int width,int height){
for (Filter filter : mTempGroup) {
filter.create();
filter.sizeChanged(width, height);
}
mTempGroup.removeAllElements();
}
@Override
protected synchronized void onSizeChanged(int width, int height) {
super.onSizeChanged(width, height);
for (Filter filter : mGroup) {
filter.sizeChanged(width, height);
}
}
@Override
public void draw(int texture) {
if(mTempGroup.size()>0){
tempFilterInit(mWidth, mHeight);
}
int tempTextureId=texture;
for (int i=0;i<mGroup.size();i++){
Filter filter=mGroup.get(i);
tempTextureId=filter.drawToTexture(tempTextureId);
}
super.draw(tempTextureId);
}
@Override
public int drawToTexture(int texture) {
if(mTempGroup.size()>0){
tempFilterInit(mWidth, mHeight);
}
int tempTextureId=texture;
for (int i=0;i<mGroup.size();i++){
Filter filter=mGroup.get(i);
tempTextureId=filter.drawToTexture(tempTextureId);
}
return super.drawToTexture(tempTextureId);
}
}
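A usage sketch of the chaining behaviour: every added filter renders into an offscreen texture via drawToTexture(), and the group's own pass draws the last result to the target. `context`, `width`, `height` and `srcTexture` are caller-provided.

// Usage sketch ('context' is any Android Context in scope).
GroupFilter group = new GroupFilter(context.getResources());
group.addFilter(new BeautyFilter(context.getResources()));
group.addFilter(new ChooseFilter(context.getResources()));
// On the GL thread:
// group.create(); group.sizeChanged(width, height); group.draw(srcTexture);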
package com.joe.camera2recorddemo.OpenGL.Filter;
import android.content.res.Resources;
/**
* Composite editing filter: filter selection plus rotation/flip/crop.
* Created by Yj on 2017/11/1.
*/
public class Mp4EditFilter extends GroupFilter {
private ChooseFilter chooseFilter;
private DistortionFilter distortionFilter;
public Mp4EditFilter(Resources resource) {
super(resource);
}
@Override
protected void initBuffer() {
super.initBuffer();
chooseFilter = new ChooseFilter(mRes);
distortionFilter = new DistortionFilter(mRes);
addFilter(chooseFilter);
addFilter(distortionFilter);
}
public ChooseFilter getChooseFilter() {
return chooseFilter;
}
public DistortionFilter getDistortionFilter() {
return distortionFilter;
}
}
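A usage sketch for the composite filter, tuning its two stages through the getters; `context` is caller-provided.

// Usage sketch: apply a grayscale effect and a 180° rotation.
Mp4EditFilter edit = new Mp4EditFilter(context.getResources());
edit.getChooseFilter().setChangeType(ChooseFilter.FilterType.GRAY);
Transformation t = new Transformation();
t.setRotation(180);
edit.getDistortionFilter().setTransformation(t);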
/*
 * Created by Wuwang on 2017/9/11
 * Copyright © 2017 深圳哎吖科技. All rights reserved.
 */
package com.joe.camera2recorddemo.OpenGL.Filter;
import android.annotation.TargetApi;
import android.content.res.Resources;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.os.Build;
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1)
public class OesFilter extends Filter {
public OesFilter(Resources resource) {
super(resource,"shader/oes.vert", "shader/oes.frag");
}
public OesFilter(){
super(null,"attribute vec4 aVertexCo;\n" +
"attribute vec2 aTextureCo;\n" +
"\n" +
"uniform mat4 uVertexMatrix;\n" +
"uniform mat4 uTextureMatrix;\n" +
"\n" +
"varying vec2 vTextureCo;\n" +
"\n" +
"void main(){\n" +
" gl_Position = uVertexMatrix*aVertexCo;\n" +
" vTextureCo = (uTextureMatrix*vec4(aTextureCo,0,1)).xy;\n" +
"}",
"#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;\n" +
"varying vec2 vTextureCo;\n" +
"uniform samplerExternalOES uTexture;\n" +
"void main() {\n" +
" gl_FragColor = texture2D( uTexture, vTextureCo);\n" +
"}");
}
@Override
protected void onBindTexture(int textureId) {
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,textureId);
GLES20.glUniform1i(mGLTexture,0);
}
}
package com.joe.camera2recorddemo.OpenGL;
import android.opengl.GLES20;
/**
* Created by Yj on 2017/9/13.
*/
public class FrameBuffer {
private int[] mFrameTemp;
public int bindFrameBuffer(int width,int height){
if(mFrameTemp==null){
mFrameTemp=new int[3];
GLES20.glGenFramebuffers(1,mFrameTemp,0);
GLES20.glGenTextures(1,mFrameTemp,1);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D,mFrameTemp[1]);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0,GLES20.GL_RGBA, width, height,
0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
//Minification filter: use the nearest texel's color for the drawn pixel
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER,GLES20.GL_NEAREST);
//Magnification filter: blend the nearest texels with a weighted average
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER,GLES20.GL_LINEAR);
//Wrap mode S: clamp coordinates to [1/2n, 1-1/2n] so the border is never sampled
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S,GLES20.GL_CLAMP_TO_EDGE);
//Wrap mode T: clamp coordinates to [1/2n, 1-1/2n] so the border is never sampled
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T,GLES20.GL_CLAMP_TO_EDGE);
GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING,mFrameTemp,2);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER,mFrameTemp[0]);
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
GLES20.GL_TEXTURE_2D, mFrameTemp[1], 0);
}else{
GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING,mFrameTemp,2);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFrameTemp[0]);
}
return GLES20.glGetError();
}
public void createFrameBuffer(int width,int height,int tex_type,int tex_format,
int min_params,int max_params,int wrap_s,int wrap_t){
mFrameTemp=new int[3];
GLES20.glGenFramebuffers(1,mFrameTemp,0);
GLES20.glGenTextures(1,mFrameTemp,1);
GLES20.glBindTexture(tex_type,mFrameTemp[1]);
GLES20.glTexImage2D(tex_type, 0,tex_format, width, height,
0, tex_format, GLES20.GL_UNSIGNED_BYTE, null);
//Minification and magnification filters as supplied by the caller
GLES20.glTexParameteri(tex_type, GLES20.GL_TEXTURE_MIN_FILTER,min_params);
GLES20.glTexParameteri(tex_type, GLES20.GL_TEXTURE_MAG_FILTER,max_params);
//Wrap modes for S and T as supplied by the caller
GLES20.glTexParameteri(tex_type, GLES20.GL_TEXTURE_WRAP_S,wrap_s);
GLES20.glTexParameteri(tex_type, GLES20.GL_TEXTURE_WRAP_T,wrap_t);
GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING,mFrameTemp,2);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER,mFrameTemp[0]);
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
tex_type, mFrameTemp[1], 0);
}
public int bindFrameBuffer(){
if(mFrameTemp==null)return -1;
GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING,mFrameTemp,2);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER,mFrameTemp[0]);
return GLES20.glGetError();
}
public void unBindFrameBuffer(){
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER,mFrameTemp[2]);
}
public int getCacheTextureId(){
return mFrameTemp!=null?mFrameTemp[1]:-1;
}
public void destroyFrameBuffer(){
if(mFrameTemp!=null){
GLES20.glDeleteFramebuffers(1,mFrameTemp,0);
GLES20.glDeleteTextures(1,mFrameTemp,1);
mFrameTemp=null;
}
}
}
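A usage sketch of the offscreen pattern, which is exactly what Filter.drawToTexture() above relies on; the 1280×720 size is an arbitrary example value.

// Usage sketch: render offscreen, then reuse the result as an input texture.
FrameBuffer fbo = new FrameBuffer();
fbo.bindFrameBuffer(1280, 720); // lazily creates the FBO and its color texture
// ... issue GL draw calls here ...
fbo.unBindFrameBuffer(); // restores the framebuffer that was bound before
int resultTexture = fbo.getCacheTextureId();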
package com.joe.camera2recorddemo.OpenGL.MhFilter;
import android.content.res.Resources;
import com.joe.camera2recorddemo.OpenGL.Filter.ChooseFilter;
import com.joe.camera2recorddemo.OpenGL.Filter.DistortionFilter;
import com.joe.camera2recorddemo.OpenGL.Filter.GroupFilter;
/**
* Adjustment filter group: contrast, brightness, saturation, vignette and distortion.
* Created by Yj on 2018/6/14.
*/
public class AdjustFilter extends GroupFilter {
private ContrastFilter contrastFilter;
private BrightnessFilter brightnessFilter;
private SaturationFilter saturationFilter;
private VignetteFilter vignetteFilter;
private DistortionFilter distortionFilter;
public AdjustFilter(Resources resource) {
super(resource);
}
@Override
protected void initBuffer() {
super.initBuffer();
contrastFilter = new ContrastFilter(mRes);
brightnessFilter = new BrightnessFilter(mRes);
saturationFilter = new SaturationFilter(mRes);
distortionFilter = new DistortionFilter(mRes);
vignetteFilter = new VignetteFilter(mRes);
addFilter(contrastFilter);
addFilter(brightnessFilter);
addFilter(saturationFilter);
addFilter(vignetteFilter);
addFilter(distortionFilter);
}
public ContrastFilter getContrastFilter() {
return contrastFilter;
}
public BrightnessFilter getBrightnessFilter() {
return brightnessFilter;
}
public SaturationFilter getSaturationFilter() {
return saturationFilter;
}
public DistortionFilter getDistortionFilter() {
return distortionFilter;
}
public VignetteFilter getVignetteFilter() {
return vignetteFilter;
}
}
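A usage sketch; the defaults in the stage classes below (brightness 0, contrast 1, saturation 1) suggest those values mean "unchanged", so the deltas here are illustrative. `context` is caller-provided.

// Usage sketch ('context' is any Android Context in scope).
AdjustFilter adjust = new AdjustFilter(context.getResources());
adjust.getBrightnessFilter().setBrightnessCode(0.1f); // default 0.0f = unchanged
adjust.getContrastFilter().setContrastCode(1.2f);     // default 1.0f = unchanged
adjust.getSaturationFilter().setSaturationCode(0.8f); // default 1.0f = unchanged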
package com.joe.camera2recorddemo.OpenGL.MhFilter;
import android.content.res.Resources;
import android.opengl.GLES20;
import com.joe.camera2recorddemo.OpenGL.Filter.Filter;
/**
* Brightness filter.
*/
public class BrightnessFilter extends Filter {
private int brightnessType;
private float brightnessCode = 0.0f;
public BrightnessFilter(Resources resource) {
super(resource,"shader/base.vert","shader/mh/brightness.frag");
}
@Override
protected void onCreate() {
super.onCreate();
brightnessType = GLES20.glGetUniformLocation(mGLProgram, "brightness");
}
@Override
protected void onSetExpandData() {
super.onSetExpandData();
GLES20.glUniform1f(brightnessType, brightnessCode);
}
public void setBrightnessCode(float brightnessCode) {
this.brightnessCode = brightnessCode;
}
}
package com.joe.camera2recorddemo.OpenGL.MhFilter;
import android.content.res.Resources;
import android.opengl.GLES20;
import com.joe.camera2recorddemo.OpenGL.Filter.Filter;
/**
* Contrast filter.
*/
public class ContrastFilter extends Filter {
private int contrastType;
private float contrastCode = 1.0f;
public ContrastFilter(Resources resource) {
super(resource,"shader/base.vert","shader/mh/contrast.frag");
}
@Override
protected void onCreate() {
super.onCreate();
contrastType = GLES20.glGetUniformLocation(mGLProgram, "stepcv");
}
@Override
protected void onSetExpandData() {
super.onSetExpandData();
GLES20.glUniform1f(contrastType, contrastCode);
}
public void setContrastCode(float contrastCode) {
this.contrastCode = contrastCode;
}
}
package com.joe.camera2recorddemo.OpenGL.MhFilter;
import android.content.res.Resources;
import android.opengl.GLES20;
import com.joe.camera2recorddemo.OpenGL.Filter.Filter;
/**
* Saturation filter.
*/
public class SaturationFilter extends Filter {
private int saturationType;
private float saturationCode = 1.0f;
public SaturationFilter(Resources resource) {
super(resource,"shader/base.vert","shader/mh/saturation.frag");
}
@Override
protected void onCreate() {
super.onCreate();
saturationType = GLES20.glGetUniformLocation(mGLProgram, "saturation");
}
@Override
protected void onSetExpandData() {
super.onSetExpandData();
GLES20.glUniform1f(saturationType, saturationCode);
}
public void setSaturationCode(float saturationCode) {
this.saturationCode = saturationCode;
}
}
package com.joe.camera2recorddemo.OpenGL.MhFilter;
import android.content.res.Resources;
import android.graphics.PointF;
import android.opengl.GLES20;
import com.joe.camera2recorddemo.OpenGL.Filter.Filter;
import java.nio.FloatBuffer;
/**
* Vignette filter.
*/
public class VignetteFilter extends Filter {
private int mVignetteCenterLocation;
private PointF mVignetteCenter = new PointF();
private int mVignetteColorLocation;
private float[] mVignetteColor = new float[] {0.0f, 0.0f, 0.0f};
private int mVignetteStartLocation;
private float mVignetteStart = 0.75f;
private int mVignetteEndLocation;
private float mVignetteEnd = 0.75f;
private float[] vec2 = new float[2];
public VignetteFilter(Resources resource) {
super(resource,"shader/base.vert","shader/mh/vignette.frag");
vec2[0] = mVignetteCenter.x;
vec2[1] = mVignetteCenter.y;
}
@Override
protected void onCreate() {
super.onCreate();
mVignetteCenterLocation = GLES20.glGetUniformLocation(mGLProgram, "vignetteCenter");
mVignetteColorLocation = GLES20.glGetUniformLocation(mGLProgram, "vignetteColor");
mVignetteStartLocation = GLES20.glGetUniformLocation(mGLProgram, "vignetteStart");
mVignetteEndLocation = GLES20.glGetUniformLocation(mGLProgram, "vignetteEnd");
}
@Override
protected void onSetExpandData() {
super.onSetExpandData();
GLES20.glUniform2fv(mVignetteCenterLocation,1,vec2,0);
GLES20.glUniform3fv(mVignetteColorLocation,1, FloatBuffer.wrap(mVignetteColor));
GLES20.glUniform1f(mVignetteStartLocation, mVignetteStart);
GLES20.glUniform1f(mVignetteEndLocation, mVignetteEnd);
}
public void setVignetteStart(float vignetteStart) {
this.mVignetteStart = vignetteStart;
}
public void setVignetteEnd(float vignetteEnd) {
this.mVignetteEnd = vignetteEnd;
}
}
package com.joe.camera2recorddemo.OpenGL;
public interface Renderer {
void create();
void sizeChanged(int width, int height);
void draw(int texture);
void destroy();
}
package com.joe.camera2recorddemo.OpenGL;
/**
* Created by Yj on 2017/10/30.
* Helper for applying a Transformation to texture coordinates.
*/
public class TransUtil {
public static float[] textureCoords;
/**
* Compute the transformed texture coordinates.
*
* @param tc the original texture coordinates
* @param transformation the crop/flip/rotation to apply
* @return the transformed coordinates (the input array is modified in place)
*/
public static float[] getTransformationCo(float[] tc,final Transformation transformation) {
textureCoords = tc;
if (transformation.getCropRect() != null) {
resolveCrop(transformation.getCropRect().x, transformation.getCropRect().y,
transformation.getCropRect().width, transformation.getCropRect().height);
} else {
resolveCrop(Transformation.FULL_RECT.x, Transformation.FULL_RECT.y,
Transformation.FULL_RECT.width, Transformation.FULL_RECT.height);
}
resolveFlip(transformation.getFlip());
resolveRotate(transformation.getRotation());
return textureCoords;
}
private static void resolveCrop(float x, float y, float width, float height) {
float minX = x;
float minY = y;
float maxX = minX + width;
float maxY = minY + height;
// left bottom
textureCoords[0] = minX;
textureCoords[1] = minY;
// right bottom
textureCoords[2] = maxX;
textureCoords[3] = minY;
// left top
textureCoords[4] = minX;
textureCoords[5] = maxY;
// right top
textureCoords[6] = maxX;
textureCoords[7] = maxY;
}
private static void resolveFlip(int flip) {
switch (flip) {
case Transformation.FLIP_HORIZONTAL:
swap(textureCoords, 0, 2);
swap(textureCoords, 4, 6);
break;
case Transformation.FLIP_VERTICAL:
swap(textureCoords, 1, 5);
swap(textureCoords, 3, 7);
break;
case Transformation.FLIP_HORIZONTAL_VERTICAL:
swap(textureCoords, 0, 2);
swap(textureCoords, 4, 6);
swap(textureCoords, 1, 5);
swap(textureCoords, 3, 7);
break;
case Transformation.FLIP_NONE:
default:
break;
}
}
private static void resolveRotate(int rotation) {
float x, y;
switch (rotation) {
case 90:
x = textureCoords[0];
y = textureCoords[1];
textureCoords[0] = textureCoords[4];
textureCoords[1] = textureCoords[5];
textureCoords[4] = textureCoords[6];
textureCoords[5] = textureCoords[7];
textureCoords[6] = textureCoords[2];
textureCoords[7] = textureCoords[3];
textureCoords[2] = x;
textureCoords[3] = y;
break;
case 180:
swap(textureCoords, 0, 6);
swap(textureCoords, 1, 7);
swap(textureCoords, 2, 4);
swap(textureCoords, 3, 5);
break;
case 270:
x = textureCoords[0];
y = textureCoords[1];
textureCoords[0] = textureCoords[2];
textureCoords[1] = textureCoords[3];
textureCoords[2] = textureCoords[6];
textureCoords[3] = textureCoords[7];
textureCoords[6] = textureCoords[4];
textureCoords[7] = textureCoords[5];
textureCoords[4] = x;
textureCoords[5] = y;
break;
case 0:
default:
break;
}
}
// /**
// * Scale transform.
// * @param vertices the vertex coordinates
// * @param inputWidth input width
// * @param inputHeight input height
// * @param outputWidth output width
// * @param outputHeight output height
// * @param scaleType the scale type
// * @return the adjusted vertices
// */
// public static float[] resolveScale(float[] vertices,int inputWidth, int inputHeight, int outputWidth, int outputHeight,
// int scaleType) {
// if (scaleType == Transformation.SCALE_TYPE_FIT_XY) {
// // The default is FIT_XY
// return vertices;
// }
//
// // Note: scale type need to be implemented by adjusting
// // the vertices (not textureCoords).
// if (inputWidth * outputHeight == inputHeight * outputWidth) {
// // Optional optimization: If input w/h aspect is the same as output's,
// // there is no need to adjust vertices at all.
// return vertices;
// }
//
// float inputAspect = inputWidth / (float) inputHeight;
// float outputAspect = outputWidth / (float) outputHeight;
//
// if (scaleType == Transformation.SCALE_TYPE_CENTER_CROP) {
// if (inputAspect < outputAspect) {
// float heightRatio = outputAspect / inputAspect;
// vertices[1] *= heightRatio;
// vertices[3] *= heightRatio;
// vertices[5] *= heightRatio;
// vertices[7] *= heightRatio;
// } else {
// float widthRatio = inputAspect / outputAspect;
// vertices[0] *= widthRatio;
// vertices[2] *= widthRatio;
// vertices[4] *= widthRatio;
// vertices[6] *= widthRatio;
// }
// } else if (scaleType == Transformation.SCALE_TYPE_CENTER_INSIDE) {
// if (inputAspect < outputAspect) {
// float widthRatio = inputAspect / outputAspect;
// vertices[0] *= widthRatio;
// vertices[2] *= widthRatio;
// vertices[4] *= widthRatio;
// vertices[6] *= widthRatio;
// } else {
// float heightRatio = outputAspect / inputAspect;
// vertices[1] *= heightRatio;
// vertices[3] *= heightRatio;
// vertices[5] *= heightRatio;
// vertices[7] *= heightRatio;
// }
// }
// return vertices;
// }
private static void swap(float[] arr, int index1, int index2) {
float temp = arr[index1];
arr[index1] = arr[index2];
arr[index2] = temp;
}
}
package com.joe.camera2recorddemo.OpenGL;
import android.util.Size;
/**
* Created by Yj on 2017/10/30.
* Describes rotation, flip, scale and crop for an image.
*/
public class Transformation {
public static final Rect FULL_RECT = new Rect(0, 0, 1, 1);
public static final int FLIP_NONE = 2001;
public static final int FLIP_HORIZONTAL = 2002;
public static final int FLIP_VERTICAL = 2003;
public static final int FLIP_HORIZONTAL_VERTICAL = 2004;
private Rect cropRect = FULL_RECT;
private int flip = FLIP_NONE;
private int rotation = 0;
private Size inputSize;
private Size outputSize;
private int scaleType = 0;
public void setCrop(Rect cropRect) {
this.cropRect = cropRect;
}
public void setFlip(int flip) {
this.flip = flip;
}
public void setRotation(int rotation) {
this.rotation = rotation;
}
public void setInputSize(Size inputSize) {
this.inputSize = inputSize;
}
public void setOutputSize(Size outputSize) {
this.outputSize = outputSize;
}
public Rect getCropRect() {
return cropRect;
}
public int getFlip() {
return flip;
}
public int getRotation() {
return rotation;
}
public Size getInputSize() {
return inputSize;
}
public Size getOutputSize() {
return outputSize;
}
public int getScaleType() {
return scaleType;
}
public void setScale(Size inputSize, Size outputSize, int scaleType) {
this.inputSize = inputSize;
this.outputSize = outputSize;
this.scaleType = scaleType;
}
public static class Rect {
final float x;
final float y;
final float width;
final float height;
public Rect(final float x, final float y, final float width, final float height) {
this.x = x;
this.y = y;
this.width = width;
this.height = height;
}
}
}
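A usage sketch tying Transformation to TransUtil above; MatrixUtils is the repo's own helper, already imported by the filter classes earlier in this commit.

// Usage sketch: a centered half-size crop plus a 180° rotation, then let
// TransUtil rewrite the texture coordinates.
Transformation t = new Transformation();
t.setCrop(new Transformation.Rect(0.25f, 0.25f, 0.5f, 0.5f)); // x, y, w, h in [0,1]
t.setRotation(180);
float[] texCo = TransUtil.getTransformationCo(MatrixUtils.getOriginalTextureCo(), t);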
package com.joe.camera2recorddemo.OpenGL;
import com.joe.camera2recorddemo.OpenGL.Filter.OesFilter;
import com.joe.camera2recorddemo.Utils.MatrixUtils;
/**
* Created by aiya on 2017/9/12.
*/
class WrapRenderer implements Renderer{
private Renderer mRenderer;
private OesFilter mFilter;
private FrameBuffer mFrameBuffer;
public static final int TYPE_MOVE=0;
public static final int TYPE_CAMERA=1;
public static final int TYPE_SURFACE=2;
public WrapRenderer(Renderer renderer){
this.mRenderer=renderer;
mFrameBuffer=new FrameBuffer();
mFilter=new OesFilter();
if(renderer!=null){
MatrixUtils.flip(mFilter.getVertexMatrix(),false,true);
}
}
public OesFilter getmFilter() {
return mFilter;
}
public void setFlag(int flag){
if(flag==TYPE_SURFACE){
mFilter.setVertexCo(MatrixUtils.getSurfaceVertexCo());
}else if(flag==TYPE_CAMERA){
mFilter.setVertexCo(MatrixUtils.getCameraVertexCo());
}else if(flag==TYPE_MOVE){
mFilter.setVertexCo(MatrixUtils.getMoveVertexCo());
}
}
public float[] getTextureMatrix(){
return mFilter.getTextureMatrix();
}
@Override
public void create() {
mFilter.create();
if(mRenderer!=null){
mRenderer.create();
}
}
@Override
public void sizeChanged(int width, int height) {
mFilter.sizeChanged(width, height);
if(mRenderer!=null){
mRenderer.sizeChanged(width, height);
}
}
@Override
public void draw(int texture) {
if(mRenderer!=null){
mRenderer.draw(mFilter.drawToTexture(texture));
}else{
mFilter.draw(texture);
}
}
@Override
public void destroy() {
if(mRenderer!=null){
mRenderer.destroy();
}
mFilter.destroy();
}
}
package com.joe.camera2recorddemo.Utils;
import android.content.Context;
import android.hardware.Camera;
import android.util.Log;
import android.view.Surface;
import android.view.WindowManager;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
public class CameraParamUtil {
private static final String TAG = "JCameraView";
private static CameraParamUtil cameraParamUtil = null;
private CameraSizeComparator sizeComparator = new CameraSizeComparator();
private CameraParamUtil() {
}
public static CameraParamUtil getInstance() {
if (cameraParamUtil == null) {
cameraParamUtil = new CameraParamUtil();
}
return cameraParamUtil;
}
public Camera.Size getPreviewSize(List<Camera.Size> list, int th, float rate) {
Collections.sort(list, sizeComparator);
int i = 0;
for (Camera.Size s : list) {
if ((s.width > th) && equalRate(s, rate)) {
Log.i(TAG, "MakeSure Preview :w = " + s.width + " h = " + s.height);
break;
}
i++;
}
if (i == list.size()) {
return getBestSize(list, rate);
} else {
return list.get(i);
}
}
public Camera.Size getPictureSize(List<Camera.Size> list, int th, float rate) {
Collections.sort(list, sizeComparator);
int i = 0;
for (Camera.Size s : list) {
if ((s.width > th) && equalRate(s, rate)) {
Log.i(TAG, "MakeSure Picture :w = " + s.width + " h = " + s.height);
break;
}
i++;
}
if (i == list.size()) {
return getBestSize(list, rate);
} else {
return list.get(i);
}
}
private Camera.Size getBestSize(List<Camera.Size> list, float rate) {
float previewDisparity = 100;
int index = 0;
for (int i = 0; i < list.size(); i++) {
Camera.Size cur = list.get(i);
float prop = (float) cur.width / (float) cur.height;
if (Math.abs(rate - prop) < previewDisparity) {
previewDisparity = Math.abs(rate - prop);
index = i;
}
}
return list.get(index);
}
private boolean equalRate(Camera.Size s, float rate) {
float r = (float) (s.width) / (float) (s.height);
return Math.abs(r - rate) <= 0.2;
}
public boolean isSupportedFocusMode(List<String> focusList, String focusMode) {
for (int i = 0; i < focusList.size(); i++) {
if (focusMode.equals(focusList.get(i))) {
Log.i(TAG, "FocusMode supported " + focusMode);
return true;
}
}
Log.i(TAG, "FocusMode not supported " + focusMode);
return false;
}
public boolean isSupportedPictureFormats(List<Integer> supportedPictureFormats, int jpeg) {
for (int i = 0; i < supportedPictureFormats.size(); i++) {
if (jpeg == supportedPictureFormats.get(i)) {
Log.i(TAG, "Formats supported " + jpeg);
return true;
}
}
Log.i(TAG, "Formats not supported " + jpeg);
return false;
}
public int getCameraDisplayOrientation(Context context, int cameraId) {
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(cameraId, info);
WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
int rotation = wm.getDefaultDisplay().getRotation();
int degrees = 0;
switch (rotation) {
case Surface.ROTATION_0:
degrees = 0;
break;
case Surface.ROTATION_90:
degrees = 90;
break;
case Surface.ROTATION_180:
degrees = 180;
break;
case Surface.ROTATION_270:
degrees = 270;
break;
}
int result;
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
result = (info.orientation + degrees) % 360;
result = (360 - result) % 360; // compensate the mirror
} else {
// back-facing
result = (info.orientation - degrees + 360) % 360;
}
return result;
}
private class CameraSizeComparator implements Comparator<Camera.Size> {
public int compare(Camera.Size lhs, Camera.Size rhs) {
if (lhs.width == rhs.width) {
return 0;
} else if (lhs.width > rhs.width) {
return 1;
} else {
return -1;
}
}
}
}
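A usage sketch with the legacy android.hardware.Camera API this class targets; the 1000-pixel threshold and 16:9 ratio are arbitrary example values.

// Usage sketch: pick a 16:9 preview size wider than 1000px, then apply it.
Camera camera = Camera.open(0);
Camera.Parameters params = camera.getParameters();
Camera.Size preview = CameraParamUtil.getInstance()
        .getPreviewSize(params.getSupportedPreviewSizes(), 1000, 16f / 9f);
params.setPreviewSize(preview.width, preview.height);
camera.setParameters(params);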
package com.joe.camera2recorddemo.Utils;
import android.app.Activity;
import android.hardware.Camera;
import android.view.Surface;
/**
* Created by Administrator on 2017/10/10.
*/
public class CameraUtils {
/**
* Fix the upside-down preview orientation, notably on front-facing cameras.
*
* @param activity the activity whose display rotation is used
* @param cameraId the id of the opened camera
* @param camera the opened camera instance
*/
public static void setCameraDisplayOrientation(Activity activity, int cameraId, Camera camera) {
Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
Camera.getCameraInfo(cameraId, info);
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
int degrees = 0;
switch (rotation) {
case Surface.ROTATION_0:
degrees = 90;
break;
case Surface.ROTATION_90:
degrees = 180;
break;
case Surface.ROTATION_180:
degrees = 270;
break;
case Surface.ROTATION_270:
degrees = 0;
break;
}
int result;
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
result = (info.orientation + degrees) % 360;
result = (360 - result) % 360; // compensate the mirror
} else { // back-facing
result = (info.orientation - degrees + 360) % 360;
}
camera.setDisplayOrientation(result);
}
}
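A one-line usage sketch, typically called right after Camera.open(); `activity`, `cameraId` and `camera` come from the caller.

// Usage sketch: align the preview with the current display rotation.
CameraUtils.setCameraDisplayOrientation(activity, cameraId, camera);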
package com.joe.camera2recorddemo.Utils;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.util.Size;
import com.joe.camera2recorddemo.MediaCodecUtil.TrackUtils;
/**
* Utilities for reading video metadata.
* Created by Administrator on 2017/12/6.
*/
public class FormatUtils {
/**
* Get the display size of a video.
*
* @param url the video path
* @return the video size, with width/height swapped for 90°/270° rotation
*/
public static Size getVideoSize(String url) {
int mInputHeight = 0, mInputWidth = 0;
MediaExtractor extractor = new MediaExtractor();
try {
extractor.setDataSource(url);
int trackIndex = TrackUtils.selectVideoTrack(extractor);
if (trackIndex < 0) {
throw new RuntimeException("No video track found in " + url);
}
extractor.selectTrack(trackIndex);
MediaFormat mediaFormat = extractor.getTrackFormat(trackIndex);
//Read the width/height, honouring the rotation flag
int rotation = mediaFormat.containsKey(MediaFormat.KEY_ROTATION) ? mediaFormat.getInteger(MediaFormat.KEY_ROTATION) : 0;
if (rotation == 90 || rotation == 270) {
mInputHeight = mediaFormat.getInteger(MediaFormat.KEY_WIDTH);
mInputWidth = mediaFormat.getInteger(MediaFormat.KEY_HEIGHT);
} else {
mInputWidth = mediaFormat.getInteger(MediaFormat.KEY_WIDTH);
mInputHeight = mediaFormat.getInteger(MediaFormat.KEY_HEIGHT);
}
} catch (Exception e) {
e.printStackTrace();
}
return new Size(mInputWidth, mInputHeight);
}
/**
* Get the full video format information.
*
* @param url the video path
* @return the parsed {@link VideoFormat}, with width/height swapped for 90°/270° rotation
*/
public static VideoFormat getVideoFormat(String url) {
VideoFormat videoFormat = new VideoFormat();
videoFormat.height = 0;
videoFormat.width = 0;
MediaExtractor extractor = new MediaExtractor();
try {
extractor.setDataSource(url);
int trackIndex = TrackUtils.selectVideoTrack(extractor);
if (trackIndex < 0) {
throw new RuntimeException("No video track found in " + url);
}
extractor.selectTrack(trackIndex);
MediaFormat mediaFormat = extractor.getTrackFormat(trackIndex);
//Read the width/height, honouring the rotation flag
videoFormat.rotation = mediaFormat.containsKey(MediaFormat.KEY_ROTATION) ? mediaFormat.getInteger(MediaFormat.KEY_ROTATION) : 0;
if (videoFormat.rotation == 90 || videoFormat.rotation == 270) {
videoFormat.height = mediaFormat.getInteger(MediaFormat.KEY_WIDTH);
videoFormat.width = mediaFormat.getInteger(MediaFormat.KEY_HEIGHT);
} else {
videoFormat.width = mediaFormat.getInteger(MediaFormat.KEY_WIDTH);
videoFormat.height = mediaFormat.getInteger(MediaFormat.KEY_HEIGHT);
}
} catch (Exception e) {
e.printStackTrace();
}
return videoFormat;
}
public static class VideoFormat {
public int width;
public int height;
public int rotation;
}
}
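A usage sketch; the 90°/270° swap is already applied by the method above, so the returned width/height are display-oriented. `videoPath` is any caller-provided local video file path.

// Usage sketch ('videoPath' is any local video file path).
FormatUtils.VideoFormat fmt = FormatUtils.getVideoFormat(videoPath);
int displayWidth = fmt.width;   // rotation already folded in
int displayHeight = fmt.height;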
package com.joe.camera2recorddemo.Utils;
import android.annotation.SuppressLint;
import android.content.ContentUris;
import android.content.Context;
import android.database.Cursor;
import android.net.Uri;
import android.os.Build;
import android.os.Environment;
import android.provider.DocumentsContract;
import android.provider.MediaStore;
public class GetPathFromUri4kitkat {
/**
* Resolves a file's absolute path from a Uri on Android 4.4 (KitKat) and later,
* where the pre-KitKat approaches no longer work.
*/
@SuppressLint("NewApi")
public static String getPath(final Context context, final Uri uri) {
final boolean isKitKat = Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT;
// DocumentProvider
if (isKitKat && DocumentsContract.isDocumentUri(context, uri)) {
// ExternalStorageProvider
if (isExternalStorageDocument(uri)) {
final String docId = DocumentsContract.getDocumentId(uri);
final String[] split = docId.split(":");
final String type = split[0];
if ("primary".equalsIgnoreCase(type)) {
return Environment.getExternalStorageDirectory() + "/" + split[1];
}
// TODO handle non-primary volumes
}
// DownloadsProvider
else if (isDownloadsDocument(uri)) {
final String id = DocumentsContract.getDocumentId(uri);
final Uri contentUri = ContentUris.withAppendedId(
Uri.parse("content://downloads/public_downloads"), Long.valueOf(id));
return getDataColumn(context, contentUri, null, null);
}
// MediaProvider
else if (isMediaDocument(uri)) {
final String docId = DocumentsContract.getDocumentId(uri);
final String[] split = docId.split(":");
final String type = split[0];
Uri contentUri = null;
if ("image".equals(type)) {
contentUri = MediaStore.Images.Media.EXTERNAL_CONTENT_URI;
} else if ("video".equals(type)) {
contentUri = MediaStore.Video.Media.EXTERNAL_CONTENT_URI;
} else if ("audio".equals(type)) {
contentUri = MediaStore.Audio.Media.EXTERNAL_CONTENT_URI;
}
final String selection = "_id=?";
final String[] selectionArgs = new String[]{split[1]};
return getDataColumn(context, contentUri, selection, selectionArgs);
}
}
// MediaStore (and general)
else if ("content".equalsIgnoreCase(uri.getScheme())) {
return getDataColumn(context, uri, null, null);
}
// File
else if ("file".equalsIgnoreCase(uri.getScheme())) {
return uri.getPath();
}
return null;
}
/**
* Get the value of the data column for this Uri. This is useful for
* MediaStore Uris, and other file-based ContentProviders.
*
* @param context The context.
* @param uri The Uri to query.
* @param selection (Optional) Filter used in the query.
* @param selectionArgs (Optional) Selection arguments used in the query.
* @return The value of the _data column, which is typically a file path.
*/
public static String getDataColumn(Context context, Uri uri, String selection,
String[] selectionArgs) {
Cursor cursor = null;
final String column = "_data";
final String[] projection = {column};
try {
cursor = context.getContentResolver().query(uri, projection, selection, selectionArgs,
null);
if (cursor != null && cursor.moveToFirst()) {
final int column_index = cursor.getColumnIndexOrThrow(column);
return cursor.getString(column_index);
}
} finally {
if (cursor != null)
cursor.close();
}
return null;
}
/**
* @param uri The Uri to check.
* @return Whether the Uri authority is ExternalStorageProvider.
*/
public static boolean isExternalStorageDocument(Uri uri) {
return "com.android.externalstorage.documents".equals(uri.getAuthority());
}
/**
* @param uri The Uri to check.
* @return Whether the Uri authority is DownloadsProvider.
*/
public static boolean isDownloadsDocument(Uri uri) {
return "com.android.providers.downloads.documents".equals(uri.getAuthority());
}
/**
* @param uri The Uri to check.
* @return Whether the Uri authority is MediaProvider.
*/
public static boolean isMediaDocument(Uri uri) {
return "com.android.providers.media.documents".equals(uri.getAuthority());
}
}
package com.joe.camera2recorddemo.Utils;
import android.content.ContentResolver;
import android.content.Context;
import android.net.Uri;
import android.util.Log;
/**
* Created by Yj on 2017/10/16.
*/
public class UriUtils {
/**
* Resolve the absolute file path behind a Uri.
*
* @param context the calling context
* @param uri the Uri to resolve
* @return the file path, or null if it cannot be resolved
*/
public static String getRealFilePath(Context context, final Uri uri) {
if (null == uri) return null;
final String scheme = uri.getScheme();
String data = null;
if (scheme == null) {
Log.e("UriUtils", "scheme is null");
data = uri.getPath();
} else if (ContentResolver.SCHEME_FILE.equals(scheme)) {
data = uri.getPath();
Log.e("UriUtils", "SCHEME_FILE");
} else if (ContentResolver.SCHEME_CONTENT.equals(scheme)) {
data = GetPathFromUri4kitkat.getPath(context, uri);
}
return data;
}
}
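A usage sketch resolving a document-picker result to a local path; the path can be null for providers that expose no _data column (e.g. cloud documents).

// Usage sketch inside an Activity that launched ACTION_GET_CONTENT.
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (resultCode == RESULT_OK && data != null) {
        String path = UriUtils.getRealFilePath(this, data.getData()); // may be null
    }
}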
<?xml version="1.0" encoding="utf-8"?>
<selector xmlns:android="http://schemas.android.com/apk/res/android" >
<item android:state_pressed="true" android:drawable="@mipmap/btn_camera_all_click"/>
<item android:drawable="@mipmap/btn_camera_all"/>
</selector>
<?xml version="1.0" encoding="utf-8"?>
<shape xmlns:android="http://schemas.android.com/apk/res/android">
<solid android:color="#49000000" />
<corners android:topLeftRadius="10dp"
android:topRightRadius="10dp"
android:bottomRightRadius="10dp"
android:bottomLeftRadius="10dp"/>
<stroke android:width="1dp" android:color="#49000000" />
</shape>
<?xml version="1.0" encoding="utf-8"?>
<shape xmlns:android="http://schemas.android.com/apk/res/android">
<solid android:color="#8c000000" />
<corners android:topLeftRadius="20dp"
android:topRightRadius="20dp"
android:bottomRightRadius="20dp"
android:bottomLeftRadius="20dp"/>
<stroke android:width="1dp" android:color="#8c000000" />
</shape>