diff --git a/README.md b/README.md
new file mode 100644
index 0000000..5e47ef3
--- /dev/null
+++ b/README.md
@@ -0,0 +1,211 @@
+
+
+
+Android Instagram Clone Course
+A step-by-step guide to build your own Instagram Clone
+
+
+
+
+In the course we'll be using:
+
+ - Firebase Email Authentication
+ - Firebase Database
+ - Firebase Cloud-Storage
+
+
+
+To keep things simple and condensed I'll be using Firebase for everything.
+
+Lecture Source Code:
+
+- Getting Started
+
+- Toolbars and NavigationView
+
+- Customizing the BottomNavigationView
+
+- BottomNavigationView Activities
+
+- Organizing Things and Tab-Prep
+
+- SectionsPagerAdapter (Home Screen Tabs)
+
+- Profile Toolbar and Menu
+
+- Building the Profile Part 1
+
+- Building the Profile Part 2
+
+- Account Settings Layout
+
+- Account Settings Navigation
+
+- Account Settings Fragments
+
+- EditProfile Fragment Layout
+
+- Universal Image Loader Config
+
+- Testing Images in the User Profile
+
+- Square ImageView Widgets
+
+- Login Layout
+
+- Register Layout
+
+- Get Started with Firebase
+
+- Setup Firebase Authentication
+
+- Testing Firebase Authentication
+
+- Setup Register Activity Widgets
+
+- Register New User with Firebase
+
+- Firebase Database Structure (no source code)
+
+- Check if Username Already Exists
+
+- Insert New Data
+
+- Email Verification
+
+- Enable User Signout
+
+- Profile Fragment
+
+- Profile Fragment Setup
+
+- Retrieving User Data from Firebase
+
+- Setting Profile Fragment Widgets
+
+- Navigating to EditProfile Fragment
+
+- Setting EditProfile Fragment Widgets
+
+- Saving User Profile Changes
+
+- Query Firebase Database
+
+- Changing Firebase Authenticated Email (part 1)
+
+- Changing Firebase Authenticated Email (part 2)
+
+- Changing Firebase Authenticated Email (part 3)
+
+- Update User Account Settings
+
+- Verifying Permissions for Sharing
+
+- ShareActivity Layout and Tabs
+
+- Setup Gallery and Photo Fragments
+
+- Camera Intent
+
+- GalleryFragment Layout
+
+- Phone Directories
+
+- ShareActivity GridView
+
+- Selected Image to Share
+
+- NextActivity Setup
+
+- How to upload images to Firebase Storage (no source code)
+
+- Getting the Image Count
+
+- Firebase Storage Reference
+
+- Convert Bitmap to Byte Array
+
+- Upload Photo to Firebase Storage
+
+- Insert Photo into Firebase Database
+
+- Changing Profile Photo
+
+- Upload New Profile Photo to Firebase Storage
+
+- Fixing the Navigation
+
+- New Profile Photo Using Camera
+
+- Share Photo using Camera
+
+- Populating User Profile Gridview
+
+- Activity Animations
+
+- Post Viewing Layout (part1)
+
+- Post Viewing Layout (part2)
+
+- GridImage Selection Interface
+
+- Retrieving Post Image From Bundle
+
+- Image Post Date
+
+- Query Photo Details
+
+- Likes Toggle Part1
+
+- Likes Toggle Part2
+
+- Likes Toggle Part3
+
+- Likes Toggle Part4
+
+- Likes Toggle Part5
+
+- Testing Instagram Likes
+
+- Comments Layout
+
+- Comments ListAdapter part1
+
+- Comments ListAdapter part2
+
+- Displaying the First Comment
+
+- Inserting Comments
+
+- Reading Comments
+
+- Finishing Comments (IMPORTANT)
+
+- Search Activity Layout
+
+- Searching for Users
+
+- Viewing User Profiles
+
+- View Profile Fragment
+
+- Following Users (part1)
+
+- Following Users (part2)
+
+- Fixing a Few Bugs
+
+- Mainfeed ListAdapter (part 1/2)
+
+- Mainfeed ListAdapter (part 2/2)
+
+- Displaying Posts in the Main Feed (part 1/3)
+
+- Displaying Posts in the Main Feed (part 2/3)
+
+- Displaying Posts in the Main Feed (part 3/3)
+
+- ListView Pagination and Bug Fixes
+
+- End?!
+
+
diff --git a/app/build.gradle b/app/build.gradle
index e740f65..653c004 100644
--- a/app/build.gradle
+++ b/app/build.gradle
@@ -1,12 +1,11 @@
apply plugin: 'com.android.application'
android {
- compileSdkVersion 25
- buildToolsVersion "25.0.0"
+ compileSdkVersion 27
defaultConfig {
applicationId "tabian.com.instagramclone2"
minSdkVersion 18
- targetSdkVersion 25
+ targetSdkVersion 27
versionCode 1
versionName "1.0"
testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
@@ -20,35 +19,46 @@ android {
}
dependencies {
- compile fileTree(dir: 'libs', include: ['*.jar'])
+ compile fileTree(include: ['*.jar'], dir: 'libs')
androidTestCompile('com.android.support.test.espresso:espresso-core:2.2.2', {
exclude group: 'com.android.support', module: 'support-annotations'
})
- compile 'com.android.support:appcompat-v7:25.3.1'
+ compile 'com.android.support:appcompat-v7:27.0.2'
compile 'com.android.support.constraint:constraint-layout:1.0.2'
testCompile 'junit:junit:4.12'
-
//Design library for Coordinator Layout and Toolbars
- compile 'com.android.support:design:25.3.1'
-
+ compile 'com.android.support:design:27.0.2'
+ //ExifInterface support
+ compile 'com.android.support:exifinterface:27.0.2'
+ //design library for Coordinator Layout
+ compile 'com.android.support:design:27.0.2'
+ //Exo Player
+ compile 'com.google.android.exoplayer:exoplayer:2.6.0'
+ //cardview
+ compile 'com.android.support:cardview-v7:27.0.2'
+ //recyclerview
+ compile 'com.android.support:recyclerview-v7:27.0.2'
//BottomNavigationViewEx library
compile 'com.github.ittianyu:BottomNavigationViewEx:1.1.9'
-
//Circle ImageView
compile 'de.hdodenhof:circleimageview:2.1.0'
-
+ //elastic header
+ compile 'com.github.eschao:android-ElasticListView:v1.0'
+ //material dialogs
+ implementation 'com.afollestad.material-dialogs:core:0.9.6.0'
+ //glide
+ implementation 'com.github.bumptech.glide:glide:4.4.0'
+ annotationProcessor 'com.github.bumptech.glide:compiler:4.4.0'
+ implementation files('libs/aspectjrt-1.7.3 (1).jar')
//Universal image loader
compile 'com.nostra13.universalimageloader:universal-image-loader:1.9.5'
-
//firebase authentication
compile 'com.google.firebase:firebase-auth:10.2.6'
-
//firebase database
compile 'com.google.firebase:firebase-database:10.2.6'
-
//firebase storage
compile 'com.google.firebase:firebase-storage:10.2.6'
-
+ compile files('libs/aspectjrt-1.7.3 (1).jar')
}
//Firebase
diff --git a/app/libs/aspectjrt-1.7.3 (1).jar b/app/libs/aspectjrt-1.7.3 (1).jar
new file mode 100644
index 0000000..ef9fe4b
Binary files /dev/null and b/app/libs/aspectjrt-1.7.3 (1).jar differ
diff --git a/app/libs/isoparser-1.0.6.jar b/app/libs/isoparser-1.0.6.jar
new file mode 100644
index 0000000..f0142bb
Binary files /dev/null and b/app/libs/isoparser-1.0.6.jar differ
diff --git a/app/src/main/AndroidManifest.xml b/app/src/main/AndroidManifest.xml
index 38f3b86..85523ae 100644
--- a/app/src/main/AndroidManifest.xml
+++ b/app/src/main/AndroidManifest.xml
@@ -4,8 +4,14 @@
+
+
+
+
+
+
-
+
+
+
+
+
+
+
+
-
-
-
+
+
+
+
+
+
-
-
-
-
-
+
+
\ No newline at end of file
diff --git a/app/src/main/java/tabian/com/instagramclone2/Home/HomeActivity.java b/app/src/main/java/tabian/com/instagramclone2/Home/HomeActivity.java
index 49d1ac1..df4ca36 100644
--- a/app/src/main/java/tabian/com/instagramclone2/Home/HomeActivity.java
+++ b/app/src/main/java/tabian/com/instagramclone2/Home/HomeActivity.java
@@ -24,15 +24,17 @@
import tabian.com.instagramclone2.Login.LoginActivity;
import tabian.com.instagramclone2.R;
import tabian.com.instagramclone2.Utils.BottomNavigationViewHelper;
-import tabian.com.instagramclone2.Utils.MainfeedListAdapter;
+import tabian.com.instagramclone2.Utils.FirebaseMethods;
+import tabian.com.instagramclone2.Utils.MainFeedListAdapter;
import tabian.com.instagramclone2.Utils.SectionsPagerAdapter;
import tabian.com.instagramclone2.Utils.UniversalImageLoader;
import tabian.com.instagramclone2.Utils.ViewCommentsFragment;
import tabian.com.instagramclone2.models.Photo;
-import tabian.com.instagramclone2.models.UserAccountSettings;
+import tabian.com.instagramclone2.opengl.AddToStoryDialog;
+import tabian.com.instagramclone2.opengl.NewStoryActivity;
public class HomeActivity extends AppCompatActivity implements
- MainfeedListAdapter.OnLoadMoreItemsListener{
+ MainFeedListAdapter.OnLoadMoreItemsListener{
@Override
public void onLoadMoreItems() {
@@ -47,6 +49,9 @@ public void onLoadMoreItems() {
private static final String TAG = "HomeActivity";
private static final int ACTIVITY_NUM = 0;
private static final int HOME_FRAGMENT = 1;
+ private static final int RESULT_ADD_NEW_STORY = 7891;
+ private final static int CAMERA_RQ = 6969;
+ private static final int REQUEST_ADD_NEW_STORY = 8719;
private Context mContext = HomeActivity.this;
@@ -76,6 +81,16 @@ protected void onCreate(Bundle savedInstanceState) {
}
+ public void openNewStoryActivity(){
+ Intent intent = new Intent(this, NewStoryActivity.class);
+ startActivityForResult(intent, REQUEST_ADD_NEW_STORY);
+ }
+
+ public void showAddToStoryDialog(){
+ Log.d(TAG, "showAddToStoryDialog: showing add to story dialog.");
+ AddToStoryDialog dialog = new AddToStoryDialog();
+ dialog.show(getFragmentManager(), getString(R.string.dialog_add_to_story));
+ }
public void onCommentThreadSelected(Photo photo, String callingActivity){
@@ -116,6 +131,36 @@ public void onBackPressed() {
}
+ @Override
+ protected void onActivityResult(int requestCode, int resultCode, Intent data) {
+ super.onActivityResult(requestCode, resultCode, data);
+ Log.d(TAG, "onActivityResult: incoming result.");
+ // Received recording or error from MaterialCamera
+
+ if (requestCode == REQUEST_ADD_NEW_STORY) {
+ Log.d(TAG, "onActivityResult: incoming new story.");
+ if (resultCode == RESULT_ADD_NEW_STORY) {
+ Log.d(TAG, "onActivityResult: got the new story.");
+ Log.d(TAG, "onActivityResult: data type: " + data.getType());
+
+ final HomeFragment fragment = (HomeFragment) getSupportFragmentManager().findFragmentByTag("android:switcher:" + R.id.viewpager_container + ":" + 1);
+ if (fragment != null) {
+
+ FirebaseMethods firebaseMethods = new FirebaseMethods(this);
+ firebaseMethods.uploadNewStory(data, fragment);
+
+ }
+ else{
+ Log.d(TAG, "onActivityResult: could not communicate with home fragment.");
+ }
+
+
+
+ }
+ }
+ }
+
+
private void initImageLoader(){
UniversalImageLoader universalImageLoader = new UniversalImageLoader(mContext);
ImageLoader.getInstance().init(universalImageLoader.getConfig());
diff --git a/app/src/main/java/tabian/com/instagramclone2/Home/HomeFragment.java b/app/src/main/java/tabian/com/instagramclone2/Home/HomeFragment.java
index 7e9e6b6..63de068 100644
--- a/app/src/main/java/tabian/com/instagramclone2/Home/HomeFragment.java
+++ b/app/src/main/java/tabian/com/instagramclone2/Home/HomeFragment.java
@@ -3,20 +3,30 @@
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
+import android.support.v7.widget.LinearLayoutManager;
+import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
-import android.widget.ListView;
+import android.widget.RelativeLayout;
+import android.widget.TextView;
+import com.eschao.android.widget.elasticlistview.ElasticListView;
+import com.eschao.android.widget.elasticlistview.LoadFooter;
+import com.eschao.android.widget.elasticlistview.OnLoadListener;
+import com.eschao.android.widget.elasticlistview.OnUpdateListener;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.database.DataSnapshot;
import com.google.firebase.database.DatabaseError;
-import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;
import com.google.firebase.database.Query;
import com.google.firebase.database.ValueEventListener;
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
@@ -25,150 +35,404 @@
import java.util.Map;
import tabian.com.instagramclone2.R;
-import tabian.com.instagramclone2.Utils.MainfeedListAdapter;
+import tabian.com.instagramclone2.Utils.MainFeedListAdapter;
+import tabian.com.instagramclone2.Utils.StoriesRecyclerViewAdapter;
import tabian.com.instagramclone2.models.Comment;
-import tabian.com.instagramclone2.models.Like;
import tabian.com.instagramclone2.models.Photo;
+import tabian.com.instagramclone2.models.Story;
import tabian.com.instagramclone2.models.UserAccountSettings;
/**
* Created by User on 5/28/2017.
*/
-public class HomeFragment extends Fragment {
+public class HomeFragment extends Fragment implements OnUpdateListener, OnLoadListener {
+
private static final String TAG = "HomeFragment";
+ @Override
+ public void onUpdate() {
+ Log.d(TAG, "ElasticListView: updating list view...");
+
+ getFollowing();
+ }
+
+
+ @Override
+ public void onLoad() {
+ Log.d(TAG, "ElasticListView: loading...");
+
+ // Notify load is done
+ mListView.notifyLoaded();
+ }
+
+
//vars
private ArrayList mPhotos;
private ArrayList mPaginatedPhotos;
private ArrayList mFollowing;
- private ListView mListView;
- private MainfeedListAdapter mAdapter;
- private int mResults;
+ private int recursionIterator = 0;
+ // private ListView mListView;
+ private ElasticListView mListView;
+ private MainFeedListAdapter adapter;
+ private int resultsCount = 0;
+ private ArrayList mUserAccountSettings;
+ // private ArrayList mAllUserStories = new ArrayList<>();
+ private JSONArray mMasterStoriesArray;
+
+ private RecyclerView mRecyclerView;
+ public StoriesRecyclerViewAdapter mStoriesAdapter;
@Nullable
@Override
public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
View view = inflater.inflate(R.layout.fragment_home, container, false);
- mListView = (ListView) view.findViewById(R.id.listView);
- mFollowing = new ArrayList<>();
- mPhotos = new ArrayList<>();
+// mListView = (ListView) view.findViewById(R.id.listView);
+ mListView = (ElasticListView) view.findViewById(R.id.listView);
+ initListViewRefresh();
getFollowing();
return view;
}
- private void getFollowing(){
+ private void initListViewRefresh(){
+ mListView.setHorizontalFadingEdgeEnabled(true);
+ mListView.setAdapter(adapter);
+ mListView.enableLoadFooter(true)
+ .getLoadFooter().setLoadAction(LoadFooter.LoadAction.RELEASE_TO_LOAD);
+ mListView.setOnUpdateListener(this)
+ .setOnLoadListener(this);
+// mListView.requestUpdate();
+ }
+
+
+ private void getFriendsAccountSettings(){
+ Log.d(TAG, "getFriendsAccountSettings: getting friends account settings.");
+
+ for(int i = 0; i < mFollowing.size(); i++) {
+ Log.d(TAG, "getFriendsAccountSettings: user: " + mFollowing.get(i));
+ final int count = i;
+ Query query = FirebaseDatabase.getInstance().getReference()
+ .child(getString(R.string.dbname_user_account_settings))
+ .orderByKey()
+ .equalTo(mFollowing.get(i));
+
+ query.addListenerForSingleValueEvent(new ValueEventListener() {
+ @Override
+ public void onDataChange(DataSnapshot dataSnapshot) {
+
+
+ for (DataSnapshot snapshot : dataSnapshot.getChildren()) {
+ Log.d(TAG, "getFriendsAccountSettings: got a user: " + snapshot.getValue(UserAccountSettings.class).getDisplay_name());
+ mUserAccountSettings.add(snapshot.getValue(UserAccountSettings.class));
+
+ if(count == 0){
+ JSONObject userObject = new JSONObject();
+ try {
+ userObject.put(getString(R.string.field_display_name), mUserAccountSettings.get(count).getDisplay_name());
+ userObject.put(getString(R.string.field_username), mUserAccountSettings.get(count).getUsername());
+ userObject.put(getString(R.string.field_profile_photo), mUserAccountSettings.get(count).getProfile_photo());
+ userObject.put(getString(R.string.field_user_id), mUserAccountSettings.get(count).getUser_id());
+ JSONObject userSettingsStoryObject = new JSONObject();
+ userSettingsStoryObject.put(getString(R.string.user_account_settings), userObject);
+ mMasterStoriesArray.put(0, userSettingsStoryObject);
+ } catch (JSONException e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ }
+ if (count == mFollowing.size() - 1) {
+ getFriendsStories();
+ }
+ }
+
+ @Override
+ public void onCancelled(DatabaseError databaseError) {
+
+ }
+ });
+ }
+ }
+
+
+ private void getFriendsStories(){
+ Log.d(TAG, "getFriendsStories: getting stories of following.");
+
+ for(int i = 0; i < mUserAccountSettings.size(); i++){
+ Log.d(TAG, "getFriendsStories: checking user for stories: " + mUserAccountSettings.get(i));
+ final int count = i;
+ Query query = FirebaseDatabase.getInstance().getReference()
+ .child(getString(R.string.dbname_stories))
+ .child(mUserAccountSettings.get(i).getUser_id());
+
+ query.addListenerForSingleValueEvent(new ValueEventListener() {
+ @Override
+ public void onDataChange(DataSnapshot dataSnapshot) {
+ JSONArray storiesArray = new JSONArray();
+ JSONObject userObject = new JSONObject();
+
+ Log.d(TAG, "getFriendsStories: count: " + count);
+ Log.d(TAG, "getFriendsStories: user: " + mUserAccountSettings.get(count).getDisplay_name());
+ try{
+ if(count != 0){
+ userObject.put(getString(R.string.field_display_name), mUserAccountSettings.get(count).getDisplay_name());
+ userObject.put(getString(R.string.field_username), mUserAccountSettings.get(count).getUsername());
+ userObject.put(getString(R.string.field_profile_photo), mUserAccountSettings.get(count).getProfile_photo());
+ userObject.put(getString(R.string.field_user_id), mUserAccountSettings.get(count).getUser_id());
+ }
+
+ for(DataSnapshot snapshot: dataSnapshot.getChildren()){
+ JSONObject story = new JSONObject();
+ story.put(getString(R.string.field_user_id), snapshot.getValue(Story.class).getUser_id());
+ story.put(getString(R.string.field_timestamp), snapshot.getValue(Story.class).getTimestamp());
+ story.put(getString(R.string.field_image_uri), snapshot.getValue(Story.class).getImage_url());
+ story.put(getString(R.string.field_video_uri), snapshot.getValue(Story.class).getVideo_url());
+ story.put(getString(R.string.field_story_id), snapshot.getValue(Story.class).getStory_id());
+ story.put(getString(R.string.field_views), snapshot.getValue(Story.class).getViews());
+ story.put(getString(R.string.field_duration), snapshot.getValue(Story.class).getDuration());
+
+
+ Log.d(TAG, "getFriendsStories: got a story: " + story.get(getString(R.string.field_user_id)));
+// Log.d(TAG, "getFriendsStories: story: " + story.toString());
+ storiesArray.put(story);
+ }
+
+ JSONObject userSettingsStoryObject = new JSONObject();
+ if(count != 0){
+ userSettingsStoryObject.put(getString(R.string.user_account_settings), userObject);
+ if(storiesArray.length() > 0){
+ userSettingsStoryObject.put(getString(R.string.user_stories), storiesArray);
+ int position = mMasterStoriesArray.length();
+ mMasterStoriesArray.put(position, userSettingsStoryObject);
+ Log.d(TAG, "onDataChange: adding list of stories to position #" + position);
+ }
+ }
+ else {
+ userObject = mMasterStoriesArray.getJSONObject(0).getJSONObject(getString(R.string.user_account_settings));
+ userSettingsStoryObject.put(getString(R.string.user_account_settings), userObject);
+ userSettingsStoryObject.put(getString(R.string.user_stories), storiesArray);
+// int position = mMasterStoriesArray.length() - 1;
+ int position = 0;
+ mMasterStoriesArray.put(position, userSettingsStoryObject);
+ Log.d(TAG, "onDataChange: adding list of stories to position #" + position);
+ }
+
+
+ }catch (JSONException e){
+ e.printStackTrace();
+ }
+
+ if(!dataSnapshot.exists()){
+ Log.d(TAG, "getFriendsStories: no stories could be found.");
+// Log.d(TAG, "getFriendsStories: " + mMasterStoriesArray.toString());
+
+ }
+ if(count == mFollowing.size() - 1){
+ initRecyclerView();
+ }
+
+ }
+
+ @Override
+ public void onCancelled(DatabaseError databaseError) {
+
+ }
+ });
+ }
+
+ }
+
+
+
+ private void initRecyclerView(){
+ Log.d(TAG, "initRecyclerView: init recyclerview.");
+ if(mRecyclerView == null){
+ TextView textView = new TextView(getActivity());
+ textView.setText("Stories");
+ textView.setTextColor(getResources().getColor(R.color.black));
+ textView.setTextSize(14);
+ RelativeLayout.LayoutParams params = new RelativeLayout.LayoutParams(
+ RelativeLayout.LayoutParams.WRAP_CONTENT,
+ RelativeLayout.LayoutParams.WRAP_CONTENT
+ );
+ textView.setLayoutParams(params);
+ mListView.addHeaderView(textView);
+
+ LinearLayoutManager layoutManager = new LinearLayoutManager(getActivity(), LinearLayoutManager.HORIZONTAL, false);
+ mRecyclerView = new RecyclerView(getActivity());
+ mRecyclerView.setLayoutManager(layoutManager);
+ mListView.addHeaderView(mRecyclerView);
+ }
+
+ mStoriesAdapter = new StoriesRecyclerViewAdapter(mMasterStoriesArray, getActivity());
+ mRecyclerView.setAdapter(mStoriesAdapter);
+ }
+
+ private void clearAll(){
+ if(mFollowing != null){
+ mFollowing.clear();
+ }
+ if(mPhotos != null){
+ mPhotos.clear();
+ if(adapter != null){
+ adapter.clear();
+ adapter.notifyDataSetChanged();
+ }
+ }
+ if(mUserAccountSettings != null){
+ mUserAccountSettings.clear();
+ }
+ if(mPaginatedPhotos != null){
+ mPaginatedPhotos.clear();
+ }
+ mMasterStoriesArray = new JSONArray(new ArrayList());
+ if(mStoriesAdapter != null){
+ mStoriesAdapter.notifyDataSetChanged();
+ }
+ if(mRecyclerView != null){
+ mRecyclerView.setAdapter(null);
+ }
+ mFollowing = new ArrayList<>();
+ mPhotos = new ArrayList<>();
+ mPaginatedPhotos = new ArrayList<>();
+ mUserAccountSettings = new ArrayList<>();
+ }
+
+    /**
+     * Retrieve all user id's that the current user is following
+     */
+ private void getFollowing() {
Log.d(TAG, "getFollowing: searching for following");
- DatabaseReference reference = FirebaseDatabase.getInstance().getReference();
- Query query = reference
- .child(getString(R.string.dbname_following))
- .child(FirebaseAuth.getInstance().getCurrentUser().getUid());
+ clearAll();
+ //also add your own id to the list
+ mFollowing.add(FirebaseAuth.getInstance().getCurrentUser().getUid());
+
+ Query query = FirebaseDatabase.getInstance().getReference()
+ .child(getActivity().getString(R.string.dbname_following))
+ .child(FirebaseAuth.getInstance().getCurrentUser().getUid())
+ ;
query.addListenerForSingleValueEvent(new ValueEventListener() {
@Override
public void onDataChange(DataSnapshot dataSnapshot) {
- for(DataSnapshot singleSnapshot : dataSnapshot.getChildren()){
- Log.d(TAG, "onDataChange: found user: " +
- singleSnapshot.child(getString(R.string.field_user_id)).getValue());
+ for (DataSnapshot singleSnapshot : dataSnapshot.getChildren()) {
+ Log.d(TAG, "getFollowing: found user: " + singleSnapshot
+ .child(getString(R.string.field_user_id)).getValue());
- mFollowing.add(singleSnapshot.child(getString(R.string.field_user_id)).getValue().toString());
+ mFollowing.add(singleSnapshot
+ .child(getString(R.string.field_user_id)).getValue().toString());
}
- mFollowing.add(FirebaseAuth.getInstance().getCurrentUser().getUid());
- //get the photos
+
getPhotos();
+// getMyUserAccountSettings();
+ getFriendsAccountSettings();
}
@Override
public void onCancelled(DatabaseError databaseError) {
}
+
});
+
}
private void getPhotos(){
- Log.d(TAG, "getPhotos: getting photos");
- DatabaseReference reference = FirebaseDatabase.getInstance().getReference();
+ Log.d(TAG, "getPhotos: getting list of photos");
+
for(int i = 0; i < mFollowing.size(); i++){
final int count = i;
- Query query = reference
- .child(getString(R.string.dbname_user_photos))
+ Query query = FirebaseDatabase.getInstance().getReference()
+ .child(getActivity().getString(R.string.dbname_user_photos))
.child(mFollowing.get(i))
.orderByChild(getString(R.string.field_user_id))
- .equalTo(mFollowing.get(i));
+ .equalTo(mFollowing.get(i))
+ ;
query.addListenerForSingleValueEvent(new ValueEventListener() {
@Override
public void onDataChange(DataSnapshot dataSnapshot) {
- for(DataSnapshot singleSnapshot : dataSnapshot.getChildren()){
+ for ( DataSnapshot singleSnapshot : dataSnapshot.getChildren()){
- Photo photo = new Photo();
+ Photo newPhoto = new Photo();
Map objectMap = (HashMap) singleSnapshot.getValue();
- photo.setCaption(objectMap.get(getString(R.string.field_caption)).toString());
- photo.setTags(objectMap.get(getString(R.string.field_tags)).toString());
- photo.setPhoto_id(objectMap.get(getString(R.string.field_photo_id)).toString());
- photo.setUser_id(objectMap.get(getString(R.string.field_user_id)).toString());
- photo.setDate_created(objectMap.get(getString(R.string.field_date_created)).toString());
- photo.setImage_path(objectMap.get(getString(R.string.field_image_path)).toString());
+ newPhoto.setCaption(objectMap.get(getString(R.string.field_caption)).toString());
+ newPhoto.setTags(objectMap.get(getString(R.string.field_tags)).toString());
+ newPhoto.setPhoto_id(objectMap.get(getString(R.string.field_photo_id)).toString());
+ newPhoto.setUser_id(objectMap.get(getString(R.string.field_user_id)).toString());
+ newPhoto.setDate_created(objectMap.get(getString(R.string.field_date_created)).toString());
+ newPhoto.setImage_path(objectMap.get(getString(R.string.field_image_path)).toString());
- ArrayList comments = new ArrayList();
+ Log.d(TAG, "getPhotos: photo: " + newPhoto.getPhoto_id());
+ List commentsList = new ArrayList();
for (DataSnapshot dSnapshot : singleSnapshot
.child(getString(R.string.field_comments)).getChildren()){
+ Map object_map = (HashMap) dSnapshot.getValue();
Comment comment = new Comment();
- comment.setUser_id(dSnapshot.getValue(Comment.class).getUser_id());
- comment.setComment(dSnapshot.getValue(Comment.class).getComment());
- comment.setDate_created(dSnapshot.getValue(Comment.class).getDate_created());
- comments.add(comment);
+ comment.setUser_id(object_map.get(getString(R.string.field_user_id)).toString());
+ comment.setComment(object_map.get(getString(R.string.field_comment)).toString());
+ comment.setDate_created(object_map.get(getString(R.string.field_date_created)).toString());
+ commentsList.add(comment);
}
-
- photo.setComments(comments);
- mPhotos.add(photo);
+ newPhoto.setComments(commentsList);
+ mPhotos.add(newPhoto);
}
- if(count >= mFollowing.size() -1){
- //display our photos
+ if(count >= mFollowing.size() - 1){
+ //display the photos
displayPhotos();
}
+
}
@Override
public void onCancelled(DatabaseError databaseError) {
-
+ Log.d(TAG, "onCancelled: query cancelled.");
}
});
+
}
}
private void displayPhotos(){
- mPaginatedPhotos = new ArrayList<>();
+// mPaginatedPhotos = new ArrayList<>();
if(mPhotos != null){
+
try{
+
+ //sort for newest to oldest
Collections.sort(mPhotos, new Comparator() {
- @Override
public int compare(Photo o1, Photo o2) {
return o2.getDate_created().compareTo(o1.getDate_created());
}
});
+ //we want to load 10 at a time. So if there is more than 10, just load 10 to start
int iterations = mPhotos.size();
-
if(iterations > 10){
iterations = 10;
}
-
- mResults = 10;
+//
+ resultsCount = 0;
for(int i = 0; i < iterations; i++){
mPaginatedPhotos.add(mPhotos.get(i));
+ resultsCount++;
+ Log.d(TAG, "displayPhotos: adding a photo to paginated list: " + mPhotos.get(i).getPhoto_id());
}
- mAdapter = new MainfeedListAdapter(getActivity(), R.layout.layout_mainfeed_listitem, mPaginatedPhotos);
- mListView.setAdapter(mAdapter);
+ adapter = new MainFeedListAdapter(getActivity(), R.layout.layout_mainfeed_listitem, mPaginatedPhotos);
+ mListView.setAdapter(adapter);
+
+ // Notify update is done
+ mListView.notifyUpdated();
- }catch (NullPointerException e){
- Log.e(TAG, "displayPhotos: NullPointerException: " + e.getMessage() );
}catch (IndexOutOfBoundsException e){
- Log.e(TAG, "displayPhotos: IndexOutOfBoundsException: " + e.getMessage() );
+ Log.e(TAG, "displayPhotos: IndexOutOfBoundsException:" + e.getMessage() );
+ }catch (NullPointerException e){
+ Log.e(TAG, "displayPhotos: NullPointerException:" + e.getMessage() );
}
}
}
@@ -178,31 +442,33 @@ public void displayMorePhotos(){
try{
- if(mPhotos.size() > mResults && mPhotos.size() > 0){
+ if(mPhotos.size() > resultsCount && mPhotos.size() > 0){
int iterations;
- if(mPhotos.size() > (mResults + 10)){
+ if(mPhotos.size() > (resultsCount + 10)){
Log.d(TAG, "displayMorePhotos: there are greater than 10 more photos");
iterations = 10;
}else{
Log.d(TAG, "displayMorePhotos: there is less than 10 more photos");
- iterations = mPhotos.size() - mResults;
+ iterations = mPhotos.size() - resultsCount;
}
- //add the new photos to the paginated results
- for(int i = mResults; i < mResults + iterations; i++){
+ //add the new photos to the paginated list
+ for(int i = resultsCount; i < resultsCount + iterations; i++){
mPaginatedPhotos.add(mPhotos.get(i));
}
- mResults = mResults + iterations;
- mAdapter.notifyDataSetChanged();
+
+ resultsCount = resultsCount + iterations;
+ adapter.notifyDataSetChanged();
}
- }catch (NullPointerException e){
- Log.e(TAG, "displayPhotos: NullPointerException: " + e.getMessage() );
}catch (IndexOutOfBoundsException e){
- Log.e(TAG, "displayPhotos: IndexOutOfBoundsException: " + e.getMessage() );
+ Log.e(TAG, "displayPhotos: IndexOutOfBoundsException:" + e.getMessage() );
+ }catch (NullPointerException e){
+ Log.e(TAG, "displayPhotos: NullPointerException:" + e.getMessage() );
}
}
+
}
diff --git a/app/src/main/java/tabian/com/instagramclone2/Login/LoginActivity.java b/app/src/main/java/tabian/com/instagramclone2/Login/LoginActivity.java
index 253a949..9305036 100644
--- a/app/src/main/java/tabian/com/instagramclone2/Login/LoginActivity.java
+++ b/app/src/main/java/tabian/com/instagramclone2/Login/LoginActivity.java
@@ -31,6 +31,7 @@
public class LoginActivity extends AppCompatActivity {
private static final String TAG = "LoginActivity";
+ private static final Boolean CHECK_IF_VERIFIED = false;
//firebase
private FirebaseAuth mAuth;
@@ -114,16 +115,24 @@ public void onComplete(@NonNull Task task) {
}
else{
try{
- if(user.isEmailVerified()){
+ if(CHECK_IF_VERIFIED){
+ if(user.isEmailVerified()){
+ Log.d(TAG, "onComplete: success. email is verified.");
+ Intent intent = new Intent(LoginActivity.this, HomeActivity.class);
+ startActivity(intent);
+ }else{
+ Toast.makeText(mContext, "Email is not verified \n check your email inbox.", Toast.LENGTH_SHORT).show();
+ mProgressBar.setVisibility(View.GONE);
+ mPleaseWait.setVisibility(View.GONE);
+ mAuth.signOut();
+ }
+ }
+ else{
Log.d(TAG, "onComplete: success. email is verified.");
Intent intent = new Intent(LoginActivity.this, HomeActivity.class);
startActivity(intent);
- }else{
- Toast.makeText(mContext, "Email is not verified \n check your email inbox.", Toast.LENGTH_SHORT).show();
- mProgressBar.setVisibility(View.GONE);
- mPleaseWait.setVisibility(View.GONE);
- mAuth.signOut();
}
+
}catch (NullPointerException e){
Log.e(TAG, "onComplete: NullPointerException: " + e.getMessage() );
}
diff --git a/app/src/main/java/tabian/com/instagramclone2/Profile/ProfileFragment.java b/app/src/main/java/tabian/com/instagramclone2/Profile/ProfileFragment.java
index cd2bd46..4ea4348 100644
--- a/app/src/main/java/tabian/com/instagramclone2/Profile/ProfileFragment.java
+++ b/app/src/main/java/tabian/com/instagramclone2/Profile/ProfileFragment.java
@@ -21,6 +21,7 @@
import android.widget.TextView;
+import com.bumptech.glide.Glide;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.auth.FirebaseUser;
import com.google.firebase.database.DataSnapshot;
@@ -306,6 +307,10 @@ private void setProfileWidgets(UserSettings userSettings) {
UniversalImageLoader.setImage(settings.getProfile_photo(), mProfilePhoto, null, "");
+// Glide.with(getActivity())
+// .load(settings.getProfile_photo())
+// .into(mProfilePhoto);
+
mDisplayName.setText(settings.getDisplay_name());
mUsername.setText(settings.getUsername());
mWebsite.setText(settings.getWebsite());
diff --git a/app/src/main/java/tabian/com/instagramclone2/Utils/FilePaths.java b/app/src/main/java/tabian/com/instagramclone2/Utils/FilePaths.java
index 6626eab..e52dd78 100644
--- a/app/src/main/java/tabian/com/instagramclone2/Utils/FilePaths.java
+++ b/app/src/main/java/tabian/com/instagramclone2/Utils/FilePaths.java
@@ -14,7 +14,9 @@ public class FilePaths {
public String PICTURES = ROOT_DIR + "/Pictures";
public String CAMERA = ROOT_DIR + "/DCIM/camera";
+ public String STORIES = ROOT_DIR + "/Stories";
+ public String FIREBASE_STORY_STORAGE = "stories/users";
public String FIREBASE_IMAGE_STORAGE = "photos/users/";
}
diff --git a/app/src/main/java/tabian/com/instagramclone2/Utils/FirebaseMethods.java b/app/src/main/java/tabian/com/instagramclone2/Utils/FirebaseMethods.java
index defa7cb..48f5100 100644
--- a/app/src/main/java/tabian/com/instagramclone2/Utils/FirebaseMethods.java
+++ b/app/src/main/java/tabian/com/instagramclone2/Utils/FirebaseMethods.java
@@ -5,7 +5,9 @@
import android.content.Intent;
import android.graphics.Bitmap;
import android.net.Uri;
+import android.os.AsyncTask;
import android.support.annotation.NonNull;
+import android.support.annotation.Nullable;
import android.support.v4.app.FragmentManager;
import android.util.Log;
import android.widget.Toast;
@@ -28,6 +30,11 @@
import com.google.firebase.storage.StorageReference;
import com.google.firebase.storage.UploadTask;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
@@ -35,9 +42,12 @@
import java.util.TimeZone;
import tabian.com.instagramclone2.Home.HomeActivity;
+import tabian.com.instagramclone2.Home.HomeFragment;
import tabian.com.instagramclone2.Profile.AccountSettingsActivity;
import tabian.com.instagramclone2.R;
+import tabian.com.instagramclone2.materialcamera.MaterialCamera;
import tabian.com.instagramclone2.models.Photo;
+import tabian.com.instagramclone2.models.Story;
import tabian.com.instagramclone2.models.User;
import tabian.com.instagramclone2.models.UserAccountSettings;
import tabian.com.instagramclone2.models.UserSettings;
@@ -189,6 +199,197 @@ public void onProgress(UploadTask.TaskSnapshot taskSnapshot) {
}
+ public void uploadNewStory(Intent intent, final HomeFragment fragment){
+ Log.d(TAG, "uploadNewStory: attempting to upload new story to storage.");
+
+ final String uri = intent.getDataString();
+ final boolean deleteCompressedVideo = intent.getBooleanExtra(MaterialCamera.DELETE_UPLOAD_FILE_EXTRA, false);
+ /*
+ upload a new photo to firebase storage
+ */
+ if(!isMediaVideo(uri)){
+ Log.d(TAG, "uploadNewStory: uploading new story (IMAGE) to firebase storage.");
+ fragment.mStoriesAdapter.startProgressBar();
+ FilePaths filePaths = new FilePaths();
+
+ //specify where the photo will be stored
+ final StorageReference storageReference = mStorageReference
+ .child(filePaths.FIREBASE_STORY_STORAGE + "/" + userID + "/" + uri.substring(uri.indexOf("Stories/") + 8, uri.indexOf(".")));
+
+ BackgroundGetBytesFromBitmap getBytes = new BackgroundGetBytesFromBitmap();
+ byte[] bytes = getBytes.doInBackground(uri);
+
+ UploadTask uploadTask = null;
+ uploadTask = storageReference.putBytes(bytes);
+ uploadTask.addOnSuccessListener(new OnSuccessListener() {
+ @Override
+ public void onSuccess(UploadTask.TaskSnapshot taskSnapshot) {
+ Uri firebaseURL = taskSnapshot.getDownloadUrl();
+ fragment.mStoriesAdapter.stopProgressBar();
+ Toast.makeText(mContext, "Upload Success", Toast.LENGTH_SHORT).show();
+ addNewStoryImageToDatabase(firebaseURL.toString());
+ }
+ }).addOnFailureListener(new OnFailureListener() {
+ @Override
+ public void onFailure(@NonNull Exception exception) {
+ fragment.mStoriesAdapter.stopProgressBar();
+ Toast.makeText(mContext, "Upload Failed", Toast.LENGTH_SHORT).show();
+ }
+ });
+
+ }
+ else{
+ Log.d(TAG, "uploadNewStory: uploading new story (VIDEO) to firebase storage.");
+ fragment.mStoriesAdapter.startProgressBar();
+ FilePaths filePaths = new FilePaths();
+
+ //specify where the photo will be stored
+ final StorageReference storageReference = mStorageReference
+ .child(filePaths.FIREBASE_STORY_STORAGE + "/" + userID + "/" + uri.substring(uri.indexOf("Stories/") + 8, uri.indexOf(".")));
+
+
+ FileInputStream fis = null;
+ File file = new File(uri);
+ try {
+ fis = new FileInputStream(file);
+ } catch (FileNotFoundException e) {
+ e.printStackTrace();
+ }
+ byte[] bytes = new byte[0];
+ try {
+ bytes = readBytes(fis);
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+
+ Log.d(TAG, "uploadNewStory: video upload bytes: " + bytes.length);
+ final byte[] uploadBytes = bytes;
+
+ UploadTask uploadTask = null;
+ uploadTask = storageReference.putBytes(bytes);
+ uploadTask.addOnSuccessListener(new OnSuccessListener() {
+ @Override
+ public void onSuccess(UploadTask.TaskSnapshot taskSnapshot) {
+ Uri firebaseURL = taskSnapshot.getDownloadUrl();
+ fragment.mStoriesAdapter.stopProgressBar();
+ Toast.makeText(mContext, "Upload Success", Toast.LENGTH_SHORT).show();
+ addNewStoryVideoToDatabase(firebaseURL.toString(), uploadBytes);
+
+ if(deleteCompressedVideo){
+ deleteOutputFile(uri);
+ }
+ }
+ }).addOnFailureListener(new OnFailureListener() {
+ @Override
+ public void onFailure(@NonNull Exception exception) {
+ fragment.mStoriesAdapter.stopProgressBar();
+ Toast.makeText(mContext, "Upload Failed", Toast.LENGTH_SHORT).show();
+ if(deleteCompressedVideo){
+ deleteOutputFile(uri);
+ }
+ }
+ });
+ }
+ }
+
+ private class BackgroundGetBytesFromBitmap extends AsyncTask {
+
+ @Override
+ protected byte[] doInBackground(String... params) {
+ byte[] bytes = null;
+
+// Bitmap bm = ImageManager.getBitmap(Uri.parse(params[0]).getPath());
+ Bitmap bm = null;
+ try{
+ RotateBitmap rotateBitmap = new RotateBitmap();
+ bm = rotateBitmap.HandleSamplingAndRotationBitmap(mContext, Uri.parse("file://" + params[0]));
+ }catch (IOException e){
+ Log.e(TAG, "BackgroundGetBytesFromBitmap: IOException: " + e.getMessage());
+ }
+
+ bytes = ImageManager.getBytesFromBitmap(bm, ImageManager.IMAGE_SAVE_QUALITY);
+ return bytes;
+ }
+ }
+
+
+ private void deleteOutputFile(@Nullable String uri) {
+ if (uri != null)
+ //noinspection ResultOfMethodCallIgnored
+ new File(Uri.parse(uri).getPath()).delete();
+ }
+
+
+ public byte[] readBytes(FileInputStream inputStream) throws IOException {
+ // this dynamically extends to take the bytes you read
+ ByteArrayOutputStream byteBuffer = new ByteArrayOutputStream();
+
+ // this is storage overwritten on each iteration with bytes
+ int bufferSize = 1024;
+ byte[] buffer = new byte[bufferSize];
+
+ // we need to know how many bytes were read to write them to the byteBuffer
+ int len = 0;
+ while ((len = inputStream.read(buffer)) != -1) {
+ byteBuffer.write(buffer, 0, len);
+ }
+
+ // and then we can return the byte array.
+ return byteBuffer.toByteArray();
+ }
+
+
+ private void addNewStoryImageToDatabase(String url){
+ Log.d(TAG, "addNewStoryToDatabase: adding new story to database.");
+
+ Story story = new Story();
+ story.setImage_url(url);
+ String newKey = myRef.push().getKey();
+ story.setStory_id(newKey);
+ story.setTimestamp(getTimestamp());
+ story.setUser_id(userID);
+ story.setViews("0");
+
+ myRef.child(mContext.getString(R.string.dbname_stories))
+ .child(userID)
+ .child(newKey)
+ .setValue(story);
+
+ }
+
+ private void addNewStoryVideoToDatabase(String url, byte[] bytes){
+ Log.d(TAG, "addNewStoryToDatabase: adding new story to database.");
+
+ Story story = new Story();
+ story.setVideo_url(url);
+ String newKey = myRef.push().getKey();
+ story.setStory_id(newKey);
+ story.setTimestamp(getTimestamp());
+ story.setUser_id(userID);
+ story.setViews("0");
+
+ // calculate the estimated duration.
+ // need to do this for the progress bars in the block. We can't get the video duration of MP4 files
+ double megabytes = bytes.length / 1000000.000;
+ Log.d(TAG, "addNewStoryVideoToDatabase: estimated MB: " + megabytes);
+ String duration = String.valueOf(Math.round(15 * (megabytes / 6.3)));
+ Log.d(TAG, "addNewStoryVideoToDatabase: estimated video duration: " + duration);
+ story.setDuration(duration);
+
+ myRef.child(mContext.getString(R.string.dbname_stories))
+ .child(userID)
+ .child(newKey)
+ .setValue(story);
+
+ }
+
+ private boolean isMediaVideo(String uri){
+ if(uri.contains(".mp4") || uri.contains(".wmv") || uri.contains(".flv") || uri.contains(".avi")){
+ return true;
+ }
+ return false;
+ }
+
private void setProfilePhoto(String url){
Log.d(TAG, "setProfilePhoto: setting new profile image: " + url);
diff --git a/app/src/main/java/tabian/com/instagramclone2/Utils/ImageManager.java b/app/src/main/java/tabian/com/instagramclone2/Utils/ImageManager.java
index 455ebf9..2ad582f 100644
--- a/app/src/main/java/tabian/com/instagramclone2/Utils/ImageManager.java
+++ b/app/src/main/java/tabian/com/instagramclone2/Utils/ImageManager.java
@@ -17,6 +17,7 @@
public class ImageManager {
private static final String TAG = "ImageManager";
+ public static final int IMAGE_SAVE_QUALITY = 90;
public static Bitmap getBitmap(String imgUrl){
File imageFile = new File(imgUrl);
diff --git a/app/src/main/java/tabian/com/instagramclone2/Utils/MainfeedListAdapter.java b/app/src/main/java/tabian/com/instagramclone2/Utils/MainfeedListAdapter.java
index c25e703..6ff84a5 100644
--- a/app/src/main/java/tabian/com/instagramclone2/Utils/MainfeedListAdapter.java
+++ b/app/src/main/java/tabian/com/instagramclone2/Utils/MainfeedListAdapter.java
@@ -49,14 +49,14 @@
* Created by User on 9/22/2017.
*/
-public class MainfeedListAdapter extends ArrayAdapter {
+public class MainFeedListAdapter extends ArrayAdapter {
public interface OnLoadMoreItemsListener{
void onLoadMoreItems();
}
OnLoadMoreItemsListener mOnLoadMoreItemsListener;
- private static final String TAG = "MainfeedListAdapter";
+ private static final String TAG = "MainFeedListAdapter";
private LayoutInflater mInflater;
private int mLayoutResource;
@@ -64,7 +64,7 @@ public interface OnLoadMoreItemsListener{
private DatabaseReference mReference;
private String currentUsername = "";
- public MainfeedListAdapter(@NonNull Context context, @LayoutRes int resource, @NonNull List objects) {
+ public MainFeedListAdapter(@NonNull Context context, @LayoutRes int resource, @NonNull List objects) {
super(context, resource, objects);
mInflater = (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
mLayoutResource = resource;
@@ -72,7 +72,7 @@ public MainfeedListAdapter(@NonNull Context context, @LayoutRes int resource, @N
mReference = FirebaseDatabase.getInstance().getReference();
// for(Photo photo: objects){
-// Log.d(TAG, "MainfeedListAdapter: photo id: " + photo.getPhoto_id());
+// Log.d(TAG, "MainFeedListAdapter: photo id: " + photo.getPhoto_id());
// }
}
diff --git a/app/src/main/java/tabian/com/instagramclone2/Utils/RotateBitmap.java b/app/src/main/java/tabian/com/instagramclone2/Utils/RotateBitmap.java
new file mode 100644
index 0000000..7f5c691
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/Utils/RotateBitmap.java
@@ -0,0 +1,137 @@
+package tabian.com.instagramclone2.Utils;
+
+import android.content.Context;
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import android.graphics.Matrix;
+import android.net.Uri;
+import android.support.media.ExifInterface;
+import android.util.Log;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+/**
+ * Created by User on 10/29/2017.
+ */
+
+public class RotateBitmap {
+
+ private static final String TAG = "RotateBitmap";
+
+ private Context mContext;
+
+ /*
+ ----------------------------- Image Rotation --------------------------------------------------
+ */
+
+ private static Bitmap rotateImage(Bitmap img, int degree) {
+ Matrix matrix = new Matrix();
+ matrix.postRotate(degree);
+ Bitmap rotatedImg = Bitmap.createBitmap(img, 0, 0, img.getWidth(), img.getHeight(), matrix, true);
+ img.recycle();
+ return rotatedImg;
+ }
+
+ /**
+ * This method is responsible for solving the rotation issue if it exists. It also scales the
+ * image down to a maximum of 1024x1024 resolution.
+ *
+ * @param selectedImage The Image URI
+ * @return Bitmap image results
+ * @throws IOException
+ */
+ public Bitmap HandleSamplingAndRotationBitmap(Context context, Uri selectedImage)
+ throws IOException {
+ mContext = context;
+ int MAX_HEIGHT = 1024;
+ int MAX_WIDTH = 1024;
+
+ // First decode with inJustDecodeBounds=true to check dimensions
+ final BitmapFactory.Options options = new BitmapFactory.Options();
+ options.inJustDecodeBounds = true;
+ InputStream imageStream = context.getContentResolver().openInputStream(selectedImage);
+ BitmapFactory.decodeStream(imageStream, null, options);
+ imageStream.close();
+
+ // Calculate inSampleSize
+ options.inSampleSize = calculateInSampleSize(options, MAX_WIDTH, MAX_HEIGHT);
+
+ // Decode bitmap with inSampleSize set
+ options.inJustDecodeBounds = false;
+ imageStream = context.getContentResolver().openInputStream(selectedImage);
+
+ Bitmap img = BitmapFactory.decodeStream(imageStream, null, options);
+
+ img = rotateImageIfRequired(img, selectedImage);
+ return img;
+ }
+
+
+ private static int calculateInSampleSize(BitmapFactory.Options options,
+ int reqWidth, int reqHeight) {
+ // Raw height and width of image
+ final int height = options.outHeight;
+ final int width = options.outWidth;
+ int inSampleSize = 1;
+
+ if (height > reqHeight || width > reqWidth) {
+
+ // Calculate ratios of height and width to requested height and width
+ final int heightRatio = Math.round((float) height / (float) reqHeight);
+ final int widthRatio = Math.round((float) width / (float) reqWidth);
+
+ // Choose the smallest ratio as inSampleSize value, this will guarantee a final image
+ // with both dimensions larger than or equal to the requested height and width.
+ inSampleSize = heightRatio < widthRatio ? heightRatio : widthRatio;
+
+ // This offers some additional logic in case the image has a strange
+ // aspect ratio. For example, a panorama may have a much larger
+ // width than height. In these cases the total pixels might still
+ // end up being too large to fit comfortably in memory, so we should
+ // be more aggressive with sampling down the image (=larger inSampleSize).
+
+ final float totalPixels = width * height;
+
+ // Anything more than 2x the requested pixels we'll sample down further
+ final float totalReqPixelsCap = reqWidth * reqHeight * 2;
+
+ while (totalPixels / (inSampleSize * inSampleSize) > totalReqPixelsCap) {
+ inSampleSize++;
+ }
+ }
+ return inSampleSize;
+ }
+
+ /**
+ * Rotate an image if required.
+ *
+ * @param img The image bitmap
+ * @param selectedImage Image URI
+ * @return The resulted Bitmap after manipulation
+ */
+ private Bitmap rotateImageIfRequired(Bitmap img, Uri selectedImage) throws IOException {
+
+ InputStream input = mContext.getContentResolver().openInputStream(selectedImage);
+ ExifInterface ei;
+ try {
+ ei = new ExifInterface(input);
+
+ int orientation = ei.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
+
+ switch (orientation) {
+ case ExifInterface.ORIENTATION_ROTATE_90:
+ return rotateImage(img, 90);
+ case ExifInterface.ORIENTATION_ROTATE_180:
+ return rotateImage(img, 180);
+ case ExifInterface.ORIENTATION_ROTATE_270:
+ return rotateImage(img, 270);
+ default:
+ return img;
+ }
+ } catch (NullPointerException e) {
+ Log.e(TAG, "rotateImageIfRequired: Could not read file." + e.getMessage());
+ }
+ return img;
+ }
+}
diff --git a/app/src/main/java/tabian/com/instagramclone2/Utils/SectionsPagerAdapter.java b/app/src/main/java/tabian/com/instagramclone2/Utils/SectionsPagerAdapter.java
index ae89aeb..2b29491 100644
--- a/app/src/main/java/tabian/com/instagramclone2/Utils/SectionsPagerAdapter.java
+++ b/app/src/main/java/tabian/com/instagramclone2/Utils/SectionsPagerAdapter.java
@@ -7,9 +7,6 @@
import java.util.ArrayList;
import java.util.List;
-/**
- * Created by User on 5/28/2017.
- */
/**
* Class that stores fragments for tabs
diff --git a/app/src/main/java/tabian/com/instagramclone2/Utils/StoriesRecyclerViewAdapter.java b/app/src/main/java/tabian/com/instagramclone2/Utils/StoriesRecyclerViewAdapter.java
new file mode 100644
index 0000000..95cced9
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/Utils/StoriesRecyclerViewAdapter.java
@@ -0,0 +1,254 @@
+package tabian.com.instagramclone2.Utils;
+
+import android.content.Context;
+import android.content.Intent;
+import android.os.Handler;
+import android.support.v7.widget.RecyclerView;
+import android.util.Log;
+import android.view.LayoutInflater;
+import android.view.MotionEvent;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.ImageView;
+import android.widget.ProgressBar;
+import android.widget.TextView;
+
+import com.bumptech.glide.Glide;
+import com.google.firebase.auth.FirebaseAuth;
+
+import org.json.JSONArray;
+import org.json.JSONException;
+
+import java.util.HashMap;
+
+import de.hdodenhof.circleimageview.CircleImageView;
+import tabian.com.instagramclone2.Home.HomeActivity;
+import tabian.com.instagramclone2.R;
+import tabian.com.instagramclone2.opengl.OpenGLES10Activity;
+
+
+/**
+ * Created by User on 12/27/2017.
+ */
+
+public class StoriesRecyclerViewAdapter extends RecyclerView.Adapter {
+
+ private static final String TAG = "RecyclerViewAdapter";
+ private static final int CLICK_DURATION = 500;
+ private static final int NEW_STORY_REQUEST = 2349;
+
+ //vars
+ private HashMap mViewHolders = new HashMap<>();
+// private ArrayList mUserStories = new ArrayList<>();
+ private JSONArray mMasterStoriesArray = new JSONArray();
+ private Runnable mOnTouchRunnable;
+ private boolean isRunning = false;
+ private Context mContext;
+ private boolean down = false;
+ private boolean up = false;
+ private float x1 = 0;
+ private float y1 = 0;
+ private float x2 = 0;
+ private float y2 = 0;
+// private long t1 = 0;
+// private long t2 = 0;
+ private long runningTime = 0;
+
+
+ public StoriesRecyclerViewAdapter(JSONArray masterStoriesArray, Context context) {
+ mMasterStoriesArray = masterStoriesArray;
+ mContext = context;
+ }
+
+ @Override
+ public ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
+ View view = LayoutInflater.from(parent.getContext()).inflate(R.layout.layout_stories_listitem, parent, false);
+ ViewHolder holder = new ViewHolder(view);
+ return holder;
+ }
+
+ @Override
+ public void onBindViewHolder(final ViewHolder holder, final int position) {
+ Log.d(TAG, "onBindViewHolder: called.");
+
+ try{
+ if(mMasterStoriesArray.getJSONObject(position).getJSONObject(mContext.getString(R.string.user_account_settings))
+ .get(mContext.getString(R.string.field_user_id)).equals(FirebaseAuth.getInstance().getCurrentUser().getUid())){
+ int numStories = 0;
+ try{
+ numStories = mMasterStoriesArray.getJSONObject(position).getJSONArray(mContext.getString(R.string.user_stories)).length();
+ }catch (JSONException e){
+ Log.e(TAG, "onBindViewHolder: authenticated user has no stories.");
+ }
+ Log.d(TAG, "onBindViewHolder: user: " + FirebaseAuth.getInstance().getCurrentUser().getUid());
+ Log.d(TAG, "onBindViewHolder: number of stories for this user: " + numStories);
+ if(numStories == 0){
+ Log.d(TAG, "onBindViewHolder: no stories for authenticated user.");
+ holder.plusIcon.setVisibility(View.VISIBLE);
+ holder.layout.setBackground(null);
+ }
+ else{
+ Log.d(TAG, "onBindViewHolder: found stories for authenticated user.");
+ holder.hasStories = true;
+ holder.plusIcon.setVisibility(View.INVISIBLE);
+ holder.layout.setBackground(mContext.getResources().getDrawable(R.drawable.circle_grey));
+ }
+ }
+ else{
+ Log.d(TAG, "onBindViewHolder: not the authenticated user.");
+ holder.plusIcon.setVisibility(View.INVISIBLE);
+ holder.layout.setBackground(mContext.getResources().getDrawable(R.drawable.circle_red));
+ }
+
+
+ mViewHolders.put(position, holder);
+
+ Glide.with(mContext)
+ .asBitmap()
+ .load(mMasterStoriesArray.getJSONObject(position).getJSONObject(mContext.getString(R.string.user_account_settings))
+ .get(mContext.getString(R.string.field_profile_photo)))
+ .into(holder.image);
+
+ holder.name.setText(mMasterStoriesArray.getJSONObject(position).getJSONObject(mContext.getString(R.string.user_account_settings))
+ .get(mContext.getString(R.string.field_username)).toString());
+
+
+ }catch (JSONException e){
+ e.printStackTrace();
+ }
+ holder.layout.setOnTouchListener(new View.OnTouchListener() {
+
+ @Override
+ public boolean onTouch(View view, final MotionEvent event) {
+
+ switch (event.getAction()) {
+
+ case MotionEvent.ACTION_UP:
+ Log.d(TAG, "onTouch: ACTION UP.");
+ if(down){
+ up = true;
+ down = false;
+ x2 = event.getX();
+ y2 = event.getY();
+ runningTime = 0;
+ }
+
+ case MotionEvent.ACTION_DOWN:
+ if(!up){
+ Log.d(TAG, "onTouch: ACTION DOWN");
+ x1 = event.getX();
+ y1 = event.getY();
+// t1 = System.currentTimeMillis();
+// Log.d(TAG, "onTouch: t1: " + t1);
+ down = true;
+ if(!isRunning){
+ isRunning = true;
+ final Handler handler = new Handler();
+ mOnTouchRunnable = new Runnable() {
+ @Override
+ public void run() {
+ if(isRunning){
+ handler.postDelayed(mOnTouchRunnable, 200);
+ try{
+
+ if(runningTime >= CLICK_DURATION){
+ Log.d(TAG, "onTouch: long click. opening add to story dialog.");
+ isRunning = false;
+ if(mMasterStoriesArray.getJSONObject(position).getJSONObject(mContext.getString(R.string.user_account_settings))
+ .get(mContext.getString(R.string.field_user_id)).equals(FirebaseAuth.getInstance().getCurrentUser().getUid())){
+ ((HomeActivity)mContext).showAddToStoryDialog();
+ }
+ }
+ else{
+ runningTime += 200;
+ }
+ if (x1 == x2 && y1 == y2 && runningTime < CLICK_DURATION && isRunning) {
+ String userId = mMasterStoriesArray.getJSONObject(position)
+ .getJSONObject(mContext.getString(R.string.user_account_settings)).get(mContext.getString(R.string.field_user_id)).toString();
+ Log.d(TAG, "onTouch: clicked on: " + mMasterStoriesArray.getJSONObject(position)
+ .getJSONObject(mContext.getString(R.string.user_account_settings)).get(mContext.getString(R.string.field_username)));
+
+ if(userId.equals(FirebaseAuth.getInstance().getCurrentUser().getUid()) && holder.hasStories){
+ Intent intent = new Intent(mContext, OpenGLES10Activity.class);
+ intent.putExtra(mContext.getString(R.string.user_stories), mMasterStoriesArray.toString());
+ intent.putExtra(mContext.getString(R.string.resource_index), position);
+ mContext.startActivity(intent);
+ isRunning = false;
+ }
+ else if(userId.equals(FirebaseAuth.getInstance().getCurrentUser().getUid()) && !holder.hasStories){
+ ((HomeActivity)mContext).showAddToStoryDialog();
+ isRunning = false;
+ }
+ else if(!userId.equals(FirebaseAuth.getInstance().getCurrentUser().getUid())){
+ Intent intent = new Intent(mContext, OpenGLES10Activity.class);
+ intent.putExtra(mContext.getString(R.string.user_stories), mMasterStoriesArray.toString());
+ intent.putExtra(mContext.getString(R.string.resource_index), position);
+ mContext.startActivity(intent);
+ isRunning = false;
+ }
+
+
+ }
+ }catch (JSONException e){
+ e.printStackTrace();
+ }
+ }
+ }
+ };
+ mOnTouchRunnable.run();
+ }
+ }
+ else{
+ up = false;
+ }
+
+
+ return true;
+ }
+
+ return false;
+ }
+ });
+
+
+ }
+
+ @Override
+ public int getItemCount() {
+// return mUserStories.size();
+ return mMasterStoriesArray.length();
+ }
+
+ public void startProgressBar(){
+ Log.d(TAG, "startProgressBar: starting story upload progress bar.");
+ mViewHolders.get(0).progressBar.setVisibility(View.VISIBLE);
+ }
+
+ public void stopProgressBar(){
+ Log.d(TAG, "stopProgressBar: stopping story upload progress bar.");
+ mViewHolders.get(0).progressBar.setVisibility(View.GONE);
+ mViewHolders.get(0).plusIcon.setVisibility(View.GONE);
+ mViewHolders.get(0).layout.setBackground(mContext.getResources().getDrawable(R.drawable.circle_grey));
+ }
+
+ public class ViewHolder extends RecyclerView.ViewHolder {
+
+ CircleImageView image;
+ TextView name;
+ ImageView plusIcon;
+ TouchableRelativeLayout layout;
+ ProgressBar progressBar;
+ Boolean hasStories;
+
+ public ViewHolder(View itemView) {
+ super(itemView);
+ image = itemView.findViewById(R.id.image);
+ name = itemView.findViewById(R.id.name);
+ plusIcon = itemView.findViewById(R.id.plus_icon);
+ layout = itemView.findViewById(R.id.relLayout1);
+ progressBar = itemView.findViewById(R.id.story_upload_progress_bar);
+ hasStories = false;
+ }
+ }
+}
+
diff --git a/app/src/main/java/tabian/com/instagramclone2/Utils/TouchableRelativeLayout.java b/app/src/main/java/tabian/com/instagramclone2/Utils/TouchableRelativeLayout.java
new file mode 100644
index 0000000..aad0806
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/Utils/TouchableRelativeLayout.java
@@ -0,0 +1,28 @@
+package tabian.com.instagramclone2.Utils;
+
+import android.content.Context;
+import android.util.AttributeSet;
+import android.widget.RelativeLayout;
+
+/**
+ * Created by User on 1/15/2018.
+ */
+
+public class TouchableRelativeLayout extends RelativeLayout {
+ public TouchableRelativeLayout(Context context) {
+ super(context);
+ }
+
+ public TouchableRelativeLayout(Context context, AttributeSet attrs) {
+ super(context, attrs);
+ }
+
+ public TouchableRelativeLayout(Context context, AttributeSet attrs, int defStyleAttr) {
+ super(context, attrs, defStyleAttr);
+ }
+
+ @Override
+ public boolean performClick() {
+ return super.performClick();
+ }
+}
diff --git a/app/src/main/java/tabian/com/instagramclone2/easyvideoplayer/EasyVideoCallback.java b/app/src/main/java/tabian/com/instagramclone2/easyvideoplayer/EasyVideoCallback.java
new file mode 100644
index 0000000..6c79e16
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/easyvideoplayer/EasyVideoCallback.java
@@ -0,0 +1,31 @@
+package tabian.com.instagramclone2.easyvideoplayer;
+
+import android.net.Uri;
+
+/** @author Aidan Follestad (afollestad) */
+public interface EasyVideoCallback {
+
+ void onStarted(EasyVideoPlayer player);
+
+ void onPaused(EasyVideoPlayer player);
+
+ void onPreparing(EasyVideoPlayer player);
+
+ void onPrepared(EasyVideoPlayer player);
+
+ void onBuffering(int percent);
+
+ void onError(EasyVideoPlayer player, Exception e);
+
+ void onCompletion(EasyVideoPlayer player);
+
+ void onRetry(EasyVideoPlayer player, Uri source);
+
+ void onSubmit(EasyVideoPlayer player, Uri source);
+
+ void onClickVideoFrame(EasyVideoPlayer player);
+
+ void addToStory(EasyVideoPlayer player, Uri source);
+
+ void saveStory(EasyVideoPlayer player, Uri source);
+}
diff --git a/app/src/main/java/tabian/com/instagramclone2/easyvideoplayer/EasyVideoPlayer.java b/app/src/main/java/tabian/com/instagramclone2/easyvideoplayer/EasyVideoPlayer.java
new file mode 100644
index 0000000..a88ee3f
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/easyvideoplayer/EasyVideoPlayer.java
@@ -0,0 +1,1150 @@
+package tabian.com.instagramclone2.easyvideoplayer;
+
+import android.annotation.TargetApi;
+import android.content.Context;
+import android.content.res.AssetFileDescriptor;
+import android.content.res.ColorStateList;
+import android.content.res.TypedArray;
+import android.graphics.Color;
+import android.graphics.Matrix;
+import android.graphics.PorterDuff;
+import android.graphics.SurfaceTexture;
+import android.graphics.drawable.Drawable;
+import android.graphics.drawable.RippleDrawable;
+import android.media.AudioManager;
+import android.media.MediaPlayer;
+import android.net.Uri;
+import android.os.Build;
+import android.os.Handler;
+import android.support.annotation.CheckResult;
+import android.support.annotation.ColorInt;
+import android.support.annotation.ColorRes;
+import android.support.annotation.DrawableRes;
+import android.support.annotation.FloatRange;
+import android.support.annotation.IntDef;
+import android.support.annotation.IntRange;
+import android.support.annotation.NonNull;
+import android.support.annotation.Nullable;
+import android.support.annotation.StringRes;
+import android.support.v4.content.ContextCompat;
+import android.support.v4.graphics.drawable.DrawableCompat;
+import android.support.v4.view.ViewCompat;
+import android.support.v7.app.AppCompatDelegate;
+import android.support.v7.content.res.AppCompatResources;
+import android.util.AttributeSet;
+import android.util.Log;
+import android.view.Gravity;
+import android.view.LayoutInflater;
+import android.view.Surface;
+import android.view.TextureView;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.FrameLayout;
+import android.widget.RelativeLayout;
+import android.widget.SeekBar;
+
+import java.io.File;
+import java.io.IOException;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+import tabian.com.instagramclone2.R;
+
+
+/** @author Aidan Follestad (afollestad) */
+public class EasyVideoPlayer extends FrameLayout
+ implements IUserMethods,
+ TextureView.SurfaceTextureListener,
+ MediaPlayer.OnPreparedListener,
+ MediaPlayer.OnBufferingUpdateListener,
+ MediaPlayer.OnCompletionListener,
+ MediaPlayer.OnVideoSizeChangedListener,
+ MediaPlayer.OnErrorListener,
+ View.OnClickListener,
+ SeekBar.OnSeekBarChangeListener {
+
+ private static final String TAG = "EasyVideoPlayer";
+
+ // Restricts setLeftAction() arguments to the LEFT_ACTION_* constants at compile time.
+ @IntDef({LEFT_ACTION_NONE, LEFT_ACTION_RESTART, LEFT_ACTION_RETRY})
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface LeftAction {}
+
+ // Restricts setRightAction() arguments to the RIGHT_ACTION_* constants at compile time.
+ @IntDef({RIGHT_ACTION_NONE, RIGHT_ACTION_SUBMIT, RIGHT_ACTION_CUSTOM_LABEL})
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface RightAction {}
+
+ // Left/right action values; the two ranges are disjoint so a left value can
+ // never be mistaken for a right value.
+ public static final int LEFT_ACTION_NONE = 0;
+ public static final int LEFT_ACTION_RESTART = 1;
+ public static final int LEFT_ACTION_RETRY = 2;
+ public static final int RIGHT_ACTION_NONE = 3;
+ public static final int RIGHT_ACTION_SUBMIT = 4;
+ public static final int RIGHT_ACTION_CUSTOM_LABEL = 5;
+ // Poll period (ms) used by mUpdateCounters to report playback progress.
+ private static final int UPDATE_INTERVAL = 100;
+
+ // Programmatic construction (no XML attributes).
+ public EasyVideoPlayer(Context context) {
+ super(context);
+ init(context, null);
+ }
+
+ // Construction during XML inflation; evp_* attributes are read in init().
+ public EasyVideoPlayer(Context context, AttributeSet attrs) {
+ super(context, attrs);
+ init(context, attrs);
+ }
+
+ // XML inflation with a default style attribute.
+ public EasyVideoPlayer(Context context, AttributeSet attrs, int defStyleAttr) {
+ super(context, attrs, defStyleAttr);
+ init(context, attrs);
+ }
+
+ // Rendering surface: a TextureView added in onFinishInflate(); mSurface is
+ // created when the SurfaceTexture becomes available.
+ private TextureView mTextureView;
+ private Surface mSurface;
+
+ // Overlay views built in onFinishInflate(): bottom controls, indeterminate
+ // progress indicator, and a full-size frame that captures taps.
+ private View mControlsFrame;
+ private View mProgressFrame;
+ private View mClickFrame;
+
+// private SeekBar mSeeker;
+// private TextView mLabelPosition;
+// private TextView mLabelDuration;
+// private ImageButton mBtnRestart;
+// private TextView mBtnRetry;
+// private ImageButton mBtnPlayPause;
+// private TextView mBtnSubmit;
+// private TextView mLabelCustom;
+// private TextView mLabelBottom;
+
+ // The only controls kept in this fork (looked up from evp_include_controls).
+ private RelativeLayout mRetry;
+ private RelativeLayout mAddToStory;
+ private RelativeLayout mSaveStory;
+
+ // MediaPlayer and its bookkeeping flags/dimensions.
+ private MediaPlayer mPlayer;
+ private boolean mSurfaceAvailable;
+ private boolean mIsPrepared;
+ private boolean mWasPlaying;
+ private int mInitialTextureWidth;
+ private int mInitialTextureHeight;
+
+ // Posts mUpdateCounters on the main thread while playing; null when idle.
+ private Handler mHandler;
+
+ // Current source URI plus the configuration read in init() / via setters.
+ private Uri mSource;
+ private EasyVideoCallback mCallback;
+ private EasyVideoProgressCallback mProgressCallback;
+ @LeftAction private int mLeftAction = LEFT_ACTION_RESTART;
+ @RightAction private int mRightAction = RIGHT_ACTION_NONE;
+ private CharSequence mRetryText;
+ private CharSequence mSubmitText;
+ private Drawable mRestartDrawable;
+ private Drawable mPlayDrawable;
+ private Drawable mPauseDrawable;
+ private CharSequence mCustomLabelText;
+ private CharSequence mBottomLabelText;
+ private boolean mHideControlsOnPlay = true;
+ private boolean mAutoPlay;
+ private int mInitialPosition = -1;
+ private boolean mControlsDisabled;
+ private int mThemeColor = 0;
+ private boolean mAutoFullscreen = false;
+ private boolean mLoop = true;
+ private int mVideoResetCounter = 0;
+ private int mPlayerPosition = 0;
+
+// // Runnable used to run code on an interval to update counters and seeker
+ // Polls the player every UPDATE_INTERVAL ms, clamps the position to the
+ // duration, notifies the progress callback, then reposts itself.
+ private final Runnable mUpdateCounters =
+ new Runnable() {
+ @Override
+ public void run() {
+// if (mHandler == null || !mIsPrepared || mSeeker == null || mPlayer == null) return;
+ if (mHandler == null || !mIsPrepared || mPlayer == null) return;
+// if(mPlayerPosition == mPlayer.getCurrentPosition()){
+// mVideoResetCounter++;
+// }
+// if(mVideoResetCounter >= 4){
+// Log.d(TAG, "UpdateCounters: finished playing. Resetting player.");
+// mPlayerPosition = 0;
+// mPlayer.seekTo(mPlayerPosition);
+// mVideoResetCounter = 0;
+// }
+ mPlayerPosition = mPlayer.getCurrentPosition();
+ final int dur = mPlayer.getDuration();
+ if (mPlayerPosition > dur) mPlayerPosition = dur;
+// mLabelPosition.setText(Util.getDurationString(pos, false));
+// mLabelDuration.setText(Util.getDurationString(dur - pos, true));
+// mSeeker.setProgress(pos);
+// mSeeker.setMax(dur);
+
+
+ if (mProgressCallback != null) mProgressCallback.onVideoProgressUpdate(mPlayerPosition, dur);
+ if (mHandler != null) mHandler.postDelayed(this, UPDATE_INTERVAL);
+ }
+ };
+
+ // Reads the evp_* XML attributes (or applies defaults when inflated
+ // programmatically) and resolves fallback text/drawables.
+ // NOTE: this fork hard-codes mAutoPlay and mLoop to true — the
+ // evp_autoPlay / evp_loop attributes are ignored (their reads are
+ // commented out below).
+ private void init(Context context, AttributeSet attrs) {
+ AppCompatDelegate.setCompatVectorFromResourcesEnabled(true);
+ setBackgroundColor(Color.BLACK);
+
+ if (attrs != null) {
+ TypedArray a =
+ context.getTheme().obtainStyledAttributes(attrs, R.styleable.EasyVideoPlayer, 0, 0);
+ try {
+ String source = a.getString(R.styleable.EasyVideoPlayer_evp_source);
+ if (source != null && !source.trim().isEmpty()) mSource = Uri.parse(source);
+
+ //noinspection WrongConstant
+ mLeftAction = a.getInteger(R.styleable.EasyVideoPlayer_evp_leftAction, LEFT_ACTION_RESTART);
+ //noinspection WrongConstant
+ mRightAction = a.getInteger(R.styleable.EasyVideoPlayer_evp_rightAction, RIGHT_ACTION_NONE);
+
+ mCustomLabelText = a.getText(R.styleable.EasyVideoPlayer_evp_customLabelText);
+ mRetryText = a.getText(R.styleable.EasyVideoPlayer_evp_retryText);
+ mSubmitText = a.getText(R.styleable.EasyVideoPlayer_evp_submitText);
+ mBottomLabelText = a.getText(R.styleable.EasyVideoPlayer_evp_bottomText);
+
+ // -1 sentinel means "not set"; defaults are resolved after this block.
+ int restartDrawableResId =
+ a.getResourceId(R.styleable.EasyVideoPlayer_evp_restartDrawable, -1);
+ int playDrawableResId = a.getResourceId(R.styleable.EasyVideoPlayer_evp_playDrawable, -1);
+ int pauseDrawableResId = a.getResourceId(R.styleable.EasyVideoPlayer_evp_pauseDrawable, -1);
+
+ if (restartDrawableResId != -1) {
+ mRestartDrawable = AppCompatResources.getDrawable(context, restartDrawableResId);
+ }
+ if (playDrawableResId != -1) {
+ mPlayDrawable = AppCompatResources.getDrawable(context, playDrawableResId);
+ }
+ if (pauseDrawableResId != -1) {
+ mPauseDrawable = AppCompatResources.getDrawable(context, pauseDrawableResId);
+ }
+
+ mHideControlsOnPlay =
+ a.getBoolean(R.styleable.EasyVideoPlayer_evp_hideControlsOnPlay, true);
+// mAutoPlay = a.getBoolean(R.styleable.EasyVideoPlayer_evp_autoPlay, false);
+ mAutoPlay = true;
+ mControlsDisabled = a.getBoolean(R.styleable.EasyVideoPlayer_evp_disableControls, false);
+
+ mThemeColor =
+ a.getColor(
+ R.styleable.EasyVideoPlayer_evp_themeColor,
+ Util.resolveColor(context, R.attr.colorPrimary));
+
+ mAutoFullscreen = a.getBoolean(R.styleable.EasyVideoPlayer_evp_autoFullscreen, false);
+// mLoop = a.getBoolean(R.styleable.EasyVideoPlayer_evp_loop, false);
+ mLoop = true;
+ } finally {
+ a.recycle();
+ }
+ } else {
+ // Programmatic construction: same defaults, no attribute parsing.
+ mLeftAction = LEFT_ACTION_RESTART;
+ mRightAction = RIGHT_ACTION_NONE;
+ mHideControlsOnPlay = true;
+// mAutoPlay = false;
+ mAutoPlay = true;
+ mControlsDisabled = false;
+ mThemeColor = Util.resolveColor(context, R.attr.colorPrimary);
+ mAutoFullscreen = false;
+// mLoop = false;
+ mLoop = true;
+ }
+
+ // Fall back to the library's stock strings/drawables where nothing was set.
+ if (mRetryText == null) mRetryText = context.getResources().getText(R.string.evp_retry);
+ if (mSubmitText == null) mSubmitText = context.getResources().getText(R.string.evp_submit);
+
+ if (mRestartDrawable == null)
+ mRestartDrawable = AppCompatResources.getDrawable(context, R.drawable.evp_action_restart);
+ if (mPlayDrawable == null)
+ mPlayDrawable = AppCompatResources.getDrawable(context, R.drawable.evp_action_play);
+ if (mPauseDrawable == null)
+ mPauseDrawable = AppCompatResources.getDrawable(context, R.drawable.evp_action_pause);
+ }
+
+ // Sets (or replaces) the video source. If a source was already set, the
+ // current playback is stopped and the player is reset before re-preparing;
+ // otherwise preparation starts fresh once the surface is available.
+ @Override
+ public void setSource(@NonNull Uri source) {
+ boolean hadSource = mSource != null;
+ if (hadSource) stop();
+ mSource = source;
+ if (mPlayer != null) {
+ if (hadSource) {
+ sourceChanged();
+ } else {
+ prepare();
+ }
+ }
+ }
+
+ // Registers the lifecycle/user-action callback.
+ @Override
+ public void setCallback(@NonNull EasyVideoCallback callback) {
+ mCallback = callback;
+ }
+
+ // Registers the periodic playback-progress callback (see mUpdateCounters).
+ @Override
+ public void setProgressCallback(@NonNull EasyVideoProgressCallback callback) {
+ mProgressCallback = callback;
+ }
+
+ // Selects the left control action; rejects values outside LEFT_ACTION_*.
+ @Override
+ public void setLeftAction(@LeftAction int action) {
+ if (action < LEFT_ACTION_NONE || action > LEFT_ACTION_RETRY)
+ throw new IllegalArgumentException("Invalid left action specified.");
+ mLeftAction = action;
+ invalidateActions();
+ }
+
+ // Selects the right control action; rejects values outside RIGHT_ACTION_*.
+ @Override
+ public void setRightAction(@RightAction int action) {
+ if (action < RIGHT_ACTION_NONE || action > RIGHT_ACTION_CUSTOM_LABEL)
+ throw new IllegalArgumentException("Invalid right action specified.");
+ mRightAction = action;
+ invalidateActions();
+ }
+
+ // --- Label / drawable setters -------------------------------------------
+ // Most of these are intentional no-ops in this fork: the widgets they used
+ // to update (labels, retry/submit buttons, play/pause button) were removed
+ // from the controls layout, so only the empty IUserMethods contract remains.
+
+ // No-op: custom label widget was removed.
+ @Override
+ public void setCustomLabelText(@Nullable CharSequence text) {
+// mCustomLabelText = text;
+// mLabelCustom.setText(text);
+// setRightAction(RIGHT_ACTION_CUSTOM_LABEL);
+ }
+
+ @Override
+ public void setCustomLabelTextRes(@StringRes int textRes) {
+ setCustomLabelText(getResources().getText(textRes));
+ }
+
+ // No-op: bottom label widget was removed.
+ @Override
+ public void setBottomLabelText(@Nullable CharSequence text) {
+// mBottomLabelText = text;
+// mLabelBottom.setText(text);
+// if (text == null || text.toString().trim().length() == 0) mLabelBottom.setVisibility(View.GONE);
+// else mLabelBottom.setVisibility(View.VISIBLE);
+ }
+
+ @Override
+ public void setBottomLabelTextRes(@StringRes int textRes) {
+ setBottomLabelText(getResources().getText(textRes));
+ }
+
+ // No-op: retry button text is fixed in the layout now.
+ @Override
+ public void setRetryText(@Nullable CharSequence text) {
+// mRetryText = text;
+// mBtnRetry.setText(text);
+ }
+
+ @Override
+ public void setRetryTextRes(@StringRes int res) {
+ setRetryText(getResources().getText(res));
+ }
+
+ // No-op: submit button was removed.
+ @Override
+ public void setSubmitText(@Nullable CharSequence text) {
+// mSubmitText = text;
+// mBtnSubmit.setText(text);
+ }
+
+ @Override
+ public void setSubmitTextRes(@StringRes int res) {
+ setSubmitText(getResources().getText(res));
+ }
+
+ // No-op: restart button was removed.
+ @Override
+ public void setRestartDrawable(@NonNull Drawable drawable) {
+// mRestartDrawable = drawable;
+// mBtnRestart.setImageDrawable(drawable);
+ }
+
+ @Override
+ public void setRestartDrawableRes(@DrawableRes int res) {
+ setRestartDrawable(AppCompatResources.getDrawable(getContext(), res));
+ }
+
+ // No-op: play/pause button was removed.
+ @Override
+ public void setPlayDrawable(@NonNull Drawable drawable) {
+// mPlayDrawable = drawable;
+// if (!isPlaying()) mBtnPlayPause.setImageDrawable(drawable);
+ }
+
+ @Override
+ public void setPlayDrawableRes(@DrawableRes int res) {
+ setPlayDrawable(AppCompatResources.getDrawable(getContext(), res));
+ }
+
+ // No-op: play/pause button was removed.
+ @Override
+ public void setPauseDrawable(@NonNull Drawable drawable) {
+// mPauseDrawable = drawable;
+// if (isPlaying()) mBtnPlayPause.setImageDrawable(drawable);
+ }
+
+ @Override
+ public void setPauseDrawableRes(@DrawableRes int res) {
+ setPauseDrawable(AppCompatResources.getDrawable(getContext(), res));
+ }
+
+ // Still functional: re-tints the remaining drawables via invalidateThemeColors().
+ @Override
+ public void setThemeColor(@ColorInt int color) {
+ mThemeColor = color;
+ invalidateThemeColors();
+ }
+
+ @Override
+ public void setThemeColorRes(@ColorRes int colorRes) {
+ setThemeColor(ContextCompat.getColor(getContext(), colorRes));
+ }
+
+ // Whether controls auto-hide when playback starts (see onPrepared()).
+ @Override
+ public void setHideControlsOnPlay(boolean hide) {
+ mHideControlsOnPlay = hide;
+ }
+
+ // Overrides the forced-true default from init() if called before preparing.
+ @Override
+ public void setAutoPlay(boolean autoPlay) {
+ mAutoPlay = autoPlay;
+ }
+
+ // Position (ms) to seek to once prepared; consumed in onPrepared().
+ @Override
+ public void setInitialPosition(@IntRange(from = 0, to = Integer.MAX_VALUE) int pos) {
+ mInitialPosition = pos;
+ }
+
+ // Called when setSource() replaces an existing source: disables the
+ // controls, resets the MediaPlayer and re-prepares with the new URI.
+ private void sourceChanged() {
+ setControlsEnabled(false);
+// mSeeker.setProgress(0);
+// mSeeker.setEnabled(false);
+ mPlayer.reset();
+ if (mCallback != null) mCallback.onPreparing(this);
+ try {
+ setSourceInternal();
+ } catch (IOException e) {
+ throwError(e);
+ }
+ }
+
+ // Feeds mSource into the MediaPlayer, dispatching on the URI scheme:
+ // http(s) streams, file:// asset paths, asset:// paths, or anything else
+ // as a local content/file URI. Preparation is asynchronous; results arrive
+ // in onPrepared()/onError().
+ // NOTE(review): the file:// branch matches "/android_assets/" (plural);
+ // Android's conventional asset path segment is "android_asset" — confirm
+ // this matches the URIs the callers actually build.
+ private void setSourceInternal() throws IOException {
+ if (mSource.getScheme() != null
+ && (mSource.getScheme().equals("http") || mSource.getScheme().equals("https"))) {
+ LOG("Loading web URI: " + mSource.toString());
+ mPlayer.setDataSource(mSource.toString());
+ } else if (mSource.getScheme() != null
+ && (mSource.getScheme().equals("file") && mSource.getPath().contains("/android_assets/"))) {
+ LOG("Loading assets URI: " + mSource.toString());
+ AssetFileDescriptor afd;
+ afd =
+ getContext()
+ .getAssets()
+ .openFd(mSource.toString().replace("file:///android_assets/", ""));
+ mPlayer.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());
+ afd.close();
+ } else if (mSource.getScheme() != null && mSource.getScheme().equals("asset")) {
+ LOG("Loading assets URI: " + mSource.toString());
+ AssetFileDescriptor afd;
+ afd = getContext().getAssets().openFd(mSource.toString().replace("asset://", ""));
+ mPlayer.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());
+ afd.close();
+ } else {
+ LOG("Loading local URI: " + mSource.toString());
+ mPlayer.setDataSource(getContext(), mSource);
+ }
+ mPlayer.prepareAsync();
+ }
+
+ // Starts preparation once all prerequisites exist (surface, source,
+ // player) and the player is not already prepared; no-op otherwise.
+ private void prepare() {
+ if (!mSurfaceAvailable || mSource == null || mPlayer == null || mIsPrepared) return;
+ if (mCallback != null) mCallback.onPreparing(this);
+ try {
+ mPlayer.setSurface(mSurface);
+ setSourceInternal();
+ } catch (IOException e) {
+ throwError(e);
+ }
+ }
+
+ // Enables/disables the interactive controls (retry button and the tap
+ // frame). Controls start disabled and are enabled from onPrepared().
+ private void setControlsEnabled(boolean enabled) {
+// if (mSeeker == null) return;
+// mSeeker.setEnabled(enabled);
+// mBtnPlayPause.setEnabled(enabled);
+// mBtnSubmit.setEnabled(enabled);
+// mBtnRestart.setEnabled(enabled);
+// mBtnRetry.setEnabled(enabled);
+ mRetry.setEnabled(enabled);
+
+ // Dimming alpha kept from the original library; currently unused because
+ // the buttons it applied to were removed.
+ final float disabledAlpha = .4f;
+// mBtnPlayPause.setAlpha(enabled ? 1f : disabledAlpha);
+// mBtnSubmit.setAlpha(enabled ? 1f : disabledAlpha);
+// mBtnRestart.setAlpha(enabled ? 1f : disabledAlpha);
+
+ mClickFrame.setEnabled(enabled);
+ }
+
+ // Shows the controls overlay. The original fade-in animation is commented
+ // out; the frame is simply made visible.
+ @Override
+ public void showControls() {
+// if (mControlsDisabled || isControlsShown() || mSeeker == null) return;
+
+// mControlsFrame.animate().cancel();
+// mControlsFrame.setAlpha(0f);
+ mControlsFrame.setVisibility(View.VISIBLE);
+// mControlsFrame
+// .animate()
+// .alpha(1f)
+// .setInterpolator(new DecelerateInterpolator())
+// .setListener(
+// new AnimatorListenerAdapter() {
+// @Override
+// public void onAnimationEnd(Animator animation) {
+// if (mAutoFullscreen) setFullscreen(false);
+// }
+// })
+// .start();
+ }
+
+ // NOTE(review): despite the name, this sets the frame VISIBLE, so controls
+ // are never actually hidden in this fork (the fade-out below is commented
+ // away). This is self-consistent: isControlsShown() keys off alpha, which
+ // stays 1, so toggleControls() always lands here — confirm this "always
+ // visible" behavior is intended before "fixing" it.
+ @Override
+ public void hideControls() {
+// if (mControlsDisabled || !isControlsShown() || mSeeker == null) return;
+// mControlsFrame.animate().cancel();
+// mControlsFrame.setAlpha(1f);
+ mControlsFrame.setVisibility(View.VISIBLE);
+// mControlsFrame
+// .animate()
+// .alpha(0f)
+// .setInterpolator(new DecelerateInterpolator())
+// .setListener(
+// new AnimatorListenerAdapter() {
+// @Override
+// public void onAnimationEnd(Animator animation) {
+// setFullscreen(true);
+//
+// if (mControlsFrame != null) mControlsFrame.setVisibility(View.INVISIBLE);
+// }
+// })
+// .start();
+ }
+
+ // True when controls are enabled, built, and more than half opaque.
+ @CheckResult
+ @Override
+ public boolean isControlsShown() {
+ return !mControlsDisabled && mControlsFrame != null && mControlsFrame.getAlpha() > .5f;
+ }
+
+ // Flips controls visibility (subject to the hideControls() caveat above).
+ @Override
+ public void toggleControls() {
+ if (mControlsDisabled) return;
+ if (isControlsShown()) {
+ hideControls();
+ } else {
+ showControls();
+ }
+ }
+
+ // Re-enables controls and re-attaches the tap-to-toggle listener.
+ @Override
+ public void enableControls(boolean andShow) {
+ mControlsDisabled = false;
+ if (andShow) showControls();
+ mClickFrame.setOnClickListener(
+ new OnClickListener() {
+ @Override
+ public void onClick(View view) {
+ toggleControls();
+ }
+ });
+ mClickFrame.setClickable(true);
+ }
+
+ // Hides the controls entirely and removes the tap listener.
+ @Override
+ public void disableControls() {
+ mControlsDisabled = true;
+ mControlsFrame.setVisibility(View.GONE);
+ mClickFrame.setOnClickListener(null);
+ mClickFrame.setClickable(false);
+ }
+
+ // True once onPrepared() has fired and the player still exists.
+ @CheckResult
+ @Override
+ public boolean isPrepared() {
+ return mPlayer != null && mIsPrepared;
+ }
+
+ @CheckResult
+ @Override
+ public boolean isPlaying() {
+ return mPlayer != null && mPlayer.isPlaying();
+ }
+
+ // Current position in ms, or -1 when no player exists.
+ @CheckResult
+ @Override
+ public int getCurrentPosition() {
+ if (mPlayer == null) return -1;
+ return mPlayer.getCurrentPosition();
+ }
+
+ // Total duration in ms, or -1 when no player exists.
+ @CheckResult
+ @Override
+ public int getDuration() {
+ if (mPlayer == null) return -1;
+ return mPlayer.getDuration();
+ }
+
+ // Starts playback, notifies the callback, and kicks off the progress
+ // polling loop (mUpdateCounters).
+ @Override
+ public void start() {
+ if (mPlayer == null) return;
+ mPlayer.start();
+ if (mCallback != null) mCallback.onStarted(this);
+ if (mHandler == null) mHandler = new Handler();
+ mHandler.post(mUpdateCounters);
+// mBtnPlayPause.setImageDrawable(mPauseDrawable);
+ }
+
+ // Seeks to the given position (ms); no-op when no player exists.
+ @Override
+ public void seekTo(@IntRange(from = 0, to = Integer.MAX_VALUE) int pos) {
+ if (mPlayer == null) return;
+ mPlayer.seekTo(pos);
+ }
+
+ // Sets channel volumes (0..1). Unlike the other methods, this throws when
+ // the player is not yet prepared instead of silently returning.
+ public void setVolume(
+ @FloatRange(from = 0f, to = 1f) float leftVolume,
+ @FloatRange(from = 0f, to = 1f) float rightVolume) {
+ if (mPlayer == null || !mIsPrepared)
+ throw new IllegalStateException(
+ "You cannot use setVolume(float, float) until the player is prepared.");
+ mPlayer.setVolume(leftVolume, rightVolume);
+ }
+
+ // Pauses playback, notifies the callback, and stops the progress polling.
+ @Override
+ public void pause() {
+ Log.d(TAG, "PLAYER: pausing player.");
+ if (mPlayer == null || !isPlaying()) return;
+ mPlayer.pause();
+ if (mCallback != null) mCallback.onPaused(this);
+ if (mHandler == null) return;
+ mHandler.removeCallbacks(mUpdateCounters);
+// mBtnPlayPause.setImageDrawable(mPlayDrawable);
+ }
+
+ // Stops playback and the progress polling. stop() can throw
+ // IllegalStateException depending on player state; that is deliberately
+ // swallowed here since this is a best-effort teardown step.
+ @Override
+ public void stop() {
+ Log.d(TAG, "PLAYER: stopping player.");
+ if (mPlayer == null) return;
+ try {
+ mPlayer.stop();
+ } catch (Throwable ignored) {
+ }
+ if (mHandler == null) return;
+ mHandler.removeCallbacks(mUpdateCounters);
+// mBtnPlayPause.setImageDrawable(mPauseDrawable);
+ }
+
+ /**
+ * Resets the MediaPlayer back to its idle state so a new source can be set.
+ * Clears the prepared flag first so callers observing isPrepared() during
+ * the reset never see a stale "prepared" state. (The original duplicated
+ * the flag assignment after reset(); the redundant second write is removed.)
+ */
+ @Override
+ public void reset() {
+ if (mPlayer == null) return;
+ mIsPrepared = false;
+ mPlayer.reset();
+ }
+
+ /**
+ * Releases the MediaPlayer, deletes the backing output file (this fork
+ * treats the source as a temporary capture), and stops the progress loop.
+ * Called from onDetachedFromWindow(), so it must tolerate a player that
+ * never had a source set.
+ */
+ @Override
+ public void release() {
+ Log.d(TAG, "PLAYER: releasing player.");
+ mIsPrepared = false;
+
+ if (mPlayer != null) {
+ try {
+ mPlayer.release();
+ } catch (Throwable ignored) {
+ }
+ mPlayer = null;
+ // FIX: mSource is null when the view is detached before any source was
+ // set; the unconditional mSource.getPath() here used to NPE.
+ if (mSource != null) deleteOutputFile(mSource.getPath());
+ }
+
+ if (mHandler != null) {
+ mHandler.removeCallbacks(mUpdateCounters);
+ mHandler = null;
+ }
+
+ LOG("Released player and Handler");
+ }
+
+ /**
+ * Best-effort deletion of the file behind the given URI string; a null
+ * argument is a no-op and deletion failures are ignored.
+ */
+ private void deleteOutputFile(@Nullable String uri) {
+ if (uri != null)
+ //noinspection ResultOfMethodCallIgnored
+ new File(Uri.parse(uri).getPath()).delete();
+ }
+
+ // Whether hiding the controls should also enter immersive fullscreen
+ // (see setFullscreen()).
+ @Override
+ public void setAutoFullscreen(boolean autoFullscreen) {
+ this.mAutoFullscreen = autoFullscreen;
+ }
+
+ // Applies looping immediately when a player exists; otherwise it takes
+ // effect when the player is created in onFinishInflate().
+ @Override
+ public void setLoop(boolean loop) {
+ mLoop = loop;
+ if (mPlayer != null) mPlayer.setLooping(loop);
+ }
+
+ // Surface listeners
+
+ // Surface is ready: remember its size, wrap it for the MediaPlayer, and
+ // either hand it to an already-prepared player or start preparation.
+ @Override
+ public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
+ LOG("Surface texture available: %dx%d", width, height);
+ mInitialTextureWidth = width;
+ mInitialTextureHeight = height;
+ mSurfaceAvailable = true;
+ mSurface = new Surface(surfaceTexture);
+ if (mIsPrepared) {
+ mPlayer.setSurface(mSurface);
+ } else {
+ prepare();
+ }
+ }
+
+ // Re-fit the video into the resized surface.
+ // NOTE(review): assumes mPlayer is non-null here (it is created in
+ // onFinishInflate(), before the TextureView is attached) — confirm no
+ // size-change event can arrive after release() nulls the player.
+ @Override
+ public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int width, int height) {
+ LOG("Surface texture changed: %dx%d", width, height);
+ adjustAspectRatio(width, height, mPlayer.getVideoWidth(), mPlayer.getVideoHeight());
+ }
+
+ // Returning false keeps ownership of the SurfaceTexture with the caller.
+ @Override
+ public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
+ LOG("Surface texture destroyed");
+ mSurfaceAvailable = false;
+ mSurface = null;
+ return false;
+ }
+
+ @Override
+ public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {}
+
+ // Media player listeners
+
+ // Preparation finished: hide the spinner, enable controls, and either
+ // autoplay (the default in this fork — init() forces mAutoPlay = true) or
+ // briefly start/pause to render the first frame.
+ @Override
+ public void onPrepared(MediaPlayer mediaPlayer) {
+ LOG("onPrepared()");
+ mProgressFrame.setVisibility(View.INVISIBLE);
+ mIsPrepared = true;
+ if (mCallback != null) mCallback.onPrepared(this);
+// mLabelPosition.setText(Util.getDurationString(0, false));
+// mLabelDuration.setText(Util.getDurationString(mediaPlayer.getDuration(), false));
+// mSeeker.setProgress(0);
+// mSeeker.setMax(mediaPlayer.getDuration());
+ setControlsEnabled(true);
+
+ if (mAutoPlay) {
+ if (!mControlsDisabled && mHideControlsOnPlay) hideControls();
+ start();
+ // A pending setInitialPosition() request is consumed exactly once.
+ if (mInitialPosition > 0) {
+ seekTo(mInitialPosition);
+ mInitialPosition = -1;
+ }
+ } else {
+ // Hack to show first frame, is there another way?
+ mPlayer.start();
+ mPlayer.pause();
+ }
+ }
+
+ // Buffering progress for streamed sources; forwarded to the callback.
+ @Override
+ public void onBufferingUpdate(MediaPlayer mediaPlayer, int percent) {
+ LOG("Buffering: %d%%", percent);
+ if (mCallback != null) mCallback.onBuffering(percent);
+// if (mSeeker != null) {
+// if (percent == 100) mSeeker.setSecondaryProgress(0);
+// else mSeeker.setSecondaryProgress(mSeeker.getMax() * (percent / 100));
+// }
+ }
+
+ // End of stream. When looping, the progress loop is stopped and the
+ // callback gets onCompletion followed by onStarted for the next pass.
+ @Override
+ public void onCompletion(MediaPlayer mediaPlayer) {
+ LOG("onCompletion()");
+ if (mLoop) {
+// mBtnPlayPause.setImageDrawable(mPlayDrawable);
+ if (mHandler != null) mHandler.removeCallbacks(mUpdateCounters);
+// mSeeker.setProgress(mSeeker.getMax());
+ showControls();
+ }
+ if (mCallback != null) {
+ mCallback.onCompletion(this);
+ if (mLoop) mCallback.onStarted(this);
+ }
+ }
+
+ // Real video dimensions are known: re-fit into the initial surface size.
+ @Override
+ public void onVideoSizeChanged(MediaPlayer mediaPlayer, int width, int height) {
+ LOG("Video size changed: %dx%d", width, height);
+ adjustAspectRatio(mInitialTextureWidth, mInitialTextureHeight, width, height);
+ }
+
+ @Override
+ public boolean onError(MediaPlayer mediaPlayer, int what, int extra) {
+ if (what == -38) {
+ // Error code -38 happens on some Samsung devices
+ // Just ignore it
+ return false;
+ }
+ String errorMsg = "Preparation/playback error (" + what + "): ";
+ switch (what) {
+ default:
+ errorMsg += "Unknown error";
+ break;
+ case MediaPlayer.MEDIA_ERROR_IO:
+ errorMsg += "I/O error";
+ break;
+ case MediaPlayer.MEDIA_ERROR_MALFORMED:
+ errorMsg += "Malformed";
+ break;
+ case MediaPlayer.MEDIA_ERROR_NOT_VALID_FOR_PROGRESSIVE_PLAYBACK:
+ errorMsg += "Not valid for progressive playback";
+ break;
+ case MediaPlayer.MEDIA_ERROR_SERVER_DIED:
+ errorMsg += "Server died";
+ break;
+ case MediaPlayer.MEDIA_ERROR_TIMED_OUT:
+ errorMsg += "Timed out";
+ break;
+ case MediaPlayer.MEDIA_ERROR_UNSUPPORTED:
+ errorMsg += "Unsupported";
+ break;
+ }
+ throwError(new Exception(errorMsg));
+ return false;
+ }
+
+ // View events
+
+ /**
+ * Builds the view hierarchy after inflation: the TextureView used for
+ * rendering, an indeterminate progress overlay, a full-size click-capture
+ * frame, and the bottom controls; then creates and wires the MediaPlayer
+ * and kicks off preparation (which no-ops until source + surface exist).
+ */
+ @Override
+ protected void onFinishInflate() {
+ super.onFinishInflate();
+
+ if (isInEditMode()) {
+ return;
+ }
+
+ setKeepScreenOn(true);
+
+ mPlayer = new MediaPlayer();
+ mPlayer.setOnPreparedListener(this);
+ mPlayer.setOnBufferingUpdateListener(this);
+ mPlayer.setOnCompletionListener(this);
+ mPlayer.setOnVideoSizeChangedListener(this);
+ mPlayer.setOnErrorListener(this);
+ mPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
+ mPlayer.setLooping(mLoop);
+
+ // Instantiate and add TextureView for rendering.
+ final LayoutParams textureLp =
+ new LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT);
+ mTextureView = new TextureView(getContext());
+ addView(mTextureView, textureLp);
+ mTextureView.setSurfaceTextureListener(this);
+
+ final LayoutInflater li = LayoutInflater.from(getContext());
+
+ // Inflate and add the progress overlay (hidden again in onPrepared()).
+ mProgressFrame = li.inflate(R.layout.evp_include_progress, this, false);
+ addView(mProgressFrame);
+
+ // Instantiate and add click frame (used to toggle controls).
+ mClickFrame = new FrameLayout(getContext());
+ //noinspection RedundantCast
+ ((FrameLayout) mClickFrame)
+ .setForeground(Util.resolveDrawable(getContext(), R.attr.selectableItemBackground));
+ addView(
+ mClickFrame,
+ new ViewGroup.LayoutParams(
+ ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT));
+
+ // Inflate the controls overlay, pinned to the bottom edge.
+ mControlsFrame = li.inflate(R.layout.evp_include_controls, this, false);
+ final LayoutParams controlsLp =
+ new LayoutParams(
+ ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT);
+ controlsLp.gravity = Gravity.BOTTOM;
+ addView(mControlsFrame, controlsLp);
+
+ final EasyVideoPlayer easyVideoPlayer = this;
+
+ if (mControlsDisabled) {
+ mClickFrame.setOnClickListener(null);
+ mControlsFrame.setVisibility(View.GONE);
+ } else {
+ mClickFrame.setOnClickListener(
+ new OnClickListener() {
+ @Override
+ public void onClick(View view) {
+ toggleControls();
+ // FIX: null-guard added for consistency with every other
+ // mCallback use in this class; the callback may not be set
+ // yet when the user taps the frame, which previously NPE'd.
+ if (mCallback != null) mCallback.onClickVideoFrame(easyVideoPlayer);
+ }
+ });
+ }
+
+ // Only the retry / save-story / add-to-story controls survive in this
+ // fork; the stock seek bar, play/pause button and labels were removed
+ // from evp_include_controls.
+ mRetry = mControlsFrame.findViewById(R.id.retry);
+ mRetry.setOnClickListener(this);
+
+ mSaveStory = mControlsFrame.findViewById(R.id.save_story);
+ mSaveStory.setOnClickListener(this);
+
+ mAddToStory = mControlsFrame.findViewById(R.id.add_to_story);
+ mAddToStory.setOnClickListener(this);
+
+ invalidateThemeColors();
+
+ setControlsEnabled(false);
+ invalidateActions();
+ prepare();
+ }
+
+ @Override
+ public void onClick(View view) {
+// if (view.getId() == R.id.btnPlayPause) {
+// if (mPlayer.isPlaying()) {
+// pause();
+// } else {
+// if (mHideControlsOnPlay && !mControlsDisabled) hideControls();
+// start();
+// }
+// }
+// else if (view.getId() == R.id.btnRestart) {
+// seekTo(0);
+// if (!isPlaying()) start();
+// }
+// else if (view.getId() == R.id.btnRetry) {
+// if (mCallback != null) mCallback.onRetry(this, mSource);
+// }
+// else if (view.getId() == R.id.btnSubmit) {
+// if (mCallback != null) mCallback.onSubmit(this, mSource);
+// }
+ if (view.getId() == R.id.retry) {
+ if (mCallback != null) mCallback.onRetry(this, mSource);
+ }
+ else if (view.getId() == R.id.add_to_story) {
+ if (mCallback != null) mCallback.addToStory(this, mSource);
+ }
+ else if (view.getId() == R.id.save_story) {
+ if (mCallback != null) mCallback.saveStory(this, mSource);
+ }
+ }
+
+ // SeekBar.OnSeekBarChangeListener implementation. NOTE(review): no SeekBar
+ // registers this listener anymore (the seeker wiring in onFinishInflate()
+ // is commented out); these remain only to satisfy the implemented
+ // interface — confirm before removing.
+ @Override
+ public void onProgressChanged(SeekBar seekBar, int value, boolean fromUser) {
+ if (fromUser) seekTo(value);
+ }
+
+ // Pause via mPlayer directly (not this.pause()) so the progress updater
+ // keeps running while the user scrubs.
+ @Override
+ public void onStartTrackingTouch(SeekBar seekBar) {
+ mWasPlaying = isPlaying();
+ if (mWasPlaying) mPlayer.pause(); // keeps the time updater running, unlike pause()
+ }
+
+ @Override
+ public void onStopTrackingTouch(SeekBar seekBar) {
+ if (mWasPlaying) mPlayer.start();
+ }
+
+ // View teardown: release the player/handler and drop view references so
+ // the detached hierarchy can be collected. The trailing handler block is
+ // defensive — release() has already removed callbacks and nulled mHandler.
+ @Override
+ protected void onDetachedFromWindow() {
+ super.onDetachedFromWindow();
+ LOG("Detached from window");
+ release();
+//
+// mSeeker = null;
+// mLabelPosition = null;
+// mLabelDuration = null;
+// mBtnPlayPause = null;
+// mBtnRestart = null;
+// mBtnSubmit = null;
+
+ mControlsFrame = null;
+ mClickFrame = null;
+ mProgressFrame = null;
+//
+ if (mHandler != null) {
+ mHandler.removeCallbacks(mUpdateCounters);
+ mHandler = null;
+ }
+ }
+
+ // Utilities
+
+ /**
+ * Debug logging helper; formats {@code message} with {@code args} via
+ * String.format when arguments are supplied. Format failures are swallowed
+ * so logging can never crash playback.
+ */
+ private static void LOG(String message, Object... args) {
+ try {
+ // FIX: varargs arrive as an empty array (never null) from normal call
+ // sites, so the old `args != null` check always formatted — a plain
+ // message containing '%' would throw and be silently dropped by the
+ // catch below. Only format when arguments were actually passed.
+ if (args != null && args.length > 0) message = String.format(message, args);
+ Log.d("EasyVideoPlayer", message);
+ } catch (Exception ignored) {
+ }
+ }
+
+ // Applies the current left/right action selection to the controls. Only
+ // the retry button still exists: LEFT_ACTION_RETRY shows it, the other
+ // left actions hide it (the restart button was removed, so
+ // LEFT_ACTION_RESTART now hides everything). All right actions are no-ops
+ // because the submit button and custom label were removed.
+ private void invalidateActions() {
+ switch (mLeftAction) {
+ case LEFT_ACTION_NONE:
+// mBtnRetry.setVisibility(View.GONE);
+// mBtnRestart.setVisibility(View.GONE);
+ mRetry.setVisibility(View.GONE);
+ break;
+ case LEFT_ACTION_RESTART:
+// mBtnRetry.setVisibility(View.GONE);
+// mBtnRestart.setVisibility(View.VISIBLE);
+ mRetry.setVisibility(View.GONE);
+ break;
+ case LEFT_ACTION_RETRY:
+// mBtnRetry.setVisibility(View.VISIBLE);
+// mBtnRestart.setVisibility(View.GONE);
+ mRetry.setVisibility(View.VISIBLE);
+ break;
+ }
+ switch (mRightAction) {
+ case RIGHT_ACTION_NONE:
+// mBtnSubmit.setVisibility(View.GONE);
+// mLabelCustom.setVisibility(View.GONE);
+ break;
+ case RIGHT_ACTION_SUBMIT:
+// mBtnSubmit.setVisibility(View.VISIBLE);
+// mLabelCustom.setVisibility(View.GONE);
+ break;
+ case RIGHT_ACTION_CUSTOM_LABEL:
+// mBtnSubmit.setVisibility(View.GONE);
+// mLabelCustom.setVisibility(View.VISIBLE);
+ break;
+ }
+ }
+
+ /**
+ * Letterboxes/pillarboxes the video inside the TextureView: scales to the
+ * largest size that fits the view while preserving the video aspect ratio,
+ * then centers it via a transform matrix.
+ */
+ private void adjustAspectRatio(int viewWidth, int viewHeight, int videoWidth, int videoHeight) {
+ // FIX: MediaPlayer can report a 0x0 video size before the real
+ // dimensions are known, and the view itself may not be laid out yet;
+ // bail out instead of producing a divide-by-zero / NaN scale matrix.
+ if (viewWidth == 0 || viewHeight == 0 || videoWidth == 0 || videoHeight == 0) return;
+
+ final double aspectRatio = (double) videoHeight / videoWidth;
+ int newWidth, newHeight;
+
+ if (viewHeight > (int) (viewWidth * aspectRatio)) {
+ // limited by narrow width; restrict height
+ newWidth = viewWidth;
+ newHeight = (int) (viewWidth * aspectRatio);
+ } else {
+ // limited by short height; restrict width
+ newWidth = (int) (viewHeight / aspectRatio);
+ newHeight = viewHeight;
+ }
+
+ // Center the scaled video inside the view.
+ final int xoff = (viewWidth - newWidth) / 2;
+ final int yoff = (viewHeight - newHeight) / 2;
+
+ final Matrix txform = new Matrix();
+ mTextureView.getTransform(txform);
+ txform.setScale((float) newWidth / viewWidth, (float) newHeight / viewHeight);
+ txform.postTranslate(xoff, yoff);
+ mTextureView.setTransform(txform);
+ }
+
+ // Routes an error to the callback when one is registered; otherwise the
+ // error is fatal and rethrown as an unchecked exception.
+ private void throwError(Exception e) {
+ if (mCallback != null) mCallback.onError(this, e);
+ else throw new RuntimeException(e);
+ }
+
+ // Tints a SeekBar's thumb/progress drawables across API levels.
+ // NOTE(review): no SeekBar exists in this fork's controls anymore, so this
+ // helper currently has no callers — confirm before removing.
+ private static void setTint(@NonNull SeekBar seekBar, @ColorInt int color) {
+ ColorStateList s1 = ColorStateList.valueOf(color);
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
+ seekBar.setThumbTintList(s1);
+ seekBar.setProgressTintList(s1);
+ seekBar.setSecondaryProgressTintList(s1);
+ } else if (Build.VERSION.SDK_INT > Build.VERSION_CODES.GINGERBREAD_MR1) {
+ Drawable progressDrawable = DrawableCompat.wrap(seekBar.getProgressDrawable());
+ seekBar.setProgressDrawable(progressDrawable);
+ DrawableCompat.setTintList(progressDrawable, s1);
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
+ Drawable thumbDrawable = DrawableCompat.wrap(seekBar.getThumb());
+ DrawableCompat.setTintList(thumbDrawable, s1);
+ seekBar.setThumb(thumbDrawable);
+ }
+ } else {
+ PorterDuff.Mode mode = PorterDuff.Mode.SRC_IN;
+ if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.GINGERBREAD_MR1) {
+ mode = PorterDuff.Mode.MULTIPLY;
+ }
+ if (seekBar.getIndeterminateDrawable() != null)
+ seekBar.getIndeterminateDrawable().setColorFilter(color, mode);
+ if (seekBar.getProgressDrawable() != null)
+ seekBar.getProgressDrawable().setColorFilter(color, mode);
+ }
+ }
+
+ // Returns a mutated, compat-tinted copy of the given drawable.
+ private Drawable tintDrawable(@NonNull Drawable d, @ColorInt int color) {
+ d = DrawableCompat.wrap(d.mutate());
+ DrawableCompat.setTint(d, color);
+ return d;
+ }
+
+ // Tints a view's ripple background (Lollipop+) with a translucent color.
+ private void tintSelector(@NonNull View view, @ColorInt int color) {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP
+ && view.getBackground() instanceof RippleDrawable) {
+ final RippleDrawable rd = (RippleDrawable) view.getBackground();
+ rd.setColor(ColorStateList.valueOf(Util.adjustAlpha(color, 0.3f)));
+ }
+ }
+
+ // Re-tints the stock drawables with black or white depending on whether
+ // the theme color is dark. Only the drawables remain; the label/seeker
+ // tinting from the original library is commented out with the widgets.
+ private void invalidateThemeColors() {
+ final int labelColor = Util.isColorDark(mThemeColor) ? Color.WHITE : Color.BLACK;
+// mControlsFrame.setBackgroundColor(Util.adjustAlpha(mThemeColor, 0.8f));
+// tintSelector(mBtnRestart, labelColor);
+// tintSelector(mBtnPlayPause, labelColor);
+// mLabelDuration.setTextColor(labelColor);
+// mLabelPosition.setTextColor(labelColor);
+// setTint(mSeeker, labelColor);
+// mBtnRetry.setTextColor(labelColor);
+// tintSelector(mBtnRetry, labelColor);
+// mBtnSubmit.setTextColor(labelColor);
+// tintSelector(mBtnSubmit, labelColor);
+// mLabelCustom.setTextColor(labelColor);
+// mLabelBottom.setTextColor(labelColor);
+ mPlayDrawable = tintDrawable(mPlayDrawable.mutate(), labelColor);
+ mRestartDrawable = tintDrawable(mRestartDrawable.mutate(), labelColor);
+ mPauseDrawable = tintDrawable(mPauseDrawable.mutate(), labelColor);
+ }
+
+ // Enters/exits immersive fullscreen by toggling system-UI visibility flags
+ // on the click frame. Only does anything when mAutoFullscreen is enabled;
+ // KitKat+ additionally gets the immersive/hide-navigation flag set.
+ @TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH)
+ private void setFullscreen(boolean fullscreen) {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
+ if (mAutoFullscreen) {
+ int flags = !fullscreen ? 0 : View.SYSTEM_UI_FLAG_LOW_PROFILE;
+
+ ViewCompat.setFitsSystemWindows(mControlsFrame, !fullscreen);
+
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
+ flags |=
+ View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
+ | View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
+ | View.SYSTEM_UI_FLAG_LAYOUT_STABLE;
+ if (fullscreen) {
+ flags |=
+ View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
+ | View.SYSTEM_UI_FLAG_FULLSCREEN
+ | View.SYSTEM_UI_FLAG_IMMERSIVE;
+ }
+ }
+
+ mClickFrame.setSystemUiVisibility(flags);
+ }
+ }
+ }
+}
diff --git a/app/src/main/java/tabian/com/instagramclone2/easyvideoplayer/EasyVideoProgressCallback.java b/app/src/main/java/tabian/com/instagramclone2/easyvideoplayer/EasyVideoProgressCallback.java
new file mode 100644
index 0000000..446ef33
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/easyvideoplayer/EasyVideoProgressCallback.java
@@ -0,0 +1,7 @@
+package tabian.com.instagramclone2.easyvideoplayer;
+
+/**
+ * Callback for receiving periodic playback-progress updates from the player.
+ *
+ * @author Aidan Follestad (afollestad)
+ */
+public interface EasyVideoProgressCallback {
+
+ /**
+ * Invoked repeatedly while a video is playing.
+ *
+ * @param position current playback position (presumably milliseconds, matching
+ *     MediaPlayer's getCurrentPosition() — confirm against the player impl)
+ * @param duration total length of the video, in the same unit as {@code position}
+ */
+ void onVideoProgressUpdate(int position, int duration);
+}
diff --git a/app/src/main/java/tabian/com/instagramclone2/easyvideoplayer/IUserMethods.java b/app/src/main/java/tabian/com/instagramclone2/easyvideoplayer/IUserMethods.java
new file mode 100644
index 0000000..5bd0ca6
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/easyvideoplayer/IUserMethods.java
@@ -0,0 +1,111 @@
+package tabian.com.instagramclone2.easyvideoplayer;
+
+import android.graphics.drawable.Drawable;
+import android.net.Uri;
+import android.support.annotation.CheckResult;
+import android.support.annotation.ColorInt;
+import android.support.annotation.ColorRes;
+import android.support.annotation.DrawableRes;
+import android.support.annotation.FloatRange;
+import android.support.annotation.IntRange;
+import android.support.annotation.NonNull;
+import android.support.annotation.Nullable;
+import android.support.annotation.StringRes;
+
+/**
+ * Package-private contract listing the public control surface of the video
+ * player: source/callback wiring, UI customization, and playback transport.
+ *
+ * @author Aidan Follestad (afollestad)
+ */
+@SuppressWarnings("unused")
+interface IUserMethods {
+
+ // --- Source and callback wiring ---
+
+ void setSource(@NonNull Uri source);
+
+ void setCallback(@NonNull EasyVideoCallback callback);
+
+ void setProgressCallback(@NonNull EasyVideoProgressCallback callback);
+
+ // --- UI customization: actions, labels, drawables, theming ---
+
+ void setLeftAction(@EasyVideoPlayer.LeftAction int action);
+
+ void setRightAction(@EasyVideoPlayer.RightAction int action);
+
+ void setCustomLabelText(@Nullable CharSequence text);
+
+ void setCustomLabelTextRes(@StringRes int textRes);
+
+ void setBottomLabelText(@Nullable CharSequence text);
+
+ void setBottomLabelTextRes(@StringRes int textRes);
+
+ void setRetryText(@Nullable CharSequence text);
+
+ void setRetryTextRes(@StringRes int res);
+
+ void setSubmitText(@Nullable CharSequence text);
+
+ void setSubmitTextRes(@StringRes int res);
+
+ void setRestartDrawable(@NonNull Drawable drawable);
+
+ void setRestartDrawableRes(@DrawableRes int res);
+
+ void setPlayDrawable(@NonNull Drawable drawable);
+
+ void setPlayDrawableRes(@DrawableRes int res);
+
+ void setPauseDrawable(@NonNull Drawable drawable);
+
+ void setPauseDrawableRes(@DrawableRes int res);
+
+ void setThemeColor(@ColorInt int color);
+
+ void setThemeColorRes(@ColorRes int colorRes);
+
+ // --- Playback behavior options ---
+
+ void setHideControlsOnPlay(boolean hide);
+
+ void setAutoPlay(boolean autoPlay);
+
+ void setInitialPosition(@IntRange(from = 0, to = Integer.MAX_VALUE) int pos);
+
+ // --- Controls visibility ---
+
+ void showControls();
+
+ void hideControls();
+
+ @CheckResult
+ boolean isControlsShown();
+
+ void toggleControls();
+
+ void enableControls(boolean andShow);
+
+ void disableControls();
+
+ // --- Playback state and transport ---
+
+ @CheckResult
+ boolean isPrepared();
+
+ @CheckResult
+ boolean isPlaying();
+
+ @CheckResult
+ int getCurrentPosition();
+
+ @CheckResult
+ int getDuration();
+
+ void start();
+
+ void seekTo(@IntRange(from = 0, to = Integer.MAX_VALUE) int pos);
+
+ void setVolume(
+ @FloatRange(from = 0f, to = 1f) float leftVolume,
+ @FloatRange(from = 0f, to = 1f) float rightVolume);
+
+ void pause();
+
+ void stop();
+
+ void reset();
+
+ void release();
+
+ void setAutoFullscreen(boolean autoFullScreen);
+
+ void setLoop(boolean loop);
+}
diff --git a/app/src/main/java/tabian/com/instagramclone2/easyvideoplayer/Util.java b/app/src/main/java/tabian/com/instagramclone2/easyvideoplayer/Util.java
new file mode 100644
index 0000000..75718de
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/easyvideoplayer/Util.java
@@ -0,0 +1,71 @@
+package tabian.com.instagramclone2.easyvideoplayer;
+
+import android.content.Context;
+import android.content.res.TypedArray;
+import android.graphics.Color;
+import android.graphics.drawable.Drawable;
+import android.support.annotation.AttrRes;
+
+import java.util.Locale;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Small static helpers for duration formatting, color math, and theme
+ * attribute resolution.
+ *
+ * @author Aidan Follestad (afollestad)
+ */
+class Util {
+
+ /** Formats a millisecond duration as {@code MM:SS}, optionally prefixed with "-". */
+ static String getDurationString(long durationMs, boolean negativePrefix) {
+ final long totalMinutes = TimeUnit.MILLISECONDS.toMinutes(durationMs);
+ final long leftoverSeconds =
+ TimeUnit.MILLISECONDS.toSeconds(durationMs) - TimeUnit.MINUTES.toSeconds(totalMinutes);
+ final String sign = negativePrefix ? "-" : "";
+ return String.format(Locale.getDefault(), "%s%02d:%02d", sign, totalMinutes, leftoverSeconds);
+ }
+
+ /** Returns true when the color's perceived luminance is closer to black than white. */
+ static boolean isColorDark(int color) {
+ // Standard luma weights (ITU-R BT.601) normalized to [0, 1].
+ final double luminance =
+ (0.299 * Color.red(color) + 0.587 * Color.green(color) + 0.114 * Color.blue(color)) / 255;
+ return 1 - luminance >= 0.5;
+ }
+
+ /** Returns {@code color} with its alpha channel scaled by {@code factor}. */
+ static int adjustAlpha(int color, @SuppressWarnings("SameParameterValue") float factor) {
+ final int scaledAlpha = Math.round(Color.alpha(color) * factor);
+ return Color.argb(scaledAlpha, Color.red(color), Color.green(color), Color.blue(color));
+ }
+
+ /** Resolves a color theme attribute, returning 0 when the attribute is unset. */
+ static int resolveColor(Context context, @AttrRes int attr) {
+ return resolveColor(context, attr, 0);
+ }
+
+ private static int resolveColor(Context context, @AttrRes int attr, int fallback) {
+ final TypedArray values = context.getTheme().obtainStyledAttributes(new int[] {attr});
+ try {
+ return values.getColor(0, fallback);
+ } finally {
+ // TypedArrays are pooled; always recycle.
+ values.recycle();
+ }
+ }
+
+ /** Resolves a drawable theme attribute, returning null when the attribute is unset. */
+ static Drawable resolveDrawable(Context context, @AttrRes int attr) {
+ return resolveDrawable(context, attr, null);
+ }
+
+ private static Drawable resolveDrawable(
+ Context context,
+ @AttrRes int attr,
+ @SuppressWarnings("SameParameterValue") Drawable fallback) {
+ final TypedArray values = context.getTheme().obtainStyledAttributes(new int[] {attr});
+ try {
+ Drawable resolved = values.getDrawable(0);
+ if (resolved == null && fallback != null) resolved = fallback;
+ return resolved;
+ } finally {
+ values.recycle();
+ }
+ }
+}
diff --git a/app/src/main/java/tabian/com/instagramclone2/materialcamera/CaptureActivity.java b/app/src/main/java/tabian/com/instagramclone2/materialcamera/CaptureActivity.java
new file mode 100644
index 0000000..984e701
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/materialcamera/CaptureActivity.java
@@ -0,0 +1,16 @@
+package tabian.com.instagramclone2.materialcamera;
+
+import android.app.Fragment;
+import android.support.annotation.NonNull;
+
+import tabian.com.instagramclone2.materialcamera.internal.BaseCaptureActivity;
+import tabian.com.instagramclone2.materialcamera.internal.CameraFragment;
+
+/** Capture screen whose preview/controls UI is supplied by {@link CameraFragment}. */
+public class CaptureActivity extends BaseCaptureActivity {
+
+ /** Returns the fragment hosted by this capture activity. */
+ @NonNull
+ @Override
+ public Fragment getFragment() {
+ return CameraFragment.newInstance();
+ }
+}
diff --git a/app/src/main/java/tabian/com/instagramclone2/materialcamera/CaptureActivity2.java b/app/src/main/java/tabian/com/instagramclone2/materialcamera/CaptureActivity2.java
new file mode 100644
index 0000000..c3fa7f6
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/materialcamera/CaptureActivity2.java
@@ -0,0 +1,17 @@
+package tabian.com.instagramclone2.materialcamera;
+
+import android.app.Fragment;
+import android.support.annotation.NonNull;
+
+import tabian.com.instagramclone2.materialcamera.internal.BaseCaptureActivity;
+import tabian.com.instagramclone2.materialcamera.internal.Camera2Fragment;
+
+/** Capture screen whose preview/controls UI is supplied by {@link Camera2Fragment}. */
+public class CaptureActivity2 extends BaseCaptureActivity {
+
+ /** Returns the fragment hosted by this capture activity. */
+ @NonNull
+ @Override
+ public Fragment getFragment() {
+ return Camera2Fragment.newInstance();
+ }
+}
diff --git a/app/src/main/java/tabian/com/instagramclone2/materialcamera/ICallback.java b/app/src/main/java/tabian/com/instagramclone2/materialcamera/ICallback.java
new file mode 100644
index 0000000..7ee9f55
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/materialcamera/ICallback.java
@@ -0,0 +1,9 @@
+package tabian.com.instagramclone2.materialcamera;
+
+/** Completion callback for an asynchronous (background) operation. */
+public interface ICallback {
+ /**
+ * It is called when the background operation completes. If the operation is successful, {@code
+ * exception} will be {@code null}.
+ */
+ void done(Exception exception);
+}
diff --git a/app/src/main/java/tabian/com/instagramclone2/materialcamera/MaterialCamera.java b/app/src/main/java/tabian/com/instagramclone2/materialcamera/MaterialCamera.java
new file mode 100644
index 0000000..c794160
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/materialcamera/MaterialCamera.java
@@ -0,0 +1,380 @@
+package tabian.com.instagramclone2.materialcamera;
+
+import android.app.Activity;
+import android.content.Context;
+import android.content.Intent;
+import android.media.CamcorderProfile;
+import android.support.annotation.AttrRes;
+import android.support.annotation.ColorInt;
+import android.support.annotation.ColorRes;
+import android.support.annotation.DrawableRes;
+import android.support.annotation.FloatRange;
+import android.support.annotation.IntDef;
+import android.support.annotation.IntRange;
+import android.support.annotation.NonNull;
+import android.support.annotation.Nullable;
+import android.support.annotation.StringRes;
+import android.support.v4.content.ContextCompat;
+import android.util.Log;
+
+import com.afollestad.materialdialogs.util.DialogUtils;
+
+import java.io.File;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+import tabian.com.instagramclone2.R;
+import tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey;
+import tabian.com.instagramclone2.materialcamera.util.CameraUtil;
+
+/** @author Aidan Follestad (afollestad) */
+@SuppressWarnings("WeakerAccess")
+public class MaterialCamera {
+
+ private static final String TAG = "MaterialCamera";
+
+ @IntDef({QUALITY_HIGH, QUALITY_LOW, QUALITY_480P, QUALITY_720P, QUALITY_1080P})
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface QualityProfile {}
+
+ public static final int QUALITY_HIGH = CamcorderProfile.QUALITY_HIGH;
+ public static final int QUALITY_LOW = CamcorderProfile.QUALITY_LOW;
+ public static final int QUALITY_480P = CamcorderProfile.QUALITY_480P;
+ public static final int QUALITY_720P = CamcorderProfile.QUALITY_720P;
+ public static final int QUALITY_1080P = CamcorderProfile.QUALITY_1080P;
+
+ public static final String ERROR_EXTRA = "mcam_error";
+ public static final String STATUS_EXTRA = "mcam_status";
+ public static final String DELETE_UPLOAD_FILE_EXTRA = "mcam_delete_upload_file";
+
+ public static final int STATUS_RECORDED = 1;
+ public static final int STATUS_RETRY = 2;
+
+ private Context mContext;
+ private Activity mActivityContext;
+ private android.app.Fragment mAppFragment;
+ private android.support.v4.app.Fragment mSupportFragment;
+ private boolean mIsFragment = false;
+ private long mLengthLimit = -1;
+ private boolean mAllowRetry = true;
+ private boolean mAutoSubmit = false;
+ private String mSaveDir;
+ private int mPrimaryColor;
+ private boolean mShowPortraitWarning = true;
+ private boolean mAllowChangeCamera = true;
+ private boolean mDefaultToFrontFacing = false;
+ private boolean mCountdownImmediately = false;
+ private boolean mRetryExists = false;
+ private boolean mRestartTimerOnRetry = false;
+ private boolean mContinueTimerInPlayback = true;
+ private boolean mForceCamera1 = false;
+ private boolean mStillShot;
+ private boolean mAudioDisabled = false;
+ private long mAutoRecord = -1;
+
+ private int mVideoEncodingBitRate = -1;
+ private int mAudioEncodingBitRate = -1;
+ private int mVideoFrameRate = -1;
+ private int mVideoPreferredHeight = -1;
+ private float mVideoPreferredAspect = -1f;
+ private long mMaxFileSize = -1;
+ private int mQualityProfile = -1;
+
+ private int mIconRecord;
+ private int mIconStop;
+ private int mIconFrontCamera;
+ private int mIconRearCamera;
+ private int mIconPlay;
+ private int mIconPause;
+ private int mIconRestart;
+
+ private int mLabelRetry;
+ private int mLabelConfirm;
+
+ public MaterialCamera(@NonNull Activity context) {
+ mContext = context;
+ mActivityContext = context;
+ mPrimaryColor = DialogUtils.resolveColor(context, R.attr.colorPrimary);
+ }
+
+ public MaterialCamera(@NonNull android.app.Fragment context) {
+ mIsFragment = true;
+ mContext = context.getActivity();
+ mAppFragment = context;
+ mSupportFragment = null;
+ mPrimaryColor = DialogUtils.resolveColor(mContext, R.attr.colorPrimary);
+ }
+
+ public MaterialCamera(@NonNull android.support.v4.app.Fragment context) {
+ mIsFragment = true;
+ mContext = context.getContext();
+ mSupportFragment = context;
+ mAppFragment = null;
+ mPrimaryColor = DialogUtils.resolveColor(mContext, R.attr.colorPrimary);
+ }
+
+ public MaterialCamera countdownMillis(long lengthLimitMs) {
+ mLengthLimit = lengthLimitMs;
+ return this;
+ }
+
+ public MaterialCamera countdownSeconds(float lengthLimitSec) {
+ return countdownMillis((int) (lengthLimitSec * 1000f));
+ }
+
+ public MaterialCamera countdownMinutes(float lengthLimitMin) {
+ return countdownMillis((int) (lengthLimitMin * 1000f * 60f));
+ }
+
+ public MaterialCamera allowRetry(boolean allowRetry) {
+ mAllowRetry = allowRetry;
+ return this;
+ }
+
+ public MaterialCamera autoSubmit(boolean autoSubmit) {
+ mAutoSubmit = autoSubmit;
+ return this;
+ }
+
+ public MaterialCamera countdownImmediately(boolean immediately) {
+ mCountdownImmediately = immediately;
+ return this;
+ }
+
+ public MaterialCamera saveDir(@Nullable File dir) {
+ if (dir == null) return saveDir((String) null);
+ return saveDir(dir.getAbsolutePath());
+ }
+
+ public MaterialCamera saveDir(@Nullable String dir) {
+ mSaveDir = dir;
+ return this;
+ }
+
+ public MaterialCamera primaryColor(@ColorInt int color) {
+ mPrimaryColor = color;
+ return this;
+ }
+
+ public MaterialCamera primaryColorRes(@ColorRes int colorRes) {
+ return primaryColor(ContextCompat.getColor(mContext, colorRes));
+ }
+
+ public MaterialCamera primaryColorAttr(@AttrRes int colorAttr) {
+ return primaryColor(DialogUtils.resolveColor(mContext, colorAttr));
+ }
+
+ public MaterialCamera showPortraitWarning(boolean show) {
+ mShowPortraitWarning = show;
+ return this;
+ }
+
+ public MaterialCamera allowChangeCamera(boolean allowChangeCamera) {
+ mAllowChangeCamera = allowChangeCamera;
+ return this;
+ }
+
+ public MaterialCamera defaultToFrontFacing(boolean frontFacing) {
+ mDefaultToFrontFacing = frontFacing;
+ return this;
+ }
+
+ public MaterialCamera retryExits(boolean exits) {
+ mRetryExists = exits;
+ return this;
+ }
+
+ public MaterialCamera restartTimerOnRetry(boolean restart) {
+ mRestartTimerOnRetry = restart;
+ return this;
+ }
+
+ public MaterialCamera continueTimerInPlayback(boolean continueTimer) {
+ mContinueTimerInPlayback = continueTimer;
+ return this;
+ }
+
+ public MaterialCamera forceCamera1() {
+ mForceCamera1 = true;
+ return this;
+ }
+
+ public MaterialCamera audioDisabled(boolean disabled) {
+ mAudioDisabled = disabled;
+ return this;
+ }
+
+ /** @deprecated Renamed to videoEncodingBitRate(int). */
+ @Deprecated
+ public MaterialCamera videoBitRate(@IntRange(from = 1, to = Integer.MAX_VALUE) int rate) {
+ return videoEncodingBitRate(rate);
+ }
+
+ public MaterialCamera videoEncodingBitRate(@IntRange(from = 1, to = Integer.MAX_VALUE) int rate) {
+ mVideoEncodingBitRate = rate;
+ return this;
+ }
+
+ public MaterialCamera audioEncodingBitRate(@IntRange(from = 1, to = Integer.MAX_VALUE) int rate) {
+ mAudioEncodingBitRate = rate;
+ return this;
+ }
+
+ public MaterialCamera videoFrameRate(@IntRange(from = 1, to = Integer.MAX_VALUE) int rate) {
+ mVideoFrameRate = rate;
+ return this;
+ }
+
+ public MaterialCamera videoPreferredHeight(
+ @IntRange(from = 1, to = Integer.MAX_VALUE) int height) {
+ mVideoPreferredHeight = height;
+ return this;
+ }
+
+ public MaterialCamera videoPreferredAspect(
+ @FloatRange(from = 0.1, to = Float.MAX_VALUE) float ratio) {
+ mVideoPreferredAspect = ratio;
+ return this;
+ }
+
+ public MaterialCamera maxAllowedFileSize(long size) {
+ mMaxFileSize = size;
+ return this;
+ }
+
+ public MaterialCamera qualityProfile(@QualityProfile int profile) {
+ mQualityProfile = profile;
+ return this;
+ }
+
+ public MaterialCamera iconRecord(@DrawableRes int iconRes) {
+ mIconRecord = iconRes;
+ return this;
+ }
+
+ public MaterialCamera iconStop(@DrawableRes int iconRes) {
+ mIconStop = iconRes;
+ return this;
+ }
+
+ public MaterialCamera iconFrontCamera(@DrawableRes int iconRes) {
+ mIconFrontCamera = iconRes;
+ return this;
+ }
+
+ public MaterialCamera iconRearCamera(@DrawableRes int iconRes) {
+ mIconRearCamera = iconRes;
+ return this;
+ }
+
+ public MaterialCamera iconPlay(@DrawableRes int iconRes) {
+ mIconPlay = iconRes;
+ return this;
+ }
+
+ public MaterialCamera iconPause(@DrawableRes int iconRes) {
+ mIconPause = iconRes;
+ return this;
+ }
+
+ public MaterialCamera iconRestart(@DrawableRes int iconRes) {
+ mIconRestart = iconRes;
+ return this;
+ }
+
+ public MaterialCamera labelRetry(@StringRes int stringRes) {
+ mLabelRetry = stringRes;
+ return this;
+ }
+
+ @Deprecated
+ /*
+ This has been replaced with labelConfirm
+ */
+ public MaterialCamera labelUseVideo(@StringRes int stringRes) {
+ mLabelConfirm = stringRes;
+ return this;
+ }
+
+ public MaterialCamera labelConfirm(@StringRes int stringRes) {
+ mLabelConfirm = stringRes;
+ return this;
+ }
+
+ /** Will take a still shot instead of recording. */
+ public MaterialCamera stillShot() {
+ mStillShot = true;
+ return this;
+ }
+
+ public MaterialCamera autoRecordWithDelayMs(
+ @IntRange(from = -1, to = Long.MAX_VALUE) long delayMillis) {
+ mAutoRecord = delayMillis;
+ return this;
+ }
+
+ public MaterialCamera autoRecordWithDelaySec(
+ @IntRange(from = -1, to = Long.MAX_VALUE) int delaySeconds) {
+ mAutoRecord = delaySeconds * 1000;
+ return this;
+ }
+
+ public Intent getIntent() {
+ Log.d(TAG, "getIntent: has camera2: " + CameraUtil.hasCamera2(mContext, mStillShot));
+ Log.d(TAG, "getIntent: force camera1: " + mForceCamera1);
+ final Class> cls =
+// !mForceCamera1 && CameraUtil.hasCamera2(mContext, mStillShot)
+ !mForceCamera1
+ ? CaptureActivity2.class
+ : CaptureActivity.class;
+ Intent intent =
+ new Intent(mContext, cls)
+ .putExtra(CameraIntentKey.LENGTH_LIMIT, mLengthLimit)
+ .putExtra(CameraIntentKey.ALLOW_RETRY, mAllowRetry)
+ .putExtra(CameraIntentKey.AUTO_SUBMIT, mAutoSubmit)
+ .putExtra(CameraIntentKey.SAVE_DIR, mSaveDir)
+ .putExtra(CameraIntentKey.PRIMARY_COLOR, mPrimaryColor)
+ .putExtra(CameraIntentKey.SHOW_PORTRAIT_WARNING, mShowPortraitWarning)
+ .putExtra(CameraIntentKey.ALLOW_CHANGE_CAMERA, mAllowChangeCamera)
+ .putExtra(CameraIntentKey.DEFAULT_TO_FRONT_FACING, mDefaultToFrontFacing)
+ .putExtra(CameraIntentKey.COUNTDOWN_IMMEDIATELY, mCountdownImmediately)
+ .putExtra(CameraIntentKey.RETRY_EXITS, mRetryExists)
+ .putExtra(CameraIntentKey.RESTART_TIMER_ON_RETRY, mRestartTimerOnRetry)
+ .putExtra(CameraIntentKey.CONTINUE_TIMER_IN_PLAYBACK, mContinueTimerInPlayback)
+ .putExtra(CameraIntentKey.STILL_SHOT, mStillShot)
+ .putExtra(CameraIntentKey.AUTO_RECORD, mAutoRecord)
+ .putExtra(CameraIntentKey.AUDIO_DISABLED, mAudioDisabled);
+
+ if (mVideoEncodingBitRate > 0)
+ intent.putExtra(CameraIntentKey.VIDEO_BIT_RATE, mVideoEncodingBitRate);
+ if (mAudioEncodingBitRate > 0)
+ intent.putExtra(CameraIntentKey.AUDIO_ENCODING_BIT_RATE, mAudioEncodingBitRate);
+ if (mVideoFrameRate > 0) intent.putExtra(CameraIntentKey.VIDEO_FRAME_RATE, mVideoFrameRate);
+ if (mVideoPreferredHeight > 0)
+ intent.putExtra(CameraIntentKey.VIDEO_PREFERRED_HEIGHT, mVideoPreferredHeight);
+ if (mVideoPreferredAspect > 0f)
+ intent.putExtra(CameraIntentKey.VIDEO_PREFERRED_ASPECT, mVideoPreferredAspect);
+ if (mMaxFileSize > -1) intent.putExtra(CameraIntentKey.MAX_ALLOWED_FILE_SIZE, mMaxFileSize);
+ if (mQualityProfile > -1) intent.putExtra(CameraIntentKey.QUALITY_PROFILE, mQualityProfile);
+
+ if (mIconRecord != 0) intent.putExtra(CameraIntentKey.ICON_RECORD, mIconRecord);
+ if (mIconStop != 0) intent.putExtra(CameraIntentKey.ICON_STOP, mIconStop);
+ if (mIconFrontCamera != 0) intent.putExtra(CameraIntentKey.ICON_FRONT_CAMERA, mIconFrontCamera);
+ if (mIconRearCamera != 0) intent.putExtra(CameraIntentKey.ICON_REAR_CAMERA, mIconRearCamera);
+ if (mIconPlay != 0) intent.putExtra(CameraIntentKey.ICON_PLAY, mIconPlay);
+ if (mIconPause != 0) intent.putExtra(CameraIntentKey.ICON_PAUSE, mIconPause);
+ if (mIconRestart != 0) intent.putExtra(CameraIntentKey.ICON_RESTART, mIconRestart);
+ if (mLabelRetry != 0) intent.putExtra(CameraIntentKey.LABEL_RETRY, mLabelRetry);
+ if (mLabelConfirm != 0) intent.putExtra(CameraIntentKey.LABEL_CONFIRM, mLabelConfirm);
+
+ return intent;
+ }
+
+ public void start(int requestCode) {
+ if (mIsFragment && mSupportFragment != null)
+ mSupportFragment.startActivityForResult(getIntent(), requestCode);
+ else if (mIsFragment && mAppFragment != null)
+ mAppFragment.startActivityForResult(getIntent(), requestCode);
+ else mActivityContext.startActivityForResult(getIntent(), requestCode);
+ }
+}
diff --git a/app/src/main/java/tabian/com/instagramclone2/materialcamera/TimeLimitReachedException.java b/app/src/main/java/tabian/com/instagramclone2/materialcamera/TimeLimitReachedException.java
new file mode 100644
index 0000000..68bfcfd
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/materialcamera/TimeLimitReachedException.java
@@ -0,0 +1,9 @@
+package tabian.com.instagramclone2.materialcamera;
+
+/**
+ * Signals that the capture countdown elapsed before any recording was started.
+ *
+ * @author Aidan Follestad (afollestad)
+ */
+public class TimeLimitReachedException extends Exception {
+
+ public TimeLimitReachedException() {
+ super("You've reached the time limit without starting a recording.");
+ }
+}
diff --git a/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/AutoFitTextureView.java b/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/AutoFitTextureView.java
new file mode 100644
index 0000000..e8bba44
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/AutoFitTextureView.java
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package tabian.com.instagramclone2.materialcamera.internal;
+
+import android.content.Context;
+import android.util.AttributeSet;
+import android.view.TextureView;
+
+/** A {@link TextureView} that can be adjusted to a specified aspect ratio. */
+/** A {@link TextureView} that can be adjusted to a specified aspect ratio. */
+class AutoFitTextureView extends TextureView {
+
+ // Relative aspect ratio; both stay zero until setAspectRatio() is called,
+ // in which case measurement falls through unchanged.
+ private int mRatioWidth = 0;
+ private int mRatioHeight = 0;
+
+ public AutoFitTextureView(Context context) {
+ this(context, null);
+ }
+
+ public AutoFitTextureView(Context context, AttributeSet attrs) {
+ this(context, attrs, 0);
+ }
+
+ public AutoFitTextureView(Context context, AttributeSet attrs, int defStyle) {
+ super(context, attrs, defStyle);
+ }
+
+ /**
+ * Sets the aspect ratio for this view. Only the ratio between the two values
+ * matters: setAspectRatio(2, 3) and setAspectRatio(4, 6) are equivalent.
+ *
+ * @param width relative horizontal size
+ * @param height relative vertical size
+ */
+ public void setAspectRatio(int width, int height) {
+ if (width < 0 || height < 0) {
+ throw new IllegalArgumentException("Size cannot be negative.");
+ }
+ mRatioWidth = width;
+ mRatioHeight = height;
+ requestLayout();
+ }
+
+ @Override
+ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
+ super.onMeasure(widthMeasureSpec, heightMeasureSpec);
+ final int measuredWidth = MeasureSpec.getSize(widthMeasureSpec);
+ final int measuredHeight = MeasureSpec.getSize(heightMeasureSpec);
+ if (mRatioWidth == 0 || mRatioHeight == 0) {
+ // No ratio requested yet; accept the measured size as-is.
+ setMeasuredDimension(measuredWidth, measuredHeight);
+ return;
+ }
+ if (measuredWidth < measuredHeight * mRatioWidth / mRatioHeight) {
+ // Width is the limiting dimension; derive height from it.
+ setMeasuredDimension(measuredWidth, measuredWidth * mRatioHeight / mRatioWidth);
+ } else {
+ // Height is the limiting dimension; derive width from it.
+ setMeasuredDimension(measuredHeight * mRatioWidth / mRatioHeight, measuredHeight);
+ }
+ }
+}
diff --git a/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/BaseCameraFragment.java b/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/BaseCameraFragment.java
new file mode 100644
index 0000000..cf974c6
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/BaseCameraFragment.java
@@ -0,0 +1,618 @@
+package tabian.com.instagramclone2.materialcamera.internal;
+
+import android.annotation.SuppressLint;
+import android.app.Activity;
+import android.app.Fragment;
+import android.content.Intent;
+import android.content.pm.ActivityInfo;
+import android.content.res.ColorStateList;
+import android.graphics.drawable.Drawable;
+import android.graphics.drawable.RippleDrawable;
+import android.media.MediaRecorder;
+import android.os.Build;
+import android.os.Bundle;
+import android.os.Handler;
+import android.support.annotation.DrawableRes;
+import android.support.annotation.NonNull;
+import android.support.v4.content.ContextCompat;
+import android.support.v4.graphics.drawable.DrawableCompat;
+import android.support.v7.content.res.AppCompatResources;
+import android.util.Log;
+import android.view.LayoutInflater;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.ImageButton;
+import android.widget.ImageView;
+import android.widget.Switch;
+import android.widget.TextView;
+
+import com.afollestad.materialdialogs.DialogAction;
+import com.afollestad.materialdialogs.MaterialDialog;
+
+import java.io.File;
+
+
+import tabian.com.instagramclone2.R;
+import tabian.com.instagramclone2.materialcamera.MaterialCamera;
+import tabian.com.instagramclone2.materialcamera.util.CameraUtil;
+import tabian.com.instagramclone2.materialcamera.util.Degrees;
+
+import static android.app.Activity.RESULT_CANCELED;
+import static tabian.com.instagramclone2.materialcamera.internal.BaseCaptureActivity.CAMERA_POSITION_BACK;
+import static tabian.com.instagramclone2.materialcamera.internal.BaseCaptureActivity.FLASH_MODE_ALWAYS_ON;
+import static tabian.com.instagramclone2.materialcamera.internal.BaseCaptureActivity.FLASH_MODE_AUTO;
+import static tabian.com.instagramclone2.materialcamera.internal.BaseCaptureActivity.FLASH_MODE_OFF;
+
+
+/** @author Aidan Follestad (afollestad) */
+abstract class BaseCameraFragment extends Fragment
+ implements CameraUriInterface, View.OnClickListener {
+
+ private static final String TAG = "BaseCameraFragment";
+ private static final int RESULT_START_CAMERA = 4567;
+ private static final int RESULT_START_VIDEO = 4589;
+
+ protected ImageButton mButtonVideo;
+ protected ImageButton mButtonStillshot;
+ protected ImageButton mButtonFacing;
+ protected ImageButton mButtonFlash;
+ protected ImageButton mButtonFlashVideo;
+ protected TextView mRecordDuration;
+// protected TextView mDelayStartCountdown;
+ protected Switch mVideoSwitch;
+
+ private boolean mIsRecording;
+ protected String mOutputUri;
+ protected BaseCaptureInterface mInterface;
+ protected Handler mPositionHandler;
+ protected Handler mVideoPlayingHandler;
+ protected Runnable mVideoPlayingRunnable;
+ protected MediaRecorder mMediaRecorder;
+ private int mIconTextColor;
+ private int mIconTextColorDark;
+ private int mRecordButtonColor;
+
+ /**
+ * Logs {@code message} tagged with the simple name of {@code context}'s class
+ * (or of {@code context} itself when a Class object is passed).
+ */
+ protected static void LOG(Object context, String message) {
+ Log.d(
+ context instanceof Class<?>
+ ? ((Class<?>) context).getSimpleName()
+ : context.getClass().getSimpleName(),
+ message);
+ }
+
+ // One-second ticker for the recording timer label: counts down toward the
+ // recording end when a length limit is set, otherwise counts up from start.
+ private final Runnable mPositionUpdater =
+ new Runnable() {
+ @Override
+ public void run() {
+ if (mInterface == null || mRecordDuration == null) return;
+ final long mRecordStart = mInterface.getRecordingStart();
+ final long mRecordEnd = mInterface.getRecordingEnd();
+ // Both sentinels at -1 means no recording is active; nothing to show.
+ if (mRecordStart == -1 && mRecordEnd == -1) return;
+ final long now = System.currentTimeMillis();
+ if (mRecordEnd != -1) {
+ if (now >= mRecordEnd) {
+ // Length limit reached — stop recording (true presumably marks
+ // "stopped because the limit was hit"; confirm in subclasses).
+ stopRecordingVideo(true);
+ } else {
+ final long diff = mRecordEnd - now;
+ // Countdown display, e.g. "-00:42".
+ mRecordDuration.setText(String.format("-%s", CameraUtil.getDurationString(diff)));
+ }
+ } else {
+ // No limit: show elapsed recording time.
+ mRecordDuration.setText(CameraUtil.getDurationString(now - mRecordStart));
+ }
+ // Re-arm while a handler is attached.
+ if (mPositionHandler != null) mPositionHandler.postDelayed(this, 1000);
+ }
+ };
+
+ // Inflates the shared video-capture layout; subclasses wire behavior in
+ // onViewCreated rather than overriding this (hence final).
+ @Override
+ public final View onCreateView(
+ LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
+ return inflater.inflate(R.layout.mcam_fragment_videocapture, container, false);
+ }
+
+
+protected void setImageRes(ImageView iv, @DrawableRes int res) {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP
+ && iv.getBackground() instanceof RippleDrawable) {
+ if(iv == mButtonVideo){
+ RippleDrawable rd = (RippleDrawable) iv.getBackground();
+ rd.setColor(ColorStateList.valueOf(CameraUtil.adjustAlpha(mRecordButtonColor, 0.3f)));
+ }
+ else if(iv == mButtonStillshot){
+ RippleDrawable rd = (RippleDrawable) iv.getBackground();
+ rd.setColor(ColorStateList.valueOf(CameraUtil.adjustAlpha(mIconTextColorDark, 0.3f)));
+ }
+ else{
+ RippleDrawable rd = (RippleDrawable) iv.getBackground();
+ rd.setColor(ColorStateList.valueOf(CameraUtil.adjustAlpha(mIconTextColor, 0.3f)));
+ }
+
+ }
+ if(iv == mButtonVideo){
+ Drawable d = AppCompatResources.getDrawable(iv.getContext(), res);
+ d = DrawableCompat.wrap(d.mutate());
+ DrawableCompat.setTint(d, mRecordButtonColor);
+ iv.setImageDrawable(d);
+ }
+ else if(iv == mButtonStillshot){
+ Drawable d = AppCompatResources.getDrawable(iv.getContext(), res);
+ d = DrawableCompat.wrap(d.mutate());
+ DrawableCompat.setTint(d, mIconTextColorDark);
+ iv.setImageDrawable(d);
+ }
+ else{
+ Drawable d = AppCompatResources.getDrawable(iv.getContext(), res);
+ d = DrawableCompat.wrap(d.mutate());
+ DrawableCompat.setTint(d, mIconTextColor);
+ iv.setImageDrawable(d);
+ }
+ }
+
+ // Binds all capture controls, applies theme-derived colors, and toggles
+ // widget visibility for stillshot vs. video mode.
+ @SuppressLint("SetTextI18n")
+ @Override
+ public void onViewCreated(View view, Bundle savedInstanceState) {
+ super.onViewCreated(view, savedInstanceState);
+
+// mDelayStartCountdown = (TextView) view.findViewById(R.id.delayStartCountdown);
+// mButtonVideo = (ImageButton) view.findViewById(R.id.video);
+ mButtonVideo = (ImageButton) view.findViewById(R.id.videoButton);
+ mButtonStillshot = (ImageButton) view.findViewById(R.id.stillshot);
+ mRecordDuration = (TextView) view.findViewById(R.id.recordDuration);
+ mButtonFacing = (ImageButton) view.findViewById(R.id.facing);
+ mVideoSwitch = (Switch) view.findViewById(R.id.video_switch);
+ mButtonFlash = (ImageButton) view.findViewById(R.id.flashStillshot);
+ mButtonFlashVideo = (ImageButton) view.findViewById(R.id.flashVideo);
+ setupFlashMode();
+
+ mButtonVideo.setOnClickListener(this);
+ mButtonStillshot.setOnClickListener(this);
+ mButtonFacing.setOnClickListener(this);
+ mButtonFlash.setOnClickListener(this);
+ mButtonFlashVideo.setOnClickListener(this);
+ mVideoSwitch.setOnClickListener(this);
+
+ // Derive icon colors from the primary color passed by the launcher so the
+ // controls stay legible on both dark and light themes.
+ int primaryColor = getArguments().getInt(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.PRIMARY_COLOR);
+ if (CameraUtil.isColorDark(primaryColor)) {
+ mIconTextColor = ContextCompat.getColor(getActivity(), R.color.mcam_color_light);
+ mIconTextColorDark = ContextCompat.getColor(getActivity(), R.color.mcam_color_dark);
+ primaryColor = CameraUtil.darkenColor(primaryColor);
+ } else {
+ mIconTextColor = ContextCompat.getColor(getActivity(), R.color.mcam_color_dark);
+ mIconTextColorDark = ContextCompat.getColor(getActivity(), R.color.mcam_color_dark);
+ }
+ mRecordButtonColor = ContextCompat.getColor(getActivity(), R.color.colordarkRed);
+ // Background tint for the controls frame intentionally disabled below.
+ view.findViewById(R.id.controlsFrame);
+// .setBackgroundColor(primaryColor);
+ mRecordDuration.setTextColor(mIconTextColor);
+
+ // Restore the record/stop icon to match an in-flight recording, if any.
+ if (mMediaRecorder != null && mIsRecording) {
+ setImageRes(mButtonVideo, mInterface.iconStop());
+ } else {
+ setImageRes(mButtonVideo, mInterface.iconRecord());
+ mInterface.setDidRecord(false);
+ }
+
+ if (savedInstanceState != null) mOutputUri = savedInstanceState.getString("output_uri");
+
+ // Stillshot mode swaps the video controls for the shutter + photo flash.
+ if (mInterface.useStillshot()) {
+ mButtonVideo.setVisibility(View.GONE);
+ mRecordDuration.setVisibility(View.GONE);
+ mButtonStillshot.setVisibility(View.VISIBLE);
+ setImageRes(mButtonStillshot, mInterface.iconStillshot());
+ mButtonFlash.setVisibility(View.VISIBLE);
+ mButtonFlashVideo.setVisibility(View.GONE);
+ }
+ else{
+ mButtonFlash.setVisibility(View.GONE);
+ mButtonFlashVideo.setVisibility(View.VISIBLE);
+ }
+
+// if (mInterface.autoRecordDelay() < 1000) {
+// mDelayStartCountdown.setVisibility(View.GONE);
+// } else {
+// mDelayStartCountdown.setText(Long.toString(mInterface.autoRecordDelay() / 1000));
+// }
+ setupFlashModeVideo();
+
+ if(mInterface.useStillshot()){
+ mVideoSwitch.setChecked(true);
+ }
+
+ // Hide camera-switching (and the video flash with it) when only one camera
+ // is usable or the platform is Chromium-based.
+ if (mInterface.shouldHideCameraFacing() || CameraUtil.isChromium()) {
+ mButtonFacing.setVisibility(View.GONE);
+ if(!mInterface.useStillshot()){
+ mButtonFlashVideo.setVisibility(View.GONE);
+ }
+ } else {
+ setImageRes(
+ mButtonFacing,
+ mInterface.getCurrentCameraPosition() == CAMERA_POSITION_BACK
+ ? mInterface.iconFrontCamera()
+ : mInterface.iconRearCamera());
+ }
+ }
+
+ protected void onFlashModesLoaded() {
+ if (getCurrentCameraPosition() != BaseCaptureActivity.CAMERA_POSITION_FRONT) {
+ invalidateFlash(false);
+ }
+ }
+
+  // True once the auto-record countdown has fired for this camera session.
+  private boolean mDidAutoRecord = false;
+  // Posts delayed auto-record countdown ticks; null when no countdown is pending.
+  private Handler mDelayHandler;
+  // Seconds remaining in the auto-record countdown; -1 when inactive.
+  private int mDelayCurrentSecond = -1;
+
+  /**
+   * Called once the camera preview is live. When an auto-record delay is configured
+   * (video mode only), schedules recording to start automatically: immediately,
+   * after a sub-second delay, or via a once-per-second countdown.
+   */
+  protected void onCameraOpened() {
+    // Bail out when auto-record does not apply: already fired, detached,
+    // stillshot mode, or no delay configured (autoRecordDelay() < 0).
+    if (mDidAutoRecord
+        || mInterface == null
+        || mInterface.useStillshot()
+        || mInterface.autoRecordDelay() < 0
+        || getActivity() == null) {
+//      mDelayStartCountdown.setVisibility(View.GONE);
+      mDelayHandler = null;
+      return;
+    }
+    mDidAutoRecord = true;
+    // Hide controls that must not be touched while auto-record is pending.
+    mButtonFacing.setVisibility(View.GONE);
+    mButtonFlashVideo.setVisibility(View.GONE);
+
+    if (mInterface.autoRecordDelay() == 0) {
+      // Zero delay: start recording right away.
+//      mDelayStartCountdown.setVisibility(View.GONE);
+      mIsRecording = startRecordingVideo();
+      mDelayHandler = null;
+      return;
+    }
+
+    mDelayHandler = new Handler();
+    mButtonVideo.setEnabled(false);
+
+    if (mInterface.autoRecordDelay() < 1000) {
+      // Less than a second delay
+//      mDelayStartCountdown.setVisibility(View.GONE);
+      mDelayHandler.postDelayed(
+          new Runnable() {
+            @Override
+            public void run() {
+              // Skip if the fragment detached or recording already started meanwhile.
+              if (!isAdded() || getActivity() == null || mIsRecording) return;
+              mButtonVideo.setEnabled(true);
+              mIsRecording = startRecordingVideo();
+              mDelayHandler = null;
+            }
+          },
+          mInterface.autoRecordDelay());
+      return;
+    }
+
+    // Whole-second delay: tick down once per second, then start recording at zero.
+//    mDelayStartCountdown.setVisibility(View.VISIBLE);
+    mDelayCurrentSecond = (int) mInterface.autoRecordDelay() / 1000;
+    mDelayHandler.postDelayed(
+        new Runnable() {
+          @SuppressLint("SetTextI18n")
+          @Override
+          public void run() {
+            if (!isAdded() || getActivity() == null || mIsRecording) return;
+            mDelayCurrentSecond -= 1;
+//            mDelayStartCountdown.setText(Integer.toString(mDelayCurrentSecond));
+
+            if (mDelayCurrentSecond == 0) {
+//              mDelayStartCountdown.setVisibility(View.GONE);
+              mButtonVideo.setEnabled(true);
+              mIsRecording = startRecordingVideo();
+              mDelayHandler = null;
+              return;
+            }
+
+            // Re-arm for the next one-second tick.
+            mDelayHandler.postDelayed(this, 1000);
+          }
+        },
+        1000);
+  }
+
+  /**
+   * Drops all view references when the fragment's view hierarchy is destroyed,
+   * so the destroyed views can be garbage-collected.
+   */
+  @Override
+  public void onDestroyView() {
+    super.onDestroyView();
+    mButtonVideo = null;
+    mButtonStillshot = null;
+    mButtonFacing = null;
+    mButtonFlash = null;
+    mButtonFlashVideo = null;
+    // Fix: mVideoSwitch was the only view reference not cleared here, leaking
+    // the destroyed Switch (it is bound in onViewCreated like the others).
+    mVideoSwitch = null;
+    mRecordDuration = null;
+  }
+
+  /**
+   * Restores the recording countdown UI when returning to the foreground:
+   * resumes the ticker if it should be running, otherwise shows the remaining limit.
+   */
+  @Override
+  public void onResume() {
+    super.onResume();
+    // Nothing to restore unless a recording-length limit is configured.
+    if (mInterface == null || !mInterface.hasLengthLimit()) {
+      return;
+    }
+    final boolean counterShouldRun =
+        mInterface.countdownImmediately() || mInterface.getRecordingStart() > -1;
+    if (counterShouldRun) {
+      if (mInterface.getRecordingStart() == -1) {
+        mInterface.setRecordingStart(System.currentTimeMillis());
+      }
+      startCounter();
+    } else {
+      mRecordDuration.setText(
+          String.format("-%s", CameraUtil.getDurationString(mInterface.getLengthLimit())));
+    }
+  }
+
+  /**
+   * Captures the host activity as the capture-callback interface.
+   * Uses the deprecated Activity overload to keep pre-API-23 compatibility.
+   */
+  @SuppressWarnings("deprecation")
+  @Override
+  public final void onAttach(Activity activity) {
+    super.onAttach(activity);
+    mInterface = (BaseCaptureInterface) activity;
+  }
+
+  /** Creates a unique temp file for captured video in the configured save directory. */
+  @NonNull
+  protected final File getOutputMediaFile() {
+    final String saveDir =
+        getArguments().getString(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.SAVE_DIR);
+    return CameraUtil.makeTempFile(getActivity(), saveDir, "VID_", ".mp4");
+  }
+
+  /** Creates a unique temp file for a captured still image in the configured save directory. */
+  @NonNull
+  protected final File getOutputPictureFile() {
+    final String saveDir =
+        getArguments().getString(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.SAVE_DIR);
+    return CameraUtil.makeTempFile(getActivity(), saveDir, "IMG_", ".jpg");
+  }
+
+  /** Opens the camera and starts the preview. Implemented by the concrete camera fragment. */
+  public abstract void openCamera();
+
+
+  /** Stops the preview and releases the camera for other apps. Implemented by subclasses. */
+  public abstract void closeCamera();
+
+  /** Releases capture resources in order: camera first, then the recorder, then the duration ticker. */
+  public void cleanup() {
+    closeCamera();
+    releaseRecorder();
+    stopCounter();
+  }
+
+  /** Captures a single photo. Implemented by the concrete camera fragment. */
+  public abstract void takeStillshot();
+
+  /** Re-applies user preferences (e.g. flash mode) to the open camera. */
+  public abstract void onPreferencesUpdated();
+
+  /** Releases the camera, recorder, and ticker whenever the fragment leaves the foreground. */
+  @Override
+  public void onPause() {
+    super.onPause();
+    cleanup();
+  }
+
+  /** Drops the activity-backed callback reference to avoid leaking the host activity. */
+  @Override
+  public final void onDetach() {
+    super.onDetach();
+    mInterface = null;
+  }
+
+ public final void startCounter() {
+ if (mPositionHandler == null) mPositionHandler = new Handler();
+ else mPositionHandler.removeCallbacks(mPositionUpdater);
+ mPositionHandler.post(mPositionUpdater);
+ }
+
+  /** @return the active lens position, or CAMERA_POSITION_UNKNOWN when detached from the host. */
+  @BaseCaptureActivity.CameraPosition
+  public final int getCurrentCameraPosition() {
+    return mInterface == null
+        ? BaseCaptureActivity.CAMERA_POSITION_UNKNOWN
+        : mInterface.getCurrentCameraPosition();
+  }
+
+ public final int getCurrentCameraId() {
+ if (mInterface.getCurrentCameraPosition() == CAMERA_POSITION_BACK)
+ return (Integer) mInterface.getBackCamera();
+ else return (Integer) mInterface.getFrontCamera();
+ }
+
+  /** Stops the recording-duration ticker and releases its handler. */
+  public final void stopCounter() {
+    if (mPositionHandler == null) {
+      return;
+    }
+    mPositionHandler.removeCallbacks(mPositionUpdater);
+    mPositionHandler = null;
+  }
+
+  /**
+   * Stops any in-progress recording and fully releases the MediaRecorder.
+   * Safe to call repeatedly; no-op when no recorder exists.
+   */
+  public final void releaseRecorder() {
+    if (mMediaRecorder != null) {
+      if (mIsRecording) {
+        try {
+          mMediaRecorder.stop();
+        } catch (Throwable t) {
+          // stop() throws when no valid data was captured; discard the unusable output file.
+          //noinspection ResultOfMethodCallIgnored
+          new File(mOutputUri).delete();
+          t.printStackTrace();
+        }
+        mIsRecording = false;
+        Log.d(TAG, "releaseRecorder: releasing media player.");
+      }
+      // reset() returns the recorder to idle before release() frees it.
+      mMediaRecorder.reset();
+      mMediaRecorder.release();
+      mMediaRecorder = null;
+    }
+  }
+
+  /**
+   * Begins a video recording session: starts the length-limit countdown when needed,
+   * locks the screen orientation for the duration of the recording, and flags the
+   * host that media was recorded. Subclasses extend this to drive the recorder.
+   *
+   * @return true — the base class always reports the session as started
+   */
+  public boolean startRecordingVideo() {
+    if (mInterface != null && mInterface.hasLengthLimit() && !mInterface.countdownImmediately()) {
+      // Countdown wasn't started in onResume, start it now
+      if (mInterface.getRecordingStart() == -1)
+        mInterface.setRecordingStart(System.currentTimeMillis());
+      startCounter();
+      Log.d(TAG, "startRecordingVideo: starting recording session.");
+
+    }
+
+    // Freeze the current orientation so the video is not disrupted by rotation.
+    final int orientation = Degrees.getActivityOrientation(getActivity());
+    //noinspection ResourceType
+    Log.d(TAG, "startRecordingVideo: setting orientation: " + orientation);
+    getActivity().setRequestedOrientation(orientation);
+    mInterface.setDidRecord(true);
+
+// if(mVideoPlayingHandler != null){
+// mVideoPlayingHandler.removeCallbacks(mVideoPlayingRunnable);
+// }
+// mVideoPlayingHandler = new Handler();
+// mVideoPlayingRunnable = new Runnable() {
+// @Override
+// public void run() {
+// mVideoPlayingHandler.postDelayed(mVideoPlayingRunnable, 100);
+// if(mIsRecording){
+// Log.d(TAG, "recording: is recording: " + mIsRecording);
+// }
+// }
+// };
+// mVideoPlayingRunnable.run();
+
+    return true;
+  }
+
+  /**
+   * Ends the recording session: releases the orientation lock applied when recording
+   * started. Subclasses extend this to actually stop the recorder.
+   *
+   * @param reachedZero true when stopping because the length-limit countdown expired
+   */
+  public void stopRecordingVideo(boolean reachedZero) {
+    getActivity().setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_UNSPECIFIED);
+    Log.d(TAG, "stopRecordingVideo: ending recording session.");
+
+  }
+
+
+  /** Persists the captured-media URI across configuration changes (restored in view setup). */
+  @Override
+  public final void onSaveInstanceState(Bundle outState) {
+    super.onSaveInstanceState(outState);
+    outState.putString("output_uri", mOutputUri);
+  }
+
+  /** @return the URI of the most recently captured media, or null when nothing was captured. */
+  @Override
+  public final String getOutputUri() {
+    return this.mOutputUri;
+  }
+
+  /** Finishes the host activity with RESULT_CANCELED, packaging {@code e} for the launcher. */
+  protected final void throwError(Exception e) {
+    final Activity host = getActivity();
+    if (host == null) {
+      return;
+    }
+    host.setResult(RESULT_CANCELED, new Intent().putExtra(MaterialCamera.ERROR_EXTRA, e));
+    host.finish();
+  }
+
+  /**
+   * Central click dispatcher for all capture controls: lens toggle, record start/stop,
+   * stillshot, flash toggles, and the photo/video mode switch.
+   */
+  @Override
+  public void onClick(View view) {
+    final int id = view.getId();
+    if (id == R.id.facing) {
+      // Swap lenses, then restart the camera so the new lens takes effect.
+      mInterface.toggleCameraPosition();
+      setImageRes(
+          mButtonFacing,
+          mInterface.getCurrentCameraPosition() == CAMERA_POSITION_BACK
+              ? mInterface.iconFrontCamera()
+              : mInterface.iconRearCamera());
+      closeCamera();
+      openCamera();
+      setupFlashMode();
+      setupFlashModeVideo();
+    }
+//    else if (id == R.id.video) {
+    else if (id == R.id.videoButton) {
+      if (mIsRecording) {
+        stopRecordingVideo(false);
+        mIsRecording = false;
+      } else {
+        // Warn about portrait recording before starting, when enabled.
+        if (getArguments().getBoolean(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.SHOW_PORTRAIT_WARNING, true)
+            && Degrees.isPortrait(getActivity())) {
+          new MaterialDialog.Builder(getActivity())
+              .title(R.string.mcam_portrait)
+              .content(R.string.mcam_portrait_warning)
+              .positiveText(R.string.mcam_yes)
+              .negativeText(android.R.string.cancel)
+              .onPositive(
+                  new MaterialDialog.SingleButtonCallback() {
+                    @Override
+                    public void onClick(
+                        @NonNull MaterialDialog materialDialog,
+                        @NonNull DialogAction dialogAction) {
+                      mIsRecording = startRecordingVideo();
+                    }
+                  })
+              .show();
+        } else {
+          mIsRecording = startRecordingVideo();
+        }
+      }
+      // Flash button visibility depends on the recording state.
+      setupFlashModeVideo();
+    } else if (id == R.id.stillshot) {
+      takeStillshot();
+    } else if (id == R.id.flashStillshot) {
+      invalidateFlash(true);
+    }
+    else if (id == R.id.flashVideo) {
+      // Video flash requires reopening the camera for the new mode to apply.
+      Log.d(TAG, "onClick: toggling video flash.");
+      toggleFlashVideo(true);
+      closeCamera();
+      openCamera();
+      setupFlashMode();
+      setupFlashModeVideo();
+    }
+    else if(id == R.id.video_switch){
+      // Mode switch finishes this activity; the caller relaunches in the other mode.
+      Log.d(TAG, "onClick: toggling camera mode.");
+      if(mVideoSwitch.isChecked()){ //camera mode is on so switch to video
+        Log.d(TAG, "onClick: video mode is on so switch to camera.");
+        getActivity().setResult(RESULT_START_CAMERA);
+        getActivity().finish();
+      }
+      else{
+        Log.d(TAG, "onClick: camera mode is on so switch to video");
+        getActivity().setResult(RESULT_START_VIDEO);
+        getActivity().finish();
+      }
+    }
+  }
+
+  /** Optionally cycles the stillshot flash mode, then refreshes the icon and camera prefs. */
+  private void invalidateFlash(boolean toggle) {
+    if (toggle) {
+      mInterface.toggleFlashMode();
+    }
+    setupFlashMode();
+    onPreferencesUpdated();
+  }
+
+  /** Optionally cycles the video flash mode, then refreshes the icon and camera prefs. */
+  private void toggleFlashVideo(boolean toggle) {
+    if (toggle) {
+      mInterface.toggleFlashModeVideo();
+    }
+    setupFlashModeVideo();
+    onPreferencesUpdated();
+  }
+
+  /** Shows/hides the stillshot flash button and sets the icon for the current flash mode. */
+  private void setupFlashMode() {
+    if (mInterface.shouldHideFlash()) {
+      mButtonFlash.setVisibility(View.GONE);
+      return;
+    }
+    mButtonFlash.setVisibility(View.VISIBLE);
+
+    final int icon;
+    switch (mInterface.getFlashMode()) {
+      case FLASH_MODE_AUTO:
+        icon = mInterface.iconFlashAuto();
+        break;
+      case FLASH_MODE_ALWAYS_ON:
+        icon = mInterface.iconFlashOn();
+        break;
+      case FLASH_MODE_OFF:
+      default:
+        icon = mInterface.iconFlashOff();
+    }
+    setImageRes(mButtonFlash, icon);
+  }
+
+  /** Shows/hides the video flash button (hidden while recording) and sets its icon. */
+  private void setupFlashModeVideo() {
+    Log.d(TAG, "setupFlashModeVideo: setting up flash mode for video.");
+    mButtonFlashVideo.setVisibility(mIsRecording ? View.GONE : View.VISIBLE);
+
+    final int icon;
+    switch (mInterface.getFlashModeVideo()) {
+      case FLASH_MODE_ALWAYS_ON:
+        icon = mInterface.iconFlashOn();
+        break;
+      case FLASH_MODE_OFF:
+      default:
+        icon = mInterface.iconFlashOff();
+    }
+    setImageRes(mButtonFlashVideo, icon);
+  }
+}
diff --git a/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/BaseCaptureActivity.java b/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/BaseCaptureActivity.java
new file mode 100644
index 0000000..3bea593
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/BaseCaptureActivity.java
@@ -0,0 +1,958 @@
+package tabian.com.instagramclone2.materialcamera.internal;
+
+import android.Manifest;
+import android.app.AlertDialog;
+import android.app.Fragment;
+import android.content.DialogInterface;
+import android.content.Intent;
+import android.content.pm.PackageManager;
+import android.graphics.Bitmap;
+import android.graphics.Color;
+import android.media.CamcorderProfile;
+import android.net.Uri;
+import android.os.AsyncTask;
+import android.os.Build;
+import android.os.Bundle;
+import android.os.Handler;
+import android.support.annotation.DrawableRes;
+import android.support.annotation.IntDef;
+import android.support.annotation.NonNull;
+import android.support.annotation.Nullable;
+import android.support.annotation.StringRes;
+import android.support.v4.app.ActivityCompat;
+import android.support.v4.content.ContextCompat;
+import android.support.v7.app.AppCompatActivity;
+import android.support.v7.app.AppCompatDelegate;
+import android.util.Log;
+import android.view.LayoutInflater;
+import android.view.View;
+import android.view.Window;
+import android.view.WindowManager;
+import android.widget.FrameLayout;
+import android.widget.ProgressBar;
+import android.widget.Toast;
+
+import com.afollestad.materialdialogs.MaterialDialog;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.List;
+import java.util.Locale;
+
+import tabian.com.instagramclone2.R;
+import tabian.com.instagramclone2.Utils.FilePaths;
+import tabian.com.instagramclone2.Utils.ImageManager;
+import tabian.com.instagramclone2.Utils.RotateBitmap;
+import tabian.com.instagramclone2.materialcamera.ICallback;
+import tabian.com.instagramclone2.materialcamera.MaterialCamera;
+import tabian.com.instagramclone2.materialcamera.TimeLimitReachedException;
+import tabian.com.instagramclone2.materialcamera.util.CameraUtil;
+import tabian.com.instagramclone2.videocompressor.file.FileUtils;
+import tabian.com.instagramclone2.videocompressor.video.MediaController;
+
+
+/** @author Aidan Follestad (afollestad) */
+public abstract class BaseCaptureActivity extends AppCompatActivity
+ implements BaseCaptureInterface {
+
+  private static final String TAG = "BaseCaptureActivity";
+  // Result code returned to the launcher when media is ready to be added to a story.
+  private static final int RESULT_ADD_NEW_STORY = 7891;
+  private static final int PROGRESS_BAR_ID = 5544;
+
+  // Active lens (front/back/unknown); restored from saved instance state.
+  private int mCameraPosition = CAMERA_POSITION_UNKNOWN;
+  private int mFlashMode = FLASH_MODE_OFF;
+  // True while the runtime-permission dialog is up, so onPause does not finish().
+  private boolean mRequestingPermission;
+  // Recording window timestamps in epoch millis; -1 means unset.
+  private long mRecordingStart = -1;
+  private long mRecordingEnd = -1;
+  // Maximum recording length in millis; -1 means unlimited.
+  private long mLengthLimit = -1;
+  // Camera ids: String for Camera2, Integer for the legacy Camera API.
+  private Object mFrontCameraId;
+  private Object mBackCameraId;
+  private boolean mDidRecord = false;
+  // Raw type retained — element type depends on the camera API in use; TODO confirm.
+  private List mFlashModes;
+  private ProgressBar mProgressBar;
+  // "Processing" dialog shown while saving/compressing media.
+  private AlertDialog mAlertDialog;
+  // mUri: raw captured media; mUploadUri: final (possibly compressed) file to upload.
+  private String mUri = null;
+  private String mUploadUri = null;
+  // Temporary copy of the captured video fed to the compressor.
+  private File tempFile;
+  // True when the upload file is a temp artifact the caller should delete after upload.
+  private Boolean mDeleteCompressedMedia = false;
+
+  public static final int PERMISSION_RC = 69;
+
+  @IntDef({CAMERA_POSITION_UNKNOWN, CAMERA_POSITION_BACK, CAMERA_POSITION_FRONT})
+  @Retention(RetentionPolicy.SOURCE)
+  public @interface CameraPosition {}
+
+  public static final int CAMERA_POSITION_UNKNOWN = 0;
+  public static final int CAMERA_POSITION_FRONT = 1;
+  public static final int CAMERA_POSITION_BACK = 2;
+
+  @IntDef({FLASH_MODE_OFF, FLASH_MODE_ALWAYS_ON, FLASH_MODE_AUTO})
+  @Retention(RetentionPolicy.SOURCE)
+  public @interface FlashMode {}
+
+  public static final int FLASH_MODE_OFF = 0;
+  public static final int FLASH_MODE_ALWAYS_ON = 1;
+  public static final int FLASH_MODE_AUTO = 2;
+
+  /**
+   * Saves camera/session state across configuration changes: lens position, pending
+   * permission flag, recording timestamps, length limit, camera ids (String for
+   * Camera2, int for legacy Camera), and the flash mode.
+   */
+  @Override
+  protected final void onSaveInstanceState(Bundle outState) {
+    super.onSaveInstanceState(outState);
+    outState.putInt("camera_position", mCameraPosition);
+    outState.putBoolean("requesting_permission", mRequestingPermission);
+    outState.putLong("recording_start", mRecordingStart);
+    outState.putLong("recording_end", mRecordingEnd);
+    outState.putLong(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.LENGTH_LIMIT, mLengthLimit);
+    if (mFrontCameraId instanceof String) {
+      // Camera2 ids are Strings.
+      outState.putString("front_camera_id_str", (String) mFrontCameraId);
+      outState.putString("back_camera_id_str", (String) mBackCameraId);
+    } else {
+      // Legacy Camera ids are Integers.
+      if (mFrontCameraId != null) outState.putInt("front_camera_id_int", (Integer) mFrontCameraId);
+      if (mBackCameraId != null) outState.putInt("back_camera_id_int", (Integer) mBackCameraId);
+    }
+    outState.putInt("flash_mode", mFlashMode);
+  }
+
+  /**
+   * Sets up the capture activity: verifies camera hardware exists, applies themed
+   * system-bar colors, and either restores saved camera state or kicks off the
+   * runtime-permission flow on a fresh launch.
+   */
+  @Override
+  protected final void onCreate(Bundle savedInstanceState) {
+    AppCompatDelegate.setCompatVectorFromResourcesEnabled(true);
+    super.onCreate(savedInstanceState);
+
+    if (!CameraUtil.hasCamera(this)) {
+      // No camera hardware: inform the user and close.
+      new MaterialDialog.Builder(this)
+          .title(R.string.mcam_error)
+          .content(R.string.mcam_video_capture_unsupported)
+          .positiveText(android.R.string.ok)
+          .dismissListener(
+              new DialogInterface.OnDismissListener() {
+                @Override
+                public void onDismiss(DialogInterface dialog) {
+                  finish();
+                }
+              })
+          .show();
+      return;
+    }
+    setContentView(R.layout.mcam_activity_videocapture);
+
+    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
+      // Tint the status/navigation bars from the launcher-supplied primary color.
+      final int primaryColor = getIntent().getIntExtra(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.PRIMARY_COLOR, 0);
+      final boolean isPrimaryDark = CameraUtil.isColorDark(primaryColor);
+      final Window window = getWindow();
+      window.setStatusBarColor(CameraUtil.darkenColor(primaryColor));
+      window.setNavigationBarColor(isPrimaryDark ? primaryColor : Color.BLUE);
+      if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+        final View view = window.getDecorView();
+        int flags = view.getSystemUiVisibility();
+        flags |= View.SYSTEM_UI_FLAG_LIGHT_STATUS_BAR;
+        view.setSystemUiVisibility(flags);
+      }
+    }
+
+    if (null == savedInstanceState) {
+      // Fresh launch: request permissions before showing the recorder.
+      checkPermissions();
+      mLengthLimit = getIntent().getLongExtra(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.LENGTH_LIMIT, -1);
+    } else {
+      // Recreation: restore the previously saved camera/session state.
+      mCameraPosition = savedInstanceState.getInt("camera_position", -1);
+      mRequestingPermission = savedInstanceState.getBoolean("requesting_permission", false);
+      mRecordingStart = savedInstanceState.getLong("recording_start", -1);
+      mRecordingEnd = savedInstanceState.getLong("recording_end", -1);
+      mLengthLimit = savedInstanceState.getLong(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.LENGTH_LIMIT, -1);
+      if (savedInstanceState.containsKey("front_camera_id_str")) {
+        mFrontCameraId = savedInstanceState.getString("front_camera_id_str");
+        mBackCameraId = savedInstanceState.getString("back_camera_id_str");
+      } else {
+        mFrontCameraId = savedInstanceState.getInt("front_camera_id_int");
+        mBackCameraId = savedInstanceState.getInt("back_camera_id_int");
+      }
+      mFlashMode = savedInstanceState.getInt("flash_mode");
+    }
+
+    // Keep the screen on and go fullscreen while capturing.
+    getWindow()
+        .addFlags(
+            WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON
+                | WindowManager.LayoutParams.FLAG_FULLSCREEN);
+  }
+
+  /**
+   * Requests CAMERA (and RECORD_AUDIO when recording video with audio enabled) at
+   * runtime, or shows the recorder immediately when everything is granted / pre-M.
+   */
+  private void checkPermissions() {
+    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
+      // Runtime permissions do not exist before Marshmallow.
+      showInitialRecorder();
+      return;
+    }
+
+    final boolean audioNeeded = !useStillshot() && !audioDisabled();
+    final boolean cameraMissing =
+        ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
+            != PackageManager.PERMISSION_GRANTED;
+    final boolean audioMissing =
+        audioNeeded
+            && ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO)
+                != PackageManager.PERMISSION_GRANTED;
+
+    final String[] perms;
+    if (cameraMissing && audioMissing) {
+      perms = new String[] {Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO};
+    } else if (cameraMissing) {
+      perms = new String[] {Manifest.permission.CAMERA};
+    } else if (audioMissing) {
+      perms = new String[] {Manifest.permission.RECORD_AUDIO};
+    } else {
+      perms = null;
+    }
+
+    if (perms == null) {
+      showInitialRecorder();
+    } else {
+      ActivityCompat.requestPermissions(this, perms, PERMISSION_RC);
+      mRequestingPermission = true;
+    }
+  }
+
+  /**
+   * Finishes when the activity goes to background, except during a configuration
+   * change or while the system permission dialog is showing.
+   */
+  @Override
+  protected final void onPause() {
+    Log.d(TAG, "onPause: called.");
+    super.onPause();
+    final boolean keepAlive = isFinishing() || isChangingConfigurations() || mRequestingPermission;
+    if (!keepAlive) {
+      finish();
+    }
+  }
+
+  /**
+   * Back navigation: from a playback/gallery preview, returns to the recorder when
+   * retry is allowed; from the camera fragment, releases the camera first; in all
+   * remaining cases simply finishes.
+   */
+  @Override
+  public final void onBackPressed() {
+    Fragment frag = getFragmentManager().findFragmentById(R.id.container);
+    if (frag != null) {
+      if (frag instanceof PlaybackVideoFragment && allowRetry()) {
+        // Treat back from video playback as "retry" rather than exit.
+        onRetry(((tabian.com.instagramclone2.materialcamera.internal.CameraUriInterface) frag).getOutputUri());
+        return;
+      } else if (frag instanceof tabian.com.instagramclone2.materialcamera.internal.BaseCameraFragment) {
+        // Release camera/recorder before the activity goes away.
+        ((tabian.com.instagramclone2.materialcamera.internal.BaseCameraFragment) frag).cleanup();
+      } else if (frag instanceof BaseGalleryFragment && allowRetry()) {
+        onRetry(((tabian.com.instagramclone2.materialcamera.internal.CameraUriInterface) frag).getOutputUri());
+        return;
+      }
+    }
+//    deleteTempFile();
+    finish();
+  }
+
+  /** @return the concrete capture fragment (Camera or Camera2 implementation) to host. */
+  @NonNull
+  public abstract Fragment getFragment();
+
+  /** Builds the capture fragment and forwards this activity's launch extras to it. */
+  public final Fragment createFragment() {
+    final Fragment fragment = getFragment();
+    fragment.setArguments(getIntent().getExtras());
+    return fragment;
+  }
+
+ @Override
+ public void setRecordingStart(long start) {
+ mRecordingStart = start;
+ if (start > -1 && hasLengthLimit()) setRecordingEnd(mRecordingStart + getLengthLimit());
+ else setRecordingEnd(-1);
+ }
+
+  /** @return epoch millis when recording started, or -1 when not recording. */
+  @Override
+  public long getRecordingStart() {
+    return this.mRecordingStart;
+  }
+
+  /** Sets the epoch millis at which recording must stop (-1 for no forced end). */
+  @Override
+  public void setRecordingEnd(long end) {
+    this.mRecordingEnd = end;
+  }
+
+  /** @return epoch millis at which recording must stop, or -1 when none is set. */
+  @Override
+  public long getRecordingEnd() {
+    return this.mRecordingEnd;
+  }
+
+  /** @return maximum recording length in millis, or -1 when unlimited. */
+  @Override
+  public long getLengthLimit() {
+    return this.mLengthLimit;
+  }
+
+  /** @return true when a recording length limit was supplied by the launcher. */
+  @Override
+  public boolean hasLengthLimit() {
+    final long limit = getLengthLimit();
+    return limit > -1;
+  }
+
+  /** @return true when the length-limit countdown should start as soon as the camera opens. */
+  @Override
+  public boolean countdownImmediately() {
+    final Intent launch = getIntent();
+    return launch.getBooleanExtra(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.COUNTDOWN_IMMEDIATELY, false);
+  }
+
+  /** Records which lens (front/back/unknown) is currently active. */
+  @Override
+  public void setCameraPosition(int position) {
+    this.mCameraPosition = position;
+  }
+
+  /** Switches front↔back, but only when the target camera actually exists on this device. */
+  @Override
+  public void toggleCameraPosition() {
+    final boolean onFront = getCurrentCameraPosition() == CAMERA_POSITION_FRONT;
+    if (onFront) {
+      // Front, go to back if possible.
+      if (getBackCamera() != null) setCameraPosition(CAMERA_POSITION_BACK);
+    } else if (getFrontCamera() != null) {
+      // Back (or unknown), go to front if possible.
+      setCameraPosition(CAMERA_POSITION_FRONT);
+    }
+  }
+
+  /** @return the currently selected lens position. */
+  @Override
+  public int getCurrentCameraPosition() {
+    return this.mCameraPosition;
+  }
+
+ @Override
+ public Object getCurrentCameraId() {
+ if (getCurrentCameraPosition() == CAMERA_POSITION_FRONT) return getFrontCamera();
+ else return getBackCamera();
+ }
+
+  /** Stores the front camera id (String for Camera2, Integer for legacy Camera). */
+  @Override
+  public void setFrontCamera(Object id) {
+    this.mFrontCameraId = id;
+  }
+
+  /** @return the stored front camera id, or null when none was detected. */
+  @Override
+  public Object getFrontCamera() {
+    return this.mFrontCameraId;
+  }
+
+  /** Stores the back camera id (String for Camera2, Integer for legacy Camera). */
+  @Override
+  public void setBackCamera(Object id) {
+    this.mBackCameraId = id;
+  }
+
+  /** @return the stored back camera id, or null when none was detected. */
+  @Override
+  public Object getBackCamera() {
+    return this.mBackCameraId;
+  }
+
+  /** Replaces the container with a fresh capture fragment (recorder/stillshot UI). */
+  private void showInitialRecorder() {
+    getFragmentManager().beginTransaction().replace(R.id.container, createFragment()).commit();
+  }
+
+  /**
+   * Discards the previous capture and restarts the recorder, or exits with
+   * STATUS_RETRY when the launcher asked retries to leave the activity.
+   *
+   * @param outputUri the rejected capture to delete; may be null
+   */
+  @Override
+  public final void onRetry(@Nullable String outputUri) {
+    if (outputUri != null) deleteOutputFile(outputUri);
+    // Reset the countdown unless auto-submit wants the timer preserved across retries.
+    if (!shouldAutoSubmit() || restartTimerOnRetry()) setRecordingStart(-1);
+    if (getIntent().getBooleanExtra(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.RETRY_EXITS, false)) {
+      setResult(
+          RESULT_OK,
+          new Intent().putExtra(MaterialCamera.STATUS_EXTRA, MaterialCamera.STATUS_RETRY));
+      finish();
+      return;
+    }
+    getFragmentManager().beginTransaction().replace(R.id.container, createFragment()).commit();
+  }
+
+  /**
+   * After a video capture finishes: auto-submits the media when configured (or forced
+   * to, because the countdown hit zero / retry is unavailable), otherwise shows the
+   * playback preview fragment. A null URI with a length limit means the time limit
+   * was reached without usable output.
+   *
+   * @param outputUri captured video URI; null signals a time-limit failure
+   * @param countdownIsAtZero true when the length-limit countdown expired
+   */
+  @Override
+  public final void onShowPreview(@Nullable final String outputUri, boolean countdownIsAtZero) {
+    if ((shouldAutoSubmit() && (countdownIsAtZero || !allowRetry() || !hasLengthLimit()))
+        || outputUri == null) {
+      if (outputUri == null) {
+        // Nothing usable was captured before the time limit — report the error.
+        setResult(
+            RESULT_CANCELED,
+            new Intent().putExtra(MaterialCamera.ERROR_EXTRA, new TimeLimitReachedException()));
+        finish();
+        return;
+      }
+      useMedia(outputUri);
+    } else {
+      if (!hasLengthLimit() || !continueTimerInPlayback()) {
+        // No countdown or countdown should not continue through playback, reset timer to 0
+        setRecordingStart(-1);
+      }
+      Fragment frag =
+          PlaybackVideoFragment.newInstance(
+              outputUri, allowRetry(), getIntent().getIntExtra(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.PRIMARY_COLOR, 0));
+      getFragmentManager().beginTransaction().replace(R.id.container, frag).commit();
+    }
+  }
+
+  /** After a stillshot: auto-submit immediately, or show the preview/confirm fragment. */
+  @Override
+  public void onShowStillshot(String outputUri) {
+    if (shouldAutoSubmit()) {
+      useMedia(outputUri);
+      return;
+    }
+    final int primaryColor =
+        getIntent().getIntExtra(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.PRIMARY_COLOR, 0);
+    final Fragment preview =
+        StillshotPreviewFragment.newInstance(outputUri, allowRetry(), primaryColor);
+    getFragmentManager().beginTransaction().replace(R.id.container, preview).commit();
+  }
+
+  /** @return true unless the launcher explicitly disabled the retry option. */
+  @Override
+  public final boolean allowRetry() {
+    final Intent launch = getIntent();
+    return launch.getBooleanExtra(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.ALLOW_RETRY, true);
+  }
+
+  /** @return true when captures should be submitted without showing a preview. */
+  @Override
+  public final boolean shouldAutoSubmit() {
+    final Intent launch = getIntent();
+    return launch.getBooleanExtra(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.AUTO_SUBMIT, false);
+  }
+
+ private void deleteOutputFile(@Nullable String uri) {
+ if (uri != null)
+ //noinspection ResultOfMethodCallIgnored
+ new File(Uri.parse(uri).getPath()).delete();
+ }
+
+  /** Re-shows the recorder after returning from the system permission flow. */
+  @Override
+  protected final void onActivityResult(int requestCode, int resultCode, Intent data) {
+    super.onActivityResult(requestCode, resultCode, data);
+    if (requestCode == PERMISSION_RC) {
+      showInitialRecorder();
+    }
+  }
+
+  /**
+   * Handles the runtime-permission result. A cancelled/interrupted request delivers
+   * an EMPTY grantResults array (per the Android contract) — the original indexed
+   * grantResults[0] unconditionally and crashed in that case. It also only checked
+   * the first entry, missing a denial of RECORD_AUDIO when two permissions were
+   * requested. Both cases are now treated as a denial.
+   */
+  @Override
+  public final void onRequestPermissionsResult(
+      int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
+    super.onRequestPermissionsResult(requestCode, permissions, grantResults);
+    mRequestingPermission = false;
+    boolean denied = grantResults.length == 0; // cancelled request → treat as denied
+    for (int result : grantResults) {
+      if (result == PackageManager.PERMISSION_DENIED) {
+        denied = true;
+        break;
+      }
+    }
+    if (denied) {
+      new MaterialDialog.Builder(this)
+          .title(R.string.mcam_permissions_needed)
+          .content(R.string.mcam_video_perm_warning)
+          .positiveText(android.R.string.ok)
+          .dismissListener(
+              new DialogInterface.OnDismissListener() {
+                @Override
+                public void onDismiss(DialogInterface dialog) {
+                  finish();
+                }
+              })
+          .show();
+    } else {
+      showInitialRecorder();
+    }
+  }
+
+  /** Final teardown. (Temp-file cleanup call retained below, currently disabled.) */
+  @Override
+  protected void onDestroy() {
+    super.onDestroy();
+//    deleteTempFile();
+  }
+
+  /**
+   * Entry point after a capture is confirmed: remembers the raw URI and hands the
+   * media to the save/compress pipeline. (The original setResult/finish path is
+   * retained below, commented out.)
+   *
+   * @param uri URI of the confirmed capture; ignored when null
+   */
+  @Override
+  public final void useMedia(String uri) {
+    if (uri != null) {
+      Log.d(TAG, "useMedia: upload uri: " + uri);
+      mUri = uri;
+//      setResult(
+//          Activity.RESULT_OK,
+//          getIntent()
+//              .putExtra(MaterialCamera.STATUS_EXTRA, MaterialCamera.STATUS_RECORDED)
+//              .setDataAndType(Uri.parse(uri), useStillshot() ? "image/jpeg" : "video/mp4"));
+      saveMediaToMemory(uri);
+    }
+//    finish();
+  }
+
+
+
+
+  /**
+   * Prepares the captured media for a story upload. Videos that were not yet saved
+   * are compressed first (the activity finishes from the compressor's callback);
+   * already-saved media is returned to the caller immediately.
+   *
+   * @param uri path/URI of the media to add
+   */
+  @Override
+  public void addToStory(String uri) {
+    Log.d(TAG, "addToStory: adding file to story.");
+    initProgressBar();
+    if(isMediaVideo(uri)){
+      if(mUploadUri == null){
+        // Unsaved video: compress first; upload happens in onPostExecute.
+        Log.d(TAG, "addToStory: Video was not saved. Beginning compression.");
+        mDeleteCompressedMedia = true;
+        saveTempAndCompress(uri);
+      }
+      else{
+        Log.d(TAG, "addToStory: video has been saved. Now uploading.");
+        Log.d(TAG, "addToStory: upload uri: " + mUploadUri);
+        finishActivityAndUpload();
+      }
+    }
+    else{
+      if(mUploadUri == null){
+        // Unsaved image: upload directly and let the caller delete it afterwards.
+        Log.d(TAG, "addToStory: Image was not saved. Now uploading");
+        mDeleteCompressedMedia = true;
+        mUploadUri = uri;
+        finishActivityAndUpload();
+      }
+      else{
+        Log.d(TAG, "addToStory: Image has been saved. Now uploading.");
+        Log.d(TAG, "addToStory: upload uri: " + mUploadUri);
+        finishActivityAndUpload();
+      }
+    }
+
+  }
+
+ private void finishActivityAndUpload(){
+ Log.d(TAG, "finishActivityAndUpload: called.");
+ if(mDeleteCompressedMedia){
+ setResult(
+ RESULT_ADD_NEW_STORY,
+ getIntent()
+ .putExtra(MaterialCamera.DELETE_UPLOAD_FILE_EXTRA, true)
+ .putExtra(MaterialCamera.STATUS_EXTRA, MaterialCamera.STATUS_RECORDED)
+ .setDataAndType(Uri.parse(mUploadUri), useStillshot() ? "image/jpeg" : "video/mp4"));
+ }
+ else{
+ setResult(
+ RESULT_ADD_NEW_STORY,
+ getIntent()
+ .putExtra(MaterialCamera.DELETE_UPLOAD_FILE_EXTRA, false)
+ .putExtra(MaterialCamera.STATUS_EXTRA, MaterialCamera.STATUS_RECORDED)
+ .setDataAndType(Uri.parse(mUploadUri), useStillshot() ? "image/jpeg" : "video/mp4"));
+ }
+
+ finish();
+ }
+
+  /**
+   * Shows a non-cancelable "processing" dialog while media is saved/compressed.
+   * (An earlier inline-ProgressBar approach is retained below, commented out.)
+   */
+  private void initProgressBar(){
+//    RelativeLayout.LayoutParams layoutParams = new RelativeLayout.LayoutParams(
+//        RelativeLayout.LayoutParams.MATCH_PARENT,
+//        RelativeLayout.LayoutParams.MATCH_PARENT
+//    );
+//    RelativeLayout relativeLayout = new RelativeLayout(this);
+//    relativeLayout.bringToFront();
+//    relativeLayout.setLayoutParams(layoutParams);
+////
+//    FrameLayout frameLayout = ((Activity)this).findViewById(R.id.container);
+//    frameLayout.addView(relativeLayout);
+//
+//    RelativeLayout.LayoutParams params = new RelativeLayout.LayoutParams(
+//        250,
+//        RelativeLayout.LayoutParams.WRAP_CONTENT
+//    );
+//    params.addRule(RelativeLayout.CENTER_IN_PARENT);
+//    mProgressBar = new ProgressBar(this);
+//    mProgressBar.setId(PROGRESS_BAR_ID);
+//    mProgressBar.setLayoutParams(params);
+//    mProgressBar.setVisibility(View.VISIBLE);
+//    mProgressBar.bringToFront();
+//    relativeLayout.addView(mProgressBar);
+//    Drawable progressDrawable = mProgressBar.getIndeterminateDrawable().mutate();
+//    progressDrawable.setColorFilter(Color.WHITE, android.graphics.PorterDuff.Mode.SRC_IN);
+//    mProgressBar.setProgressDrawable(progressDrawable);
+
+    // retrieve display dimensions
+//    Rect displayRectangle = new Rect();
+//    Window window = this.getWindow();
+//    window.getDecorView().getWindowVisibleDisplayFrame(displayRectangle);
+
+    // Inflate the processing layout into a modal, non-cancelable dialog.
+    LayoutInflater li = LayoutInflater.from(this);
+    View layout = li.inflate(R.layout.layout_processing_dialog, null);
+    FrameLayout.LayoutParams params = new FrameLayout.LayoutParams(
+        150,
+        FrameLayout.LayoutParams.WRAP_CONTENT
+    );
+    layout.setLayoutParams(params);
+    AlertDialog.Builder alertDialogBuilder = new AlertDialog.Builder(this);
+    alertDialogBuilder.setView(layout);
+    mAlertDialog = alertDialogBuilder.create();
+    // Block back-press dismissal while processing is in flight.
+    mAlertDialog.setCancelable(false);
+    mAlertDialog.show();
+  }
+
+
+ private void saveTempAndCompress(String uri){
+ //save temporary file for compression
+ String fileName = uri.substring(uri.indexOf("Stories/") + 8);
+ tempFile = FileUtils.saveTempFile(fileName, this, Uri.parse(uri));
+
+ //delete the original
+ deleteOutputFile(uri);
+
+ //compress temp file and save new compressed version in "/Stories/"
+ new VideoCompressor().execute();
+ }
+
+
+  /**
+   * Persists the confirmed capture to device storage: videos go through temp-copy +
+   * compression; images are rotation-corrected and written to the Stories directory.
+   *
+   * @param uri path/URI of the raw capture
+   */
+  private void saveMediaToMemory(String uri){
+    Log.d(TAG, "saveMediaToMemory: saving media to memory.");
+    Log.d(TAG, "saveMediaToMemory: uri: " + uri);
+
+    initProgressBar();
+
+    if(isMediaVideo(uri)){
+
+      saveTempAndCompress(uri);
+    }
+    else{
+      Bitmap bm = null;
+      // Decode with EXIF-aware rotation so the saved image is upright.
+      RotateBitmap rotateBitmap = new RotateBitmap();
+      try{
+        bm = rotateBitmap.HandleSamplingAndRotationBitmap(this, Uri.parse(uri));
+      }catch (IOException e){
+        e.printStackTrace();
+      }
+
+      //delete the old file
+      deleteOutputFile(uri);
+
+      saveBitmapToDisk(bm);
+    }
+
+  }
+
+
+  /**
+   * Background task that compresses {@link #tempFile}. On success, publishes the
+   * compressed file's path to {@link #mUploadUri} and — when the media is pending
+   * upload — finishes the activity with the result.
+   */
+  class VideoCompressor extends AsyncTask<Void, Void, String> {
+    // Fix: the original extended raw AsyncTask, losing type checking on the
+    // doInBackground/onPostExecute overrides. Parameterized properly here.
+
+    @Override
+    protected void onPreExecute() {
+      super.onPreExecute();
+      showProgressBar();
+      Log.d(TAG,"Start video compression");
+    }
+
+    @Override
+    protected String doInBackground(Void... voids) {
+      // Returns the compressed file path, or an empty/null value on failure.
+      return MediaController.getInstance().convertVideo(tempFile.getPath());
+    }
+
+    @Override
+    protected void onPostExecute(String filePath) {
+      super.onPostExecute(filePath);
+      hideProgressBar();
+      // Fix: guard against null as well as "" — the original only checked
+      // equals("") and would NPE if the compressor ever returned null.
+      if (filePath != null && !filePath.isEmpty()) {
+        mUploadUri = filePath;
+        Log.d(TAG,"Compression successfully!");
+        if(mDeleteCompressedMedia){
+          finishActivityAndUpload();
+        }
+      }
+    }
+  }
+
+ private void deleteTempFile(){
+ if(tempFile != null && tempFile.exists()){
+ tempFile.delete();
+ }
+ }
+
+
+  /**
+   * Writes the captured bitmap as a timestamped JPEG into the Stories directory on a
+   * background thread, records the resulting path in {@link #mUploadUri}, and posts
+   * a success/failure toast back on the UI thread via the supplied Handler.
+   *
+   * @param bm the upright (rotation-corrected) bitmap to persist
+   */
+  private void saveBitmapToDisk(final Bitmap bm){
+    // Callback is invoked on the UI thread once the background write finishes.
+    final ICallback callback = new ICallback() {
+      @Override
+      public void done(Exception e) {
+        if (e == null) {
+          hideProgressBar();
+          Log.d(TAG, "saveBitmapToDisk: saved file to disk.");
+          Toast.makeText(BaseCaptureActivity.this, "saved", Toast.LENGTH_SHORT).show();
+        } else {
+          e.printStackTrace();
+          hideProgressBar();
+          Toast.makeText(BaseCaptureActivity.this, "something went wrong", Toast.LENGTH_SHORT).show();
+        }
+      }
+    };
+    Log.d(TAG, "saveBitmapToDisk: saving to disc.");
+    // Handler created on the calling (UI) thread; used to marshal the callback back.
+    final Handler handler = new Handler();
+    new Thread() {
+      @Override
+      public void run() {
+        try {
+          FileOutputStream out = null;
+          FileInputStream fis = null;
+          try {
+            FilePaths filePaths = new FilePaths();
+            // Timestamped name avoids collisions between consecutive captures.
+            String timeStamp =
+                new SimpleDateFormat("yyyyMMdd_HHmmss", Locale.getDefault()).format(new Date());
+//            String path = Environment.getExternalStorageDirectory().toString();
+            File file = new File(filePaths.STORIES + "/IMG_" + timeStamp + ".jpg");
+            out = new FileOutputStream(file);
+            bm.compress(Bitmap.CompressFormat.JPEG, ImageManager.IMAGE_SAVE_QUALITY, out);
+
+            // NOTE(review): this FileInputStream is opened but never read before
+            // being closed in the finally block — purpose unclear; confirm needed.
+            File imagefile = new File(file.getPath());
+            try {
+              fis = new FileInputStream(imagefile);
+            } catch (FileNotFoundException e) {
+              e.printStackTrace();
+            }
+            mUploadUri = file.getPath();
+
+            Log.d(TAG, "saveBitmapToDisk: new uri: " + mUploadUri);
+          } catch (Exception e) {
+            e.printStackTrace();
+          } finally {
+            // Close streams regardless of success; errors here are non-fatal.
+            try {
+              if (out != null) {
+                out.close();
+              }
+              if(fis != null){
+                fis.close();
+              }
+
+            } catch (IOException e) {
+              e.printStackTrace();
+            }
+          }
+
+          handler.post(
+              new Runnable() {
+                @Override
+                public void run() {
+                  callback.done(null);
+                }
+              });
+        } catch (final Exception e) {
+          handler.post(
+              new Runnable() {
+                @Override
+                public void run() {
+                  callback.done(e);
+                }
+              });
+        }
+      }
+    }.start();
+
+  }
+
+    /**
+     * @return true if the uri's extension marks it as a video (mp4/wmv/flv/avi),
+     *     false otherwise (treated as a photo).
+     */
+    private boolean isMediaVideo(String uri){
+        return uri.contains(".mp4")
+                || uri.contains(".wmv")
+                || uri.contains(".flv")
+                || uri.contains(".avi");
+    }
+
+    /** Pops up the modal progress dialog, if one has been created. */
+    private void showProgressBar(){
+        if (mAlertDialog == null) {
+            return;
+        }
+        mAlertDialog.show();
+    }
+
+    /** Dismisses the modal progress dialog, if one has been created. */
+    private void hideProgressBar(){
+        if (mAlertDialog == null) {
+            return;
+        }
+        mAlertDialog.dismiss();
+    }
+
+    /** Records whether a video capture actually took place in this session. */
+    @Override
+    public void setDidRecord(boolean didRecord) {
+        mDidRecord = didRecord;
+    }
+
+    /** @return true if a video was recorded (see {@link #setDidRecord(boolean)}). */
+    @Override
+    public boolean didRecord() {
+        return mDidRecord;
+    }
+
+    /** @return the current flash mode for stillshot capture. */
+    @Override
+    public int getFlashMode() {
+        return mFlashMode;
+    }
+
+    // NOTE(review): stillshot and video share the single mFlashMode field, so both
+    // getters always return the same value — confirm this is intended.
+    @Override
+    public int getFlashModeVideo() {
+        return mFlashMode;
+    }
+
+ @Override
+ public void toggleFlashMode() {
+ if (mFlashModes != null) {
+ mFlashMode = mFlashModes.get((mFlashModes.indexOf(mFlashMode) + 1) % mFlashModes.size());
+ }
+ }
+
+
+
+ @Override
+ public void toggleFlashModeVideo() {
+ Log.d(TAG, "toggleFlashModeVideo: toggling video flash mode.");
+ if (mFlashModes != null) {
+ Log.d(TAG, "toggleFlashModeVideo: flash mode is not null");
+ if(mFlashMode == FLASH_MODE_ALWAYS_ON){
+ mFlashMode = FLASH_MODE_OFF;
+ }
+ else{
+ mFlashMode = FLASH_MODE_ALWAYS_ON;
+ }
+ }
+ }
+
+    // ---- capture configuration: each value is read from the launching Intent's extras ----
+
+    /** @return whether the countdown timer restarts when the user retries a capture. */
+    @Override
+    public boolean restartTimerOnRetry() {
+        return getIntent().getBooleanExtra(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.RESTART_TIMER_ON_RETRY, false);
+    }
+
+    /** @return whether the countdown keeps running while previewing the recording. */
+    @Override
+    public boolean continueTimerInPlayback() {
+        return getIntent().getBooleanExtra(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.CONTINUE_TIMER_IN_PLAYBACK, false);
+    }
+
+    /** @return configured video bit rate, or {@code defaultVal} when not supplied. */
+    @Override
+    public int videoEncodingBitRate(int defaultVal) {
+        return getIntent().getIntExtra(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.VIDEO_BIT_RATE, defaultVal);
+    }
+
+    /** @return configured audio bit rate, or {@code defaultVal} when not supplied. */
+    @Override
+    public int audioEncodingBitRate(int defaultVal) {
+        return getIntent().getIntExtra(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.AUDIO_ENCODING_BIT_RATE, defaultVal);
+    }
+
+    /** @return configured video frame rate, or {@code defaultVal} when not supplied. */
+    @Override
+    public int videoFrameRate(int defaultVal) {
+        return getIntent().getIntExtra(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.VIDEO_FRAME_RATE, defaultVal);
+    }
+
+    /** @return preferred video aspect ratio (width/height); defaults to 4:3. */
+    @Override
+    public float videoPreferredAspect() {
+        return getIntent().getFloatExtra(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.VIDEO_PREFERRED_ASPECT, 4f / 3f);
+    }
+
+    /** @return preferred video height in pixels; defaults to 720. */
+    @Override
+    public int videoPreferredHeight() {
+        return getIntent().getIntExtra(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.VIDEO_PREFERRED_HEIGHT, 720);
+    }
+
+    /** @return maximum recording file size in bytes; -1 means unlimited. */
+    @Override
+    public long maxAllowedFileSize() {
+        return getIntent().getLongExtra(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.MAX_ALLOWED_FILE_SIZE, -1);
+    }
+
+    /** @return CamcorderProfile quality constant; defaults to QUALITY_HIGH. */
+    @Override
+    public int qualityProfile() {
+        return getIntent().getIntExtra(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.QUALITY_PROFILE, CamcorderProfile.QUALITY_HIGH);
+    }
+
+    // ---- UI resources: each icon/label can be overridden via Intent extras, with the
+    // ---- library's bundled resource as the fallback ----
+
+    @DrawableRes
+    @Override
+    public int iconPause() {
+        return getIntent().getIntExtra(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.ICON_PAUSE, R.drawable.evp_action_pause);
+    }
+
+    @DrawableRes
+    @Override
+    public int iconPlay() {
+        return getIntent().getIntExtra(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.ICON_PLAY, R.drawable.evp_action_play);
+    }
+
+    @DrawableRes
+    @Override
+    public int iconRestart() {
+        return getIntent().getIntExtra(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.ICON_RESTART, R.drawable.evp_action_restart);
+    }
+
+    @DrawableRes
+    @Override
+    public int iconRearCamera() {
+        return getIntent().getIntExtra(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.ICON_REAR_CAMERA, R.drawable.mcam_camera_rear);
+    }
+
+    @DrawableRes
+    @Override
+    public int iconFrontCamera() {
+        return getIntent().getIntExtra(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.ICON_FRONT_CAMERA, R.drawable.mcam_camera_front);
+    }
+
+    @DrawableRes
+    @Override
+    public int iconStop() {
+        return getIntent().getIntExtra(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.ICON_STOP, R.drawable.mcam_action_stop);
+    }
+
+    @DrawableRes
+    @Override
+    public int iconRecord() {
+        return getIntent().getIntExtra(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.ICON_RECORD, R.drawable.mcam_action_capture);
+    }
+
+    @StringRes
+    @Override
+    public int labelRetry() {
+        return getIntent().getIntExtra(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.LABEL_RETRY, R.string.mcam_retry);
+    }
+
+    /** @deprecated superseded by {@link #labelConfirm()}, which also covers stillshots. */
+    @Deprecated
+    @StringRes
+    @Override
+    public int labelUseVideo() {
+        return getIntent().getIntExtra(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.LABEL_CONFIRM, R.string.mcam_use_video);
+    }
+
+    /** Default confirm label depends on whether we're capturing a photo or a video. */
+    @StringRes
+    @Override
+    public int labelConfirm() {
+        return getIntent()
+                .getIntExtra(
+                        tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.LABEL_CONFIRM,
+                        useStillshot() ? R.string.mcam_use_stillshot : R.string.mcam_use_video);
+    }
+
+    @DrawableRes
+    @Override
+    public int iconStillshot() {
+        return getIntent()
+                .getIntExtra(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.ICON_STILL_SHOT, R.drawable.mcam_action_stillshot);
+    }
+
+//    @Override
+//    public void setUseStillshot(boolean bool) {
+//        getIntent().putExtra(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.STILL_SHOT, bool);
+//    }
+
+    /** @return true when this session captures a still photo instead of video. */
+    @Override
+    public boolean useStillshot() {
+        return getIntent().getBooleanExtra(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.STILL_SHOT, false);
+    }
+
+    @DrawableRes
+    @Override
+    public int iconFlashAuto() {
+        return getIntent()
+                .getIntExtra(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.ICON_FLASH_AUTO, R.drawable.mcam_action_flash_auto);
+    }
+
+    @DrawableRes
+    @Override
+    public int iconFlashOn() {
+        return getIntent().getIntExtra(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.ICON_FLASH_ON, R.drawable.mcam_action_flash);
+    }
+
+    @DrawableRes
+    @Override
+    public int iconFlashOff() {
+        return getIntent()
+                .getIntExtra(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.ICON_FLASH_OFF, R.drawable.mcam_action_flash_off);
+    }
+
+    /** Stores the flash modes reported as supported by the active camera. */
+    @Override
+    public void setFlashModes(List modes) {
+        mFlashModes = modes;
+    }
+
+    /** Flash controls are only shown for stillshot capture with known flash modes. */
+    @Override
+    public boolean shouldHideFlash() {
+        return !useStillshot() || mFlashModes == null;
+    }
+
+    /** @return delay in ms before auto-starting recording; -1 disables auto-record. */
+    @Override
+    public long autoRecordDelay() {
+        return getIntent().getLongExtra(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.AUTO_RECORD, -1);
+    }
+
+    /** @return true when audio recording was disabled by the launching intent. */
+    @Override
+    public boolean audioDisabled() {
+        return getIntent().getBooleanExtra(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.AUDIO_DISABLED, false);
+    }
+
+    /** The camera-facing toggle is hidden unless the intent explicitly allows switching. */
+    @Override
+    public boolean shouldHideCameraFacing() {
+        return !getIntent().getBooleanExtra(CameraIntentKey.ALLOW_CHANGE_CAMERA, false);
+    }
+}
diff --git a/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/BaseCaptureInterface.java b/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/BaseCaptureInterface.java
new file mode 100644
index 0000000..ac58676
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/BaseCaptureInterface.java
@@ -0,0 +1,149 @@
+package tabian.com.instagramclone2.materialcamera.internal;
+
+import android.support.annotation.DrawableRes;
+import android.support.annotation.Nullable;
+import android.support.annotation.StringRes;
+
+import java.util.List;
+
+import tabian.com.instagramclone2.materialcamera.internal.*;
+
+/**
+ * Contract implemented by the capture activity; camera/gallery fragments use it to
+ * read configuration and report capture events back to their host.
+ *
+ * @author Aidan Follestad (afollestad)
+ */
+public interface BaseCaptureInterface {
+
+  // ---- capture lifecycle ----
+
+  /** Called when the user discards the media at {@code outputUri} and retries. */
+  void onRetry(@Nullable String outputUri);
+
+  /** Shows the playback/confirm screen for a recorded video. */
+  void onShowPreview(@Nullable String outputUri, boolean countdownIsAtZero);
+
+  /** Shows the confirm screen for a still photo. */
+  void onShowStillshot(String outputUri);
+
+  // ---- recording timer state ----
+
+  void setRecordingStart(long start);
+
+  void setRecordingEnd(long end);
+
+  long getRecordingStart();
+
+  long getRecordingEnd();
+
+  /** @return true when a maximum recording duration is configured. */
+  boolean hasLengthLimit();
+
+  boolean countdownImmediately();
+
+  long getLengthLimit();
+
+  // ---- camera selection ----
+
+  void setCameraPosition(int position);
+
+  void toggleCameraPosition();
+
+  // NOTE(review): Camera2 code casts this to String; presumably the legacy camera
+  // API uses an Integer id, hence the Object type — confirm against Camera1 code.
+  Object getCurrentCameraId();
+
+  @tabian.com.instagramclone2.materialcamera.internal.BaseCaptureActivity.CameraPosition
+  int getCurrentCameraPosition();
+
+  void setFrontCamera(Object id);
+
+  void setBackCamera(Object id);
+
+  Object getFrontCamera();
+
+  Object getBackCamera();
+
+  // ---- result handling ----
+
+  /** Accepts the captured media at {@code uri} as the final result. */
+  void useMedia(String uri);
+
+  /** Uploads the captured media at {@code uri} to the user's story. */
+  void addToStory(String uri);
+
+  boolean shouldAutoSubmit();
+
+  boolean allowRetry();
+
+  void setDidRecord(boolean didRecord);
+
+  boolean didRecord();
+
+  boolean restartTimerOnRetry();
+
+  boolean continueTimerInPlayback();
+
+  // ---- encoding configuration (defaults supplied by callers) ----
+
+  int videoEncodingBitRate(int defaultVal);
+
+  int audioEncodingBitRate(int defaultVal);
+
+  int videoFrameRate(int defaultVal);
+
+  int videoPreferredHeight();
+
+  float videoPreferredAspect();
+
+  long maxAllowedFileSize();
+
+  int qualityProfile();
+
+  // ---- UI resources ----
+
+  @DrawableRes
+  int iconRecord();
+
+  @DrawableRes
+  int iconStop();
+
+  @DrawableRes
+  int iconFrontCamera();
+
+  @DrawableRes
+  int iconRearCamera();
+
+  @DrawableRes
+  int iconPlay();
+
+  @DrawableRes
+  int iconPause();
+
+  @DrawableRes
+  int iconRestart();
+
+  @StringRes
+  int labelRetry();
+
+  /** @deprecated superseded by {@link #labelConfirm()}. */
+  @Deprecated
+  @StringRes
+  int labelUseVideo();
+
+  @StringRes
+  int labelConfirm();
+
+  @DrawableRes
+  int iconStillshot();
+
+  /** @return true if we only want to take photographs instead of video capture */
+  boolean useStillshot();
+
+//  void setUseStillshot(boolean bool);
+
+  // ---- flash control ----
+
+  void toggleFlashMode();
+
+  void toggleFlashModeVideo();
+
+  @tabian.com.instagramclone2.materialcamera.internal.BaseCaptureActivity.FlashMode
+  int getFlashMode();
+
+  @tabian.com.instagramclone2.materialcamera.internal.BaseCaptureActivity.FlashMode
+  int getFlashModeVideo();
+
+  @DrawableRes
+  int iconFlashAuto();
+
+  @DrawableRes
+  int iconFlashOn();
+
+  @DrawableRes
+  int iconFlashOff();
+
+  void setFlashModes(List modes);
+
+  boolean shouldHideFlash();
+
+  long autoRecordDelay();
+
+  boolean audioDisabled();
+
+  boolean shouldHideCameraFacing();
+}
diff --git a/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/BaseGalleryFragment.java b/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/BaseGalleryFragment.java
new file mode 100644
index 0000000..76daf4c
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/BaseGalleryFragment.java
@@ -0,0 +1,83 @@
+package tabian.com.instagramclone2.materialcamera.internal;
+
+import android.app.Activity;
+import android.app.Fragment;
+import android.content.pm.ActivityInfo;
+import android.os.Bundle;
+import android.support.v4.content.ContextCompat;
+import android.view.View;
+import android.widget.RelativeLayout;
+
+import com.afollestad.materialdialogs.MaterialDialog;
+
+import tabian.com.instagramclone2.R;
+import tabian.com.instagramclone2.materialcamera.util.CameraUtil;
+
+
+/**
+ * Base class for the gallery/preview fragments shown after media has been captured.
+ * Wires up the retry / save-story / add-to-story controls and exposes the captured
+ * media's uri through {@link tabian.com.instagramclone2.materialcamera.internal.CameraUriInterface}.
+ *
+ * <p>Fix: removed the dead color branch in {@code onViewCreated} — it only computed a
+ * local {@code textColor} whose consumers were already commented out.
+ */
+public abstract class BaseGalleryFragment extends Fragment
+        implements tabian.com.instagramclone2.materialcamera.internal.CameraUriInterface, View.OnClickListener {
+
+    BaseCaptureInterface mInterface;
+    // NOTE(review): never assigned in this class (the arguments read was removed
+    // upstream), so it always holds the default 0 — confirm before relying on it.
+    int mPrimaryColor;
+    String mOutputUri;
+    View mControlsFrame;
+    RelativeLayout mRetry;
+    RelativeLayout mSaveStory;
+    RelativeLayout mAddToStory;
+
+    @SuppressWarnings("deprecation")
+    @Override
+    public void onAttach(Activity activity) {
+        super.onAttach(activity);
+        // the hosting activity is the capture controller
+        mInterface = (BaseCaptureInterface) activity;
+    }
+
+    @Override
+    public void onResume() {
+        super.onResume();
+        // release any orientation lock a camera fragment may have applied
+        if (getActivity() != null)
+            getActivity().setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_UNSPECIFIED);
+    }
+
+    @Override
+    public void onViewCreated(View view, Bundle savedInstanceState) {
+        super.onViewCreated(view, savedInstanceState);
+        mOutputUri = getArguments().getString("output_uri");
+        mControlsFrame = view.findViewById(R.id.controlsFrame);
+        mRetry = (RelativeLayout) view.findViewById(R.id.retry);
+        mSaveStory = (RelativeLayout) view.findViewById(R.id.save_story);
+        mAddToStory = view.findViewById(R.id.add_to_story);
+
+        // retry is only shown when the launching intent allows it (defaults to true)
+        mRetry.setVisibility(
+                getArguments().getBoolean(CameraIntentKey.ALLOW_RETRY, true) ? View.VISIBLE : View.GONE);
+    }
+
+    @Override
+    public String getOutputUri() {
+        return getArguments().getString("output_uri");
+    }
+
+    /** Shows a simple OK dialog, used to surface preview/playback errors. */
+    void showDialog(String title, String errorMsg) {
+        new MaterialDialog.Builder(getActivity())
+                .title(title)
+                .content(errorMsg)
+                .positiveText(android.R.string.ok)
+                .show();
+    }
+}
diff --git a/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/Camera2Fragment.java b/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/Camera2Fragment.java
new file mode 100644
index 0000000..86bf727
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/Camera2Fragment.java
@@ -0,0 +1,1315 @@
+package tabian.com.instagramclone2.materialcamera.internal;
+
+
+import android.Manifest;
+import android.annotation.TargetApi;
+import android.app.Activity;
+import android.app.Dialog;
+import android.app.DialogFragment;
+import android.content.Context;
+import android.content.pm.ActivityInfo;
+import android.content.pm.PackageManager;
+import android.graphics.ImageFormat;
+import android.graphics.Matrix;
+import android.graphics.Point;
+import android.graphics.RectF;
+import android.graphics.SurfaceTexture;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.TotalCaptureResult;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.media.CamcorderProfile;
+import android.media.Image;
+import android.media.ImageReader;
+import android.media.MediaRecorder;
+import android.net.Uri;
+import android.os.Build;
+import android.os.Bundle;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.support.annotation.NonNull;
+import android.support.annotation.Nullable;
+import android.support.v4.app.ActivityCompat;
+import android.support.v4.content.ContextCompat;
+import android.util.Log;
+import android.util.Size;
+import android.util.SparseIntArray;
+import android.view.Surface;
+import android.view.TextureView;
+import android.view.View;
+import android.widget.Toast;
+
+import com.afollestad.materialdialogs.DialogAction;
+import com.afollestad.materialdialogs.MaterialDialog;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+import java.util.concurrent.Semaphore;
+import java.util.concurrent.TimeUnit;
+
+import tabian.com.instagramclone2.R;
+import tabian.com.instagramclone2.materialcamera.util.CameraUtil;
+import tabian.com.instagramclone2.materialcamera.util.Degrees;
+
+import static tabian.com.instagramclone2.materialcamera.internal.BaseCaptureActivity.CAMERA_POSITION_BACK;
+import static tabian.com.instagramclone2.materialcamera.internal.BaseCaptureActivity.CAMERA_POSITION_FRONT;
+import static tabian.com.instagramclone2.materialcamera.internal.BaseCaptureActivity.CAMERA_POSITION_UNKNOWN;
+import static tabian.com.instagramclone2.materialcamera.internal.BaseCaptureActivity.FLASH_MODE_ALWAYS_ON;
+import static tabian.com.instagramclone2.materialcamera.internal.BaseCaptureActivity.FLASH_MODE_AUTO;
+import static tabian.com.instagramclone2.materialcamera.internal.BaseCaptureActivity.FLASH_MODE_OFF;
+
+/** @author Aidan Follestad (afollestad) */
+@TargetApi(Build.VERSION_CODES.LOLLIPOP)
+public class Camera2Fragment extends BaseCameraFragment implements View.OnClickListener {
+
+    private static final String TAG = "Camera2Fragment";
+
+    /** The currently opened camera device; null while closed. */
+    private CameraDevice mCameraDevice;
+    /** Active capture session driving the preview surface. */
+    private CameraCaptureSession mPreviewSession;
+    /** TextureView the preview is rendered into. */
+    private AutoFitTextureView mTextureView;
+    /** An {@link ImageReader} that handles still image capture. */
+    private ImageReader mImageReader;
+
+    /** Chosen size of the camera preview frames. */
+    private Size mPreviewSize;
+    /** Chosen size for video recording. */
+    private Size mVideoSize;
+    /** Display orientation derived from sensor + device rotation in openCamera(). */
+    @Degrees.DegreeUnits
+    private int mDisplayOrientation;
+    // NOTE(review): presumably set when camera characteristics report auto-focus
+    // support — assignment is outside this excerpt, confirm.
+    private boolean mAfAvailable;
+
+    /** {@link CaptureRequest.Builder} for the camera preview */
+    private CaptureRequest.Builder mPreviewBuilder;
+    /** {@link CaptureRequest} generated by {@link #mPreviewBuilder} */
+    private CaptureRequest mPreviewRequest;
+
+    /** Background thread + handler so camera callbacks stay off the UI thread. */
+    private HandlerThread mBackgroundThread;
+    private Handler mBackgroundHandler;
+    /** Guards open/close of the camera so we never exit with the camera held. */
+    private final Semaphore mCameraOpenCloseLock = new Semaphore(1);
+
+    /**
+     * Opens the camera once the preview surface exists, and re-applies the preview
+     * transform whenever the surface is resized.
+     */
+    private final TextureView.SurfaceTextureListener mSurfaceTextureListener =
+            new TextureView.SurfaceTextureListener() {
+                @Override
+                public void onSurfaceTextureAvailable(
+                        SurfaceTexture surfaceTexture, int width, int height) {
+                    openCamera();
+                }
+
+                @Override
+                public void onSurfaceTextureSizeChanged(
+                        SurfaceTexture surfaceTexture, int width, int height) {
+                    configureTransform(width, height);
+                }
+
+                @Override
+                public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
+                    // returning true lets the TextureView release the surface itself
+                    return true;
+                }
+
+                @Override
+                public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
+                }
+            };
+
+    /**
+     * Handles camera device state transitions. Each callback releases the
+     * open/close semaphore acquired in openCamera(); on success the preview is
+     * started, on error the device is closed and a readable message is surfaced.
+     */
+    private final CameraDevice.StateCallback mStateCallback =
+            new CameraDevice.StateCallback() {
+                @Override
+                public void onOpened(@NonNull CameraDevice cameraDevice) {
+                    mCameraOpenCloseLock.release();
+                    mCameraDevice = cameraDevice;
+                    startPreview();
+                    if (null != mTextureView) {
+                        configureTransform(mTextureView.getWidth(), mTextureView.getHeight());
+                    }
+                    onCameraOpened();
+                }
+
+                @Override
+                public void onDisconnected(@NonNull CameraDevice cameraDevice) {
+                    mCameraOpenCloseLock.release();
+                    cameraDevice.close();
+                    mCameraDevice = null;
+                }
+
+                @Override
+                public void onError(@NonNull CameraDevice cameraDevice, int error) {
+                    mCameraOpenCloseLock.release();
+                    cameraDevice.close();
+                    mCameraDevice = null;
+
+                    // map framework error codes to user-readable messages
+                    String errorMsg = "Unknown camera error";
+                    switch (error) {
+                        case CameraDevice.StateCallback.ERROR_CAMERA_IN_USE:
+                            errorMsg = "Camera is already in use.";
+                            break;
+                        case CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE:
+                            errorMsg = "Max number of cameras are open, close previous cameras first.";
+                            break;
+                        case CameraDevice.StateCallback.ERROR_CAMERA_DISABLED:
+                            errorMsg = "Camera is disabled, e.g. due to device policies.";
+                            break;
+                        case CameraDevice.StateCallback.ERROR_CAMERA_DEVICE:
+                            errorMsg = "Camera device has encountered a fatal error, please try again.";
+                            break;
+                        case CameraDevice.StateCallback.ERROR_CAMERA_SERVICE:
+                            errorMsg = "Camera service has encountered a fatal error, please try again.";
+                            break;
+                    }
+                    throwError(new Exception(errorMsg));
+                }
+            };
+
+    /** Conversion from screen rotation to JPEG orientation. */
+    private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
+
+    static {
+        // NOTE(review): mapping assumes the common 90-degree-mounted sensor — confirm
+        // against SENSOR_ORIENTATION on devices with other mountings.
+        ORIENTATIONS.append(Surface.ROTATION_0, 90);
+        ORIENTATIONS.append(Surface.ROTATION_90, 0);
+        ORIENTATIONS.append(Surface.ROTATION_180, 270);
+        ORIENTATIONS.append(Surface.ROTATION_270, 180);
+    }
+
+    /**
+     * The current state of camera state for taking pictures.
+     *
+     * @see #mCaptureCallback
+     */
+    private int mState = STATE_PREVIEW;
+
+    /** Camera state: Showing camera preview. */
+    private static final int STATE_PREVIEW = 0;
+
+    /** Camera state: Waiting for the focus to be locked. */
+    private static final int STATE_WAITING_LOCK = 1;
+
+    /** Camera state: Waiting for the exposure to be precapture state. */
+    private static final int STATE_WAITING_PRECAPTURE = 2;
+
+    /** Camera state: Waiting for the exposure state to be something other than precapture. */
+    private static final int STATE_WAITING_NON_PRECAPTURE = 3;
+
+    /** Camera state: Picture was taken. */
+    private static final int STATE_PICTURE_TAKEN = 4;
+
+    /** Max preview width that is guaranteed by Camera2 API */
+    private static final int MAX_PREVIEW_WIDTH = 1920;
+
+    /** Max preview height that is guaranteed by Camera2 API */
+    private static final int MAX_PREVIEW_HEIGHT = 1080;
+
+    /**
+     * A {@link CameraCaptureSession.CaptureCallback} that handles events related to JPEG
+     * capture. Drives the AF/AE state machine: lock focus, run precapture metering if
+     * needed, then capture the still once both have settled.
+     */
+    private CameraCaptureSession.CaptureCallback mCaptureCallback =
+            new CameraCaptureSession.CaptureCallback() {
+
+                private void process(CaptureResult result) {
+                    switch (mState) {
+                        case STATE_PREVIEW: {
+                            // We have nothing to do when the camera preview is working normally.
+                            break;
+                        }
+                        case STATE_WAITING_LOCK: {
+                            Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
+                            if (afState == null) {
+                                // device doesn't report AF state — capture immediately
+                                captureStillPicture();
+                            } else if (CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED == afState
+                                    || CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED == afState) {
+                                // CONTROL_AE_STATE can be null on some devices
+                                Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
+                                if (aeState == null || aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED) {
+                                    mState = STATE_PICTURE_TAKEN;
+                                    captureStillPicture();
+                                } else {
+                                    // exposure hasn't converged yet — trigger precapture metering
+                                    runPrecaptureSequence();
+                                }
+                            }
+                            break;
+                        }
+                        case STATE_WAITING_PRECAPTURE: {
+                            // CONTROL_AE_STATE can be null on some devices
+                            Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
+                            if (aeState == null
+                                    || aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE
+                                    || aeState == CaptureRequest.CONTROL_AE_STATE_FLASH_REQUIRED
+                                    || aeState == CameraMetadata.CONTROL_AE_STATE_CONVERGED) {
+                                mState = STATE_WAITING_NON_PRECAPTURE;
+                            }
+                            break;
+                        }
+                        case STATE_WAITING_NON_PRECAPTURE: {
+                            // CONTROL_AE_STATE can be null on some devices
+                            Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
+                            if (aeState == null || aeState != CaptureResult.CONTROL_AE_STATE_PRECAPTURE) {
+                                mState = STATE_PICTURE_TAKEN;
+                                captureStillPicture();
+                            }
+                            break;
+                        }
+                    }
+                }
+
+                @Override
+                public void onCaptureProgressed(
+                        @NonNull CameraCaptureSession session,
+                        @NonNull CaptureRequest request,
+                        @NonNull CaptureResult partialResult) {
+                    process(partialResult);
+                }
+
+                @Override
+                public void onCaptureCompleted(
+                        @NonNull CameraCaptureSession session,
+                        @NonNull CaptureRequest request,
+                        @NonNull TotalCaptureResult result) {
+                    process(result);
+                }
+            };
+
+    /** Factory for a fragment instance retained across configuration changes. */
+    public static Camera2Fragment newInstance() {
+        Camera2Fragment fragment = new Camera2Fragment();
+        fragment.setRetainInstance(true);
+        return fragment;
+    }
+
+    /**
+     * Picks a recording size no taller than the preferred height: an exact aspect
+     * match wins; otherwise the last size under the cap is used; failing that, the
+     * smallest supported size.
+     *
+     * <p>Fix: the original re-tested {@code videoPreferredHeight() >= size.getHeight()}
+     * inside a branch where it was already guaranteed true — simplified away.
+     *
+     * @param ci supplies the preferred height and aspect ratio
+     * @param choices supported video sizes, as reported by the camera
+     */
+    private static Size chooseVideoSize(BaseCaptureInterface ci, Size[] choices) {
+        Size backupSize = null;
+        for (Size size : choices) {
+            Log.d(TAG, "chooseVideoSize: size: " + size);
+            if (size.getHeight() <= ci.videoPreferredHeight()) {
+                // exact aspect match wins immediately
+                if (size.getWidth() == size.getHeight() * ci.videoPreferredAspect()) return size;
+                // otherwise remember the last size under the height cap as a fallback
+                backupSize = size;
+            }
+        }
+        if (backupSize != null) return backupSize;
+        LOG(Camera2Fragment.class, "Couldn't find any suitable video size");
+        return choices[choices.length - 1];
+    }
+
+    /**
+     * Picks the smallest supported size that matches {@code aspectRatio} and is at
+     * least {@code width} x {@code height}; falls back to {@code aspectRatio} itself
+     * when nothing qualifies.
+     *
+     * <p>Fix: restored the {@code List<Size>} type parameter (stripped in transit);
+     * with a raw List, {@code Collections.min} returns Object and cannot satisfy the
+     * Size return type.
+     */
+    private static Size chooseOptimalSize(Size[] choices, int width, int height, Size aspectRatio) {
+        // Collect the supported resolutions that are at least as big as the preview Surface
+        List<Size> bigEnough = new ArrayList<>();
+        int w = aspectRatio.getWidth();
+        int h = aspectRatio.getHeight();
+        for (Size option : choices) {
+            Log.d(TAG, "chooseOptimalSize: size: " + option);
+            if (option.getHeight() == option.getWidth() * h / w
+                    && option.getWidth() >= width
+                    && option.getHeight() >= height) {
+                bigEnough.add(option);
+            }
+        }
+
+        // Pick the smallest of those, assuming we found any
+        if (bigEnough.size() > 0) {
+            return Collections.min(bigEnough, new CompareSizesByArea());
+        } else {
+            LOG(Camera2Fragment.class, "Couldn't find any suitable preview size");
+            return aspectRatio;
+        }
+    }
+
+    /**
+     * Given {@code choices} of {@code Size}s supported by a camera, choose the smallest one that is
+     * at least as large as the respective texture view size, and that is at most as large as the
+     * respective max size, and whose aspect ratio matches with the specified value. If such size
+     * doesn't exist, choose the largest one that is at most as large as the respective max size, and
+     * whose aspect ratio matches with the specified value.
+     *
+     * <p>Fix: restored the {@code List<Size>} type parameters (stripped in transit); raw
+     * lists make {@code Collections.min}/{@code max} return Object, breaking the Size return.
+     *
+     * @param choices The list of sizes that the camera supports for the intended output class
+     * @param textureViewWidth The width of the texture view relative to sensor coordinate
+     * @param textureViewHeight The height of the texture view relative to sensor coordinate
+     * @param maxWidth The maximum width that can be chosen
+     * @param maxHeight The maximum height that can be chosen
+     * @param aspectRatio The aspect ratio
+     * @return The optimal {@code Size}, or an arbitrary one if none were big enough
+     */
+    private static Size chooseOptimalSize(
+            Size[] choices,
+            int textureViewWidth,
+            int textureViewHeight,
+            int maxWidth,
+            int maxHeight,
+            Size aspectRatio) {
+
+        // Collect the supported resolutions that are at least as big as the preview Surface
+        List<Size> bigEnough = new ArrayList<>();
+        // Collect the supported resolutions that are smaller than the preview Surface
+        List<Size> notBigEnough = new ArrayList<>();
+        int w = aspectRatio.getWidth();
+        int h = aspectRatio.getHeight();
+        for (Size option : choices) {
+            if (option.getWidth() <= maxWidth
+                    && option.getHeight() <= maxHeight
+                    && option.getHeight() == option.getWidth() * h / w) {
+                if (option.getWidth() >= textureViewWidth && option.getHeight() >= textureViewHeight) {
+                    bigEnough.add(option);
+                } else {
+                    notBigEnough.add(option);
+                }
+            }
+        }
+
+        // Pick the smallest of those big enough. If there is no one big enough, pick the
+        // largest of those not big enough.
+        if (bigEnough.size() > 0) {
+            return Collections.min(bigEnough, new CompareSizesByArea());
+        } else if (notBigEnough.size() > 0) {
+            return Collections.max(notBigEnough, new CompareSizesByArea());
+        } else {
+            LOG(Camera2Fragment.class, "Couldn't find any suitable preview size");
+            return choices[0];
+        }
+    }
+
+    /** Grabs the preview TextureView once the fragment's view hierarchy exists. */
+    @Override
+    public void onViewCreated(final View view, Bundle savedInstanceState) {
+        super.onViewCreated(view, savedInstanceState);
+        mTextureView = (AutoFitTextureView) view.findViewById(R.id.texture);
+    }
+
+    /** Releases the preview surface and drops the TextureView reference. */
+    @Override
+    public void onDestroyView() {
+        super.onDestroyView();
+        try {
+            mTextureView.getSurfaceTexture().release();
+        } catch (Throwable ignored) {
+            // deliberately ignored: the view/surface may already be gone at teardown
+        }
+        mTextureView = null;
+
+    }
+
+    /**
+     * Restarts the camera thread and opens the camera. If the preview surface
+     * already exists the camera opens immediately; otherwise we wait for the
+     * SurfaceTextureListener callback.
+     */
+    @Override
+    public void onResume() {
+        super.onResume();
+        startBackgroundThread();
+        if (mTextureView.isAvailable()) {
+            openCamera();
+        } else {
+            mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
+        }
+    }
+
+    /** Shuts the camera thread down before the fragment pauses. */
+    @Override
+    public void onPause() {
+        stopBackgroundThread();
+        super.onPause();
+    }
+
+    /** Starts a background thread and its {@link Handler} for camera callbacks. */
+    private void startBackgroundThread() {
+        mBackgroundThread = new HandlerThread("CameraBackground");
+        mBackgroundThread.start();
+        mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
+    }
+
+    /**
+     * Stops the recording counter and shuts down the background camera thread,
+     * blocking until it has finished.
+     *
+     * <p>Fix: on {@link InterruptedException} the interrupt flag is now restored
+     * ({@code Thread.currentThread().interrupt()}) instead of being swallowed.
+     */
+    private void stopBackgroundThread() {
+        stopCounter();
+        mBackgroundThread.quitSafely();
+        try {
+            mBackgroundThread.join();
+            mBackgroundThread = null;
+            mBackgroundHandler = null;
+        } catch (InterruptedException e) {
+            Thread.currentThread().interrupt();
+            e.printStackTrace();
+        }
+    }
+
  /**
   * Opens the camera currently selected through {@code mInterface} (front or back), chooses the
   * preview, video and (in still-shot mode) JPEG capture sizes, configures the TextureView's
   * aspect ratio and transform, and finally asks the system {@link CameraManager} to open the
   * device. The open result is delivered asynchronously to {@code mStateCallback}.
   */
  @Override
  public void openCamera() {
    final int width = mTextureView.getWidth();
    final int height = mTextureView.getHeight();

    Log.d(TAG, "openCamera: texture view width: " + width);
    Log.d(TAG, "openCamera: texture view height: " + height);

    // Nothing to do if the hosting activity is gone or shutting down.
    final Activity activity = getActivity();
    if (null == activity || activity.isFinishing()) return;

    final CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
      // Serialize open/close; give up after 2.5s rather than blocking forever.
      if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
        throwError(new Exception("Time out waiting to lock camera opening."));
        return;
      }

      // First run: discover and cache the ids of the front and back cameras.
      if (mInterface.getFrontCamera() == null || mInterface.getBackCamera() == null) {
        for (String cameraId : manager.getCameraIdList()) {
          if (cameraId == null) continue;
          if (mInterface.getFrontCamera() != null && mInterface.getBackCamera() != null) break;
          CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
          //noinspection ConstantConditions
          int facing = characteristics.get(CameraCharacteristics.LENS_FACING);
          if (facing == CameraCharacteristics.LENS_FACING_FRONT)
            mInterface.setFrontCamera(cameraId);
          else if (facing == CameraCharacteristics.LENS_FACING_BACK)
            mInterface.setBackCamera(cameraId);
        }
      }

      // Resolve which camera to use; the facing button always shows the icon of the OTHER
      // camera (the one a tap would switch to).
      switch (mInterface.getCurrentCameraPosition()) {
        case CAMERA_POSITION_FRONT:
          setImageRes(mButtonFacing, mInterface.iconRearCamera());
          break;
        case CAMERA_POSITION_BACK:
          setImageRes(mButtonFacing, mInterface.iconFrontCamera());
          break;
        case CAMERA_POSITION_UNKNOWN:
        default:
          if (getArguments().getBoolean(CameraIntentKey.DEFAULT_TO_FRONT_FACING, false)) {
            // Check front facing first
            if (mInterface.getFrontCamera() != null) {
              setImageRes(mButtonFacing, mInterface.iconRearCamera());
              mInterface.setCameraPosition(CAMERA_POSITION_FRONT);
            } else {
              setImageRes(mButtonFacing, mInterface.iconFrontCamera());
              if (mInterface.getBackCamera() != null)
                mInterface.setCameraPosition(CAMERA_POSITION_BACK);
              else mInterface.setCameraPosition(CAMERA_POSITION_UNKNOWN);
            }
          } else {
            // Check back facing first
            if (mInterface.getBackCamera() != null) {
              setImageRes(mButtonFacing, mInterface.iconFrontCamera());
              mInterface.setCameraPosition(CAMERA_POSITION_BACK);
            } else {
              setImageRes(mButtonFacing, mInterface.iconRearCamera());
              if (mInterface.getFrontCamera() != null)
                mInterface.setCameraPosition(CAMERA_POSITION_FRONT);
              else mInterface.setCameraPosition(CAMERA_POSITION_UNKNOWN);
            }
          }
          break;
      }

      // Choose the sizes for camera preview and video recording
      CameraCharacteristics characteristics =
          manager.getCameraCharacteristics((String) mInterface.getCurrentCameraId());
      StreamConfigurationMap map =
          characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
      assert map != null;

      // For still image captures, we use the largest available size.
      // NOTE(review): `largest` is no longer consumed below (the chooseOptimalSize overload
      // that used it was disabled); kept for parity with the upstream Camera2 sample.
      Size largest =
          Collections.max(
              Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea());
      // Find out if we need to swap dimension to get the preview size relative to sensor
      // coordinate.
      int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
      //noinspection ConstantConditions,ResourceType
      @Degrees.DegreeUnits final int sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);

      @Degrees.DegreeUnits int deviceRotation = Degrees.getDisplayRotation(getActivity());
      mDisplayOrientation =
          Degrees.getDisplayOrientation(
              sensorOrientation,
              deviceRotation,
              getCurrentCameraPosition() == CAMERA_POSITION_FRONT);
      // NOTE(review): the degree symbol below is a mojibake artifact ("Ëš"); left untouched
      // because it is a runtime string.
      Log.d(
          "Camera2Fragment",
          String.format(
              "Orientations: Sensor = %dËš, Device = %dËš, Display = %dËš",
              sensorOrientation, deviceRotation, mDisplayOrientation));

      if (mInterface.useStillshot()) {
        // Sensor coordinates are landscape-oriented: when the sensor is rotated 90/270
        // relative to the display we must swap width and height.
        boolean swappedDimensions = false;
        switch (displayRotation) {
          case Surface.ROTATION_0:
          case Surface.ROTATION_180:
            if (sensorOrientation == Degrees.DEGREES_90
                || sensorOrientation == Degrees.DEGREES_270) {
              swappedDimensions = true;
            }
            break;
          case Surface.ROTATION_90:
          case Surface.ROTATION_270:
            if (sensorOrientation == Degrees.DEGREES_0
                || sensorOrientation == Degrees.DEGREES_180) {
              swappedDimensions = true;
            }
            break;
          default:
            Log.e("stillshot", "Display rotation is invalid: " + displayRotation);
        }

        Point displaySize = new Point();
        activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
        int rotatedPreviewWidth = width;
        int rotatedPreviewHeight = height;
        int maxPreviewWidth = displaySize.x;
        int maxPreviewHeight = displaySize.y;

        Log.d(TAG, "openCamera: max preview width: " + maxPreviewWidth);
        Log.d(TAG, "openCamera: max preview height: " + maxPreviewHeight);
        Log.d(TAG, "openCamera: max preview rotated width: " + rotatedPreviewWidth);
        Log.d(TAG, "openCamera: max preview rotated height: " + rotatedPreviewHeight);

        if (swappedDimensions) {
          rotatedPreviewWidth = height;
          rotatedPreviewHeight = width;
          maxPreviewWidth = displaySize.y;
          maxPreviewHeight = displaySize.x;
        }

        // Clamp to the documented Camera2 guaranteed preview limits.
        if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
          maxPreviewWidth = MAX_PREVIEW_WIDTH;
        }

        if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
          maxPreviewHeight = MAX_PREVIEW_HEIGHT;
        }

        // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
        // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
        // garbage capture data.
        // NOTE(review): the upstream sample chose mPreviewSize from the rotated/clamped
        // bounds and `largest` here; this fork instead derives it from the video size below.
        // The rotatedPreview*/maxPreview* values above are therefore currently log-only.

        mVideoSize =
            chooseVideoSize(
                (BaseCaptureInterface) activity, map.getOutputSizes(MediaRecorder.class));

        mPreviewSize =
            chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), width, height, mVideoSize);

        // Still shots are captured at the preview size (not `largest`) in this fork.
        mImageReader =
            ImageReader.newInstance(mPreviewSize.getWidth(), mPreviewSize.getHeight(), ImageFormat.JPEG, 2);

        mImageReader.setOnImageAvailableListener(
            new ImageReader.OnImageAvailableListener() {
              // Runs on the background handler thread: copies the JPEG bytes to disk and
              // hands the resulting URI to the host for the confirmation screen.
              @Override
              public void onImageAvailable(ImageReader reader) {
                Image image = reader.acquireNextImage();
                ByteBuffer buffer = image.getPlanes()[0].getBuffer();
                final byte[] bytes = new byte[buffer.remaining()];
                buffer.get(bytes);

                final File outputPic = getOutputPictureFile();

                FileOutputStream output = null;
                try {
                  output = new FileOutputStream(outputPic);
                  output.write(bytes);
                } catch (IOException e) {
                  e.printStackTrace();
                } finally {
                  // Always release the Image so the (maxImages=2) reader can keep acquiring.
                  image.close();
                  if (null != output) {
                    try {
                      output.close();
                    } catch (IOException e) {
                      e.printStackTrace();
                    }
                  }
                }
                Log.d("stillshot", "picture saved to disk - jpeg, size: " + bytes.length);
                mOutputUri = Uri.fromFile(outputPic).toString();
                mInterface.onShowStillshot(mOutputUri);
              }
            },
            mBackgroundHandler);
      } else {
        // Video mode: size the preview from the chosen recording size.
        mMediaRecorder = new MediaRecorder();

        mVideoSize =
            chooseVideoSize(
                (BaseCaptureInterface) activity, map.getOutputSizes(MediaRecorder.class));

        mPreviewSize =
            chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), width, height, mVideoSize);


        Log.d(TAG, "openCamera: preview size: " + mPreviewSize.toString());
      }

      // Match the TextureView's aspect ratio to the preview; the preview size is expressed in
      // landscape sensor coordinates, so portrait swaps width/height.
      int orientation = VideoStreamView.getScreenOrientation(activity);
      Log.d(TAG, "openCamera: ORIENTATION: " + orientation);
      if (orientation == ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE
          || orientation == ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE) {
        Log.d(TAG, "openCamera: orientation is landscape.");
        Log.d(TAG, "openCamera: orientation: " + orientation);
        Log.d(TAG, "openCamera: preview width: " + mPreviewSize.getWidth());
        Log.d(TAG, "openCamera: preview height: " + mPreviewSize.getHeight());

        mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
      }
      else {
        Log.d(TAG, "openCamera: orientation is portrait.");
        Log.d(TAG, "openCamera: orientation: " + orientation);
        Log.d(TAG, "openCamera: preview width: " + mPreviewSize.getWidth());
        Log.d(TAG, "openCamera: preview height: " + mPreviewSize.getHeight());
        mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
      }

      // Autofocus is available when any AF mode other than OFF (0) is advertised.
      mAfAvailable = false;
      int[] afModes = characteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
      if (afModes != null) {
        for (int i : afModes) {
          if (i != 0) {
            mAfAvailable = true;
            break;
          }
        }
      }

      configureTransform(width, height);

      mInterface.setFlashModes(CameraUtil.getSupportedFlashModes(getActivity(), characteristics));
      onFlashModesLoaded();

      // noinspection ResourceType
      if (ActivityCompat.checkSelfPermission(getActivity(), Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
        // Camera permission not granted: bail out without opening. The lock acquired above
        // is intentionally NOT released here, matching the open path's ownership model
        // (mStateCallback releases it) -- TODO confirm this cannot deadlock reopen.
        return;
      }
      manager.openCamera((String) mInterface.getCurrentCameraId(), mStateCallback, null);
    } catch (CameraAccessException e) {
      throwError(new Exception("Cannot access the camera.", e));
    } catch (NullPointerException e) {
      // Currently an NPE is thrown when the Camera2API is used but not supported on the
      // device this code runs.
      new ErrorDialog().show(getFragmentManager(), "dialog");
    } catch (InterruptedException e) {
      throwError(new Exception("Interrupted while trying to lock camera opening.", e));
    }

    // NOTE(review): a large commented-out tap-to-focus experiment (MeteringRectangle +
    // AF trigger on touch, flagged "camera won't focus ... samsung video") previously lived
    // here as dead code; removed for readability -- see version control history to revive it.
  }
+
+
+ @Override
+ public void closeCamera() {
+ try {
+ if (mOutputUri != null) {
+ final File outputFile = new File(Uri.parse(mOutputUri).getPath());
+ if (outputFile.length() == 0) outputFile.delete();
+ }
+ mCameraOpenCloseLock.acquire();
+ if (null != mCameraDevice) {
+ mCameraDevice.close();
+ mCameraDevice = null;
+ }
+ if (null != mMediaRecorder) {
+ mMediaRecorder.release();
+ mMediaRecorder = null;
+ }
+ } catch (InterruptedException e) {
+ throwError(new Exception("Interrupted while trying to lock camera opening.", e));
+ } finally {
+ mCameraOpenCloseLock.release();
+ }
+ }
+
+ private void deleteOutputFile(@Nullable String uri) {
+ if (uri != null)
+ //noinspection ResultOfMethodCallIgnored
+ new File(Uri.parse(uri).getPath()).delete();
+ }
+
+
+ @Override
+ public void onPreferencesUpdated() {
+ Log.d(TAG, "onPreferencesUpdated: called.");
+ if (mInterface == null
+ || !mInterface.useStillshot()
+ || mPreviewSession == null
+ || mPreviewBuilder == null) {
+ return;
+ }
+ setFlashMode(mPreviewBuilder);
+ mPreviewRequest = mPreviewBuilder.build();
+ try {
+ mPreviewSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback, mBackgroundHandler);
+ } catch (Throwable t) {
+ t.printStackTrace();
+ }
+ }
+
+
+ private void startPreview() {
+ if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize) return;
+ try {
+ if (!mInterface.useStillshot()) {
+ if (!setUpMediaRecorder()) {
+ return;
+ }
+ }
+ SurfaceTexture texture = mTextureView.getSurfaceTexture();
+ assert texture != null;
+ texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
+
+ List surfaces = new ArrayList<>();
+ Surface previewSurface = new Surface(texture);
+ surfaces.add(previewSurface);
+ if (mInterface.useStillshot()) {
+ mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
+ mPreviewBuilder.addTarget(previewSurface);
+
+ surfaces.add(mImageReader.getSurface());
+ } else {
+ mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
+
+ mPreviewBuilder.addTarget(previewSurface);
+ Surface recorderSurface = mMediaRecorder.getSurface();
+ surfaces.add(recorderSurface);
+ mPreviewBuilder.addTarget(recorderSurface);
+ }
+
+ mCameraDevice.createCaptureSession(
+ surfaces,
+ new CameraCaptureSession.StateCallback() {
+ @Override
+ public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
+ if (mCameraDevice == null) {
+ return;
+ }
+ mPreviewSession = cameraCaptureSession;
+ updatePreview();
+ }
+
+ @Override
+ public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
+ throwError(new Exception("Camera configuration failed"));
+ }
+ },
+ mBackgroundHandler);
+ } catch (CameraAccessException e) {
+ e.printStackTrace();
+ }
+ }
+
+ private void updatePreview() {
+ if (null == mCameraDevice) {
+ return;
+ }
+
+ try {
+ if (mInterface.useStillshot()) {
+ mPreviewBuilder.set(
+// CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO
+ CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE
+ );
+ // Flash is automatically enabled when necessary.
+ setFlashMode(mPreviewBuilder);
+
+ // Finally, we start displaying the camera preview.
+ mPreviewRequest = mPreviewBuilder.build();
+ mPreviewSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback, mBackgroundHandler);
+ } else {
+// mPreviewBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH);
+ setFlashMode(mPreviewBuilder);
+ setUpCaptureRequestBuilder(mPreviewBuilder);
+ mPreviewRequest = mPreviewBuilder.build();
+ mPreviewSession.setRepeatingRequest(mPreviewRequest, null, mBackgroundHandler);
+ }
+ } catch (CameraAccessException e) {
+ e.printStackTrace();
+ }
+ }
+
+ private void setUpCaptureRequestBuilder(CaptureRequest.Builder builder) {
+ builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
+ }
+
  /**
   * Configures the {@link Matrix} transform applied to {@code mTextureView} so the preview is
   * drawn correctly after device rotation (mirrors the Camera2Basic sample). In landscape
   * (ROTATION_90/270) the sensor buffer rect is centered on the view, scaled to cover it, and
   * rotated by ±90°; ROTATION_180 simply flips the image.
   *
   * @param viewWidth  current TextureView width in pixels
   * @param viewHeight current TextureView height in pixels
   */
  private void configureTransform(int viewWidth, int viewHeight) {
    Activity activity = getActivity();
    if (null == mTextureView || null == mPreviewSize || null == activity) {
      return;
    }
    Log.d(TAG, "configureTransform: view width: " + viewWidth);
    Log.d(TAG, "configureTransform: view height: " + viewHeight);
    int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
    Matrix matrix = new Matrix();
    RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
    // Width/height are swapped: the sensor buffer is landscape-oriented relative to the view.
    RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
    float centerX = viewRect.centerX();
    float centerY = viewRect.centerY();
    if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
      bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
      matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
      // Scale up so the rotated preview covers the view in both dimensions.
      float scale =
          Math.max(
              (float) viewHeight / mPreviewSize.getHeight(),
              (float) viewWidth / mPreviewSize.getWidth());
      matrix.postScale(scale, scale, centerX, centerY);
      // rotation is 1 or 3 here, so this rotates by -90° or +90°.
      matrix.postRotate(90 * (rotation - 2), centerX, centerY);
    } else if (Surface.ROTATION_180 == rotation) {
      matrix.postRotate(180, centerX, centerY);
    }
    mTextureView.setTransform(matrix);

  }
+
  /**
   * Configures {@code mMediaRecorder} for video capture — audio/video sources, camcorder
   * profile, size, bitrates, output file, optional max-file-size limit and orientation hint —
   * and calls {@code prepare()}. The setter order below follows the MediaRecorder state
   * machine and must not be rearranged.
   *
   * @return {@code true} when {@code prepare()} succeeded; {@code false} otherwise (the error
   *     is surfaced through {@code throwError})
   */
  private boolean setUpMediaRecorder() {
    final Activity activity = getActivity();
    if (null == activity) return false;
    final BaseCaptureInterface captureInterface = (BaseCaptureInterface) activity;
    if (mMediaRecorder == null) mMediaRecorder = new MediaRecorder();

    boolean canUseAudio = true;
    boolean audioEnabled = !mInterface.audioDisabled();
    // Runtime RECORD_AUDIO permission only exists on Marshmallow and later.
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M)
      canUseAudio =
          ContextCompat.checkSelfPermission(activity, Manifest.permission.RECORD_AUDIO)
              == PackageManager.PERMISSION_GRANTED;

    if (canUseAudio && audioEnabled) {
      mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
    } else if (audioEnabled) {
      // Audio requested but permission missing: record video-only and notify the user.
      Toast.makeText(getActivity(), R.string.mcam_no_audio_access, Toast.LENGTH_LONG).show();
    }
    mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);

    // NOTE(review): the camera id is hard-coded to 0 (typically the back camera), so the
    // profile may not match when recording with the front camera — TODO confirm intended.
    final CamcorderProfile profile = CamcorderProfile.get(0, mInterface.qualityProfile());
    mMediaRecorder.setOutputFormat(profile.fileFormat);
    mMediaRecorder.setVideoFrameRate(mInterface.videoFrameRate(profile.videoFrameRate));
    mMediaRecorder.setVideoSize( mVideoSize.getWidth(), mVideoSize.getHeight());
    mMediaRecorder.setVideoEncodingBitRate(mInterface.videoEncodingBitRate(profile.videoBitRate));
    mMediaRecorder.setVideoEncoder(profile.videoCodec);

    if (canUseAudio && audioEnabled) {
      mMediaRecorder.setAudioEncodingBitRate(mInterface.audioEncodingBitRate(profile.audioBitRate));
      mMediaRecorder.setAudioChannels(profile.audioChannels);
      mMediaRecorder.setAudioSamplingRate(profile.audioSampleRate);
      mMediaRecorder.setAudioEncoder(profile.audioCodec);
    }

    // Remember the output URI so the preview/confirmation screen can find the recording.
    Uri uri = Uri.fromFile(getOutputMediaFile());
    mOutputUri = uri.toString();
    mMediaRecorder.setOutputFile(uri.getPath());

    if (captureInterface.maxAllowedFileSize() > 0) {
      // Stop automatically (and tell the user) when the configured size cap is hit.
      mMediaRecorder.setMaxFileSize(captureInterface.maxAllowedFileSize());
      mMediaRecorder.setOnInfoListener(
          new MediaRecorder.OnInfoListener() {
            @Override
            public void onInfo(MediaRecorder mediaRecorder, int what, int extra) {
              if (what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED) {
                Toast.makeText(
                        getActivity(), R.string.mcam_file_size_limit_reached, Toast.LENGTH_SHORT)
                    .show();
                stopRecordingVideo(false);
              }
            }
          });
    }

    Log.d(TAG, "setUpMediaRecorder: orientation: " + mDisplayOrientation);
    Log.d(TAG, "setUpMediaRecorder: video width: " + mVideoSize.getWidth());
    Log.d(TAG, "setUpMediaRecorder: video height: " + mVideoSize.getHeight());
    mMediaRecorder.setOrientationHint(mDisplayOrientation);

    try {
      mMediaRecorder.prepare();
      return true;
    } catch (Throwable e) {
      throwError(new Exception("Failed to prepare the media recorder: " + e.getMessage(), e));
      return false;
    }
  }
+
  /**
   * Starts video recording: swaps the record button to a stop icon, applies the flash mode,
   * hides the facing/flash buttons (except on Chromium devices), starts the countdown when no
   * length limit is already running, then starts the MediaRecorder.
   *
   * @return {@code true} when recording started; {@code false} when any step threw (the
   *     failure is reported via {@code throwError} and recording is stopped)
   */
  @Override
  public boolean startRecordingVideo() {
    super.startRecordingVideo();
    try {
      // UI
      setImageRes(mButtonVideo, mInterface.iconStop());
      setFlashMode(mPreviewBuilder);
      if (!CameraUtil.isChromium()){
        mButtonFacing.setVisibility(View.GONE);
        mButtonFlash.setVisibility(View.GONE);
      }

      // Only start counter if count down wasn't already started
      if (!mInterface.hasLengthLimit()) {
        mInterface.setRecordingStart(System.currentTimeMillis());
        startCounter();
      }

      // Start recording
      mMediaRecorder.start();
      // Debounce: briefly disable the button so a double-tap cannot stop a recorder
      // that has not fully started yet.
      mButtonVideo.setEnabled(false);
      mButtonVideo.postDelayed(
          new Runnable() {
            @Override
            public void run() {
              mButtonVideo.setEnabled(true);
            }
          },
          200);

      return true;
    } catch (Throwable t) {
      t.printStackTrace();
      // Mark the recording as never-started before tearing down.
      mInterface.setRecordingStart(-1);
      stopRecordingVideo(false);
      throwError(new Exception("Failed to start recording: " + t.getMessage(), t));
    }
    return false;
  }
+
+ @Override
+ public void stopRecordingVideo(boolean reachedZero) {
+ super.stopRecordingVideo(reachedZero);
+
+ closeCamera();
+
+ if(reachedZero)
+ openCamera();
+
+ stopCounter();
+
+ if (mInterface.hasLengthLimit()
+ && mInterface.shouldAutoSubmit()
+ && (mInterface.getRecordingStart() < 0 || mMediaRecorder == null)) {
+// stopCounter();
+// releaseRecorder();
+ mInterface.onShowPreview(mOutputUri, reachedZero);
+ return;
+ }
+
+ if (!mInterface.didRecord()) mOutputUri = null;
+
+// releaseRecorder();
+ setImageRes(mButtonVideo, mInterface.iconRecord());
+ if (!CameraUtil.isChromium()){
+ mButtonFacing.setVisibility(View.VISIBLE);
+ mButtonFlash.setVisibility(View.VISIBLE);
+
+ }
+ if (mInterface.getRecordingStart() > -1 && getActivity() != null)
+ mInterface.onShowPreview(mOutputUri, reachedZero);
+
+ stopCounter();
+
+
+ }
+
+ @Override
+ /**
+ * @link http://pierrchen.blogspot.si/2015/01/android-camera2-api-explained.html
+ * @link
+ * https://github.com/googlesamples/android-Camera2Basic/blob/master/Application/src/main/java/com/example/android/camera2basic/Camera2BasicFragment.java
+ */
+ public void takeStillshot() {
+ lockFocus();
+ }
+
+ private void lockFocus() {
+ Log.d(TAG, "lockFocus: taking still shot.");
+ try {
+ if (mAfAvailable) {
+ // This is how to tell the camera to lock focus.
+ mPreviewBuilder.set(
+ CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
+ // Tell #mCaptureCallback to wait for the lock.
+ mState = STATE_WAITING_LOCK;
+ } else {
+ runPrecaptureSequence();
+ return;
+ }
+
+ setFlashMode(mPreviewBuilder);
+
+ mPreviewSession.capture(mPreviewBuilder.build(), mCaptureCallback, mBackgroundHandler);
+ } catch (CameraAccessException e) {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+ * Run the precapture sequence for capturing a still image. This method should be called when we
+ * get a response in {@link #mCaptureCallback} from {@link #lockFocus()}.
+ */
+ private void runPrecaptureSequence() {
+ try {
+ // This is how to tell the camera to trigger.
+ mPreviewBuilder.set(
+ CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
+ CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
+
+ // Tell #mCaptureCallback to wait for the precapture sequence to be set.
+ mState = STATE_WAITING_PRECAPTURE;
+ setFlashMode(mPreviewBuilder);
+
+ mPreviewSession.capture(mPreviewBuilder.build(), mCaptureCallback, mBackgroundHandler);
+ } catch (CameraAccessException e) {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+ * Capture a still picture. This method should be called when we get a response in {@link
+ * #mCaptureCallback} from both {@link #takeStillshot()}.
+ */
+ private void captureStillPicture() {
+ try {
+ final Activity activity = getActivity();
+ if (null == activity || null == mCameraDevice) {
+ return;
+ }
+ // This is the CaptureRequest.Builder that we use to take a picture.
+ final CaptureRequest.Builder captureBuilder =
+ mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
+ captureBuilder.addTarget(mImageReader.getSurface());
+
+ // Use the same AE and AF modes as the preview.
+ captureBuilder.set(
+ CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
+ setFlashMode(captureBuilder);
+
+ // Orientation
+ CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
+
+ CameraCharacteristics characteristics =
+ manager.getCameraCharacteristics(mCameraDevice.getId());
+
+ //noinspection ConstantConditions,ResourceType
+ @Degrees.DegreeUnits
+ final int sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
+ int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
+
+ // default camera orientation used to be 90 degrees, for Nexus 5X, 6P it is 270 degrees
+ if (sensorOrientation == Degrees.DEGREES_270) {
+ displayRotation += 2 % 3;
+ }
+
+ captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(displayRotation));
+
+ CameraCaptureSession.CaptureCallback CaptureCallback =
+ new CameraCaptureSession.CaptureCallback() {
+
+ @Override
+ public void onCaptureCompleted(
+ @NonNull CameraCaptureSession session,
+ @NonNull CaptureRequest request,
+ @NonNull TotalCaptureResult result) {
+ Log.d("stillshot", "onCaptureCompleted");
+ unlockFocus();
+ }
+ };
+
+ mPreviewSession.stopRepeating();
+ mPreviewSession.capture(captureBuilder.build(), CaptureCallback, null);
+ } catch (CameraAccessException e) {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+ * Unlock the focus. This method should be called when still image capture sequence is finished.
+ */
+ private void unlockFocus() {
+ try {
+ // Reset the auto-focus trigger
+ mPreviewBuilder.set(
+ CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
+ setFlashMode(mPreviewBuilder);
+ mPreviewSession.capture(mPreviewBuilder.build(), mCaptureCallback, mBackgroundHandler);
+ // After this, the camera will go back to the normal state of preview.
+ mState = STATE_PREVIEW;
+ mPreviewSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback, mBackgroundHandler);
+ } catch (CameraAccessException e) {
+ e.printStackTrace();
+ }
+ }
+
+
+ private void setFlashMode(CaptureRequest.Builder requestBuilder) {
+ Log.d(TAG, "setFlashMode: called.");
+ mPreviewBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
+
+ int aeMode;
+ int flashMode;
+ if(mInterface.useStillshot()){
+ switch (mInterface.getFlashMode()) {
+ case FLASH_MODE_AUTO:
+ aeMode = CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH;
+ flashMode = CameraMetadata.FLASH_MODE_SINGLE;
+ break;
+ case FLASH_MODE_ALWAYS_ON:
+ aeMode = CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH;
+ flashMode = CameraMetadata.FLASH_MODE_TORCH;
+ break;
+ case FLASH_MODE_OFF:
+ default:
+ aeMode = CaptureRequest.CONTROL_AE_MODE_ON;
+ flashMode = CameraMetadata.FLASH_MODE_OFF;
+ break;
+ }
+ requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, aeMode);
+ requestBuilder.set(CaptureRequest.FLASH_MODE, flashMode);
+ }
+ else if(!mInterface.useStillshot()){
+ switch (mInterface.getFlashModeVideo()) {
+ case FLASH_MODE_ALWAYS_ON:
+// aeMode = CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH;
+//// aeMode = CaptureRequest.CONTROL_AE_MODE_ON;
+// flashMode = CameraMetadata.FLASH_MODE_TORCH;
+ mPreviewBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH);
+ Log.d(TAG, "setFlashMode: video flash mode is ON.");
+ break;
+ case FLASH_MODE_OFF:
+ Log.d(TAG, "setFlashMode: video flash mode is OFF");
+ mPreviewBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
+ default:
+// aeMode = CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH;
+// aeMode = CaptureRequest.CONTROL_AE_MODE_ON;
+// flashMode = CameraMetadata.FLASH_MODE_OFF;
+ mPreviewBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
+ Log.d(TAG, "setFlashMode: video flash mode is OFF");
+ break;
+ }
+//// requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, aeMode);
+//// requestBuilder.set(CaptureRequest.FLASH_MODE, flashMode);
+ }
+
+ }
+
+
+ static class CompareSizesByArea implements Comparator {
+ @Override
+ public int compare(Size lhs, Size rhs) {
+ // We cast here to ensure the multiplications won't overflow
+ return Long.signum(
+ (long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight());
+ }
+ }
+
+ public static class ErrorDialog extends DialogFragment {
+ @Override
+ public Dialog onCreateDialog(Bundle savedInstanceState) {
+ final Activity activity = getActivity();
+ return new MaterialDialog.Builder(activity)
+ .content("This device doesn't support the Camera2 API.")
+ .positiveText(android.R.string.ok)
+ .onAny(
+ new MaterialDialog.SingleButtonCallback() {
+ @Override
+ public void onClick(
+ @NonNull MaterialDialog materialDialog, @NonNull DialogAction dialogAction) {
+ activity.finish();
+ }
+ })
+ .build();
+ }
+ }
+}
diff --git a/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/CameraFragment.java b/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/CameraFragment.java
new file mode 100644
index 0000000..1f58956
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/CameraFragment.java
@@ -0,0 +1,634 @@
+package tabian.com.instagramclone2.materialcamera.internal;
+
+import android.Manifest;
+import android.annotation.TargetApi;
+import android.app.Activity;
+import android.content.pm.PackageManager;
+import android.graphics.Point;
+import android.hardware.Camera;
+import android.media.CamcorderProfile;
+import android.media.MediaRecorder;
+import android.net.Uri;
+import android.os.Build;
+import android.os.Bundle;
+import android.support.v4.content.ContextCompat;
+import android.util.Log;
+import android.view.View;
+import android.widget.RelativeLayout;
+import android.widget.Toast;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+
+import tabian.com.instagramclone2.R;
+import tabian.com.instagramclone2.materialcamera.ICallback;
+import tabian.com.instagramclone2.materialcamera.util.CameraUtil;
+import tabian.com.instagramclone2.materialcamera.util.Degrees;
+import tabian.com.instagramclone2.materialcamera.util.ImageUtil;
+import tabian.com.instagramclone2.materialcamera.util.ManufacturerUtil;
+
+import static tabian.com.instagramclone2.materialcamera.internal.BaseCaptureActivity.CAMERA_POSITION_BACK;
+import static tabian.com.instagramclone2.materialcamera.internal.BaseCaptureActivity.CAMERA_POSITION_FRONT;
+import static tabian.com.instagramclone2.materialcamera.internal.BaseCaptureActivity.CAMERA_POSITION_UNKNOWN;
+import static tabian.com.instagramclone2.materialcamera.internal.BaseCaptureActivity.FLASH_MODE_ALWAYS_ON;
+import static tabian.com.instagramclone2.materialcamera.internal.BaseCaptureActivity.FLASH_MODE_AUTO;
+import static tabian.com.instagramclone2.materialcamera.internal.BaseCaptureActivity.FLASH_MODE_OFF;
+
+/** @author Aidan Follestad (afollestad) */
+@SuppressWarnings("deprecation")
+@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
+public class CameraFragment extends BaseCameraFragment implements View.OnClickListener {
+
+ private static final String TAG = "CameraFragment";
+
+ tabian.com.instagramclone2.materialcamera.internal.CameraPreview mPreviewView;
+ RelativeLayout mPreviewFrame;
+
+ private Camera.Size mVideoSize;
+ private Camera mCamera;
+ private Point mWindowSize;
+ private int mDisplayOrientation;
+ private boolean mIsAutoFocusing;
+ List mFlashModes;
+
+ public static CameraFragment newInstance() {
+ CameraFragment fragment = new CameraFragment();
+ fragment.setRetainInstance(true);
+ return fragment;
+ }
+
+ private static Camera.Size chooseVideoSize(BaseCaptureInterface ci, List choices) {
+ Camera.Size backupSize = null;
+ for (Camera.Size size : choices) {
+ if (size.height <= ci.videoPreferredHeight()) {
+ if (size.width == size.height * ci.videoPreferredAspect()) return size;
+ if (ci.videoPreferredHeight() >= size.height) backupSize = size;
+ }
+ }
+ if (backupSize != null) return backupSize;
+ LOG(CameraFragment.class, "Couldn't find any suitable video size");
+ return choices.get(choices.size() - 1);
+ }
+
+ private static Camera.Size chooseOptimalSize(
+ List choices, int width, int height, Camera.Size aspectRatio) {
+ // Collect the supported resolutions that are at least as big as the preview Surface
+ List bigEnough = new ArrayList<>();
+ int w = aspectRatio.width;
+ int h = aspectRatio.height;
+ for (Camera.Size option : choices) {
+ if (option.height == width * h / w && option.width >= width && option.height >= height) {
+ bigEnough.add(option);
+ }
+ }
+
+ // Pick the smallest of those, assuming we found any
+ if (bigEnough.size() > 0) {
+ return Collections.min(bigEnough, new CompareSizesByArea());
+ } else {
+ LOG(CameraFragment.class, "Couldn't find any suitable preview size");
+ return aspectRatio;
+ }
+ }
+
+ @Override
+ public void onViewCreated(final View view, Bundle savedInstanceState) {
+ super.onViewCreated(view, savedInstanceState);
+ mPreviewFrame = (RelativeLayout) view.findViewById(R.id.rootFrame);
+ mPreviewFrame.setOnClickListener(this);
+ }
+
+ @Override
+ public void onDestroyView() {
+ super.onDestroyView();
+ try {
+ mPreviewView.getHolder().getSurface().release();
+ } catch (Throwable ignored) {
+ }
+ mPreviewFrame = null;
+ }
+
+ @Override
+ public void onResume() {
+ super.onResume();
+ openCamera();
+ }
+
+ @Override
+ public void onPause() {
+ if (mCamera != null) mCamera.lock();
+ super.onPause();
+ }
+
+ @Override
+ public void onClick(View view) {
+ if (view.getId() == R.id.rootFrame) {
+ if (mCamera == null || mIsAutoFocusing) return;
+ try {
+ mIsAutoFocusing = true;
+ mCamera.cancelAutoFocus();
+ mCamera.autoFocus(
+ new Camera.AutoFocusCallback() {
+ @Override
+ public void onAutoFocus(boolean success, Camera camera) {
+ mIsAutoFocusing = false;
+ if (!success)
+ Toast.makeText(getActivity(), "Unable to auto-focus!", Toast.LENGTH_SHORT).show();
+ }
+ });
+ } catch (Throwable t) {
+ t.printStackTrace();
+ }
+ } else {
+ super.onClick(view);
+ }
+ }
+
+ @Override
+ public void openCamera() {
+ final Activity activity = getActivity();
+ if (null == activity || activity.isFinishing()) return;
+ try {
+ final int mBackCameraId =
+ mInterface.getBackCamera() != null ? (Integer) mInterface.getBackCamera() : -1;
+ final int mFrontCameraId =
+ mInterface.getFrontCamera() != null ? (Integer) mInterface.getFrontCamera() : -1;
+ if (mBackCameraId == -1 || mFrontCameraId == -1) {
+ int numberOfCameras = Camera.getNumberOfCameras();
+ if (numberOfCameras == 0) {
+ throwError(new Exception("No cameras are available on this device."));
+ return;
+ }
+
+ for (int i = 0; i < numberOfCameras; i++) {
+ //noinspection ConstantConditions
+ if (mFrontCameraId != -1 && mBackCameraId != -1) break;
+ Camera.CameraInfo info = new Camera.CameraInfo();
+ Camera.getCameraInfo(i, info);
+ if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT && mFrontCameraId == -1) {
+ mInterface.setFrontCamera(i);
+ } else if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK && mBackCameraId == -1) {
+ mInterface.setBackCamera(i);
+ }
+ }
+ }
+
+ switch (getCurrentCameraPosition()) {
+ case CAMERA_POSITION_FRONT:
+ setImageRes(mButtonFacing, mInterface.iconRearCamera());
+ break;
+ case CAMERA_POSITION_BACK:
+ setImageRes(mButtonFacing, mInterface.iconFrontCamera());
+ break;
+ case CAMERA_POSITION_UNKNOWN:
+ default:
+ if (getArguments().getBoolean(CameraIntentKey.DEFAULT_TO_FRONT_FACING, false)) {
+ // Check front facing first
+ if (mInterface.getFrontCamera() != null
+ && (Integer) mInterface.getFrontCamera() != -1) {
+ setImageRes(mButtonFacing, mInterface.iconRearCamera());
+ mInterface.setCameraPosition(CAMERA_POSITION_FRONT);
+ } else {
+ setImageRes(mButtonFacing, mInterface.iconFrontCamera());
+ if (mInterface.getBackCamera() != null && (Integer) mInterface.getBackCamera() != -1)
+ mInterface.setCameraPosition(CAMERA_POSITION_BACK);
+ else mInterface.setCameraPosition(CAMERA_POSITION_UNKNOWN);
+ }
+ } else {
+ // Check back facing first
+ if (mInterface.getBackCamera() != null && (Integer) mInterface.getBackCamera() != -1) {
+ setImageRes(mButtonFacing, mInterface.iconFrontCamera());
+ mInterface.setCameraPosition(CAMERA_POSITION_BACK);
+ } else {
+ setImageRes(mButtonFacing, mInterface.iconRearCamera());
+ if (mInterface.getFrontCamera() != null
+ && (Integer) mInterface.getFrontCamera() != -1)
+ mInterface.setCameraPosition(CAMERA_POSITION_FRONT);
+ else mInterface.setCameraPosition(CAMERA_POSITION_UNKNOWN);
+ }
+ }
+ break;
+ }
+
+ if (mWindowSize == null) mWindowSize = new Point();
+ activity.getWindowManager().getDefaultDisplay().getSize(mWindowSize);
+ final int toOpen = getCurrentCameraId();
+ mCamera = Camera.open(toOpen == -1 ? 0 : toOpen);
+ Camera.Parameters parameters = mCamera.getParameters();
+ List videoSizes = parameters.getSupportedVideoSizes();
+ if (videoSizes == null || videoSizes.size() == 0)
+ videoSizes = parameters.getSupportedPreviewSizes();
+ mVideoSize = chooseVideoSize((BaseCaptureActivity) activity, videoSizes);
+ Camera.Size previewSize =
+ chooseOptimalSize(
+ parameters.getSupportedPreviewSizes(), mWindowSize.x, mWindowSize.y, mVideoSize);
+
+ if (ManufacturerUtil.isSamsungGalaxyS3()) {
+ parameters.setPreviewSize(
+ ManufacturerUtil.SAMSUNG_S3_PREVIEW_WIDTH, ManufacturerUtil.SAMSUNG_S3_PREVIEW_HEIGHT);
+ } else {
+ parameters.setPreviewSize(previewSize.width, previewSize.height);
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) parameters.setRecordingHint(true);
+ }
+
+ parameters.setPreviewSize(
+ ManufacturerUtil.SAMSUNG_S3_PREVIEW_WIDTH, ManufacturerUtil.SAMSUNG_S3_PREVIEW_HEIGHT);
+
+ Camera.Size mStillShotSize =
+ getHighestSupportedStillShotSize(parameters.getSupportedPictureSizes());
+ Log.d(TAG, "openCamera: highest supported stillshot size: " + parameters.getSupportedPictureSizes());
+
+ int width = mStillShotSize.width;
+ int height = mStillShotSize.height;
+ parameters.setPictureSize(width, height);
+
+// int width = ManufacturerUtil.SAMSUNG_S3_PREVIEW_WIDTH;
+// int height = ManufacturerUtil.SAMSUNG_S3_PREVIEW_HEIGHT;
+// parameters.setPictureSize(width, height);
+//
+// Log.d(TAG, "openCamera: width: " + width);
+// Log.d(TAG, "openCamera: height: " + height);
+
+ setCameraDisplayOrientation(parameters);
+ mCamera.setParameters(parameters);
+
+ // NOTE: onFlashModesLoaded should not be called while modifying camera parameters as
+ // the flash parameters set in setupFlashMode will then be overwritten
+ mFlashModes = CameraUtil.getSupportedFlashModes(this.getActivity(), parameters);
+ mInterface.setFlashModes(mFlashModes);
+ onFlashModesLoaded();
+
+ createPreview();
+ mMediaRecorder = new MediaRecorder();
+
+ onCameraOpened();
+ } catch (IllegalStateException e) {
+ throwError(new Exception("Cannot access the camera.", e));
+ } catch (RuntimeException e2) {
+ throwError(
+ new Exception("Cannot access the camera, you may need to restart your device.", e2));
+ }
+ }
+
+ private Camera.Size getHighestSupportedStillShotSize(List supportedPictureSizes) {
+ Collections.sort(
+ supportedPictureSizes,
+ new Comparator() {
+ @Override
+ public int compare(Camera.Size lhs, Camera.Size rhs) {
+ if (lhs.height * lhs.width > rhs.height * rhs.width) return -1;
+ return 1;
+ }
+ });
+ Camera.Size maxSize = supportedPictureSizes.get(0);
+ Log.d("CameraFragment", "Using resolution: " + maxSize.width + "x" + maxSize.height);
+ return maxSize;
+ }
+
+ @SuppressWarnings("WrongConstant")
+ private void setCameraDisplayOrientation(Camera.Parameters parameters) {
+ Camera.CameraInfo info = new Camera.CameraInfo();
+ Camera.getCameraInfo(getCurrentCameraId(), info);
+ final int deviceOrientation = Degrees.getDisplayRotation(getActivity());
+ mDisplayOrientation =
+ Degrees.getDisplayOrientation(
+ info.orientation,
+ deviceOrientation,
+ info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT);
+ Log.d(
+ TAG,
+ String.format(
+ "Orientations: Sensor = %dËš, Device = %dËš, Display = %dËš",
+ info.orientation, deviceOrientation, mDisplayOrientation));
+
+ int previewOrientation;
+ int jpegOrientation;
+ if (CameraUtil.isChromium()) {
+ previewOrientation = 0;
+ jpegOrientation = 0;
+ } else {
+ jpegOrientation = previewOrientation = mDisplayOrientation;
+
+ if (Degrees.isPortrait(deviceOrientation)
+ && getCurrentCameraPosition() == CAMERA_POSITION_FRONT)
+ previewOrientation = Degrees.mirror(mDisplayOrientation);
+ }
+
+ parameters.setRotation(jpegOrientation);
+ mCamera.setDisplayOrientation(previewOrientation);
+ }
+
+ private void createPreview() {
+ Activity activity = getActivity();
+ if (activity == null) return;
+ if (mWindowSize == null) mWindowSize = new Point();
+ activity.getWindowManager().getDefaultDisplay().getSize(mWindowSize);
+ mPreviewView = new tabian.com.instagramclone2.materialcamera.internal.CameraPreview(getActivity(), mCamera);
+ if (mPreviewFrame.getChildCount() > 0 && mPreviewFrame.getChildAt(0) instanceof tabian.com.instagramclone2.materialcamera.internal.CameraPreview)
+ mPreviewFrame.removeViewAt(0);
+ mPreviewFrame.addView(mPreviewView, 0);
+ Log.d(TAG, "createPreview: window size x: " + mWindowSize.x);
+ Log.d(TAG, "createPreview: window size y: " + mWindowSize.y);
+ mPreviewView.setAspectRatio(mWindowSize.x, mWindowSize.y);
+ }
+
+ @Override
+ public void closeCamera() {
+ try {
+ if (mCamera != null) {
+ try {
+ mCamera.lock();
+ } catch (Throwable ignored) {
+ }
+ mCamera.release();
+ mCamera = null;
+ }
+ } catch (IllegalStateException e) {
+ throwError(new Exception("Illegal state while trying to close camera.", e));
+ }
+ }
+
+ private boolean prepareMediaRecorder() {
+ try {
+ final Activity activity = getActivity();
+ if (null == activity) return false;
+ final BaseCaptureInterface captureInterface = (BaseCaptureInterface) activity;
+
+ setCameraDisplayOrientation(mCamera.getParameters());
+ mMediaRecorder = new MediaRecorder();
+ mCamera.stopPreview();
+ mCamera.unlock();
+ mMediaRecorder.setCamera(mCamera);
+
+ boolean canUseAudio = true;
+ boolean audioEnabled = !mInterface.audioDisabled();
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M)
+ canUseAudio =
+ ContextCompat.checkSelfPermission(activity, Manifest.permission.RECORD_AUDIO)
+ == PackageManager.PERMISSION_GRANTED;
+
+ if (canUseAudio && audioEnabled) {
+ mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
+ } else if (audioEnabled) {
+ Toast.makeText(getActivity(), R.string.mcam_no_audio_access, Toast.LENGTH_LONG).show();
+ }
+ mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.DEFAULT);
+
+ final CamcorderProfile profile =
+ CamcorderProfile.get(getCurrentCameraId(), mInterface.qualityProfile());
+ mMediaRecorder.setOutputFormat(profile.fileFormat);
+ mMediaRecorder.setVideoFrameRate(mInterface.videoFrameRate(profile.videoFrameRate));
+ mMediaRecorder.setVideoSize(mVideoSize.width, mVideoSize.height);
+ mMediaRecorder.setVideoEncodingBitRate(mInterface.videoEncodingBitRate(profile.videoBitRate));
+ mMediaRecorder.setVideoEncoder(profile.videoCodec);
+
+ if (canUseAudio && audioEnabled) {
+ mMediaRecorder.setAudioEncodingBitRate(
+ mInterface.audioEncodingBitRate(profile.audioBitRate));
+ mMediaRecorder.setAudioChannels(profile.audioChannels);
+ mMediaRecorder.setAudioSamplingRate(profile.audioSampleRate);
+ mMediaRecorder.setAudioEncoder(profile.audioCodec);
+ }
+
+ Uri uri = Uri.fromFile(getOutputMediaFile());
+ mOutputUri = uri.toString();
+ mMediaRecorder.setOutputFile(uri.getPath());
+
+ if (captureInterface.maxAllowedFileSize() > 0) {
+ mMediaRecorder.setMaxFileSize(captureInterface.maxAllowedFileSize());
+ mMediaRecorder.setOnInfoListener(
+ new MediaRecorder.OnInfoListener() {
+ @Override
+ public void onInfo(MediaRecorder mediaRecorder, int what, int extra) {
+ if (what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED) {
+ Toast.makeText(
+ getActivity(), R.string.mcam_file_size_limit_reached, Toast.LENGTH_SHORT)
+ .show();
+ stopRecordingVideo(false);
+ }
+ }
+ });
+ }
+
+ mMediaRecorder.setOrientationHint(mDisplayOrientation);
+ mMediaRecorder.setPreviewDisplay(mPreviewView.getHolder().getSurface());
+
+ try {
+ mMediaRecorder.prepare();
+ return true;
+ } catch (Throwable e) {
+ throwError(new Exception("Failed to prepare the media recorder: " + e.getMessage(), e));
+ return false;
+ }
+ } catch (Throwable t) {
+ try {
+ mCamera.lock();
+ } catch (IllegalStateException e) {
+ throwError(new Exception("Failed to re-lock camera: " + e.getMessage(), e));
+ return false;
+ }
+ t.printStackTrace();
+ throwError(new Exception("Failed to begin recording: " + t.getMessage(), t));
+ return false;
+ }
+ }
+
+ @Override
+ public boolean startRecordingVideo() {
+ super.startRecordingVideo();
+ if (prepareMediaRecorder()) {
+ try {
+ // UI
+ setImageRes(mButtonVideo, mInterface.iconStop());
+// setupFlashMode();
+ if (!CameraUtil.isChromium()) {
+ mButtonFacing.setVisibility(View.GONE);
+ mButtonFlash.setVisibility(View.GONE);
+ }
+
+ // Only start counter if count down wasn't already started
+ if (!mInterface.hasLengthLimit()) {
+ mInterface.setRecordingStart(System.currentTimeMillis());
+ startCounter();
+ }
+
+
+ // Start recording
+ mMediaRecorder.start();
+
+ mButtonVideo.setEnabled(false);
+ mButtonVideo.postDelayed(
+ new Runnable() {
+ @Override
+ public void run() {
+ mButtonVideo.setEnabled(true);
+ }
+ },
+ 200);
+
+ return true;
+ } catch (Throwable t) {
+ t.printStackTrace();
+ mInterface.setRecordingStart(-1);
+ stopRecordingVideo(false);
+ throwError(new Exception("Failed to start recording: " + t.getMessage(), t));
+ }
+ }
+ return false;
+ }
+
+ @Override
+ public void stopRecordingVideo(final boolean reachedZero) {
+ super.stopRecordingVideo(reachedZero);
+
+ if (mInterface.hasLengthLimit()
+ && mInterface.shouldAutoSubmit()
+ && (mInterface.getRecordingStart() < 0 || mMediaRecorder == null)) {
+ stopCounter();
+ if (mCamera != null) {
+ try {
+ mCamera.lock();
+ } catch (Throwable t) {
+ t.printStackTrace();
+ }
+ }
+ releaseRecorder();
+ closeCamera();
+ mButtonFacing.postDelayed(
+ new Runnable() {
+ @Override
+ public void run() {
+ mInterface.onShowPreview(mOutputUri, reachedZero);
+ }
+ },
+ 100);
+ return;
+ }
+
+ if (mCamera != null) mCamera.lock();
+ releaseRecorder();
+ closeCamera();
+
+ if (!mInterface.didRecord()) mOutputUri = null;
+
+ setImageRes(mButtonVideo, mInterface.iconRecord());
+ if (!CameraUtil.isChromium()) {
+ mButtonFacing.setVisibility(View.VISIBLE);
+ mButtonFlash.setVisibility(View.VISIBLE);
+ }
+ if (mInterface.getRecordingStart() > -1 && getActivity() != null)
+ mInterface.onShowPreview(mOutputUri, reachedZero);
+
+ stopCounter();
+ }
+
+ private void setupFlashMode() {
+ String flashMode = null;
+ if(mInterface.useStillshot()){
+ switch (mInterface.getFlashMode()) {
+ case FLASH_MODE_AUTO:
+ flashMode = Camera.Parameters.FLASH_MODE_AUTO;
+ break;
+ case FLASH_MODE_ALWAYS_ON:
+ flashMode = Camera.Parameters.FLASH_MODE_ON;
+ break;
+ case FLASH_MODE_OFF:
+ flashMode = Camera.Parameters.FLASH_MODE_OFF;
+ default:
+ break;
+ }
+ if (flashMode != null) {
+ Camera.Parameters parameters = mCamera.getParameters();
+ parameters.setFlashMode(flashMode);
+ mCamera.setParameters(parameters);
+ }
+ }
+ else if(!mInterface.useStillshot()){
+ switch (mInterface.getFlashModeVideo()) {
+ case FLASH_MODE_ALWAYS_ON:
+ flashMode = Camera.Parameters.FLASH_MODE_ON;
+ Log.d(TAG, "setFlashMode: video flash mode is ON.");
+ break;
+ case FLASH_MODE_OFF:
+ Log.d(TAG, "setFlashMode: video flash mode is OFF.");
+ flashMode = Camera.Parameters.FLASH_MODE_OFF;
+ default:
+ break;
+ }
+ if (flashMode != null) {
+// mCamera.lock();
+ Camera.Parameters parameters = mCamera.getParameters();
+ parameters.setFlashMode(flashMode);
+ mCamera.setParameters(parameters);
+ mCamera.startPreview();
+ }
+ }
+ }
+
+ @Override
+ public void onPreferencesUpdated() {
+ setupFlashMode();
+ }
+
+ @Override
+ public void takeStillshot() {
+ Log.d(TAG, "takeStillshot: taking stillshot.");
+ Camera.ShutterCallback shutterCallback =
+ new Camera.ShutterCallback() {
+ public void onShutter() {
+ //Log.d(TAG, "onShutter'd");
+ }
+ };
+ Camera.PictureCallback rawCallback =
+ new Camera.PictureCallback() {
+ public void onPictureTaken(byte[] data, Camera camera) {
+ Log.d(TAG, "onPictureTaken - raw. Raw is null: " + (data == null));
+ }
+ };
+ Camera.PictureCallback jpegCallback =
+ new Camera.PictureCallback() {
+ public void onPictureTaken(final byte[] data, Camera camera) {
+ Log.d(TAG, "onPictureTaken - jpeg, size: " + data.length);
+ final File outputPic = getOutputPictureFile();
+ // lets save the image to disk
+ ImageUtil.saveToDiskAsync(
+ data,
+ outputPic,
+ new ICallback() {
+ @Override
+ public void done(Exception e) {
+ if (e == null) {
+ Log.d(TAG, "Picture saved to disk - jpeg, size: " + data.length);
+ mOutputUri = Uri.fromFile(outputPic).toString();
+ mInterface.onShowStillshot(mOutputUri);
+ //mCamera.startPreview();
+ mButtonStillshot.setEnabled(true);
+ } else {
+ throwError(e);
+ }
+ }
+ }
+ );
+ }
+ };
+
+ // if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
+ // // We could have configurable shutter sound here
+ // mCamera.enableShutterSound(false);
+ // }
+
+ mButtonStillshot.setEnabled(false);
+ mCamera.takePicture(shutterCallback, rawCallback, jpegCallback);
+ }
+
+ static class CompareSizesByArea implements Comparator {
+ @Override
+ public int compare(Camera.Size lhs, Camera.Size rhs) {
+ // We cast here to ensure the multiplications won't overflow
+ return Long.signum((long) lhs.width * lhs.height - (long) rhs.width * rhs.height);
+ }
+ }
+}
diff --git a/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/CameraIntentKey.java b/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/CameraIntentKey.java
new file mode 100644
index 0000000..0ce03d9
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/CameraIntentKey.java
@@ -0,0 +1,46 @@
+package tabian.com.instagramclone2.materialcamera.internal;
+
+/** @author Aidan Follestad (afollestad) */
+// Intent/Bundle extra keys used to pass camera configuration through the
+// material-camera capture flow. Constants-only holder; not instantiable.
+public class CameraIntentKey {
+
+ // Private constructor prevents instantiation of this constants-only class.
+ private CameraIntentKey() {}
+
+ // Capture behavior and output configuration.
+ public static final String LENGTH_LIMIT = "length_limit";
+ public static final String ALLOW_RETRY = "allow_retry";
+ public static final String AUTO_SUBMIT = "auto_submit";
+ public static final String SAVE_DIR = "save_dir";
+ public static final String PRIMARY_COLOR = "primary_color";
+ public static final String SHOW_PORTRAIT_WARNING = "show_portrait_warning";
+ public static final String DEFAULT_TO_FRONT_FACING = "default_to_front_facing";
+ public static final String COUNTDOWN_IMMEDIATELY = "countdown_immediately";
+ public static final String RETRY_EXITS = "retry_exits";
+ public static final String RESTART_TIMER_ON_RETRY = "restart_timer_on_retry";
+ public static final String CONTINUE_TIMER_IN_PLAYBACK = "continue_timer_in_playback";
+ public static final String VIDEO_BIT_RATE = "video_bit_rate";
+ public static final String AUDIO_ENCODING_BIT_RATE = "audio_encoding_bit_rate";
+ public static final String AUDIO_DISABLED = "audio_disabled";
+ public static final String VIDEO_FRAME_RATE = "video_frame_rate";
+ public static final String VIDEO_PREFERRED_HEIGHT = "video_preferred_height";
+ public static final String VIDEO_PREFERRED_ASPECT = "video_preferred_aspect";
+ public static final String MAX_ALLOWED_FILE_SIZE = "max_allowed_file_size";
+ public static final String QUALITY_PROFILE = "quality_profile";
+ public static final String ALLOW_CHANGE_CAMERA = "allow_change_camera";
+
+ // Icon drawable resource ids for the capture UI controls.
+ public static final String ICON_RECORD = "icon_record";
+ public static final String ICON_STOP = "icon_stop";
+ public static final String ICON_FRONT_CAMERA = "icon_front_camera";
+ public static final String ICON_REAR_CAMERA = "icon_rear_camera";
+ public static final String ICON_PLAY = "icon_play";
+ public static final String ICON_PAUSE = "icon_pause";
+ public static final String ICON_RESTART = "icon_restart";
+ public static final String ICON_STILL_SHOT = "icon_still_shot";
+ public static final String ICON_FLASH_AUTO = "icon_flash_auto";
+ public static final String ICON_FLASH_ON = "icon_flash_on";
+ public static final String ICON_FLASH_OFF = "icon_flash_off";
+
+ // Button labels and capture-mode flags.
+ public static final String LABEL_RETRY = "label_retry";
+ public static final String LABEL_CONFIRM = "label_confirm";
+ public static final String STILL_SHOT = "still_shot";
+
+ public static final String AUTO_RECORD = "auto_record";
+}
diff --git a/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/CameraPreview.java b/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/CameraPreview.java
new file mode 100644
index 0000000..6a8da88
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/CameraPreview.java
@@ -0,0 +1,91 @@
+package tabian.com.instagramclone2.materialcamera.internal;
+
+import android.annotation.SuppressLint;
+import android.content.Context;
+import android.hardware.Camera;
+import android.util.Log;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+
+@SuppressWarnings("deprecation")
+@SuppressLint("ViewConstructor")
+// SurfaceView-based preview for the deprecated android.hardware.Camera API. Registers
+// itself as the SurfaceHolder callback, (re)starts the preview as the surface changes,
+// and measures itself to a caller-supplied aspect ratio.
+class CameraPreview extends SurfaceView implements SurfaceHolder.Callback {
+
+ private static final String TAG = "SF-CameraPreview";
+
+ protected final SurfaceHolder mHolder;
+ // Camera owned by the host fragment; this view only attaches/starts the preview.
+ private final Camera mCamera;
+ // Desired aspect ratio for onMeasure(); 0/0 means "fill whatever we are given".
+ private int mRatioWidth = 0;
+ private int mRatioHeight = 0;
+
+ public CameraPreview(Context context, Camera camera) {
+ super(context);
+ mCamera = camera;
+ mHolder = getHolder();
+ mHolder.addCallback(this);
+ // Deprecated and ignored on API 11+, but required for camera preview on older devices.
+ mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
+ }
+
+ // Attach the camera to the freshly created surface and start previewing.
+ @Override
+ public void surfaceCreated(SurfaceHolder holder) {
+ try {
+ mCamera.setPreviewDisplay(holder);
+ mCamera.startPreview();
+ } catch (Throwable e) {
+ // Best-effort: a failed preview start is logged, not fatal.
+ Log.d(TAG, "Error setting camera preview: " + e.getMessage());
+ }
+ }
+
+ @Override
+ public void surfaceDestroyed(SurfaceHolder holder) {
+ mHolder.removeCallback(this);
+ }
+
+ // On any size/format change: stop, re-attach, restart. The preview cannot be
+ // reconfigured while running, so this ordering matters.
+ @Override
+ public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
+ if (mHolder.getSurface() == null) return;
+ try {
+ mCamera.stopPreview();
+ } catch (Exception ignored) {
+ // Preview may not have been running yet; safe to ignore.
+ }
+ try {
+ mCamera.setPreviewDisplay(mHolder);
+ mCamera.startPreview();
+ } catch (Exception e) {
+ Log.d(TAG, "Error starting camera preview: " + e.getMessage());
+ }
+ }
+
+ /**
+ * Sets the aspect ratio for this view. The size of the view will be measured based on the ratio
+ * calculated from the parameters. Note that the actual sizes of parameters don't matter, that is,
+ * calling setAspectRatio(2, 3) and setAspectRatio(4, 6) make the same result.
+ *
+ * @param width Relative horizontal size
+ * @param height Relative vertical size
+ */
+ public void setAspectRatio(int width, int height) {
+ if (width < 0 || height < 0) {
+ throw new IllegalArgumentException("Size cannot be negative.");
+ }
+ mRatioWidth = width;
+ mRatioHeight = height;
+ requestLayout();
+ }
+
+ // Fit the largest rectangle of the requested aspect ratio inside the measured bounds.
+ @Override
+ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
+ super.onMeasure(widthMeasureSpec, heightMeasureSpec);
+ int width = MeasureSpec.getSize(widthMeasureSpec);
+ int height = MeasureSpec.getSize(heightMeasureSpec);
+ if (0 == mRatioWidth || 0 == mRatioHeight) {
+ setMeasuredDimension(width, height);
+ } else {
+ if (width < height * mRatioWidth / mRatioHeight) {
+ setMeasuredDimension(width, width * mRatioHeight / mRatioWidth);
+ } else {
+ setMeasuredDimension(height * mRatioWidth / mRatioHeight, height);
+ }
+ }
+ }
+}
diff --git a/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/CameraUriInterface.java b/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/CameraUriInterface.java
new file mode 100644
index 0000000..3b6f17a
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/CameraUriInterface.java
@@ -0,0 +1,8 @@
+package tabian.com.instagramclone2.materialcamera.internal;
+
+/** @author Aidan Follestad (afollestad) */
+// Implemented by capture/playback fragments so the host can query the captured media's Uri.
+interface CameraUriInterface {
+
+ /** Returns the string form of the Uri for the media this screen captured or is showing. */
+ String getOutputUri();
+
+}
diff --git a/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/PlaybackVideoFragment.java b/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/PlaybackVideoFragment.java
new file mode 100644
index 0000000..3b6b3ea
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/PlaybackVideoFragment.java
@@ -0,0 +1,225 @@
+package tabian.com.instagramclone2.materialcamera.internal;
+
+import android.app.Activity;
+import android.app.Fragment;
+import android.content.pm.ActivityInfo;
+import android.net.Uri;
+import android.os.Bundle;
+import android.os.Handler;
+import android.support.annotation.Nullable;
+import android.util.Log;
+import android.view.LayoutInflater;
+import android.view.View;
+import android.view.ViewGroup;
+
+import com.afollestad.materialdialogs.MaterialDialog;
+
+import tabian.com.instagramclone2.R;
+import tabian.com.instagramclone2.easyvideoplayer.EasyVideoCallback;
+import tabian.com.instagramclone2.easyvideoplayer.EasyVideoPlayer;
+import tabian.com.instagramclone2.materialcamera.util.CameraUtil;
+
+
+/** @author Aidan Follestad (afollestad) */
+// Plays back the just-recorded video and lets the user retry, submit, or (story flow)
+// add it to / save it as a story. Exposes the recording's Uri via CameraUriInterface.
+public class PlaybackVideoFragment extends Fragment
+ implements CameraUriInterface, EasyVideoCallback {
+
+ private static final String TAG = "PlaybackVideoFragment";
+
+ // Player widget from mcam_fragment_videoplayback; null once the view is torn down.
+ private EasyVideoPlayer mPlayer;
+ // String form of the recorded file's Uri, read from the "output_uri" argument.
+ private String mOutputUri;
+ // Host activity; attached in onAttach(), supplies timing info and receives results.
+ private BaseCaptureInterface mInterface;
+
+ // Drives the "-mm:ss" countdown label while auto-submit is active: reposts itself
+ // every 200ms and submits the video once the recording-end timestamp passes.
+ private Handler mCountdownHandler;
+ private final Runnable mCountdownRunnable =
+ new Runnable() {
+ @Override
+ public void run() {
+ if (mPlayer != null) {
+ long diff = mInterface.getRecordingEnd() - System.currentTimeMillis();
+ if (diff <= 0) {
+ useVideo();
+ return;
+ }
+ mPlayer.setBottomLabelText(String.format("-%s", CameraUtil.getDurationString(diff)));
+ if (mCountdownHandler != null) mCountdownHandler.postDelayed(mCountdownRunnable, 200);
+ }
+ }
+ };
+
+ // Deprecated single-argument onAttach kept for pre-API-23 compatibility.
+ @SuppressWarnings("deprecation")
+ @Override
+ public void onAttach(Activity activity) {
+ super.onAttach(activity);
+ mInterface = (BaseCaptureInterface) activity;
+ }
+
+ /**
+ * Builds a retained instance.
+ *
+ * @param outputUri string Uri of the recorded video to play back
+ * @param allowRetry whether the retry (left) action is shown
+ * @param primaryColor theme color applied to the player chrome
+ */
+ public static PlaybackVideoFragment newInstance(
+ String outputUri, boolean allowRetry, int primaryColor) {
+ PlaybackVideoFragment fragment = new PlaybackVideoFragment();
+ fragment.setRetainInstance(true);
+ Bundle args = new Bundle();
+ args.putString("output_uri", outputUri);
+ args.putBoolean(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.ALLOW_RETRY, allowRetry);
+ args.putInt(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.PRIMARY_COLOR, primaryColor);
+ fragment.setArguments(args);
+ return fragment;
+ }
+
+ @Override
+ public void onResume() {
+ super.onResume();
+ // Unlock any orientation the capture screen may have forced.
+ if (getActivity() != null)
+ getActivity().setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_UNSPECIFIED);
+ }
+
+ // NOTE(review): reset() is invoked after release(); EasyVideoPlayer is project code,
+ // so confirm this ordering is valid (release() is typically terminal).
+ @Override
+ public void onPause() {
+ super.onPause();
+ if (mPlayer != null) {
+ mPlayer.release();
+ mPlayer.reset();
+ mPlayer = null;
+ }
+ }
+
+ @Nullable
+ @Override
+ public View onCreateView(
+ LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
+ return inflater.inflate(R.layout.mcam_fragment_videoplayback, container, false);
+
+ }
+
+ @Override
+ public void onViewCreated(View view, Bundle savedInstanceState) {
+ super.onViewCreated(view, savedInstanceState);
+
+ mPlayer = (EasyVideoPlayer) view.findViewById(R.id.playbackView);
+ mPlayer.setCallback(this);
+
+ // Apply host-provided labels and icons to the player controls.
+ mPlayer.setSubmitTextRes(mInterface.labelConfirm());
+ mPlayer.setRetryTextRes(mInterface.labelRetry());
+ mPlayer.setPlayDrawableRes(mInterface.iconPlay());
+ mPlayer.setPauseDrawableRes(mInterface.iconPause());
+
+ if (getArguments().getBoolean(tabian.com.instagramclone2.materialcamera.internal.CameraIntentKey.ALLOW_RETRY, true))
+ mPlayer.setLeftAction(EasyVideoPlayer.LEFT_ACTION_RETRY);
+ mPlayer.setRightAction(EasyVideoPlayer.RIGHT_ACTION_SUBMIT);
+
+ mPlayer.setThemeColor(getArguments().getInt(CameraIntentKey.PRIMARY_COLOR));
+ mOutputUri = getArguments().getString("output_uri");
+
+ // If a length-limited recording should auto-submit, keep the countdown running
+ // during playback and show the remaining time under the video.
+ if (mInterface.hasLengthLimit()
+ && mInterface.shouldAutoSubmit()
+ && mInterface.continueTimerInPlayback()) {
+ final long diff = mInterface.getRecordingEnd() - System.currentTimeMillis();
+ mPlayer.setBottomLabelText(String.format("-%s", CameraUtil.getDurationString(diff)));
+ startCountdownTimer();
+ }
+
+ mPlayer.setSource(Uri.parse(mOutputUri));
+ }
+
+ // Starts (or restarts) the countdown ticker on a main-thread handler.
+ private void startCountdownTimer() {
+ if (mCountdownHandler == null) mCountdownHandler = new Handler();
+ else mCountdownHandler.removeCallbacks(mCountdownRunnable);
+ mCountdownHandler.post(mCountdownRunnable);
+ }
+
+ @Override
+ public void onDestroyView() {
+ super.onDestroyView();
+ // Stop the ticker and free the player to avoid leaking the view hierarchy.
+ if (mCountdownHandler != null) {
+ mCountdownHandler.removeCallbacks(mCountdownRunnable);
+ mCountdownHandler = null;
+ }
+ if (mPlayer != null) {
+ mPlayer.release();
+ mPlayer = null;
+ }
+ }
+
+ // Releases the player and hands the recording to the host as the final result.
+ private void useVideo() {
+ if (mPlayer != null) {
+ mPlayer.release();
+ mPlayer = null;
+ }
+ if (mInterface != null) mInterface.useMedia(mOutputUri);
+ }
+
+ @Override
+ public String getOutputUri() {
+ return getArguments().getString("output_uri");
+ }
+
+ // EasyVideoCallback: playback-state callbacks that need no handling on this screen.
+ @Override
+ public void onStarted(EasyVideoPlayer player) {}
+
+ @Override
+ public void onPaused(EasyVideoPlayer player) {}
+
+ @Override
+ public void onPreparing(EasyVideoPlayer player) {}
+
+ @Override
+ public void onPrepared(EasyVideoPlayer player) {}
+
+ @Override
+ public void onBuffering(int percent) {}
+
+ // Surface playback errors to the user in a dialog.
+ @Override
+ public void onError(EasyVideoPlayer player, Exception e) {
+ new MaterialDialog.Builder(getActivity())
+ .title(R.string.mcam_error)
+ .content(e.getMessage())
+ .positiveText(android.R.string.ok)
+ .show();
+ }
+
+
+
+ @Override
+ public void onCompletion(EasyVideoPlayer player) {}
+
+ // User chose to re-record; the host decides whether to return to the camera.
+ @Override
+ public void onRetry(EasyVideoPlayer player, Uri source) {
+ if (mInterface != null) mInterface.onRetry(mOutputUri);
+ }
+
+ @Override
+ public void onSubmit(EasyVideoPlayer player, Uri source) {
+ useVideo();
+ }
+
+ @Override
+ public void onClickVideoFrame(EasyVideoPlayer player) {
+ Log.d(TAG, "onClickVideoFrame: clicked video frame");
+ }
+
+ // Story flow: hand the recording to the host to append to the user's story.
+ @Override
+ public void addToStory(EasyVideoPlayer player, Uri source) {
+ Log.d(TAG, "addToStory: adding new video story.");
+ mInterface.addToStory(mOutputUri);
+ }
+
+ // Story flow: hand the recording to the host as final media.
+ @Override
+ public void saveStory(EasyVideoPlayer player, Uri source) {
+ Log.d(TAG, "saveStory: saving new video story.");
+ mInterface.useMedia(mOutputUri);
+ }
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/StillshotPreviewFragment.java b/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/StillshotPreviewFragment.java
new file mode 100644
index 0000000..d169a9f
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/StillshotPreviewFragment.java
@@ -0,0 +1,174 @@
+package tabian.com.instagramclone2.materialcamera.internal;
+
+import android.graphics.Bitmap;
+import android.net.Uri;
+import android.os.Bundle;
+import android.support.annotation.Nullable;
+import android.util.Log;
+import android.view.LayoutInflater;
+import android.view.View;
+import android.view.ViewGroup;
+import android.view.ViewTreeObserver;
+import android.widget.ImageView;
+
+import com.bumptech.glide.Glide;
+
+import java.io.File;
+
+import tabian.com.instagramclone2.R;
+
+
+/**
+ * Preview screen shown after a still photo is captured. Displays the shot via
+ * Glide and lets the user retry, save it as a story, or add it to a story.
+ */
+public class StillshotPreviewFragment extends BaseGalleryFragment {
+
+  // 23-char limit on Android log tags explains the truncated name.
+  private static final String TAG = "StillshotPreviewFragmen";
+
+  private ImageView mImageView;
+
+  /**
+   * Kept across configuration changes so the bitmap is not re-decoded.
+   *
+   * <p>NOTE(review): nothing in this class assigns mBitmap any more (the manual
+   * ImageUtil decode path was replaced by Glide), so the recycle() in
+   * onDestroyView() is effectively dead — confirm before removing the field.
+   */
+  private static Bitmap mBitmap;
+
+  /**
+   * Builds a preview fragment for a captured stillshot.
+   *
+   * @param outputUri file URI of the captured image
+   * @param allowRetry whether the user may retake the picture
+   * @param primaryColor theme color forwarded to the capture UI
+   */
+  public static StillshotPreviewFragment newInstance(
+      String outputUri, boolean allowRetry, int primaryColor) {
+    final StillshotPreviewFragment fragment = new StillshotPreviewFragment();
+    fragment.setRetainInstance(true);
+    Bundle args = new Bundle();
+    args.putString("output_uri", outputUri);
+    args.putBoolean(CameraIntentKey.ALLOW_RETRY, allowRetry);
+    args.putInt(CameraIntentKey.PRIMARY_COLOR, primaryColor);
+    fragment.setArguments(args);
+    return fragment;
+  }
+
+  @Nullable
+  @Override
+  public View onCreateView(
+      LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
+    return inflater.inflate(R.layout.mcam_fragment_stillshot, container, false);
+  }
+
+  @Override
+  public void onViewCreated(View view, Bundle savedInstanceState) {
+    super.onViewCreated(view, savedInstanceState);
+    mImageView = (ImageView) view.findViewById(R.id.stillshot_imageview);
+
+    mRetry.setOnClickListener(this);
+    mSaveStory.setOnClickListener(this);
+    mAddToStory.setOnClickListener(this);
+
+    // Defer loading until the ImageView has been measured so Glide can size
+    // the decode to the view bounds; the listener removes itself after one pass.
+    mImageView
+        .getViewTreeObserver()
+        .addOnPreDrawListener(
+            new ViewTreeObserver.OnPreDrawListener() {
+              @Override
+              public boolean onPreDraw() {
+                setImageBitmap();
+                mImageView.getViewTreeObserver().removeOnPreDrawListener(this);
+                return true;
+              }
+            });
+  }
+
+  @Override
+  public void onDestroyView() {
+    super.onDestroyView();
+    // Best-effort release of the (currently never-assigned) cached bitmap.
+    if (mBitmap != null && !mBitmap.isRecycled()) {
+      try {
+        mBitmap.recycle();
+        mBitmap = null;
+      } catch (Throwable t) {
+        t.printStackTrace();
+      }
+    }
+  }
+
+  /** Loads the captured stillshot into the ImageView. */
+  private void setImageBitmap() {
+    Log.d(TAG, "setImageBitmap: output uri: " + mOutputUri);
+    // Glide handles orientation and downsampling, replacing the older manual
+    // ImageUtil.getRotatedBitmap() decode path.
+    Glide.with(getActivity())
+        .asBitmap()
+        .load(mOutputUri)
+        .into(mImageView);
+  }
+
+  @Override
+  public void onClick(View v) {
+    if (v.getId() == R.id.retry) mInterface.onRetry(mOutputUri);
+    else if (v.getId() == R.id.save_story) mInterface.useMedia(mOutputUri);
+    else if (v.getId() == R.id.add_to_story) mInterface.addToStory(mOutputUri);
+  }
+
+  @Override
+  public void onDestroy() {
+    Log.d(TAG, "onDestroy: called.");
+    super.onDestroy();
+    // NOTE(review): this deletes the output file unconditionally on destroy —
+    // confirm consumers copy/upload the file before the fragment is torn down.
+    if (mOutputUri != null) {
+      Log.d(TAG, "onDestroy: cleaning up files.");
+      deleteOutputFile(mOutputUri);
+      mOutputUri = null;
+    }
+  }
+
+  /** Deletes the capture file backing the given URI, ignoring failures. */
+  private void deleteOutputFile(@Nullable String uri) {
+    if (uri != null)
+      //noinspection ResultOfMethodCallIgnored
+      new File(Uri.parse(uri).getPath()).delete();
+  }
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/VideoStreamView.java b/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/VideoStreamView.java
new file mode 100644
index 0000000..27e0a96
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/materialcamera/internal/VideoStreamView.java
@@ -0,0 +1,367 @@
+package tabian.com.instagramclone2.materialcamera.internal;
+
+import android.annotation.TargetApi;
+import android.app.Activity;
+import android.content.Context;
+import android.content.pm.ActivityInfo;
+import android.media.AudioManager;
+import android.media.MediaPlayer;
+import android.net.Uri;
+import android.os.Build;
+import android.os.Bundle;
+import android.os.Handler;
+import android.support.annotation.IntDef;
+import android.support.annotation.NonNull;
+import android.support.annotation.Size;
+import android.util.AttributeSet;
+import android.util.DisplayMetrics;
+import android.util.Log;
+import android.view.Surface;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+
+import com.afollestad.materialdialogs.MaterialDialog;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+import tabian.com.instagramclone2.R;
+
+/** @author Aidan Follestad (afollestad) */
+/**
+ * SurfaceView that plays a video through a single owned {@link MediaPlayer},
+ * relaying prepare/complete/error/buffering events to a {@link Callback} and
+ * sizing itself to the video's aspect ratio in {@link #onMeasure}.
+ *
+ * @author Aidan Follestad (afollestad)
+ */
+public class VideoStreamView extends SurfaceView
+    implements SurfaceHolder.Callback,
+        MediaPlayer.OnPreparedListener,
+        MediaPlayer.OnCompletionListener,
+        MediaPlayer.OnErrorListener,
+        MediaPlayer.OnBufferingUpdateListener,
+        MediaPlayer.OnVideoSizeChangedListener {
+
+  @Override
+  public void onPrepared(MediaPlayer mp) {
+    // Kick playback off shortly after prepare completes; when auto-play is
+    // disabled, start then immediately pause so the first frame is rendered.
+    new Handler()
+        .postDelayed(
+            new Runnable() {
+              @Override
+              public void run() {
+                start(mContext);
+                if (!mAutoPlay) pause();
+              }
+            },
+            250);
+    if (mCallback != null) mCallback.onPrepared(mp);
+  }
+
+  @Override
+  public void onCompletion(MediaPlayer mp) {
+    if (mCallback != null) mCallback.onCompleted();
+  }
+
+  @Override
+  public boolean onError(MediaPlayer mp, int what, int extra) {
+    // NOTE(review): unlike the other listener methods, mCallback is not
+    // null-checked here. The listener is only registered in setURI() after
+    // mCallback is assigned, but confirm restoreInstanceState() cannot leave
+    // it null when an error fires.
+    mCallback.onError(mp, what, extra);
+    // Returning true consumes the error so MediaPlayer won't also fire onCompletion.
+    return true;
+  }
+
+  @Override
+  public void onBufferingUpdate(MediaPlayer mp, int percent) {
+    if (mCallback != null) mCallback.onBuffer(percent);
+  }
+
+  @Override
+  public void onVideoSizeChanged(MediaPlayer mp, int width, int height) {
+    // Re-measure so onMeasure() can apply the new aspect ratio.
+    requestLayout();
+  }
+
+  /** Receiver for player lifecycle events. */
+  public interface Callback {
+    void onPrepared(MediaPlayer mp);
+
+    void onCompleted();
+
+    void onError(MediaPlayer mp, int what, int extra);
+
+    void onBuffer(int percent);
+  }
+
+  public VideoStreamView(Context context) {
+    super(context);
+    initPlayer();
+  }
+
+  public VideoStreamView(Context context, AttributeSet attrs) {
+    super(context, attrs);
+    initPlayer();
+  }
+
+  public VideoStreamView(Context context, AttributeSet attrs, int defStyleAttr) {
+    super(context, attrs, defStyleAttr);
+    initPlayer();
+  }
+
+  @TargetApi(Build.VERSION_CODES.LOLLIPOP)
+  public VideoStreamView(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
+    super(context, attrs, defStyleAttr, defStyleRes);
+    initPlayer();
+  }
+
+  /** Persists the current source URI so it survives configuration changes. */
+  public void saveInstanceState(Bundle to) {
+    to.putParcelable("uri", mUri);
+  }
+
+  /** Restores the URI saved by {@link #saveInstanceState} and re-attaches a callback. */
+  public void restoreInstanceState(Bundle from, Callback callback) {
+    if (from != null) {
+      mUri = from.getParcelable("uri");
+      mCallback = callback;
+    }
+  }
+
+  // Tears down any existing player, then creates a fresh MediaPlayer in the
+  // Idle state routed to the music audio stream.
+  private void initPlayer() {
+    if (isInEditMode()) return; // layout-editor preview has no media stack
+    else if (mPlayer != null) {
+      if (mPlayer.isPlaying()) mPlayer.stop();
+      mPlayer.reset();
+      mPlayer.release();
+      mPlayer = null;
+    }
+    mPlayer = new MediaPlayer();
+    mPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
+  }
+
+  private Activity mContext; // host activity, used for dialogs and orientation
+  protected Uri mUri; // current media source
+  private Callback mCallback; // lifecycle event receiver
+  protected MediaPlayer mPlayer; // owned player; null after release()
+  private boolean mAutoPlay; // when false, playback pauses on the first frame
+
+  public void setAutoPlay(boolean autoPlay) {
+    mAutoPlay = autoPlay;
+  }
+
+  /**
+   * Points the player at a new source and begins async preparation; completion
+   * is reported via {@link #onPrepared}. setDataSource/prepareAsync failures
+   * are surfaced to the user in a dialog.
+   */
+  public void setURI(@NonNull Activity context, @NonNull Uri uri, @NonNull Callback callback) {
+    mContext = context;
+    mUri = uri;
+    mCallback = callback;
+    initPlayer();
+    mPlayer.setOnPreparedListener(this);
+    mPlayer.setOnCompletionListener(this);
+    mPlayer.setOnErrorListener(this);
+    mPlayer.setOnBufferingUpdateListener(this);
+    mPlayer.setOnVideoSizeChangedListener(this);
+    try {
+      mPlayer.setDataSource(context, uri);
+      mPlayer.prepareAsync();
+    } catch (Throwable e) {
+      Log.d("VideoStreamView", "Failed to setDataSource/prepareAsync: " + e.getMessage());
+      e.printStackTrace();
+      new MaterialDialog.Builder(mContext)
+          .title(R.string.mcam_error)
+          .content(e.getMessage())
+          .positiveText(android.R.string.ok)
+          .show();
+    }
+  }
+
+  /**
+   * Attaches the player to this view's surface and starts playback.
+   *
+   * @return false when the player had to be re-created (playback then starts
+   *     once the new player prepares); true otherwise.
+   */
+  public boolean start(Activity context) {
+    mContext = context;
+    if (mPlayer == null) {
+      initPlayer();
+      setURI(mContext, mUri, mCallback);
+      return false;
+    }
+    try {
+      mPlayer.setDisplay(getHolder());
+      mPlayer.start();
+    } catch (IllegalArgumentException | IllegalStateException e) {
+      e.printStackTrace();
+    }
+    return true;
+  }
+
+  public void seekTo(int msec) {
+    if (mPlayer == null) return;
+    mPlayer.seekTo(msec);
+  }
+
+  /**
+   * @return the playback position minus a 500 ms adjustment (clamped at 0), or
+   *     -1 when no player exists.
+   */
+  public int getCurrentPosition() {
+    if (mPlayer == null) return -1;
+    final int currentPosition = mPlayer.getCurrentPosition();
+    int currentPositionAdjusted = currentPosition - 500;
+    if (currentPositionAdjusted < 0) currentPositionAdjusted = 0;
+    return currentPositionAdjusted;
+  }
+
+  /** @return media duration in milliseconds, or -1 when no player exists. */
+  public int getDuration() {
+    if (mPlayer == null) return -1;
+    return mPlayer.getDuration();
+  }
+
+  public boolean isPlaying() {
+    return mPlayer != null && mPlayer.isPlaying();
+  }
+
+  public void pause() {
+    if (mPlayer != null) mPlayer.pause();
+  }
+
+  public void stop() {
+    if (mPlayer != null) mPlayer.stop();
+  }
+
+  /** Fully releases the player; a later {@link #start} rebuilds it from mUri. */
+  public void release() {
+    if (mPlayer != null) {
+      if (mPlayer.isPlaying()) mPlayer.stop();
+      mPlayer.reset();
+      mPlayer.release();
+      mPlayer = null;
+    }
+  }
+
+  @Override
+  public void surfaceCreated(SurfaceHolder holder) {
+    mPlayer.setDisplay(holder);
+  }
+
+  @Override
+  public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
+    // NOTE(review): registering this view as a holder callback here (rather
+    // than at construction / in surfaceCreated) looks unusual — confirm intent.
+    holder.addCallback(this);
+  }
+
+  @Override
+  public void surfaceDestroyed(SurfaceHolder holder) {
+    holder.removeCallback(this);
+  }
+
+  @Override
+  protected void onDetachedFromWindow() {
+    super.onDetachedFromWindow();
+    // Release the native player when the view leaves the hierarchy. mPlayer is
+    // deliberately not nulled here; release() performs the full teardown.
+    if (mPlayer != null) mPlayer.release();
+  }
+
+  //  public enum Orientation {
+  //    Portrait(Configuration.ORIENTATION_PORTRAIT),
+  //    Landscape(Configuration.ORIENTATION_LANDSCAPE);
+  //
+  //    int mValue;
+  //
+  //    Orientation(int value) {
+  //      mValue = value;
+  //    }
+  //
+  //    public static Orientation from(int value) {
+  //      switch (value) {
+  //        default:
+  //          return Portrait;
+  //        case Configuration.ORIENTATION_LANDSCAPE:
+  //          return Landscape;
+  //      }
+  //    }
+  //  }
+
+  /** Valid return values for {@link #getScreenOrientation(Activity)}. */
+  @IntDef({
+    ActivityInfo.SCREEN_ORIENTATION_PORTRAIT,
+    ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE,
+    ActivityInfo.SCREEN_ORIENTATION_REVERSE_PORTRAIT,
+    ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE
+  })
+  @Retention(RetentionPolicy.SOURCE)
+  public @interface ActivityOrientation {}
+
+  /**
+   * Maps the current display rotation to an ActivityInfo orientation constant,
+   * accounting for whether the device's natural orientation is portrait or
+   * landscape (deduced from rotation vs. the width/height relationship).
+   */
+  @ActivityOrientation
+  public static int getScreenOrientation(Activity context) {
+    int rotation = context.getWindowManager().getDefaultDisplay().getRotation();
+    DisplayMetrics dm = new DisplayMetrics();
+    context.getWindowManager().getDefaultDisplay().getMetrics(dm);
+    int width = dm.widthPixels;
+    int height = dm.heightPixels;
+    int orientation;
+    // if the device's natural orientation is portrait:
+    if ((rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_180) && height > width
+        || (rotation == Surface.ROTATION_90 || rotation == Surface.ROTATION_270)
+            && width > height) {
+      switch (rotation) {
+        case Surface.ROTATION_0:
+          orientation = ActivityInfo.SCREEN_ORIENTATION_PORTRAIT;
+          break;
+        case Surface.ROTATION_90:
+          orientation = ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE;
+          break;
+        case Surface.ROTATION_180:
+          orientation = ActivityInfo.SCREEN_ORIENTATION_REVERSE_PORTRAIT;
+          break;
+        case Surface.ROTATION_270:
+          orientation = ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE;
+          break;
+        default:
+          Log.e("VideoStreamView", "Unknown screen orientation. Defaulting to portrait.");
+          orientation = ActivityInfo.SCREEN_ORIENTATION_PORTRAIT;
+          break;
+      }
+    }
+    // if the device's natural orientation is landscape or if the device
+    // is square:
+    else {
+      switch (rotation) {
+        case Surface.ROTATION_0:
+          orientation = ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE;
+          break;
+        case Surface.ROTATION_90:
+          orientation = ActivityInfo.SCREEN_ORIENTATION_PORTRAIT;
+          break;
+        case Surface.ROTATION_180:
+          orientation = ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE;
+          break;
+        case Surface.ROTATION_270:
+          orientation = ActivityInfo.SCREEN_ORIENTATION_REVERSE_PORTRAIT;
+          break;
+        default:
+          Log.e("VideoStreamView", "Unknown screen orientation. Defaulting to landscape.");
+          orientation = ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE;
+          break;
+      }
+    }
+
+    return orientation;
+  }
+
+  /**
+   * Computes the {width, height} (in that order) that fill the measured bounds
+   * while preserving the video's aspect ratio, fitting the long edge first for
+   * the given orientation.
+   */
+  @Size(value = 2)
+  private int[] getDimensions(int orientation, float videoWidth, float videoHeight) {
+    final float aspectRatio = videoWidth / videoHeight;
+    int width;
+    int height;
+    if (orientation == ActivityInfo.SCREEN_ORIENTATION_PORTRAIT
+        || orientation == ActivityInfo.SCREEN_ORIENTATION_REVERSE_PORTRAIT) {
+      width = getMeasuredWidth();
+      height = (int) ((float) width / aspectRatio);
+      if (height > getMeasuredHeight()) {
+        height = getMeasuredHeight();
+        width = (int) ((float) height * aspectRatio);
+      }
+    } else {
+      height = getMeasuredHeight();
+      width = (int) ((float) height * aspectRatio);
+      if (width > getMeasuredWidth()) {
+        width = getMeasuredWidth();
+        height = (int) ((float) width / aspectRatio);
+      }
+    }
+    return new int[] {width, height};
+  }
+
+  @Override
+  protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
+    // NOTE(review): super.onMeasure is invoked unconditionally here and again
+    // in the zero-size branch below — redundant but harmless.
+    super.onMeasure(widthMeasureSpec, heightMeasureSpec);
+    try {
+      if (mPlayer != null) {
+        final float videoWidth = (float) mPlayer.getVideoWidth();
+        final float videoHeight = (float) mPlayer.getVideoHeight();
+        // Video size is unknown until prepared; keep the default measurement.
+        if (videoWidth == 0 || videoHeight == 0) {
+          super.onMeasure(widthMeasureSpec, heightMeasureSpec);
+          return;
+        }
+        int[] dimensions = getDimensions(getScreenOrientation(mContext), videoWidth, videoHeight);
+        setMeasuredDimension(dimensions[0], dimensions[1]);
+      }
+    } catch (Throwable e) {
+      e.printStackTrace();
+    }
+  }
+}
diff --git a/app/src/main/java/tabian/com/instagramclone2/materialcamera/util/CameraUtil.java b/app/src/main/java/tabian/com/instagramclone2/materialcamera/util/CameraUtil.java
new file mode 100644
index 0000000..5153ea8
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/materialcamera/util/CameraUtil.java
@@ -0,0 +1,192 @@
+package tabian.com.instagramclone2.materialcamera.util;
+
+import android.annotation.TargetApi;
+import android.content.Context;
+import android.content.pm.PackageManager;
+import android.graphics.Color;
+import android.hardware.Camera;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraManager;
+import android.os.Build;
+import android.support.annotation.ColorInt;
+import android.support.annotation.NonNull;
+import android.support.annotation.Nullable;
+
+import java.io.File;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+import java.util.Locale;
+import java.util.concurrent.TimeUnit;
+
+import tabian.com.instagramclone2.materialcamera.internal.BaseCaptureActivity;
+
+/** @author Aidan Follestad (afollestad) */
+/** @author Aidan Follestad (afollestad) */
+public class CameraUtil {
+
+  private CameraUtil() {}
+
+  /** @return true when running on a Chromium (e.g. Chrome OS / ARC) device. */
+  public static boolean isChromium() {
+    return Build.BRAND.equalsIgnoreCase("chromium")
+        && Build.MANUFACTURER.equalsIgnoreCase("chromium");
+  }
+
+  /**
+   * Formats a millisecond duration as "MM:SS". There is no hours component, so
+   * durations of an hour or more render the total minute count (e.g. "90:00").
+   */
+  public static String getDurationString(long durationMs) {
+    return String.format(
+        Locale.getDefault(),
+        "%02d:%02d",
+        TimeUnit.MILLISECONDS.toMinutes(durationMs),
+        TimeUnit.MILLISECONDS.toSeconds(durationMs)
+            - TimeUnit.MINUTES.toSeconds(TimeUnit.MILLISECONDS.toMinutes(durationMs)));
+  }
+
+  /**
+   * Creates a timestamp-named file under {@code saveDir}, creating the
+   * directory when needed.
+   *
+   * @param saveDir target directory; when null the app's external cache dir is
+   *     used. NOTE(review): getExternalCacheDir() can return null when external
+   *     storage is unavailable — the suppressed ConstantConditions hides a
+   *     potential NPE here.
+   * @param prefix file name prefix (e.g. "IMG_")
+   * @param extension file extension including the dot (e.g. ".jpg")
+   */
+  @SuppressWarnings({"ConstantConditions", "ResultOfMethodCallIgnored"})
+  public static File makeTempFile(
+      @NonNull Context context, @Nullable String saveDir, String prefix, String extension) {
+    if (saveDir == null) saveDir = context.getExternalCacheDir().getAbsolutePath();
+    final String timeStamp =
+        new SimpleDateFormat("yyyyMMdd_HHmmss", Locale.getDefault()).format(new Date());
+    final File dir = new File(saveDir);
+    dir.mkdirs();
+    return new File(dir, prefix + timeStamp + extension);
+  }
+
+  /** @return true when the device has any camera (rear or front). */
+  public static boolean hasCamera(Context context) {
+    return context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA)
+        || context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_FRONT);
+  }
+
+  /**
+   * Camera1: maps the supported {@link Camera.Parameters} flash modes to
+   * BaseCaptureActivity.FLASH_MODE_* constants.
+   *
+   * @return the supported modes, or null when flash is unsupported.
+   */
+  public static List<Integer> getSupportedFlashModes(
+      Context context, Camera.Parameters parameters) {
+    // check has system feature for flash
+    if (context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_FLASH)) {
+      // Typed as List<String>: the previous raw List broke the enhanced
+      // for-loop below (raw elements are Object, not String).
+      List<String> modes = parameters.getSupportedFlashModes();
+      if (modes == null
+          || (modes.size() == 1 && modes.get(0).equals(Camera.Parameters.FLASH_MODE_OFF))) {
+        return null; // not supported
+      } else {
+        List<Integer> flashModes = new ArrayList<>();
+        for (String mode : modes) {
+          switch (mode) {
+            case Camera.Parameters.FLASH_MODE_AUTO:
+              if (!flashModes.contains(BaseCaptureActivity.FLASH_MODE_AUTO))
+                flashModes.add(BaseCaptureActivity.FLASH_MODE_AUTO);
+              break;
+            case Camera.Parameters.FLASH_MODE_ON:
+              if (!flashModes.contains(BaseCaptureActivity.FLASH_MODE_ALWAYS_ON))
+                flashModes.add(BaseCaptureActivity.FLASH_MODE_ALWAYS_ON);
+              break;
+            case Camera.Parameters.FLASH_MODE_OFF:
+              if (!flashModes.contains(BaseCaptureActivity.FLASH_MODE_OFF))
+                flashModes.add(BaseCaptureActivity.FLASH_MODE_OFF);
+              break;
+            default:
+              break;
+          }
+        }
+        return flashModes;
+      }
+    } else {
+      return null; // not supported
+    }
+  }
+
+  /**
+   * Camera2: maps the available auto-exposure flash modes reported by
+   * {@link CameraCharacteristics} to BaseCaptureActivity.FLASH_MODE_* constants.
+   *
+   * @return the supported modes, or null when flash/camera2 is unsupported.
+   */
+  public static List<Integer> getSupportedFlashModes(
+      Context context, CameraCharacteristics characteristics) {
+    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
+      return null; // doesn't support camera2
+    } else if (context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_FLASH)) {
+      Boolean flashAvailable = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
+      if (flashAvailable == null || !flashAvailable) return null;
+
+      int[] modes = characteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES);
+      if (modes == null
+          || (modes.length == 1 && modes[0] == CameraCharacteristics.CONTROL_AE_MODE_OFF)) {
+        return null; // not supported
+      } else {
+        List<Integer> flashModes = new ArrayList<>(3);
+        for (int mode : modes) {
+          switch (mode) {
+            case CameraCharacteristics.CONTROL_AE_MODE_ON_AUTO_FLASH:
+              if (!flashModes.contains(BaseCaptureActivity.FLASH_MODE_AUTO))
+                flashModes.add(BaseCaptureActivity.FLASH_MODE_AUTO);
+              break;
+            case CameraCharacteristics.CONTROL_AE_MODE_ON_ALWAYS_FLASH:
+              if (!flashModes.contains(BaseCaptureActivity.FLASH_MODE_ALWAYS_ON))
+                flashModes.add(BaseCaptureActivity.FLASH_MODE_ALWAYS_ON);
+              break;
+            case CameraCharacteristics.CONTROL_AE_MODE_ON:
+              if (!flashModes.contains(BaseCaptureActivity.FLASH_MODE_OFF))
+                flashModes.add(BaseCaptureActivity.FLASH_MODE_OFF);
+              // break was previously missing here; the fallthrough reached
+              // default (a no-op) so behavior is unchanged, but be explicit.
+              break;
+            default:
+              break;
+          }
+        }
+        return flashModes;
+      }
+    }
+    return null; // not supported
+  }
+
+  /**
+   * @return true when the device exposes a non-LEGACY camera2 implementation
+   *     for every camera. Samsung devices are excluded for stillshots due to
+   *     known camera2 issues.
+   */
+  @TargetApi(Build.VERSION_CODES.LOLLIPOP)
+  public static boolean hasCamera2(Context context, boolean stillShot) {
+    if (context == null) return false;
+    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) return false;
+    if (stillShot && ManufacturerUtil.isSamsungDevice()) return false;
+    try {
+      CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
+      String[] idList = manager.getCameraIdList();
+      boolean notNull = true;
+      if (idList.length == 0) {
+        notNull = false;
+      } else {
+        for (final String str : idList) {
+          if (str == null || str.trim().isEmpty()) {
+            notNull = false;
+            break;
+          }
+          final CameraCharacteristics characteristics = manager.getCameraCharacteristics(str);
+          //noinspection ConstantConditions
+          final int supportLevel =
+              characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
+          if (supportLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
+            notNull = false;
+            break;
+          }
+        }
+      }
+      return notNull;
+    } catch (Throwable t) {
+      t.printStackTrace();
+      return false;
+    }
+  }
+
+  /** @return the color with its HSV value component scaled by 0.8 (darker). */
+  @ColorInt
+  public static int darkenColor(@ColorInt int color) {
+    float[] hsv = new float[3];
+    Color.colorToHSV(color, hsv);
+    hsv[2] *= 0.8f; // value component
+    color = Color.HSVToColor(hsv);
+    return color;
+  }
+
+  /** @return true when the color's perceived luminance puts it in the dark half. */
+  public static boolean isColorDark(int color) {
+    double darkness =
+        1
+            - (0.299 * Color.red(color) + 0.587 * Color.green(color) + 0.114 * Color.blue(color))
+                / 255;
+    return darkness >= 0.5;
+  }
+
+  /** @return the color with its alpha channel multiplied by {@code factor}. */
+  public static int adjustAlpha(int color, @SuppressWarnings("SameParameterValue") float factor) {
+    int alpha = Math.round(Color.alpha(color) * factor);
+    int red = Color.red(color);
+    int green = Color.green(color);
+    int blue = Color.blue(color);
+    return Color.argb(alpha, red, green, blue);
+  }
+}
diff --git a/app/src/main/java/tabian/com/instagramclone2/materialcamera/util/Degrees.java b/app/src/main/java/tabian/com/instagramclone2/materialcamera/util/Degrees.java
new file mode 100644
index 0000000..d08cd7b
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/materialcamera/util/Degrees.java
@@ -0,0 +1,136 @@
+package tabian.com.instagramclone2.materialcamera.util;
+
+import android.app.Activity;
+import android.content.Context;
+import android.content.pm.ActivityInfo;
+import android.support.annotation.IntDef;
+import android.support.annotation.NonNull;
+import android.util.Log;
+import android.view.Surface;
+import android.view.WindowManager;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+/** @author Aidan Follestad (afollestad) */
+/**
+ * Helpers for converting between display rotations, degree units, and
+ * ActivityInfo screen-orientation constants.
+ *
+ * @author Aidan Follestad (afollestad)
+ */
+public class Degrees {
+
+  @IntDef({
+    ActivityInfo.SCREEN_ORIENTATION_PORTRAIT,
+    ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE,
+    ActivityInfo.SCREEN_ORIENTATION_REVERSE_PORTRAIT,
+    ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE
+  })
+  @Retention(RetentionPolicy.SOURCE)
+  @interface ActivityOrientation {}
+
+  @IntDef({DEGREES_0, DEGREES_90, DEGREES_180, DEGREES_270, DEGREES_360})
+  @Retention(RetentionPolicy.SOURCE)
+  public @interface DegreeUnits {}
+
+  public static final int DEGREES_0 = 0;
+  public static final int DEGREES_90 = 90;
+  public static final int DEGREES_180 = 180;
+  public static final int DEGREES_270 = 270;
+  static final int DEGREES_360 = 360;
+
+  private Degrees() {}
+
+  /** @return the rotation 180 degrees opposite the given one (0 for unknown input). */
+  @DegreeUnits
+  public static int mirror(@DegreeUnits int orientation) {
+    if (orientation == DEGREES_0 || orientation == DEGREES_360) {
+      return DEGREES_180;
+    } else if (orientation == DEGREES_90) {
+      return DEGREES_270;
+    } else if (orientation == DEGREES_180) {
+      return DEGREES_0;
+    } else if (orientation == DEGREES_270) {
+      return DEGREES_90;
+    }
+    return DEGREES_0;
+  }
+
+  // Folds an arbitrary degree count into [0, 360], mapping exactly 360 to 0.
+  @SuppressWarnings("ResourceType")
+  @DegreeUnits
+  private static int naturalize(@DegreeUnits int orientation) {
+    if (orientation == 360) {
+      return 0;
+    }
+    while (orientation > 360) {
+      orientation -= 360;
+    }
+    while (orientation < 0) {
+      orientation += 360;
+    }
+    return orientation;
+  }
+
+  /** @return the default display's rotation expressed in degrees (0 when unknown). */
+  @DegreeUnits
+  public static int getDisplayRotation(Context context) {
+    final WindowManager windowManager =
+        (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
+    final int rotation = windowManager.getDefaultDisplay().getRotation();
+    if (rotation == Surface.ROTATION_90) return DEGREES_90;
+    if (rotation == Surface.ROTATION_180) return DEGREES_180;
+    if (rotation == Surface.ROTATION_270) return DEGREES_270;
+    return DEGREES_0;
+  }
+
+  /**
+   * Computes the rotation to apply to camera frames so they render upright,
+   * mirroring the result for front cameras in landscape.
+   */
+  @SuppressWarnings("ResourceType")
+  @DegreeUnits
+  public static int getDisplayOrientation(
+      @DegreeUnits int sensorOrientation,
+      @DegreeUnits int displayOrientation,
+      boolean front) {
+    final boolean landscape = isLandscape(displayOrientation);
+    final int display = displayOrientation == DEGREES_0 ? DEGREES_360 : displayOrientation;
+    int result = naturalize(sensorOrientation - display);
+    if (landscape && front) {
+      result = mirror(result);
+    }
+    return result;
+  }
+
+  /** @return the ActivityInfo orientation constant matching the current display rotation. */
+  @ActivityOrientation
+  public static int getActivityOrientation(@NonNull Activity context) {
+    @DegreeUnits final int rotation = getDisplayRotation(context);
+    if (rotation == DEGREES_0 || rotation == DEGREES_360) {
+      return ActivityInfo.SCREEN_ORIENTATION_PORTRAIT;
+    }
+    if (rotation == DEGREES_90) {
+      return ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE;
+    }
+    if (rotation == DEGREES_180) {
+      return ActivityInfo.SCREEN_ORIENTATION_REVERSE_PORTRAIT;
+    }
+    if (rotation == DEGREES_270) {
+      return ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE;
+    }
+    Log.e("Degrees", "Unknown screen orientation. Defaulting to portrait.");
+    return ActivityInfo.SCREEN_ORIENTATION_PORTRAIT;
+  }
+
+  public static boolean isPortrait(Context activity) {
+    return isPortrait(getDisplayRotation(activity));
+  }
+
+  public static boolean isLandscape(Context activity) {
+    return isLandscape(getDisplayRotation(activity));
+  }
+
+  public static boolean isPortrait(@DegreeUnits int degrees) {
+    return degrees == DEGREES_0 || degrees == DEGREES_180 || degrees == DEGREES_360;
+  }
+
+  private static boolean isLandscape(@DegreeUnits int degrees) {
+    return degrees == DEGREES_90 || degrees == DEGREES_270;
+  }
+}
diff --git a/app/src/main/java/tabian/com/instagramclone2/materialcamera/util/ImageUtil.java b/app/src/main/java/tabian/com/instagramclone2/materialcamera/util/ImageUtil.java
new file mode 100644
index 0000000..f7f4087
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/materialcamera/util/ImageUtil.java
@@ -0,0 +1,163 @@
+package tabian.com.instagramclone2.materialcamera.util;
+
+
+
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import android.graphics.Matrix;
+import android.media.ExifInterface;
+import android.os.Handler;
+import android.support.annotation.Nullable;
+import android.util.Log;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.zip.Deflater;
+
+import tabian.com.instagramclone2.materialcamera.ICallback;
+
+import static tabian.com.instagramclone2.materialcamera.util.Degrees.DEGREES_270;
+import static tabian.com.instagramclone2.materialcamera.util.Degrees.DEGREES_90;
+
+/** Created by tomiurankar on 06/03/16. */
+/** Created by tomiurankar on 06/03/16. */
+public class ImageUtil {
+  private static final String TAG = "ImageUtil";
+
+  /**
+   * Saves a byte[] array to disk on a background thread.
+   *
+   * <p>NOTE(review): the payload is DEFLATE-compressed before being written, so
+   * the file on disk is not a directly decodable image; readers must inflate it
+   * first. Confirm this is what consumers (e.g. the preview's Glide load)
+   * expect — the original library wrote {@code input} uncompressed.
+   *
+   * @param input raw bytes to persist
+   * @param output destination file
+   * @param callback always invoked on the originating thread; receives the
+   *     exception on failure or null on success
+   */
+  public static void saveToDiskAsync(
+      final byte[] input, final File output, final ICallback callback) {
+    final Handler handler = new Handler();
+    new Thread() {
+      @Override
+      public void run() {
+        try {
+          FileOutputStream outputStream = new FileOutputStream(output);
+          try {
+            outputStream.write(compress(input));
+            outputStream.flush();
+          } finally {
+            // Always release the file descriptor, even when write() throws;
+            // the previous version leaked the stream on failure.
+            outputStream.close();
+          }
+
+          handler.post(
+              new Runnable() {
+                @Override
+                public void run() {
+                  callback.done(null);
+                }
+              });
+        } catch (final Exception e) {
+          handler.post(
+              new Runnable() {
+                @Override
+                public void run() {
+                  callback.done(e);
+                }
+              });
+        }
+      }
+    }.start();
+  }
+
+  /**
+   * DEFLATE-compresses the given bytes, logging the before/after sizes.
+   *
+   * @throws IOException declared for API compatibility; ByteArrayOutputStream
+   *     itself does not throw on close
+   */
+  public static byte[] compress(byte[] data) throws IOException {
+    Deflater deflater = new Deflater();
+    try {
+      deflater.setInput(data);
+      ByteArrayOutputStream outputStream = new ByteArrayOutputStream(data.length);
+      deflater.finish();
+      byte[] buffer = new byte[1024];
+      while (!deflater.finished()) {
+        int count = deflater.deflate(buffer); // number of compressed bytes produced
+        outputStream.write(buffer, 0, count);
+      }
+      outputStream.close();
+      byte[] output = outputStream.toByteArray();
+      Log.d(TAG, "Original: " + data.length / 1024 + " Kb");
+      Log.d(TAG, "Compressed: " + output.length / 1024 + " Kb");
+      return output;
+    } finally {
+      // Frees the native zlib buffers promptly; GC alone does not reclaim them.
+      deflater.end();
+    }
+  }
+
+  /**
+   * Decodes the file downsampled to roughly the requested size and rotates it
+   * per its EXIF orientation flag.
+   *
+   * @param inputFile expects a JPEG file if corrected orientation is wanted
+   * @return the rotated bitmap, or null when the file cannot be decoded
+   */
+  @Nullable
+  public static Bitmap getRotatedBitmap(String inputFile, int reqWidth, int reqHeight) {
+    final int rotationInDegrees = getExifDegreesFromJpeg(inputFile);
+
+    Log.d(TAG, "getRotatedBitmap: rotation: " + rotationInDegrees);
+    // First pass decodes bounds only, to pick a sample size without allocating pixels.
+    final BitmapFactory.Options opts = new BitmapFactory.Options();
+    opts.inJustDecodeBounds = true;
+    BitmapFactory.decodeFile(inputFile, opts);
+    opts.inSampleSize = calculateInSampleSize(opts, reqWidth, reqHeight, rotationInDegrees);
+    opts.inJustDecodeBounds = false;
+
+    Log.d(TAG, "getRotatedBitmap: input file: " + inputFile);
+    final Bitmap origBitmap = BitmapFactory.decodeFile(inputFile, opts);
+    Log.d(TAG, "getRotatedBitmap: original bitmap: " + origBitmap);
+
+    if (origBitmap == null) return null;
+
+    Matrix matrix = new Matrix();
+    matrix.preRotate(rotationInDegrees);
+    // createBitmap returns the same bitmap when the matrix is identity, so a
+    // zero-rotation case costs no extra memory.
+    return Bitmap.createBitmap(
+        origBitmap, 0, 0, origBitmap.getWidth(), origBitmap.getHeight(), matrix, true);
+  }
+
+  // Chooses a power-free sample size so the decoded image is at least the
+  // requested dimensions, swapping width/height for 90/270-degree rotations.
+  private static int calculateInSampleSize(
+      BitmapFactory.Options options, int reqWidth, int reqHeight, int rotationInDegrees) {
+
+    // Raw height and width of image
+    final int height;
+    final int width;
+    int inSampleSize = 1;
+
+    // Check for rotation
+    if (rotationInDegrees == DEGREES_90 || rotationInDegrees == DEGREES_270) {
+      width = options.outHeight;
+      height = options.outWidth;
+    } else {
+      height = options.outHeight;
+      width = options.outWidth;
+    }
+
+    if (height > reqHeight || width > reqWidth) {
+      // Calculate ratios of height and width to requested height and width
+      final int heightRatio = Math.round((float) height / (float) reqHeight);
+      final int widthRatio = Math.round((float) width / (float) reqWidth);
+
+      // Choose the smallest ratio as inSampleSize value, this will guarantee
+      // a final image with both dimensions larger than or equal to the
+      // requested height and width.
+      inSampleSize = heightRatio < widthRatio ? heightRatio : widthRatio;
+    }
+    return inSampleSize;
+  }
+
+  // Reads the EXIF orientation tag; returns 0 when absent or unreadable.
+  private static int getExifDegreesFromJpeg(String inputFile) {
+    try {
+      final ExifInterface exif = new ExifInterface(inputFile);
+      final int exifOrientation =
+          exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
+      if (exifOrientation == ExifInterface.ORIENTATION_ROTATE_90) {
+        return 90;
+      } else if (exifOrientation == ExifInterface.ORIENTATION_ROTATE_180) {
+        return 180;
+      } else if (exifOrientation == ExifInterface.ORIENTATION_ROTATE_270) {
+        return 270;
+      }
+    } catch (IOException e) {
+      Log.e("exif", "Error when trying to get exif data from : " + inputFile, e);
+    }
+    return 0;
+  }
+}
diff --git a/app/src/main/java/tabian/com/instagramclone2/materialcamera/util/ManufacturerUtil.java b/app/src/main/java/tabian/com/instagramclone2/materialcamera/util/ManufacturerUtil.java
new file mode 100644
index 0000000..c40b232
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/materialcamera/util/ManufacturerUtil.java
@@ -0,0 +1,29 @@
+package tabian.com.instagramclone2.materialcamera.util;
+
+import android.os.Build;
+
+/**
+ * This class exists to provide a place to define device specific information as some
+ * manufacturers/devices require specific camera setup/requirements.
+ */
+public class ManufacturerUtil {
+
+ public ManufacturerUtil() {}
+
+ // Samsung device info
+ private static final String SAMSUNG_MANUFACTURER = "samsung";
+
+ // Samsung Galaxy S3 info
+ private static final String SAMSUNG_S3_DEVICE_COMMON_PREFIX = "d2";
+ public static final Integer SAMSUNG_S3_PREVIEW_WIDTH = 640;
+ public static final Integer SAMSUNG_S3_PREVIEW_HEIGHT = 480;
+
+ // Samsung Galaxy helper functions
+ static boolean isSamsungDevice() {
+ return SAMSUNG_MANUFACTURER.equals(Build.MANUFACTURER.toLowerCase());
+ }
+
+ public static boolean isSamsungGalaxyS3() {
+ return Build.DEVICE.startsWith(SAMSUNG_S3_DEVICE_COMMON_PREFIX);
+ }
+}
diff --git a/app/src/main/java/tabian/com/instagramclone2/models/Story.java b/app/src/main/java/tabian/com/instagramclone2/models/Story.java
new file mode 100644
index 0000000..b25a64b
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/models/Story.java
@@ -0,0 +1,95 @@
+package tabian.com.instagramclone2.models;
+
+/**
+ * Created by User on 1/7/2018.
+ */
+
/**
 * Model for a single story entry as stored in Firebase: who posted it, when, where its
 * image/video lives in cloud storage, and playback metadata. All fields are Strings to
 * match the Firebase database representation; the public no-arg constructor and
 * getter/setter pairs are required by Firebase's automatic data mapping.
 */
public class Story {

    private String user_id;    // uid of the poster
    private String timestamp;  // time the story was posted
    private String image_url;  // storage URL when the story is an image
    private String video_url;  // storage URL when the story is a video
    private String story_id;   // unique key of this story node
    private String views;      // view count
    private String duration;   // playback duration (videos)

    public Story(String user_id, String timestamp, String image_url, String video_url, String story_id, String views, String duration) {
        this.user_id = user_id;
        this.timestamp = timestamp;
        this.image_url = image_url;
        this.video_url = video_url;
        this.story_id = story_id;
        this.views = views;
        this.duration = duration;
    }

    /** Required empty constructor for Firebase deserialization. */
    public Story() {

    }

    public String getUser_id() {
        return user_id;
    }

    public void setUser_id(String user_id) {
        this.user_id = user_id;
    }

    public String getTimestamp() {
        return timestamp;
    }

    public void setTimestamp(String timestamp) {
        this.timestamp = timestamp;
    }

    public String getImage_url() {
        return image_url;
    }

    public void setImage_url(String image_url) {
        this.image_url = image_url;
    }

    public String getVideo_url() {
        return video_url;
    }

    public void setVideo_url(String video_url) {
        this.video_url = video_url;
    }

    public String getStory_id() {
        return story_id;
    }

    public void setStory_id(String story_id) {
        this.story_id = story_id;
    }

    public String getViews() {
        return views;
    }

    public void setViews(String views) {
        this.views = views;
    }

    public String getDuration() {
        return duration;
    }

    public void setDuration(String duration) {
        this.duration = duration;
    }

    // BUG FIX: the original toString() began with a dangling comma, omitted most fields,
    // and never closed the brace. Now emits all fields in declaration order.
    @Override
    public String toString() {
        return "Story{" +
                "user_id='" + user_id + '\'' +
                ", timestamp='" + timestamp + '\'' +
                ", image_url='" + image_url + '\'' +
                ", video_url='" + video_url + '\'' +
                ", story_id='" + story_id + '\'' +
                ", views='" + views + '\'' +
                ", duration='" + duration + '\'' +
                '}';
    }
}
diff --git a/app/src/main/java/tabian/com/instagramclone2/models/UserStories.java b/app/src/main/java/tabian/com/instagramclone2/models/UserStories.java
new file mode 100644
index 0000000..a978428
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/models/UserStories.java
@@ -0,0 +1,68 @@
+package tabian.com.instagramclone2.models;
+
+import android.os.Parcel;
+import android.os.Parcelable;
+
+import java.util.ArrayList;
+
+/**
+ * Created by User on 1/7/2018.
+ */
+
/**
 * Pairs one user's account settings with the list of story media they have posted.
 * Parcelable so it can be passed between activities/fragments in an Intent/Bundle.
 *
 * NOTE(review): the element type of {@code media} is missing (raw ArrayList) — the
 * generic parameter appears to have been lost; confirm the intended type against callers.
 */
public class UserStories implements Parcelable{

    // Story media for this user.
    // NOTE(review): media is NOT written in writeToParcel() and NOT restored in the
    // Parcel constructor, so it is silently dropped across parceling — confirm whether
    // the receiver is expected to re-fetch it.
    private ArrayList media;
    private UserAccountSettings user_account_settings;

    public UserStories(ArrayList media, UserAccountSettings user_account_settings) {
        this.media = media;
        this.user_account_settings = user_account_settings;
    }

    // Required empty constructor for Firebase deserialization.
    public UserStories() {

    }


    // Reconstructs from a Parcel; only user_account_settings is round-tripped (see note above).
    protected UserStories(Parcel in) {
        user_account_settings = in.readParcelable(UserAccountSettings.class.getClassLoader());
    }

    public static final Creator CREATOR = new Creator() {
        @Override
        public UserStories createFromParcel(Parcel in) {
            return new UserStories(in);
        }

        @Override
        public UserStories[] newArray(int size) {
            return new UserStories[size];
        }
    };

    public ArrayList getMedia() {
        return media;
    }

    public void setMedia(ArrayList media) {
        this.media = media;
    }

    public UserAccountSettings getUser_account_settings() {
        return user_account_settings;
    }

    public void setUser_account_settings(UserAccountSettings user_account_settings) {
        this.user_account_settings = user_account_settings;
    }

    @Override
    public int describeContents() {
        // No special contents (e.g. file descriptors).
        return 0;
    }

    @Override
    public void writeToParcel(Parcel parcel, int i) {
        parcel.writeParcelable(user_account_settings, i);
    }
}
diff --git a/app/src/main/java/tabian/com/instagramclone2/opengl/AddToStoryDialog.java b/app/src/main/java/tabian/com/instagramclone2/opengl/AddToStoryDialog.java
new file mode 100644
index 0000000..3f794a3
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/opengl/AddToStoryDialog.java
@@ -0,0 +1,81 @@
+package tabian.com.instagramclone2.opengl;
+
+import android.Manifest;
+import android.app.DialogFragment;
+import android.content.pm.PackageManager;
+import android.os.Build;
+import android.os.Bundle;
+import android.support.annotation.NonNull;
+import android.support.annotation.Nullable;
+import android.util.Log;
+import android.view.LayoutInflater;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.LinearLayout;
+import android.widget.Toast;
+
+import tabian.com.instagramclone2.Home.HomeActivity;
+import tabian.com.instagramclone2.R;
+
+
+/**
+ * Created by User on 1/8/2018.
+ */
+
+public class AddToStoryDialog extends DialogFragment {
+
+ private static final String TAG = "AddToStoryDialog";
+ private static final int MY_PERMISSIONS_REQUEST_READ_EXTERNAL_STORAGE = 29;
+
+ //widgets
+ private LinearLayout layout;
+
+ @Nullable
+ @Override
+ public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, Bundle savedInstanceState) {
+ View view = inflater.inflate(R.layout.dialog_add_to_story, container, false);
+ layout = view.findViewById(R.id.linLayout1);
+
+
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+ if (getActivity().checkSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED
+ || getActivity().checkSelfPermission(Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
+ requestPermissions(new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE, Manifest.permission.RECORD_AUDIO},
+ MY_PERMISSIONS_REQUEST_READ_EXTERNAL_STORAGE);
+ } else {
+ Log.d(TAG, "Already granted access");
+ init();
+ }
+ }
+
+
+ return view;
+ }
+
+ private void init(){
+ layout.setOnClickListener(new View.OnClickListener() {
+ @Override
+ public void onClick(View view) {
+ Log.d(TAG, "onClick: preparing to record new story.");
+ ((HomeActivity)getActivity()).openNewStoryActivity();
+ getDialog().dismiss();
+ }
+ });
+ }
+
+ @Override
+ public void onRequestPermissionsResult(int requestCode, @NonNull String permissions[], @NonNull int[] grantResults) {
+ switch (requestCode) {
+ case MY_PERMISSIONS_REQUEST_READ_EXTERNAL_STORAGE: {
+ if (grantResults[0] == PackageManager.PERMISSION_GRANTED) {
+ Log.d(TAG, "Permission Granted");
+ init();
+ } else {
+ Log.d(TAG, "Permission Failed");
+ Toast.makeText(getActivity().getBaseContext(), "You must allow permission to record audio to your mobile device.", Toast.LENGTH_SHORT).show();
+ getActivity().finish();
+ }
+ }
+ }
+ }
+}
diff --git a/app/src/main/java/tabian/com/instagramclone2/opengl/MyGLRenderer.java b/app/src/main/java/tabian/com/instagramclone2/opengl/MyGLRenderer.java
new file mode 100644
index 0000000..5a19c05
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/opengl/MyGLRenderer.java
@@ -0,0 +1,4467 @@
+package tabian.com.instagramclone2.opengl;
+
+
+import android.app.Activity;
+import android.content.Context;
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import android.graphics.Color;
+import android.graphics.SurfaceTexture;
+import android.graphics.drawable.Drawable;
+import android.net.Uri;
+import android.opengl.GLES11Ext;
+import android.opengl.GLSurfaceView;
+import android.opengl.GLU;
+import android.opengl.GLUtils;
+import android.os.Handler;
+import android.os.Looper;
+import android.os.Message;
+import android.util.Log;
+import android.view.Surface;
+import android.view.View;
+import android.widget.LinearLayout;
+import android.widget.ProgressBar;
+import android.widget.RelativeLayout;
+import android.widget.TextView;
+import android.widget.Toast;
+
+import com.bumptech.glide.Glide;
+import com.bumptech.glide.request.target.SimpleTarget;
+import com.bumptech.glide.request.transition.Transition;
+import com.google.android.exoplayer2.ExoPlayerFactory;
+import com.google.android.exoplayer2.Player;
+import com.google.android.exoplayer2.SimpleExoPlayer;
+import com.google.android.exoplayer2.extractor.DefaultExtractorsFactory;
+import com.google.android.exoplayer2.source.ExtractorMediaSource;
+import com.google.android.exoplayer2.source.MediaSource;
+import com.google.android.exoplayer2.trackselection.AdaptiveTrackSelection;
+import com.google.android.exoplayer2.trackselection.DefaultTrackSelector;
+import com.google.android.exoplayer2.trackselection.TrackSelection;
+import com.google.android.exoplayer2.upstream.DataSource;
+import com.google.android.exoplayer2.upstream.DefaultBandwidthMeter;
+import com.google.android.exoplayer2.upstream.DefaultDataSourceFactory;
+import com.google.android.exoplayer2.util.Util;
+
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+
+import java.io.ByteArrayOutputStream;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+import java.util.ArrayList;
+import java.util.HashMap;
+
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.opengles.GL10;
+
+import de.hdodenhof.circleimageview.CircleImageView;
+import tabian.com.instagramclone2.R;
+
+
+import static java.lang.Math.round;
+
+
+/**
+ * Created by User on 11/27/2017.
+ */
+
+public class MyGLRenderer implements GLSurfaceView.Renderer{
+ private static final String TAG = "fsnklsnflkds";
+
+ private static final int MEDIA_TIMEOUT = 15000; //15 seconds or 15000 milliseconds
+ private static final int VIDEO_REFRESH_COUNT_LIMIT = 5; // if the video fails to load 10 frames then force it
+ private static final int INIT_VIDEO_PROGRESS_BAR = 33;
+ private static final int UPDATE_UI_WITH_VIDEO_PROGRESS = 11;
+ private static final int REMOVE_PROGRESS_BAR_CALLBACKS = 22;
+ private static final int HIDE_PROGRESS_BAR = 44;
+ private static final float STEP_SIZE = 2.0f;
+ private static final float STARTING_STEP_SIZE = 6.0f;
+ private static final int SURFACE_1 = 0;
+ private static final int SURFACE_2 = 1;
+ private static final int SURFACE_3 = 2;
+ private static final int SURFACE_4 = 3;
+ private static final int STARTING_SURFACE = 0;
+ private static final int STARTING_SURFACE_PLUS_ONE = 1;
+ private static final int STARTING_SURFACE_MINUS_ONE = 3;
+ private static final int BACKGROUND_SURFACE = 4;
+ private int mCurrentSurface = SURFACE_1;
+ private static final int PLAYER_ONE = 1;
+ private static final int PLAYER_ONE_SECONDARY = -1;
+ private static final int PLAYER_TWO = 2;
+ private static final int PLAYER_TWO_SECONDARY = -2;
+ private static final int PLAYER_THREE = 3;
+ private static final int PLAYER_THREE_SECONDARY = -3;
+ private static final int PLAYER_FOUR = 4;
+ private static final int PLAYER_FOUR_SECONDARY = -4;
+ private static final int ACTIVE_PLAYER = 1; // currently the 'active' player on a surface
+ private static final int PAUSED_PLAYER = 2; // currently 'active' but paused
+ private static final int NOT_ACTIVE_PLAYER = 0; // currently NOT the 'active' player on a surface
+ public int mPlayerState = ACTIVE_PLAYER;
+ public int mSecondaryPlayerState = NOT_ACTIVE_PLAYER;
+ public int mPlayer2State = ACTIVE_PLAYER;
+ public int mSecondaryPlayer2State = NOT_ACTIVE_PLAYER;
+ public int mPlayer3State = ACTIVE_PLAYER;
+ public int mSecondaryPlayer3State = NOT_ACTIVE_PLAYER;
+ public int mPlayer4State = ACTIVE_PLAYER;
+ public int mSecondaryPlayer4State = NOT_ACTIVE_PLAYER;
+ private boolean hasFirstVideo1Played = false;
+ private boolean hasFirstVideo2Played = false;
+ private boolean hasFirstVideo3Played = false;
+ private boolean hasFirstVideo4Played = false;
+ private ProgressBar mProgressBar;
+ private Runnable mVideoRetryRunnable;
+ private int mVideoRetryTimer = 0;
+ private static final int VIDEO_RETRY_TIMEOUT = 4000;
+ private int videoRetryTimer = 0;
+ private int frameAvailableCount = 0;
+
+ private Handler mVideoSurface1Handler;
+ private boolean initVideoTexture1 = false;
+
+ private static final DefaultBandwidthMeter BANDWIDTH_METER = new DefaultBandwidthMeter();
+ public SimpleExoPlayer mPlayer, mSecondaryPlayer, mPlayer2, mSecondaryPlayer2, mPlayer3, mSecondaryPlayer3, mPlayer4, mSecondaryPlayer4;
+ private DefaultTrackSelector mTrackSelector;
+ private TrackSelection.Factory mVideoTrackSelectionFactory;
+ private boolean playWhenReady = false;
+
+ private Context mContext;
+ private static final float pi = 3.14159f;
+ private boolean mStop = false;
+ public boolean mRotateClockwise = false;
+ public boolean mRotateCounterClockwise = false;
+
+
+ private float screenHeight = 0f;
+ private float screenWidth = 0f;
+ private float screenRatio = 0f;
+ private int numFaces = 4;
+
+ private float[][] widthMatrix = new float[6][4];
+ private float[][] heightMatrix = new float[6][4];
+ private float[][] depthMatrix = new float[6][4];
+
+ private FloatBuffer[][] mVertexBuffers;
+ private FloatBuffer vertexBuffer1;
+ private FloatBuffer vertexBuffer2;
+ private FloatBuffer vertexBuffer3;
+ private FloatBuffer vertexBuffer4;
+
+ private FloatBuffer textureBuffer1;
+ private FloatBuffer textureBuffer2;
+ private FloatBuffer textureBuffer3;
+ private FloatBuffer textureBuffer4;
+
+ private float[] texCoords1;
+ private float[] texCoords2;
+ private float[] texCoords3;
+ private float[] texCoords4;
+
+
+ private ArrayList mVertices = new ArrayList<>();
+ private ArrayList mResources = new ArrayList<>();
+
+ private SurfaceTexture mSurfaceTexture;
+ private Surface mSurface;
+ private SurfaceTexture mSurfaceTexture2;
+ private Surface mSurface2;
+ private SurfaceTexture mSurfaceTexture3;
+ private Surface mSurface3;
+ private SurfaceTexture mSurfaceTexture4;
+ private Surface mSurface4;
+ private HashMap> videoMediaSources = new HashMap<>();
+// private MediaSource[][] videoMediaSources;
+ private boolean mUpdateST;
+ private boolean mUpdateST2;
+ private boolean mUpdateST3;
+ private boolean mUpdateST4;
+ private MyGLSurfaceView mGLView;
+
+
+ private static float angleRectangle = 0f;
+ private static float settledAngle = 0f;
+
+ private int mVideo1Index = 0;
+ private int mVideo2Index = 0;
+ private int mVideo3Index = 0;
+ private int mVideo4Index = 0;
+ private boolean mFirstRotationSurface3 = true;
+ private boolean mFirstRotationSurface4 = true;
+ private int mNumRotations = 0;
+ private int mHighestNumberMedia = 0;
+ private int mNumResources = 0; //number of users who have uploaded stories
+ private int mStartingResourceIndex = 0; //starting index
+ private JSONArray mResourceIndices = new JSONArray();
+ private boolean isImage1Set, isImage2Set, isImage3Set, isImage4Set = false;
+ private static float depth = 0f;
+ private static float dx = 0;
+ private static float startPositionX = 0;
+ private static float endPositionX = 0f;
+ private static float position = 0.0f;
+
+ //Video Progress Bars
+ private MyProgressBar mCurrentProgressBar;
+ private RelativeLayout mRelativeLayout;
+ private LinearLayout mLinearLayout;
+ private LinearLayout mLinearLayout2;
+ private Handler mProgressHandler;
+ private Runnable mProgressRunnable;
+ private Runnable mProgressBarInitRunnable;
+ private Handler mProgressBarInitHandler;
+ private int mCurrentProgress = 0;
+ private int mTotalDuration = 0;
+ private int[] mIds;
+ private boolean isProgressBarsInitialized = false;
+
+
+ private float[][] colors = { // Colors of the 6 faces
+ {1.0f, 0.5f, 0.0f, 1.0f}, // 0. orange
+ {1.0f, 0.0f, 1.0f, 1.0f}, // 1. violet
+ {0.0f, 0.0f, 0.0f, 0.0f}, // 5. black
+ {0.0f, 0.0f, 1.0f, 1.0f}, // 3. blue
+ {1.0f, 0.0f, 0.0f, 1.0f}, // 4. red
+ {0.0f, 0.0f, 0.0f, 0.0f} // 5. white
+ };
+
+ private float[] backgroundVertices = {
+ -20, -20, 1,
+ 20, -20, 1,
+ -20, 20, 1,
+ 20, 20, 1,
+ };
+
+ private int[] textureId1 = new int[1];
+ private int[] textureId2 = new int[1];
+ private int[] textureId3 = new int[1];
+ private int[] textureId4 = new int[1];
+
+
+ private boolean isRotationEnabled = true;
+ private boolean mAllowRotationClockwise = true;
+ private float mAngleFinished = 0f;
+ private float mStartingAngle = 0f;
+ private ArrayList mTargets = new ArrayList<>();
+ private float mDefaultImageWidthScaleFactor = 0f;
+ private float mDefaultImageHeightScaleFactor = 0f;
+
+
    /**
     * Returns the last recorded horizontal touch position.
     * NOTE(review): backed by a static field, so this state is shared across instances.
     */
    public float getPosition() {
        return position;
    }
+
    /**
     * Fed the current horizontal touch position while the user drags. Computes the
     * displacement since the previous call and converts it into cube rotation via
     * updateAngle(), subject to the end-of-stories clamp.
     *
     * NOTE(review): 'position' and 'dx' are static, so drag state is shared across
     * renderer instances — confirm only one renderer is alive at a time.
     */
    public void setPosition(float position) {

        // Past the final story: snap to the finishing angle and disallow further
        // clockwise rotation until the user rotates back.
        if(angleRectangle <= mAngleFinished - 1){
            mAllowRotationClockwise = false;
            angleRectangle = mAngleFinished;
            correctRotation();
        }
        else{
            mAllowRotationClockwise = true;
        }

        // Ignore drags while an animated snap rotation is already in progress.
        if(isRotationEnabled && !mRotateClockwise && !mRotateCounterClockwise){
            if(mAllowRotationClockwise){
                dx = (position) - MyGLRenderer.position;
                if(mNumRotations > 0){
                    // Filter jitter (<= 1px) and jumps (>= 100px, e.g. a fresh touch-down).
                    if(Math.abs(dx) < 100 && Math.abs(dx) > 1){
                        Log.d(TAG, "setPosition: dx: " + dx);
                        updateAngle(dx);
                    }
                }
                else if(mNumRotations == 0 && dx < 0){
                    // On the very first story only leftward (negative) drags may rotate.
                    if(Math.abs(dx) < 100 && Math.abs(dx) > 1){
                        Log.d(TAG, "setPosition: dx: " + dx);
                        updateAngle(dx);
                    }
                }
                MyGLRenderer.position = position;
                Log.d(TAG, "setPosition: position: " + MyGLRenderer.position);
            }
            else{
                // At the end of the stories: only rightward (positive) drags are accepted,
                // allowing the user to rotate back.
                dx = (position) - MyGLRenderer.position;
                if(dx > 1 && dx < 100){
                    Log.d(TAG, "setPosition: dx: " + dx);
                    updateAngle(dx);
                }
                MyGLRenderer.position = position;
                Log.d(TAG, "setPosition: position: " + MyGLRenderer.position);
            }
        }
    }
+
+
+ public void updateAngle(float displacement) {
+ Log.d(TAG, "updateAngle: displacement:" + displacement);
+ if(angleRectangle + (displacement) * ( 90 / screenWidth) <= 0){
+ angleRectangle = angleRectangle + (displacement) * ( 90 / screenWidth);
+ Log.d(TAG, "updateAngle: angle:" + angleRectangle);
+ }
+ }
+
    /** Returns the x position recorded at the start of the current drag gesture. */
    public float getStartPositionX() {
        return startPositionX;
    }
+
    /** Records the x position where the current drag gesture began (static/shared state). */
    public void setStartPositionX(float positionX) {
        MyGLRenderer.startPositionX = positionX;
    }
+
    /** Returns the horizontal displacement computed by the most recent setPosition() call. */
    public float getDx() {
        return dx;
    }
+
    // NOTE(review): intentionally(?) a no-op — dx is only updated internally by
    // setPosition(). Confirm that no caller relies on this setter taking effect.
    public void setDx(float dx) {
    }
+
    /** Returns whether playback/rotation has been flagged as stopped via setStopped(). */
    public boolean isStopped() {
//        Log.d(TAG, "isStopped: " + mStop);
        return mStop;
    }
+
    /**
     * Flags the renderer as stopped/resumed. Also remembers where the gesture ended and
     * re-centers the shared position tracker so the next drag starts from mid-screen.
     */
    public void setStopped(boolean stop) {
//        Log.d(TAG, "setStopped: " + stop);
        mStop = stop;
        endPositionX = getPosition();
        MyGLRenderer.position = screenWidth / 2;
    }
+
+
+// ArrayList mMedia = new ArrayList<>();
+ private JSONArray mUserStories = new JSONArray();
+
    /**
     * Builds the story-cube renderer.
     *
     * @param context hosting Activity context (used for view lookup and resources)
     * @param height screen height in pixels
     * @param width screen width in pixels
     * @param userStories JSON array of user stories; each element carries a
     *     "user_stories" JSONArray of media entries
     * @param mGLView the GLSurfaceView this renderer draws into
     * @param resourceIndex index of the story the cube should start on
     * @throws JSONException if the userStories payload is malformed
     */
    public MyGLRenderer(Context context, float height, float width, JSONArray userStories, MyGLSurfaceView mGLView, int resourceIndex) throws JSONException{
        mContext = context;
        this.screenRatio = height / width;
        this.screenHeight = height;
        this.screenWidth = width;
        this.mGLView = mGLView;
        mUserStories = userStories;
//        this.mMedia = media;
        this.mStartingResourceIndex = resourceIndex;
//        mNumResources = media.size();
        mNumResources = mUserStories.length();
        mRelativeLayout = ((Activity) mContext).findViewById(R.id.parent_layout);
        mLinearLayout = ((Activity)mContext).findViewById(R.id.linLayout);
        mLinearLayout2 = ((Activity)mContext).findViewById(R.id.linLayout2);
        initProgressBar();
        initPlayers();

        // The cube always has 4 faces, so reserve at least 4 resource slots even when
        // fewer users have posted stories.
        int indices = mNumResources;
        if(mNumResources <= 4){
            indices = 4;
        }
        // Find the largest media count across all users; used to size per-surface
        // media-index bookkeeping in initResourceIndices().
        for(int i = 0; i < indices; i++){
            mResources.add(i, null);
            try{
//            int numMediaIndices = mMedia.get(i).getMedia().size();
                int numMediaIndices = mUserStories.getJSONObject(i).getJSONArray(mContext.getString(R.string.user_stories)).length();
                if(numMediaIndices > mHighestNumberMedia){
                    mHighestNumberMedia = numMediaIndices;
                }
            }catch (NullPointerException e){
                e.printStackTrace();
                break;
            }
            catch (JSONException e){
                e.printStackTrace();
            }

        }

        // Only build the cube when at least the first user actually has media;
        // otherwise bail out of the hosting Activity.
//        if(mMedia.get(0).getMedia() != null){
        if(mUserStories.getJSONObject(0).getJSONArray(mContext.getString(R.string.user_stories)) != null){
            initResourceIndices();
            initDefaultImage();

            rotateToStartingIndex();
            // Finishing angle: 90 degrees per story beyond the first.
            mAngleFinished = -90 * (mNumResources - 1);
            initBlock();
        }
        else{
            Toast.makeText(mContext, "there is no media.", Toast.LENGTH_SHORT).show();
            ((Activity) mContext).finish();
        }

    }
+
    // Rotation-offset table used by rotateToStartingIndex(): row = starting rotation
    // index (mNumRotations mod 4), column = surface (0..3); a value of 0 marks the
    // surface that ends up front-facing. NOTE(review): semantics inferred from usage
    // in rotateToStartingIndex() — confirm.
    private float[][] rotationMatrix = {
            {0, -1, 2, 1},
            {1, 0, -1, 2},
            {2, 1, 0, -1},
            {-1, 2, 1, 0}
    };
+
+ private void rotateToStartingIndex(){
+ Log.d(TAG, "rotateToStartingIndex: rotating to starting index.");
+
+ Log.d(TAG, "rotateToStartingIndex: starting resource index: " + mStartingResourceIndex);
+ angleRectangle = -90 * mStartingResourceIndex;
+ settledAngle = angleRectangle;
+ mNumRotations = (int) Math.abs(angleRectangle / 90);
+ Log.d(TAG, "rotateToStartingIndex: rotating block to angle: " + angleRectangle);
+ Log.d(TAG, "rotateToStartingIndex: settled angle: " + settledAngle);
+ Log.d(TAG, "rotateToStartingIndex: number rotations: " + mNumRotations);
+ try{
+
+ int fullBlockRotations = (int) mNumRotations / 4;
+ Log.d(TAG, "rotateToStartingIndex: full block rotations: " + fullBlockRotations );
+ int startingRotationIndex = mNumRotations - (fullBlockRotations * 4);
+ Log.d(TAG, "rotateToStartingIndex: starting rotation index: " + startingRotationIndex);
+
+ JSONObject object1 = mResourceIndices.getJSONObject(SURFACE_1);
+ object1.put(mContext.getString(R.string.rotations), rotationMatrix[startingRotationIndex][0]);
+ if(rotationMatrix[startingRotationIndex][0] == 0){
+ mCurrentSurface = SURFACE_1;
+ Log.d(TAG, "rotateToStartingIndex: current surface: " + 1);
+ }
+ Log.d(TAG, "rotateToStartingIndex: surface1 starting rotation index: " + rotationMatrix[startingRotationIndex][0]);
+ mResourceIndices.put(SURFACE_1, object1);
+
+ JSONObject object2 = mResourceIndices.getJSONObject(SURFACE_2);
+ object2.put(mContext.getString(R.string.rotations), rotationMatrix[startingRotationIndex][1]);
+ if(rotationMatrix[startingRotationIndex][1] == 0){
+ mCurrentSurface = SURFACE_2;
+ Log.d(TAG, "rotateToStartingIndex: current surface: " + 2);
+ }
+ Log.d(TAG, "rotateToStartingIndex: surface2 starting rotation index: " + rotationMatrix[startingRotationIndex][1]);
+ mResourceIndices.put(SURFACE_2, object2);
+
+ JSONObject object3 = mResourceIndices.getJSONObject(SURFACE_3);
+ object3.put(mContext.getString(R.string.rotations), rotationMatrix[startingRotationIndex][2]);
+ if(rotationMatrix[startingRotationIndex][2] == 0){
+ mCurrentSurface = SURFACE_3;
+ Log.d(TAG, "rotateToStartingIndex: current surface: " + 3);
+ }
+ Log.d(TAG, "rotateToStartingIndex: surface3 starting rotation index: " + rotationMatrix[startingRotationIndex][2]);
+ mResourceIndices.put(SURFACE_3, object3);
+
+ JSONObject object4 = mResourceIndices.getJSONObject(SURFACE_4);
+ object4.put(mContext.getString(R.string.rotations), rotationMatrix[startingRotationIndex][3]);
+ if(rotationMatrix[startingRotationIndex][3] == 0){
+ mCurrentSurface = SURFACE_4;
+ Log.d(TAG, "rotateToStartingIndex: current surface: " + 4);
+ }
+ Log.d(TAG, "rotateToStartingIndex: surface4 starting rotation index: " + rotationMatrix[startingRotationIndex][3]);
+ mResourceIndices.put(SURFACE_4, object4);
+
+
+ }catch (JSONException e){
+ e.printStackTrace();
+ }
+
+ //init the first 4 surfaces
+ if(mNumRotations > 0){
+ try{
+
+ int surface1MediaIndexMultiple = (int) mNumRotations / 3;
+ Log.d(TAG, "rotateToStartingIndex: surface1MediaIndexMultiple: " + surface1MediaIndexMultiple);
+ // if(0 + (4 * surface1MediaIndexMultiple) < mMedia.size()){
+ if(0 + (4 * surface1MediaIndexMultiple) < mNumResources){
+ // getMedia(mMedia.get(0 + (4 * surface1MediaIndexMultiple)).getMedia(), 0); // 0
+ getMedia(mUserStories.getJSONObject(0 + (4 * surface1MediaIndexMultiple))
+ .getJSONArray(mContext.getString(R.string.user_stories)), 0); // 0
+ }
+ else{
+// getMedia(mMedia.get(0 + (4 * (surface1MediaIndexMultiple - 1))).getMedia(), 0); // 0
+ getMedia(mUserStories.getJSONObject(0 + (4 * (surface1MediaIndexMultiple - 1)))
+ .getJSONArray(mContext.getString(R.string.user_stories)), 0); // 0
+ }
+
+ int surface2MediaIndexMultiple = (int) (mNumRotations - 1) / 3;
+ Log.d(TAG, "rotateToStartingIndex: surface2MediaIndexMultiple: " + surface2MediaIndexMultiple);
+ if((1 + (4 * surface2MediaIndexMultiple)) < mNumResources){
+// getMedia(mMedia.get(1 + (4 * surface2MediaIndexMultiple)).getMedia(), 1); // 1
+ getMedia(mUserStories.getJSONObject(1 + (4 * surface2MediaIndexMultiple))
+ .getJSONArray(mContext.getString(R.string.user_stories)), 1);
+ }
+ else{
+// getMedia(mMedia.get(1 + (4 * (surface2MediaIndexMultiple - 1))).getMedia(), 1); // 1
+ getMedia(mUserStories.getJSONObject(1 + (4 * (surface2MediaIndexMultiple - 1)))
+ .getJSONArray(mContext.getString(R.string.user_stories)), 1);
+ }
+
+ int surface3MediaIndexMultiple = (int) (mNumRotations - 2) / 3;
+ Log.d(TAG, "rotateToStartingIndex: surface3MediaIndexMultiple: " + surface3MediaIndexMultiple);
+ if((2 + (4 * surface3MediaIndexMultiple)) < mNumResources){
+// getMedia(mMedia.get(2 + (4 * surface3MediaIndexMultiple)).getMedia(), 2); // 2
+ getMedia(mUserStories.getJSONObject(2 + (4 * surface3MediaIndexMultiple))
+ .getJSONArray(mContext.getString(R.string.user_stories)), 2);
+ }
+ else{
+// getMedia(mMedia.get(2 + (4 * (surface3MediaIndexMultiple - 1))).getMedia(), 2); // 2
+ getMedia(mUserStories.getJSONObject(2 + (4 * (surface3MediaIndexMultiple - 1)))
+ .getJSONArray(mContext.getString(R.string.user_stories)), 2);
+ }
+
+ int surface4MediaIndexMultiple = (int) (mNumRotations - 3) / 3;
+ Log.d(TAG, "rotateToStartingIndex: surface4MediaIndexMultiple: " + surface4MediaIndexMultiple);
+ if((3 + (4 * surface4MediaIndexMultiple)) < mNumResources){
+// getMedia(mMedia.get(3 + (4 * surface4MediaIndexMultiple)).getMedia(), 3); // 3
+ getMedia(mUserStories.getJSONObject(3 + (4 * surface3MediaIndexMultiple ))
+ .getJSONArray(mContext.getString(R.string.user_stories)), 3);
+ }
+ else{
+// getMedia(mMedia.get(3 + (4 * (surface4MediaIndexMultiple - 1))).getMedia(), 3); // 3
+ getMedia(mUserStories.getJSONObject(3 + (4 * (surface3MediaIndexMultiple - 1)))
+ .getJSONArray(mContext.getString(R.string.user_stories)), 3);
+ }
+ }catch (JSONException e){
+ e.printStackTrace();
+ }
+
+ }
+ else if(mNumRotations == 0){
+ try{
+// getMedia(mMedia.get(0).getMedia(), 0); // 0
+// getMedia(mMedia.get(1).getMedia(), 1); // 1
+// getMedia(mMedia.get(2).getMedia(), 2); // 2
+// getMedia(mMedia.get(3).getMedia(), 3); // 3
+
+ getMedia(mUserStories.getJSONObject(0).getJSONArray(mContext.getString(R.string.user_stories)), 0); // 0
+ getMedia(mUserStories.getJSONObject(1).getJSONArray(mContext.getString(R.string.user_stories)), 1); // 1
+ getMedia(mUserStories.getJSONObject(2).getJSONArray(mContext.getString(R.string.user_stories)), 2); // 2
+ getMedia(mUserStories.getJSONObject(3).getJSONArray(mContext.getString(R.string.user_stories)), 3); // 3
+
+ }catch (JSONException e){
+ e.printStackTrace();
+ }
+ }
+
+
+ }
+
    /**
     * Decodes the built-in placeholder image and precomputes the width/height scale
     * factors used to fit it onto a cube face without distortion.
     * NOTE(review): the branch logic below is a hand-tuned heuristic (see inline
     * comments) — confirm before altering any of the constants.
     */
    private void initDefaultImage(){
        Log.d(TAG, "initDefaultImage: preparing default image scale factors.");
        Bitmap bitmap = BitmapFactory.decodeResource(mContext.getResources(),
                R.drawable.android_construction);
        float imageHeight = bitmap.getHeight();
        float imageWidth = bitmap.getWidth();
        float heightScaleFactor = 1f;
        float widthScaleFactor = 1f;

        Log.d(TAG, "initDefaultImage: IMAGE WIDTH: " + imageWidth);
        Log.d(TAG, "initDefaultImage: IMAGE HEIGHT: " + imageHeight);

        if (imageWidth > imageHeight) {
            //scale the height to match the width (#1)
            heightScaleFactor = (widthScaleFactor * imageHeight) / (screenRatio * imageWidth);
        } else if (imageHeight > imageWidth) {
            if (imageWidth < screenWidth) {
                //scale the width to match the height (#2)
//                widthScaleFactor = (imageWidth / imageHeight) * screenRatio;
                // NOTE(review): the constant 2 here looks hand-tuned — confirm its origin.
                if (screenWidth / imageWidth < 2) {
                    heightScaleFactor = 2 - (screenWidth / imageWidth);
                }
            } else if (imageWidth > screenWidth) {
                // This one doesn't work for all cases. But the image sizes are so weird
                // that we shouldn't have to worry about it
                heightScaleFactor = (widthScaleFactor * imageHeight) / (screenRatio * imageWidth);
            }
        }
        mDefaultImageWidthScaleFactor = widthScaleFactor;
        mDefaultImageHeightScaleFactor = heightScaleFactor;
        Log.d(TAG, "initDefaultImage: WSF: " + widthScaleFactor);
        Log.d(TAG, "initDefaultImage: HSF: " + heightScaleFactor);
    }
+
+
+
    /**
     * Seeds mResourceIndices with one bookkeeping JSONObject per cube surface, holding
     * that surface's rotation offset, the resource (user) index it shows, and a
     * media-index slot per potential media item.
     *
     * NOTE(review): each "mResourceIndices.put(SURFACE_n).put(resource)" line below is
     * suspect — JSONArray.put(int) APPENDS the int value and returns the array, so each
     * of these appends a stray integer followed by a duplicate of the resource object.
     * Indices 0-3 still end up holding the four surface objects (the later indexed
     * put(index, value) calls overwrite them), but the array grows with junk entries;
     * confirm nothing iterates mResourceIndices by length before cleaning this up.
     * Also: mediaIndex is initialized to 0 and never incremented, so every media-index
     * slot starts at 0 — presumably advanced elsewhere at playback time; confirm.
     */
    private void initResourceIndices() {

        try{
            JSONObject resource = new JSONObject();
            resource.put(mContext.getString(R.string.surface_number), SURFACE_1);
            resource.put(mContext.getString(R.string.rotations), 0);
            resource.put(mContext.getString(R.string.resource_index), 0);
            mResourceIndices.put(SURFACE_1, resource);
            JSONArray mediaIndexArray1 = new JSONArray();
            int mediaIndex = 0;
            //populate a bunch of media indices for each potential media.
            for(int i = 0; i < mHighestNumberMedia; i++){
                JSONObject mediaIndexObject = new JSONObject();
                mediaIndexObject.put(mContext.getString(R.string.media_index), mediaIndex);
                mediaIndexArray1.put(i, mediaIndexObject);
            }
            resource.put(mContext.getString(R.string.media_index), mediaIndexArray1);
            mResourceIndices.put(SURFACE_1).put(resource);
            Log.d(TAG, "initResourceIndices: resourceIndices1: " + mResourceIndices.get(SURFACE_1));

            resource = new JSONObject();
            resource.put(mContext.getString(R.string.surface_number), SURFACE_2);
            resource.put(mContext.getString(R.string.rotations), -1);
            resource.put(mContext.getString(R.string.resource_index), 1);
            mResourceIndices.put(SURFACE_2, resource);
            JSONArray mediaIndexArray2 = new JSONArray();
            //populate a bunch of media indices for each potential media.
            for(int i = 0; i < mHighestNumberMedia; i++){
                JSONObject mediaIndexObject = new JSONObject();
                mediaIndexObject.put(mContext.getString(R.string.media_index), mediaIndex);
                mediaIndexArray2.put(i, mediaIndexObject);
            }
            resource.put(mContext.getString(R.string.media_index), mediaIndexArray2);
            mResourceIndices.put(SURFACE_2).put(resource);
            Log.d(TAG, "initResourceIndices: resourceIndices2: " + mResourceIndices.get(SURFACE_2));

            resource = new JSONObject();
            resource.put(mContext.getString(R.string.surface_number), SURFACE_3);
            resource.put(mContext.getString(R.string.rotations), 2);
            resource.put(mContext.getString(R.string.resource_index), 2);
            mResourceIndices.put(SURFACE_3, resource);
            JSONArray mediaIndexArray3 = new JSONArray();
            //populate a bunch of media indices for each potential media.
            for(int i = 0; i < mHighestNumberMedia; i++){
                JSONObject mediaIndexObject = new JSONObject();
                mediaIndexObject.put(mContext.getString(R.string.media_index), mediaIndex);
                mediaIndexArray3.put(i, mediaIndexObject);
            }
            resource.put(mContext.getString(R.string.media_index), mediaIndexArray3);
            mResourceIndices.put(SURFACE_3).put(resource);
            Log.d(TAG, "initResourceIndices: resourceIndices3: " + mResourceIndices.get(SURFACE_3));

            resource = new JSONObject();
            resource.put(mContext.getString(R.string.surface_number), SURFACE_4);
            resource.put(mContext.getString(R.string.rotations), 1);
            resource.put(mContext.getString(R.string.resource_index), 3);
            mResourceIndices.put(SURFACE_4, resource);
            JSONArray mediaIndexArray4 = new JSONArray();
            //populate a bunch of media indices for each potential media.
            for(int i = 0; i < mHighestNumberMedia; i++){
                JSONObject mediaIndexObject = new JSONObject();
                mediaIndexObject.put(mContext.getString(R.string.media_index), mediaIndex);
                mediaIndexArray4.put(i, mediaIndexObject);
            }
            resource.put(mContext.getString(R.string.media_index), mediaIndexArray4);
            mResourceIndices.put(SURFACE_4).put(resource);
            Log.d(TAG, "initResourceIndices: resourceIndices4: " + mResourceIndices.get(SURFACE_4));

        }catch (JSONException e){
            Log.e(TAG, "initResourceIndices: JSONException: " + e.getMessage() );
        }

    }
+
+ private boolean isMediaVideo(String uri){
+ if(uri.contains(".mp4") || uri.contains(".wmv") || uri.contains(".flv") || uri.contains(".avi")){
+ return true;
+ }
+ return false;
+ }
+
+// private void getMedia(final ArrayList mediaSource, final int surfaceIndex){
+ private void getMedia(final JSONArray mediaSource, final int surfaceIndex){
+ Log.d(TAG, "getMedia: getting images from urls");
+
+ Log.d(TAG, "getMedia: media source length: " + mediaSource.length());
+ Log.d(TAG, "getMedia: getting media for surface index: " + surfaceIndex);
+// if(mResources.get(surfaceIndex) == null) {
+ for(int i = 0; i < mediaSource.length(); i++) {
+
+ try{
+ final int count = i;
+ String videoUri = "";
+ try{
+ videoUri = mediaSource.getJSONObject(count).get(mContext.getString(R.string.field_video_uri)).toString();
+ }catch (JSONException e){
+ e.printStackTrace();
+ videoUri = "";
+ }
+// if (isMediaVideo(videoUri)) {
+ if (!videoUri.equals("")) {
+ JSONObject object = new JSONObject();
+ try {
+ object.put(mContext.getString(R.string.media_type), mContext.getString(R.string.video_uri));
+ object.put(mContext.getString(R.string.video_uri), videoUri);
+ object.put(mContext.getString(R.string.media_source), buildMediaSource(Uri.parse(videoUri)));
+ object.put(mContext.getString(R.string.duration), mediaSource.getJSONObject(count).get(mContext.getString(R.string.field_duration)));
+ Log.d(TAG, "getMedia: duration: " + mediaSource.getJSONObject(count).get(mContext.getString(R.string.field_duration)));
+ try{
+ JSONArray jsonArray = mResources.get(surfaceIndex);
+ jsonArray.put(count, object);
+ mResources.set(surfaceIndex, jsonArray);
+ Log.d(TAG, "getMedia: RESOURCES FOR " + (surfaceIndex + 1) + ": " + mResources.get(surfaceIndex));
+ Log.d(TAG, "getMedia: setting video for surface " + (surfaceIndex + 1) + ", " + object.get(mContext.getString(R.string.media_type)));
+ Log.d(TAG, "getMedia: setting video for surface " + (surfaceIndex + 1) + ", " + + count);
+ }catch (NullPointerException e) {
+ Log.e(TAG, "onResourceReady: First video added to resources array. " + e.getMessage() );
+ JSONArray jsonArray = new JSONArray();
+ jsonArray.put(count, object);
+ mResources.set(surfaceIndex, jsonArray);
+ Log.d(TAG, "getMedia: RESOURCES FOR " + (surfaceIndex + 1) + ": " + mResources.get(surfaceIndex));
+ Log.d(TAG, "getMedia: * setting video for surface " + (surfaceIndex + 1) + ", " + object.get(mContext.getString(R.string.media_type)));
+ Log.d(TAG, "getMedia: * setting video for surface " + (surfaceIndex + 1) + ", " + + count);
+ }
+ if(surfaceIndex == SURFACE_1){
+ mVideo1Index++;
+ }
+ else if(surfaceIndex == SURFACE_2){
+ mVideo2Index++;
+ }
+ else if(surfaceIndex == SURFACE_3){
+ mVideo3Index++;
+ }
+ else if(surfaceIndex == SURFACE_4){
+ mVideo4Index++;
+ }
+ if(mVideo1Index == 1 || mVideo2Index == 1 || mVideo3Index == 1 || mVideo4Index == 1){
+ Log.d(TAG, "getMedia: buffering first video for surface " + (surfaceIndex + 1));
+ bufferFirstVideo(surfaceIndex, count);
+ }
+ else if(mVideo1Index == 2 || mVideo2Index == 2 || mVideo3Index == 2 || mVideo4Index == 2){
+ Log.d(TAG, "getMedia: buffering second video for surface " + (surfaceIndex + 1));
+ bufferNextVideo(surfaceIndex);
+ }
+ } catch (JSONException e) {
+ e.printStackTrace();
+ }
+
+ } else {
+
+ SimpleTarget target = new SimpleTarget() {
+
+ @Override
+ public void onResourceReady(Bitmap bitmap, Transition transition) {
+ Log.d(TAG, "getMedia: new bitmap ready.");
+
+
+ float imageHeight = bitmap.getHeight();
+ float imageWidth = bitmap.getWidth();
+ float heightScaleFactor = 1f;
+ float widthScaleFactor = 1f;
+
+ // Log.d(TAG, "getMedia: IMAGE WIDTH: " + imageWidth);
+ // Log.d(TAG, "getMedia: IMAGE HEIGHT: " + imageHeight);
+
+ if (imageWidth > imageHeight) {
+ //scale the height to match the width (#1)
+ heightScaleFactor = (widthScaleFactor * imageHeight) / (screenRatio * imageWidth);
+ } else if (imageHeight > imageWidth) {
+ if (imageWidth < screenWidth) {
+ //scale the width to match the height (#2)
+ // widthScaleFactor = (imageWidth / imageHeight) * screenRatio;
+ if (screenWidth / imageWidth < 2) {
+ heightScaleFactor = 2 - (screenWidth / imageWidth);
+ }
+ } else if (imageWidth > screenWidth) {
+ // This one doesn't work for all cases. But the image sizes are so weird
+ // that we shouldn't have to worry about it
+ heightScaleFactor = (widthScaleFactor * imageHeight) / (screenRatio * imageWidth);
+ }
+ }
+
+ // Log.d(TAG, "getMedia: count: " + count );
+ // Log.d(TAG, "getMedia: wsf, hsf: " + widthScaleFactor + ", " + heightScaleFactor);
+
+ JSONObject object = new JSONObject();
+ try {
+ // Log.d(TAG, "getMedia: getting " + count + " resource.");
+ object.put(mContext.getString(R.string.media_type), mContext.getString(R.string.encoded_bitmap));
+ // object.put(mContext.getString(R.string.encoded_bitmap), encodeToBase64(bitmap, Bitmap.CompressFormat.JPEG, 100));
+ ByteArrayOutputStream stream = new ByteArrayOutputStream();
+ bitmap.compress(Bitmap.CompressFormat.JPEG, 100, stream);
+ byte[] byteArray = stream.toByteArray();
+ Bitmap compressedBitmap = BitmapFactory.decodeByteArray(byteArray,0,byteArray.length);
+ object.put(mContext.getString(R.string.encoded_bitmap), compressedBitmap);
+ object.put(mContext.getString(R.string.width_scale_factor), widthScaleFactor);
+ object.put(mContext.getString(R.string.height_scale_factor), heightScaleFactor);
+
+ try{
+ JSONArray jsonArray = mResources.get(surfaceIndex);
+ jsonArray.put(count, object);
+ mResources.set(surfaceIndex, jsonArray);
+ Log.d(TAG, "onResourceReady: RESOURCES FOR " + (surfaceIndex + 1) + ": " + mResources.get(surfaceIndex));
+ Log.d(TAG, "onResourceReady: setting image for surface " + (surfaceIndex + 1) + ", " + object.get(mContext.getString(R.string.media_type)));
+ Log.d(TAG, "onResourceReady: setting image for surface " + (surfaceIndex + 1) + ", " + + count);
+ }catch (NullPointerException e) {
+ Log.e(TAG, "onResourceReady: First photo added to resources array." + e.getMessage() );
+ e.printStackTrace();
+ JSONArray jsonArray = new JSONArray();
+ jsonArray.put(count, object);
+ mResources.set(surfaceIndex, jsonArray);
+ Log.d(TAG, "onResourceReady: RESOURCES FOR " + (surfaceIndex + 1) + ": " + mResources.get(surfaceIndex));
+ Log.d(TAG, "onResourceReady: * setting image for surface " + (surfaceIndex + 1) + ", " + object.get(mContext.getString(R.string.media_type)));
+ Log.d(TAG, "onResourceReady: * setting image for surface " + (surfaceIndex + 1) + ", " + + count);
+ }
+ }
+ catch (JSONException e) {
+ e.printStackTrace();
+ }
+ }
+ };
+ String imageUri = mediaSource.getJSONObject(count).get(mContext.getString(R.string.field_image_uri)).toString();
+ Glide.with(mContext.getApplicationContext())
+ .asBitmap()
+ .load(imageUri)
+ .into(target);
+ mTargets.add(target);
+ }
+ // printCurrentResources();
+ }catch (JSONException e){
+ e.printStackTrace();
+ }
+ }
+// }
+
+ }
+
+
+ private void setImageToIndex(GL10 gl, int surfaceNumber, boolean imageRenderError){
+
+// if(surfaceNumber == mCurrentSurface){
+// hideProgressBar();
+// }
+ try {
+ final int resourceIndex = mResourceIndices.getJSONObject(surfaceNumber).getInt(mContext.getString(R.string.resource_index));
+ final int mediaIndex = mResourceIndices.getJSONObject(surfaceNumber).getJSONArray(mContext.getString(R.string.media_index))
+ .getJSONObject(resourceIndex / 4).getInt(mContext.getString(R.string.media_index));
+
+ if(mediaIndex >= 0){
+ String resourceType = "";
+ try{
+ resourceType = mResources.get(resourceIndex).getJSONObject(mediaIndex).get(mContext.getString(R.string.media_type)).toString();
+ }catch (JSONException e){
+// Log.e(TAG, "setImageToIndex: JSONException: " + e.getMessage() );
+ resourceType = mContext.getString(R.string.image_load_error);
+ }
+ catch (NullPointerException e) {
+ Log.e(TAG, "setImageToIndex: NullPointerException: " + e.getMessage());
+ resourceType = mContext.getString(R.string.image_load_error);
+ }
+ if(resourceType.equals(mContext.getString(R.string.image_load_error)) || resourceType.equals(mContext.getString(R.string.encoded_bitmap))){
+ if(!isImage1Set && surfaceNumber == SURFACE_1) {
+ Log.d(TAG, "setImageToIndex: setting image to surface: " + (SURFACE_1 + 1) + ", Media Index: " + mediaIndex);
+
+ printCurrentResources();
+ if(mCurrentSurface == SURFACE_1 && mediaIndex == 0){
+ initProgressBars();
+ }
+
+ gl.glEnable(GL10.GL_TEXTURE_2D);
+ gl.glGenTextures(1, textureId1, 0);
+ gl.glBindTexture(GL10.GL_TEXTURE_2D, textureId1[0]);
+ gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);
+ gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
+
+// Bitmap bitmap = decodeBase64(mResources.get(mResourceIndices[0][0][0]).getJSONObject(mMediaIndex).get(mContext.getString(R.string.encoded_bitmap)).toString());
+ Bitmap bitmap = null;
+ if(imageRenderError){
+ bitmap = BitmapFactory.decodeResource(mContext.getResources(), R.drawable.android_construction);
+ Log.d(TAG, "setImageToIndex: setting default image to surface 1.");
+ }
+ else{
+ Log.d(TAG, "setImageToIndex: setting resource image to surface 1.");
+// bitmap = decodeBase64(mResources.get(resourceIndex).getJSONObject(mediaIndex).get(mContext.getString(R.string.encoded_bitmap)).toString());
+ bitmap = (Bitmap) mResources.get(resourceIndex).getJSONObject(mediaIndex).get(mContext.getString(R.string.encoded_bitmap));
+ isImage1Set = true;
+ }
+ Log.d(TAG, "setImageToIndex: image1 media index: " + mediaIndex);
+ Log.d(TAG, "setImageToIndex: image1 bitmap: " + bitmap);
+ GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);
+
+ }
+ if(!isImage2Set && surfaceNumber == SURFACE_2) {
+ Log.d(TAG, "setImageToIndex: setting image to surface: " + (SURFACE_2 + 1) + ", Media Index: " + mediaIndex);
+
+ if(mCurrentSurface == SURFACE_2 && mediaIndex == 0){
+ initProgressBars();
+ }
+
+ gl.glEnable(GL10.GL_TEXTURE_2D);
+ gl.glGenTextures(1, textureId2, 0);
+ gl.glBindTexture(GL10.GL_TEXTURE_2D, textureId2[0]);
+ gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);
+ gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
+
+// Bitmap bitmap = decodeBase64(mResources.get(mResourceIndices[0][0][0]).getJSONObject(mMediaIndex).get(mContext.getString(R.string.encoded_bitmap)).toString());
+ Bitmap bitmap = null;
+ if(imageRenderError){
+ bitmap = BitmapFactory.decodeResource(mContext.getResources(),
+ R.drawable.android_construction);
+ }
+ else{
+// bitmap = decodeBase64(mResources.get(resourceIndex).getJSONObject(mediaIndex).get(mContext.getString(R.string.encoded_bitmap)).toString());
+ bitmap = (Bitmap) mResources.get(resourceIndex).getJSONObject(mediaIndex).get(mContext.getString(R.string.encoded_bitmap));
+ isImage2Set = true;
+ }
+ GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);
+
+
+ }
+ if(!isImage3Set && surfaceNumber == SURFACE_3) {
+ Log.d(TAG, "setImageToIndex: setting image to surface: " + (SURFACE_3 + 1) + ", Media Index: " + mediaIndex);
+
+ if(mCurrentSurface == SURFACE_3 && mediaIndex == 0){
+ initProgressBars();
+ }
+
+ gl.glEnable(GL10.GL_TEXTURE_2D);
+ gl.glGenTextures(1, textureId3, 0);
+ gl.glBindTexture(GL10.GL_TEXTURE_2D, textureId3[0]);
+ gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);
+ gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
+
+// Bitmap bitmap = decodeBase64(mResources.get(mResourceIndices[0][0][0]).getJSONObject(mMediaIndex).get(mContext.getString(R.string.encoded_bitmap)).toString());
+ Bitmap bitmap = null;
+ if(imageRenderError){
+ bitmap = BitmapFactory.decodeResource(mContext.getResources(),
+ R.drawable.android_construction);
+ }
+ else{
+// bitmap = decodeBase64(mResources.get(resourceIndex).getJSONObject(mediaIndex).get(mContext.getString(R.string.encoded_bitmap)).toString());
+ bitmap = (Bitmap) mResources.get(resourceIndex).getJSONObject(mediaIndex).get(mContext.getString(R.string.encoded_bitmap));
+ isImage3Set = true;
+ }
+ GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);
+
+ }
+ if(!isImage4Set && surfaceNumber == SURFACE_4) {
+ Log.d(TAG, "setImageToIndex: setting image to surface: " + (SURFACE_4 + 1) + ", Media Index: " + mediaIndex);
+
+ if(mCurrentSurface == SURFACE_4 && mediaIndex == 0){
+ initProgressBars();
+ }
+
+ gl.glEnable(GL10.GL_TEXTURE_2D);
+ gl.glGenTextures(1, textureId4, 0);
+ gl.glBindTexture(GL10.GL_TEXTURE_2D, textureId4[0]);
+ gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);
+ gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
+
+// Bitmap bitmap = decodeBase64(mResources.get(mResourceIndices[0][0][0]).getJSONObject(mMediaIndex).get(mContext.getString(R.string.encoded_bitmap)).toString());
+ Bitmap bitmap = null;
+ if(imageRenderError){
+ bitmap = BitmapFactory.decodeResource(mContext.getResources(),
+ R.drawable.android_construction);
+ }
+ else {
+// bitmap = decodeBase64(mResources.get(resourceIndex).getJSONObject(mediaIndex).get(mContext.getString(R.string.encoded_bitmap)).toString());
+ bitmap = (Bitmap) mResources.get(resourceIndex).getJSONObject(mediaIndex).get(mContext.getString(R.string.encoded_bitmap));
+ isImage4Set = true;
+ }
+ GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);
+
+
+ }
+
+ }
+ //scale the media
+// gl.glScalef(mScaleFactors[mSurfaceIndex][mMediaIndex][0], mScaleFactors[mSurfaceIndex][mMediaIndex][1], 1);
+// gl.glScalef(mScaleFactors[mSurfaceIndex][mMediaIndices[mSurfaceIndex][0]][0], mScaleFactors[mSurfaceIndex][mMediaIndices[mSurfaceIndex][0]][1], 1);
+ }
+ } catch (NullPointerException e) {
+ Log.e(TAG, "setImageToIndex: NullPointerException: " + e.getMessage());
+ } catch (JSONException e) {
+ e.printStackTrace();
+ }
+ }
+
+
+ public void onDrawFrame(GL10 gl) {
+
+ // Draw background color
+ gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
+
+
+// Log.d(TAG, "onDrawFrame: angle: " + angleRectangle);
+ setDepth(gl);
+ if (Math.abs(startPositionX - endPositionX) > (screenWidth / 2) && isStopped() && isRotationEnabled && !mRotateCounterClockwise && !mRotateClockwise) {
+ setStopped(false);
+ if (startPositionX > endPositionX && mAllowRotationClockwise) {
+ mRotateClockwise = true;
+ } else if (startPositionX < endPositionX) {
+ mRotateCounterClockwise = true;
+ }
+ } else if (Math.abs(startPositionX - endPositionX) < (screenWidth / 2) && isStopped() && isRotationEnabled && !mRotateCounterClockwise && !mRotateClockwise) {
+ setStopped(false);
+ if (startPositionX > endPositionX) {
+ mRotateCounterClockwise = true;
+ } else if (startPositionX < endPositionX && mAllowRotationClockwise) {
+ mRotateClockwise = true;
+ }
+ } else if (!mRotateCounterClockwise && !mRotateClockwise && !isStopped()) {
+// Log.d(TAG, "onDrawFrame: THIS ONE" + angleRectangle);
+ gl.glRotatef(angleRectangle, 0.0f, -1.0f, 0.0f);
+ }
+
+ if (mRotateClockwise && mAllowRotationClockwise) {
+// Log.d(TAG, "onDrawFrame: OR THIS ONE" + angleRectangle);
+ rotateClockwise(gl);
+ } else if (mRotateCounterClockwise) {
+// Log.d(TAG, "onDrawFrame: MAYBE THIS ONE: " + angleRectangle);
+ rotateCounterClockwise(gl);
+ }
+
+ //////////////////////////////
+ //surface 1 logic
+ try {
+ final int surface1ResourceIndex = mResourceIndices.getJSONObject(SURFACE_1).getInt(mContext.getString(R.string.resource_index));
+ final int surface1MediaIndex = mResourceIndices.getJSONObject(SURFACE_1).getJSONArray(mContext.getString(R.string.media_index))
+ .getJSONObject(surface1ResourceIndex / 4).getInt(mContext.getString(R.string.media_index));
+// Log.d(TAG, "onDrawFrame: surface1 media index: " + surface1MediaIndex);
+// Log.d(TAG, "onDrawFrame: surface1 resource index: " + surface1ResourceIndex);
+// Log.d(TAG, "onDrawFrame: surface1 num rotations: " + surface1NumRotations);
+ if (surface1MediaIndex >= 0) {
+ String resourceType = "";
+ boolean imageRenderError = false;
+ try {
+ resourceType = mResources.get(surface1ResourceIndex).getJSONObject(surface1MediaIndex).get(mContext.getString(R.string.media_type)).toString();
+ } catch (JSONException e) {
+// Log.e(TAG, "onDrawFrame: error getting bitmap: " + e.getMessage() );
+ imageRenderError = true;
+ }
+
+ if (resourceType.equals(mContext.getString(R.string.encoded_bitmap)) || imageRenderError) {
+// Log.d(TAG, "onDrawFrame: rendering SURFACE 1 image");
+
+ float widthScaleFactor = 1f;
+ float heightScaleFactor = 1f;
+ if (imageRenderError && !isImage1Set) {
+ widthScaleFactor = mDefaultImageWidthScaleFactor;
+ heightScaleFactor = mDefaultImageHeightScaleFactor;
+ } else {
+ widthScaleFactor = Float.valueOf(String.valueOf(mResources.get(surface1ResourceIndex).getJSONObject(surface1MediaIndex).get(mContext.getString(R.string.width_scale_factor))));
+ heightScaleFactor = Float.valueOf(String.valueOf(mResources.get(surface1ResourceIndex).getJSONObject(surface1MediaIndex).get(mContext.getString(R.string.height_scale_factor))));
+ }
+
+ // Define the vertices for this face
+ float[] imageVertices = {
+ widthMatrix[SURFACE_1][0] * widthScaleFactor, heightMatrix[SURFACE_1][0] * heightScaleFactor, depthMatrix[SURFACE_1][0],
+ widthMatrix[SURFACE_1][1] * widthScaleFactor, heightMatrix[SURFACE_1][1] * heightScaleFactor, depthMatrix[SURFACE_1][1],
+ widthMatrix[SURFACE_1][2] * widthScaleFactor, heightMatrix[SURFACE_1][2] * heightScaleFactor, depthMatrix[SURFACE_1][2],
+ widthMatrix[SURFACE_1][3] * widthScaleFactor, heightMatrix[SURFACE_1][3] * heightScaleFactor, depthMatrix[SURFACE_1][3],
+ };
+ if (vertexBuffer1 != null) {
+ vertexBuffer1.clear();
+ }
+ ByteBuffer vbb = ByteBuffer.allocateDirect(imageVertices.length * 6 * 4);
+ vbb.order(ByteOrder.nativeOrder());
+ vertexBuffer1 = vbb.asFloatBuffer();
+
+ vertexBuffer1.put(imageVertices);
+ vertexBuffer1.position(0);
+
+ //surface1 image media
+ gl.glLoadIdentity();
+ gl.glEnable(GL10.GL_TEXTURE_2D); //ENABLE IMAGE TEXTURES
+
+ gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
+ gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer1);
+
+ setImageToIndex(gl, SURFACE_1, imageRenderError);
+
+ gl.glTranslatef(0, 0, depth);
+ gl.glRotatef(angleRectangle, 0.0f, 1.0f, 0.0f);
+ gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
+ gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, textureBuffer1);
+
+ gl.glTexEnvf(GL10.GL_TEXTURE_ENV, GL10.GL_TEXTURE_ENV_MODE, GL10.GL_REPLACE);
+ gl.glBindTexture(GL10.GL_TEXTURE_2D, textureId1[0]);
+ gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
+
+
+ gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
+ gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
+ gl.glDisable(GL10.GL_TEXTURE_2D);
+
+
+// if (mVertexBuffers[SURFACE_1][1] == null && widthScaleFactor != -1) {
+ if (widthScaleFactor != -1) {
+// Log.d(TAG, "SCALE FACTOR: WSF: " + widthScaleFactor);
+// Log.d(TAG, "SCALE FACTOR: HSF: " + heightScaleFactor);
+
+ float[] topVertices = {
+ widthMatrix[SURFACE_1][0] * widthScaleFactor, screenRatio * heightScaleFactor, depthMatrix[SURFACE_1][0],
+ widthMatrix[SURFACE_1][1] * widthScaleFactor, screenRatio * heightScaleFactor, depthMatrix[SURFACE_1][1],
+ widthMatrix[SURFACE_1][2] * widthScaleFactor, heightMatrix[SURFACE_1][2], depthMatrix[SURFACE_1][2],
+ widthMatrix[SURFACE_1][3] * widthScaleFactor, heightMatrix[SURFACE_1][3], depthMatrix[SURFACE_1][3],
+ };
+
+ vbb = ByteBuffer.allocateDirect(topVertices.length * 6 * 4);
+ vbb.order(ByteOrder.nativeOrder());
+ FloatBuffer bufferTop = vbb.asFloatBuffer();
+ mVertexBuffers[SURFACE_1][1] = bufferTop.put(topVertices);
+ mVertexBuffers[SURFACE_1][1].position(0);
+ }
+// if (mVertexBuffers[SURFACE_1][2] == null && widthScaleFactor != -1) {
+ if (widthScaleFactor != -1) {
+// Log.d(TAG, "SCALE FACTOR: WSF: " + widthScaleFactor);
+// Log.d(TAG, "SCALE FACTOR: HSF: " + heightScaleFactor);
+
+ float[] botVertices = {
+ widthMatrix[SURFACE_1][0] * widthScaleFactor, heightMatrix[SURFACE_1][0], depthMatrix[SURFACE_1][0],
+ widthMatrix[SURFACE_1][1] * widthScaleFactor, heightMatrix[SURFACE_1][1], depthMatrix[SURFACE_1][1],
+ widthMatrix[SURFACE_1][2] * widthScaleFactor, -screenRatio * heightScaleFactor, depthMatrix[SURFACE_1][2],
+ widthMatrix[SURFACE_1][3] * widthScaleFactor, -screenRatio * heightScaleFactor, depthMatrix[SURFACE_1][3],
+ };
+
+ vbb = ByteBuffer.allocateDirect(botVertices.length * 6 * 4);
+ vbb.order(ByteOrder.nativeOrder());
+ FloatBuffer bufferBot = vbb.asFloatBuffer();
+ mVertexBuffers[SURFACE_1][2] = bufferBot.put(botVertices);
+ mVertexBuffers[SURFACE_1][2].position(0);
+ }
+
+
+ if (mVertexBuffers[SURFACE_1][2] != null) {
+ //surface1 bot
+ gl.glLoadIdentity();
+ gl.glDisable(GL10.GL_TEXTURE_2D); //DISABLE TEXTURE WHEN BUILDING FACES
+
+ gl.glTranslatef(0, 0, depth);
+ gl.glRotatef(angleRectangle, 0.0f, 1.0f, 0.0f);
+
+ gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
+ gl.glVertexPointer(3, GL10.GL_FLOAT, 0, mVertexBuffers[SURFACE_1][2]);
+
+ gl.glColor4f(0f, 0f, 0.0f, 1); //set the block black
+ gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
+
+ gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
+
+ }
+
+ if (mVertexBuffers[SURFACE_1][1] != null) {
+ //surface1 top
+ gl.glLoadIdentity();
+ gl.glDisable(GL10.GL_TEXTURE_2D); //DISABLE TEXTURE WHEN BUILDING FACES
+
+ gl.glTranslatef(0, 0, depth);
+ gl.glRotatef(angleRectangle, 0.0f, 1.0f, 0.0f);
+
+ gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
+ gl.glVertexPointer(3, GL10.GL_FLOAT, 0, mVertexBuffers[SURFACE_1][1]);
+
+ gl.glColor4f(0f, 0f, 0.0f, 1); //set the block black
+ gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
+
+ gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
+ }
+ } else if (mResources.get(surface1ResourceIndex).getJSONObject(surface1MediaIndex).get(mContext.getString(R.string.media_type)).equals(mContext.getString(R.string.video_uri))) {
+
+// Log.d(TAG, "onDrawFrame: rendering SURFACE 1 video");
+
+ // Define the vertices for this face
+ float[] imageVertices = {
+ widthMatrix[SURFACE_1][0], heightMatrix[SURFACE_1][0], depthMatrix[SURFACE_1][0],
+ widthMatrix[SURFACE_1][1], heightMatrix[SURFACE_1][1], depthMatrix[SURFACE_1][1],
+ widthMatrix[SURFACE_1][2], heightMatrix[SURFACE_1][2], depthMatrix[SURFACE_1][2],
+ widthMatrix[SURFACE_1][3], heightMatrix[SURFACE_1][3], depthMatrix[SURFACE_1][3],
+ };
+ if (vertexBuffer1 != null) {
+ vertexBuffer1.clear();
+ }
+ ByteBuffer vbb = ByteBuffer.allocateDirect(imageVertices.length * 6 * 4);
+ vbb.order(ByteOrder.nativeOrder());
+ vertexBuffer1 = vbb.asFloatBuffer();
+
+ vertexBuffer1.put(imageVertices);
+ vertexBuffer1.position(0);
+
+ if (mPlayerState == ACTIVE_PLAYER && !mPlayer.getPlayWhenReady() && mCurrentSurface == SURFACE_1) {
+ Log.d(TAG, "onDrawFrame: playing player 1");
+ mPlayer.setPlayWhenReady(true);
+// retryPlayer = false;
+// startProgressBar();
+ } else if (mSecondaryPlayerState == ACTIVE_PLAYER && !mSecondaryPlayer.getPlayWhenReady() && mCurrentSurface == SURFACE_1) {
+ Log.d(TAG, "onDrawFrame: playing secondary player 1");
+ mSecondaryPlayer.setPlayWhenReady(true);
+// retrySecondaryPlayer = false;
+// startProgressBar();
+ }
+
+
+ // surface 1 video media
+ gl.glLoadIdentity();
+ gl.glEnable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES); //ENABLE VIDEO TEXTURES
+
+// if(initVideoTexture1){
+// Log.d(TAG, "onDrawFrame: initializing video texture1.");
+// gl.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId1[0]);
+// gl.glGenTextures(1, textureId1 , 0);
+// gl.glTexParameterf( GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_WRAP_S, GL10.GL_REPEAT );
+// gl.glTexParameterf( GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_WRAP_T, GL10.GL_REPEAT );
+// gl.glTexParameterf( GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST );
+// gl.glTexParameterf( GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR );
+// initVideoTexture1 = false;
+// }
+ gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
+ gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer1);
+
+// gl.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId1[0]);
+
+ gl.glTranslatef(0, 0, depth);
+ gl.glRotatef(-(angleRectangle), 0.0f, -1.0f, 0.0f);
+ gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
+ gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, textureBuffer1);
+
+// gl.glGenTextures(1, textureId1, 0);
+// gl.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId1[0]); //causes error (doesn't crash)
+// gl.glTexEnvf(GL10.GL_TEXTURE_ENV, GL10.GL_TEXTURE_ENV_MODE, GL10.GL_REPLACE);
+// gl.glTexEnvf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_ENV_MODE, GL10.GL_REPLACE);
+
+ gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
+
+ gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
+ gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
+ gl.glDisable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES); //DISABLE VIDEO TEXTURES
+
+
+ if (mUpdateST) {
+ try {
+ //hideProgressBar();
+ mSurfaceTexture.updateTexImage();
+
+ Log.d(TAG, "onDrawFrame: updating surface1 frame");
+
+ } catch (IllegalStateException e) {
+ e.printStackTrace();
+ Log.e(TAG, "onFrameAvailable: IllegalStateException surface1: " + e.getMessage());
+ } catch (RuntimeException e) {
+ e.printStackTrace();
+ Log.e(TAG, "onFrameAvailable: RuntimeException surface1: " + e.getMessage());
+ }
+ mUpdateST = false;
+ }
+ }
+ }
+ } catch (JSONException e) {
+ e.printStackTrace();
+ } catch (NullPointerException e) {
+// e.printStackTrace();
+ }
+
+
+ //////////////////////////////
+ //surface 2 logic
+ try {
+ final int surface2ResourceIndex = mResourceIndices.getJSONObject(SURFACE_2).getInt(mContext.getString(R.string.resource_index));
+ final int surface2MediaIndex = mResourceIndices.getJSONObject(SURFACE_2).getJSONArray(mContext.getString(R.string.media_index))
+ .getJSONObject(surface2ResourceIndex / 4).getInt(mContext.getString(R.string.media_index));
+// Log.d(TAG, "onDrawFrame: surface1 media index: " + surface1MediaIndex);
+// Log.d(TAG, "onDrawFrame: surface1 resource index: " + surface1ResourceIndex);
+// Log.d(TAG, "onDrawFrame: surface1 num rotations: " + surface1NumRotations);
+
+ if (surface2MediaIndex >= 0) {
+ String resourceType = "";
+ boolean imageRenderError = false;
+ try {
+ resourceType = mResources.get(surface2ResourceIndex).getJSONObject(surface2MediaIndex).get(mContext.getString(R.string.media_type)).toString();
+ } catch (JSONException e) {
+// Log.e(TAG, "onDrawFrame: error getting bitmap: " + e.getMessage() );
+ imageRenderError = true;
+ }
+ if (resourceType.equals(mContext.getString(R.string.encoded_bitmap)) || imageRenderError) {
+// Log.d(TAG, "onDrawFrame: rendering surface 2 image");
+
+ float widthScaleFactor = 1f;
+ float heightScaleFactor = 1f;
+ if (imageRenderError) {
+ widthScaleFactor = mDefaultImageWidthScaleFactor;
+ heightScaleFactor = mDefaultImageHeightScaleFactor;
+ } else {
+ widthScaleFactor = Float.valueOf(String.valueOf(mResources.get(surface2ResourceIndex).getJSONObject(surface2MediaIndex).get(mContext.getString(R.string.width_scale_factor))));
+ heightScaleFactor = Float.valueOf(String.valueOf(mResources.get(surface2ResourceIndex).getJSONObject(surface2MediaIndex).get(mContext.getString(R.string.height_scale_factor))));
+ }
+ // Define the vertices for this face
+ float[] imageVertices = {
+ widthMatrix[SURFACE_2][0] * widthScaleFactor, heightMatrix[SURFACE_2][0] * heightScaleFactor, depthMatrix[SURFACE_2][0],
+ widthMatrix[SURFACE_2][1] * widthScaleFactor, heightMatrix[SURFACE_2][1] * heightScaleFactor, depthMatrix[SURFACE_2][1],
+ widthMatrix[SURFACE_2][2] * widthScaleFactor, heightMatrix[SURFACE_2][2] * heightScaleFactor, depthMatrix[SURFACE_2][2],
+ widthMatrix[SURFACE_2][3] * widthScaleFactor, heightMatrix[SURFACE_2][3] * heightScaleFactor, depthMatrix[SURFACE_2][3],
+ };
+ if (vertexBuffer2 != null) {
+ vertexBuffer2.clear();
+ }
+ ByteBuffer vbb = ByteBuffer.allocateDirect(imageVertices.length * 6 * 4);
+ vbb.order(ByteOrder.nativeOrder());
+ vertexBuffer2 = vbb.asFloatBuffer();
+
+ vertexBuffer2.put(imageVertices);
+ vertexBuffer2.position(0);
+
+ //surface2 image media
+ gl.glLoadIdentity();
+ gl.glEnable(GL10.GL_TEXTURE_2D); //ENABLE IMAGE TEXTURES
+
+ gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
+ gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer2);
+
+ setImageToIndex(gl, SURFACE_2, imageRenderError);
+
+ gl.glTranslatef(0, 0, depth);
+ gl.glRotatef(angleRectangle, 0.0f, 1.0f, 0.0f);
+ gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
+ gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, textureBuffer2);
+
+
+ gl.glTexEnvf(GL10.GL_TEXTURE_ENV, GL10.GL_TEXTURE_ENV_MODE, GL10.GL_REPLACE);
+ gl.glBindTexture(GL10.GL_TEXTURE_2D, textureId2[0]);
+ gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
+
+
+ gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
+ gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
+ gl.glDisable(GL10.GL_TEXTURE_2D);
+
+
+// if (mVertexBuffers[SURFACE_2][1] == null && widthScaleFactor != -1) {
+ if (widthScaleFactor != -1) {
+// Log.d(TAG, "SCALE FACTOR: WSF: " + widthScaleFactor);
+// Log.d(TAG, "SCALE FACTOR: HSF: " + heightScaleFactor);
+
+ float[] topVertices = {
+ widthMatrix[SURFACE_2][0] * widthScaleFactor, screenRatio * heightScaleFactor, depthMatrix[SURFACE_2][0],
+ widthMatrix[SURFACE_2][1] * widthScaleFactor, screenRatio * heightScaleFactor, depthMatrix[SURFACE_2][1],
+ widthMatrix[SURFACE_2][2] * widthScaleFactor, heightMatrix[SURFACE_2][2], depthMatrix[SURFACE_2][2],
+ widthMatrix[SURFACE_2][3] * widthScaleFactor, heightMatrix[SURFACE_2][3], depthMatrix[SURFACE_2][3],
+ };
+ vbb = ByteBuffer.allocateDirect(topVertices.length * 6 * 4);
+ vbb.order(ByteOrder.nativeOrder());
+ FloatBuffer bufferTop = vbb.asFloatBuffer();
+ mVertexBuffers[SURFACE_2][1] = bufferTop.put(topVertices);
+ mVertexBuffers[SURFACE_2][1].position(0);
+ }
+// if (mVertexBuffers[SURFACE_2][2] == null && widthScaleFactor != -1) {
+ if (widthScaleFactor != -1) {
+// Log.d(TAG, "SCALE FACTOR: WSF: " + widthScaleFactor);
+// Log.d(TAG, "SCALE FACTOR: HSF: " + heightScaleFactor);
+
+ float[] botVertices = {
+ widthMatrix[SURFACE_2][0] * widthScaleFactor, heightMatrix[SURFACE_2][0], depthMatrix[SURFACE_2][0],
+ widthMatrix[SURFACE_2][1] * widthScaleFactor, heightMatrix[SURFACE_2][1], depthMatrix[SURFACE_2][1],
+ widthMatrix[SURFACE_2][2] * widthScaleFactor, -screenRatio * heightScaleFactor, depthMatrix[SURFACE_2][2],
+ widthMatrix[SURFACE_2][3] * widthScaleFactor, -screenRatio * heightScaleFactor, depthMatrix[SURFACE_2][3],
+ };
+ vbb = ByteBuffer.allocateDirect(botVertices.length * 6 * 4);
+ vbb.order(ByteOrder.nativeOrder());
+ FloatBuffer bufferBot = vbb.asFloatBuffer();
+ mVertexBuffers[SURFACE_2][2] = bufferBot.put(botVertices);
+ mVertexBuffers[SURFACE_2][2].position(0);
+ }
+
+
+ if (mVertexBuffers[SURFACE_2][2] != null) {
+ //surface2 bot
+ gl.glLoadIdentity();
+ gl.glDisable(GL10.GL_TEXTURE_2D); //DISABLE TEXTURE WHEN BUILDING FACES
+
+ gl.glTranslatef(0, 0, depth);
+ gl.glRotatef(angleRectangle, 0.0f, 1.0f, 0.0f);
+
+ gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
+ gl.glVertexPointer(3, GL10.GL_FLOAT, 0, mVertexBuffers[SURFACE_2][2]);
+
+ gl.glColor4f(0f, 0f, 0.0f, 1); //set the block black
+ gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
+
+ gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
+ }
+
+ if (mVertexBuffers[SURFACE_2][1] != null) {
+ //surface2 top
+ gl.glLoadIdentity();
+ gl.glDisable(GL10.GL_TEXTURE_2D); //DISABLE TEXTURE WHEN BUILDING FACES
+
+ gl.glTranslatef(0, 0, depth);
+ gl.glRotatef(angleRectangle, 0.0f, 1.0f, 0.0f);
+
+ gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
+ gl.glVertexPointer(3, GL10.GL_FLOAT, 0, mVertexBuffers[SURFACE_2][1]);
+
+ gl.glColor4f(0f, 0f, 0.0f, 1); //set the block black
+ gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
+
+ gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
+ }
+ } else if (mResources.get(surface2ResourceIndex).getJSONObject(surface2MediaIndex).get(mContext.getString(R.string.media_type)).equals(mContext.getString(R.string.video_uri))) {
+// Log.d(TAG, "onDrawFrame: rendering surface 2 video");
+
+ // Define the vertices for this face
+ float[] imageVertices = {
+ widthMatrix[SURFACE_2][0], heightMatrix[SURFACE_2][0], depthMatrix[SURFACE_2][0],
+ widthMatrix[SURFACE_2][1], heightMatrix[SURFACE_2][1], depthMatrix[SURFACE_2][1],
+ widthMatrix[SURFACE_2][2], heightMatrix[SURFACE_2][2], depthMatrix[SURFACE_2][2],
+ widthMatrix[SURFACE_2][3], heightMatrix[SURFACE_2][3], depthMatrix[SURFACE_2][3],
+ };
+ if (vertexBuffer2 != null) {
+ vertexBuffer2.clear();
+ }
+ ByteBuffer vbb = ByteBuffer.allocateDirect(imageVertices.length * 6 * 4);
+ vbb.order(ByteOrder.nativeOrder());
+ vertexBuffer2 = vbb.asFloatBuffer();
+
+ vertexBuffer2.put(imageVertices);
+ vertexBuffer2.position(0);
+
+ if (mPlayer2State == ACTIVE_PLAYER && !mPlayer2.getPlayWhenReady() && mCurrentSurface == SURFACE_2) {
+// Log.d(TAG, "onDrawFrame: playing player 2");
+ mPlayer2.setPlayWhenReady(true);
+// retryPlayer2 = false;
+ } else if (mSecondaryPlayer2State == ACTIVE_PLAYER && !mSecondaryPlayer2.getPlayWhenReady() && mCurrentSurface == SURFACE_2) {
+ Log.d(TAG, "onDrawFrame: playing secondary player 2");
+ mSecondaryPlayer2.setPlayWhenReady(true);
+// retrySecondaryPlayer2 = false;
+ }
+
+
+ // surface 2 video media
+ gl.glLoadIdentity();
+ gl.glEnable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES); //ENABLE VIDEO TEXTURES
+
+ gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
+ gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer2);
+
+ gl.glTranslatef(0, 0, depth);
+ gl.glRotatef(-(angleRectangle), 0.0f, -1.0f, 0.0f);
+ gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
+ gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, textureBuffer2);
+//
+// gl.glTexEnvf(GL10.GL_TEXTURE_ENV, GL10.GL_TEXTURE_ENV_MODE, GL10.GL_REPLACE);
+// gl.glGenTextures(1, textureId2, 0);
+// gl.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId2[0]); //causes error (doesn't crash)
+
+ gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
+
+ gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
+ gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
+ gl.glDisable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES); //DISABLE VIDEO TEXTURES
+
+ if (mUpdateST2) {
+ try {
+// hideProgressBar();
+ mSurfaceTexture2.updateTexImage();
+// Log.d(TAG, "onDrawFrame: updating surface2 frame");
+
+ } catch (IllegalStateException e) {
+ e.printStackTrace();
+ Log.e(TAG, "onFrameAvailable: IllegalStateException surface2: " + e.getMessage());
+ } catch (RuntimeException e) {
+ e.printStackTrace();
+ Log.e(TAG, "onFrameAvailable: RuntimeException surface2: " + e.getMessage());
+ }
+ mUpdateST2 = false;
+ }
+ }
+ }
+ } catch (JSONException e) {
+ e.printStackTrace();
+ } catch (NullPointerException e) {
+// e.printStackTrace();
+ }
+
+ //////////////////////////////
+ //surface 3 logic
+ try {
+ final int surface3ResourceIndex = mResourceIndices.getJSONObject(SURFACE_3).getInt(mContext.getString(R.string.resource_index));
+ final int surface3MediaIndex = mResourceIndices.getJSONObject(SURFACE_3).getJSONArray(mContext.getString(R.string.media_index))
+ .getJSONObject(surface3ResourceIndex / 4).getInt(mContext.getString(R.string.media_index));
+// Log.d(TAG, "onDrawFrame: surface3 media index: " + surface3MediaIndex);
+// Log.d(TAG, "onDrawFrame: surface3 resource index: " + surface3ResourceIndex);
+ if (surface3MediaIndex >= 0) {
+ String resourceType = "";
+ boolean imageRenderError = false;
+ try {
+ resourceType = mResources.get(surface3ResourceIndex).getJSONObject(surface3MediaIndex).get(mContext.getString(R.string.media_type)).toString();
+ } catch (JSONException e) {
+// Log.e(TAG, "onDrawFrame: error getting bitmap: " + e.getMessage() );
+ imageRenderError = true;
+ }
+ if (resourceType.equals(mContext.getString(R.string.encoded_bitmap)) || imageRenderError) {
+// Log.d(TAG, "onDrawFrame: rendering SURFACE 1 image");
+
+ float widthScaleFactor = 1f;
+ float heightScaleFactor = 1f;
+ if (imageRenderError) {
+ widthScaleFactor = mDefaultImageWidthScaleFactor;
+ heightScaleFactor = mDefaultImageHeightScaleFactor;
+ } else {
+ widthScaleFactor = Float.valueOf(String.valueOf(mResources.get(surface3ResourceIndex).getJSONObject(surface3MediaIndex).get(mContext.getString(R.string.width_scale_factor))));
+ heightScaleFactor = Float.valueOf(String.valueOf(mResources.get(surface3ResourceIndex).getJSONObject(surface3MediaIndex).get(mContext.getString(R.string.height_scale_factor))));
+ }
+ // Define the vertices for this face
+ float[] imageVertices = {
+ widthMatrix[SURFACE_3][0] * widthScaleFactor, heightMatrix[SURFACE_3][0] * heightScaleFactor, depthMatrix[SURFACE_3][0],
+ widthMatrix[SURFACE_3][1] * widthScaleFactor, heightMatrix[SURFACE_3][1] * heightScaleFactor, depthMatrix[SURFACE_3][1],
+ widthMatrix[SURFACE_3][2] * widthScaleFactor, heightMatrix[SURFACE_3][2] * heightScaleFactor, depthMatrix[SURFACE_3][2],
+ widthMatrix[SURFACE_3][3] * widthScaleFactor, heightMatrix[SURFACE_3][3] * heightScaleFactor, depthMatrix[SURFACE_3][3],
+ };
+ if (vertexBuffer3 != null) {
+ vertexBuffer3.clear();
+ }
+ ByteBuffer vbb = ByteBuffer.allocateDirect(imageVertices.length * 6 * 4);
+ vbb.order(ByteOrder.nativeOrder());
+ vertexBuffer3 = vbb.asFloatBuffer();
+
+ vertexBuffer3.put(imageVertices);
+ vertexBuffer3.position(0);
+
+ //surface3 image media
+ gl.glLoadIdentity();
+ gl.glEnable(GL10.GL_TEXTURE_2D); //ENABLE IMAGE TEXTURES
+
+ gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
+ gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer3);
+
+ setImageToIndex(gl, SURFACE_3, imageRenderError);
+
+ gl.glTranslatef(0, 0, depth);
+ gl.glRotatef(angleRectangle, 0.0f, 1.0f, 0.0f);
+ gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
+ gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, textureBuffer3);
+
+
+ gl.glTexEnvf(GL10.GL_TEXTURE_ENV, GL10.GL_TEXTURE_ENV_MODE, GL10.GL_REPLACE);
+ gl.glBindTexture(GL10.GL_TEXTURE_2D, textureId3[0]);
+ gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
+
+
+ gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
+ gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
+ gl.glDisable(GL10.GL_TEXTURE_2D);
+
+
+// if (mVertexBuffers[SURFACE_3][1] == null && widthScaleFactor != -1) {
+ if (widthScaleFactor != -1) {
+// Log.d(TAG, "SCALE FACTOR: WSF: " + widthScaleFactor);
+// Log.d(TAG, "SCALE FACTOR: HSF: " + heightScaleFactor);
+
+ float[] topVertices = {
+ widthMatrix[SURFACE_3][0] * widthScaleFactor, screenRatio * heightScaleFactor, depthMatrix[SURFACE_3][0],
+ widthMatrix[SURFACE_3][1] * widthScaleFactor, screenRatio * heightScaleFactor, depthMatrix[SURFACE_3][1],
+ widthMatrix[SURFACE_3][2] * widthScaleFactor, heightMatrix[SURFACE_3][2], depthMatrix[SURFACE_3][2],
+ widthMatrix[SURFACE_3][3] * widthScaleFactor, heightMatrix[SURFACE_3][3], depthMatrix[SURFACE_3][3],
+ };
+
+ vbb = ByteBuffer.allocateDirect(topVertices.length * 6 * 4);
+ vbb.order(ByteOrder.nativeOrder());
+ FloatBuffer bufferTop = vbb.asFloatBuffer();
+ mVertexBuffers[SURFACE_3][1] = bufferTop.put(topVertices);
+ mVertexBuffers[SURFACE_3][1].position(0);
+ }
+// if (mVertexBuffers[SURFACE_3][2] == null && widthScaleFactor != -1) {
+ if (widthScaleFactor != -1) {
+// Log.d(TAG, "SCALE FACTOR: WSF: " + widthScaleFactor);
+// Log.d(TAG, "SCALE FACTOR: HSF: " + heightScaleFactor);
+
+ float[] botVertices = {
+ widthMatrix[SURFACE_3][0] * widthScaleFactor, heightMatrix[SURFACE_3][0], depthMatrix[SURFACE_3][0],
+ widthMatrix[SURFACE_3][1] * widthScaleFactor, heightMatrix[SURFACE_3][1], depthMatrix[SURFACE_3][1],
+ widthMatrix[SURFACE_3][2] * widthScaleFactor, -screenRatio * heightScaleFactor, depthMatrix[SURFACE_3][2],
+ widthMatrix[SURFACE_3][3] * widthScaleFactor, -screenRatio * heightScaleFactor, depthMatrix[SURFACE_3][3],
+ };
+ vbb = ByteBuffer.allocateDirect(botVertices.length * 6 * 4);
+ vbb.order(ByteOrder.nativeOrder());
+ FloatBuffer bufferBot = vbb.asFloatBuffer();
+ mVertexBuffers[SURFACE_3][2] = bufferBot.put(botVertices);
+ mVertexBuffers[SURFACE_3][2].position(0);
+ }
+
+
+ if (mVertexBuffers[SURFACE_3][2] != null) {
+ //surface3 bot
+ gl.glLoadIdentity();
+ gl.glDisable(GL10.GL_TEXTURE_2D); //DISABLE TEXTURE WHEN BUILDING FACES
+
+ gl.glTranslatef(0, 0, depth);
+ gl.glRotatef(angleRectangle, 0.0f, 1.0f, 0.0f);
+
+ gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
+ gl.glVertexPointer(3, GL10.GL_FLOAT, 0, mVertexBuffers[SURFACE_3][2]);
+
+ gl.glColor4f(0f, 0f, 0.0f, 1); //set the block black
+ gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
+
+ gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
+ }
+
+ if (mVertexBuffers[SURFACE_3][1] != null) {
+ //surface3 top
+ gl.glLoadIdentity();
+ gl.glDisable(GL10.GL_TEXTURE_2D); //DISABLE TEXTURE WHEN BUILDING FACES
+
+ gl.glTranslatef(0, 0, depth);
+ gl.glRotatef(angleRectangle, 0.0f, 1.0f, 0.0f);
+
+ gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
+ gl.glVertexPointer(3, GL10.GL_FLOAT, 0, mVertexBuffers[SURFACE_3][1]);
+
+ gl.glColor4f(0f, 0f, 0.0f, 1); //set the block black
+ gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
+
+ gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
+ }
+ } else if (mResources.get(surface3ResourceIndex).getJSONObject(surface3MediaIndex).get(mContext.getString(R.string.media_type)).equals(mContext.getString(R.string.video_uri))) {
+// Log.d(TAG, "onDrawFrame: rendering SURFACE 1 video");
+
+ // Define the vertices for this face
+ float[] imageVertices = {
+ widthMatrix[SURFACE_3][0], heightMatrix[SURFACE_3][0], depthMatrix[SURFACE_3][0],
+ widthMatrix[SURFACE_3][1], heightMatrix[SURFACE_3][1], depthMatrix[SURFACE_3][1],
+ widthMatrix[SURFACE_3][2], heightMatrix[SURFACE_3][2], depthMatrix[SURFACE_3][2],
+ widthMatrix[SURFACE_3][3], heightMatrix[SURFACE_3][3], depthMatrix[SURFACE_3][3],
+ };
+ if (vertexBuffer3 != null) {
+ vertexBuffer3.clear();
+ }
+ ByteBuffer vbb = ByteBuffer.allocateDirect(imageVertices.length * 6 * 4);
+ vbb.order(ByteOrder.nativeOrder());
+ vertexBuffer3 = vbb.asFloatBuffer();
+
+ vertexBuffer3.put(imageVertices);
+ vertexBuffer3.position(0);
+
+
+ if (mPlayer3State == ACTIVE_PLAYER && !mPlayer3.getPlayWhenReady() && mCurrentSurface == SURFACE_3) {
+ Log.d(TAG, "onDrawFrame: playing player 3");
+ mPlayer3.setPlayWhenReady(true);
+// retryPlayer3 = false;
+ } else if (mSecondaryPlayer3State == ACTIVE_PLAYER && !mSecondaryPlayer3.getPlayWhenReady() && mCurrentSurface == SURFACE_3) {
+ Log.d(TAG, "onDrawFrame: playing secondary player 3");
+ mSecondaryPlayer3.setPlayWhenReady(true);
+// retrySecondaryPlayer3 = false;
+ }
+
+ // surface 3 video media
+ gl.glLoadIdentity();
+ gl.glEnable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES); //ENABLE VIDEO TEXTURES
+
+ gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
+ gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer3);
+
+
+ gl.glTranslatef(0, 0, depth);
+ gl.glRotatef(-(angleRectangle), 0.0f, -1.0f, 0.0f);
+ gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
+ gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, textureBuffer3);
+
+// gl.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId1[0]); //causes error (doesn't crash)
+ gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
+
+ gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
+ gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
+ gl.glDisable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES); //DISABLE VIDEO TEXTURES
+
+// //double check to make sure the video is playing properly
+// if (mPlayer3State == ACTIVE_PLAYER && mPlayer3.getPlayWhenReady() && mCurrentSurface == SURFACE_3
+// && mPlayer3.getPlaybackState() != Player.STATE_READY && retryPlayer3 && mPlayer3.getCurrentPosition() < 0) {
+// retryPlayer3 = false;
+// Log.d(TAG, "onDrawFrame: player3 is trying to play.");
+// Log.d(TAG, "onDrawFrame: player3 playback state: " + mPlayer3.getPlaybackState());
+// retryPlayVideo(SURFACE_3, surface3ResourceIndex, surface3MediaIndex);
+// } else if (mSecondaryPlayer3State == ACTIVE_PLAYER && mSecondaryPlayer3.getPlayWhenReady() && mCurrentSurface == SURFACE_3
+// && mSecondaryPlayer3.getPlaybackState() != Player.STATE_READY && retrySecondaryPlayer3 && mSecondaryPlayer3.getCurrentPosition() < 0) {
+// retrySecondaryPlayer3 = false;
+// Log.d(TAG, "onDrawFrame: secondary player3 is trying to play.");
+// Log.d(TAG, "onDrawFrame: secondary player3 playback state: " + mSecondaryPlayer3.getPlaybackState());
+// retryPlayVideo(SURFACE_3, surface3ResourceIndex, surface3MediaIndex);
+// }
+// else if(mSecondaryPlayer3.getPlaybackState() != Player.STATE_READY || mPlayer3.getPlaybackState() != Player.STATE_READY){
+// hideProgressBar();
+// }
+
+ if (mUpdateST3) {
+ try {
+// hideProgressBar();
+ mSurfaceTexture3.updateTexImage();
+// Log.d(TAG, "onDrawFrame: updating surface3 frame");
+
+ } catch (IllegalStateException e) {
+ Log.e(TAG, "onFrameAvailable: IllegalStateException: " + e.getMessage());
+ } catch (RuntimeException e) {
+ Log.e(TAG, "onFrameAvailable: RuntimeException: " + e.getMessage());
+ }
+ mUpdateST3 = false;
+ }
+ }
+ }
+ } catch (JSONException e) {
+ e.printStackTrace();
+ } catch (NullPointerException e) {
+// e.printStackTrace();
+ }
+
+
+ //////////////////////////////
+ //surface 4 logic
+ try {
+ final int surface4ResourceIndex = mResourceIndices.getJSONObject(SURFACE_4).getInt(mContext.getString(R.string.resource_index));
+ final int surface4MediaIndex = mResourceIndices.getJSONObject(SURFACE_4).getJSONArray(mContext.getString(R.string.media_index))
+ .getJSONObject(surface4ResourceIndex / 4).getInt(mContext.getString(R.string.media_index));
+// Log.d(TAG, "onDrawFrame: surface4 media index: " + surface4MediaIndex);
+// Log.d(TAG, "onDrawFrame: surface4 resource index: " + surface4ResourceIndex);
+ if (surface4MediaIndex >= 0) {
+ String resourceType = "";
+ boolean imageRenderError = false;
+ try {
+ resourceType = mResources.get(surface4ResourceIndex).getJSONObject(surface4MediaIndex).get(mContext.getString(R.string.media_type)).toString();
+ } catch (JSONException e) {
+// Log.e(TAG, "onDrawFrame: error getting bitmap: " + e.getMessage() );
+ imageRenderError = true;
+ }
+ if (resourceType.equals(mContext.getString(R.string.encoded_bitmap)) || imageRenderError) {
+// Log.d(TAG, "onDrawFrame: rendering SURFACE 1 image");
+
+ float widthScaleFactor = 1f;
+ float heightScaleFactor = 1f;
+ if (imageRenderError) {
+ widthScaleFactor = mDefaultImageWidthScaleFactor;
+ heightScaleFactor = mDefaultImageHeightScaleFactor;
+ } else {
+ widthScaleFactor = Float.valueOf(String.valueOf(mResources.get(surface4ResourceIndex).getJSONObject(surface4MediaIndex).get(mContext.getString(R.string.width_scale_factor))));
+ heightScaleFactor = Float.valueOf(String.valueOf(mResources.get(surface4ResourceIndex).getJSONObject(surface4MediaIndex).get(mContext.getString(R.string.height_scale_factor))));
+ }
+ // Define the vertices for this face
+ float[] imageVertices = {
+ widthMatrix[SURFACE_4][0] * widthScaleFactor, heightMatrix[SURFACE_4][0] * heightScaleFactor, depthMatrix[SURFACE_4][0],
+ widthMatrix[SURFACE_4][1] * widthScaleFactor, heightMatrix[SURFACE_4][1] * heightScaleFactor, depthMatrix[SURFACE_4][1],
+ widthMatrix[SURFACE_4][2] * widthScaleFactor, heightMatrix[SURFACE_4][2] * heightScaleFactor, depthMatrix[SURFACE_4][2],
+ widthMatrix[SURFACE_4][3] * widthScaleFactor, heightMatrix[SURFACE_4][3] * heightScaleFactor, depthMatrix[SURFACE_4][3],
+ };
+ if (vertexBuffer4 != null) {
+ vertexBuffer4.clear();
+ }
+ ByteBuffer vbb = ByteBuffer.allocateDirect(imageVertices.length * 6 * 4);
+ vbb.order(ByteOrder.nativeOrder());
+ vertexBuffer4 = vbb.asFloatBuffer();
+
+ vertexBuffer4.put(imageVertices);
+ vertexBuffer4.position(0);
+
+ //surface4 image media
+ gl.glLoadIdentity();
+ gl.glEnable(GL10.GL_TEXTURE_2D); //ENABLE IMAGE TEXTURES
+
+ gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
+ gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer4);
+
+ setImageToIndex(gl, SURFACE_4, imageRenderError);
+
+ gl.glTranslatef(0, 0, depth);
+ gl.glRotatef(angleRectangle, 0.0f, 1.0f, 0.0f);
+ gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
+ gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, textureBuffer4);
+
+
+ gl.glTexEnvf(GL10.GL_TEXTURE_ENV, GL10.GL_TEXTURE_ENV_MODE, GL10.GL_REPLACE);
+ gl.glBindTexture(GL10.GL_TEXTURE_2D, textureId4[0]);
+ gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
+
+
+ gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
+ gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
+ gl.glDisable(GL10.GL_TEXTURE_2D);
+
+
+// if (mVertexBuffers[SURFACE_4][1] == null && widthScaleFactor != -1) {
+ if (widthScaleFactor != -1) {
+// Log.d(TAG, "SCALE FACTOR: WSF: " + widthScaleFactor);
+// Log.d(TAG, "SCALE FACTOR: HSF: " + heightScaleFactor);
+
+ float[] topVertices = {
+ widthMatrix[SURFACE_4][0] * widthScaleFactor, screenRatio * heightScaleFactor, depthMatrix[SURFACE_4][0],
+ widthMatrix[SURFACE_4][1] * widthScaleFactor, screenRatio * heightScaleFactor, depthMatrix[SURFACE_4][1],
+ widthMatrix[SURFACE_4][2] * widthScaleFactor, heightMatrix[SURFACE_4][2], depthMatrix[SURFACE_4][2],
+ widthMatrix[SURFACE_4][3] * widthScaleFactor, heightMatrix[SURFACE_4][3], depthMatrix[SURFACE_4][3],
+ };
+
+ vbb = ByteBuffer.allocateDirect(topVertices.length * 6 * 4);
+ vbb.order(ByteOrder.nativeOrder());
+ FloatBuffer bufferTop = vbb.asFloatBuffer();
+ mVertexBuffers[SURFACE_4][1] = bufferTop.put(topVertices);
+ mVertexBuffers[SURFACE_4][1].position(0);
+ }
+// if (mVertexBuffers[SURFACE_4][2] == null && widthScaleFactor != -1) {
+ if (widthScaleFactor != -1) {
+// Log.d(TAG, "SCALE FACTOR: WSF: " + widthScaleFactor);
+// Log.d(TAG, "SCALE FACTOR: HSF: " + heightScaleFactor);
+
+ float[] botVertices = {
+ widthMatrix[SURFACE_4][0] * widthScaleFactor, heightMatrix[SURFACE_4][0], depthMatrix[SURFACE_4][0],
+ widthMatrix[SURFACE_4][1] * widthScaleFactor, heightMatrix[SURFACE_4][1], depthMatrix[SURFACE_4][1],
+ widthMatrix[SURFACE_4][2] * widthScaleFactor, -screenRatio * heightScaleFactor, depthMatrix[SURFACE_4][2],
+ widthMatrix[SURFACE_4][3] * widthScaleFactor, -screenRatio * heightScaleFactor, depthMatrix[SURFACE_4][3],
+ };
+
+ vbb = ByteBuffer.allocateDirect(botVertices.length * 6 * 4);
+ vbb.order(ByteOrder.nativeOrder());
+ FloatBuffer bufferBot = vbb.asFloatBuffer();
+ mVertexBuffers[SURFACE_4][2] = bufferBot.put(botVertices);
+ mVertexBuffers[SURFACE_4][2].position(0);
+ }
+
+
+ if (mVertexBuffers[SURFACE_4][2] != null) {
+ //surface4 bot
+ gl.glLoadIdentity();
+ gl.glDisable(GL10.GL_TEXTURE_2D); //DISABLE TEXTURE WHEN BUILDING FACES
+
+ gl.glTranslatef(0, 0, depth);
+ gl.glRotatef(angleRectangle, 0.0f, 1.0f, 0.0f);
+
+ gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
+ gl.glVertexPointer(3, GL10.GL_FLOAT, 0, mVertexBuffers[SURFACE_4][2]);
+
+ gl.glColor4f(0f, 0f, 0.0f, 1); //set the block black
+ gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
+
+ gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
+ }
+
+ if (mVertexBuffers[SURFACE_4][1] != null) {
+ //surface4 top
+ gl.glLoadIdentity();
+ gl.glDisable(GL10.GL_TEXTURE_2D); //DISABLE TEXTURE WHEN BUILDING FACES
+
+ gl.glTranslatef(0, 0, depth);
+ gl.glRotatef(angleRectangle, 0.0f, 1.0f, 0.0f);
+
+ gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
+ gl.glVertexPointer(3, GL10.GL_FLOAT, 0, mVertexBuffers[SURFACE_4][1]);
+
+ gl.glColor4f(0f, 0f, 0.0f, 1); //set the block black
+ gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
+
+ gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
+ }
+ } else if (mResources.get(surface4ResourceIndex).getJSONObject(surface4MediaIndex).get(mContext.getString(R.string.media_type)).equals(mContext.getString(R.string.video_uri))) {
+// Log.d(TAG, "onDrawFrame: rendering SURFACE 1 video");
+
+ // Define the vertices for this face
+ float[] imageVertices = {
+ widthMatrix[SURFACE_4][0], heightMatrix[SURFACE_4][0], depthMatrix[SURFACE_4][0],
+ widthMatrix[SURFACE_4][1], heightMatrix[SURFACE_4][1], depthMatrix[SURFACE_4][1],
+ widthMatrix[SURFACE_4][2], heightMatrix[SURFACE_4][2], depthMatrix[SURFACE_4][2],
+ widthMatrix[SURFACE_4][3], heightMatrix[SURFACE_4][3], depthMatrix[SURFACE_4][3],
+ };
+ if (vertexBuffer4 != null) {
+ vertexBuffer4.clear();
+ }
+ ByteBuffer vbb = ByteBuffer.allocateDirect(imageVertices.length * 6 * 4);
+ vbb.order(ByteOrder.nativeOrder());
+ vertexBuffer4 = vbb.asFloatBuffer();
+
+ vertexBuffer4.put(imageVertices);
+ vertexBuffer4.position(0);
+
+
+ if (mPlayer4State == ACTIVE_PLAYER && !mPlayer4.getPlayWhenReady() && mCurrentSurface == SURFACE_4) {
+ Log.d(TAG, "onDrawFrame: playing player 4");
+ mPlayer4.setPlayWhenReady(true);
+// retryPlayer4 = false;
+ } else if (mSecondaryPlayer4State == ACTIVE_PLAYER && !mSecondaryPlayer4.getPlayWhenReady() && mCurrentSurface == SURFACE_4) {
+ Log.d(TAG, "onDrawFrame: playing secondary player 4");
+ mSecondaryPlayer4.setPlayWhenReady(true);
+// retrySecondaryPlayer4 = false;
+ }
+
+ // surface 4 video media
+ gl.glLoadIdentity();
+ gl.glEnable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES); //ENABLE VIDEO TEXTURES
+
+ gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
+ gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer4);
+
+
+ gl.glTranslatef(0, 0, depth);
+ gl.glRotatef(-(angleRectangle), 0.0f, -1.0f, 0.0f);
+ gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
+ gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, textureBuffer4);
+
+// gl.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId1[0]); //causes error (doesn't crash)
+ gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
+
+ gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
+ gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
+ gl.glDisable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES); //DISABLE VIDEO TEXTURES
+
+ //double check to make sure the video is playing properly
+// if (mPlayer4State == ACTIVE_PLAYER && mPlayer4.getPlayWhenReady() && mCurrentSurface == SURFACE_4
+// && mPlayer4.getPlaybackState() != Player.STATE_READY && retryPlayer4 && mPlayer4.getCurrentPosition() < 0) {
+// retryPlayer4 = false;
+// Log.d(TAG, "onDrawFrame: player4 is trying to play.");
+// Log.d(TAG, "onDrawFrame: player4 playback state: " + mPlayer4.getPlaybackState());
+// retryPlayVideo(SURFACE_4, surface4ResourceIndex, surface4MediaIndex);
+// } else if (mSecondaryPlayer4State == ACTIVE_PLAYER && mSecondaryPlayer4.getPlayWhenReady() && mCurrentSurface == SURFACE_4
+// && mSecondaryPlayer4.getPlaybackState() != Player.STATE_READY && retrySecondaryPlayer4 && mSecondaryPlayer4.getCurrentPosition() < 0) {
+// retrySecondaryPlayer4 = false;
+// Log.d(TAG, "onDrawFrame: secondary player4 is trying to play.");
+// Log.d(TAG, "onDrawFrame: secondary player4 playback state: " + mSecondaryPlayer4.getPlaybackState());
+// retryPlayVideo(SURFACE_4, surface4ResourceIndex, surface4MediaIndex);
+// }
+// else if(mSecondaryPlayer4.getPlaybackState() != Player.STATE_READY || mPlayer4.getPlaybackState() != Player.STATE_READY){
+// hideProgressBar();
+// }
+
+ if (mUpdateST4) {
+ try {
+// hideProgressBar();
+ mSurfaceTexture4.updateTexImage();
+// Log.d(TAG, "onDrawFrame: updating surface4 frame");
+
+ } catch (IllegalStateException e) {
+ Log.e(TAG, "onFrameAvailable: IllegalStateException: " + e.getMessage());
+ } catch (RuntimeException e) {
+ Log.e(TAG, "onFrameAvailable: RuntimeException: " + e.getMessage());
+ }
+ mUpdateST4 = false;
+ }
+ }
+ }
+ } catch (JSONException e) {
+ e.printStackTrace();
+ } catch (NullPointerException e) {
+// e.printStackTrace();
+ }
+
+// if((isImage1Set || hasFirstVideo1Played) && (isImage2Set || hasFirstVideo2Played)
+// && (isImage3Set || hasFirstVideo3Played) && (isImage4Set || hasFirstVideo4Played)) {
+ ByteBuffer vBackground = ByteBuffer.allocateDirect(backgroundVertices.length * 6 * 4);
+ vBackground.order(ByteOrder.nativeOrder());
+ FloatBuffer bufferBackground = vBackground.asFloatBuffer();
+ mVertexBuffers[BACKGROUND_SURFACE][1] = bufferBackground.put(backgroundVertices);
+ mVertexBuffers[BACKGROUND_SURFACE][1].position(0);
+
+ gl.glLoadIdentity();
+ gl.glDisable(GL10.GL_TEXTURE_2D); //DISABLE TEXTURE WHEN BUILDING FACES
+
+ gl.glTranslatef(0, 0, -9f);
+ gl.glRotatef(0, 0.0f, 1.0f, 0.0f);
+
+ gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
+ gl.glVertexPointer(3, GL10.GL_FLOAT, 0, mVertexBuffers[BACKGROUND_SURFACE][1]);
+
+ gl.glColor4f(1f, 1f, 1.0f, 1.0f); //set the background quad white
+ gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
+
+ gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
+// }
+ }
+
+
+
+
+ /**
+  * GLSurfaceView.Renderer callback invoked once when the GL surface is (re)created.
+  * Performs one-time fixed-function GL state setup for this renderer: depth testing
+  * for hidden-surface removal, smooth shading, and perspective-correction hints.
+  * No textures or buffers are created here; those are built lazily in onDrawFrame.
+  */
+ public void onSurfaceCreated(GL10 gl, EGLConfig config) {
+ Log.d(TAG, "onSurfaceCreated: SURFACE CREATED.");
+ gl.glClearDepthf(1.0f); // Depth buffer clears to the farthest value (1.0)
+ gl.glEnable(GL10.GL_DEPTH_TEST); // Enable depth-buffer for hidden surface removal
+ gl.glDepthFunc(GL10.GL_LEQUAL); // Fragment passes when its depth is <= stored depth
+// gl.glDepthFunc(GL10.GL_NEVER);
+ gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT, GL10.GL_NICEST); // Prefer quality perspective interpolation
+ gl.glShadeModel(GL10.GL_SMOOTH); // Smooth (Gouraud) shading of vertex colors
+ gl.glDisable(GL10.GL_DITHER); // Disable dithering for better performance
+// gl.glFrontFace(GL10.GL_CCW);
+ }
+
+
+
+
+ /**
+  * GLSurfaceView.Renderer callback invoked whenever the surface dimensions change
+  * (first layout, rotation, resize). Rebuilds the viewport and a 45-degree
+  * perspective projection matched to the new aspect ratio, then leaves the
+  * model-view matrix selected and reset for subsequent onDrawFrame calls.
+  */
+ public void onSurfaceChanged(GL10 gl, int width, int height) {
+     Log.d(TAG, "onSurfaceChanged: SURFACE CHANGED.");
+
+     // Guard against a zero height so the aspect-ratio division below is safe.
+     if (height == 0) {
+         height = 1;
+     }
+     final float aspectRatio = (float) width / height;
+
+     // Render into the entire window.
+     gl.glViewport(0, 0, width, height);
+
+     // Perspective projection: 45-degree field of view, near 0.1, far 100.
+     gl.glMatrixMode(GL10.GL_PROJECTION);
+     gl.glLoadIdentity();
+     GLU.gluPerspective(gl, 45, aspectRatio, 0.1f, 100.f);
+
+     // Hand control back to the model-view matrix, reset to identity.
+     gl.glMatrixMode(GL10.GL_MODELVIEW);
+     gl.glLoadIdentity();
+ }
+
+
+ /**
+  * Advances the active cube face one step forward (1 -> 2 -> 3 -> 4 -> 1) and
+  * then updates the per-surface rotation bookkeeping for a forward swipe.
+  * The four equality branches are mutually exclusive, so their evaluation
+  * order does not affect the outcome.
+  */
+ private void incrementSurface(){
+     Log.d(TAG, "incrementSurface: incrementing current surface.");
+
+     if (mCurrentSurface == SURFACE_4) {
+         mCurrentSurface = SURFACE_1; // wrap around from the last face
+         Log.d(TAG, "incrementSurface: CURRENT SURFACE: SURFACE 1");
+     } else if (mCurrentSurface == SURFACE_3) {
+         mCurrentSurface = SURFACE_4;
+         Log.d(TAG, "incrementSurface: CURRENT SURFACE: SURFACE 4");
+     } else if (mCurrentSurface == SURFACE_2) {
+         mCurrentSurface = SURFACE_3;
+         Log.d(TAG, "incrementSurface: CURRENT SURFACE: SURFACE 3");
+     } else if (mCurrentSurface == SURFACE_1) {
+         mCurrentSurface = SURFACE_2;
+         Log.d(TAG, "incrementSurface: CURRENT SURFACE: SURFACE 2");
+     }
+
+     updateRotations(1); // +1 rotation = forward direction
+ }
+
+ /**
+  * Steps the active cube face one step backward (1 -> 4 -> 3 -> 2 -> 1) and
+  * then updates the per-surface rotation bookkeeping for a backward swipe.
+  * The four equality branches are mutually exclusive, so their evaluation
+  * order does not affect the outcome.
+  */
+ private void deincrementSurface(){
+     Log.d(TAG, "deincrementSurface: deincrementing current surface.");
+
+     if (mCurrentSurface == SURFACE_4) {
+         mCurrentSurface = SURFACE_3;
+         Log.d(TAG, "deincrementSurface: CURRENT SURFACE: SURFACE 3");
+     } else if (mCurrentSurface == SURFACE_3) {
+         mCurrentSurface = SURFACE_2;
+         Log.d(TAG, "deincrementSurface: CURRENT SURFACE: SURFACE 2");
+     } else if (mCurrentSurface == SURFACE_2) {
+         mCurrentSurface = SURFACE_1;
+         Log.d(TAG, "deincrementSurface: CURRENT SURFACE: SURFACE 1");
+     } else if (mCurrentSurface == SURFACE_1) {
+         mCurrentSurface = SURFACE_4; // wrap around to the last face
+         Log.d(TAG, "deincrementSurface: CURRENT SURFACE: SURFACE 4");
+     }
+
+     updateRotations(-1); // -1 rotation = backward direction
+ }
+
+ private void updateRotations(final int direction){
+ if(direction > 0){
+ mNumRotations++;
+ }
+ else{
+ mNumRotations--;
+ }
+ Log.d(TAG, "updateRotations: NUM ROTATIONS: " + mNumRotations);
+ try{
+ int numRotations1 = mResourceIndices.getJSONObject(SURFACE_1).getInt(mContext.getString(R.string.rotations));
+ numRotations1 = numRotations1 + direction;
+ JSONObject object1 = mResourceIndices.getJSONObject(SURFACE_1);
+ object1.put(mContext.getString(R.string.rotations), numRotations1);
+ mResourceIndices.put(SURFACE_1, object1);
+ Log.d(TAG, "updateRotations: num rotations1: " + numRotations1);
+
+ int numRotations2 = mResourceIndices.getJSONObject(SURFACE_2).getInt(mContext.getString(R.string.rotations));
+ numRotations2 = numRotations2 + direction;
+ JSONObject object2 = mResourceIndices.getJSONObject(SURFACE_2);
+ object2.put(mContext.getString(R.string.rotations), numRotations2);
+ mResourceIndices.put(SURFACE_2, object2);
+ Log.d(TAG, "updateRotations: num rotations2: " + numRotations2);
+
+ int numRotations3 = mResourceIndices.getJSONObject(SURFACE_3).getInt(mContext.getString(R.string.rotations));
+ numRotations3 = numRotations3 + direction;
+ JSONObject object3 = mResourceIndices.getJSONObject(SURFACE_3);
+ object3.put(mContext.getString(R.string.rotations), numRotations3);
+ mResourceIndices.put(SURFACE_3, object3);
+ Log.d(TAG, "updateRotations: num rotations3: " + numRotations3);
+
+ int numRotations4 = mResourceIndices.getJSONObject(SURFACE_4).getInt(mContext.getString(R.string.rotations));
+ numRotations4 = numRotations4 + direction;
+ JSONObject object4 = mResourceIndices.getJSONObject(SURFACE_4);
+ object4.put(mContext.getString(R.string.rotations), numRotations4);
+ mResourceIndices.put(SURFACE_4, object4);
+ Log.d(TAG, "updateRotations: num rotations4: " + numRotations4);
+
+ if(numRotations1 % 3 == 0 && numRotations1 != 0){
+ int temp = numRotations1;
+ if(direction > 0){
+ numRotations1 = -1;
+ }
+ else{
+ numRotations1 = 1;
+ }
+
+ if(mSurfaceTexture == null){
+ initVideoSurface1(PLAYER_ONE);
+ }
+ object1.put(mContext.getString(R.string.rotations), numRotations1);
+ mResourceIndices.put(SURFACE_1, object1);
+ Log.d(TAG, "updateRotations: surface1 transition. Num rotations from: " + temp + " to " + numRotations1);
+
+ // Get a handler that can be used to post to the main thread
+ Handler mainHandler = new Handler(mContext.getMainLooper());
+ Runnable myRunnable = new Runnable() {
+ @Override
+ public void run() {
+ try{
+ //update the resource index to show new resource
+ int oldResourceIndex = mResourceIndices.getJSONObject(SURFACE_1).getInt(mContext.getString(R.string.resource_index));
+
+ int newResourceIndex = oldResourceIndex + 4*direction;
+
+// if(newResourceIndex < mMedia.size() && direction > 0){
+ if(newResourceIndex < mNumResources){
+// getMedia(mMedia.get(newResourceIndex), newResourceIndex);
+// JSONObject tempObj = mResourceIndices.getJSONObject(SURFACE_1);
+// tempObj.put(mContext.getString(R.string.resource_index), newResourceIndex);
+// mResourceIndices.put(SURFACE_1, tempObj);
+// Log.d(TAG, "updateRotations: RESOURCE SURFACE OBJECT: " + mResourceIndices.get(SURFACE_1).toString());
+//
+// //when a full rotation occurs, reset the media of that index
+// resetMediaIndex(SURFACE_1, oldResourceIndex, direction);
+
+ if(direction > 0){
+// getMedia(mMedia.get(newResourceIndex).getMedia(), newResourceIndex);
+ getMedia(mUserStories.getJSONObject(newResourceIndex)
+ .getJSONArray(mContext.getString(R.string.user_stories)), newResourceIndex);
+ JSONObject tempObj = mResourceIndices.getJSONObject(SURFACE_1);
+ tempObj.put(mContext.getString(R.string.resource_index), newResourceIndex);
+ mResourceIndices.put(SURFACE_1, tempObj);
+
+ //when a full rotation occurs, reset the media of that index
+// resetMediaIndex(SURFACE_1, newResourceIndex, direction);
+
+ Log.d(TAG, "updateRotations: old resource index: " + oldResourceIndex);
+ Log.d(TAG, "updateRotations: new resource index: " + newResourceIndex);
+ Log.d(TAG, "updateRotations: RESOURCE SURFACE OBJECT: " + mResourceIndices.get(SURFACE_1).toString());
+ }
+ else if(direction < 0 && oldResourceIndex > mNumRotations && oldResourceIndex >= 4){
+// getMedia(mMedia.get(oldResourceIndex - 4).getMedia(), oldResourceIndex - 4);
+ getMedia(mUserStories.getJSONObject(oldResourceIndex - 4)
+ .getJSONArray(mContext.getString(R.string.user_stories)), oldResourceIndex - 4);
+ JSONObject tempObj = mResourceIndices.getJSONObject(SURFACE_1);
+ tempObj.put(mContext.getString(R.string.resource_index), oldResourceIndex - 4);
+ mResourceIndices.put(SURFACE_1, tempObj);
+ //when a full rotation occurs, reset the media of that index
+// resetMediaIndex(SURFACE_1, oldResourceIndex - 4, direction);
+
+ Log.d(TAG, "updateRotations: old resource index: " + oldResourceIndex);
+ Log.d(TAG, "updateRotations: new resource index: " + (oldResourceIndex - 4));
+ Log.d(TAG, "updateRotations: RESOURCE SURFACE OBJECT: " + mResourceIndices.get(SURFACE_1).toString());
+ }
+// Log.d(TAG, "updateRotations: surface# 1 resource indices: " + mResourceIndices.getJSONObject(SURFACE_1));
+// Log.d(TAG, "updateRotations: surface# 2 resource indices: " + mResourceIndices.getJSONObject(SURFACE_2));
+// Log.d(TAG, "updateRotations: surface# 3 resource indices: " + mResourceIndices.getJSONObject(SURFACE_3));
+// Log.d(TAG, "updateRotations: surface# 4 resource indices: " + mResourceIndices.getJSONObject(SURFACE_4));
+
+ }
+ }catch (JSONException e){
+ e.printStackTrace();
+ }
+ }
+ };
+ mainHandler.post(myRunnable);
+ }
+ else if(numRotations2 % 3 == 0 && numRotations2 != 0){
+ int temp = numRotations2;
+ if(direction > 0){
+ numRotations2 = -1;
+ }
+ else{
+ numRotations2 = 1;
+ }
+ if(mSurfaceTexture2 == null){
+ initVideoSurface1(PLAYER_TWO);
+ }
+ object2.put(mContext.getString(R.string.rotations), numRotations2);
+ mResourceIndices.put(SURFACE_2, object2);
+ Log.d(TAG, "updateRotations: surface2 transition. Num rotations from: " + temp + " to " + numRotations2);
+
+ // Get a handler that can be used to post to the main thread
+ Handler mainHandler = new Handler(mContext.getMainLooper());
+ Runnable myRunnable = new Runnable() {
+ @Override
+ public void run() {
+ try{
+ //update the resource index to show new resource
+ int oldResourceIndex = mResourceIndices.getJSONObject(SURFACE_2).getInt(mContext.getString(R.string.resource_index));
+
+ int newResourceIndex = oldResourceIndex + 4*direction;
+
+ if(newResourceIndex < mNumResources){
+// getMedia(mMedia.get(newResourceIndex), newResourceIndex);
+//
+// JSONObject tempObj = mResourceIndices.getJSONObject(SURFACE_2);
+// tempObj.put(mContext.getString(R.string.resource_index), newResourceIndex);
+// mResourceIndices.put(SURFACE_2, tempObj);
+// Log.d(TAG, "updateRotations: RESOURCE SURFACE OBJECT: " + mResourceIndices.get(SURFACE_2).toString());
+//
+// //when a full rotation occurs, reset the media of that index
+// resetMediaIndex(SURFACE_2, oldResourceIndex, direction);
+
+ if(direction > 0){
+// getMedia(mMedia.get(newResourceIndex).getMedia(), newResourceIndex);
+ getMedia(mUserStories.getJSONObject(newResourceIndex)
+ .getJSONArray(mContext.getString(R.string.user_stories)), newResourceIndex);
+ JSONObject tempObj = mResourceIndices.getJSONObject(SURFACE_2);
+ tempObj.put(mContext.getString(R.string.resource_index), newResourceIndex);
+ mResourceIndices.put(SURFACE_2, tempObj);
+
+ //when a full rotation occurs, reset the media of that index
+// resetMediaIndex(SURFACE_2, newResourceIndex, direction);
+
+ Log.d(TAG, "updateRotations: old resource index: " + oldResourceIndex);
+ Log.d(TAG, "updateRotations: new resource index: " + newResourceIndex);
+ Log.d(TAG, "updateRotations: RESOURCE SURFACE OBJECT: " + mResourceIndices.get(SURFACE_2).toString());
+ }
+ else if(direction < 0 && oldResourceIndex > mNumRotations && oldResourceIndex >= 4){
+// getMedia(mMedia.get(oldResourceIndex - 4).getMedia(), oldResourceIndex - 4);
+ getMedia(mUserStories.getJSONObject(oldResourceIndex - 4)
+ .getJSONArray(mContext.getString(R.string.user_stories)), oldResourceIndex - 4);
+ JSONObject tempObj = mResourceIndices.getJSONObject(SURFACE_2);
+ tempObj.put(mContext.getString(R.string.resource_index), oldResourceIndex - 4);
+ mResourceIndices.put(SURFACE_2, tempObj);
+ //when a full rotation occurs, reset the media of that index
+// resetMediaIndex(SURFACE_2, oldResourceIndex - 4, direction);
+
+ Log.d(TAG, "updateRotations: old resource index: " + oldResourceIndex);
+ Log.d(TAG, "updateRotations: new resource index: " + (oldResourceIndex - 4));
+ Log.d(TAG, "updateRotations: RESOURCE SURFACE OBJECT: " + mResourceIndices.get(SURFACE_2).toString());
+ }
+// Log.d(TAG, "updateRotations: surface# 1 resource indices: " + mResourceIndices.getJSONObject(SURFACE_1));
+// Log.d(TAG, "updateRotations: surface# 2 resource indices: " + mResourceIndices.getJSONObject(SURFACE_2));
+// Log.d(TAG, "updateRotations: surface# 3 resource indices: " + mResourceIndices.getJSONObject(SURFACE_3));
+// Log.d(TAG, "updateRotations: surface# 4 resource indices: " + mResourceIndices.getJSONObject(SURFACE_4));
+
+ }
+ }catch (JSONException e){
+ e.printStackTrace();
+ }
+ }
+ };
+ mainHandler.post(myRunnable);
+ }
+ else if(numRotations3 % 3 == 0 && numRotations3 != 0){
+ int temp = numRotations3;
+ if(direction > 0){
+ numRotations3 = -1;
+ }
+ else{
+ numRotations3 = 1;
+ }
+ if(mSurfaceTexture3 == null){
+ initVideoSurface1(PLAYER_THREE);
+ }
+ object3.put(mContext.getString(R.string.rotations), numRotations3);
+ mResourceIndices.put(SURFACE_3, object3);
+ Log.d(TAG, "updateRotations: surface3 transition. Num rotations from: " + temp + " to " + numRotations3);
+
+ // special case for first rotation for surface 3. Disable transition
+ if(!mFirstRotationSurface3){
+ if(mNumRotations == 0){
+ mFirstRotationSurface3 = true;
+ }
+
+ // Get a handler that can be used to post to the main thread
+ Handler mainHandler = new Handler(mContext.getMainLooper());
+ Runnable myRunnable = new Runnable() {
+ @Override
+ public void run() {
+ try{
+ //update the resource index to show new resource
+ int oldResourceIndex = mResourceIndices.getJSONObject(SURFACE_3).getInt(mContext.getString(R.string.resource_index));
+
+ int newResourceIndex = oldResourceIndex + 4*direction;
+
+// if(newResourceIndex < mMedia.size() && direction > 0){
+ if(newResourceIndex < mNumResources){
+// getMedia(mMedia.get(newResourceIndex), newResourceIndex);
+//
+// JSONObject tempObj = mResourceIndices.getJSONObject(SURFACE_3);
+// tempObj.put(mContext.getString(R.string.resource_index), newResourceIndex);
+// mResourceIndices.put(SURFACE_3, tempObj);
+// Log.d(TAG, "updateRotations: RESOURCE SURFACE OBJECT: " + mResourceIndices.get(SURFACE_3).toString());
+//
+// //when a full rotation occurs, reset the media of that index
+// resetMediaIndex(SURFACE_3, oldResourceIndex, direction);
+
+ if(direction > 0){
+// getMedia(mMedia.get(newResourceIndex).getMedia(), newResourceIndex);
+ getMedia(mUserStories.getJSONObject(newResourceIndex)
+ .getJSONArray(mContext.getString(R.string.user_stories)), newResourceIndex);
+ JSONObject tempObj = mResourceIndices.getJSONObject(SURFACE_3);
+ tempObj.put(mContext.getString(R.string.resource_index), newResourceIndex);
+ mResourceIndices.put(SURFACE_3, tempObj);
+
+ //when a full rotation occurs, reset the media of that index
+// resetMediaIndex(SURFACE_3, newResourceIndex, direction);
+
+ Log.d(TAG, "updateRotations: old resource index: " + oldResourceIndex);
+ Log.d(TAG, "updateRotations: new resource index: " + newResourceIndex);
+ Log.d(TAG, "updateRotations: RESOURCE SURFACE OBJECT: " + mResourceIndices.get(SURFACE_3).toString());
+ }
+ else if(direction < 0 && oldResourceIndex > mNumRotations && oldResourceIndex >= 4){
+// getMedia(mMedia.get(oldResourceIndex - 4).getMedia(), oldResourceIndex - 4);
+ getMedia(mUserStories.getJSONObject(oldResourceIndex - 4)
+ .getJSONArray(mContext.getString(R.string.user_stories)), oldResourceIndex - 4);
+ JSONObject tempObj = mResourceIndices.getJSONObject(SURFACE_3);
+ tempObj.put(mContext.getString(R.string.resource_index), oldResourceIndex - 4);
+ mResourceIndices.put(SURFACE_3, tempObj);
+ //when a full rotation occurs, reset the media of that index
+// resetMediaIndex(SURFACE_3, oldResourceIndex - 4, direction);
+
+ Log.d(TAG, "updateRotations: old resource index: " + oldResourceIndex);
+ Log.d(TAG, "updateRotations: new resource index: " + (oldResourceIndex - 4));
+ Log.d(TAG, "updateRotations: RESOURCE SURFACE OBJECT: " + mResourceIndices.get(SURFACE_3).toString());
+ }
+// Log.d(TAG, "updateRotations: surface# 1 resource indices: " + mResourceIndices.getJSONObject(SURFACE_1));
+// Log.d(TAG, "updateRotations: surface# 2 resource indices: " + mResourceIndices.getJSONObject(SURFACE_2));
+// Log.d(TAG, "updateRotations: surface# 3 resource indices: " + mResourceIndices.getJSONObject(SURFACE_3));
+// Log.d(TAG, "updateRotations: surface# 4 resource indices: " + mResourceIndices.getJSONObject(SURFACE_4));
+
+ }
+ }catch (JSONException e){
+ e.printStackTrace();
+ }
+ }
+ };
+ mainHandler.post(myRunnable);
+ }
+ else{
+ Log.d(TAG, "updateRotations: first rotation of surface 3.");
+ mFirstRotationSurface3 = false;
+// isImage3Set = false;
+
+ }
+
+ }
+
+ else if(numRotations4 % 3 == 0 && numRotations4 != 0){
+ int temp = numRotations4;
+ if(direction > 0){
+ numRotations4 = -1;
+ }
+ else{
+ numRotations4 = 1;
+ }
+ if(mSurfaceTexture4 == null){
+ initVideoSurface1(PLAYER_FOUR);
+ }
+ object4.put(mContext.getString(R.string.rotations), numRotations4);
+ mResourceIndices.put(SURFACE_4, object4);
+ Log.d(TAG, "updateRotations: surface4 transition. Num rotations from: " + temp + " to " + numRotations4);
+
+ // special case for first rotation for surface 4. Disable transition
+ // Also use mNumRotations to catch special case for restarting completely
+
+ if(!mFirstRotationSurface4){
+ if(mNumRotations == 0){
+ mFirstRotationSurface4 = true;
+
+ }
+ // Get a handler that can be used to post to the main thread
+ Handler mainHandler = new Handler(mContext.getMainLooper());
+ Runnable myRunnable = new Runnable() {
+ @Override
+ public void run() {
+ try{
+ int oldResourceIndex = mResourceIndices.getJSONObject(SURFACE_4).getInt(mContext.getString(R.string.resource_index));
+
+ int newResourceIndex = oldResourceIndex + 4*direction;
+// if(oldResourceIndex == 3){
+// return;
+// }
+
+// if(newResourceIndex < mMedia.size() && direction > 0){
+ if(newResourceIndex < mNumResources){
+ if(direction > 0){
+// getMedia(mMedia.get(newResourceIndex).getMedia(), newResourceIndex);
+ getMedia(mUserStories.getJSONObject(newResourceIndex)
+ .getJSONArray(mContext.getString(R.string.user_stories)), newResourceIndex);
+ JSONObject tempObj = mResourceIndices.getJSONObject(SURFACE_4);
+ tempObj.put(mContext.getString(R.string.resource_index), newResourceIndex);
+ mResourceIndices.put(SURFACE_4, tempObj);
+ //when a full rotation occurs, reset the media of that index
+// resetMediaIndex(SURFACE_4, newResourceIndex, direction);
+
+ Log.d(TAG, "updateRotations: old resource index: " + oldResourceIndex);
+ Log.d(TAG, "updateRotations: new resource index: " + newResourceIndex);
+ Log.d(TAG, "updateRotations: RESOURCE SURFACE OBJECT: " + mResourceIndices.get(SURFACE_4).toString());
+ }
+ else if(direction < 0 && oldResourceIndex > mNumRotations && oldResourceIndex >= 4){
+// getMedia(mMedia.get(oldResourceIndex - 4).getMedia(), oldResourceIndex - 4);
+ getMedia(mUserStories.getJSONObject(oldResourceIndex - 4)
+ .getJSONArray(mContext.getString(R.string.user_stories)), oldResourceIndex - 4);
+ JSONObject tempObj = mResourceIndices.getJSONObject(SURFACE_4);
+ tempObj.put(mContext.getString(R.string.resource_index), oldResourceIndex - 4);
+ mResourceIndices.put(SURFACE_4, tempObj);
+ //when a full rotation occurs, reset the media of that index
+// resetMediaIndex(SURFACE_4, oldResourceIndex - 4, direction);
+
+ Log.d(TAG, "updateRotations: old resource index: " + oldResourceIndex);
+ Log.d(TAG, "updateRotations: new resource index: " + (oldResourceIndex - 4));
+ Log.d(TAG, "updateRotations: RESOURCE SURFACE OBJECT: " + mResourceIndices.get(SURFACE_4).toString());
+ }
+
+// Log.d(TAG, "updateRotations: surface# 1 resource indices: " + mResourceIndices.getJSONObject(SURFACE_1));
+// Log.d(TAG, "updateRotations: surface# 2 resource indices: " + mResourceIndices.getJSONObject(SURFACE_2));
+// Log.d(TAG, "updateRotations: surface# 3 resource indices: " + mResourceIndices.getJSONObject(SURFACE_3));
+// Log.d(TAG, "updateRotations: surface# 4 resource indices: " + mResourceIndices.getJSONObject(SURFACE_4));
+
+ }
+
+ }catch (JSONException e){
+ e.printStackTrace();
+ }
+ }
+ };
+ mainHandler.post(myRunnable);
+ }
+ else{
+ Log.d(TAG, "updateRotations: first rotation of surface 4.");
+ mFirstRotationSurface4 = false;
+ }
+ }
+
+ Log.d(TAG, "updateRotations: surface# 1 resource indices: " + mResourceIndices.getJSONObject(SURFACE_1));
+ Log.d(TAG, "updateRotations: surface# 2 resource indices: " + mResourceIndices.getJSONObject(SURFACE_2));
+ Log.d(TAG, "updateRotations: surface# 3 resource indices: " + mResourceIndices.getJSONObject(SURFACE_3));
+ Log.d(TAG, "updateRotations: surface# 4 resource indices: " + mResourceIndices.getJSONObject(SURFACE_4));
+
+
+ hideProgressBar();
+ //setup progress bars for the media sources
+ isProgressBarsInitialized = false;
+ initProgressBars();
+ //create runnable that updates progress bars once the progress bars are initialized.
+ mProgressBarInitHandler = new Handler(Looper.getMainLooper());
+ mProgressBarInitRunnable = new Runnable() {
+ @Override
+ public void run() {
+ mProgressBarInitHandler.postDelayed(mProgressBarInitRunnable, 200);
+ Log.d(TAG, "rotateCounterClockwise: checking to see if progress bars are initialized.");
+ if(isProgressBarsInitialized){
+ try{
+ if(mCurrentSurface == SURFACE_1) {
+ final int surface1ResourceIndex = mResourceIndices.getJSONObject(SURFACE_1).getInt(mContext.getString(R.string.resource_index));
+ final int surface1MediaIndex = mResourceIndices.getJSONObject(SURFACE_1).getJSONArray(mContext.getString(R.string.media_index))
+ .getJSONObject(surface1ResourceIndex / 4).getInt(mContext.getString(R.string.media_index));
+ Log.d(TAG, "correctRotation: RESOURCE SURFACE OBJECT: " + mResources.get(surface1ResourceIndex).getJSONObject(surface1MediaIndex).get(mContext.getString(R.string.media_type)));
+ if(mResources.get(surface1ResourceIndex).getJSONObject(surface1MediaIndex).get(mContext.getString(R.string.media_type)).equals(mContext.getString(R.string.video_uri))) {
+ Log.d(TAG, "correctRotation: playing video on surface 1.");
+ setProgressBars(0); //by passing 0 it won't fill the most recent progress bar
+ restartPlayer1();
+ startProgressBar();
+ }
+ else{
+ setProgressBars(1);
+ restartProgressBarRunnable();
+ }
+ }
+ else if(mCurrentSurface == SURFACE_2) {
+ final int surface2ResourceIndex = mResourceIndices.getJSONObject(SURFACE_2).getInt(mContext.getString(R.string.resource_index));
+ final int surface2MediaIndex = mResourceIndices.getJSONObject(SURFACE_2).getJSONArray(mContext.getString(R.string.media_index))
+ .getJSONObject(surface2ResourceIndex / 4).getInt(mContext.getString(R.string.media_index));
+ Log.d(TAG, "correctRotation: RESOURCE SURFACE OBJECT: " + mResources.get(surface2ResourceIndex).getJSONObject(surface2MediaIndex).get(mContext.getString(R.string.media_type)));
+ if(mResources.get(surface2ResourceIndex).getJSONObject(surface2MediaIndex).get(mContext.getString(R.string.media_type)).equals(mContext.getString(R.string.video_uri))) {
+ Log.d(TAG, "correctRotation: playing video on surface 2.");
+ setProgressBars(0);
+ restartPlayer2();
+ startProgressBar();
+ }
+ else{
+ setProgressBars(1);
+ restartProgressBarRunnable();
+ }
+ }
+ else if(mCurrentSurface == SURFACE_3) {
+ final int surface3ResourceIndex = mResourceIndices.getJSONObject(SURFACE_3).getInt(mContext.getString(R.string.resource_index));
+ final int surface3MediaIndex = mResourceIndices.getJSONObject(SURFACE_3).getJSONArray(mContext.getString(R.string.media_index))
+ .getJSONObject(surface3ResourceIndex / 4).getInt(mContext.getString(R.string.media_index));
+ Log.d(TAG, "correctRotation: RESOURCE SURFACE OBJECT: " + mResources.get(surface3ResourceIndex).getJSONObject(surface3MediaIndex).get(mContext.getString(R.string.media_type)));
+ if(mResources.get(surface3ResourceIndex).getJSONObject(surface3MediaIndex).get(mContext.getString(R.string.media_type)).equals(mContext.getString(R.string.video_uri))) {
+ Log.d(TAG, "correctRotation: playing video on surface 3.");
+ setProgressBars(0);
+ restartPlayer3();
+ startProgressBar();
+ }
+ else{
+ setProgressBars(1);
+ restartProgressBarRunnable();
+ }
+ }
+ else if(mCurrentSurface == SURFACE_4) {
+ final int surface4ResourceIndex = mResourceIndices.getJSONObject(SURFACE_4).getInt(mContext.getString(R.string.resource_index));
+ final int surface4MediaIndex = mResourceIndices.getJSONObject(SURFACE_4).getJSONArray(mContext.getString(R.string.media_index))
+ .getJSONObject(surface4ResourceIndex / 4).getInt(mContext.getString(R.string.media_index));
+ Log.d(TAG, "correctRotation: RESOURCE SURFACE OBJECT: " + mResources.get(surface4ResourceIndex).getJSONObject(surface4MediaIndex).get(mContext.getString(R.string.media_type)));
+ if(mResources.get(surface4ResourceIndex).getJSONObject(surface4MediaIndex).get(mContext.getString(R.string.media_type)).equals(mContext.getString(R.string.video_uri))) {
+ Log.d(TAG, "correctRotation: playing video on surface 4.");
+ setProgressBars(0);
+ restartPlayer4();
+ startProgressBar();
+ }
+ else{
+ setProgressBars(1);
+ restartProgressBarRunnable();
+ }
+ }
+
+
+ }catch (JSONException e){
+ e.printStackTrace();
+ }
+ mProgressBarInitHandler.removeCallbacks(mProgressBarInitRunnable);
+ }
+ }
+ };
+ mProgressBarInitRunnable.run();
+ renderContinuously();
+
+
+ }catch (JSONException e){
+ e.printStackTrace();
+ }
+ }
+
+ private void printCurrentResources(){
+ Log.d(TAG, "printCurrentResources: printing resources");
+
+ int surface1ResourceIndex = 0;
+ try{
+ surface1ResourceIndex = mResourceIndices.getJSONObject(SURFACE_1).getInt(mContext.getString(R.string.resource_index));
+ } catch (JSONException e) {
+ e.printStackTrace();
+ }
+ catch (NullPointerException e) {
+ e.printStackTrace();
+ }
+ Log.d(TAG, "printCurrentResources: SURFACE 1 \n---------------------------------------------------------------------------------------");
+ for(int i = 0; i < mHighestNumberMedia; i++){
+ try {
+ Log.d(TAG, "printCurrentResources: SURFACE 1 " + i + " : " + mResources.get(surface1ResourceIndex).getJSONObject(i).get(mContext.getString(R.string.media_type)));
+ } catch (JSONException e) {
+ Log.e(TAG, "printCurrentResources: SURFACE 1 JSONEXCEPTION: " + e.getMessage() );
+ }
+ catch (NullPointerException e) {
+ e.printStackTrace();
+ }
+ }
+
+ int surface2ResourceIndex = 0;
+ try{
+ surface2ResourceIndex = mResourceIndices.getJSONObject(SURFACE_2).getInt(mContext.getString(R.string.resource_index));
+ } catch (JSONException e) {
+ e.printStackTrace();
+ }
+ catch (NullPointerException e) {
+ e.printStackTrace();
+ }
+ Log.d(TAG, "printCurrentResources: SURFACE 2 \n---------------------------------------------------------------------------------------");
+ for(int i = 0; i < mHighestNumberMedia; i++){
+ try {
+ Log.d(TAG, "printCurrentResources: SURFACE 2 " + i + " : " + mResources.get(surface2ResourceIndex).getJSONObject(i).get(mContext.getString(R.string.media_type)));
+ } catch (JSONException e) {
+ Log.e(TAG, "printCurrentResources: SURFACE 2 JSONEXCEPTION: " + e.getMessage() );
+ }
+ catch (NullPointerException e) {
+ e.printStackTrace();
+ }
+ }
+
+ int surface3ResourceIndex = 0;
+ try{
+ surface3ResourceIndex = mResourceIndices.getJSONObject(SURFACE_3).getInt(mContext.getString(R.string.resource_index));
+ } catch (JSONException e) {
+ e.printStackTrace();
+ }
+ catch (NullPointerException e) {
+ e.printStackTrace();
+ }
+ Log.d(TAG, "printCurrentResources: SURFACE 3 \n---------------------------------------------------------------------------------------");
+ for(int i = 0; i < mHighestNumberMedia; i++){
+ try {
+ Log.d(TAG, "printCurrentResources: SURFACE 3 " + i + " : " + mResources.get(surface3ResourceIndex).getJSONObject(i).get(mContext.getString(R.string.media_type)));
+ } catch (JSONException e) {
+ Log.e(TAG, "printCurrentResources: SURFACE 3 JSONEXCEPTION: " + e.getMessage() );
+ }
+ catch (NullPointerException e) {
+ e.printStackTrace();
+ }
+ }
+
+
+ int surface4ResourceIndex = 0;
+ try{
+ surface4ResourceIndex = mResourceIndices.getJSONObject(SURFACE_4).getInt(mContext.getString(R.string.resource_index));
+ } catch (JSONException e) {
+ e.printStackTrace();
+ }
+ catch (NullPointerException e) {
+ e.printStackTrace();
+ }
+ Log.d(TAG, "printCurrentResources: SURFACE 4 \n---------------------------------------------------------------------------------------");
+ for(int i = 0; i < mHighestNumberMedia; i++){
+ try {
+ Log.d(TAG, "printCurrentResources: SURFACE 4 " + i + " : " + mResources.get(surface4ResourceIndex).getJSONObject(i).get(mContext.getString(R.string.media_type)));
+ } catch (JSONException e) {
+ Log.e(TAG, "printCurrentResources: SURFACE 4 JSONEXCEPTION: " + e.getMessage() );
+ }
+ catch (NullPointerException e) {
+ e.printStackTrace();
+ }
+ }
+
+
+// int surface5ResourceIndex = 0;
+// try{
+// surface5ResourceIndex = mResourceIndices.getJSONObject(SURFACE_1).getInt(mContext.getString(R.string.resource_index));
+// } catch (JSONException e) {
+// e.printStackTrace();
+// }
+// catch (NullPointerException e) {
+// e.printStackTrace();
+// }
+// Log.d(TAG, "printCurrentResources: SURFACE 5 \n---------------------------------------------------------------------------------------");
+// for(int i = 0; i < mHighestNumberMedia; i++){
+// try {
+// Log.d(TAG, "printCurrentResources: SURFACE 5 " + i + " : " + mResources.get(4).getJSONObject(i).get(mContext.getString(R.string.media_type)));
+// } catch (JSONException e) {
+// Log.e(TAG, "printCurrentResources: SURFACE 5 JSONEXCEPTION: " + e.getMessage() );
+// }
+// catch (NullPointerException e) {
+// Log.e(TAG, "printCurrentResources: SURFACE 5 NullPointerException: " + e.getMessage() );
+// }
+// }
+//
+//
+// int surface6ResourceIndex = 0;
+// try{
+// surface6ResourceIndex = mResourceIndices.getJSONObject(SURFACE_2).getInt(mContext.getString(R.string.resource_index));
+// } catch (JSONException e) {
+// e.printStackTrace();
+// }
+// catch (NullPointerException e) {
+// e.printStackTrace();
+// }
+// Log.d(TAG, "printCurrentResources: SURFACE 6 \n---------------------------------------------------------------------------------------");
+// for(int i = 0; i < mHighestNumberMedia; i++){
+// try {
+// Log.d(TAG, "printCurrentResources: SURFACE 6 " + i + " : " + mResources.get(5).getJSONObject(i).get(mContext.getString(R.string.media_type)));
+// } catch (JSONException e) {
+// Log.e(TAG, "printCurrentResources: SURFACE 6 JSONEXCEPTION: " + e.getMessage() );
+// }
+// catch (NullPointerException e) {
+// e.printStackTrace();
+// }
+// }
+ }
+
+// private void resetMediaIndex(int surfaceNum, int resourceIndex, int direction) {
+// Log.d(TAG, "resetMediaIndex: direction: " + direction);
+// Log.d(TAG, "resetMediaIndex: resetting media index for resource " + resourceIndex);
+//
+// stopPlayers();
+//
+// resetPlayerDefaults(surfaceNum);
+// if(surfaceNum == SURFACE_1){
+// hasFirstVideo1Played = false;
+// isImage1Set = false;
+// }
+// else if(surfaceNum == SURFACE_2){
+// hasFirstVideo2Played = false;
+// isImage2Set = false;
+// }
+// else if(surfaceNum == SURFACE_3){
+// hasFirstVideo3Played = false;
+// isImage3Set = false;
+// }
+// else if(surfaceNum == SURFACE_4){
+// hasFirstVideo4Played = false;
+// isImage4Set = false;
+// }
+//
+// Log.d(TAG, "resetMediaIndex: resource index: " + resourceIndex);
+//
+//
+// int numResourcesForIndex = 0;
+// try{
+// numResourcesForIndex = mResources.get(resourceIndex).length();
+// }catch (NullPointerException e){
+// e.printStackTrace();
+// }
+// Log.d(TAG, "resetMediaIndex: num resources for index: " + numResourcesForIndex);
+//// Log.d(TAG, "resetMediaIndex: media index: " + mediaIndex);
+// boolean foundFirstVideo = false;
+// for (int i = 0; i < numResourcesForIndex; i++) {
+// Log.d(TAG, "resetMediaIndex: i: " + i);
+// String mediaType = "";
+// try {
+// mediaType = mResources.get(resourceIndex).getJSONObject(i).getString(mContext.getString(R.string.media_type));
+// } catch (NullPointerException e) {
+// mediaType = "none";
+// e.printStackTrace();
+// }
+// catch (JSONException e) {
+// mediaType = "none";
+// e.printStackTrace();
+// }
+// // find the first video and buffer it
+// if(mediaType.equals(mContext.getString(R.string.video_uri)) && !foundFirstVideo){
+// foundFirstVideo = true;
+// bufferFirstVideo(surfaceNum, i);
+// }
+// else if(mediaType.equals(mContext.getString(R.string.video_uri))){
+// bufferNextVideo(surfaceNum, i);
+// break;
+// }
+// }
+// }
+
+ private void resetPlayerDefaults(int surfaceNum){
+ if(surfaceNum == SURFACE_1){
+ Log.d(TAG, "resetPlayerDefaults: resetting player 1 to defaults.");
+ setPlayerState(ACTIVE_PLAYER);
+ setSecondaryPlayerState(NOT_ACTIVE_PLAYER);
+ mPlayer.release();
+ mSecondaryPlayer.release();
+ initPlayer1();
+ }
+ else if(surfaceNum == SURFACE_2){
+ Log.d(TAG, "resetPlayerDefaults: resetting player 2 to defaults.");
+ setPlayer2State(ACTIVE_PLAYER);
+ setSecondaryPlayer2State(NOT_ACTIVE_PLAYER);
+ mPlayer2.release();
+ mSecondaryPlayer2.release();
+ initPlayer2();
+ }
+ else if(surfaceNum == SURFACE_3){
+ Log.d(TAG, "resetPlayerDefaults: resetting player 3 to defaults.");
+ setPlayer3State(ACTIVE_PLAYER);
+ setSecondaryPlayer3State(NOT_ACTIVE_PLAYER);
+ mPlayer3.release();
+ mSecondaryPlayer3.release();
+ initPlayer3();
+ }
+ else if(surfaceNum == SURFACE_4){
+ Log.d(TAG, "resetPlayerDefaults: resetting player 4 to defaults.");
+ setPlayer4State(ACTIVE_PLAYER);
+ setSecondaryPlayer4State(NOT_ACTIVE_PLAYER);
+ mPlayer4.release();
+ mSecondaryPlayer4.release();
+ initPlayer4();
+ }
+ }
+
+
+ private void rotateCounterClockwise(GL10 gl){
+// Log.d(TAG, "rotateCounterClockwise: rotating.");
+// Log.d(TAG, "rotateCounterClockwise: ");
+
+ if(angleRectangle > 0){
+ angleRectangle = 0;
+ mRotateCounterClockwise = false;
+ if(mCurrentSurface == SURFACE_1){
+ unpausePlayer1();
+ }
+ else if(mCurrentSurface == SURFACE_2){
+ unpausePlayer2();
+ }
+ else if(mCurrentSurface == SURFACE_3){
+ unpausePlayer3();
+ }
+ else if(mCurrentSurface == SURFACE_4){
+ unpausePlayer4();
+ }
+ }
+ else if(angleRectangle < mAngleFinished){
+ angleRectangle = mAngleFinished;
+ mRotateCounterClockwise = false;
+ if(mCurrentSurface == SURFACE_1){
+ unpausePlayer1();
+ }
+ else if(mCurrentSurface == SURFACE_2){
+ unpausePlayer2();
+ }
+ else if(mCurrentSurface == SURFACE_3){
+ unpausePlayer3();
+ }
+ else if(mCurrentSurface == SURFACE_4){
+ unpausePlayer4();
+ }
+ }
+ else{
+ if(Math.abs(angleRectangle) > (Math.abs(settledAngle) + 90)){
+ mRotateCounterClockwise = false;
+ correctRotation();
+ return;
+ }
+ Log.d(TAG, "rotateCounterClockwise: rotating.");
+ if(angleRectangle > settledAngle + 89){
+ mRotateCounterClockwise = false;
+ correctRotation();
+ return;
+ }
+ else if(angleRectangle < settledAngle - 89){
+ mRotateCounterClockwise = false;
+ correctRotation();
+ return;
+ }
+ angleRectangle = angleRectangle + STEP_SIZE;
+// Log.d(TAG, "rotateCounterClockwise: angle: " + angleRectangle);
+// Log.d(TAG, "rotateCounterClockwise: %: " + Math.abs(angleRectangle) % 90);
+ if(Math.abs(angleRectangle) % 90 > 0 && Math.abs(angleRectangle) % 90 < STEP_SIZE){
+ Log.d(TAG, "rotateCounterClockwise: Rotation Complete.");
+ mRotateCounterClockwise = false;
+ correctRotation();
+ }
+ }
+ }
+
+ private void rotateClockwise(GL10 gl){
+// Log.d(TAG, "rotateClockwise: rotating.");
+// Log.d(TAG, "rotateClockwise: ");
+ if(angleRectangle > 0){
+ angleRectangle = 0;
+ mRotateClockwise = false;
+ if(mCurrentSurface == SURFACE_1){
+ unpausePlayer1();
+ }
+ else if(mCurrentSurface == SURFACE_2){
+ unpausePlayer2();
+ }
+ else if(mCurrentSurface == SURFACE_3){
+ unpausePlayer3();
+ }
+ else if(mCurrentSurface == SURFACE_4){
+ unpausePlayer4();
+ }
+ }
+ else if(angleRectangle < mAngleFinished){
+ angleRectangle = mAngleFinished;
+ mRotateClockwise = false;
+ if(mCurrentSurface == SURFACE_1){
+ unpausePlayer1();
+ }
+ else if(mCurrentSurface == SURFACE_2){
+ unpausePlayer2();
+ }
+ else if(mCurrentSurface == SURFACE_3){
+ unpausePlayer3();
+ }
+ else if(mCurrentSurface == SURFACE_4){
+ unpausePlayer4();
+ }
+ }
+ else{
+ if(Math.abs(angleRectangle) > (Math.abs(settledAngle) + 90)){
+ mRotateClockwise = false;
+ correctRotation();
+ return;
+ }
+
+ Log.d(TAG, "rotateClockwise: rotating.");
+ if(angleRectangle > settledAngle + 89){
+ mRotateClockwise = false;
+ correctRotation();
+ return;
+ }
+ else if(angleRectangle < settledAngle - 89){
+ mRotateClockwise = false;
+ correctRotation();
+ return;
+ }
+ angleRectangle = angleRectangle - STEP_SIZE;
+// Log.d(TAG, "rotateClockwise: angle: " + angleRectangle);
+// Log.d(TAG, "rotateClockwise: %: " + Math.abs(angleRectangle) % 90);
+ if(Math.abs(angleRectangle) % 90 > 0 && Math.abs(angleRectangle) % 90 < STEP_SIZE && mStartingAngle < 89){
+// Log.d(TAG, "rotateClockwise: Rotation Complete.");
+ mRotateClockwise = false;
+ correctRotation();
+ }
+ }
+ }
+
+ private void correctRotation(){
+ angleRectangle = round(angleRectangle / 10) * 10;
+ if(angleRectangle > settledAngle + 89){
+ deincrementSurface();
+ settledAngle = angleRectangle;
+ }
+ else if(angleRectangle < settledAngle - 89){
+ incrementSurface();
+ settledAngle = angleRectangle;
+ }
+ else{
+ try{
+ final int surfaceResourceIndex = mResourceIndices.getJSONObject(mCurrentSurface).getInt(mContext.getString(R.string.resource_index));
+ final int surfaceMediaIndex = mResourceIndices.getJSONObject(mCurrentSurface).getJSONArray(mContext.getString(R.string.media_index))
+ .getJSONObject(surfaceResourceIndex / 4).getInt(mContext.getString(R.string.media_index));
+ Log.d(TAG, "correctRotation: RESOURCE SURFACE OBJECT: " + mResources.get(surfaceResourceIndex).getJSONObject(surfaceMediaIndex).get(mContext.getString(R.string.media_type)));
+ if(mCurrentSurface == SURFACE_1) {
+ Log.d(TAG, "correctRotation: playing video on surface 1.");
+ if(mResources.get(surfaceResourceIndex).getJSONObject(surfaceMediaIndex).get(mContext.getString(R.string.media_type)).equals(mContext.getString(R.string.video_uri))) {
+ unpausePlayer1();
+ }
+ }
+ else if(mCurrentSurface == SURFACE_2) {
+ Log.d(TAG, "correctRotation: playing video on surface 2.");
+ if(mResources.get(surfaceResourceIndex).getJSONObject(surfaceMediaIndex).get(mContext.getString(R.string.media_type)).equals(mContext.getString(R.string.video_uri))) {
+ unpausePlayer2();
+ }
+ }
+ else if(mCurrentSurface == SURFACE_3) {
+ Log.d(TAG, "correctRotation: playing video on surface 3.");
+ if(mResources.get(surfaceResourceIndex).getJSONObject(surfaceMediaIndex).get(mContext.getString(R.string.media_type)).equals(mContext.getString(R.string.video_uri))) {
+ unpausePlayer3();
+ }
+ }
+ else if(mCurrentSurface == SURFACE_4) {
+ Log.d(TAG, "correctRotation: playing video on surface 4.");
+ if(mResources.get(surfaceResourceIndex).getJSONObject(surfaceMediaIndex).get(mContext.getString(R.string.media_type)).equals(mContext.getString(R.string.video_uri))) {
+ unpausePlayer4();
+ }
+ }
+
+ }catch (JSONException e){
+ e.printStackTrace();
+ }
+ }
+ Log.d(TAG, "correctRotation: angle: " + angleRectangle);
+ }
+
+ private void setDepth(GL10 gl){
+ float max = -5.25f;
+ float min = -6.25f;
+ float A = (max - min) / 2;
+ float z;
+ double temp = A * -Math.abs(Math.sin(2 * (round(angleRectangle) * pi / 180))) + max;
+ String s = String.format("%.2f", temp);
+ z = Float.parseFloat(s);
+ depth = z;
+ }
+
+
+
+ /**
+ * setup the height, width and depth matrices
+ * @param width
+ * @param height
+ */
+ private void setMatrices(float width, float height){
+
+ float[][] heightMatrix = {
+ {-height, -height, height, height},
+ {-height, -height, height, height},
+ {-height, -height, height, height},
+ {-height, -height, height, height},
+ };
+ this.heightMatrix = heightMatrix;
+
+ float[][] widthMatrix = {
+ {-width, width, -width, width},
+ {width, width, width, width},
+ {width, -width, width, -width},
+ {-width, -width, -width, -width},
+ };
+ this.widthMatrix = widthMatrix;
+
+ float[][] depthMatrix = {
+ {width, width, width, width},
+ {width, -width, width, -width},
+ {-width, -width, -width, -width},
+ {-width, width, -width, width},
+ };
+ this.depthMatrix = depthMatrix;
+
+ }
+
+
+
+ public void pausePlayer(){
+ //////////////////
+ // Player1
+ if(mPlayerState == ACTIVE_PLAYER && mPlayer.getPlayWhenReady()){
+ Log.d(TAG, "pausePlayer: pausing Player1");
+ setPlayerState(PAUSED_PLAYER);
+ mPlayer.setPlayWhenReady(false);
+ }
+ else if (mSecondaryPlayerState == ACTIVE_PLAYER && mSecondaryPlayer.getPlayWhenReady()){
+ Log.d(TAG, "pausePlayer: pausing Secondary Player1");
+ setSecondaryPlayerState(PAUSED_PLAYER);
+ mSecondaryPlayer.setPlayWhenReady(false);
+ }
+
+ //////////////////
+ // Player2
+ else if (mPlayer2State == ACTIVE_PLAYER && mPlayer2.getPlayWhenReady()){
+ Log.d(TAG, "pausePlayer: pausing Player2");
+ setPlayer2State(PAUSED_PLAYER);
+ mPlayer2.setPlayWhenReady(false);
+ }
+ else if (mSecondaryPlayer2State == ACTIVE_PLAYER && mSecondaryPlayer2.getPlayWhenReady()){
+ Log.d(TAG, "pausePlayer: pausing Secondary Player2");
+ setSecondaryPlayer2State(PAUSED_PLAYER);
+ mSecondaryPlayer2.setPlayWhenReady(false);
+ }
+
+ //////////////////
+ // Player3
+ else if (mPlayer3State == ACTIVE_PLAYER && mPlayer3.getPlayWhenReady()){
+ Log.d(TAG, "pausePlayer: pausing Player3");
+ setPlayer3State(PAUSED_PLAYER);
+ mPlayer3.setPlayWhenReady(false);
+ }
+ else if (mSecondaryPlayer3State == ACTIVE_PLAYER && mSecondaryPlayer3.getPlayWhenReady()){
+ Log.d(TAG, "pausePlayer: pausing Secondary Player3");
+ setSecondaryPlayer3State(PAUSED_PLAYER);
+ mSecondaryPlayer3.setPlayWhenReady(false);
+ }
+
+ //////////////////
+ // Player4
+ else if (mPlayer4State == ACTIVE_PLAYER && mPlayer4.getPlayWhenReady()){
+ Log.d(TAG, "pausePlayer: pausing Player4");
+ setPlayer4State(PAUSED_PLAYER);
+ mPlayer4.setPlayWhenReady(false);
+ }
+ else if (mSecondaryPlayer4State == ACTIVE_PLAYER && mSecondaryPlayer4.getPlayWhenReady()){
+ Log.d(TAG, "pausePlayer: pausing Secondary Player4");
+ setSecondaryPlayer4State(PAUSED_PLAYER);
+ mSecondaryPlayer4.setPlayWhenReady(false);
+ }
+ }
+
    /**
     * Cancels the pending progress-bar tick, if any.
     * NOTE(review): despite the name this only removes the callback — it does
     * not re-post the runnable; restarting is done elsewhere. Confirm callers
     * expect that.
     */
    private void restartProgressBarRunnable(){
        Log.d(TAG, "restartProgressBarRunnable: removing progress runnable callback.");
        if(mProgressRunnable != null){
            mProgressHandler.removeCallbacks(mProgressRunnable);
        }
    }
+
    /**
     * Rewinds surface 1's currently paused player (primary or secondary) to
     * position 0 and marks it ACTIVE_PLAYER again.
     * NOTE(review): playback itself is not resumed here — setPlayWhenReady(true)
     * is presumably issued by the render loop once the state flips to active;
     * confirm.
     */
    public void restartPlayer1(){

        if(mPlayerState == PAUSED_PLAYER && !mPlayer.getPlayWhenReady()){
            Log.d(TAG, "restartPlayer1: unpausing Player1");
            mPlayer.seekTo(0);

            setPlayerState(ACTIVE_PLAYER);
        }
        else if (mSecondaryPlayerState == PAUSED_PLAYER && !mSecondaryPlayer.getPlayWhenReady()){
            Log.d(TAG, "restartPlayer1: unpausing secondary Player1");
            mSecondaryPlayer.seekTo(0);
            setSecondaryPlayerState(ACTIVE_PLAYER);
        }

    }

    /** Same as {@link #restartPlayer1()} but for surface 2's player pair. */
    public void restartPlayer2(){
        if(mPlayer2State == PAUSED_PLAYER && !mPlayer2.getPlayWhenReady()){
            Log.d(TAG, "restartPlayer2: unpausing Player2");
            mPlayer2.seekTo(0);
            setPlayer2State(ACTIVE_PLAYER);
        }
        else if (mSecondaryPlayer2State == PAUSED_PLAYER && !mSecondaryPlayer2.getPlayWhenReady()){
            Log.d(TAG, "restartPlayer2: unpausing secondary Player2");
            mSecondaryPlayer2.seekTo(0);
            setSecondaryPlayer2State(ACTIVE_PLAYER);
        }
    }

    /** Same as {@link #restartPlayer1()} but for surface 3's player pair. */
    public void restartPlayer3(){
        if(mPlayer3State == PAUSED_PLAYER && !mPlayer3.getPlayWhenReady()){
            Log.d(TAG, "restartPlayer3: unpausing Player3");
            mPlayer3.seekTo(0);
            setPlayer3State(ACTIVE_PLAYER);
        }
        else if (mSecondaryPlayer3State == PAUSED_PLAYER && !mSecondaryPlayer3.getPlayWhenReady()){
            Log.d(TAG, "restartPlayer3: unpausing secondary Player3");
            mSecondaryPlayer3.seekTo(0);
            setSecondaryPlayer3State(ACTIVE_PLAYER);
        }
    }

    /** Same as {@link #restartPlayer1()} but for surface 4's player pair. */
    public void restartPlayer4(){
        if(mPlayer4State == PAUSED_PLAYER && !mPlayer4.getPlayWhenReady()){
            Log.d(TAG, "restartPlayer4: unpausing Player4");
            mPlayer4.seekTo(0);
            setPlayer4State(ACTIVE_PLAYER);
        }
        else if (mSecondaryPlayer4State == PAUSED_PLAYER && !mSecondaryPlayer4.getPlayWhenReady()){
            Log.d(TAG, "restartPlayer4: unpausing secondary Player4");
            mSecondaryPlayer4.seekTo(0);
            setSecondaryPlayer4State(ACTIVE_PLAYER);
        }
    }
+
+ public void unpausePlayer1(){
+ if(mPlayerState == PAUSED_PLAYER && !mPlayer.getPlayWhenReady()){
+ Log.d(TAG, "unpausePlayer1: unpausing Player1");
+ setPlayerState(ACTIVE_PLAYER);
+ }
+ else if (mSecondaryPlayerState == PAUSED_PLAYER && !mSecondaryPlayer.getPlayWhenReady()){
+ Log.d(TAG, "unpausePlayer1: unpausing secondary Player1");
+ setSecondaryPlayerState(ACTIVE_PLAYER);
+ }
+ }
+
+ public void unpausePlayer2(){
+ if(mPlayer2State == PAUSED_PLAYER && !mPlayer2.getPlayWhenReady()){
+ Log.d(TAG, "unpausePlayer2: unpausing Player2");
+ setPlayer2State(ACTIVE_PLAYER);
+ }
+ else if (mSecondaryPlayer2State == PAUSED_PLAYER && !mSecondaryPlayer2.getPlayWhenReady()){
+ Log.d(TAG, "unpausePlayer2: unpausing secondary Player2");
+ setSecondaryPlayer2State(ACTIVE_PLAYER);
+ }
+ }
+
+ public void unpausePlayer3(){
+ if(mPlayer3State == PAUSED_PLAYER && !mPlayer3.getPlayWhenReady()){
+ Log.d(TAG, "unpausePlayer3: unpausing Player3");
+ setPlayer3State(ACTIVE_PLAYER);
+ }
+ else if (mSecondaryPlayer3State == PAUSED_PLAYER && !mSecondaryPlayer3.getPlayWhenReady()){
+ Log.d(TAG, "unpausePlayer3: unpausing secondary Player3");
+ setSecondaryPlayer3State(ACTIVE_PLAYER);
+ }
+ }
+
+ public void unpausePlayer4(){
+ if(mPlayer4State == PAUSED_PLAYER && !mPlayer4.getPlayWhenReady()){
+ Log.d(TAG, "unpausePlayer4: unpausing Player4");
+ setPlayer4State(ACTIVE_PLAYER);
+ }
+ else if (mSecondaryPlayer4State == PAUSED_PLAYER && !mSecondaryPlayer4.getPlayWhenReady()){
+ Log.d(TAG, "unpausePlayer4: unpausing secondary Player4");
+ setSecondaryPlayer4State(ACTIVE_PLAYER);
+ }
+ }
+
    /** Switches the GLSurfaceView to on-demand rendering (render only on requestRender()). */
    private void pauseRendering(){
        mGLView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
    }

    /** Switches the GLSurfaceView back to continuous (every-frame) rendering. */
    private void renderContinuously(){
        mGLView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
    }
+
    /**
     * Advances the current surface to its next media item, or triggers a
     * clockwise cube rotation when the item just shown was the last one.
     * <p>
     * Steps: fill the active progress bar, pause the active player, read this
     * surface's resource/media indices from mResourceIndices, and either bump
     * the media index (invalidating cached vertex buffers and image flags so
     * the new item is rebound) or set mRotateClockwise.
     *
     * @throws JSONException if the index bookkeeping JSON is malformed
     */
    public void incrementMediaIndex() throws JSONException {
        fillCurrentProgressBar();
        pausePlayer();
        // resourceIndex / 4 selects the per-surface slot inside the media_index
        // array (the four surfaces cycle through the resource list).
        int resourceIndex = mResourceIndices.getJSONObject(mCurrentSurface).getInt(mContext.getString(R.string.resource_index));
        int mediaIndex = mResourceIndices.getJSONObject(mCurrentSurface).getJSONArray(mContext.getString(R.string.media_index))
                .getJSONObject( resourceIndex / 4).getInt(mContext.getString(R.string.media_index));
        Log.d(TAG, "incrementMediaIndex: media index: " + mediaIndex);

        int numResourcesForIndex = mResources.get(resourceIndex).length();
        if(mediaIndex < numResourcesForIndex - 1) {
            Log.d(TAG, "incrementMediaIndex: incrementing index.");
            mediaIndex++;
            // Write the incremented index back through the nested JSON structure.
            JSONObject surfaceObject = mResourceIndices.getJSONObject(mCurrentSurface);
            JSONArray objectArray = surfaceObject.getJSONArray(mContext.getString(R.string.media_index));
            JSONObject mediaIndexObj = objectArray.getJSONObject(resourceIndex / 4);
            mediaIndexObj.put(mContext.getString(R.string.media_index), mediaIndex);
            objectArray.put(resourceIndex / 4, mediaIndexObj);
            surfaceObject.put(mContext.getString(R.string.media_index), objectArray);
            mResourceIndices.put(mCurrentSurface, surfaceObject);
            Log.d(TAG, "incrementMediaIndex: RESOURCE SURFACE OBJECT: " + mResourceIndices.get(mCurrentSurface).toString());
            // Invalidate cached geometry/texture state so the renderer rebinds
            // the new media item for this surface.
            mVertexBuffers[mCurrentSurface][1] = null;
            mVertexBuffers[mCurrentSurface][2] = null;
            if (mCurrentSurface == SURFACE_1) {
                isImage1Set = false;
            } else if (mCurrentSurface == SURFACE_2) {
                isImage2Set = false;
            } else if (mCurrentSurface == SURFACE_3) {
                isImage3Set = false;
            } else if (mCurrentSurface == SURFACE_4) {
                isImage4Set = false;
            }

            playNextVideo();
            startProgressBar();
        }
        else if(mediaIndex == numResourcesForIndex - 1){
            // Last item on this surface: hand off to the rotation logic.
            Log.d(TAG, "incrementMediaIndex: rotating to next surface.");
            mRotateClockwise = true;
        }

    }
+
    /**
     * Halts playback on all eight players (primary and secondary for each of
     * the four surfaces) without releasing them or touching the state fields.
     */
    private void stopPlayers(){
        Log.d(TAG, "stopPlayers: stopping players");
        mPlayer.setPlayWhenReady(false);
        mSecondaryPlayer.setPlayWhenReady(false);
        mPlayer2.setPlayWhenReady(false);
        mSecondaryPlayer2.setPlayWhenReady(false);
        mPlayer3.setPlayWhenReady(false);
        mSecondaryPlayer3.setPlayWhenReady(false);
        mPlayer4.setPlayWhenReady(false);
        mSecondaryPlayer4.setPlayWhenReady(false);
    }
+
+ private void bufferFirstVideo(int surfaceNum, int mediaIndex){
+ try {
+ Log.d(TAG, "bufferFirstVideo: buffering first video.");
+ int resourceIndex = mResourceIndices.getJSONObject(surfaceNum).getInt(mContext.getString(R.string.resource_index));
+ Log.d(TAG, "bufferFirstVideo: resource index: " + resourceIndex);
+ Log.d(TAG, "bufferFirstVideo: media index: " + mediaIndex);
+ MediaSource firstMediaSource = (MediaSource) mResources.get(resourceIndex).getJSONObject(mediaIndex).get(mContext.getString(R.string.media_source));
+
+ if (firstMediaSource != null) {
+ if(surfaceNum == SURFACE_1 && !hasFirstVideo1Played){
+ mPlayer.setPlayWhenReady(false);
+ Log.d(TAG, "bufferFirstVideo: buffering first video for player 1.");
+ mPlayer.prepare(firstMediaSource);
+ if(mediaIndex == 0){
+ initVideoSurface1(PLAYER_ONE);
+ hasFirstVideo1Played = true;
+ initProgressBars();
+ }
+ }
+ else if(surfaceNum == SURFACE_2 && !hasFirstVideo2Played){
+ mPlayer2.setPlayWhenReady(false);
+ Log.d(TAG, "bufferFirstVideo: buffering first video for player 2.");
+ mPlayer2.prepare(firstMediaSource);
+ if(mediaIndex == 0){
+ initVideoSurface2(PLAYER_TWO);
+ hasFirstVideo2Played = true;
+ initProgressBars();
+ }
+ }
+ else if(surfaceNum == SURFACE_3 && !hasFirstVideo3Played){
+ mPlayer3.setPlayWhenReady(false);
+ Log.d(TAG, "bufferFirstVideo: buffering first video for player 3.");
+ mPlayer3.prepare(firstMediaSource);
+ if(mediaIndex == 0){
+ initVideoSurface3(PLAYER_THREE);
+ hasFirstVideo3Played = true;
+ initProgressBars();
+ }
+ }
+ else if(surfaceNum == SURFACE_4 && !hasFirstVideo4Played){
+ mPlayer4.setPlayWhenReady(false);
+ Log.d(TAG, "bufferFirstVideo: buffering first video for player 4.");
+ mPlayer4.prepare(firstMediaSource);
+ if(mediaIndex == 0){
+ initVideoSurface4(PLAYER_FOUR);
+ hasFirstVideo4Played = true;
+ initProgressBars();
+ }
+ }
+ }
+ } catch (JSONException e){
+ Log.e(TAG, "bufferFirstVideo: NullPointerException: " + e.getMessage() ); //null pointer for if there is no video
+ }
+ }
+
+
+ private void bufferNextVideo(int surfaceNum){
+ try {
+ // Log.d(TAG, "bufferNextVideo: buffering next video");
+ //look for the next video
+ int resourceIndex = mResourceIndices.getJSONObject(surfaceNum).getInt(mContext.getString(R.string.resource_index));
+ int mediaIndex = mResourceIndices.getJSONObject(surfaceNum).getJSONArray(mContext.getString(R.string.media_index))
+ .getJSONObject(resourceIndex / 4).getInt(mContext.getString(R.string.media_index));
+ int numResourcesForIndex = mResources.get(resourceIndex).length();
+ Log.d(TAG, "bufferNextVideo: num resources for index: " + numResourcesForIndex);
+ MediaSource nextMediaSource = null;
+ Log.d(TAG, "bufferNextVideo: media index: " + mediaIndex);
+ for (int i = mediaIndex + 1; i < numResourcesForIndex; i++) {
+ Log.d(TAG, "bufferNextVideo: i: " + i);
+ String mediaType = "";
+ try {
+ mediaType = mResources.get(resourceIndex).getJSONObject(i).getString(mContext.getString(R.string.media_type));
+ } catch (NullPointerException e) {
+ mediaType = "none";
+ e.printStackTrace();
+ }
+ catch (JSONException e) {
+ mediaType = "none";
+ e.printStackTrace();
+ }
+
+ if (mediaType.equals(mContext.getString(R.string.video_uri))) {
+ //then we have our next video resource
+ nextMediaSource = (MediaSource) mResources.get(resourceIndex).getJSONObject(i).get(mContext.getString(R.string.media_source));
+ Log.d(TAG, "bufferNextVideo: media index, next video: " + i +" , " + mResources.get(resourceIndex).getJSONObject(i).get(mContext.getString(R.string.video_uri)));
+ break;
+ }
+ }
+
+
+ if (nextMediaSource != null) {
+ bufferMediaSource(surfaceNum, nextMediaSource);
+ }
+
+ } catch (JSONException e){
+ e.printStackTrace();
+ }
+ }
+
+
    /**
     * Prepares the given media source on the INACTIVE player of the surface's
     * primary/secondary pair, so playback can swap over seamlessly when the
     * active one finishes.
     *
     * @param surfaceNum      one of SURFACE_1..SURFACE_4
     * @param nextMediaSource the source to pre-buffer (never played here)
     */
    private void bufferMediaSource(int surfaceNum, MediaSource nextMediaSource){
        Log.d(TAG, "bufferMediaSource: buffering next media source.");
        Log.d(TAG, "bufferMediaSource: surface #: " + surfaceNum);

        if(surfaceNum == SURFACE_1){
            // Whichever player is active, buffer on the other one.
            if (mPlayerState == ACTIVE_PLAYER) {
                mSecondaryPlayer.setPlayWhenReady(false);
                Log.d(TAG, "bufferMediaSource: buffering next video for secondary player.");
                mSecondaryPlayer.prepare(nextMediaSource);
            } else if (mSecondaryPlayerState == ACTIVE_PLAYER) {
                mPlayer.setPlayWhenReady(false);
                Log.d(TAG, "bufferMediaSource: buffering next video for player 1.");
                mPlayer.prepare(nextMediaSource);
            }
        }
        else if(surfaceNum == SURFACE_2){
            if (mPlayer2State == ACTIVE_PLAYER) {
                mSecondaryPlayer2.setPlayWhenReady(false);
                Log.d(TAG, "bufferMediaSource: buffering next video for secondary player 2.");
                mSecondaryPlayer2.prepare(nextMediaSource);
            } else if (mSecondaryPlayer2State == ACTIVE_PLAYER) {
                mPlayer2.setPlayWhenReady(false);
                Log.d(TAG, "bufferMediaSource: buffering next video for player 2.");
                mPlayer2.prepare(nextMediaSource);
            }
        }
        else if(surfaceNum == SURFACE_3){
            if (mPlayer3State == ACTIVE_PLAYER) {
                mSecondaryPlayer3.setPlayWhenReady(false);
                Log.d(TAG, "bufferMediaSource: buffering next video for secondary player 3.");
                mSecondaryPlayer3.prepare(nextMediaSource);
            } else if (mSecondaryPlayer3State == ACTIVE_PLAYER) {
                mPlayer3.setPlayWhenReady(false);
                Log.d(TAG, "bufferMediaSource: buffering next video for player 3.");
                mPlayer3.prepare(nextMediaSource);
            }
        }
        else if(surfaceNum == SURFACE_4){
            if (mPlayer4State == ACTIVE_PLAYER) {
                mSecondaryPlayer4.setPlayWhenReady(false);
                Log.d(TAG, "bufferMediaSource: buffering next video for secondary player 4.");
                mSecondaryPlayer4.prepare(nextMediaSource);
            } else if (mSecondaryPlayer4State == ACTIVE_PLAYER) {
                mPlayer4.setPlayWhenReady(false);
                Log.d(TAG, "bufferMediaSource: buffering next video for player 4.");
                mPlayer4.prepare(nextMediaSource);
            }
        }
    }
+
    /**
     * Overload that buffers a SPECIFIC media item (by index) on the inactive
     * player of the given surface's pair, rather than scanning forward like
     * {@link #bufferNextVideo(int)}.
     * NOTE(review): largely duplicates bufferMediaSource's per-surface
     * dispatch; consider routing through it — confirm call sites first.
     *
     * @param surfaceNum one of SURFACE_1..SURFACE_4
     * @param mediaIndex index of the media item to buffer within the resource
     */
    private void bufferNextVideo(int surfaceNum, int mediaIndex){
        try {
            int resourceIndex = mResourceIndices.getJSONObject(surfaceNum).getInt(mContext.getString(R.string.resource_index));
            MediaSource nextMediaSource = (MediaSource) mResources.get(resourceIndex).getJSONObject(mediaIndex).get(mContext.getString(R.string.media_source));

            if (nextMediaSource != null) {
                if(surfaceNum == SURFACE_1){
                    // Prepare on whichever player of the pair is not active.
                    if (mPlayerState == ACTIVE_PLAYER) {
                        mSecondaryPlayer.setPlayWhenReady(false);
                        Log.d(TAG, "bufferNextVideo: buffering next video for secondary player1.");
                        mSecondaryPlayer.prepare(nextMediaSource);
                    }
                    else if (mSecondaryPlayerState == ACTIVE_PLAYER) {
                        mPlayer.setPlayWhenReady(false);
                        Log.d(TAG, "bufferNextVideo: buffering next video for player 1.");
                        mPlayer.prepare(nextMediaSource);
                    }
                }
                else if(surfaceNum == SURFACE_2){
                    if (mPlayer2State == ACTIVE_PLAYER) {
                        mSecondaryPlayer2.setPlayWhenReady(false);
                        Log.d(TAG, "bufferNextVideo: buffering next video for secondary player 2.");
                        mSecondaryPlayer2.prepare(nextMediaSource);
                    }
                    else if (mSecondaryPlayer2State == ACTIVE_PLAYER) {
                        mPlayer2.setPlayWhenReady(false);
                        Log.d(TAG, "bufferNextVideo: buffering next video for player 2.");
                        mPlayer2.prepare(nextMediaSource);
                    }
                }
                else if(surfaceNum == SURFACE_3){
                    if (mPlayer3State == ACTIVE_PLAYER) {
                        mSecondaryPlayer3.setPlayWhenReady(false);
                        Log.d(TAG, "bufferNextVideo: buffering next video for secondary player 3.");
                        mSecondaryPlayer3.prepare(nextMediaSource);
                    }
                    else if (mSecondaryPlayer3State == ACTIVE_PLAYER) {
                        mPlayer3.setPlayWhenReady(false);
                        Log.d(TAG, "bufferNextVideo: buffering next video for player 3.");
                        mPlayer3.prepare(nextMediaSource);
                    }
                }
                else if(surfaceNum == SURFACE_4){
                    if (mPlayer4State == ACTIVE_PLAYER) {
                        mSecondaryPlayer4.setPlayWhenReady(false);
                        Log.d(TAG, "bufferNextVideo: buffering next video for secondary player 4.");
                        mSecondaryPlayer4.prepare(nextMediaSource);
                    }
                    else if (mSecondaryPlayer4State == ACTIVE_PLAYER) {
                        mPlayer4.setPlayWhenReady(false);
                        Log.d(TAG, "bufferNextVideo: buffering next video for player 4.");
                        mPlayer4.prepare(nextMediaSource);
                    }
                }
            }

        } catch (JSONException e){
            e.printStackTrace();
        }
    }
+
    /**
     * If the current surface's current media item is a video, attaches the
     * video to that surface and swaps the active/inactive player pair: the
     * previously-buffering player becomes active, the old active one becomes
     * NOT_ACTIVE, and the next video starts buffering behind it.
     * On the very first video of a surface the primary player is used directly
     * and the hasFirstVideoNPlayed flag is set.
     */
    private void playNextVideo(){
        try{
            int resourceIndex = mResourceIndices.getJSONObject(mCurrentSurface).getInt(mContext.getString(R.string.resource_index));
            int mediaIndex = mResourceIndices.getJSONObject(mCurrentSurface).getJSONArray(mContext.getString(R.string.media_index))
                    .getJSONObject(resourceIndex / 4).getInt(mContext.getString(R.string.media_index));
            String currentMediaType = mResources.get(resourceIndex).getJSONObject(mediaIndex).getString(mContext.getString(R.string.media_type));
            Log.d(TAG, "playNextVideo: resource: " + mResourceIndices.getJSONObject(mCurrentSurface));
            // Images are handled elsewhere; only act when the item is a video.
            if(currentMediaType.equals(mContext.getString(R.string.video_uri))){
                if(mCurrentSurface == SURFACE_1) {
                    Log.d(TAG, "playNextVideo: current surface is 1.");
                    if(hasFirstVideo1Played){
                        Log.d(TAG, "playNextVideo: first video on surface 1 has played.");
                        // Swap the pair: deactivate the current player, attach
                        // the surface to the other one, and mark it active.
                        if(mPlayerState == ACTIVE_PLAYER || mPlayerState == PAUSED_PLAYER){
                            Log.d(TAG, "playNextVideo: init secondary player1 surface.");
                            setPlayerState(NOT_ACTIVE_PLAYER);
                            initVideoSurface1(PLAYER_ONE_SECONDARY);
                            setSecondaryPlayerState(ACTIVE_PLAYER);
                        }
                        else if(mSecondaryPlayerState == ACTIVE_PLAYER || mSecondaryPlayerState == PAUSED_PLAYER){
                            Log.d(TAG, "playNextVideo: init player1 surface.");
                            setSecondaryPlayerState(NOT_ACTIVE_PLAYER);
                            initVideoSurface1(PLAYER_ONE);
                            setPlayerState(ACTIVE_PLAYER);
                        }
                    }
                    else{
                        Log.d(TAG, "playNextVideo: hasFirstVideoPlayed1 is now TRUE.");
                        initVideoSurface1(PLAYER_ONE);
                        hasFirstVideo1Played = true;
                    }

                    // Start pre-buffering whatever video comes after this one.
                    bufferNextVideo(SURFACE_1);
                }
                else if(mCurrentSurface == SURFACE_2) {
                    Log.d(TAG, "playNextVideo: current surface is 2.");
                    if(hasFirstVideo2Played){
                        Log.d(TAG, "playNextVideo: first video on surface 2 has played.");
                        if(mPlayer2State == ACTIVE_PLAYER || mPlayer2State == PAUSED_PLAYER){
                            Log.d(TAG, "playNextVideo: init secondary player2 surface.");
                            setPlayer2State(NOT_ACTIVE_PLAYER);
                            initVideoSurface2(PLAYER_TWO_SECONDARY);
                            setSecondaryPlayer2State(ACTIVE_PLAYER);
                        }
                        else if(mSecondaryPlayer2State == ACTIVE_PLAYER || mSecondaryPlayer2State == PAUSED_PLAYER){
                            Log.d(TAG, "playNextVideo: init player2 surface.");
                            setSecondaryPlayer2State(NOT_ACTIVE_PLAYER);
                            initVideoSurface2(PLAYER_TWO);
                            setPlayer2State(ACTIVE_PLAYER);
                        }
                    }
                    else{
                        Log.d(TAG, "playNextVideo: hasFirstVideoPlayed2 is now TRUE.");
                        initVideoSurface2(PLAYER_TWO);
                        hasFirstVideo2Played = true;
                    }

                    bufferNextVideo(SURFACE_2);
                }
                else if(mCurrentSurface == SURFACE_3) {
                    Log.d(TAG, "playNextVideo: current surface is 3.");
                    if(hasFirstVideo3Played){
                        Log.d(TAG, "playNextVideo: first video on surface 3 has played.");
                        if(mPlayer3State == ACTIVE_PLAYER || mPlayer3State == PAUSED_PLAYER){
                            Log.d(TAG, "playNextVideo: init secondary player3 surface.");
                            setPlayer3State(NOT_ACTIVE_PLAYER);
                            initVideoSurface3(PLAYER_THREE_SECONDARY);
                            setSecondaryPlayer3State(ACTIVE_PLAYER);
                        }
                        else if(mSecondaryPlayer3State == ACTIVE_PLAYER || mSecondaryPlayer3State == PAUSED_PLAYER){
                            Log.d(TAG, "playNextVideo: init player3 surface.");
                            setSecondaryPlayer3State(NOT_ACTIVE_PLAYER);
                            initVideoSurface3(PLAYER_THREE);
                            setPlayer3State(ACTIVE_PLAYER);
                        }
                    }
                    else{
                        Log.d(TAG, "playNextVideo: hasFirstVideoPlayed3 is now TRUE.");
                        initVideoSurface3(PLAYER_THREE);
                        hasFirstVideo3Played = true;
                    }

                    bufferNextVideo(SURFACE_3);
                }
                else if(mCurrentSurface == SURFACE_4) {
                    Log.d(TAG, "playNextVideo: current surface is 4.");
                    if(hasFirstVideo4Played){
                        Log.d(TAG, "playNextVideo: first video on surface 4 has played.");
                        if(mPlayer4State == ACTIVE_PLAYER || mPlayer4State == PAUSED_PLAYER){
                            Log.d(TAG, "playNextVideo: init secondary player4 surface.");
                            setPlayer4State(NOT_ACTIVE_PLAYER);
                            initVideoSurface4(PLAYER_FOUR_SECONDARY);
                            setSecondaryPlayer4State(ACTIVE_PLAYER);
                        }
                        else if(mSecondaryPlayer4State == ACTIVE_PLAYER || mSecondaryPlayer4State == PAUSED_PLAYER){
                            Log.d(TAG, "playNextVideo: init player4 surface.");
                            setSecondaryPlayer4State(NOT_ACTIVE_PLAYER);
                            initVideoSurface4(PLAYER_FOUR);
                            setPlayer4State(ACTIVE_PLAYER);
                        }
                    }
                    else{
                        Log.d(TAG, "playNextVideo: hasFirstVideoPlayed4 is now TRUE.");
                        initVideoSurface4(PLAYER_FOUR);
                        hasFirstVideo4Played = true;
                    }

                    bufferNextVideo(SURFACE_4);
                }
            }

        } catch (JSONException e){
            // NOTE(review): catch is JSONException but the log text says
            // "NullPointerException" and names the wrong method — misleading.
            Log.e(TAG, "bufferNextVideo: NullPointerException: " + e.getMessage() ); //null pointer for if there is no video
        }
    }
+
+
    /**
     * Creates a fresh SurfaceTexture/Surface for GL texture 1 and attaches it
     * as the video output of surface 1's primary or secondary player.
     * Each new frame flips mUpdateST and requests a GL render.
     * NOTE(review): the previously-created mSurface/mSurfaceTexture are
     * overwritten without being released — possible native resource leak;
     * confirm whether release happens elsewhere.
     *
     * @param player PLAYER_ONE or PLAYER_ONE_SECONDARY
     */
    private void initVideoSurface1(final int player){
        mSurfaceTexture = new SurfaceTexture(textureId1[0]);

        mSurfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
            @Override
            public void onFrameAvailable(SurfaceTexture surfaceTexture) {
                // Mark the texture dirty and wake the renderer for one frame.
                mUpdateST = true;
                mGLView.requestRender();

            }
        });

        mSurface = new Surface(mSurfaceTexture);
        if(player == PLAYER_ONE){
            mPlayer.setVideoSurface(mSurface);
        }
        else if(player == PLAYER_ONE_SECONDARY){
            mSecondaryPlayer.setVideoSurface(mSurface);
        }

    }



    /** Same as {@link #initVideoSurface1(int)} for surface 2 / texture 2. */
    private void initVideoSurface2(final int player){

        mSurfaceTexture2 = new SurfaceTexture(textureId2[0]);

        mSurfaceTexture2.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
            @Override
            public void onFrameAvailable(SurfaceTexture surfaceTexture) {
                mUpdateST2 = true;
                mGLView.requestRender();
            }
        });

        mSurface2 = new Surface(mSurfaceTexture2);
        if(player == PLAYER_TWO){
            mPlayer2.setVideoSurface(mSurface2);
        }
        else if(player == PLAYER_TWO_SECONDARY){
            mSecondaryPlayer2.setVideoSurface(mSurface2);
        }
    }

    /** Same as {@link #initVideoSurface1(int)} for surface 3 / texture 3. */
    private void initVideoSurface3(final int player){

        mSurfaceTexture3 = new SurfaceTexture(textureId3[0]);
        mSurfaceTexture3.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
            @Override
            public void onFrameAvailable(SurfaceTexture surfaceTexture) {
                mUpdateST3 = true;
                mGLView.requestRender();
            }
        });
        mSurface3 = new Surface(mSurfaceTexture3);
        if(player == PLAYER_THREE){
            mPlayer3.setVideoSurface(mSurface3);
        }
        else if(player == PLAYER_THREE_SECONDARY){
            mSecondaryPlayer3.setVideoSurface(mSurface3);
        }
    }

    /** Same as {@link #initVideoSurface1(int)} for surface 4 / texture 4. */
    private void initVideoSurface4(final int player){
        mSurfaceTexture4 = new SurfaceTexture(textureId4[0]);
        mSurfaceTexture4.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
            @Override
            public void onFrameAvailable(SurfaceTexture surfaceTexture) {
                mUpdateST4 = true;
                mGLView.requestRender();
            }
        });
        mSurface4 = new Surface(mSurfaceTexture4);
        if(player == PLAYER_FOUR){
            mPlayer4.setVideoSurface(mSurface4);
        }
        else if(player == PLAYER_FOUR_SECONDARY){
            mSecondaryPlayer4.setVideoSurface(mSurface4);
        }
    }
+
+
+
    // Simple state mutators for the eight player-state fields. States are the
    // ACTIVE_PLAYER / PAUSED_PLAYER / NOT_ACTIVE_PLAYER constants.

    /** Sets surface 1's secondary-player state. */
    private void setSecondaryPlayerState(int state){
        mSecondaryPlayerState = state;
    }
    /** Sets surface 1's primary-player state. */
    private void setPlayerState(int state){
        mPlayerState = state;
    }
    /** Sets surface 2's secondary-player state. */
    private void setSecondaryPlayer2State(int state){
        mSecondaryPlayer2State = state;
    }
    /** Sets surface 2's primary-player state. */
    private void setPlayer2State(int state){
        mPlayer2State = state;
    }
    /** Sets surface 3's secondary-player state. */
    private void setSecondaryPlayer3State(int state){
        mSecondaryPlayer3State = state;
    }
    /** Sets surface 3's primary-player state. */
    private void setPlayer3State(int state){
        mPlayer3State = state;
    }
    /** Sets surface 4's secondary-player state. */
    private void setSecondaryPlayer4State(int state){
        mSecondaryPlayer4State = state;
    }
    /** Sets surface 4's primary-player state. */
    private void setPlayer4State(int state){
        mPlayer4State = state;
    }
+
+
+ private void initBlock(){
+ float screenRatio = screenHeight / screenWidth;
+ float width = 1;
+ float height = screenRatio;
+ Log.d(TAG, "initBlock: SCREEN WIDTH: " + screenWidth);
+ Log.d(TAG, "initBlock: SCREEN HEIGHT: " + screenHeight);
+
+ setMatrices(width, height);
+ float widthScaleFactor = 1f;
+ float heightScaleFactor = 1f;
+
+ //set the vertices and bitmaps to textures
+ for (int i = 0; i < numFaces; i++) {
+ Log.d(TAG, "initBlock: adding vertices to list.");
+
+ // Define the vertices for this face
+ float[] imageVertices = {
+ widthMatrix[i][0] * widthScaleFactor, heightMatrix[i][0] * heightScaleFactor, depthMatrix[i][0],
+ widthMatrix[i][1] * widthScaleFactor, heightMatrix[i][1] * heightScaleFactor, depthMatrix[i][1],
+ widthMatrix[i][2] * widthScaleFactor, heightMatrix[i][2] * heightScaleFactor, depthMatrix[i][2],
+ widthMatrix[i][3] * widthScaleFactor, heightMatrix[i][3] * heightScaleFactor, depthMatrix[i][3],
+ };
+ mVertices.add(imageVertices);
+
+ }
+//
+ mVertexBuffers = new FloatBuffer[5][3];
+//
+ ByteBuffer vbb = ByteBuffer.allocateDirect(mVertices.get(0).length * 6 * 4);
+ vbb.order(ByteOrder.nativeOrder());
+
+ float[] tempTexCoords = { // Allocate texture buffer. An float has 4 bytes. Repeat for 4 faces.
+ 0.0f, 1.0f,
+ 1.0f, 1.0f,
+ 0.0f, 0.0f,
+ 1.0f, 0.0f
+ };
+
+ texCoords1 = tempTexCoords;
+ texCoords2 = tempTexCoords;
+ texCoords3 = tempTexCoords;
+ texCoords4 = tempTexCoords;
+
+
+ // Setup texture-coords-array buffer, in float. An float has 4 bytes (NEW)
+ // There's 8 coordinates per face and 4 texture coordinate buffers
+ // so 8 x 4 x 4 or texCoords.length x 4 x 4
+ ByteBuffer tbb = ByteBuffer.allocateDirect(texCoords1.length * 4 * 4);
+ tbb.order(ByteOrder.nativeOrder());
+ textureBuffer1 = tbb.asFloatBuffer();
+ textureBuffer2 = tbb.asFloatBuffer();
+ textureBuffer3 = tbb.asFloatBuffer();
+ textureBuffer4 = tbb.asFloatBuffer();
+
+ textureBuffer1.put(texCoords1);
+ textureBuffer1.position(0);
+
+ textureBuffer2.put(texCoords2);
+ textureBuffer2.position(0);
+
+ textureBuffer3.put(texCoords3);
+ textureBuffer3.position(0);
+
+ textureBuffer4.put(texCoords4);
+ textureBuffer4.position(0);
+
+// rotateToStartingIndex();
+ }
+
+
    /**
     * Releases all eight ExoPlayer instances and removes any pending
     * progress-bar callbacks. Call on teardown; players are unusable afterwards.
     */
    public void releasePlayers(){
        mPlayer.release();
        mSecondaryPlayer.release();
        mPlayer2.release();
        mSecondaryPlayer2.release();
        mPlayer3.release();
        mSecondaryPlayer3.release();
        mPlayer4.release();
        mSecondaryPlayer4.release();

        removeAllCallbacks();
    }

    /** Cancels any pending progress-bar init and progress-tick callbacks. */
    private void removeAllCallbacks(){
        if(mProgressBarInitHandler != null){
            mProgressBarInitHandler.removeCallbacks(mProgressBarInitRunnable);
        }
        if(mProgressHandler != null){
            // Handler.removeCallbacks is a safe no-op when the runnable is null.
            mProgressHandler.removeCallbacks(mProgressRunnable);
        }

    }
+
    /** Releases surface 1's primary player. */
    public void releasePlayer1(){
        mPlayer.release();
    }

    /**
     * NOTE(review): despite the name, this releases mSecondaryPlayer (surface
     * 1's SECONDARY player), not mPlayer2 — confirm whether that is intended
     * or a naming slip.
     */
    public void releasePlayer2(){
        mSecondaryPlayer.release();
    }
+
    /**
     * Creates the shared track selector and all eight ExoPlayer instances
     * (primary + secondary per surface). Every player starts paused.
     */
    private void initPlayers(){
        // One adaptive track-selection factory/selector shared by all players.
        mVideoTrackSelectionFactory = new AdaptiveTrackSelection.Factory(BANDWIDTH_METER);
        mTrackSelector = new DefaultTrackSelector(mVideoTrackSelectionFactory);

        initPlayer1();
        initPlayer2();
        initPlayer3();
        initPlayer4();
    }

    /** Creates surface 1's primary/secondary players, initially paused. */
    private void initPlayer1(){
        mPlayer = ExoPlayerFactory.newSimpleInstance(mContext, mTrackSelector);
        mSecondaryPlayer = ExoPlayerFactory.newSimpleInstance(mContext, mTrackSelector);
        mPlayer.setPlayWhenReady(false);
        mSecondaryPlayer.setPlayWhenReady(false);
    }

    /** Creates surface 2's primary/secondary players, initially paused. */
    private void initPlayer2(){
        mPlayer2 = ExoPlayerFactory.newSimpleInstance(mContext, mTrackSelector);
        mSecondaryPlayer2 = ExoPlayerFactory.newSimpleInstance(mContext, mTrackSelector);
        mPlayer2.setPlayWhenReady(false);
        mSecondaryPlayer2.setPlayWhenReady(false);
    }

    /** Creates surface 3's primary/secondary players, initially paused. */
    private void initPlayer3(){
        mPlayer3 = ExoPlayerFactory.newSimpleInstance(mContext, mTrackSelector);
        mSecondaryPlayer3 = ExoPlayerFactory.newSimpleInstance(mContext, mTrackSelector);
        mPlayer3.setPlayWhenReady(false);
        mSecondaryPlayer3.setPlayWhenReady(false);
    }

    /** Creates surface 4's primary/secondary players, initially paused. */
    private void initPlayer4(){
        mPlayer4 = ExoPlayerFactory.newSimpleInstance(mContext, mTrackSelector);
        mSecondaryPlayer4 = ExoPlayerFactory.newSimpleInstance(mContext, mTrackSelector);
        mPlayer4.setPlayWhenReady(false);
        mSecondaryPlayer4.setPlayWhenReady(false);
    }
+
+
    /**
     * Wraps a content URI in an ExoPlayer ExtractorMediaSource using a default
     * data-source factory.
     *
     * @param uri video URI to play
     * @return a MediaSource ready to hand to a player's prepare()
     */
    public MediaSource buildMediaSource(Uri uri){
        // Produces DataSource instances through which media data is loaded.
        DataSource.Factory dataSourceFactory = new DefaultDataSourceFactory(mContext,
                Util.getUserAgent(mContext, "MitchsApp"), null);
        return new ExtractorMediaSource(uri, dataSourceFactory, new DefaultExtractorsFactory(), null, null);
    }
+
    /**
     * Makes the buffering spinner (the view with PROGRESS_BAR_ID) visible.
     * Posted to the main looper because this is called from the GL/render thread.
     */
    private void showProgressBar(){
        Log.d(TAG, "showProgressBar: showing progress bar.");
        Handler mainHandler = new Handler(mContext.getMainLooper());
        Runnable myRunnable = new Runnable() {
            @Override
            public void run() {
                ProgressBar mProgressBar = ((Activity)mContext).findViewById(PROGRESS_BAR_ID);
                mProgressBar.bringToFront();
                mProgressBar.setVisibility(View.VISIBLE);
            }
        };
        mainHandler.post(myRunnable);
    }

    /**
     * Hides the buffering spinner. Like {@link #showProgressBar()}, the view
     * mutation is posted to the main looper.
     */
    private void hideProgressBar(){
        Log.d(TAG, "hideProgressBar: hiding progress bar.");
        Handler mainHandler = new Handler(mContext.getMainLooper());
        Runnable myRunnable = new Runnable() {
            @Override
            public void run() {
                ProgressBar mProgressBar = ((Activity)mContext).findViewById(PROGRESS_BAR_ID);
                mProgressBar.setVisibility(View.INVISIBLE);
            }
        };
        mainHandler.post(myRunnable);
    }
+
    // View id for the centered buffering spinner.
    // NOTE(review): a hard-coded id can collide with generated ids; consider
    // View.generateViewId() — confirm no layout XML references this value.
    private static final int PROGRESS_BAR_ID = 123456;

    /**
     * Creates the large centered buffering spinner (initially INVISIBLE) and
     * adds it to mRelativeLayout. View work is posted to the main looper.
     */
    private void initProgressBar(){
        Log.d(TAG, "initProgressBar: initializing progress bar.");

        //post to main ui thread
        Handler handler = new Handler(mContext.getMainLooper());
        Runnable runnable = new Runnable() {
            @Override
            public void run() {
                // 180x180 px, centered in the parent layout.
                RelativeLayout.LayoutParams rlp = new RelativeLayout.LayoutParams(180,180);
                rlp.addRule(RelativeLayout.CENTER_IN_PARENT);

                ProgressBar mProgressBar = new ProgressBar(mContext, null, android.R.attr.progressBarStyleLarge);
                mProgressBar.setId(PROGRESS_BAR_ID);
                mProgressBar.setVisibility(View.INVISIBLE);
                mProgressBar.setLayoutParams(rlp);
                mRelativeLayout.addView(mProgressBar);

            }
        };
        handler.post(runnable);
    }
+
+ private void initProgressBars(){
+ Log.d(TAG, "initProgressBars: initializing progress bar widgets.");
+
+ try{
+ //remove the previous surface's progress bars
+ Handler handler = new Handler(mContext.getMainLooper());
+ Runnable runnable = new Runnable() {
+ @Override
+ public void run() {
+ mLinearLayout.removeAllViews();
+ mLinearLayout2.removeAllViews();
+ }
+ };
+ handler.post(runnable);
+
+ //divide the width evenly between the media sources for this index
+ final int resourceIndex = mResourceIndices.getJSONObject(mCurrentSurface).getInt(mContext.getString(R.string.resource_index));
+// int numSources = mMedia.get(resourceIndex).getMedia().size();
+ int numSources = mUserStories.getJSONObject(resourceIndex)
+ .getJSONArray(mContext.getString(R.string.user_stories)).length();
+ final int mediaIndex = mResourceIndices.getJSONObject(mCurrentSurface).getJSONArray(mContext.getString(R.string.media_index))
+ .getJSONObject(resourceIndex / 4).getInt(mContext.getString(R.string.media_index));
+ Log.d(TAG, "initProgressBars: media index: " + mediaIndex);
+ Log.d(TAG, "initProgressBars: sources: " + numSources);
+ final int width = ((int) screenWidth / numSources) - (int) (screenWidth * 0.01);
+ mIds = new int[numSources];
+
+ //make the progress bars and add them to the layout
+ //their id's will be saved in the 'mIds' array
+ for(int i = 0; i < numSources; i++){
+ Log.d(TAG, "initProgressBars: i: " + i);
+
+ final int count = i;
+ //take action on main UI thread
+ Handler mainHandler = new Handler(mContext.getMainLooper());
+ Runnable myRunnable = new Runnable() {
+ @Override
+ public void run() {
+ Log.d(TAG, "initProgressBars: adding a progress bar. count: " + count);
+ LinearLayout.LayoutParams layoutParams = new LinearLayout.LayoutParams(
+ width,
+ 5
+// LinearLayout.LayoutParams.WRAP_CONTENT
+ );
+
+ layoutParams.setMargins(5, 0 ,0, 0);
+ MyProgressBar progressBar = new MyProgressBar(mContext,
+ null,
+ android.R.attr.progressBarStyleHorizontal);
+ progressBar.setLayoutParams(layoutParams);
+
+ Drawable progressDrawable = progressBar.getProgressDrawable().mutate();
+ progressDrawable.setColorFilter(Color.WHITE, android.graphics.PorterDuff.Mode.SRC_IN);
+ progressBar.setProgressDrawable(progressDrawable);
+
+ progressBar.setId(count);
+ mIds[count] = progressBar.getId();
+ mLinearLayout.addView(progressBar);
+
+ if(count == 0){
+ //set the flag so we know the progress bars are initialized
+ isProgressBarsInitialized = true;
+ startProgressBar();
+
+ LinearLayout.LayoutParams imageViewParams = new LinearLayout.LayoutParams(
+ 100,
+ 100
+ );
+
+ //get the profile image from array
+ int resourceIndex = -1;
+ String profileUrl = "";
+ String username = "";
+ try {
+ resourceIndex = mResourceIndices.getJSONObject(mCurrentSurface).getInt(mContext.getString(R.string.resource_index));
+ profileUrl = mUserStories.getJSONObject(resourceIndex).getJSONObject(mContext.getString(R.string.user_account_settings))
+ .get(mContext.getString(R.string.field_profile_photo)).toString();
+ username = mUserStories.getJSONObject(resourceIndex).getJSONObject(mContext.getString(R.string.user_account_settings))
+ .get(mContext.getString(R.string.field_username)).toString();
+ Log.d(TAG, "initProgressBars: got the profile url: " + profileUrl);
+ Log.d(TAG, "initProgressBars: got the username: " + username);
+ } catch (JSONException e) {
+ e.printStackTrace();
+ }
+
+ imageViewParams.setMargins(15, 8, 0, 0);
+ //add circle image view
+ CircleImageView profileImage = new CircleImageView(mContext);
+ profileImage.setVisibility(View.VISIBLE);
+ profileImage.bringToFront();
+ profileImage.setLayoutParams(imageViewParams);
+ if(!profileUrl.equals("")){
+ Glide.with(mContext)
+ .load(profileUrl)
+ .into(profileImage);
+ }
+ else{
+ profileImage.setImageDrawable(mContext.getResources().getDrawable(R.drawable.android_construction));
+ }
+
+ LinearLayout.LayoutParams textViewParams = new LinearLayout.LayoutParams(
+ LinearLayout.LayoutParams.WRAP_CONTENT,
+ LinearLayout.LayoutParams.WRAP_CONTENT
+ );
+ textViewParams.setMargins(20, 25, 0, 0);
+ TextView name = new TextView(mContext);
+ name.setVisibility(View.VISIBLE);
+ name.bringToFront();
+ name.setTextSize(14f);
+ name.setTextColor(Color.WHITE);
+ name.setLayoutParams(textViewParams);
+ if(!username.equals("")){
+ name.setText(username);
+ }
+ else{
+ name.setText("N/A");
+ }
+
+ mLinearLayout2.addView(profileImage);
+ mLinearLayout2.addView(name);
+ mLinearLayout2.bringToFront();
+ mLinearLayout2.setVisibility(View.VISIBLE);
+ }
+
+ }
+ };
+ mainHandler.post(myRunnable);
+
+ }
+
+ }catch (JSONException e){
+ e.printStackTrace();
+ }
+ }
+
+ private void setProgressBars(int number){
+ Log.d(TAG, "setProgressBars: setting progress bar to match media index.");
+ try{
+ int resourceIndex = mResourceIndices.getJSONObject(mCurrentSurface).getInt(mContext.getString(R.string.resource_index));
+ int surfaceMediaIndexCount = mResourceIndices.getJSONObject(mCurrentSurface).getJSONArray(mContext.getString(R.string.media_index))
+ .getJSONObject( resourceIndex / 4).getInt(mContext.getString(R.string.media_index));
+ Log.d(TAG, "setProgressBars: media index count: " + surfaceMediaIndexCount);
+ //iterate through the media index and fill the progress bars
+ for(int i = 0; i < surfaceMediaIndexCount + number; i++){
+ Log.d(TAG, "setProgressBars: filling progress bar with id = " + i);
+ MyProgressBar progressBar = ((Activity) mContext).findViewById(mIds[i]);
+
+ progressBar.setMax(1);
+ progressBar.setProgress(1);
+
+ //MyProgressBar stuff
+ progressBar.setCurrentProgress(1);
+ progressBar.setTotalDuration(1);
+ }
+ }catch (JSONException e){
+ e.printStackTrace();
+ }
+
+// startProgressBar(); //fill the current bar
+ }
+
+
+
/**
 * Looks up the progress bar belonging to the media segment currently shown on
 * {@code mCurrentSurface} and starts driving it: images are filled instantly,
 * videos kick off the 200 ms polling loop via startProgressRunnable() on
 * whichever of the eight player slots currently owns the surface.
 */
private void startProgressBar(){
    Log.d(TAG, "startProgressBar: starting progress bar.");
    try{
        // which user/story bundle the current surface is showing
        int resourceIndex = mResourceIndices.getJSONObject(mCurrentSurface).getInt(mContext.getString(R.string.resource_index));
        // index of the active segment within that story
        // NOTE(review): 'resourceIndex / 4' presumably maps a global resource index onto
        // one of the four surfaces — confirm against how mResourceIndices is built.
        int surfaceMediaIndex = mResourceIndices.getJSONObject(mCurrentSurface).getJSONArray(mContext.getString(R.string.media_index))
                .getJSONObject( resourceIndex / 4).getInt(mContext.getString(R.string.media_index));
        mCurrentProgressBar = ((Activity) mContext).findViewById(mIds[surfaceMediaIndex]);

        if(mCurrentProgressBar != null){
            Log.d(TAG, "startProgressBar: current progress bar is not null.");

            String resourceType = "";
            boolean imageRenderError = false;
            try{
                resourceType = mResources.get(resourceIndex).getJSONObject(surfaceMediaIndex).get(mContext.getString(R.string.media_type)).toString();
            }catch (JSONException e){
                // missing media-type entry: fall back to treating the segment as an image below
                imageRenderError = true;
            }

            mCurrentProgress = 0;
            if (resourceType.equals(mContext.getString(R.string.encoded_bitmap)) || imageRenderError) {
                Log.d(TAG, "startProgressBar: next resource is an image.");
                // images have no duration: render the bar as full (max == progress == 1)
                mCurrentProgress = 1;
                mCurrentProgressBar.setMax(mCurrentProgress);
                mCurrentProgressBar.setProgress(mCurrentProgress);

                //MyProgressBar stuff
                mCurrentProgressBar.setCurrentProgress(mCurrentProgress);
                mCurrentProgressBar.setTotalDuration(mCurrentProgress);

                //hide the circular progress bar if it's showing
                hideProgressBar();
            }
            else if(resourceType.equals(mContext.getString(R.string.video_uri))){
                Log.d(TAG, "startProgressBar: next resource is a video.");

                //get the total duration (stored in seconds, converted to ms here)
                try{
                    mTotalDuration = Integer.parseInt(mResources.get(resourceIndex).getJSONObject(surfaceMediaIndex)
                            .get(mContext.getString(R.string.duration)).toString()) * 1000;
                }catch (JSONException e){
                    // no duration available: fall back to the generic media timeout
                    mTotalDuration = MEDIA_TIMEOUT;
                }

                // route to whichever player instance currently owns the surface
                if(getCurrentPlayer() == PLAYER_ONE){
                    Log.d(TAG, "startProgressBar: starting progress bar for player1");
                    startProgressRunnable(mPlayer);
                }
                else if(getCurrentPlayer() == PLAYER_ONE_SECONDARY){
                    Log.d(TAG, "startProgressBar: starting progress bar for secondary player1");
                    startProgressRunnable(mSecondaryPlayer);
                }
                else if(getCurrentPlayer() == PLAYER_TWO){
                    Log.d(TAG, "startProgressBar: starting progress bar for player2");
                    startProgressRunnable(mPlayer2);
                }
                else if(getCurrentPlayer() == PLAYER_TWO_SECONDARY){
                    Log.d(TAG, "startProgressBar: starting progress bar for secondary player2");
                    startProgressRunnable(mSecondaryPlayer2);
                }
                else if(getCurrentPlayer() == PLAYER_THREE){
                    Log.d(TAG, "startProgressBar: starting progress bar for player3");
                    startProgressRunnable(mPlayer3);
                }
                else if(getCurrentPlayer() == PLAYER_THREE_SECONDARY){
                    Log.d(TAG, "startProgressBar: starting progress bar for secondary player3");
                    startProgressRunnable(mSecondaryPlayer3);
                }
                else if(getCurrentPlayer() == PLAYER_FOUR){
                    Log.d(TAG, "startProgressBar: starting progress bar for player4");
                    startProgressRunnable(mPlayer4);
                }
                else if(getCurrentPlayer() == PLAYER_FOUR_SECONDARY){
                    Log.d(TAG, "startProgressBar: starting progress bar for secondary player4");
                    startProgressRunnable(mSecondaryPlayer4);
                }
            }
        }

    }catch (JSONException e){
        e.printStackTrace();
    }
}
+
/**
 * Drives the video progress bar for the segment playing on the current surface.
 *
 * Creates a main-looper Handler plus a self-reposting Runnable that polls the
 * active player's position every 200 ms. The runnable runs the poll, then
 * dispatches messages back to the handler to touch UI state:
 * INIT_VIDEO_PROGRESS_BAR (first tick), UPDATE_UI_WITH_VIDEO_PROGRESS,
 * REMOVE_PROGRESS_BAR_CALLBACKS (done), HIDE_PROGRESS_BAR, or — when playback
 * appears stalled — the current player's id, which triggers a prepare/retry.
 *
 * @param player_ the player expected to be playing; only used as a null guard —
 *                the poll itself re-resolves the player via getCurrentPlayer().
 */
private void startProgressRunnable(final Player player_){
    Log.d(TAG, "startProgressRunnable: attempting to start progress runnable.");
    if(player_ != null){
        Log.d(TAG, "startProgressRunnable: starting the progress runnable for progress updates.");

        // cancel any previous polling loop before starting a new one
        if(mProgressRunnable != null){
            Log.d(TAG, "startProgressRunnable: TIMEOUT.");
            mProgressHandler.removeCallbacks(mProgressRunnable);
            mProgressRunnable = null;
        }
        mCurrentProgress = 0;
        frameAvailableCount = 0;
        videoRetryTimer = 0;
        mProgressHandler = new Handler(Looper.getMainLooper()){
            @Override
            public void handleMessage(Message msg) {
                if(msg.what == INIT_VIDEO_PROGRESS_BAR){
                    Log.d(TAG, "startProgressRunnable: initializing progress bar for video: " + mCurrentProgress);
                    mCurrentProgressBar.setMax(mTotalDuration);
                    mCurrentProgressBar.setProgress(mCurrentProgress);

                    //MyProgressBar stuff
                    mCurrentProgressBar.setCurrentProgress(mCurrentProgress);
                    mCurrentProgressBar.setTotalDuration(mTotalDuration);

                    //show the circular (buffering) progress bar
                    showProgressBar();
                }
                else if(msg.what == UPDATE_UI_WITH_VIDEO_PROGRESS){
                    Log.d(TAG, "startProgressRunnable: updating UI thread with progress: " + mCurrentProgress);
                    mCurrentProgressBar.setProgress(mCurrentProgress);
                }
                else if(msg.what == REMOVE_PROGRESS_BAR_CALLBACKS){
                    // NOTE(review): log message copy-pasted from the UPDATE branch;
                    // this branch actually stops the polling loop.
                    Log.d(TAG, "startProgressRunnable: updating UI thread with progress: " + mCurrentProgress);
                    mProgressHandler.removeCallbacks(mProgressRunnable);
                }
                else if(msg.what == HIDE_PROGRESS_BAR){
                    Log.d(TAG, "startProgressRunnable: Hiding circular progress bar from UI Thread");
                    hideProgressBar();
                }
                else if(msg.what == getCurrentPlayer()) {
                    // retry message: msg.what carries the id of the stalled player.
                    // Re-resolve the current media source and re-prepare that player.
                    Log.d(TAG, "startProgressRunnable: Retrying video playback.");
                    showProgressBar();
                    try {
                        int resourceIndex = mResourceIndices.getJSONObject(mCurrentSurface).getInt(mContext.getString(R.string.resource_index));
                        int mediaIndex = mResourceIndices.getJSONObject(mCurrentSurface).getJSONArray(mContext.getString(R.string.media_index))
                                .getJSONObject( resourceIndex / 4).getInt(mContext.getString(R.string.media_index));
                        MediaSource nextMediaSource = (MediaSource) mResources.get(resourceIndex).getJSONObject(mediaIndex).get(mContext.getString(R.string.media_source));
                        if(getCurrentPlayer() == PLAYER_ONE){
                            Log.d(TAG, "startProgressRunnable: attempting to restart player1");
                            mPlayer.prepare(nextMediaSource);
                            mPlayer.setPlayWhenReady(true);
                        }
                        else if(getCurrentPlayer() == PLAYER_ONE_SECONDARY){
                            Log.d(TAG, "startProgressRunnable: attempting to restart secondary player1");
                            mSecondaryPlayer.prepare(nextMediaSource);
                            mSecondaryPlayer.setPlayWhenReady(true);
                        }
                        else if(getCurrentPlayer() == PLAYER_TWO){
                            Log.d(TAG, "startProgressRunnable: attempting to restart player2");
                            mPlayer2.prepare(nextMediaSource);
                            mPlayer2.setPlayWhenReady(true);
                        }
                        else if(getCurrentPlayer() == PLAYER_TWO_SECONDARY){
                            Log.d(TAG, "startProgressRunnable: attempting to restart secondary player2");
                            mSecondaryPlayer2.prepare(nextMediaSource);
                            mSecondaryPlayer2.setPlayWhenReady(true);
                        }
                        else if(getCurrentPlayer() == PLAYER_THREE){
                            Log.d(TAG, "startProgressRunnable: attempting to restart player3");
                            mPlayer3.prepare(nextMediaSource);
                            mPlayer3.setPlayWhenReady(true);
                        }
                        else if(getCurrentPlayer() == PLAYER_THREE_SECONDARY){
                            Log.d(TAG, "startProgressRunnable: attempting to restart secondary player3");
                            mSecondaryPlayer3.prepare(nextMediaSource);
                            mSecondaryPlayer3.setPlayWhenReady(true);
                        }
                        else if(getCurrentPlayer() == PLAYER_FOUR){
                            Log.d(TAG, "startProgressRunnable: attempting to restart player4");
                            mPlayer4.prepare(nextMediaSource);
                            mPlayer4.setPlayWhenReady(true);
                        }
                        else if(getCurrentPlayer() == PLAYER_FOUR_SECONDARY){
                            Log.d(TAG, "startProgressRunnable: attempting to restart secondary player4");
                            mSecondaryPlayer4.prepare(nextMediaSource);
                            mSecondaryPlayer4.setPlayWhenReady(true);
                        }

                    } catch (JSONException e) {
                        e.printStackTrace();
                    }
                }
            }
        };
        mProgressRunnable = new Runnable() {
            @Override
            public void run() {
                // re-post first so the loop keeps ticking every 200 ms even if a branch below throws
                mProgressHandler.postDelayed(this, 200);
                int progress = 0;
                boolean isPlaying = false;
                // Poll position/playWhenReady from whichever player owns the surface.
                // The mUpdateST* flags request a SurfaceTexture refresh when no new
                // frame has arrived for VIDEO_REFRESH_COUNT_LIMIT consecutive polls.
                if(getCurrentPlayer() == PLAYER_ONE){
                    progress = (int) mPlayer.getCurrentPosition();
                    if(mPlayer.getPlayWhenReady()){
                        isPlaying = true;
                    }
                    if(!mUpdateST){
                        frameAvailableCount++;
                        Log.d(TAG, "startProgressRunnable: player1 frame Available count: " + frameAvailableCount);
                        if(frameAvailableCount >= VIDEO_REFRESH_COUNT_LIMIT){
                            Log.d(TAG, "startProgressRunnable: forcing frame refresh on player1.");
                            mUpdateST = true;
                        }
                    }
                    else{
                        frameAvailableCount = 0;
                    }
                    Log.d(TAG, "startProgressRunnable: player 1 current progress: " + progress);
                }
                else if(getCurrentPlayer() == PLAYER_ONE_SECONDARY){
                    progress = (int) mSecondaryPlayer.getCurrentPosition();
                    if(mSecondaryPlayer.getPlayWhenReady()){
                        isPlaying = true;
                    }
                    if(!mUpdateST){
                        frameAvailableCount++;
                        Log.d(TAG, "startProgressRunnable: secondary player1 frame Available count: " + frameAvailableCount);
                        if(frameAvailableCount >= VIDEO_REFRESH_COUNT_LIMIT){
                            Log.d(TAG, "startProgressRunnable: forcing frame refresh on secondary player1.");
                            mUpdateST = true;
                        }
                    }
                    else{
                        frameAvailableCount = 0;
                    }
                    Log.d(TAG, "startProgressRunnable: current progress: " + progress);
                }
                else if(getCurrentPlayer() == PLAYER_TWO){
                    progress = (int) mPlayer2.getCurrentPosition();
                    if(mPlayer2.getPlayWhenReady()){
                        isPlaying = true;
                    }
                    if(!mUpdateST2){
                        frameAvailableCount++;
                        Log.d(TAG, "startProgressRunnable: player2 frame Available count: " + frameAvailableCount);
                        if(frameAvailableCount >= VIDEO_REFRESH_COUNT_LIMIT){
                            Log.d(TAG, "startProgressRunnable: forcing frame refresh on player2.");
                            mUpdateST2 = true;
                        }
                    }
                    else{
                        frameAvailableCount = 0;
                    }
                    Log.d(TAG, "startProgressRunnable: current progress: " + progress);
                }
                else if(getCurrentPlayer() == PLAYER_TWO_SECONDARY){
                    progress = (int) mSecondaryPlayer2.getCurrentPosition();
                    if(mSecondaryPlayer2.getPlayWhenReady()){
                        isPlaying = true;
                    }
                    if(!mUpdateST2){
                        frameAvailableCount++;
                        Log.d(TAG, "startProgressRunnable: secondary player2 frame Available count: " + frameAvailableCount);
                        if(frameAvailableCount >= VIDEO_REFRESH_COUNT_LIMIT){
                            Log.d(TAG, "startProgressRunnable: forcing frame refresh on secondary player2.");
                            mUpdateST2 = true;
                        }
                    }
                    else{
                        frameAvailableCount = 0;
                    }
                    Log.d(TAG, "startProgressRunnable: current progress: " + progress);
                }
                else if(getCurrentPlayer() == PLAYER_THREE){
                    progress = (int) mPlayer3.getCurrentPosition();
                    if(mPlayer3.getPlayWhenReady()){
                        isPlaying = true;
                    }
                    if(!mUpdateST3){
                        frameAvailableCount++;
                        Log.d(TAG, "startProgressRunnable: player3 frame Available count: " + frameAvailableCount);
                        if(frameAvailableCount >= VIDEO_REFRESH_COUNT_LIMIT){
                            Log.d(TAG, "startProgressRunnable: forcing frame refresh on player3.");
                            mUpdateST3 = true;
                        }
                    }
                    else{
                        frameAvailableCount = 0;
                    }
                    Log.d(TAG, "startProgressRunnable: current progress: " + progress);
                }
                else if(getCurrentPlayer() == PLAYER_THREE_SECONDARY){
                    progress = (int) mSecondaryPlayer3.getCurrentPosition();
                    if(mSecondaryPlayer3.getPlayWhenReady()){
                        isPlaying = true;
                    }
                    if(!mUpdateST3){
                        frameAvailableCount++;
                        Log.d(TAG, "startProgressRunnable: secondary player3 frame Available count: " + frameAvailableCount);
                        if(frameAvailableCount >= VIDEO_REFRESH_COUNT_LIMIT){
                            Log.d(TAG, "startProgressRunnable: forcing frame refresh on secondary player3.");
                            mUpdateST3 = true;
                        }
                    }
                    else{
                        frameAvailableCount = 0;
                    }
                    Log.d(TAG, "startProgressRunnable: current progress: " + progress);
                }
                else if(getCurrentPlayer() == PLAYER_FOUR){
                    progress = (int) mPlayer4.getCurrentPosition();
                    if(mPlayer4.getPlayWhenReady()){
                        isPlaying = true;
                    }
                    if(!mUpdateST4){
                        frameAvailableCount++;
                        Log.d(TAG, "startProgressRunnable: player4 frame Available count: " + frameAvailableCount);
                        if(frameAvailableCount >= VIDEO_REFRESH_COUNT_LIMIT){
                            Log.d(TAG, "startProgressRunnable: forcing frame refresh on player4.");
                            mUpdateST4 = true;
                        }
                    }
                    else{
                        frameAvailableCount = 0;
                    }
                    Log.d(TAG, "startProgressRunnable: current progress: " + progress);
                }
                else if(getCurrentPlayer() == PLAYER_FOUR_SECONDARY){
                    progress = (int) mSecondaryPlayer4.getCurrentPosition();
                    if(mSecondaryPlayer4.getPlayWhenReady()){
                        isPlaying = true;
                    }
                    if(!mUpdateST4){
                        frameAvailableCount++;
                        Log.d(TAG, "startProgressRunnable: secondary player4 frame Available count: " + frameAvailableCount);
                        if(frameAvailableCount >= VIDEO_REFRESH_COUNT_LIMIT){
                            Log.d(TAG, "startProgressRunnable: forcing frame refresh on secondary player4.");
                            mUpdateST4 = true;
                        }
                    }
                    else{
                        frameAvailableCount = 0;
                    }
                    Log.d(TAG, "startProgressRunnable: current progress: " + progress);
                }

                // first tick: size the bar and show the buffering spinner
                if(mCurrentProgress == 0){
                    mCurrentProgress = 1;
                    Log.d(TAG, "startProgressRunnable: dispatching message from progress handler. progress: " + mCurrentProgress);
                    mProgressHandler.dispatchMessage(Message.obtain(mProgressHandler, INIT_VIDEO_PROGRESS_BAR));
                }
                if (progress > 0 && isPlaying) {
                    //hide the progress bar if it's showing
                    mProgressHandler.dispatchMessage(Message.obtain(mProgressHandler, HIDE_PROGRESS_BAR));
                    // advance by the poll interval rather than the player position itself
                    mCurrentProgress += 200;
                    Log.d(TAG, "startProgressRunnable: dispatching message from progress handler. progress: " + mCurrentProgress);
                    mProgressHandler.dispatchMessage(Message.obtain(mProgressHandler, UPDATE_UI_WITH_VIDEO_PROGRESS));
                }
                if(mCurrentProgress >= mTotalDuration){
                    Log.d(TAG, "startProgressRunnable: DONE.");
                    mProgressHandler.dispatchMessage(Message.obtain(mProgressHandler, REMOVE_PROGRESS_BAR_CALLBACKS));
                }
                // playWhenReady is set but the position hasn't advanced — playback may have stalled
                if(progress == 0 && isPlaying){
                    videoRetryTimer += 200;
                    if(videoRetryTimer >= 2000){ // retry after 2 seconds of no progress (10 polls x 200 ms)
                        Log.d(TAG, "startProgressRunnable: attempting to retry playing the video.");
                        videoRetryTimer = 0;
                        mProgressHandler.dispatchMessage(Message.obtain(mProgressHandler, getCurrentPlayer()));
                    }
                }
            }
        };
        mProgressRunnable.run();

    }
}
+
+ private void fillCurrentProgressBar(){
+ Log.d(TAG, "fillCurrentProgressBar: filling current progress bar.");
+
+ try{
+ //make sure the progress bar is full
+ int resourceIndex = mResourceIndices.getJSONObject(mCurrentSurface).getInt(mContext.getString(R.string.resource_index));
+ int surfaceMediaIndex = mResourceIndices.getJSONObject(mCurrentSurface).getJSONArray(mContext.getString(R.string.media_index))
+ .getJSONObject( resourceIndex / 4).getInt(mContext.getString(R.string.media_index));
+ String resourceType = "";
+ boolean imageRenderError = false;
+ try{
+ resourceType = mResources.get(resourceIndex).getJSONObject(surfaceMediaIndex).get(mContext.getString(R.string.media_type)).toString();
+ }catch (JSONException e){
+ imageRenderError = true;
+ Log.e(TAG, "fillCurrentProgressBar: JSONException: " + e.getMessage() );
+ }
+ catch (NullPointerException e){
+ imageRenderError = true;
+ Log.e(TAG, "fillCurrentProgressBar: NullPointerException: " + e.getMessage() );
+ }
+
+ if (resourceType.equals(mContext.getString(R.string.encoded_bitmap)) || imageRenderError) {
+ Log.d(TAG, "fillCurrentProgressBar: current resource is an image.");
+ MyProgressBar progressBar = ((Activity) mContext).findViewById(mIds[surfaceMediaIndex]);
+ progressBar.setProgress(1);
+ progressBar.setCurrentProgress(1);
+ }
+ else if(resourceType.equals(mContext.getString(R.string.video_uri))){
+ Log.d(TAG, "fillCurrentProgressBar: current resource is a video.");
+ //fill the previous progress bar before working with the new one
+ if(mCurrentProgressBar != null){
+ mCurrentProgress = MEDIA_TIMEOUT;
+ mCurrentProgressBar.setProgress(mCurrentProgress);
+ if(mProgressRunnable != null){
+ Log.d(TAG, "incrementMediaIndex: TIMEOUT.");
+ mProgressHandler.removeCallbacks(mProgressRunnable);
+ mProgressRunnable = null;
+ }
+ }
+ }
+
+ }catch (JSONException e){
+ Log.d(TAG, "fillCurrentProgressBar: JSONException: " + e.getMessage());
+ e.printStackTrace();
+ }
+ }
+
+ private int getCurrentPlayer(){
+ Log.d(TAG, "getCurrentPlayer: getting the current player.");
+
+ if(mCurrentSurface == SURFACE_1){
+ if (mPlayerState == ACTIVE_PLAYER) {
+ return PLAYER_ONE;
+ }
+ else if (mSecondaryPlayerState == ACTIVE_PLAYER) {
+ return PLAYER_ONE_SECONDARY;
+ }
+ }
+ else if(mCurrentSurface == SURFACE_2){
+ if (mPlayer2State == ACTIVE_PLAYER) {
+ return PLAYER_TWO;
+ }
+ else if (mSecondaryPlayer2State == ACTIVE_PLAYER) {
+ return PLAYER_TWO_SECONDARY;
+ }
+ }
+ else if(mCurrentSurface == SURFACE_3){
+ if (mPlayer3State == ACTIVE_PLAYER) {
+ return PLAYER_THREE;
+ }
+ else if (mSecondaryPlayer3State == ACTIVE_PLAYER) {
+ return PLAYER_THREE_SECONDARY;
+ }
+ }
+ else if(mCurrentSurface == SURFACE_4){
+ if (mPlayer4State == ACTIVE_PLAYER) {
+ return PLAYER_FOUR;
+ }
+ else if (mSecondaryPlayer4State == ACTIVE_PLAYER) {
+ return PLAYER_FOUR_SECONDARY;
+ }
+ }
+ return 0;
+ }
+
+
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/app/src/main/java/tabian/com/instagramclone2/opengl/MyGLSurfaceView.java b/app/src/main/java/tabian/com/instagramclone2/opengl/MyGLSurfaceView.java
new file mode 100644
index 0000000..40c0cc6
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/opengl/MyGLSurfaceView.java
@@ -0,0 +1,215 @@
+package tabian.com.instagramclone2.opengl;
+
+import android.content.Context;
+import android.graphics.PixelFormat;
+import android.opengl.GLSurfaceView;
+import android.util.AttributeSet;
+import android.util.Log;
+import android.view.MotionEvent;
+import android.widget.ProgressBar;
+
+import org.json.JSONArray;
+import org.json.JSONException;
+
+
+/**
+ * Created by User on 11/27/2017.
+ */
+
+class MyGLSurfaceView extends GLSurfaceView {
+
+ private static final String TAG = "MyGLSurfaceView";
+
+ private int CLICK_ACTION_THRESHOLD = 15;
+ private float startX;
+ private float startY;
+
+ public MyGLRenderer mRenderer;
+ private static final float pi = 3.14159f;
+ private float width;
+ private float height;
+ private boolean down = false;
+// private ArrayList mMedia = new ArrayList<>();
+// private JSONArray mUserStories = new JSONArray();
+ private Context mContext;
+ private ProgressBar mProgressBar;
+
+ public MyGLSurfaceView(Context context, AttributeSet attrs) {
+ super(context, attrs);
+ }
+
+// public MyGLSurfaceView(Context context, float height, float width, ArrayList media, int resourceIndex){
+// super(context);
+// mContext = context;
+// this.width = width;
+// this.height = height;
+// mMedia = media;
+//
+//// setEGLConfigChooser(8, 8, 8, 8, 16, 0);
+//// getHolder().setFormat(PixelFormat.TRANSLUCENT);
+//// setZOrderOnTop(true);
+//
+// try {
+// mRenderer = new MyGLRenderer(mContext, height, width, media, MyGLSurfaceView.this, resourceIndex);
+// } catch (JSONException e) {
+// e.printStackTrace();
+// }
+// setRenderer(mRenderer);
+// setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
+//
+// }
+
+ public void setConfig(Context context, float height, float width, JSONArray userStories, int resourceIndex){
+ mContext = context;
+ this.width = width;
+ this.height = height;
+// mUserStories = userStories;
+ setEGLConfigChooser(8, 8, 8, 8, 16, 0);
+ getHolder().setFormat(PixelFormat.TRANSLUCENT);
+// setEGLContextClientVersion(1);
+// setZOrderOnTop(true);
+
+ try {
+// for(int i = 0; i < mMedia.size(); i++){
+// Log.d(TAG, "setConfig: " + media.get(i));
+// }
+ mRenderer = new MyGLRenderer(mContext, height, width, userStories, MyGLSurfaceView.this, resourceIndex);
+ } catch (JSONException e) {
+ e.printStackTrace();
+ }
+ setRenderer(mRenderer);
+ setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
+ }
+
+
+ @Override
+ public boolean onTouchEvent(MotionEvent e) {
+ float x = e.getX();
+ Log.d(TAG, "onTouchEvent: POSITION: " + x);
+
+ switch (e.getAction()) {
+
+ case MotionEvent.ACTION_UP:
+
+ if(down && !mRenderer.mRotateCounterClockwise && !mRenderer.mRotateClockwise){
+ float endX = e.getX();
+ float endY = e.getY();
+ if (isAClick(startX, endX, startY, endY)) {
+ Log.d(TAG, "onTouchEvent: detected a click");
+ Log.d(TAG, "onTouchEvent: ACTION_UP");
+ try {
+ mRenderer.incrementMediaIndex();
+ } catch (JSONException e1) {
+ e1.printStackTrace();
+ }
+ }
+ else{
+ mRenderer.setStopped(true);
+ }
+ down = false;
+ }
+ break;
+
+
+ case MotionEvent.ACTION_DOWN:
+ if(!mRenderer.mRotateCounterClockwise && !mRenderer.mRotateClockwise) {
+ Log.d(TAG, "onTouchEvent: ACTION_DOWN");
+ mRenderer.setStartPositionX(e.getX());
+ mRenderer.setStopped(false);
+ startX = e.getX();
+ startY = e.getY();
+ Log.d(TAG, "onTouchEvent: starting position: " + e.getX());
+
+ mRenderer.pausePlayer();
+ //set this so the ACTION_UP flag doesn't trigger when you first touch the screen
+ down = true;
+ }
+
+ case MotionEvent.ACTION_MOVE:
+ if(!mRenderer.mRotateCounterClockwise && !mRenderer.mRotateClockwise) {
+ Log.d(TAG, "onTouchEvent: ACTION_MOVE");
+
+ float endX = e.getX();
+ float endY = e.getY();
+ if (!isAClick(startX, endX, startY, endY)) {
+ if(Math.abs(startX - x) < 800){
+ mRenderer.setPosition(x);
+ }
+ }
+
+ break;
+ }
+ }
+
+
+ return true;
+ }
+
+
+
+ private boolean isAClick(float startX, float endX, float startY, float endY) {
+ float differenceX = Math.abs(startX - endX);
+ float differenceY = Math.abs(startY - endY);
+ Log.d(TAG, "isAClick: differenceX: " + differenceX);
+ Log.d(TAG, "isAClick: differenceY: " + differenceY);
+ if(!mRenderer.mRotateCounterClockwise && !mRenderer.mRotateClockwise){
+ return !(differenceX > CLICK_ACTION_THRESHOLD || differenceY > CLICK_ACTION_THRESHOLD);
+ }
+ return false;
+ }
+
+// private void getImageBitmaps(final int startPosition, final int endPosition){
+// Log.d(TAG, "getImageBitmaps: getting images from urls");
+//
+// for(int i = startPosition; i < endPosition; i++) {
+// final int count = i;
+// Target target = new Target() {
+// @Override
+// public void onBitmapLoaded(Bitmap bitmap, Picasso.LoadedFrom from) {
+// Log.d(TAG, "onBitmapLoaded: done loading bitmap.");
+// mBitmaps.add(bitmap);
+// if(count == endPosition){
+// mRenderer = new MyGLRenderer(mContext, height, width, mBitmaps, MyGLSurfaceView.this);
+// // Set the Renderer for drawing on the GLSurfaceView
+// setRenderer(mRenderer);
+// setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
+// }
+// }
+//
+// @Override
+// public void onBitmapFailed(Drawable errorDrawable) {
+//
+// }
+//
+// @Override
+// public void onPrepareLoad(Drawable placeHolderDrawable) {
+//
+// }
+// };
+// Picasso.with(mContext)
+// .load(Image.IMAGE_URLS[i])
+// .into(target);
+// targets.add(target);
+// }
+//
+// }
+
+ public void reset(){
+ mRenderer.setStopped(false);
+ mRenderer.setDx(0);
+ mRenderer.setStartPositionX(0);
+ }
+
+
+}
+
+
+
+
+
+
+
+
+
+
+
diff --git a/app/src/main/java/tabian/com/instagramclone2/opengl/MyProgressBar.java b/app/src/main/java/tabian/com/instagramclone2/opengl/MyProgressBar.java
new file mode 100644
index 0000000..2bdb8bc
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/opengl/MyProgressBar.java
@@ -0,0 +1,43 @@
+package tabian.com.instagramclone2.opengl;
+
+import android.content.Context;
+import android.util.AttributeSet;
+import android.widget.ProgressBar;
+
+/**
+ * Created by User on 11/27/2017.
+ */
+
+public class MyProgressBar extends ProgressBar {
+
+ public int totalDuration = 0;
+ public int currentProgress = 0;
+
+ public MyProgressBar(Context context) {
+ super(context);
+ }
+
+ public MyProgressBar(Context context, AttributeSet attrs) {
+ super(context, attrs);
+ }
+
+ public MyProgressBar(Context context, AttributeSet attrs, int defStyleAttr) {
+ super(context, attrs, defStyleAttr);
+ }
+
+ public int getTotalDuration() {
+ return totalDuration;
+ }
+
+ public void setTotalDuration(int totalDuration) {
+ this.totalDuration = totalDuration;
+ }
+
+ public int getCurrentProgress() {
+ return currentProgress;
+ }
+
+ public void setCurrentProgress(int currentProgress) {
+ this.currentProgress = currentProgress;
+ }
+}
diff --git a/app/src/main/java/tabian/com/instagramclone2/opengl/NewStoryActivity.java b/app/src/main/java/tabian/com/instagramclone2/opengl/NewStoryActivity.java
new file mode 100644
index 0000000..4371154
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/opengl/NewStoryActivity.java
@@ -0,0 +1,218 @@
+package tabian.com.instagramclone2.opengl;
+
+
+import android.app.Activity;
+import android.content.Intent;
+import android.os.Bundle;
+import android.util.DisplayMetrics;
+import android.util.Log;
+import android.view.WindowManager;
+import android.widget.Toast;
+
+import java.io.File;
+
+import tabian.com.instagramclone2.R;
+import tabian.com.instagramclone2.Utils.FilePaths;
+import tabian.com.instagramclone2.materialcamera.MaterialCamera;
+
+
+/**
+ * Created by User on 1/8/2018.
+ */
+
+public class NewStoryActivity extends Activity {
+
+ public static final String TAG = "NewStoryActivity";
+
+ private final static int CAMERA_RQ = 6969;
+ private final static int DEFAULT_BITRATE = 1024000;
+ private static final int RESULT_START_CAMERA = 4567;
+ private static final int RESULT_START_VIDEO = 4589;
+ private static final int RESULT_ADD_NEW_STORY = 7891;
+ private MaterialCamera mMaterialCamera;
+
+
+ private int mStartType = RESULT_START_VIDEO;
+
+
+ @Override
+ public void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ setContentView(R.layout.activity_new_story);
+
+ init();
+ }
+
+ private void init(){
+ Log.d(TAG, "init: initializing material camera.");
+
+ getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
+ WindowManager.LayoutParams.FLAG_FULLSCREEN);
+ DisplayMetrics displaymetrics = new DisplayMetrics();
+ this.getWindowManager().getDefaultDisplay().getMetrics(displaymetrics);
+
+// File saveFolder = new File(Environment.getExternalStorageDirectory(), "Stories/" + getTimestamp());
+ FilePaths filePaths = new FilePaths();
+ File saveFolder = new File(filePaths.STORIES);
+ try{
+ if (!saveFolder.mkdirs());
+ }catch (RuntimeException e){
+ e.printStackTrace();
+ }
+
+ mMaterialCamera = new MaterialCamera(this); // Constructor takes an Activity
+
+ if(mStartType == RESULT_START_VIDEO) {
+ Log.d(TAG, "init: starting camera with VIDEO enabled.");
+ mMaterialCamera
+ .allowRetry(true) // Whether or not 'Retry' is visible during playback
+ .autoSubmit(false) // Whether or not user is allowed to playback videos after recording. This can affect other things, discussed in the next section.
+ .saveDir(saveFolder) // The folder recorded videos are saved to
+// .primaryColorAttr(R.attr.colorPrimary) // The theme color used for the camera, defaults to colorPrimary of Activity in the constructor
+ .showPortraitWarning(false) // Whether or not a warning is displayed if the user presses record in portrait orientation
+ .defaultToFrontFacing(false) // Whether or not the camera will initially show the front facing camera
+// .allowChangeCamera(true) // Allows the user to change cameras.
+ .retryExits(false) // If true, the 'Retry' button in the playback screen will exit the camera instead of going back to the recorder
+ .restartTimerOnRetry(false) // If true, the countdown timer is reset to 0 when the user taps 'Retry' in playback
+ .continueTimerInPlayback(false) // If true, the countdown timer will continue to go down during playback, rather than pausing.
+ //Play with the encoding bitrate to change quality and size.
+ .videoEncodingBitRate(DEFAULT_BITRATE * 5) // Sets a custom bit rate for video recording.
+ .audioEncodingBitRate(50000) // Sets a custom bit rate for audio recording.
+ .videoFrameRate(30) // Sets a custom frame rate (FPS) for video recording.
+// .qualityProfile(MaterialCamera.QUALITY_1080P) // Sets a quality profile, manually setting bit rates or frame rates with other settings will overwrite individual quality profile settings
+ .videoPreferredHeight(720) // Sets a preferred height for the recorded video output.
+ .videoPreferredAspect(16f / 9f) // Sets a preferred aspect ratio for the recorded video output.
+ .maxAllowedFileSize(1024 * 1024 * 40) // Sets a max file size of 4MB, recording will stop if file reaches this limit. Keep in mind, the FAT file system has a file size limit of 4GB.
+ .iconRecord(R.drawable.mcam_action_capture) // Sets a custom icon for the button used to start recording
+ .iconStop(R.drawable.mcam_action_stop) // Sets a custom icon for the button used to stop recording
+ .iconFrontCamera(R.drawable.mcam_camera_front) // Sets a custom icon for the button used to switch to the front camera
+ .iconRearCamera(R.drawable.mcam_camera_rear) // Sets a custom icon for the button used to switch to the rear camera
+ .iconPlay(R.drawable.evp_action_play) // Sets a custom icon used to start playback
+ .iconPause(R.drawable.evp_action_pause) // Sets a custom icon used to pause playback
+ .iconRestart(R.drawable.evp_action_restart) // Sets a custom icon used to restart playback
+ .labelRetry(R.string.mcam_retry) // Sets a custom button label for the button used to retry recording, when available
+ .labelConfirm(R.string.mcam_use_video) // Sets a custom button label for the button used to confirm/submit a recording
+// .autoRecordWithDelaySec(5) // The video camera will start recording automatically after a 5 second countdown. This disables switching between the front and back camera initially.
+// .autoRecordWithDelayMs(5000) // Same as the above, expressed with milliseconds instead of seconds.
+ .audioDisabled(false) // Set to true to record video without any audio.
+ .countdownSeconds(15f)
+ .start(CAMERA_RQ);
+ }
+ else{
+ Log.d(TAG, "init: starting camera with STILLSHOT enabled.");
+ mMaterialCamera
+ .allowRetry(true) // Whether or not 'Retry' is visible during playback
+ .autoSubmit(false) // Whether or not user is allowed to playback videos after recording. This can affect other things, discussed in the next section.
+ .saveDir(saveFolder) // The folder recorded videos are saved to
+// .primaryColorAttr(R.attr.colorPrimary) // The theme color used for the camera, defaults to colorPrimary of Activity in the constructor
+ .showPortraitWarning(false) // Whether or not a warning is displayed if the user presses record in portrait orientation
+ .defaultToFrontFacing(false) // Whether or not the camera will initially show the front facing camera
+// .allowChangeCamera(true) // Allows the user to change cameras.
+ .retryExits(false) // If true, the 'Retry' button in the playback screen will exit the camera instead of going back to the recorder
+ .restartTimerOnRetry(false) // If true, the countdown timer is reset to 0 when the user taps 'Retry' in playback
+ .continueTimerInPlayback(false) // If true, the countdown timer will continue to go down during playback, rather than pausing.
+ .videoEncodingBitRate(DEFAULT_BITRATE * 5) // Sets a custom bit rate for video recording.
+ .audioEncodingBitRate(50000) // Sets a custom bit rate for audio recording.
+ .videoFrameRate(30) // Sets a custom frame rate (FPS) for video recording.
+// .qualityProfile(MaterialCamera.QUALITY_1080P) // Sets a quality profile, manually setting bit rates or frame rates with other settings will overwrite individual quality profile settings
+ .videoPreferredHeight(720) // Sets a preferred height for the recorded video output.
+ .videoPreferredAspect(16f / 9f) // Sets a preferred aspect ratio for the recorded video output.
+ .maxAllowedFileSize(1024 * 1024 * 10) // Sets a max file size of 4MB, recording will stop if file reaches this limit. Keep in mind, the FAT file system has a file size limit of 4GB.
+ .iconRecord(R.drawable.mcam_action_capture) // Sets a custom icon for the button used to start recording
+ .iconStop(R.drawable.mcam_action_stop) // Sets a custom icon for the button used to stop recording
+ .iconFrontCamera(R.drawable.mcam_camera_front) // Sets a custom icon for the button used to switch to the front camera
+ .iconRearCamera(R.drawable.mcam_camera_rear) // Sets a custom icon for the button used to switch to the rear camera
+ .iconPlay(R.drawable.evp_action_play) // Sets a custom icon used to start playback
+ .iconPause(R.drawable.evp_action_pause) // Sets a custom icon used to pause playback
+ .iconRestart(R.drawable.evp_action_restart) // Sets a custom icon used to restart playback
+ .labelRetry(R.string.mcam_retry) // Sets a custom button label for the button used to retry recording, when available
+ .labelConfirm(R.string.mcam_use_video) // Sets a custom button label for the button used to confirm/submit a recording
+// .autoRecordWithDelaySec(5) // The video camera will start recording automatically after a 5 second countdown. This disables switching between the front and back camera initially.
+// .autoRecordWithDelayMs(5000) // Same as the above, expressed with milliseconds instead of seconds.
+ .audioDisabled(false) // Set to true to record video without any audio.
+ .countdownSeconds(15f)
+ .stillShot()
+ .start(CAMERA_RQ);
+ }
+ }
+
+
+
+ @Override
+ protected void onActivityResult(int requestCode, int resultCode, Intent data) {
+ super.onActivityResult(requestCode, resultCode, data);
+
+ Log.d(TAG, "onActivityResult: incoming result.");
+ // Received recording or error from MaterialCamera
+ if (requestCode == CAMERA_RQ) {
+
+ if (resultCode == RESULT_OK) {
+ Log.d(TAG, "onActivityResult: result is OK.");
+// Toast.makeText(this, "Saved to: " + data.getDataString(), Toast.LENGTH_LONG).show();
+ }
+ else if(resultCode == RESULT_START_CAMERA){
+ Log.d(TAG, "onActivityResult: got activity result. Opening Camera.");
+ mStartType = RESULT_START_CAMERA;
+ init();
+ }
+ else if(resultCode == RESULT_START_VIDEO){
+ Log.d(TAG, "onActivityResult: got activity result. Opening video.");
+ mStartType = RESULT_START_VIDEO;
+ init();
+ }
+ else if(resultCode == RESULT_ADD_NEW_STORY){
+ Log.d(TAG, "onActivityResult: preparing to add new story.");
+ Log.d(TAG, "onActivityResult: upload uri: " + data.getData());
+ if(data.hasExtra(MaterialCamera.DELETE_UPLOAD_FILE_EXTRA)){
+ setResult(
+ RESULT_ADD_NEW_STORY,
+ getIntent()
+ .putExtra(MaterialCamera.DELETE_UPLOAD_FILE_EXTRA, true)
+ .putExtra(MaterialCamera.STATUS_EXTRA, MaterialCamera.STATUS_RECORDED)
+ .setDataAndType(data.getData(), data.getType()));
+ finish();
+ }
+ else{
+ setResult(
+ RESULT_ADD_NEW_STORY,
+ getIntent()
+ .putExtra(MaterialCamera.DELETE_UPLOAD_FILE_EXTRA, false)
+ .putExtra(MaterialCamera.STATUS_EXTRA, MaterialCamera.STATUS_RECORDED)
+ .setDataAndType(data.getData(), data.getType()));
+ finish();
+ }
+
+
+ }
+ else if(data != null) {
+ Log.d(TAG, "onActivityResult: something went wrong.");
+ Exception e = (Exception) data.getSerializableExtra(MaterialCamera.ERROR_EXTRA);
+ e.printStackTrace();
+ Toast.makeText(this, e.getMessage(), Toast.LENGTH_LONG).show();
+ }
+ }
+ }
+
+
+
+
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/app/src/main/java/tabian/com/instagramclone2/opengl/OpenGLES10Activity.java b/app/src/main/java/tabian/com/instagramclone2/opengl/OpenGLES10Activity.java
new file mode 100644
index 0000000..c49118a
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/opengl/OpenGLES10Activity.java
@@ -0,0 +1,180 @@
+package tabian.com.instagramclone2.opengl;
+
+
+import android.os.Bundle;
+import android.support.annotation.Nullable;
+import android.support.v7.app.AppCompatActivity;
+import android.util.DisplayMetrics;
+import android.util.Log;
+import android.view.WindowManager;
+
+import com.google.android.exoplayer2.upstream.DefaultBandwidthMeter;
+
+import org.json.JSONArray;
+import org.json.JSONException;
+
+import java.util.ArrayList;
+
+import tabian.com.instagramclone2.R;
+import tabian.com.instagramclone2.models.UserStories;
+
+
+/**
+ * Created by User on 11/27/2017.
+ */
+
+public class OpenGLES10Activity extends AppCompatActivity{
+
+    private static final String TAG = "OpenGLES10Activity";
+    private static final int RELEASE_PLAYER = 0;
+    private static final int INITIALIZE_PLAYER = 1;
+    private static final int LOADING_ERROR = 2;
+    private static final DefaultBandwidthMeter BANDWIDTH_METER = new DefaultBandwidthMeter();
+
+
+    // NOTE(review): raw ArrayList declarations; element types are not visible
+    // from this file. mUserStories presumably holds UserStories (see the
+    // commented-out parceling code below) -- confirm before adding generics.
+    private ArrayList mMedia = new ArrayList<>();
+    // Master JSON payload describing every user's stories, delivered via intent.
+    private JSONArray mMasterStoriesArray = new JSONArray();
+    private MyGLSurfaceView mGLView;
+    private float mScreenWidth;
+    private float mScreenHeight;
+
+    // Index of the story to start playback on, delivered via intent.
+    private int resourceIndex = 0;
+    private ArrayList mUserStories = new ArrayList<>();
+
+
+    @Override
+    protected void onCreate(@Nullable Bundle savedInstanceState) {
+        super.onCreate(savedInstanceState);
+        setContentView(R.layout.activity_opengl);
+//        initSurfaceView();
+        getIncomingIntent();
+    }
+
+
+    // Reads the stories JSON string and the start index from the launching
+    // intent. The surface view is only initialized when both extras are
+    // present and the JSON parses cleanly.
+    private void getIncomingIntent(){
+        Log.d(TAG, "getIncomingIntent: checking for incoming intent.");
+
+        if (getIntent().hasExtra(getString(R.string.user_stories)) && getIntent().hasExtra(getString(R.string.resource_index))) {
+            Log.d(TAG, "getIncomingIntent: found extras.");
+//            mUserStories = getIntent().getParcelableArrayListExtra(getString(R.string.user_stories));
+//            for(int i = 0; i < mUserStories.size(); i++){
+//                Log.d(TAG, "getIncomingIntent: " + mUserStories.get(i).getMedia().toString());
+//            }
+            String jsonArray = getIntent().getStringExtra(getString(R.string.user_stories));
+//            JSONArray array = null;
+            try {
+                mMasterStoriesArray = new JSONArray(jsonArray);
+
+//                for(int i = 0; i < mMasterStoriesArray.length(); i++){
+//                    Log.d(TAG, "getIncomingIntent: username: " + mMasterStoriesArray.getJSONObject(i).getJSONObject(getString(R.string.user_account_settings)).get(getString(R.string.field_username)));
+//                    int numStories = mMasterStoriesArray.getJSONObject(i).getJSONArray(getString(R.string.user_stories)).length();
+//                    for(int j = 0; j < numStories; j++){
+//                        Log.d(TAG, "getIncomingIntent: story #: " + j);
+//                        Log.d(TAG, "getIncomingIntent: user_stories: " + mMasterStoriesArray.getJSONObject(i).getJSONArray(getString(R.string.user_stories)).getJSONObject(j).toString());
+//
+//                    }
+//                }
+
+                resourceIndex = getIntent().getIntExtra(getString(R.string.resource_index), 0);
+                initSurfaceView();
+
+            } catch (JSONException e) {
+                e.printStackTrace();
+            }
+        }
+    }
+
+
+    // Puts the window into fullscreen, measures the screen, and hands the GL
+    // surface view everything it needs to render the stories.
+    private void initSurfaceView(){
+        mGLView = findViewById(R.id.my_gl_surfaceview);
+
+        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
+                WindowManager.LayoutParams.FLAG_FULLSCREEN);
+        DisplayMetrics displaymetrics = new DisplayMetrics();
+        this.getWindowManager().getDefaultDisplay().getMetrics(displaymetrics);
+        mScreenWidth = displaymetrics.widthPixels;
+        mScreenHeight = displaymetrics.heightPixels;
+
+        Log.d(TAG, "initSurfaceView: screen width: " + mScreenWidth);
+        Log.d(TAG, "initSurfaceView: screen height: " + mScreenHeight);
+
+        //continue here
+        mGLView.setConfig(this, mScreenHeight, mScreenWidth, mMasterStoriesArray, resourceIndex);
+
+    }
+
+
+
+    @Override
+    public void onResume() {
+        Log.d(TAG, "onResume: called.");
+        super.onResume();
+        if(mGLView == null){
+//            initSurfaceView();
+        }
+    }
+
+    // Pauses GL rendering and frees the video players while the activity is
+    // in the background.
+    @Override
+    public void onPause() {
+        Log.d(TAG, "onPause: called.");
+        super.onPause();
+        if(mGLView != null){
+            mGLView.onPause();
+            mGLView.reset();
+            releasePlayers();
+        }
+    }
+
+
+
+    // NOTE(review): onStop runs after onPause, so reset()/releasePlayers()
+    // are invoked twice on a normal background transition -- presumably the
+    // renderer tolerates repeated release calls; confirm.
+    @Override
+    public void onStop() {
+        Log.d(TAG, "onStop: called.");
+        super.onStop();
+        if(mGLView != null){
+            mGLView.reset();
+            releasePlayers();
+        }
+    }
+
+
+    @Override
+    public void onStart() {
+        Log.d(TAG, "onStart: called.");
+        super.onStart();
+//        initSurfaceView();
+    }
+
+    @Override
+    public void onDestroy() {
+        Log.d(TAG, "onDestroy: called.");
+        super.onDestroy();
+        if(mGLView != null){
+            mGLView.reset();
+            releasePlayers();
+        }
+    }
+
+    // Delegates to the renderer; every caller null-checks mGLView first.
+    private void releasePlayers() {
+        mGLView.mRenderer.releasePlayers();
+    }
+
+
+
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/app/src/main/java/tabian/com/instagramclone2/videocompressor/Config.java b/app/src/main/java/tabian/com/instagramclone2/videocompressor/Config.java
new file mode 100644
index 0000000..fa1682e
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/videocompressor/Config.java
@@ -0,0 +1,26 @@
+package tabian.com.instagramclone2.videocompressor;
+
+/*
+* By Jorge E. Hernandez (@lalongooo) 2015
+* */
+
+/*
+* Static class to define general configuration values of the application
+* */
+public class Config {
+
+    /**
+     * Application root directory. All media files will be stored here.
+     */
+    public static final String VIDEO_COMPRESSOR_APPLICATION_DIR_NAME = "Stories";
+
+    /**
+     * Sub-folder for compressed video files. Empty here, so compressed
+     * videos land directly in the application root directory.
+     */
+    public static final String VIDEO_COMPRESSOR_COMPRESSED_VIDEOS_DIR = "";
+
+    /**
+     * Sub-folder for temporary files created while compressing.
+     */
+    public static final String VIDEO_COMPRESSOR_TEMP_DIR = "/Temp/";
+}
diff --git a/app/src/main/java/tabian/com/instagramclone2/videocompressor/VideoCompressorApplication.java b/app/src/main/java/tabian/com/instagramclone2/videocompressor/VideoCompressorApplication.java
new file mode 100644
index 0000000..702ac65
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/videocompressor/VideoCompressorApplication.java
@@ -0,0 +1,17 @@
+package tabian.com.instagramclone2.videocompressor;/*
+* By Jorge E. Hernandez (@lalongooo) 2015
+* */
+
+import android.app.Application;
+
+import tabian.com.instagramclone2.videocompressor.file.FileUtils;
+
+public class VideoCompressorApplication extends Application {
+
+    // Creates the application's media folders on external storage as soon as
+    // the process starts, so later file writes can assume they exist.
+    // NOTE(review): on Android 6+ this will silently fail until the
+    // WRITE_EXTERNAL_STORAGE runtime permission is granted -- confirm callers
+    // cope with the folders being absent.
+    @Override
+    public void onCreate() {
+        super.onCreate();
+        FileUtils.createApplicationFolder();
+    }
+
+}
\ No newline at end of file
diff --git a/app/src/main/java/tabian/com/instagramclone2/videocompressor/file/FileUtils.java b/app/src/main/java/tabian/com/instagramclone2/videocompressor/file/FileUtils.java
new file mode 100644
index 0000000..b6e7c5e
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/videocompressor/file/FileUtils.java
@@ -0,0 +1,77 @@
+package tabian.com.instagramclone2.videocompressor.file;
+
+/*
+* By Jorge E. Hernandez (@lalongooo) 2015
+* */
+
+import android.content.ContentResolver;
+import android.content.Context;
+import android.net.Uri;
+import android.os.Environment;
+import android.util.Log;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+
+import tabian.com.instagramclone2.videocompressor.Config;
+
+
+public class FileUtils {
+
+    private static final String TAG = "FileUtils";
+
+    /**
+     * Creates the application's media folders (root, compressed-videos and
+     * temp) under external storage. mkdirs() is a no-op when the folder
+     * already exists, so this is safe to call on every startup.
+     */
+    public static void createApplicationFolder() {
+        File f = new File(Environment.getExternalStorageDirectory(), File.separator + Config.VIDEO_COMPRESSOR_APPLICATION_DIR_NAME);
+        f.mkdirs();
+        f = new File(Environment.getExternalStorageDirectory(), File.separator + Config.VIDEO_COMPRESSOR_APPLICATION_DIR_NAME + Config.VIDEO_COMPRESSOR_COMPRESSED_VIDEOS_DIR);
+        f.mkdirs();
+        f = new File(Environment.getExternalStorageDirectory(), File.separator + Config.VIDEO_COMPRESSOR_APPLICATION_DIR_NAME + Config.VIDEO_COMPRESSOR_TEMP_DIR);
+        f.mkdirs();
+    }
+
+    /**
+     * Copies the content behind {@code uri} into a file named
+     * {@code fileName} inside the application's temp directory.
+     *
+     * @param fileName name of the file to create in the temp folder
+     * @param context  used to obtain a ContentResolver
+     * @param uri      content uri to read from
+     * @return the created file, or null when the uri could not be opened.
+     *         On an I/O error mid-copy the (possibly partial) file is still
+     *         returned, matching the original behavior.
+     */
+    public static File saveTempFile(String fileName, Context context, Uri uri) {
+        ContentResolver resolver = context.getContentResolver();
+        File mFile = null;
+
+        // try-with-resources closes the streams on every path, replacing the
+        // original's manual finally-block bookkeeping.
+        try (InputStream in = resolver.openInputStream(uri)) {
+            // Fix: openInputStream() may return null for an unresolvable uri;
+            // the original dereferenced it and crashed with an NPE.
+            if (in == null) {
+                Log.e(TAG, "saveTempFile: could not open input stream for " + uri);
+                return null;
+            }
+
+            File tempFolder = new File(Environment.getExternalStorageDirectory().getPath() + File.separator + Config.VIDEO_COMPRESSOR_APPLICATION_DIR_NAME + Config.VIDEO_COMPRESSOR_TEMP_DIR);
+            if (!tempFolder.exists()) {
+                tempFolder.mkdir();
+            }
+            mFile = new File(tempFolder, fileName);
+
+            try (FileOutputStream out = new FileOutputStream(mFile, false)) {
+                byte[] buffer = new byte[8192];
+                int read;
+                while ((read = in.read(buffer)) != -1) {
+                    out.write(buffer, 0, read);
+                }
+                out.flush();
+            }
+        } catch (IOException e) {
+            Log.e(TAG, "", e);
+        }
+        return mFile;
+    }
+}
diff --git a/app/src/main/java/tabian/com/instagramclone2/videocompressor/video/InputSurface.java b/app/src/main/java/tabian/com/instagramclone2/videocompressor/video/InputSurface.java
new file mode 100644
index 0000000..0ff3ce9
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/videocompressor/video/InputSurface.java
@@ -0,0 +1,136 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package tabian.com.instagramclone2.videocompressor.video;
+
+import android.annotation.TargetApi;
+import android.opengl.EGL14;
+import android.opengl.EGLConfig;
+import android.opengl.EGLContext;
+import android.opengl.EGLDisplay;
+import android.opengl.EGLExt;
+import android.opengl.EGLSurface;
+import android.os.Build;
+import android.view.Surface;
+
+@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
+public class InputSurface {
+ private static final boolean VERBOSE = false;
+ private static final int EGL_RECORDABLE_ANDROID = 0x3142;
+ private static final int EGL_OPENGL_ES2_BIT = 4;
+ private EGLDisplay mEGLDisplay;
+ private EGLContext mEGLContext;
+ private EGLSurface mEGLSurface;
+ private Surface mSurface;
+
+ public InputSurface(Surface surface) {
+ if (surface == null) {
+ throw new NullPointerException();
+ }
+ mSurface = surface;
+ eglSetup();
+ }
+
+ private void eglSetup() {
+ mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
+ if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
+ throw new RuntimeException("unable to get EGL14 display");
+ }
+ int[] version = new int[2];
+ if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
+ mEGLDisplay = null;
+ throw new RuntimeException("unable to initialize EGL14");
+ }
+
+ int[] attribList = {
+ EGL14.EGL_RED_SIZE, 8,
+ EGL14.EGL_GREEN_SIZE, 8,
+ EGL14.EGL_BLUE_SIZE, 8,
+ EGL14.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+ EGL_RECORDABLE_ANDROID, 1,
+ EGL14.EGL_NONE
+ };
+ EGLConfig[] configs = new EGLConfig[1];
+ int[] numConfigs = new int[1];
+ if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
+ numConfigs, 0)) {
+ throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config");
+ }
+
+ int[] attrib_list = {
+ EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
+ EGL14.EGL_NONE
+ };
+
+ mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT, attrib_list, 0);
+ checkEglError("eglCreateContext");
+ if (mEGLContext == null) {
+ throw new RuntimeException("null context");
+ }
+
+ int[] surfaceAttribs = {
+ EGL14.EGL_NONE
+ };
+ mEGLSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, configs[0], mSurface,
+ surfaceAttribs, 0);
+ checkEglError("eglCreateWindowSurface");
+ if (mEGLSurface == null) {
+ throw new RuntimeException("surface was null");
+ }
+ }
+
+ public void release() {
+ if (EGL14.eglGetCurrentContext().equals(mEGLContext)) {
+ EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
+ }
+ EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
+ EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
+ mSurface.release();
+ mEGLDisplay = null;
+ mEGLContext = null;
+ mEGLSurface = null;
+ mSurface = null;
+ }
+
+ public void makeCurrent() {
+ if (!EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
+ throw new RuntimeException("eglMakeCurrent failed");
+ }
+ }
+
+ public boolean swapBuffers() {
+ return EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface);
+ }
+
+ public Surface getSurface() {
+ return mSurface;
+ }
+
+ public void setPresentationTime(long nsecs) {
+ EGLExt.eglPresentationTimeANDROID(mEGLDisplay, mEGLSurface, nsecs);
+ }
+
+ private void checkEglError(String msg) {
+ boolean failed = false;
+ int error;
+ while ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
+ failed = true;
+ }
+ if (failed) {
+ throw new RuntimeException("EGL error encountered (see log)");
+ }
+ }
+}
diff --git a/app/src/main/java/tabian/com/instagramclone2/videocompressor/video/MP4Builder.java b/app/src/main/java/tabian/com/instagramclone2/videocompressor/video/MP4Builder.java
new file mode 100644
index 0000000..db9d810
--- /dev/null
+++ b/app/src/main/java/tabian/com/instagramclone2/videocompressor/video/MP4Builder.java
@@ -0,0 +1,445 @@
+/*
+ * This is the source code of Telegram for Android v. 1.7.x.
+ * It is licensed under GNU GPL v. 2 or later.
+ * You should have received a copy of the license in this archive (see LICENSE).
+ *
+ * Copyright Nikolai Kudashov, 2013-2014.
+ */
+
+package tabian.com.instagramclone2.videocompressor.video;
+
+import android.annotation.TargetApi;
+import android.media.MediaCodec;
+import android.media.MediaFormat;
+
+import com.coremedia.iso.BoxParser;
+import com.coremedia.iso.IsoFile;
+import com.coremedia.iso.IsoTypeWriter;
+import com.coremedia.iso.boxes.Box;
+import com.coremedia.iso.boxes.Container;
+import com.coremedia.iso.boxes.DataEntryUrlBox;
+import com.coremedia.iso.boxes.DataInformationBox;
+import com.coremedia.iso.boxes.DataReferenceBox;
+import com.coremedia.iso.boxes.FileTypeBox;
+import com.coremedia.iso.boxes.HandlerBox;
+import com.coremedia.iso.boxes.MediaBox;
+import com.coremedia.iso.boxes.MediaHeaderBox;
+import com.coremedia.iso.boxes.MediaInformationBox;
+import com.coremedia.iso.boxes.MovieBox;
+import com.coremedia.iso.boxes.MovieHeaderBox;
+import com.coremedia.iso.boxes.SampleSizeBox;
+import com.coremedia.iso.boxes.SampleTableBox;
+import com.coremedia.iso.boxes.SampleToChunkBox;
+import com.coremedia.iso.boxes.StaticChunkOffsetBox;
+import com.coremedia.iso.boxes.SyncSampleBox;
+import com.coremedia.iso.boxes.TimeToSampleBox;
+import com.coremedia.iso.boxes.TrackBox;
+import com.coremedia.iso.boxes.TrackHeaderBox;
+import com.googlecode.mp4parser.DataSource;
+import com.googlecode.mp4parser.util.Matrix;
+
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.channels.FileChannel;
+import java.nio.channels.WritableByteChannel;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+
+@TargetApi(16)
+public class MP4Builder {
+
+ private InterleaveChunkMdat mdat = null;
+ private Mp4Movie currentMp4Movie = null;
+ private FileOutputStream fos = null;
+ private FileChannel fc = null;
+ private long dataOffset = 0;
+ private long writedSinceLastMdat = 0;
+ private boolean writeNewMdat = true;
+ private HashMap