Gradle dependencies
compile group: 'androidx.leanback', name: 'leanback', version: '1.2.0-alpha04'
- groupId: androidx.leanback
- artifactId: leanback
- version: 1.2.0-alpha04
The artifact androidx.leanback:leanback:1.2.0-alpha04 is located in the Google Maven repository (https://maven.google.com/)
Androidx artifact mapping:
androidx.leanback:leanback (previously com.android.support:leanback-v17)
Androidx class mapping:
androidx.leanback.app.SearchSupportFragment (previously android.support.v17.leanback.app.SearchSupportFragment)
Overview
A fragment to handle searches. An application will supply an implementation
of the SearchSupportFragment.SearchResultProvider interface to handle the search and return
an ObjectAdapter containing the results. The results are rendered
into a RowsSupportFragment, in the same way that they are in a BrowseSupportFragment.
A SpeechRecognizer object will be created, for which your application will need to declare
the android.permission.RECORD_AUDIO permission in its AndroidManifest file. If the app's target
SDK version is >= 23 and the device runs API level >= 23, a permission dialog is shown the first
time speech recognition is used. 0 is used as the requestCode in the requestPermissions() call.
SearchSupportFragment.setSpeechRecognitionCallback(SpeechRecognitionCallback) is deprecated.
Speech recognition is started automatically when the fragment is created, but not when the
fragment is restored from an instance state. The activity may start it manually by calling
SearchSupportFragment.startRecognition(), typically in onNewIntent().
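A minimal sketch of this pattern is shown below. The MySearchFragment subclass and the MyCardPresenter used to render each result item are hypothetical and not part of the library; the rows themselves use the library's ArrayObjectAdapter, ListRow, and ListRowPresenter.
import android.os.Bundle;
import androidx.leanback.app.SearchSupportFragment;
import androidx.leanback.widget.ArrayObjectAdapter;
import androidx.leanback.widget.HeaderItem;
import androidx.leanback.widget.ListRow;
import androidx.leanback.widget.ListRowPresenter;
import androidx.leanback.widget.ObjectAdapter;

// Sketch of a search screen: the fragment acts as its own SearchResultProvider.
public class MySearchFragment extends SearchSupportFragment
        implements SearchSupportFragment.SearchResultProvider {

    // One row of results per query; ListRowPresenter renders each ListRow.
    private final ArrayObjectAdapter mRowsAdapter =
            new ArrayObjectAdapter(new ListRowPresenter());

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setSearchResultProvider(this);
    }

    @Override
    public ObjectAdapter getResultsAdapter() {
        // Handed to the fragment before the first onQueryTextChange() call.
        return mRowsAdapter;
    }

    @Override
    public boolean onQueryTextChange(String newQuery) {
        loadRows(newQuery);
        return true;
    }

    @Override
    public boolean onQueryTextSubmit(String query) {
        loadRows(query);
        return true;
    }

    private void loadRows(String query) {
        // Placeholder: replace with a real (ideally asynchronous) lookup.
        mRowsAdapter.clear();
        ArrayObjectAdapter listRowAdapter =
                new ArrayObjectAdapter(new MyCardPresenter()); // hypothetical Presenter for result cards
        listRowAdapter.add("Result for \"" + query + "\"");
        mRowsAdapter.add(new ListRow(new HeaderItem(0, "Search results"), listRowAdapter));
    }
}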
Summary
Methods
public static Bundle | createArgs(Bundle args, java.lang.String query)
public static Bundle | createArgs(Bundle args, java.lang.String query, java.lang.String title)
public void | displayCompletions(CompletionInfo[] completions)
Displays the completions shown by the IME.
public void | displayCompletions(java.util.List<java.lang.String> completions)
Displays the completions shown by the IME.
public Drawable | getBadgeDrawable()
Returns the badge drawable in the search bar.
public Intent | getRecognizerIntent()
Returns an intent that can be used to request speech recognition.
public RowsSupportFragment | getRowsSupportFragment()
Returns the RowsSupportFragment that shows the result rows.
public java.lang.String | getTitle()
Returns the title set in the search bar.
public static SearchSupportFragment | newInstance(java.lang.String query)
Creates a search fragment with a given search query.
public void | onCreate(Bundle savedInstanceState)
Called to do initial creation of a fragment.
public View | onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState)
Called to have the fragment instantiate its user interface view.
public void | onDestroy()
Called when the fragment is no longer in use.
public void | onDestroyView()
Called when the view previously created by Fragment.onCreateView(LayoutInflater, ViewGroup, Bundle) has been detached from the fragment.
public void | onPause()
Called when the Fragment is no longer resumed.
public void | onRequestPermissionsResult(int requestCode, java.lang.String[] permissions, int[] grantResults)
Callback for the result from requesting permissions.
public void | onResume()
Called when the fragment is visible to the user and actively running.
public void | onStart()
Called when the Fragment is visible to the user.
public void | setBadgeDrawable(Drawable drawable)
Sets the badge drawable that will be shown inside the search bar next to the title.
public void | setOnItemViewClickedListener(OnItemViewClickedListener listener)
Sets an item clicked listener for the results.
public void | setOnItemViewSelectedListener(OnItemViewSelectedListener listener)
Sets an item selection listener for the results.
public void | setSearchAffordanceColors(SearchOrbView.Colors colors)
Sets the background color of the search orb in its not-listening state.
public void | setSearchAffordanceColorsInListening(SearchOrbView.Colors colors)
Sets the background color of the search orb in its listening state.
public void | setSearchQuery(Intent intent, boolean submit)
Sets the text of the search query based on the RecognizerIntent.EXTRA_RESULTS in the given intent, and optionally submits the query.
public void | setSearchQuery(java.lang.String query, boolean submit)
Sets the text of the search query and optionally submits the query.
public void | setSearchResultProvider(SearchSupportFragment.SearchResultProvider searchResultProvider)
Sets the search provider that is responsible for returning results for the search query.
public void | setSpeechRecognitionCallback(SpeechRecognitionCallback callback)
Sets this callback to have the fragment pass speech recognition requests to the activity rather than using a SpeechRecognizer object.
public void | setTitle(java.lang.String title)
Sets the title string to be shown in an empty search bar.
public void | startRecognition()
Starts speech recognition.
from Fragment | dump, equals, getActivity, getAllowEnterTransitionOverlap, getAllowReturnTransitionOverlap, getArguments, getChildFragmentManager, getContext, getDefaultViewModelCreationExtras, getDefaultViewModelProviderFactory, getEnterTransition, getExitTransition, getFragmentManager, getHost, getId, getLayoutInflater, getLayoutInflater, getLifecycle, getLoaderManager, getParentFragment, getParentFragmentManager, getReenterTransition, getResources, getRetainInstance, getReturnTransition, getSavedStateRegistry, getSharedElementEnterTransition, getSharedElementReturnTransition, getString, getString, getTag, getTargetFragment, getTargetRequestCode, getText, getUserVisibleHint, getView, getViewLifecycleOwner, getViewLifecycleOwnerLiveData, getViewModelStore, hashCode, hasOptionsMenu, instantiate, instantiate, isAdded, isDetached, isHidden, isInLayout, isMenuVisible, isRemoving, isResumed, isStateSaved, isVisible, onActivityCreated, onActivityResult, onAttach, onAttach, onAttachFragment, onConfigurationChanged, onContextItemSelected, onCreateAnimation, onCreateAnimator, onCreateContextMenu, onCreateOptionsMenu, onDestroyOptionsMenu, onDetach, onGetLayoutInflater, onHiddenChanged, onInflate, onInflate, onLowMemory, onMultiWindowModeChanged, onOptionsItemSelected, onOptionsMenuClosed, onPictureInPictureModeChanged, onPrepareOptionsMenu, onPrimaryNavigationFragmentChanged, onSaveInstanceState, onStop, onViewCreated, onViewStateRestored, postponeEnterTransition, postponeEnterTransition, registerForActivityResult, registerForActivityResult, registerForContextMenu, requestPermissions, requireActivity, requireArguments, requireContext, requireFragmentManager, requireHost, requireParentFragment, requireView, setAllowEnterTransitionOverlap, setAllowReturnTransitionOverlap, setArguments, setEnterSharedElementCallback, setEnterTransition, setExitSharedElementCallback, setExitTransition, setHasOptionsMenu, setInitialSavedState, setMenuVisibility, setReenterTransition, setRetainInstance, setReturnTransition, setSharedElementEnterTransition, setSharedElementReturnTransition, setTargetFragment, setUserVisibleHint, shouldShowRequestPermissionRationale, startActivity, startActivity, startActivityForResult, startActivityForResult, startIntentSenderForResult, startPostponedEnterTransition, toString, unregisterForContextMenu |
from java.lang.Object | clone, finalize, getClass, notify, notifyAll, wait, wait, wait |
Constructors
public
SearchSupportFragment()
Methods
public void
onRequestPermissionsResult(int requestCode, java.lang.String[] permissions, int[] grantResults)
Deprecated: This method has been deprecated in favor of using the Activity Result API
which brings increased type safety via an ActivityResultContract and the prebuilt
contracts for common intents available in
ActivityResultContracts, provides hooks for
testing, and allows receiving results in separate, testable classes independent from your
fragment. Use
Fragment.registerForActivityResult(ActivityResultContract, ActivityResultCallback) passing
in a ActivityResultContracts.RequestMultiplePermissions object for the ActivityResultContract and
handling the result in the callback.
Callback for the result from requesting permissions. This method
is invoked for every call on Fragment.requestPermissions(String[], int).
Note: It is possible that the permissions request interaction
with the user is interrupted. In this case you will receive empty permissions
and results arrays which should be treated as a cancellation.
Parameters:
requestCode: The request code passed in Fragment.requestPermissions(String[], int).
permissions: The requested permissions. Never null.
grantResults: The grant results for the corresponding permissions,
which are either android.content.pm.PackageManager.PERMISSION_GRANTED
or android.content.pm.PackageManager.PERMISSION_DENIED. Never null.
See also: Fragment.requestPermissions(String[], int)
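A hedged sketch of the replacement flow described above follows. The fragment class and the mAudioGranted field are illustrative only; note that SearchSupportFragment itself still calls requestPermissions() internally with request code 0.
import android.Manifest;
import java.util.Map;
import androidx.activity.result.ActivityResultLauncher;
import androidx.activity.result.contract.ActivityResultContracts;
import androidx.fragment.app.Fragment;

// Illustrative fragment requesting RECORD_AUDIO via the Activity Result API.
public class AudioPermissionFragment extends Fragment {

    private boolean mAudioGranted;

    // Registering in a field initializer (or in onCreate()) is safe; the launcher
    // delivers results even across configuration changes.
    private final ActivityResultLauncher<String[]> mPermissionLauncher =
            registerForActivityResult(
                    new ActivityResultContracts.RequestMultiplePermissions(),
                    (Map<String, Boolean> results) -> {
                        // True only if RECORD_AUDIO was granted in this result set.
                        mAudioGranted = Boolean.TRUE.equals(
                                results.get(Manifest.permission.RECORD_AUDIO));
                    });

    void requestAudioPermission() {
        mPermissionLauncher.launch(new String[] {Manifest.permission.RECORD_AUDIO});
    }
}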
public static Bundle
createArgs(Bundle args, java.lang.String query)
Parameters:
args: Bundle to use for the arguments, if null a new Bundle will be created.
public static Bundle
createArgs(Bundle args, java.lang.String query, java.lang.String title)
public static SearchSupportFragment
newInstance(java.lang.String query)
Creates a search fragment with a given search query.
You should only use this if you need to start the search fragment with a
pre-filled query.
Parameters:
query: The search query to begin with.
Returns:
A new SearchSupportFragment.
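For example, a hosting activity could attach a search fragment pre-filled with a query as sketched below; R.id.search_container is a hypothetical container view ID.
// In the hosting activity (e.g. onCreate()).
SearchSupportFragment fragment = SearchSupportFragment.newInstance("star trek");
getSupportFragmentManager()
        .beginTransaction()
        .replace(R.id.search_container, fragment)  // hypothetical container ID
        .commit();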
public void
onCreate(Bundle savedInstanceState)
Called to do initial creation of a fragment. This is called after
Fragment.onAttach(Activity) and before
Fragment.onCreateView(LayoutInflater, ViewGroup, Bundle).
Note that this can be called while the fragment's activity is
still in the process of being created. As such, you can not rely
on things like the activity's content view hierarchy being initialized
at this point. If you want to do work once the activity itself is
created, add a LifecycleObserver on the
activity's Lifecycle, removing it when it receives the
Lifecycle.State.CREATED callback.
Any restored child fragments will be created before the base
Fragment.onCreate
method returns.
Parameters:
savedInstanceState: If the fragment is being re-created from
a previous saved state, this is the state.
public View
onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState)
Called to have the fragment instantiate its user interface view.
This is optional, and non-graphical fragments can return null. This will be called between
Fragment.onCreate(Bundle) and Fragment.onViewCreated(View, Bundle).
A default View can be returned by calling Fragment.Fragment(int) in your
constructor. Otherwise, this method returns null.
It is recommended to only inflate the layout in this method and move
logic that operates on the returned View to Fragment.onViewCreated(View, Bundle).
If you return a View from here, you will later be called in
Fragment.onDestroyView() when the view is being released.
Parameters:
inflater: The LayoutInflater object that can be used to inflate
any views in the fragment,
container: If non-null, this is the parent view that the fragment's
UI should be attached to. The fragment should not add the view itself,
but this can be used to generate the LayoutParams of the view.
savedInstanceState: If non-null, this fragment is being re-constructed
from a previous saved state as given here.
Returns:
Return the View for the fragment's UI, or null.
public void
onStart()
Called when the Fragment is visible to the user. This is generally
tied to Activity.onStart() of the containing Activity's lifecycle.
public void
onResume()
Called when the fragment is visible to the user and actively running.
This is generally tied to Activity.onResume() of the containing
Activity's lifecycle.
public void
onPause()
Called when the Fragment is no longer resumed. This is generally
tied to Activity.onPause() of the containing Activity's lifecycle.
public void
onDestroyView()
Called when the view previously created by Fragment.onCreateView(LayoutInflater, ViewGroup, Bundle) has
been detached from the fragment. The next time the fragment needs
to be displayed, a new view will be created. This is called
after Fragment.onStop() and before Fragment.onDestroy(). It is called
regardless of whether Fragment.onCreateView(LayoutInflater, ViewGroup, Bundle) returned a
non-null view. Internally it is called after the view's state has
been saved but before it has been removed from its parent.
public void
onDestroy()
Called when the fragment is no longer in use. This is called
after Fragment.onStop() and before Fragment.onDetach().
public RowsSupportFragment
getRowsSupportFragment()
Returns the RowsSupportFragment that shows the result rows. The RowsSupportFragment is
initialized after SearchSupportFragment.onCreateView().
Returns:
The RowsSupportFragment that shows the result rows.
public void
startRecognition()
Starts speech recognition. A typical use case is that the activity receives an
onNewIntent() call when the user clicks a MIC button. Note that SearchSupportFragment
automatically starts speech recognition when it is first created, so there is no need
to call startRecognition() at fragment creation time.
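A sketch of the typical pattern, assuming the hosting activity keeps the fragment in a hypothetical mSearchFragment field and receives a new intent when the remote's MIC button is pressed:
// In the hosting activity.
@Override
protected void onNewIntent(Intent intent) {
    super.onNewIntent(intent);
    setIntent(intent);
    // The fragment only auto-starts recognition when first created, so restart it here.
    mSearchFragment.startRecognition();
}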
public void
setSearchResultProvider(SearchSupportFragment.SearchResultProvider searchResultProvider)
Sets the search provider that is responsible for returning results for the
search query.
public void
setOnItemViewSelectedListener(OnItemViewSelectedListener listener)
Sets an item selection listener for the results.
Parameters:
listener: The item selection listener to be invoked when an item in
the search results is selected.
public void
setOnItemViewClickedListener(OnItemViewClickedListener listener)
Sets an item clicked listener for the results.
Parameters:
listener: The item clicked listener to be invoked when an item in
the search results is clicked.
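For example, inside the hypothetical MySearchFragment from the overview, a click listener usually forwards the selected item to a details screen; DetailsActivity, the Video model class, and the extra key are hypothetical.
// Called from onCreate() of the hypothetical MySearchFragment.
private void setupItemClicks() {
    setOnItemViewClickedListener(new OnItemViewClickedListener() {
        @Override
        public void onItemClicked(Presenter.ViewHolder itemViewHolder, Object item,
                RowPresenter.ViewHolder rowViewHolder, Row row) {
            if (item instanceof Video) {  // hypothetical model class
                Intent intent = new Intent(getActivity(), DetailsActivity.class);
                intent.putExtra("video_id", ((Video) item).getId());  // hypothetical extra key
                startActivity(intent);
            }
        }
    });
}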
public void
setTitle(java.lang.String title)
Sets the title string to be shown in an empty search bar. The title
may be placed in a call-to-action, such as "Search title" or
"Speak to search title".
public java.lang.String
getTitle()
Returns the title set in the search bar.
public void
setBadgeDrawable(Drawable drawable)
Sets the badge drawable that will be shown inside the search bar next to
the title.
public Drawable
getBadgeDrawable()
Returns the badge drawable in the search bar.
public void
setSearchAffordanceColors(SearchOrbView.Colors colors)
Sets the background color of the search orb in its not-listening state.
Parameters:
colors: SearchOrbView.Colors.
public void
setSearchAffordanceColorsInListening(SearchOrbView.Colors colors)
Sets the background color of the search orb in its listening state.
Parameters:
colors: SearchOrbView.Colors.
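As a small usage sketch, the search bar can be branded from a SearchSupportFragment subclass's onCreate(); the title string, drawable resource, and specific colors below are hypothetical choices.
// Called from onCreate() of a SearchSupportFragment subclass.
private void setupSearchBarBranding() {
    setTitle("My App");  // hypothetical title
    setBadgeDrawable(ContextCompat.getDrawable(requireContext(), R.drawable.app_badge));  // hypothetical drawable
    setSearchAffordanceColors(new SearchOrbView.Colors(
            Color.DKGRAY,   // orb background when not listening
            Color.GRAY,     // brighter color used by the orb animation
            Color.WHITE));  // icon color
    setSearchAffordanceColorsInListening(new SearchOrbView.Colors(Color.RED));
}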
public void
displayCompletions(java.util.List<java.lang.String> completions)
Displays the completions shown by the IME. An application may provide
a list of query completions that the system will show in the IME.
Parameters:
completions: A list of completions to show in the IME. Setting to
null or empty will clear the list.
public void
displayCompletions(CompletionInfo completions[])
Displays the completions shown by the IME. An application may provide
a list of query completions that the system will show in the IME.
Parameters:
completions: A list of completions to show in the IME. Setting to
null or empty will clear the list.
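For example, an application could push plain-string suggestions into the IME as the query changes; mSuggestionStore is a hypothetical helper, used here inside the hypothetical MySearchFragment from the overview.
// Inside the hypothetical MySearchFragment.
@Override
public boolean onQueryTextChange(String newQuery) {
    // mSuggestionStore is a hypothetical helper returning suggestion strings.
    List<String> suggestions = mSuggestionStore.suggestionsFor(newQuery);
    displayCompletions(suggestions);  // shown by the IME as the user types
    loadRows(newQuery);
    return true;
}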
public void
setSpeechRecognitionCallback(SpeechRecognitionCallback callback)
Deprecated: Launching a voice recognition activity is no longer supported. The app should declare
android.permission.RECORD_AUDIO in its AndroidManifest file.
Sets this callback to have the fragment pass speech recognition requests
to the activity rather than using a SpeechRecognizer object.
public void
setSearchQuery(java.lang.String query, boolean submit)
Sets the text of the search query and optionally submits the query. Either
onQueryTextChange or
onQueryTextSubmit will be
called on the provider if it is set.
Parameters:
query: The search query to set.
submit: Whether to submit the query.
public void
setSearchQuery(Intent intent, boolean submit)
Sets the text of the search query based on the RecognizerIntent.EXTRA_RESULTS in
the given intent, and optionally submits the query. If more than one result is present
in the results list, the first will be used.
Parameters:
intent: Intent received from a speech recognition service.
submit: Whether to submit the query.
public Intent
getRecognizerIntent()
Returns an intent that can be used to request speech recognition.
Built from the base RecognizerIntent.ACTION_RECOGNIZE_SPEECH plus
extras:
- RecognizerIntent.EXTRA_LANGUAGE_MODEL set to RecognizerIntent.LANGUAGE_MODEL_FREE_FORM
- RecognizerIntent.EXTRA_PARTIAL_RESULTS set to true
- RecognizerIntent.EXTRA_PROMPT set to the search bar hint text
For handling the intent returned from the service, see
SearchSupportFragment.setSearchQuery(Intent, boolean).
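A hedged sketch of using this intent together with setSearchQuery(Intent, boolean) from a SearchSupportFragment subclass follows; the REQUEST_SPEECH constant is hypothetical, and startActivityForResult()/onActivityResult() are themselves deprecated in favor of the Activity Result API.
private static final int REQUEST_SPEECH = 0x1001;  // hypothetical request code

// Launches an external recognizer activity with the fragment's recognizer intent.
private void launchExternalRecognizer() {
    startActivityForResult(getRecognizerIntent(), REQUEST_SPEECH);
}

@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == REQUEST_SPEECH && resultCode == Activity.RESULT_OK && data != null) {
        // Fills the search bar with the first recognition result and submits it.
        setSearchQuery(data, true);
    }
}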
Source
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package androidx.leanback.app;
import static android.content.pm.PackageManager.PERMISSION_GRANTED;
import android.Manifest;
import android.content.Intent;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.os.Handler;
import android.speech.RecognizerIntent;
import android.speech.SpeechRecognizer;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.inputmethod.CompletionInfo;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.fragment.app.Fragment;
import androidx.leanback.R;
import androidx.leanback.widget.BrowseFrameLayout;
import androidx.leanback.widget.ObjectAdapter;
import androidx.leanback.widget.ObjectAdapter.DataObserver;
import androidx.leanback.widget.OnItemViewClickedListener;
import androidx.leanback.widget.OnItemViewSelectedListener;
import androidx.leanback.widget.Presenter.ViewHolder;
import androidx.leanback.widget.Row;
import androidx.leanback.widget.RowPresenter;
import androidx.leanback.widget.SearchBar;
import androidx.leanback.widget.SearchOrbView;
import androidx.leanback.widget.SpeechRecognitionCallback;
import androidx.leanback.widget.VerticalGridView;
import java.util.ArrayList;
import java.util.List;
/**
* A fragment to handle searches. An application will supply an implementation
* of the {@link SearchResultProvider} interface to handle the search and return
* an {@link ObjectAdapter} containing the results. The results are rendered
* into a {@link RowsSupportFragment}, in the same way that they are in a {@link
* BrowseSupportFragment}.
*
* <p>A SpeechRecognizer object will be created for which your application will need to declare
* android.permission.RECORD_AUDIO in AndroidManifest file. If app's target version is >= 23 and
* the device version is >= 23, a permission dialog will show first time using speech recognition.
* 0 will be used as requestCode in requestPermissions() call.
* {@link #setSpeechRecognitionCallback(SpeechRecognitionCallback)} is deprecated.
* </p>
* <p>
* Speech recognition is automatically started when fragment is created, but
* not when fragment is restored from an instance state. Activity may manually
* call {@link #startRecognition()}, typically in onNewIntent().
* </p>
*/
public class SearchSupportFragment extends Fragment {
static final String TAG = SearchSupportFragment.class.getSimpleName();
static final boolean DEBUG = false;
private static final String EXTRA_LEANBACK_BADGE_PRESENT = "LEANBACK_BADGE_PRESENT";
private static final String ARG_PREFIX = SearchSupportFragment.class.getCanonicalName();
private static final String ARG_QUERY = ARG_PREFIX + ".query";
private static final String ARG_TITLE = ARG_PREFIX + ".title";
static final long SPEECH_RECOGNITION_DELAY_MS = 300;
static final int RESULTS_CHANGED = 0x1;
static final int QUERY_COMPLETE = 0x2;
static final int AUDIO_PERMISSION_REQUEST_CODE = 0;
/**
* Search API to be provided by the application.
*/
public static interface SearchResultProvider {
/**
* <p>Method invoked some time prior to the first call to onQueryTextChange to retrieve
* an ObjectAdapter that will contain the results to future updates of the search query.</p>
*
* <p>As results are retrieved, the application should use the data set notification methods
* on the ObjectAdapter to instruct the SearchSupportFragment to update the results.</p>
*
* @return ObjectAdapter The result object adapter.
*/
public ObjectAdapter getResultsAdapter();
/**
* <p>Method invoked when the search query is updated.</p>
*
* <p>This is called as soon as the query changes; it is up to the application to add a
* delay before actually executing the queries if needed.
*
* <p>This method might not always be called before onQueryTextSubmit gets called, in
* particular for voice input.
*
* @param newQuery The current search query.
* @return whether the results changed as a result of the new query.
*/
public boolean onQueryTextChange(String newQuery);
/**
* Method invoked when the search query is submitted, either by dismissing the keyboard,
* pressing search or next on the keyboard or when voice has detected the end of the query.
*
* @param query The query entered.
* @return whether the results changed as a result of the query.
*/
public boolean onQueryTextSubmit(String query);
}
final DataObserver mAdapterObserver = new DataObserver() {
@Override
public void onChanged() {
// onChanged() may be called multiple times e.g. the provider add
// rows to ArrayObjectAdapter one by one.
mHandler.removeCallbacks(mResultsChangedCallback);
mHandler.post(mResultsChangedCallback);
}
};
final Handler mHandler = new Handler();
final Runnable mResultsChangedCallback = new Runnable() {
@Override
public void run() {
if (DEBUG) Log.v(TAG, "results changed, new size " + mResultAdapter.size());
if (mRowsSupportFragment != null
&& mRowsSupportFragment.getAdapter() != mResultAdapter) {
if (!(mRowsSupportFragment.getAdapter() == null && mResultAdapter.size() == 0)) {
mRowsSupportFragment.setAdapter(mResultAdapter);
mRowsSupportFragment.setSelectedPosition(0);
}
}
updateSearchBarVisibility();
mStatus |= RESULTS_CHANGED;
if ((mStatus & QUERY_COMPLETE) != 0) {
updateFocus();
}
}
};
/**
* Runs when a new provider is set AND when the fragment view is created.
*/
private final Runnable mSetSearchResultProvider = new Runnable() {
@Override
public void run() {
if (mRowsSupportFragment == null) {
// We'll retry once we have a rows fragment
return;
}
// Retrieve the result adapter
ObjectAdapter adapter = mProvider.getResultsAdapter();
if (DEBUG) Log.v(TAG, "Got results adapter " + adapter);
if (adapter != mResultAdapter) {
boolean firstTime = mResultAdapter == null;
releaseAdapter();
mResultAdapter = adapter;
if (mResultAdapter != null) {
mResultAdapter.registerObserver(mAdapterObserver);
}
if (DEBUG) {
Log.v(TAG, "mResultAdapter " + mResultAdapter + " size "
+ (mResultAdapter == null ? 0 : mResultAdapter.size()));
}
// delay the first time to avoid setting a empty result adapter
// until we got first onChange() from the provider
if (!(firstTime && (mResultAdapter == null || mResultAdapter.size() == 0))) {
mRowsSupportFragment.setAdapter(mResultAdapter);
}
executePendingQuery();
}
if (DEBUG) {
Log.v(TAG, "mAutoStartRecognition " + mAutoStartRecognition
+ " mResultAdapter " + mResultAdapter
+ " adapter " + mRowsSupportFragment.getAdapter());
}
if (mAutoStartRecognition) {
mHandler.removeCallbacks(mStartRecognitionRunnable);
mHandler.postDelayed(mStartRecognitionRunnable, SPEECH_RECOGNITION_DELAY_MS);
} else {
updateFocus();
}
}
};
final Runnable mStartRecognitionRunnable = new Runnable() {
@Override
public void run() {
mAutoStartRecognition = false;
mSearchBar.startRecognition();
}
};
RowsSupportFragment mRowsSupportFragment;
SearchBar mSearchBar;
SearchResultProvider mProvider;
String mPendingQuery = null;
OnItemViewSelectedListener mOnItemViewSelectedListener;
private OnItemViewClickedListener mOnItemViewClickedListener;
ObjectAdapter mResultAdapter;
private SpeechRecognitionCallback mSpeechRecognitionCallback;
private String mTitle;
private Drawable mBadgeDrawable;
private ExternalQuery mExternalQuery;
private SpeechRecognizer mSpeechRecognizer;
int mStatus;
boolean mAutoStartRecognition = true;
private boolean mIsPaused;
private boolean mPendingStartRecognitionWhenPaused;
private SearchBar.SearchBarPermissionListener mPermissionListener =
new SearchBar.SearchBarPermissionListener() {
@Override
public void requestAudioPermission() {
requestPermissions(new String[]{Manifest.permission.RECORD_AUDIO},
AUDIO_PERMISSION_REQUEST_CODE);
}
};
boolean mSpeechRecognizerEnabled;
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions,
int[] grantResults) {
if (requestCode == AUDIO_PERMISSION_REQUEST_CODE && permissions.length > 0) {
if (permissions[0].equals(Manifest.permission.RECORD_AUDIO)
&& grantResults[0] == PERMISSION_GRANTED) {
startRecognition();
}
}
}
/**
* @param args Bundle to use for the arguments, if null a new Bundle will be created.
*/
public static Bundle createArgs(Bundle args, String query) {
return createArgs(args, query, null);
}
public static Bundle createArgs(Bundle args, String query, String title) {
if (args == null) {
args = new Bundle();
}
args.putString(ARG_QUERY, query);
args.putString(ARG_TITLE, title);
return args;
}
/**
* Creates a search fragment with a given search query.
*
* <p>You should only use this if you need to start the search fragment with a
* pre-filled query.
*
* @param query The search query to begin with.
* @return A new SearchSupportFragment.
*/
public static SearchSupportFragment newInstance(String query) {
SearchSupportFragment fragment = new SearchSupportFragment();
Bundle args = createArgs(null, query);
fragment.setArguments(args);
return fragment;
}
@Override
public void onCreate(Bundle savedInstanceState) {
if (mAutoStartRecognition) {
mAutoStartRecognition = savedInstanceState == null;
}
super.onCreate(savedInstanceState);
}
@Override
@Nullable
public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container,
@Nullable Bundle savedInstanceState) {
View root = inflater.inflate(R.layout.lb_search_fragment, container, false);
BrowseFrameLayout searchFrame = root.findViewById(R.id.lb_search_frame);
mSearchBar = (SearchBar) searchFrame.findViewById(R.id.lb_search_bar);
mSearchBar.setSearchBarListener(new SearchBar.SearchBarListener() {
@Override
public void onSearchQueryChange(String query) {
if (DEBUG) Log.v(TAG, String.format("onSearchQueryChange %s %s", query,
null == mProvider ? "(null)" : mProvider));
if (null != mProvider) {
retrieveResults(query);
} else {
mPendingQuery = query;
}
}
@Override
public void onSearchQuerySubmit(String query) {
if (DEBUG) Log.v(TAG, String.format("onSearchQuerySubmit %s", query));
submitQuery(query);
}
@Override
public void onKeyboardDismiss(String query) {
if (DEBUG) Log.v(TAG, String.format("onKeyboardDismiss %s", query));
queryComplete();
}
});
mSearchBar.setSpeechRecognitionCallback(mSpeechRecognitionCallback);
mSearchBar.setPermissionListener(mPermissionListener);
applyExternalQuery();
readArguments(getArguments());
if (null != mBadgeDrawable) {
setBadgeDrawable(mBadgeDrawable);
}
if (null != mTitle) {
setTitle(mTitle);
}
// Inject the RowsSupportFragment in the results container
if (getChildFragmentManager().findFragmentById(R.id.lb_results_frame) == null) {
mRowsSupportFragment = new RowsSupportFragment();
getChildFragmentManager().beginTransaction()
.replace(R.id.lb_results_frame, mRowsSupportFragment).commit();
} else {
mRowsSupportFragment = (RowsSupportFragment) getChildFragmentManager()
.findFragmentById(R.id.lb_results_frame);
}
mRowsSupportFragment.setOnItemViewSelectedListener(new OnItemViewSelectedListener() {
@Override
public void onItemSelected(ViewHolder itemViewHolder, Object item,
RowPresenter.ViewHolder rowViewHolder, Row row) {
if (DEBUG) {
int position = mRowsSupportFragment.getSelectedPosition();
Log.v(TAG, String.format("onItemSelected %d", position));
}
updateSearchBarVisibility();
if (null != mOnItemViewSelectedListener) {
mOnItemViewSelectedListener.onItemSelected(itemViewHolder, item,
rowViewHolder, row);
}
}
});
mRowsSupportFragment.setOnItemViewClickedListener(mOnItemViewClickedListener);
mRowsSupportFragment.setExpand(true);
if (null != mProvider) {
onSetSearchResultProvider();
}
// See b/26894680, use a custom focus search listener to support navigate up/down.
searchFrame.setOnFocusSearchListener(new BrowseFrameLayout.OnFocusSearchListener() {
@Override
public View onFocusSearch(View focused, int direction) {
if (mRowsSupportFragment != null && mRowsSupportFragment.getView() != null
&& mRowsSupportFragment.getView().hasFocus()) {
if (direction == View.FOCUS_UP) {
if (mSpeechRecognizerEnabled) {
return mSearchBar.findViewById(R.id.lb_search_bar_speech_orb);
} else {
return mSearchBar;
}
}
} else if (mSearchBar.hasFocus() && direction == View.FOCUS_DOWN) {
if (mRowsSupportFragment.getView() != null
&& mResultAdapter != null && mResultAdapter.size() > 0) {
return mRowsSupportFragment.getView();
}
}
return null;
}
});
if (!isSpeechRecognizerAvailable()) {
if (mSearchBar.hasFocus()) {
mSearchBar.findViewById(R.id.lb_search_text_editor).requestFocus();
}
mSearchBar.findViewById(R.id.lb_search_bar_speech_orb).setFocusable(false);
} else {
mSpeechRecognizerEnabled = true;
}
return root;
}
@Override
public void onStart() {
super.onStart();
VerticalGridView list = mRowsSupportFragment.getVerticalGridView();
int mContainerListAlignTop =
getResources().getDimensionPixelSize(R.dimen.lb_search_browse_rows_align_top);
list.setItemAlignmentOffset(0);
list.setItemAlignmentOffsetPercent(VerticalGridView.ITEM_ALIGN_OFFSET_PERCENT_DISABLED);
list.setWindowAlignmentOffset(mContainerListAlignTop);
list.setWindowAlignmentOffsetPercent(VerticalGridView.WINDOW_ALIGN_OFFSET_PERCENT_DISABLED);
list.setWindowAlignment(VerticalGridView.WINDOW_ALIGN_NO_EDGE);
}
@Override
public void onResume() {
super.onResume();
mIsPaused = false;
if (mSpeechRecognitionCallback == null && null == mSpeechRecognizer
&& mSpeechRecognizerEnabled) {
mSpeechRecognizer = SpeechRecognizer.createSpeechRecognizer(getContext());
mSearchBar.setSpeechRecognizer(mSpeechRecognizer);
}
if (mPendingStartRecognitionWhenPaused) {
mPendingStartRecognitionWhenPaused = false;
mSearchBar.startRecognition();
} else {
// Ensure search bar state consistency when using external recognizer
mSearchBar.stopRecognition();
}
}
@Override
public void onPause() {
releaseRecognizer();
mIsPaused = true;
super.onPause();
}
@Override
public void onDestroyView() {
mSearchBar = null;
mRowsSupportFragment = null;
super.onDestroyView();
}
@Override
public void onDestroy() {
releaseAdapter();
super.onDestroy();
}
/**
* Returns RowsSupportFragment that shows result rows. RowsSupportFragment is initialized after
* SearchSupportFragment.onCreateView().
*
* @return RowsSupportFragment that shows result rows.
*/
public RowsSupportFragment getRowsSupportFragment() {
return mRowsSupportFragment;
}
private void releaseRecognizer() {
if (null != mSpeechRecognizer) {
mSearchBar.setSpeechRecognizer(null);
mSpeechRecognizer.destroy();
mSpeechRecognizer = null;
}
}
/**
* Starts speech recognition. Typical use case is that
* activity receives onNewIntent() call when user clicks a MIC button.
* Note that SearchSupportFragment automatically starts speech recognition
* at first time created, there is no need to call startRecognition()
* when fragment is created.
*/
public void startRecognition() {
if (mIsPaused) {
mPendingStartRecognitionWhenPaused = true;
} else {
mSearchBar.startRecognition();
}
}
/**
* Sets the search provider that is responsible for returning results for the
* search query.
*/
public void setSearchResultProvider(SearchResultProvider searchResultProvider) {
if (mProvider != searchResultProvider) {
mProvider = searchResultProvider;
onSetSearchResultProvider();
}
}
/**
* Sets an item selection listener for the results.
*
* @param listener The item selection listener to be invoked when an item in
* the search results is selected.
*/
public void setOnItemViewSelectedListener(OnItemViewSelectedListener listener) {
mOnItemViewSelectedListener = listener;
}
/**
* Sets an item clicked listener for the results.
*
* @param listener The item clicked listener to be invoked when an item in
* the search results is clicked.
*/
public void setOnItemViewClickedListener(OnItemViewClickedListener listener) {
if (listener != mOnItemViewClickedListener) {
mOnItemViewClickedListener = listener;
if (mRowsSupportFragment != null) {
mRowsSupportFragment.setOnItemViewClickedListener(mOnItemViewClickedListener);
}
}
}
/**
* Sets the title string to be shown in an empty search bar. The title
* may be placed in a call-to-action, such as "Search <i>title</i>" or
* "Speak to search <i>title</i>".
*/
public void setTitle(String title) {
mTitle = title;
if (null != mSearchBar) {
mSearchBar.setTitle(title);
}
}
/**
* Returns the title set in the search bar.
*/
public String getTitle() {
if (null != mSearchBar) {
return mSearchBar.getTitle();
}
return null;
}
/**
* Sets the badge drawable that will be shown inside the search bar next to
* the title.
*/
public void setBadgeDrawable(Drawable drawable) {
mBadgeDrawable = drawable;
if (null != mSearchBar) {
mSearchBar.setBadgeDrawable(drawable);
}
}
/**
* Returns the badge drawable in the search bar.
*/
public Drawable getBadgeDrawable() {
if (null != mSearchBar) {
return mSearchBar.getBadgeDrawable();
}
return null;
}
/**
* Sets background color of not-listening state search orb.
*
* @param colors SearchOrbView.Colors.
*/
public void setSearchAffordanceColors(SearchOrbView.Colors colors) {
if (mSearchBar != null) {
mSearchBar.setSearchAffordanceColors(colors);
}
}
/**
* Sets background color of listening state search orb.
*
* @param colors SearchOrbView.Colors.
*/
public void setSearchAffordanceColorsInListening(SearchOrbView.Colors colors) {
if (mSearchBar != null) {
mSearchBar.setSearchAffordanceColorsInListening(colors);
}
}
/**
* Displays the completions shown by the IME. An application may provide
* a list of query completions that the system will show in the IME.
*
* @param completions A list of completions to show in the IME. Setting to
* null or empty will clear the list.
*/
public void displayCompletions(List<String> completions) {
mSearchBar.displayCompletions(completions);
}
/**
* Displays the completions shown by the IME. An application may provide
* a list of query completions that the system will show in the IME.
*
* @param completions A list of completions to show in the IME. Setting to
* null or empty will clear the list.
*/
public void displayCompletions(CompletionInfo[] completions) {
mSearchBar.displayCompletions(completions);
}
/**
* Sets this callback to have the fragment pass speech recognition requests
* to the activity rather than using a SpeechRecognizer object.
* @deprecated Launching voice recognition activity is no longer supported. App should declare
* android.permission.RECORD_AUDIO in AndroidManifest file.
*/
@Deprecated
public void setSpeechRecognitionCallback(SpeechRecognitionCallback callback) {
mSpeechRecognitionCallback = callback;
if (mSearchBar != null) {
mSearchBar.setSpeechRecognitionCallback(mSpeechRecognitionCallback);
}
if (callback != null) {
releaseRecognizer();
}
}
/**
* Sets the text of the search query and optionally submits the query. Either
* {@link SearchResultProvider#onQueryTextChange onQueryTextChange} or
* {@link SearchResultProvider#onQueryTextSubmit onQueryTextSubmit} will be
* called on the provider if it is set.
*
* @param query The search query to set.
* @param submit Whether to submit the query.
*/
public void setSearchQuery(String query, boolean submit) {
if (DEBUG) Log.v(TAG, "setSearchQuery " + query + " submit " + submit);
if (query == null) {
return;
}
mExternalQuery = new ExternalQuery(query, submit);
applyExternalQuery();
if (mAutoStartRecognition) {
mAutoStartRecognition = false;
mHandler.removeCallbacks(mStartRecognitionRunnable);
}
}
/**
* Sets the text of the search query based on the {@link RecognizerIntent#EXTRA_RESULTS} in
* the given intent, and optionally submit the query. If more than one result is present
* in the results list, the first will be used.
*
* @param intent Intent received from a speech recognition service.
* @param submit Whether to submit the query.
*/
public void setSearchQuery(Intent intent, boolean submit) {
ArrayList<String> matches = intent.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
if (matches != null && matches.size() > 0) {
setSearchQuery(matches.get(0), submit);
}
}
/**
* Returns an intent that can be used to request speech recognition.
* Built from the base {@link RecognizerIntent#ACTION_RECOGNIZE_SPEECH} plus
* extras:
*
* <ul>
* <li>{@link RecognizerIntent#EXTRA_LANGUAGE_MODEL} set to
* {@link RecognizerIntent#LANGUAGE_MODEL_FREE_FORM}</li>
* <li>{@link RecognizerIntent#EXTRA_PARTIAL_RESULTS} set to true</li>
* <li>{@link RecognizerIntent#EXTRA_PROMPT} set to the search bar hint text</li>
* </ul>
*
* For handling the intent returned from the service, see
* {@link #setSearchQuery(Intent, boolean)}.
*/
public Intent getRecognizerIntent() {
Intent recognizerIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
recognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
recognizerIntent.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, true);
if (mSearchBar != null && mSearchBar.getHint() != null) {
recognizerIntent.putExtra(RecognizerIntent.EXTRA_PROMPT, mSearchBar.getHint());
}
recognizerIntent.putExtra(EXTRA_LEANBACK_BADGE_PRESENT, mBadgeDrawable != null);
return recognizerIntent;
}
void retrieveResults(String searchQuery) {
if (DEBUG) Log.v(TAG, "retrieveResults " + searchQuery);
if (mProvider.onQueryTextChange(searchQuery)) {
mStatus &= ~QUERY_COMPLETE;
}
}
void submitQuery(String query) {
queryComplete();
if (null != mProvider) {
mProvider.onQueryTextSubmit(query);
}
}
void queryComplete() {
if (DEBUG) Log.v(TAG, "queryComplete");
mStatus |= QUERY_COMPLETE;
focusOnResults();
}
void updateSearchBarVisibility() {
int position = mRowsSupportFragment != null ? mRowsSupportFragment.getSelectedPosition() : -1;
mSearchBar.setVisibility(position <=0 || mResultAdapter == null
|| mResultAdapter.size() == 0 ? View.VISIBLE : View.GONE);
}
void updateFocus() {
if (mResultAdapter != null && mResultAdapter.size() > 0
&& mRowsSupportFragment != null && mRowsSupportFragment.getAdapter() == mResultAdapter) {
focusOnResults();
} else {
mSearchBar.requestFocus();
}
}
private void focusOnResults() {
if (mRowsSupportFragment == null || mRowsSupportFragment.getVerticalGridView() == null
|| mResultAdapter.size() == 0) {
return;
}
if (mRowsSupportFragment.getVerticalGridView().requestFocus()) {
mStatus &= ~RESULTS_CHANGED;
}
}
private void onSetSearchResultProvider() {
mHandler.removeCallbacks(mSetSearchResultProvider);
mHandler.post(mSetSearchResultProvider);
}
void releaseAdapter() {
if (mResultAdapter != null) {
mResultAdapter.unregisterObserver(mAdapterObserver);
mResultAdapter = null;
}
}
void executePendingQuery() {
if (null != mPendingQuery && null != mResultAdapter) {
String query = mPendingQuery;
mPendingQuery = null;
retrieveResults(query);
}
}
private void applyExternalQuery() {
if (mExternalQuery == null || mSearchBar == null) {
return;
}
mSearchBar.setSearchQuery(mExternalQuery.mQuery);
if (mExternalQuery.mSubmit) {
submitQuery(mExternalQuery.mQuery);
}
mExternalQuery = null;
}
private void readArguments(Bundle args) {
if (null == args) {
return;
}
if (args.containsKey(ARG_QUERY)) {
setSearchQuery(args.getString(ARG_QUERY));
}
if (args.containsKey(ARG_TITLE)) {
setTitle(args.getString(ARG_TITLE));
}
}
private void setSearchQuery(String query) {
mSearchBar.setSearchQuery(query);
}
boolean isSpeechRecognizerAvailable() {
return SpeechRecognizer.isRecognitionAvailable(getContext());
}
static class ExternalQuery {
String mQuery;
boolean mSubmit;
ExternalQuery(String query, boolean submit) {
mQuery = query;
mSubmit = submit;
}
}
}