
Closes #4711 - Extract VoiceSearchActivity (#5502)

master
Tiger Oakes 2019-09-27 07:54:29 -07:00 committed by Sawyer Blatz
parent 3f41a4e9c6
commit 88aa519210
8 changed files with 239 additions and 102 deletions

AndroidManifest.xml

@@ -123,6 +123,8 @@
                android:resource="@mipmap/ic_launcher" />
        </activity>
+       <activity android:name=".widget.VoiceSearchActivity" />
        <activity
            android:name=".browser.BrowserPerformanceTestActivity"
            android:enabled="${isRaptorEnabled}"

IntentReceiverActivity.kt

@@ -7,17 +7,14 @@ package org.mozilla.fenix
import android.app.Activity
import android.content.Intent
import android.os.Bundle
-import android.speech.RecognizerIntent
import androidx.annotation.VisibleForTesting
import kotlinx.coroutines.MainScope
import kotlinx.coroutines.launch
import mozilla.components.feature.intent.processing.TabIntentProcessor
-import org.mozilla.fenix.components.metrics.Event
import org.mozilla.fenix.customtabs.AuthCustomTabActivity
import org.mozilla.fenix.customtabs.AuthCustomTabActivity.Companion.EXTRA_AUTH_CUSTOM_TAB
import org.mozilla.fenix.customtabs.ExternalAppBrowserActivity
import org.mozilla.fenix.ext.components
-import org.mozilla.fenix.ext.metrics
import org.mozilla.fenix.ext.settings
import org.mozilla.fenix.home.intent.StartSearchIntentProcessor
@@ -26,19 +23,10 @@ import org.mozilla.fenix.home.intent.StartSearchIntentProcessor
 */
class IntentReceiverActivity : Activity() {
-    // Holds the intent that initially started this activity
-    // so that it can persist through the speech activity.
-    private var previousIntent: Intent? = null
    @VisibleForTesting
    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
-        previousIntent = savedInstanceState?.get(PREVIOUS_INTENT) as Intent?
-        if (previousIntent?.getBooleanExtra(SPEECH_PROCESSING, false) == true) {
-            return
-        }
        MainScope().launch {
            // The intent property is nullable, but the rest of the code below
            // assumes it is not. If it's null, then we make a new one and open
@@ -58,17 +46,12 @@ class IntentReceiverActivity : Activity() {
            val intentProcessors =
                components.intentProcessors.externalAppIntentProcessors + tabIntentProcessor
-            if (intent.getBooleanExtra(SPEECH_PROCESSING, false)) {
-                previousIntent = intent
-                displaySpeechRecognizer()
-            } else {
-                intentProcessors.any { it.process(intent) }
-                setIntentActivity(intent, tabIntentProcessor)
+            intentProcessors.any { it.process(intent) }
+            setIntentActivity(intent, tabIntentProcessor)
-                startActivity(intent)
+            startActivity(intent)
-                finish()
-            }
+            finish()
        }
    }
/**
@@ -121,42 +104,7 @@ class IntentReceiverActivity : Activity() {
        intent.putExtra(HomeActivity.OPEN_TO_BROWSER, openToBrowser)
    }
-    override fun onSaveInstanceState(outState: Bundle) {
-        super.onSaveInstanceState(outState)
-        outState.putParcelable(PREVIOUS_INTENT, previousIntent)
-    }
-    private fun displaySpeechRecognizer() {
-        val intentSpeech = Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH).apply {
-            putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM)
-        }
-        this.metrics.track(Event.SearchWidgetVoiceSearchPressed)
-        startActivityForResult(intentSpeech, SPEECH_REQUEST_CODE)
-    }
-    override fun onActivityResult(requestCode: Int, resultCode: Int, data: Intent?) {
-        super.onActivityResult(requestCode, resultCode, data)
-        if (requestCode == SPEECH_REQUEST_CODE && resultCode == RESULT_OK) {
-            val spokenText: String? =
-                data?.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS)?.let { results ->
-                    results[0]
-                }
-            previousIntent?.let {
-                it.putExtra(SPEECH_PROCESSING, spokenText)
-                it.putExtra(HomeActivity.OPEN_TO_BROWSER_AND_LOAD, true)
-                startActivity(it)
-            }
-        }
-        finish()
-    }
    companion object {
-        const val SPEECH_REQUEST_CODE = 0
-        const val SPEECH_PROCESSING = "speech_processing"
-        const val PREVIOUS_INTENT = "previous_intent"
        const val ACTION_OPEN_TAB = "org.mozilla.fenix.OPEN_TAB"
        const val ACTION_OPEN_PRIVATE_TAB = "org.mozilla.fenix.OPEN_PRIVATE_TAB"
    }
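
Taken together, these removals leave IntentReceiverActivity with a much smaller onCreate. A rough sketch of the resulting flow, assuming the surrounding code is otherwise unchanged (how the tab intent processor is selected is elided here):

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)

        MainScope().launch {
            // Copy the (nullable) launch intent so the rest of the code can treat it as non-null.
            val intent = intent?.let { Intent(it) } ?: Intent()
            // tabIntentProcessor: the private or normal processor from components.intentProcessors,
            // chosen from the user's settings (selection elided in this sketch).
            val intentProcessors =
                components.intentProcessors.externalAppIntentProcessors + tabIntentProcessor

            intentProcessors.any { it.process(intent) }
            setIntentActivity(intent, tabIntentProcessor)

            startActivity(intent)
            finish()
        }
    }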

SpeechProcessingIntentProcessor.kt

@@ -8,7 +8,7 @@ import android.content.Intent
import androidx.navigation.NavController
import org.mozilla.fenix.BrowserDirection
import org.mozilla.fenix.HomeActivity
-import org.mozilla.fenix.IntentReceiverActivity
+import org.mozilla.fenix.widget.VoiceSearchActivity.Companion.SPEECH_PROCESSING
/**
* The search widget has a microphone button to let users search with their voice.
@@ -22,7 +22,7 @@ class SpeechProcessingIntentProcessor(
        return if (intent.extras?.getBoolean(HomeActivity.OPEN_TO_BROWSER_AND_LOAD) == true) {
            out.putExtra(HomeActivity.OPEN_TO_BROWSER_AND_LOAD, false)
            activity.openToBrowserAndLoad(
-                searchTermOrURL = intent.getStringExtra(IntentReceiverActivity.SPEECH_PROCESSING).orEmpty(),
+                searchTermOrURL = intent.getStringExtra(SPEECH_PROCESSING).orEmpty(),
                newTab = true,
                from = BrowserDirection.FromGlobal,
                forceSearch = true

SearchWidgetProvider.kt

@@ -10,20 +10,20 @@ import android.appwidget.AppWidgetManager.OPTION_APPWIDGET_MIN_WIDTH
import android.appwidget.AppWidgetProvider
import android.content.Context
import android.content.Intent
+import android.os.Build
import android.os.Bundle
import android.speech.RecognizerIntent
import android.view.View
import android.widget.RemoteViews
import androidx.annotation.Dimension
import androidx.annotation.Dimension.DP
-import org.mozilla.fenix.HomeActivity
-import org.mozilla.fenix.IntentReceiverActivity
-import org.mozilla.fenix.R
-import org.mozilla.fenix.home.intent.StartSearchIntentProcessor
-import org.mozilla.fenix.ext.settings
-import android.os.Build
import androidx.appcompat.widget.AppCompatDrawableManager
import androidx.core.graphics.drawable.toBitmap
+import org.mozilla.fenix.HomeActivity
+import org.mozilla.fenix.R
+import org.mozilla.fenix.ext.settings
+import org.mozilla.fenix.home.intent.StartSearchIntentProcessor
+import org.mozilla.fenix.widget.VoiceSearchActivity.Companion.SPEECH_PROCESSING
@Suppress("TooManyFunctions")
class SearchWidgetProvider : AppWidgetProvider() {
@@ -109,17 +109,15 @@ class SearchWidgetProvider : AppWidgetProvider() {
    }
    private fun createVoiceSearchIntent(context: Context): PendingIntent? {
-        val voiceIntent = Intent(context, IntentReceiverActivity::class.java)
-            .let { intent ->
-                intent.flags = Intent.FLAG_ACTIVITY_NEW_TASK or Intent.FLAG_ACTIVITY_CLEAR_TASK
-                intent.putExtra(IntentReceiverActivity.SPEECH_PROCESSING, true)
-            }
+        val voiceIntent = Intent(context, VoiceSearchActivity::class.java).apply {
+            flags = Intent.FLAG_ACTIVITY_NEW_TASK or Intent.FLAG_ACTIVITY_CLEAR_TASK
+            putExtra(SPEECH_PROCESSING, true)
+        }
        val intentSpeech = Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH)
        return intentSpeech.resolveActivity(context.packageManager)?.let {
-            PendingIntent.getActivity(context,
-                REQUEST_CODE_VOICE, voiceIntent, 0)
+            PendingIntent.getActivity(context, REQUEST_CODE_VOICE, voiceIntent, 0)
        }
    }
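
The PendingIntent built above is what the widget attaches to its microphone button elsewhere in this class. A minimal sketch of that wiring, with an assumed helper name and view id (neither is taken from this diff):

    private fun setupVoiceSearchButton(views: RemoteViews, context: Context) {
        val voiceSearchPendingIntent = createVoiceSearchIntent(context)
        if (voiceSearchPendingIntent != null) {
            views.setOnClickPendingIntent(R.id.button_search_widget_voice, voiceSearchPendingIntent)
        } else {
            // No activity can handle ACTION_RECOGNIZE_SPEECH, so hide the microphone button.
            views.setViewVisibility(R.id.button_search_widget_voice, View.GONE)
        }
    }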

VoiceSearchActivity.kt (new file)

@@ -0,0 +1,100 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
package org.mozilla.fenix.widget
import android.content.ComponentName
import android.content.Intent
import android.os.Bundle
import android.speech.RecognizerIntent
import androidx.appcompat.app.AppCompatActivity
import org.mozilla.fenix.HomeActivity
import org.mozilla.fenix.IntentReceiverActivity
import org.mozilla.fenix.components.metrics.Event
import org.mozilla.fenix.ext.metrics
/**
* Launches voice recognition then uses it to start a new web search.
*/
class VoiceSearchActivity : AppCompatActivity() {
/**
* Holds the intent that initially started this activity
* so that it can persist through the speech activity.
*/
private var previousIntent: Intent? = null
override fun onSaveInstanceState(outState: Bundle) {
super.onSaveInstanceState(outState)
outState.putParcelable(PREVIOUS_INTENT, previousIntent)
}
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
// Retrieve the previous intent from the saved state
previousIntent = savedInstanceState?.get(PREVIOUS_INTENT) as Intent?
if (previousIntent.isForSpeechProcessing()) {
// Don't reopen the speech recognizer
return
}
// The intent property is nullable, but the rest of the code below assumes it is not.
val intent = intent?.let { Intent(intent) } ?: Intent()
if (intent.isForSpeechProcessing()) {
previousIntent = intent
displaySpeechRecognizer()
} else {
finish()
}
}
/**
* Displays a speech recognizer popup that listens for input from the user.
*/
private fun displaySpeechRecognizer() {
val intentSpeech = Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH).apply {
putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM)
}
metrics.track(Event.SearchWidgetVoiceSearchPressed)
startActivityForResult(intentSpeech, SPEECH_REQUEST_CODE)
}
override fun onActivityResult(requestCode: Int, resultCode: Int, data: Intent?) {
super.onActivityResult(requestCode, resultCode, data)
if (requestCode == SPEECH_REQUEST_CODE && resultCode == RESULT_OK) {
val spokenText = data?.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS)?.first()
val context = this
previousIntent?.apply {
component = ComponentName(context, IntentReceiverActivity::class.java)
putExtra(SPEECH_PROCESSING, spokenText)
putExtra(HomeActivity.OPEN_TO_BROWSER_AND_LOAD, true)
startActivity(this)
}
}
finish()
}
/**
* Returns true if the [SPEECH_PROCESSING] extra is present and set to true.
* Returns false if the intent is null.
*/
private fun Intent?.isForSpeechProcessing(): Boolean =
this?.getBooleanExtra(SPEECH_PROCESSING, false) == true
companion object {
internal const val SPEECH_REQUEST_CODE = 0
internal const val PREVIOUS_INTENT = "org.mozilla.fenix.previous_intent"
/**
* In [VoiceSearchActivity], used to store whether speech processing should start.
* In [IntentReceiverActivity], used to store the recognized search terms.
*/
const val SPEECH_PROCESSING = "speech_processing"
}
}
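
As the companion KDoc notes, the SPEECH_PROCESSING key carries two different value types over the voice-search round trip. A small illustration (not part of the commit; context and forwardedIntent stand in for the real callers):

    // 1. A caller such as the search widget requests voice search with a Boolean extra.
    val launchIntent = Intent(context, VoiceSearchActivity::class.java).apply {
        putExtra(VoiceSearchActivity.SPEECH_PROCESSING, true)
    }
    context.startActivity(launchIntent)

    // 2. After recognition, VoiceSearchActivity forwards the spoken text to
    // IntentReceiverActivity under the same key, now as a String, which
    // SpeechProcessingIntentProcessor later reads via getStringExtra(SPEECH_PROCESSING).
    val spokenText = forwardedIntent.getStringExtra(VoiceSearchActivity.SPEECH_PROCESSING)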

IntentReceiverActivityTest.kt

@@ -5,26 +5,20 @@
package org.mozilla.fenix
import android.content.Intent
-import android.speech.RecognizerIntent.ACTION_RECOGNIZE_SPEECH
import kotlinx.coroutines.ExperimentalCoroutinesApi
import kotlinx.coroutines.ObsoleteCoroutinesApi
import kotlinx.coroutines.test.runBlockingTest
-import mozilla.components.support.test.argumentCaptor
import mozilla.components.support.test.robolectric.testContext
-import org.junit.Assert.assertEquals
import org.junit.Test
import org.junit.runner.RunWith
-import org.mockito.Mockito.eq
-import org.mockito.Mockito.never
-import org.mockito.Mockito.spy
-import org.mockito.Mockito.verify
import org.mockito.Mockito.`when`
-import org.robolectric.annotation.Config
+import org.mockito.Mockito.never
+import org.mockito.Mockito.verify
import org.mozilla.fenix.ext.components
import org.mozilla.fenix.ext.settings
-import org.robolectric.Robolectric
import org.robolectric.RobolectricTestRunner
+import org.robolectric.annotation.Config
@ObsoleteCoroutinesApi
@ExperimentalCoroutinesApi
@@ -66,24 +60,4 @@ class IntentReceiverActivityTest {
            verify(testContext.components.intentProcessors.intentProcessor).process(intent)
        }
    }
-    @Test
-    fun `process intent with speech processing set to true`() {
-        runBlockingTest {
-            val intent = Intent()
-            intent.putExtra(IntentReceiverActivity.SPEECH_PROCESSING, true)
-            val activity = spy(Robolectric.buildActivity(IntentReceiverActivity::class.java, intent).get())
-            activity.processIntent(intent)
-            val speechIntent = argumentCaptor<Intent>()
-            // Not using mockk here because process is a suspend function
-            // and mockito makes this easier to read.
-            verify(testContext.components.intentProcessors.privateIntentProcessor, never()).process(intent)
-            verify(testContext.components.intentProcessors.intentProcessor, never()).process(intent)
-            verify(activity).startActivityForResult(speechIntent.capture(), eq(IntentReceiverActivity.SPEECH_REQUEST_CODE))
-            assertEquals(ACTION_RECOGNIZE_SPEECH, speechIntent.value.action)
-        }
-    }
}

SpeechProcessingIntentProcessorTest.kt

@@ -14,8 +14,8 @@ import org.junit.Test
import org.junit.runner.RunWith
import org.mozilla.fenix.BrowserDirection
import org.mozilla.fenix.HomeActivity
-import org.mozilla.fenix.IntentReceiverActivity
import org.mozilla.fenix.TestApplication
+import org.mozilla.fenix.widget.VoiceSearchActivity.Companion.SPEECH_PROCESSING
import org.robolectric.RobolectricTestRunner
import org.robolectric.annotation.Config
@@ -81,7 +81,7 @@
        val activity: HomeActivity = mockk(relaxed = true)
        val intent = Intent().apply {
            putExtra(HomeActivity.OPEN_TO_BROWSER_AND_LOAD, true)
-            putExtra(IntentReceiverActivity.SPEECH_PROCESSING, "hello world")
+            putExtra(SPEECH_PROCESSING, "hello world")
        }
        val processor = SpeechProcessingIntentProcessor(activity)
        processor.process(intent, mockk(), mockk(relaxed = true))

VoiceSearchActivityTest.kt (new file)

@@ -0,0 +1,115 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
package org.mozilla.fenix.widget
import android.app.Activity
import android.content.ComponentName
import android.content.Intent
import android.os.Bundle
import android.speech.RecognizerIntent.ACTION_RECOGNIZE_SPEECH
import android.speech.RecognizerIntent.EXTRA_LANGUAGE_MODEL
import android.speech.RecognizerIntent.EXTRA_RESULTS
import android.speech.RecognizerIntent.LANGUAGE_MODEL_FREE_FORM
import androidx.appcompat.app.AppCompatActivity.RESULT_OK
import kotlinx.coroutines.ExperimentalCoroutinesApi
import kotlinx.coroutines.ObsoleteCoroutinesApi
import org.junit.Assert.assertEquals
import org.junit.Assert.assertFalse
import org.junit.Assert.assertNull
import org.junit.Assert.assertTrue
import org.junit.Before
import org.junit.Test
import org.junit.runner.RunWith
import org.mozilla.fenix.HomeActivity.Companion.OPEN_TO_BROWSER_AND_LOAD
import org.mozilla.fenix.IntentReceiverActivity
import org.mozilla.fenix.TestApplication
import org.mozilla.fenix.widget.VoiceSearchActivity.Companion.PREVIOUS_INTENT
import org.mozilla.fenix.widget.VoiceSearchActivity.Companion.SPEECH_PROCESSING
import org.mozilla.fenix.widget.VoiceSearchActivity.Companion.SPEECH_REQUEST_CODE
import org.robolectric.Robolectric
import org.robolectric.RobolectricTestRunner
import org.robolectric.Shadows
import org.robolectric.android.controller.ActivityController
import org.robolectric.annotation.Config
import org.robolectric.shadows.ShadowActivity
@ObsoleteCoroutinesApi
@ExperimentalCoroutinesApi
@RunWith(RobolectricTestRunner::class)
@Config(application = TestApplication::class)
class VoiceSearchActivityTest {
private lateinit var controller: ActivityController<VoiceSearchActivity>
private lateinit var activity: Activity
private lateinit var shadow: ShadowActivity
@Before
fun setup() {
val intent = Intent()
intent.putExtra(SPEECH_PROCESSING, true)
controller = Robolectric.buildActivity(VoiceSearchActivity::class.java, intent)
activity = controller.get()
shadow = Shadows.shadowOf(activity)
}
@Test
fun `process intent with speech processing set to true`() {
controller.create()
val intentForResult = shadow.peekNextStartedActivityForResult()
assertEquals(SPEECH_REQUEST_CODE, intentForResult.requestCode)
assertEquals(ACTION_RECOGNIZE_SPEECH, intentForResult.intent.action)
assertEquals(LANGUAGE_MODEL_FREE_FORM, intentForResult.intent.getStringExtra(EXTRA_LANGUAGE_MODEL))
}
@Test
fun `process intent with speech processing set to false`() {
val intent = Intent()
intent.putExtra(SPEECH_PROCESSING, false)
val controller = Robolectric.buildActivity(VoiceSearchActivity::class.java, intent)
val activity = controller.get()
controller.create()
assertTrue(activity.isFinishing)
}
@Test
fun `process intent with speech processing in previous intent set to true`() {
val savedInstanceState = Bundle()
val previousIntent = Intent().apply {
putExtra(SPEECH_PROCESSING, true)
}
savedInstanceState.putParcelable(PREVIOUS_INTENT, previousIntent)
controller.create(savedInstanceState)
assertFalse(activity.isFinishing)
assertNull(shadow.peekNextStartedActivityForResult())
}
@Test
fun `handle speech result`() {
controller.create()
val resultIntent = Intent().apply {
putStringArrayListExtra(EXTRA_RESULTS, arrayListOf("hello world"))
}
shadow.receiveResult(
shadow.peekNextStartedActivityForResult().intent,
RESULT_OK,
resultIntent
)
val browserIntent = shadow.peekNextStartedActivity()
assertTrue(activity.isFinishing)
assertEquals(ComponentName(activity, IntentReceiverActivity::class.java), browserIntent.component)
assertEquals("hello world", browserIntent.getStringExtra(SPEECH_PROCESSING))
assertTrue(browserIntent.getBooleanExtra(OPEN_TO_BROWSER_AND_LOAD, false))
}
}