Fixed many voice add bugs by partially decoupling the voice recognizer from the quick add bar

Branches: pull/14/head    Tags: 4.3.0
Sam Bosley 12 years ago
parent ca04c32112
commit 3f37ada030
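
In effect, the RecognizerApiListener callbacks move out of QuickAddBar and up into AstridActivity: speech results and errors now reach the activity, which routes them to whichever quick add box currently exists instead of writing into an EditText captured when the bar was built. A condensed sketch of the resulting wiring, using only names that appear in the hunks below (abridged, not the complete classes):

import android.support.v4.app.FragmentActivity;
import com.todoroo.aacenc.RecognizerApi.RecognizerApiListener;

// The activity, rather than QuickAddBar, now receives the speech callbacks, so a
// result is delivered to whatever quick add box exists at that moment instead of
// a view reference captured when recognition started.
public class AstridActivity extends FragmentActivity implements RecognizerApiListener {

    @Override
    public void onSpeechResult(String result) {
        // Push the recognized text into the current quick add box, if any.
        TaskListFragment tlf = getTaskListFragment();
        if (tlf != null && tlf.quickAddBar.getQuickAddBox() != null)
            tlf.quickAddBar.getQuickAddBox().setText(result);
    }

    @Override
    public void onSpeechError(int error) {
        // Cancel the in-flight recognition on the current bar, then show a toast
        // (ERROR_NO_MATCH) or an ok-dialog for network/other errors; full version below.
    }
}

// QuickAddBar drops its listener role and only hands the activity to the recognizer:
//   voiceRecognizer = VoiceRecognizer.instantiateVoiceRecognizer(activity, activity, voiceAddButton);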

@ -11,16 +11,21 @@ import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.speech.SpeechRecognizer;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentActivity;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentTransaction;
import android.view.View;
import android.view.WindowManager.BadTokenException;
import android.widget.EditText;
import android.widget.Toast;
import com.timsu.astrid.R;
import com.todoroo.aacenc.RecognizerApi.RecognizerApiListener;
import com.todoroo.andlib.service.ContextManager;
import com.todoroo.andlib.utility.AndroidUtilities;
import com.todoroo.andlib.utility.DialogUtilities;
import com.todoroo.astrid.actfm.TagUpdatesFragment;
import com.todoroo.astrid.api.AstridApiConstants;
import com.todoroo.astrid.api.Filter;
@ -38,7 +43,9 @@ import com.todoroo.astrid.service.StatisticsConstants;
import com.todoroo.astrid.service.StatisticsService;
import com.todoroo.astrid.subtasks.SubtasksListFragment;
import com.todoroo.astrid.ui.DateChangedAlerts;
import com.todoroo.astrid.ui.QuickAddBar;
import com.todoroo.astrid.utility.AstridPreferences;
import com.todoroo.astrid.voice.VoiceRecognizer;
/**
* This wrapper activity contains all the glue-code to handle the callbacks between the different
@ -54,7 +61,8 @@ import com.todoroo.astrid.utility.AstridPreferences;
public class AstridActivity extends FragmentActivity
implements FilterListFragment.OnFilterItemClickedListener,
TaskListFragment.OnTaskListItemClickedListener,
TaskEditFragment.OnTaskEditDetailsClickedListener {
TaskEditFragment.OnTaskEditDetailsClickedListener,
RecognizerApiListener {
public static final int LAYOUT_SINGLE = 0;
public static final int LAYOUT_DOUBLE = 1;
@ -332,6 +340,48 @@ public class AstridActivity extends FragmentActivity
return fragment;
}
// Voice recognizer callbacks
@Override
public void onSpeechResult(String result) {
TaskListFragment tlf = getTaskListFragment();
if (tlf != null) {
EditText box = tlf.quickAddBar.getQuickAddBox();
if (box != null)
box.setText(result);
}
}
@Override
public void onSpeechError(int error) {
TaskListFragment tlf = getTaskListFragment();
if (tlf != null) {
QuickAddBar quickAdd = tlf.quickAddBar;
if (quickAdd != null) {
VoiceRecognizer vr = quickAdd.getVoiceRecognizer();
if (vr != null)
vr.cancel();
}
}
int errorStr = 0;
switch(error) {
case SpeechRecognizer.ERROR_NETWORK:
case SpeechRecognizer.ERROR_NETWORK_TIMEOUT:
errorStr = R.string.speech_err_network;
break;
case SpeechRecognizer.ERROR_NO_MATCH:
Toast.makeText(this, R.string.speech_err_no_match, Toast.LENGTH_LONG).show();
break;
default:
errorStr = R.string.speech_err_default;
break;
}
if (errorStr > 0)
DialogUtilities.okDialog(this, getString(errorStr), null);
}
/**
* @return LAYOUT_SINGLE, LAYOUT_DOUBLE, or LAYOUT_TRIPLE
*/

@ -694,11 +694,10 @@ ViewPager.OnPageChangeListener, EditNoteActivity.UpdatesChangedListener {
R.id.voiceAddNoteButton);
voiceAddNoteButton.setVisibility(View.VISIBLE);
int prompt = R.string.voice_edit_note_prompt;
voiceNoteAssistant = new VoiceInputAssistant(TaskEditFragment.this,
voiceAddNoteButton, notesEditText, REQUEST_VOICE_RECOG);
voiceNoteAssistant = new VoiceInputAssistant(voiceAddNoteButton, REQUEST_VOICE_RECOG);
voiceNoteAssistant.setAppend(true);
voiceNoteAssistant.setLanguageModel(RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
voiceNoteAssistant.configureMicrophoneButton(prompt);
voiceNoteAssistant.configureMicrophoneButton(TaskEditFragment.this, prompt);
}
loadMoreContainer();
}
@ -1231,8 +1230,7 @@ ViewPager.OnPageChangeListener, EditNoteActivity.UpdatesChangedListener {
&& resultCode == Activity.RESULT_OK) {
// handle the result of voice recognition, put it into the
// appropiate textfield
voiceNoteAssistant.handleActivityResult(requestCode, resultCode,
data);
voiceNoteAssistant.handleActivityResult(requestCode, resultCode, data, notesEditText);
// write the voicenote into the model, or it will be deleted by
// onResume.populateFields

@ -526,7 +526,7 @@ public class TaskListFragment extends ListFragment implements OnScrollListener,
protected void setupQuickAddBar() {
quickAddBar = (QuickAddBar) getView().findViewById(R.id.taskListFooter);
quickAddBar.initialize(getActivity(), this, mListener);
quickAddBar.initialize((AstridActivity) getActivity(), this, mListener);
getListView().setOnTouchListener(new OnTouchListener() {
@Override

@ -14,7 +14,6 @@ import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.net.Uri;
import android.speech.SpeechRecognizer;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.view.KeyEvent;
@ -27,10 +26,8 @@ import android.widget.ImageButton;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.TextView.OnEditorActionListener;
import android.widget.Toast;
import com.timsu.astrid.R;
import com.todoroo.aacenc.RecognizerApi.RecognizerApiListener;
import com.todoroo.andlib.service.Autowired;
import com.todoroo.andlib.service.ContextManager;
import com.todoroo.andlib.service.DependencyInjectionService;
@ -71,7 +68,7 @@ import com.todoroo.astrid.voice.VoiceRecognizer;
* @author Tim Su <tim@astrid.com>
*
*/
public class QuickAddBar extends LinearLayout implements RecognizerApiListener {
public class QuickAddBar extends LinearLayout {
private ImageButton voiceAddButton;
private ImageButton quickAddButton;
@ -97,7 +94,7 @@ public class QuickAddBar extends LinearLayout implements RecognizerApiListener {
private VoiceRecognizer voiceRecognizer;
private Activity activity;
private AstridActivity activity;
private TaskListFragment fragment;
public QuickAddBar(Context context, AttributeSet attrs, int defStyle) {
@ -112,7 +109,7 @@ public class QuickAddBar extends LinearLayout implements RecognizerApiListener {
super(context);
}
public void initialize(Activity myActivity, TaskListFragment myFragment,
public void initialize(AstridActivity myActivity, TaskListFragment myFragment,
final OnTaskListItemClickedListener mListener) {
activity = myActivity;
fragment = myFragment;
@ -417,7 +414,7 @@ public class QuickAddBar extends LinearLayout implements RecognizerApiListener {
public boolean onActivityResult(int requestCode, int resultCode, Intent data) {
// handle the result of voice recognition, put it into the textfield
if (voiceRecognizer.handleActivityResult(requestCode, resultCode, data)) {
if (voiceRecognizer.handleActivityResult(requestCode, resultCode, data, quickAddBox)) {
// if user wants, create the task directly (with defaultvalues)
// after saying it
Flags.set(Flags.TLA_RESUMED_FROM_VOICE_ADD);
@ -439,47 +436,26 @@ public class QuickAddBar extends LinearLayout implements RecognizerApiListener {
return false;
}
public VoiceRecognizer getVoiceRecognizer() {
return voiceRecognizer;
}
public void startVoiceRecognition() {
if (VoiceRecognizer.speechRecordingAvailable(activity) && currentVoiceFile == null) {
currentVoiceFile = Long.toString(DateUtilities.now());
}
voiceRecognizer.startVoiceRecognition(activity, currentVoiceFile);
voiceRecognizer.startVoiceRecognition(activity, fragment, currentVoiceFile);
}
public void setupRecognizerApi() {
voiceRecognizer = VoiceRecognizer.instantiateVoiceRecognizer(activity, this, fragment, voiceAddButton, quickAddBox);
voiceRecognizer = VoiceRecognizer.instantiateVoiceRecognizer(activity, activity, voiceAddButton);
}
public void destroyRecognizerApi() {
voiceRecognizer.destroyRecognizerApi();
}
@Override
public void onSpeechResult(String result) {
quickAddBox.setText(result);
}
@Override
public void onSpeechError(int error) {
voiceRecognizer.cancel();
int errorStr = 0;
switch(error) {
case SpeechRecognizer.ERROR_NETWORK:
case SpeechRecognizer.ERROR_NETWORK_TIMEOUT:
errorStr = R.string.speech_err_network;
break;
case SpeechRecognizer.ERROR_NO_MATCH:
Toast.makeText(activity, R.string.speech_err_no_match, Toast.LENGTH_LONG).show();
break;
default:
errorStr = R.string.speech_err_default;
break;
}
if (errorStr > 0)
DialogUtilities.okDialog(activity, activity.getString(errorStr), null);
}
public void hideKeyboard() {
InputMethodManager imm = (InputMethodManager) activity.getSystemService(

@ -52,9 +52,7 @@ public class VoiceInputAssistant {
*/
private int requestCode = VOICE_RECOGNITION_REQUEST_CODE;
private Activity activity;
private final Fragment fragment;
private final ImageButton voiceButton;
private final EditText textField;
private boolean append = false;
private String languageModel = RecognizerIntent.LANGUAGE_MODEL_WEB_SEARCH;
@ -84,9 +82,7 @@ public class VoiceInputAssistant {
* a microphone-button themselves.
*/
public VoiceInputAssistant() {
this.fragment = null;
this.voiceButton = null;
this.textField = null;
}
/**
@ -96,9 +92,7 @@ public class VoiceInputAssistant {
*/
public VoiceInputAssistant(Activity activity) {
this.activity = activity;
this.fragment = null;
this.voiceButton = null;
this.textField = null;
}
/**
@ -109,13 +103,9 @@ public class VoiceInputAssistant {
* @param voiceButton the microphone-Button
* @param textField the textfield that should get the resulttext
*/
public VoiceInputAssistant(Fragment fragment, ImageButton voiceButton, EditText textField) {
Assert.assertNotNull("Each VoiceInputAssistant must be bound to a fragment!", fragment);
public VoiceInputAssistant(ImageButton voiceButton) {
Assert.assertNotNull("A VoiceInputAssistant without a voiceButton makes no sense!", voiceButton);
Assert.assertNotNull("You have to specify a textfield that is bound to this VoiceInputAssistant!!", textField);
this.fragment = fragment;
this.voiceButton = voiceButton;
this.textField = textField;
}
/**
@ -133,8 +123,8 @@ public class VoiceInputAssistant {
* @param requestCode has to be unique in a single fragment-context,
* dont use VOICE_RECOGNITION_REQUEST_CODE, this is reserved for the other constructor
*/
public VoiceInputAssistant(Fragment fragment, ImageButton voiceButton, EditText textField, int requestCode) {
this(fragment, voiceButton, textField);
public VoiceInputAssistant(ImageButton voiceButton, int requestCode) {
this(voiceButton);
if (requestCode == VOICE_RECOGNITION_REQUEST_CODE)
throw new InvalidParameterException("You have to specify a unique requestCode for this VoiceInputAssistant!");
this.requestCode = requestCode;
@ -148,14 +138,11 @@ public class VoiceInputAssistant {
* @param voiceButton the microphone-Button
* @param textField the textfield that should get the resulttext
*/
public VoiceInputAssistant(Activity activity, ImageButton voiceButton, EditText textField) {
public VoiceInputAssistant(Activity activity, ImageButton voiceButton) {
Assert.assertNotNull("Each VoiceInputAssistant must be bound to a activity!", activity);
Assert.assertNotNull("A VoiceInputAssistant without a voiceButton makes no sense!", voiceButton);
Assert.assertNotNull("You have to specify a textfield that is bound to this VoiceInputAssistant!!", textField);
this.activity = activity;
this.fragment = null;
this.voiceButton = voiceButton;
this.textField = textField;
}
/**
@ -173,8 +160,8 @@ public class VoiceInputAssistant {
* @param requestCode has to be unique in a single fragment-context,
* dont use VOICE_RECOGNITION_REQUEST_CODE, this is reserved for the other constructor
*/
public VoiceInputAssistant(Activity activity, ImageButton voiceButton, EditText textField, int requestCode) {
this(activity, voiceButton, textField);
public VoiceInputAssistant(Activity activity, ImageButton voiceButton, int requestCode) {
this(activity, voiceButton);
if (requestCode == VOICE_RECOGNITION_REQUEST_CODE)
throw new InvalidParameterException("You have to specify a unique requestCode for this VoiceInputAssistant!");
this.requestCode = requestCode;
@ -186,7 +173,7 @@ public class VoiceInputAssistant {
*
* @param prompt Specify the R.string.string_id resource for the prompt-text during voice-recognition here
*/
public void startVoiceRecognitionActivity(int prompt) {
public void startVoiceRecognitionActivity(Fragment fragment, int prompt) {
Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, languageModel);
intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 1);
@ -215,7 +202,7 @@ public class VoiceInputAssistant {
* @param data
* @return
*/
public boolean handleActivityResult(int activityRequestCode, int resultCode, Intent data) {
public boolean handleActivityResult(int activityRequestCode, int resultCode, Intent data, EditText textField) {
boolean result = false;
// handle the result of voice recognition, put it into the textfield
if (activityRequestCode == this.requestCode) {
@ -272,12 +259,12 @@ public class VoiceInputAssistant {
return null;
}
public void configureMicrophoneButton(final int prompt) {
public void configureMicrophoneButton(final Fragment fragment, final int prompt) {
if (Preferences.getBoolean(R.string.p_voiceInputEnabled, true) && VoiceRecognizer.voiceInputAvailable(ContextManager.getContext())) {
voiceButton.setVisibility(View.VISIBLE);
voiceButton.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
startVoiceRecognitionActivity(prompt);
startVoiceRecognitionActivity(fragment, prompt);
}
});
} else {

@ -62,8 +62,7 @@ public class VoiceRecognizer {
private static VoiceRecognizer instance = null;
public static VoiceRecognizer instantiateVoiceRecognizer(Context context, RecognizerApiListener listener,
Fragment fragment, ImageButton voiceAddButton, EditText quickAddBox) {
public static VoiceRecognizer instantiateVoiceRecognizer(Context context, RecognizerApiListener listener, ImageButton voiceAddButton) {
synchronized(VoiceRecognizer.class) {
if (instance == null)
instance = new VoiceRecognizer();
@ -76,13 +75,12 @@ public class VoiceRecognizer {
instance.recognizerApi = new RecognizerApi(context);
instance.recognizerApi.setListener(listener);
} else {
instance.voiceInputAssistant = new VoiceInputAssistant(fragment,
voiceAddButton, quickAddBox);
instance.voiceInputAssistant = new VoiceInputAssistant(voiceAddButton);
}
return instance;
}
public void startVoiceRecognition(Context context, String currentVoiceFile) {
public void startVoiceRecognition(Context context, Fragment fragment, String currentVoiceFile) {
if (speechRecordingAvailable(context) && recognizerApi != null) {
recognizerApi.setTemporaryFile(currentVoiceFile);
recognizerApi.start(Constants.PACKAGE,
@ -92,13 +90,13 @@ public class VoiceRecognizer {
int prompt = R.string.voice_edit_title_prompt;
if (Preferences.getBoolean(R.string.p_voiceInputCreatesTask, false))
prompt = R.string.voice_create_prompt;
voiceInputAssistant.startVoiceRecognitionActivity(prompt);
voiceInputAssistant.startVoiceRecognitionActivity(fragment, prompt);
}
}
public boolean handleActivityResult(int requestCode, int resultCode, Intent data) {
public boolean handleActivityResult(int requestCode, int resultCode, Intent data, EditText textField) {
if (instance != null && instance.voiceInputAssistant != null)
return instance.voiceInputAssistant.handleActivityResult(requestCode, resultCode, data);
return instance.voiceInputAssistant.handleActivityResult(requestCode, resultCode, data, textField);
return false;
}

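Taken together, the VoiceInputAssistant hunks mean callers no longer bind a Fragment and an EditText at construction time: the fragment is supplied to configureMicrophoneButton()/startVoiceRecognitionActivity(), and the target field to handleActivityResult(). A hypothetical caller under the new signatures (mirroring the TaskEditFragment hunk above; the class, its fields, and the request code value are illustrative, not part of the commit):

import android.content.Intent;
import android.speech.RecognizerIntent;
import android.support.v4.app.Fragment;
import android.widget.EditText;
import android.widget.ImageButton;

import com.timsu.astrid.R;
import com.todoroo.astrid.voice.VoiceInputAssistant; // package path assumed from the project layout

// Hypothetical caller of the reworked API; only the VoiceInputAssistant calls mirror
// the commit, the surrounding fragment and its fields are illustrative.
public class VoiceNoteFragmentSketch extends Fragment {

    private static final int REQUEST_VOICE_RECOG = 1234; // any unique, non-default request code
    private VoiceInputAssistant voiceNoteAssistant;
    private EditText notesEditText;

    private void setupVoiceButton(ImageButton voiceAddNoteButton, EditText notesField) {
        notesEditText = notesField;
        // The constructor now takes only the button and a request code; no Fragment
        // or EditText is captured up front.
        voiceNoteAssistant = new VoiceInputAssistant(voiceAddNoteButton, REQUEST_VOICE_RECOG);
        voiceNoteAssistant.setAppend(true);
        voiceNoteAssistant.setLanguageModel(RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        // The fragment is passed per call, so the button can be rewired after recreation.
        voiceNoteAssistant.configureMicrophoneButton(this, R.string.voice_edit_note_prompt);
    }

    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        // The destination EditText is supplied only when the recognition result arrives.
        voiceNoteAssistant.handleActivityResult(requestCode, resultCode, data, notesEditText);
    }
}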