Next we move on to the chat screen; my UI looks like the screenshots below:
It covers two features: a WeChat-style hold-to-talk button and emoji management.
First, the hold-to-talk button. It is implemented by subclassing Button:
package com.ppl.myvoice.save;
import android.content.Context;
import android.os.Environment;
import android.os.Handler;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.View;
import android.widget.Button;
import com.ppl.get_loc.R;
import com.ppl.myvoice.save.AudioManage.AudioStateListenter;
/**
 * The hold-to-talk record button.
 * 1. Override onTouchEvent (the changeState, wantToCancel and reset methods);
 * 2. Write AudioDialogManage and integrate it with this button;
 * 3. Write AudioManage and integrate it with this button.
 */
public class AudioRecordrButton extends Button implements AudioStateListenter{
/**
 * The three states of the button
 */
private static final int STATE_NORMAL = 1; // default state
private static final int STATE_RECORDERING = 2; // recording state
private static final int STATE_WANT_TO_CALCEL = 3; // want-to-cancel state
private int mCurState = STATE_NORMAL; // current state
private boolean isRecordering = false; // whether recording has actually started
private boolean mReady; // whether onLongClick has been triggered
private static final int DISTANCE_Y_CANCEL = 50;
private AudioDialogManage audioDialogManage;
private AudioManage mAudioManage;
/**
 * Callback invoked when a recording finishes normally.
 * @author songshi
 */
public interface AudioFinishRecorderListenter{
void onFinish(float seconds, String FilePath);
}
private AudioFinishRecorderListenter mListenter;
public void setAudioFinishRecorderListenter(AudioFinishRecorderListenter listenter){
this.mListenter=listenter;
}
// constructors
public AudioRecordrButton(Context context) {
this(context, null); // chain to the two-argument constructor so the helpers are always initialized
}
public AudioRecordrButton(Context context, AttributeSet attrs) {
super(context, attrs);
audioDialogManage = new AudioDialogManage(getContext());
String dir = Environment.getExternalStorageDirectory()
+ "/VoiceRecorder"; // 此处需要判断是否有存储卡(外存)
mAudioManage = AudioManage.getInstance(dir);
mAudioManage.setOnAudioStateListenter(this);
setOnLongClickListener(new OnLongClickListener() {
@Override
public boolean onLongClick(View v) {
mReady = true;
// the dialog should really be shown only after the audio is prepared
mAudioManage.prepareAudio();
//return true;
return false;
}
});
}
/*
 * Override onTouchEvent to drive the recording state machine
 * @see android.widget.TextView#onTouchEvent(android.view.MotionEvent)
 */
@Override
public boolean onTouchEvent(MotionEvent event) {
int action = event.getAction(); // current action
int x = (int) event.getX(); // current touch coordinates
int y = (int) event.getY();
switch (action) {
case MotionEvent.ACTION_DOWN:
changeState(STATE_RECORDERING);
break;
case MotionEvent.ACTION_MOVE:
// while recording, use the x/y coordinates to decide whether the user wants to cancel
if (isRecordering) {
if (wantToCancel(x, y)) {
changeState(STATE_WANT_TO_CALCEL);
} else {
changeState(STATE_RECORDERING);
}
}
break;
case MotionEvent.ACTION_UP:
if (!mReady) { // onLongClick was never triggered
reset();
return super.onTouchEvent(event);
}
if (!isRecordering || mTime < 0.7f) { // recording too short
audioDialogManage.tooShort();
mAudioManage.cancel();
mHandler.sendEmptyMessageDelayed(MSG_DIALOG_DIMISS, 1300); // dismiss the "too short" dialog after a 1.3 s delay
}
else if (mCurState == STATE_RECORDERING) { // recording finished normally
audioDialogManage.dimissDialog();
// release
mAudioManage.release();
// callback to the Activity: report the duration and the full file path, which playback will need later
if(mListenter!=null){
mListenter.onFinish(mTime, mAudioManage.getCurrentFilePath());
}
} else if (mCurState == STATE_WANT_TO_CALCEL) {
// cancel
audioDialogManage.dimissDialog();
mAudioManage.cancel();
}
reset();
break;
}
return super.onTouchEvent(event);
}
/**
 * Restore the default state and reset the flags
 */
private void reset() {
isRecordering = false;
mReady = false; // reset the long-click flag
mTime = 0;
changeState(STATE_NORMAL);
}
private boolean wantToCancel(int x, int y) {
// has the finger slid outside the button's bounds?
if (x < 0 || x > getWidth()) {
return true;
}
if (y < -DISTANCE_Y_CANCEL || y > getHeight() + DISTANCE_Y_CANCEL) {
return true;
}
return false;
}
/**
 * Change the button's background and text, and show the recording dialog that matches the given state
 * @param state the new state
 */
private void changeState(int state) {
if (mCurState != state) {
mCurState = state;
switch (state) {
case STATE_NORMAL:
setBackgroundResource(R.drawable.btn_recorder_normal);
setText(R.string.str_recorder_normal);
break;
case STATE_RECORDERING:
setBackgroundResource(R.drawable.btn_recorder_recordering);
setText(R.string.str_recorder_recording);
if (isRecordering) {
// update the dialog to its recording state
audioDialogManage.recording();
}
break;
case STATE_WANT_TO_CALCEL:
setBackgroundResource(R.drawable.btn_recorder_recordering);
setText(R.string.str_recorder_want_cancel);
// update the dialog to its want-to-cancel state
audioDialogManage.wantToCancel();
break;
}
}
}
/*
 * Implementation of the "well prepared" callback from AudioManage
 * @see AudioManage.AudioStateListenter#wellPrepared()
 */
@Override
public void wellPrepared() {
mHandler.sendEmptyMessage(MSG_AUDIO_PREPARED);
}
private static final int MSG_AUDIO_PREPARED = 0x110; // recorder fully prepared
private static final int MSG_VOICE_CHANGE = 0x111; // voice level changed
private static final int MSG_DIALOG_DIMISS = 0x112; // dismiss the dialog
/**
 * Receives data from the worker thread and uses it to update the UI.
 * The Handler runs on the main (UI) thread; the worker thread hands it data through
 * Message objects (sent with sendMessage()), which are queued on the main thread's
 * message queue and processed there to drive the UI updates.
 */
private Handler mHandler = new Handler() {
public void handleMessage(android.os.Message msg) {
switch (msg.what) {
case MSG_AUDIO_PREPARED: // sent from wellPrepared()
audioDialogManage.showRecorderingDialog();
isRecordering = true;
// recording has started; also start a thread that samples the volume and keeps time
new Thread(mGetVoiceLevelRunnable).start();
break;
case MSG_VOICE_CHANGE: // sent periodically by mGetVoiceLevelRunnable
audioDialogManage.updateVoiceLevel(mAudioManage
.getVoiceLevel(7));
break;
// MSG_DIALOG_DIMISS is handled through the Handler so the "too short" dialog stays visible for a moment before it is closed (see the delayed message in ACTION_UP)
case MSG_DIALOG_DIMISS:
audioDialogManage.dimissDialog();
break;
}
}
};
private float mTime; // elapsed recording time in seconds; reset to 0 in reset()
/**
 * Runnable that polls the voice level while recording and accumulates the elapsed time
 */
private Runnable mGetVoiceLevelRunnable = new Runnable() {
@Override
public void run() {
while (isRecordering) {
try {
Thread.sleep(100);
mTime += 0.1f;
mHandler.sendEmptyMessage(MSG_VOICE_CHANGE);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
};
}
That completes the hold-to-talk feature.
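The button delegates the real work to two helper classes that are not listed here: AudioDialogManage, which shows, updates and dismisses the recording dialog, and AudioManage, which wraps MediaRecorder. Below is a minimal sketch of what AudioManage might look like, reconstructed from the calls the button makes; it is not the original class, and the MediaRecorder settings, file naming and level mapping are my assumptions. Remember that recording also needs the RECORD_AUDIO and WRITE_EXTERNAL_STORAGE permissions in the manifest.
package com.ppl.myvoice.save;
import java.io.File;
import java.io.IOException;
import java.util.UUID;
import android.media.MediaRecorder;
// Minimal sketch only: a MediaRecorder wrapper exposing the methods the button calls.
public class AudioManage {

    public interface AudioStateListenter {
        void wellPrepared(); // fired once the recorder is ready; triggers MSG_AUDIO_PREPARED
    }

    private static AudioManage mInstance;
    private MediaRecorder mRecorder;
    private final String mDir; // directory for the audio files
    private String mCurrentFilePath;
    private boolean isPrepared;
    private AudioStateListenter mListenter;

    private AudioManage(String dir) {
        mDir = dir;
    }

    public static synchronized AudioManage getInstance(String dir) {
        if (mInstance == null) {
            mInstance = new AudioManage(dir);
        }
        return mInstance;
    }

    public void setOnAudioStateListenter(AudioStateListenter listenter) {
        mListenter = listenter;
    }

    public void prepareAudio() {
        try {
            isPrepared = false;
            File dir = new File(mDir);
            if (!dir.exists()) {
                dir.mkdirs();
            }
            File file = new File(dir, UUID.randomUUID().toString() + ".amr"); // assumed naming scheme
            mCurrentFilePath = file.getAbsolutePath();

            mRecorder = new MediaRecorder();
            mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
            mRecorder.setOutputFormat(MediaRecorder.OutputFormat.AMR_NB); // assumed format
            mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
            mRecorder.setOutputFile(mCurrentFilePath);
            mRecorder.prepare();
            mRecorder.start();

            isPrepared = true;
            if (mListenter != null) {
                mListenter.wellPrepared();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    // Map the current amplitude to a level of 1..maxLevel for the dialog animation.
    public int getVoiceLevel(int maxLevel) {
        if (isPrepared && mRecorder != null) {
            try {
                return maxLevel * mRecorder.getMaxAmplitude() / 32768 + 1;
            } catch (Exception e) {
                // fall through and return the lowest level
            }
        }
        return 1;
    }

    public void release() {
        if (mRecorder != null) {
            mRecorder.stop();
            mRecorder.release();
            mRecorder = null;
        }
    }

    public void cancel() {
        release();
        if (mCurrentFilePath != null) {
            new File(mCurrentFilePath).delete(); // discard the unwanted recording
            mCurrentFilePath = null;
        }
    }

    public String getCurrentFilePath() {
        return mCurrentFilePath;
    }
}
The getInstance(dir) call and the listener registration match what the button's two-argument constructor does, and wellPrepared() is what eventually drives MSG_AUDIO_PREPARED in the Handler above.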
Next, the emoji panel; the effect is shown in the figure below.
The panel consists of the selectable emojis and a dot-style page indicator.
Approach: GridView + ViewPager (a sketch of the wiring follows the XML below).
The XML is as follows:
<android.support.v4.view.ViewPager
android:id="@+id/vp_emjo"
android:layout_width="match_parent"
android:layout_height="wrap_content" />
<LinearLayout
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="center_horizontal"
android:layout_marginBottom="5dip"
android:gravity="center_horizontal"
android:orientation="horizontal"
android:padding="2dip" >
<ImageView
android:id="@+id/iv_e1"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:src="@drawable/login_point" />
<ImageView
android:id="@+id/iv_e2"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:src="@drawable/login_point" />
<ImageView
android:id="@+id/iv_e3"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:src="@drawable/login_point" />
<ImageView
android:id="@+id/iv_e4"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:src="@drawable/login_point" />
<ImageView
android:id="@+id/iv_e5"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:src="@drawable/login_point" />
<ImageView
android:id="@+id/iv_e6"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:src="@drawable/login_point" />
</LinearLayout>
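The wiring between the GridView pages and this ViewPager is not shown in the original post. As a rough sketch of one way to do it: each page is a GridView whose adapter covers a slice of FaceTextUtils.faceTexts, a plain PagerAdapter hosts the pages, and a page-change listener keeps the six dots in sync. The EmoteAdapter constructor, the page size, the emoteClickListener (the click handler shown further below) and the R.drawable.login_point_selected drawable are assumptions, not the author's code.
// Inside the chat Activity, after findViewById() has populated vp_emjo and iv_e1..iv_e6.
final int pageSize = 20; // assumed number of emojis per page
final int pageCount = (FaceTextUtils.faceTexts.size() + pageSize - 1) / pageSize;

final List<GridView> pages = new ArrayList<GridView>();
for (int i = 0; i < pageCount; i++) {
    GridView grid = new GridView(this);
    grid.setNumColumns(7);
    // EmoteAdapter(context, data, pageIndex) is a hypothetical page-adapter constructor
    grid.setAdapter(new EmoteAdapter(this, FaceTextUtils.faceTexts, i));
    grid.setOnItemClickListener(emoteClickListener);
    pages.add(grid);
}

vp_emjo.setAdapter(new PagerAdapter() {
    @Override
    public int getCount() {
        return pages.size();
    }

    @Override
    public boolean isViewFromObject(View view, Object object) {
        return view == object;
    }

    @Override
    public Object instantiateItem(ViewGroup container, int position) {
        container.addView(pages.get(position));
        return pages.get(position);
    }

    @Override
    public void destroyItem(ViewGroup container, int position, Object object) {
        container.removeView(pages.get(position));
    }
});

// Keep the dot indicator in sync with the currently visible page.
final ImageView[] dots = { iv_e1, iv_e2, iv_e3, iv_e4, iv_e5, iv_e6 };
vp_emjo.setOnPageChangeListener(new ViewPager.SimpleOnPageChangeListener() {
    @Override
    public void onPageSelected(int position) {
        for (int i = 0; i < dots.length; i++) {
            // login_point_selected is a hypothetical "highlighted dot" drawable
            dots[i].setImageResource(i == position ? R.drawable.login_point_selected
                    : R.drawable.login_point);
        }
    }
});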
Next, how the emojis are displayed, both in the input box and in the chat bubbles. First, two small Java topics. My own background is C and VC++, and I only picked up a little Java because of Android, so without further ado, here is the useful part: Pattern and Matcher.
These are Java's regular-expression classes; they match strings against a pattern defined by a regular expression.
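As a quick standalone illustration (mine, not from the original post), this is how Pattern and Matcher pick out the "\face_N" tokens that the emoji code below relies on:
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class RegexDemo {
    public static void main(String[] args) {
        String text = "hello \\face_3 world \\face_42";           // the string actually contains \face_3 and \face_42
        Pattern pattern = Pattern.compile("\\\\face_[0-9]{1,3}"); // same pattern as FaceTextUtils below
        Matcher matcher = pattern.matcher(text);
        while (matcher.find()) {
            // group() returns the matched token, e.g. "\face_3"
            System.out.println(matcher.group() + " at index " + matcher.start());
        }
    }
}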
Below is the emoji display code:
package com.ppl.get_loc.chat.emos;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.text.Spannable;
import android.text.SpannableString;
import android.text.TextUtils;
import android.text.style.ImageSpan;
public class FaceTextUtils {
public static List<FaceText> faceTexts = new ArrayList<FaceText>();
static {
faceTexts.add(new FaceText("\\face_0"));
faceTexts.add(new FaceText("\\face_1"));
faceTexts.add(new FaceText("\\face_2"));
faceTexts.add(new FaceText("\\face_3"));
faceTexts.add(new FaceText("\\face_4"));
faceTexts.add(new FaceText("\\face_5"));
faceTexts.add(new FaceText("\\face_6"));
faceTexts.add(new FaceText("\\face_7"));
faceTexts.add(new FaceText("\\face_8"));
faceTexts.add(new FaceText("\\face_9"));
faceTexts.add(new FaceText("\\face_10"));
faceTexts.add(new FaceText("\\face_11"));
faceTexts.add(new FaceText("\\face_12"));
faceTexts.add(new FaceText("\\face_13"));
faceTexts.add(new FaceText("\\face_14"));
faceTexts.add(new FaceText("\\face_15"));
faceTexts.add(new FaceText("\\face_16"));
faceTexts.add(new FaceText("\\face_17"));
faceTexts.add(new FaceText("\\face_18"));
faceTexts.add(new FaceText("\\face_19"));
faceTexts.add(new FaceText("\\face_20"));
faceTexts.add(new FaceText("\\face_21"));
faceTexts.add(new FaceText("\\face_22"));
faceTexts.add(new FaceText("\\face_23"));
faceTexts.add(new FaceText("\\face_24"));
faceTexts.add(new FaceText("\\face_25"));
faceTexts.add(new FaceText("\\face_26"));
faceTexts.add(new FaceText("\\face_27"));
faceTexts.add(new FaceText("\\face_28"));
faceTexts.add(new FaceText("\\face_29"));
faceTexts.add(new FaceText("\\face_30"));
faceTexts.add(new FaceText("\\face_31"));
faceTexts.add(new FaceText("\\face_32"));
faceTexts.add(new FaceText("\\face_33"));
faceTexts.add(new FaceText("\\face_34"));
faceTexts.add(new FaceText("\\face_35"));
faceTexts.add(new FaceText("\\face_36"));
faceTexts.add(new FaceText("\\face_37"));
faceTexts.add(new FaceText("\\face_38"));
faceTexts.add(new FaceText("\\face_39"));
faceTexts.add(new FaceText("\\face_40"));
faceTexts.add(new FaceText("\\face_41"));
faceTexts.add(new FaceText("\\face_42"));
faceTexts.add(new FaceText("\\face_43"));
faceTexts.add(new FaceText("\\face_44"));
faceTexts.add(new FaceText("\\face_45"));
faceTexts.add(new FaceText("\\face_46"));
faceTexts.add(new FaceText("\\face_47"));
faceTexts.add(new FaceText("\\face_48"));
faceTexts.add(new FaceText("\\face_49"));
faceTexts.add(new FaceText("\\face_50"));
faceTexts.add(new FaceText("\\face_51"));
faceTexts.add(new FaceText("\\face_52"));
faceTexts.add(new FaceText("\\face_53"));
faceTexts.add(new FaceText("\\face_54"));
faceTexts.add(new FaceText("\\face_55"));
faceTexts.add(new FaceText("\\face_56"));
faceTexts.add(new FaceText("\\face_57"));
faceTexts.add(new FaceText("\\face_58"));
faceTexts.add(new FaceText("\\face_59"));
faceTexts.add(new FaceText("\\face_60"));
faceTexts.add(new FaceText("\\face_61"));
faceTexts.add(new FaceText("\\face_62"));
faceTexts.add(new FaceText("\\face_63"));
faceTexts.add(new FaceText("\\face_64"));
faceTexts.add(new FaceText("\\face_65"));
faceTexts.add(new FaceText("\\face_66"));
faceTexts.add(new FaceText("\\face_67"));
faceTexts.add(new FaceText("\\face_68"));
faceTexts.add(new FaceText("\\face_69"));
faceTexts.add(new FaceText("\\face_70"));
faceTexts.add(new FaceText("\\face_71"));
faceTexts.add(new FaceText("\\face_72"));
faceTexts.add(new FaceText("\\face_73"));
faceTexts.add(new FaceText("\\face_74"));
faceTexts.add(new FaceText("\\face_75"));
faceTexts.add(new FaceText("\\face_76"));
faceTexts.add(new FaceText("\\face_77"));
faceTexts.add(new FaceText("\\face_78"));
faceTexts.add(new FaceText("\\face_79"));
faceTexts.add(new FaceText("\\face_80"));
faceTexts.add(new FaceText("\\face_81"));
faceTexts.add(new FaceText("\\face_82"));
faceTexts.add(new FaceText("\\face_83"));
faceTexts.add(new FaceText("\\face_84"));
faceTexts.add(new FaceText("\\face_85"));
faceTexts.add(new FaceText("\\face_86"));
faceTexts.add(new FaceText("\\face_87"));
faceTexts.add(new FaceText("\\face_88"));
faceTexts.add(new FaceText("\\face_89"));
faceTexts.add(new FaceText("\\face_90"));
faceTexts.add(new FaceText("\\face_91"));
faceTexts.add(new FaceText("\\face_92"));
faceTexts.add(new FaceText("\\face_93"));
faceTexts.add(new FaceText("\\face_94"));
faceTexts.add(new FaceText("\\face_95"));
faceTexts.add(new FaceText("\\face_96"));
faceTexts.add(new FaceText("\\face_97"));
faceTexts.add(new FaceText("\\face_98"));
faceTexts.add(new FaceText("\\face_99"));
faceTexts.add(new FaceText("\\face_100"));
faceTexts.add(new FaceText("\\face_101"));
faceTexts.add(new FaceText("\\face_102"));
faceTexts.add(new FaceText("\\face_103"));
faceTexts.add(new FaceText("\\face_104"));
faceTexts.add(new FaceText("\\face_105"));
faceTexts.add(new FaceText("\\face_106"));
faceTexts.add(new FaceText("\\face_107"));
faceTexts.add(new FaceText("\\face_108"));
faceTexts.add(new FaceText("\\face_109"));
faceTexts.add(new FaceText("\\face_110"));
faceTexts.add(new FaceText("\\emotion_del_normal"));
faceTexts.add(new FaceText("\\emotion_del_down"));
}
public static String parse(String s) {
for (FaceText faceText : faceTexts) {
s = s.replace("\\" + faceText.text, faceText.text);
s = s.replace(faceText.text, "\\" + faceText.text);
}
return s;
}
/**
 * Convert a message string into a SpannableString in which every "\face_N"
 * token is replaced by the matching face drawable.
 * @return SpannableString
 */
public static SpannableString toSpannableString(Context context, String text) {
if (!TextUtils.isEmpty(text)) {
SpannableString spannableString = new SpannableString(text);
int start = 0;
Pattern pattern = Pattern.compile("\\\\face_[0-9]{1,3}", Pattern.CASE_INSENSITIVE);
Matcher matcher = pattern.matcher(text);
while (matcher.find()) {
String faceText = matcher.group();
String key = faceText.substring(1);
BitmapFactory.Options options = new BitmapFactory.Options();
Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(),
context.getResources().getIdentifier(key, "drawable", context.getPackageName()), options);
ImageSpan imageSpan = new ImageSpan(context, bitmap);
int startIndex = text.indexOf(faceText, start);
int endIndex = startIndex + faceText.length();
if (startIndex >= 0)
spannableString.setSpan(imageSpan, startIndex, endIndex, Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
start = (endIndex - 1);
}
return spannableString;
} else {
return new SpannableString("");
}
}
public static SpannableString toSpannableString(Context context, String text, SpannableString spannableString) {
int start = 0;
Pattern pattern = Pattern.compile("\\\\ue[a-z0-9]{3}", Pattern.CASE_INSENSITIVE);
Matcher matcher = pattern.matcher(text);
while (matcher.find()) {
String faceText = matcher.group();
String key = faceText.substring(1);
BitmapFactory.Options options = new BitmapFactory.Options();
// options.inSampleSize = 2;
Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(), context.getResources()
.getIdentifier(key, "drawable", context.getPackageName()), options);
ImageSpan imageSpan = new ImageSpan(context, bitmap);
int startIndex = text.indexOf(faceText, start);
int endIndex = startIndex + faceText.length();
if (startIndex >= 0)
spannableString.setSpan(imageSpan, startIndex, endIndex, Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
start = (endIndex - 1);
}
return spannableString;
}
}
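The FaceText class itself is not listed in the post; judging by how it is constructed above and by the .text field used below, it is presumably just a small holder for the token string, roughly:
package com.ppl.get_loc.chat.emos;

// Presumed shape of the FaceText model (the original class is not shown in the post).
public class FaceText {
    public String text;

    public FaceText(String text) {
        this.text = text;
    }
}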
The code for inserting an emoji into the input box is as follows:
EmoteAdapter emoAdapter = (EmoteAdapter) itemAdapter;
if(position == emoAdapter.getCount() - 1){
// the delete button was tapped
et_input.dispatchKeyEvent(new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_DEL));
}else{
// an emoji was tapped, so append it to the input box
sendText = et_input.getText().toString();
FaceText name = (FaceText) emoAdapter.getItem(position); // use the adapter obtained above
String key = name.text.toString();
int start = et_input.getSelectionStart();
SpannableString aps = FaceTextUtils.toSpannableString(getApplicationContext(), sendText + key);
et_input.setText(aps);
// position the cursor
CharSequence info = et_input.getText();
if (info instanceof Spannable) {
Spannable spanText = (Spannable) info;
Selection.setSelection(spanText,
start + key.length());
}
}
The code for displaying it in the sent message bubble is as follows:
SpannableString spS = FaceTextUtils.toSpannableString(mContext, item.getContent());
viewHolder.tv_message.setText(spS);
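For context, those two lines sit inside the chat list adapter's getView(); a rough sketch is shown below (the layout name, view id and ChatMessage model are assumptions, only the last two statements come from the post):
@Override
public View getView(int position, View convertView, ViewGroup parent) {
    ViewHolder viewHolder;
    if (convertView == null) {
        // item_chat_message and tv_message are assumed resource names
        convertView = LayoutInflater.from(mContext).inflate(R.layout.item_chat_message, parent, false);
        viewHolder = new ViewHolder();
        viewHolder.tv_message = (TextView) convertView.findViewById(R.id.tv_message);
        convertView.setTag(viewHolder);
    } else {
        viewHolder = (ViewHolder) convertView.getTag();
    }
    ChatMessage item = getItem(position); // assumed message model with getContent()
    SpannableString spS = FaceTextUtils.toSpannableString(mContext, item.getContent());
    viewHolder.tv_message.setText(spS);
    return convertView;
}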
The final effect is shown in the figure below.
That completes adding and displaying emojis in the chat.
Original article: http://blog.csdn.net/iliupp/article/details/51919106