BLACKBERRY AUDIO RECORDING
Here is a small example of BlackBerry audio recording. It creates a small popup-style screen with three buttons, one to record, one to play the recording, one to stop recording or playback, plus a Cancel button that closes the screen.
package com;

import java.util.Timer;
import java.util.TimerTask;

import net.rim.device.api.applicationcontrol.ApplicationPermissions;
import net.rim.device.api.applicationcontrol.ApplicationPermissionsManager;
import net.rim.device.api.system.Backlight;
import net.rim.device.api.system.Bitmap;
import net.rim.device.api.system.Characters;
import net.rim.device.api.system.Display;
import net.rim.device.api.ui.Color;
import net.rim.device.api.ui.Field;
import net.rim.device.api.ui.FieldChangeListener;
import net.rim.device.api.ui.Font;
import net.rim.device.api.ui.FontFamily;
import net.rim.device.api.ui.Graphics;
import net.rim.device.api.ui.Manager;
import net.rim.device.api.ui.XYEdges;
import net.rim.device.api.ui.XYRect;
import net.rim.device.api.ui.component.BitmapField;
import net.rim.device.api.ui.component.ButtonField;
import net.rim.device.api.ui.component.Dialog;
import net.rim.device.api.ui.component.LabelField;
import net.rim.device.api.ui.container.HorizontalFieldManager;
import net.rim.device.api.ui.container.MainScreen;
import net.rim.device.api.ui.container.PopupScreen;
import net.rim.device.api.ui.container.VerticalFieldManager;
import net.rim.device.api.ui.decor.Background;
import net.rim.device.api.ui.decor.BackgroundFactory;
import net.rim.device.api.ui.decor.Border;
import net.rim.device.api.ui.decor.BorderFactory;
public class RecordSoundScreen extends MainScreen {

    private Meeting_Submission TargetContext;
    private RecordSoundScreen CurrScreen;
    private final static int _ALPHA = 100;
    private CustomLabel_withImage Start;
    private CustomLabel_withImage Stop;
    private CustomLabel_withImage PlayStop;
    private CustomLabel_withImage Cancel;
    private VerticalFieldManager vfMain, vfSub, vfPanel;
    private HorizontalFieldManager hfAdd, hfGray, hfButtons;
    private AudioRecorderThread myAudioRecorderThread = null;
    private CustomLabel_withImage RecordBtn;
    private Manager MyManager;
    private Bitmap mystartbmpfoc;
    private Bitmap mystartbmpunfoc;
    public byte[] _audioArray;
    private LabelField TimerText;
    private FontFamily firstFF, secondFF;
    private Font firstFont, bigFont, fontUnderLine;
    private Timer MyTimer;
    private int count = 0;
    private TimerClass TimerClassInstance;
    private int RecordedSoundLengthinSecs;
    private int MaximumRecordingTime;
    public String RecordingState = "";

    public RecordSoundScreen(Meeting_Submission context) {
        super(NO_HORIZONTAL_SCROLL | NO_VERTICAL_SCROLL);
        TargetContext = context;
        CurrScreen = this;
        ClosePermission();
        MaximumRecordingTime = 121;
        setFont();
        MyManager = CurrScreen.getMainManager();
        DrawPanel();
    }
    // Returns false when both the media and recording permissions are denied,
    // true otherwise.
    public boolean ClosePermission() {
        ApplicationPermissionsManager apm = ApplicationPermissionsManager.getInstance();
        ApplicationPermissions original = apm.getApplicationPermissions();
        if (original.getPermission(ApplicationPermissions.PERMISSION_MEDIA) == ApplicationPermissions.VALUE_DENY
                && original.getPermission(ApplicationPermissions.PERMISSION_RECORDING) == ApplicationPermissions.VALUE_DENY) {
            return false;
        }
        return true;
    }
    private void StartTimer(int maxtime) {
        TimerClassInstance = new TimerClass(this, TimerText);
        TimerClassInstance.callTheTimer(maxtime);
    }

    private void StopTimer() {
        TimerClassInstance.timerTask.cancel();
        RecordedSoundLengthinSecs = MaximumRecordingTime - TimerClassInstance.secs;
        // Dialog.alert("Recorded time is " + RecordedSoundLengthinSecs);
    }
    public void TimerComplete() {
        if (TimerClassInstance.TotalSecs == MaximumRecordingTime) {
            // Dialog.alert("Stopping Sound Recording");
            if (RecordingState.equals("Record")) {
                StopRecording();
            } else if (RecordingState.equals("Play")) {
                StopPlayingRecordedSound();
            }
        }
        // TimerClassInstance.callTheTimer(20);
    }

    public void TimerUpdate(int SecsCompleted) {
    }
    public void setFont() {
        try {
            firstFF = FontFamily.forName("Times New Roman"); // BBMillbank
            firstFont = firstFF.getFont(Font.BOLD, 16);
            secondFF = FontFamily.forName("Arial"); // BBMillbank
            fontUnderLine = secondFF.getFont(Font.PLAIN, 14);
            bigFont = firstFF.getFont(Font.BOLD, 22);
        } catch (Exception e) {
            System.out.println("Error " + e);
        }
    }
    protected void paintBackground(Graphics g) {
        XYRect myExtent = getExtent();
        int color = g.getColor();
        int alpha = g.getGlobalAlpha();
        g.setGlobalAlpha(_ALPHA);
        g.setColor(Color.BLACK);
        g.fillRect(0, 0, getWidth(), getHeight());
        g.setColor(color);
        g.setGlobalAlpha(alpha);
    }
    private void DrawPanel() {
        final Bitmap BgBitmap = Bitmap.getBitmapResource("popup_box_down.png");
        BitmapField Bg = new BitmapField(BgBitmap);
        mystartbmpfoc = Bitmap.getBitmapResource("end_meeting_sel.png");
        mystartbmpunfoc = Bitmap.getBitmapResource("end_meeting_desel.png");
        final Bitmap imgHeader = Bitmap.getBitmapResource("popup_box_heading.png");
        final Bitmap imgBG = Bitmap.getBitmapResource("trans_bg.png");

        vfMain = new VerticalFieldManager(VerticalFieldManager.USE_ALL_WIDTH
                | VerticalFieldManager.USE_ALL_HEIGHT) {
            protected void paintBackground(Graphics g) {
                XYRect myExtent = getExtent();
                int color = g.getColor();
                int alpha = g.getGlobalAlpha();
                g.setColor(Color.BLACK);
                g.fillRect(0, 0, getWidth(), getHeight());
                g.setColor(Color.BLACK);
            }
        };

        vfSub = new VerticalFieldManager();

        hfAdd = new HorizontalFieldManager(HorizontalFieldManager.USE_ALL_WIDTH) {
            public void paint(Graphics g) {
                g.drawBitmap(0, 0, imgHeader.getWidth(), imgHeader.getHeight(), imgHeader, 0, 0);
                // g.clear();
                super.paint(g);
                invalidate();
            }
        };

        LabelField lblAddText = new LabelField("Audio Note") {
            protected void paint(Graphics graphics) {
                graphics.setColor(Color.WHITE);
                super.paint(graphics);
            }
        };
        hfAdd.add(lblAddText);
        vfSub.add(hfAdd);

        hfGray = new HorizontalFieldManager(HorizontalFieldManager.USE_ALL_WIDTH
                | HorizontalFieldManager.USE_ALL_HEIGHT);

        vfPanel = new VerticalFieldManager() {
            public void paint(Graphics g) {
                g.drawBitmap(0, 0, BgBitmap.getWidth(), BgBitmap.getHeight(), BgBitmap, 0, 0);
                // g.clear();
                super.paint(g);
                invalidate();
            }
        };

        TimerText = new LabelField("02:00");
        TimerText.setFont(bigFont);
        TimerText.setMargin(30, 0, 0, 115);
        vfPanel.add(TimerText);

        AddRecordBtns();
        vfPanel.add(hfGray);
        vfSub.add(vfPanel);
        vfSub.setMargin(50, 0, 0, 16);
        vfMain.add(vfSub);
        add(vfMain);
    }
    private void AddRecordBtns() {
        hfButtons = new HorizontalFieldManager();

        Start = new CustomLabel_withImage(30, 20, mystartbmpunfoc, mystartbmpfoc, "Play", 0xFFFFFF);
        Cancel = new CustomLabel_withImage(30, 20, mystartbmpunfoc, mystartbmpfoc, "Cancel", 0xFFFFFF);
        Bitmap RecordBmp = Bitmap.getBitmapResource("record_sel.png");
        RecordBtn = new CustomLabel_withImage(30, 20, mystartbmpunfoc, RecordBmp, "Record", 0xFFFFFF);

        hfButtons.add(RecordBtn);
        hfButtons.add(Start);
        hfButtons.add(Cancel);
        hfButtons.setMargin(20, 0, 0, 5);
        hfGray.add(hfButtons);

        RecordBtn.setChangeListener(new FieldChangeListener() {
            public void fieldChanged(Field field, int context) {
                StartRecording();
            }
        });
        Start.setChangeListener(new FieldChangeListener() {
            public void fieldChanged(Field field, int context) {
                StartPlayingRecordedSound();
            }
        });
        Cancel.setChangeListener(new FieldChangeListener() {
            public void fieldChanged(Field field, int context) {
                CloseScreen();
            }
        });
    }
    private void CloseScreen() {
        // Dialog.alert("Recording Close");
        CheckIfSoundRecorded();
        // Dialog.alert("Recording Close");
        // CurrScreen.close();
    }
    private void StartRecording() {
        RecordingState = "Record";
        hfGray.delete(hfButtons);
        AddPlayBtns();
        StartTimer(MaximumRecordingTime);
        myAudioRecorderThread = new AudioRecorderThread();
        myAudioRecorderThread.run();
        // if ((Display.getProperties() & Display.DISPLAY_PROPERTY_REQUIRES_BACKLIGHT) != 0)
        {
            Backlight.enable(true, 3000);
        }
    }
    private void StopRecording() {
        RecordingState = "";
        // Dialog.alert("Recording Stop");
        hfGray.delete(hfButtons);
        StopTimer();
        AddRecordBtns();
        myAudioRecorderThread.stop();
    }
    private void StartPlayingRecordedSound() {
        if (myAudioRecorderThread != null) {
            // Dialog.alert("Recording Play");
            if (myAudioRecorderThread._audioArray != null) {
                RecordingState = "Play";
                hfGray.delete(hfButtons);
                StartTimer(RecordedSoundLengthinSecs);
                myAudioRecorderThread.play();
                AddPauseBtns();
            }
        } else {
            Dialog.alert("No sound recorded yet");
        }
    }

    private void StopPlayingRecordedSound() {
        RecordingState = "";
        hfGray.delete(hfButtons);
        StopTimer();
        myAudioRecorderThread.stop();
        AddRecordBtns();
    }
    private void AddPlayBtns() {
        hfButtons = new HorizontalFieldManager();

        Start = new CustomLabel_withImage(30, 20, mystartbmpunfoc, mystartbmpfoc, "Play", 0xFFFFFF);
        Stop = new CustomLabel_withImage(30, 20, mystartbmpunfoc, mystartbmpfoc, "Stop", 0xFFFFFF);
        Cancel = new CustomLabel_withImage(30, 20, mystartbmpunfoc, mystartbmpfoc, "Cancel", 0xFFFFFF);
        Bitmap RecordBmp = Bitmap.getBitmapResource("record_sel.png");

        hfButtons.add(Stop);
        hfButtons.add(Start);
        hfButtons.add(Cancel);
        hfButtons.setMargin(20, 0, 0, 5);
        hfGray.add(hfButtons);

        Stop.setChangeListener(new FieldChangeListener() {
            public void fieldChanged(Field field, int context) {
                StopRecording();
            }
        });
        Cancel.setChangeListener(new FieldChangeListener() {
            public void fieldChanged(Field field, int context) {
                CloseScreen();
            }
        });
        Start.setChangeListener(new FieldChangeListener() {
            public void fieldChanged(Field field, int context) {
                // TODO Auto-generated method stub
            }
        });
    }
    private void AddPauseBtns() {
        hfButtons = new HorizontalFieldManager();

        PlayStop = new CustomLabel_withImage(30, 20, mystartbmpunfoc, mystartbmpfoc, "Stop", 0xFFFFFF);
        Cancel = new CustomLabel_withImage(30, 20, mystartbmpunfoc, mystartbmpfoc, "Cancel", 0xFFFFFF);
        Bitmap RecordBmp = Bitmap.getBitmapResource("record_sel.png");
        RecordBtn = new CustomLabel_withImage(30, 20, mystartbmpunfoc, RecordBmp, "Record", 0xFFFFFF);

        hfButtons.add(RecordBtn);
        hfButtons.add(PlayStop);
        hfButtons.add(Cancel);
        hfButtons.setMargin(20, 0, 0, 5);
        hfGray.add(hfButtons);

        RecordBtn.setChangeListener(new FieldChangeListener() {
            public void fieldChanged(Field field, int context) {
                StartRecording();
            }
        });
        Cancel.setChangeListener(new FieldChangeListener() {
            public void fieldChanged(Field field, int context) {
                CloseScreen();
            }
        });
        PlayStop.setChangeListener(new FieldChangeListener() {
            public void fieldChanged(Field field, int context) {
                StopPlayingRecordedSound();
            }
        });
    }
    private void CheckIfSoundRecorded() {
        if (myAudioRecorderThread != null) {
            // Dialog.alert("420");
            if (myAudioRecorderThread._audioArray != null) {
                // Dialog.alert("420 420");
                _audioArray = myAudioRecorderThread._audioArray;
                TargetContext._audioArray = _audioArray;
                TargetContext.AudioRecorded(myAudioRecorderThread._audioArray);
                CurrScreen.close();
            } else {
                // Dialog.alert("420 420 420");
                TargetContext.AudioNotRecorded();
                CurrScreen.close();
            }
        } else {
            // Dialog.alert("420 420 420 420 " + TargetContext);
            TargetContext.AudioNotRecorded();
            CurrScreen.close();
        }
    }

    public boolean keyChar(char character, int status, int time) {
        switch (character) {
        case Characters.ESCAPE:
            TargetContext.AudioNotRecorded();
            CurrScreen.close();
            return true;
        case Characters.ENTER:
            return true;
        default:
            return super.keyChar(character, status, time);
        }
    }
}
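For reference, here is a minimal sketch of the calling side. Only the names Meeting_Submission, _audioArray, AudioRecorded() and AudioNotRecorded() come from the code above; everything else in this sketch is an assumption about how the caller might look.

package com;

import net.rim.device.api.ui.UiApplication;
import net.rim.device.api.ui.component.Dialog;
import net.rim.device.api.ui.container.MainScreen;

// Hedged sketch of the screen that opens RecordSoundScreen and receives its callbacks.
public class Meeting_Submission extends MainScreen {

    // Filled in by RecordSoundScreen.CheckIfSoundRecorded()
    public byte[] _audioArray;

    // Opens the recording popup; call this from the UI event thread.
    public void OpenRecorder() {
        UiApplication.getUiApplication().pushScreen(new RecordSoundScreen(this));
    }

    // Callback used by RecordSoundScreen when a clip was captured.
    public void AudioRecorded(byte[] audio) {
        _audioArray = audio;
        Dialog.inform("Recorded " + audio.length + " bytes");
    }

    // Callback used by RecordSoundScreen when nothing was recorded or the user cancelled.
    public void AudioNotRecorded() {
        Dialog.inform("No audio recorded");
    }
}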
After recording up to 120 seconds of sound we get the recording as a byte array, encode it, post it over an HTTP connection, and save it in the database as a byte array.
The code below prepares the byte array for posting: it converts the audio bytes to a Base64 string with encodeBase64() (note that Base64 is an encoding, not real encryption) and appends it to the outgoing request data.
import java.io.ByteArrayOutputStream;
import java.io.IOException;

import net.rim.device.api.io.Base64OutputStream;

public byte[] _audioArray = null;
private AudioRecorderThread myAudioRecorderThread;

// Append the Base64-encoded audio to the outgoing request data
// (_postData is the request-builder object used elsewhere in the project).
_postData.append("audio", encodeBase64(_audioArray, 0, _audioArray.length));

public String encodeBase64(byte[] toEncode, int offset, int length) {
    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(length);
    Base64OutputStream base64OutputStream = new Base64OutputStream(byteArrayOutputStream);
    try {
        base64OutputStream.write(toEncode, offset, length);
        base64OutputStream.flush();
        base64OutputStream.close();
    } catch (IOException ioe) {
        System.out.println("Error in encodeBase64() : " + ioe.toString());
        return null;
    }
    return byteArrayOutputStream.toString();
}
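The _postData helper used above is not shown in this post. As a rough, self-contained sketch of the actual upload, the Base64 string could be posted over a plain HttpConnection like this; the URL and the "audio" parameter name are assumptions, not part of the original project.

import java.io.IOException;
import java.io.OutputStream;

import javax.microedition.io.Connector;
import javax.microedition.io.HttpConnection;

// Hedged sketch: POSTs the Base64 string as a form field over HTTP.
private int postAudio(String base64Audio) throws IOException {
    // In real code the Base64 value should be URL-encoded before sending.
    byte[] body = ("audio=" + base64Audio).getBytes();
    // Assumed endpoint; replace with the real server URL.
    HttpConnection conn = (HttpConnection) Connector
            .open("http://project_name/upload_audio.php;deviceside=true");
    try {
        conn.setRequestMethod(HttpConnection.POST);
        conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
        conn.setRequestProperty("Content-Length", Integer.toString(body.length));
        OutputStream out = conn.openOutputStream();
        out.write(body);
        out.close();
        return conn.getResponseCode(); // e.g. HttpConnection.HTTP_OK on success
    } finally {
        conn.close();
    }
}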
The code below plays the audio back on the BlackBerry device without streaming: we fetch the saved byte array (as a Base64 string) from the server by reading an XML response, decode it, and play it.

XML reading code:
import java.io.InputStream;

import javax.microedition.io.Connector;
import javax.microedition.io.HttpConnection;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;

import org.w3c.dom.Document;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

import net.rim.device.api.system.WLANInfo;

String strAudio;

// httpURL, htpconn, docBuilderFactory, docBuilder and inputStream are class
// members declared elsewhere; firstPersonElement is the record element taken
// from the parsed document (that step is not shown here).
private void ReadXML() {
    try {
        httpURL = "http://project_name/file_name.php";
        if (WLANInfo.getWLANState() == WLANInfo.WLAN_STATE_CONNECTED) {
            htpconn = (HttpConnection) Connector.open(httpURL + ";interface=wifi", Connector.READ_WRITE);
        } else {
            htpconn = (HttpConnection) Connector.open(httpURL + ";deviceside=true");
        }
        int res = htpconn.getResponseCode();

        docBuilderFactory = DocumentBuilderFactory.newInstance();
        docBuilder = docBuilderFactory.newDocumentBuilder();
        inputStream = htpconn.openInputStream();
        Document doc = docBuilder.parse(inputStream);
        doc.getDocumentElement().normalize();
        System.out.println("Root element of the doc is " + doc.getDocumentElement().getNodeName());

        // Audio data
        NodeList firstAudioList = ((org.w3c.dom.Element) firstPersonElement).getElementsByTagName("audio");
        int child_lng_audio = firstAudioList.getLength();
        for (int j = 0; j < child_lng_audio; j++) {
            Node innernode = firstAudioList.item(j);
            Node firstPersonElement1 = innernode;
            NamedNodeMap attributes = innernode.getAttributes();
            // The text node inside <audio> holds the Base64 string
            // (getNodeValue() is the portable way to read it).
            Node value = firstAudioList.item(j).getChildNodes().item(0);
            strAudio = value.toString();
        }
    } catch (Exception e) {
        System.out.println("Error in ReadXML() : " + e);
    }
}
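ReadXML() uses firstPersonElement without showing where it comes from. Below is a hedged sketch of that missing step, assuming the server response looks roughly like <records><person><audio>BASE64...</audio></person></records>; apart from the <audio> tag, the element names are assumptions.

// Hedged sketch: obtain the parent element that <audio> is read from.
// Only the "audio" tag name comes from the post; "person" is assumed.
NodeList personList = doc.getElementsByTagName("person");
org.w3c.dom.Element firstPersonElement = null;
if (personList.getLength() > 0) {
    firstPersonElement = (org.w3c.dom.Element) personList.item(0);
}

Once ReadXML() has filled strAudio, the play function shown below decodes it and plays it.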
Declare the recorder thread as a class member and create it in the constructor of the class:

private AudioRecorderThread myAudioRecorderThread = null;

// In the constructor:
myAudioRecorderThread = new AudioRecorderThread();
Play function: decode the Base64 string and hand the bytes to the recorder thread for playback.

import net.rim.device.api.io.Base64InputStream;

private void Play_audio() {
    try {
        byte[] decoded = Base64InputStream.decode(strAudio);
        // myAudioRecorderThread.run();
        myAudioRecorderThread._audioArray = decoded;
        myAudioRecorderThread.play();
    } catch (Exception e) {
        // SOP() is a println-style logging helper defined elsewhere in the class.
        SOP("error at Play mp3 " + e.getMessage());
    }
}
Thank you,
Mahesh Nawale