Commit cb2f21ad authored by mistermad

- Added "undo" button in Elicitation mode

- Modifying existing "undo" buttons to ImageButton
- Support phone screens in Elicitation mode
- Added a heading in the summary of segments of the mode Respeaking and Translating
- Logs will only work in debug mode
parent 132407ab
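
Most hunks in this commit apply one pattern: each android.util.Log call is guarded by BuildConfig.DEBUG so logging only runs in debug builds. A minimal sketch of that pattern as a hypothetical helper (DebugLog is illustrative and not part of the commit; BuildConfig is the module's generated class):

import android.util.Log;
import org.getalp.ligaikuma.lig_aikuma.lig_aikuma.BuildConfig;

/** Illustrative helper: forwards to android.util.Log only in debug builds. */
final class DebugLog {
    private DebugLog() {}

    static void i(String tag, String msg) {
        // BuildConfig.DEBUG is a compile-time constant, so release builds skip this branch.
        if (BuildConfig.DEBUG) Log.i(tag, msg);
    }

    static void e(String tag, String msg) {
        if (BuildConfig.DEBUG) Log.e(tag, msg);
    }
}

Calling DebugLog.i(TAG, "num: " + recordings.size()) is equivalent to the inline if(BuildConfig.DEBUG)Log.i(...) guards added throughout the diff below.
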
......@@ -6,11 +6,11 @@ android {
useLibrary 'org.apache.http.legacy'
defaultConfig {
applicationId "org.getalp.ligaikuma"
minSdkVersion 16
minSdkVersion 14
targetSdkVersion 25
multiDexEnabled = true
versionCode 1
versionName "1.0"
versionCode 2
versionName "2.0"
//testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
}
buildTypes {
......
......@@ -49,7 +49,6 @@ public class Aikuma extends android.app.Application {
private static SharedPreferences preferences;
private static List<Language> languages;
public static Context appContext = null;
/**
......
......@@ -45,6 +45,7 @@ import com.google.android.gms.common.GooglePlayServicesUtil;
import com.musicg.wave.Wave;
import org.apache.commons.io.FileUtils;
import org.getalp.ligaikuma.lig_aikuma.lig_aikuma.BuildConfig;
import org.getalp.ligaikuma.lig_aikuma.lig_aikuma.R;
import org.getalp.ligaikuma.lig_aikuma.model.Recording;
import org.getalp.ligaikuma.lig_aikuma.service.GoogleCloudService;
......@@ -108,7 +109,7 @@ public class MainActivity extends ListActivity {
googleAuthToken = settings.getString(AikumaSettings.SETTING_AUTH_TOKEN_KEY, null);
googleIdToken = settings.getString(AikumaSettings.SETTING_ID_TOKEN_KEY, null);
googleAPIScope = AikumaSettings.getScope();
Log.i(TAG, "Account: " + emailAccount + ", scope: " + googleAPIScope);
if(BuildConfig.DEBUG)Log.i(TAG, "Account: " + emailAccount + ", scope: " + googleAPIScope);
AikumaSettings.setUserId(emailAccount);
showUserAccount(emailAccount, null);
......@@ -158,7 +159,7 @@ public class MainActivity extends ListActivity {
try {
updateRecordingView(Recording.read(verName, ownerId, recordingId));
} catch (IOException e) {
Log.e(TAG, e.getMessage());
if(BuildConfig.DEBUG)Log.e(TAG, e.getMessage());
}
}
}
......@@ -169,7 +170,7 @@ public class MainActivity extends ListActivity {
try {
Recording.indexAll();
} catch (IOException e) {
Log.e(TAG, e.getMessage());
if(BuildConfig.DEBUG)Log.e(TAG, e.getMessage());
}
//TODO: Update existing files
......@@ -204,7 +205,7 @@ public class MainActivity extends ListActivity {
super.onResume();
List<Recording> recordings = Recording.readAll();
Log.i(TAG, "num: " +recordings.size());
if(BuildConfig.DEBUG)Log.i(TAG, "num: " +recordings.size());
// Filter the recordings for originals
originals = new ArrayList<Recording>();
......@@ -213,7 +214,7 @@ public class MainActivity extends ListActivity {
originals.add(recording);
}
}
Log.i(TAG, "original num: " + originals.size());
if(BuildConfig.DEBUG)Log.i(TAG, "original num: " + originals.size());
adapter = new RecordingArrayAdapter(this, originals);
/*
......@@ -221,9 +222,8 @@ public class MainActivity extends ListActivity {
adapter.getFilter().filter(searchView.getQuery());
}*/
setListAdapter(adapter);
if (listViewState != null) {
if (listViewState != null)
getListView().onRestoreInstanceState(listViewState);
}
MainActivity.locationDetector.start();
}
......@@ -269,7 +269,7 @@ public class MainActivity extends ListActivity {
private void checkDate() {
Calendar calendar = Calendar.getInstance();
int year = calendar.get(Calendar.YEAR);
Log.i(TAG, "year: " + year);
if(BuildConfig.DEBUG)Log.i(TAG, "year: " + year);
if(year < 2000) {
new AlertDialog.Builder(this)
.setTitle("Set the current date correctly")
......@@ -293,8 +293,7 @@ public class MainActivity extends ListActivity {
* @param message String to display
*/
public void showProgressDialog(String message) {
progressDialog =
ProgressDialog.show(this, "Update", message);
progressDialog = ProgressDialog.show(this, "Update", message);
}
/**
......@@ -321,10 +320,7 @@ public class MainActivity extends ListActivity {
public void showUserAccount(String userId, String token) {
TextView userIdView = (TextView) findViewById(R.id.userIdView);
if(userId != null) {
if(token != null)
userIdView.setTextColor(Color.BLACK);
else
userIdView.setTextColor(Color.GRAY);
userIdView.setTextColor((token != null)?Color.BLACK:Color.GRAY);
userIdView.setText(userId);
} else {
userIdView.setText("");
......@@ -356,7 +352,6 @@ public class MainActivity extends ListActivity {
new View.OnFocusChangeListener() {
@Override
public void onFocusChange(View v, boolean hasFocus) {
// TODO Auto-generated method stub
if(!hasFocus) {
searchMenuItem.collapseActionView();
//searchView.setQuery("", false);
......@@ -369,7 +364,6 @@ public class MainActivity extends ListActivity {
new SearchView.OnQueryTextListener() {
@Override
public boolean onQueryTextSubmit(String query) {
// TODO Auto-generated method stub
adapter.getFilter().filter(query);
searchView.clearFocus();
return true;
......@@ -377,7 +371,6 @@ public class MainActivity extends ListActivity {
@Override
public boolean onQueryTextChange(String newText) {
// TODO Auto-generated method stub
adapter.getFilter().filter(newText);
return true;
}
......@@ -402,7 +395,7 @@ public class MainActivity extends ListActivity {
AikumaSettings.getCurrentUserId());
syncIntent.putExtra(GoogleCloudService.TOKEN_KEY,
AikumaSettings.getCurrentUserToken());
syncIntent.putExtra("forceSync", forceSync);
syncIntent.putExtra("forceSync", true);
startService(syncIntent);
}
} else {
......@@ -454,7 +447,7 @@ public class MainActivity extends ListActivity {
public void getAccountToken() {
int statusCode = GooglePlayServicesUtil.isGooglePlayServicesAvailable(this);
if (statusCode == ConnectionResult.SUCCESS) {
Log.i(TAG, "getAccountToken");
if(BuildConfig.DEBUG)Log.i(TAG, "getAccountToken");
//TODO: Sign-out, Sign-in with other accounts
if(AikumaSettings.getCurrentUserId() == null)
......@@ -507,8 +500,7 @@ public class MainActivity extends ListActivity {
AikumaSettings.setUserId(emailAccount);
showUserAccount(emailAccount, null);
menuBehaviour.setSignInState(true);
settings.edit().putString(
AikumaSettings.SETTING_OWNER_ID_KEY, emailAccount).commit();
settings.edit().putString(AikumaSettings.SETTING_OWNER_ID_KEY, emailAccount).commit();
if (Aikuma.isDeviceOnline()) {
new GetTokenTask(emailAccount, googleAPIScope,
......@@ -623,12 +615,11 @@ public class MainActivity extends ListActivity {
AikumaSettings.setUserToken(googleAuthToken);
AikumaSettings.setUserIdToken(googleIdToken);
} catch (IOException e) {
// TODO Auto-generated catch block
Log.e(TAG, e.getMessage());
if(BuildConfig.DEBUG)Log.e(TAG, e.getMessage());
return false;
}
Log.i(TAG, "access token: " + googleAuthToken);
Log.i(TAG, "identity token: " + googleIdToken);
if(BuildConfig.DEBUG)Log.i(TAG, "access token: " + googleAuthToken);
if(BuildConfig.DEBUG)Log.i(TAG, "identity token: " + googleIdToken);
return true;
}
......@@ -659,7 +650,7 @@ public class MainActivity extends ListActivity {
// Show the user some UI through the activity.
handleException(userRecoverableException);
} catch (GoogleAuthException fatalException) {
Log.e(TAG, "Unrecoverable error " + fatalException.getMessage());
if(BuildConfig.DEBUG)Log.e(TAG, "Unrecoverable error " + fatalException.getMessage());
}
}
}
......@@ -736,14 +727,14 @@ public class MainActivity extends ListActivity {
builder.setTitle("Import audio file");
if(mFileList == null) {
Log.e("importfile", "Showing file picker before loading the file list");
if(BuildConfig.DEBUG)Log.e("importfile", "Showing file picker before loading the file list");
dialog = builder.create();
return dialog;
}
builder.setItems(mFileList, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
mChosenFile = mFileList[which];
Log.i("importfile", "mChosenFile: " + mChosenFile);
if(BuildConfig.DEBUG)Log.i("importfile", "mChosenFile: " + mChosenFile);
mPath = new File(mPath, mChosenFile);
if (mPath.isDirectory()) {
loadFileList(mPath, ".wav");
......
......@@ -34,8 +34,7 @@ public class Audio {
*/
public static void playThroughEarpiece(Activity activity, boolean toSetMode) {
AudioManager audioManager = getAudioManager(activity);
if(toSetMode)
audioManager.setMode(AudioManager.MODE_IN_CALL);
if(toSetMode) audioManager.setMode(AudioManager.MODE_IN_CALL);
audioManager.setSpeakerphoneOn(false);
}
......
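
The playThroughEarpiece change above is formatting only; for context, a caller routes playback to the earpiece and later restores the loudspeaker roughly like this (a sketch using only standard AudioManager calls, not code from this commit):

// Sketch: route playback to the earpiece, then restore the loudspeaker.
static void demoEarpieceRouting(android.app.Activity activity) {
    android.media.AudioManager am = (android.media.AudioManager)
            activity.getSystemService(android.content.Context.AUDIO_SERVICE);
    am.setMode(android.media.AudioManager.MODE_IN_CALL);   // what playThroughEarpiece() sets
    am.setSpeakerphoneOn(false);
    // ...play audio here...
    am.setMode(android.media.AudioManager.MODE_NORMAL);    // back to normal routing
    am.setSpeakerphoneOn(true);
}
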
......@@ -6,6 +6,7 @@ package org.getalp.ligaikuma.lig_aikuma.audio;
import android.util.Log;
import org.getalp.ligaikuma.lig_aikuma.lig_aikuma.BuildConfig;
import org.getalp.ligaikuma.lig_aikuma.model.Recording;
import org.getalp.ligaikuma.lig_aikuma.model.Segments;
import org.getalp.ligaikuma.lig_aikuma.model.Segments.Segment;
......@@ -191,7 +192,7 @@ public class InterleavedPlayer extends Player {
private class OriginalMarkerReachedListener extends
MarkedPlayer.OnMarkerReachedListener {
public void onMarkerReached(MarkedPlayer p) {
Log.i("release", "original onMarker reached, completedOnce = " + completedOnce);
if(BuildConfig.DEBUG)Log.i("release", "original onMarker reached, completedOnce = " + completedOnce);
original.pause();
playRespeaking();
}
......@@ -204,7 +205,7 @@ public class InterleavedPlayer extends Player {
private class RespeakingMarkerReachedListener extends
MarkedPlayer.OnMarkerReachedListener {
public void onMarkerReached(MarkedPlayer p) {
Log.i("release", "respeaking onMarker reached, completedOnce = " + completedOnce);
if(BuildConfig.DEBUG)Log.i("release", "respeaking onMarker reached, completedOnce = " + completedOnce);
respeaking.pause();
if(!completedOnce) {
advanceOriginalSegment();
......
......@@ -196,11 +196,11 @@ public class MarkedPlayer extends SimplePlayer
}
/* For later debugging
* (This is commented out because of too many logs)
Log.i(TAG, "notification marker position msec: " +
if(debug)Log.i(TAG, "notification marker position msec: " +
getNotificationMarkerPositionMsec() +
"\ngetCurentMsec(): " + getCurrentMsec());
Log.i(TAG, "notification marker position sample: " +
if(debug)Log.i(TAG, "notification marker position sample: " +
msecToSample(getNotificationMarkerPositionMsec()) +
"\ngetCurentMsec() as sample: " +
msecToSample(getCurrentMsec()));
......@@ -225,7 +225,7 @@ public class MarkedPlayer extends SimplePlayer
*/
private Thread notificationMarkerLoop;
private static final String TAG = "MarkedPlayer";
//private static final String TAG = "MarkedPlayer";
private static int count = 0;
//////////////////////////////////////////////////////////////////////////
......
......@@ -8,6 +8,7 @@ import android.media.AudioManager;
import android.media.MediaPlayer;
import android.util.Log;
import org.getalp.ligaikuma.lig_aikuma.lig_aikuma.BuildConfig;
import org.getalp.ligaikuma.lig_aikuma.model.Recording;
import java.io.File;
......@@ -40,11 +41,11 @@ public class SimplePlayer extends Player implements Sampler
setRecording(recording);
mediaPlayer = new MediaPlayer();
mediaPlayer.setAudioStreamType((playThroughSpeaker)?AudioManager.STREAM_MUSIC:AudioManager.STREAM_VOICE_CALL);
Log.i("SimplePlayer", "Media Player - data source: " + recording.getFile().getCanonicalPath());
if(BuildConfig.DEBUG)Log.i("SimplePlayer", "Media Player - data source: " + recording.getFile().getCanonicalPath());
mediaPlayer.setDataSource(recording.getFile().getCanonicalPath());
Log.i("SimplePlayer", "Media Player - preparing...");
if(BuildConfig.DEBUG)Log.i("SimplePlayer", "Media Player - preparing...");
mediaPlayer.prepare();
Log.i("SimplePlayer", "Media Player - sample rate: " + recording.getSampleRate());
if(BuildConfig.DEBUG)Log.i("SimplePlayer", "Media Player - sample rate: " + recording.getSampleRate());
setSampleRate(recording.getSampleRate());
}
......
......@@ -19,7 +19,7 @@ public interface AudioHandler {
* @param justChanged Whether the caller has just changed
* from silence to audio.
*/
public void audioTriggered(short[] buffer, boolean justChanged);
void audioTriggered(short[] buffer, boolean justChanged);
/**
* The caller has detected silent audio.
......@@ -28,5 +28,5 @@ public interface AudioHandler {
* @param justChanged Whether the caller has just changed
* from audio to silence.
*/
public void silenceTriggered(short[] buffer, boolean justChanged);
void silenceTriggered(short[] buffer, boolean justChanged);
}
......@@ -17,6 +17,6 @@ public interface AudioListener {
*
* @param buffer An array of samples.
*/
public void onBufferFull(short[] buffer);
void onBufferFull(short[] buffer);
}
......@@ -7,6 +7,7 @@ package org.getalp.ligaikuma.lig_aikuma.audio.record;
import android.util.Log;
import org.getalp.ligaikuma.lig_aikuma.audio.Sampler;
import org.getalp.ligaikuma.lig_aikuma.lig_aikuma.BuildConfig;
import org.getalp.ligaikuma.lig_aikuma.model.Recording;
import org.getalp.ligaikuma.lig_aikuma.model.Segments;
......@@ -38,8 +39,7 @@ public class Mapper {
private Long originalStartOfSegment = 0L;
private Long originalEndOfSegment;
private Long respeakingStartOfSegment = 0L;
private Long respeakingEndOfSegment;
/** The mapping file */
private File mappingFile;
......@@ -54,11 +54,8 @@ public class Mapper {
try {
reader = new BufferedReader(new InputStreamReader(new FileInputStream(this.mappingFile)));
restoreFromMappingFile();
Log.d("mapper generation", "yes");
} catch (FileNotFoundException e) {
e.printStackTrace();
if(BuildConfig.DEBUG)Log.d("mapper generation", "yes");
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
......@@ -87,27 +84,23 @@ public class Mapper {
* @return The start of the original segment; or 0L if there is none.
*/
public Long getOriginalStartSample() {
if (originalStartOfSegment != null) {
return originalStartOfSegment;
} else {
return 0L;
}
return (originalStartOfSegment != null)? originalStartOfSegment: 0L;
}
private void restoreFromMappingFile() throws IOException {
String line;
while ((line = reader.readLine()) != null && (line.isEmpty() || line.split(":").length <= 1)) { continue; }
Log.d("line", line);
while ((line = reader.readLine()) != null && (line.isEmpty() || line.split(":").length <= 1));
if(BuildConfig.DEBUG)Log.d("line", line);
do {
String[] pair = line.split(":");
String[] strFirstSeg = pair[0].split(",");
String[] strSecondSeg = pair[1].split(",");
Segments.Segment original = new Segments.Segment(Long.parseLong(strFirstSeg[0]), Long.parseLong(strFirstSeg[1]));
Log.d("original seg ", original.getStartSample() + " -> " + original.getEndSample());
if(BuildConfig.DEBUG)Log.d("original seg ", original.getStartSample() + " -> " + original.getEndSample());
Segments.Segment rspkSeg = new Segments.Segment(Long.parseLong(strSecondSeg[0]), Long.parseLong(strSecondSeg[1]));
Log.d("rspkSeg seg ", rspkSeg.getStartSample() + " -> " + rspkSeg.getEndSample());
if(BuildConfig.DEBUG)Log.d("rspkSeg seg ", rspkSeg.getStartSample() + " -> " + rspkSeg.getEndSample());
segments.put(original, rspkSeg);
Log.d("segments size", ""+segments.getSegmentMap().size());
if(BuildConfig.DEBUG)Log.d("segments size", ""+segments.getSegmentMap().size());
originalStartOfSegment = original.getEndSample();
} while((line = reader.readLine()) != null && !line.isEmpty() && line.split(":").length > 1);
}
......@@ -123,7 +116,7 @@ public class Mapper {
// originalStartOfSegment
if (originalEndOfSegment != null) {
originalStartOfSegment = original.getCurrentSample();
Log.d("mark", "mark original segment : " + original.getCurrentSample());
if(BuildConfig.DEBUG)Log.d("mark", "mark original segment : " + original.getCurrentSample());
}
}
......@@ -149,24 +142,19 @@ public class Mapper {
* @return Returns true if a segment gets stored; false otherwise.
*/
public boolean store(Sampler original, Sampler respoken) {
//If we're not respeaking and still playing an original segment, do
//nothing
if (originalEndOfSegment == null) {
return false;
}
//If we're not respeaking and still playing an original segment, do nothing
if (originalEndOfSegment == null) return false;
//Otherwise lets end this respeaking segment
respeakingEndOfSegment = respoken.getCurrentSample();
Long respeakingEndOfSegment = respoken.getCurrentSample();
//And store these two segments
Segments.Segment originalSegment;
try {
originalSegment = new Segments.Segment(originalStartOfSegment,
originalEndOfSegment);
originalSegment = new Segments.Segment(originalStartOfSegment, originalEndOfSegment);
} catch (IllegalArgumentException e) {
// This could only have happened if no original had been recorded at all.
originalSegment = new Segments.Segment(0l, 0l);
}
Segments.Segment respeakingSegment = new Segments.Segment(respeakingStartOfSegment,
respeakingEndOfSegment);
Segments.Segment respeakingSegment = new Segments.Segment(respeakingStartOfSegment, respeakingEndOfSegment);
segments.put(originalSegment, respeakingSegment);
//Now we say we're marking the start of the new original and respekaing
//segments
......@@ -174,9 +162,6 @@ public class Mapper {
respeakingStartOfSegment = respoken.getCurrentSample();
//We currently have no end for these segments.
originalEndOfSegment = null;
respeakingEndOfSegment = null;
return true;
}
......
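
For context on restoreFromMappingFile and store above: each non-empty mapping-file line pairs an original segment with its respeaking segment as sample offsets, in the form origStart,origEnd:respkStart,respkEnd. A sketch of the parsing with made-up values (the numbers are illustrative, not taken from the commit):

// A mapping file with two respoken segments might read:
//   0,44100:0,52000
//   44100,96000:52000,110000
String line = "0,44100:0,52000";
String[] pair = line.split(":");            // original half vs. respeaking half
String[] orig = pair[0].split(",");         // {"0", "44100"}
String[] rspk = pair[1].split(",");         // {"0", "52000"}
long originalStart   = Long.parseLong(orig[0]);
long originalEnd     = Long.parseLong(orig[1]);
long respeakingStart = Long.parseLong(rspk[0]);
long respeakingEnd   = Long.parseLong(rspk[1]);
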
......@@ -9,8 +9,12 @@ import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.media.audiofx.AudioEffect;
import android.media.audiofx.AutomaticGainControl;
import android.os.Build;
import android.support.annotation.RequiresApi;
import android.util.Log;
import org.getalp.ligaikuma.lig_aikuma.lig_aikuma.BuildConfig;
import java.util.Arrays;
/**
......@@ -46,7 +50,7 @@ public class Microphone {
AudioFormat.CHANNEL_IN_MONO
);
boolean acgOff = ensureAGCIsOff();
Log.i("agc", "Is the ACG is definitely off: " + acgOff);
if(BuildConfig.DEBUG)Log.i("agc", "Is the ACG is definitely off: " + acgOff);
if (physicalMicrophone.getState() != AudioRecord.STATE_INITIALIZED) {
throw new MicException("Microphone failed to initialize");
};
......@@ -58,24 +62,13 @@ public class Microphone {
*
* @return true if ACG is guaranteed to be off; false otherwise.
*/
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
private boolean ensureAGCIsOff() {
int audioSessionId = physicalMicrophone.getAudioSessionId();
try {
AutomaticGainControl agc = AutomaticGainControl.create(audioSessionId);
if (agc == null) {
//The device does not implement automatic gain control.
return true;
}
if (agc.getEnabled()) {
int result = agc.setEnabled(false);
if (result == AudioEffect.SUCCESS) {
return true;
} else {
return false;
}
} else {
return true;
}
//The device does not implement automatic gain control.
return agc == null || !agc.getEnabled() || agc.setEnabled(false) == AudioEffect.SUCCESS;
} catch (NoSuchMethodError e) {
// In such circumstances the device is using an API < 16, which
// means there is no AutomaticGainControl class available. The
......@@ -108,10 +101,10 @@ public class Microphone {
public void listen(final MicrophoneListener callback) {
// If there is already a thread listening then kill it and ensure it's
// dead before creating a new thread.
Log.i("thread", "listen");
if (t != null) {
if(BuildConfig.DEBUG)Log.i("thread", "listen");
if(t != null) {
t.interrupt();
while (t.isAlive()) {}
while(t.isAlive());
}
// Simply reads and reads...
......@@ -131,9 +124,8 @@ public class Microphone {
*/
public void stop() throws MicException {
physicalMicrophone.stop();
if (physicalMicrophone.getState() != AudioRecord.RECORDSTATE_STOPPED) {
if (physicalMicrophone.getState() != AudioRecord.RECORDSTATE_STOPPED)
throw new MicException("Failed to stop the microphone.");
}
}
/**
......@@ -160,20 +152,10 @@ public class Microphone {
int sampleRate, int audioFormat, int channelConfig) {
// Sample size.
int sampleSize;
if (audioFormat == AudioFormat.ENCODING_PCM_16BIT) {
sampleSize = 16;
} else {
sampleSize = 8;
}
int sampleSize = (audioFormat == AudioFormat.ENCODING_PCM_16BIT)? 16: 8;
// Channels.
int numberOfChannels;
if (channelConfig == AudioFormat.CHANNEL_IN_MONO) {
numberOfChannels = 1;
} else {
numberOfChannels = 2;
}
int numberOfChannels = (channelConfig == AudioFormat.CHANNEL_IN_MONO)? 1: 2;
// Calculate buffer size.
/** The period used for callbacks to onBufferFull. */
......@@ -192,20 +174,18 @@ public class Microphone {
// Wait until something is heard.
while (true) {
Log.i("thread", "read");
if (physicalMicrophone.read(buffer, 0, buffer.length) <= 0) {
if(BuildConfig.DEBUG)Log.i("thread", "read");
if (physicalMicrophone.read(buffer, 0, buffer.length) <= 0)
break;
}
if (Thread.interrupted()) {
Log.i("thread", "interrupted");
if(BuildConfig.DEBUG)Log.i("thread", "interrupted");
return;
}
// Hand the callback a copy of the buffer.
if (callback != null) {
if (callback != null)
callback.onBufferFull(Arrays.copyOf(buffer, buffer.length));
}
}
}
......
......@@ -17,6 +17,6 @@ public interface MicrophoneListener {
*
* @param buffer An array of samples.
*/
public void onBufferFull(short[] buffer);
void onBufferFull(short[] buffer);
}
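
A typical consumer of the Microphone.listen() loop shown above implements this interface and hands each buffer copy to a writer; a minimal sketch assuming existing microphone and pcmWriter objects from this codebase:

microphone.listen(new MicrophoneListener() {
    @Override
    public void onBufferFull(short[] buffer) {
        // buffer is already a copy (Arrays.copyOf in the read loop), so it is safe to keep.
        pcmWriter.write(buffer, buffer.length);
    }
});
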
......@@ -14,6 +14,7 @@ import android.media.AudioRecord;
import android.util.Log;
import org.getalp.ligaikuma.lig_aikuma.audio.Sampler;
import org.getalp.ligaikuma.lig_aikuma.lig_aikuma.BuildConfig;
/**
......@@ -111,9 +112,7 @@ public class PCMWriter implements Sampler {
}
public RandomAccessFile getRandomAccessWriter() {
return randomAccessWriter;
}
//public RandomAccessFile getRandomAccessWriter() {return randomAccessWriter;}
/**
* Write the given byte buffer to the file.
......@@ -131,15 +130,15 @@ public class PCMWriter implements Sampler {
// Remember larger payload.
//
payloadSize += buffer.length;
Log.d("payLoadSize", "payLoadSize = " + payloadSize);
if(BuildConfig.DEBUG)Log.d("payLoadSize", "payLoadSize = " + payloadSize);
} catch (IOException e) {
Log.e(PCMWriter.class.getName(),
if(BuildConfig.DEBUG)Log.e(PCMWriter.class.getName(),
"Error occured in updateListener, recording is aborted");
}
if (sampleSize == 16) {
this.currentSample += buffer.length / 2;
Log.d("currentSaple", "currentSample = " + this.currentSample);
if(BuildConfig.DEBUG)Log.d("currentSaple", "currentSample = " + this.currentSample);
} else {
//Assume sample size is 8.
this.currentSample += buffer.length;
......@@ -163,9 +162,8 @@ public class PCMWriter implements Sampler {
*/
public void write(short[] buffer, int len) {
byte[] byteBuffer = new byte[len * 2];
for (int i = 0; i < len; i++) {
for(int i = 0; i < len; i++) {
short sample = buffer[i];
// TODO Use Java helpers?
byteBuffer[i * 2] = (byte) sample;
byteBuffer[i * 2 + 1] = (byte) (sample >>> 8);
}
......@@ -186,19 +184,11 @@ public class PCMWriter implements Sampler {
// Sample size.
//
if (audioFormat == AudioFormat.ENCODING_PCM_16BIT) {
sampleSize = 16;
} else {
sampleSize = 8;
}
sampleSize = (short) ((audioFormat == AudioFormat.ENCODING_PCM_16BIT)? 16: 8);
// Channels.
//
if (channelConfig == AudioFormat.CHANNEL_IN_MONO) {
numberOfChannels = 1;
} else {
numberOfChannels = 2;
}
numberOfChannels = (short) ((channelConfig == AudioFormat.CHANNEL_IN_MONO)? 1: 2);
// These are needed to save the file correctly.
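
The sampleSize and numberOfChannels computed above feed the WAV header; the remaining header fields follow from them (a sketch of ordinary PCM/WAV arithmetic, not code from this hunk, with illustrative values):

int sampleRate = 16000;                                          // assumed recorder rate
short sampleSize = 16;                                           // ENCODING_PCM_16BIT
short numberOfChannels = 1;                                      // CHANNEL_IN_MONO
short blockAlign = (short) (numberOfChannels * sampleSize / 8);  // bytes per frame = 2
int byteRate = sampleRate * blockAlign;                          // bytes per second = 32000
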