Byte array audio encryption - java

I have a byte array whose lines contain bytes of microphone audio.
I want to encrypt it line by line, but I'm not sure the encryption is actually being applied to the byte array, because when I play the audio back I still hear the clear voice.
This is the encryption method
// NOTE(review): incomplete fragment (starts mid-method; the enclosing method and
// some closing braces are not shown). It opens a microphone capture line and
// copies raw bytes into the shared `out` stream. encryptBuff() is only called
// AFTER the cleartext chunk has already been appended to `out` — presumably why
// playback still sounds unencrypted; verify what encryptBuff() actually mutates.
if (AudioSystem.isLineSupported(Port.Info.SPEAKER)) {
try {
final AudioFormat format = getFormat();
// Request a capture (TargetDataLine) supporting the desired format.
DataLine.Info info = new DataLine.Info(
TargetDataLine.class, format);
final TargetDataLine line = (TargetDataLine) AudioSystem.getLine(info);
line.open(format);
line.start();
Runnable runner = new Runnable() {
// One second of audio per read: sampleRate frames * frameSize bytes/frame.
int bufferSize = (int) format.getSampleRate()
* format.getFrameSize();
byte buffer[] = new byte[bufferSize];
public void run() {
out = new ByteArrayOutputStream();
running = true;
System.out.println("running..........");
int lines = 0;
try {
// Capture until another thread clears the shared `running` flag.
while (running) {
int count
= line.read(buffer, 0, buffer.length);
if (count > 0) {
// Raw (unencrypted) bytes land in `out` here; encryptBuff()
// runs afterwards — TODO confirm it encrypts `out` rather than
// `buffer`, whose contents were already written.
out.write(buffer, 0, count);
lines++;
System.out.println("lines written =" + lines);
encryptBuff();
}
}
out.close();
} catch (IOException e) {
System.err.println("I/O problems: " + e);
// NOTE(review): exiting the whole JVM from a capture thread is drastic.
System.exit(-1);
} catch (Exception ex) {
Logger.getLogger(SC.class.getName()).log(Level.SEVERE, null, ex);
}
}
};
Thread captureThread = new Thread(runner);
captureThread.start();
} catch (LineUnavailableException e) {
System.err.println("Line unavailable: " + e);
System.exit(-2);
}
And this is the audio-playback code:
/**
 * Plays back the audio currently buffered in {@code out} on the default
 * speaker line. Playback runs on a background thread so the caller is not
 * blocked.
 *
 * <p>Fix over the original: the {@code AudioInputStream} (and its backing
 * byte-array stream) is now closed in a {@code finally} block once playback
 * finishes or fails, instead of being leaked.</p>
 */
private void playAudio() {
    try {
        // Snapshot the captured bytes (out is filled by the capture thread).
        byte audio[] = out.toByteArray();
        InputStream input = new ByteArrayInputStream(audio);
        final AudioFormat format = getFormat();
        // AudioInputStream length is expressed in frames, hence the division.
        final AudioInputStream ais = new AudioInputStream(input, format,
                audio.length / format.getFrameSize());
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
        final SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);
        line.open(format);
        line.start();
        Runnable runner = new Runnable() {
            // One second of audio per write: frames/sec * bytes/frame.
            int bufferSize = (int) format.getSampleRate() * format.getFrameSize();
            byte buffer[] = new byte[bufferSize];

            public void run() {
                try {
                    int count;
                    while ((count = ais.read(buffer, 0, buffer.length)) != -1) {
                        if (count > 0) {
                            line.write(buffer, 0, count);
                        }
                    }
                    // Let queued audio finish before releasing the line.
                    line.drain();
                    line.close();
                } catch (IOException e) {
                    System.err.println("I/O problems: " + e);
                    System.exit(-3);
                } finally {
                    try {
                        ais.close(); // also closes the backing ByteArrayInputStream
                    } catch (IOException ignored) {
                        // best-effort cleanup; nothing sensible to do here
                    }
                }
            }
        };
        Thread playThread = new Thread(runner);
        playThread.start();
    } catch (LineUnavailableException e) {
        System.err.println("Line unavailable: " + e);
        System.exit(-4);
    }
}

Related

I can't find the file on my phone while using new File() function

I'm trying to make a voice changer, but I can't find the file.
this is my code.
this code is after recording the voice and adding the effect.
// Reads the recorded PCM back from external storage and plays it with AudioTrack.
// NOTE(review): incomplete fragment — the closing brace below ends a method whose
// signature is not shown. "test.pcm" is looked up in getExternalStorageDirectory();
// if the recording step wrote elsewhere, file.length() is 0 and nothing plays.
File file = new File(Environment.getExternalStorageDirectory(), "test.pcm");
int shortSizeInBytes = Short.SIZE/Byte.SIZE;
// NOTE(review): despite the name, this is a count of 16-bit samples, not bytes.
int bufferSizeInBytes = (int)(file.length()/shortSizeInBytes);
short[] audioData = new short[bufferSizeInBytes];
try {
InputStream inputStream = new FileInputStream(file);
BufferedInputStream bufferedInputStream = new BufferedInputStream(inputStream);
DataInputStream dataInputStream = new DataInputStream(bufferedInputStream);
int i = 0;
// NOTE(review): readShort() is big-endian; PCM recorders commonly emit
// little-endian — confirm the byte order matches the recording side.
while(dataInputStream.available() > 0){
audioData[i] = dataInputStream.readShort();
i++;
}
dataInputStream.close();
int Freq = Integer.parseInt( txtFreq.getText().toString());
// Positional AudioTrack constructor — presumably (streamType=3/MUSIC, sampleRate,
// channelConfig=2, audioFormat=2, bufferSize, mode=1/STREAM); the raw ints make
// this hard to verify — consider the AudioManager/AudioFormat named constants.
AudioTrack audioTrack = new AudioTrack(
3,
Freq,
2,
2,
bufferSizeInBytes,
1);
audioTrack.play();
// write(short[], offset, size) takes a length in shorts, matching audioData.
audioTrack.write(audioData, 0, bufferSizeInBytes);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
So in which directory can I find the file?

How to record sound from the microphone?

I'm trying to figure out how to record sound from the microphone using my MacBookPro. I'm not sure if this is a problem with the code or if this is a problem with hidden permissions within MacOS.
I've been through most of the tutorials and tried various different solutions to the point that I've created the test case below.
// Standalone tester: enumerates every mixer exposing a TargetDataLine, records
// ~100 KB from each microphone line, then replays each capture on a
// SourceDataLine. (Per the author's own follow-up in this thread: all-zero
// captures on macOS were caused by the OS microphone permission — granting the
// JVM/IDE microphone access fixed it, not a code change.)
public class AudioLinesTester {
private static final Logger LOGGER = LoggerFactory.getLogger(AudioLinesTester.class);
public static void main(String[] args) {
LOGGER.debug("Starting Line tester");
// 8 kHz, 8-bit, mono, signed, big-endian PCM.
AudioFormat format = new AudioFormat(8000.0f, 8, 1, true, true);
Mixer.Info[] infos = AudioSystem.getMixerInfo();
// Captured bytes per line, keyed by Line.Info.
// NOTE(review): if two mixers report equal Line.Info values, later captures
// overwrite earlier ones — verify keys are distinct if that matters.
Map<Line.Info, byte[]> sounds = new HashMap<Line.Info, byte[]>();
Stream.of(infos).forEach(mixerInfo -> {
Mixer mixer = AudioSystem.getMixer(mixerInfo);
Line.Info[] lineInfos = mixer.getTargetLineInfo();
// Only consider mixers whose first target line is a capture line.
if (lineInfos.length > 0 && lineInfos[0].getLineClass().equals(TargetDataLine.class)) {
// The name of the AudioDevice
LOGGER.debug("Line Name: {}", mixerInfo.getName());
// The type of audio device
LOGGER.debug("Line Description: {}", mixerInfo.getDescription());
for (Line.Info lineInfo : lineInfos) {
LOGGER.debug("\t---{}", lineInfo);
Line line;
try {
// NOTE(review): `line` is fetched but only logged below; the actual
// capture uses a separate TargetDataLine from the same mixer.
line = mixer.getLine(lineInfo);
TargetDataLine microphone = AudioSystem.getTargetDataLine(format, mixerInfo);
microphone.open(format, 100352);
LOGGER.debug("Listening on {}", microphone);
ByteArrayOutputStream out = new ByteArrayOutputStream();
int numBytesRead;
int CHUNK_SIZE = 1024;
// Buffer is larger than CHUNK_SIZE; only the first 1024 bytes are
// used per read() call below.
byte[] data = new byte[microphone.getBufferSize() / 5];
microphone.start();
int bytesRead = 0;
try {
// Just so I can test if recording my mic works...
while (bytesRead < 100000) {
numBytesRead = microphone.read(data, 0, CHUNK_SIZE);
bytesRead = bytesRead + numBytesRead;
// LOGGER.debug("Bytes read from the microphone : {} ", bytesRead);
out.write(data, 0, numBytesRead);
}
} catch (Exception e) {
LOGGER.error("Error trying to read from the microphone", e);
}
byte[] read = out.toByteArray();
// An all-zero array here is the symptom of the OS denying mic access.
LOGGER.debug("Have read {} bytes: {}", read.length, read);
sounds.put(lineInfo, read);
// microphone.drain();
microphone.stop();
microphone.close();
} catch (LineUnavailableException e) {
LOGGER.error("Something went wrong {}", e);
return;
}
LOGGER.debug("\t-----{}", line);
}
}
});
// Replay each captured buffer on a fresh output line.
sounds.forEach((k,v) -> {
LOGGER.debug("Trying to play the data capture on {}", k);
DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
LOGGER.debug("Got a new output line: {}", info);
SourceDataLine line;
try {
line = (SourceDataLine) AudioSystem.getLine(info);
line.open(format, v.length);
line.start();
long now = System.currentTimeMillis();
LOGGER.debug("buffer size: {}", line.available());
int written = line.write(v, 0, v.length);
// drain() blocks until all queued audio has actually played.
line.drain();
line.close();
LOGGER.debug("{} bytes written.", written);
long total = System.currentTimeMillis() - now;
LOGGER.debug("{}ms.", total);
} catch (LineUnavailableException e) {
LOGGER.error("Unable to play the sound", e);
}
});
LOGGER.debug("Ending Lines tester");
}
}
I was hoping to get some output from something, but I'm just getting byte arrays populated with 0.
Is anyone able to help?
Ok, so after a lot of to-ing and fro-ing and fighting with the laptop... The solution was to install a new version of Eclipse, at which point the Mac asked if I wanted to allow Java permission to access to microphone.
Hopefully this will help someone out there.

Detect silence getting user input via voice

I'm trying to detect the silence in order to stop recording the audio from a mic. My current code is:
/**
 * Records audio from the microphone until the user presses ENTER.
 * A watcher thread blocks on stdin, then flips the shared {@code stopped}
 * flag and calls {@code stop()}, while {@code record()} loops on the flag.
 *
 * @return the captured PCM bytes
 * @throws AudioException if recording fails for any reason
 */
public byte[] getRecord() throws AudioException {
    try {
        // Arm the shared flag before spawning the watcher.
        stopped = false;
        Runnable consoleWatcher = () -> {
            try {
                BufferedReader consoleReader =
                        new BufferedReader(new InputStreamReader(System.in));
                consoleReader.readLine();
                stopped = true;
            } catch (IOException ex) {
                ex.printStackTrace();
            }
            // Invoked whether or not reading stdin failed.
            stop();
        };
        new Thread(consoleWatcher).start();
        return record();
    } catch (Exception e) {
        throw new LineUnavailableException("Unable to record your voice", e);
    }
}
/**
 * Captures audio from the default microphone until the shared {@code stopped}
 * flag is set by another thread.
 *
 * <p>Fixes over the original: {@code microphone.start()} was invoked twice,
 * and chunks were appended without checking the count returned by
 * {@code read()}.</p>
 *
 * @return raw captured bytes, or {@code null} if the line is unsupported
 * @throws LineUnavailableException if the microphone cannot be opened
 */
private byte[] record() throws LineUnavailableException {
    AudioFormat format = AudioUtil.getAudioFormat(audioConf);
    DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
    // Checks if system supports the data line
    if (!AudioSystem.isLineSupported(info)) {
        return null;
    }
    microphone = (TargetDataLine) AudioSystem.getLine(info);
    microphone.open(format);
    System.out.println("Listening, tap enter to stop ...");
    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    int numBytesRead;
    // A fifth of the line's internal buffer keeps latency low without busy-looping.
    byte[] data = new byte[microphone.getBufferSize() / 5];
    // Begin audio capture (started exactly once; the original called start() twice).
    microphone.start();
    // Here, stopped is a global boolean set by another thread.
    while (!stopped) {
        // Read the next chunk of data from the TargetDataLine.
        numBytesRead = microphone.read(data, 0, data.length);
        // Only append chunks that actually contain data.
        if (numBytesRead > 0) {
            byteArrayOutputStream.write(data, 0, numBytesRead);
        }
    }
    return byteArrayOutputStream.toByteArray();
}
At the moment I stop the recording using a shell but I'd like to know how I can stop it in the while loop.
After a lot of tries now it seems to work. This is the updated code:
/**
 * Captures microphone audio, stopping automatically after 5 seconds of
 * silence (RMS below 0.1) or when the shared {@code stopped} flag is set.
 *
 * <p>Fixes over the original: {@code microphone.start()} was invoked twice;
 * the RMS was computed over the entire buffer, including stale samples past
 * {@code numBytesRead} left over from previous reads; and an empty read could
 * divide by zero.</p>
 *
 * @return raw captured bytes, or {@code null} if the line is unsupported
 * @throws LineUnavailableException if the microphone cannot be opened
 */
private byte[] record() throws LineUnavailableException {
    AudioFormat format = AudioUtil.getAudioFormat(audioConf);
    DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
    // Checks if system supports the data line
    if (!AudioSystem.isLineSupported(info)) {
        return null;
    }
    microphone = (TargetDataLine) AudioSystem.getLine(info);
    microphone.open(format);
    System.out.println("Listening, tap enter to stop ...");
    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    int numBytesRead;
    byte[] data = new byte[microphone.getBufferSize() / 5];
    short[] shorts = new short[data.length / 2];
    long startSilence = 0;
    boolean pause = false;
    // Begin audio capture (started exactly once).
    microphone.start();
    // Here, stopped is a global boolean set by another thread.
    while (!stopped) {
        numBytesRead = microphone.read(data, 0, data.length);
        if (numBytesRead <= 0) {
            continue; // nothing captured this round
        }
        byteArrayOutputStream.write(data, 0, numBytesRead);
        // Decode only the bytes actually read; the tail of `shorts` would
        // otherwise contain stale samples from earlier iterations.
        int sampleCount = numBytesRead / 2;
        ByteBuffer.wrap(data, 0, numBytesRead).order(ByteOrder.LITTLE_ENDIAN)
                .asShortBuffer().get(shorts, 0, sampleCount);
        double rms = 0;
        for (int i = 0; i < sampleCount; i++) {
            double normal = shorts[i] / 32768f; // normalize 16-bit sample to [-1, 1)
            rms += normal * normal;
        }
        rms = sampleCount > 0 ? Math.sqrt(rms / sampleCount) : 0;
        System.out.println("Listening, rms is " + rms);
        if (rms < 0.1) {
            long now = System.currentTimeMillis();
            // Break after 5 s of uninterrupted silence.
            if (now - startSilence > 5000 && pause)
                break;
            if (!pause) {
                startSilence = now;
                System.out.println("Listening, new silence at " + startSilence);
            }
            pause = true;
        } else
            pause = false;
    }
    return byteArrayOutputStream.toByteArray();
}

Android, decode mp3, mix few audio and encode to pcm ( output are too fast)

I have a question about decoding MP3s on Android, mixing several audio tracks, and encoding the result to M4A (AAC). I use JLayer for Android to decode the MP3s, AudioTrack to play the song, and MediaCodec with MediaFormat to encode the PCM. The problem is that my encoded output is too fast — for example, instead of a 5-second audio mix I get roughly 1.5 seconds. I think I am losing audio frames somewhere, but I'm not sure about that. Thanks for any answers.
(The output file plays about 25% faster than it should.)
Decode mp3 code:
/**
 * Streams an MP3 raw-resource through the JLayer decoder, pushing each decoded
 * PCM frame to the mix buffer and the AudioTrack while accumulating the
 * per-frame duration into {@code mTimeCount}.
 */
public void decodeMP3toPCM(Resources res, int resource) throws BitstreamException, DecoderException, IOException {
    InputStream mp3Stream = new BufferedInputStream(res.openRawResource(resource), 1152);
    Bitstream frames = new Bitstream(mp3Stream);
    Decoder frameDecoder = new Decoder();
    for (;;) {
        Header header = frames.readFrame();
        if (header == null) {
            // End of stream; mirror the original's final closeFrame() call.
            frames.closeFrame();
            break;
        }
        SampleBuffer decoded = (SampleBuffer) frameDecoder.decodeFrame(header, frames);
        mTimeCount += header.ms_per_frame();
        short[] samples = decoded.getBuffer();
        mDataBuffer.addFrame(mViewId, samples);
        mReadedFrames++;
        mAudioTrack.write(samples, 0, samples.length);
        frames.closeFrame();
    }
    mp3Stream.close();
}
encode:
/**
 * Encodes raw PCM chunks to AAC-LC and appends the ADTS-framed packets to a
 * file.
 *
 * <p>Fix over the original: the constructor created a MediaCodec twice —
 * first via {@code createByCodecName("OMX.google.aac.encoder")}, then
 * unconditionally overwrote it with {@code createEncoderByType(mediaType)},
 * leaking the first instance. Only one codec is created now.</p>
 */
public class AudioEncoder {
    private MediaCodec mediaCodec;
    private BufferedOutputStream outputStream;
    private String mediaType = "audio/mp4a-latm";

    /**
     * Opens (creating if necessary) the output file and configures a
     * 44.1 kHz stereo AAC-LC encoder at 128 kbit/s.
     *
     * @param filePath destination path for the ADTS stream
     * @throws IOException if the encoder cannot be created
     */
    public AudioEncoder(String filePath) throws IOException {
        File f = new File(filePath);
        touch(f);
        try {
            outputStream = new BufferedOutputStream(new FileOutputStream(f));
        } catch (Exception e) {
            e.printStackTrace();
        }
        // Single codec instance (the original leaked a second one here).
        mediaCodec = MediaCodec.createEncoderByType(mediaType);
        final int kSampleRates[] = { 8000, 11025, 22050, 44100, 48000 };
        final int kBitRates[] = { 64000, 128000 };
        MediaFormat mediaFormat = MediaFormat.createAudioFormat(mediaType, kSampleRates[3], 2);
        mediaFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
        mediaFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 4608);
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, kBitRates[1]);
        mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mediaCodec.start();
    }

    /** Stops and releases the codec, then flushes and closes the output file. */
    public void close() {
        try {
            mediaCodec.stop();
            mediaCodec.release();
            outputStream.flush();
            outputStream.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Feeds one PCM chunk to the encoder and drains any finished output
     * packets, prefixing each with a 7-byte ADTS header before writing.
     *
     * @param input raw PCM bytes matching the configured format
     */
    public synchronized void offerEncoder(byte[] input) {
        Log.e("synchro ", input.length + " is coming");
        try {
            ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
            ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
            // Block until an input buffer is free (timeout -1 = wait forever).
            int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1);
            if (inputBufferIndex >= 0) {
                ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                inputBuffer.clear();
                inputBuffer.put(input);
                mediaCodec.queueInputBuffer(inputBufferIndex, 0, input.length, 0, 0);
            }
            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
            int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
            while (outputBufferIndex >= 0) {
                int outBitsSize = bufferInfo.size;
                int outPacketSize = outBitsSize + 7; // 7 is ADTS size
                ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
                outputBuffer.position(bufferInfo.offset);
                outputBuffer.limit(bufferInfo.offset + outBitsSize);
                byte[] outData = new byte[outPacketSize];
                // Header first, then the AAC payload starting at offset 7.
                addADTStoPacket(outData, outPacketSize);
                outputBuffer.get(outData, 7, outBitsSize);
                outputBuffer.position(bufferInfo.offset);
                outputStream.write(outData, 0, outData.length);
                mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
            }
        } catch (Throwable t) {
            t.printStackTrace();
        }
    }

    /**
     * Writes a 7-byte ADTS header into the start of {@code packet}.
     * NOTE(review): freqIdx 4 (44.1 kHz) and chanCfg 2 must stay in sync with
     * the MediaFormat configured in the constructor.
     */
    private void addADTStoPacket(byte[] packet, int packetLen) {
        int profile = 2; //AAC LC
        //39=MediaCodecInfo.CodecProfileLevel.AACObjectELD;
        int freqIdx = 4; //44.1KHz
        int chanCfg = 2; //CPE
        // fill in ADTS data
        packet[0] = (byte) 0xFF;
        packet[1] = (byte) 0xF9;
        packet[2] = (byte) (((profile - 1) << 6) + (freqIdx << 2) + (chanCfg >> 2));
        packet[3] = (byte) (((chanCfg & 3) << 6) + (packetLen >> 11));
        packet[4] = (byte) ((packetLen & 0x7FF) >> 3);
        packet[5] = (byte) (((packetLen & 7) << 5) + 0x1F);
        packet[6] = (byte) 0xFC;
    }

    /** Creates the file if it does not already exist. */
    public void touch(File f) {
        try {
            if (!f.exists())
                f.createNewFile();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}

--Java-- Server not correctly playing Audio from Client --Real Time Streaming--

I have a client and server application.
The server requests an audio stream and the client handles well.
The client proceeds to setup an AudioFormat(Note: settings are the same on both ends) and TargetDataLine. It Then writes the data to a Socket output stream using a ByteArrayOutput Stream.
The server receives the Data and reads it in a threaded method. During each buffer read it it saves to a AudioInputStream which is passed to a playSound method which is threaded and synchronized to proceed to play the sound.
When I make the playSound method non-threaded, it works well but is slightly glitchy.
I also know that having the play-sound code non-threaded can cause sound frames to jam up.
Any help is appreciated, and any way I can make this audio stream more efficient and fast is also welcome.
Client:
// NOTE(review): this fragment does not compile as shown — `out_` is declared
// inside the while loop but referenced after it, the trailing `else` has no
// matching `if` in view, and `new Socket(...)` throws IOException which is
// neither caught nor declared. Presumably trimmed when posted; the comments
// below flag each issue in place.
private void captureAudio() throws CommandException {
Socket session = new Socket(host_, port_); // NOTE(review): never used or closed
try {
final AudioFormat format = getFormat();
DataLine.Info info = new DataLine.Info(
TargetDataLine.class, format);
final TargetDataLine line = (TargetDataLine)
AudioSystem.getLine(info);
line.open(format);
line.start();
// One second of audio per read: frames/sec * bytes/frame.
int bufferSize = (int)format.getSampleRate() * format.getFrameSize();
byte buffer[] = new byte[bufferSize];
running = true;
try {
while (running) {
int count = line.read(buffer, 0, buffer.length);
if (count > 0) {
// NOTE(review): a fresh BufferedOutputStream is wrapped around the
// socket's stream on every chunk — hoist it above the loop.
BufferedOutputStream out_ = null;
out_ = new BufferedOutputStream(socket_.getOutputStream());
out_.write(buffer, 0, count);
out_.flush();
}
}
out_.close(); // NOTE(review): out_ is out of scope here — compile error
line.close();
} catch (IOException e) {
throw new CommandException("I/O problems: " + e,Command.TRANSFER_ERROR);
}
} catch (LineUnavailableException e) {
throw new CommandException("Line unavailable: " + e,Command.ERROR);
}
else {
// NOTE(review): dangling else — its matching `if` was lost in the paste.
throw new CommandException("Unable to Connect to Server",Command.CONNECTION_ERROR);
}
}
/**
 * Audio format used on both ends of the stream:
 * 16 kHz, 16-bit samples, 2 channels, signed, big-endian PCM.
 */
private AudioFormat getFormat() {
    return new AudioFormat(16000f, 16, 2, true, true);
}
Server:
public void readSocket(final Socket socket) {
new Thread() {
#Override
public void run() {
InputStream input;
try {
input = socket.getInputStream();
final AudioFormat format = getFormat();
int bufferSize = (int)format.getSampleRate() * format.getFrameSize();
byte buffer[] = new byte[bufferSize];
int bytesRead;
while (((bytesRead = input.read(buffer, 0, bufferSize)) != -1 ) ) {
if (bytesRead > 0) {
play(new AudioInputStream(input, format, buffer.length / format.getFrameSize()));
}
}
socket.close();
} catch (Exception ex) {
}
}
}.start();
}
/** Shared stream format: 16 kHz / 16-bit / stereo / signed / big-endian PCM. */
private AudioFormat getFormat() {
    final float rate = 16000;
    final int bitsPerSample = 16;
    final int stereo = 2;
    return new AudioFormat(rate, bitsPerSample, stereo,
            /* signed */ true, /* bigEndian */ true);
}
private synchronized void play(final AudioInputStream ais) {
new Thread() {
#Override
public void run() {
try {
final AudioFormat format = getFormat();
DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
SourceDataLine line = (SourceDataLine)AudioSystem.getLine(info);
line.open(format);
line.start();
int bufferSize = (int) format.getSampleRate()
* format.getFrameSize();
byte buffer[] = new byte[bufferSize];
int count;
while ((count = ais.read(buffer, 0, buffer.length)) != -1) {
if (count > 0) {
line.write(buffer, 0, count);
}
}
line.drain();
line.close();
ais.close();
} catch (LineUnavailableException ex) {
} catch (IOException ex) {
}
}
}.start();
}

Categories