diff --git a/configs/codeplug.example.yml b/configs/codeplug.example.yml
index c5228da..d81df55 100644
--- a/configs/codeplug.example.yml
+++ b/configs/codeplug.example.yml
@@ -45,6 +45,8 @@ zones:
algo: "aes"
# Ignored now, we use dvmfne KMM support (This will be used in the future to override FNE KMM support)
encryptionKey: null
+ # Voice mode: DMR ("dmr") or P25 ("p25")
+ mode: "p25"
- name: "Channel 2"
system: "System 1"
tgid: "15002"
diff --git a/dvmconsole/AmbeNative.cs b/dvmconsole/AmbeNative.cs
index 8197ec0..1ab8b02 100644
--- a/dvmconsole/AmbeNative.cs
+++ b/dvmconsole/AmbeNative.cs
@@ -11,7 +11,9 @@
*
*/
+using System.Diagnostics;
using System.Runtime.InteropServices;
+using fnecore;
namespace dvmconsole
{
@@ -356,7 +358,7 @@ namespace dvmconsole
///
public void encode(short[] samples, out byte[] codeword, bool encodeDMR = false)
{
- codeword = new byte[this.frameLengthInBytes];
+ codeword = new byte[frameLengthInBytes];
if (samples == null)
throw new NullReferenceException("samples");
@@ -402,6 +404,8 @@ namespace dvmconsole
// is this to be a DMR codeword?
if (mode == AmbeMode.HALF_RATE && encodeDMR)
{
+ codeword = new byte[FneSystemBase.AMBE_BUF_LEN];
+
byte[] bits = new byte[49];
for (int i = 0; i < 49; i++)
bits[i] = (byte)codewordBits[i];
diff --git a/dvmconsole/Codeplug.cs b/dvmconsole/Codeplug.cs
index 9fa4371..cb1dfeb 100644
--- a/dvmconsole/Codeplug.cs
+++ b/dvmconsole/Codeplug.cs
@@ -158,6 +158,10 @@ namespace dvmconsole
///
///
public string KeyId { get; set; }
+ /// <summary>
+ /// Voice mode for the channel, either "dmr" or "p25".
+ /// </summary>
+ public string Mode { get; set; } = "p25";
/*
** Methods
@@ -200,8 +204,32 @@ namespace dvmconsole
return EncryptionKey.Split(',').Select(s => Convert.ToByte(s.Trim(), 16)).ToArray();
}
+
+ /// <summary>
+ /// Helper to parse the configured <see cref="Mode"/> string into a <see cref="ChannelMode"/>.
+ /// </summary>
+ /// <returns>The parsed <see cref="ChannelMode"/>; defaults to P25 if the value is unrecognized.</returns>
+ public ChannelMode GetChannelMode()
+ {
+ if (Enum.TryParse(typeof(ChannelMode), Mode, ignoreCase: true, out var result))
+ {
+ return (ChannelMode)result;
+ }
+
+ return ChannelMode.P25;
+ }
} // public class Channel
+ /// <summary>
+ /// Channel voice mode.
+ /// </summary>
+ public enum ChannelMode
+ {
+ DMR = 0,
+ NXDN = 1,
+ P25 = 2
+ } // public enum ChannelMode
+
/// <summary>
/// Helper to return a system by looking up a channel name.
/// </summary>
diff --git a/dvmconsole/Controls/ChannelBox.xaml.cs b/dvmconsole/Controls/ChannelBox.xaml.cs
index e57841d..78cdf6e 100644
--- a/dvmconsole/Controls/ChannelBox.xaml.cs
+++ b/dvmconsole/Controls/ChannelBox.xaml.cs
@@ -19,7 +19,7 @@ using System.Windows;
using System.Windows.Controls;
using System.Windows.Input;
using System.Windows.Media;
-
+using fnecore.DMR;
using fnecore.P25;
namespace dvmconsole.Controls
@@ -52,9 +52,15 @@ namespace dvmconsole.Controls
public byte[] netLDU1 = new byte[9 * 25];
public byte[] netLDU2 = new byte[9 * 25];
- public int p25N { get; set; } = 0;
- public int p25SeqNo { get; set; } = 0;
- public int p25Errs { get; set; } = 0;
+ public int p25N = 0;
+ public int p25SeqNo = 0;
+ public int p25Errs = 0;
+
+ public byte dmrN = 0;
+ public int dmrSeqNo = 0;
+ public int ambeCount = 0;
+ public byte[] ambeBuffer = new byte[FneSystemBase.DMR_AMBE_LENGTH_BYTES];
+ public EmbeddedData embeddedData = new EmbeddedData();
public byte[] mi = new byte[P25Defines.P25_MI_LENGTH]; // Message Indicator
public byte algId = 0; // Algorithm ID
diff --git a/dvmconsole/FneSystemBase.DMR.cs b/dvmconsole/FneSystemBase.DMR.cs
index a782e91..977892c 100644
--- a/dvmconsole/FneSystemBase.DMR.cs
+++ b/dvmconsole/FneSystemBase.DMR.cs
@@ -11,6 +11,7 @@
*
*/
+using System.Diagnostics;
using fnecore;
using fnecore.DMR;
@@ -110,6 +111,15 @@ namespace dvmconsole
///
protected override void DMRDataReceived(object sender, DMRDataReceivedEvent e)
{
+ DateTime pktTime = DateTime.Now;
+
+ byte[] data = new byte[DMR_FRAME_LENGTH_BYTES];
+ Buffer.BlockCopy(e.Data, 20, data, 0, DMR_FRAME_LENGTH_BYTES);
+ byte bits = e.Data[15];
+
+ if (e.CallType == CallType.GROUP)
+ mainWindow.DMRDataReceived(e, pktTime);
+
return;
}
} // public abstract partial class FneSystemBase : fnecore.FneSystemBase
diff --git a/dvmconsole/MainWindow.xaml.cs b/dvmconsole/MainWindow.xaml.cs
index f1008ed..0046f37 100644
--- a/dvmconsole/MainWindow.xaml.cs
+++ b/dvmconsole/MainWindow.xaml.cs
@@ -33,11 +33,10 @@ using dvmconsole.Controls;
using Constants = fnecore.Constants;
using fnecore;
+using fnecore.DMR;
using fnecore.P25;
-using fnecore.P25.LC.TSBK;
using fnecore.P25.KMM;
-using System;
-using System.Windows.Media;
+using fnecore.P25.LC.TSBK;
namespace dvmconsole
{
@@ -605,7 +604,12 @@ namespace dvmconsole
foreach (byte[] audioChunk in channel.chunkedPCM)
{
if (audioChunk.Length == PCM_SAMPLES_LENGTH)
- P25EncodeAudioFrame(audioChunk, fne, channel, cpgChannel, system);
+ {
+ if (cpgChannel.GetChannelMode() == Codeplug.ChannelMode.P25)
+ P25EncodeAudioFrame(audioChunk, fne, channel, cpgChannel, system);
+ else if (cpgChannel.GetChannelMode() == Codeplug.ChannelMode.DMR)
+ DMREncodeAudioFrame(audioChunk, fne, channel, cpgChannel, system);
+ }
}
DateTime nextPacketTime = startTime.AddMilliseconds((i + 1) * 100);
@@ -811,7 +815,12 @@ namespace dvmconsole
foreach (byte[] chunk in channel.chunkedPCM)
{
if (chunk.Length == PCM_SAMPLES_LENGTH)
- P25EncodeAudioFrame(chunk, fne, channel, cpgChannel, system);
+ {
+ if (cpgChannel.GetChannelMode() == Codeplug.ChannelMode.P25)
+ P25EncodeAudioFrame(chunk, fne, channel, cpgChannel, system);
+ else if (cpgChannel.GetChannelMode() == Codeplug.ChannelMode.DMR)
+ DMREncodeAudioFrame(chunk, fne, channel, cpgChannel, system);
+ }
else
Log.WriteLine("bad sample length: " + chunk.Length);
}
@@ -1150,7 +1159,12 @@ namespace dvmconsole
Buffer.BlockCopy(combinedAudio, offset, chunk, 0, size);
if (chunk.Length == 320)
- P25EncodeAudioFrame(chunk, fne, channel, cpgChannel, system);
+ {
+ if (cpgChannel.GetChannelMode() == Codeplug.ChannelMode.P25)
+ P25EncodeAudioFrame(chunk, fne, channel, cpgChannel, system);
+ else if (cpgChannel.GetChannelMode() == Codeplug.ChannelMode.DMR)
+ DMREncodeAudioFrame(chunk, fne, channel, cpgChannel, system);
+ }
}
});
@@ -1424,7 +1438,10 @@ namespace dvmconsole
else
{
e.VolumeMeterLevel = 0;
- handler.SendP25TDU(srcId, dstId, false);
+ if (cpgChannel.GetChannelMode() == Codeplug.ChannelMode.P25)
+ handler.SendP25TDU(srcId, dstId, false);
+ else if (cpgChannel.GetChannelMode() == Codeplug.ChannelMode.DMR)
+ handler.SendDMRTerminator(srcId, dstId, 1, e.dmrSeqNo, e.dmrN, e.embeddedData);
}
}
@@ -2255,6 +2272,189 @@ namespace dvmconsole
}
}
+ /// <summary>
+ /// Helper to encode and transmit PCM audio as DMR AMBE frames.
+ /// </summary>
+ /// <param name="pcm"></param>
+ /// <param name="handler"></param>
+ /// <param name="channel"></param>
+ private void DMREncodeAudioFrame(byte[] pcm, PeerSystem handler, ChannelBox channel, Codeplug.Channel cpgChannel, Codeplug.System system)
+ {
+ try
+ {
+ byte slot = 1; // TODO: Support both time slots
+
+ byte[] data = null, dmrpkt = null;
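+ // DMR voice superframes carry six bursts (A-F); dmrN tracks our position within the current superframe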
+ channel.dmrN = (byte)(channel.dmrSeqNo % 6);
+ if (channel.ambeCount == FneSystemBase.AMBE_PER_SLOT)
+ {
+ ushort pktSeq = 0;
+
+ // is this the initial sequence?
+ if (channel.dmrSeqNo == 0)
+ {
+ pktSeq = handler.peer.pktSeq(true);
+
+ // send DMR voice header
+ data = new byte[FneSystemBase.DMR_FRAME_LENGTH_BYTES];
+
+ // generate DMR LC
+ LC dmrLC = new LC();
+ dmrLC.FLCO = (byte)DMRFLCO.FLCO_GROUP;
+ dmrLC.SrcId = uint.Parse(system.Rid);
+ dmrLC.DstId = uint.Parse(cpgChannel.Tgid);
+ channel.embeddedData.SetLC(dmrLC);
+
+ // generate the Slot Type
+ SlotType slotType = new SlotType();
+ slotType.DataType = (byte)DMRDataType.VOICE_LC_HEADER;
+ slotType.GetData(ref data);
+
+ FullLC.Encode(dmrLC, ref data, DMRDataType.VOICE_LC_HEADER);
+
+ // generate DMR network frame
+ dmrpkt = new byte[FneSystemBase.DMR_PACKET_SIZE];
+ handler.CreateDMRMessage(ref dmrpkt, uint.Parse(system.Rid), uint.Parse(cpgChannel.Tgid), slot, FrameType.VOICE_SYNC, (byte)channel.dmrSeqNo, 0);
+ Buffer.BlockCopy(data, 0, dmrpkt, 20, FneSystemBase.DMR_FRAME_LENGTH_BYTES);
+
+ handler.peer.SendMaster(new Tuple<byte, byte>(Constants.NET_FUNC_PROTOCOL, Constants.NET_PROTOCOL_SUBFUNC_DMR), dmrpkt, pktSeq, channel.TxStreamId);
+
+ channel.dmrSeqNo++;
+ }
+
+ pktSeq = handler.peer.pktSeq();
+
+ // send DMR voice
+ data = new byte[FneSystemBase.DMR_FRAME_LENGTH_BYTES];
+
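+ // pack the three buffered AMBE codewords into the burst, split around the 48-bit sync/embedded signalling field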
+ Buffer.BlockCopy(channel.ambeBuffer, 0, data, 0, 13);
+ data[13U] = (byte)(channel.ambeBuffer[13U] & 0xF0);
+ data[19U] = (byte)(channel.ambeBuffer[13U] & 0x0F);
+ Buffer.BlockCopy(channel.ambeBuffer, 14, data, 20, 13);
+
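+ // burst "A" (dmrN == 0) carries the voice sync pattern; bursts B-F carry embedded LC fragments instead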
+ FrameType frameType = FrameType.VOICE_SYNC;
+ if (channel.dmrN == 0)
+ frameType = FrameType.VOICE_SYNC;
+ else
+ {
+ frameType = FrameType.VOICE;
+
+ byte lcss = channel.embeddedData.GetData(ref data, channel.dmrN);
+
+ // generate embedded signalling
+ EMB emb = new EMB();
+ emb.ColorCode = 0;
+ emb.LCSS = lcss;
+ emb.Encode(ref data);
+ }
+
+ // generate DMR network frame
+ dmrpkt = new byte[FneSystemBase.DMR_PACKET_SIZE];
+ handler.CreateDMRMessage(ref dmrpkt, uint.Parse(system.Rid), uint.Parse(cpgChannel.Tgid), 1, frameType, (byte)channel.dmrSeqNo, channel.dmrN);
+ Buffer.BlockCopy(data, 0, dmrpkt, 20, FneSystemBase.DMR_FRAME_LENGTH_BYTES);
+
+ handler.peer.SendMaster(new Tuple<byte, byte>(Constants.NET_FUNC_PROTOCOL, Constants.NET_PROTOCOL_SUBFUNC_DMR), dmrpkt, pktSeq, channel.TxStreamId);
+
+ channel.dmrSeqNo++;
+
+ FneUtils.Memset(channel.ambeBuffer, 0, 27);
+ channel.ambeCount = 0;
+ }
+
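+ // convert little-endian 16-bit PCM bytes into samples for the vocoder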
+ int smpIdx = 0;
+ short[] samples = new short[MBE_SAMPLES_LENGTH];
+ for (int pcmIdx = 0; pcmIdx < pcm.Length; pcmIdx += 2)
+ {
+ samples[smpIdx] = (short)((pcm[pcmIdx + 1] << 8) + pcm[pcmIdx + 0]);
+ smpIdx++;
+ }
+
+ // encode PCM samples into AMBE codewords
+ byte[] ambe = null;
+
+ if (channel.ExternalVocoderEnabled)
+ {
+ if (channel.ExtHalfRateVocoder == null)
+ channel.ExtHalfRateVocoder = new AmbeVocoder(false);
+
+ channel.ExtHalfRateVocoder.encode(samples, out ambe, true);
+ }
+ else
+ {
+ if (channel.Encoder == null)
+ channel.Encoder = new MBEEncoder(MBE_MODE.DMR_AMBE);
+
+ ambe = new byte[FneSystemBase.AMBE_BUF_LEN];
+ channel.Encoder.encode(samples, ambe);
+ }
+
+ Buffer.BlockCopy(ambe, 0, channel.ambeBuffer, channel.ambeCount * 9, FneSystemBase.AMBE_BUF_LEN);
+
+ channel.ambeCount++;
+ }
+ catch (Exception ex)
+ {
+ Log.StackTrace(ex);
+ }
+ }
+
+ /// <summary>
+ /// Helper to decode and playback DMR AMBE frames as PCM audio.
+ /// </summary>
+ /// <param name="ambe"></param>
+ /// <param name="e"></param>
+ private void DMRDecodeAudioFrame(byte[] ambe, DMRDataReceivedEvent e, PeerSystem system, ChannelBox channel)
+ {
+ try
+ {
+ // Log.Logger.Debug($"FULL AMBE {FneUtils.HexDump(ambe)}");
+ for (int n = 0; n < FneSystemBase.AMBE_PER_SLOT; n++)
+ {
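+ // extract the n-th 9-byte AMBE codeword from the 27-byte payload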
+ byte[] ambePartial = new byte[FneSystemBase.AMBE_BUF_LEN];
+ for (int i = 0; i < FneSystemBase.AMBE_BUF_LEN; i++)
+ ambePartial[i] = ambe[i + (n * 9)];
+
+ short[] samples = null;
+ int errs = 0;
+
+ // do we have the external vocoder library?
+ if (channel.ExternalVocoderEnabled)
+ {
+ if (channel.ExtHalfRateVocoder == null)
+ channel.ExtHalfRateVocoder = new AmbeVocoder(false);
+
+ errs = channel.ExtHalfRateVocoder.decode(ambePartial, out samples);
+ }
+ else
+ {
+ samples = new short[FneSystemBase.MBE_SAMPLES_LENGTH];
+ errs = channel.Decoder.decode(ambePartial, samples);
+ }
+
+ if (samples != null)
+ {
+ //Log.WriteLine($"({system.SystemName}) DMRD: Traffic *VOICE FRAME * PEER {e.PeerId} SRC_ID {e.SrcId} TGID {e.DstId} TS {e.Slot + 1} VC{e.n}.{n} ERRS {errs} [STREAM ID {e.StreamId}]");
+ // Log.Logger.Debug($"PARTIAL AMBE {FneUtils.HexDump(ambePartial)}");
+ // Log.Logger.Debug($"SAMPLE BUFFER {FneUtils.HexDump(samples)}");
+
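+ // convert the decoded samples back to little-endian PCM bytes for playback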
+ int pcmIdx = 0;
+ byte[] pcm = new byte[samples.Length * 2];
+ for (int smpIdx = 0; smpIdx < samples.Length; smpIdx++)
+ {
+ pcm[pcmIdx + 0] = (byte)(samples[smpIdx] & 0xFF);
+ pcm[pcmIdx + 1] = (byte)((samples[smpIdx] >> 8) & 0xFF);
+ pcmIdx += 2;
+ }
+
+ //Log.WriteLine($"PCM BYTE BUFFER {FneUtils.HexDump(pcm)}");
+ audioManager.AddTalkgroupStream(e.DstId.ToString(), pcm);
+ }
+ }
+ }
+ catch (Exception ex)
+ {
+ Log.WriteError($"Audio Decode Exception: {ex.Message}");
+ }
+ }
+
///
///
///
@@ -2299,6 +2499,124 @@ namespace dvmconsole
}
}
+ /// <summary>
+ /// Event handler used to process incoming DMR data.
+ /// </summary>
+ /// <param name="e"></param>
+ /// <param name="pktTime"></param>
+ public void DMRDataReceived(DMRDataReceivedEvent e, DateTime pktTime)
+ {
+ Dispatcher.Invoke(() =>
+ {
+ foreach (ChannelBox channel in selectedChannelsManager.GetSelectedChannels())
+ {
+ if (channel.SystemName == PLAYBACKSYS || channel.ChannelName == PLAYBACKCHNAME || channel.DstId == PLAYBACKTG)
+ continue;
+
+ Codeplug.System system = Codeplug.GetSystemForChannel(channel.ChannelName);
+ Codeplug.Channel cpgChannel = Codeplug.GetChannelByName(channel.ChannelName);
+
+ if (cpgChannel.GetChannelMode() != Codeplug.ChannelMode.DMR)
+ continue;
+
+ PeerSystem handler = fneSystemManager.GetFneSystem(system.Name);
+
+ if (!channel.IsEnabled || channel.Name == PLAYBACKCHNAME)
+ continue;
+
+ if (cpgChannel.Tgid != e.DstId.ToString())
+ continue;
+
+ if (!systemStatuses.ContainsKey(cpgChannel.Name + e.Slot))
+ systemStatuses[cpgChannel.Name + e.Slot] = new SlotStatus();
+
+ if (channel.Decoder == null)
+ channel.Decoder = new MBEDecoder(MBE_MODE.DMR_AMBE);
+
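+ // the raw DMR frame starts at byte 20 of the DMRD network packet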
+ byte[] data = new byte[FneSystemBase.DMR_FRAME_LENGTH_BYTES];
+ Buffer.BlockCopy(e.Data, 20, data, 0, FneSystemBase.DMR_FRAME_LENGTH_BYTES);
+ byte bits = e.Data[15];
+
+ // is this a new call stream?
+ if (e.StreamId != systemStatuses[cpgChannel.Name + e.Slot].RxStreamId)
+ {
+ channel.IsReceiving = true;
+ systemStatuses[cpgChannel.Name + e.Slot].RxStart = pktTime;
+ Log.WriteLine($"({system.Name}) DMRD: Traffic *CALL START * PEER {e.PeerId} SRC_ID {e.SrcId} TGID {e.DstId} [STREAM ID {e.StreamId}]");
+
+ // if we can, use the LC from the voice header as to keep all options intact
+ if ((e.FrameType == FrameType.DATA_SYNC) && (e.DataType == DMRDataType.VOICE_LC_HEADER))
+ {
+ LC lc = FullLC.Decode(data, DMRDataType.VOICE_LC_HEADER);
+ systemStatuses[cpgChannel.Name + e.Slot].DMR_RxLC = lc;
+ }
+ else // if we don't have a voice header; don't wait to decode it, just make a dummy header
+ systemStatuses[cpgChannel.Name + e.Slot].DMR_RxLC = new LC()
+ {
+ SrcId = e.SrcId,
+ DstId = e.DstId
+ };
+
+ systemStatuses[cpgChannel.Name + e.Slot].DMR_RxPILC = new PrivacyLC();
+ Log.WriteLine($"({system.Name}) TS {e.Slot + 1} [STREAM ID {e.StreamId}] RX_LC {FneUtils.HexDump(systemStatuses[cpgChannel.Name + e.Slot].DMR_RxLC.GetBytes())}");
+
+ callHistoryWindow.AddCall(cpgChannel.Name, (int)e.SrcId, (int)e.DstId);
+ callHistoryWindow.ChannelKeyed(cpgChannel.Name, (int)e.SrcId, false); // TODO: Encrypted state
+
+ string alias = string.Empty;
+
+ try
+ {
+ alias = AliasTools.GetAliasByRid(system.RidAlias, (int)e.SrcId);
+ }
+ catch (Exception) { }
+
+ if (string.IsNullOrEmpty(alias))
+ channel.LastSrcId = "Last ID: " + e.SrcId;
+ else
+ channel.LastSrcId = "Last: " + alias;
+
+ channel.Background = ChannelBox.GREEN_GRADIENT;
+ }
+
+ // if we can, use the PI LC from the PI voice header as to keep all options intact
+ if ((e.FrameType == FrameType.DATA_SYNC) && (e.DataType == DMRDataType.VOICE_PI_HEADER))
+ {
+ PrivacyLC lc = FullLC.DecodePI(data);
+ systemStatuses[cpgChannel.Name + e.Slot].DMR_RxPILC = lc;
+ //Log.WriteLine($"({SystemName}) DMRD: Traffic *CALL PI PARAMS * PEER {e.PeerId} DST_ID {e.DstId} TS {e.Slot + 1} ALGID {lc.AlgId} KID {lc.KId} [STREAM ID {e.StreamId}]");
+ //Log.WriteLine($"({SystemName}) TS {e.Slot + 1} [STREAM ID {e.StreamId}] RX_PI_LC {FneUtils.HexDump(systemStatuses[cpgChannel.Name + e.Slot].DMR_RxPILC.GetBytes())}");
+ }
+
+ if ((e.FrameType == FrameType.DATA_SYNC) && (e.DataType == DMRDataType.TERMINATOR_WITH_LC) && (systemStatuses[cpgChannel.Name + e.Slot].RxType != FrameType.TERMINATOR))
+ {
+ channel.IsReceiving = false;
+ TimeSpan callDuration = pktTime - systemStatuses[cpgChannel.Name + e.Slot].RxStart;
+ Log.WriteLine($"({system.Name}) DMRD: Traffic *CALL END * PEER {e.PeerId} SRC_ID {e.SrcId} TGID {e.DstId} DUR {callDuration} [STREAM ID {e.StreamId}]");
+ channel.Background = ChannelBox.BLUE_GRADIENT;
+ channel.VolumeMeterLevel = 0;
+ callHistoryWindow.ChannelUnkeyed(cpgChannel.Name, (int)e.SrcId);
+ }
+
+ if (e.FrameType == FrameType.VOICE_SYNC || e.FrameType == FrameType.VOICE)
+ {
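+ // reassemble the 27-byte AMBE payload from around the sync/embedded signalling field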
+ byte[] ambe = new byte[FneSystemBase.DMR_AMBE_LENGTH_BYTES];
+ Buffer.BlockCopy(data, 0, ambe, 0, 14);
+ ambe[13] &= 0xF0;
+ ambe[13] |= (byte)(data[19] & 0x0F);
+ Buffer.BlockCopy(data, 20, ambe, 14, 13);
+ DMRDecodeAudioFrame(ambe, e, handler, channel);
+ }
+
+ systemStatuses[cpgChannel.Name + e.Slot].RxRFS = e.SrcId;
+ systemStatuses[cpgChannel.Name + e.Slot].RxType = e.FrameType;
+ systemStatuses[cpgChannel.Name + e.Slot].RxTGId = e.DstId;
+ systemStatuses[cpgChannel.Name + e.Slot].RxTime = pktTime;
+ systemStatuses[cpgChannel.Name + e.Slot].RxStreamId = e.StreamId;
+ }
+ });
+ }
+
/// <summary>
/// Event handler used to process incoming P25 data.
/// </summary>
@@ -2325,6 +2643,9 @@ namespace dvmconsole
Codeplug.System system = Codeplug.GetSystemForChannel(channel.ChannelName);
Codeplug.Channel cpgChannel = Codeplug.GetChannelByName(channel.ChannelName);
+ if (cpgChannel.GetChannelMode() != Codeplug.ChannelMode.P25)
+ continue;
+
bool isEmergency = false;
bool encrypted = false;
diff --git a/dvmconsole/VocoderInterop.cs b/dvmconsole/VocoderInterop.cs
index 4b6181a..281a3f8 100644
--- a/dvmconsole/VocoderInterop.cs
+++ b/dvmconsole/VocoderInterop.cs
@@ -11,7 +11,9 @@
*
*/
+using System.Diagnostics;
using System.Runtime.InteropServices;
+using fnecore;
namespace dvmconsole
{
@@ -319,19 +321,22 @@ namespace dvmconsole
throw new NullReferenceException("Input MBE codeword is null!");
char[] bits = null;
+ int bitCount = 0;
// Set up based on mode
if (mode == MBE_MODE.DMR_AMBE)
{
if (codeword.Length != AMBE_CODEWORD_SAMPLES)
throw new ArgumentOutOfRangeException($"AMBE codeword length is != {AMBE_CODEWORD_SAMPLES}");
- bits = new char[AMBE_CODEWORD_BITS];
+ bitCount = AMBE_CODEWORD_BITS;
+ bits = new char[bitCount];
}
else if (mode == MBE_MODE.IMBE_88BIT)
{
if (codeword.Length != IMBE_CODEWORD_SAMPLES)
throw new ArgumentOutOfRangeException($"IMBE codeword length is != {IMBE_CODEWORD_SAMPLES}");
- bits = new char[IMBE_CODEWORD_BITS];
+ bitCount = IMBE_CODEWORD_BITS;
+ bits = new char[bitCount];
}
if (bits == null)
@@ -341,19 +346,8 @@ namespace dvmconsole
int errs = decoder.decodeBits(codeword, bits);
// Copy
- if (mode == MBE_MODE.DMR_AMBE)
- {
- // Copy bits
- mbeBits = new byte[AMBE_CODEWORD_BITS];
- Array.Copy(bits, mbeBits, AMBE_CODEWORD_BITS);
-
- }
- else if (mode == MBE_MODE.IMBE_88BIT)
- {
- // Copy bits
- mbeBits = new byte[IMBE_CODEWORD_BITS];
- Array.Copy(bits, mbeBits, IMBE_CODEWORD_BITS);
- }
+ for (int i = 0; i < bitCount; i++)
+ mbeBits[i] = (byte)(bits[i] & 0x01);
return errs;
}
@@ -369,7 +363,9 @@ namespace dvmconsole
public void Encode([In] byte[] mbeBits, [Out] byte[] codeword)
{
if (mbeBits == null)
+ {
throw new NullReferenceException("Input MBE bit array is null!");
+ }
char[] bits = null;
@@ -377,45 +373,49 @@ namespace dvmconsole
if (mode == MBE_MODE.DMR_AMBE)
{
if (mbeBits.Length != AMBE_CODEWORD_BITS)
+ {
throw new ArgumentOutOfRangeException($"AMBE codeword bit length is != {AMBE_CODEWORD_BITS}");
+ }
bits = new char[AMBE_CODEWORD_BITS];
- Array.Copy(mbeBits, bits, AMBE_CODEWORD_BITS);
+ for (int i = 0; i < mbeBits.Length; i++)
+ bits[i] = (char)(mbeBits[i] & 0x01);
}
else if (mode == MBE_MODE.IMBE_88BIT)
{
if (mbeBits.Length != IMBE_CODEWORD_BITS)
- throw new ArgumentOutOfRangeException($"IMBE codeword bit length is != {AMBE_CODEWORD_BITS}");
+ {
+ throw new ArgumentOutOfRangeException($"IMBE codeword bit length is != {IMBE_CODEWORD_BITS}");
+ }
bits = new char[IMBE_CODEWORD_BITS];
- Array.Copy(mbeBits, bits, IMBE_CODEWORD_BITS);
+ for (int i = 0; i < mbeBits.Length; i++)
+ bits[i] = (char)(mbeBits[i] & 0x01);
}
if (bits == null)
+ {
throw new ArgumentException("Bit array did not get set up properly!");
+ }
// Encode samples
if (mode == MBE_MODE.DMR_AMBE)
{
// Create output array
byte[] codewords = new byte[AMBE_CODEWORD_SAMPLES];
-
// Encode
encoder.encodeBits(bits, codewords);
-
// Copy
- codeword = new byte[AMBE_CODEWORD_SAMPLES];
- Array.Copy(codewords, codeword, IMBE_CODEWORD_SAMPLES);
+ for (int i = 0; i < AMBE_CODEWORD_SAMPLES; i++)
+ codeword[i] = codewords[i];
}
else if (mode == MBE_MODE.IMBE_88BIT)
{
// Create output array
byte[] codewords = new byte[IMBE_CODEWORD_SAMPLES];
-
// Encode
encoder.encodeBits(bits, codewords);
-
// Copy
- codeword = new byte[IMBE_CODEWORD_SAMPLES];
- Array.Copy(codewords, codeword, IMBE_CODEWORD_SAMPLES);
+ for (int i = 0; i < IMBE_CODEWORD_SAMPLES; i++)
+ codeword[i] = codewords[i];
}
}
} // public class MBEInterleaver