tgraupmann

Member Since 16 Apr 2003
Offline Last Active Dec 17 2013 09:36 AM

Posts I've Made

In Topic: Can you help me plot out a long-term plan?

17 December 2013 - 01:26 AM

Here's a tip.

 

Some years ago, Torque3D made the mistake of ditching OpenGL for DirectX. For a cross-platform engine that amounted to slow suicide, because it closed off porting to Linux, Mac, Android, and iOS. They made some attempts to correct the mistake, but it was too late. There was a Kickstarter attempt, but interest fizzled.

 

You might as well go the Unity route and make it easy on yourself.


In Topic: Ecosystem Generator

28 August 2011 - 03:11 PM

Screenshot: http://tagenigma.com/a/ecosystemgenerator/2011-08-28%2C%20Ecosystem%20Generator%20Android%20014.png



Here you can see how to publish to Android and get the Ecosystem Generator working on your phone:
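
For anyone who wants to script that build step, here is a rough sketch of a Unity editor menu item that kicks off an Android build. The menu path, scene list, and output path are placeholders for illustration only, not something that ships with the Ecosystem Generator:

using UnityEditor;
using UnityEngine;

// Hypothetical editor script; drop it in an Editor/ folder of the project.
public static class AndroidBuildMenu
{
    [MenuItem("Build/Android APK")]
    public static void BuildAndroid()
    {
        // Replace the scene list and output path with your own project's values.
        string[] scenes = { "Assets/Scenes/EcosystemDemo.unity" };
        BuildPipeline.BuildPlayer(scenes, "Builds/EcosystemGenerator.apk",
            BuildTarget.Android, BuildOptions.None);
        Debug.Log("Android build finished.");
    }
}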

In Topic: Paint Video Series

26 July 2011 - 10:26 AM

Just letting you know that Unity 3.4 has shipped!
http://unity3d.com/unity/whats-new/unity-3.4

And the Video Painting Series is compatible with 3.4.

In Topic: Ecosystem Generator

23 July 2011 - 08:50 AM

I've added support to import your own foliage items.

YouTube: http://www.youtube.com/watch?v=NPyG6QzDt08

Vimeo: http://vimeo.com/26701662


In Topic: [SlimDX][XAudio2] SourceVoice popping [solved]

19 September 2010 - 03:03 AM

I added the fade in and fade out... and the popping is gone.

I used a 0.15 second fade that seems to do the trick. When the fade starts, I switch to the next channel to play the next sample.
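
For what it's worth, the fade is just a linear gain envelope over the first and last chunk of each buffer before it gets submitted. Here is a minimal stand-alone sketch of that idea, assuming 16-bit mono PCM; the FadeSketch/ApplyFade names are only for illustration and are not part of the full listing that follows:

using System;

static class FadeSketch
{
    // Apply a linear fade-in over the first fadeSamples samples and a
    // linear fade-out over the last fadeSamples samples of a 16-bit mono buffer.
    public static void ApplyFade(short[] samples, int fadeSamples)
    {
        for (int i = 0; i < samples.Length; i++)
        {
            double gain = 1.0;
            if (i < fadeSamples)
                gain = (double)i / fadeSamples;                        // ramp 0 -> 1
            else if (i >= samples.Length - fadeSamples)
                gain = (double)(samples.Length - 1 - i) / fadeSamples; // ramp 1 -> 0
            samples[i] = (short)(samples[i] * gain);
        }
    }

    static void Main()
    {
        // 0.5 seconds of a 440 Hz tone at 44.1 kHz with a 0.15 second fade on each end.
        const int sampleRate = 44100;
        short[] tone = new short[(int)(0.5 * sampleRate)];
        for (int i = 0; i < tone.Length; i++)
            tone[i] = (short)(Math.Sin(2 * Math.PI * 440 * i / sampleRate) * short.MaxValue * 0.25);
        ApplyFade(tone, (int)(0.15 * sampleRate));
        Console.WriteLine("first={0} last={1}", tone[0], tone[tone.Length - 1]);
    }
}

The full listing with the two alternating SourceVoices follows: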

using Emotiv;
using SlimDX;
using SlimDX.XAudio2;
using SlimDX.Multimedia;
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Reflection;
using System.Runtime.InteropServices;

namespace EEG_Example_1
{
    class EEG_Logger
    {
        EmoEngine engine; // Access to the EDK is via the EmoEngine
        int userID = -1; // userID is used to uniquely identify a user's headset

        /// <summary>
        /// A dictionary of EEG data by channel
        /// </summary>
        Dictionary<EdkDll.EE_DataChannel_t, List<double>> m_eData = new Dictionary<EdkDll.EE_DataChannel_t, List<double>>();

        /// <summary>
        /// The sound thread
        /// </summary>
        Thread m_soundThread = null;

        /// <summary>
        /// Constructor
        /// </summary>
        EEG_Logger()
        {
            // create the engine
            engine = EmoEngine.Instance;
            engine.UserAdded += new EmoEngine.UserAddedEventHandler(engine_UserAdded_Event);

            // connect to EmoEngine.
            engine.Connect();
            //engine.RemoteConnect("127.0.0.1", 1726);
        }

        void engine_UserAdded_Event(object sender, EmoEngineEventArgs e)
        {
            Console.WriteLine("User Added Event has occurred");

            // record the user
            userID = (int)e.userId;

            // enable data acquisition for this user.
            engine.DataAcquisitionEnable((uint)userID, true);

            // ask for up to 1 second of buffered data
            engine.EE_DataSetBufferSizeInSec(1);
        }

        static void Main(string[] args)
        {
            Console.WriteLine("EEG Data Reader Example");

            //epoc
            EEG_Logger p = new EEG_Logger();

            //epoc
            p.Run();
        }

        void Run()
        {
            //start the sound thread
            ThreadStart soundThreadStart = new ThreadStart(WorkerSound);
            m_soundThread = new Thread(soundThreadStart);
            m_soundThread.Start();

            while (true)
            {
                // Handle any waiting events
                engine.ProcessEvents();

                // If the user has not yet connected, do not proceed
                if (userID == -1)
                {
                    Console.Error.WriteLine("User is not connected");
                    return;
                }

                Dictionary<EdkDll.EE_DataChannel_t, double[]> data = engine.GetData((uint)userID);
                if (data == null)
                {
                    continue;
                }

                int bufferSize = data[EdkDll.EE_DataChannel_t.TIMESTAMP].Length;

                //fill with new data from raw eeg channels
                for (EdkDll.EE_DataChannel_t channel = EdkDll.EE_DataChannel_t.AF3; channel <= EdkDll.EE_DataChannel_t.AF4; ++channel)
                {
                    List<double> rawData;

                    if (!m_eData.ContainsKey(channel))
                    {
                        m_eData.Add(channel, new List<double>());
                    }

                    rawData = m_eData[channel];

                    for (int i = 0; i < bufferSize; i++)
                    {
                        double sample = data[channel][i];
                        rawData.Add(sample);
                    }
                }

                Thread.Sleep(0);
            }
        }

        struct SoundItem
        {
            public AudioBuffer m_audioBuffer;
            public DataStream m_dataStream;
            public SourceVoice m_sourceVoice;
            public byte[] m_waveData;
        }

        // worker for the sound thread
        public void WorkerSound()
        {
            //random
            Random rand = new Random();

            //slimdx
            double SAMPLE_SECONDS = 0.5;

            //slimdx - create a device
            XAudio2 device = new XAudio2();
            MasteringVoice masteringVoice = new MasteringVoice(device);

            //slimdx - create a sound format
            WaveFormat format = new WaveFormat();
            format.AverageBytesPerSecond = 88200;
            format.BlockAlignment = 2;
            format.Channels = 1;
            format.SamplesPerSecond = 44100;
            format.BitsPerSample = 16;
            format.FormatTag = WaveFormatTag.Pcm;
            long cbWaveSize = (long)(SAMPLE_SECONDS * format.SamplesPerSecond * format.BitsPerSample / 8);

            //slimdx
            double FADE_SECONDS = 0.15;

            //slimdx
            long fadeSize = (long)(FADE_SECONDS * format.SamplesPerSecond * format.BitsPerSample / 8);

            //slimdx - fill with data - 88 piano keys
            List<double> freqtable = new List<double>() { 4186.01, 3951.07, 3729.31, 3520, 3322.44, 3135.96, 2959.96, 2793.83,
                2637.02, 2489.02, 2349.32, 2217.46, 2093, 1975.53, 1864.66, 1760, 1661.22, 1567.98, 1479.98, 1396.91, 1318.51,
                1244.51, 1174.66, 1108.73, 1046.5, 987.77, 932.33, 880, 830.61, 783.99, 739.99, 698.46, 659.26, 622.25, 587.33,
                554.37, 523.25, 493.88, 466.16, 440, 415.31, 392, 369.99, 349.23, 329.63, 311.13, 293.67, 277.18, 261.63, 246.94,
                233.08, 220, 207.65, 196, 185, 174.61, 164.81, 155.56, 146.83, 138.59, 130.81, 123.47, 116.54, 110, 103.83, 98,
                92.5, 87.31, 82.41, 77.78, 73.42, 69.3, 65.41, 61.74, 58.27, 55, 51.91, 49, 46.25, 43.65, 41.2, 38.89, 36.71,
                34.65, 32.7, 30.87, 29.14, 27.5 };
            freqtable.Reverse();

            //slimdx - make a sound buffer
            SoundItem[] sounds = new SoundItem[2];
            sounds[0].m_audioBuffer = new AudioBuffer();
            sounds[1].m_audioBuffer = new AudioBuffer();

            //slimdx - set the buffer flags
            sounds[0].m_audioBuffer.Flags = BufferFlags.EndOfStream;
            sounds[1].m_audioBuffer.Flags = BufferFlags.EndOfStream;

            //slimdx - play the sound buffer
            sounds[0].m_sourceVoice = new SourceVoice(device, format);
            sounds[1].m_sourceVoice = new SourceVoice(device, format);

            //time to next buffer
            DateTime bufferTimer = DateTime.MinValue;

            //slimdx - cycle buffers
            int dsIndex = 0;
            int dsCount = 2;

            //slimdx - create the sample data in memory
            sounds[0].m_waveData = new byte[cbWaveSize];
            sounds[1].m_waveData = new byte[cbWaveSize];

            using (sounds[0].m_dataStream = new DataStream(sounds[0].m_waveData, true, true))
            {
                using (sounds[1].m_dataStream = new DataStream(sounds[1].m_waveData, true, true))
                {
                    while (true)
                    {
                        if (sounds[dsIndex].m_sourceVoice.State.BuffersQueued > 0)
                        {
                            Thread.Sleep(0);
                            continue;
                        }

                        //slimdx - fill with more data when the sound stops
                        if (bufferTimer < DateTime.Now)
                        {
                            bufferTimer = DateTime.Now + TimeSpan.FromSeconds(SAMPLE_SECONDS - FADE_SECONDS);

                            //slimdx - stop
                            sounds[dsIndex].m_sourceVoice.Stop();

                            //slimdx - get buffer
                            byte[] waveData = sounds[dsIndex].m_waveData;

                            //slimdx - clear old data
                            for (long i = 0; i < waveData.Length; ++i)
                            {
                                waveData[i] = 0;
                            }

                            // examine the last N seconds of data
                            //GenerateTone(64, fadeSize, freqtable[41], device, waveData, format); //test
                            GenerateTone(64, fadeSize, freqtable[rand.Next() % freqtable.Count], device, waveData, format); //test

                            //slimdx - start
                            sounds[dsIndex].m_dataStream.Position = 0;
                            sounds[dsIndex].m_audioBuffer.AudioData = sounds[dsIndex].m_dataStream;
                            sounds[dsIndex].m_audioBuffer.AudioBytes = (int)sounds[dsIndex].m_dataStream.Length;
                            sounds[dsIndex].m_sourceVoice.SubmitSourceBuffer(sounds[dsIndex].m_audioBuffer);
                            sounds[dsIndex].m_sourceVoice.Start();
                            dsIndex = (dsIndex + 1) % dsCount;
                        }

                        Thread.Sleep(0);
                    }
                }
            }

            //slimdx - cleanup the voices
            sounds[0].m_audioBuffer.Dispose();
            sounds[1].m_audioBuffer.Dispose();
            sounds[0].m_sourceVoice.Dispose();
            sounds[1].m_sourceVoice.Dispose();

            //slimdx - dispose the device
            masteringVoice.Dispose();
            device.Dispose();
        }

        // generate some random tones
        static void GenerateTone(double volume, double fadeSize, double frequency, XAudio2 device, byte[] waveData, WaveFormat format)
        {
            double sampleSize = (format.SamplesPerSecond * format.Channels);
            double t = (Math.PI) / sampleSize;
            for (long i = 0; (i + 1) < waveData.Length; i += 2)
            {
                double newVolume;
                double fade = fadeSize;
                if (i < fade)
                {
                    // fade in
                    newVolume = volume * Math.Min(1, i / fade);
                }
                else if (i > (waveData.Length - fade))
                {
                    // fade out
                    newVolume = volume * Math.Min(1, (waveData.Length - i) / fade);
                }
                else
                {
                    newVolume = volume;
                }

                double theta = (double)(i * t * frequency);
                float amplitude = (int)(Math.Sin((float)theta) * newVolume);
                // read existing tone - two bytes to float
                float oldAmplitude = (float)((waveData[i] << 8) | ((int)(waveData[i + 1])));
                amplitude = (amplitude + oldAmplitude) * 0.5f;
                // convert new tone - float to two bytes
                int data = (int)amplitude;
                waveData[i] = (byte)(data >> 8);
                waveData[i + 1] = (byte)data;
            }
        }
    }
}



[Edited by - tgraupmann on September 19, 2010 9:03:34 AM]
