First off, I am not using any kind of game engine; I am modding a game in C# and I am NOT using the UnityEngine API, so I do not have any Update() functions.
So I am trying to figure out how I could create a timer, some standard out-of-the-box C# timer, that would increase the lerp distance at a set speed.
model.rotationM = Vector3.Lerp(model.rotation, model.rotationM, (float)0.016);
NAPI.Entity.SetEntityRotation(model.handle, model.rotationM);
I would like to wrap this in a timer so that every 100 ms it increases the float at the end of the lerp by some set amount over a given duration; say I set float speed = 5f;
I want to increase that lerp distance every 100 ms for 5 seconds until it reaches its goal.
Is this possible to do?
I've created an example timer class which will slowly increment a value by a given amount until it reaches 100% (1.0):
using System;
using System.Timers;
public class LerpTimer : IDisposable
{
private readonly Timer _timer;
private readonly float _incrementPercentage = 0;
public event EventHandler<float> DoLerp;
public event EventHandler Complete;
private bool _isDisposed = false;
private float _current;
public LerpTimer(double frequencyMs, float incrementPercentage)
{
if (frequencyMs <= 0)
{
throw new ArgumentOutOfRangeException(nameof(frequencyMs), "Frequency must be greater than 0 ms.");
}
if (incrementPercentage < 0 || incrementPercentage > 1)
{
throw new ArgumentOutOfRangeException(nameof(incrementPercentage), "Increment percentage must be a value between 0 and 1");
}
_timer = new Timer(frequencyMs);
_timer.Elapsed += _timer_Elapsed;
_incrementPercentage = incrementPercentage;
}
private void _timer_Elapsed(object sender, ElapsedEventArgs e)
{
if (_isDisposed)
{
return;
}
if (this.Current < 1)
{
this.Current = Math.Min(1, this.Current + _incrementPercentage);
this.DoLerp?.Invoke(this, this.Current);
}
if (this.Current >= 1)
{
this._timer.Stop();
this.Complete?.Invoke(this, EventArgs.Empty);
}
}
public float Current
{
get
{
if (_isDisposed)
{
throw new ObjectDisposedException(nameof(LerpTimer));
}
return _current;
}
set => _current = value;
}
public void Start()
{
if (_isDisposed)
{
throw new ObjectDisposedException(nameof(LerpTimer));
}
if (_timer.Enabled)
{
throw new InvalidOperationException("Timer already running.");
}
this.Current = 0;
_timer.Start();
}
public void Stop()
{
if (_isDisposed)
{
throw new ObjectDisposedException(nameof(LerpTimer));
}
if (!_timer.Enabled)
{
throw new InvalidOperationException("Timer not running.");
}
_timer.Stop();
}
public void Dispose()
{
_isDisposed = true;
_timer?.Dispose();
}
}
Sample usage:
var lerpTimer = new LerpTimer(100, 0.02f); // 50 steps of 100 ms = 5 seconds to reach 100%
lerpTimer.DoLerp += (sender, value) => {
model.rotationM = Vector3.Lerp(startRotation, endRotation, value);
NAPI.Entity.SetEntityRotation(model.handle, model.rotationM);
};
lerpTimer.Start();
So you would call this once, and then it would keep going until it reaches 100% (endRotation).
It's not necessarily the code you should use, but it should illustrate how you can use a timer to increase the value over time.
Edit to add some clarity to what a lerp function does:
double lerp(double start, double end, double percentage)
{
return start + ((end - start) * percentage);
}
Imagine we call this every 10% from 4 to 125. We would get the following results:
0% 4
10% 16.1
20% 28.2
30% 40.3
40% 52.4
50% 64.5
60% 76.6
70% 88.7
80% 100.8
90% 112.9
100% 125
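For illustration, a minimal loop that reproduces the table above (assuming the lerp function just defined is in scope):
// prints: 0% 4, 10% 16.1, 20% 28.2, ..., 100% 125
for (int i = 0; i <= 10; i++)
{
    double percentage = i / 10.0;
    Console.WriteLine($"{percentage * 100:0}% {lerp(4, 125, percentage):0.#}");
}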
Related
I want there to be 15 minutes between the rewarded ads. I made this:
When you see the ad:
public void HandleUserEarnedReward(object sender, Reward args)
{
DateTime ad= DateTime.Now.AddMinutes(15);
long adTicks = ad.Ticks;
PlayerPrefs.SetInt("ticksVideo", (int)adTicks); }
Countdown:
void Update(){
DateTime currentTime= DateTime.Now;
long currentTicks= currentTime.Ticks;
PlayerPrefs.SetInt("currentTicks", (int)currentTicks);
TimerControl = PlayerPrefs.GetInt("ticksVideo") - PlayerPrefs.GetInt("currentTicks");
string mins = ((int)TimerControl / 600000000).ToString("00"); //600.000.000 ticks per minute
string segs = ((int)TimerControl % 600000000).ToString("00");
TimerString = string.Format("{00}:{01}", mins, segs);
GetComponent<Text>().text = TimerString; }
In DateTime.Now.AddMinutes I enter 15 but the countdown lasts about 50 seconds. On the other hand, the TimerString also does not show the format that I indicate. What's wrong? Should I use TimeSpan?
Edit:
I have 2 classes:
The player watches the ad:
public class AdMob : MonoBehaviour
{
public static bool video = false;
public Button buttonAd;
public GameObject countdown;
public void HandleUserEarnedReward(object sender, Reward args)
{
//Rewards
buttonAd.interactable = false;
countdown.SetActive(true);
video = true;
}
}
The countdown begins:
public class CountdownAd : MonoBehaviour
{
public static float timeSinceLastAd = 0;
void Update(){
if (AdMob.video)
{
timeSinceLastAd += Time.deltaTime;
if (timeSinceLastAd > (60 * 15))
{
buttonAd.interactable = true;
countdown.SetActive(false);
timeSinceLastAd = 0;
AdMob.video = false;
}
} else
{
timeSinceLastAd = 0;
}
}}
EDIT 2:
public class AdMob : MonoBehaviour {
public GameObject countdownGameObject;
public Button adButton;
public Text countdown;
//I hit the adButton and I watch the rewarded ad...
public void HandleUserEarnedReward(object sender, Reward args)
{
//Rewards..
countdownGameObject.SetActive(true);
StartCoroutine(timer(15));
adButton.interactable = false;
}
IEnumerator timer(int lapse)
{
while (lapse > 0)
{
int seconds = lapse % 60;
int minutes = lapse / 60;
countdown.text = $"{lapse / 60: 00}:{lapse % 60:00}";
yield return new WaitForSeconds(1f);
lapse--;
}
countdown.text = "00:00";
//CountDown Finished
gameObject.SetActive(false);
if (lapse == 0)
{
adButton.interactable = true;
countdownGameObject.SetActive(false);
}
}
}
It's a sample of a countdown linked to the UI Text displaying the countdown; I am using StartCoroutine launched from Start() and don't use Update().
using System.Collections;
using UnityEngine;
using UnityEngine.UI;
public class Countdown : MonoBehaviour
{
private Coroutine coroutine;
private Text UITimer;
void Start()
{
UITimer = GetComponent<Text>();
if (coroutine != null) StopCoroutine(coroutine);
coroutine = StartCoroutine(timer(60*15));
}
IEnumerator timer(int lapse)
{
while (lapse > 0)
{
UITimer.text = $"{lapse / 60:00}:{lapse % 60:00}";
yield return new WaitForSeconds(1f);
lapse--;
}
UITimer.text = "00:00";
//CountDown Finished
gameObject.SetActive(false);
// and all other things
}
}
Your code looks confusing. Keep a timer and increment it with Time.deltaTime each frame; when the timer is > 15 minutes, play the ad and reset the timer.
float timeSinceLastAd = 0;
void Update(){
timeSinceLastAd += Time.deltaTime;
if (timeSinceLastAd > (60 * 15)) {
PlayAd(); //or whatever your method to play an ad is called
timeSinceLastAd = 0;
}
}
I need a timer that fires every 25ms. I've been comparing the default Timer implementation between Windows 10 and Linux (Ubuntu Server 16.10 and 12.04) on both the dotnet core runtime and the latest mono-runtime.
There are some differences in the timer precision that I don't quite understand.
I'm using the following piece of code to test the Timer:
// inside Main()
var s = new Stopwatch();
var offsets = new List<long>();
const int interval = 25;
using (var t = new Timer((obj) =>
{
offsets.Add(s.ElapsedMilliseconds);
s.Restart();
}, null, 0, interval))
{
s.Start();
Thread.Sleep(5000);
}
foreach(var n in offsets)
{
Console.WriteLine(n);
}
Console.WriteLine(offsets.Average(n => Math.Abs(interval - n)));
On Windows it's all over the place:
...
36
25
36
26
36
5,8875 # <-- average timing error
Using dotnet core on Linux, it's less all over the place:
...
25
30
27
28
27
2.59776536312849 # <-- average timing error
But the mono Timer is very precise:
...
25
25
24
25
25
25
0.33 # <-- average timing error
Edit: Even on Windows, mono still maintains its timing precision:
...
25
25
25
25
25
25
25
24
0.31
What is causing this difference? Is there a benefit to the way the dotnet core runtime does things compared to mono, that justifies the lost precision?
Unfortunately you cannot rely on the timers in the .NET Framework. The best one has a 15 ms frequency even if you want to trigger it every millisecond. But you can implement a high-resolution timer with microsecond precision, too.
Note: This works only when Stopwatch.IsHighResolution returns true. In Windows this is true starting with Windows XP; however, I did not test other frameworks.
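To quickly check whether that applies on your own runtime, you can print these two Stopwatch members (a small sketch; the values vary by platform):
// System.Diagnostics.Stopwatch
Console.WriteLine(Stopwatch.IsHighResolution); // must be true for microsecond-level precision
Console.WriteLine(Stopwatch.Frequency);        // ticks of the underlying counter per second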
using System;
using System.Diagnostics;
using System.Threading;
public class HiResTimer
{
// The length of one Stopwatch tick in milliseconds.
private static readonly float tickFrequency = 1000f / Stopwatch.Frequency;
public event EventHandler<HiResTimerElapsedEventArgs> Elapsed;
private volatile float interval;
private volatile bool isRunning;
// Threshold (ms) above which a late tick is skipped rather than fired (assumed default: never skip).
private volatile float ignoreElapsedThreshold = Single.PositiveInfinity;
public HiResTimer() : this(1f)
{
}
public HiResTimer(float interval)
{
if (interval < 0f || Single.IsNaN(interval))
throw new ArgumentOutOfRangeException(nameof(interval));
this.interval = interval;
}
// The interval in milliseconds. Fractions are allowed so 0.001 is one microsecond.
public float Interval
{
get { return interval; }
set
{
if (value < 0f || Single.IsNaN(value))
throw new ArgumentOutOfRangeException(nameof(value));
interval = value;
}
}
public bool Enabled
{
set
{
if (value)
Start();
else
Stop();
}
get { return isRunning; }
}
public void Start()
{
if (isRunning)
return;
isRunning = true;
Thread thread = new Thread(ExecuteTimer);
thread.Priority = ThreadPriority.Highest;
thread.Start();
}
public void Stop()
{
isRunning = false;
}
private void ExecuteTimer()
{
float nextTrigger = 0f;
int fallouts = 0; // counts consecutive ticks skipped because they fired too late
Stopwatch stopwatch = new Stopwatch();
stopwatch.Start();
while (isRunning)
{
float intervalLocal = interval;
nextTrigger += intervalLocal;
float elapsed;
while (true)
{
elapsed = ElapsedHiRes(stopwatch);
float diff = nextTrigger - elapsed;
if (diff <= 0f)
break;
if (diff < 1f)
Thread.SpinWait(10);
else if (diff < 10f)
Thread.SpinWait(100);
else
{
// By default Sleep(1) lasts about 15.5 ms (if not configured otherwise for the application by WinMM, for example)
// so not allowing sleeping under 16 ms. Not sleeping for more than 50 ms so interval changes/stopping can be detected.
if (diff >= 16f)
Thread.Sleep(diff >= 100f ? 50 : 1);
else
{
Thread.SpinWait(1000);
Thread.Sleep(0);
}
// if we have a larger time to wait, we check if the interval has been changed in the meantime
float newInterval = interval;
if (intervalLocal != newInterval)
{
nextTrigger += newInterval - intervalLocal;
intervalLocal = newInterval;
}
}
if (!isRunning)
return;
}
float delay = elapsed - nextTrigger;
if (delay >= ignoreElapsedThreshold)
{
fallouts += 1;
continue;
}
Elapsed?.Invoke(this, new HiResTimerElapsedEventArgs(delay, fallouts));
fallouts = 0;
// restarting the stopwatch every hour to prevent precision problems
if (stopwatch.Elapsed.TotalHours >= 1d)
{
stopwatch.Restart();
nextTrigger = 0f;
}
}
stopwatch.Stop();
}
private static float ElapsedHiRes(Stopwatch stopwatch)
{
return stopwatch.ElapsedTicks * tickFrequency;
}
}
public class HiResTimerElapsedEventArgs : EventArgs
{
public float Delay { get; }
public int Fallouts { get; }
internal HiResTimerElapsedEventArgs(float delay, int fallouts)
{
Delay = delay;
Fallouts = fallouts;
}
}
Edit 2021: Using the latest version that does not have the issue #hankd mentions in the comments.
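For completeness, a minimal usage sketch of the class above (my own example; the 25 ms interval matches the question):
var timer = new HiResTimer(25f);   // interval in milliseconds, fractions allowed
timer.Elapsed += (sender, e) => Console.WriteLine($"tick (fired {e.Delay:F3} ms late)");
timer.Start();
Thread.Sleep(5000);                // let it run for about 5 seconds
timer.Stop();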
Please consider the test program below.
I have three tasks, each with a specific interval: task1 with task1Interval, task2 with task2Interval, task3 with task3Interval.
I want to use a single timer to execute all three tasks.
Everything works fine when the intervals are integers.
We want to achieve the same functionality with double interval values, e.g. task1Interval = 0.1, task2Interval = 2.1, task3Interval = 3.1.
Any pointers for the same would be highly appreciated.
public class Class1
{
private System.Timers.Timer _timer;
private int _counter=0;
private int task1Interval = 1;
private int task2Interval = 2;
private int task3Interval = 3;
public void Start()
{
this._timer = new System.Timers.Timer(100);
this._timer.AutoReset = true;
this._timer.Elapsed += new System.Timers.ElapsedEventHandler(this.serviceTimerElapse);
this._timer.Enabled = true;
this._timer.Start();
}
private void serviceTimerElapse(object source, System.Timers.ElapsedEventArgs e)
{
this._counter++;
if (this._counter % task1Interval == 0)
{
task1();
}
if (this._counter % task2Interval == 0)
{
task2();
}
if (this._counter % task3Interval == 0)
{
task3();
}
}
private void task1()
{
Console.WriteLine("task1 started");
}
private void task2()
{
Console.WriteLine("task2 started");
}
private void task3()
{
Console.WriteLine("task3 started");
}
}
This is very similar to what you would see in a primitive game engine. You need a world "clock" with some level of granularity, and you handle each event within each "tick".
You already know your granularity needs to be the greatest common divisor of your intervals (in this case 0.1). Then a primitive solution would include a counter for each task along with a threshold.
Ex (pseudo code):
private const double TICK = 0.1;
private double TASK_1_THRESHOLD = 0.1;
private double TASK_1_COUNT = 0.0;
private double TASK_2_THRESHOLD = 2.1;
private double TASK_2_COUNT = 0.0;
private double TASK_3_THRESHOLD = 3.1;
private double TASK_3_COUNT = 0.0;
private void tick()
{
TASK_1_COUNT += TICK;
TASK_2_COUNT += TICK;
TASK_3_COUNT += TICK;
if (TASK_1_COUNT >= TASK_1_THRESHOLD) {
TASK_1_COUNT = 0.0;
task1();
}
// do this for each
}
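Here is a rough, runnable variant of the same idea (my own sketch, not the asker's code: the MultiTaskTimer name and the 100 ms System.Timers.Timer are assumptions). Counting whole ticks with integers avoids the floating-point accumulation error you would get comparing summed doubles against 2.1 or 3.1:
using System;

public class MultiTaskTimer
{
    // One shared tick at the finest interval (0.1 s); each task fires when its own tick count hits its threshold.
    private readonly System.Timers.Timer _timer = new System.Timers.Timer(100);
    private int _ticks1, _ticks2, _ticks3;

    public void Start()
    {
        _timer.Elapsed += (source, e) =>
        {
            // thresholds in whole ticks: 0.1 s -> 1, 2.1 s -> 21, 3.1 s -> 31
            if (++_ticks1 >= 1)  { _ticks1 = 0; task1(); }
            if (++_ticks2 >= 21) { _ticks2 = 0; task2(); }
            if (++_ticks3 >= 31) { _ticks3 = 0; task3(); }
        };
        _timer.AutoReset = true;
        _timer.Start();
    }

    private void task1() { Console.WriteLine("task1 started"); }
    private void task2() { Console.WriteLine("task2 started"); }
    private void task3() { Console.WriteLine("task3 started"); }
}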
I'm trying to do the following setup and it works fine when I use a real output.
I'm not sure what the right approach is; I tried to use a Timer and it works for some time, but then fails because it drifts a bit and I get a buffer full exception.
var mixSampleProvider = new MixingSampleProvider(resampleWaveFormat);
mixSampleProvider.AddMixerInput(inputAResampler);
mixSampleProvider.AddMixerInput(inputBResampler);
var mixWaveProvider = new SampleToWaveProvider(mixSampleProvider);
savingWaveProvider = new SavingWaveProvider(mixWaveProvider);
System.Timers.Timer timer = new System.Timers.Timer(98);
timer.Elapsed += (sender, args) =>
{
var count = resampleWaveFormat.AverageBytesPerSecond / 10;
var dummy = new byte[count];
savingWaveProvider.Read(dummy, 0, count);
};
timer.Start();
I have tried to calculate how much I should read on each tick e.g.
var readCount = Math.Min(inputABufferedWaveProvider.BufferedBytes, inputBBufferedWaveProvider.BufferedBytes);
but cannot make it work, and I have tried to use the DataAvailable event, but since there are two inputs and they are mixed I cannot get that to work either.
The resolution of System.Timers.Timer is approximately 15.6 ms, based on the Windows clock time. You need to track the time using a more accurate mechanism and adjust your read rates based on the true elapsed time rather than the rate of timer ticks.
The most popular method of tracking elapsed time is to use a System.Diagnostics.Stopwatch to determine how much time has actually elapsed since your process started, which you can then use to calculate the number of samples to read to stay in sync.
Here's an IWavePlayer implementation that uses a timer and a stopwatch to figure out how many samples to read from its input:
using System;
using System.Diagnostics;
using System.Timers;
using NAudio.Wave;
public class SyncedNullOutput : IWavePlayer
{
// where to read data from
private IWaveProvider _source;
// time measurement
Stopwatch _stopwatch = null;
double _lastTime = 0;
// timer to fire our read method
System.Timers.Timer _timer = null;
PlaybackState _state = PlaybackState.Stopped;
public PlaybackState PlaybackState { get { return _state; } }
public SyncedNullOutput()
{ }
public SyncedNullOutput(IWaveProvider source)
{
Init(source);
}
public void Dispose()
{
Stop();
}
void _timer_Elapsed(object sender, ElapsedEventArgs args)
{
// get total elapsed time, compare to last time
double elapsed = _stopwatch.Elapsed.TotalSeconds;
double deltaTime = elapsed - _lastTime;
_lastTime = elapsed;
// work out number of samples we need to read...
int nSamples = (int)(deltaTime * _source.WaveFormat.SampleRate);
// ...and how many bytes those samples occupy
int nBytes = nSamples * _source.WaveFormat.BlockAlign;
// Read samples from the source
byte[] buffer = new byte[nBytes];
_source.Read(buffer, 0, nBytes);
}
public void Play()
{
if (_state == PlaybackState.Stopped)
{
// create timer
_timer = new System.Timers.Timer(90);
_timer.AutoReset = true;
_timer.Elapsed += _timer_Elapsed;
_timer.Start();
// create stopwatch
_stopwatch = Stopwatch.StartNew();
_lastTime = 0;
}
else if (_state == PlaybackState.Paused)
{
// restart the stopwatch so elapsed-time deltas resume from zero
_stopwatch.Restart();
_lastTime = 0;
// restart timer
_timer.Start();
}
_state = PlaybackState.Playing;
}
public void Stop()
{
if (_timer != null)
{
_timer.Stop();
_timer.Dispose();
_timer = null;
}
if (_stopwatch != null)
{
_stopwatch.Stop();
_stopwatch = null;
}
_lastTime = 0;
_state = PlaybackState.Stopped;
}
public void Pause()
{
_timer.Stop();
_state = PlaybackState.Paused;
}
public void Init(IWaveProvider waveProvider)
{
Stop();
_source = waveProvider;
}
public event EventHandler<StoppedEventArgs> PlaybackStopped;
protected void OnPlaybackStopped(Exception exception = null)
{
if (PlaybackStopped != null)
PlaybackStopped(this, new StoppedEventArgs(exception));
}
public float Volume {get;set;}
}
I did some tests with this hooked up to a BufferedWaveProvider that was being fed samples from a default WaveInEvent instance (8kHz PCM 16-bit mono). The timer was ticking at around 93ms instead of the requested 90ms, as judged by the total run time vs number of reads, and the input buffer remained constantly under 3800 bytes in length. Changing to 44.1kHz stereo IeeeFloat format upped the buffer size to just under 80kB... still very manageable, and no overflows. In both cases the data was arriving in blocks just under half the maximum buffer size - 35280 bytes per DataAvailable event vs 76968 bytes maximum buffer length in a 60 second run, with DataAvailable firing every 100ms on average.
Try it out and see how well it works for you.
I have made a little Blackjack game, and I'd like to make the computer wait between each card it pulls; however, using System.Threading.Thread.Sleep(int x) does not make the program wait between cards, but makes it wait for x * the number of cards.
I also know that using Thread.Sleep is not a good way, so I'd rather learn a better way, as I am creating this program entirely for educational purposes.
I'll add the code underneath which decides whether or not a card should be drawn, and the method that draws the card.
private void ComputerTurn()
{
drawCard.Enabled = false;
finishTurn.Enabled = false;
while (computerTotalScore <= 11)
{
ComputerDrawCard();
}
drawAgain = true;
while (drawAgain)
{
ComputerDrawCard();
if (totalScore <= 21)
{
if (computerTotalScore > totalScore)
{
drawAgain = false;
}
else
{
drawAgain = true;
}
}
else
{
if (computerTotalScore > 16)
{
drawAgain = false;
}
else
{
drawAgain = true;
}
}
}
DecideWinner();
}
public void ComputerDrawCard()
{
cardAlreadyPulled = true;
while (cardAlreadyPulled)
{
cardType = random.Next(0, 4);
cardNumber = random.Next(0, 13);
if (!pulledCards[cardType, cardNumber])
{
cardAlreadyPulled = false;
pulledCards[cardType, cardNumber] = true;
}
}
ComputerUpdateCardPictures();
computerScore = cardScores[cardNumber];
if (computerScore == 1)
{
if (computerTotalScore <= 10)
{
computerScore = 11;
}
else
{
computerScore = 1;
}
}
computerTotalScore += computerScore;
txtComputerCurrentScore.Text = computerScore.ToString();
txtComputerTotalScore.Text = computerTotalScore.ToString();
System.Threading.Thread.Sleep(random.Next(250, 750));
}
There are multiple ways to achieve something like this. I believe what you're attempting to do is simulate a human taking time to do things. I recommend using a combination of expected wait times and a timer to achieve what you want.
using System;
using System.Threading;
class Example
{
public Example()
{
// create a stalled timer
_pulse = new Timer(this.TakeAction);
}
TimeSpan _drawdelay = TimeSpan.FromSeconds(2);
DateTime _lastAction = DateTime.MinValue;
Timer _pulse;
public void Start()
{
// start the timer by asking it to call the worker method every 0.5 seconds
_pulse.Change(0, 500);
}
public void Stop()
{
// stop the timer by setting the next pulse to infinitely in the future
_pulse.Change(Timeout.Infinite, Timeout.Infinite);
}
void TakeAction(object x)
{
lock (_pulse)
{
DateTime now = DateTime.Now;
if(now - _lastAction > _drawdelay)
{
// do work ...
_lastAction = now;
}
}
}
}
That said, the above code will run into issues if the work being done takes longer than 500 milliseconds to complete. Add thread safety as necessary.
I would add a "last time drawn" member and a "time between draws" member. Then, before drawing a card, get the time between now and the last draw. If that time is greater than the allowed time between draws, it's fine to draw.
private DateTime _lastWrite = DateTime.Now;
private TimeSpan _delay = TimeSpan.FromMilliseconds(100);
public void ComputerDrawCard() {
var now = DateTime.Now;
if (now - _lastWrite < _delay)
return;
_lastWrite = now;
// draw card...
}