I have to implement rate limiting to public endpoints using .Net 7 new Rate limiting Middleware.
For now I've settled on a fixedWindow rate limiter.
I've found many different implementations online, but the only ones I found implementing any kind of filtering on IP/client use the GlobalLimiter, which I don't want.
I have many endpoints and I want 2 different limiters on 2 of my public endpoints.
What I want is a mix of the 2 following implementations, which would allow me to name the policy so it applies only to selected endpoints, with the rate limiting applied per client.
2 different policies I can assign to each of my endpoints:
// Two named fixed-window policies that can be attached per-endpoint via
// RequireRateLimiting("...") or the [EnableRateLimiting("...")] attribute.
builder.Services.AddRateLimiter(options =>
{
    // Inner lambda parameter renamed: redeclaring "options" inside the outer
    // "options" lambda is a CS0136 compile error in C#.
    options.AddFixedWindowLimiter("myRateLimiter1", limiterOptions =>
    {
        limiterOptions.AutoReplenishment = true;
        limiterOptions.PermitLimit = 1;
        limiterOptions.Window = TimeSpan.FromSeconds(30);
    });
    // Renamed from "myRateLimiter12" to "myRateLimiter2" to match the
    // two-policy naming ("myRateLimiter1"/"myRateLimiter2") used elsewhere
    // in this question.
    options.AddFixedWindowLimiter("myRateLimiter2", limiterOptions =>
    {
        limiterOptions.AutoReplenishment = true;
        limiterOptions.PermitLimit = 1;
        limiterOptions.Window = TimeSpan.FromSeconds(30);
    });
});
Filtering clients that is implemented globally
// A single global limiter applied to every endpoint, partitioned per client:
// each distinct partition key gets its own fixed-window counter.
builder.Services.AddRateLimiter(options =>
{
// Partition by the authenticated user name when available, otherwise fall
// back to the request's Host header. NOTE(review): Host is client-supplied
// and shared by all clients of one host name — confirm this is the intended
// fallback (Connection.RemoteIpAddress is a common per-IP alternative).
options.GlobalLimiter = PartitionedRateLimiter.Create<HttpContext, string>(httpContext =>
    RateLimitPartition.GetFixedWindowLimiter(
        partitionKey: httpContext.User.Identity?.Name ?? httpContext.Request.Headers.Host.ToString(),
        factory: partition => new FixedWindowRateLimiterOptions
        {
            AutoReplenishment = true,
            PermitLimit = 1,
            QueueLimit = 0,          // reject immediately instead of queueing
            Window = TimeSpan.FromSeconds(30)
        }));
});
I also found this implementation that does the job, but I can't find how to add filtering either:
// Alternative wiring via app.UseRateLimiter with inline options: two named
// fixed-window policies plus a custom rejection handler.
app.UseRateLimiter(new RateLimiterOptions
{
// Invoked whenever a request is rejected: emits a Retry-After header when the
// limiter supplies that metadata, then responds 429 Too Many Requests.
OnRejected = (context, _) =>
{
if (context.Lease.TryGetMetadata(MetadataName.RetryAfter, out var retryAfter))
{
context.HttpContext.Response.Headers.RetryAfter =
((int)retryAfter.TotalSeconds).ToString(NumberFormatInfo.InvariantInfo);
app.Logger.LogWarning("Rate limit exceeded, retry after {RetryAfter} seconds", retryAfter.TotalSeconds);
}
context.HttpContext.Response.StatusCode = StatusCodes.Status429TooManyRequests;
return new ValueTask();
}
}
// Two endpoint-scoped policies: 1 request per 10-second window, no queueing.
.AddFixedWindowLimiter("myRateLimiter1", options =>
{
options.AutoReplenishment = true;
options.PermitLimit = 1;
options.Window = TimeSpan.FromSeconds(10);
options.QueueLimit = 0;
}).AddFixedWindowLimiter("myRateLimiter2", options =>
{
options.AutoReplenishment = true;
options.PermitLimit = 1;
options.Window = TimeSpan.FromSeconds(10);
options.QueueLimit = 0;
}));
Add your limiters via RateLimiterOptions.AddPolicy:
// A named, endpoint-scoped policy whose limiter is partitioned per client —
// i.e. the combination the question asks for: AddPolicy gives the policy a
// name, and the partition key makes the window per-client.
builder.Services.AddRateLimiter(options =>
{
// Partition key: authenticated user name, falling back to the Host header.
// NOTE(review): Host is client-supplied; RemoteIpAddress may be a better
// per-client fallback — confirm intent.
options.AddPolicy("myRateLimiter1", context => RateLimitPartition.GetFixedWindowLimiter(
partitionKey: context.User.Identity?.Name ?? context.Request.Headers.Host.ToString(),
factory: partition => new FixedWindowRateLimiterOptions
{
AutoReplenishment = true,
PermitLimit = 1,
QueueLimit = 0,
Window = TimeSpan.FromSeconds(30)
}));
// and the second one
});
Related
I am trying to use primitive code like this:
// Page through all documents with from/size. The original looped with
// while (true) and never terminated; stop once a page comes back short.
// NOTE(review): from+size paging is capped (index.max_result_window,
// default 10k) — scrolling is the right tool for a full export; confirm.
var pageSize = 100;
var startPosition = 0;
bool morePages;
do
{
    var searchResponse = client.Search<Bla>(s => s
        .Index(indexName)
        .Query(q => q.MatchAll()
        ).From(startPosition).Size(pageSize)
    );
    // A full page suggests more documents may follow; a short/empty page ends the loop.
    morePages = searchResponse.Documents.Count == pageSize;
    startPosition = startPosition + pageSize;
} while (morePages);
to page over all ingested documents. This breaks the server as the requests are too frequent I believe. I could slow things down by going to sleep for a few milliseconds, but I think this would still not be best practice.
I know there is also the concept of scrolling. How would I use this in my scenario, where I would like to act upon each page's result?
PS:
// Streams every document out of the index using NEST's sliced ScrollAll helper.
static void Main(string[] args)
{
var indexName = "document";
var client = GetClient(indexName);
var pageSize = 1000;
// Number of parallel slices the scroll is split into (Elasticsearch
// "sliced scroll"); each slice is scrolled independently.
var numberOfSlices = 4;
// "1m" is the scroll keep-alive: how long the server keeps each scroll
// context alive between consecutive fetches (renewed on every request).
var scrollObserver = client.ScrollAll<Document>("1m", numberOfSlices, s => s
.MaxDegreeOfParallelism(numberOfSlices)
.Search(search => search
.Index(indexName).MatchAll()
.Size(pageSize)
)
// Wait blocks the caller until the whole scroll completes; the TimeSpan is
// the overall timeout for the operation, and the lambda runs once per
// scroll response (i.e. once per page, per slice).
).Wait(TimeSpan.FromMinutes(60), r =>
{
// do something with documents from a given response.
var documents = r.SearchResponse.Documents.ToList();
Console.WriteLine(documents[0].Id);
});
}
I am familiar with the observer pattern but not sure what exactly these components mean:
"1m"
numberOfSlices
TimeSpan.FromMinutes(60)
Something along those lines seems to work:
// Classic scroll loop: one initial search opens the scroll context, then
// repeated Scroll calls drain it page by page.
const string indexName = "bla";
var client = GetClient(indexName);
// Keep-alive for the scroll context. NOTE(review): the int is implicitly
// converted to a NEST Time — confirm the intended unit (milliseconds here).
const int scrollTimeout = 1000;
// Use ONE document type throughout: the original mixed Search<Document>
// with Scroll<XYZ> and List<XYZ>, which does not compile.
var initialResponse = client.Search<XYZ>
    (scr => scr.Index(indexName)
        .From(0)
        .Take(100)
        .MatchAll()
        .Scroll(scrollTimeout))
    ;
var results = new List<XYZ>();
if (!initialResponse.IsValid || string.IsNullOrEmpty(initialResponse.ScrollId))
    throw new Exception(initialResponse.ServerError.Error.Reason);
if (initialResponse.Documents.Any())
    results.AddRange(initialResponse.Documents);
var scrollid = initialResponse.ScrollId;
bool isScrollSetHasData = true;
while (isScrollSetHasData)
{
    // Each Scroll call returns the next page and (possibly) a new scroll id.
    var loopingResponse = client.Scroll<XYZ>(scrollTimeout, scrollid);
    if (loopingResponse.IsValid)
    {
        results.AddRange(loopingResponse.Documents);
        scrollid = loopingResponse.ScrollId;
    }
    // An empty page (or an invalid response) ends the loop.
    isScrollSetHasData = loopingResponse.Documents.Any();
    // do some amazing stuff
}
// Release the server-side scroll context as soon as we are done with it.
client.ClearScroll(new ClearScrollRequest(scrollid));
I have following code :
void Main()
{
    // Sample order: two catalogs with PosNr values {1,2,3} and {1,2,6}.
    Order order = new Order
    {
        Catalogen = new List<Catalog>
        {
            new Catalog
            {
                Artikels = new List<Artikel>
                {
                    new Artikel{PosNr=1}, new Artikel{PosNr=2}, new Artikel{PosNr=3}
                }
            },
            new Catalog
            {
                Artikels = new List<Artikel>
                {
                    new Artikel{PosNr=1}, new Artikel{PosNr=2}, new Artikel{PosNr=6}
                }
            }
        }
    };
    // Next free position number: one past the highest PosNr over all catalogs.
    // DefaultIfEmpty(0) covers "no articles anywhere" (result: 0 + 1 = 1),
    // replacing the original try/catch-as-control-flow — Enumerable.Max throws
    // InvalidOperationException on an empty sequence, and catching a broad
    // Exception just to default the value hides real errors.
    int max = order.Catalogen
        .SelectMany(c => c.Artikels)
        .Select(a => a.PosNr)
        .DefaultIfEmpty(0)
        .Max() + 1;
    Console.WriteLine (max);
}
// POCO model for the sample above. Public fields (rather than properties)
// are kept as-is, since the surrounding LINQ snippets access them directly.
class Artikel {
// Position number of the article within a catalog.
public int PosNr;
};
class Catalog {
// Articles contained in this catalog; may be empty.
public List<Artikel> Artikels;
};
class Order {
// Catalogs belonging to the order.
public List<Catalog> Catalogen;
}
Is there a simpler way to get the max PosNr, taking into account that an order catalog can be empty? The Where seems to be needed to handle this case, but it makes the code look clunky, so I am looking for a better way.
// Query syntax: flatten all articles across catalogs, then take the highest
// position number. The member is "PosNr" — the original wrote "Posnr", which
// does not compile (C# identifiers are case-sensitive).
// Note: Max() throws InvalidOperationException if there are no articles.
var q = from cataloog in order.Catalogen
        from artikel in cataloog.Artikels
        select artikel.PosNr;
var max = q.Max() + 1;
Alternatively
// Method-syntax equivalent ("PosNr", not "Posnr" — case matters in C#).
var max = order.Catalogen.SelectMany(c => c.Artikels).Max(a => a.PosNr) + 1;
Update:
Of course, if there are no Artikels, then the maximum PosNr is undefined, which is reported by Enumerable.Max as an InvalidOperationException.
In your specific case, there is an easy solution for that:
// DefaultIfEmpty() injects default(int) == 0 when there are no articles at
// all, so Max() never throws and the result falls back to 1.
// ("PosNr", not "Posnr" — C# member names are case-sensitive.)
var max = order.Catalogen.SelectMany(c => c.Artikels)
                         .Select(a => a.PosNr)
                         .DefaultIfEmpty()
                         .Max() + 1;
I want to schedule tasks with the Microsoft Solver Framework. For now I have the simple goal of just ordering the tasks in a queue so that I get a minimal project time (later I want to have more than one queue). I tried to approach this with the following setup:
Decision:
projectFinish
start
finish
Parameter:
duration
Constraint:
start + duration = finish
not more than one task at a time
projectFinish after all tasks finished
Goal:
minimize projectFinish
Here is my code so far
// Entry point: builds three one-unit sample tasks ("task0".."task2") and
// hands them to the scheduling solver.
static void Main(string[] args) {
    var tasks = new List<Task>();
    for (var i = 0; i < 3; i++) {
        tasks.Add(new Task { Duration = 1, Name = "task" + i });
    }
    SolveScheduling(tasks);
}
/// <summary>
/// A schedulable work item. Every instance receives a unique, sequential
/// zero-based ID at construction time.
/// NOTE(review): the name collides with System.Threading.Tasks.Task —
/// consider renaming if that namespace is ever imported here.
/// </summary>
public class Task {
    private static int id_counter = 0;
    /// <summary>
    /// Assigns the next sequential ID. Interlocked.Increment makes the
    /// counter update atomic (the original id_counter++ is not thread-safe);
    /// subtracting 1 preserves the original 0-based sequence.
    /// </summary>
    public Task() { ID = System.Threading.Interlocked.Increment(ref id_counter) - 1; }
    /// <summary>Unique, zero-based identifier assigned at construction.</summary>
    public int ID { get; private set; }
    /// <summary>Display name of the task.</summary>
    public string Name { get; set; }
    /// <summary>Duration in abstract time units.</summary>
    public double Duration { get; set; }
}
/// <summary>
/// Builds and solves a scheduling model with Microsoft Solver Foundation:
/// each task gets start/finish decisions, no two tasks may overlap, and the
/// overall project finish time is minimized.
/// </summary>
private static void SolveScheduling(IEnumerable<Task> data) {
    SolverContext context = SolverContext.GetContext();
    Model model = context.CreateModel();
    // (The original declared an extra, unused Set "TaskSet" here — removed.)
    // Makespan decision: the quantity the goal minimizes.
    var projectFinish = new Decision(Domain.IntegerNonnegative, "projectFinish");
    model.AddDecision(projectFinish);
    var taskSet = new Set(Domain.Any, "tasks");
    // Bind per-task data; "Name" is the key column, "Duration"/"ID" the values.
    var durations = new Parameter(Domain.RealNonnegative, "durations", taskSet);
    durations.SetBinding(data, "Duration", "Name");
    var ids = new Parameter(Domain.Integer, "ids", taskSet);
    ids.SetBinding(data, "ID", "Name");
    var starts = new Decision(Domain.RealNonnegative, "starts", taskSet);
    var finishs = new Decision(Domain.RealNonnegative, "finishs", taskSet);
    model.AddDecisions(starts, finishs);
    model.AddParameters(durations, ids);
    // Constraints
    // start + duration = finish
    model.AddConstraint("constraint0", Model.ForEach(taskSet, (t) => starts[t] + durations[t] == finishs[t]));
    // No overlap: for every distinct pair, one task must end before the other starts.
    model.AddConstraint("constraint1", Model.ForEach(taskSet, t =>
        Model.ForEachWhere(taskSet, t2 => Model.Or(finishs[t] < starts[t2] , starts[t] > finishs[t2]), (t2) => ids[t] != ids[t2])));
    // projectFinish after all tasks finished
    model.AddConstraint("constraint2", Model.ForEach(taskSet, t => projectFinish >= finishs[t]));
    // Goals
    model.AddGoal("goal0", GoalKind.Minimize, projectFinish);
    Solution solution = context.Solve();//new SimplexDirective());
    Report report = solution.GetReport();
    // Fixed: the original wrote #"===== report =====", which is not valid C#
    // (the verbatim-string prefix is '@').
    Console.WriteLine(@"===== report =====");
    Console.Write("{0}", report);
    Console.ReadLine();
}
Now the problem is that it takes forever to solve this (although there are only 3 tasks and 1 queue). What am I missing here, and how can I improve the solving speed?
Update
I found a solution for my problem. If you have any improvements feel free to comment. Here is my code:
// Revised model from the question's own update: tasks indexed by an explicit
// numeric Set instead of Domain.Any, which the author reports solves quickly.
SolverContext context = SolverContext.GetContext();
Model model = context.CreateModel();
// === Sets ===
// Numeric index set over the tasks: 0 .. data.Count(), step 1.
var taskSet = new Set(0,data.Count(), 1);
// === Parameters ===
var duration = new Parameter(Domain.RealNonnegative, "durations", taskSet);
var id = new Parameter(Domain.RealNonnegative, "id", taskSet);
// Bind per-task values; "ID" doubles as the key column here.
duration.SetBinding(data, "Duration", "ID");
id.SetBinding(data, "ID", "ID");
model.AddParameters(duration, id);
// === Decisions ===
var projectFinish = new Decision(Domain.RealNonnegative, "projectFinish");
var start = new Decision(Domain.RealNonnegative, "starts", taskSet);
var finish = new Decision(Domain.RealNonnegative, "finishs", taskSet);
model.AddDecisions(projectFinish, start, finish);
// === Constraints ===
// Anchor the schedule: the first task starts at time zero.
model.AddConstraint("constraint0", start[0] == 0);
// start + duration = finish
model.AddConstraint("constraint1", Model.ForEach(taskSet, (t) => start[t] + duration[t] == finish[t]));
// projectFinish after all tasks finished
model.AddConstraint("constraint2", Model.ForEach(taskSet, t => projectFinish >= finish[t]));
// not more than one task at a time
model.AddConstraint("constraint3", Model.ForEach(taskSet, t =>
Model.ForEachWhere(taskSet, t2 => Model.Or(finish[t] < start[t2], start[t] > finish[t2]), (t2) => id[t] != id[t2])));
// === Goals ===
model.AddGoal("goal0", GoalKind.Minimize, projectFinish); // minimize overall project time
// === Solve ===
context.CheckModel();
Solution solution = context.Solve();
I found a solution that works for me. I changed the taskSet
// Numeric range Set (0 .. data.Count(), step 1) instead of Domain.Any, so the
// model works over an explicit integer index domain.
var taskSet = new Set(0, data.Count(), 1);
and added a new constraint
// Anchor the schedule: the first task starts at t = 0.
// NOTE(review): the updated listing above names this decision variable
// "start", so there the same constraint appears as start[0] == 0.
model.AddConstraint("constraint", starts[0] == 0);
I updated the question
I am using Moq and EF6.
I need to query a table and if there are less than 5 dates in the table I need to add more.
So, I do that in my code:
/// <summary>
/// Replaces the contents of <paramref name="argPartyList"/> with every party
/// dated today or later, after topping the table up via InitializeDates.
/// </summary>
public void FillPartyList(ObservableCollection<Party> argPartyList)
{
    argPartyList.Clear();
    using (IPartyTrackerDataAccess dataAccess = Locator.Resolve<IPartyTrackerDataAccess>())
    {
        // Ensure at least five upcoming parties exist before reading them back.
        InitializeDates(dataAccess);
        var upcoming = dataAccess.Parties.Where(p => p.PartyDate >= DateTime.Now.Date);
        foreach (var party in upcoming)
        {
            argPartyList.Add(party);
        }
    }
}
Inside "InitializeDates" I query the database to see how many dates are there and then add enough to make 5. There are rules around creating the dates [e.g. only on Fri or Sat, etc] so this is not trivial. Then, when the method comes back I get the full list from the database and add it to the parameter.
My problem comes when I try to test this. Here's my test.
// Unit test for FillPartyList/InitializeDates over a fully mocked context.
// NOTE(review): no [TestMethod]/[Test] attribute is visible in this chunk —
// confirm one exists or the runner will skip this method.
public void TestInitializeDates()
{
//Expected data
int callCount = 0;
int addParty = 0;
int saveChanges = 0;
ObservableCollection<Party> PartyList = new ObservableCollection<Party>();
//Mock data
IQueryable<Party> partyData = new List<Party>().AsQueryable();
IQueryable<SystemSetting> settingsData = new List<SystemSetting>().AsQueryable();
//Setup mock
Mock<PartyTrackerEntities> mockContext = SetupDbContext();
SetupApplicationSettings(mockContext, settingsData);
SetupPartyDbSet(mockContext, partyData);
Locator.Register<IApplicationSettings, ApplicationSettings>();
//If I keep the following line I get an exception when querying the "database" for parties.
// NOTE(review): Setup(m => m.Parties.Add(...)) makes Moq re-mock the Parties
// property getter, which likely replaces the queryable DbSet wired up by
// SetupPartyDbSet — a plausible cause of the LINQ failure described above.
// Setting up Add on the DbSet mock returned by SetupPartyDbSet instead
// would leave the queryable setups intact.
mockContext.Setup(m => m.Parties.Add(It.IsAny<Party>())).Callback(() => addParty = callCount++);
mockContext.Setup(m => m.SaveChanges()).Callback(() => saveChanges = callCount++);
//Run tests
PartyTrackerModel localModel = new PartyTrackerModel();
localModel.FillPartyList(PartyList);
Assert.AreEqual(1, callCount);
Assert.AreEqual(0, saveChanges);
}
/// <summary>
/// Wires a Mock&lt;DbSet&lt;Party&gt;&gt; over the given backing data so that
/// LINQ queries against Context.Parties run in-memory.
/// </summary>
private Mock<DbSet<Party>> SetupPartyDbSet(Mock<PartyTrackerEntities> argDbContext, IQueryable<Party> argPartyData)
{
    //Mock objects
    Mock<DbSet<Party>> mockPartySet = new Mock<DbSet<Party>>();
    mockPartySet.As<IQueryable<Party>>().Setup(m => m.Provider).Returns(argPartyData.Provider);
    mockPartySet.As<IQueryable<Party>>().Setup(m => m.Expression).Returns(argPartyData.Expression);
    mockPartySet.As<IQueryable<Party>>().Setup(m => m.ElementType).Returns(argPartyData.ElementType);
    // Use the lazy Returns overload so EVERY enumeration gets a fresh
    // enumerator. The original passed argPartyData.GetEnumerator() directly,
    // which hands the same enumerator instance to all callers — any second
    // query over the set then sees an already-exhausted sequence.
    mockPartySet.As<IQueryable<Party>>().Setup(m => m.GetEnumerator()).Returns(() => argPartyData.GetEnumerator());
    argDbContext.Setup(m => m.Parties).Returns(mockPartySet.Object);
    return mockPartySet;
}
EDIT: I got distracted and didn't finish the question.
Everything works fine until I add the setup that increments a counter, then the Linq in InitializeDates() fails.
How do I increment the counter and leave the DbSet<> mocking properly? How do I query the "database" and still count the calls to Add? If I setup the Context.Parties.Add() to count calls the linq query blows up.
What am I doing wrong?
EDIT: Adding InitializeDates(). Note: None of this has a problem until I add the Moq.Callback.
// Ensures the database contains at least five future parties. New parties are
// only ever created on Fridays and Saturdays, alternating Fri -> Sat -> next Fri.
private static void InitializeDates(IPartyTrackerDataAccess argLocalDb)
{
IApplicationSettings localAppSettings = Locator.Resolve<IApplicationSettings>();
//using (PartyTrackerEntities LocalDb = new PartyTrackerEntities())
{
//TODO: Move this code to the PartyTrackerDb class.
//Technically this would communicate with the database and
//make sure there are at least 5 parties in the future.
var SqlPartyList = from LinqList in argLocalDb.Parties
where LinqList.PartyDate >= DateTime.Now.Date
select LinqList;
List<Party> PartyList;
try
{
// NOTE(review): OrderByDescending keeps the 5 *latest* future parties —
// confirm this ordering is intended rather than the 5 soonest.
PartyList = SqlPartyList.OrderByDescending(argOrder => argOrder.PartyDate).Take(5).ToList();
}
catch (SqlException)
{
//MessageBox.Show("Cannot connect to database. Please ensure that SQLServer is running and accessable from this machine.");
throw;
}
if (PartyList.Count < 5)
{
// Seed from yesterday when the table is empty, so the first generated
// party lands on the next Friday relative to today.
DateTime MaxPartyDate = PartyList.Count <= 0
? DateTime.Now.Date.AddDays(-1)
: PartyList.Max(argMaxDate => argMaxDate.PartyDate);
//<= 4 because PartyList.Count is zero-based so this is 5 parties.
for (int ShortDayCount = PartyList.Count; ShortDayCount <= 4; ShortDayCount++)
{
int dateOffset;
decimal coupleDonation;
decimal singleMaleDonation;
decimal singleFemaleDonation;
decimal sponsoredMaleDonation;
if (MaxPartyDate.DayOfWeek == DayOfWeek.Friday) //If the last party in the list is on a Friday, then we need to add a Saturday
{
//Get the Donation amounts
//Get Saturday Donations
coupleDonation = localAppSettings.SatCoupleDonation;
singleMaleDonation = localAppSettings.SatSingleMaleDonation;
singleFemaleDonation = localAppSettings.SatSingleFemaleDonation;
sponsoredMaleDonation = localAppSettings.SatSponsoredMaleDonation;
//Add Sat
dateOffset = 1;
}
else //Otherwise we need to add a Friday
{
//Get the Donation amounts
//Get Friday Donations
coupleDonation = localAppSettings.FriCoupleDonation;
singleMaleDonation = localAppSettings.FriSingleMaleDonation;
singleFemaleDonation = localAppSettings.FriSingleFemaleDonation;
sponsoredMaleDonation = localAppSettings.FriSponsoredMaleDonation;
if (MaxPartyDate.DayOfWeek == DayOfWeek.Saturday)
{
// Saturday -> the following Friday is 6 days ahead.
dateOffset = 6;
}
else
{
// Any other weekday: jump to the upcoming Friday ((int)DayOfWeek.Friday == 5).
dateOffset = 5 - (int)MaxPartyDate.DayOfWeek;
}
}
// Build the new party with the day-appropriate donation amounts.
Party NewParty = new Party
{
PartyDate = MaxPartyDate.AddDays(dateOffset),
CoupleDonation = coupleDonation,
SingleMaleDonation = singleMaleDonation,
SingleFemaleDonation = singleFemaleDonation,
SponsoredMaleDonation = sponsoredMaleDonation
};
argLocalDb.Parties.Add(NewParty);
// Advance the cursor so the next iteration alternates Fri/Sat correctly.
MaxPartyDate = NewParty.PartyDate;
}
argLocalDb.SaveChanges();
}
}
}
I have a list of log entries in Audit class
/// <summary>
/// One audit-trail entry: when it happened, who did it, and the action type
/// (the snippets below use "Open", "Close", and "Acknowledge").
/// </summary>
public class Audit
{
public DateTime TimeStamp { get; set; }
public string User { get; set; }
public string AuditType { get; set; }
}
so a list might look like this;
20140206 11:29:20 Owen Open
20140206 11:29:21 Owen Close
20140206 11:31:20 Owen Open
20140206 11:32:20 Owen Close
20140206 11:42:20 Owen Open
20140206 11:50:00 Owen Acknowledge
This gives us gaps of 1 second, 1 minute, and 40 seconds. So the longest time it was open was the middle pair, for 1 minute; then it was acknowledged at 11:50. I'm looking for the date pair where it was open longest, in this case 1 min.
I know I can process the list in sequentially and find the biggest gap using a TimeSpan but I figure there is a neat LINQ way to do it maybe with groups?
UPDATE It's not pretty, but this is the logic in really expanded walk
// Sequential scan for the LONGEST Open -> Close/Acknowledge interval.
// NOTE(review): assumes audits are already ordered by TimeStamp — confirm,
// since the Where() below does not sort.
var audits = notice.AuditEntries.Where(a => a.User == user);
DateTime? currentOpen = null;
DateTime? bestOpen = null;
DateTime? bestClose = null;
foreach (var audit in audits)
{
    if (audit.AuditType == "Open")
    {
        // Ignore a repeated Open while one is already pending.
        if (currentOpen.HasValue) continue;
        currentOpen = audit.TimeStamp;
    }
    if (audit.AuditType == "Close" || audit.AuditType == "Acknowledge")
    {
        if (currentOpen.HasValue)
        {
            DateTime? currentClose = audit.TimeStamp;
            if (!bestOpen.HasValue)
            {
                // First completed pair becomes the initial best.
                bestOpen = currentOpen;
                bestClose = currentClose;
            }
            else
            {
                // FIX: the original comparison was inverted — it replaced the
                // best pair when the best was LONGER than the current one,
                // i.e. it tracked the shortest gap. Replace only when the
                // current pair is longer.
                if (currentClose.Value.Subtract(currentOpen.Value) > bestClose.Value.Subtract(bestOpen.Value))
                {
                    bestOpen = currentOpen;
                    bestClose = currentClose;
                }
            }
            currentOpen = null;
        }
    }
}
I think this will do the trick:
IEnumerable<Audit> audits = ...
// Longest single Open -> Close/Acknowledge interval per user.
// Member-name fixes vs. the original: the Audit class declares "TimeStamp"
// (capital S), and the joined pair exposes its entry via ".audit", so the
// duration is end.audit.TimeStamp - start.audit.TimeStamp.
var longestAuditsByUser = audits.OrderBy(a => a.TimeStamp)
    // group by user, since presumably we don't want to match an open from one user with a close from another
    .GroupBy(a => a.User)
    .Select(userAudits =>
    {
        // first, align each audit entry with it's index within the entries for the user
        var indexedAudits = userAudits.Select((audit, index) => new { audit, index });
        // create separate sequences for open and close/ack entries
        var starts = indexedAudits.Where(t => t.audit.AuditType == "Open");
        var ends = indexedAudits.Where(t => t.audit.AuditType == "Close" || t.audit.AuditType == "Acknowledge");
        // find the "transactions" by joining starts to ends where start.index = end.index - 1
        var pairings = starts.Join(ends, s => s.index, e => e.index - 1, (start, end) => new { start, end });
        // find the longest such pairing with Max(). This will throw if no pairings were
        // found. If that can happen, consider changing this Select() to SelectMany()
        // and returning pairings.OrderByDescending(time).Take(1)
        var longestPairingTime = pairings.Max(t => t.end.audit.TimeStamp - t.start.audit.TimeStamp);
        return new { user = userAudits.Key, time = longestPairingTime };
    });
// now that we've found the longest time for each user, we can easily find the longest
// overall time as well
var longestOverall = longestAuditsByUser.Max(t => t.time);
Not tested but should work:
// NOTE(review): per user this computes the span from the EARLIEST "Open" to
// the LATEST "Close"/"Acknowledge" (Min of opens vs. Max of closes) — not the
// longest individual open/close pair — so it answers a slightly different
// question than the one asked above.
var auditGaps = audits
.GroupBy(a => a.User)
.Select(g => new
{
User = g.Key,
MinOpen = g.Where(a => a.AuditType == "Open").Select(a=> a.TimeStamp).Min(),
MaxClosed = g.Where(a => a.AuditType == "Close").Select(a=> a.TimeStamp).Max(),
MaxAcknowledge = g.Where(a => a.AuditType == "Acknowledge").Select(a=> a.TimeStamp).Max()
})
.Select(x => new
{
x.User,
LargestOpenCloseGap = x.MaxClosed - x.MinOpen,
LargestOpenAcknowledgeGap = x.MaxAcknowledge - x.MinOpen
});