Add support for per-user limits on the size of changes that can be made

This commit is contained in:
Tom Hughes 2024-06-18 20:35:09 +01:00
parent 80ccf1d4c2
commit f61ac2586f
5 changed files with 133 additions and 0 deletions

View file

@@ -73,6 +73,13 @@ max_changes_per_hour: 100000
days_to_max_changes: 7
importer_changes_per_hour: 1000000
moderator_changes_per_hour: 1000000
# Per-user size limits for changes (min/initial/max, ramp-up period, and role overrides)
min_size_limit: 10000000
initial_size_limit: 30000000
max_size_limit: 5400000000
days_to_max_size_limit: 28
importer_size_limit: 5400000000
moderator_size_limit: 5400000000
# Domain for handling message replies
#messages_domain: "messages.openstreetmap.org"
# MaxMind GeoIPv2 database

View file

@@ -0,0 +1,13 @@
# Migration (version 20240618193051, per the schema_migrations insert below)
# that installs the api_size_limit(bigint) database function used to cap the
# size of changes a user may upload.
class ApiSizeLimit < ActiveRecord::Migration[7.1]
# Install (CREATE OR REPLACE) the function from its canonical definition in
# DatabaseFunctions::API_SIZE_LIMIT, so the migration and the rake task share
# one source of truth.
def up
# NOTE(review): safety_assured looks like the strong_migrations escape hatch
# for raw DDL — confirm the gem is in use.
safety_assured do
execute DatabaseFunctions::API_SIZE_LIMIT
end
end
# Remove the function again; signature must match the CREATE exactly.
def down
safety_assured do
execute "DROP FUNCTION api_size_limit(bigint)"
end
end
end

View file

@@ -176,6 +176,63 @@ CREATE FUNCTION public.api_rate_limit(user_id bigint) RETURNS integer
$$;
--
-- Name: api_size_limit(bigint); Type: FUNCTION; Schema: public; Owner: -
--
-- Per-user size limit for API changes. Moderators and importers get fixed
-- limits; everyone else gets a limit that grows quadratically with time since
-- their first change (restarting after their most recent block), halved once
-- per open report against them, and clamped to [min_size_limit, max_size_limit].
-- NOTE: this dump must stay in sync with DatabaseFunctions::API_SIZE_LIMIT,
-- where the constants come from the Settings values shown in settings.yml.
CREATE FUNCTION public.api_size_limit(user_id bigint) RETURNS bigint
LANGUAGE plpgsql STABLE
AS $$
DECLARE
min_size_limit int8 := 10000000;
initial_size_limit int8 := 30000000;
max_size_limit int8 := 5400000000;
days_to_max_size_limit int4 := 28;
importer_size_limit int8 := 5400000000;
moderator_size_limit int8 := 5400000000;
roles text[];
last_block timestamp without time zone;
first_change timestamp without time zone;
active_reports int4;
time_since_first_change double precision;
size_limit int8;
BEGIN
-- ARRAY_AGG over zero rows yields NULL, so users with no roles fall through
-- both ANY() checks into the ELSE branch.
SELECT ARRAY_AGG(user_roles.role) INTO STRICT roles FROM user_roles WHERE user_roles.user_id = api_size_limit.user_id;
IF 'moderator' = ANY(roles) THEN
size_limit := moderator_size_limit;
ELSIF 'importer' = ANY(roles) THEN
size_limit := importer_size_limit;
ELSE
-- The ramp restarts from the first change made after the latest block.
SELECT user_blocks.created_at INTO last_block FROM user_blocks WHERE user_blocks.user_id = api_size_limit.user_id ORDER BY user_blocks.created_at DESC LIMIT 1;
IF FOUND THEN
SELECT changesets.created_at INTO first_change FROM changesets WHERE changesets.user_id = api_size_limit.user_id AND changesets.created_at > last_block ORDER BY changesets.created_at LIMIT 1;
ELSE
SELECT changesets.created_at INTO first_change FROM changesets WHERE changesets.user_id = api_size_limit.user_id ORDER BY changesets.created_at LIMIT 1;
END IF;
-- No changesets yet: treat "now" as the first change (zero elapsed time).
IF NOT FOUND THEN
first_change := CURRENT_TIMESTAMP AT TIME ZONE 'UTC';
END IF;
-- Reports on open issues, excluding those filed before the last resolution.
SELECT COUNT(*) INTO STRICT active_reports
FROM issues INNER JOIN reports ON reports.issue_id = issues.id
WHERE issues.reported_user_id = api_size_limit.user_id AND issues.status = 'open' AND reports.updated_at >= COALESCE(issues.resolved_at, '1970-01-01');
time_since_first_change := EXTRACT(EPOCH FROM CURRENT_TIMESTAMP AT TIME ZONE 'UTC' - first_change);
-- Quadratic ramp: reaches max_size_limit after days_to_max_size_limit days.
size_limit := max_size_limit * POWER(time_since_first_change, 2) / POWER(days_to_max_size_limit * 24 * 60 * 60, 2);
size_limit := GREATEST(initial_size_limit, LEAST(max_size_limit, FLOOR(size_limit)));
-- Halve once per active report, then clamp to the floor.
size_limit := size_limit / POWER(2, active_reports);
size_limit := GREATEST(min_size_limit, LEAST(max_size_limit, size_limit));
END IF;
RETURN size_limit;
END;
$$;
SET default_tablespace = '';
SET default_table_access_method = heap;
@@ -3521,6 +3578,7 @@ INSERT INTO "schema_migrations" (version) VALUES
('23'),
('22'),
('21'),
('20240618193051'),
('20240605134916'),
('20240405083825'),
('20240307181018'),

View file

@@ -55,4 +55,58 @@ module DatabaseFunctions
END;
$$ LANGUAGE plpgsql STABLE;
).freeze
# Canonical definition of the api_size_limit(int8) PL/pgSQL function, with the
# numeric limits interpolated from Settings (settings.yml) at load time.
# Executed by the ApiSizeLimit migration and by the db:update_functions rake
# task; the copy in db/structure.sql must stay behaviourally identical.
# Logic: moderators/importers get fixed limits; other users get a limit that
# ramps quadratically from their first change (after any latest block) up to
# max_size_limit over days_to_max_size_limit days, is halved per active open
# report against them, and is clamped between min and max.
# NOTE: the %( ... ) string below is raw SQL — do not edit comments into it;
# keep it in lockstep with the structure.sql dump.
API_SIZE_LIMIT = %(
CREATE OR REPLACE FUNCTION api_size_limit(user_id int8)
RETURNS int8
AS $$
DECLARE
min_size_limit int8 := #{Settings.min_size_limit};
initial_size_limit int8 := #{Settings.initial_size_limit};
max_size_limit int8 := #{Settings.max_size_limit};
days_to_max_size_limit int4 := #{Settings.days_to_max_size_limit};
importer_size_limit int8 := #{Settings.importer_size_limit};
moderator_size_limit int8 := #{Settings.moderator_size_limit};
roles text[];
last_block timestamp without time zone;
first_change timestamp without time zone;
active_reports int4;
time_since_first_change double precision;
size_limit int8;
BEGIN
SELECT ARRAY_AGG(user_roles.role) INTO STRICT roles FROM user_roles WHERE user_roles.user_id = api_size_limit.user_id;
IF 'moderator' = ANY(roles) THEN
size_limit := moderator_size_limit;
ELSIF 'importer' = ANY(roles) THEN
size_limit := importer_size_limit;
ELSE
SELECT user_blocks.created_at INTO last_block FROM user_blocks WHERE user_blocks.user_id = api_size_limit.user_id ORDER BY user_blocks.created_at DESC LIMIT 1;
IF FOUND THEN
SELECT changesets.created_at INTO first_change FROM changesets WHERE changesets.user_id = api_size_limit.user_id AND changesets.created_at > last_block ORDER BY changesets.created_at LIMIT 1;
ELSE
SELECT changesets.created_at INTO first_change FROM changesets WHERE changesets.user_id = api_size_limit.user_id ORDER BY changesets.created_at LIMIT 1;
END IF;
IF NOT FOUND THEN
first_change := CURRENT_TIMESTAMP AT TIME ZONE 'UTC';
END IF;
SELECT COUNT(*) INTO STRICT active_reports
FROM issues INNER JOIN reports ON reports.issue_id = issues.id
WHERE issues.reported_user_id = api_size_limit.user_id AND issues.status = 'open' AND reports.updated_at >= COALESCE(issues.resolved_at, '1970-01-01');
time_since_first_change := EXTRACT(EPOCH FROM CURRENT_TIMESTAMP AT TIME ZONE 'UTC' - first_change);
size_limit := max_size_limit * POWER(time_since_first_change, 2) / POWER(days_to_max_size_limit * 24 * 60 * 60, 2);
size_limit := GREATEST(initial_size_limit, LEAST(max_size_limit, FLOOR(size_limit)));
size_limit := size_limit / POWER(2, active_reports);
size_limit := GREATEST(min_size_limit, LEAST(max_size_limit, size_limit));
END IF;
RETURN size_limit;
END;
$$ LANGUAGE plpgsql STABLE;
).freeze
end

View file

@@ -2,5 +2,6 @@ namespace :db do
desc "Update database function definitions"
# Re-installs both PL/pgSQL functions from their canonical Ruby definitions,
# e.g. after changing the Settings values interpolated into them.
task :update_functions => :environment do
ActiveRecord::Base.connection.execute DatabaseFunctions::API_RATE_LIMIT
ActiveRecord::Base.connection.execute DatabaseFunctions::API_SIZE_LIMIT
end
end