diff --git a/app/models/changeset.rb b/app/models/changeset.rb
index abb494de6..e6bde19a5 100644
--- a/app/models/changeset.rb
+++ b/app/models/changeset.rb
@@ -130,6 +130,8 @@ class Changeset < ApplicationRecord
def update_bbox!(bbox_update)
bbox.expand!(bbox_update)
+ raise OSM::APISizeLimitExceeded if bbox.linear_size > size_limit
+
# update active record. rails 2.1's dirty handling should take care of
# whether this object needs saving or not.
self.min_lon, self.min_lat, self.max_lon, self.max_lat = @bbox.to_a.collect(&:round) if bbox.complete?
@@ -225,4 +227,10 @@ class Changeset < ApplicationRecord
def subscribed?(user)
subscribers.exists?(user.id)
end
+
+ def size_limit
+ @size_limit ||= ActiveRecord::Base.connection.select_value(
+ "SELECT api_size_limit($1)", "api_size_limit", [user_id]
+ )
+ end
end
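
Both sides of the new comparison are in the same units: the changeset bbox holds coordinates as integers scaled by GeoRecord::SCALE (1e-7 degrees), and size_limit is the per-user value returned by the new api_size_limit database function, also in scaled units. Once the expanded box crosses the limit the upload fails with OSM::APISizeLimitExceeded, which the API reports as "Size limit exceeded" with HTTP 413 (payload_too_large), as the new tests below assert.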
diff --git a/config/settings.yml b/config/settings.yml
index ec868b651..fa7207721 100644
--- a/config/settings.yml
+++ b/config/settings.yml
@@ -73,6 +73,13 @@ max_changes_per_hour: 100000
days_to_max_changes: 7
importer_changes_per_hour: 1000000
moderator_changes_per_hour: 1000000
+# Size limit for changes
+min_size_limit: 10000000
+initial_size_limit: 30000000
+max_size_limit: 5400000000
+days_to_max_size_limit: 28
+importer_size_limit: 5400000000
+moderator_size_limit: 5400000000
# Domain for handling message replies
#messages_domain: "messages.openstreetmap.org"
# MaxMind GeoIPv2 database
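
These settings feed the api_size_limit database function defined further down: the limit ramps up quadratically from initial_size_limit to max_size_limit over days_to_max_size_limit days since the user's first change (or first change after their last block), is halved for each active report against the user, and never drops below min_size_limit; moderators and importers bypass the ramp and get their flat limits. A rough Ruby rendering of that arithmetic, for orientation only (the real logic is the PL/pgSQL below, and sketch_size_limit is just a name for this sketch):

```ruby
# Sketch of the ramp using the defaults above; values are in scaled 1e-7-degree
# units, so 10_000_000 corresponds to 1 degree of combined bbox width + height.
def sketch_size_limit(days_since_first_change, active_reports = 0)
  min, initial, max = 10_000_000, 30_000_000, 5_400_000_000
  ramp_days = 28.0

  limit = max * (days_since_first_change / ramp_days)**2
  limit = [[limit.floor, max].min, initial].max   # GREATEST(initial, LEAST(max, FLOOR(...)))
  limit /= 2**active_reports                      # halved per active report
  [[limit, max].min, min].max                     # GREATEST(min, LEAST(max, ...))
end

sketch_size_limit(0)      # => 30_000_000    (3 degrees on day one)
sketch_size_limit(7)      # => 337_500_000   (33.75 degrees after a week)
sketch_size_limit(28)     # => 5_400_000_000 (540 degrees, the maximum)
sketch_size_limit(28, 2)  # => 1_350_000_000 (two active reports halve it twice)
```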
diff --git a/db/migrate/20240618193051_api_size_limit.rb b/db/migrate/20240618193051_api_size_limit.rb
new file mode 100644
index 000000000..ce97a9f0e
--- /dev/null
+++ b/db/migrate/20240618193051_api_size_limit.rb
@@ -0,0 +1,13 @@
+class ApiSizeLimit < ActiveRecord::Migration[7.1]
+ def up
+ safety_assured do
+ execute DatabaseFunctions::API_SIZE_LIMIT
+ end
+ end
+
+ def down
+ safety_assured do
+ execute "DROP FUNCTION api_size_limit(bigint)"
+ end
+ end
+end
diff --git a/db/structure.sql b/db/structure.sql
index 2e8bdbb61..e9d5efc3f 100644
--- a/db/structure.sql
+++ b/db/structure.sql
@@ -176,6 +176,63 @@ CREATE FUNCTION public.api_rate_limit(user_id bigint) RETURNS integer
$$;
+--
+-- Name: api_size_limit(bigint); Type: FUNCTION; Schema: public; Owner: -
+--
+
+CREATE FUNCTION public.api_size_limit(user_id bigint) RETURNS bigint
+ LANGUAGE plpgsql STABLE
+ AS $$
+ DECLARE
+ min_size_limit int8 := 10000000;
+ initial_size_limit int8 := 30000000;
+ max_size_limit int8 := 5400000000;
+ days_to_max_size_limit int4 := 28;
+ importer_size_limit int8 := 5400000000;
+ moderator_size_limit int8 := 5400000000;
+ roles text[];
+ last_block timestamp without time zone;
+ first_change timestamp without time zone;
+ active_reports int4;
+ time_since_first_change double precision;
+ size_limit int8;
+ BEGIN
+ SELECT ARRAY_AGG(user_roles.role) INTO STRICT roles FROM user_roles WHERE user_roles.user_id = api_size_limit.user_id;
+
+ IF 'moderator' = ANY(roles) THEN
+ size_limit := moderator_size_limit;
+ ELSIF 'importer' = ANY(roles) THEN
+ size_limit := importer_size_limit;
+ ELSE
+ SELECT user_blocks.created_at INTO last_block FROM user_blocks WHERE user_blocks.user_id = api_size_limit.user_id ORDER BY user_blocks.created_at DESC LIMIT 1;
+
+ IF FOUND THEN
+ SELECT changesets.created_at INTO first_change FROM changesets WHERE changesets.user_id = api_size_limit.user_id AND changesets.created_at > last_block ORDER BY changesets.created_at LIMIT 1;
+ ELSE
+ SELECT changesets.created_at INTO first_change FROM changesets WHERE changesets.user_id = api_size_limit.user_id ORDER BY changesets.created_at LIMIT 1;
+ END IF;
+
+ IF NOT FOUND THEN
+ first_change := CURRENT_TIMESTAMP AT TIME ZONE 'UTC';
+ END IF;
+
+ SELECT COUNT(*) INTO STRICT active_reports
+ FROM issues INNER JOIN reports ON reports.issue_id = issues.id
+ WHERE issues.reported_user_id = api_size_limit.user_id AND issues.status = 'open' AND reports.updated_at >= COALESCE(issues.resolved_at, '1970-01-01');
+
+ time_since_first_change := EXTRACT(EPOCH FROM CURRENT_TIMESTAMP AT TIME ZONE 'UTC' - first_change);
+
+ size_limit := max_size_limit * POWER(time_since_first_change, 2) / POWER(days_to_max_size_limit * 24 * 60 * 60, 2);
+ size_limit := GREATEST(initial_size_limit, LEAST(max_size_limit, FLOOR(size_limit)));
+ size_limit := size_limit / POWER(2, active_reports);
+ size_limit := GREATEST(min_size_limit, LEAST(max_size_limit, size_limit));
+ END IF;
+
+ RETURN size_limit;
+ END;
+ $$;
+
+
SET default_tablespace = '';
SET default_table_access_method = heap;
@@ -3521,6 +3578,7 @@ INSERT INTO "schema_migrations" (version) VALUES
('23'),
('22'),
('21'),
+('20240618193051'),
('20240605134916'),
('20240405083825'),
('20240307181018'),
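
The function body above is the dumped copy in db/structure.sql, so its numeric constants are the settings.yml defaults that were in effect when the schema was dumped; the template that actually interpolates Settings lives in lib/database_functions.rb below and is what the migration executes.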
diff --git a/lib/bounding_box.rb b/lib/bounding_box.rb
index 0cc4c5fd4..462f45a9f 100644
--- a/lib/bounding_box.rb
+++ b/lib/bounding_box.rb
@@ -88,6 +88,14 @@ class BoundingBox
end
end
+ def linear_size
+ if complete?
+ (max_lon - min_lon) + (max_lat - min_lat)
+ else
+ 0
+ end
+ end
+
def complete?
to_a.exclude?(nil)
end
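
linear_size is simply the width plus the height of the box, in whatever units the coordinates are stored in, and 0 while the box is incomplete; for changeset bboxes those units are the scaled integers the size-limit settings are expressed in. A small illustration, assuming GeoRecord::SCALE is 10_000_000 as elsewhere in the codebase:

```ruby
# Illustrative only, not part of the patch.
BoundingBox.new(nil, nil, nil, nil).linear_size          # => 0 (incomplete box)

scale = 10_000_000                                       # GeoRecord::SCALE
BoundingBox.new(0, 0, 2 * scale, 1 * scale).linear_size  # 2 degrees wide, 1 degree tall
# => 3 * scale, i.e. 30_000_000 -- the default initial_size_limit
```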
diff --git a/lib/database_functions.rb b/lib/database_functions.rb
index f9e09ac70..d58736fdf 100644
--- a/lib/database_functions.rb
+++ b/lib/database_functions.rb
@@ -55,4 +55,58 @@ module DatabaseFunctions
END;
$$ LANGUAGE plpgsql STABLE;
).freeze
+
+ API_SIZE_LIMIT = %(
+ CREATE OR REPLACE FUNCTION api_size_limit(user_id int8)
+ RETURNS int8
+ AS $$
+ DECLARE
+ min_size_limit int8 := #{Settings.min_size_limit};
+ initial_size_limit int8 := #{Settings.initial_size_limit};
+ max_size_limit int8 := #{Settings.max_size_limit};
+ days_to_max_size_limit int4 := #{Settings.days_to_max_size_limit};
+ importer_size_limit int8 := #{Settings.importer_size_limit};
+ moderator_size_limit int8 := #{Settings.moderator_size_limit};
+ roles text[];
+ last_block timestamp without time zone;
+ first_change timestamp without time zone;
+ active_reports int4;
+ time_since_first_change double precision;
+ size_limit int8;
+ BEGIN
+ SELECT ARRAY_AGG(user_roles.role) INTO STRICT roles FROM user_roles WHERE user_roles.user_id = api_size_limit.user_id;
+
+ IF 'moderator' = ANY(roles) THEN
+ size_limit := moderator_size_limit;
+ ELSIF 'importer' = ANY(roles) THEN
+ size_limit := importer_size_limit;
+ ELSE
+ SELECT user_blocks.created_at INTO last_block FROM user_blocks WHERE user_blocks.user_id = api_size_limit.user_id ORDER BY user_blocks.created_at DESC LIMIT 1;
+
+ IF FOUND THEN
+ SELECT changesets.created_at INTO first_change FROM changesets WHERE changesets.user_id = api_size_limit.user_id AND changesets.created_at > last_block ORDER BY changesets.created_at LIMIT 1;
+ ELSE
+ SELECT changesets.created_at INTO first_change FROM changesets WHERE changesets.user_id = api_size_limit.user_id ORDER BY changesets.created_at LIMIT 1;
+ END IF;
+
+ IF NOT FOUND THEN
+ first_change := CURRENT_TIMESTAMP AT TIME ZONE 'UTC';
+ END IF;
+
+ SELECT COUNT(*) INTO STRICT active_reports
+ FROM issues INNER JOIN reports ON reports.issue_id = issues.id
+ WHERE issues.reported_user_id = api_size_limit.user_id AND issues.status = 'open' AND reports.updated_at >= COALESCE(issues.resolved_at, '1970-01-01');
+
+ time_since_first_change := EXTRACT(EPOCH FROM CURRENT_TIMESTAMP AT TIME ZONE 'UTC' - first_change);
+
+ size_limit := max_size_limit * POWER(time_since_first_change, 2) / POWER(days_to_max_size_limit * 24 * 60 * 60, 2);
+ size_limit := GREATEST(initial_size_limit, LEAST(max_size_limit, FLOOR(size_limit)));
+ size_limit := size_limit / POWER(2, active_reports);
+ size_limit := GREATEST(min_size_limit, LEAST(max_size_limit, size_limit));
+ END IF;
+
+ RETURN size_limit;
+ END;
+ $$ LANGUAGE plpgsql STABLE;
+ ).freeze
end
diff --git a/lib/osm.rb b/lib/osm.rb
index a0fcef8b9..dd273418e 100644
--- a/lib/osm.rb
+++ b/lib/osm.rb
@@ -364,6 +364,17 @@ module OSM
end
end
+ # Raised when a size limit is exceeded
+ class APISizeLimitExceeded < APIError
+ def initialize
+ super("Size limit exceeded")
+ end
+
+ def status
+ :payload_too_large
+ end
+ end
+
# Helper methods for going to/from mercator and lat/lng.
class Mercator
include Math
diff --git a/lib/tasks/update_functions.rake b/lib/tasks/update_functions.rake
index 605d3c9ad..f10257215 100644
--- a/lib/tasks/update_functions.rake
+++ b/lib/tasks/update_functions.rake
@@ -2,5 +2,6 @@ namespace :db do
desc "Update database function definitions"
task :update_functions => :environment do
ActiveRecord::Base.connection.execute DatabaseFunctions::API_RATE_LIMIT
+ ActiveRecord::Base.connection.execute DatabaseFunctions::API_SIZE_LIMIT
end
end
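
Note that the Settings values are interpolated into the SQL when DatabaseFunctions::API_SIZE_LIMIT is built, so the limits are baked into the database function at creation time. The new migration installs it once; after changing any of the size-limit settings, re-running this task (e.g. rails db:update_functions) re-executes both function definitions against the database to pick up the new values.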
diff --git a/test/controllers/api/changesets_controller_test.rb b/test/controllers/api/changesets_controller_test.rb
index be1033ea6..17a9ad9d3 100644
--- a/test/controllers/api/changesets_controller_test.rb
+++ b/test/controllers/api/changesets_controller_test.rb
@@ -749,7 +749,11 @@ module Api
end
def test_upload_large_changeset
- auth_header = basic_authorization_header create(:user).email, "test"
+ user = create(:user)
+ auth_header = basic_authorization_header user.email, "test"
+
+ # create an old changeset to ensure we have the maximum rate and size limits
+ create(:changeset, :user => user, :created_at => Time.now.utc - 28.days)
# create a changeset
put changeset_create_path, :params => "", :headers => auth_header
@@ -1048,14 +1052,14 @@ module Api
diff = <<~CHANGESET
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
CHANGESET
@@ -1329,9 +1333,9 @@ module Api
diff = <<~CHANGESET
-
-
-
+
+
+
@@ -1352,9 +1356,9 @@ module Api
diff = <<~CHANGESET
-
-
-
+
+
+
@@ -1384,9 +1388,9 @@ module Api
diff = <<~CHANGESET
-
-
-
+
+
+
@@ -1407,9 +1411,9 @@ module Api
diff = <<~CHANGESET
-
-
-
+
+
+
@@ -1478,14 +1482,14 @@ module Api
changeset_id = @response.body.to_i
old_way = create(:way)
- create(:way_node, :way => old_way, :node => create(:node, :lat => 1, :lon => 1))
+ create(:way_node, :way => old_way, :node => create(:node, :lat => 0.1, :lon => 0.1))
diff = XML::Document.new
diff.root = XML::Node.new "osmChange"
modify = XML::Node.new "modify"
xml_old_way = xml_node_for_way(old_way)
nd_ref = XML::Node.new "nd"
- nd_ref["ref"] = create(:node, :lat => 3, :lon => 3).id.to_s
+ nd_ref["ref"] = create(:node, :lat => 0.3, :lon => 0.3).id.to_s
xml_old_way << nd_ref
xml_old_way["changeset"] = changeset_id.to_s
modify << xml_old_way
@@ -1498,10 +1502,10 @@ module Api
# check the bbox
changeset = Changeset.find(changeset_id)
- assert_equal 1 * GeoRecord::SCALE, changeset.min_lon, "min_lon should be 1 degree"
- assert_equal 3 * GeoRecord::SCALE, changeset.max_lon, "max_lon should be 3 degrees"
- assert_equal 1 * GeoRecord::SCALE, changeset.min_lat, "min_lat should be 1 degree"
- assert_equal 3 * GeoRecord::SCALE, changeset.max_lat, "max_lat should be 3 degrees"
+ assert_equal 0.1 * GeoRecord::SCALE, changeset.min_lon, "min_lon should be 0.1 degree"
+ assert_equal 0.3 * GeoRecord::SCALE, changeset.max_lon, "max_lon should be 0.3 degrees"
+ assert_equal 0.1 * GeoRecord::SCALE, changeset.min_lat, "min_lat should be 0.1 degree"
+ assert_equal 0.3 * GeoRecord::SCALE, changeset.max_lat, "max_lat should be 0.3 degrees"
end
##
@@ -1798,6 +1802,71 @@ module Api
assert_response :too_many_requests, "upload did not hit rate limit"
end
+ ##
+ # test initial size limit
+ def test_upload_initial_size_limit
+ # create a user
+ user = create(:user)
+
+ # create a changeset that puts us at the initial size limit
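+ # (1 degree of latitude + 2 degrees of longitude = 30,000,000 scaled units, the initial_size_limit)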
+ changeset = create(:changeset, :user => user,
+ :min_lat => (-0.5 * GeoRecord::SCALE).round, :min_lon => (0.5 * GeoRecord::SCALE).round,
+ :max_lat => (0.5 * GeoRecord::SCALE).round, :max_lon => (2.5 * GeoRecord::SCALE).round)
+
+ # create authentication header
+ auth_header = basic_authorization_header user.email, "test"
+
+ # simple diff to create a node
+ diff = <<~CHANGESET
+
+
+
+
+
+
+
+
+ CHANGESET
+
+ # upload it
+ post changeset_upload_path(changeset), :params => diff, :headers => auth_header
+ assert_response :payload_too_large, "upload did not hit size limit"
+ end
+
+ ##
+ # test size limit after one week
+ def test_upload_week_size_limit
+ # create a user
+ user = create(:user)
+
+ # create a changeset to establish our initial edit time
+ create(:changeset, :user => user, :created_at => Time.now.utc - 7.days)
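+ # (7 of the 28 ramp-up days gives a size limit of max_size_limit * (7/28)^2 = 337,500,000 scaled units, i.e. 33.75 degrees)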
+
+ # create a changeset that puts us near the initial size limit
+ changeset = create(:changeset, :user => user,
+ :min_lat => (-0.5 * GeoRecord::SCALE).round, :min_lon => (0.5 * GeoRecord::SCALE).round,
+ :max_lat => (0.5 * GeoRecord::SCALE).round, :max_lon => (2.5 * GeoRecord::SCALE).round)
+
+ # create authentication header
+ auth_header = basic_authorization_header user.email, "test"
+
+ # simple diff to create a node way and relation using placeholders
+ diff = <<~CHANGESET
+
+
+
+
+
+
+
+
+ CHANGESET
+
+ # upload it
+ post changeset_upload_path(changeset), :params => diff, :headers => auth_header
+ assert_response :payload_too_large, "upload did not hit size limit"
+ end
+
##
# when we make some simple changes we get the same changes back from the
# diff download.
@@ -1829,14 +1898,14 @@ module Api
diff = <<~CHANGESET
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
CHANGESET
@@ -1935,15 +2004,15 @@ module Api
diff = <<~CHANGESET
-
+
-
-
-
+
+
+
-
+
@@ -2034,7 +2103,7 @@ module Api
# FIXME: This should really be moved to an integration test due to the with_controller
def test_changeset_bbox
way = create(:way)
- create(:way_node, :way => way, :node => create(:node, :lat => 3, :lon => 3))
+ create(:way_node, :way => way, :node => create(:node, :lat => 0.3, :lon => 0.3))
auth_header = basic_authorization_header create(:user).email, "test"
@@ -2046,7 +2115,7 @@ module Api
# add a single node to it
with_controller(NodesController.new) do
- xml = ""
+ xml = ""
put node_create_path, :params => xml, :headers => auth_header
assert_response :success, "Couldn't create node."
end
@@ -2054,14 +2123,14 @@ module Api
# get the bounding box back from the changeset
get changeset_show_path(:id => changeset_id)
assert_response :success, "Couldn't read back changeset."
- assert_select "osm>changeset[min_lon='1.0000000']", 1
- assert_select "osm>changeset[max_lon='1.0000000']", 1
- assert_select "osm>changeset[min_lat='2.0000000']", 1
- assert_select "osm>changeset[max_lat='2.0000000']", 1
+ assert_select "osm>changeset[min_lon='0.1000000']", 1
+ assert_select "osm>changeset[max_lon='0.1000000']", 1
+ assert_select "osm>changeset[min_lat='0.2000000']", 1
+ assert_select "osm>changeset[max_lat='0.2000000']", 1
# add another node to it
with_controller(NodesController.new) do
- xml = ""
+ xml = ""
put node_create_path, :params => xml, :headers => auth_header
assert_response :success, "Couldn't create second node."
end
@@ -2069,10 +2138,10 @@ module Api
# get the bounding box back from the changeset
get changeset_show_path(:id => changeset_id)
assert_response :success, "Couldn't read back changeset for the second time."
- assert_select "osm>changeset[min_lon='1.0000000']", 1
- assert_select "osm>changeset[max_lon='2.0000000']", 1
- assert_select "osm>changeset[min_lat='1.0000000']", 1
- assert_select "osm>changeset[max_lat='2.0000000']", 1
+ assert_select "osm>changeset[min_lon='0.1000000']", 1
+ assert_select "osm>changeset[max_lon='0.2000000']", 1
+ assert_select "osm>changeset[min_lat='0.1000000']", 1
+ assert_select "osm>changeset[max_lat='0.2000000']", 1
- # add (delete) a way to it, which contains a point at (3,3)
+ # add (delete) a way to it, which contains a point at (0.3,0.3)
with_controller(WaysController.new) do
@@ -2084,10 +2153,10 @@ module Api
# get the bounding box back from the changeset
get changeset_show_path(:id => changeset_id)
assert_response :success, "Couldn't read back changeset for the third time."
- assert_select "osm>changeset[min_lon='1.0000000']", 1
- assert_select "osm>changeset[max_lon='3.0000000']", 1
- assert_select "osm>changeset[min_lat='1.0000000']", 1
- assert_select "osm>changeset[max_lat='3.0000000']", 1
+ assert_select "osm>changeset[min_lon='0.1000000']", 1
+ assert_select "osm>changeset[max_lon='0.3000000']", 1
+ assert_select "osm>changeset[min_lat='0.1000000']", 1
+ assert_select "osm>changeset[max_lat='0.3000000']", 1
end
##
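
The coordinate changes in this file and in the test files below all move fixture nodes from whole-degree or random global coordinates down to tenths of a degree; presumably this keeps the incidental changeset bounding boxes created by these tests well inside the new size limits, so unrelated assertions do not start failing with 413 responses.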
diff --git a/test/controllers/api/old_nodes_controller_test.rb b/test/controllers/api/old_nodes_controller_test.rb
index badc7301b..7855079cc 100644
--- a/test/controllers/api/old_nodes_controller_test.rb
+++ b/test/controllers/api/old_nodes_controller_test.rb
@@ -40,9 +40,9 @@ module Api
# FIXME: Move this test to being an integration test since it spans multiple controllers
def test_version
private_user = create(:user, :data_public => false)
- private_node = create(:node, :with_history, :version => 4, :changeset => create(:changeset, :user => private_user))
+ private_node = create(:node, :with_history, :version => 4, :lat => 0, :lon => 0, :changeset => create(:changeset, :user => private_user))
user = create(:user)
- node = create(:node, :with_history, :version => 4, :changeset => create(:changeset, :user => user))
+ node = create(:node, :with_history, :version => 4, :lat => 0, :lon => 0, :changeset => create(:changeset, :user => user))
create_list(:node_tag, 2, :node => node)
# Ensure that the current tags are propagated to the history too
propagate_tags(node, node.old_nodes.last)
@@ -65,8 +65,8 @@ module Api
# randomly move the node about
3.times do
# move the node somewhere else
- xml_node["lat"] = precision((rand * 180) - 90).to_s
- xml_node["lon"] = precision((rand * 360) - 180).to_s
+ xml_node["lat"] = precision(rand - 0.5).to_s
+ xml_node["lon"] = precision(rand - 0.5).to_s
with_controller(NodesController.new) do
put api_node_path(nodeid), :params => xml_doc.to_s, :headers => auth_header
assert_response :forbidden, "Should have rejected node update"
@@ -113,8 +113,8 @@ module Api
# randomly move the node about
3.times do
# move the node somewhere else
- xml_node["lat"] = precision((rand * 180) - 90).to_s
- xml_node["lon"] = precision((rand * 360) - 180).to_s
+ xml_node["lat"] = precision(rand - 0.5).to_s
+ xml_node["lon"] = precision(rand - 0.5).to_s
with_controller(NodesController.new) do
put api_node_path(nodeid), :params => xml_doc.to_s, :headers => auth_header
assert_response :success
diff --git a/test/controllers/api/relations_controller_test.rb b/test/controllers/api/relations_controller_test.rb
index e69dcdfe2..982df1dd7 100644
--- a/test/controllers/api/relations_controller_test.rb
+++ b/test/controllers/api/relations_controller_test.rb
@@ -641,15 +641,15 @@ module Api
# box of all its members into the changeset.
def test_tag_modify_bounding_box
relation = create(:relation)
- node1 = create(:node, :lat => 3, :lon => 3)
- node2 = create(:node, :lat => 5, :lon => 5)
+ node1 = create(:node, :lat => 0.3, :lon => 0.3)
+ node2 = create(:node, :lat => 0.5, :lon => 0.5)
way = create(:way)
create(:way_node, :way => way, :node => node1)
create(:relation_member, :relation => relation, :member => way)
create(:relation_member, :relation => relation, :member => node2)
# the relation contains node1 and node2 (node1
- # indirectly via the way), so the bbox should be [3,3,5,5].
- check_changeset_modify(BoundingBox.new(3, 3, 5, 5)) do |changeset_id, auth_header|
+ # indirectly via the way), so the bbox should be [0.3,0.3,0.5,0.5].
+ check_changeset_modify(BoundingBox.new(0.3, 0.3, 0.5, 0.5)) do |changeset_id, auth_header|
# add a tag to an existing relation
relation_xml = xml_for_relation(relation)
relation_element = relation_xml.find("//osm/relation").first
@@ -879,14 +879,14 @@ module Api
# still technically valid.
def test_remove_all_members
relation = create(:relation)
- node1 = create(:node, :lat => 3, :lon => 3)
- node2 = create(:node, :lat => 5, :lon => 5)
+ node1 = create(:node, :lat => 0.3, :lon => 0.3)
+ node2 = create(:node, :lat => 0.5, :lon => 0.5)
way = create(:way)
create(:way_node, :way => way, :node => node1)
create(:relation_member, :relation => relation, :member => way)
create(:relation_member, :relation => relation, :member => node2)
- check_changeset_modify(BoundingBox.new(3, 3, 5, 5)) do |changeset_id, auth_header|
+ check_changeset_modify(BoundingBox.new(0.3, 0.3, 0.5, 0.5)) do |changeset_id, auth_header|
relation_xml = xml_for_relation(relation)
relation_xml
.find("//osm/relation/member")
diff --git a/test/factories/node.rb b/test/factories/node.rb
index 392d67a84..bfb8b16fe 100644
--- a/test/factories/node.rb
+++ b/test/factories/node.rb
@@ -16,7 +16,7 @@ FactoryBot.define do
trait :with_history do
after(:create) do |node, _evaluator|
(1..node.version).each do |n|
- create(:old_node, :node_id => node.id, :version => n, :changeset => node.changeset)
+ create(:old_node, :node_id => node.id, :version => n, :latitude => node.latitude, :longitude => node.longitude, :changeset => node.changeset)
end
# For deleted nodes, make sure the most recent old_node is also deleted.
diff --git a/test/models/node_test.rb b/test/models/node_test.rb
index 94cb5ec81..9efe9a9c9 100644
--- a/test/models/node_test.rb
+++ b/test/models/node_test.rb
@@ -98,8 +98,9 @@ class NodeTest < ActiveSupport::TestCase
end
def test_update
- node = create(:node)
- create(:old_node, :node_id => node.id, :version => 1)
+ node = create(:node, :lat => 12.6543, :lon => 65.1234)
+ create(:old_node, :node_id => node.id, :version => 1, :lat => node.lat, :lon => node.lon)
+
node_template = Node.find(node.id)
assert_not_nil node_template