This page collects typical usage examples of the Python function seed.tests.util.make_fake_snapshot. If you are wondering how make_fake_snapshot is used in practice, the curated examples here should help.
The 15 code examples of make_fake_snapshot below are ordered by popularity by default.
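Based on the call sites in the examples below, make_fake_snapshot takes an ImportFile, a dict of BuildingSnapshot field values, a source-type constant, plus optional is_canon and org keyword arguments. The sketch below illustrates roughly what such a helper might do; it is inferred from the examples, not the actual implementation in seed/tests/util.py.
# Hypothetical sketch of a make_fake_snapshot-style helper, inferred from the
# call sites in the examples below; the real helper in seed/tests/util.py may differ.
from seed.models import BuildingSnapshot, CanonicalBuilding


def make_fake_snapshot(import_file, data, source_type, is_canon=False, org=None):
    """Create a BuildingSnapshot populated from ``data`` for use in tests."""
    snapshot = BuildingSnapshot.objects.create(
        import_file=import_file,
        source_type=source_type,
        super_organization=org,
        **data  # e.g. pm_property_id, tax_lot_id, address_line_1, ...
    )
    if is_canon:
        # Canonical snapshots get a CanonicalBuilding pointing back at them.
        canon = CanonicalBuilding.objects.create(canonical_snapshot=snapshot)
        snapshot.canonical_building = canon
        snapshot.save()
    return snapshot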
Example 1: test_match_buildings
def test_match_buildings(self):
"""Good case for testing our matching system."""
bs_data = {
'pm_property_id': 1243,
'tax_lot_id': '435/422',
'property_name': 'Greenfield Complex',
'custom_id_1': 12,
'address_line_1': '555 Database LN.',
'address_line_2': '',
'city': 'Gotham City',
'postal_code': 8999,
}
# Since the change to not match duplicates, this test needs a second record
# that isn't exactly the same. Here address_line_2 has a value of 'A' rather than ''.
bs_data_2 = {
'pm_property_id': 1243,
'tax_lot_id': '435/422',
'property_name': 'Greenfield Complex',
'custom_id_1': 12,
'address_line_1': '555 Database LN.',
'address_line_2': 'A',
'city': 'Gotham City',
'postal_code': 8999,
}
# Setup mapped AS snapshot.
snapshot = util.make_fake_snapshot(
self.import_file, bs_data, ASSESSED_BS, is_canon=True,
org=self.fake_org
)
# Different file, but same ImportRecord.
# Setup mapped PM snapshot.
# Should be an identical match.
new_import_file = ImportFile.objects.create(
import_record=self.import_record,
mapping_done=True
)
new_snapshot = util.make_fake_snapshot(
new_import_file, bs_data_2, PORTFOLIO_BS, org=self.fake_org
)
tasks.match_buildings(new_import_file.pk, self.fake_user.pk)
result = BuildingSnapshot.objects.all()[0]
self.assertEqual(result.property_name, snapshot.property_name)
self.assertEqual(result.property_name, new_snapshot.property_name)
# Since these two buildings share a common ID, we match that way.
self.assertEqual(result.confidence, 0.9)
self.assertEqual(
sorted([r.pk for r in result.parents.all()]),
sorted([new_snapshot.pk, snapshot.pk])
)
self.assertGreater(AuditLog.objects.count(), 0)
self.assertEqual(
AuditLog.objects.first().action_note,
'System matched building ID.'
)
Example 2: test_get_ancestors
def test_get_ancestors(self):
"""Tests get_ancestors(building), returns all non-composite, non-raw
BuildingSnapshot instances.
"""
bs_data = {
"pm_property_id": 1243,
"tax_lot_id": "435/422",
"property_name": "Greenfield Complex",
"custom_id_1": 1243,
"address_line_1": "555 Database LN.",
"address_line_2": "",
"city": "Gotham City",
"postal_code": 8999,
}
# Setup mapped AS snapshot.
snapshot = util.make_fake_snapshot(self.import_file, bs_data, ASSESSED_BS, is_canon=True, org=self.fake_org)
# Different file, but same ImportRecord.
# Setup mapped PM snapshot.
# Should be an identical match.
new_import_file = ImportFile.objects.create(
import_record=self.import_record, raw_save_done=True, mapping_done=True
)
new_snapshot = util.make_fake_snapshot(new_import_file, bs_data, PORTFOLIO_BS, org=self.fake_org)
tasks.match_buildings(new_import_file.pk, self.fake_user.pk)
result = BuildingSnapshot.objects.filter(source_type=4)[0]
ancestor_pks = set([b.pk for b in get_ancestors(result)])
buildings = BuildingSnapshot.objects.filter(source_type__in=[2, 3]).exclude(pk=result.pk)
building_pks = set([b.pk for b in buildings])
self.assertEqual(ancestor_pks, building_pks)
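Examples 2 and 7 filter on the raw numeric source_type values 2, 3 and 4. Judging from those filters and the constants imported elsewhere on this page, ASSESSED_BS, PORTFOLIO_BS and COMPOSITE_BS appear to map to 2, 3 and 4 respectively, so the same queries can be written with named constants. This is a hedged sketch; verify the values against the seed models module.
# Same queries as Example 2, with named constants instead of magic numbers.
# Assumes ASSESSED_BS == 2, PORTFOLIO_BS == 3 and COMPOSITE_BS == 4, which is
# what the numeric filters imply; verify against the seed models module.
from seed import models as seed_models

merged = seed_models.BuildingSnapshot.objects.filter(
    source_type=seed_models.COMPOSITE_BS
).first()
parents = seed_models.BuildingSnapshot.objects.filter(
    source_type__in=[seed_models.ASSESSED_BS, seed_models.PORTFOLIO_BS]
).exclude(pk=merged.pk)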
Example 3: test_separates_system_and_possible_match_types
def test_separates_system_and_possible_match_types(self):
"""We save possible matches separately."""
bs1_data = {
"pm_property_id": 123,
"tax_lot_id": "435/422",
"property_name": "Greenfield Complex",
"custom_id_1": 1243,
"address_line_1": "555 NorthWest Databaseer Lane.",
"address_line_2": "",
"city": "Gotham City",
"postal_code": 8999,
}
# This building will have a lot less data to identify it.
bs2_data = {
"pm_property_id": 1243,
"custom_id_1": 1243,
"address_line_1": "555 Database LN.",
"city": "Gotham City",
"postal_code": 8999,
}
new_import_file = ImportFile.objects.create(import_record=self.import_record, mapping_done=True)
util.make_fake_snapshot(self.import_file, bs1_data, ASSESSED_BS, is_canon=True, org=self.fake_org)
util.make_fake_snapshot(new_import_file, bs2_data, PORTFOLIO_BS, org=self.fake_org)
tasks.match_buildings(new_import_file.pk, self.fake_user.pk)
self.assertEqual(BuildingSnapshot.objects.filter(match_type=POSSIBLE_MATCH).count(), 0)
self.assertEqual(BuildingSnapshot.objects.filter(match_type=SYSTEM_MATCH).count(), 1)
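Example 3 asserts that the automatic merge is recorded with match_type SYSTEM_MATCH and that nothing is stored as POSSIBLE_MATCH. Outside a test, the same field can be queried to separate auto-merged snapshots from candidates awaiting review; the import path below is an assumption.
# Sketch: querying snapshots by how they were matched, using the constants
# referenced in Example 3 (import path assumed).
from seed.models import BuildingSnapshot, POSSIBLE_MATCH, SYSTEM_MATCH

auto_merged = BuildingSnapshot.objects.filter(match_type=SYSTEM_MATCH)
needs_review = BuildingSnapshot.objects.filter(match_type=POSSIBLE_MATCH)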
Example 4: setUp
def setUp(self):
self.fake_user = User.objects.create(username='models_test')
self.fake_org = Organization.objects.create()
OrganizationUser.objects.create(
user=self.fake_user, organization=self.fake_org
)
self.import_record = ImportRecord.objects.create(owner=self.fake_user)
self.import_file1 = ImportFile.objects.create(
import_record=self.import_record
)
self.import_file2 = ImportFile.objects.create(
import_record=self.import_record
)
self.bs1 = util.make_fake_snapshot(
self.import_file1,
self.bs1_data,
bs_type=seed_models.ASSESSED_BS,
is_canon=True
)
self.bs2 = util.make_fake_snapshot(
self.import_file2,
self.bs2_data,
bs_type=seed_models.PORTFOLIO_BS,
is_canon=True
)
self.meter = seed_models.Meter.objects.create(
name='test meter',
energy_type=seed_models.ELECTRICITY,
energy_units=seed_models.KILOWATT_HOURS
)
self.meter.building_snapshot.add(self.bs2)
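The setUp in Example 4 references self.bs1_data and self.bs2_data, class-level dicts that are not shown on this page. They are presumably BuildingSnapshot field dictionaries along the lines of the bs_data dicts used in the other examples, for instance:
# Assumed shape of the class-level data dicts referenced by the setUp above;
# the values in the real test class may differ.
bs1_data = {
    'pm_property_id': 1243,
    'tax_lot_id': '435/422',
    'property_name': 'Greenfield Complex',
    'address_line_1': '555 Database LN.',
    'city': 'Gotham City',
    'postal_code': 8999,
}
bs2_data = {
    'pm_property_id': 9999,
    'tax_lot_id': '1231',
    'property_name': 'A Place',
    'address_line_1': '44444 Hmmm Ave.',
    'city': 'Gotham City',
    'postal_code': 8999,
}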
Example 5: test_reset_mapped_w_matching_done
def test_reset_mapped_w_matching_done(self):
"""Make sure we don't delete buildings that have been merged."""
self.import_file.matching_done = True
self.import_file.matching_progress = 100
self.import_file.save()
for x in range(10):
test_util.make_fake_snapshot(self.import_file, {}, ASSESSED_BS)
expected = {
'status': 'warning',
'message': 'Mapped buildings already merged'
}
resp = self.client.post(
reverse_lazy("seed:remap_buildings"),
data=json.dumps({
'file_id': self.import_file.pk,
}),
content_type='application/json'
)
self.assertDictEqual(json.loads(resp.content), expected)
# Verify that we haven't deleted those mapped buildings.
self.assertEqual(
BuildingSnapshot.objects.filter(
import_file=self.import_file
).count(),
10
)
Example 6: test_match_buildings
def test_match_buildings(self):
"""Good case for testing our matching system."""
bs_data = {
"pm_property_id": 1243,
"tax_lot_id": "435/422",
"property_name": "Greenfield Complex",
"custom_id_1": 12,
"address_line_1": "555 Database LN.",
"address_line_2": "",
"city": "Gotham City",
"postal_code": 8999,
}
# Setup mapped AS snapshot.
snapshot = util.make_fake_snapshot(self.import_file, bs_data, ASSESSED_BS, is_canon=True, org=self.fake_org)
# Different file, but same ImportRecord.
# Setup mapped PM snapshot.
# Should be an identical match.
new_import_file = ImportFile.objects.create(import_record=self.import_record, mapping_done=True)
new_snapshot = util.make_fake_snapshot(new_import_file, bs_data, PORTFOLIO_BS, org=self.fake_org)
tasks.match_buildings(new_import_file.pk, self.fake_user.pk)
result = BuildingSnapshot.objects.all()[0]
self.assertEqual(result.property_name, snapshot.property_name)
self.assertEqual(result.property_name, new_snapshot.property_name)
# Since these two buildings share a common ID, we match that way.
self.assertEqual(result.confidence, 0.9)
self.assertEqual(sorted([r.pk for r in result.parents.all()]), sorted([new_snapshot.pk, snapshot.pk]))
self.assertGreater(AuditLog.objects.count(), 0)
self.assertEqual(AuditLog.objects.first().action_note, "System matched building ID.")
Example 7: test_get_ancestors
def test_get_ancestors(self):
"""Tests get_ancestors(building), returns all non-composite, non-raw
BuildingSnapshot instances.
"""
bs_data = {
'pm_property_id': 1243,
'tax_lot_id': '435/422',
'property_name': 'Greenfield Complex',
'custom_id_1': 1243,
'address_line_1': '555 Database LN.',
'address_line_2': '',
'city': 'Gotham City',
'postal_code': 8999,
}
# Since we changed to not match duplicate data, make a second record with
# something slightly changed. In this case an 'A' is appended to address_line_1.
bs_data_2 = {
'pm_property_id': 1243,
'tax_lot_id': '435/422',
'property_name': 'Greenfield Complex',
'custom_id_1': 1243,
'address_line_1': '555 Database LN. A',
'address_line_2': '',
'city': 'Gotham City',
'postal_code': 8999,
}
# Setup mapped AS snapshot.
snapshot = util.make_fake_snapshot(
self.import_file, bs_data, ASSESSED_BS, is_canon=True,
org=self.fake_org
)
# Different file, but same ImportRecord.
# Setup mapped PM snapshot.
# Should be an identical match.
new_import_file = ImportFile.objects.create(
import_record=self.import_record,
raw_save_done=True,
mapping_done=True
)
new_snapshot = util.make_fake_snapshot(
new_import_file, bs_data_2, PORTFOLIO_BS, org=self.fake_org
)
tasks.match_buildings(new_import_file.pk, self.fake_user.pk)
result = BuildingSnapshot.objects.filter(source_type=4)[0]
ancestor_pks = set([b.pk for b in get_ancestors(result)])
buildings = BuildingSnapshot.objects.filter(
source_type__in=[2, 3]
).exclude(
pk=result.pk
)
building_pks = set([b.pk for b in buildings])
self.assertEqual(ancestor_pks, building_pks)
Example 8: test_delete_organization_buildings
def test_delete_organization_buildings(self):
"""tests the delete builings for an org"""
# start with the normal use case
bs1_data = {
"pm_property_id": 123,
"tax_lot_id": "435/422",
"property_name": "Greenfield Complex",
"custom_id_1": 1243,
"address_line_1": "555 NorthWest Databaseer Lane.",
"address_line_2": "",
"city": "Gotham City",
"postal_code": 8999,
}
# This building will have a lot less data to identify it.
bs2_data = {
"pm_property_id": 1243,
"custom_id_1": 1243,
"address_line_1": "555 Database LN.",
"city": "Gotham City",
"postal_code": 8999,
}
new_import_file = ImportFile.objects.create(import_record=self.import_record, mapping_done=True)
snapshot = util.make_fake_snapshot(self.import_file, bs1_data, ASSESSED_BS, is_canon=True)
snapshot.super_organization = self.fake_org
snapshot.save()
snapshot = util.make_fake_snapshot(new_import_file, bs2_data, PORTFOLIO_BS)
snapshot.super_organization = self.fake_org
snapshot.save()
tasks.match_buildings(new_import_file.pk, self.fake_user.pk)
# make one more building snapshot in a different org
fake_org_2 = Organization.objects.create()
snapshot = util.make_fake_snapshot(self.import_file, bs1_data, ASSESSED_BS, is_canon=True)
snapshot.super_organization = fake_org_2
snapshot.save()
self.assertGreater(BuildingSnapshot.objects.filter(super_organization=self.fake_org).count(), 0)
tasks.delete_organization_buildings(self.fake_org.pk)
self.assertEqual(BuildingSnapshot.objects.filter(super_organization=self.fake_org).count(), 0)
self.assertGreater(BuildingSnapshot.objects.filter(super_organization=fake_org_2).count(), 0)
# test that the CanonicalBuildings are deleted
self.assertEqual(
CanonicalBuilding.objects.filter(canonical_snapshot__super_organization=self.fake_org).count(), 0
)
# test that other orgs CanonicalBuildings are not deleted
self.assertGreater(
CanonicalBuilding.objects.filter(canonical_snapshot__super_organization=fake_org_2).count(), 0
)
Example 9: _add_additional_fake_buildings
def _add_additional_fake_buildings(self):
"""DRY up some test code below where many BSes are needed."""
self.bs3 = util.make_fake_snapshot(
self.import_file1, self.bs1_data, bs_type=seed_models.COMPOSITE_BS,
)
self.bs4 = util.make_fake_snapshot(
self.import_file1, self.bs2_data, bs_type=seed_models.COMPOSITE_BS,
)
self.bs5 = util.make_fake_snapshot(
self.import_file1, self.bs2_data, bs_type=seed_models.COMPOSITE_BS,
)
Example 10: test_reset_mapped_w_previous_matches
def test_reset_mapped_w_previous_matches(self):
"""Ensure we ignore mapped buildings with children BSes."""
# Make the raw BSes for us to make new mappings from
for x in range(10):
test_util.make_fake_snapshot(self.import_file, {}, ASSESSED_RAW)
# Simulate existing mapped BSes, which should be deleted.
for x in range(10):
test_util.make_fake_snapshot(self.import_file, {}, ASSESSED_BS)
# Set up our exceptional case: the first BS has a COMPOSITE_BS child.
child = test_util.make_fake_snapshot(None, {}, COMPOSITE_BS)
first = BuildingSnapshot.objects.filter(
import_file=self.import_file
)[:1].get()
# We add a child to our first BuildingSnapshot, which should exclude it
# from deletion and thus it should remain after a remapping is issued.
first.children.add(child)
# Here we mark all of the mapped building snapshots. These should all
# get deleted when we remap from the raw snapshots after the call to
# this function.
for item in BuildingSnapshot.objects.filter(source_type=ASSESSED_BS):
item.property_name = 'Touched'
item.save()
# Ensure we have all 10 mapped BuildingSnapshots saved.
self.assertEqual(
BuildingSnapshot.objects.filter(property_name='Touched').count(),
10
)
self.client.post(
reverse_lazy("seed:remap_buildings"),
data=json.dumps({
'file_id': self.import_file.pk,
}),
content_type='application/json'
)
# Assert that only one remains that was touched, and that it has the
# child.
self.assertEqual(
BuildingSnapshot.objects.filter(property_name='Touched').count(),
1
)
self.assertEqual(
BuildingSnapshot.objects.get(
property_name='Touched'
).children.all()[0],
child
)
Example 11: test_match_no_matches
def test_match_no_matches(self):
"""When a canonical exists, but doesn't match, we create a new one."""
bs1_data = {
'pm_property_id': 1243,
'tax_lot_id': '435/422',
'property_name': 'Greenfield Complex',
'custom_id_1': 1243,
'address_line_1': '555 Database LN.',
'address_line_2': '',
'city': 'Gotham City',
'postal_code': 8999,
}
bs2_data = {
'pm_property_id': 9999,
'tax_lot_id': '1231',
'property_name': 'A Place',
'custom_id_1': 0o000111000,
'address_line_1': '44444 Hmmm Ave.',
'address_line_2': 'Apt 4',
'city': 'Gotham City',
'postal_code': 8999,
}
snapshot = util.make_fake_snapshot(
self.import_file, bs1_data, ASSESSED_BS, is_canon=True
)
new_import_file = ImportFile.objects.create(
import_record=self.import_record,
mapping_done=True
)
new_snapshot = util.make_fake_snapshot(
new_import_file, bs2_data, PORTFOLIO_BS, org=self.fake_org
)
self.assertEqual(BuildingSnapshot.objects.all().count(), 2)
tasks.match_buildings(new_import_file.pk, self.fake_user.pk)
# i.e., we didn't create a match.
self.assertEqual(BuildingSnapshot.objects.all().count(), 2)
latest_snapshot = BuildingSnapshot.objects.get(pk=new_snapshot.pk)
# But we did create another canonical building for the unmatched bs.
self.assertNotEqual(latest_snapshot.canonical_building, None)
self.assertNotEqual(
latest_snapshot.canonical_building.pk,
snapshot.canonical_building.pk
)
self.assertEqual(latest_snapshot.confidence, None)
Example 12: test_match_no_canonical_buildings
def test_match_no_canonical_buildings(self):
"""If no canonicals exist, create, but no new BSes."""
bs1_data = {
'pm_property_id': 1243,
'tax_lot_id': '435/422',
'property_name': 'Greenfield Complex',
'custom_id_1': 1243,
'address_line_1': '555 Database LN.',
'address_line_2': '',
'city': 'Gotham City',
'postal_code': 8999,
}
# Note: no Canonical Building is created for this snapshot.
snapshot = util.make_fake_snapshot(
self.import_file, bs1_data, ASSESSED_BS, is_canon=False
)
self.import_file.mapping_done = True
self.import_file.save()
self.assertEqual(snapshot.canonical_building, None)
self.assertEqual(BuildingSnapshot.objects.all().count(), 1)
tasks.match_buildings(self.import_file.pk)
refreshed_snapshot = BuildingSnapshot.objects.get(pk=snapshot.pk)
self.assertNotEqual(refreshed_snapshot.canonical_building, None)
self.assertEqual(BuildingSnapshot.objects.all().count(), 1)
Example 13: test_match_no_canonical_buildings
def test_match_no_canonical_buildings(self):
"""If no canonicals exist, create, but no new BSes."""
bs1_data = {
"pm_property_id": 1243,
"tax_lot_id": "435/422",
"property_name": "Greenfield Complex",
"custom_id_1": 1243,
"address_line_1": "555 Database LN.",
"address_line_2": "",
"city": "Gotham City",
"postal_code": 8999,
}
# Note: no Canonical Building is created for this snapshot.
snapshot = util.make_fake_snapshot(self.import_file, bs1_data, ASSESSED_BS, is_canon=False, org=self.fake_org)
self.import_file.mapping_done = True
self.import_file.save()
self.assertEqual(snapshot.canonical_building, None)
self.assertEqual(BuildingSnapshot.objects.all().count(), 1)
tasks.match_buildings(self.import_file.pk, self.fake_user.pk)
refreshed_snapshot = BuildingSnapshot.objects.get(pk=snapshot.pk)
self.assertNotEqual(refreshed_snapshot.canonical_building, None)
self.assertEqual(BuildingSnapshot.objects.all().count(), 1)
Example 14: test_remap_buildings
def test_remap_buildings(self):
"""Test good case for resetting mapping."""
# Make raw BSes, these should stick around.
for x in range(10):
test_util.make_fake_snapshot(self.import_file, {}, ASSESSED_RAW)
# Make "mapped" BSes, these should get removed.
for x in range(10):
test_util.make_fake_snapshot(self.import_file, {}, ASSESSED_BS)
# Set import file like we're done mapping
self.import_file.mapping_done = True
self.import_file.mapping_progress = 100
self.import_file.save()
# Set cache like we're done mapping.
cache_key = decorators.get_prog_key('map_data', self.import_file.pk)
cache.set(cache_key, 100)
resp = self.client.post(
reverse_lazy("seed:remap_buildings"),
data=json.dumps({
'file_id': self.import_file.pk,
}),
content_type='application/json'
)
self.assertEqual(resp.status_code, 200)
self.assertEqual(
BuildingSnapshot.objects.filter(
import_file=self.import_file,
source_type__in=(ASSESSED_BS, PORTFOLIO_BS)
).count(),
0
)
self.assertEqual(
BuildingSnapshot.objects.filter(
import_file=self.import_file,
).count(),
10
)
self.assertEqual(cache.get(cache_key), 0)
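Example 14 shows that mapping progress lives in the cache under a key built by decorators.get_prog_key('map_data', import_file.pk), and that a successful remap resets it to 0. A small read helper might look like the sketch below; the helper name and the exact import path of get_prog_key are assumptions, only the cache key construction comes from the test.
# Hypothetical convenience wrapper around the progress cache used in Example 14.
from django.core.cache import cache
from seed.decorators import get_prog_key  # import path assumed


def get_mapping_progress(import_file_pk):
    """Return the cached 'map_data' progress (0-100) for an import file."""
    return cache.get(get_prog_key('map_data', import_file_pk), 0)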
Example 15: test_handle_id_matches_duplicate_data
def test_handle_id_matches_duplicate_data(self):
"""
Test for handle_id_matches behavior when matching duplicate data
"""
bs_data = {
'pm_property_id': "2360",
'tax_lot_id': '476/460',
'property_name': 'Garfield Complex',
'custom_id_1': "89",
'address_line_1': '12975 Database LN.',
'address_line_2': '',
'city': 'Cartoon City',
'postal_code': "54321",
}
# Setup mapped AS snapshot.
util.make_fake_snapshot(
self.import_file, bs_data, ASSESSED_BS, is_canon=True,
org=self.fake_org
)
# Different file, but same ImportRecord.
# Setup mapped PM snapshot.
# Should be an identical match.
new_import_file = ImportFile.objects.create(
import_record=self.import_record,
mapping_done=True
)
tasks.match_buildings(new_import_file.pk, self.fake_user.pk)
duplicate_import_file = ImportFile.objects.create(
import_record=self.import_record,
mapping_done=True
)
new_snapshot = util.make_fake_snapshot(
duplicate_import_file, bs_data, PORTFOLIO_BS, org=self.fake_org
)
self.assertRaises(tasks.DuplicateDataError, tasks.handle_id_matches, new_snapshot, duplicate_import_file,
self.fake_user.pk)
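Example 15 shows that handle_id_matches raises tasks.DuplicateDataError when a snapshot duplicates data that has already been matched. Code calling it directly can guard against that case as sketched below; this is a minimal illustration using the same arguments as the test, and the import path for tasks is an assumption.
# Minimal sketch of guarding a direct handle_id_matches call against duplicate
# data, based on the assertRaises check in Example 15.
from seed.data_importer import tasks  # import path assumed


def safe_handle_id_matches(snapshot, import_file, user_pk):
    try:
        return tasks.handle_id_matches(snapshot, import_file, user_pk)
    except tasks.DuplicateDataError:
        # The snapshot duplicates an already-matched record; skip it.
        return None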