Skip to content
This repository was archived by the owner on Feb 11, 2026. It is now read-only.
8 changes: 8 additions & 0 deletions CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,14 @@ cd nextjs
npm i
```

# Cloning the Production Database
```bash
pg_dump -x -O {pg_connection_string} > meep.psql
docker compose down -v
docker compose up db
psql postgres://postgres:password@127.0.0.1:53333/postgres < meep.psql
```

# Troubleshooting
## Bitwarden
If the Bitwarden CLI isn't working for you, you can download the `.env` files manually, using Bitwarden web:
Expand Down
109 changes: 6 additions & 103 deletions hub/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -1952,103 +1952,6 @@ async def create_import_record(record):
for column_types in all_column_types:
merge_column_types(combined_column_types, column_types)
await self.update_field_definition_types(combined_column_types)
elif (
self.geography_column
and self.geography_column_type == self.GeographyTypes.WARD
):

async def create_import_record(record):
    """Geocode one record by ward GSS code and upsert it as GenericData.

    Looks up the ward (area_type WD23) matching the GSS code found in the
    configured geography column, derives postcode data from the ward's
    centroid via the postcodesIOFromPoint loader, then creates/updates the
    corresponding GenericData row keyed by the record's id.

    Returns:
        The ``column_types`` mapping popped from the structured data, so the
        caller can merge field-definition types across all imported records.
    """
    structured_data = get_update_data(self, record)
    column_types = structured_data.pop("column_types")
    gss = self.get_record_field(record, self.geography_column)
    ward = await Area.objects.filter(
        area_type__code="WD23",
        gss=gss,
    ).afirst()
    if ward:
        coord = ward.point.centroid
        postcode_data = await loaders["postcodesIOFromPoint"].load(coord)
    else:
        logger.warning(
            f"Could not find ward for record {self.get_record_id(record)} and gss {gss}"
        )
        postcode_data = None

    update_data = {
        **structured_data,
        "area": ward,
        # Bug fix: ward may be None when no WD23 area matches the GSS code;
        # the unguarded `ward.point` raised AttributeError on that path,
        # defeating the warning-and-continue handling above.
        "point": ward.point if ward else None,
        "postcode_data": postcode_data,
    }

    await GenericData.objects.aupdate_or_create(
        data_type=data_type,
        data=self.get_record_id(record),
        defaults=update_data,
    )

    return column_types

all_column_types = await asyncio.gather(
*[create_import_record(record) for record in data]
)
combined_column_types = {}
for column_types in all_column_types:
merge_column_types(combined_column_types, column_types)
await self.update_field_definition_types(combined_column_types)

logger.info(f"Imported {len(data)} records from {self}")
elif (
self.geography_column
and self.geography_column_type == self.GeographyTypes.OUTPUT_AREA
):

async def create_import_record(record):
    """Geocode one record by output-area GSS code and upsert it as GenericData.

    Looks up the output area (area_type OA21) matching the GSS code found in
    the configured geography column, resolves postcode data for that area via
    the geocoding config, then creates/updates the corresponding GenericData
    row keyed by the record's id.

    Returns:
        The ``column_types`` mapping popped from the structured data, so the
        caller can merge field-definition types across all imported records.
    """
    structured_data = get_update_data(self, record)
    column_types = structured_data.pop("column_types")
    gss = self.get_record_field(record, self.geography_column)
    output_area = await Area.objects.filter(
        area_type__code="OA21",
        gss=gss,
    ).afirst()
    if output_area:
        postcode_data = await geocoding_config.get_postcode_data_for_area(
            output_area, loaders, []
        )
        if postcode_data:
            # override lat/lng based output_area with known output area
            postcode_data.output_area = output_area.name
            postcode_data.codes.output_area = gss
    else:
        logger.warning(
            f"Could not find output area for record {self.get_record_id(record)} and gss {gss}"
        )
        postcode_data = None

    update_data = {
        **structured_data,
        "area": output_area,
        # Bug fix: output_area may be None when no OA21 area matches the GSS
        # code; the unguarded `output_area.point` raised AttributeError on
        # that path, defeating the warning-and-continue handling above.
        "point": output_area.point if output_area else None,
        "postcode_data": postcode_data,
    }

    await GenericData.objects.aupdate_or_create(
        data_type=data_type,
        data=self.get_record_id(record),
        defaults=update_data,
    )

    return column_types

all_column_types = await asyncio.gather(
*[create_import_record(record) for record in data]
)
combined_column_types = {}
for column_types in all_column_types:
merge_column_types(combined_column_types, column_types)
await self.update_field_definition_types(combined_column_types)

logger.info(f"Imported {len(data)} records from {self}")

elif (
self.geography_column
Expand Down Expand Up @@ -2749,13 +2652,13 @@ async def deferred_import_all(
priority_enum = None
try:
match member_count:
case (
_
) if member_count < settings.SUPER_QUICK_IMPORT_ROW_COUNT_THRESHOLD:
case _ if (
member_count < settings.SUPER_QUICK_IMPORT_ROW_COUNT_THRESHOLD
):
priority_enum = ProcrastinateQueuePriority.SUPER_QUICK
case (
_
) if member_count < settings.MEDIUM_PRIORITY_IMPORT_ROW_COUNT_THRESHOLD:
case _ if (
member_count < settings.MEDIUM_PRIORITY_IMPORT_ROW_COUNT_THRESHOLD
):
priority_enum = ProcrastinateQueuePriority.MEDIUM
case _ if member_count < settings.LARGE_IMPORT_ROW_COUNT_THRESHOLD:
priority_enum = ProcrastinateQueuePriority.SLOW
Expand Down
1 change: 1 addition & 0 deletions nextjs/src/__generated__/graphql.ts

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 2 additions & 1 deletion nextjs/src/__generated__/zodSchema.ts

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Original file line number Diff line number Diff line change
Expand Up @@ -58,136 +58,10 @@ import toSpaceCase from 'to-space-case'
import { CREATE_MAP_REPORT } from '../../../reports/ReportList/CreateReportCard'
import ExternalDataSourceBadCredentials from './ExternalDataSourceBadCredentials'
import { ManageSourceSharing } from './ManageSourceSharing'
import { getGeocodingConfigFromGeographyColumn } from './getGeocodingConfigFromGeographyColumn'
import { DELETE_UPDATE_CONFIG, GET_UPDATE_CONFIG } from './graphql-queries'
import importData, { cancelImport } from './importData'

// Query for the data-source inspection page: fetches the full external data
// source record — per-CRM connection details, last/current import and update
// job progress, geocoding configuration, CRM field mappings, per-area-type
// imported-data counts, field definitions, sharing permissions, and owning
// organisation.
// NOTE(review): `fieldDefinitions(refreshFromSource: true)` presumably
// re-reads the upstream CRM schema on every fetch — confirm that is intended
// for a page that polls.
const GET_UPDATE_CONFIG = gql`
  query ExternalDataSourceInspectPage($ID: ID!) {
    externalDataSource(id: $ID) {
      id
      name
      dataType
      remoteUrl
      crmType
      connectionDetails {
        ... on AirtableSource {
          apiKey
          baseId
          tableId
        }
        ... on MailchimpSource {
          apiKey
          listId
        }
        ... on ActionNetworkSource {
          apiKey
          groupSlug
        }
        ... on TicketTailorSource {
          apiKey
        }
      }
      lastImportJob {
        id
        lastEventAt
        status
      }
      lastUpdateJob {
        id
        lastEventAt
        status
      }
      autoImportEnabled
      autoUpdateEnabled
      hasWebhooks
      allowUpdates
      automatedWebhooks
      webhookUrl
      webhookHealthcheck
      geographyColumn
      geographyColumnType
      geocodingConfig
      usesValidGeocodingConfig
      postcodeField
      firstNameField
      lastNameField
      fullNameField
      emailField
      phoneField
      addressField
      titleField
      descriptionField
      imageField
      startTimeField
      endTimeField
      publicUrlField
      socialUrlField
      canDisplayPointField
      isImportScheduled
      importProgress {
        id
        hasForecast
        status
        total
        succeeded
        estimatedFinishTime
        actualFinishTime
        inQueue
        numberOfJobsAheadInQueue
        sendEmail
      }
      isUpdateScheduled
      updateProgress {
        id
        hasForecast
        status
        total
        succeeded
        estimatedFinishTime
        actualFinishTime
        inQueue
        numberOfJobsAheadInQueue
        sendEmail
      }
      importedDataCount
      importedDataGeocodingRate
      regionCount: importedDataCountOfAreas(
        analyticalAreaType: european_electoral_region
      )
      constituencyCount: importedDataCountOfAreas(
        analyticalAreaType: parliamentary_constituency
      )
      ladCount: importedDataCountOfAreas(analyticalAreaType: admin_district)
      wardCount: importedDataCountOfAreas(analyticalAreaType: admin_ward)
      fieldDefinitions(refreshFromSource: true) {
        label
        value
        description
        editable
      }
      updateMapping {
        source
        sourcePath
        destinationColumn
      }
      sharingPermissions {
        id
      }
      organisation {
        id
        name
      }
    }
  }
`

// Deletes the external data source record itself. NOTE(review): despite the
// operation name "DeleteUpdateConfig", this calls deleteExternalDataSource —
// it removes the whole source, not just its update configuration.
const DELETE_UPDATE_CONFIG = gql`
  mutation DeleteUpdateConfig($id: String!) {
    deleteExternalDataSource(data: { id: $id }) {
      id
    }
  }
`

export default function InspectExternalDataSource({
externalDataSourceId,
name,
Expand Down Expand Up @@ -216,13 +90,46 @@ export default function InspectExternalDataSource({
notifyOnNetworkStatusChange: true,
})

// Begin polling on successful datasource query
useEffect(() => {
if (data?.externalDataSource) {
// Add geocoding config if not present
if (
!!data?.externalDataSource.geographyColumnType &&
!!data?.externalDataSource.geographyColumn
) {
updateMutation({
geocodingConfig: getGeocodingConfigFromGeographyColumn(
data.externalDataSource.geographyColumn,
data.externalDataSource.geographyColumnType
),
})
}

// Begin polling on successful datasource query
const status = data?.externalDataSource?.importProgress?.status
if (
data?.externalDataSource &&
status &&
!['cancelled', 'failed', 'succeeded'].includes(status)
) {
setPollInterval(5000)
}
}, [data])

// Stop polling on unmount
useEffect(() => {
return () => {
setPollInterval(undefined)
}
}, [])

// Stop polling when job is no longer in progress
useEffect(() => {
const status = data?.externalDataSource?.importProgress?.status
if (status && ['cancelled', 'failed', 'succeeded'].includes(status)) {
setPollInterval(undefined)
}
}, [data?.externalDataSource?.importProgress?.status])

const notFound = !loading && !data?.externalDataSource
if (error || notFound) {
return (
Expand Down Expand Up @@ -529,7 +436,6 @@ export default function InspectExternalDataSource({
<UpdateExternalDataSourceFields
crmType={source.crmType}
fieldDefinitions={source.fieldDefinitions}
allowGeocodingConfigChange={!source.usesValidGeocodingConfig}
initialData={{
geographyColumn: source.geographyColumn,
geographyColumnType: source.geographyColumnType,
Expand Down Expand Up @@ -845,9 +751,7 @@ export default function InspectExternalDataSource({
)

function UpdateGecodingConfig({
externalDataSourceId,
geocodingConfig,
fieldDefinitions,
onSubmit,
}: {
externalDataSourceId: string
Expand Down Expand Up @@ -904,6 +808,15 @@ export default function InspectExternalDataSource({
e?: React.BaseSyntheticEvent<object, any, any> | undefined
) {
e?.preventDefault()

// Update the geocoding config
if (!!data?.geographyColumnType && !!data?.geographyColumn) {
data.geocodingConfig = getGeocodingConfigFromGeographyColumn(
data.geographyColumn,
data.geographyColumnType
)
}

const update = client.mutate<
UpdateExternalDataSourceMutation,
UpdateExternalDataSourceMutationVariables
Expand Down
Loading
Loading