Skip to content

Commit a22a663

Browse files
committed
feat(backend): import excel file with new source
1 parent 219e565 commit a22a663

File tree

19 files changed

+532
-56
lines changed

19 files changed

+532
-56
lines changed

packages/backend/src/_common/decorators/transformers/TrimDecorator.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@ import { Transform } from "class-transformer";
33
export function Trim() {
44
return Transform(({ value }) => {
55
if (typeof value === "string") {
6-
return value.trim();
6+
return value.trim().replace(/\s+/g, " ");
77
}
88
return value;
99
});
Lines changed: 48 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,48 @@
1+
import { MigrationInterface, QueryRunner } from "typeorm";
2+
import { domifaConfig } from "../config";
3+
4+
export class AutoMigration1739981242344 implements MigrationInterface {
5+
name = "AutoMigration1739981242344";
6+
7+
public async up(queryRunner: QueryRunner): Promise<void> {
8+
if (
9+
domifaConfig().envId === "prod" ||
10+
domifaConfig().envId === "preprod" ||
11+
domifaConfig().envId === "local"
12+
) {
13+
await queryRunner.query(
14+
`ALTER TABLE "open_data_places" ADD "nbDomicilies" integer`
15+
);
16+
await queryRunner.query(
17+
`ALTER TABLE "open_data_places" ADD "nbDomiciliesDomifa" integer`
18+
);
19+
await queryRunner.query(
20+
`ALTER TABLE "open_data_places" ADD "nbAttestations" integer`
21+
);
22+
await queryRunner.query(
23+
`ALTER TABLE "open_data_places" ADD "nbAttestationsDomifa" integer`
24+
);
25+
await queryRunner.query(
26+
`ALTER TABLE "open_data_places" ADD "saturation" text`
27+
);
28+
await queryRunner.query(
29+
`ALTER TABLE "open_data_places" ADD "saturationDetails" text`
30+
);
31+
}
32+
}
33+
34+
public async down(queryRunner: QueryRunner): Promise<void> {
35+
await queryRunner.query(
36+
`ALTER TABLE "open_data_places" DROP COLUMN "saturationDetails"`
37+
);
38+
await queryRunner.query(
39+
`ALTER TABLE "open_data_places" DROP COLUMN "saturation"`
40+
);
41+
await queryRunner.query(
42+
`ALTER TABLE "open_data_places" DROP COLUMN "nbDomiciliesDomifa"`
43+
);
44+
await queryRunner.query(
45+
`ALTER TABLE "open_data_places" DROP COLUMN "nbDomicilies"`
46+
);
47+
}
48+
}
Lines changed: 290 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,290 @@
1+
import {
2+
getDepartementFromCodePostal,
3+
getRegionCodeFromDepartement,
4+
} from "@domifa/common";
5+
import { MigrationInterface } from "typeorm";
6+
import { OpenDataPlaceTable } from "../database/entities/open-data-place";
7+
import { OpenDataPlace } from "../modules/open-data-places/interfaces";
8+
import {
9+
appLogger,
10+
cleanAddress,
11+
cleanCity,
12+
cleanSpaces,
13+
FileManagerService,
14+
padPostalCode,
15+
} from "../util";
16+
import * as XLSX from "xlsx";
17+
import { openDataPlaceRepository } from "../database";
18+
import { getLocation } from "../structures/services/location.service";
19+
import { domifaConfig } from "../config";
20+
import { PassThrough } from "stream";
21+
import { loadDomifaData } from "../modules/open-data-places/services/import-data/load-domifa";
22+
import { loadMssData } from "../modules/open-data-places/services/import-data/load-mss";
23+
import { loadSoliguideData } from "../modules/open-data-places/services/import-data/load-soliguide";
24+
25+
let logs: string[] = [];
26+
27+
// Override console methods to capture logs
28+
const originalConsoleLog = console.log;
29+
const originalConsoleWarn = console.warn;
30+
const originalConsoleError = console.error;
31+
32+
console.log = function (...args) {
33+
const message = args.join(" ");
34+
logs.push(`[INFO] ${new Date().toISOString()} - ${message}`);
35+
originalConsoleLog.apply(console, args);
36+
};
37+
38+
console.warn = function (...args) {
39+
const message = args.join(" ");
40+
logs.push(`[WARN] ${new Date().toISOString()} - ${message}`);
41+
originalConsoleWarn.apply(console, args);
42+
};
43+
44+
console.error = function (...args) {
45+
const message = args.join(" ");
46+
logs.push(`[ERROR] ${new Date().toISOString()} - ${message}`);
47+
originalConsoleError.apply(console, args);
48+
};
49+
50+
/**
 * One-off data migration: imports a DGCS Excel file of domiciliation
 * places from S3 into the "open_data_places" table, after refreshing the
 * DomiFa / Soliguide / MSS open-data sources.
 *
 * Relies on the module-level console overrides above to accumulate log
 * lines in `logs`, which are uploaded to S3 at the end of the run.
 */
export class ManualMigration1739981933530 implements MigrationInterface {
  private fileManager: FileManagerService;

  constructor() {
    this.fileManager = new FileManagerService();
  }

  /**
   * Flushes the accumulated `logs` buffer to S3 at the given path.
   * The buffer is cleared only after a successful upload; on failure the
   * error is reported via the original console and rethrown.
   */
  public async uploadLogsToS3(path: string) {
    try {
      const logContent = logs.join("\n");
      const stream = new PassThrough();
      stream.write(logContent);
      stream.end();

      await this.fileManager.uploadFile(path, stream);
      logs = []; // Clear logs after successful upload

      // Use the original console so this line is not re-captured into `logs`.
      originalConsoleLog(`Logs uploaded to ${path}`);
    } catch (error) {
      originalConsoleError("Error uploading logs:", error);
      throw error;
    }
  }

  /**
   * Runs the import. Only executes on prod/preprod/local environments.
   *
   * Steps: refresh external open-data sources, download `file.xlsx` from
   * the S3 bucket root, parse it row by row, geocode each address, then
   * insert or update the corresponding "dgcs" rows in the repository.
   * Rows with missing address/organisation or a failed geocoding are
   * collected in `incompletePlaces` and reported at the end.
   */
  public async up(): Promise<void> {
    if (
      domifaConfig().envId === "prod" ||
      domifaConfig().envId === "preprod" ||
      domifaConfig().envId === "local"
    ) {
      // Refresh the other open-data sources before importing the DGCS file.
      await loadDomifaData();
      await loadSoliguideData();
      await loadMssData();

      const filePath = `${domifaConfig().upload.bucketRootDir}/file.xlsx`;
      const s3Response = await this.fileManager.getObject(filePath);

      if (!s3Response?.Body) {
        throw new Error("No excel file found in S3");
      }

      // Per-run counters for the final summary.
      let newPlaces = 0;
      let existingPlaces = 0;
      let notExistingPlaces = 0;
      let updatedPlaces = 0;
      const incompletePlaces: any[] = [];

      try {
        appLogger.info("📑 Lecture du fichier Excel...");

        // Drain the S3 body stream into a single Buffer for XLSX parsing.
        const chunks: Uint8Array[] = [];
        for await (const chunk of s3Response.Body as any) {
          chunks.push(chunk);
        }
        const buffer = Buffer.concat(chunks);

        const workbook = XLSX.read(buffer, {
          type: "buffer",
          cellDates: true,
          cellNF: false,
          cellText: false,
        });

        // Map columns positionally; `range: 1` skips the header row.
        const firstSheet = workbook.Sheets[workbook.SheetNames[0]];
        const jsonData = XLSX.utils.sheet_to_json<any>(firstSheet, {
          header: [
            "nom_organisme",
            "nom_site",
            "adresse",
            "commune",
            "code_postal",
            "nb_personnes_domiciliees",
            "nb_attestations",
          ],
          range: 1,
        });

        appLogger.info(`${jsonData.length} places to import... `);

        for await (const row of jsonData) {
          // Validate the domiciled-persons count (defaults to 0 when not numeric).
          let nbDomicilies = parseInt(row.nb_personnes_domiciliees);
          if (isNaN(nbDomicilies)) {
            nbDomicilies = 0;
          }

          let nbAttestations = parseInt(row.nb_attestations);
          if (isNaN(nbAttestations)) {
            nbAttestations = 0;
          }

          // Validate the address: skip rows without a usable street/postal code.
          if (!row.adresse || row.adresse.trim() === "" || !row.code_postal) {
            incompletePlaces.push({
              ...row,
              reason: "Adresse manquante",
            });
            continue;
          }

          if (!row.nom_organisme) {
            incompletePlaces.push({
              ...row,
              reason: "Nom d'organisme manquant",
            });
            continue;
          }

          const postalCode = padPostalCode(row.code_postal?.toString());
          const departement = getDepartementFromCodePostal(postalCode);
          const city = cleanCity(row.commune);
          const address = `${cleanAddress(row.adresse)}, ${postalCode} ${city}`;
          // Geocode the full address; rows that cannot be located are skipped below.
          const position = await getLocation(address);

          const organisationName = cleanSpaces(row.nom_organisme || "");
          const siteName = cleanSpaces(row.nom_site || "");

          // Display name: "organisation - site", or whichever part is present.
          const nom = organisationName
            ? siteName
              ? `${organisationName} - ${siteName}`
              : organisationName
            : siteName;

          if (!position) {
            incompletePlaces.push({
              ...row,
              reason: `Adresse non trouvée: ${address}`,
            });
            continue;
          }

          const openDataPlace: Partial<OpenDataPlace> = {
            nom,
            adresse: cleanAddress(row.adresse),
            codePostal: postalCode,
            ville: city,
            departement,
            structureType: "asso",
            region: getRegionCodeFromDepartement(departement),
            source: "dgcs",
            // Stable slug built from departement + organisation + site names.
            uniqueId: `${departement}_${row.nom_organisme}_${row.nom_site}`
              .toLowerCase()
              .replace(/\s+/g, "_")
              .replace(/[^a-z0-9_]/g, ""),
            software: "other",
            nbDomicilies,
            nbAttestations,
            // GeoJSON order: coordinates are [longitude, latitude].
            latitude: position.coordinates[1],
            longitude: position.coordinates[0],
          };

          // Was this DGCS row already imported on a previous run?
          const existingPlace = await openDataPlaceRepository.findOneBy({
            source: "dgcs",
            uniqueId: openDataPlace.uniqueId,
          });

          // Is there a DomiFa-registered structure at the same coordinates?
          const domifaPlaceExist: OpenDataPlace =
            await openDataPlaceRepository.findExistingPlaceFromDomiFa(
              openDataPlace.latitude,
              openDataPlace.longitude
            );

          if (domifaPlaceExist) {
            console.warn(`✅ [MATCH] [${domifaPlaceExist.domifaStructureId}]`);
            console.log(
              `- DomiFa: ${domifaPlaceExist.nom} (${domifaPlaceExist.adresse})`
            );
            console.log(
              `- DGCS: ${openDataPlace.nom} (${openDataPlace.adresse})\n`
            );

            // Link the DGCS row to the matching DomiFa structure.
            existingPlaces++;
            openDataPlace.domifaStructureId =
              domifaPlaceExist.domifaStructureId;
            openDataPlace.software = "domifa";
            openDataPlace.nbDomiciliesDomifa =
              domifaPlaceExist.nbDomiciliesDomifa;

            // Also update the DomiFa-side row with the DGCS figures.
            // NOTE(review): "nbAttestationsDomifa" (added by the companion
            // schema migration) is never written anywhere in this import —
            // confirm whether that is intentional.
            await openDataPlaceRepository.update(
              {
                domifaStructureId: domifaPlaceExist.domifaStructureId,
              },
              {
                uniqueId: openDataPlace.uniqueId,
                nbDomicilies,
                nbDomiciliesDomifa: domifaPlaceExist.nbDomiciliesDomifa,
              }
            );
          } else {
            notExistingPlaces++;
            console.warn(
              `🔴 [NOT MATCH] ${openDataPlace.nom} ${openDataPlace.adresse}\n`
            );
          }

          // Upsert the DGCS row itself (insert on first run, update after).
          if (!existingPlace) {
            newPlaces++;
            await openDataPlaceRepository.save(
              new OpenDataPlaceTable(openDataPlace)
            );
          } else {
            updatedPlaces++;
            await openDataPlaceRepository.update(
              {
                source: "dgcs",
                uniqueId: openDataPlace.uniqueId,
              },
              {
                ...openDataPlace,
              }
            );
          }
        }

        // Final summary logs.
        console.log("✅ Import Excel data done");
        console.log(`🆕 ${newPlaces} places added`);
        console.log(`🔁 ${updatedPlaces} places updated`);
        console.log(`🔁 ${existingPlaces} places in DomiFa`);
        console.log(`🔁 ${notExistingPlaces} places not in DomiFa`);

        // Persist the captured console output, keyed by run date.
        const date = new Date().toISOString().split("T")[0];
        await this.uploadLogsToS3(`logs/import-${date}.log`);

        // NOTE(review): rows skipped above are reported here but logged
        // AFTER the S3 upload, so they only reach stdout — confirm intended.
        if (incompletePlaces.length > 0) {
          console.log(`⚠️ ${incompletePlaces.length} places non importées:`);
          incompletePlaces.forEach((place) => {
            console.log(
              `- ${place.nom_organisme || "Sans nom"} : ${place.reason}`
            );
          });
        }
      } catch (error) {
        appLogger.error("❌ Erreur lors de l'import:", error);
        throw error;
      }
    }
  }

  // Intentionally empty: imported rows are not rolled back by this manual
  // migration.
  public async down(): Promise<void> {}
}

packages/backend/src/_migrations/_init-db/1603812391580-pr-env-create-database.ts

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -301,6 +301,12 @@ async function createTables(queryRunner: QueryRunner) {
301301
"domifaStructureId" int4 NULL,
302302
"soliguideStructureId" int4 NULL,
303303
"mssId" text NULL,
304+
"nbDomicilies" int4 NULL,
305+
"nbDomiciliesDomifa" int4 NULL,
306+
"nbAttestations" int4 NULL,
307+
"nbAttestationsDomifa" int4 NULL,
308+
"saturation" text NULL,
309+
"saturationDetails" text NULL,
304310
CONSTRAINT "PK_f80b64cfb42753deacd8bf6d78d" PRIMARY KEY (uuid),
305311
CONSTRAINT "FK_7ee1e7a8d9441eb76ab7b4aa5a3" FOREIGN KEY ("domifaStructureId") REFERENCES public."structure"(id) ON DELETE CASCADE
306312
);

0 commit comments

Comments (0)