From 4ca8cc06c7f1dea9e99fb3977ae0ca2179c684e6 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Rene=20Ka=C3=9Feb=C3=B6hmer?=
Date: Fri, 2 May 2025 14:23:27 +0200
Subject: [PATCH] several fixes; Location Van, AssociatedLocation

---
 .../FillLocationId.py                         | 24 +++++++-----
 .../AssociatedLocation_beforetransform.csv    |  7 ++++
 .../AssociatedLocation_test.csv               |  3 --
 .../11_upsert_associated_location/export.json | 21 ++++++----
 prepared_steps/1_extract_data/queries.json    |  4 +-
 .../2_transform_via_script/TransformScript.py | 39 +++++++++++++------
 .../6_upsert_child_location/command.txt       |  2 +-
 .../6_upsert_child_location/export.json       |  5 ++-
 .../export.json                               |  7 +++-
 prepared_steps/8_upsert_assets/export.json    |  5 +--
 10 files changed, 78 insertions(+), 39 deletions(-)
 create mode 100644 prepared_steps/11_upsert_associated_location/AssociatedLocation_beforetransform.csv
 delete mode 100644 prepared_steps/11_upsert_associated_location/AssociatedLocation_test.csv

diff --git a/prepared_steps/10_fill_location_id_for_associated_location/FillLocationId.py b/prepared_steps/10_fill_location_id_for_associated_location/FillLocationId.py
index 877aa99..f3bf669 100644
--- a/prepared_steps/10_fill_location_id_for_associated_location/FillLocationId.py
+++ b/prepared_steps/10_fill_location_id_for_associated_location/FillLocationId.py
@@ -1,10 +1,10 @@
 import pandas as pd
 
-read_df_al = pd.read_csv('../10_upsert_associated_location/AssociatedLocation.csv', header=0, keep_default_na=False, dtype=str)
-read_df_l = pd.read_csv('../5_upsert_child_location/target/Location_insert_target.csv', header=0, keep_default_na=False, dtype=str)
+read_df_al = pd.read_csv('../11_upsert_associated_location/AssociatedLocation_beforetransform.csv', header=0, keep_default_na=False, dtype=str)
+read_df_l = pd.read_csv('../6_upsert_child_location/target/Location_insert_target.csv', header=0, keep_default_na=False, dtype=str)
 
 #Type,ActiveFrom,ActiveTo,ParentRecordId,PKey__c
-reindex_columns_al = ['Type','ActiveFrom','ActiveTo','ParentRecordId','PKey__c']
+reindex_columns_al = ['Type','ActiveFrom','ActiveTo','ParentRecordId','Location.PKey__c']
 #Errors,ExternalReference,Id,IsInventoryLocation,IsMobile,LocationType,Name,PKey__c
 reindex_columns_l = ['Errors','ExternalReference','Id','IsInventoryLocation','IsMobile','LocationType','Name','PKey__c']
@@ -12,20 +12,26 @@ reindex_columns_l = ['Errors','ExternalReference','Id','IsInventoryLocation','Is
 df_al = read_df_al.reindex(reindex_columns_al, axis=1)
 df_l = read_df_l.reindex(reindex_columns_l, axis=1)
 
+print(df_al)
+print(df_l)
+
 # Merge df_al with df_l including Id abse on PKey__c
 merged_df_al = pd.merge(df_al,
-                        df_l[['Id', 'ExternalReference']],
-                        left_on='PKey__c',
-                        right_on='ExternalReference',
+                        df_l[['Id', 'PKey__c']],
+                        left_on='Location.PKey__c',
+                        right_on='PKey__c',
                         how='left')
 
 #drop External Reference
-merged_df_al = merged_df_al.drop('ExternalReference', axis=1)
+merged_df_al = merged_df_al.drop('Location.PKey__c', axis=1)
+merged_df_al = merged_df_al.drop('PKey__c', axis=1)
+
 #Rename columns
-merged_df_al.columns = ['Type','ActiveFrom','ActiveTo','ParentRecordId','PKey__c','LocationId']
+merged_df_al.columns = ['Type','ActiveFrom','ActiveTo','ParentRecordId', 'LocationId']
+print(merged_df_al)
 
 #safe csv
-merged_df_al.to_csv('../10_upsert_associated_location/AssociatedLocation.csv', index=False)
+merged_df_al.to_csv('../11_upsert_associated_location/AssociatedLocation.csv', index=False)
diff --git a/prepared_steps/11_upsert_associated_location/AssociatedLocation_beforetransform.csv b/prepared_steps/11_upsert_associated_location/AssociatedLocation_beforetransform.csv
new file mode 100644
index 0000000..7956e7d
--- /dev/null
+++ b/prepared_steps/11_upsert_associated_location/AssociatedLocation_beforetransform.csv
@@ -0,0 +1,7 @@
+Type,ActiveFrom,ActiveTo,ParentRecordId,Location.PKey__c
+Installer (installation),2017-12-01,,001w000001kFZwUAAW,"Plantsoen de Pas 12, 6601 BK WIJCHEN, NL;;;"
+Owner,2017-12-03,,0011r00001kXYKXAA4,"Plantsoen de Pas 12, 6601 BK WIJCHEN, NL;;;"
+Installer (installation),2017-12-01,,001w000001kFZwUAAW,"Munnikhofsestraat 5, 6691 HG GENDT, NL;;;"
+Owner,2017-12-03,,0011r00001kXYKcAAO,"Munnikhofsestraat 5, 6691 HG GENDT, NL;;;"
+Installer (installation),2017-12-03,,0012000000eCRx4AAG,"Newtonweg 20, 6101 WX ECHT, NL;;;"
+Owner,2017-12-03,,0011r00001kXYNMAA4,"Newtonweg 20, 6101 WX ECHT, NL;;;"
diff --git a/prepared_steps/11_upsert_associated_location/AssociatedLocation_test.csv b/prepared_steps/11_upsert_associated_location/AssociatedLocation_test.csv
deleted file mode 100644
index 3c4c65c..0000000
--- a/prepared_steps/11_upsert_associated_location/AssociatedLocation_test.csv
+++ /dev/null
@@ -1,3 +0,0 @@
-Type,ActiveFrom,ActiveTo,ParentRecordId,PKey__c,LocationId
-Installer (installation),2018-03-27,,0019Z00000fmCAYQA2,"Mr. P.J.M. Aalberselaan 19, 1181 XJ AMSTELVEEN, NL;;;",1319Z000000ij1RQAQ
-Owner,2018-04-06,,0019Z00000fmCAYQA2,"Mr. P.J.M. Aalberselaan 19, 1181 XJ AMSTELVEEN, NL;;;",1319Z000000ij1RQAQ
diff --git a/prepared_steps/11_upsert_associated_location/export.json b/prepared_steps/11_upsert_associated_location/export.json
index 40dfcd4..620fec6 100644
--- a/prepared_steps/11_upsert_associated_location/export.json
+++ b/prepared_steps/11_upsert_associated_location/export.json
@@ -1,16 +1,23 @@
 {
-    "allOrNone": true,
+    "allOrNone": true,
     "excludeIdsFromCSVFiles": true,
-    "objects": [
-{
-    "query": "SELECT Type,ActiveFrom,ActiveTo,ParentRecordId$Account,PKey__c, LocationId FROM AssociatedLocation",
+    {
+        "query": "SELECT Id FROM Location WHERE ParentLocationId != null AND ParentLocation.VisitorAddress.CountryCode = 'NL'",
+        "operation": "Insert",
+        "master": false
+    },
+    {
+        "query": "SELECT Id FROM Account WHERE BillingCountry__c != 'NL'",
+        "operation": "Insert",
+        "master": false
+    },
+    {
+        "query": "SELECT Type,ActiveFrom,ActiveTo,ParentRecordId$Account, LocationId FROM AssociatedLocation",
         "operation": "Insert",
-        "externalId": "PKey__c",
         "useSourceCSVFile": true,
         "master": true,
-        "useValuesMapping": true,
-        "excludedFields": ["PKey__c"]
+        "useValuesMapping": true
     }
     ]
 }
\ No newline at end of file
diff --git a/prepared_steps/1_extract_data/queries.json b/prepared_steps/1_extract_data/queries.json
index 02a5f97..74477de 100644
--- a/prepared_steps/1_extract_data/queries.json
+++ b/prepared_steps/1_extract_data/queries.json
@@ -23,7 +23,7 @@
         "query": "SELECT Id, Main_Product_Group__c, Family, MaterialType__c, Name, Product_Code__c, ProductCode, EAN_Product_Code__c FROM Product2"
     },{
         "sobject": "SCInstalledBaseRole__c",
-        "query": "SELECT Id, InstalledBaseLocation__c, Role__c, ValidFrom__c, ValidTo__c, Account__c FROM SCInstalledBaseRole__c WHERE InstalledBaseLocation__r.Country__c = '{country}' AND InstalledBaseLocation__c = 'a1B1r0000099EsfEAE'"
+        "query": "SELECT Id, InstalledBaseLocation__c, Role__c, ValidFrom__c, ValidTo__c, Account__c FROM SCInstalledBaseRole__c WHERE InstalledBaseLocation__r.Country__c = '{country}' AND InstalledBaseLocation__c IN ('a1B1r000008XXEdEAO', 'a1B1r000008XXEiEAO', 'a1B1r000008XXH8EAO')"
     },{
"SCPriceList__c", "query": "SELECT Id, Name, Brand__r.Name, Country__c from SCPriceList__c WHERE Country__c = '{country}' AND PriceList__c != null" @@ -32,7 +32,7 @@ "query": "SELECT Id, Article__r.Name, Price__c, PriceUnit__c, Pricelist__c, ValidFrom__c, ValidTo__c, Article__r.EANCode__c, Pricelist__r.Brand__r.Name, Pricelist__r.Country__c FROM SCPriceListItem__c WHERE Country__c = '{country}' AND PriceList__c != null AND (ValidTo__c >= TODAY OR ValidFrom__c >= TODAY) AND Article__c = 'a0Gw000001R9slpEAB'" },{ "sobject": "SCResourceAssignment__c", - "query": "SELECT Id, ValidTo__c, ValidFrom__c, Country__c, City__c, PostalCode__c, District__c, Street__c, HouseNo__c, Extension__c, FlatNo__c, Floor__c, GeoY__c, GeoX__c, Resource__c, Resource__r.Employee__r.Name, Stock__c FROM SCResourceAssignment__c WHERE Country__c = '{country}' AND ValidTo__c >= TODAY" + "query": "SELECT Id, ValidTo__c, ValidFrom__c, Country__c, City__c, PostalCode__c, District__c, Street__c, HouseNo__c, Extension__c, FlatNo__c, Floor__c, GeoY__c, GeoX__c, Resource__c, Resource__r.Employee__r.Name, Stock__c, Stock__r.ID2__c FROM SCResourceAssignment__c WHERE Country__c = '{country}' AND ValidTo__c >= TODAY" } ] } \ No newline at end of file diff --git a/prepared_steps/2_transform_via_script/TransformScript.py b/prepared_steps/2_transform_via_script/TransformScript.py index c800890..3200f09 100644 --- a/prepared_steps/2_transform_via_script/TransformScript.py +++ b/prepared_steps/2_transform_via_script/TransformScript.py @@ -42,7 +42,7 @@ reindex_columns_product2 = ['Id','Main_Product_Group__c','Family','MaterialType_ reindex_columns_ibr = ['Id', 'InstalledBaseLocation__c', 'Role__c', 'ValidFrom__c', 'ValidTo__c', 'Account__c'] reindex_columns_pricelist = ['Id', 'Name', 'Brand__r.Name', 'Country__c'] reindex_columns_pricelistitem = ['Id', 'Article__r.Name', 'Article__r.EANCode__c', 'Price__c', 'PriceUnit__c', 'Pricelist__c', 'ValidFrom__c', 'ValidTo__c', 'Pricelist__r.Brand__r.Name', 'Pricelist__r.Country__c'] -reindex_columns_resourceassignment = ['Id', 'ValidTo__c', 'ValidFrom__c', 'Country__c', 'City__c', 'PostalCode__c', 'District__c', 'Street__c', 'HouseNo__c', 'Extension__c', 'FlatNo__c', 'Floor__c', 'GeoY__c', 'GeoX__c', 'Resource__c', 'Resource__r.Employee__r.Name', 'Stock__c'] +reindex_columns_resourceassignment = ['Id', 'ValidTo__c', 'ValidFrom__c', 'Country__c', 'City__c', 'PostalCode__c', 'District__c', 'Street__c', 'HouseNo__c', 'Extension__c', 'FlatNo__c', 'Floor__c', 'GeoY__c', 'GeoX__c', 'Resource__c', 'Resource__r.Employee__r.Name', 'Stock__c', 'Stock__r.ID2__c'] reindex_columns_address_iot = ['Id', 'Country', 'CountryCode', 'Street', 'City', 'ParentId', 'PostalCode'] reindex_columns_location_iot = ['Id', 'Name'] @@ -223,7 +223,7 @@ merged_df_ib = merged_df_ib.drop('Id_x', axis=1) merged_df_ib = merged_df_ib.drop('ProductUnitClass__c', axis=1) merged_df_ib = merged_df_ib.drop('ProductUnitType__c', axis=1) -merged_df_ib.columns = ['Name', 'FSL_1st_Ignition_Date__c', 'InstallDate', 'Kind_of_Energy__c', 'Product2.Product_Code__c', 'SerialNumber', 'Serialnumber_Exception__c', 'Location.ExternalReference'] +merged_df_ib.columns = ['Name', 'FSL_1st_Ignition_Date__c', 'InstallDate', 'Kind_of_Energy__c', 'Product2.Product_Code__c', 'SerialNumber', 'Serialnumber_Exception__c', 'Location.PKey__c'] #merged_df_ib = merged_df_ib.drop('Main_Product_Group__c', axis=1) # assign Main_Product_Group__c based on product2 records merged_df_ib = pd.merge(merged_df_ib, @@ -241,8 +241,11 @@ merged_df_ib = 
 
 ## 5. SCInstalledBaseRole__c.csv
+print(df_ibr)
+print(child_df)
+
 df_ibr = pd.merge(df_ibr,
-                  child_df[['Id', 'ExternalReference']],
+                  child_df[['Id', 'PKey__c']],
                   left_on='InstalledBaseLocation__c',
                   right_on='Id',
                   how='left')
@@ -253,9 +256,9 @@ df_ibr = df_ibr.drop('Id_x', axis=1)
 df_ibr = df_ibr.drop('Id_y', axis=1)
 df_ibr = df_ibr.drop('InstalledBaseLocation__c', axis=1)
 
-print(df_ibr.columns)
+print(df_ibr)
 
-df_ibr.columns = ['Type', 'ActiveFrom', 'ActiveTo', 'ParentRecordId', 'Location.ExternalReference']
+df_ibr.columns = ['Type', 'ActiveFrom', 'ActiveTo', 'ParentRecordId', 'Location.PKey__c']
 
 #remove kind_of_energy__c and kind_of_installation if field dependency to main product group is not correct
 # Create the mapping dictionary
@@ -345,7 +348,7 @@ df_pricelistitem.columns = columns_pricebookentry
 ##--------------------------------------------------------------------------##
 
 # Create van locations from resource assignments
-van_columns = ['Id', 'City__c', 'Country__c', 'Street__c', 'HouseNo__c', 'PostalCode__c', 'GeoY__c', 'GeoX__c', 'Resource__r.Employee__r.Name']
+van_columns = ['Id', 'City__c', 'Country__c', 'Street__c', 'HouseNo__c', 'PostalCode__c', 'GeoY__c', 'GeoX__c', 'Resource__r.Employee__r.Name', 'Stock__r.ID2__c']
 van_address_columns = ['City__c', 'Country__c', 'Street__c', 'HouseNo__c', 'PostalCode__c', 'GeoY__c', 'GeoX__c', 'ExternalReference']
 
 van_df = df_resourceassignment[van_columns].copy()
@@ -362,6 +365,20 @@ van_df['ExternalReference'] = (
     van_df['Country__c'].astype(str)
 )
 
+# Process Stock__r.ID2__c field
+def extract_last_4_digits(value):
+    if pd.isna(value) or not isinstance(value, str):
+        return '1000'
+    try:
+        last_4 = value[-4:]
+        if last_4.isdigit():
+            return last_4
+        return '1000'
+    except:
+        return '1000'
+
+van_df['Stock__r.ID2__c'] = van_df['Stock__r.ID2__c'].apply(extract_last_4_digits)
+
 # Add van records to child_df
 if not van_df.empty:
     van_records = pd.DataFrame({
@@ -371,10 +388,13 @@ if not van_df.empty:
         'DuplicateCheck__c': 'false',
         'IsInventoryLocation': 'true',
         'IsMobile': 'true',
-        'LocationType': 'Van'
+        'LocationType': 'Van',
+        'Location_Number__c': van_df['Stock__r.ID2__c']
     })
     child_df = pd.concat([child_df, van_records], ignore_index=True)
 
+child_df = child_df.drop('Id', axis=1)
+
 # Address and Parent Location for vans
 van_address_df = van_df[van_address_columns].copy()
 van_parenlocation_df = van_df[['City__c', 'Country__c', 'Street__c', 'PostalCode__c']].copy()
@@ -383,10 +403,7 @@ van_parenlocation_df = van_df[['City__c', 'Country__c', 'Street__c', 'PostalCode
 
 van_address_df['Country'] = van_address_df['Country__c'].map(country_mapping)
 van_address_df['Parent.Name'] = van_address_df['ExternalReference']
 van_address_df = van_address_df.drop('HouseNo__c', axis=1)
-print(van_address_df)
 van_address_df.columns = ['City', 'CountryCode', 'Street', 'PostalCode', 'Latitude', 'Longitude', 'PKey__c', 'Country', 'Parent.Name']
-print(van_address_df)
-
 # Only add addresses that don't already exist
 new_addresses = van_address_df[~van_address_df['PKey__c'].isin(address_df['PKey__c'])]
@@ -415,7 +432,7 @@ address_df.to_csv('../4_upsert_address_and_parent_location/Address.csv', index=F
 parent_df.to_csv('../4_upsert_address_and_parent_location/Location.csv', index=False)
 child_df.to_csv('../6_upsert_child_location/Location.csv', index=False)
 merged_df_ib.to_csv('../8_upsert_assets/Asset.csv', index=False)
-df_ibr.to_csv('../11_upsert_associated_location/AssociatedLocation.csv', index=False)
+df_ibr.to_csv('../11_upsert_associated_location/AssociatedLocation_beforetransform.csv', index=False)
 df_pricelist.to_csv('../12_insert_pricebook2_and_pricebookentries/Pricebook2.csv', index=False)
 df_pricelistitem.to_csv('../12_insert_pricebook2_and_pricebookentries/PricebookEntry.csv', index=False)
 merged_df_location_iot.to_csv('../3_update_address_and_location_data_for_migration/Location.csv', index=False)
diff --git a/prepared_steps/6_upsert_child_location/command.txt b/prepared_steps/6_upsert_child_location/command.txt
index 4c22459..0389686 100644
--- a/prepared_steps/6_upsert_child_location/command.txt
+++ b/prepared_steps/6_upsert_child_location/command.txt
@@ -1 +1 @@
-sf sfdmu run --sourceusername csvfile --targetusername rene.kasseboehmer@vaillant.de.devrene
\ No newline at end of file
+sf sfdmu run --sourceusername rene.kasseboehmer@vaillant.de.devrene --targetusername rene.kasseboehmer@vaillant.de.devrene
\ No newline at end of file
diff --git a/prepared_steps/6_upsert_child_location/export.json b/prepared_steps/6_upsert_child_location/export.json
index 74b7047..6091fa9 100644
--- a/prepared_steps/6_upsert_child_location/export.json
+++ b/prepared_steps/6_upsert_child_location/export.json
@@ -3,8 +3,9 @@
     "excludeIdsFromCSVFiles": true,
     "objects": [
     {
-        "query": "SELECT Id,Extension__c,Flat__c,Floor__c,ExternalReference,Name,PKey__c,DuplicateCheck__c,IsInventoryLocation,IsMobile,LocationType FROM Location",
-        "operation": "Insert",
+        "query": "SELECT Extension__c,Flat__c,Floor__c,ExternalReference,Name,PKey__c,DuplicateCheck__c,IsInventoryLocation,IsMobile,LocationType, Location_Number__c FROM Location",
+        "operation": "Upsert",
+        "externalId": "PKey__c",
         "useSourceCSVFile": true
     }
 ]
diff --git a/prepared_steps/7_build_lookup_child_to_parent_location/export.json b/prepared_steps/7_build_lookup_child_to_parent_location/export.json
index 0216ec7..c0b90cd 100644
--- a/prepared_steps/7_build_lookup_child_to_parent_location/export.json
+++ b/prepared_steps/7_build_lookup_child_to_parent_location/export.json
@@ -2,8 +2,13 @@
     "allOrNone": true,
     "objects": [
     {
-        "query": "SELECT Id, Name, ExternalReference, ParentLocationId FROM Location WHERE ExternalReference != null AND ExternalReference LIKE '%NL' AND ParentLocationId = null AND VisitorAddressId = null",
+        "query": "SELECT Id, ParentId, PKey__c FROM Address WHERE CountryCode = 'NL' AND PKey__c != null",
+        "operation": "Readonly",
+        "externalId": "PKey__c"
+    },{
+        "query": "SELECT ExternalReference FROM Location WHERE ExternalReference LIKE '%NL' AND ParentLocationId = null AND VisitorAddressId = null",
         "operation": "Update",
+        "externalId": "ExternalReference",
         "beforeUpdateAddons": [
             {
                 "module": "core:RecordsTransform",
diff --git a/prepared_steps/8_upsert_assets/export.json b/prepared_steps/8_upsert_assets/export.json
index 3905330..712cc48 100644
--- a/prepared_steps/8_upsert_assets/export.json
+++ b/prepared_steps/8_upsert_assets/export.json
@@ -3,10 +3,9 @@
     "excludeIdsFromCSVFiles": true,
     "objects": [
     {
-        "query": "SELECT ExternalReference FROM Location WHERE ExternalReference != null AND ParentLocation.VisitorAddress.CountryCode = 'NL'",
+        "query": "SELECT PKey__c FROM Location WHERE PKey__c != null AND ParentLocation.VisitorAddress.CountryCode = 'NL'",
         "operation": "Readonly",
-        "externalId": "ExternalReference",
-        "master": false
+        "externalId": "PKey__c"
     },{
         "query": "SELECT Product_Code__c FROM Product2 WHERE Product_Code__c != null",
         "operation": "Readonly",
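
Note on the AssociatedLocation flow touched by this patch: step 2 now writes
AssociatedLocation_beforetransform.csv keyed by Location.PKey__c, the step 10
script (FillLocationId.py) joins that file against the Location insert target
produced by step 6 to resolve LocationId, and step 11 inserts the result. A
minimal pandas sketch of that lookup follows; the two single-row frames are
illustrative stand-ins for the real CSV files the script reads.

import pandas as pd

# Stand-in for AssociatedLocation_beforetransform.csv (written in step 2).
assoc = pd.DataFrame({
    'Type': ['Owner'],
    'ActiveFrom': ['2017-12-03'],
    'ActiveTo': [''],
    'ParentRecordId': ['0011r00001kXYKXAA4'],
    'Location.PKey__c': ['Plantsoen de Pas 12, 6601 BK WIJCHEN, NL;;;'],
})

# Stand-in for the Location insert target CSV (written in step 6).
locations = pd.DataFrame({
    'Id': ['1319Z000000ij1RQAQ'],
    'PKey__c': ['Plantsoen de Pas 12, 6601 BK WIJCHEN, NL;;;'],
})

# Resolve LocationId by joining on the PKey__c external key, then drop the keys.
merged = assoc.merge(locations, left_on='Location.PKey__c', right_on='PKey__c', how='left')
merged = merged.drop(columns=['Location.PKey__c', 'PKey__c']).rename(columns={'Id': 'LocationId'})
print(merged)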