diff --git a/prepared_steps/2/LocationScript.py b/prepared_steps/2/LocationScript.py index 8fc2544..ab1d5b7 100644 --- a/prepared_steps/2/LocationScript.py +++ b/prepared_steps/2/LocationScript.py @@ -5,8 +5,8 @@ country_mapping = { } # Read the input CSV file, assuming the second row is the header -read_df = pd.read_csv('../1/SCInstalledBaseLocation__c.csv', header=0, keep_default_na=False) -read_df_ib = pd.read_csv('../1/SCInstalledBase__c.csv', header=0, keep_default_na=False) +read_df = pd.read_csv('../1/SCInstalledBaseLocation__c.csv', header=0, keep_default_na=False, dtype=str) +read_df_ib = pd.read_csv('../1/SCInstalledBase__c.csv', header=0, keep_default_na=False, dtype=str) for row in read_df.to_dict('records'): try: # Your processing logic here @@ -45,14 +45,20 @@ merged_df_ib = pd.merge(df_ib, right_on='Id', how='left') -# Handle missing values by setting them to None -merged_df_ib['Extension__c'] = merged_df_ib['Extension__c'].fillna('') -merged_df_ib['FlatNo__c'] = merged_df_ib['FlatNo__c'].fillna('') -merged_df_ib['Floor__c'] = merged_df_ib['Floor__c'].fillna('') - # If there are missing values (no match found), you can fill them with a placeholder merged_df_ib['PKey__c'].fillna('Not Found', inplace=True) +merged_df_ib['PKey__c'] = ( + merged_df_ib['PKey__c'].astype(str) + ';' + + merged_df_ib['Extension__c'].astype(str) + ';' + + merged_df_ib['FlatNo__c'].astype(str) + ';' + + merged_df_ib['Floor__c'].astype(str) +) + +merged_df_ib = merged_df_ib.drop('Extension__c', axis=1) +merged_df_ib = merged_df_ib.drop('FlatNo__c', axis=1) +merged_df_ib = merged_df_ib.drop('Floor__c', axis=1) + ## 1. 
Address.csv # Columns needed for Address table based on the input CSV structure address_columns = ['City__c', 'Country__c', @@ -114,9 +120,17 @@ child_df['Name'] = ( # Replace any row where 'Floor__c', 'FlatNo__c', and 'Extension__c' are all empty with "HOME" child_df.replace({'Name': {'--': 'HOME'}}, inplace=True) +# Create the 'ExternalReference' column for Asset assignment +child_df['ExternalReference'] = ( + child_df['PKey__c'].astype(str) + ';' + + child_df['Extension__c'].astype(str) + ';' + + child_df['FlatNo__c'].astype(str) + ';' + + child_df['Floor__c'].astype(str) +) + # Rename columns to match the desired format child_df.columns = ['Extension__c', 'Flat__c', 'Floor__c', 'City', 'Country', - 'PostalCode', 'Street', 'PKey__c', 'Name'] + 'PostalCode', 'Street', 'PKey__c', 'Name', 'ExternalReference'] child_df = child_df.drop_duplicates(subset=['Extension__c', 'Flat__c', 'Floor__c','City', 'Country', 'PostalCode', 'Street'], keep='first') @@ -137,7 +151,7 @@ merged_df_ib = merged_df_ib.drop('InstalledBaseLocation__c', axis=1) merged_df_ib = merged_df_ib.drop('InstalledBaseLocation__r.Id', axis=1) merged_df_ib = merged_df_ib.drop('Id_y', axis=1) print(merged_df_ib.columns) -merged_df_ib.columns = ['Product2.EAN_Product_Code__c', 'FSL_1st_Ignition_Date__c', 'Id', 'InstallDate', 'Name', 'Kind_of_Energy__c', 'Kind_of_Installation__c', 'Main_Product_Group__c', 'SerialNumber', 'Serialnumber_Exception__c', 'Location.PKey__c', 'Location.Extension__c', 'Location.Flat__c', 'Location.Floor__c',] +merged_df_ib.columns = ['Product2.EAN_Product_Code__c', 'FSL_1st_Ignition_Date__c', 'Id', 'InstallDate', 'Name', 'Kind_of_Energy__c', 'Kind_of_Installation__c', 'Main_Product_Group__c', 'SerialNumber', 'Serialnumber_Exception__c', 'Location.ExternalReference'] # Write each DataFrame to a separate CSV file address_df.to_csv('../3/Address.csv', index=False) diff --git a/prepared_steps/7/command_dev.txt b/prepared_steps/7/command_dev.txt index 0389686..4c22459 100644 --- 
a/prepared_steps/7/command_dev.txt +++ b/prepared_steps/7/command_dev.txt @@ -1 +1 @@ -sf sfdmu run --sourceusername rene.kasseboehmer@vaillant.de.devrene --targetusername rene.kasseboehmer@vaillant.de.devrene \ No newline at end of file +sf sfdmu run --sourceusername csvfile --targetusername rene.kasseboehmer@vaillant.de.devrene \ No newline at end of file diff --git a/prepared_steps/7/export.json b/prepared_steps/7/export.json index 5f79196..39b6e30 100644 --- a/prepared_steps/7/export.json +++ b/prepared_steps/7/export.json @@ -3,43 +3,16 @@ "excludeIdsFromCSVFiles": true, "objects": [ { - "query": "SELECT Id, Extension__c,Flat__c,Floor__c,Name,PKey__c FROM Location WHERE ParentLocationId != null AND ParentLocation.VisitorAddress.CountryCode = 'NL'", + "query": "SELECT ExternalReference FROM Location WHERE ExternalReference != null AND ParentLocation.VisitorAddress.CountryCode = 'NL'", "operation": "Readonly", - "externalId": "PKey__c;Extension__c;Flat__c;Floor__c", - "master": false + "externalId": "ExternalReference" },{ - "query": "SELECT Id, EAN_Product_Code__c FROM Product2 WHERE EAN_Product_Code__c != null", + "query": "SELECT EAN_Product_Code__c FROM Product2 WHERE EAN_Product_Code__c != null", "operation": "Readonly", - "externalId": "EAN_Product_Code__c", - "master": false + "externalId": "EAN_Product_Code__c" },{ - "query": "SELECT Product2Id,FSL_1st_Ignition_Date__c,Id,InstallDate,Name,Kind_of_Energy__c,Kind_of_Installation__c,Main_Product_Group__c,SerialNumber,Serialnumber_Exception__c,LocationId FROM Asset", - "operation": "Insert", - "useSourceCSVFile": true, - "beforeUpdateAddons": [ - { - "module": "core:RecordsTransform", - "description": "Updates ParentLocationId with source Address.ParentId based on Pkey__c", - "args": { - "fields": [ - { - "alias": "sourceLocationIdFromPkeyExtensionFlatFloor", - "sourceObject": "Location", - "sourceField": "Id", - "lookupExpression": "source.PKey__c == target.Location.PKey__c && source.Extension__c == 
target.Location.Extension__c && source.Flat__c == target.Location.Flat__c && source.Floor__c == target.Location.Floor__c", -                        "lookupSource": "source" -                    } -                ], -                "transformations": [ -                    { -                        "targetObject": "Asset", -                        "targetField": "LocationId", -                        "formula": "formula.sourceLocationIdFromPkeyExtensionFlatFloor" -                    } -                ] -            } -        } -    ] +      "query": "SELECT Product2Id,FSL_1st_Ignition_Date__c,Id,InstallDate,Name,Kind_of_Energy__c,Kind_of_Installation__c,Main_Product_Group__c,SerialNumber,Serialnumber_Exception__c,LocationId FROM Asset", +      "operation": "Insert" } ] } \ No newline at end of file