IB import nearly finished
@@ -5,8 +5,8 @@ country_mapping = {
 }
 
 # Read the input CSV file, assuming the first row is the header
-read_df = pd.read_csv('../1/SCInstalledBaseLocation__c.csv', header=0, keep_default_na=False)
-read_df_ib = pd.read_csv('../1/SCInstalledBase__c.csv', header=0, keep_default_na=False)
+read_df = pd.read_csv('../1/SCInstalledBaseLocation__c.csv', header=0, keep_default_na=False, dtype=str)
+read_df_ib = pd.read_csv('../1/SCInstalledBase__c.csv', header=0, keep_default_na=False, dtype=str)
 for row in read_df.to_dict('records'):
     try:
         # Your processing logic here
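Note on the change above: pd.read_csv infers column types by default, which mangles identifier-like fields. A minimal sketch (invented sample data, not from the commit) of what dtype=str and keep_default_na=False prevent:

    import pandas as pd
    from io import StringIO

    sample = "PKey__c,PostalCode__c\nKey1,01234\n"

    # Without dtype=str, pandas infers int64 for PostalCode__c and strips the leading zero.
    print(pd.read_csv(StringIO(sample))['PostalCode__c'].iloc[0])             # 1234
    # With dtype=str every column stays text, so '01234' survives the round trip;
    # keep_default_na=False additionally keeps empty cells as '' instead of NaN.
    print(pd.read_csv(StringIO(sample), dtype=str)['PostalCode__c'].iloc[0])  # '01234'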
@@ -45,14 +45,20 @@ merged_df_ib = pd.merge(df_ib,
                         right_on='Id',
                         how='left')
 
-# Handle missing values by setting them to None
-merged_df_ib['Extension__c'] = merged_df_ib['Extension__c'].fillna('')
-merged_df_ib['FlatNo__c'] = merged_df_ib['FlatNo__c'].fillna('')
-merged_df_ib['Floor__c'] = merged_df_ib['Floor__c'].fillna('')
 
 # If there are missing values (no match found), you can fill them with a placeholder
 merged_df_ib['PKey__c'].fillna('Not Found', inplace=True)
 
+merged_df_ib['PKey__c'] = (
+    merged_df_ib['PKey__c'].astype(str) + ';' +
+    merged_df_ib['Extension__c'].astype(str) + ';' +
+    merged_df_ib['FlatNo__c'].astype(str) + ';' +
+    merged_df_ib['Floor__c'].astype(str)
+)
+
+merged_df_ib = merged_df_ib.drop('Extension__c', axis=1)
+merged_df_ib = merged_df_ib.drop('FlatNo__c', axis=1)
+merged_df_ib = merged_df_ib.drop('Floor__c', axis=1)
 
 ## 1. Address.csv
 # Columns needed for Address table based on the input CSV structure
 address_columns = ['City__c', 'Country__c',
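The block added above collapses the four location parts into one semicolon-separated key. A toy run of the same pattern (invented values) shows what ends up in PKey__c, including the unmatched-row case:

    import pandas as pd

    df = pd.DataFrame({'PKey__c': ['P1', None],
                       'Extension__c': ['A', ''],
                       'FlatNo__c': ['2', ''],
                       'Floor__c': ['3', '']})
    df['PKey__c'] = df['PKey__c'].fillna('Not Found')
    df['PKey__c'] = (df['PKey__c'].astype(str) + ';' +
                     df['Extension__c'].astype(str) + ';' +
                     df['FlatNo__c'].astype(str) + ';' +
                     df['Floor__c'].astype(str))
    print(df['PKey__c'].tolist())  # ['P1;A;2;3', 'Not Found;;;']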
@@ -114,9 +120,17 @@ child_df['Name'] = (
 # Replace any row where 'Floor__c', 'FlatNo__c', and 'Extension__c' are all empty with "HOME"
 child_df.replace({'Name': {'--': 'HOME'}}, inplace=True)
 
+# Create the 'ExternalReference' column for Asset assignment
+child_df['ExternalReference'] = (
+    child_df['PKey__c'].astype(str) + ';' +
+    child_df['Extension__c'].astype(str) + ';' +
+    child_df['FlatNo__c'].astype(str) + ';' +
+    child_df['Floor__c'].astype(str)
+)
+
 # Rename columns to match the desired format
 child_df.columns = ['Extension__c', 'Flat__c', 'Floor__c', 'City', 'Country',
-                    'PostalCode', 'Street', 'PKey__c', 'Name']
+                    'PostalCode', 'Street', 'PKey__c', 'Name', 'ExternalReference']
 
 child_df = child_df.drop_duplicates(subset=['Extension__c', 'Flat__c', 'Floor__c', 'City', 'Country', 'PostalCode', 'Street'], keep='first')
 
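child_df.columns = [...] renames positionally, so the name list must match the frame's column count and order exactly; appending 'ExternalReference' to the list is what keeps the assignment valid once the new column exists. A name-based alternative (a sketch, not what the commit uses) is immune to column reordering:

    import pandas as pd

    df = pd.DataFrame({'City__c': ['Berlin'], 'Country__c': ['DE']})
    # rename() maps old names to new ones and ignores position, unlike df.columns = [...]
    df = df.rename(columns={'City__c': 'City', 'Country__c': 'Country'})
    print(df.columns.tolist())  # ['City', 'Country']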
@@ -137,7 +151,7 @@ merged_df_ib = merged_df_ib.drop('InstalledBaseLocation__c', axis=1)
 merged_df_ib = merged_df_ib.drop('InstalledBaseLocation__r.Id', axis=1)
 merged_df_ib = merged_df_ib.drop('Id_y', axis=1)
 print(merged_df_ib.columns)
-merged_df_ib.columns = ['Product2.EAN_Product_Code__c', 'FSL_1st_Ignition_Date__c', 'Id', 'InstallDate', 'Name', 'Kind_of_Energy__c', 'Kind_of_Installation__c', 'Main_Product_Group__c', 'SerialNumber', 'Serialnumber_Exception__c', 'Location.PKey__c', 'Location.Extension__c', 'Location.Flat__c', 'Location.Floor__c',]
+merged_df_ib.columns = ['Product2.EAN_Product_Code__c', 'FSL_1st_Ignition_Date__c', 'Id', 'InstallDate', 'Name', 'Kind_of_Energy__c', 'Kind_of_Installation__c', 'Main_Product_Group__c', 'SerialNumber', 'Serialnumber_Exception__c', 'Location.ExternalReference']
 
 # Write each DataFrame to a separate CSV file
 address_df.to_csv('../3/Address.csv', index=False)
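Positional assignment raises ValueError on a length mismatch, which is presumably why print(merged_df_ib.columns) is kept as a pre-flight check before the list shrinks from 14 names to 11. A minimal illustration (invented frame, not from the commit):

    import pandas as pd

    df = pd.DataFrame({'a': [1], 'b': [2]})
    try:
        df.columns = ['only_one_name']    # 1 name for 2 columns
    except ValueError as e:
        print(e)                          # Length mismatch: Expected axis has 2 elements...
    df.to_csv('out.csv', index=False)     # index=False keeps the row index out of the CSV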
@@ -1 +1 @@
-sf sfdmu run --sourceusername rene.kasseboehmer@vaillant.de.devrene --targetusername rene.kasseboehmer@vaillant.de.devrene
+sf sfdmu run --sourceusername csvfile --targetusername rene.kasseboehmer@vaillant.de.devrene
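Switching --sourceusername to csvfile changes the SFDMU run from org-to-org to CSV-to-org: records are read from the CSV files next to export.json and loaded into the target org, which is presumably why the per-object "useSourceCSVFile": true flag is dropped from the config below.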
@@ -3,43 +3,16 @@
     "excludeIdsFromCSVFiles": true,
     "objects": [
         {
-            "query": "SELECT Id, Extension__c,Flat__c,Floor__c,Name,PKey__c FROM Location WHERE ParentLocationId != null AND ParentLocation.VisitorAddress.CountryCode = 'NL'",
+            "query": "SELECT ExternalReference FROM Location WHERE ExternalReference != null AND ParentLocation.VisitorAddress.CountryCode = 'NL'",
             "operation": "Readonly",
-            "externalId": "PKey__c;Extension__c;Flat__c;Floor__c",
-            "master": false
+            "externalId": "ExternalReference"
         },{
-            "query": "SELECT Id, EAN_Product_Code__c FROM Product2 WHERE EAN_Product_Code__c != null",
+            "query": "SELECT EAN_Product_Code__c FROM Product2 WHERE EAN_Product_Code__c != null",
             "operation": "Readonly",
-            "externalId": "EAN_Product_Code__c",
-            "master": false
+            "externalId": "EAN_Product_Code__c"
         },{
-            "query": "SELECT Product2Id,FSL_1st_Ignition_Date__c,Id,InstallDate,Name,Kind_of_Energy__c,Kind_of_Installation__c,Main_Product_Group__c,SerialNumber,Serialnumber_Exception__c,LocationId FROM Asset",
-            "operation": "Insert",
-            "useSourceCSVFile": true,
-            "beforeUpdateAddons": [
-                {
-                    "module": "core:RecordsTransform",
-                    "description": "Updates ParentLocationId with source Address.ParentId based on Pkey__c",
-                    "args": {
-                        "fields": [
-                            {
-                                "alias": "sourceLocationIdFromPkeyExtensionFlatFloor",
-                                "sourceObject": "Location",
-                                "sourceField": "Id",
-                                "lookupExpression": "source.PKey__c == target.Location.PKey__c && source.Extension__c == target.Location.Extension__c && source.Flat__c == target.Location.Flat__c && source.Floor__c == target.Location.Floor__c",
-                                "lookupSource": "source"
-                            }
-                        ],
-                        "transformations": [
-                            {
-                                "targetObject": "Asset",
-                                "targetField": "LocationId",
-                                "formula": "formula.sourceLocationIdFromPkeyExtensionFlatFloor"
-                            }
-                        ]
-                    }
-                }
-            ]
+            "query": "SELECT Product2Id,Id,InstallDate,Name,Kind_of_Energy__c,Kind_of_Installation__c,Main_Product_Group__c,SerialNumber,Serialnumber_Exception__c,LocationId FROM Asset",
+            "operation": "Insert"
         }
     ]
 }
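Net effect of the config change: the composite externalId ("PKey__c;Extension__c;Flat__c;Floor__c") and the core:RecordsTransform add-on are replaced by a single precomputed ExternalReference field. SFDMU joins composite external id values with semicolons, so precomputing the same join in the CSVs (as the Python changes above now do) should match records identically; a sketch of the equivalence with invented values:

    # Invented values; mirrors the ';' join order used throughout the commit.
    parts = {'PKey__c': 'P1', 'Extension__c': 'A', 'Flat__c': '2', 'Floor__c': '3'}
    order = ['PKey__c', 'Extension__c', 'Flat__c', 'Floor__c']
    external_reference = ';'.join(parts[f] for f in order)
    print(external_reference)  # 'P1;A;2;3'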