started assets

This commit is contained in:
Rene Kaßeböhmer
2025-04-01 16:44:07 +02:00
parent ca549bc0e4
commit 6bc3fd1a00
4 changed files with 87 additions and 5 deletions

View File

@@ -4,6 +4,11 @@
    {
      "query": "SELECT Id, City__c, Country__c, GeoY__c, GeoX__c, PostalCode__c, Street__c, Extension__c, HouseNo__c, FlatNo__c, Floor__c FROM SCInstalledBaseLocation__c WHERE Country__c = 'NL' limit 1",
      "externalId": "Id"
    },
    {
      "query": "SELECT Id, Name, CommissioningDate__c,InstallationDate__c,ProductEnergy__c, ProductUnitClass__c,ArticleNo__c,SerialNo__c, SerialNoException__c, ProductUnitType__c, InstalledBaseLocation__c FROM SCInstalledBase__c WHERE Country__c = 'NL' limit 1",
      "externalId": "Id"
    }
  ]
}

View File

@@ -6,6 +6,7 @@ country_mapping = {
# Read the input CSV file, assuming the first row is the header
read_df = pd.read_csv('../1/SCInstalledBaseLocation__c.csv', header=0, keep_default_na=False)
read_df_ib = pd.read_csv('../1/SCInstalledBase__c.csv', header=0, keep_default_na=False)
for row in read_df.to_dict('records'):
    try:
        # Your processing logic here
@@ -15,14 +16,16 @@ for row in read_df.to_dict('records'):
# Columns for reindexing
reindex_columns = ['City__c','Country__c','Extension__c','FlatNo__c','Floor__c','GeoX__c','GeoY__c','HouseNo__c','Id','PostalCode__c','Street__c']
# ArticleNo__c,CommissioningDate__c,Id,InstallationDate__c,InstalledBaseLocation__c,InstalledBaseLocation__r.Id,Name,ProductEnergy__c,ProductUnitClass__c,ProductUnitType__c,SerialNo__c,SerialNoException__c
reindex_columns_ib = ['ArticleNo__c','CommissioningDate__c','Id','InstallationDate__c','InstalledBaseLocation__c','InstalledBaseLocation__r.Id','Name','ProductEnergy__c','ProductUnitClass__c','ProductUnitType__c','SerialNo__c','SerialNoException__c']
# Reindex the columns to match the desired format
df = read_df.reindex(reindex_columns, axis=1)
df_ib = read_df_ib.reindex(reindex_columns_ib, axis=1)
df['Street'] = (
    df['Street__c'].astype(str) + ' ' +
    df['HouseNo__c'].astype(str)
)
# Remove any trailing spaces that may result from missing values
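A quick aside on why that strip matters: with keep_default_na=False an empty HouseNo__c arrives as '', so the concatenation above leaves a trailing space after the street name. A minimal sketch with hypothetical data (the actual strip call sits in lines this hunk elides):

import pandas as pd

# Hypothetical sample: an empty HouseNo__c leaves a trailing space
# behind the street name, which a str.strip() then removes.
sample = pd.DataFrame({'Street__c': ['Mainstreet'], 'HouseNo__c': ['']})
street = sample['Street__c'].astype(str) + ' ' + sample['HouseNo__c'].astype(str)
print(repr(street.iloc[0]))              # 'Mainstreet '
print(repr(street.str.strip().iloc[0]))  # 'Mainstreet'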
@@ -35,6 +38,21 @@ df['PKey__c'] = (
    df['Country__c'].astype(str)
)
# Merge df_ib with df including additional columns
merged_df_ib = pd.merge(df_ib,
                        df[['Id', 'PKey__c', 'Extension__c', 'FlatNo__c', 'Floor__c']],
                        left_on='InstalledBaseLocation__c',
                        right_on='Id',
                        how='left')
# Handle missing values by setting them to empty strings
merged_df_ib['Extension__c'] = merged_df_ib['Extension__c'].fillna('')
merged_df_ib['FlatNo__c'] = merged_df_ib['FlatNo__c'].fillna('')
merged_df_ib['Floor__c'] = merged_df_ib['Floor__c'].fillna('')
# If there are missing values (no match found), fill them with a placeholder
merged_df_ib['PKey__c'] = merged_df_ib['PKey__c'].fillna('Not Found')
## 1. Address.csv
# Columns needed for Address table based on the input CSV structure
address_columns = ['City__c', 'Country__c',
@@ -85,18 +103,22 @@ child_columns = ['Extension__c', 'FlatNo__c', 'Floor__c', 'City__c', 'Country__c
# Modify child_df by explicitly creating a new DataFrame
child_df = df[child_columns].copy()  # Add .copy() to create an explicit copy
# Create the 'Name' column with simplified logic
child_df['Name'] = (
    child_df['Floor__c'].astype(str) + '-' +
    child_df['FlatNo__c'].astype(str) + '-' +
    child_df['Extension__c'].astype(str)
)
# Replace any row where 'Floor__c', 'FlatNo__c', and 'Extension__c' are all empty with "HOME"
child_df.replace({'Name': {'--': 'HOME'}}, inplace=True)
# Rename columns to match the desired format
child_df.columns = ['Extension__c', 'Flat__c', 'Floor__c', 'City', 'Country',
                    'PostalCode', 'Street', 'PKey__c', 'Name']
child_df = child_df.drop_duplicates(subset=['Extension__c', 'Flat__c', 'Floor__c', 'City', 'Country', 'PostalCode', 'Street'], keep='first')
child_df = child_df.drop('Country', axis=1)
child_df = child_df.drop('PostalCode', axis=1)
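Worth noting how the 'HOME' substitution above behaves: DataFrame.replace with a nested dict matches whole cell values only, so just the rows where Floor__c, FlatNo__c and Extension__c were all empty (yielding exactly '--') are renamed. A minimal sketch with hypothetical values:

import pandas as pd

# Only the exact cell value '--' is replaced; partial matches stay as-is.
names = pd.DataFrame({'Name': ['--', '2--', '1-2-a']})
names.replace({'Name': {'--': 'HOME'}}, inplace=True)
print(names['Name'].tolist())  # ['HOME', '2--', '1-2-a']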
@@ -108,10 +130,19 @@ child_df['IsInventoryLocation'] = 'false'
child_df['IsMobile'] = 'false'
child_df['LocationType'] = 'Site'
## 4. Assets.csv
#ArticleNo__c,CommissioningDate__c,Id,InstallationDate__c,InstalledBaseLocation__c,InstalledBaseLocation__r.Extension__c,InstalledBaseLocation__r.FlatNo__c,InstalledBaseLocation__r.Floor__c,InstalledBaseLocation__r.Id,Name,ProductEnergy__c,ProductUnitClass__c,ProductUnitType__c,SerialNo__c,SerialNoException__c
merged_df_ib = merged_df_ib.drop('InstalledBaseLocation__c', axis=1)
merged_df_ib = merged_df_ib.drop('InstalledBaseLocation__r.Id', axis=1)
merged_df_ib = merged_df_ib.drop('Id_y', axis=1)
print(merged_df_ib.columns)
merged_df_ib.columns = ['Product2.EAN_Product_Code__c', 'FSL_1st_Ignition_Date__c', 'Id', 'InstallDate', 'Name', 'Kind_of_Energy__c', 'Kind_of_Installation__c', 'Main_Product_Group__c', 'SerialNumber', 'Serialnumber_Exception__c', 'Location.PKey__c', 'Location.Extension__c', 'Location.Flat__c', 'Location.Floor__c']
# Write each DataFrame to a separate CSV file
address_df.to_csv('../3/Address.csv', index=False)
parent_df.to_csv('../3/Location.csv', index=False)
child_df.to_csv('../5/Location.csv', index=False)
merged_df_ib.to_csv('../7/Asset.csv', index=False)
print('Data has been successfully split into Address.csv, parent and child Location.csv, and Asset.csv files with duplicate checks applied.')
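One detail that explains the drop of 'Id_y' further up: both frames carry an Id column into pd.merge, so pandas suffixes the overlapping names with _x/_y. A minimal sketch with hypothetical rows:

import pandas as pd

# Both frames have an 'Id' column, so the merge result gets 'Id_x'
# (from the asset side) and 'Id_y' (from the location side).
assets = pd.DataFrame({'Id': ['a1'], 'InstalledBaseLocation__c': ['l1']})
locations = pd.DataFrame({'Id': ['l1'], 'PKey__c': ['Mainstreet 1 City NL']})
merged = pd.merge(assets, locations,
                  left_on='InstalledBaseLocation__c', right_on='Id', how='left')
print(merged.columns.tolist())
# ['Id_x', 'InstalledBaseLocation__c', 'Id_y', 'PKey__c']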

View File

@@ -0,0 +1 @@
sf sfdmu run --sourceusername rene.kasseboehmer@vaillant.de.devrene --targetusername rene.kasseboehmer@vaillant.de.devrene

View File

@@ -0,0 +1,45 @@
{
  "allOrNone": true,
  "excludeIdsFromCSVFiles": true,
  "objects": [
    {
      "query": "SELECT Id, Extension__c,Flat__c,Floor__c,Name,PKey__c FROM Location WHERE ParentLocationId != null AND ParentLocation.VisitorAddress.CountryCode = 'NL'",
      "operation": "Readonly",
      "externalId": "PKey__c;Extension__c;Flat__c;Floor__c",
      "master": false
    },
    {
      "query": "SELECT Id, EAN_Product_Code__c FROM Product2 WHERE EAN_Product_Code__c != null",
      "operation": "Readonly",
      "externalId": "EAN_Product_Code__c",
      "master": false
    },
    {
      "query": "SELECT Product2Id,FSL_1st_Ignition_Date__c,Id,InstallDate,Name,Kind_of_Energy__c,Kind_of_Installation__c,Main_Product_Group__c,SerialNumber,Serialnumber_Exception__c,LocationId FROM Asset",
      "operation": "Insert",
      "useSourceCSVFile": true,
      "beforeUpdateAddons": [
        {
          "module": "core:RecordsTransform",
          "description": "Sets Asset.LocationId from the source Location matched on PKey__c, Extension__c, Flat__c and Floor__c",
          "args": {
            "fields": [
              {
                "alias": "sourceLocationIdFromPkeyExtensionFlatFloor",
                "sourceObject": "Location",
                "sourceField": "Id",
                "lookupExpression": "source.PKey__c == target.Location.PKey__c && source.Extension__c == target.Location.Extension__c && source.Flat__c == target.Location.Flat__c && source.Floor__c == target.Location.Floor__c",
                "lookupSource": "source"
              }
            ],
            "transformations": [
              {
                "targetObject": "Asset",
                "targetField": "LocationId",
                "formula": "formula.sourceLocationIdFromPkeyExtensionFlatFloor"
              }
            ]
          }
        }
      ]
    }
  ]
}
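For intuition, the RecordsTransform lookup in this config is essentially a composite-key match: each Asset row finds its Location by PKey__c plus Extension__c, Flat__c and Floor__c. A rough Python sketch of that matching (field names taken from the config, data hypothetical):

# Composite-key lookup roughly equivalent to the lookupExpression above.
locations = [{'Id': 'loc-1', 'PKey__c': 'Mainstreet 1 City NL',
              'Extension__c': '', 'Flat__c': '', 'Floor__c': ''}]
by_key = {(l['PKey__c'], l['Extension__c'], l['Flat__c'], l['Floor__c']): l['Id']
          for l in locations}

asset = {'Location.PKey__c': 'Mainstreet 1 City NL', 'Location.Extension__c': '',
         'Location.Flat__c': '', 'Location.Floor__c': ''}
asset['LocationId'] = by_key.get((asset['Location.PKey__c'],
                                  asset['Location.Extension__c'],
                                  asset['Location.Flat__c'],
                                  asset['Location.Floor__c']))
print(asset['LocationId'])  # loc-1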