van location finished and integrated into child location upload

Rene Kaßeböhmer
2025-04-25 13:09:04 +02:00
parent 5aaf4a6d19
commit f832310344
6 changed files with 57 additions and 12 deletions

View File

@@ -32,7 +32,7 @@
       "query": "SELECT Id, Article__r.Name, Price__c, PriceUnit__c, Pricelist__c, ValidFrom__c, ValidTo__c, Article__r.EANCode__c, Pricelist__r.Brand__r.Name, Pricelist__r.Country__c FROM SCPriceListItem__c WHERE Country__c = '{country}' AND PriceList__c != null AND (ValidTo__c >= TODAY OR ValidFrom__c >= TODAY) AND Article__c = 'a0Gw000001R9slpEAB'"
     },{
       "sobject": "SCResourceAssignment__c",
-      "query": "SELECT Id, ValidTo__c, ValidFrom__c, Country__c, City__c, District__c, Street__c, HouseNo__c, Extension__c, FlatNo__c, Floor__c, GeoY__c, GeoX__c, Resource__c, Stock__c FROM SCResourceAssignment__c WHERE Country__c = '{country}' AND ValidTo__c >= TODAY"
+      "query": "SELECT Id, ValidTo__c, ValidFrom__c, Country__c, City__c, PostalCode__c, District__c, Street__c, HouseNo__c, Extension__c, FlatNo__c, Floor__c, GeoY__c, GeoX__c, Resource__c, Resource__r.Employee__r.Name, Stock__c FROM SCResourceAssignment__c WHERE Country__c = '{country}' AND ValidTo__c >= TODAY"
     }
   ]
 }
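The '{country}' token in these queries is a placeholder that has to be filled in before the extraction runs. A minimal sketch of one way to resolve it, assuming a plain string substitution over an export.json-style file (the file name, function name and loading logic are illustrative, not the project's actual extractor):

import json

# Resolve the '{country}' placeholder in every query of an export config.
# 'export.json' and the substitution style are assumptions for illustration.
def resolve_queries(config_path, country):
    with open(config_path, encoding='utf-8') as fh:
        config = json.load(fh)
    return [obj['query'].replace('{country}', country) for obj in config['objects']]

# Example: resolve_queries('export.json', 'NL') yields the SOQL strings with Country__c = 'NL'.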

View File

@@ -2,8 +2,27 @@ import pandas as pd
 from tqdm import tqdm
 
 country_mapping = {
-    'NL': 'Netherlands'
+    'NL': 'Netherlands',
+    'GB': 'United Kingdom',
+    'UK': 'United Kingdom',
+    'IE': 'Ireland',
+    'DE': 'Germany',
+    'FR': 'France',
+    'IT': 'Italy',
+    'ES': 'Spain',
+    'PT': 'Portugal',
+    'BE': 'Belgium',
+    'AT': 'Austria',
+    'CH': 'Switzerland',
+    'NO': 'Norway',
+    'FI': 'Finland',
+    'SE': 'Sweden',
+    'DK': 'Denmark',
+    'PL': 'Poland',
+    'CZ': 'Czech Republic',
+    'HU': 'Hungary',
+    'RO': 'Romania'
 }
 
 # Read the input CSV file, assuming the second row is the header
 read_df = pd.read_csv('../1_extract_data/results/SCInstalledBaseLocation__c.csv', header=0, keep_default_na=False, dtype=str)
@@ -23,7 +42,7 @@ reindex_columns_product2 = ['Id','Main_Product_Group__c','Family','MaterialType_
 reindex_columns_ibr = ['Id', 'InstalledBaseLocation__c', 'Role__c', 'ValidFrom__c', 'ValidTo__c', 'Account__c']
 reindex_columns_pricelist = ['Id', 'Name', 'Brand__r.Name', 'Country__c']
 reindex_columns_pricelistitem = ['Id', 'Article__r.Name', 'Article__r.EANCode__c', 'Price__c', 'PriceUnit__c', 'Pricelist__c', 'ValidFrom__c', 'ValidTo__c', 'Pricelist__r.Brand__r.Name', 'Pricelist__r.Country__c']
-reindex_columns_resourceassignment = ['Id', 'ValidTo__c', 'ValidFrom__c', 'Country__c', 'City__c', 'District__c', 'Street__c', 'HouseNo__c', 'Extension__c', 'FlatNo__c', 'Floor__c', 'GeoY__c', 'GeoX__c', 'Resource__c', 'Stock__c']
+reindex_columns_resourceassignment = ['Id', 'ValidTo__c', 'ValidFrom__c', 'Country__c', 'City__c', 'PostalCode__c', 'District__c', 'Street__c', 'HouseNo__c', 'Extension__c', 'FlatNo__c', 'Floor__c', 'GeoY__c', 'GeoX__c', 'Resource__c', 'Resource__r.Employee__r.Name', 'Stock__c']
 reindex_columns_address_iot = ['Id', 'Country', 'CountryCode', 'Street', 'City', 'ParentId', 'PostalCode']
 reindex_columns_location_iot = ['Id', 'Name']
@@ -156,6 +175,9 @@ child_columns = ['Id', 'Extension__c', 'FlatNo__c', 'Floor__c', 'City__c', 'Coun
 # Modify child_df by explicitly creating a new DataFrame
 child_df = df[child_columns].copy()  # Add .copy() to create an explicit copy
+child_df.columns = ['Id', 'Extension__c', 'FlatNo__c', 'Floor__c', 'City', 'Country',
+                    'PostalCode', 'Street', 'ExternalReference']
+
 # Create the 'Name' column with simplified logic
 child_df['Name'] = (
     # Check if all three fields are not null; if so, concatenate them
@@ -168,8 +190,8 @@ child_df['Name'] = (
 child_df.replace({'Name': {'--': 'HOME'}}, inplace=True)
 
 # Create the 'ExternalReference' column for Asset assignment
-child_df['ExternalReference'] = (
-    child_df['PKey__c'].astype(str) + ';' +
+child_df['PKey__c'] = (
+    child_df['ExternalReference'].astype(str) + ';' +
     child_df['Extension__c'].astype(str) + ';' +
     child_df['FlatNo__c'].astype(str) + ';' +
     child_df['Floor__c'].astype(str)
@@ -177,7 +199,7 @@ child_df['ExternalReference'] = (
 # Rename columns to match the desired format
 child_df.columns = ['Id', 'Extension__c', 'Flat__c', 'Floor__c', 'City', 'Country',
-                    'PostalCode', 'Street', 'PKey__c', 'Name', 'ExternalReference']
+                    'PostalCode', 'Street', 'ExternalReference', 'Name', 'PKey__c']
 
 child_df = child_df.drop_duplicates(subset=['Extension__c', 'Flat__c', 'Floor__c','City', 'Country', 'PostalCode', 'Street'], keep='first')
@@ -314,8 +336,6 @@ df_pricelistitem = df_pricelistitem.drop('Article__r.EANCode__c', axis=1)
 df_pricelistitem.insert(0, 'IsActive', 'true')
 
-print(df_pricelistitem)
-
 columns_pricebookentry = ['IsActive', 'Product2.Product_Code__c', 'UnitPrice', 'Pricebook2.Name']
 df_pricelistitem.columns = columns_pricebookentry
@@ -324,8 +344,33 @@ df_pricelistitem.columns = columns_pricebookentry
 ## Location (Van)
 ##--------------------------------------------------------------------------##
+# Create van locations from resource assignments
+van_columns = ['Id', 'City__c', 'Country__c', 'Street__c', 'PostalCode__c', 'GeoY__c', 'GeoX__c', 'Resource__r.Employee__r.Name']
+van_df = df_resourceassignment[van_columns].copy()
+
+# Create the address-based ExternalReference for vans
+van_df['ExternalReference'] = (
+    van_df['Street__c'].astype(str) + ', ' +
+    van_df['PostalCode__c'].astype(str) + ' ' +
+    van_df['City__c'].astype(str) + ', ' +
+    van_df['Country__c'].astype(str)
+)
+
+# Filter vans where ExternalReference does not exist in parent_df
+van_df = van_df[~van_df['ExternalReference'].isin(parent_df['Name'])]
+
+# Add van records to child_df
+if not van_df.empty:
+    van_records = pd.DataFrame({
+        'Name': van_df['Resource__r.Employee__r.Name'],
+        'ExternalReference': van_df['ExternalReference'],
+        'PKey__c': van_df['ExternalReference'] + ';' + 'VAN',
+        'DuplicateCheck__c': 'false',
+        'IsInventoryLocation': 'true',
+        'IsMobile': 'true',
+        'LocationType': 'Van'
+    })
+    child_df = pd.concat([child_df, van_records], ignore_index=True)
+
 # Write each DataFrame to a separate CSV file
 address_df.to_csv('../4_upsert_address_and_parent_location/Address.csv', index=False)
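For reference, a short sketch of what the new van logic produces for one hypothetical SCResourceAssignment__c row; the address and employee values are invented purely for illustration, but the key composition mirrors the code above:

import pandas as pd

# Hypothetical resource assignment row (values invented for illustration).
sample = pd.DataFrame([{
    'Street__c': 'Keulenstraat 21', 'PostalCode__c': '7418 ET',
    'City__c': 'Deventer', 'Country__c': 'NL',
    'Resource__r.Employee__r.Name': 'Jan Jansen'
}])

# Same concatenation as in the script above.
external_reference = (
    sample['Street__c'] + ', ' + sample['PostalCode__c'] + ' ' +
    sample['City__c'] + ', ' + sample['Country__c']
)
print(external_reference.iloc[0])           # Keulenstraat 21, 7418 ET Deventer, NL
print(external_reference.iloc[0] + ';VAN')  # value written to PKey__c for the van location

So each van becomes a child location named after its technician, keyed by the stock address plus the ';VAN' suffix.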

View File

@@ -3,7 +3,7 @@
   "excludeIdsFromCSVFiles": true,
   "objects": [
     {
-      "query": "SELECT Extension,Flat,Floor,Name,DuplicateCheck__c,IsInventoryLocation,IsMobile,LocationType,PKey__c, ExternalReference FROM Location",
+      "query": "SELECT Id,Extension__c,Flat__c,Floor__c,ExternalReference,Name,PKey__c,DuplicateCheck__c,IsInventoryLocation,IsMobile,LocationType FROM Location",
       "operation": "Insert",
       "useSourceCSVFile": true
     }
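The field list in this insert query has to line up with the columns the transform script writes for child locations, including the new van fields. A quick, illustrative sanity check, assuming the generated CSV is named Location.csv (the path is an assumption):

import pandas as pd

# Illustrative check: every field selected by the insert query should exist as a
# column in the generated CSV. The CSV path is an assumption.
query_fields = ['Id', 'Extension__c', 'Flat__c', 'Floor__c', 'ExternalReference',
                'Name', 'PKey__c', 'DuplicateCheck__c', 'IsInventoryLocation',
                'IsMobile', 'LocationType']
csv_columns = pd.read_csv('Location.csv', dtype=str, nrows=0).columns
missing = [f for f in query_fields if f not in csv_columns]
print('missing columns:', missing or 'none')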

View File

@@ -2,7 +2,7 @@
   "allOrNone": true,
   "objects": [
     {
-      "query": "SELECT Id, Name, Pkey__c, ParentLocationId FROM Location WHERE Pkey__c != null AND PKey__c like '%NL' AND ParentLocationId = null AND VisitorAddressId = null",
+      "query": "SELECT Id, Name, ExternalReference, ParentLocationId FROM Location WHERE ExternalReference != null AND ExternalReference LIKE '%NL' AND ParentLocationId = null AND VisitorAddressId = null",
       "operation": "Update",
       "beforeUpdateAddons": [
         {
@@ -14,7 +14,7 @@
           "alias": "sourceAddressParentIdFromPkey",
           "sourceObject": "Address",
           "sourceField": "ParentId",
-          "lookupExpression": "source.PKey__c == target.Pkey__c",
+          "lookupExpression": "source.PKey__c == target.ExternalReference",
           "lookupSource": "source"
         }
       ],
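Conceptually, the changed lookupExpression means the parent is now resolved by matching the source Address PKey__c against the target Location ExternalReference rather than the Location's own Pkey__c, copying the Address ParentId into the location's parent. A rough pandas sketch of the equivalent join, not the migration tool's actual implementation, with file names assumed for illustration:

import pandas as pd

# Source Address CSV carries PKey__c and ParentId; target Locations were queried
# with their ExternalReference. Match on PKey__c == ExternalReference and take
# the matching Address's ParentId as the location's parent.
addresses = pd.read_csv('Address.csv', dtype=str)
locations = pd.read_csv('target_locations.csv', dtype=str)

parent_by_key = addresses.drop_duplicates('PKey__c').set_index('PKey__c')['ParentId']
locations['ParentLocationId'] = locations['ExternalReference'].map(parent_by_key)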