savestate

Rene Kaßeböhmer
2025-05-09 15:41:55 +02:00
parent 14bc0f6dca
commit 8af0ef8e78
17 changed files with 290 additions and 108 deletions

.gitignore vendored (5 lines changed)
View File

@ -27,7 +27,8 @@ SCPriceList__c.csv
SCPriceListItem__c.csv
SCResourceAssignment__c.csv
ContractTemplates.csv
ServiceContract.csv
ServiceContract.csv*
SCContract__c.csv
AssociatedLocation_beforetransform.csv
AssociatedLocation.csv
AssociatedLocation.csv*
ServiceContract_beforetransform.csv

Binary file not shown.

View File

@ -8,7 +8,7 @@
"master": false,
"externalId": "Name"
},{
"query": "SELECT PKey__c,Name,Status,BillingCountryCode,Term,Pricebook2.Name,IsTemplate__c,TemplateCountry__c,FSL_Sold_by__c FROM ServiceContract",
"query": "SELECT PKey__c,Name,Status,BillingCountryCode,Term,Pricebook2Id,IsTemplate__c,TemplateCountry__c,FSL_Sold_by__c FROM ServiceContract",
"operation": "Upsert",
"useValuesMapping": true
}

View File

@ -0,0 +1,48 @@
import pandas as pd
read_df_sc = pd.read_csv('../15_insert_servicecontract/ServiceContract_beforetransform.csv', header=0, keep_default_na=False, dtype=str)
read_df_pb2 = pd.read_csv('../12_insert_pricebook2_and_pricebookentries/target/Pricebook2_insert_target.csv', header=0, keep_default_na=False, dtype=str)
read_df_sct = pd.read_csv('../13_insert_servicecontracttemplates_dummies/target/ServiceContract_insert_target.csv', header=0, keep_default_na=False, dtype=str)
reindex_columns_sc = ['PKey__c','TemplateId__r.PKey__c','Status','BillingCountryCode','Term','EndDate','StartDate','AccountId','Service_Recipient__c','IoT_Registration_Status__c','Pricebook2.Name','Name']
reindex_columns_pb2 = ['Brand__c','Business_Type__c','Country__c','Errors','Id','IsActive','Name']
reindex_columns_sct = ['BillingCountryCode','Errors','Id','IsTemplate__c','Name','PKey__c','Pricebook2Id','Term']
# Reindex the columns to match the desired format
df_sc = read_df_sc.reindex(reindex_columns_sc, axis=1)
df_pb2 = read_df_pb2.reindex(reindex_columns_pb2, axis=1)
df_sct = read_df_sct.reindex(reindex_columns_sct, axis=1)
print(df_sc)
print(df_pb2)
print(df_sct)
#
# Merge df_sc with df_pb2 to pull in the Pricebook2 Id via Pricebook2.Name
merged_df_sc = pd.merge(df_sc,
df_pb2[['Id', 'Name']],
left_on='Pricebook2.Name',
right_on='Name',
how='left')
merged_df_sc = pd.merge(merged_df_sc,
df_sct[['PKey__c', 'Id']],
left_on='TemplateId__r.PKey__c',
right_on='PKey__c',
how='left')
# Drop the external reference columns now that the Ids are resolved
merged_df_sc = merged_df_sc.drop('Pricebook2.Name', axis=1)
merged_df_sc = merged_df_sc.drop('Name_y', axis=1)
merged_df_sc = merged_df_sc.drop('TemplateId__r.PKey__c', axis=1)
merged_df_sc = merged_df_sc.drop('PKey__c_y', axis=1)
print(merged_df_sc)
#Rename columns
merged_df_sc.columns = ['PKey__c','Status','BillingCountryCode','Term','EndDate','StartDate','AccountId','Service_Recipient__c','IoT_Registration_Status__c','Name','Pricebook2Id', 'TemplateId__c']
# Save CSV
merged_df_sc.to_csv('../15_insert_servicecontract/ServiceContract.csv', index=False)
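
For reference, a minimal self-contained sketch of the lookup-resolution pattern used above, with invented sample values (the real data comes from the CSV exports): an external reference column is joined against the Id column of an already-loaded object, then the reference columns are dropped and the Id column is renamed to the lookup field.

import pandas as pd

# Hypothetical rows for illustration only.
contracts = pd.DataFrame({
    'PKey__c': ['C-1', 'C-2'],
    'Pricebook2.Name': ['NL VAILLANT SERVICE', 'NL AWB SERVICE'],
})
pricebooks = pd.DataFrame({
    'Id': ['01s000000000001AAA', '01s000000000002AAA'],
    'Name': ['NL VAILLANT SERVICE', 'NL AWB SERVICE'],
})

# Resolve the external reference to the inserted Salesforce Id ...
resolved = contracts.merge(pricebooks, left_on='Pricebook2.Name', right_on='Name', how='left')
# ... then drop the reference columns and keep only the resolved Id.
resolved = resolved.drop(columns=['Pricebook2.Name', 'Name']).rename(columns={'Id': 'Pricebook2Id'})
print(resolved)  # PKey__c plus Pricebook2Id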

View File

@ -0,0 +1 @@
python .\FillServiceContractFields.py

View File

@ -0,0 +1,47 @@
import os
import argparse
from sys import path
path.append('../..')
from sf_auth import get_sf_connection
def insert_service_contracts(context):
    """
    Insert ServiceContract records using Bulk API 2.0

    Args:
        context (str): Salesforce org context (e.g., 'qa2', 'prod')
    """
    try:
        # Get Salesforce connection
        sf = get_sf_connection(context)
        csv_file = 'ServiceContract.csv'
        print(f'Starting bulk insert of ServiceContract records from {csv_file}...')
        # Use bulk API 2.0 to insert records directly from CSV
        results = sf.bulk2.ServiceContract.insert(
            csv_file,
            batch_size=10000,
            concurrency=5
        )
        # Count successes and failures
        success_count = sum(1 for result in results if result['success'])
        total_count = len(results)
        print(f'\nInsertion complete:')
        print(f'Total records: {total_count}')
        print(f'Successful: {success_count}')
        print(f'Failed: {total_count - success_count}')
    except Exception as e:
        print(f'Error: {str(e)}')
        raise


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Insert ServiceContract records via Bulk API')
    parser.add_argument('--context', type=str, required=True,
                        help='Salesforce org context (e.g., "qa2", "prod")')
    args = parser.parse_args()
    insert_service_contracts(args.context)
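
Assuming the script above is saved as InsertServiceContracts.py (a hypothetical filename; the actual name is not shown in this view), it would be run from the step directory like the other steps:

python .\InsertServiceContracts.py --context qa2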

View File

@ -0,0 +1,4 @@
ObjectName,FieldName,RawValue,Value
ServiceContract,Status,Active,Active
ServiceContract,Status,Cancelled,Inactive
ServiceContract,Status,Created,Active

View File

@ -0,0 +1 @@
sf sfdmu run --sourceusername rene.kasseboehmer@vaillant.de.devrene --targetusername rene.kasseboehmer@vaillant.de.devrene

View File

@ -0,0 +1,23 @@
{
"allOrNone": true,
"excludeIdsFromCSVFiles": true,
"objects": [
{
"query": "SELECT Id FROM Pricebook2 WHERE Name LIKE 'NL%'",
"operation": "Readonly",
"externalId": "Id",
"master": false
},{
"query": "SELECT Id FROM Account WHERE BillingCountry__c = 'NL'",
"operation": "Readonly",
"externalId": "Id",
"master": false
},{
"query": "SELECT PKey__c,Status,BillingCountryCode,AccountId,Service_Recipient__c,IoT_Registration_Status__c,Pricebook2Id, TemplateId__c FROM ServiceContract",
"operation": "Insert",
"useValuesMapping": true,
"useSourceCSVFile": true,
"master": true
}
]
}

View File

@ -0,0 +1,25 @@
import pandas as pd
read_df_sc = pd.read_csv('../15_insert_servicecontract/ServiceContract.csv', header=0, keep_default_na=False, dtype=str)
read_df_sc_inserted = pd.read_csv('../15_insert_servicecontract/target/ServiceContract_insert_target.csv', header=0, keep_default_na=False, dtype=str)
reindex_columns_sc = ['PKey__c','Status','BillingCountryCode','Term','EndDate','StartDate','AccountId','Service_Recipient__c','IoT_Registration_Status__c','Name','Pricebook2Id', 'TemplateId__c']
reindex_columns_sc_inserted = ['AccountId','BillingCountryCode','EndDate','Errors','Id','IoT_Registration_Status__c','Name','PKey__c','Pricebook2Id','StartDate','Term']
# Reindex the columns to match the desired format
df_sc = read_df_sc.reindex(reindex_columns_sc, axis=1)
df_sc_inserted = read_df_sc_inserted.reindex(reindex_columns_sc_inserted, axis=1)
#
# Merge the inserted ServiceContract Ids (df_sc_inserted) with the TemplateId__c values prepared earlier (df_sc) based on PKey__c
merged_df_sc = pd.merge(df_sc_inserted,
df_sc[['PKey__c', 'TemplateId__c']],
left_on='PKey__c',
right_on='PKey__c',
how='left')
save_df_sc = merged_df_sc[['Id', 'TemplateId__c']]
# Save CSV
save_df_sc.to_csv('../17_update_servicecontract/ServiceContract.csv', index=False)
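
A sanity check one might add before handing the file to sfdmu (a sketch only, not part of the committed script): confirm that every contract in the update file actually received a TemplateId__c from the merge above.

import pandas as pd

# Sketch: verify the update file produced above before running the update step.
df = pd.read_csv('../17_update_servicecontract/ServiceContract.csv', keep_default_na=False, dtype=str)
missing = df[df['TemplateId__c'] == '']
if missing.empty:
    print(f'All {len(df)} contracts have a TemplateId__c; ready for the update step.')
else:
    print(f'{len(missing)} contracts have no resolved TemplateId__c:')
    print(missing['Id'].tolist())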

View File

@ -0,0 +1 @@
python .\UpdateContractsSelfRerefence.py

View File

@ -0,0 +1 @@
sf sfdmu run --sourceusername rene.kasseboehmer@vaillant.de.devrene --targetusername rene.kasseboehmer@vaillant.de.devrene

View File

@ -0,0 +1,16 @@
{
"allOrNone": true,
"objects": [
{
"query": "SELECT Id FROM ServiceContract WHERE IsTemplate__c = true",
"operation": "Readonly",
"externalId": "Id",
"master": false
},{
"query": "SELECT Id, TemplateId__c FROM ServiceContract",
"operation": "Update",
"externalId": "Id",
"useSourceCSVFile": true
}
]
}

View File

@ -10,8 +10,9 @@ import time
import sys
import json
import pandas as pd
from dotenv import load_dotenv, find_dotenv
from simple_salesforce import Salesforce
from sys import path
path.append('../..')
from sf_auth import get_sf_connection
done = False
@ -25,44 +26,7 @@ def animate():
time.sleep(0.1)
sys.stdout.write('\rDone! ')
def get_credentials(context):
"""
Get credentials for a given context from the .env file
Args:
context (str): Context name (e.g., 'qa2', 'prod')
Returns:
dict: Credentials dictionary with username, password, and security_token
"""
context = context.upper()
# Initialize credentials dictionary
credentials = {
'USERNAME': None,
'PASSWORD': None,
'SECURITY_TOKEN': None
}
if context != 'PROD':
credentials['DOMAIN'] = 'test'
# Load the .env file explicitly from one directory above
env_file = find_dotenv("../.env")
load_dotenv(env_file)
# Load all environment variables
env_vars = os.environ
for key, value in env_vars.items():
#print(f'{context}_SF_', key, value)
if f'{context}_SF_' in key:
credential_key = key.split(f'{context}_SF_')[-1].upper()
credentials[credential_key] = value
return credentials
# ...existing code for animate() function...
def extract_data(object_id, query, output_path='output', context='qa2'):
"""
@ -74,45 +38,15 @@ def extract_data(object_id, query, output_path='output', context='qa2'):
context (str): Context name for credentials (e.g., 'qa2', 'prod')
"""
try:
global done
done = False
# Get credentials based on context
credentials = get_credentials(context)
if not all(credentials.values()):
raise ValueError(f"Missing credentials for context: {context}")
# Get Salesforce connection using the new module
sf = get_sf_connection(context)
if(context == 'prod'):
# Initialize Salesforce bulk connector
sf = Salesforce(
username=credentials['USERNAME'],
password=credentials['PASSWORD'],
security_token=credentials['SECURITY_TOKEN'],
version='62.0'
)
else:
# Initialize Salesforce bulk connector
sf = Salesforce(
username=credentials['USERNAME'],
password=credentials['PASSWORD'],
security_token=credentials['SECURITY_TOKEN'],
domain=credentials['DOMAIN'],
version='62.0'
)
# Create a simple query for the desired object
#soql_query = f"""
# SELECT Id, City__c, Country__c, GeoY__c, GeoX__c, PostalCode__c, Street__c, Extension__c, HouseNo__c, FlatNo__c, Floor__c FROM SCInstalledBaseLocation__c WHERE Country__c = 'NL'
#"""
t = threading.Thread(target=animate)
t.start()
#sf.bulk2.__getattr__("SCInstalledBaseLocation__c").download(
# soql_query, path="./", max_records=2000000
#)
results = sf.bulk2.__getattr__(object_id).query(
query, max_records=2000000
)
@ -124,37 +58,7 @@ def extract_data(object_id, query, output_path='output', context='qa2'):
time.sleep(10)
done = True
t.do_run = False
"""
# Execute the Bulk query job
job = sf.bulk2.__getattr__("SCInstalledBase__c").query(soql_query)
# Polling for job completion (might take a moment)
job_id = job['id']
while True:
status = sf.bulk.job(job_id).get()['status']
if status == 'Complete' or status == 'Closed' :
break
if status == 'Aborted':
exit(1)
if status == 'Failed':
raise ValueError(f'Job failed: {job_id}')
# Get the results
result = sf.bulk.result(job_id)
df = pd.DataFrame(result.records)
# Create output directory if it doesn't exist
os.makedirs(output_path, exist_ok=True)
# Save to CSV file
csv_file = os.path.join(output_path, f'{object_id}_data.csv')
df.to_csv(csv_file, index=False)
print(f'Successfully extracted {len(df)} records from {object_id}')
return csv_file
"""
except Exception as e:
done = True
t.do_run = False

View File

@ -35,7 +35,10 @@
"query": "SELECT Id, ValidTo__c, ValidFrom__c, Country__c, City__c, PostalCode__c, District__c, Street__c, HouseNo__c, Extension__c, FlatNo__c, Floor__c, GeoY__c, GeoX__c, Resource__c, Resource__r.Employee__r.Name, Stock__c, Stock__r.ID2__c FROM SCResourceAssignment__c WHERE Country__c = '{country}' AND ValidTo__c >= TODAY"
},{
"sobject": "ContractTemplates",
"query": "SELECT id, name, TemplateName__c, status__c, Brand__r.Name, Country__c, Runtime__c FROM sccontract__c WHERE Template__c = null AND Country__c = '{country}'"
"query": "SELECT id, name, TemplateName__c, status__c, Brand__r.Name, Country__c, Runtime__c FROM SCContract__c WHERE Template__c = null AND Country__c = '{country}'"
},{
"sobject": "SCContract__c",
"query": "SELECT id, name, Template__c, status__c, Brand__r.Name, Country__c, Runtime__c, EndDate__c, StartDate__c, Account__c, AccountOwner__c, IoT_Registration_Status__c FROM SCContract__c WHERE Template__c != null AND EndDate__c >= TODAY AND Country__c = '{country}' limit 3"
}
]
}

View File

@ -24,6 +24,10 @@ country_mapping = {
'RO': 'Romania'
}
##--------------------------------------------------------------------------##
## Loading Data
##--------------------------------------------------------------------------##
# Read the input CSV files; the first row of each file is the header
read_df = pd.read_csv('../1_extract_data/results/SCInstalledBaseLocation__c.csv', header=0, keep_default_na=False, dtype=str)
read_df_ib = pd.read_csv('../1_extract_data/results/SCInstalledBase__c.csv', header=0, keep_default_na=False, dtype=str)
@ -35,6 +39,7 @@ read_df_resourceassignment = pd.read_csv('../1_extract_data/results/SCResourceAs
read_df_address_iot = pd.read_csv('../1_extract_data/results/Address.csv', header=0, keep_default_na=False, dtype=str)
read_df_location_iot = pd.read_csv('../1_extract_data/results/ParentLocation.csv', header=0, keep_default_na=False, dtype=str)
read_df_servicecontracttemplates = pd.read_csv('../1_extract_data/results/ContractTemplates.csv', header=0, keep_default_na=False, dtype=str)
read_df_servicecontracts = pd.read_csv('../1_extract_data/results/SCContract__c.csv', header=0, keep_default_na=False, dtype=str)
# Columns for reindexing
reindex_columns = ['Id','City__c','Country__c','GeoY__c','GeoX__c','PostalCode__c','Street__c','Extension__c','HouseNo__c','FlatNo__c','Floor__c']
@ -47,6 +52,7 @@ reindex_columns_resourceassignment = ['Id', 'ValidTo__c', 'ValidFrom__c', 'Count
reindex_columns_address_iot = ['Id', 'Country', 'CountryCode', 'Street', 'City', 'ParentId', 'PostalCode']
reindex_columns_location_iot = ['Id', 'Name']
reindex_columns_servicecontracttemplates = ['Id', 'Name', 'TemplateName__c', 'Status__c', 'Brand__r.Name', 'Country__c', 'Runtime__c']
reindex_columns_servicecontracts = ['Id', 'Name', 'Template__c', 'Status__c', 'Brand__r.Name', 'Country__c', 'Runtime__c', 'EndDate__c', 'StartDate__c', 'Account__c', 'AccountOwner__c', 'IoT_Registration_Status__c']
# Reindex the columns to match the desired format
df = read_df.reindex(reindex_columns, axis=1)
@ -59,6 +65,7 @@ df_resourceassignment = read_df_resourceassignment.reindex(reindex_columns_resou
df_address_iot = read_df_address_iot.reindex(reindex_columns_address_iot, axis=1)
df_location_iot = read_df_location_iot.reindex(reindex_columns_location_iot, axis=1)
df_servicecontracttemplates = read_df_servicecontracttemplates.reindex(reindex_columns_servicecontracttemplates, axis=1)
df_servicecontract = read_df_servicecontracts.reindex(reindex_columns_servicecontracts, axis=1)
##--------------------------------------------------------------------------##
## Update for IoT Addresses and Locations
@ -449,7 +456,36 @@ df_servicecontracttemplates['FSL_Sold_by__c'] = 'Service'
# Convert Runtime__c (Term) to natural numbers
df_servicecontracttemplates['Term'] = pd.to_numeric(df_servicecontracttemplates['Term'].str.extract('(\d+)')[0], errors='coerce')
df_servicecontracttemplates['Term'] = df_servicecontracttemplates['Term'].fillna(12).astype(int)
df_servicecontracttemplates['Term'] = df_servicecontracttemplates['Term'].fillna(0).astype(int)
##--------------------------------------------------------------------------##
## Service Contract
##--------------------------------------------------------------------------##
df_servicecontract['Pricebook2.Name'] = (
df_servicecontract['Country__c'].astype(str).fillna('').str.upper() + ' ' +
df_servicecontract['Brand__r.Name'].astype(str).fillna('').str.upper() + ' ' +
"SERVICE"
)
df_servicecontract = df_servicecontract.drop('Name', axis=1)
df_servicecontract = df_servicecontract.drop('Brand__r.Name', axis=1)
df_servicecontract.columns = ['PKey__c', 'TemplateId__r.PKey__c', 'Status', 'BillingCountryCode', 'Term', 'EndDate', 'StartDate', 'AccountId', 'Service_Recipient__c', 'IoT_Registration_Status__c', 'Pricebook2.Name']
df_servicecontract['IoT_Registration_Status__c'] = df_servicecontract['IoT_Registration_Status__c'].replace('', 'Open')
df_servicecontract['Name'] = df_servicecontract['PKey__c']
df_servicecontract['TemplateCountry__c'] = df_servicecontract['BillingCountryCode']
#df_servicecontract = df_servicecontract.drop('TemplateId__r.PKey__c', axis=1)
# Convert Runtime__c (Term) to natural numbers
df_servicecontract['Term'] = pd.to_numeric(df_servicecontract['Term'].str.extract('(\d+)')[0], errors='coerce')
df_servicecontract['Term'] = df_servicecontract['Term'].fillna(0).astype(int)
##--------------------------------------------------------------------------##
## Saving to CSV
##--------------------------------------------------------------------------##
# Write each DataFrame to a separate CSV file
address_df.to_csv('../4_upsert_address_and_parent_location/Address.csv', index=False)
@ -461,6 +497,7 @@ df_pricelist.to_csv('../12_insert_pricebook2_and_pricebookentries/Pricebook2.csv
df_pricelistitem.to_csv('../12_insert_pricebook2_and_pricebookentries/PricebookEntry.csv', index=False)
merged_df_location_iot.to_csv('../3_update_address_and_location_data_for_migration/Location.csv', index=False)
df_servicecontracttemplates.to_csv('../13_insert_servicecontracttemplates_dummies/ServiceContract.csv', index=False)
df_servicecontract.to_csv('../15_insert_servicecontract/ServiceContract_beforetransform.csv', index=False)
## end mapping
print('Data has been successfully transformed and saved to CSV files.')
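
To illustrate the Term conversion used above for both templates and contracts (a sketch with invented Runtime__c values): the regular expression pulls the first run of digits out of the string, and anything that yields no number falls back to 0.

import pandas as pd

# Hypothetical Runtime__c values for illustration only.
runtime = pd.Series(['24 months', '12', 'unlimited', ''])
term = pd.to_numeric(runtime.str.extract(r'(\d+)')[0], errors='coerce')
term = term.fillna(0).astype(int)
print(term.tolist())  # [24, 12, 0, 0]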

sf_auth.py Normal file (70 lines)
View File

@ -0,0 +1,70 @@
import os
from dotenv import load_dotenv, find_dotenv
from simple_salesforce import Salesforce
def get_credentials(context):
    """
    Get credentials for a given context from the .env file

    Args:
        context (str): Context name (e.g., 'qa2', 'prod')

    Returns:
        dict: Credentials dictionary with username, password, and security_token
    """
    context = context.upper()
    # Initialize credentials dictionary
    credentials = {
        'USERNAME': None,
        'PASSWORD': None,
        'SECURITY_TOKEN': None
    }
    if context != 'PROD':
        credentials['DOMAIN'] = 'test'
    # Load the .env file
    env_file = find_dotenv(".env")
    load_dotenv(env_file)
    # Load all environment variables
    env_vars = os.environ
    for key, value in env_vars.items():
        if f'{context}_SF_' in key:
            credential_key = key.split(f'{context}_SF_')[-1].upper()
            credentials[credential_key] = value
    return credentials


def get_sf_connection(context):
    """
    Create Salesforce connection based on context

    Args:
        context (str): Context name (e.g., 'qa2', 'prod')

    Returns:
        Salesforce: Authenticated Salesforce connection
    """
    credentials = get_credentials(context)
    if not all(credentials.values()):
        raise ValueError(f"Missing credentials for context: {context}")
    if context.lower() == 'prod':
        return Salesforce(
            username=credentials['USERNAME'],
            password=credentials['PASSWORD'],
            security_token=credentials['SECURITY_TOKEN'],
            version='62.0'
        )
    else:
        return Salesforce(
            username=credentials['USERNAME'],
            password=credentials['PASSWORD'],
            security_token=credentials['SECURITY_TOKEN'],
            domain=credentials['DOMAIN'],
            version='62.0'
        )
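
A minimal usage sketch, assuming the .env file follows the naming scheme the loop in get_credentials implies (e.g. QA2_SF_USERNAME, QA2_SF_PASSWORD and QA2_SF_SECURITY_TOKEN for the 'qa2' context; the placeholder values below are invented):

# .env (placeholders)
# QA2_SF_USERNAME=migration.user@example.com.qa2
# QA2_SF_PASSWORD=********
# QA2_SF_SECURITY_TOKEN=********

from sf_auth import get_sf_connection

sf = get_sf_connection('qa2')  # non-prod contexts log in against the 'test' domain
print(sf.query('SELECT COUNT() FROM ServiceContract')['totalSize'])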