{
"cells": [
{
"cell_type": "code",
"execution_count": 55,
"metadata": {},
"outputs": [],
"source": [
"import json\n",
"import xml_writers as writers\n",
"from xml.dom import minidom\n",
"import xml.etree.ElementTree as ET\n",
"import pickle\n",
"import math\n",
"import os\n",
"\n",
"\n",
"output_directory = \"LinProg_test\"\n",
"\n",
"# Load the results of the linear programming (mapping and scheduling)\n",
"with open(\"/home/sfischer/Documents/projects/wk_LinProg/LinProg_Scripts/LinProgResults.json\", \"r\") as json_file:\n",
" data = json.load(json_file)\n",
"\n",
"\n",
"combined_mapping_dict = data[\"combined_mapping_dict\"]\n",
"mapping = data[\"mapping\"]\n",
"shortest_path = data[\"shortest_path\"]\n",
"\n",
"identifier_list=[\"\",\"NoInit\"]\n",
"# identifier =\"\"\n",
"identifier =\"NoInit\"\n",
"TASK_GRAPH_FEEDBACK_PATH='/home/sfischer/Documents/projects/wk_LinProg/LinProg_Scripts/task_graph_feedback.pkl'\n",
"TASK_GRAPH_NOINIT_PATH='/home/sfischer/Documents/projects/wk_LinProg/LinProg_Scripts/task_graph_NoInit.pkl'\n",
"path_list=[TASK_GRAPH_FEEDBACK_PATH,TASK_GRAPH_NOINIT_PATH]\n",
"# with open('/home/sfischer/Documents/projects/wk_LinProg/LinProg_Scripts/task_graph_feedback.pkl', 'rb') as file:\n",
"# task_graph = pickle.load(file)\n",
"# with open('/home/sfischer/Documents/projects/wk_LinProg/LinProg_Scripts/task_graph_NoInit.pkl', 'rb') as file:\n",
"# task_graph = pickle.load(file)"
]
},
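{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch of the data layout this notebook relies on, inferred from how the\n",
"# loaded dictionaries are used below (not documented by the LinProg scripts):\n",
"#   combined_mapping_dict: {combined_node_id: [task_id, ...]}\n",
"#   mapping:               {str(combined_node_id): accelerator_node_id}\n",
"# Quick sanity check of these assumptions:\n",
"assert all(isinstance(tasks, list) for tasks in combined_mapping_dict.values())\n",
"assert all(str(key) in mapping for key in combined_mapping_dict)\n",
"print(f\"{len(combined_mapping_dict)} combined nodes mapped onto \"\n",
"      f\"{len(set(mapping.values()))} accelerator nodes\")"
]
},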
{
"cell_type": "code",
"execution_count": 56,
"metadata": {},
"outputs": [],
"source": [
"# ======================================================================\n",
"# write the map file according to the results of linear programming\n",
"# ======================================================================\n",
"for identifier, path in zip(identifier_list, path_list):\n",
"\n",
" with open(path, 'rb') as file:\n",
" task_graph = pickle.load(file)\n",
"\n",
" max_value = float('-inf')\n",
" max_key = None\n",
"\n",
" # Iterate through the dictionary\n",
" for key, values in combined_mapping_dict.items():\n",
" current_max = max(values) # Get the largest number in the current list\n",
" if current_max > max_value:\n",
" max_value = current_max\n",
" max_key = key\n",
"\n",
"\n",
"\n",
" map_writer = writers.MapWriter('map')\n",
" for combined_node in combined_mapping_dict:\n",
" for node in combined_mapping_dict[combined_node]:\n",
" map_writer.add_bindings(tasks=[node],nodes=[int(mapping[str(combined_node)])])\n",
"\n",
" for node in task_graph.nodes():\n",
" if task_graph.nodes[node].get('nodetype') == \"init\":\n",
" \n",
" successors = list(task_graph.successors(node))\n",
" successor=successors[0]\n",
" for combined_node in combined_mapping_dict:\n",
" for node2 in combined_mapping_dict[combined_node]:\n",
" if successor in combined_mapping_dict[combined_node]:\n",
" mappedTo=int(mapping[str(combined_node)])\n",
" map_writer.add_bindings(tasks=[node],nodes=[mappedTo])\n",
"\n",
" node_list=list(task_graph.nodes)\n",
" last_node = node_list.pop()\n",
" #map the final task just to now when to stop simulating\n",
" map_writer.add_bindings(tasks=[last_node],nodes=[int(mapping[str(max_key)])])\n",
" map_writer.add_bindings(tasks=[last_node+1],nodes=[int(mapping[str(max_key)])])\n",
" # Ensure the output directory exists\n",
" output_path = f'../XML/{output_directory}'\n",
" if not os.path.exists(output_path):\n",
" os.makedirs(output_path)\n",
"\n",
" map_writer.write_file(f'{output_path}/map{identifier}.xml')\n",
"\n",
"\n",
"\n",
" # ======================================================================\n",
" # create the map_addition{identifier}.xml file\n",
" # Dummy with all accelerators in the same cluster and no memory\n",
" # ======================================================================\n",
"\n",
"\n",
" \n",
" # Task addresses are mapped uniformly\n",
" # start/end address for memory of tasks. starts after direct CPU communication address\n",
" start_address=0x40001000\n",
" end_address=0x80000000\n",
"\n",
" map_add_writer=writers.Map_AdditionWriter('map_address')\n",
" map_add_writer.clear_all()\n",
" address_range=end_address-start_address\n",
"\n",
" start_range_task=math.floor(address_range/len(task_graph.nodes))\n",
"\n",
" for idx,i in enumerate(task_graph.nodes):\n",
" map_add_writer.add_address_Map(task=i,address=start_address+idx*start_range_task)\n",
"\n",
"\n",
" map_add_writer.change_root('map_cluster')\n",
"\n",
"\n",
" system_accel_num=16\n",
" for i in range(system_accel_num):\n",
" map_add_writer.add_cluster(node=i,cluster=0)\n",
"\n",
"\n",
" map_add_writer.change_root('map_parameters')\n",
"\n",
" map_add_writer.add_address_Map_param(1,start_address,start_address)\n",
"\n",
"\n",
" map_add_writer.change_root('task_parameters')\n",
"\n",
" for i in task_graph.nodes:\n",
" map_add_writer.add_task_param(i,[])\n",
" \n",
" if os.path.exists(f'map_addition{identifier}.xml'):\n",
" os.remove(f'map_addition{identifier}.xml')\n",
"\n",
" output_file_path = f'../XML/{output_directory}/map_addition{identifier}_old.xml'\n",
" if os.path.exists(output_file_path):\n",
" os.remove(output_file_path)\n",
"\n",
" map_add_writer.write_file(f'../XML/{output_directory}/map_addition{identifier}.xml')\n",
"\n"
]
},
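{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Hedged check: re-open the generated files with ElementTree (imported above)\n",
"# to confirm they are well-formed XML. The element names are produced by\n",
"# xml_writers and are not assumed here; only the file locations from the\n",
"# previous cell are reused.\n",
"for identifier in identifier_list:\n",
"    for name in (f'map{identifier}.xml', f'map_addition{identifier}.xml'):\n",
"        root = ET.parse(f'../XML/{output_directory}/{name}').getroot()\n",
"        print(f\"{name}: root <{root.tag}> with {len(root)} child elements\")"
]
}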
],
"metadata": {
"kernelspec": {
"display_name": "env",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.12"
}
},
"nbformat": 4,
"nbformat_minor": 2
}