diff --git a/CMakeLists.txt b/CMakeLists.txt index 8958ae0112..c245ad6196 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -99,6 +99,7 @@ add_subdirectory(ice40) add_subdirectory(quicklogic) add_subdirectory(testarch) add_subdirectory(tests) +add_subdirectory(sfbuild) get_target_property_required(YAPF env YAPF) add_custom_target( diff --git a/sfbuild/CMakeLists.txt b/sfbuild/CMakeLists.txt new file mode 100644 index 0000000000..0042ce4f26 --- /dev/null +++ b/sfbuild/CMakeLists.txt @@ -0,0 +1,155 @@ +# Installs sfbuild - experimental Symbiflow Build System + +function(INSTALL_DIR) + # Create directory during installation phase + set(options) + set(one_value_args INSTALL_DIRECTORY) + set(multi_value_args) + cmake_parse_arguments( + INSTALL_DIR + "${options}" + "${one_value_args}" + "${multi_value_args}" + ${ARGN} + ) + + set(make_dir_code "file(MAKE_DIRECTORY ${INSTALL_DIR_INSTALL_DIRECTORY})") + install(CODE ${make_dir_code}) + +endfunction() + +function(INSTALL_DIR_CONTENT) + # Install files from ROOT_DIRECTORY/FILES_DIRECTORY directory into a FILES_DIRECTORY subdirectory of INSTALL_DIRECTORY + set(options) + set(one_value_args + ROOT_DIRECTORY + FILES_DIRECTORY + DESTINATION) + set(multi_value_args + FILES + INSTALL_OPTS) + cmake_parse_arguments( + INSTALL_DIR_CONTENT + "${options}" + "${one_value_args}" + "${multi_value_args}" + ${ARGN} + ) + + if(NOT DEFINED INSTALL_DIR_CONTENT_ROOT_DIRECTORY) + set(INSTALL_DIR_CONTENT_ROOT_DIRECTORY .) + endif() + if(NOT DEFINED INSTALL_DIR_CONTENT_FILES_DIRECTORY) + set(INSTALL_DIR_CONTENT_FILES_DIRECTORY .) + endif() + + set(file_paths) + foreach(file ${INSTALL_DIR_CONTENT_FILES}) + list(APPEND file_paths ${INSTALL_DIR_CONTENT_ROOT_DIRECTORY}/${INSTALL_DIR_CONTENT_FILES_DIRECTORY}/${file}) + endforeach() + + install(FILES ${file_paths} + DESTINATION ${INSTALL_DIR_CONTENT_DESTINATION}/${INSTALL_DIR_CONTENT_FILES_DIRECTORY} + ${INSTALL_DIR_CONTENT_INSTALL_OPTS}) + +endfunction() + + +find_package(Python3 COMPONENTS Interpreter REQUIRED) + +get_target_property_required(VPR env VPR) +get_target_property_required(GENFASM env GENFASM) + +set(SFBUILD_SUPPORTED_PLATFORMS + xc7a50t + xc7a100t) + +# Create required directories +foreach(DIR_PATH ${SFBUILD_DIRECTORIES}) + install_dir(INSTALL_DIRECTORY ${CMAKE_INSTALL_PREFIX}/bin/${DIR_PATH}) +endforeach() + +# Install sfbuild +install_dir_content( + FILES + __init__.py + setup.py + sfbuild.py + sf_cache.py + DESTINATION bin/sfbuild + INSTALL_OPTS + PERMISSIONS WORLD_EXECUTE WORLD_READ OWNER_WRITE OWNER_READ OWNER_EXECUTE GROUP_READ GROUP_EXECUTE) +install_dir_content( + FILES __init__.py + FILES_DIRECTORY sf_common + DESTINATION bin/sfbuild + INSTALL_OPTS + PERMISSIONS WORLD_EXECUTE WORLD_READ OWNER_WRITE OWNER_READ OWNER_EXECUTE GROUP_READ GROUP_EXECUTE) +install_dir_content( + FILES __init__.py + FILES_DIRECTORY sf_module + DESTINATION bin/sfbuild + INSTALL_OPTS + PERMISSIONS WORLD_EXECUTE WORLD_READ OWNER_WRITE OWNER_READ OWNER_EXECUTE GROUP_READ GROUP_EXECUTE) +# Install common modules +install_dir_content( + FILES + fasm.py + ioplace.py + mkdirs.py + pack.py + place_constraints.py + place.py + route.py + synth.py + FILES_DIRECTORY sf_common_modules + DESTINATION bin/sfbuild + INSTALL_OPTS + PERMISSIONS WORLD_EXECUTE WORLD_READ OWNER_WRITE OWNER_READ OWNER_EXECUTE GROUP_READ GROUP_EXECUTE) +# Install XC7 modules +install_dir_content( + FILES bitstream.py + FILES_DIRECTORY sf_xc7_modules + DESTINATION bin/sfbuild + INSTALL_OPTS + PERMISSIONS WORLD_EXECUTE WORLD_READ OWNER_WRITE OWNER_READ OWNER_EXECUTE 
GROUP_READ GROUP_EXECUTE)
+# Install QuickLogic modules
+install_dir_content(
+  FILES bitstream.py
+  FILES_DIRECTORY sf_quicklogic_modules
+  DESTINATION bin/sfbuild
+  INSTALL_OPTS
+  PERMISSIONS WORLD_EXECUTE WORLD_READ OWNER_WRITE OWNER_READ OWNER_EXECUTE GROUP_READ GROUP_EXECUTE)
+# Install platform flow definitions
+set(sfbuild_supported_platform_defs)
+foreach(SFBUILD_PLATFORM ${SFBUILD_SUPPORTED_PLATFORMS})
+  set(sfbuild_platform_def "${SFBUILD_PLATFORM}.json")
+  list(APPEND sfbuild_supported_platform_defs ${sfbuild_platform_def})
+endforeach()
+install_dir_content(
+  FILES ${sfbuild_supported_platform_defs}
+  FILES_DIRECTORY platforms
+  DESTINATION bin/sfbuild
+  INSTALL_OPTS
+  PERMISSIONS WORLD_EXECUTE WORLD_READ OWNER_WRITE OWNER_READ GROUP_READ)
+
+# Detect virtualenv and set pip args accordingly
+if(DEFINED ENV{VIRTUAL_ENV} OR DEFINED ENV{CONDA_PREFIX})
+  set(_PIP_ARGS)
+else()
+  set(_PIP_ARGS "--user")
+endif()
+
+# Install the sfbuild python package. This allows users to access the python
+# modules required to write their own Symbiflow modules.
+
+# ! THIS IS NOT WORKING.
+# For some reason running ${Python3_EXECUTABLE} -m pip install -e ${CMAKE_INSTALL_PREFIX}/bin/sfbuild ${_PIP_ARGS}
+# does not install the package correctly. Most likely the pip module is not the
+# one provided by conda. Calling `which pip3` in CMake with conda's virtualenv
+# also seems to fail. Currently the sfbuild python module needs to be installed
+# manually if you want to be able to access it from anywhere.
+
+# set(PIP3)
+# execute_process(COMMAND "which pip3"
+#                 OUTPUT_VARIABLE PIP3)
+# set(PYTHON_PKG_INSTALL_CODE "execute_process(COMMAND ${PIP3} install -e ${CMAKE_INSTALL_PREFIX}/bin/sfbuild ${_PIP_ARGS})")
+# install(CODE ${PYTHON_PKG_INSTALL_CODE})
\ No newline at end of file
diff --git a/sfbuild/__init__.py b/sfbuild/__init__.py
new file mode 100644
index 0000000000..ad48ec86c7
--- /dev/null
+++ b/sfbuild/__init__.py
@@ -0,0 +1 @@
+import sfbuild
\ No newline at end of file
diff --git a/sfbuild/docs/GettingStarted.md b/sfbuild/docs/GettingStarted.md
new file mode 100644
index 0000000000..30a41a2d2e
--- /dev/null
+++ b/sfbuild/docs/GettingStarted.md
@@ -0,0 +1,321 @@
+# sfbuild
+
+## Getting started
+
+To use _**sfbuild**_ you need a working Python 3 installation, which should be
+included as part of the conda virtual environment set up during Symbiflow
+installation. _**sfbuild**_ is installed alongside _**Symbiflow**_ with any version
+of the toolchain. However, only _XC7_ architectures are currently supported and
+_Quicklogic_ support is a work in progress. _**sfbuild**_'s installation directory
+is `bin/sfbuild`, under your _**Symbiflow**_ installation directory. `sfbuild.py` is
+the script that you should run to use _**sfbuild**_.
+
+To get started with a project that already uses sfbuild, go to the project's
+directory and run the following line to build a bitstream:
+```
+$ python3 /path/to/sfbuild.py flow.json -p platform_name -t bitstream
+```
+
+Substitute `platform_name` with the name of the target platform (e.g. `x7a50t`).
+`flow.json` should be the **project's flow configuration** file included with the
+project. If you are unsure whether you have the right file, compare its contents
+with the example shown in the "_Using sfbuild to build a target_" section.
+
+The location of the bitstream file will be indicated by sfbuild after the flow
+completes.
Look for a line like this one on stdout:
+
+```
+Target `bitstream` -> build/arty_35/top.bit
+```
+
+-------------------------------------------------------------------------------------
+
+## Fundamental concepts
+
+If you want to create a new sfbuild project, it's highly recommended that you
+read this section first.
+
+### sfbuild
+
+_**sfbuild**_ is a modular build system designed to handle various
+_Verilog-to-bitstream_ flows for FPGAs. It works by wrapping the necessary tools
+in Python scripts, which are called **sfbuild modules**. The modules are then
+referenced in **platform's flow definition** files along with configuration
+specific to a given platform. These files come included as a part of _**sfbuild**_
+for the following platforms:
+
+* x7a50t
+* x7a100t
+* x7a200t (_soon_)
+
+You can also write your own **platform's flow definition** file if you want to bring
+support to a different device.
+
+Each project that uses _**sfbuild**_ to perform any flow should include a _.json_
+file describing the project. The purpose of that file is to configure inputs
+for the flow and possibly override configuration values if necessary.
+
+### Modules
+
+A **module** (also referred to as an **sfbuild module** in situations where there
+might be confusion between Python's _modules_ and sfbuild's _modules_) is a Python
+script that wraps a tool used within **Symbiflow's** ecosystem. The main purpose of
+this wrapper is to provide a unified interface for sfbuild to use and configure the
+tool, as well as to provide information about the files required and produced by
+the tool.
+
+### Dependencies
+
+A **dependency** is any file, directory, or list of such that a **module** takes as
+its input or produces as its output.
+
+Modules specify their dependencies by using symbolic names instead of file paths.
+The files they produce are also given symbolic names and paths, which are either set
+through the **project's flow configuration** file or derived from the paths of the
+dependencies taken by the module.
+
+### Target
+
+A **target** is a dependency that the user has asked sfbuild to produce.
+
+### Flow
+
+A **flow** is a set of **modules** executed in the right order to produce a
+**target**.
+
+### .symbicache
+
+All **dependencies** are tracked by a modification tracking system which stores
+hashes of the files (directories always get a `'0'` hash) in a `.symbicache` file in
+the root of the project. When _**sfbuild**_ constructs a **flow**, it will try to
+omit the execution of modules which would receive the same data on their input.
+There's a strong _assumption_ there that a **module**'s output remains unchanged if
+the input doesn't change, i.e. **modules** are deterministic.
+
+### Resolution
+
+A **dependency** is said to be **resolved** if it meets one of the following
+criteria:
+
+* it exists on persistent storage and its hash matches the one stored in
+  `.symbicache`
+* there exists a **flow** such that all of the dependencies of its modules are
+  **resolved** and it produces the **dependency** in question.
+
+### Platform's flow definition
+
+A **platform's flow definition** is a piece of data describing a space of flows for
+a given platform, serialized into a _JSON_ file. It's stored in a file named after
+the device under `sfbuild/platforms`.
+
+A **platform's flow definition** contains a list of modules available for
+constructing flows and defines a set of values which the modules can reference.
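+
+To give a feel for the format, here is a heavily trimmed excerpt based on the
+`xc7a50t` definition shipped with this patch (`sfbuild/platforms/xc7a50t.json`);
+the complete file also sets VPR options and per-module parameters:
+
+```json
+{
+    "values": {
+        "part_name": "xc7a35tcsg324-1",
+        "device": "xc7a50t_test",
+        "bitstream_device": "artix7"
+    },
+    "modules": {
+        "synth": "common:synth",
+        "pack": "common:pack",
+        "bitstream": "xc7:bitstream"
+    }
+}
+```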
+
+For some modules, it may also define a set of parameters used during their
+construction (the `module_options` section of the file). The `mkdirs` module uses
+this to allow producing multiple directories as separate dependencies. This,
+however, is an experimental feature which will possibly be removed in favor of
+having multiple instances of the same module with renamable outputs.
+
+Not all **dependencies** have to be **resolved** at this stage; a **platform's flow
+definition**, for example, won't be able to provide a list of source files needed
+in a **flow**.
+
+### Project's flow configuration
+
+Similarly to the **platform's flow definition**, the **project's flow
+configuration** is a _JSON_ file that is used to configure **modules**. There are,
+however, a couple of differences:
+
+* The most obvious one is that this file is unique to a project and
+  is provided by the user of _**sfbuild**_.
+
+* The other difference is that it doesn't list the **modules** available for the
+  platform.
+
+* All the values provided in the **project's flow configuration** will override
+  those provided in the **platform's flow definition**.
+
+* It can contain sections with configurations for different platforms.
+
+* Unlike the **platform's flow definition**, it can give explicit paths to
+  dependencies.
+
+* At this stage all mandatory **dependencies** should be resolved.
+
+Typically the **project's flow configuration** will be used to resolve dependencies
+for _HDL source code_ and _device constraints_.
+
+## Using sfbuild to build a target
+
+To build a **target** "`target_name`", use the following command:
+```
+$ python3 /path/to/sfbuild.py flow.json -p platform_device_name -t target_name
+```
+where `flow.json` is the path to the **project's flow configuration**.
+
+For example, let's consider the following
+**project's flow configuration (flow.json)**:
+
+```json
+{
+    "dependencies": {
+        "sources": ["counter.v"],
+        "xdc": ["arty.xdc"],
+        "synth_log": "synth.log",
+        "pack_log": "pack.log",
+        "top": "top"
+    },
+    "xc7a50t": {
+        "dependencies": {
+            "build_dir": "build/arty_35"
+        }
+    }
+}
+```
+
+It specifies a list of paths to Verilog source files as the "`sources`" dependency.
+Similarly, it provides an _XDC_ file with constraints (the "`xdc`" dependency).
+
+It also names paths for the synthesis and packing logs ("`synth_log`",
+"`pack_log`"). These two are optional on-demand outputs, meaning they won't be
+produced unless their paths are explicitly set.
+
+The "`top`" value is set in order to specify the name of the top Verilog module,
+which is required during synthesis.
+
+"`build_dir`" is an optional helper dependency. When available, modules will put
+their outputs into that directory. It's also an _on-demand_ output of the `mkdirs`
+module in the _xc7a50t_ flow definition, which means that if the specified directory
+does not exist, `mkdirs` will create it and provide it as the `build_dir`
+dependency.
+
+With this flow configuration, you can build a bitstream for arty_35 using the
+following command:
+
+```
+$ python3 /path/to/sfbuild.py flow.json -p x7a50t -t bitstream
+```
+
+### Pretend mode
+
+You can also add the `--pretend` (`-P`) option if you just want to see the results
+of dependency resolution for a specified target without building it. This is useful
+when you just want to know what files will be generated and where they will be
+stored.
+
+### Info mode
+
+Modules have the ability to attach descriptions to the dependencies they produce.
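+
+For example, a module can attach such descriptions through its `prod_meta` field.
+The snippet below is a trimmed excerpt (not a complete module) based on the `synth`
+module from `sf_common_modules/synth.py` in this tree:
+
+```python
+# Inside the module's __init__; only metadata-related fields are shown.
+self.produces = ['eblif', 'json', 'synth_log?']  # trimmed for brevity
+# Descriptions displayed by `sfbuild --info`:
+self.prod_meta = {
+    'eblif': 'Extended BLIF hierarchical sequential designs file\n'
+             'generated by YOSYS',
+    'json': 'JSON file containing a design generated by YOSYS',
+    'synth_log': 'YOSYS synthesis log'
+}
+```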
+
+Running _**sfbuild**_ with the `--info` (`-i`) flag allows you to see descriptions
+of these dependencies. This option doesn't require a target to be specified, but
+you still have to provide a flow configuration and a platform name.
+
+This is still an experimental option: most targets currently lack descriptions, and
+no information about whether an output is _on-demand_ is currently displayed.
+
+Example:
+```
+$ python3 /path/to/sfbuild.py flow.json -p x7a50t -i
+```
+```
+Platform dependencies/targets:
+    build_dir:
+        module: `mk_build_dir`
+    eblif: Extended BLIF hierarchical sequential designs file
+           generated by YOSYS
+        module: `synth`
+    fasm_extra:
+        module: `synth`
+    json: JSON file containing a design generated by YOSYS
+        module: `synth`
+    synth_json:
+        module: `synth`
+    sdc:
+        module: `synth`
+```
+
+_This is only a snippet of the entire output._
+
+### Summary of all available sfbuild options
+
+| long       | short | arguments              | description                                      |
+|------------|:-----:|------------------------|--------------------------------------------------|
+| --platform | -p    | device name            | Specify target device name (e.g. x7a100t)        |
+| --target   | -t    | target dependency name | Specify target to produce                        |
+| --info     | -i    | -                      | Display information about available targets      |
+| --pretend  | -P    | -                      | Resolve dependencies without executing the flow  |
+
+### Dependency resolution display
+
+sfbuild displays some information about dependencies when requesting a target.
+
+Here's an example of a possible output when trying to build the `bitstream` target:
+```
+Project status:
+    [R] bitstream:  bitstream -> build/arty_35/top.bit
+    [O] build_dir:  build/arty_35
+    [R] eblif:  synth -> build/arty_35/top.eblif
+    [R] fasm:  fasm -> build/arty_35/top.fasm
+    [R] fasm_extra:  synth -> build/arty_35/top_fasm_extra.fasm
+    [R] io_place:  ioplace -> build/arty_35/top.ioplace
+    [R] json:  synth -> build/arty_35/top.json
+    [R] net:  pack -> build/arty_35/top.net
+    [R] pack_log:  pack -> pack.log
+    [X] pcf:  MISSING
+    [R] place:  place -> build/arty_35/top.place
+    [R] place_constraints:  place_constraints -> build/arty_35/top.preplace
+    [R] route:  route -> build/arty_35/top.route
+    [R] sdc:  synth -> build/arty_35/top.sdc
+    [N] sources:  ['counter.v']
+    [R] synth_json:  synth -> build/arty_35/top_io.json
+    [R] synth_log:  synth -> synth.log
+    [R] synth_v:  synth -> build/arty_35/top_synth.v
+    [R] timing_rpt:  pack -> build/arty_35/pre_pack.report_timing.setup.rpt
+    [R] util_rpt:  pack -> build/arty_35/packing_pin_util.rpt
+    [O] xdc:  ['arty.xdc']
+```
+
+The letters in the boxes describe the status of the dependency whose name is next
+to the box.
+
+ * **X** - dependency unresolved. This isn't always a bad sign. Some dependencies
+   are not required, such as "`pcf`".
+ * **U** - dependency unreachable. The dependency has a module that could produce
+   it, but the module's dependencies are unresolved. This doesn't say whether the
+   dependency was necessary or not. Also note that if the dependency plays no role
+   in building the target, the _resolution algorithm_ will skip it and it will be
+   reported as unreachable, even if it can be built as a target.
+ * **O** - dependency present, unchanged. This dependency is already built and is
+   confirmed to stay unchanged during flow execution.
+ * **N** - dependency present, new/changed. This dependency is already present on
+   the persistent storage, but it was either missing earlier, or
+   its content changed since the last time.
+ (WARNING: it won't continue to be reported as "**N**" after a successful build of + any target. This may lead to some false "**O**"s in some complex scenarios. This + should be fixed in the future.) + * **S** - depenendency not present, resolved. This dependency is not + currently available on the persistent storage, however it will be produced within + flow's execution. + * **R** - depenendency present, resolved, requires rebuild. This dependency is + currently available on the persistent storage, however it has to be rebuilt due + to the changes in the project. + +Additional info about a dependency will be displayed next to its name after a +colon: + +* In case of dependencies that are to be built (**S**/**R**), there's a name of a + module that will produce this dependency, followed by "`->`" and a path or list of + paths to file(s)/directory(ies) that will be produced as this dependency. + +* In case of dependencies which do not require execution of any modules, only + a path or list of paths to file(s)/directory(ies) that will be displayed + +* In case of unresolved dependencies (**X**), which are never produced by any + module, a text sying "`MISSING`" will be displayed +* In case of unreachable dependencies, a name of such module that could produce + them will be displayed followed by "`-> ???`". + +There's currently a small issue when printing information about dependencies, where +"**S**"s get recognised as "**R**"s. This doesn't affect the build process tho. + +In the example above file `counter.v` has been modified and is now marked as +"**N**". This couses a bunch of other dependencies to be reqbuilt ("**R**"). +`build_dir` was already present, so it's marked as "**O**". \ No newline at end of file diff --git a/sfbuild/platforms/xc7a100t.json b/sfbuild/platforms/xc7a100t.json new file mode 100644 index 0000000000..bbfcc2d756 --- /dev/null +++ b/sfbuild/platforms/xc7a100t.json @@ -0,0 +1,59 @@ +{ + "values": { + "part_name": "xc7a100tcsg324-1", + "device": "xc7a100t_test", + "bitstream_device": "artix7", + "vpr_options": { + "max_router_iterations": 500, + "routing_failure_predictor": "off", + "router_high_fanout_threshold": -1, + "constant_net_method": "route", + "route_chan_width": 500, + "router_heap": "bucket", + "clock_modeling": "route", + "place_delta_delay_matrix_calculation_method": "dijkstra", + "place_delay_model": "delta", + "router_lookahead": "extended_map", + "check_route": "quick", + "strict_checks": "off", + "allow_dangling_combinational_nodes": "on", + "disable_errors": "check_unbuffered_edges:check_route", + "congested_routing_iteration_threshold": "0.8", + "incremental_reroute_delay_ripup": "off", + "base_cost_type": "delay_normalized_length_bounded", + "bb_factor": 10, + "acc_fac": "0.7", + "astar_fac": "1.8", + "initial_pres_fac": "2.828", + "pres_fac_mult": "1.2", + "check_rr_graph": "off", + "suppress_warnings": "${noisyWarnings},sum_pin_class:check_unbuffered_edges:load_rr_indexed_data_T_values:check_rr_node:trans_per_R:check_route:set_rr_graph_tool_comment:calculate_average_switch" + } + }, + + "modules": { + "mk_build_dir": "common:mkdirs", + "synth": "common:synth", + "pack": "common:pack", + "ioplace": "common:ioplace", + "place_constraints": "common:place_constraints", + "place": "common:place", + "route": "common:route", + "fasm": "common:fasm", + "bitstream": "xc7:bitstream" + }, + + "module_options": { + "mk_build_dir": { + "params": { + "build_dir": "build/${part_name}" + } + }, + "synth": { + "values": { + "tcl_scripts": 
"${shareDir}/scripts/xc7", + "techmap": "${shareDir}/techmaps/xc7_vpr/techmap" + } + } + } +} \ No newline at end of file diff --git a/sfbuild/platforms/xc7a50t.json b/sfbuild/platforms/xc7a50t.json new file mode 100644 index 0000000000..4b3d07dbbb --- /dev/null +++ b/sfbuild/platforms/xc7a50t.json @@ -0,0 +1,59 @@ +{ + "values": { + "part_name": "xc7a35tcsg324-1", + "device": "xc7a50t_test", + "bitstream_device": "artix7", + "vpr_options": { + "max_router_iterations": 500, + "routing_failure_predictor": "off", + "router_high_fanout_threshold": -1, + "constant_net_method": "route", + "route_chan_width": 500, + "router_heap": "bucket", + "clock_modeling": "route", + "place_delta_delay_matrix_calculation_method": "dijkstra", + "place_delay_model": "delta", + "router_lookahead": "extended_map", + "check_route": "quick", + "strict_checks": "off", + "allow_dangling_combinational_nodes": "on", + "disable_errors": "check_unbuffered_edges:check_route", + "congested_routing_iteration_threshold": "0.8", + "incremental_reroute_delay_ripup": "off", + "base_cost_type": "delay_normalized_length_bounded", + "bb_factor": 10, + "acc_fac": "0.7", + "astar_fac": "1.8", + "initial_pres_fac": "2.828", + "pres_fac_mult": "1.2", + "check_rr_graph": "off", + "suppress_warnings": "${noisyWarnings},sum_pin_class:check_unbuffered_edges:load_rr_indexed_data_T_values:check_rr_node:trans_per_R:check_route:set_rr_graph_tool_comment:calculate_average_switch" + } + }, + + "modules": { + "mk_build_dir": "common:mkdirs", + "synth": "common:synth", + "pack": "common:pack", + "ioplace": "common:ioplace", + "place_constraints": "common:place_constraints", + "place": "common:place", + "route": "common:route", + "fasm": "common:fasm", + "bitstream": "xc7:bitstream" + }, + + "module_options": { + "mk_build_dir": { + "params": { + "build_dir": "build/${part_name}" + } + }, + "synth": { + "values": { + "tcl_scripts": "${shareDir}/scripts/xc7", + "techmap": "${shareDir}/techmaps/xc7_vpr/techmap" + } + } + } +} \ No newline at end of file diff --git a/sfbuild/setup.py b/sfbuild/setup.py new file mode 100644 index 0000000000..c52b8e5406 --- /dev/null +++ b/sfbuild/setup.py @@ -0,0 +1,11 @@ +from setuptools import setup + +setup( + name = 'sfbuild', + version = '0.0.1', + author = 'antmicro', + packages = ['sf_module', 'sf_common'], + license = 'ISC', + description = 'Framework required to write Symbiflow modules', + install_requires = ['colorama'] +) \ No newline at end of file diff --git a/sfbuild/sf_cache.py b/sfbuild/sf_cache.py new file mode 100755 index 0000000000..b2f6f01722 --- /dev/null +++ b/sfbuild/sf_cache.py @@ -0,0 +1,116 @@ +#!/usr/bin/python3 + +# This is "SymbiCache". It's used to track changes among dependencies and keep +# the status of the files on a persistent storage. +# Files which are tracked get their checksums calculated and stored in a file. +# If file's checksum differs from the one saved in a file, that means, the file +# has changed. + +import os +import zlib +import json + +from sf_common import fatal + +def get_file_hash(path: str): + with open(path, 'rb') as f: + b = f.read() + return str(zlib.adler32(b)) + +class SymbiCache: + hashes: 'dict[str, dict[str, str]]' + status: 'dict[str, str]' + cachefile_path: str + + def __init__(self, cachefile_path): + """ `chachefile_path` - path to a file used for persistent storage of + checksums. 
""" + + self.status = {} + self.cachefile_path = cachefile_path + self.load() + + def _try_pop_consumer(self, path: str, consumer: str): + if self.status.get(path) and self.status[path].get(consumer): + self.status[path].pop(consumer) + if len(self.status[path]) == 0: + self.status.pop(path) + if self.hashes.get(path) and self.hashes[path].get(consumer): + self.hashes[path].pop(consumer) + if len(self.hashes[path]) == 0: + self.hashes.pop(path) + + def _try_push_consumer_hash(self, path: str, consumer: str, hash): + if not self.hashes.get(path): + self.hashes[path] = {} + self.hashes[path][consumer] = hash + def _try_push_consumer_status(self, path: str, consumer: str, status): + if not self.status.get(path): + self.status[path] = {} + self.status[path][consumer] = status + + def _get_last_hash(self, path: str, consumer: str): + last_hashes = self.hashes.get(path) + if last_hashes is None: + return None + return last_hashes.get(consumer) + + def update(self, path: str, consumer: str): + """ Add/remove a file to.from the tracked files, update checksum + if necessary and calculate status. + + Multiple hashes are stored per file, one for each consumer module. + "__target" is used as a convention for a "fake" consumer in case the file + is requested as a target and not used by a module within the active flow. + """ + + isdir = os.path.isdir(path) + if not (os.path.isfile(path) or os.path.islink(path) or isdir): + self._try_pop_consumer(path, consumer) + return True + hash = 0 # Directories always get '0' hash. + if not isdir: + hash = get_file_hash(path) + last_hash = self._get_last_hash(path, consumer) + if hash != last_hash: + self._try_push_consumer_status(path, consumer, 'changed') + self._try_push_consumer_hash(path, consumer, hash) + return True + else: + self._try_push_consumer_status(path, consumer, 'same') + return False + + def get_status(self, path: str, consumer: str): + """ Get status for a file with a given path. + returns 'untracked' if the file is not tracked or hasn't been + treated with `update` procedure before calling `get_status`. """ + + statuses = self.status.get(path) + if not statuses: + return 'untracked' + status = statuses.get(consumer) + if not status: + return 'untracked' + return status + + def load(self): + """Loads cache's state from the persistent storage""" + + try: + with open(self.cachefile_path, 'r') as f: + b = f.read() + self.hashes = json.loads(b) + except json.JSONDecodeError as jerr: + print('WARNING: .symbicache is corrupted! ' + 'This will cause flow to re-execute from the beggining.') + self.hashes = {} + except FileNotFoundError: + print('Couldn\'t open .symbicache cache file. ' + 'This will cause flow to re-execute from the beggining.') + self.hashes = {} + + def save(self): + """Saves cache's state to the persistent storage""" + with open(self.cachefile_path, 'w') as f: + b = json.dumps(self.hashes, indent=4) + f.write(b) \ No newline at end of file diff --git a/sfbuild/sf_common/__init__.py b/sfbuild/sf_common/__init__.py new file mode 100644 index 0000000000..f920240c13 --- /dev/null +++ b/sfbuild/sf_common/__init__.py @@ -0,0 +1,150 @@ +import subprocess +import argparse +import os +import shutil +import sys +import re + +# Returns decoded dependency name along with a bool telling whether the +# dependency is required. +# Examples: "required_dep" -> ("required_dep", True) +# "maybe_dep?" 
-> ("maybe_dep", False) +def decompose_depname(name: str): + required = True + if name[len(name) - 1] == '?': + required = False + name = name[:len(name) - 1] + return name, required + +# Represents argument list for VPR (Versatile Place and Route) +class VprArgs: + arch_dir: str + arch_def: str + lookahead: str + rr_graph: str + rr_graph_xml: str + place_delay: str + device_name: str + eblif: str + optional: list + + def __init__(self, share, device, eblif, vpr_options=[], sdc_file=None): + self.arch_dir = os.path.join(share, 'arch') + self.arch_def = os.path.join(self.arch_dir, device, 'arch.timing.xml') + self.lookahead = \ + os.path.join(self.arch_dir, device, + 'rr_graph_' + device + '.lookahead.bin') + self.rr_graph = \ + os.path.join(self.arch_dir, device, + 'rr_graph_' + device + '.rr_graph.real.bin') + self.rr_graph_xml = \ + os.path.join(self.arch_dir, device, + 'rr_graph_' + device + '.rr_graph.real.xml') + self.place_delay = \ + os.path.join(self.arch_dir, device, + 'rr_graph_' + device + '.place_delay.bin') + self.device_name = device.replace('_', '-') + self.eblif = os.path.realpath(eblif) + self.optional = vpr_options + if sdc_file: + self.optional += ['--sdc_file', sdc_file] + +# Execute subroutine +def sub(*args, env=None, cwd=None): + # print(args) + out = subprocess.run(args, capture_output=True, env=env, cwd=cwd) + if out.returncode != 0: + print(f'[ERROR]: {args[0]} non-zero return code.\n' + f'stderr:\n{out.stderr.decode()}\n\n' + ) + exit(out.returncode) + return out.stdout + +# Execute `vpr` +def vpr(mode: str, vprargs: VprArgs, cwd=None): + modeargs = [] + if mode == 'pack': + modeargs = ['--pack'] + elif mode == 'place': + modeargs = ['--place'] + elif mode == 'route': + modeargs = ['--route'] + + return sub(*(['vpr', + vprargs.arch_def, + vprargs.eblif, + '--device', vprargs.device_name, + '--read_rr_graph', vprargs.rr_graph, + '--read_router_lookahead', vprargs.lookahead, + '--read_placement_delay_lookup', vprargs.place_delay] + + modeargs + vprargs.optional), + cwd=cwd) + +# Converts a dictionary of named options for CLI program to a list. +# Example: { "option_name": "value" } -> [ "--option_name", "value" ] +def options_dict_to_list(opt_dict: dict): + opts = [] + for key, val in opt_dict.items(): + opts.append('--' + key) + if not(type(val) is list and val == []): + opts.append(str(val)) + return opts + +# Emit some noisy warnings +def noisy_warnings(device): + os.environ['OUR_NOISY_WARNINGS'] = 'noisy_warnings-' + device + '_pack.log' + +# Get current PWD +def my_path(): + mypath = os.path.realpath(sys.argv[0]) + return os.path.dirname(mypath) + +# Save VPR logc (moves the default output file into a desired path) +def save_vpr_log(filename, build_dir=''): + shutil.move(os.path.join(build_dir, 'vpr_stdout.log'), filename) + +# Print a message informing about an error that has occured and terminate program +# with a given return code. +def fatal(code, message): + print(f'[FATAL ERROR]: {message}') + exit(code) + +# ResolutionEnv is used to hold onto mappings for variables used in flow and +# perform text substitutions using those variables. +# Variables can be referred in any "resolvable" string using the following +# syntax: 'Some static text ${variable_name}'. The '${variable_name}' part +# will be replaced by the value associated with name 'variable_name', is such +# mapping exists. 
+class ResolutionEnv: + values: dict + + def __init__(self, values={}): + self.values = values + + def __copy__(self): + return ResolutionEnv(self.values.copy()) + + # Perform resolution on `s`. + # `s` can be a `str`, a `dict` with arbitrary keys and resolvable values, + # or a `list` of resolvable values. + def resolve(self, s): + if type(s) is str: + match_list = list(re.finditer('\$\{([^${}]*)\}', s)) + # Assupmtion: re.finditer finds matches in a left-to-right order + match_list.reverse() + for match in match_list: + v = self.values.get(match.group(1)) + if not v: + continue + span = match.span() + s = s[:span[0]] + v + s[span[1]:] + elif type(s) is list: + s = list(map(self.resolve, s)) + elif type(s) is dict: + s = dict([(k, self.resolve(v)) for k, v in s.items()]) + return s + + # Add mappings from `values` + def add_values(self, values: dict): + for k, v in values.items(): + self.values[k] = self.resolve(v) diff --git a/sfbuild/sf_common_modules/fasm.py b/sfbuild/sf_common_modules/fasm.py new file mode 100644 index 0000000000..9d12bee1f9 --- /dev/null +++ b/sfbuild/sf_common_modules/fasm.py @@ -0,0 +1,81 @@ +#!/usr/bin/python3 + +# Symbiflow Stage Module + +# ----------------------------------------------------------------------------- # + +import os +from sf_common import * +from sf_module import * + +# ----------------------------------------------------------------------------- # + +def concat_fasm(fasm: str, fasm_extra: str, output: str): + fasm_data = None + fasm_extra_data = None + with open(fasm, 'r') as fasm_file, open(fasm_extra, 'r') as fasm_extra_file: + fasm_data = fasm_file.read() + fasm_extra_data = fasm_extra_file.read() + data = fasm_data + '\n' + fasm_extra_data + + with open(output, 'w') as output_file: + output_file.write(data) + +def fasm_output_name(eblif: str): + p = eblif + m = re.match('(.*)\\.[^.]*$', eblif) + if m: + p = m.groups()[0] + return p + '.fasm' + +class FasmModule(Module): + + def map_io(self, ctx: ModuleContext): + mapping = {} + mapping['fasm'] = fasm_output_name(ctx.takes.eblif) + return mapping + + def execute(self, ctx: ModuleContext): + build_dir = os.path.dirname(ctx.takes.eblif) + + vpr_options = [] + if ctx.values.vpr_options: + vpr_options = options_dict_to_list(ctx.values.vpr_options) + + vprargs = VprArgs(ctx.share, ctx.values.device, ctx.takes.eblif, + vpr_options=vpr_options) + + yield 'Generating FASM...' + sub(*(['genfasm', vprargs.arch_def, + os.path.realpath(ctx.takes.eblif), + '--device', vprargs.device_name, + '--read_rr_graph', vprargs.rr_graph + ] + vpr_options), cwd=build_dir) + + default_fasm_output_name = fasm_output_name(ctx.takes.eblif) + if default_fasm_output_name != ctx.outputs.fasm: + shutil.move(default_fasm_output_name, ctx.outputs.fasm) + + if ctx.takes.fasm_extra: + yield 'Appending extra FASM...' + concat_fasm(ctx.outputs.fasm, ctx.takes.fasm_extra, ctx.outputs.fasm) + else: + yield 'No extra FASM to append' + + def __init__(self, _): + self.name = 'fasm' + self.no_of_phases = 2 + self.takes = [ + 'eblif', + 'net', + 'place', + 'route', + 'fasm_extra?' + ] + self.produces = [ 'fasm' ] + self.values = [ + 'device', + 'vpr_options?' 
+ ] + +do_module(FasmModule) diff --git a/sfbuild/sf_common_modules/ioplace.py b/sfbuild/sf_common_modules/ioplace.py new file mode 100644 index 0000000000..cb665df73c --- /dev/null +++ b/sfbuild/sf_common_modules/ioplace.py @@ -0,0 +1,64 @@ +#!/usr/bin/python3 + +# Symbiflow Stage Module + +# ----------------------------------------------------------------------------- # + +import os + +from sf_common import * +from sf_module import * + +# ----------------------------------------------------------------------------- # + +class IOPlaceModule(Module): + def map_io(self, ctx: ModuleContext): + mapping = {} + + p = ctx.takes.net + m = re.match('(.*)\\.[^.]*$', ctx.takes.net) + if m: + p = m.groups()[0] + + mapping['io_place'] = p + '.ioplace' + + return mapping + + def execute(self, ctx: ModuleContext): + io_gen = os.path.join(ctx.share, 'scripts/prjxray_create_ioplace.py') + pinmap = os.path.join(ctx.share, 'arch', ctx.values.device, + ctx.values.part_name, 'pinmap.csv') + + if not os.path.isfile(pinmap) and not os.path.islink(pinmap): + fatal(-1, f'Pinmap file \"{pinmap}\" not found') + + pcf_opts = ['--pcf', ctx.takes.pcf] if ctx.takes.pcf else [] + + yield 'Generating io.place...' + data = sub(*(['python3', io_gen, + '--blif', ctx.takes.eblif, + '--map', pinmap, + '--net', ctx.takes.net] + + pcf_opts)) + + yield 'Saving ioplace data...' + with open(ctx.outputs.io_place, 'wb') as f: + f.write(data) + + def __init__(self, _): + self.name = 'io_place' + self.no_of_phases = 2 + self.takes = [ + 'eblif', + 'net', + 'pcf?' + ] + self.produces = [ 'io_place' ] + self.values = [ + 'device', + 'part_name' + ] + + + +do_module(IOPlaceModule) \ No newline at end of file diff --git a/sfbuild/sf_common_modules/mkdirs.py b/sfbuild/sf_common_modules/mkdirs.py new file mode 100644 index 0000000000..a6abe97785 --- /dev/null +++ b/sfbuild/sf_common_modules/mkdirs.py @@ -0,0 +1,40 @@ +#!/usr/bin/python3 + +# Symbiflow Stage Module + +""" This module is used as a helper in a abuild chain to automate creating build +directiores. It' currenty the only parametric module, meaning it can take +user-provided input at an early stage in order todetermine its take/produces +I/O. This allows other repesenting configurable directories, such as a build +directory as dependencies and by doing so, allow the dependency algorithm to +lazily create the directories if they become necessary. """ + +# ----------------------------------------------------------------------------- # + +import os +from sf_common import * +from sf_module import * + +# ----------------------------------------------------------------------------- # + +class MkDirsModule(Module): + deps_to_produce: 'dict[str, str]' + + def map_io(self, ctx: ModuleContext): + return ctx.r_env.resolve(self.deps_to_produce) + + def execute(self, ctx: ModuleContext): + outputs = vars(ctx.outputs) + for _, path in outputs.items(): + yield f'Creating directory {path}...' 
+ os.makedirs(path, exist_ok=True) + + def __init__(self, params): + self.name = 'mkdirs' + self.no_of_phases = len(params) if params else 0 + self.takes = [] + self.produces = list(params.keys()) if params else [] + self.values = [] + self.deps_to_produce = params + +do_module(MkDirsModule) \ No newline at end of file diff --git a/sfbuild/sf_common_modules/pack.py b/sfbuild/sf_common_modules/pack.py new file mode 100644 index 0000000000..ac5a42013a --- /dev/null +++ b/sfbuild/sf_common_modules/pack.py @@ -0,0 +1,82 @@ +#!/usr/bin/python3 + +# Symbiflow Stage Module + +# ----------------------------------------------------------------------------- # + +import os +import re +from sf_common import * +from sf_module import * + +# ----------------------------------------------------------------------------- # + +DEFAULT_TIMING_RPT = 'pre_pack.report_timing.setup.rpt' +DEFAULT_UTIL_RPT = 'packing_pin_util.rpt' + +class PackModule(Module): + def map_io(self, ctx: ModuleContext): + mapping = {} + + p = ctx.takes.eblif + build_dir = os.path.dirname(p) + m = re.match('(.*)\\.[^.]*$', ctx.takes.eblif) + if m: + p = m.groups()[0] + mapping['net'] = p + '.net' + mapping['util_rpt'] = \ + os.path.join(build_dir, DEFAULT_UTIL_RPT) + mapping['timing_rpt'] = \ + os.path.join(build_dir, DEFAULT_TIMING_RPT) + + return mapping + + def execute(self, ctx: ModuleContext): + eblif = os.path.realpath(ctx.takes.eblif) + sdc = os.path.realpath(ctx.takes.sdc) if ctx.takes.sdc else None + vpr_options = [] + if ctx.values.vpr_options: + vpr_options = options_dict_to_list(ctx.values.vpr_options) + vpr_args = VprArgs(ctx.share, ctx.values.device, eblif, sdc_file=sdc, + vpr_options=vpr_options) + build_dir = os.path.dirname(ctx.outputs.net) + + noisy_warnings(ctx.values.device) + + yield 'Packing with VPR...' + vpr('pack', vpr_args, cwd=build_dir) + + og_log = os.path.join(build_dir, 'vpr_stdout.log') + + yield 'Moving/deleting files...' + if ctx.outputs.pack_log: + shutil.move(og_log, ctx.outputs.pack_log) + else: + os.remove(og_log) + + if ctx.outputs.timing_rpt: + shutil.move(os.path.join(build_dir, DEFAULT_TIMING_RPT), + ctx.outputs.timing_rpt) + if ctx.outputs.util_rpt: + shutil.move(os.path.join(build_dir, DEFAULT_UTIL_RPT), + ctx.outputs.util_rpt) + + def __init__(self, _): + self.name = 'pack' + self.no_of_phases = 2 + self.takes = [ + 'eblif', + 'sdc?' + ] + self.produces = [ + 'net', + 'util_rpt', + 'timing_rpt', + 'pack_log?' + ] + self.values = [ + 'device', + 'vpr_options?' 
+ ] + +do_module(PackModule) \ No newline at end of file diff --git a/sfbuild/sf_common_modules/place.py b/sfbuild/sf_common_modules/place.py new file mode 100644 index 0000000000..0cdeead7ac --- /dev/null +++ b/sfbuild/sf_common_modules/place.py @@ -0,0 +1,92 @@ +#!/usr/bin/python3 + +# Symbiflow Stage Module + +# ----------------------------------------------------------------------------- # + +import os +from sf_common import * +from sf_module import * + +# ----------------------------------------------------------------------------- # + +def default_output_name(place_constraints): + p = place_constraints + m = re.match('(.*)\\.[^.]*$', place_constraints) + if m: + p = m.groups()[0] + '.place' + else: + p += '.place' + return p + +def place_constraints_file(ctx: ModuleContext): + dummy =- False + p = ctx.takes.place_constraints + if not p: + p = ctx.takes.io_place + if not p: + dummy = True + p = ctx.takes.eblif + if dummy: + m = re.match('(.*)\\.[^.]*$', p) + if m: + p = m.groups()[0] + '.place' + + return p, dummy + +class PlaceModule(Module): + def map_io(self, ctx: ModuleContext): + mapping = {} + p, _ = place_constraints_file(ctx) + + mapping['place'] = default_output_name(p) + return mapping + + def execute(self, ctx: ModuleContext): + place_constraints, dummy = place_constraints_file(ctx) + place_constraints = os.path.realpath(place_constraints) + if dummy: + with open(place_constraints, 'wb') as f: + f.write(b'') + + build_dir = os.path.dirname(ctx.takes.eblif) + + vpr_options = ['--fix_clusters', place_constraints] + if ctx.values.vpr_options: + vpr_options += options_dict_to_list(ctx.values.vpr_options) + + + yield 'Running VPR...' + vprargs = VprArgs(ctx.share, ctx.values.device, ctx.takes.eblif, + vpr_options=vpr_options) + vpr('place', vprargs, cwd=build_dir) + + # VPR names output on its own. If user requested another name, the + # output file should be moved. + # TODO: This extends the set of names that would cause collisions. + # As for now (22-07-2021), no collision detection is being done, but + # when the problem gets tackled, we should keep in mind that VPR-based + # modules may produce some temporary files with names that differ from + # the ones in flow configuration. + if ctx.is_output_explicit('place'): + output_file = default_output_name(place_constraints) + shutil.move(output_file, ctx.outputs.place) + + yield 'Saving log...' + save_vpr_log('place.log', build_dir=build_dir) + + def __init__(self, _): + self.name = 'place' + self.no_of_phases = 2 + self.takes = [ + 'eblif', + 'place_constraints?', + 'io_place?' + ] + self.produces = [ 'place' ] + self.values = [ + 'device', + 'vpr_options?' 
+ ] + +do_module(PlaceModule) \ No newline at end of file diff --git a/sfbuild/sf_common_modules/place_constraints.py b/sfbuild/sf_common_modules/place_constraints.py new file mode 100644 index 0000000000..04a9a7235e --- /dev/null +++ b/sfbuild/sf_common_modules/place_constraints.py @@ -0,0 +1,69 @@ +#!/usr/bin/python3 + +# Symbiflow Stage Module + +# ----------------------------------------------------------------------------- # + +import os +from sf_common import * +from sf_module import * + +# ----------------------------------------------------------------------------- # + +class IOPlaceModule(Module): + def map_io(self, ctx: ModuleContext): + mapping = {} + net = ctx.takes.net + + p = net + m = re.match('(.*)\\.[^.]*$', net) + if m: + p = m.groups()[0] + + mapping['place_constraints'] = p + '.preplace' + + return mapping + + def execute(self, ctx: ModuleContext): + arch_dir = os.path.join(ctx.share, 'arch') + arch_def = os.path.join(arch_dir, ctx.values.device, 'arch.timing.xml') + + constr_gen = os.path.join(ctx.share, + 'scripts/prjxray_create_place_constraints.py') + vpr_grid_map = os.path.join(ctx.share, 'arch', ctx.values.device, + 'vpr_grid_map.csv') + + if not os.path.isfile(vpr_grid_map) and not os.path.islink(vpr_grid_map): + fatal(-1, f'Gridmap file \"{vpr_grid_map}\" not found') + + database = sub('prjxray-config').decode().replace('\n', '') + + yield 'Generating .place...' + data = sub('python3', constr_gen, + '--net', ctx.takes.net, + '--arch', arch_def, + '--blif', ctx.takes.eblif, + '--vpr_grid_map', vpr_grid_map, + '--input', ctx.takes.io_place, + '--db_root', database, + '--part', ctx.values.part_name) + + yield 'Saving place constraint data...' + with open(ctx.outputs.place_constraints, 'wb') as f: + f.write(data) + + def __init__(self, _): + self.name = 'place_constraints' + self.no_of_phases = 2 + self.takes = [ + 'eblif', + 'net', + 'io_place' + ] + self.produces = [ 'place_constraints' ] + self.values = [ + 'device', + 'part_name' + ] + +do_module(IOPlaceModule) \ No newline at end of file diff --git a/sfbuild/sf_common_modules/route.py b/sfbuild/sf_common_modules/route.py new file mode 100644 index 0000000000..cd8440570c --- /dev/null +++ b/sfbuild/sf_common_modules/route.py @@ -0,0 +1,56 @@ +#!/usr/bin/python3 + +# Symbiflow Stage Module + +# ----------------------------------------------------------------------------- # + +import os +import shutil +from sf_common import * +from sf_module import * + +# ----------------------------------------------------------------------------- # + +def route_place_file(eblif: str): + p = eblif + m = re.match('(.*)\\.[^.]*$', eblif) + if m: + p = m.groups()[0] + return p + '.route' + +class RouteModule(Module): + def map_io(self, ctx: ModuleContext): + mapping = {} + mapping['route'] = route_place_file(ctx.takes.eblif) + return mapping + + def execute(self, ctx: ModuleContext): + build_dir = os.path.dirname(ctx.takes.eblif) + + vpr_options = [] + if ctx.values.vpr_options: + vpr_options = options_dict_to_list(ctx.values.vpr_options) + + vprargs = VprArgs(ctx.share, ctx.values.device, ctx.takes.eblif, + vpr_options=vpr_options) + + yield 'Routing with VPR...' + vpr('route', vprargs, cwd=build_dir) + + if ctx.is_output_explicit('route'): + shutil.move(route_place_file(ctx.takes.eblif), ctx.outputs.route) + + yield 'Saving log...' 
+ save_vpr_log('route.log', build_dir=build_dir) + + def __init__(self, _): + self.name = 'route' + self.no_of_phases = 2 + self.takes = [ 'eblif' ] + self.produces = [ 'route' ] + self.values = [ + 'device', + 'vpr_options?' + ] + +do_module(RouteModule) \ No newline at end of file diff --git a/sfbuild/sf_common_modules/synth.py b/sfbuild/sf_common_modules/synth.py new file mode 100755 index 0000000000..11579ce423 --- /dev/null +++ b/sfbuild/sf_common_modules/synth.py @@ -0,0 +1,160 @@ +#!/usr/bin/python3 + +# Symbiflow Stage Module + +# ----------------------------------------------------------------------------- # + +import os +import shutil +from sf_common import * +from sf_module import * + +# ----------------------------------------------------------------------------- # + +# Setup environmental variables for YOSYS TCL scripts +def yosys_setup_tcl_env(share, build_dir, top, bitstream_device, part, + techmap_path, out_json=None, out_sdc=None, + synth_json=None, out_synth_v=None, out_eblif=None, + out_fasm_extra=None, database_dir=None, use_roi=False, + xdc_files=None): + utils_path = os.path.join(share, 'scripts') + + if not out_json: + out_json = os.path.join(build_dir, top + '.json') + if not out_sdc: + out_sdc = os.path.join(build_dir, top + '.sdc') + if not synth_json: + synth_json = os.path.join(build_dir, top + '_io.json') + if not out_synth_v: + out_synth_v = os.path.join(build_dir, top + '_synth.v') + if not out_eblif: + out_eblif = os.path.join(build_dir, top + '.eblif') + if not out_fasm_extra: + out_fasm_extra = os.path.join(build_dir, top + '_fasm_extra.fasm') + if not database_dir: + database_dir = sub('prjxray-config').decode().replace('\n', '') + part_json_path = \ + os.path.join(database_dir, bitstream_device, part, 'part.json') + env = { + 'USE_ROI': 'FALSE', + 'TOP': top, + 'OUT_JSON': out_json, + 'OUT_SDC': out_sdc, + 'PART_JSON': os.path.realpath(part_json_path), + 'OUT_FASM_EXTRA': out_fasm_extra, + 'TECHMAP_PATH': techmap_path, + 'OUT_SYNTH_V': out_synth_v, + 'OUT_EBLIF': out_eblif, + 'PYTHON3': shutil.which('python3'), + 'UTILS_PATH': utils_path + } + if use_roi: + env['USE_ROI'] = 'TRUE' + if xdc_files and len(xdc_files) > 0: + env['INPUT_XDC_FILES'] = ' '.join(xdc_files) + return env + +def yosys_synth(tcl, tcl_env, verilog_files=[], log=None): + # Set up environment for TCL weirdness + optional = [] + if log: + optional += ['-l', log] + env = os.environ.copy() + env.update(tcl_env) + + # Execute YOSYS command + return sub(*(['yosys', '-p', 'tcl ' + tcl] + optional + verilog_files), + env=env) + +def yosys_conv(tcl, tcl_env, synth_json): + # Set up environment for TCL weirdness + env = os.environ.copy() + env.update(tcl_env) + + # Execute YOSYS command + return sub('yosys', '-p', 'read_json ' + synth_json + '; tcl ' + tcl, + env=env) + +# ----------------------------------------------------------------------------- # + +class SynthModule(Module): + def map_io(self, ctx: ModuleContext): + mapping = {} + + top = ctx.values.top + if ctx.takes.build_dir: + top = os.path.join(ctx.takes.build_dir, top) + if top: + mapping['eblif'] = top + '.eblif' + mapping['fasm_extra'] = top + '_fasm_extra.fasm' + mapping['json'] = top + '.json' + mapping['synth_json'] = top + '_io.json' + mapping['sdc'] = top + '.sdc' + mapping['synth_v'] = top + '_synth.v' + return mapping + + def execute(self, ctx: ModuleContext): + split_inouts = os.path.join(ctx.share, 'scripts/split_inouts.py') + synth_tcl = os.path.join(ctx.values.tcl_scripts, 'synth.tcl') + conv_tcl = 
os.path.join(ctx.values.tcl_scripts, 'conv.tcl') + + build_dir = os.path.dirname(ctx.outputs.json) + xdc_files = ctx.takes.xdc + if not xdc_files: + xdc_files = [] + tcl_env = yosys_setup_tcl_env(share=ctx.share, build_dir=build_dir, + top=ctx.values.top, + bitstream_device=\ + ctx.values.bitstream_device, + part=ctx.values.part_name, + techmap_path=ctx.values.techmap, + xdc_files=xdc_files, + out_json=ctx.outputs.json, + synth_json=ctx.outputs.synth_json, + out_eblif=ctx.outputs.eblif, + out_sdc=ctx.outputs.sdc, + out_fasm_extra=ctx.outputs.fasm_extra, + out_synth_v=ctx.outputs.synth_v) + + yield f'Sythesizing sources: {ctx.takes.sources}...' + yosys_synth(synth_tcl, tcl_env, ctx.takes.sources, ctx.outputs.synth_log) + + yield f'Splitting in/outs...' + sub('python3', split_inouts, '-i', ctx.outputs.json, '-o', + ctx.outputs.synth_json) + + yield f'Converting...' + yosys_conv(conv_tcl, tcl_env, ctx.outputs.synth_json) + + def __init__(self, _): + self.name = 'synthesize' + self.no_of_phases = 3 + self.takes = [ + 'sources', + 'xdc?', + 'build_dir?' + ] + self.produces = [ + 'eblif', + 'fasm_extra?', + 'json', + 'synth_json', + 'sdc', + 'synth_v', + 'synth_log?' + ] + self.values = [ + 'top', + 'tcl_scripts', + 'techmap', + 'bitstream_device', + 'part_name' + ] + self.prod_meta = { + 'eblif': 'Extended BLIF hierarchical sequential designs file\n' + 'generated by YOSYS', + 'json': 'JSON file containing a design generated by YOSYS', + 'synth_log': 'YOSYS synthesis log' + } + +do_module(SynthModule) \ No newline at end of file diff --git a/sfbuild/sf_module/__init__.py b/sfbuild/sf_module/__init__.py new file mode 100644 index 0000000000..f4849a71b6 --- /dev/null +++ b/sfbuild/sf_module/__init__.py @@ -0,0 +1,169 @@ +# Here are the things necessary to write a symbiflow Module + +import os +import sys +import json +from types import SimpleNamespace +from sf_common import * +from colorama import Fore, Style + +# A `Module` is a wrapper for whatever tool is used in a flow. +# Modules can request dependencies, values and are guranteed to have all the +# required ones present when entering `exec` mode. +# They also have to specify what dependencies they produce and create the files +# for these dependencies. +class Module: + no_of_phases: int + name: str + takes: 'list[str]' + produces: 'list[str]' + values: 'list[str]' + prod_meta: 'dict[str, str]' + + # Executes module. Use yield to print a message informing about current + # execution phase. + # `ctx` is `ModuleContext`. + def execute(self, ctx): + return None + + # Returns paths for ouputs derived from given inputs. + # `ctx` is `ModuleContext`. + def map_io(self, ctx): + return {} + + def __init__(self, params: 'dict[str, ] | None'): + self.no_of_phases = 0 + self.current_phase = 0 + self.name = '' + self.prod_meta = {} + +# A class for object holding mappings for dependencies and values as well as +# other information needed during modules execution. +class ModuleContext: + share: str # Absolute path to Symbiflow's share directory + takes: SimpleNamespace # Maps symbolic dependency names to relative + # paths. + produces: SimpleNamespace # Contains mappings for explicitely specified + # dependencies. Useful mostly for checking for + # on-demand optional outputs (such as logs) + # with `is_output_explicit` method. + outputs: SimpleNamespace # Contains mappings for all available outputs. + values: SimpleNamespace # Contains all available requested values. 
+ r_env: ResolutionEnv # `ResolutionEnvironmet` object holding mappings + # for current scope. + module_name: str # Name of the module. + + # True if user has explicitely specified output's path. + def is_output_explicit(self, name: str): + o = getattr(self.produces, name) + return o is not None + + # Add attribute for a dependency or panic if a required dependency has not + # been given to the module on its input. + def _getreqmaybe(self, obj, deps: 'list[str]', deps_cfg: 'dict[str, ]'): + for name in deps: + name, required = decompose_depname(name) + value = deps_cfg.get(name) + if value is None and required: + fatal(-1, f'Dependency `{name}` is required by module ' + f'`{self.module_name}` but wasn\'t provided') + setattr(obj, name, self.r_env.resolve(value)) + + # `config` should be a dictionary given as modules input. + def __init__(self, module: Module, config: 'dict[str, ]', + r_env: ResolutionEnv, share: str): + self.module_name = module.name + self.takes = SimpleNamespace() + self.produces = SimpleNamespace() + self.values = SimpleNamespace() + self.outputs = SimpleNamespace() + self.r_env = r_env + self.share = share + + self._getreqmaybe(self.takes, module.takes, config['takes']) + self._getreqmaybe(self.values, module.values, config['values']) + + produces_resolved = self.r_env.resolve(config['produces']) + for name, value in produces_resolved.items(): + setattr(self.produces, name, value) + + outputs = module.map_io(self) + outputs.update(produces_resolved) + + self._getreqmaybe(self.outputs, module.produces, outputs) + +# get descriptions for produced dependencies. +def get_mod_metadata(module: Module): + meta = {} + has_meta = hasattr(module, 'prod_meta') + for prod in module.produces: + prod = prod.replace('?', '') + if not has_meta: + meta[prod] = '' + continue + prod_meta = module.prod_meta.get(prod) + meta[prod] = prod_meta if prod_meta else '' + return meta + +def setup_module_arg_parser(): + parser = argparse.ArgumentParser(description="Parse flags") + parser.add_argument('-s', '--share', nargs=1, metavar='', + type=str, help='Symbiflow\'s "share" directory path') + parser.add_argument('-m', '--map', action='store_true', + help='Perform `output name` <-> `file path` mapping ' + 'instead of executing the stage.') + parser.add_argument('-i', '--io', action='store_true', + help='Return a JSON containing module input/output ' + 'declarations and metadata') + return parser + +# Call it at the end of module's script. Wraps the module to be used +# through shell. 
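+# Typical usage at the bottom of a stage module script (the modules in this tree
+# end with e.g. `do_module(SynthModule)`):
+#
+#   class MyModule(Module):   # hypothetical module
+#       ...
+#
+#   do_module(MyModule)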
+def do_module(module_ctor): + parser = setup_module_arg_parser() + args = parser.parse_args() + config_json = sys.stdin.read() + config = json.loads(config_json) + + module: Module = module_ctor(config.get('params')) + + if args.io: + io = { + 'name': module.name, + 'takes': module.takes, + 'produces': module.produces, + 'meta': get_mod_metadata(module) + } + io_json = json.dumps(io) + print(io_json) + exit(0) + + if not args.share: + fatal(-1, 'Symbiflow stage module requires "share" directory to be ' + 'specified using `-s` option.') + + share = os.path.realpath(args.share[0]) + + + r_env = ResolutionEnv({ + 'shareDir': share + }) + r_env.add_values(config['values']) + + mod_ctx = ModuleContext(module, config, r_env, share) + + if (args.map): + json_map = json.dumps(vars(mod_ctx.outputs)) + print(json_map) + return + + + print( 'Executing module ' + f'`{Style.BRIGHT + module.name + Style.RESET_ALL}`:') + current_phase = 1 + for phase_msg in module.execute(mod_ctx): + print(f' {Style.BRIGHT}[{current_phase}/{module.no_of_phases}]' + f'{Style.RESET_ALL}: {phase_msg}') + current_phase += 1 + print(f'Module `{Style.BRIGHT + module.name + Style.RESET_ALL}` ' + 'has finished its work!') \ No newline at end of file diff --git a/sfbuild/sf_quicklogic_modules/bitstream.py b/sfbuild/sf_quicklogic_modules/bitstream.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sfbuild/sf_xc7_modules/bitstream.py b/sfbuild/sf_xc7_modules/bitstream.py new file mode 100644 index 0000000000..c52043534b --- /dev/null +++ b/sfbuild/sf_xc7_modules/bitstream.py @@ -0,0 +1,59 @@ +#!/usr/bin/python3 + +# Symbiflow Stage Module + +# ----------------------------------------------------------------------------- # + +import os + +# Dumb hack to avoid the need to install sfbuild package +import sys +mypath = os.path.realpath(os.sys.argv[0]) +sys.path.append(os.path.join(mypath, '../')) + +from sf_common import * +from sf_module import * + +# ----------------------------------------------------------------------------- # + +def bitstream_output_name(fasm: str): + p = fasm + m = re.match('(.*)\\.[^.]*$', fasm) + if m: + p = m.groups()[0] + return p + '.bit' + +class BitstreamModule(Module): + def map_io(self, ctx: ModuleContext): + mapping = {} + mapping['bitstream'] = bitstream_output_name(ctx.takes.fasm) + return mapping + + def execute(self, ctx: ModuleContext): + database = sub('prjxray-config').decode().replace('\n', '') + database = os.path.join(database, ctx.values.bitstream_device) + + yield 'Compiling FASM to bitstream...' + sub(*(['xcfasm', + '--db-root', database, + '--part', ctx.values.part_name, + '--part_file', os.path.join(database, ctx.values.part_name, + 'part.yaml'), + '--sparse', + '--emit_pudc_b_pullup', + '--fn_in', os.path.realpath(ctx.takes.fasm), + '--frm2bit', 'xc7frames2bit', + '--bit_out', ctx.outputs.bitstream + ])) + + def __init__(self, _): + self.name = 'bitstream' + self.no_of_phases = 1 + self.takes = [ 'fasm' ] + self.produces = [ 'bitstream' ] + self.values = [ + 'part_name', + 'bitstream_device' + ] + +do_module(BitstreamModule) \ No newline at end of file diff --git a/sfbuild/sfbuild.py b/sfbuild/sfbuild.py new file mode 100755 index 0000000000..f0a09f913b --- /dev/null +++ b/sfbuild/sfbuild.py @@ -0,0 +1,627 @@ +""" +sfbuild - Symbiflow Build System + +This tool allows for building FPGA targets (such as bitstreams) for any supported +platform with just one simple command and a project file. 
+
+The idea is that sfbuild wraps all the tools needed by different platforms in
+"modules", which define inputs/outputs and various parameters. This allows
+sfbuild to resolve dependencies for any target, provided that a "flow definition"
+file exists for that target. The flow definition file lists the modules available
+for that platform and may tweak some settings of those modules.
+
+A basic example of using sfbuild:
+$ sfbuild flow.json -p arty_35 -t bitstream
+
+This will make sfbuild attempt to create a bitstream for the arty_35 platform.
+flow.json is a flow configuration file, which should be created for a project
+that uses sfbuild. It contains project-specific definitions needed within the
+flow, such as the list of source code files.
+"""
+
+import re
+import sys
+import os
+import json
+import argparse
+from copy import copy
+from subprocess import Popen, PIPE
+from typing import Iterable
+from colorama import Fore, Style
+from sf_common import ResolutionEnv, noisy_warnings, fatal
+from sf_module import *
+from sf_cache import SymbiCache
+
+SYMBICACHEPATH = '.symbicache'
+
+mypath = os.path.realpath(os.sys.argv[0])
+mypath = os.path.dirname(mypath)
+
+share_dir_path = os.path.realpath(os.path.join(mypath, '../../share/symbiflow'))
+
+# Set up the argument parser for the program. Pretty self-explanatory.
+def setup_argparser():
+    parser = argparse.ArgumentParser(description="Execute SymbiFlow flow")
+    parser.add_argument('flow', nargs=1, metavar='', type=str,
+                        help='Path to flow definition file')
+    parser.add_argument('-t', '--target', metavar='', type=str,
+                        help='Perform stages necessary to acquire target')
+    parser.add_argument('-p', '--platform', nargs=1, metavar='',
+                        help='Target platform name')
+    parser.add_argument('-P', '--pretend', action='store_true',
+                        help='Show dependency resolution without executing flow')
+    parser.add_argument('-i', '--info', action='store_true',
+                        help='Display info about available targets')
+    # Currently unsupported
+    parser.add_argument('-T', '--take_explicit_paths', nargs='+',
+                        metavar='', type=str,
+                        help='Specify stage inputs explicitly. This might be '
+                             'required if some files got renamed or deleted and '
+                             'symbiflow is unable to deduce the flow that led '
+                             'to dependencies required by the requested stage')
+    return parser
+
+# Workaround for CMake failing to install the sfbuild package properly.
+mod_sfbuild_env = os.environ.copy()
+mod_sfbuild_env['PYTHONPATH'] = os.path.realpath(mypath)
+
+# Runs a module in one of the following modes:
+# * 'map' - return output paths for the given inputs. The result should be a
+#           dictionary that maps dependency names to paths.
+# * 'exec' - execute the module for the given inputs.
+# * 'io' - provide useful metadata for the user.
+def _run_module(path, mode, config):
+    mod_res = None
+    out = None
+    config_json = json.dumps(config)
+    if mode == 'map':
+        cmd = ['python3', path, '--map', '--share', share_dir_path]
+        with Popen(cmd, stdin=PIPE, stdout=PIPE, env=mod_sfbuild_env) as p:
+            out = p.communicate(input=config_json.encode())[0]
+        mod_res = p
+    elif mode == 'exec':
+        # XXX: THIS IS SOOOO UGLY
+        cmd = ['python3', path, '--share', share_dir_path]
+        with Popen(cmd, stdout=sys.stdout, stdin=PIPE, bufsize=1,
+                   env=mod_sfbuild_env) as p:
+            p.stdin.write(config_json.encode())
+            p.stdin.flush()
+        mod_res = p
+    elif mode == 'io':
+        cmd = ['python3', path, '--io']
+        with Popen(cmd, stdin=PIPE, stdout=PIPE, env=mod_sfbuild_env) as p:
+            out = p.communicate(input=config_json.encode())[0]
+        mod_res = p
+    if mod_res.returncode != 0:
+        print(f'Module `{path}` failed with code {mod_res.returncode}\n'
+              f'MODE: \'{mode}\'\n\n'
+              f'{Style.BRIGHT}stdout:{Style.RESET_ALL}\n{out}\n\n'
+              f'{Style.BRIGHT}stderr:{Style.RESET_ALL}\n{mod_res.stderr}\n\n')
+        exit(mod_res.returncode)
+    if out:
+        return json.loads(out.decode())
+    else:
+        return None
+
+# Stage dependency input/output
+class StageIO:
+    name: str       # A symbolic name given to the dependency
+    required: bool  # True if the dependency is marked as 'necessary'/'required'
+
+    # The encoded name features special characters that imply certain qualifiers.
+    # Any name that ends with '?' is treated as having the 'maybe' qualifier.
+    # The '?' symbol is then dropped from the dependency name.
+    def __init__(self, encoded_name: str):
+        self.name, self.required = decompose_depname(encoded_name)
+
+    def __repr__(self) -> str:
+        return 'StageIO { name: \'' + self.name + '\', required: ' + \
+               str(self.required) + ' }'
+
+sfbuild_home = mypath
+sfbuild_home_dirs = os.listdir(sfbuild_home)
+sfbuild_module_dirs = \
+    [dir for dir in sfbuild_home_dirs if re.match('sf_.*_modules$', dir)]
+sfbuild_module_collection_name_to_path = \
+    dict([(re.match('sf_(.*)_modules$', moddir).groups()[0],
+           os.path.join(sfbuild_home, moddir))
+          for moddir in sfbuild_module_dirs])
+
+def resolve_modstr(modstr: str):
+    """Resolve a module's location from a module string."""
+    sl = modstr.split(':')
+    if len(sl) > 2:
+        raise Exception('Incorrect module syntax. '
+                        'Expected one \':\' or one \'::\'')
+    if len(sl) < 2:
+        return modstr
+    collection_name = sl[0]
+    module_filename = sl[1] + '.py'
+
+    col_path = sfbuild_module_collection_name_to_path.get(collection_name)
+    if not col_path:
+        fatal(-1, f'Module collection {collection_name} does not exist')
+    return os.path.join(col_path, module_filename)
+
+class Stage:
+    name: str                 # Name of the stage (module's name)
+    takes: 'list[StageIO]'    # List of dependencies used by the stage
+    produces: 'list[StageIO]' # List of dependencies produced by the stage
+    value_ovds: 'dict[str, ]' # Values used by the stage. The dictionary
+                              # maps values' symbolic names to values that will
+                              # override the `values` input for a given module.
+                              # What makes values different from `takes` is that
+                              # they don't have to be paths, and if they are,
+                              # the files they point to are not tracked.
+    module: str               # Path to the associated module
+    meta: 'dict[str, str]'    # Stage's metadata extracted from the module's
+                              # output.
+    params: object            # Module-specific parameters required to
+                              # instantiate the module.
+
+    def __init__(self, name: str, modstr: str, mod_opts,
+                 r_env: ResolutionEnv):
+        self.module = resolve_modstr(modstr)
+
+        if not os.path.isfile(self.module) and not os.path.islink(self.module):
+            raise Exception(f'Module file {self.module} does not exist')
+
+        if not mod_opts:
+            mod_opts = {}
+
+        self.params = mod_opts.get('params')
+
+        values = mod_opts.get('values')
+        if values:
+            r_env = copy(r_env)
+            values = dict(import_values(values, r_env))
+            self.value_ovds = values
+        else:
+            self.value_ovds = {}
+
+        io_config = prepare_stage_io_input(self)
+        mod_io = _run_module(self.module, 'io', io_config)
+        self.name = name
+
+        self.takes = []
+        for input in mod_io['takes']:
+            io = StageIO(input)
+            self.takes.append(io)
+
+        self.produces = []
+        for input in mod_io['produces']:
+            io = StageIO(input)
+            self.produces.append(io)
+
+        self.meta = mod_io['meta']
+
+    def __repr__(self) -> str:
+        return 'Stage \'' + self.name + '\' {' \
+               f' value_overrides: {self.value_ovds},' \
+               f' params: {self.params},' \
+               f' takes: {self.takes},' \
+               f' produces: {self.produces} ' + '}'
+
+# Resolve and yield values while adding them to `r_env` for further resolutions.
+def import_values(values: dict, r_env: ResolutionEnv):
+    for k, v in values.items():
+        vr = r_env.resolve(v)
+        r_env.values[k] = vr
+        yield k, vr
+
+# Iterates over all stages available in a given flow.
+def platform_stages(platform_flow, r_env):
+    #TODO options overriding
+    module_options = platform_flow.get('module_options')
+    for stage_name, modulestr in platform_flow['modules'].items():
+        mod_opts = module_options.get(stage_name) if module_options else None
+        yield Stage(stage_name, modulestr, mod_opts, r_env)
+
+# Checks whether a dependency exists on disk.
+def req_exists(r):
+    if type(r) is str:
+        if not os.path.isfile(r) and not os.path.islink(r) \
+                and not os.path.isdir(r):
+            return False
+    elif type(r) is list:
+        return not (False in map(req_exists, r))
+    else:
+        raise Exception('Requirements can be currently checked only for single '
+                        'paths, or path lists')
+    return True
+
+# Associates a stage with every possible output.
+# This is commonly referred to as `os_map` (output-stage map) throughout the code.
+def map_outputs_to_stages(stages: 'list[Stage]'):
+    os_map: 'dict[str, Stage]' = {} # Output-Stage map
+    for stage in stages:
+        for output in stage.produces:
+            if not os_map.get(output.name):
+                os_map[output.name] = stage
+            elif os_map[output.name] != stage:
+                raise Exception(f'Dependency `{output.name}` is generated by '
+                                f'stage `{os_map[output.name].name}` and '
+                                f'`{stage.name}`. Dependencies can have at most '
+                                'one provider.')
+    return os_map
+
+# Get dependencies that were explicitly specified by the user.
+def get_explicit_deps(flow_cfg: dict, platform_name: str, r_env: ResolutionEnv):
+    deps = {}
+    if flow_cfg.get('dependencies'):
+        deps.update(r_env.resolve(flow_cfg['dependencies']))
+    if flow_cfg[platform_name].get('dependencies'):
+        deps.update(r_env.resolve(flow_cfg[platform_name]['dependencies']))
+    return deps
+
+def filter_existing_deps(deps: 'dict[str, ]', symbicache):
+    return [(n, p) for n, p in deps.items() \
+            if req_exists(p)] # and not dep_differ(p, symbicache)]
+
+def get_flow_values(platform_flow: dict, flow_cfg: dict, platform_name):
+    values = {}
+
+    platform_flow_values = platform_flow.get('values')
+    if platform_flow_values:
+        values.update(platform_flow_values)
+
+    project_flow_values = flow_cfg.get('values')
+    if project_flow_values:
+        values.update(project_flow_values)
+
+    project_flow_platform_values = flow_cfg[platform_name].get('values')
+    if project_flow_platform_values:
+        values.update(project_flow_platform_values)
+
+    return values
+
+def get_stage_values_override(og_values: dict, stage: Stage):
+    values = og_values.copy()
+    values.update(stage.value_ovds)
+    return values
+
+def prepare_stage_io_input(stage: Stage):
+    return { 'params': stage.params } if stage.params is not None else {}
+
+def prepare_stage_input(stage: Stage, platform_name: str, values: dict,
+                        dep_paths: 'dict[str, ]', config_paths: 'dict[str, ]'):
+    takes = {}
+    for take in stage.takes:
+        paths = dep_paths.get(take.name)
+        if paths: # Some takes may not be required
+            takes[take.name] = paths
+
+    produces = {}
+    for prod in stage.produces:
+        if dep_paths.get(prod.name):
+            produces[prod.name] = dep_paths[prod.name]
+        elif config_paths.get(prod.name):
+            produces[prod.name] = config_paths[prod.name]
+
+    stage_mod_cfg = {
+        'takes': takes,
+        'produces': produces,
+        'values': values,
+        'platform': platform_name,
+    }
+    if stage.params is not None:
+        stage_mod_cfg['params'] = stage.params
+    return stage_mod_cfg
+
+def update_dep_statuses(paths, consumer: str, symbicache: SymbiCache):
+    if type(paths) is str:
+        symbicache.update(paths, consumer)
+    elif type(paths) is list:
+        for p in paths:
+            update_dep_statuses(p, consumer, symbicache)
+    elif type(paths) is dict:
+        for _, p in paths.items():
+            update_dep_statuses(p, consumer, symbicache)
+
+# Check if a dependency differs from its last version; a missing dependency is
+# treated as "differs".
+def dep_differ(paths, consumer: str, symbicache: SymbiCache):
+    if type(paths) is str:
+        s = symbicache.get_status(paths, consumer)
+        if s == 'untracked':
+            symbicache.update(paths, consumer)
+        return symbicache.get_status(paths, consumer) != 'same'
+    elif type(paths) is list:
+        return True in [dep_differ(p, consumer, symbicache) for p in paths]
+    elif type(paths) is dict:
+        return True in [dep_differ(p, consumer, symbicache) \
+                        for _, p in paths.items()]
+    return False
+
+# Check if a dependency or any of the dependencies it depends on differ from
+# their last versions.
+def dep_will_differ(target: str, paths, consumer: str, + os_map: 'dict[str, Stage]', rerun_stages: 'set[str]', + symbicache: SymbiCache): + provider = os_map.get(target) + if provider: + return (provider.name in rerun_stages) or \ + dep_differ(paths, consumer, symbicache) + return dep_differ(paths, consumer, symbicache) + +def _print_unreachable_stage_message(provider: Stage, take: str): + print( ' Stage ' + f'`{Style.BRIGHT + provider.name + Style.RESET_ALL}` is ' + 'unreachable due to unmet dependency ' + f'`{Style.BRIGHT + take.name + Style.RESET_ALL}`') + +def config_and_run_module(stage: Stage, platform_name: str, + values: 'dict[str, ]', + dep_paths: 'dict[str, str | list[str]]', + config_paths: 'dict[str, str | list[str]]', + mode: str): + stage_values = get_stage_values_override(values, stage) + mod_input = prepare_stage_input(stage, platform_name, stage_values, + dep_paths, config_paths) + return _run_module(stage.module, mode, mod_input) + +class Flow: + target: str + platform_name: str + # dependency-producer map + os_map: 'dict[str, Stage]' + # Values in global scope + values: 'dict[str, ]' + # Paths resolved for dependencies + dep_paths: 'dict[str, str | list[str]]' + # Explicit configs for dependency paths + config_paths: 'dict[str, str | list[str]]' + # Stages that need to be run + run_stages: 'set[str]' + # Number of stages that relied on outdated version of a (checked) dependency + deps_rebuilds: 'dict[str, int]' + symbicache: SymbiCache + + def __init__(self, target: str, stages: 'list[Stage]', + platform_name: str, values: 'dict[str, ]', + config_paths: 'dict[str, ]', + symbicache: SymbiCache): + self.target = target + self.platform_name = platform_name + self.os_map = map_outputs_to_stages(stages) + self.config_paths = config_paths + self.dep_paths = dict(filter_existing_deps(config_paths, symbicache)) + self.run_stages = set() + self.symbicache = symbicache + self.values = values + self.deps_rebuilds = {} + + self._resolve_dependencies(self.target, set()) + + def _resolve_dependencies(self, dep: str, stages_checked: 'set[str]'): + # Initialize the dependency status if necessary + if self.deps_rebuilds.get(dep) is None: + self.deps_rebuilds[dep] = 0 + # Check if an explicit dependency is already resolved + paths = self.dep_paths.get(dep) + if paths and not self.os_map.get(dep): + return + # Check if a stage can provide the required dependency + provider = self.os_map.get(dep) + if not provider or provider.name in stages_checked: + return + for take in provider.takes: + self._resolve_dependencies(take.name, stages_checked) + # If any of the required dependencies is unavailable, then the + # provider stage cannot be run + take_paths = self.dep_paths.get(take.name) + + if not take_paths and take.required: + _print_unreachable_stage_message(provider, take) + return + + if dep_will_differ(take.name, take_paths, provider.name, + self.os_map, self.run_stages, + self.symbicache): + # print(f'{take.name} will differ for {provider.name}') + self.run_stages.add(provider.name) + self.deps_rebuilds[take.name] += 1 + + outputs = config_and_run_module(provider, self.platform_name, + self.values, self.dep_paths, + self.config_paths, 'map') + stages_checked.add(provider.name) + self.dep_paths.update(outputs) + + for _, out_paths in outputs.items(): + if not req_exists(out_paths): + self.run_stages.add(provider.name) + + def print_resolved_dependencies(self): + deps = list(self.deps_rebuilds.keys()) + deps.sort() + + for dep in deps: + status = Fore.RED + '[X]' + Fore.RESET + source = 
Fore.YELLOW + 'MISSING' + Fore.RESET
+            paths = self.dep_paths.get(dep)
+
+            if paths:
+                exists = req_exists(paths)
+                provider = self.os_map.get(dep)
+                if provider and provider.name in self.run_stages:
+                    if exists:
+                        status = Fore.YELLOW + '[R]' + Fore.RESET
+                    else:
+                        status = Fore.YELLOW + '[S]' + Fore.RESET
+                    source = f'{Fore.BLUE + self.os_map[dep].name + Fore.RESET} ' \
+                             f'-> {paths}'
+                elif exists:
+                    if self.deps_rebuilds[dep] > 0:
+                        status = Fore.GREEN + '[N]' + Fore.RESET
+                    else:
+                        status = Fore.GREEN + '[O]' + Fore.RESET
+                    source = paths
+            elif self.os_map.get(dep):
+                status = Fore.RED + '[U]' + Fore.RESET
+                source = \
+                    f'{Fore.BLUE + self.os_map[dep].name + Fore.RESET} -> ???'
+
+            print(f'    {Style.BRIGHT + status} {dep + Style.RESET_ALL}: {source}')
+
+    def _build_dep(self, dep):
+        paths = self.dep_paths.get(dep)
+        provider = self.os_map.get(dep)
+        run = (provider.name in self.run_stages) if provider else False
+        if not paths:
+            print(f'Dependency {dep} is unresolved.')
+            return False
+
+        if req_exists(paths) and not run:
+            return True
+        else:
+            if not provider:
+                fatal(-1, 'Something went wrong')
+
+            for p_dep in provider.takes:
+                produced = self._build_dep(p_dep.name)
+                if not produced:
+                    continue
+                update_dep_statuses(self.dep_paths[p_dep.name], provider.name,
+                                    self.symbicache)
+                p_dep_paths = self.dep_paths.get(p_dep.name)
+                if p_dep.required and not req_exists(p_dep_paths):
+                    fatal(-1, f'Can\'t produce promised dependency '
+                              f'`{p_dep.name}`. Something went wrong.')
+
+            config_and_run_module(provider, self.platform_name, self.values,
+                                  self.dep_paths, self.config_paths, 'exec')
+
+            self.run_stages.discard(provider.name)
+
+            if not req_exists(paths):
+                fatal(-1, f'Stage `{provider.name}` did not produce promised '
+                          f'dependency `{dep}`')
+            return True
+
+    def execute(self):
+        self._build_dep(self.target)
+        update_dep_statuses(self.dep_paths[self.target], '__target',
+                            self.symbicache)
+        print(f'Target `{Style.BRIGHT + self.target + Style.RESET_ALL}` -> '
+              f'{self.dep_paths[self.target]}')
+
+def display_dep_info(stages: 'Iterable[Stage]'):
+    print('Platform dependencies/targets:')
+    longest_out_name_len = 0
+    for stage in stages:
+        for out in stage.produces:
+            l = len(out.name)
+            if l > longest_out_name_len:
+                longest_out_name_len = l
+
+    desc_indent = longest_out_name_len + 7
+    nl_indentstr = '\n'
+    for _ in range(0, desc_indent):
+        nl_indentstr += ' '
+
+    for stage in stages:
+        for out in stage.produces:
+            pname = Style.BRIGHT + out.name + Style.RESET_ALL
+            indent = ''
+            for _ in range(0, desc_indent - len(pname) + 3):
+                indent += ' '
+            pgen = f'{Style.DIM}module: `{stage.name}`{Style.RESET_ALL}'
+            pdesc = stage.meta[out.name].replace('\n', nl_indentstr)
+            print(f'    {pname}:'
+                  f'{indent}{pdesc}{nl_indentstr}{pgen}')
+
+def sfbuild_done():
+    print(f'sfbuild: {Style.BRIGHT + Fore.GREEN}DONE'
+          f'{Style.RESET_ALL + Fore.RESET}')
+    exit(0)
+
+parser = setup_argparser()
+args = parser.parse_args()
+
+print('sfbuild: Symbiflow Build System')
+
+if not args.platform:
+    fatal(-1, 'You have to specify a platform name with the `-p` option')
+
+platform_name = args.platform[0]
+
+flow_path = args.flow[0]
+
+flow_cfg_json = None
+try:
+    with open(flow_path, 'r') as flow_cfg_file:
+        flow_cfg_json = flow_cfg_file.read()
+except FileNotFoundError as _:
+    fatal(-1, 'The provided flow definition file does not exist')
+
+flow_cfg = json.loads(flow_cfg_json)
+
+platform_path = os.path.join(mypath, 'platforms', platform_name + '.json')
+platform_def = None
+try:
+    with 
open(platform_path) as platform_file: + platform_def = platform_file.read() +except FileNotFoundError as _: + fatal(-1, f'The platform flow definition file {platform_path} for the platform ' + f'{platform_name} referenced in flow definition file {flow_path} ' + 'cannot be found.') + +platform_flow = json.loads(platform_def) +device = platform_flow['values']['device'] +p_flow_cfg = flow_cfg[platform_name] + +r_env = ResolutionEnv({ + "shareDir": share_dir_path, + "noisyWarnings": noisy_warnings(device) +}) + +p_flow_values = p_flow_cfg.get('values') +if p_flow_values: + r_env.add_values(p_flow_values) + +print('Scanning modules...') +stages = list(platform_stages(platform_flow, r_env)) + +if len(stages) == 0: + fatal(-1, 'Platform flow does not define any stage') + +if args.info: + display_dep_info(stages) + sfbuild_done() + +if not args.target: + fatal(-1, 'Please specify desired target using `--target` option') + +os_map = map_outputs_to_stages(stages) +stage = os_map.get(args.target) + +config_paths: 'dict[str, ]' = get_explicit_deps(flow_cfg, platform_name, r_env) +# ??? +# update_dep_statuses(config_paths, symbicache) + +values = get_flow_values(platform_flow, flow_cfg, platform_name) + +flow = Flow( + target=args.target, + stages=stages, + platform_name=platform_name, + values=get_flow_values(platform_flow, flow_cfg, platform_name), + config_paths=get_explicit_deps(flow_cfg, platform_name, r_env), + symbicache=SymbiCache(SYMBICACHEPATH) +) + +print('\nProject status:') +flow.print_resolved_dependencies() +print('') + +if args.pretend: + sfbuild_done() + +flow.execute() + +flow.symbicache.save() + +sfbuild_done() diff --git a/xc/xc7/toolchain_wrappers/example_flow.json b/xc/xc7/toolchain_wrappers/example_flow.json new file mode 100644 index 0000000000..c882ef7190 --- /dev/null +++ b/xc/xc7/toolchain_wrappers/example_flow.json @@ -0,0 +1,21 @@ +{ + "dependencies": { + "sources": ["counter.v"], + "xdc": ["arty.xdc"], + "synth_log": "synth.log", + "pack_log": "pack.log" + }, + "xc7a100t": { + "values": { + "top": "top", + "build_dir": "build/arty_100" + } + }, + "xc7a50t": { + "values": { + "top": "top", + "build_dir": "build/arty_35", + "part": "xc7a35tcpg236-1" + } + } +} \ No newline at end of file
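
For reference, a custom stage module plugs into the `do_module`/`ModuleContext` machinery
the same way `sf_xc7_modules/bitstream.py` does. The sketch below is illustrative only and
not part of this patch: the module name `copy_fasm`, the `fasm_copy` dependency and the
`.copy` output path are hypothetical, but the `takes`/`produces`/`values` declarations,
`map_io` and the phase-yielding `execute` follow the API introduced above.

```
#!/usr/bin/python3

# Hypothetical Symbiflow Stage Module (illustrative sketch, not part of this patch)

import os
import sys
import shutil

# Same path hack as the bundled modules: make sf_common/sf_module importable
# without installing the sfbuild package.
mypath = os.path.realpath(sys.argv[0])
sys.path.append(os.path.join(mypath, '../'))

from sf_common import *
from sf_module import *

class CopyFasmModule(Module):
    # Map the symbolic `fasm_copy` output to a concrete path derived from
    # the `fasm` input (the path scheme here is made up).
    def map_io(self, ctx: ModuleContext):
        return {'fasm_copy': ctx.takes.fasm + '.copy'}

    # Every yielded string is printed by do_module as one numbered phase.
    def execute(self, ctx: ModuleContext):
        yield 'Copying FASM file...'
        shutil.copyfile(ctx.takes.fasm, ctx.outputs.fasm_copy)

    def __init__(self, _):
        self.name = 'copy_fasm'        # hypothetical module name
        self.no_of_phases = 1
        self.takes = ['fasm']          # consumed dependency
        self.produces = ['fasm_copy']  # produced dependency (made up)
        self.values = []

do_module(CopyFasmModule)
```

In a platform flow definition such a module would be referenced with a module string like
`common:copy_fasm` (again hypothetical), which `resolve_modstr` expands to a path inside the
corresponding `sf_*_modules` collection directory.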