Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions core/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ add_custom_command(
# Source files compiled into the core LSOracle target; kahypar_config.cpp
# is generated into the build tree by the add_custom_command above.
set(LSORACLE_SOURCE
${CMAKE_CURRENT_BINARY_DIR}/kahypar_config.cpp
kahypar_temp_config.cpp
algorithms/optimization/optimizer.cpp
algorithms/optimization/resynthesis.cpp
utility.cpp
)
Expand Down
95 changes: 95 additions & 0 deletions core/algorithms/optimization/optimizer.cpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,95 @@
/* LSOracle: A learning based Oracle for Logic Synthesis

* MIT License
* Copyright 2019 Laboratory for Nano Integrated Systems (LNIS)
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
#include "algorithms/optimization/optimizer.hpp"
#include "algorithms/optimization/optimizers/aig.hpp"
#include "algorithms/optimization/optimizers/mig.hpp"
#include "algorithms/optimization/optimizers/xag.hpp"
#include "algorithms/optimization/optimizers/xmg.hpp"
#include "algorithms/optimization/optimizers/abc.hpp"
#include "algorithms/optimization/optimizers/noop.hpp"
namespace oracle {
template <typename network>
optimizer<network> *optimize(optimization_strategy_comparator<network> &comparator,
                             optimization_strategy strategy,
                             const mockturtle::window_view<mockturtle::names_view<network>> &part,
                             int index,
                             const std::string &abc_exec)
{
    std::cout << "******************************** optimizing partition " << index << " ********************************" << std::endl;
    std::cout << "Optimizing based on strategy " << comparator.name() << std::endl;
    // Run every known optimizer on this partition and keep whichever one the
    // comparator ranks best.
    // todo this is gonna leak memory: only the winner is returned and the
    // losers are never deleted (doing so safely also needs a virtual
    // destructor on optimizer<network>).
    std::vector<optimizer<network>*> candidates {
        new noop<network>(index, part, strategy, abc_exec),
        new migscript_optimizer<network>(index, part, strategy, abc_exec),
        new migscript2_optimizer<network>(index, part, strategy, abc_exec),
        new migscript3_optimizer<network>(index, part, strategy, abc_exec),
        new aigscript_optimizer<network>(index, part, strategy, abc_exec),
        new aigscript2_optimizer<network>(index, part, strategy, abc_exec),
        new aigscript3_optimizer<network>(index, part, strategy, abc_exec),
        new aigscript4_optimizer<network>(index, part, strategy, abc_exec),
        new aigscript5_optimizer<network>(index, part, strategy, abc_exec),
        new xmg_optimizer<network>(index, part, strategy, abc_exec),
        new xag_optimizer<network>(index, part, strategy, abc_exec),
        // new abc_optimizer<network>(index, part, strategy, abc_exec),
    };

    optimizer<network> *winner = nullptr;
    for (auto *candidate : candidates) {
        std::cout << "running optimization " << candidate->optimizer_name() << std::endl;
        candidate->convert();
        candidate->optimize();
        const node_depth metrics = candidate->independent_metric();
        std::cout << "result depth " << metrics.depth
                  << " size " << metrics.nodes << std::endl;

        // First candidate always becomes the incumbent; afterwards the
        // comparator decides (short-circuit keeps it uncalled on the first).
        if (winner == nullptr || comparator(*candidate, *winner)) {
            winner = candidate;
        }
    }
    std::cout << "using " << winner->optimizer_name() << " for " << index << std::endl;

    return winner;
}
// Explicit instantiation for AIG networks: the template definition lives in
// this translation unit, so the concrete specialization must be emitted here
// for other TUs to link against.
template optimizer<mockturtle::aig_network> *optimize(
optimization_strategy_comparator<mockturtle::aig_network> &,
optimization_strategy,
const mockturtle::window_view<mockturtle::names_view<mockturtle::aig_network>> &,
int,
const std::string &);
}
147 changes: 147 additions & 0 deletions core/algorithms/optimization/optimizer.hpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,147 @@
/* LSOracle: A learning based Oracle for Logic Synthesis

* MIT License
* Copyright 2019 Laboratory for Nano Integrated Systems (LNIS)
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
#pragma once
#include <string>
#include <mockturtle/mockturtle.hpp>
// #include "algorithms/optimization/optimizers/strategy.hpp"
// #include "algorithms/optimization/optimizers/aig.hpp"
// #include "algorithms/optimization/optimizers/mig.hpp"
// #include "algorithms/optimization/optimizers/xag.hpp"
// #include "algorithms/optimization/optimizers/xmg.hpp"
// #include "algorithms/optimization/optimizers/abc.hpp"

namespace oracle {

// Which metric an optimization pass is asked to minimize.
enum optimization_strategy { size, balanced, depth };

// Technology-independent quality metrics for an optimized network:
// node count and logic depth. Member order matters — callers may use
// aggregate initialization {nodes, depth}.
struct node_depth {
int nodes;
int depth;
};

/**
 * Abstract interface for a single optimization recipe applied to one
 * partition of a network. Instances are heap-allocated and handled through
 * optimizer<network>* base pointers (see optimize() in optimizer.cpp).
 */
template<typename network>
class optimizer
{
public:
    /**
     * Virtual destructor: instances are owned and may be destroyed through
     * a base-class pointer, which is undefined behavior without one.
     */
    virtual ~optimizer() = default;
    /**
     * human readable name
     */
    virtual const std::string optimizer_name() = 0;
    /**
     * Do any conversion necessary from original network type to the internal network type.
     */
    virtual void convert() = 0;
    /**
     * Perform optimization
     */
    virtual void optimize() = 0;
    /**
     * Calculate tech independent depth and nodes metrics.
     */
    virtual node_depth independent_metric() = 0;
    /**
     * List the type of optimization: area, delay, or balanced.
     */
    virtual optimization_strategy target() = 0;
    /**
     * Techmap, then return a path to a file containing the techmapped verilog.
     */
    virtual std::string techmap(const std::string &liberty_file, const std::string &temp_prefix) = 0;
    /**
     * convert the network to the superset.
     */
    virtual mockturtle::names_view<mockturtle::xmg_network> export_superset() = 0;
    /**
     * Reapply this optimization to a different network.
     */
    virtual optimizer<mockturtle::xmg_network> *reapply(int index, const mockturtle::window_view<mockturtle::names_view<mockturtle::xmg_network>> &part) = 0;
};

/**
 * Abstract strict-weak-ordering over optimizer results: operator() returns
 * true when `a` produced a strictly better result than `b`.
 */
template <typename T>
class optimization_strategy_comparator {
public:
    // Deleted/owned polymorphically, so the destructor must be virtual.
    virtual ~optimization_strategy_comparator() = default;
    // Comparator function
    virtual bool operator()(optimizer<T> &a, optimizer<T> &b) = 0;
    // Human readable name of the metric being compared.
    virtual const std::string name() = 0;
};

/**
 * Run every available optimizer on `part` and return the one whose result
 * `comparator` ranks best. Defined in optimizer.cpp, which provides explicit
 * instantiations.
 */
template <typename network>
optimizer<network> *optimize(optimization_strategy_comparator<network> &comparator,
optimization_strategy strategy,
const mockturtle::window_view<mockturtle::names_view<network>> &part,
int index,
const std::string &abc_exec);

/**
 * Ranks optimizers by node-depth product (lower is better).
 */
template <typename T>
class ndp_strategy : public optimization_strategy_comparator<T>
{
public:  // overrides were implicitly private (class default), blocking direct calls
    bool operator()(optimizer<T> &a, optimizer<T> &b) override
    {
        const node_depth x = a.independent_metric();
        const node_depth y = b.independent_metric();

        // Widen before multiplying: int * int can overflow (UB) on large
        // partitions, so compute the products in long long.
        return static_cast<long long>(x.nodes) * x.depth <
               static_cast<long long>(y.nodes) * y.depth;
    }
    const std::string name() override
    {
        return "node-depth product";
    }
};

/**
 * Ranks optimizers by logic depth alone (lower is better).
 */
template <typename T>
class d_strategy : public optimization_strategy_comparator<T>
{
public:  // overrides were implicitly private (class default), blocking direct calls
    bool operator()(optimizer<T> &a, optimizer<T> &b) override
    {
        const node_depth x = a.independent_metric();
        const node_depth y = b.independent_metric();

        return x.depth < y.depth;
    }
    const std::string name() override
    {
        return "depth";
    }
};

/**
 * Ranks optimizers by node count alone (lower is better).
 */
template <typename T>
class n_strategy : public optimization_strategy_comparator<T>
{
public:  // overrides were implicitly private (class default), blocking direct calls
    bool operator()(optimizer<T> &a, optimizer<T> &b) override
    {
        const node_depth x = a.independent_metric();
        const node_depth y = b.independent_metric();

        return x.nodes < y.nodes;
    }
    const std::string name() override
    {
        return "node";
    }
};

};
84 changes: 84 additions & 0 deletions core/algorithms/optimization/optimizers/abc.hpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,84 @@
/* LSOracle: A learning based Oracle for Logic Synthesis

* MIT License
* Copyright 2019 Laboratory for Nano Integrated Systems (LNIS)
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
#pragma once
#include <cstdlib>
#include <cstring>
#include <iostream>
#include <stdexcept>
#include <string>
#include <mockturtle/mockturtle.hpp>
#include "algorithms/optimization/optimizers/techmapping.hpp"
#include "algorithms/optimization/optimizers/aig.hpp"

namespace oracle {

/**
 * Optimizer that shells out to ABC's `resyn2` script: writes the converted
 * AIG to a temp blif, runs abc, and reads the optimized blif back in.
 */
template< typename network>
class abc_optimizer: public aig_optimizer<network>
{
    using partition = mockturtle::window_view<mockturtle::names_view<network>>;
public:
    abc_optimizer(int index, const partition &original, optimization_strategy target, const std::string &abc_exec): aig_optimizer<network>(index, original, target, abc_exec) {}

    const std::string optimizer_name()
    {
        return "abc resyn2";
    }

    optimizer<mockturtle::xmg_network> *reapply(int index, const xmg_partition &part)
    {
        return new abc_optimizer<mockturtle::xmg_network>(index, part, this->strategy, this->abc_exec);
    }

    void optimize()
    {
        // suffix_len = 5 keeps ".blif" intact while mkstemps fills XXXXXX.
        std::string blif_name = make_temp_file("/tmp/lsoracle_XXXXXX.blif", 5);
        std::cout << "writing blif to " << blif_name << std::endl;

        // suffix_len = 15 preserves "_optimized.blif".
        std::string blif_output_name = make_temp_file("/tmp/lsoracle_XXXXXX_optimized.blif", 15);
        std::cout << "writing abc output to " << blif_output_name << std::endl;

        mockturtle::write_blif_params ps;
        ps.skip_feedthrough = 1u;
        mockturtle::write_blif(this->converted, blif_name, ps);
        std::string script = "abc -c \"read_blif " + blif_name + "; resyn2; write_blif " + blif_output_name + " \"";
        // Check the exit status unconditionally: the previous assert()
        // disappeared under NDEBUG, silently ignoring abc failures.
        int code = system((script).c_str());
        if (code != 0) {
            throw std::runtime_error("abc exited with non-zero status");
        }
        std::cout << "optimized with abc" << std::endl;

        mockturtle::names_view<mockturtle::klut_network> klut;
        lorina::return_code read_blif_return_code = lorina::read_blif(blif_output_name, mockturtle::blif_reader(klut));
        if (read_blif_return_code != lorina::return_code::success) {
            throw std::runtime_error("failed to parse blif written by abc");
        }
        mockturtle::xag_npn_resynthesis<mockturtle::aig_network> resyn;
        mockturtle::node_resynthesis(this->optimal, klut, resyn);
        this->optimal.set_network_name(this->converted.get_network_name());
        // NOTE(review): the temp files are intentionally left on disk for
        // debugging, matching the original behavior.
    }

private:
    // Create a unique temp file from a mkstemps template; frees the strdup'd
    // buffer in every path (the original leaked it).
    static std::string make_temp_file(const char *tmpl, int suffix_len)
    {
        char *buf = strdup(tmpl);
        if (buf == nullptr) {
            throw std::runtime_error("strdup failed");
        }
        if (mkstemps(buf, suffix_len) == -1) {
            free(buf);
            throw std::runtime_error(std::string("mkstemps failed for ") + tmpl);
        }
        std::string name(buf);
        free(buf);
        return name;
    }
};
};
Loading