Commit 748f9b14 authored by Stéphane Del Pino

Add `+` and `-` operators to Vector

Also perform a few cosmetic changes (matrix classes now take template parameters).
parent 0a24f6ae
1 merge request: !26 Feature/linear systems
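Together with the existing CRSMatrix::operator*, the new operators make it possible to write whole-vector expressions such as the residual of a linear system. A minimal sketch, assuming A is a CRSMatrix<double> assembled as in the diff below and b, x are Vector<double> of matching size (all names are hypothetical):

// Residual r = b - A*x, written with the new element-wise operator-
// and the existing sparse matrix-vector product.
Vector<double> r = b - A * x;   // r[i] = b[i] - (A*x)[i]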
@@ -9,23 +9,27 @@
#include <iostream>
+ template <typename DataType = double, typename IndexType = size_t>
class FlexibleMatrix
{
public:
+ using data_type = DataType;
+ using index_type = IndexType;
class FlexibleRow
{
friend class FlexibleMatrix;
- std::map<uint64_t, double> m_id_value_map;
+ std::map<IndexType, DataType> m_id_value_map;
public:
- const double& operator[](const size_t& j) const
+ const DataType& operator[](const IndexType& j) const
{
auto i_value = m_id_value_map.find(j);
Assert(i_value != m_id_value_map.end());
return i_value->second;
}
- double& operator[](const size_t& j)
+ DataType& operator[](const IndexType& j)
{
auto i_value = m_id_value_map.find(j);
if (i_value != m_id_value_map.end()) {
@@ -54,25 +58,25 @@ class FlexibleMatrix
public:
FlexibleRow&
- row(const size_t i)
+ row(const IndexType i)
{
return m_row_array[i];
}
const FlexibleRow&
- row(const size_t i) const
+ row(const IndexType i) const
{
return m_row_array[i];
}
- double&
- operator()(const size_t& i, const size_t& j)
+ DataType&
+ operator()(const IndexType& i, const IndexType& j)
{
return m_row_array[i][j];
}
- const double&
- operator()(const size_t& i, const size_t& j) const
+ const DataType&
+ operator()(const IndexType& i, const IndexType& j) const
{
return m_row_array[i][j];
}
@@ -80,7 +84,7 @@ class FlexibleMatrix
friend std::ostream&
operator<<(std::ostream& os, const FlexibleMatrix& M)
{
- for (size_t i = 0; i < M.m_row_array.size(); ++i) {
+ for (IndexType i = 0; i < M.m_row_array.size(); ++i) {
os << i << " |" << M.m_row_array[i] << '\n';
}
return os;
@@ -89,8 +93,8 @@ class FlexibleMatrix
auto
graphVector() const
{
- std::vector<std::vector<uint64_t>> graph_vector(m_row_array.size());
- for (size_t i_row = 0; i_row < m_row_array.size(); ++i_row) {
+ std::vector<std::vector<IndexType>> graph_vector(m_row_array.size());
+ for (IndexType i_row = 0; i_row < m_row_array.size(); ++i_row) {
const FlexibleRow& row = m_row_array[i_row];
for (auto [id, value] : row.m_id_value_map) {
graph_vector[i_row].push_back(id);
@@ -99,18 +103,18 @@ class FlexibleMatrix
return graph_vector;
}
- Array<double>
+ Array<DataType>
valueArray() const
{
- size_t size = 0;
- for (size_t i_row = 0; i_row < m_row_array.size(); ++i_row) {
+ IndexType size = 0;
+ for (IndexType i_row = 0; i_row < m_row_array.size(); ++i_row) {
size += m_row_array[i_row].m_id_value_map.size();
}
- Array<double> values(size);
+ Array<DataType> values(size);
- size_t cpt = 0;
- for (size_t i_row = 0; i_row < m_row_array.size(); ++i_row) {
+ IndexType cpt = 0;
+ for (IndexType i_row = 0; i_row < m_row_array.size(); ++i_row) {
const FlexibleRow& row = m_row_array[i_row];
for (auto [id, value] : row.m_id_value_map) {
values[cpt++] = value;
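For context, graphVector() and valueArray() flatten the per-row maps into the two flat arrays that compressed row storage (CRS) needs: the column indices of each row and the matching values, while Kokkos::create_staticcrsgraph derives the row offsets (row_map). A worked layout for a hypothetical 3x3 matrix:

// Matrix             row_map         entries            values
// [ 2 -1  0 ]
// [ 0  2 -1 ]        {0, 2, 4, 5}    {0,1,  1,2,  2}    {2,-1,  2,-1,  2}
// [ 0  0  2 ]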
@@ -119,31 +123,32 @@ class FlexibleMatrix
return values;
}
- FlexibleMatrix(size_t nb_row) : m_row_array{nb_row} {}
+ FlexibleMatrix(IndexType nb_row) : m_row_array{nb_row} {}
~FlexibleMatrix() = default;
};
+ template <typename DataType = double, typename IndexType = size_t>
class CRSMatrix
{
private:
- using HostMatrix = Kokkos::StaticCrsGraph<unsigned int, Kokkos::HostSpace>;
+ using HostMatrix = Kokkos::StaticCrsGraph<IndexType, Kokkos::HostSpace>;
HostMatrix m_host_matrix;
- Array<double> m_values;
+ Array<DataType> m_values;
public:
- Vector<double> operator*(const Vector<double>& x) const
+ Vector<DataType> operator*(const Vector<DataType>& x) const
{
- Vector<double> Ax{m_host_matrix.numRows()};
+ Vector<DataType> Ax{m_host_matrix.numRows()};
auto host_row_map = m_host_matrix.row_map;
parallel_for(
- m_host_matrix.numRows(), PUGS_LAMBDA(const size_t& i_row) {
+ m_host_matrix.numRows(), PUGS_LAMBDA(const IndexType& i_row) {
const auto row_begin = host_row_map(i_row);
const auto row_end = host_row_map(i_row + 1);
- double sum{0};
- for (size_t j = row_begin; j < row_end; ++j) {
+ DataType sum{0};
+ for (IndexType j = row_begin; j < row_end; ++j) {
sum += m_values[j] * x[m_host_matrix.entries(j)];
}
Ax[i_row] = sum;
@@ -156,11 +161,11 @@ class CRSMatrix
operator<<(std::ostream& os, const CRSMatrix& M)
{
auto host_row_map = M.m_host_matrix.row_map;
- for (size_t i_row = 0; i_row < M.m_host_matrix.numRows(); ++i_row) {
+ for (IndexType i_row = 0; i_row < M.m_host_matrix.numRows(); ++i_row) {
const auto& row_begin = host_row_map(i_row);
const auto& row_end = host_row_map(i_row + 1);
os << i_row << " #";
- for (size_t j = row_begin; j < row_end; ++j) {
+ for (IndexType j = row_begin; j < row_end; ++j) {
os << ' ' << M.m_host_matrix.entries(j) << ':' << M.m_values[j];
}
os << '\n';
@@ -168,7 +173,7 @@ class CRSMatrix
return os;
}
- CRSMatrix(const FlexibleMatrix& M)
+ CRSMatrix(const FlexibleMatrix<DataType, IndexType>& M)
{
m_host_matrix = Kokkos::create_staticcrsgraph<HostMatrix>("connectivity_matrix", M.graphVector());
m_values = M.valueArray();
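Thanks to the default template arguments, both matrix classes keep their previous behaviour (double values, size_t indices) while now accepting other scalar and index types. A minimal sketch of the assemble-then-compress pattern shown above, with hypothetical entries:

FlexibleMatrix<double> F{3};    // 3 rows; DataType = double, IndexType = size_t by default
F(0, 0) = 2;  F(0, 1) = -1;     // non-const access creates missing entries in the row's map
F(1, 1) = 2;  F(1, 2) = -1;
F(2, 2) = 2;
CRSMatrix<double> A{F};         // compresses F through graphVector() and valueArray()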
@@ -101,6 +101,32 @@ class Vector
return *this;
}
+ PUGS_INLINE
+ Vector
+ operator+(const Vector& y)
+ {
+ Assert(this->size() == y.size());
+ Vector sum{y.size()};
+ parallel_for(
+ this->size(), PUGS_LAMBDA(const index_type& i) { sum.m_values[i] = m_values[i] + y.m_values[i]; });
+ return sum;
+ }
+ PUGS_INLINE
+ Vector
+ operator-(const Vector& y)
+ {
+ Assert(this->size() == y.size());
+ Vector sum{y.size()};
+ parallel_for(
+ this->size(), PUGS_LAMBDA(const index_type& i) { sum.m_values[i] = m_values[i] - y.m_values[i]; });
+ return sum;
+ }
PUGS_INLINE
DataType& operator[](const index_type& i) const noexcept(NO_ASSERT)
{
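Each new operator asserts that the operands have the same size, allocates the result, and fills it with a parallel_for, so the result can be used in further expressions. A short usage sketch, assuming x and y are Vector<double> of equal size (hypothetical names):

Vector<double> s = x + y;   // s[i] = x[i] + y[i]
Vector<double> d = x - y;   // d[i] = x[i] - y[i]

Returning the result by value keeps the operators composable (e.g. b - A * x), at the cost of one temporary per operation.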
@@ -70,8 +70,12 @@ main(int argc, char* argv[])
Vector<double> u{x.size()};
u = 0;
- PCG{Ax, A, A, u, 9, 1e-4};
- std::cout << "u=" << u << '\n';
+ PCG{Ax, A, A, u, static_cast<size_t>(-1), 1e-12};
+ std::cout << "u=\n" << u << '\n';
+ std::cout << "L2 Error = " << std::sqrt((x - u, x - u)) << '\n';
+ std::cout << "L2 Norm = " << std::sqrt((x, x)) << '\n';
+ std::cout << "L2 RelEr = " << std::sqrt((x - u, x - u)) / std::sqrt((x, x)) << '\n';
}
Kokkos::finalize();
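In the updated driver, static_cast<size_t>(-1) wraps around to the largest value a size_t can hold, so the iteration cap is effectively removed and PCG stops on the 1e-12 tolerance instead. A sketch of the same call with an explicit, self-documenting cap (assumes <limits> is included):

constexpr size_t max_iterations = std::numeric_limits<size_t>::max();   // same value as static_cast<size_t>(-1)
PCG{Ax, A, A, u, max_iterations, 1e-12};

The three printed quantities are the absolute L2 error sqrt((x - u, x - u)), the L2 norm of the reference solution x, and their ratio, the relative L2 error; the (a, b) syntax presumably relies on Vector's comma operator being overloaded as a dot product.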