Binary file added .DS_Store
Binary file not shown.
Binary file added .github/.DS_Store
Binary file not shown.
48 changes: 48 additions & 0 deletions Dockerfile
@@ -0,0 +1,48 @@
# Use Debian as the base image
FROM debian:latest

# Avoid prompts from apt
ENV DEBIAN_FRONTEND=noninteractive
ENV TERM=xterm

# Install required packages
RUN apt-get update && \
apt-get install -y curl build-essential pkg-config libssl-dev git protobuf-compiler clang libclang-dev llvm-dev librocksdb-dev jq make && \
rm -rf /var/lib/apt/lists/*

# Install Rust
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
ENV PATH="/root/.cargo/bin:${PATH}"

# Verify Rust installation
RUN rustc --version && cargo --version

# Copy the source code into the image
COPY . /app
WORKDIR /app

# Build the Rust project
RUN cargo build --release && \
rm -rf /app/target/debug

# Expose the necessary ports
EXPOSE 30333 30343 9944

# Make the node startup script executable so the CMD below can run it
# RUN chmod +x /app/target/release
# RUN chmod +x /app/scripts

RUN chmod +x /app/scripts/run_nodes.sh


# Keep the container running
CMD ["/bin/sh", "-c", "/app/scripts/run_nodes.sh"]
# CMD ["tail", "-f", "/dev/null"]
# CMD ["/app/scripts/run_nodes.sh"]

# # Copy and make the entrypoint script executable
# COPY entrypoint.sh /usr/local/bin/entrypoint.sh
# RUN chmod +x /usr/local/bin/entrypoint.sh

# # Command to run when the container starts
# ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
Binary file added aleph-client/.DS_Store
Binary file not shown.
Binary file added baby-liminal-extension/.DS_Store
Binary file not shown.
Binary file added bin/.DS_Store
Binary file not shown.
13 changes: 4 additions & 9 deletions bin/node/src/service.rs
@@ -7,10 +7,10 @@ use std::{

use fake_runtime_api::fake_runtime::RuntimeApi;
use finality_aleph::{
build_network, get_aleph_block_import, run_validator_node, AlephConfig, AllBlockMetrics,
BlockImporter, BuildNetworkOutput, ChannelProvider, FavouriteSelectChainProvider,
Justification, JustificationTranslator, MillisecsPerBlock, RateLimiterConfig,
RedirectingBlockImport, SessionPeriod, SubstrateChainStatus, SyncOracle, ValidatorAddressCache,
build_network, get_aleph_block_import, run_validator_node, AlephConfig, BlockImporter,
BuildNetworkOutput, ChannelProvider, FavouriteSelectChainProvider, Justification,
JustificationTranslator, MillisecsPerBlock, RateLimiterConfig, RedirectingBlockImport,
SessionPeriod, SubstrateChainStatus, SyncOracle, ValidatorAddressCache,
};
use log::warn;
use pallet_aleph_runtime_api::AlephSessionApi;
@@ -51,7 +51,6 @@ pub struct ServiceComponents {
pub keystore_container: KeystoreContainer,
pub justification_channel_provider: ChannelProvider<Justification>,
pub telemetry: Option<Telemetry>,
pub metrics: AllBlockMetrics,
}
struct LimitNonfinalized(u32);

@@ -133,14 +132,12 @@ pub fn new_partial(config: &Configuration) -> Result<ServiceComponents, ServiceE
SubstrateChainStatus::new(backend.clone())
.map_err(|e| ServiceError::Other(format!("failed to set up chain status: {e}")))?,
);
let metrics = AllBlockMetrics::new(config.prometheus_registry());
let justification_channel_provider = ChannelProvider::new();
let aleph_block_import = get_aleph_block_import(
client.clone(),
justification_channel_provider.get_sender(),
justification_translator,
select_chain_provider.select_chain(),
metrics.clone(),
);

let slot_duration = sc_consensus_aura::slot_duration(&*client)?;
@@ -182,7 +179,6 @@ pub fn new_partial(config: &Configuration) -> Result<ServiceComponents, ServiceE
transaction_pool,
justification_channel_provider,
telemetry,
metrics,
})
}

@@ -394,7 +390,6 @@ pub fn new_authority(
keystore: service_components.keystore_container.local_keystore(),
justification_channel_provider: service_components.justification_channel_provider,
block_rx,
metrics: service_components.metrics,
registry: prometheus_registry,
unit_creation_delay: aleph_config.unit_creation_delay(),
backup_saving_path: backup_path,
Binary file added bin/runtime/.DS_Store
Binary file not shown.
163 changes: 163 additions & 0 deletions bin/runtime/src/lib.rs
@@ -974,6 +974,11 @@ impl pallet_tx_pause::Config for Runtime {
type WeightInfo = pallet_tx_pause::weights::SubstrateWeight<Runtime>;
}

impl pallet_template::Config for Runtime {
type RuntimeEvent = RuntimeEvent;
//type WeightInfo = pallet_template::weights::SubstrateWeight<Runtime>;
}

// Create the runtime by composing the FRAME pallets that were previously configured.
construct_runtime!(
pub struct Runtime {
@@ -1005,6 +1010,7 @@ construct_runtime!(
SafeMode: pallet_safe_mode = 25,
TxPause: pallet_tx_pause = 26,
Operations: pallet_operations = 255,
TemplateModule: crate::pallet_template::{Pallet, Call, Storage, Event<T>} = 50
}
);

@@ -1046,6 +1052,7 @@ mod benches {
[pallet_feature_control, FeatureControl]
[pallet_vk_storage, VkStorage]
[baby_liminal_extension, baby_liminal_extension::ChainExtensionBenchmarking<Runtime>]
[pallet_template, TemplateModule]
);
}

@@ -1607,3 +1614,159 @@ mod tests {
assert!(lhs < rhs);
}
}


//#![cfg_attr(not(feature = "std"), no_std)]

#[frame_support::pallet]
pub mod pallet_template {
use frame_support::{pallet_prelude::*, traits::StorageVersion}; //, sp_runtime::RuntimeAppPublic};
use frame_system::pallet_prelude::*;
use scale_info::prelude::vec::Vec;

const STORAGE_VERSION: StorageVersion = StorageVersion::new(1);

#[pallet::pallet]
#[pallet::storage_version(STORAGE_VERSION)]
pub struct Pallet<T>(_);

#[pallet::config]
pub trait Config: frame_system::Config {
type RuntimeEvent: From<Event<Self>> + IsType<<Self as frame_system::Config>::RuntimeEvent>;
}

#[derive(Encode, Decode, MaxEncodedLen, TypeInfo, Debug, Clone, PartialEq, Eq)]
pub struct FSEvent {
pub eventtype: [u8; 64],
pub creationtime: [u8; 64],
pub filepath: [u8; 256],
pub eventkey: [u8; 128],
}

#[pallet::storage]
#[pallet::getter(fn info)]
// pub(super) type DisReAssembly<T: Config> = StorageMap<_, Blake2_128Concat, T::AccountId, FSEvent, OptionQuery>;
pub(super) type DisReAssembly<T: Config> = StorageDoubleMap< _, Blake2_128Concat, T::AccountId, Blake2_128Concat, u64, FSEvent, OptionQuery>;

#[pallet::storage]
#[pallet::getter(fn nonces)]
pub(super) type Nonces<T: Config> = StorageMap<_, Blake2_128Concat, T::AccountId, u64, ValueQuery>;

#[pallet::event]
#[pallet::generate_deposit(pub(super) fn deposit_event)]
pub enum Event<T: Config> {
FileDisassembled { who: T::AccountId, event: FSEvent },
FileReassembled { who: T::AccountId, event: FSEvent },
}

#[pallet::error]
pub enum Error<T> {
EventTypeTooLong,
CreationTimeTooLong,
FilePathTooLong,
EventKeyTooLong,
}

#[pallet::call]
impl<T: Config> Pallet<T> {
#[pallet::call_index(0)]
#[pallet::weight((Weight::from_parts(10_000, 0) + T::DbWeight::get().writes(1), DispatchClass::Operational))]
pub fn disassembled(
origin: OriginFor<T>,
event_type: Vec<u8>,
creation_time: Vec<u8>,
file_path: Vec<u8>,
event_key: Vec<u8>,
) -> DispatchResult {
let sender = ensure_signed(origin)?;

ensure!(event_type.len() <= 64, Error::<T>::EventTypeTooLong);
ensure!(creation_time.len() <= 64, Error::<T>::CreationTimeTooLong);
ensure!(file_path.len() <= 256, Error::<T>::FilePathTooLong);
ensure!(event_key.len() <= 128, Error::<T>::EventKeyTooLong);

let event = FSEvent {
eventtype: {
let mut arr = [0u8; 64];
arr[..event_type.len()].copy_from_slice(&event_type);
arr
},
creationtime: {
let mut arr = [0u8; 64];
arr[..creation_time.len()].copy_from_slice(&creation_time);
arr
},
filepath: {
let mut arr = [0u8; 256];
arr[..file_path.len()].copy_from_slice(&file_path);
arr
},
eventkey: {
let mut arr = [0u8; 128];
arr[..event_key.len()].copy_from_slice(&event_key);
arr
},
};

let nonce = Nonces::<T>::get(&sender);
<DisReAssembly<T>>::insert(&sender, nonce, &event);
Nonces::<T>::insert(&sender, nonce + 1);

// <DisReAssembly<T>>::insert(&sender, &event);

Self::deposit_event(Event::<T>::FileDisassembled { who: sender.clone(), event: event.clone() });

Ok(())
}

#[pallet::call_index(1)]
#[pallet::weight((Weight::from_parts(10_000, 0) + T::DbWeight::get().writes(1), DispatchClass::Operational))]
pub fn reassembled(
origin: OriginFor<T>,
event_type: Vec<u8>,
creation_time: Vec<u8>,
file_path: Vec<u8>,
event_key: Vec<u8>,
) -> DispatchResult {
let sender = ensure_signed(origin)?;

ensure!(event_type.len() <= 64, Error::<T>::EventTypeTooLong);
ensure!(creation_time.len() <= 64, Error::<T>::CreationTimeTooLong);
ensure!(file_path.len() <= 256, Error::<T>::FilePathTooLong);
ensure!(event_key.len() <= 128, Error::<T>::EventKeyTooLong);

let event = FSEvent {
eventtype: {
let mut arr = [0u8; 64];
arr[..event_type.len()].copy_from_slice(&event_type);
arr
},
creationtime: {
let mut arr = [0u8; 64];
arr[..creation_time.len()].copy_from_slice(&creation_time);
arr
},
filepath: {
let mut arr = [0u8; 256];
arr[..file_path.len()].copy_from_slice(&file_path);
arr
},
eventkey: {
let mut arr = [0u8; 128];
arr[..event_key.len()].copy_from_slice(&event_key);
arr
},
};

let nonce = Nonces::<T>::get(&sender);
<DisReAssembly<T>>::insert(&sender, nonce, &event);
Nonces::<T>::insert(&sender, nonce + 1);

// <DisReAssembly<T>>::insert(&sender, &event);

Self::deposit_event(Event::<T>::FileReassembled { who: sender.clone(), event: event.clone() });

Ok(())
}
}
}
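
Both extrinsics added above follow the same pattern: each variable-length Vec<u8> argument is length-checked with ensure! and then copied into a zero-padded fixed-width array before being stored under a per-account nonce. The standalone sketch below (plain Rust, no FRAME dependencies; the pad_to_array helper is illustrative only and not part of the pallet) shows that padding step in isolation:

    // Illustration of the fixed-width padding used by `disassembled` and
    // `reassembled`: inputs longer than the target width are rejected,
    // shorter ones are copied into a zero-padded array.
    fn pad_to_array<const N: usize>(input: &[u8]) -> Result<[u8; N], &'static str> {
        if input.len() > N {
            return Err("input too long"); // mirrors the *TooLong pallet errors
        }
        let mut arr = [0u8; N];
        arr[..input.len()].copy_from_slice(input);
        Ok(arr)
    }

    fn main() {
        // 64-byte field, as used for eventtype and creationtime.
        let event_type: [u8; 64] = pad_to_array(b"CREATE").expect("fits in 64 bytes");
        assert_eq!(&event_type[..6], b"CREATE");
        assert!(event_type[6..].iter().all(|&b| b == 0));
        // Oversized input is rejected, matching the ensure! checks in the calls.
        assert!(pad_to_array::<4>(b"too long for four bytes").is_err());
    }

A more conventional FRAME approach would take BoundedVec parameters so the length limits are enforced by the type system, but the sketch mirrors the fixed-array layout this diff actually stores.
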
6 changes: 3 additions & 3 deletions docker/Dockerfile
@@ -7,12 +7,12 @@ RUN apt update && \

EXPOSE 30333 30343 9944

WORKDIR node
WORKDIR /node

COPY target/release/aleph-node /usr/local/bin
COPY ../target/release/aleph-node /usr/local/bin
RUN chmod +x /usr/local/bin/aleph-node

COPY docker/docker_entrypoint.sh /node/docker_entrypoint.sh
COPY docker_entrypoint.sh /node/docker_entrypoint.sh
RUN chmod +x /node/docker_entrypoint.sh

ENTRYPOINT ["./docker_entrypoint.sh"]
Binary file added e2e-tests/.DS_Store
Binary file not shown.
9 changes: 9 additions & 0 deletions entrypoint.sh
@@ -0,0 +1,9 @@
#!/bin/sh


# Start the aleph-node
/usr/local/bin/scripts/run_nodes.sh


# Keep the container running
tail -f /dev/null
13 changes: 13 additions & 0 deletions finality-aleph/src/block/substrate/chain_status.rs
@@ -64,6 +64,8 @@ impl Display for Error {
}
}

impl std::error::Error for Error {}

impl From<BackendError> for Error {
fn from(value: BackendError) -> Self {
Error::Backend(value)
@@ -159,6 +161,17 @@ impl SubstrateChainStatus {
fn finalized_hash(&self) -> AlephHash {
self.info().finalized_hash
}

/// Computes the lowest common ancestor of two blocks. Warning: the complexity is
/// O(distance between the blocks).
pub fn lowest_common_ancestor(&self, from: &BlockId, to: &BlockId) -> Result<BlockId, Error> {
let result = sp_blockchain::lowest_common_ancestor(
self.backend.blockchain(),
from.hash(),
to.hash(),
)?;
Ok((result.hash, result.number).into())
}
}

impl ChainStatus<Block, Justification> for SubstrateChainStatus {
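
The lowest_common_ancestor helper added above delegates to sp_blockchain::lowest_common_ancestor, whose cost grows with the distance between the two blocks, hence the warning in its doc comment. As a rough, self-contained illustration of that walk-back idea (a toy parent map over string "hashes", not the real backend or the sp_blockchain implementation):

    use std::collections::HashMap;

    // Toy chain view: block "hash" -> (parent "hash", block number).
    type Headers = HashMap<&'static str, (&'static str, u64)>;

    // Walk both cursors back until they meet; every step is one parent lookup,
    // so the cost is proportional to the distance between the two blocks.
    fn lowest_common_ancestor(headers: &Headers, mut a: &'static str, mut b: &'static str) -> &'static str {
        // Bring both cursors to the same height first.
        while headers[a].1 > headers[b].1 {
            a = headers[a].0;
        }
        while headers[b].1 > headers[a].1 {
            b = headers[b].0;
        }
        // Then step back in lockstep until the hashes match.
        while a != b {
            a = headers[a].0;
            b = headers[b].0;
        }
        a
    }

    fn main() {
        // genesis <- a1 <- a2       (fork A)
        // genesis <- a1 <- b2 <- b3 (fork B)
        let headers: Headers = HashMap::from([
            ("genesis", ("genesis", 0)),
            ("a1", ("genesis", 1)),
            ("a2", ("a1", 2)),
            ("b2", ("a1", 2)),
            ("b3", ("b2", 3)),
        ]);
        assert_eq!(lowest_common_ancestor(&headers, "a2", "b3"), "a1");
    }
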