Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 5 additions & 5 deletions backend/BiLSTM.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,10 +8,10 @@
from __future__ import print_function
from util import BIOF1Validation

import keras
from keras.optimizers import *
from keras.models import Model
from keras.layers import *
from tensorflow import keras
from tensorflow.keras.optimizers import *
from tensorflow.keras.models import Model
from tensorflow.keras.layers import *
import math
import numpy as np
import sys
Expand Down Expand Up @@ -243,7 +243,7 @@ def buildModel(self):
elif self.params['optimizer'].lower() == 'adagrad':
opt = Adagrad(**optimizerParams)
elif self.params['optimizer'].lower() == 'sgd':
opt = SGD(lr=0.1, **optimizerParams)
opt = SGD(learning_rate=0.1, **optimizerParams)


model = Model(inputs=inputNodes, outputs=[output])
Expand Down
37 changes: 15 additions & 22 deletions backend/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,47 +1,40 @@
# Use an official Python runtime as a parent image
FROM python:3.5
FROM python:3.11-slim@sha256:0b23cfb7425d065008b778022a17b1551c82f8b4866ee5a7a200084b7e2eafbf

# Add all Data
ADD . /

# Set the working directory to /
WORKDIR /

RUN apt-get update && apt-get install -y --no-install-recommends wget git && rm -rf /var/lib/apt/lists/*

# Install any needed packages specified in requirements.txt
RUN pip install -r requirements.txt
RUN pip install torch==0.4.1 -f https://download.pytorch.org/whl/torch_stable.html
RUN pip install torchvision==0.2.1 -f https://download.pytorch.org/whl/torch_stable.html
RUN pip install --no-cache-dir -r requirements.txt
RUN python -m nltk.downloader -d /usr/local/nltk_data punkt_tab

RUN git clone https://github.com/UKPLab/emnlp2017-bilstm-cnn-crf.git

RUN mv backend.py emnlp2017-bilstm-cnn-crf/ && mv Model.py emnlp2017-bilstm-cnn-crf/ && mv ModelNewES.py emnlp2017-bilstm-cnn-crf/ && mv ModelNewWD.py emnlp2017-bilstm-cnn-crf/ && mv Segmenter.py emnlp2017-bilstm-cnn-crf/
# Download the .h5 file to the models directory
# Download the .h5 file
RUN wget -q --show-progress -O models/IBM.h5 "https://huggingface.co/debela-arg/segmenter/resolve/main/IBM.h5?download=true" || \
RUN wget -q -O models/IBM.h5 "https://huggingface.co/debela-arg/segmenter/resolve/main/IBM.h5?download=true" || \
(echo "Download failed! Check URL or authentication." && exit 1)

RUN mv models/* emnlp2017-bilstm-cnn-crf/models/

RUN mv -f BiLSTM.py emnlp2017-bilstm-cnn-crf/neuralnets/

# Patch cloned repo keras imports for TF 2.x compatibility
RUN sed -i \
-e 's/^import keras/from tensorflow import keras/' \
-e 's/^from keras import/from tensorflow.keras import/' \
-e 's/^from keras.engine import Layer, InputSpec/from tensorflow.keras.layers import Layer, InputSpec/' \
-e 's/self.add_weight((/self.add_weight(shape=(/' \
emnlp2017-bilstm-cnn-crf/neuralnets/keraslayers/ChainCRF.py

RUN mkdir emnlp2017-bilstm-cnn-crf/lstm

RUN git clone https://github.com/achernodub/bilstm-cnn-crf-tagger.git emnlp2017-bilstm-cnn-crf/lstm
RUN pip install prometheus-flask-exporter==0.1.2

# Document that the app listens on port 6000 (EXPOSE does not publish it; use -p/compose ports)
EXPOSE 6000

WORKDIR /emnlp2017-bilstm-cnn-crf

# Run the backend application (backend.py / backend:app) when the container launches
CMD ["python3", "backend.py"]









CMD ["gunicorn", "--workers", "1", "--bind", "0.0.0.0:6000", "backend:app"]
6 changes: 2 additions & 4 deletions backend/Segmenter.py
Original file line number Diff line number Diff line change
Expand Up @@ -368,10 +368,8 @@ def cascading_anaphora_propositionalizer(self, path):
if path.endswith("json"):
is_json_file=self.is_json(path)
if is_json_file:
data = open(path).read()
null = None
false = False
extended_json_aif = eval(data)
data = open(path).read()
extended_json_aif = json.loads(data)
json_aif = json_dict = extended_json_aif['AIF']

if 'nodes' in json_dict and 'locutions' in json_dict and 'edges' in json_dict:
Expand Down
16 changes: 3 additions & 13 deletions backend/backend.py
Original file line number Diff line number Diff line change
@@ -1,26 +1,16 @@
#!/usr/bin/env python3

"""be.py: Description."""
from flask import Flask, jsonify, request
from flasgger import Swagger, LazyString, LazyJSONEncoder
from flask import Flask, jsonify, request, make_response
from flask_restful import Api, Resource, reqparse
from flask import make_response
from nltk.tokenize import sent_tokenize, word_tokenize
import random
import json
from flask import jsonify
from flask_cors import CORS
import json
import logging
from prometheus_flask_exporter import PrometheusMetrics

app = Flask(__name__)
#app.json_encoder = LazyJSONEncoder


# Initialize Prometheus metrics
#metrics = PrometheusMetrics(app)
CORS(app, resources={r"/*": {"origins": "https://arg-tech.github.io"}})

# group by endpoint rather than path
metrics = PrometheusMetrics(app)

@app.route('/collection/:collection_id/item/:item_id')
Expand Down
75 changes: 65 additions & 10 deletions backend/requirements.txt
Original file line number Diff line number Diff line change
@@ -1,10 +1,65 @@
Flask
flasgger
flask_restful
nltk==3.4.5
tensorflow==1.5.0
keras==2.1.5
numpy==1.17.3
scipy==1.3.1
h5py
Jinja2
absl-py==2.4.0
aniso8601==10.0.1
astunparse==1.6.3
attrs==25.4.0
blinker==1.9.0
certifi==2026.2.25
cffi==2.0.0
charset-normalizer==3.4.4
click==8.3.1
cryptography==46.0.5
flasgger==0.9.7.1
Flask==3.1.3
flask-cors==6.0.2
Flask-RESTful==0.3.10
flatbuffers==25.12.19
gast==0.7.0
google-auth==2.48.0
google-auth-oauthlib==1.2.4
google-pasta==0.2.0
grpcio==1.78.1
gunicorn==25.1.0
h5py==3.15.1
idna==3.11
itsdangerous==2.2.0
Jinja2==3.1.6
joblib==1.5.3
jsonschema==4.26.0
jsonschema-specifications==2025.9.1
keras==2.15.0
libclang==18.1.1
Markdown==3.10.2
MarkupSafe==3.0.3
mistune==3.2.0
ml-dtypes==0.3.2
nltk==3.9.3
numpy==1.26.4
oauthlib==3.3.1
opt_einsum==3.4.0
packaging==26.0
prometheus_client==0.24.1
prometheus_flask_exporter==0.23.2
protobuf==4.25.8
pyasn1==0.6.2
pyasn1_modules==0.4.2
pycparser==3.0
pytz==2025.2
PyYAML==6.0.3
referencing==0.37.0
regex==2026.2.19
requests==2.32.5
requests-oauthlib==2.0.0
rpds-py==0.30.0
rsa==4.9.1
six==1.17.0
tensorboard==2.15.2
tensorboard-data-server==0.7.2
tensorflow==2.15.1
tensorflow-estimator==2.15.0
tensorflow-io-gcs-filesystem==0.37.1
termcolor==3.3.0
tqdm==4.67.3
typing_extensions==4.15.0
urllib3==2.6.3
Werkzeug==3.1.6
wrapt==1.14.2
1 change: 0 additions & 1 deletion docker-compose.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
version: "3"
services:
backend:
build: ./backend/
Expand Down
5 changes: 5 additions & 0 deletions tests/api-requests/Targer/bruno.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
{
"version": "1",
"name": "Targer",
"type": "collection"
}
3 changes: 3 additions & 0 deletions tests/api-requests/Targer/environments/(1) local.bru
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
vars {
baseUrl: http://localhost:10600
}
3 changes: 3 additions & 0 deletions tests/api-requests/Targer/environments/(2) staging.bru
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
vars {
baseUrl: http://targer.amfws.staging.arg.tech
}
3 changes: 3 additions & 0 deletions tests/api-requests/Targer/environments/(3) production.bru
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
vars {
baseUrl: http://targer.amfws.arg.tech
}
104 changes: 104 additions & 0 deletions tests/api-requests/Targer/targer-am POST.bru
Original file line number Diff line number Diff line change
@@ -0,0 +1,104 @@
meta {
name: targer-am POST
type: http
seq: 1
}

post {
url: {{baseUrl}}/targer-am
body: multipartForm
}

body:multipart-form {
file: @file(test-inputs/json-aif.json)
}

assert {
res.status: eq 200
}

tests {
test("should return valid xAIF envelope", function() {
const data = res.getBody();
expect(data).to.have.property('AIF');
expect(data.AIF).to.have.property('nodes');
expect(data.AIF).to.have.property('edges');
expect(data.AIF).to.have.property('locutions');
});

test("should preserve all original I-nodes", function() {
const data = res.getBody();
const nodes = data.AIF.nodes;
const iNodes = nodes.filter(n => n.type === 'I');
// Original input has 5 I-nodes
expect(iNodes.length).to.be.at.least(5);
const iTexts = iNodes.map(n => n.text.trim());
expect(iTexts).to.include('We should go eat.');
expect(iTexts).to.include('Because I\'m hungry');
});

test("should preserve all original L-nodes and YA-nodes", function() {
const data = res.getBody();
const nodes = data.AIF.nodes;
const lNodes = nodes.filter(n => n.type === 'L');
const yaNodes = nodes.filter(n => n.type === 'YA');
expect(lNodes.length).to.be.at.least(5);
expect(yaNodes.length).to.be.at.least(5);
});

test("every edge should reference existing nodeIDs", function() {
const data = res.getBody();
const nodeIDs = new Set(data.AIF.nodes.map(n => n.nodeID));
for (const edge of data.AIF.edges) {
expect(nodeIDs.has(edge.fromID), `fromID ${edge.fromID} not in nodes`).to.be.true;
expect(nodeIDs.has(edge.toID), `toID ${edge.toID} not in nodes`).to.be.true;
}
});

test("every node should have required fields", function() {
const data = res.getBody();
for (const node of data.AIF.nodes) {
expect(node).to.have.property('nodeID');
expect(node).to.have.property('text');
expect(node).to.have.property('type');
}
});

test("every edge should have required fields", function() {
const data = res.getBody();
for (const edge of data.AIF.edges) {
expect(edge).to.have.property('edgeID');
expect(edge).to.have.property('fromID');
expect(edge).to.have.property('toID');
}
});

test("RA/CA relation nodes should have proper edge structure", function() {
const data = res.getBody();
const nodes = data.AIF.nodes;
const edges = data.AIF.edges;
const relationNodes = nodes.filter(n => n.type === 'RA' || n.type === 'CA');

for (const rNode of relationNodes) {
// Each RA/CA node should have at least one incoming and one outgoing edge
const incoming = edges.filter(e => e.toID === rNode.nodeID);
const outgoing = edges.filter(e => e.fromID === rNode.nodeID);
expect(incoming.length, `RA/CA node ${rNode.nodeID} should have incoming edge`).to.be.at.least(1);
expect(outgoing.length, `RA/CA node ${rNode.nodeID} should have outgoing edge`).to.be.at.least(1);
}
});

test("node IDs should be unique", function() {
const data = res.getBody();
const nodeIDs = data.AIF.nodes.map(n => n.nodeID);
const uniqueIDs = new Set(nodeIDs);
expect(uniqueIDs.size).to.equal(nodeIDs.length);
});

test("edge IDs should be unique", function() {
const data = res.getBody();
const edgeIDs = data.AIF.edges.map(e => e.edgeID);
const uniqueIDs = new Set(edgeIDs);
expect(uniqueIDs.size).to.equal(edgeIDs.length);
});
}
Loading
Loading