Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 19 additions & 1 deletion result_server/app.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import os
import sys
from datetime import timedelta

# Retrieve the API key from environment variable (needed in receive.py)
EXPECTED_API_KEY = os.environ.get("RESULT_SERVER_KEY")
Expand Down Expand Up @@ -32,7 +33,24 @@ def create_app(prefix="", base_dir=None):

# Set a secret key for session management (required for flash and OTP sessions)
# In production, use a secure random key, e.g., os.urandom(24)
app.secret_key = os.environ.get("FLASK_SECRET_KEY", "dev_secret_key")
#app.secret_key = os.environ.get("FLASK_SECRET_KEY", "dev_secret_key")

# --- Secret Key ---
secret_key = os.environ.get("FLASK_SECRET_KEY")
if not secret_key:
raise RuntimeError("FLASK_SECRET_KEY must be set in production")
app.secret_key = secret_key

# --- セッションCookieのセキュリティ設定 ---
app.config.update(
SESSION_COOKIE_SECURE=True, # HTTPS必須
SESSION_COOKIE_HTTPONLY=True, # JSからのアクセス禁止
SESSION_COOKIE_SAMESITE="Strict", # もしくは "Lax"
PERMANENT_SESSION_LIFETIME=timedelta(minutes=30), # セッション寿命を短めに
)




# make dir, !!!!!!!! received & estimated_results
received_dir = os.path.join(base_dir, "received")
Expand Down
91 changes: 79 additions & 12 deletions result_server/routes/receive.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,31 +7,98 @@

EXPECTED_API_KEY = os.environ.get("RESULT_SERVER_KEY")
SAVE_DIR = "received"
ESTIMATED_RESULT_DIR = "estimated_results"

@receive_bp.route("write-api", methods=["POST"])
def receive():
def require_api_key():
    """Abort with 401 unless the request carries a valid ``X-API-Key`` header.

    Raises:
        werkzeug.exceptions.Unauthorized: when the server key is not
            configured, the header is absent, or the key does not match.
    """
    import hmac  # local import: constant-time comparison helper

    api_key = request.headers.get("X-API-Key")
    # If RESULT_SERVER_KEY is unset, EXPECTED_API_KEY is None and a request
    # with no header would compare None == None and be accepted — reject
    # explicitly instead of falling through to the equality check.
    if not EXPECTED_API_KEY or api_key is None:
        abort(401, description="Invalid API Key")
    # compare_digest avoids leaking key bytes through timing differences.
    if not hmac.compare_digest(api_key, EXPECTED_API_KEY):
        abort(401, description="Invalid API Key")

data = request.data
def save_json_file(data, prefix, out_dir, given_uuid=None):
    """Write *data* (raw bytes) atomically to ``out_dir`` as a JSON file.

    The filename is ``{prefix}_{timestamp}_{uuid}.json``.  The payload is
    first written to a ``.tmp`` sibling, flushed and fsynced, then renamed
    into place so readers never observe a partially written file.

    Args:
        data: raw request body (bytes) to persist verbatim.
        prefix: filename prefix, e.g. ``"result"`` or ``"estimate"``.
        out_dir: destination directory (must already exist).
        given_uuid: optional identifier to use instead of a fresh uuid4.

    Returns:
        dict with ``status``, ``id``, ``timestamp`` and ``json_file`` keys
        (callers attach the HTTP status code themselves).
    """
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    unique_id = given_uuid or str(uuid.uuid4())
    filename = f"{prefix}_{timestamp}_{unique_id}.json"
    path = os.path.join(out_dir, filename)
    tmp_path = path + ".tmp"

    # Write-then-rename: rename() is atomic on POSIX, and the explicit
    # flush + fsync guarantees the bytes are on disk before the rename.
    with open(tmp_path, "wb") as f:
        f.write(data)
        f.flush()
        os.fsync(f.fileno())
    os.rename(tmp_path, path)

    print(f"Saved {prefix}: {path}", flush=True)

    return {
        "status": "ok",
        "id": unique_id,
        "timestamp": timestamp,
        "json_file": filename,
    }


@receive_bp.route("write-api", methods=["POST"])
def receive_result():
    """Persist a raw benchmark-result payload under SAVE_DIR as JSON."""
    require_api_key()
    payload = request.data
    response = save_json_file(data=payload, prefix="result", out_dir=SAVE_DIR)
    return response, 200

@receive_bp.route("write-est", methods=["POST"])
def upload_estimate():
    """Persist an uploaded estimate payload under ESTIMATED_RESULT_DIR.

    Honors an optional client-supplied ``X-UUID`` header so the estimate
    can be correlated with an earlier result; the value is validated as a
    real UUID because it becomes part of a filename.
    """
    require_api_key()
    data = request.data

    given = request.headers.get("X-UUID")
    if given is not None:
        # Untrusted header text flows into a filename — accept only a
        # well-formed UUID so path separators or ".." cannot be injected.
        try:
            given = str(uuid.UUID(given))
        except ValueError:
            abort(400, description="Invalid X-UUID header")

    return save_json_file(
        data=data,
        prefix="estimate",
        out_dir=ESTIMATED_RESULT_DIR,
        given_uuid=given,
    ), 200





#@receive_bp.route("write-api", methods=["POST"])
#def receive():
# api_key = request.headers.get("X-API-Key")
# if api_key != EXPECTED_API_KEY:
# abort(401, description="Invalid API Key")
#
# data = request.data
# timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
# unique_id = str(uuid.uuid4())
#
# json_filename = f"result_{timestamp}_{unique_id}.json"
# json_path = os.path.join(SAVE_DIR, json_filename)
# # rename (atomic)を使って確実にflushする
# tmp_path = json_path + ".tmp"
# with open(tmp_path, "wb") as f:
# f.write(data)
# f.flush()
# os.fsync(f.fileno())
# os.rename(tmp_path, json_path)
#
# print(f"Saved: {json_path}", flush=True)
# return {
# "status": "ok",
# "id": unique_id,
# "timestamp": timestamp,
# "json_file": json_filename,
# }, 200
2 changes: 1 addition & 1 deletion result_server/templates/_results_table.html
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
{% if key in ["json_link", "data_link"] %}
<td>
{% if row[key] %}
<a href="/results/{{ row[key] }}">
<a href="{{ row[key] }}">
{{ "json" if key == "json_link" else "data" }}
</a>
{% else %}
Expand Down
22 changes: 15 additions & 7 deletions result_server/templates/estimated_results.html
Original file line number Diff line number Diff line change
Expand Up @@ -19,38 +19,46 @@ <h1>Estimated Results</h1>
<table border="1">
<thead>
<tr>
<th rowspan="2">Timestamp</th>
<th rowspan="2">Code</th>
<th rowspan="2">Exp</th>
<th colspan="3">System A</th>
<th colspan="3">System B</th>
<th colspan="2">Benchmark</th>
<th colspan="4">System A</th>
<th colspan="4">System B</th>
<th colspan="3">Benchmark</th>
<th rowspan="2">JSON</th>
<th rowspan="2">Ratio</th>
</tr>
<tr>
<th>FOM</th>
<th>Method</th>
<th>System</th>
<th>Nodes</th>
<th>FOM</th>
<th>Method</th>
<th>FOM</th>
<th>System</th>
<th>Nodes</th>
<th>Method</th>
<th>FOM</th>
<th>System</th>
<th>Nodes</th>
</tr>
</thead>
<tbody>
{% for r in rows %}
<tr>
<td>{{ r.timestamp }}</td>
<td>{{ r.code }}</td>
<td>{{ r.exp }}</td>
<td>{{ r.systemA_fom }}</td>
<td>{{ r.systemA_method }}</td>
<td>{{ r.systemA_system }}</td>
<td>{{ r.systemA_nodes }}</td>
<td>{{ r.systemA_method }}</td>
<td>{{ r.systemB_fom }}</td>
<td>{{ r.systemB_method }}</td>
<td>{{ r.systemB_system }}</td>
<td>{{ r.systemB_nodes }}</td>
<td>{{ r.systemB_method }}</td>
<td>{{ r.benchmark_fom }}</td>
<td>{{ r.benchmark_system }}</td>
<td>{{ r.benchmark_nodes }}</td>
<td>
<a href="{{ url_for('results.show_estimated_result', filename=r.json_link) }}">json</a>
</td>
Expand Down
42 changes: 32 additions & 10 deletions result_server/utils/results_loader.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
from datetime import datetime
from utils.result_file import get_file_confidential_tags
from utils.otp_manager import get_affiliations
from flask import url_for

#--------------------------------------------------------------------------------------------------------------
def load_json_with_confidential_filter(json_file, directory, affs=None, public_only=True, authenticated=False):
Expand Down Expand Up @@ -70,6 +71,7 @@ def load_results_table(public_only=True, session_email=None, authenticated=False
gpus = data.get("gpus_per_node", "N/A")
cpu_cores = data.get("cpu_cores", "N/A")

# get timestamp and uuid
match = re.search(r"\d{8}_\d{6}", json_file)
timestamp = "Unknown"
if match:
Expand All @@ -96,8 +98,13 @@ def load_results_table(public_only=True, session_email=None, authenticated=False
"cpus": cpus,
"gpus": gpus,
"cpu_cores": cpu_cores,
"json_link": json_file,
"data_link": tgz_file,
# error handling to avoid strange link generation such as ../resuts//dev/results/result_...json
#"json_link": url_for("results.show_result", filename=json_file.split("results/")[-1].lstrip("/")),
#"data_link": url_for("results.show_result", filename=tgz_file.split("results/")[-1].lstrip("/")) if tgz_file else None,
"json_link": url_for("results.show_result", filename=json_file),
"data_link": url_for("results.show_result", filename=tgz_file) if tgz_file else None,
#"json_link": json_file,
#"data_link": tgz_file,
}
rows.append(row)

Expand Down Expand Up @@ -131,40 +138,55 @@ def load_estimated_results_table(public_only=True, session_email=None, authentic
if data is None:
continue

current = data.get("current system", {})
future = data.get("future system", {})
current = data.get("current_system", {})
future = data.get("future_system", {})

# get timestamp and uuid
match = re.search(r"\d{8}_\d{6}", json_file)
timestamp = "Unknown"
if match:
try:
ts = datetime.strptime(match.group(), "%Y%m%d_%H%M%S")
timestamp = ts.strftime("%Y-%m-%d %H:%M:%S")
except:
pass

uuid_match = re.search(r"[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}", json_file, re.IGNORECASE)
uid = uuid_match.group(0) if uuid_match else None

row = {
# "timestamp": timestamp,
"timestamp": timestamp,
"code": data.get("code", ""),
"exp": data.get("exp", ""),
"benchmark_system": data.get("benchmark_system", ""),
"benchmark_fom": data.get("benchmark_fom", ""),
"benchmark_nodes": data.get("benchmark_nodes", ""),
"systemA_fom": current.get("fom", ""),
"systemA_method": current.get("method", ""),
"systemA_system": current.get("system", ""),
"systemA_nodes": current.get("nodes", ""),
"systemA_method": current.get("method", ""),
"systemB_fom": future.get("fom", ""),
"systemB_method": future.get("method", ""),
"systemB_system": future.get("system", ""),
"systemB_nodes": future.get("nodes", ""),
"systemB_method": future.get("method", ""),
"performance_ratio": data.get("performance_ratio", ""),
"json_link": json_file,
}
rows.append(row)

columns = [
#("Timestamp", "timestamp"),
("Timestamp", "timestamp"),
("CODE", "code"),
("Exp", "exp"),
("Benchmark System", "benchmark_system"),
("Benchmark FOM", "benchmark_fom"),
("Benchmark Nodes", "benchmark_nodes"),
("System A FOM", "systemA_fom"),
("System A Method", "systemA_method"),
("System A Nodes", "systemA_nodes"),
("System A Method", "systemA_method"),
("System B FOM", "systemB_fom"),
("System B Method", "systemB_method"),
("System B Nodes", "systemB_nodes"),
("System B Method", "systemB_method"),
("Performance Ratio", "performance_ratio"),
("JSON", "json_link"),
]
Expand Down