You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
I am developing an app with the Microsoft Graph API for Microsoft Teams, and I'm trying to get the mentions the current user has received. My issue is that I am using the Search API, and the webUrl it returns is broken when the mention is in a reply rather than in a thread-root message. How can I fix that?
Also, the Search API doesn't return the replyToId property.
Please help me and tell me what to do.
Find below my code
async def teams_get_my_mentions(
    limit: int = 20,
    days_back: int = 7,
    cutoff_day: Optional[str] = None,
) -> str:
    """
    Get messages that mention the current user, with resolved team/channel/chat
    names, proper paging and an optional daily cut-off.

    Strategy (two passes, because of Search API limitations):
      1. Query ``POST /search/query`` (entityType ``chatMessage``) with a KQL
         ``mentions:<userId>`` filter.  Search returns ``webUrl``/``webLink``
         but never populates ``replyToId``, and the link it returns for a
         *reply* is broken — it lacks the ``parentMessageId`` query parameter
         the Teams client needs to locate a reply inside its thread.
      2. For channel messages, call
         ``GET /teams/{team}/channels/{channel}/messages/{id}`` (via
         ``_resolve_channel_message_deeplink``) to obtain ``replyToId`` and
         rebuild a deep link shaped like the one the Teams UI copies.

    Args:
        limit: Soft cap on returned mentions; the per-request page size is
            additionally capped at 25 (Search API page maximum).
        days_back: Relative window, used only when ``cutoff_day`` is not given.
        cutoff_day: ``"YYYY-MM-DD"`` (UTC midnight).  If provided, only results
            with ``createdDateTime`` >= that day are returned.

    Returns:
        A JSON string with the mentions, resolved names, search parameters and
        processing statistics, or ``{"error": ..., "details": ...}`` on failure.
    """
    # File-level imports are outside this chunk; bring timezone in locally so
    # we can avoid the deprecated datetime.utcnow().
    from datetime import timezone

    def _parse_graph_dt(ts: str) -> datetime:
        """Parse a Graph ISO-8601 timestamp; fromisoformat (pre-3.11) rejects a literal 'Z'."""
        return datetime.fromisoformat(
            ts.replace("Z", "+00:00") if ts.endswith("Z") else ts
        )

    try:
        logger.info("Getting messages that mention current user (Search + GetMessage)")
        apply_oauth_config()
        auth = await get_sharepoint_auth()

        page_size = min(max(1, limit), 25)  # Graph caps message/search pages at 25
        hard_return_cap = max(limit, 100)

        # Date filter: an explicit cut-off day wins over the relative window.
        if cutoff_day:
            from_date = cutoff_day
        else:
            from_date = (
                datetime.now(timezone.utc) - timedelta(days=days_back)
            ).strftime("%Y-%m-%d")
        # Hoisted: parsed once instead of once per page and once per hit.
        cutoff_dt = datetime.fromisoformat(from_date + "T00:00:00+00:00")

        tenant_id = os.getenv("AZURE_TENANT_ID")

        async with await _create_http_client() as client:
            # -------- Get current user (needed for the mentions: KQL filter) --------
            user_response = await client.get(
                "https://graph.microsoft.com/v1.0/me",
                headers=auth.headers
            )
            current_user = await _handle_graph_response(user_response, "Get current user for mentions")
            user_id = current_user.get("id", "")
            user_name = current_user.get("displayName", "")
            if not user_id:
                return json.dumps({
                    "error": "Could not retrieve current user information",
                    "details": "Unable to get user ID for mention search"
                })

            # The KQL `mentions:` property stores the GUID without hyphens.
            user_id_no_hyphens = user_id.replace("-", "")

            # -------- Build KQL query --------
            kql = f"mentions:{user_id_no_hyphens} AND sent>{from_date}"
            logger.info(f"KQL for mentions: {kql}")
            search_payload = {
                "requests": [{
                    "entityTypes": ["chatMessage"],
                    "query": {"queryString": kql},
                    "from": 0,
                    "size": page_size,
                    "enableTopResults": False,
                    # NOTE: replyToId is requested but the Search API does not
                    # populate it for chatMessage; it is recovered in the second
                    # pass.  (Bug fix: a missing comma previously fused
                    # "replyToId" and "createdDateTime" into one bogus field,
                    # so createdDateTime was never requested.)
                    "fields": [
                        "id",
                        "webUrl",
                        "webLink",
                        "replyToId",
                        "createdDateTime",
                        "lastModifiedDateTime",
                        "from",
                        "channelIdentity",
                        "chatId",
                        "importance",
                        "bodyPreview",
                        "subject"
                    ]
                }]
            }

            # -------- Paged Search --------
            all_hits_raw: List[dict] = []
            from_idx = 0
            while True:
                search_payload["requests"][0]["from"] = from_idx
                search_payload["requests"][0]["size"] = page_size
                resp = await client.post(
                    "https://graph.microsoft.com/v1.0/search/query",
                    headers=auth.headers,
                    json=search_payload
                )
                data = await _handle_graph_response(
                    resp,
                    f"Search mentions (from={from_idx}, size={page_size})"
                )
                page_hits: List[dict] = []
                for response_item in data.get("value", []):
                    for container in response_item.get("hitsContainers", []):
                        page_hits.extend(container.get("hits", []))
                if not page_hits:
                    break
                all_hits_raw.extend(page_hits)

                # Early stop by day boundary: once the last hit of this page is
                # older than the cut-off, later pages cannot hold newer results
                # (assumes newest-first ordering — TODO confirm for this query).
                try:
                    last_created_str = (
                        page_hits[-1].get("resource", {}).get("createdDateTime")
                    )
                    if last_created_str and _parse_graph_dt(last_created_str) < cutoff_dt:
                        break
                except Exception:
                    pass  # best effort — a malformed date must not abort paging

                if len(page_hits) < page_size:
                    break  # short page == last page
                from_idx += page_size
                if len(all_hits_raw) >= max(hard_return_cap, page_size):
                    break  # collected enough; final slice happens below
            logger.info(f"📊 Found {len(all_hits_raw)} total mention hits from search")

            # -------- Build mentions (first pass); collect name-lookup keys --------
            all_mentions = []
            teams_to_lookup = set()  # {(team_id, channel_id)}
            chats_to_lookup = set()  # {chat_id}
            stats = {
                "total_processed": 0,
                "channel_messages": 0,
                "chat_messages": 0,
                "weburl_present": 0,
                "weburl_missing": 0,
                "fallback_links": 0,
                "deeplink_resolved": 0,
                "deeplink_errors": 0,
            }
            for hit in all_hits_raw:
                resource = hit.get("resource", {})
                stats["total_processed"] += 1

                # Per-hit day filter (paging above only stops on page borders).
                created_str = resource.get("createdDateTime")
                if created_str:
                    try:
                        if _parse_graph_dt(created_str) < cutoff_dt:
                            continue
                    except Exception:
                        pass  # keep hits whose timestamp cannot be parsed

                context = "unknown"
                location_info: Dict[str, Optional[str]] = {}

                web_url = resource.get("webUrl") or resource.get("webLink")
                if web_url:
                    stats["weburl_present"] += 1
                    logger.debug(f"✅ Using webUrl/webLink from Search API: {web_url}")
                    teams_link = web_url
                else:
                    stats["weburl_missing"] += 1
                    logger.warning(f"⚠️ webUrl/webLink missing for message {resource.get('id')}")
                    teams_link = None

                # Channel message
                if resource.get("channelIdentity"):
                    context = "channel"
                    stats["channel_messages"] += 1
                    channel_identity = resource.get("channelIdentity", {})
                    team_id = channel_identity.get("teamId")
                    channel_id = channel_identity.get("channelId")
                    message_id = resource.get("id")
                    location_info = {
                        "type": "channel",
                        "team_id": team_id,
                        "channel_id": channel_id,
                    }
                    if team_id and channel_id:
                        teams_to_lookup.add((team_id, channel_id))
                    # Fallback manual link when Search returned no webUrl/webLink.
                    # Channel message ids look like epoch-ms timestamps (see the
                    # working sample URL), so the id doubles as createdTime.
                    # parentMessageId is provisionally the message itself; the
                    # second pass corrects it to replyToId for replies.
                    if not teams_link and team_id and channel_id and message_id:
                        stats["fallback_links"] += 1
                        logger.info(f"🔧 Constructing fallback link for message {message_id}")
                        if tenant_id:
                            teams_link = (
                                f"https://teams.microsoft.com/l/message/{channel_id}/{message_id}"
                                f"?tenantId={tenant_id}&groupId={team_id}"
                                f"&parentMessageId={message_id}"
                                f"&createdTime={message_id}"
                            )
                        else:
                            teams_link = (
                                f"https://teams.microsoft.com/l/message/{channel_id}/{message_id}"
                                f"?groupId={team_id}"
                                f"&parentMessageId={message_id}"
                                f"&createdTime={message_id}"
                            )
                # Chat (1:1 / group) message
                elif resource.get("chatId"):
                    context = "chat"
                    stats["chat_messages"] += 1
                    chat_id = resource.get("chatId")
                    location_info = {"type": "chat", "chat_id": chat_id}
                    if chat_id:
                        chats_to_lookup.add(chat_id)

                sender = resource.get("from")
                all_mentions.append({
                    "id": resource.get("id"),
                    "created_datetime": resource.get("createdDateTime"),
                    "context": context,
                    "location": location_info,
                    "summary": hit.get("summary", ""),
                    "content": resource.get("bodyPreview", "") or resource.get("subject", ""),
                    "from": {
                        "display_name": sender.get("emailAddress", {}).get("name")
                        or sender.get("user", {}).get("displayName"),
                        "email": sender.get("emailAddress", {}).get("address")
                        or sender.get("user", {}).get("userPrincipalName")
                    } if sender else None,
                    "importance": resource.get("importance"),
                    "web_url": web_url,  # raw webUrl/webLink from Search
                    "teams_link": teams_link,  # overwritten for channel msgs in second pass
                    "reply_to_id": resource.get("replyToId"),  # Search never fills this
                    "search_score": hit.get("rank", 0),
                    "mention_type": "specific_user_mention",
                    "search_method": "kql_mentions_userid_search_plus_get_message",
                })

            logger.info("=" * 80)
            logger.info("📊 MENTION PROCESSING STATISTICS (after first pass):")
            logger.info(f" Total messages processed: {stats['total_processed']}")
            logger.info(f" Channel messages: {stats['channel_messages']}")
            logger.info(f" Chat messages: {stats['chat_messages']}")
            logger.info(f" ✅ webUrl/webLink present: {stats['weburl_present']}")
            logger.info(f" ⚠️ webUrl/webLink missing: {stats['weburl_missing']}")
            logger.info(f" 🔧 Fallback links created: {stats['fallback_links']}")
            logger.info("=" * 80)

            # -------- Resolve Team / Channel / Chat names --------
            team_names: Dict[str, str] = {}
            channel_names: Dict[str, str] = {}
            chat_names: Dict[str, str] = {}

            # Team & Channel display names (best effort; failures fall back to
            # "Unknown ..." defaults at injection time below).
            for team_id, channel_id in teams_to_lookup:
                try:
                    if team_id not in team_names:
                        team_response = await client.get(
                            f"https://graph.microsoft.com/v1.0/teams/{team_id}",
                            headers=auth.headers
                        )
                        if team_response.status_code == 200:
                            team_names[team_id] = team_response.json().get("displayName", "Unknown Team")
                        else:
                            team_names[team_id] = "Unknown Team"
                    channel_key = f"{team_id}_{channel_id}"
                    if channel_key not in channel_names:
                        channel_response = await client.get(
                            f"https://graph.microsoft.com/v1.0/teams/{team_id}/channels/{channel_id}",
                            headers=auth.headers
                        )
                        if channel_response.status_code == 200:
                            channel_names[channel_key] = channel_response.json().get("displayName", "Unknown Channel")
                        else:
                            channel_names[channel_key] = "Unknown Channel"
                except Exception as e:
                    logger.warning(f"Failed to resolve team/channel names: {e}")

            # Chat friendly names: topic if set, otherwise first few member names.
            for chat_id in chats_to_lookup:
                try:
                    chat_response = await client.get(
                        f"https://graph.microsoft.com/v1.0/chats/{chat_id}",
                        headers=auth.headers
                    )
                    if chat_response.status_code == 200:
                        chat_data = chat_response.json()
                        chat_topic = chat_data.get("topic")
                        if chat_topic:
                            chat_names[chat_id] = chat_topic
                        else:
                            members_response = await client.get(
                                f"https://graph.microsoft.com/v1.0/chats/{chat_id}/members",
                                headers=auth.headers
                            )
                            if members_response.status_code == 200:
                                members_data = members_response.json()
                                member_names = [
                                    m.get("displayName")
                                    for m in members_data.get("value", [])[:3]
                                    if m.get("displayName")
                                ]
                                chat_names[chat_id] = (
                                    f"Chat with {', '.join(member_names)}"
                                    if member_names else "Private Chat"
                                )
                            else:
                                chat_names[chat_id] = "Private Chat"
                    else:
                        chat_names[chat_id] = "Unknown Chat"
                except Exception as e:
                    logger.warning(f"Failed to resolve chat name: {e}")
                    chat_names[chat_id] = "Unknown Chat"

            # Inject resolved names into each mention's location dict.
            for mention in all_mentions:
                loc = mention.get("location", {})
                if not isinstance(loc, dict):
                    continue
                if loc.get("type") == "channel":
                    tid = loc.get("team_id")
                    cid = loc.get("channel_id")
                    loc["team_name"] = team_names.get(tid, "Unknown Team")
                    loc["channel_name"] = channel_names.get(f"{tid}_{cid}", "Unknown Channel")
                elif loc.get("type") == "chat":
                    cid = loc.get("chat_id")
                    loc["chat_name"] = chat_names.get(cid, "Unknown Chat")

            # -------- SECOND PASS: fix channel deeplinks using GET message ----
            # This is what repairs reply links: Search's webUrl lacks
            # parentMessageId, so the Teams client cannot open a reply from it.
            debug_first_channel = True
            for mention in all_mentions:
                loc = mention.get("location", {})
                if not isinstance(loc, dict):
                    continue
                if loc.get("type") != "channel":
                    continue
                team_id = loc.get("team_id")
                channel_id = loc.get("channel_id")
                message_id = mention.get("id")
                if not (team_id and channel_id and message_id):
                    continue
                team_name = loc.get("team_name")
                channel_name = loc.get("channel_name")
                if debug_first_channel:
                    logger.info(
                        "[FIRST_CHANNEL_MENTION] Resolving deeplink for message_id=%s, team=%s, channel=%s",
                        message_id,
                        team_id,
                        channel_id,
                    )
                    debug_first_channel = False
                try:
                    details = await _resolve_channel_message_deeplink(
                        client,
                        auth.headers,
                        team_id=team_id,
                        channel_id=channel_id,
                        message_id=message_id,
                        tenant_id=tenant_id,
                        team_name=team_name,
                        channel_name=channel_name,
                    )
                    mention["teams_link"] = details["deeplink"]
                    loc["reply_to_id"] = details["reply_to_id"]
                    loc["parent_message_id"] = details["parent_message_id"]
                    loc["is_reply"] = details["is_reply"]
                    stats["deeplink_resolved"] += 1
                except Exception as e:
                    stats["deeplink_errors"] += 1
                    logger.warning(
                        f"Failed to resolve deeplink for {team_id}/{channel_id}/{message_id}: {e}"
                    )

            logger.info("=" * 80)
            logger.info("📊 FINAL MENTION STATISTICS:")
            logger.info(f" Total messages processed: {stats['total_processed']}")
            logger.info(f" Channel messages: {stats['channel_messages']}")
            logger.info(f" Chat messages: {stats['chat_messages']}")
            logger.info(f" ✅ webUrl/webLink present: {stats['weburl_present']}")
            logger.info(f" ⚠️ webUrl/webLink missing: {stats['weburl_missing']}")
            logger.info(f" 🔧 Fallback links created: {stats['fallback_links']}")
            logger.info(f" 🔁 Deeplinks resolved via GET /messages: {stats['deeplink_resolved']}")
            logger.info(f" ⚠️ Deeplink resolution errors: {stats['deeplink_errors']}")
            logger.info("=" * 80)

            # -------- Sort (newest first) and prepare result --------
            all_mentions.sort(key=lambda x: x.get("created_datetime", "") or "", reverse=True)
            to_date = datetime.now(timezone.utc).strftime("%Y-%m-%d")
            date_range_desc = (
                f"{from_date} to {to_date}" if from_date != to_date else f"today ({to_date})"
            )
            result = {
                "mentioned_user": {
                    "id": user_id,
                    "id_no_hyphens": user_id_no_hyphens,
                    "display_name": user_name,
                    "email": current_user.get("userPrincipalName"),
                },
                "total_mentions": len(all_mentions),
                "mentions": all_mentions[:hard_return_cap],
                "search_parameters": {
                    "page_size": page_size,
                    "hard_return_cap": hard_return_cap,
                    "days_back": days_back,
                    "cutoff_day": cutoff_day,
                    "date_range": date_range_desc,
                    "from_date": from_date,
                    "to_date": to_date,
                    "kql_query": kql,
                    "search_method": "kql_with_weburl_plus_get_message",
                    "scope": "all_contexts_channels_and_chats",
                },
                "statistics": stats,
            }
            logger.info(f"✅ Found {len(all_mentions)} mentions (with deeplink resolution pass)")
            return json.dumps(result, indent=2)
    except Exception as e:
        # Top-level boundary: log with traceback and return a JSON error payload.
        logger.error(f"❌ Error in teams_get_my_mentions: {e}")
        logger.exception("Full stack trace:")
        return json.dumps({
            "error": "Failed to get mentions",
            "details": str(e),
        })
reacted with thumbs up emoji reacted with thumbs down emoji reacted with laugh emoji reacted with hooray emoji reacted with confused emoji reacted with heart emoji reacted with rocket emoji reacted with eyes emoji
Uh oh!
There was an error while loading. Please reload this page.
Uh oh!
There was an error while loading. Please reload this page.
-
i am developing an app with the graph api for Microsoft Teams, and I'm trying to get the mentions the user has. My issue is that I am using the search api, and when returning the webURL, if the mention is in a reply, not a thread, the URL is broken. How to fix that
Here is a sample of URL copied from MS Teams UI that is working :
https://teams.microsoft.com/l/message/19:xQZgFPibvWwT6QGUn7Z2P6JWp-juNerlaPajkqpv9h01@thread.tacv2/1762787826440?tenantId=c515f6b1-812f-4d6c-9542-d914e95b3df1&groupId=4f74fdc3-43c5-4ba3-ad56-36d00d299426&parentMessageId=1762771615042&teamName=Digital%20Transformation&channelName=General&createdTime=1762787826440
And here is the webURL returned from the search api :
https://teams.microsoft.com/l/message/19%3axQZgFPibvWwT6QGUn7Z2P6JWp-juNerlaPajkqpv9h01%40thread.tacv2/1762787826440
And the search api doesn't return the replyToID property
Please help me and tell me what to do.
Find below my code
`async def teams_get_my_mentions(
limit: int = 20,
days_back: int = 7,
cutoff_day: Optional[str] = None,
) -> str:
"""
Get messages that mention the current user with resolved team/channel/chat names,
using proper paging and an optional daily cut-off.
`
Beta Was this translation helpful? Give feedback.
All reactions