Make messaging work
parent 5937e75de6
commit 4ef4db8781
@@ -315,8 +315,6 @@ function openSocket() {
 /* ---------------------------------------------------- */

-const parseMsg = (raw) => (typeof raw === "string" ? JSON.parse(raw) : raw);
-
 /* helper – does the row belong to me? */
 const isMe = (row) => row && row.user_id === localStorage.getItem(LS_USER_ID);
@@ -334,11 +332,11 @@ function openSocket() {
     /* inside openSocket() – unchanged except the helper */
     hub.on("fakebook.users.post", (raw) => {
-      store.dispatch(usersUpdated([parseMsg(raw)]));
+      store.dispatch(usersUpdated([JSON.parse(raw)]));
     });

     hub.on("fakebook.users.put", (raw) => {
-      const row = parseMsg(raw);
+      const row = JSON.parse(raw);

       store.dispatch(usersUpdated([row]));
@@ -352,6 +350,22 @@ function openSocket() {
     hub.on("fakebook.posts.put", (raw) => {
       store.dispatch(postsUpdated([JSON.parse(raw)]));
     });

+    /* helper – my uid so we filter only my messages */
+    const myUID = () => localStorage.getItem(LS_USER_ID);
+
+    /* ---------- new message created -------------------------------- */
+    hub.on("fakebook.message.post", (raw) => {
+      const msg = mapRestMessage(JSON.parse(raw)); // ← reuse the mapper
+
+      if (msg.recipient === myUID())
+        store.dispatch(incomingMessagesUpdated([msg]));
+
+      if (msg.sender === myUID())
+        store.dispatch(outgoingMessagesUpdated([msg]));
+    });
   })
   .catch((err) => {
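A note on the mapper: `mapRestMessage` is reused here but its definition is outside this diff. A minimal sketch of what such a mapper might look like, assuming the hub delivers the same column names as the POST body in `uploadMessage` below; every field name and the client-side `id` alias are assumptions, not the project's actual implementation:

const mapRestMessage = (row) => ({
  id: row.message_id,         // client-side alias (assumption)
  message_id: row.message_id, // keep the server primary key as well
  sender: row.sender,
  recipient: row.recipient,
  text: row.text ?? "",
  photoURL: row.photoURL ?? "",
  isPhoto: !!row.isPhoto,
  isRead: row.isRead,         // 0/1 flag, passed through as the server sends it
  timestamp: row.timestamp,   // set by the server on insert
});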
@@ -839,45 +853,86 @@ export function subscribeMessages(kind = "incoming") {
     cancelled = true;
   };
 }

-export function uploadMessage(msg) {
-  DB.messages.push({ ...msg, id: genId(), timestamp: nowISO(), isRead: false });
-  persist();
-  return Promise.resolve();
-}
-
-export function updateToBeRead(id) {
-  const m = DB.messages.find((m) => m.id === id);
-  if (m) {
-    m.isRead = true;
-    persist();
-  }
-  return Promise.resolve();
-}
-
-/* Ensure demo DB has at least one user */
-if (!DB.users.length) {
-  DB.users.push({
-    userID: genId(),
-    firstname: "Demo",
-    lastname: "User",
-    profilePictureURL: "fakebook-avatar.jpeg",
-    backgroundPictureURL: "background-server.jpg",
-    photos: [],
-    posts: [],
-    isOnline: 0,
-    isEmailVerified: true,
-    index: 0,
-  });
-  persist();
-}
+/* --------------------------------------------------------------
+   uploadMessage(msg)
+   msg = { recipient, text, photoURL? }
+   RETURNS { id }               // new message_id
+   -------------------------------------------------------------- */
+export async function uploadMessage(msg) {
+  const sender = localStorage.getItem(LS_USER_ID);
+  if (!sender) throw new Error("Not authenticated");
+
+  const message_id = genId(); // Magic table PK
+
+  const body = {
+    message_id,
+    sender,                        // who sends
+    recipient: msg.recipient,      // who receives
+    text: msg.text ?? "",
+    photoURL: msg.photoURL ?? "",
+    isPhoto: msg.photoURL ? 1 : 0,
+    isRead: 0,                     // unread on insert
+    /* timestamp is generated by the server */
+  };
+
+  await $fetch(`${API_BASE}/message`, {
+    method: "POST",
+    body: JSON.stringify(body),
+  });
+
+  /* No local dispatch necessary – the hub’s `message.post` event
+     will arrive in a few ms and update Redux for both parties. */
+  return { id: message_id };
+}
+
+/* ------------------------------------------------------------------
+   updateToBeRead(message_id)
+   – flips the flag in Redux immediately (optimistic)
+   – then notifies the server; no need to wait for a hub echo
+   ------------------------------------------------------------------ */
+export async function updateToBeRead(message_id) {
+  /* 1️⃣ optimistic local patch (affects whichever slice holds the row) */
+  const patch = [{ message_id, isRead: 1 }];
+  store.dispatch(incomingMessagesUpdated(patch));
+  store.dispatch(outgoingMessagesUpdated(patch));
+
+  /* 2️⃣ tell the server – ignore the hub echo, we no longer listen */
+  try {
+    await $fetch(`${API_BASE}/message`, {
+      method: "PUT",
+      body: JSON.stringify({ message_id, isRead: 1 }),
+    });
+  } catch (err) {
+    console.warn("[updateToBeRead] PUT failed:", err.message);
+    // Optional: revert optimistic change on error
+  }
+}
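For orientation, a usage sketch (not part of the diff) showing how UI code might call the two helpers above; the function names and arguments are illustrative only:

/* send a message – Redux is filled in later by the hub’s `message.post` echo */
async function handleSend(recipientId, draftText) {
  const { id } = await uploadMessage({ recipient: recipientId, text: draftText });
  console.log("message queued:", id);
}

/* opening a conversation – optimistically mark unread incoming rows as read */
async function handleOpenConversation(incomingRows) {
  for (const row of incomingRows.filter((m) => !m.isRead)) {
    await updateToBeRead(row.message_id ?? row.id);
  }
}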
@@ -1,13 +1,55 @@
 import { createSlice } from "@reduxjs/toolkit";

+/* Helper to keep the array ordered (oldest → newest).
+   If you don’t care about ordering you can remove the sort. */
+const byTimestampAsc = (a, b) =>
+  new Date(a.timestamp).valueOf() - new Date(b.timestamp).valueOf();
+
 export const incomingMessagesSlice = createSlice({
   name: "incomingMessages",
+
+  /* simple array of message objects */
   initialState: [],
+
   reducers: {
+    /* ------------------------------------------------------------------
+       incomingMessagesUpdated
+       – payload is ALWAYS an array (can be 1 or many rows)
+       – for every row:  if new   → append
+                         if exist → merge/patch
+       ------------------------------------------------------------------ */
     incomingMessagesUpdated: (state, action) => {
-      const updatedState = [];
-      action.payload.forEach((message) => updatedState.push(message));
-      return updatedState;
+      action.payload.forEach((msg) => {
+        /* support both id and message_id just in case */
+        const msgId = msg.id ?? msg.message_id;
+        const idx = state.findIndex((m) => (m.id ?? m.message_id) === msgId);
+
+        if (idx === -1) {
+          /* ① brand-new message → push */
+          state.push(msg);
+        } else {
+          /* ② already stored → shallow merge keeps other fields */
+          state[idx] = { ...state[idx], ...msg };
+        }
+      });
+
+      /* keep messages sorted for deterministic rendering */
+      state.sort(byTimestampAsc);
+
+      /* Immer lets us “mutate” state directly; no return needed */
     },
   },
 });
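To illustrate the append-vs-merge behaviour (demo only, not in the diff), the slice reducer can be exercised directly; the sample ids, uids and timestamps are made up:

const { reducer, actions } = incomingMessagesSlice;

let state = reducer(undefined, { type: "@@INIT" }); // → []

/* ① unknown id → the row is appended */
state = reducer(
  state,
  actions.incomingMessagesUpdated([
    { message_id: "m1", sender: "u2", recipient: "u1", text: "hi", isRead: 0, timestamp: "2024-01-01T10:00:00Z" },
  ])
);

/* ② same id again → the existing row is patched, not duplicated */
state = reducer(state, actions.incomingMessagesUpdated([{ message_id: "m1", isRead: 1 }]));
// state.length === 1 and state[0].isRead === 1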
@@ -1,13 +1,48 @@
 import { createSlice } from "@reduxjs/toolkit";

+/* Optional: keep the list ordered (oldest → newest). */
+const byTimestampAsc = (a, b) =>
+  new Date(a.timestamp).valueOf() - new Date(b.timestamp).valueOf();
+
 export const outgoingMessagesSlice = createSlice({
   name: "outgoingMessages",
-  initialState: [],
+  initialState: [], // simple array of message objects
+
   reducers: {
+    /* ------------------------------------------------------------------
+       outgoingMessagesUpdated
+       – payload is ALWAYS an array (1 or many rows)
+       – merge-in logic keeps existing rows and patches them if needed
+       ------------------------------------------------------------------ */
     outgoingMessagesUpdated: (state, action) => {
-      const updatedState = [];
-      action.payload.forEach((message) => updatedState.push(message));
-      return updatedState;
+      action.payload.forEach((msg) => {
+        const msgId = msg.id ?? msg.message_id; // tolerate either key
+        const idx = state.findIndex((m) => (m.id ?? m.message_id) === msgId);
+
+        if (idx === -1) {
+          /* ① brand-new row → append */
+          state.push(msg);
+        } else {
+          /* ② existing row → shallow merge (e.g. read flag flips) */
+          state[idx] = { ...state[idx], ...msg };
+        }
+      });
+
+      /* Keep deterministic order for rendering (remove if unnecessary). */
+      state.sort(byTimestampAsc);
+
+      /* Immer lets us mutate in place; no return statement required */
     },
   },
 });
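Finally, a wiring sketch (assumed, not part of the diff): both slices registered in one store, plus a selector that assembles a single conversation thread. The import paths and the selector name are assumptions:

import { configureStore } from "@reduxjs/toolkit";
import { incomingMessagesSlice } from "./incomingMessagesSlice"; // path assumed
import { outgoingMessagesSlice } from "./outgoingMessagesSlice"; // path assumed

const store = configureStore({
  reducer: {
    incomingMessages: incomingMessagesSlice.reducer,
    outgoingMessages: outgoingMessagesSlice.reducer,
  },
});

/* every message exchanged with `otherUid`, oldest → newest */
const selectThread = (state, otherUid) =>
  [...state.incomingMessages, ...state.outgoingMessages]
    .filter((m) => m.sender === otherUid || m.recipient === otherUid)
    .sort((a, b) => new Date(a.timestamp) - new Date(b.timestamp));

/* e.g. selectThread(store.getState(), someUserId) */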