1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
// For more information, see 42jerrykim.github.io.
#include <bits/stdc++.h>
using namespace std;
struct Fenwick {
int n;
vector<int> bit;
Fenwick(int n = 0) { init(n); }
void init(int n_) { n = n_; bit.assign(n + 1, 0); }
void add(int idx, int delta) {
for (; idx <= n; idx += idx & -idx) bit[idx] += delta;
}
int sumPrefix(int idx) const {
int s = 0;
for (; idx > 0; idx -= idx & -idx) s += bit[idx];
return s;
}
int sumRange(int l, int r) const {
if (l > r) return 0;
return sumPrefix(r) - sumPrefix(l - 1);
}
};
int main() {
ios::sync_with_stdio(false);
cin.tie(nullptr);
int N, Q;
if (!(cin >> N >> Q)) return 0;
// Rooted tree on vertices 1..N (root = 1). For each i in [2, N] the input
// gives i's parent, so the edge (parent[i], i) is identified with vertex i.
vector<int> parent(N + 1, 0);
vector<vector<int>> children(N + 1);
for (int i = 2; i <= N; ++i) {
int a; cin >> a;
parent[i] = a;
children[a].push_back(i);
}
// HLD preprocessing
vector<int> depth(N + 1, 0), sz(N + 1, 0), heavy(N + 1, 0);
{
// iterative DFS to get order and depths
// `order` is filled breadth-first, so every parent appears before its
// children; scanning it in reverse therefore visits children first.
vector<int> order; order.reserve(N);
order.push_back(1);
for (size_t i = 0; i < order.size(); ++i) {
int u = order[i];
for (int v : children[u]) {
depth[v] = depth[u] + 1;
order.push_back(v);
}
}
// post-order for sizes and heavy
// heavy[u] = child with the largest subtree (0 for a leaf); ties are
// broken by the first such child in `children[u]`.
for (int i = (int)order.size() - 1; i >= 0; --i) {
int u = order[i];
sz[u] = 1;
int maxSize = 0, heavyChild = 0;
for (int v : children[u]) {
sz[u] += sz[v];
if (sz[v] > maxSize) {
maxSize = sz[v];
heavyChild = v;
}
}
heavy[u] = heavyChild;
}
}
// head[u] = topmost vertex of u's heavy chain; pos[u] = 1-based index in the
// linearization. Vertices of one heavy chain get consecutive positions, so a
// chain segment can be summed with a single Fenwick range query.
vector<int> head(N + 1, 0), pos(N + 1, 0);
int curPos = 1;
{
// iterative decompose
vector<pair<int,int>> st;
st.reserve(N);
st.emplace_back(1, 1); // (node, head)
while (!st.empty()) {
auto [u, h] = st.back(); st.pop_back();
int cur = u;
// Walk the heavy chain starting at u, assigning consecutive positions;
// each light child starts a new chain and is stacked for later.
while (cur != 0) {
head[cur] = h;
pos[cur] = curPos++;
for (int v : children[cur]) {
if (v == heavy[cur]) continue;
st.emplace_back(v, v);
}
cur = heavy[cur];
}
}
}
Fenwick fw(N + 2);
vector<char> cut(N + 1, 0); // cut[x] = 1 if edge (parent[x], x) removed
// Number of removed edges on the tree path u..v. The endpoints are still
// connected in the current forest iff this count is 0.
auto pathSum = [&](int u, int v) -> int {
int res = 0;
while (head[u] != head[v]) {
// Always lift the vertex whose chain head is deeper.
if (depth[head[u]] < depth[head[v]]) swap(u, v);
res += fw.sumRange(pos[head[u]], pos[u]);
u = parent[head[u]];
}
if (depth[u] < depth[v]) swap(u, v);
// exclude LCA node itself because edges are mapped to deeper nodes' positions
res += fw.sumRange(pos[v] + 1, pos[u]);
return res;
};
// Each query (b, c, d): report whether b and c are connected; if d == 1,
// remove the edge above b when they were connected, else the edge above c.
for (int i = 0; i < Q; ++i) {
int b, c, d;
cin >> b >> c >> d;
bool connected = (pathSum(b, c) == 0);
cout << (connected ? "YES\n" : "NO\n");
if (d == 1) {
int x = connected ? b : c;
// The root has no parent edge; re-cutting an already-cut edge is a no-op.
if (x != 1 && !cut[x]) {
cut[x] = 1;
fw.add(pos[x], 1);
}
}
}
return 0;
}